diff --git a/config.yaml b/config.yaml index 6dab79014..8d367fbaa 100644 --- a/config.yaml +++ b/config.yaml @@ -187,6 +187,7 @@ packages: - meet:v2 - memcache:v1 - metastore:v1 + - metastore:v2 - migrationcenter:v1 - ml:v1 # https://cloud.google.com/ml-engine/ - monitoring:v3 # https://cloud.google.com/monitoring/api/ @@ -211,6 +212,7 @@ packages: - osconfig:v1 - oslogin:v1 # https://cloud.google.com/compute/docs/oslogin/rest/ - pagespeedonline:v5 # https://developers.google.com/speed/docs/insights/about + - parallelstore:v1 - paymentsresellersubscription:v1 - people:v1 # https://developers.google.com/people/ - places:v1 @@ -242,6 +244,7 @@ packages: - searchconsole:v1 # https://developers.google.com/webmaster-tools/search-console-api/ - secretmanager:v1 - securitycenter:v1 + - securityposture:v1 - serviceconsumermanagement:v1 # https://cloud.google.com/service-consumer-management/docs/overview - servicecontrol:v1 # https://cloud.google.com/service-control/ - servicecontrol:v2 @@ -431,6 +434,8 @@ skipped_apis: - merchantapi:reviews_v1beta - metastore:v1alpha # v1beta included - metastore:v1beta +- metastore:v2alpha +- metastore:v2beta - migrationcenter:v1alpha1 - monitoring:v1 # v3 included - netapp:v1beta1 @@ -441,8 +446,10 @@ skipped_apis: - ondemandscanning:v1beta1 - osconfig:v1alpha - osconfig:v1beta # v1 included +- osconfig:v2beta - oslogin:v1alpha # v1 available - oslogin:v1beta # v1 available +- parallelstore:v1beta - playdeveloperreporting:v1alpha1 - playdeveloperreporting:v1beta1 - playgrouping:v1alpha1 diff --git a/discovery-artifact-manager b/discovery-artifact-manager index 79623101c..c2e52eef2 160000 --- a/discovery-artifact-manager +++ b/discovery-artifact-manager @@ -1 +1 @@ -Subproject commit 79623101ce429c6cccd0a48ec767c4e584ce7f12 +Subproject commit c2e52eef2461a9c51d77fe4f2f38e5c85f7c3840 diff --git a/discovery/googleapis/accesscontextmanager__v1.json b/discovery/googleapis/accesscontextmanager__v1.json index b144f42a9..29975ae5f 100644 --- a/discovery/googleapis/accesscontextmanager__v1.json +++ b/discovery/googleapis/accesscontextmanager__v1.json @@ -25,7 +25,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20241014", + "revision": "20241209", "rootUrl": "https://accesscontextmanager.googleapis.com/", "servicePath": "", "title": "Access Context Manager API", @@ -962,7 +962,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. 
On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "accesscontextmanager.operations.cancel", @@ -1210,7 +1210,7 @@ ], "parameters": { "append": { - "description": "Optional. This field controls whether or not certain repeated settings in the update request overwrite or append to existing settings on the binding. If true, then append. Otherwise overwrite. So far, only scoped_access_settings supports appending. Global access_levels, dry_run_access_levels, and reauth_settings are not compatible with append functionality, and the request will return an error if append=true when these settings are in the update_mask. The request will also return an error if append=true when \"scoped_access_settings\" is not set in the update_mask.", + "description": "Optional. This field controls whether or not certain repeated settings in the update request overwrite or append to existing settings on the binding. If true, then append. Otherwise overwrite. So far, only scoped_access_settings with reauth_settings supports appending. Global access_levels, access_levels in scoped_access_settings, dry_run_access_levels, reauth_settings, and session_settings are not compatible with append functionality, and the request will return an error if append=true when these settings are in the update_mask. The request will also return an error if append=true when \"scoped_access_settings\" is not set in the update_mask.", "location": "query", "type": "boolean" }, @@ -1222,7 +1222,7 @@ "type": "string" }, "updateMask": { - "description": "Required. Only the fields specified in this mask are updated. Because name and group_key cannot be changed, update_mask is required and may only contain the following fields: `access_levels`, `dry_run_access_levels`, `reauth_settings`, `scoped_access_settings`. update_mask { paths: \"access_levels\" }", + "description": "Required. Only the fields specified in this mask are updated. Because name and group_key cannot be changed, update_mask is required and may only contain the following fields: `access_levels`, `dry_run_access_levels`, `reauth_settings` `session_settings`, `scoped_access_settings`. update_mask { paths: \"access_levels\" }", "format": "google-fieldmask", "location": "query", "type": "string" @@ -1386,9 +1386,9 @@ }, "type": "array" }, - "reauthSettings": { - "$ref": "ReauthSettings", - "description": "Optional. Reauth settings applied to user access on a given AccessScope." + "sessionSettings": { + "$ref": "SessionSettings", + "description": "Optional. Session settings applied to user access on a given AccessScope." } }, "type": "object" @@ -1809,6 +1809,10 @@ "egressTo": { "$ref": "EgressTo", "description": "Defines the conditions on the ApiOperation and destination resources that cause this EgressPolicy to apply." + }, + "title": { + "description": "Optional. Human-readable title for the egress rule. The title must be unique within the perimeter and can not exceed 100 characters. Within the access policy, the combined length of all rule titles must not exceed 240,000 characters.", + "type": "string" } }, "type": "object" @@ -1907,10 +1911,6 @@ "description": "Immutable. Assigned by the server during creation. 
The last segment has an arbitrary length and has only URI unreserved characters (as defined by [RFC 3986 Section 2.3](https://tools.ietf.org/html/rfc3986#section-2.3)). Should not be specified by the client during creation. Example: \"organizations/256/gcpUserAccessBindings/b3-BhcX_Ud5N\"", "type": "string" }, - "reauthSettings": { - "$ref": "ReauthSettings", - "description": "Optional. GCSL policy for the group key." - }, "restrictedClientApplications": { "description": "Optional. A list of applications that are subject to this binding's restrictions. If the list is empty, the binding restrictions will universally apply to all applications.", "items": { @@ -1924,6 +1924,10 @@ "$ref": "ScopedAccessSettings" }, "type": "array" + }, + "sessionSettings": { + "$ref": "SessionSettings", + "description": "Optional. The Google Cloud session length (GCSL) policy for the group key." } }, "type": "object" @@ -2005,6 +2009,10 @@ "ingressTo": { "$ref": "IngressTo", "description": "Defines the conditions on the ApiOperation and request destination that cause this IngressPolicy to apply." + }, + "title": { + "description": "Optional. Human-readable title for the ingress rule. The title must be unique within the perimeter and can not exceed 100 characters. Within the access policy, the combined length of all rule titles must not exceed 240,000 characters.", + "type": "string" } }, "type": "object" @@ -2289,47 +2297,6 @@ }, "type": "object" }, - "ReauthSettings": { - "description": "Stores settings related to Google Cloud Session Length including session duration, the type of challenge (i.e. method) they should face when their session expires, and other related settings.", - "id": "ReauthSettings", - "properties": { - "maxInactivity": { - "description": "Optional. How long a user is allowed to take between actions before a new access token must be issued. Presently only set for Cloud Apps.", - "format": "google-duration", - "type": "string" - }, - "reauthMethod": { - "description": "Optional. Reauth method when users GCP session is up.", - "enum": [ - "REAUTH_METHOD_UNSPECIFIED", - "LOGIN", - "SECURITY_KEY", - "PASSWORD" - ], - "enumDescriptions": [ - "If method undefined in API, we will use LOGIN by default.", - "The user will prompted to perform regular login. Users who are enrolled for two-step verification and haven't chosen to \"Remember this computer\" will be prompted for their second factor.", - "The user will be prompted to autheticate using their security key. If no security key has been configured, then we will fallback to LOGIN.", - "The user will be prompted for their password." - ], - "type": "string" - }, - "sessionLength": { - "description": "Optional. The session length. Setting this field to zero is equal to disabling. Reauth. Also can set infinite session by flipping the enabled bit to false below. If use_oidc_max_age is true, for OIDC apps, the session length will be the minimum of this field and OIDC max_age param.", - "format": "google-duration", - "type": "string" - }, - "sessionLengthEnabled": { - "description": "Optional. Big red button to turn off GCSL. When false, all fields set above will be disregarded and the session length is basically infinite.", - "type": "boolean" - }, - "useOidcMaxAge": { - "description": "Optional. Only useful for OIDC apps. When false, the OIDC max_age param, if passed in the authentication request will be ignored. 
When true, the re-auth period will be the minimum of the session_length field and the max_age OIDC param.", - "type": "boolean" - } - }, - "type": "object" - }, "ReplaceAccessLevelsRequest": { "description": "A request to replace all existing Access Levels in an Access Policy with the Access Levels provided. This is done atomically.", "id": "ReplaceAccessLevelsRequest", @@ -2421,6 +2388,10 @@ "description": "Description of the `ServicePerimeter` and its use. Does not affect behavior.", "type": "string" }, + "etag": { + "description": "Optional. An opaque identifier for the current version of the `ServicePerimeter`. This identifier does not follow any specific format. If an etag is not provided, the operation will be performed as if a valid etag is provided.", + "type": "string" + }, "name": { "description": "Identifier. Resource name for the `ServicePerimeter`. Format: `accessPolicies/{access_policy}/servicePerimeters/{service_perimeter}`. The `service_perimeter` component must begin with a letter, followed by alphanumeric characters or `_`. After you create a `ServicePerimeter`, you cannot change its `name`.", "type": "string" @@ -2502,6 +2473,47 @@ }, "type": "object" }, + "SessionSettings": { + "description": "Stores settings related to Google Cloud Session Length including session duration, the type of challenge (i.e. method) they should face when their session expires, and other related settings.", + "id": "SessionSettings", + "properties": { + "maxInactivity": { + "description": "Optional. How long a user is allowed to take between actions before a new access token must be issued. Only set for Google Cloud apps.", + "format": "google-duration", + "type": "string" + }, + "sessionLength": { + "description": "Optional. The session length. Setting this field to zero is equal to disabling session. Also can set infinite session by flipping the enabled bit to false below. If use_oidc_max_age is true, for OIDC apps, the session length will be the minimum of this field and OIDC max_age param.", + "format": "google-duration", + "type": "string" + }, + "sessionLengthEnabled": { + "description": "Optional. This field enables or disables Google Cloud session length. When false, all fields set above will be disregarded and the session length is basically infinite.", + "type": "boolean" + }, + "sessionReauthMethod": { + "description": "Optional. Session method when user's Google Cloud session is up.", + "enum": [ + "SESSION_REAUTH_METHOD_UNSPECIFIED", + "LOGIN", + "SECURITY_KEY", + "PASSWORD" + ], + "enumDescriptions": [ + "If method is undefined in the API, LOGIN will be used by default.", + "The user will be prompted to perform regular login. Users who are enrolled for two-step verification and haven't chosen \"Remember this computer\" will be prompted for their second factor.", + "The user will be prompted to authenticate using their security key. If no security key has been configured, then authentication will fallback to LOGIN.", + "The user will be prompted for their password." + ], + "type": "string" + }, + "useOidcMaxAge": { + "description": "Optional. Only useful for OIDC apps. When false, the OIDC max_age param, if passed in the authentication request will be ignored. 
When true, the re-auth period will be the minimum of the session_length field and the max_age OIDC param.", + "type": "boolean" + } + }, + "type": "object" + }, "SetIamPolicyRequest": { "description": "Request message for `SetIamPolicy` method.", "id": "SetIamPolicyRequest", diff --git a/discovery/googleapis/addressvalidation__v1.json b/discovery/googleapis/addressvalidation__v1.json index 6120f9a2d..003e975fc 100644 --- a/discovery/googleapis/addressvalidation__v1.json +++ b/discovery/googleapis/addressvalidation__v1.json @@ -28,7 +28,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20240703", + "revision": "20241120", "rootUrl": "https://addressvalidation.googleapis.com/", "servicePath": "", "title": "Address Validation API", @@ -183,7 +183,7 @@ "type": "array" }, "formattedAddress": { - "description": "The post-processed address, formatted as a single-line address following the address formatting rules of the region where the address is located.", + "description": "The post-processed address, formatted as a single-line address following the address formatting rules of the region where the address is located. Note: the format of this address may not match the format of the address in the `postal_address` field. For example, the `postal_address` always represents the country as a 2 letter `region_code`, such as \"US\" or \"NZ\". By contrast, this field uses a longer form of the country name, such as \"USA\" or \"New Zealand\".", "type": "string" }, "missingComponentTypes": { @@ -205,7 +205,7 @@ "type": "array" }, "unresolvedTokens": { - "description": "Any tokens in the input that could not be resolved. This might be an input that was not recognized as a valid part of an address (for example in an input like \"123235253253 Main St, San Francisco, CA, 94105\", the unresolved tokens may look like `[\"123235253253\"]` since that does not look like a valid street number.", + "description": "Any tokens in the input that could not be resolved. This might be an input that was not recognized as a valid part of an address. For example, for an input such as \"Parcel 0000123123 & 0000456456 Str # Guthrie Center IA 50115 US\", the unresolved tokens might look like `[\"Parcel\", \"0000123123\", \"&\", \"0000456456\"]`.", "items": { "type": "string" }, @@ -771,18 +771,18 @@ "type": "object" }, "GoogleTypePostalAddress": { - "description": "Represents a postal address, e.g. for postal delivery or payments addresses. Given a postal address, a postal service can deliver items to a premise, P.O. Box or similar. It is not intended to model geographical locations (roads, towns, mountains). In typical usage an address would be created via user input or from importing existing data, depending on the type of process. Advice on address input / editing: - Use an internationalization-ready address widget such as https://github.com/google/libaddressinput) - Users should not be presented with UI elements for input or editing of fields outside countries where that field is used. For more guidance on how to use this schema, please see: https://support.google.com/business/answer/6397478", + "description": "Represents a postal address. For example for postal delivery or payments addresses. Given a postal address, a postal service can deliver items to a premise, P.O. Box or similar. It is not intended to model geographical locations (roads, towns, mountains). 
In typical usage an address would be created by user input or from importing existing data, depending on the type of process. Advice on address input / editing: - Use an internationalization-ready address widget such as https://github.com/google/libaddressinput) - Users should not be presented with UI elements for input or editing of fields outside countries where that field is used. For more guidance on how to use this schema, see: https://support.google.com/business/answer/6397478", "id": "GoogleTypePostalAddress", "properties": { "addressLines": { - "description": "Unstructured address lines describing the lower levels of an address. Because values in address_lines do not have type information and may sometimes contain multiple values in a single field (e.g. \"Austin, TX\"), it is important that the line order is clear. The order of address lines should be \"envelope order\" for the country/region of the address. In places where this can vary (e.g. Japan), address_language is used to make it explicit (e.g. \"ja\" for large-to-small ordering and \"ja-Latn\" or \"en\" for small-to-large). This way, the most specific line of an address can be selected based on the language. The minimum permitted structural representation of an address consists of a region_code with all remaining information placed in the address_lines. It would be possible to format such an address very approximately without geocoding, but no semantic reasoning could be made about any of the address components until it was at least partially resolved. Creating an address only containing a region_code and address_lines, and then geocoding is the recommended way to handle completely unstructured addresses (as opposed to guessing which parts of the address should be localities or administrative areas).", + "description": "Unstructured address lines describing the lower levels of an address. Because values in address_lines do not have type information and may sometimes contain multiple values in a single field (For example \"Austin, TX\"), it is important that the line order is clear. The order of address lines should be \"envelope order\" for the country/region of the address. In places where this can vary (For example Japan), address_language is used to make it explicit (For example \"ja\" for large-to-small ordering and \"ja-Latn\" or \"en\" for small-to-large). This way, the most specific line of an address can be selected based on the language. The minimum permitted structural representation of an address consists of a region_code with all remaining information placed in the address_lines. It would be possible to format such an address very approximately without geocoding, but no semantic reasoning could be made about any of the address components until it was at least partially resolved. Creating an address only containing a region_code and address_lines, and then geocoding is the recommended way to handle completely unstructured addresses (as opposed to guessing which parts of the address should be localities or administrative areas).", "items": { "type": "string" }, "type": "array" }, "administrativeArea": { - "description": "Optional. Highest administrative subdivision which is used for postal addresses of a country or region. For example, this can be a state, a province, an oblast, or a prefecture. Specifically, for Spain this is the province and not the autonomous community (e.g. \"Barcelona\" and not \"Catalonia\"). Many countries don't use an administrative area in postal addresses. E.g. 
in Switzerland this should be left unpopulated.", + "description": "Optional. Highest administrative subdivision which is used for postal addresses of a country or region. For example, this can be a state, a province, an oblast, or a prefecture. Specifically, for Spain this is the province and not the autonomous community (For example \"Barcelona\" and not \"Catalonia\"). Many countries don't use an administrative area in postal addresses. For example in Switzerland this should be left unpopulated.", "type": "string" }, "languageCode": { @@ -798,7 +798,7 @@ "type": "string" }, "postalCode": { - "description": "Optional. Postal code of the address. Not all countries use or require postal codes to be present, but where they are used, they may trigger additional validation with other parts of the address (e.g. state/zip validation in the U.S.A.).", + "description": "Optional. Postal code of the address. Not all countries use or require postal codes to be present, but where they are used, they may trigger additional validation with other parts of the address (For example state/zip validation in the U.S.A.).", "type": "string" }, "recipients": { @@ -818,7 +818,7 @@ "type": "integer" }, "sortingCode": { - "description": "Optional. Additional, country-specific, sorting code. This is not used in most regions. Where it is used, the value is either a string like \"CEDEX\", optionally followed by a number (e.g. \"CEDEX 7\"), or just a number alone, representing the \"sector code\" (Jamaica), \"delivery area indicator\" (Malawi) or \"post office indicator\" (e.g. Côte d'Ivoire).", + "description": "Optional. Additional, country-specific, sorting code. This is not used in most regions. Where it is used, the value is either a string like \"CEDEX\", optionally followed by a number (For example \"CEDEX 7\"), or just a number alone, representing the \"sector code\" (Jamaica), \"delivery area indicator\" (Malawi) or \"post office indicator\" (For example Côte d'Ivoire).", "type": "string" }, "sublocality": { diff --git a/discovery/googleapis/admin__directory_v1.json b/discovery/googleapis/admin__directory_v1.json index a271487a3..a4e0c894a 100644 --- a/discovery/googleapis/admin__directory_v1.json +++ b/discovery/googleapis/admin__directory_v1.json @@ -110,7 +110,7 @@ "ownerName": "Google", "packagePath": "admin", "protocol": "rest", - "revision": "20240924", + "revision": "20241210", "rootUrl": "https://admin.googleapis.com/", "servicePath": "", "title": "Admin SDK API", @@ -5185,6 +5185,25 @@ }, "type": "object" }, + "ByteUsage": { + "description": "Represents a data capacity with some amount of current usage in bytes.", + "id": "ByteUsage", + "properties": { + "capacityBytes": { + "description": "Output only. The total capacity value, in bytes.", + "format": "int64", + "readOnly": true, + "type": "string" + }, + "usedBytes": { + "description": "Output only. The current usage value, in bytes.", + "format": "int64", + "readOnly": true, + "type": "string" + } + }, + "type": "object" + }, "CalendarResource": { "description": "Public API: Resources.calendars", "id": "CalendarResource", @@ -5641,6 +5660,11 @@ "readOnly": true, "type": "string" }, + "diskSpaceUsage": { + "$ref": "ByteUsage", + "description": "Output only. 
How much disk space the device has available and is currently using.", + "readOnly": true + }, "diskVolumeReports": { "description": "Reports of disk space and other info about mounted/connected volumes.", "items": { @@ -7047,7 +7071,7 @@ "properties": { "blockInheritance": { "deprecated": true, - "description": "Determines if a sub-organizational unit can inherit the settings of the parent organization. The default value is `false`, meaning a sub-organizational unit inherits the settings of the nearest parent organizational unit. This field is deprecated. Setting it to `true` is no longer supported and can have _unintended consequences_. For more information about inheritance and users in an organization structure, see the [administration help center](https://support.google.com/a/answer/4352075).", + "description": "This field is deprecated and setting its value has no effect.", "type": "boolean" }, "description": { @@ -7470,6 +7494,10 @@ "readOnly": true, "type": "string" }, + "condition": { + "description": "Optional. The condition associated with this role assignment. Note: Feature is available to Enterprise Standard, Enterprise Plus, Google Workspace for Education Plus and Cloud Identity Premium customers. A `RoleAssignment` with the `condition` field set will only take effect when the resource being accessed meets the condition. If `condition` is empty, the role (`role_id`) is applied to the actor (`assigned_to`) at the scope (`scope_type`) unconditionally. Currently, the following conditions are supported: - To make the `RoleAssignment` only applicable to [Security Groups](https://cloud.google.com/identity/docs/groups#group_types): `api.getAttribute('cloudidentity.googleapis.com/groups.labels', []).hasAny(['groups.security']) && resource.type == 'cloudidentity.googleapis.com/Group'` - To make the `RoleAssignment` not applicable to [Security Groups](https://cloud.google.com/identity/docs/groups#group_types): `!api.getAttribute('cloudidentity.googleapis.com/groups.labels', []).hasAny(['groups.security']) && resource.type == 'cloudidentity.googleapis.com/Group'` Currently, the condition strings have to be verbatim and they only work with the following [pre-built administrator roles](https://support.google.com/a/answer/2405986): - Groups Editor - Groups Reader The condition follows [Cloud IAM condition syntax](https://cloud.google.com/iam/docs/conditions-overview). Additional conditions related to Locked Groups are available under Open Beta. 
- To make the `RoleAssignment` not applicable to [Locked Groups](https://cloud.google.com/identity/docs/groups#group_types): `!api.getAttribute('cloudidentity.googleapis.com/groups.labels', []).hasAny(['groups.locked']) && resource.type == 'cloudidentity.googleapis.com/Group'` This condition can also be used in conjunction with a Security-related condition.", + "type": "string" + }, "etag": { "description": "ETag of the resource.", "type": "string" diff --git a/discovery/googleapis/adsense__v2.json b/discovery/googleapis/adsense__v2.json index f1a4e1049..5870b71a5 100644 --- a/discovery/googleapis/adsense__v2.json +++ b/discovery/googleapis/adsense__v2.json @@ -27,7 +27,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20240515", + "revision": "20241211", "rootUrl": "https://adsense.googleapis.com/", "servicePath": "", "title": "AdSense Management API", @@ -975,6 +975,7 @@ "AD_UNIT_SIZE_CODE", "CUSTOM_CHANNEL_NAME", "CUSTOM_CHANNEL_ID", + "HOSTED_CUSTOM_CHANNEL_ID", "OWNED_SITE_DOMAIN_NAME", "OWNED_SITE_ID", "PAGE_URL", @@ -1025,6 +1026,7 @@ "The size code of an ad unit (e.g. \"728x90\", \"responsive\").", "Custom channel name. The members of this dimension match the values from CustomChannel.display_name.", "Unique ID of a custom channel. The members of this dimension match the values from CustomChannel.reporting_dimension_id.", + "Unique ID of a hosted client's custom channel.", "Domain name of a verified site (e.g. \"example.com\"). The members of this dimension match the values from Site.domain.", "Unique ID of a verified site. The members of this dimension match the values from Site.reporting_dimension_id.", "URL of the page upon which the ad was served. This is a complete URL including scheme and query parameters. Note that the URL that appears in this dimension may be a canonicalized version of the one that was used in the original request, and so may not exactly match the URL that a user might have seen. Note that there are also some caveats to be aware of when using this dimension. For more information, see [Page URL breakdown](https://support.google.com/adsense/answer/11988478).", @@ -1295,6 +1297,7 @@ "AD_UNIT_SIZE_CODE", "CUSTOM_CHANNEL_NAME", "CUSTOM_CHANNEL_ID", + "HOSTED_CUSTOM_CHANNEL_ID", "OWNED_SITE_DOMAIN_NAME", "OWNED_SITE_ID", "PAGE_URL", @@ -1345,6 +1348,7 @@ "The size code of an ad unit (e.g. \"728x90\", \"responsive\").", "Custom channel name. The members of this dimension match the values from CustomChannel.display_name.", "Unique ID of a custom channel. The members of this dimension match the values from CustomChannel.reporting_dimension_id.", + "Unique ID of a hosted client's custom channel.", "Domain name of a verified site (e.g. \"example.com\"). The members of this dimension match the values from Site.domain.", "Unique ID of a verified site. The members of this dimension match the values from Site.reporting_dimension_id.", "URL of the page upon which the ad was served. This is a complete URL including scheme and query parameters. Note that the URL that appears in this dimension may be a canonicalized version of the one that was used in the original request, and so may not exactly match the URL that a user might have seen. Note that there are also some caveats to be aware of when using this dimension. 
For more information, see [Page URL breakdown](https://support.google.com/adsense/answer/11988478).", @@ -2788,11 +2792,11 @@ "id": "TimeZone", "properties": { "id": { - "description": "IANA Time Zone Database time zone, e.g. \"America/New_York\".", + "description": "IANA Time Zone Database time zone. For example \"America/New_York\".", "type": "string" }, "version": { - "description": "Optional. IANA Time Zone Database version number, e.g. \"2019a\".", + "description": "Optional. IANA Time Zone Database version number. For example \"2019a\".", "type": "string" } }, diff --git a/discovery/googleapis/adsenseplatform__v1.json b/discovery/googleapis/adsenseplatform__v1.json index 52f5fb399..30de99d75 100644 --- a/discovery/googleapis/adsenseplatform__v1.json +++ b/discovery/googleapis/adsenseplatform__v1.json @@ -28,7 +28,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20240722", + "revision": "20241204", "rootUrl": "https://adsenseplatform.googleapis.com/", "servicePath": "", "title": "AdSense Platform API", @@ -705,11 +705,11 @@ "id": "TimeZone", "properties": { "id": { - "description": "IANA Time Zone Database time zone, e.g. \"America/New_York\".", + "description": "IANA Time Zone Database time zone. For example \"America/New_York\".", "type": "string" }, "version": { - "description": "Optional. IANA Time Zone Database version number, e.g. \"2019a\".", + "description": "Optional. IANA Time Zone Database version number. For example \"2019a\".", "type": "string" } }, diff --git a/discovery/googleapis/aiplatform__v1.json b/discovery/googleapis/aiplatform__v1.json index d2df12efa..b29f6ae66 100644 --- a/discovery/googleapis/aiplatform__v1.json +++ b/discovery/googleapis/aiplatform__v1.json @@ -225,7 +225,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20241007", + "revision": "20241203", "rootUrl": "https://aiplatform.googleapis.com/", "servicePath": "", "title": "Vertex AI API", @@ -794,10 +794,165 @@ } } }, + "media": { + "methods": { + "upload": { + "description": "Upload a file into a RagCorpus.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/ragCorpora/{ragCorporaId}/ragFiles:upload", + "httpMethod": "POST", + "id": "aiplatform.media.upload", + "mediaUpload": { + "accept": [ + "*/*" + ], + "protocols": { + "simple": { + "multipart": true, + "path": "/upload/v1/{+parent}/ragFiles:upload" + } + } + }, + "parameterOrder": [ + "parent" + ], + "parameters": { + "parent": { + "description": "Required. The name of the RagCorpus resource into which to upload the file. Format: `projects/{project}/locations/{location}/ragCorpora/{rag_corpus}`", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/ragCorpora/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+parent}/ragFiles:upload", + "request": { + "$ref": "GoogleCloudAiplatformV1UploadRagFileRequest" + }, + "response": { + "$ref": "GoogleCloudAiplatformV1UploadRagFileResponse" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ], + "supportsMediaUpload": true + } + } + }, "projects": { + "methods": { + "getCacheConfig": { + "description": "Gets a GenAI cache config.", + "flatPath": "v1/projects/{projectsId}/cacheConfig", + "httpMethod": "GET", + "id": "aiplatform.projects.getCacheConfig", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "Required. Name of the cache config. 
Format: - `projects/{project}/cacheConfig`.", + "location": "path", + "pattern": "^projects/[^/]+/cacheConfig$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+name}", + "response": { + "$ref": "GoogleCloudAiplatformV1CacheConfig" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "updateCacheConfig": { + "description": "Updates a cache config.", + "flatPath": "v1/projects/{projectsId}/cacheConfig", + "httpMethod": "PATCH", + "id": "aiplatform.projects.updateCacheConfig", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "Identifier. Name of the cache config. Format: - `projects/{project}/cacheConfig`.", + "location": "path", + "pattern": "^projects/[^/]+/cacheConfig$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+name}", + "request": { + "$ref": "GoogleCloudAiplatformV1CacheConfig" + }, + "response": { + "$ref": "GoogleLongrunningOperation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + } + }, "resources": { "locations": { "methods": { + "augmentPrompt": { + "description": "Given an input prompt, it returns augmented prompt from vertex rag store to guide LLM towards generating grounded responses.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}:augmentPrompt", + "httpMethod": "POST", + "id": "aiplatform.projects.locations.augmentPrompt", + "parameterOrder": [ + "parent" + ], + "parameters": { + "parent": { + "description": "Required. The resource name of the Location from which to augment prompt. The users must have permission to make a call in the project. Format: `projects/{project}/locations/{location}`.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+parent}:augmentPrompt", + "request": { + "$ref": "GoogleCloudAiplatformV1AugmentPromptRequest" + }, + "response": { + "$ref": "GoogleCloudAiplatformV1AugmentPromptResponse" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "corroborateContent": { + "description": "Given an input text, it returns a score that evaluates the factuality of the text. It also extracts and returns claims from the text and provides supporting facts.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}:corroborateContent", + "httpMethod": "POST", + "id": "aiplatform.projects.locations.corroborateContent", + "parameterOrder": [ + "parent" + ], + "parameters": { + "parent": { + "description": "Required. The resource name of the Location from which to corroborate text. The users must have permission to make a call in the project. 
Format: `projects/{project}/locations/{location}`.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+parent}:corroborateContent", + "request": { + "$ref": "GoogleCloudAiplatformV1CorroborateContentRequest" + }, + "response": { + "$ref": "GoogleCloudAiplatformV1CorroborateContentResponse" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, "evaluateInstances": { "description": "Evaluates instances based on a given metric.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}:evaluateInstances", @@ -891,6 +1046,34 @@ "scopes": [ "https://www.googleapis.com/auth/cloud-platform" ] + }, + "retrieveContexts": { + "description": "Retrieves relevant contexts for a query.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}:retrieveContexts", + "httpMethod": "POST", + "id": "aiplatform.projects.locations.retrieveContexts", + "parameterOrder": [ + "parent" + ], + "parameters": { + "parent": { + "description": "Required. The resource name of the Location from which to retrieve RagContexts. The users must have permission to make a call in the project. Format: `projects/{project}/locations/{location}`.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+parent}:retrieveContexts", + "request": { + "$ref": "GoogleCloudAiplatformV1RetrieveContextsRequest" + }, + "response": { + "$ref": "GoogleCloudAiplatformV1RetrieveContextsResponse" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] } }, "resources": { @@ -1051,6 +1234,158 @@ } } }, + "cachedContents": { + "methods": { + "create": { + "description": "Creates cached content, this call will initialize the cached content in the data storage, and users need to pay for the cache data storage.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/cachedContents", + "httpMethod": "POST", + "id": "aiplatform.projects.locations.cachedContents.create", + "parameterOrder": [ + "parent" + ], + "parameters": { + "parent": { + "description": "Required. The parent resource where the cached content will be created", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+parent}/cachedContents", + "request": { + "$ref": "GoogleCloudAiplatformV1CachedContent" + }, + "response": { + "$ref": "GoogleCloudAiplatformV1CachedContent" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "delete": { + "description": "Deletes cached content", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/cachedContents/{cachedContentsId}", + "httpMethod": "DELETE", + "id": "aiplatform.projects.locations.cachedContents.delete", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "Required. 
The resource name referring to the cached content", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/cachedContents/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+name}", + "response": { + "$ref": "GoogleProtobufEmpty" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "get": { + "description": "Gets cached content configurations", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/cachedContents/{cachedContentsId}", + "httpMethod": "GET", + "id": "aiplatform.projects.locations.cachedContents.get", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "Required. The resource name referring to the cached content", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/cachedContents/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+name}", + "response": { + "$ref": "GoogleCloudAiplatformV1CachedContent" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "list": { + "description": "Lists cached contents in a project", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/cachedContents", + "httpMethod": "GET", + "id": "aiplatform.projects.locations.cachedContents.list", + "parameterOrder": [ + "parent" + ], + "parameters": { + "pageSize": { + "description": "Optional. The maximum number of cached contents to return. The service may return fewer than this value. If unspecified, some default (under maximum) number of items will be returned. The maximum value is 1000; values above 1000 will be coerced to 1000.", + "format": "int32", + "location": "query", + "type": "integer" + }, + "pageToken": { + "description": "Optional. A page token, received from a previous `ListCachedContents` call. Provide this to retrieve the subsequent page. When paginating, all other parameters provided to `ListCachedContents` must match the call that provided the page token.", + "location": "query", + "type": "string" + }, + "parent": { + "description": "Required. The parent, which owns this collection of cached contents.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+parent}/cachedContents", + "response": { + "$ref": "GoogleCloudAiplatformV1ListCachedContentsResponse" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "patch": { + "description": "Updates cached content configurations", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/cachedContents/{cachedContentsId}", + "httpMethod": "PATCH", + "id": "aiplatform.projects.locations.cachedContents.patch", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "Immutable. Identifier. The server-generated resource name of the cached content Format: projects/{project}/locations/{location}/cachedContents/{cached_content}", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/cachedContents/[^/]+$", + "required": true, + "type": "string" + }, + "updateMask": { + "description": "Required. 
The list of fields to update.", + "format": "google-fieldmask", + "location": "query", + "type": "string" + } + }, + "path": "v1/{+name}", + "request": { + "$ref": "GoogleCloudAiplatformV1CachedContent" + }, + "response": { + "$ref": "GoogleCloudAiplatformV1CachedContent" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + } + } + }, "customJobs": { "methods": { "cancel": { @@ -1211,7 +1546,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/customJobs/{customJobsId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "aiplatform.projects.locations.customJobs.operations.cancel", @@ -1526,7 +1861,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/dataLabelingJobs/{dataLabelingJobsId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "aiplatform.projects.locations.dataLabelingJobs.operations.cancel", @@ -2041,7 +2376,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. 
The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/datasets/{datasetsId}/annotationSpecs/{annotationSpecsId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "aiplatform.projects.locations.datasets.annotationSpecs.operations.cancel", @@ -2306,7 +2641,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/datasets/{datasetsId}/dataItems/{dataItemsId}/annotations/{annotationsId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "aiplatform.projects.locations.datasets.dataItems.annotations.operations.cancel", @@ -2459,7 +2794,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. 
On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/datasets/{datasetsId}/dataItems/{dataItemsId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "aiplatform.projects.locations.datasets.dataItems.operations.cancel", @@ -2811,7 +3146,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/datasets/{datasetsId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "aiplatform.projects.locations.datasets.operations.cancel", @@ -3043,7 +3378,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. 
Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/datasets/{datasetsId}/savedQueries/{savedQueriesId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "aiplatform.projects.locations.datasets.savedQueries.operations.cancel", @@ -3386,7 +3721,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/deploymentResourcePools/{deploymentResourcePoolsId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "aiplatform.projects.locations.deploymentResourcePools.operations.cancel", @@ -3767,6 +4102,35 @@ "https://www.googleapis.com/auth/cloud-platform.read-only" ] }, + "fetchPredictOperation": { + "description": "Fetch an asynchronous online prediction operation.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/endpoints/{endpointsId}:fetchPredictOperation", + "httpMethod": "POST", + "id": "aiplatform.projects.locations.endpoints.fetchPredictOperation", + "parameterOrder": [ + "endpoint" + ], + "parameters": { + "endpoint": { + "description": "Required. The name of the Endpoint requested to serve the prediction. 
Format: `projects/{project}/locations/{location}/endpoints/{endpoint}` or `projects/{project}/locations/{location}/publishers/{publisher}/models/{model}`", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/endpoints/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+endpoint}:fetchPredictOperation", + "request": { + "$ref": "GoogleCloudAiplatformV1FetchPredictOperationRequest" + }, + "response": { + "$ref": "GoogleLongrunningOperation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only" + ] + }, "generateContent": { "description": "Generate content with multimodal inputs.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/endpoints/{endpointsId}:generateContent", @@ -3964,6 +4328,35 @@ "https://www.googleapis.com/auth/cloud-platform.read-only" ] }, + "predictLongRunning": { + "description": "", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/endpoints/{endpointsId}:predictLongRunning", + "httpMethod": "POST", + "id": "aiplatform.projects.locations.endpoints.predictLongRunning", + "parameterOrder": [ + "endpoint" + ], + "parameters": { + "endpoint": { + "description": "Required. The name of the Endpoint requested to serve the prediction. Format: `projects/{project}/locations/{location}/endpoints/{endpoint}` or `projects/{project}/locations/{location}/publishers/{publisher}/models/{model}`", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/endpoints/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+endpoint}:predictLongRunning", + "request": { + "$ref": "GoogleCloudAiplatformV1PredictLongRunningRequest" + }, + "response": { + "$ref": "GoogleLongrunningOperation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only" + ] + }, "rawPredict": { "description": "Perform an online prediction with an arbitrary HTTP payload. The response includes the following HTTP headers: * `X-Vertex-AI-Endpoint-Id`: ID of the Endpoint that served this prediction. * `X-Vertex-AI-Deployed-Model-Id`: ID of the Endpoint's DeployedModel that served this prediction.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/endpoints/{endpointsId}:rawPredict", @@ -4107,6 +4500,34 @@ "scopes": [ "https://www.googleapis.com/auth/cloud-platform" ] + }, + "update": { + "description": "Updates an Endpoint with a long running operation.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/endpoints/{endpointsId}:update", + "httpMethod": "POST", + "id": "aiplatform.projects.locations.endpoints.update", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "Output only. The resource name of the Endpoint.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/endpoints/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+name}:update", + "request": { + "$ref": "GoogleCloudAiplatformV1UpdateEndpointLongRunningRequest" + }, + "response": { + "$ref": "GoogleLongrunningOperation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] } }, "resources": { @@ -4146,7 +4567,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. 
Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/endpoints/{endpointsId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "aiplatform.projects.locations.endpoints.operations.cancel", @@ -4412,7 +4833,7 @@ "type": "integer" }, "pageToken": { - "description": "A page token, received from a previous FeatureGroupAdminService.ListFeatureGroups call. Provide this to retrieve the subsequent page. When paginating, all other parameters provided to FeatureGroupAdminService.ListFeatureGroups must match the call that provided the page token.", + "description": "A page token, received from a previous FeatureRegistryService.ListFeatureGroups call. Provide this to retrieve the subsequent page. When paginating, all other parameters provided to FeatureRegistryService.ListFeatureGroups must match the call that provided the page token.", "location": "query", "type": "string" }, @@ -5405,7 +5826,7 @@ "type": "string" }, "updateMask": { - "description": "Field mask is used to specify the fields to be overwritten in the FeatureView resource by the update. The fields specified in the update_mask are relative to the resource, not the full request. A field will be overwritten if it is in the mask. If the user does not provide a mask then only the non-empty fields present in the request will be overwritten. Set the update_mask to `*` to override all fields. Updatable fields: * `labels` * `service_agent_type` * `big_query_source` * `big_query_source.uri` * `big_query_source.entity_id_columns` * `feature_registry_source` * `feature_registry_source.feature_groups` * `sync_config` * `sync_config.cron`", + "description": "Field mask is used to specify the fields to be overwritten in the FeatureView resource by the update. The fields specified in the update_mask are relative to the resource, not the full request. A field will be overwritten if it is in the mask. If the user does not provide a mask then only the non-empty fields present in the request will be overwritten. Set the update_mask to `*` to override all fields. Updatable fields: * `labels` * `service_agent_type` * `big_query_source` * `big_query_source.uri` * `big_query_source.entity_id_columns` * `feature_registry_source` * `feature_registry_source.feature_groups` * `sync_config` * `sync_config.cron` * `optimized_config.automatic_resources`", "format": "google-fieldmask", "location": "query", "type": "string" @@ -6853,7 +7274,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. 
The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/featurestores/{featurestoresId}/entityTypes/{entityTypesId}/features/{featuresId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "aiplatform.projects.locations.featurestores.entityTypes.features.operations.cancel", @@ -7006,7 +7427,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/featurestores/{featurestoresId}/entityTypes/{entityTypesId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "aiplatform.projects.locations.featurestores.entityTypes.operations.cancel", @@ -7159,7 +7580,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. 
On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/featurestores/{featurestoresId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "aiplatform.projects.locations.featurestores.operations.cancel", @@ -7469,7 +7890,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/hyperparameterTuningJobs/{hyperparameterTuningJobsId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "aiplatform.projects.locations.hyperparameterTuningJobs.operations.cancel", @@ -7925,7 +8346,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. 
Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/indexEndpoints/{indexEndpointsId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "aiplatform.projects.locations.indexEndpoints.operations.cancel", @@ -8297,7 +8718,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/indexes/{indexesId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "aiplatform.projects.locations.indexes.operations.cancel", @@ -8820,7 +9241,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. 
On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/metadataStores/{metadataStoresId}/artifacts/{artifactsId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "aiplatform.projects.locations.metadataStores.artifacts.operations.cancel", @@ -9292,7 +9713,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/metadataStores/{metadataStoresId}/contexts/{contextsId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "aiplatform.projects.locations.metadataStores.contexts.operations.cancel", @@ -9703,7 +10124,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. 
On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/metadataStores/{metadataStoresId}/executions/{executionsId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "aiplatform.projects.locations.metadataStores.executions.operations.cancel", @@ -9959,7 +10380,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/metadataStores/{metadataStoresId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "aiplatform.projects.locations.metadataStores.operations.cancel", @@ -10172,7 +10593,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. 
On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/migratableResources/{migratableResourcesId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "aiplatform.projects.locations.migratableResources.operations.cancel", @@ -10572,7 +10993,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/modelDeploymentMonitoringJobs/{modelDeploymentMonitoringJobsId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "aiplatform.projects.locations.modelDeploymentMonitoringJobs.operations.cancel", @@ -11276,7 +11697,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. 
On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/models/{modelsId}/evaluations/{evaluationsId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "aiplatform.projects.locations.models.evaluations.operations.cancel", @@ -11533,7 +11954,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/models/{modelsId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "aiplatform.projects.locations.models.operations.cancel", @@ -12034,7 +12455,7 @@ "type": "integer" }, "pageToken": { - "description": "Optional. The standard list page token. Typically obtained via ListNotebookExecutionJobs.next_page_token of the previous NotebookService.ListNotebookExecutionJobs call.", + "description": "Optional. The standard list page token. Typically obtained via ListNotebookExecutionJobsResponse.next_page_token of the previous NotebookService.ListNotebookExecutionJobs call.", "location": "query", "type": "string" }, @@ -12074,7 +12495,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. 
On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/notebookExecutionJobs/{notebookExecutionJobsId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "aiplatform.projects.locations.notebookExecutionJobs.operations.cancel", @@ -12350,7 +12771,7 @@ ], "parameters": { "filter": { - "description": "Optional. An expression for filtering the results of the request. For field names both snake_case and camelCase are supported. * `notebookRuntimeTemplate` supports = and !=. `notebookRuntimeTemplate` represents the NotebookRuntimeTemplate ID, i.e. the last segment of the NotebookRuntimeTemplate's resource name. * `display_name` supports = and != * `labels` supports general map functions that is: * `labels.key=value` - key:value equality * `labels.key:* or labels:key - key existence * A key including a space must be quoted. `labels.\"a key\"`. * `notebookRuntimeType` supports = and !=. notebookRuntimeType enum: [USER_DEFINED, ONE_CLICK]. Some examples: * `notebookRuntimeTemplate=notebookRuntimeTemplate123` * `displayName=\"myDisplayName\"` * `labels.myKey=\"myValue\"` * `notebookRuntimeType=USER_DEFINED`", + "description": "Optional. An expression for filtering the results of the request. For field names both snake_case and camelCase are supported. * `notebookRuntimeTemplate` supports = and !=. `notebookRuntimeTemplate` represents the NotebookRuntimeTemplate ID, i.e. the last segment of the NotebookRuntimeTemplate's resource name. * `display_name` supports = and != * `labels` supports general map functions that is: * `labels.key=value` - key:value equality * `labels.key:* or labels:key - key existence * A key including a space must be quoted. `labels.\"a key\"`. * `notebookRuntimeType` supports = and !=. notebookRuntimeType enum: [USER_DEFINED, ONE_CLICK]. * `machineType` supports = and !=. * `acceleratorType` supports = and !=. Some examples: * `notebookRuntimeTemplate=notebookRuntimeTemplate123` * `displayName=\"myDisplayName\"` * `labels.myKey=\"myValue\"` * `notebookRuntimeType=USER_DEFINED` * `machineType=e2-standard-4` * `acceleratorType=NVIDIA_TESLA_T4`", "location": "query", "type": "string" }, @@ -12490,7 +12911,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. 
On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/notebookRuntimeTemplates/{notebookRuntimeTemplatesId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "aiplatform.projects.locations.notebookRuntimeTemplates.operations.cancel", @@ -12730,7 +13151,7 @@ ], "parameters": { "filter": { - "description": "Optional. An expression for filtering the results of the request. For field names both snake_case and camelCase are supported. * `notebookRuntime` supports = and !=. `notebookRuntime` represents the NotebookRuntime ID, i.e. the last segment of the NotebookRuntime's resource name. * `displayName` supports = and != and regex. * `notebookRuntimeTemplate` supports = and !=. `notebookRuntimeTemplate` represents the NotebookRuntimeTemplate ID, i.e. the last segment of the NotebookRuntimeTemplate's resource name. * `healthState` supports = and !=. healthState enum: [HEALTHY, UNHEALTHY, HEALTH_STATE_UNSPECIFIED]. * `runtimeState` supports = and !=. runtimeState enum: [RUNTIME_STATE_UNSPECIFIED, RUNNING, BEING_STARTED, BEING_STOPPED, STOPPED, BEING_UPGRADED, ERROR, INVALID]. * `runtimeUser` supports = and !=. * API version is UI only: `uiState` supports = and !=. uiState enum: [UI_RESOURCE_STATE_UNSPECIFIED, UI_RESOURCE_STATE_BEING_CREATED, UI_RESOURCE_STATE_ACTIVE, UI_RESOURCE_STATE_BEING_DELETED, UI_RESOURCE_STATE_CREATION_FAILED]. * `notebookRuntimeType` supports = and !=. notebookRuntimeType enum: [USER_DEFINED, ONE_CLICK]. Some examples: * `notebookRuntime=\"notebookRuntime123\"` * `displayName=\"myDisplayName\"` and `displayName=~\"myDisplayNameRegex\"` * `notebookRuntimeTemplate=\"notebookRuntimeTemplate321\"` * `healthState=HEALTHY` * `runtimeState=RUNNING` * `runtimeUser=\"test@google.com\"` * `uiState=UI_RESOURCE_STATE_BEING_DELETED` * `notebookRuntimeType=USER_DEFINED`", + "description": "Optional. An expression for filtering the results of the request. For field names both snake_case and camelCase are supported. * `notebookRuntime` supports = and !=. `notebookRuntime` represents the NotebookRuntime ID, i.e. the last segment of the NotebookRuntime's resource name. * `displayName` supports = and != and regex. * `notebookRuntimeTemplate` supports = and !=. `notebookRuntimeTemplate` represents the NotebookRuntimeTemplate ID, i.e. the last segment of the NotebookRuntimeTemplate's resource name. * `healthState` supports = and !=. healthState enum: [HEALTHY, UNHEALTHY, HEALTH_STATE_UNSPECIFIED]. * `runtimeState` supports = and !=. runtimeState enum: [RUNTIME_STATE_UNSPECIFIED, RUNNING, BEING_STARTED, BEING_STOPPED, STOPPED, BEING_UPGRADED, ERROR, INVALID]. * `runtimeUser` supports = and !=. * API version is UI only: `uiState` supports = and !=. uiState enum: [UI_RESOURCE_STATE_UNSPECIFIED, UI_RESOURCE_STATE_BEING_CREATED, UI_RESOURCE_STATE_ACTIVE, UI_RESOURCE_STATE_BEING_DELETED, UI_RESOURCE_STATE_CREATION_FAILED]. * `notebookRuntimeType` supports = and !=. notebookRuntimeType enum: [USER_DEFINED, ONE_CLICK]. * `machineType` supports = and !=. * `acceleratorType` supports = and !=. 
Some examples: * `notebookRuntime=\"notebookRuntime123\"` * `displayName=\"myDisplayName\"` and `displayName=~\"myDisplayNameRegex\"` * `notebookRuntimeTemplate=\"notebookRuntimeTemplate321\"` * `healthState=HEALTHY` * `runtimeState=RUNNING` * `runtimeUser=\"test@google.com\"` * `uiState=UI_RESOURCE_STATE_BEING_DELETED` * `notebookRuntimeType=USER_DEFINED` * `machineType=e2-standard-4` * `acceleratorType=NVIDIA_TESLA_T4`", "location": "query", "type": "string" }, @@ -12800,6 +13221,34 @@ "https://www.googleapis.com/auth/cloud-platform" ] }, + "stop": { + "description": "Stops a NotebookRuntime.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/notebookRuntimes/{notebookRuntimesId}:stop", + "httpMethod": "POST", + "id": "aiplatform.projects.locations.notebookRuntimes.stop", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "Required. The name of the NotebookRuntime resource to be stopped. Instead of checking whether the name is in valid NotebookRuntime resource name format, directly throw NotFound exception if there is no such NotebookRuntime in spanner.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/notebookRuntimes/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+name}:stop", + "request": { + "$ref": "GoogleCloudAiplatformV1StopNotebookRuntimeRequest" + }, + "response": { + "$ref": "GoogleLongrunningOperation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, "upgrade": { "description": "Upgrades a NotebookRuntime.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/notebookRuntimes/{notebookRuntimesId}:upgrade", @@ -12833,7 +13282,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/notebookRuntimes/{notebookRuntimesId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "aiplatform.projects.locations.notebookRuntimes.operations.cancel", @@ -12986,7 +13435,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. 
Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "aiplatform.projects.locations.operations.cancel", @@ -13235,7 +13684,7 @@ "type": "integer" }, "pageToken": { - "description": "Optional. The standard list page token. Typically obtained via ListPersistentResourceResponse.next_page_token of the previous PersistentResourceService.ListPersistentResource call.", + "description": "Optional. The standard list page token. Typically obtained via ListPersistentResourcesResponse.next_page_token of the previous PersistentResourceService.ListPersistentResource call.", "location": "query", "type": "string" }, @@ -13322,7 +13771,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/persistentResources/{persistentResourcesId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "aiplatform.projects.locations.persistentResources.operations.cancel", @@ -13698,7 +14147,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. 
Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/pipelineJobs/{pipelineJobsId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "aiplatform.projects.locations.pipelineJobs.operations.cancel", @@ -13815,9 +14264,1218 @@ }, "wait": { "description": "Waits until the specified long-running operation is done or reaches at most a specified timeout, returning the latest state. If the operation is already done, the latest state is immediately returned. If the timeout specified is greater than the default HTTP/RPC timeout, the HTTP/RPC timeout is used. If the server does not support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Note that this method is on a best-effort basis. It may return the latest state before the specified timeout (including immediately), meaning even an immediate response is no guarantee that the operation is done.", - "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/pipelineJobs/{pipelineJobsId}/operations/{operationsId}:wait", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/pipelineJobs/{pipelineJobsId}/operations/{operationsId}:wait", + "httpMethod": "POST", + "id": "aiplatform.projects.locations.pipelineJobs.operations.wait", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "The name of the operation resource to wait on.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/pipelineJobs/[^/]+/operations/[^/]+$", + "required": true, + "type": "string" + }, + "timeout": { + "description": "The maximum duration to wait before timing out. If left blank, the wait will be at most the time permitted by the underlying HTTP/RPC protocol. If RPC context deadline is also specified, the shorter one will be used.", + "format": "google-duration", + "location": "query", + "type": "string" + } + }, + "path": "v1/{+name}:wait", + "response": { + "$ref": "GoogleLongrunningOperation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + } + } + } + } + }, + "publishers": { + "resources": { + "models": { + "methods": { + "computeTokens": { + "description": "Return a list of tokens based on the input text.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/publishers/{publishersId}/models/{modelsId}:computeTokens", + "httpMethod": "POST", + "id": "aiplatform.projects.locations.publishers.models.computeTokens", + "parameterOrder": [ + "endpoint" + ], + "parameters": { + "endpoint": { + "description": "Required. 
The name of the Endpoint requested to get lists of tokens and token ids.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/publishers/[^/]+/models/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+endpoint}:computeTokens", + "request": { + "$ref": "GoogleCloudAiplatformV1ComputeTokensRequest" + }, + "response": { + "$ref": "GoogleCloudAiplatformV1ComputeTokensResponse" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "countTokens": { + "description": "Perform a token counting.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/publishers/{publishersId}/models/{modelsId}:countTokens", + "httpMethod": "POST", + "id": "aiplatform.projects.locations.publishers.models.countTokens", + "parameterOrder": [ + "endpoint" + ], + "parameters": { + "endpoint": { + "description": "Required. The name of the Endpoint requested to perform token counting. Format: `projects/{project}/locations/{location}/endpoints/{endpoint}`", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/publishers/[^/]+/models/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+endpoint}:countTokens", + "request": { + "$ref": "GoogleCloudAiplatformV1CountTokensRequest" + }, + "response": { + "$ref": "GoogleCloudAiplatformV1CountTokensResponse" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "fetchPredictOperation": { + "description": "Fetch an asynchronous online prediction operation.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/publishers/{publishersId}/models/{modelsId}:fetchPredictOperation", + "httpMethod": "POST", + "id": "aiplatform.projects.locations.publishers.models.fetchPredictOperation", + "parameterOrder": [ + "endpoint" + ], + "parameters": { + "endpoint": { + "description": "Required. The name of the Endpoint requested to serve the prediction. Format: `projects/{project}/locations/{location}/endpoints/{endpoint}` or `projects/{project}/locations/{location}/publishers/{publisher}/models/{model}`", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/publishers/[^/]+/models/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+endpoint}:fetchPredictOperation", + "request": { + "$ref": "GoogleCloudAiplatformV1FetchPredictOperationRequest" + }, + "response": { + "$ref": "GoogleLongrunningOperation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only" + ] + }, + "generateContent": { + "description": "Generate content with multimodal inputs.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/publishers/{publishersId}/models/{modelsId}:generateContent", + "httpMethod": "POST", + "id": "aiplatform.projects.locations.publishers.models.generateContent", + "parameterOrder": [ + "model" + ], + "parameters": { + "model": { + "description": "Required. The fully qualified name of the publisher model or tuned model endpoint to use. 
Publisher model format: `projects/{project}/locations/{location}/publishers/*/models/*` Tuned model endpoint format: `projects/{project}/locations/{location}/endpoints/{endpoint}`", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/publishers/[^/]+/models/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+model}:generateContent", + "request": { + "$ref": "GoogleCloudAiplatformV1GenerateContentRequest" + }, + "response": { + "$ref": "GoogleCloudAiplatformV1GenerateContentResponse" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only" + ] + }, + "predict": { + "description": "Perform an online prediction.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/publishers/{publishersId}/models/{modelsId}:predict", + "httpMethod": "POST", + "id": "aiplatform.projects.locations.publishers.models.predict", + "parameterOrder": [ + "endpoint" + ], + "parameters": { + "endpoint": { + "description": "Required. The name of the Endpoint requested to serve the prediction. Format: `projects/{project}/locations/{location}/endpoints/{endpoint}`", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/publishers/[^/]+/models/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+endpoint}:predict", + "request": { + "$ref": "GoogleCloudAiplatformV1PredictRequest" + }, + "response": { + "$ref": "GoogleCloudAiplatformV1PredictResponse" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only" + ] + }, + "predictLongRunning": { + "description": "", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/publishers/{publishersId}/models/{modelsId}:predictLongRunning", + "httpMethod": "POST", + "id": "aiplatform.projects.locations.publishers.models.predictLongRunning", + "parameterOrder": [ + "endpoint" + ], + "parameters": { + "endpoint": { + "description": "Required. The name of the Endpoint requested to serve the prediction. Format: `projects/{project}/locations/{location}/endpoints/{endpoint}` or `projects/{project}/locations/{location}/publishers/{publisher}/models/{model}`", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/publishers/[^/]+/models/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+endpoint}:predictLongRunning", + "request": { + "$ref": "GoogleCloudAiplatformV1PredictLongRunningRequest" + }, + "response": { + "$ref": "GoogleLongrunningOperation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only" + ] + }, + "rawPredict": { + "description": "Perform an online prediction with an arbitrary HTTP payload. The response includes the following HTTP headers: * `X-Vertex-AI-Endpoint-Id`: ID of the Endpoint that served this prediction. * `X-Vertex-AI-Deployed-Model-Id`: ID of the Endpoint's DeployedModel that served this prediction.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/publishers/{publishersId}/models/{modelsId}:rawPredict", + "httpMethod": "POST", + "id": "aiplatform.projects.locations.publishers.models.rawPredict", + "parameterOrder": [ + "endpoint" + ], + "parameters": { + "endpoint": { + "description": "Required. The name of the Endpoint requested to serve the prediction. 
Format: `projects/{project}/locations/{location}/endpoints/{endpoint}`", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/publishers/[^/]+/models/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+endpoint}:rawPredict", + "request": { + "$ref": "GoogleCloudAiplatformV1RawPredictRequest" + }, + "response": { + "$ref": "GoogleApiHttpBody" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only" + ] + }, + "serverStreamingPredict": { + "description": "Perform a server-side streaming online prediction request for Vertex LLM streaming.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/publishers/{publishersId}/models/{modelsId}:serverStreamingPredict", + "httpMethod": "POST", + "id": "aiplatform.projects.locations.publishers.models.serverStreamingPredict", + "parameterOrder": [ + "endpoint" + ], + "parameters": { + "endpoint": { + "description": "Required. The name of the Endpoint requested to serve the prediction. Format: `projects/{project}/locations/{location}/endpoints/{endpoint}`", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/publishers/[^/]+/models/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+endpoint}:serverStreamingPredict", + "request": { + "$ref": "GoogleCloudAiplatformV1StreamingPredictRequest" + }, + "response": { + "$ref": "GoogleCloudAiplatformV1StreamingPredictResponse" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only" + ] + }, + "streamGenerateContent": { + "description": "Generate content with multimodal inputs with streaming support.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/publishers/{publishersId}/models/{modelsId}:streamGenerateContent", + "httpMethod": "POST", + "id": "aiplatform.projects.locations.publishers.models.streamGenerateContent", + "parameterOrder": [ + "model" + ], + "parameters": { + "model": { + "description": "Required. The fully qualified name of the publisher model or tuned model endpoint to use. Publisher model format: `projects/{project}/locations/{location}/publishers/*/models/*` Tuned model endpoint format: `projects/{project}/locations/{location}/endpoints/{endpoint}`", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/publishers/[^/]+/models/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+model}:streamGenerateContent", + "request": { + "$ref": "GoogleCloudAiplatformV1GenerateContentRequest" + }, + "response": { + "$ref": "GoogleCloudAiplatformV1GenerateContentResponse" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only" + ] + }, + "streamRawPredict": { + "description": "Perform a streaming online prediction with an arbitrary HTTP payload.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/publishers/{publishersId}/models/{modelsId}:streamRawPredict", + "httpMethod": "POST", + "id": "aiplatform.projects.locations.publishers.models.streamRawPredict", + "parameterOrder": [ + "endpoint" + ], + "parameters": { + "endpoint": { + "description": "Required. The name of the Endpoint requested to serve the prediction. 
Format: `projects/{project}/locations/{location}/endpoints/{endpoint}`", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/publishers/[^/]+/models/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+endpoint}:streamRawPredict", + "request": { + "$ref": "GoogleCloudAiplatformV1StreamRawPredictRequest" + }, + "response": { + "$ref": "GoogleApiHttpBody" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only" + ] + } + } + } + } + }, + "ragCorpora": { + "methods": { + "create": { + "description": "Creates a RagCorpus.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/ragCorpora", + "httpMethod": "POST", + "id": "aiplatform.projects.locations.ragCorpora.create", + "parameterOrder": [ + "parent" + ], + "parameters": { + "parent": { + "description": "Required. The resource name of the Location to create the RagCorpus in. Format: `projects/{project}/locations/{location}`", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+parent}/ragCorpora", + "request": { + "$ref": "GoogleCloudAiplatformV1RagCorpus" + }, + "response": { + "$ref": "GoogleLongrunningOperation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "delete": { + "description": "Deletes a RagCorpus.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/ragCorpora/{ragCorporaId}", + "httpMethod": "DELETE", + "id": "aiplatform.projects.locations.ragCorpora.delete", + "parameterOrder": [ + "name" + ], + "parameters": { + "force": { + "description": "Optional. If set to true, any RagFiles in this RagCorpus will also be deleted. Otherwise, the request will only work if the RagCorpus has no RagFiles.", + "location": "query", + "type": "boolean" + }, + "name": { + "description": "Required. The name of the RagCorpus resource to be deleted. Format: `projects/{project}/locations/{location}/ragCorpora/{rag_corpus}`", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/ragCorpora/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+name}", + "response": { + "$ref": "GoogleLongrunningOperation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "get": { + "description": "Gets a RagCorpus.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/ragCorpora/{ragCorporaId}", + "httpMethod": "GET", + "id": "aiplatform.projects.locations.ragCorpora.get", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "Required. The name of the RagCorpus resource. Format: `projects/{project}/locations/{location}/ragCorpora/{rag_corpus}`", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/ragCorpora/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+name}", + "response": { + "$ref": "GoogleCloudAiplatformV1RagCorpus" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "list": { + "description": "Lists RagCorpora in a Location.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/ragCorpora", + "httpMethod": "GET", + "id": "aiplatform.projects.locations.ragCorpora.list", + "parameterOrder": [ + "parent" + ], + "parameters": { + "pageSize": { + "description": "Optional. The standard list page size.", + "format": "int32", + "location": "query", + "type": "integer" + }, + "pageToken": { + "description": "Optional. 
The standard list page token. Typically obtained via ListRagCorporaResponse.next_page_token of the previous VertexRagDataService.ListRagCorpora call.", + "location": "query", + "type": "string" + }, + "parent": { + "description": "Required. The resource name of the Location from which to list the RagCorpora. Format: `projects/{project}/locations/{location}`", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+parent}/ragCorpora", + "response": { + "$ref": "GoogleCloudAiplatformV1ListRagCorporaResponse" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "patch": { + "description": "Updates a RagCorpus.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/ragCorpora/{ragCorporaId}", + "httpMethod": "PATCH", + "id": "aiplatform.projects.locations.ragCorpora.patch", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "Output only. The resource name of the RagCorpus.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/ragCorpora/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+name}", + "request": { + "$ref": "GoogleCloudAiplatformV1RagCorpus" + }, + "response": { + "$ref": "GoogleLongrunningOperation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + } + }, + "resources": { + "operations": { + "methods": { + "cancel": { + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/ragCorpora/{ragCorporaId}/operations/{operationsId}:cancel", + "httpMethod": "POST", + "id": "aiplatform.projects.locations.ragCorpora.operations.cancel", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "The name of the operation resource to be cancelled.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/ragCorpora/[^/]+/operations/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+name}:cancel", + "response": { + "$ref": "GoogleProtobufEmpty" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "delete": { + "description": "Deletes a long-running operation. This method indicates that the client is no longer interested in the operation result. It does not cancel the operation. 
If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/ragCorpora/{ragCorporaId}/operations/{operationsId}", + "httpMethod": "DELETE", + "id": "aiplatform.projects.locations.ragCorpora.operations.delete", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "The name of the operation resource to be deleted.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/ragCorpora/[^/]+/operations/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+name}", + "response": { + "$ref": "GoogleProtobufEmpty" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "get": { + "description": "Gets the latest state of a long-running operation. Clients can use this method to poll the operation result at intervals as recommended by the API service.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/ragCorpora/{ragCorporaId}/operations/{operationsId}", + "httpMethod": "GET", + "id": "aiplatform.projects.locations.ragCorpora.operations.get", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "The name of the operation resource.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/ragCorpora/[^/]+/operations/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+name}", + "response": { + "$ref": "GoogleLongrunningOperation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "list": { + "description": "Lists operations that match the specified filter in the request. If the server doesn't support this method, it returns `UNIMPLEMENTED`.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/ragCorpora/{ragCorporaId}/operations", + "httpMethod": "GET", + "id": "aiplatform.projects.locations.ragCorpora.operations.list", + "parameterOrder": [ + "name" + ], + "parameters": { + "filter": { + "description": "The standard list filter.", + "location": "query", + "type": "string" + }, + "name": { + "description": "The name of the operation's parent resource.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/ragCorpora/[^/]+$", + "required": true, + "type": "string" + }, + "pageSize": { + "description": "The standard list page size.", + "format": "int32", + "location": "query", + "type": "integer" + }, + "pageToken": { + "description": "The standard list page token.", + "location": "query", + "type": "string" + } + }, + "path": "v1/{+name}/operations", + "response": { + "$ref": "GoogleLongrunningListOperationsResponse" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "wait": { + "description": "Waits until the specified long-running operation is done or reaches at most a specified timeout, returning the latest state. If the operation is already done, the latest state is immediately returned. If the timeout specified is greater than the default HTTP/RPC timeout, the HTTP/RPC timeout is used. If the server does not support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Note that this method is on a best-effort basis. 
It may return the latest state before the specified timeout (including immediately), meaning even an immediate response is no guarantee that the operation is done.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/ragCorpora/{ragCorporaId}/operations/{operationsId}:wait", + "httpMethod": "POST", + "id": "aiplatform.projects.locations.ragCorpora.operations.wait", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "The name of the operation resource to wait on.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/ragCorpora/[^/]+/operations/[^/]+$", + "required": true, + "type": "string" + }, + "timeout": { + "description": "The maximum duration to wait before timing out. If left blank, the wait will be at most the time permitted by the underlying HTTP/RPC protocol. If RPC context deadline is also specified, the shorter one will be used.", + "format": "google-duration", + "location": "query", + "type": "string" + } + }, + "path": "v1/{+name}:wait", + "response": { + "$ref": "GoogleLongrunningOperation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + } + } + }, + "ragFiles": { + "methods": { + "delete": { + "description": "Deletes a RagFile.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/ragCorpora/{ragCorporaId}/ragFiles/{ragFilesId}", + "httpMethod": "DELETE", + "id": "aiplatform.projects.locations.ragCorpora.ragFiles.delete", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "Required. The name of the RagFile resource to be deleted. Format: `projects/{project}/locations/{location}/ragCorpora/{rag_corpus}/ragFiles/{rag_file}`", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/ragCorpora/[^/]+/ragFiles/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+name}", + "response": { + "$ref": "GoogleLongrunningOperation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "get": { + "description": "Gets a RagFile.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/ragCorpora/{ragCorporaId}/ragFiles/{ragFilesId}", + "httpMethod": "GET", + "id": "aiplatform.projects.locations.ragCorpora.ragFiles.get", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "Required. The name of the RagFile resource. Format: `projects/{project}/locations/{location}/ragCorpora/{rag_corpus}/ragFiles/{rag_file}`", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/ragCorpora/[^/]+/ragFiles/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+name}", + "response": { + "$ref": "GoogleCloudAiplatformV1RagFile" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "import": { + "description": "Import files from Google Cloud Storage or Google Drive into a RagCorpus.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/ragCorpora/{ragCorporaId}/ragFiles:import", + "httpMethod": "POST", + "id": "aiplatform.projects.locations.ragCorpora.ragFiles.import", + "parameterOrder": [ + "parent" + ], + "parameters": { + "parent": { + "description": "Required. The name of the RagCorpus resource into which to import files. 
Format: `projects/{project}/locations/{location}/ragCorpora/{rag_corpus}`", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/ragCorpora/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+parent}/ragFiles:import", + "request": { + "$ref": "GoogleCloudAiplatformV1ImportRagFilesRequest" + }, + "response": { + "$ref": "GoogleLongrunningOperation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "list": { + "description": "Lists RagFiles in a RagCorpus.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/ragCorpora/{ragCorporaId}/ragFiles", + "httpMethod": "GET", + "id": "aiplatform.projects.locations.ragCorpora.ragFiles.list", + "parameterOrder": [ + "parent" + ], + "parameters": { + "pageSize": { + "description": "Optional. The standard list page size.", + "format": "int32", + "location": "query", + "type": "integer" + }, + "pageToken": { + "description": "Optional. The standard list page token. Typically obtained via ListRagFilesResponse.next_page_token of the previous VertexRagDataService.ListRagFiles call.", + "location": "query", + "type": "string" + }, + "parent": { + "description": "Required. The resource name of the RagCorpus from which to list the RagFiles. Format: `projects/{project}/locations/{location}/ragCorpora/{rag_corpus}`", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/ragCorpora/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+parent}/ragFiles", + "response": { + "$ref": "GoogleCloudAiplatformV1ListRagFilesResponse" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + } + }, + "resources": { + "operations": { + "methods": { + "cancel": { + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/ragCorpora/{ragCorporaId}/ragFiles/{ragFilesId}/operations/{operationsId}:cancel", + "httpMethod": "POST", + "id": "aiplatform.projects.locations.ragCorpora.ragFiles.operations.cancel", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "The name of the operation resource to be cancelled.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/ragCorpora/[^/]+/ragFiles/[^/]+/operations/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+name}:cancel", + "response": { + "$ref": "GoogleProtobufEmpty" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "delete": { + "description": "Deletes a long-running operation. This method indicates that the client is no longer interested in the operation result. It does not cancel the operation. 
If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/ragCorpora/{ragCorporaId}/ragFiles/{ragFilesId}/operations/{operationsId}", + "httpMethod": "DELETE", + "id": "aiplatform.projects.locations.ragCorpora.ragFiles.operations.delete", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "The name of the operation resource to be deleted.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/ragCorpora/[^/]+/ragFiles/[^/]+/operations/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+name}", + "response": { + "$ref": "GoogleProtobufEmpty" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "get": { + "description": "Gets the latest state of a long-running operation. Clients can use this method to poll the operation result at intervals as recommended by the API service.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/ragCorpora/{ragCorporaId}/ragFiles/{ragFilesId}/operations/{operationsId}", + "httpMethod": "GET", + "id": "aiplatform.projects.locations.ragCorpora.ragFiles.operations.get", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "The name of the operation resource.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/ragCorpora/[^/]+/ragFiles/[^/]+/operations/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+name}", + "response": { + "$ref": "GoogleLongrunningOperation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "list": { + "description": "Lists operations that match the specified filter in the request. If the server doesn't support this method, it returns `UNIMPLEMENTED`.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/ragCorpora/{ragCorporaId}/ragFiles/{ragFilesId}/operations", + "httpMethod": "GET", + "id": "aiplatform.projects.locations.ragCorpora.ragFiles.operations.list", + "parameterOrder": [ + "name" + ], + "parameters": { + "filter": { + "description": "The standard list filter.", + "location": "query", + "type": "string" + }, + "name": { + "description": "The name of the operation's parent resource.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/ragCorpora/[^/]+/ragFiles/[^/]+$", + "required": true, + "type": "string" + }, + "pageSize": { + "description": "The standard list page size.", + "format": "int32", + "location": "query", + "type": "integer" + }, + "pageToken": { + "description": "The standard list page token.", + "location": "query", + "type": "string" + } + }, + "path": "v1/{+name}/operations", + "response": { + "$ref": "GoogleLongrunningListOperationsResponse" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "wait": { + "description": "Waits until the specified long-running operation is done or reaches at most a specified timeout, returning the latest state. If the operation is already done, the latest state is immediately returned. If the timeout specified is greater than the default HTTP/RPC timeout, the HTTP/RPC timeout is used. If the server does not support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Note that this method is on a best-effort basis. 
It may return the latest state before the specified timeout (including immediately), meaning even an immediate response is no guarantee that the operation is done.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/ragCorpora/{ragCorporaId}/ragFiles/{ragFilesId}/operations/{operationsId}:wait", + "httpMethod": "POST", + "id": "aiplatform.projects.locations.ragCorpora.ragFiles.operations.wait", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "The name of the operation resource to wait on.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/ragCorpora/[^/]+/ragFiles/[^/]+/operations/[^/]+$", + "required": true, + "type": "string" + }, + "timeout": { + "description": "The maximum duration to wait before timing out. If left blank, the wait will be at most the time permitted by the underlying HTTP/RPC protocol. If RPC context deadline is also specified, the shorter one will be used.", + "format": "google-duration", + "location": "query", + "type": "string" + } + }, + "path": "v1/{+name}:wait", + "response": { + "$ref": "GoogleLongrunningOperation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + } + } + } + } + } + } + }, + "reasoningEngines": { + "methods": { + "create": { + "description": "Creates a reasoning engine.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/reasoningEngines", + "httpMethod": "POST", + "id": "aiplatform.projects.locations.reasoningEngines.create", + "parameterOrder": [ + "parent" + ], + "parameters": { + "parent": { + "description": "Required. The resource name of the Location to create the ReasoningEngine in. Format: `projects/{project}/locations/{location}`", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+parent}/reasoningEngines", + "request": { + "$ref": "GoogleCloudAiplatformV1ReasoningEngine" + }, + "response": { + "$ref": "GoogleLongrunningOperation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "delete": { + "description": "Deletes a reasoning engine.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/reasoningEngines/{reasoningEnginesId}", + "httpMethod": "DELETE", + "id": "aiplatform.projects.locations.reasoningEngines.delete", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "Required. The name of the ReasoningEngine resource to be deleted. Format: `projects/{project}/locations/{location}/reasoningEngines/{reasoning_engine}`", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/reasoningEngines/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+name}", + "response": { + "$ref": "GoogleLongrunningOperation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "get": { + "description": "Gets a reasoning engine.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/reasoningEngines/{reasoningEnginesId}", + "httpMethod": "GET", + "id": "aiplatform.projects.locations.reasoningEngines.get", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "Required. The name of the ReasoningEngine resource. 
Format: `projects/{project}/locations/{location}/reasoningEngines/{reasoning_engine}`", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/reasoningEngines/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+name}", + "response": { + "$ref": "GoogleCloudAiplatformV1ReasoningEngine" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "list": { + "description": "Lists reasoning engines in a location.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/reasoningEngines", + "httpMethod": "GET", + "id": "aiplatform.projects.locations.reasoningEngines.list", + "parameterOrder": [ + "parent" + ], + "parameters": { + "filter": { + "description": "Optional. The standard list filter. More detail in [AIP-160](https://google.aip.dev/160).", + "location": "query", + "type": "string" + }, + "pageSize": { + "description": "Optional. The standard list page size.", + "format": "int32", + "location": "query", + "type": "integer" + }, + "pageToken": { + "description": "Optional. The standard list page token.", + "location": "query", + "type": "string" + }, + "parent": { + "description": "Required. The resource name of the Location to list the ReasoningEngines from. Format: `projects/{project}/locations/{location}`", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+parent}/reasoningEngines", + "response": { + "$ref": "GoogleCloudAiplatformV1ListReasoningEnginesResponse" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "patch": { + "description": "Updates a reasoning engine.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/reasoningEngines/{reasoningEnginesId}", + "httpMethod": "PATCH", + "id": "aiplatform.projects.locations.reasoningEngines.patch", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "Identifier. The resource name of the ReasoningEngine.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/reasoningEngines/[^/]+$", + "required": true, + "type": "string" + }, + "updateMask": { + "description": "Optional. Mask specifying which fields to update.", + "format": "google-fieldmask", + "location": "query", + "type": "string" + } + }, + "path": "v1/{+name}", + "request": { + "$ref": "GoogleCloudAiplatformV1ReasoningEngine" + }, + "response": { + "$ref": "GoogleLongrunningOperation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "query": { + "description": "Queries using a reasoning engine.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/reasoningEngines/{reasoningEnginesId}:query", + "httpMethod": "POST", + "id": "aiplatform.projects.locations.reasoningEngines.query", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "Required. The name of the ReasoningEngine resource to use. 
Format: `projects/{project}/locations/{location}/reasoningEngines/{reasoning_engine}`", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/reasoningEngines/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+name}:query", + "request": { + "$ref": "GoogleCloudAiplatformV1QueryReasoningEngineRequest" + }, + "response": { + "$ref": "GoogleCloudAiplatformV1QueryReasoningEngineResponse" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + } + }, + "resources": { + "operations": { + "methods": { + "cancel": { + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/reasoningEngines/{reasoningEnginesId}/operations/{operationsId}:cancel", + "httpMethod": "POST", + "id": "aiplatform.projects.locations.reasoningEngines.operations.cancel", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "The name of the operation resource to be cancelled.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/reasoningEngines/[^/]+/operations/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+name}:cancel", + "response": { + "$ref": "GoogleProtobufEmpty" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "delete": { + "description": "Deletes a long-running operation. This method indicates that the client is no longer interested in the operation result. It does not cancel the operation. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/reasoningEngines/{reasoningEnginesId}/operations/{operationsId}", + "httpMethod": "DELETE", + "id": "aiplatform.projects.locations.reasoningEngines.operations.delete", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "The name of the operation resource to be deleted.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/reasoningEngines/[^/]+/operations/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+name}", + "response": { + "$ref": "GoogleProtobufEmpty" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "get": { + "description": "Gets the latest state of a long-running operation. 
Clients can use this method to poll the operation result at intervals as recommended by the API service.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/reasoningEngines/{reasoningEnginesId}/operations/{operationsId}", + "httpMethod": "GET", + "id": "aiplatform.projects.locations.reasoningEngines.operations.get", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "The name of the operation resource.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/reasoningEngines/[^/]+/operations/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+name}", + "response": { + "$ref": "GoogleLongrunningOperation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "list": { + "description": "Lists operations that match the specified filter in the request. If the server doesn't support this method, it returns `UNIMPLEMENTED`.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/reasoningEngines/{reasoningEnginesId}/operations", + "httpMethod": "GET", + "id": "aiplatform.projects.locations.reasoningEngines.operations.list", + "parameterOrder": [ + "name" + ], + "parameters": { + "filter": { + "description": "The standard list filter.", + "location": "query", + "type": "string" + }, + "name": { + "description": "The name of the operation's parent resource.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/reasoningEngines/[^/]+$", + "required": true, + "type": "string" + }, + "pageSize": { + "description": "The standard list page size.", + "format": "int32", + "location": "query", + "type": "integer" + }, + "pageToken": { + "description": "The standard list page token.", + "location": "query", + "type": "string" + } + }, + "path": "v1/{+name}/operations", + "response": { + "$ref": "GoogleLongrunningListOperationsResponse" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "wait": { + "description": "Waits until the specified long-running operation is done or reaches at most a specified timeout, returning the latest state. If the operation is already done, the latest state is immediately returned. If the timeout specified is greater than the default HTTP/RPC timeout, the HTTP/RPC timeout is used. If the server does not support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Note that this method is on a best-effort basis. 
It may return the latest state before the specified timeout (including immediately), meaning even an immediate response is no guarantee that the operation is done.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/reasoningEngines/{reasoningEnginesId}/operations/{operationsId}:wait", "httpMethod": "POST", - "id": "aiplatform.projects.locations.pipelineJobs.operations.wait", + "id": "aiplatform.projects.locations.reasoningEngines.operations.wait", "parameterOrder": [ "name" ], @@ -13825,7 +15483,7 @@ "name": { "description": "The name of the operation resource to wait on.", "location": "path", - "pattern": "^projects/[^/]+/locations/[^/]+/pipelineJobs/[^/]+/operations/[^/]+$", + "pattern": "^projects/[^/]+/locations/[^/]+/reasoningEngines/[^/]+/operations/[^/]+$", "required": true, "type": "string" }, @@ -13848,244 +15506,6 @@ } } }, - "publishers": { - "resources": { - "models": { - "methods": { - "computeTokens": { - "description": "Return a list of tokens based on the input text.", - "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/publishers/{publishersId}/models/{modelsId}:computeTokens", - "httpMethod": "POST", - "id": "aiplatform.projects.locations.publishers.models.computeTokens", - "parameterOrder": [ - "endpoint" - ], - "parameters": { - "endpoint": { - "description": "Required. The name of the Endpoint requested to get lists of tokens and token ids.", - "location": "path", - "pattern": "^projects/[^/]+/locations/[^/]+/publishers/[^/]+/models/[^/]+$", - "required": true, - "type": "string" - } - }, - "path": "v1/{+endpoint}:computeTokens", - "request": { - "$ref": "GoogleCloudAiplatformV1ComputeTokensRequest" - }, - "response": { - "$ref": "GoogleCloudAiplatformV1ComputeTokensResponse" - }, - "scopes": [ - "https://www.googleapis.com/auth/cloud-platform" - ] - }, - "countTokens": { - "description": "Perform a token counting.", - "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/publishers/{publishersId}/models/{modelsId}:countTokens", - "httpMethod": "POST", - "id": "aiplatform.projects.locations.publishers.models.countTokens", - "parameterOrder": [ - "endpoint" - ], - "parameters": { - "endpoint": { - "description": "Required. The name of the Endpoint requested to perform token counting. Format: `projects/{project}/locations/{location}/endpoints/{endpoint}`", - "location": "path", - "pattern": "^projects/[^/]+/locations/[^/]+/publishers/[^/]+/models/[^/]+$", - "required": true, - "type": "string" - } - }, - "path": "v1/{+endpoint}:countTokens", - "request": { - "$ref": "GoogleCloudAiplatformV1CountTokensRequest" - }, - "response": { - "$ref": "GoogleCloudAiplatformV1CountTokensResponse" - }, - "scopes": [ - "https://www.googleapis.com/auth/cloud-platform" - ] - }, - "generateContent": { - "description": "Generate content with multimodal inputs.", - "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/publishers/{publishersId}/models/{modelsId}:generateContent", - "httpMethod": "POST", - "id": "aiplatform.projects.locations.publishers.models.generateContent", - "parameterOrder": [ - "model" - ], - "parameters": { - "model": { - "description": "Required. The fully qualified name of the publisher model or tuned model endpoint to use. 
Publisher model format: `projects/{project}/locations/{location}/publishers/*/models/*` Tuned model endpoint format: `projects/{project}/locations/{location}/endpoints/{endpoint}`", - "location": "path", - "pattern": "^projects/[^/]+/locations/[^/]+/publishers/[^/]+/models/[^/]+$", - "required": true, - "type": "string" - } - }, - "path": "v1/{+model}:generateContent", - "request": { - "$ref": "GoogleCloudAiplatformV1GenerateContentRequest" - }, - "response": { - "$ref": "GoogleCloudAiplatformV1GenerateContentResponse" - }, - "scopes": [ - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/cloud-platform.read-only" - ] - }, - "predict": { - "description": "Perform an online prediction.", - "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/publishers/{publishersId}/models/{modelsId}:predict", - "httpMethod": "POST", - "id": "aiplatform.projects.locations.publishers.models.predict", - "parameterOrder": [ - "endpoint" - ], - "parameters": { - "endpoint": { - "description": "Required. The name of the Endpoint requested to serve the prediction. Format: `projects/{project}/locations/{location}/endpoints/{endpoint}`", - "location": "path", - "pattern": "^projects/[^/]+/locations/[^/]+/publishers/[^/]+/models/[^/]+$", - "required": true, - "type": "string" - } - }, - "path": "v1/{+endpoint}:predict", - "request": { - "$ref": "GoogleCloudAiplatformV1PredictRequest" - }, - "response": { - "$ref": "GoogleCloudAiplatformV1PredictResponse" - }, - "scopes": [ - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/cloud-platform.read-only" - ] - }, - "rawPredict": { - "description": "Perform an online prediction with an arbitrary HTTP payload. The response includes the following HTTP headers: * `X-Vertex-AI-Endpoint-Id`: ID of the Endpoint that served this prediction. * `X-Vertex-AI-Deployed-Model-Id`: ID of the Endpoint's DeployedModel that served this prediction.", - "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/publishers/{publishersId}/models/{modelsId}:rawPredict", - "httpMethod": "POST", - "id": "aiplatform.projects.locations.publishers.models.rawPredict", - "parameterOrder": [ - "endpoint" - ], - "parameters": { - "endpoint": { - "description": "Required. The name of the Endpoint requested to serve the prediction. Format: `projects/{project}/locations/{location}/endpoints/{endpoint}`", - "location": "path", - "pattern": "^projects/[^/]+/locations/[^/]+/publishers/[^/]+/models/[^/]+$", - "required": true, - "type": "string" - } - }, - "path": "v1/{+endpoint}:rawPredict", - "request": { - "$ref": "GoogleCloudAiplatformV1RawPredictRequest" - }, - "response": { - "$ref": "GoogleApiHttpBody" - }, - "scopes": [ - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/cloud-platform.read-only" - ] - }, - "serverStreamingPredict": { - "description": "Perform a server-side streaming online prediction request for Vertex LLM streaming.", - "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/publishers/{publishersId}/models/{modelsId}:serverStreamingPredict", - "httpMethod": "POST", - "id": "aiplatform.projects.locations.publishers.models.serverStreamingPredict", - "parameterOrder": [ - "endpoint" - ], - "parameters": { - "endpoint": { - "description": "Required. The name of the Endpoint requested to serve the prediction. 
Format: `projects/{project}/locations/{location}/endpoints/{endpoint}`", - "location": "path", - "pattern": "^projects/[^/]+/locations/[^/]+/publishers/[^/]+/models/[^/]+$", - "required": true, - "type": "string" - } - }, - "path": "v1/{+endpoint}:serverStreamingPredict", - "request": { - "$ref": "GoogleCloudAiplatformV1StreamingPredictRequest" - }, - "response": { - "$ref": "GoogleCloudAiplatformV1StreamingPredictResponse" - }, - "scopes": [ - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/cloud-platform.read-only" - ] - }, - "streamGenerateContent": { - "description": "Generate content with multimodal inputs with streaming support.", - "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/publishers/{publishersId}/models/{modelsId}:streamGenerateContent", - "httpMethod": "POST", - "id": "aiplatform.projects.locations.publishers.models.streamGenerateContent", - "parameterOrder": [ - "model" - ], - "parameters": { - "model": { - "description": "Required. The fully qualified name of the publisher model or tuned model endpoint to use. Publisher model format: `projects/{project}/locations/{location}/publishers/*/models/*` Tuned model endpoint format: `projects/{project}/locations/{location}/endpoints/{endpoint}`", - "location": "path", - "pattern": "^projects/[^/]+/locations/[^/]+/publishers/[^/]+/models/[^/]+$", - "required": true, - "type": "string" - } - }, - "path": "v1/{+model}:streamGenerateContent", - "request": { - "$ref": "GoogleCloudAiplatformV1GenerateContentRequest" - }, - "response": { - "$ref": "GoogleCloudAiplatformV1GenerateContentResponse" - }, - "scopes": [ - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/cloud-platform.read-only" - ] - }, - "streamRawPredict": { - "description": "Perform a streaming online prediction with an arbitrary HTTP payload.", - "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/publishers/{publishersId}/models/{modelsId}:streamRawPredict", - "httpMethod": "POST", - "id": "aiplatform.projects.locations.publishers.models.streamRawPredict", - "parameterOrder": [ - "endpoint" - ], - "parameters": { - "endpoint": { - "description": "Required. The name of the Endpoint requested to serve the prediction. Format: `projects/{project}/locations/{location}/endpoints/{endpoint}`", - "location": "path", - "pattern": "^projects/[^/]+/locations/[^/]+/publishers/[^/]+/models/[^/]+$", - "required": true, - "type": "string" - } - }, - "path": "v1/{+endpoint}:streamRawPredict", - "request": { - "$ref": "GoogleCloudAiplatformV1StreamRawPredictRequest" - }, - "response": { - "$ref": "GoogleApiHttpBody" - }, - "scopes": [ - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/cloud-platform.read-only" - ] - } - } - } - } - }, "schedules": { "methods": { "create": { @@ -14275,7 +15695,7 @@ ] }, "resume": { - "description": "Resumes a paused Schedule to start scheduling new runs. Will mark Schedule.state to 'ACTIVE'. Only paused Schedule can be resumed. When the Schedule is resumed, new runs will be scheduled starting from the next execution time after the current time based on the time_specification in the Schedule. If Schedule.catchUp is set up true, all missed runs will be scheduled for backfill first.", + "description": "Resumes a paused Schedule to start scheduling new runs. Will mark Schedule.state to 'ACTIVE'. Only paused Schedule can be resumed. 
When the Schedule is resumed, new runs will be scheduled starting from the next execution time after the current time based on the time_specification in the Schedule. If Schedule.catch_up is set up true, all missed runs will be scheduled for backfill first.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/schedules/{schedulesId}:resume", "httpMethod": "POST", "id": "aiplatform.projects.locations.schedules.resume", @@ -14307,7 +15727,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/schedules/{schedulesId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "aiplatform.projects.locations.schedules.operations.cancel", @@ -14623,7 +16043,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. 
On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/specialistPools/{specialistPoolsId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "aiplatform.projects.locations.specialistPools.operations.cancel", @@ -14922,7 +16342,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/studies/{studiesId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "aiplatform.projects.locations.studies.operations.cancel", @@ -15359,7 +16779,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. 
On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/studies/{studiesId}/trials/{trialsId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "aiplatform.projects.locations.studies.trials.operations.cancel", @@ -15996,7 +17416,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/tensorboards/{tensorboardsId}/experiments/{experimentsId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "aiplatform.projects.locations.tensorboards.experiments.operations.cancel", @@ -16378,7 +17798,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. 
On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/tensorboards/{tensorboardsId}/experiments/{experimentsId}/runs/{runsId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "aiplatform.projects.locations.tensorboards.experiments.runs.operations.cancel", @@ -16801,7 +18221,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/tensorboards/{tensorboardsId}/experiments/{experimentsId}/runs/{runsId}/timeSeries/{timeSeriesId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "aiplatform.projects.locations.tensorboards.experiments.runs.timeSeries.operations.cancel", @@ -16958,7 +18378,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. 
On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/tensorboards/{tensorboardsId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "aiplatform.projects.locations.tensorboards.operations.cancel", @@ -17268,7 +18688,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/trainingPipelines/{trainingPipelinesId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "aiplatform.projects.locations.trainingPipelines.operations.cancel", @@ -17522,7 +18942,7 @@ "type": "integer" }, "pageToken": { - "description": "Optional. The standard list page token. Typically obtained via ListTuningJob.next_page_token of the previous GenAiTuningService.ListTuningJob][] call.", + "description": "Optional. The standard list page token. Typically obtained via ListTuningJobsResponse.next_page_token of the previous GenAiTuningService.ListTuningJob][] call.", "location": "query", "type": "string" }, @@ -17575,7 +18995,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. 
On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/tuningJobs/{tuningJobsId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "aiplatform.projects.locations.tuningJobs.operations.cancel", @@ -18172,6 +19592,14 @@ "description": "Video", "id": "CloudAiLargeModelsVisionVideo", "properties": { + "encodedVideo": { + "description": "Base 64 encoded video bytes.", + "type": "string" + }, + "encoding": { + "description": "Video encoding, for example \"video/mp4\".", + "type": "string" + }, "uri": { "description": "Path to another storage (typically Google Cloud Storage).", "type": "string" @@ -18463,6 +19891,28 @@ }, "type": "object" }, + "GoogleCloudAiplatformV1ApiAuth": { + "description": "The generic reusable api auth config.", + "id": "GoogleCloudAiplatformV1ApiAuth", + "properties": { + "apiKeyConfig": { + "$ref": "GoogleCloudAiplatformV1ApiAuthApiKeyConfig", + "description": "The API secret." + } + }, + "type": "object" + }, + "GoogleCloudAiplatformV1ApiAuthApiKeyConfig": { + "description": "The API secret.", + "id": "GoogleCloudAiplatformV1ApiAuthApiKeyConfig", + "properties": { + "apiKeySecretVersion": { + "description": "Required. The SecretManager secret version resource name storing API key. e.g. projects/{project}/secrets/{secret}/versions/{version}", + "type": "string" + } + }, + "type": "object" + }, "GoogleCloudAiplatformV1Artifact": { "description": "Instance of a general artifact.", "id": "GoogleCloudAiplatformV1Artifact", @@ -18623,6 +20073,64 @@ }, "type": "object" }, + "GoogleCloudAiplatformV1AugmentPromptRequest": { + "description": "Request message for AugmentPrompt.", + "id": "GoogleCloudAiplatformV1AugmentPromptRequest", + "properties": { + "contents": { + "description": "Optional. Input content to augment, only text format is supported for now.", + "items": { + "$ref": "GoogleCloudAiplatformV1Content" + }, + "type": "array" + }, + "model": { + "$ref": "GoogleCloudAiplatformV1AugmentPromptRequestModel", + "description": "Optional. Metadata of the backend deployed model." + }, + "vertexRagStore": { + "$ref": "GoogleCloudAiplatformV1VertexRagStore", + "description": "Optional. Retrieves contexts from the Vertex RagStore." + } + }, + "type": "object" + }, + "GoogleCloudAiplatformV1AugmentPromptRequestModel": { + "description": "Metadata of the backend deployed model.", + "id": "GoogleCloudAiplatformV1AugmentPromptRequestModel", + "properties": { + "model": { + "description": "Optional. The model that the user will send the augmented prompt for content generation.", + "type": "string" + }, + "modelVersion": { + "description": "Optional. 
The model version of the backend deployed model.", + "type": "string" + } + }, + "type": "object" + }, + "GoogleCloudAiplatformV1AugmentPromptResponse": { + "description": "Response message for AugmentPrompt.", + "id": "GoogleCloudAiplatformV1AugmentPromptResponse", + "properties": { + "augmentedPrompt": { + "description": "Augmented prompt, only text format is supported for now.", + "items": { + "$ref": "GoogleCloudAiplatformV1Content" + }, + "type": "array" + }, + "facts": { + "description": "Retrieved facts from RAG data sources.", + "items": { + "$ref": "GoogleCloudAiplatformV1Fact" + }, + "type": "array" + } + }, + "type": "object" + }, "GoogleCloudAiplatformV1AutomaticResources": { "description": "A description of resources that to large degree are decided by Vertex AI, and require only a modest additional configuration. Each Model supporting these resources documents its specific guidelines.", "id": "GoogleCloudAiplatformV1AutomaticResources", @@ -18903,7 +20411,7 @@ }, "request": { "$ref": "GoogleCloudAiplatformV1MigrateResourceRequest", - "description": "It's the same as the value in MigrateResourceRequest.migrate_resource_requests." + "description": "It's the same as the value in BatchMigrateResourcesRequest.migrate_resource_requests." } }, "type": "object" @@ -19438,6 +20946,121 @@ }, "type": "object" }, + "GoogleCloudAiplatformV1CacheConfig": { + "description": "Config of GenAI caching features. This is a singleton resource.", + "id": "GoogleCloudAiplatformV1CacheConfig", + "properties": { + "disableCache": { + "description": "If set to true, disables GenAI caching. Otherwise caching is enabled.", + "type": "boolean" + }, + "name": { + "description": "Identifier. Name of the cache config. Format: - `projects/{project}/cacheConfig`.", + "type": "string" + } + }, + "type": "object" + }, + "GoogleCloudAiplatformV1CachedContent": { + "description": "A resource used in LLM queries for users to explicitly specify what to cache and how to cache.", + "id": "GoogleCloudAiplatformV1CachedContent", + "properties": { + "contents": { + "description": "Optional. Input only. Immutable. The content to cache", + "items": { + "$ref": "GoogleCloudAiplatformV1Content" + }, + "type": "array" + }, + "createTime": { + "description": "Output only. Creatation time of the cache entry.", + "format": "google-datetime", + "readOnly": true, + "type": "string" + }, + "displayName": { + "description": "Optional. Immutable. The user-generated meaningful display name of the cached content.", + "type": "string" + }, + "expireTime": { + "description": "Timestamp of when this resource is considered expired. This is *always* provided on output, regardless of what was sent on input.", + "format": "google-datetime", + "type": "string" + }, + "model": { + "description": "Immutable. The name of the publisher model to use for cached content. Format: projects/{project}/locations/{location}/publishers/{publisher}/models/{model}", + "type": "string" + }, + "name": { + "description": "Immutable. Identifier. The server-generated resource name of the cached content Format: projects/{project}/locations/{location}/cachedContents/{cached_content}", + "type": "string" + }, + "systemInstruction": { + "$ref": "GoogleCloudAiplatformV1Content", + "description": "Optional. Input only. Immutable. Developer set system instruction. Currently, text only" + }, + "toolConfig": { + "$ref": "GoogleCloudAiplatformV1ToolConfig", + "description": "Optional. Input only. Immutable. Tool config. 
This config is shared for all tools" + }, + "tools": { + "description": "Optional. Input only. Immutable. A list of `Tools` the model may use to generate the next response", + "items": { + "$ref": "GoogleCloudAiplatformV1Tool" + }, + "type": "array" + }, + "ttl": { + "description": "Input only. The TTL for this resource. The expiration time is computed: now + TTL.", + "format": "google-duration", + "type": "string" + }, + "updateTime": { + "description": "Output only. When the cache entry was last updated in UTC time.", + "format": "google-datetime", + "readOnly": true, + "type": "string" + }, + "usageMetadata": { + "$ref": "GoogleCloudAiplatformV1CachedContentUsageMetadata", + "description": "Output only. Metadata on the usage of the cached content.", + "readOnly": true + } + }, + "type": "object" + }, + "GoogleCloudAiplatformV1CachedContentUsageMetadata": { + "description": "Metadata on the usage of the cached content.", + "id": "GoogleCloudAiplatformV1CachedContentUsageMetadata", + "properties": { + "audioDurationSeconds": { + "description": "Duration of audio in seconds.", + "format": "int32", + "type": "integer" + }, + "imageCount": { + "description": "Number of images.", + "format": "int32", + "type": "integer" + }, + "textCount": { + "description": "Number of text characters.", + "format": "int32", + "type": "integer" + }, + "totalTokenCount": { + "description": "Total number of tokens that the cached content consumes.", + "format": "int32", + "type": "integer" + }, + "videoDurationSeconds": { + "description": "Duration of video in seconds.", + "format": "int32", + "type": "integer" + } + }, + "type": "object" + }, "GoogleCloudAiplatformV1CancelBatchPredictionJobRequest": { "description": "Request message for JobService.CancelBatchPredictionJob.", "id": "GoogleCloudAiplatformV1CancelBatchPredictionJobRequest", @@ -19530,7 +21153,7 @@ "Token generation reached a natural stopping point or a configured stop sequence.", "Token generation reached the configured maximum output tokens.", "Token generation stopped because the content potentially contains safety violations. NOTE: When streaming, content is empty if content filters blocks the output.", - "Token generation stopped because the content potentially contains copyright violations.", + "The token generation stopped because of potential recitation.", "All other reasons that stopped the token generation.", "Token generation stopped because the content contains forbidden terms.", "Token generation stopped for potentially containing prohibited content.", @@ -19657,6 +21280,48 @@ }, "type": "object" }, + "GoogleCloudAiplatformV1Claim": { + "description": "Claim that is extracted from the input text and facts that support it.", + "id": "GoogleCloudAiplatformV1Claim", + "properties": { + "endIndex": { + "description": "Index in the input text where the claim ends (exclusive).", + "format": "int32", + "type": "integer" + }, + "factIndexes": { + "description": "Indexes of the facts supporting this claim.", + "items": { + "format": "int32", + "type": "integer" + }, + "type": "array" + }, + "score": { + "description": "Confidence score of this corroboration.", + "format": "float", + "type": "number" + }, + "startIndex": { + "description": "Index in the input text where the claim starts (inclusive).", + "format": "int32", + "type": "integer" + } + }, + "type": "object" + }, + "GoogleCloudAiplatformV1ClientConnectionConfig": { + "description": "Configurations (e.g. 
inference timeout) that are applied on your endpoints.", + "id": "GoogleCloudAiplatformV1ClientConnectionConfig", + "properties": { + "inferenceTimeout": { + "description": "Customizable online prediction request timeout.", + "format": "google-duration", + "type": "string" + } + }, + "type": "object" + }, "GoogleCloudAiplatformV1CoherenceInput": { "description": "Input for coherence metric.", "id": "GoogleCloudAiplatformV1CoherenceInput", @@ -19719,6 +21384,80 @@ }, "type": "object" }, + "GoogleCloudAiplatformV1CometInput": { + "description": "Input for Comet metric.", + "id": "GoogleCloudAiplatformV1CometInput", + "properties": { + "instance": { + "$ref": "GoogleCloudAiplatformV1CometInstance", + "description": "Required. Comet instance." + }, + "metricSpec": { + "$ref": "GoogleCloudAiplatformV1CometSpec", + "description": "Required. Spec for comet metric." + } + }, + "type": "object" + }, + "GoogleCloudAiplatformV1CometInstance": { + "description": "Spec for Comet instance - The fields used for evaluation are dependent on the comet version.", + "id": "GoogleCloudAiplatformV1CometInstance", + "properties": { + "prediction": { + "description": "Required. Output of the evaluated model.", + "type": "string" + }, + "reference": { + "description": "Optional. Ground truth used to compare against the prediction.", + "type": "string" + }, + "source": { + "description": "Optional. Source text in original language.", + "type": "string" + } + }, + "type": "object" + }, + "GoogleCloudAiplatformV1CometResult": { + "description": "Spec for Comet result - calculates the comet score for the given instance using the version specified in the spec.", + "id": "GoogleCloudAiplatformV1CometResult", + "properties": { + "score": { + "description": "Output only. Comet score. Range depends on version.", + "format": "float", + "readOnly": true, + "type": "number" + } + }, + "type": "object" + }, + "GoogleCloudAiplatformV1CometSpec": { + "description": "Spec for Comet metric.", + "id": "GoogleCloudAiplatformV1CometSpec", + "properties": { + "sourceLanguage": { + "description": "Optional. Source language in BCP-47 format.", + "type": "string" + }, + "targetLanguage": { + "description": "Optional. Target language in BCP-47 format. Covers both prediction and reference.", + "type": "string" + }, + "version": { + "description": "Required. Which version to use for evaluation.", + "enum": [ + "COMET_VERSION_UNSPECIFIED", + "COMET_22_SRC_REF" + ], + "enumDescriptions": [ + "Comet version unspecified.", + "Comet 22 for translation + source + reference (source-reference-combined)." + ], + "type": "string" + } + }, + "type": "object" + }, "GoogleCloudAiplatformV1CompleteTrialRequest": { "description": "Request message for VizierService.CompleteTrial.", "id": "GoogleCloudAiplatformV1CompleteTrialRequest", @@ -19985,6 +21724,88 @@ }, "type": "object" }, + "GoogleCloudAiplatformV1CorpusStatus": { + "description": "RagCorpus status.", + "id": "GoogleCloudAiplatformV1CorpusStatus", + "properties": { + "errorStatus": { + "description": "Output only. Only when the `state` field is ERROR.", + "readOnly": true, + "type": "string" + }, + "state": { + "description": "Output only. RagCorpus life state.", + "enum": [ + "UNKNOWN", + "INITIALIZED", + "ACTIVE", + "ERROR" + ], + "enumDescriptions": [ + "This state is not supposed to happen.", + "RagCorpus resource entry is initialized, but hasn't done validation.", + "RagCorpus is provisioned successfully and is ready to serve.", + "RagCorpus is in a problematic situation. 
See `error_message` field for details." + ], + "readOnly": true, + "type": "string" + } + }, + "type": "object" + }, + "GoogleCloudAiplatformV1CorroborateContentRequest": { + "description": "Request message for CorroborateContent.", + "id": "GoogleCloudAiplatformV1CorroborateContentRequest", + "properties": { + "content": { + "$ref": "GoogleCloudAiplatformV1Content", + "description": "Optional. Input content to corroborate, only text format is supported for now." + }, + "facts": { + "description": "Optional. Facts used to generate the text can also be used to corroborate the text.", + "items": { + "$ref": "GoogleCloudAiplatformV1Fact" + }, + "type": "array" + }, + "parameters": { + "$ref": "GoogleCloudAiplatformV1CorroborateContentRequestParameters", + "description": "Optional. Parameters that can be set to override default settings per request." + } + }, + "type": "object" + }, + "GoogleCloudAiplatformV1CorroborateContentRequestParameters": { + "description": "Parameters that can be overridden per request.", + "id": "GoogleCloudAiplatformV1CorroborateContentRequestParameters", + "properties": { + "citationThreshold": { + "description": "Optional. Only return claims with citation score larger than the threshold.", + "format": "double", + "type": "number" + } + }, + "type": "object" + }, + "GoogleCloudAiplatformV1CorroborateContentResponse": { + "description": "Response message for CorroborateContent.", + "id": "GoogleCloudAiplatformV1CorroborateContentResponse", + "properties": { + "claims": { + "description": "Claims that are extracted from the input content and facts that support the claims.", + "items": { + "$ref": "GoogleCloudAiplatformV1Claim" + }, + "type": "array" + }, + "corroborationScore": { + "description": "Confidence score of corroborating content. Value is [0,1], with 1 being the most confident.", + "format": "float", + "type": "number" + } + }, + "type": "object" + }, "GoogleCloudAiplatformV1CountTokensRequest": { "description": "Request message for PredictionService.CountTokens.", "id": "GoogleCloudAiplatformV1CountTokensRequest", @@ -20549,10 +22370,6 @@ "description": "The ID of the location to store protected artifacts. e.g. us-central1. Populate only when the location is different than CustomJob location. List of supported locations: https://cloud.google.com/vertex-ai/docs/general/locations", "type": "string" }, - "pscInterfaceConfig": { - "$ref": "GoogleCloudAiplatformV1PscInterfaceConfig", - "description": "Optional. Configuration for PSC-I for CustomJob." - }, "reservedIpRanges": { "description": "Optional. A list of names for the reserved ip ranges under the VPC network that can be used for this job. If set, we will deploy the job within the provided ip ranges. Otherwise, the job will be deployed to any ip ranges under the provided VPC network. Example: ['vertex-ai-ip-range'].", "items": { @@ -20952,6 +22769,11 @@ "format": "int32", "type": "integer" }, + "requiredReplicaCount": { + "description": "Optional. Number of required available replicas for the deployment to succeed. This field is only needed when partial model deployment/mutation is desired. If set, the model deploy/mutate operation will succeed once available_replica_count reaches required_replica_count, and the rest of the replicas will be retried. If not set, the default required_replica_count will be min_replica_count.", + "format": "int32", + "type": "integer" + }, "spot": { "description": "Optional. 
If true, schedule the deployment workload on [spot VMs](https://cloud.google.com/kubernetes-engine/docs/concepts/spot-vms).", "type": "boolean" @@ -21329,6 +23151,10 @@ "$ref": "GoogleCloudAiplatformV1ExplanationSpec", "description": "Explanation configuration for this DeployedModel. When deploying a Model using EndpointService.DeployModel, this value overrides the value of Model.explanation_spec. All fields of explanation_spec are optional in the request. If a field of explanation_spec is not populated, the value of the same field of Model.explanation_spec is inherited. If the corresponding Model.explanation_spec is not populated, all fields of the explanation_spec will be used for the explanation configuration." }, + "fasterDeploymentConfig": { + "$ref": "GoogleCloudAiplatformV1FasterDeploymentConfig", + "description": "Configuration for faster model deployment." + }, "id": { "description": "Immutable. The ID of the DeployedModel. If not provided upon deployment, Vertex AI will generate a value for this ID. This value should be 1-10 characters, and valid characters are `/[0-9]/`.", "type": "string" @@ -21354,6 +23180,18 @@ "sharedResources": { "description": "The resource name of the shared DeploymentResourcePool to deploy on. Format: `projects/{project}/locations/{location}/deploymentResourcePools/{deployment_resource_pool}`", "type": "string" + }, + "status": { + "$ref": "GoogleCloudAiplatformV1DeployedModelStatus", + "description": "Output only. Runtime status of the deployed model.", + "readOnly": true + }, + "systemLabels": { + "additionalProperties": { + "type": "string" + }, + "description": "System labels to apply to Model Garden deployments. System labels are managed by Google for internal use only.", + "type": "object" } }, "type": "object" @@ -21373,6 +23211,30 @@ }, "type": "object" }, + "GoogleCloudAiplatformV1DeployedModelStatus": { + "description": "Runtime status of the deployed model.", + "id": "GoogleCloudAiplatformV1DeployedModelStatus", + "properties": { + "availableReplicaCount": { + "description": "Output only. The number of available replicas of the deployed model.", + "format": "int32", + "readOnly": true, + "type": "integer" + }, + "lastUpdateTime": { + "description": "Output only. The time at which the status was last updated.", + "format": "google-datetime", + "readOnly": true, + "type": "string" + }, + "message": { + "description": "Output only. 
The latest deployed model's status message (if any).", + "readOnly": true, + "type": "string" + } + }, + "type": "object" + }, "GoogleCloudAiplatformV1DeploymentResourcePool": { "description": "A description of resources that can be shared by multiple DeployedModels, whose underlying specification consists of a DedicatedResources.", "id": "GoogleCloudAiplatformV1DeploymentResourcePool", @@ -21494,6 +23356,12 @@ }, "type": "object" }, + "GoogleCloudAiplatformV1DirectUploadSource": { + "description": "The input content is encapsulated and uploaded in the request.", + "id": "GoogleCloudAiplatformV1DirectUploadSource", + "properties": {}, + "type": "object" + }, "GoogleCloudAiplatformV1DiskSpec": { "description": "Represents the spec of disk options.", "id": "GoogleCloudAiplatformV1DiskSpec", @@ -21564,6 +23432,10 @@ "description": "Models are deployed into it, and afterwards Endpoint is called to obtain predictions and explanations.", "id": "GoogleCloudAiplatformV1Endpoint", "properties": { + "clientConnectionConfig": { + "$ref": "GoogleCloudAiplatformV1ClientConnectionConfig", + "description": "Configurations that are applied to the endpoint for online prediction." + }, "createTime": { "description": "Output only. Timestamp when this Endpoint was created.", "format": "google-datetime", @@ -21819,6 +23691,10 @@ "$ref": "GoogleCloudAiplatformV1CoherenceInput", "description": "Input for coherence metric." }, + "cometInput": { + "$ref": "GoogleCloudAiplatformV1CometInput", + "description": "Translation metrics. Input for Comet metric." + }, "exactMatchInput": { "$ref": "GoogleCloudAiplatformV1ExactMatchInput", "description": "Auto metric instances. Instances and metric spec for exact match metric." @@ -21835,6 +23711,10 @@ "$ref": "GoogleCloudAiplatformV1GroundednessInput", "description": "Input for groundedness metric." }, + "metricxInput": { + "$ref": "GoogleCloudAiplatformV1MetricxInput", + "description": "Input for Metricx metric." + }, "pairwiseMetricInput": { "$ref": "GoogleCloudAiplatformV1PairwiseMetricInput", "description": "Input for pairwise metric." @@ -21902,6 +23782,30 @@ "toolParameterKvMatchInput": { "$ref": "GoogleCloudAiplatformV1ToolParameterKVMatchInput", "description": "Input for tool parameter key value match metric." + }, + "trajectoryAnyOrderMatchInput": { + "$ref": "GoogleCloudAiplatformV1TrajectoryAnyOrderMatchInput", + "description": "Input for trajectory match any order metric." + }, + "trajectoryExactMatchInput": { + "$ref": "GoogleCloudAiplatformV1TrajectoryExactMatchInput", + "description": "Input for trajectory exact match metric." + }, + "trajectoryInOrderMatchInput": { + "$ref": "GoogleCloudAiplatformV1TrajectoryInOrderMatchInput", + "description": "Input for trajectory in order match metric." + }, + "trajectoryPrecisionInput": { + "$ref": "GoogleCloudAiplatformV1TrajectoryPrecisionInput", + "description": "Input for trajectory precision metric." + }, + "trajectoryRecallInput": { + "$ref": "GoogleCloudAiplatformV1TrajectoryRecallInput", + "description": "Input for trajectory recall metric." + }, + "trajectorySingleToolUseInput": { + "$ref": "GoogleCloudAiplatformV1TrajectorySingleToolUseInput", + "description": "Input for trajectory single tool use metric." } }, "type": "object" @@ -21918,6 +23822,10 @@ "$ref": "GoogleCloudAiplatformV1CoherenceResult", "description": "Result for coherence metric." }, + "cometResult": { + "$ref": "GoogleCloudAiplatformV1CometResult", + "description": "Translation metrics. Result for Comet metric." 
+ }, "exactMatchResults": { "$ref": "GoogleCloudAiplatformV1ExactMatchResults", "description": "Auto metric evaluation results. Results for exact match metric." @@ -21934,6 +23842,10 @@ "$ref": "GoogleCloudAiplatformV1GroundednessResult", "description": "Result for groundedness metric." }, + "metricxResult": { + "$ref": "GoogleCloudAiplatformV1MetricxResult", + "description": "Result for Metricx metric." + }, "pairwiseMetricResult": { "$ref": "GoogleCloudAiplatformV1PairwiseMetricResult", "description": "Result for pairwise metric." @@ -22001,6 +23913,30 @@ "toolParameterKvMatchResults": { "$ref": "GoogleCloudAiplatformV1ToolParameterKVMatchResults", "description": "Results for tool parameter key value match metric." + }, + "trajectoryAnyOrderMatchResults": { + "$ref": "GoogleCloudAiplatformV1TrajectoryAnyOrderMatchResults", + "description": "Result for trajectory any order match metric." + }, + "trajectoryExactMatchResults": { + "$ref": "GoogleCloudAiplatformV1TrajectoryExactMatchResults", + "description": "Result for trajectory exact match metric." + }, + "trajectoryInOrderMatchResults": { + "$ref": "GoogleCloudAiplatformV1TrajectoryInOrderMatchResults", + "description": "Result for trajectory in order match metric." + }, + "trajectoryPrecisionResults": { + "$ref": "GoogleCloudAiplatformV1TrajectoryPrecisionResults", + "description": "Result for trajectory precision metric." + }, + "trajectoryRecallResults": { + "$ref": "GoogleCloudAiplatformV1TrajectoryRecallResults", + "description": "Results for trajectory recall metric." + }, + "trajectorySingleToolUseResults": { + "$ref": "GoogleCloudAiplatformV1TrajectorySingleToolUseResults", + "description": "Results for trajectory single tool use metric." } }, "type": "object" @@ -22809,7 +24745,7 @@ "id": "GoogleCloudAiplatformV1ExportDataConfig", "properties": { "annotationSchemaUri": { - "description": "The Cloud Storage URI that points to a YAML file describing the annotation schema. The schema is defined as an OpenAPI 3.0.2 [Schema Object](https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.2.md#schemaObject). The schema files that can be used here are found in gs://google-cloud-aiplatform/schema/dataset/annotation/, note that the chosen schema must be consistent with metadata of the Dataset specified by dataset_id. Only used for custom training data export use cases. Only applicable to Datasets that have DataItems and Annotations. Only Annotations that both match this schema and belong to DataItems not ignored by the split method are used in respectively training, validation or test role, depending on the role of the DataItem they are on. When used in conjunction with annotations_filter, the Annotations used for training are filtered by both annotations_filter and annotation_schema_uri.", + "description": "The Cloud Storage URI that points to a YAML file describing the annotation schema. The schema is defined as an OpenAPI 3.0.2 [Schema Object](https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.2.md#schemaObject). The schema files that can be used here are found in gs://google-cloud-aiplatform/schema/dataset/annotation/, note that the chosen schema must be consistent with metadata of the Dataset specified by ExportDataRequest.name. Only used for custom training data export use cases. Only applicable to Datasets that have DataItems and Annotations. 
Only Annotations that both match this schema and belong to DataItems not ignored by the split method are used in respectively training, validation or test role, depending on the role of the DataItem they are on. When used in conjunction with annotations_filter, the Annotations used for training are filtered by both annotations_filter and annotation_schema_uri.", "type": "string" }, "annotationsFilter": { @@ -22841,7 +24777,7 @@ "description": "The Google Cloud Storage location where the output is to be written to. In the given directory a new directory will be created with name: `export-data--` where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format. All export output will be written into that directory. Inside that directory, annotations with the same schema will be grouped into sub directories which are named with the corresponding annotations' schema title. Inside these sub directories, a schema.yaml will be created to describe the output format." }, "savedQueryId": { - "description": "The ID of a SavedQuery (annotation set) under the Dataset specified by dataset_id used for filtering Annotations for training. Only used for custom training data export use cases. Only applicable to Datasets that have SavedQueries. Only Annotations that are associated with this SavedQuery are used in respectively training. When used in conjunction with annotations_filter, the Annotations used for training are filtered by both saved_query_id and annotations_filter. Only one of saved_query_id and annotation_schema_uri should be specified as both of them represent the same thing: problem type.", + "description": "The ID of a SavedQuery (annotation set) under the Dataset specified by ExportDataRequest.name used for filtering Annotations for training. Only used for custom training data export use cases. Only applicable to Datasets that have SavedQueries. Only Annotations that are associated with this SavedQuery are used in respectively training. When used in conjunction with annotations_filter, the Annotations used for training are filtered by both saved_query_id and annotations_filter. Only one of saved_query_id and annotation_schema_uri should be specified as both of them represent the same thing: problem type.", "type": "string" } }, @@ -23124,6 +25060,51 @@ }, "type": "object" }, + "GoogleCloudAiplatformV1Fact": { + "description": "The fact used in grounding.", + "id": "GoogleCloudAiplatformV1Fact", + "properties": { + "query": { + "description": "Query that is used to retrieve this fact.", + "type": "string" + }, + "score": { + "description": "If present, according to the underlying Vector DB and the selected metric type, the score can be either the distance or the similarity between the query and the fact and its range depends on the metric type. For example, if the metric type is COSINE_DISTANCE, it represents the distance between the query and the fact. The larger the distance, the less relevant the fact is to the query. 
The range is [0, 2], while 0 means the most relevant and 2 means the least relevant.", + "format": "double", + "type": "number" + }, + "summary": { + "description": "If present, the summary/snippet of the fact.", + "type": "string" + }, + "title": { + "description": "If present, it refers to the title of this fact.", + "type": "string" + }, + "uri": { + "description": "If present, this uri links to the source of the fact.", + "type": "string" + }, + "vectorDistance": { + "deprecated": true, + "description": "If present, the distance between the query vector and this fact vector.", + "format": "double", + "type": "number" + } + }, + "type": "object" + }, + "GoogleCloudAiplatformV1FasterDeploymentConfig": { + "description": "Configuration for faster model deployment.", + "id": "GoogleCloudAiplatformV1FasterDeploymentConfig", + "properties": { + "fastTryoutEnabled": { + "description": "If true, enable fast tryout feature for this deployed model.", + "type": "boolean" + } + }, + "type": "object" + }, "GoogleCloudAiplatformV1Feature": { "description": "Feature Metadata information. For example, color is a feature that describes an apple.", "id": "GoogleCloudAiplatformV1Feature", @@ -23671,6 +25652,10 @@ "description": "Identifier. Name of the FeatureView. Format: `projects/{project}/locations/{location}/featureOnlineStores/{feature_online_store}/featureViews/{feature_view}`", "type": "string" }, + "optimizedConfig": { + "$ref": "GoogleCloudAiplatformV1FeatureViewOptimizedConfig", + "description": "Optional. Configuration for FeatureView created under Optimized FeatureOnlineStore." + }, "satisfiesPzi": { "description": "Output only. Reserved for future use.", "readOnly": true, @@ -23850,6 +25835,17 @@ }, "type": "object" }, + "GoogleCloudAiplatformV1FeatureViewOptimizedConfig": { + "description": "Configuration for FeatureViews created in Optimized FeatureOnlineStore.", + "id": "GoogleCloudAiplatformV1FeatureViewOptimizedConfig", + "properties": { + "automaticResources": { + "$ref": "GoogleCloudAiplatformV1AutomaticResources", + "description": "Optional. A description of resources that the FeatureView uses, which to large degree are decided by Vertex AI, and optionally allows only a modest additional configuration. If min_replica_count is not set, the default value is 2. If max_replica_count is not set, the default value is 6. The max allowed replica count is 1000." + } + }, + "type": "object" + }, "GoogleCloudAiplatformV1FeatureViewSync": { "description": "FeatureViewSync is a representation of sync operation which copies data from data source to Feature View in Online Store.", "id": "GoogleCloudAiplatformV1FeatureViewSync", @@ -24230,6 +26226,17 @@ }, "type": "object" }, + "GoogleCloudAiplatformV1FetchPredictOperationRequest": { + "description": "Request message for PredictionService.FetchPredictOperation.", + "id": "GoogleCloudAiplatformV1FetchPredictOperationRequest", + "properties": { + "operationName": { + "description": "Required. The server-assigned name for the operation.", + "type": "string" + } + }, + "type": "object" + }, "GoogleCloudAiplatformV1FileData": { "description": "URI based data.", "id": "GoogleCloudAiplatformV1FileData", @@ -24245,6 +26252,33 @@ }, "type": "object" }, + "GoogleCloudAiplatformV1FileStatus": { + "description": "RagFile status.", + "id": "GoogleCloudAiplatformV1FileStatus", + "properties": { + "errorStatus": { + "description": "Output only. 
Only when the `state` field is ERROR.", + "readOnly": true, + "type": "string" + }, + "state": { + "description": "Output only. RagFile state.", + "enum": [ + "STATE_UNSPECIFIED", + "ACTIVE", + "ERROR" + ], + "enumDescriptions": [ + "RagFile state is unspecified.", + "RagFile resource has been created and indexed successfully.", + "RagFile resource is in a problematic state. See `error_message` field for details." + ], + "readOnly": true, + "type": "string" + } + }, + "type": "object" + }, "GoogleCloudAiplatformV1FilterSplit": { "description": "Assigns input data to training, validation, and test sets based on the given filters, data pieces not matched by any filter are ignored. Currently only supported for Datasets containing DataItems. If any of the filters in this message are to match nothing, then they can be set as '-' (the minus sign). Supported only for unstructured Datasets. ", "id": "GoogleCloudAiplatformV1FilterSplit", @@ -24586,7 +26620,7 @@ "type": "object" }, "GoogleCloudAiplatformV1FunctionDeclaration": { - "description": "Structured representation of a function declaration as defined by the [OpenAPI 3.0 specification](https://spec.openapis.org/oas/v3.0.3). Included in this declaration are the function name and parameters. This FunctionDeclaration is a representation of a block of code that can be used as a `Tool` by the model and executed by the client.", + "description": "Structured representation of a function declaration as defined by the [OpenAPI 3.0 specification](https://spec.openapis.org/oas/v3.0.3). Included in this declaration are the function name, description, parameters and response type. This FunctionDeclaration is a representation of a block of code that can be used as a `Tool` by the model and executed by the client.", "id": "GoogleCloudAiplatformV1FunctionDeclaration", "properties": { "description": { @@ -24656,6 +26690,10 @@ "description": "Request message for [PredictionService.GenerateContent].", "id": "GoogleCloudAiplatformV1GenerateContentRequest", "properties": { + "cachedContent": { + "description": "Optional. The name of the cached content used as context to serve the prediction. Note: only used in explicit caching, where users can have control over caching (e.g. what content to cache) and enjoy guaranteed cost savings. Format: `projects/{project}/locations/{location}/cachedContents/{cachedContent}`", + "type": "string" + }, "contents": { "description": "Required. The content of the current conversation with the model. For single-turn queries, this is a single instance. For multi-turn queries, this is a repeated field that contains conversation history + latest request.", "items": { @@ -24771,6 +26809,12 @@ "description": "Usage metadata about response(s).", "id": "GoogleCloudAiplatformV1GenerateContentResponseUsageMetadata", "properties": { + "cachedContentTokenCount": { + "description": "Output only. Number of tokens in the cached part in the input (the cached content).", + "format": "int32", + "readOnly": true, + "type": "integer" + }, "candidatesTokenCount": { "description": "Number of tokens in the response(s).", "format": "int32", @@ -24830,6 +26874,25 @@ "description": "Optional. Output response mimetype of the generated candidate text. Supported mimetype: - `text/plain`: (default) Text output. - `application/json`: JSON response in the candidates. The model needs to be prompted to output the appropriate response type, otherwise the behavior is undefined. 
This is a preview feature.", "type": "string" }, + "responseModalities": { + "description": "Optional. The modalities of the response.", + "items": { + "enum": [ + "MODALITY_UNSPECIFIED", + "TEXT", + "IMAGE", + "AUDIO" + ], + "enumDescriptions": [ + "Unspecified modality. Will be processed as text.", + "Text modality.", + "Image modality.", + "Audio modality." + ], + "type": "string" + }, + "type": "array" + }, "responseSchema": { "$ref": "GoogleCloudAiplatformV1Schema", "description": "Optional. The `Schema` object allows the definition of input and output data types. These types can be objects, but also primitives and arrays. Represents a select subset of an [OpenAPI 3.0 schema object](https://spec.openapis.org/oas/v3.0.3#schema). If set, a compatible response_mime_type must also be set. Compatible mimetypes: `application/json`: Schema for JSON response." @@ -24843,6 +26906,10 @@ "format": "int32", "type": "integer" }, + "speechConfig": { + "$ref": "GoogleCloudAiplatformV1SpeechConfig", + "description": "Optional. The speech generation config." + }, "stopSequences": { "description": "Optional. Stop sequences.", "items": { @@ -24855,6 +26922,22 @@ "format": "float", "type": "number" }, + "tokenResolution": { + "description": "Optional. If specified, the token resolution specified will be used.", + "enum": [ + "TOKEN_RESOLUTION_UNSPECIFIED", + "TOKEN_RESOLUTION_LOW", + "TOKEN_RESOLUTION_MEDIUM", + "TOKEN_RESOLUTION_HIGH" + ], + "enumDescriptions": [ + "Token resolution has not been set.", + "Token resolution set to low (64 tokens).", + "Token resolution set to medium (256 tokens).", + "Token resolution set to high (P&S with 256 tokens)." + ], + "type": "string" + }, "topK": { "description": "Optional. If specified, top-k sampling will be used.", "format": "float", @@ -24955,6 +27038,45 @@ }, "type": "object" }, + "GoogleCloudAiplatformV1GoogleDriveSource": { + "description": "The Google Drive location for the input content.", + "id": "GoogleCloudAiplatformV1GoogleDriveSource", + "properties": { + "resourceIds": { + "description": "Required. Google Drive resource IDs.", + "items": { + "$ref": "GoogleCloudAiplatformV1GoogleDriveSourceResourceId" + }, + "type": "array" + } + }, + "type": "object" + }, + "GoogleCloudAiplatformV1GoogleDriveSourceResourceId": { + "description": "The type and ID of the Google Drive resource.", + "id": "GoogleCloudAiplatformV1GoogleDriveSourceResourceId", + "properties": { + "resourceId": { + "description": "Required. The ID of the Google Drive resource.", + "type": "string" + }, + "resourceType": { + "description": "Required. The type of the Google Drive resource.", + "enum": [ + "RESOURCE_TYPE_UNSPECIFIED", + "RESOURCE_TYPE_FILE", + "RESOURCE_TYPE_FOLDER" + ], + "enumDescriptions": [ + "Unspecified resource type.", + "File resource type.", + "Folder resource type." 
+ ], + "type": "string" + } + }, + "type": "object" + }, "GoogleCloudAiplatformV1GoogleSearchRetrieval": { "description": "Tool to retrieve public web data for grounding, powered by Google.", "id": "GoogleCloudAiplatformV1GoogleSearchRetrieval", @@ -25051,6 +27173,10 @@ "description": "Chunk from context retrieved by the retrieval tools.", "id": "GoogleCloudAiplatformV1GroundingChunkRetrievedContext", "properties": { + "text": { + "description": "Text of the attribution.", + "type": "string" + }, "title": { "description": "Title of the attribution.", "type": "string" @@ -25493,6 +27619,63 @@ }, "type": "object" }, + "GoogleCloudAiplatformV1ImportRagFilesConfig": { + "description": "Config for importing RagFiles.", + "id": "GoogleCloudAiplatformV1ImportRagFilesConfig", + "properties": { + "gcsSource": { + "$ref": "GoogleCloudAiplatformV1GcsSource", + "description": "Google Cloud Storage location. Supports importing individual files as well as entire Google Cloud Storage directories. Sample formats: - `gs://bucket_name/my_directory/object_name/my_file.txt` - `gs://bucket_name/my_directory`" + }, + "googleDriveSource": { + "$ref": "GoogleCloudAiplatformV1GoogleDriveSource", + "description": "Google Drive location. Supports importing individual files as well as Google Drive folders." + }, + "jiraSource": { + "$ref": "GoogleCloudAiplatformV1JiraSource", + "description": "Jira queries with their corresponding authentication." + }, + "maxEmbeddingRequestsPerMin": { + "description": "Optional. The max number of queries per minute that this job is allowed to make to the embedding model specified on the corpus. This value is specific to this job and not shared across other import jobs. Consult the Quotas page on the project to set an appropriate value here. If unspecified, a default value of 1,000 QPM would be used.", + "format": "int32", + "type": "integer" + }, + "partialFailureBigquerySink": { + "$ref": "GoogleCloudAiplatformV1BigQueryDestination", + "deprecated": true, + "description": "The BigQuery destination to write partial failures to. It should be a bigquery table resource name (e.g. \"bq://projectId.bqDatasetId.bqTableId\"). The dataset must exist. If the table does not exist, it will be created with the expected schema. If the table exists, the schema will be validated and data will be added to this existing table. Deprecated. Prefer to use `import_result_bq_sink`." + }, + "partialFailureGcsSink": { + "$ref": "GoogleCloudAiplatformV1GcsDestination", + "deprecated": true, + "description": "The Cloud Storage path to write partial failures to. Deprecated. Prefer to use `import_result_gcs_sink`." + }, + "ragFileTransformationConfig": { + "$ref": "GoogleCloudAiplatformV1RagFileTransformationConfig", + "description": "Specifies the transformation config for RagFiles." + }, + "sharePointSources": { + "$ref": "GoogleCloudAiplatformV1SharePointSources", + "description": "SharePoint sources." + }, + "slackSource": { + "$ref": "GoogleCloudAiplatformV1SlackSource", + "description": "Slack channels with their corresponding access tokens." + } + }, + "type": "object" + }, + "GoogleCloudAiplatformV1ImportRagFilesRequest": { + "description": "Request message for VertexRagDataService.ImportRagFiles.", + "id": "GoogleCloudAiplatformV1ImportRagFilesRequest", + "properties": { + "importRagFilesConfig": { + "$ref": "GoogleCloudAiplatformV1ImportRagFilesConfig", + "description": "Required. The config for the RagFiles to be synced and imported into the RagCorpus. VertexRagDataService.ImportRagFiles." 
+ } + }, + "type": "object" + }, "GoogleCloudAiplatformV1Index": { "description": "A representation of a collection of database items organized in a way that allows for approximate nearest neighbor (a.k.a ANN) algorithms search.", "id": "GoogleCloudAiplatformV1Index", @@ -25960,6 +28143,53 @@ }, "type": "object" }, + "GoogleCloudAiplatformV1JiraSource": { + "description": "The Jira source for the ImportRagFilesRequest.", + "id": "GoogleCloudAiplatformV1JiraSource", + "properties": { + "jiraQueries": { + "description": "Required. The Jira queries.", + "items": { + "$ref": "GoogleCloudAiplatformV1JiraSourceJiraQueries" + }, + "type": "array" + } + }, + "type": "object" + }, + "GoogleCloudAiplatformV1JiraSourceJiraQueries": { + "description": "JiraQueries contains the Jira queries and corresponding authentication.", + "id": "GoogleCloudAiplatformV1JiraSourceJiraQueries", + "properties": { + "apiKeyConfig": { + "$ref": "GoogleCloudAiplatformV1ApiAuthApiKeyConfig", + "description": "Required. The SecretManager secret version resource name (e.g. projects/{project}/secrets/{secret}/versions/{version}) storing the Jira API key. See [Manage API tokens for your Atlassian account](https://support.atlassian.com/atlassian-account/docs/manage-api-tokens-for-your-atlassian-account/)." + }, + "customQueries": { + "description": "A list of custom Jira queries to import. For information about JQL (Jira Query Language), see https://support.atlassian.com/jira-service-management-cloud/docs/use-advanced-search-with-jira-query-language-jql/", + "items": { + "type": "string" + }, + "type": "array" + }, + "email": { + "description": "Required. The Jira email address.", + "type": "string" + }, + "projects": { + "description": "A list of Jira projects to import in their entirety.", + "items": { + "type": "string" + }, + "type": "array" + }, + "serverUri": { + "description": "Required. The Jira server URI.", + "type": "string" + } + }, + "type": "object" + }, "GoogleCloudAiplatformV1LargeModelReference": { "description": "Contains information about the Large Model.", "id": "GoogleCloudAiplatformV1LargeModelReference", @@ -26053,6 +28283,24 @@ }, "type": "object" }, + "GoogleCloudAiplatformV1ListCachedContentsResponse": { + "description": "Response with a list of CachedContents.", + "id": "GoogleCloudAiplatformV1ListCachedContentsResponse", + "properties": { + "cachedContents": { + "description": "List of cached contents.", + "items": { + "$ref": "GoogleCloudAiplatformV1CachedContent" + }, + "type": "array" + }, + "nextPageToken": { + "description": "A token, which can be sent as `page_token` to retrieve the next page. If this field is omitted, there are no subsequent pages.", + "type": "string" + } + }, + "type": "object" + }, "GoogleCloudAiplatformV1ListContextsResponse": { "description": "Response message for MetadataService.ListContexts.", "id": "GoogleCloudAiplatformV1ListContextsResponse", @@ -26562,7 +28810,7 @@ "id": "GoogleCloudAiplatformV1ListNotebookExecutionJobsResponse", "properties": { "nextPageToken": { - "description": "A token to retrieve next page of results. Pass to ListNotebookExecutionJobs.page_token to obtain that page.", + "description": "A token to retrieve next page of results. 
Pass to ListNotebookExecutionJobsRequest.page_token to obtain that page.", "type": "string" }, "notebookExecutionJobs": { @@ -26666,6 +28914,60 @@ }, "type": "object" }, + "GoogleCloudAiplatformV1ListRagCorporaResponse": { + "description": "Response message for VertexRagDataService.ListRagCorpora.", + "id": "GoogleCloudAiplatformV1ListRagCorporaResponse", + "properties": { + "nextPageToken": { + "description": "A token to retrieve the next page of results. Pass to ListRagCorporaRequest.page_token to obtain that page.", + "type": "string" + }, + "ragCorpora": { + "description": "List of RagCorpora in the requested page.", + "items": { + "$ref": "GoogleCloudAiplatformV1RagCorpus" + }, + "type": "array" + } + }, + "type": "object" + }, + "GoogleCloudAiplatformV1ListRagFilesResponse": { + "description": "Response message for VertexRagDataService.ListRagFiles.", + "id": "GoogleCloudAiplatformV1ListRagFilesResponse", + "properties": { + "nextPageToken": { + "description": "A token to retrieve the next page of results. Pass to ListRagFilesRequest.page_token to obtain that page.", + "type": "string" + }, + "ragFiles": { + "description": "List of RagFiles in the requested page.", + "items": { + "$ref": "GoogleCloudAiplatformV1RagFile" + }, + "type": "array" + } + }, + "type": "object" + }, + "GoogleCloudAiplatformV1ListReasoningEnginesResponse": { + "description": "Response message for ReasoningEngineService.ListReasoningEngines", + "id": "GoogleCloudAiplatformV1ListReasoningEnginesResponse", + "properties": { + "nextPageToken": { + "description": "A token to retrieve the next page of results. Pass to ListReasoningEnginesRequest.page_token to obtain that page.", + "type": "string" + }, + "reasoningEngines": { + "description": "List of ReasoningEngines in the requested page.", + "items": { + "$ref": "GoogleCloudAiplatformV1ReasoningEngine" + }, + "type": "array" + } + }, + "type": "object" + }, "GoogleCloudAiplatformV1ListSavedQueriesResponse": { "description": "Response message for DatasetService.ListSavedQueries.", "id": "GoogleCloudAiplatformV1ListSavedQueriesResponse", @@ -26953,6 +29255,7 @@ "NVIDIA_A100_80GB", "NVIDIA_L4", "NVIDIA_H100_80GB", + "NVIDIA_H100_MEGA_80GB", "TPU_V2", "TPU_V3", "TPU_V4_POD", @@ -26972,6 +29275,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -26985,6 +29289,7 @@ "Nvidia A100 80GB GPU.", "Nvidia L4 GPU.", "Nvidia H100 80Gb GPU.", + "Nvidia H100 Mega 80Gb GPU.", "TPU v2.", "TPU v3.", "TPU v4.", @@ -27188,6 +29493,84 @@ }, "type": "object" }, + "GoogleCloudAiplatformV1MetricxInput": { + "description": "Input for MetricX metric.", + "id": "GoogleCloudAiplatformV1MetricxInput", + "properties": { + "instance": { + "$ref": "GoogleCloudAiplatformV1MetricxInstance", + "description": "Required. Metricx instance." + }, + "metricSpec": { + "$ref": "GoogleCloudAiplatformV1MetricxSpec", + "description": "Required. Spec for Metricx metric." + } + }, + "type": "object" + }, + "GoogleCloudAiplatformV1MetricxInstance": { + "description": "Spec for MetricX instance - The fields used for evaluation are dependent on the MetricX version.", + "id": "GoogleCloudAiplatformV1MetricxInstance", + "properties": { + "prediction": { + "description": "Required. Output of the evaluated model.", + "type": "string" + }, + "reference": { + "description": "Optional. Ground truth used to compare against the prediction.", + "type": "string" + }, + "source": { + "description": "Optional. 
Source text in original language.", + "type": "string" + } + }, + "type": "object" + }, + "GoogleCloudAiplatformV1MetricxResult": { + "description": "Spec for MetricX result - calculates the MetricX score for the given instance using the version specified in the spec.", + "id": "GoogleCloudAiplatformV1MetricxResult", + "properties": { + "score": { + "description": "Output only. MetricX score. Range depends on version.", + "format": "float", + "readOnly": true, + "type": "number" + } + }, + "type": "object" + }, + "GoogleCloudAiplatformV1MetricxSpec": { + "description": "Spec for MetricX metric.", + "id": "GoogleCloudAiplatformV1MetricxSpec", + "properties": { + "sourceLanguage": { + "description": "Optional. Source language in BCP-47 format.", + "type": "string" + }, + "targetLanguage": { + "description": "Optional. Target language in BCP-47 format. Covers both prediction and reference.", + "type": "string" + }, + "version": { + "description": "Required. Which version to use for evaluation.", + "enum": [ + "METRICX_VERSION_UNSPECIFIED", + "METRICX_24_REF", + "METRICX_24_SRC", + "METRICX_24_SRC_REF" + ], + "enumDescriptions": [ + "MetricX version unspecified.", + "MetricX 2024 (2.6) for translation + reference (reference-based).", + "MetricX 2024 (2.6) for translation + source (QE).", + "MetricX 2024 (2.6) for translation + source + reference (source-reference-combined)." + ], + "type": "string" + } + }, + "type": "object" + }, "GoogleCloudAiplatformV1MigratableResource": { "description": "Represents one resource that exists in automl.googleapis.com, datalabeling.googleapis.com or ml.googleapis.com.", "id": "GoogleCloudAiplatformV1MigratableResource", @@ -28277,7 +30660,7 @@ "description": "Email alert config." }, "enableLogging": { - "description": "Dump the anomalies to Cloud Logging. The anomalies will be put to json payload encoded from proto google.cloud.aiplatform.logging.ModelMonitoringAnomaliesLogEntry. This can be further sinked to Pub/Sub or any other services supported by Cloud Logging.", + "description": "Dump the anomalies to Cloud Logging. The anomalies will be put to json payload encoded from proto ModelMonitoringStatsAnomalies. This can be further synced to Pub/Sub or any other services supported by Cloud Logging.", "type": "boolean" }, "notificationChannels": { @@ -29362,6 +31745,10 @@ "readOnly": true, "type": "string" }, + "customEnvironmentSpec": { + "$ref": "GoogleCloudAiplatformV1NotebookExecutionJobCustomEnvironmentSpec", + "description": "The custom compute configuration for an execution job." + }, "dataformRepositorySource": { "$ref": "GoogleCloudAiplatformV1NotebookExecutionJobDataformRepositorySource", "description": "The Dataform Repository pointing to a single file notebook repository." @@ -29376,7 +31763,7 @@ }, "encryptionSpec": { "$ref": "GoogleCloudAiplatformV1EncryptionSpec", - "description": "Customer-managed encryption key spec for the notebook execution job. This field is auto-populated if the NotebookService.NotebookRuntimeTemplate has an encryption spec." + "description": "Customer-managed encryption key spec for the notebook execution job. This field is auto-populated if the NotebookRuntimeTemplate has an encryption spec." }, "executionTimeout": { "description": "Max running time of the execution job in seconds (default 86400s / 24 hrs).", @@ -29428,6 +31815,10 @@ "readOnly": true, "type": "string" }, + "kernelName": { + "description": "The name of the kernel to use during notebook execution. 
If unset, the default kernel is used.", + "type": "string" + }, "labels": { "additionalProperties": { "type": "string" @@ -29463,6 +31854,29 @@ "format": "google-datetime", "readOnly": true, "type": "string" + }, + "workbenchRuntime": { + "$ref": "GoogleCloudAiplatformV1NotebookExecutionJobWorkbenchRuntime", + "description": "The Workbench runtime configuration to use for the notebook execution." + } + }, + "type": "object" + }, + "GoogleCloudAiplatformV1NotebookExecutionJobCustomEnvironmentSpec": { + "description": "Compute configuration to use for an execution job.", + "id": "GoogleCloudAiplatformV1NotebookExecutionJobCustomEnvironmentSpec", + "properties": { + "machineSpec": { + "$ref": "GoogleCloudAiplatformV1MachineSpec", + "description": "The specification of a single machine for the execution job." + }, + "networkSpec": { + "$ref": "GoogleCloudAiplatformV1NetworkSpec", + "description": "The network configuration to use for the execution job." + }, + "persistentDiskSpec": { + "$ref": "GoogleCloudAiplatformV1PersistentDiskSpec", + "description": "The specification of a persistent disk to attach for the execution job." } }, "type": "object" @@ -29509,6 +31923,12 @@ }, "type": "object" }, + "GoogleCloudAiplatformV1NotebookExecutionJobWorkbenchRuntime": { + "description": "Configuration for a Workbench Instances-based environment.", + "id": "GoogleCloudAiplatformV1NotebookExecutionJobWorkbenchRuntime", + "properties": {}, + "type": "object" + }, "GoogleCloudAiplatformV1NotebookIdleShutdownConfig": { "description": "The idle shutdown configuration of NotebookRuntimeTemplate, which contains the idle_timeout as required field.", "id": "GoogleCloudAiplatformV1NotebookIdleShutdownConfig", @@ -29535,6 +31955,11 @@ "readOnly": true, "type": "string" }, + "dataPersistentDiskSpec": { + "$ref": "GoogleCloudAiplatformV1PersistentDiskSpec", + "description": "Output only. The specification of persistent disk attached to the notebook runtime as data disk storage.", + "readOnly": true + }, "description": { "description": "The description of the NotebookRuntime.", "type": "string" @@ -29548,6 +31973,11 @@ "description": "Output only. Customer-managed encryption key spec for the notebook runtime.", "readOnly": true }, + "eucConfig": { + "$ref": "GoogleCloudAiplatformV1NotebookEucConfig", + "description": "Output only. EUC configuration of the notebook runtime.", + "readOnly": true + }, "expirationTime": { "description": "Output only. Timestamp when this NotebookRuntime will be expired: 1. System Predefined NotebookRuntime: 24 hours after creation. After expiration, system predifined runtime will be deleted. 2. User created NotebookRuntime: 6 months after last upgrade. After expiration, user created runtime will be stopped and allowed for upgrade.", "format": "google-datetime", @@ -29586,11 +32016,21 @@ "description": "The labels with user-defined metadata to organize your NotebookRuntime. Label keys and values can be no longer than 64 characters (Unicode codepoints), can only contain lowercase letters, numeric characters, underscores and dashes. International characters are allowed. No more than 64 user labels can be associated with one NotebookRuntime (System labels are excluded). See https://goo.gl/xmQnxf for more information and examples of labels. System reserved label keys are prefixed with \"aiplatform.googleapis.com/\" and are immutable. 
Following system labels exist for NotebookRuntime: * \"aiplatform.googleapis.com/notebook_runtime_gce_instance_id\": output only, its value is the Compute Engine instance id. * \"aiplatform.googleapis.com/colab_enterprise_entry_service\": its value is either \"bigquery\" or \"vertex\"; if absent, it should be \"vertex\". This is to describe the entry service, either BigQuery or Vertex.", "type": "object" }, + "machineSpec": { + "$ref": "GoogleCloudAiplatformV1MachineSpec", + "description": "Output only. The specification of a single machine used by the notebook runtime.", + "readOnly": true + }, "name": { "description": "Output only. The resource name of the NotebookRuntime.", "readOnly": true, "type": "string" }, + "networkSpec": { + "$ref": "GoogleCloudAiplatformV1NetworkSpec", + "description": "Output only. Network spec of the notebook runtime.", + "readOnly": true + }, "networkTags": { "description": "Optional. The Compute Engine tags to add to runtime (see [Tagging instances](https://cloud.google.com/vpc/docs/add-remove-network-tags)).", "items": { @@ -29663,10 +32103,15 @@ "type": "boolean" }, "serviceAccount": { - "description": "Output only. The service account that the NotebookRuntime workload runs as.", + "description": "Output only. Deprecated: This field is no longer used and the \"Vertex AI Notebook Service Account\" (service-PROJECT_NUMBER@gcp-sa-aiplatform-vm.iam.gserviceaccount.com) is used for the runtime workload identity. See https://cloud.google.com/iam/docs/service-agents#vertex-ai-notebook-service-account for more details. The service account that the NotebookRuntime workload runs as.", "readOnly": true, "type": "string" }, + "shieldedVmConfig": { + "$ref": "GoogleCloudAiplatformV1ShieldedVmConfig", + "description": "Output only. Runtime Shielded VM spec.", + "readOnly": true + }, "updateTime": { "description": "Output only. Timestamp when this NotebookRuntime was most recently updated.", "format": "google-datetime", @@ -29720,7 +32165,8 @@ "description": "The idle shutdown configuration of NotebookRuntimeTemplate. This config will only be set when idle shutdown is enabled." }, "isDefault": { - "description": "Output only. The default template to use if not specified.", + "deprecated": true, + "description": "Output only. Deprecated: This field has no behavior. Use notebook_runtime_type = 'ONE_CLICK' instead. The default template to use if not specified.", "readOnly": true, "type": "boolean" }, @@ -29765,7 +32211,8 @@ "type": "string" }, "serviceAccount": { - "description": "The service account that the runtime workload runs as. You can use any service account within the same project, but you must have the service account user permission to use the instance. If not specified, the [Compute Engine default service account](https://cloud.google.com/compute/docs/access/service-accounts#default_service_account) is used.", + "deprecated": true, + "description": "Deprecated: This field is ignored and the \"Vertex AI Notebook Service Account\" (service-PROJECT_NUMBER@gcp-sa-aiplatform-vm.iam.gserviceaccount.com) is used for the runtime workload identity. See https://cloud.google.com/iam/docs/service-agents#vertex-ai-notebook-service-account for more details. For NotebookExecutionJob, use NotebookExecutionJob.service_account instead. The service account that the runtime workload runs as. You can use any service account within the same project, but you must have the service account user permission to use the instance. 
If not specified, the [Compute Engine default service account](https://cloud.google.com/compute/docs/access/service-accounts#default_service_account) is used.", "type": "string" }, "shieldedVmConfig": { @@ -30763,6 +33210,17 @@ }, "type": "object" }, + "GoogleCloudAiplatformV1PrebuiltVoiceConfig": { + "description": "The configuration for the prebuilt speaker to use.", + "id": "GoogleCloudAiplatformV1PrebuiltVoiceConfig", + "properties": { + "voiceName": { + "description": "The name of the preset voice to use.", + "type": "string" + } + }, + "type": "object" + }, "GoogleCloudAiplatformV1PredefinedSplit": { "description": "Assigns input data to training, validation, and test sets based on the value of a provided key. Supported only for tabular Datasets.", "id": "GoogleCloudAiplatformV1PredefinedSplit", @@ -30774,6 +33232,24 @@ }, "type": "object" }, + "GoogleCloudAiplatformV1PredictLongRunningRequest": { + "description": "Request message for PredictionService.PredictLongRunning.", + "id": "GoogleCloudAiplatformV1PredictLongRunningRequest", + "properties": { + "instances": { + "description": "Required. The instances that are the input to the prediction call. A DeployedModel may have an upper limit on the number of instances it supports per request, and when it is exceeded the prediction call errors in case of AutoML Models, or, in case of customer created Models, the behaviour is as documented by that Model. The schema of any single instance may be specified via Endpoint's DeployedModels' Model's PredictSchemata's instance_schema_uri.", + "items": { + "type": "any" + }, + "type": "array" + }, + "parameters": { + "description": "Optional. The parameters that govern the prediction. The schema of the parameters may be specified via Endpoint's DeployedModels' Model's PredictSchemata's parameters_schema_uri.", + "type": "any" + } + }, + "type": "object" + }, "GoogleCloudAiplatformV1PredictRequest": { "description": "Request message for PredictionService.Predict.", "id": "GoogleCloudAiplatformV1PredictRequest", @@ -31008,12 +33484,6 @@ }, "type": "object" }, - "GoogleCloudAiplatformV1PscInterfaceConfig": { - "description": "Configuration for PSC-I.", - "id": "GoogleCloudAiplatformV1PscInterfaceConfig", - "properties": {}, - "type": "object" - }, "GoogleCloudAiplatformV1PublisherModel": { "description": "A Model Garden Publisher Model.", "id": "GoogleCloudAiplatformV1PublisherModel", @@ -31225,7 +33695,7 @@ "additionalProperties": { "type": "string" }, - "description": "Optional. Labels for the deployment. For managing deployment config like verifying, source of deployment config, etc.", + "description": "Optional. Labels for the deployment config. For managing deployment config like verifying, source of deployment config, etc.", "type": "object" }, "sampleRequest": { @@ -31586,6 +34056,36 @@ }, "type": "object" }, + "GoogleCloudAiplatformV1QueryReasoningEngineRequest": { + "description": "Request message for ReasoningEngineExecutionService.Query.", + "id": "GoogleCloudAiplatformV1QueryReasoningEngineRequest", + "properties": { + "classMethod": { + "description": "Optional. Class method to be used for the query. It is optional and defaults to \"query\" if unspecified.", + "type": "string" + }, + "input": { + "additionalProperties": { + "description": "Properties of the object.", + "type": "any" + }, + "description": "Optional. Input content provided by users in JSON object format. 
Examples include text query, function calling parameters, media bytes, etc.", + "type": "object" + } + }, + "type": "object" + }, + "GoogleCloudAiplatformV1QueryReasoningEngineResponse": { + "description": "Response message for ReasoningEngineExecutionService.Query", + "id": "GoogleCloudAiplatformV1QueryReasoningEngineResponse", + "properties": { + "output": { + "description": "Response provided by users in JSON object format.", + "type": "any" + } + }, + "type": "object" + }, "GoogleCloudAiplatformV1QuestionAnsweringCorrectnessInput": { "description": "Input for question answering correctness metric.", "id": "GoogleCloudAiplatformV1QuestionAnsweringCorrectnessInput", @@ -31898,6 +34398,331 @@ }, "type": "object" }, + "GoogleCloudAiplatformV1RagContexts": { + "description": "Relevant contexts for one query.", + "id": "GoogleCloudAiplatformV1RagContexts", + "properties": { + "contexts": { + "description": "All its contexts.", + "items": { + "$ref": "GoogleCloudAiplatformV1RagContextsContext" + }, + "type": "array" + } + }, + "type": "object" + }, + "GoogleCloudAiplatformV1RagContextsContext": { + "description": "A context of the query.", + "id": "GoogleCloudAiplatformV1RagContextsContext", + "properties": { + "score": { + "description": "According to the underlying Vector DB and the selected metric type, the score can be either the distance or the similarity between the query and the context and its range depends on the metric type. For example, if the metric type is COSINE_DISTANCE, it represents the distance between the query and the context. The larger the distance, the less relevant the context is to the query. The range is [0, 2], while 0 means the most relevant and 2 means the least relevant.", + "format": "double", + "type": "number" + }, + "sourceDisplayName": { + "description": "The file display name.", + "type": "string" + }, + "sourceUri": { + "description": "If the file is imported from Cloud Storage or Google Drive, source_uri will be original file URI in Cloud Storage or Google Drive; if file is uploaded, source_uri will be file display name.", + "type": "string" + }, + "text": { + "description": "The text chunk.", + "type": "string" + } + }, + "type": "object" + }, + "GoogleCloudAiplatformV1RagCorpus": { + "description": "A RagCorpus is a RagFile container and a project can have multiple RagCorpora.", + "id": "GoogleCloudAiplatformV1RagCorpus", + "properties": { + "corpusStatus": { + "$ref": "GoogleCloudAiplatformV1CorpusStatus", + "description": "Output only. RagCorpus state.", + "readOnly": true + }, + "createTime": { + "description": "Output only. Timestamp when this RagCorpus was created.", + "format": "google-datetime", + "readOnly": true, + "type": "string" + }, + "description": { + "description": "Optional. The description of the RagCorpus.", + "type": "string" + }, + "displayName": { + "description": "Required. The display name of the RagCorpus. The name can be up to 128 characters long and can consist of any UTF-8 characters.", + "type": "string" + }, + "name": { + "description": "Output only. The resource name of the RagCorpus.", + "readOnly": true, + "type": "string" + }, + "updateTime": { + "description": "Output only. Timestamp when this RagCorpus was last updated.", + "format": "google-datetime", + "readOnly": true, + "type": "string" + }, + "vectorDbConfig": { + "$ref": "GoogleCloudAiplatformV1RagVectorDbConfig", + "description": "Optional. Immutable. The config for the Vector DBs." 
+ } + }, + "type": "object" + }, + "GoogleCloudAiplatformV1RagEmbeddingModelConfig": { + "description": "Config for the embedding model to use for RAG.", + "id": "GoogleCloudAiplatformV1RagEmbeddingModelConfig", + "properties": { + "vertexPredictionEndpoint": { + "$ref": "GoogleCloudAiplatformV1RagEmbeddingModelConfigVertexPredictionEndpoint", + "description": "The Vertex AI Prediction Endpoint that either refers to a publisher model or an endpoint that is hosting a 1P fine-tuned text embedding model. Endpoints hosting non-1P fine-tuned text embedding models are currently not supported. This is used for dense vector search." + } + }, + "type": "object" + }, + "GoogleCloudAiplatformV1RagEmbeddingModelConfigVertexPredictionEndpoint": { + "description": "Config representing a model hosted on Vertex Prediction Endpoint.", + "id": "GoogleCloudAiplatformV1RagEmbeddingModelConfigVertexPredictionEndpoint", + "properties": { + "endpoint": { + "description": "Required. The endpoint resource name. Format: `projects/{project}/locations/{location}/publishers/{publisher}/models/{model}` or `projects/{project}/locations/{location}/endpoints/{endpoint}`", + "type": "string" + }, + "model": { + "description": "Output only. The resource name of the model that is deployed on the endpoint. Present only when the endpoint is not a publisher model. Pattern: `projects/{project}/locations/{location}/models/{model}`", + "readOnly": true, + "type": "string" + }, + "modelVersionId": { + "description": "Output only. Version ID of the model that is deployed on the endpoint. Present only when the endpoint is not a publisher model.", + "readOnly": true, + "type": "string" + } + }, + "type": "object" + }, + "GoogleCloudAiplatformV1RagFile": { + "description": "A RagFile contains user data for chunking, embedding and indexing.", + "id": "GoogleCloudAiplatformV1RagFile", + "properties": { + "createTime": { + "description": "Output only. Timestamp when this RagFile was created.", + "format": "google-datetime", + "readOnly": true, + "type": "string" + }, + "description": { + "description": "Optional. The description of the RagFile.", + "type": "string" + }, + "directUploadSource": { + "$ref": "GoogleCloudAiplatformV1DirectUploadSource", + "description": "Output only. The RagFile is encapsulated and uploaded in the UploadRagFile request.", + "readOnly": true + }, + "displayName": { + "description": "Required. The display name of the RagFile. The name can be up to 128 characters long and can consist of any UTF-8 characters.", + "type": "string" + }, + "fileStatus": { + "$ref": "GoogleCloudAiplatformV1FileStatus", + "description": "Output only. State of the RagFile.", + "readOnly": true + }, + "gcsSource": { + "$ref": "GoogleCloudAiplatformV1GcsSource", + "description": "Output only. Google Cloud Storage location of the RagFile. It does not support wildcards in the Cloud Storage uri for now.", + "readOnly": true + }, + "googleDriveSource": { + "$ref": "GoogleCloudAiplatformV1GoogleDriveSource", + "description": "Output only. Google Drive location. Supports importing individual files as well as Google Drive folders.", + "readOnly": true + }, + "jiraSource": { + "$ref": "GoogleCloudAiplatformV1JiraSource", + "description": "The RagFile is imported from a Jira query." + }, + "name": { + "description": "Output only. 
The resource name of the RagFile.", + "readOnly": true, + "type": "string" + }, + "sharePointSources": { + "$ref": "GoogleCloudAiplatformV1SharePointSources", + "description": "The RagFile is imported from a SharePoint source." + }, + "slackSource": { + "$ref": "GoogleCloudAiplatformV1SlackSource", + "description": "The RagFile is imported from a Slack channel." + }, + "updateTime": { + "description": "Output only. Timestamp when this RagFile was last updated.", + "format": "google-datetime", + "readOnly": true, + "type": "string" + } + }, + "type": "object" + }, + "GoogleCloudAiplatformV1RagFileChunkingConfig": { + "description": "Specifies the size and overlap of chunks for RagFiles.", + "id": "GoogleCloudAiplatformV1RagFileChunkingConfig", + "properties": { + "fixedLengthChunking": { + "$ref": "GoogleCloudAiplatformV1RagFileChunkingConfigFixedLengthChunking", + "description": "Specifies the fixed length chunking config." + } + }, + "type": "object" + }, + "GoogleCloudAiplatformV1RagFileChunkingConfigFixedLengthChunking": { + "description": "Specifies the fixed length chunking config.", + "id": "GoogleCloudAiplatformV1RagFileChunkingConfigFixedLengthChunking", + "properties": { + "chunkOverlap": { + "description": "The overlap between chunks.", + "format": "int32", + "type": "integer" + }, + "chunkSize": { + "description": "The size of the chunks.", + "format": "int32", + "type": "integer" + } + }, + "type": "object" + }, + "GoogleCloudAiplatformV1RagFileTransformationConfig": { + "description": "Specifies the transformation config for RagFiles.", + "id": "GoogleCloudAiplatformV1RagFileTransformationConfig", + "properties": { + "ragFileChunkingConfig": { + "$ref": "GoogleCloudAiplatformV1RagFileChunkingConfig", + "description": "Specifies the chunking config for RagFiles." + } + }, + "type": "object" + }, + "GoogleCloudAiplatformV1RagQuery": { + "description": "A query to retrieve relevant contexts.", + "id": "GoogleCloudAiplatformV1RagQuery", + "properties": { + "ragRetrievalConfig": { + "$ref": "GoogleCloudAiplatformV1RagRetrievalConfig", + "description": "Optional. The retrieval config for the query." + }, + "text": { + "description": "Optional. The query in text format to get relevant contexts.", + "type": "string" + } + }, + "type": "object" + }, + "GoogleCloudAiplatformV1RagRetrievalConfig": { + "description": "Specifies the context retrieval config.", + "id": "GoogleCloudAiplatformV1RagRetrievalConfig", + "properties": { + "filter": { + "$ref": "GoogleCloudAiplatformV1RagRetrievalConfigFilter", + "description": "Optional. Config for filters." + }, + "topK": { + "description": "Optional. The number of contexts to retrieve.", + "format": "int32", + "type": "integer" + } + }, + "type": "object" + }, + "GoogleCloudAiplatformV1RagRetrievalConfigFilter": { + "description": "Config for filters.", + "id": "GoogleCloudAiplatformV1RagRetrievalConfigFilter", + "properties": { + "metadataFilter": { + "description": "Optional. String for metadata filtering.", + "type": "string" + }, + "vectorDistanceThreshold": { + "description": "Optional. Only returns contexts with vector distance smaller than the threshold.", + "format": "double", + "type": "number" + }, + "vectorSimilarityThreshold": { + "description": "Optional. 
Only returns contexts with vector similarity larger than the threshold.", + "format": "double", + "type": "number" + } + }, + "type": "object" + }, + "GoogleCloudAiplatformV1RagVectorDbConfig": { + "description": "Config for the Vector DB to use for RAG.", + "id": "GoogleCloudAiplatformV1RagVectorDbConfig", + "properties": { + "apiAuth": { + "$ref": "GoogleCloudAiplatformV1ApiAuth", + "description": "Authentication config for the chosen Vector DB." + }, + "pinecone": { + "$ref": "GoogleCloudAiplatformV1RagVectorDbConfigPinecone", + "description": "The config for the Pinecone." + }, + "ragEmbeddingModelConfig": { + "$ref": "GoogleCloudAiplatformV1RagEmbeddingModelConfig", + "description": "Optional. Immutable. The embedding model config of the Vector DB." + }, + "ragManagedDb": { + "$ref": "GoogleCloudAiplatformV1RagVectorDbConfigRagManagedDb", + "description": "The config for the RAG-managed Vector DB." + }, + "vertexVectorSearch": { + "$ref": "GoogleCloudAiplatformV1RagVectorDbConfigVertexVectorSearch", + "description": "The config for the Vertex Vector Search." + } + }, + "type": "object" + }, + "GoogleCloudAiplatformV1RagVectorDbConfigPinecone": { + "description": "The config for the Pinecone.", + "id": "GoogleCloudAiplatformV1RagVectorDbConfigPinecone", + "properties": { + "indexName": { + "description": "Pinecone index name. This value cannot be changed after it's set.", + "type": "string" + } + }, + "type": "object" + }, + "GoogleCloudAiplatformV1RagVectorDbConfigRagManagedDb": { + "description": "The config for the default RAG-managed Vector DB.", + "id": "GoogleCloudAiplatformV1RagVectorDbConfigRagManagedDb", + "properties": {}, + "type": "object" + }, + "GoogleCloudAiplatformV1RagVectorDbConfigVertexVectorSearch": { + "description": "The config for the Vertex Vector Search.", + "id": "GoogleCloudAiplatformV1RagVectorDbConfigVertexVectorSearch", + "properties": { + "index": { + "description": "The resource name of the Index. Format: `projects/{project}/locations/{location}/indexes/{index}`", + "type": "string" + }, + "indexEndpoint": { + "description": "The resource name of the Index Endpoint. Format: `projects/{project}/locations/{location}/indexEndpoints/{index_endpoint}`", + "type": "string" + } + }, + "type": "object" + }, "GoogleCloudAiplatformV1RawPredictRequest": { "description": "Request message for PredictionService.RawPredict.", "id": "GoogleCloudAiplatformV1RawPredictRequest", @@ -32166,6 +34991,90 @@ }, "type": "object" }, + "GoogleCloudAiplatformV1ReasoningEngine": { + "description": "ReasoningEngine provides a customizable runtime for models to determine which actions to take and in which order.", + "id": "GoogleCloudAiplatformV1ReasoningEngine", + "properties": { + "createTime": { + "description": "Output only. Timestamp when this ReasoningEngine was created.", + "format": "google-datetime", + "readOnly": true, + "type": "string" + }, + "description": { + "description": "Optional. The description of the ReasoningEngine.", + "type": "string" + }, + "displayName": { + "description": "Required. The display name of the ReasoningEngine.", + "type": "string" + }, + "etag": { + "description": "Optional. Used to perform consistent read-modify-write updates. If not set, a blind \"overwrite\" update happens.", + "type": "string" + }, + "name": { + "description": "Identifier. The resource name of the ReasoningEngine.", + "type": "string" + }, + "spec": { + "$ref": "GoogleCloudAiplatformV1ReasoningEngineSpec", + "description": "Required. 
Configurations of the ReasoningEngine" + }, + "updateTime": { + "description": "Output only. Timestamp when this ReasoningEngine was most recently updated.", + "format": "google-datetime", + "readOnly": true, + "type": "string" + } + }, + "type": "object" + }, + "GoogleCloudAiplatformV1ReasoningEngineSpec": { + "description": "ReasoningEngine configurations", + "id": "GoogleCloudAiplatformV1ReasoningEngineSpec", + "properties": { + "classMethods": { + "description": "Optional. Declarations for object class methods in OpenAPI specification format.", + "items": { + "additionalProperties": { + "description": "Properties of the object.", + "type": "any" + }, + "type": "object" + }, + "type": "array" + }, + "packageSpec": { + "$ref": "GoogleCloudAiplatformV1ReasoningEngineSpecPackageSpec", + "description": "Required. User provided package spec of the ReasoningEngine." + } + }, + "type": "object" + }, + "GoogleCloudAiplatformV1ReasoningEngineSpecPackageSpec": { + "description": "User provided package spec like pickled object and package requirements.", + "id": "GoogleCloudAiplatformV1ReasoningEngineSpecPackageSpec", + "properties": { + "dependencyFilesGcsUri": { + "description": "Optional. The Cloud Storage URI of the dependency files in tar.gz format.", + "type": "string" + }, + "pickleObjectGcsUri": { + "description": "Optional. The Cloud Storage URI of the pickled python object.", + "type": "string" + }, + "pythonVersion": { + "description": "Optional. The Python version. Currently support 3.8, 3.9, 3.10, 3.11. If not specified, default value is 3.10.", + "type": "string" + }, + "requirementsGcsUri": { + "description": "Optional. The Cloud Storage URI of the `requirements.txt` file", + "type": "string" + } + }, + "type": "object" + }, "GoogleCloudAiplatformV1RebaseTunedModelRequest": { "description": "Request message for GenAiTuningService.RebaseTunedModel.", "id": "GoogleCloudAiplatformV1RebaseTunedModelRequest", @@ -32438,6 +35347,70 @@ }, "type": "object" }, + "GoogleCloudAiplatformV1RetrieveContextsRequest": { + "description": "Request message for VertexRagService.RetrieveContexts.", + "id": "GoogleCloudAiplatformV1RetrieveContextsRequest", + "properties": { + "query": { + "$ref": "GoogleCloudAiplatformV1RagQuery", + "description": "Required. Single RAG retrieve query." + }, + "vertexRagStore": { + "$ref": "GoogleCloudAiplatformV1RetrieveContextsRequestVertexRagStore", + "description": "The data source for Vertex RagStore." + } + }, + "type": "object" + }, + "GoogleCloudAiplatformV1RetrieveContextsRequestVertexRagStore": { + "description": "The data source for Vertex RagStore.", + "id": "GoogleCloudAiplatformV1RetrieveContextsRequestVertexRagStore", + "properties": { + "ragResources": { + "description": "Optional. The representation of the rag source. It can be used to specify corpus only or ragfiles. Currently only support one corpus or multiple files from one corpus. In the future we may open up multiple corpora support.", + "items": { + "$ref": "GoogleCloudAiplatformV1RetrieveContextsRequestVertexRagStoreRagResource" + }, + "type": "array" + }, + "vectorDistanceThreshold": { + "deprecated": true, + "description": "Optional. 
Only return contexts with vector distance smaller than the threshold.", + "format": "double", + "type": "number" + } + }, + "type": "object" + }, + "GoogleCloudAiplatformV1RetrieveContextsRequestVertexRagStoreRagResource": { + "description": "The definition of the Rag resource.", + "id": "GoogleCloudAiplatformV1RetrieveContextsRequestVertexRagStoreRagResource", + "properties": { + "ragCorpus": { + "description": "Optional. RagCorpora resource name. Format: `projects/{project}/locations/{location}/ragCorpora/{rag_corpus}`", + "type": "string" + }, + "ragFileIds": { + "description": "Optional. rag_file_id. The files should be in the same rag_corpus set in rag_corpus field.", + "items": { + "type": "string" + }, + "type": "array" + } + }, + "type": "object" + }, + "GoogleCloudAiplatformV1RetrieveContextsResponse": { + "description": "Response message for VertexRagService.RetrieveContexts.", + "id": "GoogleCloudAiplatformV1RetrieveContextsResponse", + "properties": { + "contexts": { + "$ref": "GoogleCloudAiplatformV1RagContexts", + "description": "The contexts of the query." + } + }, + "type": "object" + }, "GoogleCloudAiplatformV1RougeInput": { "description": "Input for rouge metric.", "id": "GoogleCloudAiplatformV1RougeInput", @@ -33000,7 +35973,7 @@ "type": "string" }, "restartJobOnWorkerRestart": { - "description": "Restarts the entire CustomJob if a worker gets restarted. This feature can be used by distributed training jobs that are not resilient to workers leaving and joining a job.", + "description": "Optional. Restarts the entire CustomJob if a worker gets restarted. This feature can be used by distributed training jobs that are not resilient to workers leaving and joining a job.", "type": "boolean" }, "strategy": { @@ -33032,7 +36005,7 @@ "type": "string" }, "timeout": { - "description": "The maximum job running time. The default is 7 days.", + "description": "Optional. The maximum job running time. The default is 7 days.", "format": "google-duration", "type": "string" } @@ -34991,6 +37964,10 @@ "structuredPrompt": { "$ref": "GoogleCloudAiplatformV1SchemaPromptSpecStructuredPrompt", "description": "The prompt variation that stores preambles in separate fields." + }, + "translationPrompt": { + "$ref": "GoogleCloudAiplatformV1SchemaPromptSpecTranslationPrompt", + "description": "The prompt variation for Translation use case." 
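A hedged sketch of how the new VertexRagService.RetrieveContexts schemas above fit together in a request body. The field names come from RetrieveContextsRequest, RagQuery, RagRetrievalConfig, and RetrieveContextsRequestVertexRagStoreRagResource; the project, location, corpus ID, query text, and threshold value are placeholders.

    {
      "query": {
        "text": "How do I rotate a service account key?",
        "ragRetrievalConfig": {
          "topK": 5,
          "filter": {
            "vectorDistanceThreshold": 0.5
          }
        }
      },
      "vertexRagStore": {
        "ragResources": [
          {
            "ragCorpus": "projects/my-project/locations/us-central1/ragCorpora/1234567890"
          }
        ]
      }
    }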
} }, "type": "object" @@ -35089,6 +38066,35 @@ }, "type": "object" }, + "GoogleCloudAiplatformV1SchemaPromptSpecReferenceSentencePair": { + "description": "A pair of sentences used as reference in source and target languages.", + "id": "GoogleCloudAiplatformV1SchemaPromptSpecReferenceSentencePair", + "properties": { + "sourceSentence": { + "description": "Source sentence in the sentence pair.", + "type": "string" + }, + "targetSentence": { + "description": "Target sentence in the sentence pair.", + "type": "string" + } + }, + "type": "object" + }, + "GoogleCloudAiplatformV1SchemaPromptSpecReferenceSentencePairList": { + "description": "A list of reference sentence pairs.", + "id": "GoogleCloudAiplatformV1SchemaPromptSpecReferenceSentencePairList", + "properties": { + "referenceSentencePairs": { + "description": "Reference sentence pairs.", + "items": { + "$ref": "GoogleCloudAiplatformV1SchemaPromptSpecReferenceSentencePair" + }, + "type": "array" + } + }, + "type": "object" + }, "GoogleCloudAiplatformV1SchemaPromptSpecStructuredPrompt": { "description": "Prompt variation that stores preambles in separate fields.", "id": "GoogleCloudAiplatformV1SchemaPromptSpecStructuredPrompt", @@ -35104,6 +38110,14 @@ }, "type": "array" }, + "infillPrefix": { + "description": "Preamble: For infill prompt, the prefix before expected model response.", + "type": "string" + }, + "infillSuffix": { + "description": "Preamble: For infill prompt, the suffix after expected model response.", + "type": "string" + }, "inputPrefixes": { "description": "Preamble: The input prefixes before each example input.", "items": { @@ -35118,9 +38132,118 @@ }, "type": "array" }, + "predictionInputs": { + "description": "Preamble: The input test data for prediction. Each PartList in this field represents one text-only input set for a single model request.", + "items": { + "$ref": "GoogleCloudAiplatformV1SchemaPromptSpecPartList" + }, + "type": "array" + }, + "promptMessage": { + "$ref": "GoogleCloudAiplatformV1SchemaPromptSpecPromptMessage", + "description": "The prompt message." + } + }, + "type": "object" + }, + "GoogleCloudAiplatformV1SchemaPromptSpecTranslationExample": { + "description": "The translation example that contains reference sentences from various sources.", + "id": "GoogleCloudAiplatformV1SchemaPromptSpecTranslationExample", + "properties": { + "referenceSentencePairLists": { + "description": "The reference sentences from inline text.", + "items": { + "$ref": "GoogleCloudAiplatformV1SchemaPromptSpecReferenceSentencePairList" + }, + "type": "array" + }, + "referenceSentencesFileInputs": { + "description": "The reference sentences from file.", + "items": { + "$ref": "GoogleCloudAiplatformV1SchemaPromptSpecTranslationSentenceFileInput" + }, + "type": "array" + } + }, + "type": "object" + }, + "GoogleCloudAiplatformV1SchemaPromptSpecTranslationFileInputSource": { + "id": "GoogleCloudAiplatformV1SchemaPromptSpecTranslationFileInputSource", + "properties": { + "content": { + "description": "The file's contents.", + "type": "string" + }, + "displayName": { + "description": "The file's display name.", + "type": "string" + }, + "mimeType": { + "description": "The file's mime type.", + "type": "string" + } + }, + "type": "object" + }, + "GoogleCloudAiplatformV1SchemaPromptSpecTranslationGcsInputSource": { + "id": "GoogleCloudAiplatformV1SchemaPromptSpecTranslationGcsInputSource", + "properties": { + "inputUri": { + "description": "Source data URI. 
For example, `gs://my_bucket/my_object`.", + "type": "string" + } + }, + "type": "object" + }, + "GoogleCloudAiplatformV1SchemaPromptSpecTranslationOption": { + "description": "Optional settings for translation prompt.", + "id": "GoogleCloudAiplatformV1SchemaPromptSpecTranslationOption", + "properties": { + "numberOfShots": { + "description": "How many shots to use.", + "format": "int32", + "type": "integer" + } + }, + "type": "object" + }, + "GoogleCloudAiplatformV1SchemaPromptSpecTranslationPrompt": { + "description": "Prompt variation for Translation use case.", + "id": "GoogleCloudAiplatformV1SchemaPromptSpecTranslationPrompt", + "properties": { + "example": { + "$ref": "GoogleCloudAiplatformV1SchemaPromptSpecTranslationExample", + "description": "The translation example." + }, + "option": { + "$ref": "GoogleCloudAiplatformV1SchemaPromptSpecTranslationOption", + "description": "The translation option." + }, "promptMessage": { "$ref": "GoogleCloudAiplatformV1SchemaPromptSpecPromptMessage", "description": "The prompt message." + }, + "sourceLanguageCode": { + "description": "The source language code.", + "type": "string" + }, + "targetLanguageCode": { + "description": "The target language code.", + "type": "string" + } + }, + "type": "object" + }, + "GoogleCloudAiplatformV1SchemaPromptSpecTranslationSentenceFileInput": { + "id": "GoogleCloudAiplatformV1SchemaPromptSpecTranslationSentenceFileInput", + "properties": { + "fileInputSource": { + "$ref": "GoogleCloudAiplatformV1SchemaPromptSpecTranslationFileInputSource", + "description": "Inlined file source." + }, + "gcsInputSource": { + "$ref": "GoogleCloudAiplatformV1SchemaPromptSpecTranslationGcsInputSource", + "description": "Cloud Storage file source." } }, "type": "object" @@ -37373,6 +40496,64 @@ }, "type": "object" }, + "GoogleCloudAiplatformV1SharePointSources": { + "description": "The SharePointSources to pass to ImportRagFiles.", + "id": "GoogleCloudAiplatformV1SharePointSources", + "properties": { + "sharePointSources": { + "description": "The SharePoint sources.", + "items": { + "$ref": "GoogleCloudAiplatformV1SharePointSourcesSharePointSource" + }, + "type": "array" + } + }, + "type": "object" + }, + "GoogleCloudAiplatformV1SharePointSourcesSharePointSource": { + "description": "An individual SharePointSource.", + "id": "GoogleCloudAiplatformV1SharePointSourcesSharePointSource", + "properties": { + "clientId": { + "description": "The Application ID for the app registered in Microsoft Azure Portal. The application must also be configured with MS Graph permissions \"Files.ReadAll\", \"Sites.ReadAll\" and BrowserSiteLists.Read.All.", + "type": "string" + }, + "clientSecret": { + "$ref": "GoogleCloudAiplatformV1ApiAuthApiKeyConfig", + "description": "The application secret for the app registered in Azure." + }, + "driveId": { + "description": "The ID of the drive to download from.", + "type": "string" + }, + "driveName": { + "description": "The name of the drive to download from.", + "type": "string" + }, + "fileId": { + "description": "Output only. The SharePoint file id. Output only.", + "readOnly": true, + "type": "string" + }, + "sharepointFolderId": { + "description": "The ID of the SharePoint folder to download from.", + "type": "string" + }, + "sharepointFolderPath": { + "description": "The path of the SharePoint folder to download from.", + "type": "string" + }, + "sharepointSiteName": { + "description": "The name of the SharePoint site to download from. 
This can be the site name or the site id.", + "type": "string" + }, + "tenantId": { + "description": "Unique identifier of the Azure Active Directory Instance.", + "type": "string" + } + }, + "type": "object" + }, "GoogleCloudAiplatformV1ShieldedVmConfig": { "description": "A set of Shielded Instance options. See [Images using supported Shielded VM features](https://cloud.google.com/compute/docs/instances/modifying-shielded-vm).", "id": "GoogleCloudAiplatformV1ShieldedVmConfig", @@ -37384,6 +40565,59 @@ }, "type": "object" }, + "GoogleCloudAiplatformV1SlackSource": { + "description": "The Slack source for the ImportRagFilesRequest.", + "id": "GoogleCloudAiplatformV1SlackSource", + "properties": { + "channels": { + "description": "Required. The Slack channels.", + "items": { + "$ref": "GoogleCloudAiplatformV1SlackSourceSlackChannels" + }, + "type": "array" + } + }, + "type": "object" + }, + "GoogleCloudAiplatformV1SlackSourceSlackChannels": { + "description": "SlackChannels contains the Slack channels and corresponding access token.", + "id": "GoogleCloudAiplatformV1SlackSourceSlackChannels", + "properties": { + "apiKeyConfig": { + "$ref": "GoogleCloudAiplatformV1ApiAuthApiKeyConfig", + "description": "Required. The SecretManager secret version resource name (e.g. projects/{project}/secrets/{secret}/versions/{version}) storing the Slack channel access token that has access to the slack channel IDs. See: https://api.slack.com/tutorials/tracks/getting-a-token." + }, + "channels": { + "description": "Required. The Slack channel IDs.", + "items": { + "$ref": "GoogleCloudAiplatformV1SlackSourceSlackChannelsSlackChannel" + }, + "type": "array" + } + }, + "type": "object" + }, + "GoogleCloudAiplatformV1SlackSourceSlackChannelsSlackChannel": { + "description": "SlackChannel contains the Slack channel ID and the time range to import.", + "id": "GoogleCloudAiplatformV1SlackSourceSlackChannelsSlackChannel", + "properties": { + "channelId": { + "description": "Required. The Slack channel ID.", + "type": "string" + }, + "endTime": { + "description": "Optional. The ending timestamp for messages to import.", + "format": "google-datetime", + "type": "string" + }, + "startTime": { + "description": "Optional. The starting timestamp for messages to import.", + "format": "google-datetime", + "type": "string" + } + }, + "type": "object" + }, "GoogleCloudAiplatformV1SmoothGradConfig": { "description": "Config for SmoothGrad approximation of gradients. When enabled, the gradients are approximated by averaging the gradients from noisy samples in the vicinity of the inputs. Adding noise can help improve the computed gradients. Refer to this paper for more details: https://arxiv.org/pdf/1706.03825.pdf", "id": "GoogleCloudAiplatformV1SmoothGradConfig", @@ -37448,6 +40682,17 @@ }, "type": "object" }, + "GoogleCloudAiplatformV1SpeechConfig": { + "description": "The speech generation config.", + "id": "GoogleCloudAiplatformV1SpeechConfig", + "properties": { + "voiceConfig": { + "$ref": "GoogleCloudAiplatformV1VoiceConfig", + "description": "The configuration for the speaker to use." 
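A hedged sketch of a SharePointSources value as it might appear in an ImportRagFiles request, using only field names from the SharePointSource schema above. All identifiers are placeholders; the clientSecret field (an ApiAuthApiKeyConfig holding the Azure application secret) is omitted here but would normally be supplied as well, and the drive and folder could equally be referenced by driveId and sharepointFolderId instead of by name and path.

    {
      "sharePointSources": [
        {
          "clientId": "00000000-0000-0000-0000-000000000000",
          "tenantId": "11111111-1111-1111-1111-111111111111",
          "sharepointSiteName": "engineering-wiki",
          "driveName": "Documents",
          "sharepointFolderPath": "/design-docs"
        }
      ]
    }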
+ } + }, + "type": "object" + }, "GoogleCloudAiplatformV1StartNotebookRuntimeOperationMetadata": { "description": "Metadata information for NotebookService.StartNotebookRuntime.", "id": "GoogleCloudAiplatformV1StartNotebookRuntimeOperationMetadata", @@ -37469,6 +40714,12 @@ "properties": {}, "type": "object" }, + "GoogleCloudAiplatformV1StopNotebookRuntimeRequest": { + "description": "Request message for NotebookService.StopNotebookRuntime.", + "id": "GoogleCloudAiplatformV1StopNotebookRuntimeRequest", + "properties": {}, + "type": "object" + }, "GoogleCloudAiplatformV1StopTrialRequest": { "description": "Request message for VizierService.StopTrial.", "id": "GoogleCloudAiplatformV1StopTrialRequest", @@ -37549,7 +40800,7 @@ "type": "object" }, "GoogleCloudAiplatformV1StreamingReadFeatureValuesRequest": { - "description": "Request message for FeaturestoreOnlineServingService.StreamingFeatureValuesRead.", + "description": "Request message for FeaturestoreOnlineServingService.StreamingReadFeatureValues.", "id": "GoogleCloudAiplatformV1StreamingReadFeatureValuesRequest", "properties": { "entityIds": { @@ -39229,6 +42480,10 @@ }, "type": "array" }, + "googleSearch": { + "$ref": "GoogleCloudAiplatformV1ToolGoogleSearch", + "description": "Optional. GoogleSearch tool type. Tool to support Google Search in Model. Powered by Google." + }, "googleSearchRetrieval": { "$ref": "GoogleCloudAiplatformV1GoogleSearchRetrieval", "description": "Optional. GoogleSearchRetrieval tool type. Specialized retrieval tool that is powered by Google search." @@ -39240,6 +42495,21 @@ }, "type": "object" }, + "GoogleCloudAiplatformV1ToolCall": { + "description": "Spec for tool call.", + "id": "GoogleCloudAiplatformV1ToolCall", + "properties": { + "toolInput": { + "description": "Optional. Spec for tool input", + "type": "string" + }, + "toolName": { + "description": "Required. Spec for tool name", + "type": "string" + } + }, + "type": "object" + }, "GoogleCloudAiplatformV1ToolCallValidInput": { "description": "Input for tool call valid metric.", "id": "GoogleCloudAiplatformV1ToolCallValidInput", @@ -39318,6 +42588,12 @@ }, "type": "object" }, + "GoogleCloudAiplatformV1ToolGoogleSearch": { + "description": "GoogleSearch tool type. Tool to support Google Search in Model. Powered by Google.", + "id": "GoogleCloudAiplatformV1ToolGoogleSearch", + "properties": {}, + "type": "object" + }, "GoogleCloudAiplatformV1ToolNameMatchInput": { "description": "Input for tool name match metric.", "id": "GoogleCloudAiplatformV1ToolNameMatchInput", @@ -39648,6 +42924,423 @@ }, "type": "object" }, + "GoogleCloudAiplatformV1Trajectory": { + "description": "Spec for trajectory.", + "id": "GoogleCloudAiplatformV1Trajectory", + "properties": { + "toolCalls": { + "description": "Required. Tool calls in the trajectory.", + "items": { + "$ref": "GoogleCloudAiplatformV1ToolCall" + }, + "type": "array" + } + }, + "type": "object" + }, + "GoogleCloudAiplatformV1TrajectoryAnyOrderMatchInput": { + "description": "Instances and metric spec for TrajectoryAnyOrderMatch metric.", + "id": "GoogleCloudAiplatformV1TrajectoryAnyOrderMatchInput", + "properties": { + "instances": { + "description": "Required. Repeated TrajectoryAnyOrderMatch instance.", + "items": { + "$ref": "GoogleCloudAiplatformV1TrajectoryAnyOrderMatchInstance" + }, + "type": "array" + }, + "metricSpec": { + "$ref": "GoogleCloudAiplatformV1TrajectoryAnyOrderMatchSpec", + "description": "Required. Spec for TrajectoryAnyOrderMatch metric." 
+ } + }, + "type": "object" + }, + "GoogleCloudAiplatformV1TrajectoryAnyOrderMatchInstance": { + "description": "Spec for TrajectoryAnyOrderMatch instance.", + "id": "GoogleCloudAiplatformV1TrajectoryAnyOrderMatchInstance", + "properties": { + "predictedTrajectory": { + "$ref": "GoogleCloudAiplatformV1Trajectory", + "description": "Required. Spec for predicted tool call trajectory." + }, + "referenceTrajectory": { + "$ref": "GoogleCloudAiplatformV1Trajectory", + "description": "Required. Spec for reference tool call trajectory." + } + }, + "type": "object" + }, + "GoogleCloudAiplatformV1TrajectoryAnyOrderMatchMetricValue": { + "description": "TrajectoryAnyOrderMatch metric value for an instance.", + "id": "GoogleCloudAiplatformV1TrajectoryAnyOrderMatchMetricValue", + "properties": { + "score": { + "description": "Output only. TrajectoryAnyOrderMatch score.", + "format": "float", + "readOnly": true, + "type": "number" + } + }, + "type": "object" + }, + "GoogleCloudAiplatformV1TrajectoryAnyOrderMatchResults": { + "description": "Results for TrajectoryAnyOrderMatch metric.", + "id": "GoogleCloudAiplatformV1TrajectoryAnyOrderMatchResults", + "properties": { + "trajectoryAnyOrderMatchMetricValues": { + "description": "Output only. TrajectoryAnyOrderMatch metric values.", + "items": { + "$ref": "GoogleCloudAiplatformV1TrajectoryAnyOrderMatchMetricValue" + }, + "readOnly": true, + "type": "array" + } + }, + "type": "object" + }, + "GoogleCloudAiplatformV1TrajectoryAnyOrderMatchSpec": { + "description": "Spec for TrajectoryAnyOrderMatch metric - returns 1 if all tool calls in the reference trajectory appear in the predicted trajectory in any order, else 0.", + "id": "GoogleCloudAiplatformV1TrajectoryAnyOrderMatchSpec", + "properties": {}, + "type": "object" + }, + "GoogleCloudAiplatformV1TrajectoryExactMatchInput": { + "description": "Instances and metric spec for TrajectoryExactMatch metric.", + "id": "GoogleCloudAiplatformV1TrajectoryExactMatchInput", + "properties": { + "instances": { + "description": "Required. Repeated TrajectoryExactMatch instance.", + "items": { + "$ref": "GoogleCloudAiplatformV1TrajectoryExactMatchInstance" + }, + "type": "array" + }, + "metricSpec": { + "$ref": "GoogleCloudAiplatformV1TrajectoryExactMatchSpec", + "description": "Required. Spec for TrajectoryExactMatch metric." + } + }, + "type": "object" + }, + "GoogleCloudAiplatformV1TrajectoryExactMatchInstance": { + "description": "Spec for TrajectoryExactMatch instance.", + "id": "GoogleCloudAiplatformV1TrajectoryExactMatchInstance", + "properties": { + "predictedTrajectory": { + "$ref": "GoogleCloudAiplatformV1Trajectory", + "description": "Required. Spec for predicted tool call trajectory." + }, + "referenceTrajectory": { + "$ref": "GoogleCloudAiplatformV1Trajectory", + "description": "Required. Spec for reference tool call trajectory." + } + }, + "type": "object" + }, + "GoogleCloudAiplatformV1TrajectoryExactMatchMetricValue": { + "description": "TrajectoryExactMatch metric value for an instance.", + "id": "GoogleCloudAiplatformV1TrajectoryExactMatchMetricValue", + "properties": { + "score": { + "description": "Output only. TrajectoryExactMatch score.", + "format": "float", + "readOnly": true, + "type": "number" + } + }, + "type": "object" + }, + "GoogleCloudAiplatformV1TrajectoryExactMatchResults": { + "description": "Results for TrajectoryExactMatch metric.", + "id": "GoogleCloudAiplatformV1TrajectoryExactMatchResults", + "properties": { + "trajectoryExactMatchMetricValues": { + "description": "Output only. 
TrajectoryExactMatch metric values.", + "items": { + "$ref": "GoogleCloudAiplatformV1TrajectoryExactMatchMetricValue" + }, + "readOnly": true, + "type": "array" + } + }, + "type": "object" + }, + "GoogleCloudAiplatformV1TrajectoryExactMatchSpec": { + "description": "Spec for TrajectoryExactMatch metric - returns 1 if tool calls in the reference trajectory exactly match the predicted trajectory, else 0.", + "id": "GoogleCloudAiplatformV1TrajectoryExactMatchSpec", + "properties": {}, + "type": "object" + }, + "GoogleCloudAiplatformV1TrajectoryInOrderMatchInput": { + "description": "Instances and metric spec for TrajectoryInOrderMatch metric.", + "id": "GoogleCloudAiplatformV1TrajectoryInOrderMatchInput", + "properties": { + "instances": { + "description": "Required. Repeated TrajectoryInOrderMatch instance.", + "items": { + "$ref": "GoogleCloudAiplatformV1TrajectoryInOrderMatchInstance" + }, + "type": "array" + }, + "metricSpec": { + "$ref": "GoogleCloudAiplatformV1TrajectoryInOrderMatchSpec", + "description": "Required. Spec for TrajectoryInOrderMatch metric." + } + }, + "type": "object" + }, + "GoogleCloudAiplatformV1TrajectoryInOrderMatchInstance": { + "description": "Spec for TrajectoryInOrderMatch instance.", + "id": "GoogleCloudAiplatformV1TrajectoryInOrderMatchInstance", + "properties": { + "predictedTrajectory": { + "$ref": "GoogleCloudAiplatformV1Trajectory", + "description": "Required. Spec for predicted tool call trajectory." + }, + "referenceTrajectory": { + "$ref": "GoogleCloudAiplatformV1Trajectory", + "description": "Required. Spec for reference tool call trajectory." + } + }, + "type": "object" + }, + "GoogleCloudAiplatformV1TrajectoryInOrderMatchMetricValue": { + "description": "TrajectoryInOrderMatch metric value for an instance.", + "id": "GoogleCloudAiplatformV1TrajectoryInOrderMatchMetricValue", + "properties": { + "score": { + "description": "Output only. TrajectoryInOrderMatch score.", + "format": "float", + "readOnly": true, + "type": "number" + } + }, + "type": "object" + }, + "GoogleCloudAiplatformV1TrajectoryInOrderMatchResults": { + "description": "Results for TrajectoryInOrderMatch metric.", + "id": "GoogleCloudAiplatformV1TrajectoryInOrderMatchResults", + "properties": { + "trajectoryInOrderMatchMetricValues": { + "description": "Output only. TrajectoryInOrderMatch metric values.", + "items": { + "$ref": "GoogleCloudAiplatformV1TrajectoryInOrderMatchMetricValue" + }, + "readOnly": true, + "type": "array" + } + }, + "type": "object" + }, + "GoogleCloudAiplatformV1TrajectoryInOrderMatchSpec": { + "description": "Spec for TrajectoryInOrderMatch metric - returns 1 if tool calls in the reference trajectory appear in the predicted trajectory in the same order, else 0.", + "id": "GoogleCloudAiplatformV1TrajectoryInOrderMatchSpec", + "properties": {}, + "type": "object" + }, + "GoogleCloudAiplatformV1TrajectoryPrecisionInput": { + "description": "Instances and metric spec for TrajectoryPrecision metric.", + "id": "GoogleCloudAiplatformV1TrajectoryPrecisionInput", + "properties": { + "instances": { + "description": "Required. Repeated TrajectoryPrecision instance.", + "items": { + "$ref": "GoogleCloudAiplatformV1TrajectoryPrecisionInstance" + }, + "type": "array" + }, + "metricSpec": { + "$ref": "GoogleCloudAiplatformV1TrajectoryPrecisionSpec", + "description": "Required. Spec for TrajectoryPrecision metric." 
+ } + }, + "type": "object" + }, + "GoogleCloudAiplatformV1TrajectoryPrecisionInstance": { + "description": "Spec for TrajectoryPrecision instance.", + "id": "GoogleCloudAiplatformV1TrajectoryPrecisionInstance", + "properties": { + "predictedTrajectory": { + "$ref": "GoogleCloudAiplatformV1Trajectory", + "description": "Required. Spec for predicted tool call trajectory." + }, + "referenceTrajectory": { + "$ref": "GoogleCloudAiplatformV1Trajectory", + "description": "Required. Spec for reference tool call trajectory." + } + }, + "type": "object" + }, + "GoogleCloudAiplatformV1TrajectoryPrecisionMetricValue": { + "description": "TrajectoryPrecision metric value for an instance.", + "id": "GoogleCloudAiplatformV1TrajectoryPrecisionMetricValue", + "properties": { + "score": { + "description": "Output only. TrajectoryPrecision score.", + "format": "float", + "readOnly": true, + "type": "number" + } + }, + "type": "object" + }, + "GoogleCloudAiplatformV1TrajectoryPrecisionResults": { + "description": "Results for TrajectoryPrecision metric.", + "id": "GoogleCloudAiplatformV1TrajectoryPrecisionResults", + "properties": { + "trajectoryPrecisionMetricValues": { + "description": "Output only. TrajectoryPrecision metric values.", + "items": { + "$ref": "GoogleCloudAiplatformV1TrajectoryPrecisionMetricValue" + }, + "readOnly": true, + "type": "array" + } + }, + "type": "object" + }, + "GoogleCloudAiplatformV1TrajectoryPrecisionSpec": { + "description": "Spec for TrajectoryPrecision metric - returns a float score based on average precision of individual tool calls.", + "id": "GoogleCloudAiplatformV1TrajectoryPrecisionSpec", + "properties": {}, + "type": "object" + }, + "GoogleCloudAiplatformV1TrajectoryRecallInput": { + "description": "Instances and metric spec for TrajectoryRecall metric.", + "id": "GoogleCloudAiplatformV1TrajectoryRecallInput", + "properties": { + "instances": { + "description": "Required. Repeated TrajectoryRecall instance.", + "items": { + "$ref": "GoogleCloudAiplatformV1TrajectoryRecallInstance" + }, + "type": "array" + }, + "metricSpec": { + "$ref": "GoogleCloudAiplatformV1TrajectoryRecallSpec", + "description": "Required. Spec for TrajectoryRecall metric." + } + }, + "type": "object" + }, + "GoogleCloudAiplatformV1TrajectoryRecallInstance": { + "description": "Spec for TrajectoryRecall instance.", + "id": "GoogleCloudAiplatformV1TrajectoryRecallInstance", + "properties": { + "predictedTrajectory": { + "$ref": "GoogleCloudAiplatformV1Trajectory", + "description": "Required. Spec for predicted tool call trajectory." + }, + "referenceTrajectory": { + "$ref": "GoogleCloudAiplatformV1Trajectory", + "description": "Required. Spec for reference tool call trajectory." + } + }, + "type": "object" + }, + "GoogleCloudAiplatformV1TrajectoryRecallMetricValue": { + "description": "TrajectoryRecall metric value for an instance.", + "id": "GoogleCloudAiplatformV1TrajectoryRecallMetricValue", + "properties": { + "score": { + "description": "Output only. TrajectoryRecall score.", + "format": "float", + "readOnly": true, + "type": "number" + } + }, + "type": "object" + }, + "GoogleCloudAiplatformV1TrajectoryRecallResults": { + "description": "Results for TrajectoryRecall metric.", + "id": "GoogleCloudAiplatformV1TrajectoryRecallResults", + "properties": { + "trajectoryRecallMetricValues": { + "description": "Output only. 
TrajectoryRecall metric values.", + "items": { + "$ref": "GoogleCloudAiplatformV1TrajectoryRecallMetricValue" + }, + "readOnly": true, + "type": "array" + } + }, + "type": "object" + }, + "GoogleCloudAiplatformV1TrajectoryRecallSpec": { + "description": "Spec for TrajectoryRecall metric - returns a float score based on average recall of individual tool calls.", + "id": "GoogleCloudAiplatformV1TrajectoryRecallSpec", + "properties": {}, + "type": "object" + }, + "GoogleCloudAiplatformV1TrajectorySingleToolUseInput": { + "description": "Instances and metric spec for TrajectorySingleToolUse metric.", + "id": "GoogleCloudAiplatformV1TrajectorySingleToolUseInput", + "properties": { + "instances": { + "description": "Required. Repeated TrajectorySingleToolUse instance.", + "items": { + "$ref": "GoogleCloudAiplatformV1TrajectorySingleToolUseInstance" + }, + "type": "array" + }, + "metricSpec": { + "$ref": "GoogleCloudAiplatformV1TrajectorySingleToolUseSpec", + "description": "Required. Spec for TrajectorySingleToolUse metric." + } + }, + "type": "object" + }, + "GoogleCloudAiplatformV1TrajectorySingleToolUseInstance": { + "description": "Spec for TrajectorySingleToolUse instance.", + "id": "GoogleCloudAiplatformV1TrajectorySingleToolUseInstance", + "properties": { + "predictedTrajectory": { + "$ref": "GoogleCloudAiplatformV1Trajectory", + "description": "Required. Spec for predicted tool call trajectory." + } + }, + "type": "object" + }, + "GoogleCloudAiplatformV1TrajectorySingleToolUseMetricValue": { + "description": "TrajectorySingleToolUse metric value for an instance.", + "id": "GoogleCloudAiplatformV1TrajectorySingleToolUseMetricValue", + "properties": { + "score": { + "description": "Output only. TrajectorySingleToolUse score.", + "format": "float", + "readOnly": true, + "type": "number" + } + }, + "type": "object" + }, + "GoogleCloudAiplatformV1TrajectorySingleToolUseResults": { + "description": "Results for TrajectorySingleToolUse metric.", + "id": "GoogleCloudAiplatformV1TrajectorySingleToolUseResults", + "properties": { + "trajectorySingleToolUseMetricValues": { + "description": "Output only. TrajectorySingleToolUse metric values.", + "items": { + "$ref": "GoogleCloudAiplatformV1TrajectorySingleToolUseMetricValue" + }, + "readOnly": true, + "type": "array" + } + }, + "type": "object" + }, + "GoogleCloudAiplatformV1TrajectorySingleToolUseSpec": { + "description": "Spec for TrajectorySingleToolUse metric - returns 1 if tool is present in the predicted trajectory, else 0.", + "id": "GoogleCloudAiplatformV1TrajectorySingleToolUseSpec", + "properties": { + "toolName": { + "description": "Required. Spec for tool name to be checked for in the predicted trajectory.", + "type": "string" + } + }, + "type": "object" + }, "GoogleCloudAiplatformV1Trial": { "description": "A message representing a Trial. A Trial contains a unique set of Parameters that has been or will be evaluated, along with the objective metrics got by running the Trial.", "id": "GoogleCloudAiplatformV1Trial", @@ -39828,7 +43521,7 @@ "id": "GoogleCloudAiplatformV1TuningJob", "properties": { "baseModel": { - "description": "The base model that is being tuned, e.g., \"gemini-1.0-pro-002\".", + "description": "The base model that is being tuned, e.g., \"gemini-1.0-pro-002\". .", "type": "string" }, "createTime": { @@ -39873,6 +43566,10 @@ "readOnly": true, "type": "string" }, + "serviceAccount": { + "description": "The service account that the tuningJob workload runs as. 
If not specified, the Vertex AI Secure Fine-Tuned Service Agent in the project will be used. See https://cloud.google.com/iam/docs/service-agents#vertex-ai-secure-fine-tuning-service-agent Users starting the pipeline must have the `iam.serviceAccounts.actAs` permission on this service account.", + "type": "string" + }, "startTime": { "description": "Output only. Time when the TuningJob for the first time entered the `JOB_STATE_RUNNING` state.", "format": "google-datetime", @@ -40033,6 +43730,17 @@ }, "type": "object" }, + "GoogleCloudAiplatformV1UpdateEndpointLongRunningRequest": { + "description": "Request message for EndpointService.UpdateEndpointLongRunning.", + "id": "GoogleCloudAiplatformV1UpdateEndpointLongRunningRequest", + "properties": { + "endpoint": { + "$ref": "GoogleCloudAiplatformV1Endpoint", + "description": "Required. The Endpoint which replaces the resource on the server. Currently we only support updating the `client_connection_config` field, all the other fields' update will be blocked." + } + }, + "type": "object" + }, "GoogleCloudAiplatformV1UpdateExplanationDatasetOperationMetadata": { "description": "Runtime operation information for ModelService.UpdateExplanationDataset.", "id": "GoogleCloudAiplatformV1UpdateExplanationDatasetOperationMetadata", @@ -40255,6 +43963,47 @@ }, "type": "object" }, + "GoogleCloudAiplatformV1UploadRagFileConfig": { + "description": "Config for uploading RagFile.", + "id": "GoogleCloudAiplatformV1UploadRagFileConfig", + "properties": { + "ragFileTransformationConfig": { + "$ref": "GoogleCloudAiplatformV1RagFileTransformationConfig", + "description": "Specifies the transformation config for RagFiles." + } + }, + "type": "object" + }, + "GoogleCloudAiplatformV1UploadRagFileRequest": { + "description": "Request message for VertexRagDataService.UploadRagFile.", + "id": "GoogleCloudAiplatformV1UploadRagFileRequest", + "properties": { + "ragFile": { + "$ref": "GoogleCloudAiplatformV1RagFile", + "description": "Required. The RagFile to upload." + }, + "uploadRagFileConfig": { + "$ref": "GoogleCloudAiplatformV1UploadRagFileConfig", + "description": "Required. The config for the RagFiles to be uploaded into the RagCorpus. VertexRagDataService.UploadRagFile." + } + }, + "type": "object" + }, + "GoogleCloudAiplatformV1UploadRagFileResponse": { + "description": "Response message for VertexRagDataService.UploadRagFile.", + "id": "GoogleCloudAiplatformV1UploadRagFileResponse", + "properties": { + "error": { + "$ref": "GoogleRpcStatus", + "description": "The error that occurred while processing the RagFile." + }, + "ragFile": { + "$ref": "GoogleCloudAiplatformV1RagFile", + "description": "The RagFile that had been uploaded into the RagCorpus." + } + }, + "type": "object" + }, "GoogleCloudAiplatformV1UpsertDatapointsRequest": { "description": "Request message for IndexService.UpsertDatapoints", "id": "GoogleCloudAiplatformV1UpsertDatapointsRequest", @@ -40335,14 +44084,6 @@ "description": "Retrieve from Vertex RAG Store for grounding.", "id": "GoogleCloudAiplatformV1VertexRagStore", "properties": { - "ragCorpora": { - "deprecated": true, - "description": "Optional. Deprecated. Please use rag_resources instead.", - "items": { - "type": "string" - }, - "type": "array" - }, "ragResources": { "description": "Optional. The representation of the rag source. It can be used to specify corpus only or ragfiles. Currently only support one corpus or multiple files from one corpus. 
In the future we may open up multiple corpora support.", "items": { @@ -40350,12 +44091,18 @@ }, "type": "array" }, + "ragRetrievalConfig": { + "$ref": "GoogleCloudAiplatformV1RagRetrievalConfig", + "description": "Optional. The retrieval config for the Rag query." + }, "similarityTopK": { + "deprecated": true, "description": "Optional. Number of top k results to return from the selected corpora.", "format": "int32", "type": "integer" }, "vectorDistanceThreshold": { + "deprecated": true, "description": "Optional. Only return results with vector distance smaller than the threshold.", "format": "double", "type": "number" @@ -40398,6 +44145,17 @@ }, "type": "object" }, + "GoogleCloudAiplatformV1VoiceConfig": { + "description": "The configuration for the voice to use.", + "id": "GoogleCloudAiplatformV1VoiceConfig", + "properties": { + "prebuiltVoiceConfig": { + "$ref": "GoogleCloudAiplatformV1PrebuiltVoiceConfig", + "description": "The configuration for the prebuilt voice to use." + } + }, + "type": "object" + }, "GoogleCloudAiplatformV1WorkerPoolSpec": { "description": "Represents the spec of a worker pool in a job.", "id": "GoogleCloudAiplatformV1WorkerPoolSpec", diff --git a/discovery/googleapis/alloydb__v1.json b/discovery/googleapis/alloydb__v1.json index 1a4a5c7b7..4f9c567e0 100644 --- a/discovery/googleapis/alloydb__v1.json +++ b/discovery/googleapis/alloydb__v1.json @@ -25,7 +25,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20241002", + "revision": "20241204", "rootUrl": "https://alloydb.googleapis.com/", "servicePath": "", "title": "AlloyDB API", @@ -521,6 +521,34 @@ "https://www.googleapis.com/auth/cloud-platform" ] }, + "export": { + "description": "Exports data from the cluster. Imperative only.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/clusters/{clustersId}:export", + "httpMethod": "POST", + "id": "alloydb.projects.locations.clusters.export", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "Required. The resource name of the cluster.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/clusters/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+name}:export", + "request": { + "$ref": "ExportClusterRequest" + }, + "response": { + "$ref": "Operation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, "get": { "description": "Gets details of a single Cluster.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/clusters/{clustersId}", @@ -1356,7 +1384,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. 
Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "alloydb.projects.locations.operations.cancel", @@ -2253,6 +2281,29 @@ }, "type": "object" }, + "CsvExportOptions": { + "description": "Options for exporting data in CSV format.", + "id": "CsvExportOptions", + "properties": { + "escapeCharacter": { + "description": "Optional. Specifies the character that should appear before a data character that needs to be escaped. The default is the same as quote character. The value of this argument has to be a character in Hex ASCII Code.", + "type": "string" + }, + "fieldDelimiter": { + "description": "Optional. Specifies the character that separates columns within each row (line) of the file. The default is comma. The value of this argument has to be a character in Hex ASCII Code.", + "type": "string" + }, + "quoteCharacter": { + "description": "Optional. Specifies the quoting character to be used when a data value is quoted. The default is double-quote. The value of this argument has to be a character in Hex ASCII Code.", + "type": "string" + }, + "selectQuery": { + "description": "Required. The SELECT query used to extract the data.", + "type": "string" + } + }, + "type": "object" + }, "Empty": { "description": "A generic empty message that you can re-use to avoid defining duplicated empty messages in your APIs. A typical example is to use it as the request or the response type of an API method. For instance: service Foo { rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty); }", "id": "Empty", @@ -2300,6 +2351,40 @@ }, "type": "object" }, + "ExportClusterRequest": { + "description": "Export cluster request.", + "id": "ExportClusterRequest", + "properties": { + "csvExportOptions": { + "$ref": "CsvExportOptions", + "description": "Options for exporting data in CSV format. Required field to be set for CSV file type." + }, + "database": { + "description": "Required. Name of the database where the export command will be executed. Note - Value provided should be the same as expected from `SELECT current_database();` and NOT as a resource reference.", + "type": "string" + }, + "gcsDestination": { + "$ref": "GcsDestination", + "description": "Required. Option to export data to cloud storage." + }, + "sqlExportOptions": { + "$ref": "SqlExportOptions", + "description": "Options for exporting data in SQL format. Required field to be set for SQL file type." + } + }, + "type": "object" + }, + "ExportClusterResponse": { + "description": "Response of export cluster rpc.", + "id": "ExportClusterResponse", + "properties": { + "gcsDestination": { + "$ref": "GcsDestination", + "description": "Required. Option to export data to cloud storage." + } + }, + "type": "object" + }, "FailoverInstanceRequest": { "description": "Message for triggering failover on an Instance", "id": "FailoverInstanceRequest", @@ -2315,6 +2400,17 @@ }, "type": "object" }, + "GcsDestination": { + "description": "Destination for Export. Export will be done to cloud storage.", + "id": "GcsDestination", + "properties": { + "uri": { + "description": "Required. 
The path to the file in Google Cloud Storage where the export will be stored. The URI is in the form `gs://bucketName/fileName`.", + "type": "string" + } + }, + "type": "object" + }, "GoogleCloudLocationListLocationsResponse": { "description": "The response message for Locations.ListLocations.", "id": "GoogleCloudLocationListLocationsResponse", @@ -2460,7 +2556,7 @@ "additionalProperties": { "type": "string" }, - "description": "Database flags. Set at instance level. * They are copied from primary instance on read instance creation. * Read instances can set new or override existing flags that are relevant for reads, e.g. for enabling columnar cache on a read instance. Flags set on read instance may or may not be present on primary. This is a list of \"key\": \"value\" pairs. \"key\": The name of the flag. These flags are passed at instance setup time, so include both server options and system variables for Postgres. Flags are specified with underscores, not hyphens. \"value\": The value of the flag. Booleans are set to **on** for true and **off** for false. This field must be omitted if the flag doesn't take a value.", + "description": "Database flags. Set at the instance level. They are copied from the primary instance on secondary instance creation. Flags that have restrictions default to the value at primary instance on read instances during creation. Read instances can set new flags or override existing flags that are relevant for reads, for example, for enabling columnar cache on a read instance. Flags set on read instance might or might not be present on the primary instance. This is a list of \"key\": \"value\" pairs. \"key\": The name of the flag. These flags are passed at instance setup time, so include both server options and system variables for Postgres. Flags are specified with underscores, not hyphens. \"value\": The value of the flag. Booleans are set to **on** for true and **off** for false. This field must be omitted if the flag doesn't take a value.", "type": "object" }, "deleteTime": { @@ -3036,7 +3132,7 @@ "type": "string" }, "requestedCancellation": { - "description": "Output only. Identifies whether the user has requested cancellation of the operation. Operations that have successfully been cancelled have Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Output only. Identifies whether the user has requested cancellation of the operation. Operations that have successfully been cancelled have google.longrunning.Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", "readOnly": true, "type": "boolean" }, @@ -3258,6 +3354,32 @@ }, "type": "object" }, + "SqlExportOptions": { + "description": "Options for exporting data in SQL format.", + "id": "SqlExportOptions", + "properties": { + "cleanTargetObjects": { + "description": "Optional. If true, output commands to DROP all the dumped database objects prior to outputting the commands for creating them.", + "type": "boolean" + }, + "ifExistTargetObjects": { + "description": "Optional. If true, use DROP ... IF EXISTS commands to check for the object's existence before dropping it in clean_target_objects mode.", + "type": "boolean" + }, + "schemaOnly": { + "description": "Optional. If true, only export the schema.", + "type": "boolean" + }, + "tables": { + "description": "Optional. 
Tables to export from.", + "items": { + "type": "string" + }, + "type": "array" + } + }, + "type": "object" + }, "SslConfig": { "description": "SSL configuration.", "id": "SslConfig", @@ -4527,6 +4649,11 @@ "description": "Optional. Number of shards (if applicable).", "format": "int32", "type": "integer" + }, + "vcpuCount": { + "description": "Optional. The number of vCPUs. TODO(b/342344482, b/342346271) add proto validations again after bug fix.", + "format": "double", + "type": "number" } }, "type": "object" @@ -4637,6 +4764,7 @@ "type": "integer" }, "retentionUnit": { + "deprecated": true, "description": "The unit that 'retained_backups' represents.", "enum": [ "RETENTION_UNIT_UNSPECIFIED", @@ -4657,6 +4785,11 @@ "timeBasedRetention": { "format": "google-duration", "type": "string" + }, + "timestampBasedRetentionTime": { + "description": "Timestamp based retention period i.e. 2024-05-01T00:00:00Z", + "format": "google-datetime", + "type": "string" } }, "type": "object" diff --git a/discovery/googleapis/analyticshub__v1.json b/discovery/googleapis/analyticshub__v1.json index 6254cc061..1428fa5ba 100644 --- a/discovery/googleapis/analyticshub__v1.json +++ b/discovery/googleapis/analyticshub__v1.json @@ -28,7 +28,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20240925", + "revision": "20241028", "rootUrl": "https://analyticshub.googleapis.com/", "servicePath": "", "title": "Analytics Hub API", @@ -2230,6 +2230,10 @@ "description": "Required. The parent resource path of the Subscription. e.g. `projects/subscriberproject/locations/US`", "type": "string" }, + "destinationDataset": { + "$ref": "DestinationDataset", + "description": "Optional. BigQuery destination dataset to create for the subscriber." + }, "subscriberContact": { "description": "Email of the subscriber.", "type": "string" diff --git a/discovery/googleapis/androiddeviceprovisioning__v1.json b/discovery/googleapis/androiddeviceprovisioning__v1.json index 56dc584c4..19d334b37 100644 --- a/discovery/googleapis/androiddeviceprovisioning__v1.json +++ b/discovery/googleapis/androiddeviceprovisioning__v1.json @@ -16,7 +16,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20240916", + "revision": "20241202", "rootUrl": "https://androiddeviceprovisioning.googleapis.com/", "servicePath": "", "title": "Android Device Provisioning Partner API", @@ -110,7 +110,7 @@ "parameterOrder": [], "parameters": { "pageSize": { - "description": "The maximum number of customers to show in a page of results. A number between 1 and 100 (inclusive).", + "description": "Required. The maximum number of customers to show in a page of results. A number between 1 and 100 (inclusive).", "format": "int32", "location": "query", "type": "integer" @@ -313,7 +313,7 @@ ], "parameters": { "pageSize": { - "description": "The maximum number of devices to show in a page of results. Must be between 1 and 100 inclusive.", + "description": "Required. The maximum number of devices to show in a page of results. 
Must be between 1 and 100 inclusive.", "format": "int64", "location": "query", "type": "string" diff --git a/discovery/googleapis/androidenterprise__v1.json b/discovery/googleapis/androidenterprise__v1.json index d3edd6891..3a8774ea6 100644 --- a/discovery/googleapis/androidenterprise__v1.json +++ b/discovery/googleapis/androidenterprise__v1.json @@ -25,7 +25,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20241021", + "revision": "20241113", "rootUrl": "https://androidenterprise.googleapis.com/", "servicePath": "", "title": "Google Play EMM API", @@ -397,7 +397,8 @@ ], "parameters": { "deviceType": { - "description": "Whether it’s a dedicated device or a knowledge worker device.", + "deprecated": true, + "description": "Deprecated: Use enrollment_token instead. this field will be removed in the future.", "enum": [ "unknown", "dedicatedDevice", @@ -411,8 +412,34 @@ "location": "query", "type": "string" }, + "enrollmentToken.duration": { + "description": "[Optional] The length of time the enrollment token is valid, ranging from 1 minute to [`Durations.MAX_VALUE`](https://developers.google.com/protocol-buffers/docs/reference/java/com/google/protobuf/util/Durations.html#MAX_VALUE), approximately 10,000 years. If not specified, the default duration is 1 hour.", + "format": "google-duration", + "location": "query", + "type": "string" + }, + "enrollmentToken.enrollmentTokenType": { + "description": "[Required] The type of the enrollment token.", + "enum": [ + "enrollmentTokenTypeUnspecified", + "userlessDevice", + "userDevice" + ], + "enumDescriptions": [ + "The value is unused.", + "The enrollment token is for a userless device.", + "The enrollment token is for a user device." + ], + "location": "query", + "type": "string" + }, + "enrollmentToken.token": { + "description": "The token value that's passed to the device and authorizes the device to enroll. This is a read-only field generated by the server.", + "location": "query", + "type": "string" + }, "enterpriseId": { - "description": "The ID of the enterprise.", + "description": "Required. The ID of the enterprise.", "location": "path", "required": true, "type": "string" @@ -3155,8 +3182,13 @@ "id": "CreateEnrollmentTokenResponse", "properties": { "enrollmentToken": { - "description": "Enrollment token.", + "deprecated": true, + "description": "Deprecated: Use token instead. This field will be removed in the future.", "type": "string" + }, + "token": { + "$ref": "EnrollmentToken", + "description": "[Required] The created enrollment token." } }, "type": "object" @@ -3295,6 +3327,36 @@ }, "type": "object" }, + "EnrollmentToken": { + "description": "A token used to enroll a device.", + "id": "EnrollmentToken", + "properties": { + "duration": { + "description": "[Optional] The length of time the enrollment token is valid, ranging from 1 minute to [`Durations.MAX_VALUE`](https://developers.google.com/protocol-buffers/docs/reference/java/com/google/protobuf/util/Durations.html#MAX_VALUE), approximately 10,000 years. If not specified, the default duration is 1 hour.", + "format": "google-duration", + "type": "string" + }, + "enrollmentTokenType": { + "description": "[Required] The type of the enrollment token.", + "enum": [ + "enrollmentTokenTypeUnspecified", + "userlessDevice", + "userDevice" + ], + "enumDescriptions": [ + "The value is unused.", + "The enrollment token is for a userless device.", + "The enrollment token is for a user device." 
+ ], + "type": "string" + }, + "token": { + "description": "The token value that's passed to the device and authorizes the device to enroll. This is a read-only field generated by the server.", + "type": "string" + } + }, + "type": "object" + }, "Enterprise": { "description": "An Enterprises resource represents the binding between an EMM and a specific organization. That binding can be instantiated in one of two different ways using this API as follows: - For Google managed domain customers, the process involves using Enterprises.enroll and Enterprises.setAccount (in conjunction with artifacts obtained from the Admin console and the Google API Console) and submitted to the EMM through a more-or-less manual process. - For managed Google Play Accounts customers, the process involves using Enterprises.generateSignupUrl and Enterprises.completeSignup in conjunction with the managed Google Play sign-up UI (Google-provided mechanism) to create the binding without manual steps. As an EMM, you can support either or both approaches in your EMM console. See Create an Enterprise for details.", "id": "Enterprise", @@ -4017,7 +4079,7 @@ "properties": { "autoUpdatePolicy": { "deprecated": true, - "description": "Controls when automatic app updates on the device can be applied. Recommended alternative: autoUpdateMode which is set per app, provides greater flexibility around update frequency. When autoUpdateMode is set to AUTO_UPDATE_POSTPONED or AUTO_UPDATE_HIGH_PRIORITY, autoUpdatePolicy has no effect. \"choiceToTheUser\" allows the device's user to configure the app update policy. \"always\" enables auto updates. \"never\" disables auto updates. \"wifiOnly\" enables auto updates only when the device is connected to wifi.", + "description": "Controls when automatic app updates on the device can be applied. Recommended alternative: autoUpdateMode which is set per app, provides greater flexibility around update frequency. When autoUpdateMode is set to AUTO_UPDATE_POSTPONED or AUTO_UPDATE_HIGH_PRIORITY, autoUpdatePolicy has no effect. - choiceToTheUser allows the device's user to configure the app update policy. - always enables auto updates. - never disables auto updates. - wifiOnly enables auto updates only when the device is connected to wifi. *Important:* Changes to app update policies don't affect updates that are in progress. Any policy changes will apply to subsequent app updates. ", "enum": [ "autoUpdatePolicyUnspecified", "choiceToTheUser", diff --git a/discovery/googleapis/androidmanagement__v1.json b/discovery/googleapis/androidmanagement__v1.json index 02a2da8c4..404aaedd5 100644 --- a/discovery/googleapis/androidmanagement__v1.json +++ b/discovery/googleapis/androidmanagement__v1.json @@ -25,7 +25,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20241022", + "revision": "20241204", "rootUrl": "https://androidmanagement.googleapis.com/", "servicePath": "", "title": "Android Management API", @@ -1718,7 +1718,7 @@ "enumDescriptions": [ "Policy not specified. 
If no policy is specified for a permission at any level, then the PROMPT behavior is used by default.", "Prompt the user to grant a permission.", - "Automatically grant a permission.On Android 12 and above, Manifest.permission.READ_SMS (https://developer.android.com/reference/android/Manifest.permission#READ_SMS) and following sensor-related permissions can only be granted on fully managed devices: Manifest.permission.ACCESS_FINE_LOCATION (https://developer.android.com/reference/android/Manifest.permission#ACCESS_FINE_LOCATION) Manifest.permission.ACCESS_BACKGROUND_LOCATION (https://developer.android.com/reference/android/Manifest.permission#ACCESS_BACKGROUND_LOCATION) Manifest.permission.ACCESS_COARSE_LOCATION (https://developer.android.com/reference/android/Manifest.permission#ACCESS_COARSE_LOCATION) Manifest.permission.CAMERA (https://developer.android.com/reference/android/Manifest.permission#CAMERA) Manifest.permission.RECORD_AUDIO (https://developer.android.com/reference/android/Manifest.permission#RECORD_AUDIO) Manifest.permission.ACTIVITY_RECOGNITION (https://developer.android.com/reference/android/Manifest.permission#ACTIVITY_RECOGNITION) Manifest.permission.BODY_SENSORS (https://developer.android.com/reference/android/Manifest.permission#BODY_SENSORS)", + "Automatically grant a permission.On Android 12 and above, READ_SMS (https://developer.android.com/reference/android/Manifest.permission#READ_SMS) and following sensor-related permissions can only be granted on fully managed devices: ACCESS_FINE_LOCATION (https://developer.android.com/reference/android/Manifest.permission#ACCESS_FINE_LOCATION) ACCESS_BACKGROUND_LOCATION (https://developer.android.com/reference/android/Manifest.permission#ACCESS_BACKGROUND_LOCATION) ACCESS_COARSE_LOCATION (https://developer.android.com/reference/android/Manifest.permission#ACCESS_COARSE_LOCATION) CAMERA (https://developer.android.com/reference/android/Manifest.permission#CAMERA) RECORD_AUDIO (https://developer.android.com/reference/android/Manifest.permission#RECORD_AUDIO) ACTIVITY_RECOGNITION (https://developer.android.com/reference/android/Manifest.permission#ACTIVITY_RECOGNITION) BODY_SENSORS (https://developer.android.com/reference/android/Manifest.permission#BODY_SENSORS)", "Automatically deny a permission." ], "type": "string" @@ -1740,15 +1740,15 @@ ], "enumDescriptions": [ "No delegation scope specified.", - "Grants access to certificate installation and management.", - "Grants access to managed configurations management.", - "Grants access to blocking uninstallation.", - "Grants access to permission policy and permission grant state.", - "Grants access to package access state.", - "Grants access for enabling system apps.", + "Grants access to certificate installation and management. This scope can be delegated to multiple applications.", + "Grants access to managed configurations management. This scope can be delegated to multiple applications.", + "Grants access to blocking uninstallation. This scope can be delegated to multiple applications.", + "Grants access to permission policy and permission grant state. This scope can be delegated to multiple applications.", + "Grants access to package access state. This scope can be delegated to multiple applications.", + "Grants access for enabling system apps. This scope can be delegated to multiple applications.", "Grants access to network activity logs. 
Allows the delegated application to call setNetworkLoggingEnabled (https://developer.android.com/reference/android/app/admin/DevicePolicyManager#setNetworkLoggingEnabled%28android.content.ComponentName,%20boolean%29), isNetworkLoggingEnabled (https://developer.android.com/reference/android/app/admin/DevicePolicyManager#isNetworkLoggingEnabled%28android.content.ComponentName%29) and retrieveNetworkLogs (https://developer.android.com/reference/android/app/admin/DevicePolicyManager#retrieveNetworkLogs%28android.content.ComponentName,%20long%29) methods. This scope can be delegated to at most one application. Supported for fully managed devices on Android 10 and above. Supported for a work profile on Android 12 and above. When delegation is supported and set, NETWORK_ACTIVITY_LOGS is ignored.", "Grants access to security logs. Allows the delegated application to call setSecurityLoggingEnabled (https://developer.android.com/reference/android/app/admin/DevicePolicyManager#setSecurityLoggingEnabled%28android.content.ComponentName,%20boolean%29), isSecurityLoggingEnabled (https://developer.android.com/reference/android/app/admin/DevicePolicyManager#isSecurityLoggingEnabled%28android.content.ComponentName%29), retrieveSecurityLogs (https://developer.android.com/reference/android/app/admin/DevicePolicyManager#retrieveSecurityLogs%28android.content.ComponentName%29) and retrievePreRebootSecurityLogs (https://developer.android.com/reference/android/app/admin/DevicePolicyManager#retrievePreRebootSecurityLogs%28android.content.ComponentName%29) methods. This scope can be delegated to at most one application. Supported for fully managed devices and company-owned devices with a work profile on Android 12 and above. When delegation is supported and set, SECURITY_LOGS is ignored.", - "Grants access to selection of KeyChain certificates on behalf of requesting apps. Once granted, the delegated application will start receiving DelegatedAdminReceiver#onChoosePrivateKeyAlias (https://developer.android.com/reference/android/app/admin/DelegatedAdminReceiver#onChoosePrivateKeyAlias%28android.content.Context,%20android.content.Intent,%20int,%20android.net.Uri,%20java.lang.String%29). Allows the delegated application to call grantKeyPairToApp (https://developer.android.com/reference/android/app/admin/DevicePolicyManager#grantKeyPairToApp%28android.content.ComponentName,%20java.lang.String,%20java.lang.String%29) and revokeKeyPairFromApp (https://developer.android.com/reference/android/app/admin/DevicePolicyManager#revokeKeyPairFromApp%28android.content.ComponentName,%20java.lang.String,%20java.lang.String%29) methods. There can be at most one app that has this delegation. choosePrivateKeyRules must be empty and privateKeySelectionEnabled has no effect if certificate selection is delegated to an application." + "Grants access to selection of KeyChain certificates on behalf of requesting apps. Once granted, the delegated application will start receiving DelegatedAdminReceiver#onChoosePrivateKeyAlias (https://developer.android.com/reference/android/app/admin/DelegatedAdminReceiver#onChoosePrivateKeyAlias%28android.content.Context,%20android.content.Intent,%20int,%20android.net.Uri,%20java.lang.String%29). 
Allows the delegated application to call grantKeyPairToApp (https://developer.android.com/reference/android/app/admin/DevicePolicyManager#grantKeyPairToApp%28android.content.ComponentName,%20java.lang.String,%20java.lang.String%29) and revokeKeyPairFromApp (https://developer.android.com/reference/android/app/admin/DevicePolicyManager#revokeKeyPairFromApp%28android.content.ComponentName,%20java.lang.String,%20java.lang.String%29) methods. This scope can be delegated to at most one application. choosePrivateKeyRules must be empty and privateKeySelectionEnabled has no effect if certificate selection is delegated to an application." ], "type": "string" }, @@ -3218,7 +3218,7 @@ "type": "object" }, "ExtensionConfig": { - "description": "Configuration to enable an app as an extension app, with the capability of interacting with Android Device Policy offline. For Android versions 13 and above, extension apps are exempt from battery restrictions so will not be placed into the restricted App Standby Bucket (https://developer.android.com/topic/performance/appstandby#restricted-bucket). Extensions apps are also protected against users clearing their data or force-closing the application, although admins can continue to use the clear app data command on extension apps if needed for Android 13 and above.", + "description": "Configuration to enable an app as an extension app, with the capability of interacting with Android Device Policy offline. For Android versions 11 and above, extension apps are exempt from battery restrictions so will not be placed into the restricted App Standby Bucket (https://developer.android.com/topic/performance/appstandby#restricted-bucket). Extensions apps are also protected against users clearing their data or force-closing the application, although admins can continue to use the clear app data command on extension apps if needed for Android 11 and above.", "id": "ExtensionConfig", "properties": { "notificationReceiver": { @@ -4710,7 +4710,7 @@ "enumDescriptions": [ "Policy not specified. 
If no policy is specified for a permission at any level, then the PROMPT behavior is used by default.", "Prompt the user to grant a permission.", - "Automatically grant a permission.On Android 12 and above, Manifest.permission.READ_SMS (https://developer.android.com/reference/android/Manifest.permission#READ_SMS) and following sensor-related permissions can only be granted on fully managed devices: Manifest.permission.ACCESS_FINE_LOCATION (https://developer.android.com/reference/android/Manifest.permission#ACCESS_FINE_LOCATION) Manifest.permission.ACCESS_BACKGROUND_LOCATION (https://developer.android.com/reference/android/Manifest.permission#ACCESS_BACKGROUND_LOCATION) Manifest.permission.ACCESS_COARSE_LOCATION (https://developer.android.com/reference/android/Manifest.permission#ACCESS_COARSE_LOCATION) Manifest.permission.CAMERA (https://developer.android.com/reference/android/Manifest.permission#CAMERA) Manifest.permission.RECORD_AUDIO (https://developer.android.com/reference/android/Manifest.permission#RECORD_AUDIO) Manifest.permission.ACTIVITY_RECOGNITION (https://developer.android.com/reference/android/Manifest.permission#ACTIVITY_RECOGNITION) Manifest.permission.BODY_SENSORS (https://developer.android.com/reference/android/Manifest.permission#BODY_SENSORS)", + "Automatically grant a permission.On Android 12 and above, READ_SMS (https://developer.android.com/reference/android/Manifest.permission#READ_SMS) and following sensor-related permissions can only be granted on fully managed devices: ACCESS_FINE_LOCATION (https://developer.android.com/reference/android/Manifest.permission#ACCESS_FINE_LOCATION) ACCESS_BACKGROUND_LOCATION (https://developer.android.com/reference/android/Manifest.permission#ACCESS_BACKGROUND_LOCATION) ACCESS_COARSE_LOCATION (https://developer.android.com/reference/android/Manifest.permission#ACCESS_COARSE_LOCATION) CAMERA (https://developer.android.com/reference/android/Manifest.permission#CAMERA) RECORD_AUDIO (https://developer.android.com/reference/android/Manifest.permission#RECORD_AUDIO) ACTIVITY_RECOGNITION (https://developer.android.com/reference/android/Manifest.permission#ACTIVITY_RECOGNITION) BODY_SENSORS (https://developer.android.com/reference/android/Manifest.permission#BODY_SENSORS)", "Automatically deny a permission." ], "type": "string" @@ -4817,6 +4817,20 @@ ], "type": "string" }, + "privateSpacePolicy": { + "description": "Optional. Controls whether a private space is allowed on the device.", + "enum": [ + "PRIVATE_SPACE_POLICY_UNSPECIFIED", + "PRIVATE_SPACE_ALLOWED", + "PRIVATE_SPACE_DISALLOWED" + ], + "enumDescriptions": [ + "Unspecified. Defaults to PRIVATE_SPACE_ALLOWED.", + "Users can create a private space profile.", + "Users cannot create a private space profile. Supported only for company-owned devices with a work profile. Caution: Any existing private space will be removed." + ], + "type": "string" + }, "screenCaptureDisabled": { "description": "If true, screen capture is disabled for all users.", "type": "boolean" @@ -4836,7 +4850,7 @@ "type": "array" }, "addUserDisabled": { - "description": "Whether adding new users and profiles is disabled.", + "description": "Whether adding new users and profiles is disabled. For devices where managementMode is DEVICE_OWNER this field is ignored and the user is never allowed to add or remove users.", "type": "boolean" }, "adjustVolumeDisabled": { @@ -5030,7 +5044,7 @@ "enumDescriptions": [ "Policy not specified. 
If no policy is specified for a permission at any level, then the PROMPT behavior is used by default.", "Prompt the user to grant a permission.", - "Automatically grant a permission.On Android 12 and above, Manifest.permission.READ_SMS (https://developer.android.com/reference/android/Manifest.permission#READ_SMS) and following sensor-related permissions can only be granted on fully managed devices: Manifest.permission.ACCESS_FINE_LOCATION (https://developer.android.com/reference/android/Manifest.permission#ACCESS_FINE_LOCATION) Manifest.permission.ACCESS_BACKGROUND_LOCATION (https://developer.android.com/reference/android/Manifest.permission#ACCESS_BACKGROUND_LOCATION) Manifest.permission.ACCESS_COARSE_LOCATION (https://developer.android.com/reference/android/Manifest.permission#ACCESS_COARSE_LOCATION) Manifest.permission.CAMERA (https://developer.android.com/reference/android/Manifest.permission#CAMERA) Manifest.permission.RECORD_AUDIO (https://developer.android.com/reference/android/Manifest.permission#RECORD_AUDIO) Manifest.permission.ACTIVITY_RECOGNITION (https://developer.android.com/reference/android/Manifest.permission#ACTIVITY_RECOGNITION) Manifest.permission.BODY_SENSORS (https://developer.android.com/reference/android/Manifest.permission#BODY_SENSORS)", + "Automatically grant a permission.On Android 12 and above, READ_SMS (https://developer.android.com/reference/android/Manifest.permission#READ_SMS) and following sensor-related permissions can only be granted on fully managed devices: ACCESS_FINE_LOCATION (https://developer.android.com/reference/android/Manifest.permission#ACCESS_FINE_LOCATION) ACCESS_BACKGROUND_LOCATION (https://developer.android.com/reference/android/Manifest.permission#ACCESS_BACKGROUND_LOCATION) ACCESS_COARSE_LOCATION (https://developer.android.com/reference/android/Manifest.permission#ACCESS_COARSE_LOCATION) CAMERA (https://developer.android.com/reference/android/Manifest.permission#CAMERA) RECORD_AUDIO (https://developer.android.com/reference/android/Manifest.permission#RECORD_AUDIO) ACTIVITY_RECOGNITION (https://developer.android.com/reference/android/Manifest.permission#ACTIVITY_RECOGNITION) BODY_SENSORS (https://developer.android.com/reference/android/Manifest.permission#BODY_SENSORS)", "Automatically deny a permission." ], "type": "string" @@ -5095,7 +5109,7 @@ "type": "boolean" }, "keyguardDisabled": { - "description": "If true, this disables the Lock Screen (https://source.android.com/docs/core/display/multi_display/lock-screen) for primary and/or secondary displays.", + "description": "If true, this disables the Lock Screen (https://source.android.com/docs/core/display/multi_display/lock-screen) for primary and/or secondary displays. This policy is supported only in dedicated device management mode.", "type": "boolean" }, "keyguardDisabledFeatures": { @@ -6628,11 +6642,13 @@ "description": "Required. Wi-Fi roaming mode for the specified SSID.", "enum": [ "WIFI_ROAMING_MODE_UNSPECIFIED", + "WIFI_ROAMING_DISABLED", "WIFI_ROAMING_DEFAULT", "WIFI_ROAMING_AGGRESSIVE" ], "enumDescriptions": [ "Unspecified. Defaults to WIFI_ROAMING_DEFAULT.", + "Wi-Fi roaming is disabled. Supported on Android 15 and above on fully managed devices and work profiles on company-owned devices. A nonComplianceDetail with MANAGEMENT_MODE is reported for other management modes. A nonComplianceDetail with API_LEVEL is reported if the Android version is less than 15.", "Default Wi-Fi roaming mode of the device.", "Aggressive roaming mode which allows quicker Wi-Fi roaming. 
Supported on Android 15 and above on fully managed devices and work profiles on company-owned devices. A nonComplianceDetail with MANAGEMENT_MODE is reported for other management modes. A nonComplianceDetail with API_LEVEL is reported if the Android version is less than 15. A nonComplianceDetail with DEVICE_INCOMPATIBLE is reported if the device does not support aggressive roaming mode." ], diff --git a/discovery/googleapis/androidpublisher__v3.json b/discovery/googleapis/androidpublisher__v3.json index ed72b4974..ae2168aab 100644 --- a/discovery/googleapis/androidpublisher__v3.json +++ b/discovery/googleapis/androidpublisher__v3.json @@ -25,7 +25,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20241016", + "revision": "20241125", "rootUrl": "https://androidpublisher.googleapis.com/", "servicePath": "", "title": "Google Play Android Developer API", @@ -7590,6 +7590,12 @@ }, "type": "object" }, + "OneTimeCode": { + "description": "A single use promotion code.", + "id": "OneTimeCode", + "properties": {}, + "type": "object" + }, "OneTimeExternalTransaction": { "description": "Represents a one-time transaction.", "id": "OneTimeExternalTransaction", @@ -8408,6 +8414,21 @@ }, "type": "object" }, + "SignupPromotion": { + "description": "The promotion applied on this item when purchased.", + "id": "SignupPromotion", + "properties": { + "oneTimeCode": { + "$ref": "OneTimeCode", + "description": "A one-time code was applied." + }, + "vanityCode": { + "$ref": "VanityCode", + "description": "A vanity code was applied." + } + }, + "type": "object" + }, "SplitApkMetadata": { "description": "Holds data specific to Split APKs.", "id": "SplitApkMetadata", @@ -8925,6 +8946,10 @@ "productId": { "description": "The purchased product ID (for example, 'monthly001').", "type": "string" + }, + "signupPromotion": { + "$ref": "SignupPromotion", + "description": "Promotion details about this item. Only set if a promotion was applied during signup." } }, "type": "object" @@ -9907,6 +9932,17 @@ }, "type": "object" }, + "VanityCode": { + "description": "A multiple use, predefined promotion code.", + "id": "VanityCode", + "properties": { + "promotionCode": { + "description": "The promotion code.", + "type": "string" + } + }, + "type": "object" + }, "Variant": { "description": "APK that is suitable for inclusion in a system image. The resource of SystemApksService.", "id": "Variant", diff --git a/discovery/googleapis/apigateway__v1.json b/discovery/googleapis/apigateway__v1.json index 3d2eab6f5..53b6c472a 100644 --- a/discovery/googleapis/apigateway__v1.json +++ b/discovery/googleapis/apigateway__v1.json @@ -24,7 +24,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20240118", + "revision": "20241204", "rootUrl": "https://apigateway.googleapis.com/", "servicePath": "", "title": "API Gateway API", @@ -962,7 +962,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. 
On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "apigateway.projects.locations.operations.cancel", @@ -1655,7 +1655,7 @@ "type": "string" }, "requestedCancellation": { - "description": "Output only. Identifies whether the user has requested cancellation of the operation. Operations that have successfully been cancelled have Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Output only. Identifies whether the user has requested cancellation of the operation. Operations that have successfully been cancelled have google.longrunning.Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", "readOnly": true, "type": "boolean" }, diff --git a/discovery/googleapis/apigee__v1.json b/discovery/googleapis/apigee__v1.json index cba05e4cb..8c8db3c28 100644 --- a/discovery/googleapis/apigee__v1.json +++ b/discovery/googleapis/apigee__v1.json @@ -37,7 +37,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20241017", + "revision": "20241210", "rootUrl": "https://apigee.googleapis.com/", "servicePath": "", "title": "Apigee API", @@ -13494,6 +13494,10 @@ "description": "The location for the config blob of API Runtime Control, aka Envoy Adapter, for op-based authentication as a URI, e.g. a Cloud Storage URI. This is only used by Envoy-based gateways.", "type": "string" }, + "clientIpResolutionConfig": { + "$ref": "GoogleCloudApigeeV1EnvironmentConfigClientIPResolutionConfig", + "description": "The algorithm to resolve IP." + }, "createTime": { "description": "Time that the environment configuration was created.", "format": "google-datetime", @@ -13612,6 +13616,33 @@ }, "type": "object" }, + "GoogleCloudApigeeV1EnvironmentConfigClientIPResolutionConfig": { + "description": "Configuration for resolving the client ip.", + "id": "GoogleCloudApigeeV1EnvironmentConfigClientIPResolutionConfig", + "properties": { + "headerIndexAlgorithm": { + "$ref": "GoogleCloudApigeeV1EnvironmentConfigClientIPResolutionConfigHeaderIndexAlgorithm", + "description": "Resolves the client ip based on a custom header." + } + }, + "type": "object" + }, + "GoogleCloudApigeeV1EnvironmentConfigClientIPResolutionConfigHeaderIndexAlgorithm": { + "description": "Resolves the client ip based on a custom header.", + "id": "GoogleCloudApigeeV1EnvironmentConfigClientIPResolutionConfigHeaderIndexAlgorithm", + "properties": { + "ipHeaderIndex": { + "description": "The index of the ip in the header. 
(By default, value is 0 if missing)", + "format": "int32", + "type": "integer" + }, + "ipHeaderName": { + "description": "The name of the header to extract the client ip from.", + "type": "string" + } + }, + "type": "object" + }, "GoogleCloudApigeeV1EnvironmentGroup": { "description": "EnvironmentGroup configuration. An environment group is used to group one or more Apigee environments under a single host name.", "id": "GoogleCloudApigeeV1EnvironmentGroup", @@ -15443,7 +15474,7 @@ "type": "array" }, "authorizedNetwork": { - "description": "Compute Engine network used for Service Networking to be peered with Apigee runtime instances. See [Getting started with the Service Networking API](https://cloud.google.com/service-infrastructure/docs/service-networking/getting-started). Valid only when [RuntimeType](#RuntimeType) is set to `CLOUD`. The value must be set before the creation of a runtime instance and can be updated only when there are no runtime instances. For example: `default`. Apigee also supports shared VPC (that is, the host network project is not the same as the one that is peering with Apigee). See [Shared VPC overview](https://cloud.google.com/vpc/docs/shared-vpc). To use a shared VPC network, use the following format: `projects/{host-project-id}/{region}/networks/{network-name}`. For example: `projects/my-sharedvpc-host/global/networks/mynetwork` **Note:** Not supported for Apigee hybrid.", + "description": "Compute Engine network used for Service Networking to be peered with Apigee runtime instances. See [Getting started with the Service Networking API](https://cloud.google.com/service-infrastructure/docs/service-networking/getting-started). Valid only when [RuntimeType](#RuntimeType) is set to `CLOUD`. The value must be set before the creation of a runtime instance and can be updated only when there are no runtime instances. For example: `default`. When changing authorizedNetwork, you must reconfigure VPC peering. After VPC peering with previous network is deleted, [run the following command](https://cloud.google.com/sdk/gcloud/reference/services/vpc-peerings/delete): `gcloud services vpc-peerings delete --network=NETWORK`, where `NETWORK` is the name of the previous network. This will delete the previous Service Networking. Otherwise, you will get the following error: `The resource 'projects/...-tp' is already linked to another shared VPC host 'projects/...-tp`. Apigee also supports shared VPC (that is, the host network project is not the same as the one that is peering with Apigee). See [Shared VPC overview](https://cloud.google.com/vpc/docs/shared-vpc). To use a shared VPC network, use the following format: `projects/{host-project-id}/{region}/networks/{network-name}`. 
For example: `projects/my-sharedvpc-host/global/networks/mynetwork` **Note:** Not supported for Apigee hybrid.", "type": "string" }, "billingType": { diff --git a/discovery/googleapis/appengine__v1.json b/discovery/googleapis/appengine__v1.json index 1077b974b..dc5614693 100644 --- a/discovery/googleapis/appengine__v1.json +++ b/discovery/googleapis/appengine__v1.json @@ -30,7 +30,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20241007", + "revision": "20241205", "rootUrl": "https://appengine.googleapis.com/", "servicePath": "", "title": "App Engine Admin API", @@ -1716,6 +1716,53 @@ } }, "services": { + "methods": { + "delete": { + "description": "Deletes the specified service and all enclosed versions.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/applications/{applicationsId}/services/{servicesId}", + "httpMethod": "DELETE", + "id": "appengine.projects.locations.applications.services.delete", + "parameterOrder": [ + "projectsId", + "locationsId", + "applicationsId", + "servicesId" + ], + "parameters": { + "applicationsId": { + "description": "Part of `name`. See documentation of `projectsId`.", + "location": "path", + "required": true, + "type": "string" + }, + "locationsId": { + "description": "Part of `name`. See documentation of `projectsId`.", + "location": "path", + "required": true, + "type": "string" + }, + "projectsId": { + "description": "Part of `name`. Name of the resource requested. Example: apps/myapp/services/default.", + "location": "path", + "required": true, + "type": "string" + }, + "servicesId": { + "description": "Part of `name`. See documentation of `projectsId`.", + "location": "path", + "required": true, + "type": "string" + } + }, + "path": "v1/projects/{projectsId}/locations/{locationsId}/applications/{applicationsId}/services/{servicesId}", + "response": { + "$ref": "Operation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + } + }, "resources": { "versions": { "methods": { @@ -3619,6 +3666,25 @@ }, "type": "object" }, + "ResourceEvent": { + "description": "The request that is passed to CLH during per-resource events. The request will be sent with update semantics in all cases except for data governance purge events. These events will be sent with delete semantics and the CLH is expected to delete the resource receiving this event.", + "id": "ResourceEvent", + "properties": { + "eventId": { + "description": "The unique ID for this per-resource event. CLHs can use this value to dedup repeated calls. required", + "type": "string" + }, + "name": { + "description": "The name of the resource for which this event is. required", + "type": "string" + }, + "state": { + "$ref": "ContainerState", + "description": "The state of the project that led to this event." 
+ } + }, + "type": "object" + }, "ResourceRecord": { "description": "A DNS resource record.", "id": "ResourceRecord", diff --git a/discovery/googleapis/artifactregistry__v1.json b/discovery/googleapis/artifactregistry__v1.json index e84425602..7e07df589 100644 --- a/discovery/googleapis/artifactregistry__v1.json +++ b/discovery/googleapis/artifactregistry__v1.json @@ -245,7 +245,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20241011", + "revision": "20241205", "rootUrl": "https://artifactregistry.googleapis.com/", "servicePath": "", "title": "Artifact Registry API", @@ -4147,6 +4147,11 @@ "description": "The name of the repository, for example: `projects/p1/locations/us-central1/repositories/repo1`. For each location in a project, repository names must be unique.", "type": "string" }, + "registryUri": { + "description": "Output only. The repository endpoint, for example: `us-docker.pkg.dev/my-proj/my-repo`.", + "readOnly": true, + "type": "string" + }, "remoteRepositoryConfig": { "$ref": "RemoteRepositoryConfig", "description": "Configuration specific for a Remote Repository." diff --git a/discovery/googleapis/assuredworkloads__v1.json b/discovery/googleapis/assuredworkloads__v1.json index 09fd5fa58..f669f8bce 100644 --- a/discovery/googleapis/assuredworkloads__v1.json +++ b/discovery/googleapis/assuredworkloads__v1.json @@ -25,7 +25,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20241007", + "revision": "20241206", "rootUrl": "https://assuredworkloads.googleapis.com/", "servicePath": "", "title": "Assured Workloads API", @@ -702,7 +702,35 @@ "REGIONAL_CONTROLS", "HEALTHCARE_AND_LIFE_SCIENCES_CONTROLS", "HEALTHCARE_AND_LIFE_SCIENCES_CONTROLS_US_SUPPORT", - "IRS_1075" + "IRS_1075", + "CANADA_CONTROLLED_GOODS" + ], + "enumDeprecated": [ + false, + false, + false, + false, + false, + false, + true, + true, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false ], "enumDescriptions": [ "Unknown compliance regime.", @@ -711,8 +739,8 @@ "FedRAMP High data protection controls", "FedRAMP Moderate data protection controls", "Assured Workloads For US Regions data protection controls", - "Health Insurance Portability and Accountability Act controls", - "Health Information Trust Alliance controls", + "[DEPRECATED] Health Insurance Portability and Accountability Act controls", + "[DEPRECATED] Health Information Trust Alliance controls", "Assured Workloads For EU Regions and Support controls", "Assured Workloads For Canada Regions and Support controls", "International Traffic in Arms Regulations", @@ -728,7 +756,8 @@ "Assured Workloads for Regional Controls", "Healthcare and Life Science Controls", "Healthcare and Life Science Controls with US Support", - "Internal Revenue Service 1075 controls" + "Internal Revenue Service 1075 controls", + "Canada Controlled Goods" ], "type": "string" }, @@ -1191,7 +1220,35 @@ "REGIONAL_CONTROLS", "HEALTHCARE_AND_LIFE_SCIENCES_CONTROLS", "HEALTHCARE_AND_LIFE_SCIENCES_CONTROLS_US_SUPPORT", - "IRS_1075" + "IRS_1075", + "CANADA_CONTROLLED_GOODS" + ], + "enumDeprecated": [ + false, + false, + false, + false, + false, + false, + true, + true, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false ], "enumDescriptions": [ "Unknown compliance regime.", @@ -1200,8 +1257,8 @@ "FedRAMP High data 
protection controls", "FedRAMP Moderate data protection controls", "Assured Workloads For US Regions data protection controls", - "Health Insurance Portability and Accountability Act controls", - "Health Information Trust Alliance controls", + "[DEPRECATED] Health Insurance Portability and Accountability Act controls", + "[DEPRECATED] Health Information Trust Alliance controls", "Assured Workloads For EU Regions and Support controls", "Assured Workloads For Canada Regions and Support controls", "International Traffic in Arms Regulations", @@ -1217,7 +1274,8 @@ "Assured Workloads for Regional Controls", "Healthcare and Life Science Controls", "Healthcare and Life Science Controls with US Support", - "Internal Revenue Service 1075 controls" + "Internal Revenue Service 1075 controls", + "Canada Controlled Goods" ], "type": "string" }, @@ -1426,7 +1484,7 @@ "type": "string" }, "ekmProvisioningState": { - "description": "Indicates Ekm enrollment Provisioning of a given workload.", + "description": "Output only. Indicates Ekm enrollment Provisioning of a given workload.", "enum": [ "EKM_PROVISIONING_STATE_UNSPECIFIED", "EKM_PROVISIONING_STATE_PENDING", @@ -1439,6 +1497,7 @@ "Failed State for Ekm Provisioning", "Completed State for Ekm Provisioning" ], + "readOnly": true, "type": "string" } }, @@ -1475,7 +1534,7 @@ "type": "boolean" }, "dataLogsViewer": { - "description": "Allow the partner to view inspectability logs and monitoring violations.", + "description": "Optional. Allow the partner to view inspectability logs and monitoring violations.", "type": "boolean" }, "serviceAccessApprover": { @@ -1490,8 +1549,9 @@ "id": "GoogleCloudAssuredworkloadsV1WorkloadResourceInfo", "properties": { "resourceId": { - "description": "Resource identifier. For a project this represents project_number.", + "description": "Output only. Resource identifier. For a project this represents project_number.", "format": "int64", + "readOnly": true, "type": "string" }, "resourceType": { @@ -1588,7 +1648,7 @@ "type": "array" }, "setupStatus": { - "description": "Indicates SAA enrollment status of a given workload.", + "description": "Output only. Indicates SAA enrollment status of a given workload.", "enum": [ "SETUP_STATE_UNSPECIFIED", "STATUS_PENDING", @@ -1599,6 +1659,7 @@ "SAA enrollment pending.", "SAA enrollment comopleted." ], + "readOnly": true, "type": "string" } }, diff --git a/discovery/googleapis/authorizedbuyersmarketplace__v1.json b/discovery/googleapis/authorizedbuyersmarketplace__v1.json index ca1e45282..235a8ae96 100644 --- a/discovery/googleapis/authorizedbuyersmarketplace__v1.json +++ b/discovery/googleapis/authorizedbuyersmarketplace__v1.json @@ -25,7 +25,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20241002", + "revision": "20241120", "rootUrl": "https://authorizedbuyersmarketplace.googleapis.com/", "servicePath": "", "title": "Authorized Buyers Marketplace API", @@ -3140,11 +3140,11 @@ "id": "TimeZone", "properties": { "id": { - "description": "IANA Time Zone Database time zone, e.g. \"America/New_York\".", + "description": "IANA Time Zone Database time zone. For example \"America/New_York\".", "type": "string" }, "version": { - "description": "Optional. IANA Time Zone Database version number, e.g. \"2019a\".", + "description": "Optional. IANA Time Zone Database version number. 
For example \"2019a\".", "type": "string" } }, diff --git a/discovery/googleapis/backupdr__v1.json b/discovery/googleapis/backupdr__v1.json index a6a0d852e..d8a5fecb6 100644 --- a/discovery/googleapis/backupdr__v1.json +++ b/discovery/googleapis/backupdr__v1.json @@ -25,7 +25,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20241009", + "revision": "20241204", "rootUrl": "https://backupdr.googleapis.com/", "servicePath": "", "title": "Backup and DR Service API", @@ -1542,7 +1542,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "backupdr.projects.locations.operations.cancel", @@ -1661,6 +1661,38 @@ ] } } + }, + "serviceConfig": { + "methods": { + "initialize": { + "description": "Initializes the service related config for a project.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/serviceConfig:initialize", + "httpMethod": "POST", + "id": "backupdr.projects.locations.serviceConfig.initialize", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "Required. The resource name of the serviceConfig used to initialize the service. Format: `projects/{project_id}/locations/{location}/serviceConfig`.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/serviceConfig$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+name}:initialize", + "request": { + "$ref": "InitializeServiceRequest" + }, + "response": { + "$ref": "Operation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + } + } } } } @@ -1829,42 +1861,6 @@ }, "type": "object" }, - "AssetLocation": { - "description": "Provides the mapping of a cloud asset to a direct physical location or to a proxy that defines the location on its behalf.", - "id": "AssetLocation", - "properties": { - "ccfeRmsPath": { - "description": "Spanner path of the CCFE RMS database. It is only applicable for CCFE tenants that use CCFE RMS for storing resource metadata.", - "type": "string" - }, - "expected": { - "$ref": "IsolationExpectations", - "description": "Defines the customer expectation around ZI/ZS for this asset and ZI/ZS state of the region at the time of asset creation." 
- }, - "extraParameters": { - "description": "Defines extra parameters required for specific asset types.", - "items": { - "$ref": "ExtraParameter" - }, - "type": "array" - }, - "locationData": { - "description": "Contains all kinds of physical location definitions for this asset.", - "items": { - "$ref": "LocationData" - }, - "type": "array" - }, - "parentAsset": { - "description": "Defines parents assets if any in order to allow later generation of child_asset_location data via child assets.", - "items": { - "$ref": "CloudAsset" - }, - "type": "array" - } - }, - "type": "object" - }, "AttachedDisk": { "description": "An instance-attached disk resource.", "id": "AttachedDisk", @@ -2143,6 +2139,16 @@ "readOnly": true, "type": "string" }, + "satisfiesPzi": { + "description": "Optional. Output only. Reserved for future use.", + "readOnly": true, + "type": "boolean" + }, + "satisfiesPzs": { + "description": "Optional. Output only. Reserved for future use.", + "readOnly": true, + "type": "boolean" + }, "serviceLocks": { "description": "Output only. The list of BackupLocks taken by the service to prevent the deletion of the backup.", "items": { @@ -2391,7 +2397,7 @@ "type": "string" }, "resourceType": { - "description": "Required. The resource type to which the `BackupPlan` will be applied. Examples include, \"compute.googleapis.com/Instance\" and \"storage.googleapis.com/Bucket\".", + "description": "Required. The resource type to which the `BackupPlan` will be applied. Examples include, \"compute.googleapis.com/Instance\", \"sqladmin.googleapis.com/Instance\", or \"alloydb.googleapis.com/Cluster\".", "type": "string" }, "state": { @@ -2437,7 +2443,7 @@ "type": "string" }, "dataSource": { - "description": "Output only. Output Only. Resource name of data source which will be used as storage location for backups taken. Format : projects/{project}/locations/{location}/backupVaults/{backupvault}/dataSources/{datasource}", + "description": "Output only. Resource name of data source which will be used as storage location for backups taken. Format : projects/{project}/locations/{location}/backupVaults/{backupvault}/dataSources/{datasource}", "readOnly": true, "type": "string" }, @@ -2451,7 +2457,7 @@ "type": "string" }, "resourceType": { - "description": "Optional. Resource type of workload on which backupplan is applied", + "description": "Required. Immutable. Resource type of workload on which backupplan is applied", "type": "string" }, "rulesConfigInfo": { @@ -2497,7 +2503,7 @@ "id": "BackupRule", "properties": { "backupRetentionDays": { - "description": "Required. Configures the duration for which backup data will be kept. It is defined in “days”. The value should be greater than or equal to minimum enforced retention of the backup vault.", + "description": "Required. Configures the duration for which backup data will be kept. It is defined in “days”. The value should be greater than or equal to minimum enforced retention of the backup vault. Minimum value is 1 and maximum value is 90 for hourly backups. Minimum value is 1 and maximum value is 90 for daily backups. Minimum value is 7 and maximum value is 186 for weekly backups. Minimum value is 30 and maximum value is 732 for monthly backups. Minimum value is 365 and maximum value is 36159 for yearly backups.", "format": "int32", "type": "integer" }, @@ -2517,18 +2523,20 @@ "id": "BackupVault", "properties": { "accessRestriction": { - "description": "Optional. 
Note: This field is added for future use case and will not be supported in the current release. Optional. Access restriction for the backup vault. Default value is WITHIN_ORGANIZATION if not provided during creation.", + "description": "Optional. Note: This field is added for future use case and will not be supported in the current release. Access restriction for the backup vault. Default value is WITHIN_ORGANIZATION if not provided during creation.", "enum": [ "ACCESS_RESTRICTION_UNSPECIFIED", "WITHIN_PROJECT", "WITHIN_ORGANIZATION", - "UNRESTRICTED" + "UNRESTRICTED", + "WITHIN_ORG_BUT_UNRESTRICTED_FOR_BA" ], "enumDescriptions": [ - "Access restriction not set.", + "Access restriction not set. If user does not provide any value or pass this value, it will be changed to WITHIN_ORGANIZATION.", "Access to or from resources outside your current project will be denied.", "Access to or from resources outside your current organization will be denied.", - "No access restriction." + "No access restriction.", + "Access to or from resources outside your current organization will be denied except for backup appliance." ], "type": "string" }, @@ -2617,7 +2625,7 @@ "type": "string" }, "uid": { - "description": "Output only. Output only Immutable after resource creation until resource deletion.", + "description": "Output only. Immutable after resource creation until resource deletion.", "readOnly": true, "type": "string" }, @@ -2669,49 +2677,12 @@ }, "type": "object" }, - "BlobstoreLocation": { - "description": "Policy ID that identified data placement in Blobstore as per go/blobstore-user-guide#data-metadata-placement-and-failure-domains", - "id": "BlobstoreLocation", - "properties": { - "policyId": { - "items": { - "type": "string" - }, - "type": "array" - } - }, - "type": "object" - }, "CancelOperationRequest": { "description": "The request message for Operations.CancelOperation.", "id": "CancelOperationRequest", "properties": {}, "type": "object" }, - "CloudAsset": { - "id": "CloudAsset", - "properties": { - "assetName": { - "type": "string" - }, - "assetType": { - "type": "string" - } - }, - "type": "object" - }, - "CloudAssetComposition": { - "id": "CloudAssetComposition", - "properties": { - "childAsset": { - "items": { - "$ref": "CloudAsset" - }, - "type": "array" - } - }, - "type": "object" - }, "ComputeInstanceBackupProperties": { "description": "ComputeInstanceBackupProperties represents Compute Engine instance backup properties.", "id": "ComputeInstanceBackupProperties", @@ -3179,18 +3150,6 @@ }, "type": "object" }, - "DirectLocationAssignment": { - "id": "DirectLocationAssignment", - "properties": { - "location": { - "items": { - "$ref": "LocationAssignment" - }, - "type": "array" - } - }, - "type": "object" - }, "DisplayDevice": { "description": "A set of Display Device options", "id": "DisplayDevice", @@ -3246,17 +3205,6 @@ }, "type": "object" }, - "ExtraParameter": { - "description": "Defines parameters that should only be used for specific asset types.", - "id": "ExtraParameter", - "properties": { - "regionalMigDistributionPolicy": { - "$ref": "RegionalMigDistributionPolicy", - "description": "Details about zones used by regional compute.googleapis.com/InstanceGroupManager to create instances." 
- } - }, - "type": "object" - }, "FetchAccessTokenRequest": { "description": "Request message for FetchAccessToken.", "id": "FetchAccessTokenRequest", @@ -3480,6 +3428,21 @@ }, "type": "object" }, + "InitializeServiceRequest": { + "description": "Request message for initializing the service.", + "id": "InitializeServiceRequest", + "properties": { + "requestId": { + "description": "Optional. An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes since the first request. For example, consider a situation where you make an initial request and t he request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000).", + "type": "string" + }, + "resourceType": { + "description": "Required. The resource type to which the default service config will be applied. Examples include, \"compute.googleapis.com/Instance\" and \"storage.googleapis.com/Bucket\".", + "type": "string" + } + }, + "type": "object" + }, "InitiateBackupRequest": { "description": "request message for InitiateBackup.", "id": "InitiateBackupRequest", @@ -3530,131 +3493,6 @@ }, "type": "object" }, - "IsolationExpectations": { - "id": "IsolationExpectations", - "properties": { - "requirementOverride": { - "$ref": "RequirementOverride", - "description": "Explicit overrides for ZI and ZS requirements to be used for resources that should be excluded from ZI/ZS verification logic." 
- }, - "ziOrgPolicy": { - "enum": [ - "ZI_UNSPECIFIED", - "ZI_UNKNOWN", - "ZI_NOT_REQUIRED", - "ZI_PREFERRED", - "ZI_REQUIRED" - ], - "enumDescriptions": [ - "", - "To be used if tracking is not available", - "", - "", - "" - ], - "type": "string" - }, - "ziRegionPolicy": { - "enum": [ - "ZI_REGION_POLICY_UNSPECIFIED", - "ZI_REGION_POLICY_UNKNOWN", - "ZI_REGION_POLICY_NOT_SET", - "ZI_REGION_POLICY_FAIL_OPEN", - "ZI_REGION_POLICY_FAIL_CLOSED" - ], - "enumDescriptions": [ - "", - "To be used if tracking is not available", - "", - "", - "" - ], - "type": "string" - }, - "ziRegionState": { - "enum": [ - "ZI_REGION_UNSPECIFIED", - "ZI_REGION_UNKNOWN", - "ZI_REGION_NOT_ENABLED", - "ZI_REGION_ENABLED" - ], - "enumDescriptions": [ - "", - "To be used if tracking is not available", - "", - "" - ], - "type": "string" - }, - "zoneIsolation": { - "deprecated": true, - "description": "Deprecated: use zi_org_policy, zi_region_policy and zi_region_state instead for setting ZI expectations as per go/zicy-publish-physical-location.", - "enum": [ - "ZI_UNSPECIFIED", - "ZI_UNKNOWN", - "ZI_NOT_REQUIRED", - "ZI_PREFERRED", - "ZI_REQUIRED" - ], - "enumDescriptions": [ - "", - "To be used if tracking is not available", - "", - "", - "" - ], - "type": "string" - }, - "zoneSeparation": { - "deprecated": true, - "description": "Deprecated: use zs_org_policy, and zs_region_stateinstead for setting Zs expectations as per go/zicy-publish-physical-location.", - "enum": [ - "ZS_UNSPECIFIED", - "ZS_UNKNOWN", - "ZS_NOT_REQUIRED", - "ZS_REQUIRED" - ], - "enumDescriptions": [ - "", - "To be used if tracking is not available", - "", - "" - ], - "type": "string" - }, - "zsOrgPolicy": { - "enum": [ - "ZS_UNSPECIFIED", - "ZS_UNKNOWN", - "ZS_NOT_REQUIRED", - "ZS_REQUIRED" - ], - "enumDescriptions": [ - "", - "To be used if tracking is not available", - "", - "" - ], - "type": "string" - }, - "zsRegionState": { - "enum": [ - "ZS_REGION_UNSPECIFIED", - "ZS_REGION_UNKNOWN", - "ZS_REGION_NOT_ENABLED", - "ZS_REGION_ENABLED" - ], - "enumDescriptions": [ - "", - "To be used if tracking of the asset ZS-bit is not available", - "", - "" - ], - "type": "string" - } - }, - "type": "object" - }, "ListBackupPlanAssociationsResponse": { "description": "Response message for List BackupPlanAssociation", "id": "ListBackupPlanAssociationsResponse", @@ -3875,64 +3713,6 @@ }, "type": "object" }, - "LocationAssignment": { - "id": "LocationAssignment", - "properties": { - "location": { - "type": "string" - }, - "locationType": { - "enum": [ - "UNSPECIFIED", - "CLUSTER", - "POP", - "CLOUD_ZONE", - "CLOUD_REGION", - "MULTI_REGION_GEO", - "MULTI_REGION_JURISDICTION", - "GLOBAL", - "OTHER" - ], - "enumDescriptions": [ - "", - "1-10: Physical failure domains.", - "", - "11-20: Logical failure domains.", - "", - "", - "", - "", - "" - ], - "type": "string" - } - }, - "type": "object" - }, - "LocationData": { - "id": "LocationData", - "properties": { - "blobstoreLocation": { - "$ref": "BlobstoreLocation" - }, - "childAssetLocation": { - "$ref": "CloudAssetComposition" - }, - "directLocation": { - "$ref": "DirectLocationAssignment" - }, - "gcpProjectProxy": { - "$ref": "TenantProjectProxy" - }, - "placerLocation": { - "$ref": "PlacerLocation" - }, - "spannerLocation": { - "$ref": "SpannerLocation" - } - }, - "type": "object" - }, "ManagementServer": { "description": "ManagementServer describes a single BackupDR ManagementServer instance.", "id": "ManagementServer", @@ -3977,7 +3757,7 @@ "type": "string" }, "networks": { - "description": "Required. 
VPC networks to which the ManagementServer instance is connected. For this version, only a single network is supported.", + "description": "Optional. VPC networks to which the ManagementServer instance is connected. For this version, only a single network is supported. This field is optional if MS is created without PSA", "items": { "$ref": "NetworkConfig" }, @@ -4331,7 +4111,7 @@ "type": "string" }, "requestedCancellation": { - "description": "Output only. Identifies whether the user has requested cancellation of the operation. Operations that have successfully been cancelled have Operation.error value with a google.rpc.Status.code of 1, corresponding to 'Code.CANCELLED'.", + "description": "Output only. Identifies whether the user has requested cancellation of the operation. Operations that have successfully been cancelled have google.longrunning.Operation.error value with a google.rpc.Status.code of 1, corresponding to 'Code.CANCELLED'.", "readOnly": true, "type": "boolean" }, @@ -4353,17 +4133,6 @@ }, "type": "object" }, - "PlacerLocation": { - "description": "Message describing that the location of the customer resource is tied to placer allocations", - "id": "PlacerLocation", - "properties": { - "placerConfig": { - "description": "Directory with a config related to it in placer (e.g. \"/placer/prod/home/my-root/my-dir\")", - "type": "string" - } - }, - "type": "object" - }, "Policy": { "description": "An Identity and Access Management (IAM) policy, which specifies access controls for Google Cloud resources. A `Policy` is a collection of `bindings`. A `binding` binds one or more `members`, or principals, to a single `role`. Principals can be user accounts, service accounts, Google groups, and domains (such as G Suite). A `role` is a named list of permissions; each `role` can be an IAM predefined role or a user-created custom role. For some types of Google Cloud resources, a `binding` can also specify a `condition`, which is a logical expression that allows access to a resource only if the expression evaluates to `true`. A condition can add constraints based on attributes of the request, the resource, or both. To learn which resources support conditions in their IAM policies, see the [IAM documentation](https://cloud.google.com/iam/help/conditions/resource-policies). 
**JSON example:** ``` { \"bindings\": [ { \"role\": \"roles/resourcemanager.organizationAdmin\", \"members\": [ \"user:mike@example.com\", \"group:admins@example.com\", \"domain:google.com\", \"serviceAccount:my-project-id@appspot.gserviceaccount.com\" ] }, { \"role\": \"roles/resourcemanager.organizationViewer\", \"members\": [ \"user:eve@example.com\" ], \"condition\": { \"title\": \"expirable access\", \"description\": \"Does not grant access after Sep 2020\", \"expression\": \"request.time < timestamp('2020-10-01T00:00:00.000Z')\", } } ], \"etag\": \"BwWWja0YfJA=\", \"version\": 3 } ``` **YAML example:** ``` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3 ``` For a description of IAM and its features, see the [IAM documentation](https://cloud.google.com/iam/docs/).", "id": "Policy", @@ -4395,25 +4164,6 @@ }, "type": "object" }, - "RegionalMigDistributionPolicy": { - "description": "To be used for specifying the intended distribution of regional compute.googleapis.com/InstanceGroupManager instances", - "id": "RegionalMigDistributionPolicy", - "properties": { - "targetShape": { - "description": "The shape in which the group converges around distribution of resources. Instance of proto2 enum", - "format": "int32", - "type": "integer" - }, - "zones": { - "description": "Cloud zones used by regional MIG to create instances.", - "items": { - "$ref": "ZoneConfiguration" - }, - "type": "array" - } - }, - "type": "object" - }, "RemoveDataSourceRequest": { "description": "Message for deleting a DataSource.", "id": "RemoveDataSourceRequest", @@ -4425,44 +4175,6 @@ }, "type": "object" }, - "RequirementOverride": { - "id": "RequirementOverride", - "properties": { - "ziOverride": { - "enum": [ - "ZI_UNSPECIFIED", - "ZI_UNKNOWN", - "ZI_NOT_REQUIRED", - "ZI_PREFERRED", - "ZI_REQUIRED" - ], - "enumDescriptions": [ - "", - "To be used if tracking is not available", - "", - "", - "" - ], - "type": "string" - }, - "zsOverride": { - "enum": [ - "ZS_UNSPECIFIED", - "ZS_UNKNOWN", - "ZS_NOT_REQUIRED", - "ZS_REQUIRED" - ], - "enumDescriptions": [ - "", - "To be used if tracking is not available", - "", - "" - ], - "type": "string" - } - }, - "type": "object" - }, "RestoreBackupRequest": { "description": "Request message for restoring from a Backup.", "id": "RestoreBackupRequest", @@ -4499,7 +4211,7 @@ "properties": { "lastBackupError": { "$ref": "Status", - "description": "Output only. Output Only. google.rpc.Status object to store the last backup error.", + "description": "Output only. google.rpc.Status object to store the last backup error.", "readOnly": true }, "lastBackupState": { @@ -4528,7 +4240,7 @@ "type": "string" }, "ruleId": { - "description": "Output only. Output Only. Backup Rule id fetched from backup plan.", + "description": "Output only. 
Backup Rule id fetched from backup plan.", "readOnly": true, "type": "string" } @@ -4708,26 +4420,6 @@ "properties": {}, "type": "object" }, - "SpannerLocation": { - "id": "SpannerLocation", - "properties": { - "backupName": { - "description": "Set of backups used by the resource with name in the same format as what is available at http://table/spanner_automon.backup_metadata", - "items": { - "type": "string" - }, - "type": "array" - }, - "dbName": { - "description": "Set of databases used by the resource in format /span//", - "items": { - "type": "string" - }, - "type": "array" - } - }, - "type": "object" - }, "StandardSchedule": { "description": "`StandardSchedule` defines a schedule that run within the confines of a defined window of days. We can define recurrence type for schedule as HOURLY, DAILY, WEEKLY, MONTHLY or YEARLY.", "id": "StandardSchedule", @@ -4896,18 +4588,6 @@ }, "type": "object" }, - "TenantProjectProxy": { - "id": "TenantProjectProxy", - "properties": { - "projectNumbers": { - "items": { - "type": "string" - }, - "type": "array" - } - }, - "type": "object" - }, "TestIamPermissionsRequest": { "description": "Request message for `TestIamPermissions` method.", "id": "TestIamPermissionsRequest", @@ -5035,15 +4715,6 @@ } }, "type": "object" - }, - "ZoneConfiguration": { - "id": "ZoneConfiguration", - "properties": { - "zone": { - "type": "string" - } - }, - "type": "object" } } } diff --git a/discovery/googleapis/batch__v1.json b/discovery/googleapis/batch__v1.json index 2e99bc634..0512047cf 100644 --- a/discovery/googleapis/batch__v1.json +++ b/discovery/googleapis/batch__v1.json @@ -25,7 +25,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20241002", + "revision": "20241204", "rootUrl": "https://batch.googleapis.com/", "servicePath": "", "title": "Batch API", @@ -408,7 +408,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "batch.projects.locations.operations.cancel", @@ -1072,7 +1072,7 @@ "id": "CloudLoggingOption", "properties": { "useGenericTaskMonitoredResource": { - "description": "Optional. 
Set this flag to true to change the [monitored resource type](https://cloud.google.com/monitoring/api/resources) for Cloud Logging logs generated by this Batch job from the [`batch.googleapis.com/Job`](https://cloud.google.com/monitoring/api/resources#tag_batch.googleapis.com/Job) type to the formerly used [`generic_task`](https://cloud.google.com/monitoring/api/resources#tag_generic_task) type.", + "description": "Optional. Set this field to `true` to change the [monitored resource type](https://cloud.google.com/monitoring/api/resources) for Cloud Logging logs generated by this Batch job from the [`batch.googleapis.com/Job`](https://cloud.google.com/monitoring/api/resources#tag_batch.googleapis.com/Job) type to the formerly used [`generic_task`](https://cloud.google.com/monitoring/api/resources#tag_generic_task) type.", "type": "boolean" } }, @@ -1289,7 +1289,7 @@ "type": "boolean" }, "instanceTemplate": { - "description": "Name of an instance template used to create VMs. Named the field as 'instance_template' instead of 'template' to avoid C++ keyword conflict. Batch only supports global instance templates. You can specify the global instance template as a full or partial URL.", + "description": "Name of an instance template used to create VMs. Named the field as 'instance_template' instead of 'template' to avoid C++ keyword conflict. Batch only supports global instance templates from the same project as the job. You can specify the global instance template as a full or partial URL.", "type": "string" }, "policy": { @@ -1639,29 +1639,29 @@ "type": "object" }, "LogsPolicy": { - "description": "LogsPolicy describes how outputs from a Job's Tasks (stdout/stderr) will be preserved.", + "description": "LogsPolicy describes if and how a job's logs are preserved. Logs include information that is automatically written by the Batch service agent and any information that you configured the job's runnables to write to the `stdout` or `stderr` streams.", "id": "LogsPolicy", "properties": { "cloudLoggingOption": { "$ref": "CloudLoggingOption", - "description": "Optional. Additional settings for Cloud Logging. It will only take effect when the destination of `LogsPolicy` is set to `CLOUD_LOGGING`." + "description": "Optional. When `destination` is set to `CLOUD_LOGGING`, you can optionally set this field to configure additional settings for Cloud Logging." }, "destination": { - "description": "Where logs should be saved.", + "description": "If and where logs should be saved.", "enum": [ "DESTINATION_UNSPECIFIED", "CLOUD_LOGGING", "PATH" ], "enumDescriptions": [ - "Logs are not preserved.", - "Logs are streamed to Cloud Logging.", - "Logs are saved to a file path." + "(Default) Logs are not preserved.", + "Logs are streamed to Cloud Logging. Optionally, you can configure additional settings in the `cloudLoggingOption` field.", + "Logs are saved to the file path specified in the `logsPath` field." ], "type": "string" }, "logsPath": { - "description": "The path to which logs are saved when the destination = PATH. This can be a local file path on the VM, or under the mount point of a Persistent Disk or Filestore, or a Cloud Storage path.", + "description": "When `destination` is set to `PATH`, you must set this field to the path where you want logs to be saved. This path can point to a local directory on the VM or (if configured) a directory under the mount path of any Cloud Storage bucket, network file system (NFS), or writable persistent disk that is mounted to the job. 
For example, if the job has a bucket with `mountPath` set to `/mnt/disks/my-bucket`, you can write logs to the root directory of the `remotePath` of that bucket by setting this field to `/mnt/disks/my-bucket/`.", "type": "string" } }, @@ -1837,7 +1837,7 @@ "type": "string" }, "requestedCancellation": { - "description": "Output only. Identifies whether the user has requested cancellation of the operation. Operations that have successfully been cancelled have Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Output only. Identifies whether the user has requested cancellation of the operation. Operations that have successfully been cancelled have google.longrunning.Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", "readOnly": true, "type": "boolean" }, diff --git a/discovery/googleapis/beyondcorp__v1.json b/discovery/googleapis/beyondcorp__v1.json index 437506ef8..9e94a1bf6 100644 --- a/discovery/googleapis/beyondcorp__v1.json +++ b/discovery/googleapis/beyondcorp__v1.json @@ -25,7 +25,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20241011", + "revision": "20241204", "rootUrl": "https://beyondcorp.googleapis.com/", "servicePath": "", "title": "BeyondCorp API", @@ -395,7 +395,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/organizations/{organizationsId}/locations/{locationsId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "beyondcorp.organizations.locations.operations.cancel", @@ -1837,7 +1837,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. 
The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "beyondcorp.projects.locations.operations.cancel", @@ -2173,29 +2173,29 @@ "https://www.googleapis.com/auth/cloud-platform" ] }, - "setPeering": { - "description": "This is a custom method to allow customers to create a peering connections between Google network and customer networks. This is enabled only for the allowlisted customers.", - "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/securityGateways/{securityGatewaysId}:setPeering", + "setIamPolicy": { + "description": "Sets the access control policy on the specified resource. Replaces any existing policy. Can return `NOT_FOUND`, `INVALID_ARGUMENT`, and `PERMISSION_DENIED` errors.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/securityGateways/{securityGatewaysId}:setIamPolicy", "httpMethod": "POST", - "id": "beyondcorp.projects.locations.securityGateways.setPeering", + "id": "beyondcorp.projects.locations.securityGateways.setIamPolicy", "parameterOrder": [ - "securityGateway" + "resource" ], "parameters": { - "securityGateway": { - "description": "Required. BeyondCorp SecurityGateway name using the form: `projects/{project}/locations/{location}/securityGateways/{security_gateway}`", + "resource": { + "description": "REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field.", "location": "path", "pattern": "^projects/[^/]+/locations/[^/]+/securityGateways/[^/]+$", "required": true, "type": "string" } }, - "path": "v1/{+securityGateway}:setPeering", + "path": "v1/{+resource}:setIamPolicy", "request": { - "$ref": "GoogleCloudBeyondcorpSecuritygatewaysV1SetPeeringRequest" + "$ref": "GoogleIamV1SetIamPolicyRequest" }, "response": { - "$ref": "GoogleLongrunningOperation" + "$ref": "GoogleIamV1Policy" }, "scopes": [ "https://www.googleapis.com/auth/cloud-platform" @@ -2405,50 +2405,6 @@ } } } - }, - "v": { - "resources": { - "projects": { - "resources": { - "locations": { - "resources": { - "securityGateways": { - "methods": { - "setIamPolicy": { - "description": "Sets the access control policy on the specified resource. Replaces any existing policy. Can return `NOT_FOUND`, `INVALID_ARGUMENT`, and `PERMISSION_DENIED` errors.", - "flatPath": "v/projects/{projectsId}/locations/{locationsId}/securityGateways/{securityGatewaysId}:setIamPolicy", - "httpMethod": "POST", - "id": "beyondcorp.v.projects.locations.securityGateways.setIamPolicy", - "parameterOrder": [ - "resource" - ], - "parameters": { - "resource": { - "description": "REQUIRED: The resource for which the policy is being specified. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field.", - "location": "path", - "pattern": "^projects/[^/]+/locations/[^/]+/securityGateways/[^/]+$", - "required": true, - "type": "string" - } - }, - "path": "v/{+resource}:setIamPolicy", - "request": { - "$ref": "GoogleIamV1SetIamPolicyRequest" - }, - "response": { - "$ref": "GoogleIamV1Policy" - }, - "scopes": [ - "https://www.googleapis.com/auth/cloud-platform" - ] - } - } - } - } - } - } - } - } } }, "schemas": { @@ -2597,7 +2553,7 @@ "type": "string" }, "requestedCancellation": { - "description": "Output only. Identifies whether the user has requested cancellation of the operation. Operations that have successfully been cancelled have Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Output only. Identifies whether the user has requested cancellation of the operation. Operations that have successfully been cancelled have google.longrunning.Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "readOnly": true, "type": "boolean" }, @@ -2883,7 +2839,7 @@ "type": "string" }, "requestedCancellation": { - "description": "Output only. Identifies whether the user has requested cancellation of the operation. Operations that have successfully been cancelled have Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Output only. Identifies whether the user has requested cancellation of the operation. Operations that have successfully been cancelled have google.longrunning.Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", "readOnly": true, "type": "boolean" }, @@ -2995,7 +2951,7 @@ "type": "string" }, "requestedCancellation": { - "description": "Output only. Identifies whether the user has requested cancellation of the operation. Operations that have successfully been cancelled have Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Output only. Identifies whether the user has requested cancellation of the operation. Operations that have successfully been cancelled have google.longrunning.Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", "readOnly": true, "type": "boolean" }, @@ -3135,7 +3091,7 @@ "type": "string" }, "requestedCancellation": { - "description": "Output only. Identifies whether the user has requested cancellation of the operation. Operations that have successfully been cancelled have Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Output only. Identifies whether the user has requested cancellation of the operation. Operations that have successfully been cancelled have google.longrunning.Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "readOnly": true, "type": "boolean" }, @@ -3374,7 +3330,7 @@ "type": "string" }, "requestedCancellation": { - "description": "Output only. Identifies whether the user has requested cancellation of the operation. Operations that have successfully been cancelled have Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Output only. Identifies whether the user has requested cancellation of the operation. 
Operations that have successfully been cancelled have google.longrunning.Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "readOnly": true, "type": "boolean" }, @@ -3450,7 +3406,7 @@ "type": "string" }, "requestedCancellation": { - "description": "Output only. Identifies whether the user has requested cancellation of the operation. Operations that have successfully been cancelled have Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Output only. Identifies whether the user has requested cancellation of the operation. Operations that have successfully been cancelled have google.longrunning.Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "readOnly": true, "type": "boolean" }, @@ -3809,24 +3765,6 @@ }, "type": "object" }, - "GoogleCloudBeyondcorpSecuritygatewaysV1Peering": { - "description": "VPC Peering details.", - "id": "GoogleCloudBeyondcorpSecuritygatewaysV1Peering", - "properties": { - "dnsZones": { - "description": "Optional. List of DNS zones for DNS peering with the customer VPC network.", - "items": { - "type": "string" - }, - "type": "array" - }, - "targetNetwork": { - "description": "Required. The name of the Target VPC network name in the format: `projects/{project}/global/networks/{network}", - "type": "string" - } - }, - "type": "object" - }, "GoogleCloudBeyondcorpSecuritygatewaysV1SecurityGateway": { "description": "Information about a BeyoncCorp SecurityGateway resource.", "id": "GoogleCloudBeyondcorpSecuritygatewaysV1SecurityGateway", @@ -3936,28 +3874,6 @@ }, "type": "object" }, - "GoogleCloudBeyondcorpSecuritygatewaysV1SetPeeringRequest": { - "description": "Set Peering request for creating a VPC peering between Google network and customer networks.", - "id": "GoogleCloudBeyondcorpSecuritygatewaysV1SetPeeringRequest", - "properties": { - "peerings": { - "description": "Required. List of Peering connection information.", - "items": { - "$ref": "GoogleCloudBeyondcorpSecuritygatewaysV1Peering" - }, - "type": "array" - }, - "requestId": { - "description": "Optional. An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes since the first request. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000).", - "type": "string" - }, - "validateOnly": { - "description": "Optional. 
If set, validates request by executing a dry-run which would not alter the resource in any way.", - "type": "boolean" - } - }, - "type": "object" - }, "GoogleCloudBeyondcorpSecuritygatewaysV1alphaSecurityGatewayOperationMetadata": { "description": "Represents the metadata of the long-running operation.", "id": "GoogleCloudBeyondcorpSecuritygatewaysV1alphaSecurityGatewayOperationMetadata", diff --git a/discovery/googleapis/bigquery__v2.json b/discovery/googleapis/bigquery__v2.json index 0399b5c99..e809887ad 100644 --- a/discovery/googleapis/bigquery__v2.json +++ b/discovery/googleapis/bigquery__v2.json @@ -125,7 +125,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20241013", + "revision": "20241115", "rootUrl": "https://bigquery.googleapis.com/", "servicePath": "bigquery/v2/", "title": "BigQuery API", @@ -257,7 +257,7 @@ ], "parameters": { "accessPolicyVersion": { - "description": "Optional. The version of the access policy schema to fetch. Valid values are 0, 1, and 3. Requests specifying an invalid value will be rejected. Requests for conditional access policy binding in datasets must specify version 3. Dataset with no conditional role bindings in access policy may specify any valid value or leave the field unset. This field will be maped to [IAM Policy version] (https://cloud.google.com/iam/docs/policies#versions) and will be used to fetch policy from IAM. If unset or if 0 or 1 value is used for dataset with conditional bindings, access entry with condition will have role string appended by 'withcond' string followed by a hash value. For example : { \"access\": [ { \"role\": \"roles/bigquery.dataViewer_with_conditionalbinding_7a34awqsda\", \"userByEmail\": \"user@example.com\", } ] } Please refer https://cloud.google.com/iam/docs/troubleshooting-withcond for more details.", + "description": "Optional. The version of the access policy schema to fetch. Valid values are 0, 1, and 3. Requests specifying an invalid value will be rejected. Requests for conditional access policy binding in datasets must specify version 3. Dataset with no conditional role bindings in access policy may specify any valid value or leave the field unset. This field will be mapped to [IAM Policy version] (https://cloud.google.com/iam/docs/policies#versions) and will be used to fetch policy from IAM. If unset or if 0 or 1 value is used for dataset with conditional bindings, access entry with condition will have role string appended by 'withcond' string followed by a hash value. For example : { \"access\": [ { \"role\": \"roles/bigquery.dataViewer_with_conditionalbinding_7a34awqsda\", \"userByEmail\": \"user@example.com\", } ] } Please refer https://cloud.google.com/iam/docs/troubleshooting-withcond for more details.", "format": "int32", "location": "query", "type": "integer" @@ -314,7 +314,7 @@ ], "parameters": { "accessPolicyVersion": { - "description": "Optional. The version of the provided access policy schema. Valid values are 0, 1, and 3. Requests specifying an invalid value will be rejected. This version refers to the schema version of the access policy and not the version of access policy. This field's value can be equal or more than the access policy schema provided in the request. For example, * Requests with conditional access policy binding in datasets must specify version 3. * But dataset with no conditional role bindings in access policy may specify any valid value or leave the field unset. 
If unset or if 0 or 1 value is used for dataset with conditional bindings, request will be rejected. This field will be maped to IAM Policy version (https://cloud.google.com/iam/docs/policies#versions) and will be used to set policy in IAM.", + "description": "Optional. The version of the provided access policy schema. Valid values are 0, 1, and 3. Requests specifying an invalid value will be rejected. This version refers to the schema version of the access policy and not the version of access policy. This field's value can be equal or more than the access policy schema provided in the request. For example, * Requests with conditional access policy binding in datasets must specify version 3. * But dataset with no conditional role bindings in access policy may specify any valid value or leave the field unset. If unset or if 0 or 1 value is used for dataset with conditional bindings, request will be rejected. This field will be mapped to IAM Policy version (https://cloud.google.com/iam/docs/policies#versions) and will be used to set policy in IAM.", "format": "int32", "location": "query", "type": "integer" @@ -398,7 +398,7 @@ ], "parameters": { "accessPolicyVersion": { - "description": "Optional. The version of the provided access policy schema. Valid values are 0, 1, and 3. Requests specifying an invalid value will be rejected. This version refers to the schema version of the access policy and not the version of access policy. This field's value can be equal or more than the access policy schema provided in the request. For example, * Operations updating conditional access policy binding in datasets must specify version 3. Some of the operations are : - Adding a new access policy entry with condition. - Removing an access policy entry with condition. - Updating an access policy entry with condition. * But dataset with no conditional role bindings in access policy may specify any valid value or leave the field unset. If unset or if 0 or 1 value is used for dataset with conditional bindings, request will be rejected. This field will be maped to IAM Policy version (https://cloud.google.com/iam/docs/policies#versions) and will be used to set policy in IAM.", + "description": "Optional. The version of the provided access policy schema. Valid values are 0, 1, and 3. Requests specifying an invalid value will be rejected. This version refers to the schema version of the access policy and not the version of access policy. This field's value can be equal or more than the access policy schema provided in the request. For example, * Operations updating conditional access policy binding in datasets must specify version 3. Some of the operations are : - Adding a new access policy entry with condition. - Removing an access policy entry with condition. - Updating an access policy entry with condition. * But dataset with no conditional role bindings in access policy may specify any valid value or leave the field unset. If unset or if 0 or 1 value is used for dataset with conditional bindings, request will be rejected. This field will be mapped to IAM Policy version (https://cloud.google.com/iam/docs/policies#versions) and will be used to set policy in IAM.", "format": "int32", "location": "query", "type": "integer" @@ -478,7 +478,7 @@ ], "parameters": { "accessPolicyVersion": { - "description": "Optional. The version of the provided access policy schema. Valid values are 0, 1, and 3. Requests specifying an invalid value will be rejected. 
This version refers to the schema version of the access policy and not the version of access policy. This field's value can be equal or more than the access policy schema provided in the request. For example, * Operations updating conditional access policy binding in datasets must specify version 3. Some of the operations are : - Adding a new access policy entry with condition. - Removing an access policy entry with condition. - Updating an access policy entry with condition. * But dataset with no conditional role bindings in access policy may specify any valid value or leave the field unset. If unset or if 0 or 1 value is used for dataset with conditional bindings, request will be rejected. This field will be maped to IAM Policy version (https://cloud.google.com/iam/docs/policies#versions) and will be used to set policy in IAM.", + "description": "Optional. The version of the provided access policy schema. Valid values are 0, 1, and 3. Requests specifying an invalid value will be rejected. This version refers to the schema version of the access policy and not the version of access policy. This field's value can be equal or more than the access policy schema provided in the request. For example, * Operations updating conditional access policy binding in datasets must specify version 3. Some of the operations are : - Adding a new access policy entry with condition. - Removing an access policy entry with condition. - Updating an access policy entry with condition. * But dataset with no conditional role bindings in access policy may specify any valid value or leave the field unset. If unset or if 0 or 1 value is used for dataset with conditional bindings, request will be rejected. This field will be mapped to IAM Policy version (https://cloud.google.com/iam/docs/policies#versions) and will be used to set policy in IAM.", "format": "int32", "location": "query", "type": "integer" @@ -2088,13 +2088,13 @@ "enumDescriptions": [ "Default value.", "The argument is a variable with fully specified type, which can be a struct or an array, but not a table.", - "The argument is any type, including struct or array, but not a table. To be added: FIXED_TABLE, ANY_TABLE" + "The argument is any type, including struct or array, but not a table." ], "type": "string" }, "dataType": { "$ref": "StandardSqlDataType", - "description": "Required unless argument_kind = ANY_TYPE." + "description": "Set if argument_kind == FIXED_TYPE." }, "isAggregate": { "description": "Optional. Whether the argument is an aggregate function parameter. Must be Unset for routine types other than AGGREGATE_FUNCTION. For AGGREGATE_FUNCTION, if set to false, it is equivalent to adding \"NOT AGGREGATE\" clause in DDL; Otherwise, it is equivalent to omitting \"NOT AGGREGATE\" clause in DDL.", @@ -2593,11 +2593,11 @@ "id": "BigLakeConfiguration", "properties": { "connectionId": { - "description": "Required. The connection specifying the credentials to be used to read and write to external storage, such as Cloud Storage. The connection_id can have the form `{project}.{location}.{connection_id}` or `projects/{project}/locations/{location}/connections/{connection_id}\".", + "description": "Optional. The connection specifying the credentials to be used to read and write to external storage, such as Cloud Storage. The connection_id can have the form `{project}.{location}.{connection_id}` or `projects/{project}/locations/{location}/connections/{connection_id}\".", "type": "string" }, "fileFormat": { - "description": "Required. 
The file format the table data is stored in.", + "description": "Optional. The file format the table data is stored in.", "enum": [ "FILE_FORMAT_UNSPECIFIED", "PARQUET" @@ -2609,11 +2609,11 @@ "type": "string" }, "storageUri": { - "description": "Required. The fully qualified location prefix of the external folder where table data is stored. The '*' wildcard character is not allowed. The URI should be in the format `gs://bucket/path_to_table/`", + "description": "Optional. The fully qualified location prefix of the external folder where table data is stored. The '*' wildcard character is not allowed. The URI should be in the format `gs://bucket/path_to_table/`", "type": "string" }, "tableFormat": { - "description": "Required. The table format the metadata only snapshots are stored in.", + "description": "Optional. The table format the metadata only snapshots are stored in.", "enum": [ "TABLE_FORMAT_UNSPECIFIED", "ICEBERG" @@ -5611,7 +5611,7 @@ "Default value, which will be treated as ENTERPRISE.", "Standard edition.", "Enterprise edition.", - "Enterprise plus edition." + "Enterprise Plus edition." ], "readOnly": true, "type": "string" @@ -8525,6 +8525,20 @@ "readOnly": true, "type": "string" }, + "managedTableType": { + "description": "Optional. If set, overrides the default managed table type configured in the dataset.", + "enum": [ + "MANAGED_TABLE_TYPE_UNSPECIFIED", + "NATIVE", + "ICEBERG" + ], + "enumDescriptions": [ + "No managed table type specified.", + "The managed table is a native BigQuery table.", + "The managed table is a BigQuery table for Apache Iceberg." + ], + "type": "string" + }, "materializedView": { "$ref": "MaterializedViewDefinition", "description": "Optional. The materialized view definition." @@ -9408,7 +9422,7 @@ "type": "number" }, "contributionMetric": { - "description": "The contribution metric. Applies to contribution analysis models. Allowed formats supported are for summable and summable ratio contribution metrics. These include expressions such as \"SUM(x)\" or \"SUM(x)/SUM(y)\", where x and y are column names from the base table.", + "description": "The contribution metric. Applies to contribution analysis models. Allowed formats supported are for summable and summable ratio contribution metrics. These include expressions such as `SUM(x)` or `SUM(x)/SUM(y)`, where x and y are column names from the base table.", "type": "string" }, "dartNormalizeType": { diff --git a/discovery/googleapis/bigqueryreservation__v1.json b/discovery/googleapis/bigqueryreservation__v1.json index 9c3c50d88..2f4d780ab 100644 --- a/discovery/googleapis/bigqueryreservation__v1.json +++ b/discovery/googleapis/bigqueryreservation__v1.json @@ -110,7 +110,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20240921", + "revision": "20241209", "rootUrl": "https://bigqueryreservation.googleapis.com/", "servicePath": "", "title": "BigQuery Reservation API", @@ -639,7 +639,7 @@ ] }, "failoverReservation": { - "description": "Failover a reservation to the secondary location. The operation should be done in the current secondary location, which will be promoted to the new primary location for the reservation. Attempting to failover a reservation in the current primary location will fail with the error code `google.rpc.Code.FAILED_PRECONDITION`.", + "description": "Fail over a reservation to the secondary location. The operation should be done in the current secondary location, which will be promoted to the new primary location for the reservation. 
Attempting to fail over a reservation in the current primary location will fail with the error code `google.rpc.Code.FAILED_PRECONDITION`.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/reservations/{reservationsId}:failoverReservation", "httpMethod": "POST", "id": "bigqueryreservation.projects.locations.reservations.failoverReservation", @@ -948,6 +948,10 @@ "description": "The resource which will use the reservation. E.g. `projects/myproject`, `folders/123`, or `organizations/456`.", "type": "string" }, + "enableGeminiInBigquery": { + "description": "Optional. This field controls if \"Gemini in BigQuery\" (https://cloud.google.com/gemini/docs/bigquery/overview) features should be enabled for this reservation assignment, which is not on by default. \"Gemini in BigQuery\" has a distinct compliance posture from BigQuery. If this field is set to true, the assignment job type is QUERY, and the parent reservation edition is ENTERPRISE_PLUS, then the assignment will give the grantee project/organization access to \"Gemini in BigQuery\" features.", + "type": "boolean" + }, "jobType": { "description": "Which type of jobs will use the reservation.", "enum": [ @@ -996,7 +1000,7 @@ "id": "Autoscale", "properties": { "currentSlots": { - "description": "Output only. The slot capacity added to this reservation when autoscale happens. Will be between [0, max_slots].", + "description": "Output only. The slot capacity added to this reservation when autoscale happens. Will be between [0, max_slots]. Note: after users reduce max_slots, it may take a while before it can be propagated, so current_slots may stay in the original value and could be larger than max_slots for that brief period (less than one minute)", "format": "int64", "readOnly": true, "type": "string" }, @@ -1066,7 +1070,7 @@ "Default value, which will be treated as ENTERPRISE.", "Standard edition.", "Enterprise edition.", - "Enterprise plus edition." + "Enterprise Plus edition." ], "type": "string" }, @@ -1298,7 +1302,7 @@ "description": "The configuration parameters for the auto scaling feature." }, "concurrency": { - "description": "Job concurrency target which sets a soft upper bound on the number of jobs that can run concurrently in this reservation. This is a soft target due to asynchronous nature of the system and various optimizations for small queries. Default value is 0 which means that concurrency target will be automatically computed by the system. NOTE: this field is exposed as target job concurrency in the Information Schema, DDL and BQ CLI.", + "description": "Job concurrency target which sets a soft upper bound on the number of jobs that can run concurrently in this reservation. This is a soft target due to asynchronous nature of the system and various optimizations for small queries. Default value is 0 which means that concurrency target will be automatically computed by the system. NOTE: this field is exposed as target job concurrency in the Information Schema, DDL and BigQuery CLI.", "format": "int64", "type": "string" }, @@ -1320,7 +1324,7 @@ "Default value, which will be treated as ENTERPRISE.", "Standard edition.", "Enterprise edition.", - "Enterprise plus edition." + "Enterprise Plus edition." ], "type": "string" }, @@ -1328,6 +1332,13 @@ "description": "If false, any query or pipeline job using this reservation will use idle slots from other reservations within the same admin project. 
If true, a query or pipeline job using this reservation will execute with the slot capacity specified in the slot_capacity field at most.", "type": "boolean" }, + "labels": { + "additionalProperties": { + "type": "string" + }, + "description": "Optional. The labels associated with this reservation. You can use these to organize and group your reservations. You can set this property when inserting or updating a reservation.", + "type": "object" + }, "multiRegionAuxiliary": { "description": "Applicable only for reservations located within one of the BigQuery multi-regions (US or EU). If set to true, this reservation is placed in the organization's secondary region which is designated for disaster recovery purposes. If false, this reservation is placed in the organization's default region. NOTE: this is a preview feature. Project must be allow-listed in order to set this field.", "type": "boolean" @@ -1337,15 +1348,17 @@ "type": "string" }, "originalPrimaryLocation": { - "description": "Optional. The original primary location of the reservation which is set only during its creation and remains unchanged afterwards. It can be used by the customer to answer questions about disaster recovery billing. The field is output only for customers and should not be specified, however, the google.api.field_behavior is not set to OUTPUT_ONLY since these fields are set in rerouted requests sent across regions.", + "description": "Output only. The location where the reservation was originally created. This is set only during the failover reservation's creation. All billing charges for the failover reservation will be applied to this location.", + "readOnly": true, "type": "string" }, "primaryLocation": { - "description": "Optional. The primary location of the reservation. The field is only meaningful for reservation used for cross region disaster recovery. The field is output only for customers and should not be specified, however, the google.api.field_behavior is not set to OUTPUT_ONLY since these fields are set in rerouted requests sent across regions.", + "description": "Output only. The current location of the reservation's primary replica. This field is only set for reservations using the managed disaster recovery feature.", + "readOnly": true, "type": "string" }, "secondaryLocation": { - "description": "Optional. The secondary location of the reservation which is used for cross region disaster recovery purposes. Customer can set this in create/update reservation calls to create a failover reservation or convert a non-failover reservation to a failover reservation.", + "description": "Optional. The current location of the reservation's secondary replica. This field is only set for reservations using the managed disaster recovery feature. 
Users can set this in create reservation calls to create a failover reservation or in update reservation calls to convert a non-failover reservation to a failover reservation (or vice versa).", "type": "string" }, "slotCapacity": { diff --git a/discovery/googleapis/bigtableadmin__v2.json b/discovery/googleapis/bigtableadmin__v2.json index c81faa654..f43844308 100644 --- a/discovery/googleapis/bigtableadmin__v2.json +++ b/discovery/googleapis/bigtableadmin__v2.json @@ -49,7 +49,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20241008", + "revision": "20241203", "rootUrl": "https://bigtableadmin.googleapis.com/", "servicePath": "", "title": "Cloud Bigtable Admin API", @@ -2332,7 +2332,7 @@ "type": "string" }, "retentionPeriod": { - "description": "Required. How long the automated backups should be retained. The only supported value at this time is 3 days.", + "description": "Required. How long the automated backups should be retained. Values must be at least 3 days and at most 90 days.", "format": "google-duration", "type": "string" } @@ -4012,7 +4012,7 @@ "type": "string" }, "optimizeTableOperationName": { - "description": "If exists, the name of the long-running operation that will be used to track the post-restore optimization process to optimize the performance of the restored table. The metadata type of the long-running operation is OptimizeRestoreTableMetadata. The response type is Empty. This long-running operation may be automatically created by the system if applicable after the RestoreTable long-running operation completes successfully. This operation may not be created if the table is already optimized or the restore was not successful.", + "description": "If exists, the name of the long-running operation that will be used to track the post-restore optimization process to optimize the performance of the restored table. The metadata type of the long-running operation is OptimizeRestoredTableMetadata. The response type is Empty. This long-running operation may be automatically created by the system if applicable after the RestoreTable long-running operation completes successfully. This operation may not be created if the table is already optimized or the restore was not successful.", "type": "string" }, "progress": { diff --git a/discovery/googleapis/binaryauthorization__v1.json b/discovery/googleapis/binaryauthorization__v1.json index b4240feec..fd59d41a1 100644 --- a/discovery/googleapis/binaryauthorization__v1.json +++ b/discovery/googleapis/binaryauthorization__v1.json @@ -25,7 +25,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20240614", + "revision": "20241115", "rootUrl": "https://binaryauthorization.googleapis.com/", "servicePath": "", "title": "Binary Authorization API", @@ -787,7 +787,6 @@ "type": "string" }, "requireAttestationsBy": { - "description": "Optional. The resource names of the attestors that must attest to a container image, in the format `projects/*/attestors/*`. Each attestor must exist before a policy can reference it. To add an attestor to a policy the principal issuing the policy change request must be able to read the attestor resource. 
Note: this field must be non-empty when the `evaluation_mode` field specifies `REQUIRE_ATTESTATION`, otherwise it must be empty.", "items": { "type": "string" }, diff --git a/discovery/googleapis/blockchainnodeengine__v1.json b/discovery/googleapis/blockchainnodeengine__v1.json index 900a52afc..919a0458f 100644 --- a/discovery/googleapis/blockchainnodeengine__v1.json +++ b/discovery/googleapis/blockchainnodeengine__v1.json @@ -25,7 +25,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20240504", + "revision": "20241204", "rootUrl": "https://blockchainnodeengine.googleapis.com/", "servicePath": "", "title": "Blockchain Node Engine API", @@ -366,7 +366,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "blockchainnodeengine.projects.locations.operations.cancel", diff --git a/discovery/googleapis/calendar__v3.json b/discovery/googleapis/calendar__v3.json index 90acbc037..f89511933 100644 --- a/discovery/googleapis/calendar__v3.json +++ b/discovery/googleapis/calendar__v3.json @@ -36,7 +36,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20240927", + "revision": "20241101", "rootUrl": "https://www.googleapis.com/", "servicePath": "calendar/v3/", "title": "Calendar API", @@ -2365,6 +2365,10 @@ "description": "Whether attendees may have been omitted from the event's representation. When retrieving an event, this may be due to a restriction specified by the maxAttendee query parameter. When updating an event, this can be used to only update the participant's response. Optional. The default is False.", "type": "boolean" }, + "birthdayProperties": { + "$ref": "EventBirthdayProperties", + "description": "Birthday or special event data. Used if eventType is \"birthday\". Immutable." + }, "colorId": { "description": "The color of the event. This is an ID referring to an entry in the event section of the colors definition (see the colors endpoint). Optional.", "type": "string" @@ -2751,6 +2755,25 @@ }, "type": "object" }, + "EventBirthdayProperties": { + "id": "EventBirthdayProperties", + "properties": { + "contact": { + "description": "Resource name of the contact this birthday event is linked to. 
This can be used to fetch contact details from People API. Format: \"people/c12345\". Read-only.", + "type": "string" + }, + "customTypeName": { + "description": "Custom type label specified for this event. This is populated if birthdayProperties.type is set to \"custom\". Read-only.", + "type": "string" + }, + "type": { + "default": "birthday", + "description": "Type of birthday or special event. Possible values are: \n- \"anniversary\" - An anniversary other than birthday. Always has a contact. \n- \"birthday\" - A birthday event. This is the default value. \n- \"custom\" - A special date whose label is further specified in the customTypeName field. Always has a contact. \n- \"other\" - A special date which does not fall into the other categories, and does not have a custom label. Always has a contact. \n- \"self\" - Calendar owner's own birthday. Cannot have a contact. The Calendar API only supports creating events with the type \"birthday\". The type cannot be changed after the event is created.", + "type": "string" + } + }, + "type": "object" + }, "EventDateTime": { "id": "EventDateTime", "properties": { diff --git a/discovery/googleapis/certificatemanager__v1.json b/discovery/googleapis/certificatemanager__v1.json index 054c3b552..88009ef89 100644 --- a/discovery/googleapis/certificatemanager__v1.json +++ b/discovery/googleapis/certificatemanager__v1.json @@ -25,7 +25,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20240729", + "revision": "20241204", "rootUrl": "https://certificatemanager.googleapis.com/", "servicePath": "", "title": "Certificate Manager API", @@ -1021,7 +1021,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "certificatemanager.projects.locations.operations.cancel", @@ -1443,9 +1443,9 @@ "ALL_REGIONS" ], "enumDescriptions": [ - "Certificates with default scope are served from core Google data centers. If unsure, choose this option.", - "Certificates with scope EDGE_CACHE are special-purposed certificates, served from Edge Points of Presence. See https://cloud.google.com/vpc/docs/edge-locations.", - "Certificates with ALL_REGIONS scope are served from all Google Cloud regions. 
See https://cloud.google.com/compute/docs/regions-zones." + "Use the DEFAULT scope if you plan to use the certificate with global external Application Load Balancer, global external proxy Network Load Balancer, or any of the regional Google Cloud services.", + "Use the EDGE_CACHE scope if you plan to use the certificate with Media CDN. The certificates are served from Edge Points of Presence. See https://cloud.google.com/vpc/docs/edge-locations.", + "Use the ALL_REGIONS scope if you plan to use the certificate with cross-region internal Application Load Balancer. The certificates are served from all Google Cloud regions. See https://cloud.google.com/compute/docs/regions-zones." ], "type": "string" }, @@ -2131,7 +2131,7 @@ "type": "string" }, "requestedCancellation": { - "description": "Identifies whether the user has requested cancellation of the operation. Operations that have successfully been cancelled have Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Identifies whether the user has requested cancellation of the operation. Operations that have successfully been cancelled have google.longrunning.Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "type": "boolean" }, "statusMessage": { diff --git a/discovery/googleapis/chat__v1.json b/discovery/googleapis/chat__v1.json index e77dcd98a..a71366db4 100644 --- a/discovery/googleapis/chat__v1.json +++ b/discovery/googleapis/chat__v1.json @@ -100,7 +100,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20241008", + "revision": "20241210", "rootUrl": "https://chat.googleapis.com/", "servicePath": "", "title": "Google Chat API", @@ -294,7 +294,7 @@ ] }, "create": { - "description": "Creates a space with no members. Can be used to create a named space. Spaces grouped by topics aren't supported. For an example, see [Create a space](https://developers.google.com/workspace/chat/create-spaces). If you receive the error message `ALREADY_EXISTS` when creating a space, try a different `displayName`. An existing space within the Google Workspace organization might already use this display name. If you're a member of the [Developer Preview program](https://developers.google.com/workspace/preview), you can create a group chat in import mode using `spaceType.GROUP_CHAT`. Supports the following types of [authentication](https://developers.google.com/workspace/chat/authenticate-authorize): - [App authentication](https://developers.google.com/workspace/chat/authenticate-authorize-chat-app) with [administrator approval](https://support.google.com/a?p=chat-app-auth) in [Developer Preview](https://developers.google.com/workspace/preview) - [User authentication](https://developers.google.com/workspace/chat/authenticate-authorize-chat-user) When authenticating as an app, the `space.customer` field must be set in the request.", + "description": "Creates a space. Can be used to create a named space, or a group chat in `Import mode`. For an example, see [Create a space](https://developers.google.com/workspace/chat/create-spaces). 
Supports the following types of [authentication](https://developers.google.com/workspace/chat/authenticate-authorize): - [App authentication](https://developers.google.com/workspace/chat/authenticate-authorize-chat-app) with [administrator approval](https://support.google.com/a?p=chat-app-auth) in [Developer Preview](https://developers.google.com/workspace/preview) - [User authentication](https://developers.google.com/workspace/chat/authenticate-authorize-chat-user) When authenticating as an app, the `space.customer` field must be set in the request. Space membership upon creation depends on whether the space is created in `Import mode`: * **Import mode:** No members are created. * **All other modes:** The calling user is added as a member. This is: * The app itself when using app authentication. * The human user when using user authentication. If you receive the error message `ALREADY_EXISTS` when creating a space, try a different `displayName`. An existing space within the Google Workspace organization might already use this display name.", "flatPath": "v1/spaces", "httpMethod": "POST", "id": "chat.spaces.create", @@ -338,7 +338,7 @@ "type": "string" }, "useAdminAccess": { - "description": "When `true`, the method runs using the user's Google Workspace administrator privileges. The calling user must be a Google Workspace administrator with the [manage chat and spaces conversations privilege](https://support.google.com/a/answer/13369245). Requires the `chat.admin.delete` [OAuth 2.0 scope](https://developers.google.com/workspace/chat/authenticate-authorize#chat-api-scopes).", + "description": "Optional. When `true`, the method runs using the user's Google Workspace administrator privileges. The calling user must be a Google Workspace administrator with the [manage chat and spaces conversations privilege](https://support.google.com/a/answer/13369245). Requires the `chat.admin.delete` [OAuth 2.0 scope](https://developers.google.com/workspace/chat/authenticate-authorize#chat-api-scopes).", "location": "query", "type": "boolean" } @@ -394,7 +394,7 @@ "type": "string" }, "useAdminAccess": { - "description": "When `true`, the method runs using the user's Google Workspace administrator privileges. The calling user must be a Google Workspace administrator with the [manage chat and spaces conversations privilege](https://support.google.com/a/answer/13369245). Requires the `chat.admin.spaces` or `chat.admin.spaces.readonly` [OAuth 2.0 scopes](https://developers.google.com/workspace/chat/authenticate-authorize#chat-api-scopes).", + "description": "Optional. When `true`, the method runs using the user's Google Workspace administrator privileges. The calling user must be a Google Workspace administrator with the [manage chat and spaces conversations privilege](https://support.google.com/a/answer/13369245). Requires the `chat.admin.spaces` or `chat.admin.spaces.readonly` [OAuth 2.0 scopes](https://developers.google.com/workspace/chat/authenticate-authorize#chat-api-scopes).", "location": "query", "type": "boolean" } @@ -456,7 +456,7 @@ ], "parameters": { "name": { - "description": "Resource name of the space. Format: `spaces/{space}` Where `{space}` represents the system-assigned ID for the space. You can obtain the space ID by calling the [`spaces.list()`](https://developers.google.com/workspace/chat/api/reference/rest/v1/spaces/list) method or from the space URL. 
For example, if the space URL is `https://mail.google.com/mail/u/0/#chat/space/AAAAAAAAA`, the space ID is `AAAAAAAAA`.", + "description": "Identifier. Resource name of the space. Format: `spaces/{space}` Where `{space}` represents the system-assigned ID for the space. You can obtain the space ID by calling the [`spaces.list()`](https://developers.google.com/workspace/chat/api/reference/rest/v1/spaces/list) method or from the space URL. For example, if the space URL is `https://mail.google.com/mail/u/0/#chat/space/AAAAAAAAA`, the space ID is `AAAAAAAAA`.", "location": "path", "pattern": "^spaces/[^/]+$", "required": true, @@ -469,7 +469,7 @@ "type": "string" }, "useAdminAccess": { - "description": "When `true`, the method runs using the user's Google Workspace administrator privileges. The calling user must be a Google Workspace administrator with the [manage chat and spaces conversations privilege](https://support.google.com/a/answer/13369245). Requires the `chat.admin.spaces` [OAuth 2.0 scope](https://developers.google.com/workspace/chat/authenticate-authorize#chat-api-scopes). Some `FieldMask` values are not supported using admin access. For details, see the description of `update_mask`.", + "description": "Optional. When `true`, the method runs using the user's Google Workspace administrator privileges. The calling user must be a Google Workspace administrator with the [manage chat and spaces conversations privilege](https://support.google.com/a/answer/13369245). Requires the `chat.admin.spaces` [OAuth 2.0 scope](https://developers.google.com/workspace/chat/authenticate-authorize#chat-api-scopes). Some `FieldMask` values are not supported using admin access. For details, see the description of `update_mask`.", "location": "query", "type": "boolean" } @@ -571,7 +571,7 @@ "type": "string" }, "useAdminAccess": { - "description": "When `true`, the method runs using the user's Google Workspace administrator privileges. The calling user must be a Google Workspace administrator with the [manage chat and spaces conversations privilege](https://support.google.com/a/answer/13369245). Requires the `chat.admin.memberships` [OAuth 2.0 scope](https://developers.google.com/workspace/chat/authenticate-authorize#chat-api-scopes). Creating app memberships or creating memberships for users outside the administrator's Google Workspace organization isn't supported using admin access.", + "description": "Optional. When `true`, the method runs using the user's Google Workspace administrator privileges. The calling user must be a Google Workspace administrator with the [manage chat and spaces conversations privilege](https://support.google.com/a/answer/13369245). Requires the `chat.admin.memberships` [OAuth 2.0 scope](https://developers.google.com/workspace/chat/authenticate-authorize#chat-api-scopes). Creating app memberships or creating memberships for users outside the administrator's Google Workspace organization isn't supported using admin access.", "location": "query", "type": "boolean" } @@ -608,7 +608,7 @@ "type": "string" }, "useAdminAccess": { - "description": "When `true`, the method runs using the user's Google Workspace administrator privileges. The calling user must be a Google Workspace administrator with the [manage chat and spaces conversations privilege](https://support.google.com/a/answer/13369245). Requires the `chat.admin.memberships` [OAuth 2.0 scope](https://developers.google.com/workspace/chat/authenticate-authorize#chat-api-scopes). 
Deleting app memberships in a space isn't supported using admin access.", + "description": "Optional. When `true`, the method runs using the user's Google Workspace administrator privileges. The calling user must be a Google Workspace administrator with the [manage chat and spaces conversations privilege](https://support.google.com/a/answer/13369245). Requires the `chat.admin.memberships` [OAuth 2.0 scope](https://developers.google.com/workspace/chat/authenticate-authorize#chat-api-scopes). Deleting app memberships in a space isn't supported using admin access.", "location": "query", "type": "boolean" } @@ -642,7 +642,7 @@ "type": "string" }, "useAdminAccess": { - "description": "When `true`, the method runs using the user's Google Workspace administrator privileges. The calling user must be a Google Workspace administrator with the [manage chat and spaces conversations privilege](https://support.google.com/a/answer/13369245). Requires the `chat.admin.memberships` or `chat.admin.memberships.readonly` [OAuth 2.0 scopes](https://developers.google.com/workspace/chat/authenticate-authorize#chat-api-scopes). Getting app memberships in a space isn't supported when using admin access.", + "description": "Optional. When `true`, the method runs using the user's Google Workspace administrator privileges. The calling user must be a Google Workspace administrator with the [manage chat and spaces conversations privilege](https://support.google.com/a/answer/13369245). Requires the `chat.admin.memberships` or `chat.admin.memberships.readonly` [OAuth 2.0 scopes](https://developers.google.com/workspace/chat/authenticate-authorize#chat-api-scopes). Getting app memberships in a space isn't supported when using admin access.", "location": "query", "type": "boolean" } @@ -702,7 +702,7 @@ "type": "boolean" }, "useAdminAccess": { - "description": "When `true`, the method runs using the user's Google Workspace administrator privileges. The calling user must be a Google Workspace administrator with the [manage chat and spaces conversations privilege](https://support.google.com/a/answer/13369245). Requires either the `chat.admin.memberships.readonly` or `chat.admin.memberships` [OAuth 2.0 scope](https://developers.google.com/workspace/chat/authenticate-authorize#chat-api-scopes). Listing app memberships in a space isn't supported when using admin access.", + "description": "Optional. When `true`, the method runs using the user's Google Workspace administrator privileges. The calling user must be a Google Workspace administrator with the [manage chat and spaces conversations privilege](https://support.google.com/a/answer/13369245). Requires either the `chat.admin.memberships.readonly` or `chat.admin.memberships` [OAuth 2.0 scope](https://developers.google.com/workspace/chat/authenticate-authorize#chat-api-scopes). Listing app memberships in a space isn't supported when using admin access.", "location": "query", "type": "boolean" } @@ -730,7 +730,7 @@ ], "parameters": { "name": { - "description": "Resource name of the membership, assigned by the server. Format: `spaces/{space}/members/{member}`", + "description": "Identifier. Resource name of the membership, assigned by the server. Format: `spaces/{space}/members/{member}`", "location": "path", "pattern": "^spaces/[^/]+/members/[^/]+$", "required": true, @@ -743,7 +743,7 @@ "type": "string" }, "useAdminAccess": { - "description": "When `true`, the method runs using the user's Google Workspace administrator privileges. 
The calling user must be a Google Workspace administrator with the [manage chat and spaces conversations privilege](https://support.google.com/a/answer/13369245). Requires the `chat.admin.memberships` [OAuth 2.0 scope](https://developers.google.com/workspace/chat/authenticate-authorize#chat-api-scopes).", + "description": "Optional. When `true`, the method runs using the user's Google Workspace administrator privileges. The calling user must be a Google Workspace administrator with the [manage chat and spaces conversations privilege](https://support.google.com/a/answer/13369245). Requires the `chat.admin.memberships` [OAuth 2.0 scope](https://developers.google.com/workspace/chat/authenticate-authorize#chat-api-scopes).", "location": "query", "type": "boolean" } @@ -767,7 +767,7 @@ "messages": { "methods": { "create": { - "description": "Creates a message in a Google Chat space. For an example, see [Send a message](https://developers.google.com/workspace/chat/create-messages). The `create()` method requires either [user authentication](https://developers.google.com/workspace/chat/authenticate-authorize-chat-user) or [app authentication](https://developers.google.com/workspace/chat/authorize-import). Chat attributes the message sender differently depending on the type of authentication that you use in your request. The following image shows how Chat attributes a message when you use app authentication. Chat displays the Chat app as the message sender. The content of the message can contain text (`text`), cards (`cardsV2`), and accessory widgets (`accessoryWidgets`). ![Message sent with app authentication](https://developers.google.com/workspace/chat/images/message-app-auth.svg) The following image shows how Chat attributes a message when you use user authentication. Chat displays the user as the message sender and attributes the Chat app to the message by displaying its name. The content of message can only contain text (`text`). ![Message sent with user authentication](https://developers.google.com/workspace/chat/images/message-user-auth.svg) The maximum message size, including the message contents, is 32,000 bytes.", + "description": "Creates a message in a Google Chat space. For an example, see [Send a message](https://developers.google.com/workspace/chat/create-messages). The `create()` method requires either [user authentication](https://developers.google.com/workspace/chat/authenticate-authorize-chat-user) or [app authentication](https://developers.google.com/workspace/chat/authorize-import). Chat attributes the message sender differently depending on the type of authentication that you use in your request. The following image shows how Chat attributes a message when you use app authentication. Chat displays the Chat app as the message sender. The content of the message can contain text (`text`), cards (`cardsV2`), and accessory widgets (`accessoryWidgets`). ![Message sent with app authentication](https://developers.google.com/workspace/chat/images/message-app-auth.svg) The following image shows how Chat attributes a message when you use user authentication. Chat displays the user as the message sender and attributes the Chat app to the message by displaying its name. The content of message can only contain text (`text`). ![Message sent with user authentication](https://developers.google.com/workspace/chat/images/message-user-auth.svg) The maximum message size, including the message contents, is 32,000 bytes. 
For [webhook](https://developers.google.com/workspace/chat/quickstart/webhooks) requests, the response doesn't contain the full message. The response only populates the `name` and `thread.name` fields in addition to the information that was in the request.", "flatPath": "v1/spaces/{spacesId}/messages", "httpMethod": "POST", "id": "chat.spaces.messages.create", @@ -781,7 +781,7 @@ "type": "string" }, "messageReplyOption": { - "description": "Optional. Specifies whether a message starts a thread or replies to one. Only supported in named spaces.", + "description": "Optional. Specifies whether a message starts a thread or replies to one. Only supported in named spaces. When [responding to user interactions](https://developers.google.com/workspace/chat/receive-respond-interactions), this field is ignored. For interactions within a thread, the reply is created in the same thread. Otherwise, the reply is created as a new thread.", "enum": [ "MESSAGE_REPLY_OPTION_UNSPECIFIED", "REPLY_MESSAGE_FALLBACK_TO_NEW_THREAD", @@ -838,7 +838,7 @@ ], "parameters": { "force": { - "description": "When `true`, deleting a message also deletes its threaded replies. When `false`, if a message has threaded replies, deletion fails. Only applies when [authenticating as a user](https://developers.google.com/workspace/chat/authenticate-authorize-chat-user). Has no effect when [authenticating as a Chat app] (https://developers.google.com/workspace/chat/authenticate-authorize-chat-app).", + "description": "Optional. When `true`, deleting a message also deletes its threaded replies. When `false`, if a message has threaded replies, deletion fails. Only applies when [authenticating as a user](https://developers.google.com/workspace/chat/authenticate-authorize-chat-user). Has no effect when [authenticating as a Chat app] (https://developers.google.com/workspace/chat/authenticate-authorize-chat-app).", "location": "query", "type": "boolean" }, @@ -897,23 +897,23 @@ ], "parameters": { "filter": { - "description": "A query filter. You can filter messages by date (`create_time`) and thread (`thread.name`). To filter messages by the date they were created, specify the `create_time` with a timestamp in [RFC-3339](https://www.rfc-editor.org/rfc/rfc3339) format and double quotation marks. For example, `\"2023-04-21T11:30:00-04:00\"`. You can use the greater than operator `>` to list messages that were created after a timestamp, or the less than operator `<` to list messages that were created before a timestamp. To filter messages within a time interval, use the `AND` operator between two timestamps. To filter by thread, specify the `thread.name`, formatted as `spaces/{space}/threads/{thread}`. You can only specify one `thread.name` per query. To filter by both thread and date, use the `AND` operator in your query. For example, the following queries are valid: ``` create_time > \"2012-04-21T11:30:00-04:00\" create_time > \"2012-04-21T11:30:00-04:00\" AND thread.name = spaces/AAAAAAAAAAA/threads/123 create_time > \"2012-04-21T11:30:00+00:00\" AND create_time < \"2013-01-01T00:00:00+00:00\" AND thread.name = spaces/AAAAAAAAAAA/threads/123 thread.name = spaces/AAAAAAAAAAA/threads/123 ``` Invalid queries are rejected by the server with an `INVALID_ARGUMENT` error.", + "description": "Optional. A query filter. You can filter messages by date (`create_time`) and thread (`thread.name`). 
To filter messages by the date they were created, specify the `create_time` with a timestamp in [RFC-3339](https://www.rfc-editor.org/rfc/rfc3339) format and double quotation marks. For example, `\"2023-04-21T11:30:00-04:00\"`. You can use the greater than operator `>` to list messages that were created after a timestamp, or the less than operator `<` to list messages that were created before a timestamp. To filter messages within a time interval, use the `AND` operator between two timestamps. To filter by thread, specify the `thread.name`, formatted as `spaces/{space}/threads/{thread}`. You can only specify one `thread.name` per query. To filter by both thread and date, use the `AND` operator in your query. For example, the following queries are valid: ``` create_time > \"2012-04-21T11:30:00-04:00\" create_time > \"2012-04-21T11:30:00-04:00\" AND thread.name = spaces/AAAAAAAAAAA/threads/123 create_time > \"2012-04-21T11:30:00+00:00\" AND create_time < \"2013-01-01T00:00:00+00:00\" AND thread.name = spaces/AAAAAAAAAAA/threads/123 thread.name = spaces/AAAAAAAAAAA/threads/123 ``` Invalid queries are rejected by the server with an `INVALID_ARGUMENT` error.", "location": "query", "type": "string" }, "orderBy": { - "description": "Optional, if resuming from a previous query. How the list of messages is ordered. Specify a value to order by an ordering operation. Valid ordering operation values are as follows: - `ASC` for ascending. - `DESC` for descending. The default ordering is `create_time ASC`.", + "description": "Optional. How the list of messages is ordered. Specify a value to order by an ordering operation. Valid ordering operation values are as follows: - `ASC` for ascending. - `DESC` for descending. The default ordering is `create_time ASC`.", "location": "query", "type": "string" }, "pageSize": { - "description": "The maximum number of messages returned. The service might return fewer messages than this value. If unspecified, at most 25 are returned. The maximum value is 1000. If you use a value more than 1000, it's automatically changed to 1000. Negative values return an `INVALID_ARGUMENT` error.", + "description": "Optional. The maximum number of messages returned. The service might return fewer messages than this value. If unspecified, at most 25 are returned. The maximum value is 1000. If you use a value more than 1000, it's automatically changed to 1000. Negative values return an `INVALID_ARGUMENT` error.", "format": "int32", "location": "query", "type": "integer" }, "pageToken": { - "description": "Optional, if resuming from a previous query. A page token received from a previous list messages call. Provide this parameter to retrieve the subsequent page. When paginating, all other parameters provided should match the call that provided the page token. Passing different values to the other parameters might lead to unexpected results.", + "description": "Optional. A page token received from a previous list messages call. Provide this parameter to retrieve the subsequent page. When paginating, all other parameters provided should match the call that provided the page token. Passing different values to the other parameters might lead to unexpected results.", "location": "query", "type": "string" }, @@ -925,7 +925,7 @@ "type": "string" }, "showDeleted": { - "description": "Whether to include deleted messages. Deleted messages include deleted time and metadata about their deletion, but message content is unavailable.", + "description": "Optional. Whether to include deleted messages. 
Deleted messages include deleted time and metadata about their deletion, but message content is unavailable.", "location": "query", "type": "boolean" } @@ -955,7 +955,7 @@ "type": "boolean" }, "name": { - "description": "Resource name of the message. Format: `spaces/{space}/messages/{message}` Where `{space}` is the ID of the space where the message is posted and `{message}` is a system-assigned ID for the message. For example, `spaces/AAAAAAAAAAA/messages/BBBBBBBBBBB.BBBBBBBBBBB`. If you set a custom ID when you create a message, you can use this ID to specify the message in a request by replacing `{message}` with the value from the `clientAssignedMessageId` field. For example, `spaces/AAAAAAAAAAA/messages/client-custom-name`. For details, see [Name a message](https://developers.google.com/workspace/chat/create-messages#name_a_created_message).", + "description": "Identifier. Resource name of the message. Format: `spaces/{space}/messages/{message}` Where `{space}` is the ID of the space where the message is posted and `{message}` is a system-assigned ID for the message. For example, `spaces/AAAAAAAAAAA/messages/BBBBBBBBBBB.BBBBBBBBBBB`. If you set a custom ID when you create a message, you can use this ID to specify the message in a request by replacing `{message}` with the value from the `clientAssignedMessageId` field. For example, `spaces/AAAAAAAAAAA/messages/client-custom-name`. For details, see [Name a message](https://developers.google.com/workspace/chat/create-messages#name_a_created_message).", "location": "path", "pattern": "^spaces/[^/]+/messages/[^/]+$", "required": true, @@ -996,7 +996,7 @@ "type": "boolean" }, "name": { - "description": "Resource name of the message. Format: `spaces/{space}/messages/{message}` Where `{space}` is the ID of the space where the message is posted and `{message}` is a system-assigned ID for the message. For example, `spaces/AAAAAAAAAAA/messages/BBBBBBBBBBB.BBBBBBBBBBB`. If you set a custom ID when you create a message, you can use this ID to specify the message in a request by replacing `{message}` with the value from the `clientAssignedMessageId` field. For example, `spaces/AAAAAAAAAAA/messages/client-custom-name`. For details, see [Name a message](https://developers.google.com/workspace/chat/create-messages#name_a_created_message).", + "description": "Identifier. Resource name of the message. Format: `spaces/{space}/messages/{message}` Where `{space}` is the ID of the space where the message is posted and `{message}` is a system-assigned ID for the message. For example, `spaces/AAAAAAAAAAA/messages/BBBBBBBBBBB.BBBBBBBBBBB`. If you set a custom ID when you create a message, you can use this ID to specify the message in a request by replacing `{message}` with the value from the `clientAssignedMessageId` field. For example, `spaces/AAAAAAAAAAA/messages/client-custom-name`. For details, see [Name a message](https://developers.google.com/workspace/chat/create-messages#name_a_created_message).", "location": "path", "pattern": "^spaces/[^/]+/messages/[^/]+$", "required": true, @@ -1216,7 +1216,7 @@ "type": "integer" }, "pageToken": { - "description": "A page token, received from a previous list space events call. Provide this to retrieve the subsequent page. When paginating, all other parameters provided to list space events must match the call that provided the page token. Passing different values to the other parameters might lead to unexpected results.", + "description": "Optional. A page token, received from a previous list space events call. 
Provide this to retrieve the subsequent page. When paginating, all other parameters provided to list space events must match the call that provided the page token. Passing different values to the other parameters might lead to unexpected results.", "location": "query", "type": "string" }, @@ -1558,7 +1558,7 @@ "properties": { "attachmentDataRef": { "$ref": "AttachmentDataRef", - "description": "A reference to the attachment data. This field is used with the media API to download the attachment data." + "description": "Optional. A reference to the attachment data. This field is used to create or update messages with attachments, or with the media API to download the attachment data." }, "contentName": { "description": "Output only. The original file name for the content, not the full path.", @@ -1581,7 +1581,7 @@ "readOnly": true }, "name": { - "description": "Resource name of the attachment, in the form `spaces/{space}/messages/{message}/attachments/{attachment}`.", + "description": "Optional. Resource name of the attachment, in the form `spaces/{space}/messages/{message}/attachments/{attachment}`.", "type": "string" }, "source": { @@ -1612,11 +1612,11 @@ "id": "AttachmentDataRef", "properties": { "attachmentUploadToken": { - "description": "Opaque token containing a reference to an uploaded attachment. Treated by clients as an opaque string and used to create or update Chat messages with attachments.", + "description": "Optional. Opaque token containing a reference to an uploaded attachment. Treated by clients as an opaque string and used to create or update Chat messages with attachments.", "type": "string" }, "resourceName": { - "description": "The resource name of the attachment data. This field is used with the media API to download the attachment data.", + "description": "Optional. The resource name of the attachment data. This field is used with the media API to download the attachment data.", "type": "string" } }, @@ -2001,7 +2001,7 @@ "Default value. Unspecified.", "A user opens a dialog.", "A user clicks an interactive element of a dialog. For example, a user fills out information in a dialog and clicks a button to submit the information.", - "A user closes a dialog without submitting information, or the dialog is canceled." + "A user closes a dialog without submitting information. The Chat app only receives this interaction event when users click the close icon in the top right corner of the dialog. When the user closes the dialog by other means (such as refreshing the browser, clicking outside the dialog box, or pressing the escape key), no event is sent. ." ], "type": "string" }, @@ -2119,7 +2119,7 @@ "readOnly": true }, "unicode": { - "description": "A basic emoji represented by a unicode string.", + "description": "Optional. A basic emoji represented by a unicode string.", "type": "string" } }, @@ -2131,11 +2131,13 @@ "properties": { "emoji": { "$ref": "Emoji", - "description": "Emoji associated with the reactions." + "description": "Output only. Emoji associated with the reactions.", + "readOnly": true }, "reactionCount": { - "description": "The total number of reactions using the associated emoji.", + "description": "Output only. The total number of reactions using the associated emoji.", "format": "int32", + "readOnly": true, "type": "integer" } }, @@ -2283,6 +2285,24 @@ "text": { "description": "The text displayed inside the button.", "type": "string" + }, + "type": { + "description": "Optional. The type of a button. If unset, button type defaults to `OUTLINED`. 
If the `color` field is set, the button type is forced to `FILLED` and any value set for this field is ignored. [Google Chat apps](https://developers.google.com/workspace/chat):", + "enum": [ + "TYPE_UNSPECIFIED", + "OUTLINED", + "FILLED", + "FILLED_TONAL", + "BORDERLESS" + ], + "enumDescriptions": [ + "Don't use. Unspecified.", + "Outlined buttons are medium-emphasis buttons. They usually contain actions that are important, but aren’t the primary action in a Chat app or an add-on.", + "A filled button has a container with a solid color. It has the most visual impact and is recommended for the important and primary action in a Chat app or an add-on.", + "A filled tonal button is an alternative middle ground between filled and outlined buttons. They’re useful in contexts where a lower-priority button requires slightly more emphasis than an outline button would give.", + "A button does not have an invisible container in its default state. It is often used for the lowest priority actions, especially when presenting multiple options." + ], + "type": "string" } }, "type": "object" @@ -2302,7 +2322,7 @@ "type": "object" }, "GoogleAppsCardV1Card": { - "description": "A card interface displayed in a Google Chat message or Google Workspace Add-on. Cards support a defined layout, interactive UI elements like buttons, and rich media like images. Use cards to present detailed information, gather information from users, and guide users to take a next step. [Card builder](https://addons.gsuite.google.com/uikit/builder) To learn how to build cards, see the following documentation: * For Google Chat apps, see [Design the components of a card or dialog](https://developers.google.com/workspace/chat/design-components-card-dialog). * For Google Workspace Add-ons, see [Card-based interfaces](https://developers.google.com/apps-script/add-ons/concepts/cards). **Example: Card message for a Google Chat app** ![Example contact card](https://developers.google.com/workspace/chat/images/card_api_reference.png) To create the sample card message in Google Chat, use the following JSON: ``` { \"cardsV2\": [ { \"cardId\": \"unique-card-id\", \"card\": { \"header\": { \"title\": \"Sasha\", \"subtitle\": \"Software Engineer\", \"imageUrl\": \"https://developers.google.com/workspace/chat/images/quickstart-app-avatar.png\", \"imageType\": \"CIRCLE\", \"imageAltText\": \"Avatar for Sasha\" }, \"sections\": [ { \"header\": \"Contact Info\", \"collapsible\": true, \"uncollapsibleWidgetsCount\": 1, \"widgets\": [ { \"decoratedText\": { \"startIcon\": { \"knownIcon\": \"EMAIL\" }, \"text\": \"sasha@example.com\" } }, { \"decoratedText\": { \"startIcon\": { \"knownIcon\": \"PERSON\" }, \"text\": \"Online\" } }, { \"decoratedText\": { \"startIcon\": { \"knownIcon\": \"PHONE\" }, \"text\": \"+1 (555) 555-1234\" } }, { \"buttonList\": { \"buttons\": [ { \"text\": \"Share\", \"onClick\": { \"openLink\": { \"url\": \"https://example.com/share\" } } }, { \"text\": \"Edit\", \"onClick\": { \"action\": { \"function\": \"goToView\", \"parameters\": [ { \"key\": \"viewType\", \"value\": \"EDIT\" } ] } } } ] } } ] } ] } } ] } ```", + "description": "A card interface displayed in a Google Chat message or Google Workspace Add-on. Cards support a defined layout, interactive UI elements like buttons, and rich media like images. Use cards to present detailed information, gather information from users, and guide users to take a next step. 
[Card builder](https://addons.gsuite.google.com/uikit/builder) To learn how to build cards, see the following documentation: * For Google Chat apps, see [Design the components of a card or dialog](https://developers.google.com/workspace/chat/design-components-card-dialog). * For Google Workspace Add-ons, see [Card-based interfaces](https://developers.google.com/apps-script/add-ons/concepts/cards). Note: You can add up to 100 widgets per card. Any widgets beyond this limit are ignored. This limit applies to both card messages and dialogs in Google Chat apps, and to cards in Google Workspace Add-ons. **Example: Card message for a Google Chat app** ![Example contact card](https://developers.google.com/workspace/chat/images/card_api_reference.png) To create the sample card message in Google Chat, use the following JSON: ``` { \"cardsV2\": [ { \"cardId\": \"unique-card-id\", \"card\": { \"header\": { \"title\": \"Sasha\", \"subtitle\": \"Software Engineer\", \"imageUrl\": \"https://developers.google.com/workspace/chat/images/quickstart-app-avatar.png\", \"imageType\": \"CIRCLE\", \"imageAltText\": \"Avatar for Sasha\" }, \"sections\": [ { \"header\": \"Contact Info\", \"collapsible\": true, \"uncollapsibleWidgetsCount\": 1, \"widgets\": [ { \"decoratedText\": { \"startIcon\": { \"knownIcon\": \"EMAIL\" }, \"text\": \"sasha@example.com\" } }, { \"decoratedText\": { \"startIcon\": { \"knownIcon\": \"PERSON\" }, \"text\": \"Online\" } }, { \"decoratedText\": { \"startIcon\": { \"knownIcon\": \"PHONE\" }, \"text\": \"+1 (555) 555-1234\" } }, { \"buttonList\": { \"buttons\": [ { \"text\": \"Share\", \"onClick\": { \"openLink\": { \"url\": \"https://example.com/share\" } } }, { \"text\": \"Edit\", \"onClick\": { \"action\": { \"function\": \"goToView\", \"parameters\": [ { \"key\": \"viewType\", \"value\": \"EDIT\" } ] } } } ] } } ] } ] } } ] } ```", "id": "GoogleAppsCardV1Card", "properties": { "cardActions": { @@ -2431,6 +2451,132 @@ }, "type": "object" }, + "GoogleAppsCardV1Carousel": { + "description": "[Developer Preview](https://developers.google.com/workspace/preview): A carousel, also known as a slider, rotates and displays a list of widgets in a slideshow format, with buttons navigating to the previous or next widget. For example, this is a JSON representation of a carousel that contains three text paragraph widgets. ``` { \"carouselCards\": [ { \"widgets\": [ { \"textParagraph\": { \"text\": \"First text paragraph in carousel\", } } ] }, { \"widgets\": [ { \"textParagraph\": { \"text\": \"Second text paragraph in carousel\", } } ] }, { \"widgets\": [ { \"textParagraph\": { \"text\": \"Third text paragraph in carousel\", } } ] } ] } ``` [Google Chat apps](https://developers.google.com/workspace/chat):", + "id": "GoogleAppsCardV1Carousel", + "properties": { + "carouselCards": { + "description": "A list of cards included in the carousel.", + "items": { + "$ref": "GoogleAppsCardV1CarouselCard" + }, + "type": "array" + } + }, + "type": "object" + }, + "GoogleAppsCardV1CarouselCard": { + "description": "[Developer Preview](https://developers.google.com/workspace/preview): A card that can be displayed as a carousel item. [Google Chat apps](https://developers.google.com/workspace/chat):", + "id": "GoogleAppsCardV1CarouselCard", + "properties": { + "footerWidgets": { + "description": "A list of widgets displayed at the bottom of the carousel card. 
The widgets are displayed in the order that they are specified.", + "items": { + "$ref": "GoogleAppsCardV1NestedWidget" + }, + "type": "array" + }, + "widgets": { + "description": "A list of widgets displayed in the carousel card. The widgets are displayed in the order that they are specified.", + "items": { + "$ref": "GoogleAppsCardV1NestedWidget" + }, + "type": "array" + } + }, + "type": "object" + }, + "GoogleAppsCardV1Chip": { + "description": "A text, icon, or text and icon chip that users can click. [Google Chat apps](https://developers.google.com/workspace/chat):", + "id": "GoogleAppsCardV1Chip", + "properties": { + "altText": { + "description": "The alternative text that's used for accessibility. Set descriptive text that lets users know what the chip does. For example, if a chip opens a hyperlink, write: \"Opens a new browser tab and navigates to the Google Chat developer documentation at https://developers.google.com/workspace/chat\".", + "type": "string" + }, + "disabled": { + "description": "Whether the chip is in an inactive state and ignores user actions. Defaults to `false`.", + "type": "boolean" + }, + "enabled": { + "deprecated": true, + "description": "Whether the chip is in an active state and responds to user actions. Defaults to `true`. Deprecated. Use `disabled` instead.", + "type": "boolean" + }, + "icon": { + "$ref": "GoogleAppsCardV1Icon", + "description": "The icon image. If both `icon` and `text` are set, then the icon appears before the text." + }, + "label": { + "description": "The text displayed inside the chip.", + "type": "string" + }, + "onClick": { + "$ref": "GoogleAppsCardV1OnClick", + "description": "Optional. The action to perform when a user clicks the chip, such as opening a hyperlink or running a custom function." + } + }, + "type": "object" + }, + "GoogleAppsCardV1ChipList": { + "description": "A list of chips layed out horizontally, which can either scroll horizontally or wrap to the next line. [Google Chat apps](https://developers.google.com/workspace/chat):", + "id": "GoogleAppsCardV1ChipList", + "properties": { + "chips": { + "description": "An array of chips.", + "items": { + "$ref": "GoogleAppsCardV1Chip" + }, + "type": "array" + }, + "layout": { + "description": "Specified chip list layout.", + "enum": [ + "LAYOUT_UNSPECIFIED", + "WRAPPED", + "HORIZONTAL_SCROLLABLE" + ], + "enumDescriptions": [ + "Don't use. Unspecified.", + "Default value. The chip list wraps to the next line if there isn't enough horizontal space.", + "The chips scroll horizontally if they don't fit in the available space." + ], + "type": "string" + } + }, + "type": "object" + }, + "GoogleAppsCardV1CollapseControl": { + "description": "Represent an expand and collapse control. [Google Chat apps](https://developers.google.com/workspace/chat):", + "id": "GoogleAppsCardV1CollapseControl", + "properties": { + "collapseButton": { + "$ref": "GoogleAppsCardV1Button", + "description": "Optional. Define a customizable button to collapse the section. Both expand_button and collapse_button field must be set. Only one field set will not take into effect. If this field isn't set, the default button is used." + }, + "expandButton": { + "$ref": "GoogleAppsCardV1Button", + "description": "Optional. Define a customizable button to expand the section. Both expand_button and collapse_button field must be set. Only one field set will not take into effect. If this field isn't set, the default button is used." 
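Editorial aside, not part of the generated diff: a minimal sketch of how the new `collapseControl` and `chipList` schemas added in this hunk could be used in a card message sent with the generated Python client. The credential file path, space name, and card contents are assumed placeholders; card messages require app authentication per the `cardsV2` description.

```python
# Sketch only; names marked as placeholders are assumptions, not values from the diff.
from google.oauth2 import service_account
from googleapiclient.discovery import build

# App authentication (required for card messages); placeholder key file.
creds = service_account.Credentials.from_service_account_file(
    "service-account.json",
    scopes=["https://www.googleapis.com/auth/chat.bot"],
)
chat = build("chat", "v1", credentials=creds)

section = {
    "header": "Links",
    "collapsible": True,
    "uncollapsibleWidgetsCount": 1,
    # New in this revision: customizable expand/collapse buttons for a
    # collapsible section (both buttons must be set, per the schema).
    "collapseControl": {
        "horizontalAlignment": "START",
        "expandButton": {"text": "Show links"},
        "collapseButton": {"text": "Hide links"},
    },
    "widgets": [
        {
            # New ChipList widget: clickable chips laid out horizontally.
            "chipList": {
                "layout": "WRAPPED",
                "chips": [
                    {
                        "label": "Calendar",
                        "onClick": {"openLink": {"url": "https://example.com/calendar"}},
                    }
                ],
            }
        }
    ],
}

chat.spaces().messages().create(
    parent="spaces/AAAAAAAAAAA",  # placeholder space ID
    body={"cardsV2": [{"cardId": "chip-demo", "card": {"sections": [section]}}]},
).execute()
```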
+ }, + "horizontalAlignment": { + "description": "The horizontal alignment of the expand and collapse button.", + "enum": [ + "HORIZONTAL_ALIGNMENT_UNSPECIFIED", + "START", + "CENTER", + "END" + ], + "enumDescriptions": [ + "Don't use. Unspecified.", + "Default value. Aligns widgets to the start position of the column. For left-to-right layouts, aligns to the left. For right-to-left layouts, aligns to the right.", + "Aligns widgets to the center of the column.", + "Aligns widgets to the end position of the column. For left-to-right layouts, aligns widgets to the right. For right-to-left layouts, aligns widgets to the left." + ], + "type": "string" + } + }, + "type": "object" + }, "GoogleAppsCardV1Column": { "description": "A column. [Google Workspace Add-ons and Chat apps](https://developers.google.com/workspace/extend)", "id": "GoogleAppsCardV1Column", @@ -2802,6 +2948,25 @@ }, "type": "object" }, + "GoogleAppsCardV1NestedWidget": { + "description": "[Developer Preview](https://developers.google.com/workspace/preview): A list of widgets that can be displayed in a containing layout, such as a `CarouselCard`. [Google Chat apps](https://developers.google.com/workspace/chat):", + "id": "GoogleAppsCardV1NestedWidget", + "properties": { + "buttonList": { + "$ref": "GoogleAppsCardV1ButtonList", + "description": "A button list widget." + }, + "image": { + "$ref": "GoogleAppsCardV1Image", + "description": "An image widget." + }, + "textParagraph": { + "$ref": "GoogleAppsCardV1TextParagraph", + "description": "A text paragraph widget." + } + }, + "type": "object" + }, "GoogleAppsCardV1OnClick": { "description": "Represents how to respond when users click an interactive element on a card, such as a button. [Google Workspace Add-ons and Chat apps](https://developers.google.com/workspace/extend):", "id": "GoogleAppsCardV1OnClick", @@ -2821,6 +2986,10 @@ "openLink": { "$ref": "GoogleAppsCardV1OpenLink", "description": "If specified, this `onClick` triggers an open link action." + }, + "overflowMenu": { + "$ref": "GoogleAppsCardV1OverflowMenu", + "description": "If specified, this `onClick` opens an overflow menu. [Google Chat apps](https://developers.google.com/workspace/chat):" } }, "type": "object" @@ -2860,6 +3029,43 @@ }, "type": "object" }, + "GoogleAppsCardV1OverflowMenu": { + "description": "A widget that presents a pop-up menu with one or more actions that users can invoke. For example, showing non-primary actions in a card. You can use this widget when actions don't fit in the available space. To use, specify this widget in the `OnClick` action of widgets that support it. For example, in a `Button`. [Google Chat apps](https://developers.google.com/workspace/chat):", + "id": "GoogleAppsCardV1OverflowMenu", + "properties": { + "items": { + "description": "Required. The list of menu options.", + "items": { + "$ref": "GoogleAppsCardV1OverflowMenuItem" + }, + "type": "array" + } + }, + "type": "object" + }, + "GoogleAppsCardV1OverflowMenuItem": { + "description": "An option that users can invoke in an overflow menu. [Google Chat apps](https://developers.google.com/workspace/chat):", + "id": "GoogleAppsCardV1OverflowMenuItem", + "properties": { + "disabled": { + "description": "Whether the menu option is disabled. Defaults to false.", + "type": "boolean" + }, + "onClick": { + "$ref": "GoogleAppsCardV1OnClick", + "description": "Required. The action invoked when a menu option is selected. 
This `OnClick` cannot contain an `OverflowMenu`, any specified `OverflowMenu` is dropped and the menu item disabled." + }, + "startIcon": { + "$ref": "GoogleAppsCardV1Icon", + "description": "The icon displayed in front of the text." + }, + "text": { + "description": "Required. The text that identifies or describes the item to users.", + "type": "string" + } + }, + "type": "object" + }, "GoogleAppsCardV1PlatformDataSource": { "description": "For a `SelectionInput` widget that uses a multiselect menu, a data source from Google Workspace. Used to populate items in a multiselect menu. [Google Chat apps](https://developers.google.com/workspace/chat):", "id": "GoogleAppsCardV1PlatformDataSource", @@ -2887,6 +3093,10 @@ "description": "A section contains a collection of widgets that are rendered vertically in the order that they're specified. [Google Workspace Add-ons and Chat apps](https://developers.google.com/workspace/extend):", "id": "GoogleAppsCardV1Section", "properties": { + "collapseControl": { + "$ref": "GoogleAppsCardV1CollapseControl", + "description": "Optional. Define the expand and collapse button of the section. This button will be shown only if the section is collapsible. If this field isn't set, the default button is used. [Google Chat apps](https://developers.google.com/workspace/chat):" + }, "collapsible": { "description": "Indicates whether this section is collapsible. Collapsible sections hide some or all widgets, but users can expand the section to reveal the hidden widgets by clicking **Show more**. Users can hide the widgets again by clicking **Show less**. To determine which widgets are hidden, specify `uncollapsibleWidgetsCount`.", "type": "boolean" @@ -2916,7 +3126,7 @@ "properties": { "externalDataSource": { "$ref": "GoogleAppsCardV1Action", - "description": "An external data source, such as a relational data base." + "description": "An external data source, such as a relational database." }, "items": { "description": "An array of selectable items. For example, an array of radio buttons or checkboxes. Supports up to 100 items.", @@ -2935,7 +3145,7 @@ "type": "integer" }, "multiSelectMinQueryLength": { - "description": "For multiselect menus, the number of text characters that a user inputs before the app queries autocomplete and displays suggested items in the menu. If unspecified, defaults to 0 characters for static data sources and 3 characters for external data sources.", + "description": "For multiselect menus, the number of text characters that a user inputs before the menu returns suggested selection items. If unset, the multiselect menu uses the following default values: * If the menu uses a static array of `SelectionInput` items, defaults to 0 characters and immediately populates items from the array. * If the menu uses a dynamic data source (`multi_select_data_source`), defaults to 3 characters before querying the data source to return suggested items.", "format": "int32", "type": "integer" }, @@ -2965,7 +3175,7 @@ "A set of radio buttons. Users can select one radio button.", "A set of switches. Users can turn on one or more switches.", "A dropdown menu. Users can select one item from the menu.", - "A multiselect menu for static or dynamic data. From the menu bar, users select one or more items. Users can also input values to populate dynamic data. For example, users can start typing the name of a Google Chat space and the widget autosuggests the space. 
To populate items for a multiselect menu, you can use one of the following types of data sources: * Static data: Items are specified as `SelectionItem` objects in the widget. Up to 100 items. * Google Workspace data: Items are populated using data from Google Workspace, such as Google Workspace users or Google Chat spaces. * External data: Items are populated from an external data source outside of Google Workspace. For examples of how to implement multiselect menus, see [Add a multiselect menu](https://developers.google.com/workspace/chat/design-interactive-card-dialog#multiselect-menu). [Google Workspace Add-ons and Chat apps](https://developers.google.com/workspace/extend):" + "A menu with a text box. Users can type and select one or more items. For Google Workspace Add-ons, you must populate items using a static array of `SelectionItem` objects. For Google Chat apps, you can also populate items using a dynamic data source and autosuggest items as users type in the menu. For example, users can start typing the name of a Google Chat space and the widget autosuggests the space. To dynamically populate items for a multiselect menu, use one of the following types of data sources: * Google Workspace data: Items are populated using data from Google Workspace, such as Google Workspace users or Google Chat spaces. * External data: Items are populated from an external data source outside of Google Workspace. For examples of how to implement multiselect menus for Chat apps, see [Add a multiselect menu](https://developers.google.com/workspace/chat/design-interactive-card-dialog#multiselect-menu). [Google Workspace Add-ons and Chat apps](https://developers.google.com/workspace/extend):" ], "type": "string" } @@ -2973,7 +3183,7 @@ "type": "object" }, "GoogleAppsCardV1SelectionItem": { - "description": "An item that users can select in a selection input, such as a checkbox or switch. [Google Workspace Add-ons and Chat apps](https://developers.google.com/workspace/extend):", + "description": "An item that users can select in a selection input, such as a checkbox or switch. Supports up to 100 items. [Google Workspace Add-ons and Chat apps](https://developers.google.com/workspace/extend):", "id": "GoogleAppsCardV1SelectionItem", "properties": { "bottomText": { @@ -3116,6 +3326,11 @@ "description": "A paragraph of text that supports formatting. For an example in Google Chat apps, see [Add a paragraph of formatted text](https://developers.google.com/workspace/chat/add-text-image-card-dialog#add_a_paragraph_of_formatted_text). For more information about formatting text, see [Formatting text in Google Chat apps](https://developers.google.com/workspace/chat/format-messages#card-formatting) and [Formatting text in Google Workspace Add-ons](https://developers.google.com/apps-script/add-ons/concepts/widgets#text_formatting). [Google Workspace Add-ons and Chat apps](https://developers.google.com/workspace/extend):", "id": "GoogleAppsCardV1TextParagraph", "properties": { + "maxLines": { + "description": "The maximum number of lines of text that are displayed in the widget. If the text exceeds the specified maximum number of lines, the excess content is concealed behind a **show more** button. If the text is equal or shorter than the specified maximum number of lines, a **show more** button isn't displayed. The default value is 0, in which case all context is displayed. Negative values are ignored. 
[Google Chat apps](https://developers.google.com/workspace/chat):", + "format": "int32", + "type": "integer" + }, "text": { "description": "The text that's shown in the widget.", "type": "string" @@ -3131,6 +3346,14 @@ "$ref": "GoogleAppsCardV1ButtonList", "description": "A list of buttons. For example, the following JSON creates two buttons. The first is a blue text button and the second is an image button that opens a link: ``` \"buttonList\": { \"buttons\": [ { \"text\": \"Edit\", \"color\": { \"red\": 0, \"green\": 0, \"blue\": 1, }, \"disabled\": true, }, { \"icon\": { \"knownIcon\": \"INVITE\", \"altText\": \"check calendar\" }, \"onClick\": { \"openLink\": { \"url\": \"https://example.com/calendar\" } } } ] } ```" }, + "carousel": { + "$ref": "GoogleAppsCardV1Carousel", + "description": "A carousel contains a collection of nested widgets. For example, this is a JSON representation of a carousel that contains two text paragraphs. ``` { \"widgets\": [ { \"textParagraph\": { \"text\": \"First text paragraph in the carousel.\" } }, { \"textParagraph\": { \"text\": \"Second text paragraph in the carousel.\" } } ] } ```" + }, + "chipList": { + "$ref": "GoogleAppsCardV1ChipList", + "description": "A list of chips. For example, the following JSON creates two chips. The first is a text chip and the second is an icon chip that opens a link: ``` \"chipList\": { \"chips\": [ { \"text\": \"Edit\", \"disabled\": true, }, { \"icon\": { \"knownIcon\": \"INVITE\", \"altText\": \"check calendar\" }, \"onClick\": { \"openLink\": { \"url\": \"https://example.com/calendar\" } } } ] } ``` [Google Chat apps](https://developers.google.com/workspace/chat):" + }, "columns": { "$ref": "GoogleAppsCardV1Columns", "description": "Displays up to 2 columns. To include more than 2 columns, or to use rows, use the `Grid` widget. For example, the following JSON creates 2 columns that each contain text paragraphs: ``` \"columns\": { \"columnItems\": [ { \"horizontalSizeStyle\": \"FILL_AVAILABLE_SPACE\", \"horizontalAlignment\": \"CENTER\", \"verticalAlignment\": \"CENTER\", \"widgets\": [ { \"textParagraph\": { \"text\": \"First column text paragraph\" } } ] }, { \"horizontalSizeStyle\": \"FILL_AVAILABLE_SPACE\", \"horizontalAlignment\": \"CENTER\", \"verticalAlignment\": \"CENTER\", \"widgets\": [ { \"textParagraph\": { \"text\": \"Second column text paragraph\" } } ] } ] } ```" @@ -3194,6 +3417,10 @@ "$ref": "GoogleAppsCardV1ButtonList", "description": "ButtonList widget." }, + "chipList": { + "$ref": "GoogleAppsCardV1ChipList", + "description": "ChipList widget. [Google Chat apps](https://developers.google.com/workspace/chat):" + }, "dateTimePicker": { "$ref": "GoogleAppsCardV1DateTimePicker", "description": "DateTimePicker widget." @@ -3609,14 +3836,14 @@ }, "groupMember": { "$ref": "Group", - "description": "The Google Group the membership corresponds to. Reading or mutating memberships for Google Groups requires [user authentication](https://developers.google.com/workspace/chat/authenticate-authorize-chat-user)." + "description": "Optional. The Google Group the membership corresponds to. Reading or mutating memberships for Google Groups requires [user authentication](https://developers.google.com/workspace/chat/authenticate-authorize-chat-user)." }, "member": { "$ref": "User", - "description": "The Google Chat user or app the membership corresponds to. 
If your Chat app [authenticates as a user](https://developers.google.com/workspace/chat/authenticate-authorize-chat-user), the output populates the [user](https://developers.google.com/workspace/chat/api/reference/rest/v1/User) `name` and `type`." + "description": "Optional. The Google Chat user or app the membership corresponds to. If your Chat app [authenticates as a user](https://developers.google.com/workspace/chat/authenticate-authorize-chat-user), the output populates the [user](https://developers.google.com/workspace/chat/api/reference/rest/v1/User) `name` and `type`." }, "name": { - "description": "Resource name of the membership, assigned by the server. Format: `spaces/{space}/members/{member}`", + "description": "Identifier. Resource name of the membership, assigned by the server. Format: `spaces/{space}/members/{member}`", "type": "string" }, "role": { @@ -3700,13 +3927,15 @@ "id": "MembershipCount", "properties": { "joinedDirectHumanUserCount": { - "description": "Count of human users that have directly joined the space, not counting users joined by having membership in a joined group.", + "description": "Output only. Count of human users that have directly joined the space, not counting users joined by having membership in a joined group.", "format": "int32", + "readOnly": true, "type": "integer" }, "joinedGroupCount": { - "description": "Count of all groups that have directly joined the space.", + "description": "Output only. Count of all groups that have directly joined the space.", "format": "int32", + "readOnly": true, "type": "integer" } }, @@ -3750,7 +3979,7 @@ "id": "Message", "properties": { "accessoryWidgets": { - "description": "One or more interactive widgets that appear at the bottom of a message. You can add accessory widgets to messages that contain text, cards, or both text and cards. Not supported for messages that contain dialogs. For details, see [Add interactive widgets at the bottom of a message](https://developers.google.com/workspace/chat/create-messages#add-accessory-widgets). Creating a message with accessory widgets requires [app authentication] (https://developers.google.com/workspace/chat/authenticate-authorize-chat-app).", + "description": "Optional. One or more interactive widgets that appear at the bottom of a message. You can add accessory widgets to messages that contain text, cards, or both text and cards. Not supported for messages that contain dialogs. For details, see [Add interactive widgets at the bottom of a message](https://developers.google.com/workspace/chat/create-messages#add-accessory-widgets). Creating a message with accessory widgets requires [app authentication] (https://developers.google.com/workspace/chat/authenticate-authorize-chat-app).", "items": { "$ref": "AccessoryWidget" }, @@ -3782,7 +4011,7 @@ "type": "array" }, "attachment": { - "description": "User-uploaded attachment.", + "description": "Optional. User-uploaded attachment.", "items": { "$ref": "Attachment" }, @@ -3797,7 +4026,7 @@ "type": "array" }, "cardsV2": { - "description": "An array of [cards](https://developers.google.com/workspace/chat/api/reference/rest/v1/cards). Only Chat apps can create cards. If your Chat app [authenticates as a user](https://developers.google.com/workspace/chat/authenticate-authorize-chat-user), the messages can't contain cards. To learn how to create a message that contains cards, see [Send a message](https://developers.google.com/workspace/chat/create-messages). 
[Card builder](https://addons.gsuite.google.com/uikit/builder)", + "description": "Optional. An array of [cards](https://developers.google.com/workspace/chat/api/reference/rest/v1/cards). Only Chat apps can create cards. If your Chat app [authenticates as a user](https://developers.google.com/workspace/chat/authenticate-authorize-chat-user), the messages can't contain cards. To learn how to create a message that contains cards, see [Send a message](https://developers.google.com/workspace/chat/create-messages). [Card builder](https://addons.gsuite.google.com/uikit/builder)", "items": { "$ref": "CardWithId" }, @@ -3832,7 +4061,7 @@ "type": "array" }, "fallbackText": { - "description": "A plain-text description of the message's cards, used when the actual cards can't be displayed—for example, mobile notifications.", + "description": "Optional. A plain-text description of the message's cards, used when the actual cards can't be displayed—for example, mobile notifications.", "type": "string" }, "formattedText": { @@ -3852,12 +4081,12 @@ "readOnly": true }, "name": { - "description": "Resource name of the message. Format: `spaces/{space}/messages/{message}` Where `{space}` is the ID of the space where the message is posted and `{message}` is a system-assigned ID for the message. For example, `spaces/AAAAAAAAAAA/messages/BBBBBBBBBBB.BBBBBBBBBBB`. If you set a custom ID when you create a message, you can use this ID to specify the message in a request by replacing `{message}` with the value from the `clientAssignedMessageId` field. For example, `spaces/AAAAAAAAAAA/messages/client-custom-name`. For details, see [Name a message](https://developers.google.com/workspace/chat/create-messages#name_a_created_message).", + "description": "Identifier. Resource name of the message. Format: `spaces/{space}/messages/{message}` Where `{space}` is the ID of the space where the message is posted and `{message}` is a system-assigned ID for the message. For example, `spaces/AAAAAAAAAAA/messages/BBBBBBBBBBB.BBBBBBBBBBB`. If you set a custom ID when you create a message, you can use this ID to specify the message in a request by replacing `{message}` with the value from the `clientAssignedMessageId` field. For example, `spaces/AAAAAAAAAAA/messages/client-custom-name`. For details, see [Name a message](https://developers.google.com/workspace/chat/create-messages#name_a_created_message).", "type": "string" }, "privateMessageViewer": { "$ref": "User", - "description": "Immutable. Input for creating a message, otherwise output only. The user that can view the message. When set, the message is private and only visible to the specified user and the Chat app. To include this field in your request, you must call the Chat API using [app authentication](https://developers.google.com/workspace/chat/authenticate-authorize-chat-app) and omit the following: * [Attachments](https://developers.google.com/workspace/chat/api/reference/rest/v1/spaces.messages.attachments) * [Accessory widgets](https://developers.google.com/workspace/chat/api/reference/rest/v1/spaces.messages#Message.AccessoryWidget) For details, see [Send a message privately](https://developers.google.com/workspace/chat/create-messages#private)." + "description": "Optional. Immutable. Input for creating a message, otherwise output only. The user that can view the message. When set, the message is private and only visible to the specified user and the Chat app. 
To include this field in your request, you must call the Chat API using [app authentication](https://developers.google.com/workspace/chat/authenticate-authorize-chat-app) and omit the following: * [Attachments](https://developers.google.com/workspace/chat/api/reference/rest/v1/spaces.messages.attachments) * [Accessory widgets](https://developers.google.com/workspace/chat/api/reference/rest/v1/spaces.messages#Message.AccessoryWidget) For details, see [Send a message privately](https://developers.google.com/workspace/chat/create-messages#private)." }, "quotedMessageMetadata": { "$ref": "QuotedMessageMetadata", @@ -3876,10 +4105,11 @@ }, "space": { "$ref": "Space", - "description": "If your Chat app [authenticates as a user](https://developers.google.com/workspace/chat/authenticate-authorize-chat-user), the output populates the [space](https://developers.google.com/workspace/chat/api/reference/rest/v1/spaces) `name`." + "description": "Output only. If your Chat app [authenticates as a user](https://developers.google.com/workspace/chat/authenticate-authorize-chat-user), the output only populates the [space](https://developers.google.com/workspace/chat/api/reference/rest/v1/spaces) `name`.", + "readOnly": true }, "text": { - "description": "Plain-text body of the message. The first link to an image, video, or web page generates a [preview chip](https://developers.google.com/workspace/chat/preview-links). You can also [@mention a Google Chat user](https://developers.google.com/workspace/chat/format-messages#messages-@mention), or everyone in the space. To learn about creating text messages, see [Send a message](https://developers.google.com/workspace/chat/create-messages).", + "description": "Optional. Plain-text body of the message. The first link to an image, video, or web page generates a [preview chip](https://developers.google.com/workspace/chat/preview-links). You can also [@mention a Google Chat user](https://developers.google.com/workspace/chat/format-messages#messages-@mention), or everyone in the space. To learn about creating text messages, see [Send a message](https://developers.google.com/workspace/chat/create-messages).", "type": "string" }, "thread": { @@ -4000,11 +4230,11 @@ "id": "PermissionSetting", "properties": { "managersAllowed": { - "description": "Whether spaces managers have this permission.", + "description": "Optional. Whether spaces managers have this permission.", "type": "boolean" }, "membersAllowed": { - "description": "Whether non-manager members have this permission.", + "description": "Optional. Whether non-manager members have this permission.", "type": "boolean" } }, @@ -4016,19 +4246,19 @@ "properties": { "manageApps": { "$ref": "PermissionSetting", - "description": "Setting for managing apps in a space." + "description": "Optional. Setting for managing apps in a space." }, "manageMembersAndGroups": { "$ref": "PermissionSetting", - "description": "Setting for managing members and groups in a space." + "description": "Optional. Setting for managing members and groups in a space." }, "manageWebhooks": { "$ref": "PermissionSetting", - "description": "Setting for managing webhooks in a space." + "description": "Optional. Setting for managing webhooks in a space." }, "modifySpaceDetails": { "$ref": "PermissionSetting", - "description": "Setting for updating space name, avatar, description and guidelines." + "description": "Optional. Setting for updating space name, avatar, description and guidelines." 
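Editorial aside, not part of the generated diff: the `PermissionSettings` shape from this hunk expressed as a Python dict, with each nested `PermissionSetting` saying whether space managers and non-manager members hold the permission. How the object is attached to a space (for example through a `spaces.patch` update mask) is an assumption and depends on the space's configuration.

```python
# Sketch of a PermissionSettings payload matching the schema above.
permission_settings = {
    "manageMembersAndGroups": {"managersAllowed": True, "membersAllowed": False},
    "modifySpaceDetails": {"managersAllowed": True, "membersAllowed": False},
    "toggleHistory": {"managersAllowed": True, "membersAllowed": False},
    "useAtMentionAll": {"managersAllowed": True, "membersAllowed": True},
    "postMessages": {"managersAllowed": True, "membersAllowed": True},
    "replyMessages": {"managersAllowed": True, "membersAllowed": True},
}
```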
}, "postMessages": { "$ref": "PermissionSetting", @@ -4037,15 +4267,15 @@ }, "replyMessages": { "$ref": "PermissionSetting", - "description": "Setting for replying to messages in a space." + "description": "Optional. Setting for replying to messages in a space." }, "toggleHistory": { "$ref": "PermissionSetting", - "description": "Setting for toggling space history on and off." + "description": "Optional. Setting for toggling space history on and off." }, "useAtMentionAll": { "$ref": "PermissionSetting", - "description": "Setting for using @all in a space." + "description": "Optional. Setting for using @all in a space." } }, "type": "object" @@ -4074,10 +4304,10 @@ "properties": { "emoji": { "$ref": "Emoji", - "description": "The emoji used in the reaction." + "description": "Required. The emoji used in the reaction." }, "name": { - "description": "The resource name of the reaction. Format: `spaces/{space}/messages/{message}/reactions/{reaction}`", + "description": "Identifier. The resource name of the reaction. Format: `spaces/{space}/messages/{message}/reactions/{reaction}`", "type": "string" }, "user": { @@ -4231,7 +4461,7 @@ "id": "SetUpSpaceRequest", "properties": { "memberships": { - "description": "Optional. The Google Chat users or groups to invite to join the space. Omit the calling user, as they are added automatically. The set currently allows up to 20 memberships (in addition to the caller). For human membership, the `Membership.member` field must contain a `user` with `name` populated (format: `users/{user}`) and `type` set to `User.Type.HUMAN`. You can only add human users when setting up a space (adding Chat apps is only supported for direct message setup with the calling app). You can also add members using the user's email as an alias for {user}. For example, the `user.name` can be `users/example@gmail.com`. To invite Gmail users or users from external Google Workspace domains, user's email must be used for `{user}`. For Google group membership, the `Membership.group_member` field must contain a `group` with `name` populated (format `groups/{group}`). You can only add Google groups when setting `Space.spaceType` to `SPACE`. Optional when setting `Space.spaceType` to `SPACE`. Required when setting `Space.spaceType` to `GROUP_CHAT`, along with at least two memberships. Required when setting `Space.spaceType` to `DIRECT_MESSAGE` with a human user, along with exactly one membership. Must be empty when creating a 1:1 conversation between a human and the calling Chat app (when setting `Space.spaceType` to `DIRECT_MESSAGE` and `Space.singleUserBotDm` to `true`).", + "description": "Optional. The Google Chat users or groups to invite to join the space. Omit the calling user, as they are added automatically. The set currently allows up to 49 memberships (in addition to the caller). For human membership, the `Membership.member` field must contain a `user` with `name` populated (format: `users/{user}`) and `type` set to `User.Type.HUMAN`. You can only add human users when setting up a space (adding Chat apps is only supported for direct message setup with the calling app). You can also add members using the user's email as an alias for {user}. For example, the `user.name` can be `users/example@gmail.com`. To invite Gmail users or users from external Google Workspace domains, user's email must be used for `{user}`. For Google group membership, the `Membership.group_member` field must contain a `group` with `name` populated (format `groups/{group}`). 
You can only add Google groups when setting `Space.spaceType` to `SPACE`. Optional when setting `Space.spaceType` to `SPACE`. Required when setting `Space.spaceType` to `GROUP_CHAT`, along with at least two memberships. Required when setting `Space.spaceType` to `DIRECT_MESSAGE` with a human user, along with exactly one membership. Must be empty when creating a 1:1 conversation between a human and the calling Chat app (when setting `Space.spaceType` to `DIRECT_MESSAGE` and `Space.singleUserBotDm` to `true`).", "items": { "$ref": "Membership" }, @@ -4317,17 +4547,23 @@ "type": "string" }, "displayName": { - "description": "The space's display name. Required when [creating a space](https://developers.google.com/workspace/chat/api/reference/rest/v1/spaces/create) with a `spaceType` of `SPACE`. If you receive the error message `ALREADY_EXISTS` when creating a space or updating the `displayName`, try a different `displayName`. An existing space within the Google Workspace organization might already use this display name. For direct messages, this field might be empty. Supports up to 128 characters.", + "description": "Optional. The space's display name. Required when [creating a space](https://developers.google.com/workspace/chat/api/reference/rest/v1/spaces/create) with a `spaceType` of `SPACE`. If you receive the error message `ALREADY_EXISTS` when creating a space or updating the `displayName`, try a different `displayName`. An existing space within the Google Workspace organization might already use this display name. For direct messages, this field might be empty. Supports up to 128 characters.", "type": "string" }, "externalUserAllowed": { - "description": "Immutable. Whether this space permits any Google Chat user as a member. Input when creating a space in a Google Workspace organization. Omit this field when creating spaces in the following conditions: * The authenticated user uses a consumer account (unmanaged user account). By default, a space created by a consumer account permits any Google Chat user. For existing spaces, this field is output only.", + "description": "Optional. Immutable. Whether this space permits any Google Chat user as a member. Input when creating a space in a Google Workspace organization. Omit this field when creating spaces in the following conditions: * The authenticated user uses a consumer account (unmanaged user account). By default, a space created by a consumer account permits any Google Chat user. For existing spaces, this field is output only.", "type": "boolean" }, "importMode": { "description": "Optional. Whether this space is created in `Import Mode` as part of a data migration into Google Workspace. While spaces are being imported, they aren't visible to users until the import is complete. Creating a space in `Import Mode`requires [user authentication](https://developers.google.com/workspace/chat/authenticate-authorize-chat-user).", "type": "boolean" }, + "importModeExpireTime": { + "description": "Output only. The time when the space will be automatically deleted by the system if it remains in import mode. Each space created in import mode must exit this mode before this expire time using `spaces.completeImport`. This field is only populated for spaces that were created with import mode.", + "format": "google-datetime", + "readOnly": true, + "type": "string" + }, "lastActiveTime": { "description": "Output only. 
Timestamp of the last message in the space.", "format": "google-datetime", @@ -4340,7 +4576,7 @@ "readOnly": true }, "name": { - "description": "Resource name of the space. Format: `spaces/{space}` Where `{space}` represents the system-assigned ID for the space. You can obtain the space ID by calling the [`spaces.list()`](https://developers.google.com/workspace/chat/api/reference/rest/v1/spaces/list) method or from the space URL. For example, if the space URL is `https://mail.google.com/mail/u/0/#chat/space/AAAAAAAAA`, the space ID is `AAAAAAAAA`.", + "description": "Identifier. Resource name of the space. Format: `spaces/{space}` Where `{space}` represents the system-assigned ID for the space. You can obtain the space ID by calling the [`spaces.list()`](https://developers.google.com/workspace/chat/api/reference/rest/v1/spaces/list) method or from the space URL. For example, if the space URL is `https://mail.google.com/mail/u/0/#chat/space/AAAAAAAAA`, the space ID is `AAAAAAAAA`.", "type": "string" }, "permissionSettings": { @@ -4367,10 +4603,10 @@ }, "spaceDetails": { "$ref": "SpaceDetails", - "description": "Details about the space including description and rules." + "description": "Optional. Details about the space including description and rules." }, "spaceHistoryState": { - "description": "The message history state for messages and threads in this space.", + "description": "Optional. The message history state for messages and threads in this space.", "enum": [ "HISTORY_STATE_UNSPECIFIED", "HISTORY_OFF", @@ -4401,7 +4637,7 @@ "type": "string" }, "spaceType": { - "description": "The type of space. Required when creating a space or updating the space type of a space. Output only for other usage.", + "description": "Optional. The type of space. Required when creating a space or updating the space type of a space. Output only for other usage.", "enum": [ "SPACE_TYPE_UNSPECIFIED", "SPACE", @@ -4676,7 +4912,7 @@ "id": "Thread", "properties": { "name": { - "description": "Resource name of the thread. Example: `spaces/{space}/threads/{thread}`", + "description": "Identifier. Resource name of the thread. Example: `spaces/{space}/threads/{thread}`", "type": "string" }, "threadKey": { @@ -4736,7 +4972,7 @@ "type": "object" }, "UpdatedWidget": { - "description": "The response of the updated widget. Used to provide autocomplete options for a widget.", + "description": "For `selectionInput` widgets, returns autocomplete suggestions for a multiselect menu.", "id": "UpdatedWidget", "properties": { "suggestions": { diff --git a/discovery/googleapis/chromemanagement__v1.json b/discovery/googleapis/chromemanagement__v1.json index 39eace93d..29e3dfc9b 100644 --- a/discovery/googleapis/chromemanagement__v1.json +++ b/discovery/googleapis/chromemanagement__v1.json @@ -31,7 +31,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20241003", + "revision": "20241210", "rootUrl": "https://chromemanagement.googleapis.com/", "servicePath": "", "title": "Chrome Management API", @@ -348,6 +348,97 @@ } } }, + "profiles": { + "methods": { + "delete": { + "description": "Deletes the data collected from a Chrome browser profile.", + "flatPath": "v1/customers/{customersId}/profiles/{profilesId}", + "httpMethod": "DELETE", + "id": "chromemanagement.customers.profiles.delete", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "Required. 
Format: customers/{customer_id}/profiles/{profile_permanent_id}", + "location": "path", + "pattern": "^customers/[^/]+/profiles/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+name}", + "response": { + "$ref": "GoogleProtobufEmpty" + } + }, + "get": { + "description": "Gets a Chrome browser profile with customer ID and profile permanent ID.", + "flatPath": "v1/customers/{customersId}/profiles/{profilesId}", + "httpMethod": "GET", + "id": "chromemanagement.customers.profiles.get", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "Required. Format: customers/{customer_id}/profiles/{profile_permanent_id}", + "location": "path", + "pattern": "^customers/[^/]+/profiles/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+name}", + "response": { + "$ref": "GoogleChromeManagementVersionsV1ChromeBrowserProfile" + } + }, + "list": { + "description": "Lists Chrome browser profiles of a customer based on the given search and sorting criteria.", + "flatPath": "v1/customers/{customersId}/profiles", + "httpMethod": "GET", + "id": "chromemanagement.customers.profiles.list", + "parameterOrder": [ + "parent" + ], + "parameters": { + "filter": { + "description": "Optional. The filter used to filter profiles. The following fields can be used in the filter: - profile_id - display_name - user_email - last_activity_time - last_policy_sync_time - last_status_report_time - first_enrollment_time - os_platform_type - os_version - browser_version - browser_channel - policy_count - extension_count - identity_provider - affiliation_state - ouId Any of the above fields can be used to specify a filter, and filtering by multiple fields is supported with AND operator. String type fields and enum type fields support '=' and '!=' operators. The integer type and the timestamp type fields support '=', '!=', '<', '>', '<=' and '>=' operators. Timestamps expect an RFC-3339 formatted string (e.g. 2012-04-21T11:30:00-04:00). Wildcard '*' can be used with a string type field filter. In addition, string literal filtering is also supported, for example, 'ABC' as a filter maps to a filter that checks if any of the filterable string type fields contains 'ABC'. Organization unit number can be used as a filtering criteria here by specifying 'ouId = ${your_org_unit_id}', please note that only single OU ID matching is supported.", + "location": "query", + "type": "string" + }, + "orderBy": { + "description": "Optional. The fields used to specify the ordering of the results. The supported fields are: - profile_id - display_name - user_email - last_activity_time - last_policy_sync_time - last_status_report_time - first_enrollment_time - os_platform_type - os_version - browser_version - browser_channel - policy_count - extension_count - identity_provider - affiliation_state By default, sorting is in ascending order, to specify descending order for a field, a suffix \" desc\" should be added to the field name. The default ordering is the descending order of last_status_report_time.", + "location": "query", + "type": "string" + }, + "pageSize": { + "description": "Optional. The maximum number of profiles to return. The default page size is 100 if page_size is unspecified, and the maximum page size allowed is 200.", + "format": "int32", + "location": "query", + "type": "integer" + }, + "pageToken": { + "description": "Optional. 
The page token used to retrieve a specific page of the listing request.", + "location": "query", + "type": "string" + }, + "parent": { + "description": "Required. Format: customers/{customer_id}", + "location": "path", + "pattern": "^customers/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+parent}/profiles", + "response": { + "$ref": "GoogleChromeManagementVersionsV1ListChromeBrowserProfilesResponse" + } + } + } + }, "reports": { "methods": { "countChromeBrowsersNeedingAttention": { @@ -595,12 +686,12 @@ "type": "string" }, "filter": { - "description": "Query string to filter results, AND-separated fields in EBNF syntax. Note: OR operations are not supported in this filter. Supported filter fields: * app_name * app_type * install_type * number_of_permissions * total_install_count * latest_profile_active_date * permission_name * app_id * manifest_versions", + "description": "Query string to filter results, AND-separated fields in EBNF syntax. Note: OR operations are not supported in this filter. Supported filter fields: * app_name * app_type * install_type * number_of_permissions * total_install_count * latest_profile_active_date * permission_name * app_id * manifest_versions * risk_score", "location": "query", "type": "string" }, "orderBy": { - "description": "Field used to order results. Supported order by fields: * app_name * app_type * install_type * number_of_permissions * total_install_count * app_id * manifest_versions", + "description": "Field used to order results. Supported order by fields: * app_name * app_type * install_type * number_of_permissions * total_install_count * app_id * manifest_versions * risk_score", "location": "query", "type": "string" }, @@ -1373,6 +1464,24 @@ "APPLICATION_TYPE_STANDALONE_BROWSER_EXTENSION", "APPLICATION_TYPE_BRUSCHETTA" ], + "enumDeprecated": [ + false, + false, + false, + false, + false, + false, + false, + false, + true, + false, + false, + false, + true, + false, + true, + false + ], "enumDescriptions": [ "Application type unknown.", "Application type arc (Android app).", @@ -1382,13 +1491,13 @@ "Application type web.", "Application type Mac OS.", "Application type Plugin VM.", - "Application type standalone browser (Lacros browser app).", + "Deprecated. This value is no longer used. Application type standalone browser (Lacros browser app).", "Application type remote.", "Application type borealis.", "Application type system web.", - "Application type standalone browser chrome app (hosted in Lacros).", + "Deprecated. This value is no longer used. Application type standalone browser chrome app.", "Application type extension.", - "Application type standalone browser extension.", + "Deprecated. This value is no longer used. Application type standalone browser extension.", "Application type bruschetta." ], "type": "string" }, @@ -2851,6 +2960,11 @@ }, "readOnly": true, "type": "array" + }, + "riskAssessment": { + "$ref": "GoogleChromeManagementV1RiskAssessmentData", + "description": "Output only. If available, the risk assessment data about this extension.", + "readOnly": true } }, "type": "object" }, @@ -3421,6 +3535,96 @@ }, "type": "object" }, + "GoogleChromeManagementV1RiskAssessment": { + "description": "Risk assessment for a Chrome extension.", + "id": "GoogleChromeManagementV1RiskAssessment", + "properties": { + "assessment": { + "description": "Risk assessment for the extension. 
Currently, this is a numerical value, and its interpretation is specific to each risk assessment provider.", + "type": "string" + }, + "detailsUrl": { + "description": "A URL that a user can navigate to for more information about the risk assessment.", + "type": "string" + }, + "version": { + "description": "The version of the extension that this assessment applies to.", + "type": "string" + } + }, + "type": "object" + }, + "GoogleChromeManagementV1RiskAssessmentData": { + "description": "Risk assessment data about an extension/app.", + "id": "GoogleChromeManagementV1RiskAssessmentData", + "properties": { + "entries": { + "description": "Individual risk assessments.", + "items": { + "$ref": "GoogleChromeManagementV1RiskAssessmentEntry" + }, + "type": "array" + }, + "overallRiskLevel": { + "description": "Overall assessed risk level across all entries. This will be the highest risk level from all entries.", + "enum": [ + "RISK_LEVEL_UNSPECIFIED", + "RISK_LEVEL_LOW", + "RISK_LEVEL_MEDIUM", + "RISK_LEVEL_HIGH" + ], + "enumDescriptions": [ + "Risk level not specified.", + "Extension that represents a low risk.", + "Extension that represents a medium risk.", + "Extension that represents a high risk." + ], + "type": "string" + } + }, + "type": "object" + }, + "GoogleChromeManagementV1RiskAssessmentEntry": { + "description": "One risk assessment entry.", + "id": "GoogleChromeManagementV1RiskAssessmentEntry", + "properties": { + "provider": { + "description": "The risk assessment provider from which this entry comes.", + "enum": [ + "RISK_ASSESSMENT_PROVIDER_UNSPECIFIED", + "RISK_ASSESSMENT_PROVIDER_CRXCAVATOR", + "RISK_ASSESSMENT_PROVIDER_SPIN_AI" + ], + "enumDescriptions": [ + "Default value when no provider is specified.", + "CRXcavator.", + "Spin.Ai." + ], + "type": "string" + }, + "riskAssessment": { + "$ref": "GoogleChromeManagementV1RiskAssessment", + "description": "The details of the provider's risk assessment." + }, + "riskLevel": { + "description": "The bucketed risk level for the risk assessment.", + "enum": [ + "RISK_LEVEL_UNSPECIFIED", + "RISK_LEVEL_LOW", + "RISK_LEVEL_MEDIUM", + "RISK_LEVEL_HIGH" + ], + "enumDescriptions": [ + "Risk level not specified.", + "Extension that represents a low risk.", + "Extension that represents a medium risk.", + "Extension that represents a high risk." + ], + "type": "string" + } + }, + "type": "object" + }, "GoogleChromeManagementV1RuntimeCountersReport": { "description": "Runtime counters retrieved from CPU. Currently the runtime counters telemetry is only supported by Intel vPro PSR on Gen 14+.", "id": "GoogleChromeManagementV1RuntimeCountersReport", @@ -3609,6 +3813,24 @@ "APPLICATION_TYPE_STANDALONE_BROWSER_EXTENSION", "APPLICATION_TYPE_BRUSCHETTA" ], + "enumDeprecated": [ + false, + false, + false, + false, + false, + false, + false, + false, + true, + false, + false, + false, + true, + false, + true, + false + ], "enumDescriptions": [ "Application type unknown.", "Application type arc (Android app).", @@ -3618,13 +3840,13 @@ "Application type web.", "Application type Mac OS.", "Application type Plugin VM.", - "Application type standalone browser (Lacros browser app).", + "Deprecated. This value is no longer used. Application type standalone browser (Lacros browser app).", "Application type remote.", "Application type borealis.", "Application type system web.", - "Application type standalone browser chrome app (hosted in Lacros).", + "Deprecated. This value is no longer used. 
Application type standalone browser chrome app.", "Application type extension.", - "Application type standalone browser extension.", + "Deprecated. This value is no longer used. Application type standalone browser extension.", "Application type bruschetta." ], "type": "string" }, @@ -3685,7 +3907,8 @@ "APPLICATION_LAUNCH_SOURCE_WELCOME_TOUR", "APPLICATION_LAUNCH_SOURCE_FOCUS_MODE", "APPLICATION_LAUNCH_SOURCE_SPARKY", - "APPLICATION_LAUNCH_SOURCE_NAVIGATION_CAPTURING" + "APPLICATION_LAUNCH_SOURCE_NAVIGATION_CAPTURING", + "APPLICATION_LAUNCH_SOURCE_WEB_INSTALL_API" ], "enumDescriptions": [ "Application launch source unknown.", @@ -3730,7 +3953,8 @@ "Application launched from welcome tour.", "Application launched from focus panel.", "Application launched from experimental feature Sparky.", - "Application launched from navigation capturing." + "Application launched from navigation capturing.", + "Application launched from web install API." ], "type": "string" }, @@ -3754,6 +3978,24 @@ "APPLICATION_TYPE_STANDALONE_BROWSER_EXTENSION", "APPLICATION_TYPE_BRUSCHETTA" ], + "enumDeprecated": [ + false, + false, + false, + false, + false, + false, + false, + false, + true, + false, + false, + false, + true, + false, + true, + false + ], "enumDescriptions": [ "Application type unknown.", "Application type arc (Android app).", @@ -3763,13 +4005,13 @@ "Application type web.", "Application type Mac OS.", "Application type Plugin VM.", - "Application type standalone browser (Lacros browser app).", + "Deprecated. This value is no longer used. Application type standalone browser (Lacros browser app).", "Application type remote.", "Application type borealis.", "Application type system web.", - "Application type standalone browser chrome app (hosted in Lacros).", + "Deprecated. This value is no longer used. Application type standalone browser chrome app.", "Application type extension.", - "Application type standalone browser extension.", + "Deprecated. This value is no longer used. Application type standalone browser extension.", "Application type bruschetta." ], "type": "string" }, @@ -3805,6 +4047,24 @@ "APPLICATION_TYPE_STANDALONE_BROWSER_EXTENSION", "APPLICATION_TYPE_BRUSCHETTA" ], + "enumDeprecated": [ + false, + false, + false, + false, + false, + false, + false, + false, + true, + false, + false, + false, + true, + false, + true, + false + ], "enumDescriptions": [ "Application type unknown.", "Application type arc (Android app).", @@ -3814,13 +4074,13 @@ "Application type web.", "Application type Mac OS.", "Application type Plugin VM.", - "Application type standalone browser (Lacros browser app).", + "Deprecated. This value is no longer used. Application type standalone browser (Lacros browser app).", "Application type remote.", "Application type borealis.", "Application type system web.", - "Application type standalone browser chrome app (hosted in Lacros).", + "Deprecated. This value is no longer used. Application type standalone browser chrome app.", "Application type extension.", - "Application type standalone browser extension.", + "Deprecated. This value is no longer used. Application type standalone browser extension.", "Application type bruschetta." ], "type": "string" }, @@ -4685,6 +4945,677 @@ }, "type": "object" }, + "GoogleChromeManagementVersionsV1AttestationCredential": { + "description": "Information of public key associated with a Chrome browser profile.", + "id": "GoogleChromeManagementVersionsV1AttestationCredential", + "properties": { + "keyRotationTime": { + "description": "Output only. 
Latest rotation timestamp of the public key rotation.", + "format": "google-datetime", + "readOnly": true, + "type": "string" + }, + "keyTrustLevel": { + "description": "Output only. Trust level of the public key.", + "enum": [ + "KEY_TRUST_LEVEL_UNSPECIFIED", + "CHROME_BROWSER_HW_KEY", + "CHROME_BROWSER_OS_KEY" + ], + "enumDescriptions": [ + "Represents an unspecified public key trust level.", + "Represents a HW key.", + "Represents an OS key." + ], + "readOnly": true, + "type": "string" + }, + "keyType": { + "description": "Output only. Type of the public key.", + "enum": [ + "KEY_TYPE_UNSPECIFIED", + "RSA_KEY", + "EC_KEY" + ], + "enumDescriptions": [ + "Represents an unspecified public key type.", + "Represents a RSA key.", + "Represents an EC key." + ], + "readOnly": true, + "type": "string" + }, + "publicKey": { + "description": "Output only. Value of the public key.", + "format": "byte", + "readOnly": true, + "type": "string" + } + }, + "type": "object" + }, + "GoogleChromeManagementVersionsV1ChromeBrowserProfile": { + "description": "A representation of a Chrome browser profile.", + "id": "GoogleChromeManagementVersionsV1ChromeBrowserProfile", + "properties": { + "affiliationState": { + "description": "Output only. The specific affiliation state of the profile.", + "enum": [ + "AFFILIATION_STATE_UNSPECIFIED", + "UNAFFILIATED_GENERIC", + "PROFILE_ONLY", + "UNAFFILIATED_LOCAL_MACHINE", + "UNAFFILIATED_CLOUD_MACHINE", + "AFFILIATED_CLOUD_MANAGED" + ], + "enumDescriptions": [ + "Unspecified affiliation state.", + "Unaffiliated - but we do not have the details for the type of unaffiliated profile.", + "Unaffiliated - A managed profile that appears on a totally unmanaged browser.", + "Unaffiliated - A managed profile that appears on a machine that is locally managed by a different organization (through platform management mechanisms like GPO).", + "Unaffiliated - A managed profile that appears on a managed browser that is cloud managed by a different organization (using Chrome Browser Cloud Management).", + "Affiliated - Both the profile and the managed browser are managed by the same organization." + ], + "readOnly": true, + "type": "string" + }, + "annotatedLocation": { + "description": "Optional. Location of the profile annotated by the admin.", + "type": "string" + }, + "annotatedUser": { + "description": "Optional. User of the profile annotated by the admin.", + "type": "string" + }, + "attestationCredential": { + "$ref": "GoogleChromeManagementVersionsV1AttestationCredential", + "description": "Output only. Attestation credential information of the profile.", + "readOnly": true + }, + "browserChannel": { + "description": "Output only. Channel of the browser on which the profile exists.", + "readOnly": true, + "type": "string" + }, + "browserVersion": { + "description": "Output only. Version of the browser on which the profile exists.", + "readOnly": true, + "type": "string" + }, + "deviceInfo": { + "$ref": "GoogleChromeManagementVersionsV1DeviceInfo", + "description": "Output only. Basic information of the device on which the profile exists. This information is only available for the affiliated profiles.", + "readOnly": true + }, + "displayName": { + "description": "Output only. Profile display name set by client.", + "readOnly": true, + "type": "string" + }, + "etag": { + "description": "Output only. Etag of this ChromeBrowserProfile resource. 
This etag can be used with UPDATE operation to ensure consistency.", + "readOnly": true, + "type": "string" + }, + "extensionCount": { + "description": "Output only. Number of extensions installed on the profile.", + "format": "int64", + "readOnly": true, + "type": "string" + }, + "firstEnrollmentTime": { + "description": "Output only. Timestamp of the first enrollment of the profile.", + "format": "google-datetime", + "readOnly": true, + "type": "string" + }, + "identityProvider": { + "description": "Output only. Identity provider of the profile.", + "enum": [ + "IDENTITY_PROVIDER_UNSPECIFIED", + "GOOGLE_IDENTITY_PROVIDER", + "EXTERNAL_IDENTITY_PROVIDER" + ], + "enumDescriptions": [ + "Represents an unspecified identity provider.", + "Represents a Google identity provider.", + "Represents an external identity provider." + ], + "readOnly": true, + "type": "string" + }, + "lastActivityTime": { + "description": "Output only. Timestamp of the latest activity by the profile.", + "format": "google-datetime", + "readOnly": true, + "type": "string" + }, + "lastPolicyFetchTime": { + "description": "Output only. Timestamp of the latest policy fetch by the profile.", + "format": "google-datetime", + "readOnly": true, + "type": "string" + }, + "lastPolicySyncTime": { + "description": "Output only. Timestamp of the latest policy sync by the profile.", + "format": "google-datetime", + "readOnly": true, + "type": "string" + }, + "lastStatusReportTime": { + "description": "Output only. Timestamp of the latest status report by the profile.", + "format": "google-datetime", + "readOnly": true, + "type": "string" + }, + "name": { + "description": "Identifier. Format: customers/{customer_id}/profiles/{profile_permanent_id}", + "type": "string" + }, + "osPlatformType": { + "description": "Output only. OS platform of the device on which the profile exists.", + "readOnly": true, + "type": "string" + }, + "osPlatformVersion": { + "description": "Output only. Major OS version of the device on which the profile exists. (i.e. Windows 10)", + "readOnly": true, + "type": "string" + }, + "osVersion": { + "description": "Output only. OS version of the device on which the profile exists.", + "readOnly": true, + "type": "string" + }, + "policyCount": { + "description": "Output only. Number of policies applied on the profile.", + "format": "int64", + "readOnly": true, + "type": "string" + }, + "profileId": { + "description": "Output only. Chrome client side profile ID.", + "readOnly": true, + "type": "string" + }, + "profilePermanentId": { + "description": "Output only. Profile permanent ID is the unique identifier of a profile within one customer.", + "readOnly": true, + "type": "string" + }, + "reportingData": { + "$ref": "GoogleChromeManagementVersionsV1ReportingData", + "description": "Output only. Detailed reporting data of the profile. This information is only available when the profile reporting policy is enabled.", + "readOnly": true + }, + "userEmail": { + "description": "Output only. Email address of the user to which the profile belongs.", + "readOnly": true, + "type": "string" + }, + "userId": { + "description": "Output only. 
Unique Directory API ID of the user that can be used in Admin SDK Users API.", + "readOnly": true, + "type": "string" + } + }, + "type": "object" + }, + "GoogleChromeManagementVersionsV1DeviceInfo": { + "description": "Information of a device that runs a Chrome browser profile.", + "id": "GoogleChromeManagementVersionsV1DeviceInfo", + "properties": { + "affiliatedDeviceId": { + "description": "Output only. Device ID that identifies the affiliated device on which the profile exists. If the device type is CHROME_BROWSER, then this represents a unique Directory API ID of the device that can be used in Admin SDK Browsers API.", + "readOnly": true, + "type": "string" + }, + "deviceType": { + "description": "Output only. Type of the device on which the profile exists.", + "enum": [ + "DEVICE_TYPE_UNSPECIFIED", + "CHROME_BROWSER" + ], + "enumDescriptions": [ + "Represents an unspecified device type.", + "Represents a Chrome browser device." + ], + "readOnly": true, + "type": "string" + }, + "hostname": { + "description": "Output only. Hostname of the device on which the profile exists.", + "readOnly": true, + "type": "string" + }, + "machine": { + "description": "Output only. Machine name of the device on which the profile exists. On platforms which do not report the machine name (currently iOS and Android) this is instead set to the browser's device_id - but note that this is a different device_id than the |affiliated_device_id|.", + "readOnly": true, + "type": "string" + } + }, + "type": "object" + }, + "GoogleChromeManagementVersionsV1ListChromeBrowserProfilesResponse": { + "description": "Response to ListChromeBrowserProfiles method.", + "id": "GoogleChromeManagementVersionsV1ListChromeBrowserProfilesResponse", + "properties": { + "chromeBrowserProfiles": { + "description": "The list of profiles returned.", + "items": { + "$ref": "GoogleChromeManagementVersionsV1ChromeBrowserProfile" + }, + "type": "array" + }, + "nextPageToken": { + "description": "The pagination token that can be used to list the next page.", + "type": "string" + }, + "totalSize": { + "description": "Total size represents an estimated number of resources returned. Not guaranteed to be accurate above 10k profiles.", + "format": "int64", + "type": "string" + } + }, + "type": "object" + }, + "GoogleChromeManagementVersionsV1ReportingData": { + "description": "Reporting data of a Chrome browser profile.", + "id": "GoogleChromeManagementVersionsV1ReportingData", + "properties": { + "browserExecutablePath": { + "description": "Output only. Executable path of the installed Chrome browser. A valid path is included only in affiliated profiles.", + "readOnly": true, + "type": "string" + }, + "extensionData": { + "description": "Output only. Information of the extensions installed on the profile.", + "items": { + "$ref": "GoogleChromeManagementVersionsV1ReportingDataExtensionData" + }, + "readOnly": true, + "type": "array" + }, + "extensionPolicyData": { + "description": "Output only. Information of the policies applied on the extensions.", + "items": { + "$ref": "GoogleChromeManagementVersionsV1ReportingDataExtensionPolicyData" + }, + "readOnly": true, + "type": "array" + }, + "installedBrowserVersion": { + "description": "Output only. Updated version of a browser, if it is different from the active browser version.", + "readOnly": true, + "type": "string" + }, + "policyData": { + "description": "Output only. 
Information of the policies applied on the profile.", + "items": { + "$ref": "GoogleChromeManagementVersionsV1ReportingDataPolicyData" + }, + "readOnly": true, + "type": "array" + }, + "profilePath": { + "description": "Output only. Path of the profile. A valid path is included only in affiliated profiles.", + "readOnly": true, + "type": "string" + } + }, + "type": "object" + }, + "GoogleChromeManagementVersionsV1ReportingDataConflictingPolicyData": { + "description": "Information of conflicting policy applied on a Chrome browser profile.", + "id": "GoogleChromeManagementVersionsV1ReportingDataConflictingPolicyData", + "properties": { + "source": { + "description": "Output only. Source of the policy.", + "enum": [ + "POLICY_SOURCE_UNSPECIFIED", + "MACHINE_PLATFORM", + "USER_PLATFORM", + "MACHINE_LEVEL_USER_CLOUD", + "USER_CLOUD", + "MACHINE_MERGED" + ], + "enumDescriptions": [ + "Represents an unspecified policy source.", + "Represents a machine level platform policy.", + "Represents a user level platform policy.", + "Represents a machine level user cloud policy.", + "Represents a user level cloud policy.", + "Represents a machine level merged policy." + ], + "readOnly": true, + "type": "string" + } + }, + "type": "object" + }, + "GoogleChromeManagementVersionsV1ReportingDataExtensionData": { + "description": "Information of an extension installed on a Chrome browser profile.", + "id": "GoogleChromeManagementVersionsV1ReportingDataExtensionData", + "properties": { + "description": { + "description": "Output only. Description of the extension.", + "readOnly": true, + "type": "string" + }, + "extensionId": { + "description": "Output only. ID of the extension.", + "readOnly": true, + "type": "string" + }, + "extensionType": { + "description": "Output only. Type of the extension.", + "enum": [ + "EXTENSION_TYPE_UNSPECIFIED", + "EXTENSION", + "APP", + "THEME", + "HOSTED_APP" + ], + "enumDescriptions": [ + "Represents an unspecified extension type.", + "Represents an extension.", + "Represents an app.", + "Represents a theme.", + "Represents a hosted app." + ], + "readOnly": true, + "type": "string" + }, + "homepageUri": { + "description": "Output only. The URL of the homepage of the extension.", + "readOnly": true, + "type": "string" + }, + "installationType": { + "description": "Output only. Installation type of the extension.", + "enum": [ + "INSTALLATION_TYPE_UNSPECIFIED", + "MULTIPLE", + "NORMAL", + "ADMIN", + "DEVELOPMENT", + "SIDELOAD", + "OTHER" + ], + "enumDescriptions": [ + "Represents an unspecified installation type.", + "Represents instances of the extension having mixed installation types.", + "Represents a normal installation type.", + "Represents an installation by admin.", + "Represents a development installation type.", + "Represents a sideload installation type.", + "Represents an installation type that is not covered in the other options." + ], + "readOnly": true, + "type": "string" + }, + "isDisabled": { + "description": "Output only. Represents whether the user disabled the extension.", + "readOnly": true, + "type": "boolean" + }, + "isWebstoreExtension": { + "description": "Output only. Represents whether the extension is from the webstore.", + "readOnly": true, + "type": "boolean" + }, + "manifestVersion": { + "description": "Output only. Manifest version of the extension.", + "format": "int32", + "readOnly": true, + "type": "integer" + }, + "name": { + "description": "Output only. 
Name of the extension.", + "readOnly": true, + "type": "string" + }, + "permissions": { + "description": "Output only. Permissions requested by the extension.", + "items": { + "type": "string" + }, + "readOnly": true, + "type": "array" + }, + "version": { + "description": "Output only. Version of the extension.", + "readOnly": true, + "type": "string" + } + }, + "type": "object" + }, + "GoogleChromeManagementVersionsV1ReportingDataExtensionPolicyData": { + "description": "Information of the policies applied on an extension.", + "id": "GoogleChromeManagementVersionsV1ReportingDataExtensionPolicyData", + "properties": { + "extensionId": { + "description": "Output only. ID of the extension.", + "readOnly": true, + "type": "string" + }, + "extensionName": { + "description": "Output only. Name of the extension.", + "readOnly": true, + "type": "string" + }, + "policyData": { + "description": "Output only. Information of the policies applied on the extension.", + "items": { + "$ref": "GoogleChromeManagementVersionsV1ReportingDataPolicyData" + }, + "readOnly": true, + "type": "array" + } + }, + "type": "object" + }, + "GoogleChromeManagementVersionsV1ReportingDataPolicyData": { + "description": "Information of a policy applied on a Chrome browser profile.", + "id": "GoogleChromeManagementVersionsV1ReportingDataPolicyData", + "properties": { + "conflicts": { + "description": "Output only. Conflicting policy information.", + "items": { + "$ref": "GoogleChromeManagementVersionsV1ReportingDataConflictingPolicyData" + }, + "readOnly": true, + "type": "array" + }, + "error": { + "description": "Output only. Error message of the policy, if any.", + "readOnly": true, + "type": "string" + }, + "name": { + "description": "Output only. Name of the policy.", + "readOnly": true, + "type": "string" + }, + "source": { + "description": "Output only. Source of the policy.", + "enum": [ + "POLICY_SOURCE_UNSPECIFIED", + "MACHINE_PLATFORM", + "USER_PLATFORM", + "MACHINE_LEVEL_USER_CLOUD", + "USER_CLOUD", + "MACHINE_MERGED" + ], + "enumDescriptions": [ + "Represents an unspecified policy source.", + "Represents a machine level platform policy.", + "Represents a user level platform policy.", + "Represents a machine level user cloud policy.", + "Represents a user level cloud policy.", + "Represents a machine level merged policy." + ], + "readOnly": true, + "type": "string" + }, + "value": { + "description": "Output only. Value of the policy.", + "readOnly": true, + "type": "string" + } + }, + "type": "object" + }, + "GoogleChromeManagementVersionsV1alpha1CertificateProvisioningProcess": { + "description": "A certificate provisioning process.", + "id": "GoogleChromeManagementVersionsV1alpha1CertificateProvisioningProcess", + "properties": { + "caConnectionAdapterConfigReference": { + "description": "Output only. A JSON string that contains the administrator-provided configuration for the certification authority service. This field can be missing if no configuration was given.", + "readOnly": true, + "type": "string" + }, + "chromeOsDevice": { + "$ref": "GoogleChromeManagementVersionsV1alpha1ChromeOsDevice", + "description": "Output only. The client certificate is being provisioned for a ChromeOS device. This contains information about the device.", + "readOnly": true + }, + "chromeOsUserSession": { + "$ref": "GoogleChromeManagementVersionsV1alpha1ChromeOsUserSession", + "description": "Output only. The client certificate is being provisioned for a ChromeOS user session. 
This contains information about the user session.", + "readOnly": true + }, + "failureMessage": { + "description": "Output only. A message describing why this `CertificateProvisioningProcess` failed. Presence of this field indicates that the `CertificateProvisioningProcess` has failed.", + "readOnly": true, + "type": "string" + }, + "issuedCertificate": { + "description": "Output only. The issued certificate for this `CertificateProvisioningProcess` in PEM format.", + "readOnly": true, + "type": "string" + }, + "name": { + "description": "Identifier. Resource name of the `CertificateProvisioningProcess`. The name pattern is given as `customers/{customer}/certificateProvisioningProcesses/{certificate_provisioning_process}` with `{customer}` being the obfuscated customer id and `{certificate_provisioning_process}` being the certificate provisioning process id.", + "type": "string" + }, + "profileAdapterConfigReference": { + "description": "Output only. A JSON string that contains the administrator-provided configuration for the certificate provisioning profile. This field can be missing if no configuration was given.", + "readOnly": true, + "type": "string" + }, + "provisioningProfileId": { + "description": "Output only. The ID of the certificate provisioning profile.", + "readOnly": true, + "type": "string" + }, + "signData": { + "description": "Output only. The data that the client was asked to sign. This field is only present after the `SignData` operation has been initiated.", + "format": "byte", + "readOnly": true, + "type": "string" + }, + "signature": { + "description": "Output only. The signature of `signature_algorithm`, generated using the client's private key using `signature_algorithm`. This field is only present after the`SignData` operation has finished.", + "format": "byte", + "readOnly": true, + "type": "string" + }, + "signatureAlgorithm": { + "description": "Output only. The signature algorithm that the adapter expects the client and backend components to use when processing `sign_data`. This field is only present after the `SignData` operation has been initiated.", + "enum": [ + "SIGNATURE_ALGORITHM_UNSPECIFIED", + "SIGNATURE_ALGORITHM_RSA_PKCS1_V1_5_SHA256", + "SIGNATURE_ALGORITHM_ECDSA_SHA256" + ], + "enumDescriptions": [ + "Default value. This value is unused.", + "The server-side builds the PKCS#1 DigestInfo, i.e., the SHA256 hash is constructed on the server-side. The client should sign using RSA with PKCS#1 v1.5 padding.", + "The PKCS#1 digest info is built by the server-side and sent to the client unhashed. The client is responsible for signing and hashing. Uses the P-256 curve." + ], + "readOnly": true, + "type": "string" + }, + "startTime": { + "description": "Output only. Server-generated timestamp of when the certificate provisioning process has been created.", + "format": "google-datetime", + "readOnly": true, + "type": "string" + }, + "subjectPublicKeyInfo": { + "description": "Output only. The public key for which a certificate should be provisioned. Represented as a DER-encoded X.509 SubjectPublicKeyInfo.", + "format": "byte", + "readOnly": true, + "type": "string" + } + }, + "type": "object" + }, + "GoogleChromeManagementVersionsV1alpha1ChromeOsDevice": { + "description": "Describes the ChromeOS device that a `CertificateProvisioningProcess` belongs to.", + "id": "GoogleChromeManagementVersionsV1alpha1ChromeOsDevice", + "properties": { + "deviceDirectoryApiId": { + "description": "Output only. The unique Directory API ID of the device. 
This value is the same as the Admin Console's Directory API ID in the ChromeOS Devices tab.", + "readOnly": true, + "type": "string" + }, + "serialNumber": { + "description": "Output only. Device serial number. This value is the same as the Admin Console's Serial Number in the ChromeOS Devices tab.", + "readOnly": true, + "type": "string" + } + }, + "type": "object" + }, + "GoogleChromeManagementVersionsV1alpha1ChromeOsUserSession": { + "description": "Describes the ChromeOS user session that a `CertificateProvisioningProcess` belongs to.", + "id": "GoogleChromeManagementVersionsV1alpha1ChromeOsUserSession", + "properties": { + "chromeOsDevice": { + "$ref": "GoogleChromeManagementVersionsV1alpha1ChromeOsDevice", + "description": "Output only. This field contains information about the ChromeOS device that the user session is running on. It is only set if the user session is affiliated, i.e. if the user is managed by the same organization that managed the ChromeOS device.", + "readOnly": true + }, + "userDirectoryApiId": { + "description": "Output only. The unique Directory API ID of the user.", + "readOnly": true, + "type": "string" + }, + "userPrimaryEmail": { + "description": "Output only. The primary e-mail address of the user.", + "readOnly": true, + "type": "string" + } + }, + "type": "object" + }, + "GoogleChromeManagementVersionsV1alpha1SignDataMetadata": { + "description": "Metadata for the long-running operation returned by signData.", + "id": "GoogleChromeManagementVersionsV1alpha1SignDataMetadata", + "properties": { + "startTime": { + "description": "Output only. Start time of the SignData operation.", + "format": "google-datetime", + "readOnly": true, + "type": "string" + } + }, + "type": "object" + }, + "GoogleChromeManagementVersionsV1alpha1SignDataResponse": { + "description": "Response message for requesting a signature from the client that initiated a certificate provisioning process.", + "id": "GoogleChromeManagementVersionsV1alpha1SignDataResponse", + "properties": { + "certificateProvisioningProcess": { + "$ref": "GoogleChromeManagementVersionsV1alpha1CertificateProvisioningProcess", + "description": "Output only. The certificate provisioning process. The signature generated by the client will be available in the `signature` field of `CertificateProvisioningProcess`.", + "readOnly": true + } + }, + "type": "object" + }, "GoogleProtobufEmpty": { "description": "A generic empty message that you can re-use to avoid defining duplicated empty messages in your APIs. A typical example is to use it as the request or the response type of an API method. For instance: service Foo { rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty); }", "id": "GoogleProtobufEmpty", diff --git a/discovery/googleapis/chromepolicy__v1.json b/discovery/googleapis/chromepolicy__v1.json index a4f2d893e..89ac4ce6b 100644 --- a/discovery/googleapis/chromepolicy__v1.json +++ b/discovery/googleapis/chromepolicy__v1.json @@ -28,7 +28,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20241004", + "revision": "20241118", "rootUrl": "https://chromepolicy.googleapis.com/", "servicePath": "", "title": "Chrome Policy API", @@ -1788,7 +1788,7 @@ "type": "string" }, "syntax": { - "description": "The syntax of the proto file. The supported values are \"proto2\", \"proto3\", and \"editions\". If `edition` is present, this value must be \"editions\".", + "description": "The syntax of the proto file. The supported values are \"proto2\", \"proto3\", and \"editions\". 
If `edition` is present, this value must be \"editions\". WARNING: This field should only be used by protobuf plugins or special cases like the proto compiler. Other uses are discouraged and developers should rely on the protoreflect APIs for their client language.", "type": "string" } }, diff --git a/discovery/googleapis/classroom__v1.json b/discovery/googleapis/classroom__v1.json index 49178ed44..0615c0e4d 100644 --- a/discovery/googleapis/classroom__v1.json +++ b/discovery/googleapis/classroom__v1.json @@ -87,7 +87,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20241007", + "revision": "20241209", "rootUrl": "https://classroom.googleapis.com/", "servicePath": "", "title": "Google Classroom API", @@ -1226,6 +1226,51 @@ "scopes": [ "https://www.googleapis.com/auth/classroom.coursework.students" ] + }, + "updateRubric": { + "description": "Updates a rubric. See google.classroom.v1.Rubric for details of which fields can be updated. Rubric update capabilities are [limited](/classroom/rubrics/limitations) once grading has started. The requesting user and course owner must have rubrics creation capabilities. For details, see [licensing requirements](https://developers.google.com/classroom/rubrics/limitations#license-requirements). This request must be made by the Google Cloud console of the [OAuth client ID](https://support.google.com/cloud/answer/6158849) used to create the parent course work item. This method returns the following error codes: * `PERMISSION_DENIED` if the requesting developer project didn't create the corresponding course work, if the user isn't permitted to make the requested modification to the rubric, or for access errors. This error code is also returned if grading has already started on the rubric. * `INVALID_ARGUMENT` if the request is malformed and for the following request error: * `RubricCriteriaInvalidFormat` * `NOT_FOUND` if the requested course, course work, or rubric doesn't exist or if the user doesn't have access to the corresponding course work. * `INTERNAL` if grading has already started on the rubric.", + "flatPath": "v1/courses/{courseId}/courseWork/{courseWorkId}/rubric", + "httpMethod": "PATCH", + "id": "classroom.courses.courseWork.updateRubric", + "parameterOrder": [ + "courseId", + "courseWorkId" + ], + "parameters": { + "courseId": { + "description": "Required. Identifier of the course.", + "location": "path", + "required": true, + "type": "string" + }, + "courseWorkId": { + "description": "Required. Identifier of the course work.", + "location": "path", + "required": true, + "type": "string" + }, + "id": { + "description": "Optional. Identifier of the rubric.", + "location": "query", + "type": "string" + }, + "updateMask": { + "description": "Optional. Mask that identifies which fields on the rubric to update. This field is required to do an update. The update fails if invalid fields are specified. There are multiple options to define the criteria of a rubric: the `source_spreadsheet_id` and the `criteria` list. Only one of these can be used at a time to define a rubric. The rubric `criteria` list is fully replaced by the rubric criteria specified in the update request. For example, if a criterion or level is missing from the request, it is deleted. New criteria and levels are added and an ID is assigned. Existing criteria and levels retain the previously assigned ID if the ID is specified in the request. 
The following fields can be specified by teachers: * `criteria` * `source_spreadsheet_id`", + "format": "google-fieldmask", + "location": "query", + "type": "string" + } + }, + "path": "v1/courses/{courseId}/courseWork/{courseWorkId}/rubric", + "request": { + "$ref": "Rubric" + }, + "response": { + "$ref": "Rubric" + }, + "scopes": [ + "https://www.googleapis.com/auth/classroom.coursework.students" + ] } }, "resources": { @@ -1565,6 +1610,215 @@ } } }, + "rubrics": { + "methods": { + "create": { + "description": "Creates a rubric. The requesting user and course owner must have rubrics creation capabilities. For details, see [licensing requirements](https://developers.google.com/classroom/rubrics/limitations#license-requirements). For further details, see [Rubrics structure and known limitations](/classroom/rubrics/limitations). This request must be made by the Google Cloud console of the [OAuth client ID](https://support.google.com/cloud/answer/6158849) used to create the parent course work item. This method returns the following error codes: * `PERMISSION_DENIED` if the requesting user isn't permitted to create rubrics for course work in the requested course. * `INTERNAL` if the request has insufficient OAuth scopes. * `INVALID_ARGUMENT` if the request is malformed and for the following request error: * `RubricCriteriaInvalidFormat` * `NOT_FOUND` if the requested course or course work don't exist or the user doesn't have access to the course or course work. * `FAILED_PRECONDITION` for the following request error: * `AttachmentNotVisible`", + "flatPath": "v1/courses/{courseId}/courseWork/{courseWorkId}/rubrics", + "httpMethod": "POST", + "id": "classroom.courses.courseWork.rubrics.create", + "parameterOrder": [ + "courseId", + "courseWorkId" + ], + "parameters": { + "courseId": { + "description": "Required. Identifier of the course.", + "location": "path", + "required": true, + "type": "string" + }, + "courseWorkId": { + "description": "Required. Identifier of the course work.", + "location": "path", + "required": true, + "type": "string" + } + }, + "path": "v1/courses/{courseId}/courseWork/{courseWorkId}/rubrics", + "request": { + "$ref": "Rubric" + }, + "response": { + "$ref": "Rubric" + }, + "scopes": [ + "https://www.googleapis.com/auth/classroom.coursework.students" + ] + }, + "delete": { + "description": "Deletes a rubric. The requesting user and course owner must have rubrics creation capabilities. For details, see [licensing requirements](https://developers.google.com/classroom/rubrics/limitations#license-requirements). This request must be made by the Google Cloud console of the [OAuth client ID](https://support.google.com/cloud/answer/6158849) used to create the corresponding rubric. This method returns the following error codes: * `PERMISSION_DENIED` if the requesting developer project didn't create the corresponding rubric, or if the requesting user isn't permitted to delete the requested rubric. * `NOT_FOUND` if no rubric exists with the requested ID or the user does not have access to the course, course work, or rubric. * `INVALID_ARGUMENT` if grading has already started on the rubric.", + "flatPath": "v1/courses/{courseId}/courseWork/{courseWorkId}/rubrics/{id}", + "httpMethod": "DELETE", + "id": "classroom.courses.courseWork.rubrics.delete", + "parameterOrder": [ + "courseId", + "courseWorkId", + "id" + ], + "parameters": { + "courseId": { + "description": "Required. 
Identifier of the course.", + "location": "path", + "required": true, + "type": "string" + }, + "courseWorkId": { + "description": "Required. Identifier of the course work.", + "location": "path", + "required": true, + "type": "string" + }, + "id": { + "description": "Required. Identifier of the rubric.", + "location": "path", + "required": true, + "type": "string" + } + }, + "path": "v1/courses/{courseId}/courseWork/{courseWorkId}/rubrics/{id}", + "response": { + "$ref": "Empty" + }, + "scopes": [ + "https://www.googleapis.com/auth/classroom.coursework.students" + ] + }, + "get": { + "description": "Returns a rubric. This method returns the following error codes: * `PERMISSION_DENIED` for access errors. * `INVALID_ARGUMENT` if the request is malformed. * `NOT_FOUND` if the requested course, course work, or rubric doesn't exist or if the user doesn't have access to the corresponding course work.", + "flatPath": "v1/courses/{courseId}/courseWork/{courseWorkId}/rubrics/{id}", + "httpMethod": "GET", + "id": "classroom.courses.courseWork.rubrics.get", + "parameterOrder": [ + "courseId", + "courseWorkId", + "id" + ], + "parameters": { + "courseId": { + "description": "Required. Identifier of the course.", + "location": "path", + "required": true, + "type": "string" + }, + "courseWorkId": { + "description": "Required. Identifier of the course work.", + "location": "path", + "required": true, + "type": "string" + }, + "id": { + "description": "Required. Identifier of the rubric.", + "location": "path", + "required": true, + "type": "string" + } + }, + "path": "v1/courses/{courseId}/courseWork/{courseWorkId}/rubrics/{id}", + "response": { + "$ref": "Rubric" + }, + "scopes": [ + "https://www.googleapis.com/auth/classroom.coursework.me", + "https://www.googleapis.com/auth/classroom.coursework.me.readonly", + "https://www.googleapis.com/auth/classroom.coursework.students", + "https://www.googleapis.com/auth/classroom.coursework.students.readonly" + ] + }, + "list": { + "description": "Returns a list of rubrics that the requester is permitted to view. This method returns the following error codes: * `PERMISSION_DENIED` for access errors. * `INVALID_ARGUMENT` if the request is malformed. * `NOT_FOUND` if the requested course or course work doesn't exist or if the user doesn't have access to the corresponding course work.", + "flatPath": "v1/courses/{courseId}/courseWork/{courseWorkId}/rubrics", + "httpMethod": "GET", + "id": "classroom.courses.courseWork.rubrics.list", + "parameterOrder": [ + "courseId", + "courseWorkId" + ], + "parameters": { + "courseId": { + "description": "Required. Identifier of the course.", + "location": "path", + "required": true, + "type": "string" + }, + "courseWorkId": { + "description": "Required. Identifier of the course work.", + "location": "path", + "required": true, + "type": "string" + }, + "pageSize": { + "description": "The maximum number of rubrics to return. If unspecified, at most 1 rubric is returned. The maximum value is 1; values above 1 are coerced to 1.", + "format": "int32", + "location": "query", + "type": "integer" + }, + "pageToken": { + "description": "nextPageToken value returned from a previous list call, indicating that the subsequent page of results should be returned. 
The list request must be otherwise identical to the one that resulted in this token.", + "location": "query", + "type": "string" + } + }, + "path": "v1/courses/{courseId}/courseWork/{courseWorkId}/rubrics", + "response": { + "$ref": "ListRubricsResponse" + }, + "scopes": [ + "https://www.googleapis.com/auth/classroom.coursework.me", + "https://www.googleapis.com/auth/classroom.coursework.me.readonly", + "https://www.googleapis.com/auth/classroom.coursework.students", + "https://www.googleapis.com/auth/classroom.coursework.students.readonly" + ] + }, + "patch": { + "description": "Updates a rubric. See google.classroom.v1.Rubric for details of which fields can be updated. Rubric update capabilities are [limited](/classroom/rubrics/limitations) once grading has started. The requesting user and course owner must have rubrics creation capabilities. For details, see [licensing requirements](https://developers.google.com/classroom/rubrics/limitations#license-requirements). This request must be made by the Google Cloud console of the [OAuth client ID](https://support.google.com/cloud/answer/6158849) used to create the parent course work item. This method returns the following error codes: * `PERMISSION_DENIED` if the requesting developer project didn't create the corresponding course work, if the user isn't permitted to make the requested modification to the rubric, or for access errors. This error code is also returned if grading has already started on the rubric. * `INVALID_ARGUMENT` if the request is malformed and for the following request error: * `RubricCriteriaInvalidFormat` * `NOT_FOUND` if the requested course, course work, or rubric doesn't exist or if the user doesn't have access to the corresponding course work. * `INTERNAL` if grading has already started on the rubric.", + "flatPath": "v1/courses/{courseId}/courseWork/{courseWorkId}/rubrics/{id}", + "httpMethod": "PATCH", + "id": "classroom.courses.courseWork.rubrics.patch", + "parameterOrder": [ + "courseId", + "courseWorkId", + "id" + ], + "parameters": { + "courseId": { + "description": "Required. Identifier of the course.", + "location": "path", + "required": true, + "type": "string" + }, + "courseWorkId": { + "description": "Required. Identifier of the course work.", + "location": "path", + "required": true, + "type": "string" + }, + "id": { + "description": "Optional. Identifier of the rubric.", + "location": "path", + "required": true, + "type": "string" + }, + "updateMask": { + "description": "Optional. Mask that identifies which fields on the rubric to update. This field is required to do an update. The update fails if invalid fields are specified. There are multiple options to define the criteria of a rubric: the `source_spreadsheet_id` and the `criteria` list. Only one of these can be used at a time to define a rubric. The rubric `criteria` list is fully replaced by the rubric criteria specified in the update request. For example, if a criterion or level is missing from the request, it is deleted. New criteria and levels are added and an ID is assigned. Existing criteria and levels retain the previously assigned ID if the ID is specified in the request. 
The following fields can be specified by teachers: * `criteria` * `source_spreadsheet_id`", + "format": "google-fieldmask", + "location": "query", + "type": "string" + } + }, + "path": "v1/courses/{courseId}/courseWork/{courseWorkId}/rubrics/{id}", + "request": { + "$ref": "Rubric" + }, + "response": { + "$ref": "Rubric" + }, + "scopes": [ + "https://www.googleapis.com/auth/classroom.coursework.students" + ] + } + } + }, "studentSubmissions": { "methods": { "get": { @@ -4410,6 +4664,32 @@ }, "type": "object" }, + "Criterion": { + "description": "A rubric criterion. Each criterion is a dimension on which performance is rated.", + "id": "Criterion", + "properties": { + "description": { + "description": "The description of the criterion.", + "type": "string" + }, + "id": { + "description": "The criterion ID. On creation, an ID is assigned.", + "type": "string" + }, + "levels": { + "description": "The list of levels within this criterion.", + "items": { + "$ref": "Level" + }, + "type": "array" + }, + "title": { + "description": "The title of the criterion.", + "type": "string" + } + }, + "type": "object" + }, "Date": { "description": "Represents a whole or partial calendar date, such as a birthday. The time of day and time zone are either specified elsewhere or are insignificant. The date is relative to the Gregorian Calendar. This can represent one of the following: * A full date, with non-zero year, month, and day values. * A month and day, with a zero year (for example, an anniversary). * A year on its own, with a zero month and a zero day. * A year and month, with a zero day (for example, a credit card expiration date). Related types: * google.type.TimeOfDay * google.type.DateTime * google.protobuf.Timestamp", "id": "Date", @@ -4785,6 +5065,30 @@ }, "type": "object" }, + "Level": { + "description": "A level of the criterion.", + "id": "Level", + "properties": { + "description": { + "description": "The description of the level.", + "type": "string" + }, + "id": { + "description": "The level ID. On creation, an ID is assigned.", + "type": "string" + }, + "points": { + "description": "Optional points associated with this level. If set, all levels within the rubric must specify points and the value must be distinct across all levels within a single criterion. 0 is distinct from no points.", + "format": "double", + "type": "number" + }, + "title": { + "description": "The title of the level. If the level has no points set, title must be set.", + "type": "string" + } + }, + "type": "object" + }, "Link": { "description": "URL item.", "id": "Link", @@ -4966,6 +5270,24 @@ }, "type": "object" }, + "ListRubricsResponse": { + "description": "Response when listing rubrics.", + "id": "ListRubricsResponse", + "properties": { + "nextPageToken": { + "description": "Token identifying the next page of results to return. If empty, no further results are available.", + "type": "string" + }, + "rubrics": { + "description": "Rubrics that match the request.", + "items": { + "$ref": "Rubric" + }, + "type": "array" + } + }, + "type": "object" + }, "ListStudentSubmissionsResponse": { "description": "Response when listing student submissions.", "id": "ListStudentSubmissionsResponse", @@ -5226,6 +5548,68 @@ "properties": {}, "type": "object" }, + "Rubric": { + "description": "The rubric of the course work. A rubric is a scoring guide used to evaluate student work and give feedback. 
For further details, see [Rubrics structure and known limitations](/classroom/rubrics/limitations).", + "id": "Rubric", + "properties": { + "courseId": { + "description": "Identifier of the course. Read-only.", + "type": "string" + }, + "courseWorkId": { + "description": "Identifier for the course work this corresponds to. Read-only.", + "type": "string" + }, + "creationTime": { + "description": "Output only. Timestamp when this rubric was created. Read-only.", + "format": "google-datetime", + "readOnly": true, + "type": "string" + }, + "criteria": { + "description": "List of criteria. Each criterion is a dimension on which performance is rated.", + "items": { + "$ref": "Criterion" + }, + "type": "array" + }, + "id": { + "description": "Classroom-assigned identifier for the rubric. This is unique among rubrics for the relevant course work. Read-only.", + "type": "string" + }, + "sourceSpreadsheetId": { + "description": "Input only. Immutable. Google Sheets ID of the spreadsheet. This spreadsheet must contain formatted rubric settings. See [Create or reuse a rubric for an assignment](https://support.google.com/edu/classroom/answer/9335069). Use of this field requires the `https://www.googleapis.com/auth/spreadsheets.readonly` or `https://www.googleapis.com/auth/spreadsheets` scope.", + "type": "string" + }, + "updateTime": { + "description": "Output only. Timestamp of the most recent change to this rubric. Read-only.", + "format": "google-datetime", + "readOnly": true, + "type": "string" + } + }, + "type": "object" + }, + "RubricGrade": { + "description": "A rubric grade set for the student submission. There is at most one entry per rubric criterion.", + "id": "RubricGrade", + "properties": { + "criterionId": { + "description": "Optional. Criterion ID.", + "type": "string" + }, + "levelId": { + "description": "Optional. Optional level ID of the selected level. If empty, no level was selected.", + "type": "string" + }, + "points": { + "description": "Optional. Optional points assigned for this criterion, typically based on the level. Levels might or might not have points. If unset, no points were set for this criterion.", + "format": "double", + "type": "number" + } + }, + "type": "object" + }, "SharedDriveFile": { "description": "Drive file that is used as material for course work.", "id": "SharedDriveFile", @@ -5347,6 +5731,13 @@ "format": "double", "type": "number" }, + "assignedRubricGrades": { + "additionalProperties": { + "$ref": "RubricGrade" + }, + "description": "Assigned rubric grades based on the rubric's Criteria. This map is empty if there is no rubric attached to this course work or if a rubric is attached, but no grades have been set on any Criteria. Entries are only populated for grades that have been set. Key: The rubric's criterion ID. Read-only.", + "type": "object" + }, "assignmentSubmission": { "$ref": "AssignmentSubmission", "description": "Submission content when course_work_type is ASSIGNMENT. Students can modify this content using ModifyAttachments." @@ -5389,6 +5780,13 @@ "format": "double", "type": "number" }, + "draftRubricGrades": { + "additionalProperties": { + "$ref": "RubricGrade" + }, + "description": "Pending rubric grades based on the rubric's criteria. This map is empty if there is no rubric attached to this course work or if a rubric is attached, but no grades have been set on any criteria. Entries are only populated for grades that have been set. Key: The rubric's criterion ID. 
Read-only.", + "type": "object" + }, "id": { "description": "Classroom-assigned Identifier for the student submission. This is unique among submissions for the relevant course work. Read-only.", "type": "string" diff --git a/discovery/googleapis/cloudasset__v1.json b/discovery/googleapis/cloudasset__v1.json index 6f724c57d..942ca8583 100644 --- a/discovery/googleapis/cloudasset__v1.json +++ b/discovery/googleapis/cloudasset__v1.json @@ -25,7 +25,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20240831", + "revision": "20241211", "rootUrl": "https://cloudasset.googleapis.com/", "servicePath": "", "title": "Cloud Asset API", @@ -1172,7 +1172,7 @@ "description": "The main analysis that matches the original request." }, "serviceAccountImpersonationAnalysis": { - "description": "The service account impersonation analysis if AnalyzeIamPolicyRequest.analyze_service_account_impersonation is enabled.", + "description": "The service account impersonation analysis if IamPolicyAnalysisQuery.Options.analyze_service_account_impersonation is enabled.", "items": { "$ref": "IamPolicyAnalysis" }, @@ -1622,7 +1622,7 @@ "id": "EffectiveTagDetails", "properties": { "attachedResource": { - "description": "The [full resource name](https://cloud.google.com/asset-inventory/docs/resource-name-format) of the ancestor from which an effective_tag is inherited, according to [tag inheritance](https://cloud.google.com/resource-manager/docs/tags/tags-overview#inheritance).", + "description": "The [full resource name](https://cloud.google.com/asset-inventory/docs/resource-name-format) of the ancestor from which effective_tags are inherited, according to [tag inheritance](https://cloud.google.com/resource-manager/docs/tags/tags-overview#inheritance).", "type": "string" }, "effectiveTags": { @@ -1886,7 +1886,7 @@ "description": "A Google Cloud resource governed by the organization policies of the AnalyzeOrgPolicyGovernedAssetsRequest.constraint." }, "policyBundle": { - "description": "The ordered list of all organization policies from the AnalyzeOrgPoliciesResponse.OrgPolicyResult.consolidated_policy.attached_resource to the scope specified in the request. If the constraint is defined with default policy, it will also appear in the list.", + "description": "The ordered list of all organization policies from the consolidated_policy.attached_resource to the scope specified in the request. If the constraint is defined with default policy, it will also appear in the list.", "items": { "$ref": "AnalyzerOrgPolicy" }, @@ -2176,7 +2176,7 @@ "type": "string" }, "policyBundle": { - "description": "The ordered list of all organization policies from the AnalyzeOrgPoliciesResponse.OrgPolicyResult.consolidated_policy.attached_resource. to the scope specified in the request. If the constraint is defined with default policy, it will also appear in the list.", + "description": "The ordered list of all organization policies from the consolidated_policy.attached_resource. to the scope specified in the request. If the constraint is defined with default policy, it will also appear in the list.", "items": { "$ref": "AnalyzerOrgPolicy" }, @@ -2873,6 +2873,10 @@ "egressTo": { "$ref": "GoogleIdentityAccesscontextmanagerV1EgressTo", "description": "Defines the conditions on the ApiOperation and destination resources that cause this EgressPolicy to apply." + }, + "title": { + "description": "Optional. Human-readable title for the egress rule. 
The title must be unique within the perimeter and can not exceed 100 characters. Within the access policy, the combined length of all rule titles must not exceed 240,000 characters.", + "type": "string" } }, "type": "object" @@ -2964,6 +2968,10 @@ "ingressTo": { "$ref": "GoogleIdentityAccesscontextmanagerV1IngressTo", "description": "Defines the conditions on the ApiOperation and request destination that cause this IngressPolicy to apply." + }, + "title": { + "description": "Optional. Human-readable title for the ingress rule. The title must be unique within the perimeter and can not exceed 100 characters. Within the access policy, the combined length of all rule titles must not exceed 240,000 characters.", + "type": "string" } }, "type": "object" @@ -3064,6 +3072,10 @@ "description": "Description of the `ServicePerimeter` and its use. Does not affect behavior.", "type": "string" }, + "etag": { + "description": "Optional. An opaque identifier for the current version of the `ServicePerimeter`. This identifier does not follow any specific format. If an etag is not provided, the operation will be performed as if a valid etag is provided.", + "type": "string" + }, "name": { "description": "Identifier. Resource name for the `ServicePerimeter`. Format: `accessPolicies/{access_policy}/servicePerimeters/{service_perimeter}`. The `service_perimeter` component must begin with a letter, followed by alphanumeric characters or `_`. After you create a `ServicePerimeter`, you cannot change its `name`.", "type": "string" @@ -3660,7 +3672,7 @@ "properties": { "consolidatedPolicy": { "$ref": "AnalyzerOrgPolicy", - "description": "The consolidated organization policy for the analyzed resource. The consolidated organization policy is computed by merging and evaluating AnalyzeOrgPoliciesResponse.policy_bundle. The evaluation will respect the organization policy [hierarchy rules](https://cloud.google.com/resource-manager/docs/organization-policy/understanding-hierarchy)." + "description": "The consolidated organization policy for the analyzed resource. The consolidated organization policy is computed by merging and evaluating policy_bundle. The evaluation will respect the organization policy [hierarchy rules](https://cloud.google.com/resource-manager/docs/organization-policy/understanding-hierarchy)." }, "folders": { "description": "The folder(s) that this consolidated policy belongs to, in the format of folders/{FOLDER_NUMBER}. This field is available when the consolidated policy belongs (directly or cascadingly) to one or more folders.", @@ -3674,7 +3686,7 @@ "type": "string" }, "policyBundle": { - "description": "The ordered list of all organization policies from the AnalyzeOrgPoliciesResponse.OrgPolicyResult.consolidated_policy.attached_resource. to the scope specified in the request. If the constraint is defined with default policy, it will also appear in the list.", + "description": "The ordered list of all organization policies from the consolidated_policy.attached_resource. to the scope specified in the request. 
If the constraint is defined with default policy, it will also appear in the list.", "items": { "$ref": "AnalyzerOrgPolicy" }, diff --git a/discovery/googleapis/cloudbuild__v1.json b/discovery/googleapis/cloudbuild__v1.json index a25130ab4..5e2fd84e1 100644 --- a/discovery/googleapis/cloudbuild__v1.json +++ b/discovery/googleapis/cloudbuild__v1.json @@ -15,6 +15,233 @@ "description": "Creates and manages builds on Google Cloud Platform.", "discoveryVersion": "v1", "documentationLink": "https://cloud.google.com/cloud-build/docs/", + "endpoints": [ + { + "description": "Regional Endpoint", + "endpointUrl": "https://cloudbuild.africa-south1.rep.googleapis.com/", + "location": "africa-south1" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://cloudbuild.asia-east1.rep.googleapis.com/", + "location": "asia-east1" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://cloudbuild.asia-east2.rep.googleapis.com/", + "location": "asia-east2" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://cloudbuild.asia-northeast1.rep.googleapis.com/", + "location": "asia-northeast1" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://cloudbuild.asia-northeast2.rep.googleapis.com/", + "location": "asia-northeast2" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://cloudbuild.asia-northeast3.rep.googleapis.com/", + "location": "asia-northeast3" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://cloudbuild.asia-south1.rep.googleapis.com/", + "location": "asia-south1" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://cloudbuild.asia-south2.rep.googleapis.com/", + "location": "asia-south2" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://cloudbuild.asia-southeast1.rep.googleapis.com/", + "location": "asia-southeast1" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://cloudbuild.asia-southeast2.rep.googleapis.com/", + "location": "asia-southeast2" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://cloudbuild.australia-southeast1.rep.googleapis.com/", + "location": "australia-southeast1" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://cloudbuild.australia-southeast2.rep.googleapis.com/", + "location": "australia-southeast2" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://cloudbuild.europe-central2.rep.googleapis.com/", + "location": "europe-central2" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://cloudbuild.europe-north1.rep.googleapis.com/", + "location": "europe-north1" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://cloudbuild.europe-southwest1.rep.googleapis.com/", + "location": "europe-southwest1" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://cloudbuild.europe-west1.rep.googleapis.com/", + "location": "europe-west1" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://cloudbuild.europe-west10.rep.googleapis.com/", + "location": "europe-west10" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://cloudbuild.europe-west12.rep.googleapis.com/", + "location": "europe-west12" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://cloudbuild.europe-west2.rep.googleapis.com/", + "location": "europe-west2" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://cloudbuild.europe-west3.rep.googleapis.com/", + 
"location": "europe-west3" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://cloudbuild.europe-west4.rep.googleapis.com/", + "location": "europe-west4" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://cloudbuild.europe-west6.rep.googleapis.com/", + "location": "europe-west6" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://cloudbuild.europe-west8.rep.googleapis.com/", + "location": "europe-west8" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://cloudbuild.europe-west9.rep.googleapis.com/", + "location": "europe-west9" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://cloudbuild.me-central1.rep.googleapis.com/", + "location": "me-central1" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://cloudbuild.me-central2.rep.googleapis.com/", + "location": "me-central2" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://cloudbuild.me-west1.rep.googleapis.com/", + "location": "me-west1" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://cloudbuild.northamerica-northeast1.rep.googleapis.com/", + "location": "northamerica-northeast1" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://cloudbuild.northamerica-northeast2.rep.googleapis.com/", + "location": "northamerica-northeast2" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://cloudbuild.northamerica-south1.rep.googleapis.com/", + "location": "northamerica-south1" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://cloudbuild.southamerica-east1.rep.googleapis.com/", + "location": "southamerica-east1" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://cloudbuild.southamerica-west1.rep.googleapis.com/", + "location": "southamerica-west1" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://cloudbuild.us-central1.rep.googleapis.com/", + "location": "us-central1" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://cloudbuild.us-central2.rep.googleapis.com/", + "location": "us-central2" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://cloudbuild.us-east1.rep.googleapis.com/", + "location": "us-east1" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://cloudbuild.us-east10.rep.googleapis.com/", + "location": "us-east10" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://cloudbuild.us-east4.rep.googleapis.com/", + "location": "us-east4" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://cloudbuild.us-east5.rep.googleapis.com/", + "location": "us-east5" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://cloudbuild.us-east7.rep.googleapis.com/", + "location": "us-east7" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://cloudbuild.us-south1.rep.googleapis.com/", + "location": "us-south1" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://cloudbuild.us-west1.rep.googleapis.com/", + "location": "us-west1" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://cloudbuild.us-west2.rep.googleapis.com/", + "location": "us-west2" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://cloudbuild.us-west3.rep.googleapis.com/", + "location": "us-west3" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://cloudbuild.us-west4.rep.googleapis.com/", + 
"location": "us-west4" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://cloudbuild.us-west8.rep.googleapis.com/", + "location": "us-west8" + } + ], "icons": { "x16": "http://www.google.com/images/icons/product/search-16.gif", "x32": "http://www.google.com/images/icons/product/search-32.gif" @@ -25,7 +252,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20240923", + "revision": "20241210", "rootUrl": "https://cloudbuild.googleapis.com/", "servicePath": "", "title": "Cloud Build API", @@ -171,7 +398,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "cloudbuild.operations.cancel", @@ -1556,7 +1783,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "cloudbuild.projects.locations.operations.cancel", @@ -4580,7 +4807,7 @@ "type": "string" }, "cancelRequested": { - "description": "Output only. 
Identifies whether the user has requested cancellation of the operation. Operations that have been cancelled successfully have Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Output only. Identifies whether the user has requested cancellation of the operation. Operations that have been cancelled successfully have google.longrunning.Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "readOnly": true, "type": "boolean" }, @@ -4633,6 +4860,10 @@ "$ref": "NetworkConfig", "description": "Network configuration for the pool." }, + "privateServiceConnect": { + "$ref": "PrivateServiceConnect", + "description": "Immutable. Private Service Connect(PSC) Network configuration for the pool." + }, "workerConfig": { "$ref": "WorkerConfig", "description": "Machine configuration for the workers in the pool." @@ -4640,6 +4871,25 @@ }, "type": "object" }, + "PrivateServiceConnect": { + "description": "Defines the Private Service Connect network configuration for the pool.", + "id": "PrivateServiceConnect", + "properties": { + "networkAttachment": { + "description": "Required. Immutable. The network attachment that the worker network interface is peered to. Must be in the format `projects/{project}/regions/{region}/networkAttachments/{networkAttachment}`. The region of network attachment must be the same as the worker pool. See [Network Attachments](https://cloud.google.com/vpc/docs/about-network-attachments)", + "type": "string" + }, + "publicIpAddressDisabled": { + "description": "Required. Immutable. Disable public IP on the primary network interface. If true, workers are created without any public address, which prevents network egress to public IPs unless a network proxy is configured. If false, workers are created with a public address which allows for public internet egress. The public address only applies to traffic through the primary network interface. If `route_all_traffic` is set to true, all traffic will go through the non-primary network interface, this boolean has no effect.", + "type": "boolean" + }, + "routeAllTraffic": { + "description": "Immutable. Route all traffic through PSC interface. Enable this if you want full control of traffic in the private pool. Configure Cloud NAT for the subnet of network attachment if you need to access public Internet. If false, Only route private IPs, e.g. 
10.0.0.0/8, 172.16.0.0/12, and 192.168.0.0/16 through PSC interface.", + "type": "boolean" + } + }, + "type": "object" + }, "ProcessAppManifestCallbackOperationMetadata": { "description": "Metadata for `ProcessAppManifestCallback` operation.", "id": "ProcessAppManifestCallbackOperationMetadata", diff --git a/discovery/googleapis/cloudbuild__v2.json b/discovery/googleapis/cloudbuild__v2.json index 17c82a3c1..ee971bc7b 100644 --- a/discovery/googleapis/cloudbuild__v2.json +++ b/discovery/googleapis/cloudbuild__v2.json @@ -15,6 +15,233 @@ "description": "Creates and manages builds on Google Cloud Platform.", "discoveryVersion": "v1", "documentationLink": "https://cloud.google.com/cloud-build/docs/", + "endpoints": [ + { + "description": "Regional Endpoint", + "endpointUrl": "https://cloudbuild.africa-south1.rep.googleapis.com/", + "location": "africa-south1" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://cloudbuild.asia-east1.rep.googleapis.com/", + "location": "asia-east1" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://cloudbuild.asia-east2.rep.googleapis.com/", + "location": "asia-east2" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://cloudbuild.asia-northeast1.rep.googleapis.com/", + "location": "asia-northeast1" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://cloudbuild.asia-northeast2.rep.googleapis.com/", + "location": "asia-northeast2" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://cloudbuild.asia-northeast3.rep.googleapis.com/", + "location": "asia-northeast3" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://cloudbuild.asia-south1.rep.googleapis.com/", + "location": "asia-south1" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://cloudbuild.asia-south2.rep.googleapis.com/", + "location": "asia-south2" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://cloudbuild.asia-southeast1.rep.googleapis.com/", + "location": "asia-southeast1" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://cloudbuild.asia-southeast2.rep.googleapis.com/", + "location": "asia-southeast2" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://cloudbuild.australia-southeast1.rep.googleapis.com/", + "location": "australia-southeast1" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://cloudbuild.australia-southeast2.rep.googleapis.com/", + "location": "australia-southeast2" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://cloudbuild.europe-central2.rep.googleapis.com/", + "location": "europe-central2" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://cloudbuild.europe-north1.rep.googleapis.com/", + "location": "europe-north1" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://cloudbuild.europe-southwest1.rep.googleapis.com/", + "location": "europe-southwest1" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://cloudbuild.europe-west1.rep.googleapis.com/", + "location": "europe-west1" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://cloudbuild.europe-west10.rep.googleapis.com/", + "location": "europe-west10" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://cloudbuild.europe-west12.rep.googleapis.com/", + "location": "europe-west12" + }, + { + "description": "Regional Endpoint", + "endpointUrl": 
"https://cloudbuild.europe-west2.rep.googleapis.com/", + "location": "europe-west2" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://cloudbuild.europe-west3.rep.googleapis.com/", + "location": "europe-west3" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://cloudbuild.europe-west4.rep.googleapis.com/", + "location": "europe-west4" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://cloudbuild.europe-west6.rep.googleapis.com/", + "location": "europe-west6" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://cloudbuild.europe-west8.rep.googleapis.com/", + "location": "europe-west8" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://cloudbuild.europe-west9.rep.googleapis.com/", + "location": "europe-west9" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://cloudbuild.me-central1.rep.googleapis.com/", + "location": "me-central1" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://cloudbuild.me-central2.rep.googleapis.com/", + "location": "me-central2" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://cloudbuild.me-west1.rep.googleapis.com/", + "location": "me-west1" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://cloudbuild.northamerica-northeast1.rep.googleapis.com/", + "location": "northamerica-northeast1" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://cloudbuild.northamerica-northeast2.rep.googleapis.com/", + "location": "northamerica-northeast2" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://cloudbuild.northamerica-south1.rep.googleapis.com/", + "location": "northamerica-south1" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://cloudbuild.southamerica-east1.rep.googleapis.com/", + "location": "southamerica-east1" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://cloudbuild.southamerica-west1.rep.googleapis.com/", + "location": "southamerica-west1" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://cloudbuild.us-central1.rep.googleapis.com/", + "location": "us-central1" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://cloudbuild.us-central2.rep.googleapis.com/", + "location": "us-central2" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://cloudbuild.us-east1.rep.googleapis.com/", + "location": "us-east1" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://cloudbuild.us-east10.rep.googleapis.com/", + "location": "us-east10" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://cloudbuild.us-east4.rep.googleapis.com/", + "location": "us-east4" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://cloudbuild.us-east5.rep.googleapis.com/", + "location": "us-east5" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://cloudbuild.us-east7.rep.googleapis.com/", + "location": "us-east7" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://cloudbuild.us-south1.rep.googleapis.com/", + "location": "us-south1" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://cloudbuild.us-west1.rep.googleapis.com/", + "location": "us-west1" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://cloudbuild.us-west2.rep.googleapis.com/", + "location": "us-west2" + }, + { + "description": "Regional Endpoint", + 
"endpointUrl": "https://cloudbuild.us-west3.rep.googleapis.com/", + "location": "us-west3" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://cloudbuild.us-west4.rep.googleapis.com/", + "location": "us-west4" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://cloudbuild.us-west8.rep.googleapis.com/", + "location": "us-west8" + } + ], "icons": { "x16": "http://www.google.com/images/icons/product/search-16.gif", "x32": "http://www.google.com/images/icons/product/search-32.gif" @@ -25,7 +252,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20240731", + "revision": "20241210", "rootUrl": "https://cloudbuild.googleapis.com/", "servicePath": "", "title": "Cloud Build API", @@ -789,7 +1016,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v2/projects/{projectsId}/locations/{locationsId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "cloudbuild.projects.locations.operations.cancel", @@ -1738,7 +1965,7 @@ "type": "string" }, "cancelRequested": { - "description": "Output only. Identifies whether the user has requested cancellation of the operation. Operations that have been cancelled successfully have Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Output only. Identifies whether the user has requested cancellation of the operation. Operations that have been cancelled successfully have google.longrunning.Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "readOnly": true, "type": "boolean" }, diff --git a/discovery/googleapis/cloudchannel__v1.json b/discovery/googleapis/cloudchannel__v1.json index 11be5179c..e5537b039 100644 --- a/discovery/googleapis/cloudchannel__v1.json +++ b/discovery/googleapis/cloudchannel__v1.json @@ -28,7 +28,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20241020", + "revision": "20241202", "rootUrl": "https://cloudchannel.googleapis.com/", "servicePath": "", "title": "Cloud Channel API", @@ -143,7 +143,7 @@ ] }, "listSubscribers": { - "description": "Lists service accounts with subscriber privileges on the Cloud Pub/Sub topic created for this Channel Services account. 
Possible error codes: * PERMISSION_DENIED: The reseller account making the request and the provided reseller account are different, or the impersonated user is not a super admin. * INVALID_ARGUMENT: Required request parameters are missing or invalid. * NOT_FOUND: The topic resource doesn't exist. * INTERNAL: Any non-user error related to a technical issue in the backend. Contact Cloud Channel support. * UNKNOWN: Any non-user error related to a technical issue in the backend. Contact Cloud Channel support. Return value: A list of service email addresses.", + "description": "Lists service accounts with subscriber privileges on the Pub/Sub topic created for this Channel Services account or integrator. Possible error codes: * PERMISSION_DENIED: The reseller account making the request and the provided reseller account are different, or the impersonated user is not a super admin. * INVALID_ARGUMENT: Required request parameters are missing or invalid. * NOT_FOUND: The topic resource doesn't exist. * INTERNAL: Any non-user error related to a technical issue in the backend. Contact Cloud Channel support. * UNKNOWN: Any non-user error related to a technical issue in the backend. Contact Cloud Channel support. Return value: A list of service email addresses.", "flatPath": "v1/accounts/{accountsId}:listSubscribers", "httpMethod": "GET", "id": "cloudchannel.accounts.listSubscribers", @@ -152,12 +152,17 @@ ], "parameters": { "account": { - "description": "Optional. Resource name of the account.", + "description": "Optional. Resource name of the account. Required if integrator is not provided. Otherwise, leave this field empty/unset.", "location": "path", "pattern": "^accounts/[^/]+$", "required": true, "type": "string" }, + "integrator": { + "description": "Optional. Resource name of the integrator. Required if account is not provided. Otherwise, leave this field empty/unset.", + "location": "query", + "type": "string" + }, "pageSize": { "description": "Optional. The maximum number of service accounts to return. The service may return fewer than this value. If unspecified, returns at most 100 service accounts. The maximum value is 1000; the server will coerce values above 1000.", "format": "int32", @@ -235,7 +240,7 @@ ] }, "register": { - "description": "Registers a service account with subscriber privileges on the Cloud Pub/Sub topic for this Channel Services account. After you create a subscriber, you get the events through SubscriberEvent Possible error codes: * PERMISSION_DENIED: The reseller account making the request and the provided reseller account are different, or the impersonated user is not a super admin. * INVALID_ARGUMENT: Required request parameters are missing or invalid. * INTERNAL: Any non-user error related to a technical issue in the backend. Contact Cloud Channel support. * UNKNOWN: Any non-user error related to a technical issue in the backend. Contact Cloud Channel support. Return value: The topic name with the registered service email address.", + "description": "Registers a service account with subscriber privileges on the Pub/Sub topic for this Channel Services account or integrator. After you create a subscriber, you get the events through SubscriberEvent Possible error codes: * PERMISSION_DENIED: The reseller account making the request and the provided reseller account are different, or the impersonated user is not a super admin. * INVALID_ARGUMENT: Required request parameters are missing or invalid. * INTERNAL: Any non-user error related to a technical issue in the backend. 
Contact Cloud Channel support. * UNKNOWN: Any non-user error related to a technical issue in the backend. Contact Cloud Channel support. Return value: The topic name with the registered service email address.", "flatPath": "v1/accounts/{accountsId}:register", "httpMethod": "POST", "id": "cloudchannel.accounts.register", @@ -244,7 +249,7 @@ ], "parameters": { "account": { - "description": "Optional. Resource name of the account.", + "description": "Optional. Resource name of the account. Required if integrator is not provided. Otherwise, leave this field empty/unset.", "location": "path", "pattern": "^accounts/[^/]+$", "required": true, @@ -263,7 +268,7 @@ ] }, "unregister": { - "description": "Unregisters a service account with subscriber privileges on the Cloud Pub/Sub topic created for this Channel Services account. If there are no service accounts left with subscriber privileges, this deletes the topic. You can call ListSubscribers to check for these accounts. Possible error codes: * PERMISSION_DENIED: The reseller account making the request and the provided reseller account are different, or the impersonated user is not a super admin. * INVALID_ARGUMENT: Required request parameters are missing or invalid. * NOT_FOUND: The topic resource doesn't exist. * INTERNAL: Any non-user error related to a technical issue in the backend. Contact Cloud Channel support. * UNKNOWN: Any non-user error related to a technical issue in the backend. Contact Cloud Channel support. Return value: The topic name that unregistered the service email address. Returns a success response if the service email address wasn't registered with the topic.", + "description": "Unregisters a service account with subscriber privileges on the Pub/Sub topic created for this Channel Services account or integrator. If there are no service accounts left with subscriber privileges, this deletes the topic. You can call ListSubscribers to check for these accounts. Possible error codes: * PERMISSION_DENIED: The reseller account making the request and the provided reseller account are different, or the impersonated user is not a super admin. * INVALID_ARGUMENT: Required request parameters are missing or invalid. * NOT_FOUND: The topic resource doesn't exist. * INTERNAL: Any non-user error related to a technical issue in the backend. Contact Cloud Channel support. * UNKNOWN: Any non-user error related to a technical issue in the backend. Contact Cloud Channel support. Return value: The topic name that unregistered the service email address. Returns a success response if the service email address wasn't registered with the topic.", "flatPath": "v1/accounts/{accountsId}:unregister", "httpMethod": "POST", "id": "cloudchannel.accounts.unregister", @@ -272,7 +277,7 @@ ], "parameters": { "account": { - "description": "Optional. Resource name of the account.", + "description": "Optional. Resource name of the account. Required if integrator is not provided. Otherwise, leave this field empty/unset.", "location": "path", "pattern": "^accounts/[^/]+$", "required": true, @@ -1904,7 +1909,7 @@ "type": "integer" }, "pageToken": { - "description": "Optional. A token identifying a page of results beyond the first page. Obtained through ListSkuGroups.next_page_token of the previous CloudChannelService.ListSkuGroups call.", + "description": "Optional. A token identifying a page of results beyond the first page. 
Obtained through ListSkuGroupsResponse.next_page_token of the previous CloudChannelService.ListSkuGroups call.", "location": "query", "type": "string" }, @@ -1944,7 +1949,7 @@ "type": "integer" }, "pageToken": { - "description": "Optional. A token identifying a page of results beyond the first page. Obtained through ListSkuGroupBillableSkus.next_page_token of the previous CloudChannelService.ListSkuGroupBillableSkus call.", + "description": "Optional. A token identifying a page of results beyond the first page. Obtained through ListSkuGroupBillableSkusResponse.next_page_token of the previous CloudChannelService.ListSkuGroupBillableSkus call.", "location": "query", "type": "string" }, @@ -1970,10 +1975,111 @@ } } }, + "integrators": { + "methods": { + "listSubscribers": { + "description": "Lists service accounts with subscriber privileges on the Pub/Sub topic created for this Channel Services account or integrator. Possible error codes: * PERMISSION_DENIED: The reseller account making the request and the provided reseller account are different, or the impersonated user is not a super admin. * INVALID_ARGUMENT: Required request parameters are missing or invalid. * NOT_FOUND: The topic resource doesn't exist. * INTERNAL: Any non-user error related to a technical issue in the backend. Contact Cloud Channel support. * UNKNOWN: Any non-user error related to a technical issue in the backend. Contact Cloud Channel support. Return value: A list of service email addresses.", + "flatPath": "v1/integrators/{integratorsId}:listSubscribers", + "httpMethod": "GET", + "id": "cloudchannel.integrators.listSubscribers", + "parameterOrder": [ + "integrator" + ], + "parameters": { + "account": { + "description": "Optional. Resource name of the account. Required if integrator is not provided. Otherwise, leave this field empty/unset.", + "location": "query", + "type": "string" + }, + "integrator": { + "description": "Optional. Resource name of the integrator. Required if account is not provided. Otherwise, leave this field empty/unset.", + "location": "path", + "pattern": "^integrators/[^/]+$", + "required": true, + "type": "string" + }, + "pageSize": { + "description": "Optional. The maximum number of service accounts to return. The service may return fewer than this value. If unspecified, returns at most 100 service accounts. The maximum value is 1000; the server will coerce values above 1000.", + "format": "int32", + "location": "query", + "type": "integer" + }, + "pageToken": { + "description": "Optional. A page token, received from a previous `ListSubscribers` call. Provide this to retrieve the subsequent page. When paginating, all other parameters provided to `ListSubscribers` must match the call that provided the page token.", + "location": "query", + "type": "string" + } + }, + "path": "v1/{+integrator}:listSubscribers", + "response": { + "$ref": "GoogleCloudChannelV1ListSubscribersResponse" + }, + "scopes": [ + "https://www.googleapis.com/auth/apps.order" + ] + }, + "registerSubscriber": { + "description": "Registers a service account with subscriber privileges on the Pub/Sub topic for this Channel Services account or integrator. After you create a subscriber, you get the events through SubscriberEvent Possible error codes: * PERMISSION_DENIED: The reseller account making the request and the provided reseller account are different, or the impersonated user is not a super admin. * INVALID_ARGUMENT: Required request parameters are missing or invalid. 
* INTERNAL: Any non-user error related to a technical issue in the backend. Contact Cloud Channel support. * UNKNOWN: Any non-user error related to a technical issue in the backend. Contact Cloud Channel support. Return value: The topic name with the registered service email address.", + "flatPath": "v1/integrators/{integratorsId}:registerSubscriber", + "httpMethod": "POST", + "id": "cloudchannel.integrators.registerSubscriber", + "parameterOrder": [ + "integrator" + ], + "parameters": { + "integrator": { + "description": "Optional. Resource name of the integrator. Required if account is not provided. Otherwise, leave this field empty/unset.", + "location": "path", + "pattern": "^integrators/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+integrator}:registerSubscriber", + "request": { + "$ref": "GoogleCloudChannelV1RegisterSubscriberRequest" + }, + "response": { + "$ref": "GoogleCloudChannelV1RegisterSubscriberResponse" + }, + "scopes": [ + "https://www.googleapis.com/auth/apps.order" + ] + }, + "unregisterSubscriber": { + "description": "Unregisters a service account with subscriber privileges on the Pub/Sub topic created for this Channel Services account or integrator. If there are no service accounts left with subscriber privileges, this deletes the topic. You can call ListSubscribers to check for these accounts. Possible error codes: * PERMISSION_DENIED: The reseller account making the request and the provided reseller account are different, or the impersonated user is not a super admin. * INVALID_ARGUMENT: Required request parameters are missing or invalid. * NOT_FOUND: The topic resource doesn't exist. * INTERNAL: Any non-user error related to a technical issue in the backend. Contact Cloud Channel support. * UNKNOWN: Any non-user error related to a technical issue in the backend. Contact Cloud Channel support. Return value: The topic name that unregistered the service email address. Returns a success response if the service email address wasn't registered with the topic.", + "flatPath": "v1/integrators/{integratorsId}:unregisterSubscriber", + "httpMethod": "POST", + "id": "cloudchannel.integrators.unregisterSubscriber", + "parameterOrder": [ + "integrator" + ], + "parameters": { + "integrator": { + "description": "Optional. Resource name of the integrator. Required if account is not provided. Otherwise, leave this field empty/unset.", + "location": "path", + "pattern": "^integrators/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+integrator}:unregisterSubscriber", + "request": { + "$ref": "GoogleCloudChannelV1UnregisterSubscriberRequest" + }, + "response": { + "$ref": "GoogleCloudChannelV1UnregisterSubscriberResponse" + }, + "scopes": [ + "https://www.googleapis.com/auth/apps.order" + ] + } + } + }, "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. 
The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "cloudchannel.operations.cancel", @@ -2325,6 +2431,10 @@ }, "type": "array" }, + "priceReferenceId": { + "description": "Optional. Price reference ID for the offer. Optional field only for offers that require additional price information. Used to guarantee that the pricing is consistent between quoting the offer and placing the order. Not yet implemented: if populated in a request, this field isn't evaluated in the API.", + "type": "string" + }, "purchaseOrderId": { "description": "Optional. Purchase order id provided by the reseller.", "type": "string" @@ -2337,7 +2447,7 @@ "type": "object" }, "GoogleCloudChannelV1ChangeParametersRequest": { - "description": "Request message for CloudChannelService.ChangeParametersRequest.", + "description": "Request message for CloudChannelService.ChangeParameters.", "id": "GoogleCloudChannelV1ChangeParametersRequest", "properties": { "parameters": { @@ -2520,7 +2630,7 @@ "type": "boolean" }, "owned": { - "description": "Returns true if the Cloud Identity account is associated with a customer of the Channel Services partner.", + "description": "Returns true if the Cloud Identity account is associated with a customer of the Channel Services partner (with active subscriptions or purchase consents).", "type": "boolean" } }, @@ -2851,7 +2961,7 @@ "type": "string" }, "eventType": { - "description": "Type of event which happened on the customer.", + "description": "Type of event which happened for the customer.", "enum": [ "TYPE_UNSPECIFIED", "PRIMARY_DOMAIN_CHANGED", @@ -3000,6 +3110,10 @@ }, "type": "array" }, + "priceReferenceId": { + "description": "Optional. Price reference ID for the offer. Optional field only for offers that require additional price information. Used to guarantee that the pricing is consistent between quoting the offer and placing the order. Not yet implemented: if this field is populated in a request, it isn't evaluated in the API.", + "type": "string" + }, "provisionedService": { "$ref": "GoogleCloudChannelV1ProvisionedService", "description": "Output only. Service provisioning details for the entitlement.", @@ -3220,7 +3334,7 @@ "type": "string" }, "eventType": { - "description": "Type of event which happened on the entitlement.", + "description": "Type of event which happened for the entitlement.", "enum": [ "TYPE_UNSPECIFIED", "CREATED", @@ -3549,7 +3663,7 @@ "type": "array" }, "nextPageToken": { - "description": "A token to retrieve the next page of results. Pass to ListSkuGroupBillableSkus.page_token to obtain that page.", + "description": "A token to retrieve the next page of results. Pass to ListSkuGroupBillableSkusRequest.page_token to obtain that page.", "type": "string" } }, @@ -3560,7 +3674,7 @@ "id": "GoogleCloudChannelV1ListSkuGroupsResponse", "properties": { "nextPageToken": { - "description": "A token to retrieve the next page of results. 
Pass to ListSkuGroups.page_token to obtain that page.", + "description": "A token to retrieve the next page of results. Pass to ListSkuGroupsRequest.page_token to obtain that page.", "type": "string" }, "skuGroups": { @@ -4212,6 +4326,10 @@ "offer": { "$ref": "GoogleCloudChannelV1Offer", "description": "Offer." + }, + "priceReferenceId": { + "description": "Optional. Price reference ID for the offer. Optional field only for offers that require additional price information. Used to guarantee that the pricing is consistent between quoting the offer and placing the order.", + "type": "string" } }, "type": "object" @@ -4245,6 +4363,14 @@ "description": "Request Message for RegisterSubscriber.", "id": "GoogleCloudChannelV1RegisterSubscriberRequest", "properties": { + "account": { + "description": "Optional. Resource name of the account. Required if integrator is not provided. Otherwise, leave this field empty/unset.", + "type": "string" + }, + "integrator": { + "description": "Optional. Resource name of the integrator. Required if account is not provided. Otherwise, leave this field empty/unset.", + "type": "string" + }, "serviceAccount": { "description": "Required. Service account that provides subscriber access to the registered topic.", "type": "string" @@ -4776,6 +4902,10 @@ "offer": { "$ref": "GoogleCloudChannelV1Offer", "description": "Offer with parameter constraints updated to allow the Transfer." + }, + "priceReferenceId": { + "description": "Optional. Price reference ID for the offer. Optional field only for offers that require additional price information. Used to guarantee that the pricing is consistent between quoting the offer and placing the order.", + "type": "string" } }, "type": "object" @@ -4819,6 +4949,14 @@ "description": "Request Message for UnregisterSubscriber.", "id": "GoogleCloudChannelV1UnregisterSubscriberRequest", "properties": { + "account": { + "description": "Optional. Resource name of the account. Required if integrator is not provided. Otherwise, leave this field empty/unset.", + "type": "string" + }, + "integrator": { + "description": "Optional. Resource name of the integrator. Required if account is not provided. Otherwise, leave this field empty/unset.", + "type": "string" + }, "serviceAccount": { "description": "Required. Service account to unregister from subscriber access to the topic.", "type": "string" @@ -4906,7 +5044,7 @@ "type": "string" }, "eventType": { - "description": "Type of event performed on the Channel Partner.", + "description": "Type of event which happened for the channel partner.", "enum": [ "TYPE_UNSPECIFIED", "LINK_STATE_CHANGED", @@ -4992,7 +5130,7 @@ "type": "string" }, "eventType": { - "description": "Type of event which happened on the customer.", + "description": "Type of event which happened for the customer.", "enum": [ "TYPE_UNSPECIFIED", "PRIMARY_DOMAIN_CHANGED", @@ -5094,7 +5232,7 @@ "type": "array" }, "priceReferenceId": { - "description": "Optional. Price reference ID for the offer. Optional field only for offers that require additional price information. Used to guarantee that the pricing is consistent between quoting the offer and placing the order. Yet to be implemented: this field is currently not evaluated in the API if populated in a request.", + "description": "Optional. Price reference ID for the offer. Optional field only for offers that require additional price information. Used to guarantee that the pricing is consistent between quoting the offer and placing the order. 
Not yet implemented: if this field is populated in a request, it isn't evaluated in the API.", "type": "string" }, "provisionedService": { @@ -5182,7 +5320,7 @@ "type": "string" }, "eventType": { - "description": "Type of event which happened on the entitlement.", + "description": "Type of event which happened for the entitlement.", "enum": [ "TYPE_UNSPECIFIED", "CREATED", @@ -5263,6 +5401,31 @@ }, "type": "object" }, + "GoogleCloudChannelV1alpha1OpportunityEvent": { + "description": "Represents Pub/Sub message content describing opportunity updates.", + "id": "GoogleCloudChannelV1alpha1OpportunityEvent", + "properties": { + "eventType": { + "description": "Type of event which happened for the opportunity.", + "enum": [ + "TYPE_UNSPECIFIED", + "CREATED", + "UPDATED" + ], + "enumDescriptions": [ + "Not used.", + "New opportunity created.", + "Existing opportunity updated." + ], + "type": "string" + }, + "opportunity": { + "description": "Resource name of the opportunity. Format: opportunities/{opportunity}", + "type": "string" + } + }, + "type": "object" + }, "GoogleCloudChannelV1alpha1Parameter": { "description": "Definition for extended entitlement parameters.", "id": "GoogleCloudChannelV1alpha1Parameter", @@ -5532,6 +5695,10 @@ "entitlementEvent": { "$ref": "GoogleCloudChannelV1alpha1EntitlementEvent", "description": "Entitlement event sent as part of Pub/Sub event to partners." + }, + "opportunityEvent": { + "$ref": "GoogleCloudChannelV1alpha1OpportunityEvent", + "description": "Opportunity event sent as part of Pub/Sub event to partners/integrators." } }, "type": "object" @@ -5765,7 +5932,7 @@ "type": "object" }, "GoogleTypeDecimal": { - "description": "A representation of a decimal value, such as 2.5. Clients may convert values into language-native decimal formats, such as Java's BigDecimal or Python's decimal.Decimal. [BigDecimal]: https://docs.oracle.com/en/java/javase/11/docs/api/java.base/java/math/BigDecimal.html [decimal.Decimal]: https://docs.python.org/3/library/decimal.html", + "description": "A representation of a decimal value, such as 2.5. Clients may convert values into language-native decimal formats, such as Java's [BigDecimal](https://docs.oracle.com/en/java/javase/11/docs/api/java.base/java/math/BigDecimal.html) or Python's [decimal.Decimal](https://docs.python.org/3/library/decimal.html).", "id": "GoogleTypeDecimal", "properties": { "value": { @@ -5797,18 +5964,18 @@ "type": "object" }, "GoogleTypePostalAddress": { - "description": "Represents a postal address, e.g. for postal delivery or payments addresses. Given a postal address, a postal service can deliver items to a premise, P.O. Box or similar. It is not intended to model geographical locations (roads, towns, mountains). In typical usage an address would be created via user input or from importing existing data, depending on the type of process. Advice on address input / editing: - Use an internationalization-ready address widget such as https://github.com/google/libaddressinput) - Users should not be presented with UI elements for input or editing of fields outside countries where that field is used. For more guidance on how to use this schema, please see: https://support.google.com/business/answer/6397478", + "description": "Represents a postal address. For example for postal delivery or payments addresses. Given a postal address, a postal service can deliver items to a premise, P.O. Box or similar. It is not intended to model geographical locations (roads, towns, mountains). 
In typical usage an address would be created by user input or from importing existing data, depending on the type of process. Advice on address input / editing: - Use an internationalization-ready address widget such as https://github.com/google/libaddressinput) - Users should not be presented with UI elements for input or editing of fields outside countries where that field is used. For more guidance on how to use this schema, see: https://support.google.com/business/answer/6397478", "id": "GoogleTypePostalAddress", "properties": { "addressLines": { - "description": "Unstructured address lines describing the lower levels of an address. Because values in address_lines do not have type information and may sometimes contain multiple values in a single field (e.g. \"Austin, TX\"), it is important that the line order is clear. The order of address lines should be \"envelope order\" for the country/region of the address. In places where this can vary (e.g. Japan), address_language is used to make it explicit (e.g. \"ja\" for large-to-small ordering and \"ja-Latn\" or \"en\" for small-to-large). This way, the most specific line of an address can be selected based on the language. The minimum permitted structural representation of an address consists of a region_code with all remaining information placed in the address_lines. It would be possible to format such an address very approximately without geocoding, but no semantic reasoning could be made about any of the address components until it was at least partially resolved. Creating an address only containing a region_code and address_lines, and then geocoding is the recommended way to handle completely unstructured addresses (as opposed to guessing which parts of the address should be localities or administrative areas).", + "description": "Unstructured address lines describing the lower levels of an address. Because values in address_lines do not have type information and may sometimes contain multiple values in a single field (For example \"Austin, TX\"), it is important that the line order is clear. The order of address lines should be \"envelope order\" for the country/region of the address. In places where this can vary (For example Japan), address_language is used to make it explicit (For example \"ja\" for large-to-small ordering and \"ja-Latn\" or \"en\" for small-to-large). This way, the most specific line of an address can be selected based on the language. The minimum permitted structural representation of an address consists of a region_code with all remaining information placed in the address_lines. It would be possible to format such an address very approximately without geocoding, but no semantic reasoning could be made about any of the address components until it was at least partially resolved. Creating an address only containing a region_code and address_lines, and then geocoding is the recommended way to handle completely unstructured addresses (as opposed to guessing which parts of the address should be localities or administrative areas).", "items": { "type": "string" }, "type": "array" }, "administrativeArea": { - "description": "Optional. Highest administrative subdivision which is used for postal addresses of a country or region. For example, this can be a state, a province, an oblast, or a prefecture. Specifically, for Spain this is the province and not the autonomous community (e.g. \"Barcelona\" and not \"Catalonia\"). Many countries don't use an administrative area in postal addresses. E.g. 
in Switzerland this should be left unpopulated.", + "description": "Optional. Highest administrative subdivision which is used for postal addresses of a country or region. For example, this can be a state, a province, an oblast, or a prefecture. Specifically, for Spain this is the province and not the autonomous community (For example \"Barcelona\" and not \"Catalonia\"). Many countries don't use an administrative area in postal addresses. For example in Switzerland this should be left unpopulated.", "type": "string" }, "languageCode": { @@ -5824,7 +5991,7 @@ "type": "string" }, "postalCode": { - "description": "Optional. Postal code of the address. Not all countries use or require postal codes to be present, but where they are used, they may trigger additional validation with other parts of the address (e.g. state/zip validation in the U.S.A.).", + "description": "Optional. Postal code of the address. Not all countries use or require postal codes to be present, but where they are used, they may trigger additional validation with other parts of the address (For example state/zip validation in the U.S.A.).", "type": "string" }, "recipients": { @@ -5844,7 +6011,7 @@ "type": "integer" }, "sortingCode": { - "description": "Optional. Additional, country-specific, sorting code. This is not used in most regions. Where it is used, the value is either a string like \"CEDEX\", optionally followed by a number (e.g. \"CEDEX 7\"), or just a number alone, representing the \"sector code\" (Jamaica), \"delivery area indicator\" (Malawi) or \"post office indicator\" (e.g. Côte d'Ivoire).", + "description": "Optional. Additional, country-specific, sorting code. This is not used in most regions. Where it is used, the value is either a string like \"CEDEX\", optionally followed by a number (For example \"CEDEX 7\"), or just a number alone, representing the \"sector code\" (Jamaica), \"delivery area indicator\" (Malawi) or \"post office indicator\" (For example Côte d'Ivoire).", "type": "string" }, "sublocality": { @@ -5859,11 +6026,11 @@ "id": "GoogleTypeTimeZone", "properties": { "id": { - "description": "IANA Time Zone Database time zone, e.g. \"America/New_York\".", + "description": "IANA Time Zone Database time zone. For example \"America/New_York\".", "type": "string" }, "version": { - "description": "Optional. IANA Time Zone Database version number, e.g. \"2019a\".", + "description": "Optional. IANA Time Zone Database version number. For example \"2019a\".", "type": "string" } }, diff --git a/discovery/googleapis/cloudcontrolspartner__v1.json b/discovery/googleapis/cloudcontrolspartner__v1.json index 62b53c64b..e352a8368 100644 --- a/discovery/googleapis/cloudcontrolspartner__v1.json +++ b/discovery/googleapis/cloudcontrolspartner__v1.json @@ -25,7 +25,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20240904", + "revision": "20241024", "rootUrl": "https://cloudcontrolspartner.googleapis.com/", "servicePath": "", "title": "Cloud Controls Partner API", @@ -142,6 +142,64 @@ "resources": { "customers": { "methods": { + "create": { + "description": "Creates a new customer.", + "flatPath": "v1/organizations/{organizationsId}/locations/{locationsId}/customers", + "httpMethod": "POST", + "id": "cloudcontrolspartner.organizations.locations.customers.create", + "parameterOrder": [ + "parent" + ], + "parameters": { + "customerId": { + "description": "Required. The customer id to use for the customer, which will become the final component of the customer's resource name. 
The specified value must be a valid Google cloud organization id.", + "location": "query", + "type": "string" + }, + "parent": { + "description": "Required. Parent resource Format: `organizations/{organization}/locations/{location}`", + "location": "path", + "pattern": "^organizations/[^/]+/locations/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+parent}/customers", + "request": { + "$ref": "Customer" + }, + "response": { + "$ref": "Customer" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "delete": { + "description": "Delete details of a single customer", + "flatPath": "v1/organizations/{organizationsId}/locations/{locationsId}/customers/{customersId}", + "httpMethod": "DELETE", + "id": "cloudcontrolspartner.organizations.locations.customers.delete", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "Required. name of the resource to be deleted format: name=organizations/*/locations/*/customers/*", + "location": "path", + "pattern": "^organizations/[^/]+/locations/[^/]+/customers/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+name}", + "response": { + "$ref": "Empty" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, "get": { "description": "Gets details of a single customer", "flatPath": "v1/organizations/{organizationsId}/locations/{locationsId}/customers/{customersId}", @@ -212,6 +270,40 @@ "scopes": [ "https://www.googleapis.com/auth/cloud-platform" ] + }, + "patch": { + "description": "Update details of a single customer", + "flatPath": "v1/organizations/{organizationsId}/locations/{locationsId}/customers/{customersId}", + "httpMethod": "PATCH", + "id": "cloudcontrolspartner.organizations.locations.customers.patch", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "Identifier. Format: `organizations/{organization}/locations/{location}/customers/{customer}`", + "location": "path", + "pattern": "^organizations/[^/]+/locations/[^/]+/customers/[^/]+$", + "required": true, + "type": "string" + }, + "updateMask": { + "description": "Optional. The list of fields to update", + "format": "google-fieldmask", + "location": "query", + "type": "string" + } + }, + "path": "v1/{+name}", + "request": { + "$ref": "Customer" + }, + "response": { + "$ref": "Customer" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] } }, "resources": { @@ -611,6 +703,11 @@ "name": { "description": "Identifier. Format: `organizations/{organization}/locations/{location}/customers/{customer}`", "type": "string" + }, + "organizationDomain": { + "description": "Output only. The customer organization domain, extracted from CRM Organization’s display_name field. e.g. \"google.com\"", + "readOnly": true, + "type": "string" } }, "type": "object" @@ -760,6 +857,12 @@ }, "type": "object" }, + "Empty": { + "description": "A generic empty message that you can re-use to avoid defining duplicated empty messages in your APIs. A typical example is to use it as the request or the response type of an API method. 
For instance: service Foo { rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty); }", + "id": "Empty", + "properties": {}, + "type": "object" + }, "Gcloud": { "description": "Remediation instructions to resolve violation via gcloud cli", "id": "Gcloud", diff --git a/discovery/googleapis/clouddeploy__v1.json b/discovery/googleapis/clouddeploy__v1.json index 65ad9f0e3..57d367eae 100644 --- a/discovery/googleapis/clouddeploy__v1.json +++ b/discovery/googleapis/clouddeploy__v1.json @@ -25,7 +25,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20241010", + "revision": "20241204", "rootUrl": "https://clouddeploy.googleapis.com/", "servicePath": "", "title": "Cloud Deploy API", @@ -1869,7 +1869,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "clouddeploy.projects.locations.operations.cancel", @@ -2696,6 +2696,10 @@ "repairRolloutRule": { "$ref": "RepairRolloutRule", "description": "Optional. The `RepairRolloutRule` will automatically repair a failed rollout." + }, + "timedPromoteReleaseRule": { + "$ref": "TimedPromoteReleaseRule", + "description": "Optional. The `TimedPromoteReleaseRule` will automatically promote a release from the current target(s) to the specified target(s) on a configured schedule." } }, "type": "object" @@ -2707,6 +2711,10 @@ "targetsPresentCondition": { "$ref": "TargetsPresentCondition", "description": "Optional. Details around targets enumerated in the rule." + }, + "timedPromoteReleaseCondition": { + "$ref": "TimedPromoteReleaseCondition", + "description": "Optional. TimedPromoteReleaseCondition contains rule conditions specific to a an Automation with a timed promote release rule defined." } }, "type": "object" @@ -2806,10 +2814,15 @@ "type": "string" }, "targetId": { - "description": "Output only. The ID of the target that represents the promotion stage that initiates the `AutomationRun`. The value of this field is the last segment of a target name.", + "description": "Output only. The ID of the source target that initiates the `AutomationRun`. 
The value of this field is the last segment of a target name.", "readOnly": true, "type": "string" }, + "timedPromoteReleaseOperation": { + "$ref": "TimedPromoteReleaseOperation", + "description": "Output only. Promotes a release to a specified 'Target' as defined in a Timed Promote Release rule.", + "readOnly": true + }, "updateTime": { "description": "Output only. Time at which the automationRun was updated.", "format": "google-datetime", @@ -4761,7 +4774,7 @@ "type": "string" }, "requestedCancellation": { - "description": "Output only. Identifies whether the user has requested cancellation of the operation. Operations that have successfully been cancelled have Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Output only. Identifies whether the user has requested cancellation of the operation. Operations that have successfully been cancelled have google.longrunning.Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", "readOnly": true, "type": "boolean" }, @@ -5317,7 +5330,7 @@ "type": "string" }, "skaffoldVersion": { - "description": "The Skaffold version to use when operating on this release, such as \"1.20.0\". Not all versions are valid; Cloud Deploy supports a specific set of versions. If unset, the most recent supported Skaffold version will be used.", + "description": "Optional. The Skaffold version to use when operating on this release, such as \"1.20.0\". Not all versions are valid; Cloud Deploy supports a specific set of versions. If unset, the most recent supported Skaffold version will be used.", "type": "string" }, "targetArtifacts": { @@ -6916,6 +6929,21 @@ }, "type": "object" }, + "Targets": { + "description": "The targets involved in a single timed promotion.", + "id": "Targets", + "properties": { + "destinationTargetId": { + "description": "Optional. The destination target ID.", + "type": "string" + }, + "sourceTargetId": { + "description": "Optional. The source target ID.", + "type": "string" + } + }, + "type": "object" + }, "TargetsPresentCondition": { "description": "`TargetsPresentCondition` contains information on any Targets referenced in the Delivery Pipeline that do not actually exist.", "id": "TargetsPresentCondition", @@ -7054,6 +7082,81 @@ }, "type": "object" }, + "TimedPromoteReleaseCondition": { + "description": "`TimedPromoteReleaseCondition` contains conditions specific to an Automation with a Timed Promote Release rule defined.", + "id": "TimedPromoteReleaseCondition", + "properties": { + "nextPromotionTime": { + "description": "Output only. When the next scheduled promotion(s) will occur.", + "format": "google-datetime", + "readOnly": true, + "type": "string" + }, + "targetsList": { + "description": "Output only. A list of targets involved in the upcoming timed promotion(s).", + "items": { + "$ref": "Targets" + }, + "readOnly": true, + "type": "array" + } + }, + "type": "object" + }, + "TimedPromoteReleaseOperation": { + "description": "Contains the information of an automated timed promote-release operation.", + "id": "TimedPromoteReleaseOperation", + "properties": { + "phase": { + "description": "Output only. The starting phase of the rollout created by this operation.", + "readOnly": true, + "type": "string" + }, + "release": { + "description": "Output only. The name of the release to be promoted.", + "readOnly": true, + "type": "string" + }, + "targetId": { + "description": "Output only. 
The ID of the target that represents the promotion stage to which the release will be promoted. The value of this field is the last segment of a target name.", + "readOnly": true, + "type": "string" + } + }, + "type": "object" + }, + "TimedPromoteReleaseRule": { + "description": "The `TimedPromoteReleaseRule` will automatically promote a release from the current target(s) to the specified target(s) on a configured schedule.", + "id": "TimedPromoteReleaseRule", + "properties": { + "condition": { + "$ref": "AutomationRuleCondition", + "description": "Output only. Information around the state of the Automation rule.", + "readOnly": true + }, + "destinationPhase": { + "description": "Optional. The starting phase of the rollout created by this rule. Default to the first phase.", + "type": "string" + }, + "destinationTargetId": { + "description": "Optional. The ID of the stage in the pipeline to which this `Release` is deploying. If unspecified, default it to the next stage in the promotion flow. The value of this field could be one of the following: * The last segment of a target name * \"@next\", the next target in the promotion sequence", + "type": "string" + }, + "id": { + "description": "Required. ID of the rule. This ID must be unique in the `Automation` resource to which this rule belongs. The format is `[a-z]([a-z0-9-]{0,61}[a-z0-9])?`.", + "type": "string" + }, + "schedule": { + "description": "Required. Schedule in crontab format. e.g. \"0 9 * * 1\" for every Monday at 9am.", + "type": "string" + }, + "timeZone": { + "description": "Required. The time zone in IANA format [IANA Time Zone Database](https://www.iana.org/time-zones) (e.g. America/New_York).", + "type": "string" + } + }, + "type": "object" + }, "VerifyJob": { "description": "A verify Job.", "id": "VerifyJob", diff --git a/discovery/googleapis/cloudfunctions__v2.json b/discovery/googleapis/cloudfunctions__v2.json index 159a536df..1cad6f4a7 100644 --- a/discovery/googleapis/cloudfunctions__v2.json +++ b/discovery/googleapis/cloudfunctions__v2.json @@ -25,7 +25,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20240905", + "revision": "20241205", "rootUrl": "https://cloudfunctions.googleapis.com/", "servicePath": "", "title": "Cloud Functions API", @@ -830,7 +830,7 @@ "type": "string" }, "dockerRepository": { - "description": "Repository in Artifact Registry to which the function docker image will be pushed after it is built by Cloud Build. If specified by user, it is created and managed by user with a customer managed encryption key. Otherwise, GCF will create and use a repository named 'gcf-artifacts' for every deployed region. It must match the pattern `projects/{project}/locations/{location}/repositories/{repository}`. Cross-project repositories are not supported. Cross-location repositories are not supported. Repository format must be 'DOCKER'.", + "description": "Repository in Artifact Registry to which the function docker image will be pushed after it is built by Cloud Build. If specified by user, it is created and managed by user with a customer managed encryption key. Otherwise, GCF will create and use a repository named 'gcf-artifacts' for every deployed region. It must match the pattern `projects/{project}/locations/{location}/repositories/{repository}`. 
Repository format must be 'DOCKER'.", "type": "string" }, "entryPoint": { @@ -1228,7 +1228,8 @@ "ROLLBACK_FUNCTION_UPGRADE_TRAFFIC", "SETUP_FUNCTION_UPGRADE_CONFIG", "ABORT_FUNCTION_UPGRADE", - "COMMIT_FUNCTION_UPGRADE" + "COMMIT_FUNCTION_UPGRADE", + "DETACH_FUNCTION" ], "enumDescriptions": [ "Unspecified", @@ -1239,7 +1240,8 @@ "RollbackFunctionUpgradeTraffic", "SetupFunctionUpgradeConfig", "AbortFunctionUpgrade", - "CommitFunctionUpgrade" + "CommitFunctionUpgrade", + "DetachFunction" ], "type": "string" }, @@ -2060,7 +2062,7 @@ "type": "object" }, "UpgradeInfo": { - "description": "Information related to: * A function's eligibility for 1st Gen to 2nd Gen migration * Current state of migration for function undergoing migration.", + "description": "Information related to: * A function's eligibility for 1st Gen to 2nd Gen migration and 2nd Gen to CRf detach. * Current state of migration for function undergoing migration/detach.", "id": "UpgradeInfo", "properties": { "buildConfig": { @@ -2087,7 +2089,8 @@ "REDIRECT_FUNCTION_UPGRADE_TRAFFIC_SUCCESSFUL", "REDIRECT_FUNCTION_UPGRADE_TRAFFIC_ERROR", "ROLLBACK_FUNCTION_UPGRADE_TRAFFIC_ERROR", - "COMMIT_FUNCTION_UPGRADE_ERROR" + "COMMIT_FUNCTION_UPGRADE_ERROR", + "DETACH_IN_PROGRESS" ], "enumDescriptions": [ "Unspecified state. Most functions are in this upgrade state.", @@ -2099,7 +2102,8 @@ "RedirectFunctionUpgradeTraffic API was successful and traffic is served by 2nd Gen function stack.", "RedirectFunctionUpgradeTraffic API was un-successful.", "RollbackFunctionUpgradeTraffic API was un-successful.", - "CommitFunctionUpgrade API was un-successful." + "CommitFunctionUpgrade API was un-successful.", + "Function is requested to be detached from 2nd Gen to CRf." ], "type": "string" } diff --git a/discovery/googleapis/cloudidentity__v1.json b/discovery/googleapis/cloudidentity__v1.json index 1bbdaee84..817e7ba58 100644 --- a/discovery/googleapis/cloudidentity__v1.json +++ b/discovery/googleapis/cloudidentity__v1.json @@ -17,6 +17,18 @@ "https://www.googleapis.com/auth/cloud-identity.groups.readonly": { "description": "See any Cloud Identity Groups that you can access, including group members and their emails" }, + "https://www.googleapis.com/auth/cloud-identity.inboundsso": { + "description": "See and edit all of the Inbound SSO profiles and their assignments to any Org Units or Google Groups in your Cloud Identity Organization." + }, + "https://www.googleapis.com/auth/cloud-identity.inboundsso.readonly": { + "description": "See all of the Inbound SSO profiles and their assignments to any Org Units or Google Groups in your Cloud Identity Organization." + }, + "https://www.googleapis.com/auth/cloud-identity.policies": { + "description": "See and edit policies in your Cloud Identity Organization." + }, + "https://www.googleapis.com/auth/cloud-identity.policies.readonly": { + "description": "See policies in your Cloud Identity Organization." + }, "https://www.googleapis.com/auth/cloud-platform": { "description": "See, edit, configure, and delete your Google Cloud data and see the email address for your Google Account." 
} @@ -40,7 +52,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20241010", + "revision": "20241208", "rootUrl": "https://cloudidentity.googleapis.com/", "servicePath": "", "title": "Cloud Identity API", @@ -1485,7 +1497,7 @@ "type": "integer" }, "pageToken": { - "description": "The next_page_token value returned from a previous list request, if any", + "description": "The `next_page_token` value returned from a previous list request, if any", "location": "query", "type": "string" }, @@ -1528,7 +1540,7 @@ "type": "integer" }, "pageToken": { - "description": "The next_page_token value returned from a previous list request, if any.", + "description": "The `next_page_token` value returned from a previous list request, if any.", "location": "query", "type": "string" }, @@ -1540,7 +1552,7 @@ "type": "string" }, "query": { - "description": "Required. A CEL expression that MUST include member specification AND label(s). This is a `required` field. Users can search on label attributes of groups. CONTAINS match ('in') is supported on labels. Identity-mapped groups are uniquely identified by both a `member_key_id` and a `member_key_namespace`, which requires an additional query input: `member_key_namespace`. Example query: `member_key_id == 'member_key_id_value' && in labels` Query may optionally contain equality operators on the parent of the group restricting the search within a particular customer, e.g. `parent == 'customers/{customer_id}'`. The `customer_id` must begin with \"C\" (for example, 'C046psxkn'). This filtering is only supported for Admins with groups read permissons on the input customer. Example query: `member_key_id == 'member_key_id_value' && in labels && parent == 'customers/C046psxkn'`", + "description": "Required. A CEL expression that MUST include member specification AND label(s). This is a `required` field. Users can search on label attributes of groups. CONTAINS match ('in') is supported on labels. Identity-mapped groups are uniquely identified by both a `member_key_id` and a `member_key_namespace`, which requires an additional query input: `member_key_namespace`. Example query: `member_key_id == 'member_key_id_value' && in labels` Query may optionally contain equality operators on the parent of the group restricting the search within a particular customer, e.g. `parent == 'customers/{customer_id}'`. The `customer_id` must begin with \"C\" (for example, 'C046psxkn'). This filtering is only supported for Admins with groups read permissions on the input customer. 
Example query: `member_key_id == 'member_key_id_value' && in labels && parent == 'customers/C046psxkn'`", "location": "query", "type": "string" } @@ -1571,7 +1583,7 @@ "type": "integer" }, "pageToken": { - "description": "The next_page_token value returned from a previous list request, if any.", + "description": "The `next_page_token` value returned from a previous list request, if any.", "location": "query", "type": "string" }, @@ -1614,6 +1626,7 @@ "$ref": "Operation" }, "scopes": [ + "https://www.googleapis.com/auth/cloud-identity.inboundsso", "https://www.googleapis.com/auth/cloud-platform" ] }, @@ -1639,6 +1652,7 @@ "$ref": "Operation" }, "scopes": [ + "https://www.googleapis.com/auth/cloud-identity.inboundsso", "https://www.googleapis.com/auth/cloud-platform" ] }, @@ -1664,6 +1678,8 @@ "$ref": "InboundSamlSsoProfile" }, "scopes": [ + "https://www.googleapis.com/auth/cloud-identity.inboundsso", + "https://www.googleapis.com/auth/cloud-identity.inboundsso.readonly", "https://www.googleapis.com/auth/cloud-platform" ] }, @@ -1696,6 +1712,8 @@ "$ref": "ListInboundSamlSsoProfilesResponse" }, "scopes": [ + "https://www.googleapis.com/auth/cloud-identity.inboundsso", + "https://www.googleapis.com/auth/cloud-identity.inboundsso.readonly", "https://www.googleapis.com/auth/cloud-platform" ] }, @@ -1730,6 +1748,7 @@ "$ref": "Operation" }, "scopes": [ + "https://www.googleapis.com/auth/cloud-identity.inboundsso", "https://www.googleapis.com/auth/cloud-platform" ] } @@ -1762,6 +1781,7 @@ "$ref": "Operation" }, "scopes": [ + "https://www.googleapis.com/auth/cloud-identity.inboundsso", "https://www.googleapis.com/auth/cloud-platform" ] }, @@ -1787,6 +1807,7 @@ "$ref": "Operation" }, "scopes": [ + "https://www.googleapis.com/auth/cloud-identity.inboundsso", "https://www.googleapis.com/auth/cloud-platform" ] }, @@ -1812,6 +1833,8 @@ "$ref": "IdpCredential" }, "scopes": [ + "https://www.googleapis.com/auth/cloud-identity.inboundsso", + "https://www.googleapis.com/auth/cloud-identity.inboundsso.readonly", "https://www.googleapis.com/auth/cloud-platform" ] }, @@ -1848,6 +1871,8 @@ "$ref": "ListIdpCredentialsResponse" }, "scopes": [ + "https://www.googleapis.com/auth/cloud-identity.inboundsso", + "https://www.googleapis.com/auth/cloud-identity.inboundsso.readonly", "https://www.googleapis.com/auth/cloud-platform" ] } @@ -1872,6 +1897,7 @@ "$ref": "Operation" }, "scopes": [ + "https://www.googleapis.com/auth/cloud-identity.inboundsso", "https://www.googleapis.com/auth/cloud-platform" ] }, @@ -1897,6 +1923,7 @@ "$ref": "Operation" }, "scopes": [ + "https://www.googleapis.com/auth/cloud-identity.inboundsso", "https://www.googleapis.com/auth/cloud-platform" ] }, @@ -1922,6 +1949,8 @@ "$ref": "InboundSsoAssignment" }, "scopes": [ + "https://www.googleapis.com/auth/cloud-identity.inboundsso", + "https://www.googleapis.com/auth/cloud-identity.inboundsso.readonly", "https://www.googleapis.com/auth/cloud-platform" ] }, @@ -1954,6 +1983,8 @@ "$ref": "ListInboundSsoAssignmentsResponse" }, "scopes": [ + "https://www.googleapis.com/auth/cloud-identity.inboundsso", + "https://www.googleapis.com/auth/cloud-identity.inboundsso.readonly", "https://www.googleapis.com/auth/cloud-platform" ] }, @@ -1988,10 +2019,74 @@ "$ref": "Operation" }, "scopes": [ + "https://www.googleapis.com/auth/cloud-identity.inboundsso", "https://www.googleapis.com/auth/cloud-platform" ] } } + }, + "policies": { + "methods": { + "get": { + "description": "Get a Policy", + "flatPath": "v1/policies/{policiesId}", + "httpMethod": "GET", + "id": 
"cloudidentity.policies.get", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "Required. The name of the policy to retrieve. Format: \"policies/{policy}\".", + "location": "path", + "pattern": "^policies/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+name}", + "response": { + "$ref": "Policy" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-identity.policies", + "https://www.googleapis.com/auth/cloud-identity.policies.readonly" + ] + }, + "list": { + "description": "List Policies", + "flatPath": "v1/policies", + "httpMethod": "GET", + "id": "cloudidentity.policies.list", + "parameterOrder": [], + "parameters": { + "filter": { + "description": "Optional. A CEL expression for filtering the results. Policies can be filtered by application with this expression: setting.name = 'settings/gmail.*' Policies can be filtered by setting type with this expression: setting.name = '*.service_status' A maximum of one of the above setting.name clauses can be used. Policies can be filtered by customer with this expression: customer = \"customers/{customer}\" Where `customer` is the `id` from the [Admin SDK `Customer` resource](https://developers.google.com/admin-sdk/directory/reference/rest/v1/customers). You may use `customers/my_customer` to specify your own organization. When no customer is mentioned it will be default to customers/my_customer. A maximum of one customer clause can be used. The above clauses can only be combined together in a single filter expression with the `&&` operator.", + "location": "query", + "type": "string" + }, + "pageSize": { + "description": "Optional. The maximum number of results to return. The service can return fewer than this number. If omitted or set to 0, the default is 50 results per page. The maximum allowed value is 100. `page_size` values greater than 100 default to 100.", + "format": "int32", + "location": "query", + "type": "integer" + }, + "pageToken": { + "description": "Optional. The pagination token received from a prior call to PoliciesService.ListPolicies to retrieve the next page of results. When paginating, all other parameters provided to `ListPoliciesRequest` must match the call that provided the page token.", + "location": "query", + "type": "string" + } + }, + "path": "v1/policies", + "response": { + "$ref": "ListPoliciesResponse" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-identity.policies", + "https://www.googleapis.com/auth/cloud-identity.policies.readonly" + ] + } + } } }, "schemas": { @@ -2915,7 +3010,7 @@ "type": "string" }, "name": { - "description": "Output only. [Resource name](https://cloud.google.com/apis/design/resource_names) of the Device in format: `devices/{device}`, where device is the unique id assigned to the Device.", + "description": "Output only. [Resource name](https://cloud.google.com/apis/design/resource_names) of the Device in format: `devices/{device}`, where device is the unique id assigned to the Device. Important: Device API scopes require that you use domain-wide delegation to access the API. 
For more information, see [Set up the Devices API](https://cloud.google.com/identity/docs/how-to/setup-devices).", "readOnly": true, "type": "string" }, @@ -3583,6 +3678,24 @@ }, "type": "object" }, + "ListPoliciesResponse": { + "description": "The response message for PoliciesService.ListPolicies.", + "id": "ListPoliciesResponse", + "properties": { + "nextPageToken": { + "description": "The pagination token to retrieve the next page of results. If this field is empty, there are no subsequent pages.", + "type": "string" + }, + "policies": { + "description": "The results", + "items": { + "$ref": "Policy" + }, + "type": "array" + } + }, + "type": "object" + }, "ListUserInvitationsResponse": { "description": "Response message for UserInvitation listing request.", "id": "ListUserInvitationsResponse", @@ -3763,7 +3876,7 @@ "id": "MembershipAdjacencyList", "properties": { "edges": { - "description": "Each edge contains information about the member that belongs to this group. Note: Fields returned here will help identify the specific Membership resource (e.g name, preferred_member_key and role), but may not be a comprehensive list of all fields.", + "description": "Each edge contains information about the member that belongs to this group. Note: Fields returned here will help identify the specific Membership resource (e.g `name`, `preferred_member_key` and `role`), but may not be a comprehensive list of all fields.", "items": { "$ref": "Membership" }, @@ -3936,6 +4049,70 @@ }, "type": "object" }, + "Policy": { + "description": "A Policy resource binds an instance of a single Setting with the scope of a PolicyQuery. The Setting instance will be applied to all entities that satisfy the query.", + "id": "Policy", + "properties": { + "customer": { + "description": "Immutable. Customer that the Policy belongs to. The value is in the format 'customers/{customerId}'. The `customerId` must begin with \"C\" To find your customer ID in Admin Console see https://support.google.com/a/answer/10070793.", + "type": "string" + }, + "name": { + "description": "Output only. Identifier. The [resource name](https://cloud.google.com/apis/design/resource_names) of the Policy. Format: policies/{policy}.", + "readOnly": true, + "type": "string" + }, + "policyQuery": { + "$ref": "PolicyQuery", + "description": "Required. The PolicyQuery the Setting applies to." + }, + "setting": { + "$ref": "Setting", + "description": "Required. The Setting configured by this Policy." + }, + "type": { + "description": "Output only. The type of the policy.", + "enum": [ + "POLICY_TYPE_UNSPECIFIED", + "SYSTEM", + "ADMIN" + ], + "enumDescriptions": [ + "Unspecified policy type.", + "Policy type denoting the system-configured policies.", + "Policy type denoting the admin-configurable policies." + ], + "readOnly": true, + "type": "string" + } + }, + "type": "object" + }, + "PolicyQuery": { + "description": "PolicyQuery", + "id": "PolicyQuery", + "properties": { + "group": { + "description": "Immutable. The group that the query applies to. This field is only set if there is a single value for group that satisfies all clauses of the query. If no group applies, this will be the empty string.", + "type": "string" + }, + "orgUnit": { + "description": "Required. Immutable. Non-empty default. The OrgUnit the query applies to. This field is only set if there is a single value for org_unit that satisfies all clauses of the query.", + "type": "string" + }, + "query": { + "description": "Immutable. 
The CEL query that defines which entities the Policy applies to (ex. a User entity). For details about CEL see https://opensource.google.com/projects/cel. The OrgUnits the Policy applies to are represented by a clause like so: entity.org_units.exists(org_unit, org_unit.org_unit_id == orgUnitId('{orgUnitId}')) The Group the Policy applies to are represented by a clause like so: entity.groups.exists(group, group.group_id == groupId('{groupId}')) The Licenses the Policy applies to are represented by a clause like so: entity.licenses.exists(license, license in ['/product/{productId}/sku/{skuId}']) The above clauses can be present in any combination, and used in conjunction with the &&, || and ! operators. The org_unit and group fields below are helper fields that contain the corresponding value(s) as the query to make the query easier to use.", + "type": "string" + }, + "sortOrder": { + "description": "Output only. The decimal sort order of this PolicyQuery. The value is relative to all other policies with the same setting type for the customer. (There are no duplicates within this set).", + "format": "double", + "readOnly": true, + "type": "number" + } + }, + "type": "object" + }, "RestrictionEvaluation": { "description": "The evaluated state of this restriction.", "id": "RestrictionEvaluation", @@ -4130,6 +4307,25 @@ "properties": {}, "type": "object" }, + "Setting": { + "description": "Setting", + "id": "Setting", + "properties": { + "type": { + "description": "Required. Immutable. The type of the Setting. .", + "type": "string" + }, + "value": { + "additionalProperties": { + "description": "Properties of the object.", + "type": "any" + }, + "description": "Required. The value of the Setting.", + "type": "object" + } + }, + "type": "object" + }, "SignInBehavior": { "description": "Controls sign-in behavior.", "id": "SignInBehavior", diff --git a/discovery/googleapis/cloudkms__v1.json b/discovery/googleapis/cloudkms__v1.json index 894aecab3..19486abad 100644 --- a/discovery/googleapis/cloudkms__v1.json +++ b/discovery/googleapis/cloudkms__v1.json @@ -110,7 +110,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20240926", + "revision": "20241111", "rootUrl": "https://cloudkms.googleapis.com/", "servicePath": "", "title": "Cloud Key Management Service (KMS) API", @@ -3762,7 +3762,7 @@ "type": "string" }, "macCrc32c": { - "description": "Optional. An optional CRC32C checksum of the MacVerifyRequest.mac. If specified, KeyManagementService will verify the integrity of the received MacVerifyRequest.mac using this checksum. KeyManagementService will report an error if the checksum verification fails. If you receive a checksum error, your client should verify that CRC32C(MacVerifyRequest.tag) is equal to MacVerifyRequest.mac_crc32c, and if so, perform a limited number of retries. A persistent mismatch may indicate an issue in your computation of the CRC32C checksum. Note: This field is defined as int64 for reasons of compatibility across different languages. However, it is a non-negative integer, which will never exceed 2^32-1, and can be safely downconverted to uint32 in languages that support this type.", + "description": "Optional. An optional CRC32C checksum of the MacVerifyRequest.mac. If specified, KeyManagementService will verify the integrity of the received MacVerifyRequest.mac using this checksum. KeyManagementService will report an error if the checksum verification fails. 
If you receive a checksum error, your client should verify that CRC32C(MacVerifyRequest.mac) is equal to MacVerifyRequest.mac_crc32c, and if so, perform a limited number of retries. A persistent mismatch may indicate an issue in your computation of the CRC32C checksum. Note: This field is defined as int64 for reasons of compatibility across different languages. However, it is a non-negative integer, which will never exceed 2^32-1, and can be safely downconverted to uint32 in languages that support this type.", "format": "int64", "type": "string" } diff --git a/discovery/googleapis/cloudscheduler__v1.json b/discovery/googleapis/cloudscheduler__v1.json index 5718c733e..9a7200a58 100644 --- a/discovery/googleapis/cloudscheduler__v1.json +++ b/discovery/googleapis/cloudscheduler__v1.json @@ -25,7 +25,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20240917", + "revision": "20241203", "rootUrl": "https://cloudscheduler.googleapis.com/", "servicePath": "", "title": "Cloud Scheduler API", @@ -112,7 +112,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "cloudscheduler.operations.cancel", @@ -388,7 +388,7 @@ "type": "integer" }, "pageToken": { - "description": "A token identifying a page of results the server will return. To request the first page results, page_token must be empty. To request the next page of results, page_token must be the value of next_page_token returned from the previous call to ListJobs. It is an error to switch the value of filter or order_by while iterating through pages.", + "description": "A token identifying a page of results the server will return. To request the first page results, page_token must be empty. To request the next page of results, page_token must be the value of next_page_token returned from the previous call to ListJobs.", "location": "query", "type": "string" }, @@ -924,7 +924,7 @@ "type": "string" }, "cancelRequested": { - "description": "Output only. Identifies whether the user has requested cancellation of the operation. Operations that have been cancelled successfully have Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Output only. 
Identifies whether the user has requested cancellation of the operation. Operations that have been cancelled successfully have google.longrunning.Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "readOnly": true, "type": "boolean" }, diff --git a/discovery/googleapis/cloudsearch__v1.json b/discovery/googleapis/cloudsearch__v1.json index 990dab6aa..87dc95e74 100644 --- a/discovery/googleapis/cloudsearch__v1.json +++ b/discovery/googleapis/cloudsearch__v1.json @@ -49,7 +49,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20241016", + "revision": "20241113", "rootUrl": "https://cloudsearch.googleapis.com/", "servicePath": "", "title": "Cloud Search API", @@ -7044,68 +7044,6 @@ }, "type": "object" }, - "RewrittenQueries": { - "description": "The rewritten queries returned by Apps Search Query Understanding service.", - "id": "RewrittenQueries", - "properties": { - "rewrittenQueries": { - "items": { - "$ref": "RewrittenQuery" - }, - "type": "array" - }, - "selectedQueryIndex": { - "description": "The index of the selected query in `rewritten_queries` that is used by QAPI to call CSSR to get search results. If none of the queries were used (i.e. they all give empty search results), `selected_query_index` would default to -1.", - "format": "int32", - "type": "integer" - } - }, - "type": "object" - }, - "RewrittenQuery": { - "id": "RewrittenQuery", - "properties": { - "rewrittenQuery": { - "type": "string" - }, - "score": { - "format": "double", - "type": "number" - }, - "sortBy": { - "enum": [ - "SORTBY_UNSUPPORTED", - "SORTBY_RELEVANCY", - "SORTBY_LATEST", - "SORTBY_OLDEST", - "SORTBY_LARGEST", - "SORTBY_SMALLEST", - "SORTBY_MODIFY_LATEST", - "SORTBY_MODIFY_OLDEST", - "SORTBY_VIEW_LATEST", - "SORTBY_VIEW_OLDEST", - "SORTBY_CREATE_LATEST", - "SORTBY_CREATE_OLDEST" - ], - "enumDescriptions": [ - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "", - "" - ], - "type": "string" - } - }, - "type": "object" - }, "SafeHtmlProto": { "description": "IMPORTANT: It is unsafe to accept this message from an untrusted source, since it's trivial for an attacker to forge serialized messages that don't fulfill the type's safety contract -- for example, it could contain attacker controlled script. A system which receives a SafeHtmlProto implicitly trusts the producer of the SafeHtmlProto. So, it's generally safe to return this message in RPC responses, but generally unsafe to accept it in RPC requests.", "id": "SafeHtmlProto", diff --git a/discovery/googleapis/cloudshell__v1.json b/discovery/googleapis/cloudshell__v1.json index 2bc765786..826e44290 100644 --- a/discovery/googleapis/cloudshell__v1.json +++ b/discovery/googleapis/cloudshell__v1.json @@ -25,7 +25,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20230807", + "revision": "20241118", "rootUrl": "https://cloudshell.googleapis.com/", "servicePath": "", "title": "Cloud Shell API", @@ -112,7 +112,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. 
On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "cloudshell.operations.cancel", diff --git a/discovery/googleapis/cloudsupport__v2.json b/discovery/googleapis/cloudsupport__v2.json index 6df4693f4..5940d20b9 100644 --- a/discovery/googleapis/cloudsupport__v2.json +++ b/discovery/googleapis/cloudsupport__v2.json @@ -25,7 +25,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20240403", + "revision": "20241212", "rootUrl": "https://cloudsupport.googleapis.com/", "servicePath": "", "title": "Google Cloud Support API", @@ -386,7 +386,7 @@ ], "parameters": { "pageSize": { - "description": "The maximum number of attachments fetched with each request. If not provided, the default is 10. The maximum page size that will be returned is 100.", + "description": "The maximum number of attachments fetched with each request. If not provided, the default is 10. The maximum page size that will be returned is 100. The size of each page can be smaller than the requested page size and can include zero. For example, you could request 100 attachments on one page, receive 0, and then on the next page, receive 90.", "format": "int32", "location": "query", "type": "integer" diff --git a/discovery/googleapis/cloudtasks__v2.json b/discovery/googleapis/cloudtasks__v2.json index a5cc7bbbd..7be6087d5 100644 --- a/discovery/googleapis/cloudtasks__v2.json +++ b/discovery/googleapis/cloudtasks__v2.json @@ -28,7 +28,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20240808", + "revision": "20241111", "rootUrl": "https://cloudtasks.googleapis.com/", "servicePath": "", "title": "Cloud Tasks API", @@ -750,7 +750,7 @@ ] }, "run": { - "description": "Forces a task to run now. When this method is called, Cloud Tasks will dispatch the task, even if the task is already running, the queue has reached its RateLimits or is PAUSED. This command is meant to be used for manual debugging. For example, RunTask can be used to retry a failed task after a fix has been made or to manually force a task to be dispatched now. The dispatched task is returned. That is, the task that is returned contains the status after the task is dispatched but before the task is received by its target. If Cloud Tasks receives a successful response from the task's target, then the task will be deleted; otherwise the task's schedule_time will be reset to the time that RunTask was called plus the retry delay specified in the queue's RetryConfig. RunTask returns NOT_FOUND when it is called on a task that has already succeeded or permanently failed.", + "description": "Forces a task to run now. 
When this method is called, Cloud Tasks will dispatch the task, even if the task is already running, the queue has reached its RateLimits or is PAUSED. This command is meant to be used for manual debugging. For example, RunTask can be used to retry a failed task after a fix has been made or to manually force a task to be dispatched now. If Cloud Tasks receives a successful response from the task's target, then the task will be deleted; otherwise the task's schedule_time will be reset to the time that RunTask was called plus the retry delay specified in the queue's RetryConfig. RunTask returns NOT_FOUND when it is called on a task that has already succeeded or permanently failed.", "flatPath": "v2/projects/{projectsId}/locations/{locationsId}/queues/{queuesId}/tasks/{tasksId}:run", "httpMethod": "POST", "id": "cloudtasks.projects.locations.queues.tasks.run", diff --git a/discovery/googleapis/composer__v1.json b/discovery/googleapis/composer__v1.json index 376f429eb..c4a9e0ac7 100644 --- a/discovery/googleapis/composer__v1.json +++ b/discovery/googleapis/composer__v1.json @@ -247,7 +247,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20241013", + "revision": "20241027", "rootUrl": "https://composer.googleapis.com/", "servicePath": "", "title": "Cloud Composer API", @@ -1510,7 +1510,7 @@ "properties": { "config": { "$ref": "EnvironmentConfig", - "description": "Configuration parameters for this environment." + "description": "Optional. Configuration parameters for this environment." }, "createTime": { "description": "Output only. The time at which this environment was created.", @@ -1525,7 +1525,7 @@ "type": "object" }, "name": { - "description": "The resource name of the environment, in the form: \"projects/{projectId}/locations/{locationId}/environments/{environmentId}\" EnvironmentId must start with a lowercase letter followed by up to 63 lowercase letters, numbers, or hyphens, and cannot end with a hyphen.", + "description": "Identifier. The resource name of the environment, in the form: \"projects/{projectId}/locations/{locationId}/environments/{environmentId}\" EnvironmentId must start with a lowercase letter followed by up to 63 lowercase letters, numbers, or hyphens, and cannot end with a hyphen.", "type": "string" }, "satisfiesPzi": { @@ -1633,7 +1633,7 @@ }, "nodeConfig": { "$ref": "NodeConfig", - "description": "The configuration used for the Kubernetes Engine cluster." + "description": "Optional. The configuration used for the Kubernetes Engine cluster." }, "nodeCount": { "description": "The number of nodes in the Kubernetes Engine cluster that will be used to run this environment. This field is supported for Cloud Composer environments in versions composer-1.*.*-airflow-*.*.*.", @@ -1642,7 +1642,7 @@ }, "privateEnvironmentConfig": { "$ref": "PrivateEnvironmentConfig", - "description": "The configuration used for the Private IP Cloud Composer environment." + "description": "Optional. The configuration used for the Private IP Cloud Composer environment." }, "recoveryConfig": { "$ref": "RecoveryConfig", @@ -1662,7 +1662,7 @@ }, "softwareConfig": { "$ref": "SoftwareConfig", - "description": "The configuration settings for software inside the environment." + "description": "Optional. The configuration settings for software inside the environment." }, "webServerConfig": { "$ref": "WebServerConfig", @@ -2428,7 +2428,7 @@ "type": "object" }, "imageVersion": { - "description": "The version of the software running in the environment. 
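For the cloudtasks run method described above, the following is an illustrative call, assuming google-api-python-client, Application Default Credentials, and a hypothetical task name. It force-dispatches the task for debugging and returns the Task as it looks right after dispatch, before the target responds:

```python
from googleapiclient.discovery import build

TASK = "projects/my-project/locations/us-central1/queues/my-queue/tasks/my-task"  # hypothetical

tasks = build("cloudtasks", "v2")
# RunTask dispatches immediately, bypassing RateLimits and PAUSED state.
task = tasks.projects().locations().queues().tasks().run(name=TASK, body={}).execute()
print(task.get("name"), task.get("dispatchCount"))
```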
This encapsulates both the version of Cloud Composer functionality and the version of Apache Airflow. It must match the regular expression `composer-([0-9]+(\\.[0-9]+\\.[0-9]+(-preview\\.[0-9]+)?)?|latest)-airflow-([0-9]+(\\.[0-9]+(\\.[0-9]+)?)?)`. When used as input, the server also checks if the provided version is supported and denies the request for an unsupported version. The Cloud Composer portion of the image version is a full [semantic version](https://semver.org), or an alias in the form of major version number or `latest`. When an alias is provided, the server replaces it with the current Cloud Composer version that satisfies the alias. The Apache Airflow portion of the image version is a full semantic version that points to one of the supported Apache Airflow versions, or an alias in the form of only major or major.minor versions specified. When an alias is provided, the server replaces it with the latest Apache Airflow version that satisfies the alias and is supported in the given Cloud Composer version. In all cases, the resolved image version is stored in the same field. See also [version list](/composer/docs/concepts/versioning/composer-versions) and [versioning overview](/composer/docs/concepts/versioning/composer-versioning-overview).", + "description": "Optional. The version of the software running in the environment. This encapsulates both the version of Cloud Composer functionality and the version of Apache Airflow. It must match the regular expression `composer-([0-9]+(\\.[0-9]+\\.[0-9]+(-preview\\.[0-9]+)?)?|latest)-airflow-([0-9]+(\\.[0-9]+(\\.[0-9]+)?)?)`. When used as input, the server also checks if the provided version is supported and denies the request for an unsupported version. The Cloud Composer portion of the image version is a full [semantic version](https://semver.org), or an alias in the form of major version number or `latest`. When an alias is provided, the server replaces it with the current Cloud Composer version that satisfies the alias. The Apache Airflow portion of the image version is a full semantic version that points to one of the supported Apache Airflow versions, or an alias in the form of only major or major.minor versions specified. When an alias is provided, the server replaces it with the latest Apache Airflow version that satisfies the alias and is supported in the given Cloud Composer version. In all cases, the resolved image version is stored in the same field. See also [version list](/composer/docs/concepts/versioning/composer-versions) and [versioning overview](/composer/docs/concepts/versioning/composer-versioning-overview).", "type": "string" }, "pypiPackages": { @@ -2594,7 +2594,7 @@ "additionalProperties": { "type": "string" }, - "description": "Optional. The \"data\" field of Kubernetes ConfigMap, organized in key-value pairs. For details see: https://kubernetes.io/docs/concepts/configuration/configmap/", + "description": "Optional. The \"data\" field of Kubernetes ConfigMap, organized in key-value pairs. For details see: https://kubernetes.io/docs/concepts/configuration/configmap/ Example: { \"example_key\": \"example_value\", \"another_key\": \"another_value\" }", "type": "object" }, "name": { @@ -2612,7 +2612,7 @@ "additionalProperties": { "type": "string" }, - "description": "Optional. The \"data\" field of Kubernetes Secret, organized in key-value pairs, which can contain sensitive values such as a password, a token, or a key. The values for all keys have to be base64-encoded strings. 
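The imageVersion description above gives the exact regular expression the server checks. A quick self-contained check of a few candidate values against that pattern, copied verbatim from the description:

```python
import re

# Pattern taken from the Composer SoftwareConfig.imageVersion description.
IMAGE_VERSION_RE = re.compile(
    r"composer-([0-9]+(\.[0-9]+\.[0-9]+(-preview\.[0-9]+)?)?|latest)"
    r"-airflow-([0-9]+(\.[0-9]+(\.[0-9]+)?)?)"
)

for candidate in (
    "composer-3-airflow-2.9.3",      # alias: Composer major version only
    "composer-2.9.7-airflow-2.9.3",  # full semantic versions on both sides
    "composer-latest-airflow-2",     # "latest" alias plus Airflow major alias
    "composer-2.9-airflow-2.9.3",    # rejected: major.minor Composer alias is not allowed
):
    print(candidate, bool(IMAGE_VERSION_RE.fullmatch(candidate)))
```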
For details see: https://kubernetes.io/docs/concepts/configuration/secret/", + "description": "Optional. The \"data\" field of Kubernetes Secret, organized in key-value pairs, which can contain sensitive values such as a password, a token, or a key. The values for all keys have to be base64-encoded strings. For details see: https://kubernetes.io/docs/concepts/configuration/secret/ Example: { \"example\": \"ZXhhbXBsZV92YWx1ZQ==\", \"another-example\": \"YW5vdGhlcl9leGFtcGxlX3ZhbHVl\" }", "type": "object" }, "name": { diff --git a/discovery/googleapis/compute__v1.json b/discovery/googleapis/compute__v1.json index 7b58ff7d2..d14e5fc04 100644 --- a/discovery/googleapis/compute__v1.json +++ b/discovery/googleapis/compute__v1.json @@ -39,7 +39,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20241008", + "revision": "20241201", "rootUrl": "https://compute.googleapis.com/", "servicePath": "compute/v1/", "title": "Compute Engine API", @@ -8687,6 +8687,54 @@ "https://www.googleapis.com/auth/compute" ] }, + "resumeInstances": { + "description": "Flags the specified instances in the managed instance group to be resumed. This method increases the targetSize and decreases the targetSuspendedSize of the managed instance group by the number of instances that you resume. The resumeInstances operation is marked DONE if the resumeInstances request is successful. The underlying actions take additional time. You must separately verify the status of the RESUMING action with the listmanagedinstances method. In this request, you can only specify instances that are suspended. For example, if an instance was previously suspended using the suspendInstances method, it can be resumed using the resumeInstances method. If a health check is attached to the managed instance group, the specified instances will be verified as healthy after they are resumed. You can specify a maximum of 1000 instances with this method per request.", + "flatPath": "projects/{project}/zones/{zone}/instanceGroupManagers/{instanceGroupManager}/resumeInstances", + "httpMethod": "POST", + "id": "compute.instanceGroupManagers.resumeInstances", + "parameterOrder": [ + "project", + "zone", + "instanceGroupManager" + ], + "parameters": { + "instanceGroupManager": { + "description": "The name of the managed instance group.", + "location": "path", + "required": true, + "type": "string" + }, + "project": { + "description": "Project ID for this request.", + "location": "path", + "pattern": "(?:(?:[-a-z0-9]{1,63}\\.)*(?:[a-z](?:[-a-z0-9]{0,61}[a-z0-9])?):)?(?:[0-9]{1,19}|(?:[a-z0-9](?:[-a-z0-9]{0,61}[a-z0-9])?))", + "required": true, + "type": "string" + }, + "requestId": { + "description": "An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. 
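The userWorkloadsSecrets description above requires every value in `data` to be a base64-encoded string, as in its `ZXhhbXBsZV92YWx1ZQ==` example. A small helper sketch, with a hypothetical secret name, that builds a request body in that shape:

```python
import base64

def encode_secret_data(plain: dict) -> dict:
    """Base64-encode every value, as required by UserWorkloadsSecret.data."""
    return {k: base64.b64encode(v.encode("utf-8")).decode("ascii")
            for k, v in plain.items()}

body = {
    # Hypothetical resource name.
    "name": ("projects/my-project/locations/us-central1/environments/"
             "my-environment/userWorkloadsSecrets/my-secret"),
    "data": encode_secret_data({"example": "example_value"}),
}
# "example_value" encodes to "ZXhhbXBsZV92YWx1ZQ==", matching the description's example.
print(body["data"])
```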
The request ID must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000).", + "location": "query", + "type": "string" + }, + "zone": { + "description": "The name of the zone where the managed instance group is located.", + "location": "path", + "required": true, + "type": "string" + } + }, + "path": "projects/{project}/zones/{zone}/instanceGroupManagers/{instanceGroupManager}/resumeInstances", + "request": { + "$ref": "InstanceGroupManagersResumeInstancesRequest" + }, + "response": { + "$ref": "Operation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/compute" + ] + }, "setInstanceTemplate": { "description": "Specifies the instance template to use when creating new instances in this group. The templates for existing instances in the group do not change unless you run recreateInstances, run applyUpdatesToInstances, or set the group's updatePolicy.type to PROACTIVE.", "flatPath": "projects/{project}/zones/{zone}/instanceGroupManagers/{instanceGroupManager}/setInstanceTemplate", @@ -8783,6 +8831,150 @@ "https://www.googleapis.com/auth/compute" ] }, + "startInstances": { + "description": "Flags the specified instances in the managed instance group to be started. This method increases the targetSize and decreases the targetStoppedSize of the managed instance group by the number of instances that you start. The startInstances operation is marked DONE if the startInstances request is successful. The underlying actions take additional time. You must separately verify the status of the STARTING action with the listmanagedinstances method. In this request, you can only specify instances that are stopped. For example, if an instance was previously stopped using the stopInstances method, it can be started using the startInstances method. If a health check is attached to the managed instance group, the specified instances will be verified as healthy after they are started. You can specify a maximum of 1000 instances with this method per request.", + "flatPath": "projects/{project}/zones/{zone}/instanceGroupManagers/{instanceGroupManager}/startInstances", + "httpMethod": "POST", + "id": "compute.instanceGroupManagers.startInstances", + "parameterOrder": [ + "project", + "zone", + "instanceGroupManager" + ], + "parameters": { + "instanceGroupManager": { + "description": "The name of the managed instance group.", + "location": "path", + "required": true, + "type": "string" + }, + "project": { + "description": "Project ID for this request.", + "location": "path", + "pattern": "(?:(?:[-a-z0-9]{1,63}\\.)*(?:[a-z](?:[-a-z0-9]{0,61}[a-z0-9])?):)?(?:[0-9]{1,19}|(?:[a-z0-9](?:[-a-z0-9]{0,61}[a-z0-9])?))", + "required": true, + "type": "string" + }, + "requestId": { + "description": "An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. 
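The new resumeInstances method and its requestId parameter, documented above, allow idempotent retries. A sketch that assumes google-api-python-client built from a discovery document that already contains this method, hypothetical project/zone/MIG names, and a request body whose `instances` field holds partial instance URLs like the other MIG per-instance methods:

```python
import uuid
from googleapiclient.discovery import build

compute = build("compute", "v1")

# Reusing the same UUID on a retry lets the server ignore the duplicate request.
request_id = str(uuid.uuid4())
op = compute.instanceGroupManagers().resumeInstances(
    project="my-project",              # hypothetical identifiers
    zone="us-central1-a",
    instanceGroupManager="my-mig",
    requestId=request_id,
    body={"instances": ["zones/us-central1-a/instances/my-instance-0"]},
).execute()
# The operation is DONE once the request is accepted; verify the RESUMING
# action separately with listManagedInstances.
print(op["name"], op["status"])
```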
The request ID must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000).", + "location": "query", + "type": "string" + }, + "zone": { + "description": "The name of the zone where the managed instance group is located.", + "location": "path", + "required": true, + "type": "string" + } + }, + "path": "projects/{project}/zones/{zone}/instanceGroupManagers/{instanceGroupManager}/startInstances", + "request": { + "$ref": "InstanceGroupManagersStartInstancesRequest" + }, + "response": { + "$ref": "Operation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/compute" + ] + }, + "stopInstances": { + "description": "Flags the specified instances in the managed instance group to be immediately stopped. You can only specify instances that are running in this request. This method reduces the targetSize and increases the targetStoppedSize of the managed instance group by the number of instances that you stop. The stopInstances operation is marked DONE if the stopInstances request is successful. The underlying actions take additional time. You must separately verify the status of the STOPPING action with the listmanagedinstances method. If the standbyPolicy.initialDelaySec field is set, the group delays stopping the instances until initialDelaySec have passed from instance.creationTimestamp (that is, when the instance was created). This delay gives your application time to set itself up and initialize on the instance. If more than initialDelaySec seconds have passed since instance.creationTimestamp when this method is called, there will be zero delay. If the group is part of a backend service that has enabled connection draining, it can take up to 60 seconds after the connection draining duration has elapsed before the VM instance is stopped. Stopped instances can be started using the startInstances method. You can specify a maximum of 1000 instances with this method per request.", + "flatPath": "projects/{project}/zones/{zone}/instanceGroupManagers/{instanceGroupManager}/stopInstances", + "httpMethod": "POST", + "id": "compute.instanceGroupManagers.stopInstances", + "parameterOrder": [ + "project", + "zone", + "instanceGroupManager" + ], + "parameters": { + "instanceGroupManager": { + "description": "The name of the managed instance group.", + "location": "path", + "required": true, + "type": "string" + }, + "project": { + "description": "Project ID for this request.", + "location": "path", + "pattern": "(?:(?:[-a-z0-9]{1,63}\\.)*(?:[a-z](?:[-a-z0-9]{0,61}[a-z0-9])?):)?(?:[0-9]{1,19}|(?:[a-z0-9](?:[-a-z0-9]{0,61}[a-z0-9])?))", + "required": true, + "type": "string" + }, + "requestId": { + "description": "An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. 
The request ID must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000).", + "location": "query", + "type": "string" + }, + "zone": { + "description": "The name of the zone where the managed instance group is located.", + "location": "path", + "required": true, + "type": "string" + } + }, + "path": "projects/{project}/zones/{zone}/instanceGroupManagers/{instanceGroupManager}/stopInstances", + "request": { + "$ref": "InstanceGroupManagersStopInstancesRequest" + }, + "response": { + "$ref": "Operation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/compute" + ] + }, + "suspendInstances": { + "description": "Flags the specified instances in the managed instance group to be immediately suspended. You can only specify instances that are running in this request. This method reduces the targetSize and increases the targetSuspendedSize of the managed instance group by the number of instances that you suspend. The suspendInstances operation is marked DONE if the suspendInstances request is successful. The underlying actions take additional time. You must separately verify the status of the SUSPENDING action with the listmanagedinstances method. If the standbyPolicy.initialDelaySec field is set, the group delays suspension of the instances until initialDelaySec have passed from instance.creationTimestamp (that is, when the instance was created). This delay gives your application time to set itself up and initialize on the instance. If more than initialDelaySec seconds have passed since instance.creationTimestamp when this method is called, there will be zero delay. If the group is part of a backend service that has enabled connection draining, it can take up to 60 seconds after the connection draining duration has elapsed before the VM instance is suspended. Suspended instances can be resumed using the resumeInstances method. You can specify a maximum of 1000 instances with this method per request.", + "flatPath": "projects/{project}/zones/{zone}/instanceGroupManagers/{instanceGroupManager}/suspendInstances", + "httpMethod": "POST", + "id": "compute.instanceGroupManagers.suspendInstances", + "parameterOrder": [ + "project", + "zone", + "instanceGroupManager" + ], + "parameters": { + "instanceGroupManager": { + "description": "The name of the managed instance group.", + "location": "path", + "required": true, + "type": "string" + }, + "project": { + "description": "Project ID for this request.", + "location": "path", + "pattern": "(?:(?:[-a-z0-9]{1,63}\\.)*(?:[a-z](?:[-a-z0-9]{0,61}[a-z0-9])?):)?(?:[0-9]{1,19}|(?:[a-z0-9](?:[-a-z0-9]{0,61}[a-z0-9])?))", + "required": true, + "type": "string" + }, + "requestId": { + "description": "An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. 
The request ID must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000).", + "location": "query", + "type": "string" + }, + "zone": { + "description": "The name of the zone where the managed instance group is located.", + "location": "path", + "required": true, + "type": "string" + } + }, + "path": "projects/{project}/zones/{zone}/instanceGroupManagers/{instanceGroupManager}/suspendInstances", + "request": { + "$ref": "InstanceGroupManagersSuspendInstancesRequest" + }, + "response": { + "$ref": "Operation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/compute" + ] + }, "updatePerInstanceConfigs": { "description": "Inserts or updates per-instance configurations for the managed instance group. perInstanceConfig.name serves as a key used to distinguish whether to perform insert or patch.", "flatPath": "projects/{project}/zones/{zone}/instanceGroupManagers/{instanceGroupManager}/updatePerInstanceConfigs", @@ -10450,7 +10642,7 @@ "type": "string" }, "start": { - "description": "Specifies the starting byte position of the output to return. To start with the first byte of output to the specified port, omit this field or set it to `0`. If the output for that byte position is available, this field matches the `start` parameter sent with the request. If the amount of serial console output exceeds the size of the buffer (1 MB), the oldest output is discarded and is no longer available. If the requested start position refers to discarded output, the start position is adjusted to the oldest output still available, and the adjusted start position is returned as the `start` property value. You can also provide a negative start position, which translates to the most recent number of bytes written to the serial port. For example, -3 is interpreted as the most recent 3 bytes written to the serial console.", + "description": "Specifies the starting byte position of the output to return. To start with the first byte of output to the specified port, omit this field or set it to `0`. If the output for that byte position is available, this field matches the `start` parameter sent with the request. If the amount of serial console output exceeds the size of the buffer (1 MB), the oldest output is discarded and is no longer available. If the requested start position refers to discarded output, the start position is adjusted to the oldest output still available, and the adjusted start position is returned as the `start` property value. You can also provide a negative start position, which translates to the most recent number of bytes written to the serial port. For example, -3 is interpreted as the most recent 3 bytes written to the serial console. 
Note that the negative start is bounded by the retained buffer size, and the returned serial console output will not exceed the max buffer size.", "format": "int64", "location": "query", "type": "string" @@ -16280,6 +16472,100 @@ } } }, + "networkProfiles": { + "methods": { + "get": { + "description": "Returns the specified network profile.", + "flatPath": "projects/{project}/global/networkProfiles/{networkProfile}", + "httpMethod": "GET", + "id": "compute.networkProfiles.get", + "parameterOrder": [ + "project", + "networkProfile" + ], + "parameters": { + "networkProfile": { + "description": "Name of the network profile to return.", + "location": "path", + "pattern": "[a-z](?:[-a-z0-9]{0,61}[a-z0-9])?|[1-9][0-9]{0,19}", + "required": true, + "type": "string" + }, + "project": { + "description": "Project ID for this request.", + "location": "path", + "pattern": "(?:(?:[-a-z0-9]{1,63}\\.)*(?:[a-z](?:[-a-z0-9]{0,61}[a-z0-9])?):)?(?:[0-9]{1,19}|(?:[a-z0-9](?:[-a-z0-9]{0,61}[a-z0-9])?))", + "required": true, + "type": "string" + } + }, + "path": "projects/{project}/global/networkProfiles/{networkProfile}", + "response": { + "$ref": "NetworkProfile" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/compute", + "https://www.googleapis.com/auth/compute.readonly" + ] + }, + "list": { + "description": "Retrieves a list of network profiles available to the specified project.", + "flatPath": "projects/{project}/global/networkProfiles", + "httpMethod": "GET", + "id": "compute.networkProfiles.list", + "parameterOrder": [ + "project" + ], + "parameters": { + "filter": { + "description": "A filter expression that filters resources listed in the response. Most Compute resources support two types of filter expressions: expressions that support regular expressions and expressions that follow API improvement proposal AIP-160. These two types of filter expressions cannot be mixed in one request. If you want to use AIP-160, your expression must specify the field name, an operator, and the value that you want to use for filtering. The value must be a string, a number, or a boolean. The operator must be either `=`, `!=`, `>`, `<`, `<=`, `>=` or `:`. For example, if you are filtering Compute Engine instances, you can exclude instances named `example-instance` by specifying `name != example-instance`. The `:*` comparison can be used to test whether a key has been defined. For example, to find all objects with `owner` label use: ``` labels.owner:* ``` You can also filter nested fields. For example, you could specify `scheduling.automaticRestart = false` to include instances only if they are not scheduled for automatic restarts. You can use filtering on nested fields to filter based on resource labels. To filter on multiple expressions, provide each separate expression within parentheses. For example: ``` (scheduling.automaticRestart = true) (cpuPlatform = \"Intel Skylake\") ``` By default, each expression is an `AND` expression. However, you can include `AND` and `OR` expressions explicitly. For example: ``` (cpuPlatform = \"Intel Skylake\") OR (cpuPlatform = \"Intel Broadwell\") AND (scheduling.automaticRestart = true) ``` If you want to use a regular expression, use the `eq` (equal) or `ne` (not equal) operator against a single un-parenthesized expression with or without quotes or against multiple parenthesized expressions. 
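The start parameter note above (a negative value means "the most recent N bytes", bounded by the retained 1 MB buffer) can be exercised like this, assuming google-api-python-client and hypothetical instance identifiers:

```python
from googleapiclient.discovery import build

compute = build("compute", "v1")

out = compute.instances().getSerialPortOutput(
    project="my-project",      # hypothetical identifiers
    zone="us-central1-a",
    instance="my-instance",
    port=1,
    start=-4096,               # only the last 4 KiB written to the serial console
).execute()
# "start" is the (possibly adjusted) position actually served; "next" is where
# a follow-up request should resume.
print(out.get("start"), out.get("next"))
print(out.get("contents", "")[-200:])
```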
Examples: `fieldname eq unquoted literal` `fieldname eq 'single quoted literal'` `fieldname eq \"double quoted literal\"` `(fieldname1 eq literal) (fieldname2 ne \"literal\")` The literal value is interpreted as a regular expression using Google RE2 library syntax. The literal value must match the entire field. For example, to filter for instances that do not end with name \"instance\", you would use `name ne .*instance`. You cannot combine constraints on multiple fields using regular expressions.", + "location": "query", + "type": "string" + }, + "maxResults": { + "default": "500", + "description": "The maximum number of results per page that should be returned. If the number of available results is larger than `maxResults`, Compute Engine returns a `nextPageToken` that can be used to get the next page of results in subsequent list requests. Acceptable values are `0` to `500`, inclusive. (Default: `500`)", + "format": "uint32", + "location": "query", + "minimum": "0", + "type": "integer" + }, + "orderBy": { + "description": "Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource name. You can also sort results in descending order based on the creation timestamp using `orderBy=\"creationTimestamp desc\"`. This sorts results based on the `creationTimestamp` field in reverse chronological order (newest result first). Use this to sort resources like operations so that the newest operation is returned first. Currently, only sorting by `name` or `creationTimestamp desc` is supported.", + "location": "query", + "type": "string" + }, + "pageToken": { + "description": "Specifies a page token to use. Set `pageToken` to the `nextPageToken` returned by a previous list request to get the next page of results.", + "location": "query", + "type": "string" + }, + "project": { + "description": "Project ID for this request.", + "location": "path", + "pattern": "(?:(?:[-a-z0-9]{1,63}\\.)*(?:[a-z](?:[-a-z0-9]{0,61}[a-z0-9])?):)?(?:[0-9]{1,19}|(?:[a-z0-9](?:[-a-z0-9]{0,61}[a-z0-9])?))", + "required": true, + "type": "string" + }, + "returnPartialSuccess": { + "description": "Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. For example, when partial success behavior is enabled, aggregatedList for a single zone scope either returns all resources in the zone or no resources, with an error code.", + "location": "query", + "type": "boolean" + } + }, + "path": "projects/{project}/global/networkProfiles", + "response": { + "$ref": "NetworkProfilesListResponse" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/compute", + "https://www.googleapis.com/auth/compute.readonly" + ] + } + } + }, "networks": { "methods": { "addPeering": { @@ -23182,11 +23468,11 @@ "https://www.googleapis.com/auth/compute" ] }, - "setInstanceTemplate": { - "description": "Sets the instance template to use when creating new instances or recreating instances in this group. Existing instances are not affected.", - "flatPath": "projects/{project}/regions/{region}/instanceGroupManagers/{instanceGroupManager}/setInstanceTemplate", + "resumeInstances": { + "description": "Flags the specified instances in the managed instance group to be resumed. This method increases the targetSize and decreases the targetSuspendedSize of the managed instance group by the number of instances that you resume. 
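The filter grammar documented above also applies to the new networkProfiles.list method. A sketch, assuming a client generated from a discovery document that includes the networkProfiles resource, mirroring the `name != example-instance` form from the description, and reading the response as the usual compute list shape with an `items` array:

```python
from googleapiclient.discovery import build

compute = build("compute", "v1")

resp = compute.networkProfiles().list(
    project="my-project",                 # hypothetical project
    filter="name != example-profile",     # simple AIP-160 style expression
    maxResults=50,
).execute()
for profile in resp.get("items", []):
    print(profile["name"])
```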
The resumeInstances operation is marked DONE if the resumeInstances request is successful. The underlying actions take additional time. You must separately verify the status of the RESUMING action with the listmanagedinstances method. In this request, you can only specify instances that are suspended. For example, if an instance was previously suspended using the suspendInstances method, it can be resumed using the resumeInstances method. If a health check is attached to the managed instance group, the specified instances will be verified as healthy after they are resumed. You can specify a maximum of 1000 instances with this method per request.", + "flatPath": "projects/{project}/regions/{region}/instanceGroupManagers/{instanceGroupManager}/resumeInstances", "httpMethod": "POST", - "id": "compute.regionInstanceGroupManagers.setInstanceTemplate", + "id": "compute.regionInstanceGroupManagers.resumeInstances", "parameterOrder": [ "project", "region", @@ -23194,7 +23480,7 @@ ], "parameters": { "instanceGroupManager": { - "description": "The name of the managed instance group.", + "description": "Name of the managed instance group.", "location": "path", "required": true, "type": "string" @@ -23218,9 +23504,9 @@ "type": "string" } }, - "path": "projects/{project}/regions/{region}/instanceGroupManagers/{instanceGroupManager}/setInstanceTemplate", + "path": "projects/{project}/regions/{region}/instanceGroupManagers/{instanceGroupManager}/resumeInstances", "request": { - "$ref": "RegionInstanceGroupManagersSetTemplateRequest" + "$ref": "RegionInstanceGroupManagersResumeInstancesRequest" }, "response": { "$ref": "Operation" @@ -23230,11 +23516,11 @@ "https://www.googleapis.com/auth/compute" ] }, - "setTargetPools": { - "description": "Modifies the target pools to which all new instances in this group are assigned. Existing instances in the group are not affected.", - "flatPath": "projects/{project}/regions/{region}/instanceGroupManagers/{instanceGroupManager}/setTargetPools", + "setInstanceTemplate": { + "description": "Sets the instance template to use when creating new instances or recreating instances in this group. Existing instances are not affected.", + "flatPath": "projects/{project}/regions/{region}/instanceGroupManagers/{instanceGroupManager}/setInstanceTemplate", "httpMethod": "POST", - "id": "compute.regionInstanceGroupManagers.setTargetPools", + "id": "compute.regionInstanceGroupManagers.setInstanceTemplate", "parameterOrder": [ "project", "region", @@ -23242,7 +23528,55 @@ ], "parameters": { "instanceGroupManager": { - "description": "Name of the managed instance group.", + "description": "The name of the managed instance group.", + "location": "path", + "required": true, + "type": "string" + }, + "project": { + "description": "Project ID for this request.", + "location": "path", + "pattern": "(?:(?:[-a-z0-9]{1,63}\\.)*(?:[a-z](?:[-a-z0-9]{0,61}[a-z0-9])?):)?(?:[0-9]{1,19}|(?:[a-z0-9](?:[-a-z0-9]{0,61}[a-z0-9])?))", + "required": true, + "type": "string" + }, + "region": { + "description": "Name of the region scoping this request.", + "location": "path", + "required": true, + "type": "string" + }, + "requestId": { + "description": "An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed. For example, consider a situation where you make an initial request and the request times out. 
If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. The request ID must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000).", + "location": "query", + "type": "string" + } + }, + "path": "projects/{project}/regions/{region}/instanceGroupManagers/{instanceGroupManager}/setInstanceTemplate", + "request": { + "$ref": "RegionInstanceGroupManagersSetTemplateRequest" + }, + "response": { + "$ref": "Operation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/compute" + ] + }, + "setTargetPools": { + "description": "Modifies the target pools to which all new instances in this group are assigned. Existing instances in the group are not affected.", + "flatPath": "projects/{project}/regions/{region}/instanceGroupManagers/{instanceGroupManager}/setTargetPools", + "httpMethod": "POST", + "id": "compute.regionInstanceGroupManagers.setTargetPools", + "parameterOrder": [ + "project", + "region", + "instanceGroupManager" + ], + "parameters": { + "instanceGroupManager": { + "description": "Name of the managed instance group.", "location": "path", "required": true, "type": "string" @@ -23278,6 +23612,150 @@ "https://www.googleapis.com/auth/compute" ] }, + "startInstances": { + "description": "Flags the specified instances in the managed instance group to be started. This method increases the targetSize and decreases the targetStoppedSize of the managed instance group by the number of instances that you start. The startInstances operation is marked DONE if the startInstances request is successful. The underlying actions take additional time. You must separately verify the status of the STARTING action with the listmanagedinstances method. In this request, you can only specify instances that are stopped. For example, if an instance was previously stopped using the stopInstances method, it can be started using the startInstances method. If a health check is attached to the managed instance group, the specified instances will be verified as healthy after they are started. You can specify a maximum of 1000 instances with this method per request.", + "flatPath": "projects/{project}/regions/{region}/instanceGroupManagers/{instanceGroupManager}/startInstances", + "httpMethod": "POST", + "id": "compute.regionInstanceGroupManagers.startInstances", + "parameterOrder": [ + "project", + "region", + "instanceGroupManager" + ], + "parameters": { + "instanceGroupManager": { + "description": "Name of the managed instance group.", + "location": "path", + "required": true, + "type": "string" + }, + "project": { + "description": "Project ID for this request.", + "location": "path", + "pattern": "(?:(?:[-a-z0-9]{1,63}\\.)*(?:[a-z](?:[-a-z0-9]{0,61}[a-z0-9])?):)?(?:[0-9]{1,19}|(?:[a-z0-9](?:[-a-z0-9]{0,61}[a-z0-9])?))", + "required": true, + "type": "string" + }, + "region": { + "description": "Name of the region scoping this request.", + "location": "path", + "required": true, + "type": "string" + }, + "requestId": { + "description": "An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed. For example, consider a situation where you make an initial request and the request times out. 
If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. The request ID must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000).", + "location": "query", + "type": "string" + } + }, + "path": "projects/{project}/regions/{region}/instanceGroupManagers/{instanceGroupManager}/startInstances", + "request": { + "$ref": "RegionInstanceGroupManagersStartInstancesRequest" + }, + "response": { + "$ref": "Operation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/compute" + ] + }, + "stopInstances": { + "description": "Flags the specified instances in the managed instance group to be immediately stopped. You can only specify instances that are running in this request. This method reduces the targetSize and increases the targetStoppedSize of the managed instance group by the number of instances that you stop. The stopInstances operation is marked DONE if the stopInstances request is successful. The underlying actions take additional time. You must separately verify the status of the STOPPING action with the listmanagedinstances method. If the standbyPolicy.initialDelaySec field is set, the group delays stopping the instances until initialDelaySec have passed from instance.creationTimestamp (that is, when the instance was created). This delay gives your application time to set itself up and initialize on the instance. If more than initialDelaySec seconds have passed since instance.creationTimestamp when this method is called, there will be zero delay. If the group is part of a backend service that has enabled connection draining, it can take up to 60 seconds after the connection draining duration has elapsed before the VM instance is stopped. Stopped instances can be started using the startInstances method. You can specify a maximum of 1000 instances with this method per request.", + "flatPath": "projects/{project}/regions/{region}/instanceGroupManagers/{instanceGroupManager}/stopInstances", + "httpMethod": "POST", + "id": "compute.regionInstanceGroupManagers.stopInstances", + "parameterOrder": [ + "project", + "region", + "instanceGroupManager" + ], + "parameters": { + "instanceGroupManager": { + "description": "The name of the managed instance group.", + "location": "path", + "required": true, + "type": "string" + }, + "project": { + "description": "Project ID for this request.", + "location": "path", + "pattern": "(?:(?:[-a-z0-9]{1,63}\\.)*(?:[a-z](?:[-a-z0-9]{0,61}[a-z0-9])?):)?(?:[0-9]{1,19}|(?:[a-z0-9](?:[-a-z0-9]{0,61}[a-z0-9])?))", + "required": true, + "type": "string" + }, + "region": { + "description": "Name of the region scoping this request.", + "location": "path", + "required": true, + "type": "string" + }, + "requestId": { + "description": "An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. 
The request ID must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000).", + "location": "query", + "type": "string" + } + }, + "path": "projects/{project}/regions/{region}/instanceGroupManagers/{instanceGroupManager}/stopInstances", + "request": { + "$ref": "RegionInstanceGroupManagersStopInstancesRequest" + }, + "response": { + "$ref": "Operation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/compute" + ] + }, + "suspendInstances": { + "description": "Flags the specified instances in the managed instance group to be immediately suspended. You can only specify instances that are running in this request. This method reduces the targetSize and increases the targetSuspendedSize of the managed instance group by the number of instances that you suspend. The suspendInstances operation is marked DONE if the suspendInstances request is successful. The underlying actions take additional time. You must separately verify the status of the SUSPENDING action with the listmanagedinstances method. If the standbyPolicy.initialDelaySec field is set, the group delays suspension of the instances until initialDelaySec have passed from instance.creationTimestamp (that is, when the instance was created). This delay gives your application time to set itself up and initialize on the instance. If more than initialDelaySec seconds have passed since instance.creationTimestamp when this method is called, there will be zero delay. If the group is part of a backend service that has enabled connection draining, it can take up to 60 seconds after the connection draining duration has elapsed before the VM instance is suspended. Suspended instances can be resumed using the resumeInstances method. You can specify a maximum of 1000 instances with this method per request.", + "flatPath": "projects/{project}/regions/{region}/instanceGroupManagers/{instanceGroupManager}/suspendInstances", + "httpMethod": "POST", + "id": "compute.regionInstanceGroupManagers.suspendInstances", + "parameterOrder": [ + "project", + "region", + "instanceGroupManager" + ], + "parameters": { + "instanceGroupManager": { + "description": "Name of the managed instance group.", + "location": "path", + "required": true, + "type": "string" + }, + "project": { + "description": "Project ID for this request.", + "location": "path", + "pattern": "(?:(?:[-a-z0-9]{1,63}\\.)*(?:[a-z](?:[-a-z0-9]{0,61}[a-z0-9])?):)?(?:[0-9]{1,19}|(?:[a-z0-9](?:[-a-z0-9]{0,61}[a-z0-9])?))", + "required": true, + "type": "string" + }, + "region": { + "description": "Name of the region scoping this request.", + "location": "path", + "required": true, + "type": "string" + }, + "requestId": { + "description": "An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. 
The request ID must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000).", + "location": "query", + "type": "string" + } + }, + "path": "projects/{project}/regions/{region}/instanceGroupManagers/{instanceGroupManager}/suspendInstances", + "request": { + "$ref": "RegionInstanceGroupManagersSuspendInstancesRequest" + }, + "response": { + "$ref": "Operation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/compute" + ] + }, "updatePerInstanceConfigs": { "description": "Inserts or updates per-instance configurations for the managed instance group. perInstanceConfig.name serves as a key used to distinguish whether to perform insert or patch.", "flatPath": "projects/{project}/regions/{region}/instanceGroupManagers/{instanceGroupManager}/updatePerInstanceConfigs", @@ -26262,6 +26740,56 @@ "https://www.googleapis.com/auth/cloud-platform", "https://www.googleapis.com/auth/compute" ] + }, + "setLabels": { + "description": "Sets the labels on a security policy. To learn more about labels, read the Labeling Resources documentation.", + "flatPath": "projects/{project}/regions/{region}/securityPolicies/{resource}/setLabels", + "httpMethod": "POST", + "id": "compute.regionSecurityPolicies.setLabels", + "parameterOrder": [ + "project", + "region", + "resource" + ], + "parameters": { + "project": { + "description": "Project ID for this request.", + "location": "path", + "pattern": "(?:(?:[-a-z0-9]{1,63}\\.)*(?:[a-z](?:[-a-z0-9]{0,61}[a-z0-9])?):)?(?:[0-9]{1,19}|(?:[a-z0-9](?:[-a-z0-9]{0,61}[a-z0-9])?))", + "required": true, + "type": "string" + }, + "region": { + "description": "The region for this request.", + "location": "path", + "pattern": "[a-z](?:[-a-z0-9]{0,61}[a-z0-9])?", + "required": true, + "type": "string" + }, + "requestId": { + "description": "An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. The request ID must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000).", + "location": "query", + "type": "string" + }, + "resource": { + "description": "Name or id of the resource for this request.", + "location": "path", + "pattern": "[a-z](?:[-a-z0-9]{0,61}[a-z0-9])?|[1-9][0-9]{0,19}", + "required": true, + "type": "string" + } + }, + "path": "projects/{project}/regions/{region}/securityPolicies/{resource}/setLabels", + "request": { + "$ref": "RegionSetLabelsRequest" + }, + "response": { + "$ref": "Operation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/compute" + ] } } }, @@ -37574,6 +38102,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -37610,6 +38139,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -37634,6 +38164,7 @@ "Error which is not critical. 
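The new regionSecurityPolicies.setLabels method shown above takes a RegionSetLabelsRequest with `labels` and `labelFingerprint`. A sketch assuming google-api-python-client and hypothetical resource names; it assumes the current fingerprint can be read from a prior get of the policy (if the policy exposes none, the field is sent empty), since set-labels calls on Compute resources use the fingerprint to guard against concurrent updates:

```python
from googleapiclient.discovery import build

compute = build("compute", "v1")
PROJECT, REGION, POLICY = "my-project", "us-central1", "my-security-policy"  # hypothetical

# Fetch the current policy to obtain its label fingerprint.
policy = compute.regionSecurityPolicies().get(
    project=PROJECT, region=REGION, securityPolicy=POLICY).execute()

# setLabels replaces the full label set on the resource.
op = compute.regionSecurityPolicies().setLabels(
    project=PROJECT,
    region=REGION,
    resource=POLICY,
    body={
        "labels": {"env": "staging", "owner": "net-team"},
        "labelFingerprint": policy.get("labelFingerprint", ""),
    },
).execute()
print(op["status"])
```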
We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -37726,6 +38257,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -37762,6 +38294,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -37786,6 +38319,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -37860,6 +38394,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -37896,6 +38431,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -37920,6 +38456,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -38261,6 +38798,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -38297,6 +38835,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -38321,6 +38860,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -38413,6 +38953,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -38449,6 +38990,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -38473,6 +39015,7 @@ "Error which is not critical. 
We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -38547,6 +39090,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -38583,6 +39127,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -38607,6 +39152,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -39142,7 +39688,7 @@ "type": "object" }, "AuditConfig": { - "description": "Specifies the audit configuration for a service. The configuration determines which permission types are logged, and what identities, if any, are exempted from logging. An AuditConfig must have one or more AuditLogConfigs. If there are AuditConfigs for both `allServices` and a specific service, the union of the two AuditConfigs is used for that service: the log_types specified in each AuditConfig are enabled, and the exempted_members in each AuditLogConfig are exempted. Example Policy with multiple AuditConfigs: { \"audit_configs\": [ { \"service\": \"allServices\", \"audit_log_configs\": [ { \"log_type\": \"DATA_READ\", \"exempted_members\": [ \"user:jose@example.com\" ] }, { \"log_type\": \"DATA_WRITE\" }, { \"log_type\": \"ADMIN_READ\" } ] }, { \"service\": \"sampleservice.googleapis.com\", \"audit_log_configs\": [ { \"log_type\": \"DATA_READ\" }, { \"log_type\": \"DATA_WRITE\", \"exempted_members\": [ \"user:aliya@example.com\" ] } ] } ] } For sampleservice, this policy enables DATA_READ, DATA_WRITE and ADMIN_READ logging. It also exempts jose@example.com from DATA_READ logging, and aliya@example.com from DATA_WRITE logging.", + "description": "Specifies the audit configuration for a service. The configuration determines which permission types are logged, and what identities, if any, are exempted from logging. An AuditConfig must have one or more AuditLogConfigs. If there are AuditConfigs for both `allServices` and a specific service, the union of the two AuditConfigs is used for that service: the log_types specified in each AuditConfig are enabled, and the exempted_members in each AuditLogConfig are exempted. 
Example Policy with multiple AuditConfigs: { \"audit_configs\": [ { \"service\": \"allServices\", \"audit_log_configs\": [ { \"log_type\": \"DATA_READ\", \"exempted_members\": [ \"user:jose@example.com\" ] }, { \"log_type\": \"DATA_WRITE\" }, { \"log_type\": \"ADMIN_READ\" } ] }, { \"service\": \"sampleservice.googleapis.com\", \"audit_log_configs\": [ { \"log_type\": \"DATA_READ\" }, { \"log_type\": \"DATA_WRITE\", \"exempted_members\": [ \"user:aliya@example.com\" ] } ] } ] } For sampleservice, this policy enables DATA_READ, DATA_WRITE and ADMIN_READ logging. It also exempts `jose@example.com` from DATA_READ logging, and `aliya@example.com` from DATA_WRITE logging.", "id": "AuditConfig", "properties": { "auditLogConfigs": { @@ -39341,6 +39887,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -39377,6 +39924,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -39401,6 +39949,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -39493,6 +40042,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -39529,6 +40079,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -39553,6 +40104,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -39685,6 +40237,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -39721,6 +40274,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -39745,6 +40299,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -40148,7 +40703,7 @@ "description": "The CacheKeyPolicy for this CdnPolicy." }, "cacheMode": { - "description": "Specifies the cache setting for all responses from this backend. The possible values are: USE_ORIGIN_HEADERS Requires the origin to set valid caching headers to cache content. 
Responses without these headers will not be cached at Google's edge, and will require a full trip to the origin on every request, potentially impacting performance and increasing load on the origin server. FORCE_CACHE_ALL Cache all content, ignoring any \"private\", \"no-store\" or \"no-cache\" directives in Cache-Control response headers. Warning: this may result in Cloud CDN caching private, per-user (user identifiable) content. CACHE_ALL_STATIC Automatically cache static content, including common image formats, media (video and audio), and web assets (JavaScript and CSS). Requests and responses that are marked as uncacheable, as well as dynamic content (including HTML), will not be cached.", + "description": "Specifies the cache setting for all responses from this backend. The possible values are: USE_ORIGIN_HEADERS Requires the origin to set valid caching headers to cache content. Responses without these headers will not be cached at Google's edge, and will require a full trip to the origin on every request, potentially impacting performance and increasing load on the origin server. FORCE_CACHE_ALL Cache all content, ignoring any \"private\", \"no-store\" or \"no-cache\" directives in Cache-Control response headers. Warning: this may result in Cloud CDN caching private, per-user (user identifiable) content. CACHE_ALL_STATIC Automatically cache static content, including common image formats, media (video and audio), and web assets (JavaScript and CSS). Requests and responses that are marked as uncacheable, as well as dynamic content (including HTML), will not be cached. If no value is provided for cdnPolicy.cacheMode, it defaults to CACHE_ALL_STATIC.", "enum": [ "CACHE_ALL_STATIC", "FORCE_CACHE_ALL", @@ -40317,6 +40872,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -40353,6 +40909,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -40377,6 +40934,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -40797,6 +41355,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -40833,6 +41392,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -40857,6 +41417,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -40910,7 +41471,7 @@ "description": "The CacheKeyPolicy for this CdnPolicy." 
}, "cacheMode": { - "description": "Specifies the cache setting for all responses from this backend. The possible values are: USE_ORIGIN_HEADERS Requires the origin to set valid caching headers to cache content. Responses without these headers will not be cached at Google's edge, and will require a full trip to the origin on every request, potentially impacting performance and increasing load on the origin server. FORCE_CACHE_ALL Cache all content, ignoring any \"private\", \"no-store\" or \"no-cache\" directives in Cache-Control response headers. Warning: this may result in Cloud CDN caching private, per-user (user identifiable) content. CACHE_ALL_STATIC Automatically cache static content, including common image formats, media (video and audio), and web assets (JavaScript and CSS). Requests and responses that are marked as uncacheable, as well as dynamic content (including HTML), will not be cached.", + "description": "Specifies the cache setting for all responses from this backend. The possible values are: USE_ORIGIN_HEADERS Requires the origin to set valid caching headers to cache content. Responses without these headers will not be cached at Google's edge, and will require a full trip to the origin on every request, potentially impacting performance and increasing load on the origin server. FORCE_CACHE_ALL Cache all content, ignoring any \"private\", \"no-store\" or \"no-cache\" directives in Cache-Control response headers. Warning: this may result in Cloud CDN caching private, per-user (user identifiable) content. CACHE_ALL_STATIC Automatically cache static content, including common image formats, media (video and audio), and web assets (JavaScript and CSS). Requests and responses that are marked as uncacheable, as well as dynamic content (including HTML), will not be cached. If no value is provided for cdnPolicy.cacheMode, it defaults to CACHE_ALL_STATIC.", "enum": [ "CACHE_ALL_STATIC", "FORCE_CACHE_ALL", @@ -41189,6 +41750,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -41225,6 +41787,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -41249,6 +41812,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -41341,6 +41905,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -41377,6 +41942,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -41401,6 +41967,7 @@ "Error which is not critical. 
We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -41590,6 +42157,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -41626,6 +42194,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -41650,6 +42219,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -41934,10 +42504,6 @@ "description": "Associates `members`, or principals, with a `role`.", "id": "Binding", "properties": { - "bindingId": { - "description": "This is deprecated and has no effect. Do not use.", - "type": "string" - }, "condition": { "$ref": "Expr", "description": "The condition that is associated with this binding. If the condition evaluates to `true`, then this binding applies to the current request. If the condition evaluates to `false`, then this binding does not apply to the current request. However, a different role binding might grant the same role to one or more of the principals in this binding. To learn which resources support conditions in their IAM policies, see the [IAM documentation](https://cloud.google.com/iam/help/conditions/resource-policies)." @@ -42310,6 +42876,7 @@ "COMPUTE_OPTIMIZED_H3", "GENERAL_PURPOSE", "GENERAL_PURPOSE_C4", + "GENERAL_PURPOSE_C4A", "GENERAL_PURPOSE_E2", "GENERAL_PURPOSE_N2", "GENERAL_PURPOSE_N2D", @@ -42318,6 +42885,9 @@ "GRAPHICS_OPTIMIZED", "MEMORY_OPTIMIZED", "MEMORY_OPTIMIZED_M3", + "MEMORY_OPTIMIZED_X4_16TB", + "MEMORY_OPTIMIZED_X4_24TB", + "MEMORY_OPTIMIZED_X4_32TB", "STORAGE_OPTIMIZED_Z3", "TYPE_UNSPECIFIED" ], @@ -42341,6 +42911,10 @@ "", "", "", + "", + "", + "", + "", "" ], "type": "string" @@ -42410,6 +42984,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -42446,6 +43021,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -42470,6 +43046,7 @@ "Error which is not critical. 
We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -42562,6 +43139,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -42598,6 +43176,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -42622,6 +43201,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -42707,6 +43287,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -42743,6 +43324,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -42767,6 +43349,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -42804,86 +43387,6 @@ }, "type": "object" }, - "Condition": { - "description": "This is deprecated and has no effect. Do not use.", - "id": "Condition", - "properties": { - "iam": { - "description": "This is deprecated and has no effect. Do not use.", - "enum": [ - "APPROVER", - "ATTRIBUTION", - "AUTHORITY", - "CREDENTIALS_TYPE", - "CREDS_ASSERTION", - "JUSTIFICATION_TYPE", - "NO_ATTR", - "SECURITY_REALM" - ], - "enumDescriptions": [ - "This is deprecated and has no effect. Do not use.", - "This is deprecated and has no effect. Do not use.", - "This is deprecated and has no effect. Do not use.", - "This is deprecated and has no effect. Do not use.", - "This is deprecated and has no effect. Do not use.", - "This is deprecated and has no effect. Do not use.", - "This is deprecated and has no effect. Do not use.", - "This is deprecated and has no effect. Do not use." - ], - "type": "string" - }, - "op": { - "description": "This is deprecated and has no effect. Do not use.", - "enum": [ - "DISCHARGED", - "EQUALS", - "IN", - "NOT_EQUALS", - "NOT_IN", - "NO_OP" - ], - "enumDescriptions": [ - "This is deprecated and has no effect. Do not use.", - "This is deprecated and has no effect. Do not use.", - "This is deprecated and has no effect. Do not use.", - "This is deprecated and has no effect. Do not use.", - "This is deprecated and has no effect. 
Do not use.", - "This is deprecated and has no effect. Do not use." - ], - "type": "string" - }, - "svc": { - "description": "This is deprecated and has no effect. Do not use.", - "type": "string" - }, - "sys": { - "description": "This is deprecated and has no effect. Do not use.", - "enum": [ - "IP", - "NAME", - "NO_ATTR", - "REGION", - "SERVICE" - ], - "enumDescriptions": [ - "This is deprecated and has no effect. Do not use.", - "This is deprecated and has no effect. Do not use.", - "This is deprecated and has no effect. Do not use.", - "This is deprecated and has no effect. Do not use.", - "This is deprecated and has no effect. Do not use." - ], - "type": "string" - }, - "values": { - "description": "This is deprecated and has no effect. Do not use.", - "items": { - "type": "string" - }, - "type": "array" - } - }, - "type": "object" - }, "ConfidentialInstanceConfig": { "description": "A set of Confidential Instance options.", "id": "ConfidentialInstanceConfig", @@ -43481,6 +43984,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -43517,6 +44021,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -43541,6 +44046,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -43705,6 +44211,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -43741,6 +44248,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -43765,6 +44273,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -43988,6 +44497,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -44024,6 +44534,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -44048,6 +44559,7 @@ "Error which is not critical. 
We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -44140,6 +44652,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -44176,6 +44689,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -44200,6 +44714,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -44274,6 +44789,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -44310,6 +44826,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -44334,6 +44851,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -44445,6 +44963,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -44481,6 +45000,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -44505,6 +45025,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -44748,6 +45269,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -44784,6 +45306,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -44808,6 +45331,7 @@ "Error which is not critical. 
We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -45018,6 +45542,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -45054,6 +45579,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -45078,6 +45604,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -45346,6 +45873,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -45382,6 +45910,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -45406,6 +45935,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -45641,6 +46171,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -45677,6 +46208,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -45701,6 +46233,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -46280,6 +46813,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -46316,6 +46850,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -46340,6 +46875,7 @@ "Error which is not critical. 
We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -46432,6 +46968,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -46468,6 +47005,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -46492,6 +47030,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -46594,6 +47133,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -46630,6 +47170,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -46654,6 +47195,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -47254,6 +47796,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -47290,6 +47833,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -47314,6 +47858,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -47512,6 +48057,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -47548,6 +48094,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -47572,6 +48119,7 @@ "Error which is not critical. 
We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -47671,6 +48219,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -47707,6 +48256,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -47731,6 +48281,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -47805,6 +48356,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -47841,6 +48393,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -47865,6 +48418,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -48349,6 +48903,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -48385,6 +48940,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -48409,6 +48965,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -48608,15 +49165,15 @@ }, "routeAction": { "$ref": "HttpRouteAction", - "description": "In response to a matching matchRule, the load balancer performs advanced routing actions, such as URL rewrites and header transformations, before forwarding the request to the selected backend. If routeAction specifies any weightedBackendServices, service must not be set. Conversely if service is set, routeAction cannot contain any weightedBackendServices. Only one of urlRedirect, service or routeAction.weightedBackendService must be set. 
URL maps for classic Application Load Balancers only support the urlRewrite action within a route rule's routeAction." + "description": "In response to a matching matchRule, the load balancer performs advanced routing actions, such as URL rewrites and header transformations, before forwarding the request to the selected backend. Only one of urlRedirect, service or routeAction.weightedBackendService can be set. URL maps for classic Application Load Balancers only support the urlRewrite action within a route rule's routeAction." }, "service": { - "description": "The full or partial URL of the backend service resource to which traffic is directed if this rule is matched. If routeAction is also specified, advanced routing actions, such as URL rewrites, take effect before sending the request to the backend. However, if service is specified, routeAction cannot contain any weightedBackendServices. Conversely, if routeAction specifies any weightedBackendServices, service must not be specified. Only one of urlRedirect, service or routeAction.weightedBackendService must be set.", + "description": "The full or partial URL of the backend service resource to which traffic is directed if this rule is matched. If routeAction is also specified, advanced routing actions, such as URL rewrites, take effect before sending the request to the backend. Only one of urlRedirect, service or routeAction.weightedBackendService can be set.", "type": "string" }, "urlRedirect": { "$ref": "HttpRedirectAction", - "description": "When this rule is matched, the request is redirected to a URL specified by urlRedirect. If urlRedirect is specified, service or routeAction must not be set. Not supported when the URL map is bound to a target gRPC proxy." + "description": "When this rule is matched, the request is redirected to a URL specified by urlRedirect. Only one of urlRedirect, service or routeAction.weightedBackendService can be set. Not supported when the URL map is bound to a target gRPC proxy." } }, "type": "object" @@ -48791,6 +49348,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -48827,6 +49385,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -48851,6 +49410,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -49175,6 +49735,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -49211,6 +49772,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -49235,6 +49797,7 @@ "Error which is not critical. 
We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -49625,6 +50188,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -49661,6 +50225,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -49685,6 +50250,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -49896,6 +50462,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -49932,6 +50499,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -49956,6 +50524,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -50048,6 +50617,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -50084,6 +50654,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -50108,6 +50679,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -50190,6 +50762,10 @@ "format": "uint64", "type": "string" }, + "instanceFlexibilityPolicy": { + "$ref": "InstanceGroupManagerInstanceFlexibilityPolicy", + "description": "Instance flexibility allowing MIG to create VMs from multiple types of machines. Instance flexibility configuration on MIG overrides instance template configuration." 
+ }, "instanceGroup": { "description": "[Output Only] The URL of the Instance Group resource.", "type": "string" @@ -50231,7 +50807,7 @@ "type": "string" }, "namedPorts": { - "description": "Named ports configured for the Instance Groups complementary to this Instance Group Manager.", + "description": "[Output Only] Named ports configured on the Instance Groups complementary to this Instance Group Manager.", "items": { "$ref": "NamedPort" }, @@ -50253,6 +50829,10 @@ "description": "[Output Only] The URL for this managed instance group. The server defines this URL.", "type": "string" }, + "standbyPolicy": { + "$ref": "InstanceGroupManagerStandbyPolicy", + "description": "Standby policy for stopped and suspended instances." + }, "statefulPolicy": { "$ref": "StatefulPolicy", "description": "Stateful configuration for this Instanced Group Manager" @@ -50279,6 +50859,16 @@ "format": "int32", "type": "integer" }, + "targetStoppedSize": { + "description": "The target number of stopped instances for this managed instance group. This number changes when you: - Stop instance using the stopInstances method or start instances using the startInstances method. - Manually change the targetStoppedSize using the update method. ", + "format": "int32", + "type": "integer" + }, + "targetSuspendedSize": { + "description": "The target number of suspended instances for this managed instance group. This number changes when you: - Suspend instance using the suspendInstances method or resume instances using the resumeInstances method. - Manually change the targetSuspendedSize using the update method. ", + "format": "int32", + "type": "integer" + }, "updatePolicy": { "$ref": "InstanceGroupManagerUpdatePolicy", "description": "The update policy for this managed instance group." @@ -50430,6 +51020,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -50466,6 +51057,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -50490,6 +51082,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -50552,6 +51145,37 @@ }, "type": "object" }, + "InstanceGroupManagerInstanceFlexibilityPolicy": { + "id": "InstanceGroupManagerInstanceFlexibilityPolicy", + "properties": { + "instanceSelections": { + "additionalProperties": { + "$ref": "InstanceGroupManagerInstanceFlexibilityPolicyInstanceSelection" + }, + "description": "Named instance selections configuring properties that the group will use when creating new VMs.", + "type": "object" + } + }, + "type": "object" + }, + "InstanceGroupManagerInstanceFlexibilityPolicyInstanceSelection": { + "id": "InstanceGroupManagerInstanceFlexibilityPolicyInstanceSelection", + "properties": { + "machineTypes": { + "description": "Full machine-type names, e.g. \"n1-standard-16\".", + "items": { + "type": "string" + }, + "type": "array" + }, + "rank": { + "description": "Preference of this instance selection. Lower number means higher preference. 
MIG will first try to create a VM based on the machine-type with lowest rank and fallback to next rank based on availability. Machine types and instance selections with the same rank have the same preference.", + "format": "int32", + "type": "integer" + } + }, + "type": "object" + }, "InstanceGroupManagerInstanceLifecyclePolicy": { "id": "InstanceGroupManagerInstanceLifecyclePolicy", "properties": { @@ -50637,6 +51261,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -50673,6 +51298,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -50697,6 +51323,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -50981,6 +51608,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -51017,6 +51645,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -51041,6 +51670,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -51078,6 +51708,29 @@ }, "type": "object" }, + "InstanceGroupManagerStandbyPolicy": { + "id": "InstanceGroupManagerStandbyPolicy", + "properties": { + "initialDelaySec": { + "description": "Specifies the number of seconds that the MIG should wait to suspend or stop a VM after that VM was created. The initial delay gives the initialization script the time to prepare your VM for a quick scale out. The value of initial delay must be between 0 and 3600 seconds. The default value is 0.", + "format": "int32", + "type": "integer" + }, + "mode": { + "description": "Defines how a MIG resumes or starts VMs from a standby pool when the group scales out. The default mode is `MANUAL`.", + "enum": [ + "MANUAL", + "SCALE_OUT_POOL" + ], + "enumDescriptions": [ + "MIG does not automatically resume or start VMs in the standby pool when the group scales out.", + "MIG automatically resumes or starts VMs in the standby pool when the group scales out, and replenishes the standby pool afterwards." 
+ ], + "type": "string" + } + }, + "type": "object" + }, "InstanceGroupManagerStatus": { "id": "InstanceGroupManagerStatus", "properties": { @@ -51435,6 +52088,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -51471,6 +52125,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -51495,6 +52150,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -51559,6 +52215,19 @@ }, "type": "object" }, + "InstanceGroupManagersResumeInstancesRequest": { + "id": "InstanceGroupManagersResumeInstancesRequest", + "properties": { + "instances": { + "description": "The URLs of one or more instances to resume. This can be a full URL or a partial URL, such as zones/[ZONE]/instances/[INSTANCE_NAME].", + "items": { + "type": "string" + }, + "type": "array" + } + }, + "type": "object" + }, "InstanceGroupManagersScopedList": { "id": "InstanceGroupManagersScopedList", "properties": { @@ -51596,6 +52265,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -51632,6 +52302,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -51656,6 +52327,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -51721,6 +52393,53 @@ }, "type": "object" }, + "InstanceGroupManagersStartInstancesRequest": { + "id": "InstanceGroupManagersStartInstancesRequest", + "properties": { + "instances": { + "description": "The URLs of one or more instances to start. This can be a full URL or a partial URL, such as zones/[ZONE]/instances/[INSTANCE_NAME].", + "items": { + "type": "string" + }, + "type": "array" + } + }, + "type": "object" + }, + "InstanceGroupManagersStopInstancesRequest": { + "id": "InstanceGroupManagersStopInstancesRequest", + "properties": { + "forceStop": { + "description": "If this flag is set to true, the Instance Group Manager will proceed to stop the instances, skipping initialization on them.", + "type": "boolean" + }, + "instances": { + "description": "The URLs of one or more instances to stop. 
This can be a full URL or a partial URL, such as zones/[ZONE]/instances/[INSTANCE_NAME].", + "items": { + "type": "string" + }, + "type": "array" + } + }, + "type": "object" + }, + "InstanceGroupManagersSuspendInstancesRequest": { + "id": "InstanceGroupManagersSuspendInstancesRequest", + "properties": { + "forceSuspend": { + "description": "If this flag is set to true, the Instance Group Manager will proceed to suspend the instances, skipping initialization on them.", + "type": "boolean" + }, + "instances": { + "description": "The URLs of one or more instances to suspend. This can be a full URL or a partial URL, such as zones/[ZONE]/instances/[INSTANCE_NAME].", + "items": { + "type": "string" + }, + "type": "array" + } + }, + "type": "object" + }, "InstanceGroupManagersUpdatePerInstanceConfigsReq": { "description": "InstanceGroupManagers.updatePerInstanceConfigs", "id": "InstanceGroupManagersUpdatePerInstanceConfigsReq", @@ -51802,6 +52521,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -51838,6 +52558,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -51862,6 +52583,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -51967,6 +52689,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -52003,6 +52726,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -52027,6 +52751,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -52137,6 +52862,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -52173,6 +52899,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -52197,6 +52924,7 @@ "Error which is not critical. 
We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -52289,6 +53017,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -52325,6 +53054,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -52349,6 +53079,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -52815,6 +53546,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -52851,6 +53583,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -52875,6 +53608,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -52967,6 +53701,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -53003,6 +53738,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -53027,6 +53763,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -53101,6 +53838,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -53137,6 +53875,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -53161,6 +53900,7 @@ "Error which is not critical. 
We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -53390,6 +54130,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -53426,6 +54167,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -53450,6 +54192,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -53776,6 +54519,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -53812,6 +54556,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -53836,6 +54581,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -53928,6 +54674,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -53964,150 +54711,6 @@ false, false, false, - false - ], - "enumDescriptions": [ - "Warning about failed cleanup of transient changes made by a failed operation.", - "A link to a deprecated resource was created.", - "When deploying and at least one of the resources has a type marked as deprecated", - "The user created a boot disk that is larger than image size.", - "When deploying and at least one of the resources has a type marked as experimental", - "Warning that is present in an external api call", - "Warning that value of a field has been overridden. 
Deprecated unused field.", - "The operation involved use of an injected kernel, which is deprecated.", - "A WEIGHTED_MAGLEV backend service is associated with a health check that is not of type HTTP/HTTPS/HTTP2.", - "When deploying a deployment with a exceedingly large number of resources", - "Resource can't be retrieved due to list overhead quota exceed which captures the amount of resources filtered out by user-defined list filter.", - "A resource depends on a missing type", - "The route's nextHopIp address is not assigned to an instance on the network.", - "The route's next hop instance cannot ip forward.", - "The route's nextHopInstance URL refers to an instance that does not have an ipv6 interface on the same network as the route.", - "The route's nextHopInstance URL refers to an instance that does not exist.", - "The route's nextHopInstance URL refers to an instance that is not on the same network as the route.", - "The route's next hop instance does not have a status of RUNNING.", - "Error which is not critical. We decided to continue the process despite the mentioned error.", - "No results are present on a particular list page.", - "Success is reported, but some results may be missing due to errors", - "The user attempted to use a resource that requires a TOS they have not accepted.", - "Warning that a resource is in use.", - "One or more of the resources set to auto-delete could not be deleted because they were in use.", - "When a resource schema validation is ignored.", - "Instance template used in instance group manager is valid as such, but its application does not make a lot of sense, because it allows only single instance in instance group.", - "When undeclared properties in the schema are present", - "A given scope cannot be reached." - ], - "type": "string" - }, - "data": { - "description": "[Output Only] Metadata about this warning in key: value format. For example: \"data\": [ { \"key\": \"scope\", \"value\": \"zones/us-east1-d\" } ", - "items": { - "properties": { - "key": { - "description": "[Output Only] A key that provides more detail on the warning being returned. For example, for warnings where there are no results in a list request for a particular zone, this key might be scope and the key value might be the zone name. 
Other examples might be a key indicating a deprecated resource and a suggested replacement, or a warning about invalid network settings (for example, if an instance attempts to perform IP forwarding but is not enabled for IP forwarding).", - "type": "string" - }, - "value": { - "description": "[Output Only] A warning data value corresponding to the key.", - "type": "string" - } - }, - "type": "object" - }, - "type": "array" - }, - "message": { - "description": "[Output Only] A human-readable description of the warning code.", - "type": "string" - } - }, - "type": "object" - } - }, - "type": "object" - }, - "InstantSnapshotResourceStatus": { - "id": "InstantSnapshotResourceStatus", - "properties": { - "storageSizeBytes": { - "description": "[Output Only] The storage size of this instant snapshot.", - "format": "int64", - "type": "string" - } - }, - "type": "object" - }, - "InstantSnapshotsScopedList": { - "id": "InstantSnapshotsScopedList", - "properties": { - "instantSnapshots": { - "description": "[Output Only] A list of instantSnapshots contained in this scope.", - "items": { - "$ref": "InstantSnapshot" - }, - "type": "array" - }, - "warning": { - "description": "[Output Only] Informational warning which replaces the list of instantSnapshots when the list is empty.", - "properties": { - "code": { - "description": "[Output Only] A warning code, if applicable. For example, Compute Engine returns NO_RESULTS_ON_PAGE if there are no results in the response.", - "enum": [ - "CLEANUP_FAILED", - "DEPRECATED_RESOURCE_USED", - "DEPRECATED_TYPE_USED", - "DISK_SIZE_LARGER_THAN_IMAGE_SIZE", - "EXPERIMENTAL_TYPE_USED", - "EXTERNAL_API_WARNING", - "FIELD_VALUE_OVERRIDEN", - "INJECTED_KERNELS_DEPRECATED", - "INVALID_HEALTH_CHECK_FOR_DYNAMIC_WIEGHTED_LB", - "LARGE_DEPLOYMENT_WARNING", - "LIST_OVERHEAD_QUOTA_EXCEED", - "MISSING_TYPE_DEPENDENCY", - "NEXT_HOP_ADDRESS_NOT_ASSIGNED", - "NEXT_HOP_CANNOT_IP_FORWARD", - "NEXT_HOP_INSTANCE_HAS_NO_IPV6_INTERFACE", - "NEXT_HOP_INSTANCE_NOT_FOUND", - "NEXT_HOP_INSTANCE_NOT_ON_NETWORK", - "NEXT_HOP_NOT_RUNNING", - "NOT_CRITICAL_ERROR", - "NO_RESULTS_ON_PAGE", - "PARTIAL_SUCCESS", - "REQUIRED_TOS_AGREEMENT", - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", - "RESOURCE_NOT_DELETED", - "SCHEMA_VALIDATION_IGNORED", - "SINGLE_INSTANCE_PROPERTY_TEMPLATE", - "UNDECLARED_PROPERTIES", - "UNREACHABLE" - ], - "enumDeprecated": [ - false, - false, - false, - false, - false, - false, - true, - false, - false, - false, - false, - false, - false, - false, - false, - false, - false, - false, - false, - false, - false, - false, - false, - false, - false, - false, false, false ], @@ -54133,6 +54736,155 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", + "The user attempted to use a resource that requires a TOS they have not accepted.", + "Warning that a resource is in use.", + "One or more of the resources set to auto-delete could not be deleted because they were in use.", + "When a resource schema validation is ignored.", + "Instance template used in instance group manager is valid as such, but its application does not make a lot of sense, because it allows only single instance in instance group.", + "When undeclared properties in the schema are present", + "A given scope cannot be reached." 
+ ], + "type": "string" + }, + "data": { + "description": "[Output Only] Metadata about this warning in key: value format. For example: \"data\": [ { \"key\": \"scope\", \"value\": \"zones/us-east1-d\" } ", + "items": { + "properties": { + "key": { + "description": "[Output Only] A key that provides more detail on the warning being returned. For example, for warnings where there are no results in a list request for a particular zone, this key might be scope and the key value might be the zone name. Other examples might be a key indicating a deprecated resource and a suggested replacement, or a warning about invalid network settings (for example, if an instance attempts to perform IP forwarding but is not enabled for IP forwarding).", + "type": "string" + }, + "value": { + "description": "[Output Only] A warning data value corresponding to the key.", + "type": "string" + } + }, + "type": "object" + }, + "type": "array" + }, + "message": { + "description": "[Output Only] A human-readable description of the warning code.", + "type": "string" + } + }, + "type": "object" + } + }, + "type": "object" + }, + "InstantSnapshotResourceStatus": { + "id": "InstantSnapshotResourceStatus", + "properties": { + "storageSizeBytes": { + "description": "[Output Only] The storage size of this instant snapshot.", + "format": "int64", + "type": "string" + } + }, + "type": "object" + }, + "InstantSnapshotsScopedList": { + "id": "InstantSnapshotsScopedList", + "properties": { + "instantSnapshots": { + "description": "[Output Only] A list of instantSnapshots contained in this scope.", + "items": { + "$ref": "InstantSnapshot" + }, + "type": "array" + }, + "warning": { + "description": "[Output Only] Informational warning which replaces the list of instantSnapshots when the list is empty.", + "properties": { + "code": { + "description": "[Output Only] A warning code, if applicable. 
For example, Compute Engine returns NO_RESULTS_ON_PAGE if there are no results in the response.", + "enum": [ + "CLEANUP_FAILED", + "DEPRECATED_RESOURCE_USED", + "DEPRECATED_TYPE_USED", + "DISK_SIZE_LARGER_THAN_IMAGE_SIZE", + "EXPERIMENTAL_TYPE_USED", + "EXTERNAL_API_WARNING", + "FIELD_VALUE_OVERRIDEN", + "INJECTED_KERNELS_DEPRECATED", + "INVALID_HEALTH_CHECK_FOR_DYNAMIC_WIEGHTED_LB", + "LARGE_DEPLOYMENT_WARNING", + "LIST_OVERHEAD_QUOTA_EXCEED", + "MISSING_TYPE_DEPENDENCY", + "NEXT_HOP_ADDRESS_NOT_ASSIGNED", + "NEXT_HOP_CANNOT_IP_FORWARD", + "NEXT_HOP_INSTANCE_HAS_NO_IPV6_INTERFACE", + "NEXT_HOP_INSTANCE_NOT_FOUND", + "NEXT_HOP_INSTANCE_NOT_ON_NETWORK", + "NEXT_HOP_NOT_RUNNING", + "NOT_CRITICAL_ERROR", + "NO_RESULTS_ON_PAGE", + "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", + "REQUIRED_TOS_AGREEMENT", + "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", + "RESOURCE_NOT_DELETED", + "SCHEMA_VALIDATION_IGNORED", + "SINGLE_INSTANCE_PROPERTY_TEMPLATE", + "UNDECLARED_PROPERTIES", + "UNREACHABLE" + ], + "enumDeprecated": [ + false, + false, + false, + false, + false, + false, + true, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false + ], + "enumDescriptions": [ + "Warning about failed cleanup of transient changes made by a failed operation.", + "A link to a deprecated resource was created.", + "When deploying and at least one of the resources has a type marked as deprecated", + "The user created a boot disk that is larger than image size.", + "When deploying and at least one of the resources has a type marked as experimental", + "Warning that is present in an external api call", + "Warning that value of a field has been overridden. Deprecated unused field.", + "The operation involved use of an injected kernel, which is deprecated.", + "A WEIGHTED_MAGLEV backend service is associated with a health check that is not of type HTTP/HTTPS/HTTP2.", + "When deploying a deployment with a exceedingly large number of resources", + "Resource can't be retrieved due to list overhead quota exceed which captures the amount of resources filtered out by user-defined list filter.", + "A resource depends on a missing type", + "The route's nextHopIp address is not assigned to an instance on the network.", + "The route's next hop instance cannot ip forward.", + "The route's nextHopInstance URL refers to an instance that does not have an ipv6 interface on the same network as the route.", + "The route's nextHopInstance URL refers to an instance that does not exist.", + "The route's nextHopInstance URL refers to an instance that is not on the same network as the route.", + "The route's next hop instance does not have a status of RUNNING.", + "Error which is not critical. 
We decided to continue the process despite the mentioned error.", + "No results are present on a particular list page.", + "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -54730,6 +55482,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -54766,6 +55519,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -54790,6 +55544,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -54923,6 +55678,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -54959,170 +55715,6 @@ false, false, false, - false - ], - "enumDescriptions": [ - "Warning about failed cleanup of transient changes made by a failed operation.", - "A link to a deprecated resource was created.", - "When deploying and at least one of the resources has a type marked as deprecated", - "The user created a boot disk that is larger than image size.", - "When deploying and at least one of the resources has a type marked as experimental", - "Warning that is present in an external api call", - "Warning that value of a field has been overridden. Deprecated unused field.", - "The operation involved use of an injected kernel, which is deprecated.", - "A WEIGHTED_MAGLEV backend service is associated with a health check that is not of type HTTP/HTTPS/HTTP2.", - "When deploying a deployment with a exceedingly large number of resources", - "Resource can't be retrieved due to list overhead quota exceed which captures the amount of resources filtered out by user-defined list filter.", - "A resource depends on a missing type", - "The route's nextHopIp address is not assigned to an instance on the network.", - "The route's next hop instance cannot ip forward.", - "The route's nextHopInstance URL refers to an instance that does not have an ipv6 interface on the same network as the route.", - "The route's nextHopInstance URL refers to an instance that does not exist.", - "The route's nextHopInstance URL refers to an instance that is not on the same network as the route.", - "The route's next hop instance does not have a status of RUNNING.", - "Error which is not critical. 
We decided to continue the process despite the mentioned error.", - "No results are present on a particular list page.", - "Success is reported, but some results may be missing due to errors", - "The user attempted to use a resource that requires a TOS they have not accepted.", - "Warning that a resource is in use.", - "One or more of the resources set to auto-delete could not be deleted because they were in use.", - "When a resource schema validation is ignored.", - "Instance template used in instance group manager is valid as such, but its application does not make a lot of sense, because it allows only single instance in instance group.", - "When undeclared properties in the schema are present", - "A given scope cannot be reached." - ], - "type": "string" - }, - "data": { - "description": "[Output Only] Metadata about this warning in key: value format. For example: \"data\": [ { \"key\": \"scope\", \"value\": \"zones/us-east1-d\" } ", - "items": { - "properties": { - "key": { - "description": "[Output Only] A key that provides more detail on the warning being returned. For example, for warnings where there are no results in a list request for a particular zone, this key might be scope and the key value might be the zone name. Other examples might be a key indicating a deprecated resource and a suggested replacement, or a warning about invalid network settings (for example, if an instance attempts to perform IP forwarding but is not enabled for IP forwarding).", - "type": "string" - }, - "value": { - "description": "[Output Only] A warning data value corresponding to the key.", - "type": "string" - } - }, - "type": "object" - }, - "type": "array" - }, - "message": { - "description": "[Output Only] A human-readable description of the warning code.", - "type": "string" - } - }, - "type": "object" - } - }, - "type": "object" - }, - "InterconnectAttachmentPartnerMetadata": { - "description": "Informational metadata about Partner attachments from Partners to display to customers. These fields are propagated from PARTNER_PROVIDER attachments to their corresponding PARTNER attachments.", - "id": "InterconnectAttachmentPartnerMetadata", - "properties": { - "interconnectName": { - "description": "Plain text name of the Interconnect this attachment is connected to, as displayed in the Partner's portal. For instance \"Chicago 1\". This value may be validated to match approved Partner values.", - "type": "string" - }, - "partnerName": { - "description": "Plain text name of the Partner providing this attachment. This value may be validated to match approved Partner values.", - "type": "string" - }, - "portalUrl": { - "description": "URL of the Partner's portal for this Attachment. Partners may customise this to be a deep link to the specific resource on the Partner portal. 
This value may be validated to match approved Partner values.", - "type": "string" - } - }, - "type": "object" - }, - "InterconnectAttachmentPrivateInfo": { - "description": "Information for an interconnect attachment when this belongs to an interconnect of type DEDICATED.", - "id": "InterconnectAttachmentPrivateInfo", - "properties": { - "tag8021q": { - "description": "[Output Only] 802.1q encapsulation tag to be used for traffic between Google and the customer, going to and from this network and region.", - "format": "uint32", - "type": "integer" - } - }, - "type": "object" - }, - "InterconnectAttachmentsScopedList": { - "id": "InterconnectAttachmentsScopedList", - "properties": { - "interconnectAttachments": { - "description": "A list of interconnect attachments contained in this scope.", - "items": { - "$ref": "InterconnectAttachment" - }, - "type": "array" - }, - "warning": { - "description": "Informational warning which replaces the list of addresses when the list is empty.", - "properties": { - "code": { - "description": "[Output Only] A warning code, if applicable. For example, Compute Engine returns NO_RESULTS_ON_PAGE if there are no results in the response.", - "enum": [ - "CLEANUP_FAILED", - "DEPRECATED_RESOURCE_USED", - "DEPRECATED_TYPE_USED", - "DISK_SIZE_LARGER_THAN_IMAGE_SIZE", - "EXPERIMENTAL_TYPE_USED", - "EXTERNAL_API_WARNING", - "FIELD_VALUE_OVERRIDEN", - "INJECTED_KERNELS_DEPRECATED", - "INVALID_HEALTH_CHECK_FOR_DYNAMIC_WIEGHTED_LB", - "LARGE_DEPLOYMENT_WARNING", - "LIST_OVERHEAD_QUOTA_EXCEED", - "MISSING_TYPE_DEPENDENCY", - "NEXT_HOP_ADDRESS_NOT_ASSIGNED", - "NEXT_HOP_CANNOT_IP_FORWARD", - "NEXT_HOP_INSTANCE_HAS_NO_IPV6_INTERFACE", - "NEXT_HOP_INSTANCE_NOT_FOUND", - "NEXT_HOP_INSTANCE_NOT_ON_NETWORK", - "NEXT_HOP_NOT_RUNNING", - "NOT_CRITICAL_ERROR", - "NO_RESULTS_ON_PAGE", - "PARTIAL_SUCCESS", - "REQUIRED_TOS_AGREEMENT", - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", - "RESOURCE_NOT_DELETED", - "SCHEMA_VALIDATION_IGNORED", - "SINGLE_INSTANCE_PROPERTY_TEMPLATE", - "UNDECLARED_PROPERTIES", - "UNREACHABLE" - ], - "enumDeprecated": [ - false, - false, - false, - false, - false, - false, - true, - false, - false, - false, - false, - false, - false, - false, - false, - false, - false, - false, - false, - false, - false, - false, - false, - false, - false, - false, false, false ], @@ -55148,6 +55740,175 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", + "The user attempted to use a resource that requires a TOS they have not accepted.", + "Warning that a resource is in use.", + "One or more of the resources set to auto-delete could not be deleted because they were in use.", + "When a resource schema validation is ignored.", + "Instance template used in instance group manager is valid as such, but its application does not make a lot of sense, because it allows only single instance in instance group.", + "When undeclared properties in the schema are present", + "A given scope cannot be reached." + ], + "type": "string" + }, + "data": { + "description": "[Output Only] Metadata about this warning in key: value format. 
For example: \"data\": [ { \"key\": \"scope\", \"value\": \"zones/us-east1-d\" } ", + "items": { + "properties": { + "key": { + "description": "[Output Only] A key that provides more detail on the warning being returned. For example, for warnings where there are no results in a list request for a particular zone, this key might be scope and the key value might be the zone name. Other examples might be a key indicating a deprecated resource and a suggested replacement, or a warning about invalid network settings (for example, if an instance attempts to perform IP forwarding but is not enabled for IP forwarding).", + "type": "string" + }, + "value": { + "description": "[Output Only] A warning data value corresponding to the key.", + "type": "string" + } + }, + "type": "object" + }, + "type": "array" + }, + "message": { + "description": "[Output Only] A human-readable description of the warning code.", + "type": "string" + } + }, + "type": "object" + } + }, + "type": "object" + }, + "InterconnectAttachmentPartnerMetadata": { + "description": "Informational metadata about Partner attachments from Partners to display to customers. These fields are propagated from PARTNER_PROVIDER attachments to their corresponding PARTNER attachments.", + "id": "InterconnectAttachmentPartnerMetadata", + "properties": { + "interconnectName": { + "description": "Plain text name of the Interconnect this attachment is connected to, as displayed in the Partner's portal. For instance \"Chicago 1\". This value may be validated to match approved Partner values.", + "type": "string" + }, + "partnerName": { + "description": "Plain text name of the Partner providing this attachment. This value may be validated to match approved Partner values.", + "type": "string" + }, + "portalUrl": { + "description": "URL of the Partner's portal for this Attachment. Partners may customise this to be a deep link to the specific resource on the Partner portal. This value may be validated to match approved Partner values.", + "type": "string" + } + }, + "type": "object" + }, + "InterconnectAttachmentPrivateInfo": { + "description": "Information for an interconnect attachment when this belongs to an interconnect of type DEDICATED.", + "id": "InterconnectAttachmentPrivateInfo", + "properties": { + "tag8021q": { + "description": "[Output Only] 802.1q encapsulation tag to be used for traffic between Google and the customer, going to and from this network and region.", + "format": "uint32", + "type": "integer" + } + }, + "type": "object" + }, + "InterconnectAttachmentsScopedList": { + "id": "InterconnectAttachmentsScopedList", + "properties": { + "interconnectAttachments": { + "description": "A list of interconnect attachments contained in this scope.", + "items": { + "$ref": "InterconnectAttachment" + }, + "type": "array" + }, + "warning": { + "description": "Informational warning which replaces the list of addresses when the list is empty.", + "properties": { + "code": { + "description": "[Output Only] A warning code, if applicable. 
For example, Compute Engine returns NO_RESULTS_ON_PAGE if there are no results in the response.", + "enum": [ + "CLEANUP_FAILED", + "DEPRECATED_RESOURCE_USED", + "DEPRECATED_TYPE_USED", + "DISK_SIZE_LARGER_THAN_IMAGE_SIZE", + "EXPERIMENTAL_TYPE_USED", + "EXTERNAL_API_WARNING", + "FIELD_VALUE_OVERRIDEN", + "INJECTED_KERNELS_DEPRECATED", + "INVALID_HEALTH_CHECK_FOR_DYNAMIC_WIEGHTED_LB", + "LARGE_DEPLOYMENT_WARNING", + "LIST_OVERHEAD_QUOTA_EXCEED", + "MISSING_TYPE_DEPENDENCY", + "NEXT_HOP_ADDRESS_NOT_ASSIGNED", + "NEXT_HOP_CANNOT_IP_FORWARD", + "NEXT_HOP_INSTANCE_HAS_NO_IPV6_INTERFACE", + "NEXT_HOP_INSTANCE_NOT_FOUND", + "NEXT_HOP_INSTANCE_NOT_ON_NETWORK", + "NEXT_HOP_NOT_RUNNING", + "NOT_CRITICAL_ERROR", + "NO_RESULTS_ON_PAGE", + "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", + "REQUIRED_TOS_AGREEMENT", + "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", + "RESOURCE_NOT_DELETED", + "SCHEMA_VALIDATION_IGNORED", + "SINGLE_INSTANCE_PROPERTY_TEMPLATE", + "UNDECLARED_PROPERTIES", + "UNREACHABLE" + ], + "enumDeprecated": [ + false, + false, + false, + false, + false, + false, + true, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false + ], + "enumDescriptions": [ + "Warning about failed cleanup of transient changes made by a failed operation.", + "A link to a deprecated resource was created.", + "When deploying and at least one of the resources has a type marked as deprecated", + "The user created a boot disk that is larger than image size.", + "When deploying and at least one of the resources has a type marked as experimental", + "Warning that is present in an external api call", + "Warning that value of a field has been overridden. Deprecated unused field.", + "The operation involved use of an injected kernel, which is deprecated.", + "A WEIGHTED_MAGLEV backend service is associated with a health check that is not of type HTTP/HTTPS/HTTP2.", + "When deploying a deployment with a exceedingly large number of resources", + "Resource can't be retrieved due to list overhead quota exceed which captures the amount of resources filtered out by user-defined list filter.", + "A resource depends on a missing type", + "The route's nextHopIp address is not assigned to an instance on the network.", + "The route's next hop instance cannot ip forward.", + "The route's nextHopInstance URL refers to an instance that does not have an ipv6 interface on the same network as the route.", + "The route's nextHopInstance URL refers to an instance that does not exist.", + "The route's nextHopInstance URL refers to an instance that is not on the same network as the route.", + "The route's next hop instance does not have a status of RUNNING.", + "Error which is not critical. 
We decided to continue the process despite the mentioned error.", + "No results are present on a particular list page.", + "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -55441,6 +56202,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -55477,6 +56239,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -55501,6 +56264,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -55551,7 +56315,7 @@ "type": "string" }, "availableFeatures": { - "description": "[Output only] List of features available at this InterconnectLocation, which can take one of the following values: - MACSEC ", + "description": "[Output only] List of features available at this InterconnectLocation, which can take one of the following values: - IF_MACSEC ", "items": { "enum": [ "IF_MACSEC" @@ -55729,6 +56493,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -55765,6 +56530,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -55789,6 +56555,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -56233,6 +57000,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -56269,6 +57037,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -56293,6 +57062,7 @@ "Error which is not critical. 
We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -56406,7 +57176,8 @@ "type": "string" }, "resourceRequirements": { - "$ref": "LicenseResourceRequirements" + "$ref": "LicenseResourceRequirements", + "description": "[Input Only] Deprecated." }, "selfLink": { "description": "[Output Only] Server-defined URL for the resource.", @@ -56525,12 +57296,12 @@ "id": "LicenseResourceRequirements", "properties": { "minGuestCpuCount": { - "description": "Minimum number of guest cpus required to use the Instance. Enforced at Instance creation and Instance start.", + "description": "[Input Only] Deprecated. This field no longer reflects the minimum number of guest cpus required to use the Instance.", "format": "int32", "type": "integer" }, "minMemoryMb": { - "description": "Minimum memory required to use the Instance. Enforced at Instance creation and Instance start.", + "description": "[Input Only] Deprecated. This field no longer reflects the minimum memory required to use the Instance.", "format": "int32", "type": "integer" } @@ -56586,6 +57357,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -56622,6 +57394,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -56646,6 +57419,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -56782,102 +57556,6 @@ }, "type": "object" }, - "LogConfig": { - "description": "This is deprecated and has no effect. Do not use.", - "id": "LogConfig", - "properties": { - "cloudAudit": { - "$ref": "LogConfigCloudAuditOptions", - "description": "This is deprecated and has no effect. Do not use." - }, - "counter": { - "$ref": "LogConfigCounterOptions", - "description": "This is deprecated and has no effect. Do not use." - }, - "dataAccess": { - "$ref": "LogConfigDataAccessOptions", - "description": "This is deprecated and has no effect. Do not use." - } - }, - "type": "object" - }, - "LogConfigCloudAuditOptions": { - "description": "This is deprecated and has no effect. Do not use.", - "id": "LogConfigCloudAuditOptions", - "properties": { - "logName": { - "description": "This is deprecated and has no effect. Do not use.", - "enum": [ - "ADMIN_ACTIVITY", - "DATA_ACCESS", - "UNSPECIFIED_LOG_NAME" - ], - "enumDescriptions": [ - "This is deprecated and has no effect. Do not use.", - "This is deprecated and has no effect. Do not use.", - "This is deprecated and has no effect. Do not use." 
- ], - "type": "string" - } - }, - "type": "object" - }, - "LogConfigCounterOptions": { - "description": "This is deprecated and has no effect. Do not use.", - "id": "LogConfigCounterOptions", - "properties": { - "customFields": { - "description": "This is deprecated and has no effect. Do not use.", - "items": { - "$ref": "LogConfigCounterOptionsCustomField" - }, - "type": "array" - }, - "field": { - "description": "This is deprecated and has no effect. Do not use.", - "type": "string" - }, - "metric": { - "description": "This is deprecated and has no effect. Do not use.", - "type": "string" - } - }, - "type": "object" - }, - "LogConfigCounterOptionsCustomField": { - "description": "This is deprecated and has no effect. Do not use.", - "id": "LogConfigCounterOptionsCustomField", - "properties": { - "name": { - "description": "This is deprecated and has no effect. Do not use.", - "type": "string" - }, - "value": { - "description": "This is deprecated and has no effect. Do not use.", - "type": "string" - } - }, - "type": "object" - }, - "LogConfigDataAccessOptions": { - "description": "This is deprecated and has no effect. Do not use.", - "id": "LogConfigDataAccessOptions", - "properties": { - "logMode": { - "description": "This is deprecated and has no effect. Do not use.", - "enum": [ - "LOG_FAIL_CLOSED", - "LOG_MODE_UNSPECIFIED" - ], - "enumDescriptions": [ - "This is deprecated and has no effect. Do not use.", - "This is deprecated and has no effect. Do not use." - ], - "type": "string" - } - }, - "type": "object" - }, "MachineImage": { "description": "Represents a machine image resource. A machine image is a Compute Engine resource that stores all the configuration, metadata, permissions, and data from one or more disks required to create a Virtual machine (VM) instance. For more information, see Machine images.", "id": "MachineImage", @@ -57045,6 +57723,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -57081,6 +57760,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -57105,6 +57785,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -57321,6 +58002,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -57357,6 +58039,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -57381,6 +58064,7 @@ "Error which is not critical. 
We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -57473,6 +58157,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -57509,6 +58194,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -57533,6 +58219,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -57607,6 +58294,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -57643,6 +58331,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -57667,6 +58356,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -57802,6 +58492,10 @@ "$ref": "PreservedState", "description": "[Output Only] Preserved state generated based on stateful policy for this instance." }, + "propertiesFromFlexibilityPolicy": { + "$ref": "ManagedInstancePropertiesFromFlexibilityPolicy", + "description": "[Output Only] Instance properties selected for this instance resulting from InstanceFlexibilityPolicy." + }, "version": { "$ref": "ManagedInstanceVersion", "description": "[Output Only] Intended version of this instance." @@ -57891,6 +58585,16 @@ }, "type": "object" }, + "ManagedInstancePropertiesFromFlexibilityPolicy": { + "id": "ManagedInstancePropertiesFromFlexibilityPolicy", + "properties": { + "machineType": { + "description": "The machine type to be used for this instance.", + "type": "string" + } + }, + "type": "object" + }, "ManagedInstanceVersion": { "id": "ManagedInstanceVersion", "properties": { @@ -58154,6 +58858,10 @@ ], "type": "string" }, + "networkProfile": { + "description": "A full or partial URL of the network profile to apply to this network. This field can be set only at resource creation time. 
For example, the following are valid URLs: - https://www.googleapis.com/compute/{api_version}/projects/{project_id}/global/networkProfiles/{network_profile_name} - projects/{project_id}/global/networkProfiles/{network_profile_name} ", + "type": "string" + }, "peerings": { "description": "[Output Only] A list of network peerings for the resource.", "items": { @@ -58335,6 +59043,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -58371,6 +59080,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -58395,6 +59105,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -58539,6 +59250,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -58575,6 +59287,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -58599,6 +59312,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -58673,6 +59387,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -58709,6 +59424,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -58733,6 +59449,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -58886,6 +59603,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -58922,6 +59640,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -58946,6 +59665,7 @@ "Error which is not critical. 
We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -59020,6 +59740,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -59056,6 +59777,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -59080,6 +59802,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -59328,6 +60051,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -59364,6 +60088,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -59388,6 +60113,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -59532,6 +60258,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -59568,6 +60295,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -59592,6 +60320,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -59764,6 +60493,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -59800,6 +60530,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -59824,6 +60555,7 @@ "Error which is not critical. 
We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -59898,6 +60630,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -59934,6 +60667,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -59958,6 +60692,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -60089,12 +60824,16 @@ "enum": [ "GVNIC", "IDPF", + "IRDMA", + "MRDMA", "UNSPECIFIED_NIC_TYPE", "VIRTIO_NET" ], "enumDescriptions": [ "GVNIC", "IDPF", + "IRDMA", + "MRDMA", "No type specified.", "VIRTIO" ], @@ -60181,6 +60920,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -60217,6 +60957,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -60241,6 +60982,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -60367,10 +61109,577 @@ }, "type": "object" }, + "NetworkProfile": { + "description": "NetworkProfile represents a Google managed network profile resource.", + "id": "NetworkProfile", + "properties": { + "creationTimestamp": { + "description": "[Output Only] Creation timestamp in RFC3339 text format.", + "type": "string" + }, + "description": { + "description": "[Output Only] An optional description of this resource.", + "type": "string" + }, + "features": { + "$ref": "NetworkProfileNetworkFeatures", + "description": "[Output Only] Features supported by the network." + }, + "id": { + "description": "[Output Only] The unique identifier for the resource. This identifier is defined by the server.", + "format": "uint64", + "type": "string" + }, + "kind": { + "default": "compute#networkProfile", + "description": "[Output Only] Type of the resource. Always compute#networkProfile for network profiles.", + "type": "string" + }, + "location": { + "$ref": "NetworkProfileLocation", + "description": "[Output Only] Location to which the network is restricted." 
+ }, + "name": { + "description": "[Output Only] Name of the resource.", + "pattern": "[a-z](?:[-a-z0-9]{0,61}[a-z0-9])?", + "type": "string" + }, + "selfLink": { + "description": "[Output Only] Server-defined URL for the resource.", + "type": "string" + }, + "selfLinkWithId": { + "description": "[Output Only] Server-defined URL for this resource with the resource id.", + "type": "string" + }, + "zone": { + "description": "[Output Only] Zone to which the network is restricted.", + "type": "string" + } + }, + "type": "object" + }, + "NetworkProfileLocation": { + "id": "NetworkProfileLocation", + "properties": { + "name": { + "type": "string" + }, + "scope": { + "enum": [ + "REGION", + "ZONE" + ], + "enumDescriptions": [ + "", + "" + ], + "type": "string" + } + }, + "type": "object" + }, + "NetworkProfileNetworkFeatures": { + "id": "NetworkProfileNetworkFeatures", + "properties": { + "addressPurposes": { + "description": "Specifies what address purposes are supported. If empty, all address purposes are supported.", + "items": { + "enum": [ + "DNS_RESOLVER", + "GCE_ENDPOINT", + "IPSEC_INTERCONNECT", + "NAT_AUTO", + "PRIVATE_SERVICE_CONNECT", + "SERVERLESS", + "SHARED_LOADBALANCER_VIP", + "VPC_PEERING" + ], + "enumDescriptions": [ + "DNS resolver address in the subnetwork.", + "VM internal/alias IP, Internal LB service IP, etc.", + "A regional internal IP address range reserved for the VLAN attachment that is used in HA VPN over Cloud Interconnect. This regional internal IP address range must not overlap with any IP address range of subnet/route in the VPC network and its peering networks. After the VLAN attachment is created with the reserved IP address range, when creating a new VPN gateway, its interface IP address is allocated from the associated VLAN attachment’s IP address range.", + "External IP automatically reserved for Cloud NAT.", + "A private network IP address that can be used to configure Private Service Connect. This purpose can be specified only for GLOBAL addresses of Type INTERNAL", + "A regional internal IP address range reserved for Serverless.", + "A private network IP address that can be shared by multiple Internal Load Balancer forwarding rules.", + "IP range for peer networks." 
+ ], + "type": "string" + }, + "type": "array" + }, + "allowAliasIpRanges": { + "description": "Specifies whether alias IP ranges (and secondary address ranges) are allowed.", + "enum": [ + "ALIAS_IP_RANGES_ALLOWED", + "ALIAS_IP_RANGES_BLOCKED" + ], + "enumDescriptions": [ + "", + "" + ], + "type": "string" + }, + "allowAutoModeSubnet": { + "description": "Specifies whether auto mode subnet creation is allowed.", + "enum": [ + "AUTO_MODE_SUBNET_ALLOWED", + "AUTO_MODE_SUBNET_BLOCKED" + ], + "enumDescriptions": [ + "", + "" + ], + "type": "string" + }, + "allowClassDFirewalls": { + "description": "Specifies whether firewalls for Class D address ranges are supported.", + "enum": [ + "CLASS_D_FIREWALLS_ALLOWED", + "CLASS_D_FIREWALLS_BLOCKED" + ], + "enumDescriptions": [ + "", + "" + ], + "type": "string" + }, + "allowCloudNat": { + "description": "Specifies whether cloud NAT creation is allowed.", + "enum": [ + "CLOUD_NAT_ALLOWED", + "CLOUD_NAT_BLOCKED" + ], + "enumDescriptions": [ + "", + "" + ], + "type": "string" + }, + "allowCloudRouter": { + "description": "Specifies whether cloud router creation is allowed.", + "enum": [ + "CLOUD_ROUTER_ALLOWED", + "CLOUD_ROUTER_BLOCKED" + ], + "enumDescriptions": [ + "", + "" + ], + "type": "string" + }, + "allowExternalIpAccess": { + "description": "Specifies whether VMs are allowed to have external IP access on network interfaces connected to this VPC.", + "enum": [ + "EXTERNAL_IP_ACCESS_ALLOWED", + "EXTERNAL_IP_ACCESS_BLOCKED" + ], + "enumDescriptions": [ + "", + "" + ], + "type": "string" + }, + "allowInterconnect": { + "description": "Specifies whether Cloud Interconnect creation is allowed.", + "enum": [ + "INTERCONNECT_ALLOWED", + "INTERCONNECT_BLOCKED" + ], + "enumDescriptions": [ + "", + "" + ], + "type": "string" + }, + "allowLoadBalancing": { + "description": "Specifies whether cloud load balancing is allowed.", + "enum": [ + "LOAD_BALANCING_ALLOWED", + "LOAD_BALANCING_BLOCKED" + ], + "enumDescriptions": [ + "", + "" + ], + "type": "string" + }, + "allowMultiNicInSameNetwork": { + "description": "Specifies whether multi-nic in the same network is allowed.", + "enum": [ + "MULTI_NIC_IN_SAME_NETWORK_ALLOWED", + "MULTI_NIC_IN_SAME_NETWORK_BLOCKED" + ], + "enumDescriptions": [ + "", + "" + ], + "type": "string" + }, + "allowPacketMirroring": { + "description": "Specifies whether Packet Mirroring 1.0 is supported.", + "enum": [ + "PACKET_MIRRORING_ALLOWED", + "PACKET_MIRRORING_BLOCKED" + ], + "enumDescriptions": [ + "", + "" + ], + "type": "string" + }, + "allowPrivateGoogleAccess": { + "description": "Specifies whether private Google access is allowed.", + "enum": [ + "PRIVATE_GOOGLE_ACCESS_ALLOWED", + "PRIVATE_GOOGLE_ACCESS_BLOCKED" + ], + "enumDescriptions": [ + "", + "" + ], + "type": "string" + }, + "allowPsc": { + "description": "Specifies whether PSC creation is allowed.", + "enum": [ + "PSC_ALLOWED", + "PSC_BLOCKED" + ], + "enumDescriptions": [ + "", + "" + ], + "type": "string" + }, + "allowSameNetworkUnicast": { + "description": "Specifies whether unicast within the same network is allowed.", + "enum": [ + "SAME_NETWORK_UNICAST_ALLOWED", + "SAME_NETWORK_UNICAST_BLOCKED" + ], + "enumDescriptions": [ + "", + "" + ], + "type": "string" + }, + "allowStaticRoutes": { + "description": "Specifies whether static route creation is allowed.", + "enum": [ + "STATIC_ROUTES_ALLOWED", + "STATIC_ROUTES_BLOCKED" + ], + "enumDescriptions": [ + "", + "" + ], + "type": "string" + }, + "allowSubInterfaces": { + "description": "Specifies whether sub 
interfaces are allowed.", + "enum": [ + "SUBINTERFACES_ALLOWED", + "SUBINTERFACES_BLOCKED" + ], + "enumDescriptions": [ + "", + "" + ], + "type": "string" + }, + "allowVpcPeering": { + "description": "Specifies whether VPC peering is allowed.", + "enum": [ + "VPC_PEERING_ALLOWED", + "VPC_PEERING_BLOCKED" + ], + "enumDescriptions": [ + "", + "" + ], + "type": "string" + }, + "allowVpn": { + "description": "Specifies whether VPN creation is allowed.", + "enum": [ + "VPN_ALLOWED", + "VPN_BLOCKED" + ], + "enumDescriptions": [ + "", + "" + ], + "type": "string" + }, + "interfaceTypes": { + "description": "If set, limits the interface types that the network supports. If empty, all interface types are supported.", + "items": { + "enum": [ + "GVNIC", + "IDPF", + "IRDMA", + "MRDMA", + "UNSPECIFIED_NIC_TYPE", + "VIRTIO_NET" + ], + "enumDescriptions": [ + "GVNIC", + "IDPF", + "IRDMA", + "MRDMA", + "No type specified.", + "VIRTIO" + ], + "type": "string" + }, + "type": "array" + }, + "subnetPurposes": { + "description": "Specifies which subnetwork purposes are supported.", + "items": { + "enum": [ + "SUBNET_PURPOSE_CUSTOM_HARDWARE", + "SUBNET_PURPOSE_PRIVATE" + ], + "enumDescriptions": [ + "", + "" + ], + "type": "string" + }, + "type": "array" + }, + "subnetStackTypes": { + "description": "Specifies which subnetwork stack types are supported.", + "items": { + "enum": [ + "SUBNET_STACK_TYPE_IPV4_IPV6", + "SUBNET_STACK_TYPE_IPV4_ONLY", + "SUBNET_STACK_TYPE_IPV6_ONLY" + ], + "enumDescriptions": [ + "", + "", + "" + ], + "type": "string" + }, + "type": "array" + }, + "unicast": { + "description": "Specifies which type of unicast is supported.", + "enum": [ + "UNICAST_SDN", + "UNICAST_ULL" + ], + "enumDescriptions": [ + "", + "" + ], + "type": "string" + } + }, + "type": "object" + }, + "NetworkProfilesListResponse": { + "description": "Contains a list of network profiles.", + "id": "NetworkProfilesListResponse", + "properties": { + "etag": { + "type": "string" + }, + "id": { + "description": "[Output Only] Unique identifier for the resource; defined by the server.", + "type": "string" + }, + "items": { + "description": "A list of NetworkProfile resources.", + "items": { + "$ref": "NetworkProfile" + }, + "type": "array" + }, + "kind": { + "default": "compute#networkProfileList", + "description": "[Output Only] Type of resource. Always compute#networkProfileList for network profiles.", + "type": "string" + }, + "nextPageToken": { + "description": "[Output Only] This token allows you to get the next page of results for list requests. If the number of results is larger than maxResults, use the nextPageToken as a value for the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results.", + "type": "string" + }, + "selfLink": { + "description": "[Output Only] Server-defined URL for this resource.", + "type": "string" + }, + "unreachables": { + "description": "[Output Only] Unreachable resources. end_interface: MixerListResponseWithEtagBuilder", + "items": { + "type": "string" + }, + "type": "array" + }, + "warning": { + "description": "[Output Only] Informational warning message.", + "properties": { + "code": { + "description": "[Output Only] A warning code, if applicable. 
For example, Compute Engine returns NO_RESULTS_ON_PAGE if there are no results in the response.", + "enum": [ + "CLEANUP_FAILED", + "DEPRECATED_RESOURCE_USED", + "DEPRECATED_TYPE_USED", + "DISK_SIZE_LARGER_THAN_IMAGE_SIZE", + "EXPERIMENTAL_TYPE_USED", + "EXTERNAL_API_WARNING", + "FIELD_VALUE_OVERRIDEN", + "INJECTED_KERNELS_DEPRECATED", + "INVALID_HEALTH_CHECK_FOR_DYNAMIC_WIEGHTED_LB", + "LARGE_DEPLOYMENT_WARNING", + "LIST_OVERHEAD_QUOTA_EXCEED", + "MISSING_TYPE_DEPENDENCY", + "NEXT_HOP_ADDRESS_NOT_ASSIGNED", + "NEXT_HOP_CANNOT_IP_FORWARD", + "NEXT_HOP_INSTANCE_HAS_NO_IPV6_INTERFACE", + "NEXT_HOP_INSTANCE_NOT_FOUND", + "NEXT_HOP_INSTANCE_NOT_ON_NETWORK", + "NEXT_HOP_NOT_RUNNING", + "NOT_CRITICAL_ERROR", + "NO_RESULTS_ON_PAGE", + "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", + "REQUIRED_TOS_AGREEMENT", + "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", + "RESOURCE_NOT_DELETED", + "SCHEMA_VALIDATION_IGNORED", + "SINGLE_INSTANCE_PROPERTY_TEMPLATE", + "UNDECLARED_PROPERTIES", + "UNREACHABLE" + ], + "enumDeprecated": [ + false, + false, + false, + false, + false, + false, + true, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false + ], + "enumDescriptions": [ + "Warning about failed cleanup of transient changes made by a failed operation.", + "A link to a deprecated resource was created.", + "When deploying and at least one of the resources has a type marked as deprecated", + "The user created a boot disk that is larger than image size.", + "When deploying and at least one of the resources has a type marked as experimental", + "Warning that is present in an external api call", + "Warning that value of a field has been overridden. Deprecated unused field.", + "The operation involved use of an injected kernel, which is deprecated.", + "A WEIGHTED_MAGLEV backend service is associated with a health check that is not of type HTTP/HTTPS/HTTP2.", + "When deploying a deployment with a exceedingly large number of resources", + "Resource can't be retrieved due to list overhead quota exceed which captures the amount of resources filtered out by user-defined list filter.", + "A resource depends on a missing type", + "The route's nextHopIp address is not assigned to an instance on the network.", + "The route's next hop instance cannot ip forward.", + "The route's nextHopInstance URL refers to an instance that does not have an ipv6 interface on the same network as the route.", + "The route's nextHopInstance URL refers to an instance that does not exist.", + "The route's nextHopInstance URL refers to an instance that is not on the same network as the route.", + "The route's next hop instance does not have a status of RUNNING.", + "Error which is not critical. 
We decided to continue the process despite the mentioned error.",
+ "No results are present on a particular list page.",
+ "Success is reported, but some results may be missing due to errors",
+ "Quota information is not available to client requests (e.g: regions.list).",
+ "The user attempted to use a resource that requires a TOS they have not accepted.",
+ "Warning that a resource is in use.",
+ "One or more of the resources set to auto-delete could not be deleted because they were in use.",
+ "When a resource schema validation is ignored.",
+ "Instance template used in instance group manager is valid as such, but its application does not make a lot of sense, because it allows only single instance in instance group.",
+ "When undeclared properties in the schema are present",
+ "A given scope cannot be reached."
+ ],
+ "type": "string"
+ },
+ "data": {
+ "description": "[Output Only] Metadata about this warning in key: value format. For example: \"data\": [ { \"key\": \"scope\", \"value\": \"zones/us-east1-d\" } ",
+ "items": {
+ "properties": {
+ "key": {
+ "description": "[Output Only] A key that provides more detail on the warning being returned. For example, for warnings where there are no results in a list request for a particular zone, this key might be scope and the key value might be the zone name. Other examples might be a key indicating a deprecated resource and a suggested replacement, or a warning about invalid network settings (for example, if an instance attempts to perform IP forwarding but is not enabled for IP forwarding).",
+ "type": "string"
+ },
+ "value": {
+ "description": "[Output Only] A warning data value corresponding to the key.",
+ "type": "string"
+ }
+ },
+ "type": "object"
+ },
+ "type": "array"
+ },
+ "message": {
+ "description": "[Output Only] A human-readable description of the warning code.",
+ "type": "string"
+ }
+ },
+ "type": "object"
+ }
+ },
+ "type": "object"
+ },
"NetworkRoutingConfig": {
"description": "A routing configuration attached to a network resource. The message includes the list of routers associated with the network, and a flag indicating the type of routing behavior to enforce network-wide.",
"id": "NetworkRoutingConfig",
"properties": {
+ "bgpAlwaysCompareMed": {
+ "description": "Enable comparison of Multi-Exit Discriminators (MED) across routes with different neighbor ASNs when using the STANDARD BGP best path selection algorithm.",
+ "type": "boolean"
+ },
+ "bgpBestPathSelectionMode": {
+ "description": "The BGP best path selection algorithm to be employed within this network for dynamic routes learned by Cloud Routers. Can be LEGACY (default) or STANDARD.",
+ "enum": [
+ "LEGACY",
+ "STANDARD"
+ ],
+ "enumDescriptions": [
+ "",
+ ""
+ ],
+ "type": "string"
+ },
+ "bgpInterRegionCost": {
+ "description": "Allows to define a preferred approach for handling inter-region cost in the selection process when using the STANDARD BGP best path selection algorithm. Can be DEFAULT or ADD_COST_TO_MED.",
+ "enum": [
+ "ADD_COST_TO_MED",
+ "DEFAULT"
+ ],
+ "enumDescriptions": [
+ "",
+ ""
+ ],
+ "type": "string"
+ },
"routingMode": {
"description": "The network-wide routing mode to use. If set to REGIONAL, this network's Cloud Routers will only advertise routes with subnets of this network in the same region as the router.
If set to GLOBAL, this network's Cloud Routers will advertise routes with all subnets of this network, across regions.", "enum": [ @@ -60669,6 +61978,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -60705,6 +62015,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -60729,6 +62040,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -60853,6 +62165,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -60889,6 +62202,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -60913,6 +62227,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -61140,6 +62455,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -61176,6 +62492,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -61200,6 +62517,7 @@ "Error which is not critical. 
We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -61291,6 +62609,294 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", + "REQUIRED_TOS_AGREEMENT", + "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", + "RESOURCE_NOT_DELETED", + "SCHEMA_VALIDATION_IGNORED", + "SINGLE_INSTANCE_PROPERTY_TEMPLATE", + "UNDECLARED_PROPERTIES", + "UNREACHABLE" + ], + "enumDeprecated": [ + false, + false, + false, + false, + false, + false, + true, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false + ], + "enumDescriptions": [ + "Warning about failed cleanup of transient changes made by a failed operation.", + "A link to a deprecated resource was created.", + "When deploying and at least one of the resources has a type marked as deprecated", + "The user created a boot disk that is larger than image size.", + "When deploying and at least one of the resources has a type marked as experimental", + "Warning that is present in an external api call", + "Warning that value of a field has been overridden. Deprecated unused field.", + "The operation involved use of an injected kernel, which is deprecated.", + "A WEIGHTED_MAGLEV backend service is associated with a health check that is not of type HTTP/HTTPS/HTTP2.", + "When deploying a deployment with a exceedingly large number of resources", + "Resource can't be retrieved due to list overhead quota exceed which captures the amount of resources filtered out by user-defined list filter.", + "A resource depends on a missing type", + "The route's nextHopIp address is not assigned to an instance on the network.", + "The route's next hop instance cannot ip forward.", + "The route's nextHopInstance URL refers to an instance that does not have an ipv6 interface on the same network as the route.", + "The route's nextHopInstance URL refers to an instance that does not exist.", + "The route's nextHopInstance URL refers to an instance that is not on the same network as the route.", + "The route's next hop instance does not have a status of RUNNING.", + "Error which is not critical. We decided to continue the process despite the mentioned error.", + "No results are present on a particular list page.", + "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", + "The user attempted to use a resource that requires a TOS they have not accepted.", + "Warning that a resource is in use.", + "One or more of the resources set to auto-delete could not be deleted because they were in use.", + "When a resource schema validation is ignored.", + "Instance template used in instance group manager is valid as such, but its application does not make a lot of sense, because it allows only single instance in instance group.", + "When undeclared properties in the schema are present", + "A given scope cannot be reached." 
+ ], + "type": "string" + }, + "data": { + "description": "[Output Only] Metadata about this warning in key: value format. For example: \"data\": [ { \"key\": \"scope\", \"value\": \"zones/us-east1-d\" } ", + "items": { + "properties": { + "key": { + "description": "[Output Only] A key that provides more detail on the warning being returned. For example, for warnings where there are no results in a list request for a particular zone, this key might be scope and the key value might be the zone name. Other examples might be a key indicating a deprecated resource and a suggested replacement, or a warning about invalid network settings (for example, if an instance attempts to perform IP forwarding but is not enabled for IP forwarding).", + "type": "string" + }, + "value": { + "description": "[Output Only] A warning data value corresponding to the key.", + "type": "string" + } + }, + "type": "object" + }, + "type": "array" + }, + "message": { + "description": "[Output Only] A human-readable description of the warning code.", + "type": "string" + } + }, + "type": "object" + } + }, + "type": "object" + }, + "NodeGroupsSetNodeTemplateRequest": { + "id": "NodeGroupsSetNodeTemplateRequest", + "properties": { + "nodeTemplate": { + "description": "Full or partial URL of the node template resource to be updated for this node group.", + "type": "string" + } + }, + "type": "object" + }, + "NodeGroupsSimulateMaintenanceEventRequest": { + "id": "NodeGroupsSimulateMaintenanceEventRequest", + "properties": { + "nodes": { + "description": "Names of the nodes to go under maintenance simulation.", + "items": { + "type": "string" + }, + "type": "array" + } + }, + "type": "object" + }, + "NodeTemplate": { + "description": "Represent a sole-tenant Node Template resource. You can use a template to define properties for nodes in a node group. For more information, read Creating node groups and instances.", + "id": "NodeTemplate", + "properties": { + "accelerators": { + "items": { + "$ref": "AcceleratorConfig" + }, + "type": "array" + }, + "cpuOvercommitType": { + "description": "CPU overcommit.", + "enum": [ + "CPU_OVERCOMMIT_TYPE_UNSPECIFIED", + "ENABLED", + "NONE" + ], + "enumDescriptions": [ + "", + "", + "" + ], + "type": "string" + }, + "creationTimestamp": { + "description": "[Output Only] Creation timestamp in RFC3339 text format.", + "type": "string" + }, + "description": { + "description": "An optional description of this resource. Provide this property when you create the resource.", + "type": "string" + }, + "disks": { + "items": { + "$ref": "LocalDisk" + }, + "type": "array" + }, + "id": { + "description": "[Output Only] The unique identifier for the resource. This identifier is defined by the server.", + "format": "uint64", + "type": "string" + }, + "kind": { + "default": "compute#nodeTemplate", + "description": "[Output Only] The type of the resource. Always compute#nodeTemplate for node templates.", + "type": "string" + }, + "name": { + "description": "The name of the resource, provided by the client when initially creating the resource. The resource name must be 1-63 characters long, and comply with RFC1035. 
Specifically, the name must be 1-63 characters long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first character must be a lowercase letter, and all following characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash.", + "type": "string" + }, + "nodeAffinityLabels": { + "additionalProperties": { + "type": "string" + }, + "description": "Labels to use for node affinity, which will be used in instance scheduling.", + "type": "object" + }, + "nodeType": { + "description": "The node type to use for nodes group that are created from this template.", + "type": "string" + }, + "nodeTypeFlexibility": { + "$ref": "NodeTemplateNodeTypeFlexibility", + "description": "Do not use. Instead, use the node_type property." + }, + "region": { + "description": "[Output Only] The name of the region where the node template resides, such as us-central1.", + "type": "string" + }, + "selfLink": { + "description": "[Output Only] Server-defined URL for the resource.", + "type": "string" + }, + "serverBinding": { + "$ref": "ServerBinding", + "description": "Sets the binding properties for the physical server. Valid values include: - *[Default]* RESTART_NODE_ON_ANY_SERVER: Restarts VMs on any available physical server - RESTART_NODE_ON_MINIMAL_SERVER: Restarts VMs on the same physical server whenever possible See Sole-tenant node options for more information." + }, + "status": { + "description": "[Output Only] The status of the node template. One of the following values: CREATING, READY, and DELETING.", + "enum": [ + "CREATING", + "DELETING", + "INVALID", + "READY" + ], + "enumDescriptions": [ + "Resources are being allocated.", + "The node template is currently being deleted.", + "Invalid status.", + "The node template is ready." + ], + "type": "string" + }, + "statusMessage": { + "description": "[Output Only] An optional, human-readable explanation of the status.", + "type": "string" + } + }, + "type": "object" + }, + "NodeTemplateAggregatedList": { + "id": "NodeTemplateAggregatedList", + "properties": { + "id": { + "description": "[Output Only] Unique identifier for the resource; defined by the server.", + "type": "string" + }, + "items": { + "additionalProperties": { + "$ref": "NodeTemplatesScopedList", + "description": "[Output Only] Name of the scope containing this set of node templates." + }, + "description": "A list of NodeTemplatesScopedList resources.", + "type": "object" + }, + "kind": { + "default": "compute#nodeTemplateAggregatedList", + "description": "[Output Only] Type of resource.Always compute#nodeTemplateAggregatedList for aggregated lists of node templates.", + "type": "string" + }, + "nextPageToken": { + "description": "[Output Only] This token allows you to get the next page of results for list requests. If the number of results is larger than maxResults, use the nextPageToken as a value for the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results.", + "type": "string" + }, + "selfLink": { + "description": "[Output Only] Server-defined URL for this resource.", + "type": "string" + }, + "unreachables": { + "description": "[Output Only] Unreachable resources.", + "items": { + "type": "string" + }, + "type": "array" + }, + "warning": { + "description": "[Output Only] Informational warning message.", + "properties": { + "code": { + "description": "[Output Only] A warning code, if applicable. 
For example, Compute Engine returns NO_RESULTS_ON_PAGE if there are no results in the response.", + "enum": [ + "CLEANUP_FAILED", + "DEPRECATED_RESOURCE_USED", + "DEPRECATED_TYPE_USED", + "DISK_SIZE_LARGER_THAN_IMAGE_SIZE", + "EXPERIMENTAL_TYPE_USED", + "EXTERNAL_API_WARNING", + "FIELD_VALUE_OVERRIDEN", + "INJECTED_KERNELS_DEPRECATED", + "INVALID_HEALTH_CHECK_FOR_DYNAMIC_WIEGHTED_LB", + "LARGE_DEPLOYMENT_WARNING", + "LIST_OVERHEAD_QUOTA_EXCEED", + "MISSING_TYPE_DEPENDENCY", + "NEXT_HOP_ADDRESS_NOT_ASSIGNED", + "NEXT_HOP_CANNOT_IP_FORWARD", + "NEXT_HOP_INSTANCE_HAS_NO_IPV6_INTERFACE", + "NEXT_HOP_INSTANCE_NOT_FOUND", + "NEXT_HOP_INSTANCE_NOT_ON_NETWORK", + "NEXT_HOP_NOT_RUNNING", + "NOT_CRITICAL_ERROR", + "NO_RESULTS_ON_PAGE", + "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -61327,289 +62933,6 @@ false, false, false, - false - ], - "enumDescriptions": [ - "Warning about failed cleanup of transient changes made by a failed operation.", - "A link to a deprecated resource was created.", - "When deploying and at least one of the resources has a type marked as deprecated", - "The user created a boot disk that is larger than image size.", - "When deploying and at least one of the resources has a type marked as experimental", - "Warning that is present in an external api call", - "Warning that value of a field has been overridden. Deprecated unused field.", - "The operation involved use of an injected kernel, which is deprecated.", - "A WEIGHTED_MAGLEV backend service is associated with a health check that is not of type HTTP/HTTPS/HTTP2.", - "When deploying a deployment with a exceedingly large number of resources", - "Resource can't be retrieved due to list overhead quota exceed which captures the amount of resources filtered out by user-defined list filter.", - "A resource depends on a missing type", - "The route's nextHopIp address is not assigned to an instance on the network.", - "The route's next hop instance cannot ip forward.", - "The route's nextHopInstance URL refers to an instance that does not have an ipv6 interface on the same network as the route.", - "The route's nextHopInstance URL refers to an instance that does not exist.", - "The route's nextHopInstance URL refers to an instance that is not on the same network as the route.", - "The route's next hop instance does not have a status of RUNNING.", - "Error which is not critical. We decided to continue the process despite the mentioned error.", - "No results are present on a particular list page.", - "Success is reported, but some results may be missing due to errors", - "The user attempted to use a resource that requires a TOS they have not accepted.", - "Warning that a resource is in use.", - "One or more of the resources set to auto-delete could not be deleted because they were in use.", - "When a resource schema validation is ignored.", - "Instance template used in instance group manager is valid as such, but its application does not make a lot of sense, because it allows only single instance in instance group.", - "When undeclared properties in the schema are present", - "A given scope cannot be reached." - ], - "type": "string" - }, - "data": { - "description": "[Output Only] Metadata about this warning in key: value format. 
For example: \"data\": [ { \"key\": \"scope\", \"value\": \"zones/us-east1-d\" } ", - "items": { - "properties": { - "key": { - "description": "[Output Only] A key that provides more detail on the warning being returned. For example, for warnings where there are no results in a list request for a particular zone, this key might be scope and the key value might be the zone name. Other examples might be a key indicating a deprecated resource and a suggested replacement, or a warning about invalid network settings (for example, if an instance attempts to perform IP forwarding but is not enabled for IP forwarding).", - "type": "string" - }, - "value": { - "description": "[Output Only] A warning data value corresponding to the key.", - "type": "string" - } - }, - "type": "object" - }, - "type": "array" - }, - "message": { - "description": "[Output Only] A human-readable description of the warning code.", - "type": "string" - } - }, - "type": "object" - } - }, - "type": "object" - }, - "NodeGroupsSetNodeTemplateRequest": { - "id": "NodeGroupsSetNodeTemplateRequest", - "properties": { - "nodeTemplate": { - "description": "Full or partial URL of the node template resource to be updated for this node group.", - "type": "string" - } - }, - "type": "object" - }, - "NodeGroupsSimulateMaintenanceEventRequest": { - "id": "NodeGroupsSimulateMaintenanceEventRequest", - "properties": { - "nodes": { - "description": "Names of the nodes to go under maintenance simulation.", - "items": { - "type": "string" - }, - "type": "array" - } - }, - "type": "object" - }, - "NodeTemplate": { - "description": "Represent a sole-tenant Node Template resource. You can use a template to define properties for nodes in a node group. For more information, read Creating node groups and instances.", - "id": "NodeTemplate", - "properties": { - "accelerators": { - "items": { - "$ref": "AcceleratorConfig" - }, - "type": "array" - }, - "cpuOvercommitType": { - "description": "CPU overcommit.", - "enum": [ - "CPU_OVERCOMMIT_TYPE_UNSPECIFIED", - "ENABLED", - "NONE" - ], - "enumDescriptions": [ - "", - "", - "" - ], - "type": "string" - }, - "creationTimestamp": { - "description": "[Output Only] Creation timestamp in RFC3339 text format.", - "type": "string" - }, - "description": { - "description": "An optional description of this resource. Provide this property when you create the resource.", - "type": "string" - }, - "disks": { - "items": { - "$ref": "LocalDisk" - }, - "type": "array" - }, - "id": { - "description": "[Output Only] The unique identifier for the resource. This identifier is defined by the server.", - "format": "uint64", - "type": "string" - }, - "kind": { - "default": "compute#nodeTemplate", - "description": "[Output Only] The type of the resource. Always compute#nodeTemplate for node templates.", - "type": "string" - }, - "name": { - "description": "The name of the resource, provided by the client when initially creating the resource. The resource name must be 1-63 characters long, and comply with RFC1035. 
Specifically, the name must be 1-63 characters long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first character must be a lowercase letter, and all following characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash.", - "type": "string" - }, - "nodeAffinityLabels": { - "additionalProperties": { - "type": "string" - }, - "description": "Labels to use for node affinity, which will be used in instance scheduling.", - "type": "object" - }, - "nodeType": { - "description": "The node type to use for nodes group that are created from this template.", - "type": "string" - }, - "nodeTypeFlexibility": { - "$ref": "NodeTemplateNodeTypeFlexibility", - "description": "Do not use. Instead, use the node_type property." - }, - "region": { - "description": "[Output Only] The name of the region where the node template resides, such as us-central1.", - "type": "string" - }, - "selfLink": { - "description": "[Output Only] Server-defined URL for the resource.", - "type": "string" - }, - "serverBinding": { - "$ref": "ServerBinding", - "description": "Sets the binding properties for the physical server. Valid values include: - *[Default]* RESTART_NODE_ON_ANY_SERVER: Restarts VMs on any available physical server - RESTART_NODE_ON_MINIMAL_SERVER: Restarts VMs on the same physical server whenever possible See Sole-tenant node options for more information." - }, - "status": { - "description": "[Output Only] The status of the node template. One of the following values: CREATING, READY, and DELETING.", - "enum": [ - "CREATING", - "DELETING", - "INVALID", - "READY" - ], - "enumDescriptions": [ - "Resources are being allocated.", - "The node template is currently being deleted.", - "Invalid status.", - "The node template is ready." - ], - "type": "string" - }, - "statusMessage": { - "description": "[Output Only] An optional, human-readable explanation of the status.", - "type": "string" - } - }, - "type": "object" - }, - "NodeTemplateAggregatedList": { - "id": "NodeTemplateAggregatedList", - "properties": { - "id": { - "description": "[Output Only] Unique identifier for the resource; defined by the server.", - "type": "string" - }, - "items": { - "additionalProperties": { - "$ref": "NodeTemplatesScopedList", - "description": "[Output Only] Name of the scope containing this set of node templates." - }, - "description": "A list of NodeTemplatesScopedList resources.", - "type": "object" - }, - "kind": { - "default": "compute#nodeTemplateAggregatedList", - "description": "[Output Only] Type of resource.Always compute#nodeTemplateAggregatedList for aggregated lists of node templates.", - "type": "string" - }, - "nextPageToken": { - "description": "[Output Only] This token allows you to get the next page of results for list requests. If the number of results is larger than maxResults, use the nextPageToken as a value for the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results.", - "type": "string" - }, - "selfLink": { - "description": "[Output Only] Server-defined URL for this resource.", - "type": "string" - }, - "unreachables": { - "description": "[Output Only] Unreachable resources.", - "items": { - "type": "string" - }, - "type": "array" - }, - "warning": { - "description": "[Output Only] Informational warning message.", - "properties": { - "code": { - "description": "[Output Only] A warning code, if applicable. 
For example, Compute Engine returns NO_RESULTS_ON_PAGE if there are no results in the response.", - "enum": [ - "CLEANUP_FAILED", - "DEPRECATED_RESOURCE_USED", - "DEPRECATED_TYPE_USED", - "DISK_SIZE_LARGER_THAN_IMAGE_SIZE", - "EXPERIMENTAL_TYPE_USED", - "EXTERNAL_API_WARNING", - "FIELD_VALUE_OVERRIDEN", - "INJECTED_KERNELS_DEPRECATED", - "INVALID_HEALTH_CHECK_FOR_DYNAMIC_WIEGHTED_LB", - "LARGE_DEPLOYMENT_WARNING", - "LIST_OVERHEAD_QUOTA_EXCEED", - "MISSING_TYPE_DEPENDENCY", - "NEXT_HOP_ADDRESS_NOT_ASSIGNED", - "NEXT_HOP_CANNOT_IP_FORWARD", - "NEXT_HOP_INSTANCE_HAS_NO_IPV6_INTERFACE", - "NEXT_HOP_INSTANCE_NOT_FOUND", - "NEXT_HOP_INSTANCE_NOT_ON_NETWORK", - "NEXT_HOP_NOT_RUNNING", - "NOT_CRITICAL_ERROR", - "NO_RESULTS_ON_PAGE", - "PARTIAL_SUCCESS", - "REQUIRED_TOS_AGREEMENT", - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", - "RESOURCE_NOT_DELETED", - "SCHEMA_VALIDATION_IGNORED", - "SINGLE_INSTANCE_PROPERTY_TEMPLATE", - "UNDECLARED_PROPERTIES", - "UNREACHABLE" - ], - "enumDeprecated": [ - false, - false, - false, - false, - false, - false, - true, - false, - false, - false, - false, - false, - false, - false, - false, - false, - false, - false, - false, - false, - false, - false, - false, - false, - false, - false, false, false ], @@ -61635,6 +62958,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -61727,6 +63051,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -61763,6 +63088,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -61787,6 +63113,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -61876,6 +63203,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -61912,6 +63240,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -61936,6 +63265,7 @@ "Error which is not critical. 
We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -62096,6 +63426,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -62132,6 +63463,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -62156,6 +63488,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -62248,6 +63581,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -62284,6 +63618,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -62308,6 +63643,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -62382,6 +63718,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -62418,6 +63755,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -62442,6 +63780,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -62603,6 +63942,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -62639,6 +63979,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -62663,6 +64004,7 @@ "Error which is not critical. 
We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -62886,6 +64228,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -62922,6 +64265,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -62946,6 +64290,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -63051,6 +64396,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -63087,6 +64433,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -63111,6 +64458,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -63203,6 +64551,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -63239,6 +64588,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -63263,6 +64613,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -63337,6 +64688,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -63373,6 +64725,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -63397,6 +64750,7 @@ "Error which is not critical. 
We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -63697,6 +65051,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -63733,6 +65088,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -63757,6 +65113,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -63897,6 +65254,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -63933,6 +65291,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -63957,6 +65316,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -64100,6 +65460,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -64136,6 +65497,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -64160,6 +65522,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -64207,15 +65570,15 @@ }, "defaultRouteAction": { "$ref": "HttpRouteAction", - "description": "defaultRouteAction takes effect when none of the pathRules or routeRules match. The load balancer performs advanced routing actions, such as URL rewrites and header transformations, before forwarding the request to the selected backend. If defaultRouteAction specifies any weightedBackendServices, defaultService must not be set. Conversely if defaultService is set, defaultRouteAction cannot contain any weightedBackendServices. 
If defaultRouteAction is specified, don't set defaultUrlRedirect. If defaultRouteAction.weightedBackendServices is specified, don't set defaultService. URL maps for classic Application Load Balancers only support the urlRewrite action within a path matcher's defaultRouteAction."
+ "description": "defaultRouteAction takes effect when none of the pathRules or routeRules match. The load balancer performs advanced routing actions, such as URL rewrites and header transformations, before forwarding the request to the selected backend. Only one of defaultUrlRedirect, defaultService or defaultRouteAction.weightedBackendService can be set. URL maps for classic Application Load Balancers only support the urlRewrite action within a path matcher's defaultRouteAction."
},
"defaultService": {
- "description": "The full or partial URL to the BackendService resource. This URL is used if none of the pathRules or routeRules defined by this PathMatcher are matched. For example, the following are all valid URLs to a BackendService resource: - https://www.googleapis.com/compute/v1/projects/project /global/backendServices/backendService - compute/v1/projects/project/global/backendServices/backendService - global/backendServices/backendService If defaultRouteAction is also specified, advanced routing actions, such as URL rewrites, take effect before sending the request to the backend. However, if defaultService is specified, defaultRouteAction cannot contain any weightedBackendServices. Conversely, if defaultRouteAction specifies any weightedBackendServices, defaultService must not be specified. If defaultService is specified, then set either defaultUrlRedirect or defaultRouteAction.weightedBackendService. Don't set both. Authorization requires one or more of the following Google IAM permissions on the specified resource default_service: - compute.backendBuckets.use - compute.backendServices.use ",
+ "description": "The full or partial URL to the BackendService resource. This URL is used if none of the pathRules or routeRules defined by this PathMatcher are matched. For example, the following are all valid URLs to a BackendService resource: - https://www.googleapis.com/compute/v1/projects/project /global/backendServices/backendService - compute/v1/projects/project/global/backendServices/backendService - global/backendServices/backendService If defaultRouteAction is also specified, advanced routing actions, such as URL rewrites, take effect before sending the request to the backend. Only one of defaultUrlRedirect, defaultService or defaultRouteAction.weightedBackendService can be set. Authorization requires one or more of the following Google IAM permissions on the specified resource default_service: - compute.backendBuckets.use - compute.backendServices.use ",
"type": "string"
},
"defaultUrlRedirect": {
"$ref": "HttpRedirectAction",
- "description": "When none of the specified pathRules or routeRules match, the request is redirected to a URL specified by defaultUrlRedirect. If defaultUrlRedirect is specified, then set either defaultService or defaultRouteAction. Don't set both. Not supported when the URL map is bound to a target gRPC proxy."
+ "description": "When none of the specified pathRules or routeRules match, the request is redirected to a URL specified by defaultUrlRedirect. Only one of defaultUrlRedirect, defaultService or defaultRouteAction.weightedBackendService can be set. Not supported when the URL map is bound to a target gRPC proxy."
}, "description": { "description": "An optional description of this resource. Provide this property when you create the resource.", @@ -64263,15 +65626,15 @@ }, "routeAction": { "$ref": "HttpRouteAction", - "description": "In response to a matching path, the load balancer performs advanced routing actions, such as URL rewrites and header transformations, before forwarding the request to the selected backend. If routeAction specifies any weightedBackendServices, service must not be set. Conversely if service is set, routeAction cannot contain any weightedBackendServices. Only one of routeAction or urlRedirect must be set. URL maps for classic Application Load Balancers only support the urlRewrite action within a path rule's routeAction." + "description": "In response to a matching path, the load balancer performs advanced routing actions, such as URL rewrites and header transformations, before forwarding the request to the selected backend. Only one of urlRedirect, service or routeAction.weightedBackendService can be set. URL maps for classic Application Load Balancers only support the urlRewrite action within a path rule's routeAction." }, "service": { - "description": "The full or partial URL of the backend service resource to which traffic is directed if this rule is matched. If routeAction is also specified, advanced routing actions, such as URL rewrites, take effect before sending the request to the backend. However, if service is specified, routeAction cannot contain any weightedBackendServices. Conversely, if routeAction specifies any weightedBackendServices, service must not be specified. Only one of urlRedirect, service or routeAction.weightedBackendService must be set.", + "description": "The full or partial URL of the backend service resource to which traffic is directed if this rule is matched. If routeAction is also specified, advanced routing actions, such as URL rewrites, take effect before sending the request to the backend. Only one of urlRedirect, service or routeAction.weightedBackendService can be set.", "type": "string" }, "urlRedirect": { "$ref": "HttpRedirectAction", - "description": "When a path pattern is matched, the request is redirected to a URL specified by urlRedirect. If urlRedirect is specified, service or routeAction must not be set. Not supported when the URL map is bound to a target gRPC proxy." + "description": "When a path pattern is matched, the request is redirected to a URL specified by urlRedirect. Only one of urlRedirect, service or routeAction.weightedBackendService can be set. Not supported when the URL map is bound to a target gRPC proxy." } }, "type": "object" @@ -64285,7 +65648,7 @@ "type": "string" }, "name": { - "description": "The name of a per-instance configuration and its corresponding instance. Serves as a merge key during UpdatePerInstanceConfigs operations, that is, if a per-instance configuration with the same name exists then it will be updated, otherwise a new one will be created for the VM instance with the same name. An attempt to create a per-instance configconfiguration for a VM instance that either doesn't exist or is not part of the group will result in an error.", + "description": "The name of a per-instance configuration and its corresponding instance. Serves as a merge key during UpdatePerInstanceConfigs operations, that is, if a per-instance configuration with the same name exists then it will be updated, otherwise a new one will be created for the VM instance with the same name. 
An attempt to create a per-instance configuration for a VM instance that either doesn't exist or is not part of the group will result in an error.", "type": "string" }, "preservedState": { @@ -64338,13 +65701,6 @@ "format": "byte", "type": "string" }, - "rules": { - "description": "This is deprecated and has no effect. Do not use.", - "items": { - "$ref": "Rule" - }, - "type": "array" - }, "version": { "description": "Specifies the format of the policy. Valid values are `0`, `1`, and `3`. Requests that specify an invalid value are rejected. Any operation that affects conditional role bindings must specify version `3`. This requirement applies to the following operations: * Getting a policy that includes a conditional role binding * Adding a conditional role binding to a policy * Changing a conditional role binding in a policy * Removing any role binding, with or without a condition, from a policy that includes conditions **Important:** If you use IAM Conditions, you must include the `etag` field whenever you call `setIamPolicy`. If you omit this field, then IAM allows you to overwrite a version `3` policy with a version `1` policy, and all of the conditions in the version `3` policy are lost. If a policy does not include any conditions, operations on that policy may specify any valid version or leave the field unset. To learn which resources support conditions in their IAM policies, see the [IAM documentation](https://cloud.google.com/iam/help/conditions/resource-policies).", "format": "int32", @@ -64522,7 +65878,7 @@ "type": "string" }, "enabledFeatures": { - "description": "Restricted features enabled for use on this project.", + "description": "An optional list of restricted features enabled for use on this project.", "items": { "type": "string" }, @@ -64555,7 +65911,7 @@ }, "usageExportLocation": { "$ref": "UsageExportLocation", - "description": "The naming prefix for daily usage reports and the Google Cloud Storage bucket where they are stored." + "description": "An optional naming prefix for daily usage reports and the Google Cloud Storage bucket where they are stored." }, "vmDnsSetting": { "description": "[Output Only] Default internal DNS setting used by VMs running in this project.", @@ -64851,6 +66207,312 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", + "REQUIRED_TOS_AGREEMENT", + "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", + "RESOURCE_NOT_DELETED", + "SCHEMA_VALIDATION_IGNORED", + "SINGLE_INSTANCE_PROPERTY_TEMPLATE", + "UNDECLARED_PROPERTIES", + "UNREACHABLE" + ], + "enumDeprecated": [ + false, + false, + false, + false, + false, + false, + true, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false + ], + "enumDescriptions": [ + "Warning about failed cleanup of transient changes made by a failed operation.", + "A link to a deprecated resource was created.", + "When deploying and at least one of the resources has a type marked as deprecated", + "The user created a boot disk that is larger than image size.", + "When deploying and at least one of the resources has a type marked as experimental", + "Warning that is present in an external api call", + "Warning that value of a field has been overridden. 
Deprecated unused field.", + "The operation involved use of an injected kernel, which is deprecated.", + "A WEIGHTED_MAGLEV backend service is associated with a health check that is not of type HTTP/HTTPS/HTTP2.", + "When deploying a deployment with a exceedingly large number of resources", + "Resource can't be retrieved due to list overhead quota exceed which captures the amount of resources filtered out by user-defined list filter.", + "A resource depends on a missing type", + "The route's nextHopIp address is not assigned to an instance on the network.", + "The route's next hop instance cannot ip forward.", + "The route's nextHopInstance URL refers to an instance that does not have an ipv6 interface on the same network as the route.", + "The route's nextHopInstance URL refers to an instance that does not exist.", + "The route's nextHopInstance URL refers to an instance that is not on the same network as the route.", + "The route's next hop instance does not have a status of RUNNING.", + "Error which is not critical. We decided to continue the process despite the mentioned error.", + "No results are present on a particular list page.", + "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", + "The user attempted to use a resource that requires a TOS they have not accepted.", + "Warning that a resource is in use.", + "One or more of the resources set to auto-delete could not be deleted because they were in use.", + "When a resource schema validation is ignored.", + "Instance template used in instance group manager is valid as such, but its application does not make a lot of sense, because it allows only single instance in instance group.", + "When undeclared properties in the schema are present", + "A given scope cannot be reached." + ], + "type": "string" + }, + "data": { + "description": "[Output Only] Metadata about this warning in key: value format. For example: \"data\": [ { \"key\": \"scope\", \"value\": \"zones/us-east1-d\" } ", + "items": { + "properties": { + "key": { + "description": "[Output Only] A key that provides more detail on the warning being returned. For example, for warnings where there are no results in a list request for a particular zone, this key might be scope and the key value might be the zone name. 
Other examples might be a key indicating a deprecated resource and a suggested replacement, or a warning about invalid network settings (for example, if an instance attempts to perform IP forwarding but is not enabled for IP forwarding).", + "type": "string" + }, + "value": { + "description": "[Output Only] A warning data value corresponding to the key.", + "type": "string" + } + }, + "type": "object" + }, + "type": "array" + }, + "message": { + "description": "[Output Only] A human-readable description of the warning code.", + "type": "string" + } + }, + "type": "object" + } + }, + "type": "object" + }, + "PublicAdvertisedPrefixPublicDelegatedPrefix": { + "description": "Represents a CIDR range which can be used to assign addresses.", + "id": "PublicAdvertisedPrefixPublicDelegatedPrefix", + "properties": { + "ipRange": { + "description": "The IP address range of the public delegated prefix", + "type": "string" + }, + "name": { + "description": "The name of the public delegated prefix", + "type": "string" + }, + "project": { + "description": "The project number of the public delegated prefix", + "type": "string" + }, + "region": { + "description": "The region of the public delegated prefix if it is regional. If absent, the prefix is global.", + "type": "string" + }, + "status": { + "description": "The status of the public delegated prefix. Possible values are: INITIALIZING: The public delegated prefix is being initialized and addresses cannot be created yet. ANNOUNCED: The public delegated prefix is active.", + "type": "string" + } + }, + "type": "object" + }, + "PublicDelegatedPrefix": { + "description": "A PublicDelegatedPrefix resource represents an IP block within a PublicAdvertisedPrefix that is configured within a single cloud scope (global or region). IPs in the block can be allocated to resources within that scope. Public delegated prefixes may be further broken up into smaller IP blocks in the same scope as the parent block.", + "id": "PublicDelegatedPrefix", + "properties": { + "allocatablePrefixLength": { + "description": "The allocatable prefix length supported by this public delegated prefix. This field is optional and cannot be set for prefixes in DELEGATION mode. It cannot be set for IPv4 prefixes either, and it always defaults to 32.", + "format": "int32", + "type": "integer" + }, + "byoipApiVersion": { + "description": "[Output Only] The version of BYOIP API.", + "enum": [ + "V1", + "V2" + ], + "enumDescriptions": [ + "This public delegated prefix usually takes 4 weeks to delete, and the BGP status cannot be changed. Announce and Withdraw APIs can not be used on this prefix.", + "This public delegated prefix takes minutes to delete. Announce and Withdraw APIs can be used on this prefix to change the BGP status." + ], + "type": "string" + }, + "creationTimestamp": { + "description": "[Output Only] Creation timestamp in RFC3339 text format.", + "type": "string" + }, + "description": { + "description": "An optional description of this resource. Provide this property when you create the resource.", + "type": "string" + }, + "fingerprint": { + "description": "Fingerprint of this resource. A hash of the contents stored in this object. This field is used in optimistic locking. This field will be ignored when inserting a new PublicDelegatedPrefix. An up-to-date fingerprint must be provided in order to update the PublicDelegatedPrefix, otherwise the request will fail with error 412 conditionNotMet. 
To see the latest fingerprint, make a get() request to retrieve a PublicDelegatedPrefix.", + "format": "byte", + "type": "string" + }, + "id": { + "description": "[Output Only] The unique identifier for the resource type. The server generates this identifier.", + "format": "uint64", + "type": "string" + }, + "ipCidrRange": { + "description": "The IP address range, in CIDR format, represented by this public delegated prefix.", + "type": "string" + }, + "isLiveMigration": { + "description": "If true, the prefix will be live migrated.", + "type": "boolean" + }, + "kind": { + "default": "compute#publicDelegatedPrefix", + "description": "[Output Only] Type of the resource. Always compute#publicDelegatedPrefix for public delegated prefixes.", + "type": "string" + }, + "mode": { + "description": "The public delegated prefix mode for IPv6 only.", + "enum": [ + "DELEGATION", + "EXTERNAL_IPV6_FORWARDING_RULE_CREATION" + ], + "enumDescriptions": [ + "The public delegated prefix is used for further sub-delegation only. Such prefixes cannot set allocatablePrefixLength.", + "The public delegated prefix is used for creating forwarding rules only. Such prefixes cannot set publicDelegatedSubPrefixes." + ], + "type": "string" + }, + "name": { + "annotations": { + "required": [ + "compute.publicDelegatedPrefixes.insert" + ] + }, + "description": "Name of the resource. Provided by the client when the resource is created. The name must be 1-63 characters long, and comply with RFC1035. Specifically, the name must be 1-63 characters long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first character must be a lowercase letter, and all following characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash.", + "pattern": "[a-z](?:[-a-z0-9]{0,61}[a-z0-9])?", + "type": "string" + }, + "parentPrefix": { + "description": "The URL of parent prefix. Either PublicAdvertisedPrefix or PublicDelegatedPrefix.", + "type": "string" + }, + "publicDelegatedSubPrefixs": { + "description": "The list of sub public delegated prefixes that exist for this public delegated prefix.", + "items": { + "$ref": "PublicDelegatedPrefixPublicDelegatedSubPrefix" + }, + "type": "array" + }, + "region": { + "description": "[Output Only] URL of the region where the public delegated prefix resides. This field applies only to the region resource. You must specify this field as part of the HTTP request URL. It is not settable as a field in the request body.", + "type": "string" + }, + "selfLink": { + "description": "[Output Only] Server-defined URL for the resource.", + "type": "string" + }, + "status": { + "description": "[Output Only] The status of the public delegated prefix, which can be one of following values: - `INITIALIZING` The public delegated prefix is being initialized and addresses cannot be created yet. - `READY_TO_ANNOUNCE` The public delegated prefix is a live migration prefix and is active. - `ANNOUNCED` The public delegated prefix is active. - `DELETING` The public delegated prefix is being deprovsioned. 
", + "enum": [ + "ANNOUNCED", + "ANNOUNCED_TO_GOOGLE", + "ANNOUNCED_TO_INTERNET", + "DELETING", + "INITIALIZING", + "READY_TO_ANNOUNCE" + ], + "enumDescriptions": [ + "The public delegated prefix is active.", + "The prefix is announced within Google network.", + "The prefix is announced to Internet and within Google.", + "The public delegated prefix is being deprovsioned.", + "The public delegated prefix is being initialized and addresses cannot be created yet.", + "The public delegated prefix is currently withdrawn but ready to be announced." + ], + "type": "string" + } + }, + "type": "object" + }, + "PublicDelegatedPrefixAggregatedList": { + "id": "PublicDelegatedPrefixAggregatedList", + "properties": { + "id": { + "description": "[Output Only] Unique identifier for the resource; defined by the server.", + "type": "string" + }, + "items": { + "additionalProperties": { + "$ref": "PublicDelegatedPrefixesScopedList", + "description": "[Output Only] Name of the scope containing this set of PublicDelegatedPrefixes." + }, + "description": "A list of PublicDelegatedPrefixesScopedList resources.", + "type": "object" + }, + "kind": { + "default": "compute#publicDelegatedPrefixAggregatedList", + "description": "[Output Only] Type of the resource. Always compute#publicDelegatedPrefixAggregatedList for aggregated lists of public delegated prefixes.", + "type": "string" + }, + "nextPageToken": { + "description": "[Output Only] This token allows you to get the next page of results for list requests. If the number of results is larger than maxResults, use the nextPageToken as a value for the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results.", + "type": "string" + }, + "selfLink": { + "description": "[Output Only] Server-defined URL for this resource.", + "type": "string" + }, + "unreachables": { + "description": "[Output Only] Unreachable resources.", + "items": { + "type": "string" + }, + "type": "array" + }, + "warning": { + "description": "[Output Only] Informational warning message.", + "properties": { + "code": { + "description": "[Output Only] A warning code, if applicable. 
For example, Compute Engine returns NO_RESULTS_ON_PAGE if there are no results in the response.", + "enum": [ + "CLEANUP_FAILED", + "DEPRECATED_RESOURCE_USED", + "DEPRECATED_TYPE_USED", + "DISK_SIZE_LARGER_THAN_IMAGE_SIZE", + "EXPERIMENTAL_TYPE_USED", + "EXTERNAL_API_WARNING", + "FIELD_VALUE_OVERRIDEN", + "INJECTED_KERNELS_DEPRECATED", + "INVALID_HEALTH_CHECK_FOR_DYNAMIC_WIEGHTED_LB", + "LARGE_DEPLOYMENT_WARNING", + "LIST_OVERHEAD_QUOTA_EXCEED", + "MISSING_TYPE_DEPENDENCY", + "NEXT_HOP_ADDRESS_NOT_ASSIGNED", + "NEXT_HOP_CANNOT_IP_FORWARD", + "NEXT_HOP_INSTANCE_HAS_NO_IPV6_INTERFACE", + "NEXT_HOP_INSTANCE_NOT_FOUND", + "NEXT_HOP_INSTANCE_NOT_ON_NETWORK", + "NEXT_HOP_NOT_RUNNING", + "NOT_CRITICAL_ERROR", + "NO_RESULTS_ON_PAGE", + "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -64887,307 +66549,6 @@ false, false, false, - false - ], - "enumDescriptions": [ - "Warning about failed cleanup of transient changes made by a failed operation.", - "A link to a deprecated resource was created.", - "When deploying and at least one of the resources has a type marked as deprecated", - "The user created a boot disk that is larger than image size.", - "When deploying and at least one of the resources has a type marked as experimental", - "Warning that is present in an external api call", - "Warning that value of a field has been overridden. Deprecated unused field.", - "The operation involved use of an injected kernel, which is deprecated.", - "A WEIGHTED_MAGLEV backend service is associated with a health check that is not of type HTTP/HTTPS/HTTP2.", - "When deploying a deployment with a exceedingly large number of resources", - "Resource can't be retrieved due to list overhead quota exceed which captures the amount of resources filtered out by user-defined list filter.", - "A resource depends on a missing type", - "The route's nextHopIp address is not assigned to an instance on the network.", - "The route's next hop instance cannot ip forward.", - "The route's nextHopInstance URL refers to an instance that does not have an ipv6 interface on the same network as the route.", - "The route's nextHopInstance URL refers to an instance that does not exist.", - "The route's nextHopInstance URL refers to an instance that is not on the same network as the route.", - "The route's next hop instance does not have a status of RUNNING.", - "Error which is not critical. We decided to continue the process despite the mentioned error.", - "No results are present on a particular list page.", - "Success is reported, but some results may be missing due to errors", - "The user attempted to use a resource that requires a TOS they have not accepted.", - "Warning that a resource is in use.", - "One or more of the resources set to auto-delete could not be deleted because they were in use.", - "When a resource schema validation is ignored.", - "Instance template used in instance group manager is valid as such, but its application does not make a lot of sense, because it allows only single instance in instance group.", - "When undeclared properties in the schema are present", - "A given scope cannot be reached." - ], - "type": "string" - }, - "data": { - "description": "[Output Only] Metadata about this warning in key: value format. 
For example: \"data\": [ { \"key\": \"scope\", \"value\": \"zones/us-east1-d\" } ", - "items": { - "properties": { - "key": { - "description": "[Output Only] A key that provides more detail on the warning being returned. For example, for warnings where there are no results in a list request for a particular zone, this key might be scope and the key value might be the zone name. Other examples might be a key indicating a deprecated resource and a suggested replacement, or a warning about invalid network settings (for example, if an instance attempts to perform IP forwarding but is not enabled for IP forwarding).", - "type": "string" - }, - "value": { - "description": "[Output Only] A warning data value corresponding to the key.", - "type": "string" - } - }, - "type": "object" - }, - "type": "array" - }, - "message": { - "description": "[Output Only] A human-readable description of the warning code.", - "type": "string" - } - }, - "type": "object" - } - }, - "type": "object" - }, - "PublicAdvertisedPrefixPublicDelegatedPrefix": { - "description": "Represents a CIDR range which can be used to assign addresses.", - "id": "PublicAdvertisedPrefixPublicDelegatedPrefix", - "properties": { - "ipRange": { - "description": "The IP address range of the public delegated prefix", - "type": "string" - }, - "name": { - "description": "The name of the public delegated prefix", - "type": "string" - }, - "project": { - "description": "The project number of the public delegated prefix", - "type": "string" - }, - "region": { - "description": "The region of the public delegated prefix if it is regional. If absent, the prefix is global.", - "type": "string" - }, - "status": { - "description": "The status of the public delegated prefix. Possible values are: INITIALIZING: The public delegated prefix is being initialized and addresses cannot be created yet. ANNOUNCED: The public delegated prefix is active.", - "type": "string" - } - }, - "type": "object" - }, - "PublicDelegatedPrefix": { - "description": "A PublicDelegatedPrefix resource represents an IP block within a PublicAdvertisedPrefix that is configured within a single cloud scope (global or region). IPs in the block can be allocated to resources within that scope. Public delegated prefixes may be further broken up into smaller IP blocks in the same scope as the parent block.", - "id": "PublicDelegatedPrefix", - "properties": { - "allocatablePrefixLength": { - "description": "The allocatable prefix length supported by this public delegated prefix. This field is optional and cannot be set for prefixes in DELEGATION mode. It cannot be set for IPv4 prefixes either, and it always defaults to 32.", - "format": "int32", - "type": "integer" - }, - "byoipApiVersion": { - "description": "[Output Only] The version of BYOIP API.", - "enum": [ - "V1", - "V2" - ], - "enumDescriptions": [ - "This public delegated prefix usually takes 4 weeks to delete, and the BGP status cannot be changed. Announce and Withdraw APIs can not be used on this prefix.", - "This public delegated prefix takes minutes to delete. Announce and Withdraw APIs can be used on this prefix to change the BGP status." - ], - "type": "string" - }, - "creationTimestamp": { - "description": "[Output Only] Creation timestamp in RFC3339 text format.", - "type": "string" - }, - "description": { - "description": "An optional description of this resource. Provide this property when you create the resource.", - "type": "string" - }, - "fingerprint": { - "description": "Fingerprint of this resource. 
A hash of the contents stored in this object. This field is used in optimistic locking. This field will be ignored when inserting a new PublicDelegatedPrefix. An up-to-date fingerprint must be provided in order to update the PublicDelegatedPrefix, otherwise the request will fail with error 412 conditionNotMet. To see the latest fingerprint, make a get() request to retrieve a PublicDelegatedPrefix.", - "format": "byte", - "type": "string" - }, - "id": { - "description": "[Output Only] The unique identifier for the resource type. The server generates this identifier.", - "format": "uint64", - "type": "string" - }, - "ipCidrRange": { - "description": "The IP address range, in CIDR format, represented by this public delegated prefix.", - "type": "string" - }, - "isLiveMigration": { - "description": "If true, the prefix will be live migrated.", - "type": "boolean" - }, - "kind": { - "default": "compute#publicDelegatedPrefix", - "description": "[Output Only] Type of the resource. Always compute#publicDelegatedPrefix for public delegated prefixes.", - "type": "string" - }, - "mode": { - "description": "The public delegated prefix mode for IPv6 only.", - "enum": [ - "DELEGATION", - "EXTERNAL_IPV6_FORWARDING_RULE_CREATION" - ], - "enumDescriptions": [ - "The public delegated prefix is used for further sub-delegation only. Such prefixes cannot set allocatablePrefixLength.", - "The public delegated prefix is used for creating forwarding rules only. Such prefixes cannot set publicDelegatedSubPrefixes." - ], - "type": "string" - }, - "name": { - "annotations": { - "required": [ - "compute.publicDelegatedPrefixes.insert" - ] - }, - "description": "Name of the resource. Provided by the client when the resource is created. The name must be 1-63 characters long, and comply with RFC1035. Specifically, the name must be 1-63 characters long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first character must be a lowercase letter, and all following characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash.", - "pattern": "[a-z](?:[-a-z0-9]{0,61}[a-z0-9])?", - "type": "string" - }, - "parentPrefix": { - "description": "The URL of parent prefix. Either PublicAdvertisedPrefix or PublicDelegatedPrefix.", - "type": "string" - }, - "publicDelegatedSubPrefixs": { - "description": "The list of sub public delegated prefixes that exist for this public delegated prefix.", - "items": { - "$ref": "PublicDelegatedPrefixPublicDelegatedSubPrefix" - }, - "type": "array" - }, - "region": { - "description": "[Output Only] URL of the region where the public delegated prefix resides. This field applies only to the region resource. You must specify this field as part of the HTTP request URL. It is not settable as a field in the request body.", - "type": "string" - }, - "selfLink": { - "description": "[Output Only] Server-defined URL for the resource.", - "type": "string" - }, - "status": { - "description": "[Output Only] The status of the public delegated prefix, which can be one of following values: - `INITIALIZING` The public delegated prefix is being initialized and addresses cannot be created yet. - `READY_TO_ANNOUNCE` The public delegated prefix is a live migration prefix and is active. - `ANNOUNCED` The public delegated prefix is active. - `DELETING` The public delegated prefix is being deprovsioned. 
", - "enum": [ - "ANNOUNCED", - "ANNOUNCED_TO_GOOGLE", - "ANNOUNCED_TO_INTERNET", - "DELETING", - "INITIALIZING", - "READY_TO_ANNOUNCE" - ], - "enumDescriptions": [ - "The public delegated prefix is active.", - "The prefix is announced within Google network.", - "The prefix is announced to Internet and within Google.", - "The public delegated prefix is being deprovsioned.", - "The public delegated prefix is being initialized and addresses cannot be created yet.", - "The public delegated prefix is currently withdrawn but ready to be announced." - ], - "type": "string" - } - }, - "type": "object" - }, - "PublicDelegatedPrefixAggregatedList": { - "id": "PublicDelegatedPrefixAggregatedList", - "properties": { - "id": { - "description": "[Output Only] Unique identifier for the resource; defined by the server.", - "type": "string" - }, - "items": { - "additionalProperties": { - "$ref": "PublicDelegatedPrefixesScopedList", - "description": "[Output Only] Name of the scope containing this set of PublicDelegatedPrefixes." - }, - "description": "A list of PublicDelegatedPrefixesScopedList resources.", - "type": "object" - }, - "kind": { - "default": "compute#publicDelegatedPrefixAggregatedList", - "description": "[Output Only] Type of the resource. Always compute#publicDelegatedPrefixAggregatedList for aggregated lists of public delegated prefixes.", - "type": "string" - }, - "nextPageToken": { - "description": "[Output Only] This token allows you to get the next page of results for list requests. If the number of results is larger than maxResults, use the nextPageToken as a value for the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results.", - "type": "string" - }, - "selfLink": { - "description": "[Output Only] Server-defined URL for this resource.", - "type": "string" - }, - "unreachables": { - "description": "[Output Only] Unreachable resources.", - "items": { - "type": "string" - }, - "type": "array" - }, - "warning": { - "description": "[Output Only] Informational warning message.", - "properties": { - "code": { - "description": "[Output Only] A warning code, if applicable. For example, Compute Engine returns NO_RESULTS_ON_PAGE if there are no results in the response.", - "enum": [ - "CLEANUP_FAILED", - "DEPRECATED_RESOURCE_USED", - "DEPRECATED_TYPE_USED", - "DISK_SIZE_LARGER_THAN_IMAGE_SIZE", - "EXPERIMENTAL_TYPE_USED", - "EXTERNAL_API_WARNING", - "FIELD_VALUE_OVERRIDEN", - "INJECTED_KERNELS_DEPRECATED", - "INVALID_HEALTH_CHECK_FOR_DYNAMIC_WIEGHTED_LB", - "LARGE_DEPLOYMENT_WARNING", - "LIST_OVERHEAD_QUOTA_EXCEED", - "MISSING_TYPE_DEPENDENCY", - "NEXT_HOP_ADDRESS_NOT_ASSIGNED", - "NEXT_HOP_CANNOT_IP_FORWARD", - "NEXT_HOP_INSTANCE_HAS_NO_IPV6_INTERFACE", - "NEXT_HOP_INSTANCE_NOT_FOUND", - "NEXT_HOP_INSTANCE_NOT_ON_NETWORK", - "NEXT_HOP_NOT_RUNNING", - "NOT_CRITICAL_ERROR", - "NO_RESULTS_ON_PAGE", - "PARTIAL_SUCCESS", - "REQUIRED_TOS_AGREEMENT", - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", - "RESOURCE_NOT_DELETED", - "SCHEMA_VALIDATION_IGNORED", - "SINGLE_INSTANCE_PROPERTY_TEMPLATE", - "UNDECLARED_PROPERTIES", - "UNREACHABLE" - ], - "enumDeprecated": [ - false, - false, - false, - false, - false, - false, - true, - false, - false, - false, - false, - false, - false, - false, - false, - false, - false, - false, - false, - false, - false, - false, - false, - false, - false, - false, false, false ], @@ -65213,6 +66574,7 @@ "Error which is not critical. 
We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -65304,6 +66666,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -65340,6 +66703,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -65364,6 +66728,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -65498,6 +66863,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -65534,6 +66900,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -65558,6 +66925,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -66077,6 +67445,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -66113,6 +67482,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -66137,6 +67507,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -66277,6 +67648,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -66313,6 +67685,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -66337,6 +67710,7 @@ "Error which is not critical. 
We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -66428,6 +67802,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -66464,6 +67839,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -66488,6 +67864,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -66627,6 +68004,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -66663,6 +68041,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -66687,6 +68066,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -66793,6 +68173,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -66829,6 +68210,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -66853,6 +68235,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -67070,6 +68453,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -67106,6 +68490,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -67130,6 +68515,7 @@ "Error which is not critical. 
We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -67197,6 +68583,19 @@ }, "type": "object" }, + "RegionInstanceGroupManagersResumeInstancesRequest": { + "id": "RegionInstanceGroupManagersResumeInstancesRequest", + "properties": { + "instances": { + "description": "The URLs of one or more instances to resume. This can be a full URL or a partial URL, such as zones/[ZONE]/instances/[INSTANCE_NAME].", + "items": { + "type": "string" + }, + "type": "array" + } + }, + "type": "object" + }, "RegionInstanceGroupManagersSetTargetPoolsRequest": { "id": "RegionInstanceGroupManagersSetTargetPoolsRequest", "properties": { @@ -67225,6 +68624,53 @@ }, "type": "object" }, + "RegionInstanceGroupManagersStartInstancesRequest": { + "id": "RegionInstanceGroupManagersStartInstancesRequest", + "properties": { + "instances": { + "description": "The URLs of one or more instances to start. This can be a full URL or a partial URL, such as zones/[ZONE]/instances/[INSTANCE_NAME].", + "items": { + "type": "string" + }, + "type": "array" + } + }, + "type": "object" + }, + "RegionInstanceGroupManagersStopInstancesRequest": { + "id": "RegionInstanceGroupManagersStopInstancesRequest", + "properties": { + "forceStop": { + "description": "If this flag is set to true, the Instance Group Manager will proceed to stop the instances, skipping initialization on them.", + "type": "boolean" + }, + "instances": { + "description": "The URLs of one or more instances to stop. This can be a full URL or a partial URL, such as zones/[ZONE]/instances/[INSTANCE_NAME].", + "items": { + "type": "string" + }, + "type": "array" + } + }, + "type": "object" + }, + "RegionInstanceGroupManagersSuspendInstancesRequest": { + "id": "RegionInstanceGroupManagersSuspendInstancesRequest", + "properties": { + "forceSuspend": { + "description": "If this flag is set to true, the Instance Group Manager will proceed to suspend the instances, skipping initialization on them.", + "type": "boolean" + }, + "instances": { + "description": "The URLs of one or more instances to suspend. This can be a full URL or a partial URL, such as zones/[ZONE]/instances/[INSTANCE_NAME].", + "items": { + "type": "string" + }, + "type": "array" + } + }, + "type": "object" + }, "RegionInstanceGroupsListInstances": { "id": "RegionInstanceGroupsListInstances", "properties": { @@ -67279,6 +68725,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -67315,6 +68762,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -67339,6 +68787,7 @@ "Error which is not critical. 
We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -67472,6 +68921,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -67508,6 +68958,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -67532,6 +68983,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -67919,6 +69371,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -67955,6 +69408,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -67979,6 +69433,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -68070,6 +69525,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -68106,6 +69562,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -68130,6 +69587,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -68215,6 +69673,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -68251,6 +69710,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -68275,6 +69735,7 @@ "Error which is not critical. 
We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -68393,6 +69854,252 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", + "REQUIRED_TOS_AGREEMENT", + "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", + "RESOURCE_NOT_DELETED", + "SCHEMA_VALIDATION_IGNORED", + "SINGLE_INSTANCE_PROPERTY_TEMPLATE", + "UNDECLARED_PROPERTIES", + "UNREACHABLE" + ], + "enumDeprecated": [ + false, + false, + false, + false, + false, + false, + true, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false + ], + "enumDescriptions": [ + "Warning about failed cleanup of transient changes made by a failed operation.", + "A link to a deprecated resource was created.", + "When deploying and at least one of the resources has a type marked as deprecated", + "The user created a boot disk that is larger than image size.", + "When deploying and at least one of the resources has a type marked as experimental", + "Warning that is present in an external api call", + "Warning that value of a field has been overridden. Deprecated unused field.", + "The operation involved use of an injected kernel, which is deprecated.", + "A WEIGHTED_MAGLEV backend service is associated with a health check that is not of type HTTP/HTTPS/HTTP2.", + "When deploying a deployment with a exceedingly large number of resources", + "Resource can't be retrieved due to list overhead quota exceed which captures the amount of resources filtered out by user-defined list filter.", + "A resource depends on a missing type", + "The route's nextHopIp address is not assigned to an instance on the network.", + "The route's next hop instance cannot ip forward.", + "The route's nextHopInstance URL refers to an instance that does not have an ipv6 interface on the same network as the route.", + "The route's nextHopInstance URL refers to an instance that does not exist.", + "The route's nextHopInstance URL refers to an instance that is not on the same network as the route.", + "The route's next hop instance does not have a status of RUNNING.", + "Error which is not critical. We decided to continue the process despite the mentioned error.", + "No results are present on a particular list page.", + "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", + "The user attempted to use a resource that requires a TOS they have not accepted.", + "Warning that a resource is in use.", + "One or more of the resources set to auto-delete could not be deleted because they were in use.", + "When a resource schema validation is ignored.", + "Instance template used in instance group manager is valid as such, but its application does not make a lot of sense, because it allows only single instance in instance group.", + "When undeclared properties in the schema are present", + "A given scope cannot be reached." 
+ ], + "type": "string" + }, + "data": { + "description": "[Output Only] Metadata about this warning in key: value format. For example: \"data\": [ { \"key\": \"scope\", \"value\": \"zones/us-east1-d\" } ", + "items": { + "properties": { + "key": { + "description": "[Output Only] A key that provides more detail on the warning being returned. For example, for warnings where there are no results in a list request for a particular zone, this key might be scope and the key value might be the zone name. Other examples might be a key indicating a deprecated resource and a suggested replacement, or a warning about invalid network settings (for example, if an instance attempts to perform IP forwarding but is not enabled for IP forwarding).", + "type": "string" + }, + "value": { + "description": "[Output Only] A warning data value corresponding to the key.", + "type": "string" + } + }, + "type": "object" + }, + "type": "array" + }, + "message": { + "description": "[Output Only] A human-readable description of the warning code.", + "type": "string" + } + }, + "type": "object" + } + }, + "type": "object" + }, + "ResourcePolicy": { + "description": "Represents a Resource Policy resource. You can use resource policies to schedule actions for some Compute Engine resources. For example, you can use them to schedule persistent disk snapshots.", + "id": "ResourcePolicy", + "properties": { + "creationTimestamp": { + "description": "[Output Only] Creation timestamp in RFC3339 text format.", + "type": "string" + }, + "description": { + "type": "string" + }, + "diskConsistencyGroupPolicy": { + "$ref": "ResourcePolicyDiskConsistencyGroupPolicy", + "description": "Resource policy for disk consistency groups." + }, + "groupPlacementPolicy": { + "$ref": "ResourcePolicyGroupPlacementPolicy", + "description": "Resource policy for instances for placement configuration." + }, + "id": { + "description": "[Output Only] The unique identifier for the resource. This identifier is defined by the server.", + "format": "uint64", + "type": "string" + }, + "instanceSchedulePolicy": { + "$ref": "ResourcePolicyInstanceSchedulePolicy", + "description": "Resource policy for scheduling instance operations." + }, + "kind": { + "default": "compute#resourcePolicy", + "description": "[Output Only] Type of the resource. Always compute#resource_policies for resource policies.", + "type": "string" + }, + "name": { + "annotations": { + "required": [ + "compute.instances.insert" + ] + }, + "description": "The name of the resource, provided by the client when initially creating the resource. The resource name must be 1-63 characters long, and comply with RFC1035. Specifically, the name must be 1-63 characters long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first character must be a lowercase letter, and all following characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash.", + "pattern": "[a-z](?:[-a-z0-9]{0,61}[a-z0-9])?", + "type": "string" + }, + "region": { + "type": "string" + }, + "resourceStatus": { + "$ref": "ResourcePolicyResourceStatus", + "description": "[Output Only] The system status of the resource policy." + }, + "selfLink": { + "description": "[Output Only] Server-defined fully-qualified URL for this resource.", + "type": "string" + }, + "snapshotSchedulePolicy": { + "$ref": "ResourcePolicySnapshotSchedulePolicy", + "description": "Resource policy for persistent disks for creating snapshots." 
+ }, + "status": { + "description": "[Output Only] The status of resource policy creation.", + "enum": [ + "CREATING", + "DELETING", + "EXPIRED", + "INVALID", + "READY" + ], + "enumDescriptions": [ + "Resource policy is being created.", + "Resource policy is being deleted.", + "Resource policy is expired and will not run again.", + "", + "Resource policy is ready to be used." + ], + "type": "string" + } + }, + "type": "object" + }, + "ResourcePolicyAggregatedList": { + "description": "Contains a list of resourcePolicies.", + "id": "ResourcePolicyAggregatedList", + "properties": { + "etag": { + "type": "string" + }, + "id": { + "description": "[Output Only] Unique identifier for the resource; defined by the server.", + "type": "string" + }, + "items": { + "additionalProperties": { + "$ref": "ResourcePoliciesScopedList", + "description": "Name of the scope containing this set of resourcePolicies." + }, + "description": "A list of ResourcePolicy resources.", + "type": "object" + }, + "kind": { + "default": "compute#resourcePolicyAggregatedList", + "description": "Type of resource.", + "type": "string" + }, + "nextPageToken": { + "description": "[Output Only] This token allows you to get the next page of results for list requests. If the number of results is larger than maxResults, use the nextPageToken as a value for the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results.", + "type": "string" + }, + "selfLink": { + "description": "[Output Only] Server-defined URL for this resource.", + "type": "string" + }, + "unreachables": { + "description": "[Output Only] Unreachable resources.", + "items": { + "type": "string" + }, + "type": "array" + }, + "warning": { + "description": "[Output Only] Informational warning message.", + "properties": { + "code": { + "description": "[Output Only] A warning code, if applicable. For example, Compute Engine returns NO_RESULTS_ON_PAGE if there are no results in the response.", + "enum": [ + "CLEANUP_FAILED", + "DEPRECATED_RESOURCE_USED", + "DEPRECATED_TYPE_USED", + "DISK_SIZE_LARGER_THAN_IMAGE_SIZE", + "EXPERIMENTAL_TYPE_USED", + "EXTERNAL_API_WARNING", + "FIELD_VALUE_OVERRIDEN", + "INJECTED_KERNELS_DEPRECATED", + "INVALID_HEALTH_CHECK_FOR_DYNAMIC_WIEGHTED_LB", + "LARGE_DEPLOYMENT_WARNING", + "LIST_OVERHEAD_QUOTA_EXCEED", + "MISSING_TYPE_DEPENDENCY", + "NEXT_HOP_ADDRESS_NOT_ASSIGNED", + "NEXT_HOP_CANNOT_IP_FORWARD", + "NEXT_HOP_INSTANCE_HAS_NO_IPV6_INTERFACE", + "NEXT_HOP_INSTANCE_NOT_FOUND", + "NEXT_HOP_INSTANCE_NOT_ON_NETWORK", + "NEXT_HOP_NOT_RUNNING", + "NOT_CRITICAL_ERROR", + "NO_RESULTS_ON_PAGE", + "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -68429,247 +70136,6 @@ false, false, false, - false - ], - "enumDescriptions": [ - "Warning about failed cleanup of transient changes made by a failed operation.", - "A link to a deprecated resource was created.", - "When deploying and at least one of the resources has a type marked as deprecated", - "The user created a boot disk that is larger than image size.", - "When deploying and at least one of the resources has a type marked as experimental", - "Warning that is present in an external api call", - "Warning that value of a field has been overridden. 
Deprecated unused field.", - "The operation involved use of an injected kernel, which is deprecated.", - "A WEIGHTED_MAGLEV backend service is associated with a health check that is not of type HTTP/HTTPS/HTTP2.", - "When deploying a deployment with a exceedingly large number of resources", - "Resource can't be retrieved due to list overhead quota exceed which captures the amount of resources filtered out by user-defined list filter.", - "A resource depends on a missing type", - "The route's nextHopIp address is not assigned to an instance on the network.", - "The route's next hop instance cannot ip forward.", - "The route's nextHopInstance URL refers to an instance that does not have an ipv6 interface on the same network as the route.", - "The route's nextHopInstance URL refers to an instance that does not exist.", - "The route's nextHopInstance URL refers to an instance that is not on the same network as the route.", - "The route's next hop instance does not have a status of RUNNING.", - "Error which is not critical. We decided to continue the process despite the mentioned error.", - "No results are present on a particular list page.", - "Success is reported, but some results may be missing due to errors", - "The user attempted to use a resource that requires a TOS they have not accepted.", - "Warning that a resource is in use.", - "One or more of the resources set to auto-delete could not be deleted because they were in use.", - "When a resource schema validation is ignored.", - "Instance template used in instance group manager is valid as such, but its application does not make a lot of sense, because it allows only single instance in instance group.", - "When undeclared properties in the schema are present", - "A given scope cannot be reached." - ], - "type": "string" - }, - "data": { - "description": "[Output Only] Metadata about this warning in key: value format. For example: \"data\": [ { \"key\": \"scope\", \"value\": \"zones/us-east1-d\" } ", - "items": { - "properties": { - "key": { - "description": "[Output Only] A key that provides more detail on the warning being returned. For example, for warnings where there are no results in a list request for a particular zone, this key might be scope and the key value might be the zone name. Other examples might be a key indicating a deprecated resource and a suggested replacement, or a warning about invalid network settings (for example, if an instance attempts to perform IP forwarding but is not enabled for IP forwarding).", - "type": "string" - }, - "value": { - "description": "[Output Only] A warning data value corresponding to the key.", - "type": "string" - } - }, - "type": "object" - }, - "type": "array" - }, - "message": { - "description": "[Output Only] A human-readable description of the warning code.", - "type": "string" - } - }, - "type": "object" - } - }, - "type": "object" - }, - "ResourcePolicy": { - "description": "Represents a Resource Policy resource. You can use resource policies to schedule actions for some Compute Engine resources. For example, you can use them to schedule persistent disk snapshots.", - "id": "ResourcePolicy", - "properties": { - "creationTimestamp": { - "description": "[Output Only] Creation timestamp in RFC3339 text format.", - "type": "string" - }, - "description": { - "type": "string" - }, - "diskConsistencyGroupPolicy": { - "$ref": "ResourcePolicyDiskConsistencyGroupPolicy", - "description": "Resource policy for disk consistency groups." 
- }, - "groupPlacementPolicy": { - "$ref": "ResourcePolicyGroupPlacementPolicy", - "description": "Resource policy for instances for placement configuration." - }, - "id": { - "description": "[Output Only] The unique identifier for the resource. This identifier is defined by the server.", - "format": "uint64", - "type": "string" - }, - "instanceSchedulePolicy": { - "$ref": "ResourcePolicyInstanceSchedulePolicy", - "description": "Resource policy for scheduling instance operations." - }, - "kind": { - "default": "compute#resourcePolicy", - "description": "[Output Only] Type of the resource. Always compute#resource_policies for resource policies.", - "type": "string" - }, - "name": { - "annotations": { - "required": [ - "compute.instances.insert" - ] - }, - "description": "The name of the resource, provided by the client when initially creating the resource. The resource name must be 1-63 characters long, and comply with RFC1035. Specifically, the name must be 1-63 characters long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first character must be a lowercase letter, and all following characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash.", - "pattern": "[a-z](?:[-a-z0-9]{0,61}[a-z0-9])?", - "type": "string" - }, - "region": { - "type": "string" - }, - "resourceStatus": { - "$ref": "ResourcePolicyResourceStatus", - "description": "[Output Only] The system status of the resource policy." - }, - "selfLink": { - "description": "[Output Only] Server-defined fully-qualified URL for this resource.", - "type": "string" - }, - "snapshotSchedulePolicy": { - "$ref": "ResourcePolicySnapshotSchedulePolicy", - "description": "Resource policy for persistent disks for creating snapshots." - }, - "status": { - "description": "[Output Only] The status of resource policy creation.", - "enum": [ - "CREATING", - "DELETING", - "EXPIRED", - "INVALID", - "READY" - ], - "enumDescriptions": [ - "Resource policy is being created.", - "Resource policy is being deleted.", - "Resource policy is expired and will not run again.", - "", - "Resource policy is ready to be used." - ], - "type": "string" - } - }, - "type": "object" - }, - "ResourcePolicyAggregatedList": { - "description": "Contains a list of resourcePolicies.", - "id": "ResourcePolicyAggregatedList", - "properties": { - "etag": { - "type": "string" - }, - "id": { - "description": "[Output Only] Unique identifier for the resource; defined by the server.", - "type": "string" - }, - "items": { - "additionalProperties": { - "$ref": "ResourcePoliciesScopedList", - "description": "Name of the scope containing this set of resourcePolicies." - }, - "description": "A list of ResourcePolicy resources.", - "type": "object" - }, - "kind": { - "default": "compute#resourcePolicyAggregatedList", - "description": "Type of resource.", - "type": "string" - }, - "nextPageToken": { - "description": "[Output Only] This token allows you to get the next page of results for list requests. If the number of results is larger than maxResults, use the nextPageToken as a value for the query parameter pageToken in the next list request. 
Subsequent list requests will have their own nextPageToken to continue paging through the results.", - "type": "string" - }, - "selfLink": { - "description": "[Output Only] Server-defined URL for this resource.", - "type": "string" - }, - "unreachables": { - "description": "[Output Only] Unreachable resources.", - "items": { - "type": "string" - }, - "type": "array" - }, - "warning": { - "description": "[Output Only] Informational warning message.", - "properties": { - "code": { - "description": "[Output Only] A warning code, if applicable. For example, Compute Engine returns NO_RESULTS_ON_PAGE if there are no results in the response.", - "enum": [ - "CLEANUP_FAILED", - "DEPRECATED_RESOURCE_USED", - "DEPRECATED_TYPE_USED", - "DISK_SIZE_LARGER_THAN_IMAGE_SIZE", - "EXPERIMENTAL_TYPE_USED", - "EXTERNAL_API_WARNING", - "FIELD_VALUE_OVERRIDEN", - "INJECTED_KERNELS_DEPRECATED", - "INVALID_HEALTH_CHECK_FOR_DYNAMIC_WIEGHTED_LB", - "LARGE_DEPLOYMENT_WARNING", - "LIST_OVERHEAD_QUOTA_EXCEED", - "MISSING_TYPE_DEPENDENCY", - "NEXT_HOP_ADDRESS_NOT_ASSIGNED", - "NEXT_HOP_CANNOT_IP_FORWARD", - "NEXT_HOP_INSTANCE_HAS_NO_IPV6_INTERFACE", - "NEXT_HOP_INSTANCE_NOT_FOUND", - "NEXT_HOP_INSTANCE_NOT_ON_NETWORK", - "NEXT_HOP_NOT_RUNNING", - "NOT_CRITICAL_ERROR", - "NO_RESULTS_ON_PAGE", - "PARTIAL_SUCCESS", - "REQUIRED_TOS_AGREEMENT", - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", - "RESOURCE_NOT_DELETED", - "SCHEMA_VALIDATION_IGNORED", - "SINGLE_INSTANCE_PROPERTY_TEMPLATE", - "UNDECLARED_PROPERTIES", - "UNREACHABLE" - ], - "enumDeprecated": [ - false, - false, - false, - false, - false, - false, - true, - false, - false, - false, - false, - false, - false, - false, - false, - false, - false, - false, - false, - false, - false, - false, - false, - false, - false, - false, false, false ], @@ -68695,6 +70161,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -68902,6 +70369,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -68938,6 +70406,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -68962,6 +70431,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -69171,7 +70641,7 @@ "id": "ResourceStatus", "properties": { "physicalHost": { - "description": "[Output Only] An opaque ID of the host on which the VM is running.", + "description": "[Output Only] The precise location of your instance within the zone's data center, including the block, sub-block, and host. 
The field is formatted as follows: blockId/subBlockId/hostId.", "type": "string" }, "scheduling": { @@ -69267,14 +70737,38 @@ "description": "The URL to an instance that should handle matching packets. You can specify this as a full or partial URL. For example: https://www.googleapis.com/compute/v1/projects/project/zones/zone/instances/", "type": "string" }, + "nextHopInterRegionCost": { + "description": "[Output only] Internal fixed region-to-region cost that Google Cloud calculates based on factors such as network performance, distance, and available bandwidth between regions.", + "format": "uint32", + "type": "integer" + }, "nextHopIp": { "description": "The network IP address of an instance that should handle matching packets. Both IPv6 address and IPv4 addresses are supported. Must specify an IPv4 address in dot-decimal notation (e.g. 192.0.2.99) or an IPv6 address in RFC 4291 format (e.g. 2001:db8::2d9:51:0:0 or 2001:db8:0:0:2d9:51:0:0). IPv6 addresses will be displayed using RFC 5952 compressed format (e.g. 2001:db8::2d9:51:0:0). Should never be an IPv4-mapped IPv6 address.", "type": "string" }, + "nextHopMed": { + "description": "[Output Only] Multi-Exit Discriminator, a BGP route metric that indicates the desirability of a particular route in a network.", + "format": "uint32", + "type": "integer" + }, "nextHopNetwork": { "description": "The URL of the local network if it should handle matching packets.", "type": "string" }, + "nextHopOrigin": { + "description": "[Output Only] Indicates the origin of the route. Can be IGP (Interior Gateway Protocol), EGP (Exterior Gateway Protocol), or INCOMPLETE.", + "enum": [ + "EGP", + "IGP", + "INCOMPLETE" + ], + "enumDescriptions": [ + "", + "", + "" + ], + "type": "string" + }, "nextHopPeering": { "description": "[Output Only] The network peering name that should handle matching packets, which should conform to RFC1035.", "type": "string" @@ -69369,6 +70863,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -69405,6 +70900,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -69429,6 +70925,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -69553,6 +71050,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -69589,6 +71087,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -69613,6 +71112,7 @@ "Error which is not critical. 
We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -69817,6 +71317,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -69853,6 +71354,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -69877,6 +71379,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -70292,6 +71795,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -70328,6 +71832,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -70352,6 +71857,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -70995,6 +72501,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -71031,6 +72538,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -71055,6 +72563,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -71092,72 +72601,6 @@ }, "type": "object" }, - "Rule": { - "description": "This is deprecated and has no effect. Do not use.", - "id": "Rule", - "properties": { - "action": { - "description": "This is deprecated and has no effect. Do not use.", - "enum": [ - "ALLOW", - "ALLOW_WITH_LOG", - "DENY", - "DENY_WITH_LOG", - "LOG", - "NO_ACTION" - ], - "enumDescriptions": [ - "This is deprecated and has no effect. Do not use.", - "This is deprecated and has no effect. Do not use.", - "This is deprecated and has no effect. 
Do not use.", - "This is deprecated and has no effect. Do not use.", - "This is deprecated and has no effect. Do not use.", - "This is deprecated and has no effect. Do not use." - ], - "type": "string" - }, - "conditions": { - "description": "This is deprecated and has no effect. Do not use.", - "items": { - "$ref": "Condition" - }, - "type": "array" - }, - "description": { - "description": "This is deprecated and has no effect. Do not use.", - "type": "string" - }, - "ins": { - "description": "This is deprecated and has no effect. Do not use.", - "items": { - "type": "string" - }, - "type": "array" - }, - "logConfigs": { - "description": "This is deprecated and has no effect. Do not use.", - "items": { - "$ref": "LogConfig" - }, - "type": "array" - }, - "notIns": { - "description": "This is deprecated and has no effect. Do not use.", - "items": { - "type": "string" - }, - "type": "array" - }, - "permissions": { - "description": "This is deprecated and has no effect. Do not use.", - "items": { - "type": "string" - }, - "type": "array" - } - }, - "type": "object" - }, "SSLHealthCheck": { "id": "SSLHealthCheck", "properties": { @@ -71610,6 +73053,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -71646,6 +73090,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -71670,6 +73115,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -71753,6 +73199,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -71789,6 +73236,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -71813,6 +73261,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -72191,6 +73640,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -72227,6 +73677,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -72251,6 +73702,7 @@ "Error which is not critical. 
We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -72643,7 +74095,7 @@ "type": "string" }, "enforceOnKey": { - "description": "Determines the key to enforce the rate_limit_threshold on. Possible values are: - ALL: A single rate limit threshold is applied to all the requests matching this rule. This is the default value if \"enforceOnKey\" is not configured. - IP: The source IP address of the request is the key. Each IP has this limit enforced separately. - HTTP_HEADER: The value of the HTTP header whose name is configured under \"enforceOnKeyName\". The key value is truncated to the first 128 bytes of the header value. If no such header is present in the request, the key type defaults to ALL. - XFF_IP: The first IP address (i.e. the originating client IP address) specified in the list of IPs under X-Forwarded-For HTTP header. If no such header is present or the value is not a valid IP, the key defaults to the source IP address of the request i.e. key type IP. - HTTP_COOKIE: The value of the HTTP cookie whose name is configured under \"enforceOnKeyName\". The key value is truncated to the first 128 bytes of the cookie value. If no such cookie is present in the request, the key type defaults to ALL. - HTTP_PATH: The URL path of the HTTP request. The key value is truncated to the first 128 bytes. - SNI: Server name indication in the TLS session of the HTTPS request. The key value is truncated to the first 128 bytes. The key type defaults to ALL on a HTTP session. - REGION_CODE: The country/region from which the request originates. - TLS_JA3_FINGERPRINT: JA3 TLS/SSL fingerprint if the client connects using HTTPS, HTTP/2 or HTTP/3. If not available, the key type defaults to ALL. - USER_IP: The IP address of the originating client, which is resolved based on \"userIpRequestHeaders\" configured with the security policy. If there is no \"userIpRequestHeaders\" configuration or an IP address cannot be resolved from it, the key type defaults to IP. ", + "description": "Determines the key to enforce the rate_limit_threshold on. Possible values are: - ALL: A single rate limit threshold is applied to all the requests matching this rule. This is the default value if \"enforceOnKey\" is not configured. - IP: The source IP address of the request is the key. Each IP has this limit enforced separately. - HTTP_HEADER: The value of the HTTP header whose name is configured under \"enforceOnKeyName\". The key value is truncated to the first 128 bytes of the header value. If no such header is present in the request, the key type defaults to ALL. - XFF_IP: The first IP address (i.e. the originating client IP address) specified in the list of IPs under X-Forwarded-For HTTP header. If no such header is present or the value is not a valid IP, the key defaults to the source IP address of the request i.e. key type IP. - HTTP_COOKIE: The value of the HTTP cookie whose name is configured under \"enforceOnKeyName\". The key value is truncated to the first 128 bytes of the cookie value. If no such cookie is present in the request, the key type defaults to ALL. 
- HTTP_PATH: The URL path of the HTTP request. The key value is truncated to the first 128 bytes. - SNI: Server name indication in the TLS session of the HTTPS request. The key value is truncated to the first 128 bytes. The key type defaults to ALL on a HTTP session. - REGION_CODE: The country/region from which the request originates. - TLS_JA3_FINGERPRINT: JA3 TLS/SSL fingerprint if the client connects using HTTPS, HTTP/2 or HTTP/3. If not available, the key type defaults to ALL. - USER_IP: The IP address of the originating client, which is resolved based on \"userIpRequestHeaders\" configured with the security policy. If there is no \"userIpRequestHeaders\" configuration or an IP address cannot be resolved from it, the key type defaults to IP. - TLS_JA4_FINGERPRINT: JA4 TLS/SSL fingerprint if the client connects using HTTPS, HTTP/2 or HTTP/3. If not available, the key type defaults to ALL. ", "enum": [ "ALL", "HTTP_COOKIE", @@ -72653,6 +74105,7 @@ "REGION_CODE", "SNI", "TLS_JA3_FINGERPRINT", + "TLS_JA4_FINGERPRINT", "USER_IP", "XFF_IP" ], @@ -72666,6 +74119,7 @@ "", "", "", + "", "" ], "type": "string" @@ -72704,7 +74158,7 @@ "type": "string" }, "enforceOnKeyType": { - "description": "Determines the key to enforce the rate_limit_threshold on. Possible values are: - ALL: A single rate limit threshold is applied to all the requests matching this rule. This is the default value if \"enforceOnKeyConfigs\" is not configured. - IP: The source IP address of the request is the key. Each IP has this limit enforced separately. - HTTP_HEADER: The value of the HTTP header whose name is configured under \"enforceOnKeyName\". The key value is truncated to the first 128 bytes of the header value. If no such header is present in the request, the key type defaults to ALL. - XFF_IP: The first IP address (i.e. the originating client IP address) specified in the list of IPs under X-Forwarded-For HTTP header. If no such header is present or the value is not a valid IP, the key defaults to the source IP address of the request i.e. key type IP. - HTTP_COOKIE: The value of the HTTP cookie whose name is configured under \"enforceOnKeyName\". The key value is truncated to the first 128 bytes of the cookie value. If no such cookie is present in the request, the key type defaults to ALL. - HTTP_PATH: The URL path of the HTTP request. The key value is truncated to the first 128 bytes. - SNI: Server name indication in the TLS session of the HTTPS request. The key value is truncated to the first 128 bytes. The key type defaults to ALL on a HTTP session. - REGION_CODE: The country/region from which the request originates. - TLS_JA3_FINGERPRINT: JA3 TLS/SSL fingerprint if the client connects using HTTPS, HTTP/2 or HTTP/3. If not available, the key type defaults to ALL. - USER_IP: The IP address of the originating client, which is resolved based on \"userIpRequestHeaders\" configured with the security policy. If there is no \"userIpRequestHeaders\" configuration or an IP address cannot be resolved from it, the key type defaults to IP. ", + "description": "Determines the key to enforce the rate_limit_threshold on. Possible values are: - ALL: A single rate limit threshold is applied to all the requests matching this rule. This is the default value if \"enforceOnKeyConfigs\" is not configured. - IP: The source IP address of the request is the key. Each IP has this limit enforced separately. - HTTP_HEADER: The value of the HTTP header whose name is configured under \"enforceOnKeyName\". 
The key value is truncated to the first 128 bytes of the header value. If no such header is present in the request, the key type defaults to ALL. - XFF_IP: The first IP address (i.e. the originating client IP address) specified in the list of IPs under X-Forwarded-For HTTP header. If no such header is present or the value is not a valid IP, the key defaults to the source IP address of the request i.e. key type IP. - HTTP_COOKIE: The value of the HTTP cookie whose name is configured under \"enforceOnKeyName\". The key value is truncated to the first 128 bytes of the cookie value. If no such cookie is present in the request, the key type defaults to ALL. - HTTP_PATH: The URL path of the HTTP request. The key value is truncated to the first 128 bytes. - SNI: Server name indication in the TLS session of the HTTPS request. The key value is truncated to the first 128 bytes. The key type defaults to ALL on a HTTP session. - REGION_CODE: The country/region from which the request originates. - TLS_JA3_FINGERPRINT: JA3 TLS/SSL fingerprint if the client connects using HTTPS, HTTP/2 or HTTP/3. If not available, the key type defaults to ALL. - USER_IP: The IP address of the originating client, which is resolved based on \"userIpRequestHeaders\" configured with the security policy. If there is no \"userIpRequestHeaders\" configuration or an IP address cannot be resolved from it, the key type defaults to IP. - TLS_JA4_FINGERPRINT: JA4 TLS/SSL fingerprint if the client connects using HTTPS, HTTP/2 or HTTP/3. If not available, the key type defaults to ALL. ", "enum": [ "ALL", "HTTP_COOKIE", @@ -72714,6 +74168,7 @@ "REGION_CODE", "SNI", "TLS_JA3_FINGERPRINT", + "TLS_JA4_FINGERPRINT", "USER_IP", "XFF_IP" ], @@ -72727,6 +74182,7 @@ "", "", "", + "", "" ], "type": "string" @@ -73087,6 +74543,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -73123,6 +74580,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -73147,6 +74605,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -73302,6 +74761,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -73338,6 +74798,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -73362,6 +74823,7 @@ "Error which is not critical. 
We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -73436,6 +74898,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -73472,6 +74935,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -73496,6 +74960,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -73979,6 +75444,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -74015,6 +75481,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -74039,6 +75506,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -74383,6 +75851,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -74419,6 +75888,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -74443,6 +75913,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -74535,6 +76006,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -74571,6 +76043,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -74595,6 +76068,7 @@ "Error which is not critical. 
We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -74743,6 +76217,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -74779,6 +76254,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -74803,6 +76279,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -74905,6 +76382,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -74941,6 +76419,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -74965,6 +76444,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -75056,6 +76536,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -75092,6 +76573,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -75116,6 +76598,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -75202,6 +76685,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -75238,6 +76722,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -75262,6 +76747,7 @@ "Error which is not critical. 
We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -75411,6 +76897,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -75447,6 +76934,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -75471,6 +76959,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -75820,6 +77309,248 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", + "REQUIRED_TOS_AGREEMENT", + "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", + "RESOURCE_NOT_DELETED", + "SCHEMA_VALIDATION_IGNORED", + "SINGLE_INSTANCE_PROPERTY_TEMPLATE", + "UNDECLARED_PROPERTIES", + "UNREACHABLE" + ], + "enumDeprecated": [ + false, + false, + false, + false, + false, + false, + true, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false + ], + "enumDescriptions": [ + "Warning about failed cleanup of transient changes made by a failed operation.", + "A link to a deprecated resource was created.", + "When deploying and at least one of the resources has a type marked as deprecated", + "The user created a boot disk that is larger than image size.", + "When deploying and at least one of the resources has a type marked as experimental", + "Warning that is present in an external api call", + "Warning that value of a field has been overridden. Deprecated unused field.", + "The operation involved use of an injected kernel, which is deprecated.", + "A WEIGHTED_MAGLEV backend service is associated with a health check that is not of type HTTP/HTTPS/HTTP2.", + "When deploying a deployment with a exceedingly large number of resources", + "Resource can't be retrieved due to list overhead quota exceed which captures the amount of resources filtered out by user-defined list filter.", + "A resource depends on a missing type", + "The route's nextHopIp address is not assigned to an instance on the network.", + "The route's next hop instance cannot ip forward.", + "The route's nextHopInstance URL refers to an instance that does not have an ipv6 interface on the same network as the route.", + "The route's nextHopInstance URL refers to an instance that does not exist.", + "The route's nextHopInstance URL refers to an instance that is not on the same network as the route.", + "The route's next hop instance does not have a status of RUNNING.", + "Error which is not critical. 
We decided to continue the process despite the mentioned error.", + "No results are present on a particular list page.", + "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", + "The user attempted to use a resource that requires a TOS they have not accepted.", + "Warning that a resource is in use.", + "One or more of the resources set to auto-delete could not be deleted because they were in use.", + "When a resource schema validation is ignored.", + "Instance template used in instance group manager is valid as such, but its application does not make a lot of sense, because it allows only single instance in instance group.", + "When undeclared properties in the schema are present", + "A given scope cannot be reached." + ], + "type": "string" + }, + "data": { + "description": "[Output Only] Metadata about this warning in key: value format. For example: \"data\": [ { \"key\": \"scope\", \"value\": \"zones/us-east1-d\" } ", + "items": { + "properties": { + "key": { + "description": "[Output Only] A key that provides more detail on the warning being returned. For example, for warnings where there are no results in a list request for a particular zone, this key might be scope and the key value might be the zone name. Other examples might be a key indicating a deprecated resource and a suggested replacement, or a warning about invalid network settings (for example, if an instance attempts to perform IP forwarding but is not enabled for IP forwarding).", + "type": "string" + }, + "value": { + "description": "[Output Only] A warning data value corresponding to the key.", + "type": "string" + } + }, + "type": "object" + }, + "type": "array" + }, + "message": { + "description": "[Output Only] A human-readable description of the warning code.", + "type": "string" + } + }, + "type": "object" + } + }, + "type": "object" + }, + "StoragePoolDisk": { + "id": "StoragePoolDisk", + "properties": { + "attachedInstances": { + "description": "[Output Only] Instances this disk is attached to.", + "items": { + "type": "string" + }, + "type": "array" + }, + "creationTimestamp": { + "description": "[Output Only] Creation timestamp in RFC3339 text format.", + "type": "string" + }, + "disk": { + "description": "[Output Only] The URL of the disk.", + "type": "string" + }, + "name": { + "description": "[Output Only] The name of the disk.", + "type": "string" + }, + "provisionedIops": { + "description": "[Output Only] The number of IOPS provisioned for the disk.", + "format": "int64", + "type": "string" + }, + "provisionedThroughput": { + "description": "[Output Only] The throughput provisioned for the disk.", + "format": "int64", + "type": "string" + }, + "resourcePolicies": { + "description": "[Output Only] Resource policies applied to disk for automatic snapshot creations.", + "items": { + "type": "string" + }, + "type": "array" + }, + "sizeGb": { + "description": "[Output Only] The disk size, in GB.", + "format": "int64", + "type": "string" + }, + "status": { + "description": "[Output Only] The disk status.", + "enum": [ + "CREATING", + "DELETING", + "FAILED", + "READY", + "RESTORING", + "UNAVAILABLE" + ], + "enumDescriptions": [ + "Disk is provisioning", + "Disk is deleting.", + "Disk creation failed.", + "Disk is ready for use.", + "Source data is being copied into the disk.", + "Disk is currently unavailable and cannot be accessed, attached or detached." 
+ ], + "type": "string" + }, + "type": { + "description": "[Output Only] The disk type.", + "type": "string" + }, + "usedBytes": { + "description": "[Output Only] Amount of disk space used.", + "format": "int64", + "type": "string" + } + }, + "type": "object" + }, + "StoragePoolList": { + "description": "A list of StoragePool resources.", + "id": "StoragePoolList", + "properties": { + "etag": { + "type": "string" + }, + "id": { + "description": "[Output Only] Unique identifier for the resource; defined by the server.", + "type": "string" + }, + "items": { + "description": "A list of StoragePool resources.", + "items": { + "$ref": "StoragePool" + }, + "type": "array" + }, + "kind": { + "default": "compute#storagePoolList", + "description": "[Output Only] Type of resource. Always compute#storagePoolList for lists of storagePools.", + "type": "string" + }, + "nextPageToken": { + "description": "[Output Only] This token allows you to get the next page of results for list requests. If the number of results is larger than maxResults, use the nextPageToken as a value for the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results.", + "type": "string" + }, + "selfLink": { + "description": "[Output Only] Server-defined URL for this resource.", + "type": "string" + }, + "unreachables": { + "description": "[Output Only] Unreachable resources. end_interface: MixerListResponseWithEtagBuilder", + "items": { + "type": "string" + }, + "type": "array" + }, + "warning": { + "description": "[Output Only] Informational warning message.", + "properties": { + "code": { + "description": "[Output Only] A warning code, if applicable. For example, Compute Engine returns NO_RESULTS_ON_PAGE if there are no results in the response.", + "enum": [ + "CLEANUP_FAILED", + "DEPRECATED_RESOURCE_USED", + "DEPRECATED_TYPE_USED", + "DISK_SIZE_LARGER_THAN_IMAGE_SIZE", + "EXPERIMENTAL_TYPE_USED", + "EXTERNAL_API_WARNING", + "FIELD_VALUE_OVERRIDEN", + "INJECTED_KERNELS_DEPRECATED", + "INVALID_HEALTH_CHECK_FOR_DYNAMIC_WIEGHTED_LB", + "LARGE_DEPLOYMENT_WARNING", + "LIST_OVERHEAD_QUOTA_EXCEED", + "MISSING_TYPE_DEPENDENCY", + "NEXT_HOP_ADDRESS_NOT_ASSIGNED", + "NEXT_HOP_CANNOT_IP_FORWARD", + "NEXT_HOP_INSTANCE_HAS_NO_IPV6_INTERFACE", + "NEXT_HOP_INSTANCE_NOT_FOUND", + "NEXT_HOP_INSTANCE_NOT_ON_NETWORK", + "NEXT_HOP_NOT_RUNNING", + "NOT_CRITICAL_ERROR", + "NO_RESULTS_ON_PAGE", + "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -75856,243 +77587,6 @@ false, false, false, - false - ], - "enumDescriptions": [ - "Warning about failed cleanup of transient changes made by a failed operation.", - "A link to a deprecated resource was created.", - "When deploying and at least one of the resources has a type marked as deprecated", - "The user created a boot disk that is larger than image size.", - "When deploying and at least one of the resources has a type marked as experimental", - "Warning that is present in an external api call", - "Warning that value of a field has been overridden. 
Deprecated unused field.", - "The operation involved use of an injected kernel, which is deprecated.", - "A WEIGHTED_MAGLEV backend service is associated with a health check that is not of type HTTP/HTTPS/HTTP2.", - "When deploying a deployment with a exceedingly large number of resources", - "Resource can't be retrieved due to list overhead quota exceed which captures the amount of resources filtered out by user-defined list filter.", - "A resource depends on a missing type", - "The route's nextHopIp address is not assigned to an instance on the network.", - "The route's next hop instance cannot ip forward.", - "The route's nextHopInstance URL refers to an instance that does not have an ipv6 interface on the same network as the route.", - "The route's nextHopInstance URL refers to an instance that does not exist.", - "The route's nextHopInstance URL refers to an instance that is not on the same network as the route.", - "The route's next hop instance does not have a status of RUNNING.", - "Error which is not critical. We decided to continue the process despite the mentioned error.", - "No results are present on a particular list page.", - "Success is reported, but some results may be missing due to errors", - "The user attempted to use a resource that requires a TOS they have not accepted.", - "Warning that a resource is in use.", - "One or more of the resources set to auto-delete could not be deleted because they were in use.", - "When a resource schema validation is ignored.", - "Instance template used in instance group manager is valid as such, but its application does not make a lot of sense, because it allows only single instance in instance group.", - "When undeclared properties in the schema are present", - "A given scope cannot be reached." - ], - "type": "string" - }, - "data": { - "description": "[Output Only] Metadata about this warning in key: value format. For example: \"data\": [ { \"key\": \"scope\", \"value\": \"zones/us-east1-d\" } ", - "items": { - "properties": { - "key": { - "description": "[Output Only] A key that provides more detail on the warning being returned. For example, for warnings where there are no results in a list request for a particular zone, this key might be scope and the key value might be the zone name. 
Other examples might be a key indicating a deprecated resource and a suggested replacement, or a warning about invalid network settings (for example, if an instance attempts to perform IP forwarding but is not enabled for IP forwarding).", - "type": "string" - }, - "value": { - "description": "[Output Only] A warning data value corresponding to the key.", - "type": "string" - } - }, - "type": "object" - }, - "type": "array" - }, - "message": { - "description": "[Output Only] A human-readable description of the warning code.", - "type": "string" - } - }, - "type": "object" - } - }, - "type": "object" - }, - "StoragePoolDisk": { - "id": "StoragePoolDisk", - "properties": { - "attachedInstances": { - "description": "[Output Only] Instances this disk is attached to.", - "items": { - "type": "string" - }, - "type": "array" - }, - "creationTimestamp": { - "description": "[Output Only] Creation timestamp in RFC3339 text format.", - "type": "string" - }, - "disk": { - "description": "[Output Only] The URL of the disk.", - "type": "string" - }, - "name": { - "description": "[Output Only] The name of the disk.", - "type": "string" - }, - "provisionedIops": { - "description": "[Output Only] The number of IOPS provisioned for the disk.", - "format": "int64", - "type": "string" - }, - "provisionedThroughput": { - "description": "[Output Only] The throughput provisioned for the disk.", - "format": "int64", - "type": "string" - }, - "resourcePolicies": { - "description": "[Output Only] Resource policies applied to disk for automatic snapshot creations.", - "items": { - "type": "string" - }, - "type": "array" - }, - "sizeGb": { - "description": "[Output Only] The disk size, in GB.", - "format": "int64", - "type": "string" - }, - "status": { - "description": "[Output Only] The disk status.", - "enum": [ - "CREATING", - "DELETING", - "FAILED", - "READY", - "RESTORING", - "UNAVAILABLE" - ], - "enumDescriptions": [ - "Disk is provisioning", - "Disk is deleting.", - "Disk creation failed.", - "Disk is ready for use.", - "Source data is being copied into the disk.", - "Disk is currently unavailable and cannot be accessed, attached or detached." - ], - "type": "string" - }, - "type": { - "description": "[Output Only] The disk type.", - "type": "string" - }, - "usedBytes": { - "description": "[Output Only] Amount of disk space used.", - "format": "int64", - "type": "string" - } - }, - "type": "object" - }, - "StoragePoolList": { - "description": "A list of StoragePool resources.", - "id": "StoragePoolList", - "properties": { - "etag": { - "type": "string" - }, - "id": { - "description": "[Output Only] Unique identifier for the resource; defined by the server.", - "type": "string" - }, - "items": { - "description": "A list of StoragePool resources.", - "items": { - "$ref": "StoragePool" - }, - "type": "array" - }, - "kind": { - "default": "compute#storagePoolList", - "description": "[Output Only] Type of resource. Always compute#storagePoolList for lists of storagePools.", - "type": "string" - }, - "nextPageToken": { - "description": "[Output Only] This token allows you to get the next page of results for list requests. If the number of results is larger than maxResults, use the nextPageToken as a value for the query parameter pageToken in the next list request. 
Subsequent list requests will have their own nextPageToken to continue paging through the results.", - "type": "string" - }, - "selfLink": { - "description": "[Output Only] Server-defined URL for this resource.", - "type": "string" - }, - "unreachables": { - "description": "[Output Only] Unreachable resources. end_interface: MixerListResponseWithEtagBuilder", - "items": { - "type": "string" - }, - "type": "array" - }, - "warning": { - "description": "[Output Only] Informational warning message.", - "properties": { - "code": { - "description": "[Output Only] A warning code, if applicable. For example, Compute Engine returns NO_RESULTS_ON_PAGE if there are no results in the response.", - "enum": [ - "CLEANUP_FAILED", - "DEPRECATED_RESOURCE_USED", - "DEPRECATED_TYPE_USED", - "DISK_SIZE_LARGER_THAN_IMAGE_SIZE", - "EXPERIMENTAL_TYPE_USED", - "EXTERNAL_API_WARNING", - "FIELD_VALUE_OVERRIDEN", - "INJECTED_KERNELS_DEPRECATED", - "INVALID_HEALTH_CHECK_FOR_DYNAMIC_WIEGHTED_LB", - "LARGE_DEPLOYMENT_WARNING", - "LIST_OVERHEAD_QUOTA_EXCEED", - "MISSING_TYPE_DEPENDENCY", - "NEXT_HOP_ADDRESS_NOT_ASSIGNED", - "NEXT_HOP_CANNOT_IP_FORWARD", - "NEXT_HOP_INSTANCE_HAS_NO_IPV6_INTERFACE", - "NEXT_HOP_INSTANCE_NOT_FOUND", - "NEXT_HOP_INSTANCE_NOT_ON_NETWORK", - "NEXT_HOP_NOT_RUNNING", - "NOT_CRITICAL_ERROR", - "NO_RESULTS_ON_PAGE", - "PARTIAL_SUCCESS", - "REQUIRED_TOS_AGREEMENT", - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", - "RESOURCE_NOT_DELETED", - "SCHEMA_VALIDATION_IGNORED", - "SINGLE_INSTANCE_PROPERTY_TEMPLATE", - "UNDECLARED_PROPERTIES", - "UNREACHABLE" - ], - "enumDeprecated": [ - false, - false, - false, - false, - false, - false, - true, - false, - false, - false, - false, - false, - false, - false, - false, - false, - false, - false, - false, - false, - false, - false, - false, - false, - false, - false, false, false ], @@ -76118,6 +77612,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -76219,6 +77714,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -76255,6 +77751,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -76279,6 +77776,7 @@ "Error which is not critical. 
We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -76514,6 +78012,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -76550,6 +78049,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -76574,6 +78074,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -76666,6 +78167,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -76702,6 +78204,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -76726,6 +78229,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -76800,6 +78304,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -76836,6 +78341,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -76860,6 +78366,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -76934,6 +78441,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -76970,6 +78478,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -76994,6 +78503,7 @@ "Error which is not critical. 
We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -77126,10 +78636,11 @@ "type": "string" }, "purpose": { - "description": "The purpose of the resource. This field can be either PRIVATE, GLOBAL_MANAGED_PROXY, REGIONAL_MANAGED_PROXY, PRIVATE_SERVICE_CONNECT, or PRIVATE is the default purpose for user-created subnets or subnets that are automatically created in auto mode networks. Subnets with purpose set to GLOBAL_MANAGED_PROXY or REGIONAL_MANAGED_PROXY are user-created subnetworks that are reserved for Envoy-based load balancers. A subnet with purpose set to PRIVATE_SERVICE_CONNECT is used to publish services using Private Service Connect. If unspecified, the subnet purpose defaults to PRIVATE. The enableFlowLogs field isn't supported if the subnet purpose field is set to GLOBAL_MANAGED_PROXY or REGIONAL_MANAGED_PROXY.", + "description": "The purpose of the resource. This field can be either PRIVATE, GLOBAL_MANAGED_PROXY, REGIONAL_MANAGED_PROXY, PEER_MIGRATION or PRIVATE_SERVICE_CONNECT. PRIVATE is the default purpose for user-created subnets or subnets that are automatically created in auto mode networks. Subnets with purpose set to GLOBAL_MANAGED_PROXY or REGIONAL_MANAGED_PROXY are user-created subnetworks that are reserved for Envoy-based load balancers. A subnet with purpose set to PRIVATE_SERVICE_CONNECT is used to publish services using Private Service Connect. A subnet with purpose set to PEER_MIGRATION is used for subnet migration from one peered VPC to another. If unspecified, the subnet purpose defaults to PRIVATE. The enableFlowLogs field isn't supported if the subnet purpose field is set to GLOBAL_MANAGED_PROXY or REGIONAL_MANAGED_PROXY.", "enum": [ "GLOBAL_MANAGED_PROXY", "INTERNAL_HTTPS_LOAD_BALANCER", + "PEER_MIGRATION", "PRIVATE", "PRIVATE_NAT", "PRIVATE_RFC_1918", @@ -77139,6 +78650,7 @@ "enumDescriptions": [ "Subnet reserved for Global Envoy-based Load Balancing.", "Subnet reserved for Internal HTTP(S) Load Balancing. This is a legacy purpose, please use REGIONAL_MANAGED_PROXY instead.", + "Subnetwork will be used for Migration from one peered VPC to another. (a transient state of subnetwork while migrating resources from one project to another).", "Regular user created or automatically created subnet.", "Subnetwork used as source range for Private NAT Gateways.", "Regular user created or automatically created subnet.", @@ -77269,6 +78781,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -77305,6 +78818,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -77329,6 +78843,7 @@ "Error which is not critical. 
We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -77421,6 +78936,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -77457,6 +78973,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -77481,6 +78998,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -77645,6 +79163,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -77681,6 +79200,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -77705,6 +79225,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -77948,6 +79469,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -77984,6 +79506,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -78008,6 +79531,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -78082,6 +79606,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -78118,6 +79643,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -78142,6 +79668,7 @@ "Error which is not critical. 
We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -78328,6 +79855,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -78364,6 +79892,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -78388,6 +79917,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -78462,6 +79992,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -78498,6 +80029,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -78522,6 +80054,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -78603,7 +80136,7 @@ "type": "object" }, "TargetHttpsProxy": { - "description": "Represents a Target HTTPS Proxy resource. Google Compute Engine has two Target HTTPS Proxy resources: * [Global](/compute/docs/reference/rest/v1/targetHttpsProxies) * [Regional](/compute/docs/reference/rest/v1/regionTargetHttpsProxies) A target HTTPS proxy is a component of GCP HTTPS load balancers. * targetHttpProxies are used by global external Application Load Balancers, classic Application Load Balancers, cross-region internal Application Load Balancers, and Traffic Director. * regionTargetHttpProxies are used by regional internal Application Load Balancers and regional external Application Load Balancers. Forwarding rules reference a target HTTPS proxy, and the target proxy then references a URL map. For more information, read Using Target Proxies and Forwarding rule concepts.", + "description": "Represents a Target HTTPS Proxy resource. Google Compute Engine has two Target HTTPS Proxy resources: * [Global](/compute/docs/reference/rest/v1/targetHttpsProxies) * [Regional](/compute/docs/reference/rest/v1/regionTargetHttpsProxies) A target HTTPS proxy is a component of Google Cloud HTTPS load balancers. 
* targetHttpProxies are used by global external Application Load Balancers, classic Application Load Balancers, cross-region internal Application Load Balancers, and Traffic Director. * regionTargetHttpProxies are used by regional internal Application Load Balancers and regional external Application Load Balancers. Forwarding rules reference a target HTTPS proxy, and the target proxy then references a URL map. For more information, read Using Target Proxies and Forwarding rule concepts.", "id": "TargetHttpsProxy", "properties": { "authorizationPolicy": { @@ -78693,12 +80226,14 @@ "enum": [ "DISABLED", "PERMISSIVE", - "STRICT" + "STRICT", + "UNRESTRICTED" ], "enumDescriptions": [ "TLS 1.3 Early Data is not advertised, and any (invalid) attempts to send Early Data will be rejected by closing the connection.", "This enables TLS 1.3 0-RTT, and only allows Early Data to be included on requests with safe HTTP methods (GET, HEAD, OPTIONS, TRACE). This mode does not enforce any other limitations for requests with Early Data. The application owner should validate that Early Data is acceptable for a given request path.", - "This enables TLS 1.3 0-RTT, and only allows Early Data to be included on requests with safe HTTP methods (GET, HEAD, OPTIONS, TRACE) without query parameters. Requests that send Early Data with non-idempotent HTTP methods or with query parameters will be rejected with a HTTP 425." + "This enables TLS 1.3 0-RTT, and only allows Early Data to be included on requests with safe HTTP methods (GET, HEAD, OPTIONS, TRACE) without query parameters. Requests that send Early Data with non-idempotent HTTP methods or with query parameters will be rejected with a HTTP 425.", + "This enables TLS 1.3 Early Data for requests with any HTTP method including non-idempotent methods list POST. This mode does not enforce any other limitations. This may be valuable for gRPC use cases. However, we do not recommend this method unless you have evaluated your security stance and mitigated the risk of replay attacks using other mechanisms." ], "type": "string" }, @@ -78771,6 +80306,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -78807,6 +80343,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -78831,6 +80368,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -78923,6 +80461,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -78959,6 +80498,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -78983,6 +80523,7 @@ "Error which is not critical. 
We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -79142,6 +80683,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -79178,6 +80720,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -79202,6 +80745,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -79294,6 +80838,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -79330,6 +80875,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -79354,6 +80900,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -79428,6 +80975,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -79464,6 +81012,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -79488,6 +81037,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -79678,6 +81228,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -79714,6 +81265,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -79738,6 +81290,7 @@ "Error which is not critical. 
We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -79847,6 +81400,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -79883,6 +81437,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -79907,6 +81462,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -80033,6 +81589,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -80069,6 +81626,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -80093,6 +81651,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -80310,6 +81869,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -80346,6 +81906,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -80370,6 +81931,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -80444,6 +82006,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -80480,6 +82043,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -80504,6 +82068,7 @@ "Error which is not critical. 
We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -80689,6 +82254,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -80725,6 +82291,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -80749,6 +82316,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -80841,6 +82409,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -80877,6 +82446,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -80901,6 +82471,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -81094,6 +82665,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -81130,6 +82702,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -81154,6 +82727,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -81246,6 +82820,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -81282,6 +82857,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -81306,6 +82882,7 @@ "Error which is not critical. 
We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -81380,6 +82957,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -81416,6 +82994,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -81440,6 +83019,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -81628,15 +83208,15 @@ }, "defaultRouteAction": { "$ref": "HttpRouteAction", - "description": "defaultRouteAction takes effect when none of the hostRules match. The load balancer performs advanced routing actions, such as URL rewrites and header transformations, before forwarding the request to the selected backend. If defaultRouteAction specifies any weightedBackendServices, defaultService must not be set. Conversely if defaultService is set, defaultRouteAction cannot contain any weightedBackendServices. Only one of defaultRouteAction or defaultUrlRedirect must be set. URL maps for classic Application Load Balancers only support the urlRewrite action within defaultRouteAction. defaultRouteAction has no effect when the URL map is bound to a target gRPC proxy that has the validateForProxyless field set to true." + "description": "defaultRouteAction takes effect when none of the hostRules match. The load balancer performs advanced routing actions, such as URL rewrites and header transformations, before forwarding the request to the selected backend. Only one of defaultUrlRedirect, defaultService or defaultRouteAction.weightedBackendService can be set. URL maps for classic Application Load Balancers only support the urlRewrite action within defaultRouteAction. defaultRouteAction has no effect when the URL map is bound to a target gRPC proxy that has the validateForProxyless field set to true." }, "defaultService": { - "description": "The full or partial URL of the defaultService resource to which traffic is directed if none of the hostRules match. If defaultRouteAction is also specified, advanced routing actions, such as URL rewrites, take effect before sending the request to the backend. However, if defaultService is specified, defaultRouteAction cannot contain any defaultRouteAction.weightedBackendServices. Conversely, if defaultRouteAction specifies any defaultRouteAction.weightedBackendServices, defaultService must not be specified. If defaultService is specified, then set either defaultUrlRedirect , or defaultRouteAction.weightedBackendService Don't set both. 
defaultService has no effect when the URL map is bound to a target gRPC proxy that has the validateForProxyless field set to true.", + "description": "The full or partial URL of the defaultService resource to which traffic is directed if none of the hostRules match. If defaultRouteAction is also specified, advanced routing actions, such as URL rewrites, take effect before sending the request to the backend. Only one of defaultUrlRedirect, defaultService or defaultRouteAction.weightedBackendService can be set. defaultService has no effect when the URL map is bound to a target gRPC proxy that has the validateForProxyless field set to true.", "type": "string" }, "defaultUrlRedirect": { "$ref": "HttpRedirectAction", - "description": "When none of the specified hostRules match, the request is redirected to a URL specified by defaultUrlRedirect. If defaultUrlRedirect is specified, defaultService or defaultRouteAction must not be set. Not supported when the URL map is bound to a target gRPC proxy." + "description": "When none of the specified hostRules match, the request is redirected to a URL specified by defaultUrlRedirect. Only one of defaultUrlRedirect, defaultService or defaultRouteAction.weightedBackendService can be set. Not supported when the URL map is bound to a target gRPC proxy." }, "description": { "description": "An optional description of this resource. Provide this property when you create the resource.", @@ -81753,6 +83333,259 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", + "REQUIRED_TOS_AGREEMENT", + "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", + "RESOURCE_NOT_DELETED", + "SCHEMA_VALIDATION_IGNORED", + "SINGLE_INSTANCE_PROPERTY_TEMPLATE", + "UNDECLARED_PROPERTIES", + "UNREACHABLE" + ], + "enumDeprecated": [ + false, + false, + false, + false, + false, + false, + true, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false + ], + "enumDescriptions": [ + "Warning about failed cleanup of transient changes made by a failed operation.", + "A link to a deprecated resource was created.", + "When deploying and at least one of the resources has a type marked as deprecated", + "The user created a boot disk that is larger than image size.", + "When deploying and at least one of the resources has a type marked as experimental", + "Warning that is present in an external api call", + "Warning that value of a field has been overridden. 
Deprecated unused field.", + "The operation involved use of an injected kernel, which is deprecated.", + "A WEIGHTED_MAGLEV backend service is associated with a health check that is not of type HTTP/HTTPS/HTTP2.", + "When deploying a deployment with a exceedingly large number of resources", + "Resource can't be retrieved due to list overhead quota exceed which captures the amount of resources filtered out by user-defined list filter.", + "A resource depends on a missing type", + "The route's nextHopIp address is not assigned to an instance on the network.", + "The route's next hop instance cannot ip forward.", + "The route's nextHopInstance URL refers to an instance that does not have an ipv6 interface on the same network as the route.", + "The route's nextHopInstance URL refers to an instance that does not exist.", + "The route's nextHopInstance URL refers to an instance that is not on the same network as the route.", + "The route's next hop instance does not have a status of RUNNING.", + "Error which is not critical. We decided to continue the process despite the mentioned error.", + "No results are present on a particular list page.", + "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", + "The user attempted to use a resource that requires a TOS they have not accepted.", + "Warning that a resource is in use.", + "One or more of the resources set to auto-delete could not be deleted because they were in use.", + "When a resource schema validation is ignored.", + "Instance template used in instance group manager is valid as such, but its application does not make a lot of sense, because it allows only single instance in instance group.", + "When undeclared properties in the schema are present", + "A given scope cannot be reached." + ], + "type": "string" + }, + "data": { + "description": "[Output Only] Metadata about this warning in key: value format. For example: \"data\": [ { \"key\": \"scope\", \"value\": \"zones/us-east1-d\" } ", + "items": { + "properties": { + "key": { + "description": "[Output Only] A key that provides more detail on the warning being returned. For example, for warnings where there are no results in a list request for a particular zone, this key might be scope and the key value might be the zone name. Other examples might be a key indicating a deprecated resource and a suggested replacement, or a warning about invalid network settings (for example, if an instance attempts to perform IP forwarding but is not enabled for IP forwarding).", + "type": "string" + }, + "value": { + "description": "[Output Only] A warning data value corresponding to the key.", + "type": "string" + } + }, + "type": "object" + }, + "type": "array" + }, + "message": { + "description": "[Output Only] A human-readable description of the warning code.", + "type": "string" + } + }, + "type": "object" + } + }, + "type": "object" + }, + "UrlMapReference": { + "id": "UrlMapReference", + "properties": { + "urlMap": { + "type": "string" + } + }, + "type": "object" + }, + "UrlMapTest": { + "description": "Message for the expected URL mappings.", + "id": "UrlMapTest", + "properties": { + "description": { + "description": "Description of this test case.", + "type": "string" + }, + "expectedOutputUrl": { + "description": "The expected output URL evaluated by the load balancer containing the scheme, host, path and query parameters. 
For rules that forward requests to backends, the test passes only when expectedOutputUrl matches the request forwarded by the load balancer to backends. For rules with urlRewrite, the test verifies that the forwarded request matches hostRewrite and pathPrefixRewrite in the urlRewrite action. When service is specified, expectedOutputUrl`s scheme is ignored. For rules with urlRedirect, the test passes only if expectedOutputUrl matches the URL in the load balancer's redirect response. If urlRedirect specifies https_redirect, the test passes only if the scheme in expectedOutputUrl is also set to HTTPS. If urlRedirect specifies strip_query, the test passes only if expectedOutputUrl does not contain any query parameters. expectedOutputUrl is optional when service is specified.", + "type": "string" + }, + "expectedRedirectResponseCode": { + "description": "For rules with urlRedirect, the test passes only if expectedRedirectResponseCode matches the HTTP status code in load balancer's redirect response. expectedRedirectResponseCode cannot be set when service is set.", + "format": "int32", + "type": "integer" + }, + "headers": { + "description": "HTTP headers for this request. If headers contains a host header, then host must also match the header value.", + "items": { + "$ref": "UrlMapTestHeader" + }, + "type": "array" + }, + "host": { + "description": "Host portion of the URL. If headers contains a host header, then host must also match the header value.", + "type": "string" + }, + "path": { + "description": "Path portion of the URL.", + "type": "string" + }, + "service": { + "description": "Expected BackendService or BackendBucket resource the given URL should be mapped to. The service field cannot be set if expectedRedirectResponseCode is set.", + "type": "string" + } + }, + "type": "object" + }, + "UrlMapTestHeader": { + "description": "HTTP headers used in UrlMapTests.", + "id": "UrlMapTestHeader", + "properties": { + "name": { + "description": "Header name.", + "type": "string" + }, + "value": { + "description": "Header value.", + "type": "string" + } + }, + "type": "object" + }, + "UrlMapValidationResult": { + "description": "Message representing the validation result for a UrlMap.", + "id": "UrlMapValidationResult", + "properties": { + "loadErrors": { + "items": { + "type": "string" + }, + "type": "array" + }, + "loadSucceeded": { + "description": "Whether the given UrlMap can be successfully loaded. If false, 'loadErrors' indicates the reasons.", + "type": "boolean" + }, + "testFailures": { + "items": { + "$ref": "TestFailure" + }, + "type": "array" + }, + "testPassed": { + "description": "If successfully loaded, this field indicates whether the test passed. If false, 'testFailures's indicate the reason of failure.", + "type": "boolean" + } + }, + "type": "object" + }, + "UrlMapsAggregatedList": { + "id": "UrlMapsAggregatedList", + "properties": { + "id": { + "description": "[Output Only] Unique identifier for the resource; defined by the server.", + "type": "string" + }, + "items": { + "additionalProperties": { + "$ref": "UrlMapsScopedList", + "description": "Name of the scope containing this set of UrlMaps." + }, + "description": "A list of UrlMapsScopedList resources.", + "type": "object" + }, + "kind": { + "default": "compute#urlMapsAggregatedList", + "description": "Type of resource.", + "type": "string" + }, + "nextPageToken": { + "description": "[Output Only] This token allows you to get the next page of results for list requests. 
If the number of results is larger than maxResults, use the nextPageToken as a value for the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results.", + "type": "string" + }, + "selfLink": { + "description": "[Output Only] Server-defined URL for this resource.", + "type": "string" + }, + "unreachables": { + "description": "[Output Only] Unreachable resources.", + "items": { + "type": "string" + }, + "type": "array" + }, + "warning": { + "description": "[Output Only] Informational warning message.", + "properties": { + "code": { + "description": "[Output Only] A warning code, if applicable. For example, Compute Engine returns NO_RESULTS_ON_PAGE if there are no results in the response.", + "enum": [ + "CLEANUP_FAILED", + "DEPRECATED_RESOURCE_USED", + "DEPRECATED_TYPE_USED", + "DISK_SIZE_LARGER_THAN_IMAGE_SIZE", + "EXPERIMENTAL_TYPE_USED", + "EXTERNAL_API_WARNING", + "FIELD_VALUE_OVERRIDEN", + "INJECTED_KERNELS_DEPRECATED", + "INVALID_HEALTH_CHECK_FOR_DYNAMIC_WIEGHTED_LB", + "LARGE_DEPLOYMENT_WARNING", + "LIST_OVERHEAD_QUOTA_EXCEED", + "MISSING_TYPE_DEPENDENCY", + "NEXT_HOP_ADDRESS_NOT_ASSIGNED", + "NEXT_HOP_CANNOT_IP_FORWARD", + "NEXT_HOP_INSTANCE_HAS_NO_IPV6_INTERFACE", + "NEXT_HOP_INSTANCE_NOT_FOUND", + "NEXT_HOP_INSTANCE_NOT_ON_NETWORK", + "NEXT_HOP_NOT_RUNNING", + "NOT_CRITICAL_ERROR", + "NO_RESULTS_ON_PAGE", + "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -81789,254 +83622,6 @@ false, false, false, - false - ], - "enumDescriptions": [ - "Warning about failed cleanup of transient changes made by a failed operation.", - "A link to a deprecated resource was created.", - "When deploying and at least one of the resources has a type marked as deprecated", - "The user created a boot disk that is larger than image size.", - "When deploying and at least one of the resources has a type marked as experimental", - "Warning that is present in an external api call", - "Warning that value of a field has been overridden. Deprecated unused field.", - "The operation involved use of an injected kernel, which is deprecated.", - "A WEIGHTED_MAGLEV backend service is associated with a health check that is not of type HTTP/HTTPS/HTTP2.", - "When deploying a deployment with a exceedingly large number of resources", - "Resource can't be retrieved due to list overhead quota exceed which captures the amount of resources filtered out by user-defined list filter.", - "A resource depends on a missing type", - "The route's nextHopIp address is not assigned to an instance on the network.", - "The route's next hop instance cannot ip forward.", - "The route's nextHopInstance URL refers to an instance that does not have an ipv6 interface on the same network as the route.", - "The route's nextHopInstance URL refers to an instance that does not exist.", - "The route's nextHopInstance URL refers to an instance that is not on the same network as the route.", - "The route's next hop instance does not have a status of RUNNING.", - "Error which is not critical. 
We decided to continue the process despite the mentioned error.", - "No results are present on a particular list page.", - "Success is reported, but some results may be missing due to errors", - "The user attempted to use a resource that requires a TOS they have not accepted.", - "Warning that a resource is in use.", - "One or more of the resources set to auto-delete could not be deleted because they were in use.", - "When a resource schema validation is ignored.", - "Instance template used in instance group manager is valid as such, but its application does not make a lot of sense, because it allows only single instance in instance group.", - "When undeclared properties in the schema are present", - "A given scope cannot be reached." - ], - "type": "string" - }, - "data": { - "description": "[Output Only] Metadata about this warning in key: value format. For example: \"data\": [ { \"key\": \"scope\", \"value\": \"zones/us-east1-d\" } ", - "items": { - "properties": { - "key": { - "description": "[Output Only] A key that provides more detail on the warning being returned. For example, for warnings where there are no results in a list request for a particular zone, this key might be scope and the key value might be the zone name. Other examples might be a key indicating a deprecated resource and a suggested replacement, or a warning about invalid network settings (for example, if an instance attempts to perform IP forwarding but is not enabled for IP forwarding).", - "type": "string" - }, - "value": { - "description": "[Output Only] A warning data value corresponding to the key.", - "type": "string" - } - }, - "type": "object" - }, - "type": "array" - }, - "message": { - "description": "[Output Only] A human-readable description of the warning code.", - "type": "string" - } - }, - "type": "object" - } - }, - "type": "object" - }, - "UrlMapReference": { - "id": "UrlMapReference", - "properties": { - "urlMap": { - "type": "string" - } - }, - "type": "object" - }, - "UrlMapTest": { - "description": "Message for the expected URL mappings.", - "id": "UrlMapTest", - "properties": { - "description": { - "description": "Description of this test case.", - "type": "string" - }, - "expectedOutputUrl": { - "description": "The expected output URL evaluated by the load balancer containing the scheme, host, path and query parameters. For rules that forward requests to backends, the test passes only when expectedOutputUrl matches the request forwarded by the load balancer to backends. For rules with urlRewrite, the test verifies that the forwarded request matches hostRewrite and pathPrefixRewrite in the urlRewrite action. When service is specified, expectedOutputUrl`s scheme is ignored. For rules with urlRedirect, the test passes only if expectedOutputUrl matches the URL in the load balancer's redirect response. If urlRedirect specifies https_redirect, the test passes only if the scheme in expectedOutputUrl is also set to HTTPS. If urlRedirect specifies strip_query, the test passes only if expectedOutputUrl does not contain any query parameters. expectedOutputUrl is optional when service is specified.", - "type": "string" - }, - "expectedRedirectResponseCode": { - "description": "For rules with urlRedirect, the test passes only if expectedRedirectResponseCode matches the HTTP status code in load balancer's redirect response. expectedRedirectResponseCode cannot be set when service is set.", - "format": "int32", - "type": "integer" - }, - "headers": { - "description": "HTTP headers for this request. 
If headers contains a host header, then host must also match the header value.", - "items": { - "$ref": "UrlMapTestHeader" - }, - "type": "array" - }, - "host": { - "description": "Host portion of the URL. If headers contains a host header, then host must also match the header value.", - "type": "string" - }, - "path": { - "description": "Path portion of the URL.", - "type": "string" - }, - "service": { - "description": "Expected BackendService or BackendBucket resource the given URL should be mapped to. The service field cannot be set if expectedRedirectResponseCode is set.", - "type": "string" - } - }, - "type": "object" - }, - "UrlMapTestHeader": { - "description": "HTTP headers used in UrlMapTests.", - "id": "UrlMapTestHeader", - "properties": { - "name": { - "description": "Header name.", - "type": "string" - }, - "value": { - "description": "Header value.", - "type": "string" - } - }, - "type": "object" - }, - "UrlMapValidationResult": { - "description": "Message representing the validation result for a UrlMap.", - "id": "UrlMapValidationResult", - "properties": { - "loadErrors": { - "items": { - "type": "string" - }, - "type": "array" - }, - "loadSucceeded": { - "description": "Whether the given UrlMap can be successfully loaded. If false, 'loadErrors' indicates the reasons.", - "type": "boolean" - }, - "testFailures": { - "items": { - "$ref": "TestFailure" - }, - "type": "array" - }, - "testPassed": { - "description": "If successfully loaded, this field indicates whether the test passed. If false, 'testFailures's indicate the reason of failure.", - "type": "boolean" - } - }, - "type": "object" - }, - "UrlMapsAggregatedList": { - "id": "UrlMapsAggregatedList", - "properties": { - "id": { - "description": "[Output Only] Unique identifier for the resource; defined by the server.", - "type": "string" - }, - "items": { - "additionalProperties": { - "$ref": "UrlMapsScopedList", - "description": "Name of the scope containing this set of UrlMaps." - }, - "description": "A list of UrlMapsScopedList resources.", - "type": "object" - }, - "kind": { - "default": "compute#urlMapsAggregatedList", - "description": "Type of resource.", - "type": "string" - }, - "nextPageToken": { - "description": "[Output Only] This token allows you to get the next page of results for list requests. If the number of results is larger than maxResults, use the nextPageToken as a value for the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results.", - "type": "string" - }, - "selfLink": { - "description": "[Output Only] Server-defined URL for this resource.", - "type": "string" - }, - "unreachables": { - "description": "[Output Only] Unreachable resources.", - "items": { - "type": "string" - }, - "type": "array" - }, - "warning": { - "description": "[Output Only] Informational warning message.", - "properties": { - "code": { - "description": "[Output Only] A warning code, if applicable. 
For example, Compute Engine returns NO_RESULTS_ON_PAGE if there are no results in the response.", - "enum": [ - "CLEANUP_FAILED", - "DEPRECATED_RESOURCE_USED", - "DEPRECATED_TYPE_USED", - "DISK_SIZE_LARGER_THAN_IMAGE_SIZE", - "EXPERIMENTAL_TYPE_USED", - "EXTERNAL_API_WARNING", - "FIELD_VALUE_OVERRIDEN", - "INJECTED_KERNELS_DEPRECATED", - "INVALID_HEALTH_CHECK_FOR_DYNAMIC_WIEGHTED_LB", - "LARGE_DEPLOYMENT_WARNING", - "LIST_OVERHEAD_QUOTA_EXCEED", - "MISSING_TYPE_DEPENDENCY", - "NEXT_HOP_ADDRESS_NOT_ASSIGNED", - "NEXT_HOP_CANNOT_IP_FORWARD", - "NEXT_HOP_INSTANCE_HAS_NO_IPV6_INTERFACE", - "NEXT_HOP_INSTANCE_NOT_FOUND", - "NEXT_HOP_INSTANCE_NOT_ON_NETWORK", - "NEXT_HOP_NOT_RUNNING", - "NOT_CRITICAL_ERROR", - "NO_RESULTS_ON_PAGE", - "PARTIAL_SUCCESS", - "REQUIRED_TOS_AGREEMENT", - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", - "RESOURCE_NOT_DELETED", - "SCHEMA_VALIDATION_IGNORED", - "SINGLE_INSTANCE_PROPERTY_TEMPLATE", - "UNDECLARED_PROPERTIES", - "UNREACHABLE" - ], - "enumDeprecated": [ - false, - false, - false, - false, - false, - false, - true, - false, - false, - false, - false, - false, - false, - false, - false, - false, - false, - false, - false, - false, - false, - false, - false, - false, - false, - false, false, false ], @@ -82062,6 +83647,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -82136,6 +83722,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -82172,6 +83759,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -82196,6 +83784,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -82321,10 +83910,11 @@ "type": "string" }, "purpose": { - "description": "The purpose of the resource. This field can be either PRIVATE, GLOBAL_MANAGED_PROXY, REGIONAL_MANAGED_PROXY, PRIVATE_SERVICE_CONNECT, or PRIVATE is the default purpose for user-created subnets or subnets that are automatically created in auto mode networks. Subnets with purpose set to GLOBAL_MANAGED_PROXY or REGIONAL_MANAGED_PROXY are user-created subnetworks that are reserved for Envoy-based load balancers. A subnet with purpose set to PRIVATE_SERVICE_CONNECT is used to publish services using Private Service Connect. If unspecified, the subnet purpose defaults to PRIVATE. The enableFlowLogs field isn't supported if the subnet purpose field is set to GLOBAL_MANAGED_PROXY or REGIONAL_MANAGED_PROXY.", + "description": "The purpose of the resource. 
This field can be either PRIVATE, GLOBAL_MANAGED_PROXY, REGIONAL_MANAGED_PROXY, PEER_MIGRATION or PRIVATE_SERVICE_CONNECT. PRIVATE is the default purpose for user-created subnets or subnets that are automatically created in auto mode networks. Subnets with purpose set to GLOBAL_MANAGED_PROXY or REGIONAL_MANAGED_PROXY are user-created subnetworks that are reserved for Envoy-based load balancers. A subnet with purpose set to PRIVATE_SERVICE_CONNECT is used to publish services using Private Service Connect. A subnet with purpose set to PEER_MIGRATION is used for subnet migration from one peered VPC to another. If unspecified, the subnet purpose defaults to PRIVATE. The enableFlowLogs field isn't supported if the subnet purpose field is set to GLOBAL_MANAGED_PROXY or REGIONAL_MANAGED_PROXY.", "enum": [ "GLOBAL_MANAGED_PROXY", "INTERNAL_HTTPS_LOAD_BALANCER", + "PEER_MIGRATION", "PRIVATE", "PRIVATE_NAT", "PRIVATE_RFC_1918", @@ -82334,6 +83924,7 @@ "enumDescriptions": [ "Subnet reserved for Global Envoy-based Load Balancing.", "Subnet reserved for Internal HTTP(S) Load Balancing. This is a legacy purpose, please use REGIONAL_MANAGED_PROXY instead.", + "Subnetwork will be used for Migration from one peered VPC to another. (a transient state of subnetwork while migrating resources from one project to another).", "Regular user created or automatically created subnet.", "Subnetwork used as source range for Private NAT Gateways.", "Regular user created or automatically created subnet.", @@ -82451,6 +84042,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -82487,6 +84079,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -82511,6 +84104,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -82717,6 +84311,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -82753,6 +84348,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -82777,6 +84373,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -82973,6 +84570,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -83009,6 +84607,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -83033,6 +84632,7 @@ "Error which is not critical. 
We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -83125,6 +84725,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -83161,6 +84762,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -83185,6 +84787,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -83381,6 +84984,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -83417,6 +85021,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -83441,6 +85046,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -83694,6 +85300,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -83730,6 +85337,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -83754,6 +85362,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -83846,6 +85455,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -83882,6 +85492,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -83906,6 +85517,7 @@ "Error which is not critical. 
We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -83980,6 +85592,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -84016,6 +85629,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -84040,6 +85654,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -84190,6 +85805,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -84226,6 +85842,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -84250,6 +85867,7 @@ "Error which is not critical. We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", @@ -84429,6 +86047,7 @@ "NOT_CRITICAL_ERROR", "NO_RESULTS_ON_PAGE", "PARTIAL_SUCCESS", + "QUOTA_INFO_UNAVAILABLE", "REQUIRED_TOS_AGREEMENT", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", "RESOURCE_NOT_DELETED", @@ -84465,6 +86084,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -84489,6 +86109,7 @@ "Error which is not critical. 
We decided to continue the process despite the mentioned error.", "No results are present on a particular list page.", "Success is reported, but some results may be missing due to errors", + "Quota information is not available to client requests (e.g: regions.list).", "The user attempted to use a resource that requires a TOS they have not accepted.", "Warning that a resource is in use.", "One or more of the resources set to auto-delete could not be deleted because they were in use.", diff --git a/discovery/googleapis/config__v1.json b/discovery/googleapis/config__v1.json index 90032d785..0f65c1a44 100644 --- a/discovery/googleapis/config__v1.json +++ b/discovery/googleapis/config__v1.json @@ -25,7 +25,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20240710", + "revision": "20241211", "rootUrl": "https://config.googleapis.com/", "servicePath": "", "title": "Infrastructure Manager API", @@ -820,7 +820,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "config.projects.locations.operations.cancel", @@ -1424,7 +1424,7 @@ "type": "string" }, "serviceAccount": { - "description": "Optional. User-specified Service Account (SA) credentials to be used when actuating resources. Format: `projects/{projectID}/serviceAccounts/{serviceAccount}`", + "description": "Required. User-specified Service Account (SA) credentials to be used when actuating resources. Format: `projects/{projectID}/serviceAccounts/{serviceAccount}`", "type": "string" }, "state": { @@ -1710,7 +1710,7 @@ "type": "string" }, "previews": { - "description": "List of Previewss.", + "description": "List of Previews.", "items": { "$ref": "Preview" }, @@ -1735,7 +1735,7 @@ "type": "string" }, "resources": { - "description": "List of Resourcess.", + "description": "List of Resources.", "items": { "$ref": "Resource" }, @@ -1941,7 +1941,7 @@ "readOnly": true }, "requestedCancellation": { - "description": "Output only. Identifies whether the user has requested cancellation of the operation. Operations that have successfully been cancelled have Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Output only. 
Identifies whether the user has requested cancellation of the operation. Operations that have successfully been cancelled have google.longrunning.Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "readOnly": true, "type": "boolean" }, @@ -2093,7 +2093,7 @@ "type": "string" }, "serviceAccount": { - "description": "Optional. User-specified Service Account (SA) credentials to be used when previewing resources. Format: `projects/{projectID}/serviceAccounts/{serviceAccount}`", + "description": "Required. User-specified Service Account (SA) credentials to be used when previewing resources. Format: `projects/{projectID}/serviceAccounts/{serviceAccount}`", "type": "string" }, "state": { @@ -2558,7 +2558,7 @@ "additionalProperties": { "$ref": "TerraformVariable" }, - "description": "Input variable values for the Terraform blueprint.", + "description": "Optional. Input variable values for the Terraform blueprint.", "type": "object" } }, @@ -2570,7 +2570,8 @@ "properties": { "error": { "$ref": "Status", - "description": "Original error response from underlying Google API, if available." + "description": "Output only. Original error response from underlying Google API, if available.", + "readOnly": true }, "errorDescription": { "description": "A human-readable error description.", @@ -2608,7 +2609,7 @@ "id": "TerraformVariable", "properties": { "inputValue": { - "description": "Input variable value.", + "description": "Optional. Input variable value.", "type": "any" } }, diff --git a/discovery/googleapis/connectors__v1.json b/discovery/googleapis/connectors__v1.json index 6bb764e4b..ceb0471e3 100644 --- a/discovery/googleapis/connectors__v1.json +++ b/discovery/googleapis/connectors__v1.json @@ -25,7 +25,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20241015", + "revision": "20241203", "rootUrl": "https://connectors.googleapis.com/", "servicePath": "", "title": "Connectors API", @@ -1249,6 +1249,62 @@ "scopes": [ "https://www.googleapis.com/auth/cloud-platform" ] + }, + "publish": { + "description": "Publish request for the CustomConnectorVersion. Once approved, the CustomConnectorVersion will be published as PartnerConnector.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/customConnectors/{customConnectorsId}/customConnectorVersions/{customConnectorVersionsId}:publish", + "httpMethod": "POST", + "id": "connectors.projects.locations.customConnectors.customConnectorVersions.publish", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "Required. Resource name of the form: `projects/{project}/locations/{location}/customConnectors/{custom_connector}/customConnectorVersions/{custom_connector_version}`", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/customConnectors/[^/]+/customConnectorVersions/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+name}:publish", + "request": { + "$ref": "PublishCustomConnectorVersionRequest" + }, + "response": { + "$ref": "Operation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "withdraw": { + "description": "Withdraw the publish request for the CustomConnectorVersion. 
This can only be used before the CustomConnectorVersion is published.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/customConnectors/{customConnectorsId}/customConnectorVersions/{customConnectorVersionsId}:withdraw", + "httpMethod": "POST", + "id": "connectors.projects.locations.customConnectors.customConnectorVersions.withdraw", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "Required. Resource name of the form: `projects/{project}/locations/{location}/customConnectors/{custom_connector}/customConnectorVersions/{custom_connector_version}`", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/customConnectors/[^/]+/customConnectorVersions/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+name}:withdraw", + "request": { + "$ref": "WithdrawCustomConnectorVersionRequest" + }, + "response": { + "$ref": "Operation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] } } } @@ -1953,7 +2009,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "connectors.projects.locations.operations.cancel", @@ -2806,7 +2862,7 @@ "id": "AuthorizationCodeLink", "properties": { "clientId": { - "description": "The client ID assigned to the Google Cloud Connectors OAuth app for the connector data source.", + "description": "Optional. The client ID assigned to the Google Cloud Connectors OAuth app for the connector data source.", "type": "string" }, "clientSecret": { @@ -2814,7 +2870,7 @@ "description": "Optional. The client secret assigned to the Google Cloud Connectors OAuth app for the connector data source." }, "enablePkce": { - "description": "Whether to enable PKCE for the auth code flow.", + "description": "Optional. Whether to enable PKCE for the auth code flow.", "type": "boolean" }, "omitQueryParams": { @@ -2822,14 +2878,14 @@ "type": "boolean" }, "scopes": { - "description": "The scopes for which the user will authorize Google Cloud Connectors on the connector data source.", + "description": "Optional. 
The scopes for which the user will authorize Google Cloud Connectors on the connector data source.", "items": { "type": "string" }, "type": "array" }, "uri": { - "description": "The base URI the user must click to trigger the authorization code login flow.", + "description": "Optional. The base URI the user must click to trigger the authorization code login flow.", "type": "string" } }, @@ -2923,18 +2979,18 @@ "properties": { "authorizationCodeLink": { "$ref": "AuthorizationCodeLink", - "description": "Authorization code link options. To be populated if `ValueType` is `AUTHORIZATION_CODE`" + "description": "Optional. Authorization code link options. To be populated if `ValueType` is `AUTHORIZATION_CODE`" }, "description": { - "description": "Description.", + "description": "Optional. Description.", "type": "string" }, "displayName": { - "description": "Display name of the parameter.", + "description": "Optional. Display name of the parameter.", "type": "string" }, "enumOptions": { - "description": "Enum options. To be populated if `ValueType` is `ENUM`", + "description": "Optional. Enum options. To be populated if `ValueType` is `ENUM`", "items": { "$ref": "EnumOption" }, @@ -2953,11 +3009,11 @@ "type": "string" }, "isAdvanced": { - "description": "Indicates if current template is part of advanced settings", + "description": "Optional. Indicates if current template is part of advanced settings", "type": "boolean" }, "key": { - "description": "Key of the config variable.", + "description": "Optional. Key of the config variable.", "type": "string" }, "locationType": { @@ -2983,19 +3039,19 @@ "description": "Optional. MultipleSelectConfig represents the multiple options for a config variable." }, "required": { - "description": "Flag represents that this `ConfigVariable` must be provided for a connection.", + "description": "Optional. Flag represents that this `ConfigVariable` must be provided for a connection.", "type": "boolean" }, "requiredCondition": { "$ref": "LogicalExpression", - "description": "Condition under which a field would be required. The condition can be represented in the form of a logical expression." + "description": "Optional. Condition under which a field would be required. The condition can be represented in the form of a logical expression." }, "roleGrant": { "$ref": "RoleGrant", - "description": "Role grant configuration for the config variable." + "description": "Optional. Role grant configuration for the config variable." }, "state": { - "description": "State of the config variable.", + "description": "Output only. State of the config variable.", "enum": [ "STATE_UNSPECIFIED", "ACTIVE", @@ -3006,14 +3062,15 @@ "Config variable is active", "Config variable is deprecated." ], + "readOnly": true, "type": "string" }, "validationRegex": { - "description": "Regular expression in RE2 syntax used for validating the `value` of a `ConfigVariable`.", + "description": "Optional. Regular expression in RE2 syntax used for validating the `value` of a `ConfigVariable`.", "type": "string" }, "valueType": { - "description": "Type of the parameter: string, int, bool etc. consider custom type for the benefit for the validation.", + "description": "Optional. Type of the parameter: string, int, bool etc. 
consider custom type for the benefit for the validation.", "enum": [ "VALUE_TYPE_UNSPECIFIED", "STRING", @@ -3091,6 +3148,7 @@ "PREVIEW", "GA", "DEPRECATED", + "TEST", "PRIVATE_PREVIEW" ], "enumDescriptions": [ @@ -3098,6 +3156,7 @@ "PREVIEW.", "GA.", "DEPRECATED.", + "TEST.", "PRIVATE_PREVIEW." ], "readOnly": true, @@ -3418,6 +3477,7 @@ "PREVIEW", "GA", "DEPRECATED", + "TEST", "PRIVATE_PREVIEW" ], "enumDescriptions": [ @@ -3425,11 +3485,17 @@ "PREVIEW.", "GA.", "DEPRECATED.", + "TEST.", "PRIVATE_PREVIEW." ], "readOnly": true, "type": "string" }, + "marketplaceConnectorDetails": { + "$ref": "MarketplaceConnectorDetails", + "description": "Output only. Marketplace connector details. Will be null if the connector is not marketplace connector.", + "readOnly": true + }, "name": { "description": "Output only. Resource name of the Connector. Format: projects/{project}/locations/{location}/providers/{provider}/connectors/{connector} Only global location is supported for Connector resource.", "readOnly": true, @@ -3623,6 +3689,7 @@ "PREVIEW", "GA", "DEPRECATED", + "TEST", "PRIVATE_PREVIEW" ], "enumDescriptions": [ @@ -3630,6 +3697,7 @@ "PREVIEW.", "GA.", "DEPRECATED.", + "TEST.", "PRIVATE_PREVIEW." ], "readOnly": true, @@ -3728,7 +3796,7 @@ "type": "string" }, "deploymentModel": { - "description": "Optional. Indicates whether connector is deployed on GKE/CloudRun", + "description": "Output only. Indicates whether connector is deployed on GKE/CloudRun", "enum": [ "DEPLOYMENT_MODEL_UNSPECIFIED", "GKE_MST", @@ -3739,6 +3807,7 @@ "Default model gke mst.", "Cloud run mst." ], + "readOnly": true, "type": "string" }, "deploymentModelMigrationState": { @@ -3863,6 +3932,14 @@ "readOnly": true, "type": "array" }, + "allMarketplaceVersions": { + "description": "Output only. All marketplace versions.", + "items": { + "type": "string" + }, + "readOnly": true, + "type": "array" + }, "createTime": { "description": "Output only. Created time.", "format": "google-datetime", @@ -3906,6 +3983,14 @@ "description": "Identifier. Resource name of the CustomConnector. Format: projects/{project}/locations/{location}/customConnectors/{connector}", "type": "string" }, + "publishedMarketplaceVersions": { + "description": "Output only. Published marketplace versions.", + "items": { + "type": "string" + }, + "readOnly": true, + "type": "array" + }, "updateTime": { "description": "Output only. Updated time.", "format": "google-datetime", @@ -3959,6 +4044,15 @@ "readOnly": true, "type": "string" }, + "partnerMetadata": { + "$ref": "PartnerMetadata", + "description": "Optional. Partner metadata details. This should be populated only when publishing the custom connector to partner connector." + }, + "publishStatus": { + "$ref": "PublishStatus", + "description": "Output only. Publish status of a custom connector.", + "readOnly": true + }, "serviceAccount": { "description": "Optional. Service account used by runtime plane to access auth config secrets.", "type": "string" @@ -4347,11 +4441,11 @@ "id": "EnumOption", "properties": { "displayName": { - "description": "Display name of the option.", + "description": "Optional. Display name of the option.", "type": "string" }, "id": { - "description": "Id of the option.", + "description": "Optional. Id of the option.", "type": "string" } }, @@ -4419,6 +4513,10 @@ "$ref": "EndPoint", "description": "OPTION 1: Hit an endpoint when we receive an event." }, + "gsutil": { + "$ref": "GSUtil", + "description": "OPTION 2: Write the event to Cloud Storage bucket." 
+ }, "serviceAccount": { "description": "Service account needed for runtime plane to trigger IP workflow.", "type": "string" @@ -4427,11 +4525,15 @@ "description": "type of the destination", "enum": [ "TYPE_UNSPECIFIED", - "ENDPOINT" + "ENDPOINT", + "GCS", + "PUBSUB" ], "enumDescriptions": [ "Default state.", - "Endpoint - Hit the value of endpoint when event is received" + "Endpoint - Hit the value of endpoint when event is received", + "Cloud Storage - Write the event to Cloud Storage bucket", + "Pub/Sub - Write the event to Pub/Sub topic" ], "type": "string" } @@ -4525,7 +4627,7 @@ "id": "EventingConfig", "properties": { "additionalVariables": { - "description": "Additional eventing related field values", + "description": "Optional. Additional eventing related field values", "items": { "$ref": "ConfigVariable" }, @@ -4533,14 +4635,14 @@ }, "authConfig": { "$ref": "AuthConfig", - "description": "Auth details for the webhook adapter." + "description": "Optional. Auth details for the webhook adapter." }, "deadLetterConfig": { "$ref": "DeadLetterConfig", "description": "Optional. Dead letter configuration for eventing of a connection." }, "enrichmentEnabled": { - "description": "Enrichment Enabled.", + "description": "Optional. Enrichment Enabled.", "type": "boolean" }, "eventsListenerIngressEndpoint": { @@ -4561,7 +4663,7 @@ }, "registrationDestinationConfig": { "$ref": "DestinationConfig", - "description": "Registration endpoint for auto registration." + "description": "Optional. Registration endpoint for auto registration." } }, "type": "object" @@ -4674,6 +4776,7 @@ "PREVIEW", "GA", "DEPRECATED", + "TEST", "PRIVATE_PREVIEW" ], "enumDescriptions": [ @@ -4681,6 +4784,7 @@ "PREVIEW.", "GA.", "DEPRECATED.", + "TEST.", "PRIVATE_PREVIEW." ], "readOnly": true, @@ -4740,6 +4844,11 @@ "$ref": "WebhookData", "description": "Output only. Webhook data.", "readOnly": true + }, + "webhookSubscriptions": { + "$ref": "WebhookSubscriptions", + "description": "Output only. Webhook subscriptions.", + "readOnly": true } }, "type": "object" @@ -5040,7 +5149,7 @@ "type": "boolean" }, "comparator": { - "description": "Comparator to use for comparing the field value.", + "description": "Optional. Comparator to use for comparing the field value.", "enum": [ "COMPARATOR_UNSPECIFIED", "EQUALS", @@ -5059,7 +5168,7 @@ "type": "string" }, "key": { - "description": "Key of the field.", + "description": "Optional. Key of the field.", "type": "string" }, "stringValue": { @@ -5069,6 +5178,17 @@ }, "type": "object" }, + "GSUtil": { + "description": "GSUtil message includes details of the Destination Cloud Storage bucket.", + "id": "GSUtil", + "properties": { + "gsutilUri": { + "description": "Required. The URI of the Cloud Storage bucket.", + "type": "string" + } + }, + "type": "object" + }, "HPAConfig": { "description": "Autoscaling config for connector deployment system metrics.", "id": "HPAConfig", @@ -6075,21 +6195,21 @@ "id": "LogicalExpression", "properties": { "fieldComparisons": { - "description": "A list of fields to be compared.", + "description": "Optional. A list of fields to be compared.", "items": { "$ref": "FieldComparison" }, "type": "array" }, "logicalExpressions": { - "description": "A list of nested conditions to be compared.", + "description": "Optional. A list of nested conditions to be compared.", "items": { "$ref": "LogicalExpression" }, "type": "array" }, "logicalOperator": { - "description": "The logical operator to use between the fields and conditions.", + "description": "Optional. 
The logical operator to use between the fields and conditions.", "enum": [ "OPERATOR_UNSPECIFIED", "AND", @@ -6270,6 +6390,29 @@ }, "type": "object" }, + "MarketplaceConnectorDetails": { + "description": "Marketplace connector details.", + "id": "MarketplaceConnectorDetails", + "properties": { + "marketplaceProduct": { + "description": "Marketplace product name.", + "type": "string" + }, + "marketplaceProductId": { + "description": "Marketplace product ID.", + "type": "string" + }, + "marketplaceProductUri": { + "description": "Marketplace product URL.", + "type": "string" + }, + "partner": { + "description": "The name of the partner.", + "type": "string" + } + }, + "type": "object" + }, "MultipleSelectConfig": { "description": "MultipleSelectConfig represents the multiple options for a config variable.", "id": "MultipleSelectConfig", @@ -6567,6 +6710,75 @@ }, "type": "object" }, + "PartnerMetadata": { + "description": "Partner metadata details. This will be populated when publishing the custom connector as a partner connector version. On publishing, partner connector version will be created using the fields in PartnerMetadata.", + "id": "PartnerMetadata", + "properties": { + "acceptGcpTos": { + "description": "Required. Whether the user has accepted the Google Cloud Platform Terms of Service (https://cloud.google.com/terms/) and the Google Cloud Marketplace Terms of Service (https://cloud.google.com/terms/marketplace/launcher?hl=en).", + "type": "boolean" + }, + "additionalComments": { + "description": "Optional. Additional comments for the submission.", + "type": "string" + }, + "confirmPartnerRequirements": { + "description": "Required. Confirmation that connector meets all applicable requirements mentioned in the Partner Connector Publishing requirements list and Partner onboarding requirements list (https://cloud.google.com/marketplace/docs/partners/get-started#requirements).", + "type": "boolean" + }, + "demoUri": { + "description": "Required. Public URL for the demo video.", + "type": "string" + }, + "integrationTemplates": { + "description": "Required. Integration example templates for the custom connector.", + "type": "string" + }, + "marketplaceProduct": { + "description": "Optional. Marketplace product name.", + "type": "string" + }, + "marketplaceProductId": { + "description": "Required. Marketplace product ID.", + "type": "string" + }, + "marketplaceProductProjectId": { + "description": "Optional. Marketplace product project ID.", + "type": "string" + }, + "marketplaceProductUri": { + "description": "Optional. Marketplace product URL.", + "type": "string" + }, + "partner": { + "description": "Required. Partner name.", + "type": "string" + }, + "partnerConnectorDisplayName": { + "description": "Required. Partner connector display name.", + "type": "string" + }, + "publishRequestTime": { + "description": "Output only. Publish request time.", + "format": "google-datetime", + "readOnly": true, + "type": "string" + }, + "targetApplication": { + "description": "Required. Target application for which partner connector is built.", + "type": "string" + }, + "targetCustomerSegment": { + "description": "Required. Target customer segment for the partner connector.", + "type": "string" + }, + "useCases": { + "description": "Required. 
Details about partner connector use cases.", + "type": "string" + } + }, + "type": "object" + }, "PerSliSloEligibility": { "description": "PerSliSloEligibility is a mapping from an SLI name to eligibility.", "id": "PerSliSloEligibility", @@ -6657,6 +6869,7 @@ "PREVIEW", "GA", "DEPRECATED", + "TEST", "PRIVATE_PREVIEW" ], "enumDescriptions": [ @@ -6664,6 +6877,7 @@ "PREVIEW.", "GA.", "DEPRECATED.", + "TEST.", "PRIVATE_PREVIEW." ], "readOnly": true, @@ -6703,6 +6917,57 @@ }, "type": "object" }, + "PublishCustomConnectorVersionRequest": { + "description": "Request message for ConnectorsService.PublishCustomConnectorVersion", + "id": "PublishCustomConnectorVersionRequest", + "properties": { + "partnerMetadata": { + "$ref": "PartnerMetadata", + "description": "Required. Partner metadata details for validating and publishing the custom connector as a partner connector version." + } + }, + "type": "object" + }, + "PublishStatus": { + "description": "Publish status of a custom connector.", + "id": "PublishStatus", + "properties": { + "publishState": { + "description": "Output only. Publish state of the custom connector.", + "enum": [ + "PUBLISH_STATE_UNSPECIFIED", + "PUBLISHED", + "PUBLISH_IN_PROGRESS", + "UNPUBLISHED" + ], + "enumDescriptions": [ + "State Unspecified.", + "Connector version has been published as partner connector version.", + "Connector version is in the process of being published as partner connector version.", + "Connector version has been unpublished as partner connector version" + ], + "readOnly": true, + "type": "string" + }, + "publishTime": { + "description": "Output only. Publish time.", + "format": "google-datetime", + "readOnly": true, + "type": "string" + }, + "publishedAs": { + "description": "Output only. Partner connector name. Will be set on the custom connector. Format: providers/partner/connectors//versions/", + "readOnly": true, + "type": "string" + }, + "publishedSource": { + "description": "Output only. Custom connector name. Will be set on the partner connector. Format: providers/customconnectors/connectors//versions/", + "readOnly": true, + "type": "string" + } + }, + "type": "object" + }, "RefreshConnectionSchemaMetadataRequest": { "description": "Request message for ConnectorsService.RefreshConnectionSchemaMetadata.", "id": "RefreshConnectionSchemaMetadataRequest", @@ -6745,11 +7010,11 @@ "id": "Resource", "properties": { "pathTemplate": { - "description": "Template to uniquely represent a Google Cloud resource in a format IAM expects This is a template that can have references to other values provided in the config variable template.", + "description": "Optional. Template to uniquely represent a Google Cloud resource in a format IAM expects This is a template that can have references to other values provided in the config variable template.", "type": "string" }, "type": { - "description": "Different types of resource supported.", + "description": "Optional. Different types of resource supported.", "enum": [ "TYPE_UNSPECIFIED", "GCP_PROJECT", @@ -6986,11 +7251,11 @@ "id": "RoleGrant", "properties": { "helperTextTemplate": { - "description": "Template that UI can use to provide helper text to customers.", + "description": "Optional. Template that UI can use to provide helper text to customers.", "type": "string" }, "principal": { - "description": "Prinicipal/Identity for whom the role need to assigned.", + "description": "Optional. 
Principal/Identity for whom the role needs to be assigned.", "enum": [ "PRINCIPAL_UNSPECIFIED", "CONNECTOR_SA" @@ -7003,10 +7268,10 @@ }, "resource": { "$ref": "Resource", - "description": "Resource on which the roles needs to be granted for the principal." + "description": "Optional. Resource on which the roles need to be granted for the principal." }, "roles": { - "description": "List of roles that need to be granted.", + "description": "Optional. List of roles that need to be granted.", "items": { "type": "string" }, @@ -7448,14 +7713,14 @@ "id": "SslConfig", "properties": { "additionalVariables": { - "description": "Additional SSL related field values", + "description": "Optional. Additional SSL related field values", "items": { "$ref": "ConfigVariable" }, "type": "array" }, "clientCertType": { - "description": "Type of Client Cert (PEM/JKS/.. etc.)", + "description": "Optional. Type of Client Cert (PEM/JKS/.. etc.)", "enum": [ "CERT_TYPE_UNSPECIFIED", "PEM" @@ -7468,22 +7733,22 @@ }, "clientCertificate": { "$ref": "Secret", - "description": "Client Certificate" + "description": "Optional. Client Certificate" }, "clientPrivateKey": { "$ref": "Secret", - "description": "Client Private Key" + "description": "Optional. Client Private Key" }, "clientPrivateKeyPass": { "$ref": "Secret", - "description": "Secret containing the passphrase protecting the Client Private Key" + "description": "Optional. Secret containing the passphrase protecting the Client Private Key" }, "privateServerCertificate": { "$ref": "Secret", - "description": "Private Server Certificate. Needs to be specified if trust model is `PRIVATE`." + "description": "Optional. Private Server Certificate. Needs to be specified if trust model is `PRIVATE`." }, "serverCertType": { - "description": "Type of Server Cert (PEM/JKS/.. etc.)", + "description": "Optional. Type of Server Cert (PEM/JKS/.. etc.)", "enum": [ "CERT_TYPE_UNSPECIFIED", "PEM" @@ -7495,7 +7760,7 @@ "type": "string" }, "trustModel": { - "description": "Trust Model of the SSL connection", + "description": "Optional. Trust Model of the SSL connection", "enum": [ "PUBLIC", "PRIVATE", @@ -7509,7 +7774,7 @@ "type": "string" }, "type": { - "description": "Controls the ssl type for the given connector version.", + "description": "Optional. Controls the ssl type for the given connector version.", "enum": [ "SSL_TYPE_UNSPECIFIED", "TLS", @@ -7523,7 +7788,7 @@ "type": "string" }, "useSsl": { - "description": "Bool for enabling SSL", + "description": "Optional. Bool for enabling SSL", "type": "boolean" } }, @@ -7850,6 +8115,21 @@ }, "type": "object" }, + "WebhookSubscriptions": { + "description": "WebhookSubscriptions has details of webhook subscriptions.", + "id": "WebhookSubscriptions", + "properties": { + "webhookData": { + "description": "Output only. 
Webhook data.", + "items": { + "$ref": "WebhookData" + }, + "readOnly": true, + "type": "array" + } + }, + "type": "object" + }, "WeeklyCycle": { "description": "Time window specified for weekly operations.", "id": "WeeklyCycle", @@ -7863,6 +8143,12 @@ } }, "type": "object" + }, + "WithdrawCustomConnectorVersionRequest": { + "description": "Request message for ConnectorsService.WithdrawCustomConnectorVersion", + "id": "WithdrawCustomConnectorVersionRequest", + "properties": {}, + "type": "object" } } } diff --git a/discovery/googleapis/contactcenterinsights__v1.json b/discovery/googleapis/contactcenterinsights__v1.json index fdf2bdd01..70d89548f 100644 --- a/discovery/googleapis/contactcenterinsights__v1.json +++ b/discovery/googleapis/contactcenterinsights__v1.json @@ -25,7 +25,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20241015", + "revision": "20241209", "rootUrl": "https://contactcenterinsights.googleapis.com/", "servicePath": "", "title": "Contact Center AI Insights API", @@ -476,45 +476,15 @@ } } }, - "authorizedViewSet": { + "authorizedViewSets": { "resources": { - "authorizedView": { + "authorizedViews": { "methods": { - "calculateStats": { - "description": "Gets conversation statistics.", - "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/authorizedViewSet/{authorizedViewSetId}/authorizedView/{authorizedViewId}:calculateStats", - "httpMethod": "GET", - "id": "contactcenterinsights.projects.locations.authorizedViewSet.authorizedView.calculateStats", - "parameterOrder": [ - "location" - ], - "parameters": { - "filter": { - "description": "A filter to reduce results to a specific subset. This field is useful for getting statistics about conversations with specific properties.", - "location": "query", - "type": "string" - }, - "location": { - "description": "Required. The location of the conversations.", - "location": "path", - "pattern": "^projects/[^/]+/locations/[^/]+/authorizedViewSet/[^/]+/authorizedView/[^/]+$", - "required": true, - "type": "string" - } - }, - "path": "v1/{+location}:calculateStats", - "response": { - "$ref": "GoogleCloudContactcenterinsightsV1CalculateStatsResponse" - }, - "scopes": [ - "https://www.googleapis.com/auth/cloud-platform" - ] - }, "queryMetrics": { "description": "Query metrics.", - "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/authorizedViewSet/{authorizedViewSetId}/authorizedView/{authorizedViewId}:queryMetrics", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/authorizedViewSets/{authorizedViewSetsId}/authorizedViews/{authorizedViewsId}:queryMetrics", "httpMethod": "POST", - "id": "contactcenterinsights.projects.locations.authorizedViewSet.authorizedView.queryMetrics", + "id": "contactcenterinsights.projects.locations.authorizedViewSets.authorizedViews.queryMetrics", "parameterOrder": [ "location" ], @@ -522,7 +492,7 @@ "location": { "description": "Required. The location of the data. 
\"projects/{project}/locations/{location}\"", "location": "path", - "pattern": "^projects/[^/]+/locations/[^/]+/authorizedViewSet/[^/]+/authorizedView/[^/]+$", + "pattern": "^projects/[^/]+/locations/[^/]+/authorizedViewSets/[^/]+/authorizedViews/[^/]+$", "required": true, "type": "string" } @@ -538,736 +508,35 @@ "https://www.googleapis.com/auth/cloud-platform" ] } - } - }, - "authorizedViews": { - "resources": { - "conversations": { - "resources": { - "feedbackLabels": { - "methods": { - "create": { - "description": "Create feedback label.", - "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/authorizedViewSet/{authorizedViewSetId}/authorizedViews/{authorizedViewsId}/conversations/{conversationsId}/feedbackLabels", - "httpMethod": "POST", - "id": "contactcenterinsights.projects.locations.authorizedViewSet.authorizedViews.conversations.feedbackLabels.create", - "parameterOrder": [ - "parent" - ], - "parameters": { - "feedbackLabelId": { - "description": "Optional. The ID of the feedback label to create. If one is not specified it will be generated by the server.", - "location": "query", - "type": "string" - }, - "parent": { - "description": "Required. The parent resource of the feedback label.", - "location": "path", - "pattern": "^projects/[^/]+/locations/[^/]+/authorizedViewSet/[^/]+/authorizedViews/[^/]+/conversations/[^/]+$", - "required": true, - "type": "string" - } - }, - "path": "v1/{+parent}/feedbackLabels", - "request": { - "$ref": "GoogleCloudContactcenterinsightsV1FeedbackLabel" - }, - "response": { - "$ref": "GoogleCloudContactcenterinsightsV1FeedbackLabel" - }, - "scopes": [ - "https://www.googleapis.com/auth/cloud-platform" - ] - }, - "delete": { - "description": "Delete feedback label.", - "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/authorizedViewSet/{authorizedViewSetId}/authorizedViews/{authorizedViewsId}/conversations/{conversationsId}/feedbackLabels/{feedbackLabelsId}", - "httpMethod": "DELETE", - "id": "contactcenterinsights.projects.locations.authorizedViewSet.authorizedViews.conversations.feedbackLabels.delete", - "parameterOrder": [ - "name" - ], - "parameters": { - "name": { - "description": "Required. The name of the feedback label to delete.", - "location": "path", - "pattern": "^projects/[^/]+/locations/[^/]+/authorizedViewSet/[^/]+/authorizedViews/[^/]+/conversations/[^/]+/feedbackLabels/[^/]+$", - "required": true, - "type": "string" - } - }, - "path": "v1/{+name}", - "response": { - "$ref": "GoogleProtobufEmpty" - }, - "scopes": [ - "https://www.googleapis.com/auth/cloud-platform" - ] - }, - "get": { - "description": "Get feedback label.", - "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/authorizedViewSet/{authorizedViewSetId}/authorizedViews/{authorizedViewsId}/conversations/{conversationsId}/feedbackLabels/{feedbackLabelsId}", - "httpMethod": "GET", - "id": "contactcenterinsights.projects.locations.authorizedViewSet.authorizedViews.conversations.feedbackLabels.get", - "parameterOrder": [ - "name" - ], - "parameters": { - "name": { - "description": "Required. 
The name of the feedback label to get.", - "location": "path", - "pattern": "^projects/[^/]+/locations/[^/]+/authorizedViewSet/[^/]+/authorizedViews/[^/]+/conversations/[^/]+/feedbackLabels/[^/]+$", - "required": true, - "type": "string" - } - }, - "path": "v1/{+name}", - "response": { - "$ref": "GoogleCloudContactcenterinsightsV1FeedbackLabel" - }, - "scopes": [ - "https://www.googleapis.com/auth/cloud-platform" - ] - }, - "list": { - "description": "List feedback labels.", - "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/authorizedViewSet/{authorizedViewSetId}/authorizedViews/{authorizedViewsId}/conversations/{conversationsId}/feedbackLabels", - "httpMethod": "GET", - "id": "contactcenterinsights.projects.locations.authorizedViewSet.authorizedViews.conversations.feedbackLabels.list", - "parameterOrder": [ - "parent" - ], - "parameters": { - "filter": { - "description": "Optional. A filter to reduce results to a specific subset. Supports disjunctions (OR) and conjunctions (AND). Automatically sorts by conversation ID. To sort by all feedback labels in a project see ListAllFeedbackLabels. Supported fields: * `issue_model_id` * `qa_question_id` * `qa_scorecard_id` * `min_create_time` * `max_create_time` * `min_update_time` * `max_update_time` * `feedback_label_type`: QUALITY_AI, TOPIC_MODELING", - "location": "query", - "type": "string" - }, - "pageSize": { - "description": "Optional. The maximum number of feedback labels to return in the response. A valid page size ranges from 0 to 100,000 inclusive. If the page size is zero or unspecified, a default page size of 100 will be chosen. Note that a call might return fewer results than the requested page size.", - "format": "int32", - "location": "query", - "type": "integer" - }, - "pageToken": { - "description": "Optional. The value returned by the last `ListFeedbackLabelsResponse`. This value indicates that this is a continuation of a prior `ListFeedbackLabels` call and that the system should return the next page of data.", - "location": "query", - "type": "string" - }, - "parent": { - "description": "Required. The parent resource of the feedback labels.", - "location": "path", - "pattern": "^projects/[^/]+/locations/[^/]+/authorizedViewSet/[^/]+/authorizedViews/[^/]+/conversations/[^/]+$", - "required": true, - "type": "string" - } - }, - "path": "v1/{+parent}/feedbackLabels", - "response": { - "$ref": "GoogleCloudContactcenterinsightsV1ListFeedbackLabelsResponse" - }, - "scopes": [ - "https://www.googleapis.com/auth/cloud-platform" - ] - }, - "patch": { - "description": "Update feedback label.", - "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/authorizedViewSet/{authorizedViewSetId}/authorizedViews/{authorizedViewsId}/conversations/{conversationsId}/feedbackLabels/{feedbackLabelsId}", - "httpMethod": "PATCH", - "id": "contactcenterinsights.projects.locations.authorizedViewSet.authorizedViews.conversations.feedbackLabels.patch", - "parameterOrder": [ - "name" - ], - "parameters": { - "name": { - "description": "Immutable. Resource name of the FeedbackLabel. Format: projects/{project}/locations/{location}/conversations/{conversation}/feedbackLabels/{feedback_label}", - "location": "path", - "pattern": "^projects/[^/]+/locations/[^/]+/authorizedViewSet/[^/]+/authorizedViews/[^/]+/conversations/[^/]+/feedbackLabels/[^/]+$", - "required": true, - "type": "string" - }, - "updateMask": { - "description": "Required. 
The list of fields to be updated.", - "format": "google-fieldmask", - "location": "query", - "type": "string" - } - }, - "path": "v1/{+name}", - "request": { - "$ref": "GoogleCloudContactcenterinsightsV1FeedbackLabel" - }, - "response": { - "$ref": "GoogleCloudContactcenterinsightsV1FeedbackLabel" - }, - "scopes": [ - "https://www.googleapis.com/auth/cloud-platform" - ] - } - } - } - } - } - } - } - } - }, - "authorizedViewSets": { - "resources": { - "authorizedViews": { - "methods": { - "getIamPolicy": { - "description": "Gets the access control policy for a resource. Returns an empty policy if the resource exists and does not have a policy set.", - "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/authorizedViewSets/{authorizedViewSetsId}/authorizedViews/{authorizedViewsId}:getIamPolicy", - "httpMethod": "GET", - "id": "contactcenterinsights.projects.locations.authorizedViewSets.authorizedViews.getIamPolicy", - "parameterOrder": [ - "resource" - ], - "parameters": { - "options.requestedPolicyVersion": { - "description": "Optional. The maximum policy version that will be used to format the policy. Valid values are 0, 1, and 3. Requests specifying an invalid value will be rejected. Requests for policies with any conditional role bindings must specify version 3. Policies with no conditional role bindings may specify any valid value or leave the field unset. The policy in the response might use the policy version that you specified, or it might use a lower policy version. For example, if you specify version 3, but the policy has no conditional role bindings, the response uses version 1. To learn which resources support conditions in their IAM policies, see the [IAM documentation](https://cloud.google.com/iam/help/conditions/resource-policies).", - "format": "int32", - "location": "query", - "type": "integer" - }, - "resource": { - "description": "REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field.", - "location": "path", - "pattern": "^projects/[^/]+/locations/[^/]+/authorizedViewSets/[^/]+/authorizedViews/[^/]+$", - "required": true, - "type": "string" - } - }, - "path": "v1/{+resource}:getIamPolicy", - "response": { - "$ref": "GoogleIamV1Policy" - }, - "scopes": [ - "https://www.googleapis.com/auth/cloud-platform" - ] - }, - "setIamPolicy": { - "description": "Sets the access control policy on the specified resource. Replaces any existing policy. Can return `NOT_FOUND`, `INVALID_ARGUMENT`, and `PERMISSION_DENIED` errors.", - "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/authorizedViewSets/{authorizedViewSetsId}/authorizedViews/{authorizedViewsId}:setIamPolicy", - "httpMethod": "POST", - "id": "contactcenterinsights.projects.locations.authorizedViewSets.authorizedViews.setIamPolicy", - "parameterOrder": [ - "resource" - ], - "parameters": { - "resource": { - "description": "REQUIRED: The resource for which the policy is being specified. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field.", - "location": "path", - "pattern": "^projects/[^/]+/locations/[^/]+/authorizedViewSets/[^/]+/authorizedViews/[^/]+$", - "required": true, - "type": "string" - } - }, - "path": "v1/{+resource}:setIamPolicy", - "request": { - "$ref": "GoogleIamV1SetIamPolicyRequest" - }, - "response": { - "$ref": "GoogleIamV1Policy" - }, - "scopes": [ - "https://www.googleapis.com/auth/cloud-platform" - ] - }, - "testIamPermissions": { - "description": "Returns permissions that a caller has on the specified resource. If the resource does not exist, this will return an empty set of permissions, not a `NOT_FOUND` error. Note: This operation is designed to be used for building permission-aware UIs and command-line tools, not for authorization checking. This operation may \"fail open\" without warning.", - "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/authorizedViewSets/{authorizedViewSetsId}/authorizedViews/{authorizedViewsId}:testIamPermissions", - "httpMethod": "POST", - "id": "contactcenterinsights.projects.locations.authorizedViewSets.authorizedViews.testIamPermissions", - "parameterOrder": [ - "resource" - ], - "parameters": { - "resource": { - "description": "REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field.", - "location": "path", - "pattern": "^projects/[^/]+/locations/[^/]+/authorizedViewSets/[^/]+/authorizedViews/[^/]+$", - "required": true, - "type": "string" - } - }, - "path": "v1/{+resource}:testIamPermissions", - "request": { - "$ref": "GoogleIamV1TestIamPermissionsRequest" - }, - "response": { - "$ref": "GoogleIamV1TestIamPermissionsResponse" - }, - "scopes": [ - "https://www.googleapis.com/auth/cloud-platform" - ] - } }, "resources": { "conversations": { "methods": { - "create": { - "description": "Creates a conversation. Note that this method does not support audio transcription or redaction. Use `conversations.upload` instead.", - "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/authorizedViewSets/{authorizedViewSetsId}/authorizedViews/{authorizedViewsId}/conversations", - "httpMethod": "POST", - "id": "contactcenterinsights.projects.locations.authorizedViewSets.authorizedViews.conversations.create", - "parameterOrder": [ - "parent" - ], - "parameters": { - "conversationId": { - "description": "A unique ID for the new conversation. This ID will become the final component of the conversation's resource name. If no ID is specified, a server-generated ID will be used. This value should be 4-64 characters and must match the regular expression `^[a-z0-9-]{4,64}$`. Valid characters are `a-z-`", - "location": "query", - "type": "string" - }, - "parent": { - "description": "Required. 
The parent resource of the conversation.", - "location": "path", - "pattern": "^projects/[^/]+/locations/[^/]+/authorizedViewSets/[^/]+/authorizedViews/[^/]+$", - "required": true, - "type": "string" - } - }, - "path": "v1/{+parent}/conversations", - "request": { - "$ref": "GoogleCloudContactcenterinsightsV1Conversation" - }, - "response": { - "$ref": "GoogleCloudContactcenterinsightsV1Conversation" - }, - "scopes": [ - "https://www.googleapis.com/auth/cloud-platform" - ] - }, - "delete": { - "description": "Deletes a conversation.", - "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/authorizedViewSets/{authorizedViewSetsId}/authorizedViews/{authorizedViewsId}/conversations/{conversationsId}", - "httpMethod": "DELETE", - "id": "contactcenterinsights.projects.locations.authorizedViewSets.authorizedViews.conversations.delete", - "parameterOrder": [ - "name" - ], - "parameters": { - "force": { - "description": "If set to true, all of this conversation's analyses will also be deleted. Otherwise, the request will only succeed if the conversation has no analyses.", - "location": "query", - "type": "boolean" - }, - "name": { - "description": "Required. The name of the conversation to delete.", - "location": "path", - "pattern": "^projects/[^/]+/locations/[^/]+/authorizedViewSets/[^/]+/authorizedViews/[^/]+/conversations/[^/]+$", - "required": true, - "type": "string" - } - }, - "path": "v1/{+name}", - "response": { - "$ref": "GoogleProtobufEmpty" - }, - "scopes": [ - "https://www.googleapis.com/auth/cloud-platform" - ] - }, - "get": { - "description": "Gets a conversation.", - "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/authorizedViewSets/{authorizedViewSetsId}/authorizedViews/{authorizedViewsId}/conversations/{conversationsId}", - "httpMethod": "GET", - "id": "contactcenterinsights.projects.locations.authorizedViewSets.authorizedViews.conversations.get", - "parameterOrder": [ - "name" - ], - "parameters": { - "name": { - "description": "Required. The name of the conversation to get.", - "location": "path", - "pattern": "^projects/[^/]+/locations/[^/]+/authorizedViewSets/[^/]+/authorizedViews/[^/]+/conversations/[^/]+$", - "required": true, - "type": "string" - }, - "view": { - "description": "The level of details of the conversation. Default is `FULL`.", - "enum": [ - "CONVERSATION_VIEW_UNSPECIFIED", - "FULL", - "BASIC" - ], - "enumDescriptions": [ - "The conversation view is not specified. * Defaults to `FULL` in `GetConversationRequest`. * Defaults to `BASIC` in `ListConversationsRequest`.", - "Populates all fields in the conversation.", - "Populates all fields in the conversation except the transcript." 
- ], - "location": "query", - "type": "string" - } - }, - "path": "v1/{+name}", - "response": { - "$ref": "GoogleCloudContactcenterinsightsV1Conversation" - }, - "scopes": [ - "https://www.googleapis.com/auth/cloud-platform" - ] - }, - "list": { - "description": "Lists conversations.", - "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/authorizedViewSets/{authorizedViewSetsId}/authorizedViews/{authorizedViewsId}/conversations", + "calculateStats": { + "description": "Gets conversation statistics.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/authorizedViewSets/{authorizedViewSetsId}/authorizedViews/{authorizedViewsId}/conversations:calculateStats", "httpMethod": "GET", - "id": "contactcenterinsights.projects.locations.authorizedViewSets.authorizedViews.conversations.list", + "id": "contactcenterinsights.projects.locations.authorizedViewSets.authorizedViews.conversations.calculateStats", "parameterOrder": [ - "parent" + "location" ], "parameters": { "filter": { - "description": "A filter to reduce results to a specific subset. Useful for querying conversations with specific properties.", - "location": "query", - "type": "string" - }, - "orderBy": { - "description": "Optional. The attribute by which to order conversations in the response. If empty, conversations will be ordered by descending creation time. Supported values are one of the following: * create_time * customer_satisfaction_rating * duration * latest_analysis * start_time * turn_count The default sort order is ascending. To specify order, append `asc` or `desc` (`create_time desc`). For more details, see [Google AIPs Ordering](https://google.aip.dev/132#ordering).", - "location": "query", - "type": "string" - }, - "pageSize": { - "description": "The maximum number of conversations to return in the response. A valid page size ranges from 0 to 100,000 inclusive. If the page size is zero or unspecified, a default page size of 100 will be chosen. Note that a call might return fewer results than the requested page size.", - "format": "int32", - "location": "query", - "type": "integer" - }, - "pageToken": { - "description": "The value returned by the last `ListConversationsResponse`. This value indicates that this is a continuation of a prior `ListConversations` call and that the system should return the next page of data.", - "location": "query", - "type": "string" - }, - "parent": { - "description": "Required. The parent resource of the conversation.", - "location": "path", - "pattern": "^projects/[^/]+/locations/[^/]+/authorizedViewSets/[^/]+/authorizedViews/[^/]+$", - "required": true, - "type": "string" - }, - "view": { - "description": "The level of details of the conversation. Default is `BASIC`.", - "enum": [ - "CONVERSATION_VIEW_UNSPECIFIED", - "FULL", - "BASIC" - ], - "enumDescriptions": [ - "The conversation view is not specified. * Defaults to `FULL` in `GetConversationRequest`. * Defaults to `BASIC` in `ListConversationsRequest`.", - "Populates all fields in the conversation.", - "Populates all fields in the conversation except the transcript." 
- ], - "location": "query", - "type": "string" - } - }, - "path": "v1/{+parent}/conversations", - "response": { - "$ref": "GoogleCloudContactcenterinsightsV1ListConversationsResponse" - }, - "scopes": [ - "https://www.googleapis.com/auth/cloud-platform" - ] - }, - "patch": { - "description": "Updates a conversation.", - "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/authorizedViewSets/{authorizedViewSetsId}/authorizedViews/{authorizedViewsId}/conversations/{conversationsId}", - "httpMethod": "PATCH", - "id": "contactcenterinsights.projects.locations.authorizedViewSets.authorizedViews.conversations.patch", - "parameterOrder": [ - "name" - ], - "parameters": { - "name": { - "description": "Immutable. The resource name of the conversation. Format: projects/{project}/locations/{location}/conversations/{conversation}", - "location": "path", - "pattern": "^projects/[^/]+/locations/[^/]+/authorizedViewSets/[^/]+/authorizedViews/[^/]+/conversations/[^/]+$", - "required": true, - "type": "string" - }, - "updateMask": { - "description": "The list of fields to be updated. All possible fields can be updated by passing `*`, or a subset of the following updateable fields can be provided: * `agent_id` * `language_code` * `labels` * `metadata` * `quality_metadata` * `call_metadata` * `start_time` * `expire_time` or `ttl` * `data_source.gcs_source.audio_uri` or `data_source.dialogflow_source.audio_uri`", - "format": "google-fieldmask", - "location": "query", - "type": "string" - } - }, - "path": "v1/{+name}", - "request": { - "$ref": "GoogleCloudContactcenterinsightsV1Conversation" - }, - "response": { - "$ref": "GoogleCloudContactcenterinsightsV1Conversation" - }, - "scopes": [ - "https://www.googleapis.com/auth/cloud-platform" - ] - }, - "upload": { - "description": "Create a long-running conversation upload operation. This method differs from `CreateConversation` by allowing audio transcription and optional DLP redaction.", - "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/authorizedViewSets/{authorizedViewSetsId}/authorizedViews/{authorizedViewsId}/conversations:upload", - "httpMethod": "POST", - "id": "contactcenterinsights.projects.locations.authorizedViewSets.authorizedViews.conversations.upload", - "parameterOrder": [ - "parent" - ], - "parameters": { - "conversationId": { - "description": "Optional. A unique ID for the new conversation. This ID will become the final component of the conversation's resource name. If no ID is specified, a server-generated ID will be used. This value should be 4-64 characters and must match the regular expression `^[a-z0-9-]{4,64}$`. Valid characters are `a-z-`", + "description": "A filter to reduce results to a specific subset. This field is useful for getting statistics about conversations with specific properties.", "location": "query", "type": "string" }, - "parent": { - "description": "Required. The parent resource of the conversation.", + "location": { + "description": "Required. The location of the conversations.", "location": "path", "pattern": "^projects/[^/]+/locations/[^/]+/authorizedViewSets/[^/]+/authorizedViews/[^/]+$", "required": true, "type": "string" - }, - "redactionConfig.deidentifyTemplate": { - "description": "The fully-qualified DLP deidentify template resource name. Format: `projects/{project}/deidentifyTemplates/{template}`", - "location": "query", - "type": "string" - }, - "redactionConfig.inspectTemplate": { - "description": "The fully-qualified DLP inspect template resource name. 
Format: `projects/{project}/locations/{location}/inspectTemplates/{template}`", - "location": "query", - "type": "string" - }, - "speechConfig.speechRecognizer": { - "description": "The fully-qualified Speech Recognizer resource name. Format: `projects/{project_id}/locations/{location}/recognizer/{recognizer}`", - "location": "query", - "type": "string" } }, - "path": "v1/{+parent}/conversations:upload", - "request": { - "$ref": "GoogleCloudContactcenterinsightsV1Conversation" - }, + "path": "v1/{+location}/conversations:calculateStats", "response": { - "$ref": "GoogleLongrunningOperation" - }, - "scopes": [ - "https://www.googleapis.com/auth/cloud-platform" - ] - } - }, - "resources": { - "analyses": { - "methods": { - "create": { - "description": "Creates an analysis. The long running operation is done when the analysis has completed.", - "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/authorizedViewSets/{authorizedViewSetsId}/authorizedViews/{authorizedViewsId}/conversations/{conversationsId}/analyses", - "httpMethod": "POST", - "id": "contactcenterinsights.projects.locations.authorizedViewSets.authorizedViews.conversations.analyses.create", - "parameterOrder": [ - "parent" - ], - "parameters": { - "parent": { - "description": "Required. The parent resource of the analysis.", - "location": "path", - "pattern": "^projects/[^/]+/locations/[^/]+/authorizedViewSets/[^/]+/authorizedViews/[^/]+/conversations/[^/]+$", - "required": true, - "type": "string" - } - }, - "path": "v1/{+parent}/analyses", - "request": { - "$ref": "GoogleCloudContactcenterinsightsV1Analysis" - }, - "response": { - "$ref": "GoogleLongrunningOperation" - }, - "scopes": [ - "https://www.googleapis.com/auth/cloud-platform" - ] - }, - "delete": { - "description": "Deletes an analysis.", - "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/authorizedViewSets/{authorizedViewSetsId}/authorizedViews/{authorizedViewsId}/conversations/{conversationsId}/analyses/{analysesId}", - "httpMethod": "DELETE", - "id": "contactcenterinsights.projects.locations.authorizedViewSets.authorizedViews.conversations.analyses.delete", - "parameterOrder": [ - "name" - ], - "parameters": { - "name": { - "description": "Required. The name of the analysis to delete.", - "location": "path", - "pattern": "^projects/[^/]+/locations/[^/]+/authorizedViewSets/[^/]+/authorizedViews/[^/]+/conversations/[^/]+/analyses/[^/]+$", - "required": true, - "type": "string" - } - }, - "path": "v1/{+name}", - "response": { - "$ref": "GoogleProtobufEmpty" - }, - "scopes": [ - "https://www.googleapis.com/auth/cloud-platform" - ] - }, - "get": { - "description": "Gets an analysis.", - "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/authorizedViewSets/{authorizedViewSetsId}/authorizedViews/{authorizedViewsId}/conversations/{conversationsId}/analyses/{analysesId}", - "httpMethod": "GET", - "id": "contactcenterinsights.projects.locations.authorizedViewSets.authorizedViews.conversations.analyses.get", - "parameterOrder": [ - "name" - ], - "parameters": { - "name": { - "description": "Required. 
The name of the analysis to get.", - "location": "path", - "pattern": "^projects/[^/]+/locations/[^/]+/authorizedViewSets/[^/]+/authorizedViews/[^/]+/conversations/[^/]+/analyses/[^/]+$", - "required": true, - "type": "string" - } - }, - "path": "v1/{+name}", - "response": { - "$ref": "GoogleCloudContactcenterinsightsV1Analysis" - }, - "scopes": [ - "https://www.googleapis.com/auth/cloud-platform" - ] - }, - "list": { - "description": "Lists analyses.", - "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/authorizedViewSets/{authorizedViewSetsId}/authorizedViews/{authorizedViewsId}/conversations/{conversationsId}/analyses", - "httpMethod": "GET", - "id": "contactcenterinsights.projects.locations.authorizedViewSets.authorizedViews.conversations.analyses.list", - "parameterOrder": [ - "parent" - ], - "parameters": { - "filter": { - "description": "A filter to reduce results to a specific subset. Useful for querying conversations with specific properties.", - "location": "query", - "type": "string" - }, - "pageSize": { - "description": "The maximum number of analyses to return in the response. If this value is zero, the service will select a default size. A call might return fewer objects than requested. A non-empty `next_page_token` in the response indicates that more data is available.", - "format": "int32", - "location": "query", - "type": "integer" - }, - "pageToken": { - "description": "The value returned by the last `ListAnalysesResponse`; indicates that this is a continuation of a prior `ListAnalyses` call and the system should return the next page of data.", - "location": "query", - "type": "string" - }, - "parent": { - "description": "Required. The parent resource of the analyses.", - "location": "path", - "pattern": "^projects/[^/]+/locations/[^/]+/authorizedViewSets/[^/]+/authorizedViews/[^/]+/conversations/[^/]+$", - "required": true, - "type": "string" - } - }, - "path": "v1/{+parent}/analyses", - "response": { - "$ref": "GoogleCloudContactcenterinsightsV1ListAnalysesResponse" - }, - "scopes": [ - "https://www.googleapis.com/auth/cloud-platform" - ] - } - } - } - } - }, - "operations": { - "methods": { - "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. 
On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", - "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/authorizedViewSets/{authorizedViewSetsId}/authorizedViews/{authorizedViewsId}/operations/{operationsId}:cancel", - "httpMethod": "POST", - "id": "contactcenterinsights.projects.locations.authorizedViewSets.authorizedViews.operations.cancel", - "parameterOrder": [ - "name" - ], - "parameters": { - "name": { - "description": "The name of the operation resource to be cancelled.", - "location": "path", - "pattern": "^projects/[^/]+/locations/[^/]+/authorizedViewSets/[^/]+/authorizedViews/[^/]+/operations/[^/]+$", - "required": true, - "type": "string" - } - }, - "path": "v1/{+name}:cancel", - "response": { - "$ref": "GoogleProtobufEmpty" - }, - "scopes": [ - "https://www.googleapis.com/auth/cloud-platform" - ] - }, - "get": { - "description": "Gets the latest state of a long-running operation. Clients can use this method to poll the operation result at intervals as recommended by the API service.", - "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/authorizedViewSets/{authorizedViewSetsId}/authorizedViews/{authorizedViewsId}/operations/{operationsId}", - "httpMethod": "GET", - "id": "contactcenterinsights.projects.locations.authorizedViewSets.authorizedViews.operations.get", - "parameterOrder": [ - "name" - ], - "parameters": { - "name": { - "description": "The name of the operation resource.", - "location": "path", - "pattern": "^projects/[^/]+/locations/[^/]+/authorizedViewSets/[^/]+/authorizedViews/[^/]+/operations/[^/]+$", - "required": true, - "type": "string" - } - }, - "path": "v1/{+name}", - "response": { - "$ref": "GoogleLongrunningOperation" - }, - "scopes": [ - "https://www.googleapis.com/auth/cloud-platform" - ] - }, - "list": { - "description": "Lists operations that match the specified filter in the request. If the server doesn't support this method, it returns `UNIMPLEMENTED`.", - "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/authorizedViewSets/{authorizedViewSetsId}/authorizedViews/{authorizedViewsId}/operations", - "httpMethod": "GET", - "id": "contactcenterinsights.projects.locations.authorizedViewSets.authorizedViews.operations.list", - "parameterOrder": [ - "name" - ], - "parameters": { - "filter": { - "description": "The standard list filter.", - "location": "query", - "type": "string" - }, - "name": { - "description": "The name of the operation's parent resource.", - "location": "path", - "pattern": "^projects/[^/]+/locations/[^/]+/authorizedViewSets/[^/]+/authorizedViews/[^/]+$", - "required": true, - "type": "string" - }, - "pageSize": { - "description": "The standard list page size.", - "format": "int32", - "location": "query", - "type": "integer" - }, - "pageToken": { - "description": "The standard list page token.", - "location": "query", - "type": "string" - } - }, - "path": "v1/{+name}/operations", - "response": { - "$ref": "GoogleLongrunningListOperationsResponse" + "$ref": "GoogleCloudContactcenterinsightsV1CalculateStatsResponse" }, "scopes": [ "https://www.googleapis.com/auth/cloud-platform" @@ -2370,7 +1639,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. 
If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "contactcenterinsights.projects.locations.operations.cancel", @@ -4208,7 +3477,7 @@ "additionalProperties": { "type": "string" }, - "description": "A map for the user to specify any custom fields. A maximum of 20 labels per conversation is allowed, with a maximum of 256 characters per entry.", + "description": "A map for the user to specify any custom fields. A maximum of 100 labels per conversation is allowed, with a maximum of 256 characters per entry.", "type": "object" }, "languageCode": { @@ -4436,6 +3705,24 @@ "description": "A user-specified string representing the agent.", "type": "string" }, + "agentType": { + "description": "The agent type, e.g. HUMAN_AGENT.", + "enum": [ + "ROLE_UNSPECIFIED", + "HUMAN_AGENT", + "AUTOMATED_AGENT", + "END_USER", + "ANY_AGENT" + ], + "enumDescriptions": [ + "Participant's role is not set.", + "Participant is a human agent.", + "Participant is an automated agent.", + "Participant is an end user who conversed with the contact center.", + "Participant is either a human or automated agent." + ], + "type": "string" + }, "displayName": { "description": "The agent's name.", "type": "string" @@ -4444,9 +3731,21 @@ "description": "A user-provided string indicating the outcome of the agent's segment of the call.", "type": "string" }, + "location": { + "description": "The agent's location.", + "type": "string" + }, "team": { - "description": "A user-specified string representing the agent's team.", + "deprecated": true, + "description": "A user-specified string representing the agent's team. Deprecated in favor of the `teams` field.", "type": "string" + }, + "teams": { + "description": "User-specified strings representing the agent's teams.", + "items": { + "type": "string" + }, + "type": "array" } }, "type": "object" @@ -5314,7 +4613,12 @@ "GoogleCloudContactcenterinsightsV1ImportIssueModelResponse": { "description": "Response from import issue model", "id": "GoogleCloudContactcenterinsightsV1ImportIssueModelResponse", - "properties": {}, + "properties": { + "issueModel": { + "$ref": "GoogleCloudContactcenterinsightsV1IssueModel", + "description": "The issue model that was imported." + } + }, "type": "object" }, "GoogleCloudContactcenterinsightsV1IngestConversationsMetadata": { @@ -5465,7 +4769,7 @@ "type": "string" }, "customMetadataKeys": { - "description": "Optional. 
Custom keys to extract as conversation labels from metadata files in `metadata_bucket_uri`. Keys not included in this field will be ignored. Note that there is a limit of 20 labels per conversation.", + "description": "Optional. Custom keys to extract as conversation labels from metadata files in `metadata_bucket_uri`. Keys not included in this field will be ignored. Note that there is a limit of 100 labels per conversation.", "items": { "type": "string" }, @@ -6215,11 +5519,6 @@ "description": "The conversation the answer applies to.", "type": "string" }, - "potentialScore": { - "description": "The maximum potential score of the question. If the question was answered using `na_value`, this field will be zero.", - "format": "double", - "type": "number" - }, "qaQuestion": { "description": "The QaQuestion answered by this answer.", "type": "string" @@ -6264,7 +5563,7 @@ "type": "object" }, "GoogleCloudContactcenterinsightsV1QaAnswerAnswerValue": { - "description": "Message for holding the value of the answer. QaQuestion.AnswerChoice defines the possible answer values for a question.", + "description": "Message for holding the value of a QaAnswer. QaQuestion.AnswerChoice defines the possible answer values for a question.", "id": "GoogleCloudContactcenterinsightsV1QaAnswerAnswerValue", "properties": { "boolValue": { @@ -6276,17 +5575,30 @@ "type": "string" }, "naValue": { - "description": "A value of \"Not Applicable (N/A)\".", + "description": "A value of \"Not Applicable (N/A)\". Should only ever be `true`.", "type": "boolean" }, + "normalizedScore": { + "description": "Output only. Normalized score of the questions. Calculated as score / potential_score.", + "format": "double", + "readOnly": true, + "type": "number" + }, "numValue": { "description": "Numerical value.", "format": "double", "type": "number" }, + "potentialScore": { + "description": "Output only. The maximum potential score of the question.", + "format": "double", + "readOnly": true, + "type": "number" + }, "score": { - "description": "Numerical score of the answer.", + "description": "Output only. Numerical score of the answer.", "format": "double", + "readOnly": true, "type": "number" }, "strValue": { @@ -6692,7 +6004,8 @@ "DAILY", "HOURLY", "PER_MINUTE", - "PER_5_MINUTES" + "PER_5_MINUTES", + "MONTHLY" ], "enumDescriptions": [ "The time granularity is unspecified and will default to NONE.", @@ -6700,7 +6013,8 @@ "Data points in the time series will aggregate at a daily granularity. 1 day means [midnight to midnight).", "Data points in the time series will aggregate at a daily granularity. 1 HOUR means [01:00 to 02:00).", "Data points in the time series will aggregate at a daily granularity. PER_MINUTE means [01:00 to 01:01).", - "Data points in the time series will aggregate at a 1 minute granularity. PER_5_MINUTES means [01:00 to 01:05)." + "Data points in the time series will aggregate at a 1 minute granularity. PER_5_MINUTES means [01:00 to 01:05).", + "Data points in the time series will aggregate at a monthly granularity. 1 MONTH means [01st of the month to 1st of the next month)." ], "type": "string" } @@ -7816,7 +7130,7 @@ "additionalProperties": { "type": "string" }, - "description": "A map for the user to specify any custom fields. A maximum of 20 labels per conversation is allowed, with a maximum of 256 characters per entry.", + "description": "A map for the user to specify any custom fields. 
A maximum of 100 labels per conversation is allowed, with a maximum of 256 characters per entry.", "type": "object" }, "languageCode": { @@ -8044,6 +7358,24 @@ "description": "A user-specified string representing the agent.", "type": "string" }, + "agentType": { + "description": "The agent type, e.g. HUMAN_AGENT.", + "enum": [ + "ROLE_UNSPECIFIED", + "HUMAN_AGENT", + "AUTOMATED_AGENT", + "END_USER", + "ANY_AGENT" + ], + "enumDescriptions": [ + "Participant's role is not set.", + "Participant is a human agent.", + "Participant is an automated agent.", + "Participant is an end user who conversed with the contact center.", + "Participant is either a human or automated agent." + ], + "type": "string" + }, "displayName": { "description": "The agent's name.", "type": "string" @@ -8052,9 +7384,21 @@ "description": "A user-provided string indicating the outcome of the agent's segment of the call.", "type": "string" }, + "location": { + "description": "The agent's location.", + "type": "string" + }, "team": { - "description": "A user-specified string representing the agent's team.", + "deprecated": true, + "description": "A user-specified string representing the agent's team. Deprecated in favor of the `teams` field.", "type": "string" + }, + "teams": { + "description": "User-specified strings representing the agent's teams.", + "items": { + "type": "string" + }, + "type": "array" } }, "type": "object" @@ -8905,7 +8249,12 @@ "GoogleCloudContactcenterinsightsV1alpha1ImportIssueModelResponse": { "description": "Response from import issue model", "id": "GoogleCloudContactcenterinsightsV1alpha1ImportIssueModelResponse", - "properties": {}, + "properties": { + "issueModel": { + "$ref": "GoogleCloudContactcenterinsightsV1alpha1IssueModel", + "description": "The issue model that was imported." + } + }, "type": "object" }, "GoogleCloudContactcenterinsightsV1alpha1IngestConversationsMetadata": { @@ -9056,7 +8405,7 @@ "type": "string" }, "customMetadataKeys": { - "description": "Optional. Custom keys to extract as conversation labels from metadata files in `metadata_bucket_uri`. Keys not included in this field will be ignored. Note that there is a limit of 20 labels per conversation.", + "description": "Optional. Custom keys to extract as conversation labels from metadata files in `metadata_bucket_uri`. Keys not included in this field will be ignored. Note that there is a limit of 100 labels per conversation.", "items": { "type": "string" }, @@ -9452,11 +8801,6 @@ "description": "The conversation the answer applies to.", "type": "string" }, - "potentialScore": { - "description": "The maximum potential score of the question. If the question was answered using `na_value`, this field will be zero.", - "format": "double", - "type": "number" - }, "qaQuestion": { "description": "The QaQuestion answered by this answer.", "type": "string" @@ -9501,7 +8845,7 @@ "type": "object" }, "GoogleCloudContactcenterinsightsV1alpha1QaAnswerAnswerValue": { - "description": "Message for holding the value of the answer. QaQuestion.AnswerChoice defines the possible answer values for a question.", + "description": "Message for holding the value of a QaAnswer. QaQuestion.AnswerChoice defines the possible answer values for a question.", "id": "GoogleCloudContactcenterinsightsV1alpha1QaAnswerAnswerValue", "properties": { "boolValue": { @@ -9513,17 +8857,30 @@ "type": "string" }, "naValue": { - "description": "A value of \"Not Applicable (N/A)\".", + "description": "A value of \"Not Applicable (N/A)\". 
Should only ever be `true`.", "type": "boolean" }, + "normalizedScore": { + "description": "Output only. Normalized score of the questions. Calculated as score / potential_score.", + "format": "double", + "readOnly": true, + "type": "number" + }, "numValue": { "description": "Numerical value.", "format": "double", "type": "number" }, + "potentialScore": { + "description": "Output only. The maximum potential score of the question.", + "format": "double", + "readOnly": true, + "type": "number" + }, "score": { - "description": "Numerical score of the answer.", + "description": "Output only. Numerical score of the answer.", "format": "double", + "readOnly": true, "type": "number" }, "strValue": { @@ -10115,151 +9472,6 @@ }, "type": "object" }, - "GoogleIamV1AuditConfig": { - "description": "Specifies the audit configuration for a service. The configuration determines which permission types are logged, and what identities, if any, are exempted from logging. An AuditConfig must have one or more AuditLogConfigs. If there are AuditConfigs for both `allServices` and a specific service, the union of the two AuditConfigs is used for that service: the log_types specified in each AuditConfig are enabled, and the exempted_members in each AuditLogConfig are exempted. Example Policy with multiple AuditConfigs: { \"audit_configs\": [ { \"service\": \"allServices\", \"audit_log_configs\": [ { \"log_type\": \"DATA_READ\", \"exempted_members\": [ \"user:jose@example.com\" ] }, { \"log_type\": \"DATA_WRITE\" }, { \"log_type\": \"ADMIN_READ\" } ] }, { \"service\": \"sampleservice.googleapis.com\", \"audit_log_configs\": [ { \"log_type\": \"DATA_READ\" }, { \"log_type\": \"DATA_WRITE\", \"exempted_members\": [ \"user:aliya@example.com\" ] } ] } ] } For sampleservice, this policy enables DATA_READ, DATA_WRITE and ADMIN_READ logging. It also exempts `jose@example.com` from DATA_READ logging, and `aliya@example.com` from DATA_WRITE logging.", - "id": "GoogleIamV1AuditConfig", - "properties": { - "auditLogConfigs": { - "description": "The configuration for logging of each type of permission.", - "items": { - "$ref": "GoogleIamV1AuditLogConfig" - }, - "type": "array" - }, - "service": { - "description": "Specifies a service that will be enabled for audit logging. For example, `storage.googleapis.com`, `cloudsql.googleapis.com`. `allServices` is a special value that covers all services.", - "type": "string" - } - }, - "type": "object" - }, - "GoogleIamV1AuditLogConfig": { - "description": "Provides the configuration for logging a type of permissions. Example: { \"audit_log_configs\": [ { \"log_type\": \"DATA_READ\", \"exempted_members\": [ \"user:jose@example.com\" ] }, { \"log_type\": \"DATA_WRITE\" } ] } This enables 'DATA_READ' and 'DATA_WRITE' logging, while exempting jose@example.com from DATA_READ logging.", - "id": "GoogleIamV1AuditLogConfig", - "properties": { - "exemptedMembers": { - "description": "Specifies the identities that do not cause logging for this type of permission. Follows the same format of Binding.members.", - "items": { - "type": "string" - }, - "type": "array" - }, - "logType": { - "description": "The log type that this config enables.", - "enum": [ - "LOG_TYPE_UNSPECIFIED", - "ADMIN_READ", - "DATA_WRITE", - "DATA_READ" - ], - "enumDescriptions": [ - "Default case. Should never be this.", - "Admin reads. Example: CloudIAM getIamPolicy", - "Data writes. Example: CloudSQL Users create", - "Data reads. 
Example: CloudSQL Users list" - ], - "type": "string" - } - }, - "type": "object" - }, - "GoogleIamV1Binding": { - "description": "Associates `members`, or principals, with a `role`.", - "id": "GoogleIamV1Binding", - "properties": { - "condition": { - "$ref": "GoogleTypeExpr", - "description": "The condition that is associated with this binding. If the condition evaluates to `true`, then this binding applies to the current request. If the condition evaluates to `false`, then this binding does not apply to the current request. However, a different role binding might grant the same role to one or more of the principals in this binding. To learn which resources support conditions in their IAM policies, see the [IAM documentation](https://cloud.google.com/iam/help/conditions/resource-policies)." - }, - "members": { - "description": "Specifies the principals requesting access for a Google Cloud resource. `members` can have the following values: * `allUsers`: A special identifier that represents anyone who is on the internet; with or without a Google account. * `allAuthenticatedUsers`: A special identifier that represents anyone who is authenticated with a Google account or a service account. Does not include identities that come from external identity providers (IdPs) through identity federation. * `user:{emailid}`: An email address that represents a specific Google account. For example, `alice@example.com` . * `serviceAccount:{emailid}`: An email address that represents a Google service account. For example, `my-other-app@appspot.gserviceaccount.com`. * `serviceAccount:{projectid}.svc.id.goog[{namespace}/{kubernetes-sa}]`: An identifier for a [Kubernetes service account](https://cloud.google.com/kubernetes-engine/docs/how-to/kubernetes-service-accounts). For example, `my-project.svc.id.goog[my-namespace/my-kubernetes-sa]`. * `group:{emailid}`: An email address that represents a Google group. For example, `admins@example.com`. * `domain:{domain}`: The G Suite domain (primary) that represents all the users of that domain. For example, `google.com` or `example.com`. * `principal://iam.googleapis.com/locations/global/workforcePools/{pool_id}/subject/{subject_attribute_value}`: A single identity in a workforce identity pool. * `principalSet://iam.googleapis.com/locations/global/workforcePools/{pool_id}/group/{group_id}`: All workforce identities in a group. * `principalSet://iam.googleapis.com/locations/global/workforcePools/{pool_id}/attribute.{attribute_name}/{attribute_value}`: All workforce identities with a specific attribute value. * `principalSet://iam.googleapis.com/locations/global/workforcePools/{pool_id}/*`: All identities in a workforce identity pool. * `principal://iam.googleapis.com/projects/{project_number}/locations/global/workloadIdentityPools/{pool_id}/subject/{subject_attribute_value}`: A single identity in a workload identity pool. * `principalSet://iam.googleapis.com/projects/{project_number}/locations/global/workloadIdentityPools/{pool_id}/group/{group_id}`: A workload identity pool group. * `principalSet://iam.googleapis.com/projects/{project_number}/locations/global/workloadIdentityPools/{pool_id}/attribute.{attribute_name}/{attribute_value}`: All identities in a workload identity pool with a certain attribute. * `principalSet://iam.googleapis.com/projects/{project_number}/locations/global/workloadIdentityPools/{pool_id}/*`: All identities in a workload identity pool. 
* `deleted:user:{emailid}?uid={uniqueid}`: An email address (plus unique identifier) representing a user that has been recently deleted. For example, `alice@example.com?uid=123456789012345678901`. If the user is recovered, this value reverts to `user:{emailid}` and the recovered user retains the role in the binding. * `deleted:serviceAccount:{emailid}?uid={uniqueid}`: An email address (plus unique identifier) representing a service account that has been recently deleted. For example, `my-other-app@appspot.gserviceaccount.com?uid=123456789012345678901`. If the service account is undeleted, this value reverts to `serviceAccount:{emailid}` and the undeleted service account retains the role in the binding. * `deleted:group:{emailid}?uid={uniqueid}`: An email address (plus unique identifier) representing a Google group that has been recently deleted. For example, `admins@example.com?uid=123456789012345678901`. If the group is recovered, this value reverts to `group:{emailid}` and the recovered group retains the role in the binding. * `deleted:principal://iam.googleapis.com/locations/global/workforcePools/{pool_id}/subject/{subject_attribute_value}`: Deleted single identity in a workforce identity pool. For example, `deleted:principal://iam.googleapis.com/locations/global/workforcePools/my-pool-id/subject/my-subject-attribute-value`.", - "items": { - "type": "string" - }, - "type": "array" - }, - "role": { - "description": "Role that is assigned to the list of `members`, or principals. For example, `roles/viewer`, `roles/editor`, or `roles/owner`. For an overview of the IAM roles and permissions, see the [IAM documentation](https://cloud.google.com/iam/docs/roles-overview). For a list of the available pre-defined roles, see [here](https://cloud.google.com/iam/docs/understanding-roles).", - "type": "string" - } - }, - "type": "object" - }, - "GoogleIamV1Policy": { - "description": "An Identity and Access Management (IAM) policy, which specifies access controls for Google Cloud resources. A `Policy` is a collection of `bindings`. A `binding` binds one or more `members`, or principals, to a single `role`. Principals can be user accounts, service accounts, Google groups, and domains (such as G Suite). A `role` is a named list of permissions; each `role` can be an IAM predefined role or a user-created custom role. For some types of Google Cloud resources, a `binding` can also specify a `condition`, which is a logical expression that allows access to a resource only if the expression evaluates to `true`. A condition can add constraints based on attributes of the request, the resource, or both. To learn which resources support conditions in their IAM policies, see the [IAM documentation](https://cloud.google.com/iam/help/conditions/resource-policies). 
**JSON example:** ``` { \"bindings\": [ { \"role\": \"roles/resourcemanager.organizationAdmin\", \"members\": [ \"user:mike@example.com\", \"group:admins@example.com\", \"domain:google.com\", \"serviceAccount:my-project-id@appspot.gserviceaccount.com\" ] }, { \"role\": \"roles/resourcemanager.organizationViewer\", \"members\": [ \"user:eve@example.com\" ], \"condition\": { \"title\": \"expirable access\", \"description\": \"Does not grant access after Sep 2020\", \"expression\": \"request.time < timestamp('2020-10-01T00:00:00.000Z')\", } } ], \"etag\": \"BwWWja0YfJA=\", \"version\": 3 } ``` **YAML example:** ``` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3 ``` For a description of IAM and its features, see the [IAM documentation](https://cloud.google.com/iam/docs/).", - "id": "GoogleIamV1Policy", - "properties": { - "auditConfigs": { - "description": "Specifies cloud audit logging configuration for this policy.", - "items": { - "$ref": "GoogleIamV1AuditConfig" - }, - "type": "array" - }, - "bindings": { - "description": "Associates a list of `members`, or principals, with a `role`. Optionally, may specify a `condition` that determines how and when the `bindings` are applied. Each of the `bindings` must contain at least one principal. The `bindings` in a `Policy` can refer to up to 1,500 principals; up to 250 of these principals can be Google groups. Each occurrence of a principal counts towards these limits. For example, if the `bindings` grant 50 different roles to `user:alice@example.com`, and not to any other principal, then you can add another 1,450 principals to the `bindings` in the `Policy`.", - "items": { - "$ref": "GoogleIamV1Binding" - }, - "type": "array" - }, - "etag": { - "description": "`etag` is used for optimistic concurrency control as a way to help prevent simultaneous updates of a policy from overwriting each other. It is strongly suggested that systems make use of the `etag` in the read-modify-write cycle to perform policy updates in order to avoid race conditions: An `etag` is returned in the response to `getIamPolicy`, and systems are expected to put that etag in the request to `setIamPolicy` to ensure that their change will be applied to the same version of the policy. **Important:** If you use IAM Conditions, you must include the `etag` field whenever you call `setIamPolicy`. If you omit this field, then IAM allows you to overwrite a version `3` policy with a version `1` policy, and all of the conditions in the version `3` policy are lost.", - "format": "byte", - "type": "string" - }, - "version": { - "description": "Specifies the format of the policy. Valid values are `0`, `1`, and `3`. Requests that specify an invalid value are rejected. Any operation that affects conditional role bindings must specify version `3`. 
This requirement applies to the following operations: * Getting a policy that includes a conditional role binding * Adding a conditional role binding to a policy * Changing a conditional role binding in a policy * Removing any role binding, with or without a condition, from a policy that includes conditions **Important:** If you use IAM Conditions, you must include the `etag` field whenever you call `setIamPolicy`. If you omit this field, then IAM allows you to overwrite a version `3` policy with a version `1` policy, and all of the conditions in the version `3` policy are lost. If a policy does not include any conditions, operations on that policy may specify any valid version or leave the field unset. To learn which resources support conditions in their IAM policies, see the [IAM documentation](https://cloud.google.com/iam/help/conditions/resource-policies).", - "format": "int32", - "type": "integer" - } - }, - "type": "object" - }, - "GoogleIamV1SetIamPolicyRequest": { - "description": "Request message for `SetIamPolicy` method.", - "id": "GoogleIamV1SetIamPolicyRequest", - "properties": { - "policy": { - "$ref": "GoogleIamV1Policy", - "description": "REQUIRED: The complete policy to be applied to the `resource`. The size of the policy is limited to a few 10s of KB. An empty policy is a valid policy but certain Google Cloud services (such as Projects) might reject them." - }, - "updateMask": { - "description": "OPTIONAL: A FieldMask specifying which fields of the policy to modify. Only the fields in the mask will be modified. If no mask is provided, the following default mask is used: `paths: \"bindings, etag\"`", - "format": "google-fieldmask", - "type": "string" - } - }, - "type": "object" - }, - "GoogleIamV1TestIamPermissionsRequest": { - "description": "Request message for `TestIamPermissions` method.", - "id": "GoogleIamV1TestIamPermissionsRequest", - "properties": { - "permissions": { - "description": "The set of permissions to check for the `resource`. Permissions with wildcards (such as `*` or `storage.*`) are not allowed. For more information see [IAM Overview](https://cloud.google.com/iam/docs/overview#permissions).", - "items": { - "type": "string" - }, - "type": "array" - } - }, - "type": "object" - }, - "GoogleIamV1TestIamPermissionsResponse": { - "description": "Response message for `TestIamPermissions` method.", - "id": "GoogleIamV1TestIamPermissionsResponse", - "properties": { - "permissions": { - "description": "A subset of `TestPermissionsRequest.permissions` that the caller is allowed.", - "items": { - "type": "string" - }, - "type": "array" - } - }, - "type": "object" - }, "GoogleLongrunningListOperationsResponse": { "description": "The response message for Operations.ListOperations.", "id": "GoogleLongrunningListOperationsResponse", @@ -10346,29 +9558,6 @@ }, "type": "object" }, - "GoogleTypeExpr": { - "description": "Represents a textual expression in the Common Expression Language (CEL) syntax. CEL is a C-like expression language. The syntax and semantics of CEL are documented at https://github.com/google/cel-spec. 
Example (Comparison): title: \"Summary size limit\" description: \"Determines if a summary is less than 100 chars\" expression: \"document.summary.size() < 100\" Example (Equality): title: \"Requestor is owner\" description: \"Determines if requestor is the document owner\" expression: \"document.owner == request.auth.claims.email\" Example (Logic): title: \"Public documents\" description: \"Determine whether the document should be publicly visible\" expression: \"document.type != 'private' && document.type != 'internal'\" Example (Data Manipulation): title: \"Notification string\" description: \"Create a notification string with a timestamp.\" expression: \"'New message received at ' + string(document.create_time)\" The exact variables and functions that may be referenced within an expression are determined by the service that evaluates it. See the service documentation for additional information.", - "id": "GoogleTypeExpr", - "properties": { - "description": { - "description": "Optional. Description of the expression. This is a longer text which describes the expression, e.g. when hovered over it in a UI.", - "type": "string" - }, - "expression": { - "description": "Textual representation of an expression in Common Expression Language syntax.", - "type": "string" - }, - "location": { - "description": "Optional. String indicating the location of the expression for error reporting, e.g. a file name and a position in the file.", - "type": "string" - }, - "title": { - "description": "Optional. Title for the expression, i.e. a short string describing its purpose. This can be used e.g. in UIs which allow to enter the expression.", - "type": "string" - } - }, - "type": "object" - }, "GoogleTypeInterval": { "description": "Represents a time interval, encoded as a Timestamp start (inclusive) and a Timestamp end (exclusive). The start must be less than or equal to the end. When the start equals the end, the interval is empty (matches no time). When both start and end are unspecified, the interval matches any time.", "id": "GoogleTypeInterval", diff --git a/discovery/googleapis/container__v1.json b/discovery/googleapis/container__v1.json index 308bf0c25..7125e721f 100644 --- a/discovery/googleapis/container__v1.json +++ b/discovery/googleapis/container__v1.json @@ -25,7 +25,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20241008", + "revision": "20241203", "rootUrl": "https://container.googleapis.com/", "servicePath": "", "title": "Kubernetes Engine API", @@ -2785,7 +2785,7 @@ }, "workloadPolicyConfig": { "$ref": "WorkloadPolicyConfig", - "description": "Workload policy configuration for Autopilot." + "description": "WorkloadPolicyConfig is the configuration related to GCW workload policy" } }, "type": "object" @@ -3577,7 +3577,7 @@ }, "desiredAutopilotWorkloadPolicyConfig": { "$ref": "WorkloadPolicyConfig", - "description": "The desired workload policy configuration for the autopilot cluster." + "description": "WorkloadPolicyConfig is the configuration related to GCW workload policy" }, "desiredBinaryAuthorization": { "$ref": "BinaryAuthorization", @@ -3650,6 +3650,10 @@ "description": "Enable/Disable private endpoint for the cluster's master. Deprecated: Use desired_control_plane_endpoints_config.ip_endpoints_config.enable_public_endpoint instead. 
Note that the value of enable_public_endpoint is reversed: if enable_private_endpoint is false, then enable_public_endpoint will be true.", "type": "boolean" }, + "desiredEnterpriseConfig": { + "$ref": "DesiredEnterpriseConfig", + "description": "The desired enterprise configuration for the cluster." + }, "desiredFleet": { "$ref": "Fleet", "description": "The desired fleet configuration for the cluster." @@ -3744,6 +3748,10 @@ "$ref": "NodeKubeletConfig", "description": "The desired node kubelet config for all auto-provisioned node pools in autopilot clusters and node auto-provisioning enabled clusters." }, + "desiredNodePoolAutoConfigLinuxNodeConfig": { + "$ref": "LinuxNodeConfig", + "description": "The desired Linux node config for all auto-provisioned node pools in autopilot clusters and node auto-provisioning enabled clusters. Currently only `cgroup_mode` can be set here." + }, "desiredNodePoolAutoConfigNetworkTags": { "$ref": "NetworkTags", "description": "The desired network tags that apply to all auto-provisioned node pools in autopilot clusters and node auto-provisioning enabled clusters." @@ -4214,6 +4222,27 @@ }, "type": "object" }, + "DesiredEnterpriseConfig": { + "description": "DesiredEnterpriseConfig is a wrapper used for updating enterprise_config.", + "id": "DesiredEnterpriseConfig", + "properties": { + "desiredTier": { + "description": "desired_tier specifies the desired tier of the cluster.", + "enum": [ + "CLUSTER_TIER_UNSPECIFIED", + "STANDARD", + "ENTERPRISE" + ], + "enumDescriptions": [ + "CLUSTER_TIER_UNSPECIFIED is when cluster_tier is not set.", + "STANDARD indicates a standard GKE cluster.", + "ENTERPRISE indicates a GKE Enterprise cluster." + ], + "type": "string" + } + }, + "type": "object" + }, "DnsCacheConfig": { "description": "Configuration for NodeLocal DNSCache", "id": "DnsCacheConfig", @@ -4249,6 +4278,20 @@ ], "readOnly": true, "type": "string" + }, + "desiredTier": { + "description": "desired_tier specifies the desired tier of the cluster.", + "enum": [ + "CLUSTER_TIER_UNSPECIFIED", + "STANDARD", + "ENTERPRISE" + ], + "enumDescriptions": [ + "CLUSTER_TIER_UNSPECIFIED is when cluster_tier is not set.", + "STANDARD indicates a standard GKE cluster.", + "ENTERPRISE indicates a GKE Enterprise cluster." + ], + "type": "string" } }, "type": "object" @@ -4888,7 +4931,7 @@ "additionalProperties": { "type": "string" }, - "description": "The Linux kernel parameters to be applied to the nodes and all pods running on the nodes. The following parameters are supported. net.core.busy_poll net.core.busy_read net.core.netdev_max_backlog net.core.rmem_max net.core.wmem_default net.core.wmem_max net.core.optmem_max net.core.somaxconn net.ipv4.tcp_rmem net.ipv4.tcp_wmem net.ipv4.tcp_tw_reuse", + "description": "The Linux kernel parameters to be applied to the nodes and all pods running on the nodes. The following parameters are supported. 
net.core.busy_poll net.core.busy_read net.core.netdev_max_backlog net.core.rmem_max net.core.wmem_default net.core.wmem_max net.core.optmem_max net.core.somaxconn net.ipv4.tcp_rmem net.ipv4.tcp_wmem net.ipv4.tcp_tw_reuse kernel.shmmni kernel.shmmax kernel.shmall", "type": "object" } }, @@ -5593,6 +5636,20 @@ "format": "int32", "type": "integer" }, + "localSsdEncryptionMode": { + "description": "Specifies which method should be used for encrypting the Local SSDs attached to the node.", + "enum": [ + "LOCAL_SSD_ENCRYPTION_MODE_UNSPECIFIED", + "STANDARD_ENCRYPTION", + "EPHEMERAL_KEY_ENCRYPTION" + ], + "enumDescriptions": [ + "The given node will be encrypted using keys managed by Google infrastructure and the keys will be deleted when the node is deleted.", + "The given node will be encrypted using keys managed by Google infrastructure and the keys will be deleted when the node is deleted.", + "The given node will opt-in for using ephemeral key for encryption of Local SSDs. The Local SSDs will not be able to recover data in case of node crash." + ], + "type": "string" + }, "loggingConfig": { "$ref": "NodePoolLoggingConfig", "description": "Logging configuration." @@ -5601,6 +5658,11 @@ "description": "The name of a Google Compute Engine [machine type](https://cloud.google.com/compute/docs/machine-types) If unspecified, the default machine type is `e2-medium`.", "type": "string" }, + "maxRunDuration": { + "description": "The maximum duration for the nodes to exist. If unspecified, the nodes can exist indefinitely.", + "format": "google-duration", + "type": "string" + }, "metadata": { "additionalProperties": { "type": "string" }, @@ -5971,6 +6033,11 @@ "description": "Node pool configs that apply to all auto-provisioned node pools in autopilot clusters and node auto-provisioning enabled clusters.", "id": "NodePoolAutoConfig", "properties": { + "linuxNodeConfig": { + "$ref": "LinuxNodeConfig", + "description": "Output only. Configuration options for Linux nodes.", + "readOnly": true + }, "networkTags": { "$ref": "NetworkTags", "description": "The list of instance tags applied to all nodes. Tags are used to identify valid sources or targets for network firewalls and are specified by the client during cluster creation. Each tag within the list must comply with RFC1035." @@ -6013,22 +6080,22 @@ "type": "string" }, "maxNodeCount": { - "description": "Maximum number of nodes for one location in the NodePool. Must be >= min_node_count. There has to be enough quota to scale up the cluster.", + "description": "Maximum number of nodes for one location in the node pool. Must be >= min_node_count. There has to be enough quota to scale up the cluster.", "format": "int32", "type": "integer" }, "minNodeCount": { - "description": "Minimum number of nodes for one location in the NodePool. Must be >= 1 and <= max_node_count.", + "description": "Minimum number of nodes for one location in the node pool. Must be greater than or equal to 0 and less than or equal to max_node_count.", "format": "int32", "type": "integer" }, "totalMaxNodeCount": { - "description": "Maximum number of nodes in the node pool. Must be greater than total_min_node_count. There has to be enough quota to scale up the cluster. The total_*_node_count fields are mutually exclusive with the *_node_count fields.", + "description": "Maximum number of nodes in the node pool. Must be greater than or equal to total_min_node_count. There has to be enough quota to scale up the cluster.
The total_*_node_count fields are mutually exclusive with the *_node_count fields.", "format": "int32", "type": "integer" }, "totalMinNodeCount": { - "description": "Minimum number of nodes in the node pool. Must be greater than 1 less than total_max_node_count. The total_*_node_count fields are mutually exclusive with the *_node_count fields.", + "description": "Minimum number of nodes in the node pool. Must be greater than or equal to 0 and less than or equal to total_max_node_count. The total_*_node_count fields are mutually exclusive with the *_node_count fields.", "format": "int32", "type": "integer" } @@ -7792,6 +7859,11 @@ "description": "Optional. The desired [Google Compute Engine machine type](https://cloud.google.com/compute/docs/machine-types) for nodes in the node pool. Initiates an upgrade operation that migrates the nodes in the node pool to the specified machine type.", "type": "string" }, + "maxRunDuration": { + "description": "The maximum duration for the nodes to exist. If unspecified, the nodes can exist indefinitely.", + "format": "google-duration", + "type": "string" + }, "name": { "description": "The name (project, location, cluster, node pool) of the node pool to update. Specified in the format `projects/*/locations/*/clusters/*/nodePools/*`.", "type": "string" @@ -7936,6 +8008,76 @@ }, "type": "object" }, + "UpgradeInfoEvent": { + "description": "UpgradeInfoEvent is a notification sent to customers about the upgrade information of a resource.", + "id": "UpgradeInfoEvent", + "properties": { + "currentVersion": { + "description": "The current version before the upgrade.", + "type": "string" + }, + "description": { + "description": "A brief description of the event.", + "type": "string" + }, + "endTime": { + "description": "The time when the operation ended.", + "format": "google-datetime", + "type": "string" + }, + "operation": { + "description": "The operation associated with this upgrade.", + "type": "string" + }, + "resource": { + "description": "Optional relative path to the resource. For example in node pool upgrades, the relative path of the node pool.", + "type": "string" + }, + "resourceType": { + "description": "The resource type associated with the upgrade.", + "enum": [ + "UPGRADE_RESOURCE_TYPE_UNSPECIFIED", + "MASTER", + "NODE_POOL" + ], + "enumDescriptions": [ + "Default value. This shouldn't be used.", + "Master / control plane", + "Node pool" + ], + "type": "string" + }, + "startTime": { + "description": "The time when the operation was started.", + "format": "google-datetime", + "type": "string" + }, + "state": { + "description": "Output only. The state of the upgrade.", + "enum": [ + "STATE_UNSPECIFIED", + "STARTED", + "SUCCEEDED", + "FAILED", + "CANCELED" + ], + "enumDescriptions": [ + "STATE_UNSPECIFIED indicates the state is unspecified.", + "STARTED indicates the upgrade has started.", + "SUCCEEDED indicates the upgrade has completed successfully.", + "FAILED indicates the upgrade has failed.", + "CANCELED indicates the upgrade has canceled." + ], + "readOnly": true, + "type": "string" + }, + "targetVersion": { + "description": "The target version for the upgrade.", + "type": "string" + } + }, + "type": "object" + }, "UpgradeSettings": { "description": "These upgrade settings control the level of parallelism and the level of disruption caused by an upgrade. maxUnavailable controls the number of nodes that can be simultaneously unavailable. 
maxSurge controls the number of additional nodes that can be added to the node pool temporarily for the time of the upgrade to increase the number of available nodes. (maxUnavailable + maxSurge) determines the level of parallelism (how many nodes are being upgraded at the same time). Note: upgrades inevitably introduce some disruption since workloads need to be moved from old nodes to new, upgraded ones. Even if maxUnavailable=0, this holds true. (Disruption stays within the limits of PodDisruptionBudget, if it is configured.) Consider a hypothetical node pool with 5 nodes having maxSurge=2, maxUnavailable=1. This means the upgrade process upgrades 3 nodes simultaneously. It creates 2 additional (upgraded) nodes, then it brings down 3 old (not yet upgraded) nodes at the same time. This ensures that there are always at least 4 nodes available. These upgrade settings configure the upgrade strategy for the node pool. Use strategy to switch between the strategies applied to the node pool. If the strategy is ROLLING, use max_surge and max_unavailable to control the level of parallelism and the level of disruption caused by upgrade. 1. maxSurge controls the number of additional nodes that can be added to the node pool temporarily for the time of the upgrade to increase the number of available nodes. 2. maxUnavailable controls the number of nodes that can be simultaneously unavailable. 3. (maxUnavailable + maxSurge) determines the level of parallelism (how many nodes are being upgraded at the same time). If the strategy is BLUE_GREEN, use blue_green_settings to configure the blue-green upgrade related settings. 1. standard_rollout_policy is the default policy. The policy is used to control the way blue pool gets drained. The draining is executed in the batch mode. The batch size could be specified as either percentage of the node pool size or the number of nodes. batch_soak_duration is the soak time after each batch gets drained. 2. node_pool_soak_duration is the soak time after all blue nodes are drained. 
After this period, the blue pool nodes will be deleted.", "id": "UpgradeSettings", @@ -8155,7 +8297,7 @@ "type": "object" }, "WorkloadPolicyConfig": { - "description": "WorkloadPolicyConfig is the configuration of workload policy for autopilot clusters.", + "description": "WorkloadPolicyConfig is the configuration related to GCW workload policy", "id": "WorkloadPolicyConfig", "properties": { "allowNetAdmin": { diff --git a/discovery/googleapis/content__v2.1.json b/discovery/googleapis/content__v2.1.json index e24008f69..8aeb7475e 100644 --- a/discovery/googleapis/content__v2.1.json +++ b/discovery/googleapis/content__v2.1.json @@ -25,7 +25,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20241022", + "revision": "20241120", "rootUrl": "https://shoppingcontent.googleapis.com/", "servicePath": "content/v2.1/", "title": "Content API for Shopping", @@ -4316,141 +4316,6 @@ } } }, - "settlementreports": { - "methods": { - "get": { - "description": "Retrieves a settlement report from your Merchant Center account.", - "flatPath": "{merchantId}/settlementreports/{settlementId}", - "httpMethod": "GET", - "id": "content.settlementreports.get", - "parameterOrder": [ - "merchantId", - "settlementId" - ], - "parameters": { - "merchantId": { - "description": "The Merchant Center account of the settlement report.", - "format": "uint64", - "location": "path", - "required": true, - "type": "string" - }, - "settlementId": { - "description": "The Google-provided ID of the settlement.", - "location": "path", - "required": true, - "type": "string" - } - }, - "path": "{merchantId}/settlementreports/{settlementId}", - "response": { - "$ref": "SettlementReport" - }, - "scopes": [ - "https://www.googleapis.com/auth/content" - ] - }, - "list": { - "description": "Retrieves a list of settlement reports from your Merchant Center account.", - "flatPath": "{merchantId}/settlementreports", - "httpMethod": "GET", - "id": "content.settlementreports.list", - "parameterOrder": [ - "merchantId" - ], - "parameters": { - "maxResults": { - "description": "The maximum number of settlements to return in the response, used for paging. 
The default value is 200 returns per page, and the maximum allowed value is 5000 returns per page.", - "format": "uint32", - "location": "query", - "type": "integer" - }, - "merchantId": { - "description": "The Merchant Center account to list settlements for.", - "format": "uint64", - "location": "path", - "required": true, - "type": "string" - }, - "pageToken": { - "description": "The token returned by the previous request.", - "location": "query", - "type": "string" - }, - "transferEndDate": { - "description": "Obtains settlements which have transactions before this date (inclusively), in ISO 8601 format.", - "location": "query", - "type": "string" - }, - "transferStartDate": { - "description": "Obtains settlements which have transactions after this date (inclusively), in ISO 8601 format.", - "location": "query", - "type": "string" - } - }, - "path": "{merchantId}/settlementreports", - "response": { - "$ref": "SettlementreportsListResponse" - }, - "scopes": [ - "https://www.googleapis.com/auth/content" - ] - } - } - }, - "settlementtransactions": { - "methods": { - "list": { - "description": "Retrieves a list of transactions for the settlement.", - "flatPath": "{merchantId}/settlementreports/{settlementId}/transactions", - "httpMethod": "GET", - "id": "content.settlementtransactions.list", - "parameterOrder": [ - "merchantId", - "settlementId" - ], - "parameters": { - "maxResults": { - "description": "The maximum number of transactions to return in the response, used for paging. The default value is 200 transactions per page, and the maximum allowed value is 5000 transactions per page.", - "format": "uint32", - "location": "query", - "type": "integer" - }, - "merchantId": { - "description": "The Merchant Center account to list transactions for.", - "format": "uint64", - "location": "path", - "required": true, - "type": "string" - }, - "pageToken": { - "description": "The token returned by the previous request.", - "location": "query", - "type": "string" - }, - "settlementId": { - "description": "The Google-provided ID of the settlement.", - "location": "path", - "required": true, - "type": "string" - }, - "transactionIds": { - "description": "The list of transactions to return. If not set, all transactions will be returned.", - "location": "query", - "repeated": true, - "type": "string" - } - }, - "path": "{merchantId}/settlementreports/{settlementId}/transactions", - "response": { - "$ref": "SettlementtransactionsListResponse" - }, - "scopes": [ - "https://www.googleapis.com/auth/content" - ] - } - } - }, "shippingsettings": { "methods": { "custombatch": { @@ -9115,6 +8980,10 @@ "description": "Required. The label of the loyalty program. This is an internal label that uniquely identifies the relationship between a merchant entity and a loyalty program entity. It must be provided so that system can associate the assets below (for example, price and points) with a merchant. The corresponding program must be linked to the merchant account.", "type": "string" }, + "shippingLabel": { + "description": "Optional. The shipping label for the loyalty program. You can use this label to indicate whether this offer has the loyalty shipping benefit. If not specified, the item is not eligible for loyalty shipping for the given loyalty tier.", + "type": "string" + }, "tierLabel": { "description": "Required. The label of the tier within the loyalty program. 
Must match one of the labels within the program.", "type": "string" @@ -13625,193 +13494,6 @@ }, "type": "object" }, - "SettlementReport": { - "description": " Settlement reports detail order-level and item-level credits and debits between you and Google.", - "id": "SettlementReport", - "properties": { - "endDate": { - "description": "The end date on which all transactions are included in the report, in ISO 8601 format.", - "type": "string" - }, - "kind": { - "description": "Identifies what kind of resource this is. Value: the fixed string \"`content#settlementReport`\"", - "type": "string" - }, - "previousBalance": { - "$ref": "Price", - "description": "The residual amount from the previous invoice. This is set only if the previous invoices are not paid because of negative balance." - }, - "settlementId": { - "description": "The ID of the settlement report.", - "type": "string" - }, - "startDate": { - "description": "The start date on which all transactions are included in the report, in ISO 8601 format.", - "type": "string" - }, - "transferAmount": { - "$ref": "Price", - "description": "The money due to the merchant." - }, - "transferDate": { - "description": "Date on which transfer for this payment was initiated by Google, in ISO 8601 format.", - "type": "string" - }, - "transferIds": { - "description": "The list of bank identifiers used for the transfer. For example, Trace ID for Federal Automated Clearing House (ACH). This may also be known as the Wire ID.", - "items": { - "type": "string" - }, - "type": "array" - } - }, - "type": "object" - }, - "SettlementTransaction": { - "description": "Settlement transactions give a detailed breakdown of the settlement report.", - "id": "SettlementTransaction", - "properties": { - "amount": { - "$ref": "SettlementTransactionAmount", - "description": "The amount for the transaction." - }, - "identifiers": { - "$ref": "SettlementTransactionIdentifiers", - "description": "Identifiers of the transaction." - }, - "kind": { - "description": "Identifies what kind of resource this is. Value: the fixed string \"`content#settlementTransaction`\"", - "type": "string" - }, - "transaction": { - "$ref": "SettlementTransactionTransaction", - "description": "Details of the transaction." - } - }, - "type": "object" - }, - "SettlementTransactionAmount": { - "id": "SettlementTransactionAmount", - "properties": { - "commission": { - "$ref": "SettlementTransactionAmountCommission" - }, - "description": { - "description": "The description of the event. 
Acceptable values are: - \"`taxWithhold`\" - \"`principal`\" - \"`principalAdjustment`\" - \"`shippingFee`\" - \"`merchantRemittedSalesTax`\" - \"`googleRemittedSalesTax`\" - \"`merchantCoupon`\" - \"`merchantCouponTax`\" - \"`merchantRemittedDisposalTax`\" - \"`googleRemittedDisposalTax`\" - \"`merchantRemittedRedemptionFee`\" - \"`googleRemittedRedemptionFee`\" - \"`eeeEcoFee`\" - \"`furnitureEcoFee`\" - \"`copyPrivateFee`\" - \"`eeeEcoFeeCommission`\" - \"`furnitureEcoFeeCommission`\" - \"`copyPrivateFeeCommission`\" - \"`principalRefund`\" - \"`principalRefundTax`\" - \"`itemCommission`\" - \"`adjustmentCommission`\" - \"`shippingFeeCommission`\" - \"`commissionRefund`\" - \"`damaged`\" - \"`damagedOrDefectiveItem`\" - \"`expiredItem`\" - \"`faultyItem`\" - \"`incorrectItemReceived`\" - \"`itemMissing`\" - \"`qualityNotExpected`\" - \"`receivedTooLate`\" - \"`storePackageMissing`\" - \"`transitPackageMissing`\" - \"`unsuccessfulDeliveryUndeliverable`\" - \"`wrongChargeInStore`\" - \"`wrongItem`\" - \"`returns`\" - \"`undeliverable`\" - \"`issueRelatedRefundAndReplacementAmountDescription`\" - \"`refundFromMerchant`\" - \"`returnLabelShippingFee`\" - \"`lumpSumCorrection`\" - \"`pspFee`\" - \"`principalRefundDoesNotFit`\" - \"`principalRefundOrderedWrongItem`\" - \"`principalRefundQualityNotExpected`\" - \"`principalRefundBetterPriceFound`\" - \"`principalRefundNoLongerNeeded`\" - \"`principalRefundChangedMind`\" - \"`principalRefundReceivedTooLate`\" - \"`principalRefundIncorrectItemReceived`\" - \"`principalRefundDamagedOrDefectiveItem`\" - \"`principalRefundDidNotMatchDescription`\" - \"`principalRefundExpiredItem`\" ", - "type": "string" - }, - "transactionAmount": { - "$ref": "Price", - "description": "The amount that contributes to the line item price." - }, - "type": { - "description": "The type of the amount. Acceptable values are: - \"`itemPrice`\" - \"`orderPrice`\" - \"`refund`\" - \"`earlyRefund`\" - \"`courtesyRefund`\" - \"`returnRefund`\" - \"`returnLabelShippingFeeAmount`\" - \"`lumpSumCorrectionAmount`\" ", - "type": "string" - } - }, - "type": "object" - }, - "SettlementTransactionAmountCommission": { - "id": "SettlementTransactionAmountCommission", - "properties": { - "category": { - "description": "The category of the commission. 
Acceptable values are: - \"`animalsAndPetSupplies`\" - \"`dogCatFoodAndCatLitter`\" - \"`apparelAndAccessories`\" - \"`shoesHandbagsAndSunglasses`\" - \"`costumesAndAccessories`\" - \"`jewelry`\" - \"`watches`\" - \"`hobbiesArtsAndCrafts`\" - \"`homeAndGarden`\" - \"`entertainmentCollectibles`\" - \"`collectibleCoins`\" - \"`sportsCollectibles`\" - \"`sportingGoods`\" - \"`toysAndGames`\" - \"`musicalInstruments`\" - \"`giftCards`\" - \"`babyAndToddler`\" - \"`babyFoodWipesAndDiapers`\" - \"`businessAndIndustrial`\" - \"`camerasOpticsAndPhotography`\" - \"`consumerElectronics`\" - \"`electronicsAccessories`\" - \"`personalComputers`\" - \"`videoGameConsoles`\" - \"`foodAndGrocery`\" - \"`beverages`\" - \"`tobaccoProducts`\" - \"`furniture`\" - \"`hardware`\" - \"`buildingMaterials`\" - \"`tools`\" - \"`healthAndPersonalCare`\" - \"`beauty`\" - \"`householdSupplies`\" - \"`kitchenAndDining`\" - \"`majorAppliances`\" - \"`luggageAndBags`\" - \"`media`\" - \"`officeSupplies`\" - \"`softwareAndVideoGames`\" - \"`vehiclePartsAndAccessories`\" - \"`vehicleTiresAndWheels`\" - \"`vehicles`\" - \"`everythingElse`\" ", - "type": "string" - }, - "rate": { - "description": "Rate of the commission in percentage.", - "type": "string" - } - }, - "type": "object" - }, - "SettlementTransactionIdentifiers": { - "id": "SettlementTransactionIdentifiers", - "properties": { - "adjustmentId": { - "description": "The identifier of the adjustments, if it's available.", - "type": "string" - }, - "merchantOrderId": { - "description": "The merchant provided order ID.", - "type": "string" - }, - "orderItemId": { - "description": "The identifier of the item.", - "type": "string" - }, - "settlementEntryId": { - "description": "The unique ID of the settlement transaction entry.", - "type": "string" - }, - "shipmentIds": { - "description": "The shipment ids for the item.", - "items": { - "type": "string" - }, - "type": "array" - }, - "transactionId": { - "description": "The Google transaction ID.", - "type": "string" - } - }, - "type": "object" - }, - "SettlementTransactionTransaction": { - "id": "SettlementTransactionTransaction", - "properties": { - "postDate": { - "description": "The time on which the event occurred in ISO 8601 format.", - "type": "string" - }, - "type": { - "description": "The type of the transaction that occurred. Acceptable values are: - \"`order`\" - \"`reversal`\" - \"`orderRefund`\" - \"`reversalRefund`\" - \"`issueRelatedRefundAndReplacement`\" - \"`returnLabelShippingFeeTransaction`\" - \"`reversalIssueRelatedRefundAndReplacement`\" - \"`reversalReturnLabelShippingFeeTransaction`\" - \"`lumpSumCorrectionTransaction`\" ", - "type": "string" - } - }, - "type": "object" - }, - "SettlementreportsListResponse": { - "id": "SettlementreportsListResponse", - "properties": { - "kind": { - "description": "Identifies what kind of resource this is. Value: the fixed string \"`content#settlementreportsListResponse`\".", - "type": "string" - }, - "nextPageToken": { - "description": "The token for the retrieval of the next page of returns.", - "type": "string" - }, - "resources": { - "items": { - "$ref": "SettlementReport" - }, - "type": "array" - } - }, - "type": "object" - }, - "SettlementtransactionsListResponse": { - "id": "SettlementtransactionsListResponse", - "properties": { - "kind": { - "description": "Identifies what kind of resource this is. 
Value: the fixed string \"`content#settlementtransactionsListResponse`\".", - "type": "string" - }, - "nextPageToken": { - "description": "The token for the retrieval of the next page of returns.", - "type": "string" - }, - "resources": { - "items": { - "$ref": "SettlementTransaction" - }, - "type": "array" - } - }, - "type": "object" - }, "ShippingSettings": { "description": "The merchant account's shipping settings. All methods except getsupportedcarriers and getsupportedholidays require the admin role.", "id": "ShippingSettings", @@ -14220,11 +13902,11 @@ "id": "TimeZone", "properties": { "id": { - "description": "IANA Time Zone Database time zone, e.g. \"America/New_York\".", + "description": "IANA Time Zone Database time zone. For example \"America/New_York\".", "type": "string" }, "version": { - "description": "Optional. IANA Time Zone Database version number, e.g. \"2019a\".", + "description": "Optional. IANA Time Zone Database version number. For example \"2019a\".", "type": "string" } }, diff --git a/discovery/googleapis/contentwarehouse__v1.json b/discovery/googleapis/contentwarehouse__v1.json index a98a658f2..ce4bba3fa 100644 --- a/discovery/googleapis/contentwarehouse__v1.json +++ b/discovery/googleapis/contentwarehouse__v1.json @@ -25,7 +25,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20240723", + "revision": "20241204", "rootUrl": "https://contentwarehouse.googleapis.com/", "servicePath": "", "title": "Document AI Warehouse API", @@ -5818,18 +5818,18 @@ "type": "object" }, "GoogleTypePostalAddress": { - "description": "Represents a postal address, e.g. for postal delivery or payments addresses. Given a postal address, a postal service can deliver items to a premise, P.O. Box or similar. It is not intended to model geographical locations (roads, towns, mountains). In typical usage an address would be created via user input or from importing existing data, depending on the type of process. Advice on address input / editing: - Use an internationalization-ready address widget such as https://github.com/google/libaddressinput) - Users should not be presented with UI elements for input or editing of fields outside countries where that field is used. For more guidance on how to use this schema, please see: https://support.google.com/business/answer/6397478", + "description": "Represents a postal address. For example for postal delivery or payments addresses. Given a postal address, a postal service can deliver items to a premise, P.O. Box or similar. It is not intended to model geographical locations (roads, towns, mountains). In typical usage an address would be created by user input or from importing existing data, depending on the type of process. Advice on address input / editing: - Use an internationalization-ready address widget such as https://github.com/google/libaddressinput) - Users should not be presented with UI elements for input or editing of fields outside countries where that field is used. For more guidance on how to use this schema, see: https://support.google.com/business/answer/6397478", "id": "GoogleTypePostalAddress", "properties": { "addressLines": { - "description": "Unstructured address lines describing the lower levels of an address. Because values in address_lines do not have type information and may sometimes contain multiple values in a single field (e.g. \"Austin, TX\"), it is important that the line order is clear. 
The order of address lines should be \"envelope order\" for the country/region of the address. In places where this can vary (e.g. Japan), address_language is used to make it explicit (e.g. \"ja\" for large-to-small ordering and \"ja-Latn\" or \"en\" for small-to-large). This way, the most specific line of an address can be selected based on the language. The minimum permitted structural representation of an address consists of a region_code with all remaining information placed in the address_lines. It would be possible to format such an address very approximately without geocoding, but no semantic reasoning could be made about any of the address components until it was at least partially resolved. Creating an address only containing a region_code and address_lines, and then geocoding is the recommended way to handle completely unstructured addresses (as opposed to guessing which parts of the address should be localities or administrative areas).", + "description": "Unstructured address lines describing the lower levels of an address. Because values in address_lines do not have type information and may sometimes contain multiple values in a single field (For example \"Austin, TX\"), it is important that the line order is clear. The order of address lines should be \"envelope order\" for the country/region of the address. In places where this can vary (For example Japan), address_language is used to make it explicit (For example \"ja\" for large-to-small ordering and \"ja-Latn\" or \"en\" for small-to-large). This way, the most specific line of an address can be selected based on the language. The minimum permitted structural representation of an address consists of a region_code with all remaining information placed in the address_lines. It would be possible to format such an address very approximately without geocoding, but no semantic reasoning could be made about any of the address components until it was at least partially resolved. Creating an address only containing a region_code and address_lines, and then geocoding is the recommended way to handle completely unstructured addresses (as opposed to guessing which parts of the address should be localities or administrative areas).", "items": { "type": "string" }, "type": "array" }, "administrativeArea": { - "description": "Optional. Highest administrative subdivision which is used for postal addresses of a country or region. For example, this can be a state, a province, an oblast, or a prefecture. Specifically, for Spain this is the province and not the autonomous community (e.g. \"Barcelona\" and not \"Catalonia\"). Many countries don't use an administrative area in postal addresses. E.g. in Switzerland this should be left unpopulated.", + "description": "Optional. Highest administrative subdivision which is used for postal addresses of a country or region. For example, this can be a state, a province, an oblast, or a prefecture. Specifically, for Spain this is the province and not the autonomous community (For example \"Barcelona\" and not \"Catalonia\"). Many countries don't use an administrative area in postal addresses. For example in Switzerland this should be left unpopulated.", "type": "string" }, "languageCode": { @@ -5845,7 +5845,7 @@ "type": "string" }, "postalCode": { - "description": "Optional. Postal code of the address. Not all countries use or require postal codes to be present, but where they are used, they may trigger additional validation with other parts of the address (e.g. 
state/zip validation in the U.S.A.).", + "description": "Optional. Postal code of the address. Not all countries use or require postal codes to be present, but where they are used, they may trigger additional validation with other parts of the address (For example state/zip validation in the U.S.A.).", "type": "string" }, "recipients": { @@ -5865,7 +5865,7 @@ "type": "integer" }, "sortingCode": { - "description": "Optional. Additional, country-specific, sorting code. This is not used in most regions. Where it is used, the value is either a string like \"CEDEX\", optionally followed by a number (e.g. \"CEDEX 7\"), or just a number alone, representing the \"sector code\" (Jamaica), \"delivery area indicator\" (Malawi) or \"post office indicator\" (e.g. Côte d'Ivoire).", + "description": "Optional. Additional, country-specific, sorting code. This is not used in most regions. Where it is used, the value is either a string like \"CEDEX\", optionally followed by a number (For example \"CEDEX 7\"), or just a number alone, representing the \"sector code\" (Jamaica), \"delivery area indicator\" (Malawi) or \"post office indicator\" (For example Côte d'Ivoire).", "type": "string" }, "sublocality": { @@ -5880,11 +5880,11 @@ "id": "GoogleTypeTimeZone", "properties": { "id": { - "description": "IANA Time Zone Database time zone, e.g. \"America/New_York\".", + "description": "IANA Time Zone Database time zone. For example \"America/New_York\".", "type": "string" }, "version": { - "description": "Optional. IANA Time Zone Database version number, e.g. \"2019a\".", + "description": "Optional. IANA Time Zone Database version number. For example \"2019a\".", "type": "string" } }, diff --git a/discovery/googleapis/css__v1.json b/discovery/googleapis/css__v1.json index e65d1b188..11f5f3a3b 100644 --- a/discovery/googleapis/css__v1.json +++ b/discovery/googleapis/css__v1.json @@ -25,7 +25,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20240909", + "revision": "20241210", "rootUrl": "https://css.googleapis.com/", "servicePath": "", "title": "CSS API", @@ -162,7 +162,7 @@ "type": "string" }, "pageSize": { - "description": "Optional. The maximum number of accounts to return. The service may return fewer than this value. If unspecified, at most 50 accounts will be returned. The maximum value is 1000; values above 1000 will be coerced to 1000.", + "description": "Optional. The maximum number of accounts to return. The service may return fewer than this value. If unspecified, at most 50 accounts will be returned. The maximum value is 100; values above 100 will be coerced to 100.", "format": "int32", "location": "query", "type": "integer" @@ -261,7 +261,8 @@ ], "parameters": { "feedId": { - "description": "Required. The primary or supplemental feed id. If CSS Product already exists and feed id provided is different, then the CSS Product will be moved to a new feed. Note: For now, CSSs do not need to provide feed ids as we create feeds on the fly. We do not have supplemental feed support for CSS Products yet.", + "deprecated": true, + "description": "Optional. The primary or supplemental feed id. If CSS Product already exists and feed id provided is different, then the CSS Product will be moved to a new feed. Note: For now, CSSs do not need to provide feed ids as we create feeds on the fly. 
We do not have supplemental feed support for CSS Products yet.", "format": "int64", "location": "query", "type": "string" @@ -408,7 +409,7 @@ ] }, "list": { - "description": "Lists the labels assigned to an account.", + "description": "Lists the labels owned by an account.", "flatPath": "v1/accounts/{accountsId}/labels", "httpMethod": "GET", "id": "css.accounts.labels.list", @@ -707,11 +708,11 @@ }, "headlineOfferPrice": { "$ref": "Price", - "description": "Headline Price of the aggregate offer." + "description": "Headline Price of the CSS Product." }, "headlineOfferShippingPrice": { "$ref": "Price", - "description": "Headline Price of the aggregate offer." + "description": "Headline Price of the CSS Product." }, "headlineOfferSubscriptionCost": { "$ref": "HeadlineOfferSubscriptionCost", @@ -719,7 +720,7 @@ }, "highPrice": { "$ref": "Price", - "description": "High Price of the aggregate offer." + "description": "High Price of the CSS Product." }, "imageLink": { "description": "URL of an image of the item.", @@ -742,7 +743,7 @@ }, "lowPrice": { "$ref": "Price", - "description": "Low Price of the aggregate offer." + "description": "Low Price of the CSS Product." }, "material": { "description": "The material of which the item is made.", @@ -758,7 +759,7 @@ "type": "string" }, "numberOfOffers": { - "description": "The number of aggregate offers.", + "description": "The number of CSS Products.", "format": "int64", "type": "string" }, @@ -849,7 +850,7 @@ "type": "object" }, "CssProduct": { - "description": "The processed CSS Product(a.k.a Aggregate Offer internally).", + "description": "The processed CSS Product.", "id": "CssProduct", "properties": { "attributes": { @@ -921,6 +922,7 @@ "type": "string" }, "freshnessTime": { + "deprecated": true, "description": "Represents the existing version (freshness) of the CSS Product, which can be used to preserve the right order when multiple updates are done at the same time. This field must not be set to the future time. If set, the update is prevented if a newer version of the item already exists in our system (that is the last update time of the existing CSS products is later than the freshness time set in the update). If the update happens, the last update time is then set to this freshness time. If not set, the update will not be prevented and the last update time will default to when this request was received by the CSS API. 
If the operation is prevented, the aborted exception will be thrown.", "format": "google-datetime", "type": "string" @@ -999,7 +1001,7 @@ "id": "DestinationStatus", "properties": { "approvedCountries": { - "description": "List of country codes (ISO 3166-1 alpha-2) where the aggregate offer is approved.", + "description": "List of country codes (ISO 3166-1 alpha-2) where the CSS Product is approved.", "items": { "type": "string" }, @@ -1010,14 +1012,14 @@ "type": "string" }, "disapprovedCountries": { - "description": "List of country codes (ISO 3166-1 alpha-2) where the aggregate offer is disapproved.", + "description": "List of country codes (ISO 3166-1 alpha-2) where the CSS Product is disapproved.", "items": { "type": "string" }, "type": "array" }, "pendingCountries": { - "description": "List of country codes (ISO 3166-1 alpha-2) where the aggregate offer is pending approval.", + "description": "List of country codes (ISO 3166-1 alpha-2) where the CSS Product is pending approval.", "items": { "type": "string" }, @@ -1087,7 +1089,7 @@ "id": "ItemLevelIssue", "properties": { "applicableCountries": { - "description": "List of country codes (ISO 3166-1 alpha-2) where issue applies to the aggregate offer.", + "description": "List of country codes (ISO 3166-1 alpha-2) where issue applies to the CSS Product.", "items": { "type": "string" }, @@ -1122,7 +1124,7 @@ "type": "string" }, "servability": { - "description": "How this issue affects serving of the aggregate offer.", + "description": "How this issue affects serving of the CSS Product.", "type": "string" } }, diff --git a/discovery/googleapis/datacatalog__v1.json b/discovery/googleapis/datacatalog__v1.json index 80643af50..7489a533b 100644 --- a/discovery/googleapis/datacatalog__v1.json +++ b/discovery/googleapis/datacatalog__v1.json @@ -25,7 +25,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20240614", + "revision": "20241202", "rootUrl": "https://datacatalog.googleapis.com/", "servicePath": "", "title": "Google Cloud Data Catalog API", @@ -176,9 +176,150 @@ } } }, + "organizations": { + "resources": { + "locations": { + "methods": { + "retrieveConfig": { + "description": "Retrieves the configuration related to the migration from Data Catalog to Dataplex for a specific organization, including all the projects under it which have a separate configuration set.", + "flatPath": "v1/organizations/{organizationsId}/locations/{locationsId}:retrieveConfig", + "httpMethod": "GET", + "id": "datacatalog.organizations.locations.retrieveConfig", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "Required. The organization whose config is being retrieved.", + "location": "path", + "pattern": "^organizations/[^/]+/locations/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+name}:retrieveConfig", + "response": { + "$ref": "GoogleCloudDatacatalogV1OrganizationConfig" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "retrieveEffectiveConfig": { + "description": "Retrieves the effective configuration related to the migration from Data Catalog to Dataplex for a specific organization or project. 
If there is no specific configuration set for the resource, the setting is checked hierarchically through the ancestors of the resource, starting from the resource itself.", + "flatPath": "v1/organizations/{organizationsId}/locations/{locationsId}:retrieveEffectiveConfig", + "httpMethod": "GET", + "id": "datacatalog.organizations.locations.retrieveEffectiveConfig", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "Required. The resource whose effective config is being retrieved.", + "location": "path", + "pattern": "^organizations/[^/]+/locations/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+name}:retrieveEffectiveConfig", + "response": { + "$ref": "GoogleCloudDatacatalogV1MigrationConfig" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "setConfig": { + "description": "Sets the configuration related to the migration to Dataplex for an organization or project.", + "flatPath": "v1/organizations/{organizationsId}/locations/{locationsId}:setConfig", + "httpMethod": "POST", + "id": "datacatalog.organizations.locations.setConfig", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "Required. The organization or project whose config is being specified.", + "location": "path", + "pattern": "^organizations/[^/]+/locations/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+name}:setConfig", + "request": { + "$ref": "GoogleCloudDatacatalogV1SetConfigRequest" + }, + "response": { + "$ref": "GoogleCloudDatacatalogV1MigrationConfig" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + } + } + } + } + }, "projects": { "resources": { "locations": { + "methods": { + "retrieveEffectiveConfig": { + "description": "Retrieves the effective configuration related to the migration from Data Catalog to Dataplex for a specific organization or project. If there is no specific configuration set for the resource, the setting is checked hierarchically through the ancestors of the resource, starting from the resource itself.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}:retrieveEffectiveConfig", + "httpMethod": "GET", + "id": "datacatalog.projects.locations.retrieveEffectiveConfig", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "Required. The resource whose effective config is being retrieved.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+name}:retrieveEffectiveConfig", + "response": { + "$ref": "GoogleCloudDatacatalogV1MigrationConfig" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "setConfig": { + "description": "Sets the configuration related to the migration to Dataplex for an organization or project.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}:setConfig", + "httpMethod": "POST", + "id": "datacatalog.projects.locations.setConfig", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "Required. 
The organization or project whose config is being specified.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+name}:setConfig", + "request": { + "$ref": "GoogleCloudDatacatalogV1SetConfigRequest" + }, + "response": { + "$ref": "GoogleCloudDatacatalogV1MigrationConfig" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + } + }, "resources": { "entryGroups": { "methods": { @@ -1080,7 +1221,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "datacatalog.projects.locations.operations.cancel", @@ -3026,6 +3167,10 @@ "name": { "description": "Identifier. The resource name of the entry group in URL format. Note: The entry group itself and its child resources might not be stored in the location specified in its name.", "type": "string" + }, + "transferredToDataplex": { + "description": "Optional. When set to [true], it means DataCatalog EntryGroup was transferred to Dataplex Catalog Service. It makes EntryGroup and its Entries to be read-only in DataCatalog. However, new Tags on EntryGroup and its Entries can be created. After setting the flag to [true] it cannot be unset.", + "type": "boolean" } }, "type": "object" @@ -3413,6 +3558,41 @@ }, "type": "object" }, + "GoogleCloudDatacatalogV1MigrationConfig": { + "description": "The configuration related to the migration to Dataplex applied to an organization or project. It is the response message for SetConfig and RetrieveEffectiveConfig.", + "id": "GoogleCloudDatacatalogV1MigrationConfig", + "properties": { + "catalogUiExperience": { + "description": "Opt-in status for the UI switch to Dataplex.", + "enum": [ + "CATALOG_UI_EXPERIENCE_UNSPECIFIED", + "CATALOG_UI_EXPERIENCE_ENABLED", + "CATALOG_UI_EXPERIENCE_DISABLED" + ], + "enumDescriptions": [ + "Default value. The default UI is Dataplex.", + "The UI is Dataplex.", + "The UI is Data Catalog." 
+ ], + "type": "string" + }, + "tagTemplateMigration": { + "description": "Opt-in status for the migration of Tag Templates to Dataplex.", + "enum": [ + "TAG_TEMPLATE_MIGRATION_UNSPECIFIED", + "TAG_TEMPLATE_MIGRATION_ENABLED", + "TAG_TEMPLATE_MIGRATION_DISABLED" + ], + "enumDescriptions": [ + "Default value. Migration of Tag Templates from Data Catalog to Dataplex is not performed.", + "Migration of Tag Templates from Data Catalog to Dataplex is enabled.", + "Migration of Tag Templates from Data Catalog to Dataplex is disabled." + ], + "type": "string" + } + }, + "type": "object" + }, "GoogleCloudDatacatalogV1ModelSpec": { "description": "Specification that applies to a model. Valid only for entries with the `MODEL` type.", "id": "GoogleCloudDatacatalogV1ModelSpec", @@ -3446,6 +3626,20 @@ }, "type": "object" }, + "GoogleCloudDatacatalogV1OrganizationConfig": { + "description": "The configuration related to the migration from Data Catalog to Dataplex that has been applied to an organization and any projects under it. It is the response message for RetrieveConfig.", + "id": "GoogleCloudDatacatalogV1OrganizationConfig", + "properties": { + "config": { + "additionalProperties": { + "$ref": "GoogleCloudDatacatalogV1MigrationConfig" + }, + "description": "Map of organizations and project resource names and their configuration. The format for the map keys is `organizations/{organizationId}` or `projects/{projectId}`.", + "type": "object" + } + }, + "type": "object" + }, "GoogleCloudDatacatalogV1PersonalDetails": { "description": "Entry metadata relevant only to the user and private to them.", "id": "GoogleCloudDatacatalogV1PersonalDetails", @@ -4033,6 +4227,41 @@ }, "type": "object" }, + "GoogleCloudDatacatalogV1SetConfigRequest": { + "description": "Request message for SetConfig.", + "id": "GoogleCloudDatacatalogV1SetConfigRequest", + "properties": { + "catalogUiExperience": { + "description": "Opt-in status for the UI switch to Dataplex.", + "enum": [ + "CATALOG_UI_EXPERIENCE_UNSPECIFIED", + "CATALOG_UI_EXPERIENCE_ENABLED", + "CATALOG_UI_EXPERIENCE_DISABLED" + ], + "enumDescriptions": [ + "Default value. The default UI is Dataplex.", + "The UI is Dataplex.", + "The UI is Data Catalog." + ], + "type": "string" + }, + "tagTemplateMigration": { + "description": "Opt-in status for the migration of Tag Templates to Dataplex.", + "enum": [ + "TAG_TEMPLATE_MIGRATION_UNSPECIFIED", + "TAG_TEMPLATE_MIGRATION_ENABLED", + "TAG_TEMPLATE_MIGRATION_DISABLED" + ], + "enumDescriptions": [ + "Default value. Migration of Tag Templates from Data Catalog to Dataplex is not performed.", + "Migration of Tag Templates from Data Catalog to Dataplex is enabled.", + "Migration of Tag Templates from Data Catalog to Dataplex is disabled." + ], + "type": "string" + } + }, + "type": "object" + }, "GoogleCloudDatacatalogV1SqlDatabaseSystemSpec": { "description": "Specification that applies to entries that are part `SQL_DATABASE` system (user_specified_type)", "id": "GoogleCloudDatacatalogV1SqlDatabaseSystemSpec", @@ -4125,6 +4354,26 @@ "description": "Resources like entry can have schemas associated with them. This scope allows you to attach tags to an individual column based on that schema. To attach a tag to a nested column, separate column names with a dot (`.`). Example: `column.nested_column`.", "type": "string" }, + "dataplexTransferStatus": { + "description": "Output only. 
Denotes the transfer status of the Tag Template.", + "enum": [ + "DATAPLEX_TRANSFER_STATUS_UNSPECIFIED", + "MIGRATED", + "TRANSFERRED" + ], + "enumDeprecated": [ + false, + true, + false + ], + "enumDescriptions": [ + "Default value. TagTemplate and its tags are only visible and editable in DataCatalog.", + "TagTemplate and its tags are auto-copied to Dataplex service. Visible in both services. Editable in DataCatalog, read-only in Dataplex. Deprecated: Individual TagTemplate migration is deprecated in favor of organization or project wide TagTemplate migration opt-in.", + "TagTemplate and its tags are auto-copied to Dataplex service. Visible in both services. Editable in Dataplex, read-only in DataCatalog." + ], + "readOnly": true, + "type": "string" + }, "fields": { "additionalProperties": { "$ref": "GoogleCloudDatacatalogV1TagField" @@ -4211,15 +4460,18 @@ "description": "Optional. Transfer status of the TagTemplate", "enum": [ "DATAPLEX_TRANSFER_STATUS_UNSPECIFIED", - "MIGRATED" + "MIGRATED", + "TRANSFERRED" ], "enumDeprecated": [ false, - true + true, + false ], "enumDescriptions": [ "Default value. TagTemplate and its tags are only visible and editable in DataCatalog.", - "TagTemplate and its tags are auto-copied to Dataplex service. Visible in both services. Editable in DataCatalog, read-only in Dataplex. Deprecated: Individual TagTemplate migration is deprecated in favor of organization or project wide TagTemplate migration opt-in." + "TagTemplate and its tags are auto-copied to Dataplex service. Visible in both services. Editable in DataCatalog, read-only in Dataplex. Deprecated: Individual TagTemplate migration is deprecated in favor of organization or project wide TagTemplate migration opt-in.", + "TagTemplate and its tags are auto-copied to Dataplex service. Visible in both services. Editable in Dataplex, read-only in DataCatalog." ], "type": "string" }, diff --git a/discovery/googleapis/datafusion__v1.json b/discovery/googleapis/datafusion__v1.json index 23401bb79..3602317f4 100644 --- a/discovery/googleapis/datafusion__v1.json +++ b/discovery/googleapis/datafusion__v1.json @@ -25,7 +25,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20240703", + "revision": "20241204", "rootUrl": "https://datafusion.googleapis.com/", "servicePath": "", "title": "Cloud Data Fusion API", @@ -566,7 +566,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. 
On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "datafusion.projects.locations.operations.cancel", @@ -779,38 +779,6 @@ }, "type": "object" }, - "AssetLocation": { - "description": "Provides the mapping of a cloud asset to a direct physical location or to a proxy that defines the location on its behalf.", - "id": "AssetLocation", - "properties": { - "expected": { - "$ref": "IsolationExpectations", - "description": "Defines the customer expectation around ZI/ZS for this asset and ZI/ZS state of the region at the time of asset creation." - }, - "extraParameters": { - "description": "Defines extra parameters required for specific asset types.", - "items": { - "$ref": "ExtraParameter" - }, - "type": "array" - }, - "locationData": { - "description": "Contains all kinds of physical location definitions for this asset.", - "items": { - "$ref": "LocationData" - }, - "type": "array" - }, - "parentAsset": { - "description": "Defines parents assets if any in order to allow later generation of child_asset_location data via child assets.", - "items": { - "$ref": "CloudAsset" - }, - "type": "array" - } - }, - "type": "object" - }, "AuditConfig": { "description": "Specifies the audit configuration for a service. The configuration determines which permission types are logged, and what identities, if any, are exempted from logging. An AuditConfig must have one or more AuditLogConfigs. If there are AuditConfigs for both `allServices` and a specific service, the union of the two AuditConfigs is used for that service: the log_types specified in each AuditConfig are enabled, and the exempted_members in each AuditLogConfig are exempted. Example Policy with multiple AuditConfigs: { \"audit_configs\": [ { \"service\": \"allServices\", \"audit_log_configs\": [ { \"log_type\": \"DATA_READ\", \"exempted_members\": [ \"user:jose@example.com\" ] }, { \"log_type\": \"DATA_WRITE\" }, { \"log_type\": \"ADMIN_READ\" } ] }, { \"service\": \"sampleservice.googleapis.com\", \"audit_log_configs\": [ { \"log_type\": \"DATA_READ\" }, { \"log_type\": \"DATA_WRITE\", \"exempted_members\": [ \"user:aliya@example.com\" ] } ] } ] } For sampleservice, this policy enables DATA_READ, DATA_WRITE and ADMIN_READ logging. 
It also exempts `jose@example.com` from DATA_READ logging, and `aliya@example.com` from DATA_WRITE logging.", "id": "AuditConfig", @@ -881,72 +849,23 @@ }, "type": "object" }, - "BlobstoreLocation": { - "description": "Policy ID that identified data placement in Blobstore as per go/blobstore-user-guide#data-metadata-placement-and-failure-domains", - "id": "BlobstoreLocation", - "properties": { - "policyId": { - "items": { - "type": "string" - }, - "type": "array" - } - }, - "type": "object" - }, "CancelOperationRequest": { "description": "The request message for Operations.CancelOperation.", "id": "CancelOperationRequest", "properties": {}, "type": "object" }, - "CloudAsset": { - "id": "CloudAsset", - "properties": { - "assetName": { - "type": "string" - }, - "assetType": { - "type": "string" - } - }, - "type": "object" - }, - "CloudAssetComposition": { - "id": "CloudAssetComposition", - "properties": { - "childAsset": { - "items": { - "$ref": "CloudAsset" - }, - "type": "array" - } - }, - "type": "object" - }, "CryptoKeyConfig": { "description": "The crypto key configuration. This field is used by the Customer-managed encryption keys (CMEK) feature.", "id": "CryptoKeyConfig", "properties": { "keyReference": { - "description": "The name of the key which is used to encrypt/decrypt customer data. For key in Cloud KMS, the key should be in the format of `projects/*/locations/*/keyRings/*/cryptoKeys/*`.", + "description": "Optional. The name of the key which is used to encrypt/decrypt customer data. For key in Cloud KMS, the key should be in the format of `projects/*/locations/*/keyRings/*/cryptoKeys/*`.", "type": "string" } }, "type": "object" }, - "DirectLocationAssignment": { - "id": "DirectLocationAssignment", - "properties": { - "location": { - "items": { - "$ref": "LocationAssignment" - }, - "type": "array" - } - }, - "type": "object" - }, "DnsPeering": { "description": "DNS peering configuration. These configurations are used to create DNS peering with the customer Cloud DNS.", "id": "DnsPeering", @@ -960,7 +879,7 @@ "type": "string" }, "name": { - "description": "Required. The resource name of the dns peering zone. Format: projects/{project}/locations/{location}/instances/{instance}/dnsPeerings/{dns_peering}", + "description": "Identifier. The resource name of the dns peering zone. Format: projects/{project}/locations/{location}/instances/{instance}/dnsPeerings/{dns_peering}", "type": "string" }, "targetNetwork": { @@ -1018,17 +937,6 @@ }, "type": "object" }, - "ExtraParameter": { - "description": "Defines parameters that should only be used for specific asset types.", - "id": "ExtraParameter", - "properties": { - "regionalMigDistributionPolicy": { - "$ref": "RegionalMigDistributionPolicy", - "description": "Details about zones used by regional compute.googleapis.com/InstanceGroupManager to create instances." - } - }, - "type": "object" - }, "Instance": { "description": "Represents a Data Fusion instance.", "id": "Instance", @@ -1062,18 +970,18 @@ }, "cryptoKeyConfig": { "$ref": "CryptoKeyConfig", - "description": "The crypto key configuration. This field is used by the Customer-Managed Encryption Keys (CMEK) feature." + "description": "Optional. The crypto key configuration. This field is used by the Customer-Managed Encryption Keys (CMEK) feature." }, "dataplexDataLineageIntegrationEnabled": { "description": "Optional. 
Option to enable the Dataplex Lineage Integration feature.", "type": "boolean" }, "dataprocServiceAccount": { - "description": "User-managed service account to set on Dataproc when Cloud Data Fusion creates Dataproc to run data processing pipelines. This allows users to have fine-grained access control on Dataproc's accesses to cloud resources.", + "description": "Optional. User-managed service account to set on Dataproc when Cloud Data Fusion creates Dataproc to run data processing pipelines. This allows users to have fine-grained access control on Dataproc's accesses to cloud resources.", "type": "string" }, "description": { - "description": "A description of this instance.", + "description": "Optional. A description of this instance.", "type": "string" }, "disabledReason": { @@ -1093,28 +1001,29 @@ "type": "array" }, "displayName": { - "description": "Display name for an instance.", + "description": "Optional. Display name for an instance.", "type": "string" }, "enableRbac": { - "description": "Option to enable granular role-based access control.", + "description": "Optional. Option to enable granular role-based access control.", "type": "boolean" }, "enableStackdriverLogging": { - "description": "Option to enable Stackdriver Logging.", + "description": "Optional. Option to enable Stackdriver Logging.", "type": "boolean" }, "enableStackdriverMonitoring": { - "description": "Option to enable Stackdriver Monitoring.", + "description": "Optional. Option to enable Stackdriver Monitoring.", "type": "boolean" }, "enableZoneSeparation": { - "description": "Option to enable granular zone separation.", + "description": "Output only. Option to enable granular zone separation.", + "readOnly": true, "type": "boolean" }, "eventPublishConfig": { "$ref": "EventPublishConfig", - "description": "Option to enable and pass metadata for event publishing." + "description": "Optional. Option to enable and pass metadata for event publishing." }, "gcsBucket": { "description": "Output only. Cloud Storage bucket generated by Data Fusion in the customer project.", @@ -1139,7 +1048,7 @@ }, "networkConfig": { "$ref": "NetworkConfig", - "description": "Network configuration options. These are required when a private Data Fusion instance is to be created." + "description": "Optional. Network configuration options. These are required when a private Data Fusion instance is to be created." }, "options": { "additionalProperties": { @@ -1158,7 +1067,7 @@ "type": "string" }, "privateInstance": { - "description": "Specifies whether the Data Fusion instance should be private. If set to true, all Data Fusion nodes will have private IP addresses and will not be able to access the public internet.", + "description": "Optional. Specifies whether the Data Fusion instance should be private. If set to true, all Data Fusion nodes will have private IP addresses and will not be able to access the public internet.", "type": "boolean" }, "satisfiesPzs": { @@ -1241,7 +1150,7 @@ "type": "string" }, "version": { - "description": "Current version of the Data Fusion. Only specifiable in Update.", + "description": "Optional. Current version of the Data Fusion. Only specifiable in Update.", "type": "string" }, "workforceIdentityServiceEndpoint": { @@ -1250,128 +1159,7 @@ "type": "string" }, "zone": { - "description": "Name of the zone in which the Data Fusion instance will be created. 
Only DEVELOPER instances use this field.", - "type": "string" - } - }, - "type": "object" - }, - "IsolationExpectations": { - "id": "IsolationExpectations", - "properties": { - "ziOrgPolicy": { - "enum": [ - "ZI_UNSPECIFIED", - "ZI_UNKNOWN", - "ZI_NOT_REQUIRED", - "ZI_PREFERRED", - "ZI_REQUIRED" - ], - "enumDescriptions": [ - "", - "To be used if tracking is not available", - "", - "", - "" - ], - "type": "string" - }, - "ziRegionPolicy": { - "enum": [ - "ZI_REGION_POLICY_UNSPECIFIED", - "ZI_REGION_POLICY_UNKNOWN", - "ZI_REGION_POLICY_NOT_SET", - "ZI_REGION_POLICY_FAIL_OPEN", - "ZI_REGION_POLICY_FAIL_CLOSED" - ], - "enumDescriptions": [ - "", - "To be used if tracking is not available", - "", - "", - "" - ], - "type": "string" - }, - "ziRegionState": { - "enum": [ - "ZI_REGION_UNSPECIFIED", - "ZI_REGION_UNKNOWN", - "ZI_REGION_NOT_ENABLED", - "ZI_REGION_ENABLED" - ], - "enumDescriptions": [ - "", - "To be used if tracking is not available", - "", - "" - ], - "type": "string" - }, - "zoneIsolation": { - "deprecated": true, - "description": "Deprecated: use zi_org_policy, zi_region_policy and zi_region_state instead for setting ZI expectations as per go/zicy-publish-physical-location.", - "enum": [ - "ZI_UNSPECIFIED", - "ZI_UNKNOWN", - "ZI_NOT_REQUIRED", - "ZI_PREFERRED", - "ZI_REQUIRED" - ], - "enumDescriptions": [ - "", - "To be used if tracking is not available", - "", - "", - "" - ], - "type": "string" - }, - "zoneSeparation": { - "deprecated": true, - "description": "Deprecated: use zs_org_policy, and zs_region_stateinstead for setting Zs expectations as per go/zicy-publish-physical-location.", - "enum": [ - "ZS_UNSPECIFIED", - "ZS_UNKNOWN", - "ZS_NOT_REQUIRED", - "ZS_REQUIRED" - ], - "enumDescriptions": [ - "", - "To be used if tracking is not available", - "", - "" - ], - "type": "string" - }, - "zsOrgPolicy": { - "enum": [ - "ZS_UNSPECIFIED", - "ZS_UNKNOWN", - "ZS_NOT_REQUIRED", - "ZS_REQUIRED" - ], - "enumDescriptions": [ - "", - "To be used if tracking is not available", - "", - "" - ], - "type": "string" - }, - "zsRegionState": { - "enum": [ - "ZS_REGION_UNSPECIFIED", - "ZS_REGION_UNKNOWN", - "ZS_REGION_NOT_ENABLED", - "ZS_REGION_ENABLED" - ], - "enumDescriptions": [ - "", - "To be used if tracking of the asset ZS-bit is not available", - "", - "" - ], + "description": "Optional. Name of the zone in which the Data Fusion instance will be created. Only DEVELOPER instances use this field.", "type": "string" } }, @@ -1382,7 +1170,8 @@ "id": "ListAvailableVersionsResponse", "properties": { "availableVersions": { - "description": "Represents a list of versions that are supported.", + "deprecated": true, + "description": "Represents a list of versions that are supported. 
Deprecated: Use versions field instead.", "items": { "$ref": "Version" }, @@ -1391,6 +1180,13 @@ "nextPageToken": { "description": "Token to retrieve the next page of results or empty if there are no more results in the list.", "type": "string" + }, + "versions": { + "description": "Represents a list of all versions.", + "items": { + "$ref": "Version" + }, + "type": "array" } }, "type": "object" @@ -1508,64 +1304,6 @@ }, "type": "object" }, - "LocationAssignment": { - "id": "LocationAssignment", - "properties": { - "location": { - "type": "string" - }, - "locationType": { - "enum": [ - "UNSPECIFIED", - "CLUSTER", - "POP", - "CLOUD_ZONE", - "CLOUD_REGION", - "MULTI_REGION_GEO", - "MULTI_REGION_JURISDICTION", - "GLOBAL", - "OTHER" - ], - "enumDescriptions": [ - "", - "1-10: Physical failure domains.", - "", - "11-20: Logical failure domains.", - "", - "", - "", - "", - "" - ], - "type": "string" - } - }, - "type": "object" - }, - "LocationData": { - "id": "LocationData", - "properties": { - "blobstoreLocation": { - "$ref": "BlobstoreLocation" - }, - "childAssetLocation": { - "$ref": "CloudAssetComposition" - }, - "directLocation": { - "$ref": "DirectLocationAssignment" - }, - "gcpProjectProxy": { - "$ref": "TenantProjectProxy" - }, - "placerLocation": { - "$ref": "PlacerLocation" - }, - "spannerLocation": { - "$ref": "SpannerLocation" - } - }, - "type": "object" - }, "MaintenancePolicy": { "description": "Maintenance policy of the instance.", "id": "MaintenancePolicy", @@ -1704,17 +1442,6 @@ }, "type": "object" }, - "PlacerLocation": { - "description": "Message describing that the location of the customer resource is tied to placer allocations", - "id": "PlacerLocation", - "properties": { - "placerConfig": { - "description": "Directory with a config related to it in placer (e.g. \"/placer/prod/home/my-root/my-dir\")", - "type": "string" - } - }, - "type": "object" - }, "Policy": { "description": "An Identity and Access Management (IAM) policy, which specifies access controls for Google Cloud resources. A `Policy` is a collection of `bindings`. A `binding` binds one or more `members`, or principals, to a single `role`. Principals can be user accounts, service accounts, Google groups, and domains (such as G Suite). A `role` is a named list of permissions; each `role` can be an IAM predefined role or a user-created custom role. For some types of Google Cloud resources, a `binding` can also specify a `condition`, which is a logical expression that allows access to a resource only if the expression evaluates to `true`. A condition can add constraints based on attributes of the request, the resource, or both. To learn which resources support conditions in their IAM policies, see the [IAM documentation](https://cloud.google.com/iam/help/conditions/resource-policies). 
**JSON example:** ``` { \"bindings\": [ { \"role\": \"roles/resourcemanager.organizationAdmin\", \"members\": [ \"user:mike@example.com\", \"group:admins@example.com\", \"domain:google.com\", \"serviceAccount:my-project-id@appspot.gserviceaccount.com\" ] }, { \"role\": \"roles/resourcemanager.organizationViewer\", \"members\": [ \"user:eve@example.com\" ], \"condition\": { \"title\": \"expirable access\", \"description\": \"Does not grant access after Sep 2020\", \"expression\": \"request.time < timestamp('2020-10-01T00:00:00.000Z')\", } } ], \"etag\": \"BwWWja0YfJA=\", \"version\": 3 } ``` **YAML example:** ``` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3 ``` For a description of IAM and its features, see the [IAM documentation](https://cloud.google.com/iam/docs/).", "id": "Policy", @@ -1781,25 +1508,6 @@ }, "type": "object" }, - "RegionalMigDistributionPolicy": { - "description": "To be used for specifying the intended distribution of regional compute.googleapis.com/InstanceGroupManager instances", - "id": "RegionalMigDistributionPolicy", - "properties": { - "targetShape": { - "description": "The shape in which the group converges around distribution of resources. Instance of proto2 enum", - "format": "int32", - "type": "integer" - }, - "zones": { - "description": "Cloud zones used by regional MIG to create instances.", - "items": { - "$ref": "ZoneConfiguration" - }, - "type": "array" - } - }, - "type": "object" - }, "RestartInstanceRequest": { "description": "Request message for restarting a Data Fusion instance.", "id": "RestartInstanceRequest", @@ -1822,26 +1530,6 @@ }, "type": "object" }, - "SpannerLocation": { - "id": "SpannerLocation", - "properties": { - "backupName": { - "description": "Set of backups used by the resource with name in the same format as what is available at http://table/spanner_automon.backup_metadata", - "items": { - "type": "string" - }, - "type": "array" - }, - "dbName": { - "description": "Set of databases used by the resource in format /span//", - "items": { - "type": "string" - }, - "type": "array" - } - }, - "type": "object" - }, "Status": { "description": "The `Status` type defines a logical error model that is suitable for different programming environments, including REST APIs and RPC APIs. It is used by [gRPC](https://github.com/grpc). Each `Status` message contains three pieces of data: error code, error message, and error details. 
You can find out more about this error model and how to work with it in the [API Design Guide](https://cloud.google.com/apis/design/errors).", "id": "Status", @@ -1869,18 +1557,6 @@ }, "type": "object" }, - "TenantProjectProxy": { - "id": "TenantProjectProxy", - "properties": { - "projectNumbers": { - "items": { - "type": "string" - }, - "type": "array" - } - }, - "type": "object" - }, "TestIamPermissionsRequest": { "description": "Request message for `TestIamPermissions` method.", "id": "TestIamPermissionsRequest", @@ -1946,12 +1622,14 @@ "enum": [ "TYPE_UNSPECIFIED", "TYPE_PREVIEW", - "TYPE_GENERAL_AVAILABILITY" + "TYPE_GENERAL_AVAILABILITY", + "TYPE_DEPRECATED" ], "enumDescriptions": [ "Version does not have availability yet", "Version is under development and not considered stable", - "Version is available for public use" + "Version is available for public use", + "Version is no longer supported." ], "type": "string" }, @@ -1961,15 +1639,6 @@ } }, "type": "object" - }, - "ZoneConfiguration": { - "id": "ZoneConfiguration", - "properties": { - "zone": { - "type": "string" - } - }, - "type": "object" } } } diff --git a/discovery/googleapis/datalineage__v1.json b/discovery/googleapis/datalineage__v1.json index b8b4a7eb2..0513be1b1 100644 --- a/discovery/googleapis/datalineage__v1.json +++ b/discovery/googleapis/datalineage__v1.json @@ -240,6 +240,16 @@ "description": "Regional Endpoint", "endpointUrl": "https://datalineage.us-west8.rep.googleapis.com/", "location": "us-west8" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://datalineage.us.rep.googleapis.com/", + "location": "us" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://datalineage.eu.rep.googleapis.com/", + "location": "eu" } ], "icons": { @@ -252,7 +262,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20241018", + "revision": "20241202", "rootUrl": "https://datalineage.googleapis.com/", "servicePath": "", "title": "Data Lineage API", @@ -432,7 +442,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. 
On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "datalineage.projects.locations.operations.cancel", @@ -1275,7 +1285,7 @@ "type": "string" }, "sourceType": { - "description": "Type of the source. Use of a source_type other than `CUSTOM` for process creation or updating is highly discouraged. It may cause additional billing costs and be restricted in the future without notice.", + "description": "Type of the source. Use of a source_type other than `CUSTOM` for process creation or updating is highly discouraged. It might be restricted in the future without notice. There will be increase in cost if you use any of the source types other than `CUSTOM`.", "enum": [ "SOURCE_TYPE_UNSPECIFIED", "CUSTOM", diff --git a/discovery/googleapis/datamigration__v1.json b/discovery/googleapis/datamigration__v1.json index 58bfc82ea..e74686e4f 100644 --- a/discovery/googleapis/datamigration__v1.json +++ b/discovery/googleapis/datamigration__v1.json @@ -25,7 +25,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20241015", + "revision": "20241202", "rootUrl": "https://datamigration.googleapis.com/", "servicePath": "", "title": "Database Migration API", @@ -1343,6 +1343,31 @@ "https://www.googleapis.com/auth/cloud-platform" ] }, + "fetchSourceObjects": { + "description": "Retrieves objects from the source database that can be selected for data migration. This is applicable for the following migrations: 1. PostgreSQL to Cloud SQL for PostgreSQL 2. PostgreSQL to AlloyDB for PostgreSQL.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/migrationJobs/{migrationJobsId}:fetchSourceObjects", + "httpMethod": "GET", + "id": "datamigration.projects.locations.migrationJobs.fetchSourceObjects", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "Required. The resource name for the migration job for which source objects should be returned.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/migrationJobs/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+name}:fetchSourceObjects", + "response": { + "$ref": "Operation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, "generateSshScript": { "description": "Generate a SSH configuration script to configure the reverse SSH connectivity.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/migrationJobs/{migrationJobsId}:generateSshScript", @@ -1768,6 +1793,31 @@ "resources": { "objects": { "methods": { + "get": { + "description": "Use this method to get details about a migration job object.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/migrationJobs/{migrationJobsId}/objects/{objectsId}", + "httpMethod": "GET", + "id": "datamigration.projects.locations.migrationJobs.objects.get", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "Required. 
The name of the migration job object resource to get.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/migrationJobs/[^/]+/objects/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+name}", + "response": { + "$ref": "MigrationJobObject" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, "getIamPolicy": { "description": "Gets the access control policy for a resource. Returns an empty policy if the resource exists and does not have a policy set.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/migrationJobs/{migrationJobsId}/objects/{objectsId}:getIamPolicy", @@ -1799,6 +1849,70 @@ "https://www.googleapis.com/auth/cloud-platform" ] }, + "list": { + "description": "Use this method to list the objects of a specific migration job.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/migrationJobs/{migrationJobsId}/objects", + "httpMethod": "GET", + "id": "datamigration.projects.locations.migrationJobs.objects.list", + "parameterOrder": [ + "parent" + ], + "parameters": { + "pageSize": { + "description": "Maximum number of objects to return. Default is 50. The maximum value is 1000; values above 1000 will be coerced to 1000.", + "format": "int32", + "location": "query", + "type": "integer" + }, + "pageToken": { + "description": "Page token received from a previous `ListMigrationJobObjectsRequest` call. Provide this to retrieve the subsequent page. When paginating, all other parameters provided to `ListMigrationJobObjectsRequest` must match the call that provided the page token.", + "location": "query", + "type": "string" + }, + "parent": { + "description": "Required. The parent migration job that owns the collection of objects.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/migrationJobs/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+parent}/objects", + "response": { + "$ref": "ListMigrationJobObjectsResponse" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "lookup": { + "description": "Use this method to look up a migration job object by its source object identifier.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/migrationJobs/{migrationJobsId}/objects:lookup", + "httpMethod": "POST", + "id": "datamigration.projects.locations.migrationJobs.objects.lookup", + "parameterOrder": [ + "parent" + ], + "parameters": { + "parent": { + "description": "Required. The parent migration job that owns the collection of objects.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/migrationJobs/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+parent}/objects:lookup", + "request": { + "$ref": "LookupMigrationJobObjectRequest" + }, + "response": { + "$ref": "MigrationJobObject" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, "setIamPolicy": { "description": "Sets the access control policy on the specified resource. Replaces any existing policy. Can return `NOT_FOUND`, `INVALID_ARGUMENT`, and `PERMISSION_DENIED` errors.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/migrationJobs/{migrationJobsId}/objects/{objectsId}:setIamPolicy", @@ -1862,7 +1976,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. 
If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "datamigration.projects.locations.operations.cancel", @@ -3988,7 +4102,7 @@ "type": "string" }, "requestedCancellation": { - "description": "Output only. Identifies whether the user has requested cancellation of the operation. Operations that have successfully been cancelled have Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Output only. Identifies whether the user has requested cancellation of the operation. Operations that have successfully been cancelled have google.longrunning.Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", "readOnly": true, "type": "boolean" }, @@ -4251,6 +4365,24 @@ }, "type": "object" }, + "ListMigrationJobObjectsResponse": { + "description": "Response containing the objects for a migration job.", + "id": "ListMigrationJobObjectsResponse", + "properties": { + "migrationJobObjects": { + "description": "List of migration job objects.", + "items": { + "$ref": "MigrationJobObject" + }, + "type": "array" + }, + "nextPageToken": { + "description": "A token, which can be sent as `page_token` to retrieve the next page.", + "type": "string" + } + }, + "type": "object" + }, "ListMigrationJobsResponse": { "description": "Response message for 'ListMigrationJobs' request.", "id": "ListMigrationJobsResponse", @@ -4374,6 +4506,17 @@ "properties": {}, "type": "object" }, + "LookupMigrationJobObjectRequest": { + "description": "Request for looking up a specific migration job object by its source object identifier.", + "id": "LookupMigrationJobObjectRequest", + "properties": { + "sourceObjectIdentifier": { + "$ref": "SourceObjectIdentifier", + "description": "Required. The source object identifier which maps to the migration job object." 
+ } + }, + "type": "object" + }, "MachineConfig": { "description": "MachineConfig describes the configuration of a machine.", "id": "MachineConfig", @@ -4563,6 +4706,13 @@ "description": "Custom engine specific features.", "type": "object" }, + "indices": { + "description": "View indices.", + "items": { + "$ref": "IndexEntity" + }, + "type": "array" + }, "sqlCode": { "description": "The SQL code which creates the view.", "type": "string" @@ -4654,6 +4804,10 @@ "description": "The name (URI) of this migration job resource, in the form of: projects/{project}/locations/{location}/migrationJobs/{migrationJob}.", "type": "string" }, + "objectsConfig": { + "$ref": "MigrationJobObjectsConfig", + "description": "Optional. The objects that need to be migrated." + }, "oracleToPostgresConfig": { "$ref": "OracleToPostgresConfig", "description": "Configuration for heterogeneous **Oracle to Cloud SQL for PostgreSQL** and **Oracle to AlloyDB for PostgreSQL** migrations." @@ -4772,6 +4926,100 @@ }, "type": "object" }, + "MigrationJobObject": { + "description": "A specific Migration Job Object (e.g. a specific DB Table)", + "id": "MigrationJobObject", + "properties": { + "createTime": { + "description": "Output only. The creation time of the migration job object.", + "format": "google-datetime", + "readOnly": true, + "type": "string" + }, + "error": { + "$ref": "Status", + "description": "Output only. The error details in case of failure.", + "readOnly": true + }, + "name": { + "description": "The object's name.", + "type": "string" + }, + "phase": { + "description": "Output only. The phase of the migration job object.", + "enum": [ + "PHASE_UNSPECIFIED", + "FULL_DUMP", + "CDC", + "READY_FOR_PROMOTE", + "PROMOTE_IN_PROGRESS", + "PROMOTED", + "DIFF_BACKUP" + ], + "enumDescriptions": [ + "The phase of the migration job is unknown.", + "The migration job object is in the full dump phase.", + "The migration job object is in CDC phase.", + "The migration job object is ready to be promoted.", + "The migration job object is in running the promote phase.", + "The migration job is promoted.", + "The migration job object is in the differential backup phase." + ], + "readOnly": true, + "type": "string" + }, + "sourceObject": { + "$ref": "SourceObjectIdentifier", + "description": "The object identifier in the data source." + }, + "state": { + "description": "The state of the migration job object.", + "enum": [ + "STATE_UNSPECIFIED", + "NOT_STARTED", + "RUNNING", + "STOPPING", + "STOPPED", + "RESTARTING", + "FAILED", + "REMOVING", + "NOT_SELECTED", + "COMPLETED" + ], + "enumDescriptions": [ + "The state of the migration job object is unknown.", + "The migration job object is not started.", + "The migration job object is running.", + "The migration job object is being stopped.", + "The migration job object is currently stopped.", + "The migration job object is restarting.", + "The migration job object failed.", + "The migration job object is deleting.", + "The migration job object is not selected for migration.", + "The migration job object is completed." + ], + "type": "string" + }, + "updateTime": { + "description": "Output only. 
The last update time of the migration job object.", + "format": "google-datetime", + "readOnly": true, + "type": "string" + } + }, + "type": "object" + }, + "MigrationJobObjectsConfig": { + "description": "Configuration for the objects to be migrated.", + "id": "MigrationJobObjectsConfig", + "properties": { + "sourceObjectsConfig": { + "$ref": "SourceObjectsConfig", + "description": "The list of the migration job objects." + } + }, + "type": "object" + }, "MigrationJobVerificationError": { "description": "Error message of a verification Migration job.", "id": "MigrationJobVerificationError", @@ -5271,6 +5519,10 @@ "description": "If the source is a Cloud SQL database, use this field to provide the Cloud SQL instance ID of the source.", "type": "string" }, + "database": { + "description": "Optional. The name of the specific database within the host.", + "type": "string" + }, "host": { "description": "Required. The IP or hostname of the source PostgreSQL database.", "type": "string" @@ -5477,13 +5729,22 @@ "PromoteMigrationJobRequest": { "description": "Request message for 'PromoteMigrationJob' request.", "id": "PromoteMigrationJobRequest", - "properties": {}, + "properties": { + "objectsFilter": { + "$ref": "MigrationJobObjectsConfig", + "description": "Optional. The object filter to apply to the migration job." + } + }, "type": "object" }, "RestartMigrationJobRequest": { "description": "Request message for 'RestartMigrationJob' request.", "id": "RestartMigrationJobRequest", "properties": { + "objectsFilter": { + "$ref": "MigrationJobObjectsConfig", + "description": "Optional. The object filter to apply to the migration job." + }, "skipValidation": { "description": "Optional. Restart the migration job without running prior configuration verification. Defaults to `false`.", "type": "boolean" @@ -5845,6 +6106,68 @@ }, "type": "object" }, + "SourceObjectConfig": { + "description": "Config for a single migration job object.", + "id": "SourceObjectConfig", + "properties": { + "objectIdentifier": { + "$ref": "SourceObjectIdentifier", + "description": "The object identifier." + } + }, + "type": "object" + }, + "SourceObjectIdentifier": { + "description": "An identifier for the Migration Job Object.", + "id": "SourceObjectIdentifier", + "properties": { + "database": { + "description": "The database name. This will be required only if the object uses a database name as part of its unique identifier.", + "type": "string" + }, + "type": { + "description": "Required. The type of the migration job object.", + "enum": [ + "MIGRATION_JOB_OBJECT_TYPE_UNSPECIFIED", + "DATABASE" + ], + "enumDescriptions": [ + "The type of the migration job object is unknown.", + "The migration job object is a database." + ], + "type": "string" + } + }, + "type": "object" + }, + "SourceObjectsConfig": { + "description": "List of configurations for the source objects to be migrated.", + "id": "SourceObjectsConfig", + "properties": { + "objectConfigs": { + "description": "The list of the objects to be migrated.", + "items": { + "$ref": "SourceObjectConfig" + }, + "type": "array" + }, + "objectsSelectionType": { + "description": "Optional. The objects selection type of the migration job.", + "enum": [ + "OBJECTS_SELECTION_TYPE_UNSPECIFIED", + "ALL_OBJECTS", + "SPECIFIED_OBJECTS" + ], + "enumDescriptions": [ + "The type of the objects selection is unknown, indicating that the migration job is at instance level.", + "Migrate all of the objects.", + "Migrate specific objects." 
+ ], + "type": "string" + } + }, + "type": "object" + }, "SourceSqlChange": { "description": "Options to configure rule type SourceSqlChange. The rule is used to alter the sql code for database entities. The rule filter field can refer to one entity. The rule scope can be: StoredProcedure, Function, Trigger, View", "id": "SourceSqlChange", @@ -6020,15 +6343,15 @@ "id": "SqlServerEncryptionOptions", "properties": { "certPath": { - "description": "Required. Path to certificate.", + "description": "Required. Path to the Certificate (.cer) in Cloud Storage, in the form `gs://bucketName/fileName`. The instance must have write permissions to the bucket and read access to the file.", "type": "string" }, "pvkPassword": { - "description": "Required. Input only. Private key password.", + "description": "Required. Input only. Password that encrypts the private key.", "type": "string" }, "pvkPath": { - "description": "Required. Path to certificate private key.", + "description": "Required. Path to the Certificate Private Key (.pvk) in Cloud Storage, in the form `gs://bucketName/fileName`. The instance must have write permissions to the bucket and read access to the file.", "type": "string" } }, @@ -6092,12 +6415,16 @@ "enum": [ "SSL_TYPE_UNSPECIFIED", "SERVER_ONLY", - "SERVER_CLIENT" + "SERVER_CLIENT", + "REQUIRED", + "NONE" ], "enumDescriptions": [ "Unspecified.", "Only 'ca_certificate' specified.", - "Both server ('ca_certificate'), and client ('client_key', 'client_certificate') specified." + "Both server ('ca_certificate'), and client ('client_key', 'client_certificate') specified.", + "Mandates SSL encryption for all connections. This doesn’t require certificate verification.", + "Connection is not encrypted." ], "type": "string" } diff --git a/discovery/googleapis/dataplex__v1.json b/discovery/googleapis/dataplex__v1.json index a8f89760e..851bf1827 100644 --- a/discovery/googleapis/dataplex__v1.json +++ b/discovery/googleapis/dataplex__v1.json @@ -15,6 +15,243 @@ "description": "Dataplex API is used to manage the lifecycle of data lakes.", "discoveryVersion": "v1", "documentationLink": "https://cloud.google.com/dataplex/docs", + "endpoints": [ + { + "description": "Regional Endpoint", + "endpointUrl": "https://dataplex.africa-south1.rep.googleapis.com/", + "location": "africa-south1" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://dataplex.asia-east1.rep.googleapis.com/", + "location": "asia-east1" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://dataplex.asia-east2.rep.googleapis.com/", + "location": "asia-east2" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://dataplex.asia-northeast1.rep.googleapis.com/", + "location": "asia-northeast1" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://dataplex.asia-northeast2.rep.googleapis.com/", + "location": "asia-northeast2" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://dataplex.asia-northeast3.rep.googleapis.com/", + "location": "asia-northeast3" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://dataplex.asia-south1.rep.googleapis.com/", + "location": "asia-south1" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://dataplex.asia-south2.rep.googleapis.com/", + "location": "asia-south2" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://dataplex.asia-southeast1.rep.googleapis.com/", + "location": "asia-southeast1" + }, + { + "description": "Regional Endpoint", + 
"endpointUrl": "https://dataplex.asia-southeast2.rep.googleapis.com/", + "location": "asia-southeast2" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://dataplex.australia-southeast1.rep.googleapis.com/", + "location": "australia-southeast1" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://dataplex.australia-southeast2.rep.googleapis.com/", + "location": "australia-southeast2" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://dataplex.europe-central2.rep.googleapis.com/", + "location": "europe-central2" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://dataplex.europe-north1.rep.googleapis.com/", + "location": "europe-north1" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://dataplex.europe-north2.rep.googleapis.com/", + "location": "europe-north2" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://dataplex.europe-southwest1.rep.googleapis.com/", + "location": "europe-southwest1" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://dataplex.europe-west1.rep.googleapis.com/", + "location": "europe-west1" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://dataplex.europe-west10.rep.googleapis.com/", + "location": "europe-west10" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://dataplex.europe-west12.rep.googleapis.com/", + "location": "europe-west12" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://dataplex.europe-west2.rep.googleapis.com/", + "location": "europe-west2" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://dataplex.europe-west3.rep.googleapis.com/", + "location": "europe-west3" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://dataplex.europe-west4.rep.googleapis.com/", + "location": "europe-west4" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://dataplex.europe-west6.rep.googleapis.com/", + "location": "europe-west6" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://dataplex.europe-west8.rep.googleapis.com/", + "location": "europe-west8" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://dataplex.europe-west9.rep.googleapis.com/", + "location": "europe-west9" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://dataplex.me-central1.rep.googleapis.com/", + "location": "me-central1" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://dataplex.me-central2.rep.googleapis.com/", + "location": "me-central2" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://dataplex.me-west1.rep.googleapis.com/", + "location": "me-west1" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://dataplex.northamerica-northeast1.rep.googleapis.com/", + "location": "northamerica-northeast1" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://dataplex.northamerica-northeast2.rep.googleapis.com/", + "location": "northamerica-northeast2" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://dataplex.northamerica-south1.rep.googleapis.com/", + "location": "northamerica-south1" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://dataplex.southamerica-east1.rep.googleapis.com/", + "location": "southamerica-east1" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://dataplex.southamerica-west1.rep.googleapis.com/", + 
"location": "southamerica-west1" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://dataplex.us-central1.rep.googleapis.com/", + "location": "us-central1" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://dataplex.us-central2.rep.googleapis.com/", + "location": "us-central2" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://dataplex.us-east1.rep.googleapis.com/", + "location": "us-east1" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://dataplex.us-east4.rep.googleapis.com/", + "location": "us-east4" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://dataplex.us-east5.rep.googleapis.com/", + "location": "us-east5" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://dataplex.us-east7.rep.googleapis.com/", + "location": "us-east7" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://dataplex.us-south1.rep.googleapis.com/", + "location": "us-south1" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://dataplex.us-west1.rep.googleapis.com/", + "location": "us-west1" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://dataplex.us-west2.rep.googleapis.com/", + "location": "us-west2" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://dataplex.us-west3.rep.googleapis.com/", + "location": "us-west3" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://dataplex.us-west4.rep.googleapis.com/", + "location": "us-west4" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://dataplex.us-west8.rep.googleapis.com/", + "location": "us-west8" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://dataplex.us.rep.googleapis.com/", + "location": "us" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://dataplex.eu.rep.googleapis.com/", + "location": "eu" + } + ], "icons": { "x16": "http://www.google.com/images/icons/product/search-16.gif", "x32": "http://www.google.com/images/icons/product/search-32.gif" @@ -25,7 +262,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20241001", + "revision": "20241206", "rootUrl": "https://dataplex.googleapis.com/", "servicePath": "", "title": "Cloud Dataplex API", @@ -479,7 +716,7 @@ "type": "string" }, "orderBy": { - "description": "Optional. Specifies the ordering of results.", + "description": "Optional. Specifies the ordering of results. Supported values are: * relevance (default) * last_modified_timestamp * last_modified_timestamp asc", "location": "query", "type": "string" }, @@ -495,7 +732,7 @@ "type": "string" }, "query": { - "description": "Required. The query against which entries in scope should be matched.", + "description": "Required. The query against which entries in scope should be matched. The query syntax is defined in Search syntax for Dataplex Catalog (https://cloud.google.com/dataplex/docs/search-syntax).", "location": "query", "type": "string" }, @@ -1102,6 +1339,11 @@ "name" ], "parameters": { + "force": { + "description": "Optional. If set to true, any child resources of this data scan will also be deleted. (Otherwise, the request will only work if the data scan has no child resources.)", + "location": "query", + "type": "boolean" + }, "name": { "description": "Required. 
The resource name of the dataScan: projects/{project}/locations/{location_id}/dataScans/{data_scan_id} where project refers to a project_id or project_number and location_id refers to a GCP region.", "location": "path", @@ -1273,14 +1515,14 @@ ], "parameters": { "name": { - "description": "Output only. The relative resource name of the scan, of the form: projects/{project}/locations/{location_id}/dataScans/{datascan_id}, where project refers to a project_id or project_number and location_id refers to a GCP region.", + "description": "Output only. Identifier. The relative resource name of the scan, of the form: projects/{project}/locations/{location_id}/dataScans/{datascan_id}, where project refers to a project_id or project_number and location_id refers to a GCP region.", "location": "path", "pattern": "^projects/[^/]+/locations/[^/]+/dataScans/[^/]+$", "required": true, "type": "string" }, "updateMask": { - "description": "Required. Mask of fields to update.", + "description": "Optional. Mask of fields to update.", "format": "google-fieldmask", "location": "query", "type": "string" @@ -2484,7 +2726,7 @@ "type": "boolean" }, "aspectKeys": { - "description": "Optional. The map keys of the Aspects which the service should modify. It supports the following syntaxes: - matches an aspect of the given type and empty path. @path - matches an aspect of the given type and specified path. For example, to attach an aspect to a field that is specified by the schema aspect, the path should have the format Schema.. * - matches aspects of the given type for all paths. *@path - matches aspects of all types on the given path.The service will not remove existing aspects matching the syntax unless delete_missing_aspects is set to true.If this field is left empty, the service treats it as specifying exactly those Aspects present in the request.", + "description": "Optional. The map keys of the Aspects which the service should modify. It supports the following syntaxes: - matches an aspect of the given type and empty path. @path - matches an aspect of the given type and specified path. For example, to attach an aspect to a field that is specified by the schema aspect, the path should have the format Schema.. @* - matches aspects of the given type for all paths. *@path - matches aspects of all types on the given path.The service will not remove existing aspects matching the syntax unless delete_missing_aspects is set to true.If this field is left empty, the service treats it as specifying exactly those Aspects present in the request.", "location": "query", "repeated": true, "type": "string" @@ -5787,6 +6029,11 @@ "pattern": "^projects/[^/]+/locations/[^/]+$", "required": true, "type": "string" + }, + "validateOnly": { + "description": "Optional. The service validates the request without performing any mutations. The default is false.", + "location": "query", + "type": "boolean" } }, "path": "v1/{+parent}/metadataJobs", @@ -7120,6 +7367,142 @@ }, "type": "object" }, + "GoogleCloudDataplexV1DataDiscoveryResult": { + "description": "The output of a data discovery scan.", + "id": "GoogleCloudDataplexV1DataDiscoveryResult", + "properties": { + "bigqueryPublishing": { + "$ref": "GoogleCloudDataplexV1DataDiscoveryResultBigQueryPublishing", + "description": "Output only. 
Configuration for metadata publishing.", + "readOnly": true + } + }, + "type": "object" + }, + "GoogleCloudDataplexV1DataDiscoveryResultBigQueryPublishing": { + "description": "Describes BigQuery publishing configurations.", + "id": "GoogleCloudDataplexV1DataDiscoveryResultBigQueryPublishing", + "properties": { + "dataset": { + "description": "Output only. The BigQuery dataset to publish to. It takes the form projects/{project_id}/datasets/{dataset_id}. If not set, the service creates a default publishing dataset.", + "readOnly": true, + "type": "string" + } + }, + "type": "object" + }, + "GoogleCloudDataplexV1DataDiscoverySpec": { + "description": "Spec for a data discovery scan.", + "id": "GoogleCloudDataplexV1DataDiscoverySpec", + "properties": { + "bigqueryPublishingConfig": { + "$ref": "GoogleCloudDataplexV1DataDiscoverySpecBigQueryPublishingConfig", + "description": "Optional. Configuration for metadata publishing." + }, + "storageConfig": { + "$ref": "GoogleCloudDataplexV1DataDiscoverySpecStorageConfig", + "description": "Cloud Storage related configurations." + } + }, + "type": "object" + }, + "GoogleCloudDataplexV1DataDiscoverySpecBigQueryPublishingConfig": { + "description": "Describes BigQuery publishing configurations.", + "id": "GoogleCloudDataplexV1DataDiscoverySpecBigQueryPublishingConfig", + "properties": { + "connection": { + "description": "Optional. The BigQuery connection used to create BigLake tables. Must be in the form projects/{project_id}/locations/{location_id}/connections/{connection_id}", + "type": "string" + }, + "tableType": { + "description": "Optional. Determines whether to publish discovered tables as BigLake external tables or non-BigLake external tables.", + "enum": [ + "TABLE_TYPE_UNSPECIFIED", + "EXTERNAL", + "BIGLAKE" + ], + "enumDescriptions": [ + "Table type unspecified.", + "Default. Discovered tables are published as BigQuery external tables whose data is accessed using the credentials of the user querying the table.", + "Discovered tables are published as BigLake external tables whose data is accessed using the credentials of the associated BigQuery connection." + ], + "type": "string" + } + }, + "type": "object" + }, + "GoogleCloudDataplexV1DataDiscoverySpecStorageConfig": { + "description": "Configurations related to Cloud Storage as the data source.", + "id": "GoogleCloudDataplexV1DataDiscoverySpecStorageConfig", + "properties": { + "csvOptions": { + "$ref": "GoogleCloudDataplexV1DataDiscoverySpecStorageConfigCsvOptions", + "description": "Optional. Configuration for CSV data." + }, + "excludePatterns": { + "description": "Optional. Defines the data to exclude during discovery. Provide a list of patterns that identify the data to exclude. For Cloud Storage bucket assets, these patterns are interpreted as glob patterns used to match object names. For BigQuery dataset assets, these patterns are interpreted as patterns to match table names.", + "items": { + "type": "string" + }, + "type": "array" + }, + "includePatterns": { + "description": "Optional. Defines the data to include during discovery when only a subset of the data should be considered. Provide a list of patterns that identify the data to include. For Cloud Storage bucket assets, these patterns are interpreted as glob patterns used to match object names. 
For BigQuery dataset assets, these patterns are interpreted as patterns to match table names.", + "items": { + "type": "string" + }, + "type": "array" + }, + "jsonOptions": { + "$ref": "GoogleCloudDataplexV1DataDiscoverySpecStorageConfigJsonOptions", + "description": "Optional. Configuration for JSON data." + } + }, + "type": "object" + }, + "GoogleCloudDataplexV1DataDiscoverySpecStorageConfigCsvOptions": { + "description": "Describes CSV and similar semi-structured data formats.", + "id": "GoogleCloudDataplexV1DataDiscoverySpecStorageConfigCsvOptions", + "properties": { + "delimiter": { + "description": "Optional. The delimiter that is used to separate values. The default is , (comma).", + "type": "string" + }, + "encoding": { + "description": "Optional. The character encoding of the data. The default is UTF-8.", + "type": "string" + }, + "headerRows": { + "description": "Optional. The number of rows to interpret as header rows that should be skipped when reading data rows.", + "format": "int32", + "type": "integer" + }, + "quote": { + "description": "Optional. The character used to quote column values. Accepts \" (double quotation mark) or ' (single quotation mark). If unspecified, defaults to \" (double quotation mark).", + "type": "string" + }, + "typeInferenceDisabled": { + "description": "Optional. Whether to disable the inference of data types for CSV data. If true, all columns are registered as strings.", + "type": "boolean" + } + }, + "type": "object" + }, + "GoogleCloudDataplexV1DataDiscoverySpecStorageConfigJsonOptions": { + "description": "Describes JSON data format.", + "id": "GoogleCloudDataplexV1DataDiscoverySpecStorageConfigJsonOptions", + "properties": { + "encoding": { + "description": "Optional. The character encoding of the data. The default is UTF-8.", + "type": "string" + }, + "typeInferenceDisabled": { + "description": "Optional. Whether to disable the inference of data types for JSON data. If true, all columns are registered as their primitive types (strings, number, or boolean).", + "type": "boolean" + } + }, + "type": "object" + }, "GoogleCloudDataplexV1DataProfileResult": { "description": "DataProfileResult defines the output of DataProfileScan. Each field of the table will have field type specific profile result.", "id": "GoogleCloudDataplexV1DataProfileResult", @@ -8066,6 +8449,15 @@ "$ref": "GoogleCloudDataplexV1DataSource", "description": "Required. The data source for DataScan." }, + "dataDiscoveryResult": { + "$ref": "GoogleCloudDataplexV1DataDiscoveryResult", + "description": "Output only. The result of a data discovery scan.", + "readOnly": true + }, + "dataDiscoverySpec": { + "$ref": "GoogleCloudDataplexV1DataDiscoverySpec", + "description": "Settings for a data discovery scan." + }, "dataProfileResult": { "$ref": "GoogleCloudDataplexV1DataProfileResult", "description": "Output only. The result of a data profile scan.", @@ -8109,7 +8501,7 @@ "type": "object" }, "name": { - "description": "Output only. The relative resource name of the scan, of the form: projects/{project}/locations/{location_id}/dataScans/{datascan_id}, where project refers to a project_id or project_number and location_id refers to a GCP region.", + "description": "Output only. Identifier. 
The relative resource name of the scan, of the form: projects/{project}/locations/{location_id}/dataScans/{datascan_id}, where project refers to a project_id or project_number and location_id refers to a GCP region.", "readOnly": true, "type": "string" }, @@ -8137,12 +8529,14 @@ "enum": [ "DATA_SCAN_TYPE_UNSPECIFIED", "DATA_QUALITY", - "DATA_PROFILE" + "DATA_PROFILE", + "DATA_DISCOVERY" ], "enumDescriptions": [ "The data scan type is unspecified.", "Data quality scan.", - "Data profile scan." + "Data profile scan.", + "Data discovery scan." ], "readOnly": true, "type": "string" @@ -8162,7 +8556,7 @@ "type": "object" }, "GoogleCloudDataplexV1DataScanEvent": { - "description": "These messages contain information about the execution of a datascan. The monitored resource is 'DataScan' Next ID: 13", + "description": "These messages contain information about the execution of a datascan. The monitored resource is 'DataScan'", "id": "GoogleCloudDataplexV1DataScanEvent", "properties": { "createTime": { @@ -8269,12 +8663,14 @@ "enum": [ "SCAN_TYPE_UNSPECIFIED", "DATA_PROFILE", - "DATA_QUALITY" + "DATA_QUALITY", + "DATA_DISCOVERY" ], "enumDescriptions": [ "An unspecified data scan type.", "Data scan for data profile.", - "Data scan for data quality." + "Data scan for data quality.", + "Data scan for data discovery." ], "type": "string" } @@ -8436,12 +8832,12 @@ "type": "string" }, "latestJobEndTime": { - "description": "The time when the latest DataScanJob ended.", + "description": "Optional. The time when the latest DataScanJob ended.", "format": "google-datetime", "type": "string" }, "latestJobStartTime": { - "description": "The time when the latest DataScanJob started.", + "description": "Optional. The time when the latest DataScanJob started.", "format": "google-datetime", "type": "string" } @@ -8458,6 +8854,16 @@ "readOnly": true, "type": "string" }, + "dataDiscoveryResult": { + "$ref": "GoogleCloudDataplexV1DataDiscoveryResult", + "description": "Output only. The result of a data discovery scan.", + "readOnly": true + }, + "dataDiscoverySpec": { + "$ref": "GoogleCloudDataplexV1DataDiscoverySpec", + "description": "Output only. Settings for a data discovery scan.", + "readOnly": true + }, "dataProfileResult": { "$ref": "GoogleCloudDataplexV1DataProfileResult", "description": "Output only. The result of a data profile scan.", @@ -8490,7 +8896,7 @@ "type": "string" }, "name": { - "description": "Output only. The relative resource name of the DataScanJob, of the form: projects/{project}/locations/{location_id}/dataScans/{datascan_id}/jobs/{job_id}, where project refers to a project_id or project_number and location_id refers to a GCP region.", + "description": "Output only. Identifier. The relative resource name of the DataScanJob, of the form: projects/{project}/locations/{location_id}/dataScans/{datascan_id}/jobs/{job_id}, where project refers to a project_id or project_number and location_id refers to a GCP region.", "readOnly": true, "type": "string" }, @@ -8528,12 +8934,14 @@ "enum": [ "DATA_SCAN_TYPE_UNSPECIFIED", "DATA_QUALITY", - "DATA_PROFILE" + "DATA_PROFILE", + "DATA_DISCOVERY" ], "enumDescriptions": [ "The data scan type is unspecified.", "Data quality scan.", - "Data profile scan." + "Data profile scan.", + "Data discovery scan." 
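For orientation, the new DATA_DISCOVERY scan type and the GoogleCloudDataplexV1DataDiscoverySpec schema above can be exercised from Python roughly as in the sketch below. It uses google-api-python-client; the dataScans.create call, the project/location/scan IDs, and the Cloud Storage resource name are assumptions for illustration only, while the field names come directly from the schemas in this revision.

# Sketch only: field names follow GoogleCloudDataplexV1DataDiscoverySpec above;
# project, location, bucket, and scan IDs are placeholders.
from googleapiclient.discovery import build

dataplex = build("dataplex", "v1")  # assumes application-default credentials
# The new regional endpoints in this revision can be targeted via client_options, e.g.:
# build("dataplex", "v1", client_options={"api_endpoint": "https://dataplex.us-central1.rep.googleapis.com/"})

parent = "projects/my-project/locations/us-central1"  # hypothetical
data_scan = {
    "data": {"resource": "//storage.googleapis.com/projects/_/buckets/my-bucket"},  # hypothetical bucket
    "dataDiscoverySpec": {
        "storageConfig": {
            "includePatterns": ["sales/*"],  # glob patterns matched against object names
            "csvOptions": {
                "delimiter": ",",
                "headerRows": 1,
                "typeInferenceDisabled": False,
            },
        },
        "bigqueryPublishingConfig": {
            "tableType": "BIGLAKE",
            "connection": "projects/my-project/locations/us-central1/connections/my-conn",  # hypothetical
        },
    },
}

# dataScans.create is assumed here; the diff itself only shows patch/delete for dataScans.
dataplex.projects().locations().dataScans().create(
    parent=parent, dataScanId="discovery-scan", body=data_scan
).execute()

# The new `force` query parameter deletes a scan together with its child resources.
dataplex.projects().locations().dataScans().delete(
    name=f"{parent}/dataScans/discovery-scan", force=True
).execute()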
], "readOnly": true, "type": "string" @@ -8675,7 +9083,11 @@ "ENTITY_DELETED", "PARTITION_CREATED", "PARTITION_UPDATED", - "PARTITION_DELETED" + "PARTITION_DELETED", + "TABLE_PUBLISHED", + "TABLE_UPDATED", + "TABLE_IGNORED", + "TABLE_DELETED" ], "enumDescriptions": [ "An unspecified event type.", @@ -8685,7 +9097,11 @@ "An event representing a metadata entity being deleted.", "An event representing a partition being created.", "An event representing a partition being updated.", - "An event representing a partition being deleted." + "An event representing a partition being deleted.", + "An event representing a table being published.", + "An event representing a table being updated.", + "An event representing a table being skipped in publishing.", + "An event representing a table being deleted." ], "type": "string" }, @@ -9532,7 +9948,7 @@ "id": "GoogleCloudDataplexV1ImportItem", "properties": { "aspectKeys": { - "description": "The aspects to modify. Supports the following syntaxes: {aspect_type_reference}: matches aspects that belong to the specified aspect type and are attached directly to the entry. {aspect_type_reference}@{path}: matches aspects that belong to the specified aspect type and path. {aspect_type_reference}@*: matches aspects that belong to the specified aspect type for all paths.Replace {aspect_type_reference} with a reference to the aspect type, in the format {project_id_or_number}.{location_id}.{aspect_type_id}.If you leave this field empty, it is treated as specifying exactly those aspects that are present within the specified entry.In FULL entry sync mode, Dataplex implicitly adds the keys for all of the required aspects of an entry.", + "description": "The aspects to modify. Supports the following syntaxes: {aspect_type_reference}: matches aspects that belong to the specified aspect type and are attached directly to the entry. {aspect_type_reference}@{path}: matches aspects that belong to the specified aspect type and path. @* : matches aspects of the given type for all paths. *@path : matches aspects of all types on the given path. 
Replace {aspect_type_reference} with a reference to the aspect type, in the format {project_id_or_number}.{location_id}.{aspect_type_id}.If you leave this field empty, it is treated as specifying exactly those aspects that are present within the specified entry.In FULL entry sync mode, Dataplex implicitly adds the keys for all of the required aspects of an entry.", "items": { "type": "string" }, diff --git a/discovery/googleapis/dataportability__v1.json b/discovery/googleapis/dataportability__v1.json index 2dc12fe15..e348af9ff 100644 --- a/discovery/googleapis/dataportability__v1.json +++ b/discovery/googleapis/dataportability__v1.json @@ -217,7 +217,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20240828", + "revision": "20241209", "rootUrl": "https://dataportability.googleapis.com/", "servicePath": "", "title": "Data Portability API", diff --git a/discovery/googleapis/dataproc__v1.json b/discovery/googleapis/dataproc__v1.json index 145114305..6f42bf937 100644 --- a/discovery/googleapis/dataproc__v1.json +++ b/discovery/googleapis/dataproc__v1.json @@ -102,7 +102,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20240928", + "revision": "20241025", "rootUrl": "https://dataproc.googleapis.com/", "servicePath": "", "title": "Cloud Dataproc API", @@ -9033,7 +9033,7 @@ "type": "object" }, "PySparkJob": { - "description": "A Dataproc job for running Apache PySpark (https://spark.apache.org/docs/0.9.0/python-programming-guide.html) applications on YARN.", + "description": "A Dataproc job for running Apache PySpark (https://spark.apache.org/docs/latest/api/python/index.html#pyspark-overview) applications on YARN.", "id": "PySparkJob", "properties": { "archiveUris": { @@ -10445,6 +10445,7 @@ "HBASE", "HIVE_WEBHCAT", "HUDI", + "ICEBERG", "JUPYTER", "PRESTO", "TRINO", @@ -10462,6 +10463,7 @@ "HBase. (beta)", "The Hive Web HCatalog (the REST service for accessing HCatalog).", "Hudi.", + "Iceberg.", "The Jupyter Notebook.", "The Presto query engine.", "The Trino query engine.", diff --git a/discovery/googleapis/datastore__v1.json b/discovery/googleapis/datastore__v1.json index 9cf637c48..ac7d72692 100644 --- a/discovery/googleapis/datastore__v1.json +++ b/discovery/googleapis/datastore__v1.json @@ -27,7 +27,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20241008", + "revision": "20241204", "rootUrl": "https://datastore.googleapis.com/", "servicePath": "", "title": "Cloud Datastore API", @@ -534,7 +534,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. 
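The revised aspect_keys syntax on GoogleCloudDataplexV1ImportItem, together with the new validateOnly flag on the metadataJobs create method shown earlier, might be used roughly as sketched below. The aspect-type reference, path, project, and the (empty) job body are placeholders, not values from this diff; only the key syntaxes and the validateOnly parameter come from the discovery document.

# Sketch: the aspectKeys values follow the syntaxes documented in
# GoogleCloudDataplexV1ImportItem; all identifiers are placeholders.
from googleapiclient.discovery import build

dataplex = build("dataplex", "v1")  # assumes application-default credentials

import_item = {
    "aspectKeys": [
        "my-project.us-central1.schema-aspect",            # aspect type attached directly to the entry
        "my-project.us-central1.schema-aspect@Schema.id",  # aspect type at a specific path
        "my-project.us-central1.schema-aspect@*",          # given type, all paths
        "*@Schema.id",                                      # all types on a given path
    ],
}

# The new validateOnly query parameter on metadataJobs.create lets the request be
# checked without performing any mutations (path v1/{+parent}/metadataJobs above).
# The job body is elided; only the validateOnly flag is the point here.
dataplex.projects().locations().metadataJobs().create(
    parent="projects/my-project/locations/us-central1",  # hypothetical
    validateOnly=True,
    body={},  # a real request would carry the metadata import job spec
).execute()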
Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/projects/{projectsId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "datastore.projects.operations.cancel", @@ -1066,6 +1066,51 @@ }, "type": "object" }, + "FindNearest": { + "description": "Nearest Neighbors search config. The ordering provided by FindNearest supersedes the order_by stage. If multiple documents have the same vector distance, the returned document order is not guaranteed to be stable between queries.", + "id": "FindNearest", + "properties": { + "distanceMeasure": { + "description": "Required. The Distance Measure to use, required.", + "enum": [ + "DISTANCE_MEASURE_UNSPECIFIED", + "EUCLIDEAN", + "COSINE", + "DOT_PRODUCT" + ], + "enumDescriptions": [ + "Should not be set.", + "Measures the EUCLIDEAN distance between the vectors. See [Euclidean](https://en.wikipedia.org/wiki/Euclidean_distance) to learn more. The resulting distance decreases the more similar two vectors are.", + "COSINE distance compares vectors based on the angle between them, which allows you to measure similarity that isn't based on the vectors magnitude. We recommend using DOT_PRODUCT with unit normalized vectors instead of COSINE distance, which is mathematically equivalent with better performance. See [Cosine Similarity](https://en.wikipedia.org/wiki/Cosine_similarity) to learn more about COSINE similarity and COSINE distance. The resulting COSINE distance decreases the more similar two vectors are.", + "Similar to cosine but is affected by the magnitude of the vectors. See [Dot Product](https://en.wikipedia.org/wiki/Dot_product) to learn more. The resulting distance increases the more similar two vectors are." + ], + "type": "string" + }, + "distanceResultProperty": { + "description": "Optional. Optional name of the field to output the result of the vector distance calculation. Must conform to entity property limitations.", + "type": "string" + }, + "distanceThreshold": { + "description": "Optional. Option to specify a threshold for which no less similar documents will be returned. The behavior of the specified `distance_measure` will affect the meaning of the distance threshold. Since DOT_PRODUCT distances increase when the vectors are more similar, the comparison is inverted. * For EUCLIDEAN, COSINE: WHERE distance <= distance_threshold * For DOT_PRODUCT: WHERE distance >= distance_threshold", + "format": "double", + "type": "number" + }, + "limit": { + "description": "Required. The number of nearest neighbors to return. Must be a positive integer of no more than 100.", + "format": "int32", + "type": "integer" + }, + "queryVector": { + "$ref": "Value", + "description": "Required. The query vector that we are searching on. Must be a vector of no more than 2048 dimensions." + }, + "vectorProperty": { + "$ref": "PropertyReference", + "description": "Required. An indexed vector property to search upon. Only documents which contain vectors whose dimensionality match the query_vector can be returned." 
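The new FindNearest message plugs into Query as a final stage (the stage-ordering note further down lists find_nearest after filters, offset, and limit). A rough Python sketch of a runQuery request follows; the kind, property names, and the arrayValue encoding of the query vector are assumptions, while the FindNearest field names are taken from the schema above.

# Hedged sketch of a Datastore runQuery call using the new findNearest stage.
# Kind and property names are placeholders; the query vector is assumed to be
# encoded as an arrayValue of doubleValues (the encoding is not spelled out in this diff).
from googleapiclient.discovery import build

datastore = build("datastore", "v1")  # assumes application-default credentials

body = {
    "query": {
        "kind": [{"name": "Document"}],               # hypothetical kind
        "findNearest": {
            "vectorProperty": {"name": "embedding"},  # indexed vector property (placeholder)
            "queryVector": {
                "arrayValue": {"values": [{"doubleValue": v} for v in [0.12, -0.48, 0.33]]}
            },
            "distanceMeasure": "DOT_PRODUCT",
            "limit": 10,                              # positive integer, at most 100
            "distanceResultProperty": "distance",     # optional output field
            "distanceThreshold": 0.25,                # for DOT_PRODUCT: distance >= threshold
        },
    }
}

datastore.projects().runQuery(projectId="my-project", body=body).execute()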
+ } + }, + "type": "object" + }, "GoogleDatastoreAdminV1CommonMetadata": { "description": "Metadata common to all Datastore Admin operations.", "id": "GoogleDatastoreAdminV1CommonMetadata", @@ -2230,7 +2275,7 @@ "type": "object" }, "Query": { - "description": "A query for entities.", + "description": "A query for entities. The query stages are executed in the following order: 1. kind 2. filter 3. projection 4. order + start_cursor + end_cursor 5. offset 6. limit 7. find_nearest", "id": "Query", "properties": { "distinctOn": { @@ -2249,6 +2294,10 @@ "$ref": "Filter", "description": "The filter to apply." }, + "findNearest": { + "$ref": "FindNearest", + "description": "Optional. A potential Nearest Neighbors Search. Applies after all other filters and ordering. Finds the closest vector embeddings to the given query vector." + }, "kind": { "description": "The kinds to query (if empty, returns entities of all kinds). Currently at most 1 kind may be specified.", "items": { diff --git a/discovery/googleapis/datastream__v1.json b/discovery/googleapis/datastream__v1.json index 3805f0560..efb04fabd 100644 --- a/discovery/googleapis/datastream__v1.json +++ b/discovery/googleapis/datastream__v1.json @@ -25,7 +25,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20240930", + "revision": "20241204", "rootUrl": "https://datastream.googleapis.com/", "servicePath": "", "title": "Datastream API", @@ -411,7 +411,7 @@ "type": "boolean" }, "name": { - "description": "Output only. The resource's name.", + "description": "Output only. Identifier. The resource's name.", "location": "path", "pattern": "^projects/[^/]+/locations/[^/]+/connectionProfiles/[^/]+$", "required": true, @@ -450,7 +450,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "datastream.projects.locations.operations.cancel", @@ -1034,7 +1034,7 @@ "type": "boolean" }, "name": { - "description": "Output only. The stream's name.", + "description": "Output only. Identifier. 
The stream's name.", "location": "path", "pattern": "^projects/[^/]+/locations/[^/]+/streams/[^/]+$", "required": true, @@ -1396,6 +1396,27 @@ "properties": {}, "type": "object" }, + "BinaryLogParser": { + "description": "Configuration to use Binary Log Parser CDC technique.", + "id": "BinaryLogParser", + "properties": { + "logFileDirectories": { + "$ref": "LogFileDirectories", + "description": "Use Oracle directories." + }, + "oracleAsmLogFileAccess": { + "$ref": "OracleAsmLogFileAccess", + "description": "Use Oracle ASM." + } + }, + "type": "object" + }, + "BinaryLogPosition": { + "description": "Use Binary log position based replication.", + "id": "BinaryLogPosition", + "properties": {}, + "type": "object" + }, "CancelOperationRequest": { "description": "The request message for Operations.CancelOperation.", "id": "CancelOperationRequest", @@ -1459,7 +1480,7 @@ "description": "MySQL ConnectionProfile configuration." }, "name": { - "description": "Output only. The resource's name.", + "description": "Output only. Identifier. The resource's name.", "readOnly": true, "type": "string" }, @@ -1726,6 +1747,12 @@ }, "type": "object" }, + "Gtid": { + "description": "Use GTID based replication.", + "id": "Gtid", + "properties": {}, + "type": "object" + }, "JsonFileFormat": { "description": "JSON file format configuration.", "id": "JsonFileFormat", @@ -1949,6 +1976,27 @@ }, "type": "object" }, + "LogFileDirectories": { + "description": "Configuration to specify the Oracle directories to access the log files.", + "id": "LogFileDirectories", + "properties": { + "archivedLogDirectory": { + "description": "Required. Oracle directory for archived logs.", + "type": "string" + }, + "onlineLogDirectory": { + "description": "Required. Oracle directory for online logs.", + "type": "string" + } + }, + "type": "object" + }, + "LogMiner": { + "description": "Configuration to use LogMiner CDC method.", + "id": "LogMiner", + "properties": {}, + "type": "object" + }, "LookupStreamObjectRequest": { "description": "Request for looking up a specific stream object by its source object identifier.", "id": "LookupStreamObjectRequest", @@ -2114,10 +2162,18 @@ "description": "MySQL source configuration", "id": "MysqlSourceConfig", "properties": { + "binaryLogPosition": { + "$ref": "BinaryLogPosition", + "description": "Use Binary log position based replication." + }, "excludeObjects": { "$ref": "MysqlRdbms", "description": "MySQL objects to exclude from the stream." }, + "gtid": { + "$ref": "Gtid", + "description": "Use GTID based replication." + }, "includeObjects": { "$ref": "MysqlRdbms", "description": "MySQL objects to retrieve from the source." @@ -2250,7 +2306,7 @@ "type": "string" }, "requestedCancellation": { - "description": "Output only. Identifies whether the user has requested cancellation of the operation. Operations that have successfully been cancelled have Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Output only. Identifies whether the user has requested cancellation of the operation. Operations that have successfully been cancelled have google.longrunning.Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", "readOnly": true, "type": "boolean" }, @@ -2277,6 +2333,51 @@ }, "type": "object" }, + "OracleAsmConfig": { + "description": "Configuration for Oracle Automatic Storage Management (ASM) connection. .", + "id": "OracleAsmConfig", + "properties": { + "asmService": { + "description": "Required. 
ASM service name for the Oracle ASM connection.", + "type": "string" + }, + "connectionAttributes": { + "additionalProperties": { + "type": "string" + }, + "description": "Optional. Connection string attributes", + "type": "object" + }, + "hostname": { + "description": "Required. Hostname for the Oracle ASM connection.", + "type": "string" + }, + "oracleSslConfig": { + "$ref": "OracleSslConfig", + "description": "Optional. SSL configuration for the Oracle connection." + }, + "password": { + "description": "Optional. Password for the Oracle ASM connection.", + "type": "string" + }, + "port": { + "description": "Required. Port for the Oracle ASM connection.", + "format": "int32", + "type": "integer" + }, + "username": { + "description": "Required. Username for the Oracle ASM connection.", + "type": "string" + } + }, + "type": "object" + }, + "OracleAsmLogFileAccess": { + "description": "Configuration to use Oracle ASM to access the log files.", + "id": "OracleAsmLogFileAccess", + "properties": {}, + "type": "object" + }, "OracleColumn": { "description": "Oracle Column.", "id": "OracleColumn", @@ -2358,6 +2459,10 @@ "description": "Required. Hostname for the Oracle connection.", "type": "string" }, + "oracleAsmConfig": { + "$ref": "OracleAsmConfig", + "description": "Optional. Configuration for Oracle ASM connection." + }, "oracleSslConfig": { "$ref": "OracleSslConfig", "description": "Optional. SSL configuration for the Oracle connection." @@ -2371,6 +2476,10 @@ "format": "int32", "type": "integer" }, + "secretManagerStoredPassword": { + "description": "Optional. A reference to a Secret Manager resource name storing the Oracle connection password. Mutually exclusive with the `password` field.", + "type": "string" + }, "username": { "description": "Required. Username for the Oracle connection.", "type": "string" @@ -2426,6 +2535,10 @@ "description": "Oracle data source configuration", "id": "OracleSourceConfig", "properties": { + "binaryLogParser": { + "$ref": "BinaryLogParser", + "description": "Use Binary Log Parser." + }, "dropLargeObjects": { "$ref": "DropLargeObjects", "description": "Drop large object values." @@ -2438,6 +2551,10 @@ "$ref": "OracleRdbms", "description": "Oracle objects to include in the stream." }, + "logMiner": { + "$ref": "LogMiner", + "description": "Use LogMiner." + }, "maxConcurrentBackfillTasks": { "description": "Maximum number of concurrent backfill tasks. The number should be non-negative. If not set (or set to 0), the system's default value is used.", "format": "int32", @@ -2548,7 +2665,7 @@ "type": "object" }, "PostgresqlProfile": { - "description": "PostgreSQL database profile. Next ID: 7.", + "description": "PostgreSQL database profile.", "id": "PostgresqlProfile", "properties": { "database": { @@ -2680,7 +2797,7 @@ "type": "object" }, "name": { - "description": "Output only. The resource's name.", + "description": "Output only. Identifier. The resource's name.", "readOnly": true, "type": "string" }, @@ -2760,7 +2877,7 @@ "type": "object" }, "name": { - "description": "Output only. The resource's name.", + "description": "Output only. Identifier. The resource's name.", "readOnly": true, "type": "string" }, @@ -2871,6 +2988,10 @@ "oracleScnPosition": { "$ref": "OracleScnPosition", "description": "Oracle SCN to start replicating from." + }, + "sqlServerLsnPosition": { + "$ref": "SqlServerLsnPosition", + "description": "SqlServer LSN to start replicating from." 
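Taken together, the new Datastream messages above (BinaryLogParser, LogFileDirectories, LogMiner, OracleAsmConfig, Gtid, BinaryLogPosition, SqlServerLsnPosition) shape source configuration roughly as in this sketch. The hostnames, directories, secret names, and LSN value are placeholders; only the field names come from the schemas in this revision.

# Hedged sketch: request-body fragments only, mirroring the new Datastream schemas.
# All identifiers (hosts, directories, secrets, LSN) are placeholders.

# Oracle connection profile using the new ASM block and Secret Manager password reference.
oracle_profile = {
    "hostname": "oracle.example.internal",
    "port": 1521,
    "username": "datastream",
    "secretManagerStoredPassword": "projects/my-project/secrets/oracle-pw/versions/latest",  # hypothetical
    "oracleAsmConfig": {
        "hostname": "asm.example.internal",
        "port": 1521,
        "username": "asm_user",
        "asmService": "+ASM",
    },
}

# Oracle source config choosing the Binary Log Parser CDC method over LogMiner.
oracle_source_config = {
    "binaryLogParser": {
        "logFileDirectories": {
            "archivedLogDirectory": "ARCHIVE_LOG_DIR",  # Oracle directory objects (placeholders)
            "onlineLogDirectory": "ONLINE_LOG_DIR",
        }
        # or: "oracleAsmLogFileAccess": {} to read log files via Oracle ASM
    }
    # or: "logMiner": {} to keep the LogMiner-based method
}

# MySQL source config choosing GTID-based replication over binary log position.
mysql_source_config = {"gtid": {}}  # alternative: {"binaryLogPosition": {}}

# CDC start position for a SQL Server stream using the new LSN option.
cdc_start_position = {"sqlServerLsnPosition": {"lsn": "0000002A:000001D8:0003"}}  # placeholder LSN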
} }, "type": "object" @@ -2924,6 +3045,17 @@ }, "type": "object" }, + "SqlServerLsnPosition": { + "description": "SQL Server LSN position", + "id": "SqlServerLsnPosition", + "properties": { + "lsn": { + "description": "Required. Log sequence number (LSN) from where Logs will be read", + "type": "string" + } + }, + "type": "object" + }, "SqlServerObjectIdentifier": { "description": "SQLServer data source object identifier.", "id": "SqlServerObjectIdentifier", @@ -3175,7 +3307,7 @@ "type": "string" }, "name": { - "description": "Output only. The stream's name.", + "description": "Output only. Identifier. The stream's name.", "readOnly": true, "type": "string" }, @@ -3251,7 +3383,7 @@ "type": "array" }, "name": { - "description": "Output only. The object resource's name.", + "description": "Output only. Identifier. The object resource's name.", "readOnly": true, "type": "string" }, diff --git a/discovery/googleapis/deploymentmanager__v2.json b/discovery/googleapis/deploymentmanager__v2.json index 49ab3236b..604cd1f17 100644 --- a/discovery/googleapis/deploymentmanager__v2.json +++ b/discovery/googleapis/deploymentmanager__v2.json @@ -34,7 +34,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20240214", + "revision": "20241122", "rootUrl": "https://deploymentmanager.googleapis.com/", "servicePath": "", "title": "Cloud Deployment Manager V2 API", @@ -186,6 +186,10 @@ "required": true, "type": "string" }, + "header.bypassBillingFilter": { + "location": "query", + "type": "boolean" + }, "project": { "description": "The project ID for this request.", "location": "path", @@ -220,6 +224,10 @@ "required": true, "type": "string" }, + "header.bypassBillingFilter": { + "location": "query", + "type": "boolean" + }, "project": { "description": "The project ID for this request.", "location": "path", @@ -249,6 +257,10 @@ "resource" ], "parameters": { + "header.bypassBillingFilter": { + "location": "query", + "type": "boolean" + }, "optionsRequestedPolicyVersion": { "description": "Requested IAM Policy version.", "format": "int32", @@ -302,6 +314,10 @@ "location": "query", "type": "string" }, + "header.bypassBillingFilter": { + "location": "query", + "type": "boolean" + }, "preview": { "description": "If set to true, creates a deployment and creates \"shell\" resources but does not actually instantiate these resources. This allows you to preview what your deployment looks like. After previewing a deployment, you can deploy your resources by making a request with the `update()` method or you can use the `cancelPreview()` method to cancel the preview altogether. Note that the deployment will still exist after you cancel the preview and you must separately delete this deployment if you want to remove it.", "location": "query", @@ -423,6 +439,10 @@ "required": true, "type": "string" }, + "header.bypassBillingFilter": { + "location": "query", + "type": "boolean" + }, "preview": { "default": "false", "description": "If set to true, updates the deployment and creates and updates the \"shell\" resources but does not actually alter or instantiate these resources. This allows you to preview what your deployment will look like. You can use this intent to preview how an update would affect your deployment. You must provide a `target.config` with a configuration if this is set to true. After previewing a deployment, you can deploy your resources by making a request with the `update()` or you can `cancelPreview()` to remove the preview altogether. 
Note that the deployment will still exist after you cancel the preview and you must separately delete this deployment if you want to remove it.", @@ -533,6 +553,10 @@ "resource" ], "parameters": { + "header.bypassBillingFilter": { + "location": "query", + "type": "boolean" + }, "project": { "description": "Project ID for this request.", "location": "path", @@ -605,6 +629,10 @@ "required": true, "type": "string" }, + "header.bypassBillingFilter": { + "location": "query", + "type": "boolean" + }, "preview": { "default": "false", "description": "If set to true, updates the deployment and creates and updates the \"shell\" resources but does not actually alter or instantiate these resources. This allows you to preview what your deployment will look like. You can use this intent to preview how an update would affect your deployment. You must provide a `target.config` with a configuration if this is set to true. After previewing a deployment, you can deploy your resources by making a request with the `update()` or you can `cancelPreview()` to remove the preview altogether. Note that the deployment will still exist after you cancel the preview and you must separately delete this deployment if you want to remove it.", @@ -653,6 +681,10 @@ "required": true, "type": "string" }, + "header.bypassBillingFilter": { + "location": "query", + "type": "boolean" + }, "manifest": { "description": "The name of the manifest for this request.", "location": "path", @@ -752,6 +784,10 @@ "operation" ], "parameters": { + "header.bypassBillingFilter": { + "location": "query", + "type": "boolean" + }, "operation": { "description": "The name of the operation for this request.", "location": "path", @@ -850,6 +886,10 @@ "required": true, "type": "string" }, + "header.bypassBillingFilter": { + "location": "query", + "type": "boolean" + }, "project": { "description": "The project ID for this request.", "location": "path", @@ -1115,6 +1155,24 @@ }, "type": "object" }, + "DebugInfo": { + "description": "Describes additional debugging info.", + "id": "DebugInfo", + "properties": { + "detail": { + "description": "Additional debugging information provided by the server.", + "type": "string" + }, + "stackEntries": { + "description": "The stack trace entries indicating where the error occurred.", + "items": { + "type": "string" + }, + "type": "array" + } + }, + "type": "object" + }, "Deployment": { "id": "Deployment", "properties": { @@ -1264,6 +1322,28 @@ }, "type": "object" }, + "ErrorInfo": { + "description": "Describes the cause of the error with structured details. Example of an error when contacting the \"pubsub.googleapis.com\" API when it is not enabled: { \"reason\": \"API_DISABLED\" \"domain\": \"googleapis.com\" \"metadata\": { \"resource\": \"projects/123\", \"service\": \"pubsub.googleapis.com\" } } This response indicates that the pubsub.googleapis.com API is not enabled. Example of an error that is returned when attempting to create a Spanner instance in a region that is out of stock: { \"reason\": \"STOCKOUT\" \"domain\": \"spanner.googleapis.com\", \"metadata\": { \"availableRegions\": \"us-central1,us-east2\" } }", + "id": "ErrorInfo", + "properties": { + "domain": { + "description": "The logical grouping to which the \"reason\" belongs. The error domain is typically the registered service name of the tool or product that generates the error. Example: \"pubsub.googleapis.com\". 
If the error is generated by some common infrastructure, the error domain must be a globally unique value that identifies the infrastructure. For Google API infrastructure, the error domain is \"googleapis.com\".", + "type": "string" + }, + "metadata": { + "additionalProperties": { + "type": "string" + }, + "description": "Additional structured details about this error. Keys must match /a-z+/ but should ideally be lowerCamelCase. Also they must be limited to 64 characters in length. When identifying the current value of an exceeded limit, the units should be contained in the key, not the value. For example, rather than {\"instanceLimit\": \"100/request\"}, should be returned as, {\"instanceLimitPerRequest\": \"100\"}, if the client exceeds the number of instances that can be created in a single (batch) request.", + "type": "object" + }, + "reason": { + "description": "The reason of the error. This is a constant value that identifies the proximate cause of the error. Error reasons are unique within a particular domain of errors. This should be at most 63 characters and match a regular expression of `A-Z+[A-Z0-9]`, which represents UPPER_SNAKE_CASE.", + "type": "string" + } + }, + "type": "object" + }, "Expr": { "description": "Represents a textual expression in the Common Expression Language (CEL) syntax. CEL is a C-like expression language. The syntax and semantics of CEL are documented at https://github.com/google/cel-spec. Example (Comparison): title: \"Summary size limit\" description: \"Determines if a summary is less than 100 chars\" expression: \"document.summary.size() < 100\" Example (Equality): title: \"Requestor is owner\" description: \"Determines if requestor is the document owner\" expression: \"document.owner == request.auth.claims.email\" Example (Logic): title: \"Public documents\" description: \"Determine whether the document should be publicly visible\" expression: \"document.type != 'private' && document.type != 'internal'\" Example (Data Manipulation): title: \"Notification string\" description: \"Create a notification string with a timestamp.\" expression: \"'New message received at ' + string(document.create_time)\" The exact variables and functions that may be referenced within an expression are determined by the service that evaluates it. See the service documentation for additional information.", "id": "Expr", @@ -1305,6 +1385,39 @@ "policy": { "$ref": "Policy", "description": "REQUIRED: The complete policy to be applied to the 'resource'. The size of the policy is limited to a few 10s of KB. An empty policy is in general a valid policy but certain services (like Projects) might reject them." + }, + "updateMask": { + "format": "google-fieldmask", + "type": "string" + } + }, + "type": "object" + }, + "Help": { + "description": "Provides links to documentation or for performing an out of band action. 
For example, if a quota check failed with an error indicating the calling project hasn't enabled the accessed service, this can contain a URL pointing directly to the right place in the developer console to flip the bit.", + "id": "Help", + "properties": { + "links": { + "description": "URL(s) pointing to additional information on handling the current error.", + "items": { + "$ref": "HelpLink" + }, + "type": "array" + } + }, + "type": "object" + }, + "HelpLink": { + "description": "Describes a URL link.", + "id": "HelpLink", + "properties": { + "description": { + "description": "Describes what the link offers.", + "type": "string" + }, + "url": { + "description": "The URL of the link.", + "type": "string" } }, "type": "object" @@ -1336,6 +1449,21 @@ }, "type": "object" }, + "LocalizedMessage": { + "description": "Provides a localized error message that is safe to return to the user which can be attached to an RPC error.", + "id": "LocalizedMessage", + "properties": { + "locale": { + "description": "The locale used following the specification defined at https://www.rfc-editor.org/rfc/bcp/bcp47.txt. Examples are: \"en-US\", \"fr-CH\", \"es-MX\"", + "type": "string" + }, + "message": { + "description": "The localized error message in the above locale.", + "type": "string" + } + }, + "type": "object" + }, "Manifest": { "id": "Manifest", "properties": { @@ -1432,10 +1560,41 @@ "description": "[Output Only] The array of errors encountered while processing this operation.", "items": { "properties": { + "arguments": { + "description": "[Output Only] Optional error details WARNING: DO NOT MAKE VISIBLE This is for internal use-only (like componentization) (thus the visibility \"none\") and in case of public exposure it is strongly recommended to follow pattern of: https://aip.dev/193 and expose as details field.", + "items": { + "type": "string" + }, + "type": "array" + }, "code": { "description": "[Output Only] The error type identifier for this error.", "type": "string" }, + "debugInfo": { + "$ref": "DebugInfo" + }, + "errorDetails": { + "description": "[Output Only] An optional list of messages that contain the error details. There is a set of defined message types to use for providing details.The syntax depends on the error code. For example, QuotaExceededInfo will have details when the error code is QUOTA_EXCEEDED.", + "items": { + "properties": { + "errorInfo": { + "$ref": "ErrorInfo" + }, + "help": { + "$ref": "Help" + }, + "localizedMessage": { + "$ref": "LocalizedMessage" + }, + "quotaInfo": { + "$ref": "QuotaExceededInfo" + } + }, + "type": "object" + }, + "type": "array" + }, "location": { "description": "[Output Only] Indicates the field in the request that caused the error. This property is optional.", "type": "string" @@ -1503,6 +1662,10 @@ "description": "[Output Only] Server-defined URL for the resource.", "type": "string" }, + "selfLinkWithId": { + "description": "[Output Only] Server-defined URL for this resource with the resource id.", + "type": "string" + }, "setCommonInstanceMetadataOperationMetadata": { "$ref": "SetCommonInstanceMetadataOperationMetadata", "description": "[Output Only] If the operation is for projects.setCommonInstanceMetadata, this field will contain information on all underlying zonal actions and their state." @@ -1535,7 +1698,7 @@ "type": "string" }, "targetLink": { - "description": "[Output Only] The URL of the resource that the operation modifies. 
For operations related to creating a snapshot, this points to the persistent disk that the snapshot was created from.", + "description": "[Output Only] The URL of the resource that the operation modifies. For operations related to creating a snapshot, this points to the disk that the snapshot was created from.", "type": "string" }, "user": { @@ -1566,6 +1729,10 @@ "CLEANUP_FAILED", "FIELD_VALUE_OVERRIDEN", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", + "NETWORK_ENDPOINT_NOT_DETACHED", + "PAGE_MISSING_RESULTS", + "SSL_POLICY_ENABLED_FEATURES_NOT_FETCHED", + "RESOURCE_NOT_FOUND_WARNING", "MISSING_TYPE_DEPENDENCY", "EXTERNAL_API_WARNING", "SCHEMA_VALIDATION_IGNORED", @@ -1576,7 +1743,15 @@ "LARGE_DEPLOYMENT_WARNING", "NEXT_HOP_INSTANCE_HAS_NO_IPV6_INTERFACE", "INVALID_HEALTH_CHECK_FOR_DYNAMIC_WIEGHTED_LB", - "LIST_OVERHEAD_QUOTA_EXCEED" + "LIST_OVERHEAD_QUOTA_EXCEED", + "QUOTA_INFO_UNAVAILABLE", + "RESOURCE_USES_GLOBAL_DNS", + "RATE_LIMIT_EXCEEDED", + "RESERVED_ENTRY_135", + "RESERVED_ENTRY_136", + "RESERVED_ENTRY_139", + "RESERVED_ENTRY_141", + "RESERVED_ENTRY_142" ], "enumDeprecated": [ false, @@ -1606,6 +1781,18 @@ false, false, false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, false ], "enumDescriptions": [ @@ -1626,6 +1813,10 @@ "Warning about failed cleanup of transient changes made by a failed operation.", "Warning that value of a field has been overridden. Deprecated unused field.", "Warning that a resource is in use.", + "Warning that network endpoint was not detached.", + "Current page contains less results than requested but a next page token exists.", + "Warning that SSL policy resource in the response does not contain information about the list of enabled features.", + "Warning that a resource is not found.", "A resource depends on a missing type", "Warning that is present in an external api call", "When a resource schema validation is ignored.", @@ -1636,7 +1827,15 @@ "When deploying a deployment with a exceedingly large number of resources", "The route's nextHopInstance URL refers to an instance that does not have an ipv6 interface on the same network as the route.", "A WEIGHTED_MAGLEV backend service is associated with a health check that is not of type HTTP/HTTPS/HTTP2.", - "Resource can't be retrieved due to list overhead quota exceed which captures the amount of resources filtered out by user-defined list filter." + "Resource can't be retrieved due to list overhead quota exceed which captures the amount of resources filtered out by user-defined list filter.", + "Quota information is not available to client requests (e.g: regions.list).", + "Indicates that a VM is using global DNS. Can also be used to indicate that a resource has attributes that could result in the creation of a VM that uses global DNS.", + "Resource can't be retrieved due to api quota exceeded.", + "Reserved entries for quickly adding new warnings without breaking dependent clients.", + "", + "", + "", + "" ], "type": "string" }, @@ -1722,6 +1921,50 @@ }, "type": "object" }, + "QuotaExceededInfo": { + "description": "Additional details for quota exceeded error for resource quota.", + "id": "QuotaExceededInfo", + "properties": { + "dimensions": { + "additionalProperties": { + "type": "string" + }, + "description": "The map holding related quota dimensions.", + "type": "object" + }, + "futureLimit": { + "description": "Future quota limit being rolled out. 
The limit's unit depends on the quota type or metric.", + "format": "double", + "type": "number" + }, + "limit": { + "description": "Current effective quota limit. The limit's unit depends on the quota type or metric.", + "format": "double", + "type": "number" + }, + "limitName": { + "description": "The name of the quota limit.", + "type": "string" + }, + "metricName": { + "description": "The Compute Engine quota metric name.", + "type": "string" + }, + "rolloutStatus": { + "description": "Rollout status of the future quota limit.", + "enum": [ + "ROLLOUT_STATUS_UNSPECIFIED", + "IN_PROGRESS" + ], + "enumDescriptions": [ + "ROLLOUT_STATUS_UNSPECIFIED - Rollout status is not specified. The default value.", + "IN_PROGRESS - A rollout is in process which will change the limit value to future limit." + ], + "type": "string" + } + }, + "type": "object" + }, "Resource": { "id": "Resource", "properties": { @@ -1793,6 +2036,10 @@ "CLEANUP_FAILED", "FIELD_VALUE_OVERRIDEN", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", + "NETWORK_ENDPOINT_NOT_DETACHED", + "PAGE_MISSING_RESULTS", + "SSL_POLICY_ENABLED_FEATURES_NOT_FETCHED", + "RESOURCE_NOT_FOUND_WARNING", "MISSING_TYPE_DEPENDENCY", "EXTERNAL_API_WARNING", "SCHEMA_VALIDATION_IGNORED", @@ -1803,7 +2050,15 @@ "LARGE_DEPLOYMENT_WARNING", "NEXT_HOP_INSTANCE_HAS_NO_IPV6_INTERFACE", "INVALID_HEALTH_CHECK_FOR_DYNAMIC_WIEGHTED_LB", - "LIST_OVERHEAD_QUOTA_EXCEED" + "LIST_OVERHEAD_QUOTA_EXCEED", + "QUOTA_INFO_UNAVAILABLE", + "RESOURCE_USES_GLOBAL_DNS", + "RATE_LIMIT_EXCEEDED", + "RESERVED_ENTRY_135", + "RESERVED_ENTRY_136", + "RESERVED_ENTRY_139", + "RESERVED_ENTRY_141", + "RESERVED_ENTRY_142" ], "enumDeprecated": [ false, @@ -1833,6 +2088,18 @@ false, false, false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, false ], "enumDescriptions": [ @@ -1853,6 +2120,10 @@ "Warning about failed cleanup of transient changes made by a failed operation.", "Warning that value of a field has been overridden. Deprecated unused field.", "Warning that a resource is in use.", + "Warning that network endpoint was not detached.", + "Current page contains less results than requested but a next page token exists.", + "Warning that SSL policy resource in the response does not contain information about the list of enabled features.", + "Warning that a resource is not found.", "A resource depends on a missing type", "Warning that is present in an external api call", "When a resource schema validation is ignored.", @@ -1863,7 +2134,15 @@ "When deploying a deployment with a exceedingly large number of resources", "The route's nextHopInstance URL refers to an instance that does not have an ipv6 interface on the same network as the route.", "A WEIGHTED_MAGLEV backend service is associated with a health check that is not of type HTTP/HTTPS/HTTP2.", - "Resource can't be retrieved due to list overhead quota exceed which captures the amount of resources filtered out by user-defined list filter." + "Resource can't be retrieved due to list overhead quota exceed which captures the amount of resources filtered out by user-defined list filter.", + "Quota information is not available to client requests (e.g: regions.list).", + "Indicates that a VM is using global DNS. 
Can also be used to indicate that a resource has attributes that could result in the creation of a VM that uses global DNS.", + "Resource can't be retrieved due to api quota exceeded.", + "Reserved entries for quickly adding new warnings without breaking dependent clients.", + "", + "", + "", + "" ], "type": "string" }, @@ -1921,10 +2200,41 @@ "description": "[Output Only] The array of errors encountered while processing this operation.", "items": { "properties": { + "arguments": { + "description": "[Output Only] Optional error details WARNING: DO NOT MAKE VISIBLE This is for internal use-only (like componentization) (thus the visibility \"none\") and in case of public exposure it is strongly recommended to follow pattern of: https://aip.dev/193 and expose as details field.", + "items": { + "type": "string" + }, + "type": "array" + }, "code": { "description": "[Output Only] The error type identifier for this error.", "type": "string" }, + "debugInfo": { + "$ref": "DebugInfo" + }, + "errorDetails": { + "description": "[Output Only] An optional list of messages that contain the error details. There is a set of defined message types to use for providing details.The syntax depends on the error code. For example, QuotaExceededInfo will have details when the error code is QUOTA_EXCEEDED.", + "items": { + "properties": { + "errorInfo": { + "$ref": "ErrorInfo" + }, + "help": { + "$ref": "Help" + }, + "localizedMessage": { + "$ref": "LocalizedMessage" + }, + "quotaInfo": { + "$ref": "QuotaExceededInfo" + } + }, + "type": "object" + }, + "type": "array" + }, "location": { "description": "[Output Only] Indicates the field in the request that caused the error. This property is optional.", "type": "string" @@ -2015,6 +2325,10 @@ "CLEANUP_FAILED", "FIELD_VALUE_OVERRIDEN", "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING", + "NETWORK_ENDPOINT_NOT_DETACHED", + "PAGE_MISSING_RESULTS", + "SSL_POLICY_ENABLED_FEATURES_NOT_FETCHED", + "RESOURCE_NOT_FOUND_WARNING", "MISSING_TYPE_DEPENDENCY", "EXTERNAL_API_WARNING", "SCHEMA_VALIDATION_IGNORED", @@ -2025,7 +2339,15 @@ "LARGE_DEPLOYMENT_WARNING", "NEXT_HOP_INSTANCE_HAS_NO_IPV6_INTERFACE", "INVALID_HEALTH_CHECK_FOR_DYNAMIC_WIEGHTED_LB", - "LIST_OVERHEAD_QUOTA_EXCEED" + "LIST_OVERHEAD_QUOTA_EXCEED", + "QUOTA_INFO_UNAVAILABLE", + "RESOURCE_USES_GLOBAL_DNS", + "RATE_LIMIT_EXCEEDED", + "RESERVED_ENTRY_135", + "RESERVED_ENTRY_136", + "RESERVED_ENTRY_139", + "RESERVED_ENTRY_141", + "RESERVED_ENTRY_142" ], "enumDeprecated": [ false, @@ -2055,6 +2377,18 @@ false, false, false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, false ], "enumDescriptions": [ @@ -2075,6 +2409,10 @@ "Warning about failed cleanup of transient changes made by a failed operation.", "Warning that value of a field has been overridden. 
Deprecated unused field.", "Warning that a resource is in use.", + "Warning that network endpoint was not detached.", + "Current page contains less results than requested but a next page token exists.", + "Warning that SSL policy resource in the response does not contain information about the list of enabled features.", + "Warning that a resource is not found.", "A resource depends on a missing type", "Warning that is present in an external api call", "When a resource schema validation is ignored.", @@ -2085,7 +2423,15 @@ "When deploying a deployment with a exceedingly large number of resources", "The route's nextHopInstance URL refers to an instance that does not have an ipv6 interface on the same network as the route.", "A WEIGHTED_MAGLEV backend service is associated with a health check that is not of type HTTP/HTTPS/HTTP2.", - "Resource can't be retrieved due to list overhead quota exceed which captures the amount of resources filtered out by user-defined list filter." + "Resource can't be retrieved due to list overhead quota exceed which captures the amount of resources filtered out by user-defined list filter.", + "Quota information is not available to client requests (e.g: regions.list).", + "Indicates that a VM is using global DNS. Can also be used to indicate that a resource has attributes that could result in the creation of a VM that uses global DNS.", + "Resource can't be retrieved due to api quota exceeded.", + "Reserved entries for quickly adding new warnings without breaking dependent clients.", + "", + "", + "", + "" ], "type": "string" }, diff --git a/discovery/googleapis/developerconnect__v1.json b/discovery/googleapis/developerconnect__v1.json index f115bf0e4..ebc4f0a7c 100644 --- a/discovery/googleapis/developerconnect__v1.json +++ b/discovery/googleapis/developerconnect__v1.json @@ -77,7 +77,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20241017", + "revision": "20241205", "rootUrl": "https://developerconnect.googleapis.com/", "servicePath": "", "title": "Developer Connect API", @@ -812,10 +812,7 @@ }, "response": { "$ref": "Empty" - }, - "scopes": [ - "https://www.googleapis.com/auth/cloud-platform" - ] + } }, "processGitLabWebhook": { "description": "ProcessGitLabWebhook is called by the GitLab.com for notifying events.", @@ -849,7 +846,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. 
On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "developerconnect.projects.locations.operations.cancel", @@ -1054,7 +1051,7 @@ "type": "boolean" }, "uid": { - "description": "Output only. A system-assigned unique identifier for a the GitRepositoryLink.", + "description": "Output only. A system-assigned unique identifier for the Connection.", "readOnly": true, "type": "string" }, @@ -1376,7 +1373,7 @@ "type": "boolean" }, "uid": { - "description": "Output only. A system-assigned unique identifier for a the GitRepositoryLink.", + "description": "Output only. A system-assigned unique identifier for the GitRepositoryLink.", "readOnly": true, "type": "string" }, @@ -1681,7 +1678,7 @@ "type": "string" }, "requestedCancellation": { - "description": "Output only. Identifies whether the user has requested cancellation of the operation. Operations that have been cancelled successfully have Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Output only. Identifies whether the user has requested cancellation of the operation. Operations that have been cancelled successfully have google.longrunning.Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", "readOnly": true, "type": "boolean" }, diff --git a/discovery/googleapis/dialogflow__v2.json b/discovery/googleapis/dialogflow__v2.json index 7e1c8b7d4..a170a5930 100644 --- a/discovery/googleapis/dialogflow__v2.json +++ b/discovery/googleapis/dialogflow__v2.json @@ -28,7 +28,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20241018", + "revision": "20241212", "rootUrl": "https://dialogflow.googleapis.com/", "servicePath": "", "title": "Dialogflow API", @@ -3064,7 +3064,7 @@ ] }, "create": { - "description": "Creates a conversation profile in the specified project. ConversationProfile.CreateTime and ConversationProfile.UpdateTime aren't populated in the response. You can retrieve them via GetConversationProfile API.", + "description": "Creates a conversation profile in the specified project. ConversationProfile.create_time and ConversationProfile.update_time aren't populated in the response. You can retrieve them via GetConversationProfile API.", "flatPath": "v2/projects/{projectsId}/conversationProfiles", "httpMethod": "POST", "id": "dialogflow.projects.conversationProfiles.create", @@ -3182,7 +3182,7 @@ ] }, "patch": { - "description": "Updates the specified conversation profile. ConversationProfile.CreateTime and ConversationProfile.UpdateTime aren't populated in the response. You can retrieve them via GetConversationProfile API.", + "description": "Updates the specified conversation profile. ConversationProfile.create_time and ConversationProfile.update_time aren't populated in the response. You can retrieve them via GetConversationProfile API.", "flatPath": "v2/projects/{projectsId}/conversationProfiles/{conversationProfilesId}", "httpMethod": "PATCH", "id": "dialogflow.projects.conversationProfiles.patch", @@ -7037,7 +7037,7 @@ ] }, "create": { - "description": "Creates a conversation profile in the specified project. ConversationProfile.CreateTime and ConversationProfile.UpdateTime aren't populated in the response. 
You can retrieve them via GetConversationProfile API.", + "description": "Creates a conversation profile in the specified project. ConversationProfile.create_time and ConversationProfile.update_time aren't populated in the response. You can retrieve them via GetConversationProfile API.", "flatPath": "v2/projects/{projectsId}/locations/{locationsId}/conversationProfiles", "httpMethod": "POST", "id": "dialogflow.projects.locations.conversationProfiles.create", @@ -7155,7 +7155,7 @@ ] }, "patch": { - "description": "Updates the specified conversation profile. ConversationProfile.CreateTime and ConversationProfile.UpdateTime aren't populated in the response. You can retrieve them via GetConversationProfile API.", + "description": "Updates the specified conversation profile. ConversationProfile.create_time and ConversationProfile.update_time aren't populated in the response. You can retrieve them via GetConversationProfile API.", "flatPath": "v2/projects/{projectsId}/locations/{locationsId}/conversationProfiles/{conversationProfilesId}", "httpMethod": "PATCH", "id": "dialogflow.projects.locations.conversationProfiles.patch", @@ -8363,7 +8363,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v2/projects/{projectsId}/locations/{locationsId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "dialogflow.projects.locations.operations.cancel", @@ -8558,7 +8558,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. 
Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v2/projects/{projectsId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "dialogflow.projects.operations.cancel", @@ -8836,7 +8836,7 @@ "type": "object" }, "GoogleCloudDialogflowCxV3BargeInConfig": { - "description": "Configuration of the barge-in behavior. Barge-in instructs the API to return a detected utterance at a proper time while the client is playing back the response audio from a previous request. When the client sees the utterance, it should stop the playback and immediately get ready for receiving the responses for the current request. The barge-in handling requires the client to start streaming audio input as soon as it starts playing back the audio from the previous response. The playback is modeled into two phases: * No barge-in phase: which goes first and during which speech detection should not be carried out. * Barge-in phase: which follows the no barge-in phase and during which the API starts speech detection and may inform the client that an utterance has been detected. Note that no-speech event is not expected in this phase. The client provides this configuration in terms of the durations of those two phases. The durations are measured in terms of the audio length from the the start of the input audio. No-speech event is a response with END_OF_UTTERANCE without any transcript following up.", + "description": "Configuration of the barge-in behavior. Barge-in instructs the API to return a detected utterance at a proper time while the client is playing back the response audio from a previous request. When the client sees the utterance, it should stop the playback and immediately get ready for receiving the responses for the current request. The barge-in handling requires the client to start streaming audio input as soon as it starts playing back the audio from the previous response. The playback is modeled into two phases: * No barge-in phase: which goes first and during which speech detection should not be carried out. * Barge-in phase: which follows the no barge-in phase and during which the API starts speech detection and may inform the client that an utterance has been detected. Note that no-speech event is not expected in this phase. The client provides this configuration in terms of the durations of those two phases. The durations are measured in terms of the audio length from the start of the input audio. No-speech event is a response with END_OF_UTTERANCE without any transcript following up.", "id": "GoogleCloudDialogflowCxV3BargeInConfig", "properties": { "noBargeInDuration": { @@ -9836,6 +9836,7 @@ "properties": { "id": { "description": "Output only. The unique identifier of the training phrase.", + "readOnly": true, "type": "string" }, "parts": { @@ -11143,7 +11144,7 @@ "type": "object" }, "GoogleCloudDialogflowCxV3beta1BargeInConfig": { - "description": "Configuration of the barge-in behavior. Barge-in instructs the API to return a detected utterance at a proper time while the client is playing back the response audio from a previous request. When the client sees the utterance, it should stop the playback and immediately get ready for receiving the responses for the current request. 
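Aside on the `dialogflow.projects.operations.cancel` method a few hunks above: the description states that a successfully cancelled operation is not deleted but ends up with an `error` whose `google.rpc.Status.code` is `1` (`Code.CANCELLED`). A hedged sketch using the discovery-based google-api-python-client; the cancel path matches the method id in this hunk, while the `get()` mapping for Operations.GetOperation and the `done` field are assumptions taken from the standard long-running-operations surface, not from the visible diff.

```python
from googleapiclient.discovery import build

# Discovery-based Dialogflow v2 client; assumes Application Default Credentials.
service = build("dialogflow", "v2")

op_name = "projects/my-project/operations/my-operation-id"  # hypothetical name

# Best-effort cancellation; per the description, success is not guaranteed.
service.projects().operations().cancel(name=op_name).execute()

# Poll the operation (GetOperation mapping assumed, not shown in this hunk).
op = service.projects().operations().get(name=op_name).execute()
if op.get("done") and op.get("error", {}).get("code") == 1:
    print("Operation was cancelled (google.rpc.Status.code == 1 / Code.CANCELLED).")
```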
The barge-in handling requires the client to start streaming audio input as soon as it starts playing back the audio from the previous response. The playback is modeled into two phases: * No barge-in phase: which goes first and during which speech detection should not be carried out. * Barge-in phase: which follows the no barge-in phase and during which the API starts speech detection and may inform the client that an utterance has been detected. Note that no-speech event is not expected in this phase. The client provides this configuration in terms of the durations of those two phases. The durations are measured in terms of the audio length from the the start of the input audio. No-speech event is a response with END_OF_UTTERANCE without any transcript following up.", + "description": "Configuration of the barge-in behavior. Barge-in instructs the API to return a detected utterance at a proper time while the client is playing back the response audio from a previous request. When the client sees the utterance, it should stop the playback and immediately get ready for receiving the responses for the current request. The barge-in handling requires the client to start streaming audio input as soon as it starts playing back the audio from the previous response. The playback is modeled into two phases: * No barge-in phase: which goes first and during which speech detection should not be carried out. * Barge-in phase: which follows the no barge-in phase and during which the API starts speech detection and may inform the client that an utterance has been detected. Note that no-speech event is not expected in this phase. The client provides this configuration in terms of the durations of those two phases. The durations are measured in terms of the audio length from the start of the input audio. No-speech event is a response with END_OF_UTTERANCE without any transcript following up.", "id": "GoogleCloudDialogflowCxV3beta1BargeInConfig", "properties": { "noBargeInDuration": { @@ -12143,6 +12144,7 @@ "properties": { "id": { "description": "Output only. The unique identifier of the training phrase.", + "readOnly": true, "type": "string" }, "parts": { @@ -13682,7 +13684,7 @@ "properties": { "automatedAgentReply": { "$ref": "GoogleCloudDialogflowV2AutomatedAgentReply", - "description": "Only set if a Dialogflow automated agent has responded. Note that: AutomatedAgentReply.detect_intent_response.output_audio and AutomatedAgentReply.detect_intent_response.output_audio_config are always empty, use reply_audio instead." + "description": "Only set if a Dialogflow automated agent has responded. Note that in AutomatedAgentReply.DetectIntentResponse, Sessions.DetectIntentResponse.output_audio and Sessions.DetectIntentResponse.output_audio_config are always empty, use reply_audio instead." }, "dtmfParameters": { "$ref": "GoogleCloudDialogflowV2DtmfParameters", @@ -13782,7 +13784,7 @@ "type": "object" }, "GoogleCloudDialogflowV2AnswerRecord": { - "description": "Answer records are records to manage answer history and feedbacks for Dialogflow. Currently, answer record includes: - human agent assistant article suggestion - human agent assistant faq article It doesn't include: - `DetectIntent` intent matching - `DetectIntent` knowledge Answer records are not related to the conversation history in the Dialogflow Console. A Record is generated even when the end-user disables conversation history in the console. Records are created when there's a human agent assistant suggestion generated. 
A typical workflow for customers provide feedback to an answer is: 1. For human agent assistant, customers get suggestion via ListSuggestions API. Together with the answers, AnswerRecord.name are returned to the customers. 2. The customer uses the AnswerRecord.name to call the UpdateAnswerRecord method to send feedback about a specific answer that they believe is wrong.", + "description": "Answer records are records to manage answer history and feedbacks for Dialogflow. Currently, answer record includes: - human agent assistant article suggestion - human agent assistant faq article It doesn't include: - `DetectIntent` intent matching - `DetectIntent` knowledge Answer records are not related to the conversation history in the Dialogflow Console. A Record is generated even when the end-user disables conversation history in the console. Records are created when there's a human agent assistant suggestion generated. A typical workflow for customers provide feedback to an answer is: 1. For human agent assistant, customers get suggestion via ListSuggestions API. Together with the answers, AnswerRecord.name are returned to the customers. 2. The customer uses the AnswerRecord.name to call the AnswerRecords.UpdateAnswerRecord method to send feedback about a specific answer that they believe is wrong.", "id": "GoogleCloudDialogflowV2AnswerRecord", "properties": { "agentAssistantRecord": { @@ -14098,7 +14100,7 @@ "type": "object" }, "GoogleCloudDialogflowV2ClearSuggestionFeatureConfigOperationMetadata": { - "description": "Metadata for a ConversationProfile.ClearSuggestionFeatureConfig operation.", + "description": "Metadata for a ConversationProfiles.ClearSuggestionFeatureConfig operation.", "id": "GoogleCloudDialogflowV2ClearSuggestionFeatureConfigOperationMetadata", "properties": { "conversationProfile": { @@ -14150,7 +14152,7 @@ "type": "object" }, "GoogleCloudDialogflowV2ClearSuggestionFeatureConfigRequest": { - "description": "The request message for ConversationProfiles.ClearFeature.", + "description": "The request message for ConversationProfiles.ClearSuggestionFeatureConfig.", "id": "GoogleCloudDialogflowV2ClearSuggestionFeatureConfigRequest", "properties": { "participantRole": { @@ -14596,7 +14598,7 @@ "type": "object" }, "GoogleCloudDialogflowV2CreateConversationDatasetOperationMetadata": { - "description": "Metadata for ConversationDatasets.", + "description": "Metadata for CreateConversationDataset.", "id": "GoogleCloudDialogflowV2CreateConversationDatasetOperationMetadata", "properties": { "conversationDataset": { @@ -14696,7 +14698,7 @@ "type": "object" }, "GoogleCloudDialogflowV2DeleteConversationDatasetOperationMetadata": { - "description": "Metadata for ConversationDatasets.", + "description": "Metadata for DeleteConversationDataset.", "id": "GoogleCloudDialogflowV2DeleteConversationDatasetOperationMetadata", "properties": {}, "type": "object" @@ -15295,7 +15297,7 @@ "type": "object" }, "GoogleCloudDialogflowV2FewShotExample": { - "description": "Providing examples in the generator (i.e. building a few-shot generator) helps convey the desired format of the LLM response. NEXT_ID: 10", + "description": "Providing examples in the generator (i.e. 
building a few-shot generator) helps convey the desired format of the LLM response.", "id": "GoogleCloudDialogflowV2FewShotExample", "properties": { "conversationContext": { @@ -15515,7 +15517,7 @@ "id": "GoogleCloudDialogflowV2GenerateStatelessSummaryResponse", "properties": { "contextSize": { - "description": "Number of messages prior to and including last_conversation_message used to compile the suggestion. It may be smaller than the GenerateStatelessSummaryRequest.context_size field in the request if there weren't that many messages in the conversation.", + "description": "Number of messages prior to and including latest_message used to compile the suggestion. It may be smaller than the GenerateStatelessSummaryRequest.max_context_size field in the request if there weren't that many messages in the conversation.", "format": "int32", "type": "integer" }, @@ -15767,6 +15769,11 @@ "$ref": "GoogleCloudDialogflowV2HumanAgentAssistantConfigSuggestionQueryConfigContextFilterSettings", "description": "Determines how recent conversation context is filtered when generating suggestions. If unspecified, no messages will be dropped." }, + "contextSize": { + "description": "Optional. The number of recent messages to include in the context. Supported features: KNOWLEDGE_ASSIST.", + "format": "int32", + "type": "integer" + }, "dialogflowQuerySource": { "$ref": "GoogleCloudDialogflowV2HumanAgentAssistantConfigSuggestionQueryConfigDialogflowQuerySource", "description": "Query from Dialogflow agent. It is used by DIALOGFLOW_ASSIST." @@ -16248,6 +16255,13 @@ }, "type": "array" }, + "phraseSets": { + "description": "A collection of phrase set resources to use for speech adaptation.", + "items": { + "type": "string" + }, + "type": "array" + }, "sampleRateHertz": { "description": "Required. Sample rate (in Hertz) of the audio content sent in the query. Refer to [Cloud Speech API documentation](https://cloud.google.com/speech-to-text/docs/basics) for more details.", "format": "int32", @@ -17178,6 +17192,7 @@ "properties": { "name": { "description": "Output only. The unique identifier of this training phrase.", + "readOnly": true, "type": "string" }, "parts": { @@ -17304,6 +17319,14 @@ "description": "Snippet Source for a Generative Prediction.", "id": "GoogleCloudDialogflowV2KnowledgeAssistAnswerKnowledgeAnswerGenerativeSourceSnippet", "properties": { + "metadata": { + "additionalProperties": { + "description": "Properties of the object.", + "type": "any" + }, + "description": "Metadata of the document.", + "type": "object" + }, "text": { "description": "Text taken from that URI.", "type": "string" @@ -18219,6 +18242,14 @@ "description": "The sources of the answers.", "id": "GoogleCloudDialogflowV2SearchKnowledgeAnswerAnswerSource", "properties": { + "metadata": { + "additionalProperties": { + "description": "Properties of the object.", + "type": "any" + }, + "description": "Metadata associated with the article.", + "type": "object" + }, "snippet": { "description": "The relevant snippet of the article.", "type": "string" @@ -18246,6 +18277,18 @@ "description": "Required. The conversation profile used to configure the search. Format: `projects//locations//conversationProfiles/`.", "type": "string" }, + "endUserMetadata": { + "additionalProperties": { + "description": "Properties of the object.", + "type": "any" + }, + "description": "Optional. Information about the end-user to improve the relevance and accuracy of generative answers. 
This will be interpreted and used by a language model, so, for good results, the data should be self-descriptive, and in a simple structure. Example: ```json { \"subscription plan\": \"Business Premium Plus\", \"devices owned\": [ {\"model\": \"Google Pixel 7\"}, {\"model\": \"Google Pixel Tablet\"} ] } ```", + "type": "object" + }, + "exactSearch": { + "description": "Optional. Whether to search the query exactly without query rewrite.", + "type": "boolean" + }, "latestMessage": { "description": "Optional. The name of the latest conversation message when the request is triggered. Format: `projects//locations//conversations//messages/`.", "type": "string" @@ -18258,6 +18301,24 @@ "$ref": "GoogleCloudDialogflowV2TextInput", "description": "Required. The natural language text query for knowledge search." }, + "querySource": { + "description": "Optional. The source of the query in the request.", + "enum": [ + "QUERY_SOURCE_UNSPECIFIED", + "AGENT_QUERY", + "SUGGESTED_QUERY" + ], + "enumDescriptions": [ + "Unknown query source.", + "The query is from agents.", + "The query is a suggested query from Participants.SuggestKnowledgeAssist." + ], + "type": "string" + }, + "searchConfig": { + "$ref": "GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfig", + "description": "Optional. Configuration specific to search queries with data stores." + }, "sessionId": { "description": "Required. The ID of the search session. The session_id can be combined with Dialogflow V3 Agent ID retrieved from conversation profile or on its own to identify a search session. The search history of the same session will impact the search result. It's up to the API caller to choose an appropriate `Session ID`. It can be a random number or some type of session identifiers (preferably hashed). The length must not exceed 36 characters.", "type": "string" @@ -18265,6 +18326,160 @@ }, "type": "object" }, + "GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfig": { + "description": "Configuration specific to search queries with data stores.", + "id": "GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfig", + "properties": { + "boostSpecs": { + "description": "Optional. Boost specifications for data stores.", + "items": { + "$ref": "GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecs" + }, + "type": "array" + }, + "filterSpecs": { + "description": "Optional. Filter specification for data store queries.", + "items": { + "$ref": "GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigFilterSpecs" + }, + "type": "array" + } + }, + "type": "object" + }, + "GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecs": { + "description": "Boost specifications for data stores.", + "id": "GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecs", + "properties": { + "dataStores": { + "description": "Optional. Data Stores where the boosting configuration is applied. The full names of the referenced data stores. Formats: `projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}` `projects/{project}/locations/{location}/dataStores/{data_store}`", + "items": { + "type": "string" + }, + "type": "array" + }, + "spec": { + "description": "Optional. 
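Aside on the new `SearchKnowledge` request fields above (`endUserMetadata`, `exactSearch`, `querySource`, `searchConfig`): they are plain JSON in the request body. A sketch of the body only, reusing the `endUserMetadata` example from the description; resource names and the session id are placeholders, the key for the text query (`query`) and `TextInput.languageCode` are assumed from the broader v2 schema because they fall outside the visible context lines.

```python
# Request body fragment for Conversations.SearchKnowledge, mirroring this hunk.
search_knowledge_body = {
    "conversationProfile": (
        "projects/my-project/locations/global/conversationProfiles/my-profile"
    ),
    # Key name "query" and languageCode assumed; the $ref to TextInput is in the hunk.
    "query": {"text": "How do I upgrade my subscription?", "languageCode": "en-US"},
    "sessionId": "hypothetical-session-id",
    "querySource": "AGENT_QUERY",      # or SUGGESTED_QUERY
    "exactSearch": False,              # True skips query rewrite
    "endUserMetadata": {               # free-form, self-descriptive JSON per the docs
        "subscription plan": "Business Premium Plus",
        "devices owned": [{"model": "Google Pixel 7"}, {"model": "Google Pixel Tablet"}],
    },
}
```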
A list of boosting specifications.", + "items": { + "$ref": "GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpec" + }, + "type": "array" + } + }, + "type": "object" + }, + "GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpec": { + "description": "Boost specification to boost certain documents. A copy of google.cloud.discoveryengine.v1main.BoostSpec, field documentation is available at https://cloud.google.com/generative-ai-app-builder/docs/reference/rest/v1alpha/BoostSpec", + "id": "GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpec", + "properties": { + "conditionBoostSpecs": { + "description": "Optional. Condition boost specifications. If a document matches multiple conditions in the specifictions, boost scores from these specifications are all applied and combined in a non-linear way. Maximum number of specifications is 20.", + "items": { + "$ref": "GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpecConditionBoostSpec" + }, + "type": "array" + } + }, + "type": "object" + }, + "GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpecConditionBoostSpec": { + "description": "Boost applies to documents which match a condition.", + "id": "GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpecConditionBoostSpec", + "properties": { + "boost": { + "description": "Optional. Strength of the condition boost, which should be in [-1, 1]. Negative boost means demotion. Default is 0.0. Setting to 1.0 gives the document a big promotion. However, it does not necessarily mean that the boosted document will be the top result at all times, nor that other documents will be excluded. Results could still be shown even when none of them matches the condition. And results that are significantly more relevant to the search query can still trump your heavily favored but irrelevant documents. Setting to -1.0 gives the document a big demotion. However, results that are deeply relevant might still be shown. The document will have an upstream battle to get a fairly high ranking, but it is not blocked out completely. Setting to 0.0 means no boost applied. The boosting condition is ignored.", + "format": "float", + "type": "number" + }, + "boostControlSpec": { + "$ref": "GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpecConditionBoostSpecBoostControlSpec", + "description": "Optional. Complex specification for custom ranking based on customer defined attribute value." + }, + "condition": { + "description": "Optional. An expression which specifies a boost condition. The syntax and supported fields are the same as a filter expression. Examples: * To boost documents with document ID \"doc_1\" or \"doc_2\", and color \"Red\" or \"Blue\": * (id: ANY(\"doc_1\", \"doc_2\")) AND (color: ANY(\"Red\",\"Blue\"))", + "type": "string" + } + }, + "type": "object" + }, + "GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpecConditionBoostSpecBoostControlSpec": { + "description": "Specification for custom ranking based on customer specified attribute value. It provides more controls for customized ranking than the simple (condition, boost) combination above.", + "id": "GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpecConditionBoostSpecBoostControlSpec", + "properties": { + "attributeType": { + "description": "Optional. The attribute type to be used to determine the boost amount. 
The attribute value can be derived from the field value of the specified field_name. In the case of numerical it is straightforward i.e. attribute_value = numerical_field_value. In the case of freshness however, attribute_value = (time.now() - datetime_field_value).", + "enum": [ + "ATTRIBUTE_TYPE_UNSPECIFIED", + "NUMERICAL", + "FRESHNESS" + ], + "enumDescriptions": [ + "Unspecified AttributeType.", + "The value of the numerical field will be used to dynamically update the boost amount. In this case, the attribute_value (the x value) of the control point will be the actual value of the numerical field for which the boost_amount is specified.", + "For the freshness use case the attribute value will be the duration between the current time and the date in the datetime field specified. The value must be formatted as an XSD `dayTimeDuration` value (a restricted subset of an ISO 8601 duration value). The pattern for this is: `nDnM]`. E.g. `5D`, `3DT12H30M`, `T24H`." + ], + "type": "string" + }, + "controlPoints": { + "description": "Optional. The control points used to define the curve. The monotonic function (defined through the interpolation_type above) passes through the control points listed here.", + "items": { + "$ref": "GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpecConditionBoostSpecBoostControlSpecControlPoint" + }, + "type": "array" + }, + "fieldName": { + "description": "Optional. The name of the field whose value will be used to determine the boost amount.", + "type": "string" + }, + "interpolationType": { + "description": "Optional. The interpolation type to be applied to connect the control points listed below.", + "enum": [ + "INTERPOLATION_TYPE_UNSPECIFIED", + "LINEAR" + ], + "enumDescriptions": [ + "Interpolation type is unspecified. In this case, it defaults to Linear.", + "Piecewise linear interpolation will be applied." + ], + "type": "string" + } + }, + "type": "object" + }, + "GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpecConditionBoostSpecBoostControlSpecControlPoint": { + "description": "The control points used to define the curve. The curve defined through these control points can only be monotonically increasing or decreasing(constant values are acceptable).", + "id": "GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpecConditionBoostSpecBoostControlSpecControlPoint", + "properties": { + "attributeValue": { + "description": "Optional. Can be one of: 1. The numerical field value. 2. The duration spec for freshness: The value must be formatted as an XSD `dayTimeDuration` value (a restricted subset of an ISO 8601 duration value). The pattern for this is: `nDnM]`.", + "type": "string" + }, + "boostAmount": { + "description": "Optional. The value between -1 to 1 by which to boost the score if the attribute_value evaluates to the value specified above.", + "format": "float", + "type": "number" + } + }, + "type": "object" + }, + "GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigFilterSpecs": { + "description": "Filter specification for data store queries.", + "id": "GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigFilterSpecs", + "properties": { + "dataStores": { + "description": "Optional. The data store where the filter configuration is applied. 
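Aside on the `searchConfig` schemas above: `boostSpecs` and `filterSpecs` are also plain JSON. A sketch of one boost spec with a condition boost and a freshness-based `boostControlSpec`, plus one filter spec, using only field names and enum values defined in this hunk; the data store name, `fieldName`, and filter expression are placeholders.

```python
# searchConfig for SearchKnowledgeRequest, mirroring the schemas in this hunk.
DATA_STORE = "projects/my-project/locations/global/collections/default_collection/dataStores/my-data-store"

search_config = {
    "boostSpecs": [
        {
            "dataStores": [DATA_STORE],
            "spec": [
                {
                    "conditionBoostSpecs": [
                        {
                            # Condition syntax taken from the example in the hunk;
                            # boost must be in [-1, 1], negative values demote.
                            "condition": '(id: ANY("doc_1", "doc_2"))',
                            "boost": 0.5,
                        },
                        {
                            # Custom ranking curve driven by a datetime field (freshness).
                            "boostControlSpec": {
                                "attributeType": "FRESHNESS",
                                "fieldName": "publish_date",       # placeholder field
                                "interpolationType": "LINEAR",
                                "controlPoints": [
                                    {"attributeValue": "7D", "boostAmount": 0.8},
                                    {"attributeValue": "30D", "boostAmount": 0.0},
                                ],
                            },
                        },
                    ]
                }
            ],
        }
    ],
    "filterSpecs": [
        {
            "dataStores": [DATA_STORE],
            "filter": 'category: ANY("faq")',  # placeholder filter expression
        }
    ],
}
```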
Full resource name of data store, such as projects/{project}/locations/{location}/collections/{collectionId}/ dataStores/{dataStoreId}.", + "items": { + "type": "string" + }, + "type": "array" + }, + "filter": { + "description": "Optional. The filter expression to be applied. Expression syntax is documented at https://cloud.google.com/generative-ai-app-builder/docs/filter-search-metadata#filter-expression-syntax", + "type": "string" + } + }, + "type": "object" + }, "GoogleCloudDialogflowV2SearchKnowledgeResponse": { "description": "The response message for Conversations.SearchKnowledge.", "id": "GoogleCloudDialogflowV2SearchKnowledgeResponse", @@ -18312,7 +18527,7 @@ "type": "object" }, "GoogleCloudDialogflowV2SentimentAnalysisResult": { - "description": "The result of sentiment analysis. Sentiment analysis inspects user input and identifies the prevailing subjective opinion, especially to determine a user's attitude as positive, negative, or neutral. For Participants.DetectIntent, it needs to be configured in DetectIntentRequest.query_params. For Participants.StreamingDetectIntent, it needs to be configured in StreamingDetectIntentRequest.query_params. And for Participants.AnalyzeContent and Participants.StreamingAnalyzeContent, it needs to be configured in ConversationProfile.human_agent_assistant_config", + "description": "The result of sentiment analysis. Sentiment analysis inspects user input and identifies the prevailing subjective opinion, especially to determine a user's attitude as positive, negative, or neutral. For DetectIntent, it needs to be configured in DetectIntentRequest.query_params. For StreamingDetectIntent, it needs to be configured in StreamingDetectIntentRequest.query_params. And for Participants.AnalyzeContent and Participants.StreamingAnalyzeContent, it needs to be configured in ConversationProfile.human_agent_assistant_config", "id": "GoogleCloudDialogflowV2SentimentAnalysisResult", "properties": { "queryTextSentiment": { @@ -18355,7 +18570,7 @@ "type": "object" }, "GoogleCloudDialogflowV2SetSuggestionFeatureConfigOperationMetadata": { - "description": "Metadata for a ConversationProfile.SetSuggestionFeatureConfig operation.", + "description": "Metadata for a ConversationProfiles.SetSuggestionFeatureConfig operation.", "id": "GoogleCloudDialogflowV2SetSuggestionFeatureConfigOperationMetadata", "properties": { "conversationProfile": { @@ -18407,7 +18622,7 @@ "type": "object" }, "GoogleCloudDialogflowV2SetSuggestionFeatureConfigRequest": { - "description": "The request message for ConversationProfiles.SetSuggestionFeature.", + "description": "The request message for ConversationProfiles.SetSuggestionFeatureConfig.", "id": "GoogleCloudDialogflowV2SetSuggestionFeatureConfigRequest", "properties": { "participantRole": { @@ -18576,6 +18791,13 @@ "description": "Which Speech model to select. Select the model best suited to your domain to get best results. If a model is not explicitly specified, then Dialogflow auto-selects a model based on other parameters in the SpeechToTextConfig and Agent settings. If enhanced speech model is enabled for the agent and an enhanced version of the specified model for the language does not exist, then the speech is recognized using the standard version of the specified model. Refer to [Cloud Speech API documentation](https://cloud.google.com/speech-to-text/docs/basics#select-model) for more details. 
If you specify a model, the following models typically have the best performance: - phone_call (best for Agent Assist and telephony) - latest_short (best for Dialogflow non-telephony) - command_and_search Leave this field unspecified to use [Agent Speech settings](https://cloud.google.com/dialogflow/cx/docs/concept/agent#settings-speech) for model selection.", "type": "string" }, + "phraseSets": { + "description": "List of names of Cloud Speech phrase sets that are used for transcription.", + "items": { + "type": "string" + }, + "type": "array" + }, "sampleRateHertz": { "description": "Sample rate (in Hertz) of the audio content sent in the query. Refer to [Cloud Speech API documentation](https://cloud.google.com/speech-to-text/docs/basics) for more details.", "format": "int32", @@ -18672,7 +18894,7 @@ "id": "GoogleCloudDialogflowV2SuggestConversationSummaryResponse", "properties": { "contextSize": { - "description": "Number of messages prior to and including last_conversation_message used to compile the suggestion. It may be smaller than the SuggestSummaryRequest.context_size field in the request if there weren't that many messages in the conversation.", + "description": "Number of messages prior to and including latest_message used to compile the suggestion. It may be smaller than the SuggestConversationSummaryRequest.context_size field in the request if there weren't that many messages in the conversation.", "format": "int32", "type": "integer" }, @@ -20963,6 +21185,7 @@ "properties": { "name": { "description": "Output only. The unique identifier of this training phrase.", + "readOnly": true, "type": "string" }, "parts": { @@ -21143,6 +21366,14 @@ "description": "Snippet Source for a Generative Prediction.", "id": "GoogleCloudDialogflowV2beta1KnowledgeAssistAnswerKnowledgeAnswerGenerativeSourceSnippet", "properties": { + "metadata": { + "additionalProperties": { + "description": "Properties of the object.", + "type": "any" + }, + "description": "Metadata of the document.", + "type": "object" + }, "text": { "description": "Text taken from that URI.", "type": "string" diff --git a/discovery/googleapis/dialogflow__v3.json b/discovery/googleapis/dialogflow__v3.json index 94c01f042..71118097b 100644 --- a/discovery/googleapis/dialogflow__v3.json +++ b/discovery/googleapis/dialogflow__v3.json @@ -28,7 +28,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20241019", + "revision": "20241212", "rootUrl": "https://dialogflow.googleapis.com/", "servicePath": "", "title": "Dialogflow API", @@ -4102,7 +4102,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. 
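Aside on the two `phraseSets` additions (to `InputAudioConfig` earlier and to `SpeechToTextConfig` here): both take a list of Cloud Speech phrase set resource names for speech adaptation. A small sketch of a `speechToTextConfig` fragment; the phrase set name format is assumed from Cloud Speech-to-Text, and `phone_call` is one of the models the description itself calls out for Agent Assist and telephony.

```python
# SpeechToTextConfig fragment using the new phraseSets field from this revision.
# Resource name format is assumed from Cloud Speech-to-Text; values are placeholders.
speech_to_text_config = {
    "model": "phone_call",        # recommended above for Agent Assist / telephony
    "sampleRateHertz": 16000,
    "phraseSets": [
        "projects/my-project/locations/global/phraseSets/support-terms",
    ],
}
```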
Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v3/projects/{projectsId}/locations/{locationsId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "dialogflow.projects.locations.operations.cancel", @@ -4359,7 +4359,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v3/projects/{projectsId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "dialogflow.projects.operations.cancel", @@ -4832,7 +4832,7 @@ "type": "object" }, "GoogleCloudDialogflowCxV3BargeInConfig": { - "description": "Configuration of the barge-in behavior. Barge-in instructs the API to return a detected utterance at a proper time while the client is playing back the response audio from a previous request. When the client sees the utterance, it should stop the playback and immediately get ready for receiving the responses for the current request. The barge-in handling requires the client to start streaming audio input as soon as it starts playing back the audio from the previous response. The playback is modeled into two phases: * No barge-in phase: which goes first and during which speech detection should not be carried out. * Barge-in phase: which follows the no barge-in phase and during which the API starts speech detection and may inform the client that an utterance has been detected. Note that no-speech event is not expected in this phase. The client provides this configuration in terms of the durations of those two phases. The durations are measured in terms of the audio length from the the start of the input audio. No-speech event is a response with END_OF_UTTERANCE without any transcript following up.", + "description": "Configuration of the barge-in behavior. Barge-in instructs the API to return a detected utterance at a proper time while the client is playing back the response audio from a previous request. When the client sees the utterance, it should stop the playback and immediately get ready for receiving the responses for the current request. 
The barge-in handling requires the client to start streaming audio input as soon as it starts playing back the audio from the previous response. The playback is modeled into two phases: * No barge-in phase: which goes first and during which speech detection should not be carried out. * Barge-in phase: which follows the no barge-in phase and during which the API starts speech detection and may inform the client that an utterance has been detected. Note that no-speech event is not expected in this phase. The client provides this configuration in terms of the durations of those two phases. The durations are measured in terms of the audio length from the start of the input audio. No-speech event is a response with END_OF_UTTERANCE without any transcript following up.", "id": "GoogleCloudDialogflowCxV3BargeInConfig", "properties": { "noBargeInDuration": { @@ -6916,6 +6916,10 @@ "description": "Required. The human-readable name of the generator, unique within the agent. The prompt contains pre-defined parameters such as $conversation, $last-user-utterance, etc. populated by Dialogflow. It can also contain custom placeholders which will be resolved during fulfillment.", "type": "string" }, + "modelParameter": { + "$ref": "GoogleCloudDialogflowCxV3GeneratorModelParameter", + "description": "Parameters passed to the LLM to configure its behavior." + }, "name": { "description": "The unique identifier of the generator. Must be set for the Generators.UpdateGenerator method. Generators.CreateGenerate populates the name automatically. Format: `projects//locations//agents//generators/`.", "type": "string" @@ -6934,6 +6938,33 @@ }, "type": "object" }, + "GoogleCloudDialogflowCxV3GeneratorModelParameter": { + "description": "Parameters to be passed to the LLM. If not set, default values will be used.", + "id": "GoogleCloudDialogflowCxV3GeneratorModelParameter", + "properties": { + "maxDecodeSteps": { + "description": "The maximum number of tokens to generate.", + "format": "int32", + "type": "integer" + }, + "temperature": { + "description": "The temperature used for sampling. Temperature sampling occurs after both topP and topK have been applied. Valid range: [0.0, 1.0] Low temperature = less random. High temperature = more random.", + "format": "float", + "type": "number" + }, + "topK": { + "description": "If set, the sampling process in each step is limited to the top_k tokens with highest probabilities. Valid range: [1, 40] or 1000+. Small topK = less random. Large topK = more random.", + "format": "int32", + "type": "integer" + }, + "topP": { + "description": "If set, only the tokens comprising the top top_p probability mass are considered. If both top_p and top_k are set, top_p will be used for further refining candidates selected with top_k. Valid range: (0.0, 1.0]. Small topP = less random. Large topP = more random.", + "format": "float", + "type": "number" + } + }, + "type": "object" + }, "GoogleCloudDialogflowCxV3GeneratorPlaceholder": { "description": "Represents a custom placeholder in the prompt text.", "id": "GoogleCloudDialogflowCxV3GeneratorPlaceholder", @@ -7442,6 +7473,7 @@ "properties": { "id": { "description": "Output only. The unique identifier of the training phrase.", + "readOnly": true, "type": "string" }, "parts": { @@ -10342,7 +10374,7 @@ "type": "object" }, "GoogleCloudDialogflowCxV3beta1BargeInConfig": { - "description": "Configuration of the barge-in behavior. 
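Aside on the new `GoogleCloudDialogflowCxV3GeneratorModelParameter` message above: it carries LLM sampling settings with the documented ranges (temperature in [0.0, 1.0], topK in [1, 40] or 1000+, topP in (0.0, 1.0]). A sketch of a generator payload fragment; only `displayName` and `modelParameter` are taken from this hunk, and the prompt-text field is omitted because it sits outside the visible context.

```python
# Generator fragment for Generators.CreateGenerator / UpdateGenerator; values are
# chosen within the ranges documented in this hunk and are illustrative only.
generator = {
    "displayName": "summarizer",
    "modelParameter": {
        "temperature": 0.2,       # low temperature = less random
        "topK": 20,               # sample from the 20 most likely tokens
        "topP": 0.95,             # nucleus-sampling threshold
        "maxDecodeSteps": 256,    # cap on generated tokens
    },
}
```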
Barge-in instructs the API to return a detected utterance at a proper time while the client is playing back the response audio from a previous request. When the client sees the utterance, it should stop the playback and immediately get ready for receiving the responses for the current request. The barge-in handling requires the client to start streaming audio input as soon as it starts playing back the audio from the previous response. The playback is modeled into two phases: * No barge-in phase: which goes first and during which speech detection should not be carried out. * Barge-in phase: which follows the no barge-in phase and during which the API starts speech detection and may inform the client that an utterance has been detected. Note that no-speech event is not expected in this phase. The client provides this configuration in terms of the durations of those two phases. The durations are measured in terms of the audio length from the the start of the input audio. No-speech event is a response with END_OF_UTTERANCE without any transcript following up.", + "description": "Configuration of the barge-in behavior. Barge-in instructs the API to return a detected utterance at a proper time while the client is playing back the response audio from a previous request. When the client sees the utterance, it should stop the playback and immediately get ready for receiving the responses for the current request. The barge-in handling requires the client to start streaming audio input as soon as it starts playing back the audio from the previous response. The playback is modeled into two phases: * No barge-in phase: which goes first and during which speech detection should not be carried out. * Barge-in phase: which follows the no barge-in phase and during which the API starts speech detection and may inform the client that an utterance has been detected. Note that no-speech event is not expected in this phase. The client provides this configuration in terms of the durations of those two phases. The durations are measured in terms of the audio length from the start of the input audio. No-speech event is a response with END_OF_UTTERANCE without any transcript following up.", "id": "GoogleCloudDialogflowCxV3beta1BargeInConfig", "properties": { "noBargeInDuration": { @@ -11342,6 +11374,7 @@ "properties": { "id": { "description": "Output only. 
The unique identifier of the training phrase.", + "readOnly": true, "type": "string" }, "parts": { @@ -12692,7 +12725,7 @@ "type": "object" }, "GoogleCloudDialogflowV2ClearSuggestionFeatureConfigOperationMetadata": { - "description": "Metadata for a ConversationProfile.ClearSuggestionFeatureConfig operation.", + "description": "Metadata for a ConversationProfiles.ClearSuggestionFeatureConfig operation.", "id": "GoogleCloudDialogflowV2ClearSuggestionFeatureConfigOperationMetadata", "properties": { "conversationProfile": { @@ -12884,7 +12917,7 @@ "type": "object" }, "GoogleCloudDialogflowV2CreateConversationDatasetOperationMetadata": { - "description": "Metadata for ConversationDatasets.", + "description": "Metadata for CreateConversationDataset.", "id": "GoogleCloudDialogflowV2CreateConversationDatasetOperationMetadata", "properties": { "conversationDataset": { @@ -12973,7 +13006,7 @@ "type": "object" }, "GoogleCloudDialogflowV2DeleteConversationDatasetOperationMetadata": { - "description": "Metadata for ConversationDatasets.", + "description": "Metadata for DeleteConversationDataset.", "id": "GoogleCloudDialogflowV2DeleteConversationDatasetOperationMetadata", "properties": {}, "type": "object" @@ -14158,6 +14191,7 @@ "properties": { "name": { "description": "Output only. The unique identifier of this training phrase.", + "readOnly": true, "type": "string" }, "parts": { @@ -14562,7 +14596,7 @@ "type": "object" }, "GoogleCloudDialogflowV2SentimentAnalysisResult": { - "description": "The result of sentiment analysis. Sentiment analysis inspects user input and identifies the prevailing subjective opinion, especially to determine a user's attitude as positive, negative, or neutral. For Participants.DetectIntent, it needs to be configured in DetectIntentRequest.query_params. For Participants.StreamingDetectIntent, it needs to be configured in StreamingDetectIntentRequest.query_params. And for Participants.AnalyzeContent and Participants.StreamingAnalyzeContent, it needs to be configured in ConversationProfile.human_agent_assistant_config", + "description": "The result of sentiment analysis. Sentiment analysis inspects user input and identifies the prevailing subjective opinion, especially to determine a user's attitude as positive, negative, or neutral. For DetectIntent, it needs to be configured in DetectIntentRequest.query_params. For StreamingDetectIntent, it needs to be configured in StreamingDetectIntentRequest.query_params. And for Participants.AnalyzeContent and Participants.StreamingAnalyzeContent, it needs to be configured in ConversationProfile.human_agent_assistant_config", "id": "GoogleCloudDialogflowV2SentimentAnalysisResult", "properties": { "queryTextSentiment": { @@ -14605,7 +14639,7 @@ "type": "object" }, "GoogleCloudDialogflowV2SetSuggestionFeatureConfigOperationMetadata": { - "description": "Metadata for a ConversationProfile.SetSuggestionFeatureConfig operation.", + "description": "Metadata for a ConversationProfiles.SetSuggestionFeatureConfig operation.", "id": "GoogleCloudDialogflowV2SetSuggestionFeatureConfigOperationMetadata", "properties": { "conversationProfile": { @@ -16534,6 +16568,7 @@ "properties": { "name": { "description": "Output only. 
The unique identifier of this training phrase.", + "readOnly": true, "type": "string" }, "parts": { diff --git a/discovery/googleapis/digitalassetlinks__v1.json b/discovery/googleapis/digitalassetlinks__v1.json index 270f43ff4..b310d8fed 100644 --- a/discovery/googleapis/digitalassetlinks__v1.json +++ b/discovery/googleapis/digitalassetlinks__v1.json @@ -15,7 +15,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20220122", + "revision": "20241207", "rootUrl": "https://digitalassetlinks.googleapis.com/", "servicePath": "", "title": "Digital Asset Links API", @@ -101,21 +101,6 @@ "resources": { "assetlinks": { "methods": { - "bulkCheck": { - "description": "Send a bundle of statement checks in a single RPC to minimize latency and service load. Statements need not be all for the same source and/or target. We recommend using this method when you need to check more than one statement in a short period of time.", - "flatPath": "v1/assetlinks:bulkCheck", - "httpMethod": "POST", - "id": "digitalassetlinks.assetlinks.bulkCheck", - "parameterOrder": [], - "parameters": {}, - "path": "v1/assetlinks:bulkCheck", - "request": { - "$ref": "BulkCheckRequest" - }, - "response": { - "$ref": "BulkCheckResponse" - } - }, "check": { "description": "Determines whether the specified (directional) relationship exists between the specified source and target assets. The relation describes the intent of the link between the two assets as claimed by the source asset. An example for such relationships is the delegation of privileges or permissions. This command is most often used by infrastructure systems to check preconditions for an action. For example, a client may want to know if it is OK to send a web URL to a particular mobile app instead. The client can check for the relevant asset link from the website to the mobile app to decide if the operation should be allowed. A note about security: if you specify a secure asset as the source, such as an HTTPS website or an Android app, the API will ensure that any statements used to generate the response have been made in a secure way by the owner of that asset. Conversely, if the source asset is an insecure HTTP website (that is, the URL starts with `http://` instead of `https://`), the API cannot verify its statements securely, and it is not possible to ensure that the website's statements have not been altered by a third party. For more information, see the [Digital Asset Links technical design specification](https://github.com/google/digitalassetlinks/blob/master/well-known/details.md).", "flatPath": "v1/assetlinks:check", @@ -235,84 +220,6 @@ }, "type": "object" }, - "BulkCheckRequest": { - "description": "Message used to check for the existence of multiple digital asset links within a single RPC.", - "id": "BulkCheckRequest", - "properties": { - "allowGoogleInternalDataSources": { - "description": "Same configuration as in Check request, all statements checks will use same configurations.", - "type": "boolean" - }, - "defaultRelation": { - "description": "If specified, will be used in any given template statement that doesn’t specify a relation.", - "type": "string" - }, - "defaultSource": { - "$ref": "Asset", - "description": "If specified, will be used in any given template statement that doesn’t specify a source." - }, - "defaultTarget": { - "$ref": "Asset", - "description": "If specified, will be used in any given template statement that doesn’t specify a target." 
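Aside on the Digital Asset Links hunks above: `bulkCheck` and its `BulkCheckRequest`/`BulkCheckResponse` messages are removed from this discovery document, leaving the single-statement `assetlinks.check` method described here. A hedged sketch of calling it over plain REST; the query parameter names (`source.web.site`, `relation`, `target.androidApp.*`) and the `linked` response field are not shown in this hunk and are assumed from the public Digital Asset Links documentation.

```python
import requests  # third-party HTTP client; assumed available

# Single Check call against the public endpoint. Parameter names and the "linked"
# field are assumptions (outside this hunk); no API key handling is shown.
params = {
    "source.web.site": "https://example.com",
    "relation": "delegate_permission/common.handle_all_urls",
    "target.androidApp.packageName": "com.example.app",
    "target.androidApp.certificate.sha256Fingerprint": "AA:BB:...:FF",  # placeholder
}
resp = requests.get(
    "https://digitalassetlinks.googleapis.com/v1/assetlinks:check",
    params=params,
    timeout=10,
)
resp.raise_for_status()
print(resp.json().get("linked"))  # assumed CheckResponse field: whether the link exists
```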
- }, - "skipCacheLookup": { - "description": "Same configuration as in Check request, all statements checks will use same configurations.", - "type": "boolean" - }, - "statements": { - "description": "List of statements to check. For each statement, you can omit a field if the corresponding default_* field below was supplied. Minimum 1 statement; maximum 1,000 statements. Any additional statements will be ignored.", - "items": { - "$ref": "StatementTemplate" - }, - "type": "array" - } - }, - "type": "object" - }, - "BulkCheckResponse": { - "description": "Response for BulkCheck call. Results are sent in a list in the same order in which they were sent. Individual check errors are described in the appropriate check_results entry. If the entire call fails, the response will include a bulk_error_code field describing the error.", - "id": "BulkCheckResponse", - "properties": { - "bulkErrorCode": { - "description": "Error code for the entire request. Present only if the entire request failed. Individual check errors will not trigger the presence of this field.", - "enum": [ - "ERROR_CODE_UNSPECIFIED", - "ERROR_CODE_INVALID_QUERY", - "ERROR_CODE_FETCH_ERROR", - "ERROR_CODE_FAILED_SSL_VALIDATION", - "ERROR_CODE_REDIRECT", - "ERROR_CODE_TOO_LARGE", - "ERROR_CODE_MALFORMED_HTTP_RESPONSE", - "ERROR_CODE_WRONG_CONTENT_TYPE", - "ERROR_CODE_MALFORMED_CONTENT", - "ERROR_CODE_SECURE_ASSET_INCLUDES_INSECURE", - "ERROR_CODE_FETCH_BUDGET_EXHAUSTED" - ], - "enumDescriptions": [ - "", - "Unable to parse query.", - "Unable to fetch the asset links data.", - "Invalid HTTPS certificate .", - "HTTP redirects (e.g, 301) are not allowed.", - "Asset links data exceeds maximum size.", - "Can't parse HTTP response.", - "HTTP Content-type should be application/json.", - "JSON content is malformed.", - "A secure asset includes an insecure asset (security downgrade).", - "Too many includes (maybe a loop)." - ], - "type": "string" - }, - "checkResults": { - "description": "List of results for each check request. Results are returned in the same order in which they were sent in the request.", - "items": { - "$ref": "CheckResponse" - }, - "type": "array" - } - }, - "type": "object" - }, "CertificateInfo": { "description": "Describes an X509 certificate.", "id": "CertificateInfo", @@ -349,7 +256,7 @@ "ERROR_CODE_FETCH_BUDGET_EXHAUSTED" ], "enumDescriptions": [ - "", + "Default value, otherwise unused.", "Unable to parse query.", "Unable to fetch the asset links data.", "Invalid HTTPS certificate .", @@ -402,7 +309,7 @@ "ERROR_CODE_FETCH_BUDGET_EXHAUSTED" ], "enumDescriptions": [ - "", + "Default value, otherwise unused.", "Unable to parse query.", "Unable to fetch the asset links data.", "Invalid HTTPS certificate .", @@ -452,25 +359,6 @@ }, "type": "object" }, - "StatementTemplate": { - "description": "A single statement to check in a bulk call using BulkCheck. See CheckRequest for details about each field.", - "id": "StatementTemplate", - "properties": { - "relation": { - "description": "The relationship being asserted between the source and target. If omitted, you must specify a BulkCheckRequest.default_relation value to use here.", - "type": "string" - }, - "source": { - "$ref": "Asset", - "description": "The source asset that is asserting the statement. If omitted, you must specify a BulkCheckRequest.default_source value to use here." - }, - "target": { - "$ref": "Asset", - "description": "The target that the source is declaring the relationship with. 
If omitted, you must specify a BulkCheckRequest.default_target to use here." - } - }, - "type": "object" - }, "WebAsset": { "description": "Describes a web asset.", "id": "WebAsset", diff --git a/discovery/googleapis/displayvideo__v2.json b/discovery/googleapis/displayvideo__v2.json index de43749cb..4653b55ab 100644 --- a/discovery/googleapis/displayvideo__v2.json +++ b/discovery/googleapis/displayvideo__v2.json @@ -34,7 +34,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20241017", + "revision": "20241212", "rootUrl": "https://displayvideo.googleapis.com/", "servicePath": "", "title": "Display & Video 360 API", @@ -6326,7 +6326,7 @@ "type": "string" }, "pageSize": { - "description": "Requested page size. Must be between `1` and `200`. If unspecified will default to `100`. Returns error code `INVALID_ARGUMENT` if an invalid value is specified.", + "description": "Requested page size. Must be between `1` and `5000`. If unspecified, this value defaults to `100`. Returns error code `INVALID_ARGUMENT` if an invalid value is specified.", "format": "int32", "location": "query", "type": "integer" @@ -9423,11 +9423,11 @@ "type": "object" }, "Adloox": { - "description": "Details of Adloox settings.", + "description": "Details of Adloox brand safety settings.", "id": "Adloox", "properties": { "excludedAdlooxCategories": { - "description": "Adloox's brand safety settings.", + "description": "Adloox categories to exclude.", "items": { "enum": [ "ADLOOX_UNSPECIFIED", @@ -9441,7 +9441,7 @@ "FRAUD" ], "enumDescriptions": [ - "This enum is only a placeholder and it doesn't specify any Adloox option.", + "Default value when a Adloox category is not specified or is unknown in this version.", "Adult content (hard).", "Adult content (soft).", "Illegal content.", @@ -9474,7 +9474,7 @@ }, "billingConfig": { "$ref": "AdvertiserBillingConfig", - "description": "Optional. Required. Billing related settings of the advertiser." + "description": "Required. Billing related settings of the advertiser." }, "creativeConfig": { "$ref": "AdvertiserCreativeConfig", @@ -9563,7 +9563,7 @@ "id": "AdvertiserBillingConfig", "properties": { "billingProfileId": { - "description": "Optional. The ID of a billing profile assigned to the advertiser.", + "description": "Required. The ID of a billing profile assigned to the advertiser.", "format": "int64", "type": "string" } @@ -9883,7 +9883,12 @@ "ASSET_ROLE_IOS_APP_ID", "ASSET_ROLE_RATING", "ASSET_ROLE_ICON", - "ASSET_ROLE_COVER_IMAGE" + "ASSET_ROLE_COVER_IMAGE", + "ASSET_ROLE_BACKGROUND_COLOR", + "ASSET_ROLE_ACCENT_COLOR", + "ASSET_ROLE_REQUIRE_LOGO", + "ASSET_ROLE_REQUIRE_IMAGE", + "ASSET_ROLE_ENABLE_ASSET_ENHANCEMENTS" ], "enumDescriptions": [ "Asset role is not specified or is unknown in this version.", @@ -9902,7 +9907,12 @@ "The ID of an iOS app in the Apple app store. This ID number can be found in the Apple App Store URL as the string of numbers directly after \"id\". For example, in https://apps.apple.com/us/app/gmail-email-by-google/id422689480 the ID is 422689480. Assets of this role are read-only.", "The rating of an app in the Google play store or iOS app store. Note that this value is not automatically synced with the actual rating in the store. It will always be the one provided when save the creative. Assets of this role are read-only.", "The icon of a creative. 
This role is only supported and required in following creative_type: * `CREATIVE_TYPE_NATIVE` * `CREATIVE_TYPE_NATIVE_SITE_SQUARE`", - "The cover image of a native video creative. This role is only supported and required in following creative_type: * `CREATIVE_TYPE_VIDEO`" + "The cover image of a native video creative. This role is only supported and required in following creative_type: * `CREATIVE_TYPE_VIDEO`", + "The main color to use in a creative. This role is only supported and required in following creative_type: * `CREATIVE_TYPE_ASSET_BASED_CREATIVE`", + "The accent color to use in a creative. This role is only supported and required in following creative_type: * `CREATIVE_TYPE_ASSET_BASED_CREATIVE`", + "Whether the creative must use a logo asset. This role is only supported and required in following creative_type: * `CREATIVE_TYPE_ASSET_BASED_CREATIVE`", + "Whether the creative must use an image asset. This role is only supported and required in following creative_type: * `CREATIVE_TYPE_ASSET_BASED_CREATIVE`", + "Whether asset enhancements can be applied to the creative. This role is only supported and required in following creative_type: * `CREATIVE_TYPE_ASSET_BASED_CREATIVE`" ], "type": "string" } @@ -12369,7 +12379,7 @@ "SDF version 6", "SDF version 7. Read the [v7 migration guide](/display-video/api/structured-data-file/v7-migration-guide) before migrating to this version.", "SDF version 7.1. Read the [v7 migration guide](/display-video/api/structured-data-file/v7-migration-guide) before migrating to this version.", - "SDF version 8. **This SDF version is in beta. It is only available to a subset of users.**" + "SDF version 8. Read the [v8 migration guide](/display-video/api/structured-data-file/v8-migration-guide) before migrating to this version." ], "type": "string" } @@ -12478,7 +12488,8 @@ "CREATIVE_TYPE_AUDIO", "CREATIVE_TYPE_PUBLISHER_HOSTED", "CREATIVE_TYPE_NATIVE_VIDEO", - "CREATIVE_TYPE_TEMPLATED_APP_INSTALL_VIDEO" + "CREATIVE_TYPE_TEMPLATED_APP_INSTALL_VIDEO", + "CREATIVE_TYPE_ASSET_BASED_CREATIVE" ], "enumDescriptions": [ "Type value is not specified or is unknown in this version.", @@ -12495,7 +12506,8 @@ "Audio creative. Create and update methods are supported for this creative type if the hosting_source is `HOSTING_SOURCE_HOSTED`", "Publisher hosted creative. Create and update methods are **not** supported for this creative type.", "Native video creative. Create and update methods are supported for this creative type if the hosting_source is `HOSTING_SOURCE_HOSTED`", - "Templated app install mobile video creative. Create and update methods are **not** supported for this creative type." + "Templated app install mobile video creative. Create and update methods are **not** supported for this creative type.", + "Asset based creative. Create and update methods are supported for this creative type if the hosting_source is `HOSTING_SOURCE_HOSTED`." ], "type": "string" }, @@ -12755,7 +12767,8 @@ "CREATIVE_TYPE_AUDIO", "CREATIVE_TYPE_PUBLISHER_HOSTED", "CREATIVE_TYPE_NATIVE_VIDEO", - "CREATIVE_TYPE_TEMPLATED_APP_INSTALL_VIDEO" + "CREATIVE_TYPE_TEMPLATED_APP_INSTALL_VIDEO", + "CREATIVE_TYPE_ASSET_BASED_CREATIVE" ], "enumDescriptions": [ "Type value is not specified or is unknown in this version.", @@ -12772,7 +12785,8 @@ "Audio creative. Create and update methods are supported for this creative type if the hosting_source is `HOSTING_SOURCE_HOSTED`", "Publisher hosted creative. Create and update methods are **not** supported for this creative type.", "Native video creative. 
Create and update methods are supported for this creative type if the hosting_source is `HOSTING_SOURCE_HOSTED`", - "Templated app install mobile video creative. Create and update methods are **not** supported for this creative type." + "Templated app install mobile video creative. Create and update methods are **not** supported for this creative type.", + "Asset based creative. Create and update methods are supported for this creative type if the hosting_source is `HOSTING_SOURCE_HOSTED`." ], "type": "string" }, @@ -14115,7 +14129,9 @@ "EXCHANGE_CHARTBOOST_GBID", "EXCHANGE_ADMOST_GBID", "EXCHANGE_TOPON_GBID", - "EXCHANGE_NETFLIX" + "EXCHANGE_NETFLIX", + "EXCHANGE_CORE", + "EXCHANGE_TUBI" ], "enumDescriptions": [ "Exchange is not specified or is unknown in this version.", @@ -14200,7 +14216,9 @@ "Chartboost Mediation.", "AdMost.", "TopOn.", - "Netflix." + "Netflix.", + "Core.", + "Tubi." ], "type": "string" } @@ -14310,7 +14328,9 @@ "EXCHANGE_CHARTBOOST_GBID", "EXCHANGE_ADMOST_GBID", "EXCHANGE_TOPON_GBID", - "EXCHANGE_NETFLIX" + "EXCHANGE_NETFLIX", + "EXCHANGE_CORE", + "EXCHANGE_TUBI" ], "enumDescriptions": [ "Exchange is not specified or is unknown in this version.", @@ -14395,7 +14415,9 @@ "Chartboost Mediation.", "AdMost.", "TopOn.", - "Netflix." + "Netflix.", + "Core.", + "Tubi." ], "type": "string" }, @@ -14506,7 +14528,9 @@ "EXCHANGE_CHARTBOOST_GBID", "EXCHANGE_ADMOST_GBID", "EXCHANGE_TOPON_GBID", - "EXCHANGE_NETFLIX" + "EXCHANGE_NETFLIX", + "EXCHANGE_CORE", + "EXCHANGE_TUBI" ], "enumDescriptions": [ "Exchange is not specified or is unknown in this version.", @@ -14591,7 +14615,9 @@ "Chartboost Mediation.", "AdMost.", "TopOn.", - "Netflix." + "Netflix.", + "Core.", + "Tubi." ], "type": "string" }, @@ -14703,7 +14729,9 @@ "EXCHANGE_CHARTBOOST_GBID", "EXCHANGE_ADMOST_GBID", "EXCHANGE_TOPON_GBID", - "EXCHANGE_NETFLIX" + "EXCHANGE_NETFLIX", + "EXCHANGE_CORE", + "EXCHANGE_TUBI" ], "enumDescriptions": [ "Exchange is not specified or is unknown in this version.", @@ -14788,7 +14816,9 @@ "Chartboost Mediation.", "AdMost.", "TopOn.", - "Netflix." + "Netflix.", + "Core.", + "Tubi." ], "readOnly": true, "type": "string" @@ -15751,7 +15781,9 @@ "EXCHANGE_CHARTBOOST_GBID", "EXCHANGE_ADMOST_GBID", "EXCHANGE_TOPON_GBID", - "EXCHANGE_NETFLIX" + "EXCHANGE_NETFLIX", + "EXCHANGE_CORE", + "EXCHANGE_TUBI" ], "enumDescriptions": [ "Exchange is not specified or is unknown in this version.", @@ -15836,7 +15868,9 @@ "Chartboost Mediation.", "AdMost.", "TopOn.", - "Netflix." + "Netflix.", + "Core.", + "Tubi." ], "type": "string" }, @@ -16200,7 +16234,7 @@ }, "pacing": { "$ref": "Pacing", - "description": "Required. The budget spending speed setting of the insertion order. *Warning*: Starting on **November 5, 2024**, pacing_type `PACING_TYPE_ASAP` will no longer be compatible with pacing_period `PACING_PERIOD_FLIGHT`. [Read more about this announced change](/display-video/api/deprecations#features.io_asap)." + "description": "Required. The budget spending speed setting of the insertion order. pacing_type `PACING_TYPE_ASAP` is not compatible with pacing_period `PACING_PERIOD_FLIGHT`." }, "partnerCosts": { "description": "The partner costs associated with the insertion order. 
If absent or empty in CreateInsertionOrder method, the newly created insertion order will inherit partner costs from the partner settings.", @@ -16664,7 +16698,9 @@ "EXCHANGE_CHARTBOOST_GBID", "EXCHANGE_ADMOST_GBID", "EXCHANGE_TOPON_GBID", - "EXCHANGE_NETFLIX" + "EXCHANGE_NETFLIX", + "EXCHANGE_CORE", + "EXCHANGE_TUBI" ], "enumDescriptions": [ "Exchange is not specified or is unknown in this version.", @@ -16749,7 +16785,9 @@ "Chartboost Mediation.", "AdMost.", "TopOn.", - "Netflix." + "Netflix.", + "Core.", + "Tubi." ], "type": "string" }, @@ -18852,7 +18890,7 @@ "type": "string" }, "pacingType": { - "description": "Required. The type of pacing that defines how the budget amount will be spent across the pacing_period. *Warning*: Starting on **November 5, 2024**, `PACING_TYPE_ASAP` will no longer be compatible with pacing_period `PACING_PERIOD_FLIGHT` for insertion orders. [Read more about this announced change](/display-video/api/deprecations#features.io_asap).", + "description": "Required. The type of pacing that defines how the budget amount will be spent across the pacing_period. `PACING_TYPE_ASAP` is not compatible with pacing_period `PACING_PERIOD_FLIGHT` for insertion orders.", "enum": [ "PACING_TYPE_UNSPECIFIED", "PACING_TYPE_AHEAD", @@ -19889,7 +19927,7 @@ "SDF version 6", "SDF version 7. Read the [v7 migration guide](/display-video/api/structured-data-file/v7-migration-guide) before migrating to this version.", "SDF version 7.1. Read the [v7 migration guide](/display-video/api/structured-data-file/v7-migration-guide) before migrating to this version.", - "SDF version 8. **This SDF version is in beta. It is only available to a subset of users.**" + "SDF version 8. Read the [v8 migration guide](/display-video/api/structured-data-file/v8-migration-guide) before migrating to this version." ], "type": "string" } @@ -19972,7 +20010,7 @@ "SDF version 6", "SDF version 7. Read the [v7 migration guide](/display-video/api/structured-data-file/v7-migration-guide) before migrating to this version.", "SDF version 7.1. Read the [v7 migration guide](/display-video/api/structured-data-file/v7-migration-guide) before migrating to this version.", - "SDF version 8. **This SDF version is in beta. It is only available to a subset of users.**" + "SDF version 8. Read the [v8 migration guide](/display-video/api/structured-data-file/v8-migration-guide) before migrating to this version." ], "type": "string" } diff --git a/discovery/googleapis/displayvideo__v3.json b/discovery/googleapis/displayvideo__v3.json index d6fe6c4bf..a69f463ea 100644 --- a/discovery/googleapis/displayvideo__v3.json +++ b/discovery/googleapis/displayvideo__v3.json @@ -34,7 +34,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20241017", + "revision": "20241212", "rootUrl": "https://displayvideo.googleapis.com/", "servicePath": "", "title": "Display & Video 360 API", @@ -6275,7 +6275,7 @@ "type": "string" }, "pageSize": { - "description": "Requested page size. Must be between `1` and `200`. If unspecified will default to `100`. Returns error code `INVALID_ARGUMENT` if an invalid value is specified.", + "description": "Requested page size. Must be between `1` and `5000`. If unspecified, this value defaults to `100`. 
Returns error code `INVALID_ARGUMENT` if an invalid value is specified.", "format": "int32", "location": "query", "type": "integer" @@ -9565,11 +9565,11 @@ "type": "object" }, "Adloox": { - "description": "Details of Adloox settings.", + "description": "Details of Adloox brand safety settings.", "id": "Adloox", "properties": { "adultExplicitSexualContent": { - "description": "Optional. Adult explicit sexual content.", + "description": "Optional. Adult and Explicit Sexual Content [GARM](https://wfanet.org/leadership/garm/about-garm) risk ranges to exclude.", "enum": [ "GARM_RISK_EXCLUSION_UNSPECIFIED", "GARM_RISK_EXCLUSION_FLOOR", @@ -9578,16 +9578,16 @@ "GARM_RISK_EXCLUSION_LOW" ], "enumDescriptions": [ - "This enum is only a placeholder and it doesn't specify any GARM risk level option.", + "This enum is only a placeholder and it doesn't specify any GARM risk exclusion option.", "Exclude floor risk.", "Exclude high and floor risk.", - "Exclude high, medium, and floor risk.", - "Exclude All Level of Risk (Low, Medium, High and Floor)." + "Exclude medium, high, and floor risk.", + "Exclude all levels of risk (low, medium, high and floor)." ], "type": "string" }, "armsAmmunitionContent": { - "description": "Optional. Arms ammunition content.", + "description": "Optional. Arms and Ammunition Content [GARM](https://wfanet.org/leadership/garm/about-garm) risk ranges to exclude.", "enum": [ "GARM_RISK_EXCLUSION_UNSPECIFIED", "GARM_RISK_EXCLUSION_FLOOR", @@ -9596,16 +9596,16 @@ "GARM_RISK_EXCLUSION_LOW" ], "enumDescriptions": [ - "This enum is only a placeholder and it doesn't specify any GARM risk level option.", + "This enum is only a placeholder and it doesn't specify any GARM risk exclusion option.", "Exclude floor risk.", "Exclude high and floor risk.", - "Exclude high, medium, and floor risk.", - "Exclude All Level of Risk (Low, Medium, High and Floor)." + "Exclude medium, high, and floor risk.", + "Exclude all levels of risk (low, medium, high and floor)." ], "type": "string" }, "crimeHarmfulActsIndividualsSocietyHumanRightsViolationsContent": { - "description": "Optional. Crime harmful acts to individuals society human rights violations content.", + "description": "Optional. Crime and Harmful Acts Content [GARM](https://wfanet.org/leadership/garm/about-garm) risk ranges to exclude.", "enum": [ "GARM_RISK_EXCLUSION_UNSPECIFIED", "GARM_RISK_EXCLUSION_FLOOR", @@ -9614,16 +9614,16 @@ "GARM_RISK_EXCLUSION_LOW" ], "enumDescriptions": [ - "This enum is only a placeholder and it doesn't specify any GARM risk level option.", + "This enum is only a placeholder and it doesn't specify any GARM risk exclusion option.", "Exclude floor risk.", "Exclude high and floor risk.", - "Exclude high, medium, and floor risk.", - "Exclude All Level of Risk (Low, Medium, High and Floor)." + "Exclude medium, high, and floor risk.", + "Exclude all levels of risk (low, medium, high and floor)." ], "type": "string" }, "deathInjuryMilitaryConflictContent": { - "description": "Optional. Death injury military conflict content.", + "description": "Optional. 
Death, Injury, or Military Conflict Content [GARM](https://wfanet.org/leadership/garm/about-garm) risk ranges to exclude.", "enum": [ "GARM_RISK_EXCLUSION_UNSPECIFIED", "GARM_RISK_EXCLUSION_FLOOR", @@ -9632,16 +9632,16 @@ "GARM_RISK_EXCLUSION_LOW" ], "enumDescriptions": [ - "This enum is only a placeholder and it doesn't specify any GARM risk level option.", + "This enum is only a placeholder and it doesn't specify any GARM risk exclusion option.", "Exclude floor risk.", "Exclude high and floor risk.", - "Exclude high, medium, and floor risk.", - "Exclude All Level of Risk (Low, Medium, High and Floor)." + "Exclude medium, high, and floor risk.", + "Exclude all levels of risk (low, medium, high and floor)." ], "type": "string" }, "debatedSensitiveSocialIssueContent": { - "description": "Optional. Debated sensitive social issue content.", + "description": "Optional. Debated Sensitive Social Issue Content [GARM](https://wfanet.org/leadership/garm/about-garm) risk ranges to exclude.", "enum": [ "GARM_RISK_EXCLUSION_UNSPECIFIED", "GARM_RISK_EXCLUSION_FLOOR", @@ -9650,16 +9650,16 @@ "GARM_RISK_EXCLUSION_LOW" ], "enumDescriptions": [ - "This enum is only a placeholder and it doesn't specify any GARM risk level option.", + "This enum is only a placeholder and it doesn't specify any GARM risk exclusion option.", "Exclude floor risk.", "Exclude high and floor risk.", - "Exclude high, medium, and floor risk.", - "Exclude All Level of Risk (Low, Medium, High and Floor)." + "Exclude medium, high, and floor risk.", + "Exclude all levels of risk (low, medium, high and floor)." ], "type": "string" }, "displayIabViewability": { - "description": "Optional. Display IAB viewability.", + "description": "Optional. IAB viewability threshold for display ads.", "enum": [ "DISPLAY_IAB_VIEWABILITY_UNSPECIFIED", "DISPLAY_IAB_VIEWABILITY_10", @@ -9669,7 +9669,7 @@ "DISPLAY_IAB_VIEWABILITY_75" ], "enumDescriptions": [ - "This enum is only a placeholder and it doesn't specify any display viewability options.", + "Default value when not specified or is unknown in this version.", "10%+ in view (IAB display viewability standard).", "20%+ in view (IAB display viewability standard).", "35%+ in view (IAB display viewability standard).", @@ -9679,7 +9679,7 @@ "type": "string" }, "excludedAdlooxCategories": { - "description": "Adloox's brand safety settings.", + "description": "Adloox categories to exclude.", "items": { "enum": [ "ADLOOX_UNSPECIFIED", @@ -9693,7 +9693,7 @@ "FRAUD" ], "enumDescriptions": [ - "This enum is only a placeholder and it doesn't specify any Adloox option.", + "Default value when a Adloox category is not specified or is unknown in this version.", "Adult content (hard).", "Adult content (soft).", "Illegal content.", @@ -9708,14 +9708,14 @@ "type": "array" }, "excludedFraudIvtMfaCategories": { - "description": "Optional. Adloox's fraud IVT MFA settings.", + "description": "Optional. Adloox's fraud IVT MFA categories to exclude.", "items": { "enum": [ "FRAUD_IVT_MFA_CATEGORY_UNSPECIFIED", "FRAUD_IVT_MFA" ], "enumDescriptions": [ - "This enum is only a placeholder and it doesn't specify any fraud IVT MFA categories.", + "Default value when a Adloox Fraud, IVT, MFA category is not specified or is unknown in this version.", "FRAUD, IVT, MFA." ], "type": "string" @@ -9723,7 +9723,7 @@ "type": "array" }, "hateSpeechActsAggressionContent": { - "description": "Optional. Hate speech acts of aggression content.", + "description": "Optional. 
Hate Speech and Acts of Aggression Content [GARM](https://wfanet.org/leadership/garm/about-garm) risk ranges to exclude.", "enum": [ "GARM_RISK_EXCLUSION_UNSPECIFIED", "GARM_RISK_EXCLUSION_FLOOR", @@ -9732,16 +9732,16 @@ "GARM_RISK_EXCLUSION_LOW" ], "enumDescriptions": [ - "This enum is only a placeholder and it doesn't specify any GARM risk level option.", + "This enum is only a placeholder and it doesn't specify any GARM risk exclusion option.", "Exclude floor risk.", "Exclude high and floor risk.", - "Exclude high, medium, and floor risk.", - "Exclude All Level of Risk (Low, Medium, High and Floor)." + "Exclude medium, high, and floor risk.", + "Exclude all levels of risk (low, medium, high and floor)." ], "type": "string" }, "illegalDrugsTobaccoEcigarettesVapingAlcoholContent": { - "description": "Optional. Illegal drugs tobacco ecigarettes vaping alcohol content.", + "description": "Optional. Illegal Drugs/Alcohol Content [GARM](https://wfanet.org/leadership/garm/about-garm) risk ranges to exclude.", "enum": [ "GARM_RISK_EXCLUSION_UNSPECIFIED", "GARM_RISK_EXCLUSION_FLOOR", @@ -9750,16 +9750,16 @@ "GARM_RISK_EXCLUSION_LOW" ], "enumDescriptions": [ - "This enum is only a placeholder and it doesn't specify any GARM risk level option.", + "This enum is only a placeholder and it doesn't specify any GARM risk exclusion option.", "Exclude floor risk.", "Exclude high and floor risk.", - "Exclude high, medium, and floor risk.", - "Exclude All Level of Risk (Low, Medium, High and Floor)." + "Exclude medium, high, and floor risk.", + "Exclude all levels of risk (low, medium, high and floor)." ], "type": "string" }, "misinformationContent": { - "description": "Optional. Misinformation content.", + "description": "Optional. Misinformation Content [GARM](https://wfanet.org/leadership/garm/about-garm) risk ranges to exclude.", "enum": [ "GARM_RISK_EXCLUSION_UNSPECIFIED", "GARM_RISK_EXCLUSION_FLOOR", @@ -9768,16 +9768,16 @@ "GARM_RISK_EXCLUSION_LOW" ], "enumDescriptions": [ - "This enum is only a placeholder and it doesn't specify any GARM risk level option.", + "This enum is only a placeholder and it doesn't specify any GARM risk exclusion option.", "Exclude floor risk.", "Exclude high and floor risk.", - "Exclude high, medium, and floor risk.", - "Exclude All Level of Risk (Low, Medium, High and Floor)." + "Exclude medium, high, and floor risk.", + "Exclude all levels of risk (low, medium, high and floor)." ], "type": "string" }, "obscenityProfanityContent": { - "description": "Optional. Obscenity profanity content.", + "description": "Optional. Obscenity and Profanity Content [GARM](https://wfanet.org/leadership/garm/about-garm) risk ranges to exclude.", "enum": [ "GARM_RISK_EXCLUSION_UNSPECIFIED", "GARM_RISK_EXCLUSION_FLOOR", @@ -9786,16 +9786,16 @@ "GARM_RISK_EXCLUSION_LOW" ], "enumDescriptions": [ - "This enum is only a placeholder and it doesn't specify any GARM risk level option.", + "This enum is only a placeholder and it doesn't specify any GARM risk exclusion option.", "Exclude floor risk.", "Exclude high and floor risk.", - "Exclude high, medium, and floor risk.", - "Exclude All Level of Risk (Low, Medium, High and Floor)." + "Exclude medium, high, and floor risk.", + "Exclude all levels of risk (low, medium, high and floor)." ], "type": "string" }, "onlinePiracyContent": { - "description": "Optional. Online piracy content.", + "description": "Optional. 
Online Piracy Content [GARM](https://wfanet.org/leadership/garm/about-garm) risk ranges to exclude.", "enum": [ "GARM_RISK_EXCLUSION_UNSPECIFIED", "GARM_RISK_EXCLUSION_FLOOR", @@ -9804,16 +9804,16 @@ "GARM_RISK_EXCLUSION_LOW" ], "enumDescriptions": [ - "This enum is only a placeholder and it doesn't specify any GARM risk level option.", + "This enum is only a placeholder and it doesn't specify any GARM risk exclusion option.", "Exclude floor risk.", "Exclude high and floor risk.", - "Exclude high, medium, and floor risk.", - "Exclude All Level of Risk (Low, Medium, High and Floor)." + "Exclude medium, high, and floor risk.", + "Exclude all levels of risk (low, medium, high and floor)." ], "type": "string" }, "spamHarmfulContent": { - "description": "Optional. Spam harmful content.", + "description": "Optional. Spam or Harmful Content [GARM](https://wfanet.org/leadership/garm/about-garm) risk ranges to exclude.", "enum": [ "GARM_RISK_EXCLUSION_UNSPECIFIED", "GARM_RISK_EXCLUSION_FLOOR", @@ -9822,16 +9822,16 @@ "GARM_RISK_EXCLUSION_LOW" ], "enumDescriptions": [ - "This enum is only a placeholder and it doesn't specify any GARM risk level option.", + "This enum is only a placeholder and it doesn't specify any GARM risk exclusion option.", "Exclude floor risk.", "Exclude high and floor risk.", - "Exclude high, medium, and floor risk.", - "Exclude All Level of Risk (Low, Medium, High and Floor)." + "Exclude medium, high, and floor risk.", + "Exclude all levels of risk (low, medium, high and floor)." ], "type": "string" }, "terrorismContent": { - "description": "Optional. Terrorism content.", + "description": "Optional. Terrorism Content [GARM](https://wfanet.org/leadership/garm/about-garm) risk ranges to exclude.", "enum": [ "GARM_RISK_EXCLUSION_UNSPECIFIED", "GARM_RISK_EXCLUSION_FLOOR", @@ -9840,16 +9840,16 @@ "GARM_RISK_EXCLUSION_LOW" ], "enumDescriptions": [ - "This enum is only a placeholder and it doesn't specify any GARM risk level option.", + "This enum is only a placeholder and it doesn't specify any GARM risk exclusion option.", "Exclude floor risk.", "Exclude high and floor risk.", - "Exclude high, medium, and floor risk.", - "Exclude All Level of Risk (Low, Medium, High and Floor)." + "Exclude medium, high, and floor risk.", + "Exclude all levels of risk (low, medium, high and floor)." ], "type": "string" }, "videoIabViewability": { - "description": "Optional. Video IAB viewability.", + "description": "Optional. IAB viewability threshold for video ads.", "enum": [ "VIDEO_IAB_VIEWABILITY_UNSPECIFIED", "VIDEO_IAB_VIEWABILITY_10", @@ -9859,7 +9859,7 @@ "VIDEO_IAB_VIEWABILITY_75" ], "enumDescriptions": [ - "This enum is only a placeholder and it doesn't specify any video viewability options.", + "Default value when not specified or is unknown in this version.", "10%+ in view (IAB video viewability standard).", "20%+ in view (IAB video viewability standard).", "35%+ in view (IAB video viewability standard).", @@ -9887,7 +9887,7 @@ }, "billingConfig": { "$ref": "AdvertiserBillingConfig", - "description": "Optional. Required. Billing related settings of the advertiser." + "description": "Required. Billing related settings of the advertiser." }, "creativeConfig": { "$ref": "AdvertiserCreativeConfig", @@ -9976,7 +9976,7 @@ "id": "AdvertiserBillingConfig", "properties": { "billingProfileId": { - "description": "Optional. The ID of a billing profile assigned to the advertiser.", + "description": "Required. 
The ID of a billing profile assigned to the advertiser.", "format": "int64", "type": "string" } @@ -10322,7 +10322,9 @@ "EXCHANGE_CHARTBOOST_GBID", "EXCHANGE_ADMOST_GBID", "EXCHANGE_TOPON_GBID", - "EXCHANGE_NETFLIX" + "EXCHANGE_NETFLIX", + "EXCHANGE_CORE", + "EXCHANGE_TUBI" ], "enumDescriptions": [ "Exchange is not specified or is unknown in this version.", @@ -10407,7 +10409,9 @@ "Chartboost Mediation.", "AdMost.", "TopOn.", - "Netflix." + "Netflix.", + "Core.", + "Tubi." ], "type": "string" }, @@ -10566,10 +10570,10 @@ "enumDescriptions": [ "Unknown operator.", "Values are equal.", - "First value is greater than the comparison value.", - "First value is less than the second.", - "First value is greater than or equal to the second.", - "First value is less or equals to the comparison value." + "Signal value is greater than the comparison value.", + "Signal value is less than the second.", + "Signal value is greater than or equal to the second.", + "Signal value is less or equals to the comparison value." ], "type": "string" }, @@ -10725,7 +10729,12 @@ "ASSET_ROLE_IOS_APP_ID", "ASSET_ROLE_RATING", "ASSET_ROLE_ICON", - "ASSET_ROLE_COVER_IMAGE" + "ASSET_ROLE_COVER_IMAGE", + "ASSET_ROLE_BACKGROUND_COLOR", + "ASSET_ROLE_ACCENT_COLOR", + "ASSET_ROLE_REQUIRE_LOGO", + "ASSET_ROLE_REQUIRE_IMAGE", + "ASSET_ROLE_ENABLE_ASSET_ENHANCEMENTS" ], "enumDescriptions": [ "Asset role is not specified or is unknown in this version.", @@ -10744,7 +10753,12 @@ "The ID of an iOS app in the Apple app store. This ID number can be found in the Apple App Store URL as the string of numbers directly after \"id\". For example, in https://apps.apple.com/us/app/gmail-email-by-google/id422689480 the ID is 422689480. Assets of this role are read-only.", "The rating of an app in the Google play store or iOS app store. Note that this value is not automatically synced with the actual rating in the store. It will always be the one provided when save the creative. Assets of this role are read-only.", "The icon of a creative. This role is only supported and required in following creative_type: * `CREATIVE_TYPE_NATIVE` * `CREATIVE_TYPE_NATIVE_SITE_SQUARE`", - "The cover image of a native video creative. This role is only supported and required in following creative_type: * `CREATIVE_TYPE_VIDEO`" + "The cover image of a native video creative. This role is only supported and required in following creative_type: * `CREATIVE_TYPE_VIDEO`", + "The main color to use in a creative. This role is only supported and required in following creative_type: * `CREATIVE_TYPE_ASSET_BASED_CREATIVE`", + "The accent color to use in a creative. This role is only supported and required in following creative_type: * `CREATIVE_TYPE_ASSET_BASED_CREATIVE`", + "Whether the creative must use a logo asset. This role is only supported and required in following creative_type: * `CREATIVE_TYPE_ASSET_BASED_CREATIVE`", + "Whether the creative must use an image asset. This role is only supported and required in following creative_type: * `CREATIVE_TYPE_ASSET_BASED_CREATIVE`", + "Whether asset enhancements can be applied to the creative. This role is only supported and required in following creative_type: * `CREATIVE_TYPE_ASSET_BASED_CREATIVE`" ], "type": "string" } @@ -13215,7 +13229,7 @@ "SDF version 6", "SDF version 7. Read the [v7 migration guide](/display-video/api/structured-data-file/v7-migration-guide) before migrating to this version.", "SDF version 7.1. 
Read the [v7 migration guide](/display-video/api/structured-data-file/v7-migration-guide) before migrating to this version.", - "SDF version 8. **This SDF version is in beta. It is only available to a subset of users.**" + "SDF version 8. Read the [v8 migration guide](/display-video/api/structured-data-file/v8-migration-guide) before migrating to this version." ], "type": "string" } @@ -13324,7 +13338,8 @@ "CREATIVE_TYPE_AUDIO", "CREATIVE_TYPE_PUBLISHER_HOSTED", "CREATIVE_TYPE_NATIVE_VIDEO", - "CREATIVE_TYPE_TEMPLATED_APP_INSTALL_VIDEO" + "CREATIVE_TYPE_TEMPLATED_APP_INSTALL_VIDEO", + "CREATIVE_TYPE_ASSET_BASED_CREATIVE" ], "enumDescriptions": [ "Type value is not specified or is unknown in this version.", @@ -13341,7 +13356,8 @@ "Audio creative. Create and update methods are supported for this creative type if the hosting_source is `HOSTING_SOURCE_HOSTED`", "Publisher hosted creative. Create and update methods are **not** supported for this creative type.", "Native video creative. Create and update methods are supported for this creative type if the hosting_source is `HOSTING_SOURCE_HOSTED`", - "Templated app install mobile video creative. Create and update methods are **not** supported for this creative type." + "Templated app install mobile video creative. Create and update methods are **not** supported for this creative type.", + "Asset based creative. Create and update methods are supported for this creative type if the hosting_source is `HOSTING_SOURCE_HOSTED`." ], "type": "string" }, @@ -13601,7 +13617,8 @@ "CREATIVE_TYPE_AUDIO", "CREATIVE_TYPE_PUBLISHER_HOSTED", "CREATIVE_TYPE_NATIVE_VIDEO", - "CREATIVE_TYPE_TEMPLATED_APP_INSTALL_VIDEO" + "CREATIVE_TYPE_TEMPLATED_APP_INSTALL_VIDEO", + "CREATIVE_TYPE_ASSET_BASED_CREATIVE" ], "enumDescriptions": [ "Type value is not specified or is unknown in this version.", @@ -13618,7 +13635,8 @@ "Audio creative. Create and update methods are supported for this creative type if the hosting_source is `HOSTING_SOURCE_HOSTED`", "Publisher hosted creative. Create and update methods are **not** supported for this creative type.", "Native video creative. Create and update methods are supported for this creative type if the hosting_source is `HOSTING_SOURCE_HOSTED`", - "Templated app install mobile video creative. Create and update methods are **not** supported for this creative type." + "Templated app install mobile video creative. Create and update methods are **not** supported for this creative type.", + "Asset based creative. Create and update methods are supported for this creative type if the hosting_source is `HOSTING_SOURCE_HOSTED`." ], "type": "string" }, @@ -15102,7 +15120,9 @@ "EXCHANGE_CHARTBOOST_GBID", "EXCHANGE_ADMOST_GBID", "EXCHANGE_TOPON_GBID", - "EXCHANGE_NETFLIX" + "EXCHANGE_NETFLIX", + "EXCHANGE_CORE", + "EXCHANGE_TUBI" ], "enumDescriptions": [ "Exchange is not specified or is unknown in this version.", @@ -15187,7 +15207,9 @@ "Chartboost Mediation.", "AdMost.", "TopOn.", - "Netflix." + "Netflix.", + "Core.", + "Tubi." ], "type": "string" } @@ -15297,7 +15319,9 @@ "EXCHANGE_CHARTBOOST_GBID", "EXCHANGE_ADMOST_GBID", "EXCHANGE_TOPON_GBID", - "EXCHANGE_NETFLIX" + "EXCHANGE_NETFLIX", + "EXCHANGE_CORE", + "EXCHANGE_TUBI" ], "enumDescriptions": [ "Exchange is not specified or is unknown in this version.", @@ -15382,7 +15406,9 @@ "Chartboost Mediation.", "AdMost.", "TopOn.", - "Netflix." + "Netflix.", + "Core.", + "Tubi." 
], "type": "string" }, @@ -15493,7 +15519,9 @@ "EXCHANGE_CHARTBOOST_GBID", "EXCHANGE_ADMOST_GBID", "EXCHANGE_TOPON_GBID", - "EXCHANGE_NETFLIX" + "EXCHANGE_NETFLIX", + "EXCHANGE_CORE", + "EXCHANGE_TUBI" ], "enumDescriptions": [ "Exchange is not specified or is unknown in this version.", @@ -15578,7 +15606,9 @@ "Chartboost Mediation.", "AdMost.", "TopOn.", - "Netflix." + "Netflix.", + "Core.", + "Tubi." ], "type": "string" }, @@ -15690,7 +15720,9 @@ "EXCHANGE_CHARTBOOST_GBID", "EXCHANGE_ADMOST_GBID", "EXCHANGE_TOPON_GBID", - "EXCHANGE_NETFLIX" + "EXCHANGE_NETFLIX", + "EXCHANGE_CORE", + "EXCHANGE_TUBI" ], "enumDescriptions": [ "Exchange is not specified or is unknown in this version.", @@ -15775,7 +15807,9 @@ "Chartboost Mediation.", "AdMost.", "TopOn.", - "Netflix." + "Netflix.", + "Core.", + "Tubi." ], "readOnly": true, "type": "string" @@ -16738,7 +16772,9 @@ "EXCHANGE_CHARTBOOST_GBID", "EXCHANGE_ADMOST_GBID", "EXCHANGE_TOPON_GBID", - "EXCHANGE_NETFLIX" + "EXCHANGE_NETFLIX", + "EXCHANGE_CORE", + "EXCHANGE_TUBI" ], "enumDescriptions": [ "Exchange is not specified or is unknown in this version.", @@ -16823,7 +16859,9 @@ "Chartboost Mediation.", "AdMost.", "TopOn.", - "Netflix." + "Netflix.", + "Core.", + "Tubi." ], "type": "string" }, @@ -17205,7 +17243,7 @@ }, "pacing": { "$ref": "Pacing", - "description": "Required. The budget spending speed setting of the insertion order. *Warning*: Starting on **November 5, 2024**, pacing_type `PACING_TYPE_ASAP` will no longer be compatible with pacing_period `PACING_PERIOD_FLIGHT`. [Read more about this announced change](/display-video/api/deprecations#features.io_asap)." + "description": "Required. The budget spending speed setting of the insertion order. pacing_type `PACING_TYPE_ASAP` is not compatible with pacing_period `PACING_PERIOD_FLIGHT`." }, "partnerCosts": { "description": "The partner costs associated with the insertion order. If absent or empty in CreateInsertionOrder method, the newly created insertion order will inherit partner costs from the partner settings.", @@ -17665,7 +17703,9 @@ "EXCHANGE_CHARTBOOST_GBID", "EXCHANGE_ADMOST_GBID", "EXCHANGE_TOPON_GBID", - "EXCHANGE_NETFLIX" + "EXCHANGE_NETFLIX", + "EXCHANGE_CORE", + "EXCHANGE_TUBI" ], "enumDescriptions": [ "Exchange is not specified or is unknown in this version.", @@ -17750,7 +17790,9 @@ "Chartboost Mediation.", "AdMost.", "TopOn.", - "Netflix." + "Netflix.", + "Core.", + "Tubi." ], "type": "string" }, @@ -19874,7 +19916,7 @@ "type": "string" }, "pacingType": { - "description": "Required. The type of pacing that defines how the budget amount will be spent across the pacing_period. *Warning*: Starting on **November 5, 2024**, `PACING_TYPE_ASAP` will no longer be compatible with pacing_period `PACING_PERIOD_FLIGHT` for insertion orders. [Read more about this announced change](/display-video/api/deprecations#features.io_asap).", + "description": "Required. The type of pacing that defines how the budget amount will be spent across the pacing_period. `PACING_TYPE_ASAP` is not compatible with pacing_period `PACING_PERIOD_FLIGHT` for insertion orders.", "enum": [ "PACING_TYPE_UNSPECIFIED", "PACING_TYPE_AHEAD", @@ -20931,7 +20973,7 @@ "SDF version 6", "SDF version 7. Read the [v7 migration guide](/display-video/api/structured-data-file/v7-migration-guide) before migrating to this version.", "SDF version 7.1. Read the [v7 migration guide](/display-video/api/structured-data-file/v7-migration-guide) before migrating to this version.", - "SDF version 8. **This SDF version is in beta. 
It is only available to a subset of users.**" + "SDF version 8. Read the [v8 migration guide](/display-video/api/structured-data-file/v8-migration-guide) before migrating to this version." ], "type": "string" } @@ -21014,7 +21056,7 @@ "SDF version 6", "SDF version 7. Read the [v7 migration guide](/display-video/api/structured-data-file/v7-migration-guide) before migrating to this version.", "SDF version 7.1. Read the [v7 migration guide](/display-video/api/structured-data-file/v7-migration-guide) before migrating to this version.", - "SDF version 8. **This SDF version is in beta. It is only available to a subset of users.**" + "SDF version 8. Read the [v8 migration guide](/display-video/api/structured-data-file/v8-migration-guide) before migrating to this version." ], "type": "string" } diff --git a/discovery/googleapis/dlp__v2.json b/discovery/googleapis/dlp__v2.json index 4b5f3aa00..2d9be5584 100644 --- a/discovery/googleapis/dlp__v2.json +++ b/discovery/googleapis/dlp__v2.json @@ -242,7 +242,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20241006", + "revision": "20241204", "rootUrl": "https://dlp.googleapis.com/", "servicePath": "", "title": "Sensitive Data Protection (DLP)", @@ -329,7 +329,7 @@ "infoTypes": { "methods": { "list": { - "description": "Returns a list of the sensitive information types that DLP API supports. See https://cloud.google.com/sensitive-data-protection/docs/infotypes-reference to learn more.", + "description": "Returns a list of the sensitive information types that the DLP API supports. See https://cloud.google.com/sensitive-data-protection/docs/infotypes-reference to learn more.", "flatPath": "v2/infoTypes", "httpMethod": "GET", "id": "dlp.infoTypes.list", @@ -371,7 +371,7 @@ "infoTypes": { "methods": { "list": { - "description": "Returns a list of the sensitive information types that DLP API supports. See https://cloud.google.com/sensitive-data-protection/docs/infotypes-reference to learn more.", + "description": "Returns a list of the sensitive information types that the DLP API supports. See https://cloud.google.com/sensitive-data-protection/docs/infotypes-reference to learn more.", "flatPath": "v2/locations/{locationsId}/infoTypes", "httpMethod": "GET", "id": "dlp.locations.infoTypes.list", @@ -5433,7 +5433,7 @@ "type": "string" }, "projectId": { - "description": "The Google Cloud Platform project ID of the project containing the table. If omitted, project ID is inferred from the API call.", + "description": "The Google Cloud project ID of the project containing the table. If omitted, project ID is inferred from the API call.", "type": "string" }, "tableId": { @@ -5464,12 +5464,14 @@ "enum": [ "BIG_QUERY_TABLE_TYPE_UNSPECIFIED", "BIG_QUERY_TABLE_TYPE_TABLE", - "BIG_QUERY_TABLE_TYPE_EXTERNAL_BIG_LAKE" + "BIG_QUERY_TABLE_TYPE_EXTERNAL_BIG_LAKE", + "BIG_QUERY_TABLE_TYPE_SNAPSHOT" ], "enumDescriptions": [ "Unused.", "A normal BigQuery table.", - "A table that references data stored in Cloud Storage." + "A table that references data stored in Cloud Storage.", + "A snapshot of a BigQuery table." ], "type": "string" }, @@ -5566,7 +5568,8 @@ "TSV", "AUDIO", "VIDEO", - "EXECUTABLE" + "EXECUTABLE", + "AI_MODEL" ], "enumDescriptions": [ "Unused", @@ -5585,7 +5588,8 @@ "tsv", "Audio file types. Only used for profiling.", "Video file types. Only used for profiling.", - "Executable file types. Only used for profiling." + "Executable file types. Only used for profiling.", + "AI model file types. Only used for profiling." 
], "type": "string" } @@ -5771,7 +5775,7 @@ "type": "string" }, "maxConnections": { - "description": "Required. DLP will limit its connections to max_connections. Must be 2 or greater.", + "description": "Required. The DLP API will limit its connections to max_connections. Must be 2 or greater.", "format": "int32", "type": "integer" }, @@ -6209,7 +6213,7 @@ "type": "object" }, "GooglePrivacyDlpV2Connection": { - "description": "A data connection to allow DLP to profile data in locations that require additional configuration.", + "description": "A data connection to allow the DLP API to profile data in locations that require additional configuration.", "id": "GooglePrivacyDlpV2Connection", "properties": { "cloudSql": { @@ -6239,7 +6243,7 @@ ], "enumDescriptions": [ "Unused", - "DLP automatically created this connection during an initial scan, and it is awaiting full configuration by a user.", + "The DLP API automatically created this connection during an initial scan, and it is awaiting full configuration by a user.", "A configured connection that has not encountered any errors.", "A configured connection that encountered errors during its last use. It will not be used again until it is set to AVAILABLE. If the resolution requires external action, then the client must send a request to set the status to AVAILABLE when the connection is ready for use. If the resolution doesn't require external action, then any changes to the connection properties will automatically mark it as AVAILABLE." ], @@ -6647,7 +6651,7 @@ }, "publishToScc": { "$ref": "GooglePrivacyDlpV2PublishToSecurityCommandCenter", - "description": "Publishes findings to SCC for each data profile." + "description": "Publishes findings to Security Command Center for each data profile." }, "tagResources": { "$ref": "GooglePrivacyDlpV2TagResources", @@ -6731,7 +6735,7 @@ "description": "Must be set only when scanning other clouds." }, "projectId": { - "description": "The project that will run the scan. The DLP service account that exists within this project must have access to all resources that are profiled, and the Cloud DLP API must be enabled.", + "description": "The project that will run the scan. The DLP service account that exists within this project must have access to all resources that are profiled, and the DLP API must be enabled.", "type": "string" } }, @@ -8267,7 +8271,8 @@ "CLUSTER_IMAGE", "CLUSTER_ARCHIVE", "CLUSTER_MULTIMEDIA", - "CLUSTER_EXECUTABLE" + "CLUSTER_EXECUTABLE", + "CLUSTER_AI_MODEL" ], "enumDescriptions": [ "Unused.", @@ -8279,7 +8284,8 @@ "Images like jpeg, bmp.", "Archives and containers like .zip, .tar etc.", "Multimedia like .mp4, .avi etc.", - "Executable files like .exe, .class, .apk etc." + "Executable files like .exe, .class, .apk etc.", + "AI models like .tflite etc." ], "type": "string" } @@ -9038,6 +9044,10 @@ "description": "Human readable form of the infoType name.", "type": "string" }, + "example": { + "description": "A sample true positive for this infoType.", + "type": "string" + }, "name": { "description": "Internal name of the infoType.", "type": "string" @@ -10226,7 +10236,7 @@ "description": "The data to scan: folder, org, or project" }, "projectId": { - "description": "The project that will run the scan. The DLP service account that exists within this project must have access to all resources that are profiled, and the Cloud DLP API must be enabled.", + "description": "The project that will run the scan. 
The DLP service account that exists within this project must have access to all resources that are profiled, and the DLP API must be enabled.", "type": "string" } }, @@ -10706,7 +10716,7 @@ "type": "object" }, "GooglePrivacyDlpV2PublishToSecurityCommandCenter": { - "description": "If set, a summary finding will be created/updated in SCC for each profile.", + "description": "If set, a summary finding will be created or updated in Security Command Center for each profile.", "id": "GooglePrivacyDlpV2PublishToSecurityCommandCenter", "properties": {}, "type": "object" diff --git a/discovery/googleapis/documentai__v1.json b/discovery/googleapis/documentai__v1.json index 4292f462f..18f58e265 100644 --- a/discovery/googleapis/documentai__v1.json +++ b/discovery/googleapis/documentai__v1.json @@ -25,7 +25,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20240911", + "revision": "20241210", "rootUrl": "https://documentai.googleapis.com/", "servicePath": "", "title": "Cloud Document AI API", @@ -238,7 +238,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "documentai.projects.locations.operations.cancel", @@ -6190,18 +6190,18 @@ "type": "object" }, "GoogleTypePostalAddress": { - "description": "Represents a postal address, e.g. for postal delivery or payments addresses. Given a postal address, a postal service can deliver items to a premise, P.O. Box or similar. It is not intended to model geographical locations (roads, towns, mountains). In typical usage an address would be created via user input or from importing existing data, depending on the type of process. Advice on address input / editing: - Use an internationalization-ready address widget such as https://github.com/google/libaddressinput) - Users should not be presented with UI elements for input or editing of fields outside countries where that field is used. For more guidance on how to use this schema, please see: https://support.google.com/business/answer/6397478", + "description": "Represents a postal address. For example for postal delivery or payments addresses. Given a postal address, a postal service can deliver items to a premise, P.O. Box or similar. 
It is not intended to model geographical locations (roads, towns, mountains). In typical usage an address would be created by user input or from importing existing data, depending on the type of process. Advice on address input / editing: - Use an internationalization-ready address widget such as https://github.com/google/libaddressinput) - Users should not be presented with UI elements for input or editing of fields outside countries where that field is used. For more guidance on how to use this schema, see: https://support.google.com/business/answer/6397478", "id": "GoogleTypePostalAddress", "properties": { "addressLines": { - "description": "Unstructured address lines describing the lower levels of an address. Because values in address_lines do not have type information and may sometimes contain multiple values in a single field (e.g. \"Austin, TX\"), it is important that the line order is clear. The order of address lines should be \"envelope order\" for the country/region of the address. In places where this can vary (e.g. Japan), address_language is used to make it explicit (e.g. \"ja\" for large-to-small ordering and \"ja-Latn\" or \"en\" for small-to-large). This way, the most specific line of an address can be selected based on the language. The minimum permitted structural representation of an address consists of a region_code with all remaining information placed in the address_lines. It would be possible to format such an address very approximately without geocoding, but no semantic reasoning could be made about any of the address components until it was at least partially resolved. Creating an address only containing a region_code and address_lines, and then geocoding is the recommended way to handle completely unstructured addresses (as opposed to guessing which parts of the address should be localities or administrative areas).", + "description": "Unstructured address lines describing the lower levels of an address. Because values in address_lines do not have type information and may sometimes contain multiple values in a single field (For example \"Austin, TX\"), it is important that the line order is clear. The order of address lines should be \"envelope order\" for the country/region of the address. In places where this can vary (For example Japan), address_language is used to make it explicit (For example \"ja\" for large-to-small ordering and \"ja-Latn\" or \"en\" for small-to-large). This way, the most specific line of an address can be selected based on the language. The minimum permitted structural representation of an address consists of a region_code with all remaining information placed in the address_lines. It would be possible to format such an address very approximately without geocoding, but no semantic reasoning could be made about any of the address components until it was at least partially resolved. Creating an address only containing a region_code and address_lines, and then geocoding is the recommended way to handle completely unstructured addresses (as opposed to guessing which parts of the address should be localities or administrative areas).", "items": { "type": "string" }, "type": "array" }, "administrativeArea": { - "description": "Optional. Highest administrative subdivision which is used for postal addresses of a country or region. For example, this can be a state, a province, an oblast, or a prefecture. Specifically, for Spain this is the province and not the autonomous community (e.g. \"Barcelona\" and not \"Catalonia\"). 
Many countries don't use an administrative area in postal addresses. E.g. in Switzerland this should be left unpopulated.", + "description": "Optional. Highest administrative subdivision which is used for postal addresses of a country or region. For example, this can be a state, a province, an oblast, or a prefecture. Specifically, for Spain this is the province and not the autonomous community (For example \"Barcelona\" and not \"Catalonia\"). Many countries don't use an administrative area in postal addresses. For example in Switzerland this should be left unpopulated.", "type": "string" }, "languageCode": { @@ -6217,7 +6217,7 @@ "type": "string" }, "postalCode": { - "description": "Optional. Postal code of the address. Not all countries use or require postal codes to be present, but where they are used, they may trigger additional validation with other parts of the address (e.g. state/zip validation in the U.S.A.).", + "description": "Optional. Postal code of the address. Not all countries use or require postal codes to be present, but where they are used, they may trigger additional validation with other parts of the address (For example state/zip validation in the U.S.A.).", "type": "string" }, "recipients": { @@ -6237,7 +6237,7 @@ "type": "integer" }, "sortingCode": { - "description": "Optional. Additional, country-specific, sorting code. This is not used in most regions. Where it is used, the value is either a string like \"CEDEX\", optionally followed by a number (e.g. \"CEDEX 7\"), or just a number alone, representing the \"sector code\" (Jamaica), \"delivery area indicator\" (Malawi) or \"post office indicator\" (e.g. Côte d'Ivoire).", + "description": "Optional. Additional, country-specific, sorting code. This is not used in most regions. Where it is used, the value is either a string like \"CEDEX\", optionally followed by a number (For example \"CEDEX 7\"), or just a number alone, representing the \"sector code\" (Jamaica), \"delivery area indicator\" (Malawi) or \"post office indicator\" (For example Côte d'Ivoire).", "type": "string" }, "sublocality": { @@ -6252,11 +6252,11 @@ "id": "GoogleTypeTimeZone", "properties": { "id": { - "description": "IANA Time Zone Database time zone, e.g. \"America/New_York\".", + "description": "IANA Time Zone Database time zone. For example \"America/New_York\".", "type": "string" }, "version": { - "description": "Optional. IANA Time Zone Database version number, e.g. \"2019a\".", + "description": "Optional. IANA Time Zone Database version number. For example \"2019a\".", "type": "string" } }, diff --git a/discovery/googleapis/drive__v3.json b/discovery/googleapis/drive__v3.json index 4c19b1b29..482130742 100644 --- a/discovery/googleapis/drive__v3.json +++ b/discovery/googleapis/drive__v3.json @@ -51,7 +51,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20241014", + "revision": "20241206", "rootUrl": "https://www.googleapis.com/", "servicePath": "drive/v3/", "title": "Google Drive API", @@ -137,7 +137,7 @@ "about": { "methods": { "get": { - "description": "Gets information about the user, the user's Drive, and system capabilities.", + "description": "Gets information about the user, the user's Drive, and system capabilities. For more information, see [Return user info](https://developers.google.com/drive/api/guides/user-info). Required: The `fields` parameter must be set. 
To return the exact fields you need, see [Return specific fields](https://developers.google.com/drive/api/guides/fields-parameter).", "flatPath": "about", "httpMethod": "GET", "id": "drive.about.get", @@ -159,10 +159,120 @@ } } }, + "accessproposals": { + "methods": { + "get": { + "description": "Retrieves an AccessProposal by ID.", + "flatPath": "files/{fileId}/accessproposals/{proposalId}", + "httpMethod": "GET", + "id": "drive.accessproposals.get", + "parameterOrder": [ + "fileId", + "proposalId" + ], + "parameters": { + "fileId": { + "description": "Required. The id of the item the request is on.", + "location": "path", + "required": true, + "type": "string" + }, + "proposalId": { + "description": "Required. The id of the access proposal to resolve.", + "location": "path", + "required": true, + "type": "string" + } + }, + "path": "files/{fileId}/accessproposals/{proposalId}", + "response": { + "$ref": "AccessProposal" + }, + "scopes": [ + "https://www.googleapis.com/auth/drive", + "https://www.googleapis.com/auth/drive.file", + "https://www.googleapis.com/auth/drive.metadata", + "https://www.googleapis.com/auth/drive.metadata.readonly", + "https://www.googleapis.com/auth/drive.readonly" + ] + }, + "list": { + "description": "List the AccessProposals on a file. Note: Only approvers are able to list AccessProposals on a file. If the user is not an approver, returns a 403.", + "flatPath": "files/{fileId}/accessproposals", + "httpMethod": "GET", + "id": "drive.accessproposals.list", + "parameterOrder": [ + "fileId" + ], + "parameters": { + "fileId": { + "description": "Required. The id of the item the request is on.", + "location": "path", + "required": true, + "type": "string" + }, + "pageSize": { + "description": "Optional. The number of results per page", + "format": "int32", + "location": "query", + "type": "integer" + }, + "pageToken": { + "description": "Optional. The continuation token on the list of access requests.", + "location": "query", + "type": "string" + } + }, + "path": "files/{fileId}/accessproposals", + "response": { + "$ref": "ListAccessProposalsResponse" + }, + "scopes": [ + "https://www.googleapis.com/auth/drive", + "https://www.googleapis.com/auth/drive.file", + "https://www.googleapis.com/auth/drive.metadata", + "https://www.googleapis.com/auth/drive.metadata.readonly", + "https://www.googleapis.com/auth/drive.readonly" + ] + }, + "resolve": { + "description": "Used to approve or deny an Access Proposal.", + "flatPath": "files/{fileId}/accessproposals/{proposalId}:resolve", + "httpMethod": "POST", + "id": "drive.accessproposals.resolve", + "parameterOrder": [ + "fileId", + "proposalId" + ], + "parameters": { + "fileId": { + "description": "Required. The id of the item the request is on.", + "location": "path", + "required": true, + "type": "string" + }, + "proposalId": { + "description": "Required. The id of the access proposal to resolve.", + "location": "path", + "required": true, + "type": "string" + } + }, + "path": "files/{fileId}/accessproposals/{proposalId}:resolve", + "request": { + "$ref": "ResolveAccessProposalRequest" + }, + "scopes": [ + "https://www.googleapis.com/auth/drive", + "https://www.googleapis.com/auth/drive.file" + ] + } + } + }, "apps": { "methods": { "get": { - "description": "Gets a specific app.", + "description": "Gets a specific app. 
For more information, see [Return user info](https://developers.google.com/drive/api/guides/user-info).", "flatPath": "apps/{appId}", "httpMethod": "GET", "id": "drive.apps.get", @@ -192,7 +302,7 @@ ] }, "list": { - "description": "Lists a user's installed apps.", + "description": "Lists a user's installed apps. For more information, see [Return user info](https://developers.google.com/drive/api/guides/user-info).", "flatPath": "apps", "httpMethod": "GET", "id": "drive.apps.list", @@ -229,7 +339,7 @@ "changes": { "methods": { "getStartPageToken": { - "description": "Gets the starting pageToken for listing future changes.", + "description": "Gets the starting pageToken for listing future changes. For more information, see [Retrieve changes](https://developers.google.com/drive/api/guides/manage-changes).", "flatPath": "changes/startPageToken", "httpMethod": "GET", "id": "drive.changes.getStartPageToken", @@ -276,7 +386,7 @@ ] }, "list": { - "description": "Lists the changes for a user or shared drive.", + "description": "Lists the changes for a user or shared drive. For more information, see [Retrieve changes](https://developers.google.com/drive/api/guides/manage-changes).", "flatPath": "changes", "httpMethod": "GET", "id": "drive.changes.list", @@ -388,7 +498,7 @@ "supportsSubscription": true }, "watch": { - "description": "Subscribes to changes for a user.", + "description": "Subscribes to changes for a user. For more information, see [Notifications for resource changes](https://developers.google.com/drive/api/guides/push).", "flatPath": "changes/watch", "httpMethod": "POST", "id": "drive.changes.watch", @@ -508,7 +618,7 @@ "channels": { "methods": { "stop": { - "description": "Stops watching resources through this channel.", + "description": "Stops watching resources through this channel. For more information, see [Notifications for resource changes](https://developers.google.com/drive/api/guides/push).", "flatPath": "channels/stop", "httpMethod": "POST", "id": "drive.channels.stop", @@ -535,7 +645,7 @@ "comments": { "methods": { "create": { - "description": "Creates a comment on a file.", + "description": "Creates a comment on a file. For more information, see [Manage comments and replies](https://developers.google.com/drive/api/guides/manage-comments). Required: The `fields` parameter must be set. To return the exact fields you need, see [Return specific fields](https://developers.google.com/drive/api/guides/fields-parameter).", "flatPath": "files/{fileId}/comments", "httpMethod": "POST", "id": "drive.comments.create", @@ -563,7 +673,7 @@ ] }, "delete": { - "description": "Deletes a comment.", + "description": "Deletes a comment. For more information, see [Manage comments and replies](https://developers.google.com/drive/api/guides/manage-comments). Required: The `fields` parameter must be set. To return the exact fields you need, see [Return specific fields](https://developers.google.com/drive/api/guides/fields-parameter).", "flatPath": "files/{fileId}/comments/{commentId}", "httpMethod": "DELETE", "id": "drive.comments.delete", @@ -592,7 +702,7 @@ ] }, "get": { - "description": "Gets a comment by ID.", + "description": "Gets a comment by ID. For more information, see [Manage comments and replies](https://developers.google.com/drive/api/guides/manage-comments). Required: The `fields` parameter must be set. 
To return the exact fields you need, see [Return specific fields](https://developers.google.com/drive/api/guides/fields-parameter).", "flatPath": "files/{fileId}/comments/{commentId}", "httpMethod": "GET", "id": "drive.comments.get", @@ -632,7 +742,7 @@ ] }, "list": { - "description": "Lists a file's comments.", + "description": "Lists a file's comments. For more information, see [Manage comments and replies](https://developers.google.com/drive/api/guides/manage-comments). Required: The `fields` parameter must be set. To return the exact fields you need, see [Return specific fields](https://developers.google.com/drive/api/guides/fields-parameter).", "flatPath": "files/{fileId}/comments", "httpMethod": "GET", "id": "drive.comments.list", @@ -684,7 +794,7 @@ ] }, "update": { - "description": "Updates a comment with patch semantics.", + "description": "Updates a comment with patch semantics. For more information, see [Manage comments and replies](https://developers.google.com/drive/api/guides/manage-comments). Required: The `fields` parameter must be set. To return the exact fields you need, see [Return specific fields](https://developers.google.com/drive/api/guides/fields-parameter).", "flatPath": "files/{fileId}/comments/{commentId}", "httpMethod": "PATCH", "id": "drive.comments.update", @@ -1706,117 +1816,12 @@ ], "supportsSubscription": true } - }, - "resources": { - "accessproposals": { - "methods": { - "list": { - "description": "List the AccessProposals on a file. Note: Only approvers are able to list AccessProposals on a file. If the user is not an approver, returns a 403.", - "flatPath": "files/{fileId}/accessproposals", - "httpMethod": "GET", - "id": "drive.files.accessproposals.list", - "parameterOrder": [ - "fileId" - ], - "parameters": { - "fileId": { - "description": "Required. The id of the item the request is on.", - "location": "path", - "required": true, - "type": "string" - }, - "pageSize": { - "description": "Optional. The number of results per page", - "format": "int32", - "location": "query", - "type": "integer" - }, - "pageToken": { - "description": "Optional. The continuation token on the list of access requests.", - "location": "query", - "type": "string" - } - }, - "path": "files/{fileId}/accessproposals", - "response": { - "$ref": "ListAccessProposalsResponse" - }, - "scopes": [ - "https://www.googleapis.com/auth/drive", - "https://www.googleapis.com/auth/drive.file", - "https://www.googleapis.com/auth/drive.metadata", - "https://www.googleapis.com/auth/drive.metadata.readonly", - "https://www.googleapis.com/auth/drive.readonly" - ] - }, - "resolve": { - "description": "Used to approve or deny an Access Proposal.", - "flatPath": "files/{fileId}/accessproposals/{proposalId}:resolve", - "httpMethod": "POST", - "id": "drive.files.accessproposals.resolve", - "parameterOrder": [ - "fileId", - "proposalId" - ], - "parameters": { - "action": { - "description": "Required. The action to take on the AccessProposal.", - "enum": [ - "ACTION_UNSPECIFIED", - "ACCEPT", - "DENY" - ], - "enumDescriptions": [ - "Unspecified action", - "The user accepts the proposal", - "The user denies the proposal" - ], - "location": "query", - "type": "string" - }, - "fileId": { - "description": "Required. The id of the item the request is on.", - "location": "path", - "required": true, - "type": "string" - }, - "proposalId": { - "description": "Required. 
The id of the access proposal to resolve.", - "location": "path", - "required": true, - "type": "string" - }, - "role": { - "description": "Optional. The roles the approver has allowed, if any. Note: This field is required for the `ACCEPT` action.", - "location": "query", - "repeated": true, - "type": "string" - }, - "sendNotification": { - "description": "Optional. Whether to send an email to the requester when the AccessProposal is denied or accepted.", - "location": "query", - "type": "boolean" - }, - "view": { - "description": "Optional. Indicates the view for this access proposal. This should only be set when the proposal belongs to a view. `published` is the only supported value.", - "location": "query", - "type": "string" - } - }, - "path": "files/{fileId}/accessproposals/{proposalId}:resolve", - "scopes": [ - "https://www.googleapis.com/auth/drive", - "https://www.googleapis.com/auth/drive.file" - ] - } - } - } } }, "operation": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "operation/{name}:cancel", "httpMethod": "POST", "id": "drive.operation.cancel", @@ -4617,7 +4622,7 @@ "type": "object" }, "Permission": { - "description": "A permission for a file. A permission grants a user, group, domain, or the world access to a file or a folder hierarchy. Some resource methods (such as `permissions.update`) require a `permissionId`. Use the `permissions.list` method to retrieve the ID for a file, folder, or shared drive.", + "description": "A permission for a file. A permission grants a user, group, domain, or the world access to a file or a folder hierarchy. By default, permissions requests only return a subset of fields. Permission kind, ID, type, and role are always returned. To retrieve specific fields, see https://developers.google.com/drive/api/guides/fields-parameter. Some resource methods (such as `permissions.update`) require a `permissionId`. Use the `permissions.list` method to retrieve the ID for a file, folder, or shared drive.", "id": "Permission", "properties": { "allowFileDiscovery": { @@ -4841,6 +4846,42 @@ }, "type": "object" }, + "ResolveAccessProposalRequest": { + "description": "Request message for resolving an AccessProposal on a file.", + "id": "ResolveAccessProposalRequest", + "properties": { + "action": { + "description": "Required. 
The action to take on the AccessProposal.", + "enum": [ + "ACTION_UNSPECIFIED", + "ACCEPT", + "DENY" + ], + "enumDescriptions": [ + "Unspecified action", + "The user accepts the proposal. Note: If this action is used, the `role` field must have at least one value.", + "The user denies the proposal" + ], + "type": "string" + }, + "role": { + "description": "Optional. The roles the approver has allowed, if any. Note: This field is required for the `ACCEPT` action.", + "items": { + "type": "string" + }, + "type": "array" + }, + "sendNotification": { + "description": "Optional. Whether to send an email to the requester when the AccessProposal is denied or accepted.", + "type": "boolean" + }, + "view": { + "description": "Optional. Indicates the view for this access proposal. This should only be set when the proposal belongs to a view. `published` is the only supported value.", + "type": "string" + } + }, + "type": "object" + }, "Revision": { "description": "The metadata for a revision to a file. Some resource methods (such as `revisions.update`) require a `revisionId`. Use the `revisions.list` method to retrieve the ID for a revision.", "id": "Revision", diff --git a/discovery/googleapis/eventarc__v1.json b/discovery/googleapis/eventarc__v1.json index 1d998e83f..4028e8616 100644 --- a/discovery/googleapis/eventarc__v1.json +++ b/discovery/googleapis/eventarc__v1.json @@ -25,7 +25,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20240913", + "revision": "20241203", "rootUrl": "https://eventarc.googleapis.com/", "servicePath": "", "title": "Eventarc API", @@ -716,6 +716,109 @@ }, "enrollments": { "methods": { + "create": { + "description": "Create a new Enrollment in a particular project and location.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/enrollments", + "httpMethod": "POST", + "id": "eventarc.projects.locations.enrollments.create", + "parameterOrder": [ + "parent" + ], + "parameters": { + "enrollmentId": { + "description": "Required. The user-provided ID to be assigned to the Enrollment. It should match the format `^[a-z]([a-z0-9-]{0,61}[a-z0-9])?$`.", + "location": "query", + "type": "string" + }, + "parent": { + "description": "Required. The parent collection in which to add this enrollment.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+$", + "required": true, + "type": "string" + }, + "validateOnly": { + "description": "Optional. If set, validate the request and preview the review, but do not post it.", + "location": "query", + "type": "boolean" + } + }, + "path": "v1/{+parent}/enrollments", + "request": { + "$ref": "Enrollment" + }, + "response": { + "$ref": "GoogleLongrunningOperation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "delete": { + "description": "Delete a single Enrollment.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/enrollments/{enrollmentsId}", + "httpMethod": "DELETE", + "id": "eventarc.projects.locations.enrollments.delete", + "parameterOrder": [ + "name" + ], + "parameters": { + "allowMissing": { + "description": "Optional. If set to true, and the Enrollment is not found, the request will succeed but no action will be taken on the server.", + "location": "query", + "type": "boolean" + }, + "etag": { + "description": "Optional. If provided, the Enrollment will only be deleted if the etag matches the current etag on the resource.", + "location": "query", + "type": "string" + }, + "name": { + "description": "Required. 
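As an illustration of the new Drive v3 surface above (the top-level accessproposals methods and the ResolveAccessProposalRequest schema), a minimal REST sketch using Python's requests library; the access token, file ID, proposal ID, and the "reader" role are placeholder assumptions, not values taken from this change.

```python
# Minimal sketch, not a generated client: list and resolve Drive access
# proposals over plain REST (rootUrl + servicePath = .../drive/v3/).
# ACCESS_TOKEN, FILE_ID, PROPOSAL_ID and the "reader" role are placeholders.
import requests

ACCESS_TOKEN = "<oauth2-access-token>"   # needs a drive or drive.file scope
FILE_ID = "<file-id>"
PROPOSAL_ID = "<proposal-id>"
BASE = "https://www.googleapis.com/drive/v3"
HEADERS = {"Authorization": f"Bearer {ACCESS_TOKEN}"}

# accessproposals.list: only approvers may call this; others get a 403.
proposals = requests.get(
    f"{BASE}/files/{FILE_ID}/accessproposals",
    headers=HEADERS,
    params={"pageSize": 10},
).json()

# accessproposals.resolve: per ResolveAccessProposalRequest, ACCEPT requires
# at least one entry in "role".
requests.post(
    f"{BASE}/files/{FILE_ID}/accessproposals/{PROPOSAL_ID}:resolve",
    headers=HEADERS,
    json={"action": "ACCEPT", "role": ["reader"], "sendNotification": True},
).raise_for_status()
```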
The name of the Enrollment to be deleted.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/enrollments/[^/]+$", + "required": true, + "type": "string" + }, + "validateOnly": { + "description": "Optional. If set, validate the request and preview the review, but do not post it.", + "location": "query", + "type": "boolean" + } + }, + "path": "v1/{+name}", + "response": { + "$ref": "GoogleLongrunningOperation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "get": { + "description": "Get a single Enrollment.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/enrollments/{enrollmentsId}", + "httpMethod": "GET", + "id": "eventarc.projects.locations.enrollments.get", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "Required. The name of the Enrollment to get.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/enrollments/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+name}", + "response": { + "$ref": "Enrollment" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, "getIamPolicy": { "description": "Gets the access control policy for a resource. Returns an empty policy if the resource exists and does not have a policy set.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/enrollments/{enrollmentsId}:getIamPolicy", @@ -747,6 +850,96 @@ "https://www.googleapis.com/auth/cloud-platform" ] }, + "list": { + "description": "List Enrollments.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/enrollments", + "httpMethod": "GET", + "id": "eventarc.projects.locations.enrollments.list", + "parameterOrder": [ + "parent" + ], + "parameters": { + "filter": { + "description": "Optional. The filter field that the list request will filter on. Possible filtersare described in https://google.aip.dev/160.", + "location": "query", + "type": "string" + }, + "orderBy": { + "description": "Optional. The sorting order of the resources returned. Value should be a comma-separated list of fields. The default sorting order is ascending. To specify descending order for a field, append a `desc` suffix; for example: `name desc, update_time`.", + "location": "query", + "type": "string" + }, + "pageSize": { + "description": "Optional. The maximum number of results to return on each page. Note: The service may send fewer.", + "format": "int32", + "location": "query", + "type": "integer" + }, + "pageToken": { + "description": "Optional. The page token; provide the value from the `next_page_token` field in a previous call to retrieve the subsequent page. When paginating, all other parameters provided must match the previous call that provided the page token.", + "location": "query", + "type": "string" + }, + "parent": { + "description": "Required. The parent collection to list triggers on.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+parent}/enrollments", + "response": { + "$ref": "ListEnrollmentsResponse" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "patch": { + "description": "Update a single Enrollment.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/enrollments/{enrollmentsId}", + "httpMethod": "PATCH", + "id": "eventarc.projects.locations.enrollments.patch", + "parameterOrder": [ + "name" + ], + "parameters": { + "allowMissing": { + "description": "Optional. 
If set to true, and the Enrollment is not found, a new Enrollment will be created. In this situation, `update_mask` is ignored.", + "location": "query", + "type": "boolean" + }, + "name": { + "description": "Identifier. Resource name of the form projects/{project}/locations/{location}/enrollments/{enrollment}", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/enrollments/[^/]+$", + "required": true, + "type": "string" + }, + "updateMask": { + "description": "Optional. The fields to be updated; only fields explicitly provided are updated. If no field mask is provided, all provided fields in the request are updated. To update all fields, provide a field mask of \"*\".", + "format": "google-fieldmask", + "location": "query", + "type": "string" + }, + "validateOnly": { + "description": "Optional. If set, validate the request and preview the review, but do not post it.", + "location": "query", + "type": "boolean" + } + }, + "path": "v1/{+name}", + "request": { + "$ref": "Enrollment" + }, + "response": { + "$ref": "GoogleLongrunningOperation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, "setIamPolicy": { "description": "Sets the access control policy on the specified resource. Replaces any existing policy. Can return `NOT_FOUND`, `INVALID_ARGUMENT`, and `PERMISSION_DENIED` errors.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/enrollments/{enrollmentsId}:setIamPolicy", @@ -797,21 +990,408 @@ "$ref": "TestIamPermissionsRequest" }, "response": { - "$ref": "TestIamPermissionsResponse" + "$ref": "TestIamPermissionsResponse" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + } + } + }, + "googleApiSources": { + "methods": { + "create": { + "description": "Create a new GoogleApiSource in a particular project and location.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/googleApiSources", + "httpMethod": "POST", + "id": "eventarc.projects.locations.googleApiSources.create", + "parameterOrder": [ + "parent" + ], + "parameters": { + "googleApiSourceId": { + "description": "Required. The user-provided ID to be assigned to the GoogleApiSource. It should match the format `^[a-z]([a-z0-9-]{0,61}[a-z0-9])?$`.", + "location": "query", + "type": "string" + }, + "parent": { + "description": "Required. The parent collection in which to add this google api source.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+$", + "required": true, + "type": "string" + }, + "validateOnly": { + "description": "Optional. If set, validate the request and preview the review, but do not post it.", + "location": "query", + "type": "boolean" + } + }, + "path": "v1/{+parent}/googleApiSources", + "request": { + "$ref": "GoogleApiSource" + }, + "response": { + "$ref": "GoogleLongrunningOperation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "delete": { + "description": "Delete a single GoogleApiSource.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/googleApiSources/{googleApiSourcesId}", + "httpMethod": "DELETE", + "id": "eventarc.projects.locations.googleApiSources.delete", + "parameterOrder": [ + "name" + ], + "parameters": { + "allowMissing": { + "description": "Optional. If set to true, and the MessageBus is not found, the request will succeed but no action will be taken on the server.", + "location": "query", + "type": "boolean" + }, + "etag": { + "description": "Optional. 
If provided, the MessageBus will only be deleted if the etag matches the current etag on the resource.", + "location": "query", + "type": "string" + }, + "name": { + "description": "Required. The name of the GoogleApiSource to be deleted.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/googleApiSources/[^/]+$", + "required": true, + "type": "string" + }, + "validateOnly": { + "description": "Optional. If set, validate the request and preview the review, but do not post it.", + "location": "query", + "type": "boolean" + } + }, + "path": "v1/{+name}", + "response": { + "$ref": "GoogleLongrunningOperation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "get": { + "description": "Get a single GoogleApiSource.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/googleApiSources/{googleApiSourcesId}", + "httpMethod": "GET", + "id": "eventarc.projects.locations.googleApiSources.get", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "Required. The name of the google api source to get.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/googleApiSources/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+name}", + "response": { + "$ref": "GoogleApiSource" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "getIamPolicy": { + "description": "Gets the access control policy for a resource. Returns an empty policy if the resource exists and does not have a policy set.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/googleApiSources/{googleApiSourcesId}:getIamPolicy", + "httpMethod": "GET", + "id": "eventarc.projects.locations.googleApiSources.getIamPolicy", + "parameterOrder": [ + "resource" + ], + "parameters": { + "options.requestedPolicyVersion": { + "description": "Optional. The maximum policy version that will be used to format the policy. Valid values are 0, 1, and 3. Requests specifying an invalid value will be rejected. Requests for policies with any conditional role bindings must specify version 3. Policies with no conditional role bindings may specify any valid value or leave the field unset. The policy in the response might use the policy version that you specified, or it might use a lower policy version. For example, if you specify version 3, but the policy has no conditional role bindings, the response uses version 1. To learn which resources support conditions in their IAM policies, see the [IAM documentation](https://cloud.google.com/iam/help/conditions/resource-policies).", + "format": "int32", + "location": "query", + "type": "integer" + }, + "resource": { + "description": "REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/googleApiSources/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+resource}:getIamPolicy", + "response": { + "$ref": "Policy" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "list": { + "description": "List GoogleApiSources.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/googleApiSources", + "httpMethod": "GET", + "id": "eventarc.projects.locations.googleApiSources.list", + "parameterOrder": [ + "parent" + ], + "parameters": { + "filter": { + "description": "Optional. 
The filter field that the list request will filter on. Possible filtersare described in https://google.aip.dev/160.", + "location": "query", + "type": "string" + }, + "orderBy": { + "description": "Optional. The sorting order of the resources returned. Value should be a comma-separated list of fields. The default sorting order is ascending. To specify descending order for a field, append a `desc` suffix; for example: `name desc, update_time`.", + "location": "query", + "type": "string" + }, + "pageSize": { + "description": "Optional. The maximum number of results to return on each page. Note: The service may send fewer.", + "format": "int32", + "location": "query", + "type": "integer" + }, + "pageToken": { + "description": "Optional. The page token; provide the value from the `next_page_token` field in a previous call to retrieve the subsequent page. When paginating, all other parameters provided must match the previous call that provided the page token.", + "location": "query", + "type": "string" + }, + "parent": { + "description": "Required. The parent collection to list GoogleApiSources on.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+parent}/googleApiSources", + "response": { + "$ref": "ListGoogleApiSourcesResponse" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "patch": { + "description": "Update a single GoogleApiSource.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/googleApiSources/{googleApiSourcesId}", + "httpMethod": "PATCH", + "id": "eventarc.projects.locations.googleApiSources.patch", + "parameterOrder": [ + "name" + ], + "parameters": { + "allowMissing": { + "description": "Optional. If set to true, and the GoogleApiSource is not found, a new GoogleApiSource will be created. In this situation, `update_mask` is ignored.", + "location": "query", + "type": "boolean" + }, + "name": { + "description": "Identifier. Resource name of the form projects/{project}/locations/{location}/googleApiSources/{google_api_source}", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/googleApiSources/[^/]+$", + "required": true, + "type": "string" + }, + "updateMask": { + "description": "Optional. The fields to be updated; only fields explicitly provided are updated. If no field mask is provided, all provided fields in the request are updated. To update all fields, provide a field mask of \"*\".", + "format": "google-fieldmask", + "location": "query", + "type": "string" + }, + "validateOnly": { + "description": "Optional. If set, validate the request and preview the review, but do not post it.", + "location": "query", + "type": "boolean" + } + }, + "path": "v1/{+name}", + "request": { + "$ref": "GoogleApiSource" + }, + "response": { + "$ref": "GoogleLongrunningOperation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "setIamPolicy": { + "description": "Sets the access control policy on the specified resource. Replaces any existing policy. Can return `NOT_FOUND`, `INVALID_ARGUMENT`, and `PERMISSION_DENIED` errors.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/googleApiSources/{googleApiSourcesId}:setIamPolicy", + "httpMethod": "POST", + "id": "eventarc.projects.locations.googleApiSources.setIamPolicy", + "parameterOrder": [ + "resource" + ], + "parameters": { + "resource": { + "description": "REQUIRED: The resource for which the policy is being specified. 
See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/googleApiSources/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+resource}:setIamPolicy", + "request": { + "$ref": "SetIamPolicyRequest" + }, + "response": { + "$ref": "Policy" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "testIamPermissions": { + "description": "Returns permissions that a caller has on the specified resource. If the resource does not exist, this will return an empty set of permissions, not a `NOT_FOUND` error. Note: This operation is designed to be used for building permission-aware UIs and command-line tools, not for authorization checking. This operation may \"fail open\" without warning.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/googleApiSources/{googleApiSourcesId}:testIamPermissions", + "httpMethod": "POST", + "id": "eventarc.projects.locations.googleApiSources.testIamPermissions", + "parameterOrder": [ + "resource" + ], + "parameters": { + "resource": { + "description": "REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/googleApiSources/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+resource}:testIamPermissions", + "request": { + "$ref": "TestIamPermissionsRequest" + }, + "response": { + "$ref": "TestIamPermissionsResponse" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + } + } + }, + "messageBuses": { + "methods": { + "create": { + "description": "Create a new MessageBus in a particular project and location.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/messageBuses", + "httpMethod": "POST", + "id": "eventarc.projects.locations.messageBuses.create", + "parameterOrder": [ + "parent" + ], + "parameters": { + "messageBusId": { + "description": "Required. The user-provided ID to be assigned to the MessageBus. It should match the format `^[a-z]([a-z0-9-]{0,61}[a-z0-9])?$`.", + "location": "query", + "type": "string" + }, + "parent": { + "description": "Required. The parent collection in which to add this message bus.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+$", + "required": true, + "type": "string" + }, + "validateOnly": { + "description": "Optional. If set, validate the request and preview the review, but do not post it.", + "location": "query", + "type": "boolean" + } + }, + "path": "v1/{+parent}/messageBuses", + "request": { + "$ref": "MessageBus" + }, + "response": { + "$ref": "GoogleLongrunningOperation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "delete": { + "description": "Delete a single message bus.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/messageBuses/{messageBusesId}", + "httpMethod": "DELETE", + "id": "eventarc.projects.locations.messageBuses.delete", + "parameterOrder": [ + "name" + ], + "parameters": { + "allowMissing": { + "description": "Optional. If set to true, and the MessageBus is not found, the request will succeed but no action will be taken on the server.", + "location": "query", + "type": "boolean" + }, + "etag": { + "description": "Optional. 
If provided, the MessageBus will only be deleted if the etag matches the current etag on the resource.", + "location": "query", + "type": "string" + }, + "name": { + "description": "Required. The name of the MessageBus to be deleted.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/messageBuses/[^/]+$", + "required": true, + "type": "string" + }, + "validateOnly": { + "description": "Optional. If set, validate the request and preview the review, but do not post it.", + "location": "query", + "type": "boolean" + } + }, + "path": "v1/{+name}", + "response": { + "$ref": "GoogleLongrunningOperation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "get": { + "description": "Get a single MessageBus.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/messageBuses/{messageBusesId}", + "httpMethod": "GET", + "id": "eventarc.projects.locations.messageBuses.get", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "Required. The name of the message bus to get.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/messageBuses/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+name}", + "response": { + "$ref": "MessageBus" }, "scopes": [ "https://www.googleapis.com/auth/cloud-platform" ] - } - } - }, - "googleApiSources": { - "methods": { + }, "getIamPolicy": { "description": "Gets the access control policy for a resource. Returns an empty policy if the resource exists and does not have a policy set.", - "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/googleApiSources/{googleApiSourcesId}:getIamPolicy", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/messageBuses/{messageBusesId}:getIamPolicy", "httpMethod": "GET", - "id": "eventarc.projects.locations.googleApiSources.getIamPolicy", + "id": "eventarc.projects.locations.messageBuses.getIamPolicy", "parameterOrder": [ "resource" ], @@ -825,7 +1405,7 @@ "resource": { "description": "REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field.", "location": "path", - "pattern": "^projects/[^/]+/locations/[^/]+/googleApiSources/[^/]+$", + "pattern": "^projects/[^/]+/locations/[^/]+/messageBuses/[^/]+$", "required": true, "type": "string" } @@ -838,92 +1418,127 @@ "https://www.googleapis.com/auth/cloud-platform" ] }, - "setIamPolicy": { - "description": "Sets the access control policy on the specified resource. Replaces any existing policy. Can return `NOT_FOUND`, `INVALID_ARGUMENT`, and `PERMISSION_DENIED` errors.", - "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/googleApiSources/{googleApiSourcesId}:setIamPolicy", - "httpMethod": "POST", - "id": "eventarc.projects.locations.googleApiSources.setIamPolicy", + "list": { + "description": "List message buses.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/messageBuses", + "httpMethod": "GET", + "id": "eventarc.projects.locations.messageBuses.list", "parameterOrder": [ - "resource" + "parent" ], "parameters": { - "resource": { - "description": "REQUIRED: The resource for which the policy is being specified. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field.", + "filter": { + "description": "Optional. The filter field that the list request will filter on. 
Possible filtersare described in https://google.aip.dev/160.", + "location": "query", + "type": "string" + }, + "orderBy": { + "description": "Optional. The sorting order of the resources returned. Value should be a comma-separated list of fields. The default sorting order is ascending. To specify descending order for a field, append a `desc` suffix; for example: `name desc, update_time`.", + "location": "query", + "type": "string" + }, + "pageSize": { + "description": "Optional. The maximum number of results to return on each page. Note: The service may send fewer.", + "format": "int32", + "location": "query", + "type": "integer" + }, + "pageToken": { + "description": "Optional. The page token; provide the value from the `next_page_token` field in a previous call to retrieve the subsequent page. When paginating, all other parameters provided must match the previous call that provided the page token.", + "location": "query", + "type": "string" + }, + "parent": { + "description": "Required. The parent collection to list message buses on.", "location": "path", - "pattern": "^projects/[^/]+/locations/[^/]+/googleApiSources/[^/]+$", + "pattern": "^projects/[^/]+/locations/[^/]+$", "required": true, "type": "string" } }, - "path": "v1/{+resource}:setIamPolicy", - "request": { - "$ref": "SetIamPolicyRequest" - }, + "path": "v1/{+parent}/messageBuses", "response": { - "$ref": "Policy" + "$ref": "ListMessageBusesResponse" }, "scopes": [ "https://www.googleapis.com/auth/cloud-platform" ] }, - "testIamPermissions": { - "description": "Returns permissions that a caller has on the specified resource. If the resource does not exist, this will return an empty set of permissions, not a `NOT_FOUND` error. Note: This operation is designed to be used for building permission-aware UIs and command-line tools, not for authorization checking. This operation may \"fail open\" without warning.", - "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/googleApiSources/{googleApiSourcesId}:testIamPermissions", - "httpMethod": "POST", - "id": "eventarc.projects.locations.googleApiSources.testIamPermissions", + "listEnrollments": { + "description": "List message bus enrollments.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/messageBuses/{messageBusesId}:listEnrollments", + "httpMethod": "GET", + "id": "eventarc.projects.locations.messageBuses.listEnrollments", "parameterOrder": [ - "resource" + "parent" ], "parameters": { - "resource": { - "description": "REQUIRED: The resource for which the policy detail is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field.", + "pageSize": { + "description": "Optional. The maximum number of results to return on each page. Note: The service may send fewer.", + "format": "int32", + "location": "query", + "type": "integer" + }, + "pageToken": { + "description": "Optional. The page token; provide the value from the `next_page_token` field in a previous call to retrieve the subsequent page. When paginating, all other parameters provided must match the previous call that provided the page token.", + "location": "query", + "type": "string" + }, + "parent": { + "description": "Required. 
The parent message bus to list enrollments on.", "location": "path", - "pattern": "^projects/[^/]+/locations/[^/]+/googleApiSources/[^/]+$", + "pattern": "^projects/[^/]+/locations/[^/]+/messageBuses/[^/]+$", "required": true, "type": "string" } }, - "path": "v1/{+resource}:testIamPermissions", - "request": { - "$ref": "TestIamPermissionsRequest" - }, + "path": "v1/{+parent}:listEnrollments", "response": { - "$ref": "TestIamPermissionsResponse" + "$ref": "ListMessageBusEnrollmentsResponse" }, "scopes": [ "https://www.googleapis.com/auth/cloud-platform" ] - } - } - }, - "messageBuses": { - "methods": { - "getIamPolicy": { - "description": "Gets the access control policy for a resource. Returns an empty policy if the resource exists and does not have a policy set.", - "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/messageBuses/{messageBusesId}:getIamPolicy", - "httpMethod": "GET", - "id": "eventarc.projects.locations.messageBuses.getIamPolicy", + }, + "patch": { + "description": "Update a single message bus.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/messageBuses/{messageBusesId}", + "httpMethod": "PATCH", + "id": "eventarc.projects.locations.messageBuses.patch", "parameterOrder": [ - "resource" + "name" ], "parameters": { - "options.requestedPolicyVersion": { - "description": "Optional. The maximum policy version that will be used to format the policy. Valid values are 0, 1, and 3. Requests specifying an invalid value will be rejected. Requests for policies with any conditional role bindings must specify version 3. Policies with no conditional role bindings may specify any valid value or leave the field unset. The policy in the response might use the policy version that you specified, or it might use a lower policy version. For example, if you specify version 3, but the policy has no conditional role bindings, the response uses version 1. To learn which resources support conditions in their IAM policies, see the [IAM documentation](https://cloud.google.com/iam/help/conditions/resource-policies).", - "format": "int32", + "allowMissing": { + "description": "Optional. If set to true, and the MessageBus is not found, a new MessageBus will be created. In this situation, `update_mask` is ignored.", "location": "query", - "type": "integer" + "type": "boolean" }, - "resource": { - "description": "REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field.", + "name": { + "description": "Identifier. Resource name of the form projects/{project}/locations/{location}/messageBuses/{message_bus}", "location": "path", "pattern": "^projects/[^/]+/locations/[^/]+/messageBuses/[^/]+$", "required": true, "type": "string" + }, + "updateMask": { + "description": "Optional. The fields to be updated; only fields explicitly provided are updated. If no field mask is provided, all provided fields in the request are updated. To update all fields, provide a field mask of \"*\".", + "format": "google-fieldmask", + "location": "query", + "type": "string" + }, + "validateOnly": { + "description": "Optional. 
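To show how the new messageBuses.listEnrollments method above might be paged, a hedged sketch; the token and resource names are placeholders, and the `enrollments`/`nextPageToken` response field names are assumptions, since the ListMessageBusEnrollmentsResponse schema is not shown in this hunk.

```python
# Minimal paging sketch for messageBuses.listEnrollments. The token and
# resource names are placeholders; the "enrollments" and "nextPageToken"
# response fields are assumed from the usual List* response convention.
import requests

ACCESS_TOKEN = "<oauth2-access-token>"   # cloud-platform scope
BUS = "projects/my-project/locations/us-central1/messageBuses/my-bus"
URL = f"https://eventarc.googleapis.com/v1/{BUS}:listEnrollments"
HEADERS = {"Authorization": f"Bearer {ACCESS_TOKEN}"}

page_token = None
while True:
    params = {"pageSize": 50}
    if page_token:
        params["pageToken"] = page_token
    resp = requests.get(URL, headers=HEADERS, params=params).json()
    for enrollment in resp.get("enrollments", []):
        print(enrollment.get("name"))
    page_token = resp.get("nextPageToken")
    if not page_token:
        break
```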
If set, validate the request and preview the review, but do not post it.", + "location": "query", + "type": "boolean" } }, - "path": "v1/{+resource}:getIamPolicy", + "path": "v1/{+name}", + "request": { + "$ref": "MessageBus" + }, "response": { - "$ref": "Policy" + "$ref": "GoogleLongrunningOperation" }, "scopes": [ "https://www.googleapis.com/auth/cloud-platform" @@ -990,7 +1605,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "eventarc.projects.locations.operations.cancel", @@ -1112,32 +1727,225 @@ }, "pipelines": { "methods": { + "create": { + "description": "Create a new Pipeline in a particular project and location.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/pipelines", + "httpMethod": "POST", + "id": "eventarc.projects.locations.pipelines.create", + "parameterOrder": [ + "parent" + ], + "parameters": { + "parent": { + "description": "Required. The parent collection in which to add this pipeline.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+$", + "required": true, + "type": "string" + }, + "pipelineId": { + "description": "Required. The user-provided ID to be assigned to the Pipeline. It should match the format `^[a-z]([a-z0-9-]{0,61}[a-z0-9])?$`.", + "location": "query", + "type": "string" + }, + "validateOnly": { + "description": "Optional. If set, validate the request and preview the review, but do not post it.", + "location": "query", + "type": "boolean" + } + }, + "path": "v1/{+parent}/pipelines", + "request": { + "$ref": "Pipeline" + }, + "response": { + "$ref": "GoogleLongrunningOperation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "delete": { + "description": "Delete a single pipeline.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/pipelines/{pipelinesId}", + "httpMethod": "DELETE", + "id": "eventarc.projects.locations.pipelines.delete", + "parameterOrder": [ + "name" + ], + "parameters": { + "allowMissing": { + "description": "Optional. If set to true, and the Pipeline is not found, the request will succeed but no action will be taken on the server.", + "location": "query", + "type": "boolean" + }, + "etag": { + "description": "Optional. 
If provided, the Pipeline will only be deleted if the etag matches the current etag on the resource.", + "location": "query", + "type": "string" + }, + "name": { + "description": "Required. The name of the Pipeline to be deleted.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/pipelines/[^/]+$", + "required": true, + "type": "string" + }, + "validateOnly": { + "description": "Optional. If set, validate the request and preview the review, but do not post it.", + "location": "query", + "type": "boolean" + } + }, + "path": "v1/{+name}", + "response": { + "$ref": "GoogleLongrunningOperation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "get": { + "description": "Get a single Pipeline.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/pipelines/{pipelinesId}", + "httpMethod": "GET", + "id": "eventarc.projects.locations.pipelines.get", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "Required. The name of the pipeline to get.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/pipelines/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+name}", + "response": { + "$ref": "Pipeline" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, "getIamPolicy": { "description": "Gets the access control policy for a resource. Returns an empty policy if the resource exists and does not have a policy set.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/pipelines/{pipelinesId}:getIamPolicy", "httpMethod": "GET", "id": "eventarc.projects.locations.pipelines.getIamPolicy", "parameterOrder": [ - "resource" + "resource" + ], + "parameters": { + "options.requestedPolicyVersion": { + "description": "Optional. The maximum policy version that will be used to format the policy. Valid values are 0, 1, and 3. Requests specifying an invalid value will be rejected. Requests for policies with any conditional role bindings must specify version 3. Policies with no conditional role bindings may specify any valid value or leave the field unset. The policy in the response might use the policy version that you specified, or it might use a lower policy version. For example, if you specify version 3, but the policy has no conditional role bindings, the response uses version 1. To learn which resources support conditions in their IAM policies, see the [IAM documentation](https://cloud.google.com/iam/help/conditions/resource-policies).", + "format": "int32", + "location": "query", + "type": "integer" + }, + "resource": { + "description": "REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/pipelines/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+resource}:getIamPolicy", + "response": { + "$ref": "Policy" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "list": { + "description": "List pipelines.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/pipelines", + "httpMethod": "GET", + "id": "eventarc.projects.locations.pipelines.list", + "parameterOrder": [ + "parent" + ], + "parameters": { + "filter": { + "description": "Optional. The filter field that the list request will filter on. 
Possible filters are described in https://google.aip.dev/160.", + "location": "query", + "type": "string" + }, + "orderBy": { + "description": "Optional. The sorting order of the resources returned. Value should be a comma-separated list of fields. The default sorting order is ascending. To specify descending order for a field, append a `desc` suffix; for example: `name desc, update_time`.", + "location": "query", + "type": "string" + }, + "pageSize": { + "description": "Optional. The maximum number of results to return on each page. Note: The service may send fewer.", + "format": "int32", + "location": "query", + "type": "integer" + }, + "pageToken": { + "description": "Optional. The page token; provide the value from the `next_page_token` field in a previous call to retrieve the subsequent page. When paginating, all other parameters provided must match the previous call that provided the page token.", + "location": "query", + "type": "string" + }, + "parent": { + "description": "Required. The parent collection to list pipelines on.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+parent}/pipelines", + "response": { + "$ref": "ListPipelinesResponse" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "patch": { + "description": "Update a single pipeline.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/pipelines/{pipelinesId}", + "httpMethod": "PATCH", + "id": "eventarc.projects.locations.pipelines.patch", + "parameterOrder": [ + "name" ], "parameters": { - "options.requestedPolicyVersion": { - "description": "Optional. The maximum policy version that will be used to format the policy. Valid values are 0, 1, and 3. Requests specifying an invalid value will be rejected. Requests for policies with any conditional role bindings must specify version 3. Policies with no conditional role bindings may specify any valid value or leave the field unset. The policy in the response might use the policy version that you specified, or it might use a lower policy version. For example, if you specify version 3, but the policy has no conditional role bindings, the response uses version 1. To learn which resources support conditions in their IAM policies, see the [IAM documentation](https://cloud.google.com/iam/help/conditions/resource-policies).", - "format": "int32", + "allowMissing": { + "description": "Optional. If set to true, and the Pipeline is not found, a new Pipeline will be created. In this situation, `update_mask` is ignored.", "location": "query", - "type": "integer" + "type": "boolean" }, - "resource": { - "description": "REQUIRED: The resource for which the policy is being requested. See [Resource names](https://cloud.google.com/apis/design/resource_names) for the appropriate value for this field.", + "name": { + "description": "Identifier. The resource name of the Pipeline. Must be unique within the location of the project and must be in `projects/{project}/locations/{location}/pipelines/{pipeline}` format.", "location": "path", "pattern": "^projects/[^/]+/locations/[^/]+/pipelines/[^/]+$", "required": true, "type": "string" + }, + "updateMask": { + "description": "Optional. The fields to be updated; only fields explicitly provided are updated. If no field mask is provided, all provided fields in the request are updated. 
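As a sketch of the pipelines.list parameters above (AIP-160 filter, orderBy, pagination), assuming a placeholder token, parent, and filter expression; the `pipelines` response field name is likewise an assumption.

```python
# Minimal sketch of pipelines.list. Token, parent and the filter string are
# placeholders; the orderBy value is the example given in the method docs.
import requests

ACCESS_TOKEN = "<oauth2-access-token>"   # cloud-platform scope
PARENT = "projects/my-project/locations/us-central1"

resp = requests.get(
    f"https://eventarc.googleapis.com/v1/{PARENT}/pipelines",
    headers={"Authorization": f"Bearer {ACCESS_TOKEN}"},
    params={
        "filter": 'display_name="orders"',     # assumed AIP-160 style filter
        "orderBy": "name desc, update_time",   # example ordering from the docs
        "pageSize": 25,
    },
).json()
for pipeline in resp.get("pipelines", []):     # response field name assumed
    print(pipeline.get("name"))
```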
To update all fields, provide a field mask of \"*\".", + "format": "google-fieldmask", + "location": "query", + "type": "string" + }, + "validateOnly": { + "description": "Optional. If set, validate the request and preview the review, but do not post it.", + "location": "query", + "type": "boolean" } }, - "path": "v1/{+resource}:getIamPolicy", + "path": "v1/{+name}", + "request": { + "$ref": "Pipeline" + }, "response": { - "$ref": "Policy" + "$ref": "GoogleLongrunningOperation" }, "scopes": [ "https://www.googleapis.com/auth/cloud-platform" @@ -1655,6 +2463,13 @@ "description": "Resource name of a KMS crypto key (managed by the user) used to encrypt/decrypt their event data. It must match the pattern `projects/*/locations/*/keyRings/*/cryptoKeys/*`.", "type": "string" }, + "labels": { + "additionalProperties": { + "type": "string" + }, + "description": "Optional. Resource labels.", + "type": "object" + }, "name": { "description": "Required. The resource name of the channel. Must be unique within the location on the project and must be in `projects/{project}/locations/{location}/channels/{channel_id}` format.", "type": "string" @@ -1722,6 +2537,13 @@ "readOnly": true, "type": "string" }, + "labels": { + "additionalProperties": { + "type": "string" + }, + "description": "Optional. Resource labels.", + "type": "object" + }, "name": { "description": "Required. The name of the connection.", "type": "string" @@ -1796,6 +2618,69 @@ "properties": {}, "type": "object" }, + "Enrollment": { + "description": "An enrollment represents a subscription for messages on a particular message bus. It defines a matching criteria for messages on the bus and the subscriber endpoint where matched messages should be delivered.", + "id": "Enrollment", + "properties": { + "annotations": { + "additionalProperties": { + "type": "string" + }, + "description": "Optional. Resource annotations.", + "type": "object" + }, + "celMatch": { + "description": "Required. A CEL expression identifying which messages this enrollment applies to.", + "type": "string" + }, + "createTime": { + "description": "Output only. The creation time.", + "format": "google-datetime", + "readOnly": true, + "type": "string" + }, + "destination": { + "description": "Required. Destination is the Pipeline that the Enrollment is delivering to. It must point to the full resource name of a Pipeline. Format: \"projects/{PROJECT_ID}/locations/{region}/pipelines/{PIPELINE_ID)\"", + "type": "string" + }, + "displayName": { + "description": "Optional. Resource display name.", + "type": "string" + }, + "etag": { + "description": "Output only. This checksum is computed by the server based on the value of other fields, and might be sent only on update and delete requests to ensure that the client has an up-to-date value before proceeding.", + "readOnly": true, + "type": "string" + }, + "labels": { + "additionalProperties": { + "type": "string" + }, + "description": "Optional. Resource labels.", + "type": "object" + }, + "messageBus": { + "description": "Required. Resource name of the message bus identifying the source of the messages. It matches the form projects/{project}/locations/{location}/messageBuses/{messageBus}.", + "type": "string" + }, + "name": { + "description": "Identifier. Resource name of the form projects/{project}/locations/{location}/enrollments/{enrollment}", + "type": "string" + }, + "uid": { + "description": "Output only. Server assigned unique identifier for the channel. 
The value is a UUID4 string and guaranteed to remain unchanged until the resource is deleted.", + "readOnly": true, + "type": "string" + }, + "updateTime": { + "description": "Output only. The last-modified time.", + "format": "google-datetime", + "readOnly": true, + "type": "string" + } + }, + "type": "object" + }, "EventFilter": { "description": "Filters events based on exact matches on the CloudEvents attributes.", "id": "EventFilter", @@ -1895,49 +2780,309 @@ }, "type": "object" }, - "GKE": { - "description": "Represents a GKE destination.", - "id": "GKE", + "GKE": { + "description": "Represents a GKE destination.", + "id": "GKE", + "properties": { + "cluster": { + "description": "Required. The name of the cluster the GKE service is running in. The cluster must be running in the same project as the trigger being created.", + "type": "string" + }, + "location": { + "description": "Required. The name of the Google Compute Engine in which the cluster resides, which can either be compute zone (for example, us-central1-a) for the zonal clusters or region (for example, us-central1) for regional clusters.", + "type": "string" + }, + "namespace": { + "description": "Required. The namespace the GKE service is running in.", + "type": "string" + }, + "path": { + "description": "Optional. The relative path on the GKE service the events should be sent to. The value must conform to the definition of a URI path segment (section 3.3 of RFC2396). Examples: \"/route\", \"route\", \"route/subroute\".", + "type": "string" + }, + "service": { + "description": "Required. Name of the GKE service.", + "type": "string" + } + }, + "type": "object" + }, + "GoogleApiSource": { + "description": "A GoogleApiSource represents a subscription of 1P events from a MessageBus.", + "id": "GoogleApiSource", + "properties": { + "annotations": { + "additionalProperties": { + "type": "string" + }, + "description": "Optional. Resource annotations.", + "type": "object" + }, + "createTime": { + "description": "Output only. The creation time.", + "format": "google-datetime", + "readOnly": true, + "type": "string" + }, + "cryptoKeyName": { + "description": "Optional. Resource name of a KMS crypto key (managed by the user) used to encrypt/decrypt their event data. It must match the pattern `projects/*/locations/*/keyRings/*/cryptoKeys/*`.", + "type": "string" + }, + "destination": { + "description": "Required. Destination is the message bus that the GoogleApiSource is delivering to. It must be point to the full resource name of a MessageBus. Format: \"projects/{PROJECT_ID}/locations/{region}/messagesBuses/{MESSAGE_BUS_ID)", + "type": "string" + }, + "displayName": { + "description": "Optional. Resource display name.", + "type": "string" + }, + "etag": { + "description": "Output only. This checksum is computed by the server based on the value of other fields, and might be sent only on update and delete requests to ensure that the client has an up-to-date value before proceeding.", + "readOnly": true, + "type": "string" + }, + "labels": { + "additionalProperties": { + "type": "string" + }, + "description": "Optional. Resource labels.", + "type": "object" + }, + "loggingConfig": { + "$ref": "LoggingConfig", + "description": "Optional. Config to control Platform logging for the GoogleApiSource." + }, + "name": { + "description": "Identifier. Resource name of the form projects/{project}/locations/{location}/googleApiSources/{google_api_source}", + "type": "string" + }, + "uid": { + "description": "Output only. 
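Putting the Enrollment schema above together, a sketch of one possible resource; the CEL match, bus, and pipeline names are illustrative assumptions rather than values from this document:

```json
{
  "name": "projects/my-project/locations/us-central1/enrollments/storage-finalized",
  "displayName": "Storage object finalized",
  "celMatch": "message.type == \"google.cloud.storage.object.v1.finalized\"",
  "messageBus": "projects/my-project/locations/us-central1/messageBuses/default",
  "destination": "projects/my-project/locations/us-central1/pipelines/orders-pipeline"
}
```

Messages on the named bus that satisfy `celMatch` are delivered to the Pipeline in `destination`; `uid`, `createTime`, `updateTime`, and `etag` are output-only and set by the server.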
Server assigned unique identifier for the channel. The value is a UUID4 string and guaranteed to remain unchanged until the resource is deleted.", + "readOnly": true, + "type": "string" + }, + "updateTime": { + "description": "Output only. The last-modified time.", + "format": "google-datetime", + "readOnly": true, + "type": "string" + } + }, + "type": "object" + }, + "GoogleChannelConfig": { + "description": "A GoogleChannelConfig is a resource that stores the custom settings respected by Eventarc first-party triggers in the matching region. Once configured, first-party event data will be protected using the specified custom managed encryption key instead of Google-managed encryption keys.", + "id": "GoogleChannelConfig", + "properties": { + "cryptoKeyName": { + "description": "Optional. Resource name of a KMS crypto key (managed by the user) used to encrypt/decrypt their event data. It must match the pattern `projects/*/locations/*/keyRings/*/cryptoKeys/*`.", + "type": "string" + }, + "name": { + "description": "Required. The resource name of the config. Must be in the format of, `projects/{project}/locations/{location}/googleChannelConfig`.", + "type": "string" + }, + "updateTime": { + "description": "Output only. The last-modified time.", + "format": "google-datetime", + "readOnly": true, + "type": "string" + } + }, + "type": "object" + }, + "GoogleCloudEventarcV1PipelineDestination": { + "description": "Represents a target of an invocation over HTTP.", + "id": "GoogleCloudEventarcV1PipelineDestination", + "properties": { + "authenticationConfig": { + "$ref": "GoogleCloudEventarcV1PipelineDestinationAuthenticationConfig", + "description": "Optional. An authentication config used to authenticate message requests, such that destinations can verify the source. For example, this can be used with private GCP destinations that require GCP credentials to access like Cloud Run. This field is optional and should be set only by users interested in authenticated push" + }, + "httpEndpoint": { + "$ref": "GoogleCloudEventarcV1PipelineDestinationHttpEndpoint", + "description": "Optional. An HTTP endpoint destination described by an URI. If a DNS FQDN is provided as the endpoint, Pipeline will create a peering zone to the consumer VPC and forward DNS requests to the VPC specified by network config to resolve the service endpoint. See: https://cloud.google.com/dns/docs/zones/zones-overview#peering_zones" + }, + "messageBus": { + "description": "Optional. The resource name of the Message Bus to which events should be published. The Message Bus resource should exist in the same project as the Pipeline. Format: `projects/{project}/locations/{location}/messageBuses/{message_bus}`", + "type": "string" + }, + "networkConfig": { + "$ref": "GoogleCloudEventarcV1PipelineDestinationNetworkConfig", + "description": "Optional. Network config is used to configure how Pipeline resolves and connects to a destination." + }, + "outputPayloadFormat": { + "$ref": "GoogleCloudEventarcV1PipelineMessagePayloadFormat", + "description": "Optional. The message format before it is delivered to the destination. If not set, the message will be delivered in the format it was originally delivered to the Pipeline. This field can only be set if Pipeline.input_payload_format is also set." + }, + "topic": { + "description": "Optional. The resource name of the Pub/Sub topic to which events should be published. 
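Similarly, a sketch of a GoogleApiSource that routes first-party events onto a bus per the schema above; the resource IDs and the severity value are assumptions:

```json
{
  "name": "projects/my-project/locations/us-central1/googleApiSources/google-sources",
  "displayName": "Google API events",
  "destination": "projects/my-project/locations/us-central1/messageBuses/default",
  "loggingConfig": {
    "logSeverity": "INFO"
  }
}
```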
Format: `projects/{project}/locations/{location}/topics/{topic}`", + "type": "string" + }, + "workflow": { + "description": "Optional. The resource name of the Workflow whose Executions are triggered by the events. The Workflow resource should be deployed in the same project as the Pipeline. Format: `projects/{project}/locations/{location}/workflows/{workflow}`", + "type": "string" + } + }, + "type": "object" + }, + "GoogleCloudEventarcV1PipelineDestinationAuthenticationConfig": { + "description": "Represents a config used to authenticate message requests.", + "id": "GoogleCloudEventarcV1PipelineDestinationAuthenticationConfig", + "properties": { + "googleOidc": { + "$ref": "GoogleCloudEventarcV1PipelineDestinationAuthenticationConfigOidcToken", + "description": "Optional. This authenticate method will apply Google OIDC tokens signed by a GCP service account to the requests." + }, + "oauthToken": { + "$ref": "GoogleCloudEventarcV1PipelineDestinationAuthenticationConfigOAuthToken", + "description": "Optional. If specified, an [OAuth token](https://developers.google.com/identity/protocols/OAuth2) will be generated and attached as an `Authorization` header in the HTTP request. This type of authorization should generally only be used when calling Google APIs hosted on *.googleapis.com." + } + }, + "type": "object" + }, + "GoogleCloudEventarcV1PipelineDestinationAuthenticationConfigOAuthToken": { + "description": "Contains information needed for generating an [OAuth token](https://developers.google.com/identity/protocols/OAuth2). This type of authorization should generally only be used when calling Google APIs hosted on *.googleapis.com.", + "id": "GoogleCloudEventarcV1PipelineDestinationAuthenticationConfigOAuthToken", + "properties": { + "scope": { + "description": "Optional. OAuth scope to be used for generating OAuth access token. If not specified, \"https://www.googleapis.com/auth/cloud-platform\" will be used.", + "type": "string" + }, + "serviceAccount": { + "description": "Required. Service account email used to generate the [OAuth token](https://developers.google.com/identity/protocols/OAuth2). The principal who calls this API must have iam.serviceAccounts.actAs permission in the service account. See https://cloud.google.com/iam/docs/understanding-service-accounts for more information. Eventarc service agents must have roles/roles/iam.serviceAccountTokenCreator role to allow Pipeline to create OAuth2 tokens for authenticated requests.", + "type": "string" + } + }, + "type": "object" + }, + "GoogleCloudEventarcV1PipelineDestinationAuthenticationConfigOidcToken": { + "description": "Represents a config used to authenticate with a Google OIDC token using a GCP service account. Use this authentication method to invoke your Cloud Run and Cloud Functions destinations or HTTP endpoints that support Google OIDC.", + "id": "GoogleCloudEventarcV1PipelineDestinationAuthenticationConfigOidcToken", + "properties": { + "audience": { + "description": "Optional. Audience to be used to generate the OIDC Token. The audience claim identifies the recipient that the JWT is intended for. If unspecified, the destination URI will be used.", + "type": "string" + }, + "serviceAccount": { + "description": "Required. Service account email used to generate the OIDC Token. The principal who calls this API must have iam.serviceAccounts.actAs permission in the service account. See https://cloud.google.com/iam/docs/understanding-service-accounts for more information. 
Eventarc service agents must have roles/roles/iam.serviceAccountTokenCreator role to allow the Pipeline to create OpenID tokens for authenticated requests.", + "type": "string" + } + }, + "type": "object" + }, + "GoogleCloudEventarcV1PipelineDestinationHttpEndpoint": { + "description": "Represents a HTTP endpoint destination.", + "id": "GoogleCloudEventarcV1PipelineDestinationHttpEndpoint", + "properties": { + "messageBindingTemplate": { + "description": "Optional. The CEL expression used to modify how the destination-bound HTTP request is constructed. If a binding expression is not specified here, the message is treated as a CloudEvent and is mapped to the HTTP request according to the CloudEvent HTTP Protocol Binding Binary Content Mode (https://github.com/cloudevents/spec/blob/main/cloudevents/bindings/http-protocol-binding.md#31-binary-content-mode). In this representation, all fields except the `data` and `datacontenttype` field on the message are mapped to HTTP request headers with a prefix of `ce-`. To construct the HTTP request payload and the value of the content-type HTTP header, the payload format is defined as follows: 1) Use the output_payload_format_type on the Pipeline.Destination if it is set, else: 2) Use the input_payload_format_type on the Pipeline if it is set, else: 3) Treat the payload as opaque binary data. The `data` field of the message is converted to the payload format or left as-is for case 3) and then attached as the payload of the HTTP request. The `content-type` header on the HTTP request is set to the payload format type or left empty for case 3). However, if a mediation has updated the `datacontenttype` field on the message so that it is not the same as the payload format type but it is still a prefix of the payload format type, then the `content-type` header on the HTTP request is set to this `datacontenttype` value. For example, if the `datacontenttype` is \"application/json\" and the payload format type is \"application/json; charset=utf-8\", then the `content-type` header on the HTTP request is set to \"application/json; charset=utf-8\". If a non-empty binding expression is specified then this expression is used to modify the default CloudEvent HTTP Protocol Binding Binary Content representation. The result of the CEL expression must be a map of key/value pairs which is used as follows: - If a map named `headers` exists on the result of the expression, then its key/value pairs are directly mapped to the HTTP request headers. The headers values are constructed from the corresponding value type's canonical representation. If the `headers` field doesn't exist then the resulting HTTP request will be the headers of the CloudEvent HTTP Binding Binary Content Mode representation of the final message. Note: If the specified binding expression, has updated the `datacontenttype` field on the message so that it is not the same as the payload format type but it is still a prefix of the payload format type, then the `content-type` header in the `headers` map is set to this `datacontenttype` value. - If a field named `body` exists on the result of the expression then its value is directly mapped to the body of the request. If the value of the `body` field is of type bytes or string then it is used for the HTTP request body as-is, with no conversion. If the body field is of any other type then it is converted to a JSON string. 
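A sketch of the `authenticationConfig` block on a Pipeline destination using the OIDC variant described above; the service account and audience are placeholders. The caller needs `iam.serviceAccounts.actAs` on that account, and the Eventarc service agent needs `roles/iam.serviceAccountTokenCreator` so the Pipeline can mint tokens:

```json
{
  "authenticationConfig": {
    "googleOidc": {
      "serviceAccount": "pipeline-invoker@my-project.iam.gserviceaccount.com",
      "audience": "https://orders.internal.example.com:8080/events"
    }
  }
}
```

The `oauthToken` variant is analogous (`serviceAccount` plus an optional `scope`) and is intended mainly for destinations hosted on `*.googleapis.com`.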
If the body field does not exist then the resulting payload of the HTTP request will be data value of the CloudEvent HTTP Binding Binary Content Mode representation of the final message as described earlier. - Any other fields in the resulting expression will be ignored. The CEL expression may access the incoming CloudEvent message in its definition, as follows: - The `data` field of the incoming CloudEvent message can be accessed using the `message.data` value. Subfields of `message.data` may also be accessed if an input_payload_format has been specified on the Pipeline. - Each attribute of the incoming CloudEvent message can be accessed using the `message.` value, where is replaced with the name of the attribute. - Existing headers can be accessed in the CEL expression using the `headers` variable. The `headers` variable defines a map of key/value pairs corresponding to the HTTP headers of the CloudEvent HTTP Binding Binary Content Mode representation of the final message as described earlier. For example, the following CEL expression can be used to construct an HTTP request by adding an additional header to the HTTP headers of the CloudEvent HTTP Binding Binary Content Mode representation of the final message and by overwriting the body of the request: ``` { \"headers\": headers.merge({\"new-header-key\": \"new-header-value\"}), \"body\": \"new-body\" } ``` - The default binding for the message payload can be accessed using the `body` variable. It conatins a string representation of the message payload in the format specified by the `output_payload_format` field. If the `input_payload_format` field is not set, the `body` variable contains the same message payload bytes that were published. Additionally, the following CEL extension functions are provided for use in this CEL expression: - toBase64Url: map.toBase64Url() -> string - Converts a CelValue to a base64url encoded string - toJsonString: map.toJsonString() -> string - Converts a CelValue to a JSON string - merge: map1.merge(map2) -> map3 - Merges the passed CEL map with the existing CEL map the function is applied to. - If the same key exists in both maps, if the key's value is type map both maps are merged else the value from the passed map is used. - denormalize: map.denormalize() -> map - Denormalizes a CEL map such that every value of type map or key in the map is expanded to return a single level map. - The resulting keys are \".\" separated indices of the map keys. - For example: { \"a\": 1, \"b\": { \"c\": 2, \"d\": 3 } \"e\": [4, 5] } .denormalize() -> { \"a\": 1, \"b.c\": 2, \"b.d\": 3, \"e.0\": 4, \"e.1\": 5 } - setField: map.setField(key, value) -> message - Sets the field of the message with the given key to the given value. - If the field is not present it will be added. - If the field is present it will be overwritten. - The key can be a dot separated path to set a field in a nested message. - Key must be of type string. - Value may be any valid type. - removeFields: map.removeFields([key1, key2, ...]) -> message - Removes the fields of the map with the given keys. - The keys can be a dot separated path to remove a field in a nested message. - If a key is not found it will be ignored. - Keys must be of type string. - toMap: [map1, map2, ...].toMap() -> map - Converts a CEL list of CEL maps to a single CEL map - toCloudEventJsonWithPayloadFormat: message.toCloudEventJsonWithPayloadFormat() -> map - Converts a message to the corresponding structure of JSON format for CloudEvents. 
- It converts `data` to destination payload format specified in `output_payload_format`. If `output_payload_format` is not set, the data will remain unchanged. - It also sets the corresponding datacontenttype of the CloudEvent, as indicated by `output_payload_format`. If no `output_payload_format` is set it will use the value of the \"datacontenttype\" attribute on the CloudEvent if present, else remove \"datacontenttype\" attribute. - This function expects that the content of the message will adhere to the standard CloudEvent format. If it doesn't then this function will fail. - The result is a CEL map that corresponds to the JSON representation of the CloudEvent. To convert that data to a JSON string it can be chained with the toJsonString function. The Pipeline expects that the message it receives adheres to the standard CloudEvent format. If it doesn't then the outgoing message request may fail with a persistent error.", + "type": "string" + }, + "uri": { + "description": "Required. The URI of the HTTP enpdoint. The value must be a RFC2396 URI string. Examples: `https://svc.us-central1.p.local:8080/route`. Only the HTTPS protocol is supported.", + "type": "string" + } + }, + "type": "object" + }, + "GoogleCloudEventarcV1PipelineDestinationNetworkConfig": { + "description": "Represents a network config to be used for destination resolution and connectivity.", + "id": "GoogleCloudEventarcV1PipelineDestinationNetworkConfig", + "properties": { + "networkAttachment": { + "description": "Required. Name of the NetworkAttachment that allows access to the consumer VPC. Format: `projects/{PROJECT_ID}/regions/{REGION}/networkAttachments/{NETWORK_ATTACHMENT_NAME}`", + "type": "string" + } + }, + "type": "object" + }, + "GoogleCloudEventarcV1PipelineMediation": { + "description": "Mediation defines different ways to modify the Pipeline.", + "id": "GoogleCloudEventarcV1PipelineMediation", "properties": { - "cluster": { - "description": "Required. The name of the cluster the GKE service is running in. The cluster must be running in the same project as the trigger being created.", - "type": "string" - }, - "location": { - "description": "Required. The name of the Google Compute Engine in which the cluster resides, which can either be compute zone (for example, us-central1-a) for the zonal clusters or region (for example, us-central1) for regional clusters.", - "type": "string" - }, - "namespace": { - "description": "Required. The namespace the GKE service is running in.", + "transformation": { + "$ref": "GoogleCloudEventarcV1PipelineMediationTransformation", + "description": "Optional. How the Pipeline is to transform messages" + } + }, + "type": "object" + }, + "GoogleCloudEventarcV1PipelineMediationTransformation": { + "description": "Transformation defines the way to transform an incoming message.", + "id": "GoogleCloudEventarcV1PipelineMediationTransformation", + "properties": { + "transformationTemplate": { + "description": "Optional. The CEL expression template to apply to transform messages. The following CEL extension functions are provided for use in this CEL expression: - merge: map1.merge(map2) -> map3 - Merges the passed CEL map with the existing CEL map the function is applied to. - If the same key exists in both maps, if the key's value is type map both maps are merged else the value from the passed map is used. - denormalize: map.denormalize() -> map - Denormalizes a CEL map such that every value of type map or key in the map is expanded to return a single level map. 
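A sketch of an HTTP endpoint destination that reuses the binding expression quoted in the description above; the URI and network attachment are placeholder assumptions, and the header/body values are illustrative:

```json
{
  "httpEndpoint": {
    "uri": "https://orders.internal.example.com:8080/events",
    "messageBindingTemplate": "{ \"headers\": headers.merge({\"new-header-key\": \"new-header-value\"}), \"body\": \"new-body\" }"
  },
  "networkConfig": {
    "networkAttachment": "projects/my-project/regions/us-central1/networkAttachments/default"
  }
}
```

If `messageBindingTemplate` is omitted, the request falls back to the CloudEvents HTTP Binding Binary Content Mode mapping described above.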
- The resulting keys are \".\" separated indices of the map keys. - For example: { \"a\": 1, \"b\": { \"c\": 2, \"d\": 3 } \"e\": [4, 5] } .denormalize() -> { \"a\": 1, \"b.c\": 2, \"b.d\": 3, \"e.0\": 4, \"e.1\": 5 } - setField: map.setField(key, value) -> message - Sets the field of the message with the given key to the given value. - If the field is not present it will be added. - If the field is present it will be overwritten. - The key can be a dot separated path to set a field in a nested message. - Key must be of type string. - Value may be any valid type. - removeFields: map.removeFields([key1, key2, ...]) -> message - Removes the fields of the map with the given keys. - The keys can be a dot separated path to remove a field in a nested message. - If a key is not found it will be ignored. - Keys must be of type string. - toMap: [map1, map2, ...].toMap() -> map - Converts a CEL list of CEL maps to a single CEL map - toDestinationPayloadFormat(): message.data.toDestinationPayloadFormat() -> string or bytes - Converts the message data to the destination payload format specified in Pipeline.Destination.output_payload_format - This function is meant to be applied to the message.data field. - If the destination payload format is not set, the function will return the message data unchanged. - toCloudEventJsonWithPayloadFormat: message.toCloudEventJsonWithPayloadFormat() -> map - Converts a message to the corresponding structure of JSON format for CloudEvents - This function applies toDestinationPayloadFormat() to the message data. It also sets the corresponding datacontenttype of the CloudEvent, as indicated by Pipeline.Destination.output_payload_format. If no output_payload_format is set it will use the existing datacontenttype on the CloudEvent if present, else leave datacontenttype absent. - This function expects that the content of the message will adhere to the standard CloudEvent format. If it doesn't then this function will fail. - The result is a CEL map that corresponds to the JSON representation of the CloudEvent. To convert that data to a JSON string it can be chained with the toJsonString function.", "type": "string" + } + }, + "type": "object" + }, + "GoogleCloudEventarcV1PipelineMessagePayloadFormat": { + "description": "Represents the format of message data.", + "id": "GoogleCloudEventarcV1PipelineMessagePayloadFormat", + "properties": { + "avro": { + "$ref": "GoogleCloudEventarcV1PipelineMessagePayloadFormatAvroFormat", + "description": "Optional. Avro format." }, - "path": { - "description": "Optional. The relative path on the GKE service the events should be sent to. The value must conform to the definition of a URI path segment (section 3.3 of RFC2396). Examples: \"/route\", \"route\", \"route/subroute\".", - "type": "string" + "json": { + "$ref": "GoogleCloudEventarcV1PipelineMessagePayloadFormatJsonFormat", + "description": "Optional. JSON format." }, - "service": { - "description": "Required. Name of the GKE service.", + "protobuf": { + "$ref": "GoogleCloudEventarcV1PipelineMessagePayloadFormatProtobufFormat", + "description": "Optional. Protobuf format." + } + }, + "type": "object" + }, + "GoogleCloudEventarcV1PipelineMessagePayloadFormatAvroFormat": { + "description": "The format of an AVRO message payload.", + "id": "GoogleCloudEventarcV1PipelineMessagePayloadFormatAvroFormat", + "properties": { + "schemaDefinition": { + "description": "Optional. 
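A sketch of a single mediation using `transformationTemplate`; the CEL is purely illustrative (it only uses the `removeFields` extension named above) and the field path is an assumption:

```json
{
  "mediations": [
    {
      "transformation": {
        "transformationTemplate": "message.removeFields([\"data.debug_info\"])"
      }
    }
  ]
}
```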
The entire schema definition is stored in this field.", "type": "string" } }, "type": "object" }, - "GoogleChannelConfig": { - "description": "A GoogleChannelConfig is a resource that stores the custom settings respected by Eventarc first-party triggers in the matching region. Once configured, first-party event data will be protected using the specified custom managed encryption key instead of Google-managed encryption keys.", - "id": "GoogleChannelConfig", + "GoogleCloudEventarcV1PipelineMessagePayloadFormatJsonFormat": { + "description": "The format of a JSON message payload.", + "id": "GoogleCloudEventarcV1PipelineMessagePayloadFormatJsonFormat", + "properties": {}, + "type": "object" + }, + "GoogleCloudEventarcV1PipelineMessagePayloadFormatProtobufFormat": { + "description": "The format of a Protobuf message payload.", + "id": "GoogleCloudEventarcV1PipelineMessagePayloadFormatProtobufFormat", "properties": { - "cryptoKeyName": { - "description": "Optional. Resource name of a KMS crypto key (managed by the user) used to encrypt/decrypt their event data. It must match the pattern `projects/*/locations/*/keyRings/*/cryptoKeys/*`.", + "schemaDefinition": { + "description": "Optional. The entire schema definition is stored in this field.", "type": "string" + } + }, + "type": "object" + }, + "GoogleCloudEventarcV1PipelineRetryPolicy": { + "description": "The retry policy configuration for the Pipeline. The pipeline exponentially backs off in case the destination is non responsive or returns a retryable error code. The default semantics are as follows: The backoff starts with a 5 second delay and doubles the delay after each failed attempt (10 seconds, 20 seconds, 40 seconds, etc.). The delay is capped at 60 seconds by default. Please note that if you set the min_retry_delay and max_retry_delay fields to the same value this will make the duration between retries constant.", + "id": "GoogleCloudEventarcV1PipelineRetryPolicy", + "properties": { + "maxAttempts": { + "description": "Optional. The maximum number of delivery attempts for any message. The value must be between 1 and 100. The default value for this field is 5.", + "format": "int32", + "type": "integer" }, - "name": { - "description": "Required. The resource name of the config. Must be in the format of, `projects/{project}/locations/{location}/googleChannelConfig`.", + "maxRetryDelay": { + "description": "Optional. The maximum amount of seconds to wait between retry attempts. The value must be between 1 and 600. The default value for this field is 60.", + "format": "google-duration", "type": "string" }, - "updateTime": { - "description": "Output only. The last-modified time.", - "format": "google-datetime", - "readOnly": true, + "minRetryDelay": { + "description": "Optional. The minimum amount of seconds to wait between retry attempts. The value must be between 1 and 600. The default value for this field is 5.", + "format": "google-duration", "type": "string" } }, @@ -2090,6 +3235,56 @@ }, "type": "object" }, + "ListEnrollmentsResponse": { + "description": "The response message for the `ListEnrollments` method.", + "id": "ListEnrollmentsResponse", + "properties": { + "enrollments": { + "description": "The requested Enrollments, up to the number specified in `page_size`.", + "items": { + "$ref": "Enrollment" + }, + "type": "array" + }, + "nextPageToken": { + "description": "A page token that can be sent to `ListEnrollments` to request the next page. 
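A sketch of the retry policy described above, with assumed values inside the documented 1–100 attempt and 1–600 second ranges:

```json
{
  "retryPolicy": {
    "maxAttempts": 8,
    "minRetryDelay": "10s",
    "maxRetryDelay": "300s"
  }
}
```

With these values the delay starts at 10 seconds and doubles after each failed attempt (20s, 40s, ...), capped at 300 seconds; setting `minRetryDelay` equal to `maxRetryDelay` yields a constant delay between retries.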
If this is empty, then there are no more pages.", + "type": "string" + }, + "unreachable": { + "description": "Unreachable resources, if any.", + "items": { + "type": "string" + }, + "type": "array" + } + }, + "type": "object" + }, + "ListGoogleApiSourcesResponse": { + "description": "The response message for the `ListGoogleApiSources` method.", + "id": "ListGoogleApiSourcesResponse", + "properties": { + "googleApiSources": { + "description": "The requested GoogleApiSources, up to the number specified in `page_size`.", + "items": { + "$ref": "GoogleApiSource" + }, + "type": "array" + }, + "nextPageToken": { + "description": "A page token that can be sent to `ListMessageBusEnrollments` to request the next page. If this is empty, then there are no more pages.", + "type": "string" + }, + "unreachable": { + "description": "Unreachable resources, if any.", + "items": { + "type": "string" + }, + "type": "array" + } + }, + "type": "object" + }, "ListLocationsResponse": { "description": "The response message for Locations.ListLocations.", "id": "ListLocationsResponse", @@ -2108,6 +3303,81 @@ }, "type": "object" }, + "ListMessageBusEnrollmentsResponse": { + "description": "The response message for the `ListMessageBusEnrollments` method.`", + "id": "ListMessageBusEnrollmentsResponse", + "properties": { + "enrollments": { + "description": "The requested enrollments, up to the number specified in `page_size`.", + "items": { + "type": "string" + }, + "type": "array" + }, + "nextPageToken": { + "description": "A page token that can be sent to `ListMessageBusEnrollments` to request the next page. If this is empty, then there are no more pages.", + "type": "string" + }, + "unreachable": { + "description": "Unreachable resources, if any.", + "items": { + "type": "string" + }, + "type": "array" + } + }, + "type": "object" + }, + "ListMessageBusesResponse": { + "description": "The response message for the `ListMessageBuses` method.", + "id": "ListMessageBusesResponse", + "properties": { + "messageBuses": { + "description": "The requested message buses, up to the number specified in `page_size`.", + "items": { + "$ref": "MessageBus" + }, + "type": "array" + }, + "nextPageToken": { + "description": "A page token that can be sent to `ListMessageBuses` to request the next page. If this is empty, then there are no more pages.", + "type": "string" + }, + "unreachable": { + "description": "Unreachable resources, if any.", + "items": { + "type": "string" + }, + "type": "array" + } + }, + "type": "object" + }, + "ListPipelinesResponse": { + "description": "The response message for the ListPipelines method.", + "id": "ListPipelinesResponse", + "properties": { + "nextPageToken": { + "description": "A page token that can be sent to `ListPipelines` to request the next page. 
If this is empty, then there are no more pages.", + "type": "string" + }, + "pipelines": { + "description": "The requested pipelines, up to the number specified in `page_size`.", + "items": { + "$ref": "Pipeline" + }, + "type": "array" + }, + "unreachable": { + "description": "Unreachable resources, if any.", + "items": { + "type": "string" + }, + "type": "array" + } + }, + "type": "object" + }, "ListProvidersResponse": { "description": "The response message for the `ListProviders` method.", "id": "ListProvidersResponse", @@ -2192,6 +3462,100 @@ }, "type": "object" }, + "LoggingConfig": { + "description": "The configuration for Platform Telemetry logging for Eventarc Advanced resources.", + "id": "LoggingConfig", + "properties": { + "logSeverity": { + "description": "Optional. The minimum severity of logs that will be sent to Stackdriver/Platform Telemetry. Logs at severitiy ≥ this value will be sent, unless it is NONE.", + "enum": [ + "LOG_SEVERITY_UNSPECIFIED", + "NONE", + "DEBUG", + "INFO", + "NOTICE", + "WARNING", + "ERROR", + "CRITICAL", + "ALERT", + "EMERGENCY" + ], + "enumDescriptions": [ + "Log severity is not specified. This value is treated the same as NONE, but is used to distinguish between no update and update to NONE in update_masks.", + "Default value at resource creation, presence of this value must be treated as no logging/disable logging.", + "Debug or trace level logging.", + "Routine information, such as ongoing status or performance.", + "Normal but significant events, such as start up, shut down, or a configuration change.", + "Warning events might cause problems.", + "Error events are likely to cause problems.", + "Critical events cause more severe problems or outages.", + "A person must take action immediately.", + "One or more systems are unusable." + ], + "type": "string" + } + }, + "type": "object" + }, + "MessageBus": { + "description": "MessageBus for the messages flowing through the system. The admin has visibility and control over the messages being published and consumed and can restrict publishers and subscribers to only a subset of data available in the system by defining authorization policies.", + "id": "MessageBus", + "properties": { + "annotations": { + "additionalProperties": { + "type": "string" + }, + "description": "Optional. Resource annotations.", + "type": "object" + }, + "createTime": { + "description": "Output only. The creation time.", + "format": "google-datetime", + "readOnly": true, + "type": "string" + }, + "cryptoKeyName": { + "description": "Optional. Resource name of a KMS crypto key (managed by the user) used to encrypt/decrypt their event data. It must match the pattern `projects/*/locations/*/keyRings/*/cryptoKeys/*`.", + "type": "string" + }, + "displayName": { + "description": "Optional. Resource display name.", + "type": "string" + }, + "etag": { + "description": "Output only. This checksum is computed by the server based on the value of other fields, and might be sent only on update and delete requests to ensure that the client has an up-to-date value before proceeding.", + "readOnly": true, + "type": "string" + }, + "labels": { + "additionalProperties": { + "type": "string" + }, + "description": "Optional. Resource labels.", + "type": "object" + }, + "loggingConfig": { + "$ref": "LoggingConfig", + "description": "Optional. Config to control Platform logging for the Message Bus. This log configuration is applied to the Message Bus itself, and all the Enrollments attached to it." + }, + "name": { + "description": "Identifier. 
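The list responses above all page the same way; a sketch of a `ListPipelinesResponse` with an assumed token, which the caller passes back as `pageToken` on the next `ListPipelines` call while keeping all other parameters unchanged:

```json
{
  "pipelines": [
    { "name": "projects/my-project/locations/us-central1/pipelines/orders-pipeline" }
  ],
  "nextPageToken": "EXAMPLE_TOKEN",
  "unreachable": []
}
```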
Resource name of the form projects/{project}/locations/{location}/messageBuses/{message_bus}", + "type": "string" + }, + "uid": { + "description": "Output only. Server assigned unique identifier for the channel. The value is a UUID4 string and guaranteed to remain unchanged until the resource is deleted.", + "readOnly": true, + "type": "string" + }, + "updateTime": { + "description": "Output only. The last-modified time.", + "format": "google-datetime", + "readOnly": true, + "type": "string" + } + }, + "type": "object" + }, "NetworkConfig": { "description": "Network Configuration that can be inherited by other protos.", "id": "NetworkConfig", @@ -2247,6 +3611,86 @@ }, "type": "object" }, + "Pipeline": { + "description": "A representation of the Pipeline resource.", + "id": "Pipeline", + "properties": { + "annotations": { + "additionalProperties": { + "type": "string" + }, + "description": "Optional. User-defined annotations. See https://google.aip.dev/128#annotations.", + "type": "object" + }, + "createTime": { + "description": "Output only. The creation time. A timestamp in RFC3339 UTC \"Zulu\" format, with nanosecond resolution and up to nine fractional digits. Examples: \"2014-10-02T15:01:23Z\" and \"2014-10-02T15:01:23.045123456Z\".", + "format": "google-datetime", + "readOnly": true, + "type": "string" + }, + "cryptoKeyName": { + "description": "Optional. Resource name of a KMS crypto key (managed by the user) used to encrypt/decrypt the event data. If not set, an internal Google-owned key will be used to encrypt messages. It must match the pattern \"projects/{project}/locations/{location}/keyRings/{keyring}/cryptoKeys/{key}\".", + "type": "string" + }, + "destinations": { + "description": "Required. List of destinations to which messages will be forwarded. Currently, exactly one destination is supported per Pipeline.", + "items": { + "$ref": "GoogleCloudEventarcV1PipelineDestination" + }, + "type": "array" + }, + "displayName": { + "description": "Optional. Display name of resource.", + "type": "string" + }, + "etag": { + "description": "Output only. This checksum is computed by the server based on the value of other fields, and might be sent only on create requests to ensure that the client has an up-to-date value before proceeding.", + "type": "string" + }, + "inputPayloadFormat": { + "$ref": "GoogleCloudEventarcV1PipelineMessagePayloadFormat", + "description": "Optional. The payload format expected for the messages received by the Pipeline. If input_payload_format is set then any messages not matching this format will be treated as persistent errors. If input_payload_format is not set, then the message data will be treated as an opaque binary and no output format can be set on the Pipeline through the Pipeline.Destination.output_payload_format field. Any Mediations on the Pipeline that involve access to the data field will fail as persistent errors." + }, + "labels": { + "additionalProperties": { + "type": "string" + }, + "description": "Optional. User labels attached to the Pipeline that can be used to group resources. An object containing a list of \"key\": value pairs. Example: { \"name\": \"wrench\", \"mass\": \"1.3kg\", \"count\": \"3\" }.", + "type": "object" + }, + "loggingConfig": { + "$ref": "LoggingConfig", + "description": "Optional. Config to control Platform Logging for Pipelines." + }, + "mediations": { + "description": "Optional. List of mediation operations to be performed on the message. 
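A sketch of a MessageBus resource per the schema above; the name, labels, and severity threshold are assumptions:

```json
{
  "name": "projects/my-project/locations/us-central1/messageBuses/default",
  "displayName": "Default message bus",
  "labels": { "env": "staging" },
  "loggingConfig": { "logSeverity": "WARNING" }
}
```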
Currently, only one Transformation operation is allowed in each Pipeline.", + "items": { + "$ref": "GoogleCloudEventarcV1PipelineMediation" + }, + "type": "array" + }, + "name": { + "description": "Identifier. The resource name of the Pipeline. Must be unique within the location of the project and must be in `projects/{project}/locations/{location}/pipelines/{pipeline}` format.", + "type": "string" + }, + "retryPolicy": { + "$ref": "GoogleCloudEventarcV1PipelineRetryPolicy", + "description": "Optional. The retry policy to use in the pipeline." + }, + "uid": { + "description": "Output only. Server-assigned unique identifier for the Pipeline. The value is a UUID4 string and guaranteed to remain unchanged until the resource is deleted.", + "readOnly": true, + "type": "string" + }, + "updateTime": { + "description": "Output only. The last-modified time. A timestamp in RFC3339 UTC \"Zulu\" format, with nanosecond resolution and up to nine fractional digits. Examples: \"2014-10-02T15:01:23Z\" and \"2014-10-02T15:01:23.045123456Z\".", + "format": "google-datetime", + "readOnly": true, + "type": "string" + } + }, + "type": "object" + }, "Policy": { "description": "An Identity and Access Management (IAM) policy, which specifies access controls for Google Cloud resources. A `Policy` is a collection of `bindings`. A `binding` binds one or more `members`, or principals, to a single `role`. Principals can be user accounts, service accounts, Google groups, and domains (such as G Suite). A `role` is a named list of permissions; each `role` can be an IAM predefined role or a user-created custom role. For some types of Google Cloud resources, a `binding` can also specify a `condition`, which is a logical expression that allows access to a resource only if the expression evaluates to `true`. A condition can add constraints based on attributes of the request, the resource, or both. To learn which resources support conditions in their IAM policies, see the [IAM documentation](https://cloud.google.com/iam/help/conditions/resource-policies). 
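Combining the pieces above, a sketch of a complete Pipeline resource (one destination, one transformation, JSON in and out); every ID, URI, and service account here is a placeholder assumption:

```json
{
  "name": "projects/my-project/locations/us-central1/pipelines/orders-pipeline",
  "displayName": "Orders pipeline",
  "inputPayloadFormat": { "json": {} },
  "mediations": [
    { "transformation": { "transformationTemplate": "message.removeFields([\"data.debug_info\"])" } }
  ],
  "destinations": [
    {
      "httpEndpoint": { "uri": "https://orders.internal.example.com:8080/events" },
      "networkConfig": {
        "networkAttachment": "projects/my-project/regions/us-central1/networkAttachments/default"
      },
      "authenticationConfig": {
        "googleOidc": { "serviceAccount": "pipeline-invoker@my-project.iam.gserviceaccount.com" }
      },
      "outputPayloadFormat": { "json": {} }
    }
  ],
  "retryPolicy": { "maxAttempts": 5, "minRetryDelay": "5s", "maxRetryDelay": "60s" },
  "loggingConfig": { "logSeverity": "WARNING" }
}
```

Note the documented constraints: exactly one destination per Pipeline, at most one Transformation mediation, and `outputPayloadFormat` only when `inputPayloadFormat` is set.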
**JSON example:** ``` { \"bindings\": [ { \"role\": \"roles/resourcemanager.organizationAdmin\", \"members\": [ \"user:mike@example.com\", \"group:admins@example.com\", \"domain:google.com\", \"serviceAccount:my-project-id@appspot.gserviceaccount.com\" ] }, { \"role\": \"roles/resourcemanager.organizationViewer\", \"members\": [ \"user:eve@example.com\" ], \"condition\": { \"title\": \"expirable access\", \"description\": \"Does not grant access after Sep 2020\", \"expression\": \"request.time < timestamp('2020-10-01T00:00:00.000Z')\", } } ], \"etag\": \"BwWWja0YfJA=\", \"version\": 3 } ``` **YAML example:** ``` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3 ``` For a description of IAM and its features, see the [IAM documentation](https://cloud.google.com/iam/docs/).", "id": "Policy", diff --git a/discovery/googleapis/fcm__v1.json b/discovery/googleapis/fcm__v1.json index ad5fc941e..4cab9dcc4 100644 --- a/discovery/googleapis/fcm__v1.json +++ b/discovery/googleapis/fcm__v1.json @@ -28,7 +28,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20240524", + "revision": "20241112", "rootUrl": "https://fcm.googleapis.com/", "servicePath": "", "title": "Firebase Cloud Messaging API", @@ -396,6 +396,10 @@ "description": "HTTP request headers defined in Apple Push Notification Service. Refer to [APNs request headers](https://developer.apple.com/documentation/usernotifications/setting_up_a_remote_notification_server/sending_notification_requests_to_apns) for supported headers such as `apns-expiration` and `apns-priority`. The backend sets a default value for `apns-expiration` of 30 days and a default value for `apns-priority` of 10 if not explicitly set.", "type": "object" }, + "liveActivityToken": { + "description": "Optional. [Apple Live Activity](https://developer.apple.com/design/human-interface-guidelines/live-activities) token to send updates to. This token can either be a push token or [push-to-start](https://developer.apple.com/documentation/activitykit/activity/pushtostarttoken) token from Apple. 
To start, update, or end a live activity remotely using FCM, construct an [`aps payload`](https://developer.apple.com/documentation/activitykit/starting-and-updating-live-activities-with-activitykit-push-notifications#Construct-the-payload-that-starts-a-Live-Activity) and put it in the [`apns.payload`](https://firebase.google.com/docs/reference/fcm/rest/v1/projects.messages#ApnsConfig) field.", + "type": "string" + }, "payload": { "additionalProperties": { "description": "Properties of the object.", diff --git a/discovery/googleapis/file__v1.json b/discovery/googleapis/file__v1.json index e452c6754..ad08ab4ff 100644 --- a/discovery/googleapis/file__v1.json +++ b/discovery/googleapis/file__v1.json @@ -25,7 +25,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20241015", + "revision": "20241204", "rootUrl": "https://file.googleapis.com/", "servicePath": "", "title": "Cloud Filestore API", @@ -730,6 +730,11 @@ "pattern": "^projects/[^/]+/locations/[^/]+/instances/[^/]+$", "required": true, "type": "string" + }, + "returnPartialSuccess": { + "description": "Optional. If true, allow partial responses for multi-regional Aggregated List requests.", + "location": "query", + "type": "boolean" } }, "path": "v1/{+parent}/snapshots", @@ -781,7 +786,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "file.projects.locations.operations.cancel", @@ -1040,7 +1045,7 @@ "additionalProperties": { "type": "string" }, - "description": "Optional. Input only. Immutable. Tag key-value pairs are bound to this resource. For example: \"123/environment\": \"production\", \"123/costCenter\": \"marketing\"", + "description": "Optional. Input only. Immutable. Tag key-value pairs bound to this resource. Each key must be a namespaced name and each value a short name. 
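For the FCM change above, a sketch of the `apns` portion of a v1 send request that updates a Live Activity via the new `liveActivityToken` field; the token is a placeholder, and the `aps` keys follow Apple's ActivityKit push payload shape rather than anything defined in this API:

```json
{
  "apns": {
    "liveActivityToken": "ACTIVITYKIT_PUSH_TOKEN",
    "payload": {
      "aps": {
        "timestamp": 1734000000,
        "event": "update",
        "content-state": { "orderStatus": "out_for_delivery" }
      }
    }
  }
}
```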
Example: \"123456789012/environment\" : \"production\", \"123456789013/costCenter\" : \"marketing\" See the documentation for more information: - Namespaced name: https://cloud.google.com/resource-manager/docs/tags/tags-creating-and-managing#retrieving_tag_key - Short name: https://cloud.google.com/resource-manager/docs/tags/tags-creating-and-managing#retrieving_tag_value", "type": "object" } }, @@ -1150,12 +1155,6 @@ "description": "Required. Maximum IOPS.", "format": "int64", "type": "string" - }, - "maxReadIops": { - "deprecated": true, - "description": "Optional. Deprecated: `max_iops` should be used instead of this parameter. Maximum raw read IOPS.", - "format": "int64", - "type": "string" } }, "type": "object" @@ -1443,12 +1442,6 @@ "description": "Required. Maximum IOPS per TiB.", "format": "int64", "type": "string" - }, - "maxReadIopsPerTb": { - "deprecated": true, - "description": "Optional. Deprecated: `max_iops_per_tb` should be used instead of this parameter. Maximum read IOPS per TiB.", - "format": "int64", - "type": "string" } }, "type": "object" @@ -1609,7 +1602,7 @@ "additionalProperties": { "type": "string" }, - "description": "Optional. Input only. Immutable. Tag key-value pairs are bound to this resource. For example: \"123/environment\": \"production\", \"123/costCenter\": \"marketing\"", + "description": "Optional. Input only. Immutable. Tag key-value pairs bound to this resource. Each key must be a namespaced name and each value a short name. Example: \"123456789012/environment\" : \"production\", \"123456789013/costCenter\" : \"marketing\" See the documentation for more information: - Namespaced name: https://cloud.google.com/resource-manager/docs/tags/tags-creating-and-managing#retrieving_tag_key - Short name: https://cloud.google.com/resource-manager/docs/tags/tags-creating-and-managing#retrieving_tag_value", "type": "object" }, "tier": { @@ -1657,7 +1650,7 @@ "type": "string" }, "unreachable": { - "description": "Locations that could not be reached.", + "description": "Unordered list. Locations that could not be reached.", "items": { "type": "string" }, @@ -1682,7 +1675,7 @@ "type": "string" }, "unreachable": { - "description": "Locations that could not be reached.", + "description": "Unordered list. Locations that could not be reached.", "items": { "type": "string" }, @@ -1741,6 +1734,13 @@ "$ref": "Snapshot" }, "type": "array" + }, + "unreachable": { + "description": "Unordered list. Locations that could not be reached.", + "items": { + "type": "string" + }, + "type": "array" } }, "type": "object" @@ -1993,7 +1993,7 @@ "type": "string" }, "cancelRequested": { - "description": "Output only. Identifies whether the user has requested cancellation of the operation. Operations that have been cancelled successfully have Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Output only. Identifies whether the user has requested cancellation of the operation. Operations that have been cancelled successfully have google.longrunning.Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "readOnly": true, "type": "boolean" }, @@ -2278,7 +2278,7 @@ "additionalProperties": { "type": "string" }, - "description": "Optional. Input only. Immutable. Tag key-value pairs are bound to this resource. For example: \"123/environment\": \"production\", \"123/costCenter\": \"marketing\"", + "description": "Optional. Input only. Immutable. Tag key-value pairs bound to this resource. 
Each key must be a namespaced name and each value a short name. Example: \"123456789012/environment\" : \"production\", \"123456789013/costCenter\" : \"marketing\" See the documentation for more information: - Namespaced name: https://cloud.google.com/resource-manager/docs/tags/tags-creating-and-managing#retrieving_tag_key - Short name: https://cloud.google.com/resource-manager/docs/tags/tags-creating-and-managing#retrieving_tag_value", "type": "object" } }, diff --git a/discovery/googleapis/firebaseappdistribution__v1.json b/discovery/googleapis/firebaseappdistribution__v1.json index 5d3733eed..f690d89c2 100644 --- a/discovery/googleapis/firebaseappdistribution__v1.json +++ b/discovery/googleapis/firebaseappdistribution__v1.json @@ -25,7 +25,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20240605", + "revision": "20241115", "rootUrl": "https://firebaseappdistribution.googleapis.com/", "servicePath": "", "title": "Firebase App Distribution API", @@ -442,7 +442,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. 
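For the Filestore tag maps above, the documented example rendered as the JSON map the API expects: namespaced tag-key names mapped to tag-value short names. Assuming the map is the resource's `tags` field:

```json
{
  "tags": {
    "123456789012/environment": "production",
    "123456789013/costCenter": "marketing"
  }
}
```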
On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/projects/{projectsId}/apps/{appsId}/releases/{releasesId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "firebaseappdistribution.projects.apps.releases.operations.cancel", diff --git a/discovery/googleapis/firebasedynamiclinks__v1.json b/discovery/googleapis/firebasedynamiclinks__v1.json index 0aa387eca..efa1bd234 100644 --- a/discovery/googleapis/firebasedynamiclinks__v1.json +++ b/discovery/googleapis/firebasedynamiclinks__v1.json @@ -25,7 +25,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20240821", + "revision": "20241125", "rootUrl": "https://firebasedynamiclinks.googleapis.com/", "servicePath": "", "title": "Firebase Dynamic Links API", diff --git a/discovery/googleapis/firebasehosting__v1.json b/discovery/googleapis/firebasehosting__v1.json index eb31ad3e5..542920f37 100644 --- a/discovery/googleapis/firebasehosting__v1.json +++ b/discovery/googleapis/firebasehosting__v1.json @@ -28,7 +28,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20240319", + "revision": "20241203", "rootUrl": "https://firebasehosting.googleapis.com/", "servicePath": "", "title": "Firebase Hosting API", @@ -115,7 +115,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "firebasehosting.operations.cancel", diff --git a/discovery/googleapis/firebaseml__v1.json b/discovery/googleapis/firebaseml__v1.json index b4647e323..a5c926bcf 100644 --- a/discovery/googleapis/firebaseml__v1.json +++ b/discovery/googleapis/firebaseml__v1.json @@ -25,7 +25,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20230802", + "revision": "20241117", "rootUrl": "https://firebaseml.googleapis.com/", "servicePath": "", "title": "Firebase ML API", @@ -112,7 +112,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. 
If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "firebaseml.operations.cancel", diff --git a/discovery/googleapis/firebaserules__v1.json b/discovery/googleapis/firebaserules__v1.json index f871c1c5c..664b4a226 100644 --- a/discovery/googleapis/firebaserules__v1.json +++ b/discovery/googleapis/firebaserules__v1.json @@ -31,7 +31,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20231120", + "revision": "20241125", "rootUrl": "https://firebaserules.googleapis.com/", "servicePath": "", "title": "Firebase Rules API", @@ -243,7 +243,7 @@ ], "parameters": { "executableVersion": { - "description": "The requested runtime executable version. Defaults to FIREBASE_RULES_EXECUTABLE_V1.", + "description": "Optional. The requested runtime executable version. Defaults to FIREBASE_RULES_EXECUTABLE_V1.", "enum": [ "RELEASE_EXECUTABLE_VERSION_UNSPECIFIED", "FIREBASE_RULES_EXECUTABLE_V1", @@ -285,7 +285,7 @@ ], "parameters": { "filter": { - "description": "`Release` filter. The list method supports filters with restrictions on the `Release.name`, and `Release.ruleset_name`. Example 1: A filter of 'name=prod*' might return `Release`s with names within 'projects/foo' prefixed with 'prod': Name -> Ruleset Name: * projects/foo/releases/prod -> projects/foo/rulesets/uuid1234 * projects/foo/releases/prod/v1 -> projects/foo/rulesets/uuid1234 * projects/foo/releases/prod/v2 -> projects/foo/rulesets/uuid8888 Example 2: A filter of `name=prod* ruleset_name=uuid1234` would return only `Release` instances for 'projects/foo' with names prefixed with 'prod' referring to the same `Ruleset` name of 'uuid1234': Name -> Ruleset Name: * projects/foo/releases/prod -> projects/foo/rulesets/1234 * projects/foo/releases/prod/v1 -> projects/foo/rulesets/1234 In the examples, the filter parameters refer to the search filters are relative to the project. Fully qualified prefixed may also be used.", + "description": "Optional. `Release` filter. The list method supports filters with restrictions on the `Release.name`, and `Release.ruleset_name`. 
Example 1: A filter of 'name=prod*' might return `Release`s with names within 'projects/foo' prefixed with 'prod': Name -> Ruleset Name: * projects/foo/releases/prod -> projects/foo/rulesets/uuid1234 * projects/foo/releases/prod/v1 -> projects/foo/rulesets/uuid1234 * projects/foo/releases/prod/v2 -> projects/foo/rulesets/uuid8888 Example 2: A filter of `name=prod* ruleset_name=uuid1234` would return only `Release` instances for 'projects/foo' with names prefixed with 'prod' referring to the same `Ruleset` name of 'uuid1234': Name -> Ruleset Name: * projects/foo/releases/prod -> projects/foo/rulesets/1234 * projects/foo/releases/prod/v1 -> projects/foo/rulesets/1234 In the examples, the filter parameters refer to the search filters are relative to the project. Fully qualified prefixed may also be used.", "location": "query", "type": "string" }, @@ -297,13 +297,13 @@ "type": "string" }, "pageSize": { - "description": "Page size to load. Maximum of 100. Defaults to 10. Note: `page_size` is just a hint and the service may choose to load fewer than `page_size` results due to the size of the output. To traverse all of the releases, the caller should iterate until the `page_token` on the response is empty.", + "description": "Optional. Page size to load. Maximum of 100. Defaults to 10. Note: `page_size` is just a hint and the service may choose to load fewer than `page_size` results due to the size of the output. To traverse all of the releases, the caller should iterate until the `page_token` on the response is empty.", "format": "int32", "location": "query", "type": "integer" }, "pageToken": { - "description": "Next page token for the next batch of `Release` instances.", + "description": "Optional. Next page token for the next batch of `Release` instances.", "location": "query", "type": "string" } @@ -443,7 +443,7 @@ ], "parameters": { "filter": { - "description": "`Ruleset` filter. The list method supports filters with restrictions on `Ruleset.name`. Filters on `Ruleset.create_time` should use the `date` function which parses strings that conform to the RFC 3339 date/time specifications. Example: `create_time > date(\"2017-01-01T00:00:00Z\") AND name=UUID-*`", + "description": "Optional. `Ruleset` filter. The list method supports filters with restrictions on `Ruleset.name`. Filters on `Ruleset.create_time` should use the `date` function which parses strings that conform to the RFC 3339 date/time specifications. Example: `create_time > date(\"2017-01-01T00:00:00Z\") AND name=UUID-*`", "location": "query", "type": "string" }, @@ -455,13 +455,13 @@ "type": "string" }, "pageSize": { - "description": "Page size to load. Maximum of 100. Defaults to 10. Note: `page_size` is just a hint and the service may choose to load less than `page_size` due to the size of the output. To traverse all of the releases, caller should iterate until the `page_token` is empty.", + "description": "Optional. Page size to load. Maximum of 100. Defaults to 10. Note: `page_size` is just a hint and the service may choose to load less than `page_size` due to the size of the output. To traverse all of the releases, caller should iterate until the `page_token` is empty.", "format": "int32", "location": "query", "type": "integer" }, "pageToken": { - "description": "Next page token for loading the next batch of `Ruleset` instances.", + "description": "Optional. 
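The `releases.list` parameters shown here (`filter`, `pageSize`, `pageToken`) only gain `Optional.` markers; the behavior is unchanged, and `page_size` remains a hint, so callers should keep following page tokens until none is returned. A pagination sketch under the same assumptions as the previous example, using a filter in the style of Example 1:

```python
# Sketch: list Firebase Rules releases matching a filter, following page tokens.
from googleapiclient.discovery import build

firebaserules = build("firebaserules", "v1")
project_name = "projects/PROJECT_ID"  # placeholder

releases = []
page_token = None
while True:
    resp = (
        firebaserules.projects()
        .releases()
        .list(
            name=project_name,
            filter="name=prod*",   # restricts on Release.name, as in Example 1
            pageSize=100,          # a hint; the service may return fewer
            pageToken=page_token,
        )
        .execute()
    )
    releases.extend(resp.get("releases", []))
    page_token = resp.get("nextPageToken")
    if not page_token:
        break

for release in releases:
    print(release["name"], "->", release.get("rulesetName"))
```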
Next page token for loading the next batch of `Ruleset` instances.", "location": "query", "type": "string" } @@ -966,11 +966,11 @@ "properties": { "source": { "$ref": "Source", - "description": "Optional `Source` to be checked for correctness. This field must not be set when the resource name refers to a `Ruleset`." + "description": "Optional. Optional `Source` to be checked for correctness. This field must not be set when the resource name refers to a `Ruleset`." }, "testSuite": { "$ref": "TestSuite", - "description": "The tests to execute against the `Source`. When `Source` is provided inline, the test cases will only be run if the `Source` is syntactically and semantically valid. Inline `TestSuite` to run." + "description": "Required. The tests to execute against the `Source`. When `Source` is provided inline, the test cases will only be run if the `Source` is syntactically and semantically valid. Inline `TestSuite` to run." } }, "type": "object" @@ -1019,7 +1019,7 @@ "description": "Required. `Release` to update." }, "updateMask": { - "description": "Specifies which fields to update.", + "description": "Optional. Specifies which fields to update.", "format": "google-fieldmask", "type": "string" } diff --git a/discovery/googleapis/firestore__v1.json b/discovery/googleapis/firestore__v1.json index 62bc8f294..47d80598a 100644 --- a/discovery/googleapis/firestore__v1.json +++ b/discovery/googleapis/firestore__v1.json @@ -28,7 +28,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20240904", + "revision": "20241204", "rootUrl": "https://firestore.googleapis.com/", "servicePath": "", "title": "Cloud Firestore API", @@ -1396,7 +1396,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/projects/{projectsId}/databases/{databasesId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "firestore.projects.databases.operations.cancel", @@ -2393,7 +2393,7 @@ "type": "string" }, "distanceThreshold": { - "description": "Optional. Option to specify a threshold for which no less similar documents will be returned. The behavior of the specified `distance_measure` will affect the meaning of the distance threshold. Since DOT_PRODUCT distances increase when the vectors are more similar, the comparison is inverted. 
For EUCLIDEAN, COSINE: WHERE distance <= distance_threshold For DOT_PRODUCT: WHERE distance >= distance_threshold", + "description": "Optional. Option to specify a threshold for which no less similar documents will be returned. The behavior of the specified `distance_measure` will affect the meaning of the distance threshold. Since DOT_PRODUCT distances increase when the vectors are more similar, the comparison is inverted. * For EUCLIDEAN, COSINE: WHERE distance <= distance_threshold * For DOT_PRODUCT: WHERE distance >= distance_threshold", "format": "double", "type": "number" }, @@ -4246,7 +4246,7 @@ "type": "object" }, "StructuredQuery": { - "description": "A Firestore query. The query stages are executed in the following order: 1. from 2. where 3. select 4. order_by + start_at + end_at 5. offset 6. limit", + "description": "A Firestore query. The query stages are executed in the following order: 1. from 2. where 3. select 4. order_by + start_at + end_at 5. offset 6. limit 7. find_nearest", "id": "StructuredQuery", "properties": { "endAt": { diff --git a/discovery/googleapis/forms__v1.json b/discovery/googleapis/forms__v1.json index 63df2a26e..2e5bdcc35 100644 --- a/discovery/googleapis/forms__v1.json +++ b/discovery/googleapis/forms__v1.json @@ -40,7 +40,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20220908", + "revision": "20241112", "rootUrl": "https://forms.googleapis.com/", "servicePath": "", "title": "Google Forms API", @@ -1192,6 +1192,10 @@ "description": "Read only. The question ID. On creation, it can be provided but the ID must not be already used in the form. If not provided, a new ID is assigned.", "type": "string" }, + "ratingQuestion": { + "$ref": "RatingQuestion", + "description": "A respondent can choose a rating from a pre-defined set of icons." + }, "required": { "description": "Whether the question must be answered in order for a respondent to submit their response.", "type": "boolean" @@ -1263,6 +1267,34 @@ }, "type": "object" }, + "RatingQuestion": { + "description": "A rating question. The user has a range of icons to choose from.", + "id": "RatingQuestion", + "properties": { + "iconType": { + "description": "Required. The icon type to use for the rating.", + "enum": [ + "RATING_ICON_TYPE_UNSPECIFIED", + "STAR", + "HEART", + "THUMB_UP" + ], + "enumDescriptions": [ + "Default value. Unused.", + "A star icon.", + "A heart icon.", + "A thumbs down icon." + ], + "type": "string" + }, + "ratingScaleLevel": { + "description": "Required. 
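The Firestore change above clarifies how `distance_threshold` is compared for each distance measure and adds `find_nearest` as stage 7 of `StructuredQuery`. A sketch of a `documents.runQuery` body exercising those fields over plain REST; the project, collection, vector field, and the map encoding of the query vector are assumptions on my part, and the embedding is truncated:

```python
# Sketch: vector search via runQuery with findNearest and a distanceThreshold.
# Placeholders: PROJECT_ID, the "chunks" collection, and the "embedding" field.
import google.auth
from google.auth.transport.requests import AuthorizedSession

credentials, _ = google.auth.default(
    scopes=["https://www.googleapis.com/auth/datastore"]
)
session = AuthorizedSession(credentials)

parent = "projects/PROJECT_ID/databases/(default)/documents"
body = {
    "structuredQuery": {
        "from": [{"collectionId": "chunks"}],
        "findNearest": {
            "vectorField": {"fieldPath": "embedding"},
            # Assumed vector encoding: a map value tagged as __vector__.
            "queryVector": {"mapValue": {"fields": {
                "__type__": {"stringValue": "__vector__"},
                "value": {"arrayValue": {"values": [
                    {"doubleValue": 0.1}, {"doubleValue": 0.2}, {"doubleValue": 0.3},
                ]}},
            }}},
            "distanceMeasure": "COSINE",
            "limit": 5,
            # For COSINE/EUCLIDEAN the filter is: WHERE distance <= distance_threshold.
            "distanceThreshold": 0.25,
        },
    }
}

resp = session.post(f"https://firestore.googleapis.com/v1/{parent}:runQuery", json=body)
resp.raise_for_status()
for item in resp.json():
    doc = item.get("document")
    if doc:
        print(doc["name"])
```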
The rating scale level of the rating question.", + "format": "int32", + "type": "integer" + } + }, + "type": "object" + }, "RenewWatchRequest": { "description": "Renew an existing Watch for seven days.", "id": "RenewWatchRequest", diff --git a/discovery/googleapis/gamesManagement__v1management.json b/discovery/googleapis/gamesManagement__v1management.json index 050c3fa21..26e7d5256 100644 --- a/discovery/googleapis/gamesManagement__v1management.json +++ b/discovery/googleapis/gamesManagement__v1management.json @@ -12,7 +12,7 @@ "baseUrl": "https://gamesmanagement.googleapis.com/", "batchPath": "batch", "canonicalName": "Games Management", - "description": "The Google Play Game Management API allows developers to manage resources from the Google Play Game service.", + "description": "The Google Play Games Management API allows developers to manage resources from the Google Play Game service.", "discoveryVersion": "v1", "documentationLink": "https://developers.google.com/games/", "icons": { @@ -25,7 +25,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20200811", + "revision": "20241209", "rootUrl": "https://gamesmanagement.googleapis.com/", "servicePath": "", "title": "Google Play Game Management", diff --git a/discovery/googleapis/games__v1.json b/discovery/googleapis/games__v1.json index a9c1efbf2..c21c9d0db 100644 --- a/discovery/googleapis/games__v1.json +++ b/discovery/googleapis/games__v1.json @@ -18,7 +18,7 @@ "baseUrl": "https://games.googleapis.com/", "batchPath": "batch", "canonicalName": "Games", - "description": "The Google Play games service allows developers to enhance games with social leaderboards, achievements, game state, sign-in with Google, and more.", + "description": "The Google Play Games Service allows developers to enhance games with social leaderboards, achievements, game state, sign-in with Google, and more.", "discoveryVersion": "v1", "documentationLink": "https://developers.google.com/games/", "icons": { @@ -31,7 +31,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20240701", + "revision": "20241209", "rootUrl": "https://games.googleapis.com/", "servicePath": "", "title": "Google Play Game Services", diff --git a/discovery/googleapis/gkebackup__v1.json b/discovery/googleapis/gkebackup__v1.json index cfeba3f80..91edab767 100644 --- a/discovery/googleapis/gkebackup__v1.json +++ b/discovery/googleapis/gkebackup__v1.json @@ -25,7 +25,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20241002", + "revision": "20241203", "rootUrl": "https://gkebackup.googleapis.com/", "servicePath": "", "title": "Backup for GKE API", @@ -898,7 +898,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. 
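The Forms revision above adds `ratingQuestion` to `Question` along with the `RatingQuestion` schema (`iconType`, `ratingScaleLevel`). A sketch that adds a star-rating question via `forms.batchUpdate`; the form ID is a placeholder, and the surrounding `createItem` request shape is the usual Forms API convention rather than something shown in this diff:

```python
# Sketch: add a 1-5 star rating question to an existing form via batchUpdate.
# FORM_ID is a placeholder; assumes google-api-python-client and ADC with the
# https://www.googleapis.com/auth/forms.body scope.
from googleapiclient.discovery import build

forms = build("forms", "v1")

body = {
    "requests": [
        {
            "createItem": {
                "item": {
                    "title": "How would you rate this release?",
                    "questionItem": {
                        "question": {
                            "required": True,
                            "ratingQuestion": {
                                "iconType": "STAR",     # STAR, HEART, or THUMB_UP
                                "ratingScaleLevel": 5,  # number of icons shown
                            },
                        }
                    },
                },
                "location": {"index": 0},
            }
        }
    ]
}

result = forms.forms().batchUpdate(formId="FORM_ID", body=body).execute()
print(result)
```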
If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "gkebackup.projects.locations.operations.cancel", @@ -2143,7 +2143,7 @@ "type": "object" }, "ClusterResourceRestoreScope": { - "description": "Defines the scope of cluster-scoped resources to restore. Some group kinds are not reasonable choices for a restore, and will cause an error if selected here. Any scope selection that would restore \"all valid\" resources automatically excludes these group kinds. - gkebackup.gke.io/BackupJob - gkebackup.gke.io/RestoreJob - metrics.k8s.io/NodeMetrics - migration.k8s.io/StorageState - migration.k8s.io/StorageVersionMigration - Node - snapshot.storage.k8s.io/VolumeSnapshotContent - storage.k8s.io/CSINode Some group kinds are driven by restore configuration elsewhere, and will cause an error if selected here. - Namespace - PersistentVolume", + "description": "Defines the scope of cluster-scoped resources to restore. Some group kinds are not reasonable choices for a restore, and will cause an error if selected here. Any scope selection that would restore \"all valid\" resources automatically excludes these group kinds. - Node - ComponentStatus - gkebackup.gke.io/BackupJob - gkebackup.gke.io/RestoreJob - metrics.k8s.io/NodeMetrics - migration.k8s.io/StorageState - migration.k8s.io/StorageVersionMigration - snapshot.storage.k8s.io/VolumeSnapshotContent - storage.k8s.io/CSINode - storage.k8s.io/VolumeAttachment Some group kinds are driven by restore configuration elsewhere, and will cause an error if selected here. - Namespace - PersistentVolume", "id": "ClusterResourceRestoreScope", "properties": { "allGroupKinds": { @@ -2688,7 +2688,7 @@ "type": "string" }, "requestedCancellation": { - "description": "Output only. Identifies whether the user has requested cancellation of the operation. Operations that have successfully been cancelled have Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Output only. Identifies whether the user has requested cancellation of the operation. Operations that have successfully been cancelled have google.longrunning.Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", "readOnly": true, "type": "boolean" }, diff --git a/discovery/googleapis/gkehub__v1.json b/discovery/googleapis/gkehub__v1.json index a09934b83..8b2694167 100644 --- a/discovery/googleapis/gkehub__v1.json +++ b/discovery/googleapis/gkehub__v1.json @@ -25,7 +25,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20241010", + "revision": "20241206", "rootUrl": "https://gkehub.googleapis.com/", "servicePath": "", "title": "GKE Hub API", @@ -1353,7 +1353,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. 
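Most of the remaining hunks repeat the same `operations.cancel` description tweak (the status code `1` is now rendered as code); the calling pattern itself is unchanged. A sketch against Backup for GKE, assuming the service also exposes the standard companion `operations.get` method and using placeholder resource IDs:

```python
# Sketch: request cancellation of a long-running operation, then poll its status.
# The project/location/operation IDs are placeholders; assumes ADC and
# google-api-python-client.
import time
from googleapiclient.discovery import build

gkebackup = build("gkebackup", "v1")
op_name = "projects/PROJECT_ID/locations/us-central1/operations/OPERATION_ID"

ops = gkebackup.projects().locations().operations()
ops.cancel(name=op_name, body={}).execute()  # best effort; success is not guaranteed

while True:
    op = ops.get(name=op_name).execute()
    if op.get("done"):
        if op.get("error", {}).get("code") == 1:  # google.rpc.Code.CANCELLED
            print("operation was cancelled")
        else:
            print("operation completed despite cancellation")
        break
    time.sleep(10)
```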
Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "gkehub.projects.locations.operations.cancel", @@ -2210,6 +2210,16 @@ "format": "byte", "type": "string" }, + "scopeTenancyIdentityProvider": { + "description": "Optional. Output only. The identity provider for the scope-tenancy workload identity pool.", + "readOnly": true, + "type": "string" + }, + "scopeTenancyWorkloadIdentityPool": { + "description": "Optional. Output only. The name of the scope-tenancy workload identity pool. This pool is set in the fleet-level feature.", + "readOnly": true, + "type": "string" + }, "workloadIdentityPool": { "description": "Output only. The name of the workload identity pool in which `issuer` will be recognized. There is a single Workload Identity Pool per Hub that is shared between all Memberships that belong to that Hub. For a Hub hosted in {PROJECT_ID}, the workload pool format is `{PROJECT_ID}.hub.id.goog`, although this is subject to change in newer versions of this API.", "readOnly": true, @@ -2651,7 +2661,8 @@ "description": "Git repo configuration for the cluster." }, "metricsGcpServiceAccountEmail": { - "description": "The Email of the Google Cloud Service Account (GSA) used for exporting Config Sync metrics to Cloud Monitoring and Cloud Monarch when Workload Identity is enabled. The GSA should have the Monitoring Metric Writer (roles/monitoring.metricWriter) IAM role. The Kubernetes ServiceAccount `default` in the namespace `config-management-monitoring` should be bound to the GSA.", + "deprecated": true, + "description": "The Email of the Google Cloud Service Account (GSA) used for exporting Config Sync metrics to Cloud Monitoring and Cloud Monarch when Workload Identity is enabled. The GSA should have the Monitoring Metric Writer (roles/monitoring.metricWriter) IAM role. The Kubernetes ServiceAccount `default` in the namespace `config-management-monitoring` should be bound to the GSA. Deprecated: If Workload Identity Federation for GKE is enabled, Google Cloud Service Account is no longer needed for exporting Config Sync metrics: https://cloud.google.com/kubernetes-engine/enterprise/config-sync/docs/how-to/monitor-config-sync-cloud-monitoring#custom-monitoring.", "type": "string" }, "oci": { @@ -2665,6 +2676,10 @@ "sourceFormat": { "description": "Specifies whether the Config Sync Repo is in \"hierarchical\" or \"unstructured\" mode.", "type": "string" + }, + "stopSyncing": { + "description": "Set to true to stop syncing configs for a single cluster. 
Default to false.", + "type": "boolean" } }, "type": "object" @@ -2869,6 +2884,12 @@ ], "type": "string" }, + "crCount": { + "description": "Output only. The number of RootSync and RepoSync CRs in the cluster.", + "format": "int32", + "readOnly": true, + "type": "integer" + }, "deploymentState": { "$ref": "ConfigManagementConfigSyncDeploymentState", "description": "Information about the deployment of ConfigSync, including the version of the various Pods deployed" @@ -4861,6 +4882,21 @@ "$ref": "Authority", "description": "Optional. How to identify workloads from this Membership. See the documentation on Workload Identity for more details: https://cloud.google.com/kubernetes-engine/docs/how-to/workload-identity" }, + "clusterTier": { + "description": "Output only. The tier of the cluster.", + "enum": [ + "CLUSTER_TIER_UNSPECIFIED", + "STANDARD", + "ENTERPRISE" + ], + "enumDescriptions": [ + "The ClusterTier is not set.", + "The ClusterTier is standard.", + "The ClusterTier is enterprise." + ], + "readOnly": true, + "type": "string" + }, "createTime": { "description": "Output only. When the Membership was created.", "format": "google-datetime", @@ -5362,7 +5398,7 @@ "type": "string" }, "cancelRequested": { - "description": "Output only. Identifies whether the user has requested cancellation of the operation. Operations that have successfully been cancelled have Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Output only. Identifies whether the user has requested cancellation of the operation. Operations that have successfully been cancelled have google.longrunning.Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", "readOnly": true, "type": "boolean" }, @@ -6311,6 +6347,20 @@ "description": "**Service Mesh**: Spec for a single Membership for the servicemesh feature", "id": "ServiceMeshMembershipSpec", "properties": { + "configApi": { + "description": "Optional. Specifies the API that will be used for configuring the mesh workloads.", + "enum": [ + "CONFIG_API_UNSPECIFIED", + "CONFIG_API_ISTIO", + "CONFIG_API_GATEWAY" + ], + "enumDescriptions": [ + "Unspecified", + "Use the Istio API for configuration.", + "Use the K8s Gateway API for configuration." + ], + "type": "string" + }, "controlPlane": { "deprecated": true, "description": "Deprecated: use `management` instead Enables automatic control plane management.", diff --git a/discovery/googleapis/gkehub__v2.json b/discovery/googleapis/gkehub__v2.json index 23012b9f5..3ed0f4ef9 100644 --- a/discovery/googleapis/gkehub__v2.json +++ b/discovery/googleapis/gkehub__v2.json @@ -25,7 +25,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20241010", + "revision": "20241206", "rootUrl": "https://gkehub.googleapis.com/", "servicePath": "", "title": "GKE Hub API", @@ -375,7 +375,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. 
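The GKE Hub hunks above deprecate `metricsGcpServiceAccountEmail` and add a `stopSyncing` flag to the Config Sync membership spec. A sketch of flipping that flag for one membership through the fleet-level `configmanagement` feature; the membership path, the `membershipSpecs` keying, and the update mask are my assumptions, not something stated in this diff:

```python
# Sketch: pause Config Sync on a single cluster by setting stopSyncing=true.
# Assumptions (not taken from this diff): the fleet feature is named
# "configmanagement" and per-membership specs are patched via membershipSpecs.
from googleapiclient.discovery import build

gkehub = build("gkehub", "v1")

feature_name = "projects/PROJECT_ID/locations/global/features/configmanagement"
membership = "projects/PROJECT_ID/locations/us-central1/memberships/my-cluster"

body = {
    "membershipSpecs": {
        membership: {
            "configmanagement": {
                "configSync": {
                    "stopSyncing": True  # new field; defaults to false
                }
            }
        }
    }
}

op = (
    gkehub.projects()
    .locations()
    .features()
    .patch(name=feature_name, updateMask="membership_specs", body=body)
    .execute()
)
print(op["name"])
```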
On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v2/projects/{projectsId}/locations/{locationsId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "gkehub.projects.locations.operations.cancel", @@ -716,7 +716,8 @@ "description": "Git repo configuration for the cluster." }, "metricsGcpServiceAccountEmail": { - "description": "The Email of the Google Cloud Service Account (GSA) used for exporting Config Sync metrics to Cloud Monitoring and Cloud Monarch when Workload Identity is enabled. The GSA should have the Monitoring Metric Writer (roles/monitoring.metricWriter) IAM role. The Kubernetes ServiceAccount `default` in the namespace `config-management-monitoring` should be bound to the GSA.", + "deprecated": true, + "description": "The Email of the Google Cloud Service Account (GSA) used for exporting Config Sync metrics to Cloud Monitoring and Cloud Monarch when Workload Identity is enabled. The GSA should have the Monitoring Metric Writer (roles/monitoring.metricWriter) IAM role. The Kubernetes ServiceAccount `default` in the namespace `config-management-monitoring` should be bound to the GSA. Deprecated: If Workload Identity Federation for GKE is enabled, Google Cloud Service Account is no longer needed for exporting Config Sync metrics: https://cloud.google.com/kubernetes-engine/enterprise/config-sync/docs/how-to/monitor-config-sync-cloud-monitoring#custom-monitoring.", "type": "string" }, "oci": { @@ -730,6 +731,10 @@ "sourceFormat": { "description": "Specifies whether the Config Sync Repo is in \"hierarchical\" or \"unstructured\" mode.", "type": "string" + }, + "stopSyncing": { + "description": "Set to true to stop syncing configs for a single cluster. Default to false.", + "type": "boolean" } }, "type": "object" @@ -934,6 +939,12 @@ ], "type": "string" }, + "crCount": { + "description": "Output only. The number of RootSync and RepoSync CRs in the cluster.", + "format": "int32", + "readOnly": true, + "type": "integer" + }, "deploymentState": { "$ref": "ConfigManagementConfigSyncDeploymentState", "description": "Information about the deployment of ConfigSync, including the version. of the various Pods deployed" @@ -1661,28 +1672,6 @@ "properties": {}, "type": "object" }, - "FeatureConfigRef": { - "description": "Information of the FeatureConfig applied on the MembershipFeature.", - "id": "FeatureConfigRef", - "properties": { - "config": { - "description": "Input only. Resource name of FeatureConfig, in the format: `projects/{project}/locations/global/featureConfigs/{feature_config}`.", - "type": "string" - }, - "configUpdateTime": { - "description": "Output only. 
When the FeatureConfig was last applied and copied to FeatureSpec.", - "format": "google-datetime", - "readOnly": true, - "type": "string" - }, - "uuid": { - "description": "Output only. An id that uniquely identify a FeatureConfig object.", - "readOnly": true, - "type": "string" - } - }, - "type": "object" - }, "FeatureSpec": { "description": "FeatureSpec contains user input per-feature spec information.", "id": "FeatureSpec", @@ -2316,10 +2305,6 @@ "readOnly": true, "type": "string" }, - "featureConfigRef": { - "$ref": "FeatureConfigRef", - "description": "Reference information for a FeatureConfig applied on the MembershipFeature." - }, "labels": { "additionalProperties": { "type": "string" @@ -2339,7 +2324,7 @@ }, "spec": { "$ref": "FeatureSpec", - "description": "Spec of this membershipFeature." + "description": "Optional. Spec of this membershipFeature." }, "state": { "$ref": "FeatureState", diff --git a/discovery/googleapis/gkeonprem__v1.json b/discovery/googleapis/gkeonprem__v1.json index 138a25550..e57539028 100644 --- a/discovery/googleapis/gkeonprem__v1.json +++ b/discovery/googleapis/gkeonprem__v1.json @@ -25,7 +25,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20240814", + "revision": "20241204", "rootUrl": "https://gkeonprem.googleapis.com/", "servicePath": "", "title": "GDC Virtual API", @@ -1601,7 +1601,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "gkeonprem.projects.locations.operations.cancel", @@ -1723,6 +1723,49 @@ }, "vmwareAdminClusters": { "methods": { + "create": { + "description": "Creates a new VMware admin cluster in a given project and location. The API needs to be combined with creating a bootstrap cluster to work.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/vmwareAdminClusters", + "httpMethod": "POST", + "id": "gkeonprem.projects.locations.vmwareAdminClusters.create", + "parameterOrder": [ + "parent" + ], + "parameters": { + "allowPreflightFailure": { + "description": "Optional. If set to true, CLM will force CCFE to persist the cluster resource in RMS when the creation fails during standalone preflight checks. 
In that case the subsequent create call will fail with \"cluster already exists\" error and hence a update cluster is required to fix the cluster.", + "location": "query", + "type": "boolean" + }, + "parent": { + "description": "Required. The parent of the project and location where the cluster is created in. Format: \"projects/{project}/locations/{location}\"", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+$", + "required": true, + "type": "string" + }, + "validateOnly": { + "description": "Validate the request without actually doing any updates.", + "location": "query", + "type": "boolean" + }, + "vmwareAdminClusterId": { + "description": "Required. User provided identifier that is used as part of the resource name; must conform to RFC-1034 and additionally restrict to lower-cased letters. This comes out roughly to: /^a-z+[a-z0-9]$/", + "location": "query", + "type": "string" + } + }, + "path": "v1/{+parent}/vmwareAdminClusters", + "request": { + "$ref": "VmwareAdminCluster" + }, + "response": { + "$ref": "Operation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, "enroll": { "description": "Enrolls an existing VMware admin cluster to the Anthos On-Prem API within a given project and location. Through enrollment, an existing admin cluster will become Anthos On-Prem API managed. The corresponding GCP resources will be created and all future modifications to the cluster will be expected to be performed through the API.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/vmwareAdminClusters:enroll", @@ -5796,7 +5839,12 @@ "VmwareAdminMetalLbConfig": { "description": "VmwareAdminMetalLbConfig represents configuration parameters for a MetalLB load balancer. For admin clusters, currently no configurations is needed.", "id": "VmwareAdminMetalLbConfig", - "properties": {}, + "properties": { + "enabled": { + "description": "Whether MetalLB is enabled.", + "type": "boolean" + } + }, "type": "object" }, "VmwareAdminNetworkConfig": { diff --git a/discovery/googleapis/healthcare__v1.json b/discovery/googleapis/healthcare__v1.json index d0f8c3540..558cf64e8 100644 --- a/discovery/googleapis/healthcare__v1.json +++ b/discovery/googleapis/healthcare__v1.json @@ -28,7 +28,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20241009", + "revision": "20241205", "rootUrl": "https://healthcare.googleapis.com/", "servicePath": "", "title": "Cloud Healthcare API", @@ -2946,6 +2946,64 @@ }, "fhirStores": { "methods": { + "applyAdminConsents": { + "description": "Applies the admin Consent resources for the FHIR store and reindexes the underlying resources in the FHIR store according to the aggregate consents. This method also updates the `consent_config.enforced_admin_consents` field of the FhirStore unless `validate_only=true` in ApplyAdminConsentsRequest. Any admin Consent resource change after this operation execution (including deletion) requires you to call ApplyAdminConsents again for the change to take effect. This method returns an Operation that can be used to track the progress of the resources that were reindexed, by calling GetOperation. Upon completion, the ApplyAdminConsentsResponse additionally contains the number of resources that were reindexed. If at least one Consent resource contains an error or fails be be enforced for any reason, the method returns an error instead of an Operation. No resources will be reindexed and the `consent_config.enforced_admin_consents` field will be unchanged. 
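The GDC Virtual (gkeonprem) hunk above introduces `vmwareAdminClusters.create`, including the `allowPreflightFailure` flag and a `validateOnly` dry run. A sketch of a validate-only call; the body is only a placeholder skeleton (the full `VmwareAdminCluster` schema has many more fields), and all identifiers are hypothetical:

```python
# Sketch: validate-only creation of a VMware admin cluster.
# The cluster body below is a minimal placeholder, not a complete spec.
from googleapiclient.discovery import build

gkeonprem = build("gkeonprem", "v1")

parent = "projects/PROJECT_ID/locations/us-west1"
cluster = {
    "description": "admin cluster managed through the Anthos On-Prem API",
    "onPremVersion": "1.30.0-gke.100",  # hypothetical version string
}

op = (
    gkeonprem.projects()
    .locations()
    .vmwareAdminClusters()
    .create(
        parent=parent,
        vmwareAdminClusterId="my-admin-cluster",  # must conform to RFC-1034
        validateOnly=True,  # run server-side validation without creating anything
        body=cluster,
    )
    .execute()
)
print(op)
```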
To enforce a consent check for data access, `consent_config.access_enforced` must be set to true for the FhirStore.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/datasets/{datasetsId}/fhirStores/{fhirStoresId}:applyAdminConsents", + "httpMethod": "POST", + "id": "healthcare.projects.locations.datasets.fhirStores.applyAdminConsents", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "Required. The name of the FHIR store to enforce, in the format `projects/{project_id}/locations/{location_id}/datasets/{dataset_id}/fhirStores/{fhir_store_id}`.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/datasets/[^/]+/fhirStores/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+name}:applyAdminConsents", + "request": { + "$ref": "ApplyAdminConsentsRequest" + }, + "response": { + "$ref": "Operation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-healthcare", + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "applyConsents": { + "description": "Apply the Consent resources for the FHIR store and reindex the underlying resources in the FHIR store according to the aggregate consent. The aggregate consent of the patient in scope in this request replaces any previous call of this method. Any Consent resource change after this operation execution (including deletion) requires you to call ApplyConsents again to have effect. This method returns an Operation that can be used to track the progress of the consent resources that were processed by calling GetOperation. Upon completion, the ApplyConsentsResponse additionally contains the number of resources that was reindexed. Errors are logged to Cloud Logging (see [Viewing error logs in Cloud Logging](https://cloud.google.com/healthcare/docs/how-tos/logging)). To enforce consent check for data access, `consent_config.access_enforced` must be set to true for the FhirStore.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/datasets/{datasetsId}/fhirStores/{fhirStoresId}:applyConsents", + "httpMethod": "POST", + "id": "healthcare.projects.locations.datasets.fhirStores.applyConsents", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "Required. The name of the FHIR store to enforce, in the format `projects/{project_id}/locations/{location_id}/datasets/{dataset_id}/fhirStores/{fhir_store_id}`.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/datasets/[^/]+/fhirStores/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+name}:applyConsents", + "request": { + "$ref": "ApplyConsentsRequest" + }, + "response": { + "$ref": "Operation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-healthcare", + "https://www.googleapis.com/auth/cloud-platform" + ] + }, "create": { "description": "Creates a new FHIR store within the parent dataset.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/datasets/{datasetsId}/fhirStores", @@ -3035,6 +3093,37 @@ "https://www.googleapis.com/auth/cloud-platform" ] }, + "explainDataAccess": { + "description": "Explains all the permitted/denied actor, purpose and environment for a given resource.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/datasets/{datasetsId}/fhirStores/{fhirStoresId}:explainDataAccess", + "httpMethod": "GET", + "id": "healthcare.projects.locations.datasets.fhirStores.explainDataAccess", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "Required. 
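The new FHIR store methods above (`applyAdminConsents`, `applyConsents`, `explainDataAccess`) all hang off the store resource. A sketch of a validate-only `applyAdminConsents` call with a single versioned admin Consent; resource names are placeholders, and the request fields follow the `ApplyAdminConsentsRequest` schema that appears later in this diff:

```python
# Sketch: validate (but do not yet enforce) one admin Consent on a FHIR store.
# All resource names are placeholders.
from googleapiclient.discovery import build

healthcare = build("healthcare", "v1")

store = (
    "projects/PROJECT_ID/locations/us-central1/"
    "datasets/my-dataset/fhirStores/my-fhir-store"
)
body = {
    "validateOnly": True,  # only check that the Consent resources are supported
    "newConsentsList": {
        "names": [
            f"{store}/fhir/Consent/CONSENT_ID/_history/VERSION_ID"
        ]
    },
}

op = (
    healthcare.projects()
    .locations()
    .datasets()
    .fhirStores()
    .applyAdminConsents(name=store, body=body)
    .execute()
)
print(op)
```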
The name of the FHIR store to enforce, in the format `projects/{project_id}/locations/{location_id}/datasets/{dataset_id}/fhirStores/{fhir_store_id}`.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/datasets/[^/]+/fhirStores/[^/]+$", + "required": true, + "type": "string" + }, + "resourceId": { + "description": "Required. The ID (`{resourceType}/{id}`) of the resource to explain data access on.", + "location": "query", + "type": "string" + } + }, + "path": "v1/{+name}:explainDataAccess", + "response": { + "$ref": "ExplainDataAccessResponse" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-healthcare", + "https://www.googleapis.com/auth/cloud-platform" + ] + }, "export": { "description": "Export resources from the FHIR store to the specified destination. This method returns an Operation that can be used to track the status of the export by calling GetOperation. Immediate fatal errors appear in the error field, errors are also logged to Cloud Logging (see [Viewing error logs in Cloud Logging](https://cloud.google.com/healthcare/docs/how-tos/logging)). Otherwise, when the operation finishes, a detailed response of type ExportResourcesResponse is returned in the response field. The metadata field type for this operation is OperationMetadata.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/datasets/{datasetsId}/fhirStores/{fhirStoresId}:export", @@ -3346,7 +3435,7 @@ "fhir": { "methods": { "Binary-create": { - "description": "Creates a FHIR Binary resource. This method can be used to create a Binary resource either by using one of the accepted FHIR JSON content types, or as a raw data stream. If a resource is created with this method using the FHIR content type this method's behavior is the same as [`fhir.create`](https://cloud.google.com/healthcare-api/docs/reference/rest/v1/projects.locations.datasets.fhirStores.fhir/create). If a resource type other than Binary is used in the request it's treated in the same way as non-FHIR data (e.g., images, zip archives, pdf files, documents). When a non-FHIR content type is used in the request, a Binary resource will be generated, and the uploaded data will be stored in the `content` field (`DSTU2` and `STU3`), or the `data` field (`R4`). The Binary resource's `contentType` will be filled in using the value of the `Content-Type` header, and the `securityContext` field (not present in `DSTU2`) will be populated from the `X-Security-Context` header if it exists. At this time `securityContext` has no special behavior in the Cloud Healthcare API. Note: the limit on data ingested through this method is 2 GB. For best performance, use a non-FHIR data type instead of wrapping the data in a Binary resource. Some of the Healthcare API features, such as [exporting to BigQuery](https://cloud.google.com/healthcare-api/docs/how-tos/fhir-export-bigquery) or [Pub/Sub notifications](https://cloud.google.com/healthcare-api/docs/fhir-pubsub#behavior_when_a_fhir_resource_is_too_large_or_traffic_is_high) with full resource content, do not support Binary resources that are larger than 10 MB. In these cases the resource's `data` field will be omitted. Instead, the \"http://hl7.org/fhir/StructureDefinition/data-absent-reason\" extension will be present to indicate that including the data is `unsupported`. On success, an empty `201 Created` response is returned. The newly created resource's ID and version are returned in the Location header. Using `Prefer: representation=resource` is not allowed for this method. 
The definition of the Binary REST API can be found at https://hl7.org/fhir/binary.html#rest.", + "description": "Creates a FHIR Binary resource. This method can be used to create a Binary resource either by using one of the accepted FHIR JSON content types, or as a raw data stream. If a resource is created with this method using the FHIR content type this method's behavior is the same as [`fhir.create`](https://cloud.google.com/healthcare-api/docs/reference/rest/v1/projects.locations.datasets.fhirStores.fhir/create). If a resource type other than Binary is used in the request it's treated in the same way as non-FHIR data (e.g., images, zip archives, pdf files, documents). When a non-FHIR content type is used in the request, a Binary resource will be generated, and the uploaded data will be stored in the `content` field (`DSTU2` and `STU3`), or the `data` field (`R4`). The Binary resource's `contentType` will be filled in using the value of the `Content-Type` header, and the `securityContext` field (not present in `DSTU2`) will be populated from the `X-Security-Context` header if it exists. At this time `securityContext` has no special behavior in the Cloud Healthcare API. Note: the limit on data ingested through this method is 1 GB. For best performance, use a non-FHIR data type instead of wrapping the data in a Binary resource. Some of the Healthcare API features, such as [exporting to BigQuery](https://cloud.google.com/healthcare-api/docs/how-tos/fhir-export-bigquery) or [Pub/Sub notifications](https://cloud.google.com/healthcare-api/docs/fhir-pubsub#behavior_when_a_fhir_resource_is_too_large_or_traffic_is_high) with full resource content, do not support Binary resources that are larger than 10 MB. In these cases the resource's `data` field will be omitted. Instead, the \"http://hl7.org/fhir/StructureDefinition/data-absent-reason\" extension will be present to indicate that including the data is `unsupported`. On success, an empty `201 Created` response is returned. The newly created resource's ID and version are returned in the Location header. Using `Prefer: representation=resource` is not allowed for this method. The definition of the Binary REST API can be found at https://hl7.org/fhir/binary.html#rest.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/datasets/{datasetsId}/fhirStores/{fhirStoresId}/fhir/Binary", "httpMethod": "POST", "id": "healthcare.projects.locations.datasets.fhirStores.fhir.Binary-create", @@ -3455,6 +3544,69 @@ "https://www.googleapis.com/auth/cloud-platform" ] }, + "Consent-enforcement-status": { + "description": "Returns the consent enforcement status of a single consent resource. On success, the response body contains a JSON-encoded representation of a `Parameters` (http://hl7.org/fhir/parameters.html) FHIR resource, containing the current enforcement status. Does not support DSTU2.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/datasets/{datasetsId}/fhirStores/{fhirStoresId}/fhir/Consent/{ConsentId}/$consent-enforcement-status", + "httpMethod": "GET", + "id": "healthcare.projects.locations.datasets.fhirStores.fhir.Consent-enforcement-status", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "Required. 
The name of the consent resource to find enforcement status, in the format `projects/{project_id}/locations/{location_id}/datasets/{dataset_id}/fhirStores/{fhir_store_id}/fhir/Consent/{consent_id}`", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/datasets/[^/]+/fhirStores/[^/]+/fhir/Consent/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+name}/$consent-enforcement-status", + "response": { + "$ref": "HttpBody" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-healthcare", + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "Patient-consent-enforcement-status": { + "description": "Returns the consent enforcement status of all consent resources for a patient. On success, the response body contains a JSON-encoded representation of a bundle of `Parameters` (http://hl7.org/fhir/parameters.html) FHIR resources, containing the current enforcement status for each consent resource of the patient. Does not support DSTU2.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/datasets/{datasetsId}/fhirStores/{fhirStoresId}/fhir/Patient/{PatientId}/$consent-enforcement-status", + "httpMethod": "GET", + "id": "healthcare.projects.locations.datasets.fhirStores.fhir.Patient-consent-enforcement-status", + "parameterOrder": [ + "name" + ], + "parameters": { + "_count": { + "description": "Optional. The maximum number of results on a page. If not specified, 100 is used. May not be larger than 1000.", + "format": "int32", + "location": "query", + "type": "integer" + }, + "_page_token": { + "description": "Optional. Used to retrieve the first, previous, next, or last page of consent enforcement statuses when using pagination. Value should be set to the value of `_page_token` set in next or previous page links' URLs. Next and previous page are returned in the response bundle's links field, where `link.relation` is \"previous\" or \"next\". Omit `_page_token` if no previous request has been made.", + "location": "query", + "type": "string" + }, + "name": { + "description": "Required. The name of the patient to find enforcement statuses, in the format `projects/{project_id}/locations/{location_id}/datasets/{dataset_id}/fhirStores/{fhir_store_id}/fhir/Patient/{patient_id}`", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/datasets/[^/]+/fhirStores/[^/]+/fhir/Patient/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+name}/$consent-enforcement-status", + "response": { + "$ref": "HttpBody" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-healthcare", + "https://www.googleapis.com/auth/cloud-platform" + ] + }, "Patient-everything": { "description": "Retrieves a Patient resource and resources related to that patient. Implements the FHIR extended operation Patient-everything ([DSTU2](http://hl7.org/implement/standards/fhir/DSTU2/patient-operations.html#everything), [STU3](http://hl7.org/implement/standards/fhir/STU3/patient-operations.html#everything), [R4](http://hl7.org/implement/standards/fhir/R4/patient-operations.html#everything)). On success, the response body contains a JSON-encoded representation of a `Bundle` resource of type `searchset`, containing the results of the operation. Errors generated by the FHIR store contain a JSON-encoded `OperationOutcome` resource describing the reason for the error. If the request cannot be mapped to a valid API method on a FHIR store, a generic GCP error might be returned instead. 
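The `$consent-enforcement-status` operations above are plain FHIR extended operations, so they are easy to exercise over REST. A sketch that pages through one patient's consent enforcement statuses using the documented `_count` and next-page links; resource names are placeholders, and the bundle handling assumes standard FHIR `Bundle.link` semantics as described in the method text:

```python
# Sketch: page through $consent-enforcement-status results for one patient.
# Placeholders throughout; assumes ADC with the cloud-healthcare scope.
import google.auth
from google.auth.transport.requests import AuthorizedSession

credentials, _ = google.auth.default(
    scopes=["https://www.googleapis.com/auth/cloud-healthcare"]
)
session = AuthorizedSession(credentials)

patient = (
    "projects/PROJECT_ID/locations/us-central1/datasets/my-dataset/"
    "fhirStores/my-fhir-store/fhir/Patient/PATIENT_ID"
)
url = f"https://healthcare.googleapis.com/v1/{patient}/$consent-enforcement-status"

params = {"_count": 100}
while True:
    resp = session.get(url, params=params)
    resp.raise_for_status()
    bundle = resp.json()  # a FHIR Bundle of Parameters resources
    for entry in bundle.get("entry", []):
        print(entry["resource"])
    next_link = next(
        (link["url"] for link in bundle.get("link", []) if link.get("relation") == "next"),
        None,
    )
    if not next_link:
        break
    url, params = next_link, None  # the next link already embeds _page_token
```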
The resources in scope for the response are: * The patient resource itself. * All the resources directly referenced by the patient resource. * Resources directly referencing the patient resource that meet the inclusion criteria. The inclusion criteria are based on the membership rules in the patient compartment definition ([DSTU2](http://hl7.org/fhir/DSTU2/compartment-patient.html), [STU3](http://www.hl7.org/fhir/stu3/compartmentdefinition-patient.html), [R4](http://hl7.org/fhir/R4/compartmentdefinition-patient.html)), which details the eligible resource types and referencing search parameters. For samples that show how to call `Patient-everything`, see [Getting all patient compartment resources](https://cloud.google.com/healthcare/docs/how-tos/fhir-resources#getting_all_patient_compartment_resources).", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/datasets/{datasetsId}/fhirStores/{fhirStoresId}/fhir/Patient/{PatientId}/$everything", @@ -4645,7 +4797,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/datasets/{datasetsId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "healthcare.projects.locations.datasets.operations.cancel", @@ -4788,6 +4940,29 @@ } }, "schemas": { + "AccessDeterminationLogConfig": { + "description": "Configures consent audit log config for FHIR create, read, update, and delete (CRUD) operations. Cloud audit log for healthcare API must be [enabled](https://cloud.google.com/logging/docs/audit/configure-data-access#config-console-enable). The consent-related logs are included as part of `protoPayload.metadata`.", + "id": "AccessDeterminationLogConfig", + "properties": { + "logLevel": { + "description": "Optional. Controls the amount of detail to include as part of the audit logs.", + "enum": [ + "LOG_LEVEL_UNSPECIFIED", + "DISABLED", + "MINIMUM", + "VERBOSE" + ], + "enumDescriptions": [ + "No log level specified. This value is unused.", + "No additional consent-related logging is added to audit logs.", + "The following information is included: * One of the following [`consentMode`](https://cloud.google.com/healthcare-api/docs/fhir-consent#audit_logs) fields: (`off`|`emptyScope`|`enforced`|`btg`|`bypass`). 
* The accessor's request headers * The `log_level` of the AccessDeterminationLogConfig * The final consent evaluation (`PERMIT`, `DENY`, or `NO_CONSENT`) * A human-readable summary of the evaluation", + "Includes `MINIMUM` and, for each resource owner, returns: * The resource owner's name * Most specific part of the `X-Consent-Scope` resulting in consensual determination * Timestamp of the applied enforcement leading to the decision * Enforcement version at the time the applicable consents were applied * The Consent resource name * The timestamp of the Consent resource used for enforcement * Policy type (`PATIENT` or `ADMIN`) Note that this mode adds some overhead to CRUD operations." + ], + "type": "string" + } + }, + "type": "object" + }, "ActivateConsentRequest": { "description": "Activates the latest revision of the specified Consent by committing a new revision with `state` updated to `ACTIVE`. If the latest revision of the given Consent is in the `ACTIVE` state, no new revision is committed. A FAILED_PRECONDITION error occurs if the latest revision of the given consent is in the `REJECTED` or `REVOKED` state.", "id": "ActivateConsentRequest", @@ -4809,6 +4984,20 @@ }, "type": "object" }, + "AdminConsents": { + "description": "List of admin Consent resources to be applied.", + "id": "AdminConsents", + "properties": { + "names": { + "description": "Optional. The versioned names of the admin Consent resource(s), in the format `projects/{project_id}/locations/{location}/datasets/{dataset_id}/fhirStores/{fhir_store_id}/fhir/Consent/{resource_id}/_history/{version_id}`. For FHIR stores with `disable_resource_versioning=true`, the format is `projects/{project_id}/locations/{location}/datasets/{dataset_id}/fhirStores/{fhir_store_id}/fhir/Consent/{resource_id}`.", + "items": { + "type": "string" + }, + "type": "array" + } + }, + "type": "object" + }, "AnalyzeEntitiesRequest": { "description": "The request to analyze healthcare entities in a document.", "id": "AnalyzeEntitiesRequest", @@ -4881,6 +5070,108 @@ }, "type": "object" }, + "ApplyAdminConsentsErrorDetail": { + "description": "Contains the error details of the unsupported admin Consent resources for when the ApplyAdminConsents method fails to apply one or more Consent resources.", + "id": "ApplyAdminConsentsErrorDetail", + "properties": { + "consentErrors": { + "description": "The list of Consent resources that are unsupported or cannot be applied and the error associated with each of them.", + "items": { + "$ref": "ConsentErrors" + }, + "type": "array" + }, + "existingOperationId": { + "description": "The currently in progress non-validate-only ApplyAdminConsents operation ID if exist.", + "format": "uint64", + "type": "string" + } + }, + "type": "object" + }, + "ApplyAdminConsentsRequest": { + "description": "Request to apply the admin Consent resources for the specified FHIR store.", + "id": "ApplyAdminConsentsRequest", + "properties": { + "newConsentsList": { + "$ref": "AdminConsents", + "description": "A new list of admin Consent resources to be applied. Any existing enforced Consents, which are specified in `consent_config.enforced_admin_consents` of the FhirStore, that are not part of this list will be disabled. An empty list is equivalent to clearing or disabling all Consents enforced on the FHIR store. 
When a FHIR store has `disable_resource_versioning=true` and this list contains a Consent resource that exists in `consent_config.enforced_admin_consents`, the method enforces any updates to the existing resource since the last enforcement. If the existing resource hasn't been updated since the last enforcement, the resource is unaffected. After the method finishes, the resulting consent enforcement model is determined by the contents of the Consent resource(s) when the method was called: * When `disable_resource_versioning=true`, the result is identical to the current resource(s) in the FHIR store. * When `disable_resource_versioning=false`, the result is based on the historical version(s) of the Consent resource(s) at the point in time when the method was called. At most 200 Consents can be specified." + }, + "validateOnly": { + "description": "Optional. If true, the method only validates Consent resources to make sure they are supported. Otherwise, the method applies the aggregate consent information to update the enforcement model and reindex the FHIR resources. If all Consent resources can be applied successfully, the ApplyAdminConsentsResponse is returned containing the following fields: * `consent_apply_success` to indicate the number of Consent resources applied. * `affected_resources` to indicate the number of resources that might have had their consent access changed. If, however, one or more Consent resources are unsupported or cannot be applied, the method fails and ApplyAdminConsentsErrorDetail is is returned with details about the unsupported Consent resources.", + "type": "boolean" + } + }, + "type": "object" + }, + "ApplyAdminConsentsResponse": { + "description": "Response when all admin Consent resources in scope were processed and all affected resources were reindexed successfully. This structure will be included in the response when the operation finishes successfully.", + "id": "ApplyAdminConsentsResponse", + "properties": { + "affectedResources": { + "description": "The number of resources (including the Consent resources) that may have consent access change.", + "format": "int64", + "type": "string" + }, + "consentApplySuccess": { + "description": "If `validate_only=false` in ApplyAdminConsentsRequest, this counter contains the number of Consent resources that were successfully applied. Otherwise, it is the number of Consent resources that are supported.", + "format": "int64", + "type": "string" + }, + "failedResources": { + "description": "The number of resources (including the Consent resources) that ApplyAdminConsents failed to re-index.", + "format": "int64", + "type": "string" + } + }, + "type": "object" + }, + "ApplyConsentsRequest": { + "description": "Request to apply the Consent resources for the specified FHIR store.", + "id": "ApplyConsentsRequest", + "properties": { + "patientScope": { + "$ref": "PatientScope", + "description": "Optional. Scope down to a list of patients." + }, + "timeRange": { + "$ref": "TimeRange", + "description": "Optional. Scope down to patients whose most recent consent changes are in the time range. Can only be used with a versioning store (i.e. when disable_resource_versioning is set to false)." + }, + "validateOnly": { + "description": "Optional. If true, the method only validates Consent resources to make sure they are supported. When the operation completes, ApplyConsentsResponse is returned where `consent_apply_success` and `consent_apply_failure` indicate supported and unsupported (or invalid) Consent resources, respectively. 
Otherwise, the method propagates the aggregate consensual information to the patient's resources. Upon success, `affected_resources` in the ApplyConsentsResponse indicates the number of resources that may have consensual access changed.", + "type": "boolean" + } + }, + "type": "object" + }, + "ApplyConsentsResponse": { + "description": "Response when all Consent resources in scope were processed and all affected resources were reindexed successfully. This structure is included in the response when the operation finishes successfully.", + "id": "ApplyConsentsResponse", + "properties": { + "affectedResources": { + "description": "The number of resources (including the Consent resources) that may have consensual access change.", + "format": "int64", + "type": "string" + }, + "consentApplyFailure": { + "description": "If `validate_only = false` in ApplyConsentsRequest, this counter is the number of Consent resources that failed to apply. Otherwise, it is the number of Consent resources that are not supported or invalid.", + "format": "int64", + "type": "string" + }, + "consentApplySuccess": { + "description": "If `validate_only = false` in ApplyConsentsRequest, this counter is the number of Consent resources that were successfully applied. Otherwise, it is the number of Consent resources that are supported.", + "format": "int64", + "type": "string" + }, + "failedResources": { + "description": "The number of resources (including the Consent resources) that ApplyConsents failed to re-index.", + "format": "int64", + "type": "string" + } + }, + "type": "object" + }, "ArchiveUserDataMappingRequest": { "description": "Archives the specified User data mapping.", "id": "ArchiveUserDataMappingRequest", @@ -5099,7 +5390,7 @@ "id": "CharacterMaskConfig", "properties": { "maskingCharacter": { - "description": "Character to mask the sensitive values. If not supplied, defaults to \"*\".", + "description": "Optional. Character to mask the sensitive values. If not supplied, defaults to \"*\".", "type": "string" } }, @@ -5233,6 +5524,25 @@ }, "type": "object" }, + "ConsentAccessorScope": { + "description": "The accessor scope that describes who can access, for what purpose, in which environment.", + "id": "ConsentAccessorScope", + "properties": { + "actor": { + "description": "An individual, group, or access role that identifies the accessor or a characteristic of the accessor. This can be a resource ID (such as `{resourceType}/{id}`) or an external URI. This value must be present.", + "type": "string" + }, + "environment": { + "description": "An abstract identifier that describes the environment or conditions under which the accessor is acting. Can be \"*\" if it applies to all environments.", + "type": "string" + }, + "purpose": { + "description": "The intent of data use. Can be \"*\" if it applies to all purposes.", + "type": "string" + } + }, + "type": "object" + }, "ConsentArtifact": { "description": "Documentation of a user's consent.", "id": "ConsentArtifact", @@ -5278,6 +5588,60 @@ }, "type": "object" }, + "ConsentConfig": { + "description": "Configures whether to enforce consent for the FHIR store and which consent enforcement version is being used.", + "id": "ConsentConfig", + "properties": { + "accessDeterminationLogConfig": { + "$ref": "AccessDeterminationLogConfig", + "description": "Optional. Specifies how the server logs the consent-aware requests. If not specified, the `AccessDeterminationLogConfig.LogLevel.MINIMUM` option is used." + }, + "accessEnforced": { + "description": "Optional. 
The default value is false. If set to true, when accessing FHIR resources, the consent headers will be verified against consents given by patients. See the ConsentEnforcementVersion for the supported consent headers.", + "type": "boolean" + }, + "consentHeaderHandling": { + "$ref": "ConsentHeaderHandling", + "description": "Optional. Different options to configure the behaviour of the server when handling the `X-Consent-Scope` header." + }, + "enforcedAdminConsents": { + "description": "Output only. The versioned names of the enforced admin Consent resource(s), in the format `projects/{project_id}/locations/{location}/datasets/{dataset_id}/fhirStores/{fhir_store_id}/fhir/Consent/{resource_id}/_history/{version_id}`. For FHIR stores with `disable_resource_versioning=true`, the format is `projects/{project_id}/locations/{location}/datasets/{dataset_id}/fhirStores/{fhir_store_id}/fhir/Consent/{resource_id}`. This field can only be updated using ApplyAdminConsents.", + "items": { + "type": "string" + }, + "readOnly": true, + "type": "array" + }, + "version": { + "description": "Required. Specifies which consent enforcement version is being used for this FHIR store. This field can only be set once by either CreateFhirStore or UpdateFhirStore. After that, you must call ApplyConsents to change the version.", + "enum": [ + "CONSENT_ENFORCEMENT_VERSION_UNSPECIFIED", + "V1" + ], + "enumDescriptions": [ + "Users must specify an enforcement version or an error is returned.", + "Enforcement version 1. See the [FHIR Consent resources in the Cloud Healthcare API](https://cloud.google.com/healthcare-api/docs/fhir-consent) guide for more details." + ], + "type": "string" + } + }, + "type": "object" + }, + "ConsentErrors": { + "description": "The Consent resource name and error.", + "id": "ConsentErrors", + "properties": { + "error": { + "$ref": "Status", + "description": "The error code and message." + }, + "name": { + "description": "The versioned name of the admin Consent resource, in the format `projects/{project_id}/locations/{location}/datasets/{dataset_id}/fhirStores/{fhir_store_id}/fhir/Consent/{resource_id}/_history/{version_id}`. For FHIR stores with `disable_resource_versioning=true`, the format is `projects/{project_id}/locations/{location}/datasets/{dataset_id}/fhirStores/{fhir_store_id}/fhir/Consent/{resource_id}`.", + "type": "string" + } + }, + "type": "object" + }, "ConsentEvaluation": { "description": "The detailed evaluation of a particular Consent.", "id": "ConsentEvaluation", @@ -5303,6 +5667,27 @@ }, "type": "object" }, + "ConsentHeaderHandling": { + "description": "How the server handles the consent header.", + "id": "ConsentHeaderHandling", + "properties": { + "profile": { + "description": "Optional. Specifies the default server behavior when the header is empty. If not specified, the `ScopeProfile.PERMIT_EMPTY_SCOPE` option is used.", + "enum": [ + "SCOPE_PROFILE_UNSPECIFIED", + "PERMIT_EMPTY_SCOPE", + "REQUIRED_ON_READ" + ], + "enumDescriptions": [ + "If not specified, the default value `PERMIT_EMPTY_SCOPE` is used.", + "When no consent scopes are provided (for example, if there's an empty or missing header), then consent check is disabled, similar to when `access_enforced` is `false`. You can use audit logs to differentiate these two cases by looking at the value of `protopayload.metadata.consentMode`. 
If consent scopes are present, they must be valid and within the allowed limits, otherwise the request will be rejected with a `4xx` code.", + "The consent header must be non-empty when performing read and search operations, otherwise the request is rejected with a `4xx` code. Additionally, invalid consent scopes or scopes exceeding the allowed limits are rejected." + ], + "type": "string" + } + }, + "type": "object" + }, "ConsentList": { "description": "List of resource names of Consent resources.", "id": "ConsentList", @@ -5412,7 +5797,7 @@ "properties": { "config": { "$ref": "DeidentifyConfig", - "description": "The configuration to use when de-identifying resources that are added to this store." + "description": "Optional. The configuration to use when de-identifying resources that are added to this store." }, "store": { "description": "Optional. The full resource name of a Cloud Healthcare FHIR store, for example, `projects/{project_id}/locations/{location_id}/datasets/{dataset_id}/fhirStores/{fhir_store_id}`.", "type": "string" } }, @@ -5427,22 +5812,22 @@ "properties": { "dicom": { "$ref": "DicomConfig", - "description": "Configures de-id of application/DICOM content." + "description": "Optional. Configures de-id of application/DICOM content." }, "fhir": { "$ref": "FhirConfig", - "description": "Configures de-id of application/FHIR content." + "description": "Optional. Configures de-id of application/FHIR content." }, "image": { "$ref": "ImageConfig", - "description": "Configures de-identification of image pixels wherever they are found in the source_dataset." + "description": "Optional. Configures de-identification of image pixels wherever they are found in the source_dataset." }, "text": { "$ref": "TextConfig", - "description": "Configures de-identification of text wherever it is found in the source_dataset." + "description": "Optional. Configures de-identification of text wherever it is found in the source_dataset." }, "useRegionalDataProcessing": { - "description": "Ensures in-flight data remains in the region of origin during de-identification. The default value is false. Using this option results in a significant reduction of throughput, and is not compatible with `LOCATION` or `ORGANIZATION_NAME` infoTypes. `LOCATION` must be excluded within TextConfig, and must also be excluded within ImageConfig if image redaction is required.", + "description": "Optional. Ensures in-flight data remains in the region of origin during de-identification. The default value is false. Using this option results in a significant reduction of throughput, and is not compatible with `LOCATION` or `ORGANIZATION_NAME` infoTypes. `LOCATION` must be excluded within TextConfig, and must also be excluded within ImageConfig if image redaction is required.", "type": "boolean" } }, @@ -5554,7 +5939,7 @@ "description": "List of tags to remove. Keep all other tags." }, "skipIdRedaction": { - "description": "If true, skip replacing StudyInstanceUID, SeriesInstanceUID, SOPInstanceUID, and MediaStorageSOPInstanceUID and leave them untouched. The Cloud Healthcare API regenerates these UIDs by default based on the DICOM Standard's reasoning: \"Whilst these UIDs cannot be mapped directly to an individual out of context, given access to the original images, or to a database of the original images containing the UIDs, it would be possible to recover the individual's identity.\" http://dicom.nema.org/medical/dicom/current/output/chtml/part15/sect_E.3.9.html", + "description": "Optional. 
If true, skip replacing StudyInstanceUID, SeriesInstanceUID, SOPInstanceUID, and MediaStorageSOPInstanceUID and leave them untouched. The Cloud Healthcare API regenerates these UIDs by default based on the DICOM Standard's reasoning: \"Whilst these UIDs cannot be mapped directly to an individual out of context, given access to the original images, or to a database of the original images containing the UIDs, it would be possible to recover the individual's identity.\" http://dicom.nema.org/medical/dicom/current/output/chtml/part15/sect_E.3.9.html", "type": "boolean" } }, @@ -5807,6 +6192,128 @@ }, "type": "object" }, + "ExplainDataAccessConsentInfo": { + "description": "The enforcing consent's metadata.", + "id": "ExplainDataAccessConsentInfo", + "properties": { + "cascadeOrigins": { + "description": "The compartment base resources that matched a cascading policy. Each resource has the following format: `projects/{project_id}/locations/{location_id}/datasets/{dataset_id}/fhirStores/{fhir_store_id}/fhir/{resource_type}/{resource_id}`", + "items": { + "type": "string" + }, + "type": "array" + }, + "consentResource": { + "description": "The resource name of this consent resource, in the format: `projects/{project_id}/locations/{location}/datasets/{dataset_id}/fhirStores/{fhir_store_id}/fhir/Consent/{resource_id}`.", + "type": "string" + }, + "enforcementTime": { + "description": "Last enforcement timestamp of this consent resource.", + "format": "google-datetime", + "type": "string" + }, + "matchingAccessorScopes": { + "description": "A list of all the matching accessor scopes of this consent policy that enforced ExplainDataAccessConsentScope.accessor_scope.", + "items": { + "$ref": "ConsentAccessorScope" + }, + "type": "array" + }, + "patientConsentOwner": { + "description": "The patient owning the consent (only applicable for patient consents), in the format: `projects/{project_id}/locations/{location_id}/datasets/{dataset_id}/fhirStores/{fhir_store_id}/fhir/Patient/{patient_id}`", + "type": "string" + }, + "type": { + "description": "The policy type of consent resource (e.g. PATIENT, ADMIN).", + "enum": [ + "CONSENT_POLICY_TYPE_UNSPECIFIED", + "CONSENT_POLICY_TYPE_PATIENT", + "CONSENT_POLICY_TYPE_ADMIN" + ], + "enumDescriptions": [ + "Unspecified policy type.", + "Consent represents a patient consent.", + "Consent represents an admin consent." + ], + "type": "string" + }, + "variants": { + "description": "The consent's variant combinations. A single consent may have multiple variants.", + "items": { + "enum": [ + "CONSENT_VARIANT_UNSPECIFIED", + "CONSENT_VARIANT_STANDARD", + "CONSENT_VARIANT_CASCADE" + ], + "enumDescriptions": [ + "Consent variant unspecified.", + "Consent is a standard patient or admin consent.", + "Consent is a cascading consent." + ], + "type": "string" + }, + "type": "array" + } + }, + "type": "object" + }, + "ExplainDataAccessConsentScope": { + "description": "A single consent scope that provides info on who has access to the requested resource scope for a particular purpose and environment, enforced by which consent.", + "id": "ExplainDataAccessConsentScope", + "properties": { + "accessorScope": { + "$ref": "ConsentAccessorScope", + "description": "The accessor scope that describes who can access, for what purpose, and in which environment." 
+ }, + "decision": { + "description": "Whether the current consent scope is permitted or denied access on the requested resource.", + "enum": [ + "CONSENT_DECISION_TYPE_UNSPECIFIED", + "CONSENT_DECISION_TYPE_PERMIT", + "CONSENT_DECISION_TYPE_DENY" + ], + "enumDescriptions": [ + "Unspecified consent decision type.", + "Consent permitted access.", + "Consent denied access." + ], + "type": "string" + }, + "enforcingConsents": { + "description": "Metadata of the consent resources that enforce the consent scope's access.", + "items": { + "$ref": "ExplainDataAccessConsentInfo" + }, + "type": "array" + }, + "exceptions": { + "description": "Other consent scopes that created exceptions within this scope.", + "items": { + "$ref": "ExplainDataAccessConsentScope" + }, + "type": "array" + } + }, + "type": "object" + }, + "ExplainDataAccessResponse": { + "description": "List of consent scopes that are applicable to the explained access on a given resource.", + "id": "ExplainDataAccessResponse", + "properties": { + "consentScopes": { + "description": "List of applicable consent scopes. Sorted in order of actor such that scopes belonging to the same actor will be adjacent to each other in the list.", + "items": { + "$ref": "ExplainDataAccessConsentScope" + }, + "type": "array" + }, + "warning": { + "description": "Warnings associated with this response. It informs the user about exceeded scope limit errors.", + "type": "string" + } + }, + "type": "object" + }, "ExportDicomDataRequest": { "description": "Exports data from the specified DICOM store. If a given resource, such as a DICOM object with the same SOPInstance UID, already exists in the output, it is overwritten with the version in the source dataset. Exported DICOM data persists when the DICOM store from which it was exported is deleted.", "id": "ExportDicomDataRequest", @@ -5936,11 +6443,11 @@ "id": "FhirConfig", "properties": { "defaultKeepExtensions": { - "description": "The behaviour for handling FHIR extensions that aren't otherwise specified for de-identification. If true, all extensions are preserved during de-identification by default. If false or unspecified, all extensions are removed during de-identification by default.", + "description": "Optional. The behaviour for handling FHIR extensions that aren't otherwise specified for de-identification. If true, all extensions are preserved during de-identification by default. If false or unspecified, all extensions are removed during de-identification by default.", "type": "boolean" }, "fieldMetadataList": { - "description": "Specifies FHIR paths to match and how to transform them. Any field that is not matched by a FieldMetadata is passed through to the output dataset unmodified. All extensions will be processed according to `default_keep_extensions`.", + "description": "Optional. Specifies FHIR paths to match and how to transform them. Any field that is not matched by a FieldMetadata is passed through to the output dataset unmodified. All extensions will be processed according to `default_keep_extensions`.", "items": { "$ref": "FieldMetadata" }, @@ -5997,6 +6504,10 @@ ], "type": "string" }, + "consentConfig": { + "$ref": "ConsentConfig", + "description": "Optional. Specifies whether this store has consent enforcement. Not available for DSTU2 FHIR version due to absence of Consent resources." + }, "defaultSearchHandlingStrict": { "description": "Optional. 
If true, overrides the default search behavior for this FHIR store to `handling=strict` which returns an error for unrecognized search parameters. If false, uses the FHIR specification default `handling=lenient` which ignores unrecognized search parameters. The handling can always be changed from the default on an individual API call by setting the HTTP header `Prefer: handling=strict` or `Prefer: handling=lenient`. Defaults to false.", "type": "boolean" @@ -6139,7 +6650,7 @@ "id": "FieldMetadata", "properties": { "action": { - "description": "Deidentify action for one field.", + "description": "Optional. Deidentify action for one field.", "enum": [ "ACTION_UNSPECIFIED", "TRANSFORM", @@ -6147,7 +6658,7 @@ "DO_NOT_TRANSFORM" ], "enumDescriptions": [ - "No action specified.", + "No action specified. Defaults to DO_NOT_TRANSFORM.", "Transform the entire field.", "Inspect and transform any found PHI.", "Do not transform." @@ -6155,7 +6666,7 @@ "type": "string" }, "paths": { - "description": "List of paths to FHIR fields to be redacted. Each path is a period-separated list where each component is either a field name or FHIR type name, for example: Patient, HumanName. For \"choice\" types (those defined in the FHIR spec with the form: field[x]) we use two separate components. For example, \"deceasedAge.unit\" is matched by \"Deceased.Age.unit\". Supported types are: AdministrativeGenderCode, Base64Binary, Boolean, Code, Date, DateTime, Decimal, HumanName, Id, Instant, Integer, LanguageCode, Markdown, Oid, PositiveInt, String, UnsignedInt, Uri, Uuid, Xhtml.", + "description": "Optional. List of paths to FHIR fields to be redacted. Each path is a period-separated list where each component is either a field name or FHIR type name, for example: Patient, HumanName. For \"choice\" types (those defined in the FHIR spec with the form: field[x]) we use two separate components. For example, \"deceasedAge.unit\" is matched by \"Deceased.Age.unit\". Supported types are: AdministrativeGenderCode, Base64Binary, Boolean, Code, Date, DateTime, Decimal, HumanName, Id, Instant, Integer, LanguageCode, Markdown, Oid, PositiveInt, String, UnsignedInt, Uri, Uuid, Xhtml.", "items": { "type": "string" }, @@ -6574,7 +7085,7 @@ "id": "ImageConfig", "properties": { "textRedactionMode": { - "description": "Determines how to redact text from image.", + "description": "Optional. Determines how to redact text from image.", "enum": [ "TEXT_REDACTION_MODE_UNSPECIFIED", "REDACT_ALL_TEXT", @@ -6682,7 +7193,7 @@ "description": "Config for date shift." }, "infoTypes": { - "description": "InfoTypes to apply this transformation to. If this is not specified, the transformation applies to any info_type.", + "description": "Optional. InfoTypes to apply this transformation to. If this is not specified, the transformation applies to any info_type.", "items": { "type": "string" }, @@ -7187,7 +7698,7 @@ "description": "Optional. Schemas used to parse messages in this store, if schematized parsing is desired." }, "segmentTerminator": { - "description": "Byte(s) to use as the segment terminator. If this is unset, '\\r' is used as segment terminator, matching the HL7 version 2 specification.", + "description": "Optional. Byte(s) to use as the segment terminator. 
If this is unset, '\\r' is used as segment terminator, matching the HL7 version 2 specification.", "format": "byte", "type": "string" }, @@ -7225,6 +7736,20 @@ }, "type": "object" }, + "PatientScope": { + "description": "Apply consents given by a list of patients.", + "id": "PatientScope", + "properties": { + "patientIds": { + "description": "Optional. The list of patient IDs whose Consent resources will be enforced. At most 10,000 patients can be specified. An empty list is equivalent to all patients (meaning the entire FHIR store).", + "items": { + "type": "string" + }, + "type": "array" + } + }, + "type": "object" + }, "Policy": { "description": "An Identity and Access Management (IAM) policy, which specifies access controls for Google Cloud resources. A `Policy` is a collection of `bindings`. A `binding` binds one or more `members`, or principals, to a single `role`. Principals can be user accounts, service accounts, Google groups, and domains (such as G Suite). A `role` is a named list of permissions; each `role` can be an IAM predefined role or a user-created custom role. For some types of Google Cloud resources, a `binding` can also specify a `condition`, which is a logical expression that allows access to a resource only if the expression evaluates to `true`. A condition can add constraints based on attributes of the request, the resource, or both. To learn which resources support conditions in their IAM policies, see the [IAM documentation](https://cloud.google.com/iam/help/conditions/resource-policies). **JSON example:** ``` { \"bindings\": [ { \"role\": \"roles/resourcemanager.organizationAdmin\", \"members\": [ \"user:mike@example.com\", \"group:admins@example.com\", \"domain:google.com\", \"serviceAccount:my-project-id@appspot.gserviceaccount.com\" ] }, { \"role\": \"roles/resourcemanager.organizationViewer\", \"members\": [ \"user:eve@example.com\" ], \"condition\": { \"title\": \"expirable access\", \"description\": \"Does not grant access after Sep 2020\", \"expression\": \"request.time < timestamp('2020-10-01T00:00:00.000Z')\", } } ], \"etag\": \"BwWWja0YfJA=\", \"version\": 3 } ``` **YAML example:** ``` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3 ``` For a description of IAM and its features, see the [IAM documentation](https://cloud.google.com/iam/docs/).", "id": "Policy", @@ -7951,7 +8476,7 @@ "id": "TagFilterList", "properties": { "tags": { - "description": "Tags to be filtered. Tags must be DICOM Data Elements, File Meta Elements, or Directory Structuring Elements, as defined at: http://dicom.nema.org/medical/dicom/current/output/html/part06.html#table_6-1,. They may be provided by \"Keyword\" or \"Tag\". For example \"PatientID\", \"00100010\".", + "description": "Optional. Tags to be filtered. Tags must be DICOM Data Elements, File Meta Elements, or Directory Structuring Elements, as defined at: http://dicom.nema.org/medical/dicom/current/output/html/part06.html#table_6-1,. They may be provided by \"Keyword\" or \"Tag\". 
For example \"PatientID\", \"00100010\".", "items": { "type": "string" }, @@ -7992,21 +8517,21 @@ "id": "TextConfig", "properties": { "additionalTransformations": { - "description": "Transformations to apply to the detected data, overridden by `exclude_info_types`.", + "description": "Optional. Transformations to apply to the detected data, overridden by `exclude_info_types`.", "items": { "$ref": "InfoTypeTransformation" }, "type": "array" }, "excludeInfoTypes": { - "description": "InfoTypes to skip transforming, overriding `additional_transformations`.", + "description": "Optional. InfoTypes to skip transforming, overriding `additional_transformations`.", "items": { "type": "string" }, "type": "array" }, "transformations": { - "description": "The transformations to apply to the detected data. Deprecated. Use `additional_transformations` instead.", + "description": "Optional. The transformations to apply to the detected data. Deprecated. Use `additional_transformations` instead.", "items": { "$ref": "InfoTypeTransformation" }, @@ -8061,6 +8586,21 @@ }, "type": "object" }, + "TimeRange": { + "description": "Apply consents given by patients whose most recent consent changes are in the time range. Note that after identifying these patients, the server applies all Consent resources given by those patients, not just the Consent resources within the timestamp in the range.", + "id": "TimeRange", + "properties": { + "end": { + "description": "Optional. The latest consent change time, in format YYYY-MM-DDThh:mm:ss.sss+zz:zz If not specified, the system uses the time when ApplyConsents was called.", + "type": "string" + }, + "start": { + "description": "Optional. The earliest consent change time, in format YYYY-MM-DDThh:mm:ss.sss+zz:zz If not specified, the system uses the FHIR store creation time.", + "type": "string" + } + }, + "type": "object" + }, "Type": { "description": "A type definition for some HL7v2 type (incl. Segments and Datatypes).", "id": "Type", diff --git a/discovery/googleapis/iam__v1.json b/discovery/googleapis/iam__v1.json index 328d327da..06daccdd3 100644 --- a/discovery/googleapis/iam__v1.json +++ b/discovery/googleapis/iam__v1.json @@ -25,7 +25,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20241017", + "revision": "20241114", "rootUrl": "https://iam.googleapis.com/", "servicePath": "", "title": "Identity and Access Management (IAM) API", @@ -163,14 +163,14 @@ ], "parameters": { "location": { - "description": "The location of the pool to create. Format: `locations/{location}`.", + "description": "Optional. The location of the pool to create. Format: `locations/{location}`.", "location": "path", "pattern": "^locations/[^/]+$", "required": true, "type": "string" }, "workforcePoolId": { - "description": "The ID to use for the pool, which becomes the final component of the resource name. The IDs must be a globally unique string of 6 to 63 lowercase letters, digits, or hyphens. It must start with a letter, and cannot have a trailing hyphen. The prefix `gcp-` is reserved for use by Google, and may not be specified.", + "description": "Optional. The ID to use for the pool, which becomes the final component of the resource name. The IDs must be a globally unique string of 6 to 63 lowercase letters, digits, or hyphens. It must start with a letter, and cannot have a trailing hyphen. 
The prefix `gcp-` is reserved for use by Google, and may not be specified.", "location": "query", "type": "string" } @@ -1329,7 +1329,7 @@ ], "parameters": { "name": { - "description": "Immutable. The resource name of the OauthClient. Format:`projects/{project}/locations/{location}/oauthClients/{oauth_client}`.", + "description": "Immutable. Identifier. The resource name of the OauthClient. Format:`projects/{project}/locations/{location}/oauthClients/{oauth_client}`.", "location": "path", "pattern": "^projects/[^/]+/locations/[^/]+/oauthClients/[^/]+$", "required": true, @@ -1503,7 +1503,7 @@ ], "parameters": { "name": { - "description": "Immutable. The resource name of the OauthClientCredential. Format: `projects/{project}/locations/{location}/oauthClients/{oauth_client}/credentials/{credential}`", + "description": "Immutable. Identifier. The resource name of the OauthClientCredential. Format: `projects/{project}/locations/{location}/oauthClients/{oauth_client}/credentials/{credential}`", "location": "path", "pattern": "^projects/[^/]+/locations/[^/]+/oauthClients/[^/]+/credentials/[^/]+$", "required": true, @@ -3601,14 +3601,14 @@ }, "clientSecret": { "$ref": "GoogleIamAdminV1WorkforcePoolProviderOidcClientSecret", - "description": "The optional client secret. Required to enable Authorization Code flow for web sign-in." + "description": "Optional. The optional client secret. Required to enable Authorization Code flow for web sign-in." }, "issuerUri": { "description": "Required. The OIDC issuer URI. Must be a valid URI using the `https` scheme.", "type": "string" }, "jwksJson": { - "description": "OIDC JWKs in JSON String format. For details on the definition of a JWK, see https://tools.ietf.org/html/rfc7517. If not set, the `jwks_uri` from the discovery document(fetched from the .well-known path of the `issuer_uri`) will be used. Currently, RSA and EC asymmetric keys are supported. The JWK must use following format and include only the following fields: { \"keys\": [ { \"kty\": \"RSA/EC\", \"alg\": \"\", \"use\": \"sig\", \"kid\": \"\", \"n\": \"\", \"e\": \"\", \"x\": \"\", \"y\": \"\", \"crv\": \"\" } ] }", + "description": "Optional. OIDC JWKs in JSON String format. For details on the definition of a JWK, see https://tools.ietf.org/html/rfc7517. If not set, the `jwks_uri` from the discovery document(fetched from the .well-known path of the `issuer_uri`) will be used. Currently, RSA and EC asymmetric keys are supported. The JWK must use following format and include only the following fields: { \"keys\": [ { \"kty\": \"RSA/EC\", \"alg\": \"\", \"use\": \"sig\", \"kid\": \"\", \"n\": \"\", \"e\": \"\", \"x\": \"\", \"y\": \"\", \"crv\": \"\" } ] }", "type": "string" }, "webSsoConfig": { @@ -3634,7 +3634,7 @@ "id": "GoogleIamAdminV1WorkforcePoolProviderOidcClientSecretValue", "properties": { "plainText": { - "description": "Input only. The plain text of the client secret value. For security reasons, this field is only used for input and will never be populated in any response.", + "description": "Optional. Input only. The plain text of the client secret value. For security reasons, this field is only used for input and will never be populated in any response.", "type": "string" }, "thumbprint": { @@ -3650,7 +3650,7 @@ "id": "GoogleIamAdminV1WorkforcePoolProviderOidcWebSsoConfig", "properties": { "additionalScopes": { - "description": "Additional scopes to request for in the OIDC authentication request on top of scopes requested by default. 
By default, the `openid`, `profile` and `email` scopes that are supported by the identity provider are requested. Each additional scope may be at most 256 characters. A maximum of 10 additional scopes may be configured.", + "description": "Optional. Additional scopes to request for in the OIDC authentication request on top of scopes requested by default. By default, the `openid`, `profile` and `email` scopes that are supported by the identity provider are requested. Each additional scope may be at most 256 characters. A maximum of 10 additional scopes may be configured.", "items": { "type": "string" }, @@ -4099,7 +4099,7 @@ "type": "string" }, "name": { - "description": "Immutable. The resource name of the OauthClient. Format:`projects/{project}/locations/{location}/oauthClients/{oauth_client}`.", + "description": "Immutable. Identifier. The resource name of the OauthClient. Format:`projects/{project}/locations/{location}/oauthClients/{oauth_client}`.", "type": "string" }, "state": { @@ -4138,7 +4138,7 @@ "type": "string" }, "name": { - "description": "Immutable. The resource name of the OauthClientCredential. Format: `projects/{project}/locations/{location}/oauthClients/{oauth_client}/credentials/{credential}`", + "description": "Immutable. Identifier. The resource name of the OauthClientCredential. Format: `projects/{project}/locations/{location}/oauthClients/{oauth_client}/credentials/{credential}`", "type": "string" } }, @@ -4149,7 +4149,7 @@ "id": "Oidc", "properties": { "allowedAudiences": { - "description": "Acceptable values for the `aud` field (audience) in the OIDC token. Token exchange requests are rejected if the token audience does not match one of the configured values. Each audience may be at most 256 characters. A maximum of 10 audiences may be configured. If this list is empty, the OIDC token audience must be equal to the full canonical resource name of the WorkloadIdentityPoolProvider, with or without the HTTPS prefix. For example: ``` //iam.googleapis.com/projects//locations//workloadIdentityPools//providers/ https://iam.googleapis.com/projects//locations//workloadIdentityPools//providers/ ```", + "description": "Optional. Acceptable values for the `aud` field (audience) in the OIDC token. Token exchange requests are rejected if the token audience does not match one of the configured values. Each audience may be at most 256 characters. A maximum of 10 audiences may be configured. If this list is empty, the OIDC token audience must be equal to the full canonical resource name of the WorkloadIdentityPoolProvider, with or without the HTTPS prefix. For example: ``` //iam.googleapis.com/projects//locations//workloadIdentityPools//providers/ https://iam.googleapis.com/projects//locations//workloadIdentityPools//providers/ ```", "items": { "type": "string" }, @@ -4211,7 +4211,7 @@ "type": "string" }, "cancelRequested": { - "description": "Output only. Identifies whether the user has requested cancellation of the operation. Operations that have been cancelled successfully have Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Output only. Identifies whether the user has requested cancellation of the operation. Operations that have been cancelled successfully have google.longrunning.Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "readOnly": true, "type": "boolean" }, @@ -5000,15 +5000,15 @@ "description": "Optional. 
Configure access restrictions on the workforce pool users. This is an optional field. If specified web sign-in can be restricted to given set of services or programmatic sign-in can be disabled for pool users." }, "description": { - "description": "A user-specified description of the pool. Cannot exceed 256 characters.", + "description": "Optional. A user-specified description of the pool. Cannot exceed 256 characters.", "type": "string" }, "disabled": { - "description": "Disables the workforce pool. You cannot use a disabled pool to exchange tokens, or use existing tokens to access resources. If the pool is re-enabled, existing tokens grant access again.", + "description": "Optional. Disables the workforce pool. You cannot use a disabled pool to exchange tokens, or use existing tokens to access resources. If the pool is re-enabled, existing tokens grant access again.", "type": "boolean" }, "displayName": { - "description": "A user-specified display name of the pool in Google Cloud Console. Cannot exceed 32 characters.", + "description": "Optional. A user-specified display name of the pool in Google Cloud Console. Cannot exceed 32 characters.", "type": "string" }, "expireTime": { @@ -5027,7 +5027,7 @@ "type": "string" }, "sessionDuration": { - "description": "Duration that the Google Cloud access tokens, console sign-in sessions, and `gcloud` sign-in sessions from this pool are valid. Must be greater than 15 minutes (900s) and less than 12 hours (43200s). If `session_duration` is not configured, minted credentials have a default duration of one hour (3600s). For SAML providers, the lifetime of the token is the minimum of the `session_duration` and the `SessionNotOnOrAfter` claim in the SAML assertion.", + "description": "Optional. Duration that the Google Cloud access tokens, console sign-in sessions, and `gcloud` sign-in sessions from this pool are valid. Must be greater than 15 minutes (900s) and less than 12 hours (43200s). If `session_duration` is not configured, minted credentials have a default duration of one hour (3600s). For SAML providers, the lifetime of the token is the minimum of the `session_duration` and the `SessionNotOnOrAfter` claim in the SAML assertion.", "format": "google-duration", "type": "string" }, @@ -5054,7 +5054,7 @@ "id": "WorkforcePoolProvider", "properties": { "attributeCondition": { - "description": "A [Common Expression Language](https://opensource.google/projects/cel) expression, in plain text, to restrict what otherwise valid authentication credentials issued by the provider should not be accepted. The expression must output a boolean representing whether to allow the federation. The following keywords may be referenced in the expressions: * `assertion`: JSON representing the authentication credential issued by the provider. * `google`: The Google attributes mapped from the assertion in the `attribute_mappings`. `google.profile_photo`, `google.display_name` and `google.posix_username` are not supported. * `attribute`: The custom attributes mapped from the assertion in the `attribute_mappings`. The maximum length of the attribute condition expression is 4096 characters. If unspecified, all valid authentication credentials will be accepted. The following example shows how to only allow credentials with a mapped `google.groups` value of `admins`: ``` \"'admins' in google.groups\" ```", + "description": "Optional. 
A [Common Expression Language](https://opensource.google/projects/cel) expression, in plain text, to restrict what otherwise valid authentication credentials issued by the provider should not be accepted. The expression must output a boolean representing whether to allow the federation. The following keywords may be referenced in the expressions: * `assertion`: JSON representing the authentication credential issued by the provider. * `google`: The Google attributes mapped from the assertion in the `attribute_mappings`. `google.profile_photo`, `google.display_name` and `google.posix_username` are not supported. * `attribute`: The custom attributes mapped from the assertion in the `attribute_mappings`. The maximum length of the attribute condition expression is 4096 characters. If unspecified, all valid authentication credentials will be accepted. The following example shows how to only allow credentials with a mapped `google.groups` value of `admins`: ``` \"'admins' in google.groups\" ```", "type": "string" }, "attributeMapping": { @@ -5065,15 +5065,15 @@ "type": "object" }, "description": { - "description": "A user-specified description of the provider. Cannot exceed 256 characters.", + "description": "Optional. A user-specified description of the provider. Cannot exceed 256 characters.", "type": "string" }, "disabled": { - "description": "Disables the workforce pool provider. You cannot use a disabled provider to exchange tokens. However, existing tokens still grant access.", + "description": "Optional. Disables the workforce pool provider. You cannot use a disabled provider to exchange tokens. However, existing tokens still grant access.", "type": "boolean" }, "displayName": { - "description": "A user-specified display name for the provider. Cannot exceed 32 characters.", + "description": "Optional. A user-specified display name for the provider. Cannot exceed 32 characters.", "type": "string" }, "expireTime": { @@ -5171,15 +5171,15 @@ "id": "WorkloadIdentityPool", "properties": { "description": { - "description": "A description of the pool. Cannot exceed 256 characters.", + "description": "Optional. A description of the pool. Cannot exceed 256 characters.", "type": "string" }, "disabled": { - "description": "Whether the pool is disabled. You cannot use a disabled pool to exchange tokens, or use existing tokens to access resources. If the pool is re-enabled, existing tokens grant access again.", + "description": "Optional. Whether the pool is disabled. You cannot use a disabled pool to exchange tokens, or use existing tokens to access resources. If the pool is re-enabled, existing tokens grant access again.", "type": "boolean" }, "displayName": { - "description": "A display name for the pool. Cannot exceed 32 characters.", + "description": "Optional. A display name for the pool. Cannot exceed 32 characters.", "type": "string" }, "expireTime": { @@ -5222,14 +5222,14 @@ "id": "WorkloadIdentityPoolProvider", "properties": { "attributeCondition": { - "description": "[A Common Expression Language](https://opensource.google/projects/cel) expression, in plain text, to restrict what otherwise valid authentication credentials issued by the provider should not be accepted. The expression must output a boolean representing whether to allow the federation. The following keywords may be referenced in the expressions: * `assertion`: JSON representing the authentication credential issued by the provider. * `google`: The Google attributes mapped from the assertion in the `attribute_mappings`. 
* `attribute`: The custom attributes mapped from the assertion in the `attribute_mappings`. The maximum length of the attribute condition expression is 4096 characters. If unspecified, all valid authentication credential are accepted. The following example shows how to only allow credentials with a mapped `google.groups` value of `admins`: ``` \"'admins' in google.groups\" ```", + "description": "Optional. [A Common Expression Language](https://opensource.google/projects/cel) expression, in plain text, to restrict what otherwise valid authentication credentials issued by the provider should not be accepted. The expression must output a boolean representing whether to allow the federation. The following keywords may be referenced in the expressions: * `assertion`: JSON representing the authentication credential issued by the provider. * `google`: The Google attributes mapped from the assertion in the `attribute_mappings`. * `attribute`: The custom attributes mapped from the assertion in the `attribute_mappings`. The maximum length of the attribute condition expression is 4096 characters. If unspecified, all valid authentication credential are accepted. The following example shows how to only allow credentials with a mapped `google.groups` value of `admins`: ``` \"'admins' in google.groups\" ```", "type": "string" }, "attributeMapping": { "additionalProperties": { "type": "string" }, - "description": " Maps attributes from authentication credentials issued by an external identity provider to Google Cloud attributes, such as `subject` and `segment`. Each key must be a string specifying the Google Cloud IAM attribute to map to. The following keys are supported: * `google.subject`: The principal IAM is authenticating. You can reference this value in IAM bindings. This is also the subject that appears in Cloud Logging logs. Cannot exceed 127 bytes. * `google.groups`: Groups the external identity belongs to. You can grant groups access to resources using an IAM `principalSet` binding; access applies to all members of the group. You can also provide custom attributes by specifying `attribute.{custom_attribute}`, where `{custom_attribute}` is the name of the custom attribute to be mapped. You can define a maximum of 50 custom attributes. The maximum length of a mapped attribute key is 100 characters, and the key may only contain the characters [a-z0-9_]. You can reference these attributes in IAM policies to define fine-grained access for a workload to Google Cloud resources. For example: * `google.subject`: `principal://iam.googleapis.com/projects/{project}/locations/{location}/workloadIdentityPools/{pool}/subject/{value}` * `google.groups`: `principalSet://iam.googleapis.com/projects/{project}/locations/{location}/workloadIdentityPools/{pool}/group/{value}` * `attribute.{custom_attribute}`: `principalSet://iam.googleapis.com/projects/{project}/locations/{location}/workloadIdentityPools/{pool}/attribute.{custom_attribute}/{value}` Each value must be a [Common Expression Language] (https://opensource.google/projects/cel) function that maps an identity provider credential to the normalized attribute specified by the corresponding map key. You can use the `assertion` keyword in the expression to access a JSON representation of the authentication credential issued by the provider. The maximum length of an attribute mapping expression is 2048 characters. When evaluated, the total size of all mapped attributes must not exceed 8KB. 
For AWS providers, if no attribute mapping is defined, the following default mapping applies: ``` { \"google.subject\":\"assertion.arn\", \"attribute.aws_role\": \"assertion.arn.contains('assumed-role')\" \" ? assertion.arn.extract('{account_arn}assumed-role/')\" \" + 'assumed-role/'\" \" + assertion.arn.extract('assumed-role/{role_name}/')\" \" : assertion.arn\", } ``` If any custom attribute mappings are defined, they must include a mapping to the `google.subject` attribute. For OIDC providers, you must supply a custom mapping, which must include the `google.subject` attribute. For example, the following maps the `sub` claim of the incoming credential to the `subject` attribute on a Google token: ``` {\"google.subject\": \"assertion.sub\"} ```", + "description": "Optional. Maps attributes from authentication credentials issued by an external identity provider to Google Cloud attributes, such as `subject` and `segment`. Each key must be a string specifying the Google Cloud IAM attribute to map to. The following keys are supported: * `google.subject`: The principal IAM is authenticating. You can reference this value in IAM bindings. This is also the subject that appears in Cloud Logging logs. Cannot exceed 127 bytes. * `google.groups`: Groups the external identity belongs to. You can grant groups access to resources using an IAM `principalSet` binding; access applies to all members of the group. You can also provide custom attributes by specifying `attribute.{custom_attribute}`, where `{custom_attribute}` is the name of the custom attribute to be mapped. You can define a maximum of 50 custom attributes. The maximum length of a mapped attribute key is 100 characters, and the key may only contain the characters [a-z0-9_]. You can reference these attributes in IAM policies to define fine-grained access for a workload to Google Cloud resources. For example: * `google.subject`: `principal://iam.googleapis.com/projects/{project}/locations/{location}/workloadIdentityPools/{pool}/subject/{value}` * `google.groups`: `principalSet://iam.googleapis.com/projects/{project}/locations/{location}/workloadIdentityPools/{pool}/group/{value}` * `attribute.{custom_attribute}`: `principalSet://iam.googleapis.com/projects/{project}/locations/{location}/workloadIdentityPools/{pool}/attribute.{custom_attribute}/{value}` Each value must be a [Common Expression Language] (https://opensource.google/projects/cel) function that maps an identity provider credential to the normalized attribute specified by the corresponding map key. You can use the `assertion` keyword in the expression to access a JSON representation of the authentication credential issued by the provider. The maximum length of an attribute mapping expression is 2048 characters. When evaluated, the total size of all mapped attributes must not exceed 8KB. For AWS providers, if no attribute mapping is defined, the following default mapping applies: ``` { \"google.subject\":\"assertion.arn\", \"attribute.aws_role\": \"assertion.arn.contains('assumed-role')\" \" ? assertion.arn.extract('{account_arn}assumed-role/')\" \" + 'assumed-role/'\" \" + assertion.arn.extract('assumed-role/{role_name}/')\" \" : assertion.arn\", } ``` If any custom attribute mappings are defined, they must include a mapping to the `google.subject` attribute. For OIDC providers, you must supply a custom mapping, which must include the `google.subject` attribute. 
For example, the following maps the `sub` claim of the incoming credential to the `subject` attribute on a Google token: ``` {\"google.subject\": \"assertion.sub\"} ```", "type": "object" }, "aws": { @@ -5237,15 +5237,15 @@ "description": "An Amazon Web Services identity provider." }, "description": { - "description": "A description for the provider. Cannot exceed 256 characters.", + "description": "Optional. A description for the provider. Cannot exceed 256 characters.", "type": "string" }, "disabled": { - "description": "Whether the provider is disabled. You cannot use a disabled provider to exchange tokens. However, existing tokens still grant access.", + "description": "Optional. Whether the provider is disabled. You cannot use a disabled provider to exchange tokens. However, existing tokens still grant access.", "type": "boolean" }, "displayName": { - "description": "A display name for the provider. Cannot exceed 32 characters.", + "description": "Optional. A display name for the provider. Cannot exceed 32 characters.", "type": "string" }, "expireTime": { diff --git a/discovery/googleapis/iam__v2.json b/discovery/googleapis/iam__v2.json index 9ca266f93..f064ee4ff 100644 --- a/discovery/googleapis/iam__v2.json +++ b/discovery/googleapis/iam__v2.json @@ -25,7 +25,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20240530", + "revision": "20241114", "rootUrl": "https://iam.googleapis.com/", "servicePath": "", "title": "Identity and Access Management (IAM) API", @@ -339,7 +339,7 @@ "type": "string" }, "cancelRequested": { - "description": "Output only. Identifies whether the user has requested cancellation of the operation. Operations that have been cancelled successfully have Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Output only. Identifies whether the user has requested cancellation of the operation. Operations that have been cancelled successfully have google.longrunning.Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "readOnly": true, "type": "boolean" }, diff --git a/discovery/googleapis/iamcredentials__v1.json b/discovery/googleapis/iamcredentials__v1.json index 67b928098..3bc5b4866 100644 --- a/discovery/googleapis/iamcredentials__v1.json +++ b/discovery/googleapis/iamcredentials__v1.json @@ -25,7 +25,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20240227", + "revision": "20241024", "rootUrl": "https://iamcredentials.googleapis.com/", "servicePath": "", "title": "IAM Service Account Credentials API", @@ -169,6 +169,28 @@ "https://www.googleapis.com/auth/cloud-platform" ] }, + "getAllowedLocations": { + "description": "Returns the trust boundary info for a given service account.", + "flatPath": "v1/projects/{projectsId}/serviceAccounts/{serviceAccountsId}/allowedLocations", + "httpMethod": "GET", + "id": "iamcredentials.projects.serviceAccounts.getAllowedLocations", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "Required. 
Resource name of the service account.", + "location": "path", + "pattern": "^projects/[^/]+/serviceAccounts/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+name}/allowedLocations", + "response": { + "$ref": "ServiceAccountAllowedLocations" + } + }, "signBlob": { "description": "Signs a blob using a service account's system-managed private key.", "flatPath": "v1/projects/{projectsId}/serviceAccounts/{serviceAccountsId}:signBlob", @@ -302,6 +324,26 @@ }, "type": "object" }, + "ServiceAccountAllowedLocations": { + "description": "Represents a list of allowed locations for a given service account.", + "id": "ServiceAccountAllowedLocations", + "properties": { + "encodedLocations": { + "description": "Output only. The hex encoded bitmap of the trust boundary locations", + "readOnly": true, + "type": "string" + }, + "locations": { + "description": "Output only. The human readable trust boundary locations. For example, [\"us-central1\", \"europe-west1\"]", + "items": { + "type": "string" + }, + "readOnly": true, + "type": "array" + } + }, + "type": "object" + }, "SignBlobRequest": { "id": "SignBlobRequest", "properties": { diff --git a/discovery/googleapis/iap__v1.json b/discovery/googleapis/iap__v1.json index c36f51dbd..e4d72d8ee 100644 --- a/discovery/googleapis/iap__v1.json +++ b/discovery/googleapis/iap__v1.json @@ -25,7 +25,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20240909", + "revision": "20241202", "rootUrl": "https://iap.googleapis.com/", "servicePath": "", "title": "Cloud Identity-Aware Proxy API", @@ -1245,7 +1245,7 @@ "description": "Properties of the object. Contains field @type with type URL.", "type": "any" }, - "description": "The proto or JSON formatted expected next state of the resource, wrapped in a google.protobuf.Any proto, against which the policy rules are evaluated. Services not integrated with custom org policy can omit this field. Services integrated with custom org policy must populate this field for all requests where the API call changes the state of the resource. Custom org policy backend uses these attributes to enforce custom org policies. When a proto is wrapped, it is generally the One Platform API proto. When a JSON string is wrapped, use `google.protobuf.StringValue` for the inner value. For create operations, GCP service is expected to pass resource from customer request as is. For update/patch operations, GCP service is expected to compute the next state with the patch provided by the user. See go/custom-constraints-org-policy-integration-guide for additional details.", + "description": "The proto or JSON formatted expected next state of the resource, wrapped in a google.protobuf.Any proto, against which the policy rules are evaluated. Services not integrated with custom org policy can omit this field. Services integrated with custom org policy must populate this field for all requests where the API call changes the state of the resource. Custom org policy backend uses these attributes to enforce custom org policies. For create operations, GCP service is expected to pass resource from customer request as is. For update/patch operations, GCP service is expected to compute the next state with the patch provided by the user. 
See go/custom-constraints-org-policy-integration-guide for additional details.", "type": "object" }, "labels": { diff --git a/discovery/googleapis/identitytoolkit__v1.json b/discovery/googleapis/identitytoolkit__v1.json index 2a0c1bb8f..7b253e992 100644 --- a/discovery/googleapis/identitytoolkit__v1.json +++ b/discovery/googleapis/identitytoolkit__v1.json @@ -28,7 +28,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20241017", + "revision": "20241205", "rootUrl": "https://identitytoolkit.googleapis.com/", "servicePath": "", "title": "Identity Toolkit API", @@ -2357,7 +2357,7 @@ "id": "GoogleCloudIdentitytoolkitV1ResetPasswordRequest", "properties": { "email": { - "description": "The email of the account to be modified. Specify this and the old password in order to change an account's password without using an out-of-band code.", + "description": "Optional. The email of the account to be modified. Specify this and the old password in order to change an account's password without using an out-of-band code.", "type": "string" }, "newPassword": { @@ -2373,7 +2373,7 @@ "type": "string" }, "tenantId": { - "description": "The tenant ID of the Identity Platform tenant the account belongs to.", + "description": "Optional. The tenant ID of the Identity Platform tenant the account belongs to.", "type": "string" } }, @@ -2437,7 +2437,7 @@ "type": "object" }, "GoogleCloudIdentitytoolkitV1SendVerificationCodeRequest": { - "description": "Request message for SendVerificationCode. At least one of (`ios_receipt` and `ios_secret`), `recaptcha_token`, or `safety_net_token` must be specified to verify the verification code is being sent on behalf of a real app and not an emulator.", + "description": "Request message for SendVerificationCode. 'captcha_response' is required when reCAPTCHA enterprise is enabled, or otherwise at least one of (`ios_receipt` and `ios_secret`), `recaptcha_token`, or `safety_net_token` must be specified to verify the verification code is being sent on behalf of a real app and not an emulator.", "id": "GoogleCloudIdentitytoolkitV1SendVerificationCodeRequest", "properties": { "autoRetrievalInfo": { @@ -2465,7 +2465,7 @@ "type": "string" }, "iosReceipt": { - "description": "Receipt of successful iOS app token validation. At least one of (`ios_receipt` and `ios_secret`), `recaptcha_token`, or `safety_net_token` must be specified to verify the verification code is being sent on behalf of a real app and not an emulator. This should come from the response of verifyIosClient. If present, the caller should also provide the `ios_secret`, as well as a bundle ID in the `x-ios-bundle-identifier` header, which must match the bundle ID from the verifyIosClient request.", + "description": "Receipt of successful iOS app token validation. At least one of (`ios_receipt` and `ios_secret`), `recaptcha_token`, or `safety_net_token` must be specified to verify the verification code is being sent on behalf of a real app and not an emulator, if 'captcha_response' is not used (reCAPTCHA enterprise is not enabled). This should come from the response of verifyIosClient. If present, the caller should also provide the `ios_secret`, as well as a bundle ID in the `x-ios-bundle-identifier` header, which must match the bundle ID from the verifyIosClient request.", "type": "string" }, "iosSecret": { @@ -2477,15 +2477,15 @@ "type": "string" }, "playIntegrityToken": { - "description": "Android only. 
Used to assert application identity in place of a recaptcha token (and safety_net_token). At least one of (`ios_receipt` and `ios_secret`), `recaptcha_token`, , or `play_integrity_token` must be specified to verify the verification code is being sent on behalf of a real app and not an emulator. A Play Integrity Token can be generated via the [PlayIntegrity API](https://developer.android.com/google/play/integrity) with applying SHA256 to the `phone_number` field as the nonce.", + "description": "Android only. Used to assert application identity in place of a recaptcha token (and safety_net_token). At least one of (`ios_receipt` and `ios_secret`), `recaptcha_token`, , or `play_integrity_token` must be specified to verify the verification code is being sent on behalf of a real app and not an emulator, if 'captcha_response' is not used (reCAPTCHA enterprise is not enabled). A Play Integrity Token can be generated via the [PlayIntegrity API](https://developer.android.com/google/play/integrity) with applying SHA256 to the `phone_number` field as the nonce.", "type": "string" }, "recaptchaToken": { - "description": "Recaptcha token for app verification. At least one of (`ios_receipt` and `ios_secret`), `recaptcha_token`, or `safety_net_token` must be specified to verify the verification code is being sent on behalf of a real app and not an emulator. The recaptcha should be generated by calling getRecaptchaParams and the recaptcha token will be generated on user completion of the recaptcha challenge.", + "description": "Recaptcha token for app verification. At least one of (`ios_receipt` and `ios_secret`), `recaptcha_token`, or `safety_net_token` must be specified to verify the verification code is being sent on behalf of a real app and not an emulator, if 'captcha_response' is not used (reCAPTCHA enterprise is not enabled). The recaptcha should be generated by calling getRecaptchaParams and the recaptcha token will be generated on user completion of the recaptcha challenge.", "type": "string" }, "recaptchaVersion": { - "description": "Optional. The reCAPTCHA version of the reCAPTCHA token in the captcha_response.", + "description": "Optional. The reCAPTCHA version of the reCAPTCHA token in the captcha_response. Required when reCAPTCHA Enterprise is enabled.", "enum": [ "RECAPTCHA_VERSION_UNSPECIFIED", "RECAPTCHA_ENTERPRISE" @@ -2497,7 +2497,7 @@ "type": "string" }, "safetyNetToken": { - "description": "Android only. Used to assert application identity in place of a recaptcha token. At least one of (`ios_receipt` and `ios_secret`), `recaptcha_token`, or `safety_net_token` must be specified to verify the verification code is being sent on behalf of a real app and not an emulator. A SafetyNet Token can be generated via the [SafetyNet Android Attestation API](https://developer.android.com/training/safetynet/attestation.html), with the Base64 encoding of the `phone_number` field as the nonce.", + "description": "Android only. Used to assert application identity in place of a recaptcha token. At least one of (`ios_receipt` and `ios_secret`), `recaptcha_token`, or `safety_net_token` must be specified to verify the verification code is being sent on behalf of a real app and not an emulator, if 'captcha_response' is not used (reCAPTCHA enterprise is not enabled). 
A SafetyNet Token can be generated via the [SafetyNet Android Attestation API](https://developer.android.com/training/safetynet/attestation.html), with the Base64 encoding of the `phone_number` field as the nonce.", "type": "string" }, "tenantId": { diff --git a/discovery/googleapis/identitytoolkit__v2.json b/discovery/googleapis/identitytoolkit__v2.json index 8a1b20b70..cd54903d8 100644 --- a/discovery/googleapis/identitytoolkit__v2.json +++ b/discovery/googleapis/identitytoolkit__v2.json @@ -28,7 +28,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20241017", + "revision": "20241024", "rootUrl": "https://identitytoolkit.googleapis.com/", "servicePath": "", "title": "Identity Toolkit API", @@ -2618,7 +2618,7 @@ "id": "GoogleCloudIdentitytoolkitAdminV2RecaptchaConfig", "properties": { "emailPasswordEnforcementState": { - "description": "The reCAPTCHA config for email/password provider, containing the enforcement status. The email/password provider contains all related user flows protected by reCAPTCHA.", + "description": "The reCAPTCHA config for email/password provider, containing the enforcement status. The email/password provider contains all email related user flows protected by reCAPTCHA.", "enum": [ "RECAPTCHA_PROVIDER_ENFORCEMENT_STATE_UNSPECIFIED", "OFF", @@ -2641,7 +2641,7 @@ "type": "array" }, "phoneEnforcementState": { - "description": "The reCAPTCHA config for phone provider, containing the enforcement status. The phone provider contains all related user flows protected by reCAPTCHA.", + "description": "The reCAPTCHA config for phone provider, containing the enforcement status. The phone provider contains all SMS related user flows protected by reCAPTCHA.", "enum": [ "RECAPTCHA_PROVIDER_ENFORCEMENT_STATE_UNSPECIFIED", "OFF", @@ -2664,7 +2664,7 @@ "type": "array" }, "tollFraudManagedRules": { - "description": "The managed rules for toll fraud provider, containing the enforcement status. The toll fraud provider contains all SMS related user flows.", + "description": "The managed rules for the authentication action based on reCAPTCHA toll fraud risk scores. Toll fraud managed rules will only take effect when the phone_enforcement_state is AUDIT or ENFORCE and use_sms_toll_fraud_protection is true.", "items": { "$ref": "GoogleCloudIdentitytoolkitAdminV2RecaptchaTollFraudManagedRule" }, @@ -2673,6 +2673,14 @@ "useAccountDefender": { "description": "Whether to use the account defender for reCAPTCHA assessment. Defaults to `false`.", "type": "boolean" + }, + "useSmsBotScore": { + "description": "Whether to use the rCE bot score for reCAPTCHA phone provider. Can only be true when the phone_enforcement_state is AUDIT or ENFORCE.", + "type": "boolean" + }, + "useSmsTollFraudProtection": { + "description": "Whether to use the rCE sms toll fraud protection risk score for reCAPTCHA phone provider. Can only be true when the phone_enforcement_state is AUDIT or ENFORCE.", + "type": "boolean" } }, "type": "object" @@ -2729,7 +2737,7 @@ "type": "object" }, "GoogleCloudIdentitytoolkitAdminV2RecaptchaTollFraudManagedRule": { - "description": "The config for a reCAPTCHA toll fraud assessment managed rule. Models a single interval [start_score, end_score]. The start_score is maximum_allowed_score. End score is 1.0.", + "description": "The config for a reCAPTCHA toll fraud assessment managed rule. Models a single interval [start_score, end_score]. The end_score is implicit. It is either the closest smaller end_score (if one is available) or 0. 
Intervals in aggregate span [0, 1] without overlapping.", "id": "GoogleCloudIdentitytoolkitAdminV2RecaptchaTollFraudManagedRule", "properties": { "action": { @@ -2745,7 +2753,7 @@ "type": "string" }, "startScore": { - "description": "The start score (inclusive) for an action. A score of 0.0 indicates the safest request (likely legitimate), whereas 1.0 indicates the riskiest request (likely toll fraud). See https://cloud.google.com/recaptcha-enterprise/docs/sms-fraud-detection#create-assessment-sms.", + "description": "The start score (inclusive) for an action. Must be a value between 0.0 and 1.0, at 11 discrete values; e.g. 0, 0.1, 0.2, 0.3, ... 0.9, 1.0. A score of 0.0 indicates the safest request (likely legitimate), whereas 1.0 indicates the riskiest request (likely toll fraud). See https://cloud.google.com/recaptcha-enterprise/docs/sms-fraud-detection#create-assessment-sms.", "format": "float", "type": "number" } @@ -3371,6 +3379,14 @@ "recaptchaKey": { "description": "The reCAPTCHA Enterprise key resource name, e.g. \"projects/{project}/keys/{key}\". This will only be returned when the reCAPTCHA enforcement state is AUDIT or ENFORCE on at least one of the reCAPTCHA providers.", "type": "string" + }, + "useSmsBotScore": { + "description": "Whether to use the rCE bot score for reCAPTCHA phone provider.", + "type": "boolean" + }, + "useSmsTollFraudProtection": { + "description": "Whether to use the rCE sms toll fraud protection risk score for reCAPTCHA phone provider.", + "type": "boolean" } }, "type": "object" @@ -3548,7 +3564,7 @@ "type": "string" }, "recaptchaVersion": { - "description": "The reCAPTCHA version of the reCAPTCHA token in the captcha_response.", + "description": "The reCAPTCHA version of the reCAPTCHA token in the captcha_response. Required when reCAPTCHA Enterprise is enabled.", "enum": [ "RECAPTCHA_VERSION_UNSPECIFIED", "RECAPTCHA_ENTERPRISE" diff --git a/discovery/googleapis/integrations__v1.json b/discovery/googleapis/integrations__v1.json index a7d6577fa..6599babee 100644 --- a/discovery/googleapis/integrations__v1.json +++ b/discovery/googleapis/integrations__v1.json @@ -25,7 +25,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20241003", + "revision": "20241209", "rootUrl": "https://integrations.googleapis.com/", "servicePath": "", "title": "Application Integration API", @@ -216,6 +216,34 @@ "resources": { "locations": { "methods": { + "generateOpenApiSpec": { + "description": "Generate OpenAPI spec for the requested integrations and api triggers", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}:generateOpenApiSpec", + "httpMethod": "POST", + "id": "integrations.projects.locations.generateOpenApiSpec", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "Required. Project and location from which the integrations should be fetched. 
Format: projects/{project}/location/{location}", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+name}:generateOpenApiSpec", + "request": { + "$ref": "GoogleCloudIntegrationsV1alphaGenerateOpenApiSpecRequest" + }, + "response": { + "$ref": "GoogleCloudIntegrationsV1alphaGenerateOpenApiSpecResponse" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, "getClients": { "description": "Gets the client configuration for the given project and location resource name", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/clients", @@ -1417,7 +1445,7 @@ ], "parameters": { "name": { - "description": "Required. Next ID: 3 The execution resource name. Format: projects/{gcp_project_id}/locations/{location}/integrations/{integration}/executions/{execution_id}", + "description": "Required. Next ID: 6 The execution resource name. Format: projects/{gcp_project_id}/locations/{location}/integrations/{integration}/executions/{execution_id}", "location": "path", "pattern": "^projects/[^/]+/locations/[^/]+/integrations/[^/]+/executions/[^/]+$", "required": true, @@ -8105,6 +8133,20 @@ "description": "If this execution is a replay of another execution, then this field contains the original execution id.", "type": "string" }, + "replayMode": { + "description": "Replay mode for the execution", + "enum": [ + "REPLAY_MODE_UNSPECIFIED", + "REPLAY_MODE_FROM_BEGINNING", + "REPLAY_MODE_POINT_OF_FAILURE" + ], + "enumDescriptions": [ + "", + "Replay the original execution from the beginning.", + "Replay the execution from the first failed task." + ], + "type": "string" + }, "replayReason": { "description": "reason for replay", "type": "string" @@ -8865,11 +8907,8 @@ "type": "string" }, "inputVariables": { - "description": "Optional. List of input variables for the api trigger.", - "items": { - "type": "string" - }, - "type": "array" + "$ref": "EnterpriseCrmFrontendsEventbusProtoTriggerConfigVariables", + "description": "Optional. List of input variables for the api trigger." }, "label": { "description": "The user created label for a particular trigger.", @@ -8890,11 +8929,8 @@ "type": "string" }, "outputVariables": { - "description": "Optional. List of output variables for the api trigger.", - "items": { - "type": "string" - }, - "type": "array" + "$ref": "EnterpriseCrmFrontendsEventbusProtoTriggerConfigVariables", + "description": "Optional. List of output variables for the api trigger." }, "pauseWorkflowExecutions": { "description": "Optional. If set to true, any upcoming requests for this trigger config will be paused and the executions will be resumed later when the flag is reset. The workflow to which this trigger config belongs has to be in ACTIVE status for the executions to be paused or resumed.", @@ -8974,6 +9010,20 @@ }, "type": "object" }, + "EnterpriseCrmFrontendsEventbusProtoTriggerConfigVariables": { + "description": "Variables names mapped to api trigger.", + "id": "EnterpriseCrmFrontendsEventbusProtoTriggerConfigVariables", + "properties": { + "names": { + "description": "Optional. 
List of variable names.", + "items": { + "type": "string" + }, + "type": "array" + } + }, + "type": "object" + }, "EnterpriseCrmFrontendsEventbusProtoWorkflowParameterEntry": { "id": "EnterpriseCrmFrontendsEventbusProtoWorkflowParameterEntry", "properties": { @@ -9432,6 +9482,7 @@ "PREVIEW", "GA", "DEPRECATED", + "TEST", "PRIVATE_PREVIEW" ], "enumDescriptions": [ @@ -9439,6 +9490,7 @@ "PREVIEW.", "GA.", "DEPRECATED.", + "TEST.", "PRIVATE_PREVIEW." ], "readOnly": true, @@ -9629,7 +9681,7 @@ "type": "string" }, "deploymentModel": { - "description": "Optional. Indicates whether connector is deployed on GKE/CloudRun", + "description": "Output only. Indicates whether connector is deployed on GKE/CloudRun", "enum": [ "DEPLOYMENT_MODEL_UNSPECIFIED", "GKE_MST", @@ -9640,6 +9692,7 @@ "Default model gke mst.", "Cloud run mst." ], + "readOnly": true, "type": "string" }, "deploymentModelMigrationState": { @@ -9785,7 +9838,7 @@ "id": "GoogleCloudConnectorsV1EventingConfig", "properties": { "additionalVariables": { - "description": "Additional eventing related field values", + "description": "Optional. Additional eventing related field values", "items": { "$ref": "GoogleCloudConnectorsV1ConfigVariable" }, @@ -9793,14 +9846,14 @@ }, "authConfig": { "$ref": "GoogleCloudConnectorsV1AuthConfig", - "description": "Auth details for the webhook adapter." + "description": "Optional. Auth details for the webhook adapter." }, "deadLetterConfig": { "$ref": "GoogleCloudConnectorsV1EventingConfigDeadLetterConfig", "description": "Optional. Dead letter configuration for eventing of a connection." }, "enrichmentEnabled": { - "description": "Enrichment Enabled.", + "description": "Optional. Enrichment Enabled.", "type": "boolean" }, "eventsListenerIngressEndpoint": { @@ -9821,7 +9874,7 @@ }, "registrationDestinationConfig": { "$ref": "GoogleCloudConnectorsV1DestinationConfig", - "description": "Registration endpoint for auto registration." + "description": "Optional. Registration endpoint for auto registration." } }, "type": "object" @@ -9864,6 +9917,11 @@ "$ref": "GoogleCloudConnectorsV1EventingRuntimeDataWebhookData", "description": "Output only. Webhook data.", "readOnly": true + }, + "webhookSubscriptions": { + "$ref": "GoogleCloudConnectorsV1EventingRuntimeDataWebhookSubscriptions", + "description": "Output only. Webhook subscriptions.", + "readOnly": true } }, "type": "object" @@ -9911,6 +9969,21 @@ }, "type": "object" }, + "GoogleCloudConnectorsV1EventingRuntimeDataWebhookSubscriptions": { + "description": "WebhookSubscriptions has details of webhook subscriptions.", + "id": "GoogleCloudConnectorsV1EventingRuntimeDataWebhookSubscriptions", + "properties": { + "webhookData": { + "description": "Output only. Webhook data.", + "items": { + "$ref": "GoogleCloudConnectorsV1EventingRuntimeDataWebhookData" + }, + "readOnly": true, + "type": "array" + } + }, + "type": "object" + }, "GoogleCloudConnectorsV1EventingStatus": { "description": "EventingStatus indicates the state of eventing.", "id": "GoogleCloudConnectorsV1EventingStatus", @@ -10068,14 +10141,14 @@ "id": "GoogleCloudConnectorsV1SslConfig", "properties": { "additionalVariables": { - "description": "Additional SSL related field values", + "description": "Optional. Additional SSL related field values", "items": { "$ref": "GoogleCloudConnectorsV1ConfigVariable" }, "type": "array" }, "clientCertType": { - "description": "Type of Client Cert (PEM/JKS/.. etc.)", + "description": "Optional. Type of Client Cert (PEM/JKS/.. 
etc.)", "enum": [ "CERT_TYPE_UNSPECIFIED", "PEM" @@ -10088,22 +10161,22 @@ }, "clientCertificate": { "$ref": "GoogleCloudConnectorsV1Secret", - "description": "Client Certificate" + "description": "Optional. Client Certificate" }, "clientPrivateKey": { "$ref": "GoogleCloudConnectorsV1Secret", - "description": "Client Private Key" + "description": "Optional. Client Private Key" }, "clientPrivateKeyPass": { "$ref": "GoogleCloudConnectorsV1Secret", - "description": "Secret containing the passphrase protecting the Client Private Key" + "description": "Optional. Secret containing the passphrase protecting the Client Private Key" }, "privateServerCertificate": { "$ref": "GoogleCloudConnectorsV1Secret", - "description": "Private Server Certificate. Needs to be specified if trust model is `PRIVATE`." + "description": "Optional. Private Server Certificate. Needs to be specified if trust model is `PRIVATE`." }, "serverCertType": { - "description": "Type of Server Cert (PEM/JKS/.. etc.)", + "description": "Optional. Type of Server Cert (PEM/JKS/.. etc.)", "enum": [ "CERT_TYPE_UNSPECIFIED", "PEM" @@ -10115,7 +10188,7 @@ "type": "string" }, "trustModel": { - "description": "Trust Model of the SSL connection", + "description": "Optional. Trust Model of the SSL connection", "enum": [ "PUBLIC", "PRIVATE", @@ -10129,7 +10202,7 @@ "type": "string" }, "type": { - "description": "Controls the ssl type for the given connector version.", + "description": "Optional. Controls the ssl type for the given connector version.", "enum": [ "SSL_TYPE_UNSPECIFIED", "TLS", @@ -10143,7 +10216,7 @@ "type": "string" }, "useSsl": { - "description": "Bool for enabling SSL", + "description": "Optional. Bool for enabling SSL", "type": "boolean" } }, @@ -10178,12 +10251,30 @@ }, "type": "object" }, + "GoogleCloudIntegrationsV1alphaApiTriggerResource": { + "description": "List of API triggerID and their workflow resource name.", + "id": "GoogleCloudIntegrationsV1alphaApiTriggerResource", + "properties": { + "integrationResource": { + "description": "Required. Integration where the API is published", + "type": "string" + }, + "triggerId": { + "description": "Required. Trigger Id of the API trigger(s) in the integration", + "items": { + "type": "string" + }, + "type": "array" + } + }, + "type": "object" + }, "GoogleCloudIntegrationsV1alphaAssertion": { - "description": "An assertion which will check for a condition over task execution status or an expression for task output variables Next available id: 5", + "description": "An assertion which will check for a condition over task execution status or an expression for task output variables", "id": "GoogleCloudIntegrationsV1alphaAssertion", "properties": { "assertionStrategy": { - "description": "The type of assertion to perform.", + "description": "Optional. The type of assertion to perform.", "enum": [ "ASSERTION_STRATEGY_UNSPECIFIED", "ASSERT_SUCCESSFUL_EXECUTION", @@ -11366,6 +11457,20 @@ "description": "If this execution is a replay of another execution, then this field contains the original execution id.", "type": "string" }, + "replayMode": { + "description": "Replay mode for the execution", + "enum": [ + "REPLAY_MODE_UNSPECIFIED", + "REPLAY_MODE_FROM_BEGINNING", + "REPLAY_MODE_POINT_OF_FAILURE" + ], + "enumDescriptions": [ + "Default value.", + "Replay the original execution from the beginning.", + "Replay the execution from the first failed task." 
+ ], + "type": "string" + }, "replayReason": { "description": "reason for replay", "type": "string" @@ -11534,6 +11639,45 @@ }, "type": "object" }, + "GoogleCloudIntegrationsV1alphaGenerateOpenApiSpecRequest": { + "description": "Request for GenerateOpenApiSpec.", + "id": "GoogleCloudIntegrationsV1alphaGenerateOpenApiSpecRequest", + "properties": { + "apiTriggerResources": { + "description": "Required. List of api triggers", + "items": { + "$ref": "GoogleCloudIntegrationsV1alphaApiTriggerResource" + }, + "type": "array" + }, + "fileFormat": { + "description": "Required. File format for generated spec.", + "enum": [ + "FILE_FORMAT_UNSPECIFIED", + "JSON", + "YAML" + ], + "enumDescriptions": [ + "Unspecified file format", + "JSON File Format", + "YAML File Format" + ], + "type": "string" + } + }, + "type": "object" + }, + "GoogleCloudIntegrationsV1alphaGenerateOpenApiSpecResponse": { + "description": "Response of the GenerateOpenApiSpec API.", + "id": "GoogleCloudIntegrationsV1alphaGenerateOpenApiSpecResponse", + "properties": { + "openApiSpec": { + "description": "Open API spec as per the required format", + "type": "string" + } + }, + "type": "object" + }, "GoogleCloudIntegrationsV1alphaGenerateTokenResponse": { "description": "Returns success or error message", "id": "GoogleCloudIntegrationsV1alphaGenerateTokenResponse", @@ -13021,7 +13165,7 @@ "type": "object" }, "GoogleCloudIntegrationsV1alphaScheduleIntegrationsRequest": { - "description": "The request for scheduling an integration. Next available id: 11", + "description": "The request for scheduling an integration.", "id": "GoogleCloudIntegrationsV1alphaScheduleIntegrationsRequest", "properties": { "inputParameters": { @@ -14033,11 +14177,8 @@ "type": "string" }, "inputVariables": { - "description": "Optional. List of input variables for the api trigger.", - "items": { - "type": "string" - }, - "type": "array" + "$ref": "GoogleCloudIntegrationsV1alphaTriggerConfigVariables", + "description": "Optional. List of input variables for the api trigger." }, "label": { "description": "Optional. The user created label for a particular trigger.", @@ -14058,11 +14199,8 @@ "type": "string" }, "outputVariables": { - "description": "Optional. List of output variables for the api trigger.", - "items": { - "type": "string" - }, - "type": "array" + "$ref": "GoogleCloudIntegrationsV1alphaTriggerConfigVariables", + "description": "Optional. List of output variables for the api trigger." }, "position": { "$ref": "GoogleCloudIntegrationsV1alphaCoordinate", @@ -14125,6 +14263,20 @@ }, "type": "object" }, + "GoogleCloudIntegrationsV1alphaTriggerConfigVariables": { + "description": "Variables names mapped to api trigger.", + "id": "GoogleCloudIntegrationsV1alphaTriggerConfigVariables", + "properties": { + "names": { + "description": "Optional. 
List of variable names.", + "items": { + "type": "string" + }, + "type": "array" + } + }, + "type": "object" + }, "GoogleCloudIntegrationsV1alphaUnpublishIntegrationVersionRequest": { "description": "Request for UnpublishIntegrationVersion.", "id": "GoogleCloudIntegrationsV1alphaUnpublishIntegrationVersionRequest", diff --git a/discovery/googleapis/language__v1.json b/discovery/googleapis/language__v1.json index 21c36215e..ecfd0d07d 100644 --- a/discovery/googleapis/language__v1.json +++ b/discovery/googleapis/language__v1.json @@ -28,7 +28,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20240929", + "revision": "20241103", "rootUrl": "https://language.googleapis.com/", "servicePath": "", "title": "Cloud Natural Language API", @@ -1792,7 +1792,7 @@ "type": "object" }, "InfraUsage": { - "description": "Infra Usage of billing metrics. Next ID: 6", + "description": "Infra Usage of billing metrics.", "id": "InfraUsage", "properties": { "cpuMetrics": { @@ -2891,7 +2891,6 @@ "type": "object" }, "XPSColumnSpecForecastingMetadata": { - "description": "=========================================================================== # The fields below are used exclusively for Forecasting.", "id": "XPSColumnSpecForecastingMetadata", "properties": { "columnType": { @@ -3384,11 +3383,11 @@ "type": "boolean" }, "outputGcrUri": { - "description": "The Google Contained Registry (GCR) path the exported files to be pushed to. This location is set if the exported format is DOCKDER.", + "description": "The Google Contained Registry path the exported files to be pushed to. This location is set if the exported format is DOCKDER.", "type": "string" }, "outputGcsUri": { - "description": "The Google Cloud Storage (GCS) directory where XPS will output the exported models and related files. Format: gs://bucket/directory", + "description": "The Google Cloud Storage directory where XPS will output the exported models and related files. Format: gs://bucket/directory", "type": "string" }, "tfJsFormat": { @@ -3404,7 +3403,7 @@ "type": "object" }, "XPSFileSpec": { - "description": "Spec of input and output files, on external file systems (CNS, GCS, etc).", + "description": "Spec of input and output files, on external file systems (for example, Colossus Namespace System or Google Cloud Storage).", "id": "XPSFileSpec", "properties": { "directoryPath": { @@ -3432,7 +3431,7 @@ "enumDescriptions": [ "", "", - "Internal format for parallel text data used by Google Translate. go/rkvtools", + "Internal format for parallel text data used by Google Translate.", "", "Only the lexicographically first file described by the file_spec contains the header line.", "" @@ -3561,7 +3560,7 @@ "id": "XPSImageExportModelSpec", "properties": { "exportModelOutputConfig": { - "description": "Contains the model format and internal location of the model files to be exported/downloaded. Use the GCS bucket name which is provided via TrainRequest.gcs_bucket_name to store the model files.", + "description": "Contains the model format and internal location of the model files to be exported/downloaded. 
Use the Google Cloud Storage bucket name which is provided via TrainRequest.gcs_bucket_name to store the model files.", "items": { "$ref": "XPSExportModelOutputConfig" }, @@ -3586,7 +3585,7 @@ "type": "array" }, "labelGcsUri": { - "description": "GCS uri of decoded labels file for model export 'dict.txt'.", + "description": "Google Cloud Storage URI of decoded labels file for model export 'dict.txt'.", "type": "string" }, "servingArtifact": { @@ -3594,11 +3593,11 @@ "description": "The default model binary file used for serving (e.g. online predict, batch predict) via public Cloud AI Platform API." }, "tfJsBinaryGcsPrefix": { - "description": "GCS uri prefix of Tensorflow JavaScript binary files 'groupX-shardXofX.bin' Deprecated.", + "description": "Google Cloud Storage URI prefix of Tensorflow JavaScript binary files 'groupX-shardXofX.bin'. Deprecated.", "type": "string" }, "tfLiteMetadataGcsUri": { - "description": "GCS uri of Tensorflow Lite metadata 'tflite_metadata.json'.", + "description": "Google Cloud Storage URI of Tensorflow Lite metadata 'tflite_metadata.json'.", "type": "string" } }, @@ -3690,7 +3689,7 @@ "", "The default partition.", "It has significantly lower replication than partition-0 and is located in the US only. It also has a larger model size limit and higher default RAM quota than partition-0. Customers with batch traffic, US-based traffic, or very large models should use this partition. Capacity in this partition is significantly cheaper than partition-0.", - "To be used by customers with Jellyfish-accelerated ops. See go/servomatic-jellyfish for details.", + "To be used by customers with Jellyfish-accelerated ops.", "The partition used by regionalized servomatic cloud regions.", "The partition used for loading models from custom storage." ], @@ -3950,14 +3949,13 @@ "type": "string" }, "gcsUri": { - "description": "The Google Cloud Storage (GCS) uri that stores the model binary files.", + "description": "The Google Cloud Storage URI that stores the model binary files.", "type": "string" } }, "type": "object" }, "XPSPreprocessResponse": { - "description": "Next ID: 8", "id": "XPSPreprocessResponse", "properties": { "outputExampleSet": { @@ -4131,7 +4129,7 @@ }, "XPSResponseExplanationSpec": { "deprecated": true, - "description": "Specification of Model explanation. Feature-based XAI in AutoML Vision ICN is deprecated, see b/288407203 for context.", + "description": "Specification of Model explanation. Feature-based XAI in AutoML Vision ICN is deprecated.", "id": "XPSResponseExplanationSpec", "properties": { "explanationType": { @@ -4587,7 +4585,7 @@ "type": "object" }, "XPSTablesDatasetMetadata": { - "description": "Metadata for a dataset used for AutoML Tables. Next ID: 6", + "description": "Metadata for a dataset used for AutoML Tables.", "id": "XPSTablesDatasetMetadata", "properties": { "mlUseColumnId": { @@ -4831,7 +4829,7 @@ "type": "object" }, "XPSTextComponentModel": { - "description": "Component model. Next ID: 10", + "description": "Component model.", "id": "XPSTextComponentModel", "properties": { "batchPredictionModelGcsUri": { @@ -4856,7 +4854,7 @@ "", "The default partition.", "It has significantly lower replication than partition-0 and is located in the US only. It also has a larger model size limit and higher default RAM quota than partition-0. Customers with batch traffic, US-based traffic, or very large models should use this partition. 
Capacity in this partition is significantly cheaper than partition-0.", - "To be used by customers with Jellyfish-accelerated ops. See go/servomatic-jellyfish for details.", + "To be used by customers with Jellyfish-accelerated ops.", "The partition used by regionalized servomatic cloud regions.", "The partition used for loading models from custom storage." ], @@ -4901,7 +4899,7 @@ "", "Model type for entity extraction.", "Model type for relationship extraction.", - "A composite model represents a set of component models that have to be used together for prediction. A composite model appears to be a single model to the model user. It may contain only one component model. Please refer to go/cnl-composite-models for more information.", + "A composite model represents a set of component models that have to be used together for prediction. A composite model appears to be a single model to the model user. It may contain only one component model.", "Model type used to train default, MA, and ATC models in a single batch worker pipeline.", "BERT pipeline needs a specific model type, since it uses a different TFX configuration compared with DEFAULT (despite sharing most of the code).", "Model type for EncPaLM." @@ -5136,7 +5134,6 @@ "type": "object" }, "XPSTrainResponse": { - "description": "Next ID: 18", "id": "XPSTrainResponse", "properties": { "deployedModelSizeBytes": { @@ -5413,7 +5410,7 @@ "id": "XPSVideoExportModelSpec", "properties": { "exportModelOutputConfig": { - "description": "Contains the model format and internal location of the model files to be exported/downloaded. Use the GCS bucket name which is provided via TrainRequest.gcs_bucket_name to store the model files.", + "description": "Contains the model format and internal location of the model files to be exported/downloaded. Use the Google Cloud Storage bucket name which is provided via TrainRequest.gcs_bucket_name to store the model files.", "items": { "$ref": "XPSExportModelOutputConfig" }, diff --git a/discovery/googleapis/language__v2.json b/discovery/googleapis/language__v2.json index 5bf10c431..f8ef38386 100644 --- a/discovery/googleapis/language__v2.json +++ b/discovery/googleapis/language__v2.json @@ -28,7 +28,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20240929", + "revision": "20241110", "rootUrl": "https://language.googleapis.com/", "servicePath": "", "title": "Cloud Natural Language API", @@ -252,7 +252,7 @@ "type": "array" }, "languageCode": { - "description": "The language of the text, which will be the same as the language specified in the request or, if not specified, the automatically-detected language. See Document.language field for more details.", + "description": "The language of the text, which will be the same as the language specified in the request or, if not specified, the automatically-detected language. See Document.language_code field for more details.", "type": "string" }, "languageSupported": { @@ -298,7 +298,7 @@ "description": "The overall sentiment of the input document." }, "languageCode": { - "description": "The language of the text, which will be the same as the language specified in the request or, if not specified, the automatically-detected language. See Document.language field for more details.", + "description": "The language of the text, which will be the same as the language specified in the request or, if not specified, the automatically-detected language. 
See Document.language_code field for more details.", "type": "string" }, "languageSupported": { @@ -385,14 +385,14 @@ "description": "The overall sentiment for the document. Populated if the user enables AnnotateTextRequest.Features.extract_document_sentiment." }, "entities": { - "description": "Entities, along with their semantic information, in the input document. Populated if the user enables AnnotateTextRequest.Features.extract_entities or AnnotateTextRequest.Features.extract_entity_sentiment.", + "description": "Entities, along with their semantic information, in the input document. Populated if the user enables AnnotateTextRequest.Features.extract_entities .", "items": { "$ref": "Entity" }, "type": "array" }, "languageCode": { - "description": "The language of the text, which will be the same as the language specified in the request or, if not specified, the automatically-detected language. See Document.language field for more details.", + "description": "The language of the text, which will be the same as the language specified in the request or, if not specified, the automatically-detected language. See Document.language_code field for more details.", "type": "string" }, "languageSupported": { @@ -460,7 +460,7 @@ "type": "array" }, "languageCode": { - "description": "The language of the text, which will be the same as the language specified in the request or, if not specified, the automatically-detected language. See Document.language field for more details.", + "description": "The language of the text, which will be the same as the language specified in the request or, if not specified, the automatically-detected language. See Document.language_code field for more details.", "type": "string" }, "languageSupported": { @@ -979,7 +979,7 @@ }, "sentiment": { "$ref": "Sentiment", - "description": "For calls to AnalyzeEntitySentiment or if AnnotateTextRequest.Features.extract_entity_sentiment is set to true, this field will contain the aggregate sentiment expressed for this entity in the provided document." + "description": "For calls to AnalyzeEntitySentiment this field will contain the aggregate sentiment expressed for this entity in the provided document." }, "type": { "description": "The entity type.", @@ -1029,7 +1029,7 @@ }, "sentiment": { "$ref": "Sentiment", - "description": "For calls to AnalyzeEntitySentiment or if AnnotateTextRequest.Features.extract_entity_sentiment is set to true, this field will contain the sentiment expressed for this mention of the entity in the provided document." + "description": "For calls to AnalyzeEntitySentiment this field will contain the sentiment expressed for this mention of the entity in the provided document." }, "text": { "$ref": "TextSpan", @@ -1431,7 +1431,7 @@ "type": "object" }, "InfraUsage": { - "description": "Infra Usage of billing metrics. Next ID: 6", + "description": "Infra Usage of billing metrics.", "id": "InfraUsage", "properties": { "cpuMetrics": { @@ -1502,7 +1502,7 @@ "id": "ModerateTextResponse", "properties": { "languageCode": { - "description": "The language of the text, which will be the same as the language specified in the request or, if not specified, the automatically-detected language. See Document.language field for more details.", + "description": "The language of the text, which will be the same as the language specified in the request or, if not specified, the automatically-detected language. 
See Document.language_code field for more details.", "type": "string" }, "languageSupported": { @@ -1932,7 +1932,7 @@ "id": "Sentiment", "properties": { "magnitude": { - "description": "A non-negative number in the [0, +inf) range, which represents the absolute magnitude of sentiment regardless of score (positive or negative).", + "description": "A non-negative number in the [0, +inf] range, which represents the absolute magnitude of sentiment regardless of score (positive or negative).", "format": "float", "type": "number" }, @@ -2262,7 +2262,6 @@ "type": "object" }, "XPSColumnSpecForecastingMetadata": { - "description": "=========================================================================== # The fields below are used exclusively for Forecasting.", "id": "XPSColumnSpecForecastingMetadata", "properties": { "columnType": { @@ -2755,11 +2754,11 @@ "type": "boolean" }, "outputGcrUri": { - "description": "The Google Contained Registry (GCR) path the exported files to be pushed to. This location is set if the exported format is DOCKDER.", + "description": "The Google Contained Registry path the exported files to be pushed to. This location is set if the exported format is DOCKDER.", "type": "string" }, "outputGcsUri": { - "description": "The Google Cloud Storage (GCS) directory where XPS will output the exported models and related files. Format: gs://bucket/directory", + "description": "The Google Cloud Storage directory where XPS will output the exported models and related files. Format: gs://bucket/directory", "type": "string" }, "tfJsFormat": { @@ -2775,7 +2774,7 @@ "type": "object" }, "XPSFileSpec": { - "description": "Spec of input and output files, on external file systems (CNS, GCS, etc).", + "description": "Spec of input and output files, on external file systems (for example, Colossus Namespace System or Google Cloud Storage).", "id": "XPSFileSpec", "properties": { "directoryPath": { @@ -2803,7 +2802,7 @@ "enumDescriptions": [ "", "", - "Internal format for parallel text data used by Google Translate. go/rkvtools", + "Internal format for parallel text data used by Google Translate.", "", "Only the lexicographically first file described by the file_spec contains the header line.", "" @@ -2932,7 +2931,7 @@ "id": "XPSImageExportModelSpec", "properties": { "exportModelOutputConfig": { - "description": "Contains the model format and internal location of the model files to be exported/downloaded. Use the GCS bucket name which is provided via TrainRequest.gcs_bucket_name to store the model files.", + "description": "Contains the model format and internal location of the model files to be exported/downloaded. Use the Google Cloud Storage bucket name which is provided via TrainRequest.gcs_bucket_name to store the model files.", "items": { "$ref": "XPSExportModelOutputConfig" }, @@ -2957,7 +2956,7 @@ "type": "array" }, "labelGcsUri": { - "description": "GCS uri of decoded labels file for model export 'dict.txt'.", + "description": "Google Cloud Storage URI of decoded labels file for model export 'dict.txt'.", "type": "string" }, "servingArtifact": { @@ -2965,11 +2964,11 @@ "description": "The default model binary file used for serving (e.g. online predict, batch predict) via public Cloud AI Platform API." }, "tfJsBinaryGcsPrefix": { - "description": "GCS uri prefix of Tensorflow JavaScript binary files 'groupX-shardXofX.bin' Deprecated.", + "description": "Google Cloud Storage URI prefix of Tensorflow JavaScript binary files 'groupX-shardXofX.bin'. 
Deprecated.", "type": "string" }, "tfLiteMetadataGcsUri": { - "description": "GCS uri of Tensorflow Lite metadata 'tflite_metadata.json'.", + "description": "Google Cloud Storage URI of Tensorflow Lite metadata 'tflite_metadata.json'.", "type": "string" } }, @@ -3061,7 +3060,7 @@ "", "The default partition.", "It has significantly lower replication than partition-0 and is located in the US only. It also has a larger model size limit and higher default RAM quota than partition-0. Customers with batch traffic, US-based traffic, or very large models should use this partition. Capacity in this partition is significantly cheaper than partition-0.", - "To be used by customers with Jellyfish-accelerated ops. See go/servomatic-jellyfish for details.", + "To be used by customers with Jellyfish-accelerated ops.", "The partition used by regionalized servomatic cloud regions.", "The partition used for loading models from custom storage." ], @@ -3321,14 +3320,13 @@ "type": "string" }, "gcsUri": { - "description": "The Google Cloud Storage (GCS) uri that stores the model binary files.", + "description": "The Google Cloud Storage URI that stores the model binary files.", "type": "string" } }, "type": "object" }, "XPSPreprocessResponse": { - "description": "Next ID: 8", "id": "XPSPreprocessResponse", "properties": { "outputExampleSet": { @@ -3502,7 +3500,7 @@ }, "XPSResponseExplanationSpec": { "deprecated": true, - "description": "Specification of Model explanation. Feature-based XAI in AutoML Vision ICN is deprecated, see b/288407203 for context.", + "description": "Specification of Model explanation. Feature-based XAI in AutoML Vision ICN is deprecated.", "id": "XPSResponseExplanationSpec", "properties": { "explanationType": { @@ -3958,7 +3956,7 @@ "type": "object" }, "XPSTablesDatasetMetadata": { - "description": "Metadata for a dataset used for AutoML Tables. Next ID: 6", + "description": "Metadata for a dataset used for AutoML Tables.", "id": "XPSTablesDatasetMetadata", "properties": { "mlUseColumnId": { @@ -4202,7 +4200,7 @@ "type": "object" }, "XPSTextComponentModel": { - "description": "Component model. Next ID: 10", + "description": "Component model.", "id": "XPSTextComponentModel", "properties": { "batchPredictionModelGcsUri": { @@ -4227,7 +4225,7 @@ "", "The default partition.", "It has significantly lower replication than partition-0 and is located in the US only. It also has a larger model size limit and higher default RAM quota than partition-0. Customers with batch traffic, US-based traffic, or very large models should use this partition. Capacity in this partition is significantly cheaper than partition-0.", - "To be used by customers with Jellyfish-accelerated ops. See go/servomatic-jellyfish for details.", + "To be used by customers with Jellyfish-accelerated ops.", "The partition used by regionalized servomatic cloud regions.", "The partition used for loading models from custom storage." ], @@ -4272,7 +4270,7 @@ "", "Model type for entity extraction.", "Model type for relationship extraction.", - "A composite model represents a set of component models that have to be used together for prediction. A composite model appears to be a single model to the model user. It may contain only one component model. Please refer to go/cnl-composite-models for more information.", + "A composite model represents a set of component models that have to be used together for prediction. A composite model appears to be a single model to the model user. 
It may contain only one component model.", "Model type used to train default, MA, and ATC models in a single batch worker pipeline.", "BERT pipeline needs a specific model type, since it uses a different TFX configuration compared with DEFAULT (despite sharing most of the code).", "Model type for EncPaLM." @@ -4507,7 +4505,6 @@ "type": "object" }, "XPSTrainResponse": { - "description": "Next ID: 18", "id": "XPSTrainResponse", "properties": { "deployedModelSizeBytes": { @@ -4784,7 +4781,7 @@ "id": "XPSVideoExportModelSpec", "properties": { "exportModelOutputConfig": { - "description": "Contains the model format and internal location of the model files to be exported/downloaded. Use the GCS bucket name which is provided via TrainRequest.gcs_bucket_name to store the model files.", + "description": "Contains the model format and internal location of the model files to be exported/downloaded. Use the Google Cloud Storage bucket name which is provided via TrainRequest.gcs_bucket_name to store the model files.", "items": { "$ref": "XPSExportModelOutputConfig" }, diff --git a/discovery/googleapis/localservices__v1.json b/discovery/googleapis/localservices__v1.json index 4a70e363a..241220025 100644 --- a/discovery/googleapis/localservices__v1.json +++ b/discovery/googleapis/localservices__v1.json @@ -25,7 +25,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20240218", + "revision": "20241202", "rootUrl": "https://localservices.googleapis.com/", "servicePath": "", "title": "Local Services API", @@ -570,11 +570,11 @@ "id": "GoogleTypeTimeZone", "properties": { "id": { - "description": "IANA Time Zone Database time zone, e.g. \"America/New_York\".", + "description": "IANA Time Zone Database time zone. For example \"America/New_York\".", "type": "string" }, "version": { - "description": "Optional. IANA Time Zone Database version number, e.g. \"2019a\".", + "description": "Optional. IANA Time Zone Database version number. For example \"2019a\".", "type": "string" } }, diff --git a/discovery/googleapis/logging__v2.json b/discovery/googleapis/logging__v2.json index 7fc945a32..5753dcffe 100644 --- a/discovery/googleapis/logging__v2.json +++ b/discovery/googleapis/logging__v2.json @@ -119,7 +119,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20241010", + "revision": "20241018", "rootUrl": "https://logging.googleapis.com/", "servicePath": "", "title": "Cloud Logging API", @@ -10416,7 +10416,7 @@ "type": "string" }, "resourceNames": { - "description": "Required. Names of one or more parent resources: projects/[PROJECT_ID]May alternatively be one or more views: projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]A log scope can include a maximum of 50 projects and a maximum of 100 resources in total.", + "description": "Required. 
Names of one or more parent resources: projects/[PROJECT_ID]May alternatively be one or more views: projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]A log scope can include a maximum of 5 projects and a maximum of 100 resources in total.", "items": { "type": "string" }, diff --git a/discovery/googleapis/looker__v1.json b/discovery/googleapis/looker__v1.json index a43ed662e..1bd30a2d8 100644 --- a/discovery/googleapis/looker__v1.json +++ b/discovery/googleapis/looker__v1.json @@ -25,7 +25,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20240910", + "revision": "20241204", "rootUrl": "https://looker.googleapis.com/", "servicePath": "", "title": "Looker (Google Cloud core) API", @@ -610,7 +610,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "looker.projects.locations.operations.cancel", @@ -1157,7 +1157,10 @@ "LOOKER_CORE_STANDARD", "LOOKER_CORE_STANDARD_ANNUAL", "LOOKER_CORE_ENTERPRISE_ANNUAL", - "LOOKER_CORE_EMBED_ANNUAL" + "LOOKER_CORE_EMBED_ANNUAL", + "LOOKER_CORE_NONPROD_STANDARD_ANNUAL", + "LOOKER_CORE_NONPROD_ENTERPRISE_ANNUAL", + "LOOKER_CORE_NONPROD_EMBED_ANNUAL" ], "enumDescriptions": [ "Platform edition is unspecified.", @@ -1165,7 +1168,10 @@ "Standard.", "Subscription Standard.", "Subscription Enterprise.", - "Subscription Embed." + "Subscription Embed.", + "Nonprod Subscription Standard.", + "Nonprod Subscription Enterprise.", + "Nonprod Subscription Embed." ], "type": "string" }, @@ -1224,7 +1230,7 @@ }, "userMetadata": { "$ref": "UserMetadata", - "description": "User metadata." + "description": "Optional. User metadata." } }, "type": "object" @@ -1638,22 +1644,22 @@ "id": "TimeOfDay", "properties": { "hours": { - "description": "Hours of day in 24 hour format. Should be from 0 to 23. An API may choose to allow the value \"24:00:00\" for scenarios like business closing time.", + "description": "Hours of a day in 24 hour format. Must be greater than or equal to 0 and typically must be less than or equal to 23. 
An API may choose to allow the value \"24:00:00\" for scenarios like business closing time.", "format": "int32", "type": "integer" }, "minutes": { - "description": "Minutes of hour of day. Must be from 0 to 59.", + "description": "Minutes of an hour. Must be greater than or equal to 0 and less than or equal to 59.", "format": "int32", "type": "integer" }, "nanos": { - "description": "Fractions of seconds in nanoseconds. Must be from 0 to 999,999,999.", + "description": "Fractions of seconds, in nanoseconds. Must be greater than or equal to 0 and less than or equal to 999,999,999.", "format": "int32", "type": "integer" }, "seconds": { - "description": "Seconds of minutes of the time. Must normally be from 0 to 59. An API may allow the value 60 if it allows leap-seconds.", + "description": "Seconds of a minute. Must be greater than or equal to 0 and typically must be less than or equal to 59. An API may allow the value 60 if it allows leap-seconds.", "format": "int32", "type": "integer" } diff --git a/discovery/googleapis/metastore__v1.json b/discovery/googleapis/metastore__v1.json index 57d62a4ff..b39a8e620 100644 --- a/discovery/googleapis/metastore__v1.json +++ b/discovery/googleapis/metastore__v1.json @@ -25,7 +25,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20240709", + "revision": "20241203", "rootUrl": "https://metastore.googleapis.com/", "servicePath": "", "title": "Dataproc Metastore API", @@ -3025,7 +3025,7 @@ "type": "string" }, "requestedCancellation": { - "description": "Output only. Identifies whether the caller has requested cancellation of the operation. Operations that have successfully been cancelled have Operation.error value with a google.rpc.Status.code of 1, corresponding to Code.CANCELLED.", + "description": "Output only. Identifies whether the caller has requested cancellation of the operation. Operations that have successfully been cancelled have google.longrunning.Operation.error value with a google.rpc.Status.code of 1, corresponding to Code.CANCELLED.", "readOnly": true, "type": "boolean" }, @@ -3168,7 +3168,7 @@ "type": "object" }, "RestoreServiceRequest": { - "description": "Request message for DataprocMetastore.Restore.", + "description": "Request message for DataprocMetastore.RestoreService.", "id": "RestoreServiceRequest", "properties": { "backup": { diff --git a/discovery/googleapis/metastore__v2.json b/discovery/googleapis/metastore__v2.json new file mode 100644 index 000000000..195c2861c --- /dev/null +++ b/discovery/googleapis/metastore__v2.json @@ -0,0 +1,1811 @@ +{ + "auth": { + "oauth2": { + "scopes": { + "https://www.googleapis.com/auth/cloud-platform": { + "description": "See, edit, configure, and delete your Google Cloud data and see the email address for your Google Account." 
+ } + } + } + }, + "basePath": "", + "baseUrl": "https://metastore.googleapis.com/", + "batchPath": "batch", + "canonicalName": "Dataproc Metastore", + "description": "The Dataproc Metastore API is used to manage the lifecycle and configuration of metastore services.", + "discoveryVersion": "v1", + "documentationLink": "https://cloud.google.com/dataproc-metastore/docs", + "icons": { + "x16": "http://www.google.com/images/icons/product/search-16.gif", + "x32": "http://www.google.com/images/icons/product/search-32.gif" + }, + "id": "metastore:v2", + "kind": "discovery#restDescription", + "name": "metastore", + "ownerDomain": "google.com", + "ownerName": "Google", + "protocol": "rest", + "revision": "20241203", + "rootUrl": "https://metastore.googleapis.com/", + "servicePath": "", + "title": "Dataproc Metastore API", + "version": "v2", + "version_module": true, + "parameters": { + "$.xgafv": { + "description": "V1 error format.", + "enum": [ + "1", + "2" + ], + "enumDescriptions": [ + "v1 error format", + "v2 error format" + ], + "location": "query", + "type": "string" + }, + "access_token": { + "description": "OAuth access token.", + "location": "query", + "type": "string" + }, + "alt": { + "default": "json", + "description": "Data format for response.", + "enum": [ + "json", + "media", + "proto" + ], + "enumDescriptions": [ + "Responses with Content-Type of application/json", + "Media download with context-dependent Content-Type", + "Responses with Content-Type of application/x-protobuf" + ], + "location": "query", + "type": "string" + }, + "callback": { + "description": "JSONP", + "location": "query", + "type": "string" + }, + "fields": { + "description": "Selector specifying which fields to include in a partial response.", + "location": "query", + "type": "string" + }, + "key": { + "description": "API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.", + "location": "query", + "type": "string" + }, + "oauth_token": { + "description": "OAuth 2.0 token for the current user.", + "location": "query", + "type": "string" + }, + "prettyPrint": { + "default": "true", + "description": "Returns response with indentations and line breaks.", + "location": "query", + "type": "boolean" + }, + "quotaUser": { + "description": "Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.", + "location": "query", + "type": "string" + }, + "uploadType": { + "description": "Legacy upload protocol for media (e.g. \"media\", \"multipart\").", + "location": "query", + "type": "string" + }, + "upload_protocol": { + "description": "Upload protocol for media (e.g. \"raw\", \"multipart\").", + "location": "query", + "type": "string" + } + }, + "resources": { + "projects": { + "resources": { + "locations": { + "resources": { + "services": { + "methods": { + "alterLocation": { + "description": "Alter metadata resource location. The metadata resource can be a database, table, or partition. This functionality only updates the parent directory for the respective metadata resource and does not transfer any existing data to the new location.", + "flatPath": "v2/projects/{projectsId}/locations/{locationsId}/services/{servicesId}:alterLocation", + "httpMethod": "POST", + "id": "metastore.projects.locations.services.alterLocation", + "parameterOrder": [ + "service" + ], + "parameters": { + "service": { + "description": "Required. 
The relative resource name of the metastore service to mutate metadata, in the following format:projects/{project_id}/locations/{location_id}/services/{service_id}.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/services/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v2/{+service}:alterLocation", + "request": { + "$ref": "GoogleCloudMetastoreV2AlterMetadataResourceLocationRequest" + }, + "response": { + "$ref": "GoogleLongrunningOperation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "alterTableProperties": { + "description": "Alter metadata table properties.", + "flatPath": "v2/projects/{projectsId}/locations/{locationsId}/services/{servicesId}:alterTableProperties", + "httpMethod": "POST", + "id": "metastore.projects.locations.services.alterTableProperties", + "parameterOrder": [ + "service" + ], + "parameters": { + "service": { + "description": "Required. The relative resource name of the Dataproc Metastore service that's being used to mutate metadata table properties, in the following format:projects/{project_id}/locations/{location_id}/services/{service_id}.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/services/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v2/{+service}:alterTableProperties", + "request": { + "$ref": "GoogleCloudMetastoreV2AlterTablePropertiesRequest" + }, + "response": { + "$ref": "GoogleLongrunningOperation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "create": { + "description": "Creates a metastore service in a project and location.", + "flatPath": "v2/projects/{projectsId}/locations/{locationsId}/services", + "httpMethod": "POST", + "id": "metastore.projects.locations.services.create", + "parameterOrder": [ + "parent" + ], + "parameters": { + "parent": { + "description": "Required. The relative resource name of the location in which to create a metastore service, in the following form:projects/{project_number}/locations/{location_id}.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+$", + "required": true, + "type": "string" + }, + "requestId": { + "description": "Optional. A request ID. Specify a unique request ID to allow the server to ignore the request if it has completed. The server will ignore subsequent requests that provide a duplicate request ID for at least 60 minutes after the first request.For example, if an initial request times out, followed by another request with the same request ID, the server ignores the second request to prevent the creation of duplicate commitments.The request ID must be a valid UUID (https://en.wikipedia.org/wiki/Universally_unique_identifier#Format) A zero UUID (00000000-0000-0000-0000-000000000000) is not supported.", + "location": "query", + "type": "string" + }, + "serviceId": { + "description": "Required. 
The ID of the metastore service, which is used as the final component of the metastore service's name.This value must be between 2 and 63 characters long inclusive, begin with a letter, end with a letter or number, and consist of alpha-numeric ASCII characters or hyphens.", + "location": "query", + "type": "string" + } + }, + "path": "v2/{+parent}/services", + "request": { + "$ref": "GoogleCloudMetastoreV2Service" + }, + "response": { + "$ref": "GoogleLongrunningOperation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "delete": { + "description": "Deletes a single service.", + "flatPath": "v2/projects/{projectsId}/locations/{locationsId}/services/{servicesId}", + "httpMethod": "DELETE", + "id": "metastore.projects.locations.services.delete", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "Required. The relative resource name of the metastore service to delete, in the following form:projects/{project_number}/locations/{location_id}/services/{service_id}.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/services/[^/]+$", + "required": true, + "type": "string" + }, + "requestId": { + "description": "Optional. A request ID. Specify a unique request ID to allow the server to ignore the request if it has completed. The server will ignore subsequent requests that provide a duplicate request ID for at least 60 minutes after the first request.For example, if an initial request times out, followed by another request with the same request ID, the server ignores the second request to prevent the creation of duplicate commitments.The request ID must be a valid UUID (https://en.wikipedia.org/wiki/Universally_unique_identifier#Format) A zero UUID (00000000-0000-0000-0000-000000000000) is not supported.", + "location": "query", + "type": "string" + } + }, + "path": "v2/{+name}", + "response": { + "$ref": "GoogleLongrunningOperation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "exportMetadata": { + "description": "Exports metadata from a service.", + "flatPath": "v2/projects/{projectsId}/locations/{locationsId}/services/{servicesId}:exportMetadata", + "httpMethod": "POST", + "id": "metastore.projects.locations.services.exportMetadata", + "parameterOrder": [ + "service" + ], + "parameters": { + "service": { + "description": "Required. The relative resource name of the metastore service to run export, in the following form:projects/{project_id}/locations/{location_id}/services/{service_id}.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/services/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v2/{+service}:exportMetadata", + "request": { + "$ref": "GoogleCloudMetastoreV2ExportMetadataRequest" + }, + "response": { + "$ref": "GoogleLongrunningOperation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "get": { + "description": "Gets the details of a single service.", + "flatPath": "v2/projects/{projectsId}/locations/{locationsId}/services/{servicesId}", + "httpMethod": "GET", + "id": "metastore.projects.locations.services.get", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "Required. 
The relative resource name of the metastore service to retrieve, in the following form:projects/{project_number}/locations/{location_id}/services/{service_id}.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/services/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v2/{+name}", + "response": { + "$ref": "GoogleCloudMetastoreV2Service" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "importMetadata": { + "description": "Imports Metadata into a Dataproc Metastore service.", + "flatPath": "v2/projects/{projectsId}/locations/{locationsId}/services/{servicesId}:importMetadata", + "httpMethod": "POST", + "id": "metastore.projects.locations.services.importMetadata", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "Immutable. The relative resource name of the metastore service to run import, in the following form:projects/{project_id}/locations/{location_id}/services/{service_id}.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/services/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v2/{+name}:importMetadata", + "request": { + "$ref": "GoogleCloudMetastoreV2ImportMetadataRequest" + }, + "response": { + "$ref": "GoogleLongrunningOperation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "list": { + "description": "Lists services in a project and location.", + "flatPath": "v2/projects/{projectsId}/locations/{locationsId}/services", + "httpMethod": "GET", + "id": "metastore.projects.locations.services.list", + "parameterOrder": [ + "parent" + ], + "parameters": { + "filter": { + "description": "Optional. The filter to apply to list results.", + "location": "query", + "type": "string" + }, + "orderBy": { + "description": "Optional. Specify the ordering of results as described in Sorting Order (https://cloud.google.com/apis/design/design_patterns#sorting_order). If not specified, the results will be sorted in the default order.", + "location": "query", + "type": "string" + }, + "pageSize": { + "description": "Optional. The maximum number of services to return. The response may contain less than the maximum number. If unspecified, no more than 500 services are returned. The maximum value is 1000; values above 1000 are changed to 1000.", + "format": "int32", + "location": "query", + "type": "integer" + }, + "pageToken": { + "description": "Optional. A page token, received from a previous DataprocMetastore.ListServices call. Provide this token to retrieve the subsequent page.To retrieve the first page, supply an empty page token.When paginating, other parameters provided to DataprocMetastore.ListServices must match the call that provided the page token.", + "location": "query", + "type": "string" + }, + "parent": { + "description": "Required. 
The relative resource name of the location of metastore services to list, in the following form:projects/{project_number}/locations/{location_id}.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v2/{+parent}/services", + "response": { + "$ref": "GoogleCloudMetastoreV2ListServicesResponse" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "moveTableToDatabase": { + "description": "Move a table to another database.", + "flatPath": "v2/projects/{projectsId}/locations/{locationsId}/services/{servicesId}:moveTableToDatabase", + "httpMethod": "POST", + "id": "metastore.projects.locations.services.moveTableToDatabase", + "parameterOrder": [ + "service" + ], + "parameters": { + "service": { + "description": "Required. The relative resource name of the metastore service to mutate metadata, in the following format:projects/{project_id}/locations/{location_id}/services/{service_id}.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/services/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v2/{+service}:moveTableToDatabase", + "request": { + "$ref": "GoogleCloudMetastoreV2MoveTableToDatabaseRequest" + }, + "response": { + "$ref": "GoogleLongrunningOperation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "patch": { + "description": "Updates the parameters of a single service.", + "flatPath": "v2/projects/{projectsId}/locations/{locationsId}/services/{servicesId}", + "httpMethod": "PATCH", + "id": "metastore.projects.locations.services.patch", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "Immutable. The relative resource name of the metastore service, in the following format:projects/{project_number}/locations/{location_id}/services/{service_id}.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/services/[^/]+$", + "required": true, + "type": "string" + }, + "requestId": { + "description": "Optional. A request ID. Specify a unique request ID to allow the server to ignore the request if it has completed. The server will ignore subsequent requests that provide a duplicate request ID for at least 60 minutes after the first request.For example, if an initial request times out, followed by another request with the same request ID, the server ignores the second request to prevent the creation of duplicate commitments.The request ID must be a valid UUID (https://en.wikipedia.org/wiki/Universally_unique_identifier#Format) A zero UUID (00000000-0000-0000-0000-000000000000) is not supported.", + "location": "query", + "type": "string" + }, + "updateMask": { + "description": "Required. A field mask used to specify the fields to be overwritten in the metastore service resource by the update. Fields specified in the update_mask are relative to the resource (not to the full request). 
A field is overwritten if it is in the mask.", + "format": "google-fieldmask", + "location": "query", + "type": "string" + } + }, + "path": "v2/{+name}", + "request": { + "$ref": "GoogleCloudMetastoreV2Service" + }, + "response": { + "$ref": "GoogleLongrunningOperation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "queryMetadata": { + "description": "Query Dataproc Metastore metadata.", + "flatPath": "v2/projects/{projectsId}/locations/{locationsId}/services/{servicesId}:queryMetadata", + "httpMethod": "POST", + "id": "metastore.projects.locations.services.queryMetadata", + "parameterOrder": [ + "service" + ], + "parameters": { + "service": { + "description": "Required. The relative resource name of the metastore service to query metadata, in the following format:projects/{project_id}/locations/{location_id}/services/{service_id}.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/services/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v2/{+service}:queryMetadata", + "request": { + "$ref": "GoogleCloudMetastoreV2QueryMetadataRequest" + }, + "response": { + "$ref": "GoogleLongrunningOperation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "restore": { + "description": "Restores a service from a backup.", + "flatPath": "v2/projects/{projectsId}/locations/{locationsId}/services/{servicesId}:restore", + "httpMethod": "POST", + "id": "metastore.projects.locations.services.restore", + "parameterOrder": [ + "service" + ], + "parameters": { + "service": { + "description": "Required. The relative resource name of the metastore service to run restore, in the following form:projects/{project_id}/locations/{location_id}/services/{service_id}.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/services/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v2/{+service}:restore", + "request": { + "$ref": "GoogleCloudMetastoreV2RestoreServiceRequest" + }, + "response": { + "$ref": "GoogleLongrunningOperation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + } + }, + "resources": { + "backups": { + "methods": { + "create": { + "description": "Creates a new backup in a given project and location.", + "flatPath": "v2/projects/{projectsId}/locations/{locationsId}/services/{servicesId}/backups", + "httpMethod": "POST", + "id": "metastore.projects.locations.services.backups.create", + "parameterOrder": [ + "parent" + ], + "parameters": { + "backupId": { + "description": "Required. The ID of the backup, which is used as the final component of the backup's name.This value must be between 1 and 64 characters long, begin with a letter, end with a letter or number, and consist of alpha-numeric ASCII characters or hyphens.", + "location": "query", + "type": "string" + }, + "parent": { + "description": "Required. The relative resource name of the service in which to create a backup of the following form:projects/{project_number}/locations/{location_id}/services/{service_id}.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/services/[^/]+$", + "required": true, + "type": "string" + }, + "requestId": { + "description": "Optional. A request ID. Specify a unique request ID to allow the server to ignore the request if it has completed. 
The server will ignore subsequent requests that provide a duplicate request ID for at least 60 minutes after the first request.For example, if an initial request times out, followed by another request with the same request ID, the server ignores the second request to prevent the creation of duplicate commitments.The request ID must be a valid UUID (https://en.wikipedia.org/wiki/Universally_unique_identifier#Format) A zero UUID (00000000-0000-0000-0000-000000000000) is not supported.", + "location": "query", + "type": "string" + } + }, + "path": "v2/{+parent}/backups", + "request": { + "$ref": "GoogleCloudMetastoreV2Backup" + }, + "response": { + "$ref": "GoogleLongrunningOperation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "delete": { + "description": "Deletes a single backup.", + "flatPath": "v2/projects/{projectsId}/locations/{locationsId}/services/{servicesId}/backups/{backupsId}", + "httpMethod": "DELETE", + "id": "metastore.projects.locations.services.backups.delete", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "Required. The relative resource name of the backup to delete, in the following form:projects/{project_number}/locations/{location_id}/services/{service_id}/backups/{backup_id}.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/services/[^/]+/backups/[^/]+$", + "required": true, + "type": "string" + }, + "requestId": { + "description": "Optional. A request ID. Specify a unique request ID to allow the server to ignore the request if it has completed. The server will ignore subsequent requests that provide a duplicate request ID for at least 60 minutes after the first request.For example, if an initial request times out, followed by another request with the same request ID, the server ignores the second request to prevent the creation of duplicate commitments.The request ID must be a valid UUID (https://en.wikipedia.org/wiki/Universally_unique_identifier#Format) A zero UUID (00000000-0000-0000-0000-000000000000) is not supported.", + "location": "query", + "type": "string" + } + }, + "path": "v2/{+name}", + "response": { + "$ref": "GoogleLongrunningOperation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "get": { + "description": "Gets details of a single backup.", + "flatPath": "v2/projects/{projectsId}/locations/{locationsId}/services/{servicesId}/backups/{backupsId}", + "httpMethod": "GET", + "id": "metastore.projects.locations.services.backups.get", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "Required. The relative resource name of the backup to retrieve, in the following form:projects/{project_number}/locations/{location_id}/services/{service_id}/backups/{backup_id}.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/services/[^/]+/backups/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v2/{+name}", + "response": { + "$ref": "GoogleCloudMetastoreV2Backup" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "list": { + "description": "Lists backups in a service.", + "flatPath": "v2/projects/{projectsId}/locations/{locationsId}/services/{servicesId}/backups", + "httpMethod": "GET", + "id": "metastore.projects.locations.services.backups.list", + "parameterOrder": [ + "parent" + ], + "parameters": { + "filter": { + "description": "Optional. 
The filter to apply to list results.", + "location": "query", + "type": "string" + }, + "orderBy": { + "description": "Optional. Specify the ordering of results as described in Sorting Order (https://cloud.google.com/apis/design/design_patterns#sorting_order). If not specified, the results will be sorted in the default order.", + "location": "query", + "type": "string" + }, + "pageSize": { + "description": "Optional. The maximum number of backups to return. The response may contain less than the maximum number. If unspecified, no more than 500 backups are returned. The maximum value is 1000; values above 1000 are changed to 1000.", + "format": "int32", + "location": "query", + "type": "integer" + }, + "pageToken": { + "description": "Optional. A page token, received from a previous DataprocMetastore.ListBackups call. Provide this token to retrieve the subsequent page.To retrieve the first page, supply an empty page token.When paginating, other parameters provided to DataprocMetastore.ListBackups must match the call that provided the page token.", + "location": "query", + "type": "string" + }, + "parent": { + "description": "Required. The relative resource name of the service whose backups to list, in the following form:projects/{project_number}/locations/{location_id}/services/{service_id}/backups.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/services/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v2/{+parent}/backups", + "response": { + "$ref": "GoogleCloudMetastoreV2ListBackupsResponse" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + } + } + } + } + } + } + } + } + } + }, + "schemas": { + "GoogleCloudMetastoreV1AlterMetadataResourceLocationResponse": { + "description": "Response message for DataprocMetastore.AlterMetadataResourceLocation.", + "id": "GoogleCloudMetastoreV1AlterMetadataResourceLocationResponse", + "properties": {}, + "type": "object" + }, + "GoogleCloudMetastoreV1CustomRegionMetadata": { + "description": "Metadata about a custom region. This is only populated if the region is a custom region. For single/multi regions, it will be empty.", + "id": "GoogleCloudMetastoreV1CustomRegionMetadata", + "properties": { + "optionalReadOnlyRegions": { + "description": "The read-only regions for this custom region.", + "items": { + "type": "string" + }, + "type": "array" + }, + "requiredReadWriteRegions": { + "description": "The read-write regions for this custom region.", + "items": { + "type": "string" + }, + "type": "array" + }, + "witnessRegion": { + "description": "The Spanner witness region for this custom region.", + "type": "string" + } + }, + "type": "object" + }, + "GoogleCloudMetastoreV1ErrorDetails": { + "description": "Error details in public error message for DataprocMetastore.QueryMetadata.", + "id": "GoogleCloudMetastoreV1ErrorDetails", + "properties": { + "details": { + "additionalProperties": { + "type": "string" + }, + "description": "Additional structured details about this error.Keys define the failure items. 
Value describes the exception or details of the item.", + "type": "object" + } + }, + "type": "object" + }, + "GoogleCloudMetastoreV1HiveMetastoreVersion": { + "description": "A specification of a supported version of the Hive Metastore software.", + "id": "GoogleCloudMetastoreV1HiveMetastoreVersion", + "properties": { + "isDefault": { + "description": "Whether version will be chosen by the server if a metastore service is created with a HiveMetastoreConfig that omits the version.", + "type": "boolean" + }, + "version": { + "description": "The semantic version of the Hive Metastore software.", + "type": "string" + } + }, + "type": "object" + }, + "GoogleCloudMetastoreV1LocationMetadata": { + "description": "Metadata about the service in a location.", + "id": "GoogleCloudMetastoreV1LocationMetadata", + "properties": { + "customRegionMetadata": { + "description": "Possible configurations supported if the current region is a custom region.", + "items": { + "$ref": "GoogleCloudMetastoreV1CustomRegionMetadata" + }, + "type": "array" + }, + "multiRegionMetadata": { + "$ref": "GoogleCloudMetastoreV1MultiRegionMetadata", + "description": "The multi-region metadata if the current region is a multi-region." + }, + "supportedHiveMetastoreVersions": { + "description": "The versions of Hive Metastore that can be used when creating a new metastore service in this location. The server guarantees that exactly one HiveMetastoreVersion in the list will set is_default.", + "items": { + "$ref": "GoogleCloudMetastoreV1HiveMetastoreVersion" + }, + "type": "array" + } + }, + "type": "object" + }, + "GoogleCloudMetastoreV1MoveTableToDatabaseResponse": { + "description": "Response message for DataprocMetastore.MoveTableToDatabase.", + "id": "GoogleCloudMetastoreV1MoveTableToDatabaseResponse", + "properties": {}, + "type": "object" + }, + "GoogleCloudMetastoreV1MultiRegionMetadata": { + "description": "The metadata for the multi-region that includes the constituent regions. The metadata is only populated if the region is multi-region. For single region or custom dual region, it will be empty.", + "id": "GoogleCloudMetastoreV1MultiRegionMetadata", + "properties": { + "constituentRegions": { + "description": "The regions constituting the multi-region.", + "items": { + "type": "string" + }, + "type": "array" + } + }, + "type": "object" + }, + "GoogleCloudMetastoreV1OperationMetadata": { + "description": "Represents the metadata of a long-running operation.", + "id": "GoogleCloudMetastoreV1OperationMetadata", + "properties": { + "apiVersion": { + "description": "Output only. API version used to start the operation.", + "readOnly": true, + "type": "string" + }, + "createTime": { + "description": "Output only. The time the operation was created.", + "format": "google-datetime", + "readOnly": true, + "type": "string" + }, + "endTime": { + "description": "Output only. The time the operation finished running.", + "format": "google-datetime", + "readOnly": true, + "type": "string" + }, + "requestedCancellation": { + "description": "Output only. Identifies whether the caller has requested cancellation of the operation. Operations that have successfully been cancelled have google.longrunning.Operation.error value with a google.rpc.Status.code of 1, corresponding to Code.CANCELLED.", + "readOnly": true, + "type": "boolean" + }, + "statusMessage": { + "description": "Output only. Human-readable status of the operation, if any.", + "readOnly": true, + "type": "string" + }, + "target": { + "description": "Output only. 
Server-defined resource path for the target of the operation.", + "readOnly": true, + "type": "string" + }, + "verb": { + "description": "Output only. Name of the verb executed by the operation.", + "readOnly": true, + "type": "string" + } + }, + "type": "object" + }, + "GoogleCloudMetastoreV1QueryMetadataResponse": { + "description": "Response message for DataprocMetastore.QueryMetadata.", + "id": "GoogleCloudMetastoreV1QueryMetadataResponse", + "properties": { + "resultManifestUri": { + "description": "The manifest URI is link to a JSON instance in Cloud Storage. This instance manifests immediately along with QueryMetadataResponse. The content of the URI is not retriable until the long-running operation query against the metadata finishes.", + "type": "string" + } + }, + "type": "object" + }, + "GoogleCloudMetastoreV1alphaAlterMetadataResourceLocationResponse": { + "description": "Response message for DataprocMetastore.AlterMetadataResourceLocation.", + "id": "GoogleCloudMetastoreV1alphaAlterMetadataResourceLocationResponse", + "properties": {}, + "type": "object" + }, + "GoogleCloudMetastoreV1alphaCancelMigrationResponse": { + "description": "Response message for DataprocMetastore.CancelMigration.", + "id": "GoogleCloudMetastoreV1alphaCancelMigrationResponse", + "properties": { + "migrationExecution": { + "description": "The relative resource name of the migration execution, in the following form:projects/{project_number}/locations/{location_id}/services/{service_id}/migrationExecutions/{migration_execution_id}.", + "type": "string" + } + }, + "type": "object" + }, + "GoogleCloudMetastoreV1alphaCompleteMigrationResponse": { + "description": "Response message for DataprocMetastore.CompleteMigration.", + "id": "GoogleCloudMetastoreV1alphaCompleteMigrationResponse", + "properties": { + "migrationExecution": { + "description": "The relative resource name of the migration execution, in the following form:projects/{project_number}/locations/{location_id}/services/{service_id}/migrationExecutions/{migration_execution_id}.", + "type": "string" + } + }, + "type": "object" + }, + "GoogleCloudMetastoreV1alphaCustomRegionMetadata": { + "description": "Metadata about a custom region. This is only populated if the region is a custom region. For single/multi regions, it will be empty.", + "id": "GoogleCloudMetastoreV1alphaCustomRegionMetadata", + "properties": { + "optionalReadOnlyRegions": { + "description": "The read-only regions for this custom region.", + "items": { + "type": "string" + }, + "type": "array" + }, + "requiredReadWriteRegions": { + "description": "The read-write regions for this custom region.", + "items": { + "type": "string" + }, + "type": "array" + }, + "witnessRegion": { + "description": "The Spanner witness region for this custom region.", + "type": "string" + } + }, + "type": "object" + }, + "GoogleCloudMetastoreV1alphaErrorDetails": { + "description": "Error details in public error message for DataprocMetastore.QueryMetadata.", + "id": "GoogleCloudMetastoreV1alphaErrorDetails", + "properties": { + "details": { + "additionalProperties": { + "type": "string" + }, + "description": "Additional structured details about this error.Keys define the failure items. 
Value describes the exception or details of the item.", + "type": "object" + } + }, + "type": "object" + }, + "GoogleCloudMetastoreV1alphaHiveMetastoreVersion": { + "description": "A specification of a supported version of the Hive Metastore software.", + "id": "GoogleCloudMetastoreV1alphaHiveMetastoreVersion", + "properties": { + "isDefault": { + "description": "Whether version will be chosen by the server if a metastore service is created with a HiveMetastoreConfig that omits the version.", + "type": "boolean" + }, + "version": { + "description": "The semantic version of the Hive Metastore software.", + "type": "string" + } + }, + "type": "object" + }, + "GoogleCloudMetastoreV1alphaLocationMetadata": { + "description": "Metadata about the service in a location.", + "id": "GoogleCloudMetastoreV1alphaLocationMetadata", + "properties": { + "customRegionMetadata": { + "description": "Possible configurations supported if the current region is a custom region.", + "items": { + "$ref": "GoogleCloudMetastoreV1alphaCustomRegionMetadata" + }, + "type": "array" + }, + "multiRegionMetadata": { + "$ref": "GoogleCloudMetastoreV1alphaMultiRegionMetadata", + "description": "The multi-region metadata if the current region is a multi-region." + }, + "supportedHiveMetastoreVersions": { + "description": "The versions of Hive Metastore that can be used when creating a new metastore service in this location. The server guarantees that exactly one HiveMetastoreVersion in the list will set is_default.", + "items": { + "$ref": "GoogleCloudMetastoreV1alphaHiveMetastoreVersion" + }, + "type": "array" + } + }, + "type": "object" + }, + "GoogleCloudMetastoreV1alphaMoveTableToDatabaseResponse": { + "description": "Response message for DataprocMetastore.MoveTableToDatabase.", + "id": "GoogleCloudMetastoreV1alphaMoveTableToDatabaseResponse", + "properties": {}, + "type": "object" + }, + "GoogleCloudMetastoreV1alphaMultiRegionMetadata": { + "description": "The metadata for the multi-region that includes the constituent regions. The metadata is only populated if the region is multi-region. For single region or custom dual region, it will be empty.", + "id": "GoogleCloudMetastoreV1alphaMultiRegionMetadata", + "properties": { + "constituentRegions": { + "description": "The regions constituting the multi-region.", + "items": { + "type": "string" + }, + "type": "array" + } + }, + "type": "object" + }, + "GoogleCloudMetastoreV1alphaOperationMetadata": { + "description": "Represents the metadata of a long-running operation.", + "id": "GoogleCloudMetastoreV1alphaOperationMetadata", + "properties": { + "apiVersion": { + "description": "Output only. API version used to start the operation.", + "readOnly": true, + "type": "string" + }, + "createTime": { + "description": "Output only. The time the operation was created.", + "format": "google-datetime", + "readOnly": true, + "type": "string" + }, + "endTime": { + "description": "Output only. The time the operation finished running.", + "format": "google-datetime", + "readOnly": true, + "type": "string" + }, + "requestedCancellation": { + "description": "Output only. Identifies whether the caller has requested cancellation of the operation. Operations that have successfully been cancelled have google.longrunning.Operation.error value with a google.rpc.Status.code of 1, corresponding to Code.CANCELLED.", + "readOnly": true, + "type": "boolean" + }, + "statusMessage": { + "description": "Output only. 
Human-readable status of the operation, if any.", + "readOnly": true, + "type": "string" + }, + "target": { + "description": "Output only. Server-defined resource path for the target of the operation.", + "readOnly": true, + "type": "string" + }, + "verb": { + "description": "Output only. Name of the verb executed by the operation.", + "readOnly": true, + "type": "string" + } + }, + "type": "object" + }, + "GoogleCloudMetastoreV1alphaQueryMetadataResponse": { + "description": "Response message for DataprocMetastore.QueryMetadata.", + "id": "GoogleCloudMetastoreV1alphaQueryMetadataResponse", + "properties": { + "resultManifestUri": { + "description": "The manifest URI is link to a JSON instance in Cloud Storage. This instance manifests immediately along with QueryMetadataResponse. The content of the URI is not retriable until the long-running operation query against the metadata finishes.", + "type": "string" + } + }, + "type": "object" + }, + "GoogleCloudMetastoreV1betaAlterMetadataResourceLocationResponse": { + "description": "Response message for DataprocMetastore.AlterMetadataResourceLocation.", + "id": "GoogleCloudMetastoreV1betaAlterMetadataResourceLocationResponse", + "properties": {}, + "type": "object" + }, + "GoogleCloudMetastoreV1betaCancelMigrationResponse": { + "description": "Response message for DataprocMetastore.CancelMigration.", + "id": "GoogleCloudMetastoreV1betaCancelMigrationResponse", + "properties": { + "migrationExecution": { + "description": "The relative resource name of the migration execution, in the following form:projects/{project_number}/locations/{location_id}/services/{service_id}/migrationExecutions/{migration_execution_id}.", + "type": "string" + } + }, + "type": "object" + }, + "GoogleCloudMetastoreV1betaCompleteMigrationResponse": { + "description": "Response message for DataprocMetastore.CompleteMigration.", + "id": "GoogleCloudMetastoreV1betaCompleteMigrationResponse", + "properties": { + "migrationExecution": { + "description": "The relative resource name of the migration execution, in the following form:projects/{project_number}/locations/{location_id}/services/{service_id}/migrationExecutions/{migration_execution_id}.", + "type": "string" + } + }, + "type": "object" + }, + "GoogleCloudMetastoreV1betaCustomRegionMetadata": { + "description": "Metadata about a custom region. This is only populated if the region is a custom region. For single/multi regions, it will be empty.", + "id": "GoogleCloudMetastoreV1betaCustomRegionMetadata", + "properties": { + "optionalReadOnlyRegions": { + "description": "The read-only regions for this custom region.", + "items": { + "type": "string" + }, + "type": "array" + }, + "requiredReadWriteRegions": { + "description": "The read-write regions for this custom region.", + "items": { + "type": "string" + }, + "type": "array" + }, + "witnessRegion": { + "description": "The Spanner witness region for this custom region.", + "type": "string" + } + }, + "type": "object" + }, + "GoogleCloudMetastoreV1betaErrorDetails": { + "description": "Error details in public error message for DataprocMetastore.QueryMetadata.", + "id": "GoogleCloudMetastoreV1betaErrorDetails", + "properties": { + "details": { + "additionalProperties": { + "type": "string" + }, + "description": "Additional structured details about this error.Keys define the failure items. 
Value describes the exception or details of the item.", + "type": "object" + } + }, + "type": "object" + }, + "GoogleCloudMetastoreV1betaHiveMetastoreVersion": { + "description": "A specification of a supported version of the Hive Metastore software.", + "id": "GoogleCloudMetastoreV1betaHiveMetastoreVersion", + "properties": { + "isDefault": { + "description": "Whether version will be chosen by the server if a metastore service is created with a HiveMetastoreConfig that omits the version.", + "type": "boolean" + }, + "version": { + "description": "The semantic version of the Hive Metastore software.", + "type": "string" + } + }, + "type": "object" + }, + "GoogleCloudMetastoreV1betaLocationMetadata": { + "description": "Metadata about the service in a location.", + "id": "GoogleCloudMetastoreV1betaLocationMetadata", + "properties": { + "customRegionMetadata": { + "description": "Possible configurations supported if the current region is a custom region.", + "items": { + "$ref": "GoogleCloudMetastoreV1betaCustomRegionMetadata" + }, + "type": "array" + }, + "multiRegionMetadata": { + "$ref": "GoogleCloudMetastoreV1betaMultiRegionMetadata", + "description": "The multi-region metadata if the current region is a multi-region." + }, + "supportedHiveMetastoreVersions": { + "description": "The versions of Hive Metastore that can be used when creating a new metastore service in this location. The server guarantees that exactly one HiveMetastoreVersion in the list will set is_default.", + "items": { + "$ref": "GoogleCloudMetastoreV1betaHiveMetastoreVersion" + }, + "type": "array" + } + }, + "type": "object" + }, + "GoogleCloudMetastoreV1betaMoveTableToDatabaseResponse": { + "description": "Response message for DataprocMetastore.MoveTableToDatabase.", + "id": "GoogleCloudMetastoreV1betaMoveTableToDatabaseResponse", + "properties": {}, + "type": "object" + }, + "GoogleCloudMetastoreV1betaMultiRegionMetadata": { + "description": "The metadata for the multi-region that includes the constituent regions. The metadata is only populated if the region is multi-region. For single region or custom dual region, it will be empty.", + "id": "GoogleCloudMetastoreV1betaMultiRegionMetadata", + "properties": { + "constituentRegions": { + "description": "The regions constituting the multi-region.", + "items": { + "type": "string" + }, + "type": "array" + } + }, + "type": "object" + }, + "GoogleCloudMetastoreV1betaOperationMetadata": { + "description": "Represents the metadata of a long-running operation.", + "id": "GoogleCloudMetastoreV1betaOperationMetadata", + "properties": { + "apiVersion": { + "description": "Output only. API version used to start the operation.", + "readOnly": true, + "type": "string" + }, + "createTime": { + "description": "Output only. The time the operation was created.", + "format": "google-datetime", + "readOnly": true, + "type": "string" + }, + "endTime": { + "description": "Output only. The time the operation finished running.", + "format": "google-datetime", + "readOnly": true, + "type": "string" + }, + "requestedCancellation": { + "description": "Output only. Identifies whether the caller has requested cancellation of the operation. Operations that have successfully been cancelled have google.longrunning.Operation.error value with a google.rpc.Status.code of 1, corresponding to Code.CANCELLED.", + "readOnly": true, + "type": "boolean" + }, + "statusMessage": { + "description": "Output only. 
Human-readable status of the operation, if any.", + "readOnly": true, + "type": "string" + }, + "target": { + "description": "Output only. Server-defined resource path for the target of the operation.", + "readOnly": true, + "type": "string" + }, + "verb": { + "description": "Output only. Name of the verb executed by the operation.", + "readOnly": true, + "type": "string" + } + }, + "type": "object" + }, + "GoogleCloudMetastoreV1betaQueryMetadataResponse": { + "description": "Response message for DataprocMetastore.QueryMetadata.", + "id": "GoogleCloudMetastoreV1betaQueryMetadataResponse", + "properties": { + "resultManifestUri": { + "description": "The manifest URI is link to a JSON instance in Cloud Storage. This instance manifests immediately along with QueryMetadataResponse. The content of the URI is not retriable until the long-running operation query against the metadata finishes.", + "type": "string" + } + }, + "type": "object" + }, + "GoogleCloudMetastoreV2AlterMetadataResourceLocationRequest": { + "description": "Request message for DataprocMetastore.AlterMetadataResourceLocation.", + "id": "GoogleCloudMetastoreV2AlterMetadataResourceLocationRequest", + "properties": { + "locationUri": { + "description": "Required. The new location URI for the metadata resource.", + "type": "string" + }, + "resourceName": { + "description": "Required. The relative metadata resource name in the following format.databases/{database_id} or databases/{database_id}/tables/{table_id} or databases/{database_id}/tables/{table_id}/partitions/{partition_id}", + "type": "string" + } + }, + "type": "object" + }, + "GoogleCloudMetastoreV2AlterTablePropertiesRequest": { + "description": "Request message for DataprocMetastore.AlterTableProperties.", + "id": "GoogleCloudMetastoreV2AlterTablePropertiesRequest", + "properties": { + "properties": { + "additionalProperties": { + "type": "string" + }, + "description": "A map that describes the desired values to mutate. If update_mask is empty, the properties will not update. Otherwise, the properties only alters the value whose associated paths exist in the update mask", + "type": "object" + }, + "tableName": { + "description": "Required. The name of the table containing the properties you're altering in the following format.databases/{database_id}/tables/{table_id}", + "type": "string" + }, + "updateMask": { + "description": "A field mask that specifies the metadata table properties that are overwritten by the update. Fields specified in the update_mask are relative to the resource (not to the full request). A field is overwritten if it is in the mask.For example, given the target properties: properties { a: 1 b: 2 } And an update properties: properties { a: 2 b: 3 c: 4 } then if the field mask is:paths: \"properties.b\", \"properties.c\"then the result will be: properties { a: 1 b: 3 c: 4 } ", + "format": "google-fieldmask", + "type": "string" + } + }, + "type": "object" + }, + "GoogleCloudMetastoreV2AuxiliaryVersionConfig": { + "description": "Configuration information for the auxiliary service versions.", + "id": "GoogleCloudMetastoreV2AuxiliaryVersionConfig", + "properties": { + "configOverrides": { + "additionalProperties": { + "type": "string" + }, + "description": "A mapping of Hive metastore configuration key-value pairs to apply to the auxiliary Hive metastore (configured in hive-site.xml) in addition to the primary version's overrides. 
If keys are present in both the auxiliary version's overrides and the primary version's overrides, the value from the auxiliary version's overrides takes precedence.", + "type": "object" + }, + "endpoints": { + "description": "Output only. The list of endpoints used to access the auxiliary metastore service, includes version and region data.", + "items": { + "$ref": "GoogleCloudMetastoreV2Endpoint" + }, + "readOnly": true, + "type": "array" + }, + "version": { + "description": "The Hive metastore version of the auxiliary service. It must be less than the primary Hive metastore service's version.", + "type": "string" + } + }, + "type": "object" + }, + "GoogleCloudMetastoreV2Backup": { + "description": "The details of a backup resource.", + "id": "GoogleCloudMetastoreV2Backup", + "properties": { + "createTime": { + "description": "Output only. The time when the backup was started.", + "format": "google-datetime", + "readOnly": true, + "type": "string" + }, + "description": { + "description": "The description of the backup.", + "type": "string" + }, + "endTime": { + "description": "Output only. The time when the backup finished creating.", + "format": "google-datetime", + "readOnly": true, + "type": "string" + }, + "name": { + "description": "Immutable. The relative resource name of the backup, in the following form:projects/{project_number}/locations/{location_id}/services/{service_id}/backups/{backup_id}", + "type": "string" + }, + "restoringServices": { + "description": "Output only. Services that are restoring from the backup.", + "items": { + "type": "string" + }, + "readOnly": true, + "type": "array" + }, + "serviceRevision": { + "$ref": "GoogleCloudMetastoreV2Service", + "description": "Output only. The revision of the service at the time of backup.", + "readOnly": true + }, + "state": { + "description": "Output only. The current state of the backup.", + "enum": [ + "STATE_UNSPECIFIED", + "CREATING", + "DELETING", + "ACTIVE", + "FAILED", + "RESTORING" + ], + "enumDescriptions": [ + "The state of the backup is unknown.", + "The backup is being created.", + "The backup is being deleted.", + "The backup is active and ready to use.", + "The backup failed.", + "The backup is being restored." + ], + "readOnly": true, + "type": "string" + } + }, + "type": "object" + }, + "GoogleCloudMetastoreV2DataCatalogConfig": { + "description": "Specifies how metastore metadata should be integrated with the Data Catalog service.", + "id": "GoogleCloudMetastoreV2DataCatalogConfig", + "properties": { + "enabled": { + "description": "Optional. Defines whether the metastore metadata should be synced to Data Catalog. The default value is to disable syncing metastore metadata to Data Catalog.", + "type": "boolean" + } + }, + "type": "object" + }, + "GoogleCloudMetastoreV2DatabaseDump": { + "description": "A specification of the location of and metadata about a database dump from a relational database management system.", + "id": "GoogleCloudMetastoreV2DatabaseDump", + "properties": { + "gcsUri": { + "description": "Required. A Cloud Storage object or folder URI that specifies the source from which to import metadata. It must begin with gs://.", + "type": "string" + }, + "type": { + "description": "Optional. The type of the database dump. If unspecified, defaults to MYSQL.", + "enum": [ + "TYPE_UNSPECIFIED", + "MYSQL", + "AVRO" + ], + "enumDescriptions": [ + "The type of the database dump is unknown.", + "Database dump is a MySQL dump file.", + "Database dump contains Avro files." 
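As a concrete illustration of how the GoogleCloudMetastoreV2DatabaseDump and GoogleCloudMetastoreV2ImportMetadataRequest schemas in this discovery document are used together with the services.importMetadata method, here is a minimal, hedged sketch using the discovery-based Python client. It assumes the metastore v2 discovery document added in this diff is resolvable by `build()`, that Application Default Credentials are configured, and that the project, location, service ID, and gs:// URI are placeholders, not values from this change.

```python
# Hypothetical sketch: import a MySQL dump into a Dataproc Metastore v2 service.
# Resource/method names follow metastore.projects.locations.services.importMetadata
# from this discovery document; every identifier below is a placeholder assumption.
from googleapiclient.discovery import build

metastore = build("metastore", "v2")  # assumes the v2 discovery doc is published/resolvable

service_name = "projects/my-project/locations/us-central1/services/my-service"

operation = (
    metastore.projects()
    .locations()
    .services()
    .importMetadata(
        name=service_name,
        body={
            # GoogleCloudMetastoreV2ImportMetadataRequest
            "databaseDump": {
                # GoogleCloudMetastoreV2DatabaseDump: gcsUri must begin with gs://;
                # type defaults to MYSQL when unspecified.
                "gcsUri": "gs://my-bucket/exports/hive-dump.sql",
                "type": "MYSQL",
            },
            "description": "Initial metadata import",
        },
    )
    .execute()
)

# importMetadata returns a GoogleLongrunningOperation; poll its name until
# done is true, at which point either error or response is populated.
print(operation["name"])
```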
+ ], + "type": "string" + } + }, + "type": "object" + }, + "GoogleCloudMetastoreV2EncryptionConfig": { + "description": "Encryption settings for the service.", + "id": "GoogleCloudMetastoreV2EncryptionConfig", + "properties": {}, + "type": "object" + }, + "GoogleCloudMetastoreV2Endpoint": { + "description": "An endpoint used to access the metastore service.", + "id": "GoogleCloudMetastoreV2Endpoint", + "properties": { + "endpointUri": { + "description": "Output only. The URI of the endpoint used to access the metastore service.", + "readOnly": true, + "type": "string" + }, + "region": { + "description": "Output only. The region where the endpoint is located.", + "readOnly": true, + "type": "string" + } + }, + "type": "object" + }, + "GoogleCloudMetastoreV2ExportMetadataRequest": { + "description": "Request message for DataprocMetastore.ExportMetadata.", + "id": "GoogleCloudMetastoreV2ExportMetadataRequest", + "properties": { + "databaseDumpType": { + "description": "Optional. The type of the database dump. If unspecified, defaults to MYSQL.", + "enum": [ + "TYPE_UNSPECIFIED", + "MYSQL", + "AVRO" + ], + "enumDescriptions": [ + "The type of the database dump is unknown.", + "Database dump is a MySQL dump file.", + "Database dump contains Avro files." + ], + "type": "string" + }, + "destinationGcsFolder": { + "description": "A Cloud Storage URI of a folder, in the format gs:///. A sub-folder containing exported files will be created below it.", + "type": "string" + }, + "requestId": { + "description": "Optional. A request ID. Specify a unique request ID to allow the server to ignore the request if it has completed. The server will ignore subsequent requests that provide a duplicate request ID for at least 60 minutes after the first request.For example, if an initial request times out, followed by another request with the same request ID, the server ignores the second request to prevent the creation of duplicate commitments.The request ID must be a valid UUID (https://en.wikipedia.org/wiki/Universally_unique_identifier#Format). A zero UUID (00000000-0000-0000-0000-000000000000) is not supported.", + "type": "string" + } + }, + "type": "object" + }, + "GoogleCloudMetastoreV2HiveMetastoreConfig": { + "description": "Specifies configuration information specific to running Hive metastore software as the metastore service.", + "id": "GoogleCloudMetastoreV2HiveMetastoreConfig", + "properties": { + "auxiliaryVersions": { + "additionalProperties": { + "$ref": "GoogleCloudMetastoreV2AuxiliaryVersionConfig" + }, + "description": "Optional. A mapping of Hive metastore version to the auxiliary version configuration. When specified, a secondary Hive metastore service is created along with the primary service. All auxiliary versions must be less than the service's primary version. The key is the auxiliary service name and it must match the regular expression a-z?. This means that the first character must be a lowercase letter, and all the following characters must be hyphens, lowercase letters, or digits, except the last character, which cannot be a hyphen.", + "type": "object" + }, + "configOverrides": { + "additionalProperties": { + "type": "string" + }, + "description": "Optional. A mapping of Hive metastore configuration key-value pairs to apply to the Hive metastore (configured in hive-site.xml). The mappings override system defaults (some keys cannot be overridden). 
These overrides are also applied to auxiliary versions and can be further customized in the auxiliary version's AuxiliaryVersionConfig.", + "type": "object" + }, + "endpointProtocol": { + "description": "Optional. The protocol to use for the metastore service endpoint. If unspecified, defaults to GRPC.", + "enum": [ + "ENDPOINT_PROTOCOL_UNSPECIFIED", + "THRIFT", + "GRPC" + ], + "enumDescriptions": [ + "The protocol is not set.", + "Use the legacy Apache Thrift protocol for the metastore service endpoint.", + "Use the modernized gRPC protocol for the metastore service endpoint." + ], + "type": "string" + }, + "version": { + "description": "Immutable. The Hive metastore schema version.", + "type": "string" + } + }, + "type": "object" + }, + "GoogleCloudMetastoreV2ImportMetadataRequest": { + "description": "Request message for DataprocMetastore.CreateMetadataImport.", + "id": "GoogleCloudMetastoreV2ImportMetadataRequest", + "properties": { + "databaseDump": { + "$ref": "GoogleCloudMetastoreV2DatabaseDump", + "description": "Immutable. A database dump from a pre-existing metastore's database." + }, + "description": { + "description": "Optional. The description of the metadata import.", + "type": "string" + }, + "requestId": { + "description": "Optional. A request ID. Specify a unique request ID to allow the server to ignore the request if it has completed. The server will ignore subsequent requests that provide a duplicate request ID for at least 60 minutes after the first request.For example, if an initial request times out, followed by another request with the same request ID, the server ignores the second request to prevent the creation of duplicate commitments.The request ID must be a valid UUID (https://en.wikipedia.org/wiki/Universally_unique_identifier#Format). A zero UUID (00000000-0000-0000-0000-000000000000) is not supported.", + "type": "string" + } + }, + "type": "object" + }, + "GoogleCloudMetastoreV2LatestBackup": { + "description": "The details of the latest scheduled backup.", + "id": "GoogleCloudMetastoreV2LatestBackup", + "properties": { + "backupId": { + "description": "Output only. The ID of an in-progress scheduled backup. Empty if no backup is in progress.", + "readOnly": true, + "type": "string" + }, + "duration": { + "description": "Output only. The duration of the backup completion.", + "format": "google-duration", + "readOnly": true, + "type": "string" + }, + "startTime": { + "description": "Output only. The time when the backup was started.", + "format": "google-datetime", + "readOnly": true, + "type": "string" + }, + "state": { + "description": "Output only. The current state of the backup.", + "enum": [ + "STATE_UNSPECIFIED", + "IN_PROGRESS", + "SUCCEEDED", + "FAILED" + ], + "enumDescriptions": [ + "The state of the backup is unknown.", + "The backup is in progress.", + "The backup completed.", + "The backup failed." + ], + "readOnly": true, + "type": "string" + } + }, + "type": "object" + }, + "GoogleCloudMetastoreV2ListBackupsResponse": { + "description": "Response message for DataprocMetastore.ListBackups.", + "id": "GoogleCloudMetastoreV2ListBackupsResponse", + "properties": { + "backups": { + "description": "The backups of the specified service.", + "items": { + "$ref": "GoogleCloudMetastoreV2Backup" + }, + "type": "array" + }, + "nextPageToken": { + "description": "A token that can be sent as page_token to retrieve the next page. 
If this field is omitted, there are no subsequent pages.", + "type": "string" + }, + "unreachable": { + "description": "Locations that could not be reached.", + "items": { + "type": "string" + }, + "type": "array" + } + }, + "type": "object" + }, + "GoogleCloudMetastoreV2ListServicesResponse": { + "description": "Response message for DataprocMetastore.ListServices.", + "id": "GoogleCloudMetastoreV2ListServicesResponse", + "properties": { + "nextPageToken": { + "description": "A token that can be sent as page_token to retrieve the next page. If this field is omitted, there are no subsequent pages.", + "type": "string" + }, + "services": { + "description": "The services in the specified location.", + "items": { + "$ref": "GoogleCloudMetastoreV2Service" + }, + "type": "array" + }, + "unreachable": { + "description": "Locations that could not be reached.", + "items": { + "type": "string" + }, + "type": "array" + } + }, + "type": "object" + }, + "GoogleCloudMetastoreV2MetadataIntegration": { + "description": "Specifies how metastore metadata should be integrated with external services.", + "id": "GoogleCloudMetastoreV2MetadataIntegration", + "properties": { + "dataCatalogConfig": { + "$ref": "GoogleCloudMetastoreV2DataCatalogConfig", + "description": "Optional. The integration config for the Data Catalog service." + } + }, + "type": "object" + }, + "GoogleCloudMetastoreV2MoveTableToDatabaseRequest": { + "description": "Request message for DataprocMetastore.MoveTableToDatabase.", + "id": "GoogleCloudMetastoreV2MoveTableToDatabaseRequest", + "properties": { + "dbName": { + "description": "Required. The name of the database where the table resides.", + "type": "string" + }, + "destinationDbName": { + "description": "Required. The name of the database where the table should be moved.", + "type": "string" + }, + "tableName": { + "description": "Required. The name of the table to be moved.", + "type": "string" + } + }, + "type": "object" + }, + "GoogleCloudMetastoreV2QueryMetadataRequest": { + "description": "Request message for DataprocMetastore.QueryMetadata.", + "id": "GoogleCloudMetastoreV2QueryMetadataRequest", + "properties": { + "query": { + "description": "Required. A read-only SQL query to execute against the metadata database. The query cannot change or mutate the data.", + "type": "string" + } + }, + "type": "object" + }, + "GoogleCloudMetastoreV2RestoreServiceRequest": { + "description": "Request message for DataprocMetastore.Restore.", + "id": "GoogleCloudMetastoreV2RestoreServiceRequest", + "properties": { + "backup": { + "description": "Optional. The relative resource name of the metastore service backup to restore from, in the following form:projects/{project_id}/locations/{location_id}/services/{service_id}/backups/{backup_id}. Mutually exclusive with backup_location, and exactly one of the two must be set.", + "type": "string" + }, + "backupLocation": { + "description": "Optional. A Cloud Storage URI specifying the location of the backup artifacts, namely - backup avro files under \"avro/\", backup_metastore.json and service.json, in the following form:gs://. Mutually exclusive with backup, and exactly one of the two must be set.", + "type": "string" + }, + "requestId": { + "description": "Optional. A request ID. Specify a unique request ID to allow the server to ignore the request if it has completed. 
The server will ignore subsequent requests that provide a duplicate request ID for at least 60 minutes after the first request.For example, if an initial request times out, followed by another request with the same request ID, the server ignores the second request to prevent the creation of duplicate commitments.The request ID must be a valid UUID (https://en.wikipedia.org/wiki/Universally_unique_identifier#Format). A zero UUID (00000000-0000-0000-0000-000000000000) is not supported.", + "type": "string" + }, + "restoreType": { + "description": "Optional. The type of restore. If unspecified, defaults to METADATA_ONLY.", + "enum": [ + "RESTORE_TYPE_UNSPECIFIED", + "FULL", + "METADATA_ONLY" + ], + "enumDescriptions": [ + "The restore type is unknown.", + "The service's metadata and configuration are restored.", + "Only the service's metadata is restored." + ], + "type": "string" + } + }, + "type": "object" + }, + "GoogleCloudMetastoreV2ScalingConfig": { + "description": "Represents the scaling configuration of a metastore service.", + "id": "GoogleCloudMetastoreV2ScalingConfig", + "properties": { + "scalingFactor": { + "description": "Optional. Scaling factor from 1 to 5, increments of 1.", + "format": "int32", + "type": "integer" + } + }, + "type": "object" + }, + "GoogleCloudMetastoreV2ScheduledBackup": { + "description": "This specifies the configuration of scheduled backup.", + "id": "GoogleCloudMetastoreV2ScheduledBackup", + "properties": { + "backupLocation": { + "description": "Optional. A Cloud Storage URI of a folder, in the format gs:///. A sub-folder containing backup files will be stored below it.", + "type": "string" + }, + "cronSchedule": { + "description": "Optional. The scheduled interval in Cron format, see https://en.wikipedia.org/wiki/Cron The default is empty: scheduled backup is not enabled. Must be specified to enable scheduled backups.", + "type": "string" + }, + "enabled": { + "description": "Optional. Defines whether the scheduled backup is enabled. The default value is false.", + "type": "boolean" + }, + "latestBackup": { + "$ref": "GoogleCloudMetastoreV2LatestBackup", + "description": "Output only. The details of the latest scheduled backup.", + "readOnly": true + }, + "nextScheduledTime": { + "description": "Output only. The time when the next backups execution is scheduled to start.", + "format": "google-datetime", + "readOnly": true, + "type": "string" + }, + "timeZone": { + "description": "Optional. Specifies the time zone to be used when interpreting cron_schedule. Must be a time zone name from the time zone database (https://en.wikipedia.org/wiki/List_of_tz_database_time_zones), e.g. America/Los_Angeles or Africa/Abidjan. If left unspecified, the default is UTC.", + "type": "string" + } + }, + "type": "object" + }, + "GoogleCloudMetastoreV2Service": { + "description": "A managed metastore service that serves metadata queries.", + "id": "GoogleCloudMetastoreV2Service", + "properties": { + "createTime": { + "description": "Output only. The time when the metastore service was created.", + "format": "google-datetime", + "readOnly": true, + "type": "string" + }, + "encryptionConfig": { + "$ref": "GoogleCloudMetastoreV2EncryptionConfig", + "description": "Immutable. Information used to configure the Dataproc Metastore service to encrypt customer data at rest. Cannot be updated." + }, + "endpoints": { + "description": "Output only. 
The list of endpoints used to access the metastore service.", + "items": { + "$ref": "GoogleCloudMetastoreV2Endpoint" + }, + "readOnly": true, + "type": "array" + }, + "hiveMetastoreConfig": { + "$ref": "GoogleCloudMetastoreV2HiveMetastoreConfig", + "description": "Configuration information specific to running Hive metastore software as the metastore service." + }, + "labels": { + "additionalProperties": { + "type": "string" + }, + "description": "User-defined labels for the metastore service.", + "type": "object" + }, + "metadataIntegration": { + "$ref": "GoogleCloudMetastoreV2MetadataIntegration", + "description": "Optional. The setting that defines how metastore metadata should be integrated with external services and systems." + }, + "name": { + "description": "Immutable. The relative resource name of the metastore service, in the following format:projects/{project_number}/locations/{location_id}/services/{service_id}.", + "type": "string" + }, + "scalingConfig": { + "$ref": "GoogleCloudMetastoreV2ScalingConfig", + "description": "Optional. Scaling configuration of the metastore service." + }, + "scheduledBackup": { + "$ref": "GoogleCloudMetastoreV2ScheduledBackup", + "description": "Optional. The configuration of scheduled backup for the metastore service." + }, + "state": { + "description": "Output only. The current state of the metastore service.", + "enum": [ + "STATE_UNSPECIFIED", + "CREATING", + "ACTIVE", + "SUSPENDING", + "SUSPENDED", + "UPDATING", + "DELETING", + "ERROR" + ], + "enumDescriptions": [ + "The state of the metastore service is unknown.", + "The metastore service is in the process of being created.", + "The metastore service is running and ready to serve queries.", + "The metastore service is entering suspension. Its query-serving availability may cease unexpectedly.", + "The metastore service is suspended and unable to serve queries.", + "The metastore service is being updated. It remains usable but cannot accept additional update requests or be deleted at this time.", + "The metastore service is undergoing deletion. It cannot be used.", + "The metastore service has encountered an error and cannot be used. The metastore service should be deleted." + ], + "readOnly": true, + "type": "string" + }, + "stateMessage": { + "description": "Output only. Additional information about the current state of the metastore service, if available.", + "readOnly": true, + "type": "string" + }, + "uid": { + "description": "Output only. The globally unique resource identifier of the metastore service.", + "readOnly": true, + "type": "string" + }, + "updateTime": { + "description": "Output only. The time when the metastore service was last updated.", + "format": "google-datetime", + "readOnly": true, + "type": "string" + }, + "warehouseGcsUri": { + "description": "Required. A Cloud Storage URI (starting with gs://) that specifies the default warehouse directory of the Hive Metastore.", + "type": "string" + } + }, + "type": "object" + }, + "GoogleLongrunningOperation": { + "description": "This resource represents a long-running operation that is the result of a network API call.", + "id": "GoogleLongrunningOperation", + "properties": { + "done": { + "description": "If the value is false, it means the operation is still in progress. If true, the operation is completed, and either error or response is available.", + "type": "boolean" + }, + "error": { + "$ref": "GoogleRpcStatus", + "description": "The error result of the operation in case of failure or cancellation." 
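The GoogleCloudMetastoreV2Service schema above, combined with the services.create method defined earlier in this document, can be exercised as in the following hedged sketch. All project-specific values (project, region, service ID, warehouse bucket, Hive version) are placeholders; the requestId shown only illustrates the UUID-based idempotency behavior described on the create, delete, and patch methods.

```python
# Hypothetical sketch: create a metastore v2 service with an idempotent request ID.
# Field names mirror GoogleCloudMetastoreV2Service in this discovery document;
# identifiers, bucket, and Hive version are placeholder assumptions.
import uuid

from googleapiclient.discovery import build

metastore = build("metastore", "v2")  # assumes the v2 discovery doc is resolvable

parent = "projects/my-project/locations/us-central1"

operation = (
    metastore.projects()
    .locations()
    .services()
    .create(
        parent=parent,
        # serviceId: 2-63 chars, begins with a letter, ends with a letter or number.
        serviceId="my-service",
        # Valid non-zero UUID: retries carrying the same ID are ignored for >= 60 minutes.
        requestId=str(uuid.uuid4()),
        body={
            # warehouseGcsUri is required and must start with gs://.
            "warehouseGcsUri": "gs://my-warehouse-bucket/hive-warehouse",
            "hiveMetastoreConfig": {"version": "3.1.2"},  # immutable schema version (placeholder)
            "scalingConfig": {"scalingFactor": 1},        # integer from 1 to 5
        },
    )
    .execute()
)

print(operation["name"])  # GoogleLongrunningOperation name to poll until done
```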
+ }, + "metadata": { + "additionalProperties": { + "description": "Properties of the object. Contains field @type with type URL.", + "type": "any" + }, + "description": "Service-specific metadata associated with the operation. It typically contains progress information and common metadata such as create time. Some services might not provide such metadata. Any method that returns a long-running operation should document the metadata type, if any.", + "type": "object" + }, + "name": { + "description": "The server-assigned name, which is only unique within the same service that originally returns it. If you use the default HTTP mapping, the name should be a resource name ending with operations/{unique_id}.", + "type": "string" + }, + "response": { + "additionalProperties": { + "description": "Properties of the object. Contains field @type with type URL.", + "type": "any" + }, + "description": "The normal, successful response of the operation. If the original method returns no data on success, such as Delete, the response is google.protobuf.Empty. If the original method is standard Get/Create/Update, the response should be the resource. For other methods, the response should have the type XxxResponse, where Xxx is the original method name. For example, if the original method name is TakeSnapshot(), the inferred response type is TakeSnapshotResponse.", + "type": "object" + } + }, + "type": "object" + }, + "GoogleRpcStatus": { + "description": "The Status type defines a logical error model that is suitable for different programming environments, including REST APIs and RPC APIs. It is used by gRPC (https://github.com/grpc). Each Status message contains three pieces of data: error code, error message, and error details.You can find out more about this error model and how to work with it in the API Design Guide (https://cloud.google.com/apis/design/errors).", + "id": "GoogleRpcStatus", + "properties": { + "code": { + "description": "The status code, which should be an enum value of google.rpc.Code.", + "format": "int32", + "type": "integer" + }, + "details": { + "description": "A list of messages that carry the error details. There is a common set of message types for APIs to use.", + "items": { + "additionalProperties": { + "description": "Properties of the object. Contains field @type with type URL.", + "type": "any" + }, + "type": "object" + }, + "type": "array" + }, + "message": { + "description": "A developer-facing error message, which should be in English. Any user-facing error message should be localized and sent in the google.rpc.Status.details field, or localized by the client.", + "type": "string" + } + }, + "type": "object" + } + } +} diff --git a/discovery/googleapis/migrationcenter__v1.json b/discovery/googleapis/migrationcenter__v1.json index 0e4555832..338ce733a 100644 --- a/discovery/googleapis/migrationcenter__v1.json +++ b/discovery/googleapis/migrationcenter__v1.json @@ -25,7 +25,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20241001", + "revision": "20241205", "rootUrl": "https://migrationcenter.googleapis.com/", "servicePath": "", "title": "Migration Center API", @@ -1405,7 +1405,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. 
Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "migrationcenter.projects.locations.operations.cancel", @@ -2544,6 +2544,16 @@ "readOnly": true, "type": "string" }, + "databaseDeploymentDetails": { + "$ref": "DatabaseDeploymentDetails", + "description": "Output only. Asset information specific for database deployments.", + "readOnly": true + }, + "databaseDetails": { + "$ref": "DatabaseDetails", + "description": "Output only. Asset information specific for logical databases.", + "readOnly": true + }, "insightList": { "$ref": "InsightList", "description": "Output only. The list of insights associated with the asset.", @@ -2579,6 +2589,11 @@ "readOnly": true, "type": "array" }, + "title": { + "description": "Output only. Server generated human readable name of the asset.", + "readOnly": true, + "type": "string" + }, "updateTime": { "description": "Output only. The timestamp when the asset was last updated.", "format": "google-datetime", @@ -2619,6 +2634,14 @@ ], "type": "string" }, + "databaseDeploymentDetails": { + "$ref": "DatabaseDeploymentDetails", + "description": "Asset information specific for database deployments." + }, + "databaseDetails": { + "$ref": "DatabaseDetails", + "description": "Asset information specific for logical databases." + }, "labels": { "additionalProperties": { "type": "string" @@ -2865,7 +2888,7 @@ "PERSISTENT_DISK_TYPE_SSD" ], "enumDescriptions": [ - "Unspecified (default value). Selecting this value allows the system to use any disk type according to reported usage. This a good value to start with.", + "Unspecified. Fallback to default value based on context.", "Standard HDD Persistent Disk.", "Balanced Persistent Disk.", "SSD Persistent Disk." @@ -2935,7 +2958,7 @@ "PERSISTENT_DISK_TYPE_SSD" ], "enumDescriptions": [ - "Unspecified (default value). Selecting this value allows the system to use any disk type according to reported usage. This a good value to start with.", + "Unspecified. Fallback to default value based on context.", "Standard HDD Persistent Disk.", "Balanced Persistent Disk.", "SSD Persistent Disk." @@ -3002,7 +3025,15 @@ "properties": { "iops": { "$ref": "DailyResourceUsageAggregationStats", - "description": "Disk I/O operations per second." + "description": "Optional. Disk I/O operations per second." + }, + "readIops": { + "$ref": "DailyResourceUsageAggregationStats", + "description": "Optional. Disk read I/O operations per second." 
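Tying the GoogleLongrunningOperation and GoogleRpcStatus schemas to the Operations.cancel semantics quoted above: a successfully cancelled long-running operation is not deleted, but comes back with done set to true and an error whose google.rpc.Status.code is 1 (Code.CANCELLED), roughly in the following shape. The operation name and error message below are hypothetical, for illustration only:

    {
      "name": "operations/example-operation-id",
      "done": true,
      "error": {
        "code": 1,
        "message": "The operation was cancelled."
      }
    }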
+ }, + "writeIops": { + "$ref": "DailyResourceUsageAggregationStats", + "description": "Optional. Disk write I/O operations per second." } }, "type": "object" @@ -3060,6 +3091,284 @@ }, "type": "object" }, + "DatabaseDeploymentDetails": { + "description": "The details of a database deployment asset.", + "id": "DatabaseDeploymentDetails", + "properties": { + "aggregatedStats": { + "$ref": "DatabaseDeploymentDetailsAggregatedStats", + "description": "Output only. Aggregated stats for the database deployment.", + "readOnly": true + }, + "edition": { + "description": "Optional. The database deployment edition.", + "type": "string" + }, + "generatedId": { + "description": "Optional. The database deployment generated ID.", + "type": "string" + }, + "manualUniqueId": { + "description": "Optional. A manual unique ID set by the user.", + "type": "string" + }, + "mysql": { + "$ref": "MysqlDatabaseDeployment", + "description": "Optional. Details of a MYSQL database deployment." + }, + "postgresql": { + "$ref": "PostgreSqlDatabaseDeployment", + "description": "Optional. Details of a PostgreSQL database deployment." + }, + "sqlServer": { + "$ref": "SqlServerDatabaseDeployment", + "description": "Optional. Details of a Microsoft SQL Server database deployment." + }, + "topology": { + "$ref": "DatabaseDeploymentTopology", + "description": "Optional. Details of the database deployment topology." + }, + "version": { + "description": "Optional. The database deployment version.", + "type": "string" + } + }, + "type": "object" + }, + "DatabaseDeploymentDetailsAggregatedStats": { + "description": "Aggregated stats for the database deployment.", + "id": "DatabaseDeploymentDetailsAggregatedStats", + "properties": { + "databaseCount": { + "description": "Output only. The number of databases in the deployment.", + "format": "int32", + "readOnly": true, + "type": "integer" + } + }, + "type": "object" + }, + "DatabaseDeploymentTopology": { + "description": "Details of database deployment's topology.", + "id": "DatabaseDeploymentTopology", + "properties": { + "coreCount": { + "description": "Optional. Number of total logical cores.", + "format": "int32", + "type": "integer" + }, + "coreLimit": { + "description": "Optional. Number of total logical cores limited by db deployment.", + "format": "int32", + "type": "integer" + }, + "diskAllocatedBytes": { + "description": "Optional. Disk allocated in bytes.", + "format": "int64", + "type": "string" + }, + "diskUsedBytes": { + "description": "Optional. Disk used in bytes.", + "format": "int64", + "type": "string" + }, + "instances": { + "description": "Optional. List of database instances.", + "items": { + "$ref": "DatabaseInstance" + }, + "type": "array" + }, + "memoryBytes": { + "description": "Optional. Total memory in bytes.", + "format": "int64", + "type": "string" + }, + "memoryLimitBytes": { + "description": "Optional. Total memory in bytes limited by db deployment.", + "format": "int64", + "type": "string" + }, + "physicalCoreCount": { + "description": "Optional. Number of total physical cores.", + "format": "int32", + "type": "integer" + }, + "physicalCoreLimit": { + "description": "Optional. Number of total physical cores limited by db deployment.", + "format": "int32", + "type": "integer" + } + }, + "type": "object" + }, + "DatabaseDetails": { + "description": "Details of a logical database.", + "id": "DatabaseDetails", + "properties": { + "allocatedStorageBytes": { + "description": "Optional. 
The allocated storage for the database in bytes.", + "format": "int64", + "type": "string" + }, + "databaseName": { + "description": "Required. The name of the database.", + "type": "string" + }, + "parentDatabaseDeployment": { + "$ref": "DatabaseDetailsParentDatabaseDeployment", + "description": "Required. The parent database deployment that contains the logical database." + }, + "schemas": { + "description": "Optional. The database schemas.", + "items": { + "$ref": "DatabaseSchema" + }, + "type": "array" + } + }, + "type": "object" + }, + "DatabaseDetailsParentDatabaseDeployment": { + "description": "The identifiers of the parent database deployment.", + "id": "DatabaseDetailsParentDatabaseDeployment", + "properties": { + "generatedId": { + "description": "Optional. The parent database deployment generated ID.", + "type": "string" + }, + "manualUniqueId": { + "description": "Optional. The parent database deployment optional manual unique ID set by the user.", + "type": "string" + } + }, + "type": "object" + }, + "DatabaseInstance": { + "description": "Details of a database instance.", + "id": "DatabaseInstance", + "properties": { + "instanceName": { + "description": "Optional. The instance's name.", + "type": "string" + }, + "network": { + "$ref": "DatabaseInstanceNetwork", + "description": "Optional. Networking details." + }, + "role": { + "description": "Optional. The instance role in the database engine.", + "enum": [ + "ROLE_UNSPECIFIED", + "PRIMARY", + "SECONDARY", + "ARBITER" + ], + "enumDescriptions": [ + "Unspecified.", + "Primary.", + "Secondary.", + "Arbiter." + ], + "type": "string" + } + }, + "type": "object" + }, + "DatabaseInstanceNetwork": { + "description": "Network details of a database instance.", + "id": "DatabaseInstanceNetwork", + "properties": { + "hostNames": { + "description": "Optional. The instance's host names.", + "items": { + "type": "string" + }, + "type": "array" + }, + "ipAddresses": { + "description": "Optional. The instance's IP addresses.", + "items": { + "type": "string" + }, + "type": "array" + }, + "primaryMacAddress": { + "description": "Optional. The instance's primary MAC address.", + "type": "string" + } + }, + "type": "object" + }, + "DatabaseObjects": { + "description": "Details of a group of database objects.", + "id": "DatabaseObjects", + "properties": { + "category": { + "description": "Optional. The category of the objects.", + "enum": [ + "CATEGORY_UNSPECIFIED", + "TABLE", + "INDEX", + "CONSTRAINTS", + "VIEWS", + "SOURCE_CODE", + "OTHER" + ], + "enumDescriptions": [ + "Unspecified type.", + "Table.", + "Index.", + "Constraints.", + "Views.", + "Source code, e.g. procedures.", + "Uncategorized objects." + ], + "type": "string" + }, + "count": { + "description": "Optional. The number of objects.", + "format": "int64", + "type": "string" + } + }, + "type": "object" + }, + "DatabaseSchema": { + "description": "Details of a database schema.", + "id": "DatabaseSchema", + "properties": { + "mysql": { + "$ref": "MySqlSchemaDetails", + "description": "Optional. Details of a Mysql schema." + }, + "objects": { + "description": "Optional. List of details of objects by category.", + "items": { + "$ref": "DatabaseObjects" + }, + "type": "array" + }, + "postgresql": { + "$ref": "PostgreSqlSchemaDetails", + "description": "Optional. Details of a PostgreSql schema." + }, + "schemaName": { + "description": "Required. The name of the schema.", + "type": "string" + }, + "sqlServer": { + "$ref": "SqlServerSchemaDetails", + "description": "Optional. 
Details of a SqlServer schema." + }, + "tablesSizeBytes": { + "description": "Optional. The total size of tables in bytes.", + "format": "int64", + "type": "string" + } + }, + "type": "object" + }, "Date": { "description": "Represents a whole or partial calendar date, such as a birthday. The time of day and time zone are either specified elsewhere or are insignificant. The date is relative to the Gregorian Calendar. This can represent one of the following: * A full date, with non-zero year, month, and day values. * A month and day, with a zero year (for example, an anniversary). * A year on its own, with a zero month and a zero day. * A year and month, with a zero day (for example, a credit card expiration date). Related types: * google.type.TimeOfDay * google.type.DateTime * google.protobuf.Timestamp", "id": "Date", @@ -3313,7 +3622,17 @@ "id": "DiskUsageSample", "properties": { "averageIops": { - "description": "Average IOPS sampled over a short window. Must be non-negative.", + "description": "Optional. Average IOPS sampled over a short window. Must be non-negative. Must be equal to the sum of read and write if one of them is positive. if both read and write are zero they are ignored.", + "format": "float", + "type": "number" + }, + "averageReadIops": { + "description": "Optional. Average read IOPS sampled over a short window. Must be non-negative.", + "format": "float", + "type": "number" + }, + "averageWriteIops": { + "description": "Optional. Average write IOPS sampled over a short window. Must be non-negative.", "format": "float", "type": "number" } @@ -3819,7 +4138,8 @@ "IMPORT_JOB_FORMAT_RVTOOLS_CSV", "IMPORT_JOB_FORMAT_EXPORTED_AWS_CSV", "IMPORT_JOB_FORMAT_EXPORTED_AZURE_CSV", - "IMPORT_JOB_FORMAT_STRATOZONE_CSV" + "IMPORT_JOB_FORMAT_STRATOZONE_CSV", + "IMPORT_JOB_FORMAT_DATABASE_ZIP" ], "enumDescriptions": [ "Default value.", @@ -3827,7 +4147,8 @@ "RVTools format (CSV).", "CSV format exported from AWS using the AWS collection script.", "CSV format exported from Azure using the Azure collection script.", - "CSV format created manually and following the StratoZone format. For more information, see Manually create and upload data tables." + "CSV format created manually and following the StratoZone format. For more information, see Manually create and upload data tables.", + "ZIP file with nested CSV files generated by a database collector." ], "type": "string" }, @@ -3969,6 +4290,15 @@ "description": "A resource that reports the import job errors at row level.", "id": "ImportRowError", "properties": { + "archiveError": { + "$ref": "ImportRowErrorArchiveErrorDetails", + "description": "Error details for an archive file." + }, + "assetTitle": { + "description": "Output only. The asset title.", + "readOnly": true, + "type": "string" + }, "csvError": { "$ref": "ImportRowErrorCsvErrorDetails", "description": "Error details for a CSV file." @@ -4001,6 +4331,22 @@ }, "type": "object" }, + "ImportRowErrorArchiveErrorDetails": { + "description": "Error details for an archive file.", + "id": "ImportRowErrorArchiveErrorDetails", + "properties": { + "csvError": { + "$ref": "ImportRowErrorCsvErrorDetails", + "description": "Error details for a CSV file." + }, + "filePath": { + "description": "Output only. 
The file path inside the archive where the error was detected.", + "readOnly": true, + "type": "string" + } + }, + "type": "object" + }, "ImportRowErrorCsvErrorDetails": { "description": "Error details for a CSV file.", "id": "ImportRowErrorCsvErrorDetails", @@ -4035,7 +4381,7 @@ "properties": { "genericInsight": { "$ref": "GenericInsight", - "description": "Output only. A generic insight about an asset", + "description": "Output only. A generic insight about an asset.", "readOnly": true }, "migrationInsight": { @@ -4399,6 +4745,10 @@ "description": "CPU architecture, e.g., \"x64-based PC\", \"x86_64\", \"i686\" etc.", "type": "string" }, + "cpuManufacturer": { + "description": "Optional. CPU manufacturer, e.g., \"Intel\", \"AMD\".", + "type": "string" + }, "cpuName": { "description": "CPU name, e.g., \"Intel Xeon E5-2690\", \"AMD EPYC 7571\" etc.", "type": "string" @@ -4640,6 +4990,160 @@ }, "type": "object" }, + "MySqlPlugin": { + "description": "MySql plugin.", + "id": "MySqlPlugin", + "properties": { + "enabled": { + "description": "Required. The plugin is active.", + "type": "boolean" + }, + "plugin": { + "description": "Required. The plugin name.", + "type": "string" + }, + "version": { + "description": "Required. The plugin version.", + "type": "string" + } + }, + "type": "object" + }, + "MySqlProperty": { + "description": "MySql property.", + "id": "MySqlProperty", + "properties": { + "enabled": { + "description": "Required. The property is enabled.", + "type": "boolean" + }, + "numericValue": { + "description": "Required. The property numeric value.", + "format": "int64", + "type": "string" + }, + "property": { + "description": "Required. The property name.", + "type": "string" + } + }, + "type": "object" + }, + "MySqlSchemaDetails": { + "description": "Specific details for a Mysql database.", + "id": "MySqlSchemaDetails", + "properties": { + "storageEngines": { + "description": "Optional. Mysql storage engine tables.", + "items": { + "$ref": "MySqlStorageEngineDetails" + }, + "type": "array" + } + }, + "type": "object" + }, + "MySqlStorageEngineDetails": { + "description": "Mysql storage engine tables.", + "id": "MySqlStorageEngineDetails", + "properties": { + "encryptedTableCount": { + "description": "Optional. The number of encrypted tables.", + "format": "int32", + "type": "integer" + }, + "engine": { + "description": "Required. The storage engine.", + "enum": [ + "ENGINE_UNSPECIFIED", + "INNODB", + "MYISAM", + "MEMORY", + "CSV", + "ARCHIVE", + "BLACKHOLE", + "NDB", + "MERGE", + "FEDERATED", + "EXAMPLE", + "OTHER" + ], + "enumDescriptions": [ + "Unspecified storage engine.", + "InnoDB.", + "MyISAM.", + "Memory.", + "CSV.", + "Archive.", + "Blackhole.", + "NDB.", + "Merge.", + "Federated.", + "Example.", + "Other." + ], + "type": "string" + }, + "tableCount": { + "description": "Optional. The number of tables.", + "format": "int32", + "type": "integer" + } + }, + "type": "object" + }, + "MySqlVariable": { + "description": "MySql variable.", + "id": "MySqlVariable", + "properties": { + "category": { + "description": "Required. The variable category.", + "type": "string" + }, + "value": { + "description": "Required. The variable value.", + "type": "string" + }, + "variable": { + "description": "Required. The variable name.", + "type": "string" + } + }, + "type": "object" + }, + "MysqlDatabaseDeployment": { + "description": "Specific details for a Mysql database deployment.", + "id": "MysqlDatabaseDeployment", + "properties": { + "plugins": { + "description": "Optional. 
List of MySql plugins.", + "items": { + "$ref": "MySqlPlugin" + }, + "type": "array" + }, + "properties": { + "description": "Optional. List of MySql properties.", + "items": { + "$ref": "MySqlProperty" + }, + "type": "array" + }, + "resourceGroupsCount": { + "description": "Optional. Number of resource groups.", + "format": "int32", + "type": "integer" + }, + "variables": { + "description": "Optional. List of MySql variables.", + "items": { + "$ref": "MySqlVariable" + }, + "type": "array" + } + }, + "type": "object" + }, "NetworkAdapterDetails": { "description": "Details of network adapter.", "id": "NetworkAdapterDetails", @@ -5040,6 +5544,118 @@ }, "type": "object" }, + "PostgreSqlDatabaseDeployment": { + "description": "Specific details for a PostgreSQL database deployment.", + "id": "PostgreSqlDatabaseDeployment", + "properties": { + "properties": { + "description": "Optional. List of PostgreSql properties.", + "items": { + "$ref": "PostgreSqlProperty" + }, + "type": "array" + }, + "settings": { + "description": "Optional. List of PostgreSql settings.", + "items": { + "$ref": "PostgreSqlSetting" + }, + "type": "array" + } + }, + "type": "object" + }, + "PostgreSqlExtension": { + "description": "PostgreSql extension.", + "id": "PostgreSqlExtension", + "properties": { + "extension": { + "description": "Required. The extension name.", + "type": "string" + }, + "version": { + "description": "Required. The extension version.", + "type": "string" + } + }, + "type": "object" + }, + "PostgreSqlProperty": { + "description": "PostgreSql property.", + "id": "PostgreSqlProperty", + "properties": { + "enabled": { + "description": "Required. The property is enabled.", + "type": "boolean" + }, + "numericValue": { + "description": "Required. The property numeric value.", + "format": "int64", + "type": "string" + }, + "property": { + "description": "Required. The property name.", + "type": "string" + } + }, + "type": "object" + }, + "PostgreSqlSchemaDetails": { + "description": "Specific details for a PostgreSql schema.", + "id": "PostgreSqlSchemaDetails", + "properties": { + "foreignTablesCount": { + "description": "Optional. PostgreSql foreign tables.", + "format": "int32", + "type": "integer" + }, + "postgresqlExtensions": { + "description": "Optional. PostgreSql extensions.", + "items": { + "$ref": "PostgreSqlExtension" + }, + "type": "array" + } + }, + "type": "object" + }, + "PostgreSqlSetting": { + "description": "PostgreSql setting.", + "id": "PostgreSqlSetting", + "properties": { + "boolValue": { + "description": "Required. The setting boolean value.", + "type": "boolean" + }, + "intValue": { + "description": "Required. The setting int value.", + "format": "int64", + "type": "string" + }, + "realValue": { + "description": "Required. The setting real value.", + "format": "float", + "type": "number" + }, + "setting": { + "description": "Required. The setting name.", + "type": "string" + }, + "source": { + "description": "Required. The setting source.", + "type": "string" + }, + "stringValue": { + "description": "Required. The setting string value. Notice that enum values are stored as strings.", + "type": "string" + }, + "unit": { + "description": "Optional. The setting unit.", + "type": "string" + } + }, + "type": "object" + }, "PreferenceSet": { "description": "The preferences that apply to all assets in a given context.", "id": "PreferenceSet", @@ -5352,7 +5968,7 @@ "PERSISTENT_DISK_TYPE_SSD" ], "enumDescriptions": [ - "Unspecified (default value). 
Selecting this value allows the system to use any disk type according to reported usage. This a good value to start with.", + "Unspecified. Fallback to default value based on context.", "Standard HDD Persistent Disk.", "Balanced Persistent Disk.", "SSD Persistent Disk." @@ -5975,6 +6591,107 @@ }, "type": "object" }, + "SqlServerDatabaseDeployment": { + "description": "Specific details for a Microsoft SQL Server database deployment.", + "id": "SqlServerDatabaseDeployment", + "properties": { + "features": { + "description": "Optional. List of SQL Server features.", + "items": { + "$ref": "SqlServerFeature" + }, + "type": "array" + }, + "serverFlags": { + "description": "Optional. List of SQL Server server flags.", + "items": { + "$ref": "SqlServerServerFlag" + }, + "type": "array" + }, + "traceFlags": { + "description": "Optional. List of SQL Server trace flags.", + "items": { + "$ref": "SqlServerTraceFlag" + }, + "type": "array" + } + }, + "type": "object" + }, + "SqlServerFeature": { + "description": "SQL Server feature details.", + "id": "SqlServerFeature", + "properties": { + "enabled": { + "description": "Required. Field enabled is set when a feature is used on the source deployment.", + "type": "boolean" + }, + "featureName": { + "description": "Required. The feature name.", + "type": "string" + } + }, + "type": "object" + }, + "SqlServerSchemaDetails": { + "description": "Specific details for a SqlServer database.", + "id": "SqlServerSchemaDetails", + "properties": { + "clrObjectCount": { + "description": "Optional. SqlServer number of CLR objects.", + "format": "int32", + "type": "integer" + } + }, + "type": "object" + }, + "SqlServerServerFlag": { + "description": "SQL Server server flag details.", + "id": "SqlServerServerFlag", + "properties": { + "serverFlagName": { + "description": "Required. The server flag name.", + "type": "string" + }, + "value": { + "description": "Required. The server flag value set by the user.", + "type": "string" + }, + "valueInUse": { + "description": "Required. The server flag actual value. If `value_in_use` is different from `value` it means that either the configuration change was not applied or it is an expected behavior. See SQL Server documentation for more details.", + "type": "string" + } + }, + "type": "object" + }, + "SqlServerTraceFlag": { + "description": "SQL Server trace flag details.", + "id": "SqlServerTraceFlag", + "properties": { + "scope": { + "description": "Required. The trace flag scope.", + "enum": [ + "SCOPE_UNSPECIFIED", + "OFF", + "GLOBAL", + "SESSION" + ], + "enumDescriptions": [ + "Unspecified.", + "Off.", + "Global.", + "Session." + ], + "type": "string" + }, + "traceFlagName": { + "description": "Required. The trace flag name.", + "type": "string" + } + }, + "type": "object" + }, "Status": { "description": "The `Status` type defines a logical error model that is suitable for different programming environments, including REST APIs and RPC APIs. It is used by [gRPC](https://github.com/grpc). Each `Status` message contains three pieces of data: error code, error message, and error details. 
You can find out more about this error model and how to work with it in the [API Design Guide](https://cloud.google.com/apis/design/errors).", "id": "Status", @@ -6139,7 +6856,7 @@ "enumDescriptions": [ "Unspecified (default value).", "Prefer to migrate to Google Cloud Compute Engine.", - "Prefer to migrate to Google Cloud VMware Engine.", + "Prefer to migrate to Google Cloud VMware Engine.6278", "Prefer to migrate to Google Cloud Sole Tenant Nodes." ], "type": "string" diff --git a/discovery/googleapis/ml__v1.json b/discovery/googleapis/ml__v1.json index c51a83598..966618600 100644 --- a/discovery/googleapis/ml__v1.json +++ b/discovery/googleapis/ml__v1.json @@ -28,7 +28,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20240122", + "revision": "20241116", "rootUrl": "https://ml.googleapis.com/", "servicePath": "", "title": "AI Platform Training & Prediction API", @@ -516,7 +516,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "ml.projects.locations.operations.cancel", @@ -1395,7 +1395,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. 
On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/projects/{projectsId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "ml.projects.operations.cancel", diff --git a/discovery/googleapis/monitoring__v3.json b/discovery/googleapis/monitoring__v3.json index 5ba703804..3507b141c 100644 --- a/discovery/googleapis/monitoring__v3.json +++ b/discovery/googleapis/monitoring__v3.json @@ -34,7 +34,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20241017", + "revision": "20241114", "rootUrl": "https://monitoring.googleapis.com/", "servicePath": "", "title": "Cloud Monitoring API", @@ -1209,7 +1209,7 @@ ], "parameters": { "filter": { - "description": "If this field is empty, all custom and system-defined metric descriptors are returned. Otherwise, the filter (https://cloud.google.com/monitoring/api/v3/filters) specifies which metric descriptors are to be returned. For example, the following filter matches all custom metrics (https://cloud.google.com/monitoring/custom-metrics): metric.type = starts_with(\"custom.googleapis.com/\") ", + "description": "Optional. If this field is empty, all custom and system-defined metric descriptors are returned. Otherwise, the filter (https://cloud.google.com/monitoring/api/v3/filters) specifies which metric descriptors are to be returned. For example, the following filter matches all custom metrics (https://cloud.google.com/monitoring/custom-metrics): metric.type = starts_with(\"custom.googleapis.com/\") ", "location": "query", "type": "string" }, @@ -1221,13 +1221,13 @@ "type": "string" }, "pageSize": { - "description": "A positive number that is the maximum number of results to return. The default and maximum value is 10,000. If a page_size <= 0 or > 10,000 is submitted, will instead return a maximum of 10,000 results.", + "description": "Optional. A positive number that is the maximum number of results to return. The default and maximum value is 10,000. If a page_size <= 0 or > 10,000 is submitted, will instead return a maximum of 10,000 results.", "format": "int32", "location": "query", "type": "integer" }, "pageToken": { - "description": "If this field is not empty then it must contain the nextPageToken value returned by a previous call to this method. Using this field causes the method to return additional results from the previous method call.", + "description": "Optional. If this field is not empty then it must contain the nextPageToken value returned by a previous call to this method. Using this field causes the method to return additional results from the previous method call.", "location": "query", "type": "string" } @@ -5265,6 +5265,10 @@ "description": "Optional. The alerting rule name of this alert in the corresponding Prometheus configuration file.Some external tools may require this field to be populated correctly in order to refer to the original Prometheus configuration file. The rule group name and the alert name are necessary to update the relevant AlertPolicies in case the definition of the rule group changes in the future.This field is optional. If this field is not empty, then it must be a valid Prometheus label name (https://prometheus.io/docs/concepts/data_model/#metric-names-and-labels). 
This field may not exceed 2048 Unicode characters in length.", "type": "string" }, + "disableMetricValidation": { + "description": "Optional. Whether to disable metric existence validation for this condition.This allows alerting policies to be defined on metrics that do not yet exist, improving advanced customer workflows such as configuring alerting policies using Terraform.Users with the monitoring.alertPolicyViewer role are able to see the name of the non-existent metric in the alerting policy condition.", + "type": "boolean" + }, "duration": { "description": "Optional. Alerts are considered firing once their PromQL expression was evaluated to be \"true\" for this long. Alerts whose PromQL expression was not evaluated to be \"true\" for long enough are considered pending. Must be a non-negative duration or missing. This field is optional. Its default value is zero.", "format": "google-duration", diff --git a/discovery/googleapis/mybusinessaccountmanagement__v1.json b/discovery/googleapis/mybusinessaccountmanagement__v1.json index 9cb26d81d..fa9d54a86 100644 --- a/discovery/googleapis/mybusinessaccountmanagement__v1.json +++ b/discovery/googleapis/mybusinessaccountmanagement__v1.json @@ -16,7 +16,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20230115", + "revision": "20241120", "rootUrl": "https://mybusinessaccountmanagement.googleapis.com/", "servicePath": "", "title": "My Business Account Management API", @@ -846,18 +846,18 @@ "type": "object" }, "PostalAddress": { - "description": "Represents a postal address, e.g. for postal delivery or payments addresses. Given a postal address, a postal service can deliver items to a premise, P.O. Box or similar. It is not intended to model geographical locations (roads, towns, mountains). In typical usage an address would be created via user input or from importing existing data, depending on the type of process. Advice on address input / editing: - Use an internationalization-ready address widget such as https://github.com/google/libaddressinput) - Users should not be presented with UI elements for input or editing of fields outside countries where that field is used. For more guidance on how to use this schema, please see: https://support.google.com/business/answer/6397478", + "description": "Represents a postal address. For example for postal delivery or payments addresses. Given a postal address, a postal service can deliver items to a premise, P.O. Box or similar. It is not intended to model geographical locations (roads, towns, mountains). In typical usage an address would be created by user input or from importing existing data, depending on the type of process. Advice on address input / editing: - Use an internationalization-ready address widget such as https://github.com/google/libaddressinput) - Users should not be presented with UI elements for input or editing of fields outside countries where that field is used. For more guidance on how to use this schema, see: https://support.google.com/business/answer/6397478", "id": "PostalAddress", "properties": { "addressLines": { - "description": "Unstructured address lines describing the lower levels of an address. Because values in address_lines do not have type information and may sometimes contain multiple values in a single field (e.g. \"Austin, TX\"), it is important that the line order is clear. The order of address lines should be \"envelope order\" for the country/region of the address. In places where this can vary (e.g. 
Japan), address_language is used to make it explicit (e.g. \"ja\" for large-to-small ordering and \"ja-Latn\" or \"en\" for small-to-large). This way, the most specific line of an address can be selected based on the language. The minimum permitted structural representation of an address consists of a region_code with all remaining information placed in the address_lines. It would be possible to format such an address very approximately without geocoding, but no semantic reasoning could be made about any of the address components until it was at least partially resolved. Creating an address only containing a region_code and address_lines, and then geocoding is the recommended way to handle completely unstructured addresses (as opposed to guessing which parts of the address should be localities or administrative areas).", + "description": "Unstructured address lines describing the lower levels of an address. Because values in address_lines do not have type information and may sometimes contain multiple values in a single field (For example \"Austin, TX\"), it is important that the line order is clear. The order of address lines should be \"envelope order\" for the country/region of the address. In places where this can vary (For example Japan), address_language is used to make it explicit (For example \"ja\" for large-to-small ordering and \"ja-Latn\" or \"en\" for small-to-large). This way, the most specific line of an address can be selected based on the language. The minimum permitted structural representation of an address consists of a region_code with all remaining information placed in the address_lines. It would be possible to format such an address very approximately without geocoding, but no semantic reasoning could be made about any of the address components until it was at least partially resolved. Creating an address only containing a region_code and address_lines, and then geocoding is the recommended way to handle completely unstructured addresses (as opposed to guessing which parts of the address should be localities or administrative areas).", "items": { "type": "string" }, "type": "array" }, "administrativeArea": { - "description": "Optional. Highest administrative subdivision which is used for postal addresses of a country or region. For example, this can be a state, a province, an oblast, or a prefecture. Specifically, for Spain this is the province and not the autonomous community (e.g. \"Barcelona\" and not \"Catalonia\"). Many countries don't use an administrative area in postal addresses. E.g. in Switzerland this should be left unpopulated.", + "description": "Optional. Highest administrative subdivision which is used for postal addresses of a country or region. For example, this can be a state, a province, an oblast, or a prefecture. Specifically, for Spain this is the province and not the autonomous community (For example \"Barcelona\" and not \"Catalonia\"). Many countries don't use an administrative area in postal addresses. For example in Switzerland this should be left unpopulated.", "type": "string" }, "languageCode": { @@ -873,7 +873,7 @@ "type": "string" }, "postalCode": { - "description": "Optional. Postal code of the address. Not all countries use or require postal codes to be present, but where they are used, they may trigger additional validation with other parts of the address (e.g. state/zip validation in the U.S.A.).", + "description": "Optional. Postal code of the address. 
Not all countries use or require postal codes to be present, but where they are used, they may trigger additional validation with other parts of the address (For example state/zip validation in the U.S.A.).", "type": "string" }, "recipients": { @@ -893,7 +893,7 @@ "type": "integer" }, "sortingCode": { - "description": "Optional. Additional, country-specific, sorting code. This is not used in most regions. Where it is used, the value is either a string like \"CEDEX\", optionally followed by a number (e.g. \"CEDEX 7\"), or just a number alone, representing the \"sector code\" (Jamaica), \"delivery area indicator\" (Malawi) or \"post office indicator\" (e.g. Côte d'Ivoire).", + "description": "Optional. Additional, country-specific, sorting code. This is not used in most regions. Where it is used, the value is either a string like \"CEDEX\", optionally followed by a number (For example \"CEDEX 7\"), or just a number alone, representing the \"sector code\" (Jamaica), \"delivery area indicator\" (Malawi) or \"post office indicator\" (For example Côte d'Ivoire).", "type": "string" }, "sublocality": { diff --git a/discovery/googleapis/mybusinessbusinessinformation__v1.json b/discovery/googleapis/mybusinessbusinessinformation__v1.json index ede46e1fe..caebce172 100644 --- a/discovery/googleapis/mybusinessbusinessinformation__v1.json +++ b/discovery/googleapis/mybusinessbusinessinformation__v1.json @@ -16,7 +16,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20241002", + "revision": "20241120", "rootUrl": "https://mybusinessbusinessinformation.googleapis.com/", "servicePath": "", "title": "My Business Business Information API", @@ -1388,18 +1388,18 @@ "type": "object" }, "PostalAddress": { - "description": "Represents a postal address, e.g. for postal delivery or payments addresses. Given a postal address, a postal service can deliver items to a premise, P.O. Box or similar. It is not intended to model geographical locations (roads, towns, mountains). In typical usage an address would be created via user input or from importing existing data, depending on the type of process. Advice on address input / editing: - Use an internationalization-ready address widget such as https://github.com/google/libaddressinput) - Users should not be presented with UI elements for input or editing of fields outside countries where that field is used. For more guidance on how to use this schema, please see: https://support.google.com/business/answer/6397478", + "description": "Represents a postal address. For example for postal delivery or payments addresses. Given a postal address, a postal service can deliver items to a premise, P.O. Box or similar. It is not intended to model geographical locations (roads, towns, mountains). In typical usage an address would be created by user input or from importing existing data, depending on the type of process. Advice on address input / editing: - Use an internationalization-ready address widget such as https://github.com/google/libaddressinput) - Users should not be presented with UI elements for input or editing of fields outside countries where that field is used. For more guidance on how to use this schema, see: https://support.google.com/business/answer/6397478", "id": "PostalAddress", "properties": { "addressLines": { - "description": "Unstructured address lines describing the lower levels of an address. Because values in address_lines do not have type information and may sometimes contain multiple values in a single field (e.g. 
\"Austin, TX\"), it is important that the line order is clear. The order of address lines should be \"envelope order\" for the country/region of the address. In places where this can vary (e.g. Japan), address_language is used to make it explicit (e.g. \"ja\" for large-to-small ordering and \"ja-Latn\" or \"en\" for small-to-large). This way, the most specific line of an address can be selected based on the language. The minimum permitted structural representation of an address consists of a region_code with all remaining information placed in the address_lines. It would be possible to format such an address very approximately without geocoding, but no semantic reasoning could be made about any of the address components until it was at least partially resolved. Creating an address only containing a region_code and address_lines, and then geocoding is the recommended way to handle completely unstructured addresses (as opposed to guessing which parts of the address should be localities or administrative areas).", + "description": "Unstructured address lines describing the lower levels of an address. Because values in address_lines do not have type information and may sometimes contain multiple values in a single field (For example \"Austin, TX\"), it is important that the line order is clear. The order of address lines should be \"envelope order\" for the country/region of the address. In places where this can vary (For example Japan), address_language is used to make it explicit (For example \"ja\" for large-to-small ordering and \"ja-Latn\" or \"en\" for small-to-large). This way, the most specific line of an address can be selected based on the language. The minimum permitted structural representation of an address consists of a region_code with all remaining information placed in the address_lines. It would be possible to format such an address very approximately without geocoding, but no semantic reasoning could be made about any of the address components until it was at least partially resolved. Creating an address only containing a region_code and address_lines, and then geocoding is the recommended way to handle completely unstructured addresses (as opposed to guessing which parts of the address should be localities or administrative areas).", "items": { "type": "string" }, "type": "array" }, "administrativeArea": { - "description": "Optional. Highest administrative subdivision which is used for postal addresses of a country or region. For example, this can be a state, a province, an oblast, or a prefecture. Specifically, for Spain this is the province and not the autonomous community (e.g. \"Barcelona\" and not \"Catalonia\"). Many countries don't use an administrative area in postal addresses. E.g. in Switzerland this should be left unpopulated.", + "description": "Optional. Highest administrative subdivision which is used for postal addresses of a country or region. For example, this can be a state, a province, an oblast, or a prefecture. Specifically, for Spain this is the province and not the autonomous community (For example \"Barcelona\" and not \"Catalonia\"). Many countries don't use an administrative area in postal addresses. For example in Switzerland this should be left unpopulated.", "type": "string" }, "languageCode": { @@ -1415,7 +1415,7 @@ "type": "string" }, "postalCode": { - "description": "Optional. Postal code of the address. 
Not all countries use or require postal codes to be present, but where they are used, they may trigger additional validation with other parts of the address (e.g. state/zip validation in the U.S.A.).", + "description": "Optional. Postal code of the address. Not all countries use or require postal codes to be present, but where they are used, they may trigger additional validation with other parts of the address (For example state/zip validation in the U.S.A.).", "type": "string" }, "recipients": { @@ -1435,7 +1435,7 @@ "type": "integer" }, "sortingCode": { - "description": "Optional. Additional, country-specific, sorting code. This is not used in most regions. Where it is used, the value is either a string like \"CEDEX\", optionally followed by a number (e.g. \"CEDEX 7\"), or just a number alone, representing the \"sector code\" (Jamaica), \"delivery area indicator\" (Malawi) or \"post office indicator\" (e.g. Côte d'Ivoire).", + "description": "Optional. Additional, country-specific, sorting code. This is not used in most regions. Where it is used, the value is either a string like \"CEDEX\", optionally followed by a number (For example \"CEDEX 7\"), or just a number alone, representing the \"sector code\" (Jamaica), \"delivery area indicator\" (Malawi) or \"post office indicator\" (For example Côte d'Ivoire).", "type": "string" }, "sublocality": { diff --git a/discovery/googleapis/mybusinessverifications__v1.json b/discovery/googleapis/mybusinessverifications__v1.json index f5cc70659..d2c846c8d 100644 --- a/discovery/googleapis/mybusinessverifications__v1.json +++ b/discovery/googleapis/mybusinessverifications__v1.json @@ -16,7 +16,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20230914", + "revision": "20241120", "rootUrl": "https://mybusinessverifications.googleapis.com/", "servicePath": "", "title": "My Business Verifications API", @@ -372,18 +372,18 @@ "type": "object" }, "PostalAddress": { - "description": "Represents a postal address, e.g. for postal delivery or payments addresses. Given a postal address, a postal service can deliver items to a premise, P.O. Box or similar. It is not intended to model geographical locations (roads, towns, mountains). In typical usage an address would be created via user input or from importing existing data, depending on the type of process. Advice on address input / editing: - Use an internationalization-ready address widget such as https://github.com/google/libaddressinput) - Users should not be presented with UI elements for input or editing of fields outside countries where that field is used. For more guidance on how to use this schema, please see: https://support.google.com/business/answer/6397478", + "description": "Represents a postal address. For example for postal delivery or payments addresses. Given a postal address, a postal service can deliver items to a premise, P.O. Box or similar. It is not intended to model geographical locations (roads, towns, mountains). In typical usage an address would be created by user input or from importing existing data, depending on the type of process. Advice on address input / editing: - Use an internationalization-ready address widget such as https://github.com/google/libaddressinput) - Users should not be presented with UI elements for input or editing of fields outside countries where that field is used. 
For more guidance on how to use this schema, see: https://support.google.com/business/answer/6397478", "id": "PostalAddress", "properties": { "addressLines": { - "description": "Unstructured address lines describing the lower levels of an address. Because values in address_lines do not have type information and may sometimes contain multiple values in a single field (e.g. \"Austin, TX\"), it is important that the line order is clear. The order of address lines should be \"envelope order\" for the country/region of the address. In places where this can vary (e.g. Japan), address_language is used to make it explicit (e.g. \"ja\" for large-to-small ordering and \"ja-Latn\" or \"en\" for small-to-large). This way, the most specific line of an address can be selected based on the language. The minimum permitted structural representation of an address consists of a region_code with all remaining information placed in the address_lines. It would be possible to format such an address very approximately without geocoding, but no semantic reasoning could be made about any of the address components until it was at least partially resolved. Creating an address only containing a region_code and address_lines, and then geocoding is the recommended way to handle completely unstructured addresses (as opposed to guessing which parts of the address should be localities or administrative areas).", + "description": "Unstructured address lines describing the lower levels of an address. Because values in address_lines do not have type information and may sometimes contain multiple values in a single field (For example \"Austin, TX\"), it is important that the line order is clear. The order of address lines should be \"envelope order\" for the country/region of the address. In places where this can vary (For example Japan), address_language is used to make it explicit (For example \"ja\" for large-to-small ordering and \"ja-Latn\" or \"en\" for small-to-large). This way, the most specific line of an address can be selected based on the language. The minimum permitted structural representation of an address consists of a region_code with all remaining information placed in the address_lines. It would be possible to format such an address very approximately without geocoding, but no semantic reasoning could be made about any of the address components until it was at least partially resolved. Creating an address only containing a region_code and address_lines, and then geocoding is the recommended way to handle completely unstructured addresses (as opposed to guessing which parts of the address should be localities or administrative areas).", "items": { "type": "string" }, "type": "array" }, "administrativeArea": { - "description": "Optional. Highest administrative subdivision which is used for postal addresses of a country or region. For example, this can be a state, a province, an oblast, or a prefecture. Specifically, for Spain this is the province and not the autonomous community (e.g. \"Barcelona\" and not \"Catalonia\"). Many countries don't use an administrative area in postal addresses. E.g. in Switzerland this should be left unpopulated.", + "description": "Optional. Highest administrative subdivision which is used for postal addresses of a country or region. For example, this can be a state, a province, an oblast, or a prefecture. Specifically, for Spain this is the province and not the autonomous community (For example \"Barcelona\" and not \"Catalonia\"). 
Many countries don't use an administrative area in postal addresses. For example in Switzerland this should be left unpopulated.", "type": "string" }, "languageCode": { @@ -399,7 +399,7 @@ "type": "string" }, "postalCode": { - "description": "Optional. Postal code of the address. Not all countries use or require postal codes to be present, but where they are used, they may trigger additional validation with other parts of the address (e.g. state/zip validation in the U.S.A.).", + "description": "Optional. Postal code of the address. Not all countries use or require postal codes to be present, but where they are used, they may trigger additional validation with other parts of the address (For example state/zip validation in the U.S.A.).", "type": "string" }, "recipients": { @@ -419,7 +419,7 @@ "type": "integer" }, "sortingCode": { - "description": "Optional. Additional, country-specific, sorting code. This is not used in most regions. Where it is used, the value is either a string like \"CEDEX\", optionally followed by a number (e.g. \"CEDEX 7\"), or just a number alone, representing the \"sector code\" (Jamaica), \"delivery area indicator\" (Malawi) or \"post office indicator\" (e.g. Côte d'Ivoire).", + "description": "Optional. Additional, country-specific, sorting code. This is not used in most regions. Where it is used, the value is either a string like \"CEDEX\", optionally followed by a number (For example \"CEDEX 7\"), or just a number alone, representing the \"sector code\" (Jamaica), \"delivery area indicator\" (Malawi) or \"post office indicator\" (For example Côte d'Ivoire).", "type": "string" }, "sublocality": { diff --git a/discovery/googleapis/netapp__v1.json b/discovery/googleapis/netapp__v1.json index 7bbc7063d..c597b86ee 100644 --- a/discovery/googleapis/netapp__v1.json +++ b/discovery/googleapis/netapp__v1.json @@ -25,7 +25,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20241002", + "revision": "20241203", "rootUrl": "https://netapp.googleapis.com/", "servicePath": "", "title": "NetApp API", @@ -1077,7 +1077,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. 
On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "netapp.projects.locations.operations.cancel", @@ -1389,6 +1389,34 @@ "scopes": [ "https://www.googleapis.com/auth/cloud-platform" ] + }, + "validateDirectoryService": { + "description": "ValidateDirectoryService does a connectivity check for a directory service policy attached to the storage pool.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/storagePools/{storagePoolsId}:validateDirectoryService", + "httpMethod": "POST", + "id": "netapp.projects.locations.storagePools.validateDirectoryService", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "Required. Name of the storage pool", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/storagePools/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+name}:validateDirectoryService", + "request": { + "$ref": "ValidateDirectoryServiceRequest" + }, + "response": { + "$ref": "Operation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] } } }, @@ -1652,6 +1680,34 @@ "https://www.googleapis.com/auth/cloud-platform" ] }, + "establishPeering": { + "description": "Establish replication peering.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/volumes/{volumesId}/replications/{replicationsId}:establishPeering", + "httpMethod": "POST", + "id": "netapp.projects.locations.volumes.replications.establishPeering", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "Required. The resource name of the replication, in the format of projects/{project_id}/locations/{location}/volumes/{volume_id}/replications/{replication_id}.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/volumes/[^/]+/replications/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+name}:establishPeering", + "request": { + "$ref": "EstablishPeeringRequest" + }, + "response": { + "$ref": "Operation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, "get": { "description": "Describe a replication for a volume.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/volumes/{volumesId}/replications/{replicationsId}", @@ -1840,6 +1896,34 @@ "scopes": [ "https://www.googleapis.com/auth/cloud-platform" ] + }, + "sync": { + "description": "Syncs the replication. This will invoke one time volume data transfer from source to destination.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/volumes/{volumesId}/replications/{replicationsId}:sync", + "httpMethod": "POST", + "id": "netapp.projects.locations.volumes.replications.sync", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "Required. 
The resource name of the replication, in the format of projects/{project_id}/locations/{location}/volumes/{volume_id}/replications/{replication_id}.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/volumes/[^/]+/replications/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+name}:sync", + "request": { + "$ref": "SyncReplicationRequest" + }, + "response": { + "$ref": "Operation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] } } }, @@ -2149,42 +2233,6 @@ }, "type": "object" }, - "AssetLocation": { - "description": "Provides the mapping of a cloud asset to a direct physical location or to a proxy that defines the location on its behalf.", - "id": "AssetLocation", - "properties": { - "ccfeRmsPath": { - "description": "Spanner path of the CCFE RMS database. It is only applicable for CCFE tenants that use CCFE RMS for storing resource metadata.", - "type": "string" - }, - "expected": { - "$ref": "IsolationExpectations", - "description": "Defines the customer expectation around ZI/ZS for this asset and ZI/ZS state of the region at the time of asset creation." - }, - "extraParameters": { - "description": "Defines extra parameters required for specific asset types.", - "items": { - "$ref": "ExtraParameter" - }, - "type": "array" - }, - "locationData": { - "description": "Contains all kinds of physical location definitions for this asset.", - "items": { - "$ref": "LocationData" - }, - "type": "array" - }, - "parentAsset": { - "description": "Defines parents assets if any in order to allow later generation of child_asset_location data via child assets.", - "items": { - "$ref": "CloudAsset" - }, - "type": "array" - } - }, - "type": "object" - }, "Backup": { "description": "A NetApp Backup.", "id": "Backup", @@ -2422,49 +2470,12 @@ }, "type": "object" }, - "BlobstoreLocation": { - "description": "Policy ID that identified data placement in Blobstore as per go/blobstore-user-guide#data-metadata-placement-and-failure-domains", - "id": "BlobstoreLocation", - "properties": { - "policyId": { - "items": { - "type": "string" - }, - "type": "array" - } - }, - "type": "object" - }, "CancelOperationRequest": { "description": "The request message for Operations.CancelOperation.", "id": "CancelOperationRequest", "properties": {}, "type": "object" }, - "CloudAsset": { - "id": "CloudAsset", - "properties": { - "assetName": { - "type": "string" - }, - "assetType": { - "type": "string" - } - }, - "type": "object" - }, - "CloudAssetComposition": { - "id": "CloudAssetComposition", - "properties": { - "childAsset": { - "items": { - "$ref": "CloudAsset" - }, - "type": "array" - } - }, - "type": "object" - }, "DailySchedule": { "description": "Make a snapshot every day e.g. at 04:00, 05:20, 23:50", "id": "DailySchedule", @@ -2503,6 +2514,10 @@ "description": "Required. Existing destination StoragePool name.", "type": "string" }, + "tieringPolicy": { + "$ref": "TieringPolicy", + "description": "Optional. Tiering policy for the volume." + }, "volumeId": { "description": "Desired destination volume resource id. If not specified, source volume's resource id will be used. 
This value must start with a lowercase letter followed by up to 62 lowercase letters, numbers, or hyphens, and cannot end with a hyphen.", "type": "string" @@ -2510,24 +2525,38 @@ }, "type": "object" }, - "DirectLocationAssignment": { - "id": "DirectLocationAssignment", + "EncryptVolumesRequest": { + "description": "EncryptVolumesRequest specifies the KMS config to encrypt existing volumes.", + "id": "EncryptVolumesRequest", + "properties": {}, + "type": "object" + }, + "EstablishPeeringRequest": { + "description": "EstablishPeeringRequest establishes cluster and svm peerings between the source and the destination replications.", + "id": "EstablishPeeringRequest", "properties": { - "location": { + "peerClusterName": { + "description": "Required. Name of the user's local source cluster to be peered with the destination cluster.", + "type": "string" + }, + "peerIpAddresses": { + "description": "Optional. List of IPv4 ip addresses to be used for peering.", "items": { - "$ref": "LocationAssignment" + "type": "string" }, "type": "array" + }, + "peerSvmName": { + "description": "Required. Name of the user's local source vserver svm to be peered with the destination vserver svm.", + "type": "string" + }, + "peerVolumeName": { + "description": "Required. Name of the user's local source volume to be peered with the destination volume.", + "type": "string" } }, "type": "object" }, - "EncryptVolumesRequest": { - "description": "EncryptVolumesRequest specifies the KMS config to encrypt existing volumes.", - "id": "EncryptVolumesRequest", - "properties": {}, - "type": "object" - }, "ExportPolicy": { "description": "Defines the export policy for the volume.", "id": "ExportPolicy", @@ -2542,17 +2571,6 @@ }, "type": "object" }, - "ExtraParameter": { - "description": "Defines parameters that should only be used for specific asset types.", - "id": "ExtraParameter", - "properties": { - "regionalMigDistributionPolicy": { - "$ref": "RegionalMigDistributionPolicy", - "description": "Details about zones used by regional compute.googleapis.com/InstanceGroupManager to create instances." - } - }, - "type": "object" - }, "GoogleProtobufEmpty": { "description": "A generic empty message that you can re-use to avoid defining duplicated empty messages in your APIs. A typical example is to use it as the request or the response type of an API method. For instance: service Foo { rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty); }", "id": "GoogleProtobufEmpty", @@ -2576,126 +2594,70 @@ }, "type": "object" }, - "IsolationExpectations": { - "id": "IsolationExpectations", + "HybridPeeringDetails": { + "description": "HybridPeeringDetails contains details about the hybrid peering.", + "id": "HybridPeeringDetails", "properties": { - "requirementOverride": { - "$ref": "RequirementOverride", - "description": "Explicit overrides for ZI and ZS requirements to be used for resources that should be excluded from ZI/ZS verification logic." + "command": { + "description": "Optional. Copy-paste-able commands to be used on user's ONTAP to accept peering requests.", + "type": "string" }, - "ziOrgPolicy": { - "enum": [ - "ZI_UNSPECIFIED", - "ZI_UNKNOWN", - "ZI_NOT_REQUIRED", - "ZI_PREFERRED", - "ZI_REQUIRED" - ], - "enumDescriptions": [ - "", - "To be used if tracking is not available", - "", - "", - "" - ], + "commandExpiryTime": { + "description": "Optional. 
Expiration time for the peering command to be executed on user's ONTAP.", + "format": "google-datetime", "type": "string" }, - "ziRegionPolicy": { - "enum": [ - "ZI_REGION_POLICY_UNSPECIFIED", - "ZI_REGION_POLICY_UNKNOWN", - "ZI_REGION_POLICY_NOT_SET", - "ZI_REGION_POLICY_FAIL_OPEN", - "ZI_REGION_POLICY_FAIL_CLOSED" - ], - "enumDescriptions": [ - "", - "To be used if tracking is not available", - "", - "", - "" - ], + "passphrase": { + "description": "Optional. Temporary passphrase generated to accept cluster peering command.", "type": "string" }, - "ziRegionState": { - "enum": [ - "ZI_REGION_UNSPECIFIED", - "ZI_REGION_UNKNOWN", - "ZI_REGION_NOT_ENABLED", - "ZI_REGION_ENABLED" - ], - "enumDescriptions": [ - "", - "To be used if tracking is not available", - "", - "" - ], + "subnetIp": { + "description": "Optional. IP address of the subnet.", + "type": "string" + } + }, + "type": "object" + }, + "HybridReplicationParameters": { + "description": "The Hybrid Replication parameters for the volume.", + "id": "HybridReplicationParameters", + "properties": { + "clusterLocation": { + "description": "Optional. Name of source cluster location associated with the Hybrid replication. This is a free-form field for the display purpose only.", "type": "string" }, - "zoneIsolation": { - "deprecated": true, - "description": "Deprecated: use zi_org_policy, zi_region_policy and zi_region_state instead for setting ZI expectations as per go/zicy-publish-physical-location.", - "enum": [ - "ZI_UNSPECIFIED", - "ZI_UNKNOWN", - "ZI_NOT_REQUIRED", - "ZI_PREFERRED", - "ZI_REQUIRED" - ], - "enumDescriptions": [ - "", - "To be used if tracking is not available", - "", - "", - "" - ], + "description": { + "description": "Optional. Description of the replication.", "type": "string" }, - "zoneSeparation": { - "deprecated": true, - "description": "Deprecated: use zs_org_policy, and zs_region_stateinstead for setting Zs expectations as per go/zicy-publish-physical-location.", - "enum": [ - "ZS_UNSPECIFIED", - "ZS_UNKNOWN", - "ZS_NOT_REQUIRED", - "ZS_REQUIRED" - ], - "enumDescriptions": [ - "", - "To be used if tracking is not available", - "", - "" - ], + "labels": { + "additionalProperties": { + "type": "string" + }, + "description": "Optional. Labels to be added to the replication as the key value pairs.", + "type": "object" + }, + "peerClusterName": { + "description": "Required. Name of the user's local source cluster to be peered with the destination cluster.", "type": "string" }, - "zsOrgPolicy": { - "enum": [ - "ZS_UNSPECIFIED", - "ZS_UNKNOWN", - "ZS_NOT_REQUIRED", - "ZS_REQUIRED" - ], - "enumDescriptions": [ - "", - "To be used if tracking is not available", - "", - "" - ], + "peerIpAddresses": { + "description": "Required. List of node ip addresses to be peered with.", + "items": { + "type": "string" + }, + "type": "array" + }, + "peerSvmName": { + "description": "Required. Name of the user's local source vserver svm to be peered with the destination vserver svm.", "type": "string" }, - "zsRegionState": { - "enum": [ - "ZS_REGION_UNSPECIFIED", - "ZS_REGION_UNKNOWN", - "ZS_REGION_NOT_ENABLED", - "ZS_REGION_ENABLED" - ], - "enumDescriptions": [ - "", - "To be used if tracking of the asset ZS-bit is not available", - "", - "" - ], + "peerVolumeName": { + "description": "Required. Name of the user's local source volume to be peered with the destination volume.", + "type": "string" + }, + "replication": { + "description": "Required. 
Desired name for the replication of this volume.", "type": "string" } }, @@ -3076,64 +3038,6 @@ }, "type": "object" }, - "LocationAssignment": { - "id": "LocationAssignment", - "properties": { - "location": { - "type": "string" - }, - "locationType": { - "enum": [ - "UNSPECIFIED", - "CLUSTER", - "POP", - "CLOUD_ZONE", - "CLOUD_REGION", - "MULTI_REGION_GEO", - "MULTI_REGION_JURISDICTION", - "GLOBAL", - "OTHER" - ], - "enumDescriptions": [ - "", - "1-10: Physical failure domains.", - "", - "11-20: Logical failure domains.", - "", - "", - "", - "", - "" - ], - "type": "string" - } - }, - "type": "object" - }, - "LocationData": { - "id": "LocationData", - "properties": { - "blobstoreLocation": { - "$ref": "BlobstoreLocation" - }, - "childAssetLocation": { - "$ref": "CloudAssetComposition" - }, - "directLocation": { - "$ref": "DirectLocationAssignment" - }, - "gcpProjectProxy": { - "$ref": "TenantProjectProxy" - }, - "placerLocation": { - "$ref": "PlacerLocation" - }, - "spannerLocation": { - "$ref": "SpannerLocation" - } - }, - "type": "object" - }, "LocationMetadata": { "description": "Metadata for a given google.cloud.location.Location.", "id": "LocationMetadata", @@ -3303,40 +3207,14 @@ }, "type": "object" }, - "PlacerLocation": { - "description": "Message describing that the location of the customer resource is tied to placer allocations", - "id": "PlacerLocation", - "properties": { - "placerConfig": { - "description": "Directory with a config related to it in placer (e.g. \"/placer/prod/home/my-root/my-dir\")", - "type": "string" - } - }, - "type": "object" - }, - "RegionalMigDistributionPolicy": { - "description": "To be used for specifying the intended distribution of regional compute.googleapis.com/InstanceGroupManager instances", - "id": "RegionalMigDistributionPolicy", - "properties": { - "targetShape": { - "description": "The shape in which the group converges around distribution of resources. Instance of proto2 enum", - "format": "int32", - "type": "integer" - }, - "zones": { - "description": "Cloud zones used by regional MIG to create instances.", - "items": { - "$ref": "ZoneConfiguration" - }, - "type": "array" - } - }, - "type": "object" - }, "Replication": { "description": "Replication is a nested resource under Volume, that describes a cross-region replication relationship between 2 volumes in different regions.", "id": "Replication", "properties": { + "clusterLocation": { + "description": "Optional. Location of the user cluster.", + "type": "string" + }, "createTime": { "description": "Output only. Replication create time.", "format": "google-datetime", @@ -3361,6 +3239,26 @@ "readOnly": true, "type": "boolean" }, + "hybridPeeringDetails": { + "$ref": "HybridPeeringDetails", + "description": "Output only. Hybrid peering details.", + "readOnly": true + }, + "hybridReplicationType": { + "description": "Output only. Type of the hybrid replication.", + "enum": [ + "HYBRID_REPLICATION_TYPE_UNSPECIFIED", + "MIGRATION", + "CONTINUOUS_REPLICATION" + ], + "enumDescriptions": [ + "Unspecified hybrid replication type.", + "Hybrid replication type for migration.", + "Hybrid replication type for continuous replication." 
+ ], + "readOnly": true, + "type": "string" + }, "labels": { "additionalProperties": { "type": "string" @@ -3375,14 +3273,18 @@ "PREPARING", "MIRRORED", "STOPPED", - "TRANSFERRING" + "TRANSFERRING", + "BASELINE_TRANSFERRING", + "ABORTED" ], "enumDescriptions": [ "Unspecified MirrorState", "Destination volume is being prepared.", "Destination volume has been initialized and is ready to receive replication transfers.", "Destination volume is not receiving replication transfers.", - "Incremental replication is in progress." + "Incremental replication is in progress.", + "Baseline replication is in progress.", + "Replication is aborted." ], "readOnly": true, "type": "string" @@ -3435,7 +3337,9 @@ "READY", "UPDATING", "DELETING", - "ERROR" + "ERROR", + "PENDING_CLUSTER_PEERING", + "PENDING_SVM_PEERING" ], "enumDescriptions": [ "Unspecified replication State", @@ -3443,7 +3347,9 @@ "Replication is ready.", "Replication is updating.", "Replication is deleting.", - "Replication is in error state." + "Replication is in error state.", + "Replication is waiting for cluster peering to be established.", + "Replication is waiting for SVM peering to be established." ], "readOnly": true, "type": "string" @@ -3461,44 +3367,6 @@ }, "type": "object" }, - "RequirementOverride": { - "id": "RequirementOverride", - "properties": { - "ziOverride": { - "enum": [ - "ZI_UNSPECIFIED", - "ZI_UNKNOWN", - "ZI_NOT_REQUIRED", - "ZI_PREFERRED", - "ZI_REQUIRED" - ], - "enumDescriptions": [ - "", - "To be used if tracking is not available", - "", - "", - "" - ], - "type": "string" - }, - "zsOverride": { - "enum": [ - "ZS_UNSPECIFIED", - "ZS_UNKNOWN", - "ZS_NOT_REQUIRED", - "ZS_REQUIRED" - ], - "enumDescriptions": [ - "", - "To be used if tracking is not available", - "", - "" - ], - "type": "string" - } - }, - "type": "object" - }, "RestoreParameters": { "description": "The RestoreParameters if volume is created from a snapshot or backup.", "id": "RestoreParameters", @@ -3689,26 +3557,6 @@ }, "type": "object" }, - "SpannerLocation": { - "id": "SpannerLocation", - "properties": { - "backupName": { - "description": "Set of backups used by the resource with name in the same format as what is available at http://table/spanner_automon.backup_metadata", - "items": { - "type": "string" - }, - "type": "array" - }, - "dbName": { - "description": "Set of databases used by the resource in format /span//", - "items": { - "type": "string" - }, - "type": "array" - } - }, - "type": "object" - }, "Status": { "description": "The `Status` type defines a logical error model that is suitable for different programming environments, including REST APIs and RPC APIs. It is used by [gRPC](https://github.com/grpc). Each `Status` message contains three pieces of data: error code, error message, and error details. 
You can find out more about this error model and how to work with it in the [API Design Guide](https://cloud.google.com/apis/design/errors).", "id": "Status", @@ -3898,16 +3746,10 @@ "properties": {}, "type": "object" }, - "TenantProjectProxy": { - "id": "TenantProjectProxy", - "properties": { - "projectNumbers": { - "items": { - "type": "string" - }, - "type": "array" - } - }, + "SyncReplicationRequest": { + "description": "SyncReplicationRequest syncs the replication from source to destination.", + "id": "SyncReplicationRequest", + "properties": {}, "type": "object" }, "TieringPolicy": { @@ -3982,6 +3824,25 @@ }, "type": "object" }, + "ValidateDirectoryServiceRequest": { + "description": "ValidateDirectoryServiceRequest validates the directory service policy attached to the storage pool.", + "id": "ValidateDirectoryServiceRequest", + "properties": { + "directoryServiceType": { + "description": "Type of directory service policy attached to the storage pool.", + "enum": [ + "DIRECTORY_SERVICE_TYPE_UNSPECIFIED", + "ACTIVE_DIRECTORY" + ], + "enumDescriptions": [ + "Directory service type is not specified.", + "Active directory policy attached to the storage pool." + ], + "type": "string" + } + }, + "type": "object" + }, "VerifyKmsConfigRequest": { "description": "VerifyKmsConfigRequest specifies the KMS config to be validated.", "id": "VerifyKmsConfigRequest", @@ -4068,6 +3929,10 @@ "readOnly": true, "type": "boolean" }, + "hybridReplicationParameters": { + "$ref": "HybridReplicationParameters", + "description": "Optional. The Hybrid Replication parameters for the volume." + }, "kerberosEnabled": { "description": "Optional. Flag indicating if the volume is a kerberos volume or not, export policy rules control kerberos security modes (krb5, krb5i, krb5p).", "type": "boolean" @@ -4253,7 +4118,9 @@ "UPDATING", "RESTORING", "DISABLED", - "ERROR" + "ERROR", + "PREPARING", + "READ_ONLY" ], "enumDescriptions": [ "Unspecified Volume State", @@ -4263,7 +4130,9 @@ "Volume State is Updating", "Volume State is Restoring", "Volume State is Disabled", - "Volume State is Error" + "Volume State is Error", + "Volume State is Preparing. 
Note that this is different from CREATING where CREATING means the volume is being created, while PREPARING means the volume is created and now being prepared for the replication.", + "Volume State is Read Only" ], "readOnly": true, "type": "string" @@ -4324,15 +4193,6 @@ } }, "type": "object" - }, - "ZoneConfiguration": { - "id": "ZoneConfiguration", - "properties": { - "zone": { - "type": "string" - } - }, - "type": "object" } } } diff --git a/discovery/googleapis/networkconnectivity__v1.json b/discovery/googleapis/networkconnectivity__v1.json index 5a988739f..0b983b905 100644 --- a/discovery/googleapis/networkconnectivity__v1.json +++ b/discovery/googleapis/networkconnectivity__v1.json @@ -25,7 +25,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20241009", + "revision": "20241112", "rootUrl": "https://networkconnectivity.googleapis.com/", "servicePath": "", "title": "Network Connectivity API", @@ -489,6 +489,57 @@ "https://www.googleapis.com/auth/cloud-platform" ] }, + "queryStatus": { + "description": "Query PSC propagation status the status of a Network Connectivity Center hub.", + "flatPath": "v1/projects/{projectsId}/locations/global/hubs/{hubsId}:queryStatus", + "httpMethod": "GET", + "id": "networkconnectivity.projects.locations.global.hubs.queryStatus", + "parameterOrder": [ + "name" + ], + "parameters": { + "filter": { + "description": "Optional. An expression that filters the list of results. The filter can be used to filter the results by the following fields: * psc_propagation_status.source_spoke * psc_propagation_status.source_group * psc_propagation_status.source_forwarding_rule * psc_propagation_status.target_spoke * psc_propagation_status.target_group * psc_propagation_status.code * psc_propagation_status.message", + "location": "query", + "type": "string" + }, + "groupBy": { + "description": "Optional. A field that counts are grouped by. A comma-separated list of any of these fields: * psc_propagation_status.source_spoke * psc_propagation_status.source_group * psc_propagation_status.source_forwarding_rule * psc_propagation_status.target_spoke * psc_propagation_status.target_group * psc_propagation_status.code", + "location": "query", + "type": "string" + }, + "name": { + "description": "Required. The name of the hub.", + "location": "path", + "pattern": "^projects/[^/]+/locations/global/hubs/[^/]+$", + "required": true, + "type": "string" + }, + "orderBy": { + "description": "Optional. Sort the results in the ascending order by specific fields returned in the response. A comma-separated list of any of these fields: * psc_propagation_status.source_spoke * psc_propagation_status.source_group * psc_propagation_status.source_forwarding_rule * psc_propagation_status.target_spoke * psc_propagation_status.target_group * psc_propagation_status.code If `group_by` is set, the value of the `order_by` field must be the same as or a subset of the `group_by` field.", + "location": "query", + "type": "string" + }, + "pageSize": { + "description": "Optional. The maximum number of results to return per page.", + "format": "int32", + "location": "query", + "type": "integer" + }, + "pageToken": { + "description": "Optional. 
The page token.", + "location": "query", + "type": "string" + } + }, + "path": "v1/{+name}:queryStatus", + "response": { + "$ref": "QueryHubStatusResponse" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, "rejectSpoke": { "description": "Rejects a Network Connectivity Center spoke from being attached to a hub. If the spoke was previously in the `ACTIVE` state, it transitions to the `INACTIVE` state and is no longer able to connect to other spokes that are attached to the hub.", "flatPath": "v1/projects/{projectsId}/locations/global/hubs/{hubsId}:rejectSpoke", @@ -3477,6 +3528,26 @@ }, "type": "object" }, + "HubStatusEntry": { + "description": "The hub status entry.", + "id": "HubStatusEntry", + "properties": { + "count": { + "description": "The number of status. If group_by is not set in the request, the default is 1.", + "format": "int32", + "type": "integer" + }, + "groupBy": { + "description": "The same group_by field from the request.", + "type": "string" + }, + "pscPropagationStatus": { + "$ref": "PscPropagationStatus", + "description": "The PSC propagation status." + } + }, + "type": "object" + }, "InterconnectAttachment": { "description": "InterconnectAttachment that this route applies to.", "id": "InterconnectAttachment", @@ -3632,7 +3703,6 @@ "type": "object" }, "LinkedProducerVpcNetwork": { - "description": "Next ID: 7", "id": "LinkedProducerVpcNetwork", "properties": { "excludeExportRanges": { @@ -3642,6 +3712,13 @@ }, "type": "array" }, + "includeExportRanges": { + "description": "Optional. IP ranges allowed to be included from peering.", + "items": { + "type": "string" + }, + "type": "array" + }, "network": { "description": "Immutable. The URI of the Service Consumer VPC that the Producer VPC is peered with.", "type": "string" @@ -3875,7 +3952,7 @@ "type": "object" }, "ListPolicyBasedRoutesResponse": { - "description": "Response for PolicyBasedRouting.ListPolicyBasedRoutes method.", + "description": "Response for PolicyBasedRoutingService.ListPolicyBasedRoutes method.", "id": "ListPolicyBasedRoutesResponse", "properties": { "nextPageToken": { @@ -4261,7 +4338,7 @@ "type": "string" }, "requestedCancellation": { - "description": "Output only. Identifies whether the user has requested cancellation of the operation. Operations that have been cancelled successfully have Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Output only. Identifies whether the user has requested cancellation of the operation. Operations that have been cancelled successfully have google.longrunning.Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", "readOnly": true, "type": "boolean" }, @@ -4564,6 +4641,77 @@ }, "type": "object" }, + "PscPropagationStatus": { + "description": "The PSC propagation status in a hub.", + "id": "PscPropagationStatus", + "properties": { + "code": { + "description": "The propagation status.", + "enum": [ + "CODE_UNSPECIFIED", + "READY", + "PROPAGATING", + "ERROR_PRODUCER_PROPAGATED_CONNECTION_LIMIT_EXCEEDED", + "ERROR_PRODUCER_NAT_IP_SPACE_EXHAUSTED", + "ERROR_PRODUCER_QUOTA_EXCEEDED", + "ERROR_CONSUMER_QUOTA_EXCEEDED" + ], + "enumDescriptions": [ + "The code is unspecified.", + "The propagated PSC connection is ready.", + "PSC connection is propagating. 
This is a transient state.", + "The PSC connection propagation failed because the VPC network or the project of the target spoke has exceeded the connection limit set by the producer.", + "The PSC connection propagation failed because the NAT IP subnet space has been exhausted. It is equivalent to the `Needs attention` status of the PSC connection. See https://cloud.google.com/vpc/docs/about-accessing-vpc-hosted-services-endpoints#connection-statuses.", + "PSC connection propagation failed because the `PSC_ILB_CONSUMER_FORWARDING_RULES_PER_PRODUCER_NETWORK` quota in the producer VPC network has been exceeded.", + "The PSC connection propagation failed because the `PSC_PROPAGATED_CONNECTIONS_PER_VPC_NETWORK` quota in the consumer VPC network has been exceeded." + ], + "type": "string" + }, + "message": { + "description": "The human-readable summary of the PSC connection propagation status.", + "type": "string" + }, + "sourceForwardingRule": { + "description": "The name of the forwarding rule exported to the hub.", + "type": "string" + }, + "sourceGroup": { + "description": "The name of the group that the source spoke belongs to.", + "type": "string" + }, + "sourceSpoke": { + "description": "The name of the spoke that the source forwarding rule belongs to.", + "type": "string" + }, + "targetGroup": { + "description": "The name of the group that the target spoke belongs to.", + "type": "string" + }, + "targetSpoke": { + "description": "The name of the spoke that the source forwarding rule propagates to.", + "type": "string" + } + }, + "type": "object" + }, + "QueryHubStatusResponse": { + "description": "The response for HubService.QueryHubStatus.", + "id": "QueryHubStatusResponse", + "properties": { + "hubStatusEntries": { + "description": "The list of hub status.", + "items": { + "$ref": "HubStatusEntry" + }, + "type": "array" + }, + "nextPageToken": { + "description": "The token for the next page of the response. To see more results, use this value as the page_token for your next request. If this value is empty, there are no more results.", + "type": "string" + } + }, + "type": "object" + }, "RegionalEndpoint": { "description": "The RegionalEndpoint resource.", "id": "RegionalEndpoint", @@ -4583,7 +4731,7 @@ "type": "string" }, "address": { - "description": "Optional. The IP Address of the Regional Endpoint. When no address is provided, an IP from the subnetwork is allocated. Use one of the following formats: * IPv4 address as in `10.0.0.1` * Address resource URI as in `projects/{project}/regions/{region}/addresses/{address_name}`", + "description": "Optional. The IP Address of the Regional Endpoint. When no address is provided, an IP from the subnetwork is allocated. Use one of the following formats: * IPv4 address as in `10.0.0.1` * Address resource URI as in `projects/{project}/regions/{region}/addresses/{address_name}` for an IPv4 or IPv6 address.", "type": "string" }, "createTime": { @@ -4885,7 +5033,7 @@ "type": "object" }, "ServiceClass": { - "description": "The ServiceClass resource. Next id: 9", + "description": "The ServiceClass resource.", "id": "ServiceClass", "properties": { "createTime": { @@ -4928,7 +5076,7 @@ "type": "object" }, "ServiceConnectionMap": { - "description": "The ServiceConnectionMap resource. Next id: 15", + "description": "The ServiceConnectionMap resource.", "id": "ServiceConnectionMap", "properties": { "consumerPscConfigs": { @@ -5014,7 +5162,7 @@ "type": "object" }, "ServiceConnectionPolicy": { - "description": "The ServiceConnectionPolicy resource. 
Next id: 12", + "description": "The ServiceConnectionPolicy resource.", "id": "ServiceConnectionPolicy", "properties": { "createTime": { @@ -5085,7 +5233,7 @@ "type": "object" }, "ServiceConnectionToken": { - "description": "The ServiceConnectionToken resource. Next id: 10", + "description": "The ServiceConnectionToken resource.", "id": "ServiceConnectionToken", "properties": { "createTime": { @@ -5207,7 +5355,7 @@ "type": "string" }, "reasons": { - "description": "Output only. The reasons for current state of the spoke. Only present when the spoke is in the `INACTIVE` state.", + "description": "Output only. The reasons for current state of the spoke.", "items": { "$ref": "StateReason" }, @@ -5333,14 +5481,20 @@ "PENDING_REVIEW", "REJECTED", "PAUSED", - "FAILED" + "FAILED", + "UPDATE_PENDING_REVIEW", + "UPDATE_REJECTED", + "UPDATE_FAILED" ], "enumDescriptions": [ "No information available.", "The proposed spoke is pending review.", "The proposed spoke has been rejected by the hub administrator.", "The spoke has been deactivated internally.", - "Network Connectivity Center encountered errors while accepting the spoke." + "Network Connectivity Center encountered errors while accepting the spoke.", + "The proposed spoke update is pending review.", + "The proposed spoke update has been rejected by the hub administrator.", + "Network Connectivity Center encountered errors while accepting the spoke update." ], "readOnly": true, "type": "string" @@ -5424,14 +5578,20 @@ "PENDING_REVIEW", "REJECTED", "PAUSED", - "FAILED" + "FAILED", + "UPDATE_PENDING_REVIEW", + "UPDATE_REJECTED", + "UPDATE_FAILED" ], "enumDescriptions": [ "No information available.", "The proposed spoke is pending review.", "The proposed spoke has been rejected by the hub administrator.", "The spoke has been deactivated internally.", - "Network Connectivity Center encountered errors while accepting the spoke." + "Network Connectivity Center encountered errors while accepting the spoke.", + "The proposed spoke update is pending review.", + "The proposed spoke update has been rejected by the hub administrator.", + "Network Connectivity Center encountered errors while accepting the spoke update." ], "type": "string" }, diff --git a/discovery/googleapis/networkmanagement__v1.json b/discovery/googleapis/networkmanagement__v1.json index b4b97048d..e56644f9e 100644 --- a/discovery/googleapis/networkmanagement__v1.json +++ b/discovery/googleapis/networkmanagement__v1.json @@ -25,7 +25,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20241009", + "revision": "20241210", "rootUrl": "https://networkmanagement.googleapis.com/", "servicePath": "", "title": "Network Management API", @@ -468,7 +468,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. 
If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/projects/{projectsId}/locations/global/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "networkmanagement.projects.locations.global.operations.cancel", @@ -589,6 +589,173 @@ } } } + }, + "vpcFlowLogsConfigs": { + "methods": { + "create": { + "description": "Creates a new `VpcFlowLogsConfig`. If a configuration with the exact same settings already exists (even if the ID is different), the creation fails. Notes: 1. Creating a configuration with state=DISABLED will fail 2. The following fields are not considered as `settings` for the purpose of the check mentioned above, therefore - creating another configuration with the same fields but different values for the following fields will fail as well: * name * create_time * update_time * labels * description", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/vpcFlowLogsConfigs", + "httpMethod": "POST", + "id": "networkmanagement.projects.locations.vpcFlowLogsConfigs.create", + "parameterOrder": [ + "parent" + ], + "parameters": { + "parent": { + "description": "Required. The parent resource of the VPC Flow Logs configuration to create: `projects/{project_id}/locations/global`", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+$", + "required": true, + "type": "string" + }, + "vpcFlowLogsConfigId": { + "description": "Required. ID of the `VpcFlowLogsConfig`.", + "location": "query", + "type": "string" + } + }, + "path": "v1/{+parent}/vpcFlowLogsConfigs", + "request": { + "$ref": "VpcFlowLogsConfig" + }, + "response": { + "$ref": "Operation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "delete": { + "description": "Deletes a specific `VpcFlowLogsConfig`.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/vpcFlowLogsConfigs/{vpcFlowLogsConfigsId}", + "httpMethod": "DELETE", + "id": "networkmanagement.projects.locations.vpcFlowLogsConfigs.delete", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "Required. `VpcFlowLogsConfig` resource name using the form: `projects/{project_id}/locations/global/vpcFlowLogsConfigs/{vpc_flow_logs_config}`", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/vpcFlowLogsConfigs/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+name}", + "response": { + "$ref": "Operation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "get": { + "description": "Gets the details of a specific `VpcFlowLogsConfig`.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/vpcFlowLogsConfigs/{vpcFlowLogsConfigsId}", + "httpMethod": "GET", + "id": "networkmanagement.projects.locations.vpcFlowLogsConfigs.get", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "Required. 
`VpcFlowLogsConfig` resource name using the form: `projects/{project_id}/locations/global/vpcFlowLogsConfigs/{vpc_flow_logs_config}`", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/vpcFlowLogsConfigs/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+name}", + "response": { + "$ref": "VpcFlowLogsConfig" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "list": { + "description": "Lists all `VpcFlowLogsConfigs` in a given project.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/vpcFlowLogsConfigs", + "httpMethod": "GET", + "id": "networkmanagement.projects.locations.vpcFlowLogsConfigs.list", + "parameterOrder": [ + "parent" + ], + "parameters": { + "filter": { + "description": "Optional. Lists the `VpcFlowLogsConfigs` that match the filter expression. A filter expression must use the supported [CEL logic operators] (https://cloud.google.com/vpc/docs/about-flow-logs-records#supported_cel_logic_operators).", + "location": "query", + "type": "string" + }, + "orderBy": { + "description": "Optional. Field to use to sort the list.", + "location": "query", + "type": "string" + }, + "pageSize": { + "description": "Optional. Number of `VpcFlowLogsConfigs` to return.", + "format": "int32", + "location": "query", + "type": "integer" + }, + "pageToken": { + "description": "Optional. Page token from an earlier query, as returned in `next_page_token`.", + "location": "query", + "type": "string" + }, + "parent": { + "description": "Required. The parent resource of the VpcFlowLogsConfig: `projects/{project_id}/locations/global`", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+parent}/vpcFlowLogsConfigs", + "response": { + "$ref": "ListVpcFlowLogsConfigsResponse" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "patch": { + "description": "Updates an existing `VpcFlowLogsConfig`. If a configuration with the exact same settings already exists (even if the ID is different), the creation fails. Notes: 1. Updating a configuration with state=DISABLED will fail. 2. The following fields are not considered as `settings` for the purpose of the check mentioned above, therefore - updating another configuration with the same fields but different values for the following fields will fail as well: * name * create_time * update_time * labels * description", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/vpcFlowLogsConfigs/{vpcFlowLogsConfigsId}", + "httpMethod": "PATCH", + "id": "networkmanagement.projects.locations.vpcFlowLogsConfigs.patch", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "Identifier. Unique name of the configuration using the form: `projects/{project_id}/locations/global/vpcFlowLogsConfigs/{vpc_flow_logs_config_id}`", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/vpcFlowLogsConfigs/[^/]+$", + "required": true, + "type": "string" + }, + "updateMask": { + "description": "Required. Mask of fields to update. 
At least one path must be supplied in this field.", + "format": "google-fieldmask", + "location": "query", + "type": "string" + } + }, + "path": "v1/{+name}", + "request": { + "$ref": "VpcFlowLogsConfig" + }, + "response": { + "$ref": "Operation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + } + } } } } @@ -618,6 +785,7 @@ "PERMISSION_DENIED", "PERMISSION_DENIED_NO_CLOUD_NAT_CONFIGS", "PERMISSION_DENIED_NO_NEG_ENDPOINT_CONFIGS", + "PERMISSION_DENIED_NO_CLOUD_ROUTER_CONFIGS", "NO_SOURCE_LOCATION", "INVALID_ARGUMENT", "TRACE_TOO_LONG", @@ -674,6 +842,7 @@ false, false, false, + false, false ], "enumDescriptions": [ @@ -692,6 +861,7 @@ "Aborted because user lacks permission to access all or part of the network configurations required to run the test.", "Aborted because user lacks permission to access Cloud NAT configs required to run the test.", "Aborted because user lacks permission to access Network endpoint group endpoint configs required to run the test.", + "Aborted because user lacks permission to access Cloud Router configs required to run the test.", "Aborted because no valid source or destination endpoint is derived from the input test request.", "Aborted because the source or destination endpoint specified in the request is invalid. Some examples: - The request might contain malformed resource URI, project ID, or IP address. - The request might contain inconsistent information (for example, the request might include both the instance and the network, but the instance might not have a NIC in that network).", "Aborted because the number of steps in the trace exceeds a certain limit. It might be caused by a routing loop.", @@ -1002,6 +1172,15 @@ }, "type": "array" }, + "returnReachabilityDetails": { + "$ref": "ReachabilityDetails", + "description": "Output only. The reachability details of this test from the latest run for the return path. The details are updated when creating a new test, updating an existing test, or triggering a one-time rerun of an existing test.", + "readOnly": true + }, + "roundTrip": { + "description": "Whether run analysis for the return path from destination to source. Default value is false.", + "type": "boolean" + }, "source": { "$ref": "Endpoint", "description": "Required. Source specification of the Connectivity Test. You can use a combination of source IP address, virtual machine (VM) instance, or Compute Engine network to uniquely identify the source location. Examples: If the source IP address is an internal IP address within a Google Cloud Virtual Private Cloud (VPC) network, then you must also specify the VPC network. Otherwise, specify the VM instance, which already contains its internal IP address and VPC network information. If the source of the test is within an on-premises network, then you must provide the destination VPC network. If the source endpoint is a Compute Engine VM instance with multiple network interfaces, the instance itself is not sufficient to identify the endpoint. So, you must also specify the source IP address or VPC network. A reachability analysis proceeds even if the source location is ambiguous. However, the test result may include endpoints that you don't intend to test." 
@@ -1171,7 +1350,8 @@ "REDIS_CLUSTER_UNSUPPORTED_PROTOCOL", "NO_ADVERTISED_ROUTE_TO_GCP_DESTINATION", "NO_TRAFFIC_SELECTOR_TO_GCP_DESTINATION", - "NO_KNOWN_ROUTE_FROM_PEERED_NETWORK_TO_DESTINATION" + "NO_KNOWN_ROUTE_FROM_PEERED_NETWORK_TO_DESTINATION", + "PRIVATE_NAT_TO_PSC_ENDPOINT_UNSUPPORTED" ], "enumDescriptions": [ "Cause is unspecified.", @@ -1256,7 +1436,8 @@ "Packet is dropped due to an unsupported protocol being used to connect to a Redis Cluster. Only TCP connections are accepted by a Redis Cluster.", "Packet from the non-GCP (on-prem) or unknown GCP network is dropped due to the destination IP address not belonging to any IP prefix advertised via BGP by the Cloud Router.", "Packet from the non-GCP (on-prem) or unknown GCP network is dropped due to the destination IP address not belonging to any IP prefix included to the local traffic selector of the VPN tunnel.", - "Packet from the unknown peered network is dropped due to no known route from the source network to the destination IP address." + "Packet from the unknown peered network is dropped due to no known route from the source network to the destination IP address.", + "Sending packets processed by the Private NAT Gateways to the Private Service Connect endpoints is not supported." ], "type": "string" }, @@ -1339,8 +1520,12 @@ "readOnly": true, "type": "string" }, + "fqdn": { + "description": "DNS endpoint of [Google Kubernetes Engine cluster control plane](https://cloud.google.com/kubernetes-engine/docs/concepts/cluster-architecture). Requires gke_master_cluster to be set, can't be used simultaneoulsly with ip_address or network. Applicable only to destination endpoint.", + "type": "string" + }, "gkeMasterCluster": { - "description": "A cluster URI for [Google Kubernetes Engine master](https://cloud.google.com/kubernetes-engine/docs/concepts/cluster-architecture).", + "description": "A cluster URI for [Google Kubernetes Engine cluster control plane](https://cloud.google.com/kubernetes-engine/docs/concepts/cluster-architecture).", "type": "string" }, "instance": { @@ -1687,12 +1872,16 @@ "description": "URI of a GKE cluster.", "type": "string" }, + "dnsEndpoint": { + "description": "DNS endpoint of a GKE cluster control plane.", + "type": "string" + }, "externalIp": { - "description": "External IP address of a GKE cluster master.", + "description": "External IP address of a GKE cluster control plane.", "type": "string" }, "internalIp": { - "description": "Internal IP address of a GKE cluster master.", + "description": "Internal IP address of a GKE cluster control plane.", "type": "string" } }, @@ -1870,6 +2059,31 @@ }, "type": "object" }, + "ListVpcFlowLogsConfigsResponse": { + "description": "Response for the `ListVpcFlowLogsConfigs` method.", + "id": "ListVpcFlowLogsConfigsResponse", + "properties": { + "nextPageToken": { + "description": "Page token to fetch the next set of configurations.", + "type": "string" + }, + "unreachable": { + "description": "Locations that could not be reached (when querying all locations with `-`).", + "items": { + "type": "string" + }, + "type": "array" + }, + "vpcFlowLogsConfigs": { + "description": "List of VPC Flow Log configurations.", + "items": { + "$ref": "VpcFlowLogsConfig" + }, + "type": "array" + } + }, + "type": "object" + }, "LoadBalancerBackend": { "description": "For display only. 
Metadata associated with a specific load balancer backend.", "id": "LoadBalancerBackend", @@ -2514,11 +2728,12 @@ "id": "RouteInfo", "properties": { "advertisedRouteNextHopUri": { - "description": "For advertised routes, the URI of their next hop, i.e. the URI of the hybrid endpoint (VPN tunnel, Interconnect attachment, NCC router appliance) the advertised prefix is advertised through, or URI of the source peered network.", + "deprecated": true, + "description": "For ADVERTISED routes, the URI of their next hop, i.e. the URI of the hybrid endpoint (VPN tunnel, Interconnect attachment, NCC router appliance) the advertised prefix is advertised through, or URI of the source peered network. Deprecated in favor of the next_hop_uri field, not used in new tests.", "type": "string" }, "advertisedRouteSourceRouterUri": { - "description": "For advertised dynamic routes, the URI of the Cloud Router that advertised the corresponding IP prefix.", + "description": "For ADVERTISED dynamic routes, the URI of the Cloud Router that advertised the corresponding IP prefix.", "type": "string" }, "destIpRange": { @@ -2526,7 +2741,7 @@ "type": "string" }, "destPortRanges": { - "description": "Destination port ranges of the route. Policy based routes only.", + "description": "Destination port ranges of the route. POLICY_BASED routes only.", "items": { "type": "string" }, @@ -2543,20 +2758,29 @@ }, "type": "array" }, + "nccHubRouteUri": { + "description": "For PEERING_SUBNET and PEERING_DYNAMIC routes that are advertised by NCC Hub, the URI of the corresponding route in NCC Hub's routing table.", + "type": "string" + }, "nccHubUri": { - "description": "URI of a NCC Hub. NCC_HUB routes only.", + "description": "URI of the NCC Hub the route is advertised by. PEERING_SUBNET and PEERING_DYNAMIC routes that are advertised by NCC Hub only.", "type": "string" }, "nccSpokeUri": { - "description": "URI of a NCC Spoke. NCC_HUB routes only.", + "description": "URI of the destination NCC Spoke. PEERING_SUBNET and PEERING_DYNAMIC routes that are advertised by NCC Hub only.", "type": "string" }, "networkUri": { - "description": "URI of a Compute Engine network. NETWORK routes only.", + "description": "URI of a VPC network where route is located.", "type": "string" }, "nextHop": { - "description": "Next hop of the route.", + "deprecated": true, + "description": "String type of the next hop of the route (for example, \"VPN tunnel\"). Deprecated in favor of the next_hop_type and next_hop_uri fields, not used in new tests.", + "type": "string" + }, + "nextHopNetworkUri": { + "description": "URI of a VPC network where the next hop resource is located.", "type": "string" }, "nextHopType": { @@ -2581,36 +2805,49 @@ "Next hop is an IP address.", "Next hop is a Compute Engine instance.", "Next hop is a VPC network gateway.", - "Next hop is a peering VPC.", + "Next hop is a peering VPC. This scenario only happens when the user doesn't have permissions to the project where the next hop resource is located.", "Next hop is an interconnect.", "Next hop is a VPN tunnel.", "Next hop is a VPN gateway. This scenario only happens when tracing connectivity from an on-premises network to Google Cloud through a VPN. 
The analysis simulates a packet departing from the on-premises network through a VPN tunnel and arriving at a Cloud VPN gateway.", "Next hop is an internet gateway.", - "Next hop is blackhole; that is, the next hop either does not exist or is not running.", + "Next hop is blackhole; that is, the next hop either does not exist or is unusable.", "Next hop is the forwarding rule of an Internal Load Balancer.", "Next hop is a [router appliance instance](https://cloud.google.com/network-connectivity/docs/network-connectivity-center/concepts/ra-overview).", - "Next hop is an NCC hub." + "Next hop is an NCC hub. This scenario only happens when the user doesn't have permissions to the project where the next hop resource is located." ], "type": "string" }, + "nextHopUri": { + "description": "URI of the next hop resource.", + "type": "string" + }, + "originatingRouteDisplayName": { + "description": "For PEERING_SUBNET, PEERING_STATIC and PEERING_DYNAMIC routes, the name of the originating SUBNET/STATIC/DYNAMIC route.", + "type": "string" + }, + "originatingRouteUri": { + "description": "For PEERING_SUBNET and PEERING_STATIC routes, the URI of the originating SUBNET/STATIC route.", + "type": "string" + }, "priority": { "description": "Priority of the route.", "format": "int32", "type": "integer" }, "protocols": { - "description": "Protocols of the route. Policy based routes only.", + "description": "Protocols of the route. POLICY_BASED routes only.", "items": { "type": "string" }, "type": "array" }, "region": { - "description": "Region of the route (if applicable).", + "description": "Region of the route. DYNAMIC, PEERING_DYNAMIC, POLICY_BASED and ADVERTISED routes only. If set for POLICY_BASED route, this is a region of VLAN attachments for Cloud Interconnect the route applies to.", "type": "string" }, "routeScope": { - "description": "Indicates where route is applicable.", + "deprecated": true, + "description": "Indicates where route is applicable. Deprecated, routes with NCC_HUB scope are not included in the trace in new tests.", "enum": [ "ROUTE_SCOPE_UNSPECIFIED", "NETWORK", @@ -2641,27 +2878,27 @@ "Route is a subnet route automatically created by the system.", "Static route created by the user, including the default route to the internet.", "Dynamic route exchanged between BGP peers.", - "A subnet route received from peering network.", + "A subnet route received from peering network or NCC Hub.", "A static route received from peering network.", - "A dynamic route received from peering network.", + "A dynamic route received from peering network or NCC Hub.", "Policy based route.", "Advertised route. Synthetic route which is used to transition from the StartFromPrivateNetwork state in Connectivity tests." ], "type": "string" }, "srcIpRange": { - "description": "Source IP address range of the route. Policy based routes only.", + "description": "Source IP address range of the route. POLICY_BASED routes only.", "type": "string" }, "srcPortRanges": { - "description": "Source port ranges of the route. Policy based routes only.", + "description": "Source port ranges of the route. POLICY_BASED routes only.", "items": { "type": "string" }, "type": "array" }, "uri": { - "description": "URI of a route (if applicable).", + "description": "URI of a route. 
SUBNET, STATIC, PEERING_SUBNET (only for peering network) and POLICY_BASED routes only.", "type": "string" } }, @@ -3042,6 +3279,131 @@ }, "type": "object" }, + "VpcFlowLogsConfig": { + "description": "A configuration to generate VPC Flow Logs.", + "id": "VpcFlowLogsConfig", + "properties": { + "aggregationInterval": { + "description": "Optional. The aggregation interval for the logs. Default value is INTERVAL_5_SEC.", + "enum": [ + "AGGREGATION_INTERVAL_UNSPECIFIED", + "INTERVAL_5_SEC", + "INTERVAL_30_SEC", + "INTERVAL_1_MIN", + "INTERVAL_5_MIN", + "INTERVAL_10_MIN", + "INTERVAL_15_MIN" + ], + "enumDescriptions": [ + "If not specified, will default to INTERVAL_5_SEC.", + "Aggregate logs in 5s intervals.", + "Aggregate logs in 30s intervals.", + "Aggregate logs in 1m intervals.", + "Aggregate logs in 5m intervals.", + "Aggregate logs in 10m intervals.", + "Aggregate logs in 15m intervals." + ], + "type": "string" + }, + "createTime": { + "description": "Output only. The time the config was created.", + "format": "google-datetime", + "readOnly": true, + "type": "string" + }, + "description": { + "description": "Optional. The user-supplied description of the VPC Flow Logs configuration. Maximum of 512 characters.", + "type": "string" + }, + "filterExpr": { + "description": "Optional. Export filter used to define which VPC Flow Logs should be logged.", + "type": "string" + }, + "flowSampling": { + "description": "Optional. The value of the field must be in (0, 1]. The sampling rate of VPC Flow Logs where 1.0 means all collected logs are reported. Setting the sampling rate to 0.0 is not allowed. If you want to disable VPC Flow Logs, use the state field instead. Default value is 1.0.", + "format": "float", + "type": "number" + }, + "interconnectAttachment": { + "description": "Traffic will be logged from the Interconnect Attachment. Format: projects/{project_id}/regions/{region}/interconnectAttachments/{name}", + "type": "string" + }, + "labels": { + "additionalProperties": { + "type": "string" + }, + "description": "Optional. Resource labels to represent user-provided metadata.", + "type": "object" + }, + "metadata": { + "description": "Optional. Configures whether all, none or a subset of metadata fields should be added to the reported VPC flow logs. Default value is INCLUDE_ALL_METADATA.", + "enum": [ + "METADATA_UNSPECIFIED", + "INCLUDE_ALL_METADATA", + "EXCLUDE_ALL_METADATA", + "CUSTOM_METADATA" + ], + "enumDescriptions": [ + "If not specified, will default to INCLUDE_ALL_METADATA.", + "Include all metadata fields.", + "Exclude all metadata fields.", + "Include only custom fields (specified in metadata_fields)." + ], + "type": "string" + }, + "metadataFields": { + "description": "Optional. Custom metadata fields to include in the reported VPC flow logs. Can only be specified if \"metadata\" was set to CUSTOM_METADATA.", + "items": { + "type": "string" + }, + "type": "array" + }, + "name": { + "description": "Identifier. Unique name of the configuration using the form: `projects/{project_id}/locations/global/vpcFlowLogsConfigs/{vpc_flow_logs_config_id}`", + "type": "string" + }, + "state": { + "description": "Optional. The state of the VPC Flow Log configuration. Default value is ENABLED. 
When creating a new configuration, it must be enabled.", + "enum": [ + "STATE_UNSPECIFIED", + "ENABLED", + "DISABLED" + ], + "enumDescriptions": [ + "If not specified, will default to ENABLED.", + "When ENABLED, this configuration will generate logs.", + "When DISABLED, this configuration will not generate logs." + ], + "type": "string" + }, + "targetResourceState": { + "description": "Output only. A diagnostic bit - describes the state of the configured target resource for diagnostic purposes.", + "enum": [ + "TARGET_RESOURCE_STATE_UNSPECIFIED", + "TARGET_RESOURCE_EXISTS", + "TARGET_RESOURCE_DOES_NOT_EXIST" + ], + "enumDescriptions": [ + "Unspecified target resource state.", + "Indicates that the target resource exists.", + "Indicates that the target resource does not exist." + ], + "readOnly": true, + "type": "string" + }, + "updateTime": { + "description": "Output only. The time the config was updated.", + "format": "google-datetime", + "readOnly": true, + "type": "string" + }, + "vpnTunnel": { + "description": "Traffic will be logged from the VPN Tunnel. Format: projects/{project_id}/regions/{region}/vpnTunnels/{name}", + "type": "string" + } + }, + "type": "object" + }, "VpnGatewayInfo": { "description": "For display only. Metadata associated with a Compute Engine VPN gateway.", "id": "VpnGatewayInfo", diff --git a/discovery/googleapis/networksecurity__v1.json b/discovery/googleapis/networksecurity__v1.json index 10a623294..82d14cd8a 100644 --- a/discovery/googleapis/networksecurity__v1.json +++ b/discovery/googleapis/networksecurity__v1.json @@ -25,7 +25,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20240919", + "revision": "20241202", "rootUrl": "https://networksecurity.googleapis.com/", "servicePath": "", "title": "Network Security API", @@ -590,7 +590,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. 
On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/organizations/{organizationsId}/locations/{locationsId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "networksecurity.organizations.locations.operations.cancel", @@ -1735,6 +1735,99 @@ }, "authzPolicies": { "methods": { + "create": { + "description": "Creates a new AuthzPolicy in a given project and location.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/authzPolicies", + "httpMethod": "POST", + "id": "networksecurity.projects.locations.authzPolicies.create", + "parameterOrder": [ + "parent" + ], + "parameters": { + "authzPolicyId": { + "description": "Required. User-provided ID of the `AuthzPolicy` resource to be created.", + "location": "query", + "type": "string" + }, + "parent": { + "description": "Required. The parent resource of the `AuthzPolicy` resource. Must be in the format `projects/{project}/locations/{location}`.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+$", + "required": true, + "type": "string" + }, + "requestId": { + "description": "Optional. An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server can ignore the request if it has already been completed. The server guarantees that for at least 60 minutes since the first request. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, ignores the second request. This prevents clients from accidentally creating duplicate commitments. The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000).", + "location": "query", + "type": "string" + } + }, + "path": "v1/{+parent}/authzPolicies", + "request": { + "$ref": "AuthzPolicy" + }, + "response": { + "$ref": "Operation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "delete": { + "description": "Deletes a single AuthzPolicy.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/authzPolicies/{authzPoliciesId}", + "httpMethod": "DELETE", + "id": "networksecurity.projects.locations.authzPolicies.delete", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "Required. The name of the `AuthzPolicy` resource to delete. Must be in the format `projects/{project}/locations/{location}/authzPolicies/{authz_policy}`.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/authzPolicies/[^/]+$", + "required": true, + "type": "string" + }, + "requestId": { + "description": "Optional. An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server can ignore the request if it has already been completed. The server guarantees that for at least 60 minutes after the first request. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, ignores the second request. This prevents clients from accidentally creating duplicate commitments. 
The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000).", + "location": "query", + "type": "string" + } + }, + "path": "v1/{+name}", + "response": { + "$ref": "Operation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "get": { + "description": "Gets details of a single AuthzPolicy.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/authzPolicies/{authzPoliciesId}", + "httpMethod": "GET", + "id": "networksecurity.projects.locations.authzPolicies.get", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "Required. A name of the `AuthzPolicy` resource to get. Must be in the format `projects/{project}/locations/{location}/authzPolicies/{authz_policy}`.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/authzPolicies/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+name}", + "response": { + "$ref": "AuthzPolicy" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, "getIamPolicy": { "description": "Gets the access control policy for a resource. Returns an empty policy if the resource exists and does not have a policy set.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/authzPolicies/{authzPoliciesId}:getIamPolicy", @@ -1766,6 +1859,91 @@ "https://www.googleapis.com/auth/cloud-platform" ] }, + "list": { + "description": "Lists AuthzPolicies in a given project and location.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/authzPolicies", + "httpMethod": "GET", + "id": "networksecurity.projects.locations.authzPolicies.list", + "parameterOrder": [ + "parent" + ], + "parameters": { + "filter": { + "description": "Optional. Filtering results.", + "location": "query", + "type": "string" + }, + "orderBy": { + "description": "Optional. Hint for how to order the results.", + "location": "query", + "type": "string" + }, + "pageSize": { + "description": "Optional. Requested page size. The server might return fewer items than requested. If unspecified, the server picks an appropriate default.", + "format": "int32", + "location": "query", + "type": "integer" + }, + "pageToken": { + "description": "Optional. A token identifying a page of results that the server returns.", + "location": "query", + "type": "string" + }, + "parent": { + "description": "Required. The project and location from which the `AuthzPolicy` resources are listed, specified in the following format: `projects/{project}/locations/{location}`.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+parent}/authzPolicies", + "response": { + "$ref": "ListAuthzPoliciesResponse" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "patch": { + "description": "Updates the parameters of a single AuthzPolicy.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/authzPolicies/{authzPoliciesId}", + "httpMethod": "PATCH", + "id": "networksecurity.projects.locations.authzPolicies.patch", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "Required. Identifier. 
Name of the `AuthzPolicy` resource in the following format: `projects/{project}/locations/{location}/authzPolicies/{authz_policy}`.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/authzPolicies/[^/]+$", + "required": true, + "type": "string" + }, + "requestId": { + "description": "Optional. An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server can ignore the request if it has already been completed. The server guarantees that for at least 60 minutes since the first request. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, ignores the second request. This prevents clients from accidentally creating duplicate commitments. The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000).", + "location": "query", + "type": "string" + }, + "updateMask": { + "description": "Required. Used to specify the fields to be overwritten in the `AuthzPolicy` resource by the update. The fields specified in the `update_mask` are relative to the resource, not the full request. A field is overwritten if it is in the mask. If the user does not specify a mask, then all fields are overwritten.", + "format": "google-fieldmask", + "location": "query", + "type": "string" + } + }, + "path": "v1/{+name}", + "request": { + "$ref": "AuthzPolicy" + }, + "response": { + "$ref": "Operation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, "setIamPolicy": { "description": "Sets the access control policy on the specified resource. Replaces any existing policy. Can return `NOT_FOUND`, `INVALID_ARGUMENT`, and `PERMISSION_DENIED` errors.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/authzPolicies/{authzPoliciesId}:setIamPolicy", @@ -2569,7 +2747,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. 
On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "networksecurity.projects.locations.operations.cancel", @@ -3413,6 +3591,336 @@ }, "type": "object" }, + "AuthzPolicy": { + "description": "`AuthzPolicy` is a resource that allows to forward traffic to a callout backend designed to scan the traffic for security purposes.", + "id": "AuthzPolicy", + "properties": { + "action": { + "description": "Required. Can be one of `ALLOW`, `DENY`, `CUSTOM`. When the action is `CUSTOM`, `customProvider` must be specified. When the action is `ALLOW`, only requests matching the policy will be allowed. When the action is `DENY`, only requests matching the policy will be denied. When a request arrives, the policies are evaluated in the following order: 1. If there is a `CUSTOM` policy that matches the request, the `CUSTOM` policy is evaluated using the custom authorization providers and the request is denied if the provider rejects the request. 2. If there are any `DENY` policies that match the request, the request is denied. 3. If there are no `ALLOW` policies for the resource or if any of the `ALLOW` policies match the request, the request is allowed. 4. Else the request is denied by default if none of the configured AuthzPolicies with `ALLOW` action match the request.", + "enum": [ + "AUTHZ_ACTION_UNSPECIFIED", + "ALLOW", + "DENY", + "CUSTOM" + ], + "enumDescriptions": [ + "Unspecified action.", + "Allow request to pass through to the backend.", + "Deny the request and return a HTTP 404 to the client.", + "Delegate the authorization decision to an external authorization engine." + ], + "type": "string" + }, + "createTime": { + "description": "Output only. The timestamp when the resource was created.", + "format": "google-datetime", + "readOnly": true, + "type": "string" + }, + "customProvider": { + "$ref": "AuthzPolicyCustomProvider", + "description": "Optional. Required if the action is `CUSTOM`. Allows delegating authorization decisions to Cloud IAP or to Service Extensions. One of `cloudIap` or `authzExtension` must be specified." + }, + "description": { + "description": "Optional. A human-readable description of the resource.", + "type": "string" + }, + "httpRules": { + "description": "Optional. A list of authorization HTTP rules to match against the incoming request. A policy match occurs when at least one HTTP rule matches the request or when no HTTP rules are specified in the policy. At least one HTTP Rule is required for Allow or Deny Action. Limited to 5 rules.", + "items": { + "$ref": "AuthzPolicyAuthzRule" + }, + "type": "array" + }, + "labels": { + "additionalProperties": { + "type": "string" + }, + "description": "Optional. Set of labels associated with the `AuthzPolicy` resource. The format must comply with [the following requirements](/compute/docs/labeling-resources#requirements).", + "type": "object" + }, + "name": { + "description": "Required. Identifier. Name of the `AuthzPolicy` resource in the following format: `projects/{project}/locations/{location}/authzPolicies/{authz_policy}`.", + "type": "string" + }, + "target": { + "$ref": "AuthzPolicyTarget", + "description": "Required. Specifies the set of resources to which this policy should be applied to." + }, + "updateTime": { + "description": "Output only. 
The timestamp when the resource was updated.", + "format": "google-datetime", + "readOnly": true, + "type": "string" + } + }, + "type": "object" + }, + "AuthzPolicyAuthzRule": { + "description": "Conditions to match against the incoming request.", + "id": "AuthzPolicyAuthzRule", + "properties": { + "from": { + "$ref": "AuthzPolicyAuthzRuleFrom", + "description": "Optional. Describes properties of a source of a request." + }, + "to": { + "$ref": "AuthzPolicyAuthzRuleTo", + "description": "Optional. Describes properties of a target of a request." + }, + "when": { + "description": "Optional. CEL expression that describes the conditions to be satisfied for the action. The result of the CEL expression is ANDed with the from and to. Refer to the CEL language reference for a list of available attributes.", + "type": "string" + } + }, + "type": "object" + }, + "AuthzPolicyAuthzRuleFrom": { + "description": "Describes properties of one or more sources of a request.", + "id": "AuthzPolicyAuthzRuleFrom", + "properties": { + "notSources": { + "description": "Optional. Describes the negated properties of request sources. Matches requests from sources that do not match the criteria specified in this field. At least one of sources or notSources must be specified.", + "items": { + "$ref": "AuthzPolicyAuthzRuleFromRequestSource" + }, + "type": "array" + }, + "sources": { + "description": "Optional. Describes the properties of a request's sources. At least one of sources or notSources must be specified. Limited to 1 source. A match occurs when ANY source (in sources or notSources) matches the request. Within a single source, the match follows AND semantics across fields and OR semantics within a single field, i.e. a match occurs when ANY principal matches AND ANY ipBlocks match.", + "items": { + "$ref": "AuthzPolicyAuthzRuleFromRequestSource" + }, + "type": "array" + } + }, + "type": "object" + }, + "AuthzPolicyAuthzRuleFromRequestSource": { + "description": "Describes the properties of a single source.", + "id": "AuthzPolicyAuthzRuleFromRequestSource", + "properties": { + "principals": { + "description": "Optional. A list of identities derived from the client's certificate. This field will not match on a request unless mutual TLS is enabled for the Forwarding rule or Gateway. Each identity is a string whose value is matched against the URI SAN, or DNS SAN or the subject field in the client's certificate. The match can be exact, prefix, suffix or a substring match. One of exact, prefix, suffix or contains must be specified. Limited to 5 principals.", + "items": { + "$ref": "AuthzPolicyAuthzRuleStringMatch" + }, + "type": "array" + }, + "resources": { + "description": "Optional. A list of resources to match against the resource of the source VM of a request. Limited to 5 resources.", + "items": { + "$ref": "AuthzPolicyAuthzRuleRequestResource" + }, + "type": "array" + } + }, + "type": "object" + }, + "AuthzPolicyAuthzRuleHeaderMatch": { + "description": "Determines how a HTTP header should be matched.", + "id": "AuthzPolicyAuthzRuleHeaderMatch", + "properties": { + "name": { + "description": "Optional. Specifies the name of the header in the request.", + "type": "string" + }, + "value": { + "$ref": "AuthzPolicyAuthzRuleStringMatch", + "description": "Optional. Specifies how the header match will be performed." 
+ } + }, + "type": "object" + }, + "AuthzPolicyAuthzRuleRequestResource": { + "description": "Describes the properties of a client VM resource accessing the internal application load balancers.", + "id": "AuthzPolicyAuthzRuleRequestResource", + "properties": { + "iamServiceAccount": { + "$ref": "AuthzPolicyAuthzRuleStringMatch", + "description": "Optional. An IAM service account to match against the source service account of the VM sending the request." + }, + "tagValueIdSet": { + "$ref": "AuthzPolicyAuthzRuleRequestResourceTagValueIdSet", + "description": "Optional. A list of resource tag value permanent IDs to match against the resource manager tags value associated with the source VM of a request." + } + }, + "type": "object" + }, + "AuthzPolicyAuthzRuleRequestResourceTagValueIdSet": { + "description": "Describes a set of resource tag value permanent IDs to match against the resource manager tags value associated with the source VM of a request.", + "id": "AuthzPolicyAuthzRuleRequestResourceTagValueIdSet", + "properties": { + "ids": { + "description": "Required. A list of resource tag value permanent IDs to match against the resource manager tags value associated with the source VM of a request. The match follows AND semantics which means all the ids must match. Limited to 5 matches.", + "items": { + "format": "int64", + "type": "string" + }, + "type": "array" + } + }, + "type": "object" + }, + "AuthzPolicyAuthzRuleStringMatch": { + "description": "Determines how a string value should be matched.", + "id": "AuthzPolicyAuthzRuleStringMatch", + "properties": { + "contains": { + "description": "The input string must have the substring specified here. Note: empty contains match is not allowed, please use regex instead. Examples: * ``abc`` matches the value ``xyz.abc.def``", + "type": "string" + }, + "exact": { + "description": "The input string must match exactly the string specified here. Examples: * ``abc`` only matches the value ``abc``.", + "type": "string" + }, + "ignoreCase": { + "description": "If true, indicates the exact/prefix/suffix/contains matching should be case insensitive. For example, the matcher ``data`` will match both input string ``Data`` and ``data`` if set to true.", + "type": "boolean" + }, + "prefix": { + "description": "The input string must have the prefix specified here. Note: empty prefix is not allowed, please use regex instead. Examples: * ``abc`` matches the value ``abc.xyz``", + "type": "string" + }, + "suffix": { + "description": "The input string must have the suffix specified here. Note: empty prefix is not allowed, please use regex instead. Examples: * ``abc`` matches the value ``xyz.abc``", + "type": "string" + } + }, + "type": "object" + }, + "AuthzPolicyAuthzRuleTo": { + "description": "Describes properties of one or more targets of a request.", + "id": "AuthzPolicyAuthzRuleTo", + "properties": { + "notOperations": { + "description": "Optional. Describes the negated properties of the targets of a request. Matches requests for operations that do not match the criteria specified in this field. At least one of operations or notOperations must be specified.", + "items": { + "$ref": "AuthzPolicyAuthzRuleToRequestOperation" + }, + "type": "array" + }, + "operations": { + "description": "Optional. Describes properties of one or more targets of a request. At least one of operations or notOperations must be specified. Limited to 1 operation. A match occurs when ANY operation (in operations or notOperations) matches. 
Within an operation, the match follows AND semantics across fields and OR semantics within a field, i.e. a match occurs when ANY path matches AND ANY header matches and ANY method matches.", + "items": { + "$ref": "AuthzPolicyAuthzRuleToRequestOperation" + }, + "type": "array" + } + }, + "type": "object" + }, + "AuthzPolicyAuthzRuleToRequestOperation": { + "description": "Describes properties of one or more targets of a request.", + "id": "AuthzPolicyAuthzRuleToRequestOperation", + "properties": { + "headerSet": { + "$ref": "AuthzPolicyAuthzRuleToRequestOperationHeaderSet", + "description": "Optional. A list of headers to match against in http header." + }, + "hosts": { + "description": "Optional. A list of HTTP Hosts to match against. The match can be one of exact, prefix, suffix, or contains (substring match). Matches are always case sensitive unless the ignoreCase is set. Limited to 5 matches.", + "items": { + "$ref": "AuthzPolicyAuthzRuleStringMatch" + }, + "type": "array" + }, + "methods": { + "description": "Optional. A list of HTTP methods to match against. Each entry must be a valid HTTP method name (GET, PUT, POST, HEAD, PATCH, DELETE, OPTIONS). It only allows exact match and is always case sensitive.", + "items": { + "type": "string" + }, + "type": "array" + }, + "paths": { + "description": "Optional. A list of paths to match against. The match can be one of exact, prefix, suffix, or contains (substring match). Matches are always case sensitive unless the ignoreCase is set. Limited to 5 matches. Note that this path match includes the query parameters. For gRPC services, this should be a fully-qualified name of the form /package.service/method.", + "items": { + "$ref": "AuthzPolicyAuthzRuleStringMatch" + }, + "type": "array" + } + }, + "type": "object" + }, + "AuthzPolicyAuthzRuleToRequestOperationHeaderSet": { + "description": "Describes a set of HTTP headers to match against.", + "id": "AuthzPolicyAuthzRuleToRequestOperationHeaderSet", + "properties": { + "headers": { + "description": "Required. A list of headers to match against in http header. The match can be one of exact, prefix, suffix, or contains (substring match). The match follows AND semantics which means all the headers must match. Matches are always case sensitive unless the ignoreCase is set. Limited to 5 matches.", + "items": { + "$ref": "AuthzPolicyAuthzRuleHeaderMatch" + }, + "type": "array" + } + }, + "type": "object" + }, + "AuthzPolicyCustomProvider": { + "description": "Allows delegating authorization decisions to Cloud IAP or to Service Extensions.", + "id": "AuthzPolicyCustomProvider", + "properties": { + "authzExtension": { + "$ref": "AuthzPolicyCustomProviderAuthzExtension", + "description": "Optional. Delegate authorization decision to user authored Service Extension. Only one of cloudIap or authzExtension can be specified." + }, + "cloudIap": { + "$ref": "AuthzPolicyCustomProviderCloudIap", + "description": "Optional. Delegates authorization decisions to Cloud IAP. Applicable only for managed load balancers. Enabling Cloud IAP at the AuthzPolicy level is not compatible with Cloud IAP settings in the BackendService. Enabling IAP in both places will result in request failure. Ensure that IAP is enabled in either the AuthzPolicy or the BackendService but not in both places." + } + }, + "type": "object" + }, + "AuthzPolicyCustomProviderAuthzExtension": { + "description": "Optional. Delegate authorization decision to user authored extension. 
Only one of cloudIap or authzExtension can be specified.", + "id": "AuthzPolicyCustomProviderAuthzExtension", + "properties": { + "resources": { + "description": "Required. A list of references to authorization extensions that will be invoked for requests matching this policy. Limited to 1 custom provider.", + "items": { + "type": "string" + }, + "type": "array" + } + }, + "type": "object" + }, + "AuthzPolicyCustomProviderCloudIap": { + "description": "Optional. Delegates authorization decisions to Cloud IAP. Applicable only for managed load balancers. Enabling Cloud IAP at the AuthzPolicy level is not compatible with Cloud IAP settings in the BackendService. Enabling IAP in both places will result in request failure. Ensure that IAP is enabled in either the AuthzPolicy or the BackendService but not in both places.", + "id": "AuthzPolicyCustomProviderCloudIap", + "properties": {}, + "type": "object" + }, + "AuthzPolicyTarget": { + "description": "Specifies the set of targets to which this policy should be applied to.", + "id": "AuthzPolicyTarget", + "properties": { + "loadBalancingScheme": { + "description": "Required. All gateways and forwarding rules referenced by this policy and extensions must share the same load balancing scheme. Supported values: `INTERNAL_MANAGED` and `EXTERNAL_MANAGED`. For more information, refer to [Backend services overview](https://cloud.google.com/load-balancing/docs/backend-service).", + "enum": [ + "LOAD_BALANCING_SCHEME_UNSPECIFIED", + "INTERNAL_MANAGED", + "EXTERNAL_MANAGED", + "INTERNAL_SELF_MANAGED" + ], + "enumDescriptions": [ + "Default value. Do not use.", + "Signifies that this is used for Regional internal or Cross-region internal Application Load Balancing.", + "Signifies that this is used for Global external or Regional external Application Load Balancing.", + "Signifies that this is used for Cloud Service Mesh. Meant for use by CSM GKE controller only." + ], + "type": "string" + }, + "resources": { + "description": "Required. A list of references to the Forwarding Rules on which this policy will be applied.", + "items": { + "type": "string" + }, + "type": "array" + } + }, + "type": "object" + }, "CancelOperationRequest": { "description": "The request message for Operations.CancelOperation.", "id": "CancelOperationRequest", @@ -3494,6 +4002,17 @@ }, "type": "object" }, + "CustomInterceptProfile": { + "description": "CustomInterceptProfile defines the Packet Intercept Endpoint Group used to intercept traffic to a third-party firewall in a Firewall rule.", + "id": "CustomInterceptProfile", + "properties": { + "interceptEndpointGroup": { + "description": "Required. The InterceptEndpointGroup to which traffic associated with the SP should be mirrored.", + "type": "string" + } + }, + "type": "object" + }, "CustomMirroringProfile": { "description": "CustomMirroringProfile defines an action for mirroring traffic to a collector's EndpointGroup", "id": "CustomMirroringProfile", @@ -3700,7 +4219,7 @@ "Active and ready for traffic.", "Being deleted.", "Down or in an error state.", - "The GCP project that housed the association has been deleted." + "The project that housed the association has been deleted." 
], "readOnly": true, "type": "string" @@ -4088,6 +4607,31 @@ }, "type": "object" }, + "ListAuthzPoliciesResponse": { + "description": "Message for response to listing `AuthzPolicy` resources.", + "id": "ListAuthzPoliciesResponse", + "properties": { + "authzPolicies": { + "description": "The list of `AuthzPolicy` resources.", + "items": { + "$ref": "AuthzPolicy" + }, + "type": "array" + }, + "nextPageToken": { + "description": "A token identifying a page of results that the server returns.", + "type": "string" + }, + "unreachable": { + "description": "Locations that could not be reached.", + "items": { + "type": "string" + }, + "type": "array" + } + }, + "type": "object" + }, "ListClientTlsPoliciesResponse": { "description": "Response returned by the ListClientTlsPolicies method.", "id": "ListClientTlsPoliciesResponse", @@ -4531,7 +5075,7 @@ "type": "object" }, "SecurityProfile": { - "description": "SecurityProfile is a resource that defines the behavior for one of many ProfileTypes. Next ID: 12", + "description": "SecurityProfile is a resource that defines the behavior for one of many ProfileTypes.", "id": "SecurityProfile", "properties": { "createTime": { @@ -4540,6 +5084,10 @@ "readOnly": true, "type": "string" }, + "customInterceptProfile": { + "$ref": "CustomInterceptProfile", + "description": "The custom TPPI configuration for the SecurityProfile." + }, "customMirroringProfile": { "$ref": "CustomMirroringProfile", "description": "The custom Packet Mirroring v2 configuration for the SecurityProfile." @@ -4573,12 +5121,14 @@ "enum": [ "PROFILE_TYPE_UNSPECIFIED", "THREAT_PREVENTION", - "CUSTOM_MIRRORING" + "CUSTOM_MIRRORING", + "CUSTOM_INTERCEPT" ], "enumDescriptions": [ "Profile type not specified.", "Profile type for threat prevention.", - "Profile type for packet mirroring v2" + "Profile type for packet mirroring v2", + "Profile type for TPPI." ], "type": "string" }, @@ -4592,7 +5142,7 @@ "type": "object" }, "SecurityProfileGroup": { - "description": "SecurityProfileGroup is a resource that defines the behavior for various ProfileTypes. Next ID: 11", + "description": "SecurityProfileGroup is a resource that defines the behavior for various ProfileTypes.", "id": "SecurityProfileGroup", "properties": { "createTime": { @@ -4601,6 +5151,10 @@ "readOnly": true, "type": "string" }, + "customInterceptProfile": { + "description": "Optional. Reference to a SecurityProfile with the CustomIntercept configuration.", + "type": "string" + }, "customMirroringProfile": { "description": "Optional. 
Reference to a SecurityProfile with the CustomMirroring configuration.", "type": "string" diff --git a/discovery/googleapis/networkservices__v1.json b/discovery/googleapis/networkservices__v1.json index 5f98cef07..6b6a181d7 100644 --- a/discovery/googleapis/networkservices__v1.json +++ b/discovery/googleapis/networkservices__v1.json @@ -25,7 +25,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20240919", + "revision": "20241118", "rootUrl": "https://networkservices.googleapis.com/", "servicePath": "", "title": "Network Services API", @@ -181,6 +181,188 @@ } }, "resources": { + "authzExtensions": { + "methods": { + "create": { + "description": "Creates a new `AuthzExtension` resource in a given project and location.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/authzExtensions", + "httpMethod": "POST", + "id": "networkservices.projects.locations.authzExtensions.create", + "parameterOrder": [ + "parent" + ], + "parameters": { + "authzExtensionId": { + "description": "Required. User-provided ID of the `AuthzExtension` resource to be created.", + "location": "query", + "type": "string" + }, + "parent": { + "description": "Required. The parent resource of the `AuthzExtension` resource. Must be in the format `projects/{project}/locations/{location}`.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+$", + "required": true, + "type": "string" + }, + "requestId": { + "description": "Optional. An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server can ignore the request if it has already been completed. The server guarantees that for at least 60 minutes since the first request. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, ignores the second request. This prevents clients from accidentally creating duplicate commitments. The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000).", + "location": "query", + "type": "string" + } + }, + "path": "v1/{+parent}/authzExtensions", + "request": { + "$ref": "AuthzExtension" + }, + "response": { + "$ref": "Operation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "delete": { + "description": "Deletes the specified `AuthzExtension` resource.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/authzExtensions/{authzExtensionsId}", + "httpMethod": "DELETE", + "id": "networkservices.projects.locations.authzExtensions.delete", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "Required. The name of the `AuthzExtension` resource to delete. Must be in the format `projects/{project}/locations/{location}/authzExtensions/{authz_extension}`.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/authzExtensions/[^/]+$", + "required": true, + "type": "string" + }, + "requestId": { + "description": "Optional. An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server can ignore the request if it has already been completed. The server guarantees that for at least 60 minutes after the first request. For example, consider a situation where you make an initial request and the request times out. 
If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, ignores the second request. This prevents clients from accidentally creating duplicate commitments. The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000).", + "location": "query", + "type": "string" + } + }, + "path": "v1/{+name}", + "response": { + "$ref": "Operation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "get": { + "description": "Gets details of the specified `AuthzExtension` resource.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/authzExtensions/{authzExtensionsId}", + "httpMethod": "GET", + "id": "networkservices.projects.locations.authzExtensions.get", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "Required. A name of the `AuthzExtension` resource to get. Must be in the format `projects/{project}/locations/{location}/authzExtensions/{authz_extension}`.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/authzExtensions/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+name}", + "response": { + "$ref": "AuthzExtension" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "list": { + "description": "Lists `AuthzExtension` resources in a given project and location.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/authzExtensions", + "httpMethod": "GET", + "id": "networkservices.projects.locations.authzExtensions.list", + "parameterOrder": [ + "parent" + ], + "parameters": { + "filter": { + "description": "Optional. Filtering results.", + "location": "query", + "type": "string" + }, + "orderBy": { + "description": "Optional. Hint for how to order the results.", + "location": "query", + "type": "string" + }, + "pageSize": { + "description": "Optional. Requested page size. The server might return fewer items than requested. If unspecified, the server picks an appropriate default.", + "format": "int32", + "location": "query", + "type": "integer" + }, + "pageToken": { + "description": "Optional. A token identifying a page of results that the server returns.", + "location": "query", + "type": "string" + }, + "parent": { + "description": "Required. The project and location from which the `AuthzExtension` resources are listed, specified in the following format: `projects/{project}/locations/{location}`.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+parent}/authzExtensions", + "response": { + "$ref": "ListAuthzExtensionsResponse" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "patch": { + "description": "Updates the parameters of the specified `AuthzExtension` resource.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/authzExtensions/{authzExtensionsId}", + "httpMethod": "PATCH", + "id": "networkservices.projects.locations.authzExtensions.patch", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "Required. Identifier. 
Name of the `AuthzExtension` resource in the following format: `projects/{project}/locations/{location}/authzExtensions/{authz_extension}`.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/authzExtensions/[^/]+$", + "required": true, + "type": "string" + }, + "requestId": { + "description": "Optional. An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server can ignore the request if it has already been completed. The server guarantees that for at least 60 minutes since the first request. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, ignores the second request. This prevents clients from accidentally creating duplicate commitments. The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000).", + "location": "query", + "type": "string" + }, + "updateMask": { + "description": "Required. Used to specify the fields to be overwritten in the `AuthzExtension` resource by the update. The fields specified in the `update_mask` are relative to the resource, not the full request. A field is overwritten if it is in the mask. If the user does not specify a mask, then all fields are overwritten.", + "format": "google-fieldmask", + "location": "query", + "type": "string" + } + }, + "path": "v1/{+name}", + "request": { + "$ref": "AuthzExtension" + }, + "response": { + "$ref": "Operation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + } + } + }, "edgeCacheKeysets": { "methods": { "getIamPolicy": { @@ -766,6 +948,73 @@ "https://www.googleapis.com/auth/cloud-platform" ] } + }, + "resources": { + "routeViews": { + "methods": { + "get": { + "description": "Get a single RouteView of a Gateway.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/gateways/{gatewaysId}/routeViews/{routeViewsId}", + "httpMethod": "GET", + "id": "networkservices.projects.locations.gateways.routeViews.get", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "Required. Name of the GatewayRouteView resource. Formats: projects/{project_number}/locations/{location}/gateways/{gateway_name}/routeViews/{route_view_name}", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/gateways/[^/]+/routeViews/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+name}", + "response": { + "$ref": "GatewayRouteView" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "list": { + "description": "Lists RouteViews", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/gateways/{gatewaysId}/routeViews", + "httpMethod": "GET", + "id": "networkservices.projects.locations.gateways.routeViews.list", + "parameterOrder": [ + "parent" + ], + "parameters": { + "pageSize": { + "description": "Maximum number of GatewayRouteViews to return per call.", + "format": "int32", + "location": "query", + "type": "integer" + }, + "pageToken": { + "description": "The value returned by the last `ListGatewayRouteViewsResponse` Indicates that this is a continuation of a prior `ListGatewayRouteViews` call, and that the system should return the next page of data.", + "location": "query", + "type": "string" + }, + "parent": { + "description": "Required. 
The Gateway to which a Route is associated. Formats: projects/{project_number}/locations/{location}/gateways/{gateway_name}", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/gateways/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+parent}/routeViews", + "response": { + "$ref": "ListGatewayRouteViewsResponse" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + } + } + } } }, "grpcRoutes": { @@ -1601,12 +1850,79 @@ "https://www.googleapis.com/auth/cloud-platform" ] } + }, + "resources": { + "routeViews": { + "methods": { + "get": { + "description": "Get a single RouteView of a Mesh.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/meshes/{meshesId}/routeViews/{routeViewsId}", + "httpMethod": "GET", + "id": "networkservices.projects.locations.meshes.routeViews.get", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "Required. Name of the MeshRouteView resource. Format: projects/{project_number}/locations/{location}/meshes/{mesh_name}/routeViews/{route_view_name}", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/meshes/[^/]+/routeViews/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+name}", + "response": { + "$ref": "MeshRouteView" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "list": { + "description": "Lists RouteViews", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/meshes/{meshesId}/routeViews", + "httpMethod": "GET", + "id": "networkservices.projects.locations.meshes.routeViews.list", + "parameterOrder": [ + "parent" + ], + "parameters": { + "pageSize": { + "description": "Maximum number of MeshRouteViews to return per call.", + "format": "int32", + "location": "query", + "type": "integer" + }, + "pageToken": { + "description": "The value returned by the last `ListMeshRouteViewsResponse` Indicates that this is a continuation of a prior `ListMeshRouteViews` call, and that the system should return the next page of data.", + "location": "query", + "type": "string" + }, + "parent": { + "description": "Required. The Mesh to which a Route is associated. Format: projects/{project_number}/locations/{location}/meshes/{mesh_name}", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/meshes/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+parent}/routeViews", + "response": { + "$ref": "ListMeshRouteViewsResponse" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + } + } + } } }, "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. 
Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "networkservices.projects.locations.operations.cancel", @@ -1980,7 +2296,7 @@ ], "parameters": { "name": { - "description": "Required. Name of the ServiceLbPolicy resource. It matches pattern `projects/{project}/locations/{location}/serviceLbPolicies/{service_lb_policy_name}`.", + "description": "Identifier. Name of the ServiceLbPolicy resource. It matches pattern `projects/{project}/locations/{location}/serviceLbPolicies/{service_lb_policy_name}`.", "location": "path", "pattern": "^projects/[^/]+/locations/[^/]+/serviceLbPolicies/[^/]+$", "required": true, @@ -2319,6 +2635,303 @@ ] } } + }, + "wasmPlugins": { + "methods": { + "create": { + "description": "Creates a new `WasmPlugin` resource in a given project and location.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/wasmPlugins", + "httpMethod": "POST", + "id": "networkservices.projects.locations.wasmPlugins.create", + "parameterOrder": [ + "parent" + ], + "parameters": { + "parent": { + "description": "Required. The parent resource of the `WasmPlugin` resource. Must be in the format `projects/{project}/locations/global`.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+$", + "required": true, + "type": "string" + }, + "wasmPluginId": { + "description": "Required. User-provided ID of the `WasmPlugin` resource to be created.", + "location": "query", + "type": "string" + } + }, + "path": "v1/{+parent}/wasmPlugins", + "request": { + "$ref": "WasmPlugin" + }, + "response": { + "$ref": "Operation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "delete": { + "description": "Deletes the specified `WasmPlugin` resource.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/wasmPlugins/{wasmPluginsId}", + "httpMethod": "DELETE", + "id": "networkservices.projects.locations.wasmPlugins.delete", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "Required. A name of the `WasmPlugin` resource to delete. Must be in the format `projects/{project}/locations/global/wasmPlugins/{wasm_plugin}`.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/wasmPlugins/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+name}", + "response": { + "$ref": "Operation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "get": { + "description": "Gets details of the specified `WasmPlugin` resource.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/wasmPlugins/{wasmPluginsId}", + "httpMethod": "GET", + "id": "networkservices.projects.locations.wasmPlugins.get", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "Required. A name of the `WasmPlugin` resource to get. Must be in the format `projects/{project}/locations/global/wasmPlugins/{wasm_plugin}`.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/wasmPlugins/[^/]+$", + "required": true, + "type": "string" + }, + "view": { + "description": "Determines how much data must be returned in the response. 
See [AIP-157](https://google.aip.dev/157).", + "enum": [ + "WASM_PLUGIN_VIEW_UNSPECIFIED", + "WASM_PLUGIN_VIEW_BASIC", + "WASM_PLUGIN_VIEW_FULL" + ], + "enumDescriptions": [ + "Unspecified value. Do not use.", + "If specified in the `GET` request for a `WasmPlugin` resource, the server's response includes just the `WasmPlugin` resource.", + "If specified in the `GET` request for a `WasmPlugin` resource, the server's response includes the `WasmPlugin` resource with all its versions." + ], + "location": "query", + "type": "string" + } + }, + "path": "v1/{+name}", + "response": { + "$ref": "WasmPlugin" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "list": { + "description": "Lists `WasmPlugin` resources in a given project and location.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/wasmPlugins", + "httpMethod": "GET", + "id": "networkservices.projects.locations.wasmPlugins.list", + "parameterOrder": [ + "parent" + ], + "parameters": { + "pageSize": { + "description": "Maximum number of `WasmPlugin` resources to return per call. If not specified, at most 50 `WasmPlugin` resources are returned. The maximum value is 1000; values above 1000 are coerced to 1000.", + "format": "int32", + "location": "query", + "type": "integer" + }, + "pageToken": { + "description": "The value returned by the last `ListWasmPluginsResponse` call. Indicates that this is a continuation of a prior `ListWasmPlugins` call, and that the next page of data is to be returned.", + "location": "query", + "type": "string" + }, + "parent": { + "description": "Required. The project and location from which the `WasmPlugin` resources are listed, specified in the following format: `projects/{project}/locations/global`.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+parent}/wasmPlugins", + "response": { + "$ref": "ListWasmPluginsResponse" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "patch": { + "description": "Updates the parameters of the specified `WasmPlugin` resource.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/wasmPlugins/{wasmPluginsId}", + "httpMethod": "PATCH", + "id": "networkservices.projects.locations.wasmPlugins.patch", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "Identifier. Name of the `WasmPlugin` resource in the following format: `projects/{project}/locations/{location}/wasmPlugins/{wasm_plugin}`.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/wasmPlugins/[^/]+$", + "required": true, + "type": "string" + }, + "updateMask": { + "description": "Optional. Used to specify the fields to be overwritten in the `WasmPlugin` resource by the update. The fields specified in the `update_mask` field are relative to the resource, not the full request. An omitted `update_mask` field is treated as an implied `update_mask` field equivalent to all fields that are populated (that have a non-empty value). 
The `update_mask` field supports a special value `*`, which means that each field in the given `WasmPlugin` resource (including the empty ones) replaces the current value.", + "format": "google-fieldmask", + "location": "query", + "type": "string" + } + }, + "path": "v1/{+name}", + "request": { + "$ref": "WasmPlugin" + }, + "response": { + "$ref": "Operation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + } + }, + "resources": { + "versions": { + "methods": { + "create": { + "description": "Creates a new `WasmPluginVersion` resource in a given project and location.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/wasmPlugins/{wasmPluginsId}/versions", + "httpMethod": "POST", + "id": "networkservices.projects.locations.wasmPlugins.versions.create", + "parameterOrder": [ + "parent" + ], + "parameters": { + "parent": { + "description": "Required. The parent resource of the `WasmPluginVersion` resource. Must be in the format `projects/{project}/locations/global/wasmPlugins/{wasm_plugin}`.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/wasmPlugins/[^/]+$", + "required": true, + "type": "string" + }, + "wasmPluginVersionId": { + "description": "Required. User-provided ID of the `WasmPluginVersion` resource to be created.", + "location": "query", + "type": "string" + } + }, + "path": "v1/{+parent}/versions", + "request": { + "$ref": "WasmPluginVersion" + }, + "response": { + "$ref": "Operation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "delete": { + "description": "Deletes the specified `WasmPluginVersion` resource.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/wasmPlugins/{wasmPluginsId}/versions/{versionsId}", + "httpMethod": "DELETE", + "id": "networkservices.projects.locations.wasmPlugins.versions.delete", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "Required. A name of the `WasmPluginVersion` resource to delete. Must be in the format `projects/{project}/locations/global/wasmPlugins/{wasm_plugin}/versions/{wasm_plugin_version}`.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/wasmPlugins/[^/]+/versions/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+name}", + "response": { + "$ref": "Operation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "get": { + "description": "Gets details of the specified `WasmPluginVersion` resource.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/wasmPlugins/{wasmPluginsId}/versions/{versionsId}", + "httpMethod": "GET", + "id": "networkservices.projects.locations.wasmPlugins.versions.get", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "Required. A name of the `WasmPluginVersion` resource to get. 
Must be in the format `projects/{project}/locations/global/wasmPlugins/{wasm_plugin}/versions/{wasm_plugin_version}`.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/wasmPlugins/[^/]+/versions/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+name}", + "response": { + "$ref": "WasmPluginVersion" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "list": { + "description": "Lists `WasmPluginVersion` resources in a given project and location.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/wasmPlugins/{wasmPluginsId}/versions", + "httpMethod": "GET", + "id": "networkservices.projects.locations.wasmPlugins.versions.list", + "parameterOrder": [ + "parent" + ], + "parameters": { + "pageSize": { + "description": "Maximum number of `WasmPluginVersion` resources to return per call. If not specified, at most 50 `WasmPluginVersion` resources are returned. The maximum value is 1000; values above 1000 are coerced to 1000.", + "format": "int32", + "location": "query", + "type": "integer" + }, + "pageToken": { + "description": "The value returned by the last `ListWasmPluginVersionsResponse` call. Indicates that this is a continuation of a prior `ListWasmPluginVersions` call, and that the next page of data is to be returned.", + "location": "query", + "type": "string" + }, + "parent": { + "description": "Required. The `WasmPlugin` resource whose `WasmPluginVersion`s are listed, specified in the following format: `projects/{project}/locations/global/wasmPlugins/{wasm_plugin}`.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/wasmPlugins/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+parent}/versions", + "response": { + "$ref": "ListWasmPluginVersionsResponse" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + } + } + } + } } } } @@ -2374,6 +2987,98 @@ }, "type": "object" }, + "AuthzExtension": { + "description": "`AuthzExtension` is a resource that allows traffic forwarding to a callout backend service to make an authorization decision.", + "id": "AuthzExtension", + "properties": { + "authority": { + "description": "Required. The `:authority` header in the gRPC request sent from Envoy to the extension service.", + "type": "string" + }, + "createTime": { + "description": "Output only. The timestamp when the resource was created.", + "format": "google-datetime", + "readOnly": true, + "type": "string" + }, + "description": { + "description": "Optional. A human-readable description of the resource.", + "type": "string" + }, + "failOpen": { + "description": "Optional. Determines how the proxy behaves if the call to the extension fails or times out. When set to `TRUE`, request or response processing continues without error. Any subsequent extensions in the extension chain are also executed. When set to `FALSE` or the default setting of `FALSE` is used, one of the following happens: * If response headers have not been delivered to the downstream client, a generic 500 error is returned to the client. The error response can be tailored by configuring a custom error response in the load balancer. * If response headers have been delivered, then the HTTP stream to the downstream client is reset.", + "type": "boolean" + }, + "forwardHeaders": { + "description": "Optional. List of the HTTP headers to forward to the extension (from the client). If omitted, all headers are sent. 
Each element is a string indicating the header name.", + "items": { + "type": "string" + }, + "type": "array" + }, + "labels": { + "additionalProperties": { + "type": "string" + }, + "description": "Optional. Set of labels associated with the `AuthzExtension` resource. The format must comply with [the requirements for labels](/compute/docs/labeling-resources#requirements) for Google Cloud resources.", + "type": "object" + }, + "loadBalancingScheme": { + "description": "Required. All backend services and forwarding rules referenced by this extension must share the same load balancing scheme. Supported values: `INTERNAL_MANAGED`, `EXTERNAL_MANAGED`. For more information, refer to [Backend services overview](https://cloud.google.com/load-balancing/docs/backend-service).", + "enum": [ + "LOAD_BALANCING_SCHEME_UNSPECIFIED", + "INTERNAL_MANAGED", + "EXTERNAL_MANAGED" + ], + "enumDescriptions": [ + "Default value. Do not use.", + "Signifies that this is used for Internal HTTP(S) Load Balancing.", + "Signifies that this is used for External Managed HTTP(S) Load Balancing." + ], + "type": "string" + }, + "metadata": { + "additionalProperties": { + "description": "Properties of the object.", + "type": "any" + }, + "description": "Optional. The metadata provided here is included as part of the `metadata_context` (of type `google.protobuf.Struct`) in the `ProcessingRequest` message sent to the extension server. The metadata is available under the namespace `com.google.authz_extension.`. The following variables are supported in the metadata Struct: `{forwarding_rule_id}` - substituted with the forwarding rule's fully qualified resource name.", + "type": "object" + }, + "name": { + "description": "Required. Identifier. Name of the `AuthzExtension` resource in the following format: `projects/{project}/locations/{location}/authzExtensions/{authz_extension}`.", + "type": "string" + }, + "service": { + "description": "Required. The reference to the service that runs the extension. To configure a callout extension, `service` must be a fully-qualified reference to a [backend service](https://cloud.google.com/compute/docs/reference/rest/v1/backendServices) in the format: `https://www.googleapis.com/compute/v1/projects/{project}/regions/{region}/backendServices/{backendService}` or `https://www.googleapis.com/compute/v1/projects/{project}/global/backendServices/{backendService}`.", + "type": "string" + }, + "timeout": { + "description": "Required. Specifies the timeout for each individual message on the stream. The timeout must be between 10-10000 milliseconds.", + "format": "google-duration", + "type": "string" + }, + "updateTime": { + "description": "Output only. The timestamp when the resource was updated.", + "format": "google-datetime", + "readOnly": true, + "type": "string" + }, + "wireFormat": { + "description": "Optional. The format of communication supported by the callout extension. If not specified, the default is `EXT_PROC_GRPC`.", + "enum": [ + "WIRE_FORMAT_UNSPECIFIED", + "EXT_PROC_GRPC" + ], + "enumDescriptions": [ + "Not specified.", + "The extension service uses ExtProc GRPC API over a gRPC stream. This is the default value if the wire format is not specified. The backend service for the extension must use HTTP2 or H2C as the protocol. All `supported_events` for a client request will be sent as part of the same gRPC stream." 
+ ], + "type": "string" + } + }, + "type": "object" + }, "Binding": { "description": "Associates `members`, or principals, with a `role`.", "id": "Binding", @@ -2580,7 +3285,7 @@ "id": "ExtensionChainExtension", "properties": { "authority": { - "description": "Optional. The `:authority` header in the gRPC request sent from Envoy to the extension service. Required for Callout extensions.", + "description": "Optional. The `:authority` header in the gRPC request sent from Envoy to the extension service. Required for Callout extensions. This field is not supported for plugin extensions. Setting it results in a validation error.", "type": "string" }, "failOpen": { @@ -2592,18 +3297,26 @@ "items": { "type": "string" }, - "type": "array" + "type": "array" + }, + "metadata": { + "additionalProperties": { + "description": "Properties of the object.", + "type": "any" + }, + "description": "Optional. The metadata provided here is included as part of the `metadata_context` (of type `google.protobuf.Struct`) in the `ProcessingRequest` message sent to the extension server. The metadata is available under the namespace `com.google....`. For example: `com.google.lb_traffic_extension.lbtrafficextension1.chain1.ext1`. The following variables are supported in the metadata: `{forwarding_rule_id}` - substituted with the forwarding rule's fully qualified resource name. This field is not supported for plugin extensions. Setting it results in a validation error.", + "type": "object" }, "name": { "description": "Required. The name for this extension. The name is logged as part of the HTTP request logs. The name must conform with RFC-1034, is restricted to lower-cased letters, numbers and hyphens, and can have a maximum length of 63 characters. Additionally, the first character must be a letter and the last a letter or a number.", "type": "string" }, "service": { - "description": "Required. The reference to the service that runs the extension. Currently only callout extensions are supported here. To configure a callout extension, `service` must be a fully-qualified reference to a [backend service](https://cloud.google.com/compute/docs/reference/rest/v1/backendServices) in the format: `https://www.googleapis.com/compute/v1/projects/{project}/regions/{region}/backendServices/{backendService}` or `https://www.googleapis.com/compute/v1/projects/{project}/global/backendServices/{backendService}`.", + "description": "Required. The reference to the service that runs the extension. To configure a callout extension, `service` must be a fully-qualified reference to a [backend service](https://cloud.google.com/compute/docs/reference/rest/v1/backendServices) in the format: `https://www.googleapis.com/compute/v1/projects/{project}/regions/{region}/backendServices/{backendService}` or `https://www.googleapis.com/compute/v1/projects/{project}/global/backendServices/{backendService}`. To configure a plugin extension, `service` must be a reference to a [`WasmPlugin` resource](https://cloud.google.com/service-extensions/docs/reference/rest/v1beta1/projects.locations.wasmPlugins) in the format: `projects/{project}/locations/{location}/wasmPlugins/{plugin}` or `//networkservices.googleapis.com/projects/{project}/locations/{location}/wasmPlugins/{wasmPlugin}`. Plugin extensions are currently supported for the `LbTrafficExtension` and the `LbRouteExtension` resources.", "type": "string" }, "supportedEvents": { - "description": "Optional. A set of events during request or response processing for which this extension is called. 
This field is required for the `LbTrafficExtension` resource. It must not be set for the `LbRouteExtension` resource.", + "description": "Optional. A set of events during request or response processing for which this extension is called. This field is required for the `LbTrafficExtension` resource. It must not be set for the `LbRouteExtension` resource, otherwise a validation error is returned.", "items": { "enum": [ "EVENT_TYPE_UNSPECIFIED", @@ -2628,7 +3341,7 @@ "type": "array" }, "timeout": { - "description": "Optional. Specifies the timeout for each individual message on the stream. The timeout must be between 10-1000 milliseconds. Required for Callout extensions.", + "description": "Optional. Specifies the timeout for each individual message on the stream. The timeout must be between `10`-`1000` milliseconds. Required for callout extensions. This field is not supported for plugin extensions. Setting it results in a validation error.", "format": "google-duration", "type": "string" } @@ -2781,6 +3494,39 @@ }, "type": "object" }, + "GatewayRouteView": { + "description": "GatewayRouteView defines view-only resource for Routes to a Gateway", + "id": "GatewayRouteView", + "properties": { + "name": { + "description": "Output only. Identifier. Full path name of the GatewayRouteView resource. Format: projects/{project_number}/locations/{location}/gateways/{gateway_name}/routeViews/{route_view_name}", + "readOnly": true, + "type": "string" + }, + "routeId": { + "description": "Output only. The resource id for the route.", + "readOnly": true, + "type": "string" + }, + "routeLocation": { + "description": "Output only. Location where the route exists.", + "readOnly": true, + "type": "string" + }, + "routeProjectNumber": { + "description": "Output only. Project number where the route exists.", + "format": "int64", + "readOnly": true, + "type": "string" + }, + "routeType": { + "description": "Output only. Type of the route: HttpRoute,GrpcRoute,TcpRoute, or TlsRoute", + "readOnly": true, + "type": "string" + } + }, + "type": "object" + }, "GrpcRoute": { "description": "GrpcRoute is the resource defining how gRPC traffic routed by a Mesh or Gateway resource is routed.", "id": "GrpcRoute", @@ -3660,7 +4406,7 @@ "type": "array" }, "forwardingRules": { - "description": "Required. A list of references to the forwarding rules to which this service extension is attached to. At least one forwarding rule is required. There can be only one `LbRouteExtension` resource per forwarding rule.", + "description": "Required. A list of references to the forwarding rules to which this service extension is attached. At least one forwarding rule is required. There can be only one `LbRouteExtension` resource per forwarding rule.", "items": { "type": "string" }, @@ -3692,7 +4438,7 @@ "description": "Properties of the object.", "type": "any" }, - "description": "Optional. The metadata provided here is included as part of the `metadata_context` (of type `google.protobuf.Struct`) in the `ProcessingRequest` message sent to the extension server. The metadata is available under the namespace `com.google.lb_route_extension.`. The following variables are supported in the metadata Struct: `{forwarding_rule_id}` - substituted with the forwarding rule's fully qualified resource name.", + "description": "Optional. The metadata provided here is included as part of the `metadata_context` (of type `google.protobuf.Struct`) in the `ProcessingRequest` message sent to the extension server. 
The metadata is available under the namespace `com.google.lb_route_extension.`. The following variables are supported in the metadata Struct: `{forwarding_rule_id}` - substituted with the forwarding rule's fully qualified resource name. This field is not supported for plugin extensions. Setting it results in a validation error.", "type": "object" }, "name": { @@ -3730,7 +4476,7 @@ "type": "array" }, "forwardingRules": { - "description": "Required. A list of references to the forwarding rules to which this service extension is attached to. At least one forwarding rule is required. There can be only one `LBTrafficExtension` resource per forwarding rule.", + "description": "Optional. A list of references to the forwarding rules to which this service extension is attached. At least one forwarding rule is required. There can be only one `LBTrafficExtension` resource per forwarding rule.", "items": { "type": "string" }, @@ -3762,7 +4508,7 @@ "description": "Properties of the object.", "type": "any" }, - "description": "Optional. The metadata provided here is included in the `ProcessingRequest.metadata_context.filter_metadata` map field. The metadata is available under the key `com.google.lb_traffic_extension.`. The following variables are supported in the metadata: `{forwarding_rule_id}` - substituted with the forwarding rule's fully qualified resource name.", + "description": "Optional. The metadata provided here is included in the `ProcessingRequest.metadata_context.filter_metadata` map field. The metadata is available under the key `com.google.lb_traffic_extension.`. The following variables are supported in the metadata: `{forwarding_rule_id}` - substituted with the forwarding rule's fully qualified resource name. This field is not supported for plugin extensions. Setting it results in a validation error.", "type": "object" }, "name": { @@ -3778,6 +4524,31 @@ }, "type": "object" }, + "ListAuthzExtensionsResponse": { + "description": "Message for response to listing `AuthzExtension` resources.", + "id": "ListAuthzExtensionsResponse", + "properties": { + "authzExtensions": { + "description": "The list of `AuthzExtension` resources.", + "items": { + "$ref": "AuthzExtension" + }, + "type": "array" + }, + "nextPageToken": { + "description": "A token identifying a page of results that the server returns.", + "type": "string" + }, + "unreachable": { + "description": "Locations that could not be reached.", + "items": { + "type": "string" + }, + "type": "array" + } + }, + "type": "object" + }, "ListEndpointPoliciesResponse": { "description": "Response returned by the ListEndpointPolicies method.", "id": "ListEndpointPoliciesResponse", @@ -3796,6 +4567,24 @@ }, "type": "object" }, + "ListGatewayRouteViewsResponse": { + "description": "Response returned by the ListGatewayRouteViews method.", + "id": "ListGatewayRouteViewsResponse", + "properties": { + "gatewayRouteViews": { + "description": "List of GatewayRouteView resources.", + "items": { + "$ref": "GatewayRouteView" + }, + "type": "array" + }, + "nextPageToken": { + "description": "A token, which can be sent as `page_token` to retrieve the next page. 
If this field is omitted, there are no subsequent pages.", + "type": "string" + } + }, + "type": "object" + }, "ListGatewaysResponse": { "description": "Response returned by the ListGateways method.", "id": "ListGatewaysResponse", @@ -3925,6 +4714,24 @@ }, "type": "object" }, + "ListMeshRouteViewsResponse": { + "description": "Response returned by the ListMeshRouteViews method.", + "id": "ListMeshRouteViewsResponse", + "properties": { + "meshRouteViews": { + "description": "List of MeshRouteView resources.", + "items": { + "$ref": "MeshRouteView" + }, + "type": "array" + }, + "nextPageToken": { + "description": "A token, which can be sent as `page_token` to retrieve the next page. If this field is omitted, there are no subsequent pages.", + "type": "string" + } + }, + "type": "object" + }, "ListMeshesResponse": { "description": "Response returned by the ListMeshes method.", "id": "ListMeshesResponse", @@ -4033,6 +4840,42 @@ }, "type": "object" }, + "ListWasmPluginVersionsResponse": { + "description": "Response returned by the `ListWasmPluginVersions` method.", + "id": "ListWasmPluginVersionsResponse", + "properties": { + "nextPageToken": { + "description": "If there might be more results than those appearing in this response, then `next_page_token` is included. To get the next set of results, call this method again using the value of `next_page_token` as `page_token`.", + "type": "string" + }, + "wasmPluginVersions": { + "description": "List of `WasmPluginVersion` resources.", + "items": { + "$ref": "WasmPluginVersion" + }, + "type": "array" + } + }, + "type": "object" + }, + "ListWasmPluginsResponse": { + "description": "Response returned by the `ListWasmPlugins` method.", + "id": "ListWasmPluginsResponse", + "properties": { + "nextPageToken": { + "description": "If there might be more results than those appearing in this response, then `next_page_token` is included. To get the next set of results, call this method again using the value of `next_page_token` as `page_token`.", + "type": "string" + }, + "wasmPlugins": { + "description": "List of `WasmPlugin` resources.", + "items": { + "$ref": "WasmPlugin" + }, + "type": "array" + } + }, + "type": "object" + }, "Location": { "description": "A resource that represents a Google Cloud location.", "id": "Location", @@ -4068,7 +4911,7 @@ "type": "object" }, "LoggingConfig": { - "description": "The configuration for Platform Telemetry logging for Eventarc Avdvanced resources.", + "description": "The configuration for Platform Telemetry logging for Eventarc Advanced resources.", "id": "LoggingConfig", "properties": { "logSeverity": { @@ -4160,6 +5003,39 @@ }, "type": "object" }, + "MeshRouteView": { + "description": "MeshRouteView defines view-only resource for Routes to a Mesh", + "id": "MeshRouteView", + "properties": { + "name": { + "description": "Output only. Identifier. Full path name of the MeshRouteView resource. Format: projects/{project_number}/locations/{location}/meshes/{mesh_name}/routeViews/{route_view_name}", + "readOnly": true, + "type": "string" + }, + "routeId": { + "description": "Output only. The resource id for the route.", + "readOnly": true, + "type": "string" + }, + "routeLocation": { + "description": "Output only. Location where the route exists.", + "readOnly": true, + "type": "string" + }, + "routeProjectNumber": { + "description": "Output only. Project number where the route exists.", + "format": "int64", + "readOnly": true, + "type": "string" + }, + "routeType": { + "description": "Output only. 
Type of the route: HttpRoute,GrpcRoute,TcpRoute, or TlsRoute", + "readOnly": true, + "type": "string" + } + }, + "type": "object" + }, "Operation": { "description": "This resource represents a long-running operation that is the result of a network API call.", "id": "Operation", @@ -4371,7 +5247,7 @@ "type": "string" }, "name": { - "description": "Required. Name of the ServiceLbPolicy resource. It matches pattern `projects/{project}/locations/{location}/serviceLbPolicies/{service_lb_policy_name}`.", + "description": "Identifier. Name of the ServiceLbPolicy resource. It matches pattern `projects/{project}/locations/{location}/serviceLbPolicies/{service_lb_policy_name}`.", "type": "string" }, "updateTime": { @@ -4756,6 +5632,223 @@ } }, "type": "object" + }, + "WasmPlugin": { + "description": "`WasmPlugin` is a resource representing a service executing a customer-provided Wasm module.", + "id": "WasmPlugin", + "properties": { + "createTime": { + "description": "Output only. The timestamp when the resource was created.", + "format": "google-datetime", + "readOnly": true, + "type": "string" + }, + "description": { + "description": "Optional. A human-readable description of the resource.", + "type": "string" + }, + "labels": { + "additionalProperties": { + "type": "string" + }, + "description": "Optional. Set of labels associated with the `WasmPlugin` resource. The format must comply with [the following requirements](/compute/docs/labeling-resources#requirements).", + "type": "object" + }, + "logConfig": { + "$ref": "WasmPluginLogConfig", + "description": "Optional. Specifies the logging options for the activity performed by this plugin. If logging is enabled, plugin logs are exported to Cloud Logging. Note that the settings relate to the logs generated by using logging statements in your Wasm code." + }, + "mainVersionId": { + "description": "Optional. The ID of the `WasmPluginVersion` resource that is the currently serving one. The version referred to must be a child of this `WasmPlugin` resource.", + "type": "string" + }, + "name": { + "description": "Identifier. Name of the `WasmPlugin` resource in the following format: `projects/{project}/locations/{location}/wasmPlugins/{wasm_plugin}`.", + "type": "string" + }, + "updateTime": { + "description": "Output only. The timestamp when the resource was updated.", + "format": "google-datetime", + "readOnly": true, + "type": "string" + }, + "usedBy": { + "description": "Output only. List of all [extensions](https://cloud.google.com/service-extensions/docs/overview) that use this `WasmPlugin` resource.", + "items": { + "$ref": "WasmPluginUsedBy" + }, + "readOnly": true, + "type": "array" + }, + "versions": { + "additionalProperties": { + "$ref": "WasmPluginVersionDetails" + }, + "description": "Optional. All versions of this `WasmPlugin` resource in the key-value format. The key is the resource ID, and the value is the `VersionDetails` object. Lets you create or update a `WasmPlugin` resource and its versions in a single request. When the `main_version_id` field is not empty, it must point to one of the `VersionDetails` objects in the map. If provided in a `PATCH` request, the new versions replace the previous set. Any version omitted from the `versions` field is removed. Because the `WasmPluginVersion` resource is immutable, if a `WasmPluginVersion` resource with the same name already exists and differs, the request fails. 
Note: In a `GET` request, this field is populated only if the field `GetWasmPluginRequest.view` is set to `WASM_PLUGIN_VIEW_FULL`.", + "type": "object" + } + }, + "type": "object" + }, + "WasmPluginLogConfig": { + "description": "Specifies the logging options for the activity performed by this plugin. If logging is enabled, plugin logs are exported to Cloud Logging.", + "id": "WasmPluginLogConfig", + "properties": { + "enable": { + "description": "Optional. Specifies whether to enable logging for activity by this plugin. Defaults to `false`.", + "type": "boolean" + }, + "minLogLevel": { + "description": "Non-empty default. Specifies the lowest level of the plugin logs that are exported to Cloud Logging. This setting relates to the logs generated by using logging statements in your Wasm code. This field can be set only if logging is enabled for the plugin. If the field is not provided when logging is enabled, it is set to `INFO` by default.", + "enum": [ + "LOG_LEVEL_UNSPECIFIED", + "TRACE", + "DEBUG", + "INFO", + "WARN", + "ERROR", + "CRITICAL" + ], + "enumDescriptions": [ + "Unspecified value. Defaults to `LogLevel.INFO`.", + "Report logs with TRACE level and above.", + "Report logs with DEBUG level and above.", + "Report logs with INFO level and above.", + "Report logs with WARN level and above.", + "Report logs with ERROR level and above.", + "Report logs with CRITICAL level only." + ], + "type": "string" + }, + "sampleRate": { + "description": "Non-empty default. Configures the sampling rate of activity logs, where `1.0` means all logged activity is reported and `0.0` means no activity is reported. A floating point value between `0.0` and `1.0` indicates that a percentage of log messages is stored. The default value when logging is enabled is `1.0`. The value of the field must be between `0` and `1` (inclusive). This field can be specified only if logging is enabled for this plugin.", + "format": "float", + "type": "number" + } + }, + "type": "object" + }, + "WasmPluginUsedBy": { + "description": "Defines a resource that uses the `WasmPlugin` resource.", + "id": "WasmPluginUsedBy", + "properties": { + "name": { + "description": "Output only. Full name of the resource https://google.aip.dev/122#full-resource-names, for example `//networkservices.googleapis.com/projects/{project}/locations/{location}/lbRouteExtensions/{extension}`", + "readOnly": true, + "type": "string" + } + }, + "type": "object" + }, + "WasmPluginVersion": { + "description": "A single immutable version of a `WasmPlugin` resource. Defines the Wasm module used and optionally its runtime config.", + "id": "WasmPluginVersion", + "properties": { + "createTime": { + "description": "Output only. The timestamp when the resource was created.", + "format": "google-datetime", + "readOnly": true, + "type": "string" + }, + "description": { + "description": "Optional. A human-readable description of the resource.", + "type": "string" + }, + "imageDigest": { + "description": "Output only. The resolved digest for the image specified in the `image` field. The digest is resolved during the creation of `WasmPluginVersion` resource. This field holds the digest value, regardless of whether a tag or digest was originally specified in the `image` field.", + "readOnly": true, + "type": "string" + }, + "imageUri": { + "description": "Optional. URI of the container image containing the plugin, stored in the Artifact Registry. 
When a new `WasmPluginVersion` resource is created, the digest of the container image is saved in the `image_digest` field. When downloading an image, the digest value is used instead of an image tag.", + "type": "string" + }, + "labels": { + "additionalProperties": { + "type": "string" + }, + "description": "Optional. Set of labels associated with the `WasmPluginVersion` resource.", + "type": "object" + }, + "name": { + "description": "Identifier. Name of the `WasmPluginVersion` resource in the following format: `projects/{project}/locations/{location}/wasmPlugins/{wasm_plugin}/ versions/{wasm_plugin_version}`.", + "type": "string" + }, + "pluginConfigData": { + "description": "Configuration for the plugin. The configuration is provided to the plugin at runtime through the `ON_CONFIGURE` callback. When a new `WasmPluginVersion` resource is created, the digest of the contents is saved in the `plugin_config_digest` field.", + "format": "byte", + "type": "string" + }, + "pluginConfigDigest": { + "description": "Output only. This field holds the digest (usually checksum) value for the plugin configuration. The value is calculated based on the contents of `plugin_config_data` or the container image defined by the `plugin_config_uri` field.", + "readOnly": true, + "type": "string" + }, + "pluginConfigUri": { + "description": "URI of the plugin configuration stored in the Artifact Registry. The configuration is provided to the plugin at runtime through the `ON_CONFIGURE` callback. The container image must contain only a single file with the name `plugin.config`. When a new `WasmPluginVersion` resource is created, the digest of the container image is saved in the `plugin_config_digest` field.", + "type": "string" + }, + "updateTime": { + "description": "Output only. The timestamp when the resource was updated.", + "format": "google-datetime", + "readOnly": true, + "type": "string" + } + }, + "type": "object" + }, + "WasmPluginVersionDetails": { + "description": "Details of a `WasmPluginVersion` resource to be inlined in the `WasmPlugin` resource.", + "id": "WasmPluginVersionDetails", + "properties": { + "createTime": { + "description": "Output only. The timestamp when the resource was created.", + "format": "google-datetime", + "readOnly": true, + "type": "string" + }, + "description": { + "description": "Optional. A human-readable description of the resource.", + "type": "string" + }, + "imageDigest": { + "description": "Output only. The resolved digest for the image specified in `image`. The digest is resolved during the creation of a `WasmPluginVersion` resource. This field holds the digest value regardless of whether a tag or digest was originally specified in the `image` field.", + "readOnly": true, + "type": "string" + }, + "imageUri": { + "description": "Optional. URI of the container image containing the Wasm module, stored in the Artifact Registry. The container image must contain only a single file with the name `plugin.wasm`. When a new `WasmPluginVersion` resource is created, the URI gets resolved to an image digest and saved in the `image_digest` field.", + "type": "string" + }, + "labels": { + "additionalProperties": { + "type": "string" + }, + "description": "Optional. Set of labels associated with the `WasmPluginVersion` resource.", + "type": "object" + }, + "pluginConfigData": { + "description": "Configuration for the plugin. The configuration is provided to the plugin at runtime through the `ON_CONFIGURE` callback. 
When a new `WasmPluginVersion` version is created, the digest of the contents is saved in the `plugin_config_digest` field.", + "format": "byte", + "type": "string" + }, + "pluginConfigDigest": { + "description": "Output only. This field holds the digest (usually checksum) value for the plugin configuration. The value is calculated based on the contents of the `plugin_config_data` field or the container image defined by the `plugin_config_uri` field.", + "readOnly": true, + "type": "string" + }, + "pluginConfigUri": { + "description": "URI of the plugin configuration stored in the Artifact Registry. The configuration is provided to the plugin at runtime through the `ON_CONFIGURE` callback. The container image must contain only a single file with the name `plugin.config`. When a new `WasmPluginVersion` resource is created, the digest of the container image is saved in the `plugin_config_digest` field.", + "type": "string" + }, + "updateTime": { + "description": "Output only. The timestamp when the resource was updated.", + "format": "google-datetime", + "readOnly": true, + "type": "string" + } + }, + "type": "object" } } } diff --git a/discovery/googleapis/notebooks__v1.json b/discovery/googleapis/notebooks__v1.json index e42582ccd..c66d7cca7 100644 --- a/discovery/googleapis/notebooks__v1.json +++ b/discovery/googleapis/notebooks__v1.json @@ -25,7 +25,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20240918", + "revision": "20241204", "rootUrl": "https://notebooks.googleapis.com/", "servicePath": "", "title": "Notebooks API", @@ -1205,7 +1205,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "notebooks.projects.locations.operations.cancel", @@ -2134,7 +2134,7 @@ "description": "Required. Defines flags that are used to run the diagnostic tool" }, "timeoutMinutes": { - "description": "Optional. Maxmium amount of time in minutes before the operation times out.", + "description": "Optional. Maximum amount of time in minutes before the operation times out.", "format": "int32", "type": "integer" } @@ -2150,7 +2150,7 @@ "description": "Required. 
Defines flags that are used to run the diagnostic tool" }, "timeoutMinutes": { - "description": "Optional. Maxmium amount of time in minutes before the operation times out.", + "description": "Optional. Maximum amount of time in minutes before the operation times out.", "format": "int32", "type": "integer" } @@ -2708,7 +2708,7 @@ "type": "object" }, "machineType": { - "description": "Required. The [Compute Engine machine type](https://cloud.google.com/compute/docs/machine-types) of this instance.", + "description": "Required. The [Compute Engine machine type](https://cloud.google.com/compute/docs/machine-resource) of this instance.", "type": "string" }, "metadata": { @@ -3362,7 +3362,7 @@ "type": "string" }, "requestedCancellation": { - "description": "Identifies whether the user has requested cancellation of the operation. Operations that have successfully been cancelled have Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Identifies whether the user has requested cancellation of the operation. Operations that have successfully been cancelled have google.longrunning.Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "type": "boolean" }, "statusMessage": { @@ -3970,9 +3970,9 @@ "enumDescriptions": [ "Unspecified state.", "The job is executing normally.", - "The job is paused by the user. It will not execute. A user can intentionally pause the job using PauseJobRequest.", + "The job is paused by the user. It will not execute. A user can intentionally pause the job using [Cloud Scheduler](https://cloud.google.com/scheduler/docs/creating#pause).", "The job is disabled by the system due to error. The user cannot directly set a job to be disabled.", - "The job state resulting from a failed CloudScheduler.UpdateJob operation. To recover a job from this state, retry CloudScheduler.UpdateJob until a successful response is received.", + "The job state resulting from a failed [CloudScheduler.UpdateJob](https://cloud.google.com/scheduler/docs/creating#edit) operation. To recover a job from this state, retry [CloudScheduler.UpdateJob](https://cloud.google.com/scheduler/docs/creating#edit) until a successful response is received.", "The schedule resource is being created.", "The schedule resource is being deleted." ], @@ -4111,7 +4111,7 @@ "id": "SetInstanceMachineTypeRequest", "properties": { "machineType": { - "description": "Required. The [Compute Engine machine type](https://cloud.google.com/compute/docs/machine-types).", + "description": "Required. The [Compute Engine machine type](https://cloud.google.com/compute/docs/machine-resource).", "type": "string" } }, diff --git a/discovery/googleapis/notebooks__v2.json b/discovery/googleapis/notebooks__v2.json index 6c6426f47..a3db7929e 100644 --- a/discovery/googleapis/notebooks__v2.json +++ b/discovery/googleapis/notebooks__v2.json @@ -25,7 +25,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20240918", + "revision": "20241204", "rootUrl": "https://notebooks.googleapis.com/", "servicePath": "", "title": "Notebooks API", @@ -783,7 +783,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. 
Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v2/projects/{projectsId}/locations/{locationsId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "notebooks.projects.locations.operations.cancel", @@ -1170,7 +1170,7 @@ "description": "Required. Defines flags that are used to run the diagnostic tool" }, "timeoutMinutes": { - "description": "Optional. Maxmium amount of time in minutes before the operation times out.", + "description": "Optional. Maximum amount of time in minutes before the operation times out.", "format": "int32", "type": "integer" } @@ -1408,6 +1408,10 @@ "description": "Optional. If true, the notebook instance will not register with the proxy.", "type": "boolean" }, + "enableThirdPartyIdentity": { + "description": "Optional. Flag that specifies that a notebook can be accessed with third party identity provider.", + "type": "boolean" + }, "gceSetup": { "$ref": "GceSetup", "description": "Optional. Compute Engine setup for the notebook. Uses notebook-defined fields." @@ -1720,7 +1724,7 @@ "type": "string" }, "requestedCancellation": { - "description": "Identifies whether the user has requested cancellation of the operation. Operations that have successfully been cancelled have Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Identifies whether the user has requested cancellation of the operation. Operations that have successfully been cancelled have google.longrunning.Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "type": "boolean" }, "statusMessage": { diff --git a/discovery/googleapis/ondemandscanning__v1.json b/discovery/googleapis/ondemandscanning__v1.json index 535ef2fa4..92644daa8 100644 --- a/discovery/googleapis/ondemandscanning__v1.json +++ b/discovery/googleapis/ondemandscanning__v1.json @@ -25,7 +25,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20240715", + "revision": "20241209", "rootUrl": "https://ondemandscanning.googleapis.com/", "servicePath": "", "title": "On-Demand Scanning API", @@ -116,7 +116,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. 
On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "ondemandscanning.projects.locations.operations.cancel", diff --git a/discovery/googleapis/oracledatabase__v1.json b/discovery/googleapis/oracledatabase__v1.json index 849f2a72d..6ab3b4137 100644 --- a/discovery/googleapis/oracledatabase__v1.json +++ b/discovery/googleapis/oracledatabase__v1.json @@ -25,7 +25,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20241009", + "revision": "20241210", "rootUrl": "https://oracledatabase.googleapis.com/", "servicePath": "", "title": "Oracle Database@Google Cloud API", @@ -998,7 +998,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "oracledatabase.projects.locations.operations.cancel", @@ -2636,8 +2636,7 @@ "type": "integer" }, "systemVersion": { - "description": "Output only. Operating system version of the image.", - "readOnly": true, + "description": "Optional. Operating system version of the image.", "type": "string" }, "timeZone": { @@ -3070,13 +3069,15 @@ "STATE_UNSPECIFIED", "ACCOUNT_NOT_LINKED", "ACCOUNT_NOT_ACTIVE", - "ACTIVE" + "ACTIVE", + "ACCOUNT_SUSPENDED" ], "enumDescriptions": [ "Default unspecified value.", "Account not linked.", "Account is linked but not active.", - "Entitlement and Account are active." + "Entitlement and Account are active.", + "Account is suspended." 
], "readOnly": true, "type": "string" @@ -3751,11 +3752,11 @@ "id": "TimeZone", "properties": { "id": { - "description": "IANA Time Zone Database time zone, e.g. \"America/New_York\".", + "description": "IANA Time Zone Database time zone. For example \"America/New_York\".", "type": "string" }, "version": { - "description": "Optional. IANA Time Zone Database version number, e.g. \"2019a\".", + "description": "Optional. IANA Time Zone Database version number. For example \"2019a\".", "type": "string" } }, diff --git a/discovery/googleapis/orgpolicy__v2.json b/discovery/googleapis/orgpolicy__v2.json index f360b3355..f29eaa697 100644 --- a/discovery/googleapis/orgpolicy__v2.json +++ b/discovery/googleapis/orgpolicy__v2.json @@ -25,7 +25,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20241021", + "revision": "20241129", "rootUrl": "https://orgpolicy.googleapis.com/", "servicePath": "", "title": "Organization Policy API", @@ -991,6 +991,10 @@ "supportsDryRun": { "description": "Shows if dry run is supported for this constraint or not.", "type": "boolean" + }, + "supportsSimulation": { + "description": "Shows if simulation is supported for this constraint or not.", + "type": "boolean" } }, "type": "object" @@ -1352,6 +1356,14 @@ "description": "If `true`, then the policy is enforced. If `false`, then any configuration is acceptable. This field can be set only in policies for boolean constraints.", "type": "boolean" }, + "parameters": { + "additionalProperties": { + "description": "Properties of the object.", + "type": "any" + }, + "description": "Optional. Required for GMCs if parameters defined in constraints. Pass parameter values when policy enforcement is enabled. Ensure that parameter value types match those defined in the constraint definition. For example: { \"allowedLocations\" : [\"us-east1\", \"us-west1\"], \"allowAll\" : true }", + "type": "object" + }, "values": { "$ref": "GoogleCloudOrgpolicyV2PolicySpecPolicyRuleStringValues", "description": "List of values to be used for this policy rule. This field can be set only in policies for list constraints." diff --git a/discovery/googleapis/osconfig__v1.json b/discovery/googleapis/osconfig__v1.json index 96e55601b..b17a0446d 100644 --- a/discovery/googleapis/osconfig__v1.json +++ b/discovery/googleapis/osconfig__v1.json @@ -25,7 +25,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20241021", + "revision": "20241201", "rootUrl": "https://osconfig.googleapis.com/", "servicePath": "", "title": "OS Config API", @@ -640,7 +640,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. 
Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/osPolicyAssignments/{osPolicyAssignmentsId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "osconfig.projects.locations.osPolicyAssignments.operations.cancel", @@ -3573,11 +3573,11 @@ "id": "TimeZone", "properties": { "id": { - "description": "IANA Time Zone Database time zone, e.g. \"America/New_York\".", + "description": "IANA Time Zone Database time zone. For example \"America/New_York\".", "type": "string" }, "version": { - "description": "Optional. IANA Time Zone Database version number, e.g. \"2019a\".", + "description": "Optional. IANA Time Zone Database version number. For example \"2019a\".", "type": "string" } }, diff --git a/discovery/googleapis/parallelstore__v1.json b/discovery/googleapis/parallelstore__v1.json new file mode 100644 index 000000000..d33ba13ee --- /dev/null +++ b/discovery/googleapis/parallelstore__v1.json @@ -0,0 +1,973 @@ +{ + "auth": { + "oauth2": { + "scopes": { + "https://www.googleapis.com/auth/cloud-platform": { + "description": "See, edit, configure, and delete your Google Cloud data and see the email address for your Google Account." + } + } + } + }, + "basePath": "", + "baseUrl": "https://parallelstore.googleapis.com/", + "batchPath": "batch", + "canonicalName": "Parallelstore", + "description": "", + "discoveryVersion": "v1", + "documentationLink": "https://cloud.google.com/parallelstore", + "icons": { + "x16": "http://www.google.com/images/icons/product/search-16.gif", + "x32": "http://www.google.com/images/icons/product/search-32.gif" + }, + "id": "parallelstore:v1", + "kind": "discovery#restDescription", + "name": "parallelstore", + "ownerDomain": "google.com", + "ownerName": "Google", + "protocol": "rest", + "revision": "20241204", + "rootUrl": "https://parallelstore.googleapis.com/", + "servicePath": "", + "title": "Parallelstore API", + "version": "v1", + "version_module": true, + "parameters": { + "$.xgafv": { + "description": "V1 error format.", + "enum": [ + "1", + "2" + ], + "enumDescriptions": [ + "v1 error format", + "v2 error format" + ], + "location": "query", + "type": "string" + }, + "access_token": { + "description": "OAuth access token.", + "location": "query", + "type": "string" + }, + "alt": { + "default": "json", + "description": "Data format for response.", + "enum": [ + "json", + "media", + "proto" + ], + "enumDescriptions": [ + "Responses with Content-Type of application/json", + "Media download with context-dependent Content-Type", + "Responses with Content-Type of application/x-protobuf" + ], + "location": "query", + "type": "string" + }, + "callback": { + "description": "JSONP", + "location": "query", + "type": "string" + }, + "fields": { + "description": "Selector specifying which fields to include in a partial response.", + "location": "query", + "type": "string" + }, + "key": { + "description": "API key. Your API key identifies your project and provides you with API access, quota, and reports. 
Required unless you provide an OAuth 2.0 token.", + "location": "query", + "type": "string" + }, + "oauth_token": { + "description": "OAuth 2.0 token for the current user.", + "location": "query", + "type": "string" + }, + "prettyPrint": { + "default": "true", + "description": "Returns response with indentations and line breaks.", + "location": "query", + "type": "boolean" + }, + "quotaUser": { + "description": "Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.", + "location": "query", + "type": "string" + }, + "uploadType": { + "description": "Legacy upload protocol for media (e.g. \"media\", \"multipart\").", + "location": "query", + "type": "string" + }, + "upload_protocol": { + "description": "Upload protocol for media (e.g. \"raw\", \"multipart\").", + "location": "query", + "type": "string" + } + }, + "resources": { + "projects": { + "resources": { + "locations": { + "methods": { + "get": { + "description": "Gets information about a location.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}", + "httpMethod": "GET", + "id": "parallelstore.projects.locations.get", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "Resource name for the location.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+name}", + "response": { + "$ref": "Location" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "list": { + "description": "Lists information about the supported locations for this service.", + "flatPath": "v1/projects/{projectsId}/locations", + "httpMethod": "GET", + "id": "parallelstore.projects.locations.list", + "parameterOrder": [ + "name" + ], + "parameters": { + "filter": { + "description": "A filter to narrow down results to a preferred subset. The filtering language accepts strings like `\"displayName=tokyo\"`, and is documented in more detail in [AIP-160](https://google.aip.dev/160).", + "location": "query", + "type": "string" + }, + "name": { + "description": "The resource that owns the locations collection, if applicable.", + "location": "path", + "pattern": "^projects/[^/]+$", + "required": true, + "type": "string" + }, + "pageSize": { + "description": "The maximum number of results to return. If not set, the service selects a default.", + "format": "int32", + "location": "query", + "type": "integer" + }, + "pageToken": { + "description": "A page token received from the `next_page_token` field in the response. Send that page token to receive the subsequent page.", + "location": "query", + "type": "string" + } + }, + "path": "v1/{+name}/locations", + "response": { + "$ref": "ListLocationsResponse" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + } + }, + "resources": { + "instances": { + "methods": { + "create": { + "description": "Creates a Parallelstore instance in a given project and location.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/instances", + "httpMethod": "POST", + "id": "parallelstore.projects.locations.instances.create", + "parameterOrder": [ + "parent" + ], + "parameters": { + "instanceId": { + "description": "Required. The name of the Parallelstore instance. * Must contain only lowercase letters, numbers, and hyphens. * Must start with a letter. * Must be between 1-63 characters. * Must end with a number or a letter. 
* Must be unique within the customer project / location", + "location": "query", + "type": "string" + }, + "parent": { + "description": "Required. The instance's project and location, in the format `projects/{project}/locations/{location}`. Locations map to Google Cloud zones; for example, `us-west1-b`.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+$", + "required": true, + "type": "string" + }, + "requestId": { + "description": "Optional. An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes since the first request. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000).", + "location": "query", + "type": "string" + } + }, + "path": "v1/{+parent}/instances", + "request": { + "$ref": "Instance" + }, + "response": { + "$ref": "Operation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "delete": { + "description": "Deletes a single instance.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/instances/{instancesId}", + "httpMethod": "DELETE", + "id": "parallelstore.projects.locations.instances.delete", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "Required. Name of the resource", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/instances/[^/]+$", + "required": true, + "type": "string" + }, + "requestId": { + "description": "Optional. An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes after the first request. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000).", + "location": "query", + "type": "string" + } + }, + "path": "v1/{+name}", + "response": { + "$ref": "Operation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "exportData": { + "description": "Copies data from Parallelstore to Cloud Storage.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/instances/{instancesId}:exportData", + "httpMethod": "POST", + "id": "parallelstore.projects.locations.instances.exportData", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "Required. 
Name of the resource.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/instances/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+name}:exportData", + "request": { + "$ref": "ExportDataRequest" + }, + "response": { + "$ref": "Operation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "get": { + "description": "Gets details of a single instance.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/instances/{instancesId}", + "httpMethod": "GET", + "id": "parallelstore.projects.locations.instances.get", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "Required. The instance resource name, in the format `projects/{project_id}/locations/{location}/instances/{instance_id}`.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/instances/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+name}", + "response": { + "$ref": "Instance" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "importData": { + "description": "Copies data from Cloud Storage to Parallelstore.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/instances/{instancesId}:importData", + "httpMethod": "POST", + "id": "parallelstore.projects.locations.instances.importData", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "Required. Name of the resource.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/instances/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+name}:importData", + "request": { + "$ref": "ImportDataRequest" + }, + "response": { + "$ref": "Operation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "list": { + "description": "Lists all instances in a given project and location.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/instances", + "httpMethod": "GET", + "id": "parallelstore.projects.locations.instances.list", + "parameterOrder": [ + "parent" + ], + "parameters": { + "filter": { + "description": "Optional. Filtering results.", + "location": "query", + "type": "string" + }, + "orderBy": { + "description": "Optional. Hint for how to order the results.", + "location": "query", + "type": "string" + }, + "pageSize": { + "description": "Optional. Requested page size. Server may return fewer items than requested. If unspecified, the server will pick an appropriate default.", + "format": "int32", + "location": "query", + "type": "integer" + }, + "pageToken": { + "description": "Optional. A token identifying a page of results the server should return.", + "location": "query", + "type": "string" + }, + "parent": { + "description": "Required. The project and location for which to retrieve instance information, in the format `projects/{project_id}/locations/{location}`. 
To retrieve instance information for all locations, use \"-\" as the value of `{location}`.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+parent}/instances", + "response": { + "$ref": "ListInstancesResponse" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "patch": { + "description": "Updates the parameters of a single instance.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/instances/{instancesId}", + "httpMethod": "PATCH", + "id": "parallelstore.projects.locations.instances.patch", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "Identifier. The resource name of the instance, in the format `projects/{project}/locations/{location}/instances/{instance_id}`.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/instances/[^/]+$", + "required": true, + "type": "string" + }, + "requestId": { + "description": "Optional. An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes since the first request. For example, consider a situation where you make an initial request and t he request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000).", + "location": "query", + "type": "string" + }, + "updateMask": { + "description": "Required. Mask of fields to update. Field mask is used to specify the fields to be overwritten in the Instance resource by the update. At least one path must be supplied in this field. The fields specified in the update_mask are relative to the resource, not the full request.", + "format": "google-fieldmask", + "location": "query", + "type": "string" + } + }, + "path": "v1/{+name}", + "request": { + "$ref": "Instance" + }, + "response": { + "$ref": "Operation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + } + } + }, + "operations": { + "methods": { + "cancel": { + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. 
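The Parallelstore v1 surface added in this diff follows the standard AIP resource pattern: `instances.create` returns a long-running `Operation` that is polled via `operations.get`, and `requestId` is the idempotency token described in the method parameters. A minimal sketch using the discovery-based google-api-python-client with Application Default Credentials; the project, zone, instance ID, and VPC network below are placeholders, not values from this change:

```python
import time
import uuid

from googleapiclient.discovery import build

# Build the client from the parallelstore v1 discovery document (uses ADC).
service = build("parallelstore", "v1")

parent = "projects/my-project/locations/us-central1-a"  # placeholder project/zone

operation = (
    service.projects()
    .locations()
    .instances()
    .create(
        parent=parent,
        instanceId="my-instance",      # placeholder; must be unique in the location
        requestId=str(uuid.uuid4()),   # idempotency token for safe retries
        body={
            # capacityGib: 12000-100000, in multiples of 4000 (int64 encoded as string).
            "capacityGib": "12000",
            "network": "projects/my-project/global/networks/default",  # placeholder VPC
        },
    )
    .execute()
)

# instances.create returns a long-running Operation; poll operations.get until done.
while not operation.get("done"):
    time.sleep(30)
    operation = (
        service.projects()
        .locations()
        .operations()
        .get(name=operation["name"])
        .execute()
    )

if "error" in operation:
    raise RuntimeError(operation["error"])
instance = operation.get("response", {})
```

Reusing the same `requestId` on a retry lets the server recognize and ignore a duplicate create, per the parameter description above.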
On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/operations/{operationsId}:cancel", + "httpMethod": "POST", + "id": "parallelstore.projects.locations.operations.cancel", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "The name of the operation resource to be cancelled.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/operations/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+name}:cancel", + "request": { + "$ref": "CancelOperationRequest" + }, + "response": { + "$ref": "GoogleProtobufEmpty" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "delete": { + "description": "Deletes a long-running operation. This method indicates that the client is no longer interested in the operation result. It does not cancel the operation. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/operations/{operationsId}", + "httpMethod": "DELETE", + "id": "parallelstore.projects.locations.operations.delete", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "The name of the operation resource to be deleted.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/operations/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+name}", + "response": { + "$ref": "GoogleProtobufEmpty" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "get": { + "description": "Gets the latest state of a long-running operation. Clients can use this method to poll the operation result at intervals as recommended by the API service.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/operations/{operationsId}", + "httpMethod": "GET", + "id": "parallelstore.projects.locations.operations.get", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "The name of the operation resource.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/operations/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+name}", + "response": { + "$ref": "Operation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "list": { + "description": "Lists operations that match the specified filter in the request. 
If the server doesn't support this method, it returns `UNIMPLEMENTED`.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/operations", + "httpMethod": "GET", + "id": "parallelstore.projects.locations.operations.list", + "parameterOrder": [ + "name" + ], + "parameters": { + "filter": { + "description": "The standard list filter.", + "location": "query", + "type": "string" + }, + "name": { + "description": "The name of the operation's parent resource.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+$", + "required": true, + "type": "string" + }, + "pageSize": { + "description": "The standard list page size.", + "format": "int32", + "location": "query", + "type": "integer" + }, + "pageToken": { + "description": "The standard list page token.", + "location": "query", + "type": "string" + } + }, + "path": "v1/{+name}/operations", + "response": { + "$ref": "ListOperationsResponse" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + } + } + } + } + } + } + } + }, + "schemas": { + "CancelOperationRequest": { + "description": "The request message for Operations.CancelOperation.", + "id": "CancelOperationRequest", + "properties": {}, + "type": "object" + }, + "DestinationGcsBucket": { + "description": "Cloud Storage as the destination of a data transfer.", + "id": "DestinationGcsBucket", + "properties": { + "uri": { + "description": "Required. URI to a Cloud Storage bucket in the format: `gs:///`. The path inside the bucket is optional.", + "type": "string" + } + }, + "type": "object" + }, + "DestinationParallelstore": { + "description": "Parallelstore as the destination of a data transfer.", + "id": "DestinationParallelstore", + "properties": { + "path": { + "description": "Optional. Root directory path to the Paralellstore filesystem, starting with `/`. Defaults to `/` if unset.", + "type": "string" + } + }, + "type": "object" + }, + "ExportDataRequest": { + "description": "Export data from Parallelstore to Cloud Storage.", + "id": "ExportDataRequest", + "properties": { + "destinationGcsBucket": { + "$ref": "DestinationGcsBucket", + "description": "Cloud Storage destination." + }, + "requestId": { + "description": "Optional. An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes since the first request. For example, consider a situation where you make an initial request and t he request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000).", + "type": "string" + }, + "serviceAccount": { + "description": "Optional. User-specified Service Account (SA) credentials to be used when performing the transfer. 
Use one of the following formats: * `{EMAIL_ADDRESS_OR_UNIQUE_ID}` * `projects/{PROJECT_ID_OR_NUMBER}/serviceAccounts/{EMAIL_ADDRESS_OR_UNIQUE_ID}` * `projects/-/serviceAccounts/{EMAIL_ADDRESS_OR_UNIQUE_ID}` If unspecified, the Parallelstore service agent is used: `service-@gcp-sa-parallelstore.iam.gserviceaccount.com`", + "type": "string" + }, + "sourceParallelstore": { + "$ref": "SourceParallelstore", + "description": "Parallelstore source." + } + }, + "type": "object" + }, + "GoogleProtobufEmpty": { + "description": "A generic empty message that you can re-use to avoid defining duplicated empty messages in your APIs. A typical example is to use it as the request or the response type of an API method. For instance: service Foo { rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty); }", + "id": "GoogleProtobufEmpty", + "properties": {}, + "type": "object" + }, + "ImportDataRequest": { + "description": "Import data from Cloud Storage into a Parallelstore instance.", + "id": "ImportDataRequest", + "properties": { + "destinationParallelstore": { + "$ref": "DestinationParallelstore", + "description": "Parallelstore destination." + }, + "requestId": { + "description": "Optional. An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed. The server will guarantee that for at least 60 minutes since the first request. For example, consider a situation where you make an initial request and t he request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000).", + "type": "string" + }, + "serviceAccount": { + "description": "Optional. User-specified service account credentials to be used when performing the transfer. Use one of the following formats: * `{EMAIL_ADDRESS_OR_UNIQUE_ID}` * `projects/{PROJECT_ID_OR_NUMBER}/serviceAccounts/{EMAIL_ADDRESS_OR_UNIQUE_ID}` * `projects/-/serviceAccounts/{EMAIL_ADDRESS_OR_UNIQUE_ID}` If unspecified, the Parallelstore service agent is used: `service-@gcp-sa-parallelstore.iam.gserviceaccount.com`", + "type": "string" + }, + "sourceGcsBucket": { + "$ref": "SourceGcsBucket", + "description": "The Cloud Storage source bucket and, optionally, path inside the bucket." + } + }, + "type": "object" + }, + "Instance": { + "description": "A Parallelstore instance.", + "id": "Instance", + "properties": { + "accessPoints": { + "description": "Output only. A list of IPv4 addresses used for client side configuration.", + "items": { + "type": "string" + }, + "readOnly": true, + "type": "array" + }, + "capacityGib": { + "description": "Required. Immutable. The instance's storage capacity in Gibibytes (GiB). Allowed values are between 12000 and 100000, in multiples of 4000; e.g., 12000, 16000, 20000, ...", + "format": "int64", + "type": "string" + }, + "createTime": { + "description": "Output only. The time when the instance was created.", + "format": "google-datetime", + "readOnly": true, + "type": "string" + }, + "daosVersion": { + "description": "Output only. The version of DAOS software running in the instance.", + "readOnly": true, + "type": "string" + }, + "description": { + "description": "Optional. 
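The `exportData` and `importData` methods plus the `DestinationGcsBucket`, `DestinationParallelstore`, `SourceGcsBucket`, and `SourceParallelstore` schemas describe data transfers between a Parallelstore filesystem and Cloud Storage. A hedged sketch of an export call built only from those schemas; the instance name and bucket URI are placeholders:

```python
from googleapiclient.discovery import build

service = build("parallelstore", "v1")

# Placeholder instance name in the documented format.
instance = "projects/my-project/locations/us-central1-a/instances/my-instance"

operation = (
    service.projects()
    .locations()
    .instances()
    .exportData(
        name=instance,
        body={
            "sourceParallelstore": {"path": "/"},  # export the whole filesystem
            "destinationGcsBucket": {"uri": "gs://my-bucket/exports"},  # placeholder bucket
        },
    )
    .execute()
)
# `operation` is a long-running Operation; poll it as in the create sketch above.
```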
The description of the instance. 2048 characters or less.", + "type": "string" + }, + "directoryStripeLevel": { + "description": "Optional. Stripe level for directories. Allowed values are: * `DIRECTORY_STRIPE_LEVEL_MIN`: recommended when directories contain a small number of files. * `DIRECTORY_STRIPE_LEVEL_BALANCED`: balances performance for workloads involving a mix of small and large directories. * `DIRECTORY_STRIPE_LEVEL_MAX`: recommended for directories with a large number of files.", + "enum": [ + "DIRECTORY_STRIPE_LEVEL_UNSPECIFIED", + "DIRECTORY_STRIPE_LEVEL_MIN", + "DIRECTORY_STRIPE_LEVEL_BALANCED", + "DIRECTORY_STRIPE_LEVEL_MAX" + ], + "enumDescriptions": [ + "If not set, DirectoryStripeLevel will default to DIRECTORY_STRIPE_LEVEL_MAX", + "Minimum directory striping", + "Medium directory striping", + "Maximum directory striping" + ], + "type": "string" + }, + "effectiveReservedIpRange": { + "description": "Output only. Immutable. The ID of the IP address range being used by the instance's VPC network. This field is populated by the service and contains the value currently used by the service.", + "readOnly": true, + "type": "string" + }, + "fileStripeLevel": { + "description": "Optional. Stripe level for files. Allowed values are: * `FILE_STRIPE_LEVEL_MIN`: offers the best performance for small size files. * `FILE_STRIPE_LEVEL_BALANCED`: balances performance for workloads involving a mix of small and large files. * `FILE_STRIPE_LEVEL_MAX`: higher throughput performance for larger files.", + "enum": [ + "FILE_STRIPE_LEVEL_UNSPECIFIED", + "FILE_STRIPE_LEVEL_MIN", + "FILE_STRIPE_LEVEL_BALANCED", + "FILE_STRIPE_LEVEL_MAX" + ], + "enumDescriptions": [ + "If not set, FileStripeLevel will default to FILE_STRIPE_LEVEL_BALANCED", + "Minimum file striping", + "Medium file striping", + "Maximum file striping" + ], + "type": "string" + }, + "labels": { + "additionalProperties": { + "type": "string" + }, + "description": "Optional. Cloud Labels are a flexible and lightweight mechanism for organizing cloud resources into groups that reflect a customer's organizational needs and deployment strategies. See https://cloud.google.com/resource-manager/docs/labels-overview for details.", + "type": "object" + }, + "name": { + "description": "Identifier. The resource name of the instance, in the format `projects/{project}/locations/{location}/instances/{instance_id}`.", + "type": "string" + }, + "network": { + "description": "Optional. Immutable. The name of the Compute Engine [VPC network](https://cloud.google.com/vpc/docs/vpc) to which the instance is connected.", + "type": "string" + }, + "reservedIpRange": { + "description": "Optional. Immutable. The ID of the IP address range being used by the instance's VPC network. See [Configure a VPC network](https://cloud.google.com/parallelstore/docs/vpc#create_and_configure_the_vpc). If no ID is provided, all ranges are considered.", + "type": "string" + }, + "state": { + "description": "Output only. The instance state.", + "enum": [ + "STATE_UNSPECIFIED", + "CREATING", + "ACTIVE", + "DELETING", + "FAILED", + "UPGRADING" + ], + "enumDescriptions": [ + "Not set.", + "The instance is being created.", + "The instance is available for use.", + "The instance is being deleted.", + "The instance is not usable.", + "The instance is being upgraded." + ], + "readOnly": true, + "type": "string" + }, + "updateTime": { + "description": "Output only. 
The time when the instance was updated.", + "format": "google-datetime", + "readOnly": true, + "type": "string" + } + }, + "type": "object" + }, + "ListInstancesResponse": { + "description": "Response from ListInstances.", + "id": "ListInstancesResponse", + "properties": { + "instances": { + "description": "The list of Parallelstore instances.", + "items": { + "$ref": "Instance" + }, + "type": "array" + }, + "nextPageToken": { + "description": "A token identifying a page of results the server should return.", + "type": "string" + }, + "unreachable": { + "description": "Locations that could not be reached.", + "items": { + "type": "string" + }, + "type": "array" + } + }, + "type": "object" + }, + "ListLocationsResponse": { + "description": "The response message for Locations.ListLocations.", + "id": "ListLocationsResponse", + "properties": { + "locations": { + "description": "A list of locations that matches the specified filter in the request.", + "items": { + "$ref": "Location" + }, + "type": "array" + }, + "nextPageToken": { + "description": "The standard List next-page token.", + "type": "string" + } + }, + "type": "object" + }, + "ListOperationsResponse": { + "description": "The response message for Operations.ListOperations.", + "id": "ListOperationsResponse", + "properties": { + "nextPageToken": { + "description": "The standard List next-page token.", + "type": "string" + }, + "operations": { + "description": "A list of operations that matches the specified filter in the request.", + "items": { + "$ref": "Operation" + }, + "type": "array" + } + }, + "type": "object" + }, + "Location": { + "description": "A resource that represents a Google Cloud location.", + "id": "Location", + "properties": { + "displayName": { + "description": "The friendly name for this location, typically a nearby city name. For example, \"Tokyo\".", + "type": "string" + }, + "labels": { + "additionalProperties": { + "type": "string" + }, + "description": "Cross-service attributes for the location. For example {\"cloud.googleapis.com/region\": \"us-east1\"}", + "type": "object" + }, + "locationId": { + "description": "The canonical id for this location. For example: `\"us-east1\"`.", + "type": "string" + }, + "metadata": { + "additionalProperties": { + "description": "Properties of the object. Contains field @type with type URL.", + "type": "any" + }, + "description": "Service-specific metadata. For example the available capacity at the given location.", + "type": "object" + }, + "name": { + "description": "Resource name for the location, which may vary between implementations. For example: `\"projects/example-project/locations/us-east1\"`", + "type": "string" + } + }, + "type": "object" + }, + "Operation": { + "description": "This resource represents a long-running operation that is the result of a network API call.", + "id": "Operation", + "properties": { + "done": { + "description": "If the value is `false`, it means the operation is still in progress. If `true`, the operation is completed, and either `error` or `response` is available.", + "type": "boolean" + }, + "error": { + "$ref": "Status", + "description": "The error result of the operation in case of failure or cancellation." + }, + "metadata": { + "additionalProperties": { + "description": "Properties of the object. Contains field @type with type URL.", + "type": "any" + }, + "description": "Service-specific metadata associated with the operation. It typically contains progress information and common metadata such as create time. 
Some services might not provide such metadata. Any method that returns a long-running operation should document the metadata type, if any.", + "type": "object" + }, + "name": { + "description": "The server-assigned name, which is only unique within the same service that originally returns it. If you use the default HTTP mapping, the `name` should be a resource name ending with `operations/{unique_id}`.", + "type": "string" + }, + "response": { + "additionalProperties": { + "description": "Properties of the object. Contains field @type with type URL.", + "type": "any" + }, + "description": "The normal, successful response of the operation. If the original method returns no data on success, such as `Delete`, the response is `google.protobuf.Empty`. If the original method is standard `Get`/`Create`/`Update`, the response should be the resource. For other methods, the response should have the type `XxxResponse`, where `Xxx` is the original method name. For example, if the original method name is `TakeSnapshot()`, the inferred response type is `TakeSnapshotResponse`.", + "type": "object" + } + }, + "type": "object" + }, + "OperationMetadata": { + "description": "Long-running operation metadata.", + "id": "OperationMetadata", + "properties": { + "apiVersion": { + "description": "Output only. API version used to start the operation.", + "readOnly": true, + "type": "string" + }, + "createTime": { + "description": "Output only. The time the operation was created.", + "format": "google-datetime", + "readOnly": true, + "type": "string" + }, + "endTime": { + "description": "Output only. The time the operation finished running.", + "format": "google-datetime", + "readOnly": true, + "type": "string" + }, + "requestedCancellation": { + "description": "Output only. Identifies whether the user has requested cancellation of the operation. Operations that have been cancelled successfully have Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "readOnly": true, + "type": "boolean" + }, + "statusMessage": { + "description": "Output only. Human-readable status of the operation, if any.", + "readOnly": true, + "type": "string" + }, + "target": { + "description": "Output only. Server-defined resource path for the target of the operation.", + "readOnly": true, + "type": "string" + }, + "verb": { + "description": "Output only. Name of the verb executed by the operation.", + "readOnly": true, + "type": "string" + } + }, + "type": "object" + }, + "SourceGcsBucket": { + "description": "Cloud Storage as the source of a data transfer.", + "id": "SourceGcsBucket", + "properties": { + "uri": { + "description": "Required. URI to a Cloud Storage bucket in the format: `gs:///`. The path inside the bucket is optional.", + "type": "string" + } + }, + "type": "object" + }, + "SourceParallelstore": { + "description": "Parallelstore as the source of a data transfer.", + "id": "SourceParallelstore", + "properties": { + "path": { + "description": "Optional. Root directory path to the Paralellstore filesystem, starting with `/`. Defaults to `/` if unset.", + "type": "string" + } + }, + "type": "object" + }, + "Status": { + "description": "The `Status` type defines a logical error model that is suitable for different programming environments, including REST APIs and RPC APIs. It is used by [gRPC](https://github.com/grpc). Each `Status` message contains three pieces of data: error code, error message, and error details. 
You can find out more about this error model and how to work with it in the [API Design Guide](https://cloud.google.com/apis/design/errors).", + "id": "Status", + "properties": { + "code": { + "description": "The status code, which should be an enum value of google.rpc.Code.", + "format": "int32", + "type": "integer" + }, + "details": { + "description": "A list of messages that carry the error details. There is a common set of message types for APIs to use.", + "items": { + "additionalProperties": { + "description": "Properties of the object. Contains field @type with type URL.", + "type": "any" + }, + "type": "object" + }, + "type": "array" + }, + "message": { + "description": "A developer-facing error message, which should be in English. Any user-facing error message should be localized and sent in the google.rpc.Status.details field, or localized by the client.", + "type": "string" + } + }, + "type": "object" + } + } +} diff --git a/discovery/googleapis/paymentsresellersubscription__v1.json b/discovery/googleapis/paymentsresellersubscription__v1.json index 4f7568c81..78cb49308 100644 --- a/discovery/googleapis/paymentsresellersubscription__v1.json +++ b/discovery/googleapis/paymentsresellersubscription__v1.json @@ -25,7 +25,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20240922", + "revision": "20241211", "rootUrl": "https://paymentsresellersubscription.googleapis.com/", "servicePath": "", "title": "Payments Reseller Subscription API", @@ -435,6 +435,38 @@ ] } } + }, + "userSessions": { + "methods": { + "generate": { + "description": "This API replaces user authorized OAuth consent based APIs (Create, Entitle). Generates a short-lived token for a user session based on the user intent. You can use the session token to redirect the user to Google to finish the signup flow. You can re-generate new session token repeatedly for the same request if necessary, regardless of the previous tokens being expired or not.", + "flatPath": "v1/partners/{partnersId}/userSessions:generate", + "httpMethod": "POST", + "id": "paymentsresellersubscription.partners.userSessions.generate", + "parameterOrder": [ + "parent" + ], + "parameters": { + "parent": { + "description": "Required. The parent, the partner that can resell. Format: partners/{partner}", + "location": "path", + "pattern": "^partners/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+parent}/userSessions:generate", + "request": { + "$ref": "GoogleCloudPaymentsResellerSubscriptionV1GenerateUserSessionRequest" + }, + "response": { + "$ref": "GoogleCloudPaymentsResellerSubscriptionV1GenerateUserSessionResponse" + }, + "scopes": [ + "openid" + ] + } + } } } } @@ -508,6 +540,25 @@ }, "type": "object" }, + "GoogleCloudPaymentsResellerSubscriptionV1CreateSubscriptionIntent": { + "description": "Intent message for creating a Subscription resource.", + "id": "GoogleCloudPaymentsResellerSubscriptionV1CreateSubscriptionIntent", + "properties": { + "parent": { + "description": "Required. The parent resource name, which is the identifier of the partner.", + "type": "string" + }, + "subscription": { + "$ref": "GoogleCloudPaymentsResellerSubscriptionV1Subscription", + "description": "Required. The Subscription to be created." + }, + "subscriptionId": { + "description": "Required. Identifies the subscription resource on the Partner side. The value is restricted to 63 ASCII characters at the maximum. 
If a subscription was previously created with the same subscription_id, we will directly return that one.", + "type": "string" + } + }, + "type": "object" + }, "GoogleCloudPaymentsResellerSubscriptionV1Duration": { "description": "Describes the length of a period of a time.", "id": "GoogleCloudPaymentsResellerSubscriptionV1Duration", @@ -536,8 +587,19 @@ }, "type": "object" }, + "GoogleCloudPaymentsResellerSubscriptionV1EntitleSubscriptionIntent": { + "description": "Intent for entitling the previously provisioned subscription to an end user.", + "id": "GoogleCloudPaymentsResellerSubscriptionV1EntitleSubscriptionIntent", + "properties": { + "name": { + "description": "Required. The name of the subscription resource that is entitled to the current end user.", + "type": "string" + } + }, + "type": "object" + }, "GoogleCloudPaymentsResellerSubscriptionV1EntitleSubscriptionRequest": { - "description": "LINT.IfChange Partner request for entitling the previously provisioned subscription to an end user. The end user identity is inferred from the request OAuth context.", + "description": "Partner request for entitling the previously provisioned subscription to an end user. The end user identity is inferred from the request OAuth context.", "id": "GoogleCloudPaymentsResellerSubscriptionV1EntitleSubscriptionRequest", "properties": { "lineItemEntitlementDetails": { @@ -683,6 +745,28 @@ }, "type": "object" }, + "GoogleCloudPaymentsResellerSubscriptionV1GenerateUserSessionRequest": { + "description": "[Preview only] Request to generate a user session.", + "id": "GoogleCloudPaymentsResellerSubscriptionV1GenerateUserSessionRequest", + "properties": { + "intentPayload": { + "$ref": "GoogleCloudPaymentsResellerSubscriptionV1IntentPayload", + "description": "The user intent to generate the user session." + } + }, + "type": "object" + }, + "GoogleCloudPaymentsResellerSubscriptionV1GenerateUserSessionResponse": { + "description": "[Preview only] Response that contains the details for generated user session.", + "id": "GoogleCloudPaymentsResellerSubscriptionV1GenerateUserSessionResponse", + "properties": { + "userSession": { + "$ref": "GoogleCloudPaymentsResellerSubscriptionV1UserSession", + "description": "The generated user session. The token size is proportional to the size of the intent payload." + } + }, + "type": "object" + }, "GoogleCloudPaymentsResellerSubscriptionV1GoogleOnePayload": { "description": "Payload specific to Google One products.", "id": "GoogleCloudPaymentsResellerSubscriptionV1GoogleOnePayload", @@ -737,6 +821,21 @@ }, "type": "object" }, + "GoogleCloudPaymentsResellerSubscriptionV1IntentPayload": { + "description": "The payload that describes the user intent.", + "id": "GoogleCloudPaymentsResellerSubscriptionV1IntentPayload", + "properties": { + "createIntent": { + "$ref": "GoogleCloudPaymentsResellerSubscriptionV1CreateSubscriptionIntent", + "description": "The request to create a subscription." + }, + "entitleIntent": { + "$ref": "GoogleCloudPaymentsResellerSubscriptionV1EntitleSubscriptionIntent", + "description": "The request to entitle a subscription." + } + }, + "type": "object" + }, "GoogleCloudPaymentsResellerSubscriptionV1ListProductsResponse": { "description": "Response that contains the products.", "id": "GoogleCloudPaymentsResellerSubscriptionV1ListProductsResponse", @@ -1064,6 +1163,11 @@ }, "type": "array" }, + "migrationDetails": { + "$ref": "GoogleCloudPaymentsResellerSubscriptionV1SubscriptionMigrationDetails", + "description": "Output only. 
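The new `partners.userSessions.generate` method and the intent schemas above describe the Google-managed signup flow: the partner wraps a create (or entitle) intent in an `IntentPayload`, generates a short-lived session token, and redirects the user to Google with that token. A sketch assuming the discovery-based Python client and suitable partner credentials; the partner resource name, subscription ID, and Subscription body are placeholders:

```python
from googleapiclient.discovery import build

service = build("paymentsresellersubscription", "v1")

parent = "partners/example-partner"  # placeholder partner resource name

body = {
    "intentPayload": {
        "createIntent": {
            "parent": parent,
            "subscriptionId": "example-subscription-001",  # placeholder, <= 63 ASCII chars
            "subscription": {
                # Populate the Subscription fields required by the integration;
                # omitted here because they are partner-specific.
            },
        }
    }
}

resp = (
    service.partners()
    .userSessions()
    .generate(parent=parent, body=body)
    .execute()
)

token = resp["userSession"]["token"]         # pass to Google when redirecting the user
expires = resp["userSession"]["expireTime"]  # short-lived; regenerate as needed
```

Per the `UserSession` description, tokens can be regenerated repeatedly for the same request, and the same `subscription_id` guards against duplicate fulfillment.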
Describes the details of the migrated subscription. Only populated if this subscription is migrated from another system.", + "readOnly": true + }, "name": { "description": "Identifier. Resource name of the subscription. It will have the format of \"partners/{partner_id}/subscriptions/{subscription_id}\". This is available for authorizeAddon, but otherwise is response only.", "type": "string" @@ -1330,6 +1434,18 @@ }, "type": "object" }, + "GoogleCloudPaymentsResellerSubscriptionV1SubscriptionMigrationDetails": { + "description": "Describes the details of the migrated subscription.", + "id": "GoogleCloudPaymentsResellerSubscriptionV1SubscriptionMigrationDetails", + "properties": { + "migratedSubscriptionId": { + "description": "Output only. The migrated subscription id in the legacy system.", + "readOnly": true, + "type": "string" + } + }, + "type": "object" + }, "GoogleCloudPaymentsResellerSubscriptionV1SubscriptionPromotionSpec": { "description": "Describes the spec for one promotion.", "id": "GoogleCloudPaymentsResellerSubscriptionV1SubscriptionPromotionSpec", @@ -1408,6 +1524,24 @@ }, "type": "object" }, + "GoogleCloudPaymentsResellerSubscriptionV1UserSession": { + "description": "A user session contains a short-lived token that includes information required to interact with Google Payments Reseller Platform using the following web endpoints. - A user session token should be generated dynamically for an authenticated user. You should refrain from sharing a token directly with a user in an unauthenticated context, such as SMS, or email. - You can re-generate new session tokens repeatedly for same `generate` request if necessary, regardless of the previous tokens being expired or not. You don't need to worry about multiple sessions resulting in duplicate fulfillments as guaranteed by the same subscription id. Please refer to the [Google Managed Signup](/payments/reseller/subscription/reference/index/User.Signup.Integration/Google.Managed.Signup.\\(In.Preview\\)) documentation for additional integration details.", + "id": "GoogleCloudPaymentsResellerSubscriptionV1UserSession", + "properties": { + "expireTime": { + "description": "Output only. The time at which the user session expires.", + "format": "google-datetime", + "readOnly": true, + "type": "string" + }, + "token": { + "description": "Output only. The encrypted token of the user session, including the information of the user's intent and request. This token should be provided when redirecting the user to Google.", + "readOnly": true, + "type": "string" + } + }, + "type": "object" + }, "GoogleCloudPaymentsResellerSubscriptionV1YoutubePayload": { "description": "Payload specific to Youtube products.", "id": "GoogleCloudPaymentsResellerSubscriptionV1YoutubePayload", diff --git a/discovery/googleapis/places__v1.json b/discovery/googleapis/places__v1.json index e6834e3e6..1bf3032d1 100644 --- a/discovery/googleapis/places__v1.json +++ b/discovery/googleapis/places__v1.json @@ -43,7 +43,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20241015", + "revision": "20241117", "rootUrl": "https://places.googleapis.com/", "servicePath": "", "title": "Places API (New)", @@ -395,12 +395,12 @@ "type": "string" }, "straightLineDistanceMeters": { - "description": "The straight line distance in meters between the target location and the landmark.", + "description": "The straight line distance, in meters, between the center point of the target and the center point of the landmark. 
In some situations, this value can be longer than `travel_distance_meters`.", "format": "float", "type": "number" }, "travelDistanceMeters": { - "description": "The travel distance in meters along the road network if known. This does not take into account the mode of transportation (walking/driving).", + "description": "The travel distance, in meters, along the road network from the target to the landmark, if known. This value does not take into account the mode of transportation, such as walking, driving, or biking.", "format": "float", "type": "number" }, @@ -437,6 +437,10 @@ "description": "Request proto for AutocompletePlaces.", "id": "GoogleMapsPlacesV1AutocompletePlacesRequest", "properties": { + "includePureServiceAreaBusinesses": { + "description": "Optional. Include pure service area businesses if the field is set to true. Pure service area business is a business that visits or delivers to customers directly but does not serve customers at their business address. For example, businesses like cleaning services or plumbers. Those businesses do not have a physical address or location on Google Maps. Places will not return fields including `location`, `plus_code`, and other location related fields for these businesses.", + "type": "boolean" + }, "includeQueryPredictions": { "description": "Optional. If true, the response will include both Place and query predictions. Otherwise the response will only return Place predictions.", "type": "boolean" @@ -959,6 +963,14 @@ }, "type": "array" }, + "flagContentUri": { + "description": "A link where users can flag a problem with the photo.", + "type": "string" + }, + "googleMapsUri": { + "description": "A link to show the photo on Google Maps.", + "type": "string" + }, "heightPx": { "description": "The maximum available height, in pixels.", "format": "int32", @@ -1045,6 +1057,13 @@ ], "type": "string" }, + "containingPlaces": { + "description": "List of places in which the current place is located.", + "items": { + "$ref": "GoogleMapsPlacesV1PlaceContainingPlace" + }, + "type": "array" + }, "curbsidePickup": { "description": "Specifies if the business supports curbside pickup.", "type": "boolean" @@ -1104,6 +1123,10 @@ "description": "Place is suitable for watching sports.", "type": "boolean" }, + "googleMapsLinks": { + "$ref": "GoogleMapsPlacesV1PlaceGoogleMapsLinks", + "description": "Links to trigger different Google Maps actions." + }, "googleMapsUri": { "description": "A URL providing more information about this place.", "type": "string" @@ -1187,6 +1210,10 @@ ], "type": "string" }, + "priceRange": { + "$ref": "GoogleMapsPlacesV1PriceRange", + "description": "The price range associated with a Place." + }, "primaryType": { "description": "The primary type of the given result. This type must one of the Places API supported types. For example, \"restaurant\", \"cafe\", \"airport\", etc. A place can only have a single primary type. For the complete list of possible values, see Table A and Table B at https://developers.google.com/maps/documentation/places/web-service/place-types", "type": "string" @@ -1195,6 +1222,10 @@ "$ref": "GoogleTypeLocalizedText", "description": "The display name of the primary type, localized to the request language if applicable. For the complete list of possible values, see Table A and Table B at https://developers.google.com/maps/documentation/places/web-service/place-types" }, + "pureServiceAreaBusiness": { + "description": "Indicates whether the place is a pure service area business. 
Pure service area business is a business that visits or delivers to customers directly but does not serve customers at their business address. For example, businesses like cleaning services or plumbers. Those businesses may not have a physical address or location on Google Maps.", + "type": "boolean" + }, "rating": { "description": "A rating between 1.0 and 5.0, based on user reviews of this place.", "format": "double", @@ -1202,7 +1233,7 @@ }, "regularOpeningHours": { "$ref": "GoogleMapsPlacesV1PlaceOpeningHours", - "description": "The regular hours of operation." + "description": "The regular hours of operation. Note that if a place is always open (24 hours), the `close` field will not be set. Clients can rely on always open (24 hours) being represented as an `open` period containing day with value `0`, hour with value `0`, and minute with value `0`." }, "regularSecondaryOpeningHours": { "description": "Contains an array of entries for information about regular secondary hours of a business. Secondary hours are different from a business's main hours. For example, a restaurant can specify drive through hours or delivery hours as its secondary hours. This field populates the type subfield, which draws from a predefined list of opening hours types (such as DRIVE_THROUGH, PICKUP, or TAKEOUT) based on the types of the place.", @@ -1368,6 +1399,10 @@ "$ref": "GoogleMapsPlacesV1ContentBlock" }, "type": "array" + }, + "flagContentUri": { + "description": "A link where users can flag a problem with the summary.", + "type": "string" } }, "type": "object" @@ -1387,6 +1422,21 @@ }, "type": "object" }, + "GoogleMapsPlacesV1PlaceContainingPlace": { + "description": "Info about the place in which this place is located.", + "id": "GoogleMapsPlacesV1PlaceContainingPlace", + "properties": { + "id": { + "description": "The place id of the place in which this place is located.", + "type": "string" + }, + "name": { + "description": "The resource name of the place in which this place is located.", + "type": "string" + } + }, + "type": "object" + }, "GoogleMapsPlacesV1PlaceGenerativeSummary": { "description": "Experimental: See https://developers.google.com/maps/documentation/places/web-service/experimental/places-generative for more details. AI-generated summary of the place.", "id": "GoogleMapsPlacesV1PlaceGenerativeSummary", @@ -1395,10 +1445,18 @@ "$ref": "GoogleTypeLocalizedText", "description": "The detailed description of the place." }, + "descriptionFlagContentUri": { + "description": "A link where users can flag a problem with the description summary.", + "type": "string" + }, "overview": { "$ref": "GoogleTypeLocalizedText", "description": "The overview of the place." }, + "overviewFlagContentUri": { + "description": "A link where users can flag a problem with the overview summary.", + "type": "string" + }, "references": { "$ref": "GoogleMapsPlacesV1References", "description": "References that are used to generate the summary description." @@ -1406,10 +1464,47 @@ }, "type": "object" }, + "GoogleMapsPlacesV1PlaceGoogleMapsLinks": { + "description": "Links to trigger different Google Maps actions.", + "id": "GoogleMapsPlacesV1PlaceGoogleMapsLinks", + "properties": { + "directionsUri": { + "description": "A link to show the directions to the place. The link only populates the destination location and uses the default travel mode `DRIVE`.", + "type": "string" + }, + "photosUri": { + "description": "A link to show photos of this place. 
This link is currently not supported on Google Maps Mobile and only works on the web version of Google Maps.", + "type": "string" + }, + "placeUri": { + "description": "A link to show this place.", + "type": "string" + }, + "reviewsUri": { + "description": "A link to show reviews of this place. This link is currently not supported on Google Maps Mobile and only works on the web version of Google Maps.", + "type": "string" + }, + "writeAReviewUri": { + "description": "A link to write a review for this place. This link is currently not supported on Google Maps Mobile and only works on the web version of Google Maps.", + "type": "string" + } + }, + "type": "object" + }, "GoogleMapsPlacesV1PlaceOpeningHours": { "description": "Information about business hour of the place.", "id": "GoogleMapsPlacesV1PlaceOpeningHours", "properties": { + "nextCloseTime": { + "description": "The next time the current opening hours period ends up to 7 days in the future. This field is only populated if the opening hours period is active at the time of serving the request.", + "format": "google-datetime", + "type": "string" + }, + "nextOpenTime": { + "description": "The next time the current opening hours period starts up to 7 days in the future. This field is only populated if the opening hours period is not active at the time of serving the request.", + "format": "google-datetime", + "type": "string" + }, "openNow": { "description": "Whether the opening hours period is currently active. For regular opening hours and current opening hours, this field means whether the place is open. For secondary opening hours and current secondary opening hours, this field means whether the secondary hours of this place is active.", "type": "boolean" @@ -1503,12 +1598,12 @@ "type": "integer" }, "hour": { - "description": "The hour in 2 digits. Ranges from 00 to 23.", + "description": "The hour in 24 hour format. Ranges from 0 to 23.", "format": "int32", "type": "integer" }, "minute": { - "description": "The minute in 2 digits. Ranges from 00 to 59.", + "description": "The minute. Ranges from 0 to 59.", "format": "int32", "type": "integer" }, @@ -1629,6 +1724,21 @@ }, "type": "object" }, + "GoogleMapsPlacesV1PriceRange": { + "description": "The price range associated with a Place. `end_price` could be unset, which indicates a range without upper bound (e.g. \"More than $100\").", + "id": "GoogleMapsPlacesV1PriceRange", + "properties": { + "endPrice": { + "$ref": "GoogleTypeMoney", + "description": "The high end of the price range (exclusive). Price should be lower than this amount." + }, + "startPrice": { + "$ref": "GoogleTypeMoney", + "description": "The low end of the price range (inclusive). Price should be at or above this amount." + } + }, + "type": "object" + }, "GoogleMapsPlacesV1References": { "description": "Experimental: See https://developers.google.com/maps/documentation/places/web-service/experimental/places-generative for more details. Reference that the generative content is related to.", "id": "GoogleMapsPlacesV1References", @@ -1658,6 +1768,14 @@ "$ref": "GoogleMapsPlacesV1AuthorAttribution", "description": "This review's author." 
}, + "flagContentUri": { + "description": "A link where users can flag a problem with the review.", + "type": "string" + }, + "googleMapsUri": { + "description": "A link to show the review on Google Maps.", + "type": "string" + }, "name": { "description": "A reference representing this place review which may be used to look up this place review again (also called the API \"resource\" name: `places/{place_id}/reviews/{review}`).", "type": "string" @@ -1763,6 +1881,10 @@ "description": "The duration and distance from the routing origin to a place in the response, and a second leg from that place to the destination, if requested. **Note:** Adding `routingSummaries` in the field mask without also including either the `routingParameters.origin` parameter or the `searchAlongRouteParameters.polyline.encodedPolyline` parameter in the request causes an error.", "id": "GoogleMapsPlacesV1RoutingSummary", "properties": { + "directionsUri": { + "description": "A link to show directions on Google Maps using the waypoints from the given routing summary. The route generated by this link is not guaranteed to be the same as the route used to generate the routing summary. The link uses information provided in the request, from fields including `routingParameters` and `searchAlongRouteParameters` when applicable, to generate the directions link.", + "type": "string" + }, "legs": { "description": "The legs of the trip. When you calculate travel duration and distance from a set origin, `legs` contains a single leg containing the duration and distance from the origin to the destination. When you do a search along route, `legs` contains two legs: one from the origin to place, and one from the place to the destination.", "items": { @@ -1900,6 +2022,10 @@ "$ref": "GoogleMapsPlacesV1SearchTextRequestEVOptions", "description": "Optional. Set the searchable EV options of a place search request." }, + "includePureServiceAreaBusinesses": { + "description": "Optional. Include pure service area businesses if the field is set to true. Pure service area business is a business that visits or delivers to customers directly but does not serve customers at their business address. For example, businesses like cleaning services or plumbers. Those businesses do not have a physical address or location on Google Maps. Places will not return fields including `location`, `plus_code`, and other location related fields for these businesses.", + "type": "boolean" + }, "includedType": { "description": "The requested place type. Full list of types supported: https://developers.google.com/maps/documentation/places/web-service/place-types. 
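The Places (New) changes in this revision add a request flag (`includePureServiceAreaBusinesses`) and several response fields (`googleMapsLinks`, `priceRange`, `pureServiceAreaBusiness`, `searchUri`, plus the flag-content and Maps links on photos and reviews). These are only returned when named in the field mask. A sketch of a Text Search request using the documented `X-Goog-FieldMask` header; the API key and query are placeholders:

```python
import requests

API_KEY = "YOUR_API_KEY"  # placeholder

resp = requests.post(
    "https://places.googleapis.com/v1/places:searchText",
    headers={
        "X-Goog-Api-Key": API_KEY,
        # Only fields named in the mask are returned, including the new ones.
        "X-Goog-FieldMask": (
            "searchUri,places.displayName,places.priceRange,"
            "places.googleMapsLinks,places.pureServiceAreaBusiness"
        ),
    },
    json={
        "textQuery": "plumber in Austin TX",       # placeholder query
        "includePureServiceAreaBusinesses": True,  # new flag from this revision
    },
    timeout=30,
)
resp.raise_for_status()
for place in resp.json().get("places", []):
    print(place.get("displayName"), place.get("priceRange"), place.get("googleMapsLinks"))
```

Note that pure service area businesses may have no `location` or `plus_code` in the response, as the flag's description states.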
Only support one included type.", "type": "string" @@ -2108,6 +2234,10 @@ "$ref": "GoogleMapsPlacesV1RoutingSummary" }, "type": "array" + }, + "searchUri": { + "description": "A link allows the user to search with the same text query as specified in the request on Google Maps.", + "type": "string" } }, "type": "object" diff --git a/discovery/googleapis/playintegrity__v1.json b/discovery/googleapis/playintegrity__v1.json index e3d74c91e..26589c570 100644 --- a/discovery/googleapis/playintegrity__v1.json +++ b/discovery/googleapis/playintegrity__v1.json @@ -25,7 +25,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20240813", + "revision": "20241127", "rootUrl": "https://playintegrity.googleapis.com/", "servicePath": "", "title": "Google Play Integrity API", @@ -368,10 +368,26 @@ }, "type": "object" }, + "DeviceAttributes": { + "description": "Contains information about the device for which the integrity token was generated, e.g. Android SDK version.", + "id": "DeviceAttributes", + "properties": { + "sdkVersion": { + "description": "Android SDK version of the device, as defined in the public Android documentation: https://developer.android.com/reference/android/os/Build.VERSION_CODES. It won't be set if a necessary requirement was missed. For example DeviceIntegrity did not meet the minimum bar.", + "format": "int32", + "type": "integer" + } + }, + "type": "object" + }, "DeviceIntegrity": { "description": "Contains the device attestation information.", "id": "DeviceIntegrity", "properties": { + "deviceAttributes": { + "$ref": "DeviceAttributes", + "description": "Attributes of the device where the integrity token was generated." + }, "deviceRecall": { "$ref": "DeviceRecall", "description": "Details about the device recall bits set by the developer." @@ -397,6 +413,27 @@ }, "type": "array" }, + "legacyDeviceRecognitionVerdict": { + "description": "Contains legacy details about the integrity of the device the app is running on. Only for devices with Android version T or higher and only for apps opted in to the new verdicts. Only available during the transition period to the new verdicts system and will be removed afterwards.", + "items": { + "enum": [ + "UNKNOWN", + "MEETS_BASIC_INTEGRITY", + "MEETS_DEVICE_INTEGRITY", + "MEETS_STRONG_INTEGRITY", + "MEETS_VIRTUAL_INTEGRITY" + ], + "enumDescriptions": [ + "Play does not have sufficient information to evaluate device integrity", + "App is running on a device that passes basic system integrity checks, but may not meet Android platform compatibility requirements and may not be approved to run Google Play services.", + "App is running on GMS Android device with Google Play services.", + "App is running on GMS Android device with Google Play services and has a strong guarantee of system integrity such as a hardware-backed keystore.", + "App is running on an Android emulator with Google Play services which meets core Android compatibility requirements." + ], + "type": "string" + }, + "type": "array" + }, "recentDeviceActivity": { "$ref": "RecentDeviceActivity", "description": "Details about the device activity of the device the app is running on." 
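The Play Integrity additions above (`DeviceAttributes.sdkVersion` and `legacyDeviceRecognitionVerdict`) surface in the decoded token payload under `deviceIntegrity`. A sketch of reading them after `decodeIntegrityToken`, assuming the discovery-based Python client and credentials authorized for the app; the package name and token value are placeholders:

```python
from googleapiclient.discovery import build

service = build("playintegrity", "v1")  # credentials must be authorized for the app

integrity_token = "..."  # placeholder: opaque token forwarded by the client app

decoded = (
    service.v1()
    .decodeIntegrityToken(
        packageName="com.example.app",             # placeholder package name
        body={"integrityToken": integrity_token},
    )
    .execute()
)

device = decoded["tokenPayloadExternal"].get("deviceIntegrity", {})
sdk_version = device.get("deviceAttributes", {}).get("sdkVersion")  # new in this revision
verdicts = device.get("deviceRecognitionVerdict", [])
legacy_verdicts = device.get("legacyDeviceRecognitionVerdict", [])  # transition-period field
```

Per the field description, `sdkVersion` may be absent when the device does not meet the minimum integrity bar, so treat it as optional.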
diff --git a/discovery/googleapis/policysimulator__v1.json b/discovery/googleapis/policysimulator__v1.json index 9c49650d4..0240769c0 100644 --- a/discovery/googleapis/policysimulator__v1.json +++ b/discovery/googleapis/policysimulator__v1.json @@ -25,7 +25,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20240617", + "revision": "20241118", "rootUrl": "https://policysimulator.googleapis.com/", "servicePath": "", "title": "Policy Simulator API", @@ -979,6 +979,14 @@ "description": "If `true`, then the policy is enforced. If `false`, then any configuration is acceptable. This field can be set only in policies for boolean constraints.", "type": "boolean" }, + "parameters": { + "additionalProperties": { + "description": "Properties of the object.", + "type": "any" + }, + "description": "Optional. Required for GMCs if parameters defined in constraints. Pass parameter values when policy enforcement is enabled. Ensure that parameter value types match those defined in the constraint definition. For example: { \"allowedLocations\" : [\"us-east1\", \"us-west1\"], \"allowAll\" : true }", + "type": "object" + }, "values": { "$ref": "GoogleCloudOrgpolicyV2PolicySpecPolicyRuleStringValues", "description": "List of values to be used for this policy rule. This field can be set only in policies for list constraints." diff --git a/discovery/googleapis/privateca__v1.json b/discovery/googleapis/privateca__v1.json index c4915460e..810056efb 100644 --- a/discovery/googleapis/privateca__v1.json +++ b/discovery/googleapis/privateca__v1.json @@ -25,7 +25,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20241002", + "revision": "20241204", "rootUrl": "https://privateca.googleapis.com/", "servicePath": "", "title": "Certificate Authority API", @@ -396,7 +396,7 @@ ], "parameters": { "name": { - "description": "Output only. Identifier. The resource name for this CaPool in the format `projects/*/locations/*/caPools/*`.", + "description": "Identifier. The resource name for this CaPool in the format `projects/*/locations/*/caPools/*`.", "location": "path", "pattern": "^projects/[^/]+/locations/[^/]+/caPools/[^/]+$", "required": true, @@ -758,7 +758,7 @@ ], "parameters": { "name": { - "description": "Output only. Identifier. The resource name for this CertificateAuthority in the format `projects/*/locations/*/caPools/*/certificateAuthorities/*`.", + "description": "Identifier. The resource name for this CertificateAuthority in the format `projects/*/locations/*/caPools/*/certificateAuthorities/*`.", "location": "path", "pattern": "^projects/[^/]+/locations/[^/]+/caPools/[^/]+/certificateAuthorities/[^/]+$", "required": true, @@ -931,7 +931,7 @@ ], "parameters": { "name": { - "description": "Output only. The resource name for this CertificateRevocationList in the format `projects/*/locations/*/caPools/*certificateAuthorities/*/ certificateRevocationLists/*`.", + "description": "Identifier. The resource name for this CertificateRevocationList in the format `projects/*/locations/*/caPools/*certificateAuthorities/*/ certificateRevocationLists/*`.", "location": "path", "pattern": "^projects/[^/]+/locations/[^/]+/caPools/[^/]+/certificateAuthorities/[^/]+/certificateRevocationLists/[^/]+$", "required": true, @@ -1032,7 +1032,7 @@ ], "parameters": { "certificateId": { - "description": "Optional. It must be unique within a location and match the regular expression `[a-zA-Z0-9_-]{1,63}`. 
This field is required when using a CertificateAuthority in the Enterprise CertificateAuthority.Tier, but is optional and its value is ignored otherwise.", + "description": "Optional. It must be unique within a location and match the regular expression `[a-zA-Z0-9_-]{1,63}`. This field is required when using a CertificateAuthority in the Enterprise CertificateAuthority.tier, but is optional and its value is ignored otherwise.", "location": "query", "type": "string" }, @@ -1151,7 +1151,7 @@ ], "parameters": { "name": { - "description": "Output only. The resource name for this Certificate in the format `projects/*/locations/*/caPools/*/certificates/*`.", + "description": "Identifier. The resource name for this Certificate in the format `projects/*/locations/*/caPools/*/certificates/*`.", "location": "path", "pattern": "^projects/[^/]+/locations/[^/]+/caPools/[^/]+/certificates/[^/]+$", "required": true, @@ -1394,7 +1394,7 @@ ], "parameters": { "name": { - "description": "Output only. The resource name for this CertificateTemplate in the format `projects/*/locations/*/certificateTemplates/*`.", + "description": "Identifier. The resource name for this CertificateTemplate in the format `projects/*/locations/*/certificateTemplates/*`.", "location": "path", "pattern": "^projects/[^/]+/locations/[^/]+/certificateTemplates/[^/]+$", "required": true, @@ -1484,7 +1484,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "privateca.projects.locations.operations.cancel", @@ -1764,8 +1764,7 @@ "type": "object" }, "name": { - "description": "Output only. Identifier. The resource name for this CaPool in the format `projects/*/locations/*/caPools/*`.", - "readOnly": true, + "description": "Identifier. The resource name for this CaPool in the format `projects/*/locations/*/caPools/*`.", "type": "string" }, "publishingOptions": { @@ -1849,8 +1848,7 @@ "type": "string" }, "name": { - "description": "Output only. The resource name for this Certificate in the format `projects/*/locations/*/caPools/*/certificates/*`.", - "readOnly": true, + "description": "Identifier. 
The resource name for this Certificate in the format `projects/*/locations/*/caPools/*/certificates/*`.", "type": "string" }, "pemCertificate": { @@ -1958,8 +1956,7 @@ "type": "string" }, "name": { - "description": "Output only. Identifier. The resource name for this CertificateAuthority in the format `projects/*/locations/*/caPools/*/certificateAuthorities/*`.", - "readOnly": true, + "description": "Identifier. The resource name for this CertificateAuthority in the format `projects/*/locations/*/caPools/*/certificateAuthorities/*`.", "type": "string" }, "pemCaCertificates": { @@ -2218,8 +2215,7 @@ "type": "object" }, "name": { - "description": "Output only. The resource name for this CertificateRevocationList in the format `projects/*/locations/*/caPools/*certificateAuthorities/*/ certificateRevocationLists/*`.", - "readOnly": true, + "description": "Identifier. The resource name for this CertificateRevocationList in the format `projects/*/locations/*/caPools/*certificateAuthorities/*/ certificateRevocationLists/*`.", "type": "string" }, "pemCrl": { @@ -2301,8 +2297,7 @@ "type": "string" }, "name": { - "description": "Output only. The resource name for this CertificateTemplate in the format `projects/*/locations/*/certificateTemplates/*`.", - "readOnly": true, + "description": "Identifier. The resource name for this CertificateTemplate in the format `projects/*/locations/*/certificateTemplates/*`.", "type": "string" }, "passthroughExtensions": { @@ -2643,7 +2638,7 @@ "type": "array" }, "nextPageToken": { - "description": "A token to retrieve next page of results. Pass this value in ListCertificateAuthoritiesRequest.next_page_token to retrieve the next page of results.", + "description": "A token to retrieve next page of results. Pass this value in ListCertificateAuthoritiesRequest.page_token to retrieve the next page of results.", "type": "string" }, "unreachable": { @@ -2668,7 +2663,7 @@ "type": "array" }, "nextPageToken": { - "description": "A token to retrieve next page of results. Pass this value in ListCertificateAuthoritiesRequest.next_page_token to retrieve the next page of results.", + "description": "A token to retrieve next page of results. Pass this value in ListCertificateAuthoritiesRequest.page_token to retrieve the next page of results.", "type": "string" }, "unreachable": { @@ -2693,7 +2688,7 @@ "type": "array" }, "nextPageToken": { - "description": "A token to retrieve next page of results. Pass this value in ListCertificateRevocationListsRequest.next_page_token to retrieve the next page of results.", + "description": "A token to retrieve next page of results. Pass this value in ListCertificateRevocationListsRequest.page_token to retrieve the next page of results.", "type": "string" }, "unreachable": { @@ -2718,7 +2713,7 @@ "type": "array" }, "nextPageToken": { - "description": "A token to retrieve next page of results. Pass this value in ListCertificateTemplatesRequest.next_page_token to retrieve the next page of results.", + "description": "A token to retrieve next page of results. Pass this value in ListCertificateTemplatesRequest.page_token to retrieve the next page of results.", "type": "string" }, "unreachable": { @@ -2743,7 +2738,7 @@ "type": "array" }, "nextPageToken": { - "description": "A token to retrieve next page of results. Pass this value in ListCertificatesRequest.next_page_token to retrieve the next page of results.", + "description": "A token to retrieve next page of results. 
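Several `nextPageToken` descriptions in the Private CA document were corrected to point at the request's `page_token` field rather than a nonexistent `next_page_token`. A sketch of the corresponding pagination loop with the discovery-based Python client; the project, location, and CA pool are placeholders:

```python
from googleapiclient.discovery import build

service = build("privateca", "v1")
cas = service.projects().locations().caPools().certificateAuthorities()

parent = "projects/my-project/locations/us-central1/caPools/my-pool"  # placeholder

page_token = None
while True:
    resp = cas.list(parent=parent, pageToken=page_token).execute()
    for ca in resp.get("certificateAuthorities", []):
        print(ca["name"])
    # nextPageToken feeds the next request's page_token, as the corrected docs state.
    page_token = resp.get("nextPageToken")
    if not page_token:
        break
```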
Pass this value in ListCertificatesRequest.page_token to retrieve the next page of results.", "type": "string" }, "unreachable": { @@ -2965,7 +2960,7 @@ "type": "string" }, "requestedCancellation": { - "description": "Output only. Identifies whether the user has requested cancellation of the operation. Operations that have successfully been cancelled have Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Output only. Identifies whether the user has requested cancellation of the operation. Operations that have successfully been cancelled have google.longrunning.Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", "readOnly": true, "type": "boolean" }, diff --git a/discovery/googleapis/pubsublite__v1.json b/discovery/googleapis/pubsublite__v1.json index 0829d8a71..153b2960f 100644 --- a/discovery/googleapis/pubsublite__v1.json +++ b/discovery/googleapis/pubsublite__v1.json @@ -25,7 +25,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20230804", + "revision": "20241116", "rootUrl": "https://pubsublite.googleapis.com/", "servicePath": "", "title": "Pub/Sub Lite API", @@ -118,7 +118,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/admin/projects/{projectsId}/locations/{locationsId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "pubsublite.admin.projects.locations.operations.cancel", diff --git a/discovery/googleapis/rapidmigrationassessment__v1.json b/discovery/googleapis/rapidmigrationassessment__v1.json index aaf8a9c6c..78ef90ddc 100644 --- a/discovery/googleapis/rapidmigrationassessment__v1.json +++ b/discovery/googleapis/rapidmigrationassessment__v1.json @@ -25,7 +25,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20230824", + "revision": "20241211", "rootUrl": "https://rapidmigrationassessment.googleapis.com/", "servicePath": "", "title": "Rapid Migration Assessment API", @@ -512,7 +512,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. 
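Editorial aside on the corrected privateca pagination docs above: each List*Response.next_page_token is meant to be echoed back as the page_token of the follow-up request. A minimal sketch using the discovery-based Python client; the project, location, and CA pool names are placeholders.

    # Sketch: page through Certificates in a CA pool, feeding each response's
    # nextPageToken back as pageToken, per the corrected descriptions above.
    from googleapiclient.discovery import build

    privateca = build("privateca", "v1")
    parent = "projects/my-project/locations/us-central1/caPools/my-pool"  # hypothetical

    page_token = None
    while True:
        resp = privateca.projects().locations().caPools().certificates().list(
            parent=parent, pageSize=100, pageToken=page_token
        ).execute()
        for cert in resp.get("certificates", []):
            print(cert["name"])
        page_token = resp.get("nextPageToken")
        if not page_token:
            break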
Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "rapidmigrationassessment.projects.locations.operations.cancel", diff --git a/discovery/googleapis/recaptchaenterprise__v1.json b/discovery/googleapis/recaptchaenterprise__v1.json index 140fb7fee..3f692101c 100644 --- a/discovery/googleapis/recaptchaenterprise__v1.json +++ b/discovery/googleapis/recaptchaenterprise__v1.json @@ -25,7 +25,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20241013", + "revision": "20241208", "rootUrl": "https://recaptchaenterprise.googleapis.com/", "servicePath": "", "title": "reCAPTCHA Enterprise API", @@ -1177,9 +1177,9 @@ "DISABLED" ], "enumDescriptions": [ - "Default, unspecified setting. If opted in for automatic detection, `fraud_prevention_assessment` is returned based on the request. Otherwise, `fraud_prevention_assessment` is returned if `transaction_data` is present in the `Event` and Fraud Prevention is enabled in the Google Cloud console.", + "Default, unspecified setting. `fraud_prevention_assessment` is returned if `transaction_data` is present in `Event` and Fraud Prevention is enabled in the Google Cloud console.", "Enable Fraud Prevention for this assessment, if Fraud Prevention is enabled in the Google Cloud console.", - "Disable Fraud Prevention for this assessment, regardless of opt-in status or Google Cloud console settings." + "Disable Fraud Prevention for this assessment, regardless of Google Cloud console settings." ], "type": "string" }, @@ -1805,7 +1805,7 @@ "type": "object" }, "GoogleCloudRecaptchaenterpriseV1RemoveIpOverrideRequest": { - "description": "The removeIpOverride request message.", + "description": "The RemoveIpOverride request message.", "id": "GoogleCloudRecaptchaenterpriseV1RemoveIpOverrideRequest", "properties": { "ipOverrideData": { @@ -1857,7 +1857,7 @@ "id": "GoogleCloudRecaptchaenterpriseV1RiskAnalysis", "properties": { "challenge": { - "description": "Output only. Challenge information for SCORE_AND_CHALLENGE keys", + "description": "Output only. 
Challenge information for SCORE_AND_CHALLENGE and INVISIBLE keys", "enum": [ "CHALLENGE_UNSPECIFIED", "NOCAPTCHA", diff --git a/discovery/googleapis/redis__v1.json b/discovery/googleapis/redis__v1.json index 3d4bdd538..bdc7e9f22 100644 --- a/discovery/googleapis/redis__v1.json +++ b/discovery/googleapis/redis__v1.json @@ -25,7 +25,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20241017", + "revision": "20241203", "rootUrl": "https://redis.googleapis.com/", "servicePath": "", "title": "Google Cloud Memorystore for Redis API", @@ -181,8 +181,226 @@ } }, "resources": { + "backupCollections": { + "methods": { + "get": { + "description": "Get a backup collection.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/backupCollections/{backupCollectionsId}", + "httpMethod": "GET", + "id": "redis.projects.locations.backupCollections.get", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "Required. Redis backupCollection resource name using the form: `projects/{project_id}/locations/{location_id}/backupCollections/{backup_collection_id}` where `location_id` refers to a GCP region.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/backupCollections/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+name}", + "response": { + "$ref": "BackupCollection" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "list": { + "description": "Lists all backup collections owned by a consumer project in either the specified location (region) or all locations. If `location_id` is specified as `-` (wildcard), then all regions available to the project are queried, and the results are aggregated.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/backupCollections", + "httpMethod": "GET", + "id": "redis.projects.locations.backupCollections.list", + "parameterOrder": [ + "parent" + ], + "parameters": { + "pageSize": { + "description": "Optional. The maximum number of items to return. If not specified, a default value of 1000 will be used by the service. Regardless of the page_size value, the response may include a partial list and a caller should only rely on response's `next_page_token` to determine if there are more clusters left to be queried.", + "format": "int32", + "location": "query", + "type": "integer" + }, + "pageToken": { + "description": "Optional. The `next_page_token` value returned from a previous [ListBackupCollections] request, if any.", + "location": "query", + "type": "string" + }, + "parent": { + "description": "Required. The resource name of the backupCollection location using the form: `projects/{project_id}/locations/{location_id}` where `location_id` refers to a GCP region.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+parent}/backupCollections", + "response": { + "$ref": "ListBackupCollectionsResponse" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + } + }, + "resources": { + "backups": { + "methods": { + "delete": { + "description": "Deletes a specific backup.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/backupCollections/{backupCollectionsId}/backups/{backupsId}", + "httpMethod": "DELETE", + "id": "redis.projects.locations.backupCollections.backups.delete", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "Required. 
Redis backup resource name using the form: `projects/{project_id}/locations/{location_id}/backupCollections/{backup_collection_id}/backups/{backup_id}`", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/backupCollections/[^/]+/backups/[^/]+$", + "required": true, + "type": "string" + }, + "requestId": { + "description": "Optional. Idempotent request UUID.", + "location": "query", + "type": "string" + } + }, + "path": "v1/{+name}", + "response": { + "$ref": "Operation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "export": { + "description": "Exports a specific backup to a customer target Cloud Storage URI.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/backupCollections/{backupCollectionsId}/backups/{backupsId}:export", + "httpMethod": "POST", + "id": "redis.projects.locations.backupCollections.backups.export", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "Required. Redis backup resource name using the form: `projects/{project_id}/locations/{location_id}/backupCollections/{backup_collection_id}/backups/{backup_id}`", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/backupCollections/[^/]+/backups/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+name}:export", + "request": { + "$ref": "ExportBackupRequest" + }, + "response": { + "$ref": "Operation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "get": { + "description": "Gets the details of a specific backup.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/backupCollections/{backupCollectionsId}/backups/{backupsId}", + "httpMethod": "GET", + "id": "redis.projects.locations.backupCollections.backups.get", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "Required. Redis backup resource name using the form: `projects/{project_id}/locations/{location_id}/backupCollections/{backup_collection_id}/backups/{backup_id}`", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/backupCollections/[^/]+/backups/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+name}", + "response": { + "$ref": "Backup" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "list": { + "description": "Lists all backups owned by a backup collection.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/backupCollections/{backupCollectionsId}/backups", + "httpMethod": "GET", + "id": "redis.projects.locations.backupCollections.backups.list", + "parameterOrder": [ + "parent" + ], + "parameters": { + "pageSize": { + "description": "Optional. The maximum number of items to return. If not specified, a default value of 1000 will be used by the service. Regardless of the page_size value, the response may include a partial list and a caller should only rely on response's `next_page_token` to determine if there are more clusters left to be queried.", + "format": "int32", + "location": "query", + "type": "integer" + }, + "pageToken": { + "description": "Optional. The `next_page_token` value returned from a previous [ListBackupCollections] request, if any.", + "location": "query", + "type": "string" + }, + "parent": { + "description": "Required. 
The resource name of the backupCollection using the form: `projects/{project_id}/locations/{location_id}/backupCollections/{backup_collection_id}`", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/backupCollections/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+parent}/backups", + "response": { + "$ref": "ListBackupsResponse" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + } + } + } + } + }, "clusters": { "methods": { + "backup": { + "description": "Backup Redis Cluster. If this is the first time a backup is being created, a backup collection will be created at the backend, and this backup belongs to this collection. Both collection and backup will have a resource name. Backup will be executed for each shard. A replica (primary if nonHA) will be selected to perform the execution. Backup call will be rejected if there is an ongoing backup or update operation. Be aware that during preview, if the cluster's internal software version is too old, critical update will be performed before actual backup. Once the internal software version is updated to the minimum version required by the backup feature, subsequent backups will not require critical update. After preview, there will be no critical update needed for backup.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/clusters/{clustersId}:backup", + "httpMethod": "POST", + "id": "redis.projects.locations.clusters.backup", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "Required. Redis cluster resource name using the form: `projects/{project_id}/locations/{location_id}/clusters/{cluster_id}` where `location_id` refers to a GCP region.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+/clusters/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+name}:backup", + "request": { + "$ref": "BackupClusterRequest" + }, + "response": { + "$ref": "Operation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, "create": { "description": "Creates a Redis cluster based on the specified properties. The creation is executed asynchronously and callers may check the returned operation to track its progress. Once the operation is completed the Redis cluster will be fully functional. The completed longrunning.Operation will contain the new cluster object in the response field. The returned operation is automatically deleted after a few hours, so there is no need to call DeleteOperation.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/clusters", @@ -731,7 +949,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. 
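Editorial aside on the new Memorystore for Redis backup surface added above: an on-demand backup is a POST to v1/{name}:backup with an optional BackupClusterRequest body, and the resulting backups are then listed under their backup collections. A rough sketch with the discovery-based Python client; all resource names are placeholders.

    # Sketch: trigger an on-demand backup of a Redis cluster, then list the
    # backups stored in the cluster's backup collections.
    from googleapiclient.discovery import build

    redis = build("redis", "v1")
    cluster = "projects/my-project/locations/us-central1/clusters/my-cluster"  # hypothetical

    op = redis.projects().locations().clusters().backup(
        name=cluster,
        body={"backupId": "manual-20241215", "ttl": "2592000s"},  # keep for 30 days
    ).execute()
    print("backup operation:", op["name"])

    loc = "projects/my-project/locations/us-central1"
    collections = redis.projects().locations().backupCollections().list(parent=loc).execute()
    for coll in collections.get("backupCollections", []):
        backups = redis.projects().locations().backupCollections().backups().list(
            parent=coll["name"]
        ).execute()
        for b in backups.get("backups", []):
            print(b["name"], b.get("state"))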
Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "redis.projects.locations.operations.cancel", @@ -877,6 +1095,36 @@ }, "type": "object" }, + "AutomatedBackupConfig": { + "description": "The automated backup config for a cluster.", + "id": "AutomatedBackupConfig", + "properties": { + "automatedBackupMode": { + "description": "Optional. The automated backup mode. If the mode is disabled, the other fields will be ignored.", + "enum": [ + "AUTOMATED_BACKUP_MODE_UNSPECIFIED", + "DISABLED", + "ENABLED" + ], + "enumDescriptions": [ + "Default value. Automated backup config is not specified.", + "Automated backup config disabled.", + "Automated backup config enabled." + ], + "type": "string" + }, + "fixedFrequencySchedule": { + "$ref": "FixedFrequencySchedule", + "description": "Optional. Trigger automated backups at a fixed frequency." + }, + "retention": { + "description": "Optional. How long to keep automated backups before the backups are deleted. The value should be between 1 day and 365 days. If not specified, the default value is 35 days.", + "format": "google-duration", + "type": "string" + } + }, + "type": "object" + }, "AvailabilityConfiguration": { "description": "Configuration for availability of database instance", "id": "AvailabilityConfiguration", @@ -916,6 +1164,170 @@ }, "type": "object" }, + "Backup": { + "description": "Backup of a cluster.", + "id": "Backup", + "properties": { + "backupFiles": { + "description": "Output only. List of backup files of the backup.", + "items": { + "$ref": "BackupFile" + }, + "readOnly": true, + "type": "array" + }, + "backupType": { + "description": "Output only. Type of the backup.", + "enum": [ + "BACKUP_TYPE_UNSPECIFIED", + "ON_DEMAND", + "AUTOMATED" + ], + "enumDescriptions": [ + "The default value, not set.", + "On-demand backup.", + "Automated backup." + ], + "readOnly": true, + "type": "string" + }, + "cluster": { + "description": "Output only. Cluster resource path of this backup.", + "readOnly": true, + "type": "string" + }, + "clusterUid": { + "description": "Output only. Cluster uid of this backup.", + "readOnly": true, + "type": "string" + }, + "createTime": { + "description": "Output only. The time when the backup was created.", + "format": "google-datetime", + "readOnly": true, + "type": "string" + }, + "engineVersion": { + "description": "Output only. redis-7.2, valkey-7.5", + "readOnly": true, + "type": "string" + }, + "expireTime": { + "description": "Output only. The time when the backup will expire.", + "format": "google-datetime", + "readOnly": true, + "type": "string" + }, + "name": { + "description": "Identifier. Full resource path of the backup. the last part of the name is the backup id with the following format: [YYYYMMDDHHMMSS]_[Shorted Cluster UID] OR customer specified while backup cluster. Example: 20240515123000_1234", + "type": "string" + }, + "nodeType": { + "description": "Output only. 
Node type of the cluster.", + "enum": [ + "NODE_TYPE_UNSPECIFIED", + "REDIS_SHARED_CORE_NANO", + "REDIS_HIGHMEM_MEDIUM", + "REDIS_HIGHMEM_XLARGE", + "REDIS_STANDARD_SMALL" + ], + "enumDescriptions": [ + "Node type unspecified", + "Redis shared core nano node_type.", + "Redis highmem medium node_type.", + "Redis highmem xlarge node_type.", + "Redis standard small node_type." + ], + "readOnly": true, + "type": "string" + }, + "replicaCount": { + "description": "Output only. Number of replicas for the cluster.", + "format": "int32", + "readOnly": true, + "type": "integer" + }, + "shardCount": { + "description": "Output only. Number of shards for the cluster.", + "format": "int32", + "readOnly": true, + "type": "integer" + }, + "state": { + "description": "Output only. State of the backup.", + "enum": [ + "STATE_UNSPECIFIED", + "CREATING", + "ACTIVE", + "DELETING", + "SUSPENDED" + ], + "enumDescriptions": [ + "The default value, not set.", + "The backup is being created.", + "The backup is active to be used.", + "The backup is being deleted.", + "The backup is currently suspended due to reasons like project deletion, billing account closure, etc." + ], + "readOnly": true, + "type": "string" + }, + "totalSizeBytes": { + "description": "Output only. Total size of the backup in bytes.", + "format": "int64", + "readOnly": true, + "type": "string" + }, + "uid": { + "description": "Output only. System assigned unique identifier of the backup.", + "readOnly": true, + "type": "string" + } + }, + "type": "object" + }, + "BackupClusterRequest": { + "description": "Request for [BackupCluster].", + "id": "BackupClusterRequest", + "properties": { + "backupId": { + "description": "Optional. The id of the backup to be created. If not specified, the default value ([YYYYMMDDHHMMSS]_[Shortened Cluster UID] is used.", + "type": "string" + }, + "ttl": { + "description": "Optional. TTL for the backup to expire. Value range is 1 day to 100 years. If not specified, the default value is 100 years.", + "format": "google-duration", + "type": "string" + } + }, + "type": "object" + }, + "BackupCollection": { + "description": "BackupCollection of a cluster.", + "id": "BackupCollection", + "properties": { + "cluster": { + "description": "Output only. The full resource path of the cluster the backup collection belongs to. Example: projects/{project}/locations/{location}/clusters/{cluster}", + "readOnly": true, + "type": "string" + }, + "clusterUid": { + "description": "Output only. The cluster uid of the backup collection.", + "readOnly": true, + "type": "string" + }, + "name": { + "description": "Identifier. Full resource path of the backup collection.", + "type": "string" + }, + "uid": { + "description": "Output only. System assigned unique identifier of the backup collection.", + "readOnly": true, + "type": "string" + } + }, + "type": "object" + }, "BackupConfiguration": { "description": "Configuration for automatic backups", "id": "BackupConfiguration", @@ -935,6 +1347,30 @@ }, "type": "object" }, + "BackupFile": { + "description": "Backup is consisted of multiple backup files.", + "id": "BackupFile", + "properties": { + "createTime": { + "description": "Output only. The time when the backup file was created.", + "format": "google-datetime", + "readOnly": true, + "type": "string" + }, + "fileName": { + "description": "Output only. e.g: .rdb", + "readOnly": true, + "type": "string" + }, + "sizeBytes": { + "description": "Output only. 
Size of the backup file in bytes.", + "format": "int64", + "readOnly": true, + "type": "string" + } + }, + "type": "object" + }, "BackupRun": { "description": "A backup run.", "id": "BackupRun", @@ -1015,6 +1451,15 @@ ], "type": "string" }, + "automatedBackupConfig": { + "$ref": "AutomatedBackupConfig", + "description": "Optional. The automated backup config for the cluster." + }, + "backupCollection": { + "description": "Optional. Output only. The backup collection full resource name. Example: projects/{project}/locations/{location}/backupCollections/{collection}", + "readOnly": true, + "type": "string" + }, "clusterEndpoints": { "description": "Optional. A list of cluster enpoints.", "items": { @@ -1044,6 +1489,10 @@ "readOnly": true, "type": "array" }, + "gcsSource": { + "$ref": "GcsBackupSource", + "description": "Optional. Backups stored in Cloud Storage buckets. The Cloud Storage buckets need to be the same region as the clusters. Read permission is required to import from the provided Cloud Storage objects." + }, "maintenancePolicy": { "$ref": "ClusterMaintenancePolicy", "description": "Optional. ClusterMaintenancePolicy determines when to allow or deny updates." @@ -1053,6 +1502,10 @@ "description": "Output only. ClusterMaintenanceSchedule Output only Published maintenance schedule.", "readOnly": true }, + "managedBackupSource": { + "$ref": "ManagedBackupSource", + "description": "Optional. Backups generated and managed by memorystore service." + }, "name": { "description": "Required. Identifier. Unique name of the resource in this scope including project and location using the form: `projects/{project_id}/locations/{location_id}/clusters/{cluster_id}`", "type": "string" @@ -1325,6 +1778,10 @@ "description": "Detailed information of each PSC connection.", "id": "ConnectionDetail", "properties": { + "pscAutoConnection": { + "$ref": "PscAutoConnection", + "description": "Detailed information of a PSC connection that is created through service connectivity automation." + }, "pscConnection": { "$ref": "PscConnection", "description": "Detailed information of a PSC connection that is created by the customer who owns the cluster." @@ -2391,6 +2848,17 @@ }, "type": "object" }, + "ExportBackupRequest": { + "description": "Request for [ExportBackup].", + "id": "ExportBackupRequest", + "properties": { + "gcsBucket": { + "description": "Google Cloud Storage bucket, like \"my-bucket\".", + "type": "string" + } + }, + "type": "object" + }, "ExportInstanceRequest": { "description": "Request for Export.", "id": "ExportInstanceRequest", @@ -2423,6 +2891,31 @@ }, "type": "object" }, + "FixedFrequencySchedule": { + "description": "This schedule allows the backup to be triggered at a fixed frequency (currently only daily is supported).", + "id": "FixedFrequencySchedule", + "properties": { + "startTime": { + "$ref": "TimeOfDay", + "description": "Required. The start time of every automated backup in UTC. It must be set to the start of an hour. This field is required." + } + }, + "type": "object" + }, + "GcsBackupSource": { + "description": "Backups stored in Cloud Storage buckets. The Cloud Storage buckets need to be the same region as the clusters.", + "id": "GcsBackupSource", + "properties": { + "uris": { + "description": "Optional. URIs of the GCS objects to import. 
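Editorial aside on AutomatedBackupConfig and ExportBackupRequest above: the sketch below enables a daily schedule with the documented 35-day default retention, then exports one backup to a Cloud Storage bucket via backups:export. The clusters.patch call and the backup resource name are assumptions for illustration and are not part of this hunk.

    # Sketch: enable daily automated backups on an existing cluster, then
    # export one of its backups to Cloud Storage.
    from googleapiclient.discovery import build

    redis = build("redis", "v1")
    cluster = "projects/my-project/locations/us-central1/clusters/my-cluster"  # hypothetical

    automated_backup_config = {
        "automatedBackupMode": "ENABLED",
        "fixedFrequencySchedule": {"startTime": {"hours": 3}},  # 03:00 UTC, start of an hour
        "retention": "3024000s",  # 35 days, the documented default
    }
    # Assumes the standard clusters.patch method (not shown in the hunk above).
    redis.projects().locations().clusters().patch(
        name=cluster,
        updateMask="automated_backup_config",
        body={"name": cluster, "automatedBackupConfig": automated_backup_config},
    ).execute()

    backup = ("projects/my-project/locations/us-central1/"
              "backupCollections/my-collection/backups/20240515123000_1234")  # hypothetical
    redis.projects().locations().backupCollections().backups().export(
        name=backup, body={"gcsBucket": "my-bucket"}
    ).execute()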
Example: gs://bucket1/object1, gs://bucket2/folder2/object2", + "items": { + "type": "string" + }, + "type": "array" + } + }, + "type": "object" + }, "GcsDestination": { "description": "The Cloud Storage location for the output content", "id": "GcsDestination", @@ -2823,6 +3316,56 @@ }, "type": "object" }, + "ListBackupCollectionsResponse": { + "description": "Response for [ListBackupCollections].", + "id": "ListBackupCollectionsResponse", + "properties": { + "backupCollections": { + "description": "A list of backupCollections in the project. If the `location_id` in the parent field of the request is \"-\", all regions available to the project are queried, and the results aggregated. If in such an aggregated query a location is unavailable, a placeholder backupCollection entry is included in the response with the `name` field set to a value of the form `projects/{project_id}/locations/{location_id}/backupCollections/`- and the `status` field set to ERROR and `status_message` field set to \"location not available for ListBackupCollections\".", + "items": { + "$ref": "BackupCollection" + }, + "type": "array" + }, + "nextPageToken": { + "description": "Token to retrieve the next page of results, or empty if there are no more results in the list.", + "type": "string" + }, + "unreachable": { + "description": "Locations that could not be reached.", + "items": { + "type": "string" + }, + "type": "array" + } + }, + "type": "object" + }, + "ListBackupsResponse": { + "description": "Response for [ListBackups].", + "id": "ListBackupsResponse", + "properties": { + "backups": { + "description": "A list of backups in the project.", + "items": { + "$ref": "Backup" + }, + "type": "array" + }, + "nextPageToken": { + "description": "Token to retrieve the next page of results, or empty if there are no more results in the list.", + "type": "string" + }, + "unreachable": { + "description": "Backups that could not be reached.", + "items": { + "type": "string" + }, + "type": "array" + } + }, + "type": "object" + }, "ListClustersResponse": { "description": "Response for ListClusters.", "id": "ListClustersResponse", @@ -2961,6 +3504,11 @@ "description": "Optional. Number of shards (if applicable).", "format": "int32", "type": "integer" + }, + "vcpuCount": { + "description": "Optional. The number of vCPUs. TODO(b/342344482, b/342346271) add proto validations again after bug fix.", + "format": "double", + "type": "number" } }, "type": "object" @@ -3025,6 +3573,17 @@ }, "type": "object" }, + "ManagedBackupSource": { + "description": "Backups that generated and managed by memorystore.", + "id": "ManagedBackupSource", + "properties": { + "backup": { + "description": "Optional. Example: //redis.googleapis.com/projects/{project}/locations/{location}/backupCollections/{collection}/backups/{backup} A shorter version (without the prefix) of the backup name is also supported, like projects/{project}/locations/{location}/backupCollections/{collection}/backups/{backup_id} In this case, it assumes the backup is under redis.googleapis.com.", + "type": "string" + } + }, + "type": "object" + }, "ManagedCertificateAuthority": { "id": "ManagedCertificateAuthority", "properties": { @@ -3423,6 +3982,73 @@ }, "type": "object" }, + "PscAutoConnection": { + "description": "Details of consumer resources in a PSC connection that is created through Service Connectivity Automation.", + "id": "PscAutoConnection", + "properties": { + "address": { + "description": "Output only. 
The IP allocated on the consumer network for the PSC forwarding rule.", + "readOnly": true, + "type": "string" + }, + "connectionType": { + "description": "Output only. Type of the PSC connection.", + "enum": [ + "CONNECTION_TYPE_UNSPECIFIED", + "CONNECTION_TYPE_DISCOVERY", + "CONNECTION_TYPE_PRIMARY", + "CONNECTION_TYPE_READER" + ], + "enumDescriptions": [ + "Cluster endpoint Type is not set", + "Cluster endpoint that will be used as for cluster topology discovery.", + "Cluster endpoint that will be used as primary endpoint to access primary.", + "Cluster endpoint that will be used as reader endpoint to access replicas." + ], + "readOnly": true, + "type": "string" + }, + "forwardingRule": { + "description": "Output only. The URI of the consumer side forwarding rule. Example: projects/{projectNumOrId}/regions/us-east1/forwardingRules/{resourceId}.", + "readOnly": true, + "type": "string" + }, + "network": { + "description": "Required. The consumer network where the IP address resides, in the form of projects/{project_id}/global/networks/{network_id}.", + "type": "string" + }, + "projectId": { + "description": "Required. The consumer project_id where the forwarding rule is created from.", + "type": "string" + }, + "pscConnectionId": { + "description": "Output only. The PSC connection id of the forwarding rule connected to the service attachment.", + "readOnly": true, + "type": "string" + }, + "pscConnectionStatus": { + "description": "Output only. The status of the PSC connection. Please note that this value is updated periodically. Please use Private Service Connect APIs for the latest status.", + "enum": [ + "PSC_CONNECTION_STATUS_UNSPECIFIED", + "PSC_CONNECTION_STATUS_ACTIVE", + "PSC_CONNECTION_STATUS_NOT_FOUND" + ], + "enumDescriptions": [ + "PSC connection status is not specified.", + "The connection is active", + "Connection not found" + ], + "readOnly": true, + "type": "string" + }, + "serviceAttachment": { + "description": "Output only. The service attachment which is the target of the PSC connection, in the form of projects/{project-id}/regions/{region}/serviceAttachments/{service-attachment-id}.", + "readOnly": true, + "type": "string" + } + }, + "type": "object" + }, "PscConfig": { "id": "PscConfig", "properties": { diff --git a/discovery/googleapis/retail__v2.json b/discovery/googleapis/retail__v2.json index 87e83c74d..bb626f52a 100644 --- a/discovery/googleapis/retail__v2.json +++ b/discovery/googleapis/retail__v2.json @@ -25,7 +25,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20241017", + "revision": "20241205", "rootUrl": "https://retail.googleapis.com/", "servicePath": "", "title": "Vertex AI Search for Retail API", @@ -2516,7 +2516,7 @@ "enumDescriptions": [ "Value used when unset. In this case, server behavior defaults to CATALOG_LEVEL_ATTRIBUTE_CONFIG.", "At this level, we honor the attribute configurations set in Product.attributes.", - "At this level, we honor the attribute configurations set in CatalogConfig.attribute_configs." + "At this level, we honor the attribute configurations set in `CatalogConfig.attribute_configs`." ], "readOnly": true, "type": "string" @@ -2617,7 +2617,7 @@ }, "partitionDate": { "$ref": "GoogleTypeDate", - "description": "BigQuery time partitioned table's _PARTITIONDATE in YYYY-MM-DD format. Only supported in ImportProductsRequest." + "description": "BigQuery time partitioned table's _PARTITIONDATE in YYYY-MM-DD format." 
}, "projectId": { "description": "The project ID (can be project # or ID) that the BigQuery source is in with a length limit of 128 characters. If not specified, inherits the project ID from the parent request.", @@ -2686,7 +2686,7 @@ "description": "Contains facet options." }, "inUse": { - "description": "Output only. Indicates whether this attribute has been used by any products. `True` if at least one Product is using this attribute in Product.attributes. Otherwise, this field is `False`. CatalogAttribute can be pre-loaded by using CatalogService.AddCatalogAttribute, CatalogService.ImportCatalogAttributes, or CatalogService.UpdateAttributesConfig APIs. This field is `False` for pre-loaded CatalogAttributes. Only pre-loaded catalog attributes that are neither in use by products nor predefined can be deleted. Catalog attributes that are either in use by products or are predefined attributes cannot be deleted; however, their configuration properties will reset to default values upon removal request. After catalog changes, it takes about 10 minutes for this field to update.", + "description": "Output only. Indicates whether this attribute has been used by any products. `True` if at least one Product is using this attribute in Product.attributes. Otherwise, this field is `False`. CatalogAttribute can be pre-loaded by using CatalogService.AddCatalogAttribute or CatalogService.UpdateAttributesConfig APIs. This field is `False` for pre-loaded CatalogAttributes. Only pre-loaded catalog attributes that are neither in use by products nor predefined can be deleted. Catalog attributes that are either in use by products or are predefined attributes cannot be deleted; however, their configuration properties will reset to default values upon removal request. After catalog changes, it takes about 10 minutes for this field to update.", "readOnly": true, "type": "boolean" }, @@ -3213,7 +3213,7 @@ "type": "object" }, "GoogleCloudRetailV2ExperimentInfo": { - "description": "Metadata for active A/B testing Experiment.", + "description": "Metadata for active A/B testing experiment.", "id": "GoogleCloudRetailV2ExperimentInfo", "properties": { "experiment": { @@ -3232,7 +3232,7 @@ "id": "GoogleCloudRetailV2ExperimentInfoServingConfigExperiment", "properties": { "experimentServingConfig": { - "description": "The fully qualified resource name of the serving config Experiment.VariantArm.serving_config_id responsible for generating the search response. For example: `projects/*/locations/*/catalogs/*/servingConfigs/*`.", + "description": "The fully qualified resource name of the serving config `Experiment.VariantArm.serving_config_id` responsible for generating the search response. For example: `projects/*/locations/*/catalogs/*/servingConfigs/*`.", "type": "string" }, "originalServingConfig": { @@ -3437,7 +3437,7 @@ "type": "object" }, "GoogleCloudRetailV2Image": { - "description": "Product image. Recommendations AI and Retail Search do not use product images to improve prediction and search results. However, product images can be returned in results, and are shown in prediction or search previews in the console.", + "description": "Product image. Recommendations AI and Retail Search use product images to improve prediction and search results. Product images can be returned in results, and are shown in prediction or search previews in the console. 
Please try to provide correct product images and avoid using images with size too small.", "id": "GoogleCloudRetailV2Image", "properties": { "height": { @@ -3770,23 +3770,41 @@ "additionalProperties": { "$ref": "GoogleCloudRetailV2CustomAttribute" }, - "description": "Additional local inventory attributes, for example, store name, promotion tags, etc. This field needs to pass all below criteria, otherwise an INVALID_ARGUMENT error is returned: * At most 30 attributes are allowed. * The key must be a UTF-8 encoded string with a length limit of 32 characters. * The key must match the pattern: `a-zA-Z0-9*`. For example, key0LikeThis or KEY_1_LIKE_THIS. * The attribute values must be of the same type (text or number). * Only 1 value is allowed for each attribute. * For text values, the length limit is 256 UTF-8 characters. * The attribute does not support search. The `searchable` field should be unset or set to false. * The max summed total bytes of custom attribute keys and values per product is 5MiB.", + "description": "Optional. Additional local inventory attributes, for example, store name, promotion tags, etc. This field needs to pass all below criteria, otherwise an INVALID_ARGUMENT error is returned: * At most 30 attributes are allowed. * The key must be a UTF-8 encoded string with a length limit of 32 characters. * The key must match the pattern: `a-zA-Z0-9*`. For example, key0LikeThis or KEY_1_LIKE_THIS. * The attribute values must be of the same type (text or number). * Only 1 value is allowed for each attribute. * For text values, the length limit is 256 UTF-8 characters. * The attribute does not support search. The `searchable` field should be unset or set to false. * The max summed total bytes of custom attribute keys and values per product is 5MiB.", "type": "object" }, + "availability": { + "description": "Optional. The availability of the Product at this place_id. Default to Availability.IN_STOCK. For primary products with variants set the availability of the primary as Availability.OUT_OF_STOCK and set the true availability at the variant level. This way the primary product will be considered \"in stock\" as long as it has at least one variant in stock. For primary products with no variants set the true availability at the primary level. Corresponding properties: Google Merchant Center property [availability](https://support.google.com/merchants/answer/6324448). Schema.org property [Offer.availability](https://schema.org/availability).", + "enum": [ + "AVAILABILITY_UNSPECIFIED", + "IN_STOCK", + "OUT_OF_STOCK", + "PREORDER", + "BACKORDER" + ], + "enumDescriptions": [ + "Default product availability. Default to Availability.IN_STOCK if unset.", + "Product in stock.", + "Product out of stock.", + "Product that is in pre-order state.", + "Product that is back-ordered (i.e. temporarily out of stock)." + ], + "type": "string" + }, "fulfillmentTypes": { - "description": "Input only. Supported fulfillment types. Valid fulfillment type values include commonly used types (such as pickup in store and same day delivery), and custom types. Customers have to map custom types to their display names before rendering UI. Supported values: * \"pickup-in-store\" * \"ship-to-store\" * \"same-day-delivery\" * \"next-day-delivery\" * \"custom-type-1\" * \"custom-type-2\" * \"custom-type-3\" * \"custom-type-4\" * \"custom-type-5\" If this field is set to an invalid value other than these, an INVALID_ARGUMENT error is returned. All the elements must be distinct. 
Otherwise, an INVALID_ARGUMENT error is returned.", + "description": "Optional. Supported fulfillment types. Valid fulfillment type values include commonly used types (such as pickup in store and same day delivery), and custom types. Customers have to map custom types to their display names before rendering UI. Supported values: * \"pickup-in-store\" * \"ship-to-store\" * \"same-day-delivery\" * \"next-day-delivery\" * \"custom-type-1\" * \"custom-type-2\" * \"custom-type-3\" * \"custom-type-4\" * \"custom-type-5\" If this field is set to an invalid value other than these, an INVALID_ARGUMENT error is returned. All the elements must be distinct. Otherwise, an INVALID_ARGUMENT error is returned.", "items": { "type": "string" }, "type": "array" }, "placeId": { - "description": "The place ID for the current set of inventory information.", + "description": "Required. The place ID for the current set of inventory information.", "type": "string" }, "priceInfo": { "$ref": "GoogleCloudRetailV2PriceInfo", - "description": "Product price and cost information. Google Merchant Center property [price](https://support.google.com/merchants/answer/6324371)." + "description": "Optional. Product price and cost information. Google Merchant Center property [price](https://support.google.com/merchants/answer/6324371)." } }, "type": "object" @@ -4043,6 +4061,41 @@ "properties": {}, "type": "object" }, + "GoogleCloudRetailV2PinControlMetadata": { + "description": "Metadata for pinning to be returned in the response. This is used for distinguishing between applied vs dropped pins.", + "id": "GoogleCloudRetailV2PinControlMetadata", + "properties": { + "allMatchedPins": { + "additionalProperties": { + "$ref": "GoogleCloudRetailV2PinControlMetadataProductPins" + }, + "description": "Map of all matched pins, keyed by pin position.", + "type": "object" + }, + "droppedPins": { + "additionalProperties": { + "$ref": "GoogleCloudRetailV2PinControlMetadataProductPins" + }, + "description": "Map of pins that were dropped due to overlap with other matching pins, keyed by pin position.", + "type": "object" + } + }, + "type": "object" + }, + "GoogleCloudRetailV2PinControlMetadataProductPins": { + "description": "List of product ids which have associated pins.", + "id": "GoogleCloudRetailV2PinControlMetadataProductPins", + "properties": { + "productId": { + "description": "List of product ids which have associated pins.", + "items": { + "type": "string" + }, + "type": "array" + } + }, + "type": "object" + }, "GoogleCloudRetailV2PredictRequest": { "description": "Request message for Predict method.", "id": "GoogleCloudRetailV2PredictRequest", @@ -5482,7 +5535,7 @@ "type": "string" }, "experimentInfo": { - "description": "Metadata related to A/B testing Experiment associated with this response. Only exists when an experiment is triggered.", + "description": "Metadata related to A/B testing experiment associated with this response. Only exists when an experiment is triggered.", "items": { "$ref": "GoogleCloudRetailV2ExperimentInfo" }, @@ -5506,6 +5559,10 @@ "description": "A token that can be sent as SearchRequest.page_token to retrieve the next page. If this field is omitted, there are no subsequent pages.", "type": "string" }, + "pinControlMetadata": { + "$ref": "GoogleCloudRetailV2PinControlMetadata", + "description": "Metadata for pin controls which were applicable to the request. This contains two map fields, one for all matched pins and one for pins which were matched but not applied. 
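Editorial aside on the LocalInventory changes above (the new optional availability field, placeId now marked Required): a sketch of one inventory entry. The products:addLocalInventories RPC used to apply it is assumed here and is not part of this hunk; resource names are placeholders.

    # Sketch: attach per-store inventory to a product, using the new
    # `availability` field on LocalInventory described above.
    from googleapiclient.discovery import build

    retail = build("retail", "v2")
    product = ("projects/my-project/locations/global/catalogs/default_catalog/"
               "branches/default_branch/products/sku-123")  # hypothetical

    local_inventory = {
        "placeId": "store-042",                      # required
        "availability": "IN_STOCK",                  # new optional field
        "fulfillmentTypes": ["pickup-in-store", "same-day-delivery"],
        "priceInfo": {"currencyCode": "USD", "price": 19.99},
    }
    # Assumes the existing products:addLocalInventories RPC (not shown in this hunk).
    retail.projects().locations().catalogs().branches().products().addLocalInventories(
        product=product,
        body={"localInventories": [local_inventory], "allowMissing": True},
    ).execute()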
The two maps are keyed by pin position, and the values are the product ids which were matched to that pin." + }, "queryExpansionInfo": { "$ref": "GoogleCloudRetailV2SearchResponseQueryExpansionInfo", "description": "Query expansion information for the returned results." @@ -6136,7 +6193,7 @@ "type": "string" }, "userAgent": { - "description": "User agent as included in the HTTP header. Required for getting SearchResponse.sponsored_results. The field must be a UTF-8 encoded string with a length limit of 1,000 characters. Otherwise, an INVALID_ARGUMENT error is returned. This should not be set when using the client side event reporting with GTM or JavaScript tag in UserEventService.CollectUserEvent or if direct_user_request is set.", + "description": "User agent as included in the HTTP header. The field must be a UTF-8 encoded string with a length limit of 1,000 characters. Otherwise, an INVALID_ARGUMENT error is returned. This should not be set when using the client side event reporting with GTM or JavaScript tag in UserEventService.CollectUserEvent or if direct_user_request is set.", "type": "string" }, "userId": { diff --git a/discovery/googleapis/run__v2.json b/discovery/googleapis/run__v2.json index 25eb47ec5..e494fac06 100644 --- a/discovery/googleapis/run__v2.json +++ b/discovery/googleapis/run__v2.json @@ -227,7 +227,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20241011", + "revision": "20241206", "rootUrl": "https://run.googleapis.com/", "servicePath": "", "title": "Cloud Run Admin API", @@ -1577,6 +1577,10 @@ "description": "Optional. Name of the function target if the source is a function source. Required for function builds.", "type": "string" }, + "projectDescriptor": { + "description": "Optional. project_descriptor stores the path to the project descriptor file. When empty, it means that there is no project descriptor file in the source.", + "type": "string" + }, "runtime": { "deprecated": true, "description": "The runtime name, e.g. 'go113'. Leave blank for generic builds.", @@ -2269,6 +2273,13 @@ "description": "Cloud Storage Bucket name.", "type": "string" }, + "mountOptions": { + "description": "A list of additional flags to pass to the gcsfuse CLI. Options should be specified without the leading \"--\".", + "items": { + "type": "string" + }, + "type": "array" + }, "readOnly": { "description": "If true, the volume will be mounted as read only for all mounts.", "type": "boolean" @@ -3032,6 +3043,25 @@ "description": "A reference to a customer managed encryption key (CMEK) to use to encrypt this container image. For more information, go to https://cloud.google.com/run/docs/securing/using-cmek", "type": "string" }, + "encryptionKeyRevocationAction": { + "description": "Optional. The action to take if the encryption key is revoked.", + "enum": [ + "ENCRYPTION_KEY_REVOCATION_ACTION_UNSPECIFIED", + "PREVENT_NEW", + "SHUTDOWN" + ], + "enumDescriptions": [ + "Unspecified", + "Prevents the creation of new instances.", + "Shuts down existing instances, and prevents creation of new ones." + ], + "type": "string" + }, + "encryptionKeyShutdownDuration": { + "description": "Optional. If encryption_key_revocation_action is SHUTDOWN, the duration before shutting down all instances. The minimum increment is 1 hour.", + "format": "google-duration", + "type": "string" + }, "executionEnvironment": { "description": "Optional. 
The sandbox environment to host this Revision.", "enum": [ @@ -3058,7 +3088,7 @@ "type": "object" }, "maxInstanceRequestConcurrency": { - "description": "Optional. Sets the maximum number of requests that each serving instance can receive. If not specified or 0, defaults to 80 when requested `CPU >= 1` and defaults to 1 when requested `CPU < 1`.", + "description": "Optional. Sets the maximum number of requests that each serving instance can receive. If not specified or 0, concurrency defaults to 80 when requested `CPU >= 1` and defaults to 1 when requested `CPU < 1`.", "format": "int32", "type": "integer" }, @@ -3144,7 +3174,7 @@ "id": "GoogleCloudRunV2SecretVolumeSource", "properties": { "defaultMode": { - "description": "Integer representation of mode bits to use on created files by default. Must be a value between 0000 and 0777 (octal), defaulting to 0444. Directories within the path are not affected by this setting. Notes * Internally, a umask of 0222 will be applied to any non-zero value. * This is an integer representation of the mode bits. So, the octal integer value should look exactly as the chmod numeric notation with a leading zero. Some examples: for chmod 777 (a=rwx), set to 0777 (octal) or 511 (base-10). For chmod 640 (u=rw,g=r), set to 0640 (octal) or 416 (base-10). For chmod 755 (u=rwx,g=rx,o=rx), set to 0755 (octal) or 493 (base-10). * This might be in conflict with other options that affect the file mode, like fsGroup, and the result can be other mode bits set. This might be in conflict with other options that affect the file mode, like fsGroup, and as a result, other mode bits could be set.", + "description": "Integer representation of mode bits to use on created files by default. Must be a value between 0000 and 0777 (octal), defaulting to 0444. Directories within the path are not affected by this setting. Notes * Internally, a umask of 0222 will be applied to any non-zero value. * This is an integer representation of the mode bits. So, the octal integer value should look exactly as the chmod numeric notation with a leading zero. Some examples: for chmod 640 (u=rw,g=r), set to 0640 (octal) or 416 (base-10). For chmod 755 (u=rwx,g=rx,o=rx), set to 0755 (octal) or 493 (base-10). * This might be in conflict with other options that affect the file mode, like fsGroup, and the result can be other mode bits set. This might be in conflict with other options that affect the file mode, like fsGroup, and as a result, other mode bits could be set.", "format": "int32", "type": "integer" }, @@ -3400,6 +3430,11 @@ "description": "Scaling settings applied at the service level rather than at the revision level.", "id": "GoogleCloudRunV2ServiceScaling", "properties": { + "manualInstanceCount": { + "description": "Optional. total instance count for the service in manual scaling mode. This number of instances is divided among all revisions with specified traffic based on the percent of traffic they are receiving.", + "format": "int32", + "type": "integer" + }, "minInstanceCount": { "description": "Optional. total min instances for the service. This number of instances is divided among all revisions with specified traffic based on the percent of traffic they are receiving.", "format": "int32", @@ -3857,7 +3892,7 @@ "id": "GoogleCloudRunV2VersionToPath", "properties": { "mode": { - "description": "Integer octal mode bits to use on this file, must be a value between 01 and 0777 (octal). If 0 or not set, the Volume's default mode will be used. 
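Editorial aside on the new Cloud Run fields above (GCS volume mountOptions and ServiceScaling.manualInstanceCount): a sketch of a service fragment that uses both. The services.patch call, image, and resource names are assumptions for illustration, not part of this hunk.

    # Sketch: a Cloud Run service using gcsfuse mount options (flags without the
    # leading "--") and service-level manual scaling.
    from googleapiclient.discovery import build

    run = build("run", "v2")
    service_name = "projects/my-project/locations/us-central1/services/my-svc"  # hypothetical

    service = {
        "scaling": {"manualInstanceCount": 3},       # new ServiceScaling field
        "template": {
            "volumes": [{
                "name": "bucket",
                "gcs": {
                    "bucket": "my-bucket",
                    "readOnly": True,
                    "mountOptions": ["implicit-dirs", "only-dir=reports"],
                },
            }],
            "containers": [{
                "image": "us-docker.pkg.dev/my-project/repo/app:latest",
                "volumeMounts": [{"name": "bucket", "mountPath": "/mnt/bucket"}],
            }],
        },
    }
    # Assumes the standard services.patch method (not shown in the hunk above).
    run.projects().locations().services().patch(name=service_name, body=service).execute()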
Notes * Internally, a umask of 0222 will be applied to any non-zero value. * This is an integer representation of the mode bits. So, the octal integer value should look exactly as the chmod numeric notation with a leading zero. Some examples: for chmod 777 (a=rwx), set to 0777 (octal) or 511 (base-10). For chmod 640 (u=rw,g=r), set to 0640 (octal) or 416 (base-10). For chmod 755 (u=rwx,g=rx,o=rx), set to 0755 (octal) or 493 (base-10). * This might be in conflict with other options that affect the file mode, like fsGroup, and the result can be other mode bits set.", + "description": "Integer octal mode bits to use on this file, must be a value between 01 and 0777 (octal). If 0 or not set, the Volume's default mode will be used. Notes * Internally, a umask of 0222 will be applied to any non-zero value. * This is an integer representation of the mode bits. So, the octal integer value should look exactly as the chmod numeric notation with a leading zero. Some examples: for chmod 640 (u=rw,g=r), set to 0640 (octal) or 416 (base-10). For chmod 755 (u=rwx,g=rx,o=rx), set to 0755 (octal) or 493 (base-10). * This might be in conflict with other options that affect the file mode, like fsGroup, and the result can be other mode bits set.", "format": "int32", "type": "integer" }, diff --git a/discovery/googleapis/secretmanager__v1.json b/discovery/googleapis/secretmanager__v1.json index 51a300fee..0b6e683a5 100644 --- a/discovery/googleapis/secretmanager__v1.json +++ b/discovery/googleapis/secretmanager__v1.json @@ -107,7 +107,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20240924", + "revision": "20241114", "rootUrl": "https://secretmanager.googleapis.com/", "servicePath": "", "title": "Secret Manager API", @@ -1823,7 +1823,7 @@ "type": "object" }, "UserManaged": { - "description": "A replication policy that replicates the Secret payload into the locations specified in Secret.replication.user_managed.replicas", + "description": "A replication policy that replicates the Secret payload into the locations specified in Replication.UserManaged.replicas", "id": "UserManaged", "properties": { "replicas": { diff --git a/discovery/googleapis/securitycenter__v1.json b/discovery/googleapis/securitycenter__v1.json index 2e9007bc0..7962d9787 100644 --- a/discovery/googleapis/securitycenter__v1.json +++ b/discovery/googleapis/securitycenter__v1.json @@ -42,7 +42,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20241018", + "revision": "20241206", "rootUrl": "https://securitycenter.googleapis.com/", "servicePath": "", "title": "Security Command Center API", @@ -1513,6 +1513,7 @@ ], "parameters": { "compareDuration": { + "deprecated": true, "description": "When compare_duration is set, the ListFindingsResult's \"state_change\" attribute is updated to indicate whether the finding had its state changed, the finding's state remained unchanged, or if the finding was added in any state during the compare_duration period of time that precedes the read_time. This is the time between (read_time - compare_duration) and read_time. The state_change value is derived based on the presence and state of the finding at the two points in time. Intermediate state changes between the two times don't affect the result. For example, the results aren't affected if the finding is made inactive and then active again. 
Possible \"state_change\" values when compare_duration is specified: * \"CHANGED\": indicates that the finding was present and matched the given filter at the start of compare_duration, but changed its state at read_time. * \"UNCHANGED\": indicates that the finding was present and matched the given filter at the start of compare_duration and did not change state at read_time. * \"ADDED\": indicates that the finding did not match the given filter or was not present at the start of compare_duration, but was present at read_time. * \"REMOVED\": indicates that the finding was present and matched the filter at the start of compare_duration, but did not match the filter at read_time. If compare_duration is not specified, then the only possible state_change is \"UNUSED\", which will be the state_change set for all findings present at read_time.", "format": "google-duration", "location": "query", @@ -1553,6 +1554,7 @@ "type": "string" }, "readTime": { + "deprecated": true, "description": "Time used as a reference point when filtering findings. The filter is limited to findings existing at the supplied time and their values are those at that specific time. Absence of this field will default to the API's version of NOW.", "format": "google-datetime", "location": "query", @@ -1973,6 +1975,51 @@ } } }, + "attackPaths": { + "methods": { + "list": { + "description": "Lists the attack paths for a set of simulation results or valued resources and filter.", + "flatPath": "v1/organizations/{organizationsId}/attackPaths", + "httpMethod": "GET", + "id": "securitycenter.organizations.attackPaths.list", + "parameterOrder": [ + "parent" + ], + "parameters": { + "filter": { + "description": "The filter expression that filters the attack path in the response. Supported fields: * `valued_resources` supports =", + "location": "query", + "type": "string" + }, + "pageSize": { + "description": "The maximum number of results to return in a single response. Default is 10, minimum is 1, maximum is 1000.", + "format": "int32", + "location": "query", + "type": "integer" + }, + "pageToken": { + "description": "The value returned by the last `ListAttackPathsResponse`; indicates that this is a continuation of a prior `ListAttackPaths` call, and that the system should return the next page of data.", + "location": "query", + "type": "string" + }, + "parent": { + "description": "Required. Name of parent to list attack paths. Valid formats: `organizations/{organization}`, `organizations/{organization}/simulations/{simulation}` `organizations/{organization}/simulations/{simulation}/attackExposureResults/{attack_exposure_result_v2}` `organizations/{organization}/simulations/{simulation}/valuedResources/{valued_resource}`", + "location": "path", + "pattern": "^organizations/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+parent}/attackPaths", + "response": { + "$ref": "ListAttackPathsResponse" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + } + } + }, "bigQueryExports": { "methods": { "create": { @@ -2858,7 +2905,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. 
On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/organizations/{organizationsId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "securitycenter.organizations.operations.cancel", @@ -3993,6 +4040,7 @@ ], "parameters": { "compareDuration": { + "deprecated": true, "description": "When compare_duration is set, the ListFindingsResult's \"state_change\" attribute is updated to indicate whether the finding had its state changed, the finding's state remained unchanged, or if the finding was added in any state during the compare_duration period of time that precedes the read_time. This is the time between (read_time - compare_duration) and read_time. The state_change value is derived based on the presence and state of the finding at the two points in time. Intermediate state changes between the two times don't affect the result. For example, the results aren't affected if the finding is made inactive and then active again. Possible \"state_change\" values when compare_duration is specified: * \"CHANGED\": indicates that the finding was present and matched the given filter at the start of compare_duration, but changed its state at read_time. * \"UNCHANGED\": indicates that the finding was present and matched the given filter at the start of compare_duration and did not change state at read_time. * \"ADDED\": indicates that the finding did not match the given filter or was not present at the start of compare_duration, but was present at read_time. * \"REMOVED\": indicates that the finding was present and matched the filter at the start of compare_duration, but did not match the filter at read_time. If compare_duration is not specified, then the only possible state_change is \"UNUSED\", which will be the state_change set for all findings present at read_time.", "format": "google-duration", "location": "query", @@ -4033,6 +4081,7 @@ "type": "string" }, "readTime": { + "deprecated": true, "description": "Time used as a reference point when filtering findings. The filter is limited to findings existing at the supplied time and their values are those at that specific time. Absence of this field will default to the API's version of NOW.", "format": "google-datetime", "location": "query", @@ -5660,6 +5709,7 @@ ], "parameters": { "compareDuration": { + "deprecated": true, "description": "When compare_duration is set, the ListFindingsResult's \"state_change\" attribute is updated to indicate whether the finding had its state changed, the finding's state remained unchanged, or if the finding was added in any state during the compare_duration period of time that precedes the read_time. This is the time between (read_time - compare_duration) and read_time. 
The state_change value is derived based on the presence and state of the finding at the two points in time. Intermediate state changes between the two times don't affect the result. For example, the results aren't affected if the finding is made inactive and then active again. Possible \"state_change\" values when compare_duration is specified: * \"CHANGED\": indicates that the finding was present and matched the given filter at the start of compare_duration, but changed its state at read_time. * \"UNCHANGED\": indicates that the finding was present and matched the given filter at the start of compare_duration and did not change state at read_time. * \"ADDED\": indicates that the finding did not match the given filter or was not present at the start of compare_duration, but was present at read_time. * \"REMOVED\": indicates that the finding was present and matched the filter at the start of compare_duration, but did not match the filter at read_time. If compare_duration is not specified, then the only possible state_change is \"UNUSED\", which will be the state_change set for all findings present at read_time.", "format": "google-duration", "location": "query", @@ -5700,6 +5750,7 @@ "type": "string" }, "readTime": { + "deprecated": true, "description": "Time used as a reference point when filtering findings. The filter is limited to findings existing at the supplied time and their values are those at that specific time. Absence of this field will default to the API's version of NOW.", "format": "google-datetime", "location": "query", @@ -7239,6 +7290,40 @@ }, "type": "object" }, + "DataRetentionDeletionEvent": { + "description": "Details about data retention deletion violations, in which the data is non-compliant based on their retention or deletion time, as defined in the applicable data security policy. The Data Retention Deletion (DRD) control is a control of the DSPM (Data Security Posture Management) suite that enables organizations to manage data retention and deletion policies in compliance with regulations, such as GDPR and CRPA. DRD supports two primary policy types: maximum storage length (max TTL) and minimum storage length (min TTL). Both are aimed at helping organizations meet regulatory and data management commitments.", + "id": "DataRetentionDeletionEvent", + "properties": { + "dataObjectCount": { + "description": "Number of objects that violated the policy for this resource. If the number is less than 1,000, then the value of this field is the exact number. If the number of objects that violated the policy is greater than or equal to 1,000, then the value of this field is 1000.", + "format": "int64", + "type": "string" + }, + "eventDetectionTime": { + "description": "Timestamp indicating when the event was detected.", + "format": "google-datetime", + "type": "string" + }, + "eventType": { + "description": "Type of the DRD event.", + "enum": [ + "EVENT_TYPE_UNSPECIFIED", + "EVENT_TYPE_MAX_TTL_EXCEEDED" + ], + "enumDescriptions": [ + "Unspecified event type.", + "The maximum retention time has been exceeded." + ], + "type": "string" + }, + "maxRetentionAllowed": { + "description": "Maximum duration of retention allowed from the DRD control. This comes from the DRD control where users set a max TTL for their data. For example, suppose that a user set the max TTL for a Cloud Storage bucket to 90 days. However, an object in that bucket is 100 days old. 
In this case, a DataRetentionDeletionEvent will be generated for that Cloud Storage bucket, and the max_retention_allowed is 90 days.", + "format": "google-duration", + "type": "string" + } + }, + "type": "object" + }, "Database": { "description": "Represents database access information, such as queries. A database may be a sub-resource of an instance (as in the case of Cloud SQL instances or Cloud Spanner instances), or the database instance itself. Some database resources might not have the [full resource name](https://google.aip.dev/122#full-resource-names) populated because these resource types, such as Cloud SQL databases, are not yet supported by Cloud Asset Inventory. In these cases only the display name is provided.", "id": "Database", @@ -7289,6 +7374,17 @@ }, "type": "object" }, + "Disk": { + "description": "Contains information about the disk associated with the finding.", + "id": "Disk", + "properties": { + "name": { + "description": "The name of the disk, for example, \"https://www.googleapis.com/compute/v1/projects/project-id/zones/zone-id/disks/disk-id\".", + "type": "string" + } + }, + "type": "object" + }, "DiskPath": { "description": "Path of the file in terms of underlying disk/partition identifiers.", "id": "DiskPath", @@ -7324,6 +7420,22 @@ "description": "An EffectiveEventThreatDetectionCustomModule is the representation of an Event Threat Detection custom module at a specified level of the resource hierarchy: organization, folder, or project. If a custom module is inherited from a parent organization or folder, the value of the `enablement_state` property in EffectiveEventThreatDetectionCustomModule is set to the value that is effective in the parent, instead of `INHERITED`. For example, if the module is enabled in a parent organization or folder, the effective `enablement_state` for the module in all child folders or projects is also `enabled`. EffectiveEventThreatDetectionCustomModule is read-only.", "id": "EffectiveEventThreatDetectionCustomModule", "properties": { + "cloudProvider": { + "description": "The cloud provider of the custom module.", + "enum": [ + "CLOUD_PROVIDER_UNSPECIFIED", + "GOOGLE_CLOUD_PLATFORM", + "AMAZON_WEB_SERVICES", + "MICROSOFT_AZURE" + ], + "enumDescriptions": [ + "Unspecified cloud provider.", + "Google Cloud Platform.", + "Amazon Web Services.", + "Microsoft Azure." + ], + "type": "string" + }, "config": { "additionalProperties": { "description": "Properties of the object.", @@ -7401,6 +7513,22 @@ "readOnly": true, "type": "string" }, + "cloudProvider": { + "description": "The cloud provider of the custom module.", + "enum": [ + "CLOUD_PROVIDER_UNSPECIFIED", + "GOOGLE_CLOUD_PLATFORM", + "AMAZON_WEB_SERVICES", + "MICROSOFT_AZURE" + ], + "enumDescriptions": [ + "Unspecified cloud provider.", + "Google Cloud.", + "Amazon Web Services (AWS).", + "Microsoft Azure." + ], + "type": "string" + }, "config": { "additionalProperties": { "description": "Properties of the object.", @@ -7647,6 +7775,13 @@ }, "type": "array" }, + "dataRetentionDeletionEvents": { + "description": "Data retention deletion events associated with the finding.", + "items": { + "$ref": "DataRetentionDeletionEvent" + }, + "type": "array" + }, "database": { "$ref": "Database", "description": "Database associated with the finding." @@ -7655,6 +7790,10 @@ "description": "Contains more details about the finding.", "type": "string" }, + "disk": { + "$ref": "Disk", + "description": "Disk associated with the finding." 
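Reviewer note: the v1 Finding message gains two fields in this revision, dataRetentionDeletionEvents and disk. A hedged Python sketch of reading them from a ListFindings response over plain HTTP; the access token and organization ID are placeholders, and the endpoint and response field names follow the v1 surface described in this file:

    import requests

    ACCESS_TOKEN = "ya29.example-token"     # placeholder, not from this diff
    PARENT = "organizations/123/sources/-"  # "-" lists findings across sources

    resp = requests.get(
        f"https://securitycenter.googleapis.com/v1/{PARENT}/findings",
        headers={"Authorization": f"Bearer {ACCESS_TOKEN}"},
        params={"pageSize": 10},
    )
    resp.raise_for_status()

    for result in resp.json().get("listFindingsResults", []):
        finding = result.get("finding", {})
        # Fields added in this revision.
        disk_name = finding.get("disk", {}).get("name")
        for event in finding.get("dataRetentionDeletionEvents", []):
            print(finding.get("name"), disk_name,
                  event.get("eventType"), event.get("maxRetentionAllowed"))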
+ }, "eventTime": { "description": "The time the finding was first detected. If an existing finding is updated, then this is the time the update occurred. For example, if the finding represents an open firewall, this property captures the time the detector believes the firewall became open. The accuracy is determined by the detector. If the finding is later resolved, then this time reflects when the finding was resolved. This must not be set to a value greater than the current timestamp.", "format": "google-datetime", @@ -8110,6 +8249,22 @@ "description": "An EffectiveSecurityHealthAnalyticsCustomModule is the representation of a Security Health Analytics custom module at a specified level of the resource hierarchy: organization, folder, or project. If a custom module is inherited from a parent organization or folder, the value of the `enablementState` property in EffectiveSecurityHealthAnalyticsCustomModule is set to the value that is effective in the parent, instead of `INHERITED`. For example, if the module is enabled in a parent organization or folder, the effective enablement_state for the module in all child folders or projects is also `enabled`. EffectiveSecurityHealthAnalyticsCustomModule is read-only.", "id": "GoogleCloudSecuritycenterV1EffectiveSecurityHealthAnalyticsCustomModule", "properties": { + "cloudProvider": { + "description": "The cloud provider of the custom module.", + "enum": [ + "CLOUD_PROVIDER_UNSPECIFIED", + "GOOGLE_CLOUD_PLATFORM", + "AMAZON_WEB_SERVICES", + "MICROSOFT_AZURE" + ], + "enumDescriptions": [ + "Unspecified cloud provider.", + "Google Cloud Platform.", + "Amazon Web Services.", + "Microsoft Azure." + ], + "type": "string" + }, "customConfig": { "$ref": "GoogleCloudSecuritycenterV1CustomConfig", "description": "Output only. The user-specified configuration for the module.", @@ -8468,7 +8623,7 @@ "description": "A mapping of the sensitivity on Sensitive Data Protection finding to resource values. This mapping can only be used in combination with a resource_type that is related to BigQuery, e.g. \"bigquery.googleapis.com/Dataset\"." }, "tagValues": { - "description": "Required. Tag values combined with `AND` to check against. Values in the form \"tagValues/123\" Example: `[ \"tagValues/123\", \"tagValues/456\", \"tagValues/789\" ]` https://cloud.google.com/resource-manager/docs/tags/tags-creating-and-managing", + "description": "Required. Tag values combined with `AND` to check against. For Google Cloud resources, they are tag value IDs in the form of \"tagValues/123\". Example: `[ \"tagValues/123\", \"tagValues/456\", \"tagValues/789\" ]` https://cloud.google.com/resource-manager/docs/tags/tags-creating-and-managing", "items": { "type": "string" }, @@ -8520,6 +8675,22 @@ "readOnly": true, "type": "string" }, + "cloudProvider": { + "description": "The cloud provider of the custom module.", + "enum": [ + "CLOUD_PROVIDER_UNSPECIFIED", + "GOOGLE_CLOUD_PLATFORM", + "AMAZON_WEB_SERVICES", + "MICROSOFT_AZURE" + ], + "enumDescriptions": [ + "Unspecified cloud provider.", + "Google Cloud.", + "Amazon Web Services (AWS).", + "Microsoft Azure." + ], + "type": "string" + }, "customConfig": { "$ref": "GoogleCloudSecuritycenterV1CustomConfig", "description": "The user specified custom configuration for the module." 
@@ -9830,6 +10001,40 @@ }, "type": "object" }, + "GoogleCloudSecuritycenterV2DataRetentionDeletionEvent": { + "description": "Details about data retention deletion violations, in which the data is non-compliant based on their retention or deletion time, as defined in the applicable data security policy. The Data Retention Deletion (DRD) control is a control of the DSPM (Data Security Posture Management) suite that enables organizations to manage data retention and deletion policies in compliance with regulations, such as GDPR and CRPA. DRD supports two primary policy types: maximum storage length (max TTL) and minimum storage length (min TTL). Both are aimed at helping organizations meet regulatory and data management commitments.", + "id": "GoogleCloudSecuritycenterV2DataRetentionDeletionEvent", + "properties": { + "dataObjectCount": { + "description": "Number of objects that violated the policy for this resource. If the number is less than 1,000, then the value of this field is the exact number. If the number of objects that violated the policy is greater than or equal to 1,000, then the value of this field is 1000.", + "format": "int64", + "type": "string" + }, + "eventDetectionTime": { + "description": "Timestamp indicating when the event was detected.", + "format": "google-datetime", + "type": "string" + }, + "eventType": { + "description": "Type of the DRD event.", + "enum": [ + "EVENT_TYPE_UNSPECIFIED", + "EVENT_TYPE_MAX_TTL_EXCEEDED" + ], + "enumDescriptions": [ + "Unspecified event type.", + "The maximum retention time has been exceeded." + ], + "type": "string" + }, + "maxRetentionAllowed": { + "description": "Maximum duration of retention allowed from the DRD control. This comes from the DRD control where users set a max TTL for their data. For example, suppose that a user set the max TTL for a Cloud Storage bucket to 90 days. However, an object in that bucket is 100 days old. In this case, a DataRetentionDeletionEvent will be generated for that Cloud Storage bucket, and the max_retention_allowed is 90 days.", + "format": "google-duration", + "type": "string" + } + }, + "type": "object" + }, "GoogleCloudSecuritycenterV2Database": { "description": "Represents database access information, such as queries. A database may be a sub-resource of an instance (as in the case of Cloud SQL instances or Cloud Spanner instances), or the database instance itself. Some database resources might not have the [full resource name](https://google.aip.dev/122#full-resource-names) populated because these resource types, such as Cloud SQL databases, are not yet supported by Cloud Asset Inventory. 
In these cases only the display name is provided.", "id": "GoogleCloudSecuritycenterV2Database", @@ -9880,6 +10085,17 @@ }, "type": "object" }, + "GoogleCloudSecuritycenterV2Disk": { + "description": "Contains information about the disk associated with the finding.", + "id": "GoogleCloudSecuritycenterV2Disk", + "properties": { + "name": { + "description": "The name of the disk, for example, \"https://www.googleapis.com/compute/v1/projects/project-id/zones/zone-id/disks/disk-id\".", + "type": "string" + } + }, + "type": "object" + }, "GoogleCloudSecuritycenterV2DiskPath": { "description": "Path of the file in terms of underlying disk/partition identifiers.", "id": "GoogleCloudSecuritycenterV2DiskPath", @@ -10155,6 +10371,13 @@ }, "type": "array" }, + "dataRetentionDeletionEvents": { + "description": "Data retention deletion events associated with the finding.", + "items": { + "$ref": "GoogleCloudSecuritycenterV2DataRetentionDeletionEvent" + }, + "type": "array" + }, "database": { "$ref": "GoogleCloudSecuritycenterV2Database", "description": "Database associated with the finding." @@ -10163,6 +10386,10 @@ "description": "Contains more details about the finding.", "type": "string" }, + "disk": { + "$ref": "GoogleCloudSecuritycenterV2Disk", + "description": "Disk associated with the finding." + }, "eventTime": { "description": "The time the finding was first detected. If an existing finding is updated, then this is the time the update occurred. For example, if the finding represents an open firewall, this property captures the time the detector believes the firewall became open. The accuracy is determined by the detector. If the finding is later resolved, then this time reflects when the finding was resolved. This must not be set to a value greater than the current timestamp.", "format": "google-datetime", @@ -10508,6 +10735,403 @@ }, "type": "object" }, + "GoogleCloudSecuritycenterV2Issue": { + "description": "Security Command Center Issue.", + "id": "GoogleCloudSecuritycenterV2Issue", + "properties": { + "createTime": { + "description": "Output only. The time the issue was created.", + "format": "google-datetime", + "readOnly": true, + "type": "string" + }, + "description": { + "description": "The description of the issue in Markdown format.", + "type": "string" + }, + "detection": { + "description": "The finding category or rule name that generated the issue.", + "type": "string" + }, + "domains": { + "description": "The domains of the issue.", + "items": { + "$ref": "GoogleCloudSecuritycenterV2IssueDomain" + }, + "type": "array" + }, + "exposureScore": { + "description": "The exposure score of the issue.", + "format": "double", + "type": "number" + }, + "issueType": { + "description": "The type of the issue.", + "enum": [ + "ISSUE_TYPE_UNSPECIFIED", + "CHOKEPOINT", + "TOXIC_COMBINATION", + "INSIGHT" + ], + "enumDescriptions": [ + "Unspecified issue type.", + "Chokepoint issue type.", + "Toxic combination issue type.", + "Insight issue type." + ], + "type": "string" + }, + "lastObservationTime": { + "description": "The time the issue was last observed.", + "format": "google-datetime", + "type": "string" + }, + "mute": { + "$ref": "GoogleCloudSecuritycenterV2IssueMute", + "description": "The mute information of the issue." + }, + "name": { + "description": "Identifier. The name of the issue. 
Format: organizations/{organization}/locations/{location}/issues/{issue}", + "type": "string" + }, + "primaryResource": { + "$ref": "GoogleCloudSecuritycenterV2IssueResource", + "description": "The primary resource associated with the issue." + }, + "relatedFindings": { + "description": "The findings related to the issue.", + "items": { + "$ref": "GoogleCloudSecuritycenterV2IssueFinding" + }, + "type": "array" + }, + "remediations": { + "description": "Approaches to remediate the issue in Markdown format.", + "items": { + "type": "string" + }, + "type": "array" + }, + "secondaryResources": { + "description": "Additional resources associated with the issue.", + "items": { + "$ref": "GoogleCloudSecuritycenterV2IssueResource" + }, + "type": "array" + }, + "securityContexts": { + "description": "The security context of the issue.", + "items": { + "$ref": "GoogleCloudSecuritycenterV2IssueSecurityContext" + }, + "type": "array" + }, + "severity": { + "description": "The severity of the issue.", + "enum": [ + "SEVERITY_UNSPECIFIED", + "CRITICAL", + "HIGH", + "MEDIUM", + "LOW" + ], + "enumDescriptions": [ + "Unspecified severity.", + "Critical severity.", + "High severity.", + "Medium severity.", + "Low severity." + ], + "type": "string" + }, + "state": { + "description": "Output only. The state of the issue.", + "enum": [ + "STATE_UNSPECIFIED", + "ACTIVE", + "INACTIVE" + ], + "enumDescriptions": [ + "Unspecified state.", + "Active state.", + "Inactive state." + ], + "readOnly": true, + "type": "string" + }, + "updateTime": { + "description": "Output only. The time the issue was last updated.", + "format": "google-datetime", + "readOnly": true, + "type": "string" + } + }, + "type": "object" + }, + "GoogleCloudSecuritycenterV2IssueDomain": { + "description": "The domains of an issue.", + "id": "GoogleCloudSecuritycenterV2IssueDomain", + "properties": { + "domainCategory": { + "description": "The domain category of the issue.", + "enum": [ + "DOMAIN_CATEGORY_UNSPECIFIED", + "AI", + "CODE", + "CONTAINER", + "DATA", + "IDENTITY_AND_ACCESS", + "VULNERABILITY" + ], + "enumDescriptions": [ + "Unspecified domain category.", + "Issues in the AI domain.", + "Issues in the code domain.", + "Issues in the container domain.", + "Issues in the data domain.", + "Issues in the identity and access domain.", + "Issues in the vulnerability domain." + ], + "type": "string" + } + }, + "type": "object" + }, + "GoogleCloudSecuritycenterV2IssueFinding": { + "description": "Finding related to an issue.", + "id": "GoogleCloudSecuritycenterV2IssueFinding", + "properties": { + "cve": { + "$ref": "GoogleCloudSecuritycenterV2IssueFindingCve", + "description": "The CVE of the finding." + }, + "name": { + "description": "The name of the finding.", + "type": "string" + }, + "securityBulletin": { + "$ref": "GoogleCloudSecuritycenterV2IssueFindingSecurityBulletin", + "description": "The security bulletin of the finding." 
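Reviewer note: GoogleCloudSecuritycenterV2Issue above is a read-mostly resource, so consuming it is mostly field access. A small sketch that tallies issues by severity and pulls out toxic combinations, assuming a list of issue dicts shaped like this schema; how the list is fetched is outside this diff:

    from collections import Counter

    def summarize_issues(issues):
        """Count v2 Issue resources by severity and collect the names of
        TOXIC_COMBINATION issues, using the enum values defined above."""
        by_severity = Counter(
            issue.get("severity", "SEVERITY_UNSPECIFIED") for issue in issues)
        toxic = [issue["name"] for issue in issues
                 if issue.get("issueType") == "TOXIC_COMBINATION"]
        return by_severity, toxic

    severities, toxic_issues = summarize_issues([
        {"name": "organizations/123/locations/global/issues/example",
         "severity": "CRITICAL", "issueType": "TOXIC_COMBINATION"},
    ])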
+ } + }, + "type": "object" + }, + "GoogleCloudSecuritycenterV2IssueFindingCve": { + "description": "The CVE of the finding.", + "id": "GoogleCloudSecuritycenterV2IssueFindingCve", + "properties": { + "name": { + "description": "The CVE name.", + "type": "string" + } + }, + "type": "object" + }, + "GoogleCloudSecuritycenterV2IssueFindingSecurityBulletin": { + "description": "The security bulletin of the finding.", + "id": "GoogleCloudSecuritycenterV2IssueFindingSecurityBulletin", + "properties": { + "name": { + "description": "The security bulletin name.", + "type": "string" + } + }, + "type": "object" + }, + "GoogleCloudSecuritycenterV2IssueMute": { + "description": "The mute information of the issue.", + "id": "GoogleCloudSecuritycenterV2IssueMute", + "properties": { + "muteInitiator": { + "description": "The email address of the user who last changed the mute state of the issue.", + "type": "string" + }, + "muteReason": { + "description": "The user-provided reason for muting the issue.", + "type": "string" + }, + "muteState": { + "description": "Output only. The mute state of the issue.", + "enum": [ + "MUTE_STATE_UNSPECIFIED", + "NOT_MUTED", + "MUTED" + ], + "enumDescriptions": [ + "Unspecified mute state.", + "Not muted.", + "Muted." + ], + "readOnly": true, + "type": "string" + }, + "muteUpdateTime": { + "description": "The time the issue was muted.", + "format": "google-datetime", + "type": "string" + } + }, + "type": "object" + }, + "GoogleCloudSecuritycenterV2IssueResource": { + "description": "A resource associated with the an issue.", + "id": "GoogleCloudSecuritycenterV2IssueResource", + "properties": { + "awsMetadata": { + "$ref": "GoogleCloudSecuritycenterV2IssueResourceAwsMetadata", + "description": "The AWS metadata of the resource associated with the issue. Only populated for AWS resources." + }, + "azureMetadata": { + "$ref": "GoogleCloudSecuritycenterV2IssueResourceAzureMetadata", + "description": "The Azure metadata of the resource associated with the issue. Only populated for Azure resources." + }, + "cloudProvider": { + "description": "The cloud provider of the resource associated with the issue.", + "enum": [ + "CLOUD_PROVIDER_UNSPECIFIED", + "GOOGLE_CLOUD", + "AMAZON_WEB_SERVICES", + "MICROSOFT_AZURE" + ], + "enumDescriptions": [ + "Unspecified cloud provider.", + "Google Cloud.", + "Amazon Web Services.", + "Microsoft Azure." + ], + "type": "string" + }, + "displayName": { + "description": "The resource-type specific display name of the resource associated with the issue.", + "type": "string" + }, + "googleCloudMetadata": { + "$ref": "GoogleCloudSecuritycenterV2IssueResourceGoogleCloudMetadata", + "description": "The Google Cloud metadata of the resource associated with the issue. Only populated for Google Cloud resources." + }, + "name": { + "description": "The full resource name of the resource associated with the issue.", + "type": "string" + }, + "type": { + "description": "The type of the resource associated with the issue.", + "type": "string" + } + }, + "type": "object" + }, + "GoogleCloudSecuritycenterV2IssueResourceAwsMetadata": { + "description": "The AWS metadata of a resource associated with an issue.", + "id": "GoogleCloudSecuritycenterV2IssueResourceAwsMetadata", + "properties": { + "account": { + "$ref": "GoogleCloudSecuritycenterV2IssueResourceAwsMetadataAwsAccount", + "description": "The AWS account of the resource associated with the issue." 
+ } + }, + "type": "object" + }, + "GoogleCloudSecuritycenterV2IssueResourceAwsMetadataAwsAccount": { + "description": "The AWS account of the resource associated with the issue.", + "id": "GoogleCloudSecuritycenterV2IssueResourceAwsMetadataAwsAccount", + "properties": { + "id": { + "description": "The AWS account ID of the resource associated with the issue.", + "type": "string" + }, + "name": { + "description": "The AWS account name of the resource associated with the issue.", + "type": "string" + } + }, + "type": "object" + }, + "GoogleCloudSecuritycenterV2IssueResourceAzureMetadata": { + "description": "The Azure metadata of a resource associated with an issue.", + "id": "GoogleCloudSecuritycenterV2IssueResourceAzureMetadata", + "properties": { + "subscription": { + "$ref": "GoogleCloudSecuritycenterV2IssueResourceAzureMetadataAzureSubscription", + "description": "The Azure subscription of the resource associated with the issue." + } + }, + "type": "object" + }, + "GoogleCloudSecuritycenterV2IssueResourceAzureMetadataAzureSubscription": { + "description": "The Azure subscription of the resource associated with the issue.", + "id": "GoogleCloudSecuritycenterV2IssueResourceAzureMetadataAzureSubscription", + "properties": { + "displayName": { + "description": "The Azure subscription display name of the resource associated with the issue.", + "type": "string" + }, + "id": { + "description": "The Azure subscription ID of the resource associated with the issue.", + "type": "string" + } + }, + "type": "object" + }, + "GoogleCloudSecuritycenterV2IssueResourceGoogleCloudMetadata": { + "description": "Google Cloud metadata of a resource associated with an issue.", + "id": "GoogleCloudSecuritycenterV2IssueResourceGoogleCloudMetadata", + "properties": { + "projectId": { + "description": "The project ID that the resource associated with the issue belongs to.", + "type": "string" + } + }, + "type": "object" + }, + "GoogleCloudSecuritycenterV2IssueSecurityContext": { + "description": "Security context associated with an issue.", + "id": "GoogleCloudSecuritycenterV2IssueSecurityContext", + "properties": { + "aggregatedCount": { + "$ref": "GoogleCloudSecuritycenterV2IssueSecurityContextAggregatedCount", + "description": "The aggregated count of the security context." + }, + "context": { + "$ref": "GoogleCloudSecuritycenterV2IssueSecurityContextContext", + "description": "The context of the security context." 
+ } + }, + "type": "object" + }, + "GoogleCloudSecuritycenterV2IssueSecurityContextAggregatedCount": { + "description": "Aggregated count of a security context.", + "id": "GoogleCloudSecuritycenterV2IssueSecurityContextAggregatedCount", + "properties": { + "key": { + "description": "Aggregation key.", + "type": "string" + }, + "value": { + "description": "Aggregation value.", + "format": "int32", + "type": "integer" + } + }, + "type": "object" + }, + "GoogleCloudSecuritycenterV2IssueSecurityContextContext": { + "description": "Context of a security context.", + "id": "GoogleCloudSecuritycenterV2IssueSecurityContextContext", + "properties": { + "type": { + "description": "Context type.", + "type": "string" + }, + "values": { + "description": "Context values.", + "items": { + "type": "string" + }, + "type": "array" + } + }, + "type": "object" + }, "GoogleCloudSecuritycenterV2KernelRootkit": { "description": "Kernel mode rootkit signatures.", "id": "GoogleCloudSecuritycenterV2KernelRootkit", @@ -11620,7 +12244,7 @@ "description": "A mapping of the sensitivity on Sensitive Data Protection finding to resource values. This mapping can only be used in combination with a resource_type that is related to BigQuery, e.g. \"bigquery.googleapis.com/Dataset\"." }, "tagValues": { - "description": "Tag values combined with `AND` to check against. Values in the form \"tagValues/123\" Example: `[ \"tagValues/123\", \"tagValues/456\", \"tagValues/789\" ]` https://cloud.google.com/resource-manager/docs/tags/tags-creating-and-managing", + "description": "Tag values combined with `AND` to check against. For Google Cloud resources, they are tag value IDs in the form of \"tagValues/123\". Example: `[ \"tagValues/123\", \"tagValues/456\", \"tagValues/789\" ]` https://cloud.google.com/resource-manager/docs/tags/tags-creating-and-managing", "items": { "type": "string" }, @@ -12036,6 +12660,7 @@ "id": "GroupFindingsRequest", "properties": { "compareDuration": { + "deprecated": true, "description": "When compare_duration is set, the GroupResult's \"state_change\" attribute is updated to indicate whether the finding had its state changed, the finding's state remained unchanged, or if the finding was added during the compare_duration period of time that precedes the read_time. This is the time between (read_time - compare_duration) and read_time. The state_change value is derived based on the presence and state of the finding at the two points in time. Intermediate state changes between the two times don't affect the result. For example, the results aren't affected if the finding is made inactive and then active again. Possible \"state_change\" values when compare_duration is specified: * \"CHANGED\": indicates that the finding was present and matched the given filter at the start of compare_duration, but changed its state at read_time. * \"UNCHANGED\": indicates that the finding was present and matched the given filter at the start of compare_duration and did not change state at read_time. * \"ADDED\": indicates that the finding did not match the given filter or was not present at the start of compare_duration, but was present at read_time. * \"REMOVED\": indicates that the finding was present and matched the filter at the start of compare_duration, but did not match the filter at read_time. If compare_duration is not specified, then the only possible state_change is \"UNUSED\", which will be the state_change set for all findings present at read_time. 
If this field is set then `state_change` must be a specified field in `group_by`.", "format": "google-duration", "type": "string" @@ -12058,6 +12683,7 @@ "type": "string" }, "readTime": { + "deprecated": true, "description": "Time used as a reference point when filtering findings. The filter is limited to findings existing at the supplied time and their values are those at that specific time. Absence of this field will default to the API's version of NOW.", "format": "google-datetime", "type": "string" @@ -13938,6 +14564,7 @@ "id": "SetFindingStateRequest", "properties": { "startTime": { + "deprecated": true, "description": "Optional. The time at which the updated state takes effect. If unset, defaults to the request time.", "format": "google-datetime", "type": "string" diff --git a/discovery/googleapis/securityposture__v1.json b/discovery/googleapis/securityposture__v1.json new file mode 100644 index 000000000..13293a6c9 --- /dev/null +++ b/discovery/googleapis/securityposture__v1.json @@ -0,0 +1,2134 @@ +{ + "auth": { + "oauth2": { + "scopes": { + "https://www.googleapis.com/auth/cloud-platform": { + "description": "See, edit, configure, and delete your Google Cloud data and see the email address for your Google Account." + } + } + } + }, + "basePath": "", + "baseUrl": "https://securityposture.googleapis.com/", + "batchPath": "batch", + "canonicalName": "Security Posture", + "description": "Defines, assesses, and monitors the overall status of your security in Google Cloud. You can use security postures to evaluate your current cloud security against defined benchmarks and help maintain the level of security that your organization requires. ", + "discoveryVersion": "v1", + "documentationLink": "https://cloud.google.com/security-command-center", + "icons": { + "x16": "http://www.google.com/images/icons/product/search-16.gif", + "x32": "http://www.google.com/images/icons/product/search-32.gif" + }, + "id": "securityposture:v1", + "kind": "discovery#restDescription", + "name": "securityposture", + "ownerDomain": "google.com", + "ownerName": "Google", + "protocol": "rest", + "revision": "20241212", + "rootUrl": "https://securityposture.googleapis.com/", + "servicePath": "", + "title": "Security Posture API", + "version": "v1", + "version_module": true, + "parameters": { + "$.xgafv": { + "description": "V1 error format.", + "enum": [ + "1", + "2" + ], + "enumDescriptions": [ + "v1 error format", + "v2 error format" + ], + "location": "query", + "type": "string" + }, + "access_token": { + "description": "OAuth access token.", + "location": "query", + "type": "string" + }, + "alt": { + "default": "json", + "description": "Data format for response.", + "enum": [ + "json", + "media", + "proto" + ], + "enumDescriptions": [ + "Responses with Content-Type of application/json", + "Media download with context-dependent Content-Type", + "Responses with Content-Type of application/x-protobuf" + ], + "location": "query", + "type": "string" + }, + "callback": { + "description": "JSONP", + "location": "query", + "type": "string" + }, + "fields": { + "description": "Selector specifying which fields to include in a partial response.", + "location": "query", + "type": "string" + }, + "key": { + "description": "API key. Your API key identifies your project and provides you with API access, quota, and reports. 
Required unless you provide an OAuth 2.0 token.", + "location": "query", + "type": "string" + }, + "oauth_token": { + "description": "OAuth 2.0 token for the current user.", + "location": "query", + "type": "string" + }, + "prettyPrint": { + "default": "true", + "description": "Returns response with indentations and line breaks.", + "location": "query", + "type": "boolean" + }, + "quotaUser": { + "description": "Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.", + "location": "query", + "type": "string" + }, + "uploadType": { + "description": "Legacy upload protocol for media (e.g. \"media\", \"multipart\").", + "location": "query", + "type": "string" + }, + "upload_protocol": { + "description": "Upload protocol for media (e.g. \"raw\", \"multipart\").", + "location": "query", + "type": "string" + } + }, + "resources": { + "organizations": { + "resources": { + "locations": { + "resources": { + "operations": { + "methods": { + "cancel": { + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", + "flatPath": "v1/organizations/{organizationsId}/locations/{locationsId}/operations/{operationsId}:cancel", + "httpMethod": "POST", + "id": "securityposture.organizations.locations.operations.cancel", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "The name of the operation resource to be cancelled.", + "location": "path", + "pattern": "^organizations/[^/]+/locations/[^/]+/operations/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+name}:cancel", + "request": { + "$ref": "CancelOperationRequest" + }, + "response": { + "$ref": "Empty" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "delete": { + "description": "Deletes a long-running operation. This method indicates that the client is no longer interested in the operation result. It does not cancel the operation. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`.", + "flatPath": "v1/organizations/{organizationsId}/locations/{locationsId}/operations/{operationsId}", + "httpMethod": "DELETE", + "id": "securityposture.organizations.locations.operations.delete", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "The name of the operation resource to be deleted.", + "location": "path", + "pattern": "^organizations/[^/]+/locations/[^/]+/operations/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+name}", + "response": { + "$ref": "Empty" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "get": { + "description": "Gets the latest state of a long-running operation. 
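Reviewer note: every mutating method in this new securityposture surface (create, patch, delete, extract, createIaCValidationReport) returns a long-running Operation, and the operations.get method above is what callers poll. A hedged Python sketch of that polling loop over plain HTTP; the token and operation name are placeholders, not values from this diff:

    import time
    import requests

    ACCESS_TOKEN = "ya29.example-token"   # placeholder
    OPERATION = "organizations/123/locations/global/operations/operation-456"  # placeholder

    def wait_for_operation(name, poll_seconds=5):
        """Poll securityposture operations.get until the Operation reports done,
        then return it (it carries either `response` or `error`)."""
        url = f"https://securityposture.googleapis.com/v1/{name}"
        headers = {"Authorization": f"Bearer {ACCESS_TOKEN}"}
        while True:
            op = requests.get(url, headers=headers).json()
            if op.get("done"):
                return op
            time.sleep(poll_seconds)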
Clients can use this method to poll the operation result at intervals as recommended by the API service.", + "flatPath": "v1/organizations/{organizationsId}/locations/{locationsId}/operations/{operationsId}", + "httpMethod": "GET", + "id": "securityposture.organizations.locations.operations.get", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "The name of the operation resource.", + "location": "path", + "pattern": "^organizations/[^/]+/locations/[^/]+/operations/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+name}", + "response": { + "$ref": "Operation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "list": { + "description": "Lists operations that match the specified filter in the request. If the server doesn't support this method, it returns `UNIMPLEMENTED`.", + "flatPath": "v1/organizations/{organizationsId}/locations/{locationsId}/operations", + "httpMethod": "GET", + "id": "securityposture.organizations.locations.operations.list", + "parameterOrder": [ + "name" + ], + "parameters": { + "filter": { + "description": "The standard list filter.", + "location": "query", + "type": "string" + }, + "name": { + "description": "The name of the operation's parent resource.", + "location": "path", + "pattern": "^organizations/[^/]+/locations/[^/]+$", + "required": true, + "type": "string" + }, + "pageSize": { + "description": "The standard list page size.", + "format": "int32", + "location": "query", + "type": "integer" + }, + "pageToken": { + "description": "The standard list page token.", + "location": "query", + "type": "string" + } + }, + "path": "v1/{+name}/operations", + "response": { + "$ref": "ListOperationsResponse" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + } + } + }, + "postureDeployments": { + "methods": { + "create": { + "description": "Creates a new PostureDeployment in a given project and location.", + "flatPath": "v1/organizations/{organizationsId}/locations/{locationsId}/postureDeployments", + "httpMethod": "POST", + "id": "securityposture.organizations.locations.postureDeployments.create", + "parameterOrder": [ + "parent" + ], + "parameters": { + "parent": { + "description": "Required. The parent resource name, in the format `organizations/{organization}/locations/global`.", + "location": "path", + "pattern": "^organizations/[^/]+/locations/[^/]+$", + "required": true, + "type": "string" + }, + "postureDeploymentId": { + "description": "Required. An identifier for the posture deployment.", + "location": "query", + "type": "string" + } + }, + "path": "v1/{+parent}/postureDeployments", + "request": { + "$ref": "PostureDeployment" + }, + "response": { + "$ref": "Operation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "delete": { + "description": "Deletes a PostureDeployment.", + "flatPath": "v1/organizations/{organizationsId}/locations/{locationsId}/postureDeployments/{postureDeploymentsId}", + "httpMethod": "DELETE", + "id": "securityposture.organizations.locations.postureDeployments.delete", + "parameterOrder": [ + "name" + ], + "parameters": { + "etag": { + "description": "Optional. An opaque identifier for the current version of the posture deployment. If you provide this value, then it must match the existing value. If the values don't match, then the request fails with an ABORTED error. 
If you omit this value, then the posture deployment is deleted regardless of its current `etag` value.", + "location": "query", + "type": "string" + }, + "name": { + "description": "Required. The name of the posture deployment, in the format `organizations/{organization}/locations/global/postureDeployments/{posture_id}`.", + "location": "path", + "pattern": "^organizations/[^/]+/locations/[^/]+/postureDeployments/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+name}", + "response": { + "$ref": "Operation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "get": { + "description": "Gets details for a PostureDeployment.", + "flatPath": "v1/organizations/{organizationsId}/locations/{locationsId}/postureDeployments/{postureDeploymentsId}", + "httpMethod": "GET", + "id": "securityposture.organizations.locations.postureDeployments.get", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "Required. The name of the PostureDeployment, in the format `organizations/{organization}/locations/global/postureDeployments/{posture_deployment_id}`.", + "location": "path", + "pattern": "^organizations/[^/]+/locations/[^/]+/postureDeployments/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+name}", + "response": { + "$ref": "PostureDeployment" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "list": { + "description": "Lists every PostureDeployment in a project and location.", + "flatPath": "v1/organizations/{organizationsId}/locations/{locationsId}/postureDeployments", + "httpMethod": "GET", + "id": "securityposture.organizations.locations.postureDeployments.list", + "parameterOrder": [ + "parent" + ], + "parameters": { + "filter": { + "description": "Optional. A filter to apply to the list of postures, in the format defined in [AIP-160: Filtering](https://google.aip.dev/160).", + "location": "query", + "type": "string" + }, + "pageSize": { + "description": "Optional. The maximum number of posture deployments to return. The default value is `500`. If you exceed the maximum value of `1000`, then the service uses the maximum value.", + "format": "int32", + "location": "query", + "type": "integer" + }, + "pageToken": { + "description": "Optional. A pagination token returned from a previous request to list posture deployments. Provide this token to retrieve the next page of results.", + "location": "query", + "type": "string" + }, + "parent": { + "description": "Required. The parent resource name, in the format `organizations/{organization}/locations/global`.", + "location": "path", + "pattern": "^organizations/[^/]+/locations/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+parent}/postureDeployments", + "response": { + "$ref": "ListPostureDeploymentsResponse" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "patch": { + "description": "Updates an existing PostureDeployment. To prevent concurrent updates from overwriting each other, always follow the read-modify-write pattern when you update a posture deployment: 1. Call GetPostureDeployment to get the current version of the deployment. 2. Update the fields in the deployment as needed. 3. Call UpdatePostureDeployment to update the deployment. Ensure that your request includes the `etag` value from the GetPostureDeployment response. 
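Reviewer note: the UpdatePostureDeployment description above prescribes a read-modify-write cycle keyed on etag. A sketch of that cycle over plain HTTP, under the same placeholder assumptions as the earlier examples; the token and resource names are illustrative, and the writable fields come from the updateMask description that follows:

    import requests

    ACCESS_TOKEN = "ya29.example-token"   # placeholder
    NAME = "organizations/123/locations/global/postureDeployments/example-deployment"
    BASE = "https://securityposture.googleapis.com/v1"
    HEADERS = {"Authorization": f"Bearer {ACCESS_TOKEN}"}

    # 1. Read the current deployment; the response includes its current etag.
    deployment = requests.get(f"{BASE}/{NAME}", headers=HEADERS).json()

    # 2. Change only fields allowed by the update mask (value is a placeholder).
    deployment["postureRevisionId"] = "example-revision-id"

    # 3. Write it back with the etag from step 1 still in the body, so a
    #    concurrent update fails with ABORTED instead of being overwritten.
    resp = requests.patch(
        f"{BASE}/{NAME}",
        headers=HEADERS,
        params={"updateMask": "posture_revision_id"},
        json=deployment,
    )
    resp.raise_for_status()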
**Important:** If you omit the `etag` when you call UpdatePostureDeployment, then the updated deployment unconditionally overwrites the existing deployment.", + "flatPath": "v1/organizations/{organizationsId}/locations/{locationsId}/postureDeployments/{postureDeploymentsId}", + "httpMethod": "PATCH", + "id": "securityposture.organizations.locations.postureDeployments.patch", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "Required. Identifier. The name of the posture deployment, in the format `organizations/{organization}/locations/global/postureDeployments/{deployment_id}`.", + "location": "path", + "pattern": "^organizations/[^/]+/locations/[^/]+/postureDeployments/[^/]+$", + "required": true, + "type": "string" + }, + "updateMask": { + "description": "Required. The fields in the PostureDeployment to update. You can update only the following fields: * PostureDeployment.posture_id * PostureDeployment.posture_revision_id", + "format": "google-fieldmask", + "location": "query", + "type": "string" + } + }, + "path": "v1/{+name}", + "request": { + "$ref": "PostureDeployment" + }, + "response": { + "$ref": "Operation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + } + } + }, + "postureTemplates": { + "methods": { + "get": { + "description": "Gets a single revision of a PostureTemplate.", + "flatPath": "v1/organizations/{organizationsId}/locations/{locationsId}/postureTemplates/{postureTemplatesId}", + "httpMethod": "GET", + "id": "securityposture.organizations.locations.postureTemplates.get", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "Required. The name of the PostureTemplate, in the format `organizations/{organization}/locations/global/postureTemplates/{posture_template}`.", + "location": "path", + "pattern": "^organizations/[^/]+/locations/[^/]+/postureTemplates/[^/]+$", + "required": true, + "type": "string" + }, + "revisionId": { + "description": "Optional. The posture template revision to retrieve. If not specified, the most recently updated revision is retrieved.", + "location": "query", + "type": "string" + } + }, + "path": "v1/{+name}", + "response": { + "$ref": "PostureTemplate" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "list": { + "description": "Lists every PostureTemplate in a given organization and location.", + "flatPath": "v1/organizations/{organizationsId}/locations/{locationsId}/postureTemplates", + "httpMethod": "GET", + "id": "securityposture.organizations.locations.postureTemplates.list", + "parameterOrder": [ + "parent" + ], + "parameters": { + "filter": { + "description": "Optional. A filter to apply to the list of postures, in the format defined in [AIP-160: Filtering](https://google.aip.dev/160).", + "location": "query", + "type": "string" + }, + "pageSize": { + "description": "Optional. The maximum number of posture templates to return. The default value is `500`. If you exceed the maximum value of `1000`, then the service uses the maximum value.", + "format": "int32", + "location": "query", + "type": "integer" + }, + "pageToken": { + "description": "Optional. A pagination token returned from a previous request to list posture templates. Provide this token to retrieve the next page of results.", + "location": "query", + "type": "string" + }, + "parent": { + "description": "Required. 
The parent resource name, in the format `organizations/{organization}/locations/global`.", + "location": "path", + "pattern": "^organizations/[^/]+/locations/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+parent}/postureTemplates", + "response": { + "$ref": "ListPostureTemplatesResponse" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + } + } + }, + "postures": { + "methods": { + "create": { + "description": "Creates a new Posture.", + "flatPath": "v1/organizations/{organizationsId}/locations/{locationsId}/postures", + "httpMethod": "POST", + "id": "securityposture.organizations.locations.postures.create", + "parameterOrder": [ + "parent" + ], + "parameters": { + "parent": { + "description": "Required. The parent resource name, in the format `organizations/{organization}/locations/global`.", + "location": "path", + "pattern": "^organizations/[^/]+/locations/[^/]+$", + "required": true, + "type": "string" + }, + "postureId": { + "description": "Required. An identifier for the posture.", + "location": "query", + "type": "string" + } + }, + "path": "v1/{+parent}/postures", + "request": { + "$ref": "Posture" + }, + "response": { + "$ref": "Operation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "delete": { + "description": "Deletes all revisions of a Posture. You can only delete a posture if none of its revisions are deployed.", + "flatPath": "v1/organizations/{organizationsId}/locations/{locationsId}/postures/{posturesId}", + "httpMethod": "DELETE", + "id": "securityposture.organizations.locations.postures.delete", + "parameterOrder": [ + "name" + ], + "parameters": { + "etag": { + "description": "Optional. An opaque identifier for the current version of the posture. If you provide this value, then it must match the existing value. If the values don't match, then the request fails with an ABORTED error. If you omit this value, then the posture is deleted regardless of its current `etag` value.", + "location": "query", + "type": "string" + }, + "name": { + "description": "Required. The name of the Posture, in the format `organizations/{organization}/locations/global/postures/{posture_id}`.", + "location": "path", + "pattern": "^organizations/[^/]+/locations/[^/]+/postures/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+name}", + "response": { + "$ref": "Operation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "extract": { + "description": "Extracts existing policies from an organization, folder, or project, and applies them to another organization, folder, or project as a Posture. If the other organization, folder, or project already has a posture, then the result of the long-running operation is an ALREADY_EXISTS error.", + "flatPath": "v1/organizations/{organizationsId}/locations/{locationsId}/postures:extract", + "httpMethod": "POST", + "id": "securityposture.organizations.locations.postures.extract", + "parameterOrder": [ + "parent" + ], + "parameters": { + "parent": { + "description": "Required. 
The parent resource name, in the format `organizations/{organization}/locations/global`.", + "location": "path", + "pattern": "^organizations/[^/]+/locations/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+parent}/postures:extract", + "request": { + "$ref": "ExtractPostureRequest" + }, + "response": { + "$ref": "Operation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "get": { + "description": "Gets a single revision of a Posture.", + "flatPath": "v1/organizations/{organizationsId}/locations/{locationsId}/postures/{posturesId}", + "httpMethod": "GET", + "id": "securityposture.organizations.locations.postures.get", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "Required. The name of the Posture, in the format `organizations/{organization}/locations/global/postures/{posture_id}`.", + "location": "path", + "pattern": "^organizations/[^/]+/locations/[^/]+/postures/[^/]+$", + "required": true, + "type": "string" + }, + "revisionId": { + "description": "Optional. The posture revision to retrieve. If not specified, the most recently updated revision is retrieved.", + "location": "query", + "type": "string" + } + }, + "path": "v1/{+name}", + "response": { + "$ref": "Posture" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "list": { + "description": "Lists the most recent revisions of all Posture resources in a specified organization and location.", + "flatPath": "v1/organizations/{organizationsId}/locations/{locationsId}/postures", + "httpMethod": "GET", + "id": "securityposture.organizations.locations.postures.list", + "parameterOrder": [ + "parent" + ], + "parameters": { + "filter": { + "description": "Optional. A filter to apply to the list of postures, in the format defined in [AIP-160: Filtering](https://google.aip.dev/160).", + "location": "query", + "type": "string" + }, + "pageSize": { + "description": "The maximum number of postures to return. The default value is `500`. If you exceed the maximum value of `1000`, then the service uses the maximum value.", + "format": "int32", + "location": "query", + "type": "integer" + }, + "pageToken": { + "description": "A pagination token returned from a previous request to list postures. Provide this token to retrieve the next page of results.", + "location": "query", + "type": "string" + }, + "parent": { + "description": "Required. The parent resource name, in the format `organizations/{organization}/locations/global`.", + "location": "path", + "pattern": "^organizations/[^/]+/locations/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+parent}/postures", + "response": { + "$ref": "ListPosturesResponse" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "listRevisions": { + "description": "Lists all revisions of a single Posture.", + "flatPath": "v1/organizations/{organizationsId}/locations/{locationsId}/postures/{posturesId}:listRevisions", + "httpMethod": "GET", + "id": "securityposture.organizations.locations.postures.listRevisions", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "Required. The name of the Posture, in the format `organizations/{organization}/locations/global/postures/{posture_id}`.", + "location": "path", + "pattern": "^organizations/[^/]+/locations/[^/]+/postures/[^/]+$", + "required": true, + "type": "string" + }, + "pageSize": { + "description": "Optional. The maximum number of posture revisions to return. 
The default value is `500`. If you exceed the maximum value of `1000`, then the service uses the maximum value.", + "format": "int32", + "location": "query", + "type": "integer" + }, + "pageToken": { + "description": "Optional. A pagination token from a previous request to list posture revisions. Provide this token to retrieve the next page of results.", + "location": "query", + "type": "string" + } + }, + "path": "v1/{+name}:listRevisions", + "response": { + "$ref": "ListPostureRevisionsResponse" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "patch": { + "description": "Updates a revision of an existing Posture. If the posture revision that you update is currently deployed, then a new revision of the posture is created. To prevent concurrent updates from overwriting each other, always follow the read-modify-write pattern when you update a posture: 1. Call GetPosture to get the current version of the posture. 2. Update the fields in the posture as needed. 3. Call UpdatePosture to update the posture. Ensure that your request includes the `etag` value from the GetPosture response. **Important:** If you omit the `etag` when you call UpdatePosture, then the updated posture unconditionally overwrites the existing posture.", + "flatPath": "v1/organizations/{organizationsId}/locations/{locationsId}/postures/{posturesId}", + "httpMethod": "PATCH", + "id": "securityposture.organizations.locations.postures.patch", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "Required. Identifier. The name of the posture, in the format `organizations/{organization}/locations/global/postures/{posture_id}`.", + "location": "path", + "pattern": "^organizations/[^/]+/locations/[^/]+/postures/[^/]+$", + "required": true, + "type": "string" + }, + "revisionId": { + "description": "Required. The revision ID of the posture to update. If the posture revision that you update is currently deployed, then a new revision of the posture is created.", + "location": "query", + "type": "string" + }, + "updateMask": { + "description": "Required. The fields in the Posture to update. You can update only the following fields: * Posture.description * Posture.policy_sets * Posture.state", + "format": "google-fieldmask", + "location": "query", + "type": "string" + } + }, + "path": "v1/{+name}", + "request": { + "$ref": "Posture" + }, + "response": { + "$ref": "Operation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + } + } + }, + "reports": { + "methods": { + "createIaCValidationReport": { + "description": "Validates a specified infrastructure-as-code (IaC) configuration, and creates a Report with the validation results. Only Terraform configurations are supported. Only modified assets are validated.", + "flatPath": "v1/organizations/{organizationsId}/locations/{locationsId}/reports:createIaCValidationReport", + "httpMethod": "POST", + "id": "securityposture.organizations.locations.reports.createIaCValidationReport", + "parameterOrder": [ + "parent" + ], + "parameters": { + "parent": { + "description": "Required. 
The parent resource name, in the format `organizations/{organization}/locations/global`.", + "location": "path", + "pattern": "^organizations/[^/]+/locations/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+parent}/reports:createIaCValidationReport", + "request": { + "$ref": "CreateIaCValidationReportRequest" + }, + "response": { + "$ref": "Operation" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "get": { + "description": "Gets details for a Report.", + "flatPath": "v1/organizations/{organizationsId}/locations/{locationsId}/reports/{reportsId}", + "httpMethod": "GET", + "id": "securityposture.organizations.locations.reports.get", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "Required. The name of the report, in the format `organizations/{organization}/locations/global/reports/{report_id}`.", + "location": "path", + "pattern": "^organizations/[^/]+/locations/[^/]+/reports/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+name}", + "response": { + "$ref": "Report" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "list": { + "description": "Lists every Report in a given organization and location.", + "flatPath": "v1/organizations/{organizationsId}/locations/{locationsId}/reports", + "httpMethod": "GET", + "id": "securityposture.organizations.locations.reports.list", + "parameterOrder": [ + "parent" + ], + "parameters": { + "filter": { + "description": "Optional. A filter to apply to the list of reports, in the format defined in [AIP-160: Filtering](https://google.aip.dev/160).", + "location": "query", + "type": "string" + }, + "pageSize": { + "description": "Optional. The maximum number of reports to return. The default value is `500`. If you exceed the maximum value of `1000`, then the service uses the maximum value.", + "format": "int32", + "location": "query", + "type": "integer" + }, + "pageToken": { + "description": "Optional. A pagination token returned from a previous request to list reports. Provide this token to retrieve the next page of results.", + "location": "query", + "type": "string" + }, + "parent": { + "description": "Required. 
The parent resource name, in the format `organizations/{organization}/locations/global`.", + "location": "path", + "pattern": "^organizations/[^/]+/locations/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+parent}/reports", + "response": { + "$ref": "ListReportsResponse" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + } + } + } + } + } + } + }, + "projects": { + "resources": { + "locations": { + "methods": { + "get": { + "description": "Gets information about a location.", + "flatPath": "v1/projects/{projectsId}/locations/{locationsId}", + "httpMethod": "GET", + "id": "securityposture.projects.locations.get", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "Resource name for the location.", + "location": "path", + "pattern": "^projects/[^/]+/locations/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1/{+name}", + "response": { + "$ref": "Location" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + }, + "list": { + "description": "Lists information about the supported locations for this service.", + "flatPath": "v1/projects/{projectsId}/locations", + "httpMethod": "GET", + "id": "securityposture.projects.locations.list", + "parameterOrder": [ + "name" + ], + "parameters": { + "filter": { + "description": "A filter to narrow down results to a preferred subset. The filtering language accepts strings like `\"displayName=tokyo\"`, and is documented in more detail in [AIP-160](https://google.aip.dev/160).", + "location": "query", + "type": "string" + }, + "name": { + "description": "The resource that owns the locations collection, if applicable.", + "location": "path", + "pattern": "^projects/[^/]+$", + "required": true, + "type": "string" + }, + "pageSize": { + "description": "The maximum number of results to return. If not set, the service selects a default.", + "format": "int32", + "location": "query", + "type": "integer" + }, + "pageToken": { + "description": "A page token received from the `next_page_token` field in the response. Send that page token to receive the subsequent page.", + "location": "query", + "type": "string" + } + }, + "path": "v1/{+name}/locations", + "response": { + "$ref": "ListLocationsResponse" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform" + ] + } + } + } + } + } + }, + "schemas": { + "AssetDetails": { + "description": "Details of a Cloud Asset Inventory asset that caused a violation.", + "id": "AssetDetails", + "properties": { + "asset": { + "description": "Information about the Cloud Asset Inventory asset that violated a policy. The format of this information can change at any time without prior notice. Your application must not depend on this information in any way.", + "type": "string" + }, + "assetType": { + "description": "The type of Cloud Asset Inventory asset. For a list of asset types, see [Supported asset types](https://cloud.google.com/asset-inventory/docs/supported-asset-types).", + "type": "string" + } + }, + "type": "object" + }, + "CancelOperationRequest": { + "description": "The request message for Operations.CancelOperation.", + "id": "CancelOperationRequest", + "properties": {}, + "type": "object" + }, + "ComplianceStandard": { + "description": "Information about a compliance standard that the policy helps enforce.", + "id": "ComplianceStandard", + "properties": { + "control": { + "description": "Optional. The control in the compliance standard that the policy helps enforce. 
For example, `AC-3`.", + "type": "string" + }, + "standard": { + "description": "Optional. The compliance standard that the policy helps enforce. For example, `NIST SP 800-53`.", + "type": "string" + } + }, + "type": "object" + }, + "Constraint": { + "description": "Metadata for a constraint in a Policy.", + "id": "Constraint", + "properties": { + "orgPolicyConstraint": { + "$ref": "OrgPolicyConstraint", + "description": "Optional. A predefined organization policy constraint." + }, + "orgPolicyConstraintCustom": { + "$ref": "OrgPolicyConstraintCustom", + "description": "Optional. A custom organization policy constraint." + }, + "securityHealthAnalyticsCustomModule": { + "$ref": "SecurityHealthAnalyticsCustomModule", + "description": "Optional. A custom module for Security Health Analytics." + }, + "securityHealthAnalyticsModule": { + "$ref": "SecurityHealthAnalyticsModule", + "description": "Optional. A built-in detector for Security Health Analytics." + } + }, + "type": "object" + }, + "CreateIaCValidationReportRequest": { + "description": "Request message for CreateIaCValidationReport.", + "id": "CreateIaCValidationReportRequest", + "properties": { + "iac": { + "$ref": "IaC", + "description": "Required. The infrastructure-as-code (IaC) configuration to validate." + } + }, + "type": "object" + }, + "CustomConfig": { + "description": "A custom module configuration for Security Health Analytics. Use `CustomConfig` to create custom detectors that generate custom findings for resources that you specify.", + "id": "CustomConfig", + "properties": { + "customOutput": { + "$ref": "CustomOutputSpec", + "description": "Optional. Definitions of custom source properties to include in findings." + }, + "description": { + "description": "Optional. A description of the vulnerability or misconfiguration that the custom module detects. The description appears in each finding. Provide enough information to help an investigator understand the finding. The value must be enclosed in quotation marks.", + "type": "string" + }, + "predicate": { + "$ref": "Expr", + "description": "Required. The Common Expression Language (CEL) expression to evaluate. When the expression evaluates to `true` for a resource, a finding is generated." + }, + "recommendation": { + "description": "Required. An explanation of the steps that security teams can take to resolve the detected issue. The explanation appears in each finding.", + "type": "string" + }, + "resourceSelector": { + "$ref": "ResourceSelector", + "description": "Required. The resource types that the custom module operates on." + }, + "severity": { + "description": "Required. The severity of findings generated by the custom module.", + "enum": [ + "SEVERITY_UNSPECIFIED", + "CRITICAL", + "HIGH", + "MEDIUM", + "LOW" + ], + "enumDescriptions": [ + "Default value. This value is unused.", + "Critical severity.", + "High severity.", + "Medium severity.", + "Low severity." + ], + "type": "string" + } + }, + "type": "object" + }, + "CustomOutputSpec": { + "description": "Definitions of custom source properties that can appear in findings.", + "id": "CustomOutputSpec", + "properties": { + "properties": { + "description": "Optional. The custom source properties that can appear in findings.", + "items": { + "$ref": "Property" + }, + "type": "array" + } + }, + "type": "object" + }, + "Empty": { + "description": "A generic empty message that you can re-use to avoid defining duplicated empty messages in your APIs. 
A typical example is to use it as the request or the response type of an API method. For instance: service Foo { rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty); }", + "id": "Empty", + "properties": {}, + "type": "object" + }, + "Expr": { + "description": "Represents a textual expression in the Common Expression Language (CEL) syntax. CEL is a C-like expression language. The syntax and semantics of CEL are documented at https://github.com/google/cel-spec. Example (Comparison): title: \"Summary size limit\" description: \"Determines if a summary is less than 100 chars\" expression: \"document.summary.size() < 100\" Example (Equality): title: \"Requestor is owner\" description: \"Determines if requestor is the document owner\" expression: \"document.owner == request.auth.claims.email\" Example (Logic): title: \"Public documents\" description: \"Determine whether the document should be publicly visible\" expression: \"document.type != 'private' && document.type != 'internal'\" Example (Data Manipulation): title: \"Notification string\" description: \"Create a notification string with a timestamp.\" expression: \"'New message received at ' + string(document.create_time)\" The exact variables and functions that may be referenced within an expression are determined by the service that evaluates it. See the service documentation for additional information.", + "id": "Expr", + "properties": { + "description": { + "description": "Optional. Description of the expression. This is a longer text which describes the expression, e.g. when hovered over it in a UI.", + "type": "string" + }, + "expression": { + "description": "Textual representation of an expression in Common Expression Language syntax.", + "type": "string" + }, + "location": { + "description": "Optional. String indicating the location of the expression for error reporting, e.g. a file name and a position in the file.", + "type": "string" + }, + "title": { + "description": "Optional. Title for the expression, i.e. a short string describing its purpose. This can be used e.g. in UIs which allow to enter the expression.", + "type": "string" + } + }, + "type": "object" + }, + "ExtractPostureRequest": { + "description": "Request message for ExtractPosture.", + "id": "ExtractPostureRequest", + "properties": { + "postureId": { + "description": "Required. An identifier for the posture.", + "type": "string" + }, + "workload": { + "description": "Required. The organization, folder, or project from which policies are extracted. Must be within the organization defined in parent. Use one of the following formats: * `organization/{organization_number}` * `folder/{folder_number}` * `project/{project_number}`", + "type": "string" + } + }, + "type": "object" + }, + "GoogleCloudSecuritypostureV1CustomConstraint": { + "description": "A custom, user-defined constraint. You can apply the constraint only to the resource types specified in the constraint, and only within the organization where the constraint is defined. _When you create a custom constraint, it is not enforced automatically._ You must use an organization policy to [enforce the constraint](https://cloud.google.com/resource-manager/help/organization-policy/constraints/enforce).", + "id": "GoogleCloudSecuritypostureV1CustomConstraint", + "properties": { + "actionType": { + "description": "Whether to allow or deny the action.", + "enum": [ + "ACTION_TYPE_UNSPECIFIED", + "ALLOW", + "DENY" + ], + "enumDescriptions": [ + "Default value. 
This value is unused.", + "Allow the action.", + "Deny the action." + ], + "type": "string" + }, + "condition": { + "description": "A Common Expression Language (CEL) condition expression that must evaluate to `true` for the constraint to be enforced. The maximum length is 1000 characters. For example: + `resource.instanceName.matches('(production|test)_(.+_)?[\\d]+')`: Evaluates to `true` if the resource's `instanceName` attribute contains the following: + The prefix `production` or `test` + An underscore (`_`) + Optional: One or more characters, followed by an underscore (`_`) + One or more digits + `resource.management.auto_upgrade == true`: Evaluates to `true` if the resource's `management.auto_upgrade` attribute is `true`.", + "type": "string" + }, + "description": { + "description": "A description of the constraint. The maximum length is 2000 characters.", + "type": "string" + }, + "displayName": { + "description": "A display name for the constraint. The maximum length is 200 characters.", + "type": "string" + }, + "methodTypes": { + "description": "The types of operations that the constraint applies to.", + "items": { + "enum": [ + "METHOD_TYPE_UNSPECIFIED", + "CREATE", + "UPDATE", + "DELETE" + ], + "enumDescriptions": [ + "Default value. This value is unused.", + "Constraint applied when creating the resource.", + "Constraint applied when updating the resource.", + "Not supported. Constraint applied when deleting the resource." + ], + "type": "string" + }, + "type": "array" + }, + "name": { + "description": "Immutable. The name of the constraint, in the format `organizations/{organization_id}/customConstraints/custom.{custom_constraint_id}`. For example, `organizations/123456789012/customConstraints/custom.createOnlyE2TypeVms`. Must contain 1 to 62 characters, excluding the prefix `organizations/{organization_id}/customConstraints/custom.`.", + "type": "string" + }, + "resourceTypes": { + "description": "Immutable. The resource type that the constraint applies to, in the format `{canonical_service_name}/{resource_type_name}`. For example, `compute.googleapis.com/Instance`.", + "items": { + "type": "string" + }, + "type": "array" + }, + "updateTime": { + "description": "Output only. The last time at which the constraint was updated or created.", + "format": "google-datetime", + "readOnly": true, + "type": "string" + } + }, + "type": "object" + }, + "GoogleCloudSecuritypostureV1PolicyRule": { + "description": "A rule that defines the allowed and denied values for an organization policy constraint.", + "id": "GoogleCloudSecuritypostureV1PolicyRule", + "properties": { + "allowAll": { + "description": "Whether to allow any value for a list constraint. Valid only for list constraints.", + "type": "boolean" + }, + "condition": { + "$ref": "Expr", + "description": "A condition that determines whether this rule is used to evaluate the policy. When set, the google.type.Expr.expression field must contain 1 to 10 subexpressions, joined by the `||` or `&&` operators. Each subexpression must use the `resource.matchTag()` or `resource.matchTagId()` Common Expression Language (CEL) function. 
The `resource.matchTag()` function takes the following arguments: * `key_name`: the namespaced name of the tag key, with the organization ID and a slash (`/`) as a prefix; for example, `123456789012/environment` * `value_name`: the short name of the tag value For example: `resource.matchTag('123456789012/environment, 'prod')` The `resource.matchTagId()` function takes the following arguments: * `key_id`: the permanent ID of the tag key; for example, `tagKeys/123456789012` * `value_id`: the permanent ID of the tag value; for example, `tagValues/567890123456` For example: `resource.matchTagId('tagKeys/123456789012', 'tagValues/567890123456')`" + }, + "denyAll": { + "description": "Whether to deny all values for a list constraint. Valid only for list constraints.", + "type": "boolean" + }, + "enforce": { + "description": "Whether to enforce the constraint. Valid only for boolean constraints.", + "type": "boolean" + }, + "parameters": { + "additionalProperties": { + "description": "Properties of the object.", + "type": "any" + }, + "description": "Optional. Required for GMCs if parameters defined in constraints. Pass parameter values when policy enforcement is enabled. Ensure that parameter value types match those defined in the constraint definition. For example: { \"allowedLocations\" : [\"us-east1\", \"us-west1\"], \"allowAll\" : true }", + "type": "object" + }, + "resourceTypes": { + "$ref": "ResourceTypes", + "description": "Optional. The resource types policy can support, only used for Google managed constraint and method type is GOVERN_TAGS." + }, + "values": { + "$ref": "GoogleCloudSecuritypostureV1PolicyRuleStringValues", + "description": "The allowed and denied values for a list constraint. Valid only for list constraints." + } + }, + "type": "object" + }, + "GoogleCloudSecuritypostureV1PolicyRuleStringValues": { + "description": "The allowed and denied values for a list constraint. For all constraints, these fields can contain literal values. Optionally, you can add the `is:` prefix to these values. If the value contains a colon (`:`), then the `is:` prefix is required. Some constraints allow you to specify a portion of the resource hierarchy, known as a [_hierarchy subtree_](https://cloud.google.com/resource-manager/help/organization-policy/hierarchy-subtree), that the constraint applies to. To specify a hierarchy subtree, use the `under:` prefix, followed by a value with one of these formats: - `projects/{project_id}` (for example, `projects/tokyo-rain-123`) - `folders/{folder_id}` (for example, `folders/1234567890123`) - `organizations/{organization_id}` (for example, `organizations/123456789012`) A constraint's `supports_under` field indicates whether you can specify a hierarchy subtree. To learn which predefined constraints let you specify a hierarchy subtree, see the [constraints reference](https://cloud.google.com/resource-manager/help/organization-policy/constraints/reference).", + "id": "GoogleCloudSecuritypostureV1PolicyRuleStringValues", + "properties": { + "allowedValues": { + "description": "The allowed values for the constraint.", + "items": { + "type": "string" + }, + "type": "array" + }, + "deniedValues": { + "description": "The denied values for the constraint.", + "items": { + "type": "string" + }, + "type": "array" + } + }, + "type": "object" + }, + "IaC": { + "description": "Details of an infrastructure-as-code (IaC) configuration.", + "id": "IaC", + "properties": { + "tfPlan": { + "description": "Optional. 
A Terraform plan file, formatted as a stringified JSON object. To learn how to generate a Terraform plan file in JSON format, see [JSON output format](https://developer.hashicorp.com/terraform/internals/json-format) in the Terraform documentation.", + "format": "byte", + "type": "string" + } + }, + "type": "object" + }, + "IaCValidationReport": { + "description": "Details of an infrastructure-as-code (IaC) validation report.", + "id": "IaCValidationReport", + "properties": { + "note": { + "description": "Additional information about the report.", + "type": "string" + }, + "violations": { + "description": "A list of every Violation found in the IaC configuration.", + "items": { + "$ref": "Violation" + }, + "type": "array" + } + }, + "type": "object" + }, + "ListLocationsResponse": { + "description": "The response message for Locations.ListLocations.", + "id": "ListLocationsResponse", + "properties": { + "locations": { + "description": "A list of locations that matches the specified filter in the request.", + "items": { + "$ref": "Location" + }, + "type": "array" + }, + "nextPageToken": { + "description": "The standard List next-page token.", + "type": "string" + } + }, + "type": "object" + }, + "ListOperationsResponse": { + "description": "The response message for Operations.ListOperations.", + "id": "ListOperationsResponse", + "properties": { + "nextPageToken": { + "description": "The standard List next-page token.", + "type": "string" + }, + "operations": { + "description": "A list of operations that matches the specified filter in the request.", + "items": { + "$ref": "Operation" + }, + "type": "array" + } + }, + "type": "object" + }, + "ListPostureDeploymentsResponse": { + "description": "Response message for ListPostureDeployments.", + "id": "ListPostureDeploymentsResponse", + "properties": { + "nextPageToken": { + "description": "A pagination token. To retrieve the next page of results, call the method again with this token.", + "type": "string" + }, + "postureDeployments": { + "description": "The list of PostureDeployment resources.", + "items": { + "$ref": "PostureDeployment" + }, + "type": "array" + }, + "unreachable": { + "description": "Locations that were temporarily unavailable and could not be reached.", + "items": { + "type": "string" + }, + "type": "array" + } + }, + "type": "object" + }, + "ListPostureRevisionsResponse": { + "description": "Response message for ListPostureRevisions.", + "id": "ListPostureRevisionsResponse", + "properties": { + "nextPageToken": { + "description": "A pagination token. To retrieve the next page of results, call the method again with this token.", + "type": "string" + }, + "revisions": { + "description": "The list of revisions for the Posture.", + "items": { + "$ref": "Posture" + }, + "type": "array" + } + }, + "type": "object" + }, + "ListPostureTemplatesResponse": { + "description": "Response message for ListPostureTemplates.", + "id": "ListPostureTemplatesResponse", + "properties": { + "nextPageToken": { + "description": "A pagination token. To retrieve the next page of results, call the method again with this token.", + "type": "string" + }, + "postureTemplates": { + "description": "The list of PostureTemplate resources.", + "items": { + "$ref": "PostureTemplate" + }, + "type": "array" + } + }, + "type": "object" + }, + "ListPosturesResponse": { + "description": "Response message for ListPostures.", + "id": "ListPosturesResponse", + "properties": { + "nextPageToken": { + "description": "A pagination token. 
To retrieve the next page of results, call the method again with this token.", + "type": "string" + }, + "postures": { + "description": "The list of Posture resources.", + "items": { + "$ref": "Posture" + }, + "type": "array" + }, + "unreachable": { + "description": "Locations that were temporarily unavailable and could not be reached.", + "items": { + "type": "string" + }, + "type": "array" + } + }, + "type": "object" + }, + "ListReportsResponse": { + "description": "Response message for ListReports.", + "id": "ListReportsResponse", + "properties": { + "nextPageToken": { + "description": "A pagination token. To retrieve the next page of results, call the method again with this token.", + "type": "string" + }, + "reports": { + "description": "The list of Report resources.", + "items": { + "$ref": "Report" + }, + "type": "array" + }, + "unreachable": { + "description": "Locations that were temporarily unavailable and could not be reached.", + "items": { + "type": "string" + }, + "type": "array" + } + }, + "type": "object" + }, + "Location": { + "description": "A resource that represents a Google Cloud location.", + "id": "Location", + "properties": { + "displayName": { + "description": "The friendly name for this location, typically a nearby city name. For example, \"Tokyo\".", + "type": "string" + }, + "labels": { + "additionalProperties": { + "type": "string" + }, + "description": "Cross-service attributes for the location. For example {\"cloud.googleapis.com/region\": \"us-east1\"}", + "type": "object" + }, + "locationId": { + "description": "The canonical id for this location. For example: `\"us-east1\"`.", + "type": "string" + }, + "metadata": { + "additionalProperties": { + "description": "Properties of the object. Contains field @type with type URL.", + "type": "any" + }, + "description": "Service-specific metadata. For example the available capacity at the given location.", + "type": "object" + }, + "name": { + "description": "Resource name for the location, which may vary between implementations. For example: `\"projects/example-project/locations/us-east1\"`", + "type": "string" + } + }, + "type": "object" + }, + "Operation": { + "description": "This resource represents a long-running operation that is the result of a network API call.", + "id": "Operation", + "properties": { + "done": { + "description": "If the value is `false`, it means the operation is still in progress. If `true`, the operation is completed, and either `error` or `response` is available.", + "type": "boolean" + }, + "error": { + "$ref": "Status", + "description": "The error result of the operation in case of failure or cancellation." + }, + "metadata": { + "additionalProperties": { + "description": "Properties of the object. Contains field @type with type URL.", + "type": "any" + }, + "description": "Service-specific metadata associated with the operation. It typically contains progress information and common metadata such as create time. Some services might not provide such metadata. Any method that returns a long-running operation should document the metadata type, if any.", + "type": "object" + }, + "name": { + "description": "The server-assigned name, which is only unique within the same service that originally returns it. If you use the default HTTP mapping, the `name` should be a resource name ending with `operations/{unique_id}`.", + "type": "string" + }, + "response": { + "additionalProperties": { + "description": "Properties of the object. 
Contains field @type with type URL.", + "type": "any" + }, + "description": "The normal, successful response of the operation. If the original method returns no data on success, such as `Delete`, the response is `google.protobuf.Empty`. If the original method is standard `Get`/`Create`/`Update`, the response should be the resource. For other methods, the response should have the type `XxxResponse`, where `Xxx` is the original method name. For example, if the original method name is `TakeSnapshot()`, the inferred response type is `TakeSnapshotResponse`.", + "type": "object" + } + }, + "type": "object" + }, + "OperationMetadata": { + "description": "Metadata for an Operation.", + "id": "OperationMetadata", + "properties": { + "apiVersion": { + "description": "Output only. The API version used to start the operation.", + "readOnly": true, + "type": "string" + }, + "createTime": { + "description": "Output only. The time at which the operation was created.", + "format": "google-datetime", + "readOnly": true, + "type": "string" + }, + "endTime": { + "description": "Output only. The time at which the operation finished running.", + "format": "google-datetime", + "readOnly": true, + "type": "string" + }, + "errorMessage": { + "description": "Output only. An error message. Returned when a PostureDeployment enters a failure state like UPDATE_FAILED.", + "readOnly": true, + "type": "string" + }, + "requestedCancellation": { + "description": "Output only. Whether a request to cancel the operation has been received. For operations that have been cancelled successfully, the Operation.error field contains the error code CANCELLED.", + "readOnly": true, + "type": "boolean" + }, + "statusMessage": { + "description": "Output only. The status of the operation, if any.", + "readOnly": true, + "type": "string" + }, + "target": { + "description": "Output only. The server-defined resource path for the target of the operation.", + "readOnly": true, + "type": "string" + }, + "verb": { + "description": "Output only. The name of the action executed by the operation.", + "readOnly": true, + "type": "string" + } + }, + "type": "object" + }, + "OrgPolicyConstraint": { + "description": "A predefined organization policy constraint.", + "id": "OrgPolicyConstraint", + "properties": { + "cannedConstraintId": { + "description": "Required. A unique identifier for the constraint.", + "type": "string" + }, + "policyRules": { + "description": "Required. The rules enforced by the constraint.", + "items": { + "$ref": "GoogleCloudSecuritypostureV1PolicyRule" + }, + "type": "array" + } + }, + "type": "object" + }, + "OrgPolicyConstraintCustom": { + "description": "A custom organization policy constraint.", + "id": "OrgPolicyConstraintCustom", + "properties": { + "customConstraint": { + "$ref": "GoogleCloudSecuritypostureV1CustomConstraint", + "description": "Required. Metadata for the constraint." + }, + "policyRules": { + "description": "Required. The rules enforced by the constraint.", + "items": { + "$ref": "GoogleCloudSecuritypostureV1PolicyRule" + }, + "type": "array" + } + }, + "type": "object" + }, + "Policy": { + "description": "The details of a policy, including the constraints that it includes.", + "id": "Policy", + "properties": { + "complianceStandards": { + "description": "Optional. The compliance standards that the policy helps enforce.", + "items": { + "$ref": "ComplianceStandard" + }, + "type": "array" + }, + "constraint": { + "$ref": "Constraint", + "description": "Required. 
The constraints that the policy includes." + }, + "description": { + "description": "Optional. A description of the policy.", + "type": "string" + }, + "policyId": { + "description": "Required. A user-specified identifier for the policy. In a PolicySet, each policy must have a unique identifier.", + "type": "string" + } + }, + "type": "object" + }, + "PolicyDetails": { + "description": "Details of a policy that was violated.", + "id": "PolicyDetails", + "properties": { + "complianceStandards": { + "description": "The compliance standards that the policy maps to. For example, `CIS-2.0 1.15`.", + "items": { + "type": "string" + }, + "type": "array" + }, + "constraint": { + "description": "Information about the constraint that was violated. The format of this information can change at any time without prior notice. Your application must not depend on this information in any way.", + "type": "string" + }, + "constraintType": { + "description": "The type of constraint that was violated.", + "enum": [ + "CONSTRAINT_TYPE_UNSPECIFIED", + "SECURITY_HEALTH_ANALYTICS_CUSTOM_MODULE", + "ORG_POLICY_CUSTOM", + "SECURITY_HEALTH_ANALYTICS_MODULE", + "ORG_POLICY", + "REGO_POLICY" + ], + "enumDescriptions": [ + "Default value. This value is unused.", + "A custom module for Security Health Analytics.", + "A custom organization policy constraint.", + "A built-in detector for Security Health Analytics.", + "A predefined organization policy constraint.", + "A custom rego policy constraint." + ], + "type": "string" + }, + "description": { + "description": "A description of the policy.", + "type": "string" + } + }, + "type": "object" + }, + "PolicySet": { + "description": "A group of one or more Policy resources.", + "id": "PolicySet", + "properties": { + "description": { + "description": "Optional. A description of the policy set.", + "type": "string" + }, + "policies": { + "description": "Required. The Policy resources in the policy set. Each policy must have a policy_id that's unique within the policy set.", + "items": { + "$ref": "Policy" + }, + "type": "array" + }, + "policySetId": { + "description": "Required. An identifier for the policy set.", + "type": "string" + } + }, + "type": "object" + }, + "Posture": { + "description": "The details of a posture.", + "id": "Posture", + "properties": { + "annotations": { + "additionalProperties": { + "type": "string" + }, + "description": "Optional. The user-specified annotations for the posture. For details about the values you can use in an annotation, see [AIP-148: Standard fields](https://google.aip.dev/148#annotations).", + "type": "object" + }, + "categories": { + "description": "Output only. The categories that the posture belongs to, as determined by the Security Posture API.", + "items": { + "enum": [ + "CATEGORY_UNSPECIFIED", + "AI", + "AWS", + "GCP", + "AZURE" + ], + "enumDescriptions": [ + "Default value. This value is unused.", + "Artificial intelligence (AI).", + "Amazon Web Services (AWS) policies.", + "Google Cloud policies.", + "Microsoft Azure policies." + ], + "type": "string" + }, + "readOnly": true, + "type": "array" + }, + "createTime": { + "description": "Output only. The time at which the posture was created.", + "format": "google-datetime", + "readOnly": true, + "type": "string" + }, + "description": { + "description": "Optional. A description of the posture.", + "type": "string" + }, + "etag": { + "description": "Optional. An opaque identifier for the current version of the posture at the specified `revision_id`. 
To prevent concurrent updates from overwriting each other, always provide the `etag` when you update a posture. You can also provide the `etag` when you delete a posture, to help ensure that you're deleting the intended version of the posture.", + "type": "string" + }, + "name": { + "description": "Required. Identifier. The name of the posture, in the format `organizations/{organization}/locations/global/postures/{posture_id}`.", + "type": "string" + }, + "policySets": { + "description": "Required. The PolicySet resources that the posture includes.", + "items": { + "$ref": "PolicySet" + }, + "type": "array" + }, + "reconciling": { + "description": "Output only. Whether the posture is in the process of being updated.", + "readOnly": true, + "type": "boolean" + }, + "revisionId": { + "description": "Output only. Immutable. An opaque eight-character string that identifies the revision of the posture. A posture can have multiple revisions; when you deploy a posture, you deploy a specific revision of the posture.", + "readOnly": true, + "type": "string" + }, + "state": { + "description": "Required. The state of the posture at the specified `revision_id`.", + "enum": [ + "STATE_UNSPECIFIED", + "DEPRECATED", + "DRAFT", + "ACTIVE" + ], + "enumDescriptions": [ + "Default value. This value is unused.", + "The posture is deprecated and can no longer be deployed.", + "The posture is a draft and is not ready to deploy.", + "The posture is complete and ready to deploy." + ], + "type": "string" + }, + "updateTime": { + "description": "Output only. The time at which the posture was last updated.", + "format": "google-datetime", + "readOnly": true, + "type": "string" + } + }, + "type": "object" + }, + "PostureDeployment": { + "description": "Details for a Posture deployment on an organization, folder, or project. You can deploy at most one posture to each organization, folder, or project. The parent resource for a posture deployment is always the organization, even if the deployment applies to a folder or project.", + "id": "PostureDeployment", + "properties": { + "annotations": { + "additionalProperties": { + "type": "string" + }, + "description": "Optional. The user-specified annotations for the posture deployment. For details about the values you can use in an annotation, see [AIP-148: Standard fields](https://google.aip.dev/148#annotations).", + "type": "object" + }, + "categories": { + "description": "Output only. The categories that the posture deployment belongs to, as determined by the Security Posture API.", + "items": { + "enum": [ + "CATEGORY_UNSPECIFIED", + "AI", + "AWS", + "GCP", + "AZURE" + ], + "enumDescriptions": [ + "Default value. This value is unused.", + "Artificial intelligence (AI).", + "Amazon Web Services (AWS) policies.", + "Google Cloud policies.", + "Microsoft Azure policies." + ], + "type": "string" + }, + "readOnly": true, + "type": "array" + }, + "createTime": { + "description": "Output only. The time at which the posture deployment was created.", + "format": "google-datetime", + "readOnly": true, + "type": "string" + }, + "description": { + "description": "Optional. A description of the posture deployment.", + "type": "string" + }, + "desiredPostureId": { + "description": "Output only. The posture ID that was specified for the deployment. Present only if the posture deployment is in a failed state.", + "readOnly": true, + "type": "string" + }, + "desiredPostureRevisionId": { + "description": "Output only. The revision ID of the posture that was specified for the deployment. 
Present only if the deployment is in a failed state.", + "readOnly": true, + "type": "string" + }, + "etag": { + "description": "Optional. An opaque identifier for the current version of the posture deployment. To prevent concurrent updates from overwriting each other, always provide the `etag` when you update a posture deployment. You can also provide the `etag` when you delete a posture deployment, to help ensure that you're deleting the intended posture deployment.", + "type": "string" + }, + "failureMessage": { + "description": "Output only. A description of why the posture deployment failed. Present only if the deployment is in a failed state.", + "readOnly": true, + "type": "string" + }, + "name": { + "description": "Required. Identifier. The name of the posture deployment, in the format `organizations/{organization}/locations/global/postureDeployments/{deployment_id}`.", + "type": "string" + }, + "postureId": { + "description": "Required. The posture used in the deployment, in the format `organizations/{organization}/locations/global/postures/{posture_id}`.", + "type": "string" + }, + "postureRevisionId": { + "description": "Required. The revision ID of the posture used in the deployment.", + "type": "string" + }, + "reconciling": { + "description": "Output only. Whether the posture deployment is in the process of being updated.", + "readOnly": true, + "type": "boolean" + }, + "state": { + "description": "Output only. The state of the posture deployment.", + "enum": [ + "STATE_UNSPECIFIED", + "CREATING", + "DELETING", + "UPDATING", + "ACTIVE", + "CREATE_FAILED", + "UPDATE_FAILED", + "DELETE_FAILED" + ], + "enumDescriptions": [ + "Default value. This value is unused.", + "The posture deployment is being created.", + "The posture deployment is being deleted.", + "The posture deployment is being updated.", + "The posture deployment is active and in use.", + "The posture deployment could not be created.", + "The posture deployment could not be updated.", + "The posture deployment could not be deleted." + ], + "readOnly": true, + "type": "string" + }, + "targetResource": { + "description": "Required. The organization, folder, or project where the posture is deployed. Uses one of the following formats: * `organizations/{organization_number}` * `folders/{folder_number}` * `projects/{project_number}`", + "type": "string" + }, + "updateTime": { + "description": "Output only. The time at which the posture deployment was last updated.", + "format": "google-datetime", + "readOnly": true, + "type": "string" + } + }, + "type": "object" + }, + "PostureDetails": { + "description": "Details of a posture deployment.", + "id": "PostureDetails", + "properties": { + "policySet": { + "description": "The identifier for the PolicySet that the relevant policy belongs to.", + "type": "string" + }, + "posture": { + "description": "The posture used in the deployment, in the format `organizations/{organization}/locations/global/postures/{posture_id}`.", + "type": "string" + }, + "postureDeployment": { + "description": "The name of the posture deployment, in the format `organizations/{organization}/locations/global/postureDeployments/{deployment_id}`.", + "type": "string" + }, + "postureDeploymentTargetResource": { + "description": "The organization, folder, or project where the posture is deployed. 
Uses one of the following formats: * `organizations/{organization_number}` * `folders/{folder_number}` * `projects/{project_number}`", + "type": "string" + }, + "postureRevisionId": { + "description": "The revision ID of the posture used in the deployment.", + "type": "string" + } + }, + "type": "object" + }, + "PostureTemplate": { + "description": "The details of a posture template.", + "id": "PostureTemplate", + "properties": { + "categories": { + "description": "Output only. The categories that the posture template belongs to, as determined by the Security Posture API.", + "items": { + "enum": [ + "CATEGORY_UNSPECIFIED", + "AI", + "AWS", + "GCP", + "AZURE" + ], + "enumDescriptions": [ + "Default value. This value is unused.", + "Artificial intelligence (AI).", + "Amazon Web Services (AWS) policies.", + "Google Cloud policies.", + "Microsoft Azure policies." + ], + "type": "string" + }, + "readOnly": true, + "type": "array" + }, + "description": { + "description": "Output only. A description of the posture template.", + "readOnly": true, + "type": "string" + }, + "name": { + "description": "Output only. Identifier. The name of the posture template, in the format `organizations/{organization}/locations/global/postureTemplates/{posture_template}`.", + "readOnly": true, + "type": "string" + }, + "policySets": { + "description": "Output only. The PolicySet resources that the posture template includes.", + "items": { + "$ref": "PolicySet" + }, + "readOnly": true, + "type": "array" + }, + "revisionId": { + "description": "Output only. A string that identifies the revision of the posture template.", + "readOnly": true, + "type": "string" + }, + "state": { + "description": "Output only. The state of the posture template at the specified `revision_id`.", + "enum": [ + "STATE_UNSPECIFIED", + "ACTIVE", + "DEPRECATED" + ], + "enumDescriptions": [ + "Default value. This value is unused.", + "The posture template follows the latest controls and standards.", + "The posture template uses outdated controls and standards. We recommend that you use a newer revision of the posture template." + ], + "readOnly": true, + "type": "string" + } + }, + "type": "object" + }, + "Property": { + "description": "A name-value pair used as a custom source property.", + "id": "Property", + "properties": { + "name": { + "description": "Required. The name of the custom source property.", + "type": "string" + }, + "valueExpression": { + "$ref": "Expr", + "description": "Optional. The CEL expression for the value of the custom source property. For resource properties, you can return the value of the property or a string enclosed in quotation marks." + } + }, + "type": "object" + }, + "Report": { + "description": "Details of a report.", + "id": "Report", + "properties": { + "createTime": { + "description": "Output only. The time at which the report was created.", + "format": "google-datetime", + "readOnly": true, + "type": "string" + }, + "iacValidationReport": { + "$ref": "IaCValidationReport", + "description": "Output only. An infrastructure-as-code (IaC) validation report.", + "readOnly": true + }, + "name": { + "description": "Required. The name of the report, in the format `organizations/{organization}/locations/global/reports/{report_id}`.", + "type": "string" + }, + "updateTime": { + "description": "Output only. 
The time at which the report was last updated.", + "format": "google-datetime", + "readOnly": true, + "type": "string" + } + }, + "type": "object" + }, + "ResourceSelector": { + "description": "A selector for the resource types to run the detector on.", + "id": "ResourceSelector", + "properties": { + "resourceTypes": { + "description": "Required. The resource types to run the detector on. Each custom module can specify up to 5 resource types.", + "items": { + "type": "string" + }, + "type": "array" + } + }, + "type": "object" + }, + "ResourceTypes": { + "description": "Set multiple resource types for one policy, eg: resourceTypes: included: - compute.googleapis.com/Instance - compute.googleapis.com/Disk Constraint definition contains an empty resource type in order to support multiple resource types in the policy. Only support Google managed constriaint and method type is GOVERN_TAGS Refer go/multi-resource-support-force-tags-gmc to get more details.", + "id": "ResourceTypes", + "properties": { + "included": { + "description": "Optional. The resource type we currently support. cloud/orgpolicy/customconstraintconfig/prod/resource_types.prototext", + "items": { + "type": "string" + }, + "type": "array" + } + }, + "type": "object" + }, + "SecurityHealthAnalyticsCustomModule": { + "description": "A custom module for Security Health Analytics.", + "id": "SecurityHealthAnalyticsCustomModule", + "properties": { + "config": { + "$ref": "CustomConfig", + "description": "Required. Configuration settings for the custom module." + }, + "displayName": { + "description": "Optional. The display name of the custom module. This value is used as the finding category for all the asset violation findings that the custom module returns. The display name must contain between 1 and 128 alphanumeric characters or underscores, and it must start with a lowercase letter.", + "type": "string" + }, + "id": { + "description": "Output only. Immutable. The unique identifier for the custom module. Contains 1 to 20 digits.", + "readOnly": true, + "type": "string" + }, + "moduleEnablementState": { + "description": "Whether the custom module is enabled at a specified level of the resource hierarchy.", + "enum": [ + "ENABLEMENT_STATE_UNSPECIFIED", + "ENABLED", + "DISABLED" + ], + "enumDescriptions": [ + "Default value. This value is unused.", + "The detector or custom module is enabled.", + "The detector or custom module is disabled." + ], + "type": "string" + } + }, + "type": "object" + }, + "SecurityHealthAnalyticsModule": { + "description": "A built-in detector for Security Health Analytics.", + "id": "SecurityHealthAnalyticsModule", + "properties": { + "moduleEnablementState": { + "description": "Whether the detector is enabled at a specified level of the resource hierarchy.", + "enum": [ + "ENABLEMENT_STATE_UNSPECIFIED", + "ENABLED", + "DISABLED" + ], + "enumDescriptions": [ + "Default value. This value is unused.", + "The detector or custom module is enabled.", + "The detector or custom module is disabled." + ], + "type": "string" + }, + "moduleName": { + "description": "Required. The name of the detector. For example, `BIGQUERY_TABLE_CMEK_DISABLED`. This field is also used as the finding category for all the asset violation findings that the detector returns.", + "type": "string" + } + }, + "type": "object" + }, + "Status": { + "description": "The `Status` type defines a logical error model that is suitable for different programming environments, including REST APIs and RPC APIs. 
It is used by [gRPC](https://github.com/grpc). Each `Status` message contains three pieces of data: error code, error message, and error details. You can find out more about this error model and how to work with it in the [API Design Guide](https://cloud.google.com/apis/design/errors).", + "id": "Status", + "properties": { + "code": { + "description": "The status code, which should be an enum value of google.rpc.Code.", + "format": "int32", + "type": "integer" + }, + "details": { + "description": "A list of messages that carry the error details. There is a common set of message types for APIs to use.", + "items": { + "additionalProperties": { + "description": "Properties of the object. Contains field @type with type URL.", + "type": "any" + }, + "type": "object" + }, + "type": "array" + }, + "message": { + "description": "A developer-facing error message, which should be in English. Any user-facing error message should be localized and sent in the google.rpc.Status.details field, or localized by the client.", + "type": "string" + } + }, + "type": "object" + }, + "Violation": { + "description": "Details of a violation.", + "id": "Violation", + "properties": { + "assetId": { + "description": "The full resource name of the asset that caused the violation. For details about the format of the full resource name for each asset type, see [Resource name format](https://cloud.google.com/asset-inventory/docs/resource-name-format).", + "type": "string" + }, + "nextSteps": { + "description": "A description of the steps that you can take to fix the violation.", + "type": "string" + }, + "policyId": { + "description": "The policy that was violated.", + "type": "string" + }, + "severity": { + "description": "The severity of the violation.", + "enum": [ + "SEVERITY_UNSPECIFIED", + "CRITICAL", + "HIGH", + "MEDIUM", + "LOW" + ], + "enumDescriptions": [ + "Default value. This value is unused.", + "Critical severity.", + "High severity.", + "Medium severity.", + "Low severity." + ], + "type": "string" + }, + "violatedAsset": { + "$ref": "AssetDetails", + "description": "Details of the Cloud Asset Inventory asset that caused the violation." + }, + "violatedPolicy": { + "$ref": "PolicyDetails", + "description": "Details of the policy that was violated." + }, + "violatedPosture": { + "$ref": "PostureDetails", + "description": "Details for the posture that was violated. This field is present only if the violated policy belongs to a deployed posture." + } + }, + "type": "object" + } + } +} diff --git a/discovery/googleapis/serviceconsumermanagement__v1.json b/discovery/googleapis/serviceconsumermanagement__v1.json index 1ce830778..d7ed308be 100644 --- a/discovery/googleapis/serviceconsumermanagement__v1.json +++ b/discovery/googleapis/serviceconsumermanagement__v1.json @@ -25,7 +25,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20240929", + "revision": "20241115", "rootUrl": "https://serviceconsumermanagement.googleapis.com/", "servicePath": "", "title": "Service Consumer Management API", @@ -112,7 +112,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. 
On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "serviceconsumermanagement.operations.cancel", @@ -1350,6 +1350,10 @@ "description": "Experimental features to be included during client library generation. These fields will be deprecated once the feature graduates and is enabled by default.", "id": "ExperimentalFeatures", "properties": { + "protobufPythonicTypesEnabled": { + "description": "Enables generation of protobuf code using new types that are more Pythonic which are included in `protobuf>=5.29.x`. This feature will be enabled by default 1 month after launching the feature in preview packages.", + "type": "boolean" + }, "restAsyncIoEnabled": { "description": "Enables generation of asynchronous REST clients if `rest` transport is enabled. By default, asynchronous REST clients will not be generated. This feature will be enabled by default 1 month after launching the feature in preview packages.", "type": "boolean" @@ -1489,6 +1493,13 @@ "common": { "$ref": "CommonLanguageSettings", "description": "Some settings." + }, + "renamedServices": { + "additionalProperties": { + "type": "string" + }, + "description": "Map of service names to renamed services. Keys are the package relative service names and values are the name to be used for the service client and call options. publishing: go_settings: renamed_services: Publisher: TopicAdmin", + "type": "object" } }, "type": "object" @@ -2429,7 +2440,7 @@ "type": "string" }, "unit": { - "description": "Specify the unit of the quota limit. It uses the same syntax as Metric.unit. The supported unit kinds are determined by the quota backend system. Here are some examples: * \"1/min/{project}\" for quota per minute per project. Note: the order of unit components is insignificant. The \"1\" at the beginning is required to follow the metric unit syntax.", + "description": "Specify the unit of the quota limit. It uses the same syntax as MetricDescriptor.unit. The supported unit kinds are determined by the quota backend system. Here are some examples: * \"1/min/{project}\" for quota per minute per project. Note: the order of unit components is insignificant. 
The \"1\" at the beginning is required to follow the metric unit syntax.", "type": "string" }, "values": { diff --git a/discovery/googleapis/servicemanagement__v1.json b/discovery/googleapis/servicemanagement__v1.json index fe449698f..26213dc54 100644 --- a/discovery/googleapis/servicemanagement__v1.json +++ b/discovery/googleapis/servicemanagement__v1.json @@ -34,7 +34,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20240927", + "revision": "20241202", "rootUrl": "https://servicemanagement.googleapis.com/", "servicePath": "", "title": "Service Management API", @@ -1569,6 +1569,10 @@ "description": "`Documentation` provides the information for describing a service. Example: documentation: summary: > The Google Calendar API gives access to most calendar features. pages: - name: Overview content: (== include google/foo/overview.md ==) - name: Tutorial content: (== include google/foo/tutorial.md ==) subpages: - name: Java content: (== include google/foo/tutorial_java.md ==) rules: - selector: google.calendar.Calendar.Get description: > ... - selector: google.calendar.Calendar.Put description: > ... Documentation is provided in markdown syntax. In addition to standard markdown features, definition lists, tables and fenced code blocks are supported. Section headers can be provided and are interpreted relative to the section nesting of the context where a documentation fragment is embedded. Documentation from the IDL is merged with documentation defined via the config at normalization time, where documentation provided by config rules overrides IDL provided. A number of constructs specific to the API platform are supported in documentation text. In order to reference a proto element, the following notation can be used: [fully.qualified.proto.name][] To override the display text used for the link, this can be used: [display text][fully.qualified.proto.name] Text can be excluded from doc using the following notation: (-- internal comment --) A few directives are available in documentation. Note that directives must appear on a single line to be properly identified. The `include` directive includes a markdown file from an external source: (== include path/to/file ==) The `resource_for` directive marks a message to be the resource of a collection in REST view. If it is not specified, tools attempt to infer the resource from the operations in a collection: (== resource_for v1.shelves.books ==) The directive `suppress_warning` does not directly affect documentation and is documented together with service config validation.", "id": "Documentation", "properties": { + "additionalIamInfo": { + "description": "Optional information about the IAM configuration. This is typically used to link to documentation about a product's IAM roles and permissions.", + "type": "string" + }, "documentationRootUrl": { "description": "The URL to the root of documentation.", "type": "string" @@ -1784,6 +1788,10 @@ "description": "Experimental features to be included during client library generation. These fields will be deprecated once the feature graduates and is enabled by default.", "id": "ExperimentalFeatures", "properties": { + "protobufPythonicTypesEnabled": { + "description": "Enables generation of protobuf code using new types that are more Pythonic which are included in `protobuf>=5.29.x`. 
This feature will be enabled by default 1 month after launching the feature in preview packages.", + "type": "boolean" + }, "restAsyncIoEnabled": { "description": "Enables generation of asynchronous REST clients if `rest` transport is enabled. By default, asynchronous REST clients will not be generated. This feature will be enabled by default 1 month after launching the feature in preview packages.", "type": "boolean" @@ -2036,6 +2044,13 @@ "common": { "$ref": "CommonLanguageSettings", "description": "Some settings." + }, + "renamedServices": { + "additionalProperties": { + "type": "string" + }, + "description": "Map of service names to renamed services. Keys are the package relative service names and values are the name to be used for the service client and call options. publishing: go_settings: renamed_services: Publisher: TopicAdmin", + "type": "object" } }, "type": "object" @@ -3086,7 +3101,7 @@ "type": "string" }, "unit": { - "description": "Specify the unit of the quota limit. It uses the same syntax as Metric.unit. The supported unit kinds are determined by the quota backend system. Here are some examples: * \"1/min/{project}\" for quota per minute per project. Note: the order of unit components is insignificant. The \"1\" at the beginning is required to follow the metric unit syntax.", + "description": "Specify the unit of the quota limit. It uses the same syntax as MetricDescriptor.unit. The supported unit kinds are determined by the quota backend system. Here are some examples: * \"1/min/{project}\" for quota per minute per project. Note: the order of unit components is insignificant. The \"1\" at the beginning is required to follow the metric unit syntax.", "type": "string" }, "values": { diff --git a/discovery/googleapis/servicenetworking__v1.json b/discovery/googleapis/servicenetworking__v1.json index 642efaa15..b6bd70da5 100644 --- a/discovery/googleapis/servicenetworking__v1.json +++ b/discovery/googleapis/servicenetworking__v1.json @@ -28,7 +28,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20240925", + "revision": "20241212", "rootUrl": "https://servicenetworking.googleapis.com/", "servicePath": "", "title": "Service Networking API", @@ -115,7 +115,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. 
On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "servicenetworking.operations.cancel", @@ -1970,6 +1970,10 @@ "description": "`Documentation` provides the information for describing a service. Example: documentation: summary: > The Google Calendar API gives access to most calendar features. pages: - name: Overview content: (== include google/foo/overview.md ==) - name: Tutorial content: (== include google/foo/tutorial.md ==) subpages: - name: Java content: (== include google/foo/tutorial_java.md ==) rules: - selector: google.calendar.Calendar.Get description: > ... - selector: google.calendar.Calendar.Put description: > ... Documentation is provided in markdown syntax. In addition to standard markdown features, definition lists, tables and fenced code blocks are supported. Section headers can be provided and are interpreted relative to the section nesting of the context where a documentation fragment is embedded. Documentation from the IDL is merged with documentation defined via the config at normalization time, where documentation provided by config rules overrides IDL provided. A number of constructs specific to the API platform are supported in documentation text. In order to reference a proto element, the following notation can be used: [fully.qualified.proto.name][] To override the display text used for the link, this can be used: [display text][fully.qualified.proto.name] Text can be excluded from doc using the following notation: (-- internal comment --) A few directives are available in documentation. Note that directives must appear on a single line to be properly identified. The `include` directive includes a markdown file from an external source: (== include path/to/file ==) The `resource_for` directive marks a message to be the resource of a collection in REST view. If it is not specified, tools attempt to infer the resource from the operations in a collection: (== resource_for v1.shelves.books ==) The directive `suppress_warning` does not directly affect documentation and is documented together with service config validation.", "id": "Documentation", "properties": { + "additionalIamInfo": { + "description": "Optional information about the IAM configuration. This is typically used to link to documentation about a product's IAM roles and permissions.", + "type": "string" + }, "documentationRootUrl": { "description": "The URL to the root of documentation.", "type": "string" @@ -2196,6 +2200,10 @@ "description": "Experimental features to be included during client library generation. These fields will be deprecated once the feature graduates and is enabled by default.", "id": "ExperimentalFeatures", "properties": { + "protobufPythonicTypesEnabled": { + "description": "Enables generation of protobuf code using new types that are more Pythonic which are included in `protobuf>=5.29.x`. This feature will be enabled by default 1 month after launching the feature in preview packages.", + "type": "boolean" + }, "restAsyncIoEnabled": { "description": "Enables generation of asynchronous REST clients if `rest` transport is enabled. By default, asynchronous REST clients will not be generated. 
This feature will be enabled by default 1 month after launching the feature in preview packages.", "type": "boolean" @@ -2350,6 +2358,13 @@ "common": { "$ref": "CommonLanguageSettings", "description": "Some settings." + }, + "renamedServices": { + "additionalProperties": { + "type": "string" + }, + "description": "Map of service names to renamed services. Keys are the package relative service names and values are the name to be used for the service client and call options. publishing: go_settings: renamed_services: Publisher: TopicAdmin", + "type": "object" } }, "type": "object" @@ -3421,7 +3436,7 @@ "type": "string" }, "unit": { - "description": "Specify the unit of the quota limit. It uses the same syntax as Metric.unit. The supported unit kinds are determined by the quota backend system. Here are some examples: * \"1/min/{project}\" for quota per minute per project. Note: the order of unit components is insignificant. The \"1\" at the beginning is required to follow the metric unit syntax.", + "description": "Specify the unit of the quota limit. It uses the same syntax as MetricDescriptor.unit. The supported unit kinds are determined by the quota backend system. Here are some examples: * \"1/min/{project}\" for quota per minute per project. Note: the order of unit components is insignificant. The \"1\" at the beginning is required to follow the metric unit syntax.", "type": "string" }, "values": { diff --git a/discovery/googleapis/serviceusage__v1.json b/discovery/googleapis/serviceusage__v1.json index 741192708..e08d9e768 100644 --- a/discovery/googleapis/serviceusage__v1.json +++ b/discovery/googleapis/serviceusage__v1.json @@ -31,7 +31,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20240929", + "revision": "20241205", "rootUrl": "https://serviceusage.googleapis.com/", "servicePath": "", "title": "Service Usage API", @@ -118,7 +118,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "serviceusage.operations.cancel", @@ -1233,6 +1233,10 @@ "description": "`Documentation` provides the information for describing a service. Example: documentation: summary: > The Google Calendar API gives access to most calendar features. 
pages: - name: Overview content: (== include google/foo/overview.md ==) - name: Tutorial content: (== include google/foo/tutorial.md ==) subpages: - name: Java content: (== include google/foo/tutorial_java.md ==) rules: - selector: google.calendar.Calendar.Get description: > ... - selector: google.calendar.Calendar.Put description: > ... Documentation is provided in markdown syntax. In addition to standard markdown features, definition lists, tables and fenced code blocks are supported. Section headers can be provided and are interpreted relative to the section nesting of the context where a documentation fragment is embedded. Documentation from the IDL is merged with documentation defined via the config at normalization time, where documentation provided by config rules overrides IDL provided. A number of constructs specific to the API platform are supported in documentation text. In order to reference a proto element, the following notation can be used: [fully.qualified.proto.name][] To override the display text used for the link, this can be used: [display text][fully.qualified.proto.name] Text can be excluded from doc using the following notation: (-- internal comment --) A few directives are available in documentation. Note that directives must appear on a single line to be properly identified. The `include` directive includes a markdown file from an external source: (== include path/to/file ==) The `resource_for` directive marks a message to be the resource of a collection in REST view. If it is not specified, tools attempt to infer the resource from the operations in a collection: (== resource_for v1.shelves.books ==) The directive `suppress_warning` does not directly affect documentation and is documented together with service config validation.", "id": "Documentation", "properties": { + "additionalIamInfo": { + "description": "Optional information about the IAM configuration. This is typically used to link to documentation about a product's IAM roles and permissions.", + "type": "string" + }, "documentationRootUrl": { "description": "The URL to the root of documentation.", "type": "string" @@ -1526,6 +1530,10 @@ "description": "Experimental features to be included during client library generation. These fields will be deprecated once the feature graduates and is enabled by default.", "id": "ExperimentalFeatures", "properties": { + "protobufPythonicTypesEnabled": { + "description": "Enables generation of protobuf code using new types that are more Pythonic which are included in `protobuf>=5.29.x`. This feature will be enabled by default 1 month after launching the feature in preview packages.", + "type": "boolean" + }, "restAsyncIoEnabled": { "description": "Enables generation of asynchronous REST clients if `rest` transport is enabled. By default, asynchronous REST clients will not be generated. This feature will be enabled by default 1 month after launching the feature in preview packages.", "type": "boolean" @@ -1694,6 +1702,13 @@ "common": { "$ref": "CommonLanguageSettings", "description": "Some settings." + }, + "renamedServices": { + "additionalProperties": { + "type": "string" + }, + "description": "Map of service names to renamed services. Keys are the package relative service names and values are the name to be used for the service client and call options. 
publishing: go_settings: renamed_services: Publisher: TopicAdmin", + "type": "object" } }, "type": "object" @@ -2046,6 +2061,171 @@ "properties": {}, "type": "object" }, + "GoogleApiServiceusageV2betaAnalysis": { + "description": "A message to group the analysis information.", + "id": "GoogleApiServiceusageV2betaAnalysis", + "properties": { + "analysis": { + "$ref": "GoogleApiServiceusageV2betaAnalysisResult", + "description": "Output only. Analysis result of updating a policy.", + "readOnly": true + }, + "analysisType": { + "description": "Output only. The type of analysis.", + "enum": [ + "ANALYSIS_TYPE_UNSPECIFIED", + "ANALYSIS_TYPE_DEPENDENCY", + "ANALYSIS_TYPE_RESOURCE_USAGE" + ], + "enumDescriptions": [ + "Unspecified analysis type. Do not use.", + "The analysis of service dependencies.", + "The analysis of service resource usage." + ], + "readOnly": true, + "type": "string" + }, + "displayName": { + "description": "Output only. The user friendly display name of the analysis type. E.g. service dependency analysis, service resource usage analysis, etc.", + "readOnly": true, + "type": "string" + }, + "service": { + "description": "The names of the service that has analysis result of warnings or blockers. Example: `services/storage.googleapis.com`.", + "type": "string" + } + }, + "type": "object" + }, + "GoogleApiServiceusageV2betaAnalysisResult": { + "description": "An analysis result including blockers and warnings.", + "id": "GoogleApiServiceusageV2betaAnalysisResult", + "properties": { + "blockers": { + "description": "Blocking information that would prevent the policy changes at runtime.", + "items": { + "$ref": "GoogleApiServiceusageV2betaImpact" + }, + "type": "array" + }, + "warnings": { + "description": "Warning information indicating that the policy changes might be unsafe, but will not block the changes at runtime.", + "items": { + "$ref": "GoogleApiServiceusageV2betaImpact" + }, + "type": "array" + } + }, + "type": "object" + }, + "GoogleApiServiceusageV2betaAnalyzeConsumerPolicyMetadata": { + "description": "Metadata for the `AnalyzeConsumerPolicy` method.", + "id": "GoogleApiServiceusageV2betaAnalyzeConsumerPolicyMetadata", + "properties": {}, + "type": "object" + }, + "GoogleApiServiceusageV2betaAnalyzeConsumerPolicyResponse": { + "description": "The response of analyzing a consumer policy update.", + "id": "GoogleApiServiceusageV2betaAnalyzeConsumerPolicyResponse", + "properties": { + "analysis": { + "description": "The list of analyses returned from performing the intended policy update analysis. The analysis is grouped by service name and different analysis types. The empty analysis list means that the consumer policy can be updated without any warnings or blockers.", + "items": { + "$ref": "GoogleApiServiceusageV2betaAnalysis" + }, + "type": "array" + } + }, + "type": "object" + }, + "GoogleApiServiceusageV2betaConsumerPolicy": { + "description": "Consumer Policy is a set of rules that define what services or service groups can be used for a cloud resource hierarchy.", + "id": "GoogleApiServiceusageV2betaConsumerPolicy", + "properties": { + "annotations": { + "additionalProperties": { + "type": "string" + }, + "description": "Optional. Annotations is an unstructured key-value map stored with a policy that may be set by external tools to store and retrieve arbitrary metadata. They are not queryable and should be preserved when modifying objects. [AIP-128](https://google.aip.dev/128#annotations)", + "type": "object" + }, + "createTime": { + "description": "Output only. 
The time the policy was created. For singleton policies, this is the first touch of the policy.", + "format": "google-datetime", + "readOnly": true, + "type": "string" + }, + "enableRules": { + "description": "Enable rules define usable services, groups, and categories. There can currently be at most one `EnableRule`. This restriction will be lifted in later releases.", + "items": { + "$ref": "GoogleApiServiceusageV2betaEnableRule" + }, + "type": "array" + }, + "etag": { + "description": "Output only. An opaque tag indicating the current version of the policy, used for concurrency control.", + "readOnly": true, + "type": "string" + }, + "name": { + "description": "Output only. The resource name of the policy. Only the `default` policy is supported: `projects/12345/consumerPolicies/default`, `folders/12345/consumerPolicies/default`, `organizations/12345/consumerPolicies/default`.", + "readOnly": true, + "type": "string" + }, + "updateTime": { + "description": "Output only. The time the policy was last updated.", + "format": "google-datetime", + "readOnly": true, + "type": "string" + } + }, + "type": "object" + }, + "GoogleApiServiceusageV2betaEnableRule": { + "description": "The consumer policy rule that defines enabled services, groups, and categories.", + "id": "GoogleApiServiceusageV2betaEnableRule", + "properties": { + "services": { + "description": "The names of the services that are enabled. Example: `services/storage.googleapis.com`.", + "items": { + "type": "string" + }, + "type": "array" + } + }, + "type": "object" + }, + "GoogleApiServiceusageV2betaImpact": { + "description": "A message to group impacts of updating a policy.", + "id": "GoogleApiServiceusageV2betaImpact", + "properties": { + "detail": { + "description": "Output only. User friendly impact detail in a free form message.", + "readOnly": true, + "type": "string" + }, + "impactType": { + "description": "Output only. The type of impact.", + "enum": [ + "IMPACT_TYPE_UNSPECIFIED", + "DEPENDENCY_MISSING_DEPENDENCIES" + ], + "enumDescriptions": [ + "Reserved Blocks (Block n contains codes from 100n to 100(n+1) -1 Block 0 - Special/Admin codes Block 1 - Impact Type of ANALYSIS_TYPE_DEPENDENCY Block 2 - Impact Type of ANALYSIS_TYPE_RESOURCE_USAGE ...", + "Block 1 - Impact Type of ANALYSIS_TYPE_DEPENDENCY" + ], + "readOnly": true, + "type": "string" + } + }, + "type": "object" + }, + "GoogleApiServiceusageV2betaUpdateConsumerPolicyMetadata": { + "description": "Metadata for the `UpdateConsumerPolicy` method.", + "id": "GoogleApiServiceusageV2betaUpdateConsumerPolicyMetadata", + "properties": {}, + "type": "object" + }, "Http": { "description": "Defines the HTTP configuration for an API service. It contains a list of HttpRule, each specifying the mapping of an RPC method to one or more HTTP REST API methods.", "id": "Http", @@ -3063,7 +3243,7 @@ "type": "string" }, "unit": { - "description": "Specify the unit of the quota limit. It uses the same syntax as Metric.unit. The supported unit kinds are determined by the quota backend system. Here are some examples: * \"1/min/{project}\" for quota per minute per project. Note: the order of unit components is insignificant. The \"1\" at the beginning is required to follow the metric unit syntax.", + "description": "Specify the unit of the quota limit. It uses the same syntax as MetricDescriptor.unit. The supported unit kinds are determined by the quota backend system. Here are some examples: * \"1/min/{project}\" for quota per minute per project. 
Note: the order of unit components is insignificant. The \"1\" at the beginning is required to follow the metric unit syntax.", "type": "string" }, "values": { diff --git a/discovery/googleapis/sheets__v4.json b/discovery/googleapis/sheets__v4.json index a34e433fc..8f47144b4 100644 --- a/discovery/googleapis/sheets__v4.json +++ b/discovery/googleapis/sheets__v4.json @@ -37,7 +37,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20241008", + "revision": "20241203", "rootUrl": "https://sheets.googleapis.com/", "servicePath": "", "title": "Google Sheets API", @@ -6540,6 +6540,10 @@ "description": "Sets a data validation rule to every cell in the range. To clear validation in a range, call this with no rule specified.", "id": "SetDataValidationRequest", "properties": { + "filteredRowsIncluded": { + "description": "Optional. If true, the data validation rule will be applied to the filtered rows as well.", + "type": "boolean" + }, "range": { "$ref": "GridRange", "description": "The range the data validation rule should apply to." diff --git a/discovery/googleapis/solar__v1.json b/discovery/googleapis/solar__v1.json index 48f1b3396..ecc8e0d8e 100644 --- a/discovery/googleapis/solar__v1.json +++ b/discovery/googleapis/solar__v1.json @@ -25,7 +25,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20240820", + "revision": "20241211", "rootUrl": "https://solar.googleapis.com/", "servicePath": "", "title": "Solar API", @@ -347,7 +347,7 @@ "type": "string" }, "name": { - "description": "The resource name for the building, of the format `building/`.", + "description": "The resource name for the building, of the format `buildings/{place_id}`.", "type": "string" }, "postalCode": { diff --git a/discovery/googleapis/spanner__v1.json b/discovery/googleapis/spanner__v1.json index d6057c8af..9739d9437 100644 --- a/discovery/googleapis/spanner__v1.json +++ b/discovery/googleapis/spanner__v1.json @@ -103,7 +103,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20241015", + "revision": "20241119", "rootUrl": "https://spanner.googleapis.com/", "servicePath": "", "title": "Cloud Spanner API", @@ -1048,7 +1048,7 @@ "type": "string" }, "encryptionConfig.kmsKeyNames": { - "description": "Optional. Specifies the KMS configuration for the one or more keys used to protect the backup. Values are of the form `projects//locations//keyRings//cryptoKeys/`. The keys referenced by kms_key_names must fully cover all regions of the backup's instance configuration. Some examples: * For single region instance configs, specify a single regional location KMS key. * For multi-regional instance configs of type GOOGLE_MANAGED, either specify a multi-regional location KMS key or multiple regional location KMS keys that cover all regions in the instance config. * For an instance config of type USER_MANAGED, please specify only regional location KMS keys to cover each region in the instance config. Multi-regional location KMS keys are not supported for USER_MANAGED instance configs.", + "description": "Optional. Specifies the KMS configuration for the one or more keys used to protect the backup. Values are of the form `projects//locations//keyRings//cryptoKeys/`. The keys referenced by `kms_key_names` must fully cover all regions of the backup's instance configuration. Some examples: * For regional (single-region) instance configurations, specify a regional location KMS key. 
* For multi-region instance configurations of type `GOOGLE_MANAGED`, either specify a multi-region location KMS key or multiple regional location KMS keys that cover all regions in the instance configuration. * For an instance configuration of type `USER_MANAGED`, specify only regional location KMS keys to cover each region in the instance configuration. Multi-region location KMS keys aren't supported for `USER_MANAGED` type instance configurations.", "location": "query", "repeated": true, "type": "string" @@ -1429,7 +1429,7 @@ ], "parameters": { "filter": { - "description": "An expression that filters the list of returned operations. A filter expression consists of a field name, a comparison operator, and a value for filtering. The value must be a string, a number, or a boolean. The comparison operator must be one of: `<`, `>`, `<=`, `>=`, `!=`, `=`, or `:`. Colon `:` is the contains operator. Filter rules are not case sensitive. The following fields in the Operation are eligible for filtering: * `name` - The name of the long-running operation * `done` - False if the operation is in progress, else true. * `metadata.@type` - the type of metadata. For example, the type string for RestoreDatabaseMetadata is `type.googleapis.com/google.spanner.admin.database.v1.RestoreDatabaseMetadata`. * `metadata.` - any field in metadata.value. `metadata.@type` must be specified first, if filtering on metadata fields. * `error` - Error associated with the long-running operation. * `response.@type` - the type of response. * `response.` - any field in response.value. You can combine multiple expressions by enclosing each expression in parentheses. By default, expressions are combined with AND logic. However, you can specify AND, OR, and NOT logic explicitly. Here are a few examples: * `done:true` - The operation is complete. * `(metadata.@type=type.googleapis.com/google.spanner.admin.database.v1.RestoreDatabaseMetadata) AND` \\ `(metadata.source_type:BACKUP) AND` \\ `(metadata.backup_info.backup:backup_howl) AND` \\ `(metadata.name:restored_howl) AND` \\ `(metadata.progress.start_time < \\\"2018-03-28T14:50:00Z\\\") AND` \\ `(error:*)` - Return operations where: * The operation's metadata type is RestoreDatabaseMetadata. * The database is restored from a backup. * The backup name contains \"backup_howl\". * The restored database's name contains \"restored_howl\". * The operation started before 2018-03-28T14:50:00Z. * The operation resulted in an error.", + "description": "An expression that filters the list of returned operations. A filter expression consists of a field name, a comparison operator, and a value for filtering. The value must be a string, a number, or a boolean. The comparison operator must be one of: `<`, `>`, `<=`, `>=`, `!=`, `=`, or `:`. Colon `:` is the contains operator. Filter rules are not case sensitive. The following fields in the operation are eligible for filtering: * `name` - The name of the long-running operation * `done` - False if the operation is in progress, else true. * `metadata.@type` - the type of metadata. For example, the type string for RestoreDatabaseMetadata is `type.googleapis.com/google.spanner.admin.database.v1.RestoreDatabaseMetadata`. * `metadata.` - any field in metadata.value. `metadata.@type` must be specified first, if filtering on metadata fields. * `error` - Error associated with the long-running operation. * `response.@type` - the type of response. * `response.` - any field in response.value. 
You can combine multiple expressions by enclosing each expression in parentheses. By default, expressions are combined with AND logic. However, you can specify AND, OR, and NOT logic explicitly. Here are a few examples: * `done:true` - The operation is complete. * `(metadata.@type=type.googleapis.com/google.spanner.admin.database.v1.RestoreDatabaseMetadata) AND` \\ `(metadata.source_type:BACKUP) AND` \\ `(metadata.backup_info.backup:backup_howl) AND` \\ `(metadata.name:restored_howl) AND` \\ `(metadata.progress.start_time < \\\"2018-03-28T14:50:00Z\\\") AND` \\ `(error:*)` - Return operations where: * The operation's metadata type is RestoreDatabaseMetadata. * The database is restored from a backup. * The backup name contains \"backup_howl\". * The restored database's name contains \"restored_howl\". * The operation started before 2018-03-28T14:50:00Z. * The operation resulted in an error.", "location": "query", "type": "string" }, @@ -2810,7 +2810,7 @@ "type": "string" }, "instancePartitionDeadline": { - "description": "Optional. Deadline used while retrieving metadata for instance partition operations. Instance partitions whose operation metadata cannot be retrieved within this deadline will be added to unreachable in ListInstancePartitionOperationsResponse.", + "description": "Optional. Deadline used while retrieving metadata for instance partition operations. Instance partitions whose operation metadata cannot be retrieved within this deadline will be added to unreachable_instance_partitions in ListInstancePartitionOperationsResponse.", "format": "google-datetime", "location": "query", "type": "string" @@ -3464,7 +3464,7 @@ "readOnly": true }, "encryptionInformation": { - "description": "Output only. The encryption information for the backup, whether it is protected by one or more KMS keys. The information includes all Cloud KMS key versions used to encrypt the backup. The `encryption_status' field inside of each `EncryptionInfo` is not populated. At least one of the key versions must be available for the backup to be restored. If a key version is revoked in the middle of a restore, the restore behavior is undefined.", + "description": "Output only. The encryption information for the backup, whether it is protected by one or more KMS keys. The information includes all Cloud KMS key versions used to encrypt the backup. The `encryption_status` field inside of each `EncryptionInfo` is not populated. At least one of the key versions must be available for the backup to be restored. If a key version is revoked in the middle of a restore, the restore behavior is undefined.", "items": { "$ref": "EncryptionInfo" }, @@ -3580,7 +3580,7 @@ "type": "object" }, "BackupSchedule": { - "description": "BackupSchedule expresses the automated backup creation specification for a Spanner database. Next ID: 10", + "description": "BackupSchedule expresses the automated backup creation specification for a Spanner database.", "id": "BackupSchedule", "properties": { "encryptionConfig": { @@ -3940,7 +3940,7 @@ "type": "string" }, "kmsKeyNames": { - "description": "Optional. Specifies the KMS configuration for the one or more keys used to protect the backup. Values are of the form `projects//locations//keyRings//cryptoKeys/`. Kms keys specified can be in any order. The keys referenced by kms_key_names must fully cover all regions of the backup's instance configuration. Some examples: * For single region instance configs, specify a single regional location KMS key. 
* For multi-regional instance configs of type GOOGLE_MANAGED, either specify a multi-regional location KMS key or multiple regional location KMS keys that cover all regions in the instance config. * For an instance config of type USER_MANAGED, please specify only regional location KMS keys to cover each region in the instance config. Multi-regional location KMS keys are not supported for USER_MANAGED instance configs.", + "description": "Optional. Specifies the KMS configuration for the one or more keys used to protect the backup. Values are of the form `projects//locations//keyRings//cryptoKeys/`. KMS keys specified can be in any order. The keys referenced by `kms_key_names` must fully cover all regions of the backup's instance configuration. Some examples: * For regional (single-region) instance configurations, specify a regional location KMS key. * For multi-region instance configurations of type `GOOGLE_MANAGED`, either specify a multi-region location KMS key or multiple regional location KMS keys that cover all regions in the instance configuration. * For an instance configuration of type `USER_MANAGED`, specify only regional location KMS keys to cover each region in the instance configuration. Multi-region location KMS keys aren't supported for `USER_MANAGED` type instance configurations.", "items": { "type": "string" }, @@ -4022,7 +4022,7 @@ "type": "string" }, "kmsKeyNames": { - "description": "Optional. Specifies the KMS configuration for the one or more keys used to protect the backup. Values are of the form `projects//locations//keyRings//cryptoKeys/`. The keys referenced by kms_key_names must fully cover all regions of the backup's instance configuration. Some examples: * For single region instance configs, specify a single regional location KMS key. * For multi-regional instance configs of type GOOGLE_MANAGED, either specify a multi-regional location KMS key or multiple regional location KMS keys that cover all regions in the instance config. * For an instance config of type USER_MANAGED, please specify only regional location KMS keys to cover each region in the instance config. Multi-regional location KMS keys are not supported for USER_MANAGED instance configs.", + "description": "Optional. Specifies the KMS configuration for the one or more keys used to protect the backup. Values are of the form `projects//locations//keyRings//cryptoKeys/`. The keys referenced by `kms_key_names` must fully cover all regions of the backup's instance configuration. Some examples: * For regional (single-region) instance configurations, specify a regional location KMS key. * For multi-region instance configurations of type `GOOGLE_MANAGED`, either specify a multi-region location KMS key or multiple regional location KMS keys that cover all regions in the instance configuration. * For an instance configuration of type `USER_MANAGED`, specify only regional location KMS keys to cover each region in the instance configuration. Multi-region location KMS keys aren't supported for `USER_MANAGED` type instance configurations.", "items": { "type": "string" }, @@ -4128,7 +4128,7 @@ "type": "object" }, "CreateInstanceConfigRequest": { - "description": "The request for CreateInstanceConfigRequest.", + "description": "The request for CreateInstanceConfig.", "id": "CreateInstanceConfigRequest", "properties": { "instanceConfig": { @@ -4321,7 +4321,7 @@ "readOnly": true }, "encryptionInfo": { - "description": "Output only. 
For databases that are using customer managed encryption, this field contains the encryption information for the database, such as all Cloud KMS key versions that are in use. The `encryption_status' field inside of each `EncryptionInfo` is not populated. For databases that are using Google default or other types of encryption, this field is empty. This field is propagated lazily from the backend. There might be a delay from when a key version is being used and when it appears in this field.", + "description": "Output only. For databases that are using customer managed encryption, this field contains the encryption information for the database, such as all Cloud KMS key versions that are in use. The `encryption_status` field inside of each `EncryptionInfo` is not populated. For databases that are using Google default or other types of encryption, this field is empty. This field is propagated lazily from the backend. There might be a delay from when a key version is being used and when it appears in this field.", "items": { "$ref": "EncryptionInfo" }, @@ -4512,7 +4512,7 @@ "type": "string" }, "kmsKeyNames": { - "description": "Specifies the KMS configuration for the one or more keys used to encrypt the database. Values are of the form `projects//locations//keyRings//cryptoKeys/`. The keys referenced by kms_key_names must fully cover all regions of the database instance configuration. Some examples: * For single region database instance configs, specify a single regional location KMS key. * For multi-regional database instance configs of type GOOGLE_MANAGED, either specify a multi-regional location KMS key or multiple regional location KMS keys that cover all regions in the instance config. * For a database instance config of type USER_MANAGED, please specify only regional location KMS keys to cover each region in the instance config. Multi-regional location KMS keys are not supported for USER_MANAGED instance configs.", + "description": "Specifies the KMS configuration for one or more keys used to encrypt the database. Values are of the form `projects//locations//keyRings//cryptoKeys/`. The keys referenced by `kms_key_names` must fully cover all regions of the database's instance configuration. Some examples: * For regional (single-region) instance configurations, specify a regional location KMS key. * For multi-region instance configurations of type `GOOGLE_MANAGED`, either specify a multi-region location KMS key or multiple regional location KMS keys that cover all regions in the instance configuration. * For an instance configuration of type `USER_MANAGED`, specify only regional location KMS keys to cover each region in the instance configuration. Multi-region location KMS keys aren't supported for `USER_MANAGED` type instance configurations.", "items": { "type": "string" }, @@ -4904,6 +4904,20 @@ "readOnly": true, "type": "string" }, + "defaultBackupScheduleType": { + "description": "Optional. Controls the default backup behavior for new databases within the instance. Note that `AUTOMATIC` is not permitted for free instances, as backups and backup schedules are not allowed for free instances. 
In the `GetInstance` or `ListInstances` response, if the value of default_backup_schedule_type is unset or NONE, no default backup schedule will be created for new databases within the instance.", + "enum": [ + "DEFAULT_BACKUP_SCHEDULE_TYPE_UNSPECIFIED", + "NONE", + "AUTOMATIC" + ], + "enumDescriptions": [ + "Not specified.", + "No default backup schedule will be created automatically on creation of a database within the instance.", + "A default backup schedule will be created automatically on creation of a database within the instance. Once created, the default backup schedule can be edited or deleted just like any other backup schedule. Currently, the default backup schedule creates a full backup every 24 hours and retains the backup for a period of 7 days." + ], + "type": "string" + }, "displayName": { "description": "Required. The descriptive name for this instance as it appears in UIs. Must be unique per project and between 4 and 30 characters in length.", "type": "string" @@ -4961,12 +4975,12 @@ "type": "string" }, "nodeCount": { - "description": "The number of nodes allocated to this instance. At most, one of either `node_count` or `processing_units` should be present in the message. Users can set the `node_count` field to specify the target number of nodes allocated to the instance. If autoscaling is enabled, `node_count` is treated as an `OUTPUT_ONLY` field and reflects the current number of nodes allocated to the instance. This might be zero in API responses for instances that are not yet in the `READY` state. If the instance has varying node count across replicas (achieved by setting asymmetric_autoscaling_options in autoscaling config), the node_count here is the maximum node count across all replicas. For more information, see [Compute capacity, nodes, and processing units](https://cloud.google.com/spanner/docs/compute-capacity).", + "description": "The number of nodes allocated to this instance. At most, one of either `node_count` or `processing_units` should be present in the message. Users can set the `node_count` field to specify the target number of nodes allocated to the instance. If autoscaling is enabled, `node_count` is treated as an `OUTPUT_ONLY` field and reflects the current number of nodes allocated to the instance. This might be zero in API responses for instances that are not yet in the `READY` state. For more information, see [Compute capacity, nodes, and processing units](https://cloud.google.com/spanner/docs/compute-capacity).", "format": "int32", "type": "integer" }, "processingUnits": { - "description": "The number of processing units allocated to this instance. At most, one of either `processing_units` or `node_count` should be present in the message. Users can set the `processing_units` field to specify the target number of processing units allocated to the instance. If autoscaling is enabled, `processing_units` is treated as an `OUTPUT_ONLY` field and reflects the current number of processing units allocated to the instance. This might be zero in API responses for instances that are not yet in the `READY` state. If the instance has varying processing units per replica (achieved by setting asymmetric_autoscaling_options in autoscaling config), the processing_units here is the maximum processing units across all replicas. For more information, see [Compute capacity, nodes and processing units](https://cloud.google.com/spanner/docs/compute-capacity).", + "description": "The number of processing units allocated to this instance. 
At most, one of either `processing_units` or `node_count` should be present in the message. Users can set the `processing_units` field to specify the target number of processing units allocated to the instance. If autoscaling is enabled, `processing_units` is treated as an `OUTPUT_ONLY` field and reflects the current number of processing units allocated to the instance. This might be zero in API responses for instances that are not yet in the `READY` state. For more information, see [Compute capacity, nodes and processing units](https://cloud.google.com/spanner/docs/compute-capacity).", "format": "int32", "type": "integer" }, @@ -6556,7 +6570,7 @@ "type": "string" }, "kmsKeyNames": { - "description": "Optional. Specifies the KMS configuration for the one or more keys used to encrypt the database. Values have the form `projects//locations//keyRings//cryptoKeys/`. The keys referenced by kms_key_names must fully cover all regions of the database instance configuration. Some examples: * For single region database instance configurations, specify a single regional location KMS key. * For multi-regional database instance configurations of type `GOOGLE_MANAGED`, either specify a multi-regional location KMS key or multiple regional location KMS keys that cover all regions in the instance configuration. * For a database instance configuration of type `USER_MANAGED`, please specify only regional location KMS keys to cover each region in the instance configuration. Multi-regional location KMS keys are not supported for USER_MANAGED instance configurations.", + "description": "Optional. Specifies the KMS configuration for one or more keys used to encrypt the database. Values have the form `projects//locations//keyRings//cryptoKeys/`. The keys referenced by `kms_key_names` must fully cover all regions of the database's instance configuration. Some examples: * For regional (single-region) instance configurations, specify a regional location KMS key. * For multi-region instance configurations of type `GOOGLE_MANAGED`, either specify a multi-region location KMS key or multiple regional location KMS keys that cover all regions in the instance configuration. * For an instance configuration of type `USER_MANAGED`, specify only regional location KMS keys to cover each region in the instance configuration. Multi-region location KMS keys aren't supported for `USER_MANAGED` type instance configurations.", "items": { "type": "string" }, @@ -7153,7 +7167,7 @@ "id": "UpdateDatabaseDdlRequest", "properties": { "operationId": { - "description": "If empty, the new update request is assigned an automatically-generated operation ID. Otherwise, `operation_id` is used to construct the name of the resulting Operation. Specifying an explicit operation ID simplifies determining whether the statements were executed in the event that the UpdateDatabaseDdl call is replayed, or the return value is otherwise lost: the database and `operation_id` fields can be combined to form the name of the resulting longrunning.Operation: `/operations/`. `operation_id` should be unique within the database, and must be a valid identifier: `a-z*`. Note that automatically-generated operation IDs always begin with an underscore. If the named operation already exists, UpdateDatabaseDdl returns `ALREADY_EXISTS`.", + "description": "If empty, the new update request is assigned an automatically-generated operation ID. Otherwise, `operation_id` is used to construct the name of the resulting Operation. 
Specifying an explicit operation ID simplifies determining whether the statements were executed in the event that the UpdateDatabaseDdl call is replayed, or the return value is otherwise lost: the database and `operation_id` fields can be combined to form the `name` of the resulting longrunning.Operation: `/operations/`. `operation_id` should be unique within the database, and must be a valid identifier: `a-z*`. Note that automatically-generated operation IDs always begin with an underscore. If the named operation already exists, UpdateDatabaseDdl returns `ALREADY_EXISTS`.", "type": "string" }, "protoDescriptors": { @@ -7228,7 +7242,7 @@ "type": "object" }, "UpdateInstanceConfigRequest": { - "description": "The request for UpdateInstanceConfigRequest.", + "description": "The request for UpdateInstanceConfig.", "id": "UpdateInstanceConfigRequest", "properties": { "instanceConfig": { diff --git a/discovery/googleapis/speech__v1.json b/discovery/googleapis/speech__v1.json index f8e3c3b2c..abde80e2d 100644 --- a/discovery/googleapis/speech__v1.json +++ b/discovery/googleapis/speech__v1.json @@ -15,6 +15,43 @@ "description": "Converts audio to text by applying powerful neural network models.", "discoveryVersion": "v1", "documentationLink": "https://cloud.google.com/speech-to-text/docs/quickstart-protocol", + "endpoints": [ + { + "description": "Regional Endpoint", + "endpointUrl": "https://speech.us-central1.rep.googleapis.com/", + "location": "us-central1" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://speech.us-west1.rep.googleapis.com/", + "location": "us-west1" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://speech.me-west1.rep.googleapis.com/", + "location": "me-west1" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://speech.europe-west1.rep.googleapis.com/", + "location": "europe-west1" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://speech.europe-west2.rep.googleapis.com/", + "location": "europe-west2" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://speech.europe-west3.rep.googleapis.com/", + "location": "europe-west3" + }, + { + "description": "Regional Endpoint", + "endpointUrl": "https://speech.europe-west4.rep.googleapis.com/", + "location": "europe-west4" + } + ], "icons": { "x16": "http://www.google.com/images/icons/product/search-16.gif", "x32": "http://www.google.com/images/icons/product/search-32.gif" @@ -25,7 +62,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20240926", + "revision": "20241111", "rootUrl": "https://speech.googleapis.com/", "servicePath": "", "title": "Cloud Speech-to-Text API", diff --git a/discovery/googleapis/storage__v1.json b/discovery/googleapis/storage__v1.json index 5025cbdb1..2f951d8fe 100644 --- a/discovery/googleapis/storage__v1.json +++ b/discovery/googleapis/storage__v1.json @@ -93,7 +93,7 @@ "location": "us-west4" } ], - "etag": "\"3132333635343336333933383332343134323139\"", + "etag": "\"3133343838373034343130353038353234313337\"", "icons": { "x16": "https://www.google.com/images/icons/product/cloud_storage-16.png", "x32": "https://www.google.com/images/icons/product/cloud_storage-32.png" @@ -107,7 +107,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20241008", + "revision": "20241206", "rootUrl": "https://storage.googleapis.com/", "servicePath": "storage/v1/", "title": "Cloud Storage JSON API", @@ -1133,6 +1133,19 @@ "required": true, 
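For reference, a minimal sketch of how a discovery-based client might target one of the regional endpoints newly declared in speech__v1.json above. This assumes google-api-python-client's `build()` helper and its `client_options` argument, application-default credentials, and that the regional endpoint accepts standard v1 requests; only the endpoint URL itself is taken from the `endpoints` list in the diff, the rest is illustrative.

```python
# Sketch: point the discovery-based Speech client at a regional endpoint.
# Assumptions: google-api-python-client installed, default credentials set up.
from googleapiclient.discovery import build

speech = build(
    "speech",
    "v1",
    # Endpoint URL copied from the `endpoints` entries added above.
    client_options={"api_endpoint": "https://speech.us-central1.rep.googleapis.com/"},
)

# Recognize a short public sample clip via the regional endpoint.
response = speech.speech().recognize(
    body={
        "config": {"languageCode": "en-US"},
        "audio": {"uri": "gs://cloud-samples-data/speech/brooklyn_bridge.flac"},
    }
).execute()
print(response)
```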
"type": "string" }, + "projection": { + "description": "Set of properties to return. Defaults to full.", + "enum": [ + "full", + "noAcl" + ], + "enumDescriptions": [ + "Include all properties.", + "Omit owner, acl and defaultObjectAcl properties." + ], + "location": "query", + "type": "string" + }, "userProject": { "description": "The project to be billed for this request. Required for Requester Pays buckets.", "location": "query", @@ -1140,6 +1153,9 @@ } }, "path": "b/{bucket}/restore", + "response": { + "$ref": "Bucket" + }, "scopes": [ "https://www.googleapis.com/auth/cloud-platform", "https://www.googleapis.com/auth/devstorage.full_control", @@ -3212,6 +3228,98 @@ ], "supportsSubscription": true }, + "move": { + "description": "Moves the source object to the destination object in the same bucket.", + "httpMethod": "POST", + "id": "storage.objects.move", + "parameterOrder": [ + "bucket", + "sourceObject", + "destinationObject" + ], + "parameters": { + "bucket": { + "description": "Name of the bucket in which the object resides.", + "location": "path", + "required": true, + "type": "string" + }, + "destinationObject": { + "description": "Name of the destination object. For information about how to URL encode object names to be path safe, see [Encoding URI Path Parts](https://cloud.google.com/storage/docs/request-endpoints#encoding).", + "location": "path", + "required": true, + "type": "string" + }, + "ifGenerationMatch": { + "description": "Makes the operation conditional on whether the destination object's current generation matches the given value. Setting to 0 makes the operation succeed only if there are no live versions of the object. `ifGenerationMatch` and `ifGenerationNotMatch` conditions are mutually exclusive: it's an error for both of them to be set in the request.", + "format": "int64", + "location": "query", + "type": "string" + }, + "ifGenerationNotMatch": { + "description": "Makes the operation conditional on whether the destination object's current generation does not match the given value. If no live object exists, the precondition fails. Setting to 0 makes the operation succeed only if there is a live version of the object.`ifGenerationMatch` and `ifGenerationNotMatch` conditions are mutually exclusive: it's an error for both of them to be set in the request.", + "format": "int64", + "location": "query", + "type": "string" + }, + "ifMetagenerationMatch": { + "description": "Makes the operation conditional on whether the destination object's current metageneration matches the given value. `ifMetagenerationMatch` and `ifMetagenerationNotMatch` conditions are mutually exclusive: it's an error for both of them to be set in the request.", + "format": "int64", + "location": "query", + "type": "string" + }, + "ifMetagenerationNotMatch": { + "description": "Makes the operation conditional on whether the destination object's current metageneration does not match the given value. `ifMetagenerationMatch` and `ifMetagenerationNotMatch` conditions are mutually exclusive: it's an error for both of them to be set in the request.", + "format": "int64", + "location": "query", + "type": "string" + }, + "ifSourceGenerationMatch": { + "description": "Makes the operation conditional on whether the source object's current generation matches the given value. 
`ifSourceGenerationMatch` and `ifSourceGenerationNotMatch` conditions are mutually exclusive: it's an error for both of them to be set in the request.", + "format": "int64", + "location": "query", + "type": "string" + }, + "ifSourceGenerationNotMatch": { + "description": "Makes the operation conditional on whether the source object's current generation does not match the given value. `ifSourceGenerationMatch` and `ifSourceGenerationNotMatch` conditions are mutually exclusive: it's an error for both of them to be set in the request.", + "format": "int64", + "location": "query", + "type": "string" + }, + "ifSourceMetagenerationMatch": { + "description": "Makes the operation conditional on whether the source object's current metageneration matches the given value. `ifSourceMetagenerationMatch` and `ifSourceMetagenerationNotMatch` conditions are mutually exclusive: it's an error for both of them to be set in the request.", + "format": "int64", + "location": "query", + "type": "string" + }, + "ifSourceMetagenerationNotMatch": { + "description": "Makes the operation conditional on whether the source object's current metageneration does not match the given value. `ifSourceMetagenerationMatch` and `ifSourceMetagenerationNotMatch` conditions are mutually exclusive: it's an error for both of them to be set in the request.", + "format": "int64", + "location": "query", + "type": "string" + }, + "sourceObject": { + "description": "Name of the source object. For information about how to URL encode object names to be path safe, see [Encoding URI Path Parts](https://cloud.google.com/storage/docs/request-endpoints#encoding).", + "location": "path", + "required": true, + "type": "string" + }, + "userProject": { + "description": "The project to be billed for this request. Required for Requester Pays buckets.", + "location": "query", + "type": "string" + } + }, + "path": "b/{bucket}/o/{sourceObject}/moveTo/o/{destinationObject}", + "response": { + "$ref": "Object" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/devstorage.full_control", + "https://www.googleapis.com/auth/devstorage.read_write" + ] + }, "patch": { "description": "Patches an object's metadata.", "httpMethod": "PATCH", @@ -5792,6 +5900,11 @@ "format": "date-time", "type": "string" }, + "timeFinalized": { + "description": "The time when the object was finalized.", + "format": "date-time", + "type": "string" + }, "timeStorageClassUpdated": { "description": "The time at which the object's storage class was last changed. 
When the object is initially created, it will be set to timeCreated.", "format": "date-time", diff --git a/discovery/googleapis/sts__v1.json b/discovery/googleapis/sts__v1.json index 5295b165e..a943922e7 100644 --- a/discovery/googleapis/sts__v1.json +++ b/discovery/googleapis/sts__v1.json @@ -6,6 +6,13 @@ "description": "The Security Token Service exchanges Google or third-party credentials for a short-lived access token to Google Cloud resources.", "discoveryVersion": "v1", "documentationLink": "https://cloud.google.com/iam/docs/workload-identity-federation", + "endpoints": [ + { + "description": "Regional Endpoint", + "endpointUrl": "https://sts.us-east7.rep.googleapis.com/", + "location": "us-east7" + } + ], "icons": { "x16": "http://www.google.com/images/icons/product/search-16.gif", "x32": "http://www.google.com/images/icons/product/search-32.gif" @@ -16,7 +23,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20240904", + "revision": "20241106", "rootUrl": "https://sts.googleapis.com/", "servicePath": "", "title": "Security Token Service API", @@ -228,7 +235,7 @@ "type": "string" }, "expires_in": { - "description": "The amount of time, in seconds, between the time when the access token was issued and the time when the access token will expire. This field is absent when the `subject_token` in the request is a Google-issued, short-lived access token. In this case, the access token has the same expiration time as the `subject_token`.", + "description": "The amount of time, in seconds, between the time when the access token was issued and the time when the access token will expire. This field is absent when the `subject_token` in the request is a a short-lived access token for a Cloud Identity or Google Workspace user account. In this case, the access token has the same expiration time as the `subject_token`.", "format": "int32", "type": "integer" }, diff --git a/discovery/googleapis/tagmanager__v2.json b/discovery/googleapis/tagmanager__v2.json index bcf36999e..5345af8e9 100644 --- a/discovery/googleapis/tagmanager__v2.json +++ b/discovery/googleapis/tagmanager__v2.json @@ -43,7 +43,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20240701", + "revision": "20241204", "rootUrl": "https://tagmanager.googleapis.com/", "servicePath": "", "title": "Tag Manager API", @@ -5146,6 +5146,10 @@ "description": "The signature of the community gallery template as computed at import time. This value is recomputed whenever the template is updated from the gallery.", "type": "string" }, + "templateDeveloperId": { + "description": "The developer id of the community gallery template. 
This value is set whenever the template is created from the gallery.", + "type": "string" + }, "version": { "description": "The version of the community gallery template.", "type": "string" diff --git a/discovery/googleapis/testing__v1.json b/discovery/googleapis/testing__v1.json index 614e5003a..d36b2330b 100644 --- a/discovery/googleapis/testing__v1.json +++ b/discovery/googleapis/testing__v1.json @@ -27,7 +27,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20240917", + "revision": "20241105", "rootUrl": "https://testing.googleapis.com/", "servicePath": "", "title": "Cloud Testing API", @@ -973,12 +973,18 @@ "type": "array" }, "usesPermission": { - "description": "Permissions declared to be used by the application", "items": { "type": "string" }, "type": "array" }, + "usesPermissionTags": { + "description": "Permissions declared to be used by the application", + "items": { + "$ref": "UsesPermissionTag" + }, + "type": "array" + }, "versionCode": { "description": "Version number used internally by the app.", "format": "int64", @@ -2820,6 +2826,22 @@ }, "type": "object" }, + "UsesPermissionTag": { + "description": "The tag within a manifest. https://developer.android.com/guide/topics/manifest/uses-permission-element.html", + "id": "UsesPermissionTag", + "properties": { + "maxSdkVersion": { + "description": "The android:name value", + "format": "int32", + "type": "integer" + }, + "name": { + "description": "The android:name value", + "type": "string" + } + }, + "type": "object" + }, "XcodeVersion": { "description": "An Xcode version that an iOS version is compatible with.", "id": "XcodeVersion", diff --git a/discovery/googleapis/texttospeech__v1.json b/discovery/googleapis/texttospeech__v1.json index 15c3b5c41..89182ff80 100644 --- a/discovery/googleapis/texttospeech__v1.json +++ b/discovery/googleapis/texttospeech__v1.json @@ -25,7 +25,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20241008", + "revision": "20241116", "rootUrl": "https://texttospeech.googleapis.com/", "servicePath": "", "title": "Cloud Text-to-Speech API", @@ -112,7 +112,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. 
On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "texttospeech.operations.cancel", @@ -328,7 +328,7 @@ "id": "AdvancedVoiceOptions", "properties": { "lowLatencyJourneySynthesis": { - "description": "Only for Jounrney voices. If false, the synthesis will be context aware and have higher latency.", + "description": "Only for Journey voices. If false, the synthesis will be context aware and have higher latency.", "type": "boolean" } }, @@ -524,6 +524,20 @@ }, "type": "object" }, + "MultiSpeakerMarkup": { + "description": "A collection of turns for multi-speaker synthesis.", + "id": "MultiSpeakerMarkup", + "properties": { + "turns": { + "description": "Required. Speaker turns.", + "items": { + "$ref": "Turn" + }, + "type": "array" + } + }, + "type": "object" + }, "Operation": { "description": "This resource represents a long-running operation that is the result of a network API call.", "id": "Operation", @@ -594,6 +608,10 @@ "$ref": "CustomPronunciations", "description": "Optional. The pronunciation customizations to be applied to the input. If this is set, the input will be synthesized using the given pronunciation customizations. The initial support will be for EFIGS (English, French, Italian, German, Spanish) languages, as provided in VoiceSelectionParams. Journey and Instant Clone voices are not supported yet. In order to customize the pronunciation of a phrase, there must be an exact match of the phrase in the input types. If using SSML, the phrase must not be inside a phoneme tag (entirely or partially)." }, + "multiSpeakerMarkup": { + "$ref": "MultiSpeakerMarkup", + "description": "The multi-speaker input to be synthesized. Only applicable for multi-speaker synthesis." + }, "ssml": { "description": "The SSML document to be synthesized. The SSML document must be valid and well-formed. Otherwise the RPC will fail and return google.rpc.Code.INVALID_ARGUMENT. For more information, see [SSML](https://cloud.google.com/text-to-speech/docs/ssml).", "type": "string" @@ -657,7 +675,7 @@ "properties": { "advancedVoiceOptions": { "$ref": "AdvancedVoiceOptions", - "description": "Adnanced voice options." + "description": "Advanced voice options." }, "audioConfig": { "$ref": "AudioConfig", @@ -686,6 +704,21 @@ }, "type": "object" }, + "Turn": { + "description": "A Multi-speaker turn.", + "id": "Turn", + "properties": { + "speaker": { + "description": "Required. The speaker of the turn, for example, 'O' or 'Q'. Please refer to documentation for available speakers.", + "type": "string" + }, + "text": { + "description": "Required. The text to speak.", + "type": "string" + } + }, + "type": "object" + }, "Voice": { "description": "Description of a voice supported by the TTS service.", "id": "Voice", @@ -725,6 +758,17 @@ }, "type": "object" }, + "VoiceCloneParams": { + "description": "The configuration of Voice Clone feature.", + "id": "VoiceCloneParams", + "properties": { + "voiceCloningKey": { + "description": "Required. Created by GenerateVoiceCloningKey.", + "type": "string" + } + }, + "type": "object" + }, "VoiceSelectionParams": { "description": "Description of which voice to use for a synthesis request.", "id": "VoiceSelectionParams", @@ -756,6 +800,10 @@ "A gender-neutral voice. This voice is not yet supported." 
], "type": "string" + }, + "voiceClone": { + "$ref": "VoiceCloneParams", + "description": "Optional. The configuration for a voice clone. If [VoiceCloneParams.voice_clone_key] is set, the service will choose the voice clone matching the specified configuration." } }, "type": "object" diff --git a/discovery/googleapis/tpu__v1.json b/discovery/googleapis/tpu__v1.json index b436c65b7..8f31fadf8 100644 --- a/discovery/googleapis/tpu__v1.json +++ b/discovery/googleapis/tpu__v1.json @@ -25,7 +25,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20241009", + "revision": "20241126", "rootUrl": "https://tpu.googleapis.com/", "servicePath": "", "title": "Cloud TPU API", @@ -466,7 +466,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "tpu.projects.locations.operations.cancel", diff --git a/discovery/googleapis/tpu__v2.json b/discovery/googleapis/tpu__v2.json index e68e9bd54..d10bd19ac 100644 --- a/discovery/googleapis/tpu__v2.json +++ b/discovery/googleapis/tpu__v2.json @@ -25,7 +25,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20241009", + "revision": "20241126", "rootUrl": "https://tpu.googleapis.com/", "servicePath": "", "title": "Cloud TPU API", @@ -528,7 +528,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. 
Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v2/projects/{projectsId}/locations/{locationsId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "tpu.projects.locations.operations.cancel", @@ -908,7 +908,8 @@ "V3", "V4", "V5LITE_POD", - "V5P" + "V5P", + "V6E" ], "enumDescriptions": [ "Unspecified version.", @@ -916,7 +917,8 @@ "TPU v3.", "TPU v4.", "TPU v5lite pod.", - "TPU v5p" + "TPU v5p", + "TPU v6e" ], "type": "string" } diff --git a/discovery/googleapis/translate__v3.json b/discovery/googleapis/translate__v3.json index 2fbc07cde..de84fd97e 100644 --- a/discovery/googleapis/translate__v3.json +++ b/discovery/googleapis/translate__v3.json @@ -28,7 +28,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20240912", + "revision": "20241029", "rootUrl": "https://translation.googleapis.com/", "servicePath": "", "title": "Cloud Translation API", @@ -295,7 +295,8 @@ "$ref": "Operation" }, "scopes": [ - "https://www.googleapis.com/auth/cloud-platform" + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-translation" ] }, "batchTranslateText": { @@ -323,7 +324,8 @@ "$ref": "Operation" }, "scopes": [ - "https://www.googleapis.com/auth/cloud-platform" + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-translation" ] }, "detectLanguage": { diff --git a/discovery/googleapis/travelimpactmodel__v1.json b/discovery/googleapis/travelimpactmodel__v1.json index 1159935d9..5c6e9a61d 100644 --- a/discovery/googleapis/travelimpactmodel__v1.json +++ b/discovery/googleapis/travelimpactmodel__v1.json @@ -16,7 +16,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20231011", + "revision": "20241027", "rootUrl": "https://travelimpactmodel.googleapis.com/", "servicePath": "", "title": "Travel Impact Model API", @@ -236,7 +236,7 @@ "properties": { "emissionsGramsPerPax": { "$ref": "EmissionsGramsPerPax", - "description": "Optional. Per-passenger emission estimate numbers. Will not be present if emissions could not be computed. For the list of reasons why emissions could not be computed, see ComputeFlightEmissions. Note this field is currently equivalent to ttw_emissions_grams_per_pax until TIM version 1.X.0 which will update this to be total wtw emissions aka wtt_emissions_grams_per_pax + ttw_emissions_grams_per_pax." + "description": "Optional. Per-passenger emission estimate numbers. Will not be present if emissions could not be computed. For the list of reasons why emissions could not be computed, see ComputeFlightEmissions. This field uses wtw emissions aka ttw_emissions_grams_per_pax + wtt_emissions_grams_per_pax." 
}, "flight": { "$ref": "Flight", diff --git a/discovery/googleapis/vault__v1.json b/discovery/googleapis/vault__v1.json index 5b5990358..a57e680dd 100644 --- a/discovery/googleapis/vault__v1.json +++ b/discovery/googleapis/vault__v1.json @@ -28,7 +28,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20240806", + "revision": "20241121", "rootUrl": "https://vault.googleapis.com/", "servicePath": "", "title": "Google Vault API", @@ -1084,7 +1084,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "vault.operations.cancel", diff --git a/discovery/googleapis/verifiedaccess__v2.json b/discovery/googleapis/verifiedaccess__v2.json index 061e896d5..52e88a68e 100644 --- a/discovery/googleapis/verifiedaccess__v2.json +++ b/discovery/googleapis/verifiedaccess__v2.json @@ -25,7 +25,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20241001", + "revision": "20241112", "rootUrl": "https://verifiedaccess.googleapis.com/", "servicePath": "", "title": "Chrome Verified Access API", @@ -287,7 +287,7 @@ "type": "string" }, "osFirewall": { - "description": "The state of the OS level firewall. On ChromeOS, the value will always be ENABLED on regular devices and UNKNOWN on devices in developer mode. The signal is currently not available on MacOS 15 (Sequoia) and later.", + "description": "The state of the OS level firewall. On ChromeOS, the value will always be ENABLED on regular devices and UNKNOWN on devices in developer mode. 
Support for MacOS 15 (Sequoia) and later has been introduced in Chrome M131.", "enum": [ "OS_FIREWALL_UNSPECIFIED", "OS_FIREWALL_UNKNOWN", diff --git a/discovery/googleapis/versionhistory__v1.json b/discovery/googleapis/versionhistory__v1.json index 809440078..96384ec58 100644 --- a/discovery/googleapis/versionhistory__v1.json +++ b/discovery/googleapis/versionhistory__v1.json @@ -5,7 +5,7 @@ "canonicalName": "Version History", "description": "Version History API - Prod", "discoveryVersion": "v1", - "documentationLink": "https://developer.chrome.com/docs/versionhistory/", + "documentationLink": "https://developer.chrome.com/docs/web-platform/versionhistory/guide", "icons": { "x16": "http://www.google.com/images/icons/product/search-16.gif", "x32": "http://www.google.com/images/icons/product/search-32.gif" diff --git a/discovery/googleapis/vision__v1.json b/discovery/googleapis/vision__v1.json index 01eaa5940..752cb2618 100644 --- a/discovery/googleapis/vision__v1.json +++ b/discovery/googleapis/vision__v1.json @@ -28,7 +28,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20240823", + "revision": "20241115", "rootUrl": "https://vision.googleapis.com/", "servicePath": "", "title": "Cloud Vision API", @@ -233,7 +233,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "vision.operations.cancel", diff --git a/discovery/googleapis/vmmigration__v1.json b/discovery/googleapis/vmmigration__v1.json index 802ad7a1c..1721e7615 100644 --- a/discovery/googleapis/vmmigration__v1.json +++ b/discovery/googleapis/vmmigration__v1.json @@ -25,7 +25,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20241013", + "revision": "20241114", "rootUrl": "https://vmmigration.googleapis.com/", "servicePath": "", "title": "VM Migration API", @@ -670,7 +670,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. 
Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "vmmigration.projects.locations.operations.cancel", diff --git a/discovery/googleapis/walletobjects__v1.json b/discovery/googleapis/walletobjects__v1.json index 020819b7d..5fc0a6ae6 100644 --- a/discovery/googleapis/walletobjects__v1.json +++ b/discovery/googleapis/walletobjects__v1.json @@ -25,7 +25,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20241017", + "revision": "20241203", "rootUrl": "https://walletobjects.googleapis.com/", "servicePath": "", "title": "Google Wallet API", @@ -2775,6 +2775,10 @@ "$ref": "AppLinkDataAppLinkInfo", "description": "Optional information about the partner app link." }, + "displayText": { + "$ref": "LocalizedString", + "description": "Optional display text for the app link button. Character limit is 30." + }, "iosAppLinkInfo": { "$ref": "AppLinkDataAppLinkInfo", "deprecated": true, @@ -3516,7 +3520,7 @@ "type": "object" }, "DiscoverableProgram": { - "description": "Information about how a class may be discovered and instantiated from within the Android Pay app. This is done by searching for a loyalty or gift card program and scanning or manually entering.", + "description": "Information about how a class may be discovered and instantiated from within the Google Wallet app. This is done by searching for a loyalty or gift card program and scanning or manually entering.", "id": "DiscoverableProgram", "properties": { "merchantSigninInfo": { @@ -3918,6 +3922,13 @@ "$ref": "Image", "description": "The logo image of the ticket. This image is displayed in the card detail view of the app." }, + "merchantLocations": { + "description": "Merchant locations. There is a maximum of ten on the class. Any additional MerchantLocations added beyond the 10 will be rejected. These locations will trigger a notification when a user enters within a Google-set radius of the point. This field replaces the deprecated LatLongPoints.", + "items": { + "$ref": "MerchantLocation" + }, + "type": "array" + }, "messages": { "description": "An array of messages displayed in the app. All users of this object will receive its associated messages. The maximum number of these fields is 10.", "items": { @@ -4254,6 +4265,13 @@ }, "type": "array" }, + "merchantLocations": { + "description": "Merchant locations. There is a maximum of ten on the object. Any additional MerchantLocations added beyond the 10 will be rejected. 
These locations will trigger a notification when a user enters within a Google-set radius of the point. This field replaces the deprecated LatLongPoints.", + "items": { + "$ref": "MerchantLocation" + }, + "type": "array" + }, "messages": { "description": "An array of messages displayed in the app. All users of this object will receive its associated messages. The maximum number of these fields is 10.", "items": { @@ -4749,6 +4767,13 @@ }, "type": "array" }, + "merchantLocations": { + "description": "Merchant locations. There is a maximum of ten on the class. Any additional MerchantLocations added beyond the 10 will be rejected by the validator. These locations will trigger a notification when a user enters within a Google-set radius of the point. This field replaces the deprecated LatLongPoints.", + "items": { + "$ref": "MerchantLocation" + }, + "type": "array" + }, "messages": { "description": "An array of messages displayed in the app. All users of this object will receive its associated messages. The maximum number of these fields is 10.", "items": { @@ -5042,6 +5067,13 @@ }, "type": "array" }, + "merchantLocations": { + "description": "Merchant locations. There is a maximum of ten on the object. Any additional MerchantLocations added beyond the 10 will be rejected. These locations will trigger a notification when a user enters within a Google-set radius of the point. This field replaces the deprecated LatLongPoints.", + "items": { + "$ref": "MerchantLocation" + }, + "type": "array" + }, "messages": { "description": "An array of messages displayed in the app. All users of this object will receive its associated messages. The maximum number of these fields is 10.", "items": { @@ -5234,6 +5266,13 @@ "$ref": "LinksModuleData", "description": "Links module data. If `linksModuleData` is also defined on the object, both will be displayed. The maximum number of these fields displayed is 10 from class and 10 from object." }, + "merchantLocations": { + "description": "Merchant locations. There is a maximum of ten on the class. Any additional MerchantLocations added beyond the 10 will be rejected. These locations will trigger a notification when a user enters within a Google-set radius of the point. This field replaces the deprecated LatLongPoints.", + "items": { + "$ref": "MerchantLocation" + }, + "type": "array" + }, "messages": { "description": "An array of messages displayed in the app. All users of this object will receive its associated messages. The maximum number of these fields is 10.", "items": { @@ -5379,6 +5418,7 @@ "GENERIC_HOME_INSURANCE", "GENERIC_ENTRY_TICKET", "GENERIC_RECEIPT", + "GENERIC_LOYALTY_CARD", "GENERIC_OTHER" ], "enumDescriptions": [ @@ -5394,6 +5434,7 @@ "Home-insurance cards", "Entry tickets", "Receipts", + "Loyalty cards. Please note that it is advisable to use a dedicated Loyalty card pass type instead of this generic type. A dedicated loyalty card pass type offers more features and functionality than a generic pass type.", "Other type" ], "type": "string" @@ -5444,6 +5485,13 @@ "$ref": "Image", "description": "The logo image of the pass. This image is displayed in the card detail view in upper left, and also on the list/thumbnail view. If the logo is not present, the first letter of `cardTitle` would be shown as logo." }, + "merchantLocations": { + "description": "Merchant locations. There is a maximum of ten on the object. Any additional MerchantLocations added beyond the 10 will be rejected. 
These locations will trigger a notification when a user enters within a Google-set radius of the point. This field replaces the deprecated LatLongPoints.", + "items": { + "$ref": "MerchantLocation" + }, + "type": "array" + }, "messages": { "description": "An array of messages displayed in the app. All users of this object will receive its associated messages. The maximum number of these fields is 10.", "items": { @@ -5675,6 +5723,13 @@ }, "type": "array" }, + "merchantLocations": { + "description": "Merchant locations. There is a maximum of ten on the class. Any additional MerchantLocations added beyond the 10 will be rejected. These locations will trigger a notification when a user enters within a Google-set radius of the point. This field replaces the deprecated LatLongPoints.", + "items": { + "$ref": "MerchantLocation" + }, + "type": "array" + }, "merchantName": { "description": "Merchant name, such as \"Adam's Apparel\". The app may display an ellipsis after the first 20 characters to ensure full string is displayed on smaller screens.", "type": "string" @@ -5957,6 +6012,13 @@ }, "type": "array" }, + "merchantLocations": { + "description": "Merchant locations. There is a maximum of ten on the object. Any additional MerchantLocations added beyond the 10 will be rejected. These locations will trigger a notification when a user enters within a Google-set radius of the point. This field replaces the deprecated LatLongPoints.", + "items": { + "$ref": "MerchantLocation" + }, + "type": "array" + }, "messages": { "description": "An array of messages displayed in the app. All users of this object will receive its associated messages. The maximum number of these fields is 10.", "items": { @@ -6539,6 +6601,13 @@ }, "type": "array" }, + "merchantLocations": { + "description": "Merchant locations. There is a maximum of ten on the class. Any additional MerchantLocations added beyond the 10 will be rejected. These locations will trigger a notification when a user enters within a Google-set radius of the point. This field replaces the deprecated LatLongPoints.", + "items": { + "$ref": "MerchantLocation" + }, + "type": "array" + }, "messages": { "description": "An array of messages displayed in the app. All users of this object will receive its associated messages. The maximum number of these fields is 10.", "items": { @@ -6836,6 +6905,13 @@ "$ref": "LoyaltyPoints", "description": "The loyalty reward points label, balance, and type." }, + "merchantLocations": { + "description": "Merchant locations. There is a maximum of ten on the object. Any additional MerchantLocations added beyond the 10 will be rejected. These locations will trigger a notification when a user enters within a Google-set radius of the point. This field replaces the deprecated LatLongPoints.", + "items": { + "$ref": "MerchantLocation" + }, + "type": "array" + }, "messages": { "description": "An array of messages displayed in the app. All users of this object will receive its associated messages. The maximum number of these fields is 10.", "items": { @@ -7244,6 +7320,23 @@ }, "type": "object" }, + "MerchantLocation": { + "description": "Locations of interest for this class or object. Currently, this location is used for geofenced notifications. When a user is within a set radius of this lat/long, and dwells there, Google will trigger a notification. 
When a user exits this radius, the notification will be hidden.", + "id": "MerchantLocation", + "properties": { + "latitude": { + "description": "The latitude specified as any value in the range of -90.0 through +90.0, both inclusive. Values outside these bounds will be rejected.", + "format": "double", + "type": "number" + }, + "longitude": { + "description": "The longitude specified in the range -180.0 through +180.0, both inclusive. Values outside these bounds will be rejected.", + "format": "double", + "type": "number" + } + }, + "type": "object" + }, "Message": { "description": "A message that will be displayed with a Valuable", "id": "Message", @@ -7517,6 +7610,13 @@ }, "type": "array" }, + "merchantLocations": { + "description": "Merchant locations. There is a maximum of ten on the class. Any additional MerchantLocations added beyond the 10 will be rejected. These locations will trigger a notification when a user enters within a Google-set radius of the point. This field replaces the deprecated LatLongPoints.", + "items": { + "$ref": "MerchantLocation" + }, + "type": "array" + }, "messages": { "description": "An array of messages displayed in the app. All users of this object will receive its associated messages. The maximum number of these fields is 10.", "items": { @@ -7823,6 +7923,13 @@ }, "type": "array" }, + "merchantLocations": { + "description": "Merchant locations. There is a maximum of ten on the object. Any additional MerchantLocations added beyond the 10 will be rejected. These locations will trigger a notification when a user enters within a Google-set radius of the point. This field replaces the deprecated LatLongPoints.", + "items": { + "$ref": "MerchantLocation" + }, + "type": "array" + }, "messages": { "description": "An array of messages displayed in the app. All users of this object will receive its associated messages. The maximum number of these fields is 10.", "items": { @@ -8932,6 +9039,13 @@ "$ref": "Image", "description": "Required. The logo image of the ticket. This image is displayed in the card detail view of the app." }, + "merchantLocations": { + "description": "Merchant locations. There is a maximum of ten on the class. Any additional MerchantLocations added beyond the 10 will be rejected. These locations will trigger a notification when a user enters within a Google-set radius of the point. This field replaces the deprecated LatLongPoints.", + "items": { + "$ref": "MerchantLocation" + }, + "type": "array" + }, "messages": { "description": "An array of messages displayed in the app. All users of this object will receive its associated messages. The maximum number of these fields is 10.", "items": { @@ -9283,6 +9397,13 @@ }, "type": "array" }, + "merchantLocations": { + "description": "Merchant locations. There is a maximum of ten on the object. Any additional MerchantLocations added beyond the 10 will be rejected. These locations will trigger a notification when a user enters within a Google-set radius of the point. This field replaces the deprecated LatLongPoints.", + "items": { + "$ref": "MerchantLocation" + }, + "type": "array" + }, "messages": { "description": "An array of messages displayed in the app. All users of this object will receive its associated messages. 
The maximum number of these fields is 10.", "items": { diff --git a/discovery/googleapis/webrisk__v1.json b/discovery/googleapis/webrisk__v1.json index b88d632aa..b525ddd6f 100644 --- a/discovery/googleapis/webrisk__v1.json +++ b/discovery/googleapis/webrisk__v1.json @@ -25,7 +25,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20230806", + "revision": "20241117", "rootUrl": "https://webrisk.googleapis.com/", "servicePath": "", "title": "Web Risk API", @@ -160,7 +160,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/projects/{projectsId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "webrisk.projects.operations.cancel", diff --git a/discovery/googleapis/workflows__v1.json b/discovery/googleapis/workflows__v1.json index b86f8a671..be8e9920b 100644 --- a/discovery/googleapis/workflows__v1.json +++ b/discovery/googleapis/workflows__v1.json @@ -25,7 +25,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20240925", + "revision": "20241204", "rootUrl": "https://workflows.googleapis.com/", "servicePath": "", "title": "Workflows API", @@ -842,6 +842,13 @@ "description": "Output only. Error regarding the state of the workflow. For example, this field will have error details if the execution data is unavailable due to revoked KMS key permissions.", "readOnly": true }, + "tags": { + "additionalProperties": { + "type": "string" + }, + "description": "Optional. Input only. Immutable. Tags associated with this workflow.", + "type": "object" + }, "updateTime": { "description": "Output only. The timestamp for when the workflow was last updated. 
This is a workflow-wide field and is not tied to a specific revision.", "format": "google-datetime", diff --git a/discovery/googleapis/workloadmanager__v1.json b/discovery/googleapis/workloadmanager__v1.json index 3121f96e1..bfff355e1 100644 --- a/discovery/googleapis/workloadmanager__v1.json +++ b/discovery/googleapis/workloadmanager__v1.json @@ -25,7 +25,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20241009", + "revision": "20241209", "rootUrl": "https://workloadmanager.googleapis.com/", "servicePath": "", "title": "Workload Manager API", @@ -601,7 +601,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "workloadmanager.projects.locations.operations.cancel", @@ -795,42 +795,6 @@ }, "type": "object" }, - "AssetLocation": { - "description": "Provides the mapping of a cloud asset to a direct physical location or to a proxy that defines the location on its behalf.", - "id": "AssetLocation", - "properties": { - "ccfeRmsPath": { - "description": "Spanner path of the CCFE RMS database. It is only applicable for CCFE tenants that use CCFE RMS for storing resource metadata.", - "type": "string" - }, - "expected": { - "$ref": "IsolationExpectations", - "description": "Defines the customer expectation around ZI/ZS for this asset and ZI/ZS state of the region at the time of asset creation." 
- }, - "extraParameters": { - "description": "Defines extra parameters required for specific asset types.", - "items": { - "$ref": "ExtraParameter" - }, - "type": "array" - }, - "locationData": { - "description": "Contains all kinds of physical location definitions for this asset.", - "items": { - "$ref": "LocationData" - }, - "type": "array" - }, - "parentAsset": { - "description": "Defines parents assets if any in order to allow later generation of child_asset_location data via child assets.", - "items": { - "$ref": "CloudAsset" - }, - "type": "array" - } - }, - "type": "object" - }, "BigQueryDestination": { "description": "Message describing big query destination", "id": "BigQueryDestination", @@ -846,49 +810,12 @@ }, "type": "object" }, - "BlobstoreLocation": { - "description": "Policy ID that identified data placement in Blobstore as per go/blobstore-user-guide#data-metadata-placement-and-failure-domains", - "id": "BlobstoreLocation", - "properties": { - "policyId": { - "items": { - "type": "string" - }, - "type": "array" - } - }, - "type": "object" - }, "CancelOperationRequest": { "description": "The request message for Operations.CancelOperation.", "id": "CancelOperationRequest", "properties": {}, "type": "object" }, - "CloudAsset": { - "id": "CloudAsset", - "properties": { - "assetName": { - "type": "string" - }, - "assetType": { - "type": "string" - } - }, - "type": "object" - }, - "CloudAssetComposition": { - "id": "CloudAssetComposition", - "properties": { - "childAsset": { - "items": { - "$ref": "CloudAsset" - }, - "type": "array" - } - }, - "type": "object" - }, "Command": { "description": "* Command specifies the type of command to execute.", "id": "Command", @@ -904,18 +831,6 @@ }, "type": "object" }, - "DirectLocationAssignment": { - "id": "DirectLocationAssignment", - "properties": { - "location": { - "items": { - "$ref": "LocationAssignment" - }, - "type": "array" - } - }, - "type": "object" - }, "Empty": { "description": "A generic empty message that you can re-use to avoid defining duplicated empty messages in your APIs. A typical example is to use it as the request or the response type of an API method. For instance: service Foo { rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty); }", "id": "Empty", @@ -944,6 +859,24 @@ "description": "Description of the Evaluation", "type": "string" }, + "evaluationType": { + "description": "Evaluation type", + "enum": [ + "EVALUATION_TYPE_UNSPECIFIED", + "SAP", + "SQL_SERVER", + "OTHER", + "SCC_IAC" + ], + "enumDescriptions": [ + "Not specified", + "SAP best practices", + "SQL best practices", + "Customized best practices", + "SCC IaC (Infra as Code) best practices" + ], + "type": "string" + }, "labels": { "additionalProperties": { "type": "string" @@ -1031,6 +964,19 @@ "description": "The name of execution resource. The format is projects/{project}/locations/{location}/evaluations/{evaluation}/executions/{execution}", "type": "string" }, + "notices": { + "description": "Output only. Additional information generated by the execution", + "items": { + "$ref": "Notice" + }, + "readOnly": true, + "type": "array" + }, + "resultSummary": { + "$ref": "Summary", + "description": "Output only. [Output only] Result summary", + "readOnly": true + }, "ruleResults": { "description": "Output only. 
execution result summary per rule", "items": { @@ -1162,17 +1108,6 @@ }, "type": "object" }, - "ExtraParameter": { - "description": "Defines parameters that should only be used for specific asset types.", - "id": "ExtraParameter", - "properties": { - "regionalMigDistributionPolicy": { - "$ref": "RegionalMigDistributionPolicy", - "description": "Details about zones used by regional compute.googleapis.com/InstanceGroupManager to create instances." - } - }, - "type": "object" - }, "GceInstanceFilter": { "description": "Message describing compute engine instance filter", "id": "GceInstanceFilter", @@ -1212,131 +1147,10 @@ "sqlserverValidation": { "$ref": "SqlserverValidation", "description": "The insights data for the sqlserver workload validation." - } - }, - "type": "object" - }, - "IsolationExpectations": { - "id": "IsolationExpectations", - "properties": { - "requirementOverride": { - "$ref": "RequirementOverride", - "description": "Explicit overrides for ZI and ZS requirements to be used for resources that should be excluded from ZI/ZS verification logic." - }, - "ziOrgPolicy": { - "enum": [ - "ZI_UNSPECIFIED", - "ZI_UNKNOWN", - "ZI_NOT_REQUIRED", - "ZI_PREFERRED", - "ZI_REQUIRED" - ], - "enumDescriptions": [ - "", - "To be used if tracking is not available", - "", - "", - "" - ], - "type": "string" - }, - "ziRegionPolicy": { - "enum": [ - "ZI_REGION_POLICY_UNSPECIFIED", - "ZI_REGION_POLICY_UNKNOWN", - "ZI_REGION_POLICY_NOT_SET", - "ZI_REGION_POLICY_FAIL_OPEN", - "ZI_REGION_POLICY_FAIL_CLOSED" - ], - "enumDescriptions": [ - "", - "To be used if tracking is not available", - "", - "", - "" - ], - "type": "string" - }, - "ziRegionState": { - "enum": [ - "ZI_REGION_UNSPECIFIED", - "ZI_REGION_UNKNOWN", - "ZI_REGION_NOT_ENABLED", - "ZI_REGION_ENABLED" - ], - "enumDescriptions": [ - "", - "To be used if tracking is not available", - "", - "" - ], - "type": "string" - }, - "zoneIsolation": { - "deprecated": true, - "description": "Deprecated: use zi_org_policy, zi_region_policy and zi_region_state instead for setting ZI expectations as per go/zicy-publish-physical-location.", - "enum": [ - "ZI_UNSPECIFIED", - "ZI_UNKNOWN", - "ZI_NOT_REQUIRED", - "ZI_PREFERRED", - "ZI_REQUIRED" - ], - "enumDescriptions": [ - "", - "To be used if tracking is not available", - "", - "", - "" - ], - "type": "string" - }, - "zoneSeparation": { - "deprecated": true, - "description": "Deprecated: use zs_org_policy, and zs_region_stateinstead for setting Zs expectations as per go/zicy-publish-physical-location.", - "enum": [ - "ZS_UNSPECIFIED", - "ZS_UNKNOWN", - "ZS_NOT_REQUIRED", - "ZS_REQUIRED" - ], - "enumDescriptions": [ - "", - "To be used if tracking is not available", - "", - "" - ], - "type": "string" - }, - "zsOrgPolicy": { - "enum": [ - "ZS_UNSPECIFIED", - "ZS_UNKNOWN", - "ZS_NOT_REQUIRED", - "ZS_REQUIRED" - ], - "enumDescriptions": [ - "", - "To be used if tracking is not available", - "", - "" - ], - "type": "string" }, - "zsRegionState": { - "enum": [ - "ZS_REGION_UNSPECIFIED", - "ZS_REGION_UNKNOWN", - "ZS_REGION_NOT_ENABLED", - "ZS_REGION_ENABLED" - ], - "enumDescriptions": [ - "", - "To be used if tracking of the asset ZS-bit is not available", - "", - "" - ], - "type": "string" + "torsoValidation": { + "$ref": "TorsoValidation", + "description": "The insights data for workload validation of torso workloads." 
} }, "type": "object" @@ -1515,64 +1329,18 @@ }, "type": "object" }, - "LocationAssignment": { - "id": "LocationAssignment", + "Notice": { + "description": "Message for additional information generated by the execution", + "id": "Notice", "properties": { - "location": { - "type": "string" - }, - "locationType": { - "enum": [ - "UNSPECIFIED", - "CLUSTER", - "POP", - "CLOUD_ZONE", - "CLOUD_REGION", - "MULTI_REGION_GEO", - "MULTI_REGION_JURISDICTION", - "GLOBAL", - "OTHER" - ], - "enumDescriptions": [ - "", - "1-10: Physical failure domains.", - "", - "11-20: Logical failure domains.", - "", - "", - "", - "", - "" - ], + "message": { + "description": "Output only. Message of the notice", + "readOnly": true, "type": "string" } }, "type": "object" }, - "LocationData": { - "id": "LocationData", - "properties": { - "blobstoreLocation": { - "$ref": "BlobstoreLocation" - }, - "childAssetLocation": { - "$ref": "CloudAssetComposition" - }, - "directLocation": { - "$ref": "DirectLocationAssignment" - }, - "gcpProjectProxy": { - "$ref": "TenantProjectProxy" - }, - "placerLocation": { - "$ref": "PlacerLocation" - }, - "spannerLocation": { - "$ref": "SpannerLocation" - } - }, - "type": "object" - }, "Operation": { "description": "This resource represents a long-running operation that is the result of a network API call.", "id": "Operation", @@ -1652,74 +1420,6 @@ }, "type": "object" }, - "PlacerLocation": { - "description": "Message describing that the location of the customer resource is tied to placer allocations", - "id": "PlacerLocation", - "properties": { - "placerConfig": { - "description": "Directory with a config related to it in placer (e.g. \"/placer/prod/home/my-root/my-dir\")", - "type": "string" - } - }, - "type": "object" - }, - "RegionalMigDistributionPolicy": { - "description": "To be used for specifying the intended distribution of regional compute.googleapis.com/InstanceGroupManager instances", - "id": "RegionalMigDistributionPolicy", - "properties": { - "targetShape": { - "description": "The shape in which the group converges around distribution of resources. 
Instance of proto2 enum", - "format": "int32", - "type": "integer" - }, - "zones": { - "description": "Cloud zones used by regional MIG to create instances.", - "items": { - "$ref": "ZoneConfiguration" - }, - "type": "array" - } - }, - "type": "object" - }, - "RequirementOverride": { - "id": "RequirementOverride", - "properties": { - "ziOverride": { - "enum": [ - "ZI_UNSPECIFIED", - "ZI_UNKNOWN", - "ZI_NOT_REQUIRED", - "ZI_PREFERRED", - "ZI_REQUIRED" - ], - "enumDescriptions": [ - "", - "To be used if tracking is not available", - "", - "", - "" - ], - "type": "string" - }, - "zsOverride": { - "enum": [ - "ZS_UNSPECIFIED", - "ZS_UNKNOWN", - "ZS_NOT_REQUIRED", - "ZS_REQUIRED" - ], - "enumDescriptions": [ - "", - "To be used if tracking is not available", - "", - "" - ], - "type": "string" - } - }, - "type": "object" - }, "Resource": { "description": "Message represent resource in execution result", "id": "Resource", @@ -2454,26 +2154,6 @@ }, "type": "object" }, - "SpannerLocation": { - "id": "SpannerLocation", - "properties": { - "backupName": { - "description": "Set of backups used by the resource with name in the same format as what is available at http://table/spanner_automon.backup_metadata", - "items": { - "type": "string" - }, - "type": "array" - }, - "dbName": { - "description": "Set of databases used by the resource in format /span//", - "items": { - "type": "string" - }, - "type": "array" - } - }, - "type": "object" - }, "SqlserverValidation": { "description": "A presentation of SQLServer workload insight. The schema of SqlServer workloads validation related data.", "id": "SqlserverValidation", @@ -2589,14 +2269,69 @@ }, "type": "object" }, - "TenantProjectProxy": { - "id": "TenantProjectProxy", + "Summary": { + "description": "Message for execution summary", + "id": "Summary", "properties": { - "projectNumbers": { - "items": { + "failures": { + "description": "Output only. Number of failures", + "format": "int64", + "readOnly": true, + "type": "string" + }, + "newFailures": { + "description": "Output only. Number of new failures compared to the previous execution", + "format": "int64", + "readOnly": true, + "type": "string" + }, + "newFixes": { + "description": "Output only. Number of new fixes compared to the previous execution", + "format": "int64", + "readOnly": true, + "type": "string" + } + }, + "type": "object" + }, + "TorsoValidation": { + "description": "The schema of torso workload validation data.", + "id": "TorsoValidation", + "properties": { + "agentVersion": { + "description": "Required. agent_version lists the version of the agent that collected this data.", + "type": "string" + }, + "instanceName": { + "description": "Required. instance_name lists the human readable name of the instance that the data comes from.", + "type": "string" + }, + "projectId": { + "description": "Required. project_id lists the human readable cloud project that the data comes from.", + "type": "string" + }, + "validationDetails": { + "additionalProperties": { "type": "string" }, - "type": "array" + "description": "Required. validation_details contains the pairs of validation data: field name & field value.", + "type": "object" + }, + "workloadType": { + "description": "Required. workload_type specifies the type of torso workload.", + "enum": [ + "WORKLOAD_TYPE_UNSPECIFIED", + "MYSQL", + "ORACLE", + "REDIS" + ], + "enumDescriptions": [ + "Unspecified workload type.", + "MySQL workload.", + "Oracle workload.", + "Redis workload." 
+ ], + "type": "string" } }, "type": "object" @@ -2647,15 +2382,6 @@ "id": "WriteInsightResponse", "properties": {}, "type": "object" - }, - "ZoneConfiguration": { - "id": "ZoneConfiguration", - "properties": { - "zone": { - "type": "string" - } - }, - "type": "object" } } } diff --git a/discovery/googleapis/workspaceevents__v1.json b/discovery/googleapis/workspaceevents__v1.json index c8322061e..5606cec7b 100644 --- a/discovery/googleapis/workspaceevents__v1.json +++ b/discovery/googleapis/workspaceevents__v1.json @@ -29,6 +29,21 @@ "https://www.googleapis.com/auth/chat.spaces.readonly": { "description": "View chat and spaces in Google Chat" }, + "https://www.googleapis.com/auth/drive": { + "description": "See, edit, create, and delete all of your Google Drive files" + }, + "https://www.googleapis.com/auth/drive.file": { + "description": "See, edit, create, and delete only the specific Google Drive files you use with this app" + }, + "https://www.googleapis.com/auth/drive.metadata": { + "description": "View and manage metadata of files in your Google Drive" + }, + "https://www.googleapis.com/auth/drive.metadata.readonly": { + "description": "See information about your Google Drive files" + }, + "https://www.googleapis.com/auth/drive.readonly": { + "description": "See and download all your Google Drive files" + }, "https://www.googleapis.com/auth/meetings.space.created": { "description": "Create, edit, and see information about your Google Meet conferences created by the app." }, @@ -55,7 +70,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20241008", + "revision": "20241210", "rootUrl": "https://workspaceevents.googleapis.com/", "servicePath": "", "title": "Google Workspace Events API", @@ -172,6 +187,11 @@ "https://www.googleapis.com/auth/chat.messages.readonly", "https://www.googleapis.com/auth/chat.spaces", "https://www.googleapis.com/auth/chat.spaces.readonly", + "https://www.googleapis.com/auth/drive", + "https://www.googleapis.com/auth/drive.file", + "https://www.googleapis.com/auth/drive.metadata", + "https://www.googleapis.com/auth/drive.metadata.readonly", + "https://www.googleapis.com/auth/drive.readonly", "https://www.googleapis.com/auth/meetings.space.created", "https://www.googleapis.com/auth/meetings.space.readonly" ] @@ -209,6 +229,11 @@ "https://www.googleapis.com/auth/chat.messages.readonly", "https://www.googleapis.com/auth/chat.spaces", "https://www.googleapis.com/auth/chat.spaces.readonly", + "https://www.googleapis.com/auth/drive", + "https://www.googleapis.com/auth/drive.file", + "https://www.googleapis.com/auth/drive.metadata", + "https://www.googleapis.com/auth/drive.metadata.readonly", + "https://www.googleapis.com/auth/drive.readonly", "https://www.googleapis.com/auth/meetings.space.created", "https://www.googleapis.com/auth/meetings.space.readonly" ] @@ -259,6 +284,11 @@ "https://www.googleapis.com/auth/chat.messages.readonly", "https://www.googleapis.com/auth/chat.spaces", "https://www.googleapis.com/auth/chat.spaces.readonly", + "https://www.googleapis.com/auth/drive", + "https://www.googleapis.com/auth/drive.file", + "https://www.googleapis.com/auth/drive.metadata", + "https://www.googleapis.com/auth/drive.metadata.readonly", + "https://www.googleapis.com/auth/drive.readonly", "https://www.googleapis.com/auth/meetings.space.created", "https://www.googleapis.com/auth/meetings.space.readonly" ] @@ -294,6 +324,11 @@ "https://www.googleapis.com/auth/chat.messages.readonly", "https://www.googleapis.com/auth/chat.spaces", 
"https://www.googleapis.com/auth/chat.spaces.readonly", + "https://www.googleapis.com/auth/drive", + "https://www.googleapis.com/auth/drive.file", + "https://www.googleapis.com/auth/drive.metadata", + "https://www.googleapis.com/auth/drive.metadata.readonly", + "https://www.googleapis.com/auth/drive.readonly", "https://www.googleapis.com/auth/meetings.space.created", "https://www.googleapis.com/auth/meetings.space.readonly" ] @@ -336,6 +371,11 @@ "https://www.googleapis.com/auth/chat.messages.readonly", "https://www.googleapis.com/auth/chat.spaces", "https://www.googleapis.com/auth/chat.spaces.readonly", + "https://www.googleapis.com/auth/drive", + "https://www.googleapis.com/auth/drive.file", + "https://www.googleapis.com/auth/drive.metadata", + "https://www.googleapis.com/auth/drive.metadata.readonly", + "https://www.googleapis.com/auth/drive.readonly", "https://www.googleapis.com/auth/meetings.space.created", "https://www.googleapis.com/auth/meetings.space.readonly" ] @@ -384,12 +424,17 @@ "https://www.googleapis.com/auth/chat.messages.readonly", "https://www.googleapis.com/auth/chat.spaces", "https://www.googleapis.com/auth/chat.spaces.readonly", + "https://www.googleapis.com/auth/drive", + "https://www.googleapis.com/auth/drive.file", + "https://www.googleapis.com/auth/drive.metadata", + "https://www.googleapis.com/auth/drive.metadata.readonly", + "https://www.googleapis.com/auth/drive.readonly", "https://www.googleapis.com/auth/meetings.space.created", "https://www.googleapis.com/auth/meetings.space.readonly" ] }, "reactivate": { - "description": "[Developer Preview](https://developers.google.com/workspace/preview): Reactivates a suspended Google Workspace subscription. This method resets your subscription's `State` field to `ACTIVE`. Before you use this method, you must fix the error that suspended the subscription. To learn how to use this method, see [Reactivate a Google Workspace subscription](https://developers.google.com/workspace/events/guides/reactivate-subscription).", + "description": "[Developer Preview](https://developers.google.com/workspace/preview): Reactivates a suspended Google Workspace subscription. This method resets your subscription's `State` field to `ACTIVE`. Before you use this method, you must fix the error that suspended the subscription. This method will ignore or reject any subscription that isn't currently in a suspended state. 
To learn how to use this method, see [Reactivate a Google Workspace subscription](https://developers.google.com/workspace/events/guides/reactivate-subscription).", "flatPath": "v1/subscriptions/{subscriptionsId}:reactivate", "httpMethod": "POST", "id": "workspaceevents.subscriptions.reactivate", @@ -421,6 +466,11 @@ "https://www.googleapis.com/auth/chat.messages.readonly", "https://www.googleapis.com/auth/chat.spaces", "https://www.googleapis.com/auth/chat.spaces.readonly", + "https://www.googleapis.com/auth/drive", + "https://www.googleapis.com/auth/drive.file", + "https://www.googleapis.com/auth/drive.metadata", + "https://www.googleapis.com/auth/drive.metadata.readonly", + "https://www.googleapis.com/auth/drive.readonly", "https://www.googleapis.com/auth/meetings.space.created", "https://www.googleapis.com/auth/meetings.space.readonly" ] diff --git a/discovery/googleapis/workstations__v1.json b/discovery/googleapis/workstations__v1.json index 205841592..e92d11ff1 100644 --- a/discovery/googleapis/workstations__v1.json +++ b/discovery/googleapis/workstations__v1.json @@ -25,7 +25,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20241009", + "revision": "20241202", "rootUrl": "https://workstations.googleapis.com/", "servicePath": "", "title": "Cloud Workstations API", @@ -184,7 +184,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "workstations.projects.locations.operations.cancel", @@ -877,7 +877,7 @@ ] }, "generateAccessToken": { - "description": "Returns a short-lived credential that can be used to send authenticated and authorized traffic to a workstation.", + "description": "Returns a short-lived credential that can be used to send authenticated and authorized traffic to a workstation. 
Once generated this token cannot be revoked and is good for the lifetime of the token.", "flatPath": "v1/projects/{projectsId}/locations/{locationsId}/workstationClusters/{workstationClustersId}/workstationConfigs/{workstationConfigsId}/workstations/{workstationsId}:generateAccessToken", "httpMethod": "POST", "id": "workstations.projects.locations.workstationClusters.workstationConfigs.workstations.generateAccessToken", @@ -1287,7 +1287,7 @@ "type": "object" }, "BoostConfig": { - "description": "A configuration that workstations can boost to.", + "description": "A boost configuration is a set of resources that a workstation can use to increase its performance. If a boost configuration is specified, when starting a workstation, users can choose to use a VM provisioned under the boost config by passing the boost config id in the start request. If no boost config id is provided in the start request, the system will choose a VM from the pool provisioned under the default config.", "id": "BoostConfig", "properties": { "accelerators": { @@ -1307,7 +1307,7 @@ "type": "boolean" }, "id": { - "description": "Optional. Required. The id to be used for the boost configuration.", + "description": "Required. The id to be used for the boost configuration.", "type": "string" }, "machineType": { @@ -1456,7 +1456,7 @@ "type": "array" }, "boostConfigs": { - "description": "Optional. A list of the boost configurations that workstations created using this workstation configuration are allowed to use.", + "description": "Optional. A list of the boost configurations that workstations created using this workstation configuration are allowed to use. If specified, users will have the option to choose from the list of boost configs when starting a workstation.", "items": { "$ref": "BoostConfig" }, diff --git a/discovery/googleapis/youtube__v3.json b/discovery/googleapis/youtube__v3.json index 14d6254b7..acd677148 100644 --- a/discovery/googleapis/youtube__v3.json +++ b/discovery/googleapis/youtube__v3.json @@ -43,7 +43,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20241022", + "revision": "20241203", "rootUrl": "https://youtube.googleapis.com/", "servicePath": "", "title": "YouTube Data API v3", @@ -4070,67 +4070,6 @@ "$ref": "CommentThread" } } - }, - "resources": { - "liveChat": { - "resources": { - "messages": { - "methods": { - "stream": { - "description": "Allows a user to load live chat through a server-streamed RPC.", - "flatPath": "youtube/v3/liveChat/messages/stream", - "httpMethod": "GET", - "id": "youtube.youtube.v3.liveChat.messages.stream", - "parameterOrder": [], - "parameters": { - "hl": { - "description": "Specifies the localization language in which the system messages should be returned.", - "location": "query", - "type": "string" - }, - "liveChatId": { - "description": "The id of the live chat for which comments should be returned.", - "location": "query", - "type": "string" - }, - "maxResults": { - "default": "500", - "description": "The *maxResults* parameter specifies the maximum number of items that should be returned in the result set. Not used in the streaming RPC.", - "format": "uint32", - "location": "query", - "maximum": "2000", - "minimum": "200", - "type": "integer" - }, - "pageToken": { - "description": "The *pageToken* parameter identifies a specific page in the result set that should be returned. 
In an API response, the nextPageToken property identify other pages that could be retrieved.", - "location": "query", - "type": "string" - }, - "part": { - "description": "The *part* parameter specifies the liveChatComment resource parts that the API response will include. Supported values are id, snippet, and authorDetails.", - "location": "query", - "repeated": true, - "type": "string" - }, - "profileImageSize": { - "description": "Specifies the size of the profile image that should be returned for each user.", - "format": "uint32", - "location": "query", - "maximum": "720", - "minimum": "16", - "type": "integer" - } - }, - "path": "youtube/v3/liveChat/messages/stream", - "response": { - "$ref": "LiveChatMessageListResponse" - } - } - } - } - } - } } } } @@ -5613,6 +5552,10 @@ "$ref": "ChannelToStoreLinkDetailsBillingDetails", "description": "Information specific to billing (read-only)." }, + "merchantAffiliateProgramDetails": { + "$ref": "ChannelToStoreLinkDetailsMerchantAffiliateProgramDetails", + "description": "Information specific to merchant affiliate program (read-only)." + }, "merchantId": { "description": "Google Merchant Center id of the store.", "format": "uint64", @@ -5652,6 +5595,29 @@ }, "type": "object" }, + "ChannelToStoreLinkDetailsMerchantAffiliateProgramDetails": { + "description": "Information specific to merchant affiliate program.", + "id": "ChannelToStoreLinkDetailsMerchantAffiliateProgramDetails", + "properties": { + "status": { + "description": "The current merchant affiliate program status.", + "enum": [ + "merchantAffiliateProgramStatusUnspecified", + "merchantAffiliateProgramStatusEligible", + "merchantAffiliateProgramStatusActive", + "merchantAffiliateProgramStatusPaused" + ], + "enumDescriptions": [ + "Unspecified status.", + "Merchant is eligible for the merchant affiliate program.", + "Merchant affiliate program is active.", + "Merchant affiliate program is paused." + ], + "type": "string" + } + }, + "type": "object" + }, "ChannelTopicDetails": { "description": "Freebase topic information related to the channel.", "id": "ChannelTopicDetails", @@ -10077,7 +10043,7 @@ }, "kind": { "default": "youtube#playlistItemListResponse", - "description": "Identifies what kind of resource this is. Value: the fixed string \"youtube#playlistItemListResponse\". Etag of this resource.", + "description": "Identifies what kind of resource this is. Value: the fixed string \"youtube#playlistItemListResponse\".", "type": "string" }, "nextPageToken": { @@ -11706,7 +11672,7 @@ "type": "string" }, "scheduledEndTime": { - "description": "The time that the broadcast is scheduled to end. If the value is empty or the property is not present, then the broadcast is scheduled to contiue indefinitely.", + "description": "The time that the broadcast is scheduled to end. If the value is empty or the property is not present, then the broadcast is scheduled to continue indefinitely.", "format": "date-time", "type": "string" }, @@ -12019,6 +11985,10 @@ "description": "Basic details about a video category, such as its localized title. Next Id: 19", "id": "VideoStatus", "properties": { + "containsSyntheticMedia": { + "description": "Indicates if the video contains altered or synthetic media.", + "type": "boolean" + }, "embeddable": { "description": "This value indicates if the video can be embedded on another website. 
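The new VideoStatus.containsSyntheticMedia flag added above can be set through the existing videos.update method. A minimal sketch, assuming ACCESS_TOKEN holds a valid OAuth token and VIDEO_ID is a video you own; note that an update with part=status replaces the whole status object, so preserve your existing status fields.

import os
import requests

TOKEN = os.environ["ACCESS_TOKEN"]  # hypothetical env var holding an OAuth 2.0 access token
VIDEO_ID = "your-video-id"          # hypothetical video id

body = {
    "id": VIDEO_ID,
    "status": {
        "privacyStatus": "public",       # keep your existing status fields when updating
        "containsSyntheticMedia": True,  # declare altered or synthetic media
    },
}
resp = requests.put(
    "https://youtube.googleapis.com/youtube/v3/videos",
    params={"part": "status"},
    headers={"Authorization": f"Bearer {TOKEN}"},
    json=body,
)
resp.raise_for_status()
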
@mutable youtube.videos.insert youtube.videos.update", "type": "boolean" diff --git a/discovery/googleapis_beta/alertcenter__v1beta1.json b/discovery/googleapis_beta/alertcenter__v1beta1.json index 949b37046..1728595da 100644 --- a/discovery/googleapis_beta/alertcenter__v1beta1.json +++ b/discovery/googleapis_beta/alertcenter__v1beta1.json @@ -25,7 +25,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20240415", + "revision": "20241028", "rootUrl": "https://alertcenter.googleapis.com/", "servicePath": "", "title": "Google Workspace Alert Center API", @@ -858,7 +858,7 @@ "type": "object" }, "AppSettingsChanged": { - "description": "Alerts from AppSettingsChanged bucket Rules configured by Admin which contain the below rules. Calendar settings changed Drive settings changed Email settings changed Mobile settings changed", + "description": "* Alerts from AppSettingsChanged bucket Rules configured by Admin which contain the below rules. Calendar settings changed Drive settings changed Email settings changed Mobile settings changed", "id": "AppSettingsChanged", "properties": { "alertDetails": { @@ -1579,7 +1579,7 @@ "type": "object" }, "PrimaryAdminChangedEvent": { - "description": "Event occurred when primary admin changed in customer's account. The event are being received from insight forwarder", + "description": "* Event occurred when primary admin changed in customer's account. The event are being received from insight forwarder", "id": "PrimaryAdminChangedEvent", "properties": { "domain": { @@ -1890,7 +1890,7 @@ "type": "object" }, "SSOProfileCreatedEvent": { - "description": "Event occurred when SSO Profile created in customer's account. The event are being received from insight forwarder", + "description": "* Event occurred when SSO Profile created in customer's account. The event are being received from insight forwarder", "id": "SSOProfileCreatedEvent", "properties": { "inboundSsoProfileName": { @@ -1901,7 +1901,7 @@ "type": "object" }, "SSOProfileDeletedEvent": { - "description": "Event occurred when SSO Profile deleted in customer's account. The event are being received from insight forwarder", + "description": "* Event occurred when SSO Profile deleted in customer's account. The event are being received from insight forwarder", "id": "SSOProfileDeletedEvent", "properties": { "inboundSsoProfileName": { @@ -1912,7 +1912,7 @@ "type": "object" }, "SSOProfileUpdatedEvent": { - "description": "Event occurred when SSO Profile updated in customer's account. The event are being received from insight forwarder", + "description": "* Event occurred when SSO Profile updated in customer's account. The event are being received from insight forwarder", "id": "SSOProfileUpdatedEvent", "properties": { "inboundSsoProfileChanges": { @@ -2015,7 +2015,7 @@ "type": "object" }, "SuperAdminPasswordResetEvent": { - "description": "Event occurred when password was reset for super admin in customer's account. The event are being received from insight forwarder", + "description": "* Event occurred when password was reset for super admin in customer's account. The event are being received from insight forwarder", "id": "SuperAdminPasswordResetEvent", "properties": { "userEmail": { @@ -2195,7 +2195,7 @@ "type": "object" }, "UserChanges": { - "description": "Alerts from UserChanges bucket Rules for predefined rules which contain the below rules. 
Suspended user made active New user Added User suspended (by admin) User granted admin privileges User admin privileges revoked User deleted Users password changed", + "description": "* Alerts from UserChanges bucket Rules for predefined rules which contain the below rules. Suspended user made active New user Added User suspended (by admin) User granted admin privileges User admin privileges revoked User deleted Users password changed", "id": "UserChanges", "properties": { "name": { diff --git a/discovery/googleapis_beta/analyticsadmin__v1beta.json b/discovery/googleapis_beta/analyticsadmin__v1beta.json index 9bfeac0be..b3cb3d98f 100644 --- a/discovery/googleapis_beta/analyticsadmin__v1beta.json +++ b/discovery/googleapis_beta/analyticsadmin__v1beta.json @@ -28,7 +28,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20240916", + "revision": "20241113", "rootUrl": "https://analyticsadmin.googleapis.com/", "servicePath": "", "title": "Google Analytics Admin API", @@ -3496,6 +3496,8 @@ "GOOGLE_SIGNALS_SETTINGS", "CONVERSION_EVENT", "MEASUREMENT_PROTOCOL_SECRET", + "CUSTOM_DIMENSION", + "CUSTOM_METRIC", "DATA_RETENTION_SETTINGS", "DISPLAY_VIDEO_360_ADVERTISER_LINK", "DISPLAY_VIDEO_360_ADVERTISER_LINK_PROPOSAL", @@ -3511,6 +3513,8 @@ "GoogleSignalsSettings resource", "ConversionEvent resource", "MeasurementProtocolSecret resource", + "CustomDimension resource", + "CustomMetric resource", "DataRetentionSettings resource", "DisplayVideo360AdvertiserLink resource", "DisplayVideo360AdvertiserLinkProposal resource", diff --git a/discovery/googleapis_beta/analyticsdata__v1beta.json b/discovery/googleapis_beta/analyticsdata__v1beta.json index eec0f0fdf..67ed8059d 100644 --- a/discovery/googleapis_beta/analyticsdata__v1beta.json +++ b/discovery/googleapis_beta/analyticsdata__v1beta.json @@ -28,7 +28,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20240825", + "revision": "20241117", "rootUrl": "https://analyticsdata.googleapis.com/", "servicePath": "", "title": "Google Analytics Data API", @@ -1009,6 +1009,12 @@ }, "type": "object" }, + "EmptyFilter": { + "description": "Filter for empty values.", + "id": "EmptyFilter", + "properties": {}, + "type": "object" + }, "Filter": { "description": "An expression to filter dimension or metric values.", "id": "Filter", @@ -1017,6 +1023,10 @@ "$ref": "BetweenFilter", "description": "A filter for two values." }, + "emptyFilter": { + "$ref": "EmptyFilter", + "description": "A filter for empty values such as \"(not set)\" and \"\" values." + }, "fieldName": { "description": "The dimension name or metric name. In most methods, dimensions & metrics can be used for the first time in this field. 
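The new EmptyFilter added above plugs into the existing Filter alongside fieldName. A hedged runReport sketch that keeps only rows where the city dimension is empty (for example "(not set)" or ""); the property id and token source are placeholders.

import os
import requests

TOKEN = os.environ["ACCESS_TOKEN"]   # hypothetical env var holding an OAuth 2.0 access token
PROPERTY = "properties/123456789"    # hypothetical GA4 property

body = {
    "dateRanges": [{"startDate": "7daysAgo", "endDate": "today"}],
    "dimensions": [{"name": "city"}],
    "metrics": [{"name": "activeUsers"}],
    # Keep only rows where the city dimension is empty; wrap this expression in
    # {"notExpression": ...} to exclude empty values instead.
    "dimensionFilter": {"filter": {"fieldName": "city", "emptyFilter": {}}},
}
resp = requests.post(
    f"https://analyticsdata.googleapis.com/v1beta/{PROPERTY}:runReport",
    headers={"Authorization": f"Bearer {TOKEN}"},
    json=body,
)
resp.raise_for_status()
print(resp.json())
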
However in a RunPivotReportRequest, this field must be additionally specified by name in the RunPivotReportRequest's dimensions or metrics.", "type": "string" diff --git a/discovery/googleapis_beta/cloudsupport__v2beta.json b/discovery/googleapis_beta/cloudsupport__v2beta.json index 7a38dab84..b3d14f330 100644 --- a/discovery/googleapis_beta/cloudsupport__v2beta.json +++ b/discovery/googleapis_beta/cloudsupport__v2beta.json @@ -25,7 +25,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20240429", + "revision": "20241212", "rootUrl": "https://cloudsupport.googleapis.com/", "servicePath": "", "title": "Google Cloud Support API", @@ -453,7 +453,7 @@ ], "parameters": { "pageSize": { - "description": "The maximum number of attachments fetched with each request. If not provided, the default is 10. The maximum page size that will be returned is 100.", + "description": "The maximum number of attachments fetched with each request. If not provided, the default is 10. The maximum page size that will be returned is 100. The size of each page can be smaller than the requested page size and can include zero. For example, you could request 100 attachments on one page, receive 0, and then on the next page, receive 90.", "format": "int32", "location": "query", "type": "integer" diff --git a/discovery/googleapis_beta/dataflow__v1b3.json b/discovery/googleapis_beta/dataflow__v1b3.json index d1e3ca1cb..0208dbbec 100644 --- a/discovery/googleapis_beta/dataflow__v1b3.json +++ b/discovery/googleapis_beta/dataflow__v1b3.json @@ -110,7 +110,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20241014", + "revision": "20241209", "rootUrl": "https://dataflow.googleapis.com/", "servicePath": "", "title": "Dataflow API", @@ -2927,6 +2927,23 @@ }, "type": "object" }, + "DataflowGaugeValue": { + "description": "The gauge value of a metric.", + "id": "DataflowGaugeValue", + "properties": { + "measuredTime": { + "description": "The timestamp when the gauge was recorded.", + "format": "google-datetime", + "type": "string" + }, + "value": { + "description": "The value of the gauge.", + "format": "int64", + "type": "string" + } + }, + "type": "object" + }, "DataflowHistogramValue": { "description": "Summary statistics for a population of values. HistogramValue contains a sequence of buckets and gives a count of values that fall into each bucket. Bucket boundares are defined by a formula and bucket widths are either fixed or exponentially increasing.", "id": "DataflowHistogramValue", @@ -3633,6 +3650,34 @@ }, "type": "object" }, + "GPUUsage": { + "description": "Information about the GPU usage on the worker.", + "id": "GPUUsage", + "properties": { + "timestamp": { + "description": "Required. Timestamp of the measurement.", + "format": "google-datetime", + "type": "string" + }, + "utilization": { + "$ref": "GPUUtilization", + "description": "Required. Utilization info about the GPU." + } + }, + "type": "object" + }, + "GPUUtilization": { + "description": "Utilization details about the GPU.", + "id": "GPUUtilization", + "properties": { + "rate": { + "description": "Required. GPU utilization rate of any kernel over the last sample period in the range of [0, 1].", + "format": "double", + "type": "number" + } + }, + "type": "object" + }, "GetDebugConfigRequest": { "description": "Request to get updated debug configuration for component.", "id": "GetDebugConfigRequest", @@ -4730,6 +4775,10 @@ "description": "Optional. 
Set of metric labels for this metric.", "type": "object" }, + "valueGauge64": { + "$ref": "DataflowGaugeValue", + "description": "Non-cumulative int64 value of this metric." + }, "valueHistogram": { "$ref": "DataflowHistogramValue", "description": "Histogram value of this metric." @@ -5469,6 +5518,13 @@ }, "type": "array" }, + "gpuUsage": { + "description": "Optional. GPU usage samples.", + "items": { + "$ref": "GPUUsage" + }, + "type": "array" + }, "memoryInfo": { "description": "Memory utilization samples.", "items": { diff --git a/discovery/googleapis_beta/datalabeling__v1beta1.json b/discovery/googleapis_beta/datalabeling__v1beta1.json index 3aada2596..e018ab261 100644 --- a/discovery/googleapis_beta/datalabeling__v1beta1.json +++ b/discovery/googleapis_beta/datalabeling__v1beta1.json @@ -25,7 +25,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20231001", + "revision": "20241117", "rootUrl": "https://datalabeling.googleapis.com/", "servicePath": "", "title": "Data Labeling API", @@ -1480,7 +1480,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1beta1/projects/{projectsId}/operations/{operationsId}:cancel", "httpMethod": "GET", "id": "datalabeling.projects.operations.cancel", diff --git a/discovery/googleapis_beta/datastream__v1alpha1.json b/discovery/googleapis_beta/datastream__v1alpha1.json index 0a5d03fc3..1d1fe7dfb 100644 --- a/discovery/googleapis_beta/datastream__v1alpha1.json +++ b/discovery/googleapis_beta/datastream__v1alpha1.json @@ -25,7 +25,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20230802", + "revision": "20241204", "rootUrl": "https://datastream.googleapis.com/", "servicePath": "", "title": "Datastream API", @@ -443,7 +443,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. 
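For reference, the shapes of the Dataflow metric structures added above (DataflowGaugeValue and GPUUsage/GPUUtilization) look roughly like the following. The concrete values are invented for illustration, and these payloads are normally produced by Dataflow workers rather than written by hand.

# Invented sample values; field names mirror the schemas added in this revision.
gauge_value = {
    "measuredTime": "2024-12-09T12:00:00Z",  # DataflowGaugeValue.measuredTime (google-datetime)
    "value": "128",                          # int64 values are JSON-encoded as strings
}

gpu_usage_sample = {
    "timestamp": "2024-12-09T12:00:00Z",     # GPUUsage.timestamp
    "utilization": {"rate": 0.42},           # GPUUtilization.rate, in [0, 1]
}
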
On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v1alpha1/projects/{projectsId}/locations/{locationsId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "datastream.projects.locations.operations.cancel", @@ -2078,7 +2078,7 @@ "type": "string" }, "requestedCancellation": { - "description": "Output only. Identifies whether the user has requested cancellation of the operation. Operations that have successfully been cancelled have Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Output only. Identifies whether the user has requested cancellation of the operation. Operations that have successfully been cancelled have google.longrunning.Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", "readOnly": true, "type": "boolean" }, diff --git a/discovery/googleapis_beta/firebase__v1beta1.json b/discovery/googleapis_beta/firebase__v1beta1.json index ad31e4ed1..ac54d3f0e 100644 --- a/discovery/googleapis_beta/firebase__v1beta1.json +++ b/discovery/googleapis_beta/firebase__v1beta1.json @@ -34,7 +34,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20240913", + "revision": "20241107", "rootUrl": "https://firebase.googleapis.com/", "servicePath": "", "title": "Firebase Management API", @@ -121,7 +121,7 @@ "availableProjects": { "methods": { "list": { - "description": "Lists each [Google Cloud Platform (GCP) `Project`] (https://cloud.google.com/resource-manager/reference/rest/v1/projects) that can have Firebase resources added to it. A Project will only be listed if: - The caller has sufficient [Google IAM](https://cloud.google.com/iam) permissions to call AddFirebase. - The Project is not already a FirebaseProject. - The Project is not in an Organization which has policies that prevent Firebase resources from being added. ", + "description": "Lists each [Google Cloud `Project`](https://cloud.google.com/resource-manager/reference/rest/v1/projects) that can have Firebase resources added and Firebase services enabled. A Project will only be listed if: - The caller has sufficient [Google IAM](https://cloud.google.com/iam) permissions to call AddFirebase. - The Project is not already a FirebaseProject. - The Project is not in an Organization which has policies that prevent Firebase resources from being added. ", "flatPath": "v1beta1/availableProjects", "httpMethod": "GET", "id": "firebase.availableProjects.list", @@ -187,7 +187,7 @@ "projects": { "methods": { "addFirebase": { - "description": "Adds Firebase resources to the specified existing [Google Cloud Platform (GCP) `Project`] (https://cloud.google.com/resource-manager/reference/rest/v1/projects). 
Since a FirebaseProject is actually also a GCP `Project`, a `FirebaseProject` has the same underlying GCP identifiers (`projectNumber` and `projectId`). This allows for easy interop with Google APIs. The result of this call is an [`Operation`](../../v1beta1/operations). Poll the `Operation` to track the provisioning process by calling GetOperation until [`done`](../../v1beta1/operations#Operation.FIELDS.done) is `true`. When `done` is `true`, the `Operation` has either succeeded or failed. If the `Operation` succeeded, its [`response`](../../v1beta1/operations#Operation.FIELDS.response) is set to a FirebaseProject; if the `Operation` failed, its [`error`](../../v1beta1/operations#Operation.FIELDS.error) is set to a google.rpc.Status. The `Operation` is automatically deleted after completion, so there is no need to call DeleteOperation. This method does not modify any billing account information on the underlying GCP `Project`. To call `AddFirebase`, a project member or service account must have the following permissions (the IAM roles of Editor and Owner contain these permissions): `firebase.projects.update`, `resourcemanager.projects.get`, `serviceusage.services.enable`, and `serviceusage.services.get`.", + "description": "Adds Firebase resources and enables Firebase services in the specified existing [Google Cloud `Project`](https://cloud.google.com/resource-manager/reference/rest/v1/projects). Since a FirebaseProject is actually also a Google Cloud `Project`, a `FirebaseProject` has the same underlying Google Cloud identifiers (`projectNumber` and `projectId`). This allows for easy interop with Google APIs. The result of this call is an [`Operation`](../../v1beta1/operations). Poll the `Operation` to track the provisioning process by calling GetOperation until [`done`](../../v1beta1/operations#Operation.FIELDS.done) is `true`. When `done` is `true`, the `Operation` has either succeeded or failed. If the `Operation` succeeded, its [`response`](../../v1beta1/operations#Operation.FIELDS.response) is set to a FirebaseProject; if the `Operation` failed, its [`error`](../../v1beta1/operations#Operation.FIELDS.error) is set to a google.rpc.Status. The `Operation` is automatically deleted after completion, so there is no need to call DeleteOperation. This method does not modify any billing account information on the underlying Google Cloud `Project`. To call `AddFirebase`, a project member or service account must have the following permissions (the IAM roles of Editor and Owner contain these permissions): `firebase.projects.update`, `resourcemanager.projects.get`, `serviceusage.services.enable`, and `serviceusage.services.get`.", "flatPath": "v1beta1/projects/{projectsId}:addFirebase", "httpMethod": "POST", "id": "firebase.projects.addFirebase", @@ -196,7 +196,7 @@ ], "parameters": { "project": { - "description": "The resource name of the GCP `Project` to which Firebase resources will be added, in the format: projects/PROJECT_IDENTIFIER Refer to the `FirebaseProject` [`name`](../projects#FirebaseProject.FIELDS.name) field for details about PROJECT_IDENTIFIER values. 
After calling `AddFirebase`, the unique Project identifiers ( [`projectNumber`](https://cloud.google.com/resource-manager/reference/rest/v1/projects#Project.FIELDS.project_number) and [`projectId`](https://cloud.google.com/resource-manager/reference/rest/v1/projects#Project.FIELDS.project_id)) of the underlying GCP `Project` are also the identifiers of the FirebaseProject.", + "description": "The resource name of the Google Cloud `Project` in which Firebase resources will be added and Firebase services enabled, in the format: projects/ PROJECT_IDENTIFIER Refer to the `FirebaseProject` [`name`](../projects#FirebaseProject.FIELDS.name) field for details about PROJECT_IDENTIFIER values. After calling `AddFirebase`, the unique Project identifiers ( [`projectNumber`](https://cloud.google.com/resource-manager/reference/rest/v1/projects#Project.FIELDS.project_number) and [`projectId`](https://cloud.google.com/resource-manager/reference/rest/v1/projects#Project.FIELDS.project_id)) of the underlying Google Cloud `Project` are also the identifiers of the FirebaseProject.", "location": "path", "pattern": "^projects/[^/]+$", "required": true, @@ -798,7 +798,7 @@ "methods": { "list": { "deprecated": true, - "description": "**DEPRECATED.** _Instead, use the applicable resource-specific REST API (or associated documentation, as needed) to determine valid locations for each resource used in your Project._ Lists the valid Google Cloud Platform (GCP) resource locations for the specified Project (including a FirebaseProject). One of these locations can be selected as the Project's [_default_ GCP resource location](https://firebase.google.com/docs/projects/locations), which is the geographical location where the Project's resources, such as Cloud Firestore, will be provisioned by default. However, if the default GCP resource location has already been set for the Project, then this setting cannot be changed. This call checks for any possible [location restrictions](https://cloud.google.com/resource-manager/docs/organization-policy/defining-locations) for the specified Project and, thus, might return a subset of all possible GCP resource locations. To list all GCP resource locations (regardless of any restrictions), call the endpoint without specifying a unique project identifier (that is, `/v1beta1/{parent=projects/-}/listAvailableLocations`). To call `ListAvailableLocations` with a specified project, a member must be at minimum a Viewer of the Project. Calls without a specified project do not require any specific project permissions.", + "description": "**DECOMMISSIONED.** **If called, this endpoint will return a 404 error.** _Instead, use the applicable resource-specific REST API (or associated documentation, as needed) to determine valid locations for each resource used in your Project._ Lists the valid [\"locations for default Google Cloud resources\"](https://firebase.google.com/docs/projects/locations#default-cloud-location) for the specified Project (including a FirebaseProject). One of these locations can be selected as the Project's location for default Google Cloud resources, which is the geographical location where the Project's resources associated with Google App Engine (such as the default Cloud Firestore instance) will be provisioned by default. However, if the location for default Google Cloud resources has already been set for the Project, then this setting cannot be changed. 
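A minimal sketch of calling the AddFirebase endpoint described above and polling the returned long-running Operation, as the method description suggests. The project id, the token source, and the empty request body (locationId being deprecated) are assumptions for illustration.

import os
import time
import requests

TOKEN = os.environ["ACCESS_TOKEN"]   # hypothetical env var holding an OAuth 2.0 access token
PROJECT = "projects/my-project-id"   # hypothetical existing Google Cloud project
headers = {"Authorization": f"Bearer {TOKEN}"}

resp = requests.post(
    f"https://firebase.googleapis.com/v1beta1/{PROJECT}:addFirebase",
    headers=headers,
    json={},  # locationId is deprecated, so no request fields are set here (assumption)
)
resp.raise_for_status()
op = resp.json()

# Poll GetOperation until done is true; the Operation is deleted automatically after completion.
while not op.get("done"):
    time.sleep(2)
    poll = requests.get(f"https://firebase.googleapis.com/v1beta1/{op['name']}", headers=headers)
    poll.raise_for_status()
    op = poll.json()
print(op.get("response") or op.get("error"))
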
This call checks for any possible [location restrictions](https://cloud.google.com/resource-manager/docs/organization-policy/defining-locations) for the specified Project and, thus, might return a subset of all possible locations. To list all locations (regardless of any restrictions), call the endpoint without specifying a unique project identifier (that is, `/v1beta1/{parent=projects/-}/listAvailableLocations`). To call `ListAvailableLocations` with a specified project, a member must be at minimum a Viewer of the Project. Calls without a specified project do not require any specific project permissions.", "flatPath": "v1beta1/projects/{projectsId}/availableLocations", "httpMethod": "GET", "id": "firebase.projects.availableLocations.list", @@ -818,7 +818,7 @@ "type": "string" }, "parent": { - "description": "The FirebaseProject for which to list GCP resource locations, in the format: projects/PROJECT_IDENTIFIER Refer to the `FirebaseProject` [`name`](../projects#FirebaseProject.FIELDS.name) field for details about PROJECT_IDENTIFIER values. If no unique project identifier is specified (that is, `projects/-`), the returned list does not take into account org-specific or project-specific location restrictions.", + "description": "The FirebaseProject for which to list [locations for default Google Cloud resources](https://firebase.google.com/docs/projects/locations#default-cloud-location), in the format: projects/PROJECT_IDENTIFIER Refer to the `FirebaseProject` [`name`](../projects#FirebaseProject.FIELDS.name) field for details about PROJECT_IDENTIFIER values. If no unique project identifier is specified (that is, `projects/-`), the returned list does not take into account org-specific or project-specific location restrictions.", "location": "path", "pattern": "^projects/[^/]+$", "required": true, @@ -843,7 +843,7 @@ "methods": { "finalize": { "deprecated": true, - "description": "**DEPRECATED.** _Instead, use the applicable resource-specific REST API to set the location for each resource used in your Project._ Sets the default Google Cloud Platform (GCP) resource location for the specified FirebaseProject. This method creates an App Engine application with a [default Cloud Storage bucket](https://cloud.google.com/appengine/docs/standard/python/googlecloudstorageclient/setting-up-cloud-storage#activating_a_cloud_storage_bucket), located in the specified [`locationId`](#body.request_body.FIELDS.location_id). This location must be one of the available [GCP resource locations](https://firebase.google.com/docs/projects/locations). After the default GCP resource location is finalized, or if it was already set, it cannot be changed. The default GCP resource location for the specified `FirebaseProject` might already be set because either the underlying GCP `Project` already has an App Engine application or `FinalizeDefaultLocation` was previously called with a specified `locationId`. Any new calls to `FinalizeDefaultLocation` with a *different* specified `locationId` will return a 409 error. The result of this call is an [`Operation`](../../v1beta1/operations), which can be used to track the provisioning process. The [`response`](../../v1beta1/operations#Operation.FIELDS.response) type of the `Operation` is google.protobuf.Empty. The `Operation` can be polled by its `name` using GetOperation until `done` is true. When `done` is true, the `Operation` has either succeeded or failed. 
If the `Operation` has succeeded, its [`response`](../../v1beta1/operations#Operation.FIELDS.response) will be set to a google.protobuf.Empty; if the `Operation` has failed, its `error` will be set to a google.rpc.Status. The `Operation` is automatically deleted after completion, so there is no need to call DeleteOperation. All fields listed in the [request body](#request-body) are required. To call `FinalizeDefaultLocation`, a member must be an Owner of the Project.", + "description": "**DECOMMISSIONED.** **If called, this endpoint will return a 404 error.** _Instead, use the applicable resource-specific REST API to set the location for each resource used in your Project._ Sets the [\"location for default Google Cloud resources\"](https://firebase.google.com/docs/projects/locations#default-cloud-location) for the specified FirebaseProject. This method creates a Google App Engine application with a [default Cloud Storage bucket](https://cloud.google.com/appengine/docs/standard/python/googlecloudstorageclient/setting-up-cloud-storage#activating_a_cloud_storage_bucket), located in the specified [`locationId`](#body.request_body.FIELDS.location_id). This location must be one of the available [App Engine locations](https://cloud.google.com/about/locations#region). After the location for default Google Cloud resources is finalized, or if it was already set, it cannot be changed. The location for default Google Cloud resources for the specified `FirebaseProject` might already be set because either the underlying Google Cloud `Project` already has an App Engine application or `FinalizeDefaultLocation` was previously called with a specified `locationId`. The result of this call is an [`Operation`](../../v1beta1/operations), which can be used to track the provisioning process. The [`response`](../../v1beta1/operations#Operation.FIELDS.response) type of the `Operation` is google.protobuf.Empty. The `Operation` can be polled by its `name` using GetOperation until `done` is true. When `done` is true, the `Operation` has either succeeded or failed. If the `Operation` has succeeded, its [`response`](../../v1beta1/operations#Operation.FIELDS.response) will be set to a google.protobuf.Empty; if the `Operation` has failed, its `error` will be set to a google.rpc.Status. The `Operation` is automatically deleted after completion, so there is no need to call DeleteOperation. All fields listed in the [request body](#request-body) are required. 
To call `FinalizeDefaultLocation`, a member must be an Owner of the Project.", "flatPath": "v1beta1/projects/{projectsId}/defaultLocation:finalize", "httpMethod": "POST", "id": "firebase.projects.defaultLocation.finalize", @@ -852,7 +852,7 @@ ], "parameters": { "parent": { - "description": "The resource name of the FirebaseProject for which the default GCP resource location will be set, in the format: projects/PROJECT_IDENTIFIER Refer to the `FirebaseProject` [`name`](../projects#FirebaseProject.FIELDS.name) field for details about PROJECT_IDENTIFIER values.", + "description": "The resource name of the FirebaseProject for which the [\"location for default Google Cloud resources\"](https://firebase.google.com/docs/projects/locations#default-cloud-location) will be set, in the format: projects/PROJECT_IDENTIFIER Refer to the `FirebaseProject` [`name`](../projects#FirebaseProject.FIELDS.name) field for details about PROJECT_IDENTIFIER values.", "location": "path", "pattern": "^projects/[^/]+$", "required": true, @@ -1334,7 +1334,7 @@ "id": "AddFirebaseRequest", "properties": { "locationId": { - "description": "Deprecated. Instead, to set a Project's default GCP resource location, call [`FinalizeDefaultLocation`](../projects.defaultLocation/finalize) after you add Firebase resources to the GCP `Project`. The ID of the Project's default GCP resource location. The location must be one of the available [GCP resource locations](https://firebase.google.com/docs/projects/locations).", + "description": "**DEPRECATED.** _Instead, use product-specific REST APIs to work with the location of each resource in a Project. This field may be ignored, especially for newly provisioned projects after October 30, 2024._ The ID of the Project's [\"location for default Google Cloud resources\"](https://firebase.google.com/docs/projects/locations#default-cloud-location), which are resources associated with Google App Engine. The location must be one of the available [Google App Engine locations](https://cloud.google.com/about/locations#region).", "type": "string" } }, @@ -1359,12 +1359,12 @@ "properties": { "databaseURL": { "deprecated": true, - "description": "**DEPRECATED.** _Instead, find the default Firebase Realtime Database instance name using the [list endpoint](https://firebase.google.com/docs/reference/rest/database/database-management/rest/v1beta/projects.locations.instances/list) within the Firebase Realtime Database REST API. Note that the default instance for the Project might not yet be provisioned, so the return might not contain a default instance._ The default Firebase Realtime Database URL.", + "description": "**DEPRECATED.** _Instead, find the URL of the default Realtime Database instance using the [list endpoint](https://firebase.google.com/docs/reference/rest/database/database-management/rest/v1beta/projects.locations.instances/list) within the Firebase Realtime Database REST API. If the default instance for the Project has not yet been provisioned, the return might not contain a default instance. Note that the config that's generated for the Firebase console or the Firebase CLI uses the Realtime Database endpoint to populate this value for that config._ The URL of the default Firebase Realtime Database instance.", "type": "string" }, "locationId": { "deprecated": true, - "description": "**DEPRECATED.** _Instead, use product-specific REST APIs to find the location of resources._ The ID of the Project's default GCP resource location. 
The location is one of the available [GCP resource locations](https://firebase.google.com/docs/projects/locations). This field is omitted if the default GCP resource location has not been finalized yet. To set a Project's default GCP resource location, call [`FinalizeDefaultLocation`](../projects.defaultLocation/finalize) after you add Firebase resources to the Project.", + "description": "**DEPRECATED.** _Instead, use product-specific REST APIs to find the location of each resource in a Project. This field may not be populated, especially for newly provisioned projects after October 30, 2024._ The ID of the Project's [\"location for default Google Cloud resources\"](https://firebase.google.com/docs/projects/locations#default-cloud-location), which are resources associated with Google App Engine. The location is one of the available [App Engine locations](https://cloud.google.com/about/locations#region). This field is omitted if the location for default Google Cloud resources has not been set.", "type": "string" }, "projectId": { @@ -1373,7 +1373,7 @@ }, "storageBucket": { "deprecated": true, - "description": "**DEPRECATED.** _Instead, find the default Cloud Storage for Firebase bucket using the [list endpoint](https://firebase.google.com/docs/reference/rest/storage/rest/v1beta/projects.buckets/list) within the Cloud Storage for Firebase REST API. Note that the default bucket for the Project might not yet be provisioned, so the return might not contain a default bucket._ The default Cloud Storage for Firebase storage bucket name.", + "description": "**DEPRECATED.** _Instead, find the name of the default Cloud Storage for Firebase bucket using the [list endpoint](https://firebase.google.com/docs/reference/rest/storage/rest/v1beta/projects.buckets/list) within the Cloud Storage for Firebase REST API. If the default bucket for the Project has not yet been provisioned, the return might not contain a default bucket. Note that the config that's generated for the Firebase console or the Firebase CLI uses the Cloud Storage for Firebase endpoint to populate this value for that config._ The name of the default Cloud Storage for Firebase bucket.", "type": "string" } }, @@ -1438,7 +1438,7 @@ "type": "string" }, "expireTime": { - "description": "Output only. Timestamp of when the App will be considered expired and cannot be undeleted. This value is only provided if the App is in the `DELETED` state.", + "description": "Output only. If the App has been removed from the Project, this is the timestamp of when the App is considered expired and will be permanently deleted. After this time, the App cannot be undeleted (that is, restored to the Project). This value is only provided if the App is in the `DELETED` state.", "format": "google-datetime", "readOnly": true, "type": "string" @@ -1511,25 +1511,25 @@ "properties": { "hostingSite": { "deprecated": true, - "description": "Output only. **DEPRECATED.** _Instead, find the default Firebase Hosting site name using the [ListSites](https://firebase.google.com/docs/reference/hosting/rest/v1beta1/projects.sites/list) within the Firebase Hosting REST API. Note that the default site for the Project might not yet be provisioned, so the return might not contain a default site._ The default Firebase Hosting site name, in the format: PROJECT_ID Though rare, your `projectId` might already be used as the name for an existing Hosting site in another project (learn more about creating non-default, [additional sites](https://firebase.google.com/docs/hosting/multisites)). 
In these cases, your `projectId` is appended with a hyphen then five alphanumeric characters to create your default Hosting site name. For example, if your `projectId` is `myproject123`, your default Hosting site name might be: `myproject123-a5c16`", + "description": "Output only. **DEPRECATED.** _Instead, find the name of the default Firebase Hosting site using [ListSites](https://firebase.google.com/docs/reference/hosting/rest/v1beta1/projects.sites/list) within the Firebase Hosting REST API. If the default Hosting site for the Project has not yet been provisioned, the return might not contain a default site._ The name of the default Firebase Hosting site, in the format: PROJECT_ID Though rare, your `projectId` might already be used as the name for an existing Hosting site in another project (learn more about creating non-default, [additional sites](https://firebase.google.com/docs/hosting/multisites)). In these cases, your `projectId` is appended with a hyphen then five alphanumeric characters to create your default Hosting site name. For example, if your `projectId` is `myproject123`, your default Hosting site name might be: `myproject123-a5c16`", "readOnly": true, "type": "string" }, "locationId": { "deprecated": true, - "description": "Output only. **DEPRECATED.** _Instead, use product-specific REST APIs to find the location of resources._ The ID of the Project's default GCP resource location. The location is one of the available [GCP resource locations](https://firebase.google.com/docs/projects/locations). This field is omitted if the default GCP resource location has not been finalized yet. To set a Project's default GCP resource location, call [`FinalizeDefaultLocation`](../projects.defaultLocation/finalize) after you add Firebase resources to the Project.", + "description": "Output only. **DEPRECATED.** _Instead, use product-specific REST APIs to find the location of each resource in a Project. This field may not be populated, especially for newly provisioned projects after October 30, 2024._ The ID of the Project's [\"location for default Google Cloud resources\"](https://firebase.google.com/docs/projects/locations#default-cloud-location), which are resources associated with Google App Engine. The location is one of the available [Google App Engine locations](https://cloud.google.com/about/locations#region). This field is omitted if the location for default Google Cloud resources has not been set.", "readOnly": true, "type": "string" }, "realtimeDatabaseInstance": { "deprecated": true, - "description": "Output only. **DEPRECATED.** _Instead, find the default Firebase Realtime Database instance name using the [list endpoint](https://firebase.google.com/docs/reference/rest/database/database-management/rest/v1beta/projects.locations.instances/list) within the Firebase Realtime Database REST API. Note that the default instance for the Project might not yet be provisioned, so the return might not contain a default instance._ The default Firebase Realtime Database instance name, in the format: PROJECT_ID Though rare, your `projectId` might already be used as the name for an existing Realtime Database instance in another project (learn more about [database sharding](https://firebase.google.com/docs/database/usage/sharding)). In these cases, your `projectId` is appended with a hyphen then five alphanumeric characters to create your default Realtime Database instance name. 
For example, if your `projectId` is `myproject123`, your default database instance name might be: `myproject123-a5c16`", + "description": "Output only. **DEPRECATED.** _Instead, find the name of the default Realtime Database instance using the [list endpoint](https://firebase.google.com/docs/reference/rest/database/database-management/rest/v1beta/projects.locations.instances/list) within the Firebase Realtime Database REST API. If the default Realtime Database instance for a Project has not yet been provisioned, the return might not contain a default instance._ The default Firebase Realtime Database instance name, in the format: PROJECT_ID Though rare, your `projectId` might already be used as the name for an existing Realtime Database instance in another project (learn more about [database sharding](https://firebase.google.com/docs/database/usage/sharding)). In these cases, your `projectId` is appended with a hyphen then five alphanumeric characters to create your default Realtime Database instance name. For example, if your `projectId` is `myproject123`, your default database instance name might be: `myproject123-a5c16`", "readOnly": true, "type": "string" }, "storageBucket": { "deprecated": true, - "description": "Output only. **DEPRECATED.** _Instead, find the default Cloud Storage for Firebase bucket using the [list endpoint](https://firebase.google.com/docs/reference/rest/storage/rest/v1beta/projects.buckets/list) within the Cloud Storage for Firebase REST API. Note that the default bucket for the Project might not yet be provisioned, so the return might not contain a default bucket._ The default Cloud Storage for Firebase storage bucket, in the format: PROJECT_ID.appspot.com", + "description": "Output only. **DEPRECATED.** _Instead, find the name of the default Cloud Storage for Firebase bucket using the [list endpoint](https://firebase.google.com/docs/reference/rest/storage/rest/v1beta/projects.buckets/list) within the Cloud Storage for Firebase REST API. If the default bucket for the Project has not yet been provisioned, the return might not contain a default bucket._ The name of the default Cloud Storage for Firebase bucket, in one of the following formats: * If provisioned _before_ October 30, 2024: PROJECT_ID.appspot.com * If provisioned _on or after_ October 30, 2024: PROJECT_ID.firebasestorage.app", "readOnly": true, "type": "string" } @@ -1547,7 +1547,7 @@ "id": "FinalizeDefaultLocationRequest", "properties": { "locationId": { - "description": "The ID of the Project's default GCP resource location. The location must be one of the available [GCP resource locations](https://firebase.google.com/docs/projects/locations).", + "description": "**DEPRECATED** The ID of the Project's [\"location for default Google Cloud resources\"](https://firebase.google.com/docs/projects/locations#default-cloud-location), which are resources associated with Google App Engine. The location must be one of the available [Google App Engine locations](https://cloud.google.com/about/locations#region).", "type": "string" } }, @@ -1571,7 +1571,7 @@ "type": "string" }, "expireTime": { - "description": "Output only. Timestamp of when the App will be considered expired and cannot be undeleted. This value is only provided if the App is in the `DELETED` state.", + "description": "Output only. If the App has been removed from the Project, this is the timestamp of when the App is considered expired and will be permanently deleted. 
After this time, the App cannot be undeleted (that is, restored to the Project). This value is only provided if the App is in the `DELETED` state.", "format": "google-datetime", "readOnly": true, "type": "string" @@ -1620,7 +1620,7 @@ "type": "object" }, "FirebaseProject": { - "description": "A `FirebaseProject` is the top-level Firebase entity. It is the container for Firebase Apps, Firebase Hosting sites, storage systems (Firebase Realtime Database, Cloud Firestore, Cloud Storage buckets), and other Firebase and Google Cloud Platform (GCP) resources. You create a `FirebaseProject` by calling AddFirebase and specifying an *existing* [GCP `Project`](https://cloud.google.com/resource-manager/reference/rest/v1/projects). This adds Firebase resources to the existing GCP `Project`. Since a FirebaseProject is actually also a GCP `Project`, a `FirebaseProject` has the same underlying GCP identifiers (`projectNumber` and `projectId`). This allows for easy interop with Google APIs.", + "description": "A `FirebaseProject` is the top-level Firebase entity. It is the container for Firebase Apps, Firebase Hosting sites, storage systems (Firebase Realtime Database, Cloud Firestore, Cloud Storage buckets), and other Firebase and Google Cloud resources. You create a `FirebaseProject` by calling AddFirebase and specifying an *existing* [Google Cloud `Project`](https://cloud.google.com/resource-manager/reference/rest/v1/projects). This adds Firebase resources to the existing Google Cloud `Project`. Since a FirebaseProject is actually also a Google Cloud `Project`, a `FirebaseProject` has the same underlying Google Cloud identifiers (`projectNumber` and `projectId`). This allows for easy interop with Google APIs.", "id": "FirebaseProject", "properties": { "annotations": { @@ -1707,7 +1707,7 @@ "type": "string" }, "expireTime": { - "description": "Output only. Timestamp of when the App will be considered expired and cannot be undeleted. This value is only provided if the App is in the `DELETED` state.", + "description": "Output only. If the App has been removed from the Project, this is the timestamp of when the App is considered expired and will be permanently deleted. After this time, the App cannot be undeleted (that is, restored to the Project). This value is only provided if the App is in the `DELETED` state.", "format": "google-datetime", "readOnly": true, "type": "string" @@ -1802,7 +1802,7 @@ "type": "string" }, "projectInfo": { - "description": "The list of GCP `Projects` which can have Firebase resources added to them.", + "description": "The list of Google Cloud `Projects` which can have Firebase resources added to them.", "items": { "$ref": "ProjectInfo" }, @@ -1877,11 +1877,11 @@ }, "Location": { "deprecated": true, - "description": "**DEPRECATED.** _This Location is no longer used to determine Firebase resource locations. Instead, consult product documentation to determine valid locations for each resource used in your Project._ A GCP resource location that can be selected for a FirebaseProject.", + "description": "**DEPRECATED.** _This Location is no longer used to determine Firebase resource locations. Instead, consult product documentation to determine valid locations for each resource used in your Project._ A [\"location for default Google Cloud resources\"](https://firebase.google.com/docs/projects/locations#default-cloud-location) that can be selected for a FirebaseProject. 
These are resources associated with Google App Engine.", "id": "Location", "properties": { "features": { - "description": "Products and services that are available in the GCP resource location.", + "description": "Products and services that are available in the location for default Google Cloud resources.", "items": { "enum": [ "LOCATION_FEATURE_UNSPECIFIED", @@ -1891,8 +1891,8 @@ ], "enumDescriptions": [ "Used internally for distinguishing unset values and is not intended for external use.", - "This location supports Cloud Firestore database instances. App Engine is available in this location, so it can be a Project's [default GCP resource location](//firebase.google.com/docs/projects/locations#default-cloud-location).", - "This location supports default Cloud Storage buckets. App Engine is available in this location, so it can be a Project's [default GCP resource location](//firebase.google.com/docs/projects/locations#default-cloud-location).", + "This location supports Cloud Firestore database instances. Google App Engine is available in this location, so it can be a Project's location for default Google Cloud resources.", + "This location supports default Cloud Storage buckets. Google App Engine is available in this location, so it can be a Project's location for default Google Cloud resources.", "Cloud Functions for Firebase is available in this location." ], "type": "string" @@ -1900,11 +1900,11 @@ "type": "array" }, "locationId": { - "description": "The ID of the GCP resource location. It will be one of the available [GCP resource locations](https://firebase.google.com/docs/projects/locations#types).", + "description": "The ID of the Project's location for default Google Cloud resources. It will be one of the available [Google App Engine locations](https://cloud.google.com/about/locations#region).", "type": "string" }, "type": { - "description": "Indicates whether the GCP resource location is a [regional or multi-regional location](https://firebase.google.com/docs/projects/locations#types) for data replication.", + "description": "Indicates whether the location for default Google Cloud resources is a [regional or multi-regional location](https://firebase.google.com/docs/projects/locations#types) for data replication.", "enum": [ "LOCATION_TYPE_UNSPECIFIED", "REGIONAL", @@ -1982,19 +1982,19 @@ "type": "object" }, "ProjectInfo": { - "description": "A reference to a Google Cloud Platform (GCP) `Project`.", + "description": "A reference to a Google Cloud `Project`.", "id": "ProjectInfo", "properties": { "displayName": { - "description": "The user-assigned display name of the GCP `Project`, for example: `My App`", + "description": "The user-assigned display name of the Google Cloud `Project`, for example: `My App`.", "type": "string" }, "locationId": { - "description": "The ID of the Project's default GCP resource location. The location is one of the available [GCP resource locations](https://firebase.google.com/docs/projects/locations). Not all Projects will have this field populated. If it is not populated, it means that the Project does not yet have a default GCP resource location. To set a Project's default GCP resource location, call [`FinalizeDefaultLocation`](../projects.defaultLocation/finalize) after you add Firebase resources to the Project.", + "description": "**DEPRECATED** _Instead, use product-specific REST APIs to work with the location of each resource in a Project. 
This field may not be populated, especially for newly provisioned projects after October 30, 2024._ The ID of the Project's [\"location for default Google Cloud resources\"](https://firebase.google.com/docs/projects/locations#default-cloud-location). The location is one of the available [Google App Engine locations](https://cloud.google.com/about/locations#region). Not all Projects will have this field populated. If it is not populated, it means that the Project does not yet have a location for default Google Cloud resources.", "type": "string" }, "project": { - "description": "The resource name of the GCP `Project` to which Firebase resources can be added, in the format: projects/PROJECT_IDENTIFIER Refer to the `FirebaseProject` [`name`](../projects#FirebaseProject.FIELDS.name) field for details about PROJECT_IDENTIFIER values.", + "description": "The resource name of the Google Cloud `Project` to which Firebase resources can be added, in the format: projects/PROJECT_IDENTIFIER Refer to the `FirebaseProject` [`name`](../projects#FirebaseProject.FIELDS.name) field for details about PROJECT_IDENTIFIER values.", "type": "string" } }, @@ -2022,7 +2022,7 @@ "type": "string" }, "immediate": { - "description": "Determines whether to _immediately_ delete the AndroidApp. If set to true, the App is immediately deleted from the Project and cannot be restored to the Project. If not set, defaults to false, which means the App will be set to expire in 30 days. Within the 30 days, the App may be restored to the Project using UndeleteAndroidApp.", + "description": "Determines whether to _immediately_ delete the AndroidApp. If set to true, the App is immediately deleted from the Project and cannot be undeleted (that is, restored to the Project). If not set, defaults to false, which means the App will be set to expire in 30 days. Within the 30 days, the App may be restored to the Project using UndeleteAndroidApp.", "type": "boolean" }, "validateOnly": { @@ -2044,7 +2044,7 @@ "type": "string" }, "immediate": { - "description": "Determines whether to _immediately_ delete the IosApp. If set to true, the App is immediately deleted from the Project and cannot be restored to the Project. If not set, defaults to false, which means the App will be set to expire in 30 days. Within the 30 days, the App may be restored to the Project using UndeleteIosApp", + "description": "Determines whether to _immediately_ delete the IosApp. If set to true, the App is immediately deleted from the Project and cannot be undeleted (that is, restored to the Project). If not set, defaults to false, which means the App will be set to expire in 30 days. Within the 30 days, the App may be restored to the Project using UndeleteIosApp", "type": "boolean" }, "validateOnly": { @@ -2066,7 +2066,7 @@ "type": "string" }, "immediate": { - "description": "Determines whether to _immediately_ delete the WebApp. If set to true, the App is immediately deleted from the Project and cannot be restored to the Project. If not set, defaults to false, which means the App will be set to expire in 30 days. Within the 30 days, the App may be restored to the Project using UndeleteWebApp", + "description": "Determines whether to _immediately_ delete the WebApp. If set to true, the App is immediately deleted from the Project and cannot be undeleted (that is, restored to the Project). If not set, defaults to false, which means the App will be set to expire in 30 days. 
Within the 30 days, the App may be restored to the Project using UndeleteWebApp", "type": "boolean" }, "validateOnly": { @@ -2269,7 +2269,7 @@ "type": "string" }, "expireTime": { - "description": "Output only. Timestamp of when the App will be considered expired and cannot be undeleted. This value is only provided if the App is in the `DELETED` state.", + "description": "Output only. If the App has been removed from the Project, this is the timestamp of when the App is considered expired and will be permanently deleted. After this time, the App cannot be undeleted (that is, restored to the Project). This value is only provided if the App is in the `DELETED` state.", "format": "google-datetime", "readOnly": true, "type": "string" @@ -2325,12 +2325,12 @@ }, "databaseURL": { "deprecated": true, - "description": "**DEPRECATED.** _Instead, find the default Firebase Realtime Database instance name using the [list endpoint](https://firebase.google.com/docs/reference/rest/database/database-management/rest/v1beta/projects.locations.instances/list) within the Firebase Realtime Database REST API. Note that the default instance for the Project might not yet be provisioned, so the return might not contain a default instance._ The default Firebase Realtime Database URL.", + "description": "**DEPRECATED.** _Instead, find the URL of the default Realtime Database instance using the [list endpoint](https://firebase.google.com/docs/reference/rest/database/database-management/rest/v1beta/projects.locations.instances/list) within the Firebase Realtime Database REST API. If the default instance for the Project has not yet been provisioned, the return might not contain a default instance. Note that the config that's generated for the Firebase console or the Firebase CLI uses the Realtime Database endpoint to populate this value for that config._ The URL of the default Firebase Realtime Database instance.", "type": "string" }, "locationId": { "deprecated": true, - "description": "**DEPRECATED.** _Instead, use product-specific REST APIs to find the location of resources._ The ID of the Project's default GCP resource location. The location is one of the available [GCP resource locations](https://firebase.google.com/docs/projects/locations). This field is omitted if the default GCP resource location has not been finalized yet. To set a Project's default GCP resource location, call [`FinalizeDefaultLocation`](../projects.defaultLocation/finalize) after you add Firebase resources to the Project.", + "description": "**DEPRECATED.** _Instead, use product-specific REST APIs to find the location of each resource in a Project. This field may not be populated, especially for newly provisioned projects after October 30, 2024._ The ID of the Project's [\"location for default Google Cloud resources\"](https://firebase.google.com/docs/projects/locations#default-cloud-location), which are resources associated with Google App Engine. The location is one of the available [App Engine locations](https://cloud.google.com/about/locations#region). This field is omitted if the location for default Google Cloud resources has not been set.", "type": "string" }, "measurementId": { @@ -2351,12 +2351,12 @@ "type": "string" }, "realtimeDatabaseUrl": { - "description": "Optional. Duplicate field for the URL of the default RTDB instances (if there is one) that uses the same field name as the unified V2 config file format. 
We wanted to make a single config file format for all the app platforms (Android, iOS and web) and we had to pick consistent names for all the fields since there was some varience between the platforms. If the request asks for the V2 format we will populate this field instead of realtime_database_instance_uri.", + "description": "Optional. Duplicate field for the URL of the default Realtime Database instances (if the default instance has been provisioned). If the request asks for the V2 config format, this field will be populated instead of `realtime_database_instance_uri`.", "type": "string" }, "storageBucket": { "deprecated": true, - "description": "**DEPRECATED.** _Instead, find the default Cloud Storage for Firebase bucket using the [list endpoint](https://firebase.google.com/docs/reference/rest/storage/rest/v1beta/projects.buckets/list) within the Cloud Storage for Firebase REST API. Note that the default bucket for the Project might not yet be provisioned, so the return might not contain a default bucket._ The default Cloud Storage for Firebase storage bucket name.", + "description": "**DEPRECATED.** _Instead, find the name of the default Cloud Storage for Firebase bucket using the [list endpoint](https://firebase.google.com/docs/reference/rest/storage/rest/v1beta/projects.buckets/list) within the Cloud Storage for Firebase REST API. If the default bucket for the Project has not yet been provisioned, the return might not contain a default bucket. Note that the config that's generated for the Firebase console or the Firebase CLI uses the Cloud Storage for Firebase endpoint to populate this value for that config._ The name of the default Cloud Storage for Firebase bucket.", "type": "string" }, "version": { diff --git a/discovery/googleapis_beta/firebasedatabase__v1beta.json b/discovery/googleapis_beta/firebasedatabase__v1beta.json index fc92a2474..851005b04 100644 --- a/discovery/googleapis_beta/firebasedatabase__v1beta.json +++ b/discovery/googleapis_beta/firebasedatabase__v1beta.json @@ -34,10 +34,10 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20221214", + "revision": "20241202", "rootUrl": "https://firebasedatabase.googleapis.com/", "servicePath": "", - "title": "Firebase Realtime Database API", + "title": "Firebase Realtime Database Management API", "version": "v1beta", "version_module": true, "parameters": { diff --git a/discovery/googleapis_beta/firebasestorage__v1beta.json b/discovery/googleapis_beta/firebasestorage__v1beta.json index 087e6fa91..4a43b25f8 100644 --- a/discovery/googleapis_beta/firebasestorage__v1beta.json +++ b/discovery/googleapis_beta/firebasestorage__v1beta.json @@ -28,7 +28,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20230822", + "revision": "20241111", "rootUrl": "https://firebasestorage.googleapis.com/", "servicePath": "", "title": "Cloud Storage for Firebase API", @@ -113,6 +113,60 @@ }, "resources": { "projects": { + "methods": { + "deleteDefaultBucket": { + "description": "Unlinks and deletes the default bucket.", + "flatPath": "v1beta/projects/{projectsId}/defaultBucket", + "httpMethod": "DELETE", + "id": "firebasestorage.projects.deleteDefaultBucket", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "Required. 
The name of the default bucket to delete, `projects/{project_id_or_number}/defaultBucket`.", + "location": "path", + "pattern": "^projects/[^/]+/defaultBucket$", + "required": true, + "type": "string" + } + }, + "path": "v1beta/{+name}", + "response": { + "$ref": "Empty" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/firebase" + ] + }, + "getDefaultBucket": { + "description": "Gets the default bucket.", + "flatPath": "v1beta/projects/{projectsId}/defaultBucket", + "httpMethod": "GET", + "id": "firebasestorage.projects.getDefaultBucket", + "parameterOrder": [ + "name" + ], + "parameters": { + "name": { + "description": "Required. The name of the default bucket to retrieve, `projects/{project_id_or_number}/defaultBucket`.", + "location": "path", + "pattern": "^projects/[^/]+/defaultBucket$", + "required": true, + "type": "string" + } + }, + "path": "v1beta/{+name}", + "response": { + "$ref": "DefaultBucket" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/firebase" + ] + } + }, "resources": { "buckets": { "methods": { @@ -238,6 +292,39 @@ ] } } + }, + "defaultBucket": { + "methods": { + "create": { + "description": "Creates a Spark tier-eligible Cloud Storage bucket and links it to your Firebase project. If the default bucket already exists, this method will re-link it to your Firebase project. See https://firebase.google.com/pricing for pricing details.", + "flatPath": "v1beta/projects/{projectsId}/defaultBucket", + "httpMethod": "POST", + "id": "firebasestorage.projects.defaultBucket.create", + "parameterOrder": [ + "parent" + ], + "parameters": { + "parent": { + "description": "Required. The parent resource where the default bucket will be created, `projects/{project_id_or_number}`.", + "location": "path", + "pattern": "^projects/[^/]+$", + "required": true, + "type": "string" + } + }, + "path": "v1beta/{+parent}/defaultBucket", + "request": { + "$ref": "DefaultBucket" + }, + "response": { + "$ref": "DefaultBucket" + }, + "scopes": [ + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/firebase" + ] + } + } } } } @@ -261,6 +348,30 @@ }, "type": "object" }, + "DefaultBucket": { + "description": "Spark tier-eligible Cloud Storage bucket. One per project. This resource exists if the underlying Cloud Storage bucket exists and it is linked to your Firebase project. See https://firebase.google.com/pricing for pricing details.", + "id": "DefaultBucket", + "properties": { + "bucket": { + "$ref": "Bucket", + "description": "Output only. Underlying bucket resource.", + "readOnly": true + }, + "location": { + "description": "Immutable. Location of the default bucket.", + "type": "string" + }, + "name": { + "description": "Resource name of the default bucket.", + "type": "string" + }, + "storageClass": { + "description": "Immutable. Storage class of the default bucket. Supported values are available at https://cloud.google.com/storage/docs/storage-classes#classes.", + "type": "string" + } + }, + "type": "object" + }, "Empty": { "description": "A generic empty message that you can re-use to avoid defining duplicated empty messages in your APIs. A typical example is to use it as the request or the response type of an API method. 
For instance: service Foo { rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty); }", "id": "Empty", diff --git a/discovery/googleapis_beta/gkehub__v2alpha.json b/discovery/googleapis_beta/gkehub__v2alpha.json index bc2a9fbf3..d17b0b6bc 100644 --- a/discovery/googleapis_beta/gkehub__v2alpha.json +++ b/discovery/googleapis_beta/gkehub__v2alpha.json @@ -25,7 +25,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20241010", + "revision": "20241206", "rootUrl": "https://gkehub.googleapis.com/", "servicePath": "", "title": "GKE Hub API", @@ -375,7 +375,7 @@ "operations": { "methods": { "cancel": { - "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "flatPath": "v2alpha/projects/{projectsId}/locations/{locationsId}/operations/{operationsId}:cancel", "httpMethod": "POST", "id": "gkehub.projects.locations.operations.cancel", @@ -716,7 +716,8 @@ "description": "Git repo configuration for the cluster." }, "metricsGcpServiceAccountEmail": { - "description": "The Email of the Google Cloud Service Account (GSA) used for exporting Config Sync metrics to Cloud Monitoring and Cloud Monarch when Workload Identity is enabled. The GSA should have the Monitoring Metric Writer (roles/monitoring.metricWriter) IAM role. The Kubernetes ServiceAccount `default` in the namespace `config-management-monitoring` should be bound to the GSA.", + "deprecated": true, + "description": "The Email of the Google Cloud Service Account (GSA) used for exporting Config Sync metrics to Cloud Monitoring and Cloud Monarch when Workload Identity is enabled. The GSA should have the Monitoring Metric Writer (roles/monitoring.metricWriter) IAM role. The Kubernetes ServiceAccount `default` in the namespace `config-management-monitoring` should be bound to the GSA. Deprecated: If Workload Identity Federation for GKE is enabled, Google Cloud Service Account is no longer needed for exporting Config Sync metrics: https://cloud.google.com/kubernetes-engine/enterprise/config-sync/docs/how-to/monitor-config-sync-cloud-monitoring#custom-monitoring.", "type": "string" }, "oci": { @@ -730,6 +731,10 @@ "sourceFormat": { "description": "Specifies whether the Config Sync Repo is in \"hierarchical\" or \"unstructured\" mode.", "type": "string" + }, + "stopSyncing": { + "description": "Set to true to stop syncing configs for a single cluster. 
Default to false.", + "type": "boolean" } }, "type": "object" @@ -934,6 +939,12 @@ ], "type": "string" }, + "crCount": { + "description": "Output only. The number of RootSync and RepoSync CRs in the cluster.", + "format": "int32", + "readOnly": true, + "type": "integer" + }, "deploymentState": { "$ref": "ConfigManagementConfigSyncDeploymentState", "description": "Information about the deployment of ConfigSync, including the version. of the various Pods deployed" @@ -1661,28 +1672,6 @@ "properties": {}, "type": "object" }, - "FeatureConfigRef": { - "description": "Information of the FeatureConfig applied on the MembershipFeature.", - "id": "FeatureConfigRef", - "properties": { - "config": { - "description": "Input only. Resource name of FeatureConfig, in the format: `projects/{project}/locations/global/featureConfigs/{feature_config}`.", - "type": "string" - }, - "configUpdateTime": { - "description": "Output only. When the FeatureConfig was last applied and copied to FeatureSpec.", - "format": "google-datetime", - "readOnly": true, - "type": "string" - }, - "uuid": { - "description": "Output only. An id that uniquely identify a FeatureConfig object.", - "readOnly": true, - "type": "string" - } - }, - "type": "object" - }, "FeatureSpec": { "description": "FeatureSpec contains user input per-feature spec information.", "id": "FeatureSpec", @@ -2316,10 +2305,6 @@ "readOnly": true, "type": "string" }, - "featureConfigRef": { - "$ref": "FeatureConfigRef", - "description": "Reference information for a FeatureConfig applied on the MembershipFeature." - }, "labels": { "additionalProperties": { "type": "string" @@ -2339,7 +2324,7 @@ }, "spec": { "$ref": "FeatureSpec", - "description": "Spec of this membershipFeature." + "description": "Optional. Spec of this membershipFeature." }, "state": { "$ref": "FeatureState", diff --git a/discovery/googleapis_beta/sqladmin__v1beta4.json b/discovery/googleapis_beta/sqladmin__v1beta4.json index 76cfba20c..86a0d44d4 100644 --- a/discovery/googleapis_beta/sqladmin__v1beta4.json +++ b/discovery/googleapis_beta/sqladmin__v1beta4.json @@ -28,7 +28,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20241011", + "revision": "20241204", "rootUrl": "https://sqladmin.googleapis.com/", "servicePath": "", "title": "Cloud SQL Admin API", @@ -822,18 +822,6 @@ "instance" ], "parameters": { - "finalBackupExpiryTime": { - "description": "Optional. Final Backup expiration time. Timestamp in UTC of when this resource is considered expired.", - "format": "google-datetime", - "location": "query", - "type": "string" - }, - "finalBackupTtlDays": { - "description": "Optional. Retention period of the final backup.", - "format": "int64", - "location": "query", - "type": "string" - }, "instance": { "description": "Cloud SQL instance ID. This does not include the project ID.", "location": "path", @@ -1208,7 +1196,7 @@ ], "parameters": { "failover": { - "description": "Set to true to invoke a replica failover to the designated DR replica. As part of replica failover, the promote operation attempts to add the original primary instance as a replica of the promoted DR replica when the original primary instance comes back online. If set to false or not specified, then the original primary instance becomes an independent Cloud SQL primary instance. Only applicable to MySQL.", + "description": "Set to true to invoke a replica failover to the DR replica. 
As part of replica failover, the promote operation attempts to add the original primary instance as a replica of the promoted DR replica when the original primary instance comes back online. If set to false or not specified, then the original primary instance becomes an independent Cloud SQL primary instance.", "location": "query", "type": "boolean" }, @@ -1500,7 +1488,7 @@ ] }, "switchover": { - "description": "Switches over from the primary instance to the designated DR replica instance.", + "description": "Switches over from the primary instance to the DR replica instance.", "flatPath": "sql/v1beta4/projects/{project}/instances/{instance}/switchover", "httpMethod": "POST", "id": "sql.instances.switchover", @@ -1510,7 +1498,7 @@ ], "parameters": { "dbTimeout": { - "description": "Optional. (MySQL only) Cloud SQL instance operations timeout, which is a sum of all database operations. Default value is 10 minutes and can be modified to a maximum value of 24 hours.", + "description": "Optional. (MySQL and PostgreSQL only) Cloud SQL instance operations timeout, which is a sum of all database operations. Default value is 10 minutes and can be modified to a maximum value of 24 hours.", "format": "google-duration", "location": "query", "type": "string" @@ -2869,6 +2857,13 @@ ], "type": "string" }, + "customSubjectAlternativeNames": { + "description": "Custom subject alternative names for the server certificate.", + "items": { + "type": "string" + }, + "type": "array" + }, "databaseVersion": { "description": "The database engine type and version. The `databaseVersion` field cannot be changed after instance creation. MySQL instances: `MYSQL_8_0`, `MYSQL_5_7` (default), or `MYSQL_5_6`. PostgreSQL instances: `POSTGRES_9_6`, `POSTGRES_10`, `POSTGRES_11` or `POSTGRES_12` (default), `POSTGRES_13`, or `POSTGRES_14`. 
SQL Server instances: `SQLSERVER_2017_STANDARD` (default), `SQLSERVER_2017_ENTERPRISE`, `SQLSERVER_2017_EXPRESS`, `SQLSERVER_2017_WEB`, `SQLSERVER_2019_STANDARD`, `SQLSERVER_2019_ENTERPRISE`, `SQLSERVER_2019_EXPRESS`, or `SQLSERVER_2019_WEB`.", "enum": [ @@ -2877,19 +2872,6 @@ "MYSQL_5_5", "MYSQL_5_6", "MYSQL_5_7", - "SQLSERVER_2017_STANDARD", - "SQLSERVER_2017_ENTERPRISE", - "SQLSERVER_2017_EXPRESS", - "SQLSERVER_2017_WEB", - "POSTGRES_9_6", - "POSTGRES_10", - "POSTGRES_11", - "POSTGRES_12", - "POSTGRES_13", - "POSTGRES_14", - "POSTGRES_15", - "POSTGRES_16", - "POSTGRES_17", "MYSQL_8_0", "MYSQL_8_0_18", "MYSQL_8_0_26", @@ -2907,7 +2889,22 @@ "MYSQL_8_0_38", "MYSQL_8_0_39", "MYSQL_8_0_40", + "MYSQL_8_0_41", + "MYSQL_8_0_42", "MYSQL_8_4", + "SQLSERVER_2017_STANDARD", + "SQLSERVER_2017_ENTERPRISE", + "SQLSERVER_2017_EXPRESS", + "SQLSERVER_2017_WEB", + "POSTGRES_9_6", + "POSTGRES_10", + "POSTGRES_11", + "POSTGRES_12", + "POSTGRES_13", + "POSTGRES_14", + "POSTGRES_15", + "POSTGRES_16", + "POSTGRES_17", "SQLSERVER_2019_STANDARD", "SQLSERVER_2019_ENTERPRISE", "SQLSERVER_2019_EXPRESS", @@ -2928,6 +2925,9 @@ false, false, false, + true, + false, + false, false, false, false, @@ -2941,7 +2941,6 @@ false, false, false, - true, false, false, false, @@ -2969,19 +2968,6 @@ "The database version is MySQL 5.5.", "The database version is MySQL 5.6.", "The database version is MySQL 5.7.", - "The database version is SQL Server 2017 Standard.", - "The database version is SQL Server 2017 Enterprise.", - "The database version is SQL Server 2017 Express.", - "The database version is SQL Server 2017 Web.", - "The database version is PostgreSQL 9.6.", - "The database version is PostgreSQL 10.", - "The database version is PostgreSQL 11.", - "The database version is PostgreSQL 12.", - "The database version is PostgreSQL 13.", - "The database version is PostgreSQL 14.", - "The database version is PostgreSQL 15.", - "The database version is PostgreSQL 16.", - "The database version is PostgreSQL 17.", "The database version is MySQL 8.", "The database major version is MySQL 8.0 and the minor version is 18.", "The database major version is MySQL 8.0 and the minor version is 26.", @@ -2999,7 +2985,22 @@ "The database major version is MySQL 8.0 and the minor version is 38.", "The database major version is MySQL 8.0 and the minor version is 39.", "The database major version is MySQL 8.0 and the minor version is 40.", + "The database major version is MySQL 8.0 and the minor version is 41.", + "The database major version is MySQL 8.0 and the minor version is 42.", "The database version is MySQL 8.4.", + "The database version is SQL Server 2017 Standard.", + "The database version is SQL Server 2017 Enterprise.", + "The database version is SQL Server 2017 Express.", + "The database version is SQL Server 2017 Web.", + "The database version is PostgreSQL 9.6.", + "The database version is PostgreSQL 10.", + "The database version is PostgreSQL 11.", + "The database version is PostgreSQL 12.", + "The database version is PostgreSQL 13.", + "The database version is PostgreSQL 14.", + "The database version is PostgreSQL 15.", + "The database version is PostgreSQL 16.", + "The database version is PostgreSQL 17.", "The database version is SQL Server 2019 Standard.", "The database version is SQL Server 2019 Enterprise.", "The database version is SQL Server 2019 Express.", @@ -3186,19 +3187,6 @@ "MYSQL_5_5", "MYSQL_5_6", "MYSQL_5_7", - "SQLSERVER_2017_STANDARD", - "SQLSERVER_2017_ENTERPRISE", - "SQLSERVER_2017_EXPRESS", - 
"SQLSERVER_2017_WEB", - "POSTGRES_9_6", - "POSTGRES_10", - "POSTGRES_11", - "POSTGRES_12", - "POSTGRES_13", - "POSTGRES_14", - "POSTGRES_15", - "POSTGRES_16", - "POSTGRES_17", "MYSQL_8_0", "MYSQL_8_0_18", "MYSQL_8_0_26", @@ -3216,7 +3204,22 @@ "MYSQL_8_0_38", "MYSQL_8_0_39", "MYSQL_8_0_40", + "MYSQL_8_0_41", + "MYSQL_8_0_42", "MYSQL_8_4", + "SQLSERVER_2017_STANDARD", + "SQLSERVER_2017_ENTERPRISE", + "SQLSERVER_2017_EXPRESS", + "SQLSERVER_2017_WEB", + "POSTGRES_9_6", + "POSTGRES_10", + "POSTGRES_11", + "POSTGRES_12", + "POSTGRES_13", + "POSTGRES_14", + "POSTGRES_15", + "POSTGRES_16", + "POSTGRES_17", "SQLSERVER_2019_STANDARD", "SQLSERVER_2019_ENTERPRISE", "SQLSERVER_2019_EXPRESS", @@ -3237,6 +3240,9 @@ false, false, false, + true, + false, + false, false, false, false, @@ -3250,7 +3256,6 @@ false, false, false, - true, false, false, false, @@ -3278,19 +3283,6 @@ "The database version is MySQL 5.5.", "The database version is MySQL 5.6.", "The database version is MySQL 5.7.", - "The database version is SQL Server 2017 Standard.", - "The database version is SQL Server 2017 Enterprise.", - "The database version is SQL Server 2017 Express.", - "The database version is SQL Server 2017 Web.", - "The database version is PostgreSQL 9.6.", - "The database version is PostgreSQL 10.", - "The database version is PostgreSQL 11.", - "The database version is PostgreSQL 12.", - "The database version is PostgreSQL 13.", - "The database version is PostgreSQL 14.", - "The database version is PostgreSQL 15.", - "The database version is PostgreSQL 16.", - "The database version is PostgreSQL 17.", "The database version is MySQL 8.", "The database major version is MySQL 8.0 and the minor version is 18.", "The database major version is MySQL 8.0 and the minor version is 26.", @@ -3308,7 +3300,22 @@ "The database major version is MySQL 8.0 and the minor version is 38.", "The database major version is MySQL 8.0 and the minor version is 39.", "The database major version is MySQL 8.0 and the minor version is 40.", + "The database major version is MySQL 8.0 and the minor version is 41.", + "The database major version is MySQL 8.0 and the minor version is 42.", "The database version is MySQL 8.4.", + "The database version is SQL Server 2017 Standard.", + "The database version is SQL Server 2017 Enterprise.", + "The database version is SQL Server 2017 Express.", + "The database version is SQL Server 2017 Web.", + "The database version is PostgreSQL 9.6.", + "The database version is PostgreSQL 10.", + "The database version is PostgreSQL 11.", + "The database version is PostgreSQL 12.", + "The database version is PostgreSQL 13.", + "The database version is PostgreSQL 14.", + "The database version is PostgreSQL 15.", + "The database version is PostgreSQL 16.", + "The database version is PostgreSQL 17.", "The database version is SQL Server 2019 Standard.", "The database version is SQL Server 2019 Enterprise.", "The database version is SQL Server 2019 Express.", @@ -3449,7 +3456,7 @@ }, "replicationCluster": { "$ref": "ReplicationCluster", - "description": "A primary instance and disaster recovery (DR) replica pair. A DR replica is a cross-region replica that you designate for failover in the event that the primary instance experiences regional failure. Only applicable to MySQL." + "description": "A primary instance and disaster recovery (DR) replica pair. A DR replica is a cross-region replica that you designate for failover in the event that the primary instance experiences regional failure. 
Applicable to MySQL and PostgreSQL." }, "rootPassword": { "description": "Initial root password. Use only on creation. You must set root passwords before you can connect to PostgreSQL instances.", @@ -3561,6 +3568,13 @@ "description": "Input only. Whether Cloud SQL is enabled to switch storing point-in-time recovery log files from a data disk to Cloud Storage.", "type": "boolean" }, + "tags": { + "additionalProperties": { + "type": "string" + }, + "description": "Optional. Input only. Immutable. Tag keys and tag values that are bound to this instance. You must represent each item in the map as: `\"\" : \"\"`. For example, a single resource can have the following tags: ``` \"123/environment\": \"production\", \"123/costCenter\": \"marketing\", ``` For more information on tag creation and management, see https://cloud.google.com/resource-manager/docs/tags/tags-overview.", + "type": "object" + }, "upgradableDatabaseVersions": { "description": "Output only. All database versions that are available for upgrade.", "items": { @@ -3907,6 +3921,17 @@ }, "type": "object" }, + "ExternalSyncSelectedObject": { + "description": "The selected object that Cloud SQL migrates.", + "id": "ExternalSyncSelectedObject", + "properties": { + "database": { + "description": "The name of the database that Cloud SQL migrates.", + "type": "string" + } + }, + "type": "object" + }, "FailoverContext": { "description": "Database instance failover context.", "id": "FailoverContext", @@ -3951,19 +3976,6 @@ "MYSQL_5_5", "MYSQL_5_6", "MYSQL_5_7", - "SQLSERVER_2017_STANDARD", - "SQLSERVER_2017_ENTERPRISE", - "SQLSERVER_2017_EXPRESS", - "SQLSERVER_2017_WEB", - "POSTGRES_9_6", - "POSTGRES_10", - "POSTGRES_11", - "POSTGRES_12", - "POSTGRES_13", - "POSTGRES_14", - "POSTGRES_15", - "POSTGRES_16", - "POSTGRES_17", "MYSQL_8_0", "MYSQL_8_0_18", "MYSQL_8_0_26", @@ -3981,7 +3993,22 @@ "MYSQL_8_0_38", "MYSQL_8_0_39", "MYSQL_8_0_40", + "MYSQL_8_0_41", + "MYSQL_8_0_42", "MYSQL_8_4", + "SQLSERVER_2017_STANDARD", + "SQLSERVER_2017_ENTERPRISE", + "SQLSERVER_2017_EXPRESS", + "SQLSERVER_2017_WEB", + "POSTGRES_9_6", + "POSTGRES_10", + "POSTGRES_11", + "POSTGRES_12", + "POSTGRES_13", + "POSTGRES_14", + "POSTGRES_15", + "POSTGRES_16", + "POSTGRES_17", "SQLSERVER_2019_STANDARD", "SQLSERVER_2019_ENTERPRISE", "SQLSERVER_2019_EXPRESS", @@ -4002,6 +4029,9 @@ false, false, false, + true, + false, + false, false, false, false, @@ -4015,7 +4045,6 @@ false, false, false, - true, false, false, false, @@ -4043,19 +4072,6 @@ "The database version is MySQL 5.5.", "The database version is MySQL 5.6.", "The database version is MySQL 5.7.", - "The database version is SQL Server 2017 Standard.", - "The database version is SQL Server 2017 Enterprise.", - "The database version is SQL Server 2017 Express.", - "The database version is SQL Server 2017 Web.", - "The database version is PostgreSQL 9.6.", - "The database version is PostgreSQL 10.", - "The database version is PostgreSQL 11.", - "The database version is PostgreSQL 12.", - "The database version is PostgreSQL 13.", - "The database version is PostgreSQL 14.", - "The database version is PostgreSQL 15.", - "The database version is PostgreSQL 16.", - "The database version is PostgreSQL 17.", "The database version is MySQL 8.", "The database major version is MySQL 8.0 and the minor version is 18.", "The database major version is MySQL 8.0 and the minor version is 26.", @@ -4073,7 +4089,22 @@ "The database major version is MySQL 8.0 and the minor version is 38.", "The database major version is MySQL 8.0 and the 
minor version is 39.", "The database major version is MySQL 8.0 and the minor version is 40.", + "The database major version is MySQL 8.0 and the minor version is 41.", + "The database major version is MySQL 8.0 and the minor version is 42.", "The database version is MySQL 8.4.", + "The database version is SQL Server 2017 Standard.", + "The database version is SQL Server 2017 Enterprise.", + "The database version is SQL Server 2017 Express.", + "The database version is SQL Server 2017 Web.", + "The database version is PostgreSQL 9.6.", + "The database version is PostgreSQL 10.", + "The database version is PostgreSQL 11.", + "The database version is PostgreSQL 12.", + "The database version is PostgreSQL 13.", + "The database version is PostgreSQL 14.", + "The database version is PostgreSQL 15.", + "The database version is PostgreSQL 16.", + "The database version is PostgreSQL 17.", "The database version is SQL Server 2019 Standard.", "The database version is SQL Server 2019 Enterprise.", "The database version is SQL Server 2019 Express.", @@ -4666,6 +4697,13 @@ }, "type": "array" }, + "customSubjectAlternativeNames": { + "description": "Optional. Custom Subject Alternative Name(SAN)s for a Cloud SQL instance.", + "items": { + "type": "string" + }, + "type": "array" + }, "enablePrivatePathForGoogleCloudServices": { "description": "Controls connectivity to private IP instances from Google services, such as BigQuery.", "type": "boolean" @@ -4691,15 +4729,21 @@ "enum": [ "CA_MODE_UNSPECIFIED", "GOOGLE_MANAGED_INTERNAL_CA", - "GOOGLE_MANAGED_CAS_CA" + "GOOGLE_MANAGED_CAS_CA", + "CUSTOMER_MANAGED_CAS_CA" ], "enumDescriptions": [ "CA mode is unspecified. It is effectively the same as `GOOGLE_MANAGED_INTERNAL_CA`.", "Google-managed self-signed internal CA.", - "Google-managed regional CA part of root CA hierarchy hosted on Google Cloud's Certificate Authority Service (CAS)." + "Google-managed regional CA part of root CA hierarchy hosted on Google Cloud's Certificate Authority Service (CAS).", + "Customer-managed CA hosted on Google Cloud's Certificate Authority Service (CAS)." ], "type": "string" }, + "serverCaPool": { + "description": "Optional. The resource name of the server CA pool for an instance with `CUSTOMER_MANAGED_CAS_CA` as the `server_ca_mode`. Format: projects//locations//caPools/", + "type": "string" + }, "sslMode": { "description": "Specify how SSL/TLS is enforced in database connections. If you must use the `require_ssl` flag for backward compatibility, then only the following value pairs are valid: For PostgreSQL and MySQL: * `ssl_mode=ALLOW_UNENCRYPTED_AND_ENCRYPTED` and `require_ssl=false` * `ssl_mode=ENCRYPTED_ONLY` and `require_ssl=false` * `ssl_mode=TRUSTED_CLIENT_CERTIFICATE_REQUIRED` and `require_ssl=true` For SQL Server: * `ssl_mode=ALLOW_UNENCRYPTED_AND_ENCRYPTED` and `require_ssl=false` * `ssl_mode=ENCRYPTED_ONLY` and `require_ssl=true` The value of `ssl_mode` has priority over the value of `require_ssl`. For example, for the pair `ssl_mode=ENCRYPTED_ONLY` and `require_ssl=false`, `ssl_mode=ENCRYPTED_ONLY` means accept only SSL connections, while `require_ssl=false` means accept both non-SSL and SSL connections. In this case, MySQL and PostgreSQL databases respect `ssl_mode` and accepts only SSL connections.", "enum": [ @@ -4913,10 +4957,33 @@ "description": "The password for connecting to on-premises instance.", "type": "string" }, + "selectedObjects": { + "description": "Optional. 
A list of objects that the user selects for replication from an external source instance.", + "items": { + "$ref": "SelectedObjects" + }, + "type": "array" + }, "sourceInstance": { "$ref": "InstanceReference", "description": "The reference to Cloud SQL instance if the source is Cloud SQL." }, + "sslOption": { + "description": "Optional. SslOption for replica connection to the on-premises source.", + "enum": [ + "SSL_OPTION_UNSPECIFIED", + "DISABLE", + "REQUIRE", + "VERIFY_CA" + ], + "enumDescriptions": [ + "Unknown SSL option i.e. SSL option not specified by user.", + "SSL is disabled for replica connection to the on-premises source.", + "SSL is required for replica connection to the on-premises source.", + "Verify CA is required for replica connection to the on-premises source." + ], + "type": "string" + }, "username": { "description": "The username for connecting to on-premises instance.", "type": "string" @@ -5018,7 +5085,8 @@ "CLUSTER_MAINTENANCE", "SELF_SERVICE_MAINTENANCE", "SWITCHOVER_TO_REPLICA", - "MAJOR_VERSION_UPGRADE" + "MAJOR_VERSION_UPGRADE", + "ADVANCED_BACKUP" ], "enumDeprecated": [ false, @@ -5066,6 +5134,7 @@ true, true, false, + false, false ], "enumDescriptions": [ @@ -5114,7 +5183,8 @@ "Indicates that the instance, its read replicas, and its cascading replicas are in maintenance. Maintenance typically gets initiated on groups of replicas first, followed by the primary instance. For each instance, maintenance typically causes the instance to be unavailable for 1-3 minutes.", "Indicates that the instance (and any of its replicas) are currently in maintenance. This is initiated as a self-service request by using SSM. Maintenance typically causes the instance to be unavailable for 1-3 minutes.", "Switches a primary instance to a replica. This operation runs as part of a switchover operation to the original primary instance.", - "Updates the major version of a Cloud SQL instance." + "Updates the major version of a Cloud SQL instance.", + "Creates a backup for an Advanced BackupTier Cloud SQL instance." ], "type": "string" }, @@ -5143,6 +5213,10 @@ ], "type": "string" }, + "subOperationType": { + "$ref": "SqlSubOperationType", + "description": "Optional. The sub operation based on the operation type." + }, "targetId": { "description": "Name of the database instance related to this operation.", "type": "string" @@ -5208,7 +5282,7 @@ "type": "string" }, "cancelRequested": { - "description": "Output only. Identifies whether the user has requested cancellation of the operation. Operations that have been cancelled successfully have Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.", + "description": "Output only. Identifies whether the user has requested cancellation of the operation. Operations that have been cancelled successfully have google.longrunning.Operation.error value with a google.rpc.Status.code of `1`, corresponding to `Code.CANCELLED`.", "readOnly": true, "type": "boolean" }, @@ -5415,7 +5489,7 @@ "type": "object" }, "ReplicationCluster": { - "description": "A primary instance and disaster recovery (DR) replica pair. A DR replica is a cross-region replica that you designate for failover in the event that the primary instance has regional failure. Only applicable to MySQL.", + "description": "A primary instance and disaster recovery (DR) replica pair. A DR replica is a cross-region replica that you designate for failover in the event that the primary instance has regional failure. 
Applicable to MySQL and PostgreSQL.", "id": "ReplicationCluster", "properties": { "drReplica": { @@ -5428,7 +5502,7 @@ "type": "string" }, "psaWriteEndpoint": { - "description": "Output only. If set, it indicates this instance has a private service access (PSA) dns endpoint that is pointing to the primary instance of the cluster. If this instance is the primary, the dns should be pointing to this instance. After Switchover or Replica failover, this DNS endpoint points to the promoted instance. This is a read-only field, returned to the user as information. This field can exist even if a standalone instance does not yet have a replica, or had a DR replica that was deleted.", + "description": "Output only. If set, this field indicates this instance has a private service access (PSA) DNS endpoint that is pointing to the primary instance of the cluster. If this instance is the primary, then the DNS endpoint points to this instance. After a switchover or replica failover operation, this DNS endpoint points to the promoted instance. This is a read-only field, returned to the user as information. This field can exist even if a standalone instance doesn't have a DR replica yet or the DR replica is deleted.", "readOnly": true, "type": "string" } @@ -5516,6 +5590,17 @@ }, "type": "object" }, + "SelectedObjects": { + "description": "A list of objects that the user selects for replication from an external source instance.", + "id": "SelectedObjects", + "properties": { + "database": { + "description": "Required. The name of the database to migrate.", + "type": "string" + } + }, + "type": "object" + }, "Settings": { "description": "Database instance settings.", "id": "Settings", @@ -6063,6 +6148,13 @@ "$ref": "MySqlSyncConfig", "description": "Optional. MySQL-specific settings for start external sync." }, + "selectedObjects": { + "description": "Optional. Migrate only the specified objects from the source instance. If this field is empty, then migrate all objects.", + "items": { + "$ref": "ExternalSyncSelectedObject" + }, + "type": "array" + }, "syncMode": { "description": "External sync mode", "enum": [ @@ -6239,6 +6331,31 @@ }, "type": "object" }, + "SqlSubOperationType": { + "description": "The sub operation type based on the operation type.", + "id": "SqlSubOperationType", + "properties": { + "maintenanceType": { + "description": "The type of maintenance to be performed on the instance.", + "enum": [ + "SQL_MAINTENANCE_TYPE_UNSPECIFIED", + "INSTANCE_MAINTENANCE", + "REPLICA_INCLUDED_MAINTENANCE", + "INSTANCE_SELF_SERVICE_MAINTENANCE", + "REPLICA_INCLUDED_SELF_SERVICE_MAINTENANCE" + ], + "enumDescriptions": [ + "Maintenance type is unspecified.", + "Indicates that a standalone instance is undergoing maintenance. The instance can be either a primary instance or a replica.", + "Indicates that the primary instance and all of its replicas, including cascading replicas, are undergoing maintenance. Maintenance is performed on groups of replicas first, followed by the primary instance.", + "Indicates that the standalone instance is undergoing maintenance, initiated by self-service. The instance can be either a primary instance or a replica.", + "Indicates that the primary instance and all of its replicas are undergoing maintenance, initiated by self-service. Maintenance is performed on groups of replicas first, followed by the primary instance." 
+ ], + "type": "string" + } + }, + "type": "object" + }, "SslCert": { "description": "SslCerts Resource", "id": "SslCert", diff --git a/discovery/googleapis_beta/toolresults__v1beta3.json b/discovery/googleapis_beta/toolresults__v1beta3.json index b12ae482c..61cdbde00 100644 --- a/discovery/googleapis_beta/toolresults__v1beta3.json +++ b/discovery/googleapis_beta/toolresults__v1beta3.json @@ -25,7 +25,7 @@ "ownerDomain": "google.com", "ownerName": "Google", "protocol": "rest", - "revision": "20240321", + "revision": "20241121", "rootUrl": "https://toolresults.googleapis.com/", "servicePath": "", "title": "Cloud Tool Results API", diff --git a/generated/googleapis/README.md b/generated/googleapis/README.md index 60d745638..40dd2fa11 100644 --- a/generated/googleapis/README.md +++ b/generated/googleapis/README.md @@ -921,7 +921,7 @@ Reads and writes Google Forms and responses. #### Google Play Game Services - `games/v1` -The Google Play games service allows developers to enhance games with social leaderboards, achievements, game state, sign-in with Google, and more. +The Google Play Games Service allows developers to enhance games with social leaderboards, achievements, game state, sign-in with Google, and more. - [Original documentation](https://developers.google.com/games/) - [Dart package details](https://pub.dev/documentation/googleapis/14.0.0-wip/games_v1/games_v1-library.html) @@ -935,7 +935,7 @@ The Google Play Game Services Publishing API allows developers to configure thei #### Google Play Game Management - `gamesManagement/v1management` -The Google Play Game Management API allows developers to manage resources from the Google Play Game service. +The Google Play Games Management API allows developers to manage resources from the Google Play Game service. - [Original documentation](https://developers.google.com/games/) - [Dart package details](https://pub.dev/documentation/googleapis/14.0.0-wip/gamesManagement_v1management/gamesManagement_v1management-library.html) @@ -1183,6 +1183,13 @@ The Dataproc Metastore API is used to manage the lifecycle and configuration of - [Original documentation](https://cloud.google.com/dataproc-metastore/docs) - [Dart package details](https://pub.dev/documentation/googleapis/14.0.0-wip/metastore_v1/metastore_v1-library.html) +#### Dataproc Metastore API - `metastore/v2` + +The Dataproc Metastore API is used to manage the lifecycle and configuration of metastore services. + +- [Original documentation](https://cloud.google.com/dataproc-metastore/docs) +- [Dart package details](https://pub.dev/documentation/googleapis/14.0.0-wip/metastore_v2/metastore_v2-library.html) + #### Migration Center API - `migrationcenter/v1` A unified platform that helps you accelerate your end-to-end cloud journey from your current on-premises or cloud environments to Google Cloud. 
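For orientation, here is a minimal sketch of how one of the newly listed libraries, `metastore/v2`, would typically be consumed from the generated package. The import path follows the package's `package:googleapis/<api>/<version>.dart` convention; the class name `DataprocMetastoreApi`, its `cloudPlatformScope` constant, the `projects.locations.services.list` call, and the `services` field on the response are assumptions based on the generator's usual naming and this README entry, not confirmed by the diff, and the project ID and region are placeholders.

```dart
// Sketch only: names flagged as assumptions in the note above.
import 'package:googleapis/metastore/v2.dart';
import 'package:googleapis_auth/auth_io.dart';

Future<void> main() async {
  // Application Default Credentials; requires gcloud or a service account.
  final client = await clientViaApplicationDefaultCredentials(
    scopes: [DataprocMetastoreApi.cloudPlatformScope], // assumed constant
  );
  try {
    final api = DataprocMetastoreApi(client); // assumed class name
    // List Dataproc Metastore services in one region of a project.
    final response = await api.projects.locations.services
        .list('projects/my-project/locations/us-central1');
    for (final service in response.services ?? const []) {
      print(service.name);
    }
  } finally {
    client.close();
  }
}
```

The same pattern, an authenticated `http.Client` passed to the generated API class, would apply to the other libraries added in this release, such as `parallelstore/v1` and `securityposture/v1`.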
@@ -1347,6 +1354,11 @@ The PageSpeed Insights API lets you analyze the performance of your website with - [Original documentation](https://developers.google.com/speed/docs/insights/v5/about) - [Dart package details](https://pub.dev/documentation/googleapis/14.0.0-wip/pagespeedonline_v5/pagespeedonline_v5-library.html) +#### Parallelstore API - `parallelstore/v1` + +- [Original documentation](https://cloud.google.com/parallelstore) +- [Dart package details](https://pub.dev/documentation/googleapis/14.0.0-wip/parallelstore_v1/parallelstore_v1-library.html) + #### Payments Reseller Subscription API - `paymentsresellersubscription/v1` - [Original documentation](https://developers.google.com/payments/reseller/subscription/) @@ -1552,6 +1564,13 @@ Security Command Center API provides access to temporal views of assets and find - [Original documentation](https://cloud.google.com/security-command-center) - [Dart package details](https://pub.dev/documentation/googleapis/14.0.0-wip/securitycenter_v1/securitycenter_v1-library.html) +#### Security Posture API - `securityposture/v1` + +Defines, assesses, and monitors the overall status of your security in Google Cloud. You can use security postures to evaluate your current cloud security against defined benchmarks and help maintain the level of security that your organization requires. + +- [Original documentation](https://cloud.google.com/security-command-center) +- [Dart package details](https://pub.dev/documentation/googleapis/14.0.0-wip/securityposture_v1/securityposture_v1-library.html) + #### Service Consumer Management API - `serviceconsumermanagement/v1` Manages the service consumers of a Service Infrastructure service. @@ -1783,7 +1802,7 @@ API for Verified Access chrome extension to provide credential verification for Version History API - Prod -- [Original documentation](https://developer.chrome.com/docs/versionhistory/) +- [Original documentation](https://developer.chrome.com/docs/web-platform/versionhistory/guide) - [Dart package details](https://pub.dev/documentation/googleapis/14.0.0-wip/versionhistory_v1/versionhistory_v1-library.html) #### Cloud Video Intelligence API - `videointelligence/v1` diff --git a/generated/googleapis/lib/accesscontextmanager/v1.dart b/generated/googleapis/lib/accesscontextmanager/v1.dart index 9b02ef837..2fee328e4 100644 --- a/generated/googleapis/lib/accesscontextmanager/v1.dart +++ b/generated/googleapis/lib/accesscontextmanager/v1.dart @@ -1448,8 +1448,8 @@ class OperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// [request] - The metadata request object. /// @@ -1828,18 +1828,19 @@ class OrganizationsGcpUserAccessBindingsResource { /// [append] - Optional. This field controls whether or not certain repeated /// settings in the update request overwrite or append to existing settings on /// the binding. If true, then append. Otherwise overwrite. So far, only - /// scoped_access_settings supports appending. 
Global access_levels, - /// dry_run_access_levels, and reauth_settings are not compatible with append - /// functionality, and the request will return an error if append=true when - /// these settings are in the update_mask. The request will also return an - /// error if append=true when "scoped_access_settings" is not set in the - /// update_mask. + /// scoped_access_settings with reauth_settings supports appending. Global + /// access_levels, access_levels in scoped_access_settings, + /// dry_run_access_levels, reauth_settings, and session_settings are not + /// compatible with append functionality, and the request will return an error + /// if append=true when these settings are in the update_mask. The request + /// will also return an error if append=true when "scoped_access_settings" is + /// not set in the update_mask. /// /// [updateMask] - Required. Only the fields specified in this mask are /// updated. Because name and group_key cannot be changed, update_mask is /// required and may only contain the following fields: `access_levels`, - /// `dry_run_access_levels`, `reauth_settings`, `scoped_access_settings`. - /// update_mask { paths: "access_levels" } + /// `dry_run_access_levels`, `reauth_settings` `session_settings`, + /// `scoped_access_settings`. update_mask { paths: "access_levels" } /// /// [$fields] - Selector specifying which fields to include in a partial /// response. @@ -2070,14 +2071,14 @@ class AccessSettings { /// Optional. core.List? accessLevels; - /// Reauth settings applied to user access on a given AccessScope. + /// Session settings applied to user access on a given AccessScope. /// /// Optional. - ReauthSettings? reauthSettings; + SessionSettings? sessionSettings; AccessSettings({ this.accessLevels, - this.reauthSettings, + this.sessionSettings, }); AccessSettings.fromJson(core.Map json_) @@ -2085,15 +2086,15 @@ class AccessSettings { accessLevels: (json_['accessLevels'] as core.List?) ?.map((value) => value as core.String) .toList(), - reauthSettings: json_.containsKey('reauthSettings') - ? ReauthSettings.fromJson(json_['reauthSettings'] + sessionSettings: json_.containsKey('sessionSettings') + ? SessionSettings.fromJson(json_['sessionSettings'] as core.Map) : null, ); core.Map toJson() => { if (accessLevels != null) 'accessLevels': accessLevels!, - if (reauthSettings != null) 'reauthSettings': reauthSettings!, + if (sessionSettings != null) 'sessionSettings': sessionSettings!, }; } @@ -2815,9 +2816,19 @@ class EgressPolicy { /// cause this EgressPolicy to apply. EgressTo? egressTo; + /// Human-readable title for the egress rule. + /// + /// The title must be unique within the perimeter and can not exceed 100 + /// characters. Within the access policy, the combined length of all rule + /// titles must not exceed 240,000 characters. + /// + /// Optional. + core.String? title; + EgressPolicy({ this.egressFrom, this.egressTo, + this.title, }); EgressPolicy.fromJson(core.Map json_) @@ -2830,11 +2841,13 @@ class EgressPolicy { ? EgressTo.fromJson( json_['egressTo'] as core.Map) : null, + title: json_['title'] as core.String?, ); core.Map toJson() => { if (egressFrom != null) 'egressFrom': egressFrom!, if (egressTo != null) 'egressTo': egressTo!, + if (title != null) 'title': title!, }; } @@ -2974,11 +2987,6 @@ class GcpUserAccessBinding { /// Immutable. core.String? name; - /// GCSL policy for the group key. - /// - /// Optional. - ReauthSettings? reauthSettings; - /// A list of applications that are subject to this binding's restrictions. 
/// /// If the list is empty, the binding restrictions will universally apply to @@ -2995,14 +3003,19 @@ class GcpUserAccessBinding { /// Optional. core.List? scopedAccessSettings; + /// The Google Cloud session length (GCSL) policy for the group key. + /// + /// Optional. + SessionSettings? sessionSettings; + GcpUserAccessBinding({ this.accessLevels, this.dryRunAccessLevels, this.groupKey, this.name, - this.reauthSettings, this.restrictedClientApplications, this.scopedAccessSettings, + this.sessionSettings, }); GcpUserAccessBinding.fromJson(core.Map json_) @@ -3015,10 +3028,6 @@ class GcpUserAccessBinding { .toList(), groupKey: json_['groupKey'] as core.String?, name: json_['name'] as core.String?, - reauthSettings: json_.containsKey('reauthSettings') - ? ReauthSettings.fromJson(json_['reauthSettings'] - as core.Map) - : null, restrictedClientApplications: (json_['restrictedClientApplications'] as core.List?) ?.map((value) => Application.fromJson( @@ -3028,6 +3037,10 @@ class GcpUserAccessBinding { ?.map((value) => ScopedAccessSettings.fromJson( value as core.Map)) .toList(), + sessionSettings: json_.containsKey('sessionSettings') + ? SessionSettings.fromJson(json_['sessionSettings'] + as core.Map) + : null, ); core.Map toJson() => { @@ -3036,11 +3049,11 @@ class GcpUserAccessBinding { 'dryRunAccessLevels': dryRunAccessLevels!, if (groupKey != null) 'groupKey': groupKey!, if (name != null) 'name': name!, - if (reauthSettings != null) 'reauthSettings': reauthSettings!, if (restrictedClientApplications != null) 'restrictedClientApplications': restrictedClientApplications!, if (scopedAccessSettings != null) 'scopedAccessSettings': scopedAccessSettings!, + if (sessionSettings != null) 'sessionSettings': sessionSettings!, }; } @@ -3149,9 +3162,19 @@ class IngressPolicy { /// cause this IngressPolicy to apply. IngressTo? ingressTo; + /// Human-readable title for the ingress rule. + /// + /// The title must be unique within the perimeter and can not exceed 100 + /// characters. Within the access policy, the combined length of all rule + /// titles must not exceed 240,000 characters. + /// + /// Optional. + core.String? title; + IngressPolicy({ this.ingressFrom, this.ingressTo, + this.title, }); IngressPolicy.fromJson(core.Map json_) @@ -3164,11 +3187,13 @@ class IngressPolicy { ? IngressTo.fromJson( json_['ingressTo'] as core.Map) : null, + title: json_['title'] as core.String?, ); core.Map toJson() => { if (ingressFrom != null) 'ingressFrom': ingressFrom!, if (ingressTo != null) 'ingressTo': ingressTo!, + if (title != null) 'title': title!, }; } @@ -3622,87 +3647,6 @@ class Policy { }; } -/// Stores settings related to Google Cloud Session Length including session -/// duration, the type of challenge (i.e. method) they should face when their -/// session expires, and other related settings. -class ReauthSettings { - /// How long a user is allowed to take between actions before a new access - /// token must be issued. - /// - /// Presently only set for Cloud Apps. - /// - /// Optional. - core.String? maxInactivity; - - /// Reauth method when users GCP session is up. - /// - /// Optional. - /// Possible string values are: - /// - "REAUTH_METHOD_UNSPECIFIED" : If method undefined in API, we will use - /// LOGIN by default. - /// - "LOGIN" : The user will prompted to perform regular login. Users who are - /// enrolled for two-step verification and haven't chosen to "Remember this - /// computer" will be prompted for their second factor. 
- /// - "SECURITY_KEY" : The user will be prompted to autheticate using their - /// security key. If no security key has been configured, then we will - /// fallback to LOGIN. - /// - "PASSWORD" : The user will be prompted for their password. - core.String? reauthMethod; - - /// The session length. - /// - /// Setting this field to zero is equal to disabling. Reauth. Also can set - /// infinite session by flipping the enabled bit to false below. If - /// use_oidc_max_age is true, for OIDC apps, the session length will be the - /// minimum of this field and OIDC max_age param. - /// - /// Optional. - core.String? sessionLength; - - /// Big red button to turn off GCSL. - /// - /// When false, all fields set above will be disregarded and the session - /// length is basically infinite. - /// - /// Optional. - core.bool? sessionLengthEnabled; - - /// Only useful for OIDC apps. - /// - /// When false, the OIDC max_age param, if passed in the authentication - /// request will be ignored. When true, the re-auth period will be the minimum - /// of the session_length field and the max_age OIDC param. - /// - /// Optional. - core.bool? useOidcMaxAge; - - ReauthSettings({ - this.maxInactivity, - this.reauthMethod, - this.sessionLength, - this.sessionLengthEnabled, - this.useOidcMaxAge, - }); - - ReauthSettings.fromJson(core.Map json_) - : this( - maxInactivity: json_['maxInactivity'] as core.String?, - reauthMethod: json_['reauthMethod'] as core.String?, - sessionLength: json_['sessionLength'] as core.String?, - sessionLengthEnabled: json_['sessionLengthEnabled'] as core.bool?, - useOidcMaxAge: json_['useOidcMaxAge'] as core.bool?, - ); - - core.Map toJson() => { - if (maxInactivity != null) 'maxInactivity': maxInactivity!, - if (reauthMethod != null) 'reauthMethod': reauthMethod!, - if (sessionLength != null) 'sessionLength': sessionLength!, - if (sessionLengthEnabled != null) - 'sessionLengthEnabled': sessionLengthEnabled!, - if (useOidcMaxAge != null) 'useOidcMaxAge': useOidcMaxAge!, - }; -} - /// A request to replace all existing Access Levels in an Access Policy with the /// Access Levels provided. /// @@ -3860,6 +3804,14 @@ class ServicePerimeter { /// Does not affect behavior. core.String? description; + /// An opaque identifier for the current version of the `ServicePerimeter`. + /// + /// This identifier does not follow any specific format. If an etag is not + /// provided, the operation will be performed as if a valid etag is provided. + /// + /// Optional. + core.String? etag; + /// Identifier. /// /// Resource name for the `ServicePerimeter`. Format: @@ -3916,6 +3868,7 @@ class ServicePerimeter { ServicePerimeter({ this.description, + this.etag, this.name, this.perimeterType, this.spec, @@ -3927,6 +3880,7 @@ class ServicePerimeter { ServicePerimeter.fromJson(core.Map json_) : this( description: json_['description'] as core.String?, + etag: json_['etag'] as core.String?, name: json_['name'] as core.String?, perimeterType: json_['perimeterType'] as core.String?, spec: json_.containsKey('spec') @@ -3943,6 +3897,7 @@ class ServicePerimeter { core.Map toJson() => { if (description != null) 'description': description!, + if (etag != null) 'etag': etag!, if (name != null) 'name': name!, if (perimeterType != null) 'perimeterType': perimeterType!, if (spec != null) 'spec': spec!, @@ -4046,6 +4001,88 @@ class ServicePerimeterConfig { }; } +/// Stores settings related to Google Cloud Session Length including session +/// duration, the type of challenge (i.e. 
method) they should face when their +/// session expires, and other related settings. +class SessionSettings { + /// How long a user is allowed to take between actions before a new access + /// token must be issued. + /// + /// Only set for Google Cloud apps. + /// + /// Optional. + core.String? maxInactivity; + + /// The session length. + /// + /// Setting this field to zero is equal to disabling session. Also can set + /// infinite session by flipping the enabled bit to false below. If + /// use_oidc_max_age is true, for OIDC apps, the session length will be the + /// minimum of this field and OIDC max_age param. + /// + /// Optional. + core.String? sessionLength; + + /// This field enables or disables Google Cloud session length. + /// + /// When false, all fields set above will be disregarded and the session + /// length is basically infinite. + /// + /// Optional. + core.bool? sessionLengthEnabled; + + /// Session method when user's Google Cloud session is up. + /// + /// Optional. + /// Possible string values are: + /// - "SESSION_REAUTH_METHOD_UNSPECIFIED" : If method is undefined in the API, + /// LOGIN will be used by default. + /// - "LOGIN" : The user will be prompted to perform regular login. Users who + /// are enrolled for two-step verification and haven't chosen "Remember this + /// computer" will be prompted for their second factor. + /// - "SECURITY_KEY" : The user will be prompted to authenticate using their + /// security key. If no security key has been configured, then authentication + /// will fallback to LOGIN. + /// - "PASSWORD" : The user will be prompted for their password. + core.String? sessionReauthMethod; + + /// Only useful for OIDC apps. + /// + /// When false, the OIDC max_age param, if passed in the authentication + /// request will be ignored. When true, the re-auth period will be the minimum + /// of the session_length field and the max_age OIDC param. + /// + /// Optional. + core.bool? useOidcMaxAge; + + SessionSettings({ + this.maxInactivity, + this.sessionLength, + this.sessionLengthEnabled, + this.sessionReauthMethod, + this.useOidcMaxAge, + }); + + SessionSettings.fromJson(core.Map json_) + : this( + maxInactivity: json_['maxInactivity'] as core.String?, + sessionLength: json_['sessionLength'] as core.String?, + sessionLengthEnabled: json_['sessionLengthEnabled'] as core.bool?, + sessionReauthMethod: json_['sessionReauthMethod'] as core.String?, + useOidcMaxAge: json_['useOidcMaxAge'] as core.bool?, + ); + + core.Map toJson() => { + if (maxInactivity != null) 'maxInactivity': maxInactivity!, + if (sessionLength != null) 'sessionLength': sessionLength!, + if (sessionLengthEnabled != null) + 'sessionLengthEnabled': sessionLengthEnabled!, + if (sessionReauthMethod != null) + 'sessionReauthMethod': sessionReauthMethod!, + if (useOidcMaxAge != null) 'useOidcMaxAge': useOidcMaxAge!, + }; +} + /// Request message for `SetIamPolicy` method. class SetIamPolicyRequest { /// REQUIRED: The complete policy to be applied to the `resource`. diff --git a/generated/googleapis/lib/addressvalidation/v1.dart b/generated/googleapis/lib/addressvalidation/v1.dart index 048a10e90..b2b473bb7 100644 --- a/generated/googleapis/lib/addressvalidation/v1.dart +++ b/generated/googleapis/lib/addressvalidation/v1.dart @@ -224,6 +224,12 @@ class GoogleMapsAddressvalidationV1Address { /// The post-processed address, formatted as a single-line address following /// the address formatting rules of the region where the address is located. 
+ /// + /// Note: the format of this address may not match the format of the address + /// in the `postal_address` field. For example, the `postal_address` always + /// represents the country as a 2 letter `region_code`, such as "US" or "NZ". + /// By contrast, this field uses a longer form of the country name, such as + /// "USA" or "New Zealand". core.String? formattedAddress; /// The types of components that were expected to be present in a correctly @@ -254,9 +260,9 @@ class GoogleMapsAddressvalidationV1Address { /// Any tokens in the input that could not be resolved. /// /// This might be an input that was not recognized as a valid part of an - /// address (for example in an input like "123235253253 Main St, San - /// Francisco, CA, 94105", the unresolved tokens may look like - /// `["123235253253"]` since that does not look like a valid street number. + /// address. For example, for an input such as "Parcel 0000123123 & 0000456456 + /// Str # Guthrie Center IA 50115 US", the unresolved tokens might look like + /// `["Parcel", "0000123123", "&", "0000456456"]`. core.List? unresolvedTokens; GoogleMapsAddressvalidationV1Address({ @@ -1453,16 +1459,16 @@ class GoogleMapsAddressvalidationV1Verdict { /// the WGS84 standard. Values must be within normalized ranges. typedef GoogleTypeLatLng = $LatLng; -/// Represents a postal address, e.g. for postal delivery or payments addresses. +/// Represents a postal address. /// -/// Given a postal address, a postal service can deliver items to a premise, -/// P.O. Box or similar. It is not intended to model geographical locations -/// (roads, towns, mountains). In typical usage an address would be created via -/// user input or from importing existing data, depending on the type of -/// process. Advice on address input / editing: - Use an -/// internationalization-ready address widget such as -/// https://github.com/google/libaddressinput) - Users should not be presented -/// with UI elements for input or editing of fields outside countries where that -/// field is used. For more guidance on how to use this schema, please see: +/// For example for postal delivery or payments addresses. Given a postal +/// address, a postal service can deliver items to a premise, P.O. Box or +/// similar. It is not intended to model geographical locations (roads, towns, +/// mountains). In typical usage an address would be created by user input or +/// from importing existing data, depending on the type of process. Advice on +/// address input / editing: - Use an internationalization-ready address widget +/// such as https://github.com/google/libaddressinput) - Users should not be +/// presented with UI elements for input or editing of fields outside countries +/// where that field is used. For more guidance on how to use this schema, see: /// https://support.google.com/business/answer/6397478 -typedef GoogleTypePostalAddress = $PostalAddress; +typedef GoogleTypePostalAddress = $PostalAddress00; diff --git a/generated/googleapis/lib/admin/directory_v1.dart b/generated/googleapis/lib/admin/directory_v1.dart index 206f79239..fad45a065 100644 --- a/generated/googleapis/lib/admin/directory_v1.dart +++ b/generated/googleapis/lib/admin/directory_v1.dart @@ -7260,6 +7260,35 @@ class Buildings { }; } +/// Represents a data capacity with some amount of current usage in bytes. +class ByteUsage { + /// The total capacity value, in bytes. + /// + /// Output only. + core.String? capacityBytes; + + /// The current usage value, in bytes. + /// + /// Output only. + core.String? 
usedBytes; + + ByteUsage({ + this.capacityBytes, + this.usedBytes, + }); + + ByteUsage.fromJson(core.Map json_) + : this( + capacityBytes: json_['capacityBytes'] as core.String?, + usedBytes: json_['usedBytes'] as core.String?, + ); + + core.Map toJson() => { + if (capacityBytes != null) 'capacityBytes': capacityBytes!, + if (usedBytes != null) 'usedBytes': usedBytes!, + }; +} + /// Public API: Resources.calendars class CalendarResource { /// Unique ID for the building a resource is located in. @@ -8147,6 +8176,11 @@ class ChromeOsDevice { /// - "kioskUpgrade" : The device has an annual Kiosk Upgrade. core.String? deviceLicenseType; + /// How much disk space the device has available and is currently using. + /// + /// Output only. + ByteUsage? diskSpaceUsage; + /// Reports of disk space and other info about mounted/connected volumes. core.List? diskVolumeReports; @@ -8340,6 +8374,7 @@ class ChromeOsDevice { this.deviceFiles, this.deviceId, this.deviceLicenseType, + this.diskSpaceUsage, this.diskVolumeReports, this.dockMacAddress, this.etag, @@ -8410,6 +8445,10 @@ class ChromeOsDevice { .toList(), deviceId: json_['deviceId'] as core.String?, deviceLicenseType: json_['deviceLicenseType'] as core.String?, + diskSpaceUsage: json_.containsKey('diskSpaceUsage') + ? ByteUsage.fromJson(json_['diskSpaceUsage'] + as core.Map) + : null, diskVolumeReports: (json_['diskVolumeReports'] as core.List?) ?.map((value) => ChromeOsDeviceDiskVolumeReports.fromJson( value as core.Map)) @@ -8497,6 +8536,7 @@ class ChromeOsDevice { if (deviceFiles != null) 'deviceFiles': deviceFiles!, if (deviceId != null) 'deviceId': deviceId!, if (deviceLicenseType != null) 'deviceLicenseType': deviceLicenseType!, + if (diskSpaceUsage != null) 'diskSpaceUsage': diskSpaceUsage!, if (diskVolumeReports != null) 'diskVolumeReports': diskVolumeReports!, if (dockMacAddress != null) 'dockMacAddress': dockMacAddress!, if (etag != null) 'etag': etag!, @@ -10412,15 +10452,7 @@ class MobileDevices { /// The customer's organizational unit hierarchy is limited to 35 levels of /// depth. class OrgUnit { - /// Determines if a sub-organizational unit can inherit the settings of the - /// parent organization. - /// - /// The default value is `false`, meaning a sub-organizational unit inherits - /// the settings of the nearest parent organizational unit. This field is - /// deprecated. Setting it to `true` is no longer supported and can have - /// _unintended consequences_. For more information about inheritance and - /// users in an organization structure, see the - /// [administration help center](https://support.google.com/a/answer/4352075). + /// This field is deprecated and setting its value has no effect. @core.Deprecated( 'Not supported. Member documentation may have more information.', ) @@ -11150,6 +11182,39 @@ class RoleAssignment { /// - "group" : A group within the domain. core.String? assigneeType; + /// The condition associated with this role assignment. + /// + /// Note: Feature is available to Enterprise Standard, Enterprise Plus, Google + /// Workspace for Education Plus and Cloud Identity Premium customers. A + /// `RoleAssignment` with the `condition` field set will only take effect when + /// the resource being accessed meets the condition. If `condition` is empty, + /// the role (`role_id`) is applied to the actor (`assigned_to`) at the scope + /// (`scope_type`) unconditionally. 
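A sketch of reading the new ChromeOsDevice.diskSpaceUsage field: capacityBytes and usedBytes arrive as decimal strings, so they are parsed before computing a percentage. The authenticated http.Client, the customer and device ids, and the pre-existing chromeosdevices.get call are assumptions, not part of this change.

import 'package:googleapis/admin/directory_v1.dart';
import 'package:http/http.dart' as http;

Future<void> printDiskUsage(http.Client client) async {
  final api = DirectoryApi(client);
  // Placeholder ids; chromeosdevices.get is existing API surface, not added here.
  final device =
      await api.chromeosdevices.get('my_customer', 'example-device-id');

  final usage = device.diskSpaceUsage; // new ByteUsage field from this diff
  if (usage == null) {
    print('No disk usage reported.');
    return;
  }
  // capacityBytes/usedBytes are output-only decimal strings.
  final capacity = int.parse(usage.capacityBytes ?? '0');
  final used = int.parse(usage.usedBytes ?? '0');
  final percent = capacity == 0 ? 0 : (100 * used / capacity).round();
  print('Disk: $used of $capacity bytes used ($percent%).');
}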
Currently, the following conditions are + /// supported: - To make the `RoleAssignment` only applicable to + /// [Security Groups](https://cloud.google.com/identity/docs/groups#group_types): + /// `api.getAttribute('cloudidentity.googleapis.com/groups.labels', + /// []).hasAny(['groups.security']) && resource.type == + /// 'cloudidentity.googleapis.com/Group'` - To make the `RoleAssignment` not + /// applicable to + /// [Security Groups](https://cloud.google.com/identity/docs/groups#group_types): + /// `!api.getAttribute('cloudidentity.googleapis.com/groups.labels', + /// []).hasAny(['groups.security']) && resource.type == + /// 'cloudidentity.googleapis.com/Group'` Currently, the condition strings + /// have to be verbatim and they only work with the following \[pre-built + /// administrator roles\](https://support.google.com/a/answer/2405986): - + /// Groups Editor - Groups Reader The condition follows + /// [Cloud IAM condition syntax](https://cloud.google.com/iam/docs/conditions-overview). + /// Additional conditions related to Locked Groups are available under Open + /// Beta. - To make the `RoleAssignment` not applicable to + /// [Locked Groups](https://cloud.google.com/identity/docs/groups#group_types): + /// `!api.getAttribute('cloudidentity.googleapis.com/groups.labels', + /// []).hasAny(['groups.locked']) && resource.type == + /// 'cloudidentity.googleapis.com/Group'` This condition can also be used in + /// conjunction with a Security-related condition. + /// + /// Optional. + core.String? condition; + /// ETag of the resource. core.String? etag; @@ -11174,6 +11239,7 @@ class RoleAssignment { RoleAssignment({ this.assignedTo, this.assigneeType, + this.condition, this.etag, this.kind, this.orgUnitId, @@ -11186,6 +11252,7 @@ class RoleAssignment { : this( assignedTo: json_['assignedTo'] as core.String?, assigneeType: json_['assigneeType'] as core.String?, + condition: json_['condition'] as core.String?, etag: json_['etag'] as core.String?, kind: json_['kind'] as core.String?, orgUnitId: json_['orgUnitId'] as core.String?, @@ -11197,6 +11264,7 @@ class RoleAssignment { core.Map toJson() => { if (assignedTo != null) 'assignedTo': assignedTo!, if (assigneeType != null) 'assigneeType': assigneeType!, + if (condition != null) 'condition': condition!, if (etag != null) 'etag': etag!, if (kind != null) 'kind': kind!, if (orgUnitId != null) 'orgUnitId': orgUnitId!, diff --git a/generated/googleapis/lib/aiplatform/v1.dart b/generated/googleapis/lib/aiplatform/v1.dart index bda1e35ec..5f48bec3e 100644 --- a/generated/googleapis/lib/aiplatform/v1.dart +++ b/generated/googleapis/lib/aiplatform/v1.dart @@ -24,9 +24,11 @@ /// - [DatasetsResource] /// - [DatasetsDatasetVersionsResource] /// - [EndpointsResource] +/// - [MediaResource] /// - [ProjectsResource] /// - [ProjectsLocationsResource] /// - [ProjectsLocationsBatchPredictionJobsResource] +/// - [ProjectsLocationsCachedContentsResource] /// - [ProjectsLocationsCustomJobsResource] /// - [ProjectsLocationsCustomJobsOperationsResource] /// - [ProjectsLocationsDataLabelingJobsResource] @@ -101,6 +103,12 @@ /// - [ProjectsLocationsPipelineJobsOperationsResource] /// - [ProjectsLocationsPublishersResource] /// - [ProjectsLocationsPublishersModelsResource] +/// - [ProjectsLocationsRagCorporaResource] +/// - [ProjectsLocationsRagCorporaOperationsResource] +/// - [ProjectsLocationsRagCorporaRagFilesResource] +/// - [ProjectsLocationsRagCorporaRagFilesOperationsResource] +/// - [ProjectsLocationsReasoningEnginesResource] +/// - 
[ProjectsLocationsReasoningEnginesOperationsResource] /// - [ProjectsLocationsSchedulesResource] /// - [ProjectsLocationsSchedulesOperationsResource] /// - [ProjectsLocationsSpecialistPoolsResource] @@ -136,7 +144,15 @@ import '../shared.dart'; import '../src/user_agent.dart'; export 'package:_discoveryapis_commons/_discoveryapis_commons.dart' - show ApiRequestError, DetailedApiRequestError; + show + ApiRequestError, + ByteRange, + DetailedApiRequestError, + DownloadOptions, + Media, + PartialDownloadOptions, + ResumableUploadOptions, + UploadOptions; /// Train high-quality custom machine learning models with minimal machine /// learning expertise and effort. @@ -155,6 +171,7 @@ class AiplatformApi { DatasetsResource get datasets => DatasetsResource(_requester); EndpointsResource get endpoints => EndpointsResource(_requester); + MediaResource get media => MediaResource(_requester); ProjectsResource get projects => ProjectsResource(_requester); PublishersResource get publishers => PublishersResource(_requester); @@ -861,6 +878,67 @@ class EndpointsResource { } } +class MediaResource { + final commons.ApiRequester _requester; + + MediaResource(commons.ApiRequester client) : _requester = client; + + /// Upload a file into a RagCorpus. + /// + /// [request] - The metadata request object. + /// + /// Request parameters: + /// + /// [parent] - Required. The name of the RagCorpus resource into which to + /// upload the file. Format: + /// `projects/{project}/locations/{location}/ragCorpora/{rag_corpus}` + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/ragCorpora/\[^/\]+$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// [uploadMedia] - The media to upload. + /// + /// Completes with a [GoogleCloudAiplatformV1UploadRagFileResponse]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future upload( + GoogleCloudAiplatformV1UploadRagFileRequest request, + core.String parent, { + core.String? $fields, + commons.Media? uploadMedia, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + core.String url_; + if (uploadMedia == null) { + url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/ragFiles:upload'; + } else { + url_ = + '/upload/v1/' + core.Uri.encodeFull('$parent') + '/ragFiles:upload'; + } + + final response_ = await _requester.request( + url_, + 'POST', + body: body_, + queryParams: queryParams_, + uploadMedia: uploadMedia, + uploadOptions: commons.UploadOptions.defaultOptions, + ); + return GoogleCloudAiplatformV1UploadRagFileResponse.fromJson( + response_ as core.Map); + } +} + class ProjectsResource { final commons.ApiRequester _requester; @@ -868,6 +946,85 @@ class ProjectsResource { ProjectsLocationsResource(_requester); ProjectsResource(commons.ApiRequester client) : _requester = client; + + /// Gets a GenAI cache config. + /// + /// Request parameters: + /// + /// [name] - Required. Name of the cache config. Format: - + /// `projects/{project}/cacheConfig`. + /// Value must have pattern `^projects/\[^/\]+/cacheConfig$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [GoogleCloudAiplatformV1CacheConfig]. 
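A sketch of the new media.upload entry point, which streams a file into a RagCorpus. The authenticated client, the corpus name, and the empty GoogleCloudAiplatformV1UploadRagFileRequest are placeholders; the metadata fields it accepts live in the generated request class, and Media is re-exported by this library per the export list above.

import 'dart:convert';

import 'package:googleapis/aiplatform/v1.dart';
import 'package:http/http.dart' as http;

Future<void> uploadRagFile(http.Client client) async {
  final api = AiplatformApi(client);

  // A tiny in-memory payload stands in for a real document.
  final bytes = utf8.encode('hello rag corpus');
  final media =
      Media(Stream.value(bytes), bytes.length, contentType: 'text/plain');

  // Request fields (file metadata, chunking config, ...) are omitted here;
  // see the generated GoogleCloudAiplatformV1UploadRagFileRequest class.
  final response = await api.media.upload(
    GoogleCloudAiplatformV1UploadRagFileRequest(),
    'projects/my-project/locations/us-central1/ragCorpora/my-corpus',
    uploadMedia: media,
  );
  print(response.toJson());
}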
+ /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future getCacheConfig( + core.String name, { + core.String? $fields, + }) async { + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name'); + + final response_ = await _requester.request( + url_, + 'GET', + queryParams: queryParams_, + ); + return GoogleCloudAiplatformV1CacheConfig.fromJson( + response_ as core.Map); + } + + /// Updates a cache config. + /// + /// [request] - The metadata request object. + /// + /// Request parameters: + /// + /// [name] - Identifier. Name of the cache config. Format: - + /// `projects/{project}/cacheConfig`. + /// Value must have pattern `^projects/\[^/\]+/cacheConfig$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [GoogleLongrunningOperation]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future updateCacheConfig( + GoogleCloudAiplatformV1CacheConfig request, + core.String name, { + core.String? $fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name'); + + final response_ = await _requester.request( + url_, + 'PATCH', + body: body_, + queryParams: queryParams_, + ); + return GoogleLongrunningOperation.fromJson( + response_ as core.Map); + } } class ProjectsLocationsResource { @@ -875,6 +1032,8 @@ class ProjectsLocationsResource { ProjectsLocationsBatchPredictionJobsResource get batchPredictionJobs => ProjectsLocationsBatchPredictionJobsResource(_requester); + ProjectsLocationsCachedContentsResource get cachedContents => + ProjectsLocationsCachedContentsResource(_requester); ProjectsLocationsCustomJobsResource get customJobs => ProjectsLocationsCustomJobsResource(_requester); ProjectsLocationsDataLabelingJobsResource get dataLabelingJobs => @@ -925,6 +1084,10 @@ class ProjectsLocationsResource { ProjectsLocationsPipelineJobsResource(_requester); ProjectsLocationsPublishersResource get publishers => ProjectsLocationsPublishersResource(_requester); + ProjectsLocationsRagCorporaResource get ragCorpora => + ProjectsLocationsRagCorporaResource(_requester); + ProjectsLocationsReasoningEnginesResource get reasoningEngines => + ProjectsLocationsReasoningEnginesResource(_requester); ProjectsLocationsSchedulesResource get schedules => ProjectsLocationsSchedulesResource(_requester); ProjectsLocationsSpecialistPoolsResource get specialistPools => @@ -940,6 +1103,98 @@ class ProjectsLocationsResource { ProjectsLocationsResource(commons.ApiRequester client) : _requester = client; + /// Given an input prompt, it returns augmented prompt from vertex rag store + /// to guide LLM towards generating grounded responses. + /// + /// [request] - The metadata request object. + /// + /// Request parameters: + /// + /// [parent] - Required. The resource name of the Location from which to + /// augment prompt. The users must have permission to make a call in the + /// project. Format: `projects/{project}/locations/{location}`. 
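A sketch of the two project-level cache-config calls added above: it fetches the GenAI cache config and sends it straight back through updateCacheConfig, which returns a long-running operation. The client and project id are placeholders; in real use you would adjust fields on the fetched config before updating.

import 'package:googleapis/aiplatform/v1.dart';
import 'package:http/http.dart' as http;

Future<void> touchCacheConfig(http.Client client) async {
  final api = AiplatformApi(client);
  const name = 'projects/my-project/cacheConfig'; // placeholder project

  final config = await api.projects.getCacheConfig(name);
  // Adjust fields on `config` here; they are defined on the generated
  // GoogleCloudAiplatformV1CacheConfig class.
  final op = await api.projects.updateCacheConfig(config, name);
  print('started operation: ${op.name}');
}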
+ /// Value must have pattern `^projects/\[^/\]+/locations/\[^/\]+$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [GoogleCloudAiplatformV1AugmentPromptResponse]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future augmentPrompt( + GoogleCloudAiplatformV1AugmentPromptRequest request, + core.String parent, { + core.String? $fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$parent') + ':augmentPrompt'; + + final response_ = await _requester.request( + url_, + 'POST', + body: body_, + queryParams: queryParams_, + ); + return GoogleCloudAiplatformV1AugmentPromptResponse.fromJson( + response_ as core.Map); + } + + /// Given an input text, it returns a score that evaluates the factuality of + /// the text. + /// + /// It also extracts and returns claims from the text and provides supporting + /// facts. + /// + /// [request] - The metadata request object. + /// + /// Request parameters: + /// + /// [parent] - Required. The resource name of the Location from which to + /// corroborate text. The users must have permission to make a call in the + /// project. Format: `projects/{project}/locations/{location}`. + /// Value must have pattern `^projects/\[^/\]+/locations/\[^/\]+$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [GoogleCloudAiplatformV1CorroborateContentResponse]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future + corroborateContent( + GoogleCloudAiplatformV1CorroborateContentRequest request, + core.String parent, { + core.String? $fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$parent') + ':corroborateContent'; + + final response_ = await _requester.request( + url_, + 'POST', + body: body_, + queryParams: queryParams_, + ); + return GoogleCloudAiplatformV1CorroborateContentResponse.fromJson( + response_ as core.Map); + } + /// Evaluates instances based on a given metric. /// /// [request] - The metadata request object. @@ -1071,6 +1326,50 @@ class ProjectsLocationsResource { return GoogleCloudLocationListLocationsResponse.fromJson( response_ as core.Map); } + + /// Retrieves relevant contexts for a query. + /// + /// [request] - The metadata request object. + /// + /// Request parameters: + /// + /// [parent] - Required. The resource name of the Location from which to + /// retrieve RagContexts. The users must have permission to make a call in the + /// project. Format: `projects/{project}/locations/{location}`. + /// Value must have pattern `^projects/\[^/\]+/locations/\[^/\]+$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [GoogleCloudAiplatformV1RetrieveContextsResponse]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. 
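A sketch of the RAG helper calls added to the locations resource, augmentPrompt and retrieveContexts, both addressed to a projects/{project}/locations/{location} parent. The client and resource names are placeholders, and both request payloads are left empty because their fields live in the generated request classes rather than in this hunk.

import 'package:googleapis/aiplatform/v1.dart';
import 'package:http/http.dart' as http;

Future<void> ragHelpers(http.Client client) async {
  final api = AiplatformApi(client);
  const parent = 'projects/my-project/locations/us-central1'; // placeholder

  // Augment a prompt with grounding material from a Vertex RAG store.
  final augmented = await api.projects.locations.augmentPrompt(
    GoogleCloudAiplatformV1AugmentPromptRequest(),
    parent,
  );
  print(augmented.toJson());

  // Retrieve relevant contexts for a query.
  final contexts = await api.projects.locations.retrieveContexts(
    GoogleCloudAiplatformV1RetrieveContextsRequest(),
    parent,
  );
  print(contexts.toJson());
}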
+ /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future + retrieveContexts( + GoogleCloudAiplatformV1RetrieveContextsRequest request, + core.String parent, { + core.String? $fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$parent') + ':retrieveContexts'; + + final response_ = await _requester.request( + url_, + 'POST', + body: body_, + queryParams: queryParams_, + ); + return GoogleCloudAiplatformV1RetrieveContextsResponse.fromJson( + response_ as core.Map); + } } class ProjectsLocationsBatchPredictionJobsResource { @@ -1323,6 +1622,229 @@ class ProjectsLocationsBatchPredictionJobsResource { } } +class ProjectsLocationsCachedContentsResource { + final commons.ApiRequester _requester; + + ProjectsLocationsCachedContentsResource(commons.ApiRequester client) + : _requester = client; + + /// Creates cached content, this call will initialize the cached content in + /// the data storage, and users need to pay for the cache data storage. + /// + /// [request] - The metadata request object. + /// + /// Request parameters: + /// + /// [parent] - Required. The parent resource where the cached content will be + /// created + /// Value must have pattern `^projects/\[^/\]+/locations/\[^/\]+$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [GoogleCloudAiplatformV1CachedContent]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future create( + GoogleCloudAiplatformV1CachedContent request, + core.String parent, { + core.String? $fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/cachedContents'; + + final response_ = await _requester.request( + url_, + 'POST', + body: body_, + queryParams: queryParams_, + ); + return GoogleCloudAiplatformV1CachedContent.fromJson( + response_ as core.Map); + } + + /// Deletes cached content + /// + /// Request parameters: + /// + /// [name] - Required. The resource name referring to the cached content + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/cachedContents/\[^/\]+$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [GoogleProtobufEmpty]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future delete( + core.String name, { + core.String? $fields, + }) async { + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name'); + + final response_ = await _requester.request( + url_, + 'DELETE', + queryParams: queryParams_, + ); + return GoogleProtobufEmpty.fromJson( + response_ as core.Map); + } + + /// Gets cached content configurations + /// + /// Request parameters: + /// + /// [name] - Required. 
The resource name referring to the cached content + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/cachedContents/\[^/\]+$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [GoogleCloudAiplatformV1CachedContent]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future get( + core.String name, { + core.String? $fields, + }) async { + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name'); + + final response_ = await _requester.request( + url_, + 'GET', + queryParams: queryParams_, + ); + return GoogleCloudAiplatformV1CachedContent.fromJson( + response_ as core.Map); + } + + /// Lists cached contents in a project + /// + /// Request parameters: + /// + /// [parent] - Required. The parent, which owns this collection of cached + /// contents. + /// Value must have pattern `^projects/\[^/\]+/locations/\[^/\]+$`. + /// + /// [pageSize] - Optional. The maximum number of cached contents to return. + /// The service may return fewer than this value. If unspecified, some default + /// (under maximum) number of items will be returned. The maximum value is + /// 1000; values above 1000 will be coerced to 1000. + /// + /// [pageToken] - Optional. A page token, received from a previous + /// `ListCachedContents` call. Provide this to retrieve the subsequent page. + /// When paginating, all other parameters provided to `ListCachedContents` + /// must match the call that provided the page token. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [GoogleCloudAiplatformV1ListCachedContentsResponse]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future list( + core.String parent, { + core.int? pageSize, + core.String? pageToken, + core.String? $fields, + }) async { + final queryParams_ = >{ + if (pageSize != null) 'pageSize': ['${pageSize}'], + if (pageToken != null) 'pageToken': [pageToken], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/cachedContents'; + + final response_ = await _requester.request( + url_, + 'GET', + queryParams: queryParams_, + ); + return GoogleCloudAiplatformV1ListCachedContentsResponse.fromJson( + response_ as core.Map); + } + + /// Updates cached content configurations + /// + /// [request] - The metadata request object. + /// + /// Request parameters: + /// + /// [name] - Immutable. Identifier. The server-generated resource name of the + /// cached content Format: + /// projects/{project}/locations/{location}/cachedContents/{cached_content} + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/cachedContents/\[^/\]+$`. + /// + /// [updateMask] - Required. The list of fields to update. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [GoogleCloudAiplatformV1CachedContent]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. 
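A sketch of basic housekeeping with the new cachedContents resource: list a page, get one entry by name, then delete it. The client and resource names are placeholders.

import 'package:googleapis/aiplatform/v1.dart';
import 'package:http/http.dart' as http;

Future<void> cachedContentHousekeeping(http.Client client) async {
  final api = AiplatformApi(client);
  const parent = 'projects/my-project/locations/us-central1'; // placeholder

  // List the first page of cached contents in the location.
  final page =
      await api.projects.locations.cachedContents.list(parent, pageSize: 10);
  print(page.toJson());

  const name = // placeholder resource name
      'projects/my-project/locations/us-central1/cachedContents/12345';
  final content = await api.projects.locations.cachedContents.get(name);
  print(content.toJson());

  // Drop the cached content so it no longer accrues storage charges.
  await api.projects.locations.cachedContents.delete(name);
}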
+ /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future patch( + GoogleCloudAiplatformV1CachedContent request, + core.String name, { + core.String? updateMask, + core.String? $fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if (updateMask != null) 'updateMask': [updateMask], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name'); + + final response_ = await _requester.request( + url_, + 'PATCH', + body: body_, + queryParams: queryParams_, + ); + return GoogleCloudAiplatformV1CachedContent.fromJson( + response_ as core.Map); + } +} + class ProjectsLocationsCustomJobsResource { final commons.ApiRequester _requester; @@ -1583,8 +2105,8 @@ class ProjectsLocationsCustomJobsOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// Request parameters: /// @@ -2066,8 +2588,8 @@ class ProjectsLocationsDataLabelingJobsOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// Request parameters: /// @@ -2816,8 +3338,8 @@ class ProjectsLocationsDatasetsAnnotationSpecsOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// Request parameters: /// @@ -3202,8 +3724,8 @@ class ProjectsLocationsDatasetsDataItemsAnnotationsOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// Request parameters: /// @@ -3441,8 +3963,8 @@ class ProjectsLocationsDatasetsDataItemsOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. 
+ /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// Request parameters: /// @@ -3957,8 +4479,8 @@ class ProjectsLocationsDatasetsOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// Request parameters: /// @@ -4303,8 +4825,8 @@ class ProjectsLocationsDatasetsSavedQueriesOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// Request parameters: /// @@ -4829,8 +5351,8 @@ class ProjectsLocationsDeploymentResourcePoolsOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// Request parameters: /// @@ -5427,6 +5949,52 @@ class ProjectsLocationsEndpointsResource { response_ as core.Map); } + /// Fetch an asynchronous online prediction operation. + /// + /// [request] - The metadata request object. + /// + /// Request parameters: + /// + /// [endpoint] - Required. The name of the Endpoint requested to serve the + /// prediction. Format: + /// `projects/{project}/locations/{location}/endpoints/{endpoint}` or + /// `projects/{project}/locations/{location}/publishers/{publisher}/models/{model}` + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/endpoints/\[^/\]+$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [GoogleLongrunningOperation]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future fetchPredictOperation( + GoogleCloudAiplatformV1FetchPredictOperationRequest request, + core.String endpoint, { + core.String? $fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = + 'v1/' + core.Uri.encodeFull('$endpoint') + ':fetchPredictOperation'; + + final response_ = await _requester.request( + url_, + 'POST', + body: body_, + queryParams: queryParams_, + ); + return GoogleLongrunningOperation.fromJson( + response_ as core.Map); + } + /// Generate content with multimodal inputs. /// /// [request] - The metadata request object. 
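A sketch of asynchronous prediction on an endpoint, pairing the predictLongRunning method added a little further down with the fetchPredictOperation call added here: start the operation, then poll its state through the same endpoint. The client, the endpoint name, and the empty request payloads are placeholders; the request fields live in the generated request classes.

import 'package:googleapis/aiplatform/v1.dart';
import 'package:http/http.dart' as http;

Future<void> asyncPredict(http.Client client) async {
  final api = AiplatformApi(client);
  const endpoint = // placeholder endpoint resource name
      'projects/my-project/locations/us-central1/endpoints/1234567890';

  // Kick off the asynchronous online prediction.
  final op = await api.projects.locations.endpoints.predictLongRunning(
    GoogleCloudAiplatformV1PredictLongRunningRequest(),
    endpoint,
  );
  print('operation: ${op.name}');

  // Later, fetch the operation's current state via the same endpoint.
  final latest = await api.projects.locations.endpoints.fetchPredictOperation(
    GoogleCloudAiplatformV1FetchPredictOperationRequest(),
    endpoint,
  );
  print('done: ${latest.done}');
}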
@@ -5722,6 +6290,50 @@ class ProjectsLocationsEndpointsResource { response_ as core.Map); } + /// [request] - The metadata request object. + /// + /// Request parameters: + /// + /// [endpoint] - Required. The name of the Endpoint requested to serve the + /// prediction. Format: + /// `projects/{project}/locations/{location}/endpoints/{endpoint}` or + /// `projects/{project}/locations/{location}/publishers/{publisher}/models/{model}` + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/endpoints/\[^/\]+$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [GoogleLongrunningOperation]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future predictLongRunning( + GoogleCloudAiplatformV1PredictLongRunningRequest request, + core.String endpoint, { + core.String? $fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = + 'v1/' + core.Uri.encodeFull('$endpoint') + ':predictLongRunning'; + + final response_ = await _requester.request( + url_, + 'POST', + body: body_, + queryParams: queryParams_, + ); + return GoogleLongrunningOperation.fromJson( + response_ as core.Map); + } + /// Perform an online prediction with an arbitrary HTTP payload. /// /// The response includes the following HTTP headers: * @@ -5954,6 +6566,48 @@ class ProjectsLocationsEndpointsResource { return GoogleLongrunningOperation.fromJson( response_ as core.Map); } + + /// Updates an Endpoint with a long running operation. + /// + /// [request] - The metadata request object. + /// + /// Request parameters: + /// + /// [name] - Output only. The resource name of the Endpoint. + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/endpoints/\[^/\]+$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [GoogleLongrunningOperation]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future update( + GoogleCloudAiplatformV1UpdateEndpointLongRunningRequest request, + core.String name, { + core.String? $fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name') + ':update'; + + final response_ = await _requester.request( + url_, + 'POST', + body: body_, + queryParams: queryParams_, + ); + return GoogleLongrunningOperation.fromJson( + response_ as core.Map); + } } class ProjectsLocationsEndpointsChatResource { @@ -6021,8 +6675,8 @@ class ProjectsLocationsEndpointsOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. 
/// /// Request parameters: /// @@ -6414,10 +7068,10 @@ class ProjectsLocationsFeatureGroupsResource { /// greater than 100 will be coerced to 100. /// /// [pageToken] - A page token, received from a previous - /// FeatureGroupAdminService.ListFeatureGroups call. Provide this to retrieve + /// FeatureRegistryService.ListFeatureGroups call. Provide this to retrieve /// the subsequent page. When paginating, all other parameters provided to - /// FeatureGroupAdminService.ListFeatureGroups must match the call that - /// provided the page token. + /// FeatureRegistryService.ListFeatureGroups must match the call that provided + /// the page token. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. @@ -8017,7 +8671,8 @@ class ProjectsLocationsFeatureOnlineStoresFeatureViewsResource { /// fields: * `labels` * `service_agent_type` * `big_query_source` * /// `big_query_source.uri` * `big_query_source.entity_id_columns` * /// `feature_registry_source` * `feature_registry_source.feature_groups` * - /// `sync_config` * `sync_config.cron` + /// `sync_config` * `sync_config.cron` * + /// `optimized_config.automatic_resources` /// /// [$fields] - Selector specifying which fields to include in a partial /// response. @@ -10447,8 +11102,8 @@ class ProjectsLocationsFeaturestoresEntityTypesFeaturesOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// Request parameters: /// @@ -10686,8 +11341,8 @@ class ProjectsLocationsFeaturestoresEntityTypesOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// Request parameters: /// @@ -10924,8 +11579,8 @@ class ProjectsLocationsFeaturestoresOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// Request parameters: /// @@ -11418,8 +12073,8 @@ class ProjectsLocationsHyperparameterTuningJobsOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. 
/// /// Request parameters: /// @@ -12127,8 +12782,8 @@ class ProjectsLocationsIndexEndpointsOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// Request parameters: /// @@ -12686,8 +13341,8 @@ class ProjectsLocationsIndexesOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// Request parameters: /// @@ -13529,8 +14184,8 @@ class ProjectsLocationsMetadataStoresArtifactsOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// Request parameters: /// @@ -14293,8 +14948,8 @@ class ProjectsLocationsMetadataStoresContextsOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// Request parameters: /// @@ -14952,8 +15607,8 @@ class ProjectsLocationsMetadataStoresExecutionsOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// Request parameters: /// @@ -15350,8 +16005,8 @@ class ProjectsLocationsMetadataStoresOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. 
/// /// Request parameters: /// @@ -15692,8 +16347,8 @@ class ProjectsLocationsMigratableResourcesOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// Request parameters: /// @@ -16344,8 +16999,8 @@ class ProjectsLocationsModelDeploymentMonitoringJobsOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// Request parameters: /// @@ -17442,8 +18097,8 @@ class ProjectsLocationsModelsEvaluationsOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// Request parameters: /// @@ -17829,8 +18484,8 @@ class ProjectsLocationsModelsOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// Request parameters: /// @@ -18558,7 +19213,7 @@ class ProjectsLocationsNotebookExecutionJobsResource { /// [pageSize] - Optional. The standard list page size. /// /// [pageToken] - Optional. The standard list page token. Typically obtained - /// via ListNotebookExecutionJobs.next_page_token of the previous + /// via ListNotebookExecutionJobsResponse.next_page_token of the previous /// NotebookService.ListNotebookExecutionJobs call. /// /// [view] - Optional. The NotebookExecutionJob view. Defaults to BASIC. @@ -18626,8 +19281,8 @@ class ProjectsLocationsNotebookExecutionJobsOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// Request parameters: /// @@ -19062,10 +19717,12 @@ class ProjectsLocationsNotebookRuntimeTemplatesResource { /// \`labels.key=value\` - key:value equality * \`labels.key:* or labels:key - /// key existence * A key including a space must be quoted. 
\`labels."a /// key"\`. * \`notebookRuntimeType\` supports = and !=. notebookRuntimeType - /// enum: \[USER_DEFINED, ONE_CLICK\]. Some examples: * + /// enum: \[USER_DEFINED, ONE_CLICK\]. * \`machineType\` supports = and !=. * + /// \`acceleratorType\` supports = and !=. Some examples: * /// \`notebookRuntimeTemplate=notebookRuntimeTemplate123\` * /// \`displayName="myDisplayName"\` * \`labels.myKey="myValue"\` * - /// \`notebookRuntimeType=USER_DEFINED\` + /// \`notebookRuntimeType=USER_DEFINED\` * \`machineType=e2-standard-4\` * + /// \`acceleratorType=NVIDIA_TESLA_T4\` /// /// [orderBy] - Optional. A comma-separated list of fields to order by, sorted /// in ascending order. Use "desc" after a field name for descending. @@ -19288,8 +19945,8 @@ class ProjectsLocationsNotebookRuntimeTemplatesOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// Request parameters: /// @@ -19672,14 +20329,16 @@ class ProjectsLocationsNotebookRuntimesResource { /// \[UI_RESOURCE_STATE_UNSPECIFIED, UI_RESOURCE_STATE_BEING_CREATED, /// UI_RESOURCE_STATE_ACTIVE, UI_RESOURCE_STATE_BEING_DELETED, /// UI_RESOURCE_STATE_CREATION_FAILED\]. * `notebookRuntimeType` supports = - /// and !=. notebookRuntimeType enum: \[USER_DEFINED, ONE_CLICK\]. Some - /// examples: * `notebookRuntime="notebookRuntime123"` * + /// and !=. notebookRuntimeType enum: \[USER_DEFINED, ONE_CLICK\]. * + /// `machineType` supports = and !=. * `acceleratorType` supports = and !=. + /// Some examples: * `notebookRuntime="notebookRuntime123"` * /// `displayName="myDisplayName"` and `displayName=~"myDisplayNameRegex"` * /// `notebookRuntimeTemplate="notebookRuntimeTemplate321"` * /// `healthState=HEALTHY` * `runtimeState=RUNNING` * /// `runtimeUser="test@google.com"` * /// `uiState=UI_RESOURCE_STATE_BEING_DELETED` * - /// `notebookRuntimeType=USER_DEFINED` + /// `notebookRuntimeType=USER_DEFINED` * `machineType=e2-standard-4` * + /// `acceleratorType=NVIDIA_TESLA_T4` /// /// [orderBy] - Optional. A comma-separated list of fields to order by, sorted /// in ascending order. Use "desc" after a field name for descending. @@ -19778,6 +20437,51 @@ class ProjectsLocationsNotebookRuntimesResource { response_ as core.Map); } + /// Stops a NotebookRuntime. + /// + /// [request] - The metadata request object. + /// + /// Request parameters: + /// + /// [name] - Required. The name of the NotebookRuntime resource to be stopped. + /// Instead of checking whether the name is in valid NotebookRuntime resource + /// name format, directly throw NotFound exception if there is no such + /// NotebookRuntime in spanner. + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/notebookRuntimes/\[^/\]+$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [GoogleLongrunningOperation]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. 
+ async.Future stop( + GoogleCloudAiplatformV1StopNotebookRuntimeRequest request, + core.String name, { + core.String? $fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name') + ':stop'; + + final response_ = await _requester.request( + url_, + 'POST', + body: body_, + queryParams: queryParams_, + ); + return GoogleLongrunningOperation.fromJson( + response_ as core.Map); + } + /// Upgrades a NotebookRuntime. /// /// [request] - The metadata request object. @@ -19839,8 +20543,8 @@ class ProjectsLocationsNotebookRuntimesOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// Request parameters: /// @@ -20077,8 +20781,8 @@ class ProjectsLocationsOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// Request parameters: /// @@ -20449,7 +21153,7 @@ class ProjectsLocationsPersistentResourcesResource { /// [pageSize] - Optional. The standard list page size. /// /// [pageToken] - Optional. The standard list page token. Typically obtained - /// via ListPersistentResourceResponse.next_page_token of the previous + /// via ListPersistentResourcesResponse.next_page_token of the previous /// PersistentResourceService.ListPersistentResource call. /// /// [$fields] - Selector specifying which fields to include in a partial @@ -20592,8 +21296,8 @@ class ProjectsLocationsPersistentResourcesOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// Request parameters: /// @@ -21203,8 +21907,8 @@ class ProjectsLocationsPipelineJobsOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// Request parameters: /// @@ -21530,53 +22234,7 @@ class ProjectsLocationsPublishersModelsResource { response_ as core.Map); } - /// Generate content with multimodal inputs. - /// - /// [request] - The metadata request object. 
- /// - /// Request parameters: - /// - /// [model] - Required. The fully qualified name of the publisher model or - /// tuned model endpoint to use. Publisher model format: - /// `projects/{project}/locations/{location}/publishers / * /models / * ` - /// Tuned model endpoint format: - /// `projects/{project}/locations/{location}/endpoints/{endpoint}` - /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/publishers/\[^/\]+/models/\[^/\]+$`. - /// - /// [$fields] - Selector specifying which fields to include in a partial - /// response. - /// - /// Completes with a [GoogleCloudAiplatformV1GenerateContentResponse]. - /// - /// Completes with a [commons.ApiRequestError] if the API endpoint returned an - /// error. - /// - /// If the used [http.Client] completes with an error when making a REST call, - /// this method will complete with the same error. - async.Future generateContent( - GoogleCloudAiplatformV1GenerateContentRequest request, - core.String model, { - core.String? $fields, - }) async { - final body_ = convert.json.encode(request); - final queryParams_ = >{ - if ($fields != null) 'fields': [$fields], - }; - - final url_ = 'v1/' + core.Uri.encodeFull('$model') + ':generateContent'; - - final response_ = await _requester.request( - url_, - 'POST', - body: body_, - queryParams: queryParams_, - ); - return GoogleCloudAiplatformV1GenerateContentResponse.fromJson( - response_ as core.Map); - } - - /// Perform an online prediction. + /// Fetch an asynchronous online prediction operation. /// /// [request] - The metadata request object. /// @@ -21584,117 +22242,23 @@ class ProjectsLocationsPublishersModelsResource { /// /// [endpoint] - Required. The name of the Endpoint requested to serve the /// prediction. Format: - /// `projects/{project}/locations/{location}/endpoints/{endpoint}` - /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/publishers/\[^/\]+/models/\[^/\]+$`. - /// - /// [$fields] - Selector specifying which fields to include in a partial - /// response. - /// - /// Completes with a [GoogleCloudAiplatformV1PredictResponse]. - /// - /// Completes with a [commons.ApiRequestError] if the API endpoint returned an - /// error. - /// - /// If the used [http.Client] completes with an error when making a REST call, - /// this method will complete with the same error. - async.Future predict( - GoogleCloudAiplatformV1PredictRequest request, - core.String endpoint, { - core.String? $fields, - }) async { - final body_ = convert.json.encode(request); - final queryParams_ = >{ - if ($fields != null) 'fields': [$fields], - }; - - final url_ = 'v1/' + core.Uri.encodeFull('$endpoint') + ':predict'; - - final response_ = await _requester.request( - url_, - 'POST', - body: body_, - queryParams: queryParams_, - ); - return GoogleCloudAiplatformV1PredictResponse.fromJson( - response_ as core.Map); - } - - /// Perform an online prediction with an arbitrary HTTP payload. - /// - /// The response includes the following HTTP headers: * - /// `X-Vertex-AI-Endpoint-Id`: ID of the Endpoint that served this prediction. - /// * `X-Vertex-AI-Deployed-Model-Id`: ID of the Endpoint's DeployedModel that - /// served this prediction. - /// - /// [request] - The metadata request object. - /// - /// Request parameters: - /// - /// [endpoint] - Required. The name of the Endpoint requested to serve the - /// prediction. 
Format: - /// `projects/{project}/locations/{location}/endpoints/{endpoint}` - /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/publishers/\[^/\]+/models/\[^/\]+$`. - /// - /// [$fields] - Selector specifying which fields to include in a partial - /// response. - /// - /// Completes with a [GoogleApiHttpBody]. - /// - /// Completes with a [commons.ApiRequestError] if the API endpoint returned an - /// error. - /// - /// If the used [http.Client] completes with an error when making a REST call, - /// this method will complete with the same error. - async.Future rawPredict( - GoogleCloudAiplatformV1RawPredictRequest request, - core.String endpoint, { - core.String? $fields, - }) async { - final body_ = convert.json.encode(request); - final queryParams_ = >{ - if ($fields != null) 'fields': [$fields], - }; - - final url_ = 'v1/' + core.Uri.encodeFull('$endpoint') + ':rawPredict'; - - final response_ = await _requester.request( - url_, - 'POST', - body: body_, - queryParams: queryParams_, - ); - return GoogleApiHttpBody.fromJson( - response_ as core.Map); - } - - /// Perform a server-side streaming online prediction request for Vertex LLM - /// streaming. - /// - /// [request] - The metadata request object. - /// - /// Request parameters: - /// - /// [endpoint] - Required. The name of the Endpoint requested to serve the - /// prediction. Format: - /// `projects/{project}/locations/{location}/endpoints/{endpoint}` + /// `projects/{project}/locations/{location}/endpoints/{endpoint}` or + /// `projects/{project}/locations/{location}/publishers/{publisher}/models/{model}` /// Value must have pattern /// `^projects/\[^/\]+/locations/\[^/\]+/publishers/\[^/\]+/models/\[^/\]+$`. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. /// - /// Completes with a [GoogleCloudAiplatformV1StreamingPredictResponse]. + /// Completes with a [GoogleLongrunningOperation]. /// /// Completes with a [commons.ApiRequestError] if the API endpoint returned an /// error. /// /// If the used [http.Client] completes with an error when making a REST call, /// this method will complete with the same error. - async.Future - serverStreamingPredict( - GoogleCloudAiplatformV1StreamingPredictRequest request, + async.Future fetchPredictOperation( + GoogleCloudAiplatformV1FetchPredictOperationRequest request, core.String endpoint, { core.String? $fields, }) async { @@ -21704,7 +22268,7 @@ class ProjectsLocationsPublishersModelsResource { }; final url_ = - 'v1/' + core.Uri.encodeFull('$endpoint') + ':serverStreamingPredict'; + 'v1/' + core.Uri.encodeFull('$endpoint') + ':fetchPredictOperation'; final response_ = await _requester.request( url_, @@ -21712,11 +22276,11 @@ class ProjectsLocationsPublishersModelsResource { body: body_, queryParams: queryParams_, ); - return GoogleCloudAiplatformV1StreamingPredictResponse.fromJson( + return GoogleLongrunningOperation.fromJson( response_ as core.Map); } - /// Generate content with multimodal inputs with streaming support. + /// Generate content with multimodal inputs. /// /// [request] - The metadata request object. /// @@ -21740,8 +22304,7 @@ class ProjectsLocationsPublishersModelsResource { /// /// If the used [http.Client] completes with an error when making a REST call, /// this method will complete with the same error. - async.Future - streamGenerateContent( + async.Future generateContent( GoogleCloudAiplatformV1GenerateContentRequest request, core.String model, { core.String? 
$fields, @@ -21751,8 +22314,7 @@ class ProjectsLocationsPublishersModelsResource { if ($fields != null) 'fields': [$fields], }; - final url_ = - 'v1/' + core.Uri.encodeFull('$model') + ':streamGenerateContent'; + final url_ = 'v1/' + core.Uri.encodeFull('$model') + ':generateContent'; final response_ = await _requester.request( url_, @@ -21764,7 +22326,7 @@ class ProjectsLocationsPublishersModelsResource { response_ as core.Map); } - /// Perform a streaming online prediction with an arbitrary HTTP payload. + /// Perform an online prediction. /// /// [request] - The metadata request object. /// @@ -21779,15 +22341,15 @@ class ProjectsLocationsPublishersModelsResource { /// [$fields] - Selector specifying which fields to include in a partial /// response. /// - /// Completes with a [GoogleApiHttpBody]. + /// Completes with a [GoogleCloudAiplatformV1PredictResponse]. /// /// Completes with a [commons.ApiRequestError] if the API endpoint returned an /// error. /// /// If the used [http.Client] completes with an error when making a REST call, /// this method will complete with the same error. - async.Future streamRawPredict( - GoogleCloudAiplatformV1StreamRawPredictRequest request, + async.Future predict( + GoogleCloudAiplatformV1PredictRequest request, core.String endpoint, { core.String? $fields, }) async { @@ -21796,7 +22358,7 @@ class ProjectsLocationsPublishersModelsResource { if ($fields != null) 'fields': [$fields], }; - final url_ = 'v1/' + core.Uri.encodeFull('$endpoint') + ':streamRawPredict'; + final url_ = 'v1/' + core.Uri.encodeFull('$endpoint') + ':predict'; final response_ = await _requester.request( url_, @@ -21804,43 +22366,84 @@ class ProjectsLocationsPublishersModelsResource { body: body_, queryParams: queryParams_, ); - return GoogleApiHttpBody.fromJson( + return GoogleCloudAiplatformV1PredictResponse.fromJson( response_ as core.Map); } -} -class ProjectsLocationsSchedulesResource { - final commons.ApiRequester _requester; + /// [request] - The metadata request object. + /// + /// Request parameters: + /// + /// [endpoint] - Required. The name of the Endpoint requested to serve the + /// prediction. Format: + /// `projects/{project}/locations/{location}/endpoints/{endpoint}` or + /// `projects/{project}/locations/{location}/publishers/{publisher}/models/{model}` + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/publishers/\[^/\]+/models/\[^/\]+$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [GoogleLongrunningOperation]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future predictLongRunning( + GoogleCloudAiplatformV1PredictLongRunningRequest request, + core.String endpoint, { + core.String? 
$fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; - ProjectsLocationsSchedulesOperationsResource get operations => - ProjectsLocationsSchedulesOperationsResource(_requester); + final url_ = + 'v1/' + core.Uri.encodeFull('$endpoint') + ':predictLongRunning'; - ProjectsLocationsSchedulesResource(commons.ApiRequester client) - : _requester = client; + final response_ = await _requester.request( + url_, + 'POST', + body: body_, + queryParams: queryParams_, + ); + return GoogleLongrunningOperation.fromJson( + response_ as core.Map); + } - /// Creates a Schedule. + /// Perform an online prediction with an arbitrary HTTP payload. + /// + /// The response includes the following HTTP headers: * + /// `X-Vertex-AI-Endpoint-Id`: ID of the Endpoint that served this prediction. + /// * `X-Vertex-AI-Deployed-Model-Id`: ID of the Endpoint's DeployedModel that + /// served this prediction. /// /// [request] - The metadata request object. /// /// Request parameters: /// - /// [parent] - Required. The resource name of the Location to create the - /// Schedule in. Format: `projects/{project}/locations/{location}` - /// Value must have pattern `^projects/\[^/\]+/locations/\[^/\]+$`. + /// [endpoint] - Required. The name of the Endpoint requested to serve the + /// prediction. Format: + /// `projects/{project}/locations/{location}/endpoints/{endpoint}` + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/publishers/\[^/\]+/models/\[^/\]+$`. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. /// - /// Completes with a [GoogleCloudAiplatformV1Schedule]. + /// Completes with a [GoogleApiHttpBody]. /// /// Completes with a [commons.ApiRequestError] if the API endpoint returned an /// error. /// /// If the used [http.Client] completes with an error when making a REST call, /// this method will complete with the same error. - async.Future create( - GoogleCloudAiplatformV1Schedule request, - core.String parent, { + async.Future rawPredict( + GoogleCloudAiplatformV1RawPredictRequest request, + core.String endpoint, { core.String? $fields, }) async { final body_ = convert.json.encode(request); @@ -21848,7 +22451,7 @@ class ProjectsLocationsSchedulesResource { if ($fields != null) 'fields': [$fields], }; - final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/schedules'; + final url_ = 'v1/' + core.Uri.encodeFull('$endpoint') + ':rawPredict'; final response_ = await _requester.request( url_, @@ -21856,204 +22459,233 @@ class ProjectsLocationsSchedulesResource { body: body_, queryParams: queryParams_, ); - return GoogleCloudAiplatformV1Schedule.fromJson( + return GoogleApiHttpBody.fromJson( response_ as core.Map); } - /// Deletes a Schedule. + /// Perform a server-side streaming online prediction request for Vertex LLM + /// streaming. + /// + /// [request] - The metadata request object. /// /// Request parameters: /// - /// [name] - Required. The name of the Schedule resource to be deleted. - /// Format: `projects/{project}/locations/{location}/schedules/{schedule}` + /// [endpoint] - Required. The name of the Endpoint requested to serve the + /// prediction. Format: + /// `projects/{project}/locations/{location}/endpoints/{endpoint}` /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/schedules/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/publishers/\[^/\]+/models/\[^/\]+$`. 
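// --- Illustrative usage sketch (not part of the generated diff) ----------
// Hedged example of driving the predictLongRunning / fetchPredictOperation
// pair added above. The `AiplatformApi` entry point, the
// `projects.locations.publishers.models` getter chain, and the JSON field
// names passed to fromJson() are assumptions inferred from the class and
// resource names in this diff, not verified against the published package.
import 'package:googleapis/aiplatform/v1.dart';
import 'package:googleapis_auth/auth_io.dart';

Future<GoogleLongrunningOperation> startAndFetchPrediction(
    String model) async {
  // Application Default Credentials with the broad Cloud Platform scope.
  final client = await clientViaApplicationDefaultCredentials(
      scopes: ['https://www.googleapis.com/auth/cloud-platform']);
  final api = AiplatformApi(client);
  final models = api.projects.locations.publishers.models;

  // Start the asynchronous prediction; this completes with a long-running
  // operation rather than the prediction result itself.
  final started = await models.predictLongRunning(
    GoogleCloudAiplatformV1PredictLongRunningRequest.fromJson({
      // 'instances' follows the public Vertex AI predict payload shape;
      // treat the exact content as an assumption for your model.
      'instances': [
        {'prompt': 'a watercolor painting of a lighthouse'}
      ],
    }),
    model,
  );

  // Poll the returned operation through the new fetchPredictOperation method.
  // (Close `client` when done in real code.)
  return models.fetchPredictOperation(
    GoogleCloudAiplatformV1FetchPredictOperationRequest.fromJson(
        {'operationName': started.name}),
    model,
  );
}
// --------------------------------------------------------------------------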
/// /// [$fields] - Selector specifying which fields to include in a partial /// response. /// - /// Completes with a [GoogleLongrunningOperation]. + /// Completes with a [GoogleCloudAiplatformV1StreamingPredictResponse]. /// /// Completes with a [commons.ApiRequestError] if the API endpoint returned an /// error. /// /// If the used [http.Client] completes with an error when making a REST call, /// this method will complete with the same error. - async.Future delete( - core.String name, { + async.Future + serverStreamingPredict( + GoogleCloudAiplatformV1StreamingPredictRequest request, + core.String endpoint, { core.String? $fields, }) async { + final body_ = convert.json.encode(request); final queryParams_ = >{ if ($fields != null) 'fields': [$fields], }; - final url_ = 'v1/' + core.Uri.encodeFull('$name'); + final url_ = + 'v1/' + core.Uri.encodeFull('$endpoint') + ':serverStreamingPredict'; final response_ = await _requester.request( url_, - 'DELETE', + 'POST', + body: body_, queryParams: queryParams_, ); - return GoogleLongrunningOperation.fromJson( + return GoogleCloudAiplatformV1StreamingPredictResponse.fromJson( response_ as core.Map); } - /// Gets a Schedule. + /// Generate content with multimodal inputs with streaming support. + /// + /// [request] - The metadata request object. /// /// Request parameters: /// - /// [name] - Required. The name of the Schedule resource. Format: - /// `projects/{project}/locations/{location}/schedules/{schedule}` + /// [model] - Required. The fully qualified name of the publisher model or + /// tuned model endpoint to use. Publisher model format: + /// `projects/{project}/locations/{location}/publishers / * /models / * ` + /// Tuned model endpoint format: + /// `projects/{project}/locations/{location}/endpoints/{endpoint}` /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/schedules/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/publishers/\[^/\]+/models/\[^/\]+$`. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. /// - /// Completes with a [GoogleCloudAiplatformV1Schedule]. + /// Completes with a [GoogleCloudAiplatformV1GenerateContentResponse]. /// /// Completes with a [commons.ApiRequestError] if the API endpoint returned an /// error. /// /// If the used [http.Client] completes with an error when making a REST call, /// this method will complete with the same error. - async.Future get( - core.String name, { + async.Future + streamGenerateContent( + GoogleCloudAiplatformV1GenerateContentRequest request, + core.String model, { core.String? $fields, }) async { + final body_ = convert.json.encode(request); final queryParams_ = >{ if ($fields != null) 'fields': [$fields], }; - final url_ = 'v1/' + core.Uri.encodeFull('$name'); + final url_ = + 'v1/' + core.Uri.encodeFull('$model') + ':streamGenerateContent'; final response_ = await _requester.request( url_, - 'GET', + 'POST', + body: body_, queryParams: queryParams_, ); - return GoogleCloudAiplatformV1Schedule.fromJson( + return GoogleCloudAiplatformV1GenerateContentResponse.fromJson( response_ as core.Map); } - /// Lists Schedules in a Location. + /// Perform a streaming online prediction with an arbitrary HTTP payload. + /// + /// [request] - The metadata request object. /// /// Request parameters: /// - /// [parent] - Required. The resource name of the Location to list the - /// Schedules from. Format: `projects/{project}/locations/{location}` - /// Value must have pattern `^projects/\[^/\]+/locations/\[^/\]+$`. 
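// --- Illustrative usage sketch (not part of the generated diff) ----------
// Hedged example of calling generateContent / streamGenerateContent on this
// surface. Note that, despite its name, the generated streamGenerateContent
// wrapper shown above completes with a single
// GoogleCloudAiplatformV1GenerateContentResponse rather than a Dart stream.
// The resource getter chain and the JSON field names are assumptions
// inferred from this diff.
import 'package:googleapis/aiplatform/v1.dart';

Future<GoogleCloudAiplatformV1GenerateContentResponse> askModel(
    AiplatformApi api, String model, String prompt) {
  final request = GoogleCloudAiplatformV1GenerateContentRequest.fromJson({
    // 'contents'/'role'/'parts'/'text' follow the public GenerateContent
    // REST shape; treat them as an assumption here.
    'contents': [
      {
        'role': 'user',
        'parts': [
          {'text': prompt}
        ],
      }
    ],
  });
  return api.projects.locations.publishers.models
      .generateContent(request, model);
}
// --------------------------------------------------------------------------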
+ /// [endpoint] - Required. The name of the Endpoint requested to serve the + /// prediction. Format: + /// `projects/{project}/locations/{location}/endpoints/{endpoint}` + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/publishers/\[^/\]+/models/\[^/\]+$`. /// - /// [filter] - Lists the Schedules that match the filter expression. The - /// following fields are supported: * `display_name`: Supports `=`, `!=` - /// comparisons, and `:` wildcard. * `state`: Supports `=` and `!=` - /// comparisons. * `request`: Supports existence of the check. (e.g. - /// `create_pipeline_job_request:*` --\> Schedule has - /// create_pipeline_job_request). * `create_time`: Supports `=`, `!=`, `<`, - /// `>`, `<=`, and `>=` comparisons. Values must be in RFC 3339 format. * - /// `start_time`: Supports `=`, `!=`, `<`, `>`, `<=`, and `>=` comparisons. - /// Values must be in RFC 3339 format. * `end_time`: Supports `=`, `!=`, `<`, - /// `>`, `<=`, `>=` comparisons and `:*` existence check. Values must be in - /// RFC 3339 format. * `next_run_time`: Supports `=`, `!=`, `<`, `>`, `<=`, - /// and `>=` comparisons. Values must be in RFC 3339 format. Filter - /// expressions can be combined together using logical operators (`NOT`, `AND` - /// & `OR`). The syntax to define filter expression is based on - /// https://google.aip.dev/160. Examples: * `state="ACTIVE" AND - /// display_name:"my_schedule_*"` * `NOT display_name="my_schedule"` * - /// `create_time>"2021-05-18T00:00:00Z"` * `end_time>"2021-05-18T00:00:00Z" OR - /// NOT end_time:*` * `create_pipeline_job_request:*` + /// [$fields] - Selector specifying which fields to include in a partial + /// response. /// - /// [orderBy] - A comma-separated list of fields to order by. The default sort - /// order is in ascending order. Use "desc" after a field name for descending. - /// You can have multiple order_by fields provided. For example, using - /// "create_time desc, end_time" will order results by create time in - /// descending order, and if there are multiple schedules having the same - /// create time, order them by the end time in ascending order. If order_by is - /// not specified, it will order by default with create_time in descending - /// order. Supported fields: * `create_time` * `start_time` * `end_time` * - /// `next_run_time` + /// Completes with a [GoogleApiHttpBody]. /// - /// [pageSize] - The standard list page size. Default to 100 if not specified. + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. /// - /// [pageToken] - The standard list page token. Typically obtained via - /// ListSchedulesResponse.next_page_token of the previous - /// ScheduleService.ListSchedules call. + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future streamRawPredict( + GoogleCloudAiplatformV1StreamRawPredictRequest request, + core.String endpoint, { + core.String? 
$fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$endpoint') + ':streamRawPredict'; + + final response_ = await _requester.request( + url_, + 'POST', + body: body_, + queryParams: queryParams_, + ); + return GoogleApiHttpBody.fromJson( + response_ as core.Map); + } +} + +class ProjectsLocationsRagCorporaResource { + final commons.ApiRequester _requester; + + ProjectsLocationsRagCorporaOperationsResource get operations => + ProjectsLocationsRagCorporaOperationsResource(_requester); + ProjectsLocationsRagCorporaRagFilesResource get ragFiles => + ProjectsLocationsRagCorporaRagFilesResource(_requester); + + ProjectsLocationsRagCorporaResource(commons.ApiRequester client) + : _requester = client; + + /// Creates a RagCorpus. + /// + /// [request] - The metadata request object. + /// + /// Request parameters: + /// + /// [parent] - Required. The resource name of the Location to create the + /// RagCorpus in. Format: `projects/{project}/locations/{location}` + /// Value must have pattern `^projects/\[^/\]+/locations/\[^/\]+$`. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. /// - /// Completes with a [GoogleCloudAiplatformV1ListSchedulesResponse]. + /// Completes with a [GoogleLongrunningOperation]. /// /// Completes with a [commons.ApiRequestError] if the API endpoint returned an /// error. /// /// If the used [http.Client] completes with an error when making a REST call, /// this method will complete with the same error. - async.Future list( + async.Future create( + GoogleCloudAiplatformV1RagCorpus request, core.String parent, { - core.String? filter, - core.String? orderBy, - core.int? pageSize, - core.String? pageToken, core.String? $fields, }) async { + final body_ = convert.json.encode(request); final queryParams_ = >{ - if (filter != null) 'filter': [filter], - if (orderBy != null) 'orderBy': [orderBy], - if (pageSize != null) 'pageSize': ['${pageSize}'], - if (pageToken != null) 'pageToken': [pageToken], if ($fields != null) 'fields': [$fields], }; - final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/schedules'; + final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/ragCorpora'; final response_ = await _requester.request( url_, - 'GET', + 'POST', + body: body_, queryParams: queryParams_, ); - return GoogleCloudAiplatformV1ListSchedulesResponse.fromJson( + return GoogleLongrunningOperation.fromJson( response_ as core.Map); } - /// Updates an active or paused Schedule. - /// - /// When the Schedule is updated, new runs will be scheduled starting from the - /// updated next execution time after the update time based on the - /// time_specification in the updated Schedule. All unstarted runs before the - /// update time will be skipped while already created runs will NOT be paused - /// or canceled. - /// - /// [request] - The metadata request object. + /// Deletes a RagCorpus. /// /// Request parameters: /// - /// [name] - Immutable. The resource name of the Schedule. + /// [name] - Required. The name of the RagCorpus resource to be deleted. + /// Format: `projects/{project}/locations/{location}/ragCorpora/{rag_corpus}` /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/schedules/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/ragCorpora/\[^/\]+$`. /// - /// [updateMask] - Required. The update mask applies to the resource. See - /// google.protobuf.FieldMask. 
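// --- Illustrative usage sketch (not part of the generated diff) ----------
// Hedged example for the new RagCorpora surface: creating a RagCorpus
// returns a long-running operation, not the corpus itself. The `ragCorpora`
// getter and the 'displayName' field are assumptions inferred from the class
// names and doc comments in this diff.
import 'package:googleapis/aiplatform/v1.dart';

Future<GoogleLongrunningOperation> createCorpus(
    AiplatformApi api, String parent) {
  final corpus = GoogleCloudAiplatformV1RagCorpus.fromJson({
    'displayName': 'product-docs', // assumed field name
  });
  return api.projects.locations.ragCorpora.create(corpus, parent);
}
// --------------------------------------------------------------------------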
+ /// [force] - Optional. If set to true, any RagFiles in this RagCorpus will + /// also be deleted. Otherwise, the request will only work if the RagCorpus + /// has no RagFiles. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. /// - /// Completes with a [GoogleCloudAiplatformV1Schedule]. + /// Completes with a [GoogleLongrunningOperation]. /// /// Completes with a [commons.ApiRequestError] if the API endpoint returned an /// error. /// /// If the used [http.Client] completes with an error when making a REST call, /// this method will complete with the same error. - async.Future patch( - GoogleCloudAiplatformV1Schedule request, + async.Future delete( core.String name, { - core.String? updateMask, + core.bool? force, core.String? $fields, }) async { - final body_ = convert.json.encode(request); final queryParams_ = >{ - if (updateMask != null) 'updateMask': [updateMask], + if (force != null) 'force': ['${force}'], if ($fields != null) 'fields': [$fields], }; @@ -22061,89 +22693,120 @@ class ProjectsLocationsSchedulesResource { final response_ = await _requester.request( url_, - 'PATCH', - body: body_, + 'DELETE', queryParams: queryParams_, ); - return GoogleCloudAiplatformV1Schedule.fromJson( + return GoogleLongrunningOperation.fromJson( response_ as core.Map); } - /// Pauses a Schedule. - /// - /// Will mark Schedule.state to 'PAUSED'. If the schedule is paused, no new - /// runs will be created. Already created runs will NOT be paused or canceled. - /// - /// [request] - The metadata request object. + /// Gets a RagCorpus. /// /// Request parameters: /// - /// [name] - Required. The name of the Schedule resource to be paused. Format: - /// `projects/{project}/locations/{location}/schedules/{schedule}` + /// [name] - Required. The name of the RagCorpus resource. Format: + /// `projects/{project}/locations/{location}/ragCorpora/{rag_corpus}` /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/schedules/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/ragCorpora/\[^/\]+$`. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. /// - /// Completes with a [GoogleProtobufEmpty]. + /// Completes with a [GoogleCloudAiplatformV1RagCorpus]. /// /// Completes with a [commons.ApiRequestError] if the API endpoint returned an /// error. /// /// If the used [http.Client] completes with an error when making a REST call, /// this method will complete with the same error. - async.Future pause( - GoogleCloudAiplatformV1PauseScheduleRequest request, + async.Future get( core.String name, { core.String? $fields, }) async { - final body_ = convert.json.encode(request); final queryParams_ = >{ if ($fields != null) 'fields': [$fields], }; - final url_ = 'v1/' + core.Uri.encodeFull('$name') + ':pause'; + final url_ = 'v1/' + core.Uri.encodeFull('$name'); final response_ = await _requester.request( url_, - 'POST', - body: body_, + 'GET', queryParams: queryParams_, ); - return GoogleProtobufEmpty.fromJson( + return GoogleCloudAiplatformV1RagCorpus.fromJson( response_ as core.Map); } - /// Resumes a paused Schedule to start scheduling new runs. + /// Lists RagCorpora in a Location. /// - /// Will mark Schedule.state to 'ACTIVE'. Only paused Schedule can be resumed. - /// When the Schedule is resumed, new runs will be scheduled starting from the - /// next execution time after the current time based on the time_specification - /// in the Schedule. 
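// --- Illustrative usage sketch (not part of the generated diff) ----------
// Hedged example of RagCorpora.delete with the new `force` flag described
// above: force=true also removes any RagFiles still in the corpus. The
// resource getter chain is an assumption inferred from this diff.
import 'package:googleapis/aiplatform/v1.dart';

Future<GoogleLongrunningOperation> dropCorpus(
    AiplatformApi api, String corpusName) {
  // corpusName:
  //   projects/{project}/locations/{location}/ragCorpora/{rag_corpus}
  return api.projects.locations.ragCorpora.delete(corpusName, force: true);
}
// --------------------------------------------------------------------------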
If Schedule.catchUp is set up true, all missed runs will - /// be scheduled for backfill first. + /// Request parameters: + /// + /// [parent] - Required. The resource name of the Location from which to list + /// the RagCorpora. Format: `projects/{project}/locations/{location}` + /// Value must have pattern `^projects/\[^/\]+/locations/\[^/\]+$`. + /// + /// [pageSize] - Optional. The standard list page size. + /// + /// [pageToken] - Optional. The standard list page token. Typically obtained + /// via ListRagCorporaResponse.next_page_token of the previous + /// VertexRagDataService.ListRagCorpora call. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [GoogleCloudAiplatformV1ListRagCorporaResponse]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future list( + core.String parent, { + core.int? pageSize, + core.String? pageToken, + core.String? $fields, + }) async { + final queryParams_ = >{ + if (pageSize != null) 'pageSize': ['${pageSize}'], + if (pageToken != null) 'pageToken': [pageToken], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/ragCorpora'; + + final response_ = await _requester.request( + url_, + 'GET', + queryParams: queryParams_, + ); + return GoogleCloudAiplatformV1ListRagCorporaResponse.fromJson( + response_ as core.Map); + } + + /// Updates a RagCorpus. /// /// [request] - The metadata request object. /// /// Request parameters: /// - /// [name] - Required. The name of the Schedule resource to be resumed. - /// Format: `projects/{project}/locations/{location}/schedules/{schedule}` + /// [name] - Output only. The resource name of the RagCorpus. /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/schedules/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/ragCorpora/\[^/\]+$`. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. /// - /// Completes with a [GoogleProtobufEmpty]. + /// Completes with a [GoogleLongrunningOperation]. /// /// Completes with a [commons.ApiRequestError] if the API endpoint returned an /// error. /// /// If the used [http.Client] completes with an error when making a REST call, /// this method will complete with the same error. - async.Future resume( - GoogleCloudAiplatformV1ResumeScheduleRequest request, + async.Future patch( + GoogleCloudAiplatformV1RagCorpus request, core.String name, { core.String? $fields, }) async { @@ -22152,23 +22815,23 @@ class ProjectsLocationsSchedulesResource { if ($fields != null) 'fields': [$fields], }; - final url_ = 'v1/' + core.Uri.encodeFull('$name') + ':resume'; + final url_ = 'v1/' + core.Uri.encodeFull('$name'); final response_ = await _requester.request( url_, - 'POST', + 'PATCH', body: body_, queryParams: queryParams_, ); - return GoogleProtobufEmpty.fromJson( + return GoogleLongrunningOperation.fromJson( response_ as core.Map); } } -class ProjectsLocationsSchedulesOperationsResource { +class ProjectsLocationsRagCorporaOperationsResource { final commons.ApiRequester _requester; - ProjectsLocationsSchedulesOperationsResource(commons.ApiRequester client) + ProjectsLocationsRagCorporaOperationsResource(commons.ApiRequester client) : _requester = client; /// Starts asynchronous cancellation on a long-running operation. 
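// --- Illustrative usage sketch (not part of the generated diff) ----------
// Hedged example of paging through RagCorpora.list with pageToken /
// nextPageToken. The `ragCorpora` field on the list response is an assumed
// name; only next_page_token is spelled out in the doc comments above.
import 'package:googleapis/aiplatform/v1.dart';

Future<List<GoogleCloudAiplatformV1RagCorpus>> listAllCorpora(
    AiplatformApi api, String parent) async {
  final all = <GoogleCloudAiplatformV1RagCorpus>[];
  String? token;
  do {
    final page = await api.projects.locations.ragCorpora
        .list(parent, pageSize: 50, pageToken: token);
    all.addAll(page.ragCorpora ?? const []);
    token = page.nextPageToken;
  } while (token != null && token.isNotEmpty);
  return all;
}
// --------------------------------------------------------------------------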
@@ -22179,14 +22842,14 @@ class ProjectsLocationsSchedulesOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// Request parameters: /// /// [name] - The name of the operation resource to be cancelled. /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/schedules/\[^/\]+/operations/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/ragCorpora/\[^/\]+/operations/\[^/\]+$`. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. @@ -22227,7 +22890,7 @@ class ProjectsLocationsSchedulesOperationsResource { /// /// [name] - The name of the operation resource to be deleted. /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/schedules/\[^/\]+/operations/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/ragCorpora/\[^/\]+/operations/\[^/\]+$`. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. @@ -22267,7 +22930,7 @@ class ProjectsLocationsSchedulesOperationsResource { /// /// [name] - The name of the operation resource. /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/schedules/\[^/\]+/operations/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/ragCorpora/\[^/\]+/operations/\[^/\]+$`. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. @@ -22306,7 +22969,7 @@ class ProjectsLocationsSchedulesOperationsResource { /// /// [name] - The name of the operation's parent resource. /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/schedules/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/ragCorpora/\[^/\]+$`. /// /// [filter] - The standard list filter. /// @@ -22364,7 +23027,7 @@ class ProjectsLocationsSchedulesOperationsResource { /// /// [name] - The name of the operation resource to wait on. /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/schedules/\[^/\]+/operations/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/ragCorpora/\[^/\]+/operations/\[^/\]+$`. /// /// [timeout] - The maximum duration to wait before timing out. If left blank, /// the wait will be at most the time permitted by the underlying HTTP/RPC @@ -22403,70 +23066,23 @@ class ProjectsLocationsSchedulesOperationsResource { } } -class ProjectsLocationsSpecialistPoolsResource { +class ProjectsLocationsRagCorporaRagFilesResource { final commons.ApiRequester _requester; - ProjectsLocationsSpecialistPoolsOperationsResource get operations => - ProjectsLocationsSpecialistPoolsOperationsResource(_requester); + ProjectsLocationsRagCorporaRagFilesOperationsResource get operations => + ProjectsLocationsRagCorporaRagFilesOperationsResource(_requester); - ProjectsLocationsSpecialistPoolsResource(commons.ApiRequester client) + ProjectsLocationsRagCorporaRagFilesResource(commons.ApiRequester client) : _requester = client; - /// Creates a SpecialistPool. - /// - /// [request] - The metadata request object. - /// - /// Request parameters: - /// - /// [parent] - Required. The parent Project name for the new SpecialistPool. - /// The form is `projects/{project}/locations/{location}`. 
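// --- Illustrative usage sketch (not part of the generated diff) ----------
// Hedged example of polling the ragCorpora operations resource shown above
// until a long-running operation completes. `operations.get` and the `done`
// flag follow the standard google.longrunning surface; the getter chain is
// inferred from the class names in this diff.
import 'package:googleapis/aiplatform/v1.dart';

Future<GoogleLongrunningOperation> waitForDone(
    AiplatformApi api, String operationName) async {
  var op =
      await api.projects.locations.ragCorpora.operations.get(operationName);
  while (op.done != true) {
    await Future<void>.delayed(const Duration(seconds: 5));
    op =
        await api.projects.locations.ragCorpora.operations.get(operationName);
  }
  return op;
}
// --------------------------------------------------------------------------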
- /// Value must have pattern `^projects/\[^/\]+/locations/\[^/\]+$`. - /// - /// [$fields] - Selector specifying which fields to include in a partial - /// response. - /// - /// Completes with a [GoogleLongrunningOperation]. - /// - /// Completes with a [commons.ApiRequestError] if the API endpoint returned an - /// error. - /// - /// If the used [http.Client] completes with an error when making a REST call, - /// this method will complete with the same error. - async.Future create( - GoogleCloudAiplatformV1SpecialistPool request, - core.String parent, { - core.String? $fields, - }) async { - final body_ = convert.json.encode(request); - final queryParams_ = >{ - if ($fields != null) 'fields': [$fields], - }; - - final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/specialistPools'; - - final response_ = await _requester.request( - url_, - 'POST', - body: body_, - queryParams: queryParams_, - ); - return GoogleLongrunningOperation.fromJson( - response_ as core.Map); - } - - /// Deletes a SpecialistPool as well as all Specialists in the pool. + /// Deletes a RagFile. /// /// Request parameters: /// - /// [name] - Required. The resource name of the SpecialistPool to delete. - /// Format: - /// `projects/{project}/locations/{location}/specialistPools/{specialist_pool}` + /// [name] - Required. The name of the RagFile resource to be deleted. Format: + /// `projects/{project}/locations/{location}/ragCorpora/{rag_corpus}/ragFiles/{rag_file}` /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/specialistPools/\[^/\]+$`. - /// - /// [force] - If set to true, any specialist managers in this SpecialistPool - /// will also be deleted. (Otherwise, the request will only work if the - /// SpecialistPool has no specialist managers.) + /// `^projects/\[^/\]+/locations/\[^/\]+/ragCorpora/\[^/\]+/ragFiles/\[^/\]+$`. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. @@ -22480,11 +23096,9 @@ class ProjectsLocationsSpecialistPoolsResource { /// this method will complete with the same error. async.Future delete( core.String name, { - core.bool? force, core.String? $fields, }) async { final queryParams_ = >{ - if (force != null) 'force': ['${force}'], if ($fields != null) 'fields': [$fields], }; @@ -22499,26 +23113,26 @@ class ProjectsLocationsSpecialistPoolsResource { response_ as core.Map); } - /// Gets a SpecialistPool. + /// Gets a RagFile. /// /// Request parameters: /// - /// [name] - Required. The name of the SpecialistPool resource. The form is - /// `projects/{project}/locations/{location}/specialistPools/{specialist_pool}`. + /// [name] - Required. The name of the RagFile resource. Format: + /// `projects/{project}/locations/{location}/ragCorpora/{rag_corpus}/ragFiles/{rag_file}` /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/specialistPools/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/ragCorpora/\[^/\]+/ragFiles/\[^/\]+$`. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. /// - /// Completes with a [GoogleCloudAiplatformV1SpecialistPool]. + /// Completes with a [GoogleCloudAiplatformV1RagFile]. /// /// Completes with a [commons.ApiRequestError] if the API endpoint returned an /// error. /// /// If the used [http.Client] completes with an error when making a REST call, /// this method will complete with the same error. - async.Future get( + async.Future get( core.String name, { core.String? 
$fields, }) async { @@ -22533,114 +23147,108 @@ class ProjectsLocationsSpecialistPoolsResource { 'GET', queryParams: queryParams_, ); - return GoogleCloudAiplatformV1SpecialistPool.fromJson( + return GoogleCloudAiplatformV1RagFile.fromJson( response_ as core.Map); } - /// Lists SpecialistPools in a Location. + /// Import files from Google Cloud Storage or Google Drive into a RagCorpus. /// - /// Request parameters: - /// - /// [parent] - Required. The name of the SpecialistPool's parent resource. - /// Format: `projects/{project}/locations/{location}` - /// Value must have pattern `^projects/\[^/\]+/locations/\[^/\]+$`. - /// - /// [pageSize] - The standard list page size. + /// [request] - The metadata request object. /// - /// [pageToken] - The standard list page token. Typically obtained by - /// ListSpecialistPoolsResponse.next_page_token of the previous - /// SpecialistPoolService.ListSpecialistPools call. Return first page if - /// empty. + /// Request parameters: /// - /// [readMask] - Mask specifying which fields to read. FieldMask represents a - /// set of + /// [parent] - Required. The name of the RagCorpus resource into which to + /// import files. Format: + /// `projects/{project}/locations/{location}/ragCorpora/{rag_corpus}` + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/ragCorpora/\[^/\]+$`. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. /// - /// Completes with a [GoogleCloudAiplatformV1ListSpecialistPoolsResponse]. + /// Completes with a [GoogleLongrunningOperation]. /// /// Completes with a [commons.ApiRequestError] if the API endpoint returned an /// error. /// /// If the used [http.Client] completes with an error when making a REST call, /// this method will complete with the same error. - async.Future list( + async.Future import( + GoogleCloudAiplatformV1ImportRagFilesRequest request, core.String parent, { - core.int? pageSize, - core.String? pageToken, - core.String? readMask, core.String? $fields, }) async { + final body_ = convert.json.encode(request); final queryParams_ = >{ - if (pageSize != null) 'pageSize': ['${pageSize}'], - if (pageToken != null) 'pageToken': [pageToken], - if (readMask != null) 'readMask': [readMask], if ($fields != null) 'fields': [$fields], }; - final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/specialistPools'; + final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/ragFiles:import'; final response_ = await _requester.request( url_, - 'GET', + 'POST', + body: body_, queryParams: queryParams_, ); - return GoogleCloudAiplatformV1ListSpecialistPoolsResponse.fromJson( + return GoogleLongrunningOperation.fromJson( response_ as core.Map); } - /// Updates a SpecialistPool. - /// - /// [request] - The metadata request object. + /// Lists RagFiles in a RagCorpus. /// /// Request parameters: /// - /// [name] - Required. The resource name of the SpecialistPool. + /// [parent] - Required. The resource name of the RagCorpus from which to list + /// the RagFiles. Format: + /// `projects/{project}/locations/{location}/ragCorpora/{rag_corpus}` /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/specialistPools/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/ragCorpora/\[^/\]+$`. /// - /// [updateMask] - Required. The update mask applies to the resource. + /// [pageSize] - Optional. The standard list page size. + /// + /// [pageToken] - Optional. The standard list page token. 
Typically obtained + /// via ListRagFilesResponse.next_page_token of the previous + /// VertexRagDataService.ListRagFiles call. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. /// - /// Completes with a [GoogleLongrunningOperation]. + /// Completes with a [GoogleCloudAiplatformV1ListRagFilesResponse]. /// /// Completes with a [commons.ApiRequestError] if the API endpoint returned an /// error. /// /// If the used [http.Client] completes with an error when making a REST call, /// this method will complete with the same error. - async.Future patch( - GoogleCloudAiplatformV1SpecialistPool request, - core.String name, { - core.String? updateMask, + async.Future list( + core.String parent, { + core.int? pageSize, + core.String? pageToken, core.String? $fields, }) async { - final body_ = convert.json.encode(request); final queryParams_ = >{ - if (updateMask != null) 'updateMask': [updateMask], + if (pageSize != null) 'pageSize': ['${pageSize}'], + if (pageToken != null) 'pageToken': [pageToken], if ($fields != null) 'fields': [$fields], }; - final url_ = 'v1/' + core.Uri.encodeFull('$name'); + final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/ragFiles'; final response_ = await _requester.request( url_, - 'PATCH', - body: body_, + 'GET', queryParams: queryParams_, ); - return GoogleLongrunningOperation.fromJson( + return GoogleCloudAiplatformV1ListRagFilesResponse.fromJson( response_ as core.Map); } } -class ProjectsLocationsSpecialistPoolsOperationsResource { +class ProjectsLocationsRagCorporaRagFilesOperationsResource { final commons.ApiRequester _requester; - ProjectsLocationsSpecialistPoolsOperationsResource( + ProjectsLocationsRagCorporaRagFilesOperationsResource( commons.ApiRequester client) : _requester = client; @@ -22652,14 +23260,14 @@ class ProjectsLocationsSpecialistPoolsOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// Request parameters: /// /// [name] - The name of the operation resource to be cancelled. /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/specialistPools/\[^/\]+/operations/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/ragCorpora/\[^/\]+/ragFiles/\[^/\]+/operations/\[^/\]+$`. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. @@ -22700,7 +23308,7 @@ class ProjectsLocationsSpecialistPoolsOperationsResource { /// /// [name] - The name of the operation resource to be deleted. /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/specialistPools/\[^/\]+/operations/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/ragCorpora/\[^/\]+/ragFiles/\[^/\]+/operations/\[^/\]+$`. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. @@ -22740,7 +23348,7 @@ class ProjectsLocationsSpecialistPoolsOperationsResource { /// /// [name] - The name of the operation resource. /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/specialistPools/\[^/\]+/operations/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/ragCorpora/\[^/\]+/ragFiles/\[^/\]+/operations/\[^/\]+$`. 
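// --- Illustrative usage sketch (not part of the generated diff) ----------
// Hedged example for the RagFiles surface shown above: importing files from
// Cloud Storage into a RagCorpus, then listing them. The nested
// importRagFilesConfig/gcsSource JSON shape and the bucket path are
// assumptions, not something spelled out in this diff.
import 'package:googleapis/aiplatform/v1.dart';

Future<void> importAndList(AiplatformApi api, String corpusName) async {
  final ragFiles = api.projects.locations.ragCorpora.ragFiles;
  final op = await ragFiles.import(
    GoogleCloudAiplatformV1ImportRagFilesRequest.fromJson({
      'importRagFilesConfig': {
        'gcsSource': {
          'uris': ['gs://my-bucket/docs/*.pdf'] // hypothetical bucket
        }
      }
    }),
    corpusName,
  );
  print('import started: ${op.name}');

  final page = await ragFiles.list(corpusName, pageSize: 20);
  print('next page token: ${page.nextPageToken}');
}
// --------------------------------------------------------------------------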
/// /// [$fields] - Selector specifying which fields to include in a partial /// response. @@ -22779,7 +23387,7 @@ class ProjectsLocationsSpecialistPoolsOperationsResource { /// /// [name] - The name of the operation's parent resource. /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/specialistPools/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/ragCorpora/\[^/\]+/ragFiles/\[^/\]+$`. /// /// [filter] - The standard list filter. /// @@ -22837,7 +23445,7 @@ class ProjectsLocationsSpecialistPoolsOperationsResource { /// /// [name] - The name of the operation resource to wait on. /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/specialistPools/\[^/\]+/operations/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/ragCorpora/\[^/\]+/ragFiles/\[^/\]+/operations/\[^/\]+$`. /// /// [timeout] - The maximum duration to wait before timing out. If left blank, /// the wait will be at most the time permitted by the underlying HTTP/RPC @@ -22876,41 +23484,37 @@ class ProjectsLocationsSpecialistPoolsOperationsResource { } } -class ProjectsLocationsStudiesResource { +class ProjectsLocationsReasoningEnginesResource { final commons.ApiRequester _requester; - ProjectsLocationsStudiesOperationsResource get operations => - ProjectsLocationsStudiesOperationsResource(_requester); - ProjectsLocationsStudiesTrialsResource get trials => - ProjectsLocationsStudiesTrialsResource(_requester); + ProjectsLocationsReasoningEnginesOperationsResource get operations => + ProjectsLocationsReasoningEnginesOperationsResource(_requester); - ProjectsLocationsStudiesResource(commons.ApiRequester client) + ProjectsLocationsReasoningEnginesResource(commons.ApiRequester client) : _requester = client; - /// Creates a Study. - /// - /// A resource name will be generated after creation of the Study. + /// Creates a reasoning engine. /// /// [request] - The metadata request object. /// /// Request parameters: /// /// [parent] - Required. The resource name of the Location to create the - /// CustomJob in. Format: `projects/{project}/locations/{location}` + /// ReasoningEngine in. Format: `projects/{project}/locations/{location}` /// Value must have pattern `^projects/\[^/\]+/locations/\[^/\]+$`. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. /// - /// Completes with a [GoogleCloudAiplatformV1Study]. + /// Completes with a [GoogleLongrunningOperation]. /// /// Completes with a [commons.ApiRequestError] if the API endpoint returned an /// error. /// /// If the used [http.Client] completes with an error when making a REST call, /// this method will complete with the same error. - async.Future create( - GoogleCloudAiplatformV1Study request, + async.Future create( + GoogleCloudAiplatformV1ReasoningEngine request, core.String parent, { core.String? $fields, }) async { @@ -22919,7 +23523,7 @@ class ProjectsLocationsStudiesResource { if ($fields != null) 'fields': [$fields], }; - final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/studies'; + final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/reasoningEngines'; final response_ = await _requester.request( url_, @@ -22927,30 +23531,31 @@ class ProjectsLocationsStudiesResource { body: body_, queryParams: queryParams_, ); - return GoogleCloudAiplatformV1Study.fromJson( + return GoogleLongrunningOperation.fromJson( response_ as core.Map); } - /// Deletes a Study. + /// Deletes a reasoning engine. /// /// Request parameters: /// - /// [name] - Required. The name of the Study resource to be deleted. 
Format: - /// `projects/{project}/locations/{location}/studies/{study}` + /// [name] - Required. The name of the ReasoningEngine resource to be deleted. + /// Format: + /// `projects/{project}/locations/{location}/reasoningEngines/{reasoning_engine}` /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/studies/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/reasoningEngines/\[^/\]+$`. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. /// - /// Completes with a [GoogleProtobufEmpty]. + /// Completes with a [GoogleLongrunningOperation]. /// /// Completes with a [commons.ApiRequestError] if the API endpoint returned an /// error. /// /// If the used [http.Client] completes with an error when making a REST call, /// this method will complete with the same error. - async.Future delete( + async.Future delete( core.String name, { core.String? $fields, }) async { @@ -22965,30 +23570,30 @@ class ProjectsLocationsStudiesResource { 'DELETE', queryParams: queryParams_, ); - return GoogleProtobufEmpty.fromJson( + return GoogleLongrunningOperation.fromJson( response_ as core.Map); } - /// Gets a Study by name. + /// Gets a reasoning engine. /// /// Request parameters: /// - /// [name] - Required. The name of the Study resource. Format: - /// `projects/{project}/locations/{location}/studies/{study}` + /// [name] - Required. The name of the ReasoningEngine resource. Format: + /// `projects/{project}/locations/{location}/reasoningEngines/{reasoning_engine}` /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/studies/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/reasoningEngines/\[^/\]+$`. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. /// - /// Completes with a [GoogleCloudAiplatformV1Study]. + /// Completes with a [GoogleCloudAiplatformV1ReasoningEngine]. /// /// Completes with a [commons.ApiRequestError] if the API endpoint returned an /// error. /// /// If the used [http.Client] completes with an error when making a REST call, /// this method will complete with the same error. - async.Future get( + async.Future get( core.String name, { core.String? $fields, }) async { @@ -23003,89 +23608,139 @@ class ProjectsLocationsStudiesResource { 'GET', queryParams: queryParams_, ); - return GoogleCloudAiplatformV1Study.fromJson( + return GoogleCloudAiplatformV1ReasoningEngine.fromJson( response_ as core.Map); } - /// Lists all the studies in a region for an associated project. + /// Lists reasoning engines in a location. /// /// Request parameters: /// - /// [parent] - Required. The resource name of the Location to list the Study - /// from. Format: `projects/{project}/locations/{location}` + /// [parent] - Required. The resource name of the Location to list the + /// ReasoningEngines from. Format: `projects/{project}/locations/{location}` /// Value must have pattern `^projects/\[^/\]+/locations/\[^/\]+$`. /// - /// [pageSize] - Optional. The maximum number of studies to return per "page" - /// of results. If unspecified, service will pick an appropriate default. + /// [filter] - Optional. The standard list filter. More detail in + /// \[AIP-160\](https://google.aip.dev/160). /// - /// [pageToken] - Optional. A page token to request the next page of results. - /// If unspecified, there are no subsequent pages. + /// [pageSize] - Optional. The standard list page size. + /// + /// [pageToken] - Optional. The standard list page token. 
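// --- Illustrative usage sketch (not part of the generated diff) ----------
// Hedged example for the new ReasoningEngines resource: create returns a
// long-running operation. The `reasoningEngines` getter and the
// 'displayName' field are assumptions inferred from this diff.
import 'package:googleapis/aiplatform/v1.dart';

Future<GoogleLongrunningOperation> createEngine(
    AiplatformApi api, String parent) {
  return api.projects.locations.reasoningEngines.create(
    GoogleCloudAiplatformV1ReasoningEngine.fromJson({
      'displayName': 'my-agent', // assumed field name
    }),
    parent,
  );
}
// --------------------------------------------------------------------------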
/// /// [$fields] - Selector specifying which fields to include in a partial /// response. /// - /// Completes with a [GoogleCloudAiplatformV1ListStudiesResponse]. + /// Completes with a [GoogleCloudAiplatformV1ListReasoningEnginesResponse]. /// /// Completes with a [commons.ApiRequestError] if the API endpoint returned an /// error. /// /// If the used [http.Client] completes with an error when making a REST call, /// this method will complete with the same error. - async.Future list( + async.Future list( core.String parent, { + core.String? filter, core.int? pageSize, core.String? pageToken, core.String? $fields, }) async { final queryParams_ = >{ + if (filter != null) 'filter': [filter], if (pageSize != null) 'pageSize': ['${pageSize}'], if (pageToken != null) 'pageToken': [pageToken], if ($fields != null) 'fields': [$fields], }; - final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/studies'; + final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/reasoningEngines'; final response_ = await _requester.request( url_, 'GET', queryParams: queryParams_, ); - return GoogleCloudAiplatformV1ListStudiesResponse.fromJson( + return GoogleCloudAiplatformV1ListReasoningEnginesResponse.fromJson( response_ as core.Map); } - /// Looks a study up using the user-defined display_name field instead of the - /// fully qualified resource name. + /// Updates a reasoning engine. /// /// [request] - The metadata request object. /// /// Request parameters: /// - /// [parent] - Required. The resource name of the Location to get the Study - /// from. Format: `projects/{project}/locations/{location}` - /// Value must have pattern `^projects/\[^/\]+/locations/\[^/\]+$`. + /// [name] - Identifier. The resource name of the ReasoningEngine. + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/reasoningEngines/\[^/\]+$`. + /// + /// [updateMask] - Optional. Mask specifying which fields to update. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. /// - /// Completes with a [GoogleCloudAiplatformV1Study]. + /// Completes with a [GoogleLongrunningOperation]. /// /// Completes with a [commons.ApiRequestError] if the API endpoint returned an /// error. /// /// If the used [http.Client] completes with an error when making a REST call, /// this method will complete with the same error. - async.Future lookup( - GoogleCloudAiplatformV1LookupStudyRequest request, - core.String parent, { + async.Future patch( + GoogleCloudAiplatformV1ReasoningEngine request, + core.String name, { + core.String? updateMask, core.String? $fields, }) async { final body_ = convert.json.encode(request); final queryParams_ = >{ + if (updateMask != null) 'updateMask': [updateMask], if ($fields != null) 'fields': [$fields], }; - final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/studies:lookup'; + final url_ = 'v1/' + core.Uri.encodeFull('$name'); + + final response_ = await _requester.request( + url_, + 'PATCH', + body: body_, + queryParams: queryParams_, + ); + return GoogleLongrunningOperation.fromJson( + response_ as core.Map); + } + + /// Queries using a reasoning engine. + /// + /// [request] - The metadata request object. + /// + /// Request parameters: + /// + /// [name] - Required. The name of the ReasoningEngine resource to use. + /// Format: + /// `projects/{project}/locations/{location}/reasoningEngines/{reasoning_engine}` + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/reasoningEngines/\[^/\]+$`. 
+ /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [GoogleCloudAiplatformV1QueryReasoningEngineResponse]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future query( + GoogleCloudAiplatformV1QueryReasoningEngineRequest request, + core.String name, { + core.String? $fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name') + ':query'; final response_ = await _requester.request( url_, @@ -23093,15 +23748,16 @@ class ProjectsLocationsStudiesResource { body: body_, queryParams: queryParams_, ); - return GoogleCloudAiplatformV1Study.fromJson( + return GoogleCloudAiplatformV1QueryReasoningEngineResponse.fromJson( response_ as core.Map); } } -class ProjectsLocationsStudiesOperationsResource { +class ProjectsLocationsReasoningEnginesOperationsResource { final commons.ApiRequester _requester; - ProjectsLocationsStudiesOperationsResource(commons.ApiRequester client) + ProjectsLocationsReasoningEnginesOperationsResource( + commons.ApiRequester client) : _requester = client; /// Starts asynchronous cancellation on a long-running operation. @@ -23112,14 +23768,14 @@ class ProjectsLocationsStudiesOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// Request parameters: /// /// [name] - The name of the operation resource to be cancelled. /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/studies/\[^/\]+/operations/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/reasoningEngines/\[^/\]+/operations/\[^/\]+$`. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. @@ -23160,7 +23816,7 @@ class ProjectsLocationsStudiesOperationsResource { /// /// [name] - The name of the operation resource to be deleted. /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/studies/\[^/\]+/operations/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/reasoningEngines/\[^/\]+/operations/\[^/\]+$`. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. @@ -23200,7 +23856,7 @@ class ProjectsLocationsStudiesOperationsResource { /// /// [name] - The name of the operation resource. /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/studies/\[^/\]+/operations/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/reasoningEngines/\[^/\]+/operations/\[^/\]+$`. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. @@ -23239,7 +23895,7 @@ class ProjectsLocationsStudiesOperationsResource { /// /// [name] - The name of the operation's parent resource. /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/studies/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/reasoningEngines/\[^/\]+$`. 
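// --- Illustrative usage sketch (not part of the generated diff) ----------
// Hedged example of the query method shown above. The 'input' field inside
// QueryReasoningEngineRequest is an assumption about the request message;
// only the class and method names come from this diff.
import 'package:googleapis/aiplatform/v1.dart';

Future<GoogleCloudAiplatformV1QueryReasoningEngineResponse> queryEngine(
    AiplatformApi api, String engineName, Map<String, Object?> input) {
  return api.projects.locations.reasoningEngines.query(
    GoogleCloudAiplatformV1QueryReasoningEngineRequest.fromJson(
        {'input': input}),
    engineName,
  );
}
// --------------------------------------------------------------------------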
/// /// [filter] - The standard list filter. /// @@ -23297,7 +23953,7 @@ class ProjectsLocationsStudiesOperationsResource { /// /// [name] - The name of the operation resource to wait on. /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/studies/\[^/\]+/operations/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/reasoningEngines/\[^/\]+/operations/\[^/\]+$`. /// /// [timeout] - The maximum duration to wait before timing out. If left blank, /// the wait will be at most the time permitted by the underlying HTTP/RPC @@ -23336,176 +23992,37 @@ class ProjectsLocationsStudiesOperationsResource { } } -class ProjectsLocationsStudiesTrialsResource { +class ProjectsLocationsSchedulesResource { final commons.ApiRequester _requester; - ProjectsLocationsStudiesTrialsOperationsResource get operations => - ProjectsLocationsStudiesTrialsOperationsResource(_requester); + ProjectsLocationsSchedulesOperationsResource get operations => + ProjectsLocationsSchedulesOperationsResource(_requester); - ProjectsLocationsStudiesTrialsResource(commons.ApiRequester client) + ProjectsLocationsSchedulesResource(commons.ApiRequester client) : _requester = client; - /// Adds a measurement of the objective metrics to a Trial. - /// - /// This measurement is assumed to have been taken before the Trial is - /// complete. - /// - /// [request] - The metadata request object. - /// - /// Request parameters: - /// - /// [trialName] - Required. The name of the trial to add measurement. Format: - /// `projects/{project}/locations/{location}/studies/{study}/trials/{trial}` - /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/studies/\[^/\]+/trials/\[^/\]+$`. - /// - /// [$fields] - Selector specifying which fields to include in a partial - /// response. - /// - /// Completes with a [GoogleCloudAiplatformV1Trial]. - /// - /// Completes with a [commons.ApiRequestError] if the API endpoint returned an - /// error. - /// - /// If the used [http.Client] completes with an error when making a REST call, - /// this method will complete with the same error. - async.Future addTrialMeasurement( - GoogleCloudAiplatformV1AddTrialMeasurementRequest request, - core.String trialName, { - core.String? $fields, - }) async { - final body_ = convert.json.encode(request); - final queryParams_ = >{ - if ($fields != null) 'fields': [$fields], - }; - - final url_ = - 'v1/' + core.Uri.encodeFull('$trialName') + ':addTrialMeasurement'; - - final response_ = await _requester.request( - url_, - 'POST', - body: body_, - queryParams: queryParams_, - ); - return GoogleCloudAiplatformV1Trial.fromJson( - response_ as core.Map); - } - - /// Checks whether a Trial should stop or not. - /// - /// Returns a long-running operation. When the operation is successful, it - /// will contain a CheckTrialEarlyStoppingStateResponse. - /// - /// [request] - The metadata request object. - /// - /// Request parameters: - /// - /// [trialName] - Required. The Trial's name. Format: - /// `projects/{project}/locations/{location}/studies/{study}/trials/{trial}` - /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/studies/\[^/\]+/trials/\[^/\]+$`. - /// - /// [$fields] - Selector specifying which fields to include in a partial - /// response. - /// - /// Completes with a [GoogleLongrunningOperation]. - /// - /// Completes with a [commons.ApiRequestError] if the API endpoint returned an - /// error. 
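// --- Illustrative usage sketch (not part of the generated diff) ----------
// Hedged example for the Schedules resource whose create/delete/get/list
// methods follow below: create a Schedule that runs a pipeline job on a
// cron. The 'cron', 'maxConcurrentRunCount', and 'createPipelineJobRequest'
// field names follow the public Schedule REST shape and should be treated
// as assumptions here.
import 'package:googleapis/aiplatform/v1.dart';

Future<GoogleCloudAiplatformV1Schedule> createDailySchedule(
    AiplatformApi api, String parent) {
  return api.projects.locations.schedules.create(
    GoogleCloudAiplatformV1Schedule.fromJson({
      'displayName': 'nightly-pipeline',
      'cron': 'TZ=America/Los_Angeles 0 2 * * *',
      'maxConcurrentRunCount': '1', // int64 fields travel as strings in JSON
      'createPipelineJobRequest': {
        'parent': parent,
        'pipelineJob': {'displayName': 'nightly-run'}
      },
    }),
    parent,
  );
}
// --------------------------------------------------------------------------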
- /// - /// If the used [http.Client] completes with an error when making a REST call, - /// this method will complete with the same error. - async.Future checkTrialEarlyStoppingState( - GoogleCloudAiplatformV1CheckTrialEarlyStoppingStateRequest request, - core.String trialName, { - core.String? $fields, - }) async { - final body_ = convert.json.encode(request); - final queryParams_ = >{ - if ($fields != null) 'fields': [$fields], - }; - - final url_ = 'v1/' + - core.Uri.encodeFull('$trialName') + - ':checkTrialEarlyStoppingState'; - - final response_ = await _requester.request( - url_, - 'POST', - body: body_, - queryParams: queryParams_, - ); - return GoogleLongrunningOperation.fromJson( - response_ as core.Map); - } - - /// Marks a Trial as complete. - /// - /// [request] - The metadata request object. - /// - /// Request parameters: - /// - /// [name] - Required. The Trial's name. Format: - /// `projects/{project}/locations/{location}/studies/{study}/trials/{trial}` - /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/studies/\[^/\]+/trials/\[^/\]+$`. - /// - /// [$fields] - Selector specifying which fields to include in a partial - /// response. - /// - /// Completes with a [GoogleCloudAiplatformV1Trial]. - /// - /// Completes with a [commons.ApiRequestError] if the API endpoint returned an - /// error. - /// - /// If the used [http.Client] completes with an error when making a REST call, - /// this method will complete with the same error. - async.Future complete( - GoogleCloudAiplatformV1CompleteTrialRequest request, - core.String name, { - core.String? $fields, - }) async { - final body_ = convert.json.encode(request); - final queryParams_ = >{ - if ($fields != null) 'fields': [$fields], - }; - - final url_ = 'v1/' + core.Uri.encodeFull('$name') + ':complete'; - - final response_ = await _requester.request( - url_, - 'POST', - body: body_, - queryParams: queryParams_, - ); - return GoogleCloudAiplatformV1Trial.fromJson( - response_ as core.Map); - } - - /// Adds a user provided Trial to a Study. + /// Creates a Schedule. /// /// [request] - The metadata request object. /// /// Request parameters: /// - /// [parent] - Required. The resource name of the Study to create the Trial - /// in. Format: `projects/{project}/locations/{location}/studies/{study}` - /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/studies/\[^/\]+$`. + /// [parent] - Required. The resource name of the Location to create the + /// Schedule in. Format: `projects/{project}/locations/{location}` + /// Value must have pattern `^projects/\[^/\]+/locations/\[^/\]+$`. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. /// - /// Completes with a [GoogleCloudAiplatformV1Trial]. + /// Completes with a [GoogleCloudAiplatformV1Schedule]. /// /// Completes with a [commons.ApiRequestError] if the API endpoint returned an /// error. /// /// If the used [http.Client] completes with an error when making a REST call, /// this method will complete with the same error. - async.Future create( - GoogleCloudAiplatformV1Trial request, + async.Future create( + GoogleCloudAiplatformV1Schedule request, core.String parent, { core.String? 
$fields, }) async { @@ -23514,7 +24031,7 @@ class ProjectsLocationsStudiesTrialsResource { if ($fields != null) 'fields': [$fields], }; - final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/trials'; + final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/schedules'; final response_ = await _requester.request( url_, @@ -23522,30 +24039,30 @@ class ProjectsLocationsStudiesTrialsResource { body: body_, queryParams: queryParams_, ); - return GoogleCloudAiplatformV1Trial.fromJson( + return GoogleCloudAiplatformV1Schedule.fromJson( response_ as core.Map); } - /// Deletes a Trial. + /// Deletes a Schedule. /// /// Request parameters: /// - /// [name] - Required. The Trial's name. Format: - /// `projects/{project}/locations/{location}/studies/{study}/trials/{trial}` + /// [name] - Required. The name of the Schedule resource to be deleted. + /// Format: `projects/{project}/locations/{location}/schedules/{schedule}` /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/studies/\[^/\]+/trials/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/schedules/\[^/\]+$`. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. /// - /// Completes with a [GoogleProtobufEmpty]. + /// Completes with a [GoogleLongrunningOperation]. /// /// Completes with a [commons.ApiRequestError] if the API endpoint returned an /// error. /// /// If the used [http.Client] completes with an error when making a REST call, /// this method will complete with the same error. - async.Future delete( + async.Future delete( core.String name, { core.String? $fields, }) async { @@ -23560,30 +24077,30 @@ class ProjectsLocationsStudiesTrialsResource { 'DELETE', queryParams: queryParams_, ); - return GoogleProtobufEmpty.fromJson( + return GoogleLongrunningOperation.fromJson( response_ as core.Map); } - /// Gets a Trial. + /// Gets a Schedule. /// /// Request parameters: /// - /// [name] - Required. The name of the Trial resource. Format: - /// `projects/{project}/locations/{location}/studies/{study}/trials/{trial}` + /// [name] - Required. The name of the Schedule resource. Format: + /// `projects/{project}/locations/{location}/schedules/{schedule}` /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/studies/\[^/\]+/trials/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/schedules/\[^/\]+$`. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. /// - /// Completes with a [GoogleCloudAiplatformV1Trial]. + /// Completes with a [GoogleCloudAiplatformV1Schedule]. /// /// Completes with a [commons.ApiRequestError] if the API endpoint returned an /// error. /// /// If the used [http.Client] completes with an error when making a REST call, /// this method will complete with the same error. - async.Future get( + async.Future get( core.String name, { core.String? $fields, }) async { @@ -23598,130 +24115,169 @@ class ProjectsLocationsStudiesTrialsResource { 'GET', queryParams: queryParams_, ); - return GoogleCloudAiplatformV1Trial.fromJson( + return GoogleCloudAiplatformV1Schedule.fromJson( response_ as core.Map); } - /// Lists the Trials associated with a Study. + /// Lists Schedules in a Location. /// /// Request parameters: /// - /// [parent] - Required. The resource name of the Study to list the Trial - /// from. Format: `projects/{project}/locations/{location}/studies/{study}` - /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/studies/\[^/\]+$`. + /// [parent] - Required. 
The resource name of the Location to list the + /// Schedules from. Format: `projects/{project}/locations/{location}` + /// Value must have pattern `^projects/\[^/\]+/locations/\[^/\]+$`. /// - /// [pageSize] - Optional. The number of Trials to retrieve per "page" of - /// results. If unspecified, the service will pick an appropriate default. + /// [filter] - Lists the Schedules that match the filter expression. The + /// following fields are supported: * `display_name`: Supports `=`, `!=` + /// comparisons, and `:` wildcard. * `state`: Supports `=` and `!=` + /// comparisons. * `request`: Supports existence of the check. (e.g. + /// `create_pipeline_job_request:*` --\> Schedule has + /// create_pipeline_job_request). * `create_time`: Supports `=`, `!=`, `<`, + /// `>`, `<=`, and `>=` comparisons. Values must be in RFC 3339 format. * + /// `start_time`: Supports `=`, `!=`, `<`, `>`, `<=`, and `>=` comparisons. + /// Values must be in RFC 3339 format. * `end_time`: Supports `=`, `!=`, `<`, + /// `>`, `<=`, `>=` comparisons and `:*` existence check. Values must be in + /// RFC 3339 format. * `next_run_time`: Supports `=`, `!=`, `<`, `>`, `<=`, + /// and `>=` comparisons. Values must be in RFC 3339 format. Filter + /// expressions can be combined together using logical operators (`NOT`, `AND` + /// & `OR`). The syntax to define filter expression is based on + /// https://google.aip.dev/160. Examples: * `state="ACTIVE" AND + /// display_name:"my_schedule_*"` * `NOT display_name="my_schedule"` * + /// `create_time>"2021-05-18T00:00:00Z"` * `end_time>"2021-05-18T00:00:00Z" OR + /// NOT end_time:*` * `create_pipeline_job_request:*` /// - /// [pageToken] - Optional. A page token to request the next page of results. - /// If unspecified, there are no subsequent pages. + /// [orderBy] - A comma-separated list of fields to order by. The default sort + /// order is in ascending order. Use "desc" after a field name for descending. + /// You can have multiple order_by fields provided. For example, using + /// "create_time desc, end_time" will order results by create time in + /// descending order, and if there are multiple schedules having the same + /// create time, order them by the end time in ascending order. If order_by is + /// not specified, it will order by default with create_time in descending + /// order. Supported fields: * `create_time` * `start_time` * `end_time` * + /// `next_run_time` + /// + /// [pageSize] - The standard list page size. Default to 100 if not specified. + /// + /// [pageToken] - The standard list page token. Typically obtained via + /// ListSchedulesResponse.next_page_token of the previous + /// ScheduleService.ListSchedules call. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. /// - /// Completes with a [GoogleCloudAiplatformV1ListTrialsResponse]. + /// Completes with a [GoogleCloudAiplatformV1ListSchedulesResponse]. /// /// Completes with a [commons.ApiRequestError] if the API endpoint returned an /// error. /// /// If the used [http.Client] completes with an error when making a REST call, /// this method will complete with the same error. - async.Future list( + async.Future list( core.String parent, { + core.String? filter, + core.String? orderBy, core.int? pageSize, core.String? pageToken, core.String? 
$fields, }) async { final queryParams_ = >{ + if (filter != null) 'filter': [filter], + if (orderBy != null) 'orderBy': [orderBy], if (pageSize != null) 'pageSize': ['${pageSize}'], if (pageToken != null) 'pageToken': [pageToken], if ($fields != null) 'fields': [$fields], }; - final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/trials'; + final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/schedules'; final response_ = await _requester.request( url_, 'GET', queryParams: queryParams_, ); - return GoogleCloudAiplatformV1ListTrialsResponse.fromJson( + return GoogleCloudAiplatformV1ListSchedulesResponse.fromJson( response_ as core.Map); } - /// Lists the pareto-optimal Trials for multi-objective Study or the optimal - /// Trials for single-objective Study. + /// Updates an active or paused Schedule. /// - /// The definition of pareto-optimal can be checked in wiki page. - /// https://en.wikipedia.org/wiki/Pareto_efficiency + /// When the Schedule is updated, new runs will be scheduled starting from the + /// updated next execution time after the update time based on the + /// time_specification in the updated Schedule. All unstarted runs before the + /// update time will be skipped while already created runs will NOT be paused + /// or canceled. /// /// [request] - The metadata request object. /// /// Request parameters: /// - /// [parent] - Required. The name of the Study that the optimal Trial belongs - /// to. + /// [name] - Immutable. The resource name of the Schedule. /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/studies/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/schedules/\[^/\]+$`. + /// + /// [updateMask] - Required. The update mask applies to the resource. See + /// google.protobuf.FieldMask. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. /// - /// Completes with a [GoogleCloudAiplatformV1ListOptimalTrialsResponse]. + /// Completes with a [GoogleCloudAiplatformV1Schedule]. /// /// Completes with a [commons.ApiRequestError] if the API endpoint returned an /// error. /// /// If the used [http.Client] completes with an error when making a REST call, /// this method will complete with the same error. - async.Future - listOptimalTrials( - GoogleCloudAiplatformV1ListOptimalTrialsRequest request, - core.String parent, { + async.Future patch( + GoogleCloudAiplatformV1Schedule request, + core.String name, { + core.String? updateMask, core.String? $fields, }) async { final body_ = convert.json.encode(request); final queryParams_ = >{ + if (updateMask != null) 'updateMask': [updateMask], if ($fields != null) 'fields': [$fields], }; - final url_ = - 'v1/' + core.Uri.encodeFull('$parent') + '/trials:listOptimalTrials'; + final url_ = 'v1/' + core.Uri.encodeFull('$name'); final response_ = await _requester.request( url_, - 'POST', + 'PATCH', body: body_, queryParams: queryParams_, ); - return GoogleCloudAiplatformV1ListOptimalTrialsResponse.fromJson( + return GoogleCloudAiplatformV1Schedule.fromJson( response_ as core.Map); } - /// Stops a Trial. + /// Pauses a Schedule. + /// + /// Will mark Schedule.state to 'PAUSED'. If the schedule is paused, no new + /// runs will be created. Already created runs will NOT be paused or canceled. /// /// [request] - The metadata request object. /// /// Request parameters: /// - /// [name] - Required. The Trial's name. Format: - /// `projects/{project}/locations/{location}/studies/{study}/trials/{trial}` + /// [name] - Required. The name of the Schedule resource to be paused. 
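
The filter and orderBy grammar documented for ScheduleService.ListSchedules is easiest to see at a call site. A minimal sketch, assuming the generated library lives at package:googleapis/aiplatform/v1.dart and that a ProjectsLocationsSchedulesResource has already been obtained from an authenticated API client; the project, location, and filter values are placeholders:

// Sketch: list ACTIVE Schedules whose display_name starts with "nightly_",
// newest first, paging with the standard list page token.
// Assumes `schedules` is a ProjectsLocationsSchedulesResource from an
// authenticated, generated API client; response field names follow the
// generator's usual camelCase mapping.
import 'package:googleapis/aiplatform/v1.dart';

Future<void> listActiveSchedules(
    ProjectsLocationsSchedulesResource schedules) async {
  const parent = 'projects/my-project/locations/us-central1'; // placeholder
  String? pageToken;
  do {
    final page = await schedules.list(
      parent,
      filter: 'state="ACTIVE" AND display_name:"nightly_*"',
      orderBy: 'create_time desc',
      pageSize: 50,
      pageToken: pageToken,
    );
    for (final schedule in page.schedules ?? <GoogleCloudAiplatformV1Schedule>[]) {
      print('${schedule.name} next run: ${schedule.nextRunTime}');
    }
    pageToken = page.nextPageToken;
  } while (pageToken != null && pageToken.isNotEmpty);
}
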
Format: + /// `projects/{project}/locations/{location}/schedules/{schedule}` /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/studies/\[^/\]+/trials/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/schedules/\[^/\]+$`. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. /// - /// Completes with a [GoogleCloudAiplatformV1Trial]. + /// Completes with a [GoogleProtobufEmpty]. /// /// Completes with a [commons.ApiRequestError] if the API endpoint returned an /// error. /// /// If the used [http.Client] completes with an error when making a REST call, /// this method will complete with the same error. - async.Future stop( - GoogleCloudAiplatformV1StopTrialRequest request, + async.Future pause( + GoogleCloudAiplatformV1PauseScheduleRequest request, core.String name, { core.String? $fields, }) async { @@ -23730,7 +24286,7 @@ class ProjectsLocationsStudiesTrialsResource { if ($fields != null) 'fields': [$fields], }; - final url_ = 'v1/' + core.Uri.encodeFull('$name') + ':stop'; + final url_ = 'v1/' + core.Uri.encodeFull('$name') + ':pause'; final response_ = await _requester.request( url_, @@ -23738,39 +24294,40 @@ class ProjectsLocationsStudiesTrialsResource { body: body_, queryParams: queryParams_, ); - return GoogleCloudAiplatformV1Trial.fromJson( + return GoogleProtobufEmpty.fromJson( response_ as core.Map); } - /// Adds one or more Trials to a Study, with parameter values suggested by - /// Vertex AI Vizier. + /// Resumes a paused Schedule to start scheduling new runs. /// - /// Returns a long-running operation associated with the generation of Trial - /// suggestions. When this long-running operation succeeds, it will contain a - /// SuggestTrialsResponse. + /// Will mark Schedule.state to 'ACTIVE'. Only paused Schedule can be resumed. + /// When the Schedule is resumed, new runs will be scheduled starting from the + /// next execution time after the current time based on the time_specification + /// in the Schedule. If Schedule.catch_up is set up true, all missed runs will + /// be scheduled for backfill first. /// /// [request] - The metadata request object. /// /// Request parameters: /// - /// [parent] - Required. The project and location that the Study belongs to. - /// Format: `projects/{project}/locations/{location}/studies/{study}` + /// [name] - Required. The name of the Schedule resource to be resumed. + /// Format: `projects/{project}/locations/{location}/schedules/{schedule}` /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/studies/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/schedules/\[^/\]+$`. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. /// - /// Completes with a [GoogleLongrunningOperation]. + /// Completes with a [GoogleProtobufEmpty]. /// /// Completes with a [commons.ApiRequestError] if the API endpoint returned an /// error. /// /// If the used [http.Client] completes with an error when making a REST call, /// this method will complete with the same error. - async.Future suggest( - GoogleCloudAiplatformV1SuggestTrialsRequest request, - core.String parent, { + async.Future resume( + GoogleCloudAiplatformV1ResumeScheduleRequest request, + core.String name, { core.String? 
$fields, }) async { final body_ = convert.json.encode(request); @@ -23778,7 +24335,7 @@ class ProjectsLocationsStudiesTrialsResource { if ($fields != null) 'fields': [$fields], }; - final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/trials:suggest'; + final url_ = 'v1/' + core.Uri.encodeFull('$name') + ':resume'; final response_ = await _requester.request( url_, @@ -23786,15 +24343,15 @@ class ProjectsLocationsStudiesTrialsResource { body: body_, queryParams: queryParams_, ); - return GoogleLongrunningOperation.fromJson( + return GoogleProtobufEmpty.fromJson( response_ as core.Map); } } -class ProjectsLocationsStudiesTrialsOperationsResource { +class ProjectsLocationsSchedulesOperationsResource { final commons.ApiRequester _requester; - ProjectsLocationsStudiesTrialsOperationsResource(commons.ApiRequester client) + ProjectsLocationsSchedulesOperationsResource(commons.ApiRequester client) : _requester = client; /// Starts asynchronous cancellation on a long-running operation. @@ -23805,14 +24362,14 @@ class ProjectsLocationsStudiesTrialsOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// Request parameters: /// /// [name] - The name of the operation resource to be cancelled. /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/studies/\[^/\]+/trials/\[^/\]+/operations/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/schedules/\[^/\]+/operations/\[^/\]+$`. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. @@ -23853,7 +24410,7 @@ class ProjectsLocationsStudiesTrialsOperationsResource { /// /// [name] - The name of the operation resource to be deleted. /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/studies/\[^/\]+/trials/\[^/\]+/operations/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/schedules/\[^/\]+/operations/\[^/\]+$`. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. @@ -23893,7 +24450,7 @@ class ProjectsLocationsStudiesTrialsOperationsResource { /// /// [name] - The name of the operation resource. /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/studies/\[^/\]+/trials/\[^/\]+/operations/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/schedules/\[^/\]+/operations/\[^/\]+$`. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. @@ -23932,7 +24489,7 @@ class ProjectsLocationsStudiesTrialsOperationsResource { /// /// [name] - The name of the operation's parent resource. /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/studies/\[^/\]+/trials/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/schedules/\[^/\]+$`. /// /// [filter] - The standard list filter. /// @@ -23990,7 +24547,7 @@ class ProjectsLocationsStudiesTrialsOperationsResource { /// /// [name] - The name of the operation resource to wait on. /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/studies/\[^/\]+/trials/\[^/\]+/operations/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/schedules/\[^/\]+/operations/\[^/\]+$`. 
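
The pause/resume semantics above (new runs stop, existing runs are untouched, only a paused Schedule can be resumed) are worth seeing end to end. A minimal sketch under the same assumptions as before; the catchUp field name is inferred from the Schedule.catch_up behavior described in the resume documentation and is an assumption:

// Sketch: pause a Schedule, then resume it with backfill of missed runs.
// Assumes `schedules` comes from an authenticated, generated API client.
import 'package:googleapis/aiplatform/v1.dart';

Future<void> pauseThenResume(
    ProjectsLocationsSchedulesResource schedules, String scheduleName) async {
  // scheduleName: projects/{project}/locations/{location}/schedules/{schedule}
  await schedules.pause(
      GoogleCloudAiplatformV1PauseScheduleRequest(), scheduleName);

  // ... perform maintenance while no new runs are created ...

  // catchUp controls whether missed runs are scheduled for backfill first
  // (field name assumed from the catch_up behavior documented above).
  await schedules.resume(
      GoogleCloudAiplatformV1ResumeScheduleRequest(catchUp: true),
      scheduleName);
}
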
/// /// [timeout] - The maximum duration to wait before timing out. If left blank, /// the wait will be at most the time permitted by the underlying HTTP/RPC @@ -24029,80 +24586,23 @@ class ProjectsLocationsStudiesTrialsOperationsResource { } } -class ProjectsLocationsTensorboardsResource { +class ProjectsLocationsSpecialistPoolsResource { final commons.ApiRequester _requester; - ProjectsLocationsTensorboardsExperimentsResource get experiments => - ProjectsLocationsTensorboardsExperimentsResource(_requester); - ProjectsLocationsTensorboardsOperationsResource get operations => - ProjectsLocationsTensorboardsOperationsResource(_requester); + ProjectsLocationsSpecialistPoolsOperationsResource get operations => + ProjectsLocationsSpecialistPoolsOperationsResource(_requester); - ProjectsLocationsTensorboardsResource(commons.ApiRequester client) + ProjectsLocationsSpecialistPoolsResource(commons.ApiRequester client) : _requester = client; - /// Reads multiple TensorboardTimeSeries' data. - /// - /// The data point number limit is 1000 for scalars, 100 for tensors and blob - /// references. If the number of data points stored is less than the limit, - /// all data is returned. Otherwise, the number limit of data points is - /// randomly selected from this time series and returned. - /// - /// Request parameters: - /// - /// [tensorboard] - Required. The resource name of the Tensorboard containing - /// TensorboardTimeSeries to read data from. Format: - /// `projects/{project}/locations/{location}/tensorboards/{tensorboard}`. The - /// TensorboardTimeSeries referenced by time_series must be sub resources of - /// this Tensorboard. - /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+$`. - /// - /// [timeSeries] - Required. The resource names of the TensorboardTimeSeries - /// to read data from. Format: - /// `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}/runs/{run}/timeSeries/{time_series}` - /// - /// [$fields] - Selector specifying which fields to include in a partial - /// response. - /// - /// Completes with a - /// [GoogleCloudAiplatformV1BatchReadTensorboardTimeSeriesDataResponse]. - /// - /// Completes with a [commons.ApiRequestError] if the API endpoint returned an - /// error. - /// - /// If the used [http.Client] completes with an error when making a REST call, - /// this method will complete with the same error. - async - .Future - batchRead( - core.String tensorboard, { - core.List? timeSeries, - core.String? $fields, - }) async { - final queryParams_ = >{ - if (timeSeries != null) 'timeSeries': timeSeries, - if ($fields != null) 'fields': [$fields], - }; - - final url_ = 'v1/' + core.Uri.encodeFull('$tensorboard') + ':batchRead'; - - final response_ = await _requester.request( - url_, - 'GET', - queryParams: queryParams_, - ); - return GoogleCloudAiplatformV1BatchReadTensorboardTimeSeriesDataResponse - .fromJson(response_ as core.Map); - } - - /// Creates a Tensorboard. + /// Creates a SpecialistPool. /// /// [request] - The metadata request object. /// /// Request parameters: /// - /// [parent] - Required. The resource name of the Location to create the - /// Tensorboard in. Format: `projects/{project}/locations/{location}` + /// [parent] - Required. The parent Project name for the new SpecialistPool. + /// The form is `projects/{project}/locations/{location}`. /// Value must have pattern `^projects/\[^/\]+/locations/\[^/\]+$`. 
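
Creating and deleting a SpecialistPool both return long-running operations, and delete accepts the force flag documented on the delete method below. A minimal sketch, assuming `pools` comes from an authenticated, generated API client; the displayName and specialistManagerEmails values are illustrative:

// Sketch: create a SpecialistPool, then delete it along with its specialist
// managers by passing force: true. Both calls return GoogleLongrunningOperation.
import 'package:googleapis/aiplatform/v1.dart';

Future<void> createThenForceDelete(
    ProjectsLocationsSpecialistPoolsResource pools) async {
  const parent = 'projects/my-project/locations/us-central1'; // placeholder
  final createOp = await pools.create(
    GoogleCloudAiplatformV1SpecialistPool(
      displayName: 'labeling-pool',
      specialistManagerEmails: ['manager@example.com'], // illustrative field
    ),
    parent,
  );
  print('create operation: ${createOp.name}');

  final poolName = '$parent/specialistPools/123'; // placeholder resource name
  final deleteOp = await pools.delete(poolName, force: true);
  print('delete operation: ${deleteOp.name}');
}
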
/// /// [$fields] - Selector specifying which fields to include in a partial @@ -24116,7 +24616,7 @@ class ProjectsLocationsTensorboardsResource { /// If the used [http.Client] completes with an error when making a REST call, /// this method will complete with the same error. async.Future create( - GoogleCloudAiplatformV1Tensorboard request, + GoogleCloudAiplatformV1SpecialistPool request, core.String parent, { core.String? $fields, }) async { @@ -24125,7 +24625,7 @@ class ProjectsLocationsTensorboardsResource { if ($fields != null) 'fields': [$fields], }; - final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/tensorboards'; + final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/specialistPools'; final response_ = await _requester.request( url_, @@ -24137,14 +24637,19 @@ class ProjectsLocationsTensorboardsResource { response_ as core.Map); } - /// Deletes a Tensorboard. + /// Deletes a SpecialistPool as well as all Specialists in the pool. /// /// Request parameters: /// - /// [name] - Required. The name of the Tensorboard to be deleted. Format: - /// `projects/{project}/locations/{location}/tensorboards/{tensorboard}` + /// [name] - Required. The resource name of the SpecialistPool to delete. + /// Format: + /// `projects/{project}/locations/{location}/specialistPools/{specialist_pool}` /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/specialistPools/\[^/\]+$`. + /// + /// [force] - If set to true, any specialist managers in this SpecialistPool + /// will also be deleted. (Otherwise, the request will only work if the + /// SpecialistPool has no specialist managers.) /// /// [$fields] - Selector specifying which fields to include in a partial /// response. @@ -24158,9 +24663,11 @@ class ProjectsLocationsTensorboardsResource { /// this method will complete with the same error. async.Future delete( core.String name, { + core.bool? force, core.String? $fields, }) async { final queryParams_ = >{ + if (force != null) 'force': ['${force}'], if ($fields != null) 'fields': [$fields], }; @@ -24175,26 +24682,26 @@ class ProjectsLocationsTensorboardsResource { response_ as core.Map); } - /// Gets a Tensorboard. + /// Gets a SpecialistPool. /// /// Request parameters: /// - /// [name] - Required. The name of the Tensorboard resource. Format: - /// `projects/{project}/locations/{location}/tensorboards/{tensorboard}` + /// [name] - Required. The name of the SpecialistPool resource. The form is + /// `projects/{project}/locations/{location}/specialistPools/{specialist_pool}`. /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/specialistPools/\[^/\]+$`. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. /// - /// Completes with a [GoogleCloudAiplatformV1Tensorboard]. + /// Completes with a [GoogleCloudAiplatformV1SpecialistPool]. /// /// Completes with a [commons.ApiRequestError] if the API endpoint returned an /// error. /// /// If the used [http.Client] completes with an error when making a REST call, /// this method will complete with the same error. - async.Future get( + async.Future get( core.String name, { core.String? 
$fields, }) async { @@ -24209,90 +24716,74 @@ class ProjectsLocationsTensorboardsResource { 'GET', queryParams: queryParams_, ); - return GoogleCloudAiplatformV1Tensorboard.fromJson( + return GoogleCloudAiplatformV1SpecialistPool.fromJson( response_ as core.Map); } - /// Lists Tensorboards in a Location. + /// Lists SpecialistPools in a Location. /// /// Request parameters: /// - /// [parent] - Required. The resource name of the Location to list - /// Tensorboards. Format: `projects/{project}/locations/{location}` + /// [parent] - Required. The name of the SpecialistPool's parent resource. + /// Format: `projects/{project}/locations/{location}` /// Value must have pattern `^projects/\[^/\]+/locations/\[^/\]+$`. /// - /// [filter] - Lists the Tensorboards that match the filter expression. - /// - /// [orderBy] - Field to use to sort the list. - /// - /// [pageSize] - The maximum number of Tensorboards to return. The service may - /// return fewer than this value. If unspecified, at most 100 Tensorboards are - /// returned. The maximum value is 100; values above 100 are coerced to 100. + /// [pageSize] - The standard list page size. /// - /// [pageToken] - A page token, received from a previous - /// TensorboardService.ListTensorboards call. Provide this to retrieve the - /// subsequent page. When paginating, all other parameters provided to - /// TensorboardService.ListTensorboards must match the call that provided the - /// page token. + /// [pageToken] - The standard list page token. Typically obtained by + /// ListSpecialistPoolsResponse.next_page_token of the previous + /// SpecialistPoolService.ListSpecialistPools call. Return first page if + /// empty. /// - /// [readMask] - Mask specifying which fields to read. + /// [readMask] - Mask specifying which fields to read. FieldMask represents a + /// set of /// /// [$fields] - Selector specifying which fields to include in a partial /// response. /// - /// Completes with a [GoogleCloudAiplatformV1ListTensorboardsResponse]. + /// Completes with a [GoogleCloudAiplatformV1ListSpecialistPoolsResponse]. /// /// Completes with a [commons.ApiRequestError] if the API endpoint returned an /// error. /// /// If the used [http.Client] completes with an error when making a REST call, /// this method will complete with the same error. - async.Future list( + async.Future list( core.String parent, { - core.String? filter, - core.String? orderBy, core.int? pageSize, core.String? pageToken, core.String? readMask, core.String? $fields, }) async { final queryParams_ = >{ - if (filter != null) 'filter': [filter], - if (orderBy != null) 'orderBy': [orderBy], if (pageSize != null) 'pageSize': ['${pageSize}'], if (pageToken != null) 'pageToken': [pageToken], if (readMask != null) 'readMask': [readMask], if ($fields != null) 'fields': [$fields], }; - final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/tensorboards'; + final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/specialistPools'; final response_ = await _requester.request( url_, 'GET', queryParams: queryParams_, ); - return GoogleCloudAiplatformV1ListTensorboardsResponse.fromJson( + return GoogleCloudAiplatformV1ListSpecialistPoolsResponse.fromJson( response_ as core.Map); } - /// Updates a Tensorboard. + /// Updates a SpecialistPool. /// /// [request] - The metadata request object. /// /// Request parameters: /// - /// [name] - Output only. Name of the Tensorboard. Format: - /// `projects/{project}/locations/{location}/tensorboards/{tensorboard}` + /// [name] - Required. 
The resource name of the SpecialistPool. /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/specialistPools/\[^/\]+$`. /// - /// [updateMask] - Required. Field mask is used to specify the fields to be - /// overwritten in the Tensorboard resource by the update. The fields - /// specified in the update_mask are relative to the resource, not the full - /// request. A field is overwritten if it's in the mask. If the user does not - /// provide a mask then all fields are overwritten if new values are - /// specified. + /// [updateMask] - Required. The update mask applies to the resource. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. @@ -24305,7 +24796,7 @@ class ProjectsLocationsTensorboardsResource { /// If the used [http.Client] completes with an error when making a REST call, /// this method will complete with the same error. async.Future patch( - GoogleCloudAiplatformV1Tensorboard request, + GoogleCloudAiplatformV1SpecialistPool request, core.String name, { core.String? updateMask, core.String? $fields, @@ -24327,205 +24818,214 @@ class ProjectsLocationsTensorboardsResource { return GoogleLongrunningOperation.fromJson( response_ as core.Map); } +} - /// Returns the storage size for a given TensorBoard instance. +class ProjectsLocationsSpecialistPoolsOperationsResource { + final commons.ApiRequester _requester; + + ProjectsLocationsSpecialistPoolsOperationsResource( + commons.ApiRequester client) + : _requester = client; + + /// Starts asynchronous cancellation on a long-running operation. + /// + /// The server makes a best effort to cancel the operation, but success is not + /// guaranteed. If the server doesn't support this method, it returns + /// `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation + /// or other methods to check whether the cancellation succeeded or whether + /// the operation completed despite cancellation. On successful cancellation, + /// the operation is not deleted; instead, it becomes an operation with an + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// Request parameters: /// - /// [tensorboard] - Required. The name of the Tensorboard resource. Format: - /// `projects/{project}/locations/{location}/tensorboards/{tensorboard}` + /// [name] - The name of the operation resource to be cancelled. /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/specialistPools/\[^/\]+/operations/\[^/\]+$`. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. /// - /// Completes with a [GoogleCloudAiplatformV1ReadTensorboardSizeResponse]. + /// Completes with a [GoogleProtobufEmpty]. /// /// Completes with a [commons.ApiRequestError] if the API endpoint returned an /// error. /// /// If the used [http.Client] completes with an error when making a REST call, /// this method will complete with the same error. - async.Future readSize( - core.String tensorboard, { + async.Future cancel( + core.String name, { core.String? 
$fields, }) async { final queryParams_ = >{ if ($fields != null) 'fields': [$fields], }; - final url_ = 'v1/' + core.Uri.encodeFull('$tensorboard') + ':readSize'; + final url_ = 'v1/' + core.Uri.encodeFull('$name') + ':cancel'; final response_ = await _requester.request( url_, - 'GET', + 'POST', queryParams: queryParams_, ); - return GoogleCloudAiplatformV1ReadTensorboardSizeResponse.fromJson( + return GoogleProtobufEmpty.fromJson( response_ as core.Map); } - /// Returns a list of monthly active users for a given TensorBoard instance. + /// Deletes a long-running operation. + /// + /// This method indicates that the client is no longer interested in the + /// operation result. It does not cancel the operation. If the server doesn't + /// support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. /// /// Request parameters: /// - /// [tensorboard] - Required. The name of the Tensorboard resource. Format: - /// `projects/{project}/locations/{location}/tensorboards/{tensorboard}` + /// [name] - The name of the operation resource to be deleted. /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/specialistPools/\[^/\]+/operations/\[^/\]+$`. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. /// - /// Completes with a [GoogleCloudAiplatformV1ReadTensorboardUsageResponse]. + /// Completes with a [GoogleProtobufEmpty]. /// /// Completes with a [commons.ApiRequestError] if the API endpoint returned an /// error. /// /// If the used [http.Client] completes with an error when making a REST call, /// this method will complete with the same error. - async.Future readUsage( - core.String tensorboard, { + async.Future delete( + core.String name, { core.String? $fields, }) async { final queryParams_ = >{ if ($fields != null) 'fields': [$fields], }; - final url_ = 'v1/' + core.Uri.encodeFull('$tensorboard') + ':readUsage'; + final url_ = 'v1/' + core.Uri.encodeFull('$name'); final response_ = await _requester.request( url_, - 'GET', + 'DELETE', queryParams: queryParams_, ); - return GoogleCloudAiplatformV1ReadTensorboardUsageResponse.fromJson( + return GoogleProtobufEmpty.fromJson( response_ as core.Map); } -} -class ProjectsLocationsTensorboardsExperimentsResource { - final commons.ApiRequester _requester; - - ProjectsLocationsTensorboardsExperimentsOperationsResource get operations => - ProjectsLocationsTensorboardsExperimentsOperationsResource(_requester); - ProjectsLocationsTensorboardsExperimentsRunsResource get runs => - ProjectsLocationsTensorboardsExperimentsRunsResource(_requester); - - ProjectsLocationsTensorboardsExperimentsResource(commons.ApiRequester client) - : _requester = client; - - /// Batch create TensorboardTimeSeries that belong to a TensorboardExperiment. + /// Gets the latest state of a long-running operation. /// - /// [request] - The metadata request object. + /// Clients can use this method to poll the operation result at intervals as + /// recommended by the API service. /// /// Request parameters: /// - /// [parent] - Required. The resource name of the TensorboardExperiment to - /// create the TensorboardTimeSeries in. Format: - /// `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}` - /// The TensorboardRuns referenced by the parent fields in the - /// CreateTensorboardTimeSeriesRequest messages must be sub resources of this - /// TensorboardExperiment. + /// [name] - The name of the operation resource. 
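
The operations sub-resources in this file all follow the same long-running-operation pattern, so a single polling helper covers them. A minimal sketch, assuming `operations` comes from an authenticated, generated API client; the "30s" timeout string uses the usual google.protobuf.Duration JSON encoding:

// Sketch: wait for a long-running operation, first via :wait (bounded by the
// given timeout), then by polling get until done is reported.
import 'package:googleapis/aiplatform/v1.dart';

Future<GoogleLongrunningOperation> waitForOperation(
    ProjectsLocationsSpecialistPoolsOperationsResource operations,
    String operationName) async {
  var op = await operations.wait(operationName, timeout: '30s');
  while (!(op.done ?? false)) {
    await Future<void>.delayed(const Duration(seconds: 5));
    op = await operations.get(operationName);
  }
  if (op.error != null) {
    throw StateError('operation failed: ${op.error!.message}');
  }
  return op;
}
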
/// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+/experiments/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/specialistPools/\[^/\]+/operations/\[^/\]+$`. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. /// - /// Completes with a - /// [GoogleCloudAiplatformV1BatchCreateTensorboardTimeSeriesResponse]. + /// Completes with a [GoogleLongrunningOperation]. /// /// Completes with a [commons.ApiRequestError] if the API endpoint returned an /// error. /// /// If the used [http.Client] completes with an error when making a REST call, /// this method will complete with the same error. - async.Future - batchCreate( - GoogleCloudAiplatformV1BatchCreateTensorboardTimeSeriesRequest request, - core.String parent, { + async.Future get( + core.String name, { core.String? $fields, }) async { - final body_ = convert.json.encode(request); final queryParams_ = >{ if ($fields != null) 'fields': [$fields], }; - final url_ = 'v1/' + core.Uri.encodeFull('$parent') + ':batchCreate'; + final url_ = 'v1/' + core.Uri.encodeFull('$name'); final response_ = await _requester.request( url_, - 'POST', - body: body_, + 'GET', queryParams: queryParams_, ); - return GoogleCloudAiplatformV1BatchCreateTensorboardTimeSeriesResponse - .fromJson(response_ as core.Map); + return GoogleLongrunningOperation.fromJson( + response_ as core.Map); } - /// Creates a TensorboardExperiment. + /// Lists operations that match the specified filter in the request. /// - /// [request] - The metadata request object. + /// If the server doesn't support this method, it returns `UNIMPLEMENTED`. /// /// Request parameters: /// - /// [parent] - Required. The resource name of the Tensorboard to create the - /// TensorboardExperiment in. Format: - /// `projects/{project}/locations/{location}/tensorboards/{tensorboard}` + /// [name] - The name of the operation's parent resource. /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/specialistPools/\[^/\]+$`. /// - /// [tensorboardExperimentId] - Required. The ID to use for the Tensorboard - /// experiment, which becomes the final component of the Tensorboard - /// experiment's resource name. This value should be 1-128 characters, and - /// valid characters are `/a-z-/`. + /// [filter] - The standard list filter. + /// + /// [pageSize] - The standard list page size. + /// + /// [pageToken] - The standard list page token. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. /// - /// Completes with a [GoogleCloudAiplatformV1TensorboardExperiment]. + /// Completes with a [GoogleLongrunningListOperationsResponse]. /// /// Completes with a [commons.ApiRequestError] if the API endpoint returned an /// error. /// /// If the used [http.Client] completes with an error when making a REST call, /// this method will complete with the same error. - async.Future create( - GoogleCloudAiplatformV1TensorboardExperiment request, - core.String parent, { - core.String? tensorboardExperimentId, + async.Future list( + core.String name, { + core.String? filter, + core.int? pageSize, + core.String? pageToken, core.String? 
$fields, }) async { - final body_ = convert.json.encode(request); final queryParams_ = >{ - if (tensorboardExperimentId != null) - 'tensorboardExperimentId': [tensorboardExperimentId], + if (filter != null) 'filter': [filter], + if (pageSize != null) 'pageSize': ['${pageSize}'], + if (pageToken != null) 'pageToken': [pageToken], if ($fields != null) 'fields': [$fields], }; - final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/experiments'; + final url_ = 'v1/' + core.Uri.encodeFull('$name') + '/operations'; final response_ = await _requester.request( url_, - 'POST', - body: body_, + 'GET', queryParams: queryParams_, ); - return GoogleCloudAiplatformV1TensorboardExperiment.fromJson( + return GoogleLongrunningListOperationsResponse.fromJson( response_ as core.Map); } - /// Deletes a TensorboardExperiment. + /// Waits until the specified long-running operation is done or reaches at + /// most a specified timeout, returning the latest state. + /// + /// If the operation is already done, the latest state is immediately + /// returned. If the timeout specified is greater than the default HTTP/RPC + /// timeout, the HTTP/RPC timeout is used. If the server does not support this + /// method, it returns `google.rpc.Code.UNIMPLEMENTED`. Note that this method + /// is on a best-effort basis. It may return the latest state before the + /// specified timeout (including immediately), meaning even an immediate + /// response is no guarantee that the operation is done. /// /// Request parameters: /// - /// [name] - Required. The name of the TensorboardExperiment to be deleted. - /// Format: - /// `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}` + /// [name] - The name of the operation resource to wait on. /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+/experiments/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/specialistPools/\[^/\]+/operations/\[^/\]+$`. + /// + /// [timeout] - The maximum duration to wait before timing out. If left blank, + /// the wait will be at most the time permitted by the underlying HTTP/RPC + /// protocol. If RPC context deadline is also specified, the shorter one will + /// be used. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. @@ -24537,168 +25037,145 @@ class ProjectsLocationsTensorboardsExperimentsResource { /// /// If the used [http.Client] completes with an error when making a REST call, /// this method will complete with the same error. - async.Future delete( + async.Future wait( core.String name, { + core.String? timeout, core.String? $fields, }) async { final queryParams_ = >{ + if (timeout != null) 'timeout': [timeout], if ($fields != null) 'fields': [$fields], }; - final url_ = 'v1/' + core.Uri.encodeFull('$name'); + final url_ = 'v1/' + core.Uri.encodeFull('$name') + ':wait'; final response_ = await _requester.request( url_, - 'DELETE', + 'POST', queryParams: queryParams_, ); return GoogleLongrunningOperation.fromJson( response_ as core.Map); } +} - /// Gets a TensorboardExperiment. +class ProjectsLocationsStudiesResource { + final commons.ApiRequester _requester; + + ProjectsLocationsStudiesOperationsResource get operations => + ProjectsLocationsStudiesOperationsResource(_requester); + ProjectsLocationsStudiesTrialsResource get trials => + ProjectsLocationsStudiesTrialsResource(_requester); + + ProjectsLocationsStudiesResource(commons.ApiRequester client) + : _requester = client; + + /// Creates a Study. 
+ /// + /// A resource name will be generated after creation of the Study. + /// + /// [request] - The metadata request object. /// /// Request parameters: /// - /// [name] - Required. The name of the TensorboardExperiment resource. Format: - /// `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}` - /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+/experiments/\[^/\]+$`. + /// [parent] - Required. The resource name of the Location to create the + /// CustomJob in. Format: `projects/{project}/locations/{location}` + /// Value must have pattern `^projects/\[^/\]+/locations/\[^/\]+$`. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. /// - /// Completes with a [GoogleCloudAiplatformV1TensorboardExperiment]. + /// Completes with a [GoogleCloudAiplatformV1Study]. /// /// Completes with a [commons.ApiRequestError] if the API endpoint returned an /// error. /// /// If the used [http.Client] completes with an error when making a REST call, /// this method will complete with the same error. - async.Future get( - core.String name, { + async.Future create( + GoogleCloudAiplatformV1Study request, + core.String parent, { core.String? $fields, }) async { + final body_ = convert.json.encode(request); final queryParams_ = >{ if ($fields != null) 'fields': [$fields], }; - final url_ = 'v1/' + core.Uri.encodeFull('$name'); + final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/studies'; final response_ = await _requester.request( url_, - 'GET', + 'POST', + body: body_, queryParams: queryParams_, ); - return GoogleCloudAiplatformV1TensorboardExperiment.fromJson( + return GoogleCloudAiplatformV1Study.fromJson( response_ as core.Map); } - /// Lists TensorboardExperiments in a Location. + /// Deletes a Study. /// /// Request parameters: /// - /// [parent] - Required. The resource name of the Tensorboard to list - /// TensorboardExperiments. Format: - /// `projects/{project}/locations/{location}/tensorboards/{tensorboard}` + /// [name] - Required. The name of the Study resource to be deleted. Format: + /// `projects/{project}/locations/{location}/studies/{study}` /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+$`. - /// - /// [filter] - Lists the TensorboardExperiments that match the filter - /// expression. - /// - /// [orderBy] - Field to use to sort the list. - /// - /// [pageSize] - The maximum number of TensorboardExperiments to return. The - /// service may return fewer than this value. If unspecified, at most 50 - /// TensorboardExperiments are returned. The maximum value is 1000; values - /// above 1000 are coerced to 1000. - /// - /// [pageToken] - A page token, received from a previous - /// TensorboardService.ListTensorboardExperiments call. Provide this to - /// retrieve the subsequent page. When paginating, all other parameters - /// provided to TensorboardService.ListTensorboardExperiments must match the - /// call that provided the page token. - /// - /// [readMask] - Mask specifying which fields to read. + /// `^projects/\[^/\]+/locations/\[^/\]+/studies/\[^/\]+$`. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. /// - /// Completes with a - /// [GoogleCloudAiplatformV1ListTensorboardExperimentsResponse]. + /// Completes with a [GoogleProtobufEmpty]. /// /// Completes with a [commons.ApiRequestError] if the API endpoint returned an /// error. 
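
Since the Study's resource name is generated by the service, the create call is the natural place to capture it. A minimal sketch, assuming `studies` comes from an authenticated, generated API client; a real request also needs a populated GoogleCloudAiplatformV1StudySpec (metrics and parameters), which is outside this hunk, so only displayName is filled in here:

// Sketch: create a Study in a Location and read back the server-assigned name.
import 'package:googleapis/aiplatform/v1.dart';

Future<GoogleCloudAiplatformV1Study> createStudy(
    ProjectsLocationsStudiesResource studies) async {
  const parent = 'projects/my-project/locations/us-central1'; // placeholder
  final study = await studies.create(
    GoogleCloudAiplatformV1Study(displayName: 'my-study'),
    parent,
  );
  // The service generates the resource name on creation.
  print('created ${study.name}');
  return study;
}
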
/// /// If the used [http.Client] completes with an error when making a REST call, /// this method will complete with the same error. - async.Future list( - core.String parent, { - core.String? filter, - core.String? orderBy, - core.int? pageSize, - core.String? pageToken, - core.String? readMask, + async.Future delete( + core.String name, { core.String? $fields, }) async { final queryParams_ = >{ - if (filter != null) 'filter': [filter], - if (orderBy != null) 'orderBy': [orderBy], - if (pageSize != null) 'pageSize': ['${pageSize}'], - if (pageToken != null) 'pageToken': [pageToken], - if (readMask != null) 'readMask': [readMask], if ($fields != null) 'fields': [$fields], }; - final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/experiments'; + final url_ = 'v1/' + core.Uri.encodeFull('$name'); final response_ = await _requester.request( url_, - 'GET', + 'DELETE', queryParams: queryParams_, ); - return GoogleCloudAiplatformV1ListTensorboardExperimentsResponse.fromJson( + return GoogleProtobufEmpty.fromJson( response_ as core.Map); } - /// Updates a TensorboardExperiment. - /// - /// [request] - The metadata request object. + /// Gets a Study by name. /// /// Request parameters: /// - /// [name] - Output only. Name of the TensorboardExperiment. Format: - /// `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}` + /// [name] - Required. The name of the Study resource. Format: + /// `projects/{project}/locations/{location}/studies/{study}` /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+/experiments/\[^/\]+$`. - /// - /// [updateMask] - Required. Field mask is used to specify the fields to be - /// overwritten in the TensorboardExperiment resource by the update. The - /// fields specified in the update_mask are relative to the resource, not the - /// full request. A field is overwritten if it's in the mask. If the user does - /// not provide a mask then all fields are overwritten if new values are - /// specified. + /// `^projects/\[^/\]+/locations/\[^/\]+/studies/\[^/\]+$`. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. /// - /// Completes with a [GoogleCloudAiplatformV1TensorboardExperiment]. + /// Completes with a [GoogleCloudAiplatformV1Study]. /// /// Completes with a [commons.ApiRequestError] if the API endpoint returned an /// error. /// /// If the used [http.Client] completes with an error when making a REST call, /// this method will complete with the same error. - async.Future patch( - GoogleCloudAiplatformV1TensorboardExperiment request, + async.Future get( core.String name, { - core.String? updateMask, core.String? $fields, }) async { - final body_ = convert.json.encode(request); final queryParams_ = >{ - if (updateMask != null) 'updateMask': [updateMask], if ($fields != null) 'fields': [$fields], }; @@ -24706,71 +25183,109 @@ class ProjectsLocationsTensorboardsExperimentsResource { final response_ = await _requester.request( url_, - 'PATCH', - body: body_, + 'GET', queryParams: queryParams_, ); - return GoogleCloudAiplatformV1TensorboardExperiment.fromJson( + return GoogleCloudAiplatformV1Study.fromJson( response_ as core.Map); } - /// Write time series data points of multiple TensorboardTimeSeries in - /// multiple TensorboardRun's. + /// Lists all the studies in a region for an associated project. /// - /// If any data fail to be ingested, an error is returned. + /// Request parameters: /// - /// [request] - The metadata request object. 
+ /// [parent] - Required. The resource name of the Location to list the Study + /// from. Format: `projects/{project}/locations/{location}` + /// Value must have pattern `^projects/\[^/\]+/locations/\[^/\]+$`. /// - /// Request parameters: + /// [pageSize] - Optional. The maximum number of studies to return per "page" + /// of results. If unspecified, service will pick an appropriate default. /// - /// [tensorboardExperiment] - Required. The resource name of the - /// TensorboardExperiment to write data to. Format: - /// `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}` - /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+/experiments/\[^/\]+$`. + /// [pageToken] - Optional. A page token to request the next page of results. + /// If unspecified, there are no subsequent pages. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. /// - /// Completes with a - /// [GoogleCloudAiplatformV1WriteTensorboardExperimentDataResponse]. + /// Completes with a [GoogleCloudAiplatformV1ListStudiesResponse]. /// /// Completes with a [commons.ApiRequestError] if the API endpoint returned an /// error. /// /// If the used [http.Client] completes with an error when making a REST call, /// this method will complete with the same error. - async.Future - write( - GoogleCloudAiplatformV1WriteTensorboardExperimentDataRequest request, - core.String tensorboardExperiment, { + async.Future list( + core.String parent, { + core.int? pageSize, + core.String? pageToken, core.String? $fields, }) async { - final body_ = convert.json.encode(request); final queryParams_ = >{ + if (pageSize != null) 'pageSize': ['${pageSize}'], + if (pageToken != null) 'pageToken': [pageToken], if ($fields != null) 'fields': [$fields], }; - final url_ = - 'v1/' + core.Uri.encodeFull('$tensorboardExperiment') + ':write'; + final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/studies'; final response_ = await _requester.request( url_, - 'POST', - body: body_, + 'GET', queryParams: queryParams_, ); - return GoogleCloudAiplatformV1WriteTensorboardExperimentDataResponse - .fromJson(response_ as core.Map); + return GoogleCloudAiplatformV1ListStudiesResponse.fromJson( + response_ as core.Map); } -} - -class ProjectsLocationsTensorboardsExperimentsOperationsResource { - final commons.ApiRequester _requester; - - ProjectsLocationsTensorboardsExperimentsOperationsResource( - commons.ApiRequester client) - : _requester = client; + + /// Looks a study up using the user-defined display_name field instead of the + /// fully qualified resource name. + /// + /// [request] - The metadata request object. + /// + /// Request parameters: + /// + /// [parent] - Required. The resource name of the Location to get the Study + /// from. Format: `projects/{project}/locations/{location}` + /// Value must have pattern `^projects/\[^/\]+/locations/\[^/\]+$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [GoogleCloudAiplatformV1Study]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future lookup( + GoogleCloudAiplatformV1LookupStudyRequest request, + core.String parent, { + core.String? 
$fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/studies:lookup'; + + final response_ = await _requester.request( + url_, + 'POST', + body: body_, + queryParams: queryParams_, + ); + return GoogleCloudAiplatformV1Study.fromJson( + response_ as core.Map); + } +} + +class ProjectsLocationsStudiesOperationsResource { + final commons.ApiRequester _requester; + + ProjectsLocationsStudiesOperationsResource(commons.ApiRequester client) + : _requester = client; /// Starts asynchronous cancellation on a long-running operation. /// @@ -24780,14 +25295,14 @@ class ProjectsLocationsTensorboardsExperimentsOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// Request parameters: /// /// [name] - The name of the operation resource to be cancelled. /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+/experiments/\[^/\]+/operations/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/studies/\[^/\]+/operations/\[^/\]+$`. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. @@ -24828,7 +25343,7 @@ class ProjectsLocationsTensorboardsExperimentsOperationsResource { /// /// [name] - The name of the operation resource to be deleted. /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+/experiments/\[^/\]+/operations/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/studies/\[^/\]+/operations/\[^/\]+$`. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. @@ -24868,7 +25383,7 @@ class ProjectsLocationsTensorboardsExperimentsOperationsResource { /// /// [name] - The name of the operation resource. /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+/experiments/\[^/\]+/operations/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/studies/\[^/\]+/operations/\[^/\]+$`. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. @@ -24907,7 +25422,7 @@ class ProjectsLocationsTensorboardsExperimentsOperationsResource { /// /// [name] - The name of the operation's parent resource. /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+/experiments/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/studies/\[^/\]+$`. /// /// [filter] - The standard list filter. /// @@ -24965,7 +25480,7 @@ class ProjectsLocationsTensorboardsExperimentsOperationsResource { /// /// [name] - The name of the operation resource to wait on. /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+/experiments/\[^/\]+/operations/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/studies/\[^/\]+/operations/\[^/\]+$`. /// /// [timeout] - The maximum duration to wait before timing out. 
If left blank, /// the wait will be at most the time permitted by the underlying HTTP/RPC @@ -25004,51 +25519,42 @@ class ProjectsLocationsTensorboardsExperimentsOperationsResource { } } -class ProjectsLocationsTensorboardsExperimentsRunsResource { +class ProjectsLocationsStudiesTrialsResource { final commons.ApiRequester _requester; - ProjectsLocationsTensorboardsExperimentsRunsOperationsResource - get operations => - ProjectsLocationsTensorboardsExperimentsRunsOperationsResource( - _requester); - ProjectsLocationsTensorboardsExperimentsRunsTimeSeriesResource - get timeSeries => - ProjectsLocationsTensorboardsExperimentsRunsTimeSeriesResource( - _requester); + ProjectsLocationsStudiesTrialsOperationsResource get operations => + ProjectsLocationsStudiesTrialsOperationsResource(_requester); - ProjectsLocationsTensorboardsExperimentsRunsResource( - commons.ApiRequester client) + ProjectsLocationsStudiesTrialsResource(commons.ApiRequester client) : _requester = client; - /// Batch create TensorboardRuns. + /// Adds a measurement of the objective metrics to a Trial. + /// + /// This measurement is assumed to have been taken before the Trial is + /// complete. /// /// [request] - The metadata request object. /// /// Request parameters: /// - /// [parent] - Required. The resource name of the TensorboardExperiment to - /// create the TensorboardRuns in. Format: - /// `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}` - /// The parent field in the CreateTensorboardRunRequest messages must match - /// this field. + /// [trialName] - Required. The name of the trial to add measurement. Format: + /// `projects/{project}/locations/{location}/studies/{study}/trials/{trial}` /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+/experiments/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/studies/\[^/\]+/trials/\[^/\]+$`. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. /// - /// Completes with a - /// [GoogleCloudAiplatformV1BatchCreateTensorboardRunsResponse]. + /// Completes with a [GoogleCloudAiplatformV1Trial]. /// /// Completes with a [commons.ApiRequestError] if the API endpoint returned an /// error. /// /// If the used [http.Client] completes with an error when making a REST call, /// this method will complete with the same error. - async.Future - batchCreate( - GoogleCloudAiplatformV1BatchCreateTensorboardRunsRequest request, - core.String parent, { + async.Future addTrialMeasurement( + GoogleCloudAiplatformV1AddTrialMeasurementRequest request, + core.String trialName, { core.String? $fields, }) async { final body_ = convert.json.encode(request); @@ -25056,7 +25562,8 @@ class ProjectsLocationsTensorboardsExperimentsRunsResource { if ($fields != null) 'fields': [$fields], }; - final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/runs:batchCreate'; + final url_ = + 'v1/' + core.Uri.encodeFull('$trialName') + ':addTrialMeasurement'; final response_ = await _requester.request( url_, @@ -25064,49 +25571,133 @@ class ProjectsLocationsTensorboardsExperimentsRunsResource { body: body_, queryParams: queryParams_, ); - return GoogleCloudAiplatformV1BatchCreateTensorboardRunsResponse.fromJson( + return GoogleCloudAiplatformV1Trial.fromJson( response_ as core.Map); } - /// Creates a TensorboardRun. + /// Checks whether a Trial should stop or not. + /// + /// Returns a long-running operation. 
When the operation is successful, it + /// will contain a CheckTrialEarlyStoppingStateResponse. /// /// [request] - The metadata request object. /// /// Request parameters: /// - /// [parent] - Required. The resource name of the TensorboardExperiment to - /// create the TensorboardRun in. Format: - /// `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}` + /// [trialName] - Required. The Trial's name. Format: + /// `projects/{project}/locations/{location}/studies/{study}/trials/{trial}` /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+/experiments/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/studies/\[^/\]+/trials/\[^/\]+$`. /// - /// [tensorboardRunId] - Required. The ID to use for the Tensorboard run, - /// which becomes the final component of the Tensorboard run's resource name. - /// This value should be 1-128 characters, and valid characters are `/a-z-/`. + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [GoogleLongrunningOperation]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future checkTrialEarlyStoppingState( + GoogleCloudAiplatformV1CheckTrialEarlyStoppingStateRequest request, + core.String trialName, { + core.String? $fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + + core.Uri.encodeFull('$trialName') + + ':checkTrialEarlyStoppingState'; + + final response_ = await _requester.request( + url_, + 'POST', + body: body_, + queryParams: queryParams_, + ); + return GoogleLongrunningOperation.fromJson( + response_ as core.Map); + } + + /// Marks a Trial as complete. + /// + /// [request] - The metadata request object. + /// + /// Request parameters: + /// + /// [name] - Required. The Trial's name. Format: + /// `projects/{project}/locations/{location}/studies/{study}/trials/{trial}` + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/studies/\[^/\]+/trials/\[^/\]+$`. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. /// - /// Completes with a [GoogleCloudAiplatformV1TensorboardRun]. + /// Completes with a [GoogleCloudAiplatformV1Trial]. /// /// Completes with a [commons.ApiRequestError] if the API endpoint returned an /// error. /// /// If the used [http.Client] completes with an error when making a REST call, /// this method will complete with the same error. - async.Future create( - GoogleCloudAiplatformV1TensorboardRun request, + async.Future complete( + GoogleCloudAiplatformV1CompleteTrialRequest request, + core.String name, { + core.String? $fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name') + ':complete'; + + final response_ = await _requester.request( + url_, + 'POST', + body: body_, + queryParams: queryParams_, + ); + return GoogleCloudAiplatformV1Trial.fromJson( + response_ as core.Map); + } + + /// Adds a user provided Trial to a Study. + /// + /// [request] - The metadata request object. + /// + /// Request parameters: + /// + /// [parent] - Required. 
The resource name of the Study to create the Trial + /// in. Format: `projects/{project}/locations/{location}/studies/{study}` + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/studies/\[^/\]+$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [GoogleCloudAiplatformV1Trial]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future create( + GoogleCloudAiplatformV1Trial request, core.String parent, { - core.String? tensorboardRunId, core.String? $fields, }) async { final body_ = convert.json.encode(request); final queryParams_ = >{ - if (tensorboardRunId != null) 'tensorboardRunId': [tensorboardRunId], if ($fields != null) 'fields': [$fields], }; - final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/runs'; + final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/trials'; final response_ = await _requester.request( url_, @@ -25114,30 +25705,30 @@ class ProjectsLocationsTensorboardsExperimentsRunsResource { body: body_, queryParams: queryParams_, ); - return GoogleCloudAiplatformV1TensorboardRun.fromJson( + return GoogleCloudAiplatformV1Trial.fromJson( response_ as core.Map); } - /// Deletes a TensorboardRun. + /// Deletes a Trial. /// /// Request parameters: /// - /// [name] - Required. The name of the TensorboardRun to be deleted. Format: - /// `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}/runs/{run}` + /// [name] - Required. The Trial's name. Format: + /// `projects/{project}/locations/{location}/studies/{study}/trials/{trial}` /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+/experiments/\[^/\]+/runs/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/studies/\[^/\]+/trials/\[^/\]+$`. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. /// - /// Completes with a [GoogleLongrunningOperation]. + /// Completes with a [GoogleProtobufEmpty]. /// /// Completes with a [commons.ApiRequestError] if the API endpoint returned an /// error. /// /// If the used [http.Client] completes with an error when making a REST call, /// this method will complete with the same error. - async.Future delete( + async.Future delete( core.String name, { core.String? $fields, }) async { @@ -25152,30 +25743,30 @@ class ProjectsLocationsTensorboardsExperimentsRunsResource { 'DELETE', queryParams: queryParams_, ); - return GoogleLongrunningOperation.fromJson( + return GoogleProtobufEmpty.fromJson( response_ as core.Map); } - /// Gets a TensorboardRun. + /// Gets a Trial. /// /// Request parameters: /// - /// [name] - Required. The name of the TensorboardRun resource. Format: - /// `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}/runs/{run}` + /// [name] - Required. The name of the Trial resource. Format: + /// `projects/{project}/locations/{location}/studies/{study}/trials/{trial}` /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+/experiments/\[^/\]+/runs/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/studies/\[^/\]+/trials/\[^/\]+$`. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. /// - /// Completes with a [GoogleCloudAiplatformV1TensorboardRun]. 
+ /// Completes with a [GoogleCloudAiplatformV1Trial]. /// /// Completes with a [commons.ApiRequestError] if the API endpoint returned an /// error. /// /// If the used [http.Client] completes with an error when making a REST call, /// this method will complete with the same error. - async.Future get( + async.Future get( core.String name, { core.String? $fields, }) async { @@ -25190,156 +25781,179 @@ class ProjectsLocationsTensorboardsExperimentsRunsResource { 'GET', queryParams: queryParams_, ); - return GoogleCloudAiplatformV1TensorboardRun.fromJson( + return GoogleCloudAiplatformV1Trial.fromJson( response_ as core.Map); } - /// Lists TensorboardRuns in a Location. + /// Lists the Trials associated with a Study. /// /// Request parameters: /// - /// [parent] - Required. The resource name of the TensorboardExperiment to - /// list TensorboardRuns. Format: - /// `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}` + /// [parent] - Required. The resource name of the Study to list the Trial + /// from. Format: `projects/{project}/locations/{location}/studies/{study}` /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+/experiments/\[^/\]+$`. - /// - /// [filter] - Lists the TensorboardRuns that match the filter expression. - /// - /// [orderBy] - Field to use to sort the list. - /// - /// [pageSize] - The maximum number of TensorboardRuns to return. The service - /// may return fewer than this value. If unspecified, at most 50 - /// TensorboardRuns are returned. The maximum value is 1000; values above 1000 - /// are coerced to 1000. + /// `^projects/\[^/\]+/locations/\[^/\]+/studies/\[^/\]+$`. /// - /// [pageToken] - A page token, received from a previous - /// TensorboardService.ListTensorboardRuns call. Provide this to retrieve the - /// subsequent page. When paginating, all other parameters provided to - /// TensorboardService.ListTensorboardRuns must match the call that provided - /// the page token. + /// [pageSize] - Optional. The number of Trials to retrieve per "page" of + /// results. If unspecified, the service will pick an appropriate default. /// - /// [readMask] - Mask specifying which fields to read. + /// [pageToken] - Optional. A page token to request the next page of results. + /// If unspecified, there are no subsequent pages. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. /// - /// Completes with a [GoogleCloudAiplatformV1ListTensorboardRunsResponse]. + /// Completes with a [GoogleCloudAiplatformV1ListTrialsResponse]. /// /// Completes with a [commons.ApiRequestError] if the API endpoint returned an /// error. /// /// If the used [http.Client] completes with an error when making a REST call, /// this method will complete with the same error. - async.Future list( + async.Future list( core.String parent, { - core.String? filter, - core.String? orderBy, core.int? pageSize, core.String? pageToken, - core.String? readMask, core.String? 
$fields, }) async { final queryParams_ = >{ - if (filter != null) 'filter': [filter], - if (orderBy != null) 'orderBy': [orderBy], if (pageSize != null) 'pageSize': ['${pageSize}'], if (pageToken != null) 'pageToken': [pageToken], - if (readMask != null) 'readMask': [readMask], if ($fields != null) 'fields': [$fields], }; - final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/runs'; + final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/trials'; final response_ = await _requester.request( url_, 'GET', queryParams: queryParams_, ); - return GoogleCloudAiplatformV1ListTensorboardRunsResponse.fromJson( + return GoogleCloudAiplatformV1ListTrialsResponse.fromJson( response_ as core.Map); } - /// Updates a TensorboardRun. + /// Lists the pareto-optimal Trials for multi-objective Study or the optimal + /// Trials for single-objective Study. + /// + /// The definition of pareto-optimal can be checked in wiki page. + /// https://en.wikipedia.org/wiki/Pareto_efficiency /// /// [request] - The metadata request object. /// /// Request parameters: /// - /// [name] - Output only. Name of the TensorboardRun. Format: - /// `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}/runs/{run}` + /// [parent] - Required. The name of the Study that the optimal Trial belongs + /// to. /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+/experiments/\[^/\]+/runs/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/studies/\[^/\]+$`. /// - /// [updateMask] - Required. Field mask is used to specify the fields to be - /// overwritten in the TensorboardRun resource by the update. The fields - /// specified in the update_mask are relative to the resource, not the full - /// request. A field is overwritten if it's in the mask. If the user does not - /// provide a mask then all fields are overwritten if new values are - /// specified. + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [GoogleCloudAiplatformV1ListOptimalTrialsResponse]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future + listOptimalTrials( + GoogleCloudAiplatformV1ListOptimalTrialsRequest request, + core.String parent, { + core.String? $fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = + 'v1/' + core.Uri.encodeFull('$parent') + '/trials:listOptimalTrials'; + + final response_ = await _requester.request( + url_, + 'POST', + body: body_, + queryParams: queryParams_, + ); + return GoogleCloudAiplatformV1ListOptimalTrialsResponse.fromJson( + response_ as core.Map); + } + + /// Stops a Trial. + /// + /// [request] - The metadata request object. + /// + /// Request parameters: + /// + /// [name] - Required. The Trial's name. Format: + /// `projects/{project}/locations/{location}/studies/{study}/trials/{trial}` + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/studies/\[^/\]+/trials/\[^/\]+$`. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. /// - /// Completes with a [GoogleCloudAiplatformV1TensorboardRun]. + /// Completes with a [GoogleCloudAiplatformV1Trial]. 
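// A minimal sketch of the Vizier trial lifecycle exposed by
// ProjectsLocationsStudiesTrialsResource: report an intermediate measurement
// for a running trial, then mark the trial complete. The `api` object is
// constructed as in the earlier sketch; the Measurement/Metric field names
// (stepCount, metricId, value) and the metric values here are assumptions
// used for illustration only.
import 'package:googleapis/aiplatform/v1.dart';

Future<void> reportAndCompleteTrial(AiplatformApi api, String trialName) async {
  final trials = api.projects.locations.studies.trials;

  // Intermediate objective measurement taken before the trial is complete.
  final measurement = GoogleCloudAiplatformV1Measurement(
    stepCount: '100',
    metrics: [
      GoogleCloudAiplatformV1MeasurementMetric(metricId: 'accuracy', value: 0.91),
    ],
  );
  await trials.addTrialMeasurement(
    GoogleCloudAiplatformV1AddTrialMeasurementRequest(measurement: measurement),
    trialName,
  );

  // Mark the trial complete, reusing the measurement as the final result.
  final completed = await trials.complete(
    GoogleCloudAiplatformV1CompleteTrialRequest(finalMeasurement: measurement),
    trialName,
  );
  print('Trial ${completed.name} ended in state ${completed.state}');
}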
/// /// Completes with a [commons.ApiRequestError] if the API endpoint returned an /// error. /// /// If the used [http.Client] completes with an error when making a REST call, /// this method will complete with the same error. - async.Future patch( - GoogleCloudAiplatformV1TensorboardRun request, + async.Future stop( + GoogleCloudAiplatformV1StopTrialRequest request, core.String name, { - core.String? updateMask, core.String? $fields, }) async { final body_ = convert.json.encode(request); final queryParams_ = >{ - if (updateMask != null) 'updateMask': [updateMask], if ($fields != null) 'fields': [$fields], }; - final url_ = 'v1/' + core.Uri.encodeFull('$name'); + final url_ = 'v1/' + core.Uri.encodeFull('$name') + ':stop'; final response_ = await _requester.request( url_, - 'PATCH', + 'POST', body: body_, queryParams: queryParams_, ); - return GoogleCloudAiplatformV1TensorboardRun.fromJson( + return GoogleCloudAiplatformV1Trial.fromJson( response_ as core.Map); } - /// Write time series data points into multiple TensorboardTimeSeries under a - /// TensorboardRun. + /// Adds one or more Trials to a Study, with parameter values suggested by + /// Vertex AI Vizier. /// - /// If any data fail to be ingested, an error is returned. + /// Returns a long-running operation associated with the generation of Trial + /// suggestions. When this long-running operation succeeds, it will contain a + /// SuggestTrialsResponse. /// /// [request] - The metadata request object. /// /// Request parameters: /// - /// [tensorboardRun] - Required. The resource name of the TensorboardRun to - /// write data to. Format: - /// `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}/runs/{run}` + /// [parent] - Required. The project and location that the Study belongs to. + /// Format: `projects/{project}/locations/{location}/studies/{study}` /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+/experiments/\[^/\]+/runs/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/studies/\[^/\]+$`. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. /// - /// Completes with a [GoogleCloudAiplatformV1WriteTensorboardRunDataResponse]. + /// Completes with a [GoogleLongrunningOperation]. /// /// Completes with a [commons.ApiRequestError] if the API endpoint returned an /// error. /// /// If the used [http.Client] completes with an error when making a REST call, /// this method will complete with the same error. - async.Future write( - GoogleCloudAiplatformV1WriteTensorboardRunDataRequest request, - core.String tensorboardRun, { + async.Future suggest( + GoogleCloudAiplatformV1SuggestTrialsRequest request, + core.String parent, { core.String? 
$fields, }) async { final body_ = convert.json.encode(request); @@ -25347,7 +25961,7 @@ class ProjectsLocationsTensorboardsExperimentsRunsResource { if ($fields != null) 'fields': [$fields], }; - final url_ = 'v1/' + core.Uri.encodeFull('$tensorboardRun') + ':write'; + final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/trials:suggest'; final response_ = await _requester.request( url_, @@ -25355,16 +25969,15 @@ class ProjectsLocationsTensorboardsExperimentsRunsResource { body: body_, queryParams: queryParams_, ); - return GoogleCloudAiplatformV1WriteTensorboardRunDataResponse.fromJson( + return GoogleLongrunningOperation.fromJson( response_ as core.Map); } } -class ProjectsLocationsTensorboardsExperimentsRunsOperationsResource { +class ProjectsLocationsStudiesTrialsOperationsResource { final commons.ApiRequester _requester; - ProjectsLocationsTensorboardsExperimentsRunsOperationsResource( - commons.ApiRequester client) + ProjectsLocationsStudiesTrialsOperationsResource(commons.ApiRequester client) : _requester = client; /// Starts asynchronous cancellation on a long-running operation. @@ -25375,14 +25988,14 @@ class ProjectsLocationsTensorboardsExperimentsRunsOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// Request parameters: /// /// [name] - The name of the operation resource to be cancelled. /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+/experiments/\[^/\]+/runs/\[^/\]+/operations/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/studies/\[^/\]+/trials/\[^/\]+/operations/\[^/\]+$`. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. @@ -25423,7 +26036,7 @@ class ProjectsLocationsTensorboardsExperimentsRunsOperationsResource { /// /// [name] - The name of the operation resource to be deleted. /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+/experiments/\[^/\]+/runs/\[^/\]+/operations/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/studies/\[^/\]+/trials/\[^/\]+/operations/\[^/\]+$`. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. @@ -25463,7 +26076,7 @@ class ProjectsLocationsTensorboardsExperimentsRunsOperationsResource { /// /// [name] - The name of the operation resource. /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+/experiments/\[^/\]+/runs/\[^/\]+/operations/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/studies/\[^/\]+/trials/\[^/\]+/operations/\[^/\]+$`. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. @@ -25502,7 +26115,7 @@ class ProjectsLocationsTensorboardsExperimentsRunsOperationsResource { /// /// [name] - The name of the operation's parent resource. /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+/experiments/\[^/\]+/runs/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/studies/\[^/\]+/trials/\[^/\]+$`. /// /// [filter] - The standard list filter. 
/// @@ -25560,7 +26173,7 @@ class ProjectsLocationsTensorboardsExperimentsRunsOperationsResource { /// /// [name] - The name of the operation resource to wait on. /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+/experiments/\[^/\]+/runs/\[^/\]+/operations/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/studies/\[^/\]+/trials/\[^/\]+/operations/\[^/\]+$`. /// /// [timeout] - The maximum duration to wait before timing out. If left blank, /// the wait will be at most the time permitted by the underlying HTTP/RPC @@ -25599,79 +26212,81 @@ class ProjectsLocationsTensorboardsExperimentsRunsOperationsResource { } } -class ProjectsLocationsTensorboardsExperimentsRunsTimeSeriesResource { +class ProjectsLocationsTensorboardsResource { final commons.ApiRequester _requester; - ProjectsLocationsTensorboardsExperimentsRunsTimeSeriesOperationsResource - get operations => - ProjectsLocationsTensorboardsExperimentsRunsTimeSeriesOperationsResource( - _requester); + ProjectsLocationsTensorboardsExperimentsResource get experiments => + ProjectsLocationsTensorboardsExperimentsResource(_requester); + ProjectsLocationsTensorboardsOperationsResource get operations => + ProjectsLocationsTensorboardsOperationsResource(_requester); - ProjectsLocationsTensorboardsExperimentsRunsTimeSeriesResource( - commons.ApiRequester client) + ProjectsLocationsTensorboardsResource(commons.ApiRequester client) : _requester = client; - /// Creates a TensorboardTimeSeries. + /// Reads multiple TensorboardTimeSeries' data. /// - /// [request] - The metadata request object. + /// The data point number limit is 1000 for scalars, 100 for tensors and blob + /// references. If the number of data points stored is less than the limit, + /// all data is returned. Otherwise, the number limit of data points is + /// randomly selected from this time series and returned. /// /// Request parameters: /// - /// [parent] - Required. The resource name of the TensorboardRun to create the - /// TensorboardTimeSeries in. Format: - /// `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}/runs/{run}` + /// [tensorboard] - Required. The resource name of the Tensorboard containing + /// TensorboardTimeSeries to read data from. Format: + /// `projects/{project}/locations/{location}/tensorboards/{tensorboard}`. The + /// TensorboardTimeSeries referenced by time_series must be sub resources of + /// this Tensorboard. /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+/experiments/\[^/\]+/runs/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+$`. /// - /// [tensorboardTimeSeriesId] - Optional. The user specified unique ID to use - /// for the TensorboardTimeSeries, which becomes the final component of the - /// TensorboardTimeSeries's resource name. This value should match "a-z0-9{0, - /// 127}" + /// [timeSeries] - Required. The resource names of the TensorboardTimeSeries + /// to read data from. Format: + /// `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}/runs/{run}/timeSeries/{time_series}` /// /// [$fields] - Selector specifying which fields to include in a partial /// response. /// - /// Completes with a [GoogleCloudAiplatformV1TensorboardTimeSeries]. + /// Completes with a + /// [GoogleCloudAiplatformV1BatchReadTensorboardTimeSeriesDataResponse]. /// /// Completes with a [commons.ApiRequestError] if the API endpoint returned an /// error. 
/// /// If the used [http.Client] completes with an error when making a REST call, /// this method will complete with the same error. - async.Future create( - GoogleCloudAiplatformV1TensorboardTimeSeries request, - core.String parent, { - core.String? tensorboardTimeSeriesId, + async + .Future + batchRead( + core.String tensorboard, { + core.List? timeSeries, core.String? $fields, }) async { - final body_ = convert.json.encode(request); final queryParams_ = >{ - if (tensorboardTimeSeriesId != null) - 'tensorboardTimeSeriesId': [tensorboardTimeSeriesId], + if (timeSeries != null) 'timeSeries': timeSeries, if ($fields != null) 'fields': [$fields], }; - final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/timeSeries'; + final url_ = 'v1/' + core.Uri.encodeFull('$tensorboard') + ':batchRead'; final response_ = await _requester.request( url_, - 'POST', - body: body_, + 'GET', queryParams: queryParams_, ); - return GoogleCloudAiplatformV1TensorboardTimeSeries.fromJson( - response_ as core.Map); + return GoogleCloudAiplatformV1BatchReadTensorboardTimeSeriesDataResponse + .fromJson(response_ as core.Map); } - /// Deletes a TensorboardTimeSeries. + /// Creates a Tensorboard. + /// + /// [request] - The metadata request object. /// /// Request parameters: /// - /// [name] - Required. The name of the TensorboardTimeSeries to be deleted. - /// Format: - /// `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}/runs/{run}/timeSeries/{time_series}` - /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+/experiments/\[^/\]+/runs/\[^/\]+/timeSeries/\[^/\]+$`. + /// [parent] - Required. The resource name of the Location to create the + /// Tensorboard in. Format: `projects/{project}/locations/{location}` + /// Value must have pattern `^projects/\[^/\]+/locations/\[^/\]+$`. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. @@ -25683,95 +26298,86 @@ class ProjectsLocationsTensorboardsExperimentsRunsTimeSeriesResource { /// /// If the used [http.Client] completes with an error when making a REST call, /// this method will complete with the same error. - async.Future delete( - core.String name, { + async.Future create( + GoogleCloudAiplatformV1Tensorboard request, + core.String parent, { core.String? $fields, }) async { + final body_ = convert.json.encode(request); final queryParams_ = >{ if ($fields != null) 'fields': [$fields], }; - final url_ = 'v1/' + core.Uri.encodeFull('$name'); + final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/tensorboards'; final response_ = await _requester.request( url_, - 'DELETE', + 'POST', + body: body_, queryParams: queryParams_, ); return GoogleLongrunningOperation.fromJson( response_ as core.Map); } - /// Exports a TensorboardTimeSeries' data. - /// - /// Data is returned in paginated responses. - /// - /// [request] - The metadata request object. + /// Deletes a Tensorboard. /// /// Request parameters: /// - /// [tensorboardTimeSeries] - Required. The resource name of the - /// TensorboardTimeSeries to export data from. Format: - /// `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}/runs/{run}/timeSeries/{time_series}` + /// [name] - Required. The name of the Tensorboard to be deleted. 
Format: + /// `projects/{project}/locations/{location}/tensorboards/{tensorboard}` /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+/experiments/\[^/\]+/runs/\[^/\]+/timeSeries/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+$`. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. /// - /// Completes with a - /// [GoogleCloudAiplatformV1ExportTensorboardTimeSeriesDataResponse]. + /// Completes with a [GoogleLongrunningOperation]. /// /// Completes with a [commons.ApiRequestError] if the API endpoint returned an /// error. /// /// If the used [http.Client] completes with an error when making a REST call, /// this method will complete with the same error. - async.Future - exportTensorboardTimeSeries( - GoogleCloudAiplatformV1ExportTensorboardTimeSeriesDataRequest request, - core.String tensorboardTimeSeries, { + async.Future delete( + core.String name, { core.String? $fields, }) async { - final body_ = convert.json.encode(request); final queryParams_ = >{ if ($fields != null) 'fields': [$fields], }; - final url_ = 'v1/' + - core.Uri.encodeFull('$tensorboardTimeSeries') + - ':exportTensorboardTimeSeries'; + final url_ = 'v1/' + core.Uri.encodeFull('$name'); final response_ = await _requester.request( url_, - 'POST', - body: body_, + 'DELETE', queryParams: queryParams_, ); - return GoogleCloudAiplatformV1ExportTensorboardTimeSeriesDataResponse - .fromJson(response_ as core.Map); + return GoogleLongrunningOperation.fromJson( + response_ as core.Map); } - /// Gets a TensorboardTimeSeries. + /// Gets a Tensorboard. /// /// Request parameters: /// - /// [name] - Required. The name of the TensorboardTimeSeries resource. Format: - /// `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}/runs/{run}/timeSeries/{time_series}` + /// [name] - Required. The name of the Tensorboard resource. Format: + /// `projects/{project}/locations/{location}/tensorboards/{tensorboard}` /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+/experiments/\[^/\]+/runs/\[^/\]+/timeSeries/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+$`. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. /// - /// Completes with a [GoogleCloudAiplatformV1TensorboardTimeSeries]. + /// Completes with a [GoogleCloudAiplatformV1Tensorboard]. /// /// Completes with a [commons.ApiRequestError] if the API endpoint returned an /// error. /// /// If the used [http.Client] completes with an error when making a REST call, /// this method will complete with the same error. - async.Future get( + async.Future get( core.String name, { core.String? $fields, }) async { @@ -25786,50 +26392,45 @@ class ProjectsLocationsTensorboardsExperimentsRunsTimeSeriesResource { 'GET', queryParams: queryParams_, ); - return GoogleCloudAiplatformV1TensorboardTimeSeries.fromJson( + return GoogleCloudAiplatformV1Tensorboard.fromJson( response_ as core.Map); } - /// Lists TensorboardTimeSeries in a Location. + /// Lists Tensorboards in a Location. /// /// Request parameters: /// - /// [parent] - Required. The resource name of the TensorboardRun to list - /// TensorboardTimeSeries. Format: - /// `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}/runs/{run}` - /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+/experiments/\[^/\]+/runs/\[^/\]+$`. 
+ /// [parent] - Required. The resource name of the Location to list + /// Tensorboards. Format: `projects/{project}/locations/{location}` + /// Value must have pattern `^projects/\[^/\]+/locations/\[^/\]+$`. /// - /// [filter] - Lists the TensorboardTimeSeries that match the filter - /// expression. + /// [filter] - Lists the Tensorboards that match the filter expression. /// /// [orderBy] - Field to use to sort the list. /// - /// [pageSize] - The maximum number of TensorboardTimeSeries to return. The - /// service may return fewer than this value. If unspecified, at most 50 - /// TensorboardTimeSeries are returned. The maximum value is 1000; values - /// above 1000 are coerced to 1000. + /// [pageSize] - The maximum number of Tensorboards to return. The service may + /// return fewer than this value. If unspecified, at most 100 Tensorboards are + /// returned. The maximum value is 100; values above 100 are coerced to 100. /// /// [pageToken] - A page token, received from a previous - /// TensorboardService.ListTensorboardTimeSeries call. Provide this to - /// retrieve the subsequent page. When paginating, all other parameters - /// provided to TensorboardService.ListTensorboardTimeSeries must match the - /// call that provided the page token. + /// TensorboardService.ListTensorboards call. Provide this to retrieve the + /// subsequent page. When paginating, all other parameters provided to + /// TensorboardService.ListTensorboards must match the call that provided the + /// page token. /// /// [readMask] - Mask specifying which fields to read. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. /// - /// Completes with a - /// [GoogleCloudAiplatformV1ListTensorboardTimeSeriesResponse]. + /// Completes with a [GoogleCloudAiplatformV1ListTensorboardsResponse]. /// /// Completes with a [commons.ApiRequestError] if the API endpoint returned an /// error. /// /// If the used [http.Client] completes with an error when making a REST call, /// this method will complete with the same error. - async.Future list( + async.Future list( core.String parent, { core.String? filter, core.String? orderBy, @@ -25847,46 +26448,47 @@ class ProjectsLocationsTensorboardsExperimentsRunsTimeSeriesResource { if ($fields != null) 'fields': [$fields], }; - final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/timeSeries'; + final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/tensorboards'; final response_ = await _requester.request( url_, 'GET', queryParams: queryParams_, ); - return GoogleCloudAiplatformV1ListTensorboardTimeSeriesResponse.fromJson( + return GoogleCloudAiplatformV1ListTensorboardsResponse.fromJson( response_ as core.Map); } - /// Updates a TensorboardTimeSeries. + /// Updates a Tensorboard. /// /// [request] - The metadata request object. /// /// Request parameters: /// - /// [name] - Output only. Name of the TensorboardTimeSeries. + /// [name] - Output only. Name of the Tensorboard. Format: + /// `projects/{project}/locations/{location}/tensorboards/{tensorboard}` /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+/experiments/\[^/\]+/runs/\[^/\]+/timeSeries/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+$`. /// /// [updateMask] - Required. Field mask is used to specify the fields to be - /// overwritten in the TensorboardTimeSeries resource by the update. The - /// fields specified in the update_mask are relative to the resource, not the - /// full request. 
A field is overwritten if it's in the mask. If the user does - /// not provide a mask then all fields are overwritten if new values are + /// overwritten in the Tensorboard resource by the update. The fields + /// specified in the update_mask are relative to the resource, not the full + /// request. A field is overwritten if it's in the mask. If the user does not + /// provide a mask then all fields are overwritten if new values are /// specified. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. /// - /// Completes with a [GoogleCloudAiplatformV1TensorboardTimeSeries]. + /// Completes with a [GoogleLongrunningOperation]. /// /// Completes with a [commons.ApiRequestError] if the API endpoint returned an /// error. /// /// If the used [http.Client] completes with an error when making a REST call, /// this method will complete with the same error. - async.Future patch( - GoogleCloudAiplatformV1TensorboardTimeSeries request, + async.Future patch( + GoogleCloudAiplatformV1Tensorboard request, core.String name, { core.String? updateMask, core.String? $fields, @@ -25905,221 +26507,208 @@ class ProjectsLocationsTensorboardsExperimentsRunsTimeSeriesResource { body: body_, queryParams: queryParams_, ); - return GoogleCloudAiplatformV1TensorboardTimeSeries.fromJson( + return GoogleLongrunningOperation.fromJson( response_ as core.Map); } - /// Reads a TensorboardTimeSeries' data. - /// - /// By default, if the number of data points stored is less than 1000, all - /// data is returned. Otherwise, 1000 data points is randomly selected from - /// this time series and returned. This value can be changed by changing - /// max_data_points, which can't be greater than 10k. + /// Returns the storage size for a given TensorBoard instance. /// /// Request parameters: /// - /// [tensorboardTimeSeries] - Required. The resource name of the - /// TensorboardTimeSeries to read data from. Format: - /// `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}/runs/{run}/timeSeries/{time_series}` + /// [tensorboard] - Required. The name of the Tensorboard resource. Format: + /// `projects/{project}/locations/{location}/tensorboards/{tensorboard}` /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+/experiments/\[^/\]+/runs/\[^/\]+/timeSeries/\[^/\]+$`. - /// - /// [filter] - Reads the TensorboardTimeSeries' data that match the filter - /// expression. - /// - /// [maxDataPoints] - The maximum number of TensorboardTimeSeries' data to - /// return. This value should be a positive integer. This value can be set to - /// -1 to return all data. + /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+$`. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. /// - /// Completes with a - /// [GoogleCloudAiplatformV1ReadTensorboardTimeSeriesDataResponse]. + /// Completes with a [GoogleCloudAiplatformV1ReadTensorboardSizeResponse]. /// /// Completes with a [commons.ApiRequestError] if the API endpoint returned an /// error. /// /// If the used [http.Client] completes with an error when making a REST call, /// this method will complete with the same error. - async.Future - read( - core.String tensorboardTimeSeries, { - core.String? filter, - core.int? maxDataPoints, + async.Future readSize( + core.String tensorboard, { core.String? 
$fields, }) async { final queryParams_ = >{ - if (filter != null) 'filter': [filter], - if (maxDataPoints != null) 'maxDataPoints': ['${maxDataPoints}'], if ($fields != null) 'fields': [$fields], }; - final url_ = - 'v1/' + core.Uri.encodeFull('$tensorboardTimeSeries') + ':read'; + final url_ = 'v1/' + core.Uri.encodeFull('$tensorboard') + ':readSize'; final response_ = await _requester.request( url_, 'GET', queryParams: queryParams_, ); - return GoogleCloudAiplatformV1ReadTensorboardTimeSeriesDataResponse - .fromJson(response_ as core.Map); + return GoogleCloudAiplatformV1ReadTensorboardSizeResponse.fromJson( + response_ as core.Map); } - /// Gets bytes of TensorboardBlobs. - /// - /// This is to allow reading blob data stored in consumer project's Cloud - /// Storage bucket without users having to obtain Cloud Storage access - /// permission. + /// Returns a list of monthly active users for a given TensorBoard instance. /// /// Request parameters: /// - /// [timeSeries] - Required. The resource name of the TensorboardTimeSeries to - /// list Blobs. Format: - /// `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}/runs/{run}/timeSeries/{time_series}` + /// [tensorboard] - Required. The name of the Tensorboard resource. Format: + /// `projects/{project}/locations/{location}/tensorboards/{tensorboard}` /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+/experiments/\[^/\]+/runs/\[^/\]+/timeSeries/\[^/\]+$`. - /// - /// [blobIds] - IDs of the blobs to read. + /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+$`. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. /// - /// Completes with a [GoogleCloudAiplatformV1ReadTensorboardBlobDataResponse]. + /// Completes with a [GoogleCloudAiplatformV1ReadTensorboardUsageResponse]. /// /// Completes with a [commons.ApiRequestError] if the API endpoint returned an /// error. /// /// If the used [http.Client] completes with an error when making a REST call, /// this method will complete with the same error. - async.Future - readBlobData( - core.String timeSeries, { - core.List? blobIds, + async.Future readUsage( + core.String tensorboard, { core.String? $fields, }) async { final queryParams_ = >{ - if (blobIds != null) 'blobIds': blobIds, if ($fields != null) 'fields': [$fields], }; - final url_ = 'v1/' + core.Uri.encodeFull('$timeSeries') + ':readBlobData'; + final url_ = 'v1/' + core.Uri.encodeFull('$tensorboard') + ':readUsage'; final response_ = await _requester.request( url_, 'GET', queryParams: queryParams_, ); - return GoogleCloudAiplatformV1ReadTensorboardBlobDataResponse.fromJson( + return GoogleCloudAiplatformV1ReadTensorboardUsageResponse.fromJson( response_ as core.Map); } } -class ProjectsLocationsTensorboardsExperimentsRunsTimeSeriesOperationsResource { +class ProjectsLocationsTensorboardsExperimentsResource { final commons.ApiRequester _requester; - ProjectsLocationsTensorboardsExperimentsRunsTimeSeriesOperationsResource( - commons.ApiRequester client) + ProjectsLocationsTensorboardsExperimentsOperationsResource get operations => + ProjectsLocationsTensorboardsExperimentsOperationsResource(_requester); + ProjectsLocationsTensorboardsExperimentsRunsResource get runs => + ProjectsLocationsTensorboardsExperimentsRunsResource(_requester); + + ProjectsLocationsTensorboardsExperimentsResource(commons.ApiRequester client) : _requester = client; - /// Starts asynchronous cancellation on a long-running operation. 
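// A small sketch of the Tensorboard read methods defined above (batchRead,
// readSize, readUsage). `api` is an AiplatformApi built against the regional
// endpoint as in the earlier sketch; the time-series resource name and the
// `timeSeriesData` / `storageSizeByte` field names are assumptions for
// illustration.
import 'package:googleapis/aiplatform/v1.dart';

Future<void> inspectTensorboard(AiplatformApi api, String tensorboardName) async {
  final tensorboards = api.projects.locations.tensorboards;

  // batchRead returns at most 1000 scalar points (100 for tensors/blob
  // references) per referenced TensorboardTimeSeries.
  final batch = await tensorboards.batchRead(
    tensorboardName,
    timeSeries: [
      // Hypothetical time-series name for illustration.
      '$tensorboardName/experiments/exp-1/runs/run-1/timeSeries/loss',
    ],
  );
  print('Read ${batch.timeSeriesData?.length ?? 0} time series');

  final size = await tensorboards.readSize(tensorboardName);
  print('Storage used (bytes): ${size.storageSizeByte}');

  final usage = await tensorboards.readUsage(tensorboardName);
  print('Monthly active users: ${usage.toJson()}');
}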
+ /// Batch create TensorboardTimeSeries that belong to a TensorboardExperiment. /// - /// The server makes a best effort to cancel the operation, but success is not - /// guaranteed. If the server doesn't support this method, it returns - /// `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation - /// or other methods to check whether the cancellation succeeded or whether - /// the operation completed despite cancellation. On successful cancellation, - /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// [request] - The metadata request object. /// /// Request parameters: /// - /// [name] - The name of the operation resource to be cancelled. + /// [parent] - Required. The resource name of the TensorboardExperiment to + /// create the TensorboardTimeSeries in. Format: + /// `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}` + /// The TensorboardRuns referenced by the parent fields in the + /// CreateTensorboardTimeSeriesRequest messages must be sub resources of this + /// TensorboardExperiment. /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+/experiments/\[^/\]+/runs/\[^/\]+/timeSeries/\[^/\]+/operations/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+/experiments/\[^/\]+$`. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. /// - /// Completes with a [GoogleProtobufEmpty]. + /// Completes with a + /// [GoogleCloudAiplatformV1BatchCreateTensorboardTimeSeriesResponse]. /// /// Completes with a [commons.ApiRequestError] if the API endpoint returned an /// error. /// /// If the used [http.Client] completes with an error when making a REST call, /// this method will complete with the same error. - async.Future cancel( - core.String name, { + async.Future + batchCreate( + GoogleCloudAiplatformV1BatchCreateTensorboardTimeSeriesRequest request, + core.String parent, { core.String? $fields, }) async { + final body_ = convert.json.encode(request); final queryParams_ = >{ if ($fields != null) 'fields': [$fields], }; - final url_ = 'v1/' + core.Uri.encodeFull('$name') + ':cancel'; + final url_ = 'v1/' + core.Uri.encodeFull('$parent') + ':batchCreate'; final response_ = await _requester.request( url_, 'POST', + body: body_, queryParams: queryParams_, ); - return GoogleProtobufEmpty.fromJson( - response_ as core.Map); + return GoogleCloudAiplatformV1BatchCreateTensorboardTimeSeriesResponse + .fromJson(response_ as core.Map); } - /// Deletes a long-running operation. + /// Creates a TensorboardExperiment. /// - /// This method indicates that the client is no longer interested in the - /// operation result. It does not cancel the operation. If the server doesn't - /// support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. + /// [request] - The metadata request object. /// /// Request parameters: /// - /// [name] - The name of the operation resource to be deleted. + /// [parent] - Required. The resource name of the Tensorboard to create the + /// TensorboardExperiment in. Format: + /// `projects/{project}/locations/{location}/tensorboards/{tensorboard}` /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+/experiments/\[^/\]+/runs/\[^/\]+/timeSeries/\[^/\]+/operations/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+$`. 
+ /// + /// [tensorboardExperimentId] - Required. The ID to use for the Tensorboard + /// experiment, which becomes the final component of the Tensorboard + /// experiment's resource name. This value should be 1-128 characters, and + /// valid characters are `/a-z-/`. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. /// - /// Completes with a [GoogleProtobufEmpty]. + /// Completes with a [GoogleCloudAiplatformV1TensorboardExperiment]. /// /// Completes with a [commons.ApiRequestError] if the API endpoint returned an /// error. /// /// If the used [http.Client] completes with an error when making a REST call, /// this method will complete with the same error. - async.Future delete( - core.String name, { + async.Future create( + GoogleCloudAiplatformV1TensorboardExperiment request, + core.String parent, { + core.String? tensorboardExperimentId, core.String? $fields, }) async { + final body_ = convert.json.encode(request); final queryParams_ = >{ + if (tensorboardExperimentId != null) + 'tensorboardExperimentId': [tensorboardExperimentId], if ($fields != null) 'fields': [$fields], }; - final url_ = 'v1/' + core.Uri.encodeFull('$name'); + final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/experiments'; final response_ = await _requester.request( url_, - 'DELETE', + 'POST', + body: body_, queryParams: queryParams_, ); - return GoogleProtobufEmpty.fromJson( + return GoogleCloudAiplatformV1TensorboardExperiment.fromJson( response_ as core.Map); } - /// Gets the latest state of a long-running operation. - /// - /// Clients can use this method to poll the operation result at intervals as - /// recommended by the API service. + /// Deletes a TensorboardExperiment. /// /// Request parameters: /// - /// [name] - The name of the operation resource. + /// [name] - Required. The name of the TensorboardExperiment to be deleted. + /// Format: + /// `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}` /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+/experiments/\[^/\]+/runs/\[^/\]+/timeSeries/\[^/\]+/operations/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+/experiments/\[^/\]+$`. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. @@ -26131,7 +26720,7 @@ class ProjectsLocationsTensorboardsExperimentsRunsTimeSeriesOperationsResource { /// /// If the used [http.Client] completes with an error when making a REST call, /// this method will complete with the same error. - async.Future get( + async.Future delete( core.String name, { core.String? $fields, }) async { @@ -26143,195 +26732,300 @@ class ProjectsLocationsTensorboardsExperimentsRunsTimeSeriesOperationsResource { final response_ = await _requester.request( url_, - 'GET', + 'DELETE', queryParams: queryParams_, ); return GoogleLongrunningOperation.fromJson( response_ as core.Map); } - /// Lists operations that match the specified filter in the request. - /// - /// If the server doesn't support this method, it returns `UNIMPLEMENTED`. + /// Gets a TensorboardExperiment. /// /// Request parameters: /// - /// [name] - The name of the operation's parent resource. + /// [name] - Required. The name of the TensorboardExperiment resource. 
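// Creating a TensorboardExperiment with a caller-chosen ID, as described by
// the parameter doc above. `api` is constructed as in the earlier sketch; the
// displayName value and the experiment ID are illustrative assumptions.
import 'package:googleapis/aiplatform/v1.dart';

Future<GoogleCloudAiplatformV1TensorboardExperiment> createExperiment(
    AiplatformApi api, String tensorboardName) {
  return api.projects.locations.tensorboards.experiments.create(
    GoogleCloudAiplatformV1TensorboardExperiment(displayName: 'baseline run'),
    tensorboardName,
    // 1-128 characters drawn from `/a-z-/`, per the parameter doc above.
    tensorboardExperimentId: 'baseline-exp',
  );
}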
Format: + /// `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}` /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+/experiments/\[^/\]+/runs/\[^/\]+/timeSeries/\[^/\]+$`. - /// - /// [filter] - The standard list filter. - /// - /// [pageSize] - The standard list page size. - /// - /// [pageToken] - The standard list page token. + /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+/experiments/\[^/\]+$`. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. /// - /// Completes with a [GoogleLongrunningListOperationsResponse]. + /// Completes with a [GoogleCloudAiplatformV1TensorboardExperiment]. /// /// Completes with a [commons.ApiRequestError] if the API endpoint returned an /// error. /// /// If the used [http.Client] completes with an error when making a REST call, /// this method will complete with the same error. - async.Future list( + async.Future get( core.String name, { - core.String? filter, - core.int? pageSize, - core.String? pageToken, core.String? $fields, }) async { final queryParams_ = >{ - if (filter != null) 'filter': [filter], - if (pageSize != null) 'pageSize': ['${pageSize}'], - if (pageToken != null) 'pageToken': [pageToken], if ($fields != null) 'fields': [$fields], }; - final url_ = 'v1/' + core.Uri.encodeFull('$name') + '/operations'; + final url_ = 'v1/' + core.Uri.encodeFull('$name'); final response_ = await _requester.request( url_, 'GET', queryParams: queryParams_, ); - return GoogleLongrunningListOperationsResponse.fromJson( + return GoogleCloudAiplatformV1TensorboardExperiment.fromJson( response_ as core.Map); } - /// Waits until the specified long-running operation is done or reaches at - /// most a specified timeout, returning the latest state. - /// - /// If the operation is already done, the latest state is immediately - /// returned. If the timeout specified is greater than the default HTTP/RPC - /// timeout, the HTTP/RPC timeout is used. If the server does not support this - /// method, it returns `google.rpc.Code.UNIMPLEMENTED`. Note that this method - /// is on a best-effort basis. It may return the latest state before the - /// specified timeout (including immediately), meaning even an immediate - /// response is no guarantee that the operation is done. + /// Lists TensorboardExperiments in a Location. /// /// Request parameters: /// - /// [name] - The name of the operation resource to wait on. + /// [parent] - Required. The resource name of the Tensorboard to list + /// TensorboardExperiments. Format: + /// `projects/{project}/locations/{location}/tensorboards/{tensorboard}` /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+/experiments/\[^/\]+/runs/\[^/\]+/timeSeries/\[^/\]+/operations/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+$`. /// - /// [timeout] - The maximum duration to wait before timing out. If left blank, - /// the wait will be at most the time permitted by the underlying HTTP/RPC - /// protocol. If RPC context deadline is also specified, the shorter one will - /// be used. + /// [filter] - Lists the TensorboardExperiments that match the filter + /// expression. + /// + /// [orderBy] - Field to use to sort the list. + /// + /// [pageSize] - The maximum number of TensorboardExperiments to return. The + /// service may return fewer than this value. If unspecified, at most 50 + /// TensorboardExperiments are returned. 
The maximum value is 1000; values + /// above 1000 are coerced to 1000. + /// + /// [pageToken] - A page token, received from a previous + /// TensorboardService.ListTensorboardExperiments call. Provide this to + /// retrieve the subsequent page. When paginating, all other parameters + /// provided to TensorboardService.ListTensorboardExperiments must match the + /// call that provided the page token. + /// + /// [readMask] - Mask specifying which fields to read. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. /// - /// Completes with a [GoogleLongrunningOperation]. + /// Completes with a + /// [GoogleCloudAiplatformV1ListTensorboardExperimentsResponse]. /// /// Completes with a [commons.ApiRequestError] if the API endpoint returned an /// error. /// /// If the used [http.Client] completes with an error when making a REST call, /// this method will complete with the same error. - async.Future wait( - core.String name, { - core.String? timeout, + async.Future list( + core.String parent, { + core.String? filter, + core.String? orderBy, + core.int? pageSize, + core.String? pageToken, + core.String? readMask, core.String? $fields, }) async { final queryParams_ = >{ - if (timeout != null) 'timeout': [timeout], + if (filter != null) 'filter': [filter], + if (orderBy != null) 'orderBy': [orderBy], + if (pageSize != null) 'pageSize': ['${pageSize}'], + if (pageToken != null) 'pageToken': [pageToken], + if (readMask != null) 'readMask': [readMask], if ($fields != null) 'fields': [$fields], }; - final url_ = 'v1/' + core.Uri.encodeFull('$name') + ':wait'; + final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/experiments'; final response_ = await _requester.request( url_, - 'POST', + 'GET', queryParams: queryParams_, ); - return GoogleLongrunningOperation.fromJson( + return GoogleCloudAiplatformV1ListTensorboardExperimentsResponse.fromJson( response_ as core.Map); } -} - -class ProjectsLocationsTensorboardsOperationsResource { - final commons.ApiRequester _requester; - ProjectsLocationsTensorboardsOperationsResource(commons.ApiRequester client) - : _requester = client; - - /// Starts asynchronous cancellation on a long-running operation. + /// Updates a TensorboardExperiment. /// - /// The server makes a best effort to cancel the operation, but success is not - /// guaranteed. If the server doesn't support this method, it returns - /// `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation - /// or other methods to check whether the cancellation succeeded or whether - /// the operation completed despite cancellation. On successful cancellation, - /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// [request] - The metadata request object. /// /// Request parameters: /// - /// [name] - The name of the operation resource to be cancelled. + /// [name] - Output only. Name of the TensorboardExperiment. Format: + /// `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}` /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+/operations/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+/experiments/\[^/\]+$`. + /// + /// [updateMask] - Required. Field mask is used to specify the fields to be + /// overwritten in the TensorboardExperiment resource by the update. 
The + /// fields specified in the update_mask are relative to the resource, not the + /// full request. A field is overwritten if it's in the mask. If the user does + /// not provide a mask then all fields are overwritten if new values are + /// specified. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. /// - /// Completes with a [GoogleProtobufEmpty]. + /// Completes with a [GoogleCloudAiplatformV1TensorboardExperiment]. /// /// Completes with a [commons.ApiRequestError] if the API endpoint returned an /// error. /// /// If the used [http.Client] completes with an error when making a REST call, /// this method will complete with the same error. - async.Future cancel( + async.Future patch( + GoogleCloudAiplatformV1TensorboardExperiment request, core.String name, { + core.String? updateMask, core.String? $fields, }) async { + final body_ = convert.json.encode(request); final queryParams_ = >{ + if (updateMask != null) 'updateMask': [updateMask], if ($fields != null) 'fields': [$fields], }; - final url_ = 'v1/' + core.Uri.encodeFull('$name') + ':cancel'; + final url_ = 'v1/' + core.Uri.encodeFull('$name'); final response_ = await _requester.request( url_, - 'POST', + 'PATCH', + body: body_, queryParams: queryParams_, ); - return GoogleProtobufEmpty.fromJson( + return GoogleCloudAiplatformV1TensorboardExperiment.fromJson( response_ as core.Map); } - /// Deletes a long-running operation. + /// Write time series data points of multiple TensorboardTimeSeries in + /// multiple TensorboardRun's. /// - /// This method indicates that the client is no longer interested in the - /// operation result. It does not cancel the operation. If the server doesn't - /// support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. + /// If any data fail to be ingested, an error is returned. + /// + /// [request] - The metadata request object. /// /// Request parameters: /// - /// [name] - The name of the operation resource to be deleted. + /// [tensorboardExperiment] - Required. The resource name of the + /// TensorboardExperiment to write data to. Format: + /// `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}` /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+/operations/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+/experiments/\[^/\]+$`. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. /// - /// Completes with a [GoogleProtobufEmpty]. + /// Completes with a + /// [GoogleCloudAiplatformV1WriteTensorboardExperimentDataResponse]. /// /// Completes with a [commons.ApiRequestError] if the API endpoint returned an /// error. /// /// If the used [http.Client] completes with an error when making a REST call, /// this method will complete with the same error. - async.Future delete( - core.String name, { - core.String? $fields, + async.Future + write( + GoogleCloudAiplatformV1WriteTensorboardExperimentDataRequest request, + core.String tensorboardExperiment, { + core.String? 
$fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = + 'v1/' + core.Uri.encodeFull('$tensorboardExperiment') + ':write'; + + final response_ = await _requester.request( + url_, + 'POST', + body: body_, + queryParams: queryParams_, + ); + return GoogleCloudAiplatformV1WriteTensorboardExperimentDataResponse + .fromJson(response_ as core.Map); + } +} + +class ProjectsLocationsTensorboardsExperimentsOperationsResource { + final commons.ApiRequester _requester; + + ProjectsLocationsTensorboardsExperimentsOperationsResource( + commons.ApiRequester client) + : _requester = client; + + /// Starts asynchronous cancellation on a long-running operation. + /// + /// The server makes a best effort to cancel the operation, but success is not + /// guaranteed. If the server doesn't support this method, it returns + /// `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation + /// or other methods to check whether the cancellation succeeded or whether + /// the operation completed despite cancellation. On successful cancellation, + /// the operation is not deleted; instead, it becomes an operation with an + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. + /// + /// Request parameters: + /// + /// [name] - The name of the operation resource to be cancelled. + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+/experiments/\[^/\]+/operations/\[^/\]+$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [GoogleProtobufEmpty]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future cancel( + core.String name, { + core.String? $fields, + }) async { + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name') + ':cancel'; + + final response_ = await _requester.request( + url_, + 'POST', + queryParams: queryParams_, + ); + return GoogleProtobufEmpty.fromJson( + response_ as core.Map); + } + + /// Deletes a long-running operation. + /// + /// This method indicates that the client is no longer interested in the + /// operation result. It does not cancel the operation. If the server doesn't + /// support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. + /// + /// Request parameters: + /// + /// [name] - The name of the operation resource to be deleted. + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+/experiments/\[^/\]+/operations/\[^/\]+$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [GoogleProtobufEmpty]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future delete( + core.String name, { + core.String? $fields, }) async { final queryParams_ = >{ if ($fields != null) 'fields': [$fields], @@ -26357,7 +27051,7 @@ class ProjectsLocationsTensorboardsOperationsResource { /// /// [name] - The name of the operation resource. 
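// --- Editorial sketch: renaming a TensorboardExperiment with the `patch`
// method and `updateMask` parameter described above. Only the `patch`
// signature comes from this diff; the `displayName` field and the resource
// getter chain are assumptions based on the v1 schema.
import 'package:googleapis/aiplatform/v1.dart';

Future<void> renameExperiment(
  AiplatformApi api,
  String experimentName, // hypothetical full TensorboardExperiment resource name
  String newDisplayName,
) async {
  final updated = await api.projects.locations.tensorboards.experiments.patch(
    GoogleCloudAiplatformV1TensorboardExperiment(displayName: newDisplayName),
    experimentName,
    updateMask: 'display_name', // only the masked field is overwritten
  );
  print('Updated experiment: ${updated.name}');
}
// --- end sketch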
/// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+/operations/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+/experiments/\[^/\]+/operations/\[^/\]+$`. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. @@ -26396,7 +27090,7 @@ class ProjectsLocationsTensorboardsOperationsResource { /// /// [name] - The name of the operation's parent resource. /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+/experiments/\[^/\]+$`. /// /// [filter] - The standard list filter. /// @@ -26454,7 +27148,7 @@ class ProjectsLocationsTensorboardsOperationsResource { /// /// [name] - The name of the operation resource to wait on. /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+/operations/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+/experiments/\[^/\]+/operations/\[^/\]+$`. /// /// [timeout] - The maximum duration to wait before timing out. If left blank, /// the wait will be at most the time permitted by the underlying HTTP/RPC @@ -26493,48 +27187,51 @@ class ProjectsLocationsTensorboardsOperationsResource { } } -class ProjectsLocationsTrainingPipelinesResource { +class ProjectsLocationsTensorboardsExperimentsRunsResource { final commons.ApiRequester _requester; - ProjectsLocationsTrainingPipelinesOperationsResource get operations => - ProjectsLocationsTrainingPipelinesOperationsResource(_requester); + ProjectsLocationsTensorboardsExperimentsRunsOperationsResource + get operations => + ProjectsLocationsTensorboardsExperimentsRunsOperationsResource( + _requester); + ProjectsLocationsTensorboardsExperimentsRunsTimeSeriesResource + get timeSeries => + ProjectsLocationsTensorboardsExperimentsRunsTimeSeriesResource( + _requester); - ProjectsLocationsTrainingPipelinesResource(commons.ApiRequester client) + ProjectsLocationsTensorboardsExperimentsRunsResource( + commons.ApiRequester client) : _requester = client; - /// Cancels a TrainingPipeline. - /// - /// Starts asynchronous cancellation on the TrainingPipeline. The server makes - /// a best effort to cancel the pipeline, but success is not guaranteed. - /// Clients can use PipelineService.GetTrainingPipeline or other methods to - /// check whether the cancellation succeeded or whether the pipeline completed - /// despite cancellation. On successful cancellation, the TrainingPipeline is - /// not deleted; instead it becomes a pipeline with a TrainingPipeline.error - /// value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`, and TrainingPipeline.state is set to `CANCELLED`. + /// Batch create TensorboardRuns. /// /// [request] - The metadata request object. /// /// Request parameters: /// - /// [name] - Required. The name of the TrainingPipeline to cancel. Format: - /// `projects/{project}/locations/{location}/trainingPipelines/{training_pipeline}` + /// [parent] - Required. The resource name of the TensorboardExperiment to + /// create the TensorboardRuns in. Format: + /// `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}` + /// The parent field in the CreateTensorboardRunRequest messages must match + /// this field. /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/trainingPipelines/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+/experiments/\[^/\]+$`. 
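// --- Editorial sketch: blocking on one of the long-running operations whose
// `wait` method appears above, then inspecting the result. The `done`,
// `error`, and `response` fields are the standard google.longrunning
// Operation fields; the operations getter chain is an assumption.
import 'package:googleapis/aiplatform/v1.dart';

Future<void> waitForExperimentOperation(
  AiplatformApi api,
  String operationName, // hypothetical operation resource name
) async {
  final op = await api.projects.locations.tensorboards.experiments.operations
      .wait(operationName, timeout: '60s'); // google-duration string
  if (op.done != true) {
    print('Still running after the wait timeout: ${op.name}');
  } else if (op.error != null) {
    print('Operation failed: ${op.error!.code} ${op.error!.message}');
  } else {
    print('Operation finished: ${op.response}');
  }
}
// --- end sketch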
/// /// [$fields] - Selector specifying which fields to include in a partial /// response. /// - /// Completes with a [GoogleProtobufEmpty]. + /// Completes with a + /// [GoogleCloudAiplatformV1BatchCreateTensorboardRunsResponse]. /// /// Completes with a [commons.ApiRequestError] if the API endpoint returned an /// error. /// /// If the used [http.Client] completes with an error when making a REST call, /// this method will complete with the same error. - async.Future cancel( - GoogleCloudAiplatformV1CancelTrainingPipelineRequest request, - core.String name, { + async.Future + batchCreate( + GoogleCloudAiplatformV1BatchCreateTensorboardRunsRequest request, + core.String parent, { core.String? $fields, }) async { final body_ = convert.json.encode(request); @@ -26542,7 +27239,7 @@ class ProjectsLocationsTrainingPipelinesResource { if ($fields != null) 'fields': [$fields], }; - final url_ = 'v1/' + core.Uri.encodeFull('$name') + ':cancel'; + final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/runs:batchCreate'; final response_ = await _requester.request( url_, @@ -26550,43 +27247,49 @@ class ProjectsLocationsTrainingPipelinesResource { body: body_, queryParams: queryParams_, ); - return GoogleProtobufEmpty.fromJson( + return GoogleCloudAiplatformV1BatchCreateTensorboardRunsResponse.fromJson( response_ as core.Map); } - /// Creates a TrainingPipeline. - /// - /// A created TrainingPipeline right away will be attempted to be run. + /// Creates a TensorboardRun. /// /// [request] - The metadata request object. /// /// Request parameters: /// - /// [parent] - Required. The resource name of the Location to create the - /// TrainingPipeline in. Format: `projects/{project}/locations/{location}` - /// Value must have pattern `^projects/\[^/\]+/locations/\[^/\]+$`. + /// [parent] - Required. The resource name of the TensorboardExperiment to + /// create the TensorboardRun in. Format: + /// `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}` + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+/experiments/\[^/\]+$`. + /// + /// [tensorboardRunId] - Required. The ID to use for the Tensorboard run, + /// which becomes the final component of the Tensorboard run's resource name. + /// This value should be 1-128 characters, and valid characters are `/a-z-/`. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. /// - /// Completes with a [GoogleCloudAiplatformV1TrainingPipeline]. + /// Completes with a [GoogleCloudAiplatformV1TensorboardRun]. /// /// Completes with a [commons.ApiRequestError] if the API endpoint returned an /// error. /// /// If the used [http.Client] completes with an error when making a REST call, /// this method will complete with the same error. - async.Future create( - GoogleCloudAiplatformV1TrainingPipeline request, + async.Future create( + GoogleCloudAiplatformV1TensorboardRun request, core.String parent, { + core.String? tensorboardRunId, core.String? 
$fields, }) async { final body_ = convert.json.encode(request); final queryParams_ = >{ + if (tensorboardRunId != null) 'tensorboardRunId': [tensorboardRunId], if ($fields != null) 'fields': [$fields], }; - final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/trainingPipelines'; + final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/runs'; final response_ = await _requester.request( url_, @@ -26594,19 +27297,18 @@ class ProjectsLocationsTrainingPipelinesResource { body: body_, queryParams: queryParams_, ); - return GoogleCloudAiplatformV1TrainingPipeline.fromJson( + return GoogleCloudAiplatformV1TensorboardRun.fromJson( response_ as core.Map); } - /// Deletes a TrainingPipeline. + /// Deletes a TensorboardRun. /// /// Request parameters: /// - /// [name] - Required. The name of the TrainingPipeline resource to be - /// deleted. Format: - /// `projects/{project}/locations/{location}/trainingPipelines/{training_pipeline}` + /// [name] - Required. The name of the TensorboardRun to be deleted. Format: + /// `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}/runs/{run}` /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/trainingPipelines/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+/experiments/\[^/\]+/runs/\[^/\]+$`. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. @@ -26637,26 +27339,26 @@ class ProjectsLocationsTrainingPipelinesResource { response_ as core.Map); } - /// Gets a TrainingPipeline. + /// Gets a TensorboardRun. /// /// Request parameters: /// - /// [name] - Required. The name of the TrainingPipeline resource. Format: - /// `projects/{project}/locations/{location}/trainingPipelines/{training_pipeline}` + /// [name] - Required. The name of the TensorboardRun resource. Format: + /// `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}/runs/{run}` /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/trainingPipelines/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+/experiments/\[^/\]+/runs/\[^/\]+$`. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. /// - /// Completes with a [GoogleCloudAiplatformV1TrainingPipeline]. + /// Completes with a [GoogleCloudAiplatformV1TensorboardRun]. /// /// Completes with a [commons.ApiRequestError] if the API endpoint returned an /// error. /// /// If the used [http.Client] completes with an error when making a REST call, /// this method will complete with the same error. - async.Future get( + async.Future get( core.String name, { core.String? $fields, }) async { @@ -26671,52 +27373,51 @@ class ProjectsLocationsTrainingPipelinesResource { 'GET', queryParams: queryParams_, ); - return GoogleCloudAiplatformV1TrainingPipeline.fromJson( + return GoogleCloudAiplatformV1TensorboardRun.fromJson( response_ as core.Map); } - /// Lists TrainingPipelines in a Location. + /// Lists TensorboardRuns in a Location. /// /// Request parameters: /// - /// [parent] - Required. The resource name of the Location to list the - /// TrainingPipelines from. Format: `projects/{project}/locations/{location}` - /// Value must have pattern `^projects/\[^/\]+/locations/\[^/\]+$`. + /// [parent] - Required. The resource name of the TensorboardExperiment to + /// list TensorboardRuns. 
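// --- Editorial sketch: creating a TensorboardRun under an experiment with
// the `create` method and `tensorboardRunId` query parameter shown above.
// The `displayName` field on TensorboardRun is an assumption from the v1
// schema; the run ID and parent value are placeholders.
import 'package:googleapis/aiplatform/v1.dart';

Future<GoogleCloudAiplatformV1TensorboardRun> createRun(
  AiplatformApi api,
  String experimentName, // hypothetical parent TensorboardExperiment resource name
) async {
  return api.projects.locations.tensorboards.experiments.runs.create(
    GoogleCloudAiplatformV1TensorboardRun(displayName: 'baseline'),
    experimentName,
    // See the `tensorboardRunId` doc above for the allowed characters/length.
    tensorboardRunId: 'run-001',
  );
}
// --- end sketch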
Format: + /// `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}` + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+/experiments/\[^/\]+$`. /// - /// [filter] - The standard list filter. Supported fields: * \`display_name\` - /// supports \`=\`, \`!=\` comparisons, and \`:\` wildcard. * \`state\` - /// supports \`=\`, \`!=\` comparisons. * \`training_task_definition\` \`=\`, - /// \`!=\` comparisons, and \`:\` wildcard. * \`create_time\` supports \`=\`, - /// \`!=\`,\`\<\`, \`\<=\`,\`\>\`, \`\>=\` comparisons. \`create_time\` must - /// be in RFC 3339 format. * \`labels\` supports general map functions that - /// is: \`labels.key=value\` - key:value equality \`labels.key:* - key - /// existence Some examples of using the filter are: * - /// \`state="PIPELINE_STATE_SUCCEEDED" AND display_name:"my_pipeline_*"\` * - /// \`state!="PIPELINE_STATE_FAILED" OR display_name="my_pipeline"\` * \`NOT - /// display_name="my_pipeline"\` * \`create_time\>"2021-05-18T00:00:00Z"\` * - /// \`training_task_definition:"*automl_text_classification*"\` + /// [filter] - Lists the TensorboardRuns that match the filter expression. /// - /// [pageSize] - The standard list page size. + /// [orderBy] - Field to use to sort the list. /// - /// [pageToken] - The standard list page token. Typically obtained via - /// ListTrainingPipelinesResponse.next_page_token of the previous - /// PipelineService.ListTrainingPipelines call. + /// [pageSize] - The maximum number of TensorboardRuns to return. The service + /// may return fewer than this value. If unspecified, at most 50 + /// TensorboardRuns are returned. The maximum value is 1000; values above 1000 + /// are coerced to 1000. + /// + /// [pageToken] - A page token, received from a previous + /// TensorboardService.ListTensorboardRuns call. Provide this to retrieve the + /// subsequent page. When paginating, all other parameters provided to + /// TensorboardService.ListTensorboardRuns must match the call that provided + /// the page token. /// /// [readMask] - Mask specifying which fields to read. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. /// - /// Completes with a [GoogleCloudAiplatformV1ListTrainingPipelinesResponse]. + /// Completes with a [GoogleCloudAiplatformV1ListTensorboardRunsResponse]. /// /// Completes with a [commons.ApiRequestError] if the API endpoint returned an /// error. /// /// If the used [http.Client] completes with an error when making a REST call, /// this method will complete with the same error. - async.Future list( + async.Future list( core.String parent, { core.String? filter, + core.String? orderBy, core.int? pageSize, core.String? pageToken, core.String? 
readMask, @@ -26724,28 +27425,128 @@ class ProjectsLocationsTrainingPipelinesResource { }) async { final queryParams_ = >{ if (filter != null) 'filter': [filter], + if (orderBy != null) 'orderBy': [orderBy], if (pageSize != null) 'pageSize': ['${pageSize}'], if (pageToken != null) 'pageToken': [pageToken], if (readMask != null) 'readMask': [readMask], if ($fields != null) 'fields': [$fields], }; - final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/trainingPipelines'; + final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/runs'; final response_ = await _requester.request( url_, 'GET', queryParams: queryParams_, ); - return GoogleCloudAiplatformV1ListTrainingPipelinesResponse.fromJson( + return GoogleCloudAiplatformV1ListTensorboardRunsResponse.fromJson( + response_ as core.Map); + } + + /// Updates a TensorboardRun. + /// + /// [request] - The metadata request object. + /// + /// Request parameters: + /// + /// [name] - Output only. Name of the TensorboardRun. Format: + /// `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}/runs/{run}` + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+/experiments/\[^/\]+/runs/\[^/\]+$`. + /// + /// [updateMask] - Required. Field mask is used to specify the fields to be + /// overwritten in the TensorboardRun resource by the update. The fields + /// specified in the update_mask are relative to the resource, not the full + /// request. A field is overwritten if it's in the mask. If the user does not + /// provide a mask then all fields are overwritten if new values are + /// specified. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [GoogleCloudAiplatformV1TensorboardRun]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future patch( + GoogleCloudAiplatformV1TensorboardRun request, + core.String name, { + core.String? updateMask, + core.String? $fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if (updateMask != null) 'updateMask': [updateMask], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name'); + + final response_ = await _requester.request( + url_, + 'PATCH', + body: body_, + queryParams: queryParams_, + ); + return GoogleCloudAiplatformV1TensorboardRun.fromJson( + response_ as core.Map); + } + + /// Write time series data points into multiple TensorboardTimeSeries under a + /// TensorboardRun. + /// + /// If any data fail to be ingested, an error is returned. + /// + /// [request] - The metadata request object. + /// + /// Request parameters: + /// + /// [tensorboardRun] - Required. The resource name of the TensorboardRun to + /// write data to. Format: + /// `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}/runs/{run}` + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+/experiments/\[^/\]+/runs/\[^/\]+$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [GoogleCloudAiplatformV1WriteTensorboardRunDataResponse]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. 
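// --- Editorial sketch: writing one scalar point into a run via the `write`
// method documented above. The nested request types used here
// (GoogleCloudAiplatformV1TimeSeriesData, ...TimeSeriesDataPoint, ...Scalar)
// and their fields are assumptions drawn from the aiplatform v1 schema and
// are not shown in this diff; verify the generated names before relying on
// this.
import 'package:googleapis/aiplatform/v1.dart';

Future<void> writeScalar(
  AiplatformApi api,
  String runName, // hypothetical full TensorboardRun resource name
  String timeSeriesId,
  int step,
  double value,
) async {
  final request = GoogleCloudAiplatformV1WriteTensorboardRunDataRequest(
    timeSeriesData: [
      GoogleCloudAiplatformV1TimeSeriesData(
        tensorboardTimeSeriesId: timeSeriesId,
        valueType: 'SCALAR',
        values: [
          GoogleCloudAiplatformV1TimeSeriesDataPoint(
            step: '$step', // int64 fields are carried as strings in the generated client
            scalar: GoogleCloudAiplatformV1Scalar(value: value),
          ),
        ],
      ),
    ],
  );
  await api.projects.locations.tensorboards.experiments.runs
      .write(request, runName);
}
// --- end sketch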
+ /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future write( + GoogleCloudAiplatformV1WriteTensorboardRunDataRequest request, + core.String tensorboardRun, { + core.String? $fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$tensorboardRun') + ':write'; + + final response_ = await _requester.request( + url_, + 'POST', + body: body_, + queryParams: queryParams_, + ); + return GoogleCloudAiplatformV1WriteTensorboardRunDataResponse.fromJson( response_ as core.Map); } } -class ProjectsLocationsTrainingPipelinesOperationsResource { +class ProjectsLocationsTensorboardsExperimentsRunsOperationsResource { final commons.ApiRequester _requester; - ProjectsLocationsTrainingPipelinesOperationsResource( + ProjectsLocationsTensorboardsExperimentsRunsOperationsResource( commons.ApiRequester client) : _requester = client; @@ -26757,14 +27558,14 @@ class ProjectsLocationsTrainingPipelinesOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// Request parameters: /// /// [name] - The name of the operation resource to be cancelled. /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/trainingPipelines/\[^/\]+/operations/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+/experiments/\[^/\]+/runs/\[^/\]+/operations/\[^/\]+$`. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. @@ -26805,7 +27606,7 @@ class ProjectsLocationsTrainingPipelinesOperationsResource { /// /// [name] - The name of the operation resource to be deleted. /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/trainingPipelines/\[^/\]+/operations/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+/experiments/\[^/\]+/runs/\[^/\]+/operations/\[^/\]+$`. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. @@ -26845,7 +27646,7 @@ class ProjectsLocationsTrainingPipelinesOperationsResource { /// /// [name] - The name of the operation resource. /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/trainingPipelines/\[^/\]+/operations/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+/experiments/\[^/\]+/runs/\[^/\]+/operations/\[^/\]+$`. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. @@ -26884,7 +27685,7 @@ class ProjectsLocationsTrainingPipelinesOperationsResource { /// /// [name] - The name of the operation's parent resource. /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/trainingPipelines/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+/experiments/\[^/\]+/runs/\[^/\]+$`. /// /// [filter] - The standard list filter. /// @@ -26942,7 +27743,7 @@ class ProjectsLocationsTrainingPipelinesOperationsResource { /// /// [name] - The name of the operation resource to wait on. 
/// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/trainingPipelines/\[^/\]+/operations/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+/experiments/\[^/\]+/runs/\[^/\]+/operations/\[^/\]+$`. /// /// [timeout] - The maximum duration to wait before timing out. If left blank, /// the wait will be at most the time permitted by the underlying HTTP/RPC @@ -26981,56 +27782,59 @@ class ProjectsLocationsTrainingPipelinesOperationsResource { } } -class ProjectsLocationsTuningJobsResource { +class ProjectsLocationsTensorboardsExperimentsRunsTimeSeriesResource { final commons.ApiRequester _requester; - ProjectsLocationsTuningJobsOperationsResource get operations => - ProjectsLocationsTuningJobsOperationsResource(_requester); + ProjectsLocationsTensorboardsExperimentsRunsTimeSeriesOperationsResource + get operations => + ProjectsLocationsTensorboardsExperimentsRunsTimeSeriesOperationsResource( + _requester); - ProjectsLocationsTuningJobsResource(commons.ApiRequester client) + ProjectsLocationsTensorboardsExperimentsRunsTimeSeriesResource( + commons.ApiRequester client) : _requester = client; - /// Cancels a TuningJob. - /// - /// Starts asynchronous cancellation on the TuningJob. The server makes a best - /// effort to cancel the job, but success is not guaranteed. Clients can use - /// GenAiTuningService.GetTuningJob or other methods to check whether the - /// cancellation succeeded or whether the job completed despite cancellation. - /// On successful cancellation, the TuningJob is not deleted; instead it - /// becomes a job with a TuningJob.error value with a google.rpc.Status.code - /// of 1, corresponding to `Code.CANCELLED`, and TuningJob.state is set to - /// `CANCELLED`. + /// Creates a TensorboardTimeSeries. /// /// [request] - The metadata request object. /// /// Request parameters: /// - /// [name] - Required. The name of the TuningJob to cancel. Format: - /// `projects/{project}/locations/{location}/tuningJobs/{tuning_job}` + /// [parent] - Required. The resource name of the TensorboardRun to create the + /// TensorboardTimeSeries in. Format: + /// `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}/runs/{run}` /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/tuningJobs/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+/experiments/\[^/\]+/runs/\[^/\]+$`. + /// + /// [tensorboardTimeSeriesId] - Optional. The user specified unique ID to use + /// for the TensorboardTimeSeries, which becomes the final component of the + /// TensorboardTimeSeries's resource name. This value should match "a-z0-9{0, + /// 127}" /// /// [$fields] - Selector specifying which fields to include in a partial /// response. /// - /// Completes with a [GoogleProtobufEmpty]. + /// Completes with a [GoogleCloudAiplatformV1TensorboardTimeSeries]. /// /// Completes with a [commons.ApiRequestError] if the API endpoint returned an /// error. /// /// If the used [http.Client] completes with an error when making a REST call, /// this method will complete with the same error. - async.Future cancel( - GoogleCloudAiplatformV1CancelTuningJobRequest request, - core.String name, { + async.Future create( + GoogleCloudAiplatformV1TensorboardTimeSeries request, + core.String parent, { + core.String? tensorboardTimeSeriesId, core.String? 
$fields, }) async { final body_ = convert.json.encode(request); final queryParams_ = >{ + if (tensorboardTimeSeriesId != null) + 'tensorboardTimeSeriesId': [tensorboardTimeSeriesId], if ($fields != null) 'fields': [$fields], }; - final url_ = 'v1/' + core.Uri.encodeFull('$name') + ':cancel'; + final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/timeSeries'; final response_ = await _requester.request( url_, @@ -27038,191 +27842,368 @@ class ProjectsLocationsTuningJobsResource { body: body_, queryParams: queryParams_, ); - return GoogleProtobufEmpty.fromJson( + return GoogleCloudAiplatformV1TensorboardTimeSeries.fromJson( response_ as core.Map); } - /// Creates a TuningJob. - /// - /// A created TuningJob right away will be attempted to be run. - /// - /// [request] - The metadata request object. + /// Deletes a TensorboardTimeSeries. /// /// Request parameters: /// - /// [parent] - Required. The resource name of the Location to create the - /// TuningJob in. Format: `projects/{project}/locations/{location}` - /// Value must have pattern `^projects/\[^/\]+/locations/\[^/\]+$`. + /// [name] - Required. The name of the TensorboardTimeSeries to be deleted. + /// Format: + /// `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}/runs/{run}/timeSeries/{time_series}` + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+/experiments/\[^/\]+/runs/\[^/\]+/timeSeries/\[^/\]+$`. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. /// - /// Completes with a [GoogleCloudAiplatformV1TuningJob]. + /// Completes with a [GoogleLongrunningOperation]. /// /// Completes with a [commons.ApiRequestError] if the API endpoint returned an /// error. /// /// If the used [http.Client] completes with an error when making a REST call, /// this method will complete with the same error. - async.Future create( - GoogleCloudAiplatformV1TuningJob request, - core.String parent, { + async.Future delete( + core.String name, { core.String? $fields, }) async { - final body_ = convert.json.encode(request); final queryParams_ = >{ if ($fields != null) 'fields': [$fields], }; - final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/tuningJobs'; + final url_ = 'v1/' + core.Uri.encodeFull('$name'); final response_ = await _requester.request( url_, - 'POST', - body: body_, + 'DELETE', queryParams: queryParams_, ); - return GoogleCloudAiplatformV1TuningJob.fromJson( + return GoogleLongrunningOperation.fromJson( response_ as core.Map); } - /// Gets a TuningJob. + /// Exports a TensorboardTimeSeries' data. + /// + /// Data is returned in paginated responses. + /// + /// [request] - The metadata request object. /// /// Request parameters: /// - /// [name] - Required. The name of the TuningJob resource. Format: - /// `projects/{project}/locations/{location}/tuningJobs/{tuning_job}` + /// [tensorboardTimeSeries] - Required. The resource name of the + /// TensorboardTimeSeries to export data from. Format: + /// `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}/runs/{run}/timeSeries/{time_series}` /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/tuningJobs/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+/experiments/\[^/\]+/runs/\[^/\]+/timeSeries/\[^/\]+$`. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. /// - /// Completes with a [GoogleCloudAiplatformV1TuningJob]. 
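// --- Editorial sketch: creating a scalar TensorboardTimeSeries under a run
// using the `create` method and `tensorboardTimeSeriesId` parameter shown
// above. The `displayName` and `valueType` fields are assumptions from the
// v1 TensorboardTimeSeries schema.
import 'package:googleapis/aiplatform/v1.dart';

Future<GoogleCloudAiplatformV1TensorboardTimeSeries> createLossSeries(
  AiplatformApi api,
  String runName, // hypothetical parent TensorboardRun resource name
) async {
  return api.projects.locations.tensorboards.experiments.runs.timeSeries.create(
    GoogleCloudAiplatformV1TensorboardTimeSeries(
      displayName: 'loss',
      valueType: 'SCALAR',
    ),
    runName,
    // Optional; must match the pattern documented above for the ID.
    tensorboardTimeSeriesId: 'loss',
  );
}
// --- end sketch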
+ /// Completes with a + /// [GoogleCloudAiplatformV1ExportTensorboardTimeSeriesDataResponse]. /// /// Completes with a [commons.ApiRequestError] if the API endpoint returned an /// error. /// /// If the used [http.Client] completes with an error when making a REST call, /// this method will complete with the same error. - async.Future get( - core.String name, { + async.Future + exportTensorboardTimeSeries( + GoogleCloudAiplatformV1ExportTensorboardTimeSeriesDataRequest request, + core.String tensorboardTimeSeries, { core.String? $fields, }) async { + final body_ = convert.json.encode(request); final queryParams_ = >{ if ($fields != null) 'fields': [$fields], }; - final url_ = 'v1/' + core.Uri.encodeFull('$name'); + final url_ = 'v1/' + + core.Uri.encodeFull('$tensorboardTimeSeries') + + ':exportTensorboardTimeSeries'; final response_ = await _requester.request( url_, - 'GET', + 'POST', + body: body_, queryParams: queryParams_, ); - return GoogleCloudAiplatformV1TuningJob.fromJson( - response_ as core.Map); + return GoogleCloudAiplatformV1ExportTensorboardTimeSeriesDataResponse + .fromJson(response_ as core.Map); } - /// Lists TuningJobs in a Location. + /// Gets a TensorboardTimeSeries. /// /// Request parameters: /// - /// [parent] - Required. The resource name of the Location to list the - /// TuningJobs from. Format: `projects/{project}/locations/{location}` - /// Value must have pattern `^projects/\[^/\]+/locations/\[^/\]+$`. - /// - /// [filter] - Optional. The standard list filter. - /// - /// [pageSize] - Optional. The standard list page size. - /// - /// [pageToken] - Optional. The standard list page token. Typically obtained - /// via ListTuningJob.next_page_token of the previous - /// GenAiTuningService.ListTuningJob\]\[\] call. + /// [name] - Required. The name of the TensorboardTimeSeries resource. Format: + /// `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}/runs/{run}/timeSeries/{time_series}` + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+/experiments/\[^/\]+/runs/\[^/\]+/timeSeries/\[^/\]+$`. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. /// - /// Completes with a [GoogleCloudAiplatformV1ListTuningJobsResponse]. + /// Completes with a [GoogleCloudAiplatformV1TensorboardTimeSeries]. /// /// Completes with a [commons.ApiRequestError] if the API endpoint returned an /// error. /// /// If the used [http.Client] completes with an error when making a REST call, /// this method will complete with the same error. - async.Future list( - core.String parent, { - core.String? filter, - core.int? pageSize, - core.String? pageToken, + async.Future get( + core.String name, { core.String? $fields, }) async { final queryParams_ = >{ - if (filter != null) 'filter': [filter], - if (pageSize != null) 'pageSize': ['${pageSize}'], - if (pageToken != null) 'pageToken': [pageToken], if ($fields != null) 'fields': [$fields], }; - final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/tuningJobs'; + final url_ = 'v1/' + core.Uri.encodeFull('$name'); final response_ = await _requester.request( url_, 'GET', queryParams: queryParams_, ); - return GoogleCloudAiplatformV1ListTuningJobsResponse.fromJson( + return GoogleCloudAiplatformV1TensorboardTimeSeries.fromJson( response_ as core.Map); } - /// Rebase a TunedModel. + /// Lists TensorboardTimeSeries in a Location. + /// + /// Request parameters: + /// + /// [parent] - Required. 
The resource name of the TensorboardRun to list + /// TensorboardTimeSeries. Format: + /// `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}/runs/{run}` + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+/experiments/\[^/\]+/runs/\[^/\]+$`. + /// + /// [filter] - Lists the TensorboardTimeSeries that match the filter + /// expression. + /// + /// [orderBy] - Field to use to sort the list. + /// + /// [pageSize] - The maximum number of TensorboardTimeSeries to return. The + /// service may return fewer than this value. If unspecified, at most 50 + /// TensorboardTimeSeries are returned. The maximum value is 1000; values + /// above 1000 are coerced to 1000. + /// + /// [pageToken] - A page token, received from a previous + /// TensorboardService.ListTensorboardTimeSeries call. Provide this to + /// retrieve the subsequent page. When paginating, all other parameters + /// provided to TensorboardService.ListTensorboardTimeSeries must match the + /// call that provided the page token. + /// + /// [readMask] - Mask specifying which fields to read. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a + /// [GoogleCloudAiplatformV1ListTensorboardTimeSeriesResponse]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future list( + core.String parent, { + core.String? filter, + core.String? orderBy, + core.int? pageSize, + core.String? pageToken, + core.String? readMask, + core.String? $fields, + }) async { + final queryParams_ = >{ + if (filter != null) 'filter': [filter], + if (orderBy != null) 'orderBy': [orderBy], + if (pageSize != null) 'pageSize': ['${pageSize}'], + if (pageToken != null) 'pageToken': [pageToken], + if (readMask != null) 'readMask': [readMask], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/timeSeries'; + + final response_ = await _requester.request( + url_, + 'GET', + queryParams: queryParams_, + ); + return GoogleCloudAiplatformV1ListTensorboardTimeSeriesResponse.fromJson( + response_ as core.Map); + } + + /// Updates a TensorboardTimeSeries. /// /// [request] - The metadata request object. /// /// Request parameters: /// - /// [parent] - Required. The resource name of the Location into which to - /// rebase the Model. Format: `projects/{project}/locations/{location}` - /// Value must have pattern `^projects/\[^/\]+/locations/\[^/\]+$`. + /// [name] - Output only. Name of the TensorboardTimeSeries. + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+/experiments/\[^/\]+/runs/\[^/\]+/timeSeries/\[^/\]+$`. + /// + /// [updateMask] - Required. Field mask is used to specify the fields to be + /// overwritten in the TensorboardTimeSeries resource by the update. The + /// fields specified in the update_mask are relative to the resource, not the + /// full request. A field is overwritten if it's in the mask. If the user does + /// not provide a mask then all fields are overwritten if new values are + /// specified. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. /// - /// Completes with a [GoogleLongrunningOperation]. + /// Completes with a [GoogleCloudAiplatformV1TensorboardTimeSeries]. 
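// --- Editorial sketch: draining a TensorboardTimeSeries with
// `exportTensorboardTimeSeries`, which the docs above describe as paginated.
// The request/response fields used here (`pageSize`, `pageToken`,
// `timeSeriesDataPoints`, `nextPageToken`) are assumptions from the v1
// schema rather than something this diff shows.
import 'package:googleapis/aiplatform/v1.dart';

Future<int> countExportedPoints(AiplatformApi api, String timeSeriesName) async {
  final resource =
      api.projects.locations.tensorboards.experiments.runs.timeSeries;
  var total = 0;
  String? pageToken;
  do {
    final page = await resource.exportTensorboardTimeSeries(
      GoogleCloudAiplatformV1ExportTensorboardTimeSeriesDataRequest(
        pageSize: 1000,
        pageToken: pageToken,
      ),
      timeSeriesName,
    );
    total += page.timeSeriesDataPoints?.length ?? 0;
    pageToken = page.nextPageToken;
  } while (pageToken != null && pageToken.isNotEmpty);
  return total;
}
// --- end sketch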
/// /// Completes with a [commons.ApiRequestError] if the API endpoint returned an /// error. /// /// If the used [http.Client] completes with an error when making a REST call, /// this method will complete with the same error. - async.Future rebaseTunedModel( - GoogleCloudAiplatformV1RebaseTunedModelRequest request, - core.String parent, { + async.Future patch( + GoogleCloudAiplatformV1TensorboardTimeSeries request, + core.String name, { + core.String? updateMask, core.String? $fields, }) async { final body_ = convert.json.encode(request); final queryParams_ = >{ + if (updateMask != null) 'updateMask': [updateMask], if ($fields != null) 'fields': [$fields], }; - final url_ = - 'v1/' + core.Uri.encodeFull('$parent') + '/tuningJobs:rebaseTunedModel'; + final url_ = 'v1/' + core.Uri.encodeFull('$name'); final response_ = await _requester.request( url_, - 'POST', + 'PATCH', body: body_, queryParams: queryParams_, ); - return GoogleLongrunningOperation.fromJson( + return GoogleCloudAiplatformV1TensorboardTimeSeries.fromJson( + response_ as core.Map); + } + + /// Reads a TensorboardTimeSeries' data. + /// + /// By default, if the number of data points stored is less than 1000, all + /// data is returned. Otherwise, 1000 data points is randomly selected from + /// this time series and returned. This value can be changed by changing + /// max_data_points, which can't be greater than 10k. + /// + /// Request parameters: + /// + /// [tensorboardTimeSeries] - Required. The resource name of the + /// TensorboardTimeSeries to read data from. Format: + /// `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}/runs/{run}/timeSeries/{time_series}` + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+/experiments/\[^/\]+/runs/\[^/\]+/timeSeries/\[^/\]+$`. + /// + /// [filter] - Reads the TensorboardTimeSeries' data that match the filter + /// expression. + /// + /// [maxDataPoints] - The maximum number of TensorboardTimeSeries' data to + /// return. This value should be a positive integer. This value can be set to + /// -1 to return all data. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a + /// [GoogleCloudAiplatformV1ReadTensorboardTimeSeriesDataResponse]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future + read( + core.String tensorboardTimeSeries, { + core.String? filter, + core.int? maxDataPoints, + core.String? $fields, + }) async { + final queryParams_ = >{ + if (filter != null) 'filter': [filter], + if (maxDataPoints != null) 'maxDataPoints': ['${maxDataPoints}'], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = + 'v1/' + core.Uri.encodeFull('$tensorboardTimeSeries') + ':read'; + + final response_ = await _requester.request( + url_, + 'GET', + queryParams: queryParams_, + ); + return GoogleCloudAiplatformV1ReadTensorboardTimeSeriesDataResponse + .fromJson(response_ as core.Map); + } + + /// Gets bytes of TensorboardBlobs. + /// + /// This is to allow reading blob data stored in consumer project's Cloud + /// Storage bucket without users having to obtain Cloud Storage access + /// permission. + /// + /// Request parameters: + /// + /// [timeSeries] - Required. The resource name of the TensorboardTimeSeries to + /// list Blobs. 
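// --- Editorial sketch: sampling points from a time series with the `read`
// method and its `maxDataPoints` parameter shown above (the service samples
// down to at most 1000 points by default). The `timeSeriesData` field on
// the response, and its `values` list, are assumptions from the v1 schema.
import 'package:googleapis/aiplatform/v1.dart';

Future<void> printSample(AiplatformApi api, String timeSeriesName) async {
  final response = await api
      .projects.locations.tensorboards.experiments.runs.timeSeries
      .read(timeSeriesName, maxDataPoints: 100);
  final points = response.timeSeriesData?.values ?? const [];
  print('Fetched ${points.length} data points from $timeSeriesName');
}
// --- end sketch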
Format: + /// `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}/runs/{run}/timeSeries/{time_series}` + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+/experiments/\[^/\]+/runs/\[^/\]+/timeSeries/\[^/\]+$`. + /// + /// [blobIds] - IDs of the blobs to read. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [GoogleCloudAiplatformV1ReadTensorboardBlobDataResponse]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future + readBlobData( + core.String timeSeries, { + core.List? blobIds, + core.String? $fields, + }) async { + final queryParams_ = >{ + if (blobIds != null) 'blobIds': blobIds, + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$timeSeries') + ':readBlobData'; + + final response_ = await _requester.request( + url_, + 'GET', + queryParams: queryParams_, + ); + return GoogleCloudAiplatformV1ReadTensorboardBlobDataResponse.fromJson( response_ as core.Map); } } -class ProjectsLocationsTuningJobsOperationsResource { +class ProjectsLocationsTensorboardsExperimentsRunsTimeSeriesOperationsResource { final commons.ApiRequester _requester; - ProjectsLocationsTuningJobsOperationsResource(commons.ApiRequester client) + ProjectsLocationsTensorboardsExperimentsRunsTimeSeriesOperationsResource( + commons.ApiRequester client) : _requester = client; /// Starts asynchronous cancellation on a long-running operation. @@ -27233,14 +28214,14 @@ class ProjectsLocationsTuningJobsOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// Request parameters: /// /// [name] - The name of the operation resource to be cancelled. /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/tuningJobs/\[^/\]+/operations/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+/experiments/\[^/\]+/runs/\[^/\]+/timeSeries/\[^/\]+/operations/\[^/\]+$`. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. @@ -27281,7 +28262,7 @@ class ProjectsLocationsTuningJobsOperationsResource { /// /// [name] - The name of the operation resource to be deleted. /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/tuningJobs/\[^/\]+/operations/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+/experiments/\[^/\]+/runs/\[^/\]+/timeSeries/\[^/\]+/operations/\[^/\]+$`. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. @@ -27321,7 +28302,7 @@ class ProjectsLocationsTuningJobsOperationsResource { /// /// [name] - The name of the operation resource. /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/tuningJobs/\[^/\]+/operations/\[^/\]+$`. 
+ /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+/experiments/\[^/\]+/runs/\[^/\]+/timeSeries/\[^/\]+/operations/\[^/\]+$`. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. @@ -27360,7 +28341,7 @@ class ProjectsLocationsTuningJobsOperationsResource { /// /// [name] - The name of the operation's parent resource. /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/tuningJobs/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+/experiments/\[^/\]+/runs/\[^/\]+/timeSeries/\[^/\]+$`. /// /// [filter] - The standard list filter. /// @@ -27402,1517 +28383,6457 @@ class ProjectsLocationsTuningJobsOperationsResource { return GoogleLongrunningListOperationsResponse.fromJson( response_ as core.Map); } -} -class PublishersResource { - final commons.ApiRequester _requester; + /// Waits until the specified long-running operation is done or reaches at + /// most a specified timeout, returning the latest state. + /// + /// If the operation is already done, the latest state is immediately + /// returned. If the timeout specified is greater than the default HTTP/RPC + /// timeout, the HTTP/RPC timeout is used. If the server does not support this + /// method, it returns `google.rpc.Code.UNIMPLEMENTED`. Note that this method + /// is on a best-effort basis. It may return the latest state before the + /// specified timeout (including immediately), meaning even an immediate + /// response is no guarantee that the operation is done. + /// + /// Request parameters: + /// + /// [name] - The name of the operation resource to wait on. + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+/experiments/\[^/\]+/runs/\[^/\]+/timeSeries/\[^/\]+/operations/\[^/\]+$`. + /// + /// [timeout] - The maximum duration to wait before timing out. If left blank, + /// the wait will be at most the time permitted by the underlying HTTP/RPC + /// protocol. If RPC context deadline is also specified, the shorter one will + /// be used. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [GoogleLongrunningOperation]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future wait( + core.String name, { + core.String? timeout, + core.String? $fields, + }) async { + final queryParams_ = >{ + if (timeout != null) 'timeout': [timeout], + if ($fields != null) 'fields': [$fields], + }; - PublishersModelsResource get models => PublishersModelsResource(_requester); + final url_ = 'v1/' + core.Uri.encodeFull('$name') + ':wait'; - PublishersResource(commons.ApiRequester client) : _requester = client; + final response_ = await _requester.request( + url_, + 'POST', + queryParams: queryParams_, + ); + return GoogleLongrunningOperation.fromJson( + response_ as core.Map); + } } -class PublishersModelsResource { +class ProjectsLocationsTensorboardsOperationsResource { final commons.ApiRequester _requester; - PublishersModelsResource(commons.ApiRequester client) : _requester = client; + ProjectsLocationsTensorboardsOperationsResource(commons.ApiRequester client) + : _requester = client; - /// Return a list of tokens based on the input text. + /// Starts asynchronous cancellation on a long-running operation. 
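// --- Editorial sketch: fetching blob payloads (for example image or tensor
// blobs) with `readBlobData` and the `blobIds` parameter shown above. The
// `blobs` field on the response and the `id`/`data` fields on each blob are
// assumptions from the v1 schema.
import 'package:googleapis/aiplatform/v1.dart';

Future<void> fetchBlobs(
  AiplatformApi api,
  String timeSeriesName, // hypothetical TensorboardTimeSeries resource name
  List<String> blobIds,
) async {
  final response = await api
      .projects.locations.tensorboards.experiments.runs.timeSeries
      .readBlobData(timeSeriesName, blobIds: blobIds);
  for (final blob in response.blobs ?? const []) {
    print('blob ${blob.id}: payload present = ${blob.data != null}');
  }
}
// --- end sketch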
/// - /// [request] - The metadata request object. + /// The server makes a best effort to cancel the operation, but success is not + /// guaranteed. If the server doesn't support this method, it returns + /// `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation + /// or other methods to check whether the cancellation succeeded or whether + /// the operation completed despite cancellation. On successful cancellation, + /// the operation is not deleted; instead, it becomes an operation with an + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// Request parameters: /// - /// [endpoint] - Required. The name of the Endpoint requested to get lists of - /// tokens and token ids. - /// Value must have pattern `^publishers/\[^/\]+/models/\[^/\]+$`. + /// [name] - The name of the operation resource to be cancelled. + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+/operations/\[^/\]+$`. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. /// - /// Completes with a [GoogleCloudAiplatformV1ComputeTokensResponse]. + /// Completes with a [GoogleProtobufEmpty]. /// /// Completes with a [commons.ApiRequestError] if the API endpoint returned an /// error. /// /// If the used [http.Client] completes with an error when making a REST call, /// this method will complete with the same error. - async.Future computeTokens( - GoogleCloudAiplatformV1ComputeTokensRequest request, - core.String endpoint, { + async.Future cancel( + core.String name, { core.String? $fields, }) async { - final body_ = convert.json.encode(request); final queryParams_ = >{ if ($fields != null) 'fields': [$fields], }; - final url_ = 'v1/' + core.Uri.encodeFull('$endpoint') + ':computeTokens'; + final url_ = 'v1/' + core.Uri.encodeFull('$name') + ':cancel'; final response_ = await _requester.request( url_, 'POST', - body: body_, queryParams: queryParams_, ); - return GoogleCloudAiplatformV1ComputeTokensResponse.fromJson( + return GoogleProtobufEmpty.fromJson( response_ as core.Map); } - /// Perform a token counting. + /// Deletes a long-running operation. /// - /// [request] - The metadata request object. + /// This method indicates that the client is no longer interested in the + /// operation result. It does not cancel the operation. If the server doesn't + /// support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. /// /// Request parameters: /// - /// [endpoint] - Required. The name of the Endpoint requested to perform token - /// counting. Format: - /// `projects/{project}/locations/{location}/endpoints/{endpoint}` - /// Value must have pattern `^publishers/\[^/\]+/models/\[^/\]+$`. + /// [name] - The name of the operation resource to be deleted. + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+/operations/\[^/\]+$`. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. /// - /// Completes with a [GoogleCloudAiplatformV1CountTokensResponse]. + /// Completes with a [GoogleProtobufEmpty]. /// /// Completes with a [commons.ApiRequestError] if the API endpoint returned an /// error. /// /// If the used [http.Client] completes with an error when making a REST call, /// this method will complete with the same error. - async.Future countTokens( - GoogleCloudAiplatformV1CountTokensRequest request, - core.String endpoint, { + async.Future delete( + core.String name, { core.String? 
$fields, }) async { - final body_ = convert.json.encode(request); final queryParams_ = >{ if ($fields != null) 'fields': [$fields], }; - final url_ = 'v1/' + core.Uri.encodeFull('$endpoint') + ':countTokens'; + final url_ = 'v1/' + core.Uri.encodeFull('$name'); final response_ = await _requester.request( url_, - 'POST', - body: body_, + 'DELETE', queryParams: queryParams_, ); - return GoogleCloudAiplatformV1CountTokensResponse.fromJson( + return GoogleProtobufEmpty.fromJson( response_ as core.Map); } - /// Generate content with multimodal inputs. + /// Gets the latest state of a long-running operation. /// - /// [request] - The metadata request object. + /// Clients can use this method to poll the operation result at intervals as + /// recommended by the API service. /// /// Request parameters: /// - /// [model] - Required. The fully qualified name of the publisher model or - /// tuned model endpoint to use. Publisher model format: - /// `projects/{project}/locations/{location}/publishers / * /models / * ` - /// Tuned model endpoint format: - /// `projects/{project}/locations/{location}/endpoints/{endpoint}` - /// Value must have pattern `^publishers/\[^/\]+/models/\[^/\]+$`. + /// [name] - The name of the operation resource. + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+/operations/\[^/\]+$`. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. /// - /// Completes with a [GoogleCloudAiplatformV1GenerateContentResponse]. + /// Completes with a [GoogleLongrunningOperation]. /// /// Completes with a [commons.ApiRequestError] if the API endpoint returned an /// error. /// /// If the used [http.Client] completes with an error when making a REST call, /// this method will complete with the same error. - async.Future generateContent( - GoogleCloudAiplatformV1GenerateContentRequest request, - core.String model, { + async.Future get( + core.String name, { core.String? $fields, }) async { - final body_ = convert.json.encode(request); final queryParams_ = >{ if ($fields != null) 'fields': [$fields], }; - final url_ = 'v1/' + core.Uri.encodeFull('$model') + ':generateContent'; + final url_ = 'v1/' + core.Uri.encodeFull('$name'); final response_ = await _requester.request( url_, - 'POST', - body: body_, + 'GET', queryParams: queryParams_, ); - return GoogleCloudAiplatformV1GenerateContentResponse.fromJson( + return GoogleLongrunningOperation.fromJson( response_ as core.Map); } - /// Gets a Model Garden publisher model. + /// Lists operations that match the specified filter in the request. /// - /// Request parameters: + /// If the server doesn't support this method, it returns `UNIMPLEMENTED`. /// - /// [name] - Required. The name of the PublisherModel resource. Format: - /// `publishers/{publisher}/models/{publisher_model}` - /// Value must have pattern `^publishers/\[^/\]+/models/\[^/\]+$`. + /// Request parameters: /// - /// [huggingFaceToken] - Optional. Token used to access Hugging Face gated - /// models. + /// [name] - The name of the operation's parent resource. + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+$`. /// - /// [isHuggingFaceModel] - Optional. Boolean indicates whether the requested - /// model is a Hugging Face model. + /// [filter] - The standard list filter. /// - /// [languageCode] - Optional. The IETF BCP-47 language code representing the - /// language in which the publisher model's text information should be written - /// in. 
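// --- Editorial sketch: calling a publisher model through the
// `generateContent` method shown above. The request schema used here
// (GoogleCloudAiplatformV1Content / ...Part with a `text` field) and the
// `candidates` field on the response are assumptions from the v1 schema;
// the model name is a placeholder.
import 'package:googleapis/aiplatform/v1.dart';

Future<void> askModel(AiplatformApi api, String prompt) async {
  final response = await api.publishers.models.generateContent(
    GoogleCloudAiplatformV1GenerateContentRequest(
      contents: [
        GoogleCloudAiplatformV1Content(
          role: 'user',
          parts: [GoogleCloudAiplatformV1Part(text: prompt)],
        ),
      ],
    ),
    'publishers/google/models/some-model', // placeholder model resource name
  );
  print(response.candidates?.first.content?.parts?.first.text);
}
// --- end sketch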
+ /// [pageSize] - The standard list page size. /// - /// [view] - Optional. PublisherModel view specifying which fields to read. - /// Possible string values are: - /// - "PUBLISHER_MODEL_VIEW_UNSPECIFIED" : The default / unset value. The API - /// will default to the BASIC view. - /// - "PUBLISHER_MODEL_VIEW_BASIC" : Include basic metadata about the - /// publisher model, but not the full contents. - /// - "PUBLISHER_MODEL_VIEW_FULL" : Include everything. - /// - "PUBLISHER_MODEL_VERSION_VIEW_BASIC" : Include: VersionId, - /// ModelVersionExternalName, and SupportedActions. + /// [pageToken] - The standard list page token. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. /// - /// Completes with a [GoogleCloudAiplatformV1PublisherModel]. + /// Completes with a [GoogleLongrunningListOperationsResponse]. /// /// Completes with a [commons.ApiRequestError] if the API endpoint returned an /// error. /// /// If the used [http.Client] completes with an error when making a REST call, /// this method will complete with the same error. - async.Future get( + async.Future list( core.String name, { - core.String? huggingFaceToken, - core.bool? isHuggingFaceModel, - core.String? languageCode, - core.String? view, + core.String? filter, + core.int? pageSize, + core.String? pageToken, core.String? $fields, }) async { final queryParams_ = >{ - if (huggingFaceToken != null) 'huggingFaceToken': [huggingFaceToken], - if (isHuggingFaceModel != null) - 'isHuggingFaceModel': ['${isHuggingFaceModel}'], - if (languageCode != null) 'languageCode': [languageCode], - if (view != null) 'view': [view], + if (filter != null) 'filter': [filter], + if (pageSize != null) 'pageSize': ['${pageSize}'], + if (pageToken != null) 'pageToken': [pageToken], if ($fields != null) 'fields': [$fields], }; - final url_ = 'v1/' + core.Uri.encodeFull('$name'); + final url_ = 'v1/' + core.Uri.encodeFull('$name') + '/operations'; final response_ = await _requester.request( url_, 'GET', queryParams: queryParams_, ); - return GoogleCloudAiplatformV1PublisherModel.fromJson( + return GoogleLongrunningListOperationsResponse.fromJson( response_ as core.Map); } - /// Generate content with multimodal inputs with streaming support. + /// Waits until the specified long-running operation is done or reaches at + /// most a specified timeout, returning the latest state. /// - /// [request] - The metadata request object. + /// If the operation is already done, the latest state is immediately + /// returned. If the timeout specified is greater than the default HTTP/RPC + /// timeout, the HTTP/RPC timeout is used. If the server does not support this + /// method, it returns `google.rpc.Code.UNIMPLEMENTED`. Note that this method + /// is on a best-effort basis. It may return the latest state before the + /// specified timeout (including immediately), meaning even an immediate + /// response is no guarantee that the operation is done. /// /// Request parameters: /// - /// [model] - Required. The fully qualified name of the publisher model or - /// tuned model endpoint to use. Publisher model format: - /// `projects/{project}/locations/{location}/publishers / * /models / * ` - /// Tuned model endpoint format: - /// `projects/{project}/locations/{location}/endpoints/{endpoint}` - /// Value must have pattern `^publishers/\[^/\]+/models/\[^/\]+$`. + /// [name] - The name of the operation resource to wait on. 
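// --- Editorial sketch: fetching Model Garden metadata with the publisher
// model `get` method and one of the `view` values listed above. The
// parameters come from this diff; the `publishers.models` getter chain on
// the API class and the `name` field read from the result are assumptions.
import 'package:googleapis/aiplatform/v1.dart';

Future<void> describePublisherModel(AiplatformApi api, String modelName) async {
  final model = await api.publishers.models.get(
    modelName, // format: 'publishers/{publisher}/models/{publisher_model}'
    view: 'PUBLISHER_MODEL_VIEW_BASIC',
    languageCode: 'en',
  );
  print('Fetched publisher model: ${model.name}');
}
// --- end sketch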
+ /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/tensorboards/\[^/\]+/operations/\[^/\]+$`. + /// + /// [timeout] - The maximum duration to wait before timing out. If left blank, + /// the wait will be at most the time permitted by the underlying HTTP/RPC + /// protocol. If RPC context deadline is also specified, the shorter one will + /// be used. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. /// - /// Completes with a [GoogleCloudAiplatformV1GenerateContentResponse]. + /// Completes with a [GoogleLongrunningOperation]. /// /// Completes with a [commons.ApiRequestError] if the API endpoint returned an /// error. /// /// If the used [http.Client] completes with an error when making a REST call, /// this method will complete with the same error. - async.Future - streamGenerateContent( - GoogleCloudAiplatformV1GenerateContentRequest request, - core.String model, { + async.Future wait( + core.String name, { + core.String? timeout, core.String? $fields, }) async { - final body_ = convert.json.encode(request); final queryParams_ = >{ + if (timeout != null) 'timeout': [timeout], if ($fields != null) 'fields': [$fields], }; - final url_ = - 'v1/' + core.Uri.encodeFull('$model') + ':streamGenerateContent'; + final url_ = 'v1/' + core.Uri.encodeFull('$name') + ':wait'; final response_ = await _requester.request( url_, 'POST', - body: body_, queryParams: queryParams_, ); - return GoogleCloudAiplatformV1GenerateContentResponse.fromJson( + return GoogleLongrunningOperation.fromJson( response_ as core.Map); } } -/// Message that represents an arbitrary HTTP body. -/// -/// It should only be used for payload formats that can't be represented as -/// JSON, such as raw binary or an HTML page. This message can be used both in -/// streaming and non-streaming API methods in the request as well as the -/// response. It can be used as a top-level request field, which is convenient -/// if one wants to extract parameters from either the URL or HTTP template into -/// the request fields and also want access to the raw HTTP body. Example: -/// message GetResourceRequest { // A unique request id. string request_id = 1; -/// // The raw HTTP body is bound to this field. google.api.HttpBody http_body = -/// 2; } service ResourceService { rpc GetResource(GetResourceRequest) returns -/// (google.api.HttpBody); rpc UpdateResource(google.api.HttpBody) returns -/// (google.protobuf.Empty); } Example with streaming methods: service -/// CaldavService { rpc GetCalendar(stream google.api.HttpBody) returns (stream -/// google.api.HttpBody); rpc UpdateCalendar(stream google.api.HttpBody) returns -/// (stream google.api.HttpBody); } Use of this type only changes how the -/// request and response bodies are handled, all other features will continue to -/// work unchanged. -typedef GoogleApiHttpBody = $HttpBody; +class ProjectsLocationsTrainingPipelinesResource { + final commons.ApiRequester _requester; -/// Parameters that configure the active learning pipeline. -/// -/// Active learning will label the data incrementally by several iterations. For -/// every iteration, it will select a batch of data based on the sampling -/// strategy. -class GoogleCloudAiplatformV1ActiveLearningConfig { - /// Max number of human labeled DataItems. - core.String? maxDataItemCount; + ProjectsLocationsTrainingPipelinesOperationsResource get operations => + ProjectsLocationsTrainingPipelinesOperationsResource(_requester); - /// Max percent of total DataItems for human labeling. 
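As a usage sketch for the operations `list` and `get` methods added above: page through the operations under a tensorboard and re-read each one by name. The `projects.locations.tensorboards.operations` accessor chain, the placeholder resource names, and the `operations`/`nextPageToken` fields on the list response follow the standard googleapis pattern and are assumptions here.

import 'package:googleapis/aiplatform/v1.dart' as aiplatform;

Future<void> listTensorboardOperations(aiplatform.AiplatformApi api) async {
  // Hypothetical parent, matching the documented name pattern.
  const parent =
      'projects/my-project/locations/us-central1/tensorboards/123';
  String? pageToken;
  do {
    final page = await api.projects.locations.tensorboards.operations.list(
      parent,
      pageSize: 50,
      pageToken: pageToken,
    );
    final ops =
        page.operations ?? const <aiplatform.GoogleLongrunningOperation>[];
    for (final op in ops) {
      // Fetch the latest state of each operation individually.
      final latest =
          await api.projects.locations.tensorboards.operations.get(op.name!);
      print('${latest.name}: done=${latest.done}');
    }
    pageToken = page.nextPageToken;
  } while (pageToken != null);
}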
- core.int? maxDataItemPercentage; + ProjectsLocationsTrainingPipelinesResource(commons.ApiRequester client) + : _requester = client; - /// Active learning data sampling config. + /// Cancels a TrainingPipeline. /// - /// For every active learning labeling iteration, it will select a batch of - /// data based on the sampling strategy. - GoogleCloudAiplatformV1SampleConfig? sampleConfig; - - /// CMLE training config. + /// Starts asynchronous cancellation on the TrainingPipeline. The server makes + /// a best effort to cancel the pipeline, but success is not guaranteed. + /// Clients can use PipelineService.GetTrainingPipeline or other methods to + /// check whether the cancellation succeeded or whether the pipeline completed + /// despite cancellation. On successful cancellation, the TrainingPipeline is + /// not deleted; instead it becomes a pipeline with a TrainingPipeline.error + /// value with a google.rpc.Status.code of 1, corresponding to + /// `Code.CANCELLED`, and TrainingPipeline.state is set to `CANCELLED`. /// - /// For every active learning labeling iteration, system will train a machine - /// learning model on CMLE. The trained model will be used by data sampling - /// algorithm to select DataItems. - GoogleCloudAiplatformV1TrainingConfig? trainingConfig; - - GoogleCloudAiplatformV1ActiveLearningConfig({ - this.maxDataItemCount, - this.maxDataItemPercentage, - this.sampleConfig, - this.trainingConfig, - }); - - GoogleCloudAiplatformV1ActiveLearningConfig.fromJson(core.Map json_) - : this( - maxDataItemCount: json_['maxDataItemCount'] as core.String?, - maxDataItemPercentage: json_['maxDataItemPercentage'] as core.int?, - sampleConfig: json_.containsKey('sampleConfig') - ? GoogleCloudAiplatformV1SampleConfig.fromJson( - json_['sampleConfig'] as core.Map) - : null, - trainingConfig: json_.containsKey('trainingConfig') - ? GoogleCloudAiplatformV1TrainingConfig.fromJson( - json_['trainingConfig'] - as core.Map) - : null, - ); - - core.Map toJson() => { - if (maxDataItemCount != null) 'maxDataItemCount': maxDataItemCount!, - if (maxDataItemPercentage != null) - 'maxDataItemPercentage': maxDataItemPercentage!, - if (sampleConfig != null) 'sampleConfig': sampleConfig!, - if (trainingConfig != null) 'trainingConfig': trainingConfig!, - }; -} - -/// Request message for MetadataService.AddContextArtifactsAndExecutions. -class GoogleCloudAiplatformV1AddContextArtifactsAndExecutionsRequest { - /// The resource names of the Artifacts to attribute to the Context. + /// [request] - The metadata request object. /// - /// Format: - /// `projects/{project}/locations/{location}/metadataStores/{metadatastore}/artifacts/{artifact}` - core.List? artifacts; - - /// The resource names of the Executions to associate with the Context. + /// Request parameters: /// - /// Format: - /// `projects/{project}/locations/{location}/metadataStores/{metadatastore}/executions/{execution}` - core.List? executions; - - GoogleCloudAiplatformV1AddContextArtifactsAndExecutionsRequest({ - this.artifacts, - this.executions, - }); - - GoogleCloudAiplatformV1AddContextArtifactsAndExecutionsRequest.fromJson( - core.Map json_) - : this( - artifacts: (json_['artifacts'] as core.List?) - ?.map((value) => value as core.String) - .toList(), - executions: (json_['executions'] as core.List?) 
- ?.map((value) => value as core.String) - .toList(), - ); - - core.Map toJson() => { - if (artifacts != null) 'artifacts': artifacts!, - if (executions != null) 'executions': executions!, - }; -} - -/// Response message for MetadataService.AddContextArtifactsAndExecutions. -typedef GoogleCloudAiplatformV1AddContextArtifactsAndExecutionsResponse - = $Empty; - -/// Request message for MetadataService.AddContextChildren. -typedef GoogleCloudAiplatformV1AddContextChildrenRequest - = $ContextChildrenRequest; - -/// Response message for MetadataService.AddContextChildren. -typedef GoogleCloudAiplatformV1AddContextChildrenResponse = $Empty; - -/// Request message for MetadataService.AddExecutionEvents. -class GoogleCloudAiplatformV1AddExecutionEventsRequest { - /// The Events to create and add. - core.List? events; - - GoogleCloudAiplatformV1AddExecutionEventsRequest({ - this.events, - }); - - GoogleCloudAiplatformV1AddExecutionEventsRequest.fromJson(core.Map json_) - : this( - events: (json_['events'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1Event.fromJson( - value as core.Map)) - .toList(), - ); + /// [name] - Required. The name of the TrainingPipeline to cancel. Format: + /// `projects/{project}/locations/{location}/trainingPipelines/{training_pipeline}` + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/trainingPipelines/\[^/\]+$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [GoogleProtobufEmpty]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future cancel( + GoogleCloudAiplatformV1CancelTrainingPipelineRequest request, + core.String name, { + core.String? $fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; - core.Map toJson() => { - if (events != null) 'events': events!, - }; -} + final url_ = 'v1/' + core.Uri.encodeFull('$name') + ':cancel'; -/// Response message for MetadataService.AddExecutionEvents. -typedef GoogleCloudAiplatformV1AddExecutionEventsResponse = $Empty; + final response_ = await _requester.request( + url_, + 'POST', + body: body_, + queryParams: queryParams_, + ); + return GoogleProtobufEmpty.fromJson( + response_ as core.Map); + } -/// Request message for VizierService.AddTrialMeasurement. -class GoogleCloudAiplatformV1AddTrialMeasurementRequest { - /// The measurement to be added to a Trial. + /// Creates a TrainingPipeline. /// - /// Required. - GoogleCloudAiplatformV1Measurement? measurement; - - GoogleCloudAiplatformV1AddTrialMeasurementRequest({ - this.measurement, - }); + /// A created TrainingPipeline right away will be attempted to be run. + /// + /// [request] - The metadata request object. + /// + /// Request parameters: + /// + /// [parent] - Required. The resource name of the Location to create the + /// TrainingPipeline in. Format: `projects/{project}/locations/{location}` + /// Value must have pattern `^projects/\[^/\]+/locations/\[^/\]+$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [GoogleCloudAiplatformV1TrainingPipeline]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. 
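A sketch of the TrainingPipeline cancel call added above. The accessor chain and the pipeline name are placeholders; the empty `GoogleCloudAiplatformV1CancelTrainingPipelineRequest` mirrors the request type the generated method accepts.

import 'package:googleapis/aiplatform/v1.dart' as aiplatform;

Future<void> cancelTrainingPipeline(aiplatform.AiplatformApi api) async {
  // Hypothetical pipeline name, following the documented format.
  const name =
      'projects/my-project/locations/us-central1/trainingPipelines/456';
  await api.projects.locations.trainingPipelines.cancel(
    aiplatform.GoogleCloudAiplatformV1CancelTrainingPipelineRequest(),
    name,
  );
  // Cancellation is asynchronous and best-effort; per the doc comment above,
  // poll GetTrainingPipeline to observe whether the pipeline reached CANCELLED.
}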
+ /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future create( + GoogleCloudAiplatformV1TrainingPipeline request, + core.String parent, { + core.String? $fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; - GoogleCloudAiplatformV1AddTrialMeasurementRequest.fromJson(core.Map json_) - : this( - measurement: json_.containsKey('measurement') - ? GoogleCloudAiplatformV1Measurement.fromJson( - json_['measurement'] as core.Map) - : null, - ); + final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/trainingPipelines'; - core.Map toJson() => { - if (measurement != null) 'measurement': measurement!, - }; -} + final response_ = await _requester.request( + url_, + 'POST', + body: body_, + queryParams: queryParams_, + ); + return GoogleCloudAiplatformV1TrainingPipeline.fromJson( + response_ as core.Map); + } -/// Used to assign specific AnnotationSpec to a particular area of a DataItem or -/// the whole part of the DataItem. -class GoogleCloudAiplatformV1Annotation { - /// The source of the Annotation. + /// Deletes a TrainingPipeline. /// - /// Output only. - GoogleCloudAiplatformV1UserActionReference? annotationSource; - - /// Timestamp when this Annotation was created. + /// Request parameters: /// - /// Output only. - core.String? createTime; - - /// Used to perform consistent read-modify-write updates. + /// [name] - Required. The name of the TrainingPipeline resource to be + /// deleted. Format: + /// `projects/{project}/locations/{location}/trainingPipelines/{training_pipeline}` + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/trainingPipelines/\[^/\]+$`. /// - /// If not set, a blind "overwrite" update happens. + /// [$fields] - Selector specifying which fields to include in a partial + /// response. /// - /// Optional. - core.String? etag; - - /// The labels with user-defined metadata to organize your Annotations. + /// Completes with a [GoogleLongrunningOperation]. /// - /// Label keys and values can be no longer than 64 characters (Unicode - /// codepoints), can only contain lowercase letters, numeric characters, - /// underscores and dashes. International characters are allowed. No more than - /// 64 user labels can be associated with one Annotation(System labels are - /// excluded). See https://goo.gl/xmQnxf for more information and examples of - /// labels. System reserved label keys are prefixed with - /// "aiplatform.googleapis.com/" and are immutable. Following system labels - /// exist for each Annotation: * - /// "aiplatform.googleapis.com/annotation_set_name": optional, name of the - /// UI's annotation set this Annotation belongs to. If not set, the Annotation - /// is not visible in the UI. * "aiplatform.googleapis.com/payload_schema": - /// output only, its value is the payload_schema's title. + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. /// - /// Optional. - core.Map? labels; + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future delete( + core.String name, { + core.String? $fields, + }) async { + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; - /// Resource name of the Annotation. - /// - /// Output only. - core.String? 
name; + final url_ = 'v1/' + core.Uri.encodeFull('$name'); - /// The schema of the payload can be found in payload_schema. + final response_ = await _requester.request( + url_, + 'DELETE', + queryParams: queryParams_, + ); + return GoogleLongrunningOperation.fromJson( + response_ as core.Map); + } + + /// Gets a TrainingPipeline. /// - /// Required. + /// Request parameters: /// - /// The values for Object must be JSON objects. It can consist of `num`, - /// `String`, `bool` and `null` as well as `Map` and `List` values. - core.Object? payload; - - /// Google Cloud Storage URI points to a YAML file describing payload. + /// [name] - Required. The name of the TrainingPipeline resource. Format: + /// `projects/{project}/locations/{location}/trainingPipelines/{training_pipeline}` + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/trainingPipelines/\[^/\]+$`. /// - /// The schema is defined as an - /// [OpenAPI 3.0.2 Schema Object](https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.2.md#schemaObject). - /// The schema files that can be used here are found in - /// gs://google-cloud-aiplatform/schema/dataset/annotation/, note that the - /// chosen schema must be consistent with the parent Dataset's metadata. + /// [$fields] - Selector specifying which fields to include in a partial + /// response. /// - /// Required. - core.String? payloadSchemaUri; - - /// Timestamp when this Annotation was last updated. + /// Completes with a [GoogleCloudAiplatformV1TrainingPipeline]. /// - /// Output only. - core.String? updateTime; - - GoogleCloudAiplatformV1Annotation({ - this.annotationSource, - this.createTime, - this.etag, - this.labels, - this.name, - this.payload, - this.payloadSchemaUri, - this.updateTime, - }); + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future get( + core.String name, { + core.String? $fields, + }) async { + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; - GoogleCloudAiplatformV1Annotation.fromJson(core.Map json_) - : this( - annotationSource: json_.containsKey('annotationSource') - ? GoogleCloudAiplatformV1UserActionReference.fromJson( - json_['annotationSource'] - as core.Map) - : null, - createTime: json_['createTime'] as core.String?, - etag: json_['etag'] as core.String?, - labels: - (json_['labels'] as core.Map?)?.map( - (key, value) => core.MapEntry( - key, - value as core.String, - ), - ), - name: json_['name'] as core.String?, - payload: json_['payload'], - payloadSchemaUri: json_['payloadSchemaUri'] as core.String?, - updateTime: json_['updateTime'] as core.String?, - ); + final url_ = 'v1/' + core.Uri.encodeFull('$name'); - core.Map toJson() => { - if (annotationSource != null) 'annotationSource': annotationSource!, - if (createTime != null) 'createTime': createTime!, - if (etag != null) 'etag': etag!, - if (labels != null) 'labels': labels!, - if (name != null) 'name': name!, - if (payload != null) 'payload': payload!, - if (payloadSchemaUri != null) 'payloadSchemaUri': payloadSchemaUri!, - if (updateTime != null) 'updateTime': updateTime!, - }; -} + final response_ = await _requester.request( + url_, + 'GET', + queryParams: queryParams_, + ); + return GoogleCloudAiplatformV1TrainingPipeline.fromJson( + response_ as core.Map); + } -/// Identifies a concept with which DataItems may be annotated with. 
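Every message class in this file follows the same `fromJson`/`toJson` shape shown for Annotation and AnnotationSpec here, so instances round-trip cleanly through dart:convert (json.encode picks up the generated toJson() via its default toEncodable hook). The field values below are illustrative only.

import 'dart:convert' as convert;
import 'package:googleapis/aiplatform/v1.dart' as aiplatform;

void annotationSpecRoundTrip() {
  final spec = aiplatform.GoogleCloudAiplatformV1AnnotationSpec(
    displayName: 'cat', // illustrative value
    etag: 'abc123',
  );
  // Serializes through the generated toJson().
  final encoded = convert.json.encode(spec);
  // fromJson accepts the raw decoded map.
  final decoded = aiplatform.GoogleCloudAiplatformV1AnnotationSpec.fromJson(
      convert.json.decode(encoded) as Map);
  assert(decoded.displayName == 'cat');
}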
-class GoogleCloudAiplatformV1AnnotationSpec { - /// Timestamp when this AnnotationSpec was created. + /// Lists TrainingPipelines in a Location. /// - /// Output only. - core.String? createTime; - - /// The user-defined name of the AnnotationSpec. + /// Request parameters: /// - /// The name can be up to 128 characters long and can consist of any UTF-8 - /// characters. + /// [parent] - Required. The resource name of the Location to list the + /// TrainingPipelines from. Format: `projects/{project}/locations/{location}` + /// Value must have pattern `^projects/\[^/\]+/locations/\[^/\]+$`. /// - /// Required. - core.String? displayName; - - /// Used to perform consistent read-modify-write updates. + /// [filter] - The standard list filter. Supported fields: * \`display_name\` + /// supports \`=\`, \`!=\` comparisons, and \`:\` wildcard. * \`state\` + /// supports \`=\`, \`!=\` comparisons. * \`training_task_definition\` \`=\`, + /// \`!=\` comparisons, and \`:\` wildcard. * \`create_time\` supports \`=\`, + /// \`!=\`,\`\<\`, \`\<=\`,\`\>\`, \`\>=\` comparisons. \`create_time\` must + /// be in RFC 3339 format. * \`labels\` supports general map functions that + /// is: \`labels.key=value\` - key:value equality \`labels.key:* - key + /// existence Some examples of using the filter are: * + /// \`state="PIPELINE_STATE_SUCCEEDED" AND display_name:"my_pipeline_*"\` * + /// \`state!="PIPELINE_STATE_FAILED" OR display_name="my_pipeline"\` * \`NOT + /// display_name="my_pipeline"\` * \`create_time\>"2021-05-18T00:00:00Z"\` * + /// \`training_task_definition:"*automl_text_classification*"\` /// - /// If not set, a blind "overwrite" update happens. + /// [pageSize] - The standard list page size. /// - /// Optional. - core.String? etag; - - /// Resource name of the AnnotationSpec. + /// [pageToken] - The standard list page token. Typically obtained via + /// ListTrainingPipelinesResponse.next_page_token of the previous + /// PipelineService.ListTrainingPipelines call. /// - /// Output only. - core.String? name; - - /// Timestamp when AnnotationSpec was last updated. + /// [readMask] - Mask specifying which fields to read. /// - /// Output only. - core.String? updateTime; - - GoogleCloudAiplatformV1AnnotationSpec({ - this.createTime, - this.displayName, - this.etag, - this.name, - this.updateTime, - }); + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [GoogleCloudAiplatformV1ListTrainingPipelinesResponse]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future list( + core.String parent, { + core.String? filter, + core.int? pageSize, + core.String? pageToken, + core.String? readMask, + core.String? 
$fields, + }) async { + final queryParams_ = >{ + if (filter != null) 'filter': [filter], + if (pageSize != null) 'pageSize': ['${pageSize}'], + if (pageToken != null) 'pageToken': [pageToken], + if (readMask != null) 'readMask': [readMask], + if ($fields != null) 'fields': [$fields], + }; - GoogleCloudAiplatformV1AnnotationSpec.fromJson(core.Map json_) - : this( - createTime: json_['createTime'] as core.String?, - displayName: json_['displayName'] as core.String?, - etag: json_['etag'] as core.String?, - name: json_['name'] as core.String?, - updateTime: json_['updateTime'] as core.String?, - ); + final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/trainingPipelines'; - core.Map toJson() => { - if (createTime != null) 'createTime': createTime!, - if (displayName != null) 'displayName': displayName!, - if (etag != null) 'etag': etag!, - if (name != null) 'name': name!, - if (updateTime != null) 'updateTime': updateTime!, - }; + final response_ = await _requester.request( + url_, + 'GET', + queryParams: queryParams_, + ); + return GoogleCloudAiplatformV1ListTrainingPipelinesResponse.fromJson( + response_ as core.Map); + } } -/// Instance of a general artifact. -class GoogleCloudAiplatformV1Artifact { - /// Timestamp when this Artifact was created. - /// - /// Output only. - core.String? createTime; - - /// Description of the Artifact - core.String? description; +class ProjectsLocationsTrainingPipelinesOperationsResource { + final commons.ApiRequester _requester; - /// User provided display name of the Artifact. - /// - /// May be up to 128 Unicode characters. - core.String? displayName; + ProjectsLocationsTrainingPipelinesOperationsResource( + commons.ApiRequester client) + : _requester = client; - /// An eTag used to perform consistent read-modify-write updates. + /// Starts asynchronous cancellation on a long-running operation. /// - /// If not set, a blind "overwrite" update happens. - core.String? etag; - - /// The labels with user-defined metadata to organize your Artifacts. + /// The server makes a best effort to cancel the operation, but success is not + /// guaranteed. If the server doesn't support this method, it returns + /// `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation + /// or other methods to check whether the cancellation succeeded or whether + /// the operation completed despite cancellation. On successful cancellation, + /// the operation is not deleted; instead, it becomes an operation with an + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// - /// Label keys and values can be no longer than 64 characters (Unicode - /// codepoints), can only contain lowercase letters, numeric characters, - /// underscores and dashes. International characters are allowed. No more than - /// 64 user labels can be associated with one Artifact (System labels are - /// excluded). - core.Map? labels; - - /// Properties of the Artifact. + /// Request parameters: /// - /// Top level metadata keys' heading and trailing spaces will be trimmed. The - /// size of this field should not exceed 200KB. + /// [name] - The name of the operation resource to be cancelled. + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/trainingPipelines/\[^/\]+/operations/\[^/\]+$`. /// - /// The values for Object must be JSON objects. It can consist of `num`, - /// `String`, `bool` and `null` as well as `Map` and `List` values. - core.Map? metadata; - - /// The resource name of the Artifact. 
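A sketch of the TrainingPipelines `list` call above, using one of the filter expressions given in its doc comment. The `trainingPipelines` and `nextPageToken` fields on the response, and the `name`/`displayName` fields on TrainingPipeline, are assumed from the usual generated shapes rather than shown in this diff.

import 'package:googleapis/aiplatform/v1.dart' as aiplatform;

Future<void> listSucceededPipelines(aiplatform.AiplatformApi api) async {
  const parent = 'projects/my-project/locations/us-central1';
  final response = await api.projects.locations.trainingPipelines.list(
    parent,
    // Filter expression taken from the doc comment above.
    filter:
        'state="PIPELINE_STATE_SUCCEEDED" AND display_name:"my_pipeline_*"',
    pageSize: 20,
  );
  final pipelines = response.trainingPipelines ??
      const <aiplatform.GoogleCloudAiplatformV1TrainingPipeline>[];
  for (final pipeline in pipelines) {
    print('${pipeline.name} (${pipeline.displayName})');
  }
  // Pass response.nextPageToken back as pageToken to fetch the next page.
}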
+ /// [$fields] - Selector specifying which fields to include in a partial + /// response. /// - /// Output only. - core.String? name; - - /// The title of the schema describing the metadata. + /// Completes with a [GoogleProtobufEmpty]. /// - /// Schema title and version is expected to be registered in earlier Create - /// Schema calls. And both are used together as unique identifiers to identify - /// schemas within the local metadata store. - core.String? schemaTitle; - - /// The version of the schema in schema_name to use. + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. /// - /// Schema title and version is expected to be registered in earlier Create - /// Schema calls. And both are used together as unique identifiers to identify - /// schemas within the local metadata store. - core.String? schemaVersion; + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future cancel( + core.String name, { + core.String? $fields, + }) async { + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; - /// The state of this Artifact. - /// - /// This is a property of the Artifact, and does not imply or capture any - /// ongoing process. This property is managed by clients (such as Vertex AI - /// Pipelines), and the system does not prescribe or check the validity of - /// state transitions. - /// Possible string values are: - /// - "STATE_UNSPECIFIED" : Unspecified state for the Artifact. - /// - "PENDING" : A state used by systems like Vertex AI Pipelines to indicate - /// that the underlying data item represented by this Artifact is being - /// created. - /// - "LIVE" : A state indicating that the Artifact should exist, unless - /// something external to the system deletes it. - core.String? state; + final url_ = 'v1/' + core.Uri.encodeFull('$name') + ':cancel'; - /// Timestamp when this Artifact was last updated. - /// - /// Output only. - core.String? updateTime; + final response_ = await _requester.request( + url_, + 'POST', + queryParams: queryParams_, + ); + return GoogleProtobufEmpty.fromJson( + response_ as core.Map); + } - /// The uniform resource identifier of the artifact file. + /// Deletes a long-running operation. /// - /// May be empty if there is no actual artifact file. - core.String? uri; - - GoogleCloudAiplatformV1Artifact({ - this.createTime, - this.description, - this.displayName, - this.etag, - this.labels, - this.metadata, - this.name, - this.schemaTitle, - this.schemaVersion, - this.state, - this.updateTime, - this.uri, - }); - - GoogleCloudAiplatformV1Artifact.fromJson(core.Map json_) - : this( - createTime: json_['createTime'] as core.String?, - description: json_['description'] as core.String?, - displayName: json_['displayName'] as core.String?, - etag: json_['etag'] as core.String?, - labels: - (json_['labels'] as core.Map?)?.map( - (key, value) => core.MapEntry( - key, - value as core.String, - ), - ), - metadata: json_.containsKey('metadata') - ? 
json_['metadata'] as core.Map - : null, - name: json_['name'] as core.String?, - schemaTitle: json_['schemaTitle'] as core.String?, - schemaVersion: json_['schemaVersion'] as core.String?, - state: json_['state'] as core.String?, - updateTime: json_['updateTime'] as core.String?, - uri: json_['uri'] as core.String?, - ); - - core.Map toJson() => { - if (createTime != null) 'createTime': createTime!, - if (description != null) 'description': description!, - if (displayName != null) 'displayName': displayName!, - if (etag != null) 'etag': etag!, - if (labels != null) 'labels': labels!, - if (metadata != null) 'metadata': metadata!, - if (name != null) 'name': name!, - if (schemaTitle != null) 'schemaTitle': schemaTitle!, - if (schemaVersion != null) 'schemaVersion': schemaVersion!, - if (state != null) 'state': state!, - if (updateTime != null) 'updateTime': updateTime!, - if (uri != null) 'uri': uri!, - }; -} - -/// Request message for NotebookService.AssignNotebookRuntime. -class GoogleCloudAiplatformV1AssignNotebookRuntimeRequest { - /// Provide runtime specific information (e.g. runtime owner, notebook id) - /// used for NotebookRuntime assignment. + /// This method indicates that the client is no longer interested in the + /// operation result. It does not cancel the operation. If the server doesn't + /// support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. /// - /// Required. - GoogleCloudAiplatformV1NotebookRuntime? notebookRuntime; - - /// User specified ID for the notebook runtime. + /// Request parameters: /// - /// Optional. - core.String? notebookRuntimeId; - - /// The resource name of the NotebookRuntimeTemplate based on which a - /// NotebookRuntime will be assigned (reuse or create a new one). + /// [name] - The name of the operation resource to be deleted. + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/trainingPipelines/\[^/\]+/operations/\[^/\]+$`. /// - /// Required. - core.String? notebookRuntimeTemplate; - - GoogleCloudAiplatformV1AssignNotebookRuntimeRequest({ - this.notebookRuntime, - this.notebookRuntimeId, - this.notebookRuntimeTemplate, - }); + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [GoogleProtobufEmpty]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future delete( + core.String name, { + core.String? $fields, + }) async { + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; - GoogleCloudAiplatformV1AssignNotebookRuntimeRequest.fromJson(core.Map json_) - : this( - notebookRuntime: json_.containsKey('notebookRuntime') - ? 
GoogleCloudAiplatformV1NotebookRuntime.fromJson( - json_['notebookRuntime'] - as core.Map) - : null, - notebookRuntimeId: json_['notebookRuntimeId'] as core.String?, - notebookRuntimeTemplate: - json_['notebookRuntimeTemplate'] as core.String?, - ); + final url_ = 'v1/' + core.Uri.encodeFull('$name'); - core.Map toJson() => { - if (notebookRuntime != null) 'notebookRuntime': notebookRuntime!, - if (notebookRuntimeId != null) 'notebookRuntimeId': notebookRuntimeId!, - if (notebookRuntimeTemplate != null) - 'notebookRuntimeTemplate': notebookRuntimeTemplate!, - }; -} + final response_ = await _requester.request( + url_, + 'DELETE', + queryParams: queryParams_, + ); + return GoogleProtobufEmpty.fromJson( + response_ as core.Map); + } -/// Attribution that explains a particular prediction output. -class GoogleCloudAiplatformV1Attribution { - /// Error of feature_attributions caused by approximation used in the - /// explanation method. + /// Gets the latest state of a long-running operation. /// - /// Lower value means more precise attributions. * For Sampled Shapley - /// attribution, increasing path_count might reduce the error. * For - /// Integrated Gradients attribution, increasing step_count might reduce the - /// error. * For XRAI attribution, increasing step_count might reduce the - /// error. See \[this introduction\](/vertex-ai/docs/explainable-ai/overview) - /// for more information. + /// Clients can use this method to poll the operation result at intervals as + /// recommended by the API service. /// - /// Output only. - core.double? approximationError; - - /// Model predicted output if the input instance is constructed from the - /// baselines of all the features defined in ExplanationMetadata.inputs. + /// Request parameters: /// - /// The field name of the output is determined by the key in - /// ExplanationMetadata.outputs. If the Model's predicted output has multiple - /// dimensions (rank \> 1), this is the value in the output located by - /// output_index. If there are multiple baselines, their output values are - /// averaged. + /// [name] - The name of the operation resource. + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/trainingPipelines/\[^/\]+/operations/\[^/\]+$`. /// - /// Output only. - core.double? baselineOutputValue; - - /// Attributions of each explained feature. + /// [$fields] - Selector specifying which fields to include in a partial + /// response. /// - /// Features are extracted from the prediction instances according to - /// explanation metadata for inputs. The value is a struct, whose keys are the - /// name of the feature. The values are how much the feature in the instance - /// contributed to the predicted result. The format of the value is determined - /// by the feature's input format: * If the feature is a scalar value, the - /// attribution value is a floating number. * If the feature is an array of - /// scalar values, the attribution value is an array. * If the feature is a - /// struct, the attribution value is a struct. The keys in the attribution - /// value struct are the same as the keys in the feature struct. The formats - /// of the values in the attribution struct are determined by the formats of - /// the values in the feature struct. The - /// ExplanationMetadata.feature_attributions_schema_uri field, pointed to by - /// the ExplanationSpec field of the Endpoint.deployed_models object, points - /// to the schema file that describes the features and their attribution - /// values (if it is populated). 
+ /// Completes with a [GoogleLongrunningOperation]. /// - /// Output only. + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. /// - /// The values for Object must be JSON objects. It can consist of `num`, - /// `String`, `bool` and `null` as well as `Map` and `List` values. - core.Object? featureAttributions; + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future get( + core.String name, { + core.String? $fields, + }) async { + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; - /// Model predicted output on the corresponding explanation instance. + final url_ = 'v1/' + core.Uri.encodeFull('$name'); + + final response_ = await _requester.request( + url_, + 'GET', + queryParams: queryParams_, + ); + return GoogleLongrunningOperation.fromJson( + response_ as core.Map); + } + + /// Lists operations that match the specified filter in the request. /// - /// The field name of the output is determined by the key in - /// ExplanationMetadata.outputs. If the Model predicted output has multiple - /// dimensions, this is the value in the output located by output_index. + /// If the server doesn't support this method, it returns `UNIMPLEMENTED`. /// - /// Output only. - core.double? instanceOutputValue; - - /// The display name of the output identified by output_index. + /// Request parameters: /// - /// For example, the predicted class name by a multi-classification Model. - /// This field is only populated iff the Model predicts display names as a - /// separate field along with the explained output. The predicted display name - /// must has the same shape of the explained output, and can be located using - /// output_index. + /// [name] - The name of the operation's parent resource. + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/trainingPipelines/\[^/\]+$`. /// - /// Output only. - core.String? outputDisplayName; - - /// The index that locates the explained prediction output. + /// [filter] - The standard list filter. /// - /// If the prediction output is a scalar value, output_index is not populated. - /// If the prediction output has multiple dimensions, the length of the - /// output_index list is the same as the number of dimensions of the output. - /// The i-th element in output_index is the element index of the i-th - /// dimension of the output vector. Indices start from 0. + /// [pageSize] - The standard list page size. /// - /// Output only. - core.List? outputIndex; - - /// Name of the explain output. + /// [pageToken] - The standard list page token. /// - /// Specified as the key in ExplanationMetadata.outputs. + /// [$fields] - Selector specifying which fields to include in a partial + /// response. /// - /// Output only. - core.String? outputName; - - GoogleCloudAiplatformV1Attribution({ - this.approximationError, - this.baselineOutputValue, - this.featureAttributions, - this.instanceOutputValue, - this.outputDisplayName, - this.outputIndex, - this.outputName, - }); + /// Completes with a [GoogleLongrunningListOperationsResponse]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future list( + core.String name, { + core.String? filter, + core.int? pageSize, + core.String? pageToken, + core.String? 
$fields, + }) async { + final queryParams_ = >{ + if (filter != null) 'filter': [filter], + if (pageSize != null) 'pageSize': ['${pageSize}'], + if (pageToken != null) 'pageToken': [pageToken], + if ($fields != null) 'fields': [$fields], + }; - GoogleCloudAiplatformV1Attribution.fromJson(core.Map json_) - : this( - approximationError: - (json_['approximationError'] as core.num?)?.toDouble(), - baselineOutputValue: - (json_['baselineOutputValue'] as core.num?)?.toDouble(), - featureAttributions: json_['featureAttributions'], - instanceOutputValue: - (json_['instanceOutputValue'] as core.num?)?.toDouble(), - outputDisplayName: json_['outputDisplayName'] as core.String?, - outputIndex: (json_['outputIndex'] as core.List?) - ?.map((value) => value as core.int) - .toList(), - outputName: json_['outputName'] as core.String?, - ); + final url_ = 'v1/' + core.Uri.encodeFull('$name') + '/operations'; - core.Map toJson() => { - if (approximationError != null) - 'approximationError': approximationError!, - if (baselineOutputValue != null) - 'baselineOutputValue': baselineOutputValue!, - if (featureAttributions != null) - 'featureAttributions': featureAttributions!, - if (instanceOutputValue != null) - 'instanceOutputValue': instanceOutputValue!, - if (outputDisplayName != null) 'outputDisplayName': outputDisplayName!, - if (outputIndex != null) 'outputIndex': outputIndex!, - if (outputName != null) 'outputName': outputName!, - }; -} + final response_ = await _requester.request( + url_, + 'GET', + queryParams: queryParams_, + ); + return GoogleLongrunningListOperationsResponse.fromJson( + response_ as core.Map); + } -/// A description of resources that to large degree are decided by Vertex AI, -/// and require only a modest additional configuration. -/// -/// Each Model supporting these resources documents its specific guidelines. -class GoogleCloudAiplatformV1AutomaticResources { - /// The maximum number of replicas this DeployedModel may be deployed on when - /// the traffic against it increases. + /// Waits until the specified long-running operation is done or reaches at + /// most a specified timeout, returning the latest state. /// - /// If the requested value is too large, the deployment will error, but if - /// deployment succeeds then the ability to scale the model to that many - /// replicas is guaranteed (barring service outages). If traffic against the - /// DeployedModel increases beyond what its replicas at maximum may handle, a - /// portion of the traffic will be dropped. If this value is not provided, a - /// no upper bound for scaling under heavy traffic will be assume, though - /// Vertex AI may be unable to scale beyond certain replica number. + /// If the operation is already done, the latest state is immediately + /// returned. If the timeout specified is greater than the default HTTP/RPC + /// timeout, the HTTP/RPC timeout is used. If the server does not support this + /// method, it returns `google.rpc.Code.UNIMPLEMENTED`. Note that this method + /// is on a best-effort basis. It may return the latest state before the + /// specified timeout (including immediately), meaning even an immediate + /// response is no guarantee that the operation is done. /// - /// Immutable. - core.int? maxReplicaCount; - - /// The minimum number of replicas this DeployedModel will be always deployed - /// on. 
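The `wait` method described here can replace a get-polling loop when a bounded wait is acceptable. In this sketch the operation name is a placeholder, the '120s' value assumes the protobuf Duration string encoding for the timeout parameter, and `done`/`name` are the standard Operation fields rather than fields shown in this diff.

import 'package:googleapis/aiplatform/v1.dart' as aiplatform;

Future<void> waitForPipelineOperation(aiplatform.AiplatformApi api) async {
  // Hypothetical operation name, following the documented pattern.
  const name = 'projects/my-project/locations/us-central1/'
      'trainingPipelines/456/operations/789';
  final op = await api.projects.locations.trainingPipelines.operations.wait(
    name,
    timeout: '120s',
  );
  if (op.done ?? false) {
    // The operation finished, successfully or with an error populated.
    print('operation completed: ${op.name}');
  } else {
    print('still running after the bounded wait; poll get() again later');
  }
}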
+ /// Request parameters: /// - /// If traffic against it increases, it may dynamically be deployed onto more - /// replicas up to max_replica_count, and as traffic decreases, some of these - /// extra replicas may be freed. If the requested value is too large, the - /// deployment will error. + /// [name] - The name of the operation resource to wait on. + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/trainingPipelines/\[^/\]+/operations/\[^/\]+$`. /// - /// Immutable. - core.int? minReplicaCount; - - GoogleCloudAiplatformV1AutomaticResources({ - this.maxReplicaCount, - this.minReplicaCount, - }); - - GoogleCloudAiplatformV1AutomaticResources.fromJson(core.Map json_) - : this( - maxReplicaCount: json_['maxReplicaCount'] as core.int?, - minReplicaCount: json_['minReplicaCount'] as core.int?, - ); - - core.Map toJson() => { - if (maxReplicaCount != null) 'maxReplicaCount': maxReplicaCount!, - if (minReplicaCount != null) 'minReplicaCount': minReplicaCount!, - }; -} - -/// The metric specification that defines the target resource utilization (CPU -/// utilization, accelerator's duty cycle, and so on) for calculating the -/// desired replica count. -class GoogleCloudAiplatformV1AutoscalingMetricSpec { - /// The resource metric name. + /// [timeout] - The maximum duration to wait before timing out. If left blank, + /// the wait will be at most the time permitted by the underlying HTTP/RPC + /// protocol. If RPC context deadline is also specified, the shorter one will + /// be used. /// - /// Supported metrics: * For Online Prediction: * - /// `aiplatform.googleapis.com/prediction/online/accelerator/duty_cycle` * - /// `aiplatform.googleapis.com/prediction/online/cpu/utilization` + /// [$fields] - Selector specifying which fields to include in a partial + /// response. /// - /// Required. - core.String? metricName; - - /// The target resource utilization in percentage (1% - 100%) for the given - /// metric; once the real usage deviates from the target by a certain - /// percentage, the machine replicas change. + /// Completes with a [GoogleLongrunningOperation]. /// - /// The default value is 60 (representing 60%) if not provided. - core.int? target; - - GoogleCloudAiplatformV1AutoscalingMetricSpec({ - this.metricName, - this.target, - }); + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future wait( + core.String name, { + core.String? timeout, + core.String? $fields, + }) async { + final queryParams_ = >{ + if (timeout != null) 'timeout': [timeout], + if ($fields != null) 'fields': [$fields], + }; - GoogleCloudAiplatformV1AutoscalingMetricSpec.fromJson(core.Map json_) - : this( - metricName: json_['metricName'] as core.String?, - target: json_['target'] as core.int?, - ); + final url_ = 'v1/' + core.Uri.encodeFull('$name') + ':wait'; - core.Map toJson() => { - if (metricName != null) 'metricName': metricName!, - if (target != null) 'target': target!, - }; + final response_ = await _requester.request( + url_, + 'POST', + queryParams: queryParams_, + ); + return GoogleLongrunningOperation.fromJson( + response_ as core.Map); + } } -/// The storage details for Avro input content. -class GoogleCloudAiplatformV1AvroSource { - /// Google Cloud Storage location. - /// - /// Required. - GoogleCloudAiplatformV1GcsSource? 
gcsSource; - - GoogleCloudAiplatformV1AvroSource({ - this.gcsSource, - }); +class ProjectsLocationsTuningJobsResource { + final commons.ApiRequester _requester; - GoogleCloudAiplatformV1AvroSource.fromJson(core.Map json_) - : this( - gcsSource: json_.containsKey('gcsSource') - ? GoogleCloudAiplatformV1GcsSource.fromJson( - json_['gcsSource'] as core.Map) - : null, - ); + ProjectsLocationsTuningJobsOperationsResource get operations => + ProjectsLocationsTuningJobsOperationsResource(_requester); - core.Map toJson() => { - if (gcsSource != null) 'gcsSource': gcsSource!, - }; -} + ProjectsLocationsTuningJobsResource(commons.ApiRequester client) + : _requester = client; -/// Request message for PipelineService.BatchCancelPipelineJobs. -class GoogleCloudAiplatformV1BatchCancelPipelineJobsRequest { - /// The names of the PipelineJobs to cancel. + /// Cancels a TuningJob. /// - /// A maximum of 32 PipelineJobs can be cancelled in a batch. Format: - /// `projects/{project}/locations/{location}/pipelineJobs/{pipelineJob}` + /// Starts asynchronous cancellation on the TuningJob. The server makes a best + /// effort to cancel the job, but success is not guaranteed. Clients can use + /// GenAiTuningService.GetTuningJob or other methods to check whether the + /// cancellation succeeded or whether the job completed despite cancellation. + /// On successful cancellation, the TuningJob is not deleted; instead it + /// becomes a job with a TuningJob.error value with a google.rpc.Status.code + /// of 1, corresponding to `Code.CANCELLED`, and TuningJob.state is set to + /// `CANCELLED`. /// - /// Required. - core.List? names; - - GoogleCloudAiplatformV1BatchCancelPipelineJobsRequest({ - this.names, - }); - - GoogleCloudAiplatformV1BatchCancelPipelineJobsRequest.fromJson(core.Map json_) - : this( - names: (json_['names'] as core.List?) - ?.map((value) => value as core.String) - .toList(), - ); - - core.Map toJson() => { - if (names != null) 'names': names!, - }; -} - -/// Request message for FeaturestoreService.BatchCreateFeatures. -/// -/// Request message for FeatureRegistryService.BatchCreateFeatures. -class GoogleCloudAiplatformV1BatchCreateFeaturesRequest { - /// The request message specifying the Features to create. + /// [request] - The metadata request object. /// - /// All Features must be created under the same parent EntityType / - /// FeatureGroup. The `parent` field in each child request message can be - /// omitted. If `parent` is set in a child request, then the value must match - /// the `parent` value in this request message. + /// Request parameters: /// - /// Required. - core.List? requests; - - GoogleCloudAiplatformV1BatchCreateFeaturesRequest({ - this.requests, - }); - - GoogleCloudAiplatformV1BatchCreateFeaturesRequest.fromJson(core.Map json_) - : this( - requests: (json_['requests'] as core.List?) - ?.map((value) => - GoogleCloudAiplatformV1CreateFeatureRequest.fromJson( - value as core.Map)) - .toList(), - ); - - core.Map toJson() => { - if (requests != null) 'requests': requests!, - }; -} - -/// Request message for TensorboardService.BatchCreateTensorboardRuns. -class GoogleCloudAiplatformV1BatchCreateTensorboardRunsRequest { - /// The request message specifying the TensorboardRuns to create. + /// [name] - Required. The name of the TuningJob to cancel. Format: + /// `projects/{project}/locations/{location}/tuningJobs/{tuning_job}` + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/tuningJobs/\[^/\]+$`. 
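A sketch of the TuningJob cancellation flow described above: issue the cancel, then re-read the job to observe its state. The accessor chain and job name are placeholders, and the `state` field on TuningJob is taken from the doc comment rather than from code shown in this diff.

import 'package:googleapis/aiplatform/v1.dart' as aiplatform;

Future<void> cancelTuningJob(aiplatform.AiplatformApi api) async {
  // Hypothetical job name, following the documented format.
  const name = 'projects/my-project/locations/us-central1/tuningJobs/321';
  await api.projects.locations.tuningJobs.cancel(
    aiplatform.GoogleCloudAiplatformV1CancelTuningJobRequest(),
    name,
  );
  // Cancellation is best-effort; re-read the job to see whether it reached
  // the CANCELLED state described above or completed despite the request.
  final job = await api.projects.locations.tuningJobs.get(name);
  print('state: ${job.state}');
}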
/// - /// A maximum of 1000 TensorboardRuns can be created in a batch. + /// [$fields] - Selector specifying which fields to include in a partial + /// response. /// - /// Required. - core.List? requests; - - GoogleCloudAiplatformV1BatchCreateTensorboardRunsRequest({ - this.requests, - }); - - GoogleCloudAiplatformV1BatchCreateTensorboardRunsRequest.fromJson( - core.Map json_) - : this( - requests: (json_['requests'] as core.List?) - ?.map((value) => - GoogleCloudAiplatformV1CreateTensorboardRunRequest.fromJson( - value as core.Map)) - .toList(), - ); - - core.Map toJson() => { - if (requests != null) 'requests': requests!, - }; -} - -/// Response message for TensorboardService.BatchCreateTensorboardRuns. -class GoogleCloudAiplatformV1BatchCreateTensorboardRunsResponse { - /// The created TensorboardRuns. - core.List? tensorboardRuns; - - GoogleCloudAiplatformV1BatchCreateTensorboardRunsResponse({ - this.tensorboardRuns, - }); - - GoogleCloudAiplatformV1BatchCreateTensorboardRunsResponse.fromJson( - core.Map json_) - : this( - tensorboardRuns: (json_['tensorboardRuns'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1TensorboardRun.fromJson( - value as core.Map)) - .toList(), - ); - - core.Map toJson() => { - if (tensorboardRuns != null) 'tensorboardRuns': tensorboardRuns!, - }; -} - -/// Request message for TensorboardService.BatchCreateTensorboardTimeSeries. -class GoogleCloudAiplatformV1BatchCreateTensorboardTimeSeriesRequest { - /// The request message specifying the TensorboardTimeSeries to create. + /// Completes with a [GoogleProtobufEmpty]. /// - /// A maximum of 1000 TensorboardTimeSeries can be created in a batch. + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. /// - /// Required. - core.List? - requests; - - GoogleCloudAiplatformV1BatchCreateTensorboardTimeSeriesRequest({ - this.requests, - }); - - GoogleCloudAiplatformV1BatchCreateTensorboardTimeSeriesRequest.fromJson( - core.Map json_) - : this( - requests: (json_['requests'] as core.List?) - ?.map((value) => - GoogleCloudAiplatformV1CreateTensorboardTimeSeriesRequest - .fromJson(value as core.Map)) - .toList(), - ); - - core.Map toJson() => { - if (requests != null) 'requests': requests!, - }; -} - -/// Response message for TensorboardService.BatchCreateTensorboardTimeSeries. -class GoogleCloudAiplatformV1BatchCreateTensorboardTimeSeriesResponse { - /// The created TensorboardTimeSeries. - core.List? - tensorboardTimeSeries; - - GoogleCloudAiplatformV1BatchCreateTensorboardTimeSeriesResponse({ - this.tensorboardTimeSeries, - }); + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future cancel( + GoogleCloudAiplatformV1CancelTuningJobRequest request, + core.String name, { + core.String? $fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; - GoogleCloudAiplatformV1BatchCreateTensorboardTimeSeriesResponse.fromJson( - core.Map json_) - : this( - tensorboardTimeSeries: (json_['tensorboardTimeSeries'] as core.List?) 
- ?.map((value) => - GoogleCloudAiplatformV1TensorboardTimeSeries.fromJson( - value as core.Map)) - .toList(), - ); + final url_ = 'v1/' + core.Uri.encodeFull('$name') + ':cancel'; - core.Map toJson() => { - if (tensorboardTimeSeries != null) - 'tensorboardTimeSeries': tensorboardTimeSeries!, - }; -} + final response_ = await _requester.request( + url_, + 'POST', + body: body_, + queryParams: queryParams_, + ); + return GoogleProtobufEmpty.fromJson( + response_ as core.Map); + } -/// A description of resources that are used for performing batch operations, -/// are dedicated to a Model, and need manual configuration. -class GoogleCloudAiplatformV1BatchDedicatedResources { - /// The specification of a single machine. + /// Creates a TuningJob. /// - /// Required. Immutable. - GoogleCloudAiplatformV1MachineSpec? machineSpec; - - /// The maximum number of machine replicas the batch operation may be scaled - /// to. + /// A created TuningJob right away will be attempted to be run. /// - /// The default value is 10. + /// [request] - The metadata request object. /// - /// Immutable. - core.int? maxReplicaCount; - - /// The number of machine replicas used at the start of the batch operation. + /// Request parameters: /// - /// If not set, Vertex AI decides starting number, not greater than - /// max_replica_count + /// [parent] - Required. The resource name of the Location to create the + /// TuningJob in. Format: `projects/{project}/locations/{location}` + /// Value must have pattern `^projects/\[^/\]+/locations/\[^/\]+$`. /// - /// Immutable. - core.int? startingReplicaCount; - - GoogleCloudAiplatformV1BatchDedicatedResources({ - this.machineSpec, - this.maxReplicaCount, - this.startingReplicaCount, - }); - - GoogleCloudAiplatformV1BatchDedicatedResources.fromJson(core.Map json_) - : this( - machineSpec: json_.containsKey('machineSpec') - ? GoogleCloudAiplatformV1MachineSpec.fromJson( - json_['machineSpec'] as core.Map) - : null, - maxReplicaCount: json_['maxReplicaCount'] as core.int?, - startingReplicaCount: json_['startingReplicaCount'] as core.int?, - ); - - core.Map toJson() => { - if (machineSpec != null) 'machineSpec': machineSpec!, - if (maxReplicaCount != null) 'maxReplicaCount': maxReplicaCount!, - if (startingReplicaCount != null) - 'startingReplicaCount': startingReplicaCount!, - }; -} - -/// Request message for PipelineService.BatchDeletePipelineJobs. -class GoogleCloudAiplatformV1BatchDeletePipelineJobsRequest { - /// The names of the PipelineJobs to delete. + /// [$fields] - Selector specifying which fields to include in a partial + /// response. /// - /// A maximum of 32 PipelineJobs can be deleted in a batch. Format: - /// `projects/{project}/locations/{location}/pipelineJobs/{pipelineJob}` + /// Completes with a [GoogleCloudAiplatformV1TuningJob]. /// - /// Required. - core.List? names; - - GoogleCloudAiplatformV1BatchDeletePipelineJobsRequest({ - this.names, - }); - - GoogleCloudAiplatformV1BatchDeletePipelineJobsRequest.fromJson(core.Map json_) - : this( - names: (json_['names'] as core.List?) - ?.map((value) => value as core.String) - .toList(), - ); - - core.Map toJson() => { - if (names != null) 'names': names!, - }; -} - -/// Request message for ModelService.BatchImportEvaluatedAnnotations -class GoogleCloudAiplatformV1BatchImportEvaluatedAnnotationsRequest { - /// Evaluated annotations resource to be imported. + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. /// - /// Required. - core.List? 
evaluatedAnnotations; - - GoogleCloudAiplatformV1BatchImportEvaluatedAnnotationsRequest({ - this.evaluatedAnnotations, - }); + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future create( + GoogleCloudAiplatformV1TuningJob request, + core.String parent, { + core.String? $fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; - GoogleCloudAiplatformV1BatchImportEvaluatedAnnotationsRequest.fromJson( - core.Map json_) - : this( - evaluatedAnnotations: (json_['evaluatedAnnotations'] as core.List?) - ?.map((value) => - GoogleCloudAiplatformV1EvaluatedAnnotation.fromJson( - value as core.Map)) - .toList(), - ); + final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/tuningJobs'; - core.Map toJson() => { - if (evaluatedAnnotations != null) - 'evaluatedAnnotations': evaluatedAnnotations!, - }; -} + final response_ = await _requester.request( + url_, + 'POST', + body: body_, + queryParams: queryParams_, + ); + return GoogleCloudAiplatformV1TuningJob.fromJson( + response_ as core.Map); + } -/// Response message for ModelService.BatchImportEvaluatedAnnotations -class GoogleCloudAiplatformV1BatchImportEvaluatedAnnotationsResponse { - /// Number of EvaluatedAnnotations imported. + /// Gets a TuningJob. /// - /// Output only. - core.int? importedEvaluatedAnnotationsCount; - - GoogleCloudAiplatformV1BatchImportEvaluatedAnnotationsResponse({ - this.importedEvaluatedAnnotationsCount, - }); - - GoogleCloudAiplatformV1BatchImportEvaluatedAnnotationsResponse.fromJson( - core.Map json_) - : this( - importedEvaluatedAnnotationsCount: - json_['importedEvaluatedAnnotationsCount'] as core.int?, - ); - - core.Map toJson() => { - if (importedEvaluatedAnnotationsCount != null) - 'importedEvaluatedAnnotationsCount': - importedEvaluatedAnnotationsCount!, - }; -} - -/// Request message for MigrationService.BatchMigrateResources. -class GoogleCloudAiplatformV1BatchMigrateResourcesRequest { - /// The request messages specifying the resources to migrate. + /// Request parameters: /// - /// They must be in the same location as the destination. Up to 50 resources - /// can be migrated in one batch. + /// [name] - Required. The name of the TuningJob resource. Format: + /// `projects/{project}/locations/{location}/tuningJobs/{tuning_job}` + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/tuningJobs/\[^/\]+$`. /// - /// Required. - core.List? - migrateResourceRequests; - - GoogleCloudAiplatformV1BatchMigrateResourcesRequest({ - this.migrateResourceRequests, - }); + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [GoogleCloudAiplatformV1TuningJob]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future get( + core.String name, { + core.String? $fields, + }) async { + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; - GoogleCloudAiplatformV1BatchMigrateResourcesRequest.fromJson(core.Map json_) - : this( - migrateResourceRequests: - (json_['migrateResourceRequests'] as core.List?) 
- ?.map((value) => - GoogleCloudAiplatformV1MigrateResourceRequest.fromJson( - value as core.Map)) - .toList(), - ); + final url_ = 'v1/' + core.Uri.encodeFull('$name'); - core.Map toJson() => { - if (migrateResourceRequests != null) - 'migrateResourceRequests': migrateResourceRequests!, - }; -} + final response_ = await _requester.request( + url_, + 'GET', + queryParams: queryParams_, + ); + return GoogleCloudAiplatformV1TuningJob.fromJson( + response_ as core.Map); + } -/// A job that uses a Model to produce predictions on multiple input instances. -/// -/// If predictions for significant portion of the instances fail, the job may -/// finish without attempting predictions for all remaining instances. -class GoogleCloudAiplatformV1BatchPredictionJob { - /// Statistics on completed and failed prediction instances. + /// Lists TuningJobs in a Location. /// - /// Output only. - GoogleCloudAiplatformV1CompletionStats? completionStats; - - /// Time when the BatchPredictionJob was created. + /// Request parameters: /// - /// Output only. - core.String? createTime; - - /// The config of resources used by the Model during the batch prediction. + /// [parent] - Required. The resource name of the Location to list the + /// TuningJobs from. Format: `projects/{project}/locations/{location}` + /// Value must have pattern `^projects/\[^/\]+/locations/\[^/\]+$`. /// - /// If the Model supports DEDICATED_RESOURCES this config may be provided (and - /// the job will use these resources), if the Model doesn't support - /// AUTOMATIC_RESOURCES, this config must be provided. - GoogleCloudAiplatformV1BatchDedicatedResources? dedicatedResources; - - /// For custom-trained Models and AutoML Tabular Models, the container of the - /// DeployedModel instances will send `stderr` and `stdout` streams to Cloud - /// Logging by default. + /// [filter] - Optional. The standard list filter. /// - /// Please note that the logs incur cost, which are subject to - /// [Cloud Logging pricing](https://cloud.google.com/logging/pricing). User - /// can disable container logging by setting this flag to true. - core.bool? disableContainerLogging; - - /// The user-defined name of this BatchPredictionJob. + /// [pageSize] - Optional. The standard list page size. /// - /// Required. - core.String? displayName; - - /// Customer-managed encryption key options for a BatchPredictionJob. + /// [pageToken] - Optional. The standard list page token. Typically obtained + /// via ListTuningJobsResponse.next_page_token of the previous + /// GenAiTuningService.ListTuningJob\]\[\] call. /// - /// If this is set, then all resources created by the BatchPredictionJob will - /// be encrypted with the provided encryption key. - GoogleCloudAiplatformV1EncryptionSpec? encryptionSpec; - - /// Time when the BatchPredictionJob entered any of the following states: - /// `JOB_STATE_SUCCEEDED`, `JOB_STATE_FAILED`, `JOB_STATE_CANCELLED`. + /// [$fields] - Selector specifying which fields to include in a partial + /// response. /// - /// Output only. - core.String? endTime; - - /// Only populated when the job's state is JOB_STATE_FAILED or - /// JOB_STATE_CANCELLED. + /// Completes with a [GoogleCloudAiplatformV1ListTuningJobsResponse]. /// - /// Output only. - GoogleRpcStatus? error; - - /// Explanation configuration for this BatchPredictionJob. + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. /// - /// Can be specified only if generate_explanation is set to `true`. 
This value - /// overrides the value of Model.explanation_spec. All fields of - /// explanation_spec are optional in the request. If a field of the - /// explanation_spec object is not populated, the corresponding field of the - /// Model.explanation_spec object is inherited. - GoogleCloudAiplatformV1ExplanationSpec? explanationSpec; + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future list( + core.String parent, { + core.String? filter, + core.int? pageSize, + core.String? pageToken, + core.String? $fields, + }) async { + final queryParams_ = >{ + if (filter != null) 'filter': [filter], + if (pageSize != null) 'pageSize': ['${pageSize}'], + if (pageToken != null) 'pageToken': [pageToken], + if ($fields != null) 'fields': [$fields], + }; - /// Generate explanation with the batch prediction results. - /// - /// When set to `true`, the batch prediction output changes based on the - /// `predictions_format` field of the BatchPredictionJob.output_config object: - /// * `bigquery`: output includes a column named `explanation`. The value is a - /// struct that conforms to the Explanation object. * `jsonl`: The JSON - /// objects on each line include an additional entry keyed `explanation`. The - /// value of the entry is a JSON object that conforms to the Explanation - /// object. * `csv`: Generating explanations for CSV format is not supported. - /// If this field is set to true, either the Model.explanation_spec or - /// explanation_spec must be populated. - core.bool? generateExplanation; + final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/tuningJobs'; - /// Input configuration of the instances on which predictions are performed. + final response_ = await _requester.request( + url_, + 'GET', + queryParams: queryParams_, + ); + return GoogleCloudAiplatformV1ListTuningJobsResponse.fromJson( + response_ as core.Map); + } + + /// Rebase a TunedModel. /// - /// The schema of any single instance may be specified via the Model's - /// PredictSchemata's instance_schema_uri. + /// [request] - The metadata request object. /// - /// Required. - GoogleCloudAiplatformV1BatchPredictionJobInputConfig? inputConfig; - - /// Configuration for how to convert batch prediction input instances to the - /// prediction instances that are sent to the Model. - GoogleCloudAiplatformV1BatchPredictionJobInstanceConfig? instanceConfig; - - /// The labels with user-defined metadata to organize BatchPredictionJobs. + /// Request parameters: /// - /// Label keys and values can be no longer than 64 characters (Unicode + /// [parent] - Required. The resource name of the Location into which to + /// rebase the Model. Format: `projects/{project}/locations/{location}` + /// Value must have pattern `^projects/\[^/\]+/locations/\[^/\]+$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [GoogleLongrunningOperation]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future rebaseTunedModel( + GoogleCloudAiplatformV1RebaseTunedModelRequest request, + core.String parent, { + core.String? 
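// --- Illustrative usage sketch (not part of the generated diff) ---
// A minimal example of calling the TuningJob surface shown above. It assumes
// the published import path `package:googleapis/aiplatform/v1.dart`, the
// `AiplatformApi` entry point with a `cloudPlatformScope` constant, and a
// `projects.locations.tuningJobs` getter chain; adjust the names if your
// generated surface differs. The project and location below are placeholders.
import 'package:googleapis/aiplatform/v1.dart';
import 'package:googleapis_auth/auth_io.dart';

Future<void> tuningJobsExample() async {
  final client = await clientViaApplicationDefaultCredentials(
      scopes: [AiplatformApi.cloudPlatformScope]);
  final api = AiplatformApi(client);
  const parent = 'projects/my-project/locations/us-central1';

  // List existing TuningJobs using the standard paging parameters.
  final page =
      await api.projects.locations.tuningJobs.list(parent, pageSize: 10);
  for (final job in page.tuningJobs ?? <GoogleCloudAiplatformV1TuningJob>[]) {
    print('${job.name} -> ${job.state}'); // field names assumed
  }

  // Create a TuningJob; the concrete tuning spec fields are omitted here.
  final created = await api.projects.locations.tuningJobs
      .create(GoogleCloudAiplatformV1TuningJob(), parent);
  print('created ${created.name}');

  client.close();
}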
$fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = + 'v1/' + core.Uri.encodeFull('$parent') + '/tuningJobs:rebaseTunedModel'; + + final response_ = await _requester.request( + url_, + 'POST', + body: body_, + queryParams: queryParams_, + ); + return GoogleLongrunningOperation.fromJson( + response_ as core.Map); + } +} + +class ProjectsLocationsTuningJobsOperationsResource { + final commons.ApiRequester _requester; + + ProjectsLocationsTuningJobsOperationsResource(commons.ApiRequester client) + : _requester = client; + + /// Starts asynchronous cancellation on a long-running operation. + /// + /// The server makes a best effort to cancel the operation, but success is not + /// guaranteed. If the server doesn't support this method, it returns + /// `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation + /// or other methods to check whether the cancellation succeeded or whether + /// the operation completed despite cancellation. On successful cancellation, + /// the operation is not deleted; instead, it becomes an operation with an + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. + /// + /// Request parameters: + /// + /// [name] - The name of the operation resource to be cancelled. + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/tuningJobs/\[^/\]+/operations/\[^/\]+$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [GoogleProtobufEmpty]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future cancel( + core.String name, { + core.String? $fields, + }) async { + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name') + ':cancel'; + + final response_ = await _requester.request( + url_, + 'POST', + queryParams: queryParams_, + ); + return GoogleProtobufEmpty.fromJson( + response_ as core.Map); + } + + /// Deletes a long-running operation. + /// + /// This method indicates that the client is no longer interested in the + /// operation result. It does not cancel the operation. If the server doesn't + /// support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. + /// + /// Request parameters: + /// + /// [name] - The name of the operation resource to be deleted. + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/tuningJobs/\[^/\]+/operations/\[^/\]+$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [GoogleProtobufEmpty]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future delete( + core.String name, { + core.String? 
$fields, + }) async { + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name'); + + final response_ = await _requester.request( + url_, + 'DELETE', + queryParams: queryParams_, + ); + return GoogleProtobufEmpty.fromJson( + response_ as core.Map); + } + + /// Gets the latest state of a long-running operation. + /// + /// Clients can use this method to poll the operation result at intervals as + /// recommended by the API service. + /// + /// Request parameters: + /// + /// [name] - The name of the operation resource. + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/tuningJobs/\[^/\]+/operations/\[^/\]+$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [GoogleLongrunningOperation]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future get( + core.String name, { + core.String? $fields, + }) async { + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name'); + + final response_ = await _requester.request( + url_, + 'GET', + queryParams: queryParams_, + ); + return GoogleLongrunningOperation.fromJson( + response_ as core.Map); + } + + /// Lists operations that match the specified filter in the request. + /// + /// If the server doesn't support this method, it returns `UNIMPLEMENTED`. + /// + /// Request parameters: + /// + /// [name] - The name of the operation's parent resource. + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/tuningJobs/\[^/\]+$`. + /// + /// [filter] - The standard list filter. + /// + /// [pageSize] - The standard list page size. + /// + /// [pageToken] - The standard list page token. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [GoogleLongrunningListOperationsResponse]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future list( + core.String name, { + core.String? filter, + core.int? pageSize, + core.String? pageToken, + core.String? $fields, + }) async { + final queryParams_ = >{ + if (filter != null) 'filter': [filter], + if (pageSize != null) 'pageSize': ['${pageSize}'], + if (pageToken != null) 'pageToken': [pageToken], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name') + '/operations'; + + final response_ = await _requester.request( + url_, + 'GET', + queryParams: queryParams_, + ); + return GoogleLongrunningListOperationsResponse.fromJson( + response_ as core.Map); + } +} + +class PublishersResource { + final commons.ApiRequester _requester; + + PublishersModelsResource get models => PublishersModelsResource(_requester); + + PublishersResource(commons.ApiRequester client) : _requester = client; +} + +class PublishersModelsResource { + final commons.ApiRequester _requester; + + PublishersModelsResource(commons.ApiRequester client) : _requester = client; + + /// Return a list of tokens based on the input text. + /// + /// [request] - The metadata request object. 
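// --- Illustrative sketch: polling a tuning-job operation (not part of the diff) ---
// Assumes an `AiplatformApi` instance built as in the earlier sketch and an
// `operations` getter on the tuningJobs resource matching the
// ProjectsLocationsTuningJobsOperationsResource class above. The polling
// interval is a placeholder; use the interval recommended by the service.
import 'package:googleapis/aiplatform/v1.dart';

Future<GoogleLongrunningOperation> waitForTuningOperation(
    AiplatformApi api, String operationName) async {
  while (true) {
    final op =
        await api.projects.locations.tuningJobs.operations.get(operationName);
    if (op.done ?? false) {
      if (op.error != null) {
        throw StateError('operation failed: ${op.error!.message}');
      }
      return op;
    }
    await Future<void>.delayed(const Duration(seconds: 10));
  }
}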
+ /// + /// Request parameters: + /// + /// [endpoint] - Required. The name of the Endpoint requested to get lists of + /// tokens and token ids. + /// Value must have pattern `^publishers/\[^/\]+/models/\[^/\]+$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [GoogleCloudAiplatformV1ComputeTokensResponse]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future computeTokens( + GoogleCloudAiplatformV1ComputeTokensRequest request, + core.String endpoint, { + core.String? $fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$endpoint') + ':computeTokens'; + + final response_ = await _requester.request( + url_, + 'POST', + body: body_, + queryParams: queryParams_, + ); + return GoogleCloudAiplatformV1ComputeTokensResponse.fromJson( + response_ as core.Map); + } + + /// Perform a token counting. + /// + /// [request] - The metadata request object. + /// + /// Request parameters: + /// + /// [endpoint] - Required. The name of the Endpoint requested to perform token + /// counting. Format: + /// `projects/{project}/locations/{location}/endpoints/{endpoint}` + /// Value must have pattern `^publishers/\[^/\]+/models/\[^/\]+$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [GoogleCloudAiplatformV1CountTokensResponse]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future countTokens( + GoogleCloudAiplatformV1CountTokensRequest request, + core.String endpoint, { + core.String? $fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$endpoint') + ':countTokens'; + + final response_ = await _requester.request( + url_, + 'POST', + body: body_, + queryParams: queryParams_, + ); + return GoogleCloudAiplatformV1CountTokensResponse.fromJson( + response_ as core.Map); + } + + /// Generate content with multimodal inputs. + /// + /// [request] - The metadata request object. + /// + /// Request parameters: + /// + /// [model] - Required. The fully qualified name of the publisher model or + /// tuned model endpoint to use. Publisher model format: + /// `projects/{project}/locations/{location}/publishers / * /models / * ` + /// Tuned model endpoint format: + /// `projects/{project}/locations/{location}/endpoints/{endpoint}` + /// Value must have pattern `^publishers/\[^/\]+/models/\[^/\]+$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [GoogleCloudAiplatformV1GenerateContentResponse]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. 
+ async.Future generateContent( + GoogleCloudAiplatformV1GenerateContentRequest request, + core.String model, { + core.String? $fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$model') + ':generateContent'; + + final response_ = await _requester.request( + url_, + 'POST', + body: body_, + queryParams: queryParams_, + ); + return GoogleCloudAiplatformV1GenerateContentResponse.fromJson( + response_ as core.Map); + } + + /// Gets a Model Garden publisher model. + /// + /// Request parameters: + /// + /// [name] - Required. The name of the PublisherModel resource. Format: + /// `publishers/{publisher}/models/{publisher_model}` + /// Value must have pattern `^publishers/\[^/\]+/models/\[^/\]+$`. + /// + /// [huggingFaceToken] - Optional. Token used to access Hugging Face gated + /// models. + /// + /// [isHuggingFaceModel] - Optional. Boolean indicates whether the requested + /// model is a Hugging Face model. + /// + /// [languageCode] - Optional. The IETF BCP-47 language code representing the + /// language in which the publisher model's text information should be written + /// in. + /// + /// [view] - Optional. PublisherModel view specifying which fields to read. + /// Possible string values are: + /// - "PUBLISHER_MODEL_VIEW_UNSPECIFIED" : The default / unset value. The API + /// will default to the BASIC view. + /// - "PUBLISHER_MODEL_VIEW_BASIC" : Include basic metadata about the + /// publisher model, but not the full contents. + /// - "PUBLISHER_MODEL_VIEW_FULL" : Include everything. + /// - "PUBLISHER_MODEL_VERSION_VIEW_BASIC" : Include: VersionId, + /// ModelVersionExternalName, and SupportedActions. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [GoogleCloudAiplatformV1PublisherModel]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future get( + core.String name, { + core.String? huggingFaceToken, + core.bool? isHuggingFaceModel, + core.String? languageCode, + core.String? view, + core.String? $fields, + }) async { + final queryParams_ = >{ + if (huggingFaceToken != null) 'huggingFaceToken': [huggingFaceToken], + if (isHuggingFaceModel != null) + 'isHuggingFaceModel': ['${isHuggingFaceModel}'], + if (languageCode != null) 'languageCode': [languageCode], + if (view != null) 'view': [view], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name'); + + final response_ = await _requester.request( + url_, + 'GET', + queryParams: queryParams_, + ); + return GoogleCloudAiplatformV1PublisherModel.fromJson( + response_ as core.Map); + } + + /// Generate content with multimodal inputs with streaming support. + /// + /// [request] - The metadata request object. + /// + /// Request parameters: + /// + /// [model] - Required. The fully qualified name of the publisher model or + /// tuned model endpoint to use. Publisher model format: + /// `projects/{project}/locations/{location}/publishers / * /models / * ` + /// Tuned model endpoint format: + /// `projects/{project}/locations/{location}/endpoints/{endpoint}` + /// Value must have pattern `^publishers/\[^/\]+/models/\[^/\]+$`. 
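// --- Illustrative sketch: publisher-model token counting and generation ---
// (Not part of the generated diff.) Assumes the `api.publishers.models` getter
// chain implied by the PublishersResource/PublishersModelsResource classes
// above; the model name and the request/response field names (`contents`,
// `parts`, `totalTokens`, `candidates`) are assumptions drawn from the v1
// surface and may need adjusting.
import 'package:googleapis/aiplatform/v1.dart';

Future<void> publisherModelExample(AiplatformApi api) async {
  const model = 'publishers/google/models/gemini-1.5-flash'; // placeholder id

  final prompt = GoogleCloudAiplatformV1Content(
    role: 'user',
    parts: [GoogleCloudAiplatformV1Part(text: 'Say hello in French.')],
  );

  // Count tokens for the prompt before generating.
  final count = await api.publishers.models.countTokens(
      GoogleCloudAiplatformV1CountTokensRequest(contents: [prompt]), model);
  print('prompt tokens: ${count.totalTokens}');

  // Non-streaming generation; streamGenerateContent takes the same request.
  final response = await api.publishers.models.generateContent(
      GoogleCloudAiplatformV1GenerateContentRequest(contents: [prompt]), model);
  // Assumes at least one candidate is returned.
  print(response.candidates?.first.content?.parts?.first.text);
}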
+ /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [GoogleCloudAiplatformV1GenerateContentResponse]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future + streamGenerateContent( + GoogleCloudAiplatformV1GenerateContentRequest request, + core.String model, { + core.String? $fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = + 'v1/' + core.Uri.encodeFull('$model') + ':streamGenerateContent'; + + final response_ = await _requester.request( + url_, + 'POST', + body: body_, + queryParams: queryParams_, + ); + return GoogleCloudAiplatformV1GenerateContentResponse.fromJson( + response_ as core.Map); + } +} + +/// Message that represents an arbitrary HTTP body. +/// +/// It should only be used for payload formats that can't be represented as +/// JSON, such as raw binary or an HTML page. This message can be used both in +/// streaming and non-streaming API methods in the request as well as the +/// response. It can be used as a top-level request field, which is convenient +/// if one wants to extract parameters from either the URL or HTTP template into +/// the request fields and also want access to the raw HTTP body. Example: +/// message GetResourceRequest { // A unique request id. string request_id = 1; +/// // The raw HTTP body is bound to this field. google.api.HttpBody http_body = +/// 2; } service ResourceService { rpc GetResource(GetResourceRequest) returns +/// (google.api.HttpBody); rpc UpdateResource(google.api.HttpBody) returns +/// (google.protobuf.Empty); } Example with streaming methods: service +/// CaldavService { rpc GetCalendar(stream google.api.HttpBody) returns (stream +/// google.api.HttpBody); rpc UpdateCalendar(stream google.api.HttpBody) returns +/// (stream google.api.HttpBody); } Use of this type only changes how the +/// request and response bodies are handled, all other features will continue to +/// work unchanged. +typedef GoogleApiHttpBody = $HttpBody; + +/// Parameters that configure the active learning pipeline. +/// +/// Active learning will label the data incrementally by several iterations. For +/// every iteration, it will select a batch of data based on the sampling +/// strategy. +class GoogleCloudAiplatformV1ActiveLearningConfig { + /// Max number of human labeled DataItems. + core.String? maxDataItemCount; + + /// Max percent of total DataItems for human labeling. + core.int? maxDataItemPercentage; + + /// Active learning data sampling config. + /// + /// For every active learning labeling iteration, it will select a batch of + /// data based on the sampling strategy. + GoogleCloudAiplatformV1SampleConfig? sampleConfig; + + /// CMLE training config. + /// + /// For every active learning labeling iteration, system will train a machine + /// learning model on CMLE. The trained model will be used by data sampling + /// algorithm to select DataItems. + GoogleCloudAiplatformV1TrainingConfig? 
trainingConfig; + + GoogleCloudAiplatformV1ActiveLearningConfig({ + this.maxDataItemCount, + this.maxDataItemPercentage, + this.sampleConfig, + this.trainingConfig, + }); + + GoogleCloudAiplatformV1ActiveLearningConfig.fromJson(core.Map json_) + : this( + maxDataItemCount: json_['maxDataItemCount'] as core.String?, + maxDataItemPercentage: json_['maxDataItemPercentage'] as core.int?, + sampleConfig: json_.containsKey('sampleConfig') + ? GoogleCloudAiplatformV1SampleConfig.fromJson( + json_['sampleConfig'] as core.Map) + : null, + trainingConfig: json_.containsKey('trainingConfig') + ? GoogleCloudAiplatformV1TrainingConfig.fromJson( + json_['trainingConfig'] + as core.Map) + : null, + ); + + core.Map toJson() => { + if (maxDataItemCount != null) 'maxDataItemCount': maxDataItemCount!, + if (maxDataItemPercentage != null) + 'maxDataItemPercentage': maxDataItemPercentage!, + if (sampleConfig != null) 'sampleConfig': sampleConfig!, + if (trainingConfig != null) 'trainingConfig': trainingConfig!, + }; +} + +/// Request message for MetadataService.AddContextArtifactsAndExecutions. +class GoogleCloudAiplatformV1AddContextArtifactsAndExecutionsRequest { + /// The resource names of the Artifacts to attribute to the Context. + /// + /// Format: + /// `projects/{project}/locations/{location}/metadataStores/{metadatastore}/artifacts/{artifact}` + core.List? artifacts; + + /// The resource names of the Executions to associate with the Context. + /// + /// Format: + /// `projects/{project}/locations/{location}/metadataStores/{metadatastore}/executions/{execution}` + core.List? executions; + + GoogleCloudAiplatformV1AddContextArtifactsAndExecutionsRequest({ + this.artifacts, + this.executions, + }); + + GoogleCloudAiplatformV1AddContextArtifactsAndExecutionsRequest.fromJson( + core.Map json_) + : this( + artifacts: (json_['artifacts'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + executions: (json_['executions'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + ); + + core.Map toJson() => { + if (artifacts != null) 'artifacts': artifacts!, + if (executions != null) 'executions': executions!, + }; +} + +/// Response message for MetadataService.AddContextArtifactsAndExecutions. +typedef GoogleCloudAiplatformV1AddContextArtifactsAndExecutionsResponse + = $Empty; + +/// Request message for MetadataService.AddContextChildren. +typedef GoogleCloudAiplatformV1AddContextChildrenRequest + = $ContextChildrenRequest; + +/// Response message for MetadataService.AddContextChildren. +typedef GoogleCloudAiplatformV1AddContextChildrenResponse = $Empty; + +/// Request message for MetadataService.AddExecutionEvents. +class GoogleCloudAiplatformV1AddExecutionEventsRequest { + /// The Events to create and add. + core.List? events; + + GoogleCloudAiplatformV1AddExecutionEventsRequest({ + this.events, + }); + + GoogleCloudAiplatformV1AddExecutionEventsRequest.fromJson(core.Map json_) + : this( + events: (json_['events'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1Event.fromJson( + value as core.Map)) + .toList(), + ); + + core.Map toJson() => { + if (events != null) 'events': events!, + }; +} + +/// Response message for MetadataService.AddExecutionEvents. +typedef GoogleCloudAiplatformV1AddExecutionEventsResponse = $Empty; + +/// Request message for VizierService.AddTrialMeasurement. +class GoogleCloudAiplatformV1AddTrialMeasurementRequest { + /// The measurement to be added to a Trial. + /// + /// Required. + GoogleCloudAiplatformV1Measurement? 
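// --- Illustrative sketch: constructing a message and round-tripping JSON ---
// (Not part of the generated diff.) Demonstrates the toJson/fromJson pattern
// shared by the generated classes above: unset fields are omitted from the
// encoded map, and int64-style fields such as `maxDataItemCount` are carried
// as strings.
import 'package:googleapis/aiplatform/v1.dart';

void activeLearningConfigExample() {
  final config = GoogleCloudAiplatformV1ActiveLearningConfig(
    maxDataItemCount: '1000', // int64 fields are represented as String
    // sampleConfig and trainingConfig left null; they will not be encoded.
  );

  final json = config.toJson();
  assert(json['maxDataItemCount'] == '1000');
  assert(!json.containsKey('sampleConfig'));

  final decoded = GoogleCloudAiplatformV1ActiveLearningConfig.fromJson(json);
  print(decoded.maxDataItemCount); // 1000
}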
measurement; + + GoogleCloudAiplatformV1AddTrialMeasurementRequest({ + this.measurement, + }); + + GoogleCloudAiplatformV1AddTrialMeasurementRequest.fromJson(core.Map json_) + : this( + measurement: json_.containsKey('measurement') + ? GoogleCloudAiplatformV1Measurement.fromJson( + json_['measurement'] as core.Map) + : null, + ); + + core.Map toJson() => { + if (measurement != null) 'measurement': measurement!, + }; +} + +/// Used to assign specific AnnotationSpec to a particular area of a DataItem or +/// the whole part of the DataItem. +class GoogleCloudAiplatformV1Annotation { + /// The source of the Annotation. + /// + /// Output only. + GoogleCloudAiplatformV1UserActionReference? annotationSource; + + /// Timestamp when this Annotation was created. + /// + /// Output only. + core.String? createTime; + + /// Used to perform consistent read-modify-write updates. + /// + /// If not set, a blind "overwrite" update happens. + /// + /// Optional. + core.String? etag; + + /// The labels with user-defined metadata to organize your Annotations. + /// + /// Label keys and values can be no longer than 64 characters (Unicode + /// codepoints), can only contain lowercase letters, numeric characters, + /// underscores and dashes. International characters are allowed. No more than + /// 64 user labels can be associated with one Annotation(System labels are + /// excluded). See https://goo.gl/xmQnxf for more information and examples of + /// labels. System reserved label keys are prefixed with + /// "aiplatform.googleapis.com/" and are immutable. Following system labels + /// exist for each Annotation: * + /// "aiplatform.googleapis.com/annotation_set_name": optional, name of the + /// UI's annotation set this Annotation belongs to. If not set, the Annotation + /// is not visible in the UI. * "aiplatform.googleapis.com/payload_schema": + /// output only, its value is the payload_schema's title. + /// + /// Optional. + core.Map? labels; + + /// Resource name of the Annotation. + /// + /// Output only. + core.String? name; + + /// The schema of the payload can be found in payload_schema. + /// + /// Required. + /// + /// The values for Object must be JSON objects. It can consist of `num`, + /// `String`, `bool` and `null` as well as `Map` and `List` values. + core.Object? payload; + + /// Google Cloud Storage URI points to a YAML file describing payload. + /// + /// The schema is defined as an + /// [OpenAPI 3.0.2 Schema Object](https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.2.md#schemaObject). + /// The schema files that can be used here are found in + /// gs://google-cloud-aiplatform/schema/dataset/annotation/, note that the + /// chosen schema must be consistent with the parent Dataset's metadata. + /// + /// Required. + core.String? payloadSchemaUri; + + /// Timestamp when this Annotation was last updated. + /// + /// Output only. + core.String? updateTime; + + GoogleCloudAiplatformV1Annotation({ + this.annotationSource, + this.createTime, + this.etag, + this.labels, + this.name, + this.payload, + this.payloadSchemaUri, + this.updateTime, + }); + + GoogleCloudAiplatformV1Annotation.fromJson(core.Map json_) + : this( + annotationSource: json_.containsKey('annotationSource') + ? 
GoogleCloudAiplatformV1UserActionReference.fromJson( + json_['annotationSource'] + as core.Map) + : null, + createTime: json_['createTime'] as core.String?, + etag: json_['etag'] as core.String?, + labels: + (json_['labels'] as core.Map?)?.map( + (key, value) => core.MapEntry( + key, + value as core.String, + ), + ), + name: json_['name'] as core.String?, + payload: json_['payload'], + payloadSchemaUri: json_['payloadSchemaUri'] as core.String?, + updateTime: json_['updateTime'] as core.String?, + ); + + core.Map toJson() => { + if (annotationSource != null) 'annotationSource': annotationSource!, + if (createTime != null) 'createTime': createTime!, + if (etag != null) 'etag': etag!, + if (labels != null) 'labels': labels!, + if (name != null) 'name': name!, + if (payload != null) 'payload': payload!, + if (payloadSchemaUri != null) 'payloadSchemaUri': payloadSchemaUri!, + if (updateTime != null) 'updateTime': updateTime!, + }; +} + +/// Identifies a concept with which DataItems may be annotated with. +class GoogleCloudAiplatformV1AnnotationSpec { + /// Timestamp when this AnnotationSpec was created. + /// + /// Output only. + core.String? createTime; + + /// The user-defined name of the AnnotationSpec. + /// + /// The name can be up to 128 characters long and can consist of any UTF-8 + /// characters. + /// + /// Required. + core.String? displayName; + + /// Used to perform consistent read-modify-write updates. + /// + /// If not set, a blind "overwrite" update happens. + /// + /// Optional. + core.String? etag; + + /// Resource name of the AnnotationSpec. + /// + /// Output only. + core.String? name; + + /// Timestamp when AnnotationSpec was last updated. + /// + /// Output only. + core.String? updateTime; + + GoogleCloudAiplatformV1AnnotationSpec({ + this.createTime, + this.displayName, + this.etag, + this.name, + this.updateTime, + }); + + GoogleCloudAiplatformV1AnnotationSpec.fromJson(core.Map json_) + : this( + createTime: json_['createTime'] as core.String?, + displayName: json_['displayName'] as core.String?, + etag: json_['etag'] as core.String?, + name: json_['name'] as core.String?, + updateTime: json_['updateTime'] as core.String?, + ); + + core.Map toJson() => { + if (createTime != null) 'createTime': createTime!, + if (displayName != null) 'displayName': displayName!, + if (etag != null) 'etag': etag!, + if (name != null) 'name': name!, + if (updateTime != null) 'updateTime': updateTime!, + }; +} + +/// The generic reusable api auth config. +class GoogleCloudAiplatformV1ApiAuth { + /// The API secret. + GoogleCloudAiplatformV1ApiAuthApiKeyConfig? apiKeyConfig; + + GoogleCloudAiplatformV1ApiAuth({ + this.apiKeyConfig, + }); + + GoogleCloudAiplatformV1ApiAuth.fromJson(core.Map json_) + : this( + apiKeyConfig: json_.containsKey('apiKeyConfig') + ? GoogleCloudAiplatformV1ApiAuthApiKeyConfig.fromJson( + json_['apiKeyConfig'] as core.Map) + : null, + ); + + core.Map toJson() => { + if (apiKeyConfig != null) 'apiKeyConfig': apiKeyConfig!, + }; +} + +/// The API secret. +class GoogleCloudAiplatformV1ApiAuthApiKeyConfig { + /// The SecretManager secret version resource name storing API key. + /// + /// e.g. projects/{project}/secrets/{secret}/versions/{version} + /// + /// Required. + core.String? 
apiKeySecretVersion; + + GoogleCloudAiplatformV1ApiAuthApiKeyConfig({ + this.apiKeySecretVersion, + }); + + GoogleCloudAiplatformV1ApiAuthApiKeyConfig.fromJson(core.Map json_) + : this( + apiKeySecretVersion: json_['apiKeySecretVersion'] as core.String?, + ); + + core.Map toJson() => { + if (apiKeySecretVersion != null) + 'apiKeySecretVersion': apiKeySecretVersion!, + }; +} + +/// Instance of a general artifact. +class GoogleCloudAiplatformV1Artifact { + /// Timestamp when this Artifact was created. + /// + /// Output only. + core.String? createTime; + + /// Description of the Artifact + core.String? description; + + /// User provided display name of the Artifact. + /// + /// May be up to 128 Unicode characters. + core.String? displayName; + + /// An eTag used to perform consistent read-modify-write updates. + /// + /// If not set, a blind "overwrite" update happens. + core.String? etag; + + /// The labels with user-defined metadata to organize your Artifacts. + /// + /// Label keys and values can be no longer than 64 characters (Unicode + /// codepoints), can only contain lowercase letters, numeric characters, + /// underscores and dashes. International characters are allowed. No more than + /// 64 user labels can be associated with one Artifact (System labels are + /// excluded). + core.Map? labels; + + /// Properties of the Artifact. + /// + /// Top level metadata keys' heading and trailing spaces will be trimmed. The + /// size of this field should not exceed 200KB. + /// + /// The values for Object must be JSON objects. It can consist of `num`, + /// `String`, `bool` and `null` as well as `Map` and `List` values. + core.Map? metadata; + + /// The resource name of the Artifact. + /// + /// Output only. + core.String? name; + + /// The title of the schema describing the metadata. + /// + /// Schema title and version is expected to be registered in earlier Create + /// Schema calls. And both are used together as unique identifiers to identify + /// schemas within the local metadata store. + core.String? schemaTitle; + + /// The version of the schema in schema_name to use. + /// + /// Schema title and version is expected to be registered in earlier Create + /// Schema calls. And both are used together as unique identifiers to identify + /// schemas within the local metadata store. + core.String? schemaVersion; + + /// The state of this Artifact. + /// + /// This is a property of the Artifact, and does not imply or capture any + /// ongoing process. This property is managed by clients (such as Vertex AI + /// Pipelines), and the system does not prescribe or check the validity of + /// state transitions. + /// Possible string values are: + /// - "STATE_UNSPECIFIED" : Unspecified state for the Artifact. + /// - "PENDING" : A state used by systems like Vertex AI Pipelines to indicate + /// that the underlying data item represented by this Artifact is being + /// created. + /// - "LIVE" : A state indicating that the Artifact should exist, unless + /// something external to the system deletes it. + core.String? state; + + /// Timestamp when this Artifact was last updated. + /// + /// Output only. + core.String? updateTime; + + /// The uniform resource identifier of the artifact file. + /// + /// May be empty if there is no actual artifact file. + core.String? 
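// --- Illustrative sketch: referencing an API key stored in Secret Manager ---
// (Not part of the generated diff.) The secret version name below is a
// hypothetical placeholder following the format documented above.
import 'package:googleapis/aiplatform/v1.dart';

final apiAuth = GoogleCloudAiplatformV1ApiAuth(
  apiKeyConfig: GoogleCloudAiplatformV1ApiAuthApiKeyConfig(
    apiKeySecretVersion: 'projects/my-project/secrets/my-api-key/versions/1',
  ),
);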
uri; + + GoogleCloudAiplatformV1Artifact({ + this.createTime, + this.description, + this.displayName, + this.etag, + this.labels, + this.metadata, + this.name, + this.schemaTitle, + this.schemaVersion, + this.state, + this.updateTime, + this.uri, + }); + + GoogleCloudAiplatformV1Artifact.fromJson(core.Map json_) + : this( + createTime: json_['createTime'] as core.String?, + description: json_['description'] as core.String?, + displayName: json_['displayName'] as core.String?, + etag: json_['etag'] as core.String?, + labels: + (json_['labels'] as core.Map?)?.map( + (key, value) => core.MapEntry( + key, + value as core.String, + ), + ), + metadata: json_.containsKey('metadata') + ? json_['metadata'] as core.Map + : null, + name: json_['name'] as core.String?, + schemaTitle: json_['schemaTitle'] as core.String?, + schemaVersion: json_['schemaVersion'] as core.String?, + state: json_['state'] as core.String?, + updateTime: json_['updateTime'] as core.String?, + uri: json_['uri'] as core.String?, + ); + + core.Map toJson() => { + if (createTime != null) 'createTime': createTime!, + if (description != null) 'description': description!, + if (displayName != null) 'displayName': displayName!, + if (etag != null) 'etag': etag!, + if (labels != null) 'labels': labels!, + if (metadata != null) 'metadata': metadata!, + if (name != null) 'name': name!, + if (schemaTitle != null) 'schemaTitle': schemaTitle!, + if (schemaVersion != null) 'schemaVersion': schemaVersion!, + if (state != null) 'state': state!, + if (updateTime != null) 'updateTime': updateTime!, + if (uri != null) 'uri': uri!, + }; +} + +/// Request message for NotebookService.AssignNotebookRuntime. +class GoogleCloudAiplatformV1AssignNotebookRuntimeRequest { + /// Provide runtime specific information (e.g. runtime owner, notebook id) + /// used for NotebookRuntime assignment. + /// + /// Required. + GoogleCloudAiplatformV1NotebookRuntime? notebookRuntime; + + /// User specified ID for the notebook runtime. + /// + /// Optional. + core.String? notebookRuntimeId; + + /// The resource name of the NotebookRuntimeTemplate based on which a + /// NotebookRuntime will be assigned (reuse or create a new one). + /// + /// Required. + core.String? notebookRuntimeTemplate; + + GoogleCloudAiplatformV1AssignNotebookRuntimeRequest({ + this.notebookRuntime, + this.notebookRuntimeId, + this.notebookRuntimeTemplate, + }); + + GoogleCloudAiplatformV1AssignNotebookRuntimeRequest.fromJson(core.Map json_) + : this( + notebookRuntime: json_.containsKey('notebookRuntime') + ? GoogleCloudAiplatformV1NotebookRuntime.fromJson( + json_['notebookRuntime'] + as core.Map) + : null, + notebookRuntimeId: json_['notebookRuntimeId'] as core.String?, + notebookRuntimeTemplate: + json_['notebookRuntimeTemplate'] as core.String?, + ); + + core.Map toJson() => { + if (notebookRuntime != null) 'notebookRuntime': notebookRuntime!, + if (notebookRuntimeId != null) 'notebookRuntimeId': notebookRuntimeId!, + if (notebookRuntimeTemplate != null) + 'notebookRuntimeTemplate': notebookRuntimeTemplate!, + }; +} + +/// Attribution that explains a particular prediction output. +class GoogleCloudAiplatformV1Attribution { + /// Error of feature_attributions caused by approximation used in the + /// explanation method. + /// + /// Lower value means more precise attributions. * For Sampled Shapley + /// attribution, increasing path_count might reduce the error. * For + /// Integrated Gradients attribution, increasing step_count might reduce the + /// error. 
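// --- Illustrative sketch: populating an Artifact for Vertex ML Metadata ---
// (Not part of the generated diff.) Only client-settable fields are populated;
// output-only fields such as `name` and `createTime` are filled in by the
// service. The schema title/version, URI, and labels are placeholders.
import 'package:googleapis/aiplatform/v1.dart';

final artifact = GoogleCloudAiplatformV1Artifact(
  displayName: 'training-dataset-snapshot',
  schemaTitle: 'system.Dataset',
  schemaVersion: '0.0.1',
  uri: 'gs://my-bucket/datasets/2024-12-01/', // hypothetical location
  labels: {'team': 'ml-platform'},
  state: 'LIVE',
  metadata: {'rowCount': 12345},
);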
* For XRAI attribution, increasing step_count might reduce the + /// error. See \[this introduction\](/vertex-ai/docs/explainable-ai/overview) + /// for more information. + /// + /// Output only. + core.double? approximationError; + + /// Model predicted output if the input instance is constructed from the + /// baselines of all the features defined in ExplanationMetadata.inputs. + /// + /// The field name of the output is determined by the key in + /// ExplanationMetadata.outputs. If the Model's predicted output has multiple + /// dimensions (rank \> 1), this is the value in the output located by + /// output_index. If there are multiple baselines, their output values are + /// averaged. + /// + /// Output only. + core.double? baselineOutputValue; + + /// Attributions of each explained feature. + /// + /// Features are extracted from the prediction instances according to + /// explanation metadata for inputs. The value is a struct, whose keys are the + /// name of the feature. The values are how much the feature in the instance + /// contributed to the predicted result. The format of the value is determined + /// by the feature's input format: * If the feature is a scalar value, the + /// attribution value is a floating number. * If the feature is an array of + /// scalar values, the attribution value is an array. * If the feature is a + /// struct, the attribution value is a struct. The keys in the attribution + /// value struct are the same as the keys in the feature struct. The formats + /// of the values in the attribution struct are determined by the formats of + /// the values in the feature struct. The + /// ExplanationMetadata.feature_attributions_schema_uri field, pointed to by + /// the ExplanationSpec field of the Endpoint.deployed_models object, points + /// to the schema file that describes the features and their attribution + /// values (if it is populated). + /// + /// Output only. + /// + /// The values for Object must be JSON objects. It can consist of `num`, + /// `String`, `bool` and `null` as well as `Map` and `List` values. + core.Object? featureAttributions; + + /// Model predicted output on the corresponding explanation instance. + /// + /// The field name of the output is determined by the key in + /// ExplanationMetadata.outputs. If the Model predicted output has multiple + /// dimensions, this is the value in the output located by output_index. + /// + /// Output only. + core.double? instanceOutputValue; + + /// The display name of the output identified by output_index. + /// + /// For example, the predicted class name by a multi-classification Model. + /// This field is only populated iff the Model predicts display names as a + /// separate field along with the explained output. The predicted display name + /// must has the same shape of the explained output, and can be located using + /// output_index. + /// + /// Output only. + core.String? outputDisplayName; + + /// The index that locates the explained prediction output. + /// + /// If the prediction output is a scalar value, output_index is not populated. + /// If the prediction output has multiple dimensions, the length of the + /// output_index list is the same as the number of dimensions of the output. + /// The i-th element in output_index is the element index of the i-th + /// dimension of the output vector. Indices start from 0. + /// + /// Output only. + core.List? outputIndex; + + /// Name of the explain output. + /// + /// Specified as the key in ExplanationMetadata.outputs. + /// + /// Output only. 
+ core.String? outputName; + + GoogleCloudAiplatformV1Attribution({ + this.approximationError, + this.baselineOutputValue, + this.featureAttributions, + this.instanceOutputValue, + this.outputDisplayName, + this.outputIndex, + this.outputName, + }); + + GoogleCloudAiplatformV1Attribution.fromJson(core.Map json_) + : this( + approximationError: + (json_['approximationError'] as core.num?)?.toDouble(), + baselineOutputValue: + (json_['baselineOutputValue'] as core.num?)?.toDouble(), + featureAttributions: json_['featureAttributions'], + instanceOutputValue: + (json_['instanceOutputValue'] as core.num?)?.toDouble(), + outputDisplayName: json_['outputDisplayName'] as core.String?, + outputIndex: (json_['outputIndex'] as core.List?) + ?.map((value) => value as core.int) + .toList(), + outputName: json_['outputName'] as core.String?, + ); + + core.Map toJson() => { + if (approximationError != null) + 'approximationError': approximationError!, + if (baselineOutputValue != null) + 'baselineOutputValue': baselineOutputValue!, + if (featureAttributions != null) + 'featureAttributions': featureAttributions!, + if (instanceOutputValue != null) + 'instanceOutputValue': instanceOutputValue!, + if (outputDisplayName != null) 'outputDisplayName': outputDisplayName!, + if (outputIndex != null) 'outputIndex': outputIndex!, + if (outputName != null) 'outputName': outputName!, + }; +} + +/// Request message for AugmentPrompt. +class GoogleCloudAiplatformV1AugmentPromptRequest { + /// Input content to augment, only text format is supported for now. + /// + /// Optional. + core.List? contents; + + /// Metadata of the backend deployed model. + /// + /// Optional. + GoogleCloudAiplatformV1AugmentPromptRequestModel? model; + + /// Retrieves contexts from the Vertex RagStore. + /// + /// Optional. + GoogleCloudAiplatformV1VertexRagStore? vertexRagStore; + + GoogleCloudAiplatformV1AugmentPromptRequest({ + this.contents, + this.model, + this.vertexRagStore, + }); + + GoogleCloudAiplatformV1AugmentPromptRequest.fromJson(core.Map json_) + : this( + contents: (json_['contents'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1Content.fromJson( + value as core.Map)) + .toList(), + model: json_.containsKey('model') + ? GoogleCloudAiplatformV1AugmentPromptRequestModel.fromJson( + json_['model'] as core.Map) + : null, + vertexRagStore: json_.containsKey('vertexRagStore') + ? GoogleCloudAiplatformV1VertexRagStore.fromJson( + json_['vertexRagStore'] + as core.Map) + : null, + ); + + core.Map toJson() => { + if (contents != null) 'contents': contents!, + if (model != null) 'model': model!, + if (vertexRagStore != null) 'vertexRagStore': vertexRagStore!, + }; +} + +/// Metadata of the backend deployed model. +class GoogleCloudAiplatformV1AugmentPromptRequestModel { + /// The model that the user will send the augmented prompt for content + /// generation. + /// + /// Optional. + core.String? model; + + /// The model version of the backend deployed model. + /// + /// Optional. + core.String? modelVersion; + + GoogleCloudAiplatformV1AugmentPromptRequestModel({ + this.model, + this.modelVersion, + }); + + GoogleCloudAiplatformV1AugmentPromptRequestModel.fromJson(core.Map json_) + : this( + model: json_['model'] as core.String?, + modelVersion: json_['modelVersion'] as core.String?, + ); + + core.Map toJson() => { + if (model != null) 'model': model!, + if (modelVersion != null) 'modelVersion': modelVersion!, + }; +} + +/// Response message for AugmentPrompt. 
+class GoogleCloudAiplatformV1AugmentPromptResponse { + /// Augmented prompt, only text format is supported for now. + core.List? augmentedPrompt; + + /// Retrieved facts from RAG data sources. + core.List? facts; + + GoogleCloudAiplatformV1AugmentPromptResponse({ + this.augmentedPrompt, + this.facts, + }); + + GoogleCloudAiplatformV1AugmentPromptResponse.fromJson(core.Map json_) + : this( + augmentedPrompt: (json_['augmentedPrompt'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1Content.fromJson( + value as core.Map)) + .toList(), + facts: (json_['facts'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1Fact.fromJson( + value as core.Map)) + .toList(), + ); + + core.Map toJson() => { + if (augmentedPrompt != null) 'augmentedPrompt': augmentedPrompt!, + if (facts != null) 'facts': facts!, + }; +} + +/// A description of resources that to large degree are decided by Vertex AI, +/// and require only a modest additional configuration. +/// +/// Each Model supporting these resources documents its specific guidelines. +class GoogleCloudAiplatformV1AutomaticResources { + /// The maximum number of replicas this DeployedModel may be deployed on when + /// the traffic against it increases. + /// + /// If the requested value is too large, the deployment will error, but if + /// deployment succeeds then the ability to scale the model to that many + /// replicas is guaranteed (barring service outages). If traffic against the + /// DeployedModel increases beyond what its replicas at maximum may handle, a + /// portion of the traffic will be dropped. If this value is not provided, a + /// no upper bound for scaling under heavy traffic will be assume, though + /// Vertex AI may be unable to scale beyond certain replica number. + /// + /// Immutable. + core.int? maxReplicaCount; + + /// The minimum number of replicas this DeployedModel will be always deployed + /// on. + /// + /// If traffic against it increases, it may dynamically be deployed onto more + /// replicas up to max_replica_count, and as traffic decreases, some of these + /// extra replicas may be freed. If the requested value is too large, the + /// deployment will error. + /// + /// Immutable. + core.int? minReplicaCount; + + GoogleCloudAiplatformV1AutomaticResources({ + this.maxReplicaCount, + this.minReplicaCount, + }); + + GoogleCloudAiplatformV1AutomaticResources.fromJson(core.Map json_) + : this( + maxReplicaCount: json_['maxReplicaCount'] as core.int?, + minReplicaCount: json_['minReplicaCount'] as core.int?, + ); + + core.Map toJson() => { + if (maxReplicaCount != null) 'maxReplicaCount': maxReplicaCount!, + if (minReplicaCount != null) 'minReplicaCount': minReplicaCount!, + }; +} + +/// The metric specification that defines the target resource utilization (CPU +/// utilization, accelerator's duty cycle, and so on) for calculating the +/// desired replica count. +class GoogleCloudAiplatformV1AutoscalingMetricSpec { + /// The resource metric name. + /// + /// Supported metrics: * For Online Prediction: * + /// `aiplatform.googleapis.com/prediction/online/accelerator/duty_cycle` * + /// `aiplatform.googleapis.com/prediction/online/cpu/utilization` + /// + /// Required. + core.String? metricName; + + /// The target resource utilization in percentage (1% - 100%) for the given + /// metric; once the real usage deviates from the target by a certain + /// percentage, the machine replicas change. + /// + /// The default value is 60 (representing 60%) if not provided. + core.int? 
target; + + GoogleCloudAiplatformV1AutoscalingMetricSpec({ + this.metricName, + this.target, + }); + + GoogleCloudAiplatformV1AutoscalingMetricSpec.fromJson(core.Map json_) + : this( + metricName: json_['metricName'] as core.String?, + target: json_['target'] as core.int?, + ); + + core.Map toJson() => { + if (metricName != null) 'metricName': metricName!, + if (target != null) 'target': target!, + }; +} + +/// The storage details for Avro input content. +class GoogleCloudAiplatformV1AvroSource { + /// Google Cloud Storage location. + /// + /// Required. + GoogleCloudAiplatformV1GcsSource? gcsSource; + + GoogleCloudAiplatformV1AvroSource({ + this.gcsSource, + }); + + GoogleCloudAiplatformV1AvroSource.fromJson(core.Map json_) + : this( + gcsSource: json_.containsKey('gcsSource') + ? GoogleCloudAiplatformV1GcsSource.fromJson( + json_['gcsSource'] as core.Map) + : null, + ); + + core.Map toJson() => { + if (gcsSource != null) 'gcsSource': gcsSource!, + }; +} + +/// Request message for PipelineService.BatchCancelPipelineJobs. +class GoogleCloudAiplatformV1BatchCancelPipelineJobsRequest { + /// The names of the PipelineJobs to cancel. + /// + /// A maximum of 32 PipelineJobs can be cancelled in a batch. Format: + /// `projects/{project}/locations/{location}/pipelineJobs/{pipelineJob}` + /// + /// Required. + core.List? names; + + GoogleCloudAiplatformV1BatchCancelPipelineJobsRequest({ + this.names, + }); + + GoogleCloudAiplatformV1BatchCancelPipelineJobsRequest.fromJson(core.Map json_) + : this( + names: (json_['names'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + ); + + core.Map toJson() => { + if (names != null) 'names': names!, + }; +} + +/// Request message for FeaturestoreService.BatchCreateFeatures. +/// +/// Request message for FeatureRegistryService.BatchCreateFeatures. +class GoogleCloudAiplatformV1BatchCreateFeaturesRequest { + /// The request message specifying the Features to create. + /// + /// All Features must be created under the same parent EntityType / + /// FeatureGroup. The `parent` field in each child request message can be + /// omitted. If `parent` is set in a child request, then the value must match + /// the `parent` value in this request message. + /// + /// Required. + core.List? requests; + + GoogleCloudAiplatformV1BatchCreateFeaturesRequest({ + this.requests, + }); + + GoogleCloudAiplatformV1BatchCreateFeaturesRequest.fromJson(core.Map json_) + : this( + requests: (json_['requests'] as core.List?) + ?.map((value) => + GoogleCloudAiplatformV1CreateFeatureRequest.fromJson( + value as core.Map)) + .toList(), + ); + + core.Map toJson() => { + if (requests != null) 'requests': requests!, + }; +} + +/// Request message for TensorboardService.BatchCreateTensorboardRuns. +class GoogleCloudAiplatformV1BatchCreateTensorboardRunsRequest { + /// The request message specifying the TensorboardRuns to create. + /// + /// A maximum of 1000 TensorboardRuns can be created in a batch. + /// + /// Required. + core.List? requests; + + GoogleCloudAiplatformV1BatchCreateTensorboardRunsRequest({ + this.requests, + }); + + GoogleCloudAiplatformV1BatchCreateTensorboardRunsRequest.fromJson( + core.Map json_) + : this( + requests: (json_['requests'] as core.List?) + ?.map((value) => + GoogleCloudAiplatformV1CreateTensorboardRunRequest.fromJson( + value as core.Map)) + .toList(), + ); + + core.Map toJson() => { + if (requests != null) 'requests': requests!, + }; +} + +/// Response message for TensorboardService.BatchCreateTensorboardRuns. 
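// --- Illustrative sketch: replica-count and autoscaling settings ---
// (Not part of the generated diff.) Shows how the two resource messages above
// are typically populated; the metric name comes from the supported-metrics
// list in the AutoscalingMetricSpec documentation, and the target value is a
// placeholder.
import 'package:googleapis/aiplatform/v1.dart';

final automaticResources = GoogleCloudAiplatformV1AutomaticResources(
  minReplicaCount: 1,
  maxReplicaCount: 5,
);

final cpuTarget = GoogleCloudAiplatformV1AutoscalingMetricSpec(
  metricName: 'aiplatform.googleapis.com/prediction/online/cpu/utilization',
  target: 70, // scale replicas when average CPU utilization deviates from 70%
);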
+class GoogleCloudAiplatformV1BatchCreateTensorboardRunsResponse { + /// The created TensorboardRuns. + core.List? tensorboardRuns; + + GoogleCloudAiplatformV1BatchCreateTensorboardRunsResponse({ + this.tensorboardRuns, + }); + + GoogleCloudAiplatformV1BatchCreateTensorboardRunsResponse.fromJson( + core.Map json_) + : this( + tensorboardRuns: (json_['tensorboardRuns'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1TensorboardRun.fromJson( + value as core.Map)) + .toList(), + ); + + core.Map toJson() => { + if (tensorboardRuns != null) 'tensorboardRuns': tensorboardRuns!, + }; +} + +/// Request message for TensorboardService.BatchCreateTensorboardTimeSeries. +class GoogleCloudAiplatformV1BatchCreateTensorboardTimeSeriesRequest { + /// The request message specifying the TensorboardTimeSeries to create. + /// + /// A maximum of 1000 TensorboardTimeSeries can be created in a batch. + /// + /// Required. + core.List? + requests; + + GoogleCloudAiplatformV1BatchCreateTensorboardTimeSeriesRequest({ + this.requests, + }); + + GoogleCloudAiplatformV1BatchCreateTensorboardTimeSeriesRequest.fromJson( + core.Map json_) + : this( + requests: (json_['requests'] as core.List?) + ?.map((value) => + GoogleCloudAiplatformV1CreateTensorboardTimeSeriesRequest + .fromJson(value as core.Map)) + .toList(), + ); + + core.Map toJson() => { + if (requests != null) 'requests': requests!, + }; +} + +/// Response message for TensorboardService.BatchCreateTensorboardTimeSeries. +class GoogleCloudAiplatformV1BatchCreateTensorboardTimeSeriesResponse { + /// The created TensorboardTimeSeries. + core.List? + tensorboardTimeSeries; + + GoogleCloudAiplatformV1BatchCreateTensorboardTimeSeriesResponse({ + this.tensorboardTimeSeries, + }); + + GoogleCloudAiplatformV1BatchCreateTensorboardTimeSeriesResponse.fromJson( + core.Map json_) + : this( + tensorboardTimeSeries: (json_['tensorboardTimeSeries'] as core.List?) + ?.map((value) => + GoogleCloudAiplatformV1TensorboardTimeSeries.fromJson( + value as core.Map)) + .toList(), + ); + + core.Map toJson() => { + if (tensorboardTimeSeries != null) + 'tensorboardTimeSeries': tensorboardTimeSeries!, + }; +} + +/// A description of resources that are used for performing batch operations, +/// are dedicated to a Model, and need manual configuration. +class GoogleCloudAiplatformV1BatchDedicatedResources { + /// The specification of a single machine. + /// + /// Required. Immutable. + GoogleCloudAiplatformV1MachineSpec? machineSpec; + + /// The maximum number of machine replicas the batch operation may be scaled + /// to. + /// + /// The default value is 10. + /// + /// Immutable. + core.int? maxReplicaCount; + + /// The number of machine replicas used at the start of the batch operation. + /// + /// If not set, Vertex AI decides starting number, not greater than + /// max_replica_count + /// + /// Immutable. + core.int? startingReplicaCount; + + GoogleCloudAiplatformV1BatchDedicatedResources({ + this.machineSpec, + this.maxReplicaCount, + this.startingReplicaCount, + }); + + GoogleCloudAiplatformV1BatchDedicatedResources.fromJson(core.Map json_) + : this( + machineSpec: json_.containsKey('machineSpec') + ? 
GoogleCloudAiplatformV1MachineSpec.fromJson( + json_['machineSpec'] as core.Map) + : null, + maxReplicaCount: json_['maxReplicaCount'] as core.int?, + startingReplicaCount: json_['startingReplicaCount'] as core.int?, + ); + + core.Map toJson() => { + if (machineSpec != null) 'machineSpec': machineSpec!, + if (maxReplicaCount != null) 'maxReplicaCount': maxReplicaCount!, + if (startingReplicaCount != null) + 'startingReplicaCount': startingReplicaCount!, + }; +} + +/// Request message for PipelineService.BatchDeletePipelineJobs. +class GoogleCloudAiplatformV1BatchDeletePipelineJobsRequest { + /// The names of the PipelineJobs to delete. + /// + /// A maximum of 32 PipelineJobs can be deleted in a batch. Format: + /// `projects/{project}/locations/{location}/pipelineJobs/{pipelineJob}` + /// + /// Required. + core.List? names; + + GoogleCloudAiplatformV1BatchDeletePipelineJobsRequest({ + this.names, + }); + + GoogleCloudAiplatformV1BatchDeletePipelineJobsRequest.fromJson(core.Map json_) + : this( + names: (json_['names'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + ); + + core.Map toJson() => { + if (names != null) 'names': names!, + }; +} + +/// Request message for ModelService.BatchImportEvaluatedAnnotations +class GoogleCloudAiplatformV1BatchImportEvaluatedAnnotationsRequest { + /// Evaluated annotations resource to be imported. + /// + /// Required. + core.List? evaluatedAnnotations; + + GoogleCloudAiplatformV1BatchImportEvaluatedAnnotationsRequest({ + this.evaluatedAnnotations, + }); + + GoogleCloudAiplatformV1BatchImportEvaluatedAnnotationsRequest.fromJson( + core.Map json_) + : this( + evaluatedAnnotations: (json_['evaluatedAnnotations'] as core.List?) + ?.map((value) => + GoogleCloudAiplatformV1EvaluatedAnnotation.fromJson( + value as core.Map)) + .toList(), + ); + + core.Map toJson() => { + if (evaluatedAnnotations != null) + 'evaluatedAnnotations': evaluatedAnnotations!, + }; +} + +/// Response message for ModelService.BatchImportEvaluatedAnnotations +class GoogleCloudAiplatformV1BatchImportEvaluatedAnnotationsResponse { + /// Number of EvaluatedAnnotations imported. + /// + /// Output only. + core.int? importedEvaluatedAnnotationsCount; + + GoogleCloudAiplatformV1BatchImportEvaluatedAnnotationsResponse({ + this.importedEvaluatedAnnotationsCount, + }); + + GoogleCloudAiplatformV1BatchImportEvaluatedAnnotationsResponse.fromJson( + core.Map json_) + : this( + importedEvaluatedAnnotationsCount: + json_['importedEvaluatedAnnotationsCount'] as core.int?, + ); + + core.Map toJson() => { + if (importedEvaluatedAnnotationsCount != null) + 'importedEvaluatedAnnotationsCount': + importedEvaluatedAnnotationsCount!, + }; +} + +/// Request message for MigrationService.BatchMigrateResources. +class GoogleCloudAiplatformV1BatchMigrateResourcesRequest { + /// The request messages specifying the resources to migrate. + /// + /// They must be in the same location as the destination. Up to 50 resources + /// can be migrated in one batch. + /// + /// Required. + core.List? + migrateResourceRequests; + + GoogleCloudAiplatformV1BatchMigrateResourcesRequest({ + this.migrateResourceRequests, + }); + + GoogleCloudAiplatformV1BatchMigrateResourcesRequest.fromJson(core.Map json_) + : this( + migrateResourceRequests: + (json_['migrateResourceRequests'] as core.List?) 
+ ?.map((value) => + GoogleCloudAiplatformV1MigrateResourceRequest.fromJson( + value as core.Map)) + .toList(), + ); + + core.Map toJson() => { + if (migrateResourceRequests != null) + 'migrateResourceRequests': migrateResourceRequests!, + }; +} + +/// A job that uses a Model to produce predictions on multiple input instances. +/// +/// If predictions for significant portion of the instances fail, the job may +/// finish without attempting predictions for all remaining instances. +class GoogleCloudAiplatformV1BatchPredictionJob { + /// Statistics on completed and failed prediction instances. + /// + /// Output only. + GoogleCloudAiplatformV1CompletionStats? completionStats; + + /// Time when the BatchPredictionJob was created. + /// + /// Output only. + core.String? createTime; + + /// The config of resources used by the Model during the batch prediction. + /// + /// If the Model supports DEDICATED_RESOURCES this config may be provided (and + /// the job will use these resources), if the Model doesn't support + /// AUTOMATIC_RESOURCES, this config must be provided. + GoogleCloudAiplatformV1BatchDedicatedResources? dedicatedResources; + + /// For custom-trained Models and AutoML Tabular Models, the container of the + /// DeployedModel instances will send `stderr` and `stdout` streams to Cloud + /// Logging by default. + /// + /// Please note that the logs incur cost, which are subject to + /// [Cloud Logging pricing](https://cloud.google.com/logging/pricing). User + /// can disable container logging by setting this flag to true. + core.bool? disableContainerLogging; + + /// The user-defined name of this BatchPredictionJob. + /// + /// Required. + core.String? displayName; + + /// Customer-managed encryption key options for a BatchPredictionJob. + /// + /// If this is set, then all resources created by the BatchPredictionJob will + /// be encrypted with the provided encryption key. + GoogleCloudAiplatformV1EncryptionSpec? encryptionSpec; + + /// Time when the BatchPredictionJob entered any of the following states: + /// `JOB_STATE_SUCCEEDED`, `JOB_STATE_FAILED`, `JOB_STATE_CANCELLED`. + /// + /// Output only. + core.String? endTime; + + /// Only populated when the job's state is JOB_STATE_FAILED or + /// JOB_STATE_CANCELLED. + /// + /// Output only. + GoogleRpcStatus? error; + + /// Explanation configuration for this BatchPredictionJob. + /// + /// Can be specified only if generate_explanation is set to `true`. This value + /// overrides the value of Model.explanation_spec. All fields of + /// explanation_spec are optional in the request. If a field of the + /// explanation_spec object is not populated, the corresponding field of the + /// Model.explanation_spec object is inherited. + GoogleCloudAiplatformV1ExplanationSpec? explanationSpec; + + /// Generate explanation with the batch prediction results. + /// + /// When set to `true`, the batch prediction output changes based on the + /// `predictions_format` field of the BatchPredictionJob.output_config object: + /// * `bigquery`: output includes a column named `explanation`. The value is a + /// struct that conforms to the Explanation object. * `jsonl`: The JSON + /// objects on each line include an additional entry keyed `explanation`. The + /// value of the entry is a JSON object that conforms to the Explanation + /// object. * `csv`: Generating explanations for CSV format is not supported. + /// If this field is set to true, either the Model.explanation_spec or + /// explanation_spec must be populated. + core.bool? 
generateExplanation; + + /// Input configuration of the instances on which predictions are performed. + /// + /// The schema of any single instance may be specified via the Model's + /// PredictSchemata's instance_schema_uri. + /// + /// Required. + GoogleCloudAiplatformV1BatchPredictionJobInputConfig? inputConfig; + + /// Configuration for how to convert batch prediction input instances to the + /// prediction instances that are sent to the Model. + GoogleCloudAiplatformV1BatchPredictionJobInstanceConfig? instanceConfig; + + /// The labels with user-defined metadata to organize BatchPredictionJobs. + /// + /// Label keys and values can be no longer than 64 characters (Unicode + /// codepoints), can only contain lowercase letters, numeric characters, + /// underscores and dashes. International characters are allowed. See + /// https://goo.gl/xmQnxf for more information and examples of labels. + core.Map? labels; + + /// Parameters configuring the batch behavior. + /// + /// Currently only applicable when dedicated_resources are used (in other + /// cases Vertex AI does the tuning itself). + /// + /// Immutable. + GoogleCloudAiplatformV1ManualBatchTuningParameters? + manualBatchTuningParameters; + + /// The name of the Model resource that produces the predictions via this job, + /// must share the same ancestor Location. + /// + /// Starting this job has no impact on any existing deployments of the Model + /// and their resources. Exactly one of model and unmanaged_container_model + /// must be set. The model resource name may contain version id or version + /// alias to specify the version. Example: + /// `projects/{project}/locations/{location}/models/{model}@2` or + /// `projects/{project}/locations/{location}/models/{model}@golden` if no + /// version is specified, the default version will be deployed. The model + /// resource could also be a publisher model. Example: + /// `publishers/{publisher}/models/{model}` or + /// `projects/{project}/locations/{location}/publishers/{publisher}/models/{model}` + core.String? model; + + /// The parameters that govern the predictions. + /// + /// The schema of the parameters may be specified via the Model's + /// PredictSchemata's parameters_schema_uri. + /// + /// The values for Object must be JSON objects. It can consist of `num`, + /// `String`, `bool` and `null` as well as `Map` and `List` values. + core.Object? modelParameters; + + /// The version ID of the Model that produces the predictions via this job. + /// + /// Output only. + core.String? modelVersionId; + + /// Resource name of the BatchPredictionJob. + /// + /// Output only. + core.String? name; + + /// The Configuration specifying where output predictions should be written. + /// + /// The schema of any single prediction may be specified as a concatenation of + /// Model's PredictSchemata's instance_schema_uri and prediction_schema_uri. + /// + /// Required. + GoogleCloudAiplatformV1BatchPredictionJobOutputConfig? outputConfig; + + /// Information further describing the output of this job. + /// + /// Output only. + GoogleCloudAiplatformV1BatchPredictionJobOutputInfo? outputInfo; + + /// Partial failures encountered. + /// + /// For example, single files that can't be read. This field never exceeds 20 + /// entries. Status details fields contain standard Google Cloud error + /// details. + /// + /// Output only. + core.List? partialFailures; + + /// Information about resources that had been consumed by this job. 
+ /// + /// Provided in real time at best effort basis, as well as a final value once + /// the job completes. Note: This field currently may be not populated for + /// batch predictions that use AutoML Models. + /// + /// Output only. + GoogleCloudAiplatformV1ResourcesConsumed? resourcesConsumed; + + /// Reserved for future use. + /// + /// Output only. + core.bool? satisfiesPzi; + + /// Reserved for future use. + /// + /// Output only. + core.bool? satisfiesPzs; + + /// The service account that the DeployedModel's container runs as. + /// + /// If not specified, a system generated one will be used, which has minimal + /// permissions and the custom container, if used, may not have enough + /// permission to access other Google Cloud resources. Users deploying the + /// Model must have the `iam.serviceAccounts.actAs` permission on this service + /// account. + core.String? serviceAccount; + + /// Time when the BatchPredictionJob for the first time entered the + /// `JOB_STATE_RUNNING` state. + /// + /// Output only. + core.String? startTime; + + /// The detailed state of the job. + /// + /// Output only. + /// Possible string values are: + /// - "JOB_STATE_UNSPECIFIED" : The job state is unspecified. + /// - "JOB_STATE_QUEUED" : The job has been just created or resumed and + /// processing has not yet begun. + /// - "JOB_STATE_PENDING" : The service is preparing to run the job. + /// - "JOB_STATE_RUNNING" : The job is in progress. + /// - "JOB_STATE_SUCCEEDED" : The job completed successfully. + /// - "JOB_STATE_FAILED" : The job failed. + /// - "JOB_STATE_CANCELLING" : The job is being cancelled. From this state the + /// job may only go to either `JOB_STATE_SUCCEEDED`, `JOB_STATE_FAILED` or + /// `JOB_STATE_CANCELLED`. + /// - "JOB_STATE_CANCELLED" : The job has been cancelled. + /// - "JOB_STATE_PAUSED" : The job has been stopped, and can be resumed. + /// - "JOB_STATE_EXPIRED" : The job has expired. + /// - "JOB_STATE_UPDATING" : The job is being updated. Only jobs in the + /// `RUNNING` state can be updated. After updating, the job goes back to the + /// `RUNNING` state. + /// - "JOB_STATE_PARTIALLY_SUCCEEDED" : The job is partially succeeded, some + /// results may be missing due to errors. + core.String? state; + + /// Contains model information necessary to perform batch prediction without + /// requiring uploading to model registry. + /// + /// Exactly one of model and unmanaged_container_model must be set. + GoogleCloudAiplatformV1UnmanagedContainerModel? unmanagedContainerModel; + + /// Time when the BatchPredictionJob was most recently updated. + /// + /// Output only. + core.String? updateTime; + + GoogleCloudAiplatformV1BatchPredictionJob({ + this.completionStats, + this.createTime, + this.dedicatedResources, + this.disableContainerLogging, + this.displayName, + this.encryptionSpec, + this.endTime, + this.error, + this.explanationSpec, + this.generateExplanation, + this.inputConfig, + this.instanceConfig, + this.labels, + this.manualBatchTuningParameters, + this.model, + this.modelParameters, + this.modelVersionId, + this.name, + this.outputConfig, + this.outputInfo, + this.partialFailures, + this.resourcesConsumed, + this.satisfiesPzi, + this.satisfiesPzs, + this.serviceAccount, + this.startTime, + this.state, + this.unmanagedContainerModel, + this.updateTime, + }); + + GoogleCloudAiplatformV1BatchPredictionJob.fromJson(core.Map json_) + : this( + completionStats: json_.containsKey('completionStats') + ? 
GoogleCloudAiplatformV1CompletionStats.fromJson( + json_['completionStats'] + as core.Map) + : null, + createTime: json_['createTime'] as core.String?, + dedicatedResources: json_.containsKey('dedicatedResources') + ? GoogleCloudAiplatformV1BatchDedicatedResources.fromJson( + json_['dedicatedResources'] + as core.Map) + : null, + disableContainerLogging: + json_['disableContainerLogging'] as core.bool?, + displayName: json_['displayName'] as core.String?, + encryptionSpec: json_.containsKey('encryptionSpec') + ? GoogleCloudAiplatformV1EncryptionSpec.fromJson( + json_['encryptionSpec'] + as core.Map) + : null, + endTime: json_['endTime'] as core.String?, + error: json_.containsKey('error') + ? GoogleRpcStatus.fromJson( + json_['error'] as core.Map) + : null, + explanationSpec: json_.containsKey('explanationSpec') + ? GoogleCloudAiplatformV1ExplanationSpec.fromJson( + json_['explanationSpec'] + as core.Map) + : null, + generateExplanation: json_['generateExplanation'] as core.bool?, + inputConfig: json_.containsKey('inputConfig') + ? GoogleCloudAiplatformV1BatchPredictionJobInputConfig.fromJson( + json_['inputConfig'] as core.Map) + : null, + instanceConfig: json_.containsKey('instanceConfig') + ? GoogleCloudAiplatformV1BatchPredictionJobInstanceConfig + .fromJson(json_['instanceConfig'] + as core.Map) + : null, + labels: + (json_['labels'] as core.Map?)?.map( + (key, value) => core.MapEntry( + key, + value as core.String, + ), + ), + manualBatchTuningParameters: + json_.containsKey('manualBatchTuningParameters') + ? GoogleCloudAiplatformV1ManualBatchTuningParameters.fromJson( + json_['manualBatchTuningParameters'] + as core.Map) + : null, + model: json_['model'] as core.String?, + modelParameters: json_['modelParameters'], + modelVersionId: json_['modelVersionId'] as core.String?, + name: json_['name'] as core.String?, + outputConfig: json_.containsKey('outputConfig') + ? GoogleCloudAiplatformV1BatchPredictionJobOutputConfig.fromJson( + json_['outputConfig'] as core.Map) + : null, + outputInfo: json_.containsKey('outputInfo') + ? GoogleCloudAiplatformV1BatchPredictionJobOutputInfo.fromJson( + json_['outputInfo'] as core.Map) + : null, + partialFailures: (json_['partialFailures'] as core.List?) + ?.map((value) => GoogleRpcStatus.fromJson( + value as core.Map)) + .toList(), + resourcesConsumed: json_.containsKey('resourcesConsumed') + ? GoogleCloudAiplatformV1ResourcesConsumed.fromJson( + json_['resourcesConsumed'] + as core.Map) + : null, + satisfiesPzi: json_['satisfiesPzi'] as core.bool?, + satisfiesPzs: json_['satisfiesPzs'] as core.bool?, + serviceAccount: json_['serviceAccount'] as core.String?, + startTime: json_['startTime'] as core.String?, + state: json_['state'] as core.String?, + unmanagedContainerModel: json_.containsKey('unmanagedContainerModel') + ? 
GoogleCloudAiplatformV1UnmanagedContainerModel.fromJson( + json_['unmanagedContainerModel'] + as core.Map) + : null, + updateTime: json_['updateTime'] as core.String?, + ); + + core.Map toJson() => { + if (completionStats != null) 'completionStats': completionStats!, + if (createTime != null) 'createTime': createTime!, + if (dedicatedResources != null) + 'dedicatedResources': dedicatedResources!, + if (disableContainerLogging != null) + 'disableContainerLogging': disableContainerLogging!, + if (displayName != null) 'displayName': displayName!, + if (encryptionSpec != null) 'encryptionSpec': encryptionSpec!, + if (endTime != null) 'endTime': endTime!, + if (error != null) 'error': error!, + if (explanationSpec != null) 'explanationSpec': explanationSpec!, + if (generateExplanation != null) + 'generateExplanation': generateExplanation!, + if (inputConfig != null) 'inputConfig': inputConfig!, + if (instanceConfig != null) 'instanceConfig': instanceConfig!, + if (labels != null) 'labels': labels!, + if (manualBatchTuningParameters != null) + 'manualBatchTuningParameters': manualBatchTuningParameters!, + if (model != null) 'model': model!, + if (modelParameters != null) 'modelParameters': modelParameters!, + if (modelVersionId != null) 'modelVersionId': modelVersionId!, + if (name != null) 'name': name!, + if (outputConfig != null) 'outputConfig': outputConfig!, + if (outputInfo != null) 'outputInfo': outputInfo!, + if (partialFailures != null) 'partialFailures': partialFailures!, + if (resourcesConsumed != null) 'resourcesConsumed': resourcesConsumed!, + if (satisfiesPzi != null) 'satisfiesPzi': satisfiesPzi!, + if (satisfiesPzs != null) 'satisfiesPzs': satisfiesPzs!, + if (serviceAccount != null) 'serviceAccount': serviceAccount!, + if (startTime != null) 'startTime': startTime!, + if (state != null) 'state': state!, + if (unmanagedContainerModel != null) + 'unmanagedContainerModel': unmanagedContainerModel!, + if (updateTime != null) 'updateTime': updateTime!, + }; +} + +/// Configures the input to BatchPredictionJob. +/// +/// See Model.supported_input_storage_formats for Model's supported input +/// formats, and how instances should be expressed via any of them. +class GoogleCloudAiplatformV1BatchPredictionJobInputConfig { + /// The BigQuery location of the input table. + /// + /// The schema of the table should be in the format described by the given + /// context OpenAPI Schema, if one is provided. The table may contain + /// additional columns that are not described by the schema, and they will be + /// ignored. + GoogleCloudAiplatformV1BigQuerySource? bigquerySource; + + /// The Cloud Storage location for the input instances. + GoogleCloudAiplatformV1GcsSource? gcsSource; + + /// The format in which instances are given, must be one of the Model's + /// supported_input_storage_formats. + /// + /// Required. + core.String? instancesFormat; + + GoogleCloudAiplatformV1BatchPredictionJobInputConfig({ + this.bigquerySource, + this.gcsSource, + this.instancesFormat, + }); + + GoogleCloudAiplatformV1BatchPredictionJobInputConfig.fromJson(core.Map json_) + : this( + bigquerySource: json_.containsKey('bigquerySource') + ? GoogleCloudAiplatformV1BigQuerySource.fromJson( + json_['bigquerySource'] + as core.Map) + : null, + gcsSource: json_.containsKey('gcsSource') + ? 
GoogleCloudAiplatformV1GcsSource.fromJson( + json_['gcsSource'] as core.Map) + : null, + instancesFormat: json_['instancesFormat'] as core.String?, + ); + + core.Map toJson() => { + if (bigquerySource != null) 'bigquerySource': bigquerySource!, + if (gcsSource != null) 'gcsSource': gcsSource!, + if (instancesFormat != null) 'instancesFormat': instancesFormat!, + }; +} + +/// Configuration defining how to transform batch prediction input instances to +/// the instances that the Model accepts. +class GoogleCloudAiplatformV1BatchPredictionJobInstanceConfig { + /// Fields that will be excluded in the prediction instance that is sent to + /// the Model. + /// + /// Excluded will be attached to the batch prediction output if key_field is + /// not specified. When excluded_fields is populated, included_fields must be + /// empty. The input must be JSONL with objects at each line, BigQuery or + /// TfRecord. + core.List? excludedFields; + + /// Fields that will be included in the prediction instance that is sent to + /// the Model. + /// + /// If instance_type is `array`, the order of field names in included_fields + /// also determines the order of the values in the array. When included_fields + /// is populated, excluded_fields must be empty. The input must be JSONL with + /// objects at each line, BigQuery or TfRecord. + core.List? includedFields; + + /// The format of the instance that the Model accepts. + /// + /// Vertex AI will convert compatible batch prediction input instance formats + /// to the specified format. Supported values are: * `object`: Each input is + /// converted to JSON object format. * For `bigquery`, each row is converted + /// to an object. * For `jsonl`, each line of the JSONL input must be an + /// object. * Does not apply to `csv`, `file-list`, `tf-record`, or + /// `tf-record-gzip`. * `array`: Each input is converted to JSON array format. + /// * For `bigquery`, each row is converted to an array. The order of columns + /// is determined by the BigQuery column order, unless included_fields is + /// populated. included_fields must be populated for specifying field orders. + /// * For `jsonl`, if each line of the JSONL input is an object, + /// included_fields must be populated for specifying field orders. * Does not + /// apply to `csv`, `file-list`, `tf-record`, or `tf-record-gzip`. If not + /// specified, Vertex AI converts the batch prediction input as follows: * For + /// `bigquery` and `csv`, the behavior is the same as `array`. The order of + /// columns is the same as defined in the file or table, unless + /// included_fields is populated. * For `jsonl`, the prediction instance + /// format is determined by each line of the input. * For + /// `tf-record`/`tf-record-gzip`, each record will be converted to an object + /// in the format of `{"b64": }`, where `` is the Base64-encoded string of the + /// content of the record. * For `file-list`, each file in the list will be + /// converted to an object in the format of `{"b64": }`, where `` is the + /// Base64-encoded string of the content of the file. + core.String? instanceType; + + /// The name of the field that is considered as a key. + /// + /// The values identified by the key field is not included in the transformed + /// instances that is sent to the Model. This is similar to specifying this + /// name of the field in excluded_fields. In addition, the batch prediction + /// output will not include the instances. 
Instead the output will only + /// include the value of the key field, in a field named `key` in the output: + /// * For `jsonl` output format, the output will have a `key` field instead of + /// the `instance` field. * For `csv`/`bigquery` output format, the output + /// will have a `key` column instead of the instance feature columns. The + /// input must be JSONL with objects at each line, CSV, BigQuery or TfRecord. + core.String? keyField; + + GoogleCloudAiplatformV1BatchPredictionJobInstanceConfig({ + this.excludedFields, + this.includedFields, + this.instanceType, + this.keyField, + }); + + GoogleCloudAiplatformV1BatchPredictionJobInstanceConfig.fromJson( + core.Map json_) + : this( + excludedFields: (json_['excludedFields'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + includedFields: (json_['includedFields'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + instanceType: json_['instanceType'] as core.String?, + keyField: json_['keyField'] as core.String?, + ); + + core.Map toJson() => { + if (excludedFields != null) 'excludedFields': excludedFields!, + if (includedFields != null) 'includedFields': includedFields!, + if (instanceType != null) 'instanceType': instanceType!, + if (keyField != null) 'keyField': keyField!, + }; +} + +/// Configures the output of BatchPredictionJob. +/// +/// See Model.supported_output_storage_formats for supported output formats, and +/// how predictions are expressed via any of them. +class GoogleCloudAiplatformV1BatchPredictionJobOutputConfig { + /// The BigQuery project or dataset location where the output is to be written + /// to. + /// + /// If project is provided, a new dataset is created with name `prediction__` + /// where is made BigQuery-dataset-name compatible (for example, most special + /// characters become underscores), and timestamp is in + /// YYYY_MM_DDThh_mm_ss_sssZ "based on ISO-8601" format. In the dataset two + /// tables will be created, `predictions`, and `errors`. If the Model has both + /// instance and prediction schemata defined then the tables have columns as + /// follows: The `predictions` table contains instances for which the + /// prediction succeeded, it has columns as per a concatenation of the Model's + /// instance and prediction schemata. The `errors` table contains rows for + /// which the prediction has failed, it has instance columns, as per the + /// instance schema, followed by a single "errors" column, which as values has + /// google.rpc.Status represented as a STRUCT, and containing only `code` and + /// `message`. + GoogleCloudAiplatformV1BigQueryDestination? bigqueryDestination; + + /// The Cloud Storage location of the directory where the output is to be + /// written to. + /// + /// In the given directory a new directory is created. Its name is + /// `prediction--`, where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 + /// format. Inside of it files `predictions_0001.`, `predictions_0002.`, ..., + /// `predictions_N.` are created where `` depends on chosen + /// predictions_format, and N may equal 0001 and depends on the total number + /// of successfully predicted instances. If the Model has both instance and + /// prediction schemata defined then each such file contains predictions as + /// per the predictions_format. If prediction for any instance failed + /// (partially or completely), then an additional `errors_0001.`, + /// `errors_0002.`,..., `errors_N.` files are created (N depends on total + /// number of failed predictions).
These files contain the failed instances, + /// as per their schema, followed by an additional `error` field which as + /// value has google.rpc.Status containing only `code` and `message` fields. + GoogleCloudAiplatformV1GcsDestination? gcsDestination; + + /// The format in which Vertex AI gives the predictions, must be one of the + /// Model's supported_output_storage_formats. + /// + /// Required. + core.String? predictionsFormat; + + GoogleCloudAiplatformV1BatchPredictionJobOutputConfig({ + this.bigqueryDestination, + this.gcsDestination, + this.predictionsFormat, + }); + + GoogleCloudAiplatformV1BatchPredictionJobOutputConfig.fromJson(core.Map json_) + : this( + bigqueryDestination: json_.containsKey('bigqueryDestination') + ? GoogleCloudAiplatformV1BigQueryDestination.fromJson( + json_['bigqueryDestination'] + as core.Map) + : null, + gcsDestination: json_.containsKey('gcsDestination') + ? GoogleCloudAiplatformV1GcsDestination.fromJson( + json_['gcsDestination'] + as core.Map) + : null, + predictionsFormat: json_['predictionsFormat'] as core.String?, + ); + + core.Map toJson() => { + if (bigqueryDestination != null) + 'bigqueryDestination': bigqueryDestination!, + if (gcsDestination != null) 'gcsDestination': gcsDestination!, + if (predictionsFormat != null) 'predictionsFormat': predictionsFormat!, + }; +} + +/// Further describes this job's output. +/// +/// Supplements output_config. +class GoogleCloudAiplatformV1BatchPredictionJobOutputInfo { + /// The path of the BigQuery dataset created, in `bq://projectId.bqDatasetId` + /// format, into which the prediction output is written. + /// + /// Output only. + core.String? bigqueryOutputDataset; + + /// The name of the BigQuery table created, in `predictions_` format, into + /// which the prediction output is written. + /// + /// Can be used by UI to generate the BigQuery output path, for example. + /// + /// Output only. + core.String? bigqueryOutputTable; + + /// The full path of the Cloud Storage directory created, into which the + /// prediction output is written. + /// + /// Output only. + core.String? gcsOutputDirectory; + + GoogleCloudAiplatformV1BatchPredictionJobOutputInfo({ + this.bigqueryOutputDataset, + this.bigqueryOutputTable, + this.gcsOutputDirectory, + }); + + GoogleCloudAiplatformV1BatchPredictionJobOutputInfo.fromJson(core.Map json_) + : this( + bigqueryOutputDataset: json_['bigqueryOutputDataset'] as core.String?, + bigqueryOutputTable: json_['bigqueryOutputTable'] as core.String?, + gcsOutputDirectory: json_['gcsOutputDirectory'] as core.String?, + ); + + core.Map toJson() => { + if (bigqueryOutputDataset != null) + 'bigqueryOutputDataset': bigqueryOutputDataset!, + if (bigqueryOutputTable != null) + 'bigqueryOutputTable': bigqueryOutputTable!, + if (gcsOutputDirectory != null) + 'gcsOutputDirectory': gcsOutputDirectory!, + }; +} + +/// Request message for FeaturestoreService.BatchReadFeatureValues. +class GoogleCloudAiplatformV1BatchReadFeatureValuesRequest { + /// Similar to csv_read_instances, but from BigQuery source. + GoogleCloudAiplatformV1BigQuerySource? bigqueryReadInstances; + + /// Each read instance consists of exactly one read timestamp and one or more + /// entity IDs identifying entities of the corresponding EntityTypes whose + /// Features are requested. + /// + /// Each output instance contains Feature values of requested entities + /// concatenated together as of the read time. An example read instance may be + /// `foo_entity_id, bar_entity_id, 2020-01-01T10:00:00.123Z`. 
An example + /// output instance may be `foo_entity_id, bar_entity_id, + /// 2020-01-01T10:00:00.123Z, foo_entity_feature1_value, + /// bar_entity_feature2_value`. Timestamp in each read instance must be + /// millisecond-aligned. `csv_read_instances` are read instances stored in a + /// plain-text CSV file. The header should be: \[ENTITY_TYPE_ID1\], + /// \[ENTITY_TYPE_ID2\], ..., timestamp The columns can be in any order. + /// Values in the timestamp column must use the RFC 3339 format, e.g. + /// `2012-07-30T10:43:17.123Z`. + GoogleCloudAiplatformV1CsvSource? csvReadInstances; + + /// Specifies output location and format. + /// + /// Required. + GoogleCloudAiplatformV1FeatureValueDestination? destination; + + /// Specifies EntityType grouping Features to read values of and settings. + /// + /// Required. + core.List? + entityTypeSpecs; + + /// When not empty, the specified fields in the *_read_instances source will + /// be joined as-is in the output, in addition to those fields from the + /// Featurestore Entity. + /// + /// For BigQuery source, the type of the pass-through values will be + /// automatically inferred. For CSV source, the pass-through values will be + /// passed as opaque bytes. + core.List< + GoogleCloudAiplatformV1BatchReadFeatureValuesRequestPassThroughField>? + passThroughFields; + + /// Excludes Feature values with feature generation timestamp before this + /// timestamp. + /// + /// If not set, retrieve oldest values kept in Feature Store. Timestamp, if + /// present, must not have higher than millisecond precision. + /// + /// Optional. + core.String? startTime; + + GoogleCloudAiplatformV1BatchReadFeatureValuesRequest({ + this.bigqueryReadInstances, + this.csvReadInstances, + this.destination, + this.entityTypeSpecs, + this.passThroughFields, + this.startTime, + }); + + GoogleCloudAiplatformV1BatchReadFeatureValuesRequest.fromJson(core.Map json_) + : this( + bigqueryReadInstances: json_.containsKey('bigqueryReadInstances') + ? GoogleCloudAiplatformV1BigQuerySource.fromJson( + json_['bigqueryReadInstances'] + as core.Map) + : null, + csvReadInstances: json_.containsKey('csvReadInstances') + ? GoogleCloudAiplatformV1CsvSource.fromJson( + json_['csvReadInstances'] + as core.Map) + : null, + destination: json_.containsKey('destination') + ? GoogleCloudAiplatformV1FeatureValueDestination.fromJson( + json_['destination'] as core.Map) + : null, + entityTypeSpecs: (json_['entityTypeSpecs'] as core.List?) + ?.map((value) => + GoogleCloudAiplatformV1BatchReadFeatureValuesRequestEntityTypeSpec + .fromJson(value as core.Map)) + .toList(), + passThroughFields: (json_['passThroughFields'] as core.List?) + ?.map((value) => + GoogleCloudAiplatformV1BatchReadFeatureValuesRequestPassThroughField + .fromJson(value as core.Map)) + .toList(), + startTime: json_['startTime'] as core.String?, + ); + + core.Map toJson() => { + if (bigqueryReadInstances != null) + 'bigqueryReadInstances': bigqueryReadInstances!, + if (csvReadInstances != null) 'csvReadInstances': csvReadInstances!, + if (destination != null) 'destination': destination!, + if (entityTypeSpecs != null) 'entityTypeSpecs': entityTypeSpecs!, + if (passThroughFields != null) 'passThroughFields': passThroughFields!, + if (startTime != null) 'startTime': startTime!, + }; +} + +/// Selects Features of an EntityType to read values of and specifies read +/// settings. +class GoogleCloudAiplatformV1BatchReadFeatureValuesRequestEntityTypeSpec { + /// ID of the EntityType to select Features. 
+ /// + /// The EntityType id is the entity_type_id specified during EntityType + /// creation. + /// + /// Required. + core.String? entityTypeId; + + /// Selectors choosing which Feature values to read from the EntityType. + /// + /// Required. + GoogleCloudAiplatformV1FeatureSelector? featureSelector; + + /// Per-Feature settings for the batch read. + core.List? settings; + + GoogleCloudAiplatformV1BatchReadFeatureValuesRequestEntityTypeSpec({ + this.entityTypeId, + this.featureSelector, + this.settings, + }); + + GoogleCloudAiplatformV1BatchReadFeatureValuesRequestEntityTypeSpec.fromJson( + core.Map json_) + : this( + entityTypeId: json_['entityTypeId'] as core.String?, + featureSelector: json_.containsKey('featureSelector') + ? GoogleCloudAiplatformV1FeatureSelector.fromJson( + json_['featureSelector'] + as core.Map) + : null, + settings: (json_['settings'] as core.List?) + ?.map((value) => + GoogleCloudAiplatformV1DestinationFeatureSetting.fromJson( + value as core.Map)) + .toList(), + ); + + core.Map toJson() => { + if (entityTypeId != null) 'entityTypeId': entityTypeId!, + if (featureSelector != null) 'featureSelector': featureSelector!, + if (settings != null) 'settings': settings!, + }; +} + +/// Describe pass-through fields in read_instance source. +class GoogleCloudAiplatformV1BatchReadFeatureValuesRequestPassThroughField { + /// The name of the field in the CSV header or the name of the column in + /// BigQuery table. + /// + /// The naming restriction is the same as Feature.name. + /// + /// Required. + core.String? fieldName; + + GoogleCloudAiplatformV1BatchReadFeatureValuesRequestPassThroughField({ + this.fieldName, + }); + + GoogleCloudAiplatformV1BatchReadFeatureValuesRequestPassThroughField.fromJson( + core.Map json_) + : this( + fieldName: json_['fieldName'] as core.String?, + ); + + core.Map toJson() => { + if (fieldName != null) 'fieldName': fieldName!, + }; +} + +/// Response message for TensorboardService.BatchReadTensorboardTimeSeriesData. +class GoogleCloudAiplatformV1BatchReadTensorboardTimeSeriesDataResponse { + /// The returned time series data. + core.List? timeSeriesData; + + GoogleCloudAiplatformV1BatchReadTensorboardTimeSeriesDataResponse({ + this.timeSeriesData, + }); + + GoogleCloudAiplatformV1BatchReadTensorboardTimeSeriesDataResponse.fromJson( + core.Map json_) + : this( + timeSeriesData: (json_['timeSeriesData'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1TimeSeriesData.fromJson( + value as core.Map)) + .toList(), + ); + + core.Map toJson() => { + if (timeSeriesData != null) 'timeSeriesData': timeSeriesData!, + }; +} + +/// The BigQuery location for the output content. +class GoogleCloudAiplatformV1BigQueryDestination { + /// BigQuery URI to a project or table, up to 2000 characters long. + /// + /// When only the project is specified, the Dataset and Table is created. When + /// the full table reference is specified, the Dataset must exist and table + /// must not exist. Accepted forms: * BigQuery path. For example: + /// `bq://projectId` or `bq://projectId.bqDatasetId` or + /// `bq://projectId.bqDatasetId.bqTableId`. + /// + /// Required. + core.String? outputUri; + + GoogleCloudAiplatformV1BigQueryDestination({ + this.outputUri, + }); + + GoogleCloudAiplatformV1BigQueryDestination.fromJson(core.Map json_) + : this( + outputUri: json_['outputUri'] as core.String?, + ); + + core.Map toJson() => { + if (outputUri != null) 'outputUri': outputUri!, + }; +} + +/// The BigQuery location for the input content. 
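// Editorial note (illustrative sketch, not part of the generated output):
// the accepted `bq://` forms described above map directly onto the
// `outputUri` field of GoogleCloudAiplatformV1BigQueryDestination. The
// project, dataset, and table IDs below are hypothetical placeholders.
void _exampleBigQueryDestinations() {
  // Project only: the Dataset and Table are created for you.
  final projectOnly =
      GoogleCloudAiplatformV1BigQueryDestination(outputUri: 'bq://my-project');

  // Full table reference: the Dataset must already exist and the Table must not.
  final fullTable = GoogleCloudAiplatformV1BigQueryDestination(
    outputUri: 'bq://my-project.my_dataset.my_table',
  );

  print(projectOnly.toJson()); // {outputUri: bq://my-project}
  print(fullTable.toJson()); // {outputUri: bq://my-project.my_dataset.my_table}
}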
+class GoogleCloudAiplatformV1BigQuerySource { + /// BigQuery URI to a table, up to 2000 characters long. + /// + /// Accepted forms: * BigQuery path. For example: + /// `bq://projectId.bqDatasetId.bqTableId`. + /// + /// Required. + core.String? inputUri; + + GoogleCloudAiplatformV1BigQuerySource({ + this.inputUri, + }); + + GoogleCloudAiplatformV1BigQuerySource.fromJson(core.Map json_) + : this( + inputUri: json_['inputUri'] as core.String?, + ); + + core.Map toJson() => { + if (inputUri != null) 'inputUri': inputUri!, + }; +} + +/// Input for bleu metric. +class GoogleCloudAiplatformV1BleuInput { + /// Repeated bleu instances. + /// + /// Required. + core.List? instances; + + /// Spec for bleu score metric. + /// + /// Required. + GoogleCloudAiplatformV1BleuSpec? metricSpec; + + GoogleCloudAiplatformV1BleuInput({ + this.instances, + this.metricSpec, + }); + + GoogleCloudAiplatformV1BleuInput.fromJson(core.Map json_) + : this( + instances: (json_['instances'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1BleuInstance.fromJson( + value as core.Map)) + .toList(), + metricSpec: json_.containsKey('metricSpec') + ? GoogleCloudAiplatformV1BleuSpec.fromJson( + json_['metricSpec'] as core.Map) + : null, + ); + + core.Map toJson() => { + if (instances != null) 'instances': instances!, + if (metricSpec != null) 'metricSpec': metricSpec!, + }; +} + +/// Spec for bleu instance. +typedef GoogleCloudAiplatformV1BleuInstance = $Instance00; + +/// Bleu metric value for an instance. +class GoogleCloudAiplatformV1BleuMetricValue { + /// Bleu score. + /// + /// Output only. + core.double? score; + + GoogleCloudAiplatformV1BleuMetricValue({ + this.score, + }); + + GoogleCloudAiplatformV1BleuMetricValue.fromJson(core.Map json_) + : this( + score: (json_['score'] as core.num?)?.toDouble(), + ); + + core.Map toJson() => { + if (score != null) 'score': score!, + }; +} + +/// Results for bleu metric. +class GoogleCloudAiplatformV1BleuResults { + /// Bleu metric values. + /// + /// Output only. + core.List? bleuMetricValues; + + GoogleCloudAiplatformV1BleuResults({ + this.bleuMetricValues, + }); + + GoogleCloudAiplatformV1BleuResults.fromJson(core.Map json_) + : this( + bleuMetricValues: (json_['bleuMetricValues'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1BleuMetricValue.fromJson( + value as core.Map)) + .toList(), + ); + + core.Map toJson() => { + if (bleuMetricValues != null) 'bleuMetricValues': bleuMetricValues!, + }; +} + +/// Spec for bleu score metric - calculates the precision of n-grams in the +/// prediction as compared to reference - returns a score ranging between 0 to +/// 1. +class GoogleCloudAiplatformV1BleuSpec { + /// Whether to use_effective_order to compute bleu score. + /// + /// Optional. + core.bool? useEffectiveOrder; + + GoogleCloudAiplatformV1BleuSpec({ + this.useEffectiveOrder, + }); + + GoogleCloudAiplatformV1BleuSpec.fromJson(core.Map json_) + : this( + useEffectiveOrder: json_['useEffectiveOrder'] as core.bool?, + ); + + core.Map toJson() => { + if (useEffectiveOrder != null) 'useEffectiveOrder': useEffectiveOrder!, + }; +} + +/// Content blob. +/// +/// It's preferred to send as text directly rather than raw bytes. +class GoogleCloudAiplatformV1Blob { + /// Raw bytes. + /// + /// Required. + core.String? data; + core.List get dataAsBytes => convert.base64.decode(data!); + + set dataAsBytes(core.List bytes_) { + data = + convert.base64.encode(bytes_).replaceAll('/', '_').replaceAll('+', '-'); + } + + /// The IANA standard MIME type of the source data. 
+ /// + /// Required. + core.String? mimeType; + + GoogleCloudAiplatformV1Blob({ + this.data, + this.mimeType, + }); + + GoogleCloudAiplatformV1Blob.fromJson(core.Map json_) + : this( + data: json_['data'] as core.String?, + mimeType: json_['mimeType'] as core.String?, + ); + + core.Map toJson() => { + if (data != null) 'data': data!, + if (mimeType != null) 'mimeType': mimeType!, + }; +} + +/// Config for blur baseline. +/// +/// When enabled, a linear path from the maximally blurred image to the input +/// image is created. Using a blurred baseline instead of zero (black image) is +/// motivated by the BlurIG approach explained here: +/// https://arxiv.org/abs/2004.03383 +class GoogleCloudAiplatformV1BlurBaselineConfig { + /// The standard deviation of the blur kernel for the blurred baseline. + /// + /// The same blurring parameter is used for both the height and the width + /// dimension. If not set, the method defaults to the zero (i.e. black for + /// images) baseline. + core.double? maxBlurSigma; + + GoogleCloudAiplatformV1BlurBaselineConfig({ + this.maxBlurSigma, + }); + + GoogleCloudAiplatformV1BlurBaselineConfig.fromJson(core.Map json_) + : this( + maxBlurSigma: (json_['maxBlurSigma'] as core.num?)?.toDouble(), + ); + + core.Map toJson() => { + if (maxBlurSigma != null) 'maxBlurSigma': maxBlurSigma!, + }; +} + +/// A list of boolean values. +class GoogleCloudAiplatformV1BoolArray { + /// A list of bool values. + core.List? values; + + GoogleCloudAiplatformV1BoolArray({ + this.values, + }); + + GoogleCloudAiplatformV1BoolArray.fromJson(core.Map json_) + : this( + values: (json_['values'] as core.List?) + ?.map((value) => value as core.bool) + .toList(), + ); + + core.Map toJson() => { + if (values != null) 'values': values!, + }; +} + +/// Config of GenAI caching features. +/// +/// This is a singleton resource. +class GoogleCloudAiplatformV1CacheConfig { + /// If set to true, disables GenAI caching. + /// + /// Otherwise caching is enabled. + core.bool? disableCache; + + /// Identifier. + /// + /// Name of the cache config. Format: - `projects/{project}/cacheConfig`. + core.String? name; + + GoogleCloudAiplatformV1CacheConfig({ + this.disableCache, + this.name, + }); + + GoogleCloudAiplatformV1CacheConfig.fromJson(core.Map json_) + : this( + disableCache: json_['disableCache'] as core.bool?, + name: json_['name'] as core.String?, + ); + + core.Map toJson() => { + if (disableCache != null) 'disableCache': disableCache!, + if (name != null) 'name': name!, + }; +} + +/// A resource used in LLM queries for users to explicitly specify what to cache +/// and how to cache. +class GoogleCloudAiplatformV1CachedContent { + /// Input only. + /// + /// Immutable. The content to cache + /// + /// Optional. + core.List? contents; + + /// Creation time of the cache entry. + /// + /// Output only. + core.String? createTime; + + /// The user-generated meaningful display name of the cached content. + /// + /// Optional. Immutable. + core.String? displayName; + + /// Timestamp of when this resource is considered expired. + /// + /// This is *always* provided on output, regardless of what was sent on input. + core.String? expireTime; + + /// The name of the publisher model to use for cached content. + /// + /// Format: + /// projects/{project}/locations/{location}/publishers/{publisher}/models/{model} + /// + /// Immutable. + core.String? model; + + /// Identifier.
+ /// + /// The server-generated resource name of the cached content Format: + /// projects/{project}/locations/{location}/cachedContents/{cached_content} + /// + /// Immutable. + core.String? name; + + /// Input only. + /// + /// Immutable. Developer set system instruction. Currently, text only + /// + /// Optional. + GoogleCloudAiplatformV1Content? systemInstruction; + + /// Input only. + /// + /// Immutable. Tool config. This config is shared for all tools + /// + /// Optional. + GoogleCloudAiplatformV1ToolConfig? toolConfig; + + /// Input only. + /// + /// Immutable. A list of `Tools` the model may use to generate the next + /// response + /// + /// Optional. + core.List? tools; + + /// Input only. + /// + /// The TTL for this resource. The expiration time is computed: now + TTL. + core.String? ttl; + + /// When the cache entry was last updated in UTC time. + /// + /// Output only. + core.String? updateTime; + + /// Metadata on the usage of the cached content. + /// + /// Output only. + GoogleCloudAiplatformV1CachedContentUsageMetadata? usageMetadata; + + GoogleCloudAiplatformV1CachedContent({ + this.contents, + this.createTime, + this.displayName, + this.expireTime, + this.model, + this.name, + this.systemInstruction, + this.toolConfig, + this.tools, + this.ttl, + this.updateTime, + this.usageMetadata, + }); + + GoogleCloudAiplatformV1CachedContent.fromJson(core.Map json_) + : this( + contents: (json_['contents'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1Content.fromJson( + value as core.Map)) + .toList(), + createTime: json_['createTime'] as core.String?, + displayName: json_['displayName'] as core.String?, + expireTime: json_['expireTime'] as core.String?, + model: json_['model'] as core.String?, + name: json_['name'] as core.String?, + systemInstruction: json_.containsKey('systemInstruction') + ? GoogleCloudAiplatformV1Content.fromJson( + json_['systemInstruction'] + as core.Map) + : null, + toolConfig: json_.containsKey('toolConfig') + ? GoogleCloudAiplatformV1ToolConfig.fromJson( + json_['toolConfig'] as core.Map) + : null, + tools: (json_['tools'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1Tool.fromJson( + value as core.Map)) + .toList(), + ttl: json_['ttl'] as core.String?, + updateTime: json_['updateTime'] as core.String?, + usageMetadata: json_.containsKey('usageMetadata') + ? GoogleCloudAiplatformV1CachedContentUsageMetadata.fromJson( + json_['usageMetadata'] as core.Map) + : null, + ); + + core.Map toJson() => { + if (contents != null) 'contents': contents!, + if (createTime != null) 'createTime': createTime!, + if (displayName != null) 'displayName': displayName!, + if (expireTime != null) 'expireTime': expireTime!, + if (model != null) 'model': model!, + if (name != null) 'name': name!, + if (systemInstruction != null) 'systemInstruction': systemInstruction!, + if (toolConfig != null) 'toolConfig': toolConfig!, + if (tools != null) 'tools': tools!, + if (ttl != null) 'ttl': ttl!, + if (updateTime != null) 'updateTime': updateTime!, + if (usageMetadata != null) 'usageMetadata': usageMetadata!, + }; +} + +/// Metadata on the usage of the cached content. +class GoogleCloudAiplatformV1CachedContentUsageMetadata { + /// Duration of audio in seconds. + core.int? audioDurationSeconds; + + /// Number of images. + core.int? imageCount; + + /// Number of text characters. + core.int? textCount; + + /// Total number of tokens that the cached content consumes. + core.int? totalTokenCount; + + /// Duration of video in seconds. + core.int? 
videoDurationSeconds; + + GoogleCloudAiplatformV1CachedContentUsageMetadata({ + this.audioDurationSeconds, + this.imageCount, + this.textCount, + this.totalTokenCount, + this.videoDurationSeconds, + }); + + GoogleCloudAiplatformV1CachedContentUsageMetadata.fromJson(core.Map json_) + : this( + audioDurationSeconds: json_['audioDurationSeconds'] as core.int?, + imageCount: json_['imageCount'] as core.int?, + textCount: json_['textCount'] as core.int?, + totalTokenCount: json_['totalTokenCount'] as core.int?, + videoDurationSeconds: json_['videoDurationSeconds'] as core.int?, + ); + + core.Map toJson() => { + if (audioDurationSeconds != null) + 'audioDurationSeconds': audioDurationSeconds!, + if (imageCount != null) 'imageCount': imageCount!, + if (textCount != null) 'textCount': textCount!, + if (totalTokenCount != null) 'totalTokenCount': totalTokenCount!, + if (videoDurationSeconds != null) + 'videoDurationSeconds': videoDurationSeconds!, + }; +} + +/// Request message for JobService.CancelBatchPredictionJob. +typedef GoogleCloudAiplatformV1CancelBatchPredictionJobRequest = $Empty; + +/// Request message for JobService.CancelCustomJob. +typedef GoogleCloudAiplatformV1CancelCustomJobRequest = $Empty; + +/// Request message for JobService.CancelDataLabelingJob. +typedef GoogleCloudAiplatformV1CancelDataLabelingJobRequest = $Empty; + +/// Request message for JobService.CancelHyperparameterTuningJob. +typedef GoogleCloudAiplatformV1CancelHyperparameterTuningJobRequest = $Empty; + +/// Request message for JobService.CancelNasJob. +typedef GoogleCloudAiplatformV1CancelNasJobRequest = $Empty; + +/// Request message for PipelineService.CancelPipelineJob. +typedef GoogleCloudAiplatformV1CancelPipelineJobRequest = $Empty; + +/// Request message for PipelineService.CancelTrainingPipeline. +typedef GoogleCloudAiplatformV1CancelTrainingPipelineRequest = $Empty; + +/// Request message for GenAiTuningService.CancelTuningJob. +typedef GoogleCloudAiplatformV1CancelTuningJobRequest = $Empty; + +/// A response candidate generated from the model. +class GoogleCloudAiplatformV1Candidate { + /// Average log probability score of the candidate. + /// + /// Output only. + core.double? avgLogprobs; + + /// Source attribution of the generated content. + /// + /// Output only. + GoogleCloudAiplatformV1CitationMetadata? citationMetadata; + + /// Content parts of the candidate. + /// + /// Output only. + GoogleCloudAiplatformV1Content? content; + + /// Describes the reason the model stopped generating tokens in more detail. + /// + /// This is only filled when `finish_reason` is set. + /// + /// Output only. + core.String? finishMessage; + + /// The reason why the model stopped generating tokens. + /// + /// If empty, the model has not stopped generating the tokens. + /// + /// Output only. + /// Possible string values are: + /// - "FINISH_REASON_UNSPECIFIED" : The finish reason is unspecified. + /// - "STOP" : Token generation reached a natural stopping point or a + /// configured stop sequence. + /// - "MAX_TOKENS" : Token generation reached the configured maximum output + /// tokens. + /// - "SAFETY" : Token generation stopped because the content potentially + /// contains safety violations. NOTE: When streaming, content is empty if + /// content filters block the output. + /// - "RECITATION" : The token generation stopped because of potential + /// recitation. + /// - "OTHER" : All other reasons that stopped the token generation.
+ /// - "BLOCKLIST" : Token generation stopped because the content contains + /// forbidden terms. + /// - "PROHIBITED_CONTENT" : Token generation stopped for potentially + /// containing prohibited content. + /// - "SPII" : Token generation stopped because the content potentially + /// contains Sensitive Personally Identifiable Information (SPII). + /// - "MALFORMED_FUNCTION_CALL" : The function call generated by the model is + /// invalid. + core.String? finishReason; + + /// Metadata specifies sources used to ground generated content. + /// + /// Output only. + GoogleCloudAiplatformV1GroundingMetadata? groundingMetadata; + + /// Index of the candidate. + /// + /// Output only. + core.int? index; + + /// Log-likelihood scores for the response tokens and top tokens + /// + /// Output only. + GoogleCloudAiplatformV1LogprobsResult? logprobsResult; + + /// List of ratings for the safety of a response candidate. + /// + /// There is at most one rating per category. + /// + /// Output only. + core.List? safetyRatings; + + GoogleCloudAiplatformV1Candidate({ + this.avgLogprobs, + this.citationMetadata, + this.content, + this.finishMessage, + this.finishReason, + this.groundingMetadata, + this.index, + this.logprobsResult, + this.safetyRatings, + }); + + GoogleCloudAiplatformV1Candidate.fromJson(core.Map json_) + : this( + avgLogprobs: (json_['avgLogprobs'] as core.num?)?.toDouble(), + citationMetadata: json_.containsKey('citationMetadata') + ? GoogleCloudAiplatformV1CitationMetadata.fromJson( + json_['citationMetadata'] + as core.Map) + : null, + content: json_.containsKey('content') + ? GoogleCloudAiplatformV1Content.fromJson( + json_['content'] as core.Map) + : null, + finishMessage: json_['finishMessage'] as core.String?, + finishReason: json_['finishReason'] as core.String?, + groundingMetadata: json_.containsKey('groundingMetadata') + ? GoogleCloudAiplatformV1GroundingMetadata.fromJson( + json_['groundingMetadata'] + as core.Map) + : null, + index: json_['index'] as core.int?, + logprobsResult: json_.containsKey('logprobsResult') + ? GoogleCloudAiplatformV1LogprobsResult.fromJson( + json_['logprobsResult'] + as core.Map) + : null, + safetyRatings: (json_['safetyRatings'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1SafetyRating.fromJson( + value as core.Map)) + .toList(), + ); + + core.Map toJson() => { + if (avgLogprobs != null) 'avgLogprobs': avgLogprobs!, + if (citationMetadata != null) 'citationMetadata': citationMetadata!, + if (content != null) 'content': content!, + if (finishMessage != null) 'finishMessage': finishMessage!, + if (finishReason != null) 'finishReason': finishReason!, + if (groundingMetadata != null) 'groundingMetadata': groundingMetadata!, + if (index != null) 'index': index!, + if (logprobsResult != null) 'logprobsResult': logprobsResult!, + if (safetyRatings != null) 'safetyRatings': safetyRatings!, + }; +} + +/// Request message for VizierService.CheckTrialEarlyStoppingState. +typedef GoogleCloudAiplatformV1CheckTrialEarlyStoppingStateRequest = $Empty; + +/// Source attributions for content. +class GoogleCloudAiplatformV1Citation { + /// End index into the content. + /// + /// Output only. + core.int? endIndex; + + /// License of the attribution. + /// + /// Output only. + core.String? license; + + /// Publication date of the attribution. + /// + /// Output only. + GoogleTypeDate? publicationDate; + + /// Start index into the content. + /// + /// Output only. + core.int? startIndex; + + /// Title of the attribution. + /// + /// Output only. 
+ core.String? title; + + /// Url reference of the attribution. + /// + /// Output only. + core.String? uri; + + GoogleCloudAiplatformV1Citation({ + this.endIndex, + this.license, + this.publicationDate, + this.startIndex, + this.title, + this.uri, + }); + + GoogleCloudAiplatformV1Citation.fromJson(core.Map json_) + : this( + endIndex: json_['endIndex'] as core.int?, + license: json_['license'] as core.String?, + publicationDate: json_.containsKey('publicationDate') + ? GoogleTypeDate.fromJson(json_['publicationDate'] + as core.Map) + : null, + startIndex: json_['startIndex'] as core.int?, + title: json_['title'] as core.String?, + uri: json_['uri'] as core.String?, + ); + + core.Map toJson() => { + if (endIndex != null) 'endIndex': endIndex!, + if (license != null) 'license': license!, + if (publicationDate != null) 'publicationDate': publicationDate!, + if (startIndex != null) 'startIndex': startIndex!, + if (title != null) 'title': title!, + if (uri != null) 'uri': uri!, + }; +} + +/// A collection of source attributions for a piece of content. +class GoogleCloudAiplatformV1CitationMetadata { + /// List of citations. + /// + /// Output only. + core.List? citations; + + GoogleCloudAiplatformV1CitationMetadata({ + this.citations, + }); + + GoogleCloudAiplatformV1CitationMetadata.fromJson(core.Map json_) + : this( + citations: (json_['citations'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1Citation.fromJson( + value as core.Map)) + .toList(), + ); + + core.Map toJson() => { + if (citations != null) 'citations': citations!, + }; +} + +/// Claim that is extracted from the input text and facts that support it. +class GoogleCloudAiplatformV1Claim { + /// Index in the input text where the claim ends (exclusive). + core.int? endIndex; + + /// Indexes of the facts supporting this claim. + core.List? factIndexes; + + /// Confidence score of this corroboration. + core.double? score; + + /// Index in the input text where the claim starts (inclusive). + core.int? startIndex; + + GoogleCloudAiplatformV1Claim({ + this.endIndex, + this.factIndexes, + this.score, + this.startIndex, + }); + + GoogleCloudAiplatformV1Claim.fromJson(core.Map json_) + : this( + endIndex: json_['endIndex'] as core.int?, + factIndexes: (json_['factIndexes'] as core.List?) + ?.map((value) => value as core.int) + .toList(), + score: (json_['score'] as core.num?)?.toDouble(), + startIndex: json_['startIndex'] as core.int?, + ); + + core.Map toJson() => { + if (endIndex != null) 'endIndex': endIndex!, + if (factIndexes != null) 'factIndexes': factIndexes!, + if (score != null) 'score': score!, + if (startIndex != null) 'startIndex': startIndex!, + }; +} + +/// Configurations (e.g. inference timeout) that are applied on your endpoints. +class GoogleCloudAiplatformV1ClientConnectionConfig { + /// Customizable online prediction request timeout. + core.String? inferenceTimeout; + + GoogleCloudAiplatformV1ClientConnectionConfig({ + this.inferenceTimeout, + }); + + GoogleCloudAiplatformV1ClientConnectionConfig.fromJson(core.Map json_) + : this( + inferenceTimeout: json_['inferenceTimeout'] as core.String?, + ); + + core.Map toJson() => { + if (inferenceTimeout != null) 'inferenceTimeout': inferenceTimeout!, + }; +} + +/// Input for coherence metric. +class GoogleCloudAiplatformV1CoherenceInput { + /// Coherence instance. + /// + /// Required. + GoogleCloudAiplatformV1CoherenceInstance? instance; + + /// Spec for coherence score metric. + /// + /// Required. + GoogleCloudAiplatformV1CoherenceSpec? 
metricSpec; + + GoogleCloudAiplatformV1CoherenceInput({ + this.instance, + this.metricSpec, + }); + + GoogleCloudAiplatformV1CoherenceInput.fromJson(core.Map json_) + : this( + instance: json_.containsKey('instance') + ? GoogleCloudAiplatformV1CoherenceInstance.fromJson( + json_['instance'] as core.Map) + : null, + metricSpec: json_.containsKey('metricSpec') + ? GoogleCloudAiplatformV1CoherenceSpec.fromJson( + json_['metricSpec'] as core.Map) + : null, + ); + + core.Map toJson() => { + if (instance != null) 'instance': instance!, + if (metricSpec != null) 'metricSpec': metricSpec!, + }; +} + +/// Spec for coherence instance. +typedef GoogleCloudAiplatformV1CoherenceInstance = $Instance01; + +/// Spec for coherence result. +class GoogleCloudAiplatformV1CoherenceResult { + /// Confidence for coherence score. + /// + /// Output only. + core.double? confidence; + + /// Explanation for coherence score. + /// + /// Output only. + core.String? explanation; + + /// Coherence score. + /// + /// Output only. + core.double? score; + + GoogleCloudAiplatformV1CoherenceResult({ + this.confidence, + this.explanation, + this.score, + }); + + GoogleCloudAiplatformV1CoherenceResult.fromJson(core.Map json_) + : this( + confidence: (json_['confidence'] as core.num?)?.toDouble(), + explanation: json_['explanation'] as core.String?, + score: (json_['score'] as core.num?)?.toDouble(), + ); + + core.Map toJson() => { + if (confidence != null) 'confidence': confidence!, + if (explanation != null) 'explanation': explanation!, + if (score != null) 'score': score!, + }; +} + +/// Spec for coherence score metric. +typedef GoogleCloudAiplatformV1CoherenceSpec = $Spec; + +/// Input for Comet metric. +class GoogleCloudAiplatformV1CometInput { + /// Comet instance. + /// + /// Required. + GoogleCloudAiplatformV1CometInstance? instance; + + /// Spec for comet metric. + /// + /// Required. + GoogleCloudAiplatformV1CometSpec? metricSpec; + + GoogleCloudAiplatformV1CometInput({ + this.instance, + this.metricSpec, + }); + + GoogleCloudAiplatformV1CometInput.fromJson(core.Map json_) + : this( + instance: json_.containsKey('instance') + ? GoogleCloudAiplatformV1CometInstance.fromJson( + json_['instance'] as core.Map) + : null, + metricSpec: json_.containsKey('metricSpec') + ? GoogleCloudAiplatformV1CometSpec.fromJson( + json_['metricSpec'] as core.Map) + : null, + ); + + core.Map toJson() => { + if (instance != null) 'instance': instance!, + if (metricSpec != null) 'metricSpec': metricSpec!, + }; +} + +/// Spec for Comet instance - The fields used for evaluation are dependent on +/// the comet version. +typedef GoogleCloudAiplatformV1CometInstance = $Instance03; + +/// Spec for Comet result - calculates the comet score for the given instance +/// using the version specified in the spec. +class GoogleCloudAiplatformV1CometResult { + /// Comet score. + /// + /// Range depends on version. + /// + /// Output only. + core.double? score; + + GoogleCloudAiplatformV1CometResult({ + this.score, + }); + + GoogleCloudAiplatformV1CometResult.fromJson(core.Map json_) + : this( + score: (json_['score'] as core.num?)?.toDouble(), + ); + + core.Map toJson() => { + if (score != null) 'score': score!, + }; +} + +/// Spec for Comet metric. +class GoogleCloudAiplatformV1CometSpec { + /// Source language in BCP-47 format. + /// + /// Optional. + core.String? sourceLanguage; + + /// Target language in BCP-47 format. + /// + /// Covers both prediction and reference. + /// + /// Optional. + core.String? 
targetLanguage; + + /// Which version to use for evaluation. + /// + /// Required. + /// Possible string values are: + /// - "COMET_VERSION_UNSPECIFIED" : Comet version unspecified. + /// - "COMET_22_SRC_REF" : Comet 22 for translation + source + reference + /// (source-reference-combined). + core.String? version; + + GoogleCloudAiplatformV1CometSpec({ + this.sourceLanguage, + this.targetLanguage, + this.version, + }); + + GoogleCloudAiplatformV1CometSpec.fromJson(core.Map json_) + : this( + sourceLanguage: json_['sourceLanguage'] as core.String?, + targetLanguage: json_['targetLanguage'] as core.String?, + version: json_['version'] as core.String?, + ); + + core.Map toJson() => { + if (sourceLanguage != null) 'sourceLanguage': sourceLanguage!, + if (targetLanguage != null) 'targetLanguage': targetLanguage!, + if (version != null) 'version': version!, + }; +} + +/// Request message for VizierService.CompleteTrial. +class GoogleCloudAiplatformV1CompleteTrialRequest { + /// If provided, it will be used as the completed Trial's final_measurement; + /// Otherwise, the service will auto-select a previously reported measurement + /// as the final-measurement + /// + /// Optional. + GoogleCloudAiplatformV1Measurement? finalMeasurement; + + /// A human readable reason why the trial was infeasible. + /// + /// This should only be provided if `trial_infeasible` is true. + /// + /// Optional. + core.String? infeasibleReason; + + /// True if the Trial cannot be run with the given Parameter, and + /// final_measurement will be ignored. + /// + /// Optional. + core.bool? trialInfeasible; + + GoogleCloudAiplatformV1CompleteTrialRequest({ + this.finalMeasurement, + this.infeasibleReason, + this.trialInfeasible, + }); + + GoogleCloudAiplatformV1CompleteTrialRequest.fromJson(core.Map json_) + : this( + finalMeasurement: json_.containsKey('finalMeasurement') + ? GoogleCloudAiplatformV1Measurement.fromJson( + json_['finalMeasurement'] + as core.Map) + : null, + infeasibleReason: json_['infeasibleReason'] as core.String?, + trialInfeasible: json_['trialInfeasible'] as core.bool?, + ); + + core.Map toJson() => { + if (finalMeasurement != null) 'finalMeasurement': finalMeasurement!, + if (infeasibleReason != null) 'infeasibleReason': infeasibleReason!, + if (trialInfeasible != null) 'trialInfeasible': trialInfeasible!, + }; +} + +/// Success and error statistics of processing multiple entities (for example, +/// DataItems or structured data rows) in batch. +class GoogleCloudAiplatformV1CompletionStats { + /// The number of entities for which any error was encountered. + /// + /// Output only. + core.String? failedCount; + + /// In cases when enough errors are encountered a job, pipeline, or operation + /// may be failed as a whole. + /// + /// Below is the number of entities for which the processing had not been + /// finished (either in successful or failed state). Set to -1 if the number + /// is unknown (for example, the operation failed before the total entity + /// number could be collected). + /// + /// Output only. + core.String? incompleteCount; + + /// The number of entities that had been processed successfully. + /// + /// Output only. + core.String? successfulCount; + + /// The number of the successful forecast points that are generated by the + /// forecasting model. + /// + /// This is ONLY used by the forecasting batch prediction. + /// + /// Output only. + core.String? 
successfulForecastPointCount; + + GoogleCloudAiplatformV1CompletionStats({ + this.failedCount, + this.incompleteCount, + this.successfulCount, + this.successfulForecastPointCount, + }); + + GoogleCloudAiplatformV1CompletionStats.fromJson(core.Map json_) + : this( + failedCount: json_['failedCount'] as core.String?, + incompleteCount: json_['incompleteCount'] as core.String?, + successfulCount: json_['successfulCount'] as core.String?, + successfulForecastPointCount: + json_['successfulForecastPointCount'] as core.String?, + ); + + core.Map toJson() => { + if (failedCount != null) 'failedCount': failedCount!, + if (incompleteCount != null) 'incompleteCount': incompleteCount!, + if (successfulCount != null) 'successfulCount': successfulCount!, + if (successfulForecastPointCount != null) + 'successfulForecastPointCount': successfulForecastPointCount!, + }; +} + +/// Request message for ComputeTokens RPC call. +class GoogleCloudAiplatformV1ComputeTokensRequest { + /// Input content. + /// + /// Optional. + core.List? contents; + + /// The instances that are the input to token computing API call. + /// + /// Schema is identical to the prediction schema of the text model, even for + /// the non-text models, like chat models, or Codey models. + /// + /// Optional. + /// + /// The values for Object must be JSON objects. It can consist of `num`, + /// `String`, `bool` and `null` as well as `Map` and `List` values. + core.List? instances; + + /// The name of the publisher model requested to serve the prediction. + /// + /// Format: projects/{project}/locations/{location}/publishers / * /models / * + /// + /// Optional. + core.String? model; + + GoogleCloudAiplatformV1ComputeTokensRequest({ + this.contents, + this.instances, + this.model, + }); + + GoogleCloudAiplatformV1ComputeTokensRequest.fromJson(core.Map json_) + : this( + contents: (json_['contents'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1Content.fromJson( + value as core.Map)) + .toList(), + instances: json_.containsKey('instances') + ? json_['instances'] as core.List + : null, + model: json_['model'] as core.String?, + ); + + core.Map toJson() => { + if (contents != null) 'contents': contents!, + if (instances != null) 'instances': instances!, + if (model != null) 'model': model!, + }; +} + +/// Response message for ComputeTokens RPC call. +class GoogleCloudAiplatformV1ComputeTokensResponse { + /// Lists of tokens info from the input. + /// + /// A ComputeTokensRequest could have multiple instances with a prompt in each + /// instance. We also need to return lists of tokens info for the request with + /// multiple instances. + core.List? tokensInfo; + + GoogleCloudAiplatformV1ComputeTokensResponse({ + this.tokensInfo, + }); + + GoogleCloudAiplatformV1ComputeTokensResponse.fromJson(core.Map json_) + : this( + tokensInfo: (json_['tokensInfo'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1TokensInfo.fromJson( + value as core.Map)) + .toList(), + ); + + core.Map toJson() => { + if (tokensInfo != null) 'tokensInfo': tokensInfo!, + }; +} + +/// The Container Registry location for the container image. +class GoogleCloudAiplatformV1ContainerRegistryDestination { + /// Container Registry URI of a container image. + /// + /// Only Google Container Registry and Artifact Registry are supported now. + /// Accepted forms: * Google Container Registry path. For example: + /// `gcr.io/projectId/imageName:tag`. * Artifact Registry path. For example: + /// `us-central1-docker.pkg.dev/projectId/repoName/imageName:tag`. 
If a tag is + /// not specified, "latest" will be used as the default tag. + /// + /// Required. + core.String? outputUri; + + GoogleCloudAiplatformV1ContainerRegistryDestination({ + this.outputUri, + }); + + GoogleCloudAiplatformV1ContainerRegistryDestination.fromJson(core.Map json_) + : this( + outputUri: json_['outputUri'] as core.String?, + ); + + core.Map toJson() => { + if (outputUri != null) 'outputUri': outputUri!, + }; +} + +/// The spec of a Container. +class GoogleCloudAiplatformV1ContainerSpec { + /// The arguments to be passed when starting the container. + core.List? args; + + /// The command to be invoked when the container is started. + /// + /// It overrides the entrypoint instruction in Dockerfile when provided. + core.List? command; + + /// Environment variables to be passed to the container. + /// + /// Maximum limit is 100. + core.List? env; + + /// The URI of a container image in the Container Registry that is to be run + /// on each worker replica. + /// + /// Required. + core.String? imageUri; + + GoogleCloudAiplatformV1ContainerSpec({ + this.args, + this.command, + this.env, + this.imageUri, + }); + + GoogleCloudAiplatformV1ContainerSpec.fromJson(core.Map json_) + : this( + args: (json_['args'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + command: (json_['command'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + env: (json_['env'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1EnvVar.fromJson( + value as core.Map)) + .toList(), + imageUri: json_['imageUri'] as core.String?, + ); + + core.Map toJson() => { + if (args != null) 'args': args!, + if (command != null) 'command': command!, + if (env != null) 'env': env!, + if (imageUri != null) 'imageUri': imageUri!, + }; +} + +/// The base structured datatype containing multi-part content of a message. +/// +/// A `Content` includes a `role` field designating the producer of the +/// `Content` and a `parts` field containing multi-part data that contains the +/// content of the message turn. +class GoogleCloudAiplatformV1Content { + /// Ordered `Parts` that constitute a single message. + /// + /// Parts may have different IANA MIME types. + /// + /// Required. + core.List? parts; + + /// The producer of the content. + /// + /// Must be either 'user' or 'model'. Useful to set for multi-turn + /// conversations, otherwise can be left blank or unset. + /// + /// Optional. + core.String? role; + + GoogleCloudAiplatformV1Content({ + this.parts, + this.role, + }); + + GoogleCloudAiplatformV1Content.fromJson(core.Map json_) + : this( + parts: (json_['parts'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1Part.fromJson( + value as core.Map)) + .toList(), + role: json_['role'] as core.String?, + ); + + core.Map toJson() => { + if (parts != null) 'parts': parts!, + if (role != null) 'role': role!, + }; +} + +/// Instance of a general context. +class GoogleCloudAiplatformV1Context { + /// Timestamp when this Context was created. + /// + /// Output only. + core.String? createTime; + + /// Description of the Context + core.String? description; + + /// User provided display name of the Context. + /// + /// May be up to 128 Unicode characters. + core.String? displayName; + + /// An eTag used to perform consistent read-modify-write updates. + /// + /// If not set, a blind "overwrite" update happens. + core.String? etag; + + /// The labels with user-defined metadata to organize your Contexts. 
+ /// + /// Label keys and values can be no longer than 64 characters (Unicode + /// codepoints), can only contain lowercase letters, numeric characters, + /// underscores and dashes. International characters are allowed. No more than + /// 64 user labels can be associated with one Context (System labels are + /// excluded). + core.Map? labels; + + /// Properties of the Context. + /// + /// Top level metadata keys' heading and trailing spaces will be trimmed. The + /// size of this field should not exceed 200KB. + /// + /// The values for Object must be JSON objects. It can consist of `num`, + /// `String`, `bool` and `null` as well as `Map` and `List` values. + core.Map? metadata; + + /// The resource name of the Context. + /// + /// Immutable. + core.String? name; + + /// A list of resource names of Contexts that are parents of this Context. + /// + /// A Context may have at most 10 parent_contexts. + /// + /// Output only. + core.List? parentContexts; + + /// The title of the schema describing the metadata. + /// + /// Schema title and version is expected to be registered in earlier Create + /// Schema calls. And both are used together as unique identifiers to identify + /// schemas within the local metadata store. + core.String? schemaTitle; + + /// The version of the schema in schema_name to use. + /// + /// Schema title and version is expected to be registered in earlier Create + /// Schema calls. And both are used together as unique identifiers to identify + /// schemas within the local metadata store. + core.String? schemaVersion; + + /// Timestamp when this Context was last updated. + /// + /// Output only. + core.String? updateTime; + + GoogleCloudAiplatformV1Context({ + this.createTime, + this.description, + this.displayName, + this.etag, + this.labels, + this.metadata, + this.name, + this.parentContexts, + this.schemaTitle, + this.schemaVersion, + this.updateTime, + }); + + GoogleCloudAiplatformV1Context.fromJson(core.Map json_) + : this( + createTime: json_['createTime'] as core.String?, + description: json_['description'] as core.String?, + displayName: json_['displayName'] as core.String?, + etag: json_['etag'] as core.String?, + labels: + (json_['labels'] as core.Map?)?.map( + (key, value) => core.MapEntry( + key, + value as core.String, + ), + ), + metadata: json_.containsKey('metadata') + ? json_['metadata'] as core.Map + : null, + name: json_['name'] as core.String?, + parentContexts: (json_['parentContexts'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + schemaTitle: json_['schemaTitle'] as core.String?, + schemaVersion: json_['schemaVersion'] as core.String?, + updateTime: json_['updateTime'] as core.String?, + ); + + core.Map toJson() => { + if (createTime != null) 'createTime': createTime!, + if (description != null) 'description': description!, + if (displayName != null) 'displayName': displayName!, + if (etag != null) 'etag': etag!, + if (labels != null) 'labels': labels!, + if (metadata != null) 'metadata': metadata!, + if (name != null) 'name': name!, + if (parentContexts != null) 'parentContexts': parentContexts!, + if (schemaTitle != null) 'schemaTitle': schemaTitle!, + if (schemaVersion != null) 'schemaVersion': schemaVersion!, + if (updateTime != null) 'updateTime': updateTime!, + }; +} + +/// Request message for ModelService.CopyModel. +class GoogleCloudAiplatformV1CopyModelRequest { + /// Customer-managed encryption key options. + /// + /// If this is set, then the Model copy will be encrypted with the provided + /// encryption key. 
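// Editor's illustrative sketch (not part of the generated client): a minimal
// ModelService.CopyModel request body built from the fields documented below;
// the project, location and model IDs are placeholders:
//
//   final request = GoogleCloudAiplatformV1CopyModelRequest(
//     sourceModel:
//         'projects/my-project/locations/us-central1/models/source-model',
//     modelId: 'copied-model',
//   );
//   final body = request.toJson();
//
// Per the field documentation, `sourceModel` is required, while `modelId`,
// `parentModel` and `encryptionSpec` are optional.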
+ GoogleCloudAiplatformV1EncryptionSpec? encryptionSpec; + + /// Copy source_model into a new Model with this ID. + /// + /// The ID will become the final component of the model resource name. This + /// value may be up to 63 characters, and valid characters are `[a-z0-9_-]`. + /// The first character cannot be a number or hyphen. + /// + /// Optional. + core.String? modelId; + + /// Specify this field to copy source_model into this existing Model as a new + /// version. + /// + /// Format: `projects/{project}/locations/{location}/models/{model}` + /// + /// Optional. + core.String? parentModel; + + /// The resource name of the Model to copy. + /// + /// That Model must be in the same Project. Format: + /// `projects/{project}/locations/{location}/models/{model}` + /// + /// Required. + core.String? sourceModel; + + GoogleCloudAiplatformV1CopyModelRequest({ + this.encryptionSpec, + this.modelId, + this.parentModel, + this.sourceModel, + }); + + GoogleCloudAiplatformV1CopyModelRequest.fromJson(core.Map json_) + : this( + encryptionSpec: json_.containsKey('encryptionSpec') + ? GoogleCloudAiplatformV1EncryptionSpec.fromJson( + json_['encryptionSpec'] + as core.Map) + : null, + modelId: json_['modelId'] as core.String?, + parentModel: json_['parentModel'] as core.String?, + sourceModel: json_['sourceModel'] as core.String?, + ); + + core.Map toJson() => { + if (encryptionSpec != null) 'encryptionSpec': encryptionSpec!, + if (modelId != null) 'modelId': modelId!, + if (parentModel != null) 'parentModel': parentModel!, + if (sourceModel != null) 'sourceModel': sourceModel!, + }; +} + +/// RagCorpus status. +class GoogleCloudAiplatformV1CorpusStatus { + /// Only when the `state` field is ERROR. + /// + /// Output only. + core.String? errorStatus; + + /// RagCorpus life state. + /// + /// Output only. + /// Possible string values are: + /// - "UNKNOWN" : This state is not supposed to happen. + /// - "INITIALIZED" : RagCorpus resource entry is initialized, but hasn't done + /// validation. + /// - "ACTIVE" : RagCorpus is provisioned successfully and is ready to serve. + /// - "ERROR" : RagCorpus is in a problematic situation. See `error_message` + /// field for details. + core.String? state; + + GoogleCloudAiplatformV1CorpusStatus({ + this.errorStatus, + this.state, + }); + + GoogleCloudAiplatformV1CorpusStatus.fromJson(core.Map json_) + : this( + errorStatus: json_['errorStatus'] as core.String?, + state: json_['state'] as core.String?, + ); + + core.Map toJson() => { + if (errorStatus != null) 'errorStatus': errorStatus!, + if (state != null) 'state': state!, + }; +} + +/// Request message for CorroborateContent. +class GoogleCloudAiplatformV1CorroborateContentRequest { + /// Input content to corroborate, only text format is supported for now. + /// + /// Optional. + GoogleCloudAiplatformV1Content? content; + + /// Facts used to generate the text can also be used to corroborate the text. + /// + /// Optional. + core.List? facts; + + /// Parameters that can be set to override default settings per request. + /// + /// Optional. + GoogleCloudAiplatformV1CorroborateContentRequestParameters? parameters; + + GoogleCloudAiplatformV1CorroborateContentRequest({ + this.content, + this.facts, + this.parameters, + }); + + GoogleCloudAiplatformV1CorroborateContentRequest.fromJson(core.Map json_) + : this( + content: json_.containsKey('content') + ? GoogleCloudAiplatformV1Content.fromJson( + json_['content'] as core.Map) + : null, + facts: (json_['facts'] as core.List?) 
+ ?.map((value) => GoogleCloudAiplatformV1Fact.fromJson( + value as core.Map)) + .toList(), + parameters: json_.containsKey('parameters') + ? GoogleCloudAiplatformV1CorroborateContentRequestParameters + .fromJson(json_['parameters'] + as core.Map) + : null, + ); + + core.Map toJson() => { + if (content != null) 'content': content!, + if (facts != null) 'facts': facts!, + if (parameters != null) 'parameters': parameters!, + }; +} + +/// Parameters that can be overrided per request. +class GoogleCloudAiplatformV1CorroborateContentRequestParameters { + /// Only return claims with citation score larger than the threshold. + /// + /// Optional. + core.double? citationThreshold; + + GoogleCloudAiplatformV1CorroborateContentRequestParameters({ + this.citationThreshold, + }); + + GoogleCloudAiplatformV1CorroborateContentRequestParameters.fromJson( + core.Map json_) + : this( + citationThreshold: + (json_['citationThreshold'] as core.num?)?.toDouble(), + ); + + core.Map toJson() => { + if (citationThreshold != null) 'citationThreshold': citationThreshold!, + }; +} + +/// Response message for CorroborateContent. +class GoogleCloudAiplatformV1CorroborateContentResponse { + /// Claims that are extracted from the input content and facts that support + /// the claims. + core.List? claims; + + /// Confidence score of corroborating content. + /// + /// Value is \[0,1\] with 1 is the most confidence. + core.double? corroborationScore; + + GoogleCloudAiplatformV1CorroborateContentResponse({ + this.claims, + this.corroborationScore, + }); + + GoogleCloudAiplatformV1CorroborateContentResponse.fromJson(core.Map json_) + : this( + claims: (json_['claims'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1Claim.fromJson( + value as core.Map)) + .toList(), + corroborationScore: + (json_['corroborationScore'] as core.num?)?.toDouble(), + ); + + core.Map toJson() => { + if (claims != null) 'claims': claims!, + if (corroborationScore != null) + 'corroborationScore': corroborationScore!, + }; +} + +/// Request message for PredictionService.CountTokens. +class GoogleCloudAiplatformV1CountTokensRequest { + /// Input content. + /// + /// Optional. + core.List? contents; + + /// Generation config that the model will use to generate the response. + /// + /// Optional. + GoogleCloudAiplatformV1GenerationConfig? generationConfig; + + /// The instances that are the input to token counting call. + /// + /// Schema is identical to the prediction schema of the underlying model. + /// + /// Optional. + /// + /// The values for Object must be JSON objects. It can consist of `num`, + /// `String`, `bool` and `null` as well as `Map` and `List` values. + core.List? instances; + + /// The name of the publisher model requested to serve the prediction. + /// + /// Format: `projects/{project}/locations/{location}/publishers / * /models / + /// * ` + /// + /// Optional. + core.String? model; + + /// The user provided system instructions for the model. + /// + /// Note: only text should be used in parts and content in each part will be + /// in a separate paragraph. + /// + /// Optional. + GoogleCloudAiplatformV1Content? systemInstruction; + + /// A list of `Tools` the model may use to generate the next response. + /// + /// A `Tool` is a piece of code that enables the system to interact with + /// external systems to perform an action, or set of actions, outside of + /// knowledge and scope of the model. + /// + /// Optional. + core.List? 
tools; + + GoogleCloudAiplatformV1CountTokensRequest({ + this.contents, + this.generationConfig, + this.instances, + this.model, + this.systemInstruction, + this.tools, + }); + + GoogleCloudAiplatformV1CountTokensRequest.fromJson(core.Map json_) + : this( + contents: (json_['contents'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1Content.fromJson( + value as core.Map)) + .toList(), + generationConfig: json_.containsKey('generationConfig') + ? GoogleCloudAiplatformV1GenerationConfig.fromJson( + json_['generationConfig'] + as core.Map) + : null, + instances: json_.containsKey('instances') + ? json_['instances'] as core.List + : null, + model: json_['model'] as core.String?, + systemInstruction: json_.containsKey('systemInstruction') + ? GoogleCloudAiplatformV1Content.fromJson( + json_['systemInstruction'] + as core.Map) + : null, + tools: (json_['tools'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1Tool.fromJson( + value as core.Map)) + .toList(), + ); + + core.Map toJson() => { + if (contents != null) 'contents': contents!, + if (generationConfig != null) 'generationConfig': generationConfig!, + if (instances != null) 'instances': instances!, + if (model != null) 'model': model!, + if (systemInstruction != null) 'systemInstruction': systemInstruction!, + if (tools != null) 'tools': tools!, + }; +} + +/// Response message for PredictionService.CountTokens. +class GoogleCloudAiplatformV1CountTokensResponse { + /// The total number of billable characters counted across all instances from + /// the request. + core.int? totalBillableCharacters; + + /// The total number of tokens counted across all instances from the request. + core.int? totalTokens; + + GoogleCloudAiplatformV1CountTokensResponse({ + this.totalBillableCharacters, + this.totalTokens, + }); + + GoogleCloudAiplatformV1CountTokensResponse.fromJson(core.Map json_) + : this( + totalBillableCharacters: + json_['totalBillableCharacters'] as core.int?, + totalTokens: json_['totalTokens'] as core.int?, + ); + + core.Map toJson() => { + if (totalBillableCharacters != null) + 'totalBillableCharacters': totalBillableCharacters!, + if (totalTokens != null) 'totalTokens': totalTokens!, + }; +} + +/// Request message for CreateDeploymentResourcePool method. +class GoogleCloudAiplatformV1CreateDeploymentResourcePoolRequest { + /// The DeploymentResourcePool to create. + /// + /// Required. + GoogleCloudAiplatformV1DeploymentResourcePool? deploymentResourcePool; + + /// The ID to use for the DeploymentResourcePool, which will become the final + /// component of the DeploymentResourcePool's resource name. + /// + /// The maximum length is 63 characters, and valid characters are + /// `/^[a-z]([a-z0-9-]{0,61}[a-z0-9])?$/`. + /// + /// Required. + core.String? deploymentResourcePoolId; + + GoogleCloudAiplatformV1CreateDeploymentResourcePoolRequest({ + this.deploymentResourcePool, + this.deploymentResourcePoolId, + }); + + GoogleCloudAiplatformV1CreateDeploymentResourcePoolRequest.fromJson( + core.Map json_) + : this( + deploymentResourcePool: json_.containsKey('deploymentResourcePool') + ? 
GoogleCloudAiplatformV1DeploymentResourcePool.fromJson( + json_['deploymentResourcePool'] + as core.Map) + : null, + deploymentResourcePoolId: + json_['deploymentResourcePoolId'] as core.String?, + ); + + core.Map toJson() => { + if (deploymentResourcePool != null) + 'deploymentResourcePool': deploymentResourcePool!, + if (deploymentResourcePoolId != null) + 'deploymentResourcePoolId': deploymentResourcePoolId!, + }; +} + +/// Request message for FeaturestoreService.CreateFeature. +/// +/// Request message for FeatureRegistryService.CreateFeature. +class GoogleCloudAiplatformV1CreateFeatureRequest { + /// The Feature to create. + /// + /// Required. + GoogleCloudAiplatformV1Feature? feature; + + /// The ID to use for the Feature, which will become the final component of + /// the Feature's resource name. + /// + /// This value may be up to 128 characters, and valid characters are + /// `[a-z0-9_]`. The first character cannot be a number. The value must be + /// unique within an EntityType/FeatureGroup. + /// + /// Required. + core.String? featureId; + + /// The resource name of the EntityType or FeatureGroup to create a Feature. + /// + /// Format for entity_type as parent: + /// `projects/{project}/locations/{location}/featurestores/{featurestore}/entityTypes/{entity_type}` + /// Format for feature_group as parent: + /// `projects/{project}/locations/{location}/featureGroups/{feature_group}` + /// + /// Required. + core.String? parent; + + GoogleCloudAiplatformV1CreateFeatureRequest({ + this.feature, + this.featureId, + this.parent, + }); + + GoogleCloudAiplatformV1CreateFeatureRequest.fromJson(core.Map json_) + : this( + feature: json_.containsKey('feature') + ? GoogleCloudAiplatformV1Feature.fromJson( + json_['feature'] as core.Map) + : null, + featureId: json_['featureId'] as core.String?, + parent: json_['parent'] as core.String?, + ); + + core.Map toJson() => { + if (feature != null) 'feature': feature!, + if (featureId != null) 'featureId': featureId!, + if (parent != null) 'parent': parent!, + }; +} + +/// Request message for \[NotebookService.CreateNotebookExecutionJob\] +class GoogleCloudAiplatformV1CreateNotebookExecutionJobRequest { + /// The NotebookExecutionJob to create. + /// + /// Required. + GoogleCloudAiplatformV1NotebookExecutionJob? notebookExecutionJob; + + /// User specified ID for the NotebookExecutionJob. + /// + /// Optional. + core.String? notebookExecutionJobId; + + /// The resource name of the Location to create the NotebookExecutionJob. + /// + /// Format: `projects/{project}/locations/{location}` + /// + /// Required. + core.String? parent; + + GoogleCloudAiplatformV1CreateNotebookExecutionJobRequest({ + this.notebookExecutionJob, + this.notebookExecutionJobId, + this.parent, + }); + + GoogleCloudAiplatformV1CreateNotebookExecutionJobRequest.fromJson( + core.Map json_) + : this( + notebookExecutionJob: json_.containsKey('notebookExecutionJob') + ? GoogleCloudAiplatformV1NotebookExecutionJob.fromJson( + json_['notebookExecutionJob'] + as core.Map) + : null, + notebookExecutionJobId: + json_['notebookExecutionJobId'] as core.String?, + parent: json_['parent'] as core.String?, + ); + + core.Map toJson() => { + if (notebookExecutionJob != null) + 'notebookExecutionJob': notebookExecutionJob!, + if (notebookExecutionJobId != null) + 'notebookExecutionJobId': notebookExecutionJobId!, + if (parent != null) 'parent': parent!, + }; +} + +/// Request message for PipelineService.CreatePipelineJob. 
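// Editor's illustrative sketch (not part of the generated client): the
// CountTokens request/response pair defined above can be exercised roughly as
// follows. GoogleCloudAiplatformV1Part is assumed to expose a `text` field
// (as in the published googleapis package), and the model path is a
// placeholder:
//
//   final request = GoogleCloudAiplatformV1CountTokensRequest(
//     model: 'projects/my-project/locations/us-central1/publishers/google'
//         '/models/some-model',
//     contents: [
//       GoogleCloudAiplatformV1Content(
//         role: 'user',
//         parts: [GoogleCloudAiplatformV1Part(text: 'Hello, world')],
//       ),
//     ],
//   );
//
// The corresponding GoogleCloudAiplatformV1CountTokensResponse then reports
// `totalTokens` and `totalBillableCharacters` across all instances in the
// request.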
+class GoogleCloudAiplatformV1CreatePipelineJobRequest { + /// The resource name of the Location to create the PipelineJob in. + /// + /// Format: `projects/{project}/locations/{location}` + /// + /// Required. + core.String? parent; + + /// The PipelineJob to create. + /// + /// Required. + GoogleCloudAiplatformV1PipelineJob? pipelineJob; + + /// The ID to use for the PipelineJob, which will become the final component + /// of the PipelineJob name. + /// + /// If not provided, an ID will be automatically generated. This value should + /// be less than 128 characters, and valid characters are `/a-z-/`. + core.String? pipelineJobId; + + GoogleCloudAiplatformV1CreatePipelineJobRequest({ + this.parent, + this.pipelineJob, + this.pipelineJobId, + }); + + GoogleCloudAiplatformV1CreatePipelineJobRequest.fromJson(core.Map json_) + : this( + parent: json_['parent'] as core.String?, + pipelineJob: json_.containsKey('pipelineJob') + ? GoogleCloudAiplatformV1PipelineJob.fromJson( + json_['pipelineJob'] as core.Map) + : null, + pipelineJobId: json_['pipelineJobId'] as core.String?, + ); + + core.Map toJson() => { + if (parent != null) 'parent': parent!, + if (pipelineJob != null) 'pipelineJob': pipelineJob!, + if (pipelineJobId != null) 'pipelineJobId': pipelineJobId!, + }; +} + +/// Request message for TensorboardService.CreateTensorboardRun. +class GoogleCloudAiplatformV1CreateTensorboardRunRequest { + /// The resource name of the TensorboardExperiment to create the + /// TensorboardRun in. + /// + /// Format: + /// `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}` + /// + /// Required. + core.String? parent; + + /// The TensorboardRun to create. + /// + /// Required. + GoogleCloudAiplatformV1TensorboardRun? tensorboardRun; + + /// The ID to use for the Tensorboard run, which becomes the final component + /// of the Tensorboard run's resource name. + /// + /// This value should be 1-128 characters, and valid characters are `/a-z-/`. + /// + /// Required. + core.String? tensorboardRunId; + + GoogleCloudAiplatformV1CreateTensorboardRunRequest({ + this.parent, + this.tensorboardRun, + this.tensorboardRunId, + }); + + GoogleCloudAiplatformV1CreateTensorboardRunRequest.fromJson(core.Map json_) + : this( + parent: json_['parent'] as core.String?, + tensorboardRun: json_.containsKey('tensorboardRun') + ? GoogleCloudAiplatformV1TensorboardRun.fromJson( + json_['tensorboardRun'] + as core.Map) + : null, + tensorboardRunId: json_['tensorboardRunId'] as core.String?, + ); + + core.Map toJson() => { + if (parent != null) 'parent': parent!, + if (tensorboardRun != null) 'tensorboardRun': tensorboardRun!, + if (tensorboardRunId != null) 'tensorboardRunId': tensorboardRunId!, + }; +} + +/// Request message for TensorboardService.CreateTensorboardTimeSeries. +class GoogleCloudAiplatformV1CreateTensorboardTimeSeriesRequest { + /// The resource name of the TensorboardRun to create the + /// TensorboardTimeSeries in. + /// + /// Format: + /// `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}/runs/{run}` + /// + /// Required. + core.String? parent; + + /// The TensorboardTimeSeries to create. + /// + /// Required. + GoogleCloudAiplatformV1TensorboardTimeSeries? tensorboardTimeSeries; + + /// The user specified unique ID to use for the TensorboardTimeSeries, which + /// becomes the final component of the TensorboardTimeSeries's resource name. + /// + /// This value should match "a-z0-9{0, 127}" + /// + /// Optional. + core.String? 
tensorboardTimeSeriesId; + + GoogleCloudAiplatformV1CreateTensorboardTimeSeriesRequest({ + this.parent, + this.tensorboardTimeSeries, + this.tensorboardTimeSeriesId, + }); + + GoogleCloudAiplatformV1CreateTensorboardTimeSeriesRequest.fromJson( + core.Map json_) + : this( + parent: json_['parent'] as core.String?, + tensorboardTimeSeries: json_.containsKey('tensorboardTimeSeries') + ? GoogleCloudAiplatformV1TensorboardTimeSeries.fromJson( + json_['tensorboardTimeSeries'] + as core.Map) + : null, + tensorboardTimeSeriesId: + json_['tensorboardTimeSeriesId'] as core.String?, + ); + + core.Map toJson() => { + if (parent != null) 'parent': parent!, + if (tensorboardTimeSeries != null) + 'tensorboardTimeSeries': tensorboardTimeSeries!, + if (tensorboardTimeSeriesId != null) + 'tensorboardTimeSeriesId': tensorboardTimeSeriesId!, + }; +} + +/// The storage details for CSV output content. +class GoogleCloudAiplatformV1CsvDestination { + /// Google Cloud Storage location. + /// + /// Required. + GoogleCloudAiplatformV1GcsDestination? gcsDestination; + + GoogleCloudAiplatformV1CsvDestination({ + this.gcsDestination, + }); + + GoogleCloudAiplatformV1CsvDestination.fromJson(core.Map json_) + : this( + gcsDestination: json_.containsKey('gcsDestination') + ? GoogleCloudAiplatformV1GcsDestination.fromJson( + json_['gcsDestination'] + as core.Map) + : null, + ); + + core.Map toJson() => { + if (gcsDestination != null) 'gcsDestination': gcsDestination!, + }; +} + +/// The storage details for CSV input content. +class GoogleCloudAiplatformV1CsvSource { + /// Google Cloud Storage location. + /// + /// Required. + GoogleCloudAiplatformV1GcsSource? gcsSource; + + GoogleCloudAiplatformV1CsvSource({ + this.gcsSource, + }); + + GoogleCloudAiplatformV1CsvSource.fromJson(core.Map json_) + : this( + gcsSource: json_.containsKey('gcsSource') + ? GoogleCloudAiplatformV1GcsSource.fromJson( + json_['gcsSource'] as core.Map) + : null, + ); + + core.Map toJson() => { + if (gcsSource != null) 'gcsSource': gcsSource!, + }; +} + +/// Represents a job that runs custom workloads such as a Docker container or a +/// Python package. +/// +/// A CustomJob can have multiple worker pools and each worker pool can have its +/// own machine and input spec. A CustomJob will be cleaned up once the job +/// enters terminal state (failed or succeeded). +class GoogleCloudAiplatformV1CustomJob { + /// Time when the CustomJob was created. + /// + /// Output only. + core.String? createTime; + + /// The display name of the CustomJob. + /// + /// The name can be up to 128 characters long and can consist of any UTF-8 + /// characters. + /// + /// Required. + core.String? displayName; + + /// Customer-managed encryption key options for a CustomJob. + /// + /// If this is set, then all resources created by the CustomJob will be + /// encrypted with the provided encryption key. + GoogleCloudAiplatformV1EncryptionSpec? encryptionSpec; + + /// Time when the CustomJob entered any of the following states: + /// `JOB_STATE_SUCCEEDED`, `JOB_STATE_FAILED`, `JOB_STATE_CANCELLED`. + /// + /// Output only. + core.String? endTime; + + /// Only populated when job's state is `JOB_STATE_FAILED` or + /// `JOB_STATE_CANCELLED`. + /// + /// Output only. + GoogleRpcStatus? error; + + /// Job spec. + /// + /// Required. + GoogleCloudAiplatformV1CustomJobSpec? jobSpec; + + /// The labels with user-defined metadata to organize CustomJobs. 
+ /// + /// Label keys and values can be no longer than 64 characters (Unicode + /// codepoints), can only contain lowercase letters, numeric characters, + /// underscores and dashes. International characters are allowed. See + /// https://goo.gl/xmQnxf for more information and examples of labels. + core.Map? labels; + + /// Resource name of a CustomJob. + /// + /// Output only. + core.String? name; + + /// Reserved for future use. + /// + /// Output only. + core.bool? satisfiesPzi; + + /// Reserved for future use. + /// + /// Output only. + core.bool? satisfiesPzs; + + /// Time when the CustomJob for the first time entered the `JOB_STATE_RUNNING` + /// state. + /// + /// Output only. + core.String? startTime; + + /// The detailed state of the job. + /// + /// Output only. + /// Possible string values are: + /// - "JOB_STATE_UNSPECIFIED" : The job state is unspecified. + /// - "JOB_STATE_QUEUED" : The job has been just created or resumed and + /// processing has not yet begun. + /// - "JOB_STATE_PENDING" : The service is preparing to run the job. + /// - "JOB_STATE_RUNNING" : The job is in progress. + /// - "JOB_STATE_SUCCEEDED" : The job completed successfully. + /// - "JOB_STATE_FAILED" : The job failed. + /// - "JOB_STATE_CANCELLING" : The job is being cancelled. From this state the + /// job may only go to either `JOB_STATE_SUCCEEDED`, `JOB_STATE_FAILED` or + /// `JOB_STATE_CANCELLED`. + /// - "JOB_STATE_CANCELLED" : The job has been cancelled. + /// - "JOB_STATE_PAUSED" : The job has been stopped, and can be resumed. + /// - "JOB_STATE_EXPIRED" : The job has expired. + /// - "JOB_STATE_UPDATING" : The job is being updated. Only jobs in the + /// `RUNNING` state can be updated. After updating, the job goes back to the + /// `RUNNING` state. + /// - "JOB_STATE_PARTIALLY_SUCCEEDED" : The job is partially succeeded, some + /// results may be missing due to errors. + core.String? state; + + /// Time when the CustomJob was most recently updated. + /// + /// Output only. + core.String? updateTime; + + /// URIs for accessing + /// [interactive shells](https://cloud.google.com/vertex-ai/docs/training/monitor-debug-interactive-shell) + /// (one URI for each training node). + /// + /// Only available if job_spec.enable_web_access is `true`. The keys are names + /// of each node in the training job; for example, `workerpool0-0` for the + /// primary node, `workerpool1-0` for the first node in the second worker + /// pool, and `workerpool1-1` for the second node in the second worker pool. + /// The values are the URIs for each node's interactive shell. + /// + /// Output only. + core.Map? webAccessUris; + + GoogleCloudAiplatformV1CustomJob({ + this.createTime, + this.displayName, + this.encryptionSpec, + this.endTime, + this.error, + this.jobSpec, + this.labels, + this.name, + this.satisfiesPzi, + this.satisfiesPzs, + this.startTime, + this.state, + this.updateTime, + this.webAccessUris, + }); + + GoogleCloudAiplatformV1CustomJob.fromJson(core.Map json_) + : this( + createTime: json_['createTime'] as core.String?, + displayName: json_['displayName'] as core.String?, + encryptionSpec: json_.containsKey('encryptionSpec') + ? GoogleCloudAiplatformV1EncryptionSpec.fromJson( + json_['encryptionSpec'] + as core.Map) + : null, + endTime: json_['endTime'] as core.String?, + error: json_.containsKey('error') + ? GoogleRpcStatus.fromJson( + json_['error'] as core.Map) + : null, + jobSpec: json_.containsKey('jobSpec') + ? 
GoogleCloudAiplatformV1CustomJobSpec.fromJson( + json_['jobSpec'] as core.Map) + : null, + labels: + (json_['labels'] as core.Map?)?.map( + (key, value) => core.MapEntry( + key, + value as core.String, + ), + ), + name: json_['name'] as core.String?, + satisfiesPzi: json_['satisfiesPzi'] as core.bool?, + satisfiesPzs: json_['satisfiesPzs'] as core.bool?, + startTime: json_['startTime'] as core.String?, + state: json_['state'] as core.String?, + updateTime: json_['updateTime'] as core.String?, + webAccessUris: + (json_['webAccessUris'] as core.Map?) + ?.map( + (key, value) => core.MapEntry( + key, + value as core.String, + ), + ), + ); + + core.Map toJson() => { + if (createTime != null) 'createTime': createTime!, + if (displayName != null) 'displayName': displayName!, + if (encryptionSpec != null) 'encryptionSpec': encryptionSpec!, + if (endTime != null) 'endTime': endTime!, + if (error != null) 'error': error!, + if (jobSpec != null) 'jobSpec': jobSpec!, + if (labels != null) 'labels': labels!, + if (name != null) 'name': name!, + if (satisfiesPzi != null) 'satisfiesPzi': satisfiesPzi!, + if (satisfiesPzs != null) 'satisfiesPzs': satisfiesPzs!, + if (startTime != null) 'startTime': startTime!, + if (state != null) 'state': state!, + if (updateTime != null) 'updateTime': updateTime!, + if (webAccessUris != null) 'webAccessUris': webAccessUris!, + }; +} + +/// Represents the spec of a CustomJob. +class GoogleCloudAiplatformV1CustomJobSpec { + /// The Cloud Storage location to store the output of this CustomJob or + /// HyperparameterTuningJob. + /// + /// For HyperparameterTuningJob, the baseOutputDirectory of each child + /// CustomJob backing a Trial is set to a subdirectory of name id under its + /// parent HyperparameterTuningJob's baseOutputDirectory. The following Vertex + /// AI environment variables will be passed to containers or python modules + /// when this field is set: For CustomJob: * AIP_MODEL_DIR = `/model/` * + /// AIP_CHECKPOINT_DIR = `/checkpoints/` * AIP_TENSORBOARD_LOG_DIR = `/logs/` + /// For CustomJob backing a Trial of HyperparameterTuningJob: * AIP_MODEL_DIR + /// = `//model/` * AIP_CHECKPOINT_DIR = `//checkpoints/` * + /// AIP_TENSORBOARD_LOG_DIR = `//logs/` + GoogleCloudAiplatformV1GcsDestination? baseOutputDirectory; + + /// Whether you want Vertex AI to enable access to the customized dashboard in + /// training chief container. + /// + /// If set to `true`, you can access the dashboard at the URIs given by + /// CustomJob.web_access_uris or Trial.web_access_uris (within + /// HyperparameterTuningJob.trials). + /// + /// Optional. + core.bool? enableDashboardAccess; + + /// Whether you want Vertex AI to enable + /// [interactive shell access](https://cloud.google.com/vertex-ai/docs/training/monitor-debug-interactive-shell) + /// to training containers. + /// + /// If set to `true`, you can access interactive shells at the URIs given by + /// CustomJob.web_access_uris or Trial.web_access_uris (within + /// HyperparameterTuningJob.trials). + /// + /// Optional. + core.bool? enableWebAccess; + + /// The Experiment associated with this job. + /// + /// Format: + /// `projects/{project}/locations/{location}/metadataStores/{metadataStores}/contexts/{experiment-name}` + /// + /// Optional. + core.String? experiment; + + /// The Experiment Run associated with this job. + /// + /// Format: + /// `projects/{project}/locations/{location}/metadataStores/{metadataStores}/contexts/{experiment-name}-{experiment-run-name}` + /// + /// Optional. + core.String? 
experimentRun; + + /// The name of the Model resources for which to generate a mapping to + /// artifact URIs. + /// + /// Applicable only to some of the Google-provided custom jobs. Format: + /// `projects/{project}/locations/{location}/models/{model}` In order to + /// retrieve a specific version of the model, also provide the version ID or + /// version alias. Example: + /// `projects/{project}/locations/{location}/models/{model}@2` or + /// `projects/{project}/locations/{location}/models/{model}@golden` If no + /// version ID or alias is specified, the "default" version will be returned. + /// The "default" version alias is created for the first version of the model, + /// and can be moved to other versions later on. There will be exactly one + /// default version. + /// + /// Optional. + core.List? models; + + /// The full name of the Compute Engine + /// \[network\](/compute/docs/networks-and-firewalls#networks) to which the + /// Job should be peered. + /// + /// For example, `projects/12345/global/networks/myVPC`. + /// \[Format\](/compute/docs/reference/rest/v1/networks/insert) is of the form + /// `projects/{project}/global/networks/{network}`. Where {project} is a + /// project number, as in `12345`, and {network} is a network name. To specify + /// this field, you must have already + /// [configured VPC Network Peering for Vertex AI](https://cloud.google.com/vertex-ai/docs/general/vpc-peering). + /// If this field is left unspecified, the job is not peered with any network. + /// + /// Optional. + core.String? network; + + /// The ID of the PersistentResource in the same Project and Location which to + /// run If this is specified, the job will be run on existing machines held by + /// the PersistentResource instead of on-demand short-live machines. + /// + /// The network and CMEK configs on the job should be consistent with those on + /// the PersistentResource, otherwise, the job will be rejected. + /// + /// Optional. + core.String? persistentResourceId; + + /// The ID of the location to store protected artifacts. + /// + /// e.g. us-central1. Populate only when the location is different than + /// CustomJob location. List of supported locations: + /// https://cloud.google.com/vertex-ai/docs/general/locations + core.String? protectedArtifactLocationId; + + /// A list of names for the reserved ip ranges under the VPC network that can + /// be used for this job. + /// + /// If set, we will deploy the job within the provided ip ranges. Otherwise, + /// the job will be deployed to any ip ranges under the provided VPC network. + /// Example: \['vertex-ai-ip-range'\]. + /// + /// Optional. + core.List? reservedIpRanges; + + /// Scheduling options for a CustomJob. + GoogleCloudAiplatformV1Scheduling? scheduling; + + /// Specifies the service account for workload run-as account. + /// + /// Users submitting jobs must have act-as permission on this run-as account. + /// If unspecified, the + /// [Vertex AI Custom Code Service Agent](https://cloud.google.com/vertex-ai/docs/general/access-control#service-agents) + /// for the CustomJob's project is used. + core.String? serviceAccount; + + /// The name of a Vertex AI Tensorboard resource to which this CustomJob will + /// upload Tensorboard logs. + /// + /// Format: + /// `projects/{project}/locations/{location}/tensorboards/{tensorboard}` + /// + /// Optional. + core.String? tensorboard; + + /// The spec of the worker pools including machine type and Docker image. 
+ /// + /// All worker pools except the first one are optional and can be skipped by + /// providing an empty value. + /// + /// Required. + core.List? workerPoolSpecs; + + GoogleCloudAiplatformV1CustomJobSpec({ + this.baseOutputDirectory, + this.enableDashboardAccess, + this.enableWebAccess, + this.experiment, + this.experimentRun, + this.models, + this.network, + this.persistentResourceId, + this.protectedArtifactLocationId, + this.reservedIpRanges, + this.scheduling, + this.serviceAccount, + this.tensorboard, + this.workerPoolSpecs, + }); + + GoogleCloudAiplatformV1CustomJobSpec.fromJson(core.Map json_) + : this( + baseOutputDirectory: json_.containsKey('baseOutputDirectory') + ? GoogleCloudAiplatformV1GcsDestination.fromJson( + json_['baseOutputDirectory'] + as core.Map) + : null, + enableDashboardAccess: json_['enableDashboardAccess'] as core.bool?, + enableWebAccess: json_['enableWebAccess'] as core.bool?, + experiment: json_['experiment'] as core.String?, + experimentRun: json_['experimentRun'] as core.String?, + models: (json_['models'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + network: json_['network'] as core.String?, + persistentResourceId: json_['persistentResourceId'] as core.String?, + protectedArtifactLocationId: + json_['protectedArtifactLocationId'] as core.String?, + reservedIpRanges: (json_['reservedIpRanges'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + scheduling: json_.containsKey('scheduling') + ? GoogleCloudAiplatformV1Scheduling.fromJson( + json_['scheduling'] as core.Map) + : null, + serviceAccount: json_['serviceAccount'] as core.String?, + tensorboard: json_['tensorboard'] as core.String?, + workerPoolSpecs: (json_['workerPoolSpecs'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1WorkerPoolSpec.fromJson( + value as core.Map)) + .toList(), + ); + + core.Map toJson() => { + if (baseOutputDirectory != null) + 'baseOutputDirectory': baseOutputDirectory!, + if (enableDashboardAccess != null) + 'enableDashboardAccess': enableDashboardAccess!, + if (enableWebAccess != null) 'enableWebAccess': enableWebAccess!, + if (experiment != null) 'experiment': experiment!, + if (experimentRun != null) 'experimentRun': experimentRun!, + if (models != null) 'models': models!, + if (network != null) 'network': network!, + if (persistentResourceId != null) + 'persistentResourceId': persistentResourceId!, + if (protectedArtifactLocationId != null) + 'protectedArtifactLocationId': protectedArtifactLocationId!, + if (reservedIpRanges != null) 'reservedIpRanges': reservedIpRanges!, + if (scheduling != null) 'scheduling': scheduling!, + if (serviceAccount != null) 'serviceAccount': serviceAccount!, + if (tensorboard != null) 'tensorboard': tensorboard!, + if (workerPoolSpecs != null) 'workerPoolSpecs': workerPoolSpecs!, + }; +} + +/// A piece of data in a Dataset. +/// +/// Could be an image, a video, a document or plain text. +class GoogleCloudAiplatformV1DataItem { + /// Timestamp when this DataItem was created. + /// + /// Output only. + core.String? createTime; + + /// Used to perform consistent read-modify-write updates. + /// + /// If not set, a blind "overwrite" update happens. + /// + /// Optional. + core.String? etag; + + /// The labels with user-defined metadata to organize your DataItems. + /// + /// Label keys and values can be no longer than 64 characters (Unicode + /// codepoints), can only contain lowercase letters, numeric characters, + /// underscores and dashes. International characters are allowed. 
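// Editor's illustrative sketch (not part of the generated client): a
// single-worker-pool CustomJobSpec assembled from the classes above.
// GoogleCloudAiplatformV1WorkerPoolSpec is assumed to expose a
// `containerSpec` field (as in the published googleapis package), and the
// image URI is a placeholder:
//
//   final jobSpec = GoogleCloudAiplatformV1CustomJobSpec(
//     workerPoolSpecs: [
//       GoogleCloudAiplatformV1WorkerPoolSpec(
//         containerSpec: GoogleCloudAiplatformV1ContainerSpec(
//           imageUri: 'us-docker.pkg.dev/my-project/my-repo/trainer:latest',
//           args: ['--epochs=10'],
//         ),
//       ),
//     ],
//   );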
No more than + /// 64 user labels can be associated with one DataItem(System labels are + /// excluded). See https://goo.gl/xmQnxf for more information and examples of + /// labels. System reserved label keys are prefixed with + /// "aiplatform.googleapis.com/" and are immutable. + /// + /// Optional. + core.Map? labels; + + /// The resource name of the DataItem. + /// + /// Output only. + core.String? name; + + /// The data that the DataItem represents (for example, an image or a text + /// snippet). + /// + /// The schema of the payload is stored in the parent Dataset's metadata + /// schema's dataItemSchemaUri field. + /// + /// Required. + /// + /// The values for Object must be JSON objects. It can consist of `num`, + /// `String`, `bool` and `null` as well as `Map` and `List` values. + core.Object? payload; + + /// Reserved for future use. + /// + /// Output only. + core.bool? satisfiesPzi; + + /// Reserved for future use. + /// + /// Output only. + core.bool? satisfiesPzs; + + /// Timestamp when this DataItem was last updated. + /// + /// Output only. + core.String? updateTime; + + GoogleCloudAiplatformV1DataItem({ + this.createTime, + this.etag, + this.labels, + this.name, + this.payload, + this.satisfiesPzi, + this.satisfiesPzs, + this.updateTime, + }); + + GoogleCloudAiplatformV1DataItem.fromJson(core.Map json_) + : this( + createTime: json_['createTime'] as core.String?, + etag: json_['etag'] as core.String?, + labels: + (json_['labels'] as core.Map?)?.map( + (key, value) => core.MapEntry( + key, + value as core.String, + ), + ), + name: json_['name'] as core.String?, + payload: json_['payload'], + satisfiesPzi: json_['satisfiesPzi'] as core.bool?, + satisfiesPzs: json_['satisfiesPzs'] as core.bool?, + updateTime: json_['updateTime'] as core.String?, + ); + + core.Map toJson() => { + if (createTime != null) 'createTime': createTime!, + if (etag != null) 'etag': etag!, + if (labels != null) 'labels': labels!, + if (name != null) 'name': name!, + if (payload != null) 'payload': payload!, + if (satisfiesPzi != null) 'satisfiesPzi': satisfiesPzi!, + if (satisfiesPzs != null) 'satisfiesPzs': satisfiesPzs!, + if (updateTime != null) 'updateTime': updateTime!, + }; +} + +/// A container for a single DataItem and Annotations on it. +class GoogleCloudAiplatformV1DataItemView { + /// The Annotations on the DataItem. + /// + /// If too many Annotations should be returned for the DataItem, this field + /// will be truncated per annotations_limit in request. If it was, then the + /// has_truncated_annotations will be set to true. + core.List? annotations; + + /// The DataItem. + GoogleCloudAiplatformV1DataItem? dataItem; + + /// True if and only if the Annotations field has been truncated. + /// + /// It happens if more Annotations for this DataItem met the request's + /// annotation_filter than are allowed to be returned by annotations_limit. + /// Note that if Annotations field is not being returned due to field mask, + /// then this field will not be set to true no matter how many Annotations are + /// there. + core.bool? hasTruncatedAnnotations; + + GoogleCloudAiplatformV1DataItemView({ + this.annotations, + this.dataItem, + this.hasTruncatedAnnotations, + }); + + GoogleCloudAiplatformV1DataItemView.fromJson(core.Map json_) + : this( + annotations: (json_['annotations'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1Annotation.fromJson( + value as core.Map)) + .toList(), + dataItem: json_.containsKey('dataItem') + ? 
GoogleCloudAiplatformV1DataItem.fromJson( + json_['dataItem'] as core.Map) + : null, + hasTruncatedAnnotations: + json_['hasTruncatedAnnotations'] as core.bool?, + ); + + core.Map toJson() => { + if (annotations != null) 'annotations': annotations!, + if (dataItem != null) 'dataItem': dataItem!, + if (hasTruncatedAnnotations != null) + 'hasTruncatedAnnotations': hasTruncatedAnnotations!, + }; +} + +/// DataLabelingJob is used to trigger a human labeling job on unlabeled data +/// from the following Dataset: +class GoogleCloudAiplatformV1DataLabelingJob { + /// Parameters that configure the active learning pipeline. + /// + /// Active learning will label the data incrementally via several iterations. + /// For every iteration, it will select a batch of data based on the sampling + /// strategy. + GoogleCloudAiplatformV1ActiveLearningConfig? activeLearningConfig; + + /// Labels to assign to annotations generated by this DataLabelingJob. + /// + /// Label keys and values can be no longer than 64 characters (Unicode /// codepoints), can only contain lowercase letters, numeric characters, /// underscores and dashes. International characters are allowed. See - /// https://goo.gl/xmQnxf for more information and examples of labels. + /// https://goo.gl/xmQnxf for more information and examples of labels. System + /// reserved label keys are prefixed with "aiplatform.googleapis.com/" and are + /// immutable. + core.Map? annotationLabels; + + /// Timestamp when this DataLabelingJob was created. + /// + /// Output only. + core.String? createTime; + + /// Estimated cost(in US dollars) that the DataLabelingJob has incurred to + /// date. + /// + /// Output only. + GoogleTypeMoney? currentSpend; + + /// Dataset resource names. + /// + /// Right now we only support labeling from a single Dataset. Format: + /// `projects/{project}/locations/{location}/datasets/{dataset}` + /// + /// Required. + core.List? datasets; + + /// The user-defined name of the DataLabelingJob. + /// + /// The name can be up to 128 characters long and can consist of any UTF-8 + /// characters. Display name of a DataLabelingJob. + /// + /// Required. + core.String? displayName; + + /// Customer-managed encryption key spec for a DataLabelingJob. + /// + /// If set, this DataLabelingJob will be secured by this key. Note: + /// Annotations created in the DataLabelingJob are associated with the + /// EncryptionSpec of the Dataset they are exported to. + GoogleCloudAiplatformV1EncryptionSpec? encryptionSpec; + + /// DataLabelingJob errors. + /// + /// It is only populated when job's state is `JOB_STATE_FAILED` or + /// `JOB_STATE_CANCELLED`. + /// + /// Output only. + GoogleRpcStatus? error; + + /// Input config parameters for the DataLabelingJob. + /// + /// Required. + /// + /// The values for Object must be JSON objects. It can consist of `num`, + /// `String`, `bool` and `null` as well as `Map` and `List` values. + core.Object? inputs; + + /// Points to a YAML file stored on Google Cloud Storage describing the config + /// for a specific type of DataLabelingJob. + /// + /// The schema files that can be used here are found in the + /// https://storage.googleapis.com/google-cloud-aiplatform bucket in the + /// /schema/datalabelingjob/inputs/ folder. + /// + /// Required. + core.String? inputsSchemaUri; + + /// The Google Cloud Storage location of the instruction pdf. + /// + /// This pdf is shared with labelers, and provides detailed description on how + /// to label DataItems in Datasets. + /// + /// Required. + core.String? 
instructionUri; + + /// Number of labelers to work on each DataItem. + /// + /// Required. + core.int? labelerCount; + + /// Current labeling job progress percentage scaled in interval \[0, 100\], + /// indicating the percentage of DataItems that has been finished. + /// + /// Output only. + core.int? labelingProgress; + + /// The labels with user-defined metadata to organize your DataLabelingJobs. + /// + /// Label keys and values can be no longer than 64 characters (Unicode + /// codepoints), can only contain lowercase letters, numeric characters, + /// underscores and dashes. International characters are allowed. See + /// https://goo.gl/xmQnxf for more information and examples of labels. System + /// reserved label keys are prefixed with "aiplatform.googleapis.com/" and are + /// immutable. Following system labels exist for each DataLabelingJob: * + /// "aiplatform.googleapis.com/schema": output only, its value is the + /// inputs_schema's title. core.Map? labels; - /// Parameters configuring the batch behavior. - /// - /// Currently only applicable when dedicated_resources are used (in other - /// cases Vertex AI does the tuning itself). + /// Resource name of the DataLabelingJob. + /// + /// Output only. + core.String? name; + + /// The SpecialistPools' resource names associated with this job. + core.List? specialistPools; + + /// The detailed state of the job. + /// + /// Output only. + /// Possible string values are: + /// - "JOB_STATE_UNSPECIFIED" : The job state is unspecified. + /// - "JOB_STATE_QUEUED" : The job has been just created or resumed and + /// processing has not yet begun. + /// - "JOB_STATE_PENDING" : The service is preparing to run the job. + /// - "JOB_STATE_RUNNING" : The job is in progress. + /// - "JOB_STATE_SUCCEEDED" : The job completed successfully. + /// - "JOB_STATE_FAILED" : The job failed. + /// - "JOB_STATE_CANCELLING" : The job is being cancelled. From this state the + /// job may only go to either `JOB_STATE_SUCCEEDED`, `JOB_STATE_FAILED` or + /// `JOB_STATE_CANCELLED`. + /// - "JOB_STATE_CANCELLED" : The job has been cancelled. + /// - "JOB_STATE_PAUSED" : The job has been stopped, and can be resumed. + /// - "JOB_STATE_EXPIRED" : The job has expired. + /// - "JOB_STATE_UPDATING" : The job is being updated. Only jobs in the + /// `RUNNING` state can be updated. After updating, the job goes back to the + /// `RUNNING` state. + /// - "JOB_STATE_PARTIALLY_SUCCEEDED" : The job is partially succeeded, some + /// results may be missing due to errors. + core.String? state; + + /// Timestamp when this DataLabelingJob was updated most recently. + /// + /// Output only. + core.String? updateTime; + + GoogleCloudAiplatformV1DataLabelingJob({ + this.activeLearningConfig, + this.annotationLabels, + this.createTime, + this.currentSpend, + this.datasets, + this.displayName, + this.encryptionSpec, + this.error, + this.inputs, + this.inputsSchemaUri, + this.instructionUri, + this.labelerCount, + this.labelingProgress, + this.labels, + this.name, + this.specialistPools, + this.state, + this.updateTime, + }); + + GoogleCloudAiplatformV1DataLabelingJob.fromJson(core.Map json_) + : this( + activeLearningConfig: json_.containsKey('activeLearningConfig') + ? GoogleCloudAiplatformV1ActiveLearningConfig.fromJson( + json_['activeLearningConfig'] + as core.Map) + : null, + annotationLabels: (json_['annotationLabels'] + as core.Map?) 
+ ?.map( + (key, value) => core.MapEntry( + key, + value as core.String, + ), + ), + createTime: json_['createTime'] as core.String?, + currentSpend: json_.containsKey('currentSpend') + ? GoogleTypeMoney.fromJson( + json_['currentSpend'] as core.Map) + : null, + datasets: (json_['datasets'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + displayName: json_['displayName'] as core.String?, + encryptionSpec: json_.containsKey('encryptionSpec') + ? GoogleCloudAiplatformV1EncryptionSpec.fromJson( + json_['encryptionSpec'] + as core.Map) + : null, + error: json_.containsKey('error') + ? GoogleRpcStatus.fromJson( + json_['error'] as core.Map) + : null, + inputs: json_['inputs'], + inputsSchemaUri: json_['inputsSchemaUri'] as core.String?, + instructionUri: json_['instructionUri'] as core.String?, + labelerCount: json_['labelerCount'] as core.int?, + labelingProgress: json_['labelingProgress'] as core.int?, + labels: + (json_['labels'] as core.Map?)?.map( + (key, value) => core.MapEntry( + key, + value as core.String, + ), + ), + name: json_['name'] as core.String?, + specialistPools: (json_['specialistPools'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + state: json_['state'] as core.String?, + updateTime: json_['updateTime'] as core.String?, + ); + + core.Map toJson() => { + if (activeLearningConfig != null) + 'activeLearningConfig': activeLearningConfig!, + if (annotationLabels != null) 'annotationLabels': annotationLabels!, + if (createTime != null) 'createTime': createTime!, + if (currentSpend != null) 'currentSpend': currentSpend!, + if (datasets != null) 'datasets': datasets!, + if (displayName != null) 'displayName': displayName!, + if (encryptionSpec != null) 'encryptionSpec': encryptionSpec!, + if (error != null) 'error': error!, + if (inputs != null) 'inputs': inputs!, + if (inputsSchemaUri != null) 'inputsSchemaUri': inputsSchemaUri!, + if (instructionUri != null) 'instructionUri': instructionUri!, + if (labelerCount != null) 'labelerCount': labelerCount!, + if (labelingProgress != null) 'labelingProgress': labelingProgress!, + if (labels != null) 'labels': labels!, + if (name != null) 'name': name!, + if (specialistPools != null) 'specialistPools': specialistPools!, + if (state != null) 'state': state!, + if (updateTime != null) 'updateTime': updateTime!, + }; +} + +/// A collection of DataItems and Annotations on them. +class GoogleCloudAiplatformV1Dataset { + /// Timestamp when this Dataset was created. /// - /// Immutable. - GoogleCloudAiplatformV1ManualBatchTuningParameters? - manualBatchTuningParameters; + /// Output only. + core.String? createTime; - /// The name of the Model resource that produces the predictions via this job, - /// must share the same ancestor Location. + /// The number of DataItems in this Dataset. /// - /// Starting this job has no impact on any existing deployments of the Model - /// and their resources. Exactly one of model and unmanaged_container_model - /// must be set. The model resource name may contain version id or version - /// alias to specify the version. Example: - /// `projects/{project}/locations/{location}/models/{model}@2` or - /// `projects/{project}/locations/{location}/models/{model}@golden` if no - /// version is specified, the default version will be deployed. The model - /// resource could also be a publisher model. Example: - /// `publishers/{publisher}/models/{model}` or - /// `projects/{project}/locations/{location}/publishers/{publisher}/models/{model}` - core.String? 
model; + /// Only apply for non-structured Dataset. + /// + /// Output only. + core.String? dataItemCount; - /// The parameters that govern the predictions. + /// The description of the Dataset. + core.String? description; + + /// The user-defined name of the Dataset. /// - /// The schema of the parameters may be specified via the Model's - /// PredictSchemata's parameters_schema_uri. + /// The name can be up to 128 characters long and can consist of any UTF-8 + /// characters. /// - /// The values for Object must be JSON objects. It can consist of `num`, - /// `String`, `bool` and `null` as well as `Map` and `List` values. - core.Object? modelParameters; + /// Required. + core.String? displayName; - /// The version ID of the Model that produces the predictions via this job. + /// Customer-managed encryption key spec for a Dataset. /// - /// Output only. - core.String? modelVersionId; + /// If set, this Dataset and all sub-resources of this Dataset will be secured + /// by this key. + GoogleCloudAiplatformV1EncryptionSpec? encryptionSpec; - /// Resource name of the BatchPredictionJob. + /// Used to perform consistent read-modify-write updates. /// - /// Output only. - core.String? name; + /// If not set, a blind "overwrite" update happens. + core.String? etag; - /// The Configuration specifying where output predictions should be written. + /// The labels with user-defined metadata to organize your Datasets. /// - /// The schema of any single prediction may be specified as a concatenation of - /// Model's PredictSchemata's instance_schema_uri and prediction_schema_uri. + /// Label keys and values can be no longer than 64 characters (Unicode + /// codepoints), can only contain lowercase letters, numeric characters, + /// underscores and dashes. International characters are allowed. No more than + /// 64 user labels can be associated with one Dataset (System labels are + /// excluded). See https://goo.gl/xmQnxf for more information and examples of + /// labels. System reserved label keys are prefixed with + /// "aiplatform.googleapis.com/" and are immutable. Following system labels + /// exist for each Dataset: * + /// "aiplatform.googleapis.com/dataset_metadata_schema": output only, its + /// value is the metadata_schema's title. + core.Map? labels; + + /// Additional information about the Dataset. /// /// Required. - GoogleCloudAiplatformV1BatchPredictionJobOutputConfig? outputConfig; + /// + /// The values for Object must be JSON objects. It can consist of `num`, + /// `String`, `bool` and `null` as well as `Map` and `List` values. + core.Object? metadata; - /// Information further describing the output of this job. + /// The resource name of the Artifact that was created in MetadataStore when + /// creating the Dataset. + /// + /// The Artifact resource name pattern is + /// `projects/{project}/locations/{location}/metadataStores/{metadata_store}/artifacts/{artifact}`. /// /// Output only. - GoogleCloudAiplatformV1BatchPredictionJobOutputInfo? outputInfo; + core.String? metadataArtifact; - /// Partial failures encountered. + /// Points to a YAML file stored on Google Cloud Storage describing additional + /// information about the Dataset. /// - /// For example, single files that can't be read. This field never exceeds 20 - /// entries. Status details fields contain standard Google Cloud error - /// details. + /// The schema is defined as an OpenAPI 3.0.2 Schema Object. The schema files + /// that can be used here are found in + /// gs://google-cloud-aiplatform/schema/dataset/metadata/. 
/// - /// Output only. - core.List? partialFailures; + /// Required. + core.String? metadataSchemaUri; - /// Information about resources that had been consumed by this job. + /// Reference to the public base model last used by the dataset. /// - /// Provided in real time at best effort basis, as well as a final value once - /// the job completes. Note: This field currently may be not populated for - /// batch predictions that use AutoML Models. + /// Only set for prompt datasets. + /// + /// Optional. + core.String? modelReference; + + /// Identifier. + /// + /// The resource name of the Dataset. /// /// Output only. - GoogleCloudAiplatformV1ResourcesConsumed? resourcesConsumed; + core.String? name; /// Reserved for future use. /// @@ -28924,129 +34845,52 @@ class GoogleCloudAiplatformV1BatchPredictionJob { /// Output only. core.bool? satisfiesPzs; - /// The service account that the DeployedModel's container runs as. - /// - /// If not specified, a system generated one will be used, which has minimal - /// permissions and the custom container, if used, may not have enough - /// permission to access other Google Cloud resources. Users deploying the - /// Model must have the `iam.serviceAccounts.actAs` permission on this service - /// account. - core.String? serviceAccount; - - /// Time when the BatchPredictionJob for the first time entered the - /// `JOB_STATE_RUNNING` state. - /// - /// Output only. - core.String? startTime; - - /// The detailed state of the job. - /// - /// Output only. - /// Possible string values are: - /// - "JOB_STATE_UNSPECIFIED" : The job state is unspecified. - /// - "JOB_STATE_QUEUED" : The job has been just created or resumed and - /// processing has not yet begun. - /// - "JOB_STATE_PENDING" : The service is preparing to run the job. - /// - "JOB_STATE_RUNNING" : The job is in progress. - /// - "JOB_STATE_SUCCEEDED" : The job completed successfully. - /// - "JOB_STATE_FAILED" : The job failed. - /// - "JOB_STATE_CANCELLING" : The job is being cancelled. From this state the - /// job may only go to either `JOB_STATE_SUCCEEDED`, `JOB_STATE_FAILED` or - /// `JOB_STATE_CANCELLED`. - /// - "JOB_STATE_CANCELLED" : The job has been cancelled. - /// - "JOB_STATE_PAUSED" : The job has been stopped, and can be resumed. - /// - "JOB_STATE_EXPIRED" : The job has expired. - /// - "JOB_STATE_UPDATING" : The job is being updated. Only jobs in the - /// `RUNNING` state can be updated. After updating, the job goes back to the - /// `RUNNING` state. - /// - "JOB_STATE_PARTIALLY_SUCCEEDED" : The job is partially succeeded, some - /// results may be missing due to errors. - core.String? state; - - /// Contains model information necessary to perform batch prediction without - /// requiring uploading to model registry. + /// All SavedQueries belong to the Dataset will be returned in List/Get + /// Dataset response. /// - /// Exactly one of model and unmanaged_container_model must be set. - GoogleCloudAiplatformV1UnmanagedContainerModel? unmanagedContainerModel; + /// The annotation_specs field will not be populated except for UI cases which + /// will only use annotation_spec_count. In CreateDataset request, a + /// SavedQuery is created together if this field is set, up to one SavedQuery + /// can be set in CreateDatasetRequest. The SavedQuery should not contain any + /// AnnotationSpec. + core.List? savedQueries; - /// Time when the BatchPredictionJob was most recently updated. + /// Timestamp when this Dataset was last updated. /// /// Output only. core.String? 
updateTime; - GoogleCloudAiplatformV1BatchPredictionJob({ - this.completionStats, + GoogleCloudAiplatformV1Dataset({ this.createTime, - this.dedicatedResources, - this.disableContainerLogging, + this.dataItemCount, + this.description, this.displayName, this.encryptionSpec, - this.endTime, - this.error, - this.explanationSpec, - this.generateExplanation, - this.inputConfig, - this.instanceConfig, + this.etag, this.labels, - this.manualBatchTuningParameters, - this.model, - this.modelParameters, - this.modelVersionId, + this.metadata, + this.metadataArtifact, + this.metadataSchemaUri, + this.modelReference, this.name, - this.outputConfig, - this.outputInfo, - this.partialFailures, - this.resourcesConsumed, this.satisfiesPzi, this.satisfiesPzs, - this.serviceAccount, - this.startTime, - this.state, - this.unmanagedContainerModel, + this.savedQueries, this.updateTime, }); - GoogleCloudAiplatformV1BatchPredictionJob.fromJson(core.Map json_) + GoogleCloudAiplatformV1Dataset.fromJson(core.Map json_) : this( - completionStats: json_.containsKey('completionStats') - ? GoogleCloudAiplatformV1CompletionStats.fromJson( - json_['completionStats'] - as core.Map) - : null, createTime: json_['createTime'] as core.String?, - dedicatedResources: json_.containsKey('dedicatedResources') - ? GoogleCloudAiplatformV1BatchDedicatedResources.fromJson( - json_['dedicatedResources'] - as core.Map) - : null, - disableContainerLogging: - json_['disableContainerLogging'] as core.bool?, + dataItemCount: json_['dataItemCount'] as core.String?, + description: json_['description'] as core.String?, displayName: json_['displayName'] as core.String?, encryptionSpec: json_.containsKey('encryptionSpec') ? GoogleCloudAiplatformV1EncryptionSpec.fromJson( json_['encryptionSpec'] as core.Map) : null, - endTime: json_['endTime'] as core.String?, - error: json_.containsKey('error') - ? GoogleRpcStatus.fromJson( - json_['error'] as core.Map) - : null, - explanationSpec: json_.containsKey('explanationSpec') - ? GoogleCloudAiplatformV1ExplanationSpec.fromJson( - json_['explanationSpec'] - as core.Map) - : null, - generateExplanation: json_['generateExplanation'] as core.bool?, - inputConfig: json_.containsKey('inputConfig') - ? GoogleCloudAiplatformV1BatchPredictionJobInputConfig.fromJson( - json_['inputConfig'] as core.Map) - : null, - instanceConfig: json_.containsKey('instanceConfig') - ? GoogleCloudAiplatformV1BatchPredictionJobInstanceConfig - .fromJson(json_['instanceConfig'] - as core.Map) - : null, + etag: json_['etag'] as core.String?, labels: (json_['labels'] as core.Map?)?.map( (key, value) => core.MapEntry( @@ -29054,1493 +34898,1771 @@ class GoogleCloudAiplatformV1BatchPredictionJob { value as core.String, ), ), - manualBatchTuningParameters: - json_.containsKey('manualBatchTuningParameters') - ? GoogleCloudAiplatformV1ManualBatchTuningParameters.fromJson( - json_['manualBatchTuningParameters'] - as core.Map) - : null, - model: json_['model'] as core.String?, - modelParameters: json_['modelParameters'], - modelVersionId: json_['modelVersionId'] as core.String?, + metadata: json_['metadata'], + metadataArtifact: json_['metadataArtifact'] as core.String?, + metadataSchemaUri: json_['metadataSchemaUri'] as core.String?, + modelReference: json_['modelReference'] as core.String?, name: json_['name'] as core.String?, - outputConfig: json_.containsKey('outputConfig') - ? GoogleCloudAiplatformV1BatchPredictionJobOutputConfig.fromJson( - json_['outputConfig'] as core.Map) - : null, - outputInfo: json_.containsKey('outputInfo') - ? 
GoogleCloudAiplatformV1BatchPredictionJobOutputInfo.fromJson( - json_['outputInfo'] as core.Map) - : null, - partialFailures: (json_['partialFailures'] as core.List?) - ?.map((value) => GoogleRpcStatus.fromJson( - value as core.Map)) - .toList(), - resourcesConsumed: json_.containsKey('resourcesConsumed') - ? GoogleCloudAiplatformV1ResourcesConsumed.fromJson( - json_['resourcesConsumed'] - as core.Map) - : null, satisfiesPzi: json_['satisfiesPzi'] as core.bool?, satisfiesPzs: json_['satisfiesPzs'] as core.bool?, - serviceAccount: json_['serviceAccount'] as core.String?, - startTime: json_['startTime'] as core.String?, - state: json_['state'] as core.String?, - unmanagedContainerModel: json_.containsKey('unmanagedContainerModel') - ? GoogleCloudAiplatformV1UnmanagedContainerModel.fromJson( - json_['unmanagedContainerModel'] - as core.Map) - : null, + savedQueries: (json_['savedQueries'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1SavedQuery.fromJson( + value as core.Map)) + .toList(), updateTime: json_['updateTime'] as core.String?, ); core.Map toJson() => { - if (completionStats != null) 'completionStats': completionStats!, if (createTime != null) 'createTime': createTime!, - if (dedicatedResources != null) - 'dedicatedResources': dedicatedResources!, - if (disableContainerLogging != null) - 'disableContainerLogging': disableContainerLogging!, + if (dataItemCount != null) 'dataItemCount': dataItemCount!, + if (description != null) 'description': description!, if (displayName != null) 'displayName': displayName!, if (encryptionSpec != null) 'encryptionSpec': encryptionSpec!, - if (endTime != null) 'endTime': endTime!, - if (error != null) 'error': error!, - if (explanationSpec != null) 'explanationSpec': explanationSpec!, - if (generateExplanation != null) - 'generateExplanation': generateExplanation!, - if (inputConfig != null) 'inputConfig': inputConfig!, - if (instanceConfig != null) 'instanceConfig': instanceConfig!, + if (etag != null) 'etag': etag!, if (labels != null) 'labels': labels!, - if (manualBatchTuningParameters != null) - 'manualBatchTuningParameters': manualBatchTuningParameters!, - if (model != null) 'model': model!, - if (modelParameters != null) 'modelParameters': modelParameters!, - if (modelVersionId != null) 'modelVersionId': modelVersionId!, + if (metadata != null) 'metadata': metadata!, + if (metadataArtifact != null) 'metadataArtifact': metadataArtifact!, + if (metadataSchemaUri != null) 'metadataSchemaUri': metadataSchemaUri!, + if (modelReference != null) 'modelReference': modelReference!, + if (name != null) 'name': name!, + if (satisfiesPzi != null) 'satisfiesPzi': satisfiesPzi!, + if (satisfiesPzs != null) 'satisfiesPzs': satisfiesPzs!, + if (savedQueries != null) 'savedQueries': savedQueries!, + if (updateTime != null) 'updateTime': updateTime!, + }; +} + +/// Describes the dataset version. +class GoogleCloudAiplatformV1DatasetVersion { + /// Name of the associated BigQuery dataset. + /// + /// Output only. + core.String? bigQueryDatasetName; + + /// Timestamp when this DatasetVersion was created. + /// + /// Output only. + core.String? createTime; + + /// The user-defined name of the DatasetVersion. + /// + /// The name can be up to 128 characters long and can consist of any UTF-8 + /// characters. + core.String? displayName; + + /// Used to perform consistent read-modify-write updates. + /// + /// If not set, a blind "overwrite" update happens. + core.String? etag; + + /// Additional information about the DatasetVersion. + /// + /// Required. 
Output only. + /// + /// The values for Object must be JSON objects. It can consist of `num`, + /// `String`, `bool` and `null` as well as `Map` and `List` values. + core.Object? metadata; + + /// Reference to the public base model last used by the dataset version. + /// + /// Only set for prompt dataset versions. + /// + /// Output only. + core.String? modelReference; + + /// Identifier. + /// + /// The resource name of the DatasetVersion. + /// + /// Output only. + core.String? name; + + /// Reserved for future use. + /// + /// Output only. + core.bool? satisfiesPzi; + + /// Reserved for future use. + /// + /// Output only. + core.bool? satisfiesPzs; + + /// Timestamp when this DatasetVersion was last updated. + /// + /// Output only. + core.String? updateTime; + + GoogleCloudAiplatformV1DatasetVersion({ + this.bigQueryDatasetName, + this.createTime, + this.displayName, + this.etag, + this.metadata, + this.modelReference, + this.name, + this.satisfiesPzi, + this.satisfiesPzs, + this.updateTime, + }); + + GoogleCloudAiplatformV1DatasetVersion.fromJson(core.Map json_) + : this( + bigQueryDatasetName: json_['bigQueryDatasetName'] as core.String?, + createTime: json_['createTime'] as core.String?, + displayName: json_['displayName'] as core.String?, + etag: json_['etag'] as core.String?, + metadata: json_['metadata'], + modelReference: json_['modelReference'] as core.String?, + name: json_['name'] as core.String?, + satisfiesPzi: json_['satisfiesPzi'] as core.bool?, + satisfiesPzs: json_['satisfiesPzs'] as core.bool?, + updateTime: json_['updateTime'] as core.String?, + ); + + core.Map toJson() => { + if (bigQueryDatasetName != null) + 'bigQueryDatasetName': bigQueryDatasetName!, + if (createTime != null) 'createTime': createTime!, + if (displayName != null) 'displayName': displayName!, + if (etag != null) 'etag': etag!, + if (metadata != null) 'metadata': metadata!, + if (modelReference != null) 'modelReference': modelReference!, if (name != null) 'name': name!, - if (outputConfig != null) 'outputConfig': outputConfig!, - if (outputInfo != null) 'outputInfo': outputInfo!, - if (partialFailures != null) 'partialFailures': partialFailures!, - if (resourcesConsumed != null) 'resourcesConsumed': resourcesConsumed!, if (satisfiesPzi != null) 'satisfiesPzi': satisfiesPzi!, if (satisfiesPzs != null) 'satisfiesPzs': satisfiesPzs!, - if (serviceAccount != null) 'serviceAccount': serviceAccount!, - if (startTime != null) 'startTime': startTime!, - if (state != null) 'state': state!, - if (unmanagedContainerModel != null) - 'unmanagedContainerModel': unmanagedContainerModel!, if (updateTime != null) 'updateTime': updateTime!, }; } -/// Configures the input to BatchPredictionJob. -/// -/// See Model.supported_input_storage_formats for Model's supported input -/// formats, and how instances should be expressed via any of them. -class GoogleCloudAiplatformV1BatchPredictionJobInputConfig { - /// The BigQuery location of the input table. - /// - /// The schema of the table should be in the format described by the given - /// context OpenAPI Schema, if one is provided. The table may contain - /// additional columns that are not described by the schema, and they will be - /// ignored. - GoogleCloudAiplatformV1BigQuerySource? bigquerySource; - - /// The Cloud Storage location for the input instances. - GoogleCloudAiplatformV1GcsSource? gcsSource; +/// A description of resources that are dedicated to a DeployedModel, and that +/// need a higher degree of manual configuration. 
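Every message class added in this hunk follows the same generated pattern: optional named constructor parameters, a `fromJson` constructor that reads a decoded JSON map, and a `toJson` that omits null fields. A minimal sketch of building a Dataset payload and round-tripping it through JSON, assuming the published package import path and an illustrative schema URI (neither is taken from this diff):

// Sketch only: field values are illustrative and the import path is assumed.
import 'package:googleapis/aiplatform/v1.dart';

void main() {
  final dataset = GoogleCloudAiplatformV1Dataset(
    displayName: 'my-prompt-dataset', // required by the API
    metadataSchemaUri:
        'gs://google-cloud-aiplatform/schema/dataset/metadata/image_1.0.0.yaml',
    metadata: {'exampleKey': 'exampleValue'}, // any JSON object
    labels: {'env': 'dev'}, // user labels, subject to the limits described above
  );

  // toJson() only emits fields that were set, so this is a sparse request body.
  final body = dataset.toJson();

  // fromJson() reconstructs the message from the decoded map.
  final copy = GoogleCloudAiplatformV1Dataset.fromJson(body);
  assert(copy.displayName == dataset.displayName);
}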
+class GoogleCloudAiplatformV1DedicatedResources { + /// The metric specifications that overrides a resource utilization metric + /// (CPU utilization, accelerator's duty cycle, and so on) target value + /// (default to 60 if not set). + /// + /// At most one entry is allowed per metric. If machine_spec.accelerator_count + /// is above 0, the autoscaling will be based on both CPU utilization and + /// accelerator's duty cycle metrics and scale up when either metrics exceeds + /// its target value while scale down if both metrics are under their target + /// value. The default target value is 60 for both metrics. If + /// machine_spec.accelerator_count is 0, the autoscaling will be based on CPU + /// utilization metric only with default target value 60 if not explicitly + /// set. For example, in the case of Online Prediction, if you want to + /// override target CPU utilization to 80, you should set + /// autoscaling_metric_specs.metric_name to + /// `aiplatform.googleapis.com/prediction/online/cpu/utilization` and + /// autoscaling_metric_specs.target to `80`. + /// + /// Immutable. + core.List? + autoscalingMetricSpecs; + + /// The specification of a single machine used by the prediction. + /// + /// Required. Immutable. + GoogleCloudAiplatformV1MachineSpec? machineSpec; + + /// The maximum number of replicas this DeployedModel may be deployed on when + /// the traffic against it increases. + /// + /// If the requested value is too large, the deployment will error, but if + /// deployment succeeds then the ability to scale the model to that many + /// replicas is guaranteed (barring service outages). If traffic against the + /// DeployedModel increases beyond what its replicas at maximum may handle, a + /// portion of the traffic will be dropped. If this value is not provided, + /// will use min_replica_count as the default value. The value of this field + /// impacts the charge against Vertex CPU and GPU quotas. Specifically, you + /// will be charged for (max_replica_count * number of cores in the selected + /// machine type) and (max_replica_count * number of GPUs per replica in the + /// selected machine type). + /// + /// Immutable. + core.int? maxReplicaCount; + + /// The minimum number of machine replicas this DeployedModel will be always + /// deployed on. + /// + /// This value must be greater than or equal to 1. If traffic against the + /// DeployedModel increases, it may dynamically be deployed onto more + /// replicas, and as traffic decreases, some of these extra replicas may be + /// freed. + /// + /// Required. Immutable. + core.int? minReplicaCount; + + /// Number of required available replicas for the deployment to succeed. + /// + /// This field is only needed when partial model deployment/mutation is + /// desired. If set, the model deploy/mutate operation will succeed once + /// available_replica_count reaches required_replica_count, and the rest of + /// the replicas will be retried. If not set, the default + /// required_replica_count will be min_replica_count. + /// + /// Optional. + core.int? requiredReplicaCount; + + /// If true, schedule the deployment workload on + /// [spot VMs](https://cloud.google.com/kubernetes-engine/docs/concepts/spot-vms). + /// + /// Optional. + core.bool? 
spot; + + GoogleCloudAiplatformV1DedicatedResources({ + this.autoscalingMetricSpecs, + this.machineSpec, + this.maxReplicaCount, + this.minReplicaCount, + this.requiredReplicaCount, + this.spot, + }); + + GoogleCloudAiplatformV1DedicatedResources.fromJson(core.Map json_) + : this( + autoscalingMetricSpecs: + (json_['autoscalingMetricSpecs'] as core.List?) + ?.map((value) => + GoogleCloudAiplatformV1AutoscalingMetricSpec.fromJson( + value as core.Map)) + .toList(), + machineSpec: json_.containsKey('machineSpec') + ? GoogleCloudAiplatformV1MachineSpec.fromJson( + json_['machineSpec'] as core.Map) + : null, + maxReplicaCount: json_['maxReplicaCount'] as core.int?, + minReplicaCount: json_['minReplicaCount'] as core.int?, + requiredReplicaCount: json_['requiredReplicaCount'] as core.int?, + spot: json_['spot'] as core.bool?, + ); + + core.Map toJson() => { + if (autoscalingMetricSpecs != null) + 'autoscalingMetricSpecs': autoscalingMetricSpecs!, + if (machineSpec != null) 'machineSpec': machineSpec!, + if (maxReplicaCount != null) 'maxReplicaCount': maxReplicaCount!, + if (minReplicaCount != null) 'minReplicaCount': minReplicaCount!, + if (requiredReplicaCount != null) + 'requiredReplicaCount': requiredReplicaCount!, + if (spot != null) 'spot': spot!, + }; +} + +/// Request message for FeaturestoreService.DeleteFeatureValues. +class GoogleCloudAiplatformV1DeleteFeatureValuesRequest { + /// Select feature values to be deleted by specifying entities. + GoogleCloudAiplatformV1DeleteFeatureValuesRequestSelectEntity? selectEntity; - /// The format in which instances are given, must be one of the Model's - /// supported_input_storage_formats. - /// - /// Required. - core.String? instancesFormat; + /// Select feature values to be deleted by specifying time range and features. + GoogleCloudAiplatformV1DeleteFeatureValuesRequestSelectTimeRangeAndFeature? + selectTimeRangeAndFeature; - GoogleCloudAiplatformV1BatchPredictionJobInputConfig({ - this.bigquerySource, - this.gcsSource, - this.instancesFormat, + GoogleCloudAiplatformV1DeleteFeatureValuesRequest({ + this.selectEntity, + this.selectTimeRangeAndFeature, }); - GoogleCloudAiplatformV1BatchPredictionJobInputConfig.fromJson(core.Map json_) + GoogleCloudAiplatformV1DeleteFeatureValuesRequest.fromJson(core.Map json_) : this( - bigquerySource: json_.containsKey('bigquerySource') - ? GoogleCloudAiplatformV1BigQuerySource.fromJson( - json_['bigquerySource'] + selectEntity: json_.containsKey('selectEntity') + ? GoogleCloudAiplatformV1DeleteFeatureValuesRequestSelectEntity + .fromJson(json_['selectEntity'] as core.Map) : null, - gcsSource: json_.containsKey('gcsSource') - ? GoogleCloudAiplatformV1GcsSource.fromJson( - json_['gcsSource'] as core.Map) + selectTimeRangeAndFeature: json_ + .containsKey('selectTimeRangeAndFeature') + ? GoogleCloudAiplatformV1DeleteFeatureValuesRequestSelectTimeRangeAndFeature + .fromJson(json_['selectTimeRangeAndFeature'] + as core.Map) : null, - instancesFormat: json_['instancesFormat'] as core.String?, ); core.Map toJson() => { - if (bigquerySource != null) 'bigquerySource': bigquerySource!, - if (gcsSource != null) 'gcsSource': gcsSource!, - if (instancesFormat != null) 'instancesFormat': instancesFormat!, + if (selectEntity != null) 'selectEntity': selectEntity!, + if (selectTimeRangeAndFeature != null) + 'selectTimeRangeAndFeature': selectTimeRangeAndFeature!, }; } -/// Configuration defining how to transform batch prediction input instances to -/// the instances that the Model accepts. 
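The `GoogleCloudAiplatformV1DedicatedResources` message added earlier in this hunk is where the autoscaling override described in its doc comment is expressed. A small sketch of raising the online-prediction CPU utilization target to 80; the `MachineSpec` and `AutoscalingMetricSpec` field names are assumed from elsewhere in this generated file, not from the lines shown here:

// Sketch only: machineType, metricName and target are assumed field names.
import 'package:googleapis/aiplatform/v1.dart';

void main() {
  final resources = GoogleCloudAiplatformV1DedicatedResources(
    machineSpec: GoogleCloudAiplatformV1MachineSpec(
      machineType: 'n1-standard-4',
    ),
    minReplicaCount: 1, // required, must be >= 1
    maxReplicaCount: 3, // bounds the charge against CPU/GPU quota
    autoscalingMetricSpecs: [
      GoogleCloudAiplatformV1AutoscalingMetricSpec(
        metricName:
            'aiplatform.googleapis.com/prediction/online/cpu/utilization',
        target: 80, // overrides the default target of 60
      ),
    ],
  );
  print(resources.toJson());
}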
-class GoogleCloudAiplatformV1BatchPredictionJobInstanceConfig { - /// Fields that will be excluded in the prediction instance that is sent to - /// the Model. - /// - /// Excluded will be attached to the batch prediction output if key_field is - /// not specified. When excluded_fields is populated, included_fields must be - /// empty. The input must be JSONL with objects at each line, BigQuery or - /// TfRecord. - core.List? excludedFields; - - /// Fields that will be included in the prediction instance that is sent to - /// the Model. - /// - /// If instance_type is `array`, the order of field names in included_fields - /// also determines the order of the values in the array. When included_fields - /// is populated, excluded_fields must be empty. The input must be JSONL with - /// objects at each line, BigQuery or TfRecord. - core.List? includedFields; - - /// The format of the instance that the Model accepts. - /// - /// Vertex AI will convert compatible batch prediction input instance formats - /// to the specified format. Supported values are: * `object`: Each input is - /// converted to JSON object format. * For `bigquery`, each row is converted - /// to an object. * For `jsonl`, each line of the JSONL input must be an - /// object. * Does not apply to `csv`, `file-list`, `tf-record`, or - /// `tf-record-gzip`. * `array`: Each input is converted to JSON array format. - /// * For `bigquery`, each row is converted to an array. The order of columns - /// is determined by the BigQuery column order, unless included_fields is - /// populated. included_fields must be populated for specifying field orders. - /// * For `jsonl`, if each line of the JSONL input is an object, - /// included_fields must be populated for specifying field orders. * Does not - /// apply to `csv`, `file-list`, `tf-record`, or `tf-record-gzip`. If not - /// specified, Vertex AI converts the batch prediction input as follows: * For - /// `bigquery` and `csv`, the behavior is the same as `array`. The order of - /// columns is the same as defined in the file or table, unless - /// included_fields is populated. * For `jsonl`, the prediction instance - /// format is determined by each line of the input. * For - /// `tf-record`/`tf-record-gzip`, each record will be converted to an object - /// in the format of `{"b64": }`, where `` is the Base64-encoded string of the - /// content of the record. * For `file-list`, each file in the list will be - /// converted to an object in the format of `{"b64": }`, where `` is the - /// Base64-encoded string of the content of the file. - core.String? instanceType; - - /// The name of the field that is considered as a key. +/// Message to select entity. +/// +/// If an entity id is selected, all the feature values corresponding to the +/// entity id will be deleted, including the entityId. +class GoogleCloudAiplatformV1DeleteFeatureValuesRequestSelectEntity { + /// Selectors choosing feature values of which entity id to be deleted from + /// the EntityType. /// - /// The values identified by the key field is not included in the transformed - /// instances that is sent to the Model. This is similar to specifying this - /// name of the field in excluded_fields. In addition, the batch prediction - /// output will not include the instances. Instead the output will only - /// include the value of the key field, in a field named `key` in the output: - /// * For `jsonl` output format, the output will have a `key` field instead of - /// the `instance` field. 
* For `csv`/`bigquery` output format, the output - /// will have have a `key` column instead of the instance feature columns. The - /// input must be JSONL with objects at each line, CSV, BigQuery or TfRecord. - core.String? keyField; + /// Required. + GoogleCloudAiplatformV1EntityIdSelector? entityIdSelector; - GoogleCloudAiplatformV1BatchPredictionJobInstanceConfig({ - this.excludedFields, - this.includedFields, - this.instanceType, - this.keyField, + GoogleCloudAiplatformV1DeleteFeatureValuesRequestSelectEntity({ + this.entityIdSelector, }); - GoogleCloudAiplatformV1BatchPredictionJobInstanceConfig.fromJson( + GoogleCloudAiplatformV1DeleteFeatureValuesRequestSelectEntity.fromJson( core.Map json_) : this( - excludedFields: (json_['excludedFields'] as core.List?) - ?.map((value) => value as core.String) - .toList(), - includedFields: (json_['includedFields'] as core.List?) - ?.map((value) => value as core.String) - .toList(), - instanceType: json_['instanceType'] as core.String?, - keyField: json_['keyField'] as core.String?, + entityIdSelector: json_.containsKey('entityIdSelector') + ? GoogleCloudAiplatformV1EntityIdSelector.fromJson( + json_['entityIdSelector'] + as core.Map) + : null, ); core.Map toJson() => { - if (excludedFields != null) 'excludedFields': excludedFields!, - if (includedFields != null) 'includedFields': includedFields!, - if (instanceType != null) 'instanceType': instanceType!, - if (keyField != null) 'keyField': keyField!, + if (entityIdSelector != null) 'entityIdSelector': entityIdSelector!, }; } -/// Configures the output of BatchPredictionJob. +/// Message to select time range and feature. /// -/// See Model.supported_output_storage_formats for supported output formats, and -/// how predictions are expressed via any of them. -class GoogleCloudAiplatformV1BatchPredictionJobOutputConfig { - /// The BigQuery project or dataset location where the output is to be written - /// to. +/// Values of the selected feature generated within an inclusive time range will +/// be deleted. Using this option permanently deletes the feature values from +/// the specified feature IDs within the specified time range. This might +/// include data from the online storage. If you want to retain any deleted +/// historical data in the online storage, you must re-ingest it. +class GoogleCloudAiplatformV1DeleteFeatureValuesRequestSelectTimeRangeAndFeature { + /// Selectors choosing which feature values to be deleted from the EntityType. /// - /// If project is provided, a new dataset is created with name `prediction__` - /// where is made BigQuery-dataset-name compatible (for example, most special - /// characters become underscores), and timestamp is in - /// YYYY_MM_DDThh_mm_ss_sssZ "based on ISO-8601" format. In the dataset two - /// tables will be created, `predictions`, and `errors`. If the Model has both - /// instance and prediction schemata defined then the tables have columns as - /// follows: The `predictions` table contains instances for which the - /// prediction succeeded, it has columns as per a concatenation of the Model's - /// instance and prediction schemata. The `errors` table contains rows for - /// which the prediction has failed, it has instance columns, as per the - /// instance schema, followed by a single "errors" column, which as values has - /// google.rpc.Status represented as a STRUCT, and containing only `code` and - /// `message`. - GoogleCloudAiplatformV1BigQueryDestination? bigqueryDestination; + /// Required. + GoogleCloudAiplatformV1FeatureSelector? 
featureSelector; - /// The Cloud Storage location of the directory where the output is to be - /// written to. + /// If set, data will not be deleted from online storage. /// - /// In the given directory a new directory is created. Its name is - /// `prediction--`, where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 - /// format. Inside of it files `predictions_0001.`, `predictions_0002.`, ..., - /// `predictions_N.` are created where `` depends on chosen - /// predictions_format, and N may equal 0001 and depends on the total number - /// of successfully predicted instances. If the Model has both instance and - /// prediction schemata defined then each such file contains predictions as - /// per the predictions_format. If prediction for any instance failed - /// (partially or completely), then an additional `errors_0001.`, - /// `errors_0002.`,..., `errors_N.` files are created (N depends on total - /// number of failed predictions). These files contain the failed instances, - /// as per their schema, followed by an additional `error` field which as - /// value has google.rpc.Status containing only `code` and `message` fields. - GoogleCloudAiplatformV1GcsDestination? gcsDestination; + /// When time range is older than the data in online storage, setting this to + /// be true will make the deletion have no impact on online serving. + core.bool? skipOnlineStorageDelete; - /// The format in which Vertex AI gives the predictions, must be one of the - /// Model's supported_output_storage_formats. + /// Select feature generated within a half-inclusive time range. + /// + /// The time range is lower inclusive and upper exclusive. /// /// Required. - core.String? predictionsFormat; + GoogleTypeInterval? timeRange; - GoogleCloudAiplatformV1BatchPredictionJobOutputConfig({ - this.bigqueryDestination, - this.gcsDestination, - this.predictionsFormat, + GoogleCloudAiplatformV1DeleteFeatureValuesRequestSelectTimeRangeAndFeature({ + this.featureSelector, + this.skipOnlineStorageDelete, + this.timeRange, }); - GoogleCloudAiplatformV1BatchPredictionJobOutputConfig.fromJson(core.Map json_) + GoogleCloudAiplatformV1DeleteFeatureValuesRequestSelectTimeRangeAndFeature.fromJson( + core.Map json_) : this( - bigqueryDestination: json_.containsKey('bigqueryDestination') - ? GoogleCloudAiplatformV1BigQueryDestination.fromJson( - json_['bigqueryDestination'] + featureSelector: json_.containsKey('featureSelector') + ? GoogleCloudAiplatformV1FeatureSelector.fromJson( + json_['featureSelector'] as core.Map) : null, - gcsDestination: json_.containsKey('gcsDestination') - ? GoogleCloudAiplatformV1GcsDestination.fromJson( - json_['gcsDestination'] - as core.Map) + skipOnlineStorageDelete: + json_['skipOnlineStorageDelete'] as core.bool?, + timeRange: json_.containsKey('timeRange') + ? GoogleTypeInterval.fromJson( + json_['timeRange'] as core.Map) : null, - predictionsFormat: json_['predictionsFormat'] as core.String?, ); core.Map toJson() => { - if (bigqueryDestination != null) - 'bigqueryDestination': bigqueryDestination!, - if (gcsDestination != null) 'gcsDestination': gcsDestination!, - if (predictionsFormat != null) 'predictionsFormat': predictionsFormat!, + if (featureSelector != null) 'featureSelector': featureSelector!, + if (skipOnlineStorageDelete != null) + 'skipOnlineStorageDelete': skipOnlineStorageDelete!, + if (timeRange != null) 'timeRange': timeRange!, }; } -/// Further describes this job's output. -/// -/// Supplements output_config. 
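The two selector messages added just above are alternatives on `GoogleCloudAiplatformV1DeleteFeatureValuesRequest`: a request either names whole entities or a feature/time-range slice. A sketch of the time-range form; the `FeatureSelector`/`IdMatcher` and `GoogleTypeInterval` field names are assumptions based on the rest of this generated file, not on the lines shown here:

// Sketch only: delete two features' values written during January 2024,
// leaving whatever is already materialized in online storage untouched.
import 'package:googleapis/aiplatform/v1.dart';

void main() {
  final request = GoogleCloudAiplatformV1DeleteFeatureValuesRequest(
    selectTimeRangeAndFeature:
        GoogleCloudAiplatformV1DeleteFeatureValuesRequestSelectTimeRangeAndFeature(
      featureSelector: GoogleCloudAiplatformV1FeatureSelector(
        idMatcher: GoogleCloudAiplatformV1IdMatcher(ids: ['age', 'height']),
      ),
      timeRange: GoogleTypeInterval(
        startTime: '2024-01-01T00:00:00Z', // inclusive lower bound
        endTime: '2024-02-01T00:00:00Z', // exclusive upper bound
      ),
      skipOnlineStorageDelete: true,
    ),
  );
  print(request.toJson());
}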
-class GoogleCloudAiplatformV1BatchPredictionJobOutputInfo { - /// The path of the BigQuery dataset created, in `bq://projectId.bqDatasetId` - /// format, into which the prediction output is written. +/// Request message for IndexEndpointService.DeployIndex. +class GoogleCloudAiplatformV1DeployIndexRequest { + /// The DeployedIndex to be created within the IndexEndpoint. /// - /// Output only. - core.String? bigqueryOutputDataset; + /// Required. + GoogleCloudAiplatformV1DeployedIndex? deployedIndex; - /// The name of the BigQuery table created, in `predictions_` format, into - /// which the prediction output is written. + GoogleCloudAiplatformV1DeployIndexRequest({ + this.deployedIndex, + }); + + GoogleCloudAiplatformV1DeployIndexRequest.fromJson(core.Map json_) + : this( + deployedIndex: json_.containsKey('deployedIndex') + ? GoogleCloudAiplatformV1DeployedIndex.fromJson( + json_['deployedIndex'] as core.Map) + : null, + ); + + core.Map toJson() => { + if (deployedIndex != null) 'deployedIndex': deployedIndex!, + }; +} + +/// Request message for EndpointService.DeployModel. +class GoogleCloudAiplatformV1DeployModelRequest { + /// The DeployedModel to be created within the Endpoint. /// - /// Can be used by UI to generate the BigQuery output path, for example. + /// Note that Endpoint.traffic_split must be updated for the DeployedModel to + /// start receiving traffic, either as part of this call, or via + /// EndpointService.UpdateEndpoint. /// - /// Output only. - core.String? bigqueryOutputTable; + /// Required. + GoogleCloudAiplatformV1DeployedModel? deployedModel; - /// The full path of the Cloud Storage directory created, into which the - /// prediction output is written. + /// A map from a DeployedModel's ID to the percentage of this Endpoint's + /// traffic that should be forwarded to that DeployedModel. /// - /// Output only. - core.String? gcsOutputDirectory; + /// If this field is non-empty, then the Endpoint's traffic_split will be + /// overwritten with it. To refer to the ID of the just being deployed Model, + /// a "0" should be used, and the actual ID of the new DeployedModel will be + /// filled in its place by this method. The traffic percentage values must add + /// up to 100. If this field is empty, then the Endpoint's traffic_split is + /// not updated. + core.Map? trafficSplit; - GoogleCloudAiplatformV1BatchPredictionJobOutputInfo({ - this.bigqueryOutputDataset, - this.bigqueryOutputTable, - this.gcsOutputDirectory, + GoogleCloudAiplatformV1DeployModelRequest({ + this.deployedModel, + this.trafficSplit, }); - GoogleCloudAiplatformV1BatchPredictionJobOutputInfo.fromJson(core.Map json_) + GoogleCloudAiplatformV1DeployModelRequest.fromJson(core.Map json_) : this( - bigqueryOutputDataset: json_['bigqueryOutputDataset'] as core.String?, - bigqueryOutputTable: json_['bigqueryOutputTable'] as core.String?, - gcsOutputDirectory: json_['gcsOutputDirectory'] as core.String?, + deployedModel: json_.containsKey('deployedModel') + ? GoogleCloudAiplatformV1DeployedModel.fromJson( + json_['deployedModel'] as core.Map) + : null, + trafficSplit: + (json_['trafficSplit'] as core.Map?) 
+ ?.map( + (key, value) => core.MapEntry( + key, + value as core.int, + ), + ), ); core.Map toJson() => { - if (bigqueryOutputDataset != null) - 'bigqueryOutputDataset': bigqueryOutputDataset!, - if (bigqueryOutputTable != null) - 'bigqueryOutputTable': bigqueryOutputTable!, - if (gcsOutputDirectory != null) - 'gcsOutputDirectory': gcsOutputDirectory!, + if (deployedModel != null) 'deployedModel': deployedModel!, + if (trafficSplit != null) 'trafficSplit': trafficSplit!, }; } -/// Request message for FeaturestoreService.BatchReadFeatureValues. -class GoogleCloudAiplatformV1BatchReadFeatureValuesRequest { - /// Similar to csv_read_instances, but from BigQuery source. - GoogleCloudAiplatformV1BigQuerySource? bigqueryReadInstances; +/// A deployment of an Index. +/// +/// IndexEndpoints contain one or more DeployedIndexes. +class GoogleCloudAiplatformV1DeployedIndex { + /// A description of resources that the DeployedIndex uses, which to large + /// degree are decided by Vertex AI, and optionally allows only a modest + /// additional configuration. + /// + /// If min_replica_count is not set, the default value is 2 (we don't provide + /// SLA when min_replica_count=1). If max_replica_count is not set, the + /// default value is min_replica_count. The max allowed replica count is 1000. + /// + /// Optional. + GoogleCloudAiplatformV1AutomaticResources? automaticResources; - /// Each read instance consists of exactly one read timestamp and one or more - /// entity IDs identifying entities of the corresponding EntityTypes whose - /// Features are requested. + /// Timestamp when the DeployedIndex was created. /// - /// Each output instance contains Feature values of requested entities - /// concatenated together as of the read time. An example read instance may be - /// `foo_entity_id, bar_entity_id, 2020-01-01T10:00:00.123Z`. An example - /// output instance may be `foo_entity_id, bar_entity_id, - /// 2020-01-01T10:00:00.123Z, foo_entity_feature1_value, - /// bar_entity_feature2_value`. Timestamp in each read instance must be - /// millisecond-aligned. `csv_read_instances` are read instances stored in a - /// plain-text CSV file. The header should be: \[ENTITY_TYPE_ID1\], - /// \[ENTITY_TYPE_ID2\], ..., timestamp The columns can be in any order. - /// Values in the timestamp column must use the RFC 3339 format, e.g. - /// `2012-07-30T10:43:17.123Z`. - GoogleCloudAiplatformV1CsvSource? csvReadInstances; + /// Output only. + core.String? createTime; - /// Specifies output location and format. + /// A description of resources that are dedicated to the DeployedIndex, and + /// that need a higher degree of manual configuration. /// - /// Required. - GoogleCloudAiplatformV1FeatureValueDestination? destination; + /// The field min_replica_count must be set to a value strictly greater than + /// 0, or else validation will fail. We don't provide SLA when + /// min_replica_count=1. If max_replica_count is not set, the default value is + /// min_replica_count. The max allowed replica count is 1000. Available + /// machine types for SMALL shard: e2-standard-2 and all machine types + /// available for MEDIUM and LARGE shard. Available machine types for MEDIUM + /// shard: e2-standard-16 and all machine types available for LARGE shard. + /// Available machine types for LARGE shard: e2-highmem-16, n2d-standard-32. + /// n1-standard-16 and n1-standard-32 are still available, but we recommend + /// e2-standard-16 and e2-highmem-16 for cost efficiency. + /// + /// Optional. 
+ GoogleCloudAiplatformV1DedicatedResources? dedicatedResources; - /// Specifies EntityType grouping Features to read values of and settings. + /// If set, the authentication is enabled for the private endpoint. /// - /// Required. - core.List? - entityTypeSpecs; + /// Optional. + GoogleCloudAiplatformV1DeployedIndexAuthConfig? deployedIndexAuthConfig; - /// When not empty, the specified fields in the *_read_instances source will - /// be joined as-is in the output, in addition to those fields from the - /// Featurestore Entity. + /// The deployment group can be no longer than 64 characters (eg: 'test', + /// 'prod'). /// - /// For BigQuery source, the type of the pass-through values will be - /// automatically inferred. For CSV source, the pass-through values will be - /// passed as opaque bytes. - core.List< - GoogleCloudAiplatformV1BatchReadFeatureValuesRequestPassThroughField>? - passThroughFields; + /// If not set, we will use the 'default' deployment group. Creating + /// `deployment_groups` with `reserved_ip_ranges` is a recommended practice + /// when the peered network has multiple peering ranges. This creates your + /// deployments from predictable IP spaces for easier traffic administration. + /// Also, one deployment_group (except 'default') can only be used with the + /// same reserved_ip_ranges which means if the deployment_group has been used + /// with reserved_ip_ranges: \[a, b, c\], using it with \[a, b\] or \[d, e\] + /// is disallowed. Note: we only support up to 5 deployment groups(not + /// including 'default'). + /// + /// Optional. + core.String? deploymentGroup; - /// Excludes Feature values with feature generation timestamp before this - /// timestamp. + /// The display name of the DeployedIndex. /// - /// If not set, retrieve oldest values kept in Feature Store. Timestamp, if - /// present, must not have higher than millisecond precision. + /// If not provided upon creation, the Index's display_name is used. + core.String? displayName; + + /// If true, private endpoint's access logs are sent to Cloud Logging. + /// + /// These logs are like standard server access logs, containing information + /// like timestamp and latency for each MatchRequest. Note that logs may incur + /// a cost, especially if the deployed index receives a high queries per + /// second rate (QPS). Estimate your costs before enabling this option. /// /// Optional. - core.String? startTime; + core.bool? enableAccessLogging; - GoogleCloudAiplatformV1BatchReadFeatureValuesRequest({ - this.bigqueryReadInstances, - this.csvReadInstances, - this.destination, - this.entityTypeSpecs, - this.passThroughFields, - this.startTime, - }); + /// The user specified ID of the DeployedIndex. + /// + /// The ID can be up to 128 characters long and must start with a letter and + /// only contain letters, numbers, and underscores. The ID must be unique + /// within the project it is created in. + /// + /// Required. + core.String? id; - GoogleCloudAiplatformV1BatchReadFeatureValuesRequest.fromJson(core.Map json_) - : this( - bigqueryReadInstances: json_.containsKey('bigqueryReadInstances') - ? GoogleCloudAiplatformV1BigQuerySource.fromJson( - json_['bigqueryReadInstances'] - as core.Map) - : null, - csvReadInstances: json_.containsKey('csvReadInstances') - ? GoogleCloudAiplatformV1CsvSource.fromJson( - json_['csvReadInstances'] - as core.Map) - : null, - destination: json_.containsKey('destination') - ? 
GoogleCloudAiplatformV1FeatureValueDestination.fromJson( - json_['destination'] as core.Map) - : null, - entityTypeSpecs: (json_['entityTypeSpecs'] as core.List?) - ?.map((value) => - GoogleCloudAiplatformV1BatchReadFeatureValuesRequestEntityTypeSpec - .fromJson(value as core.Map)) - .toList(), - passThroughFields: (json_['passThroughFields'] as core.List?) - ?.map((value) => - GoogleCloudAiplatformV1BatchReadFeatureValuesRequestPassThroughField - .fromJson(value as core.Map)) - .toList(), - startTime: json_['startTime'] as core.String?, - ); + /// The name of the Index this is the deployment of. + /// + /// We may refer to this Index as the DeployedIndex's "original" Index. + /// + /// Required. + core.String? index; - core.Map toJson() => { - if (bigqueryReadInstances != null) - 'bigqueryReadInstances': bigqueryReadInstances!, - if (csvReadInstances != null) 'csvReadInstances': csvReadInstances!, - if (destination != null) 'destination': destination!, - if (entityTypeSpecs != null) 'entityTypeSpecs': entityTypeSpecs!, - if (passThroughFields != null) 'passThroughFields': passThroughFields!, - if (startTime != null) 'startTime': startTime!, - }; -} + /// The DeployedIndex may depend on various data on its original Index. + /// + /// Additionally when certain changes to the original Index are being done + /// (e.g. when what the Index contains is being changed) the DeployedIndex may + /// be asynchronously updated in the background to reflect these changes. If + /// this timestamp's value is at least the Index.update_time of the original + /// Index, it means that this DeployedIndex and the original Index are in + /// sync. If this timestamp is older, then to see which updates this + /// DeployedIndex already contains (and which it does not), one must list the + /// operations that are running on the original Index. Only the successfully + /// completed Operations with update_time equal or before this sync time are + /// contained in this DeployedIndex. + /// + /// Output only. + core.String? indexSyncTime; -/// Selects Features of an EntityType to read values of and specifies read -/// settings. -class GoogleCloudAiplatformV1BatchReadFeatureValuesRequestEntityTypeSpec { - /// ID of the EntityType to select Features. + /// Provides paths for users to send requests directly to the deployed index + /// services running on Cloud via private services access. /// - /// The EntityType id is the entity_type_id specified during EntityType - /// creation. + /// This field is populated if network is configured. /// - /// Required. - core.String? entityTypeId; + /// Output only. + GoogleCloudAiplatformV1IndexPrivateEndpoints? privateEndpoints; - /// Selectors choosing which Feature values to read from the EntityType. + /// If set for PSC deployed index, PSC connection will be automatically + /// created after deployment is done and the endpoint information is populated + /// in private_endpoints.psc_automated_endpoints. /// - /// Required. - GoogleCloudAiplatformV1FeatureSelector? featureSelector; + /// Optional. + core.List? pscAutomationConfigs; - /// Per-Feature settings for the batch read. - core.List? settings; + /// A list of reserved ip ranges under the VPC network that can be used for + /// this DeployedIndex. + /// + /// If set, we will deploy the index within the provided ip ranges. Otherwise, + /// the index might be deployed to any ip ranges under the provided VPC + /// network. 
The value should be the name of the address + /// (https://cloud.google.com/compute/docs/reference/rest/v1/addresses) + /// Example: \['vertex-ai-ip-range'\]. For more information about subnets and + /// network IP ranges, please see + /// https://cloud.google.com/vpc/docs/subnets#manually_created_subnet_ip_ranges. + /// + /// Optional. + core.List? reservedIpRanges; - GoogleCloudAiplatformV1BatchReadFeatureValuesRequestEntityTypeSpec({ - this.entityTypeId, - this.featureSelector, - this.settings, + GoogleCloudAiplatformV1DeployedIndex({ + this.automaticResources, + this.createTime, + this.dedicatedResources, + this.deployedIndexAuthConfig, + this.deploymentGroup, + this.displayName, + this.enableAccessLogging, + this.id, + this.index, + this.indexSyncTime, + this.privateEndpoints, + this.pscAutomationConfigs, + this.reservedIpRanges, }); - GoogleCloudAiplatformV1BatchReadFeatureValuesRequestEntityTypeSpec.fromJson( - core.Map json_) + GoogleCloudAiplatformV1DeployedIndex.fromJson(core.Map json_) : this( - entityTypeId: json_['entityTypeId'] as core.String?, - featureSelector: json_.containsKey('featureSelector') - ? GoogleCloudAiplatformV1FeatureSelector.fromJson( - json_['featureSelector'] + automaticResources: json_.containsKey('automaticResources') + ? GoogleCloudAiplatformV1AutomaticResources.fromJson( + json_['automaticResources'] as core.Map) : null, - settings: (json_['settings'] as core.List?) + createTime: json_['createTime'] as core.String?, + dedicatedResources: json_.containsKey('dedicatedResources') + ? GoogleCloudAiplatformV1DedicatedResources.fromJson( + json_['dedicatedResources'] + as core.Map) + : null, + deployedIndexAuthConfig: json_.containsKey('deployedIndexAuthConfig') + ? GoogleCloudAiplatformV1DeployedIndexAuthConfig.fromJson( + json_['deployedIndexAuthConfig'] + as core.Map) + : null, + deploymentGroup: json_['deploymentGroup'] as core.String?, + displayName: json_['displayName'] as core.String?, + enableAccessLogging: json_['enableAccessLogging'] as core.bool?, + id: json_['id'] as core.String?, + index: json_['index'] as core.String?, + indexSyncTime: json_['indexSyncTime'] as core.String?, + privateEndpoints: json_.containsKey('privateEndpoints') + ? GoogleCloudAiplatformV1IndexPrivateEndpoints.fromJson( + json_['privateEndpoints'] + as core.Map) + : null, + pscAutomationConfigs: (json_['pscAutomationConfigs'] as core.List?) ?.map((value) => - GoogleCloudAiplatformV1DestinationFeatureSetting.fromJson( + GoogleCloudAiplatformV1PSCAutomationConfig.fromJson( value as core.Map)) .toList(), + reservedIpRanges: (json_['reservedIpRanges'] as core.List?) 
+ ?.map((value) => value as core.String) + .toList(), ); core.Map toJson() => { - if (entityTypeId != null) 'entityTypeId': entityTypeId!, - if (featureSelector != null) 'featureSelector': featureSelector!, - if (settings != null) 'settings': settings!, + if (automaticResources != null) + 'automaticResources': automaticResources!, + if (createTime != null) 'createTime': createTime!, + if (dedicatedResources != null) + 'dedicatedResources': dedicatedResources!, + if (deployedIndexAuthConfig != null) + 'deployedIndexAuthConfig': deployedIndexAuthConfig!, + if (deploymentGroup != null) 'deploymentGroup': deploymentGroup!, + if (displayName != null) 'displayName': displayName!, + if (enableAccessLogging != null) + 'enableAccessLogging': enableAccessLogging!, + if (id != null) 'id': id!, + if (index != null) 'index': index!, + if (indexSyncTime != null) 'indexSyncTime': indexSyncTime!, + if (privateEndpoints != null) 'privateEndpoints': privateEndpoints!, + if (pscAutomationConfigs != null) + 'pscAutomationConfigs': pscAutomationConfigs!, + if (reservedIpRanges != null) 'reservedIpRanges': reservedIpRanges!, }; } -/// Describe pass-through fields in read_instance source. -class GoogleCloudAiplatformV1BatchReadFeatureValuesRequestPassThroughField { - /// The name of the field in the CSV header or the name of the column in - /// BigQuery table. - /// - /// The naming restriction is the same as Feature.name. - /// - /// Required. - core.String? fieldName; +/// Used to set up the auth on the DeployedIndex's private endpoint. +class GoogleCloudAiplatformV1DeployedIndexAuthConfig { + /// Defines the authentication provider that the DeployedIndex uses. + GoogleCloudAiplatformV1DeployedIndexAuthConfigAuthProvider? authProvider; - GoogleCloudAiplatformV1BatchReadFeatureValuesRequestPassThroughField({ - this.fieldName, + GoogleCloudAiplatformV1DeployedIndexAuthConfig({ + this.authProvider, }); - GoogleCloudAiplatformV1BatchReadFeatureValuesRequestPassThroughField.fromJson( - core.Map json_) + GoogleCloudAiplatformV1DeployedIndexAuthConfig.fromJson(core.Map json_) : this( - fieldName: json_['fieldName'] as core.String?, + authProvider: json_.containsKey('authProvider') + ? GoogleCloudAiplatformV1DeployedIndexAuthConfigAuthProvider + .fromJson(json_['authProvider'] + as core.Map) + : null, ); core.Map toJson() => { - if (fieldName != null) 'fieldName': fieldName!, + if (authProvider != null) 'authProvider': authProvider!, }; } -/// Response message for TensorboardService.BatchReadTensorboardTimeSeriesData. -class GoogleCloudAiplatformV1BatchReadTensorboardTimeSeriesDataResponse { - /// The returned time series data. - core.List? timeSeriesData; +/// Configuration for an authentication provider, including support for \[JSON +/// Web Token +/// (JWT)\](https://tools.ietf.org/html/draft-ietf-oauth-json-web-token-32). +class GoogleCloudAiplatformV1DeployedIndexAuthConfigAuthProvider { + /// A list of allowed JWT issuers. + /// + /// Each entry must be a valid Google service account, in the following + /// format: `service-account-name@project-id.iam.gserviceaccount.com` + core.List? allowedIssuers; - GoogleCloudAiplatformV1BatchReadTensorboardTimeSeriesDataResponse({ - this.timeSeriesData, + /// The list of JWT + /// [audiences](https://tools.ietf.org/html/draft-ietf-oauth-json-web-token-32#section-4.1.3). + /// + /// that are allowed to access. A JWT containing any of these audiences will + /// be accepted. + core.List? 
audiences; + + GoogleCloudAiplatformV1DeployedIndexAuthConfigAuthProvider({ + this.allowedIssuers, + this.audiences, }); - GoogleCloudAiplatformV1BatchReadTensorboardTimeSeriesDataResponse.fromJson( + GoogleCloudAiplatformV1DeployedIndexAuthConfigAuthProvider.fromJson( core.Map json_) : this( - timeSeriesData: (json_['timeSeriesData'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1TimeSeriesData.fromJson( - value as core.Map)) + allowedIssuers: (json_['allowedIssuers'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + audiences: (json_['audiences'] as core.List?) + ?.map((value) => value as core.String) .toList(), ); core.Map toJson() => { - if (timeSeriesData != null) 'timeSeriesData': timeSeriesData!, + if (allowedIssuers != null) 'allowedIssuers': allowedIssuers!, + if (audiences != null) 'audiences': audiences!, }; } -/// The BigQuery location for the output content. -class GoogleCloudAiplatformV1BigQueryDestination { - /// BigQuery URI to a project or table, up to 2000 characters long. +/// Points to a DeployedIndex. +class GoogleCloudAiplatformV1DeployedIndexRef { + /// The ID of the DeployedIndex in the above IndexEndpoint. /// - /// When only the project is specified, the Dataset and Table is created. When - /// the full table reference is specified, the Dataset must exist and table - /// must not exist. Accepted forms: * BigQuery path. For example: - /// `bq://projectId` or `bq://projectId.bqDatasetId` or - /// `bq://projectId.bqDatasetId.bqTableId`. + /// Immutable. + core.String? deployedIndexId; + + /// The display name of the DeployedIndex. /// - /// Required. - core.String? outputUri; + /// Output only. + core.String? displayName; - GoogleCloudAiplatformV1BigQueryDestination({ - this.outputUri, + /// A resource name of the IndexEndpoint. + /// + /// Immutable. + core.String? indexEndpoint; + + GoogleCloudAiplatformV1DeployedIndexRef({ + this.deployedIndexId, + this.displayName, + this.indexEndpoint, }); - GoogleCloudAiplatformV1BigQueryDestination.fromJson(core.Map json_) + GoogleCloudAiplatformV1DeployedIndexRef.fromJson(core.Map json_) : this( - outputUri: json_['outputUri'] as core.String?, + deployedIndexId: json_['deployedIndexId'] as core.String?, + displayName: json_['displayName'] as core.String?, + indexEndpoint: json_['indexEndpoint'] as core.String?, ); core.Map toJson() => { - if (outputUri != null) 'outputUri': outputUri!, + if (deployedIndexId != null) 'deployedIndexId': deployedIndexId!, + if (displayName != null) 'displayName': displayName!, + if (indexEndpoint != null) 'indexEndpoint': indexEndpoint!, }; } -/// The BigQuery location for the input content. -class GoogleCloudAiplatformV1BigQuerySource { - /// BigQuery URI to a table, up to 2000 characters long. - /// - /// Accepted forms: * BigQuery path. For example: - /// `bq://projectId.bqDatasetId.bqTableId`. +/// A deployment of a Model. +/// +/// Endpoints contain one or more DeployedModels. +class GoogleCloudAiplatformV1DeployedModel { + /// A description of resources that to large degree are decided by Vertex AI, + /// and require only a modest additional configuration. + GoogleCloudAiplatformV1AutomaticResources? automaticResources; + + /// Timestamp when the DeployedModel was created. /// - /// Required. - core.String? inputUri; + /// Output only. + core.String? createTime; - GoogleCloudAiplatformV1BigQuerySource({ - this.inputUri, - }); + /// A description of resources that are dedicated to the DeployedModel, and + /// that need a higher degree of manual configuration. 
+ GoogleCloudAiplatformV1DedicatedResources? dedicatedResources; - GoogleCloudAiplatformV1BigQuerySource.fromJson(core.Map json_) - : this( - inputUri: json_['inputUri'] as core.String?, - ); + /// For custom-trained Models and AutoML Tabular Models, the container of the + /// DeployedModel instances will send `stderr` and `stdout` streams to Cloud + /// Logging by default. + /// + /// Please note that the logs incur cost, which are subject to + /// [Cloud Logging pricing](https://cloud.google.com/logging/pricing). User + /// can disable container logging by setting this flag to true. + core.bool? disableContainerLogging; - core.Map toJson() => { - if (inputUri != null) 'inputUri': inputUri!, - }; -} + /// If true, deploy the model without explainable feature, regardless the + /// existence of Model.explanation_spec or explanation_spec. + core.bool? disableExplanations; -/// Input for bleu metric. -class GoogleCloudAiplatformV1BleuInput { - /// Repeated bleu instances. + /// The display name of the DeployedModel. /// - /// Required. - core.List? instances; + /// If not provided upon creation, the Model's display_name is used. + core.String? displayName; - /// Spec for bleu score metric. + /// If true, online prediction access logs are sent to Cloud Logging. /// - /// Required. - GoogleCloudAiplatformV1BleuSpec? metricSpec; + /// These logs are like standard server access logs, containing information + /// like timestamp and latency for each prediction request. Note that logs may + /// incur a cost, especially if your project receives prediction requests at a + /// high queries per second rate (QPS). Estimate your costs before enabling + /// this option. + core.bool? enableAccessLogging; - GoogleCloudAiplatformV1BleuInput({ - this.instances, - this.metricSpec, - }); + /// Explanation configuration for this DeployedModel. + /// + /// When deploying a Model using EndpointService.DeployModel, this value + /// overrides the value of Model.explanation_spec. All fields of + /// explanation_spec are optional in the request. If a field of + /// explanation_spec is not populated, the value of the same field of + /// Model.explanation_spec is inherited. If the corresponding + /// Model.explanation_spec is not populated, all fields of the + /// explanation_spec will be used for the explanation configuration. + GoogleCloudAiplatformV1ExplanationSpec? explanationSpec; - GoogleCloudAiplatformV1BleuInput.fromJson(core.Map json_) - : this( - instances: (json_['instances'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1BleuInstance.fromJson( - value as core.Map)) - .toList(), - metricSpec: json_.containsKey('metricSpec') - ? GoogleCloudAiplatformV1BleuSpec.fromJson( - json_['metricSpec'] as core.Map) - : null, - ); + /// Configuration for faster model deployment. + GoogleCloudAiplatformV1FasterDeploymentConfig? fasterDeploymentConfig; - core.Map toJson() => { - if (instances != null) 'instances': instances!, - if (metricSpec != null) 'metricSpec': metricSpec!, - }; -} + /// The ID of the DeployedModel. + /// + /// If not provided upon deployment, Vertex AI will generate a value for this + /// ID. This value should be 1-10 characters, and valid characters are + /// `/[0-9]/`. + /// + /// Immutable. + core.String? id; -/// Spec for bleu instance. -typedef GoogleCloudAiplatformV1BleuInstance = $Instance00; + /// The resource name of the Model that this is the deployment of. + /// + /// Note that the Model may be in a different location than the + /// DeployedModel's Endpoint. 
The resource name may contain version id or + /// version alias to specify the version. Example: + /// `projects/{project}/locations/{location}/models/{model}@2` or + /// `projects/{project}/locations/{location}/models/{model}@golden` if no + /// version is specified, the default version will be deployed. + /// + /// Required. + core.String? model; -/// Bleu metric value for an instance. -class GoogleCloudAiplatformV1BleuMetricValue { - /// Bleu score. + /// The version ID of the model that is deployed. /// /// Output only. - core.double? score; - - GoogleCloudAiplatformV1BleuMetricValue({ - this.score, - }); - - GoogleCloudAiplatformV1BleuMetricValue.fromJson(core.Map json_) - : this( - score: (json_['score'] as core.num?)?.toDouble(), - ); - - core.Map toJson() => { - if (score != null) 'score': score!, - }; -} + core.String? modelVersionId; -/// Results for bleu metric. -class GoogleCloudAiplatformV1BleuResults { - /// Bleu metric values. + /// Provide paths for users to send predict/explain/health requests directly + /// to the deployed model services running on Cloud via private services + /// access. + /// + /// This field is populated if network is configured. /// /// Output only. - core.List? bleuMetricValues; - - GoogleCloudAiplatformV1BleuResults({ - this.bleuMetricValues, - }); - - GoogleCloudAiplatformV1BleuResults.fromJson(core.Map json_) - : this( - bleuMetricValues: (json_['bleuMetricValues'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1BleuMetricValue.fromJson( - value as core.Map)) - .toList(), - ); - - core.Map toJson() => { - if (bleuMetricValues != null) 'bleuMetricValues': bleuMetricValues!, - }; -} + GoogleCloudAiplatformV1PrivateEndpoints? privateEndpoints; -/// Spec for bleu score metric - calculates the precision of n-grams in the -/// prediction as compared to reference - returns a score ranging between 0 to -/// 1. -class GoogleCloudAiplatformV1BleuSpec { - /// Whether to use_effective_order to compute bleu score. + /// The service account that the DeployedModel's container runs as. /// - /// Optional. - core.bool? useEffectiveOrder; - - GoogleCloudAiplatformV1BleuSpec({ - this.useEffectiveOrder, - }); - - GoogleCloudAiplatformV1BleuSpec.fromJson(core.Map json_) - : this( - useEffectiveOrder: json_['useEffectiveOrder'] as core.bool?, - ); - - core.Map toJson() => { - if (useEffectiveOrder != null) 'useEffectiveOrder': useEffectiveOrder!, - }; -} + /// Specify the email address of the service account. If this service account + /// is not specified, the container runs as a service account that doesn't + /// have access to the resource project. Users deploying the Model must have + /// the `iam.serviceAccounts.actAs` permission on this service account. + core.String? serviceAccount; -/// Content blob. -/// -/// It's preferred to send as text directly rather than raw bytes. -class GoogleCloudAiplatformV1Blob { - /// Raw bytes. + /// The resource name of the shared DeploymentResourcePool to deploy on. /// - /// Required. - core.String? data; - core.List get dataAsBytes => convert.base64.decode(data!); + /// Format: + /// `projects/{project}/locations/{location}/deploymentResourcePools/{deployment_resource_pool}` + core.String? sharedResources; - set dataAsBytes(core.List bytes_) { - data = - convert.base64.encode(bytes_).replaceAll('/', '_').replaceAll('+', '-'); - } + /// Runtime status of the deployed model. + /// + /// Output only. + GoogleCloudAiplatformV1DeployedModelStatus? status; - /// The IANA standard MIME type of the source data. 
+ /// System labels to apply to Model Garden deployments. /// - /// Required. - core.String? mimeType; + /// System labels are managed by Google for internal use only. + core.Map? systemLabels; - GoogleCloudAiplatformV1Blob({ - this.data, - this.mimeType, + GoogleCloudAiplatformV1DeployedModel({ + this.automaticResources, + this.createTime, + this.dedicatedResources, + this.disableContainerLogging, + this.disableExplanations, + this.displayName, + this.enableAccessLogging, + this.explanationSpec, + this.fasterDeploymentConfig, + this.id, + this.model, + this.modelVersionId, + this.privateEndpoints, + this.serviceAccount, + this.sharedResources, + this.status, + this.systemLabels, }); - GoogleCloudAiplatformV1Blob.fromJson(core.Map json_) + GoogleCloudAiplatformV1DeployedModel.fromJson(core.Map json_) : this( - data: json_['data'] as core.String?, - mimeType: json_['mimeType'] as core.String?, + automaticResources: json_.containsKey('automaticResources') + ? GoogleCloudAiplatformV1AutomaticResources.fromJson( + json_['automaticResources'] + as core.Map) + : null, + createTime: json_['createTime'] as core.String?, + dedicatedResources: json_.containsKey('dedicatedResources') + ? GoogleCloudAiplatformV1DedicatedResources.fromJson( + json_['dedicatedResources'] + as core.Map) + : null, + disableContainerLogging: + json_['disableContainerLogging'] as core.bool?, + disableExplanations: json_['disableExplanations'] as core.bool?, + displayName: json_['displayName'] as core.String?, + enableAccessLogging: json_['enableAccessLogging'] as core.bool?, + explanationSpec: json_.containsKey('explanationSpec') + ? GoogleCloudAiplatformV1ExplanationSpec.fromJson( + json_['explanationSpec'] + as core.Map) + : null, + fasterDeploymentConfig: json_.containsKey('fasterDeploymentConfig') + ? GoogleCloudAiplatformV1FasterDeploymentConfig.fromJson( + json_['fasterDeploymentConfig'] + as core.Map) + : null, + id: json_['id'] as core.String?, + model: json_['model'] as core.String?, + modelVersionId: json_['modelVersionId'] as core.String?, + privateEndpoints: json_.containsKey('privateEndpoints') + ? GoogleCloudAiplatformV1PrivateEndpoints.fromJson( + json_['privateEndpoints'] + as core.Map) + : null, + serviceAccount: json_['serviceAccount'] as core.String?, + sharedResources: json_['sharedResources'] as core.String?, + status: json_.containsKey('status') + ? GoogleCloudAiplatformV1DeployedModelStatus.fromJson( + json_['status'] as core.Map) + : null, + systemLabels: + (json_['systemLabels'] as core.Map?) 
+ ?.map( + (key, value) => core.MapEntry( + key, + value as core.String, + ), + ), ); core.Map toJson() => { - if (data != null) 'data': data!, - if (mimeType != null) 'mimeType': mimeType!, + if (automaticResources != null) + 'automaticResources': automaticResources!, + if (createTime != null) 'createTime': createTime!, + if (dedicatedResources != null) + 'dedicatedResources': dedicatedResources!, + if (disableContainerLogging != null) + 'disableContainerLogging': disableContainerLogging!, + if (disableExplanations != null) + 'disableExplanations': disableExplanations!, + if (displayName != null) 'displayName': displayName!, + if (enableAccessLogging != null) + 'enableAccessLogging': enableAccessLogging!, + if (explanationSpec != null) 'explanationSpec': explanationSpec!, + if (fasterDeploymentConfig != null) + 'fasterDeploymentConfig': fasterDeploymentConfig!, + if (id != null) 'id': id!, + if (model != null) 'model': model!, + if (modelVersionId != null) 'modelVersionId': modelVersionId!, + if (privateEndpoints != null) 'privateEndpoints': privateEndpoints!, + if (serviceAccount != null) 'serviceAccount': serviceAccount!, + if (sharedResources != null) 'sharedResources': sharedResources!, + if (status != null) 'status': status!, + if (systemLabels != null) 'systemLabels': systemLabels!, }; } -/// Config for blur baseline. -/// -/// When enabled, a linear path from the maximally blurred image to the input -/// image is created. Using a blurred baseline instead of zero (black image) is -/// motivated by the BlurIG approach explained here: -/// https://arxiv.org/abs/2004.03383 -class GoogleCloudAiplatformV1BlurBaselineConfig { - /// The standard deviation of the blur kernel for the blurred baseline. +/// Points to a DeployedModel. +class GoogleCloudAiplatformV1DeployedModelRef { + /// An ID of a DeployedModel in the above Endpoint. /// - /// The same blurring parameter is used for both the height and the width - /// dimension. If not set, the method defaults to the zero (i.e. black for - /// images) baseline. - core.double? maxBlurSigma; + /// Immutable. + core.String? deployedModelId; - GoogleCloudAiplatformV1BlurBaselineConfig({ - this.maxBlurSigma, + /// A resource name of an Endpoint. + /// + /// Immutable. + core.String? endpoint; + + GoogleCloudAiplatformV1DeployedModelRef({ + this.deployedModelId, + this.endpoint, }); - GoogleCloudAiplatformV1BlurBaselineConfig.fromJson(core.Map json_) + GoogleCloudAiplatformV1DeployedModelRef.fromJson(core.Map json_) : this( - maxBlurSigma: (json_['maxBlurSigma'] as core.num?)?.toDouble(), + deployedModelId: json_['deployedModelId'] as core.String?, + endpoint: json_['endpoint'] as core.String?, ); core.Map toJson() => { - if (maxBlurSigma != null) 'maxBlurSigma': maxBlurSigma!, + if (deployedModelId != null) 'deployedModelId': deployedModelId!, + if (endpoint != null) 'endpoint': endpoint!, }; } -/// A list of boolean values. -class GoogleCloudAiplatformV1BoolArray { - /// A list of bool values. - core.List? values; +/// Runtime status of the deployed model. +class GoogleCloudAiplatformV1DeployedModelStatus { + /// The number of available replicas of the deployed model. + /// + /// Output only. + core.int? availableReplicaCount; - GoogleCloudAiplatformV1BoolArray({ - this.values, + /// The time at which the status was last updated. + /// + /// Output only. + core.String? lastUpdateTime; + + /// The latest deployed model's status message (if any). + /// + /// Output only. + core.String? 
message; + + GoogleCloudAiplatformV1DeployedModelStatus({ + this.availableReplicaCount, + this.lastUpdateTime, + this.message, }); - GoogleCloudAiplatformV1BoolArray.fromJson(core.Map json_) + GoogleCloudAiplatformV1DeployedModelStatus.fromJson(core.Map json_) : this( - values: (json_['values'] as core.List?) - ?.map((value) => value as core.bool) - .toList(), + availableReplicaCount: json_['availableReplicaCount'] as core.int?, + lastUpdateTime: json_['lastUpdateTime'] as core.String?, + message: json_['message'] as core.String?, ); core.Map toJson() => { - if (values != null) 'values': values!, + if (availableReplicaCount != null) + 'availableReplicaCount': availableReplicaCount!, + if (lastUpdateTime != null) 'lastUpdateTime': lastUpdateTime!, + if (message != null) 'message': message!, }; } -/// Request message for JobService.CancelBatchPredictionJob. -typedef GoogleCloudAiplatformV1CancelBatchPredictionJobRequest = $Empty; - -/// Request message for JobService.CancelCustomJob. -typedef GoogleCloudAiplatformV1CancelCustomJobRequest = $Empty; - -/// Request message for JobService.CancelDataLabelingJob. -typedef GoogleCloudAiplatformV1CancelDataLabelingJobRequest = $Empty; - -/// Request message for JobService.CancelHyperparameterTuningJob. -typedef GoogleCloudAiplatformV1CancelHyperparameterTuningJobRequest = $Empty; - -/// Request message for JobService.CancelNasJob. -typedef GoogleCloudAiplatformV1CancelNasJobRequest = $Empty; - -/// Request message for PipelineService.CancelPipelineJob. -typedef GoogleCloudAiplatformV1CancelPipelineJobRequest = $Empty; - -/// Request message for PipelineService.CancelTrainingPipeline. -typedef GoogleCloudAiplatformV1CancelTrainingPipelineRequest = $Empty; - -/// Request message for GenAiTuningService.CancelTuningJob. -typedef GoogleCloudAiplatformV1CancelTuningJobRequest = $Empty; - -/// A response candidate generated from the model. -class GoogleCloudAiplatformV1Candidate { - /// Average log probability score of the candidate. +/// A description of resources that can be shared by multiple DeployedModels, +/// whose underlying specification consists of a DedicatedResources. +class GoogleCloudAiplatformV1DeploymentResourcePool { + /// Timestamp when this DeploymentResourcePool was created. /// /// Output only. - core.double? avgLogprobs; + core.String? createTime; - /// Source attribution of the generated content. + /// The underlying DedicatedResources that the DeploymentResourcePool uses. /// - /// Output only. - GoogleCloudAiplatformV1CitationMetadata? citationMetadata; + /// Required. + GoogleCloudAiplatformV1DedicatedResources? dedicatedResources; - /// Content parts of the candidate. + /// If the DeploymentResourcePool is deployed with custom-trained Models or + /// AutoML Tabular Models, the container(s) of the DeploymentResourcePool will + /// send `stderr` and `stdout` streams to Cloud Logging by default. /// - /// Output only. - GoogleCloudAiplatformV1Content? content; + /// Please note that the logs incur cost, which are subject to + /// [Cloud Logging pricing](https://cloud.google.com/logging/pricing). User + /// can disable container logging by setting this flag to true. + core.bool? disableContainerLogging; - /// Describes the reason the mode stopped generating tokens in more detail. - /// - /// This is only filled when `finish_reason` is set. + /// Customer-managed encryption key spec for a DeploymentResourcePool. /// - /// Output only. - core.String? finishMessage; + /// If set, this DeploymentResourcePool will be secured by this key. 
Endpoints + /// and the DeploymentResourcePool they deploy in need to have the same + /// EncryptionSpec. + GoogleCloudAiplatformV1EncryptionSpec? encryptionSpec; - /// The reason why the model stopped generating tokens. - /// - /// If empty, the model has not stopped generating the tokens. + /// The resource name of the DeploymentResourcePool. /// - /// Output only. - /// Possible string values are: - /// - "FINISH_REASON_UNSPECIFIED" : The finish reason is unspecified. - /// - "STOP" : Token generation reached a natural stopping point or a - /// configured stop sequence. - /// - "MAX_TOKENS" : Token generation reached the configured maximum output - /// tokens. - /// - "SAFETY" : Token generation stopped because the content potentially - /// contains safety violations. NOTE: When streaming, content is empty if - /// content filters blocks the output. - /// - "RECITATION" : Token generation stopped because the content potentially - /// contains copyright violations. - /// - "OTHER" : All other reasons that stopped the token generation. - /// - "BLOCKLIST" : Token generation stopped because the content contains - /// forbidden terms. - /// - "PROHIBITED_CONTENT" : Token generation stopped for potentially - /// containing prohibited content. - /// - "SPII" : Token generation stopped because the content potentially - /// contains Sensitive Personally Identifiable Information (SPII). - /// - "MALFORMED_FUNCTION_CALL" : The function call generated by the model is - /// invalid. - core.String? finishReason; - - /// Metadata specifies sources used to ground generated content. + /// Format: + /// `projects/{project}/locations/{location}/deploymentResourcePools/{deployment_resource_pool}` /// - /// Output only. - GoogleCloudAiplatformV1GroundingMetadata? groundingMetadata; + /// Immutable. + core.String? name; - /// Index of the candidate. + /// Reserved for future use. /// /// Output only. - core.int? index; + core.bool? satisfiesPzi; - /// Log-likelihood scores for the response tokens and top tokens + /// Reserved for future use. /// /// Output only. - GoogleCloudAiplatformV1LogprobsResult? logprobsResult; + core.bool? satisfiesPzs; - /// List of ratings for the safety of a response candidate. - /// - /// There is at most one rating per category. + /// The service account that the DeploymentResourcePool's container(s) run as. /// - /// Output only. - core.List? safetyRatings; + /// Specify the email address of the service account. If this service account + /// is not specified, the container(s) run as a service account that doesn't + /// have access to the resource project. Users deploying the Models to this + /// DeploymentResourcePool must have the `iam.serviceAccounts.actAs` + /// permission on this service account. + core.String? serviceAccount; - GoogleCloudAiplatformV1Candidate({ - this.avgLogprobs, - this.citationMetadata, - this.content, - this.finishMessage, - this.finishReason, - this.groundingMetadata, - this.index, - this.logprobsResult, - this.safetyRatings, + GoogleCloudAiplatformV1DeploymentResourcePool({ + this.createTime, + this.dedicatedResources, + this.disableContainerLogging, + this.encryptionSpec, + this.name, + this.satisfiesPzi, + this.satisfiesPzs, + this.serviceAccount, }); - GoogleCloudAiplatformV1Candidate.fromJson(core.Map json_) + GoogleCloudAiplatformV1DeploymentResourcePool.fromJson(core.Map json_) : this( - avgLogprobs: (json_['avgLogprobs'] as core.num?)?.toDouble(), - citationMetadata: json_.containsKey('citationMetadata') - ? 
GoogleCloudAiplatformV1CitationMetadata.fromJson( - json_['citationMetadata'] - as core.Map) - : null, - content: json_.containsKey('content') - ? GoogleCloudAiplatformV1Content.fromJson( - json_['content'] as core.Map) - : null, - finishMessage: json_['finishMessage'] as core.String?, - finishReason: json_['finishReason'] as core.String?, - groundingMetadata: json_.containsKey('groundingMetadata') - ? GoogleCloudAiplatformV1GroundingMetadata.fromJson( - json_['groundingMetadata'] + createTime: json_['createTime'] as core.String?, + dedicatedResources: json_.containsKey('dedicatedResources') + ? GoogleCloudAiplatformV1DedicatedResources.fromJson( + json_['dedicatedResources'] as core.Map) : null, - index: json_['index'] as core.int?, - logprobsResult: json_.containsKey('logprobsResult') - ? GoogleCloudAiplatformV1LogprobsResult.fromJson( - json_['logprobsResult'] + disableContainerLogging: + json_['disableContainerLogging'] as core.bool?, + encryptionSpec: json_.containsKey('encryptionSpec') + ? GoogleCloudAiplatformV1EncryptionSpec.fromJson( + json_['encryptionSpec'] as core.Map) : null, - safetyRatings: (json_['safetyRatings'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1SafetyRating.fromJson( - value as core.Map)) - .toList(), + name: json_['name'] as core.String?, + satisfiesPzi: json_['satisfiesPzi'] as core.bool?, + satisfiesPzs: json_['satisfiesPzs'] as core.bool?, + serviceAccount: json_['serviceAccount'] as core.String?, ); core.Map toJson() => { - if (avgLogprobs != null) 'avgLogprobs': avgLogprobs!, - if (citationMetadata != null) 'citationMetadata': citationMetadata!, - if (content != null) 'content': content!, - if (finishMessage != null) 'finishMessage': finishMessage!, - if (finishReason != null) 'finishReason': finishReason!, - if (groundingMetadata != null) 'groundingMetadata': groundingMetadata!, - if (index != null) 'index': index!, - if (logprobsResult != null) 'logprobsResult': logprobsResult!, - if (safetyRatings != null) 'safetyRatings': safetyRatings!, + if (createTime != null) 'createTime': createTime!, + if (dedicatedResources != null) + 'dedicatedResources': dedicatedResources!, + if (disableContainerLogging != null) + 'disableContainerLogging': disableContainerLogging!, + if (encryptionSpec != null) 'encryptionSpec': encryptionSpec!, + if (name != null) 'name': name!, + if (satisfiesPzi != null) 'satisfiesPzi': satisfiesPzi!, + if (satisfiesPzs != null) 'satisfiesPzs': satisfiesPzs!, + if (serviceAccount != null) 'serviceAccount': serviceAccount!, }; } -/// Request message for VizierService.CheckTrialEarlyStoppingState. -typedef GoogleCloudAiplatformV1CheckTrialEarlyStoppingStateRequest = $Empty; - -/// Source attributions for content. -class GoogleCloudAiplatformV1Citation { - /// End index into the content. - /// - /// Output only. - core.int? endIndex; - - /// License of the attribution. - /// - /// Output only. - core.String? license; - - /// Publication date of the attribution. - /// - /// Output only. - GoogleTypeDate? publicationDate; - - /// Start index into the content. - /// - /// Output only. - core.int? startIndex; - - /// Title of the attribution. +class GoogleCloudAiplatformV1DestinationFeatureSetting { + /// Specify the field name in the export destination. /// - /// Output only. - core.String? title; + /// If not specified, Feature ID is used. + core.String? destinationField; - /// Url reference of the attribution. + /// The ID of the Feature to apply the setting to. /// - /// Output only. - core.String? uri; + /// Required. 
+ core.String? featureId; - GoogleCloudAiplatformV1Citation({ - this.endIndex, - this.license, - this.publicationDate, - this.startIndex, - this.title, - this.uri, + GoogleCloudAiplatformV1DestinationFeatureSetting({ + this.destinationField, + this.featureId, }); - GoogleCloudAiplatformV1Citation.fromJson(core.Map json_) + GoogleCloudAiplatformV1DestinationFeatureSetting.fromJson(core.Map json_) : this( - endIndex: json_['endIndex'] as core.int?, - license: json_['license'] as core.String?, - publicationDate: json_.containsKey('publicationDate') - ? GoogleTypeDate.fromJson(json_['publicationDate'] - as core.Map) - : null, - startIndex: json_['startIndex'] as core.int?, - title: json_['title'] as core.String?, - uri: json_['uri'] as core.String?, + destinationField: json_['destinationField'] as core.String?, + featureId: json_['featureId'] as core.String?, ); core.Map toJson() => { - if (endIndex != null) 'endIndex': endIndex!, - if (license != null) 'license': license!, - if (publicationDate != null) 'publicationDate': publicationDate!, - if (startIndex != null) 'startIndex': startIndex!, - if (title != null) 'title': title!, - if (uri != null) 'uri': uri!, + if (destinationField != null) 'destinationField': destinationField!, + if (featureId != null) 'featureId': featureId!, }; } -/// A collection of source attributions for a piece of content. -class GoogleCloudAiplatformV1CitationMetadata { - /// List of citations. - /// - /// Output only. - core.List? citations; +/// Request message for PredictionService.DirectPredict. +class GoogleCloudAiplatformV1DirectPredictRequest { + /// The prediction input. + core.List? inputs; - GoogleCloudAiplatformV1CitationMetadata({ - this.citations, + /// The parameters that govern the prediction. + GoogleCloudAiplatformV1Tensor? parameters; + + GoogleCloudAiplatformV1DirectPredictRequest({ + this.inputs, + this.parameters, }); - GoogleCloudAiplatformV1CitationMetadata.fromJson(core.Map json_) + GoogleCloudAiplatformV1DirectPredictRequest.fromJson(core.Map json_) : this( - citations: (json_['citations'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1Citation.fromJson( + inputs: (json_['inputs'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1Tensor.fromJson( value as core.Map)) .toList(), - ); - - core.Map toJson() => { - if (citations != null) 'citations': citations!, - }; -} - -/// Input for coherence metric. -class GoogleCloudAiplatformV1CoherenceInput { - /// Coherence instance. - /// - /// Required. - GoogleCloudAiplatformV1CoherenceInstance? instance; - - /// Spec for coherence score metric. - /// - /// Required. - GoogleCloudAiplatformV1CoherenceSpec? metricSpec; - - GoogleCloudAiplatformV1CoherenceInput({ - this.instance, - this.metricSpec, - }); - - GoogleCloudAiplatformV1CoherenceInput.fromJson(core.Map json_) - : this( - instance: json_.containsKey('instance') - ? GoogleCloudAiplatformV1CoherenceInstance.fromJson( - json_['instance'] as core.Map) - : null, - metricSpec: json_.containsKey('metricSpec') - ? GoogleCloudAiplatformV1CoherenceSpec.fromJson( - json_['metricSpec'] as core.Map) + parameters: json_.containsKey('parameters') + ? GoogleCloudAiplatformV1Tensor.fromJson( + json_['parameters'] as core.Map) : null, ); core.Map toJson() => { - if (instance != null) 'instance': instance!, - if (metricSpec != null) 'metricSpec': metricSpec!, + if (inputs != null) 'inputs': inputs!, + if (parameters != null) 'parameters': parameters!, }; } -/// Spec for coherence instance. 
-typedef GoogleCloudAiplatformV1CoherenceInstance = $Instance01; - -/// Spec for coherence result. -class GoogleCloudAiplatformV1CoherenceResult { - /// Confidence for coherence score. - /// - /// Output only. - core.double? confidence; - - /// Explanation for coherence score. - /// - /// Output only. - core.String? explanation; +/// Response message for PredictionService.DirectPredict. +class GoogleCloudAiplatformV1DirectPredictResponse { + /// The prediction output. + core.List? outputs; - /// Coherence score. - /// - /// Output only. - core.double? score; + /// The parameters that govern the prediction. + GoogleCloudAiplatformV1Tensor? parameters; - GoogleCloudAiplatformV1CoherenceResult({ - this.confidence, - this.explanation, - this.score, + GoogleCloudAiplatformV1DirectPredictResponse({ + this.outputs, + this.parameters, }); - GoogleCloudAiplatformV1CoherenceResult.fromJson(core.Map json_) + GoogleCloudAiplatformV1DirectPredictResponse.fromJson(core.Map json_) : this( - confidence: (json_['confidence'] as core.num?)?.toDouble(), - explanation: json_['explanation'] as core.String?, - score: (json_['score'] as core.num?)?.toDouble(), + outputs: (json_['outputs'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1Tensor.fromJson( + value as core.Map)) + .toList(), + parameters: json_.containsKey('parameters') + ? GoogleCloudAiplatformV1Tensor.fromJson( + json_['parameters'] as core.Map) + : null, ); core.Map toJson() => { - if (confidence != null) 'confidence': confidence!, - if (explanation != null) 'explanation': explanation!, - if (score != null) 'score': score!, + if (outputs != null) 'outputs': outputs!, + if (parameters != null) 'parameters': parameters!, }; } -/// Spec for coherence score metric. -typedef GoogleCloudAiplatformV1CoherenceSpec = $Spec; - -/// Request message for VizierService.CompleteTrial. -class GoogleCloudAiplatformV1CompleteTrialRequest { - /// If provided, it will be used as the completed Trial's final_measurement; - /// Otherwise, the service will auto-select a previously reported measurement - /// as the final-measurement - /// - /// Optional. - GoogleCloudAiplatformV1Measurement? finalMeasurement; +/// Request message for PredictionService.DirectRawPredict. +class GoogleCloudAiplatformV1DirectRawPredictRequest { + /// The prediction input. + core.String? input; + core.List get inputAsBytes => convert.base64.decode(input!); - /// A human readable reason why the trial was infeasible. - /// - /// This should only be provided if `trial_infeasible` is true. - /// - /// Optional. - core.String? infeasibleReason; + set inputAsBytes(core.List bytes_) { + input = + convert.base64.encode(bytes_).replaceAll('/', '_').replaceAll('+', '-'); + } - /// True if the Trial cannot be run with the given Parameter, and - /// final_measurement will be ignored. + /// Fully qualified name of the API method being invoked to perform + /// predictions. /// - /// Optional. - core.bool? trialInfeasible; + /// Format: `/namespace.Service/Method/` Example: + /// `/tensorflow.serving.PredictionService/Predict` + core.String? methodName; - GoogleCloudAiplatformV1CompleteTrialRequest({ - this.finalMeasurement, - this.infeasibleReason, - this.trialInfeasible, + GoogleCloudAiplatformV1DirectRawPredictRequest({ + this.input, + this.methodName, }); - GoogleCloudAiplatformV1CompleteTrialRequest.fromJson(core.Map json_) + GoogleCloudAiplatformV1DirectRawPredictRequest.fromJson(core.Map json_) : this( - finalMeasurement: json_.containsKey('finalMeasurement') - ? 
GoogleCloudAiplatformV1Measurement.fromJson( - json_['finalMeasurement'] - as core.Map) - : null, - infeasibleReason: json_['infeasibleReason'] as core.String?, - trialInfeasible: json_['trialInfeasible'] as core.bool?, + input: json_['input'] as core.String?, + methodName: json_['methodName'] as core.String?, ); core.Map toJson() => { - if (finalMeasurement != null) 'finalMeasurement': finalMeasurement!, - if (infeasibleReason != null) 'infeasibleReason': infeasibleReason!, - if (trialInfeasible != null) 'trialInfeasible': trialInfeasible!, + if (input != null) 'input': input!, + if (methodName != null) 'methodName': methodName!, }; } -/// Success and error statistics of processing multiple entities (for example, -/// DataItems or structured data rows) in batch. -class GoogleCloudAiplatformV1CompletionStats { - /// The number of entities for which any error was encountered. - /// - /// Output only. - core.String? failedCount; - - /// In cases when enough errors are encountered a job, pipeline, or operation - /// may be failed as a whole. - /// - /// Below is the number of entities for which the processing had not been - /// finished (either in successful or failed state). Set to -1 if the number - /// is unknown (for example, the operation failed before the total entity - /// number could be collected). - /// - /// Output only. - core.String? incompleteCount; - - /// The number of entities that had been processed successfully. - /// - /// Output only. - core.String? successfulCount; - - /// The number of the successful forecast points that are generated by the - /// forecasting model. - /// - /// This is ONLY used by the forecasting batch prediction. - /// - /// Output only. - core.String? successfulForecastPointCount; +/// Response message for PredictionService.DirectRawPredict. +class GoogleCloudAiplatformV1DirectRawPredictResponse { + /// The prediction output. + core.String? output; + core.List get outputAsBytes => convert.base64.decode(output!); - GoogleCloudAiplatformV1CompletionStats({ - this.failedCount, - this.incompleteCount, - this.successfulCount, - this.successfulForecastPointCount, + set outputAsBytes(core.List bytes_) { + output = + convert.base64.encode(bytes_).replaceAll('/', '_').replaceAll('+', '-'); + } + + GoogleCloudAiplatformV1DirectRawPredictResponse({ + this.output, }); - GoogleCloudAiplatformV1CompletionStats.fromJson(core.Map json_) + GoogleCloudAiplatformV1DirectRawPredictResponse.fromJson(core.Map json_) : this( - failedCount: json_['failedCount'] as core.String?, - incompleteCount: json_['incompleteCount'] as core.String?, - successfulCount: json_['successfulCount'] as core.String?, - successfulForecastPointCount: - json_['successfulForecastPointCount'] as core.String?, + output: json_['output'] as core.String?, ); core.Map toJson() => { - if (failedCount != null) 'failedCount': failedCount!, - if (incompleteCount != null) 'incompleteCount': incompleteCount!, - if (successfulCount != null) 'successfulCount': successfulCount!, - if (successfulForecastPointCount != null) - 'successfulForecastPointCount': successfulForecastPointCount!, + if (output != null) 'output': output!, }; } -/// Request message for ComputeTokens RPC call. -class GoogleCloudAiplatformV1ComputeTokensRequest { - /// Input content. - /// - /// Optional. - core.List? contents; +/// The input content is encapsulated and uploaded in the request. +typedef GoogleCloudAiplatformV1DirectUploadSource = $Empty; - /// The instances that are the input to token computing API call. 
- /// - /// Schema is identical to the prediction schema of the text model, even for - /// the non-text models, like chat models, or Codey models. - /// - /// Optional. - /// - /// The values for Object must be JSON objects. It can consist of `num`, - /// `String`, `bool` and `null` as well as `Map` and `List` values. - core.List? instances; +/// Represents the spec of disk options. +typedef GoogleCloudAiplatformV1DiskSpec = $Shared04; - /// The name of the publisher model requested to serve the prediction. - /// - /// Format: projects/{project}/locations/{location}/publishers / * /models / * - /// - /// Optional. - core.String? model; +/// A list of double values. +class GoogleCloudAiplatformV1DoubleArray { + /// A list of double values. + core.List? values; - GoogleCloudAiplatformV1ComputeTokensRequest({ - this.contents, - this.instances, - this.model, + GoogleCloudAiplatformV1DoubleArray({ + this.values, }); - GoogleCloudAiplatformV1ComputeTokensRequest.fromJson(core.Map json_) + GoogleCloudAiplatformV1DoubleArray.fromJson(core.Map json_) : this( - contents: (json_['contents'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1Content.fromJson( - value as core.Map)) + values: (json_['values'] as core.List?) + ?.map((value) => (value as core.num).toDouble()) .toList(), - instances: json_.containsKey('instances') - ? json_['instances'] as core.List - : null, - model: json_['model'] as core.String?, ); core.Map toJson() => { - if (contents != null) 'contents': contents!, - if (instances != null) 'instances': instances!, - if (model != null) 'model': model!, + if (values != null) 'values': values!, }; } -/// Response message for ComputeTokens RPC call. -class GoogleCloudAiplatformV1ComputeTokensResponse { - /// Lists of tokens info from the input. +/// Describes the options to customize dynamic retrieval. +class GoogleCloudAiplatformV1DynamicRetrievalConfig { + /// The threshold to be used in dynamic retrieval. /// - /// A ComputeTokensRequest could have multiple instances with a prompt in each - /// instance. We also need to return lists of tokens info for the request with - /// multiple instances. - core.List? tokensInfo; + /// If not set, a system default value is used. + /// + /// Optional. + core.double? dynamicThreshold; - GoogleCloudAiplatformV1ComputeTokensResponse({ - this.tokensInfo, + /// The mode of the predictor to be used in dynamic retrieval. + /// Possible string values are: + /// - "MODE_UNSPECIFIED" : Always trigger retrieval. + /// - "MODE_DYNAMIC" : Run retrieval only when system decides it is necessary. + core.String? mode; + + GoogleCloudAiplatformV1DynamicRetrievalConfig({ + this.dynamicThreshold, + this.mode, }); - GoogleCloudAiplatformV1ComputeTokensResponse.fromJson(core.Map json_) + GoogleCloudAiplatformV1DynamicRetrievalConfig.fromJson(core.Map json_) : this( - tokensInfo: (json_['tokensInfo'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1TokensInfo.fromJson( - value as core.Map)) - .toList(), + dynamicThreshold: + (json_['dynamicThreshold'] as core.num?)?.toDouble(), + mode: json_['mode'] as core.String?, ); core.Map toJson() => { - if (tokensInfo != null) 'tokensInfo': tokensInfo!, + if (dynamicThreshold != null) 'dynamicThreshold': dynamicThreshold!, + if (mode != null) 'mode': mode!, }; } -/// The Container Registry location for the container image. -class GoogleCloudAiplatformV1ContainerRegistryDestination { - /// Container Registry URI of a container image. 
+/// Represents a customer-managed encryption key spec that can be applied to a +/// top-level resource. +class GoogleCloudAiplatformV1EncryptionSpec { + /// The Cloud KMS resource identifier of the customer managed encryption key + /// used to protect a resource. /// - /// Only Google Container Registry and Artifact Registry are supported now. - /// Accepted forms: * Google Container Registry path. For example: - /// `gcr.io/projectId/imageName:tag`. * Artifact Registry path. For example: - /// `us-central1-docker.pkg.dev/projectId/repoName/imageName:tag`. If a tag is - /// not specified, "latest" will be used as the default tag. + /// Has the form: + /// `projects/my-project/locations/my-region/keyRings/my-kr/cryptoKeys/my-key`. + /// The key needs to be in the same region as where the compute resource is + /// created. /// /// Required. - core.String? outputUri; + core.String? kmsKeyName; - GoogleCloudAiplatformV1ContainerRegistryDestination({ - this.outputUri, + GoogleCloudAiplatformV1EncryptionSpec({ + this.kmsKeyName, }); - GoogleCloudAiplatformV1ContainerRegistryDestination.fromJson(core.Map json_) + GoogleCloudAiplatformV1EncryptionSpec.fromJson(core.Map json_) : this( - outputUri: json_['outputUri'] as core.String?, + kmsKeyName: json_['kmsKeyName'] as core.String?, ); core.Map toJson() => { - if (outputUri != null) 'outputUri': outputUri!, + if (kmsKeyName != null) 'kmsKeyName': kmsKeyName!, }; } -/// The spec of a Container. -class GoogleCloudAiplatformV1ContainerSpec { - /// The arguments to be passed when starting the container. - core.List? args; +/// Models are deployed into it, and afterwards Endpoint is called to obtain +/// predictions and explanations. +class GoogleCloudAiplatformV1Endpoint { + /// Configurations that are applied to the endpoint for online prediction. + GoogleCloudAiplatformV1ClientConnectionConfig? clientConnectionConfig; - /// The command to be invoked when the container is started. + /// Timestamp when this Endpoint was created. /// - /// It overrides the entrypoint instruction in Dockerfile when provided. - core.List? command; + /// Output only. + core.String? createTime; - /// Environment variables to be passed to the container. + /// DNS of the dedicated endpoint. /// - /// Maximum limit is 100. - core.List? env; + /// Will only be populated if dedicated_endpoint_enabled is true. Format: + /// `https://{endpoint_id}.{region}-{project_number}.prediction.vertexai.goog`. + /// + /// Output only. + core.String? dedicatedEndpointDns; - /// The URI of a container image in the Container Registry that is to be run - /// on each worker replica. + /// If true, the endpoint will be exposed through a dedicated DNS + /// \[Endpoint.dedicated_endpoint_dns\]. + /// + /// Your request to the dedicated DNS will be isolated from other users' + /// traffic and will have better performance and reliability. Note: Once you + /// enabled dedicated endpoint, you won't be able to send request to the + /// shared DNS {region}-aiplatform.googleapis.com. The limitation will be + /// removed soon. + core.bool? dedicatedEndpointEnabled; + + /// The models deployed in this Endpoint. + /// + /// To add or remove DeployedModels use EndpointService.DeployModel and + /// EndpointService.UndeployModel respectively. + /// + /// Output only. + core.List? deployedModels; + + /// The description of the Endpoint. + core.String? description; + + /// The display name of the Endpoint. + /// + /// The name can be up to 128 characters long and can consist of any UTF-8 + /// characters. 
/// /// Required. - core.String? imageUri; + core.String? displayName; - GoogleCloudAiplatformV1ContainerSpec({ - this.args, - this.command, - this.env, - this.imageUri, + /// Deprecated: If true, expose the Endpoint via private service connect. + /// + /// Only one of the fields, network or enable_private_service_connect, can be + /// set. + @core.Deprecated( + 'Not supported. Member documentation may have more information.', + ) + core.bool? enablePrivateServiceConnect; + + /// Customer-managed encryption key spec for an Endpoint. + /// + /// If set, this Endpoint and all sub-resources of this Endpoint will be + /// secured by this key. + GoogleCloudAiplatformV1EncryptionSpec? encryptionSpec; + + /// Used to perform consistent read-modify-write updates. + /// + /// If not set, a blind "overwrite" update happens. + core.String? etag; + + /// The labels with user-defined metadata to organize your Endpoints. + /// + /// Label keys and values can be no longer than 64 characters (Unicode + /// codepoints), can only contain lowercase letters, numeric characters, + /// underscores and dashes. International characters are allowed. See + /// https://goo.gl/xmQnxf for more information and examples of labels. + core.Map? labels; + + /// Resource name of the Model Monitoring job associated with this Endpoint if + /// monitoring is enabled by JobService.CreateModelDeploymentMonitoringJob. + /// + /// Format: + /// `projects/{project}/locations/{location}/modelDeploymentMonitoringJobs/{model_deployment_monitoring_job}` + /// + /// Output only. + core.String? modelDeploymentMonitoringJob; + + /// The resource name of the Endpoint. + /// + /// Output only. + core.String? name; + + /// The full name of the Google Compute Engine + /// [network](https://cloud.google.com//compute/docs/networks-and-firewalls#networks) + /// to which the Endpoint should be peered. + /// + /// Private services access must already be configured for the network. If + /// left unspecified, the Endpoint is not peered with any network. Only one of + /// the fields, network or enable_private_service_connect, can be set. + /// [Format](https://cloud.google.com/compute/docs/reference/rest/v1/networks/insert): + /// `projects/{project}/global/networks/{network}`. Where `{project}` is a + /// project number, as in `12345`, and `{network}` is network name. + /// + /// Optional. + core.String? network; + + /// Configures the request-response logging for online prediction. + GoogleCloudAiplatformV1PredictRequestResponseLoggingConfig? + predictRequestResponseLoggingConfig; + + /// Configuration for private service connect. + /// + /// network and private_service_connect_config are mutually exclusive. + /// + /// Optional. + GoogleCloudAiplatformV1PrivateServiceConnectConfig? + privateServiceConnectConfig; + + /// Reserved for future use. + /// + /// Output only. + core.bool? satisfiesPzi; + + /// Reserved for future use. + /// + /// Output only. + core.bool? satisfiesPzs; + + /// A map from a DeployedModel's ID to the percentage of this Endpoint's + /// traffic that should be forwarded to that DeployedModel. + /// + /// If a DeployedModel's ID is not listed in this map, then it receives no + /// traffic. The traffic percentage values must add up to 100, or map must be + /// empty if the Endpoint is to not accept any traffic at a moment. + core.Map? trafficSplit; + + /// Timestamp when this Endpoint was last updated. + /// + /// Output only. + core.String? 
updateTime; + + GoogleCloudAiplatformV1Endpoint({ + this.clientConnectionConfig, + this.createTime, + this.dedicatedEndpointDns, + this.dedicatedEndpointEnabled, + this.deployedModels, + this.description, + this.displayName, + this.enablePrivateServiceConnect, + this.encryptionSpec, + this.etag, + this.labels, + this.modelDeploymentMonitoringJob, + this.name, + this.network, + this.predictRequestResponseLoggingConfig, + this.privateServiceConnectConfig, + this.satisfiesPzi, + this.satisfiesPzs, + this.trafficSplit, + this.updateTime, }); - GoogleCloudAiplatformV1ContainerSpec.fromJson(core.Map json_) + GoogleCloudAiplatformV1Endpoint.fromJson(core.Map json_) : this( - args: (json_['args'] as core.List?) - ?.map((value) => value as core.String) - .toList(), - command: (json_['command'] as core.List?) - ?.map((value) => value as core.String) - .toList(), - env: (json_['env'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1EnvVar.fromJson( + clientConnectionConfig: json_.containsKey('clientConnectionConfig') + ? GoogleCloudAiplatformV1ClientConnectionConfig.fromJson( + json_['clientConnectionConfig'] + as core.Map) + : null, + createTime: json_['createTime'] as core.String?, + dedicatedEndpointDns: json_['dedicatedEndpointDns'] as core.String?, + dedicatedEndpointEnabled: + json_['dedicatedEndpointEnabled'] as core.bool?, + deployedModels: (json_['deployedModels'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1DeployedModel.fromJson( value as core.Map)) .toList(), - imageUri: json_['imageUri'] as core.String?, + description: json_['description'] as core.String?, + displayName: json_['displayName'] as core.String?, + enablePrivateServiceConnect: + json_['enablePrivateServiceConnect'] as core.bool?, + encryptionSpec: json_.containsKey('encryptionSpec') + ? GoogleCloudAiplatformV1EncryptionSpec.fromJson( + json_['encryptionSpec'] + as core.Map) + : null, + etag: json_['etag'] as core.String?, + labels: + (json_['labels'] as core.Map?)?.map( + (key, value) => core.MapEntry( + key, + value as core.String, + ), + ), + modelDeploymentMonitoringJob: + json_['modelDeploymentMonitoringJob'] as core.String?, + name: json_['name'] as core.String?, + network: json_['network'] as core.String?, + predictRequestResponseLoggingConfig: + json_.containsKey('predictRequestResponseLoggingConfig') + ? GoogleCloudAiplatformV1PredictRequestResponseLoggingConfig + .fromJson(json_['predictRequestResponseLoggingConfig'] + as core.Map) + : null, + privateServiceConnectConfig: + json_.containsKey('privateServiceConnectConfig') + ? GoogleCloudAiplatformV1PrivateServiceConnectConfig.fromJson( + json_['privateServiceConnectConfig'] + as core.Map) + : null, + satisfiesPzi: json_['satisfiesPzi'] as core.bool?, + satisfiesPzs: json_['satisfiesPzs'] as core.bool?, + trafficSplit: + (json_['trafficSplit'] as core.Map?) 
+ ?.map( + (key, value) => core.MapEntry( + key, + value as core.int, + ), + ), + updateTime: json_['updateTime'] as core.String?, ); core.Map toJson() => { - if (args != null) 'args': args!, - if (command != null) 'command': command!, - if (env != null) 'env': env!, - if (imageUri != null) 'imageUri': imageUri!, + if (clientConnectionConfig != null) + 'clientConnectionConfig': clientConnectionConfig!, + if (createTime != null) 'createTime': createTime!, + if (dedicatedEndpointDns != null) + 'dedicatedEndpointDns': dedicatedEndpointDns!, + if (dedicatedEndpointEnabled != null) + 'dedicatedEndpointEnabled': dedicatedEndpointEnabled!, + if (deployedModels != null) 'deployedModels': deployedModels!, + if (description != null) 'description': description!, + if (displayName != null) 'displayName': displayName!, + if (enablePrivateServiceConnect != null) + 'enablePrivateServiceConnect': enablePrivateServiceConnect!, + if (encryptionSpec != null) 'encryptionSpec': encryptionSpec!, + if (etag != null) 'etag': etag!, + if (labels != null) 'labels': labels!, + if (modelDeploymentMonitoringJob != null) + 'modelDeploymentMonitoringJob': modelDeploymentMonitoringJob!, + if (name != null) 'name': name!, + if (network != null) 'network': network!, + if (predictRequestResponseLoggingConfig != null) + 'predictRequestResponseLoggingConfig': + predictRequestResponseLoggingConfig!, + if (privateServiceConnectConfig != null) + 'privateServiceConnectConfig': privateServiceConnectConfig!, + if (satisfiesPzi != null) 'satisfiesPzi': satisfiesPzi!, + if (satisfiesPzs != null) 'satisfiesPzs': satisfiesPzs!, + if (trafficSplit != null) 'trafficSplit': trafficSplit!, + if (updateTime != null) 'updateTime': updateTime!, }; } -/// The base structured datatype containing multi-part content of a message. +/// Selector for entityId. /// -/// A `Content` includes a `role` field designating the producer of the -/// `Content` and a `parts` field containing multi-part data that contains the -/// content of the message turn. -class GoogleCloudAiplatformV1Content { - /// Ordered `Parts` that constitute a single message. - /// - /// Parts may have different IANA MIME types. - /// - /// Required. - core.List? parts; +/// Getting ids from the given source. +class GoogleCloudAiplatformV1EntityIdSelector { + /// Source of Csv + GoogleCloudAiplatformV1CsvSource? csvSource; - /// The producer of the content. - /// - /// Must be either 'user' or 'model'. Useful to set for multi-turn - /// conversations, otherwise can be left blank or unset. + /// Source column that holds entity IDs. /// - /// Optional. - core.String? role; + /// If not provided, entity IDs are extracted from the column named entity_id. + core.String? entityIdField; - GoogleCloudAiplatformV1Content({ - this.parts, - this.role, + GoogleCloudAiplatformV1EntityIdSelector({ + this.csvSource, + this.entityIdField, }); - GoogleCloudAiplatformV1Content.fromJson(core.Map json_) + GoogleCloudAiplatformV1EntityIdSelector.fromJson(core.Map json_) : this( - parts: (json_['parts'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1Part.fromJson( - value as core.Map)) - .toList(), - role: json_['role'] as core.String?, + csvSource: json_.containsKey('csvSource') + ? 
GoogleCloudAiplatformV1CsvSource.fromJson( + json_['csvSource'] as core.Map) + : null, + entityIdField: json_['entityIdField'] as core.String?, ); core.Map toJson() => { - if (parts != null) 'parts': parts!, - if (role != null) 'role': role!, + if (csvSource != null) 'csvSource': csvSource!, + if (entityIdField != null) 'entityIdField': entityIdField!, }; } -/// Instance of a general context. -class GoogleCloudAiplatformV1Context { - /// Timestamp when this Context was created. +/// An entity type is a type of object in a system that needs to be modeled and +/// have stored information about. +/// +/// For example, driver is an entity type, and driver0 is an instance of an +/// entity type driver. +class GoogleCloudAiplatformV1EntityType { + /// Timestamp when this EntityType was created. /// /// Output only. core.String? createTime; - /// Description of the Context - core.String? description; - - /// User provided display name of the Context. + /// Description of the EntityType. /// - /// May be up to 128 Unicode characters. - core.String? displayName; + /// Optional. + core.String? description; - /// An eTag used to perform consistent read-modify-write updates. + /// Used to perform a consistent read-modify-write updates. /// /// If not set, a blind "overwrite" update happens. + /// + /// Optional. core.String? etag; - /// The labels with user-defined metadata to organize your Contexts. + /// The labels with user-defined metadata to organize your EntityTypes. /// /// Label keys and values can be no longer than 64 characters (Unicode /// codepoints), can only contain lowercase letters, numeric characters, - /// underscores and dashes. International characters are allowed. No more than - /// 64 user labels can be associated with one Context (System labels are - /// excluded). + /// underscores and dashes. International characters are allowed. See + /// https://goo.gl/xmQnxf for more information on and examples of labels. No + /// more than 64 user labels can be associated with one EntityType (System + /// labels are excluded)." System reserved label keys are prefixed with + /// "aiplatform.googleapis.com/" and are immutable. + /// + /// Optional. core.Map? labels; - /// Properties of the Context. + /// The default monitoring configuration for all Features with value type + /// (Feature.ValueType) BOOL, STRING, DOUBLE or INT64 under this EntityType. /// - /// Top level metadata keys' heading and trailing spaces will be trimmed. The - /// size of this field should not exceed 200KB. + /// If this is populated with + /// \[FeaturestoreMonitoringConfig.monitoring_interval\] specified, snapshot + /// analysis monitoring is enabled. Otherwise, snapshot analysis monitoring is + /// disabled. /// - /// The values for Object must be JSON objects. It can consist of `num`, - /// `String`, `bool` and `null` as well as `Map` and `List` values. - core.Map? metadata; + /// Optional. + GoogleCloudAiplatformV1FeaturestoreMonitoringConfig? monitoringConfig; - /// The resource name of the Context. + /// Name of the EntityType. + /// + /// Format: + /// `projects/{project}/locations/{location}/featurestores/{featurestore}/entityTypes/{entity_type}` + /// The last part entity_type is assigned by the client. The entity_type can + /// be up to 64 characters long and can consist only of ASCII Latin letters + /// A-Z and a-z and underscore(_), and ASCII digits 0-9 starting with a + /// letter. The value will be unique given a featurestore. /// /// Immutable. core.String? 
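A short sketch of the new GoogleCloudAiplatformV1EntityIdSelector: it points feature-value deletion at entity IDs stored in a CSV file. The bucket path, the column name, and the uris field on GcsSource are assumptions for illustration; only csvSource and entityIdField appear in the generated class above.

import 'package:googleapis/aiplatform/v1.dart'; // assumed import path

void main() {
  final selector = GoogleCloudAiplatformV1EntityIdSelector(
    csvSource: GoogleCloudAiplatformV1CsvSource(
      gcsSource: GoogleCloudAiplatformV1GcsSource(
        // Hypothetical Cloud Storage object holding one entity ID per row.
        uris: ['gs://my-bucket/entities.csv'],
      ),
    ),
    // If omitted, IDs are read from the column named entity_id.
    entityIdField: 'customer_id',
  );
  print(selector.toJson());
}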
name; - /// A list of resource names of Contexts that are parents of this Context. + /// Config for data retention policy in offline storage. /// - /// A Context may have at most 10 parent_contexts. + /// TTL in days for feature values that will be stored in offline storage. The + /// Feature Store offline storage periodically removes obsolete feature values + /// older than `offline_storage_ttl_days` since the feature generation time. + /// If unset (or explicitly set to 0), default to 4000 days TTL. /// - /// Output only. - core.List? parentContexts; + /// Optional. + core.int? offlineStorageTtlDays; - /// The title of the schema describing the metadata. + /// Reserved for future use. /// - /// Schema title and version is expected to be registered in earlier Create - /// Schema calls. And both are used together as unique identifiers to identify - /// schemas within the local metadata store. - core.String? schemaTitle; + /// Output only. + core.bool? satisfiesPzi; - /// The version of the schema in schema_name to use. + /// Reserved for future use. /// - /// Schema title and version is expected to be registered in earlier Create - /// Schema calls. And both are used together as unique identifiers to identify - /// schemas within the local metadata store. - core.String? schemaVersion; + /// Output only. + core.bool? satisfiesPzs; - /// Timestamp when this Context was last updated. + /// Timestamp when this EntityType was most recently updated. /// /// Output only. core.String? updateTime; - GoogleCloudAiplatformV1Context({ + GoogleCloudAiplatformV1EntityType({ this.createTime, this.description, - this.displayName, this.etag, this.labels, - this.metadata, + this.monitoringConfig, this.name, - this.parentContexts, - this.schemaTitle, - this.schemaVersion, + this.offlineStorageTtlDays, + this.satisfiesPzi, + this.satisfiesPzs, this.updateTime, }); - GoogleCloudAiplatformV1Context.fromJson(core.Map json_) + GoogleCloudAiplatformV1EntityType.fromJson(core.Map json_) : this( createTime: json_['createTime'] as core.String?, description: json_['description'] as core.String?, - displayName: json_['displayName'] as core.String?, etag: json_['etag'] as core.String?, labels: (json_['labels'] as core.Map?)?.map( @@ -30549,1037 +36671,1138 @@ class GoogleCloudAiplatformV1Context { value as core.String, ), ), - metadata: json_.containsKey('metadata') - ? json_['metadata'] as core.Map + monitoringConfig: json_.containsKey('monitoringConfig') + ? GoogleCloudAiplatformV1FeaturestoreMonitoringConfig.fromJson( + json_['monitoringConfig'] + as core.Map) : null, name: json_['name'] as core.String?, - parentContexts: (json_['parentContexts'] as core.List?) 
- ?.map((value) => value as core.String) - .toList(), - schemaTitle: json_['schemaTitle'] as core.String?, - schemaVersion: json_['schemaVersion'] as core.String?, + offlineStorageTtlDays: json_['offlineStorageTtlDays'] as core.int?, + satisfiesPzi: json_['satisfiesPzi'] as core.bool?, + satisfiesPzs: json_['satisfiesPzs'] as core.bool?, updateTime: json_['updateTime'] as core.String?, ); core.Map toJson() => { if (createTime != null) 'createTime': createTime!, if (description != null) 'description': description!, - if (displayName != null) 'displayName': displayName!, if (etag != null) 'etag': etag!, if (labels != null) 'labels': labels!, - if (metadata != null) 'metadata': metadata!, + if (monitoringConfig != null) 'monitoringConfig': monitoringConfig!, if (name != null) 'name': name!, - if (parentContexts != null) 'parentContexts': parentContexts!, - if (schemaTitle != null) 'schemaTitle': schemaTitle!, - if (schemaVersion != null) 'schemaVersion': schemaVersion!, + if (offlineStorageTtlDays != null) + 'offlineStorageTtlDays': offlineStorageTtlDays!, + if (satisfiesPzi != null) 'satisfiesPzi': satisfiesPzi!, + if (satisfiesPzs != null) 'satisfiesPzs': satisfiesPzs!, if (updateTime != null) 'updateTime': updateTime!, }; } -/// Request message for ModelService.CopyModel. -class GoogleCloudAiplatformV1CopyModelRequest { - /// Customer-managed encryption key options. - /// - /// If this is set, then the Model copy will be encrypted with the provided - /// encryption key. - GoogleCloudAiplatformV1EncryptionSpec? encryptionSpec; - - /// Copy source_model into a new Model with this ID. - /// - /// The ID will become the final component of the model resource name. This - /// value may be up to 63 characters, and valid characters are `[a-z0-9_-]`. - /// The first character cannot be a number or hyphen. - /// - /// Optional. - core.String? modelId; - - /// Specify this field to copy source_model into this existing Model as a new - /// version. +/// Represents an environment variable present in a Container or Python Module. +class GoogleCloudAiplatformV1EnvVar { + /// Name of the environment variable. /// - /// Format: `projects/{project}/locations/{location}/models/{model}` + /// Must be a valid C identifier. /// - /// Optional. - core.String? parentModel; + /// Required. + core.String? name; - /// The resource name of the Model to copy. + /// Variables that reference a $(VAR_NAME) are expanded using the previous + /// defined environment variables in the container and any service environment + /// variables. /// - /// That Model must be in the same Project. Format: - /// `projects/{project}/locations/{location}/models/{model}` + /// If a variable cannot be resolved, the reference in the input string will + /// be unchanged. The $(VAR_NAME) syntax can be escaped with a double $$, ie: + /// $$(VAR_NAME). Escaped references will never be expanded, regardless of + /// whether the variable exists or not. /// /// Required. - core.String? sourceModel; + core.String? value; - GoogleCloudAiplatformV1CopyModelRequest({ - this.encryptionSpec, - this.modelId, - this.parentModel, - this.sourceModel, + GoogleCloudAiplatformV1EnvVar({ + this.name, + this.value, }); - GoogleCloudAiplatformV1CopyModelRequest.fromJson(core.Map json_) + GoogleCloudAiplatformV1EnvVar.fromJson(core.Map json_) : this( - encryptionSpec: json_.containsKey('encryptionSpec') - ? 
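To illustrate the GoogleCloudAiplatformV1EntityType message added above, the sketch below fills in a few of its optional fields and prints the JSON body a create or update call would carry. The description, labels, and TTL values are placeholders; the field set mirrors the generated class.

import 'dart:convert';
import 'package:googleapis/aiplatform/v1.dart'; // assumed import path

void main() {
  final entityType = GoogleCloudAiplatformV1EntityType(
    description: 'Drivers of the ride-hailing fleet',
    labels: {'env': 'dev'},
    // Keep offline feature values for one year instead of the 4000-day default.
    offlineStorageTtlDays: 365,
  );
  // The request body for a featurestore call is just the JSON encoding.
  print(jsonEncode(entityType.toJson()));
}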
GoogleCloudAiplatformV1EncryptionSpec.fromJson( - json_['encryptionSpec'] - as core.Map) - : null, - modelId: json_['modelId'] as core.String?, - parentModel: json_['parentModel'] as core.String?, - sourceModel: json_['sourceModel'] as core.String?, + name: json_['name'] as core.String?, + value: json_['value'] as core.String?, ); core.Map toJson() => { - if (encryptionSpec != null) 'encryptionSpec': encryptionSpec!, - if (modelId != null) 'modelId': modelId!, - if (parentModel != null) 'parentModel': parentModel!, - if (sourceModel != null) 'sourceModel': sourceModel!, + if (name != null) 'name': name!, + if (value != null) 'value': value!, }; } -/// Request message for PredictionService.CountTokens. -class GoogleCloudAiplatformV1CountTokensRequest { - /// Input content. - /// - /// Optional. - core.List? contents; +/// Model error analysis for each annotation. +class GoogleCloudAiplatformV1ErrorAnalysisAnnotation { + /// Attributed items for a given annotation, typically representing neighbors + /// from the training sets constrained by the query type. + core.List? + attributedItems; - /// Generation config that the model will use to generate the response. + /// The outlier score of this annotated item. /// - /// Optional. - GoogleCloudAiplatformV1GenerationConfig? generationConfig; + /// Usually defined as the min of all distances from attributed items. + core.double? outlierScore; - /// The instances that are the input to token counting call. - /// - /// Schema is identical to the prediction schema of the underlying model. - /// - /// Optional. - /// - /// The values for Object must be JSON objects. It can consist of `num`, - /// `String`, `bool` and `null` as well as `Map` and `List` values. - core.List? instances; + /// The threshold used to determine if this annotation is an outlier or not. + core.double? outlierThreshold; - /// The name of the publisher model requested to serve the prediction. - /// - /// Format: `projects/{project}/locations/{location}/publishers / * /models / - /// * ` + /// The query type used for finding the attributed items. + /// Possible string values are: + /// - "QUERY_TYPE_UNSPECIFIED" : Unspecified query type for model error + /// analysis. + /// - "ALL_SIMILAR" : Query similar samples across all classes in the dataset. + /// - "SAME_CLASS_SIMILAR" : Query similar samples from the same class of the + /// input sample. + /// - "SAME_CLASS_DISSIMILAR" : Query dissimilar samples from the same class + /// of the input sample. + core.String? queryType; + + GoogleCloudAiplatformV1ErrorAnalysisAnnotation({ + this.attributedItems, + this.outlierScore, + this.outlierThreshold, + this.queryType, + }); + + GoogleCloudAiplatformV1ErrorAnalysisAnnotation.fromJson(core.Map json_) + : this( + attributedItems: (json_['attributedItems'] as core.List?) 
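The GoogleCloudAiplatformV1EnvVar class above documents the $(VAR_NAME) expansion rule; the sketch below, with hypothetical variable names, shows how such entries are built in Dart (a raw string keeps Dart from interpolating the dollar sign).

import 'package:googleapis/aiplatform/v1.dart'; // assumed import path

void main() {
  // Environment variables for a container or Python module. The second entry
  // references the first via $(VAR_NAME); $$ would escape the expansion.
  final envVars = [
    GoogleCloudAiplatformV1EnvVar(name: 'MODEL_DIR', value: '/models/latest'),
    GoogleCloudAiplatformV1EnvVar(name: 'CACHE_DIR', value: r'$(MODEL_DIR)/cache'),
  ];
  for (final v in envVars) {
    print(v.toJson()); // {name: ..., value: ...}
  }
}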
+ ?.map((value) => + GoogleCloudAiplatformV1ErrorAnalysisAnnotationAttributedItem + .fromJson(value as core.Map)) + .toList(), + outlierScore: (json_['outlierScore'] as core.num?)?.toDouble(), + outlierThreshold: + (json_['outlierThreshold'] as core.num?)?.toDouble(), + queryType: json_['queryType'] as core.String?, + ); + + core.Map toJson() => { + if (attributedItems != null) 'attributedItems': attributedItems!, + if (outlierScore != null) 'outlierScore': outlierScore!, + if (outlierThreshold != null) 'outlierThreshold': outlierThreshold!, + if (queryType != null) 'queryType': queryType!, + }; +} + +/// Attributed items for a given annotation, typically representing neighbors +/// from the training sets constrained by the query type. +class GoogleCloudAiplatformV1ErrorAnalysisAnnotationAttributedItem { + /// The unique ID for each annotation. /// - /// Optional. - core.String? model; + /// Used by FE to allocate the annotation in DB. + core.String? annotationResourceName; - /// The user provided system instructions for the model. + /// The distance of this item to the annotation. + core.double? distance; + + GoogleCloudAiplatformV1ErrorAnalysisAnnotationAttributedItem({ + this.annotationResourceName, + this.distance, + }); + + GoogleCloudAiplatformV1ErrorAnalysisAnnotationAttributedItem.fromJson( + core.Map json_) + : this( + annotationResourceName: + json_['annotationResourceName'] as core.String?, + distance: (json_['distance'] as core.num?)?.toDouble(), + ); + + core.Map toJson() => { + if (annotationResourceName != null) + 'annotationResourceName': annotationResourceName!, + if (distance != null) 'distance': distance!, + }; +} + +/// Request message for EvaluationService.EvaluateInstances. +class GoogleCloudAiplatformV1EvaluateInstancesRequest { + /// Instances and metric spec for bleu metric. + GoogleCloudAiplatformV1BleuInput? bleuInput; + + /// Input for coherence metric. + GoogleCloudAiplatformV1CoherenceInput? coherenceInput; + + /// Translation metrics. /// - /// Note: only text should be used in parts and content in each part will be - /// in a separate paragraph. + /// Input for Comet metric. + GoogleCloudAiplatformV1CometInput? cometInput; + + /// Auto metric instances. /// - /// Optional. - GoogleCloudAiplatformV1Content? systemInstruction; + /// Instances and metric spec for exact match metric. + GoogleCloudAiplatformV1ExactMatchInput? exactMatchInput; - /// A list of `Tools` the model may use to generate the next response. + /// LLM-based metric instance. /// - /// A `Tool` is a piece of code that enables the system to interact with - /// external systems to perform an action, or set of actions, outside of - /// knowledge and scope of the model. + /// General text generation metrics, applicable to other categories. Input for + /// fluency metric. + GoogleCloudAiplatformV1FluencyInput? fluencyInput; + + /// Input for fulfillment metric. + GoogleCloudAiplatformV1FulfillmentInput? fulfillmentInput; + + /// Input for groundedness metric. + GoogleCloudAiplatformV1GroundednessInput? groundednessInput; + + /// Input for Metricx metric. + GoogleCloudAiplatformV1MetricxInput? metricxInput; + + /// Input for pairwise metric. + GoogleCloudAiplatformV1PairwiseMetricInput? pairwiseMetricInput; + + /// Input for pairwise question answering quality metric. + GoogleCloudAiplatformV1PairwiseQuestionAnsweringQualityInput? + pairwiseQuestionAnsweringQualityInput; + + /// Input for pairwise summarization quality metric. + GoogleCloudAiplatformV1PairwiseSummarizationQualityInput? 
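As a small illustration of the error-analysis types above: the doc comment says the outlier score is usually the minimum distance over the attributed items, which the sketch below computes by hand. All resource names and numbers are hypothetical.

import 'package:googleapis/aiplatform/v1.dart'; // assumed import path

void main() {
  final neighbors = [
    GoogleCloudAiplatformV1ErrorAnalysisAnnotationAttributedItem(
        annotationResourceName: 'annotations/a1', distance: 0.42),
    GoogleCloudAiplatformV1ErrorAnalysisAnnotationAttributedItem(
        annotationResourceName: 'annotations/a2', distance: 0.18),
  ];
  final annotation = GoogleCloudAiplatformV1ErrorAnalysisAnnotation(
    attributedItems: neighbors,
    // Outlier score taken as the minimum neighbor distance, per the docs above.
    outlierScore:
        neighbors.map((n) => n.distance!).reduce((a, b) => a < b ? a : b),
    outlierThreshold: 0.5,
    queryType: 'SAME_CLASS_SIMILAR',
  );
  print(annotation.outlierScore! > annotation.outlierThreshold!
      ? 'outlier'
      : 'inlier'); // inlier
}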
+ pairwiseSummarizationQualityInput; + + /// Input for pointwise metric. + GoogleCloudAiplatformV1PointwiseMetricInput? pointwiseMetricInput; + + /// Input for question answering correctness metric. + GoogleCloudAiplatformV1QuestionAnsweringCorrectnessInput? + questionAnsweringCorrectnessInput; + + /// Input for question answering helpfulness metric. + GoogleCloudAiplatformV1QuestionAnsweringHelpfulnessInput? + questionAnsweringHelpfulnessInput; + + /// Input for question answering quality metric. + GoogleCloudAiplatformV1QuestionAnsweringQualityInput? + questionAnsweringQualityInput; + + /// Input for question answering relevance metric. + GoogleCloudAiplatformV1QuestionAnsweringRelevanceInput? + questionAnsweringRelevanceInput; + + /// Instances and metric spec for rouge metric. + GoogleCloudAiplatformV1RougeInput? rougeInput; + + /// Input for safety metric. + GoogleCloudAiplatformV1SafetyInput? safetyInput; + + /// Input for summarization helpfulness metric. + GoogleCloudAiplatformV1SummarizationHelpfulnessInput? + summarizationHelpfulnessInput; + + /// Input for summarization quality metric. + GoogleCloudAiplatformV1SummarizationQualityInput? summarizationQualityInput; + + /// Input for summarization verbosity metric. + GoogleCloudAiplatformV1SummarizationVerbosityInput? + summarizationVerbosityInput; + + /// Tool call metric instances. /// - /// Optional. - core.List? tools; + /// Input for tool call valid metric. + GoogleCloudAiplatformV1ToolCallValidInput? toolCallValidInput; - GoogleCloudAiplatformV1CountTokensRequest({ - this.contents, - this.generationConfig, - this.instances, - this.model, - this.systemInstruction, - this.tools, + /// Input for tool name match metric. + GoogleCloudAiplatformV1ToolNameMatchInput? toolNameMatchInput; + + /// Input for tool parameter key match metric. + GoogleCloudAiplatformV1ToolParameterKeyMatchInput? toolParameterKeyMatchInput; + + /// Input for tool parameter key value match metric. + GoogleCloudAiplatformV1ToolParameterKVMatchInput? toolParameterKvMatchInput; + + /// Input for trajectory match any order metric. + GoogleCloudAiplatformV1TrajectoryAnyOrderMatchInput? + trajectoryAnyOrderMatchInput; + + /// Input for trajectory exact match metric. + GoogleCloudAiplatformV1TrajectoryExactMatchInput? trajectoryExactMatchInput; + + /// Input for trajectory in order match metric. + GoogleCloudAiplatformV1TrajectoryInOrderMatchInput? + trajectoryInOrderMatchInput; + + /// Input for trajectory precision metric. + GoogleCloudAiplatformV1TrajectoryPrecisionInput? trajectoryPrecisionInput; + + /// Input for trajectory recall metric. + GoogleCloudAiplatformV1TrajectoryRecallInput? trajectoryRecallInput; + + /// Input for trajectory single tool use metric. + GoogleCloudAiplatformV1TrajectorySingleToolUseInput? 
+ trajectorySingleToolUseInput; + + GoogleCloudAiplatformV1EvaluateInstancesRequest({ + this.bleuInput, + this.coherenceInput, + this.cometInput, + this.exactMatchInput, + this.fluencyInput, + this.fulfillmentInput, + this.groundednessInput, + this.metricxInput, + this.pairwiseMetricInput, + this.pairwiseQuestionAnsweringQualityInput, + this.pairwiseSummarizationQualityInput, + this.pointwiseMetricInput, + this.questionAnsweringCorrectnessInput, + this.questionAnsweringHelpfulnessInput, + this.questionAnsweringQualityInput, + this.questionAnsweringRelevanceInput, + this.rougeInput, + this.safetyInput, + this.summarizationHelpfulnessInput, + this.summarizationQualityInput, + this.summarizationVerbosityInput, + this.toolCallValidInput, + this.toolNameMatchInput, + this.toolParameterKeyMatchInput, + this.toolParameterKvMatchInput, + this.trajectoryAnyOrderMatchInput, + this.trajectoryExactMatchInput, + this.trajectoryInOrderMatchInput, + this.trajectoryPrecisionInput, + this.trajectoryRecallInput, + this.trajectorySingleToolUseInput, }); - GoogleCloudAiplatformV1CountTokensRequest.fromJson(core.Map json_) + GoogleCloudAiplatformV1EvaluateInstancesRequest.fromJson(core.Map json_) : this( - contents: (json_['contents'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1Content.fromJson( - value as core.Map)) - .toList(), - generationConfig: json_.containsKey('generationConfig') - ? GoogleCloudAiplatformV1GenerationConfig.fromJson( - json_['generationConfig'] + bleuInput: json_.containsKey('bleuInput') + ? GoogleCloudAiplatformV1BleuInput.fromJson( + json_['bleuInput'] as core.Map) + : null, + coherenceInput: json_.containsKey('coherenceInput') + ? GoogleCloudAiplatformV1CoherenceInput.fromJson( + json_['coherenceInput'] + as core.Map) + : null, + cometInput: json_.containsKey('cometInput') + ? GoogleCloudAiplatformV1CometInput.fromJson( + json_['cometInput'] as core.Map) + : null, + exactMatchInput: json_.containsKey('exactMatchInput') + ? GoogleCloudAiplatformV1ExactMatchInput.fromJson( + json_['exactMatchInput'] + as core.Map) + : null, + fluencyInput: json_.containsKey('fluencyInput') + ? GoogleCloudAiplatformV1FluencyInput.fromJson( + json_['fluencyInput'] as core.Map) + : null, + fulfillmentInput: json_.containsKey('fulfillmentInput') + ? GoogleCloudAiplatformV1FulfillmentInput.fromJson( + json_['fulfillmentInput'] + as core.Map) + : null, + groundednessInput: json_.containsKey('groundednessInput') + ? GoogleCloudAiplatformV1GroundednessInput.fromJson( + json_['groundednessInput'] + as core.Map) + : null, + metricxInput: json_.containsKey('metricxInput') + ? GoogleCloudAiplatformV1MetricxInput.fromJson( + json_['metricxInput'] as core.Map) + : null, + pairwiseMetricInput: json_.containsKey('pairwiseMetricInput') + ? GoogleCloudAiplatformV1PairwiseMetricInput.fromJson( + json_['pairwiseMetricInput'] + as core.Map) + : null, + pairwiseQuestionAnsweringQualityInput: + json_.containsKey('pairwiseQuestionAnsweringQualityInput') + ? GoogleCloudAiplatformV1PairwiseQuestionAnsweringQualityInput + .fromJson(json_['pairwiseQuestionAnsweringQualityInput'] + as core.Map) + : null, + pairwiseSummarizationQualityInput: + json_.containsKey('pairwiseSummarizationQualityInput') + ? GoogleCloudAiplatformV1PairwiseSummarizationQualityInput + .fromJson(json_['pairwiseSummarizationQualityInput'] + as core.Map) + : null, + pointwiseMetricInput: json_.containsKey('pointwiseMetricInput') + ? 
GoogleCloudAiplatformV1PointwiseMetricInput.fromJson( + json_['pointwiseMetricInput'] + as core.Map) + : null, + questionAnsweringCorrectnessInput: + json_.containsKey('questionAnsweringCorrectnessInput') + ? GoogleCloudAiplatformV1QuestionAnsweringCorrectnessInput + .fromJson(json_['questionAnsweringCorrectnessInput'] + as core.Map) + : null, + questionAnsweringHelpfulnessInput: + json_.containsKey('questionAnsweringHelpfulnessInput') + ? GoogleCloudAiplatformV1QuestionAnsweringHelpfulnessInput + .fromJson(json_['questionAnsweringHelpfulnessInput'] + as core.Map) + : null, + questionAnsweringQualityInput: json_ + .containsKey('questionAnsweringQualityInput') + ? GoogleCloudAiplatformV1QuestionAnsweringQualityInput.fromJson( + json_['questionAnsweringQualityInput'] + as core.Map) + : null, + questionAnsweringRelevanceInput: json_ + .containsKey('questionAnsweringRelevanceInput') + ? GoogleCloudAiplatformV1QuestionAnsweringRelevanceInput.fromJson( + json_['questionAnsweringRelevanceInput'] + as core.Map) + : null, + rougeInput: json_.containsKey('rougeInput') + ? GoogleCloudAiplatformV1RougeInput.fromJson( + json_['rougeInput'] as core.Map) + : null, + safetyInput: json_.containsKey('safetyInput') + ? GoogleCloudAiplatformV1SafetyInput.fromJson( + json_['safetyInput'] as core.Map) + : null, + summarizationHelpfulnessInput: json_ + .containsKey('summarizationHelpfulnessInput') + ? GoogleCloudAiplatformV1SummarizationHelpfulnessInput.fromJson( + json_['summarizationHelpfulnessInput'] + as core.Map) + : null, + summarizationQualityInput: + json_.containsKey('summarizationQualityInput') + ? GoogleCloudAiplatformV1SummarizationQualityInput.fromJson( + json_['summarizationQualityInput'] + as core.Map) + : null, + summarizationVerbosityInput: + json_.containsKey('summarizationVerbosityInput') + ? GoogleCloudAiplatformV1SummarizationVerbosityInput.fromJson( + json_['summarizationVerbosityInput'] + as core.Map) + : null, + toolCallValidInput: json_.containsKey('toolCallValidInput') + ? GoogleCloudAiplatformV1ToolCallValidInput.fromJson( + json_['toolCallValidInput'] + as core.Map) + : null, + toolNameMatchInput: json_.containsKey('toolNameMatchInput') + ? GoogleCloudAiplatformV1ToolNameMatchInput.fromJson( + json_['toolNameMatchInput'] + as core.Map) + : null, + toolParameterKeyMatchInput: + json_.containsKey('toolParameterKeyMatchInput') + ? GoogleCloudAiplatformV1ToolParameterKeyMatchInput.fromJson( + json_['toolParameterKeyMatchInput'] + as core.Map) + : null, + toolParameterKvMatchInput: + json_.containsKey('toolParameterKvMatchInput') + ? GoogleCloudAiplatformV1ToolParameterKVMatchInput.fromJson( + json_['toolParameterKvMatchInput'] + as core.Map) + : null, + trajectoryAnyOrderMatchInput: json_ + .containsKey('trajectoryAnyOrderMatchInput') + ? GoogleCloudAiplatformV1TrajectoryAnyOrderMatchInput.fromJson( + json_['trajectoryAnyOrderMatchInput'] + as core.Map) + : null, + trajectoryExactMatchInput: + json_.containsKey('trajectoryExactMatchInput') + ? GoogleCloudAiplatformV1TrajectoryExactMatchInput.fromJson( + json_['trajectoryExactMatchInput'] + as core.Map) + : null, + trajectoryInOrderMatchInput: + json_.containsKey('trajectoryInOrderMatchInput') + ? GoogleCloudAiplatformV1TrajectoryInOrderMatchInput.fromJson( + json_['trajectoryInOrderMatchInput'] + as core.Map) + : null, + trajectoryPrecisionInput: + json_.containsKey('trajectoryPrecisionInput') + ? 
GoogleCloudAiplatformV1TrajectoryPrecisionInput.fromJson( + json_['trajectoryPrecisionInput'] + as core.Map) + : null, + trajectoryRecallInput: json_.containsKey('trajectoryRecallInput') + ? GoogleCloudAiplatformV1TrajectoryRecallInput.fromJson( + json_['trajectoryRecallInput'] as core.Map) : null, - instances: json_.containsKey('instances') - ? json_['instances'] as core.List - : null, - model: json_['model'] as core.String?, - systemInstruction: json_.containsKey('systemInstruction') - ? GoogleCloudAiplatformV1Content.fromJson( - json_['systemInstruction'] + trajectorySingleToolUseInput: json_ + .containsKey('trajectorySingleToolUseInput') + ? GoogleCloudAiplatformV1TrajectorySingleToolUseInput.fromJson( + json_['trajectorySingleToolUseInput'] as core.Map) : null, - tools: (json_['tools'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1Tool.fromJson( - value as core.Map)) - .toList(), ); core.Map toJson() => { - if (contents != null) 'contents': contents!, - if (generationConfig != null) 'generationConfig': generationConfig!, - if (instances != null) 'instances': instances!, - if (model != null) 'model': model!, - if (systemInstruction != null) 'systemInstruction': systemInstruction!, - if (tools != null) 'tools': tools!, + if (bleuInput != null) 'bleuInput': bleuInput!, + if (coherenceInput != null) 'coherenceInput': coherenceInput!, + if (cometInput != null) 'cometInput': cometInput!, + if (exactMatchInput != null) 'exactMatchInput': exactMatchInput!, + if (fluencyInput != null) 'fluencyInput': fluencyInput!, + if (fulfillmentInput != null) 'fulfillmentInput': fulfillmentInput!, + if (groundednessInput != null) 'groundednessInput': groundednessInput!, + if (metricxInput != null) 'metricxInput': metricxInput!, + if (pairwiseMetricInput != null) + 'pairwiseMetricInput': pairwiseMetricInput!, + if (pairwiseQuestionAnsweringQualityInput != null) + 'pairwiseQuestionAnsweringQualityInput': + pairwiseQuestionAnsweringQualityInput!, + if (pairwiseSummarizationQualityInput != null) + 'pairwiseSummarizationQualityInput': + pairwiseSummarizationQualityInput!, + if (pointwiseMetricInput != null) + 'pointwiseMetricInput': pointwiseMetricInput!, + if (questionAnsweringCorrectnessInput != null) + 'questionAnsweringCorrectnessInput': + questionAnsweringCorrectnessInput!, + if (questionAnsweringHelpfulnessInput != null) + 'questionAnsweringHelpfulnessInput': + questionAnsweringHelpfulnessInput!, + if (questionAnsweringQualityInput != null) + 'questionAnsweringQualityInput': questionAnsweringQualityInput!, + if (questionAnsweringRelevanceInput != null) + 'questionAnsweringRelevanceInput': questionAnsweringRelevanceInput!, + if (rougeInput != null) 'rougeInput': rougeInput!, + if (safetyInput != null) 'safetyInput': safetyInput!, + if (summarizationHelpfulnessInput != null) + 'summarizationHelpfulnessInput': summarizationHelpfulnessInput!, + if (summarizationQualityInput != null) + 'summarizationQualityInput': summarizationQualityInput!, + if (summarizationVerbosityInput != null) + 'summarizationVerbosityInput': summarizationVerbosityInput!, + if (toolCallValidInput != null) + 'toolCallValidInput': toolCallValidInput!, + if (toolNameMatchInput != null) + 'toolNameMatchInput': toolNameMatchInput!, + if (toolParameterKeyMatchInput != null) + 'toolParameterKeyMatchInput': toolParameterKeyMatchInput!, + if (toolParameterKvMatchInput != null) + 'toolParameterKvMatchInput': toolParameterKvMatchInput!, + if (trajectoryAnyOrderMatchInput != null) + 'trajectoryAnyOrderMatchInput': 
trajectoryAnyOrderMatchInput!, + if (trajectoryExactMatchInput != null) + 'trajectoryExactMatchInput': trajectoryExactMatchInput!, + if (trajectoryInOrderMatchInput != null) + 'trajectoryInOrderMatchInput': trajectoryInOrderMatchInput!, + if (trajectoryPrecisionInput != null) + 'trajectoryPrecisionInput': trajectoryPrecisionInput!, + if (trajectoryRecallInput != null) + 'trajectoryRecallInput': trajectoryRecallInput!, + if (trajectorySingleToolUseInput != null) + 'trajectorySingleToolUseInput': trajectorySingleToolUseInput!, }; } -/// Response message for PredictionService.CountTokens. -class GoogleCloudAiplatformV1CountTokensResponse { - /// The total number of billable characters counted across all instances from - /// the request. - core.int? totalBillableCharacters; - - /// The total number of tokens counted across all instances from the request. - core.int? totalTokens; - - GoogleCloudAiplatformV1CountTokensResponse({ - this.totalBillableCharacters, - this.totalTokens, - }); - - GoogleCloudAiplatformV1CountTokensResponse.fromJson(core.Map json_) - : this( - totalBillableCharacters: - json_['totalBillableCharacters'] as core.int?, - totalTokens: json_['totalTokens'] as core.int?, - ); +/// Response message for EvaluationService.EvaluateInstances. +class GoogleCloudAiplatformV1EvaluateInstancesResponse { + /// Results for bleu metric. + GoogleCloudAiplatformV1BleuResults? bleuResults; - core.Map toJson() => { - if (totalBillableCharacters != null) - 'totalBillableCharacters': totalBillableCharacters!, - if (totalTokens != null) 'totalTokens': totalTokens!, - }; -} + /// Result for coherence metric. + GoogleCloudAiplatformV1CoherenceResult? coherenceResult; -/// Request message for CreateDeploymentResourcePool method. -class GoogleCloudAiplatformV1CreateDeploymentResourcePoolRequest { - /// The DeploymentResourcePool to create. + /// Translation metrics. /// - /// Required. - GoogleCloudAiplatformV1DeploymentResourcePool? deploymentResourcePool; + /// Result for Comet metric. + GoogleCloudAiplatformV1CometResult? cometResult; - /// The ID to use for the DeploymentResourcePool, which will become the final - /// component of the DeploymentResourcePool's resource name. - /// - /// The maximum length is 63 characters, and valid characters are - /// `/^[a-z]([a-z0-9-]{0,61}[a-z0-9])?$/`. + /// Auto metric evaluation results. /// - /// Required. - core.String? deploymentResourcePoolId; + /// Results for exact match metric. + GoogleCloudAiplatformV1ExactMatchResults? exactMatchResults; - GoogleCloudAiplatformV1CreateDeploymentResourcePoolRequest({ - this.deploymentResourcePool, - this.deploymentResourcePoolId, - }); + /// LLM-based metric evaluation result. + /// + /// General text generation metrics, applicable to other categories. Result + /// for fluency metric. + GoogleCloudAiplatformV1FluencyResult? fluencyResult; - GoogleCloudAiplatformV1CreateDeploymentResourcePoolRequest.fromJson( - core.Map json_) - : this( - deploymentResourcePool: json_.containsKey('deploymentResourcePool') - ? GoogleCloudAiplatformV1DeploymentResourcePool.fromJson( - json_['deploymentResourcePool'] - as core.Map) - : null, - deploymentResourcePoolId: - json_['deploymentResourcePoolId'] as core.String?, - ); + /// Result for fulfillment metric. + GoogleCloudAiplatformV1FulfillmentResult? 
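The GoogleCloudAiplatformV1EvaluateInstancesRequest above bundles one field per metric; a caller populates exactly one of them per call. The sketch below prepares an exact-match evaluation. The inner shape of ExactMatchInput (a metricSpec plus instances with prediction/reference) is an assumption not shown in this diff, so it is built from a raw JSON map.

import 'dart:convert';
import 'package:googleapis/aiplatform/v1.dart'; // assumed import path

void main() {
  final request = GoogleCloudAiplatformV1EvaluateInstancesRequest(
    exactMatchInput:
        GoogleCloudAiplatformV1ExactMatchInput.fromJson(<String, dynamic>{
      // Assumed payload shape: empty spec plus prediction/reference pairs.
      'metricSpec': <String, dynamic>{},
      'instances': [
        <String, dynamic>{'prediction': 'Paris', 'reference': 'Paris'},
      ],
    }),
  );
  // The encoded body is what EvaluationService.EvaluateInstances receives.
  print(jsonEncode(request.toJson()));
}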
fulfillmentResult; - core.Map toJson() => { - if (deploymentResourcePool != null) - 'deploymentResourcePool': deploymentResourcePool!, - if (deploymentResourcePoolId != null) - 'deploymentResourcePoolId': deploymentResourcePoolId!, - }; -} + /// Result for groundedness metric. + GoogleCloudAiplatformV1GroundednessResult? groundednessResult; -/// Request message for FeaturestoreService.CreateFeature. -/// -/// Request message for FeatureRegistryService.CreateFeature. -class GoogleCloudAiplatformV1CreateFeatureRequest { - /// The Feature to create. - /// - /// Required. - GoogleCloudAiplatformV1Feature? feature; + /// Result for Metricx metric. + GoogleCloudAiplatformV1MetricxResult? metricxResult; - /// The ID to use for the Feature, which will become the final component of - /// the Feature's resource name. - /// - /// This value may be up to 128 characters, and valid characters are - /// `[a-z0-9_]`. The first character cannot be a number. The value must be - /// unique within an EntityType/FeatureGroup. - /// - /// Required. - core.String? featureId; + /// Result for pairwise metric. + GoogleCloudAiplatformV1PairwiseMetricResult? pairwiseMetricResult; - /// The resource name of the EntityType or FeatureGroup to create a Feature. - /// - /// Format for entity_type as parent: - /// `projects/{project}/locations/{location}/featurestores/{featurestore}/entityTypes/{entity_type}` - /// Format for feature_group as parent: - /// `projects/{project}/locations/{location}/featureGroups/{feature_group}` - /// - /// Required. - core.String? parent; + /// Result for pairwise question answering quality metric. + GoogleCloudAiplatformV1PairwiseQuestionAnsweringQualityResult? + pairwiseQuestionAnsweringQualityResult; - GoogleCloudAiplatformV1CreateFeatureRequest({ - this.feature, - this.featureId, - this.parent, - }); + /// Result for pairwise summarization quality metric. + GoogleCloudAiplatformV1PairwiseSummarizationQualityResult? + pairwiseSummarizationQualityResult; - GoogleCloudAiplatformV1CreateFeatureRequest.fromJson(core.Map json_) - : this( - feature: json_.containsKey('feature') - ? GoogleCloudAiplatformV1Feature.fromJson( - json_['feature'] as core.Map) - : null, - featureId: json_['featureId'] as core.String?, - parent: json_['parent'] as core.String?, - ); + /// Generic metrics. + /// + /// Result for pointwise metric. + GoogleCloudAiplatformV1PointwiseMetricResult? pointwiseMetricResult; - core.Map toJson() => { - if (feature != null) 'feature': feature!, - if (featureId != null) 'featureId': featureId!, - if (parent != null) 'parent': parent!, - }; -} + /// Result for question answering correctness metric. + GoogleCloudAiplatformV1QuestionAnsweringCorrectnessResult? + questionAnsweringCorrectnessResult; -/// Request message for \[NotebookService.CreateNotebookExecutionJob\] -class GoogleCloudAiplatformV1CreateNotebookExecutionJobRequest { - /// The NotebookExecutionJob to create. - /// - /// Required. - GoogleCloudAiplatformV1NotebookExecutionJob? notebookExecutionJob; + /// Result for question answering helpfulness metric. + GoogleCloudAiplatformV1QuestionAnsweringHelpfulnessResult? + questionAnsweringHelpfulnessResult; - /// User specified ID for the NotebookExecutionJob. + /// Question answering only metrics. /// - /// Optional. - core.String? notebookExecutionJobId; + /// Result for question answering quality metric. + GoogleCloudAiplatformV1QuestionAnsweringQualityResult? + questionAnsweringQualityResult; - /// The resource name of the Location to create the NotebookExecutionJob. 
- /// - /// Format: `projects/{project}/locations/{location}` - /// - /// Required. - core.String? parent; + /// Result for question answering relevance metric. + GoogleCloudAiplatformV1QuestionAnsweringRelevanceResult? + questionAnsweringRelevanceResult; - GoogleCloudAiplatformV1CreateNotebookExecutionJobRequest({ - this.notebookExecutionJob, - this.notebookExecutionJobId, - this.parent, - }); + /// Results for rouge metric. + GoogleCloudAiplatformV1RougeResults? rougeResults; - GoogleCloudAiplatformV1CreateNotebookExecutionJobRequest.fromJson( - core.Map json_) - : this( - notebookExecutionJob: json_.containsKey('notebookExecutionJob') - ? GoogleCloudAiplatformV1NotebookExecutionJob.fromJson( - json_['notebookExecutionJob'] - as core.Map) - : null, - notebookExecutionJobId: - json_['notebookExecutionJobId'] as core.String?, - parent: json_['parent'] as core.String?, - ); + /// Result for safety metric. + GoogleCloudAiplatformV1SafetyResult? safetyResult; - core.Map toJson() => { - if (notebookExecutionJob != null) - 'notebookExecutionJob': notebookExecutionJob!, - if (notebookExecutionJobId != null) - 'notebookExecutionJobId': notebookExecutionJobId!, - if (parent != null) 'parent': parent!, - }; -} + /// Result for summarization helpfulness metric. + GoogleCloudAiplatformV1SummarizationHelpfulnessResult? + summarizationHelpfulnessResult; -/// Request message for PipelineService.CreatePipelineJob. -class GoogleCloudAiplatformV1CreatePipelineJobRequest { - /// The resource name of the Location to create the PipelineJob in. - /// - /// Format: `projects/{project}/locations/{location}` + /// Summarization only metrics. /// - /// Required. - core.String? parent; + /// Result for summarization quality metric. + GoogleCloudAiplatformV1SummarizationQualityResult? summarizationQualityResult; - /// The PipelineJob to create. - /// - /// Required. - GoogleCloudAiplatformV1PipelineJob? pipelineJob; + /// Result for summarization verbosity metric. + GoogleCloudAiplatformV1SummarizationVerbosityResult? + summarizationVerbosityResult; - /// The ID to use for the PipelineJob, which will become the final component - /// of the PipelineJob name. + /// Tool call metrics. /// - /// If not provided, an ID will be automatically generated. This value should - /// be less than 128 characters, and valid characters are `/a-z-/`. - core.String? pipelineJobId; - - GoogleCloudAiplatformV1CreatePipelineJobRequest({ - this.parent, - this.pipelineJob, - this.pipelineJobId, - }); - - GoogleCloudAiplatformV1CreatePipelineJobRequest.fromJson(core.Map json_) - : this( - parent: json_['parent'] as core.String?, - pipelineJob: json_.containsKey('pipelineJob') - ? GoogleCloudAiplatformV1PipelineJob.fromJson( - json_['pipelineJob'] as core.Map) - : null, - pipelineJobId: json_['pipelineJobId'] as core.String?, - ); - - core.Map toJson() => { - if (parent != null) 'parent': parent!, - if (pipelineJob != null) 'pipelineJob': pipelineJob!, - if (pipelineJobId != null) 'pipelineJobId': pipelineJobId!, - }; -} + /// Results for tool call valid metric. + GoogleCloudAiplatformV1ToolCallValidResults? toolCallValidResults; -/// Request message for TensorboardService.CreateTensorboardRun. -class GoogleCloudAiplatformV1CreateTensorboardRunRequest { - /// The resource name of the TensorboardExperiment to create the - /// TensorboardRun in. - /// - /// Format: - /// `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}` - /// - /// Required. - core.String? 
parent; + /// Results for tool name match metric. + GoogleCloudAiplatformV1ToolNameMatchResults? toolNameMatchResults; - /// The TensorboardRun to create. - /// - /// Required. - GoogleCloudAiplatformV1TensorboardRun? tensorboardRun; + /// Results for tool parameter key match metric. + GoogleCloudAiplatformV1ToolParameterKeyMatchResults? + toolParameterKeyMatchResults; - /// The ID to use for the Tensorboard run, which becomes the final component - /// of the Tensorboard run's resource name. - /// - /// This value should be 1-128 characters, and valid characters are `/a-z-/`. - /// - /// Required. - core.String? tensorboardRunId; + /// Results for tool parameter key value match metric. + GoogleCloudAiplatformV1ToolParameterKVMatchResults? + toolParameterKvMatchResults; - GoogleCloudAiplatformV1CreateTensorboardRunRequest({ - this.parent, - this.tensorboardRun, - this.tensorboardRunId, - }); + /// Result for trajectory any order match metric. + GoogleCloudAiplatformV1TrajectoryAnyOrderMatchResults? + trajectoryAnyOrderMatchResults; - GoogleCloudAiplatformV1CreateTensorboardRunRequest.fromJson(core.Map json_) - : this( - parent: json_['parent'] as core.String?, - tensorboardRun: json_.containsKey('tensorboardRun') - ? GoogleCloudAiplatformV1TensorboardRun.fromJson( - json_['tensorboardRun'] - as core.Map) - : null, - tensorboardRunId: json_['tensorboardRunId'] as core.String?, - ); + /// Result for trajectory exact match metric. + GoogleCloudAiplatformV1TrajectoryExactMatchResults? + trajectoryExactMatchResults; - core.Map toJson() => { - if (parent != null) 'parent': parent!, - if (tensorboardRun != null) 'tensorboardRun': tensorboardRun!, - if (tensorboardRunId != null) 'tensorboardRunId': tensorboardRunId!, - }; -} + /// Result for trajectory in order match metric. + GoogleCloudAiplatformV1TrajectoryInOrderMatchResults? + trajectoryInOrderMatchResults; -/// Request message for TensorboardService.CreateTensorboardTimeSeries. -class GoogleCloudAiplatformV1CreateTensorboardTimeSeriesRequest { - /// The resource name of the TensorboardRun to create the - /// TensorboardTimeSeries in. - /// - /// Format: - /// `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}/runs/{run}` - /// - /// Required. - core.String? parent; + /// Result for trajectory precision metric. + GoogleCloudAiplatformV1TrajectoryPrecisionResults? trajectoryPrecisionResults; - /// The TensorboardTimeSeries to create. - /// - /// Required. - GoogleCloudAiplatformV1TensorboardTimeSeries? tensorboardTimeSeries; + /// Results for trajectory recall metric. + GoogleCloudAiplatformV1TrajectoryRecallResults? trajectoryRecallResults; - /// The user specified unique ID to use for the TensorboardTimeSeries, which - /// becomes the final component of the TensorboardTimeSeries's resource name. - /// - /// This value should match "a-z0-9{0, 127}" - /// - /// Optional. - core.String? tensorboardTimeSeriesId; + /// Results for trajectory single tool use metric. + GoogleCloudAiplatformV1TrajectorySingleToolUseResults? 
+ trajectorySingleToolUseResults; - GoogleCloudAiplatformV1CreateTensorboardTimeSeriesRequest({ - this.parent, - this.tensorboardTimeSeries, - this.tensorboardTimeSeriesId, + GoogleCloudAiplatformV1EvaluateInstancesResponse({ + this.bleuResults, + this.coherenceResult, + this.cometResult, + this.exactMatchResults, + this.fluencyResult, + this.fulfillmentResult, + this.groundednessResult, + this.metricxResult, + this.pairwiseMetricResult, + this.pairwiseQuestionAnsweringQualityResult, + this.pairwiseSummarizationQualityResult, + this.pointwiseMetricResult, + this.questionAnsweringCorrectnessResult, + this.questionAnsweringHelpfulnessResult, + this.questionAnsweringQualityResult, + this.questionAnsweringRelevanceResult, + this.rougeResults, + this.safetyResult, + this.summarizationHelpfulnessResult, + this.summarizationQualityResult, + this.summarizationVerbosityResult, + this.toolCallValidResults, + this.toolNameMatchResults, + this.toolParameterKeyMatchResults, + this.toolParameterKvMatchResults, + this.trajectoryAnyOrderMatchResults, + this.trajectoryExactMatchResults, + this.trajectoryInOrderMatchResults, + this.trajectoryPrecisionResults, + this.trajectoryRecallResults, + this.trajectorySingleToolUseResults, }); - GoogleCloudAiplatformV1CreateTensorboardTimeSeriesRequest.fromJson( - core.Map json_) + GoogleCloudAiplatformV1EvaluateInstancesResponse.fromJson(core.Map json_) : this( - parent: json_['parent'] as core.String?, - tensorboardTimeSeries: json_.containsKey('tensorboardTimeSeries') - ? GoogleCloudAiplatformV1TensorboardTimeSeries.fromJson( - json_['tensorboardTimeSeries'] + bleuResults: json_.containsKey('bleuResults') + ? GoogleCloudAiplatformV1BleuResults.fromJson( + json_['bleuResults'] as core.Map) + : null, + coherenceResult: json_.containsKey('coherenceResult') + ? GoogleCloudAiplatformV1CoherenceResult.fromJson( + json_['coherenceResult'] + as core.Map) + : null, + cometResult: json_.containsKey('cometResult') + ? GoogleCloudAiplatformV1CometResult.fromJson( + json_['cometResult'] as core.Map) + : null, + exactMatchResults: json_.containsKey('exactMatchResults') + ? GoogleCloudAiplatformV1ExactMatchResults.fromJson( + json_['exactMatchResults'] + as core.Map) + : null, + fluencyResult: json_.containsKey('fluencyResult') + ? GoogleCloudAiplatformV1FluencyResult.fromJson( + json_['fluencyResult'] as core.Map) + : null, + fulfillmentResult: json_.containsKey('fulfillmentResult') + ? GoogleCloudAiplatformV1FulfillmentResult.fromJson( + json_['fulfillmentResult'] + as core.Map) + : null, + groundednessResult: json_.containsKey('groundednessResult') + ? GoogleCloudAiplatformV1GroundednessResult.fromJson( + json_['groundednessResult'] + as core.Map) + : null, + metricxResult: json_.containsKey('metricxResult') + ? GoogleCloudAiplatformV1MetricxResult.fromJson( + json_['metricxResult'] as core.Map) + : null, + pairwiseMetricResult: json_.containsKey('pairwiseMetricResult') + ? GoogleCloudAiplatformV1PairwiseMetricResult.fromJson( + json_['pairwiseMetricResult'] + as core.Map) + : null, + pairwiseQuestionAnsweringQualityResult: json_ + .containsKey('pairwiseQuestionAnsweringQualityResult') + ? GoogleCloudAiplatformV1PairwiseQuestionAnsweringQualityResult + .fromJson(json_['pairwiseQuestionAnsweringQualityResult'] + as core.Map) + : null, + pairwiseSummarizationQualityResult: + json_.containsKey('pairwiseSummarizationQualityResult') + ? 
GoogleCloudAiplatformV1PairwiseSummarizationQualityResult + .fromJson(json_['pairwiseSummarizationQualityResult'] + as core.Map) + : null, + pointwiseMetricResult: json_.containsKey('pointwiseMetricResult') + ? GoogleCloudAiplatformV1PointwiseMetricResult.fromJson( + json_['pointwiseMetricResult'] + as core.Map) + : null, + questionAnsweringCorrectnessResult: + json_.containsKey('questionAnsweringCorrectnessResult') + ? GoogleCloudAiplatformV1QuestionAnsweringCorrectnessResult + .fromJson(json_['questionAnsweringCorrectnessResult'] + as core.Map) + : null, + questionAnsweringHelpfulnessResult: + json_.containsKey('questionAnsweringHelpfulnessResult') + ? GoogleCloudAiplatformV1QuestionAnsweringHelpfulnessResult + .fromJson(json_['questionAnsweringHelpfulnessResult'] + as core.Map) + : null, + questionAnsweringQualityResult: json_ + .containsKey('questionAnsweringQualityResult') + ? GoogleCloudAiplatformV1QuestionAnsweringQualityResult.fromJson( + json_['questionAnsweringQualityResult'] + as core.Map) + : null, + questionAnsweringRelevanceResult: + json_.containsKey('questionAnsweringRelevanceResult') + ? GoogleCloudAiplatformV1QuestionAnsweringRelevanceResult + .fromJson(json_['questionAnsweringRelevanceResult'] + as core.Map) + : null, + rougeResults: json_.containsKey('rougeResults') + ? GoogleCloudAiplatformV1RougeResults.fromJson( + json_['rougeResults'] as core.Map) + : null, + safetyResult: json_.containsKey('safetyResult') + ? GoogleCloudAiplatformV1SafetyResult.fromJson( + json_['safetyResult'] as core.Map) + : null, + summarizationHelpfulnessResult: json_ + .containsKey('summarizationHelpfulnessResult') + ? GoogleCloudAiplatformV1SummarizationHelpfulnessResult.fromJson( + json_['summarizationHelpfulnessResult'] + as core.Map) + : null, + summarizationQualityResult: + json_.containsKey('summarizationQualityResult') + ? GoogleCloudAiplatformV1SummarizationQualityResult.fromJson( + json_['summarizationQualityResult'] + as core.Map) + : null, + summarizationVerbosityResult: json_ + .containsKey('summarizationVerbosityResult') + ? GoogleCloudAiplatformV1SummarizationVerbosityResult.fromJson( + json_['summarizationVerbosityResult'] + as core.Map) + : null, + toolCallValidResults: json_.containsKey('toolCallValidResults') + ? GoogleCloudAiplatformV1ToolCallValidResults.fromJson( + json_['toolCallValidResults'] as core.Map) : null, - tensorboardTimeSeriesId: - json_['tensorboardTimeSeriesId'] as core.String?, - ); - - core.Map toJson() => { - if (parent != null) 'parent': parent!, - if (tensorboardTimeSeries != null) - 'tensorboardTimeSeries': tensorboardTimeSeries!, - if (tensorboardTimeSeriesId != null) - 'tensorboardTimeSeriesId': tensorboardTimeSeriesId!, - }; -} - -/// The storage details for CSV output content. -class GoogleCloudAiplatformV1CsvDestination { - /// Google Cloud Storage location. - /// - /// Required. - GoogleCloudAiplatformV1GcsDestination? gcsDestination; - - GoogleCloudAiplatformV1CsvDestination({ - this.gcsDestination, - }); - - GoogleCloudAiplatformV1CsvDestination.fromJson(core.Map json_) - : this( - gcsDestination: json_.containsKey('gcsDestination') - ? GoogleCloudAiplatformV1GcsDestination.fromJson( - json_['gcsDestination'] + toolNameMatchResults: json_.containsKey('toolNameMatchResults') + ? GoogleCloudAiplatformV1ToolNameMatchResults.fromJson( + json_['toolNameMatchResults'] as core.Map) : null, - ); - - core.Map toJson() => { - if (gcsDestination != null) 'gcsDestination': gcsDestination!, - }; -} - -/// The storage details for CSV input content. 
-class GoogleCloudAiplatformV1CsvSource { - /// Google Cloud Storage location. - /// - /// Required. - GoogleCloudAiplatformV1GcsSource? gcsSource; - - GoogleCloudAiplatformV1CsvSource({ - this.gcsSource, - }); - - GoogleCloudAiplatformV1CsvSource.fromJson(core.Map json_) - : this( - gcsSource: json_.containsKey('gcsSource') - ? GoogleCloudAiplatformV1GcsSource.fromJson( - json_['gcsSource'] as core.Map) + toolParameterKeyMatchResults: json_ + .containsKey('toolParameterKeyMatchResults') + ? GoogleCloudAiplatformV1ToolParameterKeyMatchResults.fromJson( + json_['toolParameterKeyMatchResults'] + as core.Map) + : null, + toolParameterKvMatchResults: + json_.containsKey('toolParameterKvMatchResults') + ? GoogleCloudAiplatformV1ToolParameterKVMatchResults.fromJson( + json_['toolParameterKvMatchResults'] + as core.Map) + : null, + trajectoryAnyOrderMatchResults: json_ + .containsKey('trajectoryAnyOrderMatchResults') + ? GoogleCloudAiplatformV1TrajectoryAnyOrderMatchResults.fromJson( + json_['trajectoryAnyOrderMatchResults'] + as core.Map) + : null, + trajectoryExactMatchResults: + json_.containsKey('trajectoryExactMatchResults') + ? GoogleCloudAiplatformV1TrajectoryExactMatchResults.fromJson( + json_['trajectoryExactMatchResults'] + as core.Map) + : null, + trajectoryInOrderMatchResults: json_ + .containsKey('trajectoryInOrderMatchResults') + ? GoogleCloudAiplatformV1TrajectoryInOrderMatchResults.fromJson( + json_['trajectoryInOrderMatchResults'] + as core.Map) + : null, + trajectoryPrecisionResults: + json_.containsKey('trajectoryPrecisionResults') + ? GoogleCloudAiplatformV1TrajectoryPrecisionResults.fromJson( + json_['trajectoryPrecisionResults'] + as core.Map) + : null, + trajectoryRecallResults: json_.containsKey('trajectoryRecallResults') + ? GoogleCloudAiplatformV1TrajectoryRecallResults.fromJson( + json_['trajectoryRecallResults'] + as core.Map) + : null, + trajectorySingleToolUseResults: json_ + .containsKey('trajectorySingleToolUseResults') + ? 
GoogleCloudAiplatformV1TrajectorySingleToolUseResults.fromJson( + json_['trajectorySingleToolUseResults'] + as core.Map) : null, ); core.Map toJson() => { - if (gcsSource != null) 'gcsSource': gcsSource!, + if (bleuResults != null) 'bleuResults': bleuResults!, + if (coherenceResult != null) 'coherenceResult': coherenceResult!, + if (cometResult != null) 'cometResult': cometResult!, + if (exactMatchResults != null) 'exactMatchResults': exactMatchResults!, + if (fluencyResult != null) 'fluencyResult': fluencyResult!, + if (fulfillmentResult != null) 'fulfillmentResult': fulfillmentResult!, + if (groundednessResult != null) + 'groundednessResult': groundednessResult!, + if (metricxResult != null) 'metricxResult': metricxResult!, + if (pairwiseMetricResult != null) + 'pairwiseMetricResult': pairwiseMetricResult!, + if (pairwiseQuestionAnsweringQualityResult != null) + 'pairwiseQuestionAnsweringQualityResult': + pairwiseQuestionAnsweringQualityResult!, + if (pairwiseSummarizationQualityResult != null) + 'pairwiseSummarizationQualityResult': + pairwiseSummarizationQualityResult!, + if (pointwiseMetricResult != null) + 'pointwiseMetricResult': pointwiseMetricResult!, + if (questionAnsweringCorrectnessResult != null) + 'questionAnsweringCorrectnessResult': + questionAnsweringCorrectnessResult!, + if (questionAnsweringHelpfulnessResult != null) + 'questionAnsweringHelpfulnessResult': + questionAnsweringHelpfulnessResult!, + if (questionAnsweringQualityResult != null) + 'questionAnsweringQualityResult': questionAnsweringQualityResult!, + if (questionAnsweringRelevanceResult != null) + 'questionAnsweringRelevanceResult': questionAnsweringRelevanceResult!, + if (rougeResults != null) 'rougeResults': rougeResults!, + if (safetyResult != null) 'safetyResult': safetyResult!, + if (summarizationHelpfulnessResult != null) + 'summarizationHelpfulnessResult': summarizationHelpfulnessResult!, + if (summarizationQualityResult != null) + 'summarizationQualityResult': summarizationQualityResult!, + if (summarizationVerbosityResult != null) + 'summarizationVerbosityResult': summarizationVerbosityResult!, + if (toolCallValidResults != null) + 'toolCallValidResults': toolCallValidResults!, + if (toolNameMatchResults != null) + 'toolNameMatchResults': toolNameMatchResults!, + if (toolParameterKeyMatchResults != null) + 'toolParameterKeyMatchResults': toolParameterKeyMatchResults!, + if (toolParameterKvMatchResults != null) + 'toolParameterKvMatchResults': toolParameterKvMatchResults!, + if (trajectoryAnyOrderMatchResults != null) + 'trajectoryAnyOrderMatchResults': trajectoryAnyOrderMatchResults!, + if (trajectoryExactMatchResults != null) + 'trajectoryExactMatchResults': trajectoryExactMatchResults!, + if (trajectoryInOrderMatchResults != null) + 'trajectoryInOrderMatchResults': trajectoryInOrderMatchResults!, + if (trajectoryPrecisionResults != null) + 'trajectoryPrecisionResults': trajectoryPrecisionResults!, + if (trajectoryRecallResults != null) + 'trajectoryRecallResults': trajectoryRecallResults!, + if (trajectorySingleToolUseResults != null) + 'trajectorySingleToolUseResults': trajectorySingleToolUseResults!, }; } -/// Represents a job that runs custom workloads such as a Docker container or a -/// Python package. +/// True positive, false positive, or false negative. /// -/// A CustomJob can have multiple worker pools and each worker pool can have its -/// own machine and input spec. A CustomJob will be cleaned up once the job -/// enters terminal state (failed or succeeded). 
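Mirroring the request sketch earlier, the GoogleCloudAiplatformV1EvaluateInstancesResponse above carries one result field per metric, and only the field matching the requested metric is populated. The wire payload below is hypothetical; the exactMatchMetricValues/score shape is an assumption, not part of this diff.

import 'dart:convert';
import 'package:googleapis/aiplatform/v1.dart'; // assumed import path

void main() {
  const wire = '''
  {
    "exactMatchResults": {
      "exactMatchMetricValues": [
        {"score": 1.0}
      ]
    }
  }
  ''';
  final response = GoogleCloudAiplatformV1EvaluateInstancesResponse.fromJson(
      jsonDecode(wire) as Map<String, dynamic>);

  // Only the result matching the requested metric is non-null.
  final results = response.exactMatchResults;
  if (results != null) {
    print(jsonEncode(results.toJson()));
  }
}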
-class GoogleCloudAiplatformV1CustomJob { - /// Time when the CustomJob was created. +/// EvaluatedAnnotation is only available under ModelEvaluationSlice with slice +/// of `annotationSpec` dimension. +class GoogleCloudAiplatformV1EvaluatedAnnotation { + /// The data item payload that the Model predicted this EvaluatedAnnotation + /// on. /// /// Output only. - core.String? createTime; - - /// The display name of the CustomJob. - /// - /// The name can be up to 128 characters long and can consist of any UTF-8 - /// characters. /// - /// Required. - core.String? displayName; + /// The values for Object must be JSON objects. It can consist of `num`, + /// `String`, `bool` and `null` as well as `Map` and `List` values. + core.Object? dataItemPayload; - /// Customer-managed encryption key options for a CustomJob. - /// - /// If this is set, then all resources created by the CustomJob will be - /// encrypted with the provided encryption key. - GoogleCloudAiplatformV1EncryptionSpec? encryptionSpec; + /// Annotations of model error analysis results. + core.List? + errorAnalysisAnnotations; - /// Time when the CustomJob entered any of the following states: - /// `JOB_STATE_SUCCEEDED`, `JOB_STATE_FAILED`, `JOB_STATE_CANCELLED`. + /// ID of the EvaluatedDataItemView under the same ancestor ModelEvaluation. /// - /// Output only. - core.String? endTime; - - /// Only populated when job's state is `JOB_STATE_FAILED` or - /// `JOB_STATE_CANCELLED`. + /// The EvaluatedDataItemView consists of all ground truths and predictions on + /// data_item_payload. /// /// Output only. - GoogleRpcStatus? error; - - /// Job spec. - /// - /// Required. - GoogleCloudAiplatformV1CustomJobSpec? jobSpec; + core.String? evaluatedDataItemViewId; - /// The labels with user-defined metadata to organize CustomJobs. + /// Explanations of predictions. /// - /// Label keys and values can be no longer than 64 characters (Unicode - /// codepoints), can only contain lowercase letters, numeric characters, - /// underscores and dashes. International characters are allowed. See - /// https://goo.gl/xmQnxf for more information and examples of labels. - core.Map? labels; + /// Each element of the explanations indicates the explanation for one + /// explanation Method. The attributions list in the + /// EvaluatedAnnotationExplanation.explanation object corresponds to the + /// predictions list. For example, the second element in the attributions list + /// explains the second element in the predictions list. + core.List? + explanations; - /// Resource name of a CustomJob. + /// The ground truth Annotations, i.e. the Annotations that exist in the test + /// data the Model is evaluated on. /// - /// Output only. - core.String? name; - - /// Reserved for future use. + /// For true positive, there is one and only one ground truth annotation, + /// which matches the only prediction in predictions. For false positive, + /// there are zero or more ground truth annotations that are similar to the + /// only prediction in predictions, but not enough for a match. For false + /// negative, there is one and only one ground truth annotation, which doesn't + /// match any predictions created by the model. The schema of the ground truth + /// is stored in ModelEvaluation.annotation_schema_uri /// /// Output only. - core.bool? satisfiesPzi; - - /// Reserved for future use. /// - /// Output only. - core.bool? satisfiesPzs; + /// The values for Object must be JSON objects. 
It can consist of `num`, + /// `String`, `bool` and `null` as well as `Map` and `List` values. + core.List? groundTruths; - /// Time when the CustomJob for the first time entered the `JOB_STATE_RUNNING` - /// state. + /// The model predicted annotations. /// - /// Output only. - core.String? startTime; - - /// The detailed state of the job. + /// For true positive, there is one and only one prediction, which matches the + /// only one ground truth annotation in ground_truths. For false positive, + /// there is one and only one prediction, which doesn't match any ground truth + /// annotation of the corresponding data_item_view_id. For false negative, + /// there are zero or more predictions which are similar to the only ground + /// truth annotation in ground_truths but not enough for a match. The schema + /// of the prediction is stored in ModelEvaluation.annotation_schema_uri /// /// Output only. - /// Possible string values are: - /// - "JOB_STATE_UNSPECIFIED" : The job state is unspecified. - /// - "JOB_STATE_QUEUED" : The job has been just created or resumed and - /// processing has not yet begun. - /// - "JOB_STATE_PENDING" : The service is preparing to run the job. - /// - "JOB_STATE_RUNNING" : The job is in progress. - /// - "JOB_STATE_SUCCEEDED" : The job completed successfully. - /// - "JOB_STATE_FAILED" : The job failed. - /// - "JOB_STATE_CANCELLING" : The job is being cancelled. From this state the - /// job may only go to either `JOB_STATE_SUCCEEDED`, `JOB_STATE_FAILED` or - /// `JOB_STATE_CANCELLED`. - /// - "JOB_STATE_CANCELLED" : The job has been cancelled. - /// - "JOB_STATE_PAUSED" : The job has been stopped, and can be resumed. - /// - "JOB_STATE_EXPIRED" : The job has expired. - /// - "JOB_STATE_UPDATING" : The job is being updated. Only jobs in the - /// `RUNNING` state can be updated. After updating, the job goes back to the - /// `RUNNING` state. - /// - "JOB_STATE_PARTIALLY_SUCCEEDED" : The job is partially succeeded, some - /// results may be missing due to errors. - core.String? state; - - /// Time when the CustomJob was most recently updated. /// - /// Output only. - core.String? updateTime; + /// The values for Object must be JSON objects. It can consist of `num`, + /// `String`, `bool` and `null` as well as `Map` and `List` values. + core.List? predictions; - /// URIs for accessing - /// [interactive shells](https://cloud.google.com/vertex-ai/docs/training/monitor-debug-interactive-shell) - /// (one URI for each training node). - /// - /// Only available if job_spec.enable_web_access is `true`. The keys are names - /// of each node in the training job; for example, `workerpool0-0` for the - /// primary node, `workerpool1-0` for the first node in the second worker - /// pool, and `workerpool1-1` for the second node in the second worker pool. - /// The values are the URIs for each node's interactive shell. + /// Type of the EvaluatedAnnotation. /// /// Output only. - core.Map? webAccessUris; + /// Possible string values are: + /// - "EVALUATED_ANNOTATION_TYPE_UNSPECIFIED" : Invalid value. + /// - "TRUE_POSITIVE" : The EvaluatedAnnotation is a true positive. It has a + /// prediction created by the Model and a ground truth Annotation which the + /// prediction matches. + /// - "FALSE_POSITIVE" : The EvaluatedAnnotation is false positive. It has a + /// prediction created by the Model which does not match any ground truth + /// annotation. + /// - "FALSE_NEGATIVE" : The EvaluatedAnnotation is false negative. 
It has a + /// ground truth annotation which is not matched by any of the model created + /// predictions. + core.String? type; - GoogleCloudAiplatformV1CustomJob({ - this.createTime, - this.displayName, - this.encryptionSpec, - this.endTime, - this.error, - this.jobSpec, - this.labels, - this.name, - this.satisfiesPzi, - this.satisfiesPzs, - this.startTime, - this.state, - this.updateTime, - this.webAccessUris, + GoogleCloudAiplatformV1EvaluatedAnnotation({ + this.dataItemPayload, + this.errorAnalysisAnnotations, + this.evaluatedDataItemViewId, + this.explanations, + this.groundTruths, + this.predictions, + this.type, }); - GoogleCloudAiplatformV1CustomJob.fromJson(core.Map json_) + GoogleCloudAiplatformV1EvaluatedAnnotation.fromJson(core.Map json_) : this( - createTime: json_['createTime'] as core.String?, - displayName: json_['displayName'] as core.String?, - encryptionSpec: json_.containsKey('encryptionSpec') - ? GoogleCloudAiplatformV1EncryptionSpec.fromJson( - json_['encryptionSpec'] - as core.Map) - : null, - endTime: json_['endTime'] as core.String?, - error: json_.containsKey('error') - ? GoogleRpcStatus.fromJson( - json_['error'] as core.Map) + dataItemPayload: json_['dataItemPayload'], + errorAnalysisAnnotations: + (json_['errorAnalysisAnnotations'] as core.List?) + ?.map((value) => + GoogleCloudAiplatformV1ErrorAnalysisAnnotation.fromJson( + value as core.Map)) + .toList(), + evaluatedDataItemViewId: + json_['evaluatedDataItemViewId'] as core.String?, + explanations: (json_['explanations'] as core.List?) + ?.map((value) => + GoogleCloudAiplatformV1EvaluatedAnnotationExplanation + .fromJson(value as core.Map)) + .toList(), + groundTruths: json_.containsKey('groundTruths') + ? json_['groundTruths'] as core.List : null, - jobSpec: json_.containsKey('jobSpec') - ? GoogleCloudAiplatformV1CustomJobSpec.fromJson( - json_['jobSpec'] as core.Map) + predictions: json_.containsKey('predictions') + ? json_['predictions'] as core.List : null, - labels: - (json_['labels'] as core.Map?)?.map( - (key, value) => core.MapEntry( - key, - value as core.String, - ), - ), - name: json_['name'] as core.String?, - satisfiesPzi: json_['satisfiesPzi'] as core.bool?, - satisfiesPzs: json_['satisfiesPzs'] as core.bool?, - startTime: json_['startTime'] as core.String?, - state: json_['state'] as core.String?, - updateTime: json_['updateTime'] as core.String?, - webAccessUris: - (json_['webAccessUris'] as core.Map?) 
- ?.map( - (key, value) => core.MapEntry( - key, - value as core.String, - ), - ), + type: json_['type'] as core.String?, ); core.Map toJson() => { - if (createTime != null) 'createTime': createTime!, - if (displayName != null) 'displayName': displayName!, - if (encryptionSpec != null) 'encryptionSpec': encryptionSpec!, - if (endTime != null) 'endTime': endTime!, - if (error != null) 'error': error!, - if (jobSpec != null) 'jobSpec': jobSpec!, - if (labels != null) 'labels': labels!, - if (name != null) 'name': name!, - if (satisfiesPzi != null) 'satisfiesPzi': satisfiesPzi!, - if (satisfiesPzs != null) 'satisfiesPzs': satisfiesPzs!, - if (startTime != null) 'startTime': startTime!, - if (state != null) 'state': state!, - if (updateTime != null) 'updateTime': updateTime!, - if (webAccessUris != null) 'webAccessUris': webAccessUris!, + if (dataItemPayload != null) 'dataItemPayload': dataItemPayload!, + if (errorAnalysisAnnotations != null) + 'errorAnalysisAnnotations': errorAnalysisAnnotations!, + if (evaluatedDataItemViewId != null) + 'evaluatedDataItemViewId': evaluatedDataItemViewId!, + if (explanations != null) 'explanations': explanations!, + if (groundTruths != null) 'groundTruths': groundTruths!, + if (predictions != null) 'predictions': predictions!, + if (type != null) 'type': type!, }; } -/// Represents the spec of a CustomJob. -class GoogleCloudAiplatformV1CustomJobSpec { - /// The Cloud Storage location to store the output of this CustomJob or - /// HyperparameterTuningJob. - /// - /// For HyperparameterTuningJob, the baseOutputDirectory of each child - /// CustomJob backing a Trial is set to a subdirectory of name id under its - /// parent HyperparameterTuningJob's baseOutputDirectory. The following Vertex - /// AI environment variables will be passed to containers or python modules - /// when this field is set: For CustomJob: * AIP_MODEL_DIR = `/model/` * - /// AIP_CHECKPOINT_DIR = `/checkpoints/` * AIP_TENSORBOARD_LOG_DIR = `/logs/` - /// For CustomJob backing a Trial of HyperparameterTuningJob: * AIP_MODEL_DIR - /// = `//model/` * AIP_CHECKPOINT_DIR = `//checkpoints/` * - /// AIP_TENSORBOARD_LOG_DIR = `//logs/` - GoogleCloudAiplatformV1GcsDestination? baseOutputDirectory; - - /// Whether you want Vertex AI to enable access to the customized dashboard in - /// training chief container. - /// - /// If set to `true`, you can access the dashboard at the URIs given by - /// CustomJob.web_access_uris or Trial.web_access_uris (within - /// HyperparameterTuningJob.trials). - /// - /// Optional. - core.bool? enableDashboardAccess; - - /// Whether you want Vertex AI to enable - /// [interactive shell access](https://cloud.google.com/vertex-ai/docs/training/monitor-debug-interactive-shell) - /// to training containers. - /// - /// If set to `true`, you can access interactive shells at the URIs given by - /// CustomJob.web_access_uris or Trial.web_access_uris (within - /// HyperparameterTuningJob.trials). - /// - /// Optional. - core.bool? enableWebAccess; - - /// The Experiment associated with this job. - /// - /// Format: - /// `projects/{project}/locations/{location}/metadataStores/{metadataStores}/contexts/{experiment-name}` - /// - /// Optional. - core.String? experiment; - - /// The Experiment Run associated with this job. - /// - /// Format: - /// `projects/{project}/locations/{location}/metadataStores/{metadataStores}/contexts/{experiment-name}-{experiment-run-name}` - /// - /// Optional. - core.String? 
experimentRun; - - /// The name of the Model resources for which to generate a mapping to - /// artifact URIs. - /// - /// Applicable only to some of the Google-provided custom jobs. Format: - /// `projects/{project}/locations/{location}/models/{model}` In order to - /// retrieve a specific version of the model, also provide the version ID or - /// version alias. Example: - /// `projects/{project}/locations/{location}/models/{model}@2` or - /// `projects/{project}/locations/{location}/models/{model}@golden` If no - /// version ID or alias is specified, the "default" version will be returned. - /// The "default" version alias is created for the first version of the model, - /// and can be moved to other versions later on. There will be exactly one - /// default version. - /// - /// Optional. - core.List? models; - - /// The full name of the Compute Engine - /// \[network\](/compute/docs/networks-and-firewalls#networks) to which the - /// Job should be peered. - /// - /// For example, `projects/12345/global/networks/myVPC`. - /// \[Format\](/compute/docs/reference/rest/v1/networks/insert) is of the form - /// `projects/{project}/global/networks/{network}`. Where {project} is a - /// project number, as in `12345`, and {network} is a network name. To specify - /// this field, you must have already - /// [configured VPC Network Peering for Vertex AI](https://cloud.google.com/vertex-ai/docs/general/vpc-peering). - /// If this field is left unspecified, the job is not peered with any network. - /// - /// Optional. - core.String? network; - - /// The ID of the PersistentResource in the same Project and Location which to - /// run If this is specified, the job will be run on existing machines held by - /// the PersistentResource instead of on-demand short-live machines. - /// - /// The network and CMEK configs on the job should be consistent with those on - /// the PersistentResource, otherwise, the job will be rejected. - /// - /// Optional. - core.String? persistentResourceId; - - /// The ID of the location to store protected artifacts. - /// - /// e.g. us-central1. Populate only when the location is different than - /// CustomJob location. List of supported locations: - /// https://cloud.google.com/vertex-ai/docs/general/locations - core.String? protectedArtifactLocationId; - - /// Configuration for PSC-I for CustomJob. - /// - /// Optional. - GoogleCloudAiplatformV1PscInterfaceConfig? pscInterfaceConfig; - - /// A list of names for the reserved ip ranges under the VPC network that can - /// be used for this job. - /// - /// If set, we will deploy the job within the provided ip ranges. Otherwise, - /// the job will be deployed to any ip ranges under the provided VPC network. - /// Example: \['vertex-ai-ip-range'\]. - /// - /// Optional. - core.List? reservedIpRanges; - - /// Scheduling options for a CustomJob. - GoogleCloudAiplatformV1Scheduling? scheduling; - - /// Specifies the service account for workload run-as account. - /// - /// Users submitting jobs must have act-as permission on this run-as account. - /// If unspecified, the - /// [Vertex AI Custom Code Service Agent](https://cloud.google.com/vertex-ai/docs/general/access-control#service-agents) - /// for the CustomJob's project is used. - core.String? serviceAccount; - - /// The name of a Vertex AI Tensorboard resource to which this CustomJob will - /// upload Tensorboard logs. - /// - /// Format: - /// `projects/{project}/locations/{location}/tensorboards/{tensorboard}` - /// - /// Optional. - core.String? 
tensorboard; +/// Explanation result of the prediction produced by the Model. +class GoogleCloudAiplatformV1EvaluatedAnnotationExplanation { + /// Explanation attribution response details. + GoogleCloudAiplatformV1Explanation? explanation; - /// The spec of the worker pools including machine type and Docker image. - /// - /// All worker pools except the first one are optional and can be skipped by - /// providing an empty value. + /// Explanation type. /// - /// Required. - core.List? workerPoolSpecs; + /// For AutoML Image Classification models, possible values are: * + /// `image-integrated-gradients` * `image-xrai` + core.String? explanationType; - GoogleCloudAiplatformV1CustomJobSpec({ - this.baseOutputDirectory, - this.enableDashboardAccess, - this.enableWebAccess, - this.experiment, - this.experimentRun, - this.models, - this.network, - this.persistentResourceId, - this.protectedArtifactLocationId, - this.pscInterfaceConfig, - this.reservedIpRanges, - this.scheduling, - this.serviceAccount, - this.tensorboard, - this.workerPoolSpecs, + GoogleCloudAiplatformV1EvaluatedAnnotationExplanation({ + this.explanation, + this.explanationType, }); - GoogleCloudAiplatformV1CustomJobSpec.fromJson(core.Map json_) + GoogleCloudAiplatformV1EvaluatedAnnotationExplanation.fromJson(core.Map json_) : this( - baseOutputDirectory: json_.containsKey('baseOutputDirectory') - ? GoogleCloudAiplatformV1GcsDestination.fromJson( - json_['baseOutputDirectory'] - as core.Map) - : null, - enableDashboardAccess: json_['enableDashboardAccess'] as core.bool?, - enableWebAccess: json_['enableWebAccess'] as core.bool?, - experiment: json_['experiment'] as core.String?, - experimentRun: json_['experimentRun'] as core.String?, - models: (json_['models'] as core.List?) - ?.map((value) => value as core.String) - .toList(), - network: json_['network'] as core.String?, - persistentResourceId: json_['persistentResourceId'] as core.String?, - protectedArtifactLocationId: - json_['protectedArtifactLocationId'] as core.String?, - pscInterfaceConfig: json_.containsKey('pscInterfaceConfig') - ? GoogleCloudAiplatformV1PscInterfaceConfig.fromJson( - json_['pscInterfaceConfig'] - as core.Map) - : null, - reservedIpRanges: (json_['reservedIpRanges'] as core.List?) - ?.map((value) => value as core.String) - .toList(), - scheduling: json_.containsKey('scheduling') - ? GoogleCloudAiplatformV1Scheduling.fromJson( - json_['scheduling'] as core.Map) + explanation: json_.containsKey('explanation') + ? GoogleCloudAiplatformV1Explanation.fromJson( + json_['explanation'] as core.Map) : null, - serviceAccount: json_['serviceAccount'] as core.String?, - tensorboard: json_['tensorboard'] as core.String?, - workerPoolSpecs: (json_['workerPoolSpecs'] as core.List?) 
- ?.map((value) => GoogleCloudAiplatformV1WorkerPoolSpec.fromJson( - value as core.Map)) - .toList(), + explanationType: json_['explanationType'] as core.String?, ); core.Map toJson() => { - if (baseOutputDirectory != null) - 'baseOutputDirectory': baseOutputDirectory!, - if (enableDashboardAccess != null) - 'enableDashboardAccess': enableDashboardAccess!, - if (enableWebAccess != null) 'enableWebAccess': enableWebAccess!, - if (experiment != null) 'experiment': experiment!, - if (experimentRun != null) 'experimentRun': experimentRun!, - if (models != null) 'models': models!, - if (network != null) 'network': network!, - if (persistentResourceId != null) - 'persistentResourceId': persistentResourceId!, - if (protectedArtifactLocationId != null) - 'protectedArtifactLocationId': protectedArtifactLocationId!, - if (pscInterfaceConfig != null) - 'pscInterfaceConfig': pscInterfaceConfig!, - if (reservedIpRanges != null) 'reservedIpRanges': reservedIpRanges!, - if (scheduling != null) 'scheduling': scheduling!, - if (serviceAccount != null) 'serviceAccount': serviceAccount!, - if (tensorboard != null) 'tensorboard': tensorboard!, - if (workerPoolSpecs != null) 'workerPoolSpecs': workerPoolSpecs!, + if (explanation != null) 'explanation': explanation!, + if (explanationType != null) 'explanationType': explanationType!, }; } -/// A piece of data in a Dataset. -/// -/// Could be an image, a video, a document or plain text. -class GoogleCloudAiplatformV1DataItem { - /// Timestamp when this DataItem was created. +/// An edge describing the relationship between an Artifact and an Execution in +/// a lineage graph. +class GoogleCloudAiplatformV1Event { + /// The relative resource name of the Artifact in the Event. /// - /// Output only. - core.String? createTime; + /// Required. + core.String? artifact; - /// Used to perform consistent read-modify-write updates. + /// Time the Event occurred. /// - /// If not set, a blind "overwrite" update happens. + /// Output only. + core.String? eventTime; + + /// The relative resource name of the Execution in the Event. /// - /// Optional. - core.String? etag; + /// Output only. + core.String? execution; - /// The labels with user-defined metadata to organize your DataItems. + /// The labels with user-defined metadata to annotate Events. /// /// Label keys and values can be no longer than 64 characters (Unicode /// codepoints), can only contain lowercase letters, numeric characters, /// underscores and dashes. International characters are allowed. No more than - /// 64 user labels can be associated with one DataItem(System labels are + /// 64 user labels can be associated with one Event (System labels are /// excluded). See https://goo.gl/xmQnxf for more information and examples of /// labels. System reserved label keys are prefixed with /// "aiplatform.googleapis.com/" and are immutable. - /// - /// Optional. core.Map? labels; - /// The resource name of the DataItem. - /// - /// Output only. - core.String? name; - - /// The data that the DataItem represents (for example, an image or a text - /// snippet). - /// - /// The schema of the payload is stored in the parent Dataset's metadata - /// schema's dataItemSchemaUri field. + /// The type of the Event. /// /// Required. - /// - /// The values for Object must be JSON objects. It can consist of `num`, - /// `String`, `bool` and `null` as well as `Map` and `List` values. - core.Object? payload; - - /// Reserved for future use. - /// - /// Output only. - core.bool? satisfiesPzi; - - /// Reserved for future use. 
- /// - /// Output only. - core.bool? satisfiesPzs; - - /// Timestamp when this DataItem was last updated. - /// - /// Output only. - core.String? updateTime; + /// Possible string values are: + /// - "TYPE_UNSPECIFIED" : Unspecified whether input or output of the + /// Execution. + /// - "INPUT" : An input of the Execution. + /// - "OUTPUT" : An output of the Execution. + core.String? type; - GoogleCloudAiplatformV1DataItem({ - this.createTime, - this.etag, + GoogleCloudAiplatformV1Event({ + this.artifact, + this.eventTime, + this.execution, this.labels, - this.name, - this.payload, - this.satisfiesPzi, - this.satisfiesPzs, - this.updateTime, + this.type, }); - GoogleCloudAiplatformV1DataItem.fromJson(core.Map json_) + GoogleCloudAiplatformV1Event.fromJson(core.Map json_) : this( - createTime: json_['createTime'] as core.String?, - etag: json_['etag'] as core.String?, + artifact: json_['artifact'] as core.String?, + eventTime: json_['eventTime'] as core.String?, + execution: json_['execution'] as core.String?, labels: (json_['labels'] as core.Map?)?.map( (key, value) => core.MapEntry( @@ -31587,2134 +37810,1952 @@ class GoogleCloudAiplatformV1DataItem { value as core.String, ), ), - name: json_['name'] as core.String?, - payload: json_['payload'], - satisfiesPzi: json_['satisfiesPzi'] as core.bool?, - satisfiesPzs: json_['satisfiesPzs'] as core.bool?, - updateTime: json_['updateTime'] as core.String?, + type: json_['type'] as core.String?, ); core.Map toJson() => { - if (createTime != null) 'createTime': createTime!, - if (etag != null) 'etag': etag!, + if (artifact != null) 'artifact': artifact!, + if (eventTime != null) 'eventTime': eventTime!, + if (execution != null) 'execution': execution!, if (labels != null) 'labels': labels!, - if (name != null) 'name': name!, - if (payload != null) 'payload': payload!, - if (satisfiesPzi != null) 'satisfiesPzi': satisfiesPzi!, - if (satisfiesPzs != null) 'satisfiesPzs': satisfiesPzs!, - if (updateTime != null) 'updateTime': updateTime!, + if (type != null) 'type': type!, }; } -/// A container for a single DataItem and Annotations on it. -class GoogleCloudAiplatformV1DataItemView { - /// The Annotations on the DataItem. +/// Input for exact match metric. +class GoogleCloudAiplatformV1ExactMatchInput { + /// Repeated exact match instances. /// - /// If too many Annotations should be returned for the DataItem, this field - /// will be truncated per annotations_limit in request. If it was, then the - /// has_truncated_annotations will be set to true. - core.List? annotations; - - /// The DataItem. - GoogleCloudAiplatformV1DataItem? dataItem; + /// Required. + core.List? instances; - /// True if and only if the Annotations field has been truncated. + /// Spec for exact match metric. /// - /// It happens if more Annotations for this DataItem met the request's - /// annotation_filter than are allowed to be returned by annotations_limit. - /// Note that if Annotations field is not being returned due to field mask, - /// then this field will not be set to true no matter how many Annotations are - /// there. - core.bool? hasTruncatedAnnotations; + /// Required. + GoogleCloudAiplatformV1ExactMatchSpec? 
metricSpec; - GoogleCloudAiplatformV1DataItemView({ - this.annotations, - this.dataItem, - this.hasTruncatedAnnotations, + GoogleCloudAiplatformV1ExactMatchInput({ + this.instances, + this.metricSpec, }); - GoogleCloudAiplatformV1DataItemView.fromJson(core.Map json_) + GoogleCloudAiplatformV1ExactMatchInput.fromJson(core.Map json_) : this( - annotations: (json_['annotations'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1Annotation.fromJson( - value as core.Map)) + instances: (json_['instances'] as core.List?) + ?.map((value) => + GoogleCloudAiplatformV1ExactMatchInstance.fromJson( + value as core.Map)) .toList(), - dataItem: json_.containsKey('dataItem') - ? GoogleCloudAiplatformV1DataItem.fromJson( - json_['dataItem'] as core.Map) + metricSpec: json_.containsKey('metricSpec') + ? GoogleCloudAiplatformV1ExactMatchSpec.fromJson( + json_['metricSpec'] as core.Map) : null, - hasTruncatedAnnotations: - json_['hasTruncatedAnnotations'] as core.bool?, ); core.Map toJson() => { - if (annotations != null) 'annotations': annotations!, - if (dataItem != null) 'dataItem': dataItem!, - if (hasTruncatedAnnotations != null) - 'hasTruncatedAnnotations': hasTruncatedAnnotations!, + if (instances != null) 'instances': instances!, + if (metricSpec != null) 'metricSpec': metricSpec!, }; } -/// DataLabelingJob is used to trigger a human labeling job on unlabeled data -/// from the following Dataset: -class GoogleCloudAiplatformV1DataLabelingJob { - /// Parameters that configure the active learning pipeline. - /// - /// Active learning will label the data incrementally via several iterations. - /// For every iteration, it will select a batch of data based on the sampling - /// strategy. - GoogleCloudAiplatformV1ActiveLearningConfig? activeLearningConfig; - - /// Labels to assign to annotations generated by this DataLabelingJob. - /// - /// Label keys and values can be no longer than 64 characters (Unicode - /// codepoints), can only contain lowercase letters, numeric characters, - /// underscores and dashes. International characters are allowed. See - /// https://goo.gl/xmQnxf for more information and examples of labels. System - /// reserved label keys are prefixed with "aiplatform.googleapis.com/" and are - /// immutable. - core.Map? annotationLabels; - - /// Timestamp when this DataLabelingJob was created. - /// - /// Output only. - core.String? createTime; +/// Spec for exact match instance. +typedef GoogleCloudAiplatformV1ExactMatchInstance = $Instance00; - /// Estimated cost(in US dollars) that the DataLabelingJob has incurred to - /// date. +/// Exact match metric value for an instance. +class GoogleCloudAiplatformV1ExactMatchMetricValue { + /// Exact match score. /// /// Output only. - GoogleTypeMoney? currentSpend; + core.double? score; - /// Dataset resource names. - /// - /// Right now we only support labeling from a single Dataset. Format: - /// `projects/{project}/locations/{location}/datasets/{dataset}` - /// - /// Required. - core.List? datasets; + GoogleCloudAiplatformV1ExactMatchMetricValue({ + this.score, + }); - /// The user-defined name of the DataLabelingJob. - /// - /// The name can be up to 128 characters long and can consist of any UTF-8 - /// characters. Display name of a DataLabelingJob. - /// - /// Required. - core.String? displayName; + GoogleCloudAiplatformV1ExactMatchMetricValue.fromJson(core.Map json_) + : this( + score: (json_['score'] as core.num?)?.toDouble(), + ); - /// Customer-managed encryption key spec for a DataLabelingJob. 
- /// - /// If set, this DataLabelingJob will be secured by this key. Note: - /// Annotations created in the DataLabelingJob are associated with the - /// EncryptionSpec of the Dataset they are exported to. - GoogleCloudAiplatformV1EncryptionSpec? encryptionSpec; + core.Map toJson() => { + if (score != null) 'score': score!, + }; +} - /// DataLabelingJob errors. - /// - /// It is only populated when job's state is `JOB_STATE_FAILED` or - /// `JOB_STATE_CANCELLED`. +/// Results for exact match metric. +class GoogleCloudAiplatformV1ExactMatchResults { + /// Exact match metric values. /// /// Output only. - GoogleRpcStatus? error; - - /// Input config parameters for the DataLabelingJob. - /// - /// Required. - /// - /// The values for Object must be JSON objects. It can consist of `num`, - /// `String`, `bool` and `null` as well as `Map` and `List` values. - core.Object? inputs; + core.List? + exactMatchMetricValues; - /// Points to a YAML file stored on Google Cloud Storage describing the config - /// for a specific type of DataLabelingJob. - /// - /// The schema files that can be used here are found in the - /// https://storage.googleapis.com/google-cloud-aiplatform bucket in the - /// /schema/datalabelingjob/inputs/ folder. - /// - /// Required. - core.String? inputsSchemaUri; + GoogleCloudAiplatformV1ExactMatchResults({ + this.exactMatchMetricValues, + }); - /// The Google Cloud Storage location of the instruction pdf. - /// - /// This pdf is shared with labelers, and provides detailed description on how - /// to label DataItems in Datasets. - /// - /// Required. - core.String? instructionUri; + GoogleCloudAiplatformV1ExactMatchResults.fromJson(core.Map json_) + : this( + exactMatchMetricValues: + (json_['exactMatchMetricValues'] as core.List?) + ?.map((value) => + GoogleCloudAiplatformV1ExactMatchMetricValue.fromJson( + value as core.Map)) + .toList(), + ); - /// Number of labelers to work on each DataItem. - /// - /// Required. - core.int? labelerCount; + core.Map toJson() => { + if (exactMatchMetricValues != null) + 'exactMatchMetricValues': exactMatchMetricValues!, + }; +} - /// Current labeling job progress percentage scaled in interval \[0, 100\], - /// indicating the percentage of DataItems that has been finished. - /// - /// Output only. - core.int? labelingProgress; +/// Spec for exact match metric - returns 1 if prediction and reference exactly +/// matches, otherwise 0. +typedef GoogleCloudAiplatformV1ExactMatchSpec = $Empty; - /// The labels with user-defined metadata to organize your DataLabelingJobs. - /// - /// Label keys and values can be no longer than 64 characters (Unicode - /// codepoints), can only contain lowercase letters, numeric characters, - /// underscores and dashes. International characters are allowed. See - /// https://goo.gl/xmQnxf for more information and examples of labels. System - /// reserved label keys are prefixed with "aiplatform.googleapis.com/" and are - /// immutable. Following system labels exist for each DataLabelingJob: * - /// "aiplatform.googleapis.com/schema": output only, its value is the - /// inputs_schema's title. - core.Map? labels; +/// Example-based explainability that returns the nearest neighbors from the +/// provided dataset. +class GoogleCloudAiplatformV1Examples { + /// The Cloud Storage input instances. + GoogleCloudAiplatformV1ExamplesExampleGcsSource? exampleGcsSource; - /// Resource name of the DataLabelingJob. 
+ /// The full configuration for the generated index, the semantics are the same + /// as metadata and should match + /// [NearestNeighborSearchConfig](https://cloud.google.com/vertex-ai/docs/explainable-ai/configuring-explanations-example-based#nearest-neighbor-search-config). /// - /// Output only. - core.String? name; - - /// The SpecialistPools' resource names associated with this job. - core.List? specialistPools; + /// The values for Object must be JSON objects. It can consist of `num`, + /// `String`, `bool` and `null` as well as `Map` and `List` values. + core.Object? nearestNeighborSearchConfig; - /// The detailed state of the job. - /// - /// Output only. - /// Possible string values are: - /// - "JOB_STATE_UNSPECIFIED" : The job state is unspecified. - /// - "JOB_STATE_QUEUED" : The job has been just created or resumed and - /// processing has not yet begun. - /// - "JOB_STATE_PENDING" : The service is preparing to run the job. - /// - "JOB_STATE_RUNNING" : The job is in progress. - /// - "JOB_STATE_SUCCEEDED" : The job completed successfully. - /// - "JOB_STATE_FAILED" : The job failed. - /// - "JOB_STATE_CANCELLING" : The job is being cancelled. From this state the - /// job may only go to either `JOB_STATE_SUCCEEDED`, `JOB_STATE_FAILED` or - /// `JOB_STATE_CANCELLED`. - /// - "JOB_STATE_CANCELLED" : The job has been cancelled. - /// - "JOB_STATE_PAUSED" : The job has been stopped, and can be resumed. - /// - "JOB_STATE_EXPIRED" : The job has expired. - /// - "JOB_STATE_UPDATING" : The job is being updated. Only jobs in the - /// `RUNNING` state can be updated. After updating, the job goes back to the - /// `RUNNING` state. - /// - "JOB_STATE_PARTIALLY_SUCCEEDED" : The job is partially succeeded, some - /// results may be missing due to errors. - core.String? state; + /// The number of neighbors to return when querying for examples. + core.int? neighborCount; - /// Timestamp when this DataLabelingJob was updated most recently. - /// - /// Output only. - core.String? updateTime; + /// Simplified preset configuration, which automatically sets configuration + /// values based on the desired query speed-precision trade-off and modality. + GoogleCloudAiplatformV1Presets? presets; - GoogleCloudAiplatformV1DataLabelingJob({ - this.activeLearningConfig, - this.annotationLabels, - this.createTime, - this.currentSpend, - this.datasets, - this.displayName, - this.encryptionSpec, - this.error, - this.inputs, - this.inputsSchemaUri, - this.instructionUri, - this.labelerCount, - this.labelingProgress, - this.labels, - this.name, - this.specialistPools, - this.state, - this.updateTime, + GoogleCloudAiplatformV1Examples({ + this.exampleGcsSource, + this.nearestNeighborSearchConfig, + this.neighborCount, + this.presets, }); - GoogleCloudAiplatformV1DataLabelingJob.fromJson(core.Map json_) + GoogleCloudAiplatformV1Examples.fromJson(core.Map json_) : this( - activeLearningConfig: json_.containsKey('activeLearningConfig') - ? GoogleCloudAiplatformV1ActiveLearningConfig.fromJson( - json_['activeLearningConfig'] - as core.Map) - : null, - annotationLabels: (json_['annotationLabels'] - as core.Map?) - ?.map( - (key, value) => core.MapEntry( - key, - value as core.String, - ), - ), - createTime: json_['createTime'] as core.String?, - currentSpend: json_.containsKey('currentSpend') - ? GoogleTypeMoney.fromJson( - json_['currentSpend'] as core.Map) - : null, - datasets: (json_['datasets'] as core.List?) 
- ?.map((value) => value as core.String) - .toList(), - displayName: json_['displayName'] as core.String?, - encryptionSpec: json_.containsKey('encryptionSpec') - ? GoogleCloudAiplatformV1EncryptionSpec.fromJson( - json_['encryptionSpec'] + exampleGcsSource: json_.containsKey('exampleGcsSource') + ? GoogleCloudAiplatformV1ExamplesExampleGcsSource.fromJson( + json_['exampleGcsSource'] as core.Map) : null, - error: json_.containsKey('error') - ? GoogleRpcStatus.fromJson( - json_['error'] as core.Map) + nearestNeighborSearchConfig: json_['nearestNeighborSearchConfig'], + neighborCount: json_['neighborCount'] as core.int?, + presets: json_.containsKey('presets') + ? GoogleCloudAiplatformV1Presets.fromJson( + json_['presets'] as core.Map) : null, - inputs: json_['inputs'], - inputsSchemaUri: json_['inputsSchemaUri'] as core.String?, - instructionUri: json_['instructionUri'] as core.String?, - labelerCount: json_['labelerCount'] as core.int?, - labelingProgress: json_['labelingProgress'] as core.int?, - labels: - (json_['labels'] as core.Map?)?.map( - (key, value) => core.MapEntry( - key, - value as core.String, - ), - ), - name: json_['name'] as core.String?, - specialistPools: (json_['specialistPools'] as core.List?) - ?.map((value) => value as core.String) - .toList(), - state: json_['state'] as core.String?, - updateTime: json_['updateTime'] as core.String?, ); core.Map toJson() => { - if (activeLearningConfig != null) - 'activeLearningConfig': activeLearningConfig!, - if (annotationLabels != null) 'annotationLabels': annotationLabels!, - if (createTime != null) 'createTime': createTime!, - if (currentSpend != null) 'currentSpend': currentSpend!, - if (datasets != null) 'datasets': datasets!, - if (displayName != null) 'displayName': displayName!, - if (encryptionSpec != null) 'encryptionSpec': encryptionSpec!, - if (error != null) 'error': error!, - if (inputs != null) 'inputs': inputs!, - if (inputsSchemaUri != null) 'inputsSchemaUri': inputsSchemaUri!, - if (instructionUri != null) 'instructionUri': instructionUri!, - if (labelerCount != null) 'labelerCount': labelerCount!, - if (labelingProgress != null) 'labelingProgress': labelingProgress!, - if (labels != null) 'labels': labels!, - if (name != null) 'name': name!, - if (specialistPools != null) 'specialistPools': specialistPools!, - if (state != null) 'state': state!, - if (updateTime != null) 'updateTime': updateTime!, + if (exampleGcsSource != null) 'exampleGcsSource': exampleGcsSource!, + if (nearestNeighborSearchConfig != null) + 'nearestNeighborSearchConfig': nearestNeighborSearchConfig!, + if (neighborCount != null) 'neighborCount': neighborCount!, + if (presets != null) 'presets': presets!, }; } -/// A collection of DataItems and Annotations on them. -class GoogleCloudAiplatformV1Dataset { - /// Timestamp when this Dataset was created. +/// The Cloud Storage input instances. +class GoogleCloudAiplatformV1ExamplesExampleGcsSource { + /// The format in which instances are given, if not specified, assume it's + /// JSONL format. /// - /// Output only. - core.String? createTime; + /// Currently only JSONL format is supported. + /// Possible string values are: + /// - "DATA_FORMAT_UNSPECIFIED" : Format unspecified, used when unset. + /// - "JSONL" : Examples are stored in JSONL files. + core.String? dataFormat; - /// The number of DataItems in this Dataset. - /// - /// Only apply for non-structured Dataset. - /// - /// Output only. - core.String? dataItemCount; + /// The Cloud Storage location for the input instances. 
+ GoogleCloudAiplatformV1GcsSource? gcsSource; - /// The description of the Dataset. - core.String? description; + GoogleCloudAiplatformV1ExamplesExampleGcsSource({ + this.dataFormat, + this.gcsSource, + }); - /// The user-defined name of the Dataset. - /// - /// The name can be up to 128 characters long and can consist of any UTF-8 - /// characters. - /// - /// Required. - core.String? displayName; + GoogleCloudAiplatformV1ExamplesExampleGcsSource.fromJson(core.Map json_) + : this( + dataFormat: json_['dataFormat'] as core.String?, + gcsSource: json_.containsKey('gcsSource') + ? GoogleCloudAiplatformV1GcsSource.fromJson( + json_['gcsSource'] as core.Map) + : null, + ); - /// Customer-managed encryption key spec for a Dataset. - /// - /// If set, this Dataset and all sub-resources of this Dataset will be secured - /// by this key. - GoogleCloudAiplatformV1EncryptionSpec? encryptionSpec; + core.Map toJson() => { + if (dataFormat != null) 'dataFormat': dataFormat!, + if (gcsSource != null) 'gcsSource': gcsSource!, + }; +} - /// Used to perform consistent read-modify-write updates. - /// - /// If not set, a blind "overwrite" update happens. - core.String? etag; +/// Overrides for example-based explanations. +class GoogleCloudAiplatformV1ExamplesOverride { + /// The number of neighbors to return that have the same crowding tag. + core.int? crowdingCount; - /// The labels with user-defined metadata to organize your Datasets. - /// - /// Label keys and values can be no longer than 64 characters (Unicode - /// codepoints), can only contain lowercase letters, numeric characters, - /// underscores and dashes. International characters are allowed. No more than - /// 64 user labels can be associated with one Dataset (System labels are - /// excluded). See https://goo.gl/xmQnxf for more information and examples of - /// labels. System reserved label keys are prefixed with - /// "aiplatform.googleapis.com/" and are immutable. Following system labels - /// exist for each Dataset: * - /// "aiplatform.googleapis.com/dataset_metadata_schema": output only, its - /// value is the metadata_schema's title. - core.Map? labels; + /// The format of the data being provided with each call. + /// Possible string values are: + /// - "DATA_FORMAT_UNSPECIFIED" : Unspecified format. Must not be used. + /// - "INSTANCES" : Provided data is a set of model inputs. + /// - "EMBEDDINGS" : Provided data is a set of embeddings. + core.String? dataFormat; - /// Additional information about the Dataset. - /// - /// Required. - /// - /// The values for Object must be JSON objects. It can consist of `num`, - /// `String`, `bool` and `null` as well as `Map` and `List` values. - core.Object? metadata; + /// The number of neighbors to return. + core.int? neighborCount; - /// The resource name of the Artifact that was created in MetadataStore when - /// creating the Dataset. - /// - /// The Artifact resource name pattern is - /// `projects/{project}/locations/{location}/metadataStores/{metadata_store}/artifacts/{artifact}`. - /// - /// Output only. - core.String? metadataArtifact; + /// Restrict the resulting nearest neighbors to respect these constraints. + core.List? restrictions; - /// Points to a YAML file stored on Google Cloud Storage describing additional - /// information about the Dataset. - /// - /// The schema is defined as an OpenAPI 3.0.2 Schema Object. The schema files - /// that can be used here are found in - /// gs://google-cloud-aiplatform/schema/dataset/metadata/. - /// - /// Required. - core.String? 
metadataSchemaUri; + /// If true, return the embeddings instead of neighbors. + core.bool? returnEmbeddings; - /// Reference to the public base model last used by the dataset. - /// - /// Only set for prompt datasets. - /// - /// Optional. - core.String? modelReference; + GoogleCloudAiplatformV1ExamplesOverride({ + this.crowdingCount, + this.dataFormat, + this.neighborCount, + this.restrictions, + this.returnEmbeddings, + }); - /// Identifier. - /// - /// The resource name of the Dataset. - /// - /// Output only. - core.String? name; + GoogleCloudAiplatformV1ExamplesOverride.fromJson(core.Map json_) + : this( + crowdingCount: json_['crowdingCount'] as core.int?, + dataFormat: json_['dataFormat'] as core.String?, + neighborCount: json_['neighborCount'] as core.int?, + restrictions: (json_['restrictions'] as core.List?) + ?.map((value) => + GoogleCloudAiplatformV1ExamplesRestrictionsNamespace.fromJson( + value as core.Map)) + .toList(), + returnEmbeddings: json_['returnEmbeddings'] as core.bool?, + ); - /// Reserved for future use. - /// - /// Output only. - core.bool? satisfiesPzi; + core.Map toJson() => { + if (crowdingCount != null) 'crowdingCount': crowdingCount!, + if (dataFormat != null) 'dataFormat': dataFormat!, + if (neighborCount != null) 'neighborCount': neighborCount!, + if (restrictions != null) 'restrictions': restrictions!, + if (returnEmbeddings != null) 'returnEmbeddings': returnEmbeddings!, + }; +} - /// Reserved for future use. - /// - /// Output only. - core.bool? satisfiesPzs; +/// Restrictions namespace for example-based explanations overrides. +class GoogleCloudAiplatformV1ExamplesRestrictionsNamespace { + /// The list of allowed tags. + core.List? allow; - /// All SavedQueries belong to the Dataset will be returned in List/Get - /// Dataset response. - /// - /// The annotation_specs field will not be populated except for UI cases which - /// will only use annotation_spec_count. In CreateDataset request, a - /// SavedQuery is created together if this field is set, up to one SavedQuery - /// can be set in CreateDatasetRequest. The SavedQuery should not contain any - /// AnnotationSpec. - core.List? savedQueries; + /// The list of deny tags. + core.List? deny; - /// Timestamp when this Dataset was last updated. - /// - /// Output only. - core.String? updateTime; + /// The namespace name. + core.String? namespaceName; - GoogleCloudAiplatformV1Dataset({ - this.createTime, - this.dataItemCount, - this.description, - this.displayName, - this.encryptionSpec, - this.etag, - this.labels, - this.metadata, - this.metadataArtifact, - this.metadataSchemaUri, - this.modelReference, - this.name, - this.satisfiesPzi, - this.satisfiesPzs, - this.savedQueries, - this.updateTime, + GoogleCloudAiplatformV1ExamplesRestrictionsNamespace({ + this.allow, + this.deny, + this.namespaceName, }); - GoogleCloudAiplatformV1Dataset.fromJson(core.Map json_) + GoogleCloudAiplatformV1ExamplesRestrictionsNamespace.fromJson(core.Map json_) : this( - createTime: json_['createTime'] as core.String?, - dataItemCount: json_['dataItemCount'] as core.String?, - description: json_['description'] as core.String?, - displayName: json_['displayName'] as core.String?, - encryptionSpec: json_.containsKey('encryptionSpec') - ? 
GoogleCloudAiplatformV1EncryptionSpec.fromJson( - json_['encryptionSpec'] - as core.Map) - : null, - etag: json_['etag'] as core.String?, - labels: - (json_['labels'] as core.Map?)?.map( - (key, value) => core.MapEntry( - key, - value as core.String, - ), - ), - metadata: json_['metadata'], - metadataArtifact: json_['metadataArtifact'] as core.String?, - metadataSchemaUri: json_['metadataSchemaUri'] as core.String?, - modelReference: json_['modelReference'] as core.String?, - name: json_['name'] as core.String?, - satisfiesPzi: json_['satisfiesPzi'] as core.bool?, - satisfiesPzs: json_['satisfiesPzs'] as core.bool?, - savedQueries: (json_['savedQueries'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1SavedQuery.fromJson( - value as core.Map)) + allow: (json_['allow'] as core.List?) + ?.map((value) => value as core.String) .toList(), - updateTime: json_['updateTime'] as core.String?, + deny: (json_['deny'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + namespaceName: json_['namespaceName'] as core.String?, ); core.Map toJson() => { - if (createTime != null) 'createTime': createTime!, - if (dataItemCount != null) 'dataItemCount': dataItemCount!, - if (description != null) 'description': description!, - if (displayName != null) 'displayName': displayName!, - if (encryptionSpec != null) 'encryptionSpec': encryptionSpec!, - if (etag != null) 'etag': etag!, - if (labels != null) 'labels': labels!, - if (metadata != null) 'metadata': metadata!, - if (metadataArtifact != null) 'metadataArtifact': metadataArtifact!, - if (metadataSchemaUri != null) 'metadataSchemaUri': metadataSchemaUri!, - if (modelReference != null) 'modelReference': modelReference!, - if (name != null) 'name': name!, - if (satisfiesPzi != null) 'satisfiesPzi': satisfiesPzi!, - if (satisfiesPzs != null) 'satisfiesPzs': satisfiesPzs!, - if (savedQueries != null) 'savedQueries': savedQueries!, - if (updateTime != null) 'updateTime': updateTime!, + if (allow != null) 'allow': allow!, + if (deny != null) 'deny': deny!, + if (namespaceName != null) 'namespaceName': namespaceName!, }; } -/// Describes the dataset version. -class GoogleCloudAiplatformV1DatasetVersion { - /// Name of the associated BigQuery dataset. - /// - /// Output only. - core.String? bigQueryDatasetName; - - /// Timestamp when this DatasetVersion was created. +/// Instance of a general execution. +class GoogleCloudAiplatformV1Execution { + /// Timestamp when this Execution was created. /// /// Output only. core.String? createTime; - /// The user-defined name of the DatasetVersion. + /// Description of the Execution + core.String? description; + + /// User provided display name of the Execution. /// - /// The name can be up to 128 characters long and can consist of any UTF-8 - /// characters. + /// May be up to 128 Unicode characters. core.String? displayName; - /// Used to perform consistent read-modify-write updates. + /// An eTag used to perform consistent read-modify-write updates. /// /// If not set, a blind "overwrite" update happens. core.String? etag; - /// Additional information about the DatasetVersion. + /// The labels with user-defined metadata to organize your Executions. /// - /// Required. Output only. + /// Label keys and values can be no longer than 64 characters (Unicode + /// codepoints), can only contain lowercase letters, numeric characters, + /// underscores and dashes. International characters are allowed. No more than + /// 64 user labels can be associated with one Execution (System labels are + /// excluded). 
+ core.Map? labels; + + /// Properties of the Execution. + /// + /// Top level metadata keys' heading and trailing spaces will be trimmed. The + /// size of this field should not exceed 200KB. /// /// The values for Object must be JSON objects. It can consist of `num`, /// `String`, `bool` and `null` as well as `Map` and `List` values. - core.Object? metadata; + core.Map? metadata; - /// Reference to the public base model last used by the dataset version. - /// - /// Only set for prompt dataset versions. + /// The resource name of the Execution. /// /// Output only. - core.String? modelReference; + core.String? name; - /// Identifier. - /// - /// The resource name of the DatasetVersion. + /// The title of the schema describing the metadata. /// - /// Output only. - core.String? name; + /// Schema title and version is expected to be registered in earlier Create + /// Schema calls. And both are used together as unique identifiers to identify + /// schemas within the local metadata store. + core.String? schemaTitle; - /// Reserved for future use. + /// The version of the schema in `schema_title` to use. /// - /// Output only. - core.bool? satisfiesPzi; + /// Schema title and version is expected to be registered in earlier Create + /// Schema calls. And both are used together as unique identifiers to identify + /// schemas within the local metadata store. + core.String? schemaVersion; - /// Reserved for future use. + /// The state of this Execution. /// - /// Output only. - core.bool? satisfiesPzs; + /// This is a property of the Execution, and does not imply or capture any + /// ongoing process. This property is managed by clients (such as Vertex AI + /// Pipelines) and the system does not prescribe or check the validity of + /// state transitions. + /// Possible string values are: + /// - "STATE_UNSPECIFIED" : Unspecified Execution state + /// - "NEW" : The Execution is new + /// - "RUNNING" : The Execution is running + /// - "COMPLETE" : The Execution has finished running + /// - "FAILED" : The Execution has failed + /// - "CACHED" : The Execution completed through Cache hit. + /// - "CANCELLED" : The Execution was cancelled. + core.String? state; - /// Timestamp when this DatasetVersion was last updated. + /// Timestamp when this Execution was last updated. /// /// Output only. core.String? 
updateTime; - GoogleCloudAiplatformV1DatasetVersion({ - this.bigQueryDatasetName, + GoogleCloudAiplatformV1Execution({ this.createTime, + this.description, this.displayName, this.etag, + this.labels, this.metadata, - this.modelReference, this.name, - this.satisfiesPzi, - this.satisfiesPzs, + this.schemaTitle, + this.schemaVersion, + this.state, this.updateTime, }); - GoogleCloudAiplatformV1DatasetVersion.fromJson(core.Map json_) - : this( - bigQueryDatasetName: json_['bigQueryDatasetName'] as core.String?, - createTime: json_['createTime'] as core.String?, - displayName: json_['displayName'] as core.String?, - etag: json_['etag'] as core.String?, - metadata: json_['metadata'], - modelReference: json_['modelReference'] as core.String?, - name: json_['name'] as core.String?, - satisfiesPzi: json_['satisfiesPzi'] as core.bool?, - satisfiesPzs: json_['satisfiesPzs'] as core.bool?, - updateTime: json_['updateTime'] as core.String?, - ); - - core.Map toJson() => { - if (bigQueryDatasetName != null) - 'bigQueryDatasetName': bigQueryDatasetName!, - if (createTime != null) 'createTime': createTime!, - if (displayName != null) 'displayName': displayName!, - if (etag != null) 'etag': etag!, - if (metadata != null) 'metadata': metadata!, - if (modelReference != null) 'modelReference': modelReference!, - if (name != null) 'name': name!, - if (satisfiesPzi != null) 'satisfiesPzi': satisfiesPzi!, - if (satisfiesPzs != null) 'satisfiesPzs': satisfiesPzs!, - if (updateTime != null) 'updateTime': updateTime!, - }; -} - -/// A description of resources that are dedicated to a DeployedModel, and that -/// need a higher degree of manual configuration. -class GoogleCloudAiplatformV1DedicatedResources { - /// The metric specifications that overrides a resource utilization metric - /// (CPU utilization, accelerator's duty cycle, and so on) target value - /// (default to 60 if not set). - /// - /// At most one entry is allowed per metric. If machine_spec.accelerator_count - /// is above 0, the autoscaling will be based on both CPU utilization and - /// accelerator's duty cycle metrics and scale up when either metrics exceeds - /// its target value while scale down if both metrics are under their target - /// value. The default target value is 60 for both metrics. If - /// machine_spec.accelerator_count is 0, the autoscaling will be based on CPU - /// utilization metric only with default target value 60 if not explicitly - /// set. For example, in the case of Online Prediction, if you want to - /// override target CPU utilization to 80, you should set - /// autoscaling_metric_specs.metric_name to - /// `aiplatform.googleapis.com/prediction/online/cpu/utilization` and - /// autoscaling_metric_specs.target to `80`. - /// - /// Immutable. - core.List? - autoscalingMetricSpecs; - - /// The specification of a single machine used by the prediction. - /// - /// Required. Immutable. - GoogleCloudAiplatformV1MachineSpec? machineSpec; - - /// The maximum number of replicas this DeployedModel may be deployed on when - /// the traffic against it increases. - /// - /// If the requested value is too large, the deployment will error, but if - /// deployment succeeds then the ability to scale the model to that many - /// replicas is guaranteed (barring service outages). If traffic against the - /// DeployedModel increases beyond what its replicas at maximum may handle, a - /// portion of the traffic will be dropped. If this value is not provided, - /// will use min_replica_count as the default value. 
The value of this field - /// impacts the charge against Vertex CPU and GPU quotas. Specifically, you - /// will be charged for (max_replica_count * number of cores in the selected - /// machine type) and (max_replica_count * number of GPUs per replica in the - /// selected machine type). - /// - /// Immutable. - core.int? maxReplicaCount; - - /// The minimum number of machine replicas this DeployedModel will be always - /// deployed on. - /// - /// This value must be greater than or equal to 1. If traffic against the - /// DeployedModel increases, it may dynamically be deployed onto more - /// replicas, and as traffic decreases, some of these extra replicas may be - /// freed. - /// - /// Required. Immutable. - core.int? minReplicaCount; - - /// If true, schedule the deployment workload on - /// [spot VMs](https://cloud.google.com/kubernetes-engine/docs/concepts/spot-vms). - /// - /// Optional. - core.bool? spot; - - GoogleCloudAiplatformV1DedicatedResources({ - this.autoscalingMetricSpecs, - this.machineSpec, - this.maxReplicaCount, - this.minReplicaCount, - this.spot, - }); - - GoogleCloudAiplatformV1DedicatedResources.fromJson(core.Map json_) - : this( - autoscalingMetricSpecs: - (json_['autoscalingMetricSpecs'] as core.List?) - ?.map((value) => - GoogleCloudAiplatformV1AutoscalingMetricSpec.fromJson( - value as core.Map)) - .toList(), - machineSpec: json_.containsKey('machineSpec') - ? GoogleCloudAiplatformV1MachineSpec.fromJson( - json_['machineSpec'] as core.Map) - : null, - maxReplicaCount: json_['maxReplicaCount'] as core.int?, - minReplicaCount: json_['minReplicaCount'] as core.int?, - spot: json_['spot'] as core.bool?, - ); - - core.Map toJson() => { - if (autoscalingMetricSpecs != null) - 'autoscalingMetricSpecs': autoscalingMetricSpecs!, - if (machineSpec != null) 'machineSpec': machineSpec!, - if (maxReplicaCount != null) 'maxReplicaCount': maxReplicaCount!, - if (minReplicaCount != null) 'minReplicaCount': minReplicaCount!, - if (spot != null) 'spot': spot!, - }; -} - -/// Request message for FeaturestoreService.DeleteFeatureValues. -class GoogleCloudAiplatformV1DeleteFeatureValuesRequest { - /// Select feature values to be deleted by specifying entities. - GoogleCloudAiplatformV1DeleteFeatureValuesRequestSelectEntity? selectEntity; - - /// Select feature values to be deleted by specifying time range and features. - GoogleCloudAiplatformV1DeleteFeatureValuesRequestSelectTimeRangeAndFeature? - selectTimeRangeAndFeature; - - GoogleCloudAiplatformV1DeleteFeatureValuesRequest({ - this.selectEntity, - this.selectTimeRangeAndFeature, - }); - - GoogleCloudAiplatformV1DeleteFeatureValuesRequest.fromJson(core.Map json_) + GoogleCloudAiplatformV1Execution.fromJson(core.Map json_) : this( - selectEntity: json_.containsKey('selectEntity') - ? GoogleCloudAiplatformV1DeleteFeatureValuesRequestSelectEntity - .fromJson(json_['selectEntity'] - as core.Map) - : null, - selectTimeRangeAndFeature: json_ - .containsKey('selectTimeRangeAndFeature') - ? GoogleCloudAiplatformV1DeleteFeatureValuesRequestSelectTimeRangeAndFeature - .fromJson(json_['selectTimeRangeAndFeature'] - as core.Map) + createTime: json_['createTime'] as core.String?, + description: json_['description'] as core.String?, + displayName: json_['displayName'] as core.String?, + etag: json_['etag'] as core.String?, + labels: + (json_['labels'] as core.Map?)?.map( + (key, value) => core.MapEntry( + key, + value as core.String, + ), + ), + metadata: json_.containsKey('metadata') + ? 
json_['metadata'] as core.Map : null, + name: json_['name'] as core.String?, + schemaTitle: json_['schemaTitle'] as core.String?, + schemaVersion: json_['schemaVersion'] as core.String?, + state: json_['state'] as core.String?, + updateTime: json_['updateTime'] as core.String?, ); core.Map toJson() => { - if (selectEntity != null) 'selectEntity': selectEntity!, - if (selectTimeRangeAndFeature != null) - 'selectTimeRangeAndFeature': selectTimeRangeAndFeature!, + if (createTime != null) 'createTime': createTime!, + if (description != null) 'description': description!, + if (displayName != null) 'displayName': displayName!, + if (etag != null) 'etag': etag!, + if (labels != null) 'labels': labels!, + if (metadata != null) 'metadata': metadata!, + if (name != null) 'name': name!, + if (schemaTitle != null) 'schemaTitle': schemaTitle!, + if (schemaVersion != null) 'schemaVersion': schemaVersion!, + if (state != null) 'state': state!, + if (updateTime != null) 'updateTime': updateTime!, }; } -/// Message to select entity. -/// -/// If an entity id is selected, all the feature values corresponding to the -/// entity id will be deleted, including the entityId. -class GoogleCloudAiplatformV1DeleteFeatureValuesRequestSelectEntity { - /// Selectors choosing feature values of which entity id to be deleted from - /// the EntityType. +/// Request message for PredictionService.Explain. +class GoogleCloudAiplatformV1ExplainRequest { + /// If specified, this ExplainRequest will be served by the chosen + /// DeployedModel, overriding Endpoint.traffic_split. + core.String? deployedModelId; + + /// If specified, overrides the explanation_spec of the DeployedModel. + /// + /// Can be used for explaining prediction results with different + /// configurations, such as: - Explaining top-5 predictions results as opposed + /// to top-1; - Increasing path count or step count of the attribution methods + /// to reduce approximate errors; - Using different baselines for explaining + /// the prediction results. + GoogleCloudAiplatformV1ExplanationSpecOverride? explanationSpecOverride; + + /// The instances that are the input to the explanation call. + /// + /// A DeployedModel may have an upper limit on the number of instances it + /// supports per request, and when it is exceeded the explanation call errors + /// in case of AutoML Models, or, in case of customer created Models, the + /// behaviour is as documented by that Model. The schema of any single + /// instance may be specified via Endpoint's DeployedModels' Model's + /// PredictSchemata's instance_schema_uri. /// /// Required. - GoogleCloudAiplatformV1EntityIdSelector? entityIdSelector; + /// + /// The values for Object must be JSON objects. It can consist of `num`, + /// `String`, `bool` and `null` as well as `Map` and `List` values. + core.List? instances; - GoogleCloudAiplatformV1DeleteFeatureValuesRequestSelectEntity({ - this.entityIdSelector, + /// The parameters that govern the prediction. + /// + /// The schema of the parameters may be specified via Endpoint's + /// DeployedModels' Model's PredictSchemata's parameters_schema_uri. + /// + /// The values for Object must be JSON objects. It can consist of `num`, + /// `String`, `bool` and `null` as well as `Map` and `List` values. + core.Object? 
parameters; + + GoogleCloudAiplatformV1ExplainRequest({ + this.deployedModelId, + this.explanationSpecOverride, + this.instances, + this.parameters, }); - GoogleCloudAiplatformV1DeleteFeatureValuesRequestSelectEntity.fromJson( - core.Map json_) + GoogleCloudAiplatformV1ExplainRequest.fromJson(core.Map json_) : this( - entityIdSelector: json_.containsKey('entityIdSelector') - ? GoogleCloudAiplatformV1EntityIdSelector.fromJson( - json_['entityIdSelector'] + deployedModelId: json_['deployedModelId'] as core.String?, + explanationSpecOverride: json_.containsKey('explanationSpecOverride') + ? GoogleCloudAiplatformV1ExplanationSpecOverride.fromJson( + json_['explanationSpecOverride'] as core.Map) : null, + instances: json_.containsKey('instances') + ? json_['instances'] as core.List + : null, + parameters: json_['parameters'], ); core.Map toJson() => { - if (entityIdSelector != null) 'entityIdSelector': entityIdSelector!, + if (deployedModelId != null) 'deployedModelId': deployedModelId!, + if (explanationSpecOverride != null) + 'explanationSpecOverride': explanationSpecOverride!, + if (instances != null) 'instances': instances!, + if (parameters != null) 'parameters': parameters!, }; } -/// Message to select time range and feature. -/// -/// Values of the selected feature generated within an inclusive time range will -/// be deleted. Using this option permanently deletes the feature values from -/// the specified feature IDs within the specified time range. This might -/// include data from the online storage. If you want to retain any deleted -/// historical data in the online storage, you must re-ingest it. -class GoogleCloudAiplatformV1DeleteFeatureValuesRequestSelectTimeRangeAndFeature { - /// Selectors choosing which feature values to be deleted from the EntityType. - /// - /// Required. - GoogleCloudAiplatformV1FeatureSelector? featureSelector; +/// Response message for PredictionService.Explain. +class GoogleCloudAiplatformV1ExplainResponse { + /// ID of the Endpoint's DeployedModel that served this explanation. + core.String? deployedModelId; - /// If set, data will not be deleted from online storage. + /// The explanations of the Model's PredictResponse.predictions. /// - /// When time range is older than the data in online storage, setting this to - /// be true will make the deletion have no impact on online serving. - core.bool? skipOnlineStorageDelete; + /// It has the same number of elements as instances to be explained. + core.List? explanations; - /// Select feature generated within a half-inclusive time range. + /// The predictions that are the output of the predictions call. /// - /// The time range is lower inclusive and upper exclusive. + /// Same as PredictResponse.predictions. /// - /// Required. - GoogleTypeInterval? timeRange; + /// The values for Object must be JSON objects. It can consist of `num`, + /// `String`, `bool` and `null` as well as `Map` and `List` values. + core.List? predictions; - GoogleCloudAiplatformV1DeleteFeatureValuesRequestSelectTimeRangeAndFeature({ - this.featureSelector, - this.skipOnlineStorageDelete, - this.timeRange, + GoogleCloudAiplatformV1ExplainResponse({ + this.deployedModelId, + this.explanations, + this.predictions, }); - GoogleCloudAiplatformV1DeleteFeatureValuesRequestSelectTimeRangeAndFeature.fromJson( - core.Map json_) + GoogleCloudAiplatformV1ExplainResponse.fromJson(core.Map json_) : this( - featureSelector: json_.containsKey('featureSelector') - ? 
GoogleCloudAiplatformV1FeatureSelector.fromJson( - json_['featureSelector'] - as core.Map) - : null, - skipOnlineStorageDelete: - json_['skipOnlineStorageDelete'] as core.bool?, - timeRange: json_.containsKey('timeRange') - ? GoogleTypeInterval.fromJson( - json_['timeRange'] as core.Map) + deployedModelId: json_['deployedModelId'] as core.String?, + explanations: (json_['explanations'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1Explanation.fromJson( + value as core.Map)) + .toList(), + predictions: json_.containsKey('predictions') + ? json_['predictions'] as core.List : null, ); core.Map toJson() => { - if (featureSelector != null) 'featureSelector': featureSelector!, - if (skipOnlineStorageDelete != null) - 'skipOnlineStorageDelete': skipOnlineStorageDelete!, - if (timeRange != null) 'timeRange': timeRange!, + if (deployedModelId != null) 'deployedModelId': deployedModelId!, + if (explanations != null) 'explanations': explanations!, + if (predictions != null) 'predictions': predictions!, }; } -/// Request message for IndexEndpointService.DeployIndex. -class GoogleCloudAiplatformV1DeployIndexRequest { - /// The DeployedIndex to be created within the IndexEndpoint. +/// Explanation of a prediction (provided in PredictResponse.predictions) +/// produced by the Model on a given instance. +class GoogleCloudAiplatformV1Explanation { + /// Feature attributions grouped by predicted outputs. /// - /// Required. - GoogleCloudAiplatformV1DeployedIndex? deployedIndex; + /// For Models that predict only one output, such as regression Models that + /// predict only one score, there is only one attibution that explains the + /// predicted output. For Models that predict multiple outputs, such as + /// multiclass Models that predict multiple classes, each element explains one + /// specific item. Attribution.output_index can be used to identify which + /// output this attribution is explaining. By default, we provide Shapley + /// values for the predicted class. However, you can configure the explanation + /// request to generate Shapley values for any other classes too. For example, + /// if a model predicts a probability of `0.4` for approving a loan + /// application, the model's decision is to reject the application since + /// `p(reject) = 0.6 > p(approve) = 0.4`, and the default Shapley values would + /// be computed for rejection decision and not approval, even though the + /// latter might be the positive class. If users set + /// ExplanationParameters.top_k, the attributions are sorted by + /// instance_output_value in descending order. If + /// ExplanationParameters.output_indices is specified, the attributions are + /// stored by Attribution.output_index in the same order as they appear in the + /// output_indices. + /// + /// Output only. + core.List? attributions; - GoogleCloudAiplatformV1DeployIndexRequest({ - this.deployedIndex, + /// List of the nearest neighbors for example-based explanations. + /// + /// For models deployed with the examples explanations feature enabled, the + /// attributions field is empty and instead the neighbors field is populated. + /// + /// Output only. + core.List? neighbors; + + GoogleCloudAiplatformV1Explanation({ + this.attributions, + this.neighbors, }); - GoogleCloudAiplatformV1DeployIndexRequest.fromJson(core.Map json_) + GoogleCloudAiplatformV1Explanation.fromJson(core.Map json_) : this( - deployedIndex: json_.containsKey('deployedIndex') - ? 
GoogleCloudAiplatformV1DeployedIndex.fromJson( - json_['deployedIndex'] as core.Map) - : null, + attributions: (json_['attributions'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1Attribution.fromJson( + value as core.Map)) + .toList(), + neighbors: (json_['neighbors'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1Neighbor.fromJson( + value as core.Map)) + .toList(), ); core.Map toJson() => { - if (deployedIndex != null) 'deployedIndex': deployedIndex!, + if (attributions != null) 'attributions': attributions!, + if (neighbors != null) 'neighbors': neighbors!, }; } -/// Request message for EndpointService.DeployModel. -class GoogleCloudAiplatformV1DeployModelRequest { - /// The DeployedModel to be created within the Endpoint. +/// Metadata describing the Model's input and output for explanation. +class GoogleCloudAiplatformV1ExplanationMetadata { + /// Points to a YAML file stored on Google Cloud Storage describing the format + /// of the feature attributions. /// - /// Note that Endpoint.traffic_split must be updated for the DeployedModel to - /// start receiving traffic, either as part of this call, or via - /// EndpointService.UpdateEndpoint. + /// The schema is defined as an OpenAPI 3.0.2 + /// [Schema Object](https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.2.md#schemaObject). + /// AutoML tabular Models always have this field populated by Vertex AI. Note: + /// The URI given on output may be different, including the URI scheme, than + /// the one given on input. The output URI will point to a location where the + /// user only has a read access. + core.String? featureAttributionsSchemaUri; + + /// Map from feature names to feature input metadata. + /// + /// Keys are the name of the features. Values are the specification of the + /// feature. An empty InputMetadata is valid. It describes a text feature + /// which has the name specified as the key in ExplanationMetadata.inputs. The + /// baseline of the empty feature is chosen by Vertex AI. For Vertex + /// AI-provided Tensorflow images, the key can be any friendly name of the + /// feature. Once specified, featureAttributions are keyed by this key (if not + /// grouped with another feature). For custom images, the key must match with + /// the key in instance. /// /// Required. - GoogleCloudAiplatformV1DeployedModel? deployedModel; + core.Map? inputs; - /// A map from a DeployedModel's ID to the percentage of this Endpoint's - /// traffic that should be forwarded to that DeployedModel. + /// Name of the source to generate embeddings for example based explanations. + core.String? latentSpaceSource; + + /// Map from output names to output metadata. /// - /// If this field is non-empty, then the Endpoint's traffic_split will be - /// overwritten with it. To refer to the ID of the just being deployed Model, - /// a "0" should be used, and the actual ID of the new DeployedModel will be - /// filled in its place by this method. The traffic percentage values must add - /// up to 100. If this field is empty, then the Endpoint's traffic_split is - /// not updated. - core.Map? trafficSplit; + /// For Vertex AI-provided Tensorflow images, keys can be any user defined + /// string that consists of any UTF-8 characters. For custom images, keys are + /// the name of the output field in the prediction to be explained. Currently + /// only one key is allowed. + /// + /// Required. + core.Map? 
outputs; - GoogleCloudAiplatformV1DeployModelRequest({ - this.deployedModel, - this.trafficSplit, + GoogleCloudAiplatformV1ExplanationMetadata({ + this.featureAttributionsSchemaUri, + this.inputs, + this.latentSpaceSource, + this.outputs, }); - GoogleCloudAiplatformV1DeployModelRequest.fromJson(core.Map json_) + GoogleCloudAiplatformV1ExplanationMetadata.fromJson(core.Map json_) : this( - deployedModel: json_.containsKey('deployedModel') - ? GoogleCloudAiplatformV1DeployedModel.fromJson( - json_['deployedModel'] as core.Map) - : null, - trafficSplit: - (json_['trafficSplit'] as core.Map?) - ?.map( + featureAttributionsSchemaUri: + json_['featureAttributionsSchemaUri'] as core.String?, + inputs: + (json_['inputs'] as core.Map?)?.map( (key, value) => core.MapEntry( key, - value as core.int, + GoogleCloudAiplatformV1ExplanationMetadataInputMetadata.fromJson( + value as core.Map), + ), + ), + latentSpaceSource: json_['latentSpaceSource'] as core.String?, + outputs: + (json_['outputs'] as core.Map?)?.map( + (key, value) => core.MapEntry( + key, + GoogleCloudAiplatformV1ExplanationMetadataOutputMetadata.fromJson( + value as core.Map), ), ), ); core.Map toJson() => { - if (deployedModel != null) 'deployedModel': deployedModel!, - if (trafficSplit != null) 'trafficSplit': trafficSplit!, + if (featureAttributionsSchemaUri != null) + 'featureAttributionsSchemaUri': featureAttributionsSchemaUri!, + if (inputs != null) 'inputs': inputs!, + if (latentSpaceSource != null) 'latentSpaceSource': latentSpaceSource!, + if (outputs != null) 'outputs': outputs!, }; } -/// A deployment of an Index. +/// Metadata of the input of a feature. /// -/// IndexEndpoints contain one or more DeployedIndexes. -class GoogleCloudAiplatformV1DeployedIndex { - /// A description of resources that the DeployedIndex uses, which to large - /// degree are decided by Vertex AI, and optionally allows only a modest - /// additional configuration. - /// - /// If min_replica_count is not set, the default value is 2 (we don't provide - /// SLA when min_replica_count=1). If max_replica_count is not set, the - /// default value is min_replica_count. The max allowed replica count is 1000. - /// - /// Optional. - GoogleCloudAiplatformV1AutomaticResources? automaticResources; - - /// Timestamp when the DeployedIndex was created. +/// Fields other than InputMetadata.input_baselines are applicable only for +/// Models that are using Vertex AI-provided images for Tensorflow. +class GoogleCloudAiplatformV1ExplanationMetadataInputMetadata { + /// Specifies the shape of the values of the input if the input is a sparse + /// representation. /// - /// Output only. - core.String? createTime; + /// Refer to Tensorflow documentation for more details: + /// https://www.tensorflow.org/api_docs/python/tf/sparse/SparseTensor. + core.String? denseShapeTensorName; - /// A description of resources that are dedicated to the DeployedIndex, and - /// that need a higher degree of manual configuration. - /// - /// The field min_replica_count must be set to a value strictly greater than - /// 0, or else validation will fail. We don't provide SLA when - /// min_replica_count=1. If max_replica_count is not set, the default value is - /// min_replica_count. The max allowed replica count is 1000. Available - /// machine types for SMALL shard: e2-standard-2 and all machine types - /// available for MEDIUM and LARGE shard. Available machine types for MEDIUM - /// shard: e2-standard-16 and all machine types available for LARGE shard. 
- /// Available machine types for LARGE shard: e2-highmem-16, n2d-standard-32. - /// n1-standard-16 and n1-standard-32 are still available, but we recommend - /// e2-standard-16 and e2-highmem-16 for cost efficiency. + /// A list of baselines for the encoded tensor. /// - /// Optional. - GoogleCloudAiplatformV1DedicatedResources? dedicatedResources; - - /// If set, the authentication is enabled for the private endpoint. + /// The shape of each baseline should match the shape of the encoded tensor. + /// If a scalar is provided, Vertex AI broadcasts to the same shape as the + /// encoded tensor. /// - /// Optional. - GoogleCloudAiplatformV1DeployedIndexAuthConfig? deployedIndexAuthConfig; + /// The values for Object must be JSON objects. It can consist of `num`, + /// `String`, `bool` and `null` as well as `Map` and `List` values. + core.List? encodedBaselines; - /// The deployment group can be no longer than 64 characters (eg: 'test', - /// 'prod'). - /// - /// If not set, we will use the 'default' deployment group. Creating - /// `deployment_groups` with `reserved_ip_ranges` is a recommended practice - /// when the peered network has multiple peering ranges. This creates your - /// deployments from predictable IP spaces for easier traffic administration. - /// Also, one deployment_group (except 'default') can only be used with the - /// same reserved_ip_ranges which means if the deployment_group has been used - /// with reserved_ip_ranges: \[a, b, c\], using it with \[a, b\] or \[d, e\] - /// is disallowed. Note: we only support up to 5 deployment groups(not - /// including 'default'). + /// Encoded tensor is a transformation of the input tensor. /// - /// Optional. - core.String? deploymentGroup; + /// Must be provided if choosing Integrated Gradients attribution or XRAI + /// attribution and the input tensor is not differentiable. An encoded tensor + /// is generated if the input tensor is encoded by a lookup table. + core.String? encodedTensorName; - /// The display name of the DeployedIndex. + /// Defines how the feature is encoded into the input tensor. /// - /// If not provided upon creation, the Index's display_name is used. - core.String? displayName; + /// Defaults to IDENTITY. + /// Possible string values are: + /// - "ENCODING_UNSPECIFIED" : Default value. This is the same as IDENTITY. + /// - "IDENTITY" : The tensor represents one feature. + /// - "BAG_OF_FEATURES" : The tensor represents a bag of features where each + /// index maps to a feature. InputMetadata.index_feature_mapping must be + /// provided for this encoding. For example: ``` input = [27, 6.0, 150] + /// index_feature_mapping = ["age", "height", "weight"] ``` + /// - "BAG_OF_FEATURES_SPARSE" : The tensor represents a bag of features where + /// each index maps to a feature. Zero values in the tensor indicates feature + /// being non-existent. InputMetadata.index_feature_mapping must be provided + /// for this encoding. For example: ``` input = [2, 0, 5, 0, 1] + /// index_feature_mapping = ["a", "b", "c", "d", "e"] ``` + /// - "INDICATOR" : The tensor is a list of binaries representing whether a + /// feature exists or not (1 indicates existence). + /// InputMetadata.index_feature_mapping must be provided for this encoding. + /// For example: ``` input = [1, 0, 1, 0, 1] index_feature_mapping = ["a", + /// "b", "c", "d", "e"] ``` + /// - "COMBINED_EMBEDDING" : The tensor is encoded into a 1-dimensional array + /// represented by an encoded tensor. 
InputMetadata.encoded_tensor_name must + /// be provided for this encoding. For example: ``` input = ["This", "is", + /// "a", "test", "."] encoded = [0.1, 0.2, 0.3, 0.4, 0.5] ``` + /// - "CONCAT_EMBEDDING" : Select this encoding when the input tensor is + /// encoded into a 2-dimensional array represented by an encoded tensor. + /// InputMetadata.encoded_tensor_name must be provided for this encoding. The + /// first dimension of the encoded tensor's shape is the same as the input + /// tensor's shape. For example: ``` input = ["This", "is", "a", "test", "."] + /// encoded = [[0.1, 0.2, 0.3, 0.4, 0.5], [0.2, 0.1, 0.4, 0.3, 0.5], [0.5, + /// 0.1, 0.3, 0.5, 0.4], [0.5, 0.3, 0.1, 0.2, 0.4], [0.4, 0.3, 0.2, 0.5, 0.1]] + /// ``` + core.String? encoding; - /// If true, private endpoint's access logs are sent to Cloud Logging. - /// - /// These logs are like standard server access logs, containing information - /// like timestamp and latency for each MatchRequest. Note that logs may incur - /// a cost, especially if the deployed index receives a high queries per - /// second rate (QPS). Estimate your costs before enabling this option. + /// The domain details of the input feature value. /// - /// Optional. - core.bool? enableAccessLogging; + /// Like min/max, original mean or standard deviation if normalized. + GoogleCloudAiplatformV1ExplanationMetadataInputMetadataFeatureValueDomain? + featureValueDomain; - /// The user specified ID of the DeployedIndex. - /// - /// The ID can be up to 128 characters long and must start with a letter and - /// only contain letters, numbers, and underscores. The ID must be unique - /// within the project it is created in. + /// Name of the group that the input belongs to. /// - /// Required. - core.String? id; + /// Features with the same group name will be treated as one feature when + /// computing attributions. Features grouped together can have different + /// shapes in value. If provided, there will be one single attribution + /// generated in Attribution.feature_attributions, keyed by the group name. + core.String? groupName; - /// The name of the Index this is the deployment of. - /// - /// We may refer to this Index as the DeployedIndex's "original" Index. + /// A list of feature names for each index in the input tensor. /// - /// Required. - core.String? index; + /// Required when the input InputMetadata.encoding is BAG_OF_FEATURES, + /// BAG_OF_FEATURES_SPARSE, INDICATOR. + core.List? indexFeatureMapping; - /// The DeployedIndex may depend on various data on its original Index. - /// - /// Additionally when certain changes to the original Index are being done - /// (e.g. when what the Index contains is being changed) the DeployedIndex may - /// be asynchronously updated in the background to reflect these changes. If - /// this timestamp's value is at least the Index.update_time of the original - /// Index, it means that this DeployedIndex and the original Index are in - /// sync. If this timestamp is older, then to see which updates this - /// DeployedIndex already contains (and which it does not), one must list the - /// operations that are running on the original Index. Only the successfully - /// completed Operations with update_time equal or before this sync time are - /// contained in this DeployedIndex. + /// Specifies the index of the values of the input tensor. /// - /// Output only. - core.String? indexSyncTime; + /// Required when the input tensor is a sparse representation. 
Refer to + /// Tensorflow documentation for more details: + /// https://www.tensorflow.org/api_docs/python/tf/sparse/SparseTensor. + core.String? indicesTensorName; - /// Provides paths for users to send requests directly to the deployed index - /// services running on Cloud via private services access. - /// - /// This field is populated if network is configured. + /// Baseline inputs for this feature. /// - /// Output only. - GoogleCloudAiplatformV1IndexPrivateEndpoints? privateEndpoints; - - /// If set for PSC deployed index, PSC connection will be automatically - /// created after deployment is done and the endpoint information is populated - /// in private_endpoints.psc_automated_endpoints. + /// If no baseline is specified, Vertex AI chooses the baseline for this + /// feature. If multiple baselines are specified, Vertex AI returns the + /// average attributions across them in Attribution.feature_attributions. For + /// Vertex AI-provided Tensorflow images (both 1.x and 2.x), the shape of each + /// baseline must match the shape of the input tensor. If a scalar is + /// provided, we broadcast to the same shape as the input tensor. For custom + /// images, the element of the baselines must be in the same format as the + /// feature's input in the instance\[\]. The schema of any single instance may + /// be specified via Endpoint's DeployedModels' Model's PredictSchemata's + /// instance_schema_uri. /// - /// Optional. - core.List? pscAutomationConfigs; + /// The values for Object must be JSON objects. It can consist of `num`, + /// `String`, `bool` and `null` as well as `Map` and `List` values. + core.List? inputBaselines; - /// A list of reserved ip ranges under the VPC network that can be used for - /// this DeployedIndex. + /// Name of the input tensor for this feature. /// - /// If set, we will deploy the index within the provided ip ranges. Otherwise, - /// the index might be deployed to any ip ranges under the provided VPC - /// network. The value should be the name of the address - /// (https://cloud.google.com/compute/docs/reference/rest/v1/addresses) - /// Example: \['vertex-ai-ip-range'\]. For more information about subnets and - /// network IP ranges, please see - /// https://cloud.google.com/vpc/docs/subnets#manually_created_subnet_ip_ranges. + /// Required and is only applicable to Vertex AI-provided images for + /// Tensorflow. + core.String? inputTensorName; + + /// Modality of the feature. /// - /// Optional. - core.List? reservedIpRanges; + /// Valid values are: numeric, image. Defaults to numeric. + core.String? modality; - GoogleCloudAiplatformV1DeployedIndex({ - this.automaticResources, - this.createTime, - this.dedicatedResources, - this.deployedIndexAuthConfig, - this.deploymentGroup, - this.displayName, - this.enableAccessLogging, - this.id, - this.index, - this.indexSyncTime, - this.privateEndpoints, - this.pscAutomationConfigs, - this.reservedIpRanges, + /// Visualization configurations for image explanation. + GoogleCloudAiplatformV1ExplanationMetadataInputMetadataVisualization? 
+ visualization; + + GoogleCloudAiplatformV1ExplanationMetadataInputMetadata({ + this.denseShapeTensorName, + this.encodedBaselines, + this.encodedTensorName, + this.encoding, + this.featureValueDomain, + this.groupName, + this.indexFeatureMapping, + this.indicesTensorName, + this.inputBaselines, + this.inputTensorName, + this.modality, + this.visualization, }); - GoogleCloudAiplatformV1DeployedIndex.fromJson(core.Map json_) + GoogleCloudAiplatformV1ExplanationMetadataInputMetadata.fromJson( + core.Map json_) : this( - automaticResources: json_.containsKey('automaticResources') - ? GoogleCloudAiplatformV1AutomaticResources.fromJson( - json_['automaticResources'] - as core.Map) + denseShapeTensorName: json_['denseShapeTensorName'] as core.String?, + encodedBaselines: json_.containsKey('encodedBaselines') + ? json_['encodedBaselines'] as core.List : null, - createTime: json_['createTime'] as core.String?, - dedicatedResources: json_.containsKey('dedicatedResources') - ? GoogleCloudAiplatformV1DedicatedResources.fromJson( - json_['dedicatedResources'] + encodedTensorName: json_['encodedTensorName'] as core.String?, + encoding: json_['encoding'] as core.String?, + featureValueDomain: json_.containsKey('featureValueDomain') + ? GoogleCloudAiplatformV1ExplanationMetadataInputMetadataFeatureValueDomain + .fromJson(json_['featureValueDomain'] as core.Map) : null, - deployedIndexAuthConfig: json_.containsKey('deployedIndexAuthConfig') - ? GoogleCloudAiplatformV1DeployedIndexAuthConfig.fromJson( - json_['deployedIndexAuthConfig'] - as core.Map) + groupName: json_['groupName'] as core.String?, + indexFeatureMapping: (json_['indexFeatureMapping'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + indicesTensorName: json_['indicesTensorName'] as core.String?, + inputBaselines: json_.containsKey('inputBaselines') + ? json_['inputBaselines'] as core.List : null, - deploymentGroup: json_['deploymentGroup'] as core.String?, - displayName: json_['displayName'] as core.String?, - enableAccessLogging: json_['enableAccessLogging'] as core.bool?, - id: json_['id'] as core.String?, - index: json_['index'] as core.String?, - indexSyncTime: json_['indexSyncTime'] as core.String?, - privateEndpoints: json_.containsKey('privateEndpoints') - ? GoogleCloudAiplatformV1IndexPrivateEndpoints.fromJson( - json_['privateEndpoints'] + inputTensorName: json_['inputTensorName'] as core.String?, + modality: json_['modality'] as core.String?, + visualization: json_.containsKey('visualization') + ? GoogleCloudAiplatformV1ExplanationMetadataInputMetadataVisualization + .fromJson(json_['visualization'] as core.Map) : null, - pscAutomationConfigs: (json_['pscAutomationConfigs'] as core.List?) - ?.map((value) => - GoogleCloudAiplatformV1PSCAutomationConfig.fromJson( - value as core.Map)) - .toList(), - reservedIpRanges: (json_['reservedIpRanges'] as core.List?) 
- ?.map((value) => value as core.String) - .toList(), ); core.Map toJson() => { - if (automaticResources != null) - 'automaticResources': automaticResources!, - if (createTime != null) 'createTime': createTime!, - if (dedicatedResources != null) - 'dedicatedResources': dedicatedResources!, - if (deployedIndexAuthConfig != null) - 'deployedIndexAuthConfig': deployedIndexAuthConfig!, - if (deploymentGroup != null) 'deploymentGroup': deploymentGroup!, - if (displayName != null) 'displayName': displayName!, - if (enableAccessLogging != null) - 'enableAccessLogging': enableAccessLogging!, - if (id != null) 'id': id!, - if (index != null) 'index': index!, - if (indexSyncTime != null) 'indexSyncTime': indexSyncTime!, - if (privateEndpoints != null) 'privateEndpoints': privateEndpoints!, - if (pscAutomationConfigs != null) - 'pscAutomationConfigs': pscAutomationConfigs!, - if (reservedIpRanges != null) 'reservedIpRanges': reservedIpRanges!, + if (denseShapeTensorName != null) + 'denseShapeTensorName': denseShapeTensorName!, + if (encodedBaselines != null) 'encodedBaselines': encodedBaselines!, + if (encodedTensorName != null) 'encodedTensorName': encodedTensorName!, + if (encoding != null) 'encoding': encoding!, + if (featureValueDomain != null) + 'featureValueDomain': featureValueDomain!, + if (groupName != null) 'groupName': groupName!, + if (indexFeatureMapping != null) + 'indexFeatureMapping': indexFeatureMapping!, + if (indicesTensorName != null) 'indicesTensorName': indicesTensorName!, + if (inputBaselines != null) 'inputBaselines': inputBaselines!, + if (inputTensorName != null) 'inputTensorName': inputTensorName!, + if (modality != null) 'modality': modality!, + if (visualization != null) 'visualization': visualization!, }; } -/// Used to set up the auth on the DeployedIndex's private endpoint. -class GoogleCloudAiplatformV1DeployedIndexAuthConfig { - /// Defines the authentication provider that the DeployedIndex uses. - GoogleCloudAiplatformV1DeployedIndexAuthConfigAuthProvider? authProvider; +/// Domain details of the input feature value. +/// +/// Provides numeric information about the feature, such as its range (min, +/// max). If the feature has been pre-processed, for example with z-scoring, +/// then it provides information about how to recover the original feature. For +/// example, if the input feature is an image and it has been pre-processed to +/// obtain 0-mean and stddev = 1 values, then original_mean, and original_stddev +/// refer to the mean and stddev of the original feature (e.g. image tensor) +/// from which input feature (with mean = 0 and stddev = 1) was obtained. +class GoogleCloudAiplatformV1ExplanationMetadataInputMetadataFeatureValueDomain { + /// The maximum permissible value for this feature. + core.double? maxValue; - GoogleCloudAiplatformV1DeployedIndexAuthConfig({ - this.authProvider, + /// The minimum permissible value for this feature. + core.double? minValue; + + /// If this input feature has been normalized to a mean value of 0, the + /// original_mean specifies the mean value of the domain prior to + /// normalization. + core.double? originalMean; + + /// If this input feature has been normalized to a standard deviation of 1.0, + /// the original_stddev specifies the standard deviation of the domain prior + /// to normalization. + core.double? 
originalStddev; + + GoogleCloudAiplatformV1ExplanationMetadataInputMetadataFeatureValueDomain({ + this.maxValue, + this.minValue, + this.originalMean, + this.originalStddev, }); - GoogleCloudAiplatformV1DeployedIndexAuthConfig.fromJson(core.Map json_) + GoogleCloudAiplatformV1ExplanationMetadataInputMetadataFeatureValueDomain.fromJson( + core.Map json_) : this( - authProvider: json_.containsKey('authProvider') - ? GoogleCloudAiplatformV1DeployedIndexAuthConfigAuthProvider - .fromJson(json_['authProvider'] - as core.Map) - : null, + maxValue: (json_['maxValue'] as core.num?)?.toDouble(), + minValue: (json_['minValue'] as core.num?)?.toDouble(), + originalMean: (json_['originalMean'] as core.num?)?.toDouble(), + originalStddev: (json_['originalStddev'] as core.num?)?.toDouble(), ); core.Map toJson() => { - if (authProvider != null) 'authProvider': authProvider!, + if (maxValue != null) 'maxValue': maxValue!, + if (minValue != null) 'minValue': minValue!, + if (originalMean != null) 'originalMean': originalMean!, + if (originalStddev != null) 'originalStddev': originalStddev!, }; } -/// Configuration for an authentication provider, including support for \[JSON -/// Web Token -/// (JWT)\](https://tools.ietf.org/html/draft-ietf-oauth-json-web-token-32). -class GoogleCloudAiplatformV1DeployedIndexAuthConfigAuthProvider { - /// A list of allowed JWT issuers. +/// Visualization configurations for image explanation. +class GoogleCloudAiplatformV1ExplanationMetadataInputMetadataVisualization { + /// Excludes attributions below the specified percentile, from the highlighted + /// areas. /// - /// Each entry must be a valid Google service account, in the following - /// format: `service-account-name@project-id.iam.gserviceaccount.com` - core.List? allowedIssuers; + /// Defaults to 62. + core.double? clipPercentLowerbound; - /// The list of JWT - /// [audiences](https://tools.ietf.org/html/draft-ietf-oauth-json-web-token-32#section-4.1.3). + /// Excludes attributions above the specified percentile from the highlighted + /// areas. /// - /// that are allowed to access. A JWT containing any of these audiences will - /// be accepted. - core.List? audiences; + /// Using the clip_percent_upperbound and clip_percent_lowerbound together can + /// be useful for filtering out noise and making it easier to see areas of + /// strong attribution. Defaults to 99.9. + core.double? clipPercentUpperbound; - GoogleCloudAiplatformV1DeployedIndexAuthConfigAuthProvider({ - this.allowedIssuers, - this.audiences, + /// The color scheme used for the highlighted areas. + /// + /// Defaults to PINK_GREEN for Integrated Gradients attribution, which shows + /// positive attributions in green and negative in pink. Defaults to VIRIDIS + /// for XRAI attribution, which highlights the most influential regions in + /// yellow and the least influential in blue. + /// Possible string values are: + /// - "COLOR_MAP_UNSPECIFIED" : Should not be used. + /// - "PINK_GREEN" : Positive: green. Negative: pink. + /// - "VIRIDIS" : Viridis color map: A perceptually uniform color mapping + /// which is easier to see by those with colorblindness and progresses from + /// yellow to green to blue. Positive: yellow. Negative: blue. + /// - "RED" : Positive: red. Negative: red. + /// - "GREEN" : Positive: green. Negative: green. + /// - "RED_GREEN" : Positive: green. Negative: red. + /// - "PINK_WHITE_GREEN" : PiYG palette. + core.String? colorMap; + + /// How the original image is displayed in the visualization. 
+ /// + /// Adjusting the overlay can help increase visual clarity if the original + /// image makes it difficult to view the visualization. Defaults to NONE. + /// Possible string values are: + /// - "OVERLAY_TYPE_UNSPECIFIED" : Default value. This is the same as NONE. + /// - "NONE" : No overlay. + /// - "ORIGINAL" : The attributions are shown on top of the original image. + /// - "GRAYSCALE" : The attributions are shown on top of grayscaled version of + /// the original image. + /// - "MASK_BLACK" : The attributions are used as a mask to reveal predictive + /// parts of the image and hide the un-predictive parts. + core.String? overlayType; + + /// Whether to only highlight pixels with positive contributions, negative or + /// both. + /// + /// Defaults to POSITIVE. + /// Possible string values are: + /// - "POLARITY_UNSPECIFIED" : Default value. This is the same as POSITIVE. + /// - "POSITIVE" : Highlights the pixels/outlines that were most influential + /// to the model's prediction. + /// - "NEGATIVE" : Setting polarity to negative highlights areas that does not + /// lead to the models's current prediction. + /// - "BOTH" : Shows both positive and negative attributions. + core.String? polarity; + + /// Type of the image visualization. + /// + /// Only applicable to Integrated Gradients attribution. OUTLINES shows + /// regions of attribution, while PIXELS shows per-pixel attribution. Defaults + /// to OUTLINES. + /// Possible string values are: + /// - "TYPE_UNSPECIFIED" : Should not be used. + /// - "PIXELS" : Shows which pixel contributed to the image prediction. + /// - "OUTLINES" : Shows which region contributed to the image prediction by + /// outlining the region. + core.String? type; + + GoogleCloudAiplatformV1ExplanationMetadataInputMetadataVisualization({ + this.clipPercentLowerbound, + this.clipPercentUpperbound, + this.colorMap, + this.overlayType, + this.polarity, + this.type, }); - GoogleCloudAiplatformV1DeployedIndexAuthConfigAuthProvider.fromJson( + GoogleCloudAiplatformV1ExplanationMetadataInputMetadataVisualization.fromJson( core.Map json_) : this( - allowedIssuers: (json_['allowedIssuers'] as core.List?) - ?.map((value) => value as core.String) - .toList(), - audiences: (json_['audiences'] as core.List?) - ?.map((value) => value as core.String) - .toList(), + clipPercentLowerbound: + (json_['clipPercentLowerbound'] as core.num?)?.toDouble(), + clipPercentUpperbound: + (json_['clipPercentUpperbound'] as core.num?)?.toDouble(), + colorMap: json_['colorMap'] as core.String?, + overlayType: json_['overlayType'] as core.String?, + polarity: json_['polarity'] as core.String?, + type: json_['type'] as core.String?, ); core.Map toJson() => { - if (allowedIssuers != null) 'allowedIssuers': allowedIssuers!, - if (audiences != null) 'audiences': audiences!, + if (clipPercentLowerbound != null) + 'clipPercentLowerbound': clipPercentLowerbound!, + if (clipPercentUpperbound != null) + 'clipPercentUpperbound': clipPercentUpperbound!, + if (colorMap != null) 'colorMap': colorMap!, + if (overlayType != null) 'overlayType': overlayType!, + if (polarity != null) 'polarity': polarity!, + if (type != null) 'type': type!, }; } -/// Points to a DeployedIndex. -class GoogleCloudAiplatformV1DeployedIndexRef { - /// The ID of the DeployedIndex in the above IndexEndpoint. +/// Metadata of the prediction output to be explained. +class GoogleCloudAiplatformV1ExplanationMetadataOutputMetadata { + /// Specify a field name in the prediction to look for the display name. /// - /// Immutable. 
- core.String? deployedIndexId; + /// Use this if the prediction contains the display names for the outputs. The + /// display names in the prediction must have the same shape of the outputs, + /// so that it can be located by Attribution.output_index for a specific + /// output. + core.String? displayNameMappingKey; - /// The display name of the DeployedIndex. + /// Static mapping between the index and display name. /// - /// Output only. - core.String? displayName; + /// Use this if the outputs are a deterministic n-dimensional array, e.g. a + /// list of scores of all the classes in a pre-defined order for a + /// multi-classification Model. It's not feasible if the outputs are + /// non-deterministic, e.g. the Model produces top-k classes or sort the + /// outputs by their values. The shape of the value must be an n-dimensional + /// array of strings. The number of dimensions must match that of the outputs + /// to be explained. The Attribution.output_display_name is populated by + /// locating in the mapping with Attribution.output_index. + /// + /// The values for Object must be JSON objects. It can consist of `num`, + /// `String`, `bool` and `null` as well as `Map` and `List` values. + core.Object? indexDisplayNameMapping; - /// A resource name of the IndexEndpoint. + /// Name of the output tensor. /// - /// Immutable. - core.String? indexEndpoint; + /// Required and is only applicable to Vertex AI provided images for + /// Tensorflow. + core.String? outputTensorName; - GoogleCloudAiplatformV1DeployedIndexRef({ - this.deployedIndexId, - this.displayName, - this.indexEndpoint, + GoogleCloudAiplatformV1ExplanationMetadataOutputMetadata({ + this.displayNameMappingKey, + this.indexDisplayNameMapping, + this.outputTensorName, }); - GoogleCloudAiplatformV1DeployedIndexRef.fromJson(core.Map json_) + GoogleCloudAiplatformV1ExplanationMetadataOutputMetadata.fromJson( + core.Map json_) : this( - deployedIndexId: json_['deployedIndexId'] as core.String?, - displayName: json_['displayName'] as core.String?, - indexEndpoint: json_['indexEndpoint'] as core.String?, + displayNameMappingKey: json_['displayNameMappingKey'] as core.String?, + indexDisplayNameMapping: json_['indexDisplayNameMapping'], + outputTensorName: json_['outputTensorName'] as core.String?, ); core.Map toJson() => { - if (deployedIndexId != null) 'deployedIndexId': deployedIndexId!, - if (displayName != null) 'displayName': displayName!, - if (indexEndpoint != null) 'indexEndpoint': indexEndpoint!, + if (displayNameMappingKey != null) + 'displayNameMappingKey': displayNameMappingKey!, + if (indexDisplayNameMapping != null) + 'indexDisplayNameMapping': indexDisplayNameMapping!, + if (outputTensorName != null) 'outputTensorName': outputTensorName!, }; } -/// A deployment of a Model. -/// -/// Endpoints contain one or more DeployedModels. -class GoogleCloudAiplatformV1DeployedModel { - /// A description of resources that to large degree are decided by Vertex AI, - /// and require only a modest additional configuration. - GoogleCloudAiplatformV1AutomaticResources? automaticResources; - - /// Timestamp when the DeployedModel was created. +/// The ExplanationMetadata entries that can be overridden at online explanation +/// time. +class GoogleCloudAiplatformV1ExplanationMetadataOverride { + /// Overrides the input metadata of the features. /// - /// Output only. - core.String? createTime; + /// The key is the name of the feature to be overridden. The keys specified + /// here must exist in the input metadata to be overridden. 
If a feature is + /// not specified here, the corresponding feature's input metadata is not + /// overridden. + /// + /// Required. + core.Map? + inputs; - /// A description of resources that are dedicated to the DeployedModel, and - /// that need a higher degree of manual configuration. - GoogleCloudAiplatformV1DedicatedResources? dedicatedResources; + GoogleCloudAiplatformV1ExplanationMetadataOverride({ + this.inputs, + }); - /// For custom-trained Models and AutoML Tabular Models, the container of the - /// DeployedModel instances will send `stderr` and `stdout` streams to Cloud - /// Logging by default. - /// - /// Please note that the logs incur cost, which are subject to - /// [Cloud Logging pricing](https://cloud.google.com/logging/pricing). User - /// can disable container logging by setting this flag to true. - core.bool? disableContainerLogging; + GoogleCloudAiplatformV1ExplanationMetadataOverride.fromJson(core.Map json_) + : this( + inputs: + (json_['inputs'] as core.Map?)?.map( + (key, value) => core.MapEntry( + key, + GoogleCloudAiplatformV1ExplanationMetadataOverrideInputMetadataOverride + .fromJson(value as core.Map), + ), + ), + ); - /// If true, deploy the model without explainable feature, regardless the - /// existence of Model.explanation_spec or explanation_spec. - core.bool? disableExplanations; + core.Map toJson() => { + if (inputs != null) 'inputs': inputs!, + }; +} - /// The display name of the DeployedModel. +/// The input metadata entries to be overridden. +class GoogleCloudAiplatformV1ExplanationMetadataOverrideInputMetadataOverride { + /// Baseline inputs for this feature. /// - /// If not provided upon creation, the Model's display_name is used. - core.String? displayName; - - /// If true, online prediction access logs are sent to Cloud Logging. + /// This overrides the `input_baseline` field of the + /// ExplanationMetadata.InputMetadata object of the corresponding feature's + /// input metadata. If it's not specified, the original baselines are not + /// overridden. /// - /// These logs are like standard server access logs, containing information - /// like timestamp and latency for each prediction request. Note that logs may - /// incur a cost, especially if your project receives prediction requests at a - /// high queries per second rate (QPS). Estimate your costs before enabling - /// this option. - core.bool? enableAccessLogging; + /// The values for Object must be JSON objects. It can consist of `num`, + /// `String`, `bool` and `null` as well as `Map` and `List` values. + core.List? inputBaselines; - /// Explanation configuration for this DeployedModel. - /// - /// When deploying a Model using EndpointService.DeployModel, this value - /// overrides the value of Model.explanation_spec. All fields of - /// explanation_spec are optional in the request. If a field of - /// explanation_spec is not populated, the value of the same field of - /// Model.explanation_spec is inherited. If the corresponding - /// Model.explanation_spec is not populated, all fields of the - /// explanation_spec will be used for the explanation configuration. - GoogleCloudAiplatformV1ExplanationSpec? explanationSpec; + GoogleCloudAiplatformV1ExplanationMetadataOverrideInputMetadataOverride({ + this.inputBaselines, + }); - /// The ID of the DeployedModel. - /// - /// If not provided upon deployment, Vertex AI will generate a value for this - /// ID. This value should be 1-10 characters, and valid characters are - /// `/[0-9]/`. - /// - /// Immutable. - core.String? 
id; + GoogleCloudAiplatformV1ExplanationMetadataOverrideInputMetadataOverride.fromJson( + core.Map json_) + : this( + inputBaselines: json_.containsKey('inputBaselines') + ? json_['inputBaselines'] as core.List + : null, + ); - /// The resource name of the Model that this is the deployment of. - /// - /// Note that the Model may be in a different location than the - /// DeployedModel's Endpoint. The resource name may contain version id or - /// version alias to specify the version. Example: - /// `projects/{project}/locations/{location}/models/{model}@2` or - /// `projects/{project}/locations/{location}/models/{model}@golden` if no - /// version is specified, the default version will be deployed. + core.Map toJson() => { + if (inputBaselines != null) 'inputBaselines': inputBaselines!, + }; +} + +/// Parameters to configure explaining for Model's predictions. +class GoogleCloudAiplatformV1ExplanationParameters { + /// Example-based explanations that returns the nearest neighbors from the + /// provided dataset. + GoogleCloudAiplatformV1Examples? examples; + + /// An attribution method that computes Aumann-Shapley values taking advantage + /// of the model's fully differentiable structure. /// - /// Required. - core.String? model; + /// Refer to this paper for more details: https://arxiv.org/abs/1703.01365 + GoogleCloudAiplatformV1IntegratedGradientsAttribution? + integratedGradientsAttribution; - /// The version ID of the model that is deployed. + /// If populated, only returns attributions that have output_index contained + /// in output_indices. + /// + /// It must be an ndarray of integers, with the same shape of the output it's + /// explaining. If not populated, returns attributions for top_k indices of + /// outputs. If neither top_k nor output_indices is populated, returns the + /// argmax index of the outputs. Only applicable to Models that predict + /// multiple outputs (e,g, multi-class Models that predict multiple classes). /// - /// Output only. - core.String? modelVersionId; + /// The values for Object must be JSON objects. It can consist of `num`, + /// `String`, `bool` and `null` as well as `Map` and `List` values. + core.List? outputIndices; - /// Provide paths for users to send predict/explain/health requests directly - /// to the deployed model services running on Cloud via private services - /// access. - /// - /// This field is populated if network is configured. + /// An attribution method that approximates Shapley values for features that + /// contribute to the label being predicted. /// - /// Output only. - GoogleCloudAiplatformV1PrivateEndpoints? privateEndpoints; + /// A sampling strategy is used to approximate the value rather than + /// considering all subsets of features. Refer to this paper for model + /// details: https://arxiv.org/abs/1306.4265. + GoogleCloudAiplatformV1SampledShapleyAttribution? sampledShapleyAttribution; - /// The service account that the DeployedModel's container runs as. + /// If populated, returns attributions for top K indices of outputs (defaults + /// to 1). /// - /// Specify the email address of the service account. If this service account - /// is not specified, the container runs as a service account that doesn't - /// have access to the resource project. Users deploying the Model must have - /// the `iam.serviceAccounts.actAs` permission on this service account. - core.String? serviceAccount; + /// Only applies to Models that predicts more than one outputs (e,g, + /// multi-class Models). 
When set to -1, returns explanations for all outputs. + core.int? topK; - /// The resource name of the shared DeploymentResourcePool to deploy on. + /// An attribution method that redistributes Integrated Gradients attribution + /// to segmented regions, taking advantage of the model's fully differentiable + /// structure. /// - /// Format: - /// `projects/{project}/locations/{location}/deploymentResourcePools/{deployment_resource_pool}` - core.String? sharedResources; + /// Refer to this paper for more details: https://arxiv.org/abs/1906.02825 + /// XRAI currently performs better on natural images, like a picture of a + /// house or an animal. If the images are taken in artificial environments, + /// like a lab or manufacturing line, or from diagnostic equipment, like + /// x-rays or quality-control cameras, use Integrated Gradients instead. + GoogleCloudAiplatformV1XraiAttribution? xraiAttribution; - GoogleCloudAiplatformV1DeployedModel({ - this.automaticResources, - this.createTime, - this.dedicatedResources, - this.disableContainerLogging, - this.disableExplanations, - this.displayName, - this.enableAccessLogging, - this.explanationSpec, - this.id, - this.model, - this.modelVersionId, - this.privateEndpoints, - this.serviceAccount, - this.sharedResources, + GoogleCloudAiplatformV1ExplanationParameters({ + this.examples, + this.integratedGradientsAttribution, + this.outputIndices, + this.sampledShapleyAttribution, + this.topK, + this.xraiAttribution, }); - GoogleCloudAiplatformV1DeployedModel.fromJson(core.Map json_) + GoogleCloudAiplatformV1ExplanationParameters.fromJson(core.Map json_) : this( - automaticResources: json_.containsKey('automaticResources') - ? GoogleCloudAiplatformV1AutomaticResources.fromJson( - json_['automaticResources'] - as core.Map) + examples: json_.containsKey('examples') + ? GoogleCloudAiplatformV1Examples.fromJson( + json_['examples'] as core.Map) : null, - createTime: json_['createTime'] as core.String?, - dedicatedResources: json_.containsKey('dedicatedResources') - ? GoogleCloudAiplatformV1DedicatedResources.fromJson( - json_['dedicatedResources'] + integratedGradientsAttribution: json_ + .containsKey('integratedGradientsAttribution') + ? GoogleCloudAiplatformV1IntegratedGradientsAttribution.fromJson( + json_['integratedGradientsAttribution'] as core.Map) : null, - disableContainerLogging: - json_['disableContainerLogging'] as core.bool?, - disableExplanations: json_['disableExplanations'] as core.bool?, - displayName: json_['displayName'] as core.String?, - enableAccessLogging: json_['enableAccessLogging'] as core.bool?, - explanationSpec: json_.containsKey('explanationSpec') - ? GoogleCloudAiplatformV1ExplanationSpec.fromJson( - json_['explanationSpec'] - as core.Map) + outputIndices: json_.containsKey('outputIndices') + ? json_['outputIndices'] as core.List : null, - id: json_['id'] as core.String?, - model: json_['model'] as core.String?, - modelVersionId: json_['modelVersionId'] as core.String?, - privateEndpoints: json_.containsKey('privateEndpoints') - ? GoogleCloudAiplatformV1PrivateEndpoints.fromJson( - json_['privateEndpoints'] + sampledShapleyAttribution: + json_.containsKey('sampledShapleyAttribution') + ? GoogleCloudAiplatformV1SampledShapleyAttribution.fromJson( + json_['sampledShapleyAttribution'] + as core.Map) + : null, + topK: json_['topK'] as core.int?, + xraiAttribution: json_.containsKey('xraiAttribution') + ? 
GoogleCloudAiplatformV1XraiAttribution.fromJson( + json_['xraiAttribution'] as core.Map) : null, - serviceAccount: json_['serviceAccount'] as core.String?, - sharedResources: json_['sharedResources'] as core.String?, ); core.Map toJson() => { - if (automaticResources != null) - 'automaticResources': automaticResources!, - if (createTime != null) 'createTime': createTime!, - if (dedicatedResources != null) - 'dedicatedResources': dedicatedResources!, - if (disableContainerLogging != null) - 'disableContainerLogging': disableContainerLogging!, - if (disableExplanations != null) - 'disableExplanations': disableExplanations!, - if (displayName != null) 'displayName': displayName!, - if (enableAccessLogging != null) - 'enableAccessLogging': enableAccessLogging!, - if (explanationSpec != null) 'explanationSpec': explanationSpec!, - if (id != null) 'id': id!, - if (model != null) 'model': model!, - if (modelVersionId != null) 'modelVersionId': modelVersionId!, - if (privateEndpoints != null) 'privateEndpoints': privateEndpoints!, - if (serviceAccount != null) 'serviceAccount': serviceAccount!, - if (sharedResources != null) 'sharedResources': sharedResources!, + if (examples != null) 'examples': examples!, + if (integratedGradientsAttribution != null) + 'integratedGradientsAttribution': integratedGradientsAttribution!, + if (outputIndices != null) 'outputIndices': outputIndices!, + if (sampledShapleyAttribution != null) + 'sampledShapleyAttribution': sampledShapleyAttribution!, + if (topK != null) 'topK': topK!, + if (xraiAttribution != null) 'xraiAttribution': xraiAttribution!, }; } -/// Points to a DeployedModel. -class GoogleCloudAiplatformV1DeployedModelRef { - /// An ID of a DeployedModel in the above Endpoint. +/// Specification of Model explanation. +class GoogleCloudAiplatformV1ExplanationSpec { + /// Metadata describing the Model's input and output for explanation. /// - /// Immutable. - core.String? deployedModelId; + /// Optional. + GoogleCloudAiplatformV1ExplanationMetadata? metadata; - /// A resource name of an Endpoint. + /// Parameters that configure explaining of the Model's predictions. /// - /// Immutable. - core.String? endpoint; + /// Required. + GoogleCloudAiplatformV1ExplanationParameters? parameters; - GoogleCloudAiplatformV1DeployedModelRef({ - this.deployedModelId, - this.endpoint, + GoogleCloudAiplatformV1ExplanationSpec({ + this.metadata, + this.parameters, }); - GoogleCloudAiplatformV1DeployedModelRef.fromJson(core.Map json_) + GoogleCloudAiplatformV1ExplanationSpec.fromJson(core.Map json_) : this( - deployedModelId: json_['deployedModelId'] as core.String?, - endpoint: json_['endpoint'] as core.String?, + metadata: json_.containsKey('metadata') + ? GoogleCloudAiplatformV1ExplanationMetadata.fromJson( + json_['metadata'] as core.Map) + : null, + parameters: json_.containsKey('parameters') + ? GoogleCloudAiplatformV1ExplanationParameters.fromJson( + json_['parameters'] as core.Map) + : null, ); core.Map toJson() => { - if (deployedModelId != null) 'deployedModelId': deployedModelId!, - if (endpoint != null) 'endpoint': endpoint!, + if (metadata != null) 'metadata': metadata!, + if (parameters != null) 'parameters': parameters!, }; } -/// A description of resources that can be shared by multiple DeployedModels, -/// whose underlying specification consists of a DedicatedResources. -class GoogleCloudAiplatformV1DeploymentResourcePool { - /// Timestamp when this DeploymentResourcePool was created. - /// - /// Output only. - core.String? 
createTime; +/// The ExplanationSpec entries that can be overridden at online explanation +/// time. +class GoogleCloudAiplatformV1ExplanationSpecOverride { + /// The example-based explanations parameter overrides. + GoogleCloudAiplatformV1ExamplesOverride? examplesOverride; - /// The underlying DedicatedResources that the DeploymentResourcePool uses. + /// The metadata to be overridden. /// - /// Required. - GoogleCloudAiplatformV1DedicatedResources? dedicatedResources; + /// If not specified, no metadata is overridden. + GoogleCloudAiplatformV1ExplanationMetadataOverride? metadata; - /// If the DeploymentResourcePool is deployed with custom-trained Models or - /// AutoML Tabular Models, the container(s) of the DeploymentResourcePool will - /// send `stderr` and `stdout` streams to Cloud Logging by default. + /// The parameters to be overridden. /// - /// Please note that the logs incur cost, which are subject to - /// [Cloud Logging pricing](https://cloud.google.com/logging/pricing). User - /// can disable container logging by setting this flag to true. - core.bool? disableContainerLogging; + /// Note that the attribution method cannot be changed. If not specified, no + /// parameter is overridden. + GoogleCloudAiplatformV1ExplanationParameters? parameters; - /// Customer-managed encryption key spec for a DeploymentResourcePool. - /// - /// If set, this DeploymentResourcePool will be secured by this key. Endpoints - /// and the DeploymentResourcePool they deploy in need to have the same - /// EncryptionSpec. - GoogleCloudAiplatformV1EncryptionSpec? encryptionSpec; + GoogleCloudAiplatformV1ExplanationSpecOverride({ + this.examplesOverride, + this.metadata, + this.parameters, + }); - /// The resource name of the DeploymentResourcePool. - /// - /// Format: - /// `projects/{project}/locations/{location}/deploymentResourcePools/{deployment_resource_pool}` + GoogleCloudAiplatformV1ExplanationSpecOverride.fromJson(core.Map json_) + : this( + examplesOverride: json_.containsKey('examplesOverride') + ? GoogleCloudAiplatformV1ExamplesOverride.fromJson( + json_['examplesOverride'] + as core.Map) + : null, + metadata: json_.containsKey('metadata') + ? GoogleCloudAiplatformV1ExplanationMetadataOverride.fromJson( + json_['metadata'] as core.Map) + : null, + parameters: json_.containsKey('parameters') + ? GoogleCloudAiplatformV1ExplanationParameters.fromJson( + json_['parameters'] as core.Map) + : null, + ); + + core.Map toJson() => { + if (examplesOverride != null) 'examplesOverride': examplesOverride!, + if (metadata != null) 'metadata': metadata!, + if (parameters != null) 'parameters': parameters!, + }; +} + +/// Describes what part of the Dataset is to be exported, the destination of the +/// export and how to export. +class GoogleCloudAiplatformV1ExportDataConfig { + /// The Cloud Storage URI that points to a YAML file describing the annotation + /// schema. /// - /// Immutable. - core.String? name; + /// The schema is defined as an OpenAPI 3.0.2 + /// [Schema Object](https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.2.md#schemaObject). + /// The schema files that can be used here are found in + /// gs://google-cloud-aiplatform/schema/dataset/annotation/, note that the + /// chosen schema must be consistent with metadata of the Dataset specified by + /// ExportDataRequest.name. Only used for custom training data export use + /// cases. Only applicable to Datasets that have DataItems and Annotations. 
+ /// Only Annotations that both match this schema and belong to DataItems not + /// ignored by the split method are used in respectively training, validation + /// or test role, depending on the role of the DataItem they are on. When used + /// in conjunction with annotations_filter, the Annotations used for training + /// are filtered by both annotations_filter and annotation_schema_uri. + core.String? annotationSchemaUri; - /// Reserved for future use. + /// An expression for filtering what part of the Dataset is to be exported. /// - /// Output only. - core.bool? satisfiesPzi; + /// Only Annotations that match this filter will be exported. The filter + /// syntax is the same as in ListAnnotations. + core.String? annotationsFilter; - /// Reserved for future use. + /// Indicates the usage of the exported files. + /// Possible string values are: + /// - "EXPORT_USE_UNSPECIFIED" : Regular user export. + /// - "CUSTOM_CODE_TRAINING" : Export for custom code training. + core.String? exportUse; + + /// Split based on the provided filters for each set. + GoogleCloudAiplatformV1ExportFilterSplit? filterSplit; + + /// Split based on fractions defining the size of each set. + GoogleCloudAiplatformV1ExportFractionSplit? fractionSplit; + + /// The Google Cloud Storage location where the output is to be written to. /// - /// Output only. - core.bool? satisfiesPzs; + /// In the given directory a new directory will be created with name: + /// `export-data--` where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 + /// format. All export output will be written into that directory. Inside that + /// directory, annotations with the same schema will be grouped into sub + /// directories which are named with the corresponding annotations' schema + /// title. Inside these sub directories, a schema.yaml will be created to + /// describe the output format. + GoogleCloudAiplatformV1GcsDestination? gcsDestination; - /// The service account that the DeploymentResourcePool's container(s) run as. + /// The ID of a SavedQuery (annotation set) under the Dataset specified by + /// ExportDataRequest.name used for filtering Annotations for training. /// - /// Specify the email address of the service account. If this service account - /// is not specified, the container(s) run as a service account that doesn't - /// have access to the resource project. Users deploying the Models to this - /// DeploymentResourcePool must have the `iam.serviceAccounts.actAs` - /// permission on this service account. - core.String? serviceAccount; + /// Only used for custom training data export use cases. Only applicable to + /// Datasets that have SavedQueries. Only Annotations that are associated with + /// this SavedQuery are used in respectively training. When used in + /// conjunction with annotations_filter, the Annotations used for training are + /// filtered by both saved_query_id and annotations_filter. Only one of + /// saved_query_id and annotation_schema_uri should be specified as both of + /// them represent the same thing: problem type. + core.String? 
savedQueryId; - GoogleCloudAiplatformV1DeploymentResourcePool({ - this.createTime, - this.dedicatedResources, - this.disableContainerLogging, - this.encryptionSpec, - this.name, - this.satisfiesPzi, - this.satisfiesPzs, - this.serviceAccount, + GoogleCloudAiplatformV1ExportDataConfig({ + this.annotationSchemaUri, + this.annotationsFilter, + this.exportUse, + this.filterSplit, + this.fractionSplit, + this.gcsDestination, + this.savedQueryId, }); - GoogleCloudAiplatformV1DeploymentResourcePool.fromJson(core.Map json_) + GoogleCloudAiplatformV1ExportDataConfig.fromJson(core.Map json_) : this( - createTime: json_['createTime'] as core.String?, - dedicatedResources: json_.containsKey('dedicatedResources') - ? GoogleCloudAiplatformV1DedicatedResources.fromJson( - json_['dedicatedResources'] - as core.Map) + annotationSchemaUri: json_['annotationSchemaUri'] as core.String?, + annotationsFilter: json_['annotationsFilter'] as core.String?, + exportUse: json_['exportUse'] as core.String?, + filterSplit: json_.containsKey('filterSplit') + ? GoogleCloudAiplatformV1ExportFilterSplit.fromJson( + json_['filterSplit'] as core.Map) : null, - disableContainerLogging: - json_['disableContainerLogging'] as core.bool?, - encryptionSpec: json_.containsKey('encryptionSpec') - ? GoogleCloudAiplatformV1EncryptionSpec.fromJson( - json_['encryptionSpec'] + fractionSplit: json_.containsKey('fractionSplit') + ? GoogleCloudAiplatformV1ExportFractionSplit.fromJson( + json_['fractionSplit'] as core.Map) + : null, + gcsDestination: json_.containsKey('gcsDestination') + ? GoogleCloudAiplatformV1GcsDestination.fromJson( + json_['gcsDestination'] as core.Map) : null, - name: json_['name'] as core.String?, - satisfiesPzi: json_['satisfiesPzi'] as core.bool?, - satisfiesPzs: json_['satisfiesPzs'] as core.bool?, - serviceAccount: json_['serviceAccount'] as core.String?, + savedQueryId: json_['savedQueryId'] as core.String?, ); core.Map toJson() => { - if (createTime != null) 'createTime': createTime!, - if (dedicatedResources != null) - 'dedicatedResources': dedicatedResources!, - if (disableContainerLogging != null) - 'disableContainerLogging': disableContainerLogging!, - if (encryptionSpec != null) 'encryptionSpec': encryptionSpec!, - if (name != null) 'name': name!, - if (satisfiesPzi != null) 'satisfiesPzi': satisfiesPzi!, - if (satisfiesPzs != null) 'satisfiesPzs': satisfiesPzs!, - if (serviceAccount != null) 'serviceAccount': serviceAccount!, + if (annotationSchemaUri != null) + 'annotationSchemaUri': annotationSchemaUri!, + if (annotationsFilter != null) 'annotationsFilter': annotationsFilter!, + if (exportUse != null) 'exportUse': exportUse!, + if (filterSplit != null) 'filterSplit': filterSplit!, + if (fractionSplit != null) 'fractionSplit': fractionSplit!, + if (gcsDestination != null) 'gcsDestination': gcsDestination!, + if (savedQueryId != null) 'savedQueryId': savedQueryId!, }; } -class GoogleCloudAiplatformV1DestinationFeatureSetting { - /// Specify the field name in the export destination. - /// - /// If not specified, Feature ID is used. - core.String? destinationField; - - /// The ID of the Feature to apply the setting to. +/// Request message for DatasetService.ExportData. +class GoogleCloudAiplatformV1ExportDataRequest { + /// The desired output location. /// /// Required. - core.String? featureId; + GoogleCloudAiplatformV1ExportDataConfig? 
exportConfig; - GoogleCloudAiplatformV1DestinationFeatureSetting({ - this.destinationField, - this.featureId, + GoogleCloudAiplatformV1ExportDataRequest({ + this.exportConfig, }); - GoogleCloudAiplatformV1DestinationFeatureSetting.fromJson(core.Map json_) + GoogleCloudAiplatformV1ExportDataRequest.fromJson(core.Map json_) : this( - destinationField: json_['destinationField'] as core.String?, - featureId: json_['featureId'] as core.String?, + exportConfig: json_.containsKey('exportConfig') + ? GoogleCloudAiplatformV1ExportDataConfig.fromJson( + json_['exportConfig'] as core.Map) + : null, ); core.Map toJson() => { - if (destinationField != null) 'destinationField': destinationField!, - if (featureId != null) 'featureId': featureId!, + if (exportConfig != null) 'exportConfig': exportConfig!, }; } -/// Request message for PredictionService.DirectPredict. -class GoogleCloudAiplatformV1DirectPredictRequest { - /// The prediction input. - core.List? inputs; +/// Request message for FeaturestoreService.ExportFeatureValues. +class GoogleCloudAiplatformV1ExportFeatureValuesRequest { + /// Specifies destination location and format. + /// + /// Required. + GoogleCloudAiplatformV1FeatureValueDestination? destination; - /// The parameters that govern the prediction. - GoogleCloudAiplatformV1Tensor? parameters; + /// Selects Features to export values of. + /// + /// Required. + GoogleCloudAiplatformV1FeatureSelector? featureSelector; - GoogleCloudAiplatformV1DirectPredictRequest({ - this.inputs, - this.parameters, + /// Exports all historical values of all entities of the EntityType within a + /// time range + GoogleCloudAiplatformV1ExportFeatureValuesRequestFullExport? fullExport; + + /// Per-Feature export settings. + core.List? settings; + + /// Exports the latest Feature values of all entities of the EntityType within + /// a time range. + GoogleCloudAiplatformV1ExportFeatureValuesRequestSnapshotExport? + snapshotExport; + + GoogleCloudAiplatformV1ExportFeatureValuesRequest({ + this.destination, + this.featureSelector, + this.fullExport, + this.settings, + this.snapshotExport, }); - GoogleCloudAiplatformV1DirectPredictRequest.fromJson(core.Map json_) + GoogleCloudAiplatformV1ExportFeatureValuesRequest.fromJson(core.Map json_) : this( - inputs: (json_['inputs'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1Tensor.fromJson( - value as core.Map)) + destination: json_.containsKey('destination') + ? GoogleCloudAiplatformV1FeatureValueDestination.fromJson( + json_['destination'] as core.Map) + : null, + featureSelector: json_.containsKey('featureSelector') + ? GoogleCloudAiplatformV1FeatureSelector.fromJson( + json_['featureSelector'] + as core.Map) + : null, + fullExport: json_.containsKey('fullExport') + ? GoogleCloudAiplatformV1ExportFeatureValuesRequestFullExport + .fromJson(json_['fullExport'] + as core.Map) + : null, + settings: (json_['settings'] as core.List?) + ?.map((value) => + GoogleCloudAiplatformV1DestinationFeatureSetting.fromJson( + value as core.Map)) .toList(), - parameters: json_.containsKey('parameters') - ? GoogleCloudAiplatformV1Tensor.fromJson( - json_['parameters'] as core.Map) + snapshotExport: json_.containsKey('snapshotExport') + ? 
GoogleCloudAiplatformV1ExportFeatureValuesRequestSnapshotExport + .fromJson(json_['snapshotExport'] + as core.Map) : null, ); core.Map toJson() => { - if (inputs != null) 'inputs': inputs!, - if (parameters != null) 'parameters': parameters!, + if (destination != null) 'destination': destination!, + if (featureSelector != null) 'featureSelector': featureSelector!, + if (fullExport != null) 'fullExport': fullExport!, + if (settings != null) 'settings': settings!, + if (snapshotExport != null) 'snapshotExport': snapshotExport!, }; } -/// Response message for PredictionService.DirectPredict. -class GoogleCloudAiplatformV1DirectPredictResponse { - /// The prediction output. - core.List? outputs; +/// Describes exporting all historical Feature values of all entities of the +/// EntityType between \[start_time, end_time\]. +class GoogleCloudAiplatformV1ExportFeatureValuesRequestFullExport { + /// Exports Feature values as of this timestamp. + /// + /// If not set, retrieve values as of now. Timestamp, if present, must not + /// have higher than millisecond precision. + core.String? endTime; - /// The parameters that govern the prediction. - GoogleCloudAiplatformV1Tensor? parameters; + /// Excludes Feature values with feature generation timestamp before this + /// timestamp. + /// + /// If not set, retrieve oldest values kept in Feature Store. Timestamp, if + /// present, must not have higher than millisecond precision. + core.String? startTime; - GoogleCloudAiplatformV1DirectPredictResponse({ - this.outputs, - this.parameters, + GoogleCloudAiplatformV1ExportFeatureValuesRequestFullExport({ + this.endTime, + this.startTime, }); - GoogleCloudAiplatformV1DirectPredictResponse.fromJson(core.Map json_) + GoogleCloudAiplatformV1ExportFeatureValuesRequestFullExport.fromJson( + core.Map json_) : this( - outputs: (json_['outputs'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1Tensor.fromJson( - value as core.Map)) - .toList(), - parameters: json_.containsKey('parameters') - ? GoogleCloudAiplatformV1Tensor.fromJson( - json_['parameters'] as core.Map) - : null, + endTime: json_['endTime'] as core.String?, + startTime: json_['startTime'] as core.String?, ); core.Map toJson() => { - if (outputs != null) 'outputs': outputs!, - if (parameters != null) 'parameters': parameters!, + if (endTime != null) 'endTime': endTime!, + if (startTime != null) 'startTime': startTime!, }; } -/// Request message for PredictionService.DirectRawPredict. -class GoogleCloudAiplatformV1DirectRawPredictRequest { - /// The prediction input. - core.String? input; - core.List get inputAsBytes => convert.base64.decode(input!); - - set inputAsBytes(core.List bytes_) { - input = - convert.base64.encode(bytes_).replaceAll('/', '_').replaceAll('+', '-'); - } +/// Describes exporting the latest Feature values of all entities of the +/// EntityType between \[start_time, snapshot_time\]. +class GoogleCloudAiplatformV1ExportFeatureValuesRequestSnapshotExport { + /// Exports Feature values as of this timestamp. + /// + /// If not set, retrieve values as of now. Timestamp, if present, must not + /// have higher than millisecond precision. + core.String? snapshotTime; - /// Fully qualified name of the API method being invoked to perform - /// predictions. + /// Excludes Feature values with feature generation timestamp before this + /// timestamp. /// - /// Format: `/namespace.Service/Method/` Example: - /// `/tensorflow.serving.PredictionService/Predict` - core.String? 
methodName; + /// If not set, retrieve oldest values kept in Feature Store. Timestamp, if + /// present, must not have higher than millisecond precision. + core.String? startTime; - GoogleCloudAiplatformV1DirectRawPredictRequest({ - this.input, - this.methodName, + GoogleCloudAiplatformV1ExportFeatureValuesRequestSnapshotExport({ + this.snapshotTime, + this.startTime, }); - GoogleCloudAiplatformV1DirectRawPredictRequest.fromJson(core.Map json_) + GoogleCloudAiplatformV1ExportFeatureValuesRequestSnapshotExport.fromJson( + core.Map json_) : this( - input: json_['input'] as core.String?, - methodName: json_['methodName'] as core.String?, + snapshotTime: json_['snapshotTime'] as core.String?, + startTime: json_['startTime'] as core.String?, ); core.Map toJson() => { - if (input != null) 'input': input!, - if (methodName != null) 'methodName': methodName!, + if (snapshotTime != null) 'snapshotTime': snapshotTime!, + if (startTime != null) 'startTime': startTime!, }; } -/// Response message for PredictionService.DirectRawPredict. -class GoogleCloudAiplatformV1DirectRawPredictResponse { - /// The prediction output. - core.String? output; - core.List get outputAsBytes => convert.base64.decode(output!); +/// Assigns input data to training, validation, and test sets based on the given +/// filters, data pieces not matched by any filter are ignored. +/// +/// Currently only supported for Datasets containing DataItems. If any of the +/// filters in this message are to match nothing, then they can be set as '-' +/// (the minus sign). Supported only for unstructured Datasets. +typedef GoogleCloudAiplatformV1ExportFilterSplit = $FilterSplit; - set outputAsBytes(core.List bytes_) { - output = - convert.base64.encode(bytes_).replaceAll('/', '_').replaceAll('+', '-'); - } +/// Assigns the input data to training, validation, and test sets as per the +/// given fractions. +/// +/// Any of `training_fraction`, `validation_fraction` and `test_fraction` may +/// optionally be provided, they must sum to up to 1. If the provided ones sum +/// to less than 1, the remainder is assigned to sets as decided by Vertex AI. +/// If none of the fractions are set, by default roughly 80% of data is used for +/// training, 10% for validation, and 10% for test. +typedef GoogleCloudAiplatformV1ExportFractionSplit = $FractionSplit; - GoogleCloudAiplatformV1DirectRawPredictResponse({ - this.output, +/// Request message for ModelService.ExportModel. +class GoogleCloudAiplatformV1ExportModelRequest { + /// The desired output location and configuration. + /// + /// Required. + GoogleCloudAiplatformV1ExportModelRequestOutputConfig? outputConfig; + + GoogleCloudAiplatformV1ExportModelRequest({ + this.outputConfig, }); - GoogleCloudAiplatformV1DirectRawPredictResponse.fromJson(core.Map json_) + GoogleCloudAiplatformV1ExportModelRequest.fromJson(core.Map json_) : this( - output: json_['output'] as core.String?, + outputConfig: json_.containsKey('outputConfig') + ? GoogleCloudAiplatformV1ExportModelRequestOutputConfig.fromJson( + json_['outputConfig'] as core.Map) + : null, ); core.Map toJson() => { - if (output != null) 'output': output!, + if (outputConfig != null) 'outputConfig': outputConfig!, }; } -/// Represents the spec of disk options. -typedef GoogleCloudAiplatformV1DiskSpec = $Shared04; +/// Output configuration for the Model export. +class GoogleCloudAiplatformV1ExportModelRequestOutputConfig { + /// The Cloud Storage location where the Model artifact is to be written to. 
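// A sketch of building the ExportFeatureValuesRequest generated above for a
// snapshot export, assuming `package:googleapis/aiplatform/v1.dart`. The
// feature ID and destination column are hypothetical; `destination` and
// `featureSelector` are documented as required but are left out here and must
// be supplied for a real ExportFeatureValues call.
import 'package:googleapis/aiplatform/v1.dart';

GoogleCloudAiplatformV1ExportFeatureValuesRequest buildSnapshotExportRequest() {
  return GoogleCloudAiplatformV1ExportFeatureValuesRequest(
    // Export the latest value per entity as of this timestamp (millisecond
    // precision at most, per the field docs).
    snapshotExport:
        GoogleCloudAiplatformV1ExportFeatureValuesRequestSnapshotExport(
      snapshotTime: '2024-12-01T00:00:00.000Z',
    ),
    // Optional per-Feature renaming in the export destination.
    settings: [
      GoogleCloudAiplatformV1DestinationFeatureSetting(
        featureId: 'average_rating', // hypothetical Feature ID
        destinationField: 'avg_rating_col', // hypothetical output column
      ),
    ],
  );
}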
+ /// + /// Under the directory given as the destination a new one with name + /// "`model-export--`", where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ + /// ISO-8601 format, will be created. Inside, the Model and any of its + /// supporting files will be written. This field should only be set when the + /// `exportableContent` field of the \[Model.supported_export_formats\] object + /// contains `ARTIFACT`. + GoogleCloudAiplatformV1GcsDestination? artifactDestination; -/// A list of double values. -class GoogleCloudAiplatformV1DoubleArray { - /// A list of double values. - core.List? values; + /// The ID of the format in which the Model must be exported. + /// + /// Each Model lists the export formats it supports. If no value is provided + /// here, then the first from the list of the Model's supported formats is + /// used by default. + core.String? exportFormatId; - GoogleCloudAiplatformV1DoubleArray({ - this.values, + /// The Google Container Registry or Artifact Registry uri where the Model + /// container image will be copied to. + /// + /// This field should only be set when the `exportableContent` field of the + /// \[Model.supported_export_formats\] object contains `IMAGE`. + GoogleCloudAiplatformV1ContainerRegistryDestination? imageDestination; + + GoogleCloudAiplatformV1ExportModelRequestOutputConfig({ + this.artifactDestination, + this.exportFormatId, + this.imageDestination, }); - GoogleCloudAiplatformV1DoubleArray.fromJson(core.Map json_) + GoogleCloudAiplatformV1ExportModelRequestOutputConfig.fromJson(core.Map json_) : this( - values: (json_['values'] as core.List?) - ?.map((value) => (value as core.num).toDouble()) - .toList(), + artifactDestination: json_.containsKey('artifactDestination') + ? GoogleCloudAiplatformV1GcsDestination.fromJson( + json_['artifactDestination'] + as core.Map) + : null, + exportFormatId: json_['exportFormatId'] as core.String?, + imageDestination: json_.containsKey('imageDestination') + ? GoogleCloudAiplatformV1ContainerRegistryDestination.fromJson( + json_['imageDestination'] + as core.Map) + : null, ); core.Map toJson() => { - if (values != null) 'values': values!, + if (artifactDestination != null) + 'artifactDestination': artifactDestination!, + if (exportFormatId != null) 'exportFormatId': exportFormatId!, + if (imageDestination != null) 'imageDestination': imageDestination!, }; } -/// Describes the options to customize dynamic retrieval. -class GoogleCloudAiplatformV1DynamicRetrievalConfig { - /// The threshold to be used in dynamic retrieval. +/// Request message for TensorboardService.ExportTensorboardTimeSeriesData. +class GoogleCloudAiplatformV1ExportTensorboardTimeSeriesDataRequest { + /// Exports the TensorboardTimeSeries' data that match the filter expression. + core.String? filter; + + /// Field to use to sort the TensorboardTimeSeries' data. /// - /// If not set, a system default value is used. + /// By default, TensorboardTimeSeries' data is returned in a pseudo random + /// order. + core.String? orderBy; + + /// The maximum number of data points to return per page. /// - /// Optional. - core.double? dynamicThreshold; + /// The default page_size is 1000. Values must be between 1 and 10000. Values + /// above 10000 are coerced to 10000. + core.int? pageSize; - /// The mode of the predictor to be used in dynamic retrieval. - /// Possible string values are: - /// - "MODE_UNSPECIFIED" : Always trigger retrieval. - /// - "MODE_DYNAMIC" : Run retrieval only when system decides it is necessary. - core.String? 
mode; + /// A page token, received from a previous ExportTensorboardTimeSeriesData + /// call. + /// + /// Provide this to retrieve the subsequent page. When paginating, all other + /// parameters provided to ExportTensorboardTimeSeriesData must match the call + /// that provided the page token. + core.String? pageToken; - GoogleCloudAiplatformV1DynamicRetrievalConfig({ - this.dynamicThreshold, - this.mode, + GoogleCloudAiplatformV1ExportTensorboardTimeSeriesDataRequest({ + this.filter, + this.orderBy, + this.pageSize, + this.pageToken, }); - GoogleCloudAiplatformV1DynamicRetrievalConfig.fromJson(core.Map json_) + GoogleCloudAiplatformV1ExportTensorboardTimeSeriesDataRequest.fromJson( + core.Map json_) : this( - dynamicThreshold: - (json_['dynamicThreshold'] as core.num?)?.toDouble(), - mode: json_['mode'] as core.String?, + filter: json_['filter'] as core.String?, + orderBy: json_['orderBy'] as core.String?, + pageSize: json_['pageSize'] as core.int?, + pageToken: json_['pageToken'] as core.String?, ); core.Map toJson() => { - if (dynamicThreshold != null) 'dynamicThreshold': dynamicThreshold!, - if (mode != null) 'mode': mode!, + if (filter != null) 'filter': filter!, + if (orderBy != null) 'orderBy': orderBy!, + if (pageSize != null) 'pageSize': pageSize!, + if (pageToken != null) 'pageToken': pageToken!, }; } -/// Represents a customer-managed encryption key spec that can be applied to a -/// top-level resource. -class GoogleCloudAiplatformV1EncryptionSpec { - /// The Cloud KMS resource identifier of the customer managed encryption key - /// used to protect a resource. - /// - /// Has the form: - /// `projects/my-project/locations/my-region/keyRings/my-kr/cryptoKeys/my-key`. - /// The key needs to be in the same region as where the compute resource is - /// created. +/// Response message for TensorboardService.ExportTensorboardTimeSeriesData. +class GoogleCloudAiplatformV1ExportTensorboardTimeSeriesDataResponse { + /// A token, which can be sent as page_token to retrieve the next page. /// - /// Required. - core.String? kmsKeyName; + /// If this field is omitted, there are no subsequent pages. + core.String? nextPageToken; - GoogleCloudAiplatformV1EncryptionSpec({ - this.kmsKeyName, + /// The returned time series data points. + core.List? timeSeriesDataPoints; + + GoogleCloudAiplatformV1ExportTensorboardTimeSeriesDataResponse({ + this.nextPageToken, + this.timeSeriesDataPoints, }); - GoogleCloudAiplatformV1EncryptionSpec.fromJson(core.Map json_) + GoogleCloudAiplatformV1ExportTensorboardTimeSeriesDataResponse.fromJson( + core.Map json_) : this( - kmsKeyName: json_['kmsKeyName'] as core.String?, + nextPageToken: json_['nextPageToken'] as core.String?, + timeSeriesDataPoints: (json_['timeSeriesDataPoints'] as core.List?) + ?.map((value) => + GoogleCloudAiplatformV1TimeSeriesDataPoint.fromJson( + value as core.Map)) + .toList(), ); core.Map toJson() => { - if (kmsKeyName != null) 'kmsKeyName': kmsKeyName!, + if (nextPageToken != null) 'nextPageToken': nextPageToken!, + if (timeSeriesDataPoints != null) + 'timeSeriesDataPoints': timeSeriesDataPoints!, }; } -/// Models are deployed into it, and afterwards Endpoint is called to obtain -/// predictions and explanations. -class GoogleCloudAiplatformV1Endpoint { - /// Timestamp when this Endpoint was created. - /// - /// Output only. - core.String? createTime; - - /// DNS of the dedicated endpoint. - /// - /// Will only be populated if dedicated_endpoint_enabled is true. 
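// A sketch of the pagination contract described by the request/response pair
// generated above, assuming `package:googleapis/aiplatform/v1.dart`. The
// `callExport` parameter is a placeholder for whatever wrapper actually issues
// the ExportTensorboardTimeSeriesData RPC; it is not an API of this library.
import 'package:googleapis/aiplatform/v1.dart';

Future<List<GoogleCloudAiplatformV1TimeSeriesDataPoint>> exportAllPoints(
  Future<GoogleCloudAiplatformV1ExportTensorboardTimeSeriesDataResponse>
          Function(
              GoogleCloudAiplatformV1ExportTensorboardTimeSeriesDataRequest)
      callExport,
) async {
  final points = <GoogleCloudAiplatformV1TimeSeriesDataPoint>[];
  String? pageToken;
  do {
    final response = await callExport(
      GoogleCloudAiplatformV1ExportTensorboardTimeSeriesDataRequest(
        pageSize: 1000, // documented default; values above 10000 are coerced
        pageToken: pageToken,
      ),
    );
    final batch = response.timeSeriesDataPoints;
    if (batch != null) points.addAll(batch);
    // An omitted/empty nextPageToken means there are no further pages.
    pageToken = response.nextPageToken;
  } while (pageToken != null && pageToken.isNotEmpty);
  return points;
}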
Format: - /// `https://{endpoint_id}.{region}-{project_number}.prediction.vertexai.goog`. - /// - /// Output only. - core.String? dedicatedEndpointDns; +/// The fact used in grounding. +class GoogleCloudAiplatformV1Fact { + /// Query that is used to retrieve this fact. + core.String? query; - /// If true, the endpoint will be exposed through a dedicated DNS - /// \[Endpoint.dedicated_endpoint_dns\]. + /// If present, according to the underlying Vector DB and the selected metric + /// type, the score can be either the distance or the similarity between the + /// query and the fact and its range depends on the metric type. /// - /// Your request to the dedicated DNS will be isolated from other users' - /// traffic and will have better performance and reliability. Note: Once you - /// enabled dedicated endpoint, you won't be able to send request to the - /// shared DNS {region}-aiplatform.googleapis.com. The limitation will be - /// removed soon. - core.bool? dedicatedEndpointEnabled; + /// For example, if the metric type is COSINE_DISTANCE, it represents the + /// distance between the query and the fact. The larger the distance, the less + /// relevant the fact is to the query. The range is \[0, 2\], while 0 means + /// the most relevant and 2 means the least relevant. + core.double? score; - /// The models deployed in this Endpoint. - /// - /// To add or remove DeployedModels use EndpointService.DeployModel and - /// EndpointService.UndeployModel respectively. - /// - /// Output only. - core.List? deployedModels; + /// If present, the summary/snippet of the fact. + core.String? summary; - /// The description of the Endpoint. - core.String? description; + /// If present, it refers to the title of this fact. + core.String? title; - /// The display name of the Endpoint. - /// - /// The name can be up to 128 characters long and can consist of any UTF-8 - /// characters. - /// - /// Required. - core.String? displayName; + /// If present, this uri links to the source of the fact. + core.String? uri; - /// Deprecated: If true, expose the Endpoint via private service connect. - /// - /// Only one of the fields, network or enable_private_service_connect, can be - /// set. + /// If present, the distance between the query vector and this fact vector. @core.Deprecated( 'Not supported. Member documentation may have more information.', ) - core.bool? enablePrivateServiceConnect; - - /// Customer-managed encryption key spec for an Endpoint. - /// - /// If set, this Endpoint and all sub-resources of this Endpoint will be - /// secured by this key. - GoogleCloudAiplatformV1EncryptionSpec? encryptionSpec; - - /// Used to perform consistent read-modify-write updates. - /// - /// If not set, a blind "overwrite" update happens. - core.String? etag; - - /// The labels with user-defined metadata to organize your Endpoints. - /// - /// Label keys and values can be no longer than 64 characters (Unicode - /// codepoints), can only contain lowercase letters, numeric characters, - /// underscores and dashes. International characters are allowed. See - /// https://goo.gl/xmQnxf for more information and examples of labels. - core.Map? labels; - - /// Resource name of the Model Monitoring job associated with this Endpoint if - /// monitoring is enabled by JobService.CreateModelDeploymentMonitoringJob. - /// - /// Format: - /// `projects/{project}/locations/{location}/modelDeploymentMonitoringJobs/{model_deployment_monitoring_job}` - /// - /// Output only. - core.String? 
modelDeploymentMonitoringJob; - - /// The resource name of the Endpoint. - /// - /// Output only. - core.String? name; - - /// The full name of the Google Compute Engine - /// [network](https://cloud.google.com//compute/docs/networks-and-firewalls#networks) - /// to which the Endpoint should be peered. - /// - /// Private services access must already be configured for the network. If - /// left unspecified, the Endpoint is not peered with any network. Only one of - /// the fields, network or enable_private_service_connect, can be set. - /// [Format](https://cloud.google.com/compute/docs/reference/rest/v1/networks/insert): - /// `projects/{project}/global/networks/{network}`. Where `{project}` is a - /// project number, as in `12345`, and `{network}` is network name. - /// - /// Optional. - core.String? network; - - /// Configures the request-response logging for online prediction. - GoogleCloudAiplatformV1PredictRequestResponseLoggingConfig? - predictRequestResponseLoggingConfig; - - /// Configuration for private service connect. - /// - /// network and private_service_connect_config are mutually exclusive. - /// - /// Optional. - GoogleCloudAiplatformV1PrivateServiceConnectConfig? - privateServiceConnectConfig; - - /// Reserved for future use. - /// - /// Output only. - core.bool? satisfiesPzi; - - /// Reserved for future use. - /// - /// Output only. - core.bool? satisfiesPzs; - - /// A map from a DeployedModel's ID to the percentage of this Endpoint's - /// traffic that should be forwarded to that DeployedModel. - /// - /// If a DeployedModel's ID is not listed in this map, then it receives no - /// traffic. The traffic percentage values must add up to 100, or map must be - /// empty if the Endpoint is to not accept any traffic at a moment. - core.Map? trafficSplit; - - /// Timestamp when this Endpoint was last updated. - /// - /// Output only. - core.String? updateTime; + core.double? vectorDistance; - GoogleCloudAiplatformV1Endpoint({ - this.createTime, - this.dedicatedEndpointDns, - this.dedicatedEndpointEnabled, - this.deployedModels, - this.description, - this.displayName, - this.enablePrivateServiceConnect, - this.encryptionSpec, - this.etag, - this.labels, - this.modelDeploymentMonitoringJob, - this.name, - this.network, - this.predictRequestResponseLoggingConfig, - this.privateServiceConnectConfig, - this.satisfiesPzi, - this.satisfiesPzs, - this.trafficSplit, - this.updateTime, + GoogleCloudAiplatformV1Fact({ + this.query, + this.score, + this.summary, + this.title, + this.uri, + this.vectorDistance, }); - GoogleCloudAiplatformV1Endpoint.fromJson(core.Map json_) + GoogleCloudAiplatformV1Fact.fromJson(core.Map json_) : this( - createTime: json_['createTime'] as core.String?, - dedicatedEndpointDns: json_['dedicatedEndpointDns'] as core.String?, - dedicatedEndpointEnabled: - json_['dedicatedEndpointEnabled'] as core.bool?, - deployedModels: (json_['deployedModels'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1DeployedModel.fromJson( - value as core.Map)) - .toList(), - description: json_['description'] as core.String?, - displayName: json_['displayName'] as core.String?, - enablePrivateServiceConnect: - json_['enablePrivateServiceConnect'] as core.bool?, - encryptionSpec: json_.containsKey('encryptionSpec') - ? 
GoogleCloudAiplatformV1EncryptionSpec.fromJson( - json_['encryptionSpec'] - as core.Map) - : null, - etag: json_['etag'] as core.String?, - labels: - (json_['labels'] as core.Map?)?.map( - (key, value) => core.MapEntry( - key, - value as core.String, - ), - ), - modelDeploymentMonitoringJob: - json_['modelDeploymentMonitoringJob'] as core.String?, - name: json_['name'] as core.String?, - network: json_['network'] as core.String?, - predictRequestResponseLoggingConfig: - json_.containsKey('predictRequestResponseLoggingConfig') - ? GoogleCloudAiplatformV1PredictRequestResponseLoggingConfig - .fromJson(json_['predictRequestResponseLoggingConfig'] - as core.Map) - : null, - privateServiceConnectConfig: - json_.containsKey('privateServiceConnectConfig') - ? GoogleCloudAiplatformV1PrivateServiceConnectConfig.fromJson( - json_['privateServiceConnectConfig'] - as core.Map) - : null, - satisfiesPzi: json_['satisfiesPzi'] as core.bool?, - satisfiesPzs: json_['satisfiesPzs'] as core.bool?, - trafficSplit: - (json_['trafficSplit'] as core.Map?) - ?.map( - (key, value) => core.MapEntry( - key, - value as core.int, - ), - ), - updateTime: json_['updateTime'] as core.String?, + query: json_['query'] as core.String?, + score: (json_['score'] as core.num?)?.toDouble(), + summary: json_['summary'] as core.String?, + title: json_['title'] as core.String?, + uri: json_['uri'] as core.String?, + vectorDistance: (json_['vectorDistance'] as core.num?)?.toDouble(), ); core.Map toJson() => { - if (createTime != null) 'createTime': createTime!, - if (dedicatedEndpointDns != null) - 'dedicatedEndpointDns': dedicatedEndpointDns!, - if (dedicatedEndpointEnabled != null) - 'dedicatedEndpointEnabled': dedicatedEndpointEnabled!, - if (deployedModels != null) 'deployedModels': deployedModels!, - if (description != null) 'description': description!, - if (displayName != null) 'displayName': displayName!, - if (enablePrivateServiceConnect != null) - 'enablePrivateServiceConnect': enablePrivateServiceConnect!, - if (encryptionSpec != null) 'encryptionSpec': encryptionSpec!, - if (etag != null) 'etag': etag!, - if (labels != null) 'labels': labels!, - if (modelDeploymentMonitoringJob != null) - 'modelDeploymentMonitoringJob': modelDeploymentMonitoringJob!, - if (name != null) 'name': name!, - if (network != null) 'network': network!, - if (predictRequestResponseLoggingConfig != null) - 'predictRequestResponseLoggingConfig': - predictRequestResponseLoggingConfig!, - if (privateServiceConnectConfig != null) - 'privateServiceConnectConfig': privateServiceConnectConfig!, - if (satisfiesPzi != null) 'satisfiesPzi': satisfiesPzi!, - if (satisfiesPzs != null) 'satisfiesPzs': satisfiesPzs!, - if (trafficSplit != null) 'trafficSplit': trafficSplit!, - if (updateTime != null) 'updateTime': updateTime!, + if (query != null) 'query': query!, + if (score != null) 'score': score!, + if (summary != null) 'summary': summary!, + if (title != null) 'title': title!, + if (uri != null) 'uri': uri!, + if (vectorDistance != null) 'vectorDistance': vectorDistance!, }; } -/// Selector for entityId. -/// -/// Getting ids from the given source. -class GoogleCloudAiplatformV1EntityIdSelector { - /// Source of Csv - GoogleCloudAiplatformV1CsvSource? csvSource; - - /// Source column that holds entity IDs. - /// - /// If not provided, entity IDs are extracted from the column named entity_id. - core.String? 
entityIdField; - - GoogleCloudAiplatformV1EntityIdSelector({ - this.csvSource, - this.entityIdField, - }); - - GoogleCloudAiplatformV1EntityIdSelector.fromJson(core.Map json_) - : this( - csvSource: json_.containsKey('csvSource') - ? GoogleCloudAiplatformV1CsvSource.fromJson( - json_['csvSource'] as core.Map) - : null, - entityIdField: json_['entityIdField'] as core.String?, +/// Configuration for faster model deployment. +class GoogleCloudAiplatformV1FasterDeploymentConfig { + /// If true, enable fast tryout feature for this deployed model. + core.bool? fastTryoutEnabled; + + GoogleCloudAiplatformV1FasterDeploymentConfig({ + this.fastTryoutEnabled, + }); + + GoogleCloudAiplatformV1FasterDeploymentConfig.fromJson(core.Map json_) + : this( + fastTryoutEnabled: json_['fastTryoutEnabled'] as core.bool?, ); core.Map toJson() => { - if (csvSource != null) 'csvSource': csvSource!, - if (entityIdField != null) 'entityIdField': entityIdField!, + if (fastTryoutEnabled != null) 'fastTryoutEnabled': fastTryoutEnabled!, }; } -/// An entity type is a type of object in a system that needs to be modeled and -/// have stored information about. +/// Feature Metadata information. /// -/// For example, driver is an entity type, and driver0 is an instance of an -/// entity type driver. -class GoogleCloudAiplatformV1EntityType { +/// For example, color is a feature that describes an apple. +class GoogleCloudAiplatformV1Feature { + /// Only applicable for Vertex AI Feature Store (Legacy). + /// /// Timestamp when this EntityType was created. /// /// Output only. core.String? createTime; - /// Description of the EntityType. + /// Description of the Feature. + core.String? description; + + /// Only applicable for Vertex AI Feature Store (Legacy). + /// + /// If not set, use the monitoring_config defined for the EntityType this + /// Feature belongs to. Only Features with type (Feature.ValueType) BOOL, + /// STRING, DOUBLE or INT64 can enable monitoring. If set to true, all types + /// of data monitoring are disabled despite the config on EntityType. /// /// Optional. - core.String? description; + core.bool? disableMonitoring; /// Used to perform a consistent read-modify-write updates. /// /// If not set, a blind "overwrite" update happens. - /// - /// Optional. core.String? etag; - /// The labels with user-defined metadata to organize your EntityTypes. + /// The labels with user-defined metadata to organize your Features. /// /// Label keys and values can be no longer than 64 characters (Unicode /// codepoints), can only contain lowercase letters, numeric characters, /// underscores and dashes. International characters are allowed. See /// https://goo.gl/xmQnxf for more information on and examples of labels. No - /// more than 64 user labels can be associated with one EntityType (System - /// labels are excluded)." System reserved label keys are prefixed with + /// more than 64 user labels can be associated with one Feature (System labels + /// are excluded)." System reserved label keys are prefixed with /// "aiplatform.googleapis.com/" and are immutable. /// /// Optional. core.Map? labels; - /// The default monitoring configuration for all Features with value type - /// (Feature.ValueType) BOOL, STRING, DOUBLE or INT64 under this EntityType. + /// Only applicable for Vertex AI Feature Store (Legacy). /// - /// If this is populated with - /// \[FeaturestoreMonitoringConfig.monitoring_interval\] specified, snapshot - /// analysis monitoring is enabled. Otherwise, snapshot analysis monitoring is - /// disabled. 
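// A small sketch built on the GoogleCloudAiplatformV1Fact message added
// earlier in this hunk, assuming `package:googleapis/aiplatform/v1.dart`.
// Per the `score` docs, with a COSINE_DISTANCE metric a smaller score means a
// more relevant fact (range [0, 2]), so an ascending sort surfaces the best
// matches first; for a similarity-style metric the comparison would flip.
import 'package:googleapis/aiplatform/v1.dart';

List<GoogleCloudAiplatformV1Fact> sortByCosineDistance(
    List<GoogleCloudAiplatformV1Fact> facts) {
  final sorted = [...facts];
  sorted.sort((a, b) =>
      (a.score ?? double.infinity).compareTo(b.score ?? double.infinity));
  return sorted;
}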
+ /// The list of historical stats and anomalies with specified objectives. /// - /// Optional. - GoogleCloudAiplatformV1FeaturestoreMonitoringConfig? monitoringConfig; + /// Output only. + core.List? + monitoringStatsAnomalies; - /// Name of the EntityType. + /// Name of the Feature. /// /// Format: - /// `projects/{project}/locations/{location}/featurestores/{featurestore}/entityTypes/{entity_type}` - /// The last part entity_type is assigned by the client. The entity_type can - /// be up to 64 characters long and can consist only of ASCII Latin letters - /// A-Z and a-z and underscore(_), and ASCII digits 0-9 starting with a - /// letter. The value will be unique given a featurestore. + /// `projects/{project}/locations/{location}/featurestores/{featurestore}/entityTypes/{entity_type}/features/{feature}` + /// `projects/{project}/locations/{location}/featureGroups/{feature_group}/features/{feature}` + /// The last part feature is assigned by the client. The feature can be up to + /// 64 characters long and can consist only of ASCII Latin letters A-Z and + /// a-z, underscore(_), and ASCII digits 0-9 starting with a letter. The value + /// will be unique given an entity type. /// /// Immutable. core.String? name; - /// Config for data retention policy in offline storage. - /// - /// TTL in days for feature values that will be stored in offline storage. The - /// Feature Store offline storage periodically removes obsolete feature values - /// older than `offline_storage_ttl_days` since the feature generation time. - /// If unset (or explicitly set to 0), default to 4000 days TTL. + /// Entity responsible for maintaining this feature. /// - /// Optional. - core.int? offlineStorageTtlDays; + /// Can be comma separated list of email addresses or URIs. + core.String? pointOfContact; - /// Reserved for future use. + /// Only applicable for Vertex AI Feature Store (Legacy). + /// + /// Timestamp when this EntityType was most recently updated. /// /// Output only. - core.bool? satisfiesPzi; + core.String? updateTime; - /// Reserved for future use. + /// Only applicable for Vertex AI Feature Store (Legacy). /// - /// Output only. - core.bool? satisfiesPzs; + /// Type of Feature value. + /// + /// Immutable. + /// Possible string values are: + /// - "VALUE_TYPE_UNSPECIFIED" : The value type is unspecified. + /// - "BOOL" : Used for Feature that is a boolean. + /// - "BOOL_ARRAY" : Used for Feature that is a list of boolean. + /// - "DOUBLE" : Used for Feature that is double. + /// - "DOUBLE_ARRAY" : Used for Feature that is a list of double. + /// - "INT64" : Used for Feature that is INT64. + /// - "INT64_ARRAY" : Used for Feature that is a list of INT64. + /// - "STRING" : Used for Feature that is string. + /// - "STRING_ARRAY" : Used for Feature that is a list of String. + /// - "BYTES" : Used for Feature that is bytes. + /// - "STRUCT" : Used for Feature that is struct. + core.String? valueType; - /// Timestamp when this EntityType was most recently updated. + /// Only applicable for Vertex AI Feature Store. /// - /// Output only. - core.String? updateTime; + /// The name of the BigQuery Table/View column hosting data for this version. + /// If no value is provided, will use feature_id. + core.String? 
versionColumnName; - GoogleCloudAiplatformV1EntityType({ + GoogleCloudAiplatformV1Feature({ this.createTime, this.description, + this.disableMonitoring, this.etag, this.labels, - this.monitoringConfig, + this.monitoringStatsAnomalies, this.name, - this.offlineStorageTtlDays, - this.satisfiesPzi, - this.satisfiesPzs, + this.pointOfContact, this.updateTime, + this.valueType, + this.versionColumnName, }); - GoogleCloudAiplatformV1EntityType.fromJson(core.Map json_) + GoogleCloudAiplatformV1Feature.fromJson(core.Map json_) : this( createTime: json_['createTime'] as core.String?, description: json_['description'] as core.String?, + disableMonitoring: json_['disableMonitoring'] as core.bool?, etag: json_['etag'] as core.String?, labels: (json_['labels'] as core.Map?)?.map( @@ -33723,1330 +39764,1082 @@ class GoogleCloudAiplatformV1EntityType { value as core.String, ), ), - monitoringConfig: json_.containsKey('monitoringConfig') - ? GoogleCloudAiplatformV1FeaturestoreMonitoringConfig.fromJson( - json_['monitoringConfig'] - as core.Map) - : null, - name: json_['name'] as core.String?, - offlineStorageTtlDays: json_['offlineStorageTtlDays'] as core.int?, - satisfiesPzi: json_['satisfiesPzi'] as core.bool?, - satisfiesPzs: json_['satisfiesPzs'] as core.bool?, - updateTime: json_['updateTime'] as core.String?, - ); - - core.Map toJson() => { - if (createTime != null) 'createTime': createTime!, - if (description != null) 'description': description!, - if (etag != null) 'etag': etag!, - if (labels != null) 'labels': labels!, - if (monitoringConfig != null) 'monitoringConfig': monitoringConfig!, - if (name != null) 'name': name!, - if (offlineStorageTtlDays != null) - 'offlineStorageTtlDays': offlineStorageTtlDays!, - if (satisfiesPzi != null) 'satisfiesPzi': satisfiesPzi!, - if (satisfiesPzs != null) 'satisfiesPzs': satisfiesPzs!, - if (updateTime != null) 'updateTime': updateTime!, - }; -} - -/// Represents an environment variable present in a Container or Python Module. -class GoogleCloudAiplatformV1EnvVar { - /// Name of the environment variable. - /// - /// Must be a valid C identifier. - /// - /// Required. - core.String? name; - - /// Variables that reference a $(VAR_NAME) are expanded using the previous - /// defined environment variables in the container and any service environment - /// variables. - /// - /// If a variable cannot be resolved, the reference in the input string will - /// be unchanged. The $(VAR_NAME) syntax can be escaped with a double $$, ie: - /// $$(VAR_NAME). Escaped references will never be expanded, regardless of - /// whether the variable exists or not. - /// - /// Required. - core.String? value; - - GoogleCloudAiplatformV1EnvVar({ - this.name, - this.value, - }); - - GoogleCloudAiplatformV1EnvVar.fromJson(core.Map json_) - : this( - name: json_['name'] as core.String?, - value: json_['value'] as core.String?, - ); - - core.Map toJson() => { - if (name != null) 'name': name!, - if (value != null) 'value': value!, - }; -} - -/// Model error analysis for each annotation. -class GoogleCloudAiplatformV1ErrorAnalysisAnnotation { - /// Attributed items for a given annotation, typically representing neighbors - /// from the training sets constrained by the query type. - core.List? - attributedItems; - - /// The outlier score of this annotated item. - /// - /// Usually defined as the min of all distances from attributed items. - core.double? outlierScore; - - /// The threshold used to determine if this annotation is an outlier or not. - core.double? 
outlierThreshold; - - /// The query type used for finding the attributed items. - /// Possible string values are: - /// - "QUERY_TYPE_UNSPECIFIED" : Unspecified query type for model error - /// analysis. - /// - "ALL_SIMILAR" : Query similar samples across all classes in the dataset. - /// - "SAME_CLASS_SIMILAR" : Query similar samples from the same class of the - /// input sample. - /// - "SAME_CLASS_DISSIMILAR" : Query dissimilar samples from the same class - /// of the input sample. - core.String? queryType; - - GoogleCloudAiplatformV1ErrorAnalysisAnnotation({ - this.attributedItems, - this.outlierScore, - this.outlierThreshold, - this.queryType, - }); - - GoogleCloudAiplatformV1ErrorAnalysisAnnotation.fromJson(core.Map json_) - : this( - attributedItems: (json_['attributedItems'] as core.List?) + monitoringStatsAnomalies: (json_['monitoringStatsAnomalies'] + as core.List?) ?.map((value) => - GoogleCloudAiplatformV1ErrorAnalysisAnnotationAttributedItem - .fromJson(value as core.Map)) + GoogleCloudAiplatformV1FeatureMonitoringStatsAnomaly.fromJson( + value as core.Map)) .toList(), - outlierScore: (json_['outlierScore'] as core.num?)?.toDouble(), - outlierThreshold: - (json_['outlierThreshold'] as core.num?)?.toDouble(), - queryType: json_['queryType'] as core.String?, - ); - - core.Map toJson() => { - if (attributedItems != null) 'attributedItems': attributedItems!, - if (outlierScore != null) 'outlierScore': outlierScore!, - if (outlierThreshold != null) 'outlierThreshold': outlierThreshold!, - if (queryType != null) 'queryType': queryType!, - }; -} - -/// Attributed items for a given annotation, typically representing neighbors -/// from the training sets constrained by the query type. -class GoogleCloudAiplatformV1ErrorAnalysisAnnotationAttributedItem { - /// The unique ID for each annotation. - /// - /// Used by FE to allocate the annotation in DB. - core.String? annotationResourceName; - - /// The distance of this item to the annotation. - core.double? distance; - - GoogleCloudAiplatformV1ErrorAnalysisAnnotationAttributedItem({ - this.annotationResourceName, - this.distance, - }); - - GoogleCloudAiplatformV1ErrorAnalysisAnnotationAttributedItem.fromJson( - core.Map json_) - : this( - annotationResourceName: - json_['annotationResourceName'] as core.String?, - distance: (json_['distance'] as core.num?)?.toDouble(), - ); - - core.Map toJson() => { - if (annotationResourceName != null) - 'annotationResourceName': annotationResourceName!, - if (distance != null) 'distance': distance!, - }; -} - -/// Request message for EvaluationService.EvaluateInstances. -class GoogleCloudAiplatformV1EvaluateInstancesRequest { - /// Instances and metric spec for bleu metric. - GoogleCloudAiplatformV1BleuInput? bleuInput; - - /// Input for coherence metric. - GoogleCloudAiplatformV1CoherenceInput? coherenceInput; - - /// Auto metric instances. - /// - /// Instances and metric spec for exact match metric. - GoogleCloudAiplatformV1ExactMatchInput? exactMatchInput; - - /// LLM-based metric instance. - /// - /// General text generation metrics, applicable to other categories. Input for - /// fluency metric. - GoogleCloudAiplatformV1FluencyInput? fluencyInput; - - /// Input for fulfillment metric. - GoogleCloudAiplatformV1FulfillmentInput? fulfillmentInput; - - /// Input for groundedness metric. - GoogleCloudAiplatformV1GroundednessInput? groundednessInput; - - /// Input for pairwise metric. - GoogleCloudAiplatformV1PairwiseMetricInput? 
pairwiseMetricInput; - - /// Input for pairwise question answering quality metric. - GoogleCloudAiplatformV1PairwiseQuestionAnsweringQualityInput? - pairwiseQuestionAnsweringQualityInput; - - /// Input for pairwise summarization quality metric. - GoogleCloudAiplatformV1PairwiseSummarizationQualityInput? - pairwiseSummarizationQualityInput; - - /// Input for pointwise metric. - GoogleCloudAiplatformV1PointwiseMetricInput? pointwiseMetricInput; - - /// Input for question answering correctness metric. - GoogleCloudAiplatformV1QuestionAnsweringCorrectnessInput? - questionAnsweringCorrectnessInput; - - /// Input for question answering helpfulness metric. - GoogleCloudAiplatformV1QuestionAnsweringHelpfulnessInput? - questionAnsweringHelpfulnessInput; - - /// Input for question answering quality metric. - GoogleCloudAiplatformV1QuestionAnsweringQualityInput? - questionAnsweringQualityInput; - - /// Input for question answering relevance metric. - GoogleCloudAiplatformV1QuestionAnsweringRelevanceInput? - questionAnsweringRelevanceInput; - - /// Instances and metric spec for rouge metric. - GoogleCloudAiplatformV1RougeInput? rougeInput; - - /// Input for safety metric. - GoogleCloudAiplatformV1SafetyInput? safetyInput; - - /// Input for summarization helpfulness metric. - GoogleCloudAiplatformV1SummarizationHelpfulnessInput? - summarizationHelpfulnessInput; - - /// Input for summarization quality metric. - GoogleCloudAiplatformV1SummarizationQualityInput? summarizationQualityInput; - - /// Input for summarization verbosity metric. - GoogleCloudAiplatformV1SummarizationVerbosityInput? - summarizationVerbosityInput; - - /// Tool call metric instances. - /// - /// Input for tool call valid metric. - GoogleCloudAiplatformV1ToolCallValidInput? toolCallValidInput; - - /// Input for tool name match metric. - GoogleCloudAiplatformV1ToolNameMatchInput? toolNameMatchInput; - - /// Input for tool parameter key match metric. - GoogleCloudAiplatformV1ToolParameterKeyMatchInput? toolParameterKeyMatchInput; - - /// Input for tool parameter key value match metric. - GoogleCloudAiplatformV1ToolParameterKVMatchInput? toolParameterKvMatchInput; - - GoogleCloudAiplatformV1EvaluateInstancesRequest({ - this.bleuInput, - this.coherenceInput, - this.exactMatchInput, - this.fluencyInput, - this.fulfillmentInput, - this.groundednessInput, - this.pairwiseMetricInput, - this.pairwiseQuestionAnsweringQualityInput, - this.pairwiseSummarizationQualityInput, - this.pointwiseMetricInput, - this.questionAnsweringCorrectnessInput, - this.questionAnsweringHelpfulnessInput, - this.questionAnsweringQualityInput, - this.questionAnsweringRelevanceInput, - this.rougeInput, - this.safetyInput, - this.summarizationHelpfulnessInput, - this.summarizationQualityInput, - this.summarizationVerbosityInput, - this.toolCallValidInput, - this.toolNameMatchInput, - this.toolParameterKeyMatchInput, - this.toolParameterKvMatchInput, - }); - - GoogleCloudAiplatformV1EvaluateInstancesRequest.fromJson(core.Map json_) - : this( - bleuInput: json_.containsKey('bleuInput') - ? GoogleCloudAiplatformV1BleuInput.fromJson( - json_['bleuInput'] as core.Map) - : null, - coherenceInput: json_.containsKey('coherenceInput') - ? GoogleCloudAiplatformV1CoherenceInput.fromJson( - json_['coherenceInput'] - as core.Map) - : null, - exactMatchInput: json_.containsKey('exactMatchInput') - ? GoogleCloudAiplatformV1ExactMatchInput.fromJson( - json_['exactMatchInput'] - as core.Map) - : null, - fluencyInput: json_.containsKey('fluencyInput') - ? 
GoogleCloudAiplatformV1FluencyInput.fromJson( - json_['fluencyInput'] as core.Map) - : null, - fulfillmentInput: json_.containsKey('fulfillmentInput') - ? GoogleCloudAiplatformV1FulfillmentInput.fromJson( - json_['fulfillmentInput'] - as core.Map) - : null, - groundednessInput: json_.containsKey('groundednessInput') - ? GoogleCloudAiplatformV1GroundednessInput.fromJson( - json_['groundednessInput'] - as core.Map) - : null, - pairwiseMetricInput: json_.containsKey('pairwiseMetricInput') - ? GoogleCloudAiplatformV1PairwiseMetricInput.fromJson( - json_['pairwiseMetricInput'] - as core.Map) - : null, - pairwiseQuestionAnsweringQualityInput: - json_.containsKey('pairwiseQuestionAnsweringQualityInput') - ? GoogleCloudAiplatformV1PairwiseQuestionAnsweringQualityInput - .fromJson(json_['pairwiseQuestionAnsweringQualityInput'] - as core.Map) - : null, - pairwiseSummarizationQualityInput: - json_.containsKey('pairwiseSummarizationQualityInput') - ? GoogleCloudAiplatformV1PairwiseSummarizationQualityInput - .fromJson(json_['pairwiseSummarizationQualityInput'] - as core.Map) - : null, - pointwiseMetricInput: json_.containsKey('pointwiseMetricInput') - ? GoogleCloudAiplatformV1PointwiseMetricInput.fromJson( - json_['pointwiseMetricInput'] - as core.Map) - : null, - questionAnsweringCorrectnessInput: - json_.containsKey('questionAnsweringCorrectnessInput') - ? GoogleCloudAiplatformV1QuestionAnsweringCorrectnessInput - .fromJson(json_['questionAnsweringCorrectnessInput'] - as core.Map) - : null, - questionAnsweringHelpfulnessInput: - json_.containsKey('questionAnsweringHelpfulnessInput') - ? GoogleCloudAiplatformV1QuestionAnsweringHelpfulnessInput - .fromJson(json_['questionAnsweringHelpfulnessInput'] - as core.Map) - : null, - questionAnsweringQualityInput: json_ - .containsKey('questionAnsweringQualityInput') - ? GoogleCloudAiplatformV1QuestionAnsweringQualityInput.fromJson( - json_['questionAnsweringQualityInput'] - as core.Map) - : null, - questionAnsweringRelevanceInput: json_ - .containsKey('questionAnsweringRelevanceInput') - ? GoogleCloudAiplatformV1QuestionAnsweringRelevanceInput.fromJson( - json_['questionAnsweringRelevanceInput'] - as core.Map) - : null, - rougeInput: json_.containsKey('rougeInput') - ? GoogleCloudAiplatformV1RougeInput.fromJson( - json_['rougeInput'] as core.Map) - : null, - safetyInput: json_.containsKey('safetyInput') - ? GoogleCloudAiplatformV1SafetyInput.fromJson( - json_['safetyInput'] as core.Map) - : null, - summarizationHelpfulnessInput: json_ - .containsKey('summarizationHelpfulnessInput') - ? GoogleCloudAiplatformV1SummarizationHelpfulnessInput.fromJson( - json_['summarizationHelpfulnessInput'] - as core.Map) - : null, - summarizationQualityInput: - json_.containsKey('summarizationQualityInput') - ? GoogleCloudAiplatformV1SummarizationQualityInput.fromJson( - json_['summarizationQualityInput'] - as core.Map) - : null, - summarizationVerbosityInput: - json_.containsKey('summarizationVerbosityInput') - ? GoogleCloudAiplatformV1SummarizationVerbosityInput.fromJson( - json_['summarizationVerbosityInput'] - as core.Map) - : null, - toolCallValidInput: json_.containsKey('toolCallValidInput') - ? GoogleCloudAiplatformV1ToolCallValidInput.fromJson( - json_['toolCallValidInput'] - as core.Map) - : null, - toolNameMatchInput: json_.containsKey('toolNameMatchInput') - ? GoogleCloudAiplatformV1ToolNameMatchInput.fromJson( - json_['toolNameMatchInput'] - as core.Map) - : null, - toolParameterKeyMatchInput: - json_.containsKey('toolParameterKeyMatchInput') - ? 
GoogleCloudAiplatformV1ToolParameterKeyMatchInput.fromJson( - json_['toolParameterKeyMatchInput'] - as core.Map) - : null, - toolParameterKvMatchInput: - json_.containsKey('toolParameterKvMatchInput') - ? GoogleCloudAiplatformV1ToolParameterKVMatchInput.fromJson( - json_['toolParameterKvMatchInput'] - as core.Map) - : null, + name: json_['name'] as core.String?, + pointOfContact: json_['pointOfContact'] as core.String?, + updateTime: json_['updateTime'] as core.String?, + valueType: json_['valueType'] as core.String?, + versionColumnName: json_['versionColumnName'] as core.String?, ); core.Map toJson() => { - if (bleuInput != null) 'bleuInput': bleuInput!, - if (coherenceInput != null) 'coherenceInput': coherenceInput!, - if (exactMatchInput != null) 'exactMatchInput': exactMatchInput!, - if (fluencyInput != null) 'fluencyInput': fluencyInput!, - if (fulfillmentInput != null) 'fulfillmentInput': fulfillmentInput!, - if (groundednessInput != null) 'groundednessInput': groundednessInput!, - if (pairwiseMetricInput != null) - 'pairwiseMetricInput': pairwiseMetricInput!, - if (pairwiseQuestionAnsweringQualityInput != null) - 'pairwiseQuestionAnsweringQualityInput': - pairwiseQuestionAnsweringQualityInput!, - if (pairwiseSummarizationQualityInput != null) - 'pairwiseSummarizationQualityInput': - pairwiseSummarizationQualityInput!, - if (pointwiseMetricInput != null) - 'pointwiseMetricInput': pointwiseMetricInput!, - if (questionAnsweringCorrectnessInput != null) - 'questionAnsweringCorrectnessInput': - questionAnsweringCorrectnessInput!, - if (questionAnsweringHelpfulnessInput != null) - 'questionAnsweringHelpfulnessInput': - questionAnsweringHelpfulnessInput!, - if (questionAnsweringQualityInput != null) - 'questionAnsweringQualityInput': questionAnsweringQualityInput!, - if (questionAnsweringRelevanceInput != null) - 'questionAnsweringRelevanceInput': questionAnsweringRelevanceInput!, - if (rougeInput != null) 'rougeInput': rougeInput!, - if (safetyInput != null) 'safetyInput': safetyInput!, - if (summarizationHelpfulnessInput != null) - 'summarizationHelpfulnessInput': summarizationHelpfulnessInput!, - if (summarizationQualityInput != null) - 'summarizationQualityInput': summarizationQualityInput!, - if (summarizationVerbosityInput != null) - 'summarizationVerbosityInput': summarizationVerbosityInput!, - if (toolCallValidInput != null) - 'toolCallValidInput': toolCallValidInput!, - if (toolNameMatchInput != null) - 'toolNameMatchInput': toolNameMatchInput!, - if (toolParameterKeyMatchInput != null) - 'toolParameterKeyMatchInput': toolParameterKeyMatchInput!, - if (toolParameterKvMatchInput != null) - 'toolParameterKvMatchInput': toolParameterKvMatchInput!, + if (createTime != null) 'createTime': createTime!, + if (description != null) 'description': description!, + if (disableMonitoring != null) 'disableMonitoring': disableMonitoring!, + if (etag != null) 'etag': etag!, + if (labels != null) 'labels': labels!, + if (monitoringStatsAnomalies != null) + 'monitoringStatsAnomalies': monitoringStatsAnomalies!, + if (name != null) 'name': name!, + if (pointOfContact != null) 'pointOfContact': pointOfContact!, + if (updateTime != null) 'updateTime': updateTime!, + if (valueType != null) 'valueType': valueType!, + if (versionColumnName != null) 'versionColumnName': versionColumnName!, }; } -/// Response message for EvaluationService.EvaluateInstances. -class GoogleCloudAiplatformV1EvaluateInstancesResponse { - /// Results for bleu metric. - GoogleCloudAiplatformV1BleuResults? 
bleuResults; +/// Vertex AI Feature Group. +class GoogleCloudAiplatformV1FeatureGroup { + /// Indicates that features for this group come from BigQuery Table/View. + /// + /// By default treats the source as a sparse time series source. The BigQuery + /// source table or view must have at least one entity ID column and a column + /// named `feature_timestamp`. + GoogleCloudAiplatformV1FeatureGroupBigQuery? bigQuery; - /// Result for coherence metric. - GoogleCloudAiplatformV1CoherenceResult? coherenceResult; + /// Timestamp when this FeatureGroup was created. + /// + /// Output only. + core.String? createTime; - /// Auto metric evaluation results. + /// Description of the FeatureGroup. /// - /// Results for exact match metric. - GoogleCloudAiplatformV1ExactMatchResults? exactMatchResults; + /// Optional. + core.String? description; - /// LLM-based metric evaluation result. + /// Used to perform consistent read-modify-write updates. /// - /// General text generation metrics, applicable to other categories. Result - /// for fluency metric. - GoogleCloudAiplatformV1FluencyResult? fluencyResult; + /// If not set, a blind "overwrite" update happens. + /// + /// Optional. + core.String? etag; - /// Result for fulfillment metric. - GoogleCloudAiplatformV1FulfillmentResult? fulfillmentResult; + /// The labels with user-defined metadata to organize your FeatureGroup. + /// + /// Label keys and values can be no longer than 64 characters (Unicode + /// codepoints), can only contain lowercase letters, numeric characters, + /// underscores and dashes. International characters are allowed. See + /// https://goo.gl/xmQnxf for more information on and examples of labels. No + /// more than 64 user labels can be associated with one FeatureGroup(System + /// labels are excluded)." System reserved label keys are prefixed with + /// "aiplatform.googleapis.com/" and are immutable. + /// + /// Optional. + core.Map? labels; - /// Result for groundedness metric. - GoogleCloudAiplatformV1GroundednessResult? groundednessResult; + /// Identifier. + /// + /// Name of the FeatureGroup. Format: + /// `projects/{project}/locations/{location}/featureGroups/{featureGroup}` + core.String? name; - /// Result for pairwise metric. - GoogleCloudAiplatformV1PairwiseMetricResult? pairwiseMetricResult; + /// Timestamp when this FeatureGroup was last updated. + /// + /// Output only. + core.String? updateTime; - /// Result for pairwise question answering quality metric. - GoogleCloudAiplatformV1PairwiseQuestionAnsweringQualityResult? - pairwiseQuestionAnsweringQualityResult; + GoogleCloudAiplatformV1FeatureGroup({ + this.bigQuery, + this.createTime, + this.description, + this.etag, + this.labels, + this.name, + this.updateTime, + }); - /// Result for pairwise summarization quality metric. - GoogleCloudAiplatformV1PairwiseSummarizationQualityResult? - pairwiseSummarizationQualityResult; + GoogleCloudAiplatformV1FeatureGroup.fromJson(core.Map json_) + : this( + bigQuery: json_.containsKey('bigQuery') + ? GoogleCloudAiplatformV1FeatureGroupBigQuery.fromJson( + json_['bigQuery'] as core.Map) + : null, + createTime: json_['createTime'] as core.String?, + description: json_['description'] as core.String?, + etag: json_['etag'] as core.String?, + labels: + (json_['labels'] as core.Map?)?.map( + (key, value) => core.MapEntry( + key, + value as core.String, + ), + ), + name: json_['name'] as core.String?, + updateTime: json_['updateTime'] as core.String?, + ); - /// Generic metrics. 
+ core.Map toJson() => { + if (bigQuery != null) 'bigQuery': bigQuery!, + if (createTime != null) 'createTime': createTime!, + if (description != null) 'description': description!, + if (etag != null) 'etag': etag!, + if (labels != null) 'labels': labels!, + if (name != null) 'name': name!, + if (updateTime != null) 'updateTime': updateTime!, + }; +} + +/// Input source type for BigQuery Tables and Views. +class GoogleCloudAiplatformV1FeatureGroupBigQuery { + /// The BigQuery source URI that points to either a BigQuery Table or View. /// - /// Result for pointwise metric. - GoogleCloudAiplatformV1PointwiseMetricResult? pointwiseMetricResult; + /// Required. Immutable. + GoogleCloudAiplatformV1BigQuerySource? bigQuerySource; - /// Result for question answering correctness metric. - GoogleCloudAiplatformV1QuestionAnsweringCorrectnessResult? - questionAnsweringCorrectnessResult; + /// If set, all feature values will be fetched from a single row per unique + /// entityId including nulls. + /// + /// If not set, will collapse all rows for each unique entityId into a singe + /// row with any non-null values if present, if no non-null values are present + /// will sync null. ex: If source has schema `(entity_id, feature_timestamp, + /// f0, f1)` and the following rows: `(e1, 2020-01-01T10:00:00.123Z, 10, 15)` + /// `(e1, 2020-02-01T10:00:00.123Z, 20, null)` If dense is set, `(e1, 20, + /// null)` is synced to online stores. If dense is not set, `(e1, 20, 15)` is + /// synced to online stores. + /// + /// Optional. + core.bool? dense; - /// Result for question answering helpfulness metric. - GoogleCloudAiplatformV1QuestionAnsweringHelpfulnessResult? - questionAnsweringHelpfulnessResult; + /// Columns to construct entity_id / row keys. + /// + /// If not provided defaults to `entity_id`. + /// + /// Optional. + core.List? entityIdColumns; - /// Question answering only metrics. + /// Set if the data source is not a time-series. /// - /// Result for question answering quality metric. - GoogleCloudAiplatformV1QuestionAnsweringQualityResult? - questionAnsweringQualityResult; + /// Optional. + core.bool? staticDataSource; - /// Result for question answering relevance metric. - GoogleCloudAiplatformV1QuestionAnsweringRelevanceResult? - questionAnsweringRelevanceResult; + /// If the source is a time-series source, this can be set to control how + /// downstream sources (ex: FeatureView ) will treat time-series sources. + /// + /// If not set, will treat the source as a time-series source with + /// `feature_timestamp` as timestamp column and no scan boundary. + /// + /// Optional. + GoogleCloudAiplatformV1FeatureGroupBigQueryTimeSeries? timeSeries; - /// Results for rouge metric. - GoogleCloudAiplatformV1RougeResults? rougeResults; + GoogleCloudAiplatformV1FeatureGroupBigQuery({ + this.bigQuerySource, + this.dense, + this.entityIdColumns, + this.staticDataSource, + this.timeSeries, + }); - /// Result for safety metric. - GoogleCloudAiplatformV1SafetyResult? safetyResult; + GoogleCloudAiplatformV1FeatureGroupBigQuery.fromJson(core.Map json_) + : this( + bigQuerySource: json_.containsKey('bigQuerySource') + ? GoogleCloudAiplatformV1BigQuerySource.fromJson( + json_['bigQuerySource'] + as core.Map) + : null, + dense: json_['dense'] as core.bool?, + entityIdColumns: (json_['entityIdColumns'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + staticDataSource: json_['staticDataSource'] as core.bool?, + timeSeries: json_.containsKey('timeSeries') + ? 
GoogleCloudAiplatformV1FeatureGroupBigQueryTimeSeries.fromJson( + json_['timeSeries'] as core.Map) + : null, + ); - /// Result for summarization helpfulness metric. - GoogleCloudAiplatformV1SummarizationHelpfulnessResult? - summarizationHelpfulnessResult; + core.Map toJson() => { + if (bigQuerySource != null) 'bigQuerySource': bigQuerySource!, + if (dense != null) 'dense': dense!, + if (entityIdColumns != null) 'entityIdColumns': entityIdColumns!, + if (staticDataSource != null) 'staticDataSource': staticDataSource!, + if (timeSeries != null) 'timeSeries': timeSeries!, + }; +} - /// Summarization only metrics. +class GoogleCloudAiplatformV1FeatureGroupBigQueryTimeSeries { + /// Column hosting timestamp values for a time-series source. /// - /// Result for summarization quality metric. - GoogleCloudAiplatformV1SummarizationQualityResult? summarizationQualityResult; + /// Will be used to determine the latest `feature_values` for each entity. + /// Optional. If not provided, column named `feature_timestamp` of type + /// `TIMESTAMP` will be used. + /// + /// Optional. + core.String? timestampColumn; - /// Result for summarization verbosity metric. - GoogleCloudAiplatformV1SummarizationVerbosityResult? - summarizationVerbosityResult; + GoogleCloudAiplatformV1FeatureGroupBigQueryTimeSeries({ + this.timestampColumn, + }); - /// Tool call metrics. - /// - /// Results for tool call valid metric. - GoogleCloudAiplatformV1ToolCallValidResults? toolCallValidResults; + GoogleCloudAiplatformV1FeatureGroupBigQueryTimeSeries.fromJson(core.Map json_) + : this( + timestampColumn: json_['timestampColumn'] as core.String?, + ); - /// Results for tool name match metric. - GoogleCloudAiplatformV1ToolNameMatchResults? toolNameMatchResults; + core.Map toJson() => { + if (timestampColumn != null) 'timestampColumn': timestampColumn!, + }; +} - /// Results for tool parameter key match metric. - GoogleCloudAiplatformV1ToolParameterKeyMatchResults? - toolParameterKeyMatchResults; +/// A list of historical SnapshotAnalysis or ImportFeaturesAnalysis stats +/// requested by user, sorted by FeatureStatsAnomaly.start_time descending. +class GoogleCloudAiplatformV1FeatureMonitoringStatsAnomaly { + /// The stats and anomalies generated at specific timestamp. + /// + /// Output only. + GoogleCloudAiplatformV1FeatureStatsAnomaly? featureStatsAnomaly; - /// Results for tool parameter key value match metric. - GoogleCloudAiplatformV1ToolParameterKVMatchResults? - toolParameterKvMatchResults; + /// The objective for each stats. + /// + /// Output only. + /// Possible string values are: + /// - "OBJECTIVE_UNSPECIFIED" : If it's OBJECTIVE_UNSPECIFIED, + /// monitoring_stats will be empty. + /// - "IMPORT_FEATURE_ANALYSIS" : Stats are generated by Import Feature + /// Analysis. + /// - "SNAPSHOT_ANALYSIS" : Stats are generated by Snapshot Analysis. + core.String? 
objective; - GoogleCloudAiplatformV1EvaluateInstancesResponse({ - this.bleuResults, - this.coherenceResult, - this.exactMatchResults, - this.fluencyResult, - this.fulfillmentResult, - this.groundednessResult, - this.pairwiseMetricResult, - this.pairwiseQuestionAnsweringQualityResult, - this.pairwiseSummarizationQualityResult, - this.pointwiseMetricResult, - this.questionAnsweringCorrectnessResult, - this.questionAnsweringHelpfulnessResult, - this.questionAnsweringQualityResult, - this.questionAnsweringRelevanceResult, - this.rougeResults, - this.safetyResult, - this.summarizationHelpfulnessResult, - this.summarizationQualityResult, - this.summarizationVerbosityResult, - this.toolCallValidResults, - this.toolNameMatchResults, - this.toolParameterKeyMatchResults, - this.toolParameterKvMatchResults, + GoogleCloudAiplatformV1FeatureMonitoringStatsAnomaly({ + this.featureStatsAnomaly, + this.objective, }); - GoogleCloudAiplatformV1EvaluateInstancesResponse.fromJson(core.Map json_) + GoogleCloudAiplatformV1FeatureMonitoringStatsAnomaly.fromJson(core.Map json_) : this( - bleuResults: json_.containsKey('bleuResults') - ? GoogleCloudAiplatformV1BleuResults.fromJson( - json_['bleuResults'] as core.Map) - : null, - coherenceResult: json_.containsKey('coherenceResult') - ? GoogleCloudAiplatformV1CoherenceResult.fromJson( - json_['coherenceResult'] - as core.Map) - : null, - exactMatchResults: json_.containsKey('exactMatchResults') - ? GoogleCloudAiplatformV1ExactMatchResults.fromJson( - json_['exactMatchResults'] - as core.Map) - : null, - fluencyResult: json_.containsKey('fluencyResult') - ? GoogleCloudAiplatformV1FluencyResult.fromJson( - json_['fluencyResult'] as core.Map) - : null, - fulfillmentResult: json_.containsKey('fulfillmentResult') - ? GoogleCloudAiplatformV1FulfillmentResult.fromJson( - json_['fulfillmentResult'] - as core.Map) - : null, - groundednessResult: json_.containsKey('groundednessResult') - ? GoogleCloudAiplatformV1GroundednessResult.fromJson( - json_['groundednessResult'] - as core.Map) - : null, - pairwiseMetricResult: json_.containsKey('pairwiseMetricResult') - ? GoogleCloudAiplatformV1PairwiseMetricResult.fromJson( - json_['pairwiseMetricResult'] - as core.Map) - : null, - pairwiseQuestionAnsweringQualityResult: json_ - .containsKey('pairwiseQuestionAnsweringQualityResult') - ? GoogleCloudAiplatformV1PairwiseQuestionAnsweringQualityResult - .fromJson(json_['pairwiseQuestionAnsweringQualityResult'] - as core.Map) - : null, - pairwiseSummarizationQualityResult: - json_.containsKey('pairwiseSummarizationQualityResult') - ? GoogleCloudAiplatformV1PairwiseSummarizationQualityResult - .fromJson(json_['pairwiseSummarizationQualityResult'] - as core.Map) - : null, - pointwiseMetricResult: json_.containsKey('pointwiseMetricResult') - ? GoogleCloudAiplatformV1PointwiseMetricResult.fromJson( - json_['pointwiseMetricResult'] - as core.Map) - : null, - questionAnsweringCorrectnessResult: - json_.containsKey('questionAnsweringCorrectnessResult') - ? GoogleCloudAiplatformV1QuestionAnsweringCorrectnessResult - .fromJson(json_['questionAnsweringCorrectnessResult'] - as core.Map) - : null, - questionAnsweringHelpfulnessResult: - json_.containsKey('questionAnsweringHelpfulnessResult') - ? GoogleCloudAiplatformV1QuestionAnsweringHelpfulnessResult - .fromJson(json_['questionAnsweringHelpfulnessResult'] - as core.Map) - : null, - questionAnsweringQualityResult: json_ - .containsKey('questionAnsweringQualityResult') - ? 
GoogleCloudAiplatformV1QuestionAnsweringQualityResult.fromJson( - json_['questionAnsweringQualityResult'] - as core.Map) - : null, - questionAnsweringRelevanceResult: - json_.containsKey('questionAnsweringRelevanceResult') - ? GoogleCloudAiplatformV1QuestionAnsweringRelevanceResult - .fromJson(json_['questionAnsweringRelevanceResult'] - as core.Map) - : null, - rougeResults: json_.containsKey('rougeResults') - ? GoogleCloudAiplatformV1RougeResults.fromJson( - json_['rougeResults'] as core.Map) - : null, - safetyResult: json_.containsKey('safetyResult') - ? GoogleCloudAiplatformV1SafetyResult.fromJson( - json_['safetyResult'] as core.Map) - : null, - summarizationHelpfulnessResult: json_ - .containsKey('summarizationHelpfulnessResult') - ? GoogleCloudAiplatformV1SummarizationHelpfulnessResult.fromJson( - json_['summarizationHelpfulnessResult'] - as core.Map) - : null, - summarizationQualityResult: - json_.containsKey('summarizationQualityResult') - ? GoogleCloudAiplatformV1SummarizationQualityResult.fromJson( - json_['summarizationQualityResult'] - as core.Map) - : null, - summarizationVerbosityResult: json_ - .containsKey('summarizationVerbosityResult') - ? GoogleCloudAiplatformV1SummarizationVerbosityResult.fromJson( - json_['summarizationVerbosityResult'] - as core.Map) - : null, - toolCallValidResults: json_.containsKey('toolCallValidResults') - ? GoogleCloudAiplatformV1ToolCallValidResults.fromJson( - json_['toolCallValidResults'] - as core.Map) - : null, - toolNameMatchResults: json_.containsKey('toolNameMatchResults') - ? GoogleCloudAiplatformV1ToolNameMatchResults.fromJson( - json_['toolNameMatchResults'] - as core.Map) - : null, - toolParameterKeyMatchResults: json_ - .containsKey('toolParameterKeyMatchResults') - ? GoogleCloudAiplatformV1ToolParameterKeyMatchResults.fromJson( - json_['toolParameterKeyMatchResults'] + featureStatsAnomaly: json_.containsKey('featureStatsAnomaly') + ? GoogleCloudAiplatformV1FeatureStatsAnomaly.fromJson( + json_['featureStatsAnomaly'] as core.Map) : null, - toolParameterKvMatchResults: - json_.containsKey('toolParameterKvMatchResults') - ? GoogleCloudAiplatformV1ToolParameterKVMatchResults.fromJson( - json_['toolParameterKvMatchResults'] - as core.Map) - : null, + objective: json_['objective'] as core.String?, + ); + + core.Map toJson() => { + if (featureStatsAnomaly != null) + 'featureStatsAnomaly': featureStatsAnomaly!, + if (objective != null) 'objective': objective!, + }; +} + +/// Noise sigma by features. +/// +/// Noise sigma represents the standard deviation of the gaussian kernel that +/// will be used to add noise to interpolated inputs prior to computing +/// gradients. +class GoogleCloudAiplatformV1FeatureNoiseSigma { + /// Noise sigma per feature. + /// + /// No noise is added to features that are not set. + core.List? + noiseSigma; + + GoogleCloudAiplatformV1FeatureNoiseSigma({ + this.noiseSigma, + }); + + GoogleCloudAiplatformV1FeatureNoiseSigma.fromJson(core.Map json_) + : this( + noiseSigma: (json_['noiseSigma'] as core.List?) + ?.map((value) => + GoogleCloudAiplatformV1FeatureNoiseSigmaNoiseSigmaForFeature + .fromJson(value as core.Map)) + .toList(), + ); + + core.Map toJson() => { + if (noiseSigma != null) 'noiseSigma': noiseSigma!, + }; +} + +/// Noise sigma for a single feature. +class GoogleCloudAiplatformV1FeatureNoiseSigmaNoiseSigmaForFeature { + /// The name of the input feature for which noise sigma is provided. + /// + /// The features are defined in explanation metadata inputs. + core.String? 
name; + + /// This represents the standard deviation of the Gaussian kernel that will be + /// used to add noise to the feature prior to computing gradients. + /// + /// Similar to noise_sigma but represents the noise added to the current + /// feature. Defaults to 0.1. + core.double? sigma; + + GoogleCloudAiplatformV1FeatureNoiseSigmaNoiseSigmaForFeature({ + this.name, + this.sigma, + }); + + GoogleCloudAiplatformV1FeatureNoiseSigmaNoiseSigmaForFeature.fromJson( + core.Map json_) + : this( + name: json_['name'] as core.String?, + sigma: (json_['sigma'] as core.num?)?.toDouble(), ); core.Map toJson() => { - if (bleuResults != null) 'bleuResults': bleuResults!, - if (coherenceResult != null) 'coherenceResult': coherenceResult!, - if (exactMatchResults != null) 'exactMatchResults': exactMatchResults!, - if (fluencyResult != null) 'fluencyResult': fluencyResult!, - if (fulfillmentResult != null) 'fulfillmentResult': fulfillmentResult!, - if (groundednessResult != null) - 'groundednessResult': groundednessResult!, - if (pairwiseMetricResult != null) - 'pairwiseMetricResult': pairwiseMetricResult!, - if (pairwiseQuestionAnsweringQualityResult != null) - 'pairwiseQuestionAnsweringQualityResult': - pairwiseQuestionAnsweringQualityResult!, - if (pairwiseSummarizationQualityResult != null) - 'pairwiseSummarizationQualityResult': - pairwiseSummarizationQualityResult!, - if (pointwiseMetricResult != null) - 'pointwiseMetricResult': pointwiseMetricResult!, - if (questionAnsweringCorrectnessResult != null) - 'questionAnsweringCorrectnessResult': - questionAnsweringCorrectnessResult!, - if (questionAnsweringHelpfulnessResult != null) - 'questionAnsweringHelpfulnessResult': - questionAnsweringHelpfulnessResult!, - if (questionAnsweringQualityResult != null) - 'questionAnsweringQualityResult': questionAnsweringQualityResult!, - if (questionAnsweringRelevanceResult != null) - 'questionAnsweringRelevanceResult': questionAnsweringRelevanceResult!, - if (rougeResults != null) 'rougeResults': rougeResults!, - if (safetyResult != null) 'safetyResult': safetyResult!, - if (summarizationHelpfulnessResult != null) - 'summarizationHelpfulnessResult': summarizationHelpfulnessResult!, - if (summarizationQualityResult != null) - 'summarizationQualityResult': summarizationQualityResult!, - if (summarizationVerbosityResult != null) - 'summarizationVerbosityResult': summarizationVerbosityResult!, - if (toolCallValidResults != null) - 'toolCallValidResults': toolCallValidResults!, - if (toolNameMatchResults != null) - 'toolNameMatchResults': toolNameMatchResults!, - if (toolParameterKeyMatchResults != null) - 'toolParameterKeyMatchResults': toolParameterKeyMatchResults!, - if (toolParameterKvMatchResults != null) - 'toolParameterKvMatchResults': toolParameterKvMatchResults!, + if (name != null) 'name': name!, + if (sigma != null) 'sigma': sigma!, }; } -/// True positive, false positive, or false negative. +/// Vertex AI Feature Online Store provides a centralized repository for serving +/// ML features and embedding indexes at low latency. /// -/// EvaluatedAnnotation is only available under ModelEvaluationSlice with slice -/// of `annotationSpec` dimension. -class GoogleCloudAiplatformV1EvaluatedAnnotation { - /// The data item payload that the Model predicted this EvaluatedAnnotation - /// on. +/// The Feature Online Store is a top-level container. 
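The FeatureGroupBigQuery schema added above controls how a BigQuery table or view is read: `entityIdColumns` names the key columns, `dense` decides whether rows per entity are collapsed to the latest non-null values, and `timeSeries.timestampColumn` overrides the default `feature_timestamp` column. A minimal sketch of populating these generated classes; the project, dataset, and column names are placeholders, and the `inputUri` field on `GoogleCloudAiplatformV1BigQuerySource` is assumed from elsewhere in this library rather than shown in this hunk:

import 'package:googleapis/aiplatform/v1.dart';

GoogleCloudAiplatformV1FeatureGroup exampleFeatureGroup() =>
    GoogleCloudAiplatformV1FeatureGroup(
      description: 'Features sourced from a sparse time-series BigQuery view.',
      labels: {'team': 'ml-platform'},
      bigQuery: GoogleCloudAiplatformV1FeatureGroupBigQuery(
        // Assumption: the table/view URI lives on BigQuerySource.inputUri.
        bigQuerySource: GoogleCloudAiplatformV1BigQuerySource(
          inputUri: 'bq://my-project.my_dataset.my_feature_view',
        ),
        // Row keys come from this column; defaults to `entity_id` when unset.
        entityIdColumns: ['entity_id'],
        // With `dense` unset or false, rows per entity collapse to the latest
        // non-null values, per the field documentation above.
        dense: false,
        timeSeries: GoogleCloudAiplatformV1FeatureGroupBigQueryTimeSeries(
          timestampColumn: 'feature_timestamp',
        ),
      ),
    );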
+class GoogleCloudAiplatformV1FeatureOnlineStore { + /// Contains settings for the Cloud Bigtable instance that will be created to + /// serve featureValues for all FeatureViews under this FeatureOnlineStore. + GoogleCloudAiplatformV1FeatureOnlineStoreBigtable? bigtable; + + /// Timestamp when this FeatureOnlineStore was created. /// /// Output only. - /// - /// The values for Object must be JSON objects. It can consist of `num`, - /// `String`, `bool` and `null` as well as `Map` and `List` values. - core.Object? dataItemPayload; + core.String? createTime; - /// Annotations of model error analysis results. - core.List? - errorAnalysisAnnotations; + /// The dedicated serving endpoint for this FeatureOnlineStore, which is + /// different from common Vertex service endpoint. + /// + /// Optional. + GoogleCloudAiplatformV1FeatureOnlineStoreDedicatedServingEndpoint? + dedicatedServingEndpoint; - /// ID of the EvaluatedDataItemView under the same ancestor ModelEvaluation. + /// Customer-managed encryption key spec for data storage. /// - /// The EvaluatedDataItemView consists of all ground truths and predictions on - /// data_item_payload. + /// If set, online store will be secured by this key. /// - /// Output only. - core.String? evaluatedDataItemViewId; + /// Optional. + GoogleCloudAiplatformV1EncryptionSpec? encryptionSpec; - /// Explanations of predictions. + /// Used to perform consistent read-modify-write updates. /// - /// Each element of the explanations indicates the explanation for one - /// explanation Method. The attributions list in the - /// EvaluatedAnnotationExplanation.explanation object corresponds to the - /// predictions list. For example, the second element in the attributions list - /// explains the second element in the predictions list. - core.List? - explanations; + /// If not set, a blind "overwrite" update happens. + /// + /// Optional. + core.String? etag; - /// The ground truth Annotations, i.e. the Annotations that exist in the test - /// data the Model is evaluated on. + /// The labels with user-defined metadata to organize your FeatureOnlineStore. /// - /// For true positive, there is one and only one ground truth annotation, - /// which matches the only prediction in predictions. For false positive, - /// there are zero or more ground truth annotations that are similar to the - /// only prediction in predictions, but not enough for a match. For false - /// negative, there is one and only one ground truth annotation, which doesn't - /// match any predictions created by the model. The schema of the ground truth - /// is stored in ModelEvaluation.annotation_schema_uri + /// Label keys and values can be no longer than 64 characters (Unicode + /// codepoints), can only contain lowercase letters, numeric characters, + /// underscores and dashes. International characters are allowed. See + /// https://goo.gl/xmQnxf for more information on and examples of labels. No + /// more than 64 user labels can be associated with one + /// FeatureOnlineStore(System labels are excluded)." System reserved label + /// keys are prefixed with "aiplatform.googleapis.com/" and are immutable. /// - /// Output only. + /// Optional. + core.Map? labels; + + /// Identifier. /// - /// The values for Object must be JSON objects. It can consist of `num`, - /// `String`, `bool` and `null` as well as `Map` and `List` values. - core.List? groundTruths; + /// Name of the FeatureOnlineStore. 
Format: + /// `projects/{project}/locations/{location}/featureOnlineStores/{featureOnlineStore}` + core.String? name; - /// The model predicted annotations. + /// Contains settings for the Optimized store that will be created to serve + /// featureValues for all FeatureViews under this FeatureOnlineStore. /// - /// For true positive, there is one and only one prediction, which matches the - /// only one ground truth annotation in ground_truths. For false positive, - /// there is one and only one prediction, which doesn't match any ground truth - /// annotation of the corresponding data_item_view_id. For false negative, - /// there are zero or more predictions which are similar to the only ground - /// truth annotation in ground_truths but not enough for a match. The schema - /// of the prediction is stored in ModelEvaluation.annotation_schema_uri + /// When choose Optimized storage type, need to set + /// PrivateServiceConnectConfig.enable_private_service_connect to use private + /// endpoint. Otherwise will use public endpoint by default. + GoogleCloudAiplatformV1FeatureOnlineStoreOptimized? optimized; + + /// Reserved for future use. /// /// Output only. + core.bool? satisfiesPzi; + + /// Reserved for future use. /// - /// The values for Object must be JSON objects. It can consist of `num`, - /// `String`, `bool` and `null` as well as `Map` and `List` values. - core.List? predictions; + /// Output only. + core.bool? satisfiesPzs; - /// Type of the EvaluatedAnnotation. + /// State of the featureOnlineStore. /// /// Output only. /// Possible string values are: - /// - "EVALUATED_ANNOTATION_TYPE_UNSPECIFIED" : Invalid value. - /// - "TRUE_POSITIVE" : The EvaluatedAnnotation is a true positive. It has a - /// prediction created by the Model and a ground truth Annotation which the - /// prediction matches. - /// - "FALSE_POSITIVE" : The EvaluatedAnnotation is false positive. It has a - /// prediction created by the Model which does not match any ground truth - /// annotation. - /// - "FALSE_NEGATIVE" : The EvaluatedAnnotation is false negative. It has a - /// ground truth annotation which is not matched by any of the model created - /// predictions. - core.String? type; + /// - "STATE_UNSPECIFIED" : Default value. This value is unused. + /// - "STABLE" : State when the featureOnlineStore configuration is not being + /// updated and the fields reflect the current configuration of the + /// featureOnlineStore. The featureOnlineStore is usable in this state. + /// - "UPDATING" : The state of the featureOnlineStore configuration when it + /// is being updated. During an update, the fields reflect either the original + /// configuration or the updated configuration of the featureOnlineStore. The + /// featureOnlineStore is still usable in this state. + core.String? state; - GoogleCloudAiplatformV1EvaluatedAnnotation({ - this.dataItemPayload, - this.errorAnalysisAnnotations, - this.evaluatedDataItemViewId, - this.explanations, - this.groundTruths, - this.predictions, - this.type, + /// Timestamp when this FeatureOnlineStore was last updated. + /// + /// Output only. + core.String? 
updateTime; + + GoogleCloudAiplatformV1FeatureOnlineStore({ + this.bigtable, + this.createTime, + this.dedicatedServingEndpoint, + this.encryptionSpec, + this.etag, + this.labels, + this.name, + this.optimized, + this.satisfiesPzi, + this.satisfiesPzs, + this.state, + this.updateTime, }); - GoogleCloudAiplatformV1EvaluatedAnnotation.fromJson(core.Map json_) + GoogleCloudAiplatformV1FeatureOnlineStore.fromJson(core.Map json_) : this( - dataItemPayload: json_['dataItemPayload'], - errorAnalysisAnnotations: - (json_['errorAnalysisAnnotations'] as core.List?) - ?.map((value) => - GoogleCloudAiplatformV1ErrorAnalysisAnnotation.fromJson( - value as core.Map)) - .toList(), - evaluatedDataItemViewId: - json_['evaluatedDataItemViewId'] as core.String?, - explanations: (json_['explanations'] as core.List?) - ?.map((value) => - GoogleCloudAiplatformV1EvaluatedAnnotationExplanation - .fromJson(value as core.Map)) - .toList(), - groundTruths: json_.containsKey('groundTruths') - ? json_['groundTruths'] as core.List + bigtable: json_.containsKey('bigtable') + ? GoogleCloudAiplatformV1FeatureOnlineStoreBigtable.fromJson( + json_['bigtable'] as core.Map) : null, - predictions: json_.containsKey('predictions') - ? json_['predictions'] as core.List + createTime: json_['createTime'] as core.String?, + dedicatedServingEndpoint: json_ + .containsKey('dedicatedServingEndpoint') + ? GoogleCloudAiplatformV1FeatureOnlineStoreDedicatedServingEndpoint + .fromJson(json_['dedicatedServingEndpoint'] + as core.Map) : null, - type: json_['type'] as core.String?, + encryptionSpec: json_.containsKey('encryptionSpec') + ? GoogleCloudAiplatformV1EncryptionSpec.fromJson( + json_['encryptionSpec'] + as core.Map) + : null, + etag: json_['etag'] as core.String?, + labels: + (json_['labels'] as core.Map?)?.map( + (key, value) => core.MapEntry( + key, + value as core.String, + ), + ), + name: json_['name'] as core.String?, + optimized: json_.containsKey('optimized') + ? GoogleCloudAiplatformV1FeatureOnlineStoreOptimized.fromJson( + json_['optimized'] as core.Map) + : null, + satisfiesPzi: json_['satisfiesPzi'] as core.bool?, + satisfiesPzs: json_['satisfiesPzs'] as core.bool?, + state: json_['state'] as core.String?, + updateTime: json_['updateTime'] as core.String?, ); core.Map toJson() => { - if (dataItemPayload != null) 'dataItemPayload': dataItemPayload!, - if (errorAnalysisAnnotations != null) - 'errorAnalysisAnnotations': errorAnalysisAnnotations!, - if (evaluatedDataItemViewId != null) - 'evaluatedDataItemViewId': evaluatedDataItemViewId!, - if (explanations != null) 'explanations': explanations!, - if (groundTruths != null) 'groundTruths': groundTruths!, - if (predictions != null) 'predictions': predictions!, - if (type != null) 'type': type!, + if (bigtable != null) 'bigtable': bigtable!, + if (createTime != null) 'createTime': createTime!, + if (dedicatedServingEndpoint != null) + 'dedicatedServingEndpoint': dedicatedServingEndpoint!, + if (encryptionSpec != null) 'encryptionSpec': encryptionSpec!, + if (etag != null) 'etag': etag!, + if (labels != null) 'labels': labels!, + if (name != null) 'name': name!, + if (optimized != null) 'optimized': optimized!, + if (satisfiesPzi != null) 'satisfiesPzi': satisfiesPzi!, + if (satisfiesPzs != null) 'satisfiesPzs': satisfiesPzs!, + if (state != null) 'state': state!, + if (updateTime != null) 'updateTime': updateTime!, }; } -/// Explanation result of the prediction produced by the Model. 
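Because `state`, `createTime`, `updateTime`, and the `satisfiesPz*` flags on the class above are output only, the usual pattern is decoding a server response rather than constructing the resource by hand. A small sketch, assuming a hypothetical GET response body whose keys follow the schema above:

import 'dart:convert' show jsonDecode;
import 'package:googleapis/aiplatform/v1.dart';

void main() {
  // Hypothetical response body; the resource name is a placeholder.
  const body = '''
  {
    "name": "projects/my-project/locations/us-central1/featureOnlineStores/my-store",
    "state": "STABLE",
    "optimized": {}
  }
  ''';
  final store = GoogleCloudAiplatformV1FeatureOnlineStore.fromJson(
      jsonDecode(body) as Map<String, dynamic>);
  print(store.state); // STABLE
  print(store.optimized != null); // true: the Optimized storage type is set
}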
-class GoogleCloudAiplatformV1EvaluatedAnnotationExplanation { - /// Explanation attribution response details. - GoogleCloudAiplatformV1Explanation? explanation; - - /// Explanation type. +class GoogleCloudAiplatformV1FeatureOnlineStoreBigtable { + /// Autoscaling config applied to Bigtable Instance. /// - /// For AutoML Image Classification models, possible values are: * - /// `image-integrated-gradients` * `image-xrai` - core.String? explanationType; + /// Required. + GoogleCloudAiplatformV1FeatureOnlineStoreBigtableAutoScaling? autoScaling; - GoogleCloudAiplatformV1EvaluatedAnnotationExplanation({ - this.explanation, - this.explanationType, + GoogleCloudAiplatformV1FeatureOnlineStoreBigtable({ + this.autoScaling, }); - GoogleCloudAiplatformV1EvaluatedAnnotationExplanation.fromJson(core.Map json_) + GoogleCloudAiplatformV1FeatureOnlineStoreBigtable.fromJson(core.Map json_) : this( - explanation: json_.containsKey('explanation') - ? GoogleCloudAiplatformV1Explanation.fromJson( - json_['explanation'] as core.Map) + autoScaling: json_.containsKey('autoScaling') + ? GoogleCloudAiplatformV1FeatureOnlineStoreBigtableAutoScaling + .fromJson(json_['autoScaling'] + as core.Map) : null, - explanationType: json_['explanationType'] as core.String?, ); core.Map toJson() => { - if (explanation != null) 'explanation': explanation!, - if (explanationType != null) 'explanationType': explanationType!, + if (autoScaling != null) 'autoScaling': autoScaling!, }; } -/// An edge describing the relationship between an Artifact and an Execution in -/// a lineage graph. -class GoogleCloudAiplatformV1Event { - /// The relative resource name of the Artifact in the Event. +class GoogleCloudAiplatformV1FeatureOnlineStoreBigtableAutoScaling { + /// A percentage of the cluster's CPU capacity. + /// + /// Can be from 10% to 80%. When a cluster's CPU utilization exceeds the + /// target that you have set, Bigtable immediately adds nodes to the cluster. + /// When CPU utilization is substantially lower than the target, Bigtable + /// removes nodes. If not set will default to 50%. + /// + /// Optional. + core.int? cpuUtilizationTarget; + + /// The maximum number of nodes to scale up to. + /// + /// Must be greater than or equal to min_node_count, and less than or equal to + /// 10 times of 'min_node_count'. /// /// Required. - core.String? artifact; + core.int? maxNodeCount; - /// Time the Event occurred. + /// The minimum number of nodes to scale down to. /// - /// Output only. - core.String? eventTime; + /// Must be greater than or equal to 1. + /// + /// Required. + core.int? minNodeCount; - /// The relative resource name of the Execution in the Event. + GoogleCloudAiplatformV1FeatureOnlineStoreBigtableAutoScaling({ + this.cpuUtilizationTarget, + this.maxNodeCount, + this.minNodeCount, + }); + + GoogleCloudAiplatformV1FeatureOnlineStoreBigtableAutoScaling.fromJson( + core.Map json_) + : this( + cpuUtilizationTarget: json_['cpuUtilizationTarget'] as core.int?, + maxNodeCount: json_['maxNodeCount'] as core.int?, + minNodeCount: json_['minNodeCount'] as core.int?, + ); + + core.Map toJson() => { + if (cpuUtilizationTarget != null) + 'cpuUtilizationTarget': cpuUtilizationTarget!, + if (maxNodeCount != null) 'maxNodeCount': maxNodeCount!, + if (minNodeCount != null) 'minNodeCount': minNodeCount!, + }; +} + +/// The dedicated serving endpoint for this FeatureOnlineStore. +/// +/// Only need to set when you choose Optimized storage type. Public endpoint is +/// provisioned by default. 
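The autoscaling bounds documented above (min_node_count >= 1, max_node_count <= 10 * min_node_count, cpu_utilization_target between 10 and 80 with a default of 50) map directly onto the generated constructor. A sketch with placeholder values inside those bounds:

import 'package:googleapis/aiplatform/v1.dart';

final bigtableStore = GoogleCloudAiplatformV1FeatureOnlineStore(
  labels: {'env': 'dev'},
  bigtable: GoogleCloudAiplatformV1FeatureOnlineStoreBigtable(
    autoScaling: GoogleCloudAiplatformV1FeatureOnlineStoreBigtableAutoScaling(
      minNodeCount: 1, // required, must be >= 1
      maxNodeCount: 3, // required, must be <= 10 * minNodeCount
      cpuUtilizationTarget: 50, // optional, 10-80; 50 is the documented default
    ),
  ),
);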
+class GoogleCloudAiplatformV1FeatureOnlineStoreDedicatedServingEndpoint { + /// Private service connect config. /// - /// Output only. - core.String? execution; + /// The private service connection is available only for Optimized storage + /// type, not for embedding management now. If + /// PrivateServiceConnectConfig.enable_private_service_connect set to true, + /// customers will use private service connection to send request. Otherwise, + /// the connection will set to public endpoint. + /// + /// Optional. + GoogleCloudAiplatformV1PrivateServiceConnectConfig? + privateServiceConnectConfig; - /// The labels with user-defined metadata to annotate Events. + /// This field will be populated with the domain name to use for this + /// FeatureOnlineStore /// - /// Label keys and values can be no longer than 64 characters (Unicode - /// codepoints), can only contain lowercase letters, numeric characters, - /// underscores and dashes. International characters are allowed. No more than - /// 64 user labels can be associated with one Event (System labels are - /// excluded). See https://goo.gl/xmQnxf for more information and examples of - /// labels. System reserved label keys are prefixed with - /// "aiplatform.googleapis.com/" and are immutable. - core.Map? labels; + /// Output only. + core.String? publicEndpointDomainName; - /// The type of the Event. + /// The name of the service attachment resource. /// - /// Required. - /// Possible string values are: - /// - "TYPE_UNSPECIFIED" : Unspecified whether input or output of the - /// Execution. - /// - "INPUT" : An input of the Execution. - /// - "OUTPUT" : An output of the Execution. - core.String? type; + /// Populated if private service connect is enabled and after FeatureViewSync + /// is created. + /// + /// Output only. + core.String? serviceAttachment; - GoogleCloudAiplatformV1Event({ - this.artifact, - this.eventTime, - this.execution, - this.labels, - this.type, + GoogleCloudAiplatformV1FeatureOnlineStoreDedicatedServingEndpoint({ + this.privateServiceConnectConfig, + this.publicEndpointDomainName, + this.serviceAttachment, }); - GoogleCloudAiplatformV1Event.fromJson(core.Map json_) + GoogleCloudAiplatformV1FeatureOnlineStoreDedicatedServingEndpoint.fromJson( + core.Map json_) : this( - artifact: json_['artifact'] as core.String?, - eventTime: json_['eventTime'] as core.String?, - execution: json_['execution'] as core.String?, - labels: - (json_['labels'] as core.Map?)?.map( - (key, value) => core.MapEntry( - key, - value as core.String, - ), - ), - type: json_['type'] as core.String?, + privateServiceConnectConfig: + json_.containsKey('privateServiceConnectConfig') + ? GoogleCloudAiplatformV1PrivateServiceConnectConfig.fromJson( + json_['privateServiceConnectConfig'] + as core.Map) + : null, + publicEndpointDomainName: + json_['publicEndpointDomainName'] as core.String?, + serviceAttachment: json_['serviceAttachment'] as core.String?, ); core.Map toJson() => { - if (artifact != null) 'artifact': artifact!, - if (eventTime != null) 'eventTime': eventTime!, - if (execution != null) 'execution': execution!, - if (labels != null) 'labels': labels!, - if (type != null) 'type': type!, + if (privateServiceConnectConfig != null) + 'privateServiceConnectConfig': privateServiceConnectConfig!, + if (publicEndpointDomainName != null) + 'publicEndpointDomainName': publicEndpointDomainName!, + if (serviceAttachment != null) 'serviceAttachment': serviceAttachment!, }; } -/// Input for exact match metric. 
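The dedicated serving endpoint above exposes a single configurable field, `privateServiceConnectConfig`; `publicEndpointDomainName` and `serviceAttachment` are output only. A sketch of opting an Optimized store into Private Service Connect; the field names on `GoogleCloudAiplatformV1PrivateServiceConnectConfig` (`enablePrivateServiceConnect`, `projectAllowlist`) are assumptions from the wider aiplatform v1 surface and are not shown in this patch:

import 'package:googleapis/aiplatform/v1.dart';

final optimizedStore = GoogleCloudAiplatformV1FeatureOnlineStore(
  // Optimized is an empty marker message ($Empty) that selects the storage type.
  optimized: GoogleCloudAiplatformV1FeatureOnlineStoreOptimized(),
  dedicatedServingEndpoint:
      GoogleCloudAiplatformV1FeatureOnlineStoreDedicatedServingEndpoint(
    privateServiceConnectConfig:
        GoogleCloudAiplatformV1PrivateServiceConnectConfig(
      enablePrivateServiceConnect: true, // assumed field name
      projectAllowlist: ['my-project'], // assumed field name
    ),
  ),
);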
-class GoogleCloudAiplatformV1ExactMatchInput { - /// Repeated exact match instances. - /// - /// Required. - core.List? instances; +/// Optimized storage type +typedef GoogleCloudAiplatformV1FeatureOnlineStoreOptimized = $Empty; - /// Spec for exact match metric. +/// Selector for Features of an EntityType. +class GoogleCloudAiplatformV1FeatureSelector { + /// Matches Features based on ID. /// /// Required. - GoogleCloudAiplatformV1ExactMatchSpec? metricSpec; + GoogleCloudAiplatformV1IdMatcher? idMatcher; - GoogleCloudAiplatformV1ExactMatchInput({ - this.instances, - this.metricSpec, + GoogleCloudAiplatformV1FeatureSelector({ + this.idMatcher, }); - GoogleCloudAiplatformV1ExactMatchInput.fromJson(core.Map json_) + GoogleCloudAiplatformV1FeatureSelector.fromJson(core.Map json_) : this( - instances: (json_['instances'] as core.List?) - ?.map((value) => - GoogleCloudAiplatformV1ExactMatchInstance.fromJson( - value as core.Map)) - .toList(), - metricSpec: json_.containsKey('metricSpec') - ? GoogleCloudAiplatformV1ExactMatchSpec.fromJson( - json_['metricSpec'] as core.Map) + idMatcher: json_.containsKey('idMatcher') + ? GoogleCloudAiplatformV1IdMatcher.fromJson( + json_['idMatcher'] as core.Map) : null, ); - core.Map toJson() => { - if (instances != null) 'instances': instances!, - if (metricSpec != null) 'metricSpec': metricSpec!, - }; -} + core.Map toJson() => { + if (idMatcher != null) 'idMatcher': idMatcher!, + }; +} + +/// Stats and Anomaly generated at specific timestamp for specific Feature. +/// +/// The start_time and end_time are used to define the time range of the dataset +/// that current stats belongs to, e.g. prediction traffic is bucketed into +/// prediction datasets by time window. If the Dataset is not defined by time +/// window, start_time = end_time. Timestamp of the stats and anomalies always +/// refers to end_time. Raw stats and anomalies are stored in stats_uri or +/// anomaly_uri in the tensorflow defined protos. Field data_stats contains +/// almost identical information with the raw stats in Vertex AI defined proto, +/// for UI to display. +class GoogleCloudAiplatformV1FeatureStatsAnomaly { + /// This is the threshold used when detecting anomalies. + /// + /// The threshold can be changed by user, so this one might be different from + /// ThresholdConfig.value. + core.double? anomalyDetectionThreshold; + + /// Path of the anomaly file for current feature values in Cloud Storage + /// bucket. + /// + /// Format: gs:////anomalies. Example: + /// gs://monitoring_bucket/feature_name/anomalies. Stats are stored as binary + /// format with Protobuf message Anoamlies are stored as binary format with + /// Protobuf message + /// [tensorflow.metadata.v0.AnomalyInfo](https://github.com/tensorflow/metadata/blob/master/tensorflow_metadata/proto/v0/anomalies.proto). + core.String? anomalyUri; + + /// Deviation from the current stats to baseline stats. + /// + /// 1. For categorical feature, the distribution distance is calculated by + /// L-inifinity norm. 2. For numerical feature, the distribution distance is + /// calculated by Jensen–Shannon divergence. + core.double? distributionDeviation; -/// Spec for exact match instance. -typedef GoogleCloudAiplatformV1ExactMatchInstance = $Instance00; + /// The end timestamp of window where stats were generated. + /// + /// For objectives where time window doesn't make sense (e.g. Featurestore + /// Snapshot Monitoring), end_time indicates the timestamp of the data used to + /// generate stats (e.g. 
timestamp we take snapshots for feature values). + core.String? endTime; -/// Exact match metric value for an instance. -class GoogleCloudAiplatformV1ExactMatchMetricValue { - /// Exact match score. + /// Feature importance score, only populated when cross-feature monitoring is + /// enabled. /// - /// Output only. + /// For now only used to represent feature attribution score within range \[0, + /// 1\] for ModelDeploymentMonitoringObjectiveType.FEATURE_ATTRIBUTION_SKEW + /// and ModelDeploymentMonitoringObjectiveType.FEATURE_ATTRIBUTION_DRIFT. core.double? score; - GoogleCloudAiplatformV1ExactMatchMetricValue({ + /// The start timestamp of window where stats were generated. + /// + /// For objectives where time window doesn't make sense (e.g. Featurestore + /// Snapshot Monitoring), start_time is only used to indicate the monitoring + /// intervals, so it always equals to (end_time - monitoring_interval). + core.String? startTime; + + /// Path of the stats file for current feature values in Cloud Storage bucket. + /// + /// Format: gs:////stats. Example: gs://monitoring_bucket/feature_name/stats. + /// Stats are stored as binary format with Protobuf message + /// [tensorflow.metadata.v0.FeatureNameStatistics](https://github.com/tensorflow/metadata/blob/master/tensorflow_metadata/proto/v0/statistics.proto). + core.String? statsUri; + + GoogleCloudAiplatformV1FeatureStatsAnomaly({ + this.anomalyDetectionThreshold, + this.anomalyUri, + this.distributionDeviation, + this.endTime, this.score, + this.startTime, + this.statsUri, }); - GoogleCloudAiplatformV1ExactMatchMetricValue.fromJson(core.Map json_) + GoogleCloudAiplatformV1FeatureStatsAnomaly.fromJson(core.Map json_) : this( + anomalyDetectionThreshold: + (json_['anomalyDetectionThreshold'] as core.num?)?.toDouble(), + anomalyUri: json_['anomalyUri'] as core.String?, + distributionDeviation: + (json_['distributionDeviation'] as core.num?)?.toDouble(), + endTime: json_['endTime'] as core.String?, score: (json_['score'] as core.num?)?.toDouble(), + startTime: json_['startTime'] as core.String?, + statsUri: json_['statsUri'] as core.String?, ); core.Map toJson() => { + if (anomalyDetectionThreshold != null) + 'anomalyDetectionThreshold': anomalyDetectionThreshold!, + if (anomalyUri != null) 'anomalyUri': anomalyUri!, + if (distributionDeviation != null) + 'distributionDeviation': distributionDeviation!, + if (endTime != null) 'endTime': endTime!, if (score != null) 'score': score!, + if (startTime != null) 'startTime': startTime!, + if (statsUri != null) 'statsUri': statsUri!, }; } -/// Results for exact match metric. -class GoogleCloudAiplatformV1ExactMatchResults { - /// Exact match metric values. - /// - /// Output only. - core.List? - exactMatchMetricValues; +/// Value for a feature. +class GoogleCloudAiplatformV1FeatureValue { + /// A list of bool type feature value. + GoogleCloudAiplatformV1BoolArray? boolArrayValue; - GoogleCloudAiplatformV1ExactMatchResults({ - this.exactMatchMetricValues, - }); + /// Bool type feature value. + core.bool? boolValue; - GoogleCloudAiplatformV1ExactMatchResults.fromJson(core.Map json_) - : this( - exactMatchMetricValues: - (json_['exactMatchMetricValues'] as core.List?) - ?.map((value) => - GoogleCloudAiplatformV1ExactMatchMetricValue.fromJson( - value as core.Map)) - .toList(), - ); + /// Bytes feature value. + core.String? 
bytesValue; + core.List get bytesValueAsBytes => + convert.base64.decode(bytesValue!); - core.Map toJson() => { - if (exactMatchMetricValues != null) - 'exactMatchMetricValues': exactMatchMetricValues!, - }; -} + set bytesValueAsBytes(core.List bytes_) { + bytesValue = + convert.base64.encode(bytes_).replaceAll('/', '_').replaceAll('+', '-'); + } -/// Spec for exact match metric - returns 1 if prediction and reference exactly -/// matches, otherwise 0. -typedef GoogleCloudAiplatformV1ExactMatchSpec = $Empty; + /// A list of double type feature value. + GoogleCloudAiplatformV1DoubleArray? doubleArrayValue; -/// Example-based explainability that returns the nearest neighbors from the -/// provided dataset. -class GoogleCloudAiplatformV1Examples { - /// The Cloud Storage input instances. - GoogleCloudAiplatformV1ExamplesExampleGcsSource? exampleGcsSource; + /// Double type feature value. + core.double? doubleValue; - /// The full configuration for the generated index, the semantics are the same - /// as metadata and should match - /// [NearestNeighborSearchConfig](https://cloud.google.com/vertex-ai/docs/explainable-ai/configuring-explanations-example-based#nearest-neighbor-search-config). - /// - /// The values for Object must be JSON objects. It can consist of `num`, - /// `String`, `bool` and `null` as well as `Map` and `List` values. - core.Object? nearestNeighborSearchConfig; + /// A list of int64 type feature value. + GoogleCloudAiplatformV1Int64Array? int64ArrayValue; - /// The number of neighbors to return when querying for examples. - core.int? neighborCount; + /// Int64 feature value. + core.String? int64Value; - /// Simplified preset configuration, which automatically sets configuration - /// values based on the desired query speed-precision trade-off and modality. - GoogleCloudAiplatformV1Presets? presets; + /// Metadata of feature value. + GoogleCloudAiplatformV1FeatureValueMetadata? metadata; - GoogleCloudAiplatformV1Examples({ - this.exampleGcsSource, - this.nearestNeighborSearchConfig, - this.neighborCount, - this.presets, + /// A list of string type feature value. + GoogleCloudAiplatformV1StringArray? stringArrayValue; + + /// String feature value. + core.String? stringValue; + + /// A struct type feature value. + GoogleCloudAiplatformV1StructValue? structValue; + + GoogleCloudAiplatformV1FeatureValue({ + this.boolArrayValue, + this.boolValue, + this.bytesValue, + this.doubleArrayValue, + this.doubleValue, + this.int64ArrayValue, + this.int64Value, + this.metadata, + this.stringArrayValue, + this.stringValue, + this.structValue, }); - GoogleCloudAiplatformV1Examples.fromJson(core.Map json_) + GoogleCloudAiplatformV1FeatureValue.fromJson(core.Map json_) : this( - exampleGcsSource: json_.containsKey('exampleGcsSource') - ? GoogleCloudAiplatformV1ExamplesExampleGcsSource.fromJson( - json_['exampleGcsSource'] + boolArrayValue: json_.containsKey('boolArrayValue') + ? GoogleCloudAiplatformV1BoolArray.fromJson( + json_['boolArrayValue'] as core.Map) : null, - nearestNeighborSearchConfig: json_['nearestNeighborSearchConfig'], - neighborCount: json_['neighborCount'] as core.int?, - presets: json_.containsKey('presets') - ? GoogleCloudAiplatformV1Presets.fromJson( - json_['presets'] as core.Map) + boolValue: json_['boolValue'] as core.bool?, + bytesValue: json_['bytesValue'] as core.String?, + doubleArrayValue: json_.containsKey('doubleArrayValue') + ? 
GoogleCloudAiplatformV1DoubleArray.fromJson( + json_['doubleArrayValue'] + as core.Map) + : null, + doubleValue: (json_['doubleValue'] as core.num?)?.toDouble(), + int64ArrayValue: json_.containsKey('int64ArrayValue') + ? GoogleCloudAiplatformV1Int64Array.fromJson( + json_['int64ArrayValue'] + as core.Map) + : null, + int64Value: json_['int64Value'] as core.String?, + metadata: json_.containsKey('metadata') + ? GoogleCloudAiplatformV1FeatureValueMetadata.fromJson( + json_['metadata'] as core.Map) + : null, + stringArrayValue: json_.containsKey('stringArrayValue') + ? GoogleCloudAiplatformV1StringArray.fromJson( + json_['stringArrayValue'] + as core.Map) + : null, + stringValue: json_['stringValue'] as core.String?, + structValue: json_.containsKey('structValue') + ? GoogleCloudAiplatformV1StructValue.fromJson( + json_['structValue'] as core.Map) : null, ); core.Map toJson() => { - if (exampleGcsSource != null) 'exampleGcsSource': exampleGcsSource!, - if (nearestNeighborSearchConfig != null) - 'nearestNeighborSearchConfig': nearestNeighborSearchConfig!, - if (neighborCount != null) 'neighborCount': neighborCount!, - if (presets != null) 'presets': presets!, + if (boolArrayValue != null) 'boolArrayValue': boolArrayValue!, + if (boolValue != null) 'boolValue': boolValue!, + if (bytesValue != null) 'bytesValue': bytesValue!, + if (doubleArrayValue != null) 'doubleArrayValue': doubleArrayValue!, + if (doubleValue != null) 'doubleValue': doubleValue!, + if (int64ArrayValue != null) 'int64ArrayValue': int64ArrayValue!, + if (int64Value != null) 'int64Value': int64Value!, + if (metadata != null) 'metadata': metadata!, + if (stringArrayValue != null) 'stringArrayValue': stringArrayValue!, + if (stringValue != null) 'stringValue': stringValue!, + if (structValue != null) 'structValue': structValue!, }; } -/// The Cloud Storage input instances. -class GoogleCloudAiplatformV1ExamplesExampleGcsSource { - /// The format in which instances are given, if not specified, assume it's - /// JSONL format. +/// A destination location for Feature values and format. +class GoogleCloudAiplatformV1FeatureValueDestination { + /// Output in BigQuery format. /// - /// Currently only JSONL format is supported. - /// Possible string values are: - /// - "DATA_FORMAT_UNSPECIFIED" : Format unspecified, used when unset. - /// - "JSONL" : Examples are stored in JSONL files. - core.String? dataFormat; + /// BigQueryDestination.output_uri in + /// FeatureValueDestination.bigquery_destination must refer to a table. + GoogleCloudAiplatformV1BigQueryDestination? bigqueryDestination; - /// The Cloud Storage location for the input instances. - GoogleCloudAiplatformV1GcsSource? gcsSource; + /// Output in CSV format. + /// + /// Array Feature value types are not allowed in CSV format. + GoogleCloudAiplatformV1CsvDestination? csvDestination; - GoogleCloudAiplatformV1ExamplesExampleGcsSource({ - this.dataFormat, - this.gcsSource, + /// Output in TFRecord format. + /// + /// Below are the mapping from Feature value type in Featurestore to Feature + /// value type in TFRecord: Value type in Featurestore | Value type in + /// TFRecord DOUBLE, DOUBLE_ARRAY | FLOAT_LIST INT64, INT64_ARRAY | INT64_LIST + /// STRING, STRING_ARRAY, BYTES | BYTES_LIST true -\> byte_string("true"), + /// false -\> byte_string("false") BOOL, BOOL_ARRAY (true, false) | BYTES_LIST + GoogleCloudAiplatformV1TFRecordDestination? 
tfrecordDestination; + + GoogleCloudAiplatformV1FeatureValueDestination({ + this.bigqueryDestination, + this.csvDestination, + this.tfrecordDestination, }); - GoogleCloudAiplatformV1ExamplesExampleGcsSource.fromJson(core.Map json_) + GoogleCloudAiplatformV1FeatureValueDestination.fromJson(core.Map json_) : this( - dataFormat: json_['dataFormat'] as core.String?, - gcsSource: json_.containsKey('gcsSource') - ? GoogleCloudAiplatformV1GcsSource.fromJson( - json_['gcsSource'] as core.Map) + bigqueryDestination: json_.containsKey('bigqueryDestination') + ? GoogleCloudAiplatformV1BigQueryDestination.fromJson( + json_['bigqueryDestination'] + as core.Map) + : null, + csvDestination: json_.containsKey('csvDestination') + ? GoogleCloudAiplatformV1CsvDestination.fromJson( + json_['csvDestination'] + as core.Map) + : null, + tfrecordDestination: json_.containsKey('tfrecordDestination') + ? GoogleCloudAiplatformV1TFRecordDestination.fromJson( + json_['tfrecordDestination'] + as core.Map) : null, ); core.Map toJson() => { - if (dataFormat != null) 'dataFormat': dataFormat!, - if (gcsSource != null) 'gcsSource': gcsSource!, + if (bigqueryDestination != null) + 'bigqueryDestination': bigqueryDestination!, + if (csvDestination != null) 'csvDestination': csvDestination!, + if (tfrecordDestination != null) + 'tfrecordDestination': tfrecordDestination!, }; } -/// Overrides for example-based explanations. -class GoogleCloudAiplatformV1ExamplesOverride { - /// The number of neighbors to return that have the same crowding tag. - core.int? crowdingCount; - - /// The format of the data being provided with each call. - /// Possible string values are: - /// - "DATA_FORMAT_UNSPECIFIED" : Unspecified format. Must not be used. - /// - "INSTANCES" : Provided data is a set of model inputs. - /// - "EMBEDDINGS" : Provided data is a set of embeddings. - core.String? dataFormat; - - /// The number of neighbors to return. - core.int? neighborCount; - - /// Restrict the resulting nearest neighbors to respect these constraints. - core.List? restrictions; - - /// If true, return the embeddings instead of neighbors. - core.bool? returnEmbeddings; +/// Container for list of values. +class GoogleCloudAiplatformV1FeatureValueList { + /// A list of feature values. + /// + /// All of them should be the same data type. + core.List? values; - GoogleCloudAiplatformV1ExamplesOverride({ - this.crowdingCount, - this.dataFormat, - this.neighborCount, - this.restrictions, - this.returnEmbeddings, + GoogleCloudAiplatformV1FeatureValueList({ + this.values, }); - GoogleCloudAiplatformV1ExamplesOverride.fromJson(core.Map json_) + GoogleCloudAiplatformV1FeatureValueList.fromJson(core.Map json_) : this( - crowdingCount: json_['crowdingCount'] as core.int?, - dataFormat: json_['dataFormat'] as core.String?, - neighborCount: json_['neighborCount'] as core.int?, - restrictions: (json_['restrictions'] as core.List?) - ?.map((value) => - GoogleCloudAiplatformV1ExamplesRestrictionsNamespace.fromJson( - value as core.Map)) + values: (json_['values'] as core.List?) 
+ ?.map((value) => GoogleCloudAiplatformV1FeatureValue.fromJson( + value as core.Map)) .toList(), - returnEmbeddings: json_['returnEmbeddings'] as core.bool?, ); core.Map toJson() => { - if (crowdingCount != null) 'crowdingCount': crowdingCount!, - if (dataFormat != null) 'dataFormat': dataFormat!, - if (neighborCount != null) 'neighborCount': neighborCount!, - if (restrictions != null) 'restrictions': restrictions!, - if (returnEmbeddings != null) 'returnEmbeddings': returnEmbeddings!, + if (values != null) 'values': values!, }; } -/// Restrictions namespace for example-based explanations overrides. -class GoogleCloudAiplatformV1ExamplesRestrictionsNamespace { - /// The list of allowed tags. - core.List? allow; - - /// The list of deny tags. - core.List? deny; - - /// The namespace name. - core.String? namespaceName; +/// Metadata of feature value. +class GoogleCloudAiplatformV1FeatureValueMetadata { + /// Feature generation timestamp. + /// + /// Typically, it is provided by user at feature ingestion time. If not, + /// feature store will use the system timestamp when the data is ingested into + /// feature store. For streaming ingestion, the time, aligned by days, must be + /// no older than five years (1825 days) and no later than one year (366 days) + /// in the future. + core.String? generateTime; - GoogleCloudAiplatformV1ExamplesRestrictionsNamespace({ - this.allow, - this.deny, - this.namespaceName, + GoogleCloudAiplatformV1FeatureValueMetadata({ + this.generateTime, }); - GoogleCloudAiplatformV1ExamplesRestrictionsNamespace.fromJson(core.Map json_) + GoogleCloudAiplatformV1FeatureValueMetadata.fromJson(core.Map json_) : this( - allow: (json_['allow'] as core.List?) - ?.map((value) => value as core.String) - .toList(), - deny: (json_['deny'] as core.List?) - ?.map((value) => value as core.String) - .toList(), - namespaceName: json_['namespaceName'] as core.String?, + generateTime: json_['generateTime'] as core.String?, ); core.Map toJson() => { - if (allow != null) 'allow': allow!, - if (deny != null) 'deny': deny!, - if (namespaceName != null) 'namespaceName': namespaceName!, + if (generateTime != null) 'generateTime': generateTime!, }; } -/// Instance of a general execution. -class GoogleCloudAiplatformV1Execution { - /// Timestamp when this Execution was created. +/// FeatureView is representation of values that the FeatureOnlineStore will +/// serve based on its syncConfig. +class GoogleCloudAiplatformV1FeatureView { + /// Configures how data is supposed to be extracted from a BigQuery source to + /// be loaded onto the FeatureOnlineStore. + /// + /// Optional. + GoogleCloudAiplatformV1FeatureViewBigQuerySource? bigQuerySource; + + /// Timestamp when this FeatureView was created. /// /// Output only. core.String? createTime; - /// Description of the Execution - core.String? description; + /// Used to perform consistent read-modify-write updates. + /// + /// If not set, a blind "overwrite" update happens. + /// + /// Optional. + core.String? etag; - /// User provided display name of the Execution. + /// Configures the features from a Feature Registry source that need to be + /// loaded onto the FeatureOnlineStore. /// - /// May be up to 128 Unicode characters. - core.String? displayName; + /// Optional. + GoogleCloudAiplatformV1FeatureViewFeatureRegistrySource? + featureRegistrySource; - /// An eTag used to perform consistent read-modify-write updates. + /// Configuration for index preparation for vector search. /// - /// If not set, a blind "overwrite" update happens. 
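FeatureValue above is a oneof-style wrapper: set exactly one of the typed value fields, and use the `bytesValueAsBytes` accessor for raw bytes since it handles the URL-safe base64 encoding of `bytesValue`. FeatureValueList then expects values of a single data type. A short sketch with placeholder values and timestamps:

import 'dart:convert' show utf8;
import 'package:googleapis/aiplatform/v1.dart';

// One value field per FeatureValue; metadata.generateTime is optional.
final colorValue = GoogleCloudAiplatformV1FeatureValue(
  stringValue: 'blue',
  metadata: GoogleCloudAiplatformV1FeatureValueMetadata(
    generateTime: '2024-12-01T00:00:00Z',
  ),
);

// bytesValueAsBytes stores the raw bytes as URL-safe base64 in `bytesValue`.
final payloadValue = GoogleCloudAiplatformV1FeatureValue()
  ..bytesValueAsBytes = utf8.encode('raw payload');

// A homogeneous list, as FeatureValueList requires.
final colorHistory = GoogleCloudAiplatformV1FeatureValueList(
  values: [
    colorValue,
    GoogleCloudAiplatformV1FeatureValue(stringValue: 'green'),
  ],
);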
- core.String? etag; + /// It contains the required configurations to create an index from source + /// data, so that approximate nearest neighbor (a.k.a ANN) algorithms search + /// can be performed during online serving. + /// + /// Optional. + GoogleCloudAiplatformV1FeatureViewIndexConfig? indexConfig; - /// The labels with user-defined metadata to organize your Executions. + /// The labels with user-defined metadata to organize your FeatureViews. /// /// Label keys and values can be no longer than 64 characters (Unicode /// codepoints), can only contain lowercase letters, numeric characters, - /// underscores and dashes. International characters are allowed. No more than - /// 64 user labels can be associated with one Execution (System labels are - /// excluded). + /// underscores and dashes. International characters are allowed. See + /// https://goo.gl/xmQnxf for more information on and examples of labels. No + /// more than 64 user labels can be associated with one + /// FeatureOnlineStore(System labels are excluded)." System reserved label + /// keys are prefixed with "aiplatform.googleapis.com/" and are immutable. + /// + /// Optional. core.Map? labels; - /// Properties of the Execution. - /// - /// Top level metadata keys' heading and trailing spaces will be trimmed. The - /// size of this field should not exceed 200KB. + /// Identifier. /// - /// The values for Object must be JSON objects. It can consist of `num`, - /// `String`, `bool` and `null` as well as `Map` and `List` values. - core.Map? metadata; + /// Name of the FeatureView. Format: + /// `projects/{project}/locations/{location}/featureOnlineStores/{feature_online_store}/featureViews/{feature_view}` + core.String? name; - /// The resource name of the Execution. + /// Configuration for FeatureView created under Optimized FeatureOnlineStore. /// - /// Output only. - core.String? name; + /// Optional. + GoogleCloudAiplatformV1FeatureViewOptimizedConfig? optimizedConfig; - /// The title of the schema describing the metadata. + /// Reserved for future use. /// - /// Schema title and version is expected to be registered in earlier Create - /// Schema calls. And both are used together as unique identifiers to identify - /// schemas within the local metadata store. - core.String? schemaTitle; + /// Output only. + core.bool? satisfiesPzi; - /// The version of the schema in `schema_title` to use. + /// Reserved for future use. /// - /// Schema title and version is expected to be registered in earlier Create - /// Schema calls. And both are used together as unique identifiers to identify - /// schemas within the local metadata store. - core.String? schemaVersion; + /// Output only. + core.bool? satisfiesPzs; - /// The state of this Execution. + /// Configures when data is to be synced/updated for this FeatureView. /// - /// This is a property of the Execution, and does not imply or capture any - /// ongoing process. This property is managed by clients (such as Vertex AI - /// Pipelines) and the system does not prescribe or check the validity of - /// state transitions. - /// Possible string values are: - /// - "STATE_UNSPECIFIED" : Unspecified Execution state - /// - "NEW" : The Execution is new - /// - "RUNNING" : The Execution is running - /// - "COMPLETE" : The Execution has finished running - /// - "FAILED" : The Execution has failed - /// - "CACHED" : The Execution completed through Cache hit. - /// - "CANCELLED" : The Execution was cancelled. - core.String? 
state; + /// At the end of the sync the latest featureValues for each entityId of this + /// FeatureView are made ready for online serving. + GoogleCloudAiplatformV1FeatureViewSyncConfig? syncConfig; - /// Timestamp when this Execution was last updated. + /// Timestamp when this FeatureView was last updated. /// /// Output only. core.String? updateTime; - GoogleCloudAiplatformV1Execution({ + /// The Vertex RAG Source that the FeatureView is linked to. + /// + /// Optional. + GoogleCloudAiplatformV1FeatureViewVertexRagSource? vertexRagSource; + + GoogleCloudAiplatformV1FeatureView({ + this.bigQuerySource, this.createTime, - this.description, - this.displayName, this.etag, + this.featureRegistrySource, + this.indexConfig, this.labels, - this.metadata, this.name, - this.schemaTitle, - this.schemaVersion, - this.state, + this.optimizedConfig, + this.satisfiesPzi, + this.satisfiesPzs, + this.syncConfig, this.updateTime, + this.vertexRagSource, }); - GoogleCloudAiplatformV1Execution.fromJson(core.Map json_) + GoogleCloudAiplatformV1FeatureView.fromJson(core.Map json_) : this( + bigQuerySource: json_.containsKey('bigQuerySource') + ? GoogleCloudAiplatformV1FeatureViewBigQuerySource.fromJson( + json_['bigQuerySource'] + as core.Map) + : null, createTime: json_['createTime'] as core.String?, - description: json_['description'] as core.String?, - displayName: json_['displayName'] as core.String?, etag: json_['etag'] as core.String?, + featureRegistrySource: json_.containsKey('featureRegistrySource') + ? GoogleCloudAiplatformV1FeatureViewFeatureRegistrySource + .fromJson(json_['featureRegistrySource'] + as core.Map) + : null, + indexConfig: json_.containsKey('indexConfig') + ? GoogleCloudAiplatformV1FeatureViewIndexConfig.fromJson( + json_['indexConfig'] as core.Map) + : null, labels: (json_['labels'] as core.Map?)?.map( (key, value) => core.MapEntry( @@ -35054,3170 +40847,2989 @@ class GoogleCloudAiplatformV1Execution { value as core.String, ), ), - metadata: json_.containsKey('metadata') - ? json_['metadata'] as core.Map - : null, name: json_['name'] as core.String?, - schemaTitle: json_['schemaTitle'] as core.String?, - schemaVersion: json_['schemaVersion'] as core.String?, - state: json_['state'] as core.String?, + optimizedConfig: json_.containsKey('optimizedConfig') + ? GoogleCloudAiplatformV1FeatureViewOptimizedConfig.fromJson( + json_['optimizedConfig'] + as core.Map) + : null, + satisfiesPzi: json_['satisfiesPzi'] as core.bool?, + satisfiesPzs: json_['satisfiesPzs'] as core.bool?, + syncConfig: json_.containsKey('syncConfig') + ? GoogleCloudAiplatformV1FeatureViewSyncConfig.fromJson( + json_['syncConfig'] as core.Map) + : null, updateTime: json_['updateTime'] as core.String?, + vertexRagSource: json_.containsKey('vertexRagSource') + ? 
GoogleCloudAiplatformV1FeatureViewVertexRagSource.fromJson( + json_['vertexRagSource'] + as core.Map) + : null, ); core.Map toJson() => { + if (bigQuerySource != null) 'bigQuerySource': bigQuerySource!, if (createTime != null) 'createTime': createTime!, - if (description != null) 'description': description!, - if (displayName != null) 'displayName': displayName!, if (etag != null) 'etag': etag!, + if (featureRegistrySource != null) + 'featureRegistrySource': featureRegistrySource!, + if (indexConfig != null) 'indexConfig': indexConfig!, if (labels != null) 'labels': labels!, - if (metadata != null) 'metadata': metadata!, if (name != null) 'name': name!, - if (schemaTitle != null) 'schemaTitle': schemaTitle!, - if (schemaVersion != null) 'schemaVersion': schemaVersion!, - if (state != null) 'state': state!, + if (optimizedConfig != null) 'optimizedConfig': optimizedConfig!, + if (satisfiesPzi != null) 'satisfiesPzi': satisfiesPzi!, + if (satisfiesPzs != null) 'satisfiesPzs': satisfiesPzs!, + if (syncConfig != null) 'syncConfig': syncConfig!, if (updateTime != null) 'updateTime': updateTime!, + if (vertexRagSource != null) 'vertexRagSource': vertexRagSource!, }; } -/// Request message for PredictionService.Explain. -class GoogleCloudAiplatformV1ExplainRequest { - /// If specified, this ExplainRequest will be served by the chosen - /// DeployedModel, overriding Endpoint.traffic_split. - core.String? deployedModelId; - - /// If specified, overrides the explanation_spec of the DeployedModel. +class GoogleCloudAiplatformV1FeatureViewBigQuerySource { + /// Columns to construct entity_id / row keys. /// - /// Can be used for explaining prediction results with different - /// configurations, such as: - Explaining top-5 predictions results as opposed - /// to top-1; - Increasing path count or step count of the attribution methods - /// to reduce approximate errors; - Using different baselines for explaining - /// the prediction results. - GoogleCloudAiplatformV1ExplanationSpecOverride? explanationSpecOverride; + /// Required. + core.List? entityIdColumns; - /// The instances that are the input to the explanation call. - /// - /// A DeployedModel may have an upper limit on the number of instances it - /// supports per request, and when it is exceeded the explanation call errors - /// in case of AutoML Models, or, in case of customer created Models, the - /// behaviour is as documented by that Model. The schema of any single - /// instance may be specified via Endpoint's DeployedModels' Model's - /// PredictSchemata's instance_schema_uri. + /// The BigQuery view URI that will be materialized on each sync trigger based + /// on FeatureView.SyncConfig. /// /// Required. - /// - /// The values for Object must be JSON objects. It can consist of `num`, - /// `String`, `bool` and `null` as well as `Map` and `List` values. - core.List? instances; + core.String? uri; - /// The parameters that govern the prediction. - /// - /// The schema of the parameters may be specified via Endpoint's - /// DeployedModels' Model's PredictSchemata's parameters_schema_uri. + GoogleCloudAiplatformV1FeatureViewBigQuerySource({ + this.entityIdColumns, + this.uri, + }); + + GoogleCloudAiplatformV1FeatureViewBigQuerySource.fromJson(core.Map json_) + : this( + entityIdColumns: (json_['entityIdColumns'] as core.List?) 
+ ?.map((value) => value as core.String) + .toList(), + uri: json_['uri'] as core.String?, + ); + + core.Map toJson() => { + if (entityIdColumns != null) 'entityIdColumns': entityIdColumns!, + if (uri != null) 'uri': uri!, + }; +} + +/// Lookup key for a feature view. +class GoogleCloudAiplatformV1FeatureViewDataKey { + /// The actual Entity ID will be composed from this struct. /// - /// The values for Object must be JSON objects. It can consist of `num`, - /// `String`, `bool` and `null` as well as `Map` and `List` values. - core.Object? parameters; + /// This should match with the way ID is defined in the FeatureView spec. + GoogleCloudAiplatformV1FeatureViewDataKeyCompositeKey? compositeKey; - GoogleCloudAiplatformV1ExplainRequest({ - this.deployedModelId, - this.explanationSpecOverride, - this.instances, - this.parameters, + /// String key to use for lookup. + core.String? key; + + GoogleCloudAiplatformV1FeatureViewDataKey({ + this.compositeKey, + this.key, }); - GoogleCloudAiplatformV1ExplainRequest.fromJson(core.Map json_) + GoogleCloudAiplatformV1FeatureViewDataKey.fromJson(core.Map json_) : this( - deployedModelId: json_['deployedModelId'] as core.String?, - explanationSpecOverride: json_.containsKey('explanationSpecOverride') - ? GoogleCloudAiplatformV1ExplanationSpecOverride.fromJson( - json_['explanationSpecOverride'] - as core.Map) - : null, - instances: json_.containsKey('instances') - ? json_['instances'] as core.List + compositeKey: json_.containsKey('compositeKey') + ? GoogleCloudAiplatformV1FeatureViewDataKeyCompositeKey.fromJson( + json_['compositeKey'] as core.Map) : null, - parameters: json_['parameters'], + key: json_['key'] as core.String?, ); core.Map toJson() => { - if (deployedModelId != null) 'deployedModelId': deployedModelId!, - if (explanationSpecOverride != null) - 'explanationSpecOverride': explanationSpecOverride!, - if (instances != null) 'instances': instances!, - if (parameters != null) 'parameters': parameters!, + if (compositeKey != null) 'compositeKey': compositeKey!, + if (key != null) 'key': key!, }; } -/// Response message for PredictionService.Explain. -class GoogleCloudAiplatformV1ExplainResponse { - /// ID of the Endpoint's DeployedModel that served this explanation. - core.String? deployedModelId; - - /// The explanations of the Model's PredictResponse.predictions. - /// - /// It has the same number of elements as instances to be explained. - core.List? explanations; - - /// The predictions that are the output of the predictions call. - /// - /// Same as PredictResponse.predictions. +/// ID that is comprised from several parts (columns). +class GoogleCloudAiplatformV1FeatureViewDataKeyCompositeKey { + /// Parts to construct Entity ID. /// - /// The values for Object must be JSON objects. It can consist of `num`, - /// `String`, `bool` and `null` as well as `Map` and `List` values. - core.List? predictions; + /// Should match with the same ID columns as defined in FeatureView in the + /// same order. + core.List? parts; - GoogleCloudAiplatformV1ExplainResponse({ - this.deployedModelId, - this.explanations, - this.predictions, + GoogleCloudAiplatformV1FeatureViewDataKeyCompositeKey({ + this.parts, }); - GoogleCloudAiplatformV1ExplainResponse.fromJson(core.Map json_) + GoogleCloudAiplatformV1FeatureViewDataKeyCompositeKey.fromJson(core.Map json_) : this( - deployedModelId: json_['deployedModelId'] as core.String?, - explanations: (json_['explanations'] as core.List?) 
- ?.map((value) => GoogleCloudAiplatformV1Explanation.fromJson( - value as core.Map)) + parts: (json_['parts'] as core.List?) + ?.map((value) => value as core.String) .toList(), - predictions: json_.containsKey('predictions') - ? json_['predictions'] as core.List - : null, ); core.Map toJson() => { - if (deployedModelId != null) 'deployedModelId': deployedModelId!, - if (explanations != null) 'explanations': explanations!, - if (predictions != null) 'predictions': predictions!, + if (parts != null) 'parts': parts!, }; } -/// Explanation of a prediction (provided in PredictResponse.predictions) -/// produced by the Model on a given instance. -class GoogleCloudAiplatformV1Explanation { - /// Feature attributions grouped by predicted outputs. - /// - /// For Models that predict only one output, such as regression Models that - /// predict only one score, there is only one attibution that explains the - /// predicted output. For Models that predict multiple outputs, such as - /// multiclass Models that predict multiple classes, each element explains one - /// specific item. Attribution.output_index can be used to identify which - /// output this attribution is explaining. By default, we provide Shapley - /// values for the predicted class. However, you can configure the explanation - /// request to generate Shapley values for any other classes too. For example, - /// if a model predicts a probability of `0.4` for approving a loan - /// application, the model's decision is to reject the application since - /// `p(reject) = 0.6 > p(approve) = 0.4`, and the default Shapley values would - /// be computed for rejection decision and not approval, even though the - /// latter might be the positive class. If users set - /// ExplanationParameters.top_k, the attributions are sorted by - /// instance_output_value in descending order. If - /// ExplanationParameters.output_indices is specified, the attributions are - /// stored by Attribution.output_index in the same order as they appear in the - /// output_indices. +/// A Feature Registry source for features that need to be synced to Online +/// Store. +class GoogleCloudAiplatformV1FeatureViewFeatureRegistrySource { + /// List of features that need to be synced to Online Store. /// - /// Output only. - core.List? attributions; + /// Required. + core.List< + GoogleCloudAiplatformV1FeatureViewFeatureRegistrySourceFeatureGroup>? + featureGroups; - /// List of the nearest neighbors for example-based explanations. - /// - /// For models deployed with the examples explanations feature enabled, the - /// attributions field is empty and instead the neighbors field is populated. + /// The project number of the parent project of the Feature Groups. /// - /// Output only. - core.List? neighbors; + /// Optional. + core.String? projectNumber; - GoogleCloudAiplatformV1Explanation({ - this.attributions, - this.neighbors, + GoogleCloudAiplatformV1FeatureViewFeatureRegistrySource({ + this.featureGroups, + this.projectNumber, }); - GoogleCloudAiplatformV1Explanation.fromJson(core.Map json_) + GoogleCloudAiplatformV1FeatureViewFeatureRegistrySource.fromJson( + core.Map json_) : this( - attributions: (json_['attributions'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1Attribution.fromJson( - value as core.Map)) - .toList(), - neighbors: (json_['neighbors'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1Neighbor.fromJson( - value as core.Map)) + featureGroups: (json_['featureGroups'] as core.List?) 
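// Illustrative usage sketch (not part of the generated output): how a caller
// of this surface might construct the new FeatureView and FeatureViewDataKey
// messages defined above and serialize them with the generated toJson()
// helpers. Assumes the library is imported as
// `package:googleapis/aiplatform/v1.dart`; the project, URI, and column names
// below are placeholders, not values from this change.
void featureViewSketch() {
  final featureView = GoogleCloudAiplatformV1FeatureView(
    bigQuerySource: GoogleCloudAiplatformV1FeatureViewBigQuerySource(
      uri: 'bq://my-project.my_dataset.my_view',
      entityIdColumns: ['entity_id'],
    ),
    labels: {'team': 'ml-platform'},
  );

  // Lookup keys for online serving: either a plain string key or a composite
  // key whose parts match the entity ID columns, in the same order.
  final simpleKey =
      GoogleCloudAiplatformV1FeatureViewDataKey(key: 'entity_123');
  final compositeKey = GoogleCloudAiplatformV1FeatureViewDataKey(
    compositeKey: GoogleCloudAiplatformV1FeatureViewDataKeyCompositeKey(
      parts: ['us', 'entity_123'],
    ),
  );

  // toJson() yields the REST payload shape for these messages.
  print(featureView.toJson());
  print(simpleKey.toJson());
  print(compositeKey.toJson());
}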
+ ?.map((value) => + GoogleCloudAiplatformV1FeatureViewFeatureRegistrySourceFeatureGroup + .fromJson(value as core.Map)) .toList(), + projectNumber: json_['projectNumber'] as core.String?, ); core.Map toJson() => { - if (attributions != null) 'attributions': attributions!, - if (neighbors != null) 'neighbors': neighbors!, + if (featureGroups != null) 'featureGroups': featureGroups!, + if (projectNumber != null) 'projectNumber': projectNumber!, }; } -/// Metadata describing the Model's input and output for explanation. -class GoogleCloudAiplatformV1ExplanationMetadata { - /// Points to a YAML file stored on Google Cloud Storage describing the format - /// of the feature attributions. - /// - /// The schema is defined as an OpenAPI 3.0.2 - /// [Schema Object](https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.2.md#schemaObject). - /// AutoML tabular Models always have this field populated by Vertex AI. Note: - /// The URI given on output may be different, including the URI scheme, than - /// the one given on input. The output URI will point to a location where the - /// user only has a read access. - core.String? featureAttributionsSchemaUri; - - /// Map from feature names to feature input metadata. - /// - /// Keys are the name of the features. Values are the specification of the - /// feature. An empty InputMetadata is valid. It describes a text feature - /// which has the name specified as the key in ExplanationMetadata.inputs. The - /// baseline of the empty feature is chosen by Vertex AI. For Vertex - /// AI-provided Tensorflow images, the key can be any friendly name of the - /// feature. Once specified, featureAttributions are keyed by this key (if not - /// grouped with another feature). For custom images, the key must match with - /// the key in instance. +/// Features belonging to a single feature group that will be synced to Online +/// Store. +class GoogleCloudAiplatformV1FeatureViewFeatureRegistrySourceFeatureGroup { + /// Identifier of the feature group. /// /// Required. - core.Map? inputs; - - /// Name of the source to generate embeddings for example based explanations. - core.String? latentSpaceSource; + core.String? featureGroupId; - /// Map from output names to output metadata. - /// - /// For Vertex AI-provided Tensorflow images, keys can be any user defined - /// string that consists of any UTF-8 characters. For custom images, keys are - /// the name of the output field in the prediction to be explained. Currently - /// only one key is allowed. + /// Identifiers of features under the feature group. /// /// Required. - core.Map? outputs; + core.List? 
featureIds; - GoogleCloudAiplatformV1ExplanationMetadata({ - this.featureAttributionsSchemaUri, - this.inputs, - this.latentSpaceSource, - this.outputs, + GoogleCloudAiplatformV1FeatureViewFeatureRegistrySourceFeatureGroup({ + this.featureGroupId, + this.featureIds, }); - GoogleCloudAiplatformV1ExplanationMetadata.fromJson(core.Map json_) + GoogleCloudAiplatformV1FeatureViewFeatureRegistrySourceFeatureGroup.fromJson( + core.Map json_) : this( - featureAttributionsSchemaUri: - json_['featureAttributionsSchemaUri'] as core.String?, - inputs: - (json_['inputs'] as core.Map?)?.map( - (key, value) => core.MapEntry( - key, - GoogleCloudAiplatformV1ExplanationMetadataInputMetadata.fromJson( - value as core.Map), - ), - ), - latentSpaceSource: json_['latentSpaceSource'] as core.String?, - outputs: - (json_['outputs'] as core.Map?)?.map( - (key, value) => core.MapEntry( - key, - GoogleCloudAiplatformV1ExplanationMetadataOutputMetadata.fromJson( - value as core.Map), - ), - ), + featureGroupId: json_['featureGroupId'] as core.String?, + featureIds: (json_['featureIds'] as core.List?) + ?.map((value) => value as core.String) + .toList(), ); core.Map toJson() => { - if (featureAttributionsSchemaUri != null) - 'featureAttributionsSchemaUri': featureAttributionsSchemaUri!, - if (inputs != null) 'inputs': inputs!, - if (latentSpaceSource != null) 'latentSpaceSource': latentSpaceSource!, - if (outputs != null) 'outputs': outputs!, + if (featureGroupId != null) 'featureGroupId': featureGroupId!, + if (featureIds != null) 'featureIds': featureIds!, }; } -/// Metadata of the input of a feature. -/// -/// Fields other than InputMetadata.input_baselines are applicable only for -/// Models that are using Vertex AI-provided images for Tensorflow. -class GoogleCloudAiplatformV1ExplanationMetadataInputMetadata { - /// Specifies the shape of the values of the input if the input is a sparse - /// representation. - /// - /// Refer to Tensorflow documentation for more details: - /// https://www.tensorflow.org/api_docs/python/tf/sparse/SparseTensor. - core.String? denseShapeTensorName; - - /// A list of baselines for the encoded tensor. +/// Configuration for vector indexing. +class GoogleCloudAiplatformV1FeatureViewIndexConfig { + /// Configuration options for using brute force search, which simply + /// implements the standard linear search in the database for each query. /// - /// The shape of each baseline should match the shape of the encoded tensor. - /// If a scalar is provided, Vertex AI broadcasts to the same shape as the - /// encoded tensor. + /// It is primarily meant for benchmarking and to generate the ground truth + /// for approximate search. /// - /// The values for Object must be JSON objects. It can consist of `num`, - /// `String`, `bool` and `null` as well as `Map` and `List` values. - core.List? encodedBaselines; + /// Optional. + GoogleCloudAiplatformV1FeatureViewIndexConfigBruteForceConfig? + bruteForceConfig; - /// Encoded tensor is a transformation of the input tensor. + /// Column of crowding. /// - /// Must be provided if choosing Integrated Gradients attribution or XRAI - /// attribution and the input tensor is not differentiable. An encoded tensor - /// is generated if the input tensor is encoded by a lookup table. - core.String? encodedTensorName; - - /// Defines how the feature is encoded into the input tensor. 
+ /// This column contains crowding attribute which is a constraint on a + /// neighbor list produced by FeatureOnlineStoreService.SearchNearestEntities + /// to diversify search results. If + /// NearestNeighborQuery.per_crowding_attribute_neighbor_count is set to K in + /// SearchNearestEntitiesRequest, it's guaranteed that no more than K entities + /// of the same crowding attribute are returned in the response. /// - /// Defaults to IDENTITY. - /// Possible string values are: - /// - "ENCODING_UNSPECIFIED" : Default value. This is the same as IDENTITY. - /// - "IDENTITY" : The tensor represents one feature. - /// - "BAG_OF_FEATURES" : The tensor represents a bag of features where each - /// index maps to a feature. InputMetadata.index_feature_mapping must be - /// provided for this encoding. For example: ``` input = [27, 6.0, 150] - /// index_feature_mapping = ["age", "height", "weight"] ``` - /// - "BAG_OF_FEATURES_SPARSE" : The tensor represents a bag of features where - /// each index maps to a feature. Zero values in the tensor indicates feature - /// being non-existent. InputMetadata.index_feature_mapping must be provided - /// for this encoding. For example: ``` input = [2, 0, 5, 0, 1] - /// index_feature_mapping = ["a", "b", "c", "d", "e"] ``` - /// - "INDICATOR" : The tensor is a list of binaries representing whether a - /// feature exists or not (1 indicates existence). - /// InputMetadata.index_feature_mapping must be provided for this encoding. - /// For example: ``` input = [1, 0, 1, 0, 1] index_feature_mapping = ["a", - /// "b", "c", "d", "e"] ``` - /// - "COMBINED_EMBEDDING" : The tensor is encoded into a 1-dimensional array - /// represented by an encoded tensor. InputMetadata.encoded_tensor_name must - /// be provided for this encoding. For example: ``` input = ["This", "is", - /// "a", "test", "."] encoded = [0.1, 0.2, 0.3, 0.4, 0.5] ``` - /// - "CONCAT_EMBEDDING" : Select this encoding when the input tensor is - /// encoded into a 2-dimensional array represented by an encoded tensor. - /// InputMetadata.encoded_tensor_name must be provided for this encoding. The - /// first dimension of the encoded tensor's shape is the same as the input - /// tensor's shape. For example: ``` input = ["This", "is", "a", "test", "."] - /// encoded = [[0.1, 0.2, 0.3, 0.4, 0.5], [0.2, 0.1, 0.4, 0.3, 0.5], [0.5, - /// 0.1, 0.3, 0.5, 0.4], [0.5, 0.3, 0.1, 0.2, 0.4], [0.4, 0.3, 0.2, 0.5, 0.1]] - /// ``` - core.String? encoding; + /// Optional. + core.String? crowdingColumn; - /// The domain details of the input feature value. + /// The distance measure used in nearest neighbor search. /// - /// Like min/max, original mean or standard deviation if normalized. - GoogleCloudAiplatformV1ExplanationMetadataInputMetadataFeatureValueDomain? - featureValueDomain; + /// Optional. + /// Possible string values are: + /// - "DISTANCE_MEASURE_TYPE_UNSPECIFIED" : Should not be set. + /// - "SQUARED_L2_DISTANCE" : Euclidean (L_2) Distance. + /// - "COSINE_DISTANCE" : Cosine Distance. Defined as 1 - cosine similarity. + /// We strongly suggest using DOT_PRODUCT_DISTANCE + UNIT_L2_NORM instead of + /// COSINE distance. Our algorithms have been more optimized for DOT_PRODUCT + /// distance which, when combined with UNIT_L2_NORM, is mathematically + /// equivalent to COSINE distance and results in the same ranking. + /// - "DOT_PRODUCT_DISTANCE" : Dot Product Distance. Defined as a negative of + /// the dot product. + core.String? distanceMeasureType; - /// Name of the group that the input belongs to. 
+ /// Column of embedding. /// - /// Features with the same group name will be treated as one feature when - /// computing attributions. Features grouped together can have different - /// shapes in value. If provided, there will be one single attribution - /// generated in Attribution.feature_attributions, keyed by the group name. - core.String? groupName; - - /// A list of feature names for each index in the input tensor. + /// This column contains the source data to create index for vector search. + /// embedding_column must be set when using vector search. /// - /// Required when the input InputMetadata.encoding is BAG_OF_FEATURES, - /// BAG_OF_FEATURES_SPARSE, INDICATOR. - core.List? indexFeatureMapping; + /// Optional. + core.String? embeddingColumn; - /// Specifies the index of the values of the input tensor. + /// The number of dimensions of the input embedding. /// - /// Required when the input tensor is a sparse representation. Refer to - /// Tensorflow documentation for more details: - /// https://www.tensorflow.org/api_docs/python/tf/sparse/SparseTensor. - core.String? indicesTensorName; + /// Optional. + core.int? embeddingDimension; - /// Baseline inputs for this feature. - /// - /// If no baseline is specified, Vertex AI chooses the baseline for this - /// feature. If multiple baselines are specified, Vertex AI returns the - /// average attributions across them in Attribution.feature_attributions. For - /// Vertex AI-provided Tensorflow images (both 1.x and 2.x), the shape of each - /// baseline must match the shape of the input tensor. If a scalar is - /// provided, we broadcast to the same shape as the input tensor. For custom - /// images, the element of the baselines must be in the same format as the - /// feature's input in the instance\[\]. The schema of any single instance may - /// be specified via Endpoint's DeployedModels' Model's PredictSchemata's - /// instance_schema_uri. + /// Columns of features that're used to filter vector search results. /// - /// The values for Object must be JSON objects. It can consist of `num`, - /// `String`, `bool` and `null` as well as `Map` and `List` values. - core.List? inputBaselines; + /// Optional. + core.List? filterColumns; - /// Name of the input tensor for this feature. + /// Configuration options for the tree-AH algorithm (Shallow tree + Asymmetric + /// Hashing). /// - /// Required and is only applicable to Vertex AI-provided images for - /// Tensorflow. - core.String? inputTensorName; - - /// Modality of the feature. + /// Please refer to this paper for more details: + /// https://arxiv.org/abs/1908.10396 /// - /// Valid values are: numeric, image. Defaults to numeric. - core.String? modality; - - /// Visualization configurations for image explanation. - GoogleCloudAiplatformV1ExplanationMetadataInputMetadataVisualization? - visualization; + /// Optional. + GoogleCloudAiplatformV1FeatureViewIndexConfigTreeAHConfig? 
treeAhConfig; - GoogleCloudAiplatformV1ExplanationMetadataInputMetadata({ - this.denseShapeTensorName, - this.encodedBaselines, - this.encodedTensorName, - this.encoding, - this.featureValueDomain, - this.groupName, - this.indexFeatureMapping, - this.indicesTensorName, - this.inputBaselines, - this.inputTensorName, - this.modality, - this.visualization, + GoogleCloudAiplatformV1FeatureViewIndexConfig({ + this.bruteForceConfig, + this.crowdingColumn, + this.distanceMeasureType, + this.embeddingColumn, + this.embeddingDimension, + this.filterColumns, + this.treeAhConfig, }); - - GoogleCloudAiplatformV1ExplanationMetadataInputMetadata.fromJson( - core.Map json_) - : this( - denseShapeTensorName: json_['denseShapeTensorName'] as core.String?, - encodedBaselines: json_.containsKey('encodedBaselines') - ? json_['encodedBaselines'] as core.List - : null, - encodedTensorName: json_['encodedTensorName'] as core.String?, - encoding: json_['encoding'] as core.String?, - featureValueDomain: json_.containsKey('featureValueDomain') - ? GoogleCloudAiplatformV1ExplanationMetadataInputMetadataFeatureValueDomain - .fromJson(json_['featureValueDomain'] + + GoogleCloudAiplatformV1FeatureViewIndexConfig.fromJson(core.Map json_) + : this( + bruteForceConfig: json_.containsKey('bruteForceConfig') + ? GoogleCloudAiplatformV1FeatureViewIndexConfigBruteForceConfig + .fromJson(json_['bruteForceConfig'] as core.Map) : null, - groupName: json_['groupName'] as core.String?, - indexFeatureMapping: (json_['indexFeatureMapping'] as core.List?) + crowdingColumn: json_['crowdingColumn'] as core.String?, + distanceMeasureType: json_['distanceMeasureType'] as core.String?, + embeddingColumn: json_['embeddingColumn'] as core.String?, + embeddingDimension: json_['embeddingDimension'] as core.int?, + filterColumns: (json_['filterColumns'] as core.List?) ?.map((value) => value as core.String) .toList(), - indicesTensorName: json_['indicesTensorName'] as core.String?, - inputBaselines: json_.containsKey('inputBaselines') - ? json_['inputBaselines'] as core.List - : null, - inputTensorName: json_['inputTensorName'] as core.String?, - modality: json_['modality'] as core.String?, - visualization: json_.containsKey('visualization') - ? GoogleCloudAiplatformV1ExplanationMetadataInputMetadataVisualization - .fromJson(json_['visualization'] + treeAhConfig: json_.containsKey('treeAhConfig') + ? 
GoogleCloudAiplatformV1FeatureViewIndexConfigTreeAHConfig + .fromJson(json_['treeAhConfig'] as core.Map) : null, ); core.Map toJson() => { - if (denseShapeTensorName != null) - 'denseShapeTensorName': denseShapeTensorName!, - if (encodedBaselines != null) 'encodedBaselines': encodedBaselines!, - if (encodedTensorName != null) 'encodedTensorName': encodedTensorName!, - if (encoding != null) 'encoding': encoding!, - if (featureValueDomain != null) - 'featureValueDomain': featureValueDomain!, - if (groupName != null) 'groupName': groupName!, - if (indexFeatureMapping != null) - 'indexFeatureMapping': indexFeatureMapping!, - if (indicesTensorName != null) 'indicesTensorName': indicesTensorName!, - if (inputBaselines != null) 'inputBaselines': inputBaselines!, - if (inputTensorName != null) 'inputTensorName': inputTensorName!, - if (modality != null) 'modality': modality!, - if (visualization != null) 'visualization': visualization!, + if (bruteForceConfig != null) 'bruteForceConfig': bruteForceConfig!, + if (crowdingColumn != null) 'crowdingColumn': crowdingColumn!, + if (distanceMeasureType != null) + 'distanceMeasureType': distanceMeasureType!, + if (embeddingColumn != null) 'embeddingColumn': embeddingColumn!, + if (embeddingDimension != null) + 'embeddingDimension': embeddingDimension!, + if (filterColumns != null) 'filterColumns': filterColumns!, + if (treeAhConfig != null) 'treeAhConfig': treeAhConfig!, }; } -/// Domain details of the input feature value. -/// -/// Provides numeric information about the feature, such as its range (min, -/// max). If the feature has been pre-processed, for example with z-scoring, -/// then it provides information about how to recover the original feature. For -/// example, if the input feature is an image and it has been pre-processed to -/// obtain 0-mean and stddev = 1 values, then original_mean, and original_stddev -/// refer to the mean and stddev of the original feature (e.g. image tensor) -/// from which input feature (with mean = 0 and stddev = 1) was obtained. -class GoogleCloudAiplatformV1ExplanationMetadataInputMetadataFeatureValueDomain { - /// The maximum permissible value for this feature. - core.double? maxValue; - - /// The minimum permissible value for this feature. - core.double? minValue; - - /// If this input feature has been normalized to a mean value of 0, the - /// original_mean specifies the mean value of the domain prior to - /// normalization. - core.double? originalMean; +/// Configuration options for using brute force search. +typedef GoogleCloudAiplatformV1FeatureViewIndexConfigBruteForceConfig = $Empty; - /// If this input feature has been normalized to a standard deviation of 1.0, - /// the original_stddev specifies the standard deviation of the domain prior - /// to normalization. - core.double? originalStddev; +/// Configuration options for the tree-AH algorithm. +class GoogleCloudAiplatformV1FeatureViewIndexConfigTreeAHConfig { + /// Number of embeddings on each leaf node. + /// + /// The default value is 1000 if not set. + /// + /// Optional. + core.String? 
leafNodeEmbeddingCount; - GoogleCloudAiplatformV1ExplanationMetadataInputMetadataFeatureValueDomain({ - this.maxValue, - this.minValue, - this.originalMean, - this.originalStddev, + GoogleCloudAiplatformV1FeatureViewIndexConfigTreeAHConfig({ + this.leafNodeEmbeddingCount, }); - GoogleCloudAiplatformV1ExplanationMetadataInputMetadataFeatureValueDomain.fromJson( + GoogleCloudAiplatformV1FeatureViewIndexConfigTreeAHConfig.fromJson( core.Map json_) : this( - maxValue: (json_['maxValue'] as core.num?)?.toDouble(), - minValue: (json_['minValue'] as core.num?)?.toDouble(), - originalMean: (json_['originalMean'] as core.num?)?.toDouble(), - originalStddev: (json_['originalStddev'] as core.num?)?.toDouble(), + leafNodeEmbeddingCount: + json_['leafNodeEmbeddingCount'] as core.String?, ); core.Map toJson() => { - if (maxValue != null) 'maxValue': maxValue!, - if (minValue != null) 'minValue': minValue!, - if (originalMean != null) 'originalMean': originalMean!, - if (originalStddev != null) 'originalStddev': originalStddev!, + if (leafNodeEmbeddingCount != null) + 'leafNodeEmbeddingCount': leafNodeEmbeddingCount!, }; } -/// Visualization configurations for image explanation. -class GoogleCloudAiplatformV1ExplanationMetadataInputMetadataVisualization { - /// Excludes attributions below the specified percentile, from the highlighted - /// areas. - /// - /// Defaults to 62. - core.double? clipPercentLowerbound; - - /// Excludes attributions above the specified percentile from the highlighted - /// areas. - /// - /// Using the clip_percent_upperbound and clip_percent_lowerbound together can - /// be useful for filtering out noise and making it easier to see areas of - /// strong attribution. Defaults to 99.9. - core.double? clipPercentUpperbound; - - /// The color scheme used for the highlighted areas. - /// - /// Defaults to PINK_GREEN for Integrated Gradients attribution, which shows - /// positive attributions in green and negative in pink. Defaults to VIRIDIS - /// for XRAI attribution, which highlights the most influential regions in - /// yellow and the least influential in blue. - /// Possible string values are: - /// - "COLOR_MAP_UNSPECIFIED" : Should not be used. - /// - "PINK_GREEN" : Positive: green. Negative: pink. - /// - "VIRIDIS" : Viridis color map: A perceptually uniform color mapping - /// which is easier to see by those with colorblindness and progresses from - /// yellow to green to blue. Positive: yellow. Negative: blue. - /// - "RED" : Positive: red. Negative: red. - /// - "GREEN" : Positive: green. Negative: green. - /// - "RED_GREEN" : Positive: green. Negative: red. - /// - "PINK_WHITE_GREEN" : PiYG palette. - core.String? colorMap; - - /// How the original image is displayed in the visualization. - /// - /// Adjusting the overlay can help increase visual clarity if the original - /// image makes it difficult to view the visualization. Defaults to NONE. - /// Possible string values are: - /// - "OVERLAY_TYPE_UNSPECIFIED" : Default value. This is the same as NONE. - /// - "NONE" : No overlay. - /// - "ORIGINAL" : The attributions are shown on top of the original image. - /// - "GRAYSCALE" : The attributions are shown on top of grayscaled version of - /// the original image. - /// - "MASK_BLACK" : The attributions are used as a mask to reveal predictive - /// parts of the image and hide the un-predictive parts. - core.String? overlayType; - - /// Whether to only highlight pixels with positive contributions, negative or - /// both. 
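// Illustrative sketch (not part of the generated output): an index
// configuration for vector search over an embedding column, combining the
// tree-AH options defined above. Column names, the dimension, and the leaf
// node count are placeholder values; assumes the same import as the sketch
// earlier in this file.
void featureViewIndexConfigSketch() {
  final indexConfig = GoogleCloudAiplatformV1FeatureViewIndexConfig(
    embeddingColumn: 'embedding',
    embeddingDimension: 768,
    distanceMeasureType: 'DOT_PRODUCT_DISTANCE',
    crowdingColumn: 'category',
    filterColumns: ['language'],
    treeAhConfig: GoogleCloudAiplatformV1FeatureViewIndexConfigTreeAHConfig(
      // This int64-style field is surfaced as a String in the Dart bindings.
      leafNodeEmbeddingCount: '1000',
    ),
  );
  print(indexConfig.toJson());
}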
+/// Configuration for FeatureViews created in Optimized FeatureOnlineStore. +class GoogleCloudAiplatformV1FeatureViewOptimizedConfig { + /// A description of resources that the FeatureView uses, which to large + /// degree are decided by Vertex AI, and optionally allows only a modest + /// additional configuration. /// - /// Defaults to POSITIVE. - /// Possible string values are: - /// - "POLARITY_UNSPECIFIED" : Default value. This is the same as POSITIVE. - /// - "POSITIVE" : Highlights the pixels/outlines that were most influential - /// to the model's prediction. - /// - "NEGATIVE" : Setting polarity to negative highlights areas that does not - /// lead to the models's current prediction. - /// - "BOTH" : Shows both positive and negative attributions. - core.String? polarity; - - /// Type of the image visualization. + /// If min_replica_count is not set, the default value is 2. If + /// max_replica_count is not set, the default value is 6. The max allowed + /// replica count is 1000. /// - /// Only applicable to Integrated Gradients attribution. OUTLINES shows - /// regions of attribution, while PIXELS shows per-pixel attribution. Defaults - /// to OUTLINES. - /// Possible string values are: - /// - "TYPE_UNSPECIFIED" : Should not be used. - /// - "PIXELS" : Shows which pixel contributed to the image prediction. - /// - "OUTLINES" : Shows which region contributed to the image prediction by - /// outlining the region. - core.String? type; + /// Optional. + GoogleCloudAiplatformV1AutomaticResources? automaticResources; - GoogleCloudAiplatformV1ExplanationMetadataInputMetadataVisualization({ - this.clipPercentLowerbound, - this.clipPercentUpperbound, - this.colorMap, - this.overlayType, - this.polarity, - this.type, + GoogleCloudAiplatformV1FeatureViewOptimizedConfig({ + this.automaticResources, }); - GoogleCloudAiplatformV1ExplanationMetadataInputMetadataVisualization.fromJson( - core.Map json_) + GoogleCloudAiplatformV1FeatureViewOptimizedConfig.fromJson(core.Map json_) : this( - clipPercentLowerbound: - (json_['clipPercentLowerbound'] as core.num?)?.toDouble(), - clipPercentUpperbound: - (json_['clipPercentUpperbound'] as core.num?)?.toDouble(), - colorMap: json_['colorMap'] as core.String?, - overlayType: json_['overlayType'] as core.String?, - polarity: json_['polarity'] as core.String?, - type: json_['type'] as core.String?, + automaticResources: json_.containsKey('automaticResources') + ? GoogleCloudAiplatformV1AutomaticResources.fromJson( + json_['automaticResources'] + as core.Map) + : null, ); core.Map toJson() => { - if (clipPercentLowerbound != null) - 'clipPercentLowerbound': clipPercentLowerbound!, - if (clipPercentUpperbound != null) - 'clipPercentUpperbound': clipPercentUpperbound!, - if (colorMap != null) 'colorMap': colorMap!, - if (overlayType != null) 'overlayType': overlayType!, - if (polarity != null) 'polarity': polarity!, - if (type != null) 'type': type!, + if (automaticResources != null) + 'automaticResources': automaticResources!, }; } -/// Metadata of the prediction output to be explained. -class GoogleCloudAiplatformV1ExplanationMetadataOutputMetadata { - /// Specify a field name in the prediction to look for the display name. +/// FeatureViewSync is a representation of sync operation which copies data from +/// data source to Feature View in Online Store. +class GoogleCloudAiplatformV1FeatureViewSync { + /// Time when this FeatureViewSync is created. /// - /// Use this if the prediction contains the display names for the outputs. 
The - /// display names in the prediction must have the same shape of the outputs, - /// so that it can be located by Attribution.output_index for a specific - /// output. - core.String? displayNameMappingKey; + /// Creation of a FeatureViewSync means that the job is pending / waiting for + /// sufficient resources but may not have started the actual data transfer + /// yet. + /// + /// Output only. + core.String? createTime; - /// Static mapping between the index and display name. + /// Final status of the FeatureViewSync. /// - /// Use this if the outputs are a deterministic n-dimensional array, e.g. a - /// list of scores of all the classes in a pre-defined order for a - /// multi-classification Model. It's not feasible if the outputs are - /// non-deterministic, e.g. the Model produces top-k classes or sort the - /// outputs by their values. The shape of the value must be an n-dimensional - /// array of strings. The number of dimensions must match that of the outputs - /// to be explained. The Attribution.output_display_name is populated by - /// locating in the mapping with Attribution.output_index. + /// Output only. + GoogleRpcStatus? finalStatus; + + /// Identifier. /// - /// The values for Object must be JSON objects. It can consist of `num`, - /// `String`, `bool` and `null` as well as `Map` and `List` values. - core.Object? indexDisplayNameMapping; + /// Name of the FeatureViewSync. Format: + /// `projects/{project}/locations/{location}/featureOnlineStores/{feature_online_store}/featureViews/{feature_view}/featureViewSyncs/{feature_view_sync}` + core.String? name; - /// Name of the output tensor. + /// Time when this FeatureViewSync is finished. /// - /// Required and is only applicable to Vertex AI provided images for - /// Tensorflow. - core.String? outputTensorName; + /// Output only. + GoogleTypeInterval? runTime; - GoogleCloudAiplatformV1ExplanationMetadataOutputMetadata({ - this.displayNameMappingKey, - this.indexDisplayNameMapping, - this.outputTensorName, + /// Reserved for future use. + /// + /// Output only. + core.bool? satisfiesPzi; + + /// Reserved for future use. + /// + /// Output only. + core.bool? satisfiesPzs; + + /// Summary of the sync job. + /// + /// Output only. + GoogleCloudAiplatformV1FeatureViewSyncSyncSummary? syncSummary; + + GoogleCloudAiplatformV1FeatureViewSync({ + this.createTime, + this.finalStatus, + this.name, + this.runTime, + this.satisfiesPzi, + this.satisfiesPzs, + this.syncSummary, }); - GoogleCloudAiplatformV1ExplanationMetadataOutputMetadata.fromJson( - core.Map json_) + GoogleCloudAiplatformV1FeatureViewSync.fromJson(core.Map json_) : this( - displayNameMappingKey: json_['displayNameMappingKey'] as core.String?, - indexDisplayNameMapping: json_['indexDisplayNameMapping'], - outputTensorName: json_['outputTensorName'] as core.String?, + createTime: json_['createTime'] as core.String?, + finalStatus: json_.containsKey('finalStatus') + ? GoogleRpcStatus.fromJson( + json_['finalStatus'] as core.Map) + : null, + name: json_['name'] as core.String?, + runTime: json_.containsKey('runTime') + ? GoogleTypeInterval.fromJson( + json_['runTime'] as core.Map) + : null, + satisfiesPzi: json_['satisfiesPzi'] as core.bool?, + satisfiesPzs: json_['satisfiesPzs'] as core.bool?, + syncSummary: json_.containsKey('syncSummary') + ? 
GoogleCloudAiplatformV1FeatureViewSyncSyncSummary.fromJson( + json_['syncSummary'] as core.Map) + : null, ); core.Map toJson() => { - if (displayNameMappingKey != null) - 'displayNameMappingKey': displayNameMappingKey!, - if (indexDisplayNameMapping != null) - 'indexDisplayNameMapping': indexDisplayNameMapping!, - if (outputTensorName != null) 'outputTensorName': outputTensorName!, + if (createTime != null) 'createTime': createTime!, + if (finalStatus != null) 'finalStatus': finalStatus!, + if (name != null) 'name': name!, + if (runTime != null) 'runTime': runTime!, + if (satisfiesPzi != null) 'satisfiesPzi': satisfiesPzi!, + if (satisfiesPzs != null) 'satisfiesPzs': satisfiesPzs!, + if (syncSummary != null) 'syncSummary': syncSummary!, }; } -/// The ExplanationMetadata entries that can be overridden at online explanation -/// time. -class GoogleCloudAiplatformV1ExplanationMetadataOverride { - /// Overrides the input metadata of the features. +/// Configuration for Sync. +/// +/// Only one option is set. +class GoogleCloudAiplatformV1FeatureViewSyncConfig { + /// If true, syncs the FeatureView in a continuous manner to Online Store. /// - /// The key is the name of the feature to be overridden. The keys specified - /// here must exist in the input metadata to be overridden. If a feature is - /// not specified here, the corresponding feature's input metadata is not - /// overridden. + /// Optional. + core.bool? continuous; + + /// Cron schedule (https://en.wikipedia.org/wiki/Cron) to launch scheduled + /// runs. /// - /// Required. - core.Map? - inputs; + /// To explicitly set a timezone to the cron tab, apply a prefix in the cron + /// tab: "CRON_TZ=${IANA_TIME_ZONE}" or "TZ=${IANA_TIME_ZONE}". The + /// ${IANA_TIME_ZONE} may only be a valid string from IANA time zone database. + /// For example, "CRON_TZ=America/New_York 1 * * * *", or "TZ=America/New_York + /// 1 * * * *". + core.String? cron; - GoogleCloudAiplatformV1ExplanationMetadataOverride({ - this.inputs, + GoogleCloudAiplatformV1FeatureViewSyncConfig({ + this.continuous, + this.cron, }); - GoogleCloudAiplatformV1ExplanationMetadataOverride.fromJson(core.Map json_) + GoogleCloudAiplatformV1FeatureViewSyncConfig.fromJson(core.Map json_) : this( - inputs: - (json_['inputs'] as core.Map?)?.map( - (key, value) => core.MapEntry( - key, - GoogleCloudAiplatformV1ExplanationMetadataOverrideInputMetadataOverride - .fromJson(value as core.Map), - ), - ), + continuous: json_['continuous'] as core.bool?, + cron: json_['cron'] as core.String?, ); core.Map toJson() => { - if (inputs != null) 'inputs': inputs!, + if (continuous != null) 'continuous': continuous!, + if (cron != null) 'cron': cron!, }; } -/// The input metadata entries to be overridden. -class GoogleCloudAiplatformV1ExplanationMetadataOverrideInputMetadataOverride { - /// Baseline inputs for this feature. +/// Summary from the Sync job. +/// +/// For continuous syncs, the summary is updated periodically. For batch syncs, +/// it gets updated on completion of the sync. +class GoogleCloudAiplatformV1FeatureViewSyncSyncSummary { + /// Total number of rows synced. /// - /// This overrides the `input_baseline` field of the - /// ExplanationMetadata.InputMetadata object of the corresponding feature's - /// input metadata. If it's not specified, the original baselines are not - /// overridden. + /// Output only. + core.String? rowSynced; + + /// Lower bound of the system time watermark for the sync job. /// - /// The values for Object must be JSON objects. 
It can consist of `num`, - /// `String`, `bool` and `null` as well as `Map` and `List` values. - core.List? inputBaselines; + /// This is only set for continuously syncing feature views. + core.String? systemWatermarkTime; - GoogleCloudAiplatformV1ExplanationMetadataOverrideInputMetadataOverride({ - this.inputBaselines, + /// BigQuery slot milliseconds consumed for the sync job. + /// + /// Output only. + core.String? totalSlot; + + GoogleCloudAiplatformV1FeatureViewSyncSyncSummary({ + this.rowSynced, + this.systemWatermarkTime, + this.totalSlot, }); - GoogleCloudAiplatformV1ExplanationMetadataOverrideInputMetadataOverride.fromJson( - core.Map json_) + GoogleCloudAiplatformV1FeatureViewSyncSyncSummary.fromJson(core.Map json_) : this( - inputBaselines: json_.containsKey('inputBaselines') - ? json_['inputBaselines'] as core.List - : null, + rowSynced: json_['rowSynced'] as core.String?, + systemWatermarkTime: json_['systemWatermarkTime'] as core.String?, + totalSlot: json_['totalSlot'] as core.String?, ); core.Map toJson() => { - if (inputBaselines != null) 'inputBaselines': inputBaselines!, + if (rowSynced != null) 'rowSynced': rowSynced!, + if (systemWatermarkTime != null) + 'systemWatermarkTime': systemWatermarkTime!, + if (totalSlot != null) 'totalSlot': totalSlot!, }; } -/// Parameters to configure explaining for Model's predictions. -class GoogleCloudAiplatformV1ExplanationParameters { - /// Example-based explanations that returns the nearest neighbors from the - /// provided dataset. - GoogleCloudAiplatformV1Examples? examples; - - /// An attribution method that computes Aumann-Shapley values taking advantage - /// of the model's fully differentiable structure. - /// - /// Refer to this paper for more details: https://arxiv.org/abs/1703.01365 - GoogleCloudAiplatformV1IntegratedGradientsAttribution? - integratedGradientsAttribution; - - /// If populated, only returns attributions that have output_index contained - /// in output_indices. - /// - /// It must be an ndarray of integers, with the same shape of the output it's - /// explaining. If not populated, returns attributions for top_k indices of - /// outputs. If neither top_k nor output_indices is populated, returns the - /// argmax index of the outputs. Only applicable to Models that predict - /// multiple outputs (e,g, multi-class Models that predict multiple classes). - /// - /// The values for Object must be JSON objects. It can consist of `num`, - /// `String`, `bool` and `null` as well as `Map` and `List` values. - core.List? outputIndices; - - /// An attribution method that approximates Shapley values for features that - /// contribute to the label being predicted. +/// A Vertex Rag source for features that need to be synced to Online Store. +class GoogleCloudAiplatformV1FeatureViewVertexRagSource { + /// The RAG corpus id corresponding to this FeatureView. /// - /// A sampling strategy is used to approximate the value rather than - /// considering all subsets of features. Refer to this paper for model - /// details: https://arxiv.org/abs/1306.4265. - GoogleCloudAiplatformV1SampledShapleyAttribution? sampledShapleyAttribution; + /// Optional. + core.String? ragCorpusId; - /// If populated, returns attributions for top K indices of outputs (defaults - /// to 1). + /// The BigQuery view/table URI that will be materialized on each manual sync + /// trigger. /// - /// Only applies to Models that predicts more than one outputs (e,g, - /// multi-class Models). When set to -1, returns explanations for all outputs. - core.int? 
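// Illustrative sketch (not part of the generated output): the two sync modes
// from FeatureViewSyncConfig above, only one of which should be set per the
// message documentation, either a cron schedule (optionally prefixed with an
// explicit time zone) or continuous sync. Assumes the same import as the
// earlier sketches.
void featureViewSyncConfigSketch() {
  final scheduledSync = GoogleCloudAiplatformV1FeatureViewSyncConfig(
    cron: 'TZ=America/New_York 1 * * * *',
  );
  final continuousSync =
      GoogleCloudAiplatformV1FeatureViewSyncConfig(continuous: true);
  print(scheduledSync.toJson());
  print(continuousSync.toJson());
}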
topK; - - /// An attribution method that redistributes Integrated Gradients attribution - /// to segmented regions, taking advantage of the model's fully differentiable - /// structure. + /// The table/view is expected to have the following columns and types at + /// least: - `corpus_id` (STRING, NULLABLE/REQUIRED) - `file_id` (STRING, + /// NULLABLE/REQUIRED) - `chunk_id` (STRING, NULLABLE/REQUIRED) - + /// `chunk_data_type` (STRING, NULLABLE/REQUIRED) - `chunk_data` (STRING, + /// NULLABLE/REQUIRED) - `embeddings` (FLOAT, REPEATED) - `file_original_uri` + /// (STRING, NULLABLE/REQUIRED) /// - /// Refer to this paper for more details: https://arxiv.org/abs/1906.02825 - /// XRAI currently performs better on natural images, like a picture of a - /// house or an animal. If the images are taken in artificial environments, - /// like a lab or manufacturing line, or from diagnostic equipment, like - /// x-rays or quality-control cameras, use Integrated Gradients instead. - GoogleCloudAiplatformV1XraiAttribution? xraiAttribution; + /// Required. + core.String? uri; - GoogleCloudAiplatformV1ExplanationParameters({ - this.examples, - this.integratedGradientsAttribution, - this.outputIndices, - this.sampledShapleyAttribution, - this.topK, - this.xraiAttribution, + GoogleCloudAiplatformV1FeatureViewVertexRagSource({ + this.ragCorpusId, + this.uri, }); - GoogleCloudAiplatformV1ExplanationParameters.fromJson(core.Map json_) - : this( - examples: json_.containsKey('examples') - ? GoogleCloudAiplatformV1Examples.fromJson( - json_['examples'] as core.Map) - : null, - integratedGradientsAttribution: json_ - .containsKey('integratedGradientsAttribution') - ? GoogleCloudAiplatformV1IntegratedGradientsAttribution.fromJson( - json_['integratedGradientsAttribution'] - as core.Map) - : null, - outputIndices: json_.containsKey('outputIndices') - ? json_['outputIndices'] as core.List - : null, - sampledShapleyAttribution: - json_.containsKey('sampledShapleyAttribution') - ? GoogleCloudAiplatformV1SampledShapleyAttribution.fromJson( - json_['sampledShapleyAttribution'] - as core.Map) - : null, - topK: json_['topK'] as core.int?, - xraiAttribution: json_.containsKey('xraiAttribution') - ? GoogleCloudAiplatformV1XraiAttribution.fromJson( - json_['xraiAttribution'] - as core.Map) - : null, + GoogleCloudAiplatformV1FeatureViewVertexRagSource.fromJson(core.Map json_) + : this( + ragCorpusId: json_['ragCorpusId'] as core.String?, + uri: json_['uri'] as core.String?, ); core.Map toJson() => { - if (examples != null) 'examples': examples!, - if (integratedGradientsAttribution != null) - 'integratedGradientsAttribution': integratedGradientsAttribution!, - if (outputIndices != null) 'outputIndices': outputIndices!, - if (sampledShapleyAttribution != null) - 'sampledShapleyAttribution': sampledShapleyAttribution!, - if (topK != null) 'topK': topK!, - if (xraiAttribution != null) 'xraiAttribution': xraiAttribution!, + if (ragCorpusId != null) 'ragCorpusId': ragCorpusId!, + if (uri != null) 'uri': uri!, }; } -/// Specification of Model explanation. -class GoogleCloudAiplatformV1ExplanationSpec { - /// Metadata describing the Model's input and output for explanation. +/// Vertex AI Feature Store provides a centralized repository for organizing, +/// storing, and serving ML features. +/// +/// The Featurestore is a top-level container for your features and their +/// values. +class GoogleCloudAiplatformV1Featurestore { + /// Timestamp when this Featurestore was created. + /// + /// Output only. + core.String? 
createTime; + + /// Customer-managed encryption key spec for data storage. + /// + /// If set, both of the online and offline data storage will be secured by + /// this key. /// /// Optional. - GoogleCloudAiplatformV1ExplanationMetadata? metadata; + GoogleCloudAiplatformV1EncryptionSpec? encryptionSpec; - /// Parameters that configure explaining of the Model's predictions. + /// Used to perform consistent read-modify-write updates. /// - /// Required. - GoogleCloudAiplatformV1ExplanationParameters? parameters; + /// If not set, a blind "overwrite" update happens. + /// + /// Optional. + core.String? etag; - GoogleCloudAiplatformV1ExplanationSpec({ - this.metadata, - this.parameters, - }); + /// The labels with user-defined metadata to organize your Featurestore. + /// + /// Label keys and values can be no longer than 64 characters (Unicode + /// codepoints), can only contain lowercase letters, numeric characters, + /// underscores and dashes. International characters are allowed. See + /// https://goo.gl/xmQnxf for more information on and examples of labels. No + /// more than 64 user labels can be associated with one Featurestore(System + /// labels are excluded)." System reserved label keys are prefixed with + /// "aiplatform.googleapis.com/" and are immutable. + /// + /// Optional. + core.Map? labels; - GoogleCloudAiplatformV1ExplanationSpec.fromJson(core.Map json_) - : this( - metadata: json_.containsKey('metadata') - ? GoogleCloudAiplatformV1ExplanationMetadata.fromJson( - json_['metadata'] as core.Map) - : null, - parameters: json_.containsKey('parameters') - ? GoogleCloudAiplatformV1ExplanationParameters.fromJson( - json_['parameters'] as core.Map) - : null, - ); + /// Name of the Featurestore. + /// + /// Format: + /// `projects/{project}/locations/{location}/featurestores/{featurestore}` + /// + /// Output only. + core.String? name; - core.Map toJson() => { - if (metadata != null) 'metadata': metadata!, - if (parameters != null) 'parameters': parameters!, - }; -} + /// Config for online storage resources. + /// + /// The field should not co-exist with the field of + /// `OnlineStoreReplicationConfig`. If both of it and + /// OnlineStoreReplicationConfig are unset, the feature store will not have an + /// online store and cannot be used for online serving. + /// + /// Optional. + GoogleCloudAiplatformV1FeaturestoreOnlineServingConfig? onlineServingConfig; -/// The ExplanationSpec entries that can be overridden at online explanation -/// time. -class GoogleCloudAiplatformV1ExplanationSpecOverride { - /// The example-based explanations parameter overrides. - GoogleCloudAiplatformV1ExamplesOverride? examplesOverride; + /// TTL in days for feature values that will be stored in online serving + /// storage. + /// + /// The Feature Store online storage periodically removes obsolete feature + /// values older than `online_storage_ttl_days` since the feature generation + /// time. Note that `online_storage_ttl_days` should be less than or equal to + /// `offline_storage_ttl_days` for each EntityType under a featurestore. If + /// not set, default to 4000 days + /// + /// Optional. + core.int? onlineStorageTtlDays; - /// The metadata to be overridden. + /// Reserved for future use. /// - /// If not specified, no metadata is overridden. - GoogleCloudAiplatformV1ExplanationMetadataOverride? metadata; + /// Output only. + core.bool? satisfiesPzi; - /// The parameters to be overridden. + /// Reserved for future use. /// - /// Note that the attribution method cannot be changed. 
If not specified, no - /// parameter is overridden. - GoogleCloudAiplatformV1ExplanationParameters? parameters; + /// Output only. + core.bool? satisfiesPzs; - GoogleCloudAiplatformV1ExplanationSpecOverride({ - this.examplesOverride, - this.metadata, - this.parameters, + /// State of the featurestore. + /// + /// Output only. + /// Possible string values are: + /// - "STATE_UNSPECIFIED" : Default value. This value is unused. + /// - "STABLE" : State when the featurestore configuration is not being + /// updated and the fields reflect the current configuration of the + /// featurestore. The featurestore is usable in this state. + /// - "UPDATING" : The state of the featurestore configuration when it is + /// being updated. During an update, the fields reflect either the original + /// configuration or the updated configuration of the featurestore. For + /// example, `online_serving_config.fixed_node_count` can take minutes to + /// update. While the update is in progress, the featurestore is in the + /// UPDATING state, and the value of `fixed_node_count` can be the original + /// value or the updated value, depending on the progress of the operation. + /// Until the update completes, the actual number of nodes can still be the + /// original value of `fixed_node_count`. The featurestore is still usable in + /// this state. + core.String? state; + + /// Timestamp when this Featurestore was last updated. + /// + /// Output only. + core.String? updateTime; + + GoogleCloudAiplatformV1Featurestore({ + this.createTime, + this.encryptionSpec, + this.etag, + this.labels, + this.name, + this.onlineServingConfig, + this.onlineStorageTtlDays, + this.satisfiesPzi, + this.satisfiesPzs, + this.state, + this.updateTime, }); - GoogleCloudAiplatformV1ExplanationSpecOverride.fromJson(core.Map json_) + GoogleCloudAiplatformV1Featurestore.fromJson(core.Map json_) : this( - examplesOverride: json_.containsKey('examplesOverride') - ? GoogleCloudAiplatformV1ExamplesOverride.fromJson( - json_['examplesOverride'] + createTime: json_['createTime'] as core.String?, + encryptionSpec: json_.containsKey('encryptionSpec') + ? GoogleCloudAiplatformV1EncryptionSpec.fromJson( + json_['encryptionSpec'] as core.Map) : null, - metadata: json_.containsKey('metadata') - ? GoogleCloudAiplatformV1ExplanationMetadataOverride.fromJson( - json_['metadata'] as core.Map) - : null, - parameters: json_.containsKey('parameters') - ? GoogleCloudAiplatformV1ExplanationParameters.fromJson( - json_['parameters'] as core.Map) + etag: json_['etag'] as core.String?, + labels: + (json_['labels'] as core.Map?)?.map( + (key, value) => core.MapEntry( + key, + value as core.String, + ), + ), + name: json_['name'] as core.String?, + onlineServingConfig: json_.containsKey('onlineServingConfig') + ? 
GoogleCloudAiplatformV1FeaturestoreOnlineServingConfig.fromJson( + json_['onlineServingConfig'] + as core.Map) : null, + onlineStorageTtlDays: json_['onlineStorageTtlDays'] as core.int?, + satisfiesPzi: json_['satisfiesPzi'] as core.bool?, + satisfiesPzs: json_['satisfiesPzs'] as core.bool?, + state: json_['state'] as core.String?, + updateTime: json_['updateTime'] as core.String?, ); core.Map toJson() => { - if (examplesOverride != null) 'examplesOverride': examplesOverride!, - if (metadata != null) 'metadata': metadata!, - if (parameters != null) 'parameters': parameters!, + if (createTime != null) 'createTime': createTime!, + if (encryptionSpec != null) 'encryptionSpec': encryptionSpec!, + if (etag != null) 'etag': etag!, + if (labels != null) 'labels': labels!, + if (name != null) 'name': name!, + if (onlineServingConfig != null) + 'onlineServingConfig': onlineServingConfig!, + if (onlineStorageTtlDays != null) + 'onlineStorageTtlDays': onlineStorageTtlDays!, + if (satisfiesPzi != null) 'satisfiesPzi': satisfiesPzi!, + if (satisfiesPzs != null) 'satisfiesPzs': satisfiesPzs!, + if (state != null) 'state': state!, + if (updateTime != null) 'updateTime': updateTime!, }; } -/// Describes what part of the Dataset is to be exported, the destination of the -/// export and how to export. -class GoogleCloudAiplatformV1ExportDataConfig { - /// The Cloud Storage URI that points to a YAML file describing the annotation - /// schema. - /// - /// The schema is defined as an OpenAPI 3.0.2 - /// [Schema Object](https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.2.md#schemaObject). - /// The schema files that can be used here are found in - /// gs://google-cloud-aiplatform/schema/dataset/annotation/, note that the - /// chosen schema must be consistent with metadata of the Dataset specified by - /// dataset_id. Only used for custom training data export use cases. Only - /// applicable to Datasets that have DataItems and Annotations. Only - /// Annotations that both match this schema and belong to DataItems not - /// ignored by the split method are used in respectively training, validation - /// or test role, depending on the role of the DataItem they are on. When used - /// in conjunction with annotations_filter, the Annotations used for training - /// are filtered by both annotations_filter and annotation_schema_uri. - core.String? annotationSchemaUri; - - /// An expression for filtering what part of the Dataset is to be exported. +/// Configuration of how features in Featurestore are monitored. +class GoogleCloudAiplatformV1FeaturestoreMonitoringConfig { + /// Threshold for categorical features of anomaly detection. /// - /// Only Annotations that match this filter will be exported. The filter - /// syntax is the same as in ListAnnotations. - core.String? annotationsFilter; - - /// Indicates the usage of the exported files. - /// Possible string values are: - /// - "EXPORT_USE_UNSPECIFIED" : Regular user export. - /// - "CUSTOM_CODE_TRAINING" : Export for custom code training. - core.String? exportUse; - - /// Split based on the provided filters for each set. - GoogleCloudAiplatformV1ExportFilterSplit? filterSplit; + /// This is shared by all types of Featurestore Monitoring for categorical + /// features (i.e. Features with type (Feature.ValueType) BOOL or STRING). + GoogleCloudAiplatformV1FeaturestoreMonitoringConfigThresholdConfig? + categoricalThresholdConfig; - /// Split based on fractions defining the size of each set. - GoogleCloudAiplatformV1ExportFractionSplit? 
fractionSplit; + /// The config for ImportFeatures Analysis Based Feature Monitoring. + GoogleCloudAiplatformV1FeaturestoreMonitoringConfigImportFeaturesAnalysis? + importFeaturesAnalysis; - /// The Google Cloud Storage location where the output is to be written to. + /// Threshold for numerical features of anomaly detection. /// - /// In the given directory a new directory will be created with name: - /// `export-data--` where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 - /// format. All export output will be written into that directory. Inside that - /// directory, annotations with the same schema will be grouped into sub - /// directories which are named with the corresponding annotations' schema - /// title. Inside these sub directories, a schema.yaml will be created to - /// describe the output format. - GoogleCloudAiplatformV1GcsDestination? gcsDestination; + /// This is shared by all objectives of Featurestore Monitoring for numerical + /// features (i.e. Features with type (Feature.ValueType) DOUBLE or INT64). + GoogleCloudAiplatformV1FeaturestoreMonitoringConfigThresholdConfig? + numericalThresholdConfig; - /// The ID of a SavedQuery (annotation set) under the Dataset specified by - /// dataset_id used for filtering Annotations for training. - /// - /// Only used for custom training data export use cases. Only applicable to - /// Datasets that have SavedQueries. Only Annotations that are associated with - /// this SavedQuery are used in respectively training. When used in - /// conjunction with annotations_filter, the Annotations used for training are - /// filtered by both saved_query_id and annotations_filter. Only one of - /// saved_query_id and annotation_schema_uri should be specified as both of - /// them represent the same thing: problem type. - core.String? savedQueryId; + /// The config for Snapshot Analysis Based Feature Monitoring. + GoogleCloudAiplatformV1FeaturestoreMonitoringConfigSnapshotAnalysis? + snapshotAnalysis; - GoogleCloudAiplatformV1ExportDataConfig({ - this.annotationSchemaUri, - this.annotationsFilter, - this.exportUse, - this.filterSplit, - this.fractionSplit, - this.gcsDestination, - this.savedQueryId, + GoogleCloudAiplatformV1FeaturestoreMonitoringConfig({ + this.categoricalThresholdConfig, + this.importFeaturesAnalysis, + this.numericalThresholdConfig, + this.snapshotAnalysis, }); - GoogleCloudAiplatformV1ExportDataConfig.fromJson(core.Map json_) + GoogleCloudAiplatformV1FeaturestoreMonitoringConfig.fromJson(core.Map json_) : this( - annotationSchemaUri: json_['annotationSchemaUri'] as core.String?, - annotationsFilter: json_['annotationsFilter'] as core.String?, - exportUse: json_['exportUse'] as core.String?, - filterSplit: json_.containsKey('filterSplit') - ? GoogleCloudAiplatformV1ExportFilterSplit.fromJson( - json_['filterSplit'] as core.Map) + categoricalThresholdConfig: json_ + .containsKey('categoricalThresholdConfig') + ? GoogleCloudAiplatformV1FeaturestoreMonitoringConfigThresholdConfig + .fromJson(json_['categoricalThresholdConfig'] + as core.Map) : null, - fractionSplit: json_.containsKey('fractionSplit') - ? GoogleCloudAiplatformV1ExportFractionSplit.fromJson( - json_['fractionSplit'] as core.Map) + importFeaturesAnalysis: json_.containsKey('importFeaturesAnalysis') + ? GoogleCloudAiplatformV1FeaturestoreMonitoringConfigImportFeaturesAnalysis + .fromJson(json_['importFeaturesAnalysis'] + as core.Map) : null, - gcsDestination: json_.containsKey('gcsDestination') - ? 
GoogleCloudAiplatformV1GcsDestination.fromJson(
-                  json_['gcsDestination']
+              numericalThresholdConfig: json_
+                  .containsKey('numericalThresholdConfig')
+                  ? GoogleCloudAiplatformV1FeaturestoreMonitoringConfigThresholdConfig
+                      .fromJson(json_['numericalThresholdConfig']
+                          as core.Map)
+                  : null,
+              snapshotAnalysis: json_.containsKey('snapshotAnalysis')
+                  ? GoogleCloudAiplatformV1FeaturestoreMonitoringConfigSnapshotAnalysis
+                      .fromJson(json_['snapshotAnalysis']
                           as core.Map)
                   : null,
-          savedQueryId: json_['savedQueryId'] as core.String?,
         );

   core.Map toJson() => {
-        if (annotationSchemaUri != null)
-          'annotationSchemaUri': annotationSchemaUri!,
-        if (annotationsFilter != null) 'annotationsFilter': annotationsFilter!,
-        if (exportUse != null) 'exportUse': exportUse!,
-        if (filterSplit != null) 'filterSplit': filterSplit!,
-        if (fractionSplit != null) 'fractionSplit': fractionSplit!,
-        if (gcsDestination != null) 'gcsDestination': gcsDestination!,
-        if (savedQueryId != null) 'savedQueryId': savedQueryId!,
+        if (categoricalThresholdConfig != null)
+          'categoricalThresholdConfig': categoricalThresholdConfig!,
+        if (importFeaturesAnalysis != null)
+          'importFeaturesAnalysis': importFeaturesAnalysis!,
+        if (numericalThresholdConfig != null)
+          'numericalThresholdConfig': numericalThresholdConfig!,
+        if (snapshotAnalysis != null) 'snapshotAnalysis': snapshotAnalysis!,
       };
 }

-/// Request message for DatasetService.ExportData.
-class GoogleCloudAiplatformV1ExportDataRequest {
-  /// The desired output location.
-  ///
-  /// Required.
-  GoogleCloudAiplatformV1ExportDataConfig? exportConfig;
+/// Configuration of the Featurestore's ImportFeature Analysis Based Monitoring.
+///
+/// This type of analysis generates statistics for values of each Feature
+/// imported by every ImportFeatureValues operation.
+class GoogleCloudAiplatformV1FeaturestoreMonitoringConfigImportFeaturesAnalysis {
+  /// The baseline used to do anomaly detection for the statistics generated by
+  /// import features analysis.
+  /// Possible string values are:
+  /// - "BASELINE_UNSPECIFIED" : Should not be used.
+  /// - "LATEST_STATS" : Choose the later of the statistics generated by either
+  /// the most recent snapshot analysis or the previous import features
+  /// analysis. If none of them exists, skip anomaly detection and only
+  /// generate statistics.
+  /// - "MOST_RECENT_SNAPSHOT_STATS" : Use the statistics generated by the most
+  /// recent snapshot analysis if exists.
+  /// - "PREVIOUS_IMPORT_FEATURES_STATS" : Use the statistics generated by the
+  /// previous import features analysis if exists.
+  core.String? anomalyDetectionBaseline;

-  GoogleCloudAiplatformV1ExportDataRequest({
-    this.exportConfig,
+  /// Whether to enable / disable / inherit default behavior for import
+  /// features analysis.
+  /// Possible string values are:
+  /// - "STATE_UNSPECIFIED" : Should not be used.
+  /// - "DEFAULT" : The default behavior of whether to enable the monitoring.
+  /// EntityType-level config: disabled. Feature-level config: inherited from
+  /// the configuration of EntityType this Feature belongs to.
+  /// - "ENABLED" : Explicitly enables import features analysis.
+  /// EntityType-level config: by default enables import features analysis for
+  /// all Features under it. Feature-level config: enables import features
+  /// analysis regardless of the EntityType-level config.
+  /// - "DISABLED" : Explicitly disables import features analysis.
+  /// EntityType-level config: by default disables import features analysis for
+  /// all Features under it. 
Feature-level config: disables import features + /// analysis regardless of the EntityType-level config. + core.String? state; + + GoogleCloudAiplatformV1FeaturestoreMonitoringConfigImportFeaturesAnalysis({ + this.anomalyDetectionBaseline, + this.state, }); - GoogleCloudAiplatformV1ExportDataRequest.fromJson(core.Map json_) + GoogleCloudAiplatformV1FeaturestoreMonitoringConfigImportFeaturesAnalysis.fromJson( + core.Map json_) : this( - exportConfig: json_.containsKey('exportConfig') - ? GoogleCloudAiplatformV1ExportDataConfig.fromJson( - json_['exportConfig'] as core.Map) - : null, + anomalyDetectionBaseline: + json_['anomalyDetectionBaseline'] as core.String?, + state: json_['state'] as core.String?, ); core.Map toJson() => { - if (exportConfig != null) 'exportConfig': exportConfig!, + if (anomalyDetectionBaseline != null) + 'anomalyDetectionBaseline': anomalyDetectionBaseline!, + if (state != null) 'state': state!, }; } -/// Request message for FeaturestoreService.ExportFeatureValues. -class GoogleCloudAiplatformV1ExportFeatureValuesRequest { - /// Specifies destination location and format. +/// Configuration of the Featurestore's Snapshot Analysis Based Monitoring. +/// +/// This type of analysis generates statistics for each Feature based on a +/// snapshot of the latest feature value of each entities every +/// monitoring_interval. +class GoogleCloudAiplatformV1FeaturestoreMonitoringConfigSnapshotAnalysis { + /// The monitoring schedule for snapshot analysis. /// - /// Required. - GoogleCloudAiplatformV1FeatureValueDestination? destination; + /// For EntityType-level config: unset / disabled = true indicates disabled by + /// default for Features under it; otherwise by default enable snapshot + /// analysis monitoring with monitoring_interval for Features under it. + /// Feature-level config: disabled = true indicates disabled regardless of the + /// EntityType-level config; unset monitoring_interval indicates going with + /// EntityType-level config; otherwise run snapshot analysis monitoring with + /// monitoring_interval regardless of the EntityType-level config. Explicitly + /// Disable the snapshot analysis based monitoring. + core.bool? disabled; - /// Selects Features to export values of. + /// Configuration of the snapshot analysis based monitoring pipeline running + /// interval. /// - /// Required. - GoogleCloudAiplatformV1FeatureSelector? featureSelector; - - /// Exports all historical values of all entities of the EntityType within a - /// time range - GoogleCloudAiplatformV1ExportFeatureValuesRequestFullExport? fullExport; - - /// Per-Feature export settings. - core.List? settings; + /// The value indicates number of days. + core.int? monitoringIntervalDays; - /// Exports the latest Feature values of all entities of the EntityType within - /// a time range. - GoogleCloudAiplatformV1ExportFeatureValuesRequestSnapshotExport? - snapshotExport; + /// Customized export features time window for snapshot analysis. + /// + /// Unit is one day. Default value is 3 weeks. Minimum value is 1 day. Maximum + /// value is 4000 days. + core.int? 
stalenessDays;

-  GoogleCloudAiplatformV1ExportFeatureValuesRequest({
-    this.destination,
-    this.featureSelector,
-    this.fullExport,
-    this.settings,
-    this.snapshotExport,
+  GoogleCloudAiplatformV1FeaturestoreMonitoringConfigSnapshotAnalysis({
+    this.disabled,
+    this.monitoringIntervalDays,
+    this.stalenessDays,
   });

-  GoogleCloudAiplatformV1ExportFeatureValuesRequest.fromJson(core.Map json_)
+  GoogleCloudAiplatformV1FeaturestoreMonitoringConfigSnapshotAnalysis.fromJson(
+      core.Map json_)
       : this(
-          destination: json_.containsKey('destination')
-              ? GoogleCloudAiplatformV1FeatureValueDestination.fromJson(
-                  json_['destination'] as core.Map)
-              : null,
-          featureSelector: json_.containsKey('featureSelector')
-              ? GoogleCloudAiplatformV1FeatureSelector.fromJson(
-                  json_['featureSelector']
-                      as core.Map)
-              : null,
-          fullExport: json_.containsKey('fullExport')
-              ? GoogleCloudAiplatformV1ExportFeatureValuesRequestFullExport
-                  .fromJson(json_['fullExport']
-                      as core.Map)
-              : null,
-          settings: (json_['settings'] as core.List?)
-              ?.map((value) =>
-                  GoogleCloudAiplatformV1DestinationFeatureSetting.fromJson(
-                      value as core.Map))
-              .toList(),
-          snapshotExport: json_.containsKey('snapshotExport')
-              ? GoogleCloudAiplatformV1ExportFeatureValuesRequestSnapshotExport
-                  .fromJson(json_['snapshotExport']
-                      as core.Map)
-              : null,
+          disabled: json_['disabled'] as core.bool?,
+          monitoringIntervalDays: json_['monitoringIntervalDays'] as core.int?,
+          stalenessDays: json_['stalenessDays'] as core.int?,
         );

   core.Map toJson() => {
-        if (destination != null) 'destination': destination!,
-        if (featureSelector != null) 'featureSelector': featureSelector!,
-        if (fullExport != null) 'fullExport': fullExport!,
-        if (settings != null) 'settings': settings!,
-        if (snapshotExport != null) 'snapshotExport': snapshotExport!,
+        if (disabled != null) 'disabled': disabled!,
+        if (monitoringIntervalDays != null)
+          'monitoringIntervalDays': monitoringIntervalDays!,
+        if (stalenessDays != null) 'stalenessDays': stalenessDays!,
       };
 }

-/// Describes exporting all historical Feature values of all entities of the
-/// EntityType between \[start_time, end_time\].
-class GoogleCloudAiplatformV1ExportFeatureValuesRequestFullExport {
-  /// Exports Feature values as of this timestamp.
-  ///
-  /// If not set, retrieve values as of now. Timestamp, if present, must not
-  /// have higher than millisecond precision.
-  core.String? endTime;
-
-  /// Excludes Feature values with feature generation timestamp before this
-  /// timestamp.
+/// The config for Featurestore Monitoring threshold.
+class GoogleCloudAiplatformV1FeaturestoreMonitoringConfigThresholdConfig {
+  /// Specify a threshold value that can trigger the alert.
   ///
-  /// If not set, retrieve oldest values kept in Feature Store. Timestamp, if
-  /// present, must not have higher than millisecond precision.
-  core.String? startTime;
+  /// 1. For categorical feature, the distribution distance is calculated by
+  /// L-infinity norm. 2. For numerical feature, the distribution distance is
+  /// calculated by Jensen–Shannon divergence. Each feature must have a non-zero
+  /// threshold if it needs to be monitored. Otherwise no alert will be
+  /// triggered for that feature.
+  core.double? 
value; - GoogleCloudAiplatformV1ExportFeatureValuesRequestFullExport({ - this.endTime, - this.startTime, + GoogleCloudAiplatformV1FeaturestoreMonitoringConfigThresholdConfig({ + this.value, }); - GoogleCloudAiplatformV1ExportFeatureValuesRequestFullExport.fromJson( + GoogleCloudAiplatformV1FeaturestoreMonitoringConfigThresholdConfig.fromJson( core.Map json_) : this( - endTime: json_['endTime'] as core.String?, - startTime: json_['startTime'] as core.String?, + value: (json_['value'] as core.num?)?.toDouble(), ); core.Map toJson() => { - if (endTime != null) 'endTime': endTime!, - if (startTime != null) 'startTime': startTime!, + if (value != null) 'value': value!, }; } -/// Describes exporting the latest Feature values of all entities of the -/// EntityType between \[start_time, snapshot_time\]. -class GoogleCloudAiplatformV1ExportFeatureValuesRequestSnapshotExport { - /// Exports Feature values as of this timestamp. +/// OnlineServingConfig specifies the details for provisioning online serving +/// resources. +class GoogleCloudAiplatformV1FeaturestoreOnlineServingConfig { + /// The number of nodes for the online store. /// - /// If not set, retrieve values as of now. Timestamp, if present, must not - /// have higher than millisecond precision. - core.String? snapshotTime; + /// The number of nodes doesn't scale automatically, but you can manually + /// update the number of nodes. If set to 0, the featurestore will not have an + /// online store and cannot be used for online serving. + core.int? fixedNodeCount; - /// Excludes Feature values with feature generation timestamp before this - /// timestamp. + /// Online serving scaling configuration. /// - /// If not set, retrieve oldest values kept in Feature Store. Timestamp, if - /// present, must not have higher than millisecond precision. - core.String? startTime; + /// Only one of `fixed_node_count` and `scaling` can be set. Setting one will + /// reset the other. + GoogleCloudAiplatformV1FeaturestoreOnlineServingConfigScaling? scaling; - GoogleCloudAiplatformV1ExportFeatureValuesRequestSnapshotExport({ - this.snapshotTime, - this.startTime, + GoogleCloudAiplatformV1FeaturestoreOnlineServingConfig({ + this.fixedNodeCount, + this.scaling, }); - GoogleCloudAiplatformV1ExportFeatureValuesRequestSnapshotExport.fromJson( + GoogleCloudAiplatformV1FeaturestoreOnlineServingConfig.fromJson( core.Map json_) : this( - snapshotTime: json_['snapshotTime'] as core.String?, - startTime: json_['startTime'] as core.String?, + fixedNodeCount: json_['fixedNodeCount'] as core.int?, + scaling: json_.containsKey('scaling') + ? GoogleCloudAiplatformV1FeaturestoreOnlineServingConfigScaling + .fromJson( + json_['scaling'] as core.Map) + : null, ); core.Map toJson() => { - if (snapshotTime != null) 'snapshotTime': snapshotTime!, - if (startTime != null) 'startTime': startTime!, + if (fixedNodeCount != null) 'fixedNodeCount': fixedNodeCount!, + if (scaling != null) 'scaling': scaling!, }; } -/// Assigns input data to training, validation, and test sets based on the given -/// filters, data pieces not matched by any filter are ignored. +/// Online serving scaling configuration. /// -/// Currently only supported for Datasets containing DataItems. If any of the -/// filters in this message are to match nothing, then they can be set as '-' -/// (the minus sign). Supported only for unstructured Datasets. 
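The monitoring and online-serving classes added above are plain data holders; a minimal usage sketch (illustrative only, assuming the generated library is importable as package:googleapis/aiplatform/v1.dart) is:

// Illustrative sketch; the import path is an assumption about the package layout.
import 'dart:convert';
import 'package:googleapis/aiplatform/v1.dart';

void main() {
  // Run snapshot analysis every 7 days and alert when the numerical-feature
  // distribution distance exceeds 0.3 (see the ThresholdConfig docs above).
  final monitoring = GoogleCloudAiplatformV1FeaturestoreMonitoringConfig(
    snapshotAnalysis:
        GoogleCloudAiplatformV1FeaturestoreMonitoringConfigSnapshotAnalysis(
      monitoringIntervalDays: 7,
    ),
    numericalThresholdConfig:
        GoogleCloudAiplatformV1FeaturestoreMonitoringConfigThresholdConfig(
      value: 0.3,
    ),
  );

  // `fixedNodeCount` and `scaling` are mutually exclusive per the field docs.
  final serving = GoogleCloudAiplatformV1FeaturestoreOnlineServingConfig(
    fixedNodeCount: 2,
  );

  print(jsonEncode(monitoring.toJson()));
  print(jsonEncode(serving.toJson()));
}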
-typedef GoogleCloudAiplatformV1ExportFilterSplit = $FilterSplit; +/// If min_node_count and max_node_count are set to the same value, the cluster +/// will be configured with the fixed number of node (no auto-scaling). +class GoogleCloudAiplatformV1FeaturestoreOnlineServingConfigScaling { + /// The cpu utilization that the Autoscaler should be trying to achieve. + /// + /// This number is on a scale from 0 (no utilization) to 100 (total + /// utilization), and is limited between 10 and 80. When a cluster's CPU + /// utilization exceeds the target that you have set, Bigtable immediately + /// adds nodes to the cluster. When CPU utilization is substantially lower + /// than the target, Bigtable removes nodes. If not set or set to 0, default + /// to 50. + /// + /// Optional. + core.int? cpuUtilizationTarget; -/// Assigns the input data to training, validation, and test sets as per the -/// given fractions. -/// -/// Any of `training_fraction`, `validation_fraction` and `test_fraction` may -/// optionally be provided, they must sum to up to 1. If the provided ones sum -/// to less than 1, the remainder is assigned to sets as decided by Vertex AI. -/// If none of the fractions are set, by default roughly 80% of data is used for -/// training, 10% for validation, and 10% for test. -typedef GoogleCloudAiplatformV1ExportFractionSplit = $FractionSplit; + /// The maximum number of nodes to scale up to. + /// + /// Must be greater than min_node_count, and less than or equal to 10 times of + /// 'min_node_count'. + core.int? maxNodeCount; -/// Request message for ModelService.ExportModel. -class GoogleCloudAiplatformV1ExportModelRequest { - /// The desired output location and configuration. + /// The minimum number of nodes to scale down to. + /// + /// Must be greater than or equal to 1. /// /// Required. - GoogleCloudAiplatformV1ExportModelRequestOutputConfig? outputConfig; + core.int? minNodeCount; - GoogleCloudAiplatformV1ExportModelRequest({ - this.outputConfig, + GoogleCloudAiplatformV1FeaturestoreOnlineServingConfigScaling({ + this.cpuUtilizationTarget, + this.maxNodeCount, + this.minNodeCount, }); - GoogleCloudAiplatformV1ExportModelRequest.fromJson(core.Map json_) + GoogleCloudAiplatformV1FeaturestoreOnlineServingConfigScaling.fromJson( + core.Map json_) : this( - outputConfig: json_.containsKey('outputConfig') - ? GoogleCloudAiplatformV1ExportModelRequestOutputConfig.fromJson( - json_['outputConfig'] as core.Map) - : null, + cpuUtilizationTarget: json_['cpuUtilizationTarget'] as core.int?, + maxNodeCount: json_['maxNodeCount'] as core.int?, + minNodeCount: json_['minNodeCount'] as core.int?, ); core.Map toJson() => { - if (outputConfig != null) 'outputConfig': outputConfig!, + if (cpuUtilizationTarget != null) + 'cpuUtilizationTarget': cpuUtilizationTarget!, + if (maxNodeCount != null) 'maxNodeCount': maxNodeCount!, + if (minNodeCount != null) 'minNodeCount': minNodeCount!, }; } -/// Output configuration for the Model export. -class GoogleCloudAiplatformV1ExportModelRequestOutputConfig { - /// The Cloud Storage location where the Model artifact is to be written to. +/// Request message for FeatureOnlineStoreService.FetchFeatureValues. +/// +/// All the features under the requested feature view will be returned. +class GoogleCloudAiplatformV1FetchFeatureValuesRequest { + /// Response data format. /// - /// Under the directory given as the destination a new one with name - /// "`model-export--`", where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ - /// ISO-8601 format, will be created. 
Inside, the Model and any of its - /// supporting files will be written. This field should only be set when the - /// `exportableContent` field of the \[Model.supported_export_formats\] object - /// contains `ARTIFACT`. - GoogleCloudAiplatformV1GcsDestination? artifactDestination; - - /// The ID of the format in which the Model must be exported. + /// If not set, FeatureViewDataFormat.KEY_VALUE will be used. /// - /// Each Model lists the export formats it supports. If no value is provided - /// here, then the first from the list of the Model's supported formats is - /// used by default. - core.String? exportFormatId; + /// Optional. + /// Possible string values are: + /// - "FEATURE_VIEW_DATA_FORMAT_UNSPECIFIED" : Not set. Will be treated as the + /// KeyValue format. + /// - "KEY_VALUE" : Return response data in key-value format. + /// - "PROTO_STRUCT" : Return response data in proto Struct format. + core.String? dataFormat; - /// The Google Container Registry or Artifact Registry uri where the Model - /// container image will be copied to. + /// The request key to fetch feature values for. /// - /// This field should only be set when the `exportableContent` field of the - /// \[Model.supported_export_formats\] object contains `IMAGE`. - GoogleCloudAiplatformV1ContainerRegistryDestination? imageDestination; + /// Optional. + GoogleCloudAiplatformV1FeatureViewDataKey? dataKey; - GoogleCloudAiplatformV1ExportModelRequestOutputConfig({ - this.artifactDestination, - this.exportFormatId, - this.imageDestination, + GoogleCloudAiplatformV1FetchFeatureValuesRequest({ + this.dataFormat, + this.dataKey, }); - GoogleCloudAiplatformV1ExportModelRequestOutputConfig.fromJson(core.Map json_) + GoogleCloudAiplatformV1FetchFeatureValuesRequest.fromJson(core.Map json_) : this( - artifactDestination: json_.containsKey('artifactDestination') - ? GoogleCloudAiplatformV1GcsDestination.fromJson( - json_['artifactDestination'] - as core.Map) - : null, - exportFormatId: json_['exportFormatId'] as core.String?, - imageDestination: json_.containsKey('imageDestination') - ? GoogleCloudAiplatformV1ContainerRegistryDestination.fromJson( - json_['imageDestination'] - as core.Map) + dataFormat: json_['dataFormat'] as core.String?, + dataKey: json_.containsKey('dataKey') + ? GoogleCloudAiplatformV1FeatureViewDataKey.fromJson( + json_['dataKey'] as core.Map) : null, ); core.Map toJson() => { - if (artifactDestination != null) - 'artifactDestination': artifactDestination!, - if (exportFormatId != null) 'exportFormatId': exportFormatId!, - if (imageDestination != null) 'imageDestination': imageDestination!, + if (dataFormat != null) 'dataFormat': dataFormat!, + if (dataKey != null) 'dataKey': dataKey!, }; } -/// Request message for TensorboardService.ExportTensorboardTimeSeriesData. -class GoogleCloudAiplatformV1ExportTensorboardTimeSeriesDataRequest { - /// Exports the TensorboardTimeSeries' data that match the filter expression. - core.String? filter; - - /// Field to use to sort the TensorboardTimeSeries' data. +/// Response message for FeatureOnlineStoreService.FetchFeatureValues +class GoogleCloudAiplatformV1FetchFeatureValuesResponse { + /// The data key associated with this response. /// - /// By default, TensorboardTimeSeries' data is returned in a pseudo random - /// order. - core.String? orderBy; + /// Will only be populated for + /// FeatureOnlineStoreService.StreamingFetchFeatureValues RPCs. + GoogleCloudAiplatformV1FeatureViewDataKey? dataKey; - /// The maximum number of data points to return per page. 
- /// - /// The default page_size is 1000. Values must be between 1 and 10000. Values - /// above 10000 are coerced to 10000. - core.int? pageSize; + /// Feature values in KeyValue format. + GoogleCloudAiplatformV1FetchFeatureValuesResponseFeatureNameValuePairList? + keyValues; - /// A page token, received from a previous ExportTensorboardTimeSeriesData - /// call. + /// Feature values in proto Struct format. /// - /// Provide this to retrieve the subsequent page. When paginating, all other - /// parameters provided to ExportTensorboardTimeSeriesData must match the call - /// that provided the page token. - core.String? pageToken; + /// The values for Object must be JSON objects. It can consist of `num`, + /// `String`, `bool` and `null` as well as `Map` and `List` values. + core.Map? protoStruct; - GoogleCloudAiplatformV1ExportTensorboardTimeSeriesDataRequest({ - this.filter, - this.orderBy, - this.pageSize, - this.pageToken, + GoogleCloudAiplatformV1FetchFeatureValuesResponse({ + this.dataKey, + this.keyValues, + this.protoStruct, }); - GoogleCloudAiplatformV1ExportTensorboardTimeSeriesDataRequest.fromJson( - core.Map json_) + GoogleCloudAiplatformV1FetchFeatureValuesResponse.fromJson(core.Map json_) : this( - filter: json_['filter'] as core.String?, - orderBy: json_['orderBy'] as core.String?, - pageSize: json_['pageSize'] as core.int?, - pageToken: json_['pageToken'] as core.String?, + dataKey: json_.containsKey('dataKey') + ? GoogleCloudAiplatformV1FeatureViewDataKey.fromJson( + json_['dataKey'] as core.Map) + : null, + keyValues: json_.containsKey('keyValues') + ? GoogleCloudAiplatformV1FetchFeatureValuesResponseFeatureNameValuePairList + .fromJson( + json_['keyValues'] as core.Map) + : null, + protoStruct: json_.containsKey('protoStruct') + ? json_['protoStruct'] as core.Map + : null, ); core.Map toJson() => { - if (filter != null) 'filter': filter!, - if (orderBy != null) 'orderBy': orderBy!, - if (pageSize != null) 'pageSize': pageSize!, - if (pageToken != null) 'pageToken': pageToken!, + if (dataKey != null) 'dataKey': dataKey!, + if (keyValues != null) 'keyValues': keyValues!, + if (protoStruct != null) 'protoStruct': protoStruct!, }; } -/// Response message for TensorboardService.ExportTensorboardTimeSeriesData. -class GoogleCloudAiplatformV1ExportTensorboardTimeSeriesDataResponse { - /// A token, which can be sent as page_token to retrieve the next page. - /// - /// If this field is omitted, there are no subsequent pages. - core.String? nextPageToken; - - /// The returned time series data points. - core.List? timeSeriesDataPoints; +/// Response structure in the format of key (feature name) and (feature) value +/// pair. +class GoogleCloudAiplatformV1FetchFeatureValuesResponseFeatureNameValuePairList { + /// List of feature names and values. + core.List< + GoogleCloudAiplatformV1FetchFeatureValuesResponseFeatureNameValuePairListFeatureNameValuePair>? + features; - GoogleCloudAiplatformV1ExportTensorboardTimeSeriesDataResponse({ - this.nextPageToken, - this.timeSeriesDataPoints, + GoogleCloudAiplatformV1FetchFeatureValuesResponseFeatureNameValuePairList({ + this.features, }); - GoogleCloudAiplatformV1ExportTensorboardTimeSeriesDataResponse.fromJson( + GoogleCloudAiplatformV1FetchFeatureValuesResponseFeatureNameValuePairList.fromJson( core.Map json_) : this( - nextPageToken: json_['nextPageToken'] as core.String?, - timeSeriesDataPoints: (json_['timeSeriesDataPoints'] as core.List?) + features: (json_['features'] as core.List?) 
?.map((value) => - GoogleCloudAiplatformV1TimeSeriesDataPoint.fromJson( - value as core.Map)) + GoogleCloudAiplatformV1FetchFeatureValuesResponseFeatureNameValuePairListFeatureNameValuePair + .fromJson(value as core.Map)) .toList(), ); core.Map toJson() => { - if (nextPageToken != null) 'nextPageToken': nextPageToken!, - if (timeSeriesDataPoints != null) - 'timeSeriesDataPoints': timeSeriesDataPoints!, + if (features != null) 'features': features!, }; } -/// Feature Metadata information. -/// -/// For example, color is a feature that describes an apple. -class GoogleCloudAiplatformV1Feature { - /// Only applicable for Vertex AI Feature Store (Legacy). - /// - /// Timestamp when this EntityType was created. - /// - /// Output only. - core.String? createTime; +/// Feature name & value pair. +class GoogleCloudAiplatformV1FetchFeatureValuesResponseFeatureNameValuePairListFeatureNameValuePair { + /// Feature short name. + core.String? name; - /// Description of the Feature. - core.String? description; + /// Feature value. + GoogleCloudAiplatformV1FeatureValue? value; - /// Only applicable for Vertex AI Feature Store (Legacy). - /// - /// If not set, use the monitoring_config defined for the EntityType this - /// Feature belongs to. Only Features with type (Feature.ValueType) BOOL, - /// STRING, DOUBLE or INT64 can enable monitoring. If set to true, all types - /// of data monitoring are disabled despite the config on EntityType. - /// - /// Optional. - core.bool? disableMonitoring; + GoogleCloudAiplatformV1FetchFeatureValuesResponseFeatureNameValuePairListFeatureNameValuePair({ + this.name, + this.value, + }); - /// Used to perform a consistent read-modify-write updates. - /// - /// If not set, a blind "overwrite" update happens. - core.String? etag; + GoogleCloudAiplatformV1FetchFeatureValuesResponseFeatureNameValuePairListFeatureNameValuePair.fromJson( + core.Map json_) + : this( + name: json_['name'] as core.String?, + value: json_.containsKey('value') + ? GoogleCloudAiplatformV1FeatureValue.fromJson( + json_['value'] as core.Map) + : null, + ); - /// The labels with user-defined metadata to organize your Features. - /// - /// Label keys and values can be no longer than 64 characters (Unicode - /// codepoints), can only contain lowercase letters, numeric characters, - /// underscores and dashes. International characters are allowed. See - /// https://goo.gl/xmQnxf for more information on and examples of labels. No - /// more than 64 user labels can be associated with one Feature (System labels - /// are excluded)." System reserved label keys are prefixed with - /// "aiplatform.googleapis.com/" and are immutable. - /// - /// Optional. - core.Map? labels; + core.Map toJson() => { + if (name != null) 'name': name!, + if (value != null) 'value': value!, + }; +} - /// Only applicable for Vertex AI Feature Store (Legacy). - /// - /// The list of historical stats and anomalies with specified objectives. +/// Request message for PredictionService.FetchPredictOperation. +class GoogleCloudAiplatformV1FetchPredictOperationRequest { + /// The server-assigned name for the operation. /// - /// Output only. - core.List? - monitoringStatsAnomalies; + /// Required. + core.String? operationName; - /// Name of the Feature. 
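The FetchFeatureValues request and response messages above can be exercised the same way; a sketch (illustrative only, same assumed import path) of building a request and reading the key/value response shape:

// Illustrative sketch; import path is an assumption.
import 'package:googleapis/aiplatform/v1.dart';

GoogleCloudAiplatformV1FetchFeatureValuesRequest buildRequest() =>
    GoogleCloudAiplatformV1FetchFeatureValuesRequest(
      // KEY_VALUE is also the documented default when dataFormat is unset.
      dataFormat: 'KEY_VALUE',
    );

void printFeatures(GoogleCloudAiplatformV1FetchFeatureValuesResponse resp) {
  final pairs = resp.keyValues?.features;
  if (pairs == null) return;
  for (final pair in pairs) {
    // Every generated message class in this file exposes toJson() for inspection.
    print('${pair.name}: ${pair.value?.toJson()}');
  }
}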
- /// - /// Format: - /// `projects/{project}/locations/{location}/featurestores/{featurestore}/entityTypes/{entity_type}/features/{feature}` - /// `projects/{project}/locations/{location}/featureGroups/{feature_group}/features/{feature}` - /// The last part feature is assigned by the client. The feature can be up to - /// 64 characters long and can consist only of ASCII Latin letters A-Z and - /// a-z, underscore(_), and ASCII digits 0-9 starting with a letter. The value - /// will be unique given an entity type. - /// - /// Immutable. - core.String? name; + GoogleCloudAiplatformV1FetchPredictOperationRequest({ + this.operationName, + }); - /// Entity responsible for maintaining this feature. - /// - /// Can be comma separated list of email addresses or URIs. - core.String? pointOfContact; + GoogleCloudAiplatformV1FetchPredictOperationRequest.fromJson(core.Map json_) + : this( + operationName: json_['operationName'] as core.String?, + ); - /// Only applicable for Vertex AI Feature Store (Legacy). - /// - /// Timestamp when this EntityType was most recently updated. - /// - /// Output only. - core.String? updateTime; + core.Map toJson() => { + if (operationName != null) 'operationName': operationName!, + }; +} - /// Only applicable for Vertex AI Feature Store (Legacy). - /// - /// Type of Feature value. +/// URI based data. +class GoogleCloudAiplatformV1FileData { + /// URI. /// - /// Immutable. - /// Possible string values are: - /// - "VALUE_TYPE_UNSPECIFIED" : The value type is unspecified. - /// - "BOOL" : Used for Feature that is a boolean. - /// - "BOOL_ARRAY" : Used for Feature that is a list of boolean. - /// - "DOUBLE" : Used for Feature that is double. - /// - "DOUBLE_ARRAY" : Used for Feature that is a list of double. - /// - "INT64" : Used for Feature that is INT64. - /// - "INT64_ARRAY" : Used for Feature that is a list of INT64. - /// - "STRING" : Used for Feature that is string. - /// - "STRING_ARRAY" : Used for Feature that is a list of String. - /// - "BYTES" : Used for Feature that is bytes. - /// - "STRUCT" : Used for Feature that is struct. - core.String? valueType; + /// Required. + core.String? fileUri; - /// Only applicable for Vertex AI Feature Store. + /// The IANA standard MIME type of the source data. /// - /// The name of the BigQuery Table/View column hosting data for this version. - /// If no value is provided, will use feature_id. - core.String? versionColumnName; + /// Required. + core.String? mimeType; - GoogleCloudAiplatformV1Feature({ - this.createTime, - this.description, - this.disableMonitoring, - this.etag, - this.labels, - this.monitoringStatsAnomalies, - this.name, - this.pointOfContact, - this.updateTime, - this.valueType, - this.versionColumnName, + GoogleCloudAiplatformV1FileData({ + this.fileUri, + this.mimeType, }); - GoogleCloudAiplatformV1Feature.fromJson(core.Map json_) + GoogleCloudAiplatformV1FileData.fromJson(core.Map json_) : this( - createTime: json_['createTime'] as core.String?, - description: json_['description'] as core.String?, - disableMonitoring: json_['disableMonitoring'] as core.bool?, - etag: json_['etag'] as core.String?, - labels: - (json_['labels'] as core.Map?)?.map( - (key, value) => core.MapEntry( - key, - value as core.String, - ), - ), - monitoringStatsAnomalies: (json_['monitoringStatsAnomalies'] - as core.List?) 
- ?.map((value) => - GoogleCloudAiplatformV1FeatureMonitoringStatsAnomaly.fromJson( - value as core.Map)) - .toList(), - name: json_['name'] as core.String?, - pointOfContact: json_['pointOfContact'] as core.String?, - updateTime: json_['updateTime'] as core.String?, - valueType: json_['valueType'] as core.String?, - versionColumnName: json_['versionColumnName'] as core.String?, + fileUri: json_['fileUri'] as core.String?, + mimeType: json_['mimeType'] as core.String?, ); core.Map toJson() => { - if (createTime != null) 'createTime': createTime!, - if (description != null) 'description': description!, - if (disableMonitoring != null) 'disableMonitoring': disableMonitoring!, - if (etag != null) 'etag': etag!, - if (labels != null) 'labels': labels!, - if (monitoringStatsAnomalies != null) - 'monitoringStatsAnomalies': monitoringStatsAnomalies!, - if (name != null) 'name': name!, - if (pointOfContact != null) 'pointOfContact': pointOfContact!, - if (updateTime != null) 'updateTime': updateTime!, - if (valueType != null) 'valueType': valueType!, - if (versionColumnName != null) 'versionColumnName': versionColumnName!, + if (fileUri != null) 'fileUri': fileUri!, + if (mimeType != null) 'mimeType': mimeType!, }; } -/// Vertex AI Feature Group. -class GoogleCloudAiplatformV1FeatureGroup { - /// Indicates that features for this group come from BigQuery Table/View. +/// RagFile status. +class GoogleCloudAiplatformV1FileStatus { + /// Only when the `state` field is ERROR. /// - /// By default treats the source as a sparse time series source. The BigQuery - /// source table or view must have at least one entity ID column and a column - /// named `feature_timestamp`. - GoogleCloudAiplatformV1FeatureGroupBigQuery? bigQuery; + /// Output only. + core.String? errorStatus; - /// Timestamp when this FeatureGroup was created. + /// RagFile state. /// /// Output only. - core.String? createTime; + /// Possible string values are: + /// - "STATE_UNSPECIFIED" : RagFile state is unspecified. + /// - "ACTIVE" : RagFile resource has been created and indexed successfully. + /// - "ERROR" : RagFile resource is in a problematic state. See + /// `error_message` field for details. + core.String? state; - /// Description of the FeatureGroup. - /// - /// Optional. - core.String? description; + GoogleCloudAiplatformV1FileStatus({ + this.errorStatus, + this.state, + }); - /// Used to perform consistent read-modify-write updates. - /// - /// If not set, a blind "overwrite" update happens. - /// - /// Optional. - core.String? etag; + GoogleCloudAiplatformV1FileStatus.fromJson(core.Map json_) + : this( + errorStatus: json_['errorStatus'] as core.String?, + state: json_['state'] as core.String?, + ); - /// The labels with user-defined metadata to organize your FeatureGroup. - /// - /// Label keys and values can be no longer than 64 characters (Unicode - /// codepoints), can only contain lowercase letters, numeric characters, - /// underscores and dashes. International characters are allowed. See - /// https://goo.gl/xmQnxf for more information on and examples of labels. No - /// more than 64 user labels can be associated with one FeatureGroup(System - /// labels are excluded)." System reserved label keys are prefixed with - /// "aiplatform.googleapis.com/" and are immutable. - /// - /// Optional. - core.Map? labels; + core.Map toJson() => { + if (errorStatus != null) 'errorStatus': errorStatus!, + if (state != null) 'state': state!, + }; +} - /// Identifier. 
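The FileData message added above carries a URI plus MIME type; a short construction sketch (illustrative only; the import path and the gs:// URI are placeholders):

// Illustrative sketch; import path and gs:// URI are placeholders.
import 'package:googleapis/aiplatform/v1.dart';

final pdfPart = GoogleCloudAiplatformV1FileData(
  // Both fields are documented as required by the service.
  fileUri: 'gs://example-bucket/contract.pdf',
  mimeType: 'application/pdf',
);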
+/// Assigns input data to training, validation, and test sets based on the given +/// filters, data pieces not matched by any filter are ignored. +/// +/// Currently only supported for Datasets containing DataItems. If any of the +/// filters in this message are to match nothing, then they can be set as '-' +/// (the minus sign). Supported only for unstructured Datasets. +typedef GoogleCloudAiplatformV1FilterSplit = $FilterSplit; + +/// The request message for MatchService.FindNeighbors. +class GoogleCloudAiplatformV1FindNeighborsRequest { + /// The ID of the DeployedIndex that will serve the request. /// - /// Name of the FeatureGroup. Format: - /// `projects/{project}/locations/{location}/featureGroups/{featureGroup}` - core.String? name; + /// This request is sent to a specific IndexEndpoint, as per the + /// IndexEndpoint.network. That IndexEndpoint also has + /// IndexEndpoint.deployed_indexes, and each such index has a DeployedIndex.id + /// field. The value of the field below must equal one of the DeployedIndex.id + /// fields of the IndexEndpoint that is being called for this request. + core.String? deployedIndexId; - /// Timestamp when this FeatureGroup was last updated. + /// The list of queries. + core.List? queries; + + /// If set to true, the full datapoints (including all vector values and + /// restricts) of the nearest neighbors are returned. /// - /// Output only. - core.String? updateTime; + /// Note that returning full datapoint will significantly increase the latency + /// and cost of the query. + core.bool? returnFullDatapoint; - GoogleCloudAiplatformV1FeatureGroup({ - this.bigQuery, - this.createTime, - this.description, - this.etag, - this.labels, - this.name, - this.updateTime, + GoogleCloudAiplatformV1FindNeighborsRequest({ + this.deployedIndexId, + this.queries, + this.returnFullDatapoint, }); - GoogleCloudAiplatformV1FeatureGroup.fromJson(core.Map json_) + GoogleCloudAiplatformV1FindNeighborsRequest.fromJson(core.Map json_) : this( - bigQuery: json_.containsKey('bigQuery') - ? GoogleCloudAiplatformV1FeatureGroupBigQuery.fromJson( - json_['bigQuery'] as core.Map) - : null, - createTime: json_['createTime'] as core.String?, - description: json_['description'] as core.String?, - etag: json_['etag'] as core.String?, - labels: - (json_['labels'] as core.Map?)?.map( - (key, value) => core.MapEntry( - key, - value as core.String, - ), - ), - name: json_['name'] as core.String?, - updateTime: json_['updateTime'] as core.String?, + deployedIndexId: json_['deployedIndexId'] as core.String?, + queries: (json_['queries'] as core.List?) + ?.map((value) => + GoogleCloudAiplatformV1FindNeighborsRequestQuery.fromJson( + value as core.Map)) + .toList(), + returnFullDatapoint: json_['returnFullDatapoint'] as core.bool?, ); core.Map toJson() => { - if (bigQuery != null) 'bigQuery': bigQuery!, - if (createTime != null) 'createTime': createTime!, - if (description != null) 'description': description!, - if (etag != null) 'etag': etag!, - if (labels != null) 'labels': labels!, - if (name != null) 'name': name!, - if (updateTime != null) 'updateTime': updateTime!, + if (deployedIndexId != null) 'deployedIndexId': deployedIndexId!, + if (queries != null) 'queries': queries!, + if (returnFullDatapoint != null) + 'returnFullDatapoint': returnFullDatapoint!, }; } -/// Input source type for BigQuery Tables and Views. -class GoogleCloudAiplatformV1FeatureGroupBigQuery { - /// The BigQuery source URI that points to either a BigQuery Table or View. 
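A skeleton of the FindNeighbors request added above (illustrative only; the deployed index ID is a placeholder, and the per-query settings use the FindNeighborsRequestQuery class defined immediately below):

// Illustrative sketch; import path and the deployed index ID are placeholders.
import 'package:googleapis/aiplatform/v1.dart';

final findNeighborsRequest = GoogleCloudAiplatformV1FindNeighborsRequest(
  // Must equal one of the DeployedIndex.id values on the target IndexEndpoint.
  deployedIndexId: 'my_deployed_index',
  // Returning full datapoints significantly increases latency and cost, so
  // leave this false unless the vectors/restricts are actually needed.
  returnFullDatapoint: false,
  // Populated with GoogleCloudAiplatformV1FindNeighborsRequestQuery values;
  // see the query sketch after that class below.
  queries: [],
);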
+/// A query to find a number of the nearest neighbors (most similar vectors) of +/// a vector. +class GoogleCloudAiplatformV1FindNeighborsRequestQuery { + /// The number of neighbors to find via approximate search before exact + /// reordering is performed. /// - /// Required. Immutable. - GoogleCloudAiplatformV1BigQuerySource? bigQuerySource; + /// If not set, the default value from scam config is used; if set, this value + /// must be \> 0. + core.int? approximateNeighborCount; - /// If set, all feature values will be fetched from a single row per unique - /// entityId including nulls. - /// - /// If not set, will collapse all rows for each unique entityId into a singe - /// row with any non-null values if present, if no non-null values are present - /// will sync null. ex: If source has schema `(entity_id, feature_timestamp, - /// f0, f1)` and the following rows: `(e1, 2020-01-01T10:00:00.123Z, 10, 15)` - /// `(e1, 2020-02-01T10:00:00.123Z, 20, null)` If dense is set, `(e1, 20, - /// null)` is synced to online stores. If dense is not set, `(e1, 20, 15)` is - /// synced to online stores. + /// The datapoint/vector whose nearest neighbors should be searched for. /// - /// Optional. - core.bool? dense; + /// Required. + GoogleCloudAiplatformV1IndexDatapoint? datapoint; - /// Columns to construct entity_id / row keys. - /// - /// If not provided defaults to `entity_id`. + /// The fraction of the number of leaves to search, set at query time allows + /// user to tune search performance. /// - /// Optional. - core.List? entityIdColumns; + /// This value increase result in both search accuracy and latency increase. + /// The value should be between 0.0 and 1.0. If not set or set to 0.0, query + /// uses the default value specified in + /// NearestNeighborSearchConfig.TreeAHConfig.fraction_leaf_nodes_to_search. + core.double? fractionLeafNodesToSearchOverride; - /// Set if the data source is not a time-series. + /// The number of nearest neighbors to be retrieved from database for each + /// query. /// - /// Optional. - core.bool? staticDataSource; + /// If not set, will use the default from the service configuration + /// (https://cloud.google.com/vertex-ai/docs/matching-engine/configuring-indexes#nearest-neighbor-search-config). + core.int? neighborCount; - /// If the source is a time-series source, this can be set to control how - /// downstream sources (ex: FeatureView ) will treat time-series sources. + /// Crowding is a constraint on a neighbor list produced by nearest neighbor + /// search requiring that no more than some value k' of the k neighbors + /// returned have the same value of crowding_attribute. /// - /// If not set, will treat the source as a time-series source with - /// `feature_timestamp` as timestamp column and no scan boundary. + /// It's used for improving result diversity. This field is the maximum number + /// of matches with the same crowding tag. + core.int? perCrowdingAttributeNeighborCount; + + /// Represents RRF algorithm that combines search results. /// /// Optional. - GoogleCloudAiplatformV1FeatureGroupBigQueryTimeSeries? timeSeries; + GoogleCloudAiplatformV1FindNeighborsRequestQueryRRF? 
rrf; - GoogleCloudAiplatformV1FeatureGroupBigQuery({ - this.bigQuerySource, - this.dense, - this.entityIdColumns, - this.staticDataSource, - this.timeSeries, + GoogleCloudAiplatformV1FindNeighborsRequestQuery({ + this.approximateNeighborCount, + this.datapoint, + this.fractionLeafNodesToSearchOverride, + this.neighborCount, + this.perCrowdingAttributeNeighborCount, + this.rrf, }); - GoogleCloudAiplatformV1FeatureGroupBigQuery.fromJson(core.Map json_) + GoogleCloudAiplatformV1FindNeighborsRequestQuery.fromJson(core.Map json_) : this( - bigQuerySource: json_.containsKey('bigQuerySource') - ? GoogleCloudAiplatformV1BigQuerySource.fromJson( - json_['bigQuerySource'] - as core.Map) + approximateNeighborCount: + json_['approximateNeighborCount'] as core.int?, + datapoint: json_.containsKey('datapoint') + ? GoogleCloudAiplatformV1IndexDatapoint.fromJson( + json_['datapoint'] as core.Map) : null, - dense: json_['dense'] as core.bool?, - entityIdColumns: (json_['entityIdColumns'] as core.List?) - ?.map((value) => value as core.String) - .toList(), - staticDataSource: json_['staticDataSource'] as core.bool?, - timeSeries: json_.containsKey('timeSeries') - ? GoogleCloudAiplatformV1FeatureGroupBigQueryTimeSeries.fromJson( - json_['timeSeries'] as core.Map) + fractionLeafNodesToSearchOverride: + (json_['fractionLeafNodesToSearchOverride'] as core.num?) + ?.toDouble(), + neighborCount: json_['neighborCount'] as core.int?, + perCrowdingAttributeNeighborCount: + json_['perCrowdingAttributeNeighborCount'] as core.int?, + rrf: json_.containsKey('rrf') + ? GoogleCloudAiplatformV1FindNeighborsRequestQueryRRF.fromJson( + json_['rrf'] as core.Map) : null, ); core.Map toJson() => { - if (bigQuerySource != null) 'bigQuerySource': bigQuerySource!, - if (dense != null) 'dense': dense!, - if (entityIdColumns != null) 'entityIdColumns': entityIdColumns!, - if (staticDataSource != null) 'staticDataSource': staticDataSource!, - if (timeSeries != null) 'timeSeries': timeSeries!, + if (approximateNeighborCount != null) + 'approximateNeighborCount': approximateNeighborCount!, + if (datapoint != null) 'datapoint': datapoint!, + if (fractionLeafNodesToSearchOverride != null) + 'fractionLeafNodesToSearchOverride': + fractionLeafNodesToSearchOverride!, + if (neighborCount != null) 'neighborCount': neighborCount!, + if (perCrowdingAttributeNeighborCount != null) + 'perCrowdingAttributeNeighborCount': + perCrowdingAttributeNeighborCount!, + if (rrf != null) 'rrf': rrf!, }; } -class GoogleCloudAiplatformV1FeatureGroupBigQueryTimeSeries { - /// Column hosting timestamp values for a time-series source. +/// Parameters for RRF algorithm that combines search results. +class GoogleCloudAiplatformV1FindNeighborsRequestQueryRRF { + /// Users can provide an alpha value to give more weight to dense vs sparse + /// results. /// - /// Will be used to determine the latest `feature_values` for each entity. - /// Optional. If not provided, column named `feature_timestamp` of type - /// `TIMESTAMP` will be used. + /// For example, if the alpha is 0, we only return sparse and if the alpha is + /// 1, we only return dense. /// - /// Optional. - core.String? timestampColumn; + /// Required. + core.double? 
alpha; - GoogleCloudAiplatformV1FeatureGroupBigQueryTimeSeries({ - this.timestampColumn, + GoogleCloudAiplatformV1FindNeighborsRequestQueryRRF({ + this.alpha, }); - GoogleCloudAiplatformV1FeatureGroupBigQueryTimeSeries.fromJson(core.Map json_) + GoogleCloudAiplatformV1FindNeighborsRequestQueryRRF.fromJson(core.Map json_) : this( - timestampColumn: json_['timestampColumn'] as core.String?, + alpha: (json_['alpha'] as core.num?)?.toDouble(), ); core.Map toJson() => { - if (timestampColumn != null) 'timestampColumn': timestampColumn!, + if (alpha != null) 'alpha': alpha!, }; } -/// A list of historical SnapshotAnalysis or ImportFeaturesAnalysis stats -/// requested by user, sorted by FeatureStatsAnomaly.start_time descending. -class GoogleCloudAiplatformV1FeatureMonitoringStatsAnomaly { - /// The stats and anomalies generated at specific timestamp. - /// - /// Output only. - GoogleCloudAiplatformV1FeatureStatsAnomaly? featureStatsAnomaly; - - /// The objective for each stats. - /// - /// Output only. - /// Possible string values are: - /// - "OBJECTIVE_UNSPECIFIED" : If it's OBJECTIVE_UNSPECIFIED, - /// monitoring_stats will be empty. - /// - "IMPORT_FEATURE_ANALYSIS" : Stats are generated by Import Feature - /// Analysis. - /// - "SNAPSHOT_ANALYSIS" : Stats are generated by Snapshot Analysis. - core.String? objective; +/// The response message for MatchService.FindNeighbors. +class GoogleCloudAiplatformV1FindNeighborsResponse { + /// The nearest neighbors of the query datapoints. + core.List? + nearestNeighbors; - GoogleCloudAiplatformV1FeatureMonitoringStatsAnomaly({ - this.featureStatsAnomaly, - this.objective, + GoogleCloudAiplatformV1FindNeighborsResponse({ + this.nearestNeighbors, }); - GoogleCloudAiplatformV1FeatureMonitoringStatsAnomaly.fromJson(core.Map json_) + GoogleCloudAiplatformV1FindNeighborsResponse.fromJson(core.Map json_) : this( - featureStatsAnomaly: json_.containsKey('featureStatsAnomaly') - ? GoogleCloudAiplatformV1FeatureStatsAnomaly.fromJson( - json_['featureStatsAnomaly'] - as core.Map) - : null, - objective: json_['objective'] as core.String?, + nearestNeighbors: (json_['nearestNeighbors'] as core.List?) + ?.map((value) => + GoogleCloudAiplatformV1FindNeighborsResponseNearestNeighbors + .fromJson(value as core.Map)) + .toList(), ); core.Map toJson() => { - if (featureStatsAnomaly != null) - 'featureStatsAnomaly': featureStatsAnomaly!, - if (objective != null) 'objective': objective!, + if (nearestNeighbors != null) 'nearestNeighbors': nearestNeighbors!, }; } -/// Noise sigma by features. -/// -/// Noise sigma represents the standard deviation of the gaussian kernel that -/// will be used to add noise to interpolated inputs prior to computing -/// gradients. -class GoogleCloudAiplatformV1FeatureNoiseSigma { - /// Noise sigma per feature. - /// - /// No noise is added to features that are not set. - core.List? - noiseSigma; +/// Nearest neighbors for one query. +class GoogleCloudAiplatformV1FindNeighborsResponseNearestNeighbors { + /// The ID of the query datapoint. + core.String? id; - GoogleCloudAiplatformV1FeatureNoiseSigma({ - this.noiseSigma, + /// All its neighbors. + core.List? neighbors; + + GoogleCloudAiplatformV1FindNeighborsResponseNearestNeighbors({ + this.id, + this.neighbors, }); - GoogleCloudAiplatformV1FeatureNoiseSigma.fromJson(core.Map json_) + GoogleCloudAiplatformV1FindNeighborsResponseNearestNeighbors.fromJson( + core.Map json_) : this( - noiseSigma: (json_['noiseSigma'] as core.List?) 
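Continuing the request skeleton above, one query entry combining a neighbor count, a crowding limit, and RRF blending (illustrative only; the required IndexDatapoint payload is elided because that class is defined elsewhere in this file):

// Illustrative sketch; extends the findNeighborsRequest skeleton shown earlier.
import 'package:googleapis/aiplatform/v1.dart';

final query = GoogleCloudAiplatformV1FindNeighborsRequestQuery(
  neighborCount: 10,
  // At most 2 of the 10 neighbors may share the same crowding tag.
  perCrowdingAttributeNeighborCount: 2,
  // alpha blends dense vs sparse distances: 1.0 means dense-only, 0.0 sparse-only.
  rrf: GoogleCloudAiplatformV1FindNeighborsRequestQueryRRF(alpha: 0.7),
  // The required `datapoint` (the query vector as an IndexDatapoint) still
  // needs to be set before sending the request.
);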
+ id: json_['id'] as core.String?, + neighbors: (json_['neighbors'] as core.List?) ?.map((value) => - GoogleCloudAiplatformV1FeatureNoiseSigmaNoiseSigmaForFeature - .fromJson(value as core.Map)) + GoogleCloudAiplatformV1FindNeighborsResponseNeighbor.fromJson( + value as core.Map)) .toList(), ); core.Map toJson() => { - if (noiseSigma != null) 'noiseSigma': noiseSigma!, + if (id != null) 'id': id!, + if (neighbors != null) 'neighbors': neighbors!, }; } -/// Noise sigma for a single feature. -class GoogleCloudAiplatformV1FeatureNoiseSigmaNoiseSigmaForFeature { - /// The name of the input feature for which noise sigma is provided. +/// A neighbor of the query vector. +class GoogleCloudAiplatformV1FindNeighborsResponseNeighbor { + /// The datapoint of the neighbor. /// - /// The features are defined in explanation metadata inputs. - core.String? name; + /// Note that full datapoints are returned only when "return_full_datapoint" + /// is set to true. Otherwise, only the "datapoint_id" and "crowding_tag" + /// fields are populated. + GoogleCloudAiplatformV1IndexDatapoint? datapoint; - /// This represents the standard deviation of the Gaussian kernel that will be - /// used to add noise to the feature prior to computing gradients. + /// The distance between the neighbor and the dense embedding query. + core.double? distance; + + /// The distance between the neighbor and the query sparse_embedding. + core.double? sparseDistance; + + GoogleCloudAiplatformV1FindNeighborsResponseNeighbor({ + this.datapoint, + this.distance, + this.sparseDistance, + }); + + GoogleCloudAiplatformV1FindNeighborsResponseNeighbor.fromJson(core.Map json_) + : this( + datapoint: json_.containsKey('datapoint') + ? GoogleCloudAiplatformV1IndexDatapoint.fromJson( + json_['datapoint'] as core.Map) + : null, + distance: (json_['distance'] as core.num?)?.toDouble(), + sparseDistance: (json_['sparseDistance'] as core.num?)?.toDouble(), + ); + + core.Map toJson() => { + if (datapoint != null) 'datapoint': datapoint!, + if (distance != null) 'distance': distance!, + if (sparseDistance != null) 'sparseDistance': sparseDistance!, + }; +} + +/// Input for fluency metric. +class GoogleCloudAiplatformV1FluencyInput { + /// Fluency instance. /// - /// Similar to noise_sigma but represents the noise added to the current - /// feature. Defaults to 0.1. - core.double? sigma; + /// Required. + GoogleCloudAiplatformV1FluencyInstance? instance; - GoogleCloudAiplatformV1FeatureNoiseSigmaNoiseSigmaForFeature({ - this.name, - this.sigma, + /// Spec for fluency score metric. + /// + /// Required. + GoogleCloudAiplatformV1FluencySpec? metricSpec; + + GoogleCloudAiplatformV1FluencyInput({ + this.instance, + this.metricSpec, }); - GoogleCloudAiplatformV1FeatureNoiseSigmaNoiseSigmaForFeature.fromJson( - core.Map json_) + GoogleCloudAiplatformV1FluencyInput.fromJson(core.Map json_) : this( - name: json_['name'] as core.String?, - sigma: (json_['sigma'] as core.num?)?.toDouble(), + instance: json_.containsKey('instance') + ? GoogleCloudAiplatformV1FluencyInstance.fromJson( + json_['instance'] as core.Map) + : null, + metricSpec: json_.containsKey('metricSpec') + ? 
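// A minimal usage sketch (not part of the regenerated code): an RRF-ranked
// FindNeighbors query plus a helper that walks the response types above.
// Assumes these classes are exported by the regenerated aiplatform v1
// library; the query's `datapoint` (an IndexDatapoint) is omitted because
// that class is defined outside this hunk.
final rrfQuery = GoogleCloudAiplatformV1FindNeighborsRequestQuery(
  neighborCount: 10,
  // alpha: 0 returns only sparse results, 1 only dense (see the RRF doc).
  rrf: GoogleCloudAiplatformV1FindNeighborsRequestQueryRRF(alpha: 0.5),
);

void printNeighbors(GoogleCloudAiplatformV1FindNeighborsResponse response) {
  for (final nearest in response.nearestNeighbors ?? []) {
    for (final neighbor in nearest.neighbors ?? []) {
      // sparseDistance is only populated for sparse-embedding matches.
      print('${nearest.id}: ${neighbor.distance} / ${neighbor.sparseDistance}');
    }
  }
}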
GoogleCloudAiplatformV1FluencySpec.fromJson( + json_['metricSpec'] as core.Map) + : null, ); core.Map toJson() => { - if (name != null) 'name': name!, - if (sigma != null) 'sigma': sigma!, + if (instance != null) 'instance': instance!, + if (metricSpec != null) 'metricSpec': metricSpec!, }; } -/// Vertex AI Feature Online Store provides a centralized repository for serving -/// ML features and embedding indexes at low latency. -/// -/// The Feature Online Store is a top-level container. -class GoogleCloudAiplatformV1FeatureOnlineStore { - /// Contains settings for the Cloud Bigtable instance that will be created to - /// serve featureValues for all FeatureViews under this FeatureOnlineStore. - GoogleCloudAiplatformV1FeatureOnlineStoreBigtable? bigtable; +/// Spec for fluency instance. +typedef GoogleCloudAiplatformV1FluencyInstance = $Instance01; - /// Timestamp when this FeatureOnlineStore was created. +/// Spec for fluency result. +class GoogleCloudAiplatformV1FluencyResult { + /// Confidence for fluency score. /// /// Output only. - core.String? createTime; + core.double? confidence; - /// The dedicated serving endpoint for this FeatureOnlineStore, which is - /// different from common Vertex service endpoint. + /// Explanation for fluency score. /// - /// Optional. - GoogleCloudAiplatformV1FeatureOnlineStoreDedicatedServingEndpoint? - dedicatedServingEndpoint; + /// Output only. + core.String? explanation; - /// Customer-managed encryption key spec for data storage. - /// - /// If set, online store will be secured by this key. + /// Fluency score. /// - /// Optional. - GoogleCloudAiplatformV1EncryptionSpec? encryptionSpec; + /// Output only. + core.double? score; - /// Used to perform consistent read-modify-write updates. - /// - /// If not set, a blind "overwrite" update happens. - /// - /// Optional. - core.String? etag; + GoogleCloudAiplatformV1FluencyResult({ + this.confidence, + this.explanation, + this.score, + }); - /// The labels with user-defined metadata to organize your FeatureOnlineStore. - /// - /// Label keys and values can be no longer than 64 characters (Unicode - /// codepoints), can only contain lowercase letters, numeric characters, - /// underscores and dashes. International characters are allowed. See - /// https://goo.gl/xmQnxf for more information on and examples of labels. No - /// more than 64 user labels can be associated with one - /// FeatureOnlineStore(System labels are excluded)." System reserved label - /// keys are prefixed with "aiplatform.googleapis.com/" and are immutable. + GoogleCloudAiplatformV1FluencyResult.fromJson(core.Map json_) + : this( + confidence: (json_['confidence'] as core.num?)?.toDouble(), + explanation: json_['explanation'] as core.String?, + score: (json_['score'] as core.num?)?.toDouble(), + ); + + core.Map toJson() => { + if (confidence != null) 'confidence': confidence!, + if (explanation != null) 'explanation': explanation!, + if (score != null) 'score': score!, + }; +} + +/// Spec for fluency score metric. +typedef GoogleCloudAiplatformV1FluencySpec = $Spec; + +/// Assigns the input data to training, validation, and test sets as per the +/// given fractions. +/// +/// Any of `training_fraction`, `validation_fraction` and `test_fraction` may +/// optionally be provided, they must sum to up to 1. If the provided ones sum +/// to less than 1, the remainder is assigned to sets as decided by Vertex AI. 
+/// If none of the fractions are set, by default roughly 80% of data is used for +/// training, 10% for validation, and 10% for test. +typedef GoogleCloudAiplatformV1FractionSplit = $FractionSplit; + +/// Input for fulfillment metric. +class GoogleCloudAiplatformV1FulfillmentInput { + /// Fulfillment instance. /// - /// Optional. - core.Map? labels; + /// Required. + GoogleCloudAiplatformV1FulfillmentInstance? instance; - /// Identifier. + /// Spec for fulfillment score metric. /// - /// Name of the FeatureOnlineStore. Format: - /// `projects/{project}/locations/{location}/featureOnlineStores/{featureOnlineStore}` - core.String? name; + /// Required. + GoogleCloudAiplatformV1FulfillmentSpec? metricSpec; - /// Contains settings for the Optimized store that will be created to serve - /// featureValues for all FeatureViews under this FeatureOnlineStore. + GoogleCloudAiplatformV1FulfillmentInput({ + this.instance, + this.metricSpec, + }); + + GoogleCloudAiplatformV1FulfillmentInput.fromJson(core.Map json_) + : this( + instance: json_.containsKey('instance') + ? GoogleCloudAiplatformV1FulfillmentInstance.fromJson( + json_['instance'] as core.Map) + : null, + metricSpec: json_.containsKey('metricSpec') + ? GoogleCloudAiplatformV1FulfillmentSpec.fromJson( + json_['metricSpec'] as core.Map) + : null, + ); + + core.Map toJson() => { + if (instance != null) 'instance': instance!, + if (metricSpec != null) 'metricSpec': metricSpec!, + }; +} + +/// Spec for fulfillment instance. +class GoogleCloudAiplatformV1FulfillmentInstance { + /// Inference instruction prompt to compare prediction with. /// - /// When choose Optimized storage type, need to set - /// PrivateServiceConnectConfig.enable_private_service_connect to use private - /// endpoint. Otherwise will use public endpoint by default. - GoogleCloudAiplatformV1FeatureOnlineStoreOptimized? optimized; + /// Required. + core.String? instruction; - /// Reserved for future use. + /// Output of the evaluated model. /// - /// Output only. - core.bool? satisfiesPzi; + /// Required. + core.String? prediction; - /// Reserved for future use. + GoogleCloudAiplatformV1FulfillmentInstance({ + this.instruction, + this.prediction, + }); + + GoogleCloudAiplatformV1FulfillmentInstance.fromJson(core.Map json_) + : this( + instruction: json_['instruction'] as core.String?, + prediction: json_['prediction'] as core.String?, + ); + + core.Map toJson() => { + if (instruction != null) 'instruction': instruction!, + if (prediction != null) 'prediction': prediction!, + }; +} + +/// Spec for fulfillment result. +class GoogleCloudAiplatformV1FulfillmentResult { + /// Confidence for fulfillment score. /// /// Output only. - core.bool? satisfiesPzs; + core.double? confidence; - /// State of the featureOnlineStore. + /// Explanation for fulfillment score. /// /// Output only. - /// Possible string values are: - /// - "STATE_UNSPECIFIED" : Default value. This value is unused. - /// - "STABLE" : State when the featureOnlineStore configuration is not being - /// updated and the fields reflect the current configuration of the - /// featureOnlineStore. The featureOnlineStore is usable in this state. - /// - "UPDATING" : The state of the featureOnlineStore configuration when it - /// is being updated. During an update, the fields reflect either the original - /// configuration or the updated configuration of the featureOnlineStore. The - /// featureOnlineStore is still usable in this state. - core.String? state; + core.String? 
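// A hypothetical sketch (not part of the regenerated code) of building a
// fulfillment-metric input from the classes above: the instance carries the
// instruction/prediction pair to score, and the required metric spec is left
// at its defaults. The constructor calls mirror this hunk; the zero-argument
// FulfillmentSpec() is an assumption about the shared $Spec helper.
final fulfillmentInput = GoogleCloudAiplatformV1FulfillmentInput(
  instance: GoogleCloudAiplatformV1FulfillmentInstance(
    instruction: 'Summarise the report in two sentences.',
    prediction: 'The report covers Q3 revenue and headcount growth.',
  ),
  metricSpec: GoogleCloudAiplatformV1FulfillmentSpec(),
);
// The serialised form mirrors the discovery schema.
final fulfillmentJson = fulfillmentInput.toJson();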
explanation; - /// Timestamp when this FeatureOnlineStore was last updated. + /// Fulfillment score. /// /// Output only. - core.String? updateTime; + core.double? score; - GoogleCloudAiplatformV1FeatureOnlineStore({ - this.bigtable, - this.createTime, - this.dedicatedServingEndpoint, - this.encryptionSpec, - this.etag, - this.labels, - this.name, - this.optimized, - this.satisfiesPzi, - this.satisfiesPzs, - this.state, - this.updateTime, + GoogleCloudAiplatformV1FulfillmentResult({ + this.confidence, + this.explanation, + this.score, }); - GoogleCloudAiplatformV1FeatureOnlineStore.fromJson(core.Map json_) + GoogleCloudAiplatformV1FulfillmentResult.fromJson(core.Map json_) : this( - bigtable: json_.containsKey('bigtable') - ? GoogleCloudAiplatformV1FeatureOnlineStoreBigtable.fromJson( - json_['bigtable'] as core.Map) - : null, - createTime: json_['createTime'] as core.String?, - dedicatedServingEndpoint: json_ - .containsKey('dedicatedServingEndpoint') - ? GoogleCloudAiplatformV1FeatureOnlineStoreDedicatedServingEndpoint - .fromJson(json_['dedicatedServingEndpoint'] - as core.Map) - : null, - encryptionSpec: json_.containsKey('encryptionSpec') - ? GoogleCloudAiplatformV1EncryptionSpec.fromJson( - json_['encryptionSpec'] - as core.Map) - : null, - etag: json_['etag'] as core.String?, - labels: - (json_['labels'] as core.Map?)?.map( - (key, value) => core.MapEntry( - key, - value as core.String, - ), - ), - name: json_['name'] as core.String?, - optimized: json_.containsKey('optimized') - ? GoogleCloudAiplatformV1FeatureOnlineStoreOptimized.fromJson( - json_['optimized'] as core.Map) - : null, - satisfiesPzi: json_['satisfiesPzi'] as core.bool?, - satisfiesPzs: json_['satisfiesPzs'] as core.bool?, - state: json_['state'] as core.String?, - updateTime: json_['updateTime'] as core.String?, + confidence: (json_['confidence'] as core.num?)?.toDouble(), + explanation: json_['explanation'] as core.String?, + score: (json_['score'] as core.num?)?.toDouble(), ); core.Map toJson() => { - if (bigtable != null) 'bigtable': bigtable!, - if (createTime != null) 'createTime': createTime!, - if (dedicatedServingEndpoint != null) - 'dedicatedServingEndpoint': dedicatedServingEndpoint!, - if (encryptionSpec != null) 'encryptionSpec': encryptionSpec!, - if (etag != null) 'etag': etag!, - if (labels != null) 'labels': labels!, - if (name != null) 'name': name!, - if (optimized != null) 'optimized': optimized!, - if (satisfiesPzi != null) 'satisfiesPzi': satisfiesPzi!, - if (satisfiesPzs != null) 'satisfiesPzs': satisfiesPzs!, - if (state != null) 'state': state!, - if (updateTime != null) 'updateTime': updateTime!, + if (confidence != null) 'confidence': confidence!, + if (explanation != null) 'explanation': explanation!, + if (score != null) 'score': score!, }; } -class GoogleCloudAiplatformV1FeatureOnlineStoreBigtable { - /// Autoscaling config applied to Bigtable Instance. +/// Spec for fulfillment metric. +typedef GoogleCloudAiplatformV1FulfillmentSpec = $Spec; + +/// A predicted \[FunctionCall\] returned from the model that contains a string +/// representing the \[FunctionDeclaration.name\] and a structured JSON object +/// containing the parameters and their values. +class GoogleCloudAiplatformV1FunctionCall { + /// The function parameters and values in JSON object format. + /// + /// See \[FunctionDeclaration.parameters\] for parameter details. + /// + /// Optional. Required. + /// + /// The values for Object must be JSON objects. 
It can consist of `num`, + /// `String`, `bool` and `null` as well as `Map` and `List` values. + core.Map? args; + + /// The name of the function to call. + /// + /// Matches \[FunctionDeclaration.name\]. /// /// Required. - GoogleCloudAiplatformV1FeatureOnlineStoreBigtableAutoScaling? autoScaling; + core.String? name; - GoogleCloudAiplatformV1FeatureOnlineStoreBigtable({ - this.autoScaling, + GoogleCloudAiplatformV1FunctionCall({ + this.args, + this.name, }); - GoogleCloudAiplatformV1FeatureOnlineStoreBigtable.fromJson(core.Map json_) + GoogleCloudAiplatformV1FunctionCall.fromJson(core.Map json_) : this( - autoScaling: json_.containsKey('autoScaling') - ? GoogleCloudAiplatformV1FeatureOnlineStoreBigtableAutoScaling - .fromJson(json_['autoScaling'] - as core.Map) + args: json_.containsKey('args') + ? json_['args'] as core.Map : null, + name: json_['name'] as core.String?, ); core.Map toJson() => { - if (autoScaling != null) 'autoScaling': autoScaling!, + if (args != null) 'args': args!, + if (name != null) 'name': name!, }; } -class GoogleCloudAiplatformV1FeatureOnlineStoreBigtableAutoScaling { - /// A percentage of the cluster's CPU capacity. +/// Function calling config. +class GoogleCloudAiplatformV1FunctionCallingConfig { + /// Function names to call. /// - /// Can be from 10% to 80%. When a cluster's CPU utilization exceeds the - /// target that you have set, Bigtable immediately adds nodes to the cluster. - /// When CPU utilization is substantially lower than the target, Bigtable - /// removes nodes. If not set will default to 50%. + /// Only set when the Mode is ANY. Function names should match + /// \[FunctionDeclaration.name\]. With mode set to ANY, model will predict a + /// function call from the set of function names provided. /// /// Optional. - core.int? cpuUtilizationTarget; - - /// The maximum number of nodes to scale up to. - /// - /// Must be greater than or equal to min_node_count, and less than or equal to - /// 10 times of 'min_node_count'. - /// - /// Required. - core.int? maxNodeCount; + core.List? allowedFunctionNames; - /// The minimum number of nodes to scale down to. - /// - /// Must be greater than or equal to 1. + /// Function calling mode. /// - /// Required. - core.int? minNodeCount; + /// Optional. + /// Possible string values are: + /// - "MODE_UNSPECIFIED" : Unspecified function calling mode. This value + /// should not be used. + /// - "AUTO" : Default model behavior, model decides to predict either + /// function calls or natural language response. + /// - "ANY" : Model is constrained to always predicting function calls only. + /// If "allowed_function_names" are set, the predicted function calls will be + /// limited to any one of "allowed_function_names", else the predicted + /// function calls will be any one of the provided "function_declarations". + /// - "NONE" : Model will not predict any function calls. Model behavior is + /// same as when not passing any function declarations. + core.String? 
mode; - GoogleCloudAiplatformV1FeatureOnlineStoreBigtableAutoScaling({ - this.cpuUtilizationTarget, - this.maxNodeCount, - this.minNodeCount, + GoogleCloudAiplatformV1FunctionCallingConfig({ + this.allowedFunctionNames, + this.mode, }); - GoogleCloudAiplatformV1FeatureOnlineStoreBigtableAutoScaling.fromJson( - core.Map json_) + GoogleCloudAiplatformV1FunctionCallingConfig.fromJson(core.Map json_) : this( - cpuUtilizationTarget: json_['cpuUtilizationTarget'] as core.int?, - maxNodeCount: json_['maxNodeCount'] as core.int?, - minNodeCount: json_['minNodeCount'] as core.int?, + allowedFunctionNames: (json_['allowedFunctionNames'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + mode: json_['mode'] as core.String?, ); core.Map toJson() => { - if (cpuUtilizationTarget != null) - 'cpuUtilizationTarget': cpuUtilizationTarget!, - if (maxNodeCount != null) 'maxNodeCount': maxNodeCount!, - if (minNodeCount != null) 'minNodeCount': minNodeCount!, + if (allowedFunctionNames != null) + 'allowedFunctionNames': allowedFunctionNames!, + if (mode != null) 'mode': mode!, }; } -/// The dedicated serving endpoint for this FeatureOnlineStore. +/// Structured representation of a function declaration as defined by the +/// [OpenAPI 3.0 specification](https://spec.openapis.org/oas/v3.0.3). /// -/// Only need to set when you choose Optimized storage type. Public endpoint is -/// provisioned by default. -class GoogleCloudAiplatformV1FeatureOnlineStoreDedicatedServingEndpoint { - /// Private service connect config. +/// Included in this declaration are the function name, description, parameters +/// and response type. This FunctionDeclaration is a representation of a block +/// of code that can be used as a `Tool` by the model and executed by the +/// client. +class GoogleCloudAiplatformV1FunctionDeclaration { + /// Description and purpose of the function. /// - /// The private service connection is available only for Optimized storage - /// type, not for embedding management now. If - /// PrivateServiceConnectConfig.enable_private_service_connect set to true, - /// customers will use private service connection to send request. Otherwise, - /// the connection will set to public endpoint. + /// Model uses it to decide how and whether to call the function. /// /// Optional. - GoogleCloudAiplatformV1PrivateServiceConnectConfig? - privateServiceConnectConfig; + core.String? description; - /// This field will be populated with the domain name to use for this - /// FeatureOnlineStore + /// The name of the function to call. /// - /// Output only. - core.String? publicEndpointDomainName; + /// Must start with a letter or an underscore. Must be a-z, A-Z, 0-9, or + /// contain underscores, dots and dashes, with a maximum length of 64. + /// + /// Required. + core.String? name; - /// The name of the service attachment resource. + /// Describes the parameters to this function in JSON Schema Object format. /// - /// Populated if private service connect is enabled and after FeatureViewSync - /// is created. + /// Reflects the Open API 3.03 Parameter Object. string Key: the name of the + /// parameter. Parameter names are case sensitive. Schema Value: the Schema + /// defining the type used for the parameter. For function with no parameters, + /// this can be left unset. Parameter names must start with a letter or an + /// underscore and must only contain chars a-z, A-Z, 0-9, or underscores with + /// a maximum length of 64. 
Example with 1 required and 1 optional parameter: + /// type: OBJECT properties: param1: type: STRING param2: type: INTEGER + /// required: - param1 /// - /// Output only. - core.String? serviceAttachment; + /// Optional. + GoogleCloudAiplatformV1Schema? parameters; - GoogleCloudAiplatformV1FeatureOnlineStoreDedicatedServingEndpoint({ - this.privateServiceConnectConfig, - this.publicEndpointDomainName, - this.serviceAttachment, + /// Describes the output from this function in JSON Schema format. + /// + /// Reflects the Open API 3.03 Response Object. The Schema defines the type + /// used for the response value of the function. + /// + /// Optional. + GoogleCloudAiplatformV1Schema? response; + + GoogleCloudAiplatformV1FunctionDeclaration({ + this.description, + this.name, + this.parameters, + this.response, }); - GoogleCloudAiplatformV1FeatureOnlineStoreDedicatedServingEndpoint.fromJson( - core.Map json_) + GoogleCloudAiplatformV1FunctionDeclaration.fromJson(core.Map json_) : this( - privateServiceConnectConfig: - json_.containsKey('privateServiceConnectConfig') - ? GoogleCloudAiplatformV1PrivateServiceConnectConfig.fromJson( - json_['privateServiceConnectConfig'] - as core.Map) - : null, - publicEndpointDomainName: - json_['publicEndpointDomainName'] as core.String?, - serviceAttachment: json_['serviceAttachment'] as core.String?, + description: json_['description'] as core.String?, + name: json_['name'] as core.String?, + parameters: json_.containsKey('parameters') + ? GoogleCloudAiplatformV1Schema.fromJson( + json_['parameters'] as core.Map) + : null, + response: json_.containsKey('response') + ? GoogleCloudAiplatformV1Schema.fromJson( + json_['response'] as core.Map) + : null, ); core.Map toJson() => { - if (privateServiceConnectConfig != null) - 'privateServiceConnectConfig': privateServiceConnectConfig!, - if (publicEndpointDomainName != null) - 'publicEndpointDomainName': publicEndpointDomainName!, - if (serviceAttachment != null) 'serviceAttachment': serviceAttachment!, + if (description != null) 'description': description!, + if (name != null) 'name': name!, + if (parameters != null) 'parameters': parameters!, + if (response != null) 'response': response!, }; } -/// Optimized storage type -typedef GoogleCloudAiplatformV1FeatureOnlineStoreOptimized = $Empty; +/// The result output from a \[FunctionCall\] that contains a string +/// representing the \[FunctionDeclaration.name\] and a structured JSON object +/// containing any output from the function is used as context to the model. +/// +/// This should contain the result of a \[FunctionCall\] made based on model +/// prediction. +class GoogleCloudAiplatformV1FunctionResponse { + /// The name of the function to call. + /// + /// Matches \[FunctionDeclaration.name\] and \[FunctionCall.name\]. + /// + /// Required. + core.String? name; -/// Selector for Features of an EntityType. -class GoogleCloudAiplatformV1FeatureSelector { - /// Matches Features based on ID. + /// The function response in JSON object format. + /// + /// Use "output" key to specify function output and "error" key to specify + /// error details (if any). If "output" and "error" keys are not specified, + /// then whole "response" is treated as function output. /// /// Required. - GoogleCloudAiplatformV1IdMatcher? idMatcher; + /// + /// The values for Object must be JSON objects. It can consist of `num`, + /// `String`, `bool` and `null` as well as `Map` and `List` values. + core.Map? 
response; - GoogleCloudAiplatformV1FeatureSelector({ - this.idMatcher, + GoogleCloudAiplatformV1FunctionResponse({ + this.name, + this.response, }); - GoogleCloudAiplatformV1FeatureSelector.fromJson(core.Map json_) + GoogleCloudAiplatformV1FunctionResponse.fromJson(core.Map json_) : this( - idMatcher: json_.containsKey('idMatcher') - ? GoogleCloudAiplatformV1IdMatcher.fromJson( - json_['idMatcher'] as core.Map) + name: json_['name'] as core.String?, + response: json_.containsKey('response') + ? json_['response'] as core.Map : null, ); core.Map toJson() => { - if (idMatcher != null) 'idMatcher': idMatcher!, + if (name != null) 'name': name!, + if (response != null) 'response': response!, }; } -/// Stats and Anomaly generated at specific timestamp for specific Feature. -/// -/// The start_time and end_time are used to define the time range of the dataset -/// that current stats belongs to, e.g. prediction traffic is bucketed into -/// prediction datasets by time window. If the Dataset is not defined by time -/// window, start_time = end_time. Timestamp of the stats and anomalies always -/// refers to end_time. Raw stats and anomalies are stored in stats_uri or -/// anomaly_uri in the tensorflow defined protos. Field data_stats contains -/// almost identical information with the raw stats in Vertex AI defined proto, -/// for UI to display. -class GoogleCloudAiplatformV1FeatureStatsAnomaly { - /// This is the threshold used when detecting anomalies. +/// The Google Cloud Storage location where the output is to be written to. +class GoogleCloudAiplatformV1GcsDestination { + /// Google Cloud Storage URI to output directory. /// - /// The threshold can be changed by user, so this one might be different from - /// ThresholdConfig.value. - core.double? anomalyDetectionThreshold; - - /// Path of the anomaly file for current feature values in Cloud Storage - /// bucket. + /// If the uri doesn't end with '/', a '/' will be automatically appended. The + /// directory is created if it doesn't exist. /// - /// Format: gs:////anomalies. Example: - /// gs://monitoring_bucket/feature_name/anomalies. Stats are stored as binary - /// format with Protobuf message Anoamlies are stored as binary format with - /// Protobuf message - /// [tensorflow.metadata.v0.AnomalyInfo](https://github.com/tensorflow/metadata/blob/master/tensorflow_metadata/proto/v0/anomalies.proto). - core.String? anomalyUri; + /// Required. + core.String? outputUriPrefix; - /// Deviation from the current stats to baseline stats. - /// - /// 1. For categorical feature, the distribution distance is calculated by - /// L-inifinity norm. 2. For numerical feature, the distribution distance is - /// calculated by Jensen–Shannon divergence. - core.double? distributionDeviation; + GoogleCloudAiplatformV1GcsDestination({ + this.outputUriPrefix, + }); - /// The end timestamp of window where stats were generated. - /// - /// For objectives where time window doesn't make sense (e.g. Featurestore - /// Snapshot Monitoring), end_time indicates the timestamp of the data used to - /// generate stats (e.g. timestamp we take snapshots for feature values). - core.String? endTime; + GoogleCloudAiplatformV1GcsDestination.fromJson(core.Map json_) + : this( + outputUriPrefix: json_['outputUriPrefix'] as core.String?, + ); - /// Feature importance score, only populated when cross-feature monitoring is - /// enabled. 
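// A minimal function-calling sketch (not part of the regenerated code),
// using only types from this hunk: declare a callable function, constrain
// the model to it, and wrap a tool result for the follow-up turn. The
// function name and payloads are made up; `parameters` is left unset, which
// the doc above allows for functions that take no arguments.
final weatherDeclaration = GoogleCloudAiplatformV1FunctionDeclaration(
  name: 'getCurrentWeather',
  description: 'Returns the current weather for a given location.',
);
final callingConfig = GoogleCloudAiplatformV1FunctionCallingConfig(
  mode: 'ANY',
  allowedFunctionNames: ['getCurrentWeather'],
);

// When the model emits a FunctionCall, echo the tool output back to it.
GoogleCloudAiplatformV1FunctionResponse replyTo(
        GoogleCloudAiplatformV1FunctionCall call) =>
    GoogleCloudAiplatformV1FunctionResponse(
      name: call.name,
      response: {'output': 'Sunny, 21 C'},
    );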
- /// - /// For now only used to represent feature attribution score within range \[0, - /// 1\] for ModelDeploymentMonitoringObjectiveType.FEATURE_ATTRIBUTION_SKEW - /// and ModelDeploymentMonitoringObjectiveType.FEATURE_ATTRIBUTION_DRIFT. - core.double? score; + core.Map toJson() => { + if (outputUriPrefix != null) 'outputUriPrefix': outputUriPrefix!, + }; +} - /// The start timestamp of window where stats were generated. +/// The Google Cloud Storage location for the input content. +class GoogleCloudAiplatformV1GcsSource { + /// Google Cloud Storage URI(-s) to the input file(s). /// - /// For objectives where time window doesn't make sense (e.g. Featurestore - /// Snapshot Monitoring), start_time is only used to indicate the monitoring - /// intervals, so it always equals to (end_time - monitoring_interval). - core.String? startTime; - - /// Path of the stats file for current feature values in Cloud Storage bucket. + /// May contain wildcards. For more information on wildcards, see + /// https://cloud.google.com/storage/docs/gsutil/addlhelp/WildcardNames. /// - /// Format: gs:////stats. Example: gs://monitoring_bucket/feature_name/stats. - /// Stats are stored as binary format with Protobuf message - /// [tensorflow.metadata.v0.FeatureNameStatistics](https://github.com/tensorflow/metadata/blob/master/tensorflow_metadata/proto/v0/statistics.proto). - core.String? statsUri; + /// Required. + core.List? uris; - GoogleCloudAiplatformV1FeatureStatsAnomaly({ - this.anomalyDetectionThreshold, - this.anomalyUri, - this.distributionDeviation, - this.endTime, - this.score, - this.startTime, - this.statsUri, + GoogleCloudAiplatformV1GcsSource({ + this.uris, }); - GoogleCloudAiplatformV1FeatureStatsAnomaly.fromJson(core.Map json_) + GoogleCloudAiplatformV1GcsSource.fromJson(core.Map json_) : this( - anomalyDetectionThreshold: - (json_['anomalyDetectionThreshold'] as core.num?)?.toDouble(), - anomalyUri: json_['anomalyUri'] as core.String?, - distributionDeviation: - (json_['distributionDeviation'] as core.num?)?.toDouble(), - endTime: json_['endTime'] as core.String?, - score: (json_['score'] as core.num?)?.toDouble(), - startTime: json_['startTime'] as core.String?, - statsUri: json_['statsUri'] as core.String?, + uris: (json_['uris'] as core.List?) + ?.map((value) => value as core.String) + .toList(), ); core.Map toJson() => { - if (anomalyDetectionThreshold != null) - 'anomalyDetectionThreshold': anomalyDetectionThreshold!, - if (anomalyUri != null) 'anomalyUri': anomalyUri!, - if (distributionDeviation != null) - 'distributionDeviation': distributionDeviation!, - if (endTime != null) 'endTime': endTime!, - if (score != null) 'score': score!, - if (startTime != null) 'startTime': startTime!, - if (statsUri != null) 'statsUri': statsUri!, + if (uris != null) 'uris': uris!, }; } -/// Value for a feature. -class GoogleCloudAiplatformV1FeatureValue { - /// A list of bool type feature value. - GoogleCloudAiplatformV1BoolArray? boolArrayValue; - - /// Bool type feature value. - core.bool? boolValue; - - /// Bytes feature value. - core.String? bytesValue; - core.List get bytesValueAsBytes => - convert.base64.decode(bytesValue!); - - set bytesValueAsBytes(core.List bytes_) { - bytesValue = - convert.base64.encode(bytes_).replaceAll('/', '_').replaceAll('+', '-'); - } - - /// A list of double type feature value. - GoogleCloudAiplatformV1DoubleArray? doubleArrayValue; - - /// Double type feature value. - core.double? doubleValue; +/// Request message for \[PredictionService.GenerateContent\]. 
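// A small sketch (not part of the regenerated code) of the Cloud Storage
// input/output locations defined above; the bucket and object names are
// placeholders rather than values from this diff.
final gcsSource = GoogleCloudAiplatformV1GcsSource(
  uris: ['gs://example-bucket/inputs/*.jsonl'], // wildcards are allowed
);
final gcsDestination = GoogleCloudAiplatformV1GcsDestination(
  // A trailing '/' is appended automatically if missing.
  outputUriPrefix: 'gs://example-bucket/outputs/',
);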
+class GoogleCloudAiplatformV1GenerateContentRequest { + /// The name of the cached content used as context to serve the prediction. + /// + /// Note: only used in explicit caching, where users can have control over + /// caching (e.g. what content to cache) and enjoy guaranteed cost savings. + /// Format: + /// `projects/{project}/locations/{location}/cachedContents/{cachedContent}` + /// + /// Optional. + core.String? cachedContent; - /// A list of int64 type feature value. - GoogleCloudAiplatformV1Int64Array? int64ArrayValue; + /// The content of the current conversation with the model. + /// + /// For single-turn queries, this is a single instance. For multi-turn + /// queries, this is a repeated field that contains conversation history + + /// latest request. + /// + /// Required. + core.List? contents; - /// Int64 feature value. - core.String? int64Value; + /// Generation config. + /// + /// Optional. + GoogleCloudAiplatformV1GenerationConfig? generationConfig; - /// Metadata of feature value. - GoogleCloudAiplatformV1FeatureValueMetadata? metadata; + /// The labels with user-defined metadata for the request. + /// + /// It is used for billing and reporting only. Label keys and values can be no + /// longer than 63 characters (Unicode codepoints) and can only contain + /// lowercase letters, numeric characters, underscores, and dashes. + /// International characters are allowed. Label values are optional. Label + /// keys must start with a letter. + /// + /// Optional. + core.Map? labels; - /// A list of string type feature value. - GoogleCloudAiplatformV1StringArray? stringArrayValue; + /// Per request settings for blocking unsafe content. + /// + /// Enforced on GenerateContentResponse.candidates. + /// + /// Optional. + core.List? safetySettings; - /// String feature value. - core.String? stringValue; + /// The user provided system instructions for the model. + /// + /// Note: only text should be used in parts and content in each part will be + /// in a separate paragraph. + /// + /// Optional. + GoogleCloudAiplatformV1Content? systemInstruction; - /// A struct type feature value. - GoogleCloudAiplatformV1StructValue? structValue; + /// Tool config. + /// + /// This config is shared for all tools provided in the request. + /// + /// Optional. + GoogleCloudAiplatformV1ToolConfig? toolConfig; - GoogleCloudAiplatformV1FeatureValue({ - this.boolArrayValue, - this.boolValue, - this.bytesValue, - this.doubleArrayValue, - this.doubleValue, - this.int64ArrayValue, - this.int64Value, - this.metadata, - this.stringArrayValue, - this.stringValue, - this.structValue, + /// A list of `Tools` the model may use to generate the next response. + /// + /// A `Tool` is a piece of code that enables the system to interact with + /// external systems to perform an action, or set of actions, outside of + /// knowledge and scope of the model. + /// + /// Optional. + core.List? tools; + + GoogleCloudAiplatformV1GenerateContentRequest({ + this.cachedContent, + this.contents, + this.generationConfig, + this.labels, + this.safetySettings, + this.systemInstruction, + this.toolConfig, + this.tools, }); - GoogleCloudAiplatformV1FeatureValue.fromJson(core.Map json_) + GoogleCloudAiplatformV1GenerateContentRequest.fromJson(core.Map json_) : this( - boolArrayValue: json_.containsKey('boolArrayValue') - ? 
GoogleCloudAiplatformV1BoolArray.fromJson( - json_['boolArrayValue'] - as core.Map) - : null, - boolValue: json_['boolValue'] as core.bool?, - bytesValue: json_['bytesValue'] as core.String?, - doubleArrayValue: json_.containsKey('doubleArrayValue') - ? GoogleCloudAiplatformV1DoubleArray.fromJson( - json_['doubleArrayValue'] - as core.Map) - : null, - doubleValue: (json_['doubleValue'] as core.num?)?.toDouble(), - int64ArrayValue: json_.containsKey('int64ArrayValue') - ? GoogleCloudAiplatformV1Int64Array.fromJson( - json_['int64ArrayValue'] + cachedContent: json_['cachedContent'] as core.String?, + contents: (json_['contents'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1Content.fromJson( + value as core.Map)) + .toList(), + generationConfig: json_.containsKey('generationConfig') + ? GoogleCloudAiplatformV1GenerationConfig.fromJson( + json_['generationConfig'] as core.Map) : null, - int64Value: json_['int64Value'] as core.String?, - metadata: json_.containsKey('metadata') - ? GoogleCloudAiplatformV1FeatureValueMetadata.fromJson( - json_['metadata'] as core.Map) - : null, - stringArrayValue: json_.containsKey('stringArrayValue') - ? GoogleCloudAiplatformV1StringArray.fromJson( - json_['stringArrayValue'] + labels: + (json_['labels'] as core.Map?)?.map( + (key, value) => core.MapEntry( + key, + value as core.String, + ), + ), + safetySettings: (json_['safetySettings'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1SafetySetting.fromJson( + value as core.Map)) + .toList(), + systemInstruction: json_.containsKey('systemInstruction') + ? GoogleCloudAiplatformV1Content.fromJson( + json_['systemInstruction'] as core.Map) : null, - stringValue: json_['stringValue'] as core.String?, - structValue: json_.containsKey('structValue') - ? GoogleCloudAiplatformV1StructValue.fromJson( - json_['structValue'] as core.Map) + toolConfig: json_.containsKey('toolConfig') + ? GoogleCloudAiplatformV1ToolConfig.fromJson( + json_['toolConfig'] as core.Map) : null, + tools: (json_['tools'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1Tool.fromJson( + value as core.Map)) + .toList(), ); core.Map toJson() => { - if (boolArrayValue != null) 'boolArrayValue': boolArrayValue!, - if (boolValue != null) 'boolValue': boolValue!, - if (bytesValue != null) 'bytesValue': bytesValue!, - if (doubleArrayValue != null) 'doubleArrayValue': doubleArrayValue!, - if (doubleValue != null) 'doubleValue': doubleValue!, - if (int64ArrayValue != null) 'int64ArrayValue': int64ArrayValue!, - if (int64Value != null) 'int64Value': int64Value!, - if (metadata != null) 'metadata': metadata!, - if (stringArrayValue != null) 'stringArrayValue': stringArrayValue!, - if (stringValue != null) 'stringValue': stringValue!, - if (structValue != null) 'structValue': structValue!, + if (cachedContent != null) 'cachedContent': cachedContent!, + if (contents != null) 'contents': contents!, + if (generationConfig != null) 'generationConfig': generationConfig!, + if (labels != null) 'labels': labels!, + if (safetySettings != null) 'safetySettings': safetySettings!, + if (systemInstruction != null) 'systemInstruction': systemInstruction!, + if (toolConfig != null) 'toolConfig': toolConfig!, + if (tools != null) 'tools': tools!, }; } -/// A destination location for Feature values and format. -class GoogleCloudAiplatformV1FeatureValueDestination { - /// Output in BigQuery format. +/// Response message for \[PredictionService.GenerateContent\]. +class GoogleCloudAiplatformV1GenerateContentResponse { + /// Generated candidates. 
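// A hypothetical sketch (not part of the regenerated code) assembling a
// GenerateContentRequest from the fields listed above. Content is built via
// fromJson because its own declaration sits outside this hunk, and the
// 'role'/'parts'/'text' shape is assumed from the Gemini content schema
// rather than taken from this diff.
final generateRequest = GoogleCloudAiplatformV1GenerateContentRequest(
  contents: [
    GoogleCloudAiplatformV1Content.fromJson({
      'role': 'user',
      'parts': [
        {'text': 'Give me three title ideas for this blog post.'}
      ],
    }),
  ],
  generationConfig: GoogleCloudAiplatformV1GenerationConfig(
    temperature: 0.2,
    maxOutputTokens: 256,
  ),
  labels: {'team': 'docs'}, // optional billing/reporting labels
);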
/// - /// BigQueryDestination.output_uri in - /// FeatureValueDestination.bigquery_destination must refer to a table. - GoogleCloudAiplatformV1BigQueryDestination? bigqueryDestination; + /// Output only. + core.List? candidates; - /// Output in CSV format. + /// The model version used to generate the response. /// - /// Array Feature value types are not allowed in CSV format. - GoogleCloudAiplatformV1CsvDestination? csvDestination; + /// Output only. + core.String? modelVersion; - /// Output in TFRecord format. + /// Content filter results for a prompt sent in the request. /// - /// Below are the mapping from Feature value type in Featurestore to Feature - /// value type in TFRecord: Value type in Featurestore | Value type in - /// TFRecord DOUBLE, DOUBLE_ARRAY | FLOAT_LIST INT64, INT64_ARRAY | INT64_LIST - /// STRING, STRING_ARRAY, BYTES | BYTES_LIST true -\> byte_string("true"), - /// false -\> byte_string("false") BOOL, BOOL_ARRAY (true, false) | BYTES_LIST - GoogleCloudAiplatformV1TFRecordDestination? tfrecordDestination; + /// Note: Sent only in the first stream chunk. Only happens when no candidates + /// were generated due to content violations. + /// + /// Output only. + GoogleCloudAiplatformV1GenerateContentResponsePromptFeedback? promptFeedback; - GoogleCloudAiplatformV1FeatureValueDestination({ - this.bigqueryDestination, - this.csvDestination, - this.tfrecordDestination, + /// Usage metadata about the response(s). + GoogleCloudAiplatformV1GenerateContentResponseUsageMetadata? usageMetadata; + + GoogleCloudAiplatformV1GenerateContentResponse({ + this.candidates, + this.modelVersion, + this.promptFeedback, + this.usageMetadata, }); - GoogleCloudAiplatformV1FeatureValueDestination.fromJson(core.Map json_) + GoogleCloudAiplatformV1GenerateContentResponse.fromJson(core.Map json_) : this( - bigqueryDestination: json_.containsKey('bigqueryDestination') - ? GoogleCloudAiplatformV1BigQueryDestination.fromJson( - json_['bigqueryDestination'] - as core.Map) - : null, - csvDestination: json_.containsKey('csvDestination') - ? GoogleCloudAiplatformV1CsvDestination.fromJson( - json_['csvDestination'] + candidates: (json_['candidates'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1Candidate.fromJson( + value as core.Map)) + .toList(), + modelVersion: json_['modelVersion'] as core.String?, + promptFeedback: json_.containsKey('promptFeedback') + ? GoogleCloudAiplatformV1GenerateContentResponsePromptFeedback + .fromJson(json_['promptFeedback'] as core.Map) : null, - tfrecordDestination: json_.containsKey('tfrecordDestination') - ? GoogleCloudAiplatformV1TFRecordDestination.fromJson( - json_['tfrecordDestination'] + usageMetadata: json_.containsKey('usageMetadata') + ? GoogleCloudAiplatformV1GenerateContentResponseUsageMetadata + .fromJson(json_['usageMetadata'] as core.Map) : null, ); core.Map toJson() => { - if (bigqueryDestination != null) - 'bigqueryDestination': bigqueryDestination!, - if (csvDestination != null) 'csvDestination': csvDestination!, - if (tfrecordDestination != null) - 'tfrecordDestination': tfrecordDestination!, + if (candidates != null) 'candidates': candidates!, + if (modelVersion != null) 'modelVersion': modelVersion!, + if (promptFeedback != null) 'promptFeedback': promptFeedback!, + if (usageMetadata != null) 'usageMetadata': usageMetadata!, }; } -/// Container for list of values. -class GoogleCloudAiplatformV1FeatureValueList { - /// A list of feature values. +/// Content filter results for a prompt sent in the request. 
+class GoogleCloudAiplatformV1GenerateContentResponsePromptFeedback { + /// Blocked reason. /// - /// All of them should be the same data type. - core.List? values; + /// Output only. + /// Possible string values are: + /// - "BLOCKED_REASON_UNSPECIFIED" : Unspecified blocked reason. + /// - "SAFETY" : Candidates blocked due to safety. + /// - "OTHER" : Candidates blocked due to other reason. + /// - "BLOCKLIST" : Candidates blocked due to the terms which are included + /// from the terminology blocklist. + /// - "PROHIBITED_CONTENT" : Candidates blocked due to prohibited content. + core.String? blockReason; - GoogleCloudAiplatformV1FeatureValueList({ - this.values, + /// A readable block reason message. + /// + /// Output only. + core.String? blockReasonMessage; + + /// Safety ratings. + /// + /// Output only. + core.List? safetyRatings; + + GoogleCloudAiplatformV1GenerateContentResponsePromptFeedback({ + this.blockReason, + this.blockReasonMessage, + this.safetyRatings, }); - GoogleCloudAiplatformV1FeatureValueList.fromJson(core.Map json_) + GoogleCloudAiplatformV1GenerateContentResponsePromptFeedback.fromJson( + core.Map json_) : this( - values: (json_['values'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1FeatureValue.fromJson( + blockReason: json_['blockReason'] as core.String?, + blockReasonMessage: json_['blockReasonMessage'] as core.String?, + safetyRatings: (json_['safetyRatings'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1SafetyRating.fromJson( value as core.Map)) .toList(), ); core.Map toJson() => { - if (values != null) 'values': values!, + if (blockReason != null) 'blockReason': blockReason!, + if (blockReasonMessage != null) + 'blockReasonMessage': blockReasonMessage!, + if (safetyRatings != null) 'safetyRatings': safetyRatings!, }; } -/// Metadata of feature value. -class GoogleCloudAiplatformV1FeatureValueMetadata { - /// Feature generation timestamp. +/// Usage metadata about response(s). +class GoogleCloudAiplatformV1GenerateContentResponseUsageMetadata { + /// Number of tokens in the cached part in the input (the cached content). /// - /// Typically, it is provided by user at feature ingestion time. If not, - /// feature store will use the system timestamp when the data is ingested into - /// feature store. For streaming ingestion, the time, aligned by days, must be - /// no older than five years (1825 days) and no later than one year (366 days) - /// in the future. - core.String? generateTime; + /// Output only. + core.int? cachedContentTokenCount; - GoogleCloudAiplatformV1FeatureValueMetadata({ - this.generateTime, + /// Number of tokens in the response(s). + core.int? candidatesTokenCount; + + /// Number of tokens in the request. + /// + /// When `cached_content` is set, this is still the total effective prompt + /// size meaning this includes the number of tokens in the cached content. + core.int? promptTokenCount; + + /// Total token count for prompt and response candidates. + core.int? 
totalTokenCount; + + GoogleCloudAiplatformV1GenerateContentResponseUsageMetadata({ + this.cachedContentTokenCount, + this.candidatesTokenCount, + this.promptTokenCount, + this.totalTokenCount, }); - GoogleCloudAiplatformV1FeatureValueMetadata.fromJson(core.Map json_) + GoogleCloudAiplatformV1GenerateContentResponseUsageMetadata.fromJson( + core.Map json_) : this( - generateTime: json_['generateTime'] as core.String?, + cachedContentTokenCount: + json_['cachedContentTokenCount'] as core.int?, + candidatesTokenCount: json_['candidatesTokenCount'] as core.int?, + promptTokenCount: json_['promptTokenCount'] as core.int?, + totalTokenCount: json_['totalTokenCount'] as core.int?, ); core.Map toJson() => { - if (generateTime != null) 'generateTime': generateTime!, + if (cachedContentTokenCount != null) + 'cachedContentTokenCount': cachedContentTokenCount!, + if (candidatesTokenCount != null) + 'candidatesTokenCount': candidatesTokenCount!, + if (promptTokenCount != null) 'promptTokenCount': promptTokenCount!, + if (totalTokenCount != null) 'totalTokenCount': totalTokenCount!, }; } -/// FeatureView is representation of values that the FeatureOnlineStore will -/// serve based on its syncConfig. -class GoogleCloudAiplatformV1FeatureView { - /// Configures how data is supposed to be extracted from a BigQuery source to - /// be loaded onto the FeatureOnlineStore. +/// Generation config. +class GoogleCloudAiplatformV1GenerationConfig { + /// If enabled, audio timestamp will be included in the request to the model. /// /// Optional. - GoogleCloudAiplatformV1FeatureViewBigQuerySource? bigQuerySource; + core.bool? audioTimestamp; - /// Timestamp when this FeatureView was created. + /// Number of candidates to generate. /// - /// Output only. - core.String? createTime; + /// Optional. + core.int? candidateCount; - /// Used to perform consistent read-modify-write updates. + /// Frequency penalties. /// - /// If not set, a blind "overwrite" update happens. + /// Optional. + core.double? frequencyPenalty; + + /// Logit probabilities. /// /// Optional. - core.String? etag; + core.int? logprobs; - /// Configures the features from a Feature Registry source that need to be - /// loaded onto the FeatureOnlineStore. + /// The maximum number of output tokens to generate per message. /// /// Optional. - GoogleCloudAiplatformV1FeatureViewFeatureRegistrySource? - featureRegistrySource; + core.int? maxOutputTokens; - /// Configuration for index preparation for vector search. + /// Positive penalties. /// - /// It contains the required configurations to create an index from source - /// data, so that approximate nearest neighbor (a.k.a ANN) algorithms search - /// can be performed during online serving. + /// Optional. + core.double? presencePenalty; + + /// If true, export the logprobs results in response. /// /// Optional. - GoogleCloudAiplatformV1FeatureViewIndexConfig? indexConfig; + core.bool? responseLogprobs; - /// The labels with user-defined metadata to organize your FeatureViews. + /// Output response mimetype of the generated candidate text. /// - /// Label keys and values can be no longer than 64 characters (Unicode - /// codepoints), can only contain lowercase letters, numeric characters, - /// underscores and dashes. International characters are allowed. See - /// https://goo.gl/xmQnxf for more information on and examples of labels. No - /// more than 64 user labels can be associated with one - /// FeatureOnlineStore(System labels are excluded)." 
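// A sketch (not part of the regenerated code) of inspecting a
// GenerateContentResponse using only fields defined above: prompt-level
// block feedback and token accounting.
void summariseResponse(
    GoogleCloudAiplatformV1GenerateContentResponse response) {
  final feedback = response.promptFeedback;
  if (feedback?.blockReason != null) {
    // Sent only in the first stream chunk, and only when nothing was generated.
    print('Prompt blocked: ${feedback!.blockReason} '
        '(${feedback.blockReasonMessage ?? 'no message'})');
    return;
  }
  final usage = response.usageMetadata;
  print('model=${response.modelVersion} '
      'prompt=${usage?.promptTokenCount} '
      'cached=${usage?.cachedContentTokenCount} '
      'candidates=${usage?.candidatesTokenCount} '
      'total=${usage?.totalTokenCount}');
}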
System reserved label - /// keys are prefixed with "aiplatform.googleapis.com/" and are immutable. + /// Supported mimetype: - `text/plain`: (default) Text output. - + /// `application/json`: JSON response in the candidates. The model needs to be + /// prompted to output the appropriate response type, otherwise the behavior + /// is undefined. This is a preview feature. /// /// Optional. - core.Map? labels; + core.String? responseMimeType; - /// Identifier. + /// The modalities of the response. /// - /// Name of the FeatureView. Format: - /// `projects/{project}/locations/{location}/featureOnlineStores/{feature_online_store}/featureViews/{feature_view}` - core.String? name; + /// Optional. + core.List? responseModalities; - /// Reserved for future use. + /// The `Schema` object allows the definition of input and output data types. /// - /// Output only. - core.bool? satisfiesPzi; + /// These types can be objects, but also primitives and arrays. Represents a + /// select subset of an + /// [OpenAPI 3.0 schema object](https://spec.openapis.org/oas/v3.0.3#schema). + /// If set, a compatible response_mime_type must also be set. Compatible + /// mimetypes: `application/json`: Schema for JSON response. + /// + /// Optional. + GoogleCloudAiplatformV1Schema? responseSchema; - /// Reserved for future use. + /// Routing configuration. /// - /// Output only. - core.bool? satisfiesPzs; + /// Optional. + GoogleCloudAiplatformV1GenerationConfigRoutingConfig? routingConfig; - /// Configures when data is to be synced/updated for this FeatureView. + /// Seed. /// - /// At the end of the sync the latest featureValues for each entityId of this - /// FeatureView are made ready for online serving. - GoogleCloudAiplatformV1FeatureViewSyncConfig? syncConfig; + /// Optional. + core.int? seed; - /// Timestamp when this FeatureView was last updated. + /// The speech generation config. /// - /// Output only. - core.String? updateTime; + /// Optional. + GoogleCloudAiplatformV1SpeechConfig? speechConfig; - /// The Vertex RAG Source that the FeatureView is linked to. + /// Stop sequences. /// /// Optional. - GoogleCloudAiplatformV1FeatureViewVertexRagSource? vertexRagSource; + core.List? stopSequences; - GoogleCloudAiplatformV1FeatureView({ - this.bigQuerySource, - this.createTime, - this.etag, - this.featureRegistrySource, - this.indexConfig, - this.labels, - this.name, - this.satisfiesPzi, - this.satisfiesPzs, - this.syncConfig, - this.updateTime, - this.vertexRagSource, + /// Controls the randomness of predictions. + /// + /// Optional. + core.double? temperature; + + /// If specified, the token resolution specified will be used. + /// + /// Optional. + /// Possible string values are: + /// - "TOKEN_RESOLUTION_UNSPECIFIED" : Token resolution has not been set. + /// - "TOKEN_RESOLUTION_LOW" : Token resolution set to low (64 tokens). + /// - "TOKEN_RESOLUTION_MEDIUM" : Token resolution set to medium (256 tokens). + /// - "TOKEN_RESOLUTION_HIGH" : Token resolution set to high (P&S with 256 + /// tokens). + core.String? tokenResolution; + + /// If specified, top-k sampling will be used. + /// + /// Optional. + core.double? topK; + + /// If specified, nucleus sampling will be used. + /// + /// Optional. + core.double? 
topP; + + GoogleCloudAiplatformV1GenerationConfig({ + this.audioTimestamp, + this.candidateCount, + this.frequencyPenalty, + this.logprobs, + this.maxOutputTokens, + this.presencePenalty, + this.responseLogprobs, + this.responseMimeType, + this.responseModalities, + this.responseSchema, + this.routingConfig, + this.seed, + this.speechConfig, + this.stopSequences, + this.temperature, + this.tokenResolution, + this.topK, + this.topP, + }); + + GoogleCloudAiplatformV1GenerationConfig.fromJson(core.Map json_) + : this( + audioTimestamp: json_['audioTimestamp'] as core.bool?, + candidateCount: json_['candidateCount'] as core.int?, + frequencyPenalty: + (json_['frequencyPenalty'] as core.num?)?.toDouble(), + logprobs: json_['logprobs'] as core.int?, + maxOutputTokens: json_['maxOutputTokens'] as core.int?, + presencePenalty: (json_['presencePenalty'] as core.num?)?.toDouble(), + responseLogprobs: json_['responseLogprobs'] as core.bool?, + responseMimeType: json_['responseMimeType'] as core.String?, + responseModalities: (json_['responseModalities'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + responseSchema: json_.containsKey('responseSchema') + ? GoogleCloudAiplatformV1Schema.fromJson(json_['responseSchema'] + as core.Map) + : null, + routingConfig: json_.containsKey('routingConfig') + ? GoogleCloudAiplatformV1GenerationConfigRoutingConfig.fromJson( + json_['routingConfig'] as core.Map) + : null, + seed: json_['seed'] as core.int?, + speechConfig: json_.containsKey('speechConfig') + ? GoogleCloudAiplatformV1SpeechConfig.fromJson( + json_['speechConfig'] as core.Map) + : null, + stopSequences: (json_['stopSequences'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + temperature: (json_['temperature'] as core.num?)?.toDouble(), + tokenResolution: json_['tokenResolution'] as core.String?, + topK: (json_['topK'] as core.num?)?.toDouble(), + topP: (json_['topP'] as core.num?)?.toDouble(), + ); + + core.Map toJson() => { + if (audioTimestamp != null) 'audioTimestamp': audioTimestamp!, + if (candidateCount != null) 'candidateCount': candidateCount!, + if (frequencyPenalty != null) 'frequencyPenalty': frequencyPenalty!, + if (logprobs != null) 'logprobs': logprobs!, + if (maxOutputTokens != null) 'maxOutputTokens': maxOutputTokens!, + if (presencePenalty != null) 'presencePenalty': presencePenalty!, + if (responseLogprobs != null) 'responseLogprobs': responseLogprobs!, + if (responseMimeType != null) 'responseMimeType': responseMimeType!, + if (responseModalities != null) + 'responseModalities': responseModalities!, + if (responseSchema != null) 'responseSchema': responseSchema!, + if (routingConfig != null) 'routingConfig': routingConfig!, + if (seed != null) 'seed': seed!, + if (speechConfig != null) 'speechConfig': speechConfig!, + if (stopSequences != null) 'stopSequences': stopSequences!, + if (temperature != null) 'temperature': temperature!, + if (tokenResolution != null) 'tokenResolution': tokenResolution!, + if (topK != null) 'topK': topK!, + if (topP != null) 'topP': topP!, + }; +} + +/// The configuration for routing the request to a specific model. +class GoogleCloudAiplatformV1GenerationConfigRoutingConfig { + /// Automated routing. + GoogleCloudAiplatformV1GenerationConfigRoutingConfigAutoRoutingMode? autoMode; + + /// Manual routing. + GoogleCloudAiplatformV1GenerationConfigRoutingConfigManualRoutingMode? 
+ manualMode; + + GoogleCloudAiplatformV1GenerationConfigRoutingConfig({ + this.autoMode, + this.manualMode, }); - GoogleCloudAiplatformV1FeatureView.fromJson(core.Map json_) + GoogleCloudAiplatformV1GenerationConfigRoutingConfig.fromJson(core.Map json_) : this( - bigQuerySource: json_.containsKey('bigQuerySource') - ? GoogleCloudAiplatformV1FeatureViewBigQuerySource.fromJson( - json_['bigQuerySource'] - as core.Map) - : null, - createTime: json_['createTime'] as core.String?, - etag: json_['etag'] as core.String?, - featureRegistrySource: json_.containsKey('featureRegistrySource') - ? GoogleCloudAiplatformV1FeatureViewFeatureRegistrySource - .fromJson(json_['featureRegistrySource'] - as core.Map) - : null, - indexConfig: json_.containsKey('indexConfig') - ? GoogleCloudAiplatformV1FeatureViewIndexConfig.fromJson( - json_['indexConfig'] as core.Map) - : null, - labels: - (json_['labels'] as core.Map?)?.map( - (key, value) => core.MapEntry( - key, - value as core.String, - ), - ), - name: json_['name'] as core.String?, - satisfiesPzi: json_['satisfiesPzi'] as core.bool?, - satisfiesPzs: json_['satisfiesPzs'] as core.bool?, - syncConfig: json_.containsKey('syncConfig') - ? GoogleCloudAiplatformV1FeatureViewSyncConfig.fromJson( - json_['syncConfig'] as core.Map) + autoMode: json_.containsKey('autoMode') + ? GoogleCloudAiplatformV1GenerationConfigRoutingConfigAutoRoutingMode + .fromJson( + json_['autoMode'] as core.Map) : null, - updateTime: json_['updateTime'] as core.String?, - vertexRagSource: json_.containsKey('vertexRagSource') - ? GoogleCloudAiplatformV1FeatureViewVertexRagSource.fromJson( - json_['vertexRagSource'] + manualMode: json_.containsKey('manualMode') + ? GoogleCloudAiplatformV1GenerationConfigRoutingConfigManualRoutingMode + .fromJson(json_['manualMode'] as core.Map) : null, ); core.Map toJson() => { - if (bigQuerySource != null) 'bigQuerySource': bigQuerySource!, - if (createTime != null) 'createTime': createTime!, - if (etag != null) 'etag': etag!, - if (featureRegistrySource != null) - 'featureRegistrySource': featureRegistrySource!, - if (indexConfig != null) 'indexConfig': indexConfig!, - if (labels != null) 'labels': labels!, - if (name != null) 'name': name!, - if (satisfiesPzi != null) 'satisfiesPzi': satisfiesPzi!, - if (satisfiesPzs != null) 'satisfiesPzs': satisfiesPzs!, - if (syncConfig != null) 'syncConfig': syncConfig!, - if (updateTime != null) 'updateTime': updateTime!, - if (vertexRagSource != null) 'vertexRagSource': vertexRagSource!, + if (autoMode != null) 'autoMode': autoMode!, + if (manualMode != null) 'manualMode': manualMode!, }; } -class GoogleCloudAiplatformV1FeatureViewBigQuerySource { - /// Columns to construct entity_id / row keys. - /// - /// Required. - core.List? entityIdColumns; - - /// The BigQuery view URI that will be materialized on each sync trigger based - /// on FeatureView.SyncConfig. - /// - /// Required. - core.String? uri; +/// When automated routing is specified, the routing will be determined by the +/// pretrained routing model and customer provided model routing preference. +class GoogleCloudAiplatformV1GenerationConfigRoutingConfigAutoRoutingMode { + /// The model routing preference. + /// Possible string values are: + /// - "UNKNOWN" : Unspecified model routing preference. + /// - "PRIORITIZE_QUALITY" : Prefer higher quality over low cost. + /// - "BALANCED" : Balanced model routing preference. + /// - "PRIORITIZE_COST" : Prefer lower cost over higher quality. + core.String? 
modelRoutingPreference; - GoogleCloudAiplatformV1FeatureViewBigQuerySource({ - this.entityIdColumns, - this.uri, + GoogleCloudAiplatformV1GenerationConfigRoutingConfigAutoRoutingMode({ + this.modelRoutingPreference, }); - GoogleCloudAiplatformV1FeatureViewBigQuerySource.fromJson(core.Map json_) + GoogleCloudAiplatformV1GenerationConfigRoutingConfigAutoRoutingMode.fromJson( + core.Map json_) : this( - entityIdColumns: (json_['entityIdColumns'] as core.List?) - ?.map((value) => value as core.String) - .toList(), - uri: json_['uri'] as core.String?, + modelRoutingPreference: + json_['modelRoutingPreference'] as core.String?, ); core.Map toJson() => { - if (entityIdColumns != null) 'entityIdColumns': entityIdColumns!, - if (uri != null) 'uri': uri!, + if (modelRoutingPreference != null) + 'modelRoutingPreference': modelRoutingPreference!, }; } -/// Lookup key for a feature view. -class GoogleCloudAiplatformV1FeatureViewDataKey { - /// The actual Entity ID will be composed from this struct. +/// When manual routing is set, the specified model will be used directly. +class GoogleCloudAiplatformV1GenerationConfigRoutingConfigManualRoutingMode { + /// The model name to use. /// - /// This should match with the way ID is defined in the FeatureView spec. - GoogleCloudAiplatformV1FeatureViewDataKeyCompositeKey? compositeKey; - - /// String key to use for lookup. - core.String? key; + /// Only the public LLM models are accepted. e.g. 'gemini-1.5-pro-001'. + core.String? modelName; - GoogleCloudAiplatformV1FeatureViewDataKey({ - this.compositeKey, - this.key, + GoogleCloudAiplatformV1GenerationConfigRoutingConfigManualRoutingMode({ + this.modelName, }); - GoogleCloudAiplatformV1FeatureViewDataKey.fromJson(core.Map json_) + GoogleCloudAiplatformV1GenerationConfigRoutingConfigManualRoutingMode.fromJson( + core.Map json_) : this( - compositeKey: json_.containsKey('compositeKey') - ? GoogleCloudAiplatformV1FeatureViewDataKeyCompositeKey.fromJson( - json_['compositeKey'] as core.Map) - : null, - key: json_['key'] as core.String?, + modelName: json_['modelName'] as core.String?, ); core.Map toJson() => { - if (compositeKey != null) 'compositeKey': compositeKey!, - if (key != null) 'key': key!, + if (modelName != null) 'modelName': modelName!, }; } -/// ID that is comprised from several parts (columns). -class GoogleCloudAiplatformV1FeatureViewDataKeyCompositeKey { - /// Parts to construct Entity ID. +/// Contains information about the source of the models generated from +/// Generative AI Studio. +class GoogleCloudAiplatformV1GenieSource { + /// The public base model URI. /// - /// Should match with the same ID columns as defined in FeatureView in the - /// same order. - core.List? parts; + /// Required. + core.String? baseModelUri; - GoogleCloudAiplatformV1FeatureViewDataKeyCompositeKey({ - this.parts, + GoogleCloudAiplatformV1GenieSource({ + this.baseModelUri, }); - GoogleCloudAiplatformV1FeatureViewDataKeyCompositeKey.fromJson(core.Map json_) + GoogleCloudAiplatformV1GenieSource.fromJson(core.Map json_) : this( - parts: (json_['parts'] as core.List?) - ?.map((value) => value as core.String) - .toList(), + baseModelUri: json_['baseModelUri'] as core.String?, ); core.Map toJson() => { - if (parts != null) 'parts': parts!, + if (baseModelUri != null) 'baseModelUri': baseModelUri!, }; } -/// A Feature Registry source for features that need to be synced to Online -/// Store. -class GoogleCloudAiplatformV1FeatureViewFeatureRegistrySource { - /// List of features that need to be synced to Online Store. 
+/// The Google Drive location for the input content. +class GoogleCloudAiplatformV1GoogleDriveSource { + /// Google Drive resource IDs. /// /// Required. - core.List< - GoogleCloudAiplatformV1FeatureViewFeatureRegistrySourceFeatureGroup>? - featureGroups; - - /// The project number of the parent project of the Feature Groups. - /// - /// Optional. - core.String? projectNumber; + core.List? resourceIds; - GoogleCloudAiplatformV1FeatureViewFeatureRegistrySource({ - this.featureGroups, - this.projectNumber, + GoogleCloudAiplatformV1GoogleDriveSource({ + this.resourceIds, }); - GoogleCloudAiplatformV1FeatureViewFeatureRegistrySource.fromJson( - core.Map json_) + GoogleCloudAiplatformV1GoogleDriveSource.fromJson(core.Map json_) : this( - featureGroups: (json_['featureGroups'] as core.List?) + resourceIds: (json_['resourceIds'] as core.List?) ?.map((value) => - GoogleCloudAiplatformV1FeatureViewFeatureRegistrySourceFeatureGroup - .fromJson(value as core.Map)) + GoogleCloudAiplatformV1GoogleDriveSourceResourceId.fromJson( + value as core.Map)) .toList(), - projectNumber: json_['projectNumber'] as core.String?, ); core.Map toJson() => { - if (featureGroups != null) 'featureGroups': featureGroups!, - if (projectNumber != null) 'projectNumber': projectNumber!, + if (resourceIds != null) 'resourceIds': resourceIds!, }; } -/// Features belonging to a single feature group that will be synced to Online -/// Store. -class GoogleCloudAiplatformV1FeatureViewFeatureRegistrySourceFeatureGroup { - /// Identifier of the feature group. +/// The type and ID of the Google Drive resource. +class GoogleCloudAiplatformV1GoogleDriveSourceResourceId { + /// The ID of the Google Drive resource. /// /// Required. - core.String? featureGroupId; + core.String? resourceId; - /// Identifiers of features under the feature group. + /// The type of the Google Drive resource. /// /// Required. - core.List? featureIds; + /// Possible string values are: + /// - "RESOURCE_TYPE_UNSPECIFIED" : Unspecified resource type. + /// - "RESOURCE_TYPE_FILE" : File resource type. + /// - "RESOURCE_TYPE_FOLDER" : Folder resource type. + core.String? resourceType; - GoogleCloudAiplatformV1FeatureViewFeatureRegistrySourceFeatureGroup({ - this.featureGroupId, - this.featureIds, + GoogleCloudAiplatformV1GoogleDriveSourceResourceId({ + this.resourceId, + this.resourceType, }); - GoogleCloudAiplatformV1FeatureViewFeatureRegistrySourceFeatureGroup.fromJson( - core.Map json_) + GoogleCloudAiplatformV1GoogleDriveSourceResourceId.fromJson(core.Map json_) : this( - featureGroupId: json_['featureGroupId'] as core.String?, - featureIds: (json_['featureIds'] as core.List?) - ?.map((value) => value as core.String) - .toList(), + resourceId: json_['resourceId'] as core.String?, + resourceType: json_['resourceType'] as core.String?, ); core.Map toJson() => { - if (featureGroupId != null) 'featureGroupId': featureGroupId!, - if (featureIds != null) 'featureIds': featureIds!, + if (resourceId != null) 'resourceId': resourceId!, + if (resourceType != null) 'resourceType': resourceType!, }; } -/// Configuration for vector indexing. -class GoogleCloudAiplatformV1FeatureViewIndexConfig { - /// Configuration options for using brute force search, which simply - /// implements the standard linear search in the database for each query. - /// - /// It is primarily meant for benchmarking and to generate the ground truth - /// for approximate search. - /// - /// Optional. - GoogleCloudAiplatformV1FeatureViewIndexConfigBruteForceConfig? 
- bruteForceConfig; - - /// Column of crowding. - /// - /// This column contains crowding attribute which is a constraint on a - /// neighbor list produced by FeatureOnlineStoreService.SearchNearestEntities - /// to diversify search results. If - /// NearestNeighborQuery.per_crowding_attribute_neighbor_count is set to K in - /// SearchNearestEntitiesRequest, it's guaranteed that no more than K entities - /// of the same crowding attribute are returned in the response. - /// - /// Optional. - core.String? crowdingColumn; +/// Tool to retrieve public web data for grounding, powered by Google. +class GoogleCloudAiplatformV1GoogleSearchRetrieval { + /// Specifies the dynamic retrieval configuration for the given source. + GoogleCloudAiplatformV1DynamicRetrievalConfig? dynamicRetrievalConfig; - /// The distance measure used in nearest neighbor search. - /// - /// Optional. - /// Possible string values are: - /// - "DISTANCE_MEASURE_TYPE_UNSPECIFIED" : Should not be set. - /// - "SQUARED_L2_DISTANCE" : Euclidean (L_2) Distance. - /// - "COSINE_DISTANCE" : Cosine Distance. Defined as 1 - cosine similarity. - /// We strongly suggest using DOT_PRODUCT_DISTANCE + UNIT_L2_NORM instead of - /// COSINE distance. Our algorithms have been more optimized for DOT_PRODUCT - /// distance which, when combined with UNIT_L2_NORM, is mathematically - /// equivalent to COSINE distance and results in the same ranking. - /// - "DOT_PRODUCT_DISTANCE" : Dot Product Distance. Defined as a negative of - /// the dot product. - core.String? distanceMeasureType; + GoogleCloudAiplatformV1GoogleSearchRetrieval({ + this.dynamicRetrievalConfig, + }); - /// Column of embedding. - /// - /// This column contains the source data to create index for vector search. - /// embedding_column must be set when using vector search. - /// - /// Optional. - core.String? embeddingColumn; + GoogleCloudAiplatformV1GoogleSearchRetrieval.fromJson(core.Map json_) + : this( + dynamicRetrievalConfig: json_.containsKey('dynamicRetrievalConfig') + ? GoogleCloudAiplatformV1DynamicRetrievalConfig.fromJson( + json_['dynamicRetrievalConfig'] + as core.Map) + : null, + ); - /// The number of dimensions of the input embedding. - /// - /// Optional. - core.int? embeddingDimension; + core.Map toJson() => { + if (dynamicRetrievalConfig != null) + 'dynamicRetrievalConfig': dynamicRetrievalConfig!, + }; +} - /// Columns of features that're used to filter vector search results. +/// Input for groundedness metric. +class GoogleCloudAiplatformV1GroundednessInput { + /// Groundedness instance. /// - /// Optional. - core.List? filterColumns; + /// Required. + GoogleCloudAiplatformV1GroundednessInstance? instance; - /// Configuration options for the tree-AH algorithm (Shallow tree + Asymmetric - /// Hashing). - /// - /// Please refer to this paper for more details: - /// https://arxiv.org/abs/1908.10396 + /// Spec for groundedness metric. /// - /// Optional. - GoogleCloudAiplatformV1FeatureViewIndexConfigTreeAHConfig? treeAhConfig; + /// Required. + GoogleCloudAiplatformV1GroundednessSpec? 
metricSpec; - GoogleCloudAiplatformV1FeatureViewIndexConfig({ - this.bruteForceConfig, - this.crowdingColumn, - this.distanceMeasureType, - this.embeddingColumn, - this.embeddingDimension, - this.filterColumns, - this.treeAhConfig, + GoogleCloudAiplatformV1GroundednessInput({ + this.instance, + this.metricSpec, }); - GoogleCloudAiplatformV1FeatureViewIndexConfig.fromJson(core.Map json_) + GoogleCloudAiplatformV1GroundednessInput.fromJson(core.Map json_) : this( - bruteForceConfig: json_.containsKey('bruteForceConfig') - ? GoogleCloudAiplatformV1FeatureViewIndexConfigBruteForceConfig - .fromJson(json_['bruteForceConfig'] - as core.Map) + instance: json_.containsKey('instance') + ? GoogleCloudAiplatformV1GroundednessInstance.fromJson( + json_['instance'] as core.Map) : null, - crowdingColumn: json_['crowdingColumn'] as core.String?, - distanceMeasureType: json_['distanceMeasureType'] as core.String?, - embeddingColumn: json_['embeddingColumn'] as core.String?, - embeddingDimension: json_['embeddingDimension'] as core.int?, - filterColumns: (json_['filterColumns'] as core.List?) - ?.map((value) => value as core.String) - .toList(), - treeAhConfig: json_.containsKey('treeAhConfig') - ? GoogleCloudAiplatformV1FeatureViewIndexConfigTreeAHConfig - .fromJson(json_['treeAhConfig'] - as core.Map) + metricSpec: json_.containsKey('metricSpec') + ? GoogleCloudAiplatformV1GroundednessSpec.fromJson( + json_['metricSpec'] as core.Map) : null, ); core.Map toJson() => { - if (bruteForceConfig != null) 'bruteForceConfig': bruteForceConfig!, - if (crowdingColumn != null) 'crowdingColumn': crowdingColumn!, - if (distanceMeasureType != null) - 'distanceMeasureType': distanceMeasureType!, - if (embeddingColumn != null) 'embeddingColumn': embeddingColumn!, - if (embeddingDimension != null) - 'embeddingDimension': embeddingDimension!, - if (filterColumns != null) 'filterColumns': filterColumns!, - if (treeAhConfig != null) 'treeAhConfig': treeAhConfig!, + if (instance != null) 'instance': instance!, + if (metricSpec != null) 'metricSpec': metricSpec!, }; } -/// Configuration options for using brute force search. -typedef GoogleCloudAiplatformV1FeatureViewIndexConfigBruteForceConfig = $Empty; - -/// Configuration options for the tree-AH algorithm. -class GoogleCloudAiplatformV1FeatureViewIndexConfigTreeAHConfig { - /// Number of embeddings on each leaf node. +/// Spec for groundedness instance. +class GoogleCloudAiplatformV1GroundednessInstance { + /// Background information provided in context used to compare against the + /// prediction. /// - /// The default value is 1000 if not set. + /// Required. + core.String? context; + + /// Output of the evaluated model. /// - /// Optional. - core.String? leafNodeEmbeddingCount; + /// Required. + core.String? 
prediction; - GoogleCloudAiplatformV1FeatureViewIndexConfigTreeAHConfig({ - this.leafNodeEmbeddingCount, + GoogleCloudAiplatformV1GroundednessInstance({ + this.context, + this.prediction, }); - GoogleCloudAiplatformV1FeatureViewIndexConfigTreeAHConfig.fromJson( - core.Map json_) + GoogleCloudAiplatformV1GroundednessInstance.fromJson(core.Map json_) : this( - leafNodeEmbeddingCount: - json_['leafNodeEmbeddingCount'] as core.String?, + context: json_['context'] as core.String?, + prediction: json_['prediction'] as core.String?, ); core.Map toJson() => { - if (leafNodeEmbeddingCount != null) - 'leafNodeEmbeddingCount': leafNodeEmbeddingCount!, + if (context != null) 'context': context!, + if (prediction != null) 'prediction': prediction!, }; } -/// FeatureViewSync is a representation of sync operation which copies data from -/// data source to Feature View in Online Store. -class GoogleCloudAiplatformV1FeatureViewSync { - /// Time when this FeatureViewSync is created. - /// - /// Creation of a FeatureViewSync means that the job is pending / waiting for - /// sufficient resources but may not have started the actual data transfer - /// yet. +/// Spec for groundedness result. +class GoogleCloudAiplatformV1GroundednessResult { + /// Confidence for groundedness score. /// /// Output only. - core.String? createTime; + core.double? confidence; - /// Final status of the FeatureViewSync. + /// Explanation for groundedness score. /// /// Output only. - GoogleRpcStatus? finalStatus; - - /// Identifier. - /// - /// Name of the FeatureViewSync. Format: - /// `projects/{project}/locations/{location}/featureOnlineStores/{feature_online_store}/featureViews/{feature_view}/featureViewSyncs/{feature_view_sync}` - core.String? name; + core.String? explanation; - /// Time when this FeatureViewSync is finished. + /// Groundedness score. /// /// Output only. - GoogleTypeInterval? runTime; + core.double? score; - /// Reserved for future use. - /// - /// Output only. - core.bool? satisfiesPzi; + GoogleCloudAiplatformV1GroundednessResult({ + this.confidence, + this.explanation, + this.score, + }); - /// Reserved for future use. - /// - /// Output only. - core.bool? satisfiesPzs; + GoogleCloudAiplatformV1GroundednessResult.fromJson(core.Map json_) + : this( + confidence: (json_['confidence'] as core.num?)?.toDouble(), + explanation: json_['explanation'] as core.String?, + score: (json_['score'] as core.num?)?.toDouble(), + ); - /// Summary of the sync job. - /// - /// Output only. - GoogleCloudAiplatformV1FeatureViewSyncSyncSummary? syncSummary; + core.Map toJson() => { + if (confidence != null) 'confidence': confidence!, + if (explanation != null) 'explanation': explanation!, + if (score != null) 'score': score!, + }; +} - GoogleCloudAiplatformV1FeatureViewSync({ - this.createTime, - this.finalStatus, - this.name, - this.runTime, - this.satisfiesPzi, - this.satisfiesPzs, - this.syncSummary, +/// Spec for groundedness metric. +typedef GoogleCloudAiplatformV1GroundednessSpec = $Spec; + +/// Grounding chunk. +class GoogleCloudAiplatformV1GroundingChunk { + /// Grounding chunk from context retrieved by the retrieval tools. + GoogleCloudAiplatformV1GroundingChunkRetrievedContext? retrievedContext; + + /// Grounding chunk from the web. + GoogleCloudAiplatformV1GroundingChunkWeb? 
web; + + GoogleCloudAiplatformV1GroundingChunk({ + this.retrievedContext, + this.web, }); - GoogleCloudAiplatformV1FeatureViewSync.fromJson(core.Map json_) + GoogleCloudAiplatformV1GroundingChunk.fromJson(core.Map json_) : this( - createTime: json_['createTime'] as core.String?, - finalStatus: json_.containsKey('finalStatus') - ? GoogleRpcStatus.fromJson( - json_['finalStatus'] as core.Map) - : null, - name: json_['name'] as core.String?, - runTime: json_.containsKey('runTime') - ? GoogleTypeInterval.fromJson( - json_['runTime'] as core.Map) + retrievedContext: json_.containsKey('retrievedContext') + ? GoogleCloudAiplatformV1GroundingChunkRetrievedContext.fromJson( + json_['retrievedContext'] + as core.Map) : null, - satisfiesPzi: json_['satisfiesPzi'] as core.bool?, - satisfiesPzs: json_['satisfiesPzs'] as core.bool?, - syncSummary: json_.containsKey('syncSummary') - ? GoogleCloudAiplatformV1FeatureViewSyncSyncSummary.fromJson( - json_['syncSummary'] as core.Map) + web: json_.containsKey('web') + ? GoogleCloudAiplatformV1GroundingChunkWeb.fromJson( + json_['web'] as core.Map) : null, ); core.Map toJson() => { - if (createTime != null) 'createTime': createTime!, - if (finalStatus != null) 'finalStatus': finalStatus!, - if (name != null) 'name': name!, - if (runTime != null) 'runTime': runTime!, - if (satisfiesPzi != null) 'satisfiesPzi': satisfiesPzi!, - if (satisfiesPzs != null) 'satisfiesPzs': satisfiesPzs!, - if (syncSummary != null) 'syncSummary': syncSummary!, + if (retrievedContext != null) 'retrievedContext': retrievedContext!, + if (web != null) 'web': web!, }; } -/// Configuration for Sync. -/// -/// Only one option is set. -class GoogleCloudAiplatformV1FeatureViewSyncConfig { - /// If true, syncs the FeatureView in a continuous manner to Online Store. - /// - /// Optional. - core.bool? continuous; +/// Chunk from context retrieved by the retrieval tools. +class GoogleCloudAiplatformV1GroundingChunkRetrievedContext { + /// Text of the attribution. + core.String? text; + + /// Title of the attribution. + core.String? title; + + /// URI reference of the attribution. + core.String? uri; + + GoogleCloudAiplatformV1GroundingChunkRetrievedContext({ + this.text, + this.title, + this.uri, + }); + + GoogleCloudAiplatformV1GroundingChunkRetrievedContext.fromJson(core.Map json_) + : this( + text: json_['text'] as core.String?, + title: json_['title'] as core.String?, + uri: json_['uri'] as core.String?, + ); + + core.Map toJson() => { + if (text != null) 'text': text!, + if (title != null) 'title': title!, + if (uri != null) 'uri': uri!, + }; +} - /// Cron schedule (https://en.wikipedia.org/wiki/Cron) to launch scheduled - /// runs. - /// - /// To explicitly set a timezone to the cron tab, apply a prefix in the cron - /// tab: "CRON_TZ=${IANA_TIME_ZONE}" or "TZ=${IANA_TIME_ZONE}". The - /// ${IANA_TIME_ZONE} may only be a valid string from IANA time zone database. - /// For example, "CRON_TZ=America/New_York 1 * * * *", or "TZ=America/New_York - /// 1 * * * *". - core.String? cron; +/// Chunk from the web. +class GoogleCloudAiplatformV1GroundingChunkWeb { + /// Title of the chunk. + core.String? title; - GoogleCloudAiplatformV1FeatureViewSyncConfig({ - this.continuous, - this.cron, + /// URI reference of the chunk. + core.String? 
uri; + + GoogleCloudAiplatformV1GroundingChunkWeb({ + this.title, + this.uri, }); - GoogleCloudAiplatformV1FeatureViewSyncConfig.fromJson(core.Map json_) + GoogleCloudAiplatformV1GroundingChunkWeb.fromJson(core.Map json_) : this( - continuous: json_['continuous'] as core.bool?, - cron: json_['cron'] as core.String?, + title: json_['title'] as core.String?, + uri: json_['uri'] as core.String?, ); core.Map toJson() => { - if (continuous != null) 'continuous': continuous!, - if (cron != null) 'cron': cron!, + if (title != null) 'title': title!, + if (uri != null) 'uri': uri!, }; } -/// Summary from the Sync job. -/// -/// For continuous syncs, the summary is updated periodically. For batch syncs, -/// it gets updated on completion of the sync. -class GoogleCloudAiplatformV1FeatureViewSyncSyncSummary { - /// Total number of rows synced. +/// Metadata returned to client when grounding is enabled. +class GoogleCloudAiplatformV1GroundingMetadata { + /// List of supporting references retrieved from specified grounding source. + core.List? groundingChunks; + + /// List of grounding support. /// - /// Output only. - core.String? rowSynced; + /// Optional. + core.List? groundingSupports; - /// Lower bound of the system time watermark for the sync job. + /// Retrieval metadata. /// - /// This is only set for continuously syncing feature views. - core.String? systemWatermarkTime; + /// Optional. Output only. + GoogleCloudAiplatformV1RetrievalMetadata? retrievalMetadata; - /// BigQuery slot milliseconds consumed for the sync job. + /// Google search entry for the following-up web searches. /// - /// Output only. - core.String? totalSlot; + /// Optional. + GoogleCloudAiplatformV1SearchEntryPoint? searchEntryPoint; - GoogleCloudAiplatformV1FeatureViewSyncSyncSummary({ - this.rowSynced, - this.systemWatermarkTime, - this.totalSlot, + /// Web search queries for the following-up web search. + /// + /// Optional. + core.List? webSearchQueries; + + GoogleCloudAiplatformV1GroundingMetadata({ + this.groundingChunks, + this.groundingSupports, + this.retrievalMetadata, + this.searchEntryPoint, + this.webSearchQueries, }); - GoogleCloudAiplatformV1FeatureViewSyncSyncSummary.fromJson(core.Map json_) + GoogleCloudAiplatformV1GroundingMetadata.fromJson(core.Map json_) : this( - rowSynced: json_['rowSynced'] as core.String?, - systemWatermarkTime: json_['systemWatermarkTime'] as core.String?, - totalSlot: json_['totalSlot'] as core.String?, + groundingChunks: (json_['groundingChunks'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1GroundingChunk.fromJson( + value as core.Map)) + .toList(), + groundingSupports: (json_['groundingSupports'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1GroundingSupport.fromJson( + value as core.Map)) + .toList(), + retrievalMetadata: json_.containsKey('retrievalMetadata') + ? GoogleCloudAiplatformV1RetrievalMetadata.fromJson( + json_['retrievalMetadata'] + as core.Map) + : null, + searchEntryPoint: json_.containsKey('searchEntryPoint') + ? GoogleCloudAiplatformV1SearchEntryPoint.fromJson( + json_['searchEntryPoint'] + as core.Map) + : null, + webSearchQueries: (json_['webSearchQueries'] as core.List?) 
+ ?.map((value) => value as core.String) + .toList(), ); core.Map toJson() => { - if (rowSynced != null) 'rowSynced': rowSynced!, - if (systemWatermarkTime != null) - 'systemWatermarkTime': systemWatermarkTime!, - if (totalSlot != null) 'totalSlot': totalSlot!, + if (groundingChunks != null) 'groundingChunks': groundingChunks!, + if (groundingSupports != null) 'groundingSupports': groundingSupports!, + if (retrievalMetadata != null) 'retrievalMetadata': retrievalMetadata!, + if (searchEntryPoint != null) 'searchEntryPoint': searchEntryPoint!, + if (webSearchQueries != null) 'webSearchQueries': webSearchQueries!, }; } -/// A Vertex Rag source for features that need to be synced to Online Store. -class GoogleCloudAiplatformV1FeatureViewVertexRagSource { - /// The RAG corpus id corresponding to this FeatureView. +/// Grounding support. +class GoogleCloudAiplatformV1GroundingSupport { + /// Confidence score of the support references. /// - /// Optional. - core.String? ragCorpusId; + /// Ranges from 0 to 1. 1 is the most confident. This list must have the same + /// size as the grounding_chunk_indices. + core.List? confidenceScores; - /// The BigQuery view/table URI that will be materialized on each manual sync - /// trigger. - /// - /// The table/view is expected to have the following columns and types at - /// least: - `corpus_id` (STRING, NULLABLE/REQUIRED) - `file_id` (STRING, - /// NULLABLE/REQUIRED) - `chunk_id` (STRING, NULLABLE/REQUIRED) - - /// `chunk_data_type` (STRING, NULLABLE/REQUIRED) - `chunk_data` (STRING, - /// NULLABLE/REQUIRED) - `embeddings` (FLOAT, REPEATED) - `file_original_uri` - /// (STRING, NULLABLE/REQUIRED) + /// A list of indices (into 'grounding_chunk') specifying the citations + /// associated with the claim. /// - /// Required. - core.String? uri; + /// For instance \[1,3,4\] means that grounding_chunk\[1\], + /// grounding_chunk\[3\], grounding_chunk\[4\] are the retrieved content + /// attributed to the claim. + core.List? groundingChunkIndices; - GoogleCloudAiplatformV1FeatureViewVertexRagSource({ - this.ragCorpusId, - this.uri, + /// Segment of the content this support belongs to. + GoogleCloudAiplatformV1Segment? segment; + + GoogleCloudAiplatformV1GroundingSupport({ + this.confidenceScores, + this.groundingChunkIndices, + this.segment, }); - GoogleCloudAiplatformV1FeatureViewVertexRagSource.fromJson(core.Map json_) + GoogleCloudAiplatformV1GroundingSupport.fromJson(core.Map json_) : this( - ragCorpusId: json_['ragCorpusId'] as core.String?, - uri: json_['uri'] as core.String?, + confidenceScores: (json_['confidenceScores'] as core.List?) + ?.map((value) => (value as core.num).toDouble()) + .toList(), + groundingChunkIndices: (json_['groundingChunkIndices'] as core.List?) + ?.map((value) => value as core.int) + .toList(), + segment: json_.containsKey('segment') + ? GoogleCloudAiplatformV1Segment.fromJson( + json_['segment'] as core.Map) + : null, ); core.Map toJson() => { - if (ragCorpusId != null) 'ragCorpusId': ragCorpusId!, - if (uri != null) 'uri': uri!, + if (confidenceScores != null) 'confidenceScores': confidenceScores!, + if (groundingChunkIndices != null) + 'groundingChunkIndices': groundingChunkIndices!, + if (segment != null) 'segment': segment!, }; } -/// Vertex AI Feature Store provides a centralized repository for organizing, -/// storing, and serving ML features. +/// Represents a HyperparameterTuningJob. /// -/// The Featurestore is a top-level container for your features and their -/// values. 
-class GoogleCloudAiplatformV1Featurestore { - /// Timestamp when this Featurestore was created. +/// A HyperparameterTuningJob has a Study specification and multiple CustomJobs +/// with identical CustomJob specification. +class GoogleCloudAiplatformV1HyperparameterTuningJob { + /// Time when the HyperparameterTuningJob was created. /// /// Output only. core.String? createTime; - /// Customer-managed encryption key spec for data storage. + /// The display name of the HyperparameterTuningJob. /// - /// If set, both of the online and offline data storage will be secured by - /// this key. + /// The name can be up to 128 characters long and can consist of any UTF-8 + /// characters. /// - /// Optional. + /// Required. + core.String? displayName; + + /// Customer-managed encryption key options for a HyperparameterTuningJob. + /// + /// If this is set, then all resources created by the HyperparameterTuningJob + /// will be encrypted with the provided encryption key. GoogleCloudAiplatformV1EncryptionSpec? encryptionSpec; - /// Used to perform consistent read-modify-write updates. + /// Time when the HyperparameterTuningJob entered any of the following states: + /// `JOB_STATE_SUCCEEDED`, `JOB_STATE_FAILED`, `JOB_STATE_CANCELLED`. /// - /// If not set, a blind "overwrite" update happens. + /// Output only. + core.String? endTime; + + /// Only populated when job's state is JOB_STATE_FAILED or + /// JOB_STATE_CANCELLED. /// - /// Optional. - core.String? etag; + /// Output only. + GoogleRpcStatus? error; - /// The labels with user-defined metadata to organize your Featurestore. + /// The labels with user-defined metadata to organize + /// HyperparameterTuningJobs. /// /// Label keys and values can be no longer than 64 characters (Unicode /// codepoints), can only contain lowercase letters, numeric characters, /// underscores and dashes. International characters are allowed. See - /// https://goo.gl/xmQnxf for more information on and examples of labels. No - /// more than 64 user labels can be associated with one Featurestore(System - /// labels are excluded)." System reserved label keys are prefixed with - /// "aiplatform.googleapis.com/" and are immutable. - /// - /// Optional. + /// https://goo.gl/xmQnxf for more information and examples of labels. core.Map? labels; - /// Name of the Featurestore. - /// - /// Format: - /// `projects/{project}/locations/{location}/featurestores/{featurestore}` + /// The number of failed Trials that need to be seen before failing the + /// HyperparameterTuningJob. /// - /// Output only. - core.String? name; + /// If set to 0, Vertex AI decides how many Trials must fail before the whole + /// job fails. + core.int? maxFailedTrialCount; - /// Config for online storage resources. - /// - /// The field should not co-exist with the field of - /// `OnlineStoreReplicationConfig`. If both of it and - /// OnlineStoreReplicationConfig are unset, the feature store will not have an - /// online store and cannot be used for online serving. + /// The desired total number of Trials. /// - /// Optional. - GoogleCloudAiplatformV1FeaturestoreOnlineServingConfig? onlineServingConfig; + /// Required. + core.int? maxTrialCount; - /// TTL in days for feature values that will be stored in online serving - /// storage. + /// Resource name of the HyperparameterTuningJob. /// - /// The Feature Store online storage periodically removes obsolete feature - /// values older than `online_storage_ttl_days` since the feature generation - /// time. 
Note that `online_storage_ttl_days` should be less than or equal to - /// `offline_storage_ttl_days` for each EntityType under a featurestore. If - /// not set, default to 4000 days + /// Output only. + core.String? name; + + /// The desired number of Trials to run in parallel. /// - /// Optional. - core.int? onlineStorageTtlDays; + /// Required. + core.int? parallelTrialCount; /// Reserved for future use. /// @@ -38229,54 +43841,93 @@ class GoogleCloudAiplatformV1Featurestore { /// Output only. core.bool? satisfiesPzs; - /// State of the featurestore. + /// Time when the HyperparameterTuningJob for the first time entered the + /// `JOB_STATE_RUNNING` state. + /// + /// Output only. + core.String? startTime; + + /// The detailed state of the job. /// /// Output only. /// Possible string values are: - /// - "STATE_UNSPECIFIED" : Default value. This value is unused. - /// - "STABLE" : State when the featurestore configuration is not being - /// updated and the fields reflect the current configuration of the - /// featurestore. The featurestore is usable in this state. - /// - "UPDATING" : The state of the featurestore configuration when it is - /// being updated. During an update, the fields reflect either the original - /// configuration or the updated configuration of the featurestore. For - /// example, `online_serving_config.fixed_node_count` can take minutes to - /// update. While the update is in progress, the featurestore is in the - /// UPDATING state, and the value of `fixed_node_count` can be the original - /// value or the updated value, depending on the progress of the operation. - /// Until the update completes, the actual number of nodes can still be the - /// original value of `fixed_node_count`. The featurestore is still usable in - /// this state. + /// - "JOB_STATE_UNSPECIFIED" : The job state is unspecified. + /// - "JOB_STATE_QUEUED" : The job has been just created or resumed and + /// processing has not yet begun. + /// - "JOB_STATE_PENDING" : The service is preparing to run the job. + /// - "JOB_STATE_RUNNING" : The job is in progress. + /// - "JOB_STATE_SUCCEEDED" : The job completed successfully. + /// - "JOB_STATE_FAILED" : The job failed. + /// - "JOB_STATE_CANCELLING" : The job is being cancelled. From this state the + /// job may only go to either `JOB_STATE_SUCCEEDED`, `JOB_STATE_FAILED` or + /// `JOB_STATE_CANCELLED`. + /// - "JOB_STATE_CANCELLED" : The job has been cancelled. + /// - "JOB_STATE_PAUSED" : The job has been stopped, and can be resumed. + /// - "JOB_STATE_EXPIRED" : The job has expired. + /// - "JOB_STATE_UPDATING" : The job is being updated. Only jobs in the + /// `RUNNING` state can be updated. After updating, the job goes back to the + /// `RUNNING` state. + /// - "JOB_STATE_PARTIALLY_SUCCEEDED" : The job is partially succeeded, some + /// results may be missing due to errors. core.String? state; - /// Timestamp when this Featurestore was last updated. + /// Study configuration of the HyperparameterTuningJob. + /// + /// Required. + GoogleCloudAiplatformV1StudySpec? studySpec; + + /// The spec of a trial job. + /// + /// The same spec applies to the CustomJobs created in all the trials. + /// + /// Required. + GoogleCloudAiplatformV1CustomJobSpec? trialJobSpec; + + /// Trials of the HyperparameterTuningJob. + /// + /// Output only. + core.List? trials; + + /// Time when the HyperparameterTuningJob was most recently updated. /// /// Output only. core.String? 
updateTime; - GoogleCloudAiplatformV1Featurestore({ + GoogleCloudAiplatformV1HyperparameterTuningJob({ this.createTime, + this.displayName, this.encryptionSpec, - this.etag, + this.endTime, + this.error, this.labels, + this.maxFailedTrialCount, + this.maxTrialCount, this.name, - this.onlineServingConfig, - this.onlineStorageTtlDays, + this.parallelTrialCount, this.satisfiesPzi, this.satisfiesPzs, + this.startTime, this.state, + this.studySpec, + this.trialJobSpec, + this.trials, this.updateTime, }); - GoogleCloudAiplatformV1Featurestore.fromJson(core.Map json_) + GoogleCloudAiplatformV1HyperparameterTuningJob.fromJson(core.Map json_) : this( createTime: json_['createTime'] as core.String?, + displayName: json_['displayName'] as core.String?, encryptionSpec: json_.containsKey('encryptionSpec') ? GoogleCloudAiplatformV1EncryptionSpec.fromJson( json_['encryptionSpec'] as core.Map) : null, - etag: json_['etag'] as core.String?, + endTime: json_['endTime'] as core.String?, + error: json_.containsKey('error') + ? GoogleRpcStatus.fromJson( + json_['error'] as core.Map) + : null, labels: (json_['labels'] as core.Map?)?.map( (key, value) => core.MapEntry( @@ -38284,6209 +43935,6901 @@ class GoogleCloudAiplatformV1Featurestore { value as core.String, ), ), + maxFailedTrialCount: json_['maxFailedTrialCount'] as core.int?, + maxTrialCount: json_['maxTrialCount'] as core.int?, name: json_['name'] as core.String?, - onlineServingConfig: json_.containsKey('onlineServingConfig') - ? GoogleCloudAiplatformV1FeaturestoreOnlineServingConfig.fromJson( - json_['onlineServingConfig'] - as core.Map) - : null, - onlineStorageTtlDays: json_['onlineStorageTtlDays'] as core.int?, + parallelTrialCount: json_['parallelTrialCount'] as core.int?, satisfiesPzi: json_['satisfiesPzi'] as core.bool?, satisfiesPzs: json_['satisfiesPzs'] as core.bool?, + startTime: json_['startTime'] as core.String?, state: json_['state'] as core.String?, + studySpec: json_.containsKey('studySpec') + ? GoogleCloudAiplatformV1StudySpec.fromJson( + json_['studySpec'] as core.Map) + : null, + trialJobSpec: json_.containsKey('trialJobSpec') + ? GoogleCloudAiplatformV1CustomJobSpec.fromJson( + json_['trialJobSpec'] as core.Map) + : null, + trials: (json_['trials'] as core.List?) 
+ ?.map((value) => GoogleCloudAiplatformV1Trial.fromJson( + value as core.Map)) + .toList(), updateTime: json_['updateTime'] as core.String?, ); core.Map toJson() => { if (createTime != null) 'createTime': createTime!, + if (displayName != null) 'displayName': displayName!, if (encryptionSpec != null) 'encryptionSpec': encryptionSpec!, - if (etag != null) 'etag': etag!, + if (endTime != null) 'endTime': endTime!, + if (error != null) 'error': error!, if (labels != null) 'labels': labels!, + if (maxFailedTrialCount != null) + 'maxFailedTrialCount': maxFailedTrialCount!, + if (maxTrialCount != null) 'maxTrialCount': maxTrialCount!, if (name != null) 'name': name!, - if (onlineServingConfig != null) - 'onlineServingConfig': onlineServingConfig!, - if (onlineStorageTtlDays != null) - 'onlineStorageTtlDays': onlineStorageTtlDays!, + if (parallelTrialCount != null) + 'parallelTrialCount': parallelTrialCount!, if (satisfiesPzi != null) 'satisfiesPzi': satisfiesPzi!, if (satisfiesPzs != null) 'satisfiesPzs': satisfiesPzs!, + if (startTime != null) 'startTime': startTime!, if (state != null) 'state': state!, + if (studySpec != null) 'studySpec': studySpec!, + if (trialJobSpec != null) 'trialJobSpec': trialJobSpec!, + if (trials != null) 'trials': trials!, if (updateTime != null) 'updateTime': updateTime!, }; } -/// Configuration of how features in Featurestore are monitored. -class GoogleCloudAiplatformV1FeaturestoreMonitoringConfig { - /// Threshold for categorical features of anomaly detection. - /// - /// This is shared by all types of Featurestore Monitoring for categorical - /// features (i.e. Features with type (Feature.ValueType) BOOL or STRING). - GoogleCloudAiplatformV1FeaturestoreMonitoringConfigThresholdConfig? - categoricalThresholdConfig; - - /// The config for ImportFeatures Analysis Based Feature Monitoring. - GoogleCloudAiplatformV1FeaturestoreMonitoringConfigImportFeaturesAnalysis? - importFeaturesAnalysis; - - /// Threshold for numerical features of anomaly detection. +/// Matcher for Features of an EntityType by Feature ID. +class GoogleCloudAiplatformV1IdMatcher { + /// The following are accepted as `ids`: * A single-element list containing + /// only `*`, which selects all Features in the target EntityType, or * A list + /// containing only Feature IDs, which selects only Features with those IDs in + /// the target EntityType. /// - /// This is shared by all objectives of Featurestore Monitoring for numerical - /// features (i.e. Features with type (Feature.ValueType) DOUBLE or INT64). - GoogleCloudAiplatformV1FeaturestoreMonitoringConfigThresholdConfig? - numericalThresholdConfig; - - /// The config for Snapshot Analysis Based Feature Monitoring. - GoogleCloudAiplatformV1FeaturestoreMonitoringConfigSnapshotAnalysis? - snapshotAnalysis; - - GoogleCloudAiplatformV1FeaturestoreMonitoringConfig({ - this.categoricalThresholdConfig, - this.importFeaturesAnalysis, - this.numericalThresholdConfig, - this.snapshotAnalysis, - }); - - GoogleCloudAiplatformV1FeaturestoreMonitoringConfig.fromJson(core.Map json_) - : this( - categoricalThresholdConfig: json_ - .containsKey('categoricalThresholdConfig') - ? GoogleCloudAiplatformV1FeaturestoreMonitoringConfigThresholdConfig - .fromJson(json_['categoricalThresholdConfig'] - as core.Map) - : null, - importFeaturesAnalysis: json_.containsKey('importFeaturesAnalysis') - ? 
GoogleCloudAiplatformV1FeaturestoreMonitoringConfigImportFeaturesAnalysis - .fromJson(json_['importFeaturesAnalysis'] - as core.Map) - : null, - numericalThresholdConfig: json_ - .containsKey('numericalThresholdConfig') - ? GoogleCloudAiplatformV1FeaturestoreMonitoringConfigThresholdConfig - .fromJson(json_['numericalThresholdConfig'] - as core.Map) - : null, - snapshotAnalysis: json_.containsKey('snapshotAnalysis') - ? GoogleCloudAiplatformV1FeaturestoreMonitoringConfigSnapshotAnalysis - .fromJson(json_['snapshotAnalysis'] - as core.Map) - : null, - ); - - core.Map toJson() => { - if (categoricalThresholdConfig != null) - 'categoricalThresholdConfig': categoricalThresholdConfig!, - if (importFeaturesAnalysis != null) - 'importFeaturesAnalysis': importFeaturesAnalysis!, - if (numericalThresholdConfig != null) - 'numericalThresholdConfig': numericalThresholdConfig!, - if (snapshotAnalysis != null) 'snapshotAnalysis': snapshotAnalysis!, - }; -} - -/// Configuration of the Featurestore's ImportFeature Analysis Based Monitoring. -/// -/// This type of analysis generates statistics for values of each Feature -/// imported by every ImportFeatureValues operation. -class GoogleCloudAiplatformV1FeaturestoreMonitoringConfigImportFeaturesAnalysis { - /// The baseline used to do anomaly detection for the statistics generated by - /// import features analysis. - /// Possible string values are: - /// - "BASELINE_UNSPECIFIED" : Should not be used. - /// - "LATEST_STATS" : Choose the later one statistics generated by either - /// most recent snapshot analysis or previous import features analysis. If non - /// of them exists, skip anomaly detection and only generate a statistics. - /// - "MOST_RECENT_SNAPSHOT_STATS" : Use the statistics generated by the most - /// recent snapshot analysis if exists. - /// - "PREVIOUS_IMPORT_FEATURES_STATS" : Use the statistics generated by the - /// previous import features analysis if exists. - core.String? anomalyDetectionBaseline; - - /// Whether to enable / disable / inherite default hebavior for import - /// features analysis. - /// Possible string values are: - /// - "STATE_UNSPECIFIED" : Should not be used. - /// - "DEFAULT" : The default behavior of whether to enable the monitoring. - /// EntityType-level config: disabled. Feature-level config: inherited from - /// the configuration of EntityType this Feature belongs to. - /// - "ENABLED" : Explicitly enables import features analysis. - /// EntityType-level config: by default enables import features analysis for - /// all Features under it. Feature-level config: enables import features - /// analysis regardless of the EntityType-level config. - /// - "DISABLED" : Explicitly disables import features analysis. - /// EntityType-level config: by default disables import features analysis for - /// all Features under it. Feature-level config: disables import features - /// analysis regardless of the EntityType-level config. - core.String? state; + /// Required. + core.List? ids; - GoogleCloudAiplatformV1FeaturestoreMonitoringConfigImportFeaturesAnalysis({ - this.anomalyDetectionBaseline, - this.state, + GoogleCloudAiplatformV1IdMatcher({ + this.ids, }); - GoogleCloudAiplatformV1FeaturestoreMonitoringConfigImportFeaturesAnalysis.fromJson( - core.Map json_) + GoogleCloudAiplatformV1IdMatcher.fromJson(core.Map json_) : this( - anomalyDetectionBaseline: - json_['anomalyDetectionBaseline'] as core.String?, - state: json_['state'] as core.String?, + ids: (json_['ids'] as core.List?) 
+ ?.map((value) => value as core.String) + .toList(), ); core.Map toJson() => { - if (anomalyDetectionBaseline != null) - 'anomalyDetectionBaseline': anomalyDetectionBaseline!, - if (state != null) 'state': state!, + if (ids != null) 'ids': ids!, }; } -/// Configuration of the Featurestore's Snapshot Analysis Based Monitoring. -/// -/// This type of analysis generates statistics for each Feature based on a -/// snapshot of the latest feature value of each entities every -/// monitoring_interval. -class GoogleCloudAiplatformV1FeaturestoreMonitoringConfigSnapshotAnalysis { - /// The monitoring schedule for snapshot analysis. +/// Describes the location from where we import data into a Dataset, together +/// with the labels that will be applied to the DataItems and the Annotations. +class GoogleCloudAiplatformV1ImportDataConfig { + /// Labels that will be applied to newly imported Annotations. /// - /// For EntityType-level config: unset / disabled = true indicates disabled by - /// default for Features under it; otherwise by default enable snapshot - /// analysis monitoring with monitoring_interval for Features under it. - /// Feature-level config: disabled = true indicates disabled regardless of the - /// EntityType-level config; unset monitoring_interval indicates going with - /// EntityType-level config; otherwise run snapshot analysis monitoring with - /// monitoring_interval regardless of the EntityType-level config. Explicitly - /// Disable the snapshot analysis based monitoring. - core.bool? disabled; + /// If two Annotations are identical, one of them will be deduped. Two + /// Annotations are considered identical if their payload, payload_schema_uri + /// and all of their labels are the same. These labels will be overridden by + /// Annotation labels specified inside index file referenced by + /// import_schema_uri, e.g. jsonl file. + core.Map? annotationLabels; - /// Configuration of the snapshot analysis based monitoring pipeline running - /// interval. + /// Labels that will be applied to newly imported DataItems. /// - /// The value indicates number of days. - core.int? monitoringIntervalDays; + /// If an identical DataItem as one being imported already exists in the + /// Dataset, then these labels will be appended to these of the already + /// existing one, and if labels with identical key is imported before, the old + /// label value will be overwritten. If two DataItems are identical in the + /// same import data operation, the labels will be combined and if key + /// collision happens in this case, one of the values will be picked randomly. + /// Two DataItems are considered identical if their content bytes are + /// identical (e.g. image bytes or pdf bytes). These labels will be overridden + /// by Annotation labels specified inside index file referenced by + /// import_schema_uri, e.g. jsonl file. + core.Map? dataItemLabels; - /// Customized export features time window for snapshot analysis. + /// The Google Cloud Storage location for the input content. + GoogleCloudAiplatformV1GcsSource? gcsSource; + + /// Points to a YAML file stored on Google Cloud Storage describing the import + /// format. /// - /// Unit is one day. Default value is 3 weeks. Minimum value is 1 day. Maximum - /// value is 4000 days. - core.int? stalenessDays; + /// Validation will be done against the schema. The schema is defined as an + /// [OpenAPI 3.0.2 Schema Object](https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.2.md#schemaObject). + /// + /// Required. + core.String? 
importSchemaUri; - GoogleCloudAiplatformV1FeaturestoreMonitoringConfigSnapshotAnalysis({ - this.disabled, - this.monitoringIntervalDays, - this.stalenessDays, + GoogleCloudAiplatformV1ImportDataConfig({ + this.annotationLabels, + this.dataItemLabels, + this.gcsSource, + this.importSchemaUri, }); - GoogleCloudAiplatformV1FeaturestoreMonitoringConfigSnapshotAnalysis.fromJson( - core.Map json_) + GoogleCloudAiplatformV1ImportDataConfig.fromJson(core.Map json_) : this( - disabled: json_['disabled'] as core.bool?, - monitoringIntervalDays: json_['monitoringIntervalDays'] as core.int?, - stalenessDays: json_['stalenessDays'] as core.int?, + annotationLabels: (json_['annotationLabels'] + as core.Map?) + ?.map( + (key, value) => core.MapEntry( + key, + value as core.String, + ), + ), + dataItemLabels: + (json_['dataItemLabels'] as core.Map?) + ?.map( + (key, value) => core.MapEntry( + key, + value as core.String, + ), + ), + gcsSource: json_.containsKey('gcsSource') + ? GoogleCloudAiplatformV1GcsSource.fromJson( + json_['gcsSource'] as core.Map) + : null, + importSchemaUri: json_['importSchemaUri'] as core.String?, ); core.Map toJson() => { - if (disabled != null) 'disabled': disabled!, - if (monitoringIntervalDays != null) - 'monitoringIntervalDays': monitoringIntervalDays!, - if (stalenessDays != null) 'stalenessDays': stalenessDays!, + if (annotationLabels != null) 'annotationLabels': annotationLabels!, + if (dataItemLabels != null) 'dataItemLabels': dataItemLabels!, + if (gcsSource != null) 'gcsSource': gcsSource!, + if (importSchemaUri != null) 'importSchemaUri': importSchemaUri!, }; } -/// The config for Featurestore Monitoring threshold. -class GoogleCloudAiplatformV1FeaturestoreMonitoringConfigThresholdConfig { - /// Specify a threshold value that can trigger the alert. +/// Request message for DatasetService.ImportData. +class GoogleCloudAiplatformV1ImportDataRequest { + /// The desired input locations. /// - /// 1. For categorical feature, the distribution distance is calculated by - /// L-inifinity norm. 2. For numerical feature, the distribution distance is - /// calculated by Jensen–Shannon divergence. Each feature must have a non-zero - /// threshold if they need to be monitored. Otherwise no alert will be - /// triggered for that feature. - core.double? value; + /// The contents of all input locations will be imported in one batch. + /// + /// Required. + core.List? importConfigs; - GoogleCloudAiplatformV1FeaturestoreMonitoringConfigThresholdConfig({ - this.value, + GoogleCloudAiplatformV1ImportDataRequest({ + this.importConfigs, }); - GoogleCloudAiplatformV1FeaturestoreMonitoringConfigThresholdConfig.fromJson( - core.Map json_) + GoogleCloudAiplatformV1ImportDataRequest.fromJson(core.Map json_) : this( - value: (json_['value'] as core.num?)?.toDouble(), + importConfigs: (json_['importConfigs'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1ImportDataConfig.fromJson( + value as core.Map)) + .toList(), ); core.Map toJson() => { - if (value != null) 'value': value!, + if (importConfigs != null) 'importConfigs': importConfigs!, }; } -/// OnlineServingConfig specifies the details for provisioning online serving -/// resources. -class GoogleCloudAiplatformV1FeaturestoreOnlineServingConfig { - /// The number of nodes for the online store. +/// Request message for FeaturestoreService.ImportFeatureValues. +class GoogleCloudAiplatformV1ImportFeatureValuesRequest { + GoogleCloudAiplatformV1AvroSource? avroSource; + GoogleCloudAiplatformV1BigQuerySource? 
bigquerySource; + GoogleCloudAiplatformV1CsvSource? csvSource; + + /// If true, API doesn't start ingestion analysis pipeline. + core.bool? disableIngestionAnalysis; + + /// If set, data will not be imported for online serving. /// - /// The number of nodes doesn't scale automatically, but you can manually - /// update the number of nodes. If set to 0, the featurestore will not have an - /// online store and cannot be used for online serving. - core.int? fixedNodeCount; + /// This is typically used for backfilling, where Feature generation + /// timestamps are not in the timestamp range needed for online serving. + core.bool? disableOnlineServing; - /// Online serving scaling configuration. + /// Source column that holds entity IDs. /// - /// Only one of `fixed_node_count` and `scaling` can be set. Setting one will - /// reset the other. - GoogleCloudAiplatformV1FeaturestoreOnlineServingConfigScaling? scaling; + /// If not provided, entity IDs are extracted from the column named entity_id. + core.String? entityIdField; - GoogleCloudAiplatformV1FeaturestoreOnlineServingConfig({ - this.fixedNodeCount, - this.scaling, + /// Specifications defining which Feature values to import from the entity. + /// + /// The request fails if no feature_specs are provided, and having multiple + /// feature_specs for one Feature is not allowed. + /// + /// Required. + core.List? + featureSpecs; + + /// Single Feature timestamp for all entities being imported. + /// + /// The timestamp must not have higher than millisecond precision. + core.String? featureTime; + + /// Source column that holds the Feature timestamp for all Feature values in + /// each entity. + core.String? featureTimeField; + + /// Specifies the number of workers that are used to write data to the + /// Featurestore. + /// + /// Consider the online serving capacity that you require to achieve the + /// desired import throughput without interfering with online serving. The + /// value must be positive, and less than or equal to 100. If not set, + /// defaults to using 1 worker. The low count ensures minimal impact on online + /// serving performance. + core.int? workerCount; + + GoogleCloudAiplatformV1ImportFeatureValuesRequest({ + this.avroSource, + this.bigquerySource, + this.csvSource, + this.disableIngestionAnalysis, + this.disableOnlineServing, + this.entityIdField, + this.featureSpecs, + this.featureTime, + this.featureTimeField, + this.workerCount, }); - GoogleCloudAiplatformV1FeaturestoreOnlineServingConfig.fromJson( - core.Map json_) + GoogleCloudAiplatformV1ImportFeatureValuesRequest.fromJson(core.Map json_) : this( - fixedNodeCount: json_['fixedNodeCount'] as core.int?, - scaling: json_.containsKey('scaling') - ? GoogleCloudAiplatformV1FeaturestoreOnlineServingConfigScaling - .fromJson( - json_['scaling'] as core.Map) + avroSource: json_.containsKey('avroSource') + ? GoogleCloudAiplatformV1AvroSource.fromJson( + json_['avroSource'] as core.Map) + : null, + bigquerySource: json_.containsKey('bigquerySource') + ? GoogleCloudAiplatformV1BigQuerySource.fromJson( + json_['bigquerySource'] + as core.Map) : null, + csvSource: json_.containsKey('csvSource') + ? GoogleCloudAiplatformV1CsvSource.fromJson( + json_['csvSource'] as core.Map) + : null, + disableIngestionAnalysis: + json_['disableIngestionAnalysis'] as core.bool?, + disableOnlineServing: json_['disableOnlineServing'] as core.bool?, + entityIdField: json_['entityIdField'] as core.String?, + featureSpecs: (json_['featureSpecs'] as core.List?) 
+ ?.map((value) => + GoogleCloudAiplatformV1ImportFeatureValuesRequestFeatureSpec + .fromJson(value as core.Map)) + .toList(), + featureTime: json_['featureTime'] as core.String?, + featureTimeField: json_['featureTimeField'] as core.String?, + workerCount: json_['workerCount'] as core.int?, ); core.Map toJson() => { - if (fixedNodeCount != null) 'fixedNodeCount': fixedNodeCount!, - if (scaling != null) 'scaling': scaling!, + if (avroSource != null) 'avroSource': avroSource!, + if (bigquerySource != null) 'bigquerySource': bigquerySource!, + if (csvSource != null) 'csvSource': csvSource!, + if (disableIngestionAnalysis != null) + 'disableIngestionAnalysis': disableIngestionAnalysis!, + if (disableOnlineServing != null) + 'disableOnlineServing': disableOnlineServing!, + if (entityIdField != null) 'entityIdField': entityIdField!, + if (featureSpecs != null) 'featureSpecs': featureSpecs!, + if (featureTime != null) 'featureTime': featureTime!, + if (featureTimeField != null) 'featureTimeField': featureTimeField!, + if (workerCount != null) 'workerCount': workerCount!, }; } -/// Online serving scaling configuration. -/// -/// If min_node_count and max_node_count are set to the same value, the cluster -/// will be configured with the fixed number of node (no auto-scaling). -class GoogleCloudAiplatformV1FeaturestoreOnlineServingConfigScaling { - /// The cpu utilization that the Autoscaler should be trying to achieve. - /// - /// This number is on a scale from 0 (no utilization) to 100 (total - /// utilization), and is limited between 10 and 80. When a cluster's CPU - /// utilization exceeds the target that you have set, Bigtable immediately - /// adds nodes to the cluster. When CPU utilization is substantially lower - /// than the target, Bigtable removes nodes. If not set or set to 0, default - /// to 50. +/// Defines the Feature value(s) to import. +class GoogleCloudAiplatformV1ImportFeatureValuesRequestFeatureSpec { + /// ID of the Feature to import values of. /// - /// Optional. - core.int? cpuUtilizationTarget; - - /// The maximum number of nodes to scale up to. + /// This Feature must exist in the target EntityType, or the request will + /// fail. /// - /// Must be greater than min_node_count, and less than or equal to 10 times of - /// 'min_node_count'. - core.int? maxNodeCount; + /// Required. + core.String? id; - /// The minimum number of nodes to scale down to. - /// - /// Must be greater than or equal to 1. + /// Source column to get the Feature values from. /// - /// Required. - core.int? minNodeCount; + /// If not set, uses the column with the same name as the Feature ID. + core.String? 
sourceField; - GoogleCloudAiplatformV1FeaturestoreOnlineServingConfigScaling({ - this.cpuUtilizationTarget, - this.maxNodeCount, - this.minNodeCount, + GoogleCloudAiplatformV1ImportFeatureValuesRequestFeatureSpec({ + this.id, + this.sourceField, }); - GoogleCloudAiplatformV1FeaturestoreOnlineServingConfigScaling.fromJson( + GoogleCloudAiplatformV1ImportFeatureValuesRequestFeatureSpec.fromJson( core.Map json_) : this( - cpuUtilizationTarget: json_['cpuUtilizationTarget'] as core.int?, - maxNodeCount: json_['maxNodeCount'] as core.int?, - minNodeCount: json_['minNodeCount'] as core.int?, + id: json_['id'] as core.String?, + sourceField: json_['sourceField'] as core.String?, ); core.Map toJson() => { - if (cpuUtilizationTarget != null) - 'cpuUtilizationTarget': cpuUtilizationTarget!, - if (maxNodeCount != null) 'maxNodeCount': maxNodeCount!, - if (minNodeCount != null) 'minNodeCount': minNodeCount!, + if (id != null) 'id': id!, + if (sourceField != null) 'sourceField': sourceField!, }; } -/// Request message for FeatureOnlineStoreService.FetchFeatureValues. -/// -/// All the features under the requested feature view will be returned. -class GoogleCloudAiplatformV1FetchFeatureValuesRequest { - /// Response data format. - /// - /// If not set, FeatureViewDataFormat.KEY_VALUE will be used. - /// - /// Optional. - /// Possible string values are: - /// - "FEATURE_VIEW_DATA_FORMAT_UNSPECIFIED" : Not set. Will be treated as the - /// KeyValue format. - /// - "KEY_VALUE" : Return response data in key-value format. - /// - "PROTO_STRUCT" : Return response data in proto Struct format. - core.String? dataFormat; - - /// The request key to fetch feature values for. +/// Request message for ModelService.ImportModelEvaluation +class GoogleCloudAiplatformV1ImportModelEvaluationRequest { + /// Model evaluation resource to be imported. /// - /// Optional. - GoogleCloudAiplatformV1FeatureViewDataKey? dataKey; + /// Required. + GoogleCloudAiplatformV1ModelEvaluation? modelEvaluation; - GoogleCloudAiplatformV1FetchFeatureValuesRequest({ - this.dataFormat, - this.dataKey, + GoogleCloudAiplatformV1ImportModelEvaluationRequest({ + this.modelEvaluation, }); - GoogleCloudAiplatformV1FetchFeatureValuesRequest.fromJson(core.Map json_) + GoogleCloudAiplatformV1ImportModelEvaluationRequest.fromJson(core.Map json_) : this( - dataFormat: json_['dataFormat'] as core.String?, - dataKey: json_.containsKey('dataKey') - ? GoogleCloudAiplatformV1FeatureViewDataKey.fromJson( - json_['dataKey'] as core.Map) + modelEvaluation: json_.containsKey('modelEvaluation') + ? GoogleCloudAiplatformV1ModelEvaluation.fromJson( + json_['modelEvaluation'] + as core.Map) : null, ); core.Map toJson() => { - if (dataFormat != null) 'dataFormat': dataFormat!, - if (dataKey != null) 'dataKey': dataKey!, + if (modelEvaluation != null) 'modelEvaluation': modelEvaluation!, }; } -/// Response message for FeatureOnlineStoreService.FetchFeatureValues -class GoogleCloudAiplatformV1FetchFeatureValuesResponse { - /// The data key associated with this response. +/// Config for importing RagFiles. +class GoogleCloudAiplatformV1ImportRagFilesConfig { + /// Google Cloud Storage location. /// - /// Will only be populated for - /// FeatureOnlineStoreService.StreamingFetchFeatureValues RPCs. - GoogleCloudAiplatformV1FeatureViewDataKey? dataKey; + /// Supports importing individual files as well as entire Google Cloud Storage + /// directories. 
Sample formats: - + /// `gs://bucket_name/my_directory/object_name/my_file.txt` - + /// `gs://bucket_name/my_directory` + GoogleCloudAiplatformV1GcsSource? gcsSource; - /// Feature values in KeyValue format. - GoogleCloudAiplatformV1FetchFeatureValuesResponseFeatureNameValuePairList? - keyValues; + /// Google Drive location. + /// + /// Supports importing individual files as well as Google Drive folders. + GoogleCloudAiplatformV1GoogleDriveSource? googleDriveSource; - /// Feature values in proto Struct format. + /// Jira queries with their corresponding authentication. + GoogleCloudAiplatformV1JiraSource? jiraSource; + + /// The max number of queries per minute that this job is allowed to make to + /// the embedding model specified on the corpus. /// - /// The values for Object must be JSON objects. It can consist of `num`, - /// `String`, `bool` and `null` as well as `Map` and `List` values. - core.Map? protoStruct; + /// This value is specific to this job and not shared across other import + /// jobs. Consult the Quotas page on the project to set an appropriate value + /// here. If unspecified, a default value of 1,000 QPM would be used. + /// + /// Optional. + core.int? maxEmbeddingRequestsPerMin; - GoogleCloudAiplatformV1FetchFeatureValuesResponse({ - this.dataKey, - this.keyValues, - this.protoStruct, + /// The BigQuery destination to write partial failures to. + /// + /// It should be a bigquery table resource name (e.g. + /// "bq://projectId.bqDatasetId.bqTableId"). The dataset must exist. If the + /// table does not exist, it will be created with the expected schema. If the + /// table exists, the schema will be validated and data will be added to this + /// existing table. Deprecated. Prefer to use `import_result_bq_sink`. + @core.Deprecated( + 'Not supported. Member documentation may have more information.', + ) + GoogleCloudAiplatformV1BigQueryDestination? partialFailureBigquerySink; + + /// The Cloud Storage path to write partial failures to. + /// + /// Deprecated. Prefer to use `import_result_gcs_sink`. + @core.Deprecated( + 'Not supported. Member documentation may have more information.', + ) + GoogleCloudAiplatformV1GcsDestination? partialFailureGcsSink; + + /// Specifies the transformation config for RagFiles. + GoogleCloudAiplatformV1RagFileTransformationConfig? + ragFileTransformationConfig; + + /// SharePoint sources. + GoogleCloudAiplatformV1SharePointSources? sharePointSources; + + /// Slack channels with their corresponding access tokens. + GoogleCloudAiplatformV1SlackSource? slackSource; + + GoogleCloudAiplatformV1ImportRagFilesConfig({ + this.gcsSource, + this.googleDriveSource, + this.jiraSource, + this.maxEmbeddingRequestsPerMin, + this.partialFailureBigquerySink, + this.partialFailureGcsSink, + this.ragFileTransformationConfig, + this.sharePointSources, + this.slackSource, }); - GoogleCloudAiplatformV1FetchFeatureValuesResponse.fromJson(core.Map json_) + GoogleCloudAiplatformV1ImportRagFilesConfig.fromJson(core.Map json_) : this( - dataKey: json_.containsKey('dataKey') - ? GoogleCloudAiplatformV1FeatureViewDataKey.fromJson( - json_['dataKey'] as core.Map) + gcsSource: json_.containsKey('gcsSource') + ? GoogleCloudAiplatformV1GcsSource.fromJson( + json_['gcsSource'] as core.Map) : null, - keyValues: json_.containsKey('keyValues') - ? GoogleCloudAiplatformV1FetchFeatureValuesResponseFeatureNameValuePairList - .fromJson( - json_['keyValues'] as core.Map) + googleDriveSource: json_.containsKey('googleDriveSource') + ? 
GoogleCloudAiplatformV1GoogleDriveSource.fromJson( + json_['googleDriveSource'] + as core.Map) : null, - protoStruct: json_.containsKey('protoStruct') - ? json_['protoStruct'] as core.Map + jiraSource: json_.containsKey('jiraSource') + ? GoogleCloudAiplatformV1JiraSource.fromJson( + json_['jiraSource'] as core.Map) + : null, + maxEmbeddingRequestsPerMin: + json_['maxEmbeddingRequestsPerMin'] as core.int?, + partialFailureBigquerySink: + json_.containsKey('partialFailureBigquerySink') + ? GoogleCloudAiplatformV1BigQueryDestination.fromJson( + json_['partialFailureBigquerySink'] + as core.Map) + : null, + partialFailureGcsSink: json_.containsKey('partialFailureGcsSink') + ? GoogleCloudAiplatformV1GcsDestination.fromJson( + json_['partialFailureGcsSink'] + as core.Map) + : null, + ragFileTransformationConfig: + json_.containsKey('ragFileTransformationConfig') + ? GoogleCloudAiplatformV1RagFileTransformationConfig.fromJson( + json_['ragFileTransformationConfig'] + as core.Map) + : null, + sharePointSources: json_.containsKey('sharePointSources') + ? GoogleCloudAiplatformV1SharePointSources.fromJson( + json_['sharePointSources'] + as core.Map) + : null, + slackSource: json_.containsKey('slackSource') + ? GoogleCloudAiplatformV1SlackSource.fromJson( + json_['slackSource'] as core.Map) : null, ); core.Map toJson() => { - if (dataKey != null) 'dataKey': dataKey!, - if (keyValues != null) 'keyValues': keyValues!, - if (protoStruct != null) 'protoStruct': protoStruct!, + if (gcsSource != null) 'gcsSource': gcsSource!, + if (googleDriveSource != null) 'googleDriveSource': googleDriveSource!, + if (jiraSource != null) 'jiraSource': jiraSource!, + if (maxEmbeddingRequestsPerMin != null) + 'maxEmbeddingRequestsPerMin': maxEmbeddingRequestsPerMin!, + if (partialFailureBigquerySink != null) + 'partialFailureBigquerySink': partialFailureBigquerySink!, + if (partialFailureGcsSink != null) + 'partialFailureGcsSink': partialFailureGcsSink!, + if (ragFileTransformationConfig != null) + 'ragFileTransformationConfig': ragFileTransformationConfig!, + if (sharePointSources != null) 'sharePointSources': sharePointSources!, + if (slackSource != null) 'slackSource': slackSource!, }; } -/// Response structure in the format of key (feature name) and (feature) value -/// pair. -class GoogleCloudAiplatformV1FetchFeatureValuesResponseFeatureNameValuePairList { - /// List of feature names and values. - core.List< - GoogleCloudAiplatformV1FetchFeatureValuesResponseFeatureNameValuePairListFeatureNameValuePair>? - features; +/// Request message for VertexRagDataService.ImportRagFiles. +class GoogleCloudAiplatformV1ImportRagFilesRequest { + /// The config for the RagFiles to be synced and imported into the RagCorpus. + /// + /// VertexRagDataService.ImportRagFiles. + /// + /// Required. + GoogleCloudAiplatformV1ImportRagFilesConfig? importRagFilesConfig; - GoogleCloudAiplatformV1FetchFeatureValuesResponseFeatureNameValuePairList({ - this.features, + GoogleCloudAiplatformV1ImportRagFilesRequest({ + this.importRagFilesConfig, }); - GoogleCloudAiplatformV1FetchFeatureValuesResponseFeatureNameValuePairList.fromJson( - core.Map json_) + GoogleCloudAiplatformV1ImportRagFilesRequest.fromJson(core.Map json_) : this( - features: (json_['features'] as core.List?) - ?.map((value) => - GoogleCloudAiplatformV1FetchFeatureValuesResponseFeatureNameValuePairListFeatureNameValuePair - .fromJson(value as core.Map)) - .toList(), + importRagFilesConfig: json_.containsKey('importRagFilesConfig') + ? 
GoogleCloudAiplatformV1ImportRagFilesConfig.fromJson( + json_['importRagFilesConfig'] + as core.Map) + : null, ); core.Map toJson() => { - if (features != null) 'features': features!, + if (importRagFilesConfig != null) + 'importRagFilesConfig': importRagFilesConfig!, }; } -/// Feature name & value pair. -class GoogleCloudAiplatformV1FetchFeatureValuesResponseFeatureNameValuePairListFeatureNameValuePair { - /// Feature short name. +/// A representation of a collection of database items organized in a way that +/// allows for approximate nearest neighbor (a.k.a ANN) algorithms search. +class GoogleCloudAiplatformV1Index { + /// Timestamp when this Index was created. + /// + /// Output only. + core.String? createTime; + + /// The pointers to DeployedIndexes created from this Index. + /// + /// An Index can be only deleted if all its DeployedIndexes had been + /// undeployed first. + /// + /// Output only. + core.List? deployedIndexes; + + /// The description of the Index. + core.String? description; + + /// The display name of the Index. + /// + /// The name can be up to 128 characters long and can consist of any UTF-8 + /// characters. + /// + /// Required. + core.String? displayName; + + /// Customer-managed encryption key spec for an Index. + /// + /// If set, this Index and all sub-resources of this Index will be secured by + /// this key. + /// + /// Immutable. + GoogleCloudAiplatformV1EncryptionSpec? encryptionSpec; + + /// Used to perform consistent read-modify-write updates. + /// + /// If not set, a blind "overwrite" update happens. + core.String? etag; + + /// Stats of the index resource. + /// + /// Output only. + GoogleCloudAiplatformV1IndexStats? indexStats; + + /// The update method to use with this Index. + /// + /// If not set, BATCH_UPDATE will be used by default. + /// + /// Immutable. + /// Possible string values are: + /// - "INDEX_UPDATE_METHOD_UNSPECIFIED" : Should not be used. + /// - "BATCH_UPDATE" : BatchUpdate: user can call UpdateIndex with files on + /// Cloud Storage of Datapoints to update. + /// - "STREAM_UPDATE" : StreamUpdate: user can call + /// UpsertDatapoints/DeleteDatapoints to update the Index and the updates will + /// be applied in corresponding DeployedIndexes in nearly real-time. + core.String? indexUpdateMethod; + + /// The labels with user-defined metadata to organize your Indexes. + /// + /// Label keys and values can be no longer than 64 characters (Unicode + /// codepoints), can only contain lowercase letters, numeric characters, + /// underscores and dashes. International characters are allowed. See + /// https://goo.gl/xmQnxf for more information and examples of labels. + core.Map? labels; + + /// An additional information about the Index; the schema of the metadata can + /// be found in metadata_schema. + /// + /// The values for Object must be JSON objects. It can consist of `num`, + /// `String`, `bool` and `null` as well as `Map` and `List` values. + core.Object? metadata; + + /// Points to a YAML file stored on Google Cloud Storage describing additional + /// information about the Index, that is specific to it. + /// + /// Unset if the Index does not have any additional information. The schema is + /// defined as an OpenAPI 3.0.2 + /// [Schema Object](https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.2.md#schemaObject). + /// Note: The URI given on output will be immutable and probably different, + /// including the URI scheme, than the one given on input. 
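The ImportRagFilesConfig and ImportRagFilesRequest wrappers added above compose as follows. A minimal sketch, assuming the published `package:googleapis/aiplatform/v1.dart` import; the bucket path and QPM value are placeholders, and the JSON round-trip simply exercises the generated fromJson/toJson pair.

```dart
import 'dart:convert';

import 'package:googleapis/aiplatform/v1.dart';

void main() {
  // Placeholder bucket and prefix; any gs:// file or directory URI works here.
  final request = GoogleCloudAiplatformV1ImportRagFilesRequest(
    importRagFilesConfig: GoogleCloudAiplatformV1ImportRagFilesConfig(
      gcsSource: GoogleCloudAiplatformV1GcsSource(
        uris: ['gs://my-bucket/my_directory'],
      ),
      // Throttle embedding calls for this import job (the API defaults to 1,000 QPM).
      maxEmbeddingRequestsPerMin: 500,
    ),
  );

  // Round-trip through JSON exactly as the generated fromJson/toJson pair does.
  final json = jsonEncode(request.toJson());
  final decoded = GoogleCloudAiplatformV1ImportRagFilesRequest.fromJson(
      jsonDecode(json) as Map);
  print(decoded.importRagFilesConfig?.gcsSource?.uris);
}
```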
The output URI will + /// point to a location where the user only has a read access. + /// + /// Immutable. + core.String? metadataSchemaUri; + + /// The resource name of the Index. + /// + /// Output only. core.String? name; - /// Feature value. - GoogleCloudAiplatformV1FeatureValue? value; + /// Reserved for future use. + /// + /// Output only. + core.bool? satisfiesPzi; - GoogleCloudAiplatformV1FetchFeatureValuesResponseFeatureNameValuePairListFeatureNameValuePair({ + /// Reserved for future use. + /// + /// Output only. + core.bool? satisfiesPzs; + + /// Timestamp when this Index was most recently updated. + /// + /// This also includes any update to the contents of the Index. Note that + /// Operations working on this Index may have their + /// Operations.metadata.generic_metadata.update_time a little after the value + /// of this timestamp, yet that does not mean their results are not already + /// reflected in the Index. Result of any successfully completed Operation on + /// the Index is reflected in it. + /// + /// Output only. + core.String? updateTime; + + GoogleCloudAiplatformV1Index({ + this.createTime, + this.deployedIndexes, + this.description, + this.displayName, + this.encryptionSpec, + this.etag, + this.indexStats, + this.indexUpdateMethod, + this.labels, + this.metadata, + this.metadataSchemaUri, this.name, - this.value, + this.satisfiesPzi, + this.satisfiesPzs, + this.updateTime, }); - GoogleCloudAiplatformV1FetchFeatureValuesResponseFeatureNameValuePairListFeatureNameValuePair.fromJson( - core.Map json_) + GoogleCloudAiplatformV1Index.fromJson(core.Map json_) : this( - name: json_['name'] as core.String?, - value: json_.containsKey('value') - ? GoogleCloudAiplatformV1FeatureValue.fromJson( - json_['value'] as core.Map) + createTime: json_['createTime'] as core.String?, + deployedIndexes: (json_['deployedIndexes'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1DeployedIndexRef.fromJson( + value as core.Map)) + .toList(), + description: json_['description'] as core.String?, + displayName: json_['displayName'] as core.String?, + encryptionSpec: json_.containsKey('encryptionSpec') + ? GoogleCloudAiplatformV1EncryptionSpec.fromJson( + json_['encryptionSpec'] + as core.Map) + : null, + etag: json_['etag'] as core.String?, + indexStats: json_.containsKey('indexStats') + ? 
GoogleCloudAiplatformV1IndexStats.fromJson( + json_['indexStats'] as core.Map) : null, + indexUpdateMethod: json_['indexUpdateMethod'] as core.String?, + labels: + (json_['labels'] as core.Map?)?.map( + (key, value) => core.MapEntry( + key, + value as core.String, + ), + ), + metadata: json_['metadata'], + metadataSchemaUri: json_['metadataSchemaUri'] as core.String?, + name: json_['name'] as core.String?, + satisfiesPzi: json_['satisfiesPzi'] as core.bool?, + satisfiesPzs: json_['satisfiesPzs'] as core.bool?, + updateTime: json_['updateTime'] as core.String?, ); core.Map toJson() => { + if (createTime != null) 'createTime': createTime!, + if (deployedIndexes != null) 'deployedIndexes': deployedIndexes!, + if (description != null) 'description': description!, + if (displayName != null) 'displayName': displayName!, + if (encryptionSpec != null) 'encryptionSpec': encryptionSpec!, + if (etag != null) 'etag': etag!, + if (indexStats != null) 'indexStats': indexStats!, + if (indexUpdateMethod != null) 'indexUpdateMethod': indexUpdateMethod!, + if (labels != null) 'labels': labels!, + if (metadata != null) 'metadata': metadata!, + if (metadataSchemaUri != null) 'metadataSchemaUri': metadataSchemaUri!, if (name != null) 'name': name!, - if (value != null) 'value': value!, + if (satisfiesPzi != null) 'satisfiesPzi': satisfiesPzi!, + if (satisfiesPzs != null) 'satisfiesPzs': satisfiesPzs!, + if (updateTime != null) 'updateTime': updateTime!, }; } -/// URI based data. -class GoogleCloudAiplatformV1FileData { - /// URI. +/// A datapoint of Index. +class GoogleCloudAiplatformV1IndexDatapoint { + /// CrowdingTag of the datapoint, the number of neighbors to return in each + /// crowding can be configured during query. /// - /// Required. - core.String? fileUri; + /// Optional. + GoogleCloudAiplatformV1IndexDatapointCrowdingTag? crowdingTag; - /// The IANA standard MIME type of the source data. + /// Unique identifier of the datapoint. /// /// Required. - core.String? mimeType; - - GoogleCloudAiplatformV1FileData({ - this.fileUri, - this.mimeType, - }); - - GoogleCloudAiplatformV1FileData.fromJson(core.Map json_) - : this( - fileUri: json_['fileUri'] as core.String?, - mimeType: json_['mimeType'] as core.String?, - ); - - core.Map toJson() => { - if (fileUri != null) 'fileUri': fileUri!, - if (mimeType != null) 'mimeType': mimeType!, - }; -} + core.String? datapointId; -/// Assigns input data to training, validation, and test sets based on the given -/// filters, data pieces not matched by any filter are ignored. -/// -/// Currently only supported for Datasets containing DataItems. If any of the -/// filters in this message are to match nothing, then they can be set as '-' -/// (the minus sign). Supported only for unstructured Datasets. -typedef GoogleCloudAiplatformV1FilterSplit = $FilterSplit; + /// Feature embedding vector for dense index. + /// + /// An array of numbers with the length of + /// \[NearestNeighborSearchConfig.dimensions\]. + /// + /// Required. + core.List? featureVector; -/// The request message for MatchService.FindNeighbors. -class GoogleCloudAiplatformV1FindNeighborsRequest { - /// The ID of the DeployedIndex that will serve the request. + /// List of Restrict of the datapoint, used to perform "restricted searches" + /// where boolean rule are used to filter the subset of the database eligible + /// for matching. /// - /// This request is sent to a specific IndexEndpoint, as per the - /// IndexEndpoint.network. 
That IndexEndpoint also has - /// IndexEndpoint.deployed_indexes, and each such index has a DeployedIndex.id - /// field. The value of the field below must equal one of the DeployedIndex.id - /// fields of the IndexEndpoint that is being called for this request. - core.String? deployedIndexId; + /// This uses numeric comparisons. + /// + /// Optional. + core.List? + numericRestricts; - /// The list of queries. - core.List? queries; + /// List of Restrict of the datapoint, used to perform "restricted searches" + /// where boolean rule are used to filter the subset of the database eligible + /// for matching. + /// + /// This uses categorical tokens. See: + /// https://cloud.google.com/vertex-ai/docs/matching-engine/filtering + /// + /// Optional. + core.List? restricts; - /// If set to true, the full datapoints (including all vector values and - /// restricts) of the nearest neighbors are returned. + /// Feature embedding vector for sparse index. /// - /// Note that returning full datapoint will significantly increase the latency - /// and cost of the query. - core.bool? returnFullDatapoint; + /// Optional. + GoogleCloudAiplatformV1IndexDatapointSparseEmbedding? sparseEmbedding; - GoogleCloudAiplatformV1FindNeighborsRequest({ - this.deployedIndexId, - this.queries, - this.returnFullDatapoint, + GoogleCloudAiplatformV1IndexDatapoint({ + this.crowdingTag, + this.datapointId, + this.featureVector, + this.numericRestricts, + this.restricts, + this.sparseEmbedding, }); - GoogleCloudAiplatformV1FindNeighborsRequest.fromJson(core.Map json_) + GoogleCloudAiplatformV1IndexDatapoint.fromJson(core.Map json_) : this( - deployedIndexId: json_['deployedIndexId'] as core.String?, - queries: (json_['queries'] as core.List?) + crowdingTag: json_.containsKey('crowdingTag') + ? GoogleCloudAiplatformV1IndexDatapointCrowdingTag.fromJson( + json_['crowdingTag'] as core.Map) + : null, + datapointId: json_['datapointId'] as core.String?, + featureVector: (json_['featureVector'] as core.List?) + ?.map((value) => (value as core.num).toDouble()) + .toList(), + numericRestricts: (json_['numericRestricts'] as core.List?) ?.map((value) => - GoogleCloudAiplatformV1FindNeighborsRequestQuery.fromJson( + GoogleCloudAiplatformV1IndexDatapointNumericRestriction + .fromJson(value as core.Map)) + .toList(), + restricts: (json_['restricts'] as core.List?) + ?.map((value) => + GoogleCloudAiplatformV1IndexDatapointRestriction.fromJson( value as core.Map)) .toList(), - returnFullDatapoint: json_['returnFullDatapoint'] as core.bool?, + sparseEmbedding: json_.containsKey('sparseEmbedding') + ? GoogleCloudAiplatformV1IndexDatapointSparseEmbedding.fromJson( + json_['sparseEmbedding'] + as core.Map) + : null, ); core.Map toJson() => { - if (deployedIndexId != null) 'deployedIndexId': deployedIndexId!, - if (queries != null) 'queries': queries!, - if (returnFullDatapoint != null) - 'returnFullDatapoint': returnFullDatapoint!, + if (crowdingTag != null) 'crowdingTag': crowdingTag!, + if (datapointId != null) 'datapointId': datapointId!, + if (featureVector != null) 'featureVector': featureVector!, + if (numericRestricts != null) 'numericRestricts': numericRestricts!, + if (restricts != null) 'restricts': restricts!, + if (sparseEmbedding != null) 'sparseEmbedding': sparseEmbedding!, }; } -/// A query to find a number of the nearest neighbors (most similar vectors) of -/// a vector. 
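Since the new IndexDatapoint type feeds directly into MatchService.FindNeighbors, here is a hedged sketch of a query payload built from the classes shown in this hunk. The deployed index ID, vector values, and counts are illustrative only.

```dart
import 'package:googleapis/aiplatform/v1.dart';

void main() {
  // A single ANN query against a deployed index; all values are placeholders.
  final query = GoogleCloudAiplatformV1FindNeighborsRequestQuery(
    datapoint: GoogleCloudAiplatformV1IndexDatapoint(
      datapointId: 'query-0',
      featureVector: [0.12, -0.34, 0.56],
    ),
    neighborCount: 10,
    // Keep result diversity: at most 2 neighbors per crowding attribute value.
    perCrowdingAttributeNeighborCount: 2,
  );

  final request = GoogleCloudAiplatformV1FindNeighborsRequest(
    deployedIndexId: 'my_deployed_index',
    queries: [query],
    // Returning full datapoints is slower and costlier, so leave this off.
    returnFullDatapoint: false,
  );

  print(request.toJson());
}
```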
-class GoogleCloudAiplatformV1FindNeighborsRequestQuery { - /// The number of neighbors to find via approximate search before exact - /// reordering is performed. - /// - /// If not set, the default value from scam config is used; if set, this value - /// must be \> 0. - core.int? approximateNeighborCount; - - /// The datapoint/vector whose nearest neighbors should be searched for. - /// - /// Required. - GoogleCloudAiplatformV1IndexDatapoint? datapoint; - - /// The fraction of the number of leaves to search, set at query time allows - /// user to tune search performance. - /// - /// This value increase result in both search accuracy and latency increase. - /// The value should be between 0.0 and 1.0. If not set or set to 0.0, query - /// uses the default value specified in - /// NearestNeighborSearchConfig.TreeAHConfig.fraction_leaf_nodes_to_search. - core.double? fractionLeafNodesToSearchOverride; - - /// The number of nearest neighbors to be retrieved from database for each - /// query. - /// - /// If not set, will use the default from the service configuration - /// (https://cloud.google.com/vertex-ai/docs/matching-engine/configuring-indexes#nearest-neighbor-search-config). - core.int? neighborCount; - - /// Crowding is a constraint on a neighbor list produced by nearest neighbor - /// search requiring that no more than some value k' of the k neighbors - /// returned have the same value of crowding_attribute. - /// - /// It's used for improving result diversity. This field is the maximum number - /// of matches with the same crowding tag. - core.int? perCrowdingAttributeNeighborCount; - - /// Represents RRF algorithm that combines search results. +/// Crowding tag is a constraint on a neighbor list produced by nearest neighbor +/// search requiring that no more than some value k' of the k neighbors returned +/// have the same value of crowding_attribute. +class GoogleCloudAiplatformV1IndexDatapointCrowdingTag { + /// The attribute value used for crowding. /// - /// Optional. - GoogleCloudAiplatformV1FindNeighborsRequestQueryRRF? rrf; + /// The maximum number of neighbors to return per crowding attribute value + /// (per_crowding_attribute_num_neighbors) is configured per-query. This field + /// is ignored if per_crowding_attribute_num_neighbors is larger than the + /// total number of neighbors to return for a given query. + core.String? crowdingAttribute; - GoogleCloudAiplatformV1FindNeighborsRequestQuery({ - this.approximateNeighborCount, - this.datapoint, - this.fractionLeafNodesToSearchOverride, - this.neighborCount, - this.perCrowdingAttributeNeighborCount, - this.rrf, + GoogleCloudAiplatformV1IndexDatapointCrowdingTag({ + this.crowdingAttribute, }); - GoogleCloudAiplatformV1FindNeighborsRequestQuery.fromJson(core.Map json_) + GoogleCloudAiplatformV1IndexDatapointCrowdingTag.fromJson(core.Map json_) : this( - approximateNeighborCount: - json_['approximateNeighborCount'] as core.int?, - datapoint: json_.containsKey('datapoint') - ? GoogleCloudAiplatformV1IndexDatapoint.fromJson( - json_['datapoint'] as core.Map) - : null, - fractionLeafNodesToSearchOverride: - (json_['fractionLeafNodesToSearchOverride'] as core.num?) - ?.toDouble(), - neighborCount: json_['neighborCount'] as core.int?, - perCrowdingAttributeNeighborCount: - json_['perCrowdingAttributeNeighborCount'] as core.int?, - rrf: json_.containsKey('rrf') - ? 
GoogleCloudAiplatformV1FindNeighborsRequestQueryRRF.fromJson( - json_['rrf'] as core.Map) - : null, + crowdingAttribute: json_['crowdingAttribute'] as core.String?, ); core.Map toJson() => { - if (approximateNeighborCount != null) - 'approximateNeighborCount': approximateNeighborCount!, - if (datapoint != null) 'datapoint': datapoint!, - if (fractionLeafNodesToSearchOverride != null) - 'fractionLeafNodesToSearchOverride': - fractionLeafNodesToSearchOverride!, - if (neighborCount != null) 'neighborCount': neighborCount!, - if (perCrowdingAttributeNeighborCount != null) - 'perCrowdingAttributeNeighborCount': - perCrowdingAttributeNeighborCount!, - if (rrf != null) 'rrf': rrf!, + if (crowdingAttribute != null) 'crowdingAttribute': crowdingAttribute!, }; } -/// Parameters for RRF algorithm that combines search results. -class GoogleCloudAiplatformV1FindNeighborsRequestQueryRRF { - /// Users can provide an alpha value to give more weight to dense vs sparse - /// results. - /// - /// For example, if the alpha is 0, we only return sparse and if the alpha is - /// 1, we only return dense. +/// This field allows restricts to be based on numeric comparisons rather than +/// categorical tokens. +class GoogleCloudAiplatformV1IndexDatapointNumericRestriction { + /// The namespace of this restriction. /// - /// Required. - core.double? alpha; + /// e.g.: cost. + core.String? namespace; - GoogleCloudAiplatformV1FindNeighborsRequestQueryRRF({ - this.alpha, + /// This MUST be specified for queries and must NOT be specified for + /// datapoints. + /// Possible string values are: + /// - "OPERATOR_UNSPECIFIED" : Default value of the enum. + /// - "LESS" : Datapoints are eligible iff their value is \< the query's. + /// - "LESS_EQUAL" : Datapoints are eligible iff their value is \<= the + /// query's. + /// - "EQUAL" : Datapoints are eligible iff their value is == the query's. + /// - "GREATER_EQUAL" : Datapoints are eligible iff their value is \>= the + /// query's. + /// - "GREATER" : Datapoints are eligible iff their value is \> the query's. + /// - "NOT_EQUAL" : Datapoints are eligible iff their value is != the query's. + core.String? op; + + /// Represents 64 bit float. + core.double? valueDouble; + + /// Represents 32 bit float. + core.double? valueFloat; + + /// Represents 64 bit integer. + core.String? valueInt; + + GoogleCloudAiplatformV1IndexDatapointNumericRestriction({ + this.namespace, + this.op, + this.valueDouble, + this.valueFloat, + this.valueInt, }); - GoogleCloudAiplatformV1FindNeighborsRequestQueryRRF.fromJson(core.Map json_) + GoogleCloudAiplatformV1IndexDatapointNumericRestriction.fromJson( + core.Map json_) : this( - alpha: (json_['alpha'] as core.num?)?.toDouble(), + namespace: json_['namespace'] as core.String?, + op: json_['op'] as core.String?, + valueDouble: (json_['valueDouble'] as core.num?)?.toDouble(), + valueFloat: (json_['valueFloat'] as core.num?)?.toDouble(), + valueInt: json_['valueInt'] as core.String?, ); core.Map toJson() => { - if (alpha != null) 'alpha': alpha!, + if (namespace != null) 'namespace': namespace!, + if (op != null) 'op': op!, + if (valueDouble != null) 'valueDouble': valueDouble!, + if (valueFloat != null) 'valueFloat': valueFloat!, + if (valueInt != null) 'valueInt': valueInt!, }; } -/// The response message for MatchService.FindNeighbors. -class GoogleCloudAiplatformV1FindNeighborsResponse { - /// The nearest neighbors of the query datapoints. - core.List? 
- nearestNeighbors; +/// Restriction of a datapoint which describe its attributes(tokens) from each +/// of several attribute categories(namespaces). +class GoogleCloudAiplatformV1IndexDatapointRestriction { + /// The attributes to allow in this namespace. + /// + /// e.g.: 'red' + core.List? allowList; - GoogleCloudAiplatformV1FindNeighborsResponse({ - this.nearestNeighbors, + /// The attributes to deny in this namespace. + /// + /// e.g.: 'blue' + core.List? denyList; + + /// The namespace of this restriction. + /// + /// e.g.: color. + core.String? namespace; + + GoogleCloudAiplatformV1IndexDatapointRestriction({ + this.allowList, + this.denyList, + this.namespace, }); - GoogleCloudAiplatformV1FindNeighborsResponse.fromJson(core.Map json_) + GoogleCloudAiplatformV1IndexDatapointRestriction.fromJson(core.Map json_) : this( - nearestNeighbors: (json_['nearestNeighbors'] as core.List?) - ?.map((value) => - GoogleCloudAiplatformV1FindNeighborsResponseNearestNeighbors - .fromJson(value as core.Map)) + allowList: (json_['allowList'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + denyList: (json_['denyList'] as core.List?) + ?.map((value) => value as core.String) .toList(), + namespace: json_['namespace'] as core.String?, ); core.Map toJson() => { - if (nearestNeighbors != null) 'nearestNeighbors': nearestNeighbors!, + if (allowList != null) 'allowList': allowList!, + if (denyList != null) 'denyList': denyList!, + if (namespace != null) 'namespace': namespace!, }; } -/// Nearest neighbors for one query. -class GoogleCloudAiplatformV1FindNeighborsResponseNearestNeighbors { - /// The ID of the query datapoint. - core.String? id; +/// Feature embedding vector for sparse index. +/// +/// An array of numbers whose values are located in the specified dimensions. +class GoogleCloudAiplatformV1IndexDatapointSparseEmbedding { + /// The list of indexes for the embedding values of the sparse vector. + /// + /// Required. + core.List? dimensions; - /// All its neighbors. - core.List? neighbors; + /// The list of embedding values of the sparse vector. + /// + /// Required. + core.List? values; - GoogleCloudAiplatformV1FindNeighborsResponseNearestNeighbors({ - this.id, - this.neighbors, + GoogleCloudAiplatformV1IndexDatapointSparseEmbedding({ + this.dimensions, + this.values, }); - GoogleCloudAiplatformV1FindNeighborsResponseNearestNeighbors.fromJson( - core.Map json_) + GoogleCloudAiplatformV1IndexDatapointSparseEmbedding.fromJson(core.Map json_) : this( - id: json_['id'] as core.String?, - neighbors: (json_['neighbors'] as core.List?) - ?.map((value) => - GoogleCloudAiplatformV1FindNeighborsResponseNeighbor.fromJson( - value as core.Map)) + dimensions: (json_['dimensions'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + values: (json_['values'] as core.List?) + ?.map((value) => (value as core.num).toDouble()) .toList(), ); core.Map toJson() => { - if (id != null) 'id': id!, - if (neighbors != null) 'neighbors': neighbors!, + if (dimensions != null) 'dimensions': dimensions!, + if (values != null) 'values': values!, }; } -/// A neighbor of the query vector. -class GoogleCloudAiplatformV1FindNeighborsResponseNeighbor { - /// The datapoint of the neighbor. +/// Indexes are deployed into it. +/// +/// An IndexEndpoint can have multiple DeployedIndexes. +class GoogleCloudAiplatformV1IndexEndpoint { + /// Timestamp when this IndexEndpoint was created. /// - /// Note that full datapoints are returned only when "return_full_datapoint" - /// is set to true. 
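Putting the datapoint helpers together, the following sketch builds a hybrid datapoint with a token restrict, a numeric restrict, and a crowding tag, using only fields defined in the classes above; every identifier and value is a placeholder.

```dart
import 'package:googleapis/aiplatform/v1.dart';

void main() {
  // A hybrid (dense + sparse) datapoint, as it might be upserted into an Index.
  final datapoint = GoogleCloudAiplatformV1IndexDatapoint(
    datapointId: 'product-42',
    featureVector: [0.01, 0.87, -0.22],
    sparseEmbedding: GoogleCloudAiplatformV1IndexDatapointSparseEmbedding(
      // Sparse dimensions are 64-bit integers, so they travel as strings in JSON.
      dimensions: ['3', '17', '256'],
      values: [0.4, 0.1, 0.9],
    ),
    restricts: [
      GoogleCloudAiplatformV1IndexDatapointRestriction(
        namespace: 'color',
        allowList: ['red'],
      ),
    ],
    numericRestricts: [
      // For datapoints the comparison operator stays unset; it is only
      // required on the query side.
      GoogleCloudAiplatformV1IndexDatapointNumericRestriction(
        namespace: 'cost',
        valueDouble: 9.99,
      ),
    ],
    crowdingTag: GoogleCloudAiplatformV1IndexDatapointCrowdingTag(
      crowdingAttribute: 'seller-7',
    ),
  );

  print(datapoint.toJson());
}
```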
Otherwise, only the "datapoint_id" and "crowding_tag" - /// fields are populated. - GoogleCloudAiplatformV1IndexDatapoint? datapoint; + /// Output only. + core.String? createTime; - /// The distance between the neighbor and the dense embedding query. - core.double? distance; + /// The indexes deployed in this endpoint. + /// + /// Output only. + core.List? deployedIndexes; - /// The distance between the neighbor and the query sparse_embedding. - core.double? sparseDistance; + /// The description of the IndexEndpoint. + core.String? description; - GoogleCloudAiplatformV1FindNeighborsResponseNeighbor({ - this.datapoint, - this.distance, - this.sparseDistance, - }); + /// The display name of the IndexEndpoint. + /// + /// The name can be up to 128 characters long and can consist of any UTF-8 + /// characters. + /// + /// Required. + core.String? displayName; - GoogleCloudAiplatformV1FindNeighborsResponseNeighbor.fromJson(core.Map json_) - : this( - datapoint: json_.containsKey('datapoint') - ? GoogleCloudAiplatformV1IndexDatapoint.fromJson( - json_['datapoint'] as core.Map) - : null, - distance: (json_['distance'] as core.num?)?.toDouble(), - sparseDistance: (json_['sparseDistance'] as core.num?)?.toDouble(), - ); + /// Deprecated: If true, expose the IndexEndpoint via private service connect. + /// + /// Only one of the fields, network or enable_private_service_connect, can be + /// set. + /// + /// Optional. + @core.Deprecated( + 'Not supported. Member documentation may have more information.', + ) + core.bool? enablePrivateServiceConnect; - core.Map toJson() => { - if (datapoint != null) 'datapoint': datapoint!, - if (distance != null) 'distance': distance!, - if (sparseDistance != null) 'sparseDistance': sparseDistance!, - }; -} + /// Customer-managed encryption key spec for an IndexEndpoint. + /// + /// If set, this IndexEndpoint and all sub-resources of this IndexEndpoint + /// will be secured by this key. + /// + /// Immutable. + GoogleCloudAiplatformV1EncryptionSpec? encryptionSpec; -/// Input for fluency metric. -class GoogleCloudAiplatformV1FluencyInput { - /// Fluency instance. + /// Used to perform consistent read-modify-write updates. /// - /// Required. - GoogleCloudAiplatformV1FluencyInstance? instance; + /// If not set, a blind "overwrite" update happens. + core.String? etag; - /// Spec for fluency score metric. + /// The labels with user-defined metadata to organize your IndexEndpoints. /// - /// Required. - GoogleCloudAiplatformV1FluencySpec? metricSpec; + /// Label keys and values can be no longer than 64 characters (Unicode + /// codepoints), can only contain lowercase letters, numeric characters, + /// underscores and dashes. International characters are allowed. See + /// https://goo.gl/xmQnxf for more information and examples of labels. + core.Map? labels; - GoogleCloudAiplatformV1FluencyInput({ - this.instance, - this.metricSpec, - }); + /// The resource name of the IndexEndpoint. + /// + /// Output only. + core.String? name; - GoogleCloudAiplatformV1FluencyInput.fromJson(core.Map json_) - : this( - instance: json_.containsKey('instance') - ? GoogleCloudAiplatformV1FluencyInstance.fromJson( - json_['instance'] as core.Map) - : null, - metricSpec: json_.containsKey('metricSpec') - ? GoogleCloudAiplatformV1FluencySpec.fromJson( - json_['metricSpec'] as core.Map) - : null, - ); + /// The full name of the Google Compute Engine + /// [network](https://cloud.google.com/compute/docs/networks-and-firewalls#networks) + /// to which the IndexEndpoint should be peered. 
+ /// + /// Private services access must already be configured for the network. If + /// left unspecified, the Endpoint is not peered with any network. network and + /// private_service_connect_config are mutually exclusive. + /// [Format](https://cloud.google.com/compute/docs/reference/rest/v1/networks/insert): + /// `projects/{project}/global/networks/{network}`. Where {project} is a + /// project number, as in '12345', and {network} is network name. + /// + /// Optional. + core.String? network; - core.Map toJson() => { - if (instance != null) 'instance': instance!, - if (metricSpec != null) 'metricSpec': metricSpec!, - }; -} + /// Configuration for private service connect. + /// + /// network and private_service_connect_config are mutually exclusive. + /// + /// Optional. + GoogleCloudAiplatformV1PrivateServiceConnectConfig? + privateServiceConnectConfig; -/// Spec for fluency instance. -typedef GoogleCloudAiplatformV1FluencyInstance = $Instance01; + /// If public_endpoint_enabled is true, this field will be populated with the + /// domain name to use for this index endpoint. + /// + /// Output only. + core.String? publicEndpointDomainName; -/// Spec for fluency result. -class GoogleCloudAiplatformV1FluencyResult { - /// Confidence for fluency score. + /// If true, the deployed index will be accessible through public endpoint. + /// + /// Optional. + core.bool? publicEndpointEnabled; + + /// Reserved for future use. /// /// Output only. - core.double? confidence; + core.bool? satisfiesPzi; - /// Explanation for fluency score. + /// Reserved for future use. /// /// Output only. - core.String? explanation; + core.bool? satisfiesPzs; - /// Fluency score. + /// Timestamp when this IndexEndpoint was last updated. + /// + /// This timestamp is not updated when the endpoint's DeployedIndexes are + /// updated, e.g. due to updates of the original Indexes they are the + /// deployments of. /// /// Output only. - core.double? score; + core.String? updateTime; - GoogleCloudAiplatformV1FluencyResult({ - this.confidence, - this.explanation, - this.score, + GoogleCloudAiplatformV1IndexEndpoint({ + this.createTime, + this.deployedIndexes, + this.description, + this.displayName, + this.enablePrivateServiceConnect, + this.encryptionSpec, + this.etag, + this.labels, + this.name, + this.network, + this.privateServiceConnectConfig, + this.publicEndpointDomainName, + this.publicEndpointEnabled, + this.satisfiesPzi, + this.satisfiesPzs, + this.updateTime, }); - GoogleCloudAiplatformV1FluencyResult.fromJson(core.Map json_) + GoogleCloudAiplatformV1IndexEndpoint.fromJson(core.Map json_) : this( - confidence: (json_['confidence'] as core.num?)?.toDouble(), - explanation: json_['explanation'] as core.String?, - score: (json_['score'] as core.num?)?.toDouble(), + createTime: json_['createTime'] as core.String?, + deployedIndexes: (json_['deployedIndexes'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1DeployedIndex.fromJson( + value as core.Map)) + .toList(), + description: json_['description'] as core.String?, + displayName: json_['displayName'] as core.String?, + enablePrivateServiceConnect: + json_['enablePrivateServiceConnect'] as core.bool?, + encryptionSpec: json_.containsKey('encryptionSpec') + ? 
GoogleCloudAiplatformV1EncryptionSpec.fromJson( + json_['encryptionSpec'] + as core.Map) + : null, + etag: json_['etag'] as core.String?, + labels: + (json_['labels'] as core.Map?)?.map( + (key, value) => core.MapEntry( + key, + value as core.String, + ), + ), + name: json_['name'] as core.String?, + network: json_['network'] as core.String?, + privateServiceConnectConfig: + json_.containsKey('privateServiceConnectConfig') + ? GoogleCloudAiplatformV1PrivateServiceConnectConfig.fromJson( + json_['privateServiceConnectConfig'] + as core.Map) + : null, + publicEndpointDomainName: + json_['publicEndpointDomainName'] as core.String?, + publicEndpointEnabled: json_['publicEndpointEnabled'] as core.bool?, + satisfiesPzi: json_['satisfiesPzi'] as core.bool?, + satisfiesPzs: json_['satisfiesPzs'] as core.bool?, + updateTime: json_['updateTime'] as core.String?, ); core.Map toJson() => { - if (confidence != null) 'confidence': confidence!, - if (explanation != null) 'explanation': explanation!, - if (score != null) 'score': score!, + if (createTime != null) 'createTime': createTime!, + if (deployedIndexes != null) 'deployedIndexes': deployedIndexes!, + if (description != null) 'description': description!, + if (displayName != null) 'displayName': displayName!, + if (enablePrivateServiceConnect != null) + 'enablePrivateServiceConnect': enablePrivateServiceConnect!, + if (encryptionSpec != null) 'encryptionSpec': encryptionSpec!, + if (etag != null) 'etag': etag!, + if (labels != null) 'labels': labels!, + if (name != null) 'name': name!, + if (network != null) 'network': network!, + if (privateServiceConnectConfig != null) + 'privateServiceConnectConfig': privateServiceConnectConfig!, + if (publicEndpointDomainName != null) + 'publicEndpointDomainName': publicEndpointDomainName!, + if (publicEndpointEnabled != null) + 'publicEndpointEnabled': publicEndpointEnabled!, + if (satisfiesPzi != null) 'satisfiesPzi': satisfiesPzi!, + if (satisfiesPzs != null) 'satisfiesPzs': satisfiesPzs!, + if (updateTime != null) 'updateTime': updateTime!, }; } -/// Spec for fluency score metric. -typedef GoogleCloudAiplatformV1FluencySpec = $Spec; - -/// Assigns the input data to training, validation, and test sets as per the -/// given fractions. +/// IndexPrivateEndpoints proto is used to provide paths for users to send +/// requests via private endpoints (e.g. private service access, private service +/// connect). /// -/// Any of `training_fraction`, `validation_fraction` and `test_fraction` may -/// optionally be provided, they must sum to up to 1. If the provided ones sum -/// to less than 1, the remainder is assigned to sets as decided by Vertex AI. -/// If none of the fractions are set, by default roughly 80% of data is used for -/// training, 10% for validation, and 10% for test. -typedef GoogleCloudAiplatformV1FractionSplit = $FractionSplit; +/// To send request via private service access, use match_grpc_address. To send +/// request via private service connect, use service_attachment. +class GoogleCloudAiplatformV1IndexPrivateEndpoints { + /// The ip address used to send match gRPC requests. + /// + /// Output only. + core.String? matchGrpcAddress; -/// Input for fulfillment metric. -class GoogleCloudAiplatformV1FulfillmentInput { - /// Fulfillment instance. + /// PscAutomatedEndpoints is populated if private service connect is enabled + /// if PscAutomatedConfig is set. /// - /// Required. - GoogleCloudAiplatformV1FulfillmentInstance? instance; + /// Output only. + core.List? 
+ pscAutomatedEndpoints; - /// Spec for fulfillment score metric. + /// The name of the service attachment resource. /// - /// Required. - GoogleCloudAiplatformV1FulfillmentSpec? metricSpec; + /// Populated if private service connect is enabled. + /// + /// Output only. + core.String? serviceAttachment; - GoogleCloudAiplatformV1FulfillmentInput({ - this.instance, - this.metricSpec, + GoogleCloudAiplatformV1IndexPrivateEndpoints({ + this.matchGrpcAddress, + this.pscAutomatedEndpoints, + this.serviceAttachment, }); - GoogleCloudAiplatformV1FulfillmentInput.fromJson(core.Map json_) + GoogleCloudAiplatformV1IndexPrivateEndpoints.fromJson(core.Map json_) : this( - instance: json_.containsKey('instance') - ? GoogleCloudAiplatformV1FulfillmentInstance.fromJson( - json_['instance'] as core.Map) - : null, - metricSpec: json_.containsKey('metricSpec') - ? GoogleCloudAiplatformV1FulfillmentSpec.fromJson( - json_['metricSpec'] as core.Map) - : null, + matchGrpcAddress: json_['matchGrpcAddress'] as core.String?, + pscAutomatedEndpoints: (json_['pscAutomatedEndpoints'] as core.List?) + ?.map((value) => + GoogleCloudAiplatformV1PscAutomatedEndpoints.fromJson( + value as core.Map)) + .toList(), + serviceAttachment: json_['serviceAttachment'] as core.String?, ); core.Map toJson() => { - if (instance != null) 'instance': instance!, - if (metricSpec != null) 'metricSpec': metricSpec!, + if (matchGrpcAddress != null) 'matchGrpcAddress': matchGrpcAddress!, + if (pscAutomatedEndpoints != null) + 'pscAutomatedEndpoints': pscAutomatedEndpoints!, + if (serviceAttachment != null) 'serviceAttachment': serviceAttachment!, }; } -/// Spec for fulfillment instance. -class GoogleCloudAiplatformV1FulfillmentInstance { - /// Inference instruction prompt to compare prediction with. +/// Stats of the Index. +class GoogleCloudAiplatformV1IndexStats { + /// The number of shards in the Index. /// - /// Required. - core.String? instruction; + /// Output only. + core.int? shardsCount; - /// Output of the evaluated model. + /// The number of sparse vectors in the Index. /// - /// Required. - core.String? prediction; + /// Output only. + core.String? sparseVectorsCount; - GoogleCloudAiplatformV1FulfillmentInstance({ - this.instruction, - this.prediction, + /// The number of dense vectors in the Index. + /// + /// Output only. + core.String? vectorsCount; + + GoogleCloudAiplatformV1IndexStats({ + this.shardsCount, + this.sparseVectorsCount, + this.vectorsCount, }); - GoogleCloudAiplatformV1FulfillmentInstance.fromJson(core.Map json_) + GoogleCloudAiplatformV1IndexStats.fromJson(core.Map json_) : this( - instruction: json_['instruction'] as core.String?, - prediction: json_['prediction'] as core.String?, + shardsCount: json_['shardsCount'] as core.int?, + sparseVectorsCount: json_['sparseVectorsCount'] as core.String?, + vectorsCount: json_['vectorsCount'] as core.String?, ); core.Map toJson() => { - if (instruction != null) 'instruction': instruction!, - if (prediction != null) 'prediction': prediction!, + if (shardsCount != null) 'shardsCount': shardsCount!, + if (sparseVectorsCount != null) + 'sparseVectorsCount': sparseVectorsCount!, + if (vectorsCount != null) 'vectorsCount': vectorsCount!, }; } -/// Spec for fulfillment result. -class GoogleCloudAiplatformV1FulfillmentResult { - /// Confidence for fulfillment score. +/// Specifies Vertex AI owned input data to be used for training, and possibly +/// evaluating, the Model. 
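A short sketch of the IndexEndpoint and IndexStats wrappers defined above, under the same import assumption; note that int64 statistics such as vectorsCount are surfaced as strings and need explicit parsing. The display name, labels, and counts are invented for illustration.

```dart
import 'package:googleapis/aiplatform/v1.dart';

void main() {
  // Minimal IndexEndpoint payload for a create call: only displayName is
  // required; enabling the public endpoint avoids VPC peering setup.
  final endpoint = GoogleCloudAiplatformV1IndexEndpoint(
    displayName: 'demo-endpoint',
    description: 'Endpoint for the demo ANN index',
    publicEndpointEnabled: true,
    labels: {'env': 'dev'},
  );
  print(endpoint.toJson());

  // Int64 fields such as vectorsCount arrive as JSON strings, so parse them.
  final stats = GoogleCloudAiplatformV1IndexStats.fromJson({
    'shardsCount': 2,
    'vectorsCount': '1000000',
    'sparseVectorsCount': '25000',
  });
  final denseVectors = int.parse(stats.vectorsCount ?? '0');
  print('dense vectors: $denseVectors, shards: ${stats.shardsCount}');
}
```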
+class GoogleCloudAiplatformV1InputDataConfig { + /// Applicable only to custom training with Datasets that have DataItems and + /// Annotations. /// - /// Output only. - core.double? confidence; + /// Cloud Storage URI that points to a YAML file describing the annotation + /// schema. The schema is defined as an OpenAPI 3.0.2 + /// [Schema Object](https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.2.md#schemaObject). + /// The schema files that can be used here are found in + /// gs://google-cloud-aiplatform/schema/dataset/annotation/ , note that the + /// chosen schema must be consistent with metadata of the Dataset specified by + /// dataset_id. Only Annotations that both match this schema and belong to + /// DataItems not ignored by the split method are used in respectively + /// training, validation or test role, depending on the role of the DataItem + /// they are on. When used in conjunction with annotations_filter, the + /// Annotations used for training are filtered by both annotations_filter and + /// annotation_schema_uri. + core.String? annotationSchemaUri; - /// Explanation for fulfillment score. + /// Applicable only to Datasets that have DataItems and Annotations. /// - /// Output only. - core.String? explanation; + /// A filter on Annotations of the Dataset. Only Annotations that both match + /// this filter and belong to DataItems not ignored by the split method are + /// used in respectively training, validation or test role, depending on the + /// role of the DataItem they are on (for the auto-assigned that role is + /// decided by Vertex AI). A filter with same syntax as the one used in + /// ListAnnotations may be used, but note here it filters across all + /// Annotations of the Dataset, and not just within a single DataItem. + core.String? annotationsFilter; - /// Fulfillment score. + /// Only applicable to custom training with tabular Dataset with BigQuery + /// source. /// - /// Output only. - core.double? score; - - GoogleCloudAiplatformV1FulfillmentResult({ - this.confidence, - this.explanation, - this.score, - }); + /// The BigQuery project location where the training data is to be written to. + /// In the given project a new dataset is created with name `dataset___` where + /// timestamp is in YYYY_MM_DDThh_mm_ss_sssZ format. All training input data + /// is written into that dataset. In the dataset three tables are created, + /// `training`, `validation` and `test`. * AIP_DATA_FORMAT = "bigquery". * + /// AIP_TRAINING_DATA_URI = "bigquery_destination.dataset___.training" * + /// AIP_VALIDATION_DATA_URI = "bigquery_destination.dataset___.validation" * + /// AIP_TEST_DATA_URI = "bigquery_destination.dataset___.test" + GoogleCloudAiplatformV1BigQueryDestination? bigqueryDestination; - GoogleCloudAiplatformV1FulfillmentResult.fromJson(core.Map json_) - : this( - confidence: (json_['confidence'] as core.num?)?.toDouble(), - explanation: json_['explanation'] as core.String?, - score: (json_['score'] as core.num?)?.toDouble(), - ); + /// The ID of the Dataset in the same Project and Location which data will be + /// used to train the Model. + /// + /// The Dataset must use schema compatible with Model being trained, and what + /// is compatible should be described in the used TrainingPipeline's + /// training_task_definition. For tabular Datasets, all their data is exported + /// to training, to pick and choose from. + /// + /// Required. + core.String? 
datasetId; - core.Map toJson() => { - if (confidence != null) 'confidence': confidence!, - if (explanation != null) 'explanation': explanation!, - if (score != null) 'score': score!, - }; -} + /// Split based on the provided filters for each set. + GoogleCloudAiplatformV1FilterSplit? filterSplit; -/// Spec for fulfillment metric. -typedef GoogleCloudAiplatformV1FulfillmentSpec = $Spec; + /// Split based on fractions defining the size of each set. + GoogleCloudAiplatformV1FractionSplit? fractionSplit; -/// A predicted \[FunctionCall\] returned from the model that contains a string -/// representing the \[FunctionDeclaration.name\] and a structured JSON object -/// containing the parameters and their values. -class GoogleCloudAiplatformV1FunctionCall { - /// The function parameters and values in JSON object format. + /// The Cloud Storage location where the training data is to be written to. /// - /// See \[FunctionDeclaration.parameters\] for parameter details. + /// In the given directory a new directory is created with name: `dataset---` + /// where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format. All + /// training input data is written into that directory. The Vertex AI + /// environment variables representing Cloud Storage data URIs are represented + /// in the Cloud Storage wildcard format to support sharded data. e.g.: + /// "gs://.../training-*.jsonl" * AIP_DATA_FORMAT = "jsonl" for non-tabular + /// data, "csv" for tabular data * AIP_TRAINING_DATA_URI = + /// "gcs_destination/dataset---/training-*.${AIP_DATA_FORMAT}" * + /// AIP_VALIDATION_DATA_URI = + /// "gcs_destination/dataset---/validation-*.${AIP_DATA_FORMAT}" * + /// AIP_TEST_DATA_URI = "gcs_destination/dataset---/test-*.${AIP_DATA_FORMAT}" + GoogleCloudAiplatformV1GcsDestination? gcsDestination; + + /// Whether to persist the ML use assignment to data item system labels. + core.bool? persistMlUseAssignment; + + /// Supported only for tabular Datasets. /// - /// Optional. Required. + /// Split based on a predefined key. + GoogleCloudAiplatformV1PredefinedSplit? predefinedSplit; + + /// Only applicable to Datasets that have SavedQueries. /// - /// The values for Object must be JSON objects. It can consist of `num`, - /// `String`, `bool` and `null` as well as `Map` and `List` values. - core.Map? args; + /// The ID of a SavedQuery (annotation set) under the Dataset specified by + /// dataset_id used for filtering Annotations for training. Only Annotations + /// that are associated with this SavedQuery are used in respectively + /// training. When used in conjunction with annotations_filter, the + /// Annotations used for training are filtered by both saved_query_id and + /// annotations_filter. Only one of saved_query_id and annotation_schema_uri + /// should be specified as both of them represent the same thing: problem + /// type. + core.String? savedQueryId; - /// The name of the function to call. + /// Supported only for tabular Datasets. /// - /// Matches \[FunctionDeclaration.name\]. + /// Split based on the distribution of the specified column. + GoogleCloudAiplatformV1StratifiedSplit? stratifiedSplit; + + /// Supported only for tabular Datasets. /// - /// Required. - core.String? name; + /// Split based on the timestamp of the input data pieces. + GoogleCloudAiplatformV1TimestampSplit? 
timestampSplit; - GoogleCloudAiplatformV1FunctionCall({ - this.args, - this.name, + GoogleCloudAiplatformV1InputDataConfig({ + this.annotationSchemaUri, + this.annotationsFilter, + this.bigqueryDestination, + this.datasetId, + this.filterSplit, + this.fractionSplit, + this.gcsDestination, + this.persistMlUseAssignment, + this.predefinedSplit, + this.savedQueryId, + this.stratifiedSplit, + this.timestampSplit, }); - GoogleCloudAiplatformV1FunctionCall.fromJson(core.Map json_) + GoogleCloudAiplatformV1InputDataConfig.fromJson(core.Map json_) : this( - args: json_.containsKey('args') - ? json_['args'] as core.Map + annotationSchemaUri: json_['annotationSchemaUri'] as core.String?, + annotationsFilter: json_['annotationsFilter'] as core.String?, + bigqueryDestination: json_.containsKey('bigqueryDestination') + ? GoogleCloudAiplatformV1BigQueryDestination.fromJson( + json_['bigqueryDestination'] + as core.Map) + : null, + datasetId: json_['datasetId'] as core.String?, + filterSplit: json_.containsKey('filterSplit') + ? GoogleCloudAiplatformV1FilterSplit.fromJson( + json_['filterSplit'] as core.Map) + : null, + fractionSplit: json_.containsKey('fractionSplit') + ? GoogleCloudAiplatformV1FractionSplit.fromJson( + json_['fractionSplit'] as core.Map) + : null, + gcsDestination: json_.containsKey('gcsDestination') + ? GoogleCloudAiplatformV1GcsDestination.fromJson( + json_['gcsDestination'] + as core.Map) + : null, + persistMlUseAssignment: json_['persistMlUseAssignment'] as core.bool?, + predefinedSplit: json_.containsKey('predefinedSplit') + ? GoogleCloudAiplatformV1PredefinedSplit.fromJson( + json_['predefinedSplit'] + as core.Map) + : null, + savedQueryId: json_['savedQueryId'] as core.String?, + stratifiedSplit: json_.containsKey('stratifiedSplit') + ? GoogleCloudAiplatformV1StratifiedSplit.fromJson( + json_['stratifiedSplit'] + as core.Map) + : null, + timestampSplit: json_.containsKey('timestampSplit') + ? GoogleCloudAiplatformV1TimestampSplit.fromJson( + json_['timestampSplit'] + as core.Map) : null, - name: json_['name'] as core.String?, ); core.Map toJson() => { - if (args != null) 'args': args!, - if (name != null) 'name': name!, + if (annotationSchemaUri != null) + 'annotationSchemaUri': annotationSchemaUri!, + if (annotationsFilter != null) 'annotationsFilter': annotationsFilter!, + if (bigqueryDestination != null) + 'bigqueryDestination': bigqueryDestination!, + if (datasetId != null) 'datasetId': datasetId!, + if (filterSplit != null) 'filterSplit': filterSplit!, + if (fractionSplit != null) 'fractionSplit': fractionSplit!, + if (gcsDestination != null) 'gcsDestination': gcsDestination!, + if (persistMlUseAssignment != null) + 'persistMlUseAssignment': persistMlUseAssignment!, + if (predefinedSplit != null) 'predefinedSplit': predefinedSplit!, + if (savedQueryId != null) 'savedQueryId': savedQueryId!, + if (stratifiedSplit != null) 'stratifiedSplit': stratifiedSplit!, + if (timestampSplit != null) 'timestampSplit': timestampSplit!, }; } -/// Function calling config. -class GoogleCloudAiplatformV1FunctionCallingConfig { - /// Function names to call. - /// - /// Only set when the Mode is ANY. Function names should match - /// \[FunctionDeclaration.name\]. With mode set to ANY, model will predict a - /// function call from the set of function names provided. - /// - /// Optional. - core.List? allowedFunctionNames; - - /// Function calling mode. - /// - /// Optional. - /// Possible string values are: - /// - "MODE_UNSPECIFIED" : Unspecified function calling mode. 
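The InputDataConfig generated above is what a custom TrainingPipeline consumes. A minimal sketch under the same import assumption; the dataset ID and output bucket are placeholders, and omitting an explicit split falls back to the default roughly 80/10/10 assignment described for FractionSplit.

```dart
import 'package:googleapis/aiplatform/v1.dart';

void main() {
  // Wires a Dataset into a custom training job; values below are placeholders.
  final inputData = GoogleCloudAiplatformV1InputDataConfig(
    datasetId: '1234567890',
    gcsDestination: GoogleCloudAiplatformV1GcsDestination(
      outputUriPrefix: 'gs://my-bucket/training-output/',
    ),
    // Record which DataItems ended up in train/validation/test.
    persistMlUseAssignment: true,
  );
  print(inputData.toJson());
}
```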
This value - /// should not be used. - /// - "AUTO" : Default model behavior, model decides to predict either - /// function calls or natural language response. - /// - "ANY" : Model is constrained to always predicting function calls only. - /// If "allowed_function_names" are set, the predicted function calls will be - /// limited to any one of "allowed_function_names", else the predicted - /// function calls will be any one of the provided "function_declarations". - /// - "NONE" : Model will not predict any function calls. Model behavior is - /// same as when not passing any function declarations. - core.String? mode; +/// A list of int64 values. +class GoogleCloudAiplatformV1Int64Array { + /// A list of int64 values. + core.List? values; - GoogleCloudAiplatformV1FunctionCallingConfig({ - this.allowedFunctionNames, - this.mode, + GoogleCloudAiplatformV1Int64Array({ + this.values, }); - GoogleCloudAiplatformV1FunctionCallingConfig.fromJson(core.Map json_) + GoogleCloudAiplatformV1Int64Array.fromJson(core.Map json_) : this( - allowedFunctionNames: (json_['allowedFunctionNames'] as core.List?) + values: (json_['values'] as core.List?) ?.map((value) => value as core.String) .toList(), - mode: json_['mode'] as core.String?, ); core.Map toJson() => { - if (allowedFunctionNames != null) - 'allowedFunctionNames': allowedFunctionNames!, - if (mode != null) 'mode': mode!, + if (values != null) 'values': values!, }; } -/// Structured representation of a function declaration as defined by the -/// [OpenAPI 3.0 specification](https://spec.openapis.org/oas/v3.0.3). +/// An attribution method that computes the Aumann-Shapley value taking +/// advantage of the model's fully differentiable structure. /// -/// Included in this declaration are the function name and parameters. This -/// FunctionDeclaration is a representation of a block of code that can be used -/// as a `Tool` by the model and executed by the client. -class GoogleCloudAiplatformV1FunctionDeclaration { - /// Description and purpose of the function. - /// - /// Model uses it to decide how and whether to call the function. - /// - /// Optional. - core.String? description; - - /// The name of the function to call. - /// - /// Must start with a letter or an underscore. Must be a-z, A-Z, 0-9, or - /// contain underscores, dots and dashes, with a maximum length of 64. +/// Refer to this paper for more details: https://arxiv.org/abs/1703.01365 +class GoogleCloudAiplatformV1IntegratedGradientsAttribution { + /// Config for IG with blur baseline. /// - /// Required. - core.String? name; + /// When enabled, a linear path from the maximally blurred image to the input + /// image is created. Using a blurred baseline instead of zero (black image) + /// is motivated by the BlurIG approach explained here: + /// https://arxiv.org/abs/2004.03383 + GoogleCloudAiplatformV1BlurBaselineConfig? blurBaselineConfig; - /// Describes the parameters to this function in JSON Schema Object format. - /// - /// Reflects the Open API 3.03 Parameter Object. string Key: the name of the - /// parameter. Parameter names are case sensitive. Schema Value: the Schema - /// defining the type used for the parameter. For function with no parameters, - /// this can be left unset. Parameter names must start with a letter or an - /// underscore and must only contain chars a-z, A-Z, 0-9, or underscores with - /// a maximum length of 64. 
Example with 1 required and 1 optional parameter: - /// type: OBJECT properties: param1: type: STRING param2: type: INTEGER - /// required: - param1 + /// Config for SmoothGrad approximation of gradients. /// - /// Optional. - GoogleCloudAiplatformV1Schema? parameters; + /// When enabled, the gradients are approximated by averaging the gradients + /// from noisy samples in the vicinity of the inputs. Adding noise can help + /// improve the computed gradients. Refer to this paper for more details: + /// https://arxiv.org/pdf/1706.03825.pdf + GoogleCloudAiplatformV1SmoothGradConfig? smoothGradConfig; - /// Describes the output from this function in JSON Schema format. + /// The number of steps for approximating the path integral. /// - /// Reflects the Open API 3.03 Response Object. The Schema defines the type - /// used for the response value of the function. + /// A good value to start is 50 and gradually increase until the sum to diff + /// property is within the desired error range. Valid range of its value is + /// \[1, 100\], inclusively. /// - /// Optional. - GoogleCloudAiplatformV1Schema? response; + /// Required. + core.int? stepCount; - GoogleCloudAiplatformV1FunctionDeclaration({ - this.description, - this.name, - this.parameters, - this.response, + GoogleCloudAiplatformV1IntegratedGradientsAttribution({ + this.blurBaselineConfig, + this.smoothGradConfig, + this.stepCount, }); - GoogleCloudAiplatformV1FunctionDeclaration.fromJson(core.Map json_) + GoogleCloudAiplatformV1IntegratedGradientsAttribution.fromJson(core.Map json_) : this( - description: json_['description'] as core.String?, - name: json_['name'] as core.String?, - parameters: json_.containsKey('parameters') - ? GoogleCloudAiplatformV1Schema.fromJson( - json_['parameters'] as core.Map) + blurBaselineConfig: json_.containsKey('blurBaselineConfig') + ? GoogleCloudAiplatformV1BlurBaselineConfig.fromJson( + json_['blurBaselineConfig'] + as core.Map) : null, - response: json_.containsKey('response') - ? GoogleCloudAiplatformV1Schema.fromJson( - json_['response'] as core.Map) + smoothGradConfig: json_.containsKey('smoothGradConfig') + ? GoogleCloudAiplatformV1SmoothGradConfig.fromJson( + json_['smoothGradConfig'] + as core.Map) : null, + stepCount: json_['stepCount'] as core.int?, ); core.Map toJson() => { - if (description != null) 'description': description!, - if (name != null) 'name': name!, - if (parameters != null) 'parameters': parameters!, - if (response != null) 'response': response!, + if (blurBaselineConfig != null) + 'blurBaselineConfig': blurBaselineConfig!, + if (smoothGradConfig != null) 'smoothGradConfig': smoothGradConfig!, + if (stepCount != null) 'stepCount': stepCount!, }; } -/// The result output from a \[FunctionCall\] that contains a string -/// representing the \[FunctionDeclaration.name\] and a structured JSON object -/// containing any output from the function is used as context to the model. -/// -/// This should contain the result of a \[FunctionCall\] made based on model -/// prediction. -class GoogleCloudAiplatformV1FunctionResponse { - /// The name of the function to call. - /// - /// Matches \[FunctionDeclaration.name\] and \[FunctionCall.name\]. - /// - /// Required. - core.String? name; - - /// The function response in JSON object format. - /// - /// Use "output" key to specify function output and "error" key to specify - /// error details (if any). If "output" and "error" keys are not specified, - /// then whole "response" is treated as function output. 
+/// The Jira source for the ImportRagFilesRequest. +class GoogleCloudAiplatformV1JiraSource { + /// The Jira queries. /// /// Required. - /// - /// The values for Object must be JSON objects. It can consist of `num`, - /// `String`, `bool` and `null` as well as `Map` and `List` values. - core.Map? response; + core.List? jiraQueries; - GoogleCloudAiplatformV1FunctionResponse({ - this.name, - this.response, + GoogleCloudAiplatformV1JiraSource({ + this.jiraQueries, }); - GoogleCloudAiplatformV1FunctionResponse.fromJson(core.Map json_) + GoogleCloudAiplatformV1JiraSource.fromJson(core.Map json_) : this( - name: json_['name'] as core.String?, - response: json_.containsKey('response') - ? json_['response'] as core.Map - : null, + jiraQueries: (json_['jiraQueries'] as core.List?) + ?.map((value) => + GoogleCloudAiplatformV1JiraSourceJiraQueries.fromJson( + value as core.Map)) + .toList(), ); core.Map toJson() => { - if (name != null) 'name': name!, - if (response != null) 'response': response!, + if (jiraQueries != null) 'jiraQueries': jiraQueries!, }; } -/// The Google Cloud Storage location where the output is to be written to. -class GoogleCloudAiplatformV1GcsDestination { - /// Google Cloud Storage URI to output directory. +/// JiraQueries contains the Jira queries and corresponding authentication. +class GoogleCloudAiplatformV1JiraSourceJiraQueries { + /// The SecretManager secret version resource name (e.g. + /// projects/{project}/secrets/{secret}/versions/{version}) storing the Jira + /// API key. /// - /// If the uri doesn't end with '/', a '/' will be automatically appended. The - /// directory is created if it doesn't exist. + /// See + /// [Manage API tokens for your Atlassian account](https://support.atlassian.com/atlassian-account/docs/manage-api-tokens-for-your-atlassian-account/). /// /// Required. - core.String? outputUriPrefix; + GoogleCloudAiplatformV1ApiAuthApiKeyConfig? apiKeyConfig; - GoogleCloudAiplatformV1GcsDestination({ - this.outputUriPrefix, - }); + /// A list of custom Jira queries to import. + /// + /// For information about JQL (Jira Query Language), see + /// https://support.atlassian.com/jira-service-management-cloud/docs/use-advanced-search-with-jira-query-language-jql/ + core.List? customQueries; - GoogleCloudAiplatformV1GcsDestination.fromJson(core.Map json_) - : this( - outputUriPrefix: json_['outputUriPrefix'] as core.String?, - ); + /// The Jira email address. + /// + /// Required. + core.String? email; - core.Map toJson() => { - if (outputUriPrefix != null) 'outputUriPrefix': outputUriPrefix!, - }; -} + /// A list of Jira projects to import in their entirety. + core.List? projects; -/// The Google Cloud Storage location for the input content. -class GoogleCloudAiplatformV1GcsSource { - /// Google Cloud Storage URI(-s) to the input file(s). - /// - /// May contain wildcards. For more information on wildcards, see - /// https://cloud.google.com/storage/docs/gsutil/addlhelp/WildcardNames. + /// The Jira server URI. /// /// Required. - core.List? uris; + core.String? serverUri; - GoogleCloudAiplatformV1GcsSource({ - this.uris, + GoogleCloudAiplatformV1JiraSourceJiraQueries({ + this.apiKeyConfig, + this.customQueries, + this.email, + this.projects, + this.serverUri, }); - GoogleCloudAiplatformV1GcsSource.fromJson(core.Map json_) + GoogleCloudAiplatformV1JiraSourceJiraQueries.fromJson(core.Map json_) : this( - uris: (json_['uris'] as core.List?) + apiKeyConfig: json_.containsKey('apiKeyConfig') + ? 
GoogleCloudAiplatformV1ApiAuthApiKeyConfig.fromJson( + json_['apiKeyConfig'] as core.Map) + : null, + customQueries: (json_['customQueries'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + email: json_['email'] as core.String?, + projects: (json_['projects'] as core.List?) ?.map((value) => value as core.String) .toList(), + serverUri: json_['serverUri'] as core.String?, ); core.Map toJson() => { - if (uris != null) 'uris': uris!, + if (apiKeyConfig != null) 'apiKeyConfig': apiKeyConfig!, + if (customQueries != null) 'customQueries': customQueries!, + if (email != null) 'email': email!, + if (projects != null) 'projects': projects!, + if (serverUri != null) 'serverUri': serverUri!, }; } -/// Request message for \[PredictionService.GenerateContent\]. -class GoogleCloudAiplatformV1GenerateContentRequest { - /// The content of the current conversation with the model. +/// Contains information about the Large Model. +class GoogleCloudAiplatformV1LargeModelReference { + /// The unique name of the large Foundation or pre-built model. /// - /// For single-turn queries, this is a single instance. For multi-turn - /// queries, this is a repeated field that contains conversation history + - /// latest request. + /// Like "chat-bison", "text-bison". Or model name with version ID, like + /// "chat-bison@001", "text-bison@005", etc. /// /// Required. - core.List? contents; + core.String? name; - /// Generation config. - /// - /// Optional. - GoogleCloudAiplatformV1GenerationConfig? generationConfig; + GoogleCloudAiplatformV1LargeModelReference({ + this.name, + }); - /// The labels with user-defined metadata for the request. - /// - /// It is used for billing and reporting only. Label keys and values can be no - /// longer than 63 characters (Unicode codepoints) and can only contain - /// lowercase letters, numeric characters, underscores, and dashes. - /// International characters are allowed. Label values are optional. Label - /// keys must start with a letter. - /// - /// Optional. - core.Map? labels; + GoogleCloudAiplatformV1LargeModelReference.fromJson(core.Map json_) + : this( + name: json_['name'] as core.String?, + ); - /// Per request settings for blocking unsafe content. - /// - /// Enforced on GenerateContentResponse.candidates. - /// - /// Optional. - core.List? safetySettings; + core.Map toJson() => { + if (name != null) 'name': name!, + }; +} - /// The user provided system instructions for the model. - /// - /// Note: only text should be used in parts and content in each part will be - /// in a separate paragraph. - /// - /// Optional. - GoogleCloudAiplatformV1Content? systemInstruction; +/// A subgraph of the overall lineage graph. +/// +/// Event edges connect Artifact and Execution nodes. +class GoogleCloudAiplatformV1LineageSubgraph { + /// The Artifact nodes in the subgraph. + core.List? artifacts; - /// Tool config. - /// - /// This config is shared for all tools provided in the request. - /// - /// Optional. - GoogleCloudAiplatformV1ToolConfig? toolConfig; + /// The Event edges between Artifacts and Executions in the subgraph. + core.List? events; - /// A list of `Tools` the model may use to generate the next response. - /// - /// A `Tool` is a piece of code that enables the system to interact with - /// external systems to perform an action, or set of actions, outside of - /// knowledge and scope of the model. - /// - /// Optional. - core.List? tools; + /// The Execution nodes in the subgraph. + core.List? 
executions; - GoogleCloudAiplatformV1GenerateContentRequest({ - this.contents, - this.generationConfig, - this.labels, - this.safetySettings, - this.systemInstruction, - this.toolConfig, - this.tools, + GoogleCloudAiplatformV1LineageSubgraph({ + this.artifacts, + this.events, + this.executions, }); - GoogleCloudAiplatformV1GenerateContentRequest.fromJson(core.Map json_) + GoogleCloudAiplatformV1LineageSubgraph.fromJson(core.Map json_) : this( - contents: (json_['contents'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1Content.fromJson( + artifacts: (json_['artifacts'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1Artifact.fromJson( value as core.Map)) .toList(), - generationConfig: json_.containsKey('generationConfig') - ? GoogleCloudAiplatformV1GenerationConfig.fromJson( - json_['generationConfig'] - as core.Map) - : null, - labels: - (json_['labels'] as core.Map?)?.map( - (key, value) => core.MapEntry( - key, - value as core.String, - ), - ), - safetySettings: (json_['safetySettings'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1SafetySetting.fromJson( + events: (json_['events'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1Event.fromJson( value as core.Map)) .toList(), - systemInstruction: json_.containsKey('systemInstruction') - ? GoogleCloudAiplatformV1Content.fromJson( - json_['systemInstruction'] - as core.Map) - : null, - toolConfig: json_.containsKey('toolConfig') - ? GoogleCloudAiplatformV1ToolConfig.fromJson( - json_['toolConfig'] as core.Map) - : null, - tools: (json_['tools'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1Tool.fromJson( + executions: (json_['executions'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1Execution.fromJson( value as core.Map)) .toList(), ); core.Map toJson() => { - if (contents != null) 'contents': contents!, - if (generationConfig != null) 'generationConfig': generationConfig!, - if (labels != null) 'labels': labels!, - if (safetySettings != null) 'safetySettings': safetySettings!, - if (systemInstruction != null) 'systemInstruction': systemInstruction!, - if (toolConfig != null) 'toolConfig': toolConfig!, - if (tools != null) 'tools': tools!, + if (artifacts != null) 'artifacts': artifacts!, + if (events != null) 'events': events!, + if (executions != null) 'executions': executions!, }; } -/// Response message for \[PredictionService.GenerateContent\]. -class GoogleCloudAiplatformV1GenerateContentResponse { - /// Generated candidates. - /// - /// Output only. - core.List? candidates; - - /// The model version used to generate the response. - /// - /// Output only. - core.String? modelVersion; - - /// Content filter results for a prompt sent in the request. - /// - /// Note: Sent only in the first stream chunk. Only happens when no candidates - /// were generated due to content violations. - /// - /// Output only. - GoogleCloudAiplatformV1GenerateContentResponsePromptFeedback? promptFeedback; +/// Response message for DatasetService.ListAnnotations. +class GoogleCloudAiplatformV1ListAnnotationsResponse { + /// A list of Annotations that matches the specified filter in the request. + core.List? annotations; - /// Usage metadata about the response(s). - GoogleCloudAiplatformV1GenerateContentResponseUsageMetadata? usageMetadata; + /// The standard List next-page token. + core.String? 
nextPageToken; - GoogleCloudAiplatformV1GenerateContentResponse({ - this.candidates, - this.modelVersion, - this.promptFeedback, - this.usageMetadata, + GoogleCloudAiplatformV1ListAnnotationsResponse({ + this.annotations, + this.nextPageToken, }); - GoogleCloudAiplatformV1GenerateContentResponse.fromJson(core.Map json_) + GoogleCloudAiplatformV1ListAnnotationsResponse.fromJson(core.Map json_) : this( - candidates: (json_['candidates'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1Candidate.fromJson( + annotations: (json_['annotations'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1Annotation.fromJson( value as core.Map)) .toList(), - modelVersion: json_['modelVersion'] as core.String?, - promptFeedback: json_.containsKey('promptFeedback') - ? GoogleCloudAiplatformV1GenerateContentResponsePromptFeedback - .fromJson(json_['promptFeedback'] - as core.Map) - : null, - usageMetadata: json_.containsKey('usageMetadata') - ? GoogleCloudAiplatformV1GenerateContentResponseUsageMetadata - .fromJson(json_['usageMetadata'] - as core.Map) - : null, + nextPageToken: json_['nextPageToken'] as core.String?, ); core.Map toJson() => { - if (candidates != null) 'candidates': candidates!, - if (modelVersion != null) 'modelVersion': modelVersion!, - if (promptFeedback != null) 'promptFeedback': promptFeedback!, - if (usageMetadata != null) 'usageMetadata': usageMetadata!, + if (annotations != null) 'annotations': annotations!, + if (nextPageToken != null) 'nextPageToken': nextPageToken!, }; } -/// Content filter results for a prompt sent in the request. -class GoogleCloudAiplatformV1GenerateContentResponsePromptFeedback { - /// Blocked reason. - /// - /// Output only. - /// Possible string values are: - /// - "BLOCKED_REASON_UNSPECIFIED" : Unspecified blocked reason. - /// - "SAFETY" : Candidates blocked due to safety. - /// - "OTHER" : Candidates blocked due to other reason. - /// - "BLOCKLIST" : Candidates blocked due to the terms which are included - /// from the terminology blocklist. - /// - "PROHIBITED_CONTENT" : Candidates blocked due to prohibited content. - core.String? blockReason; - - /// A readable block reason message. - /// - /// Output only. - core.String? blockReasonMessage; +/// Response message for MetadataService.ListArtifacts. +class GoogleCloudAiplatformV1ListArtifactsResponse { + /// The Artifacts retrieved from the MetadataStore. + core.List? artifacts; - /// Safety ratings. + /// A token, which can be sent as ListArtifactsRequest.page_token to retrieve + /// the next page. /// - /// Output only. - core.List? safetyRatings; + /// If this field is not populated, there are no subsequent pages. + core.String? nextPageToken; - GoogleCloudAiplatformV1GenerateContentResponsePromptFeedback({ - this.blockReason, - this.blockReasonMessage, - this.safetyRatings, + GoogleCloudAiplatformV1ListArtifactsResponse({ + this.artifacts, + this.nextPageToken, }); - GoogleCloudAiplatformV1GenerateContentResponsePromptFeedback.fromJson( - core.Map json_) + GoogleCloudAiplatformV1ListArtifactsResponse.fromJson(core.Map json_) : this( - blockReason: json_['blockReason'] as core.String?, - blockReasonMessage: json_['blockReasonMessage'] as core.String?, - safetyRatings: (json_['safetyRatings'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1SafetyRating.fromJson( + artifacts: (json_['artifacts'] as core.List?) 
+ ?.map((value) => GoogleCloudAiplatformV1Artifact.fromJson( value as core.Map)) .toList(), + nextPageToken: json_['nextPageToken'] as core.String?, ); core.Map toJson() => { - if (blockReason != null) 'blockReason': blockReason!, - if (blockReasonMessage != null) - 'blockReasonMessage': blockReasonMessage!, - if (safetyRatings != null) 'safetyRatings': safetyRatings!, + if (artifacts != null) 'artifacts': artifacts!, + if (nextPageToken != null) 'nextPageToken': nextPageToken!, }; } -/// Usage metadata about response(s). -class GoogleCloudAiplatformV1GenerateContentResponseUsageMetadata { - /// Number of tokens in the response(s). - core.int? candidatesTokenCount; +/// Response message for JobService.ListBatchPredictionJobs +class GoogleCloudAiplatformV1ListBatchPredictionJobsResponse { + /// List of BatchPredictionJobs in the requested page. + core.List? batchPredictionJobs; - /// Number of tokens in the request. + /// A token to retrieve the next page of results. /// - /// When `cached_content` is set, this is still the total effective prompt - /// size meaning this includes the number of tokens in the cached content. - core.int? promptTokenCount; - - /// Total token count for prompt and response candidates. - core.int? totalTokenCount; + /// Pass to ListBatchPredictionJobsRequest.page_token to obtain that page. + core.String? nextPageToken; - GoogleCloudAiplatformV1GenerateContentResponseUsageMetadata({ - this.candidatesTokenCount, - this.promptTokenCount, - this.totalTokenCount, + GoogleCloudAiplatformV1ListBatchPredictionJobsResponse({ + this.batchPredictionJobs, + this.nextPageToken, }); - GoogleCloudAiplatformV1GenerateContentResponseUsageMetadata.fromJson( + GoogleCloudAiplatformV1ListBatchPredictionJobsResponse.fromJson( core.Map json_) : this( - candidatesTokenCount: json_['candidatesTokenCount'] as core.int?, - promptTokenCount: json_['promptTokenCount'] as core.int?, - totalTokenCount: json_['totalTokenCount'] as core.int?, - ); - - core.Map toJson() => { - if (candidatesTokenCount != null) - 'candidatesTokenCount': candidatesTokenCount!, - if (promptTokenCount != null) 'promptTokenCount': promptTokenCount!, - if (totalTokenCount != null) 'totalTokenCount': totalTokenCount!, - }; -} - -/// Generation config. -class GoogleCloudAiplatformV1GenerationConfig { - /// If enabled, audio timestamp will be included in the request to the model. - /// - /// Optional. - core.bool? audioTimestamp; - - /// Number of candidates to generate. - /// - /// Optional. - core.int? candidateCount; - - /// Frequency penalties. - /// - /// Optional. - core.double? frequencyPenalty; + batchPredictionJobs: (json_['batchPredictionJobs'] as core.List?) + ?.map((value) => + GoogleCloudAiplatformV1BatchPredictionJob.fromJson( + value as core.Map)) + .toList(), + nextPageToken: json_['nextPageToken'] as core.String?, + ); - /// Logit probabilities. - /// - /// Optional. - core.int? logprobs; + core.Map toJson() => { + if (batchPredictionJobs != null) + 'batchPredictionJobs': batchPredictionJobs!, + if (nextPageToken != null) 'nextPageToken': nextPageToken!, + }; +} - /// The maximum number of output tokens to generate per message. - /// - /// Optional. - core.int? maxOutputTokens; +/// Response with a list of CachedContents. +class GoogleCloudAiplatformV1ListCachedContentsResponse { + /// List of cached contents. + core.List? cachedContents; - /// Positive penalties. + /// A token, which can be sent as `page_token` to retrieve the next page. /// - /// Optional. - core.double? 
presencePenalty; + /// If this field is omitted, there are no subsequent pages. + core.String? nextPageToken; - /// If true, export the logprobs results in response. - /// - /// Optional. - core.bool? responseLogprobs; + GoogleCloudAiplatformV1ListCachedContentsResponse({ + this.cachedContents, + this.nextPageToken, + }); - /// Output response mimetype of the generated candidate text. - /// - /// Supported mimetype: - `text/plain`: (default) Text output. - - /// `application/json`: JSON response in the candidates. The model needs to be - /// prompted to output the appropriate response type, otherwise the behavior - /// is undefined. This is a preview feature. - /// - /// Optional. - core.String? responseMimeType; + GoogleCloudAiplatformV1ListCachedContentsResponse.fromJson(core.Map json_) + : this( + cachedContents: (json_['cachedContents'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1CachedContent.fromJson( + value as core.Map)) + .toList(), + nextPageToken: json_['nextPageToken'] as core.String?, + ); - /// The `Schema` object allows the definition of input and output data types. - /// - /// These types can be objects, but also primitives and arrays. Represents a - /// select subset of an - /// [OpenAPI 3.0 schema object](https://spec.openapis.org/oas/v3.0.3#schema). - /// If set, a compatible response_mime_type must also be set. Compatible - /// mimetypes: `application/json`: Schema for JSON response. - /// - /// Optional. - GoogleCloudAiplatformV1Schema? responseSchema; + core.Map toJson() => { + if (cachedContents != null) 'cachedContents': cachedContents!, + if (nextPageToken != null) 'nextPageToken': nextPageToken!, + }; +} - /// Routing configuration. - /// - /// Optional. - GoogleCloudAiplatformV1GenerationConfigRoutingConfig? routingConfig; +/// Response message for MetadataService.ListContexts. +class GoogleCloudAiplatformV1ListContextsResponse { + /// The Contexts retrieved from the MetadataStore. + core.List? contexts; - /// Seed. + /// A token, which can be sent as ListContextsRequest.page_token to retrieve + /// the next page. /// - /// Optional. - core.int? seed; + /// If this field is not populated, there are no subsequent pages. + core.String? nextPageToken; - /// Stop sequences. - /// - /// Optional. - core.List? stopSequences; + GoogleCloudAiplatformV1ListContextsResponse({ + this.contexts, + this.nextPageToken, + }); - /// Controls the randomness of predictions. - /// - /// Optional. - core.double? temperature; + GoogleCloudAiplatformV1ListContextsResponse.fromJson(core.Map json_) + : this( + contexts: (json_['contexts'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1Context.fromJson( + value as core.Map)) + .toList(), + nextPageToken: json_['nextPageToken'] as core.String?, + ); - /// If specified, top-k sampling will be used. - /// - /// Optional. - core.double? topK; + core.Map toJson() => { + if (contexts != null) 'contexts': contexts!, + if (nextPageToken != null) 'nextPageToken': nextPageToken!, + }; +} - /// If specified, nucleus sampling will be used. +/// Response message for JobService.ListCustomJobs +class GoogleCloudAiplatformV1ListCustomJobsResponse { + /// List of CustomJobs in the requested page. + core.List? customJobs; + + /// A token to retrieve the next page of results. /// - /// Optional. - core.double? topP; + /// Pass to ListCustomJobsRequest.page_token to obtain that page. + core.String? 
nextPageToken; - GoogleCloudAiplatformV1GenerationConfig({ - this.audioTimestamp, - this.candidateCount, - this.frequencyPenalty, - this.logprobs, - this.maxOutputTokens, - this.presencePenalty, - this.responseLogprobs, - this.responseMimeType, - this.responseSchema, - this.routingConfig, - this.seed, - this.stopSequences, - this.temperature, - this.topK, - this.topP, + GoogleCloudAiplatformV1ListCustomJobsResponse({ + this.customJobs, + this.nextPageToken, }); - GoogleCloudAiplatformV1GenerationConfig.fromJson(core.Map json_) + GoogleCloudAiplatformV1ListCustomJobsResponse.fromJson(core.Map json_) : this( - audioTimestamp: json_['audioTimestamp'] as core.bool?, - candidateCount: json_['candidateCount'] as core.int?, - frequencyPenalty: - (json_['frequencyPenalty'] as core.num?)?.toDouble(), - logprobs: json_['logprobs'] as core.int?, - maxOutputTokens: json_['maxOutputTokens'] as core.int?, - presencePenalty: (json_['presencePenalty'] as core.num?)?.toDouble(), - responseLogprobs: json_['responseLogprobs'] as core.bool?, - responseMimeType: json_['responseMimeType'] as core.String?, - responseSchema: json_.containsKey('responseSchema') - ? GoogleCloudAiplatformV1Schema.fromJson(json_['responseSchema'] - as core.Map) - : null, - routingConfig: json_.containsKey('routingConfig') - ? GoogleCloudAiplatformV1GenerationConfigRoutingConfig.fromJson( - json_['routingConfig'] as core.Map) - : null, - seed: json_['seed'] as core.int?, - stopSequences: (json_['stopSequences'] as core.List?) - ?.map((value) => value as core.String) + customJobs: (json_['customJobs'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1CustomJob.fromJson( + value as core.Map)) .toList(), - temperature: (json_['temperature'] as core.num?)?.toDouble(), - topK: (json_['topK'] as core.num?)?.toDouble(), - topP: (json_['topP'] as core.num?)?.toDouble(), + nextPageToken: json_['nextPageToken'] as core.String?, ); core.Map toJson() => { - if (audioTimestamp != null) 'audioTimestamp': audioTimestamp!, - if (candidateCount != null) 'candidateCount': candidateCount!, - if (frequencyPenalty != null) 'frequencyPenalty': frequencyPenalty!, - if (logprobs != null) 'logprobs': logprobs!, - if (maxOutputTokens != null) 'maxOutputTokens': maxOutputTokens!, - if (presencePenalty != null) 'presencePenalty': presencePenalty!, - if (responseLogprobs != null) 'responseLogprobs': responseLogprobs!, - if (responseMimeType != null) 'responseMimeType': responseMimeType!, - if (responseSchema != null) 'responseSchema': responseSchema!, - if (routingConfig != null) 'routingConfig': routingConfig!, - if (seed != null) 'seed': seed!, - if (stopSequences != null) 'stopSequences': stopSequences!, - if (temperature != null) 'temperature': temperature!, - if (topK != null) 'topK': topK!, - if (topP != null) 'topP': topP!, + if (customJobs != null) 'customJobs': customJobs!, + if (nextPageToken != null) 'nextPageToken': nextPageToken!, }; } -/// The configuration for routing the request to a specific model. -class GoogleCloudAiplatformV1GenerationConfigRoutingConfig { - /// Automated routing. - GoogleCloudAiplatformV1GenerationConfigRoutingConfigAutoRoutingMode? autoMode; +/// Response message for DatasetService.ListDataItems. +class GoogleCloudAiplatformV1ListDataItemsResponse { + /// A list of DataItems that matches the specified filter in the request. + core.List? dataItems; - /// Manual routing. - GoogleCloudAiplatformV1GenerationConfigRoutingConfigManualRoutingMode? - manualMode; + /// The standard List next-page token. + core.String? 
nextPageToken; - GoogleCloudAiplatformV1GenerationConfigRoutingConfig({ - this.autoMode, - this.manualMode, + GoogleCloudAiplatformV1ListDataItemsResponse({ + this.dataItems, + this.nextPageToken, }); - GoogleCloudAiplatformV1GenerationConfigRoutingConfig.fromJson(core.Map json_) + GoogleCloudAiplatformV1ListDataItemsResponse.fromJson(core.Map json_) : this( - autoMode: json_.containsKey('autoMode') - ? GoogleCloudAiplatformV1GenerationConfigRoutingConfigAutoRoutingMode - .fromJson( - json_['autoMode'] as core.Map) - : null, - manualMode: json_.containsKey('manualMode') - ? GoogleCloudAiplatformV1GenerationConfigRoutingConfigManualRoutingMode - .fromJson(json_['manualMode'] - as core.Map) - : null, + dataItems: (json_['dataItems'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1DataItem.fromJson( + value as core.Map)) + .toList(), + nextPageToken: json_['nextPageToken'] as core.String?, ); core.Map toJson() => { - if (autoMode != null) 'autoMode': autoMode!, - if (manualMode != null) 'manualMode': manualMode!, + if (dataItems != null) 'dataItems': dataItems!, + if (nextPageToken != null) 'nextPageToken': nextPageToken!, }; } -/// When automated routing is specified, the routing will be determined by the -/// pretrained routing model and customer provided model routing preference. -class GoogleCloudAiplatformV1GenerationConfigRoutingConfigAutoRoutingMode { - /// The model routing preference. - /// Possible string values are: - /// - "UNKNOWN" : Unspecified model routing preference. - /// - "PRIORITIZE_QUALITY" : Prefer higher quality over low cost. - /// - "BALANCED" : Balanced model routing preference. - /// - "PRIORITIZE_COST" : Prefer lower cost over higher quality. - core.String? modelRoutingPreference; +/// Response message for JobService.ListDataLabelingJobs. +class GoogleCloudAiplatformV1ListDataLabelingJobsResponse { + /// A list of DataLabelingJobs that matches the specified filter in the + /// request. + core.List? dataLabelingJobs; - GoogleCloudAiplatformV1GenerationConfigRoutingConfigAutoRoutingMode({ - this.modelRoutingPreference, + /// The standard List next-page token. + core.String? nextPageToken; + + GoogleCloudAiplatformV1ListDataLabelingJobsResponse({ + this.dataLabelingJobs, + this.nextPageToken, }); - GoogleCloudAiplatformV1GenerationConfigRoutingConfigAutoRoutingMode.fromJson( - core.Map json_) + GoogleCloudAiplatformV1ListDataLabelingJobsResponse.fromJson(core.Map json_) : this( - modelRoutingPreference: - json_['modelRoutingPreference'] as core.String?, + dataLabelingJobs: (json_['dataLabelingJobs'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1DataLabelingJob.fromJson( + value as core.Map)) + .toList(), + nextPageToken: json_['nextPageToken'] as core.String?, ); core.Map toJson() => { - if (modelRoutingPreference != null) - 'modelRoutingPreference': modelRoutingPreference!, + if (dataLabelingJobs != null) 'dataLabelingJobs': dataLabelingJobs!, + if (nextPageToken != null) 'nextPageToken': nextPageToken!, }; } -/// When manual routing is set, the specified model will be used directly. -class GoogleCloudAiplatformV1GenerationConfigRoutingConfigManualRoutingMode { - /// The model name to use. - /// - /// Only the public LLM models are accepted. e.g. 'gemini-1.5-pro-001'. - core.String? modelName; +/// Response message for DatasetService.ListDatasetVersions. +class GoogleCloudAiplatformV1ListDatasetVersionsResponse { + /// A list of DatasetVersions that matches the specified filter in the + /// request. + core.List? 
datasetVersions; - GoogleCloudAiplatformV1GenerationConfigRoutingConfigManualRoutingMode({ - this.modelName, + /// The standard List next-page token. + core.String? nextPageToken; + + GoogleCloudAiplatformV1ListDatasetVersionsResponse({ + this.datasetVersions, + this.nextPageToken, }); - GoogleCloudAiplatformV1GenerationConfigRoutingConfigManualRoutingMode.fromJson( - core.Map json_) + GoogleCloudAiplatformV1ListDatasetVersionsResponse.fromJson(core.Map json_) : this( - modelName: json_['modelName'] as core.String?, + datasetVersions: (json_['datasetVersions'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1DatasetVersion.fromJson( + value as core.Map)) + .toList(), + nextPageToken: json_['nextPageToken'] as core.String?, ); core.Map toJson() => { - if (modelName != null) 'modelName': modelName!, + if (datasetVersions != null) 'datasetVersions': datasetVersions!, + if (nextPageToken != null) 'nextPageToken': nextPageToken!, }; } -/// Contains information about the source of the models generated from -/// Generative AI Studio. -class GoogleCloudAiplatformV1GenieSource { - /// The public base model URI. - /// - /// Required. - core.String? baseModelUri; +/// Response message for DatasetService.ListDatasets. +class GoogleCloudAiplatformV1ListDatasetsResponse { + /// A list of Datasets that matches the specified filter in the request. + core.List? datasets; - GoogleCloudAiplatformV1GenieSource({ - this.baseModelUri, + /// The standard List next-page token. + core.String? nextPageToken; + + GoogleCloudAiplatformV1ListDatasetsResponse({ + this.datasets, + this.nextPageToken, }); - GoogleCloudAiplatformV1GenieSource.fromJson(core.Map json_) + GoogleCloudAiplatformV1ListDatasetsResponse.fromJson(core.Map json_) : this( - baseModelUri: json_['baseModelUri'] as core.String?, + datasets: (json_['datasets'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1Dataset.fromJson( + value as core.Map)) + .toList(), + nextPageToken: json_['nextPageToken'] as core.String?, ); core.Map toJson() => { - if (baseModelUri != null) 'baseModelUri': baseModelUri!, + if (datasets != null) 'datasets': datasets!, + if (nextPageToken != null) 'nextPageToken': nextPageToken!, }; } -/// Tool to retrieve public web data for grounding, powered by Google. -class GoogleCloudAiplatformV1GoogleSearchRetrieval { - /// Specifies the dynamic retrieval configuration for the given source. - GoogleCloudAiplatformV1DynamicRetrievalConfig? dynamicRetrievalConfig; +/// Response message for ListDeploymentResourcePools method. +class GoogleCloudAiplatformV1ListDeploymentResourcePoolsResponse { + /// The DeploymentResourcePools from the specified location. + core.List? + deploymentResourcePools; - GoogleCloudAiplatformV1GoogleSearchRetrieval({ - this.dynamicRetrievalConfig, + /// A token, which can be sent as `page_token` to retrieve the next page. + /// + /// If this field is omitted, there are no subsequent pages. + core.String? nextPageToken; + + GoogleCloudAiplatformV1ListDeploymentResourcePoolsResponse({ + this.deploymentResourcePools, + this.nextPageToken, }); - GoogleCloudAiplatformV1GoogleSearchRetrieval.fromJson(core.Map json_) + GoogleCloudAiplatformV1ListDeploymentResourcePoolsResponse.fromJson( + core.Map json_) : this( - dynamicRetrievalConfig: json_.containsKey('dynamicRetrievalConfig') - ? GoogleCloudAiplatformV1DynamicRetrievalConfig.fromJson( - json_['dynamicRetrievalConfig'] - as core.Map) - : null, + deploymentResourcePools: + (json_['deploymentResourcePools'] as core.List?) 
+ ?.map((value) => + GoogleCloudAiplatformV1DeploymentResourcePool.fromJson( + value as core.Map)) + .toList(), + nextPageToken: json_['nextPageToken'] as core.String?, ); core.Map toJson() => { - if (dynamicRetrievalConfig != null) - 'dynamicRetrievalConfig': dynamicRetrievalConfig!, + if (deploymentResourcePools != null) + 'deploymentResourcePools': deploymentResourcePools!, + if (nextPageToken != null) 'nextPageToken': nextPageToken!, }; } -/// Input for groundedness metric. -class GoogleCloudAiplatformV1GroundednessInput { - /// Groundedness instance. - /// - /// Required. - GoogleCloudAiplatformV1GroundednessInstance? instance; +/// Response message for EndpointService.ListEndpoints. +class GoogleCloudAiplatformV1ListEndpointsResponse { + /// List of Endpoints in the requested page. + core.List? endpoints; - /// Spec for groundedness metric. + /// A token to retrieve the next page of results. /// - /// Required. - GoogleCloudAiplatformV1GroundednessSpec? metricSpec; + /// Pass to ListEndpointsRequest.page_token to obtain that page. + core.String? nextPageToken; - GoogleCloudAiplatformV1GroundednessInput({ - this.instance, - this.metricSpec, + GoogleCloudAiplatformV1ListEndpointsResponse({ + this.endpoints, + this.nextPageToken, }); - GoogleCloudAiplatformV1GroundednessInput.fromJson(core.Map json_) + GoogleCloudAiplatformV1ListEndpointsResponse.fromJson(core.Map json_) : this( - instance: json_.containsKey('instance') - ? GoogleCloudAiplatformV1GroundednessInstance.fromJson( - json_['instance'] as core.Map) - : null, - metricSpec: json_.containsKey('metricSpec') - ? GoogleCloudAiplatformV1GroundednessSpec.fromJson( - json_['metricSpec'] as core.Map) - : null, + endpoints: (json_['endpoints'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1Endpoint.fromJson( + value as core.Map)) + .toList(), + nextPageToken: json_['nextPageToken'] as core.String?, ); core.Map toJson() => { - if (instance != null) 'instance': instance!, - if (metricSpec != null) 'metricSpec': metricSpec!, + if (endpoints != null) 'endpoints': endpoints!, + if (nextPageToken != null) 'nextPageToken': nextPageToken!, }; } -/// Spec for groundedness instance. -class GoogleCloudAiplatformV1GroundednessInstance { - /// Background information provided in context used to compare against the - /// prediction. - /// - /// Required. - core.String? context; +/// Response message for FeaturestoreService.ListEntityTypes. +class GoogleCloudAiplatformV1ListEntityTypesResponse { + /// The EntityTypes matching the request. + core.List? entityTypes; - /// Output of the evaluated model. + /// A token, which can be sent as ListEntityTypesRequest.page_token to + /// retrieve the next page. /// - /// Required. - core.String? prediction; + /// If this field is omitted, there are no subsequent pages. + core.String? nextPageToken; - GoogleCloudAiplatformV1GroundednessInstance({ - this.context, - this.prediction, + GoogleCloudAiplatformV1ListEntityTypesResponse({ + this.entityTypes, + this.nextPageToken, }); - GoogleCloudAiplatformV1GroundednessInstance.fromJson(core.Map json_) + GoogleCloudAiplatformV1ListEntityTypesResponse.fromJson(core.Map json_) : this( - context: json_['context'] as core.String?, - prediction: json_['prediction'] as core.String?, + entityTypes: (json_['entityTypes'] as core.List?) 
+ ?.map((value) => GoogleCloudAiplatformV1EntityType.fromJson( + value as core.Map)) + .toList(), + nextPageToken: json_['nextPageToken'] as core.String?, ); core.Map toJson() => { - if (context != null) 'context': context!, - if (prediction != null) 'prediction': prediction!, + if (entityTypes != null) 'entityTypes': entityTypes!, + if (nextPageToken != null) 'nextPageToken': nextPageToken!, }; } -/// Spec for groundedness result. -class GoogleCloudAiplatformV1GroundednessResult { - /// Confidence for groundedness score. - /// - /// Output only. - core.double? confidence; - - /// Explanation for groundedness score. - /// - /// Output only. - core.String? explanation; +/// Response message for MetadataService.ListExecutions. +class GoogleCloudAiplatformV1ListExecutionsResponse { + /// The Executions retrieved from the MetadataStore. + core.List? executions; - /// Groundedness score. + /// A token, which can be sent as ListExecutionsRequest.page_token to retrieve + /// the next page. /// - /// Output only. - core.double? score; + /// If this field is not populated, there are no subsequent pages. + core.String? nextPageToken; - GoogleCloudAiplatformV1GroundednessResult({ - this.confidence, - this.explanation, - this.score, + GoogleCloudAiplatformV1ListExecutionsResponse({ + this.executions, + this.nextPageToken, }); - GoogleCloudAiplatformV1GroundednessResult.fromJson(core.Map json_) + GoogleCloudAiplatformV1ListExecutionsResponse.fromJson(core.Map json_) : this( - confidence: (json_['confidence'] as core.num?)?.toDouble(), - explanation: json_['explanation'] as core.String?, - score: (json_['score'] as core.num?)?.toDouble(), + executions: (json_['executions'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1Execution.fromJson( + value as core.Map)) + .toList(), + nextPageToken: json_['nextPageToken'] as core.String?, ); core.Map toJson() => { - if (confidence != null) 'confidence': confidence!, - if (explanation != null) 'explanation': explanation!, - if (score != null) 'score': score!, + if (executions != null) 'executions': executions!, + if (nextPageToken != null) 'nextPageToken': nextPageToken!, }; } -/// Spec for groundedness metric. -typedef GoogleCloudAiplatformV1GroundednessSpec = $Spec; - -/// Grounding chunk. -class GoogleCloudAiplatformV1GroundingChunk { - /// Grounding chunk from context retrieved by the retrieval tools. - GoogleCloudAiplatformV1GroundingChunkRetrievedContext? retrievedContext; +/// Response message for FeatureRegistryService.ListFeatureGroups. +class GoogleCloudAiplatformV1ListFeatureGroupsResponse { + /// The FeatureGroups matching the request. + core.List? featureGroups; - /// Grounding chunk from the web. - GoogleCloudAiplatformV1GroundingChunkWeb? web; + /// A token, which can be sent as ListFeatureGroupsRequest.page_token to + /// retrieve the next page. + /// + /// If this field is omitted, there are no subsequent pages. + core.String? nextPageToken; - GoogleCloudAiplatformV1GroundingChunk({ - this.retrievedContext, - this.web, + GoogleCloudAiplatformV1ListFeatureGroupsResponse({ + this.featureGroups, + this.nextPageToken, }); - GoogleCloudAiplatformV1GroundingChunk.fromJson(core.Map json_) + GoogleCloudAiplatformV1ListFeatureGroupsResponse.fromJson(core.Map json_) : this( - retrievedContext: json_.containsKey('retrievedContext') - ? GoogleCloudAiplatformV1GroundingChunkRetrievedContext.fromJson( - json_['retrievedContext'] - as core.Map) - : null, - web: json_.containsKey('web') - ? 
GoogleCloudAiplatformV1GroundingChunkWeb.fromJson( - json_['web'] as core.Map) - : null, + featureGroups: (json_['featureGroups'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1FeatureGroup.fromJson( + value as core.Map)) + .toList(), + nextPageToken: json_['nextPageToken'] as core.String?, ); core.Map toJson() => { - if (retrievedContext != null) 'retrievedContext': retrievedContext!, - if (web != null) 'web': web!, + if (featureGroups != null) 'featureGroups': featureGroups!, + if (nextPageToken != null) 'nextPageToken': nextPageToken!, }; } -/// Chunk from context retrieved by the retrieval tools. -class GoogleCloudAiplatformV1GroundingChunkRetrievedContext { - /// Title of the attribution. - core.String? title; +/// Response message for FeatureOnlineStoreAdminService.ListFeatureOnlineStores. +class GoogleCloudAiplatformV1ListFeatureOnlineStoresResponse { + /// The FeatureOnlineStores matching the request. + core.List? featureOnlineStores; - /// URI reference of the attribution. - core.String? uri; + /// A token, which can be sent as ListFeatureOnlineStoresRequest.page_token to + /// retrieve the next page. + /// + /// If this field is omitted, there are no subsequent pages. + core.String? nextPageToken; - GoogleCloudAiplatformV1GroundingChunkRetrievedContext({ - this.title, - this.uri, + GoogleCloudAiplatformV1ListFeatureOnlineStoresResponse({ + this.featureOnlineStores, + this.nextPageToken, }); - GoogleCloudAiplatformV1GroundingChunkRetrievedContext.fromJson(core.Map json_) + GoogleCloudAiplatformV1ListFeatureOnlineStoresResponse.fromJson( + core.Map json_) : this( - title: json_['title'] as core.String?, - uri: json_['uri'] as core.String?, + featureOnlineStores: (json_['featureOnlineStores'] as core.List?) + ?.map((value) => + GoogleCloudAiplatformV1FeatureOnlineStore.fromJson( + value as core.Map)) + .toList(), + nextPageToken: json_['nextPageToken'] as core.String?, ); core.Map toJson() => { - if (title != null) 'title': title!, - if (uri != null) 'uri': uri!, + if (featureOnlineStores != null) + 'featureOnlineStores': featureOnlineStores!, + if (nextPageToken != null) 'nextPageToken': nextPageToken!, }; } -/// Chunk from the web. -class GoogleCloudAiplatformV1GroundingChunkWeb { - /// Title of the chunk. - core.String? title; +/// Response message for FeatureOnlineStoreAdminService.ListFeatureViewSyncs. +class GoogleCloudAiplatformV1ListFeatureViewSyncsResponse { + /// The FeatureViewSyncs matching the request. + core.List? featureViewSyncs; - /// URI reference of the chunk. - core.String? uri; + /// A token, which can be sent as ListFeatureViewSyncsRequest.page_token to + /// retrieve the next page. + /// + /// If this field is omitted, there are no subsequent pages. + core.String? nextPageToken; - GoogleCloudAiplatformV1GroundingChunkWeb({ - this.title, - this.uri, + GoogleCloudAiplatformV1ListFeatureViewSyncsResponse({ + this.featureViewSyncs, + this.nextPageToken, }); - GoogleCloudAiplatformV1GroundingChunkWeb.fromJson(core.Map json_) + GoogleCloudAiplatformV1ListFeatureViewSyncsResponse.fromJson(core.Map json_) : this( - title: json_['title'] as core.String?, - uri: json_['uri'] as core.String?, + featureViewSyncs: (json_['featureViewSyncs'] as core.List?) 
+ ?.map((value) => GoogleCloudAiplatformV1FeatureViewSync.fromJson( + value as core.Map)) + .toList(), + nextPageToken: json_['nextPageToken'] as core.String?, ); core.Map toJson() => { - if (title != null) 'title': title!, - if (uri != null) 'uri': uri!, + if (featureViewSyncs != null) 'featureViewSyncs': featureViewSyncs!, + if (nextPageToken != null) 'nextPageToken': nextPageToken!, }; } -/// Metadata returned to client when grounding is enabled. -class GoogleCloudAiplatformV1GroundingMetadata { - /// List of supporting references retrieved from specified grounding source. - core.List? groundingChunks; +/// Response message for FeatureOnlineStoreAdminService.ListFeatureViews. +class GoogleCloudAiplatformV1ListFeatureViewsResponse { + /// The FeatureViews matching the request. + core.List? featureViews; - /// List of grounding support. + /// A token, which can be sent as ListFeatureViewsRequest.page_token to + /// retrieve the next page. /// - /// Optional. - core.List? groundingSupports; + /// If this field is omitted, there are no subsequent pages. + core.String? nextPageToken; - /// Retrieval metadata. - /// - /// Optional. Output only. - GoogleCloudAiplatformV1RetrievalMetadata? retrievalMetadata; + GoogleCloudAiplatformV1ListFeatureViewsResponse({ + this.featureViews, + this.nextPageToken, + }); - /// Google search entry for the following-up web searches. - /// - /// Optional. - GoogleCloudAiplatformV1SearchEntryPoint? searchEntryPoint; + GoogleCloudAiplatformV1ListFeatureViewsResponse.fromJson(core.Map json_) + : this( + featureViews: (json_['featureViews'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1FeatureView.fromJson( + value as core.Map)) + .toList(), + nextPageToken: json_['nextPageToken'] as core.String?, + ); - /// Web search queries for the following-up web search. + core.Map toJson() => { + if (featureViews != null) 'featureViews': featureViews!, + if (nextPageToken != null) 'nextPageToken': nextPageToken!, + }; +} + +/// Response message for FeaturestoreService.ListFeatures. +/// +/// Response message for FeatureRegistryService.ListFeatures. +class GoogleCloudAiplatformV1ListFeaturesResponse { + /// The Features matching the request. + core.List? features; + + /// A token, which can be sent as ListFeaturesRequest.page_token to retrieve + /// the next page. /// - /// Optional. - core.List? webSearchQueries; + /// If this field is omitted, there are no subsequent pages. + core.String? nextPageToken; - GoogleCloudAiplatformV1GroundingMetadata({ - this.groundingChunks, - this.groundingSupports, - this.retrievalMetadata, - this.searchEntryPoint, - this.webSearchQueries, + GoogleCloudAiplatformV1ListFeaturesResponse({ + this.features, + this.nextPageToken, }); - GoogleCloudAiplatformV1GroundingMetadata.fromJson(core.Map json_) + GoogleCloudAiplatformV1ListFeaturesResponse.fromJson(core.Map json_) : this( - groundingChunks: (json_['groundingChunks'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1GroundingChunk.fromJson( - value as core.Map)) - .toList(), - groundingSupports: (json_['groundingSupports'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1GroundingSupport.fromJson( + features: (json_['features'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1Feature.fromJson( value as core.Map)) .toList(), - retrievalMetadata: json_.containsKey('retrievalMetadata') - ? GoogleCloudAiplatformV1RetrievalMetadata.fromJson( - json_['retrievalMetadata'] - as core.Map) - : null, - searchEntryPoint: json_.containsKey('searchEntryPoint') - ? 
GoogleCloudAiplatformV1SearchEntryPoint.fromJson( - json_['searchEntryPoint'] - as core.Map) - : null, - webSearchQueries: (json_['webSearchQueries'] as core.List?) - ?.map((value) => value as core.String) - .toList(), + nextPageToken: json_['nextPageToken'] as core.String?, ); core.Map toJson() => { - if (groundingChunks != null) 'groundingChunks': groundingChunks!, - if (groundingSupports != null) 'groundingSupports': groundingSupports!, - if (retrievalMetadata != null) 'retrievalMetadata': retrievalMetadata!, - if (searchEntryPoint != null) 'searchEntryPoint': searchEntryPoint!, - if (webSearchQueries != null) 'webSearchQueries': webSearchQueries!, + if (features != null) 'features': features!, + if (nextPageToken != null) 'nextPageToken': nextPageToken!, }; } -/// Grounding support. -class GoogleCloudAiplatformV1GroundingSupport { - /// Confidence score of the support references. - /// - /// Ranges from 0 to 1. 1 is the most confident. This list must have the same - /// size as the grounding_chunk_indices. - core.List? confidenceScores; +/// Response message for FeaturestoreService.ListFeaturestores. +class GoogleCloudAiplatformV1ListFeaturestoresResponse { + /// The Featurestores matching the request. + core.List? featurestores; - /// A list of indices (into 'grounding_chunk') specifying the citations - /// associated with the claim. + /// A token, which can be sent as ListFeaturestoresRequest.page_token to + /// retrieve the next page. /// - /// For instance \[1,3,4\] means that grounding_chunk\[1\], - /// grounding_chunk\[3\], grounding_chunk\[4\] are the retrieved content - /// attributed to the claim. - core.List? groundingChunkIndices; - - /// Segment of the content this support belongs to. - GoogleCloudAiplatformV1Segment? segment; + /// If this field is omitted, there are no subsequent pages. + core.String? nextPageToken; - GoogleCloudAiplatformV1GroundingSupport({ - this.confidenceScores, - this.groundingChunkIndices, - this.segment, + GoogleCloudAiplatformV1ListFeaturestoresResponse({ + this.featurestores, + this.nextPageToken, }); - GoogleCloudAiplatformV1GroundingSupport.fromJson(core.Map json_) + GoogleCloudAiplatformV1ListFeaturestoresResponse.fromJson(core.Map json_) : this( - confidenceScores: (json_['confidenceScores'] as core.List?) - ?.map((value) => (value as core.num).toDouble()) - .toList(), - groundingChunkIndices: (json_['groundingChunkIndices'] as core.List?) - ?.map((value) => value as core.int) + featurestores: (json_['featurestores'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1Featurestore.fromJson( + value as core.Map)) .toList(), - segment: json_.containsKey('segment') - ? GoogleCloudAiplatformV1Segment.fromJson( - json_['segment'] as core.Map) - : null, + nextPageToken: json_['nextPageToken'] as core.String?, ); core.Map toJson() => { - if (confidenceScores != null) 'confidenceScores': confidenceScores!, - if (groundingChunkIndices != null) - 'groundingChunkIndices': groundingChunkIndices!, - if (segment != null) 'segment': segment!, + if (featurestores != null) 'featurestores': featurestores!, + if (nextPageToken != null) 'nextPageToken': nextPageToken!, }; } -/// Represents a HyperparameterTuningJob. -/// -/// A HyperparameterTuningJob has a Study specification and multiple CustomJobs -/// with identical CustomJob specification. -class GoogleCloudAiplatformV1HyperparameterTuningJob { - /// Time when the HyperparameterTuningJob was created. - /// - /// Output only. - core.String? 
createTime; - - /// The display name of the HyperparameterTuningJob. - /// - /// The name can be up to 128 characters long and can consist of any UTF-8 - /// characters. +/// Response message for JobService.ListHyperparameterTuningJobs +class GoogleCloudAiplatformV1ListHyperparameterTuningJobsResponse { + /// List of HyperparameterTuningJobs in the requested page. /// - /// Required. - core.String? displayName; + /// HyperparameterTuningJob.trials of the jobs will be not be returned. + core.List? + hyperparameterTuningJobs; - /// Customer-managed encryption key options for a HyperparameterTuningJob. + /// A token to retrieve the next page of results. /// - /// If this is set, then all resources created by the HyperparameterTuningJob - /// will be encrypted with the provided encryption key. - GoogleCloudAiplatformV1EncryptionSpec? encryptionSpec; + /// Pass to ListHyperparameterTuningJobsRequest.page_token to obtain that + /// page. + core.String? nextPageToken; - /// Time when the HyperparameterTuningJob entered any of the following states: - /// `JOB_STATE_SUCCEEDED`, `JOB_STATE_FAILED`, `JOB_STATE_CANCELLED`. - /// - /// Output only. - core.String? endTime; + GoogleCloudAiplatformV1ListHyperparameterTuningJobsResponse({ + this.hyperparameterTuningJobs, + this.nextPageToken, + }); - /// Only populated when job's state is JOB_STATE_FAILED or - /// JOB_STATE_CANCELLED. - /// - /// Output only. - GoogleRpcStatus? error; + GoogleCloudAiplatformV1ListHyperparameterTuningJobsResponse.fromJson( + core.Map json_) + : this( + hyperparameterTuningJobs: + (json_['hyperparameterTuningJobs'] as core.List?) + ?.map((value) => + GoogleCloudAiplatformV1HyperparameterTuningJob.fromJson( + value as core.Map)) + .toList(), + nextPageToken: json_['nextPageToken'] as core.String?, + ); - /// The labels with user-defined metadata to organize - /// HyperparameterTuningJobs. - /// - /// Label keys and values can be no longer than 64 characters (Unicode - /// codepoints), can only contain lowercase letters, numeric characters, - /// underscores and dashes. International characters are allowed. See - /// https://goo.gl/xmQnxf for more information and examples of labels. - core.Map? labels; + core.Map toJson() => { + if (hyperparameterTuningJobs != null) + 'hyperparameterTuningJobs': hyperparameterTuningJobs!, + if (nextPageToken != null) 'nextPageToken': nextPageToken!, + }; +} - /// The number of failed Trials that need to be seen before failing the - /// HyperparameterTuningJob. - /// - /// If set to 0, Vertex AI decides how many Trials must fail before the whole - /// job fails. - core.int? maxFailedTrialCount; +/// Response message for IndexEndpointService.ListIndexEndpoints. +class GoogleCloudAiplatformV1ListIndexEndpointsResponse { + /// List of IndexEndpoints in the requested page. + core.List? indexEndpoints; - /// The desired total number of Trials. + /// A token to retrieve next page of results. /// - /// Required. - core.int? maxTrialCount; + /// Pass to ListIndexEndpointsRequest.page_token to obtain that page. + core.String? nextPageToken; - /// Resource name of the HyperparameterTuningJob. - /// - /// Output only. - core.String? name; + GoogleCloudAiplatformV1ListIndexEndpointsResponse({ + this.indexEndpoints, + this.nextPageToken, + }); - /// The desired number of Trials to run in parallel. - /// - /// Required. - core.int? parallelTrialCount; + GoogleCloudAiplatformV1ListIndexEndpointsResponse.fromJson(core.Map json_) + : this( + indexEndpoints: (json_['indexEndpoints'] as core.List?) 
+ ?.map((value) => GoogleCloudAiplatformV1IndexEndpoint.fromJson( + value as core.Map)) + .toList(), + nextPageToken: json_['nextPageToken'] as core.String?, + ); - /// Reserved for future use. - /// - /// Output only. - core.bool? satisfiesPzi; + core.Map toJson() => { + if (indexEndpoints != null) 'indexEndpoints': indexEndpoints!, + if (nextPageToken != null) 'nextPageToken': nextPageToken!, + }; +} - /// Reserved for future use. - /// - /// Output only. - core.bool? satisfiesPzs; +/// Response message for IndexService.ListIndexes. +class GoogleCloudAiplatformV1ListIndexesResponse { + /// List of indexes in the requested page. + core.List? indexes; - /// Time when the HyperparameterTuningJob for the first time entered the - /// `JOB_STATE_RUNNING` state. + /// A token to retrieve next page of results. /// - /// Output only. - core.String? startTime; + /// Pass to ListIndexesRequest.page_token to obtain that page. + core.String? nextPageToken; - /// The detailed state of the job. - /// - /// Output only. - /// Possible string values are: - /// - "JOB_STATE_UNSPECIFIED" : The job state is unspecified. - /// - "JOB_STATE_QUEUED" : The job has been just created or resumed and - /// processing has not yet begun. - /// - "JOB_STATE_PENDING" : The service is preparing to run the job. - /// - "JOB_STATE_RUNNING" : The job is in progress. - /// - "JOB_STATE_SUCCEEDED" : The job completed successfully. - /// - "JOB_STATE_FAILED" : The job failed. - /// - "JOB_STATE_CANCELLING" : The job is being cancelled. From this state the - /// job may only go to either `JOB_STATE_SUCCEEDED`, `JOB_STATE_FAILED` or - /// `JOB_STATE_CANCELLED`. - /// - "JOB_STATE_CANCELLED" : The job has been cancelled. - /// - "JOB_STATE_PAUSED" : The job has been stopped, and can be resumed. - /// - "JOB_STATE_EXPIRED" : The job has expired. - /// - "JOB_STATE_UPDATING" : The job is being updated. Only jobs in the - /// `RUNNING` state can be updated. After updating, the job goes back to the - /// `RUNNING` state. - /// - "JOB_STATE_PARTIALLY_SUCCEEDED" : The job is partially succeeded, some - /// results may be missing due to errors. - core.String? state; + GoogleCloudAiplatformV1ListIndexesResponse({ + this.indexes, + this.nextPageToken, + }); - /// Study configuration of the HyperparameterTuningJob. - /// - /// Required. - GoogleCloudAiplatformV1StudySpec? studySpec; + GoogleCloudAiplatformV1ListIndexesResponse.fromJson(core.Map json_) + : this( + indexes: (json_['indexes'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1Index.fromJson( + value as core.Map)) + .toList(), + nextPageToken: json_['nextPageToken'] as core.String?, + ); - /// The spec of a trial job. - /// - /// The same spec applies to the CustomJobs created in all the trials. - /// - /// Required. - GoogleCloudAiplatformV1CustomJobSpec? trialJobSpec; + core.Map toJson() => { + if (indexes != null) 'indexes': indexes!, + if (nextPageToken != null) 'nextPageToken': nextPageToken!, + }; +} - /// Trials of the HyperparameterTuningJob. - /// - /// Output only. - core.List? trials; +/// Response message for MetadataService.ListMetadataSchemas. +class GoogleCloudAiplatformV1ListMetadataSchemasResponse { + /// The MetadataSchemas found for the MetadataStore. + core.List? metadataSchemas; - /// Time when the HyperparameterTuningJob was most recently updated. + /// A token, which can be sent as ListMetadataSchemasRequest.page_token to + /// retrieve the next page. /// - /// Output only. - core.String? 
updateTime; + /// If this field is not populated, there are no subsequent pages. + core.String? nextPageToken; - GoogleCloudAiplatformV1HyperparameterTuningJob({ - this.createTime, - this.displayName, - this.encryptionSpec, - this.endTime, - this.error, - this.labels, - this.maxFailedTrialCount, - this.maxTrialCount, - this.name, - this.parallelTrialCount, - this.satisfiesPzi, - this.satisfiesPzs, - this.startTime, - this.state, - this.studySpec, - this.trialJobSpec, - this.trials, - this.updateTime, + GoogleCloudAiplatformV1ListMetadataSchemasResponse({ + this.metadataSchemas, + this.nextPageToken, }); - GoogleCloudAiplatformV1HyperparameterTuningJob.fromJson(core.Map json_) + GoogleCloudAiplatformV1ListMetadataSchemasResponse.fromJson(core.Map json_) : this( - createTime: json_['createTime'] as core.String?, - displayName: json_['displayName'] as core.String?, - encryptionSpec: json_.containsKey('encryptionSpec') - ? GoogleCloudAiplatformV1EncryptionSpec.fromJson( - json_['encryptionSpec'] - as core.Map) - : null, - endTime: json_['endTime'] as core.String?, - error: json_.containsKey('error') - ? GoogleRpcStatus.fromJson( - json_['error'] as core.Map) - : null, - labels: - (json_['labels'] as core.Map?)?.map( - (key, value) => core.MapEntry( - key, - value as core.String, - ), - ), - maxFailedTrialCount: json_['maxFailedTrialCount'] as core.int?, - maxTrialCount: json_['maxTrialCount'] as core.int?, - name: json_['name'] as core.String?, - parallelTrialCount: json_['parallelTrialCount'] as core.int?, - satisfiesPzi: json_['satisfiesPzi'] as core.bool?, - satisfiesPzs: json_['satisfiesPzs'] as core.bool?, - startTime: json_['startTime'] as core.String?, - state: json_['state'] as core.String?, - studySpec: json_.containsKey('studySpec') - ? GoogleCloudAiplatformV1StudySpec.fromJson( - json_['studySpec'] as core.Map) - : null, - trialJobSpec: json_.containsKey('trialJobSpec') - ? GoogleCloudAiplatformV1CustomJobSpec.fromJson( - json_['trialJobSpec'] as core.Map) - : null, - trials: (json_['trials'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1Trial.fromJson( + metadataSchemas: (json_['metadataSchemas'] as core.List?) 
+ ?.map((value) => GoogleCloudAiplatformV1MetadataSchema.fromJson( value as core.Map)) .toList(), - updateTime: json_['updateTime'] as core.String?, + nextPageToken: json_['nextPageToken'] as core.String?, ); core.Map toJson() => { - if (createTime != null) 'createTime': createTime!, - if (displayName != null) 'displayName': displayName!, - if (encryptionSpec != null) 'encryptionSpec': encryptionSpec!, - if (endTime != null) 'endTime': endTime!, - if (error != null) 'error': error!, - if (labels != null) 'labels': labels!, - if (maxFailedTrialCount != null) - 'maxFailedTrialCount': maxFailedTrialCount!, - if (maxTrialCount != null) 'maxTrialCount': maxTrialCount!, - if (name != null) 'name': name!, - if (parallelTrialCount != null) - 'parallelTrialCount': parallelTrialCount!, - if (satisfiesPzi != null) 'satisfiesPzi': satisfiesPzi!, - if (satisfiesPzs != null) 'satisfiesPzs': satisfiesPzs!, - if (startTime != null) 'startTime': startTime!, - if (state != null) 'state': state!, - if (studySpec != null) 'studySpec': studySpec!, - if (trialJobSpec != null) 'trialJobSpec': trialJobSpec!, - if (trials != null) 'trials': trials!, - if (updateTime != null) 'updateTime': updateTime!, + if (metadataSchemas != null) 'metadataSchemas': metadataSchemas!, + if (nextPageToken != null) 'nextPageToken': nextPageToken!, }; } -/// Matcher for Features of an EntityType by Feature ID. -class GoogleCloudAiplatformV1IdMatcher { - /// The following are accepted as `ids`: * A single-element list containing - /// only `*`, which selects all Features in the target EntityType, or * A list - /// containing only Feature IDs, which selects only Features with those IDs in - /// the target EntityType. +/// Response message for MetadataService.ListMetadataStores. +class GoogleCloudAiplatformV1ListMetadataStoresResponse { + /// The MetadataStores found for the Location. + core.List? metadataStores; + + /// A token, which can be sent as ListMetadataStoresRequest.page_token to + /// retrieve the next page. /// - /// Required. - core.List? ids; + /// If this field is not populated, there are no subsequent pages. + core.String? nextPageToken; - GoogleCloudAiplatformV1IdMatcher({ - this.ids, + GoogleCloudAiplatformV1ListMetadataStoresResponse({ + this.metadataStores, + this.nextPageToken, }); - GoogleCloudAiplatformV1IdMatcher.fromJson(core.Map json_) + GoogleCloudAiplatformV1ListMetadataStoresResponse.fromJson(core.Map json_) : this( - ids: (json_['ids'] as core.List?) - ?.map((value) => value as core.String) + metadataStores: (json_['metadataStores'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1MetadataStore.fromJson( + value as core.Map)) .toList(), + nextPageToken: json_['nextPageToken'] as core.String?, ); core.Map toJson() => { - if (ids != null) 'ids': ids!, + if (metadataStores != null) 'metadataStores': metadataStores!, + if (nextPageToken != null) 'nextPageToken': nextPageToken!, }; } -/// Describes the location from where we import data into a Dataset, together -/// with the labels that will be applied to the DataItems and the Annotations. -class GoogleCloudAiplatformV1ImportDataConfig { - /// Labels that will be applied to newly imported Annotations. - /// - /// If two Annotations are identical, one of them will be deduped. Two - /// Annotations are considered identical if their payload, payload_schema_uri - /// and all of their labels are the same. These labels will be overridden by - /// Annotation labels specified inside index file referenced by - /// import_schema_uri, e.g. jsonl file. - core.Map? 
annotationLabels; - - /// Labels that will be applied to newly imported DataItems. - /// - /// If an identical DataItem as one being imported already exists in the - /// Dataset, then these labels will be appended to these of the already - /// existing one, and if labels with identical key is imported before, the old - /// label value will be overwritten. If two DataItems are identical in the - /// same import data operation, the labels will be combined and if key - /// collision happens in this case, one of the values will be picked randomly. - /// Two DataItems are considered identical if their content bytes are - /// identical (e.g. image bytes or pdf bytes). These labels will be overridden - /// by Annotation labels specified inside index file referenced by - /// import_schema_uri, e.g. jsonl file. - core.Map? dataItemLabels; - - /// The Google Cloud Storage location for the input content. - GoogleCloudAiplatformV1GcsSource? gcsSource; +/// Response message for JobService.ListModelDeploymentMonitoringJobs. +class GoogleCloudAiplatformV1ListModelDeploymentMonitoringJobsResponse { + /// A list of ModelDeploymentMonitoringJobs that matches the specified filter + /// in the request. + core.List? + modelDeploymentMonitoringJobs; - /// Points to a YAML file stored on Google Cloud Storage describing the import - /// format. - /// - /// Validation will be done against the schema. The schema is defined as an - /// [OpenAPI 3.0.2 Schema Object](https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.2.md#schemaObject). - /// - /// Required. - core.String? importSchemaUri; + /// The standard List next-page token. + core.String? nextPageToken; - GoogleCloudAiplatformV1ImportDataConfig({ - this.annotationLabels, - this.dataItemLabels, - this.gcsSource, - this.importSchemaUri, + GoogleCloudAiplatformV1ListModelDeploymentMonitoringJobsResponse({ + this.modelDeploymentMonitoringJobs, + this.nextPageToken, }); - GoogleCloudAiplatformV1ImportDataConfig.fromJson(core.Map json_) + GoogleCloudAiplatformV1ListModelDeploymentMonitoringJobsResponse.fromJson( + core.Map json_) : this( - annotationLabels: (json_['annotationLabels'] - as core.Map?) - ?.map( - (key, value) => core.MapEntry( - key, - value as core.String, - ), - ), - dataItemLabels: - (json_['dataItemLabels'] as core.Map?) - ?.map( - (key, value) => core.MapEntry( - key, - value as core.String, - ), - ), - gcsSource: json_.containsKey('gcsSource') - ? GoogleCloudAiplatformV1GcsSource.fromJson( - json_['gcsSource'] as core.Map) - : null, - importSchemaUri: json_['importSchemaUri'] as core.String?, + modelDeploymentMonitoringJobs: (json_['modelDeploymentMonitoringJobs'] + as core.List?) + ?.map((value) => + GoogleCloudAiplatformV1ModelDeploymentMonitoringJob.fromJson( + value as core.Map)) + .toList(), + nextPageToken: json_['nextPageToken'] as core.String?, ); core.Map toJson() => { - if (annotationLabels != null) 'annotationLabels': annotationLabels!, - if (dataItemLabels != null) 'dataItemLabels': dataItemLabels!, - if (gcsSource != null) 'gcsSource': gcsSource!, - if (importSchemaUri != null) 'importSchemaUri': importSchemaUri!, + if (modelDeploymentMonitoringJobs != null) + 'modelDeploymentMonitoringJobs': modelDeploymentMonitoringJobs!, + if (nextPageToken != null) 'nextPageToken': nextPageToken!, }; } -/// Request message for DatasetService.ImportData. -class GoogleCloudAiplatformV1ImportDataRequest { - /// The desired input locations. - /// - /// The contents of all input locations will be imported in one batch. 
+/// Response message for ModelService.ListModelEvaluationSlices. +class GoogleCloudAiplatformV1ListModelEvaluationSlicesResponse { + /// List of ModelEvaluations in the requested page. + core.List? modelEvaluationSlices; + + /// A token to retrieve next page of results. /// - /// Required. - core.List? importConfigs; + /// Pass to ListModelEvaluationSlicesRequest.page_token to obtain that page. + core.String? nextPageToken; - GoogleCloudAiplatformV1ImportDataRequest({ - this.importConfigs, + GoogleCloudAiplatformV1ListModelEvaluationSlicesResponse({ + this.modelEvaluationSlices, + this.nextPageToken, }); - GoogleCloudAiplatformV1ImportDataRequest.fromJson(core.Map json_) + GoogleCloudAiplatformV1ListModelEvaluationSlicesResponse.fromJson( + core.Map json_) : this( - importConfigs: (json_['importConfigs'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1ImportDataConfig.fromJson( - value as core.Map)) + modelEvaluationSlices: (json_['modelEvaluationSlices'] as core.List?) + ?.map((value) => + GoogleCloudAiplatformV1ModelEvaluationSlice.fromJson( + value as core.Map)) .toList(), + nextPageToken: json_['nextPageToken'] as core.String?, ); core.Map toJson() => { - if (importConfigs != null) 'importConfigs': importConfigs!, + if (modelEvaluationSlices != null) + 'modelEvaluationSlices': modelEvaluationSlices!, + if (nextPageToken != null) 'nextPageToken': nextPageToken!, }; } -/// Request message for FeaturestoreService.ImportFeatureValues. -class GoogleCloudAiplatformV1ImportFeatureValuesRequest { - GoogleCloudAiplatformV1AvroSource? avroSource; - GoogleCloudAiplatformV1BigQuerySource? bigquerySource; - GoogleCloudAiplatformV1CsvSource? csvSource; - - /// If true, API doesn't start ingestion analysis pipeline. - core.bool? disableIngestionAnalysis; +/// Response message for ModelService.ListModelEvaluations. +class GoogleCloudAiplatformV1ListModelEvaluationsResponse { + /// List of ModelEvaluations in the requested page. + core.List? modelEvaluations; - /// If set, data will not be imported for online serving. + /// A token to retrieve next page of results. /// - /// This is typically used for backfilling, where Feature generation - /// timestamps are not in the timestamp range needed for online serving. - core.bool? disableOnlineServing; + /// Pass to ListModelEvaluationsRequest.page_token to obtain that page. + core.String? nextPageToken; - /// Source column that holds entity IDs. - /// - /// If not provided, entity IDs are extracted from the column named entity_id. - core.String? entityIdField; + GoogleCloudAiplatformV1ListModelEvaluationsResponse({ + this.modelEvaluations, + this.nextPageToken, + }); - /// Specifications defining which Feature values to import from the entity. - /// - /// The request fails if no feature_specs are provided, and having multiple - /// feature_specs for one Feature is not allowed. - /// - /// Required. - core.List? - featureSpecs; + GoogleCloudAiplatformV1ListModelEvaluationsResponse.fromJson(core.Map json_) + : this( + modelEvaluations: (json_['modelEvaluations'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1ModelEvaluation.fromJson( + value as core.Map)) + .toList(), + nextPageToken: json_['nextPageToken'] as core.String?, + ); - /// Single Feature timestamp for all entities being imported. - /// - /// The timestamp must not have higher than millisecond precision. - core.String? 
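// Editor's sketch, not generated code: the ImportDataConfig comments above
// describe how import labels and the import schema drive
// DatasetService.ImportData. A minimal construction sketch; the
// `GcsSource.uris` field and the schema URI value are assumptions, since that
// class and the concrete schema files are not part of this hunk.
GoogleCloudAiplatformV1ImportDataRequest buildImportRequest() =>
    GoogleCloudAiplatformV1ImportDataRequest(
      importConfigs: [
        GoogleCloudAiplatformV1ImportDataConfig(
          gcsSource: GoogleCloudAiplatformV1GcsSource(
            uris: ['gs://my-bucket/import/data.jsonl'], // assumed field name
          ),
          // Illustrative schema URI; validation is done against this schema.
          importSchemaUri: 'gs://google-cloud-aiplatform/schema/dataset/ioformat/image_classification_single_label_io_format_1.0.0.yaml',
          // Applied to newly imported DataItems, per the comment above.
          dataItemLabels: {'source': 'batch-import'},
        ),
      ],
    );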
featureTime; + core.Map toJson() => { + if (modelEvaluations != null) 'modelEvaluations': modelEvaluations!, + if (nextPageToken != null) 'nextPageToken': nextPageToken!, + }; +} - /// Source column that holds the Feature timestamp for all Feature values in - /// each entity. - core.String? featureTimeField; +/// Response message for ModelService.ListModelVersions +class GoogleCloudAiplatformV1ListModelVersionsResponse { + /// List of Model versions in the requested page. + /// + /// In the returned Model name field, version ID instead of regvision tag will + /// be included. + core.List? models; - /// Specifies the number of workers that are used to write data to the - /// Featurestore. + /// A token to retrieve the next page of results. /// - /// Consider the online serving capacity that you require to achieve the - /// desired import throughput without interfering with online serving. The - /// value must be positive, and less than or equal to 100. If not set, - /// defaults to using 1 worker. The low count ensures minimal impact on online - /// serving performance. - core.int? workerCount; + /// Pass to ListModelVersionsRequest.page_token to obtain that page. + core.String? nextPageToken; - GoogleCloudAiplatformV1ImportFeatureValuesRequest({ - this.avroSource, - this.bigquerySource, - this.csvSource, - this.disableIngestionAnalysis, - this.disableOnlineServing, - this.entityIdField, - this.featureSpecs, - this.featureTime, - this.featureTimeField, - this.workerCount, + GoogleCloudAiplatformV1ListModelVersionsResponse({ + this.models, + this.nextPageToken, }); - GoogleCloudAiplatformV1ImportFeatureValuesRequest.fromJson(core.Map json_) + GoogleCloudAiplatformV1ListModelVersionsResponse.fromJson(core.Map json_) : this( - avroSource: json_.containsKey('avroSource') - ? GoogleCloudAiplatformV1AvroSource.fromJson( - json_['avroSource'] as core.Map) - : null, - bigquerySource: json_.containsKey('bigquerySource') - ? GoogleCloudAiplatformV1BigQuerySource.fromJson( - json_['bigquerySource'] - as core.Map) - : null, - csvSource: json_.containsKey('csvSource') - ? GoogleCloudAiplatformV1CsvSource.fromJson( - json_['csvSource'] as core.Map) - : null, - disableIngestionAnalysis: - json_['disableIngestionAnalysis'] as core.bool?, - disableOnlineServing: json_['disableOnlineServing'] as core.bool?, - entityIdField: json_['entityIdField'] as core.String?, - featureSpecs: (json_['featureSpecs'] as core.List?) - ?.map((value) => - GoogleCloudAiplatformV1ImportFeatureValuesRequestFeatureSpec - .fromJson(value as core.Map)) + models: (json_['models'] as core.List?) 
+ ?.map((value) => GoogleCloudAiplatformV1Model.fromJson( + value as core.Map)) .toList(), - featureTime: json_['featureTime'] as core.String?, - featureTimeField: json_['featureTimeField'] as core.String?, - workerCount: json_['workerCount'] as core.int?, + nextPageToken: json_['nextPageToken'] as core.String?, ); core.Map toJson() => { - if (avroSource != null) 'avroSource': avroSource!, - if (bigquerySource != null) 'bigquerySource': bigquerySource!, - if (csvSource != null) 'csvSource': csvSource!, - if (disableIngestionAnalysis != null) - 'disableIngestionAnalysis': disableIngestionAnalysis!, - if (disableOnlineServing != null) - 'disableOnlineServing': disableOnlineServing!, - if (entityIdField != null) 'entityIdField': entityIdField!, - if (featureSpecs != null) 'featureSpecs': featureSpecs!, - if (featureTime != null) 'featureTime': featureTime!, - if (featureTimeField != null) 'featureTimeField': featureTimeField!, - if (workerCount != null) 'workerCount': workerCount!, + if (models != null) 'models': models!, + if (nextPageToken != null) 'nextPageToken': nextPageToken!, }; } -/// Defines the Feature value(s) to import. -class GoogleCloudAiplatformV1ImportFeatureValuesRequestFeatureSpec { - /// ID of the Feature to import values of. - /// - /// This Feature must exist in the target EntityType, or the request will - /// fail. - /// - /// Required. - core.String? id; +/// Response message for ModelService.ListModels +class GoogleCloudAiplatformV1ListModelsResponse { + /// List of Models in the requested page. + core.List? models; - /// Source column to get the Feature values from. + /// A token to retrieve next page of results. /// - /// If not set, uses the column with the same name as the Feature ID. - core.String? sourceField; + /// Pass to ListModelsRequest.page_token to obtain that page. + core.String? nextPageToken; - GoogleCloudAiplatformV1ImportFeatureValuesRequestFeatureSpec({ - this.id, - this.sourceField, + GoogleCloudAiplatformV1ListModelsResponse({ + this.models, + this.nextPageToken, }); - GoogleCloudAiplatformV1ImportFeatureValuesRequestFeatureSpec.fromJson( - core.Map json_) + GoogleCloudAiplatformV1ListModelsResponse.fromJson(core.Map json_) : this( - id: json_['id'] as core.String?, - sourceField: json_['sourceField'] as core.String?, + models: (json_['models'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1Model.fromJson( + value as core.Map)) + .toList(), + nextPageToken: json_['nextPageToken'] as core.String?, ); core.Map toJson() => { - if (id != null) 'id': id!, - if (sourceField != null) 'sourceField': sourceField!, + if (models != null) 'models': models!, + if (nextPageToken != null) 'nextPageToken': nextPageToken!, }; } -/// Request message for ModelService.ImportModelEvaluation -class GoogleCloudAiplatformV1ImportModelEvaluationRequest { - /// Model evaluation resource to be imported. +/// Response message for JobService.ListNasJobs +class GoogleCloudAiplatformV1ListNasJobsResponse { + /// List of NasJobs in the requested page. /// - /// Required. - GoogleCloudAiplatformV1ModelEvaluation? modelEvaluation; + /// NasJob.nas_job_output of the jobs will not be returned. + core.List? nasJobs; - GoogleCloudAiplatformV1ImportModelEvaluationRequest({ - this.modelEvaluation, + /// A token to retrieve the next page of results. + /// + /// Pass to ListNasJobsRequest.page_token to obtain that page. + core.String? 
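// Editor's sketch, not generated code: putting the ImportFeatureValues fields
// documented above together, one FeatureSpec per Feature, an entity id column,
// a per-row timestamp column, and a bounded worker count. The
// `BigQuerySource.inputUri` field is an assumption from the wider library.
final exampleImportFeatureValuesRequest =
    GoogleCloudAiplatformV1ImportFeatureValuesRequest(
  bigquerySource: GoogleCloudAiplatformV1BigQuerySource(
    inputUri: 'bq://my-project.my_dataset.feature_values', // assumed field name
  ),
  entityIdField: 'customer_id',
  featureTimeField: 'event_timestamp',
  featureSpecs: [
    GoogleCloudAiplatformV1ImportFeatureValuesRequestFeatureSpec(
      id: 'lifetime_value',
      sourceField: 'ltv_usd', // defaults to the Feature ID when omitted
    ),
  ],
  workerCount: 1, // must be <= 100; 1 minimizes impact on online serving
);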
nextPageToken; + + GoogleCloudAiplatformV1ListNasJobsResponse({ + this.nasJobs, + this.nextPageToken, }); - GoogleCloudAiplatformV1ImportModelEvaluationRequest.fromJson(core.Map json_) + GoogleCloudAiplatformV1ListNasJobsResponse.fromJson(core.Map json_) : this( - modelEvaluation: json_.containsKey('modelEvaluation') - ? GoogleCloudAiplatformV1ModelEvaluation.fromJson( - json_['modelEvaluation'] - as core.Map) - : null, + nasJobs: (json_['nasJobs'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1NasJob.fromJson( + value as core.Map)) + .toList(), + nextPageToken: json_['nextPageToken'] as core.String?, ); core.Map toJson() => { - if (modelEvaluation != null) 'modelEvaluation': modelEvaluation!, + if (nasJobs != null) 'nasJobs': nasJobs!, + if (nextPageToken != null) 'nextPageToken': nextPageToken!, }; } -/// A representation of a collection of database items organized in a way that -/// allows for approximate nearest neighbor (a.k.a ANN) algorithms search. -class GoogleCloudAiplatformV1Index { - /// Timestamp when this Index was created. - /// - /// Output only. - core.String? createTime; +/// Response message for JobService.ListNasTrialDetails +class GoogleCloudAiplatformV1ListNasTrialDetailsResponse { + /// List of top NasTrials in the requested page. + core.List? nasTrialDetails; - /// The pointers to DeployedIndexes created from this Index. - /// - /// An Index can be only deleted if all its DeployedIndexes had been - /// undeployed first. + /// A token to retrieve the next page of results. /// - /// Output only. - core.List? deployedIndexes; + /// Pass to ListNasTrialDetailsRequest.page_token to obtain that page. + core.String? nextPageToken; - /// The description of the Index. - core.String? description; + GoogleCloudAiplatformV1ListNasTrialDetailsResponse({ + this.nasTrialDetails, + this.nextPageToken, + }); - /// The display name of the Index. - /// - /// The name can be up to 128 characters long and can consist of any UTF-8 - /// characters. - /// - /// Required. - core.String? displayName; + GoogleCloudAiplatformV1ListNasTrialDetailsResponse.fromJson(core.Map json_) + : this( + nasTrialDetails: (json_['nasTrialDetails'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1NasTrialDetail.fromJson( + value as core.Map)) + .toList(), + nextPageToken: json_['nextPageToken'] as core.String?, + ); - /// Customer-managed encryption key spec for an Index. - /// - /// If set, this Index and all sub-resources of this Index will be secured by - /// this key. - /// - /// Immutable. - GoogleCloudAiplatformV1EncryptionSpec? encryptionSpec; + core.Map toJson() => { + if (nasTrialDetails != null) 'nasTrialDetails': nasTrialDetails!, + if (nextPageToken != null) 'nextPageToken': nextPageToken!, + }; +} - /// Used to perform consistent read-modify-write updates. +/// Response message for \[NotebookService.CreateNotebookExecutionJob\] +class GoogleCloudAiplatformV1ListNotebookExecutionJobsResponse { + /// A token to retrieve next page of results. /// - /// If not set, a blind "overwrite" update happens. - core.String? etag; + /// Pass to ListNotebookExecutionJobsRequest.page_token to obtain that page. + core.String? nextPageToken; - /// Stats of the index resource. - /// - /// Output only. - GoogleCloudAiplatformV1IndexStats? indexStats; + /// List of NotebookExecutionJobs in the requested page. + core.List? notebookExecutionJobs; - /// The update method to use with this Index. - /// - /// If not set, BATCH_UPDATE will be used by default. - /// - /// Immutable. 
- /// Possible string values are: - /// - "INDEX_UPDATE_METHOD_UNSPECIFIED" : Should not be used. - /// - "BATCH_UPDATE" : BatchUpdate: user can call UpdateIndex with files on - /// Cloud Storage of Datapoints to update. - /// - "STREAM_UPDATE" : StreamUpdate: user can call - /// UpsertDatapoints/DeleteDatapoints to update the Index and the updates will - /// be applied in corresponding DeployedIndexes in nearly real-time. - core.String? indexUpdateMethod; + GoogleCloudAiplatformV1ListNotebookExecutionJobsResponse({ + this.nextPageToken, + this.notebookExecutionJobs, + }); - /// The labels with user-defined metadata to organize your Indexes. - /// - /// Label keys and values can be no longer than 64 characters (Unicode - /// codepoints), can only contain lowercase letters, numeric characters, - /// underscores and dashes. International characters are allowed. See - /// https://goo.gl/xmQnxf for more information and examples of labels. - core.Map? labels; + GoogleCloudAiplatformV1ListNotebookExecutionJobsResponse.fromJson( + core.Map json_) + : this( + nextPageToken: json_['nextPageToken'] as core.String?, + notebookExecutionJobs: (json_['notebookExecutionJobs'] as core.List?) + ?.map((value) => + GoogleCloudAiplatformV1NotebookExecutionJob.fromJson( + value as core.Map)) + .toList(), + ); - /// An additional information about the Index; the schema of the metadata can - /// be found in metadata_schema. - /// - /// The values for Object must be JSON objects. It can consist of `num`, - /// `String`, `bool` and `null` as well as `Map` and `List` values. - core.Object? metadata; + core.Map toJson() => { + if (nextPageToken != null) 'nextPageToken': nextPageToken!, + if (notebookExecutionJobs != null) + 'notebookExecutionJobs': notebookExecutionJobs!, + }; +} - /// Points to a YAML file stored on Google Cloud Storage describing additional - /// information about the Index, that is specific to it. - /// - /// Unset if the Index does not have any additional information. The schema is - /// defined as an OpenAPI 3.0.2 - /// [Schema Object](https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.2.md#schemaObject). - /// Note: The URI given on output will be immutable and probably different, - /// including the URI scheme, than the one given on input. The output URI will - /// point to a location where the user only has a read access. +/// Response message for NotebookService.ListNotebookRuntimeTemplates. +class GoogleCloudAiplatformV1ListNotebookRuntimeTemplatesResponse { + /// A token to retrieve next page of results. /// - /// Immutable. - core.String? metadataSchemaUri; + /// Pass to ListNotebookRuntimeTemplatesRequest.page_token to obtain that + /// page. + core.String? nextPageToken; - /// The resource name of the Index. - /// - /// Output only. - core.String? name; + /// List of NotebookRuntimeTemplates in the requested page. + core.List? + notebookRuntimeTemplates; - /// Reserved for future use. - /// - /// Output only. - core.bool? satisfiesPzi; + GoogleCloudAiplatformV1ListNotebookRuntimeTemplatesResponse({ + this.nextPageToken, + this.notebookRuntimeTemplates, + }); + + GoogleCloudAiplatformV1ListNotebookRuntimeTemplatesResponse.fromJson( + core.Map json_) + : this( + nextPageToken: json_['nextPageToken'] as core.String?, + notebookRuntimeTemplates: + (json_['notebookRuntimeTemplates'] as core.List?) + ?.map((value) => + GoogleCloudAiplatformV1NotebookRuntimeTemplate.fromJson( + value as core.Map)) + .toList(), + ); - /// Reserved for future use. - /// - /// Output only. 
- core.bool? satisfiesPzs; + core.Map toJson() => { + if (nextPageToken != null) 'nextPageToken': nextPageToken!, + if (notebookRuntimeTemplates != null) + 'notebookRuntimeTemplates': notebookRuntimeTemplates!, + }; +} - /// Timestamp when this Index was most recently updated. - /// - /// This also includes any update to the contents of the Index. Note that - /// Operations working on this Index may have their - /// Operations.metadata.generic_metadata.update_time a little after the value - /// of this timestamp, yet that does not mean their results are not already - /// reflected in the Index. Result of any successfully completed Operation on - /// the Index is reflected in it. +/// Response message for NotebookService.ListNotebookRuntimes. +class GoogleCloudAiplatformV1ListNotebookRuntimesResponse { + /// A token to retrieve next page of results. /// - /// Output only. - core.String? updateTime; + /// Pass to ListNotebookRuntimesRequest.page_token to obtain that page. + core.String? nextPageToken; - GoogleCloudAiplatformV1Index({ - this.createTime, - this.deployedIndexes, - this.description, - this.displayName, - this.encryptionSpec, - this.etag, - this.indexStats, - this.indexUpdateMethod, - this.labels, - this.metadata, - this.metadataSchemaUri, - this.name, - this.satisfiesPzi, - this.satisfiesPzs, - this.updateTime, + /// List of NotebookRuntimes in the requested page. + core.List? notebookRuntimes; + + GoogleCloudAiplatformV1ListNotebookRuntimesResponse({ + this.nextPageToken, + this.notebookRuntimes, }); - GoogleCloudAiplatformV1Index.fromJson(core.Map json_) + GoogleCloudAiplatformV1ListNotebookRuntimesResponse.fromJson(core.Map json_) : this( - createTime: json_['createTime'] as core.String?, - deployedIndexes: (json_['deployedIndexes'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1DeployedIndexRef.fromJson( + nextPageToken: json_['nextPageToken'] as core.String?, + notebookRuntimes: (json_['notebookRuntimes'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1NotebookRuntime.fromJson( value as core.Map)) .toList(), - description: json_['description'] as core.String?, - displayName: json_['displayName'] as core.String?, - encryptionSpec: json_.containsKey('encryptionSpec') - ? GoogleCloudAiplatformV1EncryptionSpec.fromJson( - json_['encryptionSpec'] - as core.Map) - : null, - etag: json_['etag'] as core.String?, - indexStats: json_.containsKey('indexStats') - ? 
GoogleCloudAiplatformV1IndexStats.fromJson( - json_['indexStats'] as core.Map) - : null, - indexUpdateMethod: json_['indexUpdateMethod'] as core.String?, - labels: - (json_['labels'] as core.Map?)?.map( - (key, value) => core.MapEntry( - key, - value as core.String, - ), - ), - metadata: json_['metadata'], - metadataSchemaUri: json_['metadataSchemaUri'] as core.String?, - name: json_['name'] as core.String?, - satisfiesPzi: json_['satisfiesPzi'] as core.bool?, - satisfiesPzs: json_['satisfiesPzs'] as core.bool?, - updateTime: json_['updateTime'] as core.String?, ); core.Map toJson() => { - if (createTime != null) 'createTime': createTime!, - if (deployedIndexes != null) 'deployedIndexes': deployedIndexes!, - if (description != null) 'description': description!, - if (displayName != null) 'displayName': displayName!, - if (encryptionSpec != null) 'encryptionSpec': encryptionSpec!, - if (etag != null) 'etag': etag!, - if (indexStats != null) 'indexStats': indexStats!, - if (indexUpdateMethod != null) 'indexUpdateMethod': indexUpdateMethod!, - if (labels != null) 'labels': labels!, - if (metadata != null) 'metadata': metadata!, - if (metadataSchemaUri != null) 'metadataSchemaUri': metadataSchemaUri!, - if (name != null) 'name': name!, - if (satisfiesPzi != null) 'satisfiesPzi': satisfiesPzi!, - if (satisfiesPzs != null) 'satisfiesPzs': satisfiesPzs!, - if (updateTime != null) 'updateTime': updateTime!, + if (nextPageToken != null) 'nextPageToken': nextPageToken!, + if (notebookRuntimes != null) 'notebookRuntimes': notebookRuntimes!, }; } -/// A datapoint of Index. -class GoogleCloudAiplatformV1IndexDatapoint { - /// CrowdingTag of the datapoint, the number of neighbors to return in each - /// crowding can be configured during query. - /// - /// Optional. - GoogleCloudAiplatformV1IndexDatapointCrowdingTag? crowdingTag; +/// Request message for VizierService.ListOptimalTrials. +typedef GoogleCloudAiplatformV1ListOptimalTrialsRequest = $Empty; - /// Unique identifier of the datapoint. +/// Response message for VizierService.ListOptimalTrials. +class GoogleCloudAiplatformV1ListOptimalTrialsResponse { + /// The pareto-optimal Trials for multiple objective Study or the optimal + /// trial for single objective Study. /// - /// Required. - core.String? datapointId; + /// The definition of pareto-optimal can be checked in wiki page. + /// https://en.wikipedia.org/wiki/Pareto_efficiency + core.List? optimalTrials; - /// Feature embedding vector for dense index. - /// - /// An array of numbers with the length of - /// \[NearestNeighborSearchConfig.dimensions\]. - /// - /// Required. - core.List? featureVector; + GoogleCloudAiplatformV1ListOptimalTrialsResponse({ + this.optimalTrials, + }); - /// List of Restrict of the datapoint, used to perform "restricted searches" - /// where boolean rule are used to filter the subset of the database eligible - /// for matching. - /// - /// This uses numeric comparisons. - /// - /// Optional. - core.List? - numericRestricts; + GoogleCloudAiplatformV1ListOptimalTrialsResponse.fromJson(core.Map json_) + : this( + optimalTrials: (json_['optimalTrials'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1Trial.fromJson( + value as core.Map)) + .toList(), + ); - /// List of Restrict of the datapoint, used to perform "restricted searches" - /// where boolean rule are used to filter the subset of the database eligible - /// for matching. - /// - /// This uses categorical tokens. 
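// Editor's sketch, not generated code: the Index.indexUpdateMethod comment
// above distinguishes BATCH_UPDATE (rebuilt from datapoint files on Cloud
// Storage) from STREAM_UPDATE (Upsert/DeleteDatapoints applied in near real
// time). A minimal construction for a stream-updated index; the metadata
// contents are illustrative, since their schema lives in metadata_schema_uri
// and is not shown in this hunk.
final exampleStreamIndex = GoogleCloudAiplatformV1Index(
  displayName: 'products-ann-index',
  description: 'ANN index over product embeddings',
  indexUpdateMethod: 'STREAM_UPDATE',
  metadata: {
    'config': {'dimensions': 128}, // illustrative, schema-defined
  },
  labels: {'team': 'search'},
);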
See: - /// https://cloud.google.com/vertex-ai/docs/matching-engine/filtering - /// - /// Optional. - core.List? restricts; + core.Map toJson() => { + if (optimalTrials != null) 'optimalTrials': optimalTrials!, + }; +} - /// Feature embedding vector for sparse index. +/// Response message for PersistentResourceService.ListPersistentResources +class GoogleCloudAiplatformV1ListPersistentResourcesResponse { + /// A token to retrieve next page of results. /// - /// Optional. - GoogleCloudAiplatformV1IndexDatapointSparseEmbedding? sparseEmbedding; + /// Pass to ListPersistentResourcesRequest.page_token to obtain that page. + core.String? nextPageToken; + core.List? persistentResources; - GoogleCloudAiplatformV1IndexDatapoint({ - this.crowdingTag, - this.datapointId, - this.featureVector, - this.numericRestricts, - this.restricts, - this.sparseEmbedding, + GoogleCloudAiplatformV1ListPersistentResourcesResponse({ + this.nextPageToken, + this.persistentResources, }); - GoogleCloudAiplatformV1IndexDatapoint.fromJson(core.Map json_) + GoogleCloudAiplatformV1ListPersistentResourcesResponse.fromJson( + core.Map json_) : this( - crowdingTag: json_.containsKey('crowdingTag') - ? GoogleCloudAiplatformV1IndexDatapointCrowdingTag.fromJson( - json_['crowdingTag'] as core.Map) - : null, - datapointId: json_['datapointId'] as core.String?, - featureVector: (json_['featureVector'] as core.List?) - ?.map((value) => (value as core.num).toDouble()) - .toList(), - numericRestricts: (json_['numericRestricts'] as core.List?) - ?.map((value) => - GoogleCloudAiplatformV1IndexDatapointNumericRestriction - .fromJson(value as core.Map)) - .toList(), - restricts: (json_['restricts'] as core.List?) + nextPageToken: json_['nextPageToken'] as core.String?, + persistentResources: (json_['persistentResources'] as core.List?) ?.map((value) => - GoogleCloudAiplatformV1IndexDatapointRestriction.fromJson( + GoogleCloudAiplatformV1PersistentResource.fromJson( value as core.Map)) .toList(), - sparseEmbedding: json_.containsKey('sparseEmbedding') - ? GoogleCloudAiplatformV1IndexDatapointSparseEmbedding.fromJson( - json_['sparseEmbedding'] - as core.Map) - : null, ); core.Map toJson() => { - if (crowdingTag != null) 'crowdingTag': crowdingTag!, - if (datapointId != null) 'datapointId': datapointId!, - if (featureVector != null) 'featureVector': featureVector!, - if (numericRestricts != null) 'numericRestricts': numericRestricts!, - if (restricts != null) 'restricts': restricts!, - if (sparseEmbedding != null) 'sparseEmbedding': sparseEmbedding!, + if (nextPageToken != null) 'nextPageToken': nextPageToken!, + if (persistentResources != null) + 'persistentResources': persistentResources!, }; } -/// Crowding tag is a constraint on a neighbor list produced by nearest neighbor -/// search requiring that no more than some value k' of the k neighbors returned -/// have the same value of crowding_attribute. -class GoogleCloudAiplatformV1IndexDatapointCrowdingTag { - /// The attribute value used for crowding. +/// Response message for PipelineService.ListPipelineJobs +class GoogleCloudAiplatformV1ListPipelineJobsResponse { + /// A token to retrieve the next page of results. /// - /// The maximum number of neighbors to return per crowding attribute value - /// (per_crowding_attribute_num_neighbors) is configured per-query. This field - /// is ignored if per_crowding_attribute_num_neighbors is larger than the - /// total number of neighbors to return for a given query. - core.String? 
crowdingAttribute; + /// Pass to ListPipelineJobsRequest.page_token to obtain that page. + core.String? nextPageToken; - GoogleCloudAiplatformV1IndexDatapointCrowdingTag({ - this.crowdingAttribute, + /// List of PipelineJobs in the requested page. + core.List? pipelineJobs; + + GoogleCloudAiplatformV1ListPipelineJobsResponse({ + this.nextPageToken, + this.pipelineJobs, }); - GoogleCloudAiplatformV1IndexDatapointCrowdingTag.fromJson(core.Map json_) + GoogleCloudAiplatformV1ListPipelineJobsResponse.fromJson(core.Map json_) : this( - crowdingAttribute: json_['crowdingAttribute'] as core.String?, + nextPageToken: json_['nextPageToken'] as core.String?, + pipelineJobs: (json_['pipelineJobs'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1PipelineJob.fromJson( + value as core.Map)) + .toList(), ); core.Map toJson() => { - if (crowdingAttribute != null) 'crowdingAttribute': crowdingAttribute!, + if (nextPageToken != null) 'nextPageToken': nextPageToken!, + if (pipelineJobs != null) 'pipelineJobs': pipelineJobs!, }; } -/// This field allows restricts to be based on numeric comparisons rather than -/// categorical tokens. -class GoogleCloudAiplatformV1IndexDatapointNumericRestriction { - /// The namespace of this restriction. +/// Response message for VertexRagDataService.ListRagCorpora. +class GoogleCloudAiplatformV1ListRagCorporaResponse { + /// A token to retrieve the next page of results. /// - /// e.g.: cost. - core.String? namespace; + /// Pass to ListRagCorporaRequest.page_token to obtain that page. + core.String? nextPageToken; - /// This MUST be specified for queries and must NOT be specified for - /// datapoints. - /// Possible string values are: - /// - "OPERATOR_UNSPECIFIED" : Default value of the enum. - /// - "LESS" : Datapoints are eligible iff their value is \< the query's. - /// - "LESS_EQUAL" : Datapoints are eligible iff their value is \<= the - /// query's. - /// - "EQUAL" : Datapoints are eligible iff their value is == the query's. - /// - "GREATER_EQUAL" : Datapoints are eligible iff their value is \>= the - /// query's. - /// - "GREATER" : Datapoints are eligible iff their value is \> the query's. - /// - "NOT_EQUAL" : Datapoints are eligible iff their value is != the query's. - core.String? op; + /// List of RagCorpora in the requested page. + core.List? ragCorpora; - /// Represents 64 bit float. - core.double? valueDouble; + GoogleCloudAiplatformV1ListRagCorporaResponse({ + this.nextPageToken, + this.ragCorpora, + }); - /// Represents 32 bit float. - core.double? valueFloat; + GoogleCloudAiplatformV1ListRagCorporaResponse.fromJson(core.Map json_) + : this( + nextPageToken: json_['nextPageToken'] as core.String?, + ragCorpora: (json_['ragCorpora'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1RagCorpus.fromJson( + value as core.Map)) + .toList(), + ); - /// Represents 64 bit integer. - core.String? valueInt; + core.Map toJson() => { + if (nextPageToken != null) 'nextPageToken': nextPageToken!, + if (ragCorpora != null) 'ragCorpora': ragCorpora!, + }; +} - GoogleCloudAiplatformV1IndexDatapointNumericRestriction({ - this.namespace, - this.op, - this.valueDouble, - this.valueFloat, - this.valueInt, +/// Response message for VertexRagDataService.ListRagFiles. +class GoogleCloudAiplatformV1ListRagFilesResponse { + /// A token to retrieve the next page of results. + /// + /// Pass to ListRagFilesRequest.page_token to obtain that page. + core.String? nextPageToken; + + /// List of RagFiles in the requested page. + core.List? 
ragFiles; + + GoogleCloudAiplatformV1ListRagFilesResponse({ + this.nextPageToken, + this.ragFiles, }); - GoogleCloudAiplatformV1IndexDatapointNumericRestriction.fromJson( - core.Map json_) + GoogleCloudAiplatformV1ListRagFilesResponse.fromJson(core.Map json_) : this( - namespace: json_['namespace'] as core.String?, - op: json_['op'] as core.String?, - valueDouble: (json_['valueDouble'] as core.num?)?.toDouble(), - valueFloat: (json_['valueFloat'] as core.num?)?.toDouble(), - valueInt: json_['valueInt'] as core.String?, + nextPageToken: json_['nextPageToken'] as core.String?, + ragFiles: (json_['ragFiles'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1RagFile.fromJson( + value as core.Map)) + .toList(), ); core.Map toJson() => { - if (namespace != null) 'namespace': namespace!, - if (op != null) 'op': op!, - if (valueDouble != null) 'valueDouble': valueDouble!, - if (valueFloat != null) 'valueFloat': valueFloat!, - if (valueInt != null) 'valueInt': valueInt!, + if (nextPageToken != null) 'nextPageToken': nextPageToken!, + if (ragFiles != null) 'ragFiles': ragFiles!, }; } -/// Restriction of a datapoint which describe its attributes(tokens) from each -/// of several attribute categories(namespaces). -class GoogleCloudAiplatformV1IndexDatapointRestriction { - /// The attributes to allow in this namespace. - /// - /// e.g.: 'red' - core.List? allowList; - - /// The attributes to deny in this namespace. +/// Response message for ReasoningEngineService.ListReasoningEngines +class GoogleCloudAiplatformV1ListReasoningEnginesResponse { + /// A token to retrieve the next page of results. /// - /// e.g.: 'blue' - core.List? denyList; + /// Pass to ListReasoningEnginesRequest.page_token to obtain that page. + core.String? nextPageToken; - /// The namespace of this restriction. - /// - /// e.g.: color. - core.String? namespace; + /// List of ReasoningEngines in the requested page. + core.List? reasoningEngines; - GoogleCloudAiplatformV1IndexDatapointRestriction({ - this.allowList, - this.denyList, - this.namespace, + GoogleCloudAiplatformV1ListReasoningEnginesResponse({ + this.nextPageToken, + this.reasoningEngines, }); - GoogleCloudAiplatformV1IndexDatapointRestriction.fromJson(core.Map json_) + GoogleCloudAiplatformV1ListReasoningEnginesResponse.fromJson(core.Map json_) : this( - allowList: (json_['allowList'] as core.List?) - ?.map((value) => value as core.String) + nextPageToken: json_['nextPageToken'] as core.String?, + reasoningEngines: (json_['reasoningEngines'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1ReasoningEngine.fromJson( + value as core.Map)) .toList(), - denyList: (json_['denyList'] as core.List?) - ?.map((value) => value as core.String) + ); + + core.Map toJson() => { + if (nextPageToken != null) 'nextPageToken': nextPageToken!, + if (reasoningEngines != null) 'reasoningEngines': reasoningEngines!, + }; +} + +/// Response message for DatasetService.ListSavedQueries. +class GoogleCloudAiplatformV1ListSavedQueriesResponse { + /// The standard List next-page token. + core.String? nextPageToken; + + /// A list of SavedQueries that match the specified filter in the request. + core.List? savedQueries; + + GoogleCloudAiplatformV1ListSavedQueriesResponse({ + this.nextPageToken, + this.savedQueries, + }); + + GoogleCloudAiplatformV1ListSavedQueriesResponse.fromJson(core.Map json_) + : this( + nextPageToken: json_['nextPageToken'] as core.String?, + savedQueries: (json_['savedQueries'] as core.List?) 
+ ?.map((value) => GoogleCloudAiplatformV1SavedQuery.fromJson( + value as core.Map)) .toList(), - namespace: json_['namespace'] as core.String?, ); core.Map toJson() => { - if (allowList != null) 'allowList': allowList!, - if (denyList != null) 'denyList': denyList!, - if (namespace != null) 'namespace': namespace!, + if (nextPageToken != null) 'nextPageToken': nextPageToken!, + if (savedQueries != null) 'savedQueries': savedQueries!, }; } -/// Feature embedding vector for sparse index. -/// -/// An array of numbers whose values are located in the specified dimensions. -class GoogleCloudAiplatformV1IndexDatapointSparseEmbedding { - /// The list of indexes for the embedding values of the sparse vector. +/// Response message for ScheduleService.ListSchedules +class GoogleCloudAiplatformV1ListSchedulesResponse { + /// A token to retrieve the next page of results. /// - /// Required. - core.List? dimensions; + /// Pass to ListSchedulesRequest.page_token to obtain that page. + core.String? nextPageToken; - /// The list of embedding values of the sparse vector. - /// - /// Required. - core.List? values; + /// List of Schedules in the requested page. + core.List? schedules; - GoogleCloudAiplatformV1IndexDatapointSparseEmbedding({ - this.dimensions, - this.values, + GoogleCloudAiplatformV1ListSchedulesResponse({ + this.nextPageToken, + this.schedules, }); - GoogleCloudAiplatformV1IndexDatapointSparseEmbedding.fromJson(core.Map json_) + GoogleCloudAiplatformV1ListSchedulesResponse.fromJson(core.Map json_) : this( - dimensions: (json_['dimensions'] as core.List?) - ?.map((value) => value as core.String) - .toList(), - values: (json_['values'] as core.List?) - ?.map((value) => (value as core.num).toDouble()) + nextPageToken: json_['nextPageToken'] as core.String?, + schedules: (json_['schedules'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1Schedule.fromJson( + value as core.Map)) .toList(), ); core.Map toJson() => { - if (dimensions != null) 'dimensions': dimensions!, - if (values != null) 'values': values!, + if (nextPageToken != null) 'nextPageToken': nextPageToken!, + if (schedules != null) 'schedules': schedules!, }; } -/// Indexes are deployed into it. -/// -/// An IndexEndpoint can have multiple DeployedIndexes. -class GoogleCloudAiplatformV1IndexEndpoint { - /// Timestamp when this IndexEndpoint was created. - /// - /// Output only. - core.String? createTime; +/// Response message for SpecialistPoolService.ListSpecialistPools. +class GoogleCloudAiplatformV1ListSpecialistPoolsResponse { + /// The standard List next-page token. + core.String? nextPageToken; - /// The indexes deployed in this endpoint. - /// - /// Output only. - core.List? deployedIndexes; + /// A list of SpecialistPools that matches the specified filter in the + /// request. + core.List? specialistPools; - /// The description of the IndexEndpoint. - core.String? description; + GoogleCloudAiplatformV1ListSpecialistPoolsResponse({ + this.nextPageToken, + this.specialistPools, + }); - /// The display name of the IndexEndpoint. - /// - /// The name can be up to 128 characters long and can consist of any UTF-8 - /// characters. - /// - /// Required. - core.String? displayName; + GoogleCloudAiplatformV1ListSpecialistPoolsResponse.fromJson(core.Map json_) + : this( + nextPageToken: json_['nextPageToken'] as core.String?, + specialistPools: (json_['specialistPools'] as core.List?) 
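// Editor's sketch, not generated code: the datapoint and restrict classes
// above combine as follows. Categorical `restricts` allow or deny tokens per
// namespace, `numericRestricts` carry a value (with `op` set only on queries),
// and the crowding tag caps how many neighbors share one attribute value.
// Field values here are illustrative.
final exampleDatapoint = GoogleCloudAiplatformV1IndexDatapoint(
  datapointId: 'sku-42',
  featureVector: [0.12, -0.53, 0.08], // length must match the index dimensions
  restricts: [
    GoogleCloudAiplatformV1IndexDatapointRestriction(
      namespace: 'color',
      allowList: ['red'],
    ),
  ],
  numericRestricts: [
    GoogleCloudAiplatformV1IndexDatapointNumericRestriction(
      namespace: 'cost',
      valueDouble: 19.99, // `op` is specified on queries, not stored datapoints
    ),
  ],
  crowdingTag: GoogleCloudAiplatformV1IndexDatapointCrowdingTag(
    crowdingAttribute: 'brand-7',
  ),
);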
+ ?.map((value) => GoogleCloudAiplatformV1SpecialistPool.fromJson( + value as core.Map)) + .toList(), + ); - /// Deprecated: If true, expose the IndexEndpoint via private service connect. - /// - /// Only one of the fields, network or enable_private_service_connect, can be - /// set. - /// - /// Optional. - @core.Deprecated( - 'Not supported. Member documentation may have more information.', - ) - core.bool? enablePrivateServiceConnect; + core.Map toJson() => { + if (nextPageToken != null) 'nextPageToken': nextPageToken!, + if (specialistPools != null) 'specialistPools': specialistPools!, + }; +} - /// Customer-managed encryption key spec for an IndexEndpoint. - /// - /// If set, this IndexEndpoint and all sub-resources of this IndexEndpoint - /// will be secured by this key. +/// Response message for VizierService.ListStudies. +class GoogleCloudAiplatformV1ListStudiesResponse { + /// Passes this token as the `page_token` field of the request for a + /// subsequent call. /// - /// Immutable. - GoogleCloudAiplatformV1EncryptionSpec? encryptionSpec; + /// If this field is omitted, there are no subsequent pages. + core.String? nextPageToken; - /// Used to perform consistent read-modify-write updates. - /// - /// If not set, a blind "overwrite" update happens. - core.String? etag; + /// The studies associated with the project. + core.List? studies; - /// The labels with user-defined metadata to organize your IndexEndpoints. - /// - /// Label keys and values can be no longer than 64 characters (Unicode - /// codepoints), can only contain lowercase letters, numeric characters, - /// underscores and dashes. International characters are allowed. See - /// https://goo.gl/xmQnxf for more information and examples of labels. - core.Map? labels; + GoogleCloudAiplatformV1ListStudiesResponse({ + this.nextPageToken, + this.studies, + }); - /// The resource name of the IndexEndpoint. - /// - /// Output only. - core.String? name; + GoogleCloudAiplatformV1ListStudiesResponse.fromJson(core.Map json_) + : this( + nextPageToken: json_['nextPageToken'] as core.String?, + studies: (json_['studies'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1Study.fromJson( + value as core.Map)) + .toList(), + ); + + core.Map toJson() => { + if (nextPageToken != null) 'nextPageToken': nextPageToken!, + if (studies != null) 'studies': studies!, + }; +} - /// The full name of the Google Compute Engine - /// [network](https://cloud.google.com/compute/docs/networks-and-firewalls#networks) - /// to which the IndexEndpoint should be peered. - /// - /// Private services access must already be configured for the network. If - /// left unspecified, the Endpoint is not peered with any network. network and - /// private_service_connect_config are mutually exclusive. - /// [Format](https://cloud.google.com/compute/docs/reference/rest/v1/networks/insert): - /// `projects/{project}/global/networks/{network}`. Where {project} is a - /// project number, as in '12345', and {network} is network name. +/// Response message for TensorboardService.ListTensorboardExperiments. +class GoogleCloudAiplatformV1ListTensorboardExperimentsResponse { + /// A token, which can be sent as ListTensorboardExperimentsRequest.page_token + /// to retrieve the next page. /// - /// Optional. - core.String? network; + /// If this field is omitted, there are no subsequent pages. + core.String? nextPageToken; - /// Configuration for private service connect. - /// - /// network and private_service_connect_config are mutually exclusive. - /// - /// Optional. 
- GoogleCloudAiplatformV1PrivateServiceConnectConfig? - privateServiceConnectConfig; + /// The TensorboardExperiments mathching the request. + core.List? + tensorboardExperiments; - /// If public_endpoint_enabled is true, this field will be populated with the - /// domain name to use for this index endpoint. - /// - /// Output only. - core.String? publicEndpointDomainName; + GoogleCloudAiplatformV1ListTensorboardExperimentsResponse({ + this.nextPageToken, + this.tensorboardExperiments, + }); - /// If true, the deployed index will be accessible through public endpoint. - /// - /// Optional. - core.bool? publicEndpointEnabled; + GoogleCloudAiplatformV1ListTensorboardExperimentsResponse.fromJson( + core.Map json_) + : this( + nextPageToken: json_['nextPageToken'] as core.String?, + tensorboardExperiments: + (json_['tensorboardExperiments'] as core.List?) + ?.map((value) => + GoogleCloudAiplatformV1TensorboardExperiment.fromJson( + value as core.Map)) + .toList(), + ); - /// Reserved for future use. - /// - /// Output only. - core.bool? satisfiesPzi; + core.Map toJson() => { + if (nextPageToken != null) 'nextPageToken': nextPageToken!, + if (tensorboardExperiments != null) + 'tensorboardExperiments': tensorboardExperiments!, + }; +} - /// Reserved for future use. +/// Response message for TensorboardService.ListTensorboardRuns. +class GoogleCloudAiplatformV1ListTensorboardRunsResponse { + /// A token, which can be sent as ListTensorboardRunsRequest.page_token to + /// retrieve the next page. /// - /// Output only. - core.bool? satisfiesPzs; + /// If this field is omitted, there are no subsequent pages. + core.String? nextPageToken; - /// Timestamp when this IndexEndpoint was last updated. - /// - /// This timestamp is not updated when the endpoint's DeployedIndexes are - /// updated, e.g. due to updates of the original Indexes they are the - /// deployments of. - /// - /// Output only. - core.String? updateTime; + /// The TensorboardRuns mathching the request. + core.List? tensorboardRuns; - GoogleCloudAiplatformV1IndexEndpoint({ - this.createTime, - this.deployedIndexes, - this.description, - this.displayName, - this.enablePrivateServiceConnect, - this.encryptionSpec, - this.etag, - this.labels, - this.name, - this.network, - this.privateServiceConnectConfig, - this.publicEndpointDomainName, - this.publicEndpointEnabled, - this.satisfiesPzi, - this.satisfiesPzs, - this.updateTime, + GoogleCloudAiplatformV1ListTensorboardRunsResponse({ + this.nextPageToken, + this.tensorboardRuns, }); - GoogleCloudAiplatformV1IndexEndpoint.fromJson(core.Map json_) + GoogleCloudAiplatformV1ListTensorboardRunsResponse.fromJson(core.Map json_) : this( - createTime: json_['createTime'] as core.String?, - deployedIndexes: (json_['deployedIndexes'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1DeployedIndex.fromJson( + nextPageToken: json_['nextPageToken'] as core.String?, + tensorboardRuns: (json_['tensorboardRuns'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1TensorboardRun.fromJson( value as core.Map)) .toList(), - description: json_['description'] as core.String?, - displayName: json_['displayName'] as core.String?, - enablePrivateServiceConnect: - json_['enablePrivateServiceConnect'] as core.bool?, - encryptionSpec: json_.containsKey('encryptionSpec') - ? 
GoogleCloudAiplatformV1EncryptionSpec.fromJson( - json_['encryptionSpec'] - as core.Map) - : null, - etag: json_['etag'] as core.String?, - labels: - (json_['labels'] as core.Map?)?.map( - (key, value) => core.MapEntry( - key, - value as core.String, - ), - ), - name: json_['name'] as core.String?, - network: json_['network'] as core.String?, - privateServiceConnectConfig: - json_.containsKey('privateServiceConnectConfig') - ? GoogleCloudAiplatformV1PrivateServiceConnectConfig.fromJson( - json_['privateServiceConnectConfig'] - as core.Map) - : null, - publicEndpointDomainName: - json_['publicEndpointDomainName'] as core.String?, - publicEndpointEnabled: json_['publicEndpointEnabled'] as core.bool?, - satisfiesPzi: json_['satisfiesPzi'] as core.bool?, - satisfiesPzs: json_['satisfiesPzs'] as core.bool?, - updateTime: json_['updateTime'] as core.String?, ); core.Map toJson() => { - if (createTime != null) 'createTime': createTime!, - if (deployedIndexes != null) 'deployedIndexes': deployedIndexes!, - if (description != null) 'description': description!, - if (displayName != null) 'displayName': displayName!, - if (enablePrivateServiceConnect != null) - 'enablePrivateServiceConnect': enablePrivateServiceConnect!, - if (encryptionSpec != null) 'encryptionSpec': encryptionSpec!, - if (etag != null) 'etag': etag!, - if (labels != null) 'labels': labels!, - if (name != null) 'name': name!, - if (network != null) 'network': network!, - if (privateServiceConnectConfig != null) - 'privateServiceConnectConfig': privateServiceConnectConfig!, - if (publicEndpointDomainName != null) - 'publicEndpointDomainName': publicEndpointDomainName!, - if (publicEndpointEnabled != null) - 'publicEndpointEnabled': publicEndpointEnabled!, - if (satisfiesPzi != null) 'satisfiesPzi': satisfiesPzi!, - if (satisfiesPzs != null) 'satisfiesPzs': satisfiesPzs!, - if (updateTime != null) 'updateTime': updateTime!, + if (nextPageToken != null) 'nextPageToken': nextPageToken!, + if (tensorboardRuns != null) 'tensorboardRuns': tensorboardRuns!, }; } -/// IndexPrivateEndpoints proto is used to provide paths for users to send -/// requests via private endpoints (e.g. private service access, private service -/// connect). -/// -/// To send request via private service access, use match_grpc_address. To send -/// request via private service connect, use service_attachment. -class GoogleCloudAiplatformV1IndexPrivateEndpoints { - /// The ip address used to send match gRPC requests. - /// - /// Output only. - core.String? matchGrpcAddress; - - /// PscAutomatedEndpoints is populated if private service connect is enabled - /// if PscAutomatedConfig is set. +/// Response message for TensorboardService.ListTensorboardTimeSeries. +class GoogleCloudAiplatformV1ListTensorboardTimeSeriesResponse { + /// A token, which can be sent as ListTensorboardTimeSeriesRequest.page_token + /// to retrieve the next page. /// - /// Output only. - core.List? - pscAutomatedEndpoints; + /// If this field is omitted, there are no subsequent pages. + core.String? nextPageToken; - /// The name of the service attachment resource. - /// - /// Populated if private service connect is enabled. - /// - /// Output only. - core.String? serviceAttachment; + /// The TensorboardTimeSeries mathching the request. + core.List? 
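// Editor's sketch, not generated code: every response class above follows the
// same fromJson/toJson convention, so a page can be rebuilt from a decoded
// HTTP body and serialized back by hand when bypassing the generated resource
// methods. The `convert` prefix for dart:convert is an assumption about this
// file's imports; a plain `import 'dart:convert';` works the same way.
GoogleCloudAiplatformV1ListTensorboardRunsResponse decodeRunsPage(
    core.String body) {
  final map = convert.json.decode(body) as core.Map;
  return GoogleCloudAiplatformV1ListTensorboardRunsResponse.fromJson(map);
}

core.String encodeRunsPage(
        GoogleCloudAiplatformV1ListTensorboardRunsResponse page) =>
    convert.json.encode(page.toJson());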
+ tensorboardTimeSeries; - GoogleCloudAiplatformV1IndexPrivateEndpoints({ - this.matchGrpcAddress, - this.pscAutomatedEndpoints, - this.serviceAttachment, + GoogleCloudAiplatformV1ListTensorboardTimeSeriesResponse({ + this.nextPageToken, + this.tensorboardTimeSeries, }); - GoogleCloudAiplatformV1IndexPrivateEndpoints.fromJson(core.Map json_) + GoogleCloudAiplatformV1ListTensorboardTimeSeriesResponse.fromJson( + core.Map json_) : this( - matchGrpcAddress: json_['matchGrpcAddress'] as core.String?, - pscAutomatedEndpoints: (json_['pscAutomatedEndpoints'] as core.List?) + nextPageToken: json_['nextPageToken'] as core.String?, + tensorboardTimeSeries: (json_['tensorboardTimeSeries'] as core.List?) ?.map((value) => - GoogleCloudAiplatformV1PscAutomatedEndpoints.fromJson( + GoogleCloudAiplatformV1TensorboardTimeSeries.fromJson( value as core.Map)) .toList(), - serviceAttachment: json_['serviceAttachment'] as core.String?, ); core.Map toJson() => { - if (matchGrpcAddress != null) 'matchGrpcAddress': matchGrpcAddress!, - if (pscAutomatedEndpoints != null) - 'pscAutomatedEndpoints': pscAutomatedEndpoints!, - if (serviceAttachment != null) 'serviceAttachment': serviceAttachment!, + if (nextPageToken != null) 'nextPageToken': nextPageToken!, + if (tensorboardTimeSeries != null) + 'tensorboardTimeSeries': tensorboardTimeSeries!, }; } -/// Stats of the Index. -class GoogleCloudAiplatformV1IndexStats { - /// The number of shards in the Index. - /// - /// Output only. - core.int? shardsCount; - - /// The number of sparse vectors in the Index. +/// Response message for TensorboardService.ListTensorboards. +class GoogleCloudAiplatformV1ListTensorboardsResponse { + /// A token, which can be sent as ListTensorboardsRequest.page_token to + /// retrieve the next page. /// - /// Output only. - core.String? sparseVectorsCount; + /// If this field is omitted, there are no subsequent pages. + core.String? nextPageToken; - /// The number of dense vectors in the Index. - /// - /// Output only. - core.String? vectorsCount; + /// The Tensorboards mathching the request. + core.List? tensorboards; - GoogleCloudAiplatformV1IndexStats({ - this.shardsCount, - this.sparseVectorsCount, - this.vectorsCount, + GoogleCloudAiplatformV1ListTensorboardsResponse({ + this.nextPageToken, + this.tensorboards, }); - GoogleCloudAiplatformV1IndexStats.fromJson(core.Map json_) + GoogleCloudAiplatformV1ListTensorboardsResponse.fromJson(core.Map json_) : this( - shardsCount: json_['shardsCount'] as core.int?, - sparseVectorsCount: json_['sparseVectorsCount'] as core.String?, - vectorsCount: json_['vectorsCount'] as core.String?, + nextPageToken: json_['nextPageToken'] as core.String?, + tensorboards: (json_['tensorboards'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1Tensorboard.fromJson( + value as core.Map)) + .toList(), ); core.Map toJson() => { - if (shardsCount != null) 'shardsCount': shardsCount!, - if (sparseVectorsCount != null) - 'sparseVectorsCount': sparseVectorsCount!, - if (vectorsCount != null) 'vectorsCount': vectorsCount!, + if (nextPageToken != null) 'nextPageToken': nextPageToken!, + if (tensorboards != null) 'tensorboards': tensorboards!, }; } -/// Specifies Vertex AI owned input data to be used for training, and possibly -/// evaluating, the Model. -class GoogleCloudAiplatformV1InputDataConfig { - /// Applicable only to custom training with Datasets that have DataItems and - /// Annotations. - /// - /// Cloud Storage URI that points to a YAML file describing the annotation - /// schema. 
The schema is defined as an OpenAPI 3.0.2 - /// [Schema Object](https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.2.md#schemaObject). - /// The schema files that can be used here are found in - /// gs://google-cloud-aiplatform/schema/dataset/annotation/ , note that the - /// chosen schema must be consistent with metadata of the Dataset specified by - /// dataset_id. Only Annotations that both match this schema and belong to - /// DataItems not ignored by the split method are used in respectively - /// training, validation or test role, depending on the role of the DataItem - /// they are on. When used in conjunction with annotations_filter, the - /// Annotations used for training are filtered by both annotations_filter and - /// annotation_schema_uri. - core.String? annotationSchemaUri; - - /// Applicable only to Datasets that have DataItems and Annotations. +/// Response message for PipelineService.ListTrainingPipelines +class GoogleCloudAiplatformV1ListTrainingPipelinesResponse { + /// A token to retrieve the next page of results. /// - /// A filter on Annotations of the Dataset. Only Annotations that both match - /// this filter and belong to DataItems not ignored by the split method are - /// used in respectively training, validation or test role, depending on the - /// role of the DataItem they are on (for the auto-assigned that role is - /// decided by Vertex AI). A filter with same syntax as the one used in - /// ListAnnotations may be used, but note here it filters across all - /// Annotations of the Dataset, and not just within a single DataItem. - core.String? annotationsFilter; + /// Pass to ListTrainingPipelinesRequest.page_token to obtain that page. + core.String? nextPageToken; - /// Only applicable to custom training with tabular Dataset with BigQuery - /// source. - /// - /// The BigQuery project location where the training data is to be written to. - /// In the given project a new dataset is created with name `dataset___` where - /// timestamp is in YYYY_MM_DDThh_mm_ss_sssZ format. All training input data - /// is written into that dataset. In the dataset three tables are created, - /// `training`, `validation` and `test`. * AIP_DATA_FORMAT = "bigquery". * - /// AIP_TRAINING_DATA_URI = "bigquery_destination.dataset___.training" * - /// AIP_VALIDATION_DATA_URI = "bigquery_destination.dataset___.validation" * - /// AIP_TEST_DATA_URI = "bigquery_destination.dataset___.test" - GoogleCloudAiplatformV1BigQueryDestination? bigqueryDestination; + /// List of TrainingPipelines in the requested page. + core.List? trainingPipelines; - /// The ID of the Dataset in the same Project and Location which data will be - /// used to train the Model. - /// - /// The Dataset must use schema compatible with Model being trained, and what - /// is compatible should be described in the used TrainingPipeline's - /// training_task_definition. For tabular Datasets, all their data is exported - /// to training, to pick and choose from. - /// - /// Required. - core.String? datasetId; + GoogleCloudAiplatformV1ListTrainingPipelinesResponse({ + this.nextPageToken, + this.trainingPipelines, + }); - /// Split based on the provided filters for each set. - GoogleCloudAiplatformV1FilterSplit? filterSplit; + GoogleCloudAiplatformV1ListTrainingPipelinesResponse.fromJson(core.Map json_) + : this( + nextPageToken: json_['nextPageToken'] as core.String?, + trainingPipelines: (json_['trainingPipelines'] as core.List?) 
+ ?.map((value) => GoogleCloudAiplatformV1TrainingPipeline.fromJson( + value as core.Map)) + .toList(), + ); - /// Split based on fractions defining the size of each set. - GoogleCloudAiplatformV1FractionSplit? fractionSplit; + core.Map toJson() => { + if (nextPageToken != null) 'nextPageToken': nextPageToken!, + if (trainingPipelines != null) 'trainingPipelines': trainingPipelines!, + }; +} - /// The Cloud Storage location where the training data is to be written to. +/// Response message for VizierService.ListTrials. +class GoogleCloudAiplatformV1ListTrialsResponse { + /// Pass this token as the `page_token` field of the request for a subsequent + /// call. /// - /// In the given directory a new directory is created with name: `dataset---` - /// where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format. All - /// training input data is written into that directory. The Vertex AI - /// environment variables representing Cloud Storage data URIs are represented - /// in the Cloud Storage wildcard format to support sharded data. e.g.: - /// "gs://.../training-*.jsonl" * AIP_DATA_FORMAT = "jsonl" for non-tabular - /// data, "csv" for tabular data * AIP_TRAINING_DATA_URI = - /// "gcs_destination/dataset---/training-*.${AIP_DATA_FORMAT}" * - /// AIP_VALIDATION_DATA_URI = - /// "gcs_destination/dataset---/validation-*.${AIP_DATA_FORMAT}" * - /// AIP_TEST_DATA_URI = "gcs_destination/dataset---/test-*.${AIP_DATA_FORMAT}" - GoogleCloudAiplatformV1GcsDestination? gcsDestination; + /// If this field is omitted, there are no subsequent pages. + core.String? nextPageToken; - /// Whether to persist the ML use assignment to data item system labels. - core.bool? persistMlUseAssignment; + /// The Trials associated with the Study. + core.List? trials; - /// Supported only for tabular Datasets. - /// - /// Split based on a predefined key. - GoogleCloudAiplatformV1PredefinedSplit? predefinedSplit; + GoogleCloudAiplatformV1ListTrialsResponse({ + this.nextPageToken, + this.trials, + }); - /// Only applicable to Datasets that have SavedQueries. - /// - /// The ID of a SavedQuery (annotation set) under the Dataset specified by - /// dataset_id used for filtering Annotations for training. Only Annotations - /// that are associated with this SavedQuery are used in respectively - /// training. When used in conjunction with annotations_filter, the - /// Annotations used for training are filtered by both saved_query_id and - /// annotations_filter. Only one of saved_query_id and annotation_schema_uri - /// should be specified as both of them represent the same thing: problem - /// type. - core.String? savedQueryId; + GoogleCloudAiplatformV1ListTrialsResponse.fromJson(core.Map json_) + : this( + nextPageToken: json_['nextPageToken'] as core.String?, + trials: (json_['trials'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1Trial.fromJson( + value as core.Map)) + .toList(), + ); - /// Supported only for tabular Datasets. - /// - /// Split based on the distribution of the specified column. - GoogleCloudAiplatformV1StratifiedSplit? stratifiedSplit; + core.Map toJson() => { + if (nextPageToken != null) 'nextPageToken': nextPageToken!, + if (trials != null) 'trials': trials!, + }; +} - /// Supported only for tabular Datasets. +/// Response message for GenAiTuningService.ListTuningJobs +class GoogleCloudAiplatformV1ListTuningJobsResponse { + /// A token to retrieve the next page of results. /// - /// Split based on the timestamp of the input data pieces. - GoogleCloudAiplatformV1TimestampSplit? 
timestampSplit; + /// Pass to ListTuningJobsRequest.page_token to obtain that page. + core.String? nextPageToken; - GoogleCloudAiplatformV1InputDataConfig({ - this.annotationSchemaUri, - this.annotationsFilter, - this.bigqueryDestination, - this.datasetId, - this.filterSplit, - this.fractionSplit, - this.gcsDestination, - this.persistMlUseAssignment, - this.predefinedSplit, - this.savedQueryId, - this.stratifiedSplit, - this.timestampSplit, + /// List of TuningJobs in the requested page. + core.List? tuningJobs; + + GoogleCloudAiplatformV1ListTuningJobsResponse({ + this.nextPageToken, + this.tuningJobs, }); - GoogleCloudAiplatformV1InputDataConfig.fromJson(core.Map json_) + GoogleCloudAiplatformV1ListTuningJobsResponse.fromJson(core.Map json_) : this( - annotationSchemaUri: json_['annotationSchemaUri'] as core.String?, - annotationsFilter: json_['annotationsFilter'] as core.String?, - bigqueryDestination: json_.containsKey('bigqueryDestination') - ? GoogleCloudAiplatformV1BigQueryDestination.fromJson( - json_['bigqueryDestination'] - as core.Map) - : null, - datasetId: json_['datasetId'] as core.String?, - filterSplit: json_.containsKey('filterSplit') - ? GoogleCloudAiplatformV1FilterSplit.fromJson( - json_['filterSplit'] as core.Map) - : null, - fractionSplit: json_.containsKey('fractionSplit') - ? GoogleCloudAiplatformV1FractionSplit.fromJson( - json_['fractionSplit'] as core.Map) - : null, - gcsDestination: json_.containsKey('gcsDestination') - ? GoogleCloudAiplatformV1GcsDestination.fromJson( - json_['gcsDestination'] - as core.Map) - : null, - persistMlUseAssignment: json_['persistMlUseAssignment'] as core.bool?, - predefinedSplit: json_.containsKey('predefinedSplit') - ? GoogleCloudAiplatformV1PredefinedSplit.fromJson( - json_['predefinedSplit'] - as core.Map) - : null, - savedQueryId: json_['savedQueryId'] as core.String?, - stratifiedSplit: json_.containsKey('stratifiedSplit') - ? GoogleCloudAiplatformV1StratifiedSplit.fromJson( - json_['stratifiedSplit'] - as core.Map) - : null, - timestampSplit: json_.containsKey('timestampSplit') - ? GoogleCloudAiplatformV1TimestampSplit.fromJson( - json_['timestampSplit'] - as core.Map) - : null, + nextPageToken: json_['nextPageToken'] as core.String?, + tuningJobs: (json_['tuningJobs'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1TuningJob.fromJson( + value as core.Map)) + .toList(), ); core.Map toJson() => { - if (annotationSchemaUri != null) - 'annotationSchemaUri': annotationSchemaUri!, - if (annotationsFilter != null) 'annotationsFilter': annotationsFilter!, - if (bigqueryDestination != null) - 'bigqueryDestination': bigqueryDestination!, - if (datasetId != null) 'datasetId': datasetId!, - if (filterSplit != null) 'filterSplit': filterSplit!, - if (fractionSplit != null) 'fractionSplit': fractionSplit!, - if (gcsDestination != null) 'gcsDestination': gcsDestination!, - if (persistMlUseAssignment != null) - 'persistMlUseAssignment': persistMlUseAssignment!, - if (predefinedSplit != null) 'predefinedSplit': predefinedSplit!, - if (savedQueryId != null) 'savedQueryId': savedQueryId!, - if (stratifiedSplit != null) 'stratifiedSplit': stratifiedSplit!, - if (timestampSplit != null) 'timestampSplit': timestampSplit!, + if (nextPageToken != null) 'nextPageToken': nextPageToken!, + if (tuningJobs != null) 'tuningJobs': tuningJobs!, }; } -/// A list of int64 values. -class GoogleCloudAiplatformV1Int64Array { - /// A list of int64 values. - core.List? 
values; +/// Logprobs Result +class GoogleCloudAiplatformV1LogprobsResult { + /// Length = total number of decoding steps. + /// + /// The chosen candidates may or may not be in top_candidates. + core.List? chosenCandidates; - GoogleCloudAiplatformV1Int64Array({ - this.values, + /// Length = total number of decoding steps. + core.List? topCandidates; + + GoogleCloudAiplatformV1LogprobsResult({ + this.chosenCandidates, + this.topCandidates, }); - GoogleCloudAiplatformV1Int64Array.fromJson(core.Map json_) + GoogleCloudAiplatformV1LogprobsResult.fromJson(core.Map json_) : this( - values: (json_['values'] as core.List?) - ?.map((value) => value as core.String) + chosenCandidates: (json_['chosenCandidates'] as core.List?) + ?.map((value) => + GoogleCloudAiplatformV1LogprobsResultCandidate.fromJson( + value as core.Map)) + .toList(), + topCandidates: (json_['topCandidates'] as core.List?) + ?.map((value) => + GoogleCloudAiplatformV1LogprobsResultTopCandidates.fromJson( + value as core.Map)) .toList(), ); core.Map toJson() => { - if (values != null) 'values': values!, + if (chosenCandidates != null) 'chosenCandidates': chosenCandidates!, + if (topCandidates != null) 'topCandidates': topCandidates!, }; } -/// An attribution method that computes the Aumann-Shapley value taking -/// advantage of the model's fully differentiable structure. -/// -/// Refer to this paper for more details: https://arxiv.org/abs/1703.01365 -class GoogleCloudAiplatformV1IntegratedGradientsAttribution { - /// Config for IG with blur baseline. - /// - /// When enabled, a linear path from the maximally blurred image to the input - /// image is created. Using a blurred baseline instead of zero (black image) - /// is motivated by the BlurIG approach explained here: - /// https://arxiv.org/abs/2004.03383 - GoogleCloudAiplatformV1BlurBaselineConfig? blurBaselineConfig; +/// Candidate for the logprobs token and score. +class GoogleCloudAiplatformV1LogprobsResultCandidate { + /// The candidate's log probability. + core.double? logProbability; - /// Config for SmoothGrad approximation of gradients. - /// - /// When enabled, the gradients are approximated by averaging the gradients - /// from noisy samples in the vicinity of the inputs. Adding noise can help - /// improve the computed gradients. Refer to this paper for more details: - /// https://arxiv.org/pdf/1706.03825.pdf - GoogleCloudAiplatformV1SmoothGradConfig? smoothGradConfig; + /// The candidate's token string value. + core.String? token; - /// The number of steps for approximating the path integral. - /// - /// A good value to start is 50 and gradually increase until the sum to diff - /// property is within the desired error range. Valid range of its value is - /// \[1, 100\], inclusively. - /// - /// Required. - core.int? stepCount; + /// The candidate's token id value. + core.int? tokenId; - GoogleCloudAiplatformV1IntegratedGradientsAttribution({ - this.blurBaselineConfig, - this.smoothGradConfig, - this.stepCount, + GoogleCloudAiplatformV1LogprobsResultCandidate({ + this.logProbability, + this.token, + this.tokenId, }); - GoogleCloudAiplatformV1IntegratedGradientsAttribution.fromJson(core.Map json_) + GoogleCloudAiplatformV1LogprobsResultCandidate.fromJson(core.Map json_) : this( - blurBaselineConfig: json_.containsKey('blurBaselineConfig') - ? GoogleCloudAiplatformV1BlurBaselineConfig.fromJson( - json_['blurBaselineConfig'] - as core.Map) - : null, - smoothGradConfig: json_.containsKey('smoothGradConfig') - ? 
GoogleCloudAiplatformV1SmoothGradConfig.fromJson( - json_['smoothGradConfig'] - as core.Map) - : null, - stepCount: json_['stepCount'] as core.int?, + logProbability: (json_['logProbability'] as core.num?)?.toDouble(), + token: json_['token'] as core.String?, + tokenId: json_['tokenId'] as core.int?, ); core.Map toJson() => { - if (blurBaselineConfig != null) - 'blurBaselineConfig': blurBaselineConfig!, - if (smoothGradConfig != null) 'smoothGradConfig': smoothGradConfig!, - if (stepCount != null) 'stepCount': stepCount!, + if (logProbability != null) 'logProbability': logProbability!, + if (token != null) 'token': token!, + if (tokenId != null) 'tokenId': tokenId!, }; } -/// Contains information about the Large Model. -class GoogleCloudAiplatformV1LargeModelReference { - /// The unique name of the large Foundation or pre-built model. - /// - /// Like "chat-bison", "text-bison". Or model name with version ID, like - /// "chat-bison@001", "text-bison@005", etc. - /// - /// Required. - core.String? name; +/// Candidates with top log probabilities at each decoding step. +class GoogleCloudAiplatformV1LogprobsResultTopCandidates { + /// Sorted by log probability in descending order. + core.List? candidates; - GoogleCloudAiplatformV1LargeModelReference({ - this.name, + GoogleCloudAiplatformV1LogprobsResultTopCandidates({ + this.candidates, }); - GoogleCloudAiplatformV1LargeModelReference.fromJson(core.Map json_) + GoogleCloudAiplatformV1LogprobsResultTopCandidates.fromJson(core.Map json_) : this( - name: json_['name'] as core.String?, + candidates: (json_['candidates'] as core.List?) + ?.map((value) => + GoogleCloudAiplatformV1LogprobsResultCandidate.fromJson( + value as core.Map)) + .toList(), ); core.Map toJson() => { - if (name != null) 'name': name!, + if (candidates != null) 'candidates': candidates!, }; } -/// A subgraph of the overall lineage graph. -/// -/// Event edges connect Artifact and Execution nodes. -class GoogleCloudAiplatformV1LineageSubgraph { - /// The Artifact nodes in the subgraph. - core.List? artifacts; - - /// The Event edges between Artifacts and Executions in the subgraph. - core.List? events; - - /// The Execution nodes in the subgraph. - core.List? executions; +/// Request message for VizierService.LookupStudy. +class GoogleCloudAiplatformV1LookupStudyRequest { + /// The user-defined display name of the Study + /// + /// Required. + core.String? displayName; - GoogleCloudAiplatformV1LineageSubgraph({ - this.artifacts, - this.events, - this.executions, + GoogleCloudAiplatformV1LookupStudyRequest({ + this.displayName, }); - GoogleCloudAiplatformV1LineageSubgraph.fromJson(core.Map json_) + GoogleCloudAiplatformV1LookupStudyRequest.fromJson(core.Map json_) : this( - artifacts: (json_['artifacts'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1Artifact.fromJson( - value as core.Map)) - .toList(), - events: (json_['events'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1Event.fromJson( - value as core.Map)) - .toList(), - executions: (json_['executions'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1Execution.fromJson( - value as core.Map)) - .toList(), + displayName: json_['displayName'] as core.String?, ); core.Map toJson() => { - if (artifacts != null) 'artifacts': artifacts!, - if (events != null) 'events': events!, - if (executions != null) 'executions': executions!, + if (displayName != null) 'displayName': displayName!, }; } -/// Response message for DatasetService.ListAnnotations. 
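+// A minimal usage sketch (hand-written editorial example, not generated from
+// the discovery document): it assumes `chosenCandidates` and `topCandidates`
+// are aligned by decoding step, as the field comments above describe, and the
+// hypothetical helper name `summarizeLogprobs` is illustrative only.
+core.List summarizeLogprobs(GoogleCloudAiplatformV1LogprobsResult result) {
+  final chosen = result.chosenCandidates ?? const [];
+  final top = result.topCandidates ?? const [];
+  final summary = [];
+  for (var step = 0; step < chosen.length; step++) {
+    final picked = chosen[step];
+    final alternatives =
+        step < top.length ? (top[step].candidates ?? const []) : const [];
+    // Pair the chosen token with its log probability and the number of top
+    // candidates recorded for the same decoding step.
+    summary.add('step $step: "${picked.token}" '
+        '(logProbability: ${picked.logProbability}, '
+        'topCandidates: ${alternatives.length})');
+  }
+  return summary;
+}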
-class GoogleCloudAiplatformV1ListAnnotationsResponse { - /// A list of Annotations that matches the specified filter in the request. - core.List? annotations; +/// Specification of a single machine. +class GoogleCloudAiplatformV1MachineSpec { + /// The number of accelerators to attach to the machine. + core.int? acceleratorCount; - /// The standard List next-page token. - core.String? nextPageToken; + /// The type of accelerator(s) that may be attached to the machine as per + /// accelerator_count. + /// + /// Immutable. + /// Possible string values are: + /// - "ACCELERATOR_TYPE_UNSPECIFIED" : Unspecified accelerator type, which + /// means no accelerator. + /// - "NVIDIA_TESLA_K80" : Deprecated: Nvidia Tesla K80 GPU has reached end of + /// support, see https://cloud.google.com/compute/docs/eol/k80-eol. + /// - "NVIDIA_TESLA_P100" : Nvidia Tesla P100 GPU. + /// - "NVIDIA_TESLA_V100" : Nvidia Tesla V100 GPU. + /// - "NVIDIA_TESLA_P4" : Nvidia Tesla P4 GPU. + /// - "NVIDIA_TESLA_T4" : Nvidia Tesla T4 GPU. + /// - "NVIDIA_TESLA_A100" : Nvidia Tesla A100 GPU. + /// - "NVIDIA_A100_80GB" : Nvidia A100 80GB GPU. + /// - "NVIDIA_L4" : Nvidia L4 GPU. + /// - "NVIDIA_H100_80GB" : Nvidia H100 80Gb GPU. + /// - "NVIDIA_H100_MEGA_80GB" : Nvidia H100 Mega 80Gb GPU. + /// - "TPU_V2" : TPU v2. + /// - "TPU_V3" : TPU v3. + /// - "TPU_V4_POD" : TPU v4. + /// - "TPU_V5_LITEPOD" : TPU v5. + core.String? acceleratorType; - GoogleCloudAiplatformV1ListAnnotationsResponse({ - this.annotations, - this.nextPageToken, + /// The type of the machine. + /// + /// See the + /// [list of machine types supported for prediction](https://cloud.google.com/vertex-ai/docs/predictions/configure-compute#machine-types) + /// See the + /// [list of machine types supported for custom training](https://cloud.google.com/vertex-ai/docs/training/configure-compute#machine-types). + /// For DeployedModel this field is optional, and the default value is + /// `n1-standard-2`. For BatchPredictionJob or as part of WorkerPoolSpec this + /// field is required. + /// + /// Immutable. + core.String? machineType; + + /// Configuration controlling how this resource pool consumes reservation. + /// + /// Optional. Immutable. + GoogleCloudAiplatformV1ReservationAffinity? reservationAffinity; + + /// The topology of the TPUs. + /// + /// Corresponds to the TPU topologies available from GKE. (Example: + /// tpu_topology: "2x2x1"). + /// + /// Immutable. + core.String? tpuTopology; + + GoogleCloudAiplatformV1MachineSpec({ + this.acceleratorCount, + this.acceleratorType, + this.machineType, + this.reservationAffinity, + this.tpuTopology, }); - GoogleCloudAiplatformV1ListAnnotationsResponse.fromJson(core.Map json_) + GoogleCloudAiplatformV1MachineSpec.fromJson(core.Map json_) : this( - annotations: (json_['annotations'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1Annotation.fromJson( - value as core.Map)) - .toList(), - nextPageToken: json_['nextPageToken'] as core.String?, + acceleratorCount: json_['acceleratorCount'] as core.int?, + acceleratorType: json_['acceleratorType'] as core.String?, + machineType: json_['machineType'] as core.String?, + reservationAffinity: json_.containsKey('reservationAffinity') + ? 
GoogleCloudAiplatformV1ReservationAffinity.fromJson( + json_['reservationAffinity'] + as core.Map) + : null, + tpuTopology: json_['tpuTopology'] as core.String?, ); core.Map toJson() => { - if (annotations != null) 'annotations': annotations!, - if (nextPageToken != null) 'nextPageToken': nextPageToken!, + if (acceleratorCount != null) 'acceleratorCount': acceleratorCount!, + if (acceleratorType != null) 'acceleratorType': acceleratorType!, + if (machineType != null) 'machineType': machineType!, + if (reservationAffinity != null) + 'reservationAffinity': reservationAffinity!, + if (tpuTopology != null) 'tpuTopology': tpuTopology!, }; } -/// Response message for MetadataService.ListArtifacts. -class GoogleCloudAiplatformV1ListArtifactsResponse { - /// The Artifacts retrieved from the MetadataStore. - core.List? artifacts; - - /// A token, which can be sent as ListArtifactsRequest.page_token to retrieve - /// the next page. +/// Manual batch tuning parameters. +class GoogleCloudAiplatformV1ManualBatchTuningParameters { + /// The number of the records (e.g. instances) of the operation given in each + /// batch to a machine replica. /// - /// If this field is not populated, there are no subsequent pages. - core.String? nextPageToken; + /// Machine type, and size of a single record should be considered when + /// setting this parameter, higher value speeds up the batch operation's + /// execution, but too high value will result in a whole batch not fitting in + /// a machine's memory, and the whole operation will fail. The default value + /// is 64. + /// + /// Immutable. + core.int? batchSize; - GoogleCloudAiplatformV1ListArtifactsResponse({ - this.artifacts, - this.nextPageToken, + GoogleCloudAiplatformV1ManualBatchTuningParameters({ + this.batchSize, }); - GoogleCloudAiplatformV1ListArtifactsResponse.fromJson(core.Map json_) + GoogleCloudAiplatformV1ManualBatchTuningParameters.fromJson(core.Map json_) : this( - artifacts: (json_['artifacts'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1Artifact.fromJson( - value as core.Map)) - .toList(), - nextPageToken: json_['nextPageToken'] as core.String?, + batchSize: json_['batchSize'] as core.int?, ); core.Map toJson() => { - if (artifacts != null) 'artifacts': artifacts!, - if (nextPageToken != null) 'nextPageToken': nextPageToken!, + if (batchSize != null) 'batchSize': batchSize!, }; } -/// Response message for JobService.ListBatchPredictionJobs -class GoogleCloudAiplatformV1ListBatchPredictionJobsResponse { - /// List of BatchPredictionJobs in the requested page. - core.List? batchPredictionJobs; +/// A message representing a Measurement of a Trial. +/// +/// A Measurement contains the Metrics got by executing a Trial using suggested +/// hyperparameter values. +class GoogleCloudAiplatformV1Measurement { + /// Time that the Trial has been running at the point of this Measurement. + /// + /// Output only. + core.String? elapsedDuration; - /// A token to retrieve the next page of results. + /// A list of metrics got by evaluating the objective functions using + /// suggested Parameter values. /// - /// Pass to ListBatchPredictionJobsRequest.page_token to obtain that page. - core.String? nextPageToken; + /// Output only. + core.List? metrics; - GoogleCloudAiplatformV1ListBatchPredictionJobsResponse({ - this.batchPredictionJobs, - this.nextPageToken, + /// The number of steps the machine learning model has been trained for. + /// + /// Must be non-negative. + /// + /// Output only. + core.String? 
stepCount; + + GoogleCloudAiplatformV1Measurement({ + this.elapsedDuration, + this.metrics, + this.stepCount, }); - GoogleCloudAiplatformV1ListBatchPredictionJobsResponse.fromJson( - core.Map json_) + GoogleCloudAiplatformV1Measurement.fromJson(core.Map json_) : this( - batchPredictionJobs: (json_['batchPredictionJobs'] as core.List?) + elapsedDuration: json_['elapsedDuration'] as core.String?, + metrics: (json_['metrics'] as core.List?) ?.map((value) => - GoogleCloudAiplatformV1BatchPredictionJob.fromJson( + GoogleCloudAiplatformV1MeasurementMetric.fromJson( value as core.Map)) .toList(), - nextPageToken: json_['nextPageToken'] as core.String?, + stepCount: json_['stepCount'] as core.String?, ); core.Map toJson() => { - if (batchPredictionJobs != null) - 'batchPredictionJobs': batchPredictionJobs!, - if (nextPageToken != null) 'nextPageToken': nextPageToken!, + if (elapsedDuration != null) 'elapsedDuration': elapsedDuration!, + if (metrics != null) 'metrics': metrics!, + if (stepCount != null) 'stepCount': stepCount!, }; } -/// Response message for MetadataService.ListContexts. -class GoogleCloudAiplatformV1ListContextsResponse { - /// The Contexts retrieved from the MetadataStore. - core.List? contexts; +/// A message representing a metric in the measurement. +class GoogleCloudAiplatformV1MeasurementMetric { + /// The ID of the Metric. + /// + /// The Metric should be defined in StudySpec's Metrics. + /// + /// Output only. + core.String? metricId; - /// A token, which can be sent as ListContextsRequest.page_token to retrieve - /// the next page. + /// The value for this metric. /// - /// If this field is not populated, there are no subsequent pages. - core.String? nextPageToken; + /// Output only. + core.double? value; - GoogleCloudAiplatformV1ListContextsResponse({ - this.contexts, - this.nextPageToken, + GoogleCloudAiplatformV1MeasurementMetric({ + this.metricId, + this.value, }); - GoogleCloudAiplatformV1ListContextsResponse.fromJson(core.Map json_) + GoogleCloudAiplatformV1MeasurementMetric.fromJson(core.Map json_) : this( - contexts: (json_['contexts'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1Context.fromJson( - value as core.Map)) - .toList(), - nextPageToken: json_['nextPageToken'] as core.String?, + metricId: json_['metricId'] as core.String?, + value: (json_['value'] as core.num?)?.toDouble(), ); core.Map toJson() => { - if (contexts != null) 'contexts': contexts!, - if (nextPageToken != null) 'nextPageToken': nextPageToken!, + if (metricId != null) 'metricId': metricId!, + if (value != null) 'value': value!, }; } -/// Response message for JobService.ListCustomJobs -class GoogleCloudAiplatformV1ListCustomJobsResponse { - /// List of CustomJobs in the requested page. - core.List? customJobs; - - /// A token to retrieve the next page of results. +/// Request message for ModelService.MergeVersionAliases. +class GoogleCloudAiplatformV1MergeVersionAliasesRequest { + /// The set of version aliases to merge. /// - /// Pass to ListCustomJobsRequest.page_token to obtain that page. - core.String? nextPageToken; + /// The alias should be at most 128 characters, and match + /// `a-z{0,126}[a-z-0-9]`. Add the `-` prefix to an alias means removing that + /// alias from the version. `-` is NOT counted in the 128 characters. Example: + /// `-golden` means removing the `golden` alias from the version. There is NO + /// ordering in aliases, which means 1) The aliases returned from GetModel API + /// might not have the exactly same order from this MergeVersionAliases API. 
+ /// 2) Adding and deleting the same alias in the request is not recommended,
+ /// and the two operations will cancel each other out.
+ ///
+ /// Required.
+ core.List? versionAliases;

- GoogleCloudAiplatformV1ListCustomJobsResponse({
- this.customJobs,
- this.nextPageToken,
+ GoogleCloudAiplatformV1MergeVersionAliasesRequest({
+ this.versionAliases,
 });

- GoogleCloudAiplatformV1ListCustomJobsResponse.fromJson(core.Map json_)
+ GoogleCloudAiplatformV1MergeVersionAliasesRequest.fromJson(core.Map json_)
 : this(
- customJobs: (json_['customJobs'] as core.List?)
- ?.map((value) => GoogleCloudAiplatformV1CustomJob.fromJson(
- value as core.Map))
+ versionAliases: (json_['versionAliases'] as core.List?)
+ ?.map((value) => value as core.String)
 .toList(),
- nextPageToken: json_['nextPageToken'] as core.String?,
 );

 core.Map toJson() => {
- if (customJobs != null) 'customJobs': customJobs!,
- if (nextPageToken != null) 'nextPageToken': nextPageToken!,
+ if (versionAliases != null) 'versionAliases': versionAliases!,
 };
 }

-/// Response message for DatasetService.ListDataItems.
-class GoogleCloudAiplatformV1ListDataItemsResponse {
- /// A list of DataItems that matches the specified filter in the request.
- core.List? dataItems;
+/// Instance of a general MetadataSchema.
+class GoogleCloudAiplatformV1MetadataSchema {
+ /// Timestamp when this MetadataSchema was created.
+ ///
+ /// Output only.
+ core.String? createTime;

- /// The standard List next-page token.
- core.String? nextPageToken;
+ /// Description of the Metadata Schema
+ core.String? description;

- GoogleCloudAiplatformV1ListDataItemsResponse({
- this.dataItems,
- this.nextPageToken,
+ /// The resource name of the MetadataSchema.
+ ///
+ /// Output only.
+ core.String? name;
+
+ /// The raw YAML string representation of the MetadataSchema.
+ ///
+ /// The combination of \[MetadataSchema.version\] and the schema name given by
+ /// `title` in \[MetadataSchema.schema\] must be unique within a
+ /// MetadataStore. The schema is defined as an OpenAPI 3.0.2
+ /// [MetadataSchema Object](https://github.com/OAI/OpenAPI-Specification/blob/master/versions/3.0.2.md#schemaObject)
+ ///
+ /// Required.
+ core.String? schema;
+
+ /// The type of the MetadataSchema.
+ ///
+ /// This is a property that identifies which metadata types will use the
+ /// MetadataSchema.
+ /// Possible string values are:
+ /// - "METADATA_SCHEMA_TYPE_UNSPECIFIED" : Unspecified type for the
+ /// MetadataSchema.
+ /// - "ARTIFACT_TYPE" : A type indicating that the MetadataSchema will be used
+ /// by Artifacts.
+ /// - "EXECUTION_TYPE" : A type indicating that the MetadataSchema will be
+ /// used by Executions.
+ /// - "CONTEXT_TYPE" : A type indicating that the MetadataSchema will be used
+ /// by Contexts.
+ core.String? schemaType;
+
+ /// The version of the MetadataSchema.
+ ///
+ /// The version's format must match the following regular expression:
+ /// `^[0-9]+.+.+$`, which allows ordering and comparing different versions.
+ /// Example: 1.0.0, 1.0.1, etc.
+ core.String? schemaVersion;
+
+ GoogleCloudAiplatformV1MetadataSchema({
+ this.createTime,
+ this.description,
+ this.name,
+ this.schema,
+ this.schemaType,
+ this.schemaVersion,
 });

- GoogleCloudAiplatformV1ListDataItemsResponse.fromJson(core.Map json_)
+ GoogleCloudAiplatformV1MetadataSchema.fromJson(core.Map json_)
 : this(
- dataItems: (json_['dataItems'] as core.List?) 
- ?.map((value) => GoogleCloudAiplatformV1DataItem.fromJson( - value as core.Map)) - .toList(), - nextPageToken: json_['nextPageToken'] as core.String?, + createTime: json_['createTime'] as core.String?, + description: json_['description'] as core.String?, + name: json_['name'] as core.String?, + schema: json_['schema'] as core.String?, + schemaType: json_['schemaType'] as core.String?, + schemaVersion: json_['schemaVersion'] as core.String?, ); core.Map toJson() => { - if (dataItems != null) 'dataItems': dataItems!, - if (nextPageToken != null) 'nextPageToken': nextPageToken!, + if (createTime != null) 'createTime': createTime!, + if (description != null) 'description': description!, + if (name != null) 'name': name!, + if (schema != null) 'schema': schema!, + if (schemaType != null) 'schemaType': schemaType!, + if (schemaVersion != null) 'schemaVersion': schemaVersion!, }; } -/// Response message for JobService.ListDataLabelingJobs. -class GoogleCloudAiplatformV1ListDataLabelingJobsResponse { - /// A list of DataLabelingJobs that matches the specified filter in the - /// request. - core.List? dataLabelingJobs; - - /// The standard List next-page token. - core.String? nextPageToken; +/// Instance of a metadata store. +/// +/// Contains a set of metadata that can be queried. +class GoogleCloudAiplatformV1MetadataStore { + /// Timestamp when this MetadataStore was created. + /// + /// Output only. + core.String? createTime; - GoogleCloudAiplatformV1ListDataLabelingJobsResponse({ - this.dataLabelingJobs, - this.nextPageToken, - }); + /// Dataplex integration settings. + /// + /// Optional. + GoogleCloudAiplatformV1MetadataStoreDataplexConfig? dataplexConfig; - GoogleCloudAiplatformV1ListDataLabelingJobsResponse.fromJson(core.Map json_) - : this( - dataLabelingJobs: (json_['dataLabelingJobs'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1DataLabelingJob.fromJson( - value as core.Map)) - .toList(), - nextPageToken: json_['nextPageToken'] as core.String?, - ); + /// Description of the MetadataStore. + core.String? description; - core.Map toJson() => { - if (dataLabelingJobs != null) 'dataLabelingJobs': dataLabelingJobs!, - if (nextPageToken != null) 'nextPageToken': nextPageToken!, - }; -} + /// Customer-managed encryption key spec for a Metadata Store. + /// + /// If set, this Metadata Store and all sub-resources of this Metadata Store + /// are secured using this key. + GoogleCloudAiplatformV1EncryptionSpec? encryptionSpec; -/// Response message for DatasetService.ListDatasetVersions. -class GoogleCloudAiplatformV1ListDatasetVersionsResponse { - /// A list of DatasetVersions that matches the specified filter in the - /// request. - core.List? datasetVersions; + /// The resource name of the MetadataStore instance. + /// + /// Output only. + core.String? name; - /// The standard List next-page token. - core.String? nextPageToken; + /// State information of the MetadataStore. + /// + /// Output only. + GoogleCloudAiplatformV1MetadataStoreMetadataStoreState? state; - GoogleCloudAiplatformV1ListDatasetVersionsResponse({ - this.datasetVersions, - this.nextPageToken, + /// Timestamp when this MetadataStore was last updated. + /// + /// Output only. + core.String? 
updateTime; + + GoogleCloudAiplatformV1MetadataStore({ + this.createTime, + this.dataplexConfig, + this.description, + this.encryptionSpec, + this.name, + this.state, + this.updateTime, }); - GoogleCloudAiplatformV1ListDatasetVersionsResponse.fromJson(core.Map json_) + GoogleCloudAiplatformV1MetadataStore.fromJson(core.Map json_) : this( - datasetVersions: (json_['datasetVersions'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1DatasetVersion.fromJson( - value as core.Map)) - .toList(), - nextPageToken: json_['nextPageToken'] as core.String?, + createTime: json_['createTime'] as core.String?, + dataplexConfig: json_.containsKey('dataplexConfig') + ? GoogleCloudAiplatformV1MetadataStoreDataplexConfig.fromJson( + json_['dataplexConfig'] + as core.Map) + : null, + description: json_['description'] as core.String?, + encryptionSpec: json_.containsKey('encryptionSpec') + ? GoogleCloudAiplatformV1EncryptionSpec.fromJson( + json_['encryptionSpec'] + as core.Map) + : null, + name: json_['name'] as core.String?, + state: json_.containsKey('state') + ? GoogleCloudAiplatformV1MetadataStoreMetadataStoreState.fromJson( + json_['state'] as core.Map) + : null, + updateTime: json_['updateTime'] as core.String?, ); core.Map toJson() => { - if (datasetVersions != null) 'datasetVersions': datasetVersions!, - if (nextPageToken != null) 'nextPageToken': nextPageToken!, + if (createTime != null) 'createTime': createTime!, + if (dataplexConfig != null) 'dataplexConfig': dataplexConfig!, + if (description != null) 'description': description!, + if (encryptionSpec != null) 'encryptionSpec': encryptionSpec!, + if (name != null) 'name': name!, + if (state != null) 'state': state!, + if (updateTime != null) 'updateTime': updateTime!, }; } -/// Response message for DatasetService.ListDatasets. -class GoogleCloudAiplatformV1ListDatasetsResponse { - /// A list of Datasets that matches the specified filter in the request. - core.List? datasets; - - /// The standard List next-page token. - core.String? nextPageToken; +/// Represents Dataplex integration settings. +class GoogleCloudAiplatformV1MetadataStoreDataplexConfig { + /// Whether or not Data Lineage synchronization is enabled for Vertex + /// Pipelines. + /// + /// Optional. + core.bool? enabledPipelinesLineage; - GoogleCloudAiplatformV1ListDatasetsResponse({ - this.datasets, - this.nextPageToken, + GoogleCloudAiplatformV1MetadataStoreDataplexConfig({ + this.enabledPipelinesLineage, }); - GoogleCloudAiplatformV1ListDatasetsResponse.fromJson(core.Map json_) + GoogleCloudAiplatformV1MetadataStoreDataplexConfig.fromJson(core.Map json_) : this( - datasets: (json_['datasets'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1Dataset.fromJson( - value as core.Map)) - .toList(), - nextPageToken: json_['nextPageToken'] as core.String?, + enabledPipelinesLineage: + json_['enabledPipelinesLineage'] as core.bool?, ); core.Map toJson() => { - if (datasets != null) 'datasets': datasets!, - if (nextPageToken != null) 'nextPageToken': nextPageToken!, + if (enabledPipelinesLineage != null) + 'enabledPipelinesLineage': enabledPipelinesLineage!, }; } -/// Response message for ListDeploymentResourcePools method. -class GoogleCloudAiplatformV1ListDeploymentResourcePoolsResponse { - /// The DeploymentResourcePools from the specified location. - core.List? - deploymentResourcePools; - - /// A token, which can be sent as `page_token` to retrieve the next page. - /// - /// If this field is omitted, there are no subsequent pages. - core.String? 
nextPageToken; +/// Represents state information for a MetadataStore. +class GoogleCloudAiplatformV1MetadataStoreMetadataStoreState { + /// The disk utilization of the MetadataStore in bytes. + core.String? diskUtilizationBytes; - GoogleCloudAiplatformV1ListDeploymentResourcePoolsResponse({ - this.deploymentResourcePools, - this.nextPageToken, + GoogleCloudAiplatformV1MetadataStoreMetadataStoreState({ + this.diskUtilizationBytes, }); - GoogleCloudAiplatformV1ListDeploymentResourcePoolsResponse.fromJson( + GoogleCloudAiplatformV1MetadataStoreMetadataStoreState.fromJson( core.Map json_) : this( - deploymentResourcePools: - (json_['deploymentResourcePools'] as core.List?) - ?.map((value) => - GoogleCloudAiplatformV1DeploymentResourcePool.fromJson( - value as core.Map)) - .toList(), - nextPageToken: json_['nextPageToken'] as core.String?, + diskUtilizationBytes: json_['diskUtilizationBytes'] as core.String?, ); core.Map toJson() => { - if (deploymentResourcePools != null) - 'deploymentResourcePools': deploymentResourcePools!, - if (nextPageToken != null) 'nextPageToken': nextPageToken!, + if (diskUtilizationBytes != null) + 'diskUtilizationBytes': diskUtilizationBytes!, }; } -/// Response message for EndpointService.ListEndpoints. -class GoogleCloudAiplatformV1ListEndpointsResponse { - /// List of Endpoints in the requested page. - core.List? endpoints; +/// Input for MetricX metric. +class GoogleCloudAiplatformV1MetricxInput { + /// Metricx instance. + /// + /// Required. + GoogleCloudAiplatformV1MetricxInstance? instance; - /// A token to retrieve the next page of results. + /// Spec for Metricx metric. /// - /// Pass to ListEndpointsRequest.page_token to obtain that page. - core.String? nextPageToken; + /// Required. + GoogleCloudAiplatformV1MetricxSpec? metricSpec; - GoogleCloudAiplatformV1ListEndpointsResponse({ - this.endpoints, - this.nextPageToken, + GoogleCloudAiplatformV1MetricxInput({ + this.instance, + this.metricSpec, }); - GoogleCloudAiplatformV1ListEndpointsResponse.fromJson(core.Map json_) + GoogleCloudAiplatformV1MetricxInput.fromJson(core.Map json_) : this( - endpoints: (json_['endpoints'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1Endpoint.fromJson( - value as core.Map)) - .toList(), - nextPageToken: json_['nextPageToken'] as core.String?, + instance: json_.containsKey('instance') + ? GoogleCloudAiplatformV1MetricxInstance.fromJson( + json_['instance'] as core.Map) + : null, + metricSpec: json_.containsKey('metricSpec') + ? GoogleCloudAiplatformV1MetricxSpec.fromJson( + json_['metricSpec'] as core.Map) + : null, ); core.Map toJson() => { - if (endpoints != null) 'endpoints': endpoints!, - if (nextPageToken != null) 'nextPageToken': nextPageToken!, + if (instance != null) 'instance': instance!, + if (metricSpec != null) 'metricSpec': metricSpec!, }; } -/// Response message for FeaturestoreService.ListEntityTypes. -class GoogleCloudAiplatformV1ListEntityTypesResponse { - /// The EntityTypes matching the request. - core.List? entityTypes; +/// Spec for MetricX instance - The fields used for evaluation are dependent on +/// the MetricX version. +typedef GoogleCloudAiplatformV1MetricxInstance = $Instance03; - /// A token, which can be sent as ListEntityTypesRequest.page_token to - /// retrieve the next page. +/// Spec for MetricX result - calculates the MetricX score for the given +/// instance using the version specified in the spec. +class GoogleCloudAiplatformV1MetricxResult { + /// MetricX score. /// - /// If this field is omitted, there are no subsequent pages. 
- core.String? nextPageToken; + /// Range depends on version. + /// + /// Output only. + core.double? score; - GoogleCloudAiplatformV1ListEntityTypesResponse({ - this.entityTypes, - this.nextPageToken, + GoogleCloudAiplatformV1MetricxResult({ + this.score, }); - GoogleCloudAiplatformV1ListEntityTypesResponse.fromJson(core.Map json_) + GoogleCloudAiplatformV1MetricxResult.fromJson(core.Map json_) : this( - entityTypes: (json_['entityTypes'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1EntityType.fromJson( - value as core.Map)) - .toList(), - nextPageToken: json_['nextPageToken'] as core.String?, + score: (json_['score'] as core.num?)?.toDouble(), ); core.Map toJson() => { - if (entityTypes != null) 'entityTypes': entityTypes!, - if (nextPageToken != null) 'nextPageToken': nextPageToken!, + if (score != null) 'score': score!, }; } -/// Response message for MetadataService.ListExecutions. -class GoogleCloudAiplatformV1ListExecutionsResponse { - /// The Executions retrieved from the MetadataStore. - core.List? executions; +/// Spec for MetricX metric. +class GoogleCloudAiplatformV1MetricxSpec { + /// Source language in BCP-47 format. + /// + /// Optional. + core.String? sourceLanguage; - /// A token, which can be sent as ListExecutionsRequest.page_token to retrieve - /// the next page. + /// Target language in BCP-47 format. /// - /// If this field is not populated, there are no subsequent pages. - core.String? nextPageToken; + /// Covers both prediction and reference. + /// + /// Optional. + core.String? targetLanguage; - GoogleCloudAiplatformV1ListExecutionsResponse({ - this.executions, - this.nextPageToken, + /// Which version to use for evaluation. + /// + /// Required. + /// Possible string values are: + /// - "METRICX_VERSION_UNSPECIFIED" : MetricX version unspecified. + /// - "METRICX_24_REF" : MetricX 2024 (2.6) for translation + reference + /// (reference-based). + /// - "METRICX_24_SRC" : MetricX 2024 (2.6) for translation + source (QE). + /// - "METRICX_24_SRC_REF" : MetricX 2024 (2.6) for translation + source + + /// reference (source-reference-combined). + core.String? version; + + GoogleCloudAiplatformV1MetricxSpec({ + this.sourceLanguage, + this.targetLanguage, + this.version, }); - GoogleCloudAiplatformV1ListExecutionsResponse.fromJson(core.Map json_) + GoogleCloudAiplatformV1MetricxSpec.fromJson(core.Map json_) : this( - executions: (json_['executions'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1Execution.fromJson( - value as core.Map)) - .toList(), - nextPageToken: json_['nextPageToken'] as core.String?, + sourceLanguage: json_['sourceLanguage'] as core.String?, + targetLanguage: json_['targetLanguage'] as core.String?, + version: json_['version'] as core.String?, ); core.Map toJson() => { - if (executions != null) 'executions': executions!, - if (nextPageToken != null) 'nextPageToken': nextPageToken!, + if (sourceLanguage != null) 'sourceLanguage': sourceLanguage!, + if (targetLanguage != null) 'targetLanguage': targetLanguage!, + if (version != null) 'version': version!, }; } -/// Response message for FeatureRegistryService.ListFeatureGroups. -class GoogleCloudAiplatformV1ListFeatureGroupsResponse { - /// The FeatureGroups matching the request. - core.List? featureGroups; - - /// A token, which can be sent as ListFeatureGroupsRequest.page_token to - /// retrieve the next page. +/// Represents one resource that exists in automl.googleapis.com, +/// datalabeling.googleapis.com or ml.googleapis.com. 
+class GoogleCloudAiplatformV1MigratableResource { + /// Represents one Dataset in automl.googleapis.com. /// - /// If this field is omitted, there are no subsequent pages. - core.String? nextPageToken; + /// Output only. + GoogleCloudAiplatformV1MigratableResourceAutomlDataset? automlDataset; - GoogleCloudAiplatformV1ListFeatureGroupsResponse({ - this.featureGroups, - this.nextPageToken, - }); + /// Represents one Model in automl.googleapis.com. + /// + /// Output only. + GoogleCloudAiplatformV1MigratableResourceAutomlModel? automlModel; - GoogleCloudAiplatformV1ListFeatureGroupsResponse.fromJson(core.Map json_) - : this( - featureGroups: (json_['featureGroups'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1FeatureGroup.fromJson( - value as core.Map)) - .toList(), - nextPageToken: json_['nextPageToken'] as core.String?, - ); + /// Represents one Dataset in datalabeling.googleapis.com. + /// + /// Output only. + GoogleCloudAiplatformV1MigratableResourceDataLabelingDataset? + dataLabelingDataset; - core.Map toJson() => { - if (featureGroups != null) 'featureGroups': featureGroups!, - if (nextPageToken != null) 'nextPageToken': nextPageToken!, - }; -} + /// Timestamp when the last migration attempt on this MigratableResource + /// started. + /// + /// Will not be set if there's no migration attempt on this + /// MigratableResource. + /// + /// Output only. + core.String? lastMigrateTime; -/// Response message for FeatureOnlineStoreAdminService.ListFeatureOnlineStores. -class GoogleCloudAiplatformV1ListFeatureOnlineStoresResponse { - /// The FeatureOnlineStores matching the request. - core.List? featureOnlineStores; + /// Timestamp when this MigratableResource was last updated. + /// + /// Output only. + core.String? lastUpdateTime; - /// A token, which can be sent as ListFeatureOnlineStoresRequest.page_token to - /// retrieve the next page. + /// Represents one Version in ml.googleapis.com. /// - /// If this field is omitted, there are no subsequent pages. - core.String? nextPageToken; + /// Output only. + GoogleCloudAiplatformV1MigratableResourceMlEngineModelVersion? + mlEngineModelVersion; - GoogleCloudAiplatformV1ListFeatureOnlineStoresResponse({ - this.featureOnlineStores, - this.nextPageToken, + GoogleCloudAiplatformV1MigratableResource({ + this.automlDataset, + this.automlModel, + this.dataLabelingDataset, + this.lastMigrateTime, + this.lastUpdateTime, + this.mlEngineModelVersion, }); - GoogleCloudAiplatformV1ListFeatureOnlineStoresResponse.fromJson( - core.Map json_) + GoogleCloudAiplatformV1MigratableResource.fromJson(core.Map json_) : this( - featureOnlineStores: (json_['featureOnlineStores'] as core.List?) - ?.map((value) => - GoogleCloudAiplatformV1FeatureOnlineStore.fromJson( - value as core.Map)) - .toList(), - nextPageToken: json_['nextPageToken'] as core.String?, + automlDataset: json_.containsKey('automlDataset') + ? GoogleCloudAiplatformV1MigratableResourceAutomlDataset.fromJson( + json_['automlDataset'] as core.Map) + : null, + automlModel: json_.containsKey('automlModel') + ? GoogleCloudAiplatformV1MigratableResourceAutomlModel.fromJson( + json_['automlModel'] as core.Map) + : null, + dataLabelingDataset: json_.containsKey('dataLabelingDataset') + ? GoogleCloudAiplatformV1MigratableResourceDataLabelingDataset + .fromJson(json_['dataLabelingDataset'] + as core.Map) + : null, + lastMigrateTime: json_['lastMigrateTime'] as core.String?, + lastUpdateTime: json_['lastUpdateTime'] as core.String?, + mlEngineModelVersion: json_.containsKey('mlEngineModelVersion') + ? 
GoogleCloudAiplatformV1MigratableResourceMlEngineModelVersion + .fromJson(json_['mlEngineModelVersion'] + as core.Map) + : null, ); core.Map toJson() => { - if (featureOnlineStores != null) - 'featureOnlineStores': featureOnlineStores!, - if (nextPageToken != null) 'nextPageToken': nextPageToken!, + if (automlDataset != null) 'automlDataset': automlDataset!, + if (automlModel != null) 'automlModel': automlModel!, + if (dataLabelingDataset != null) + 'dataLabelingDataset': dataLabelingDataset!, + if (lastMigrateTime != null) 'lastMigrateTime': lastMigrateTime!, + if (lastUpdateTime != null) 'lastUpdateTime': lastUpdateTime!, + if (mlEngineModelVersion != null) + 'mlEngineModelVersion': mlEngineModelVersion!, }; } -/// Response message for FeatureOnlineStoreAdminService.ListFeatureViewSyncs. -class GoogleCloudAiplatformV1ListFeatureViewSyncsResponse { - /// The FeatureViewSyncs matching the request. - core.List? featureViewSyncs; - - /// A token, which can be sent as ListFeatureViewSyncsRequest.page_token to - /// retrieve the next page. +/// Represents one Dataset in automl.googleapis.com. +class GoogleCloudAiplatformV1MigratableResourceAutomlDataset { + /// Full resource name of automl Dataset. /// - /// If this field is omitted, there are no subsequent pages. - core.String? nextPageToken; + /// Format: `projects/{project}/locations/{location}/datasets/{dataset}`. + core.String? dataset; - GoogleCloudAiplatformV1ListFeatureViewSyncsResponse({ - this.featureViewSyncs, - this.nextPageToken, + /// The Dataset's display name in automl.googleapis.com. + core.String? datasetDisplayName; + + GoogleCloudAiplatformV1MigratableResourceAutomlDataset({ + this.dataset, + this.datasetDisplayName, }); - GoogleCloudAiplatformV1ListFeatureViewSyncsResponse.fromJson(core.Map json_) + GoogleCloudAiplatformV1MigratableResourceAutomlDataset.fromJson( + core.Map json_) : this( - featureViewSyncs: (json_['featureViewSyncs'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1FeatureViewSync.fromJson( - value as core.Map)) - .toList(), - nextPageToken: json_['nextPageToken'] as core.String?, + dataset: json_['dataset'] as core.String?, + datasetDisplayName: json_['datasetDisplayName'] as core.String?, ); core.Map toJson() => { - if (featureViewSyncs != null) 'featureViewSyncs': featureViewSyncs!, - if (nextPageToken != null) 'nextPageToken': nextPageToken!, + if (dataset != null) 'dataset': dataset!, + if (datasetDisplayName != null) + 'datasetDisplayName': datasetDisplayName!, }; } -/// Response message for FeatureOnlineStoreAdminService.ListFeatureViews. -class GoogleCloudAiplatformV1ListFeatureViewsResponse { - /// The FeatureViews matching the request. - core.List? featureViews; - - /// A token, which can be sent as ListFeatureViewsRequest.page_token to - /// retrieve the next page. +/// Represents one Model in automl.googleapis.com. +class GoogleCloudAiplatformV1MigratableResourceAutomlModel { + /// Full resource name of automl Model. /// - /// If this field is omitted, there are no subsequent pages. - core.String? nextPageToken; + /// Format: `projects/{project}/locations/{location}/models/{model}`. + core.String? model; - GoogleCloudAiplatformV1ListFeatureViewsResponse({ - this.featureViews, - this.nextPageToken, + /// The Model's display name in automl.googleapis.com. + core.String? 
modelDisplayName; + + GoogleCloudAiplatformV1MigratableResourceAutomlModel({ + this.model, + this.modelDisplayName, }); - GoogleCloudAiplatformV1ListFeatureViewsResponse.fromJson(core.Map json_) + GoogleCloudAiplatformV1MigratableResourceAutomlModel.fromJson(core.Map json_) : this( - featureViews: (json_['featureViews'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1FeatureView.fromJson( - value as core.Map)) - .toList(), - nextPageToken: json_['nextPageToken'] as core.String?, + model: json_['model'] as core.String?, + modelDisplayName: json_['modelDisplayName'] as core.String?, ); core.Map toJson() => { - if (featureViews != null) 'featureViews': featureViews!, - if (nextPageToken != null) 'nextPageToken': nextPageToken!, + if (model != null) 'model': model!, + if (modelDisplayName != null) 'modelDisplayName': modelDisplayName!, }; } -/// Response message for FeaturestoreService.ListFeatures. -/// -/// Response message for FeatureRegistryService.ListFeatures. -class GoogleCloudAiplatformV1ListFeaturesResponse { - /// The Features matching the request. - core.List? features; +/// Represents one Dataset in datalabeling.googleapis.com. +class GoogleCloudAiplatformV1MigratableResourceDataLabelingDataset { + /// The migratable AnnotatedDataset in datalabeling.googleapis.com belongs to + /// the data labeling Dataset. + core.List< + GoogleCloudAiplatformV1MigratableResourceDataLabelingDatasetDataLabelingAnnotatedDataset>? + dataLabelingAnnotatedDatasets; - /// A token, which can be sent as ListFeaturesRequest.page_token to retrieve - /// the next page. + /// Full resource name of data labeling Dataset. /// - /// If this field is omitted, there are no subsequent pages. - core.String? nextPageToken; + /// Format: `projects/{project}/datasets/{dataset}`. + core.String? dataset; - GoogleCloudAiplatformV1ListFeaturesResponse({ - this.features, - this.nextPageToken, + /// The Dataset's display name in datalabeling.googleapis.com. + core.String? datasetDisplayName; + + GoogleCloudAiplatformV1MigratableResourceDataLabelingDataset({ + this.dataLabelingAnnotatedDatasets, + this.dataset, + this.datasetDisplayName, }); - GoogleCloudAiplatformV1ListFeaturesResponse.fromJson(core.Map json_) + GoogleCloudAiplatformV1MigratableResourceDataLabelingDataset.fromJson( + core.Map json_) : this( - features: (json_['features'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1Feature.fromJson( - value as core.Map)) + dataLabelingAnnotatedDatasets: (json_['dataLabelingAnnotatedDatasets'] + as core.List?) + ?.map((value) => + GoogleCloudAiplatformV1MigratableResourceDataLabelingDatasetDataLabelingAnnotatedDataset + .fromJson(value as core.Map)) .toList(), - nextPageToken: json_['nextPageToken'] as core.String?, + dataset: json_['dataset'] as core.String?, + datasetDisplayName: json_['datasetDisplayName'] as core.String?, ); core.Map toJson() => { - if (features != null) 'features': features!, - if (nextPageToken != null) 'nextPageToken': nextPageToken!, + if (dataLabelingAnnotatedDatasets != null) + 'dataLabelingAnnotatedDatasets': dataLabelingAnnotatedDatasets!, + if (dataset != null) 'dataset': dataset!, + if (datasetDisplayName != null) + 'datasetDisplayName': datasetDisplayName!, }; } -/// Response message for FeaturestoreService.ListFeaturestores. -class GoogleCloudAiplatformV1ListFeaturestoresResponse { - /// The Featurestores matching the request. - core.List? featurestores; - - /// A token, which can be sent as ListFeaturestoresRequest.page_token to - /// retrieve the next page. 
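+// A small illustrative helper (editorial sketch, not generated code): a
+// MigratableResource describes one resource from automl.googleapis.com,
+// datalabeling.googleapis.com or ml.googleapis.com, so typically exactly one
+// of its sub-resource fields is set; the hypothetical function below shows
+// how a caller might branch on whichever field is non-null.
+core.String describeMigratableResource(
+    GoogleCloudAiplatformV1MigratableResource resource) {
+  if (resource.automlDataset != null) {
+    // Dataset in automl.googleapis.com.
+    return 'AutoML Dataset: ${resource.automlDataset!.dataset}';
+  }
+  if (resource.automlModel != null) {
+    // Model in automl.googleapis.com.
+    return 'AutoML Model: ${resource.automlModel!.model}';
+  }
+  if (resource.dataLabelingDataset != null) {
+    // Dataset in datalabeling.googleapis.com.
+    return 'Data Labeling Dataset: ${resource.dataLabelingDataset!.dataset}';
+  }
+  if (resource.mlEngineModelVersion != null) {
+    // Model Version in ml.googleapis.com.
+    return 'ML Engine Version: ${resource.mlEngineModelVersion!.version}';
+  }
+  return 'MigratableResource with no sub-resource set';
+}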
+/// Represents one AnnotatedDataset in datalabeling.googleapis.com. +class GoogleCloudAiplatformV1MigratableResourceDataLabelingDatasetDataLabelingAnnotatedDataset { + /// Full resource name of data labeling AnnotatedDataset. /// - /// If this field is omitted, there are no subsequent pages. - core.String? nextPageToken; + /// Format: + /// `projects/{project}/datasets/{dataset}/annotatedDatasets/{annotated_dataset}`. + core.String? annotatedDataset; - GoogleCloudAiplatformV1ListFeaturestoresResponse({ - this.featurestores, - this.nextPageToken, + /// The AnnotatedDataset's display name in datalabeling.googleapis.com. + core.String? annotatedDatasetDisplayName; + + GoogleCloudAiplatformV1MigratableResourceDataLabelingDatasetDataLabelingAnnotatedDataset({ + this.annotatedDataset, + this.annotatedDatasetDisplayName, }); - GoogleCloudAiplatformV1ListFeaturestoresResponse.fromJson(core.Map json_) + GoogleCloudAiplatformV1MigratableResourceDataLabelingDatasetDataLabelingAnnotatedDataset.fromJson( + core.Map json_) : this( - featurestores: (json_['featurestores'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1Featurestore.fromJson( - value as core.Map)) - .toList(), - nextPageToken: json_['nextPageToken'] as core.String?, + annotatedDataset: json_['annotatedDataset'] as core.String?, + annotatedDatasetDisplayName: + json_['annotatedDatasetDisplayName'] as core.String?, ); core.Map toJson() => { - if (featurestores != null) 'featurestores': featurestores!, - if (nextPageToken != null) 'nextPageToken': nextPageToken!, + if (annotatedDataset != null) 'annotatedDataset': annotatedDataset!, + if (annotatedDatasetDisplayName != null) + 'annotatedDatasetDisplayName': annotatedDatasetDisplayName!, }; } -/// Response message for JobService.ListHyperparameterTuningJobs -class GoogleCloudAiplatformV1ListHyperparameterTuningJobsResponse { - /// List of HyperparameterTuningJobs in the requested page. +/// Represents one model Version in ml.googleapis.com. +class GoogleCloudAiplatformV1MigratableResourceMlEngineModelVersion { + /// The ml.googleapis.com endpoint that this model Version currently lives in. /// - /// HyperparameterTuningJob.trials of the jobs will be not be returned. - core.List? - hyperparameterTuningJobs; + /// Example values: * ml.googleapis.com * us-centrall-ml.googleapis.com * + /// europe-west4-ml.googleapis.com * asia-east1-ml.googleapis.com + core.String? endpoint; - /// A token to retrieve the next page of results. + /// Full resource name of ml engine model Version. /// - /// Pass to ListHyperparameterTuningJobsRequest.page_token to obtain that - /// page. - core.String? nextPageToken; + /// Format: `projects/{project}/models/{model}/versions/{version}`. + core.String? version; - GoogleCloudAiplatformV1ListHyperparameterTuningJobsResponse({ - this.hyperparameterTuningJobs, - this.nextPageToken, + GoogleCloudAiplatformV1MigratableResourceMlEngineModelVersion({ + this.endpoint, + this.version, }); - GoogleCloudAiplatformV1ListHyperparameterTuningJobsResponse.fromJson( + GoogleCloudAiplatformV1MigratableResourceMlEngineModelVersion.fromJson( core.Map json_) : this( - hyperparameterTuningJobs: - (json_['hyperparameterTuningJobs'] as core.List?) 
- ?.map((value) => - GoogleCloudAiplatformV1HyperparameterTuningJob.fromJson( - value as core.Map)) - .toList(), - nextPageToken: json_['nextPageToken'] as core.String?, + endpoint: json_['endpoint'] as core.String?, + version: json_['version'] as core.String?, ); core.Map toJson() => { - if (hyperparameterTuningJobs != null) - 'hyperparameterTuningJobs': hyperparameterTuningJobs!, - if (nextPageToken != null) 'nextPageToken': nextPageToken!, + if (endpoint != null) 'endpoint': endpoint!, + if (version != null) 'version': version!, }; } -/// Response message for IndexEndpointService.ListIndexEndpoints. -class GoogleCloudAiplatformV1ListIndexEndpointsResponse { - /// List of IndexEndpoints in the requested page. - core.List? indexEndpoints; +/// Config of migrating one resource from automl.googleapis.com, +/// datalabeling.googleapis.com and ml.googleapis.com to Vertex AI. +class GoogleCloudAiplatformV1MigrateResourceRequest { + /// Config for migrating Dataset in automl.googleapis.com to Vertex AI's + /// Dataset. + GoogleCloudAiplatformV1MigrateResourceRequestMigrateAutomlDatasetConfig? + migrateAutomlDatasetConfig; - /// A token to retrieve next page of results. - /// - /// Pass to ListIndexEndpointsRequest.page_token to obtain that page. - core.String? nextPageToken; + /// Config for migrating Model in automl.googleapis.com to Vertex AI's Model. + GoogleCloudAiplatformV1MigrateResourceRequestMigrateAutomlModelConfig? + migrateAutomlModelConfig; - GoogleCloudAiplatformV1ListIndexEndpointsResponse({ - this.indexEndpoints, - this.nextPageToken, + /// Config for migrating Dataset in datalabeling.googleapis.com to Vertex AI's + /// Dataset. + GoogleCloudAiplatformV1MigrateResourceRequestMigrateDataLabelingDatasetConfig? + migrateDataLabelingDatasetConfig; + + /// Config for migrating Version in ml.googleapis.com to Vertex AI's Model. + GoogleCloudAiplatformV1MigrateResourceRequestMigrateMlEngineModelVersionConfig? + migrateMlEngineModelVersionConfig; + + GoogleCloudAiplatformV1MigrateResourceRequest({ + this.migrateAutomlDatasetConfig, + this.migrateAutomlModelConfig, + this.migrateDataLabelingDatasetConfig, + this.migrateMlEngineModelVersionConfig, }); - GoogleCloudAiplatformV1ListIndexEndpointsResponse.fromJson(core.Map json_) + GoogleCloudAiplatformV1MigrateResourceRequest.fromJson(core.Map json_) : this( - indexEndpoints: (json_['indexEndpoints'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1IndexEndpoint.fromJson( - value as core.Map)) - .toList(), - nextPageToken: json_['nextPageToken'] as core.String?, + migrateAutomlDatasetConfig: json_ + .containsKey('migrateAutomlDatasetConfig') + ? GoogleCloudAiplatformV1MigrateResourceRequestMigrateAutomlDatasetConfig + .fromJson(json_['migrateAutomlDatasetConfig'] + as core.Map) + : null, + migrateAutomlModelConfig: json_ + .containsKey('migrateAutomlModelConfig') + ? GoogleCloudAiplatformV1MigrateResourceRequestMigrateAutomlModelConfig + .fromJson(json_['migrateAutomlModelConfig'] + as core.Map) + : null, + migrateDataLabelingDatasetConfig: json_ + .containsKey('migrateDataLabelingDatasetConfig') + ? GoogleCloudAiplatformV1MigrateResourceRequestMigrateDataLabelingDatasetConfig + .fromJson(json_['migrateDataLabelingDatasetConfig'] + as core.Map) + : null, + migrateMlEngineModelVersionConfig: json_ + .containsKey('migrateMlEngineModelVersionConfig') + ? 
GoogleCloudAiplatformV1MigrateResourceRequestMigrateMlEngineModelVersionConfig + .fromJson(json_['migrateMlEngineModelVersionConfig'] + as core.Map) + : null, ); core.Map toJson() => { - if (indexEndpoints != null) 'indexEndpoints': indexEndpoints!, - if (nextPageToken != null) 'nextPageToken': nextPageToken!, + if (migrateAutomlDatasetConfig != null) + 'migrateAutomlDatasetConfig': migrateAutomlDatasetConfig!, + if (migrateAutomlModelConfig != null) + 'migrateAutomlModelConfig': migrateAutomlModelConfig!, + if (migrateDataLabelingDatasetConfig != null) + 'migrateDataLabelingDatasetConfig': migrateDataLabelingDatasetConfig!, + if (migrateMlEngineModelVersionConfig != null) + 'migrateMlEngineModelVersionConfig': + migrateMlEngineModelVersionConfig!, }; } -/// Response message for IndexService.ListIndexes. -class GoogleCloudAiplatformV1ListIndexesResponse { - /// List of indexes in the requested page. - core.List? indexes; +/// Config for migrating Dataset in automl.googleapis.com to Vertex AI's +/// Dataset. +class GoogleCloudAiplatformV1MigrateResourceRequestMigrateAutomlDatasetConfig { + /// Full resource name of automl Dataset. + /// + /// Format: `projects/{project}/locations/{location}/datasets/{dataset}`. + /// + /// Required. + core.String? dataset; - /// A token to retrieve next page of results. + /// Display name of the Dataset in Vertex AI. /// - /// Pass to ListIndexesRequest.page_token to obtain that page. - core.String? nextPageToken; + /// System will pick a display name if unspecified. + /// + /// Required. + core.String? datasetDisplayName; - GoogleCloudAiplatformV1ListIndexesResponse({ - this.indexes, - this.nextPageToken, + GoogleCloudAiplatformV1MigrateResourceRequestMigrateAutomlDatasetConfig({ + this.dataset, + this.datasetDisplayName, }); - GoogleCloudAiplatformV1ListIndexesResponse.fromJson(core.Map json_) + GoogleCloudAiplatformV1MigrateResourceRequestMigrateAutomlDatasetConfig.fromJson( + core.Map json_) : this( - indexes: (json_['indexes'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1Index.fromJson( - value as core.Map)) - .toList(), - nextPageToken: json_['nextPageToken'] as core.String?, + dataset: json_['dataset'] as core.String?, + datasetDisplayName: json_['datasetDisplayName'] as core.String?, ); core.Map toJson() => { - if (indexes != null) 'indexes': indexes!, - if (nextPageToken != null) 'nextPageToken': nextPageToken!, + if (dataset != null) 'dataset': dataset!, + if (datasetDisplayName != null) + 'datasetDisplayName': datasetDisplayName!, }; } -/// Response message for MetadataService.ListMetadataSchemas. -class GoogleCloudAiplatformV1ListMetadataSchemasResponse { - /// The MetadataSchemas found for the MetadataStore. - core.List? metadataSchemas; +/// Config for migrating Model in automl.googleapis.com to Vertex AI's Model. +class GoogleCloudAiplatformV1MigrateResourceRequestMigrateAutomlModelConfig { + /// Full resource name of automl Model. + /// + /// Format: `projects/{project}/locations/{location}/models/{model}`. + /// + /// Required. + core.String? model; - /// A token, which can be sent as ListMetadataSchemasRequest.page_token to - /// retrieve the next page. + /// Display name of the model in Vertex AI. /// - /// If this field is not populated, there are no subsequent pages. - core.String? nextPageToken; + /// System will pick a display name if unspecified. + /// + /// Optional. + core.String? 
modelDisplayName; - GoogleCloudAiplatformV1ListMetadataSchemasResponse({ - this.metadataSchemas, - this.nextPageToken, + GoogleCloudAiplatformV1MigrateResourceRequestMigrateAutomlModelConfig({ + this.model, + this.modelDisplayName, }); - GoogleCloudAiplatformV1ListMetadataSchemasResponse.fromJson(core.Map json_) + GoogleCloudAiplatformV1MigrateResourceRequestMigrateAutomlModelConfig.fromJson( + core.Map json_) : this( - metadataSchemas: (json_['metadataSchemas'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1MetadataSchema.fromJson( - value as core.Map)) - .toList(), - nextPageToken: json_['nextPageToken'] as core.String?, + model: json_['model'] as core.String?, + modelDisplayName: json_['modelDisplayName'] as core.String?, ); core.Map toJson() => { - if (metadataSchemas != null) 'metadataSchemas': metadataSchemas!, - if (nextPageToken != null) 'nextPageToken': nextPageToken!, + if (model != null) 'model': model!, + if (modelDisplayName != null) 'modelDisplayName': modelDisplayName!, }; } -/// Response message for MetadataService.ListMetadataStores. -class GoogleCloudAiplatformV1ListMetadataStoresResponse { - /// The MetadataStores found for the Location. - core.List? metadataStores; +/// Config for migrating Dataset in datalabeling.googleapis.com to Vertex AI's +/// Dataset. +class GoogleCloudAiplatformV1MigrateResourceRequestMigrateDataLabelingDatasetConfig { + /// Full resource name of data labeling Dataset. + /// + /// Format: `projects/{project}/datasets/{dataset}`. + /// + /// Required. + core.String? dataset; - /// A token, which can be sent as ListMetadataStoresRequest.page_token to - /// retrieve the next page. + /// Display name of the Dataset in Vertex AI. /// - /// If this field is not populated, there are no subsequent pages. - core.String? nextPageToken; + /// System will pick a display name if unspecified. + /// + /// Optional. + core.String? datasetDisplayName; - GoogleCloudAiplatformV1ListMetadataStoresResponse({ - this.metadataStores, - this.nextPageToken, + /// Configs for migrating AnnotatedDataset in datalabeling.googleapis.com to + /// Vertex AI's SavedQuery. + /// + /// The specified AnnotatedDatasets have to belong to the datalabeling + /// Dataset. + /// + /// Optional. + core.List< + GoogleCloudAiplatformV1MigrateResourceRequestMigrateDataLabelingDatasetConfigMigrateDataLabelingAnnotatedDatasetConfig>? + migrateDataLabelingAnnotatedDatasetConfigs; + + GoogleCloudAiplatformV1MigrateResourceRequestMigrateDataLabelingDatasetConfig({ + this.dataset, + this.datasetDisplayName, + this.migrateDataLabelingAnnotatedDatasetConfigs, }); - GoogleCloudAiplatformV1ListMetadataStoresResponse.fromJson(core.Map json_) + GoogleCloudAiplatformV1MigrateResourceRequestMigrateDataLabelingDatasetConfig.fromJson( + core.Map json_) : this( - metadataStores: (json_['metadataStores'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1MetadataStore.fromJson( - value as core.Map)) + dataset: json_['dataset'] as core.String?, + datasetDisplayName: json_['datasetDisplayName'] as core.String?, + migrateDataLabelingAnnotatedDatasetConfigs: (json_[ + 'migrateDataLabelingAnnotatedDatasetConfigs'] as core.List?) 
+ ?.map((value) => + GoogleCloudAiplatformV1MigrateResourceRequestMigrateDataLabelingDatasetConfigMigrateDataLabelingAnnotatedDatasetConfig + .fromJson(value as core.Map)) .toList(), - nextPageToken: json_['nextPageToken'] as core.String?, ); core.Map toJson() => { - if (metadataStores != null) 'metadataStores': metadataStores!, - if (nextPageToken != null) 'nextPageToken': nextPageToken!, + if (dataset != null) 'dataset': dataset!, + if (datasetDisplayName != null) + 'datasetDisplayName': datasetDisplayName!, + if (migrateDataLabelingAnnotatedDatasetConfigs != null) + 'migrateDataLabelingAnnotatedDatasetConfigs': + migrateDataLabelingAnnotatedDatasetConfigs!, }; } -/// Response message for JobService.ListModelDeploymentMonitoringJobs. -class GoogleCloudAiplatformV1ListModelDeploymentMonitoringJobsResponse { - /// A list of ModelDeploymentMonitoringJobs that matches the specified filter - /// in the request. - core.List? - modelDeploymentMonitoringJobs; - - /// The standard List next-page token. - core.String? nextPageToken; +/// Config for migrating AnnotatedDataset in datalabeling.googleapis.com to +/// Vertex AI's SavedQuery. +class GoogleCloudAiplatformV1MigrateResourceRequestMigrateDataLabelingDatasetConfigMigrateDataLabelingAnnotatedDatasetConfig { + /// Full resource name of data labeling AnnotatedDataset. + /// + /// Format: + /// `projects/{project}/datasets/{dataset}/annotatedDatasets/{annotated_dataset}`. + /// + /// Required. + core.String? annotatedDataset; - GoogleCloudAiplatformV1ListModelDeploymentMonitoringJobsResponse({ - this.modelDeploymentMonitoringJobs, - this.nextPageToken, + GoogleCloudAiplatformV1MigrateResourceRequestMigrateDataLabelingDatasetConfigMigrateDataLabelingAnnotatedDatasetConfig({ + this.annotatedDataset, }); - GoogleCloudAiplatformV1ListModelDeploymentMonitoringJobsResponse.fromJson( + GoogleCloudAiplatformV1MigrateResourceRequestMigrateDataLabelingDatasetConfigMigrateDataLabelingAnnotatedDatasetConfig.fromJson( core.Map json_) : this( - modelDeploymentMonitoringJobs: (json_['modelDeploymentMonitoringJobs'] - as core.List?) - ?.map((value) => - GoogleCloudAiplatformV1ModelDeploymentMonitoringJob.fromJson( - value as core.Map)) - .toList(), - nextPageToken: json_['nextPageToken'] as core.String?, + annotatedDataset: json_['annotatedDataset'] as core.String?, ); core.Map toJson() => { - if (modelDeploymentMonitoringJobs != null) - 'modelDeploymentMonitoringJobs': modelDeploymentMonitoringJobs!, - if (nextPageToken != null) 'nextPageToken': nextPageToken!, + if (annotatedDataset != null) 'annotatedDataset': annotatedDataset!, }; } -/// Response message for ModelService.ListModelEvaluationSlices. -class GoogleCloudAiplatformV1ListModelEvaluationSlicesResponse { - /// List of ModelEvaluations in the requested page. - core.List? modelEvaluationSlices; +/// Config for migrating version in ml.googleapis.com to Vertex AI's Model. +class GoogleCloudAiplatformV1MigrateResourceRequestMigrateMlEngineModelVersionConfig { + /// The ml.googleapis.com endpoint that this model version should be migrated + /// from. + /// + /// Example values: * ml.googleapis.com * us-centrall-ml.googleapis.com * + /// europe-west4-ml.googleapis.com * asia-east1-ml.googleapis.com + /// + /// Required. + core.String? endpoint; - /// A token to retrieve next page of results. + /// Display name of the model in Vertex AI. /// - /// Pass to ListModelEvaluationSlicesRequest.page_token to obtain that page. - core.String? nextPageToken; + /// System will pick a display name if unspecified. 
+ /// + /// Required. + core.String? modelDisplayName; - GoogleCloudAiplatformV1ListModelEvaluationSlicesResponse({ - this.modelEvaluationSlices, - this.nextPageToken, + /// Full resource name of ml engine model version. + /// + /// Format: `projects/{project}/models/{model}/versions/{version}`. + /// + /// Required. + core.String? modelVersion; + + GoogleCloudAiplatformV1MigrateResourceRequestMigrateMlEngineModelVersionConfig({ + this.endpoint, + this.modelDisplayName, + this.modelVersion, }); - GoogleCloudAiplatformV1ListModelEvaluationSlicesResponse.fromJson( + GoogleCloudAiplatformV1MigrateResourceRequestMigrateMlEngineModelVersionConfig.fromJson( core.Map json_) : this( - modelEvaluationSlices: (json_['modelEvaluationSlices'] as core.List?) - ?.map((value) => - GoogleCloudAiplatformV1ModelEvaluationSlice.fromJson( - value as core.Map)) - .toList(), - nextPageToken: json_['nextPageToken'] as core.String?, + endpoint: json_['endpoint'] as core.String?, + modelDisplayName: json_['modelDisplayName'] as core.String?, + modelVersion: json_['modelVersion'] as core.String?, ); core.Map toJson() => { - if (modelEvaluationSlices != null) - 'modelEvaluationSlices': modelEvaluationSlices!, - if (nextPageToken != null) 'nextPageToken': nextPageToken!, + if (endpoint != null) 'endpoint': endpoint!, + if (modelDisplayName != null) 'modelDisplayName': modelDisplayName!, + if (modelVersion != null) 'modelVersion': modelVersion!, }; } -/// Response message for ModelService.ListModelEvaluations. -class GoogleCloudAiplatformV1ListModelEvaluationsResponse { - /// List of ModelEvaluations in the requested page. - core.List? modelEvaluations; +/// A trained machine learning Model. +class GoogleCloudAiplatformV1Model { + /// The path to the directory containing the Model artifact and any of its + /// supporting files. + /// + /// Not required for AutoML Models. + /// + /// Immutable. + core.String? artifactUri; - /// A token to retrieve next page of results. + /// User input field to specify the base model source. /// - /// Pass to ListModelEvaluationsRequest.page_token to obtain that page. - core.String? nextPageToken; + /// Currently it only supports specifing the Model Garden models and Genie + /// models. + /// + /// Optional. + GoogleCloudAiplatformV1ModelBaseModelSource? baseModelSource; - GoogleCloudAiplatformV1ListModelEvaluationsResponse({ - this.modelEvaluations, - this.nextPageToken, - }); + /// Input only. + /// + /// The specification of the container that is to be used when deploying this + /// Model. The specification is ingested upon ModelService.UploadModel, and + /// all binaries it contains are copied and stored internally by Vertex AI. + /// Not required for AutoML Models. + GoogleCloudAiplatformV1ModelContainerSpec? containerSpec; - GoogleCloudAiplatformV1ListModelEvaluationsResponse.fromJson(core.Map json_) - : this( - modelEvaluations: (json_['modelEvaluations'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1ModelEvaluation.fromJson( - value as core.Map)) - .toList(), - nextPageToken: json_['nextPageToken'] as core.String?, - ); + /// Timestamp when this Model was uploaded into Vertex AI. + /// + /// Output only. + core.String? createTime; - core.Map toJson() => { - if (modelEvaluations != null) 'modelEvaluations': modelEvaluations!, - if (nextPageToken != null) 'nextPageToken': nextPageToken!, - }; -} + /// Stats of data used for training or evaluating the Model. + /// + /// Only populated when the Model is trained by a TrainingPipeline with + /// data_input_config. 
+ GoogleCloudAiplatformV1ModelDataStats? dataStats; -/// Response message for ModelService.ListModelVersions -class GoogleCloudAiplatformV1ListModelVersionsResponse { - /// List of Model versions in the requested page. + /// The pointers to DeployedModels created from this Model. /// - /// In the returned Model name field, version ID instead of regvision tag will - /// be included. - core.List? models; + /// Note that Model could have been deployed to Endpoints in different + /// Locations. + /// + /// Output only. + core.List? deployedModels; - /// A token to retrieve the next page of results. + /// The description of the Model. + core.String? description; + + /// The display name of the Model. /// - /// Pass to ListModelVersionsRequest.page_token to obtain that page. - core.String? nextPageToken; + /// The name can be up to 128 characters long and can consist of any UTF-8 + /// characters. + /// + /// Required. + core.String? displayName; - GoogleCloudAiplatformV1ListModelVersionsResponse({ - this.models, - this.nextPageToken, - }); + /// Customer-managed encryption key spec for a Model. + /// + /// If set, this Model and all sub-resources of this Model will be secured by + /// this key. + GoogleCloudAiplatformV1EncryptionSpec? encryptionSpec; - GoogleCloudAiplatformV1ListModelVersionsResponse.fromJson(core.Map json_) - : this( - models: (json_['models'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1Model.fromJson( - value as core.Map)) - .toList(), - nextPageToken: json_['nextPageToken'] as core.String?, - ); + /// Used to perform consistent read-modify-write updates. + /// + /// If not set, a blind "overwrite" update happens. + core.String? etag; - core.Map toJson() => { - if (models != null) 'models': models!, - if (nextPageToken != null) 'nextPageToken': nextPageToken!, - }; -} + /// The default explanation specification for this Model. + /// + /// The Model can be used for requesting explanation after being deployed if + /// it is populated. The Model can be used for batch explanation if it is + /// populated. All fields of the explanation_spec can be overridden by + /// explanation_spec of DeployModelRequest.deployed_model, or explanation_spec + /// of BatchPredictionJob. If the default explanation specification is not set + /// for this Model, this Model can still be used for requesting explanation by + /// setting explanation_spec of DeployModelRequest.deployed_model and for + /// batch explanation by setting explanation_spec of BatchPredictionJob. + GoogleCloudAiplatformV1ExplanationSpec? explanationSpec; -/// Response message for ModelService.ListModels -class GoogleCloudAiplatformV1ListModelsResponse { - /// List of Models in the requested page. - core.List? models; + /// The labels with user-defined metadata to organize your Models. + /// + /// Label keys and values can be no longer than 64 characters (Unicode + /// codepoints), can only contain lowercase letters, numeric characters, + /// underscores and dashes. International characters are allowed. See + /// https://goo.gl/xmQnxf for more information and examples of labels. + core.Map? labels; - /// A token to retrieve next page of results. + /// An additional information about the Model; the schema of the metadata can + /// be found in metadata_schema. + /// + /// Unset if the Model does not have any additional information. + /// + /// Immutable. + /// + /// The values for Object must be JSON objects. It can consist of `num`, + /// `String`, `bool` and `null` as well as `Map` and `List` values. + core.Object? 
metadata; + + /// The resource name of the Artifact that was created in MetadataStore when + /// creating the Model. + /// + /// The Artifact resource name pattern is + /// `projects/{project}/locations/{location}/metadataStores/{metadata_store}/artifacts/{artifact}`. + /// + /// Output only. + core.String? metadataArtifact; + + /// Points to a YAML file stored on Google Cloud Storage describing additional + /// information about the Model, that is specific to it. + /// + /// Unset if the Model does not have any additional information. The schema is + /// defined as an OpenAPI 3.0.2 + /// [Schema Object](https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.2.md#schemaObject). + /// AutoML Models always have this field populated by Vertex AI, if no + /// additional metadata is needed, this field is set to an empty string. Note: + /// The URI given on output will be immutable and probably different, + /// including the URI scheme, than the one given on input. The output URI will + /// point to a location where the user only has a read access. /// - /// Pass to ListModelsRequest.page_token to obtain that page. - core.String? nextPageToken; - - GoogleCloudAiplatformV1ListModelsResponse({ - this.models, - this.nextPageToken, - }); + /// Immutable. + core.String? metadataSchemaUri; - GoogleCloudAiplatformV1ListModelsResponse.fromJson(core.Map json_) - : this( - models: (json_['models'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1Model.fromJson( - value as core.Map)) - .toList(), - nextPageToken: json_['nextPageToken'] as core.String?, - ); + /// Source of a model. + /// + /// It can either be automl training pipeline, custom training pipeline, + /// BigQuery ML, or saved and tuned from Genie or Model Garden. + /// + /// Output only. + GoogleCloudAiplatformV1ModelSourceInfo? modelSourceInfo; - core.Map toJson() => { - if (models != null) 'models': models!, - if (nextPageToken != null) 'nextPageToken': nextPageToken!, - }; -} + /// The resource name of the Model. + core.String? name; -/// Response message for JobService.ListNasJobs -class GoogleCloudAiplatformV1ListNasJobsResponse { - /// List of NasJobs in the requested page. + /// If this Model is a copy of another Model, this contains info about the + /// original. /// - /// NasJob.nas_job_output of the jobs will not be returned. - core.List? nasJobs; + /// Output only. + GoogleCloudAiplatformV1ModelOriginalModelInfo? originalModelInfo; - /// A token to retrieve the next page of results. + /// This field is populated if the model is produced by a pipeline job. /// - /// Pass to ListNasJobsRequest.page_token to obtain that page. - core.String? nextPageToken; - - GoogleCloudAiplatformV1ListNasJobsResponse({ - this.nasJobs, - this.nextPageToken, - }); + /// Optional. + core.String? pipelineJob; - GoogleCloudAiplatformV1ListNasJobsResponse.fromJson(core.Map json_) - : this( - nasJobs: (json_['nasJobs'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1NasJob.fromJson( - value as core.Map)) - .toList(), - nextPageToken: json_['nextPageToken'] as core.String?, - ); + /// The schemata that describe formats of the Model's predictions and + /// explanations as given and returned via PredictionService.Predict and + /// PredictionService.Explain. + GoogleCloudAiplatformV1PredictSchemata? predictSchemata; - core.Map toJson() => { - if (nasJobs != null) 'nasJobs': nasJobs!, - if (nextPageToken != null) 'nextPageToken': nextPageToken!, - }; -} + /// Reserved for future use. + /// + /// Output only. + core.bool? 
satisfiesPzi; -/// Response message for JobService.ListNasTrialDetails -class GoogleCloudAiplatformV1ListNasTrialDetailsResponse { - /// List of top NasTrials in the requested page. - core.List? nasTrialDetails; + /// Reserved for future use. + /// + /// Output only. + core.bool? satisfiesPzs; - /// A token to retrieve the next page of results. + /// When this Model is deployed, its prediction resources are described by the + /// `prediction_resources` field of the Endpoint.deployed_models object. /// - /// Pass to ListNasTrialDetailsRequest.page_token to obtain that page. - core.String? nextPageToken; + /// Because not all Models support all resource configuration types, the + /// configuration types this Model supports are listed here. If no + /// configuration types are listed, the Model cannot be deployed to an + /// Endpoint and does not support online predictions + /// (PredictionService.Predict or PredictionService.Explain). Such a Model can + /// serve predictions by using a BatchPredictionJob, if it has at least one + /// entry each in supported_input_storage_formats and + /// supported_output_storage_formats. + /// + /// Output only. + core.List? supportedDeploymentResourcesTypes; - GoogleCloudAiplatformV1ListNasTrialDetailsResponse({ - this.nasTrialDetails, - this.nextPageToken, - }); + /// The formats in which this Model may be exported. + /// + /// If empty, this Model is not available for export. + /// + /// Output only. + core.List? supportedExportFormats; - GoogleCloudAiplatformV1ListNasTrialDetailsResponse.fromJson(core.Map json_) - : this( - nasTrialDetails: (json_['nasTrialDetails'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1NasTrialDetail.fromJson( - value as core.Map)) - .toList(), - nextPageToken: json_['nextPageToken'] as core.String?, - ); + /// The formats this Model supports in BatchPredictionJob.input_config. + /// + /// If PredictSchemata.instance_schema_uri exists, the instances should be + /// given as per that schema. The possible formats are: * `jsonl` The JSON + /// Lines format, where each instance is a single line. Uses GcsSource. * + /// `csv` The CSV format, where each instance is a single comma-separated + /// line. The first line in the file is the header, containing comma-separated + /// field names. Uses GcsSource. * `tf-record` The TFRecord format, where each + /// instance is a single record in tfrecord syntax. Uses GcsSource. * + /// `tf-record-gzip` Similar to `tf-record`, but the file is gzipped. Uses + /// GcsSource. * `bigquery` Each instance is a single row in BigQuery. Uses + /// BigQuerySource. * `file-list` Each line of the file is the location of an + /// instance to process, uses `gcs_source` field of the InputConfig object. If + /// this Model doesn't support any of these formats it means it cannot be used + /// with a BatchPredictionJob. However, if it has + /// supported_deployment_resources_types, it could serve online predictions by + /// using PredictionService.Predict or PredictionService.Explain. + /// + /// Output only. + core.List? supportedInputStorageFormats; - core.Map toJson() => { - if (nasTrialDetails != null) 'nasTrialDetails': nasTrialDetails!, - if (nextPageToken != null) 'nextPageToken': nextPageToken!, - }; -} + /// The formats this Model supports in BatchPredictionJob.output_config. + /// + /// If both PredictSchemata.instance_schema_uri and + /// PredictSchemata.prediction_schema_uri exist, the predictions are returned + /// together with their instances. 
In other words, the prediction has the + /// original instance data first, followed by the actual prediction content + /// (as per the schema). The possible formats are: * `jsonl` The JSON Lines + /// format, where each prediction is a single line. Uses GcsDestination. * + /// `csv` The CSV format, where each prediction is a single comma-separated + /// line. The first line in the file is the header, containing comma-separated + /// field names. Uses GcsDestination. * `bigquery` Each prediction is a single + /// row in a BigQuery table, uses BigQueryDestination . If this Model doesn't + /// support any of these formats it means it cannot be used with a + /// BatchPredictionJob. However, if it has + /// supported_deployment_resources_types, it could serve online predictions by + /// using PredictionService.Predict or PredictionService.Explain. + /// + /// Output only. + core.List? supportedOutputStorageFormats; -/// Response message for \[NotebookService.CreateNotebookExecutionJob\] -class GoogleCloudAiplatformV1ListNotebookExecutionJobsResponse { - /// A token to retrieve next page of results. + /// The resource name of the TrainingPipeline that uploaded this Model, if + /// any. /// - /// Pass to ListNotebookExecutionJobs.page_token to obtain that page. - core.String? nextPageToken; + /// Output only. + core.String? trainingPipeline; - /// List of NotebookExecutionJobs in the requested page. - core.List? notebookExecutionJobs; + /// Timestamp when this Model was most recently updated. + /// + /// Output only. + core.String? updateTime; - GoogleCloudAiplatformV1ListNotebookExecutionJobsResponse({ - this.nextPageToken, - this.notebookExecutionJobs, - }); + /// User provided version aliases so that a model version can be referenced + /// via alias (i.e. + /// `projects/{project}/locations/{location}/models/{model_id}@{version_alias}` + /// instead of auto-generated version id (i.e. + /// `projects/{project}/locations/{location}/models/{model_id}@{version_id})`. + /// + /// The format is a-z{0,126}\[a-z0-9\] to distinguish from version_id. A + /// default version alias will be created for the first version of the model, + /// and there must be exactly one default version alias for a model. + core.List? versionAliases; - GoogleCloudAiplatformV1ListNotebookExecutionJobsResponse.fromJson( - core.Map json_) - : this( - nextPageToken: json_['nextPageToken'] as core.String?, - notebookExecutionJobs: (json_['notebookExecutionJobs'] as core.List?) - ?.map((value) => - GoogleCloudAiplatformV1NotebookExecutionJob.fromJson( - value as core.Map)) - .toList(), - ); + /// Timestamp when this version was created. + /// + /// Output only. + core.String? versionCreateTime; - core.Map toJson() => { - if (nextPageToken != null) 'nextPageToken': nextPageToken!, - if (notebookExecutionJobs != null) - 'notebookExecutionJobs': notebookExecutionJobs!, - }; -} + /// The description of this version. + core.String? versionDescription; -/// Response message for NotebookService.ListNotebookRuntimeTemplates. -class GoogleCloudAiplatformV1ListNotebookRuntimeTemplatesResponse { - /// A token to retrieve next page of results. + /// The version ID of the model. /// - /// Pass to ListNotebookRuntimeTemplatesRequest.page_token to obtain that - /// page. - core.String? nextPageToken; + /// A new version is committed when a new model version is uploaded or trained + /// under an existing model id. It is an auto-incrementing decimal number in + /// string representation. + /// + /// Output only. Immutable. + core.String? 
versionId; - /// List of NotebookRuntimeTemplates in the requested page. - core.List? - notebookRuntimeTemplates; + /// Timestamp when this version was most recently updated. + /// + /// Output only. + core.String? versionUpdateTime; - GoogleCloudAiplatformV1ListNotebookRuntimeTemplatesResponse({ - this.nextPageToken, - this.notebookRuntimeTemplates, + GoogleCloudAiplatformV1Model({ + this.artifactUri, + this.baseModelSource, + this.containerSpec, + this.createTime, + this.dataStats, + this.deployedModels, + this.description, + this.displayName, + this.encryptionSpec, + this.etag, + this.explanationSpec, + this.labels, + this.metadata, + this.metadataArtifact, + this.metadataSchemaUri, + this.modelSourceInfo, + this.name, + this.originalModelInfo, + this.pipelineJob, + this.predictSchemata, + this.satisfiesPzi, + this.satisfiesPzs, + this.supportedDeploymentResourcesTypes, + this.supportedExportFormats, + this.supportedInputStorageFormats, + this.supportedOutputStorageFormats, + this.trainingPipeline, + this.updateTime, + this.versionAliases, + this.versionCreateTime, + this.versionDescription, + this.versionId, + this.versionUpdateTime, }); - GoogleCloudAiplatformV1ListNotebookRuntimeTemplatesResponse.fromJson( - core.Map json_) + GoogleCloudAiplatformV1Model.fromJson(core.Map json_) : this( - nextPageToken: json_['nextPageToken'] as core.String?, - notebookRuntimeTemplates: - (json_['notebookRuntimeTemplates'] as core.List?) + artifactUri: json_['artifactUri'] as core.String?, + baseModelSource: json_.containsKey('baseModelSource') + ? GoogleCloudAiplatformV1ModelBaseModelSource.fromJson( + json_['baseModelSource'] + as core.Map) + : null, + containerSpec: json_.containsKey('containerSpec') + ? GoogleCloudAiplatformV1ModelContainerSpec.fromJson( + json_['containerSpec'] as core.Map) + : null, + createTime: json_['createTime'] as core.String?, + dataStats: json_.containsKey('dataStats') + ? GoogleCloudAiplatformV1ModelDataStats.fromJson( + json_['dataStats'] as core.Map) + : null, + deployedModels: (json_['deployedModels'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1DeployedModelRef.fromJson( + value as core.Map)) + .toList(), + description: json_['description'] as core.String?, + displayName: json_['displayName'] as core.String?, + encryptionSpec: json_.containsKey('encryptionSpec') + ? GoogleCloudAiplatformV1EncryptionSpec.fromJson( + json_['encryptionSpec'] + as core.Map) + : null, + etag: json_['etag'] as core.String?, + explanationSpec: json_.containsKey('explanationSpec') + ? GoogleCloudAiplatformV1ExplanationSpec.fromJson( + json_['explanationSpec'] + as core.Map) + : null, + labels: + (json_['labels'] as core.Map?)?.map( + (key, value) => core.MapEntry( + key, + value as core.String, + ), + ), + metadata: json_['metadata'], + metadataArtifact: json_['metadataArtifact'] as core.String?, + metadataSchemaUri: json_['metadataSchemaUri'] as core.String?, + modelSourceInfo: json_.containsKey('modelSourceInfo') + ? GoogleCloudAiplatformV1ModelSourceInfo.fromJson( + json_['modelSourceInfo'] + as core.Map) + : null, + name: json_['name'] as core.String?, + originalModelInfo: json_.containsKey('originalModelInfo') + ? GoogleCloudAiplatformV1ModelOriginalModelInfo.fromJson( + json_['originalModelInfo'] + as core.Map) + : null, + pipelineJob: json_['pipelineJob'] as core.String?, + predictSchemata: json_.containsKey('predictSchemata') + ? 
GoogleCloudAiplatformV1PredictSchemata.fromJson( + json_['predictSchemata'] + as core.Map) + : null, + satisfiesPzi: json_['satisfiesPzi'] as core.bool?, + satisfiesPzs: json_['satisfiesPzs'] as core.bool?, + supportedDeploymentResourcesTypes: + (json_['supportedDeploymentResourcesTypes'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + supportedExportFormats: + (json_['supportedExportFormats'] as core.List?) ?.map((value) => - GoogleCloudAiplatformV1NotebookRuntimeTemplate.fromJson( + GoogleCloudAiplatformV1ModelExportFormat.fromJson( value as core.Map)) .toList(), + supportedInputStorageFormats: + (json_['supportedInputStorageFormats'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + supportedOutputStorageFormats: + (json_['supportedOutputStorageFormats'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + trainingPipeline: json_['trainingPipeline'] as core.String?, + updateTime: json_['updateTime'] as core.String?, + versionAliases: (json_['versionAliases'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + versionCreateTime: json_['versionCreateTime'] as core.String?, + versionDescription: json_['versionDescription'] as core.String?, + versionId: json_['versionId'] as core.String?, + versionUpdateTime: json_['versionUpdateTime'] as core.String?, ); core.Map toJson() => { - if (nextPageToken != null) 'nextPageToken': nextPageToken!, - if (notebookRuntimeTemplates != null) - 'notebookRuntimeTemplates': notebookRuntimeTemplates!, + if (artifactUri != null) 'artifactUri': artifactUri!, + if (baseModelSource != null) 'baseModelSource': baseModelSource!, + if (containerSpec != null) 'containerSpec': containerSpec!, + if (createTime != null) 'createTime': createTime!, + if (dataStats != null) 'dataStats': dataStats!, + if (deployedModels != null) 'deployedModels': deployedModels!, + if (description != null) 'description': description!, + if (displayName != null) 'displayName': displayName!, + if (encryptionSpec != null) 'encryptionSpec': encryptionSpec!, + if (etag != null) 'etag': etag!, + if (explanationSpec != null) 'explanationSpec': explanationSpec!, + if (labels != null) 'labels': labels!, + if (metadata != null) 'metadata': metadata!, + if (metadataArtifact != null) 'metadataArtifact': metadataArtifact!, + if (metadataSchemaUri != null) 'metadataSchemaUri': metadataSchemaUri!, + if (modelSourceInfo != null) 'modelSourceInfo': modelSourceInfo!, + if (name != null) 'name': name!, + if (originalModelInfo != null) 'originalModelInfo': originalModelInfo!, + if (pipelineJob != null) 'pipelineJob': pipelineJob!, + if (predictSchemata != null) 'predictSchemata': predictSchemata!, + if (satisfiesPzi != null) 'satisfiesPzi': satisfiesPzi!, + if (satisfiesPzs != null) 'satisfiesPzs': satisfiesPzs!, + if (supportedDeploymentResourcesTypes != null) + 'supportedDeploymentResourcesTypes': + supportedDeploymentResourcesTypes!, + if (supportedExportFormats != null) + 'supportedExportFormats': supportedExportFormats!, + if (supportedInputStorageFormats != null) + 'supportedInputStorageFormats': supportedInputStorageFormats!, + if (supportedOutputStorageFormats != null) + 'supportedOutputStorageFormats': supportedOutputStorageFormats!, + if (trainingPipeline != null) 'trainingPipeline': trainingPipeline!, + if (updateTime != null) 'updateTime': updateTime!, + if (versionAliases != null) 'versionAliases': versionAliases!, + if (versionCreateTime != null) 'versionCreateTime': versionCreateTime!, + if (versionDescription != 
null) + 'versionDescription': versionDescription!, + if (versionId != null) 'versionId': versionId!, + if (versionUpdateTime != null) 'versionUpdateTime': versionUpdateTime!, }; } -/// Response message for NotebookService.ListNotebookRuntimes. -class GoogleCloudAiplatformV1ListNotebookRuntimesResponse { - /// A token to retrieve next page of results. - /// - /// Pass to ListNotebookRuntimesRequest.page_token to obtain that page. - core.String? nextPageToken; +/// User input field to specify the base model source. +/// +/// Currently it only supports specifing the Model Garden models and Genie +/// models. +class GoogleCloudAiplatformV1ModelBaseModelSource { + /// Information about the base model of Genie models. + GoogleCloudAiplatformV1GenieSource? genieSource; - /// List of NotebookRuntimes in the requested page. - core.List? notebookRuntimes; + /// Source information of Model Garden models. + GoogleCloudAiplatformV1ModelGardenSource? modelGardenSource; - GoogleCloudAiplatformV1ListNotebookRuntimesResponse({ - this.nextPageToken, - this.notebookRuntimes, + GoogleCloudAiplatformV1ModelBaseModelSource({ + this.genieSource, + this.modelGardenSource, }); - GoogleCloudAiplatformV1ListNotebookRuntimesResponse.fromJson(core.Map json_) + GoogleCloudAiplatformV1ModelBaseModelSource.fromJson(core.Map json_) : this( - nextPageToken: json_['nextPageToken'] as core.String?, - notebookRuntimes: (json_['notebookRuntimes'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1NotebookRuntime.fromJson( - value as core.Map)) - .toList(), + genieSource: json_.containsKey('genieSource') + ? GoogleCloudAiplatformV1GenieSource.fromJson( + json_['genieSource'] as core.Map) + : null, + modelGardenSource: json_.containsKey('modelGardenSource') + ? GoogleCloudAiplatformV1ModelGardenSource.fromJson( + json_['modelGardenSource'] + as core.Map) + : null, ); core.Map toJson() => { - if (nextPageToken != null) 'nextPageToken': nextPageToken!, - if (notebookRuntimes != null) 'notebookRuntimes': notebookRuntimes!, + if (genieSource != null) 'genieSource': genieSource!, + if (modelGardenSource != null) 'modelGardenSource': modelGardenSource!, }; } -/// Request message for VizierService.ListOptimalTrials. -typedef GoogleCloudAiplatformV1ListOptimalTrialsRequest = $Empty; +/// Specification of a container for serving predictions. +/// +/// Some fields in this message correspond to fields in the +/// [Kubernetes Container v1 core specification](https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.23/#container-v1-core). +class GoogleCloudAiplatformV1ModelContainerSpec { + /// Specifies arguments for the command that runs when the container starts. + /// + /// This overrides the container's + /// \[`CMD`\](https://docs.docker.com/engine/reference/builder/#cmd). Specify + /// this field as an array of executable and arguments, similar to a Docker + /// `CMD`'s "default parameters" form. If you don't specify this field but do + /// specify the command field, then the command from the `command` field runs + /// without any additional arguments. See the \[Kubernetes documentation about + /// how the `command` and `args` fields interact with a container's + /// `ENTRYPOINT` and + /// `CMD`\](https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#notes). 
+ /// If you don't specify this field and don't specify the `command` field, + /// then the container's + /// \[`ENTRYPOINT`\](https://docs.docker.com/engine/reference/builder/#cmd) + /// and `CMD` determine what runs based on their default behavior. See the + /// Docker documentation about \[how `CMD` and `ENTRYPOINT` + /// interact\](https://docs.docker.com/engine/reference/builder/#understand-how-cmd-and-entrypoint-interact). + /// In this field, you can reference + /// [environment variables set by Vertex AI](https://cloud.google.com/vertex-ai/docs/predictions/custom-container-requirements#aip-variables) + /// and environment variables set in the env field. You cannot reference + /// environment variables set in the Docker image. In order for environment + /// variables to be expanded, reference them by using the following syntax: $( + /// VARIABLE_NAME) Note that this differs from Bash variable expansion, which + /// does not use parentheses. If a variable cannot be resolved, the reference + /// in the input string is used unchanged. To avoid variable expansion, you + /// can escape this syntax with `$$`; for example: $$(VARIABLE_NAME) This + /// field corresponds to the `args` field of the Kubernetes Containers + /// [v1 core API](https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.23/#container-v1-core). + /// + /// Immutable. + core.List? args; + + /// Specifies the command that runs when the container starts. + /// + /// This overrides the container's + /// [ENTRYPOINT](https://docs.docker.com/engine/reference/builder/#entrypoint). + /// Specify this field as an array of executable and arguments, similar to a + /// Docker `ENTRYPOINT`'s "exec" form, not its "shell" form. If you do not + /// specify this field, then the container's `ENTRYPOINT` runs, in conjunction + /// with the args field or the container's + /// \[`CMD`\](https://docs.docker.com/engine/reference/builder/#cmd), if + /// either exists. If this field is not specified and the container does not + /// have an `ENTRYPOINT`, then refer to the Docker documentation about \[how + /// `CMD` and `ENTRYPOINT` + /// interact\](https://docs.docker.com/engine/reference/builder/#understand-how-cmd-and-entrypoint-interact). + /// If you specify this field, then you can also specify the `args` field to + /// provide additional arguments for this command. However, if you specify + /// this field, then the container's `CMD` is ignored. See the \[Kubernetes + /// documentation about how the `command` and `args` fields interact with a + /// container's `ENTRYPOINT` and + /// `CMD`\](https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#notes). + /// In this field, you can reference + /// [environment variables set by Vertex AI](https://cloud.google.com/vertex-ai/docs/predictions/custom-container-requirements#aip-variables) + /// and environment variables set in the env field. You cannot reference + /// environment variables set in the Docker image. In order for environment + /// variables to be expanded, reference them by using the following syntax: $( + /// VARIABLE_NAME) Note that this differs from Bash variable expansion, which + /// does not use parentheses. If a variable cannot be resolved, the reference + /// in the input string is used unchanged. 
To avoid variable expansion, you + /// can escape this syntax with `$$`; for example: $$(VARIABLE_NAME) This + /// field corresponds to the `command` field of the Kubernetes Containers + /// [v1 core API](https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.23/#container-v1-core). + /// + /// Immutable. + core.List? command; -/// Response message for VizierService.ListOptimalTrials. -class GoogleCloudAiplatformV1ListOptimalTrialsResponse { - /// The pareto-optimal Trials for multiple objective Study or the optimal - /// trial for single objective Study. + /// Deployment timeout. /// - /// The definition of pareto-optimal can be checked in wiki page. - /// https://en.wikipedia.org/wiki/Pareto_efficiency - core.List? optimalTrials; + /// Limit for deployment timeout is 2 hours. + /// + /// Immutable. + core.String? deploymentTimeout; - GoogleCloudAiplatformV1ListOptimalTrialsResponse({ - this.optimalTrials, - }); + /// List of environment variables to set in the container. + /// + /// After the container starts running, code running in the container can read + /// these environment variables. Additionally, the command and args fields can + /// reference these variables. Later entries in this list can also reference + /// earlier entries. For example, the following example sets the variable + /// `VAR_2` to have the value `foo bar`: ```json [ { "name": "VAR_1", "value": + /// "foo" }, { "name": "VAR_2", "value": "$(VAR_1) bar" } ] ``` If you switch + /// the order of the variables in the example, then the expansion does not + /// occur. This field corresponds to the `env` field of the Kubernetes + /// Containers + /// [v1 core API](https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.23/#container-v1-core). + /// + /// Immutable. + core.List? env; - GoogleCloudAiplatformV1ListOptimalTrialsResponse.fromJson(core.Map json_) - : this( - optimalTrials: (json_['optimalTrials'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1Trial.fromJson( - value as core.Map)) - .toList(), - ); + /// List of ports to expose from the container. + /// + /// Vertex AI sends gRPC prediction requests that it receives to the first + /// port on this list. Vertex AI also sends liveness and health checks to this + /// port. If you do not specify this field, gRPC requests to the container + /// will be disabled. Vertex AI does not use ports other than the first one + /// listed. This field corresponds to the `ports` field of the Kubernetes + /// Containers v1 core API. + /// + /// Immutable. + core.List? grpcPorts; - core.Map toJson() => { - if (optimalTrials != null) 'optimalTrials': optimalTrials!, - }; -} + /// Specification for Kubernetes readiness probe. + /// + /// Immutable. + GoogleCloudAiplatformV1Probe? healthProbe; -/// Response message for PersistentResourceService.ListPersistentResources -class GoogleCloudAiplatformV1ListPersistentResourcesResponse { - /// A token to retrieve next page of results. + /// HTTP path on the container to send health checks to. /// - /// Pass to ListPersistentResourcesRequest.page_token to obtain that page. - core.String? nextPageToken; - core.List? persistentResources; + /// Vertex AI intermittently sends GET requests to this path on the + /// container's IP address and port to check that the container is healthy. + /// Read more about + /// [health checks](https://cloud.google.com/vertex-ai/docs/predictions/custom-container-requirements#health). 
+ /// For example, if you set this field to `/bar`, then Vertex AI + /// intermittently sends a GET request to the `/bar` path on the port of your + /// container specified by the first value of this `ModelContainerSpec`'s + /// ports field. If you don't specify this field, it defaults to the following + /// value when you deploy this Model to an Endpoint: + /// /v1/endpoints/ENDPOINT/deployedModels/ DEPLOYED_MODEL:predict The + /// placeholders in this value are replaced as follows: * ENDPOINT: The last + /// segment (following `endpoints/`)of the Endpoint.name\]\[\] field of the + /// Endpoint where this Model has been deployed. (Vertex AI makes this value + /// available to your container code as the \[`AIP_ENDPOINT_ID` environment + /// variable\](https://cloud.google.com/vertex-ai/docs/predictions/custom-container-requirements#aip-variables).) + /// * DEPLOYED_MODEL: DeployedModel.id of the `DeployedModel`. (Vertex AI + /// makes this value available to your container code as the + /// \[`AIP_DEPLOYED_MODEL_ID` environment + /// variable\](https://cloud.google.com/vertex-ai/docs/predictions/custom-container-requirements#aip-variables).) + /// + /// Immutable. + core.String? healthRoute; - GoogleCloudAiplatformV1ListPersistentResourcesResponse({ - this.nextPageToken, - this.persistentResources, - }); + /// URI of the Docker image to be used as the custom container for serving + /// predictions. + /// + /// This URI must identify an image in Artifact Registry or Container + /// Registry. Learn more about the + /// [container publishing requirements](https://cloud.google.com/vertex-ai/docs/predictions/custom-container-requirements#publishing), + /// including permissions requirements for the Vertex AI Service Agent. The + /// container image is ingested upon ModelService.UploadModel, stored + /// internally, and this original path is afterwards not used. To learn about + /// the requirements for the Docker image itself, see + /// [Custom container requirements](https://cloud.google.com/vertex-ai/docs/predictions/custom-container-requirements#). + /// You can use the URI to one of Vertex AI's \[pre-built container images for + /// prediction\](https://cloud.google.com/vertex-ai/docs/predictions/pre-built-containers) + /// in this field. + /// + /// Required. Immutable. + core.String? imageUri; - GoogleCloudAiplatformV1ListPersistentResourcesResponse.fromJson( - core.Map json_) - : this( - nextPageToken: json_['nextPageToken'] as core.String?, - persistentResources: (json_['persistentResources'] as core.List?) - ?.map((value) => - GoogleCloudAiplatformV1PersistentResource.fromJson( - value as core.Map)) - .toList(), - ); + /// List of ports to expose from the container. + /// + /// Vertex AI sends any prediction requests that it receives to the first port + /// on this list. Vertex AI also sends + /// [liveness and health checks](https://cloud.google.com/vertex-ai/docs/predictions/custom-container-requirements#liveness) + /// to this port. If you do not specify this field, it defaults to following + /// value: ```json [ { "containerPort": 8080 } ] ``` Vertex AI does not use + /// ports other than the first one listed. This field corresponds to the + /// `ports` field of the Kubernetes Containers + /// [v1 core API](https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.23/#container-v1-core). + /// + /// Immutable. + core.List? 
ports; - core.Map toJson() => { - if (nextPageToken != null) 'nextPageToken': nextPageToken!, - if (persistentResources != null) - 'persistentResources': persistentResources!, - }; -} + /// HTTP path on the container to send prediction requests to. + /// + /// Vertex AI forwards requests sent using + /// projects.locations.endpoints.predict to this path on the container's IP + /// address and port. Vertex AI then returns the container's response in the + /// API response. For example, if you set this field to `/foo`, then when + /// Vertex AI receives a prediction request, it forwards the request body in a + /// POST request to the `/foo` path on the port of your container specified by + /// the first value of this `ModelContainerSpec`'s ports field. If you don't + /// specify this field, it defaults to the following value when you deploy + /// this Model to an Endpoint: + /// /v1/endpoints/ENDPOINT/deployedModels/DEPLOYED_MODEL:predict The + /// placeholders in this value are replaced as follows: * ENDPOINT: The last + /// segment (following `endpoints/`)of the Endpoint.name\]\[\] field of the + /// Endpoint where this Model has been deployed. (Vertex AI makes this value + /// available to your container code as the \[`AIP_ENDPOINT_ID` environment + /// variable\](https://cloud.google.com/vertex-ai/docs/predictions/custom-container-requirements#aip-variables).) + /// * DEPLOYED_MODEL: DeployedModel.id of the `DeployedModel`. (Vertex AI + /// makes this value available to your container code as the + /// \[`AIP_DEPLOYED_MODEL_ID` environment + /// variable\](https://cloud.google.com/vertex-ai/docs/predictions/custom-container-requirements#aip-variables).) + /// + /// Immutable. + core.String? predictRoute; -/// Response message for PipelineService.ListPipelineJobs -class GoogleCloudAiplatformV1ListPipelineJobsResponse { - /// A token to retrieve the next page of results. + /// The amount of the VM memory to reserve as the shared memory for the model + /// in megabytes. /// - /// Pass to ListPipelineJobsRequest.page_token to obtain that page. - core.String? nextPageToken; + /// Immutable. + core.String? sharedMemorySizeMb; - /// List of PipelineJobs in the requested page. - core.List? pipelineJobs; + /// Specification for Kubernetes startup probe. + /// + /// Immutable. + GoogleCloudAiplatformV1Probe? startupProbe; - GoogleCloudAiplatformV1ListPipelineJobsResponse({ - this.nextPageToken, - this.pipelineJobs, + GoogleCloudAiplatformV1ModelContainerSpec({ + this.args, + this.command, + this.deploymentTimeout, + this.env, + this.grpcPorts, + this.healthProbe, + this.healthRoute, + this.imageUri, + this.ports, + this.predictRoute, + this.sharedMemorySizeMb, + this.startupProbe, }); - GoogleCloudAiplatformV1ListPipelineJobsResponse.fromJson(core.Map json_) + GoogleCloudAiplatformV1ModelContainerSpec.fromJson(core.Map json_) : this( - nextPageToken: json_['nextPageToken'] as core.String?, - pipelineJobs: (json_['pipelineJobs'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1PipelineJob.fromJson( + args: (json_['args'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + command: (json_['command'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + deploymentTimeout: json_['deploymentTimeout'] as core.String?, + env: (json_['env'] as core.List?) 
+ ?.map((value) => GoogleCloudAiplatformV1EnvVar.fromJson( value as core.Map)) .toList(), - ); - - core.Map toJson() => { - if (nextPageToken != null) 'nextPageToken': nextPageToken!, - if (pipelineJobs != null) 'pipelineJobs': pipelineJobs!, - }; -} - -/// Response message for DatasetService.ListSavedQueries. -class GoogleCloudAiplatformV1ListSavedQueriesResponse { - /// The standard List next-page token. - core.String? nextPageToken; - - /// A list of SavedQueries that match the specified filter in the request. - core.List? savedQueries; - - GoogleCloudAiplatformV1ListSavedQueriesResponse({ - this.nextPageToken, - this.savedQueries, - }); - - GoogleCloudAiplatformV1ListSavedQueriesResponse.fromJson(core.Map json_) - : this( - nextPageToken: json_['nextPageToken'] as core.String?, - savedQueries: (json_['savedQueries'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1SavedQuery.fromJson( + grpcPorts: (json_['grpcPorts'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1Port.fromJson( + value as core.Map)) + .toList(), + healthProbe: json_.containsKey('healthProbe') + ? GoogleCloudAiplatformV1Probe.fromJson( + json_['healthProbe'] as core.Map) + : null, + healthRoute: json_['healthRoute'] as core.String?, + imageUri: json_['imageUri'] as core.String?, + ports: (json_['ports'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1Port.fromJson( value as core.Map)) .toList(), + predictRoute: json_['predictRoute'] as core.String?, + sharedMemorySizeMb: json_['sharedMemorySizeMb'] as core.String?, + startupProbe: json_.containsKey('startupProbe') + ? GoogleCloudAiplatformV1Probe.fromJson( + json_['startupProbe'] as core.Map) + : null, ); core.Map toJson() => { - if (nextPageToken != null) 'nextPageToken': nextPageToken!, - if (savedQueries != null) 'savedQueries': savedQueries!, + if (args != null) 'args': args!, + if (command != null) 'command': command!, + if (deploymentTimeout != null) 'deploymentTimeout': deploymentTimeout!, + if (env != null) 'env': env!, + if (grpcPorts != null) 'grpcPorts': grpcPorts!, + if (healthProbe != null) 'healthProbe': healthProbe!, + if (healthRoute != null) 'healthRoute': healthRoute!, + if (imageUri != null) 'imageUri': imageUri!, + if (ports != null) 'ports': ports!, + if (predictRoute != null) 'predictRoute': predictRoute!, + if (sharedMemorySizeMb != null) + 'sharedMemorySizeMb': sharedMemorySizeMb!, + if (startupProbe != null) 'startupProbe': startupProbe!, }; } -/// Response message for ScheduleService.ListSchedules -class GoogleCloudAiplatformV1ListSchedulesResponse { - /// A token to retrieve the next page of results. +/// Stats of data used for train or evaluate the Model. +class GoogleCloudAiplatformV1ModelDataStats { + /// Number of Annotations that are used for evaluating this Model. /// - /// Pass to ListSchedulesRequest.page_token to obtain that page. - core.String? nextPageToken; - - /// List of Schedules in the requested page. - core.List? schedules; + /// If the Model is evaluated multiple times, this will be the number of test + /// Annotations used by the first evaluation. If the Model is not evaluated, + /// the number is 0. + core.String? testAnnotationsCount; - GoogleCloudAiplatformV1ListSchedulesResponse({ - this.nextPageToken, - this.schedules, - }); + /// Number of DataItems that were used for evaluating this Model. + /// + /// If the Model is evaluated multiple times, this will be the number of test + /// DataItems used by the first evaluation. If the Model is not evaluated, the + /// number is 0. + core.String? 
testDataItemsCount; - GoogleCloudAiplatformV1ListSchedulesResponse.fromJson(core.Map json_) - : this( - nextPageToken: json_['nextPageToken'] as core.String?, - schedules: (json_['schedules'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1Schedule.fromJson( - value as core.Map)) - .toList(), - ); + /// Number of Annotations that are used for training this Model. + core.String? trainingAnnotationsCount; - core.Map toJson() => { - if (nextPageToken != null) 'nextPageToken': nextPageToken!, - if (schedules != null) 'schedules': schedules!, - }; -} + /// Number of DataItems that were used for training this Model. + core.String? trainingDataItemsCount; -/// Response message for SpecialistPoolService.ListSpecialistPools. -class GoogleCloudAiplatformV1ListSpecialistPoolsResponse { - /// The standard List next-page token. - core.String? nextPageToken; + /// Number of Annotations that are used for validating this Model during + /// training. + core.String? validationAnnotationsCount; - /// A list of SpecialistPools that matches the specified filter in the - /// request. - core.List? specialistPools; + /// Number of DataItems that were used for validating this Model during + /// training. + core.String? validationDataItemsCount; - GoogleCloudAiplatformV1ListSpecialistPoolsResponse({ - this.nextPageToken, - this.specialistPools, + GoogleCloudAiplatformV1ModelDataStats({ + this.testAnnotationsCount, + this.testDataItemsCount, + this.trainingAnnotationsCount, + this.trainingDataItemsCount, + this.validationAnnotationsCount, + this.validationDataItemsCount, }); - GoogleCloudAiplatformV1ListSpecialistPoolsResponse.fromJson(core.Map json_) + GoogleCloudAiplatformV1ModelDataStats.fromJson(core.Map json_) : this( - nextPageToken: json_['nextPageToken'] as core.String?, - specialistPools: (json_['specialistPools'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1SpecialistPool.fromJson( - value as core.Map)) - .toList(), + testAnnotationsCount: json_['testAnnotationsCount'] as core.String?, + testDataItemsCount: json_['testDataItemsCount'] as core.String?, + trainingAnnotationsCount: + json_['trainingAnnotationsCount'] as core.String?, + trainingDataItemsCount: + json_['trainingDataItemsCount'] as core.String?, + validationAnnotationsCount: + json_['validationAnnotationsCount'] as core.String?, + validationDataItemsCount: + json_['validationDataItemsCount'] as core.String?, ); core.Map toJson() => { - if (nextPageToken != null) 'nextPageToken': nextPageToken!, - if (specialistPools != null) 'specialistPools': specialistPools!, + if (testAnnotationsCount != null) + 'testAnnotationsCount': testAnnotationsCount!, + if (testDataItemsCount != null) + 'testDataItemsCount': testDataItemsCount!, + if (trainingAnnotationsCount != null) + 'trainingAnnotationsCount': trainingAnnotationsCount!, + if (trainingDataItemsCount != null) + 'trainingDataItemsCount': trainingDataItemsCount!, + if (validationAnnotationsCount != null) + 'validationAnnotationsCount': validationAnnotationsCount!, + if (validationDataItemsCount != null) + 'validationDataItemsCount': validationDataItemsCount!, }; } -/// Response message for VizierService.ListStudies. -class GoogleCloudAiplatformV1ListStudiesResponse { - /// Passes this token as the `page_token` field of the request for a - /// subsequent call. +/// ModelDeploymentMonitoringBigQueryTable specifies the BigQuery table name as +/// well as some information of the logs stored in this table. 
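As a reading aid for the `ModelContainerSpec` routing fields documented above (`predictRoute`, `healthRoute`, `imageUri`, `sharedMemorySizeMb`), here is a minimal, hypothetical sketch of how the generated class might be populated from user code. The image URI and the `/healthz` route are made-up placeholder values, and the import path assumes the published `googleapis` Dart package layout.

import 'package:googleapis/aiplatform/v1.dart';

// Sketch only: builds a container spec whose predict traffic is forwarded to
// `/foo`, mirroring the example in the predictRoute documentation above.
GoogleCloudAiplatformV1ModelContainerSpec buildContainerSpec() =>
    GoogleCloudAiplatformV1ModelContainerSpec(
      // Hypothetical serving image; replace with a real container image URI.
      imageUri: 'us-docker.pkg.dev/my-project/my-repo/my-model:latest',
      // Vertex AI forwards projects.locations.endpoints.predict calls here.
      predictRoute: '/foo',
      // Hypothetical health-check path served by the container.
      healthRoute: '/healthz',
      // Shared VM memory for the model, in megabytes, encoded as a string.
      sharedMemorySizeMb: '1024',
    );

void main() => print(buildContainerSpec().toJson());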
+class GoogleCloudAiplatformV1ModelDeploymentMonitoringBigQueryTable { + /// The created BigQuery table to store logs. /// - /// If this field is omitted, there are no subsequent pages. - core.String? nextPageToken; - - /// The studies associated with the project. - core.List? studies; - - GoogleCloudAiplatformV1ListStudiesResponse({ - this.nextPageToken, - this.studies, - }); + /// Customer could do their own query & analysis. Format: + /// `bq://.model_deployment_monitoring_._` + core.String? bigqueryTablePath; - GoogleCloudAiplatformV1ListStudiesResponse.fromJson(core.Map json_) - : this( - nextPageToken: json_['nextPageToken'] as core.String?, - studies: (json_['studies'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1Study.fromJson( - value as core.Map)) - .toList(), - ); + /// The source of log. + /// Possible string values are: + /// - "LOG_SOURCE_UNSPECIFIED" : Unspecified source. + /// - "TRAINING" : Logs coming from Training dataset. + /// - "SERVING" : Logs coming from Serving traffic. + core.String? logSource; - core.Map toJson() => { - if (nextPageToken != null) 'nextPageToken': nextPageToken!, - if (studies != null) 'studies': studies!, - }; -} + /// The type of log. + /// Possible string values are: + /// - "LOG_TYPE_UNSPECIFIED" : Unspecified type. + /// - "PREDICT" : Predict logs. + /// - "EXPLAIN" : Explain logs. + core.String? logType; -/// Response message for TensorboardService.ListTensorboardExperiments. -class GoogleCloudAiplatformV1ListTensorboardExperimentsResponse { - /// A token, which can be sent as ListTensorboardExperimentsRequest.page_token - /// to retrieve the next page. + /// The schema version of the request/response logging BigQuery table. /// - /// If this field is omitted, there are no subsequent pages. - core.String? nextPageToken; - - /// The TensorboardExperiments mathching the request. - core.List? - tensorboardExperiments; + /// Default to v1 if unset. + /// + /// Output only. + core.String? requestResponseLoggingSchemaVersion; - GoogleCloudAiplatformV1ListTensorboardExperimentsResponse({ - this.nextPageToken, - this.tensorboardExperiments, + GoogleCloudAiplatformV1ModelDeploymentMonitoringBigQueryTable({ + this.bigqueryTablePath, + this.logSource, + this.logType, + this.requestResponseLoggingSchemaVersion, }); - GoogleCloudAiplatformV1ListTensorboardExperimentsResponse.fromJson( + GoogleCloudAiplatformV1ModelDeploymentMonitoringBigQueryTable.fromJson( core.Map json_) : this( - nextPageToken: json_['nextPageToken'] as core.String?, - tensorboardExperiments: - (json_['tensorboardExperiments'] as core.List?) - ?.map((value) => - GoogleCloudAiplatformV1TensorboardExperiment.fromJson( - value as core.Map)) - .toList(), + bigqueryTablePath: json_['bigqueryTablePath'] as core.String?, + logSource: json_['logSource'] as core.String?, + logType: json_['logType'] as core.String?, + requestResponseLoggingSchemaVersion: + json_['requestResponseLoggingSchemaVersion'] as core.String?, ); core.Map toJson() => { - if (nextPageToken != null) 'nextPageToken': nextPageToken!, - if (tensorboardExperiments != null) - 'tensorboardExperiments': tensorboardExperiments!, + if (bigqueryTablePath != null) 'bigqueryTablePath': bigqueryTablePath!, + if (logSource != null) 'logSource': logSource!, + if (logType != null) 'logType': logType!, + if (requestResponseLoggingSchemaVersion != null) + 'requestResponseLoggingSchemaVersion': + requestResponseLoggingSchemaVersion!, }; } -/// Response message for TensorboardService.ListTensorboardRuns. 
-class GoogleCloudAiplatformV1ListTensorboardRunsResponse { - /// A token, which can be sent as ListTensorboardRunsRequest.page_token to - /// retrieve the next page. +/// Represents a job that runs periodically to monitor the deployed models in an +/// endpoint. +/// +/// It will analyze the logged training & prediction data to detect any abnormal +/// behaviors. +class GoogleCloudAiplatformV1ModelDeploymentMonitoringJob { + /// YAML schema file uri describing the format of a single instance that you + /// want Tensorflow Data Validation (TFDV) to analyze. /// - /// If this field is omitted, there are no subsequent pages. - core.String? nextPageToken; - - /// The TensorboardRuns mathching the request. - core.List? tensorboardRuns; - - GoogleCloudAiplatformV1ListTensorboardRunsResponse({ - this.nextPageToken, - this.tensorboardRuns, - }); - - GoogleCloudAiplatformV1ListTensorboardRunsResponse.fromJson(core.Map json_) - : this( - nextPageToken: json_['nextPageToken'] as core.String?, - tensorboardRuns: (json_['tensorboardRuns'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1TensorboardRun.fromJson( - value as core.Map)) - .toList(), - ); + /// If this field is empty, all the feature data types are inferred from + /// predict_instance_schema_uri, meaning that TFDV will use the data in the + /// exact format(data type) as prediction request/response. If there are any + /// data type differences between predict instance and TFDV instance, this + /// field can be used to override the schema. For models trained with Vertex + /// AI, this field must be set as all the fields in predict instance formatted + /// as string. + core.String? analysisInstanceSchemaUri; - core.Map toJson() => { - if (nextPageToken != null) 'nextPageToken': nextPageToken!, - if (tensorboardRuns != null) 'tensorboardRuns': tensorboardRuns!, - }; -} + /// The created bigquery tables for the job under customer project. + /// + /// Customer could do their own query & analysis. There could be 4 log tables + /// in maximum: 1. Training data logging predict request/response 2. Serving + /// data logging predict request/response + /// + /// Output only. + core.List? + bigqueryTables; -/// Response message for TensorboardService.ListTensorboardTimeSeries. -class GoogleCloudAiplatformV1ListTensorboardTimeSeriesResponse { - /// A token, which can be sent as ListTensorboardTimeSeriesRequest.page_token - /// to retrieve the next page. + /// Timestamp when this ModelDeploymentMonitoringJob was created. /// - /// If this field is omitted, there are no subsequent pages. - core.String? nextPageToken; + /// Output only. + core.String? createTime; - /// The TensorboardTimeSeries mathching the request. - core.List? - tensorboardTimeSeries; + /// The user-defined name of the ModelDeploymentMonitoringJob. + /// + /// The name can be up to 128 characters long and can consist of any UTF-8 + /// characters. Display name of a ModelDeploymentMonitoringJob. + /// + /// Required. + core.String? displayName; - GoogleCloudAiplatformV1ListTensorboardTimeSeriesResponse({ - this.nextPageToken, - this.tensorboardTimeSeries, - }); + /// If true, the scheduled monitoring pipeline logs are sent to Google Cloud + /// Logging, including pipeline status and anomalies detected. + /// + /// Please note the logs incur cost, which are subject to + /// [Cloud Logging pricing](https://cloud.google.com/logging#pricing). + core.bool? 
enableMonitoringPipelineLogs; - GoogleCloudAiplatformV1ListTensorboardTimeSeriesResponse.fromJson( - core.Map json_) - : this( - nextPageToken: json_['nextPageToken'] as core.String?, - tensorboardTimeSeries: (json_['tensorboardTimeSeries'] as core.List?) - ?.map((value) => - GoogleCloudAiplatformV1TensorboardTimeSeries.fromJson( - value as core.Map)) - .toList(), - ); + /// Customer-managed encryption key spec for a ModelDeploymentMonitoringJob. + /// + /// If set, this ModelDeploymentMonitoringJob and all sub-resources of this + /// ModelDeploymentMonitoringJob will be secured by this key. + GoogleCloudAiplatformV1EncryptionSpec? encryptionSpec; - core.Map toJson() => { - if (nextPageToken != null) 'nextPageToken': nextPageToken!, - if (tensorboardTimeSeries != null) - 'tensorboardTimeSeries': tensorboardTimeSeries!, - }; -} + /// Endpoint resource name. + /// + /// Format: `projects/{project}/locations/{location}/endpoints/{endpoint}` + /// + /// Required. + core.String? endpoint; -/// Response message for TensorboardService.ListTensorboards. -class GoogleCloudAiplatformV1ListTensorboardsResponse { - /// A token, which can be sent as ListTensorboardsRequest.page_token to - /// retrieve the next page. + /// Only populated when the job's state is `JOB_STATE_FAILED` or + /// `JOB_STATE_CANCELLED`. /// - /// If this field is omitted, there are no subsequent pages. - core.String? nextPageToken; + /// Output only. + GoogleRpcStatus? error; - /// The Tensorboards mathching the request. - core.List? tensorboards; + /// The labels with user-defined metadata to organize your + /// ModelDeploymentMonitoringJob. + /// + /// Label keys and values can be no longer than 64 characters (Unicode + /// codepoints), can only contain lowercase letters, numeric characters, + /// underscores and dashes. International characters are allowed. See + /// https://goo.gl/xmQnxf for more information and examples of labels. + core.Map? labels; - GoogleCloudAiplatformV1ListTensorboardsResponse({ - this.nextPageToken, - this.tensorboards, - }); + /// Latest triggered monitoring pipeline metadata. + /// + /// Output only. + GoogleCloudAiplatformV1ModelDeploymentMonitoringJobLatestMonitoringPipelineMetadata? + latestMonitoringPipelineMetadata; - GoogleCloudAiplatformV1ListTensorboardsResponse.fromJson(core.Map json_) - : this( - nextPageToken: json_['nextPageToken'] as core.String?, - tensorboards: (json_['tensorboards'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1Tensorboard.fromJson( - value as core.Map)) - .toList(), - ); + /// The TTL of BigQuery tables in user projects which stores logs. + /// + /// A day is the basic unit of the TTL and we take the ceil of TTL/86400(a + /// day). e.g. { second: 3600} indicates ttl = 1 day. + core.String? logTtl; - core.Map toJson() => { - if (nextPageToken != null) 'nextPageToken': nextPageToken!, - if (tensorboards != null) 'tensorboards': tensorboards!, - }; -} + /// Sample Strategy for logging. + /// + /// Required. + GoogleCloudAiplatformV1SamplingStrategy? loggingSamplingStrategy; -/// Response message for PipelineService.ListTrainingPipelines -class GoogleCloudAiplatformV1ListTrainingPipelinesResponse { - /// A token to retrieve the next page of results. + /// The config for monitoring objectives. /// - /// Pass to ListTrainingPipelinesRequest.page_token to obtain that page. - core.String? nextPageToken; + /// This is a per DeployedModel config. Each DeployedModel needs to be + /// configured separately. + /// + /// Required. + core.List? 
+ modelDeploymentMonitoringObjectiveConfigs; - /// List of TrainingPipelines in the requested page. - core.List? trainingPipelines; + /// Schedule config for running the monitoring job. + /// + /// Required. + GoogleCloudAiplatformV1ModelDeploymentMonitoringScheduleConfig? + modelDeploymentMonitoringScheduleConfig; - GoogleCloudAiplatformV1ListTrainingPipelinesResponse({ - this.nextPageToken, - this.trainingPipelines, - }); + /// Alert config for model monitoring. + GoogleCloudAiplatformV1ModelMonitoringAlertConfig? modelMonitoringAlertConfig; - GoogleCloudAiplatformV1ListTrainingPipelinesResponse.fromJson(core.Map json_) - : this( - nextPageToken: json_['nextPageToken'] as core.String?, - trainingPipelines: (json_['trainingPipelines'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1TrainingPipeline.fromJson( - value as core.Map)) - .toList(), - ); + /// Resource name of a ModelDeploymentMonitoringJob. + /// + /// Output only. + core.String? name; - core.Map toJson() => { - if (nextPageToken != null) 'nextPageToken': nextPageToken!, - if (trainingPipelines != null) 'trainingPipelines': trainingPipelines!, - }; -} + /// Timestamp when this monitoring pipeline will be scheduled to run for the + /// next round. + /// + /// Output only. + core.String? nextScheduleTime; -/// Response message for VizierService.ListTrials. -class GoogleCloudAiplatformV1ListTrialsResponse { - /// Pass this token as the `page_token` field of the request for a subsequent - /// call. + /// YAML schema file uri describing the format of a single instance, which are + /// given to format this Endpoint's prediction (and explanation). /// - /// If this field is omitted, there are no subsequent pages. - core.String? nextPageToken; + /// If not set, we will generate predict schema from collected predict + /// requests. + core.String? predictInstanceSchemaUri; - /// The Trials associated with the Study. - core.List? trials; + /// Sample Predict instance, same format as PredictRequest.instances, this can + /// be set as a replacement of + /// ModelDeploymentMonitoringJob.predict_instance_schema_uri. + /// + /// If not set, we will generate predict schema from collected predict + /// requests. + /// + /// The values for Object must be JSON objects. It can consist of `num`, + /// `String`, `bool` and `null` as well as `Map` and `List` values. + core.Object? samplePredictInstance; - GoogleCloudAiplatformV1ListTrialsResponse({ - this.nextPageToken, - this.trials, - }); + /// Reserved for future use. + /// + /// Output only. + core.bool? satisfiesPzi; - GoogleCloudAiplatformV1ListTrialsResponse.fromJson(core.Map json_) - : this( - nextPageToken: json_['nextPageToken'] as core.String?, - trials: (json_['trials'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1Trial.fromJson( - value as core.Map)) - .toList(), - ); + /// Reserved for future use. + /// + /// Output only. + core.bool? satisfiesPzs; - core.Map toJson() => { - if (nextPageToken != null) 'nextPageToken': nextPageToken!, - if (trials != null) 'trials': trials!, - }; -} + /// Schedule state when the monitoring job is in Running state. + /// + /// Output only. + /// Possible string values are: + /// - "MONITORING_SCHEDULE_STATE_UNSPECIFIED" : Unspecified state. + /// - "PENDING" : The pipeline is picked up and wait to run. + /// - "OFFLINE" : The pipeline is offline and will be scheduled for next run. + /// - "RUNNING" : The pipeline is running. + core.String? 
scheduleState; -/// Response message for GenAiTuningService.ListTuningJobs -class GoogleCloudAiplatformV1ListTuningJobsResponse { - /// A token to retrieve the next page of results. + /// The detailed state of the monitoring job. /// - /// Pass to ListTuningJobsRequest.page_token to obtain that page. - core.String? nextPageToken; + /// When the job is still creating, the state will be 'PENDING'. Once the job + /// is successfully created, the state will be 'RUNNING'. Pause the job, the + /// state will be 'PAUSED'. Resume the job, the state will return to + /// 'RUNNING'. + /// + /// Output only. + /// Possible string values are: + /// - "JOB_STATE_UNSPECIFIED" : The job state is unspecified. + /// - "JOB_STATE_QUEUED" : The job has been just created or resumed and + /// processing has not yet begun. + /// - "JOB_STATE_PENDING" : The service is preparing to run the job. + /// - "JOB_STATE_RUNNING" : The job is in progress. + /// - "JOB_STATE_SUCCEEDED" : The job completed successfully. + /// - "JOB_STATE_FAILED" : The job failed. + /// - "JOB_STATE_CANCELLING" : The job is being cancelled. From this state the + /// job may only go to either `JOB_STATE_SUCCEEDED`, `JOB_STATE_FAILED` or + /// `JOB_STATE_CANCELLED`. + /// - "JOB_STATE_CANCELLED" : The job has been cancelled. + /// - "JOB_STATE_PAUSED" : The job has been stopped, and can be resumed. + /// - "JOB_STATE_EXPIRED" : The job has expired. + /// - "JOB_STATE_UPDATING" : The job is being updated. Only jobs in the + /// `RUNNING` state can be updated. After updating, the job goes back to the + /// `RUNNING` state. + /// - "JOB_STATE_PARTIALLY_SUCCEEDED" : The job is partially succeeded, some + /// results may be missing due to errors. + core.String? state; - /// List of TuningJobs in the requested page. - core.List? tuningJobs; + /// Stats anomalies base folder path. + GoogleCloudAiplatformV1GcsDestination? statsAnomaliesBaseDirectory; - GoogleCloudAiplatformV1ListTuningJobsResponse({ - this.nextPageToken, - this.tuningJobs, + /// Timestamp when this ModelDeploymentMonitoringJob was updated most + /// recently. + /// + /// Output only. + core.String? updateTime; + + GoogleCloudAiplatformV1ModelDeploymentMonitoringJob({ + this.analysisInstanceSchemaUri, + this.bigqueryTables, + this.createTime, + this.displayName, + this.enableMonitoringPipelineLogs, + this.encryptionSpec, + this.endpoint, + this.error, + this.labels, + this.latestMonitoringPipelineMetadata, + this.logTtl, + this.loggingSamplingStrategy, + this.modelDeploymentMonitoringObjectiveConfigs, + this.modelDeploymentMonitoringScheduleConfig, + this.modelMonitoringAlertConfig, + this.name, + this.nextScheduleTime, + this.predictInstanceSchemaUri, + this.samplePredictInstance, + this.satisfiesPzi, + this.satisfiesPzs, + this.scheduleState, + this.state, + this.statsAnomaliesBaseDirectory, + this.updateTime, }); - GoogleCloudAiplatformV1ListTuningJobsResponse.fromJson(core.Map json_) + GoogleCloudAiplatformV1ModelDeploymentMonitoringJob.fromJson(core.Map json_) : this( - nextPageToken: json_['nextPageToken'] as core.String?, - tuningJobs: (json_['tuningJobs'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1TuningJob.fromJson( - value as core.Map)) + analysisInstanceSchemaUri: + json_['analysisInstanceSchemaUri'] as core.String?, + bigqueryTables: (json_['bigqueryTables'] as core.List?) 
+ ?.map((value) => + GoogleCloudAiplatformV1ModelDeploymentMonitoringBigQueryTable + .fromJson(value as core.Map)) + .toList(), + createTime: json_['createTime'] as core.String?, + displayName: json_['displayName'] as core.String?, + enableMonitoringPipelineLogs: + json_['enableMonitoringPipelineLogs'] as core.bool?, + encryptionSpec: json_.containsKey('encryptionSpec') + ? GoogleCloudAiplatformV1EncryptionSpec.fromJson( + json_['encryptionSpec'] + as core.Map) + : null, + endpoint: json_['endpoint'] as core.String?, + error: json_.containsKey('error') + ? GoogleRpcStatus.fromJson( + json_['error'] as core.Map) + : null, + labels: + (json_['labels'] as core.Map?)?.map( + (key, value) => core.MapEntry( + key, + value as core.String, + ), + ), + latestMonitoringPipelineMetadata: json_ + .containsKey('latestMonitoringPipelineMetadata') + ? GoogleCloudAiplatformV1ModelDeploymentMonitoringJobLatestMonitoringPipelineMetadata + .fromJson(json_['latestMonitoringPipelineMetadata'] + as core.Map) + : null, + logTtl: json_['logTtl'] as core.String?, + loggingSamplingStrategy: json_.containsKey('loggingSamplingStrategy') + ? GoogleCloudAiplatformV1SamplingStrategy.fromJson( + json_['loggingSamplingStrategy'] + as core.Map) + : null, + modelDeploymentMonitoringObjectiveConfigs: (json_[ + 'modelDeploymentMonitoringObjectiveConfigs'] as core.List?) + ?.map((value) => + GoogleCloudAiplatformV1ModelDeploymentMonitoringObjectiveConfig + .fromJson(value as core.Map)) .toList(), + modelDeploymentMonitoringScheduleConfig: json_ + .containsKey('modelDeploymentMonitoringScheduleConfig') + ? GoogleCloudAiplatformV1ModelDeploymentMonitoringScheduleConfig + .fromJson(json_['modelDeploymentMonitoringScheduleConfig'] + as core.Map) + : null, + modelMonitoringAlertConfig: + json_.containsKey('modelMonitoringAlertConfig') + ? GoogleCloudAiplatformV1ModelMonitoringAlertConfig.fromJson( + json_['modelMonitoringAlertConfig'] + as core.Map) + : null, + name: json_['name'] as core.String?, + nextScheduleTime: json_['nextScheduleTime'] as core.String?, + predictInstanceSchemaUri: + json_['predictInstanceSchemaUri'] as core.String?, + samplePredictInstance: json_['samplePredictInstance'], + satisfiesPzi: json_['satisfiesPzi'] as core.bool?, + satisfiesPzs: json_['satisfiesPzs'] as core.bool?, + scheduleState: json_['scheduleState'] as core.String?, + state: json_['state'] as core.String?, + statsAnomaliesBaseDirectory: + json_.containsKey('statsAnomaliesBaseDirectory') + ? 
GoogleCloudAiplatformV1GcsDestination.fromJson( + json_['statsAnomaliesBaseDirectory'] + as core.Map) + : null, + updateTime: json_['updateTime'] as core.String?, ); core.Map toJson() => { - if (nextPageToken != null) 'nextPageToken': nextPageToken!, - if (tuningJobs != null) 'tuningJobs': tuningJobs!, + if (analysisInstanceSchemaUri != null) + 'analysisInstanceSchemaUri': analysisInstanceSchemaUri!, + if (bigqueryTables != null) 'bigqueryTables': bigqueryTables!, + if (createTime != null) 'createTime': createTime!, + if (displayName != null) 'displayName': displayName!, + if (enableMonitoringPipelineLogs != null) + 'enableMonitoringPipelineLogs': enableMonitoringPipelineLogs!, + if (encryptionSpec != null) 'encryptionSpec': encryptionSpec!, + if (endpoint != null) 'endpoint': endpoint!, + if (error != null) 'error': error!, + if (labels != null) 'labels': labels!, + if (latestMonitoringPipelineMetadata != null) + 'latestMonitoringPipelineMetadata': latestMonitoringPipelineMetadata!, + if (logTtl != null) 'logTtl': logTtl!, + if (loggingSamplingStrategy != null) + 'loggingSamplingStrategy': loggingSamplingStrategy!, + if (modelDeploymentMonitoringObjectiveConfigs != null) + 'modelDeploymentMonitoringObjectiveConfigs': + modelDeploymentMonitoringObjectiveConfigs!, + if (modelDeploymentMonitoringScheduleConfig != null) + 'modelDeploymentMonitoringScheduleConfig': + modelDeploymentMonitoringScheduleConfig!, + if (modelMonitoringAlertConfig != null) + 'modelMonitoringAlertConfig': modelMonitoringAlertConfig!, + if (name != null) 'name': name!, + if (nextScheduleTime != null) 'nextScheduleTime': nextScheduleTime!, + if (predictInstanceSchemaUri != null) + 'predictInstanceSchemaUri': predictInstanceSchemaUri!, + if (samplePredictInstance != null) + 'samplePredictInstance': samplePredictInstance!, + if (satisfiesPzi != null) 'satisfiesPzi': satisfiesPzi!, + if (satisfiesPzs != null) 'satisfiesPzs': satisfiesPzs!, + if (scheduleState != null) 'scheduleState': scheduleState!, + if (state != null) 'state': state!, + if (statsAnomaliesBaseDirectory != null) + 'statsAnomaliesBaseDirectory': statsAnomaliesBaseDirectory!, + if (updateTime != null) 'updateTime': updateTime!, }; } -/// Logprobs Result -class GoogleCloudAiplatformV1LogprobsResult { - /// Length = total number of decoding steps. - /// - /// The chosen candidates may or may not be in top_candidates. - core.List? chosenCandidates; +/// All metadata of most recent monitoring pipelines. +class GoogleCloudAiplatformV1ModelDeploymentMonitoringJobLatestMonitoringPipelineMetadata { + /// The time that most recent monitoring pipelines that is related to this + /// run. + core.String? runTime; - /// Length = total number of decoding steps. - core.List? topCandidates; + /// The status of the most recent monitoring pipeline. + GoogleRpcStatus? status; - GoogleCloudAiplatformV1LogprobsResult({ - this.chosenCandidates, - this.topCandidates, + GoogleCloudAiplatformV1ModelDeploymentMonitoringJobLatestMonitoringPipelineMetadata({ + this.runTime, + this.status, }); - GoogleCloudAiplatformV1LogprobsResult.fromJson(core.Map json_) + GoogleCloudAiplatformV1ModelDeploymentMonitoringJobLatestMonitoringPipelineMetadata.fromJson( + core.Map json_) : this( - chosenCandidates: (json_['chosenCandidates'] as core.List?) - ?.map((value) => - GoogleCloudAiplatformV1LogprobsResultCandidate.fromJson( - value as core.Map)) - .toList(), - topCandidates: (json_['topCandidates'] as core.List?) 
- ?.map((value) => - GoogleCloudAiplatformV1LogprobsResultTopCandidates.fromJson( - value as core.Map)) - .toList(), + runTime: json_['runTime'] as core.String?, + status: json_.containsKey('status') + ? GoogleRpcStatus.fromJson( + json_['status'] as core.Map) + : null, ); core.Map toJson() => { - if (chosenCandidates != null) 'chosenCandidates': chosenCandidates!, - if (topCandidates != null) 'topCandidates': topCandidates!, + if (runTime != null) 'runTime': runTime!, + if (status != null) 'status': status!, }; } -/// Candidate for the logprobs token and score. -class GoogleCloudAiplatformV1LogprobsResultCandidate { - /// The candidate's log probability. - core.double? logProbability; - - /// The candidate's token string value. - core.String? token; +/// ModelDeploymentMonitoringObjectiveConfig contains the pair of +/// deployed_model_id to ModelMonitoringObjectiveConfig. +class GoogleCloudAiplatformV1ModelDeploymentMonitoringObjectiveConfig { + /// The DeployedModel ID of the objective config. + core.String? deployedModelId; - /// The candidate's token id value. - core.int? tokenId; + /// The objective config of for the modelmonitoring job of this deployed + /// model. + GoogleCloudAiplatformV1ModelMonitoringObjectiveConfig? objectiveConfig; - GoogleCloudAiplatformV1LogprobsResultCandidate({ - this.logProbability, - this.token, - this.tokenId, + GoogleCloudAiplatformV1ModelDeploymentMonitoringObjectiveConfig({ + this.deployedModelId, + this.objectiveConfig, }); - GoogleCloudAiplatformV1LogprobsResultCandidate.fromJson(core.Map json_) + GoogleCloudAiplatformV1ModelDeploymentMonitoringObjectiveConfig.fromJson( + core.Map json_) : this( - logProbability: (json_['logProbability'] as core.num?)?.toDouble(), - token: json_['token'] as core.String?, - tokenId: json_['tokenId'] as core.int?, + deployedModelId: json_['deployedModelId'] as core.String?, + objectiveConfig: json_.containsKey('objectiveConfig') + ? GoogleCloudAiplatformV1ModelMonitoringObjectiveConfig.fromJson( + json_['objectiveConfig'] + as core.Map) + : null, ); core.Map toJson() => { - if (logProbability != null) 'logProbability': logProbability!, - if (token != null) 'token': token!, - if (tokenId != null) 'tokenId': tokenId!, + if (deployedModelId != null) 'deployedModelId': deployedModelId!, + if (objectiveConfig != null) 'objectiveConfig': objectiveConfig!, }; } -/// Candidates with top log probabilities at each decoding step. -class GoogleCloudAiplatformV1LogprobsResultTopCandidates { - /// Sorted by log probability in descending order. - core.List? candidates; +/// The config for scheduling monitoring job. +class GoogleCloudAiplatformV1ModelDeploymentMonitoringScheduleConfig { + /// The model monitoring job scheduling interval. + /// + /// It will be rounded up to next full hour. This defines how often the + /// monitoring jobs are triggered. + /// + /// Required. + core.String? monitorInterval; - GoogleCloudAiplatformV1LogprobsResultTopCandidates({ - this.candidates, + /// The time window of the prediction data being included in each prediction + /// dataset. + /// + /// This window specifies how long the data should be collected from + /// historical model results for each run. If not set, + /// ModelDeploymentMonitoringScheduleConfig.monitor_interval will be used. + /// e.g. If currently the cutoff time is 2022-01-08 14:30:00 and the + /// monitor_window is set to be 3600, then data from 2022-01-08 13:30:00 to + /// 2022-01-08 14:30:00 will be retrieved and aggregated to calculate the + /// monitoring statistics. 
+ core.String? monitorWindow; + + GoogleCloudAiplatformV1ModelDeploymentMonitoringScheduleConfig({ + this.monitorInterval, + this.monitorWindow, }); - GoogleCloudAiplatformV1LogprobsResultTopCandidates.fromJson(core.Map json_) + GoogleCloudAiplatformV1ModelDeploymentMonitoringScheduleConfig.fromJson( + core.Map json_) : this( - candidates: (json_['candidates'] as core.List?) - ?.map((value) => - GoogleCloudAiplatformV1LogprobsResultCandidate.fromJson( - value as core.Map)) - .toList(), + monitorInterval: json_['monitorInterval'] as core.String?, + monitorWindow: json_['monitorWindow'] as core.String?, ); core.Map toJson() => { - if (candidates != null) 'candidates': candidates!, + if (monitorInterval != null) 'monitorInterval': monitorInterval!, + if (monitorWindow != null) 'monitorWindow': monitorWindow!, }; } -/// Request message for VizierService.LookupStudy. -class GoogleCloudAiplatformV1LookupStudyRequest { - /// The user-defined display name of the Study +/// A collection of metrics calculated by comparing Model's predictions on all +/// of the test data against annotations from the test data. +class GoogleCloudAiplatformV1ModelEvaluation { + /// Points to a YAML file stored on Google Cloud Storage describing + /// EvaluatedDataItemView.predictions, EvaluatedDataItemView.ground_truths, + /// EvaluatedAnnotation.predictions, and EvaluatedAnnotation.ground_truths. /// - /// Required. - core.String? displayName; + /// The schema is defined as an OpenAPI 3.0.2 + /// [Schema Object](https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.2.md#schemaObject). + /// This field is not populated if there are neither EvaluatedDataItemViews + /// nor EvaluatedAnnotations under this ModelEvaluation. + core.String? annotationSchemaUri; - GoogleCloudAiplatformV1LookupStudyRequest({ - this.displayName, - }); + /// Timestamp when this ModelEvaluation was created. + /// + /// Output only. + core.String? createTime; - GoogleCloudAiplatformV1LookupStudyRequest.fromJson(core.Map json_) - : this( - displayName: json_['displayName'] as core.String?, - ); + /// Points to a YAML file stored on Google Cloud Storage describing + /// EvaluatedDataItemView.data_item_payload and + /// EvaluatedAnnotation.data_item_payload. + /// + /// The schema is defined as an OpenAPI 3.0.2 + /// [Schema Object](https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.2.md#schemaObject). + /// This field is not populated if there are neither EvaluatedDataItemViews + /// nor EvaluatedAnnotations under this ModelEvaluation. + core.String? dataItemSchemaUri; - core.Map toJson() => { - if (displayName != null) 'displayName': displayName!, - }; -} + /// The display name of the ModelEvaluation. + core.String? displayName; -/// Specification of a single machine. -class GoogleCloudAiplatformV1MachineSpec { - /// The number of accelerators to attach to the machine. - core.int? acceleratorCount; + /// Describes the values of ExplanationSpec that are used for explaining the + /// predicted values on the evaluated data. + core.List< + GoogleCloudAiplatformV1ModelEvaluationModelEvaluationExplanationSpec>? + explanationSpecs; - /// The type of accelerator(s) that may be attached to the machine as per - /// accelerator_count. + /// The metadata of the ModelEvaluation. /// - /// Immutable. - /// Possible string values are: - /// - "ACCELERATOR_TYPE_UNSPECIFIED" : Unspecified accelerator type, which - /// means no accelerator. 
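To make the monitor_interval / monitor_window interaction described in the schedule config above concrete, here is a small hypothetical sketch. The `'3600s'` values assume the usual JSON encoding of a Duration (seconds with an `s` suffix), and the import path assumes the published `googleapis` Dart package layout.

import 'package:googleapis/aiplatform/v1.dart';

// Sketch only: hourly monitoring runs, each aggregating the most recent hour
// of logged prediction data (mirroring the cutoff-time example above).
final scheduleConfig =
    GoogleCloudAiplatformV1ModelDeploymentMonitoringScheduleConfig(
  // Rounded up to the next full hour by the service.
  monitorInterval: '3600s',
  // Window of historical prediction data aggregated in each run.
  monitorWindow: '3600s',
);

void main() => print(scheduleConfig.toJson());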
- /// - "NVIDIA_TESLA_K80" : Deprecated: Nvidia Tesla K80 GPU has reached end of - /// support, see https://cloud.google.com/compute/docs/eol/k80-eol. - /// - "NVIDIA_TESLA_P100" : Nvidia Tesla P100 GPU. - /// - "NVIDIA_TESLA_V100" : Nvidia Tesla V100 GPU. - /// - "NVIDIA_TESLA_P4" : Nvidia Tesla P4 GPU. - /// - "NVIDIA_TESLA_T4" : Nvidia Tesla T4 GPU. - /// - "NVIDIA_TESLA_A100" : Nvidia Tesla A100 GPU. - /// - "NVIDIA_A100_80GB" : Nvidia A100 80GB GPU. - /// - "NVIDIA_L4" : Nvidia L4 GPU. - /// - "NVIDIA_H100_80GB" : Nvidia H100 80Gb GPU. - /// - "TPU_V2" : TPU v2. - /// - "TPU_V3" : TPU v3. - /// - "TPU_V4_POD" : TPU v4. - /// - "TPU_V5_LITEPOD" : TPU v5. - core.String? acceleratorType; + /// For the ModelEvaluation uploaded from Managed Pipeline, metadata contains + /// a structured value with keys of "pipeline_job_id", + /// "evaluation_dataset_type", "evaluation_dataset_path", + /// "row_based_metrics_path". + /// + /// The values for Object must be JSON objects. It can consist of `num`, + /// `String`, `bool` and `null` as well as `Map` and `List` values. + core.Object? metadata; - /// The type of the machine. + /// Evaluation metrics of the Model. /// - /// See the - /// [list of machine types supported for prediction](https://cloud.google.com/vertex-ai/docs/predictions/configure-compute#machine-types) - /// See the - /// [list of machine types supported for custom training](https://cloud.google.com/vertex-ai/docs/training/configure-compute#machine-types). - /// For DeployedModel this field is optional, and the default value is - /// `n1-standard-2`. For BatchPredictionJob or as part of WorkerPoolSpec this - /// field is required. + /// The schema of the metrics is stored in metrics_schema_uri /// - /// Immutable. - core.String? machineType; + /// The values for Object must be JSON objects. It can consist of `num`, + /// `String`, `bool` and `null` as well as `Map` and `List` values. + core.Object? metrics; - /// Configuration controlling how this resource pool consumes reservation. + /// Points to a YAML file stored on Google Cloud Storage describing the + /// metrics of this ModelEvaluation. /// - /// Optional. Immutable. - GoogleCloudAiplatformV1ReservationAffinity? reservationAffinity; + /// The schema is defined as an OpenAPI 3.0.2 + /// [Schema Object](https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.2.md#schemaObject). + core.String? metricsSchemaUri; - /// The topology of the TPUs. + /// Aggregated explanation metrics for the Model's prediction output over the + /// data this ModelEvaluation uses. /// - /// Corresponds to the TPU topologies available from GKE. (Example: - /// tpu_topology: "2x2x1"). + /// This field is populated only if the Model is evaluated with explanations, + /// and only for AutoML tabular Models. + GoogleCloudAiplatformV1ModelExplanation? modelExplanation; + + /// The resource name of the ModelEvaluation. /// - /// Immutable. - core.String? tpuTopology; + /// Output only. + core.String? name; - GoogleCloudAiplatformV1MachineSpec({ - this.acceleratorCount, - this.acceleratorType, - this.machineType, - this.reservationAffinity, - this.tpuTopology, + /// All possible dimensions of ModelEvaluationSlices. + /// + /// The dimensions can be used as the filter of the + /// ModelService.ListModelEvaluationSlices request, in the form of + /// `slice.dimension = `. + core.List? 
sliceDimensions; + + GoogleCloudAiplatformV1ModelEvaluation({ + this.annotationSchemaUri, + this.createTime, + this.dataItemSchemaUri, + this.displayName, + this.explanationSpecs, + this.metadata, + this.metrics, + this.metricsSchemaUri, + this.modelExplanation, + this.name, + this.sliceDimensions, }); - GoogleCloudAiplatformV1MachineSpec.fromJson(core.Map json_) + GoogleCloudAiplatformV1ModelEvaluation.fromJson(core.Map json_) : this( - acceleratorCount: json_['acceleratorCount'] as core.int?, - acceleratorType: json_['acceleratorType'] as core.String?, - machineType: json_['machineType'] as core.String?, - reservationAffinity: json_.containsKey('reservationAffinity') - ? GoogleCloudAiplatformV1ReservationAffinity.fromJson( - json_['reservationAffinity'] + annotationSchemaUri: json_['annotationSchemaUri'] as core.String?, + createTime: json_['createTime'] as core.String?, + dataItemSchemaUri: json_['dataItemSchemaUri'] as core.String?, + displayName: json_['displayName'] as core.String?, + explanationSpecs: (json_['explanationSpecs'] as core.List?) + ?.map((value) => + GoogleCloudAiplatformV1ModelEvaluationModelEvaluationExplanationSpec + .fromJson(value as core.Map)) + .toList(), + metadata: json_['metadata'], + metrics: json_['metrics'], + metricsSchemaUri: json_['metricsSchemaUri'] as core.String?, + modelExplanation: json_.containsKey('modelExplanation') + ? GoogleCloudAiplatformV1ModelExplanation.fromJson( + json_['modelExplanation'] as core.Map) : null, - tpuTopology: json_['tpuTopology'] as core.String?, + name: json_['name'] as core.String?, + sliceDimensions: (json_['sliceDimensions'] as core.List?) + ?.map((value) => value as core.String) + .toList(), ); core.Map toJson() => { - if (acceleratorCount != null) 'acceleratorCount': acceleratorCount!, - if (acceleratorType != null) 'acceleratorType': acceleratorType!, - if (machineType != null) 'machineType': machineType!, - if (reservationAffinity != null) - 'reservationAffinity': reservationAffinity!, - if (tpuTopology != null) 'tpuTopology': tpuTopology!, + if (annotationSchemaUri != null) + 'annotationSchemaUri': annotationSchemaUri!, + if (createTime != null) 'createTime': createTime!, + if (dataItemSchemaUri != null) 'dataItemSchemaUri': dataItemSchemaUri!, + if (displayName != null) 'displayName': displayName!, + if (explanationSpecs != null) 'explanationSpecs': explanationSpecs!, + if (metadata != null) 'metadata': metadata!, + if (metrics != null) 'metrics': metrics!, + if (metricsSchemaUri != null) 'metricsSchemaUri': metricsSchemaUri!, + if (modelExplanation != null) 'modelExplanation': modelExplanation!, + if (name != null) 'name': name!, + if (sliceDimensions != null) 'sliceDimensions': sliceDimensions!, }; } -/// Manual batch tuning parameters. -class GoogleCloudAiplatformV1ManualBatchTuningParameters { - /// The number of the records (e.g. instances) of the operation given in each - /// batch to a machine replica. - /// - /// Machine type, and size of a single record should be considered when - /// setting this parameter, higher value speeds up the batch operation's - /// execution, but too high value will result in a whole batch not fitting in - /// a machine's memory, and the whole operation will fail. The default value - /// is 64. +class GoogleCloudAiplatformV1ModelEvaluationModelEvaluationExplanationSpec { + /// Explanation spec details. + GoogleCloudAiplatformV1ExplanationSpec? explanationSpec; + + /// Explanation type. /// - /// Immutable. - core.int? 
batchSize; + /// For AutoML Image Classification models, possible values are: * + /// `image-integrated-gradients` * `image-xrai` + core.String? explanationType; - GoogleCloudAiplatformV1ManualBatchTuningParameters({ - this.batchSize, + GoogleCloudAiplatformV1ModelEvaluationModelEvaluationExplanationSpec({ + this.explanationSpec, + this.explanationType, }); - GoogleCloudAiplatformV1ManualBatchTuningParameters.fromJson(core.Map json_) + GoogleCloudAiplatformV1ModelEvaluationModelEvaluationExplanationSpec.fromJson( + core.Map json_) : this( - batchSize: json_['batchSize'] as core.int?, + explanationSpec: json_.containsKey('explanationSpec') + ? GoogleCloudAiplatformV1ExplanationSpec.fromJson( + json_['explanationSpec'] + as core.Map) + : null, + explanationType: json_['explanationType'] as core.String?, ); core.Map toJson() => { - if (batchSize != null) 'batchSize': batchSize!, + if (explanationSpec != null) 'explanationSpec': explanationSpec!, + if (explanationType != null) 'explanationType': explanationType!, }; } -/// A message representing a Measurement of a Trial. -/// -/// A Measurement contains the Metrics got by executing a Trial using suggested -/// hyperparameter values. -class GoogleCloudAiplatformV1Measurement { - /// Time that the Trial has been running at the point of this Measurement. +/// A collection of metrics calculated by comparing Model's predictions on a +/// slice of the test data against ground truth annotations. +class GoogleCloudAiplatformV1ModelEvaluationSlice { + /// Timestamp when this ModelEvaluationSlice was created. /// /// Output only. - core.String? elapsedDuration; + core.String? createTime; - /// A list of metrics got by evaluating the objective functions using - /// suggested Parameter values. + /// Sliced evaluation metrics of the Model. + /// + /// The schema of the metrics is stored in metrics_schema_uri /// /// Output only. - core.List? metrics; + /// + /// The values for Object must be JSON objects. It can consist of `num`, + /// `String`, `bool` and `null` as well as `Map` and `List` values. + core.Object? metrics; - /// The number of steps the machine learning model has been trained for. + /// Points to a YAML file stored on Google Cloud Storage describing the + /// metrics of this ModelEvaluationSlice. /// - /// Must be non-negative. + /// The schema is defined as an OpenAPI 3.0.2 + /// [Schema Object](https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.2.md#schemaObject). /// /// Output only. - core.String? stepCount; + core.String? metricsSchemaUri; - GoogleCloudAiplatformV1Measurement({ - this.elapsedDuration, + /// Aggregated explanation metrics for the Model's prediction output over the + /// data this ModelEvaluation uses. + /// + /// This field is populated only if the Model is evaluated with explanations, + /// and only for tabular Models. + /// + /// Output only. + GoogleCloudAiplatformV1ModelExplanation? modelExplanation; + + /// The resource name of the ModelEvaluationSlice. + /// + /// Output only. + core.String? name; + + /// The slice of the test data that is used to evaluate the Model. + /// + /// Output only. + GoogleCloudAiplatformV1ModelEvaluationSliceSlice? 
slice; + + GoogleCloudAiplatformV1ModelEvaluationSlice({ + this.createTime, this.metrics, - this.stepCount, + this.metricsSchemaUri, + this.modelExplanation, + this.name, + this.slice, }); - GoogleCloudAiplatformV1Measurement.fromJson(core.Map json_) + GoogleCloudAiplatformV1ModelEvaluationSlice.fromJson(core.Map json_) : this( - elapsedDuration: json_['elapsedDuration'] as core.String?, - metrics: (json_['metrics'] as core.List?) - ?.map((value) => - GoogleCloudAiplatformV1MeasurementMetric.fromJson( - value as core.Map)) - .toList(), - stepCount: json_['stepCount'] as core.String?, + createTime: json_['createTime'] as core.String?, + metrics: json_['metrics'], + metricsSchemaUri: json_['metricsSchemaUri'] as core.String?, + modelExplanation: json_.containsKey('modelExplanation') + ? GoogleCloudAiplatformV1ModelExplanation.fromJson( + json_['modelExplanation'] + as core.Map) + : null, + name: json_['name'] as core.String?, + slice: json_.containsKey('slice') + ? GoogleCloudAiplatformV1ModelEvaluationSliceSlice.fromJson( + json_['slice'] as core.Map) + : null, ); core.Map toJson() => { - if (elapsedDuration != null) 'elapsedDuration': elapsedDuration!, + if (createTime != null) 'createTime': createTime!, if (metrics != null) 'metrics': metrics!, - if (stepCount != null) 'stepCount': stepCount!, + if (metricsSchemaUri != null) 'metricsSchemaUri': metricsSchemaUri!, + if (modelExplanation != null) 'modelExplanation': modelExplanation!, + if (name != null) 'name': name!, + if (slice != null) 'slice': slice!, }; } -/// A message representing a metric in the measurement. -class GoogleCloudAiplatformV1MeasurementMetric { - /// The ID of the Metric. +/// Definition of a slice. +class GoogleCloudAiplatformV1ModelEvaluationSliceSlice { + /// The dimension of the slice. /// - /// The Metric should be defined in StudySpec's Metrics. + /// Well-known dimensions are: * `annotationSpec`: This slice is on the test + /// data that has either ground truth or prediction with + /// AnnotationSpec.display_name equals to value. * `slice`: This slice is a + /// user customized slice defined by its SliceSpec. /// /// Output only. - core.String? metricId; + core.String? dimension; - /// The value for this metric. + /// Specification for how the data was sliced. /// /// Output only. - core.double? value; + GoogleCloudAiplatformV1ModelEvaluationSliceSliceSliceSpec? sliceSpec; - GoogleCloudAiplatformV1MeasurementMetric({ - this.metricId, + /// The value of the dimension in this slice. + /// + /// Output only. + core.String? value; + + GoogleCloudAiplatformV1ModelEvaluationSliceSlice({ + this.dimension, + this.sliceSpec, this.value, }); - GoogleCloudAiplatformV1MeasurementMetric.fromJson(core.Map json_) + GoogleCloudAiplatformV1ModelEvaluationSliceSlice.fromJson(core.Map json_) : this( - metricId: json_['metricId'] as core.String?, - value: (json_['value'] as core.num?)?.toDouble(), + dimension: json_['dimension'] as core.String?, + sliceSpec: json_.containsKey('sliceSpec') + ? GoogleCloudAiplatformV1ModelEvaluationSliceSliceSliceSpec + .fromJson( + json_['sliceSpec'] as core.Map) + : null, + value: json_['value'] as core.String?, ); core.Map toJson() => { - if (metricId != null) 'metricId': metricId!, + if (dimension != null) 'dimension': dimension!, + if (sliceSpec != null) 'sliceSpec': sliceSpec!, if (value != null) 'value': value!, }; } -/// Request message for ModelService.MergeVersionAliases. -class GoogleCloudAiplatformV1MergeVersionAliasesRequest { - /// The set of version aliases to merge. 
- /// - /// The alias should be at most 128 characters, and match - /// `a-z{0,126}[a-z-0-9]`. Add the `-` prefix to an alias means removing that - /// alias from the version. `-` is NOT counted in the 128 characters. Example: - /// `-golden` means removing the `golden` alias from the version. There is NO - /// ordering in aliases, which means 1) The aliases returned from GetModel API - /// might not have the exactly same order from this MergeVersionAliases API. - /// 2) Adding and deleting the same alias in the request is not recommended, - /// and the 2 operations will be cancelled out. +/// Specification for how the data should be sliced. +class GoogleCloudAiplatformV1ModelEvaluationSliceSliceSliceSpec { + /// Mapping configuration for this SliceSpec. /// - /// Required. - core.List? versionAliases; + /// The key is the name of the feature. By default, the key will be prefixed + /// by "instance" as a dictionary prefix for Vertex Batch Predictions output + /// format. + core.Map? + configs; - GoogleCloudAiplatformV1MergeVersionAliasesRequest({ - this.versionAliases, + GoogleCloudAiplatformV1ModelEvaluationSliceSliceSliceSpec({ + this.configs, }); - GoogleCloudAiplatformV1MergeVersionAliasesRequest.fromJson(core.Map json_) + GoogleCloudAiplatformV1ModelEvaluationSliceSliceSliceSpec.fromJson( + core.Map json_) : this( - versionAliases: (json_['versionAliases'] as core.List?) - ?.map((value) => value as core.String) - .toList(), + configs: + (json_['configs'] as core.Map?)?.map( + (key, value) => core.MapEntry( + key, + GoogleCloudAiplatformV1ModelEvaluationSliceSliceSliceSpecSliceConfig + .fromJson(value as core.Map), + ), + ), ); core.Map toJson() => { - if (versionAliases != null) 'versionAliases': versionAliases!, + if (configs != null) 'configs': configs!, }; } -/// Instance of a general MetadataSchema. -class GoogleCloudAiplatformV1MetadataSchema { - /// Timestamp when this MetadataSchema was created. - /// - /// Output only. - core.String? createTime; - - /// Description of the Metadata Schema - core.String? description; - - /// The resource name of the MetadataSchema. - /// - /// Output only. - core.String? name; - - /// The raw YAML string representation of the MetadataSchema. - /// - /// The combination of \[MetadataSchema.version\] and the schema name given by - /// `title` in \[MetadataSchema.schema\] must be unique within a - /// MetadataStore. The schema is defined as an OpenAPI 3.0.2 - /// [MetadataSchema Object](https://github.com/OAI/OpenAPI-Specification/blob/master/versions/3.0.2.md#schemaObject) - /// - /// Required. - core.String? schema; - - /// The type of the MetadataSchema. - /// - /// This is a property that identifies which metadata types will use the - /// MetadataSchema. - /// Possible string values are: - /// - "METADATA_SCHEMA_TYPE_UNSPECIFIED" : Unspecified type for the - /// MetadataSchema. - /// - "ARTIFACT_TYPE" : A type indicating that the MetadataSchema will be used - /// by Artifacts. - /// - "EXECUTION_TYPE" : A typee indicating that the MetadataSchema will be - /// used by Executions. - /// - "CONTEXT_TYPE" : A state indicating that the MetadataSchema will be used - /// by Contexts. - core.String? schemaType; +/// A range of values for slice(s). +/// +/// `low` is inclusive, `high` is exclusive. +class GoogleCloudAiplatformV1ModelEvaluationSliceSliceSliceSpecRange { + /// Exclusive high value for the range. + core.double? high; - /// The version of the MetadataSchema. 
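The Range type introduced here (inclusive `low`, exclusive `high`) is easiest to read alongside a concrete slice configuration. The sketch below is hypothetical and mirrors the zip_code range example given in the SliceConfig documentation further down; the import path assumes the published `googleapis` Dart package layout.

import 'package:googleapis/aiplatform/v1.dart';

// Sketch only: one slice over rows whose zip_code is in [12345, 20000),
// i.e. low is inclusive and high is exclusive.
final sliceSpec = GoogleCloudAiplatformV1ModelEvaluationSliceSliceSliceSpec(
  configs: {
    'zip_code':
        GoogleCloudAiplatformV1ModelEvaluationSliceSliceSliceSpecSliceConfig(
      range: GoogleCloudAiplatformV1ModelEvaluationSliceSliceSliceSpecRange(
        low: 12345.0, // inclusive
        high: 20000.0, // exclusive
      ),
    ),
  },
);

void main() => print(sliceSpec.toJson());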
- /// - /// The version's format must match the following regular expression: - /// `^[0-9]+.+.+$`, which would allow to order/compare different versions. - /// Example: 1.0.0, 1.0.1, etc. - core.String? schemaVersion; + /// Inclusive low value for the range. + core.double? low; - GoogleCloudAiplatformV1MetadataSchema({ - this.createTime, - this.description, - this.name, - this.schema, - this.schemaType, - this.schemaVersion, + GoogleCloudAiplatformV1ModelEvaluationSliceSliceSliceSpecRange({ + this.high, + this.low, }); - GoogleCloudAiplatformV1MetadataSchema.fromJson(core.Map json_) - : this( - createTime: json_['createTime'] as core.String?, - description: json_['description'] as core.String?, - name: json_['name'] as core.String?, - schema: json_['schema'] as core.String?, - schemaType: json_['schemaType'] as core.String?, - schemaVersion: json_['schemaVersion'] as core.String?, - ); - - core.Map toJson() => { - if (createTime != null) 'createTime': createTime!, - if (description != null) 'description': description!, - if (name != null) 'name': name!, - if (schema != null) 'schema': schema!, - if (schemaType != null) 'schemaType': schemaType!, - if (schemaVersion != null) 'schemaVersion': schemaVersion!, - }; -} - -/// Instance of a metadata store. -/// -/// Contains a set of metadata that can be queried. -class GoogleCloudAiplatformV1MetadataStore { - /// Timestamp when this MetadataStore was created. - /// - /// Output only. - core.String? createTime; - - /// Dataplex integration settings. - /// - /// Optional. - GoogleCloudAiplatformV1MetadataStoreDataplexConfig? dataplexConfig; - - /// Description of the MetadataStore. - core.String? description; + GoogleCloudAiplatformV1ModelEvaluationSliceSliceSliceSpecRange.fromJson( + core.Map json_) + : this( + high: (json_['high'] as core.num?)?.toDouble(), + low: (json_['low'] as core.num?)?.toDouble(), + ); - /// Customer-managed encryption key spec for a Metadata Store. - /// - /// If set, this Metadata Store and all sub-resources of this Metadata Store - /// are secured using this key. - GoogleCloudAiplatformV1EncryptionSpec? encryptionSpec; + core.Map toJson() => { + if (high != null) 'high': high!, + if (low != null) 'low': low!, + }; +} - /// The resource name of the MetadataStore instance. +/// Specification message containing the config for this SliceSpec. +/// +/// When `kind` is selected as `value` and/or `range`, only a single slice will +/// be computed. When `all_values` is present, a separate slice will be computed +/// for each possible label/value for the corresponding key in `config`. +/// Examples, with feature zip_code with values 12345, 23334, 88888 and feature +/// country with values "US", "Canada", "Mexico" in the dataset: Example 1: { +/// "zip_code": { "value": { "float_value": 12345.0 } } } A single slice for any +/// data with zip_code 12345 in the dataset. Example 2: { "zip_code": { "range": +/// { "low": 12345, "high": 20000 } } } A single slice containing data where the +/// zip_codes between 12345 and 20000 For this example, data with the zip_code +/// of 12345 will be in this slice. Example 3: { "zip_code": { "range": { "low": +/// 10000, "high": 20000 } }, "country": { "value": { "string_value": "US" } } } +/// A single slice containing data where the zip_codes between 10000 and 20000 +/// has the country "US". For this example, data with the zip_code of 12345 and +/// country "US" will be in this slice. 
Example 4: { "country": {"all_values": { +/// "value": true } } } Three slices are computed, one for each unique country +/// in the dataset. Example 5: { "country": { "all_values": { "value": true } }, +/// "zip_code": { "value": { "float_value": 12345.0 } } } Three slices are +/// computed, one for each unique country in the dataset where the zip_code is +/// also 12345. For this example, data with zip_code 12345 and country "US" will +/// be in one slice, zip_code 12345 and country "Canada" in another slice, and +/// zip_code 12345 and country "Mexico" in another slice, totaling 3 slices. +class GoogleCloudAiplatformV1ModelEvaluationSliceSliceSliceSpecSliceConfig { + /// If all_values is set to true, then all possible labels of the keyed + /// feature will have another slice computed. /// - /// Output only. - core.String? name; + /// Example: `{"all_values":{"value":true}}` + core.bool? allValues; - /// State information of the MetadataStore. + /// A range of values for a numerical feature. /// - /// Output only. - GoogleCloudAiplatformV1MetadataStoreMetadataStoreState? state; + /// Example: `{"range":{"low":10000.0,"high":50000.0}}` will capture 12345 and + /// 23334 in the slice. + GoogleCloudAiplatformV1ModelEvaluationSliceSliceSliceSpecRange? range; - /// Timestamp when this MetadataStore was last updated. + /// A unique specific value for a given feature. /// - /// Output only. - core.String? updateTime; + /// Example: `{ "value": { "string_value": "12345" } }` + GoogleCloudAiplatformV1ModelEvaluationSliceSliceSliceSpecValue? value; - GoogleCloudAiplatformV1MetadataStore({ - this.createTime, - this.dataplexConfig, - this.description, - this.encryptionSpec, - this.name, - this.state, - this.updateTime, + GoogleCloudAiplatformV1ModelEvaluationSliceSliceSliceSpecSliceConfig({ + this.allValues, + this.range, + this.value, }); - GoogleCloudAiplatformV1MetadataStore.fromJson(core.Map json_) + GoogleCloudAiplatformV1ModelEvaluationSliceSliceSliceSpecSliceConfig.fromJson( + core.Map json_) : this( - createTime: json_['createTime'] as core.String?, - dataplexConfig: json_.containsKey('dataplexConfig') - ? GoogleCloudAiplatformV1MetadataStoreDataplexConfig.fromJson( - json_['dataplexConfig'] - as core.Map) - : null, - description: json_['description'] as core.String?, - encryptionSpec: json_.containsKey('encryptionSpec') - ? GoogleCloudAiplatformV1EncryptionSpec.fromJson( - json_['encryptionSpec'] - as core.Map) + allValues: json_['allValues'] as core.bool?, + range: json_.containsKey('range') + ? GoogleCloudAiplatformV1ModelEvaluationSliceSliceSliceSpecRange + .fromJson( + json_['range'] as core.Map) : null, - name: json_['name'] as core.String?, - state: json_.containsKey('state') - ? GoogleCloudAiplatformV1MetadataStoreMetadataStoreState.fromJson( - json_['state'] as core.Map) + value: json_.containsKey('value') + ? 
GoogleCloudAiplatformV1ModelEvaluationSliceSliceSliceSpecValue + .fromJson( + json_['value'] as core.Map) : null, - updateTime: json_['updateTime'] as core.String?, ); core.Map toJson() => { - if (createTime != null) 'createTime': createTime!, - if (dataplexConfig != null) 'dataplexConfig': dataplexConfig!, - if (description != null) 'description': description!, - if (encryptionSpec != null) 'encryptionSpec': encryptionSpec!, - if (name != null) 'name': name!, - if (state != null) 'state': state!, - if (updateTime != null) 'updateTime': updateTime!, + if (allValues != null) 'allValues': allValues!, + if (range != null) 'range': range!, + if (value != null) 'value': value!, }; } -/// Represents Dataplex integration settings. -class GoogleCloudAiplatformV1MetadataStoreDataplexConfig { - /// Whether or not Data Lineage synchronization is enabled for Vertex - /// Pipelines. - /// - /// Optional. - core.bool? enabledPipelinesLineage; +/// Single value that supports strings and floats. +class GoogleCloudAiplatformV1ModelEvaluationSliceSliceSliceSpecValue { + /// Float type. + core.double? floatValue; - GoogleCloudAiplatformV1MetadataStoreDataplexConfig({ - this.enabledPipelinesLineage, + /// String type. + core.String? stringValue; + + GoogleCloudAiplatformV1ModelEvaluationSliceSliceSliceSpecValue({ + this.floatValue, + this.stringValue, }); - GoogleCloudAiplatformV1MetadataStoreDataplexConfig.fromJson(core.Map json_) + GoogleCloudAiplatformV1ModelEvaluationSliceSliceSliceSpecValue.fromJson( + core.Map json_) : this( - enabledPipelinesLineage: - json_['enabledPipelinesLineage'] as core.bool?, + floatValue: (json_['floatValue'] as core.num?)?.toDouble(), + stringValue: json_['stringValue'] as core.String?, ); core.Map toJson() => { - if (enabledPipelinesLineage != null) - 'enabledPipelinesLineage': enabledPipelinesLineage!, + if (floatValue != null) 'floatValue': floatValue!, + if (stringValue != null) 'stringValue': stringValue!, }; } -/// Represents state information for a MetadataStore. -class GoogleCloudAiplatformV1MetadataStoreMetadataStoreState { - /// The disk utilization of the MetadataStore in bytes. - core.String? diskUtilizationBytes; +/// Aggregated explanation metrics for a Model over a set of instances. +class GoogleCloudAiplatformV1ModelExplanation { + /// Aggregated attributions explaining the Model's prediction outputs over the + /// set of instances. + /// + /// The attributions are grouped by outputs. For Models that predict only one + /// output, such as regression Models that predict only one score, there is + /// only one attibution that explains the predicted output. For Models that + /// predict multiple outputs, such as multiclass Models that predict multiple + /// classes, each element explains one specific item. Attribution.output_index + /// can be used to identify which output this attribution is explaining. The + /// baselineOutputValue, instanceOutputValue and featureAttributions fields + /// are averaged over the test data. NOTE: Currently AutoML tabular + /// classification Models produce only one attribution, which averages + /// attributions over all the classes it predicts. + /// Attribution.approximation_error is not populated. + /// + /// Output only. + core.List? 
meanAttributions; - GoogleCloudAiplatformV1MetadataStoreMetadataStoreState({ - this.diskUtilizationBytes, + GoogleCloudAiplatformV1ModelExplanation({ + this.meanAttributions, }); - GoogleCloudAiplatformV1MetadataStoreMetadataStoreState.fromJson( - core.Map json_) + GoogleCloudAiplatformV1ModelExplanation.fromJson(core.Map json_) : this( - diskUtilizationBytes: json_['diskUtilizationBytes'] as core.String?, + meanAttributions: (json_['meanAttributions'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1Attribution.fromJson( + value as core.Map)) + .toList(), ); core.Map toJson() => { - if (diskUtilizationBytes != null) - 'diskUtilizationBytes': diskUtilizationBytes!, + if (meanAttributions != null) 'meanAttributions': meanAttributions!, }; } -/// Represents one resource that exists in automl.googleapis.com, -/// datalabeling.googleapis.com or ml.googleapis.com. -class GoogleCloudAiplatformV1MigratableResource { - /// Represents one Dataset in automl.googleapis.com. +/// Represents export format supported by the Model. +/// +/// All formats export to Google Cloud Storage. +class GoogleCloudAiplatformV1ModelExportFormat { + /// The content of this Model that may be exported. /// /// Output only. - GoogleCloudAiplatformV1MigratableResourceAutomlDataset? automlDataset; + core.List? exportableContents; - /// Represents one Model in automl.googleapis.com. + /// The ID of the export format. /// - /// Output only. - GoogleCloudAiplatformV1MigratableResourceAutomlModel? automlModel; - - /// Represents one Dataset in datalabeling.googleapis.com. + /// The possible format IDs are: * `tflite` Used for Android mobile devices. * + /// `edgetpu-tflite` Used for [Edge TPU](https://cloud.google.com/edge-tpu/) + /// devices. * `tf-saved-model` A tensorflow model in SavedModel format. * + /// `tf-js` A [TensorFlow.js](https://www.tensorflow.org/js) model that can be + /// used in the browser and in Node.js using JavaScript. * `core-ml` Used for + /// iOS mobile devices. * `custom-trained` A Model that was uploaded or + /// trained by custom code. /// /// Output only. - GoogleCloudAiplatformV1MigratableResourceDataLabelingDataset? - dataLabelingDataset; + core.String? id; - /// Timestamp when the last migration attempt on this MigratableResource - /// started. - /// - /// Will not be set if there's no migration attempt on this - /// MigratableResource. - /// - /// Output only. - core.String? lastMigrateTime; + GoogleCloudAiplatformV1ModelExportFormat({ + this.exportableContents, + this.id, + }); - /// Timestamp when this MigratableResource was last updated. - /// - /// Output only. - core.String? lastUpdateTime; + GoogleCloudAiplatformV1ModelExportFormat.fromJson(core.Map json_) + : this( + exportableContents: (json_['exportableContents'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + id: json_['id'] as core.String?, + ); - /// Represents one Version in ml.googleapis.com. + core.Map toJson() => { + if (exportableContents != null) + 'exportableContents': exportableContents!, + if (id != null) 'id': id!, + }; +} + +/// Contains information about the source of the models generated from Model +/// Garden. +class GoogleCloudAiplatformV1ModelGardenSource { + /// The model garden source model resource name. /// - /// Output only. - GoogleCloudAiplatformV1MigratableResourceMlEngineModelVersion? - mlEngineModelVersion; + /// Required. + core.String? 
publicModelName; - GoogleCloudAiplatformV1MigratableResource({ - this.automlDataset, - this.automlModel, - this.dataLabelingDataset, - this.lastMigrateTime, - this.lastUpdateTime, - this.mlEngineModelVersion, + GoogleCloudAiplatformV1ModelGardenSource({ + this.publicModelName, }); - GoogleCloudAiplatformV1MigratableResource.fromJson(core.Map json_) + GoogleCloudAiplatformV1ModelGardenSource.fromJson(core.Map json_) : this( - automlDataset: json_.containsKey('automlDataset') - ? GoogleCloudAiplatformV1MigratableResourceAutomlDataset.fromJson( - json_['automlDataset'] as core.Map) - : null, - automlModel: json_.containsKey('automlModel') - ? GoogleCloudAiplatformV1MigratableResourceAutomlModel.fromJson( - json_['automlModel'] as core.Map) - : null, - dataLabelingDataset: json_.containsKey('dataLabelingDataset') - ? GoogleCloudAiplatformV1MigratableResourceDataLabelingDataset - .fromJson(json_['dataLabelingDataset'] - as core.Map) - : null, - lastMigrateTime: json_['lastMigrateTime'] as core.String?, - lastUpdateTime: json_['lastUpdateTime'] as core.String?, - mlEngineModelVersion: json_.containsKey('mlEngineModelVersion') - ? GoogleCloudAiplatformV1MigratableResourceMlEngineModelVersion - .fromJson(json_['mlEngineModelVersion'] - as core.Map) - : null, + publicModelName: json_['publicModelName'] as core.String?, ); core.Map toJson() => { - if (automlDataset != null) 'automlDataset': automlDataset!, - if (automlModel != null) 'automlModel': automlModel!, - if (dataLabelingDataset != null) - 'dataLabelingDataset': dataLabelingDataset!, - if (lastMigrateTime != null) 'lastMigrateTime': lastMigrateTime!, - if (lastUpdateTime != null) 'lastUpdateTime': lastUpdateTime!, - if (mlEngineModelVersion != null) - 'mlEngineModelVersion': mlEngineModelVersion!, + if (publicModelName != null) 'publicModelName': publicModelName!, }; } -/// Represents one Dataset in automl.googleapis.com. -class GoogleCloudAiplatformV1MigratableResourceAutomlDataset { - /// Full resource name of automl Dataset. +/// The alert config for model monitoring. +class GoogleCloudAiplatformV1ModelMonitoringAlertConfig { + /// Email alert config. + GoogleCloudAiplatformV1ModelMonitoringAlertConfigEmailAlertConfig? + emailAlertConfig; + + /// Dump the anomalies to Cloud Logging. /// - /// Format: `projects/{project}/locations/{location}/datasets/{dataset}`. - core.String? dataset; + /// The anomalies will be put to json payload encoded from proto + /// ModelMonitoringStatsAnomalies. This can be further synced to Pub/Sub or + /// any other services supported by Cloud Logging. + core.bool? enableLogging; - /// The Dataset's display name in automl.googleapis.com. - core.String? datasetDisplayName; + /// Resource names of the NotificationChannels to send alert. + /// + /// Must be of the format `projects//notificationChannels/` + core.List? notificationChannels; - GoogleCloudAiplatformV1MigratableResourceAutomlDataset({ - this.dataset, - this.datasetDisplayName, + GoogleCloudAiplatformV1ModelMonitoringAlertConfig({ + this.emailAlertConfig, + this.enableLogging, + this.notificationChannels, }); - GoogleCloudAiplatformV1MigratableResourceAutomlDataset.fromJson( - core.Map json_) + GoogleCloudAiplatformV1ModelMonitoringAlertConfig.fromJson(core.Map json_) : this( - dataset: json_['dataset'] as core.String?, - datasetDisplayName: json_['datasetDisplayName'] as core.String?, + emailAlertConfig: json_.containsKey('emailAlertConfig') + ? 
GoogleCloudAiplatformV1ModelMonitoringAlertConfigEmailAlertConfig + .fromJson(json_['emailAlertConfig'] + as core.Map) + : null, + enableLogging: json_['enableLogging'] as core.bool?, + notificationChannels: (json_['notificationChannels'] as core.List?) + ?.map((value) => value as core.String) + .toList(), ); core.Map toJson() => { - if (dataset != null) 'dataset': dataset!, - if (datasetDisplayName != null) - 'datasetDisplayName': datasetDisplayName!, + if (emailAlertConfig != null) 'emailAlertConfig': emailAlertConfig!, + if (enableLogging != null) 'enableLogging': enableLogging!, + if (notificationChannels != null) + 'notificationChannels': notificationChannels!, }; } -/// Represents one Model in automl.googleapis.com. -class GoogleCloudAiplatformV1MigratableResourceAutomlModel { - /// Full resource name of automl Model. - /// - /// Format: `projects/{project}/locations/{location}/models/{model}`. - core.String? model; - - /// The Model's display name in automl.googleapis.com. - core.String? modelDisplayName; +/// The config for email alert. +class GoogleCloudAiplatformV1ModelMonitoringAlertConfigEmailAlertConfig { + /// The email addresses to send the alert. + core.List? userEmails; - GoogleCloudAiplatformV1MigratableResourceAutomlModel({ - this.model, - this.modelDisplayName, + GoogleCloudAiplatformV1ModelMonitoringAlertConfigEmailAlertConfig({ + this.userEmails, }); - GoogleCloudAiplatformV1MigratableResourceAutomlModel.fromJson(core.Map json_) + GoogleCloudAiplatformV1ModelMonitoringAlertConfigEmailAlertConfig.fromJson( + core.Map json_) : this( - model: json_['model'] as core.String?, - modelDisplayName: json_['modelDisplayName'] as core.String?, + userEmails: (json_['userEmails'] as core.List?) + ?.map((value) => value as core.String) + .toList(), ); core.Map toJson() => { - if (model != null) 'model': model!, - if (modelDisplayName != null) 'modelDisplayName': modelDisplayName!, + if (userEmails != null) 'userEmails': userEmails!, }; } -/// Represents one Dataset in datalabeling.googleapis.com. -class GoogleCloudAiplatformV1MigratableResourceDataLabelingDataset { - /// The migratable AnnotatedDataset in datalabeling.googleapis.com belongs to - /// the data labeling Dataset. - core.List< - GoogleCloudAiplatformV1MigratableResourceDataLabelingDatasetDataLabelingAnnotatedDataset>? - dataLabelingAnnotatedDatasets; +/// The objective configuration for model monitoring, including the information +/// needed to detect anomalies for one particular model. +class GoogleCloudAiplatformV1ModelMonitoringObjectiveConfig { + /// The config for integrating with Vertex Explainable AI. + GoogleCloudAiplatformV1ModelMonitoringObjectiveConfigExplanationConfig? + explanationConfig; - /// Full resource name of data labeling Dataset. + /// The config for drift of prediction data. + GoogleCloudAiplatformV1ModelMonitoringObjectiveConfigPredictionDriftDetectionConfig? + predictionDriftDetectionConfig; + + /// Training dataset for models. /// - /// Format: `projects/{project}/datasets/{dataset}`. - core.String? dataset; + /// This field has to be set only if TrainingPredictionSkewDetectionConfig is + /// specified. + GoogleCloudAiplatformV1ModelMonitoringObjectiveConfigTrainingDataset? + trainingDataset; - /// The Dataset's display name in datalabeling.googleapis.com. - core.String? datasetDisplayName; + /// The config for skew between training data and prediction data. + GoogleCloudAiplatformV1ModelMonitoringObjectiveConfigTrainingPredictionSkewDetectionConfig? 
+ trainingPredictionSkewDetectionConfig; - GoogleCloudAiplatformV1MigratableResourceDataLabelingDataset({ - this.dataLabelingAnnotatedDatasets, - this.dataset, - this.datasetDisplayName, + GoogleCloudAiplatformV1ModelMonitoringObjectiveConfig({ + this.explanationConfig, + this.predictionDriftDetectionConfig, + this.trainingDataset, + this.trainingPredictionSkewDetectionConfig, }); - GoogleCloudAiplatformV1MigratableResourceDataLabelingDataset.fromJson( - core.Map json_) + GoogleCloudAiplatformV1ModelMonitoringObjectiveConfig.fromJson(core.Map json_) : this( - dataLabelingAnnotatedDatasets: (json_['dataLabelingAnnotatedDatasets'] - as core.List?) - ?.map((value) => - GoogleCloudAiplatformV1MigratableResourceDataLabelingDatasetDataLabelingAnnotatedDataset - .fromJson(value as core.Map)) - .toList(), - dataset: json_['dataset'] as core.String?, - datasetDisplayName: json_['datasetDisplayName'] as core.String?, + explanationConfig: json_.containsKey('explanationConfig') + ? GoogleCloudAiplatformV1ModelMonitoringObjectiveConfigExplanationConfig + .fromJson(json_['explanationConfig'] + as core.Map) + : null, + predictionDriftDetectionConfig: json_ + .containsKey('predictionDriftDetectionConfig') + ? GoogleCloudAiplatformV1ModelMonitoringObjectiveConfigPredictionDriftDetectionConfig + .fromJson(json_['predictionDriftDetectionConfig'] + as core.Map) + : null, + trainingDataset: json_.containsKey('trainingDataset') + ? GoogleCloudAiplatformV1ModelMonitoringObjectiveConfigTrainingDataset + .fromJson(json_['trainingDataset'] + as core.Map) + : null, + trainingPredictionSkewDetectionConfig: json_ + .containsKey('trainingPredictionSkewDetectionConfig') + ? GoogleCloudAiplatformV1ModelMonitoringObjectiveConfigTrainingPredictionSkewDetectionConfig + .fromJson(json_['trainingPredictionSkewDetectionConfig'] + as core.Map) + : null, ); core.Map toJson() => { - if (dataLabelingAnnotatedDatasets != null) - 'dataLabelingAnnotatedDatasets': dataLabelingAnnotatedDatasets!, - if (dataset != null) 'dataset': dataset!, - if (datasetDisplayName != null) - 'datasetDisplayName': datasetDisplayName!, + if (explanationConfig != null) 'explanationConfig': explanationConfig!, + if (predictionDriftDetectionConfig != null) + 'predictionDriftDetectionConfig': predictionDriftDetectionConfig!, + if (trainingDataset != null) 'trainingDataset': trainingDataset!, + if (trainingPredictionSkewDetectionConfig != null) + 'trainingPredictionSkewDetectionConfig': + trainingPredictionSkewDetectionConfig!, }; } -/// Represents one AnnotatedDataset in datalabeling.googleapis.com. -class GoogleCloudAiplatformV1MigratableResourceDataLabelingDatasetDataLabelingAnnotatedDataset { - /// Full resource name of data labeling AnnotatedDataset. +/// The config for integrating with Vertex Explainable AI. +/// +/// Only applicable if the Model has explanation_spec populated. +class GoogleCloudAiplatformV1ModelMonitoringObjectiveConfigExplanationConfig { + /// If want to analyze the Vertex Explainable AI feature attribute scores or + /// not. /// - /// Format: - /// `projects/{project}/datasets/{dataset}/annotatedDatasets/{annotated_dataset}`. - core.String? annotatedDataset; + /// If set to true, Vertex AI will log the feature attributions from explain + /// response and do the skew/drift detection for them. + core.bool? enableFeatureAttributes; - /// The AnnotatedDataset's display name in datalabeling.googleapis.com. - core.String? annotatedDatasetDisplayName; + /// Predictions generated by the BatchPredictionJob using baseline dataset. 
+ GoogleCloudAiplatformV1ModelMonitoringObjectiveConfigExplanationConfigExplanationBaseline? + explanationBaseline; - GoogleCloudAiplatformV1MigratableResourceDataLabelingDatasetDataLabelingAnnotatedDataset({ - this.annotatedDataset, - this.annotatedDatasetDisplayName, + GoogleCloudAiplatformV1ModelMonitoringObjectiveConfigExplanationConfig({ + this.enableFeatureAttributes, + this.explanationBaseline, }); - GoogleCloudAiplatformV1MigratableResourceDataLabelingDatasetDataLabelingAnnotatedDataset.fromJson( + GoogleCloudAiplatformV1ModelMonitoringObjectiveConfigExplanationConfig.fromJson( core.Map json_) : this( - annotatedDataset: json_['annotatedDataset'] as core.String?, - annotatedDatasetDisplayName: - json_['annotatedDatasetDisplayName'] as core.String?, + enableFeatureAttributes: + json_['enableFeatureAttributes'] as core.bool?, + explanationBaseline: json_.containsKey('explanationBaseline') + ? GoogleCloudAiplatformV1ModelMonitoringObjectiveConfigExplanationConfigExplanationBaseline + .fromJson(json_['explanationBaseline'] + as core.Map) + : null, ); core.Map toJson() => { - if (annotatedDataset != null) 'annotatedDataset': annotatedDataset!, - if (annotatedDatasetDisplayName != null) - 'annotatedDatasetDisplayName': annotatedDatasetDisplayName!, + if (enableFeatureAttributes != null) + 'enableFeatureAttributes': enableFeatureAttributes!, + if (explanationBaseline != null) + 'explanationBaseline': explanationBaseline!, }; } -/// Represents one model Version in ml.googleapis.com. -class GoogleCloudAiplatformV1MigratableResourceMlEngineModelVersion { - /// The ml.googleapis.com endpoint that this model Version currently lives in. - /// - /// Example values: * ml.googleapis.com * us-centrall-ml.googleapis.com * - /// europe-west4-ml.googleapis.com * asia-east1-ml.googleapis.com - core.String? endpoint; +/// Output from BatchPredictionJob for Model Monitoring baseline dataset, which +/// can be used to generate baseline attribution scores. +class GoogleCloudAiplatformV1ModelMonitoringObjectiveConfigExplanationConfigExplanationBaseline { + /// BigQuery location for BatchExplain output. + GoogleCloudAiplatformV1BigQueryDestination? bigquery; - /// Full resource name of ml engine model Version. - /// - /// Format: `projects/{project}/models/{model}/versions/{version}`. - core.String? version; + /// Cloud Storage location for BatchExplain output. + GoogleCloudAiplatformV1GcsDestination? gcs; - GoogleCloudAiplatformV1MigratableResourceMlEngineModelVersion({ - this.endpoint, - this.version, + /// The storage format of the predictions generated BatchPrediction job. + /// Possible string values are: + /// - "PREDICTION_FORMAT_UNSPECIFIED" : Should not be set. + /// - "JSONL" : Predictions are in JSONL files. + /// - "BIGQUERY" : Predictions are in BigQuery. + core.String? predictionFormat; + + GoogleCloudAiplatformV1ModelMonitoringObjectiveConfigExplanationConfigExplanationBaseline({ + this.bigquery, + this.gcs, + this.predictionFormat, }); - GoogleCloudAiplatformV1MigratableResourceMlEngineModelVersion.fromJson( + GoogleCloudAiplatformV1ModelMonitoringObjectiveConfigExplanationConfigExplanationBaseline.fromJson( core.Map json_) : this( - endpoint: json_['endpoint'] as core.String?, - version: json_['version'] as core.String?, + bigquery: json_.containsKey('bigquery') + ? GoogleCloudAiplatformV1BigQueryDestination.fromJson( + json_['bigquery'] as core.Map) + : null, + gcs: json_.containsKey('gcs') + ? 
GoogleCloudAiplatformV1GcsDestination.fromJson( + json_['gcs'] as core.Map) + : null, + predictionFormat: json_['predictionFormat'] as core.String?, ); core.Map toJson() => { - if (endpoint != null) 'endpoint': endpoint!, - if (version != null) 'version': version!, + if (bigquery != null) 'bigquery': bigquery!, + if (gcs != null) 'gcs': gcs!, + if (predictionFormat != null) 'predictionFormat': predictionFormat!, }; } -/// Config of migrating one resource from automl.googleapis.com, -/// datalabeling.googleapis.com and ml.googleapis.com to Vertex AI. -class GoogleCloudAiplatformV1MigrateResourceRequest { - /// Config for migrating Dataset in automl.googleapis.com to Vertex AI's - /// Dataset. - GoogleCloudAiplatformV1MigrateResourceRequestMigrateAutomlDatasetConfig? - migrateAutomlDatasetConfig; - - /// Config for migrating Model in automl.googleapis.com to Vertex AI's Model. - GoogleCloudAiplatformV1MigrateResourceRequestMigrateAutomlModelConfig? - migrateAutomlModelConfig; +/// The config for Prediction data drift detection. +class GoogleCloudAiplatformV1ModelMonitoringObjectiveConfigPredictionDriftDetectionConfig { + /// Key is the feature name and value is the threshold. + /// + /// The threshold here is against attribution score distance between different + /// time windows. + core.Map? + attributionScoreDriftThresholds; - /// Config for migrating Dataset in datalabeling.googleapis.com to Vertex AI's - /// Dataset. - GoogleCloudAiplatformV1MigrateResourceRequestMigrateDataLabelingDatasetConfig? - migrateDataLabelingDatasetConfig; + /// Drift anomaly detection threshold used by all features. + /// + /// When the per-feature thresholds are not set, this field can be used to + /// specify a threshold for all features. + GoogleCloudAiplatformV1ThresholdConfig? defaultDriftThreshold; - /// Config for migrating Version in ml.googleapis.com to Vertex AI's Model. - GoogleCloudAiplatformV1MigrateResourceRequestMigrateMlEngineModelVersionConfig? - migrateMlEngineModelVersionConfig; + /// Key is the feature name and value is the threshold. + /// + /// If a feature needs to be monitored for drift, a value threshold must be + /// configured for that feature. The threshold here is against feature + /// distribution distance between different time windws. + core.Map? + driftThresholds; - GoogleCloudAiplatformV1MigrateResourceRequest({ - this.migrateAutomlDatasetConfig, - this.migrateAutomlModelConfig, - this.migrateDataLabelingDatasetConfig, - this.migrateMlEngineModelVersionConfig, + GoogleCloudAiplatformV1ModelMonitoringObjectiveConfigPredictionDriftDetectionConfig({ + this.attributionScoreDriftThresholds, + this.defaultDriftThreshold, + this.driftThresholds, }); - GoogleCloudAiplatformV1MigrateResourceRequest.fromJson(core.Map json_) + GoogleCloudAiplatformV1ModelMonitoringObjectiveConfigPredictionDriftDetectionConfig.fromJson( + core.Map json_) : this( - migrateAutomlDatasetConfig: json_ - .containsKey('migrateAutomlDatasetConfig') - ? GoogleCloudAiplatformV1MigrateResourceRequestMigrateAutomlDatasetConfig - .fromJson(json_['migrateAutomlDatasetConfig'] - as core.Map) - : null, - migrateAutomlModelConfig: json_ - .containsKey('migrateAutomlModelConfig') - ? GoogleCloudAiplatformV1MigrateResourceRequestMigrateAutomlModelConfig - .fromJson(json_['migrateAutomlModelConfig'] - as core.Map) - : null, - migrateDataLabelingDatasetConfig: json_ - .containsKey('migrateDataLabelingDatasetConfig') - ? 
GoogleCloudAiplatformV1MigrateResourceRequestMigrateDataLabelingDatasetConfig - .fromJson(json_['migrateDataLabelingDatasetConfig'] - as core.Map) - : null, - migrateMlEngineModelVersionConfig: json_ - .containsKey('migrateMlEngineModelVersionConfig') - ? GoogleCloudAiplatformV1MigrateResourceRequestMigrateMlEngineModelVersionConfig - .fromJson(json_['migrateMlEngineModelVersionConfig'] + attributionScoreDriftThresholds: + (json_['attributionScoreDriftThresholds'] + as core.Map?) + ?.map( + (key, value) => core.MapEntry( + key, + GoogleCloudAiplatformV1ThresholdConfig.fromJson( + value as core.Map), + ), + ), + defaultDriftThreshold: json_.containsKey('defaultDriftThreshold') + ? GoogleCloudAiplatformV1ThresholdConfig.fromJson( + json_['defaultDriftThreshold'] as core.Map) : null, + driftThresholds: + (json_['driftThresholds'] as core.Map?) + ?.map( + (key, value) => core.MapEntry( + key, + GoogleCloudAiplatformV1ThresholdConfig.fromJson( + value as core.Map), + ), + ), ); core.Map toJson() => { - if (migrateAutomlDatasetConfig != null) - 'migrateAutomlDatasetConfig': migrateAutomlDatasetConfig!, - if (migrateAutomlModelConfig != null) - 'migrateAutomlModelConfig': migrateAutomlModelConfig!, - if (migrateDataLabelingDatasetConfig != null) - 'migrateDataLabelingDatasetConfig': migrateDataLabelingDatasetConfig!, - if (migrateMlEngineModelVersionConfig != null) - 'migrateMlEngineModelVersionConfig': - migrateMlEngineModelVersionConfig!, + if (attributionScoreDriftThresholds != null) + 'attributionScoreDriftThresholds': attributionScoreDriftThresholds!, + if (defaultDriftThreshold != null) + 'defaultDriftThreshold': defaultDriftThreshold!, + if (driftThresholds != null) 'driftThresholds': driftThresholds!, }; } -/// Config for migrating Dataset in automl.googleapis.com to Vertex AI's -/// Dataset. -class GoogleCloudAiplatformV1MigrateResourceRequestMigrateAutomlDatasetConfig { - /// Full resource name of automl Dataset. - /// - /// Format: `projects/{project}/locations/{location}/datasets/{dataset}`. +/// Training Dataset information. +class GoogleCloudAiplatformV1ModelMonitoringObjectiveConfigTrainingDataset { + /// The BigQuery table of the unmanaged Dataset used to train this Model. + GoogleCloudAiplatformV1BigQuerySource? bigquerySource; + + /// Data format of the dataset, only applicable if the input is from Google + /// Cloud Storage. /// - /// Required. + /// The possible formats are: "tf-record" The source file is a TFRecord file. + /// "csv" The source file is a CSV file. "jsonl" The source file is a JSONL + /// file. + core.String? dataFormat; + + /// The resource name of the Dataset used to train this Model. core.String? dataset; - /// Display name of the Dataset in Vertex AI. + /// The Google Cloud Storage uri of the unmanaged Dataset used to train this + /// Model. + GoogleCloudAiplatformV1GcsSource? gcsSource; + + /// Strategy to sample data from Training Dataset. /// - /// System will pick a display name if unspecified. + /// If not set, we process the whole dataset. + GoogleCloudAiplatformV1SamplingStrategy? loggingSamplingStrategy; + + /// The target field name the model is to predict. /// - /// Required. - core.String? datasetDisplayName; + /// This field will be excluded when doing Predict and (or) Explain for the + /// training data. + core.String? 
targetField; - GoogleCloudAiplatformV1MigrateResourceRequestMigrateAutomlDatasetConfig({ + GoogleCloudAiplatformV1ModelMonitoringObjectiveConfigTrainingDataset({ + this.bigquerySource, + this.dataFormat, this.dataset, - this.datasetDisplayName, + this.gcsSource, + this.loggingSamplingStrategy, + this.targetField, }); - GoogleCloudAiplatformV1MigrateResourceRequestMigrateAutomlDatasetConfig.fromJson( + GoogleCloudAiplatformV1ModelMonitoringObjectiveConfigTrainingDataset.fromJson( core.Map json_) : this( + bigquerySource: json_.containsKey('bigquerySource') + ? GoogleCloudAiplatformV1BigQuerySource.fromJson( + json_['bigquerySource'] + as core.Map) + : null, + dataFormat: json_['dataFormat'] as core.String?, dataset: json_['dataset'] as core.String?, - datasetDisplayName: json_['datasetDisplayName'] as core.String?, + gcsSource: json_.containsKey('gcsSource') + ? GoogleCloudAiplatformV1GcsSource.fromJson( + json_['gcsSource'] as core.Map) + : null, + loggingSamplingStrategy: json_.containsKey('loggingSamplingStrategy') + ? GoogleCloudAiplatformV1SamplingStrategy.fromJson( + json_['loggingSamplingStrategy'] + as core.Map) + : null, + targetField: json_['targetField'] as core.String?, ); core.Map toJson() => { + if (bigquerySource != null) 'bigquerySource': bigquerySource!, + if (dataFormat != null) 'dataFormat': dataFormat!, if (dataset != null) 'dataset': dataset!, - if (datasetDisplayName != null) - 'datasetDisplayName': datasetDisplayName!, + if (gcsSource != null) 'gcsSource': gcsSource!, + if (loggingSamplingStrategy != null) + 'loggingSamplingStrategy': loggingSamplingStrategy!, + if (targetField != null) 'targetField': targetField!, }; } -/// Config for migrating Model in automl.googleapis.com to Vertex AI's Model. -class GoogleCloudAiplatformV1MigrateResourceRequestMigrateAutomlModelConfig { - /// Full resource name of automl Model. - /// - /// Format: `projects/{project}/locations/{location}/models/{model}`. +/// The config for Training & Prediction data skew detection. +/// +/// It specifies the training dataset sources and the skew detection parameters. +class GoogleCloudAiplatformV1ModelMonitoringObjectiveConfigTrainingPredictionSkewDetectionConfig { + /// Key is the feature name and value is the threshold. /// - /// Required. - core.String? model; + /// The threshold here is against attribution score distance between the + /// training and prediction feature. + core.Map? + attributionScoreSkewThresholds; - /// Display name of the model in Vertex AI. + /// Skew anomaly detection threshold used by all features. /// - /// System will pick a display name if unspecified. + /// When the per-feature thresholds are not set, this field can be used to + /// specify a threshold for all features. + GoogleCloudAiplatformV1ThresholdConfig? defaultSkewThreshold; + + /// Key is the feature name and value is the threshold. /// - /// Optional. - core.String? modelDisplayName; + /// If a feature needs to be monitored for skew, a value threshold must be + /// configured for that feature. The threshold here is against feature + /// distribution distance between the training and prediction feature. + core.Map? 
skewThresholds; - GoogleCloudAiplatformV1MigrateResourceRequestMigrateAutomlModelConfig({ - this.model, - this.modelDisplayName, + GoogleCloudAiplatformV1ModelMonitoringObjectiveConfigTrainingPredictionSkewDetectionConfig({ + this.attributionScoreSkewThresholds, + this.defaultSkewThreshold, + this.skewThresholds, }); - GoogleCloudAiplatformV1MigrateResourceRequestMigrateAutomlModelConfig.fromJson( + GoogleCloudAiplatformV1ModelMonitoringObjectiveConfigTrainingPredictionSkewDetectionConfig.fromJson( core.Map json_) : this( - model: json_['model'] as core.String?, - modelDisplayName: json_['modelDisplayName'] as core.String?, + attributionScoreSkewThresholds: + (json_['attributionScoreSkewThresholds'] + as core.Map?) + ?.map( + (key, value) => core.MapEntry( + key, + GoogleCloudAiplatformV1ThresholdConfig.fromJson( + value as core.Map), + ), + ), + defaultSkewThreshold: json_.containsKey('defaultSkewThreshold') + ? GoogleCloudAiplatformV1ThresholdConfig.fromJson( + json_['defaultSkewThreshold'] + as core.Map) + : null, + skewThresholds: + (json_['skewThresholds'] as core.Map?) + ?.map( + (key, value) => core.MapEntry( + key, + GoogleCloudAiplatformV1ThresholdConfig.fromJson( + value as core.Map), + ), + ), ); core.Map toJson() => { - if (model != null) 'model': model!, - if (modelDisplayName != null) 'modelDisplayName': modelDisplayName!, + if (attributionScoreSkewThresholds != null) + 'attributionScoreSkewThresholds': attributionScoreSkewThresholds!, + if (defaultSkewThreshold != null) + 'defaultSkewThreshold': defaultSkewThreshold!, + if (skewThresholds != null) 'skewThresholds': skewThresholds!, }; } -/// Config for migrating Dataset in datalabeling.googleapis.com to Vertex AI's -/// Dataset. -class GoogleCloudAiplatformV1MigrateResourceRequestMigrateDataLabelingDatasetConfig { - /// Full resource name of data labeling Dataset. - /// - /// Format: `projects/{project}/datasets/{dataset}`. - /// - /// Required. - core.String? dataset; +/// Statistics and anomalies generated by Model Monitoring. +class GoogleCloudAiplatformV1ModelMonitoringStatsAnomalies { + /// Number of anomalies within all stats. + core.int? anomalyCount; - /// Display name of the Dataset in Vertex AI. - /// - /// System will pick a display name if unspecified. - /// - /// Optional. - core.String? datasetDisplayName; + /// Deployed Model ID. + core.String? deployedModelId; - /// Configs for migrating AnnotatedDataset in datalabeling.googleapis.com to - /// Vertex AI's SavedQuery. - /// - /// The specified AnnotatedDatasets have to belong to the datalabeling - /// Dataset. - /// - /// Optional. + /// A list of historical Stats and Anomalies generated for all Features. core.List< - GoogleCloudAiplatformV1MigrateResourceRequestMigrateDataLabelingDatasetConfigMigrateDataLabelingAnnotatedDatasetConfig>? - migrateDataLabelingAnnotatedDatasetConfigs; + GoogleCloudAiplatformV1ModelMonitoringStatsAnomaliesFeatureHistoricStatsAnomalies>? + featureStats; - GoogleCloudAiplatformV1MigrateResourceRequestMigrateDataLabelingDatasetConfig({ - this.dataset, - this.datasetDisplayName, - this.migrateDataLabelingAnnotatedDatasetConfigs, + /// Model Monitoring Objective those stats and anomalies belonging to. + /// Possible string values are: + /// - "MODEL_DEPLOYMENT_MONITORING_OBJECTIVE_TYPE_UNSPECIFIED" : Default + /// value, should not be set. + /// - "RAW_FEATURE_SKEW" : Raw feature values' stats to detect skew between + /// Training-Prediction datasets. 
+ /// - "RAW_FEATURE_DRIFT" : Raw feature values' stats to detect drift between + /// Serving-Prediction datasets. + /// - "FEATURE_ATTRIBUTION_SKEW" : Feature attribution scores to detect skew + /// between Training-Prediction datasets. + /// - "FEATURE_ATTRIBUTION_DRIFT" : Feature attribution scores to detect skew + /// between Prediction datasets collected within different time windows. + core.String? objective; + + GoogleCloudAiplatformV1ModelMonitoringStatsAnomalies({ + this.anomalyCount, + this.deployedModelId, + this.featureStats, + this.objective, }); - GoogleCloudAiplatformV1MigrateResourceRequestMigrateDataLabelingDatasetConfig.fromJson( - core.Map json_) + GoogleCloudAiplatformV1ModelMonitoringStatsAnomalies.fromJson(core.Map json_) : this( - dataset: json_['dataset'] as core.String?, - datasetDisplayName: json_['datasetDisplayName'] as core.String?, - migrateDataLabelingAnnotatedDatasetConfigs: (json_[ - 'migrateDataLabelingAnnotatedDatasetConfigs'] as core.List?) + anomalyCount: json_['anomalyCount'] as core.int?, + deployedModelId: json_['deployedModelId'] as core.String?, + featureStats: (json_['featureStats'] as core.List?) ?.map((value) => - GoogleCloudAiplatformV1MigrateResourceRequestMigrateDataLabelingDatasetConfigMigrateDataLabelingAnnotatedDatasetConfig + GoogleCloudAiplatformV1ModelMonitoringStatsAnomaliesFeatureHistoricStatsAnomalies .fromJson(value as core.Map)) .toList(), + objective: json_['objective'] as core.String?, ); core.Map toJson() => { - if (dataset != null) 'dataset': dataset!, - if (datasetDisplayName != null) - 'datasetDisplayName': datasetDisplayName!, - if (migrateDataLabelingAnnotatedDatasetConfigs != null) - 'migrateDataLabelingAnnotatedDatasetConfigs': - migrateDataLabelingAnnotatedDatasetConfigs!, + if (anomalyCount != null) 'anomalyCount': anomalyCount!, + if (deployedModelId != null) 'deployedModelId': deployedModelId!, + if (featureStats != null) 'featureStats': featureStats!, + if (objective != null) 'objective': objective!, }; } -/// Config for migrating AnnotatedDataset in datalabeling.googleapis.com to -/// Vertex AI's SavedQuery. -class GoogleCloudAiplatformV1MigrateResourceRequestMigrateDataLabelingDatasetConfigMigrateDataLabelingAnnotatedDatasetConfig { - /// Full resource name of data labeling AnnotatedDataset. - /// - /// Format: - /// `projects/{project}/datasets/{dataset}/annotatedDatasets/{annotated_dataset}`. - /// - /// Required. - core.String? annotatedDataset; +/// Historical Stats (and Anomalies) for a specific Feature. +class GoogleCloudAiplatformV1ModelMonitoringStatsAnomaliesFeatureHistoricStatsAnomalies { + /// Display Name of the Feature. + core.String? featureDisplayName; - GoogleCloudAiplatformV1MigrateResourceRequestMigrateDataLabelingDatasetConfigMigrateDataLabelingAnnotatedDatasetConfig({ - this.annotatedDataset, + /// A list of historical stats generated by different time window's Prediction + /// Dataset. + core.List? predictionStats; + + /// Threshold for anomaly detection. + GoogleCloudAiplatformV1ThresholdConfig? threshold; + + /// Stats calculated for the Training Dataset. + GoogleCloudAiplatformV1FeatureStatsAnomaly? 
trainingStats; + + GoogleCloudAiplatformV1ModelMonitoringStatsAnomaliesFeatureHistoricStatsAnomalies({ + this.featureDisplayName, + this.predictionStats, + this.threshold, + this.trainingStats, }); - GoogleCloudAiplatformV1MigrateResourceRequestMigrateDataLabelingDatasetConfigMigrateDataLabelingAnnotatedDatasetConfig.fromJson( + GoogleCloudAiplatformV1ModelMonitoringStatsAnomaliesFeatureHistoricStatsAnomalies.fromJson( core.Map json_) : this( - annotatedDataset: json_['annotatedDataset'] as core.String?, + featureDisplayName: json_['featureDisplayName'] as core.String?, + predictionStats: (json_['predictionStats'] as core.List?) + ?.map((value) => + GoogleCloudAiplatformV1FeatureStatsAnomaly.fromJson( + value as core.Map)) + .toList(), + threshold: json_.containsKey('threshold') + ? GoogleCloudAiplatformV1ThresholdConfig.fromJson( + json_['threshold'] as core.Map) + : null, + trainingStats: json_.containsKey('trainingStats') + ? GoogleCloudAiplatformV1FeatureStatsAnomaly.fromJson( + json_['trainingStats'] as core.Map) + : null, ); core.Map toJson() => { - if (annotatedDataset != null) 'annotatedDataset': annotatedDataset!, + if (featureDisplayName != null) + 'featureDisplayName': featureDisplayName!, + if (predictionStats != null) 'predictionStats': predictionStats!, + if (threshold != null) 'threshold': threshold!, + if (trainingStats != null) 'trainingStats': trainingStats!, }; } -/// Config for migrating version in ml.googleapis.com to Vertex AI's Model. -class GoogleCloudAiplatformV1MigrateResourceRequestMigrateMlEngineModelVersionConfig { - /// The ml.googleapis.com endpoint that this model version should be migrated - /// from. - /// - /// Example values: * ml.googleapis.com * us-centrall-ml.googleapis.com * - /// europe-west4-ml.googleapis.com * asia-east1-ml.googleapis.com - /// - /// Required. - core.String? endpoint; - - /// Display name of the model in Vertex AI. - /// - /// System will pick a display name if unspecified. - /// - /// Required. - core.String? modelDisplayName; - - /// Full resource name of ml engine model version. +/// Contains information about the original Model if this Model is a copy. +class GoogleCloudAiplatformV1ModelOriginalModelInfo { + /// The resource name of the Model this Model is a copy of, including the + /// revision. /// - /// Format: `projects/{project}/models/{model}/versions/{version}`. + /// Format: + /// `projects/{project}/locations/{location}/models/{model_id}@{version_id}` /// - /// Required. - core.String? modelVersion; + /// Output only. + core.String? model; - GoogleCloudAiplatformV1MigrateResourceRequestMigrateMlEngineModelVersionConfig({ - this.endpoint, - this.modelDisplayName, - this.modelVersion, + GoogleCloudAiplatformV1ModelOriginalModelInfo({ + this.model, }); - GoogleCloudAiplatformV1MigrateResourceRequestMigrateMlEngineModelVersionConfig.fromJson( - core.Map json_) + GoogleCloudAiplatformV1ModelOriginalModelInfo.fromJson(core.Map json_) : this( - endpoint: json_['endpoint'] as core.String?, - modelDisplayName: json_['modelDisplayName'] as core.String?, - modelVersion: json_['modelVersion'] as core.String?, + model: json_['model'] as core.String?, ); core.Map toJson() => { - if (endpoint != null) 'endpoint': endpoint!, - if (modelDisplayName != null) 'modelDisplayName': modelDisplayName!, - if (modelVersion != null) 'modelVersion': modelVersion!, + if (model != null) 'model': model!, }; } -/// A trained machine learning Model. 
-class GoogleCloudAiplatformV1Model { - /// The path to the directory containing the Model artifact and any of its - /// supporting files. +/// Detail description of the source information of the model. +typedef GoogleCloudAiplatformV1ModelSourceInfo = $ModelSourceInfo; + +/// Request message for EndpointService.MutateDeployedModel. +class GoogleCloudAiplatformV1MutateDeployedModelRequest { + /// The DeployedModel to be mutated within the Endpoint. /// - /// Not required for AutoML Models. + /// Only the following fields can be mutated: * `min_replica_count` in either + /// DedicatedResources or AutomaticResources * `max_replica_count` in either + /// DedicatedResources or AutomaticResources * autoscaling_metric_specs * + /// `disable_container_logging` (v1 only) * `enable_container_logging` + /// (v1beta1 only) /// - /// Immutable. - core.String? artifactUri; + /// Required. + GoogleCloudAiplatformV1DeployedModel? deployedModel; - /// User input field to specify the base model source. + /// The update mask applies to the resource. /// - /// Currently it only supports specifing the Model Garden models and Genie - /// models. + /// See google.protobuf.FieldMask. /// - /// Optional. - GoogleCloudAiplatformV1ModelBaseModelSource? baseModelSource; + /// Required. + core.String? updateMask; - /// Input only. - /// - /// The specification of the container that is to be used when deploying this - /// Model. The specification is ingested upon ModelService.UploadModel, and - /// all binaries it contains are copied and stored internally by Vertex AI. - /// Not required for AutoML Models. - GoogleCloudAiplatformV1ModelContainerSpec? containerSpec; + GoogleCloudAiplatformV1MutateDeployedModelRequest({ + this.deployedModel, + this.updateMask, + }); - /// Timestamp when this Model was uploaded into Vertex AI. - /// - /// Output only. - core.String? createTime; + GoogleCloudAiplatformV1MutateDeployedModelRequest.fromJson(core.Map json_) + : this( + deployedModel: json_.containsKey('deployedModel') + ? GoogleCloudAiplatformV1DeployedModel.fromJson( + json_['deployedModel'] as core.Map) + : null, + updateMask: json_['updateMask'] as core.String?, + ); - /// Stats of data used for training or evaluating the Model. - /// - /// Only populated when the Model is trained by a TrainingPipeline with - /// data_input_config. - GoogleCloudAiplatformV1ModelDataStats? dataStats; + core.Map toJson() => { + if (deployedModel != null) 'deployedModel': deployedModel!, + if (updateMask != null) 'updateMask': updateMask!, + }; +} - /// The pointers to DeployedModels created from this Model. - /// - /// Note that Model could have been deployed to Endpoints in different - /// Locations. +/// Represents a Neural Architecture Search (NAS) job. +class GoogleCloudAiplatformV1NasJob { + /// Time when the NasJob was created. /// /// Output only. - core.List? deployedModels; - - /// The description of the Model. - core.String? description; + core.String? createTime; - /// The display name of the Model. + /// The display name of the NasJob. /// /// The name can be up to 128 characters long and can consist of any UTF-8 /// characters. @@ -44494,30 +50837,34 @@ class GoogleCloudAiplatformV1Model { /// Required. core.String? displayName; - /// Customer-managed encryption key spec for a Model. + /// Enable a separation of Custom model training and restricted image training + /// for tenant project. /// - /// If set, this Model and all sub-resources of this Model will be secured by - /// this key. + /// Optional. 
+ @core.Deprecated( + 'Not supported. Member documentation may have more information.', + ) + core.bool? enableRestrictedImageTraining; + + /// Customer-managed encryption key options for a NasJob. + /// + /// If this is set, then all resources created by the NasJob will be encrypted + /// with the provided encryption key. GoogleCloudAiplatformV1EncryptionSpec? encryptionSpec; - /// Used to perform consistent read-modify-write updates. + /// Time when the NasJob entered any of the following states: + /// `JOB_STATE_SUCCEEDED`, `JOB_STATE_FAILED`, `JOB_STATE_CANCELLED`. /// - /// If not set, a blind "overwrite" update happens. - core.String? etag; + /// Output only. + core.String? endTime; - /// The default explanation specification for this Model. + /// Only populated when job's state is JOB_STATE_FAILED or + /// JOB_STATE_CANCELLED. /// - /// The Model can be used for requesting explanation after being deployed if - /// it is populated. The Model can be used for batch explanation if it is - /// populated. All fields of the explanation_spec can be overridden by - /// explanation_spec of DeployModelRequest.deployed_model, or explanation_spec - /// of BatchPredictionJob. If the default explanation specification is not set - /// for this Model, this Model can still be used for requesting explanation by - /// setting explanation_spec of DeployModelRequest.deployed_model and for - /// batch explanation by setting explanation_spec of BatchPredictionJob. - GoogleCloudAiplatformV1ExplanationSpec? explanationSpec; + /// Output only. + GoogleRpcStatus? error; - /// The labels with user-defined metadata to organize your Models. + /// The labels with user-defined metadata to organize NasJobs. /// /// Label keys and values can be no longer than 64 characters (Unicode /// codepoints), can only contain lowercase letters, numeric characters, @@ -44525,67 +50872,20 @@ class GoogleCloudAiplatformV1Model { /// https://goo.gl/xmQnxf for more information and examples of labels. core.Map? labels; - /// An additional information about the Model; the schema of the metadata can - /// be found in metadata_schema. - /// - /// Unset if the Model does not have any additional information. - /// - /// Immutable. - /// - /// The values for Object must be JSON objects. It can consist of `num`, - /// `String`, `bool` and `null` as well as `Map` and `List` values. - core.Object? metadata; - - /// The resource name of the Artifact that was created in MetadataStore when - /// creating the Model. - /// - /// The Artifact resource name pattern is - /// `projects/{project}/locations/{location}/metadataStores/{metadata_store}/artifacts/{artifact}`. - /// - /// Output only. - core.String? metadataArtifact; - - /// Points to a YAML file stored on Google Cloud Storage describing additional - /// information about the Model, that is specific to it. - /// - /// Unset if the Model does not have any additional information. The schema is - /// defined as an OpenAPI 3.0.2 - /// [Schema Object](https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.2.md#schemaObject). - /// AutoML Models always have this field populated by Vertex AI, if no - /// additional metadata is needed, this field is set to an empty string. Note: - /// The URI given on output will be immutable and probably different, - /// including the URI scheme, than the one given on input. The output URI will - /// point to a location where the user only has a read access. - /// - /// Immutable. - core.String? metadataSchemaUri; - - /// Source of a model. 
- /// - /// It can either be automl training pipeline, custom training pipeline, - /// BigQuery ML, or saved and tuned from Genie or Model Garden. + /// Resource name of the NasJob. /// /// Output only. - GoogleCloudAiplatformV1ModelSourceInfo? modelSourceInfo; - - /// The resource name of the Model. core.String? name; - /// If this Model is a copy of another Model, this contains info about the - /// original. + /// Output of the NasJob. /// /// Output only. - GoogleCloudAiplatformV1ModelOriginalModelInfo? originalModelInfo; + GoogleCloudAiplatformV1NasJobOutput? nasJobOutput; - /// This field is populated if the model is produced by a pipeline job. + /// The specification of a NasJob. /// - /// Optional. - core.String? pipelineJob; - - /// The schemata that describe formats of the Model's predictions and - /// explanations as given and returned via PredictionService.Predict and - /// PredictionService.Explain. - GoogleCloudAiplatformV1PredictSchemata? predictSchemata; + /// Required. + GoogleCloudAiplatformV1NasJobSpec? nasJobSpec; /// Reserved for future use. /// @@ -44597,182 +50897,74 @@ class GoogleCloudAiplatformV1Model { /// Output only. core.bool? satisfiesPzs; - /// When this Model is deployed, its prediction resources are described by the - /// `prediction_resources` field of the Endpoint.deployed_models object. - /// - /// Because not all Models support all resource configuration types, the - /// configuration types this Model supports are listed here. If no - /// configuration types are listed, the Model cannot be deployed to an - /// Endpoint and does not support online predictions - /// (PredictionService.Predict or PredictionService.Explain). Such a Model can - /// serve predictions by using a BatchPredictionJob, if it has at least one - /// entry each in supported_input_storage_formats and - /// supported_output_storage_formats. - /// - /// Output only. - core.List? supportedDeploymentResourcesTypes; - - /// The formats in which this Model may be exported. - /// - /// If empty, this Model is not available for export. - /// - /// Output only. - core.List? supportedExportFormats; - - /// The formats this Model supports in BatchPredictionJob.input_config. - /// - /// If PredictSchemata.instance_schema_uri exists, the instances should be - /// given as per that schema. The possible formats are: * `jsonl` The JSON - /// Lines format, where each instance is a single line. Uses GcsSource. * - /// `csv` The CSV format, where each instance is a single comma-separated - /// line. The first line in the file is the header, containing comma-separated - /// field names. Uses GcsSource. * `tf-record` The TFRecord format, where each - /// instance is a single record in tfrecord syntax. Uses GcsSource. * - /// `tf-record-gzip` Similar to `tf-record`, but the file is gzipped. Uses - /// GcsSource. * `bigquery` Each instance is a single row in BigQuery. Uses - /// BigQuerySource. * `file-list` Each line of the file is the location of an - /// instance to process, uses `gcs_source` field of the InputConfig object. If - /// this Model doesn't support any of these formats it means it cannot be used - /// with a BatchPredictionJob. However, if it has - /// supported_deployment_resources_types, it could serve online predictions by - /// using PredictionService.Predict or PredictionService.Explain. - /// - /// Output only. - core.List? supportedInputStorageFormats; - - /// The formats this Model supports in BatchPredictionJob.output_config. 
- /// - /// If both PredictSchemata.instance_schema_uri and - /// PredictSchemata.prediction_schema_uri exist, the predictions are returned - /// together with their instances. In other words, the prediction has the - /// original instance data first, followed by the actual prediction content - /// (as per the schema). The possible formats are: * `jsonl` The JSON Lines - /// format, where each prediction is a single line. Uses GcsDestination. * - /// `csv` The CSV format, where each prediction is a single comma-separated - /// line. The first line in the file is the header, containing comma-separated - /// field names. Uses GcsDestination. * `bigquery` Each prediction is a single - /// row in a BigQuery table, uses BigQueryDestination . If this Model doesn't - /// support any of these formats it means it cannot be used with a - /// BatchPredictionJob. However, if it has - /// supported_deployment_resources_types, it could serve online predictions by - /// using PredictionService.Predict or PredictionService.Explain. + /// Time when the NasJob for the first time entered the `JOB_STATE_RUNNING` + /// state. /// /// Output only. - core.List? supportedOutputStorageFormats; + core.String? startTime; - /// The resource name of the TrainingPipeline that uploaded this Model, if - /// any. + /// The detailed state of the job. /// /// Output only. - core.String? trainingPipeline; + /// Possible string values are: + /// - "JOB_STATE_UNSPECIFIED" : The job state is unspecified. + /// - "JOB_STATE_QUEUED" : The job has been just created or resumed and + /// processing has not yet begun. + /// - "JOB_STATE_PENDING" : The service is preparing to run the job. + /// - "JOB_STATE_RUNNING" : The job is in progress. + /// - "JOB_STATE_SUCCEEDED" : The job completed successfully. + /// - "JOB_STATE_FAILED" : The job failed. + /// - "JOB_STATE_CANCELLING" : The job is being cancelled. From this state the + /// job may only go to either `JOB_STATE_SUCCEEDED`, `JOB_STATE_FAILED` or + /// `JOB_STATE_CANCELLED`. + /// - "JOB_STATE_CANCELLED" : The job has been cancelled. + /// - "JOB_STATE_PAUSED" : The job has been stopped, and can be resumed. + /// - "JOB_STATE_EXPIRED" : The job has expired. + /// - "JOB_STATE_UPDATING" : The job is being updated. Only jobs in the + /// `RUNNING` state can be updated. After updating, the job goes back to the + /// `RUNNING` state. + /// - "JOB_STATE_PARTIALLY_SUCCEEDED" : The job is partially succeeded, some + /// results may be missing due to errors. + core.String? state; - /// Timestamp when this Model was most recently updated. + /// Time when the NasJob was most recently updated. /// /// Output only. core.String? updateTime; - /// User provided version aliases so that a model version can be referenced - /// via alias (i.e. - /// `projects/{project}/locations/{location}/models/{model_id}@{version_alias}` - /// instead of auto-generated version id (i.e. - /// `projects/{project}/locations/{location}/models/{model_id}@{version_id})`. - /// - /// The format is a-z{0,126}\[a-z0-9\] to distinguish from version_id. A - /// default version alias will be created for the first version of the model, - /// and there must be exactly one default version alias for a model. - core.List? versionAliases; - - /// Timestamp when this version was created. - /// - /// Output only. - core.String? versionCreateTime; - - /// The description of this version. - core.String? versionDescription; - - /// The version ID of the model. 
- /// - /// A new version is committed when a new model version is uploaded or trained - /// under an existing model id. It is an auto-incrementing decimal number in - /// string representation. - /// - /// Output only. Immutable. - core.String? versionId; - - /// Timestamp when this version was most recently updated. - /// - /// Output only. - core.String? versionUpdateTime; - - GoogleCloudAiplatformV1Model({ - this.artifactUri, - this.baseModelSource, - this.containerSpec, + GoogleCloudAiplatformV1NasJob({ this.createTime, - this.dataStats, - this.deployedModels, - this.description, this.displayName, + this.enableRestrictedImageTraining, this.encryptionSpec, - this.etag, - this.explanationSpec, + this.endTime, + this.error, this.labels, - this.metadata, - this.metadataArtifact, - this.metadataSchemaUri, - this.modelSourceInfo, this.name, - this.originalModelInfo, - this.pipelineJob, - this.predictSchemata, + this.nasJobOutput, + this.nasJobSpec, this.satisfiesPzi, this.satisfiesPzs, - this.supportedDeploymentResourcesTypes, - this.supportedExportFormats, - this.supportedInputStorageFormats, - this.supportedOutputStorageFormats, - this.trainingPipeline, + this.startTime, + this.state, this.updateTime, - this.versionAliases, - this.versionCreateTime, - this.versionDescription, - this.versionId, - this.versionUpdateTime, }); - GoogleCloudAiplatformV1Model.fromJson(core.Map json_) + GoogleCloudAiplatformV1NasJob.fromJson(core.Map json_) : this( - artifactUri: json_['artifactUri'] as core.String?, - baseModelSource: json_.containsKey('baseModelSource') - ? GoogleCloudAiplatformV1ModelBaseModelSource.fromJson( - json_['baseModelSource'] - as core.Map) - : null, - containerSpec: json_.containsKey('containerSpec') - ? GoogleCloudAiplatformV1ModelContainerSpec.fromJson( - json_['containerSpec'] as core.Map) - : null, createTime: json_['createTime'] as core.String?, - dataStats: json_.containsKey('dataStats') - ? GoogleCloudAiplatformV1ModelDataStats.fromJson( - json_['dataStats'] as core.Map) - : null, - deployedModels: (json_['deployedModels'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1DeployedModelRef.fromJson( - value as core.Map)) - .toList(), - description: json_['description'] as core.String?, displayName: json_['displayName'] as core.String?, + enableRestrictedImageTraining: + json_['enableRestrictedImageTraining'] as core.bool?, encryptionSpec: json_.containsKey('encryptionSpec') ? GoogleCloudAiplatformV1EncryptionSpec.fromJson( json_['encryptionSpec'] as core.Map) : null, - etag: json_['etag'] as core.String?, - explanationSpec: json_.containsKey('explanationSpec') - ? GoogleCloudAiplatformV1ExplanationSpec.fromJson( - json_['explanationSpec'] - as core.Map) + endTime: json_['endTime'] as core.String?, + error: json_.containsKey('error') + ? GoogleRpcStatus.fromJson( + json_['error'] as core.Map) : null, labels: (json_['labels'] as core.Map?)?.map( @@ -44781,5715 +50973,5684 @@ class GoogleCloudAiplatformV1Model { value as core.String, ), ), - metadata: json_['metadata'], - metadataArtifact: json_['metadataArtifact'] as core.String?, - metadataSchemaUri: json_['metadataSchemaUri'] as core.String?, - modelSourceInfo: json_.containsKey('modelSourceInfo') - ? GoogleCloudAiplatformV1ModelSourceInfo.fromJson( - json_['modelSourceInfo'] - as core.Map) - : null, name: json_['name'] as core.String?, - originalModelInfo: json_.containsKey('originalModelInfo') - ? 
GoogleCloudAiplatformV1ModelOriginalModelInfo.fromJson( - json_['originalModelInfo'] - as core.Map) + nasJobOutput: json_.containsKey('nasJobOutput') + ? GoogleCloudAiplatformV1NasJobOutput.fromJson( + json_['nasJobOutput'] as core.Map) : null, - pipelineJob: json_['pipelineJob'] as core.String?, - predictSchemata: json_.containsKey('predictSchemata') - ? GoogleCloudAiplatformV1PredictSchemata.fromJson( - json_['predictSchemata'] - as core.Map) + nasJobSpec: json_.containsKey('nasJobSpec') + ? GoogleCloudAiplatformV1NasJobSpec.fromJson( + json_['nasJobSpec'] as core.Map) : null, satisfiesPzi: json_['satisfiesPzi'] as core.bool?, satisfiesPzs: json_['satisfiesPzs'] as core.bool?, - supportedDeploymentResourcesTypes: - (json_['supportedDeploymentResourcesTypes'] as core.List?) - ?.map((value) => value as core.String) - .toList(), - supportedExportFormats: - (json_['supportedExportFormats'] as core.List?) - ?.map((value) => - GoogleCloudAiplatformV1ModelExportFormat.fromJson( - value as core.Map)) - .toList(), - supportedInputStorageFormats: - (json_['supportedInputStorageFormats'] as core.List?) - ?.map((value) => value as core.String) - .toList(), - supportedOutputStorageFormats: - (json_['supportedOutputStorageFormats'] as core.List?) - ?.map((value) => value as core.String) - .toList(), - trainingPipeline: json_['trainingPipeline'] as core.String?, + startTime: json_['startTime'] as core.String?, + state: json_['state'] as core.String?, updateTime: json_['updateTime'] as core.String?, - versionAliases: (json_['versionAliases'] as core.List?) - ?.map((value) => value as core.String) - .toList(), - versionCreateTime: json_['versionCreateTime'] as core.String?, - versionDescription: json_['versionDescription'] as core.String?, - versionId: json_['versionId'] as core.String?, - versionUpdateTime: json_['versionUpdateTime'] as core.String?, ); core.Map toJson() => { - if (artifactUri != null) 'artifactUri': artifactUri!, - if (baseModelSource != null) 'baseModelSource': baseModelSource!, - if (containerSpec != null) 'containerSpec': containerSpec!, if (createTime != null) 'createTime': createTime!, - if (dataStats != null) 'dataStats': dataStats!, - if (deployedModels != null) 'deployedModels': deployedModels!, - if (description != null) 'description': description!, if (displayName != null) 'displayName': displayName!, + if (enableRestrictedImageTraining != null) + 'enableRestrictedImageTraining': enableRestrictedImageTraining!, if (encryptionSpec != null) 'encryptionSpec': encryptionSpec!, - if (etag != null) 'etag': etag!, - if (explanationSpec != null) 'explanationSpec': explanationSpec!, + if (endTime != null) 'endTime': endTime!, + if (error != null) 'error': error!, if (labels != null) 'labels': labels!, - if (metadata != null) 'metadata': metadata!, - if (metadataArtifact != null) 'metadataArtifact': metadataArtifact!, - if (metadataSchemaUri != null) 'metadataSchemaUri': metadataSchemaUri!, - if (modelSourceInfo != null) 'modelSourceInfo': modelSourceInfo!, if (name != null) 'name': name!, - if (originalModelInfo != null) 'originalModelInfo': originalModelInfo!, - if (pipelineJob != null) 'pipelineJob': pipelineJob!, - if (predictSchemata != null) 'predictSchemata': predictSchemata!, + if (nasJobOutput != null) 'nasJobOutput': nasJobOutput!, + if (nasJobSpec != null) 'nasJobSpec': nasJobSpec!, if (satisfiesPzi != null) 'satisfiesPzi': satisfiesPzi!, if (satisfiesPzs != null) 'satisfiesPzs': satisfiesPzs!, - if (supportedDeploymentResourcesTypes != null) - 
'supportedDeploymentResourcesTypes': - supportedDeploymentResourcesTypes!, - if (supportedExportFormats != null) - 'supportedExportFormats': supportedExportFormats!, - if (supportedInputStorageFormats != null) - 'supportedInputStorageFormats': supportedInputStorageFormats!, - if (supportedOutputStorageFormats != null) - 'supportedOutputStorageFormats': supportedOutputStorageFormats!, - if (trainingPipeline != null) 'trainingPipeline': trainingPipeline!, + if (startTime != null) 'startTime': startTime!, + if (state != null) 'state': state!, if (updateTime != null) 'updateTime': updateTime!, - if (versionAliases != null) 'versionAliases': versionAliases!, - if (versionCreateTime != null) 'versionCreateTime': versionCreateTime!, - if (versionDescription != null) - 'versionDescription': versionDescription!, - if (versionId != null) 'versionId': versionId!, - if (versionUpdateTime != null) 'versionUpdateTime': versionUpdateTime!, }; } -/// User input field to specify the base model source. -/// -/// Currently it only supports specifing the Model Garden models and Genie -/// models. -class GoogleCloudAiplatformV1ModelBaseModelSource { - /// Information about the base model of Genie models. - GoogleCloudAiplatformV1GenieSource? genieSource; +/// Represents a uCAIP NasJob output. +class GoogleCloudAiplatformV1NasJobOutput { + /// The output of this multi-trial Neural Architecture Search (NAS) job. + /// + /// Output only. + GoogleCloudAiplatformV1NasJobOutputMultiTrialJobOutput? multiTrialJobOutput; + + GoogleCloudAiplatformV1NasJobOutput({ + this.multiTrialJobOutput, + }); + + GoogleCloudAiplatformV1NasJobOutput.fromJson(core.Map json_) + : this( + multiTrialJobOutput: json_.containsKey('multiTrialJobOutput') + ? GoogleCloudAiplatformV1NasJobOutputMultiTrialJobOutput.fromJson( + json_['multiTrialJobOutput'] + as core.Map) + : null, + ); + + core.Map toJson() => { + if (multiTrialJobOutput != null) + 'multiTrialJobOutput': multiTrialJobOutput!, + }; +} + +/// The output of a multi-trial Neural Architecture Search (NAS) jobs. +class GoogleCloudAiplatformV1NasJobOutputMultiTrialJobOutput { + /// List of NasTrials that were started as part of search stage. + /// + /// Output only. + core.List? searchTrials; + + /// List of NasTrials that were started as part of train stage. + /// + /// Output only. + core.List? trainTrials; + + GoogleCloudAiplatformV1NasJobOutputMultiTrialJobOutput({ + this.searchTrials, + this.trainTrials, + }); + + GoogleCloudAiplatformV1NasJobOutputMultiTrialJobOutput.fromJson( + core.Map json_) + : this( + searchTrials: (json_['searchTrials'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1NasTrial.fromJson( + value as core.Map)) + .toList(), + trainTrials: (json_['trainTrials'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1NasTrial.fromJson( + value as core.Map)) + .toList(), + ); + + core.Map toJson() => { + if (searchTrials != null) 'searchTrials': searchTrials!, + if (trainTrials != null) 'trainTrials': trainTrials!, + }; +} + +/// Represents the spec of a NasJob. +class GoogleCloudAiplatformV1NasJobSpec { + /// The spec of multi-trial algorithms. + GoogleCloudAiplatformV1NasJobSpecMultiTrialAlgorithmSpec? + multiTrialAlgorithmSpec; + + /// The ID of the existing NasJob in the same Project and Location which will + /// be used to resume search. + /// + /// search_space_spec and nas_algorithm_spec are obtained from previous NasJob + /// hence should not provide them again for this NasJob. + core.String? 
resumeNasJobId; + + /// It defines the search space for Neural Architecture Search (NAS). + core.String? searchSpaceSpec; + + GoogleCloudAiplatformV1NasJobSpec({ + this.multiTrialAlgorithmSpec, + this.resumeNasJobId, + this.searchSpaceSpec, + }); + + GoogleCloudAiplatformV1NasJobSpec.fromJson(core.Map json_) + : this( + multiTrialAlgorithmSpec: json_.containsKey('multiTrialAlgorithmSpec') + ? GoogleCloudAiplatformV1NasJobSpecMultiTrialAlgorithmSpec + .fromJson(json_['multiTrialAlgorithmSpec'] + as core.Map) + : null, + resumeNasJobId: json_['resumeNasJobId'] as core.String?, + searchSpaceSpec: json_['searchSpaceSpec'] as core.String?, + ); + + core.Map toJson() => { + if (multiTrialAlgorithmSpec != null) + 'multiTrialAlgorithmSpec': multiTrialAlgorithmSpec!, + if (resumeNasJobId != null) 'resumeNasJobId': resumeNasJobId!, + if (searchSpaceSpec != null) 'searchSpaceSpec': searchSpaceSpec!, + }; +} + +/// The spec of multi-trial Neural Architecture Search (NAS). +class GoogleCloudAiplatformV1NasJobSpecMultiTrialAlgorithmSpec { + /// Metric specs for the NAS job. + /// + /// Validation for this field is done at `multi_trial_algorithm_spec` field. + GoogleCloudAiplatformV1NasJobSpecMultiTrialAlgorithmSpecMetricSpec? metric; + + /// The multi-trial Neural Architecture Search (NAS) algorithm type. + /// + /// Defaults to `REINFORCEMENT_LEARNING`. + /// Possible string values are: + /// - "MULTI_TRIAL_ALGORITHM_UNSPECIFIED" : Defaults to + /// `REINFORCEMENT_LEARNING`. + /// - "REINFORCEMENT_LEARNING" : The Reinforcement Learning Algorithm for + /// Multi-trial Neural Architecture Search (NAS). + /// - "GRID_SEARCH" : The Grid Search Algorithm for Multi-trial Neural + /// Architecture Search (NAS). + core.String? multiTrialAlgorithm; - /// Source information of Model Garden models. - GoogleCloudAiplatformV1ModelGardenSource? modelGardenSource; + /// Spec for search trials. + /// + /// Required. + GoogleCloudAiplatformV1NasJobSpecMultiTrialAlgorithmSpecSearchTrialSpec? + searchTrialSpec; - GoogleCloudAiplatformV1ModelBaseModelSource({ - this.genieSource, - this.modelGardenSource, + /// Spec for train trials. + /// + /// Top N \[TrainTrialSpec.max_parallel_trial_count\] search trials will be + /// trained for every M \[TrainTrialSpec.frequency\] trials searched. + GoogleCloudAiplatformV1NasJobSpecMultiTrialAlgorithmSpecTrainTrialSpec? + trainTrialSpec; + + GoogleCloudAiplatformV1NasJobSpecMultiTrialAlgorithmSpec({ + this.metric, + this.multiTrialAlgorithm, + this.searchTrialSpec, + this.trainTrialSpec, }); - GoogleCloudAiplatformV1ModelBaseModelSource.fromJson(core.Map json_) + GoogleCloudAiplatformV1NasJobSpecMultiTrialAlgorithmSpec.fromJson( + core.Map json_) : this( - genieSource: json_.containsKey('genieSource') - ? GoogleCloudAiplatformV1GenieSource.fromJson( - json_['genieSource'] as core.Map) + metric: json_.containsKey('metric') + ? GoogleCloudAiplatformV1NasJobSpecMultiTrialAlgorithmSpecMetricSpec + .fromJson( + json_['metric'] as core.Map) : null, - modelGardenSource: json_.containsKey('modelGardenSource') - ? GoogleCloudAiplatformV1ModelGardenSource.fromJson( - json_['modelGardenSource'] + multiTrialAlgorithm: json_['multiTrialAlgorithm'] as core.String?, + searchTrialSpec: json_.containsKey('searchTrialSpec') + ? GoogleCloudAiplatformV1NasJobSpecMultiTrialAlgorithmSpecSearchTrialSpec + .fromJson(json_['searchTrialSpec'] + as core.Map) + : null, + trainTrialSpec: json_.containsKey('trainTrialSpec') + ? 
GoogleCloudAiplatformV1NasJobSpecMultiTrialAlgorithmSpecTrainTrialSpec + .fromJson(json_['trainTrialSpec'] as core.Map) : null, ); core.Map toJson() => { - if (genieSource != null) 'genieSource': genieSource!, - if (modelGardenSource != null) 'modelGardenSource': modelGardenSource!, + if (metric != null) 'metric': metric!, + if (multiTrialAlgorithm != null) + 'multiTrialAlgorithm': multiTrialAlgorithm!, + if (searchTrialSpec != null) 'searchTrialSpec': searchTrialSpec!, + if (trainTrialSpec != null) 'trainTrialSpec': trainTrialSpec!, }; } -/// Specification of a container for serving predictions. -/// -/// Some fields in this message correspond to fields in the -/// [Kubernetes Container v1 core specification](https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.23/#container-v1-core). -class GoogleCloudAiplatformV1ModelContainerSpec { - /// Specifies arguments for the command that runs when the container starts. - /// - /// This overrides the container's - /// \[`CMD`\](https://docs.docker.com/engine/reference/builder/#cmd). Specify - /// this field as an array of executable and arguments, similar to a Docker - /// `CMD`'s "default parameters" form. If you don't specify this field but do - /// specify the command field, then the command from the `command` field runs - /// without any additional arguments. See the \[Kubernetes documentation about - /// how the `command` and `args` fields interact with a container's - /// `ENTRYPOINT` and - /// `CMD`\](https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#notes). - /// If you don't specify this field and don't specify the `command` field, - /// then the container's - /// \[`ENTRYPOINT`\](https://docs.docker.com/engine/reference/builder/#cmd) - /// and `CMD` determine what runs based on their default behavior. See the - /// Docker documentation about \[how `CMD` and `ENTRYPOINT` - /// interact\](https://docs.docker.com/engine/reference/builder/#understand-how-cmd-and-entrypoint-interact). - /// In this field, you can reference - /// [environment variables set by Vertex AI](https://cloud.google.com/vertex-ai/docs/predictions/custom-container-requirements#aip-variables) - /// and environment variables set in the env field. You cannot reference - /// environment variables set in the Docker image. In order for environment - /// variables to be expanded, reference them by using the following syntax: $( - /// VARIABLE_NAME) Note that this differs from Bash variable expansion, which - /// does not use parentheses. If a variable cannot be resolved, the reference - /// in the input string is used unchanged. To avoid variable expansion, you - /// can escape this syntax with `$$`; for example: $$(VARIABLE_NAME) This - /// field corresponds to the `args` field of the Kubernetes Containers - /// [v1 core API](https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.23/#container-v1-core). +/// Represents a metric to optimize. +class GoogleCloudAiplatformV1NasJobSpecMultiTrialAlgorithmSpecMetricSpec { + /// The optimization goal of the metric. /// - /// Immutable. - core.List? args; + /// Required. + /// Possible string values are: + /// - "GOAL_TYPE_UNSPECIFIED" : Goal Type will default to maximize. + /// - "MAXIMIZE" : Maximize the goal metric. + /// - "MINIMIZE" : Minimize the goal metric. + core.String? goal; - /// Specifies the command that runs when the container starts. + /// The ID of the metric. 
/// - /// This overrides the container's - /// [ENTRYPOINT](https://docs.docker.com/engine/reference/builder/#entrypoint). - /// Specify this field as an array of executable and arguments, similar to a - /// Docker `ENTRYPOINT`'s "exec" form, not its "shell" form. If you do not - /// specify this field, then the container's `ENTRYPOINT` runs, in conjunction - /// with the args field or the container's - /// \[`CMD`\](https://docs.docker.com/engine/reference/builder/#cmd), if - /// either exists. If this field is not specified and the container does not - /// have an `ENTRYPOINT`, then refer to the Docker documentation about \[how - /// `CMD` and `ENTRYPOINT` - /// interact\](https://docs.docker.com/engine/reference/builder/#understand-how-cmd-and-entrypoint-interact). - /// If you specify this field, then you can also specify the `args` field to - /// provide additional arguments for this command. However, if you specify - /// this field, then the container's `CMD` is ignored. See the \[Kubernetes - /// documentation about how the `command` and `args` fields interact with a - /// container's `ENTRYPOINT` and - /// `CMD`\](https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#notes). - /// In this field, you can reference - /// [environment variables set by Vertex AI](https://cloud.google.com/vertex-ai/docs/predictions/custom-container-requirements#aip-variables) - /// and environment variables set in the env field. You cannot reference - /// environment variables set in the Docker image. In order for environment - /// variables to be expanded, reference them by using the following syntax: $( - /// VARIABLE_NAME) Note that this differs from Bash variable expansion, which - /// does not use parentheses. If a variable cannot be resolved, the reference - /// in the input string is used unchanged. To avoid variable expansion, you - /// can escape this syntax with `$$`; for example: $$(VARIABLE_NAME) This - /// field corresponds to the `command` field of the Kubernetes Containers - /// [v1 core API](https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.23/#container-v1-core). + /// Must not contain whitespaces. /// - /// Immutable. - core.List? command; + /// Required. + core.String? metricId; - /// Deployment timeout. - /// - /// Limit for deployment timeout is 2 hours. - /// - /// Immutable. - core.String? deploymentTimeout; + GoogleCloudAiplatformV1NasJobSpecMultiTrialAlgorithmSpecMetricSpec({ + this.goal, + this.metricId, + }); - /// List of environment variables to set in the container. - /// - /// After the container starts running, code running in the container can read - /// these environment variables. Additionally, the command and args fields can - /// reference these variables. Later entries in this list can also reference - /// earlier entries. For example, the following example sets the variable - /// `VAR_2` to have the value `foo bar`: ```json [ { "name": "VAR_1", "value": - /// "foo" }, { "name": "VAR_2", "value": "$(VAR_1) bar" } ] ``` If you switch - /// the order of the variables in the example, then the expansion does not - /// occur. This field corresponds to the `env` field of the Kubernetes - /// Containers - /// [v1 core API](https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.23/#container-v1-core). - /// - /// Immutable. - core.List? 
env; + GoogleCloudAiplatformV1NasJobSpecMultiTrialAlgorithmSpecMetricSpec.fromJson( + core.Map json_) + : this( + goal: json_['goal'] as core.String?, + metricId: json_['metricId'] as core.String?, + ); - /// List of ports to expose from the container. - /// - /// Vertex AI sends gRPC prediction requests that it receives to the first - /// port on this list. Vertex AI also sends liveness and health checks to this - /// port. If you do not specify this field, gRPC requests to the container - /// will be disabled. Vertex AI does not use ports other than the first one - /// listed. This field corresponds to the `ports` field of the Kubernetes - /// Containers v1 core API. - /// - /// Immutable. - core.List? grpcPorts; + core.Map toJson() => { + if (goal != null) 'goal': goal!, + if (metricId != null) 'metricId': metricId!, + }; +} - /// Specification for Kubernetes readiness probe. +/// Represent spec for search trials. +class GoogleCloudAiplatformV1NasJobSpecMultiTrialAlgorithmSpecSearchTrialSpec { + /// The number of failed trials that need to be seen before failing the + /// NasJob. /// - /// Immutable. - GoogleCloudAiplatformV1Probe? healthProbe; + /// If set to 0, Vertex AI decides how many trials must fail before the whole + /// job fails. + core.int? maxFailedTrialCount; - /// HTTP path on the container to send health checks to. - /// - /// Vertex AI intermittently sends GET requests to this path on the - /// container's IP address and port to check that the container is healthy. - /// Read more about - /// [health checks](https://cloud.google.com/vertex-ai/docs/predictions/custom-container-requirements#health). - /// For example, if you set this field to `/bar`, then Vertex AI - /// intermittently sends a GET request to the `/bar` path on the port of your - /// container specified by the first value of this `ModelContainerSpec`'s - /// ports field. If you don't specify this field, it defaults to the following - /// value when you deploy this Model to an Endpoint: - /// /v1/endpoints/ENDPOINT/deployedModels/ DEPLOYED_MODEL:predict The - /// placeholders in this value are replaced as follows: * ENDPOINT: The last - /// segment (following `endpoints/`)of the Endpoint.name\]\[\] field of the - /// Endpoint where this Model has been deployed. (Vertex AI makes this value - /// available to your container code as the \[`AIP_ENDPOINT_ID` environment - /// variable\](https://cloud.google.com/vertex-ai/docs/predictions/custom-container-requirements#aip-variables).) - /// * DEPLOYED_MODEL: DeployedModel.id of the `DeployedModel`. (Vertex AI - /// makes this value available to your container code as the - /// \[`AIP_DEPLOYED_MODEL_ID` environment - /// variable\](https://cloud.google.com/vertex-ai/docs/predictions/custom-container-requirements#aip-variables).) + /// The maximum number of trials to run in parallel. /// - /// Immutable. - core.String? healthRoute; + /// Required. + core.int? maxParallelTrialCount; - /// URI of the Docker image to be used as the custom container for serving - /// predictions. - /// - /// This URI must identify an image in Artifact Registry or Container - /// Registry. Learn more about the - /// [container publishing requirements](https://cloud.google.com/vertex-ai/docs/predictions/custom-container-requirements#publishing), - /// including permissions requirements for the Vertex AI Service Agent. The - /// container image is ingested upon ModelService.UploadModel, stored - /// internally, and this original path is afterwards not used. 
To learn about - /// the requirements for the Docker image itself, see - /// [Custom container requirements](https://cloud.google.com/vertex-ai/docs/predictions/custom-container-requirements#). - /// You can use the URI to one of Vertex AI's \[pre-built container images for - /// prediction\](https://cloud.google.com/vertex-ai/docs/predictions/pre-built-containers) - /// in this field. + /// The maximum number of Neural Architecture Search (NAS) trials to run. /// - /// Required. Immutable. - core.String? imageUri; + /// Required. + core.int? maxTrialCount; - /// List of ports to expose from the container. + /// The spec of a search trial job. /// - /// Vertex AI sends any prediction requests that it receives to the first port - /// on this list. Vertex AI also sends - /// [liveness and health checks](https://cloud.google.com/vertex-ai/docs/predictions/custom-container-requirements#liveness) - /// to this port. If you do not specify this field, it defaults to following - /// value: ```json [ { "containerPort": 8080 } ] ``` Vertex AI does not use - /// ports other than the first one listed. This field corresponds to the - /// `ports` field of the Kubernetes Containers - /// [v1 core API](https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.23/#container-v1-core). + /// The same spec applies to all search trials. /// - /// Immutable. - core.List? ports; + /// Required. + GoogleCloudAiplatformV1CustomJobSpec? searchTrialJobSpec; - /// HTTP path on the container to send prediction requests to. + GoogleCloudAiplatformV1NasJobSpecMultiTrialAlgorithmSpecSearchTrialSpec({ + this.maxFailedTrialCount, + this.maxParallelTrialCount, + this.maxTrialCount, + this.searchTrialJobSpec, + }); + + GoogleCloudAiplatformV1NasJobSpecMultiTrialAlgorithmSpecSearchTrialSpec.fromJson( + core.Map json_) + : this( + maxFailedTrialCount: json_['maxFailedTrialCount'] as core.int?, + maxParallelTrialCount: json_['maxParallelTrialCount'] as core.int?, + maxTrialCount: json_['maxTrialCount'] as core.int?, + searchTrialJobSpec: json_.containsKey('searchTrialJobSpec') + ? GoogleCloudAiplatformV1CustomJobSpec.fromJson( + json_['searchTrialJobSpec'] + as core.Map) + : null, + ); + + core.Map toJson() => { + if (maxFailedTrialCount != null) + 'maxFailedTrialCount': maxFailedTrialCount!, + if (maxParallelTrialCount != null) + 'maxParallelTrialCount': maxParallelTrialCount!, + if (maxTrialCount != null) 'maxTrialCount': maxTrialCount!, + if (searchTrialJobSpec != null) + 'searchTrialJobSpec': searchTrialJobSpec!, + }; +} + +/// Represent spec for train trials. +class GoogleCloudAiplatformV1NasJobSpecMultiTrialAlgorithmSpecTrainTrialSpec { + /// Frequency of search trials to start train stage. /// - /// Vertex AI forwards requests sent using - /// projects.locations.endpoints.predict to this path on the container's IP - /// address and port. Vertex AI then returns the container's response in the - /// API response. For example, if you set this field to `/foo`, then when - /// Vertex AI receives a prediction request, it forwards the request body in a - /// POST request to the `/foo` path on the port of your container specified by - /// the first value of this `ModelContainerSpec`'s ports field. 
If you don't - /// specify this field, it defaults to the following value when you deploy - /// this Model to an Endpoint: - /// /v1/endpoints/ENDPOINT/deployedModels/DEPLOYED_MODEL:predict The - /// placeholders in this value are replaced as follows: * ENDPOINT: The last - /// segment (following `endpoints/`)of the Endpoint.name\]\[\] field of the - /// Endpoint where this Model has been deployed. (Vertex AI makes this value - /// available to your container code as the \[`AIP_ENDPOINT_ID` environment - /// variable\](https://cloud.google.com/vertex-ai/docs/predictions/custom-container-requirements#aip-variables).) - /// * DEPLOYED_MODEL: DeployedModel.id of the `DeployedModel`. (Vertex AI - /// makes this value available to your container code as the - /// \[`AIP_DEPLOYED_MODEL_ID` environment - /// variable\](https://cloud.google.com/vertex-ai/docs/predictions/custom-container-requirements#aip-variables).) + /// Top N \[TrainTrialSpec.max_parallel_trial_count\] search trials will be + /// trained for every M \[TrainTrialSpec.frequency\] trials searched. /// - /// Immutable. - core.String? predictRoute; + /// Required. + core.int? frequency; - /// The amount of the VM memory to reserve as the shared memory for the model - /// in megabytes. + /// The maximum number of trials to run in parallel. /// - /// Immutable. - core.String? sharedMemorySizeMb; + /// Required. + core.int? maxParallelTrialCount; - /// Specification for Kubernetes startup probe. + /// The spec of a train trial job. /// - /// Immutable. - GoogleCloudAiplatformV1Probe? startupProbe; + /// The same spec applies to all train trials. + /// + /// Required. + GoogleCloudAiplatformV1CustomJobSpec? trainTrialJobSpec; - GoogleCloudAiplatformV1ModelContainerSpec({ - this.args, - this.command, - this.deploymentTimeout, - this.env, - this.grpcPorts, - this.healthProbe, - this.healthRoute, - this.imageUri, - this.ports, - this.predictRoute, - this.sharedMemorySizeMb, - this.startupProbe, + GoogleCloudAiplatformV1NasJobSpecMultiTrialAlgorithmSpecTrainTrialSpec({ + this.frequency, + this.maxParallelTrialCount, + this.trainTrialJobSpec, }); - GoogleCloudAiplatformV1ModelContainerSpec.fromJson(core.Map json_) + GoogleCloudAiplatformV1NasJobSpecMultiTrialAlgorithmSpecTrainTrialSpec.fromJson( + core.Map json_) : this( - args: (json_['args'] as core.List?) - ?.map((value) => value as core.String) - .toList(), - command: (json_['command'] as core.List?) - ?.map((value) => value as core.String) - .toList(), - deploymentTimeout: json_['deploymentTimeout'] as core.String?, - env: (json_['env'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1EnvVar.fromJson( - value as core.Map)) - .toList(), - grpcPorts: (json_['grpcPorts'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1Port.fromJson( - value as core.Map)) - .toList(), - healthProbe: json_.containsKey('healthProbe') - ? GoogleCloudAiplatformV1Probe.fromJson( - json_['healthProbe'] as core.Map) - : null, - healthRoute: json_['healthRoute'] as core.String?, - imageUri: json_['imageUri'] as core.String?, - ports: (json_['ports'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1Port.fromJson( - value as core.Map)) - .toList(), - predictRoute: json_['predictRoute'] as core.String?, - sharedMemorySizeMb: json_['sharedMemorySizeMb'] as core.String?, - startupProbe: json_.containsKey('startupProbe') - ? 
GoogleCloudAiplatformV1Probe.fromJson( - json_['startupProbe'] as core.Map) + frequency: json_['frequency'] as core.int?, + maxParallelTrialCount: json_['maxParallelTrialCount'] as core.int?, + trainTrialJobSpec: json_.containsKey('trainTrialJobSpec') + ? GoogleCloudAiplatformV1CustomJobSpec.fromJson( + json_['trainTrialJobSpec'] + as core.Map) : null, ); core.Map toJson() => { - if (args != null) 'args': args!, - if (command != null) 'command': command!, - if (deploymentTimeout != null) 'deploymentTimeout': deploymentTimeout!, - if (env != null) 'env': env!, - if (grpcPorts != null) 'grpcPorts': grpcPorts!, - if (healthProbe != null) 'healthProbe': healthProbe!, - if (healthRoute != null) 'healthRoute': healthRoute!, - if (imageUri != null) 'imageUri': imageUri!, - if (ports != null) 'ports': ports!, - if (predictRoute != null) 'predictRoute': predictRoute!, - if (sharedMemorySizeMb != null) - 'sharedMemorySizeMb': sharedMemorySizeMb!, - if (startupProbe != null) 'startupProbe': startupProbe!, + if (frequency != null) 'frequency': frequency!, + if (maxParallelTrialCount != null) + 'maxParallelTrialCount': maxParallelTrialCount!, + if (trainTrialJobSpec != null) 'trainTrialJobSpec': trainTrialJobSpec!, }; } -/// Stats of data used for train or evaluate the Model. -class GoogleCloudAiplatformV1ModelDataStats { - /// Number of Annotations that are used for evaluating this Model. +/// Represents a uCAIP NasJob trial. +class GoogleCloudAiplatformV1NasTrial { + /// Time when the NasTrial's status changed to `SUCCEEDED` or `INFEASIBLE`. /// - /// If the Model is evaluated multiple times, this will be the number of test - /// Annotations used by the first evaluation. If the Model is not evaluated, - /// the number is 0. - core.String? testAnnotationsCount; + /// Output only. + core.String? endTime; - /// Number of DataItems that were used for evaluating this Model. + /// The final measurement containing the objective value. /// - /// If the Model is evaluated multiple times, this will be the number of test - /// DataItems used by the first evaluation. If the Model is not evaluated, the - /// number is 0. - core.String? testDataItemsCount; - - /// Number of Annotations that are used for training this Model. - core.String? trainingAnnotationsCount; + /// Output only. + GoogleCloudAiplatformV1Measurement? finalMeasurement; - /// Number of DataItems that were used for training this Model. - core.String? trainingDataItemsCount; + /// The identifier of the NasTrial assigned by the service. + /// + /// Output only. + core.String? id; - /// Number of Annotations that are used for validating this Model during - /// training. - core.String? validationAnnotationsCount; + /// Time when the NasTrial was started. + /// + /// Output only. + core.String? startTime; - /// Number of DataItems that were used for validating this Model during - /// training. - core.String? validationDataItemsCount; + /// The detailed state of the NasTrial. + /// + /// Output only. + /// Possible string values are: + /// - "STATE_UNSPECIFIED" : The NasTrial state is unspecified. + /// - "REQUESTED" : Indicates that a specific NasTrial has been requested, but + /// it has not yet been suggested by the service. + /// - "ACTIVE" : Indicates that the NasTrial has been suggested. + /// - "STOPPING" : Indicates that the NasTrial should stop according to the + /// service. + /// - "SUCCEEDED" : Indicates that the NasTrial is completed successfully. + /// - "INFEASIBLE" : Indicates that the NasTrial should not be attempted + /// again. 
The service will set a NasTrial to INFEASIBLE when it's done but + /// missing the final_measurement. + core.String? state; - GoogleCloudAiplatformV1ModelDataStats({ - this.testAnnotationsCount, - this.testDataItemsCount, - this.trainingAnnotationsCount, - this.trainingDataItemsCount, - this.validationAnnotationsCount, - this.validationDataItemsCount, + GoogleCloudAiplatformV1NasTrial({ + this.endTime, + this.finalMeasurement, + this.id, + this.startTime, + this.state, }); - GoogleCloudAiplatformV1ModelDataStats.fromJson(core.Map json_) + GoogleCloudAiplatformV1NasTrial.fromJson(core.Map json_) : this( - testAnnotationsCount: json_['testAnnotationsCount'] as core.String?, - testDataItemsCount: json_['testDataItemsCount'] as core.String?, - trainingAnnotationsCount: - json_['trainingAnnotationsCount'] as core.String?, - trainingDataItemsCount: - json_['trainingDataItemsCount'] as core.String?, - validationAnnotationsCount: - json_['validationAnnotationsCount'] as core.String?, - validationDataItemsCount: - json_['validationDataItemsCount'] as core.String?, - ); - - core.Map toJson() => { - if (testAnnotationsCount != null) - 'testAnnotationsCount': testAnnotationsCount!, - if (testDataItemsCount != null) - 'testDataItemsCount': testDataItemsCount!, - if (trainingAnnotationsCount != null) - 'trainingAnnotationsCount': trainingAnnotationsCount!, - if (trainingDataItemsCount != null) - 'trainingDataItemsCount': trainingDataItemsCount!, - if (validationAnnotationsCount != null) - 'validationAnnotationsCount': validationAnnotationsCount!, - if (validationDataItemsCount != null) - 'validationDataItemsCount': validationDataItemsCount!, + endTime: json_['endTime'] as core.String?, + finalMeasurement: json_.containsKey('finalMeasurement') + ? GoogleCloudAiplatformV1Measurement.fromJson( + json_['finalMeasurement'] + as core.Map) + : null, + id: json_['id'] as core.String?, + startTime: json_['startTime'] as core.String?, + state: json_['state'] as core.String?, + ); + + core.Map toJson() => { + if (endTime != null) 'endTime': endTime!, + if (finalMeasurement != null) 'finalMeasurement': finalMeasurement!, + if (id != null) 'id': id!, + if (startTime != null) 'startTime': startTime!, + if (state != null) 'state': state!, }; } -/// ModelDeploymentMonitoringBigQueryTable specifies the BigQuery table name as -/// well as some information of the logs stored in this table. -class GoogleCloudAiplatformV1ModelDeploymentMonitoringBigQueryTable { - /// The created BigQuery table to store logs. +/// Represents a NasTrial details along with its parameters. +/// +/// If there is a corresponding train NasTrial, the train NasTrial is also +/// returned. +class GoogleCloudAiplatformV1NasTrialDetail { + /// Resource name of the NasTrialDetail. /// - /// Customer could do their own query & analysis. Format: - /// `bq://.model_deployment_monitoring_._` - core.String? bigqueryTablePath; + /// Output only. + core.String? name; - /// The source of log. - /// Possible string values are: - /// - "LOG_SOURCE_UNSPECIFIED" : Unspecified source. - /// - "TRAINING" : Logs coming from Training dataset. - /// - "SERVING" : Logs coming from Serving traffic. - core.String? logSource; + /// The parameters for the NasJob NasTrial. + core.String? parameters; - /// The type of log. - /// Possible string values are: - /// - "LOG_TYPE_UNSPECIFIED" : Unspecified type. - /// - "PREDICT" : Predict logs. - /// - "EXPLAIN" : Explain logs. - core.String? logType; + /// The requested search NasTrial. + GoogleCloudAiplatformV1NasTrial? 
searchTrial; - /// The schema version of the request/response logging BigQuery table. - /// - /// Default to v1 if unset. + /// The train NasTrial corresponding to search_trial. /// - /// Output only. - core.String? requestResponseLoggingSchemaVersion; + /// Only populated if search_trial is used for training. + GoogleCloudAiplatformV1NasTrial? trainTrial; - GoogleCloudAiplatformV1ModelDeploymentMonitoringBigQueryTable({ - this.bigqueryTablePath, - this.logSource, - this.logType, - this.requestResponseLoggingSchemaVersion, + GoogleCloudAiplatformV1NasTrialDetail({ + this.name, + this.parameters, + this.searchTrial, + this.trainTrial, }); - GoogleCloudAiplatformV1ModelDeploymentMonitoringBigQueryTable.fromJson( - core.Map json_) + GoogleCloudAiplatformV1NasTrialDetail.fromJson(core.Map json_) : this( - bigqueryTablePath: json_['bigqueryTablePath'] as core.String?, - logSource: json_['logSource'] as core.String?, - logType: json_['logType'] as core.String?, - requestResponseLoggingSchemaVersion: - json_['requestResponseLoggingSchemaVersion'] as core.String?, + name: json_['name'] as core.String?, + parameters: json_['parameters'] as core.String?, + searchTrial: json_.containsKey('searchTrial') + ? GoogleCloudAiplatformV1NasTrial.fromJson( + json_['searchTrial'] as core.Map) + : null, + trainTrial: json_.containsKey('trainTrial') + ? GoogleCloudAiplatformV1NasTrial.fromJson( + json_['trainTrial'] as core.Map) + : null, ); core.Map toJson() => { - if (bigqueryTablePath != null) 'bigqueryTablePath': bigqueryTablePath!, - if (logSource != null) 'logSource': logSource!, - if (logType != null) 'logType': logType!, - if (requestResponseLoggingSchemaVersion != null) - 'requestResponseLoggingSchemaVersion': - requestResponseLoggingSchemaVersion!, + if (name != null) 'name': name!, + if (parameters != null) 'parameters': parameters!, + if (searchTrial != null) 'searchTrial': searchTrial!, + if (trainTrial != null) 'trainTrial': trainTrial!, }; } -/// Represents a job that runs periodically to monitor the deployed models in an -/// endpoint. -/// -/// It will analyze the logged training & prediction data to detect any abnormal -/// behaviors. -class GoogleCloudAiplatformV1ModelDeploymentMonitoringJob { - /// YAML schema file uri describing the format of a single instance that you - /// want Tensorflow Data Validation (TFDV) to analyze. +/// A query to find a number of similar entities. +class GoogleCloudAiplatformV1NearestNeighborQuery { + /// The embedding vector that be used for similar search. /// - /// If this field is empty, all the feature data types are inferred from - /// predict_instance_schema_uri, meaning that TFDV will use the data in the - /// exact format(data type) as prediction request/response. If there are any - /// data type differences between predict instance and TFDV instance, this - /// field can be used to override the schema. For models trained with Vertex - /// AI, this field must be set as all the fields in predict instance formatted - /// as string. - core.String? analysisInstanceSchemaUri; + /// Optional. + GoogleCloudAiplatformV1NearestNeighborQueryEmbedding? embedding; - /// The created bigquery tables for the job under customer project. - /// - /// Customer could do their own query & analysis. There could be 4 log tables - /// in maximum: 1. Training data logging predict request/response 2. Serving - /// data logging predict request/response + /// The entity id whose similar entities should be searched for. /// - /// Output only. - core.List? 
- bigqueryTables;
-
- /// Timestamp when this ModelDeploymentMonitoringJob was created.
+ /// If embedding is set, search will use embedding instead of entity_id.
///
- /// Output only.
- core.String? createTime;
+ /// Optional.
+ core.String? entityId;
- /// The user-defined name of the ModelDeploymentMonitoringJob.
- ///
- /// The name can be up to 128 characters long and can consist of any UTF-8
- /// characters. Display name of a ModelDeploymentMonitoringJob.
+ /// The number of similar entities to be retrieved from feature view for each
+ /// query.
///
- /// Required.
- core.String? displayName;
+ /// Optional.
+ core.int? neighborCount;
- /// If true, the scheduled monitoring pipeline logs are sent to Google Cloud
- /// Logging, including pipeline status and anomalies detected.
+ /// The list of numeric filters.
///
- /// Please note the logs incur cost, which are subject to
- /// [Cloud Logging pricing](https://cloud.google.com/logging#pricing).
- core.bool? enableMonitoringPipelineLogs;
+ /// Optional.
+ core.List?
+ numericFilters;
- /// Customer-managed encryption key spec for a ModelDeploymentMonitoringJob.
+ /// Parameters that can be set to tune the query on the fly.
///
- /// If set, this ModelDeploymentMonitoringJob and all sub-resources of this
- /// ModelDeploymentMonitoringJob will be secured by this key.
- GoogleCloudAiplatformV1EncryptionSpec? encryptionSpec;
+ /// Optional.
+ GoogleCloudAiplatformV1NearestNeighborQueryParameters? parameters;
- /// Endpoint resource name.
+ /// Crowding is a constraint on a neighbor list produced by nearest neighbor
+ /// search requiring that no more than per_crowding_attribute_neighbor_count
+ /// of the k neighbors returned have the same value of crowding_attribute.
///
- /// Format: `projects/{project}/locations/{location}/endpoints/{endpoint}`
+ /// It's used for improving result diversity.
///
- /// Required.
- core.String? endpoint;
+ /// Optional.
+ core.int? perCrowdingAttributeNeighborCount;
- /// Only populated when the job's state is `JOB_STATE_FAILED` or
- /// `JOB_STATE_CANCELLED`.
+ /// The list of string filters.
///
- /// Output only.
- GoogleRpcStatus? error;
+ /// Optional.
+ core.List?
+ stringFilters;
- /// The labels with user-defined metadata to organize your
- /// ModelDeploymentMonitoringJob.
- ///
- /// Label keys and values can be no longer than 64 characters (Unicode
- /// codepoints), can only contain lowercase letters, numeric characters,
- /// underscores and dashes. International characters are allowed. See
- /// https://goo.gl/xmQnxf for more information and examples of labels.
- core.Map? labels;
+ GoogleCloudAiplatformV1NearestNeighborQuery({
+ this.embedding,
+ this.entityId,
+ this.neighborCount,
+ this.numericFilters,
+ this.parameters,
+ this.perCrowdingAttributeNeighborCount,
+ this.stringFilters,
+ });
- /// Latest triggered monitoring pipeline metadata.
- ///
- /// Output only.
- GoogleCloudAiplatformV1ModelDeploymentMonitoringJobLatestMonitoringPipelineMetadata?
- latestMonitoringPipelineMetadata;
+ GoogleCloudAiplatformV1NearestNeighborQuery.fromJson(core.Map json_)
+ : this(
+ embedding: json_.containsKey('embedding')
+ ? GoogleCloudAiplatformV1NearestNeighborQueryEmbedding.fromJson(
+ json_['embedding'] as core.Map)
+ : null,
+ entityId: json_['entityId'] as core.String?,
+ neighborCount: json_['neighborCount'] as core.int?,
+ numericFilters: (json_['numericFilters'] as core.List?)
+ ?.map((value) => + GoogleCloudAiplatformV1NearestNeighborQueryNumericFilter + .fromJson(value as core.Map)) + .toList(), + parameters: json_.containsKey('parameters') + ? GoogleCloudAiplatformV1NearestNeighborQueryParameters.fromJson( + json_['parameters'] as core.Map) + : null, + perCrowdingAttributeNeighborCount: + json_['perCrowdingAttributeNeighborCount'] as core.int?, + stringFilters: (json_['stringFilters'] as core.List?) + ?.map((value) => + GoogleCloudAiplatformV1NearestNeighborQueryStringFilter + .fromJson(value as core.Map)) + .toList(), + ); - /// The TTL of BigQuery tables in user projects which stores logs. - /// - /// A day is the basic unit of the TTL and we take the ceil of TTL/86400(a - /// day). e.g. { second: 3600} indicates ttl = 1 day. - core.String? logTtl; + core.Map toJson() => { + if (embedding != null) 'embedding': embedding!, + if (entityId != null) 'entityId': entityId!, + if (neighborCount != null) 'neighborCount': neighborCount!, + if (numericFilters != null) 'numericFilters': numericFilters!, + if (parameters != null) 'parameters': parameters!, + if (perCrowdingAttributeNeighborCount != null) + 'perCrowdingAttributeNeighborCount': + perCrowdingAttributeNeighborCount!, + if (stringFilters != null) 'stringFilters': stringFilters!, + }; +} - /// Sample Strategy for logging. +/// The embedding vector. +class GoogleCloudAiplatformV1NearestNeighborQueryEmbedding { + /// Individual value in the embedding. /// - /// Required. - GoogleCloudAiplatformV1SamplingStrategy? loggingSamplingStrategy; + /// Optional. + core.List? value; - /// The config for monitoring objectives. - /// - /// This is a per DeployedModel config. Each DeployedModel needs to be - /// configured separately. - /// - /// Required. - core.List? - modelDeploymentMonitoringObjectiveConfigs; + GoogleCloudAiplatformV1NearestNeighborQueryEmbedding({ + this.value, + }); - /// Schedule config for running the monitoring job. - /// - /// Required. - GoogleCloudAiplatformV1ModelDeploymentMonitoringScheduleConfig? - modelDeploymentMonitoringScheduleConfig; + GoogleCloudAiplatformV1NearestNeighborQueryEmbedding.fromJson(core.Map json_) + : this( + value: (json_['value'] as core.List?) + ?.map((value) => (value as core.num).toDouble()) + .toList(), + ); - /// Alert config for model monitoring. - GoogleCloudAiplatformV1ModelMonitoringAlertConfig? modelMonitoringAlertConfig; + core.Map toJson() => { + if (value != null) 'value': value!, + }; +} - /// Resource name of a ModelDeploymentMonitoringJob. +/// Numeric filter is used to search a subset of the entities by using boolean +/// rules on numeric columns. +/// +/// For example: Database Point 0: {name: "a" value_int: 42} {name: "b" +/// value_float: 1.0} Database Point 1: {name: "a" value_int: 10} {name: "b" +/// value_float: 2.0} Database Point 2: {name: "a" value_int: -1} {name: "b" +/// value_float: 3.0} Query: {name: "a" value_int: 12 operator: LESS} // Matches +/// Point 1, 2 {name: "b" value_float: 2.0 operator: EQUAL} // Matches Point 1 +class GoogleCloudAiplatformV1NearestNeighborQueryNumericFilter { + /// Column name in BigQuery that used as filters. /// - /// Output only. + /// Required. core.String? name; - /// Timestamp when this monitoring pipeline will be scheduled to run for the - /// next round. + /// This MUST be specified for queries and must NOT be specified for database + /// points. /// - /// Output only. - core.String? nextScheduleTime; + /// Optional. + /// Possible string values are: + /// - "OPERATOR_UNSPECIFIED" : Unspecified operator. 
+ /// - "LESS" : Entities are eligible if their value is \< the query's. + /// - "LESS_EQUAL" : Entities are eligible if their value is \<= the query's. + /// - "EQUAL" : Entities are eligible if their value is == the query's. + /// - "GREATER_EQUAL" : Entities are eligible if their value is \>= the + /// query's. + /// - "GREATER" : Entities are eligible if their value is \> the query's. + /// - "NOT_EQUAL" : Entities are eligible if their value is != the query's. + core.String? op; - /// YAML schema file uri describing the format of a single instance, which are - /// given to format this Endpoint's prediction (and explanation). - /// - /// If not set, we will generate predict schema from collected predict - /// requests. - core.String? predictInstanceSchemaUri; + /// double value type. + core.double? valueDouble; - /// Sample Predict instance, same format as PredictRequest.instances, this can - /// be set as a replacement of - /// ModelDeploymentMonitoringJob.predict_instance_schema_uri. - /// - /// If not set, we will generate predict schema from collected predict - /// requests. - /// - /// The values for Object must be JSON objects. It can consist of `num`, - /// `String`, `bool` and `null` as well as `Map` and `List` values. - core.Object? samplePredictInstance; + /// float value type. + core.double? valueFloat; - /// Reserved for future use. - /// - /// Output only. - core.bool? satisfiesPzi; + /// int value type. + core.String? valueInt; - /// Reserved for future use. - /// - /// Output only. - core.bool? satisfiesPzs; + GoogleCloudAiplatformV1NearestNeighborQueryNumericFilter({ + this.name, + this.op, + this.valueDouble, + this.valueFloat, + this.valueInt, + }); - /// Schedule state when the monitoring job is in Running state. - /// - /// Output only. - /// Possible string values are: - /// - "MONITORING_SCHEDULE_STATE_UNSPECIFIED" : Unspecified state. - /// - "PENDING" : The pipeline is picked up and wait to run. - /// - "OFFLINE" : The pipeline is offline and will be scheduled for next run. - /// - "RUNNING" : The pipeline is running. - core.String? scheduleState; + GoogleCloudAiplatformV1NearestNeighborQueryNumericFilter.fromJson( + core.Map json_) + : this( + name: json_['name'] as core.String?, + op: json_['op'] as core.String?, + valueDouble: (json_['valueDouble'] as core.num?)?.toDouble(), + valueFloat: (json_['valueFloat'] as core.num?)?.toDouble(), + valueInt: json_['valueInt'] as core.String?, + ); - /// The detailed state of the monitoring job. - /// - /// When the job is still creating, the state will be 'PENDING'. Once the job - /// is successfully created, the state will be 'RUNNING'. Pause the job, the - /// state will be 'PAUSED'. Resume the job, the state will return to - /// 'RUNNING'. - /// - /// Output only. - /// Possible string values are: - /// - "JOB_STATE_UNSPECIFIED" : The job state is unspecified. - /// - "JOB_STATE_QUEUED" : The job has been just created or resumed and - /// processing has not yet begun. - /// - "JOB_STATE_PENDING" : The service is preparing to run the job. - /// - "JOB_STATE_RUNNING" : The job is in progress. - /// - "JOB_STATE_SUCCEEDED" : The job completed successfully. - /// - "JOB_STATE_FAILED" : The job failed. - /// - "JOB_STATE_CANCELLING" : The job is being cancelled. From this state the - /// job may only go to either `JOB_STATE_SUCCEEDED`, `JOB_STATE_FAILED` or - /// `JOB_STATE_CANCELLED`. - /// - "JOB_STATE_CANCELLED" : The job has been cancelled. - /// - "JOB_STATE_PAUSED" : The job has been stopped, and can be resumed. 
- /// - "JOB_STATE_EXPIRED" : The job has expired. - /// - "JOB_STATE_UPDATING" : The job is being updated. Only jobs in the - /// `RUNNING` state can be updated. After updating, the job goes back to the - /// `RUNNING` state. - /// - "JOB_STATE_PARTIALLY_SUCCEEDED" : The job is partially succeeded, some - /// results may be missing due to errors. - core.String? state; + core.Map toJson() => { + if (name != null) 'name': name!, + if (op != null) 'op': op!, + if (valueDouble != null) 'valueDouble': valueDouble!, + if (valueFloat != null) 'valueFloat': valueFloat!, + if (valueInt != null) 'valueInt': valueInt!, + }; +} - /// Stats anomalies base folder path. - GoogleCloudAiplatformV1GcsDestination? statsAnomaliesBaseDirectory; +/// Parameters that can be overrided in each query to tune query latency and +/// recall. +class GoogleCloudAiplatformV1NearestNeighborQueryParameters { + /// The number of neighbors to find via approximate search before exact + /// reordering is performed; if set, this value must be \> neighbor_count. + /// + /// Optional. + core.int? approximateNeighborCandidates; - /// Timestamp when this ModelDeploymentMonitoringJob was updated most - /// recently. + /// The fraction of the number of leaves to search, set at query time allows + /// user to tune search performance. /// - /// Output only. - core.String? updateTime; + /// This value increase result in both search accuracy and latency increase. + /// The value should be between 0.0 and 1.0. + /// + /// Optional. + core.double? leafNodesSearchFraction; - GoogleCloudAiplatformV1ModelDeploymentMonitoringJob({ - this.analysisInstanceSchemaUri, - this.bigqueryTables, - this.createTime, - this.displayName, - this.enableMonitoringPipelineLogs, - this.encryptionSpec, - this.endpoint, - this.error, - this.labels, - this.latestMonitoringPipelineMetadata, - this.logTtl, - this.loggingSamplingStrategy, - this.modelDeploymentMonitoringObjectiveConfigs, - this.modelDeploymentMonitoringScheduleConfig, - this.modelMonitoringAlertConfig, - this.name, - this.nextScheduleTime, - this.predictInstanceSchemaUri, - this.samplePredictInstance, - this.satisfiesPzi, - this.satisfiesPzs, - this.scheduleState, - this.state, - this.statsAnomaliesBaseDirectory, - this.updateTime, + GoogleCloudAiplatformV1NearestNeighborQueryParameters({ + this.approximateNeighborCandidates, + this.leafNodesSearchFraction, }); - GoogleCloudAiplatformV1ModelDeploymentMonitoringJob.fromJson(core.Map json_) + GoogleCloudAiplatformV1NearestNeighborQueryParameters.fromJson(core.Map json_) : this( - analysisInstanceSchemaUri: - json_['analysisInstanceSchemaUri'] as core.String?, - bigqueryTables: (json_['bigqueryTables'] as core.List?) - ?.map((value) => - GoogleCloudAiplatformV1ModelDeploymentMonitoringBigQueryTable - .fromJson(value as core.Map)) - .toList(), - createTime: json_['createTime'] as core.String?, - displayName: json_['displayName'] as core.String?, - enableMonitoringPipelineLogs: - json_['enableMonitoringPipelineLogs'] as core.bool?, - encryptionSpec: json_.containsKey('encryptionSpec') - ? GoogleCloudAiplatformV1EncryptionSpec.fromJson( - json_['encryptionSpec'] - as core.Map) - : null, - endpoint: json_['endpoint'] as core.String?, - error: json_.containsKey('error') - ? GoogleRpcStatus.fromJson( - json_['error'] as core.Map) - : null, - labels: - (json_['labels'] as core.Map?)?.map( - (key, value) => core.MapEntry( - key, - value as core.String, - ), - ), - latestMonitoringPipelineMetadata: json_ - .containsKey('latestMonitoringPipelineMetadata') - ? 
GoogleCloudAiplatformV1ModelDeploymentMonitoringJobLatestMonitoringPipelineMetadata - .fromJson(json_['latestMonitoringPipelineMetadata'] - as core.Map) - : null, - logTtl: json_['logTtl'] as core.String?, - loggingSamplingStrategy: json_.containsKey('loggingSamplingStrategy') - ? GoogleCloudAiplatformV1SamplingStrategy.fromJson( - json_['loggingSamplingStrategy'] - as core.Map) - : null, - modelDeploymentMonitoringObjectiveConfigs: (json_[ - 'modelDeploymentMonitoringObjectiveConfigs'] as core.List?) - ?.map((value) => - GoogleCloudAiplatformV1ModelDeploymentMonitoringObjectiveConfig - .fromJson(value as core.Map)) + approximateNeighborCandidates: + json_['approximateNeighborCandidates'] as core.int?, + leafNodesSearchFraction: + (json_['leafNodesSearchFraction'] as core.num?)?.toDouble(), + ); + + core.Map toJson() => { + if (approximateNeighborCandidates != null) + 'approximateNeighborCandidates': approximateNeighborCandidates!, + if (leafNodesSearchFraction != null) + 'leafNodesSearchFraction': leafNodesSearchFraction!, + }; +} + +/// String filter is used to search a subset of the entities by using boolean +/// rules on string columns. +/// +/// For example: if a query specifies string filter with 'name = color, +/// allow_tokens = {red, blue}, deny_tokens = {purple}',' then that query will +/// match entities that are red or blue, but if those points are also purple, +/// then they will be excluded even if they are red/blue. Only string filter is +/// supported for now, numeric filter will be supported in the near future. +class GoogleCloudAiplatformV1NearestNeighborQueryStringFilter { + /// The allowed tokens. + /// + /// Optional. + core.List? allowTokens; + + /// The denied tokens. + /// + /// Optional. + core.List? denyTokens; + + /// Column names in BigQuery that used as filters. + /// + /// Required. + core.String? name; + + GoogleCloudAiplatformV1NearestNeighborQueryStringFilter({ + this.allowTokens, + this.denyTokens, + this.name, + }); + + GoogleCloudAiplatformV1NearestNeighborQueryStringFilter.fromJson( + core.Map json_) + : this( + allowTokens: (json_['allowTokens'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + denyTokens: (json_['denyTokens'] as core.List?) + ?.map((value) => value as core.String) .toList(), - modelDeploymentMonitoringScheduleConfig: json_ - .containsKey('modelDeploymentMonitoringScheduleConfig') - ? GoogleCloudAiplatformV1ModelDeploymentMonitoringScheduleConfig - .fromJson(json_['modelDeploymentMonitoringScheduleConfig'] - as core.Map) - : null, - modelMonitoringAlertConfig: - json_.containsKey('modelMonitoringAlertConfig') - ? GoogleCloudAiplatformV1ModelMonitoringAlertConfig.fromJson( - json_['modelMonitoringAlertConfig'] - as core.Map) - : null, name: json_['name'] as core.String?, - nextScheduleTime: json_['nextScheduleTime'] as core.String?, - predictInstanceSchemaUri: - json_['predictInstanceSchemaUri'] as core.String?, - samplePredictInstance: json_['samplePredictInstance'], - satisfiesPzi: json_['satisfiesPzi'] as core.bool?, - satisfiesPzs: json_['satisfiesPzs'] as core.bool?, - scheduleState: json_['scheduleState'] as core.String?, - state: json_['state'] as core.String?, - statsAnomaliesBaseDirectory: - json_.containsKey('statsAnomaliesBaseDirectory') - ? 
GoogleCloudAiplatformV1GcsDestination.fromJson( - json_['statsAnomaliesBaseDirectory'] - as core.Map) - : null, - updateTime: json_['updateTime'] as core.String?, ); core.Map toJson() => { - if (analysisInstanceSchemaUri != null) - 'analysisInstanceSchemaUri': analysisInstanceSchemaUri!, - if (bigqueryTables != null) 'bigqueryTables': bigqueryTables!, - if (createTime != null) 'createTime': createTime!, - if (displayName != null) 'displayName': displayName!, - if (enableMonitoringPipelineLogs != null) - 'enableMonitoringPipelineLogs': enableMonitoringPipelineLogs!, - if (encryptionSpec != null) 'encryptionSpec': encryptionSpec!, - if (endpoint != null) 'endpoint': endpoint!, - if (error != null) 'error': error!, - if (labels != null) 'labels': labels!, - if (latestMonitoringPipelineMetadata != null) - 'latestMonitoringPipelineMetadata': latestMonitoringPipelineMetadata!, - if (logTtl != null) 'logTtl': logTtl!, - if (loggingSamplingStrategy != null) - 'loggingSamplingStrategy': loggingSamplingStrategy!, - if (modelDeploymentMonitoringObjectiveConfigs != null) - 'modelDeploymentMonitoringObjectiveConfigs': - modelDeploymentMonitoringObjectiveConfigs!, - if (modelDeploymentMonitoringScheduleConfig != null) - 'modelDeploymentMonitoringScheduleConfig': - modelDeploymentMonitoringScheduleConfig!, - if (modelMonitoringAlertConfig != null) - 'modelMonitoringAlertConfig': modelMonitoringAlertConfig!, + if (allowTokens != null) 'allowTokens': allowTokens!, + if (denyTokens != null) 'denyTokens': denyTokens!, if (name != null) 'name': name!, - if (nextScheduleTime != null) 'nextScheduleTime': nextScheduleTime!, - if (predictInstanceSchemaUri != null) - 'predictInstanceSchemaUri': predictInstanceSchemaUri!, - if (samplePredictInstance != null) - 'samplePredictInstance': samplePredictInstance!, - if (satisfiesPzi != null) 'satisfiesPzi': satisfiesPzi!, - if (satisfiesPzs != null) 'satisfiesPzs': satisfiesPzs!, - if (scheduleState != null) 'scheduleState': scheduleState!, - if (state != null) 'state': state!, - if (statsAnomaliesBaseDirectory != null) - 'statsAnomaliesBaseDirectory': statsAnomaliesBaseDirectory!, - if (updateTime != null) 'updateTime': updateTime!, }; } -/// All metadata of most recent monitoring pipelines. -class GoogleCloudAiplatformV1ModelDeploymentMonitoringJobLatestMonitoringPipelineMetadata { - /// The time that most recent monitoring pipelines that is related to this - /// run. - core.String? runTime; - - /// The status of the most recent monitoring pipeline. - GoogleRpcStatus? status; +/// Nearest neighbors for one query. +class GoogleCloudAiplatformV1NearestNeighbors { + /// All its neighbors. + core.List? neighbors; - GoogleCloudAiplatformV1ModelDeploymentMonitoringJobLatestMonitoringPipelineMetadata({ - this.runTime, - this.status, + GoogleCloudAiplatformV1NearestNeighbors({ + this.neighbors, }); - GoogleCloudAiplatformV1ModelDeploymentMonitoringJobLatestMonitoringPipelineMetadata.fromJson( - core.Map json_) + GoogleCloudAiplatformV1NearestNeighbors.fromJson(core.Map json_) : this( - runTime: json_['runTime'] as core.String?, - status: json_.containsKey('status') - ? GoogleRpcStatus.fromJson( - json_['status'] as core.Map) - : null, + neighbors: (json_['neighbors'] as core.List?) 
+ ?.map((value) => + GoogleCloudAiplatformV1NearestNeighborsNeighbor.fromJson( + value as core.Map)) + .toList(), ); core.Map toJson() => { - if (runTime != null) 'runTime': runTime!, - if (status != null) 'status': status!, + if (neighbors != null) 'neighbors': neighbors!, }; } -/// ModelDeploymentMonitoringObjectiveConfig contains the pair of -/// deployed_model_id to ModelMonitoringObjectiveConfig. -class GoogleCloudAiplatformV1ModelDeploymentMonitoringObjectiveConfig { - /// The DeployedModel ID of the objective config. - core.String? deployedModelId; +/// A neighbor of the query vector. +class GoogleCloudAiplatformV1NearestNeighborsNeighbor { + /// The distance between the neighbor and the query vector. + core.double? distance; - /// The objective config of for the modelmonitoring job of this deployed - /// model. - GoogleCloudAiplatformV1ModelMonitoringObjectiveConfig? objectiveConfig; + /// The id of the similar entity. + core.String? entityId; - GoogleCloudAiplatformV1ModelDeploymentMonitoringObjectiveConfig({ - this.deployedModelId, - this.objectiveConfig, + /// The attributes of the neighbor, e.g. filters, crowding and metadata Note + /// that full entities are returned only when "return_full_entity" is set to + /// true. + /// + /// Otherwise, only the "entity_id" and "distance" fields are populated. + GoogleCloudAiplatformV1FetchFeatureValuesResponse? entityKeyValues; + + GoogleCloudAiplatformV1NearestNeighborsNeighbor({ + this.distance, + this.entityId, + this.entityKeyValues, }); - GoogleCloudAiplatformV1ModelDeploymentMonitoringObjectiveConfig.fromJson( - core.Map json_) + GoogleCloudAiplatformV1NearestNeighborsNeighbor.fromJson(core.Map json_) : this( - deployedModelId: json_['deployedModelId'] as core.String?, - objectiveConfig: json_.containsKey('objectiveConfig') - ? GoogleCloudAiplatformV1ModelMonitoringObjectiveConfig.fromJson( - json_['objectiveConfig'] + distance: (json_['distance'] as core.num?)?.toDouble(), + entityId: json_['entityId'] as core.String?, + entityKeyValues: json_.containsKey('entityKeyValues') + ? GoogleCloudAiplatformV1FetchFeatureValuesResponse.fromJson( + json_['entityKeyValues'] as core.Map) : null, ); core.Map toJson() => { - if (deployedModelId != null) 'deployedModelId': deployedModelId!, - if (objectiveConfig != null) 'objectiveConfig': objectiveConfig!, + if (distance != null) 'distance': distance!, + if (entityId != null) 'entityId': entityId!, + if (entityKeyValues != null) 'entityKeyValues': entityKeyValues!, }; } -/// The config for scheduling monitoring job. -class GoogleCloudAiplatformV1ModelDeploymentMonitoringScheduleConfig { - /// The model monitoring job scheduling interval. - /// - /// It will be rounded up to next full hour. This defines how often the - /// monitoring jobs are triggered. +/// Neighbors for example-based explanations. +class GoogleCloudAiplatformV1Neighbor { + /// The neighbor distance. /// - /// Required. - core.String? monitorInterval; + /// Output only. + core.double? neighborDistance; - /// The time window of the prediction data being included in each prediction - /// dataset. + /// The neighbor id. /// - /// This window specifies how long the data should be collected from - /// historical model results for each run. If not set, - /// ModelDeploymentMonitoringScheduleConfig.monitor_interval will be used. - /// e.g. 
If currently the cutoff time is 2022-01-08 14:30:00 and the - /// monitor_window is set to be 3600, then data from 2022-01-08 13:30:00 to - /// 2022-01-08 14:30:00 will be retrieved and aggregated to calculate the - /// monitoring statistics. - core.String? monitorWindow; + /// Output only. + core.String? neighborId; - GoogleCloudAiplatformV1ModelDeploymentMonitoringScheduleConfig({ - this.monitorInterval, - this.monitorWindow, + GoogleCloudAiplatformV1Neighbor({ + this.neighborDistance, + this.neighborId, }); - GoogleCloudAiplatformV1ModelDeploymentMonitoringScheduleConfig.fromJson( - core.Map json_) + GoogleCloudAiplatformV1Neighbor.fromJson(core.Map json_) : this( - monitorInterval: json_['monitorInterval'] as core.String?, - monitorWindow: json_['monitorWindow'] as core.String?, + neighborDistance: + (json_['neighborDistance'] as core.num?)?.toDouble(), + neighborId: json_['neighborId'] as core.String?, ); core.Map toJson() => { - if (monitorInterval != null) 'monitorInterval': monitorInterval!, - if (monitorWindow != null) 'monitorWindow': monitorWindow!, + if (neighborDistance != null) 'neighborDistance': neighborDistance!, + if (neighborId != null) 'neighborId': neighborId!, }; } -/// A collection of metrics calculated by comparing Model's predictions on all -/// of the test data against annotations from the test data. -class GoogleCloudAiplatformV1ModelEvaluation { - /// Points to a YAML file stored on Google Cloud Storage describing - /// EvaluatedDataItemView.predictions, EvaluatedDataItemView.ground_truths, - /// EvaluatedAnnotation.predictions, and EvaluatedAnnotation.ground_truths. +/// Network spec. +class GoogleCloudAiplatformV1NetworkSpec { + /// Whether to enable public internet access. /// - /// The schema is defined as an OpenAPI 3.0.2 - /// [Schema Object](https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.2.md#schemaObject). - /// This field is not populated if there are neither EvaluatedDataItemViews - /// nor EvaluatedAnnotations under this ModelEvaluation. - core.String? annotationSchemaUri; + /// Default false. + core.bool? enableInternetAccess; - /// Timestamp when this ModelEvaluation was created. - /// - /// Output only. - core.String? createTime; + /// The full name of the Google Compute Engine + /// [network](https://cloud.google.com//compute/docs/networks-and-firewalls#networks) + core.String? network; - /// Points to a YAML file stored on Google Cloud Storage describing - /// EvaluatedDataItemView.data_item_payload and - /// EvaluatedAnnotation.data_item_payload. + /// The name of the subnet that this instance is in. /// - /// The schema is defined as an OpenAPI 3.0.2 - /// [Schema Object](https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.2.md#schemaObject). - /// This field is not populated if there are neither EvaluatedDataItemViews - /// nor EvaluatedAnnotations under this ModelEvaluation. - core.String? dataItemSchemaUri; + /// Format: + /// `projects/{project_id_or_number}/regions/{region}/subnetworks/{subnetwork_id}` + core.String? subnetwork; - /// The display name of the ModelEvaluation. - core.String? displayName; + GoogleCloudAiplatformV1NetworkSpec({ + this.enableInternetAccess, + this.network, + this.subnetwork, + }); - /// Describes the values of ExplanationSpec that are used for explaining the - /// predicted values on the evaluated data. - core.List< - GoogleCloudAiplatformV1ModelEvaluationModelEvaluationExplanationSpec>? 
- explanationSpecs; + GoogleCloudAiplatformV1NetworkSpec.fromJson(core.Map json_) + : this( + enableInternetAccess: json_['enableInternetAccess'] as core.bool?, + network: json_['network'] as core.String?, + subnetwork: json_['subnetwork'] as core.String?, + ); - /// The metadata of the ModelEvaluation. - /// - /// For the ModelEvaluation uploaded from Managed Pipeline, metadata contains - /// a structured value with keys of "pipeline_job_id", - /// "evaluation_dataset_type", "evaluation_dataset_path", - /// "row_based_metrics_path". - /// - /// The values for Object must be JSON objects. It can consist of `num`, - /// `String`, `bool` and `null` as well as `Map` and `List` values. - core.Object? metadata; + core.Map toJson() => { + if (enableInternetAccess != null) + 'enableInternetAccess': enableInternetAccess!, + if (network != null) 'network': network!, + if (subnetwork != null) 'subnetwork': subnetwork!, + }; +} - /// Evaluation metrics of the Model. - /// - /// The schema of the metrics is stored in metrics_schema_uri +/// Represents a mount configuration for Network File System (NFS) to mount. +class GoogleCloudAiplatformV1NfsMount { + /// Destination mount path. /// - /// The values for Object must be JSON objects. It can consist of `num`, - /// `String`, `bool` and `null` as well as `Map` and `List` values. - core.Object? metrics; - - /// Points to a YAML file stored on Google Cloud Storage describing the - /// metrics of this ModelEvaluation. + /// The NFS will be mounted for the user under /mnt/nfs/ /// - /// The schema is defined as an OpenAPI 3.0.2 - /// [Schema Object](https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.2.md#schemaObject). - core.String? metricsSchemaUri; + /// Required. + core.String? mountPoint; - /// Aggregated explanation metrics for the Model's prediction output over the - /// data this ModelEvaluation uses. + /// Source path exported from NFS server. /// - /// This field is populated only if the Model is evaluated with explanations, - /// and only for AutoML tabular Models. - GoogleCloudAiplatformV1ModelExplanation? modelExplanation; - - /// The resource name of the ModelEvaluation. + /// Has to start with '/', and combined with the ip address, it indicates the + /// source mount path in the form of `server:path` /// - /// Output only. - core.String? name; + /// Required. + core.String? path; - /// All possible dimensions of ModelEvaluationSlices. + /// IP address of the NFS server. /// - /// The dimensions can be used as the filter of the - /// ModelService.ListModelEvaluationSlices request, in the form of - /// `slice.dimension = `. - core.List? sliceDimensions; + /// Required. + core.String? server; - GoogleCloudAiplatformV1ModelEvaluation({ - this.annotationSchemaUri, - this.createTime, - this.dataItemSchemaUri, - this.displayName, - this.explanationSpecs, - this.metadata, - this.metrics, - this.metricsSchemaUri, - this.modelExplanation, - this.name, - this.sliceDimensions, + GoogleCloudAiplatformV1NfsMount({ + this.mountPoint, + this.path, + this.server, }); - GoogleCloudAiplatformV1ModelEvaluation.fromJson(core.Map json_) + GoogleCloudAiplatformV1NfsMount.fromJson(core.Map json_) : this( - annotationSchemaUri: json_['annotationSchemaUri'] as core.String?, - createTime: json_['createTime'] as core.String?, - dataItemSchemaUri: json_['dataItemSchemaUri'] as core.String?, - displayName: json_['displayName'] as core.String?, - explanationSpecs: (json_['explanationSpecs'] as core.List?) 
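// A minimal sketch for the NFS mount message defined above, assuming the
// usual package:googleapis/aiplatform/v1.dart import; the server, path and
// mount point values are hypothetical.
import 'package:googleapis/aiplatform/v1.dart';

void main() {
  final mount = GoogleCloudAiplatformV1NfsMount(
    server: '10.0.0.2', // IP address of the NFS server (hypothetical)
    path: '/exports/training-data', // source path exported from the server
    mountPoint: 'training-data', // mounted for the user under /mnt/nfs/
  );
  // The generated toJson() emits only the fields that were set.
  print(mount.toJson());
}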
- ?.map((value) => - GoogleCloudAiplatformV1ModelEvaluationModelEvaluationExplanationSpec - .fromJson(value as core.Map)) - .toList(), - metadata: json_['metadata'], - metrics: json_['metrics'], - metricsSchemaUri: json_['metricsSchemaUri'] as core.String?, - modelExplanation: json_.containsKey('modelExplanation') - ? GoogleCloudAiplatformV1ModelExplanation.fromJson( - json_['modelExplanation'] - as core.Map) - : null, - name: json_['name'] as core.String?, - sliceDimensions: (json_['sliceDimensions'] as core.List?) - ?.map((value) => value as core.String) - .toList(), + mountPoint: json_['mountPoint'] as core.String?, + path: json_['path'] as core.String?, + server: json_['server'] as core.String?, ); core.Map toJson() => { - if (annotationSchemaUri != null) - 'annotationSchemaUri': annotationSchemaUri!, - if (createTime != null) 'createTime': createTime!, - if (dataItemSchemaUri != null) 'dataItemSchemaUri': dataItemSchemaUri!, - if (displayName != null) 'displayName': displayName!, - if (explanationSpecs != null) 'explanationSpecs': explanationSpecs!, - if (metadata != null) 'metadata': metadata!, - if (metrics != null) 'metrics': metrics!, - if (metricsSchemaUri != null) 'metricsSchemaUri': metricsSchemaUri!, - if (modelExplanation != null) 'modelExplanation': modelExplanation!, - if (name != null) 'name': name!, - if (sliceDimensions != null) 'sliceDimensions': sliceDimensions!, + if (mountPoint != null) 'mountPoint': mountPoint!, + if (path != null) 'path': path!, + if (server != null) 'server': server!, }; } -class GoogleCloudAiplatformV1ModelEvaluationModelEvaluationExplanationSpec { - /// Explanation spec details. - GoogleCloudAiplatformV1ExplanationSpec? explanationSpec; +/// The euc configuration of NotebookRuntimeTemplate. +class GoogleCloudAiplatformV1NotebookEucConfig { + /// Whether ActAs check is bypassed for service account attached to the VM. + /// + /// If false, we need ActAs check for the default Compute Engine Service + /// account. When a Runtime is created, a VM is allocated using Default + /// Compute Engine Service Account. Any user requesting to use this Runtime + /// requires Service Account User (ActAs) permission over this SA. If true, + /// Runtime owner is using EUC and does not require the above permission as VM + /// no longer use default Compute Engine SA, but a P4SA. + /// + /// Output only. + core.bool? bypassActasCheck; - /// Explanation type. + /// Input only. /// - /// For AutoML Image Classification models, possible values are: * - /// `image-integrated-gradients` * `image-xrai` - core.String? explanationType; + /// Whether EUC is disabled in this NotebookRuntimeTemplate. In proto3, the + /// default value of a boolean is false. In this way, by default EUC will be + /// enabled for NotebookRuntimeTemplate. + core.bool? eucDisabled; - GoogleCloudAiplatformV1ModelEvaluationModelEvaluationExplanationSpec({ - this.explanationSpec, - this.explanationType, + GoogleCloudAiplatformV1NotebookEucConfig({ + this.bypassActasCheck, + this.eucDisabled, }); - GoogleCloudAiplatformV1ModelEvaluationModelEvaluationExplanationSpec.fromJson( - core.Map json_) + GoogleCloudAiplatformV1NotebookEucConfig.fromJson(core.Map json_) : this( - explanationSpec: json_.containsKey('explanationSpec') - ? 
GoogleCloudAiplatformV1ExplanationSpec.fromJson( - json_['explanationSpec'] - as core.Map) - : null, - explanationType: json_['explanationType'] as core.String?, + bypassActasCheck: json_['bypassActasCheck'] as core.bool?, + eucDisabled: json_['eucDisabled'] as core.bool?, ); core.Map toJson() => { - if (explanationSpec != null) 'explanationSpec': explanationSpec!, - if (explanationType != null) 'explanationType': explanationType!, + if (bypassActasCheck != null) 'bypassActasCheck': bypassActasCheck!, + if (eucDisabled != null) 'eucDisabled': eucDisabled!, }; } -/// A collection of metrics calculated by comparing Model's predictions on a -/// slice of the test data against ground truth annotations. -class GoogleCloudAiplatformV1ModelEvaluationSlice { - /// Timestamp when this ModelEvaluationSlice was created. +/// NotebookExecutionJob represents an instance of a notebook execution. +class GoogleCloudAiplatformV1NotebookExecutionJob { + /// Timestamp when this NotebookExecutionJob was created. /// /// Output only. core.String? createTime; - /// Sliced evaluation metrics of the Model. + /// The custom compute configuration for an execution job. + GoogleCloudAiplatformV1NotebookExecutionJobCustomEnvironmentSpec? + customEnvironmentSpec; + + /// The Dataform Repository pointing to a single file notebook repository. + GoogleCloudAiplatformV1NotebookExecutionJobDataformRepositorySource? + dataformRepositorySource; + + /// The contents of an input notebook file. + GoogleCloudAiplatformV1NotebookExecutionJobDirectNotebookSource? + directNotebookSource; + + /// The display name of the NotebookExecutionJob. + /// + /// The name can be up to 128 characters long and can consist of any UTF-8 + /// characters. + core.String? displayName; + + /// Customer-managed encryption key spec for the notebook execution job. + /// + /// This field is auto-populated if the NotebookRuntimeTemplate has an + /// encryption spec. + GoogleCloudAiplatformV1EncryptionSpec? encryptionSpec; + + /// Max running time of the execution job in seconds (default 86400s / 24 + /// hrs). + core.String? executionTimeout; + + /// The user email to run the execution as. + /// + /// Only supported by Colab runtimes. + core.String? executionUser; + + /// The Cloud Storage url pointing to the ipynb file. + /// + /// Format: `gs://bucket/notebook_file.ipynb` + GoogleCloudAiplatformV1NotebookExecutionJobGcsNotebookSource? + gcsNotebookSource; + + /// The Cloud Storage location to upload the result to. + /// + /// Format: `gs://bucket-name` + core.String? gcsOutputUri; + + /// The state of the NotebookExecutionJob. + /// + /// Output only. + /// Possible string values are: + /// - "JOB_STATE_UNSPECIFIED" : The job state is unspecified. + /// - "JOB_STATE_QUEUED" : The job has been just created or resumed and + /// processing has not yet begun. + /// - "JOB_STATE_PENDING" : The service is preparing to run the job. + /// - "JOB_STATE_RUNNING" : The job is in progress. + /// - "JOB_STATE_SUCCEEDED" : The job completed successfully. + /// - "JOB_STATE_FAILED" : The job failed. + /// - "JOB_STATE_CANCELLING" : The job is being cancelled. From this state the + /// job may only go to either `JOB_STATE_SUCCEEDED`, `JOB_STATE_FAILED` or + /// `JOB_STATE_CANCELLED`. + /// - "JOB_STATE_CANCELLED" : The job has been cancelled. + /// - "JOB_STATE_PAUSED" : The job has been stopped, and can be resumed. + /// - "JOB_STATE_EXPIRED" : The job has expired. + /// - "JOB_STATE_UPDATING" : The job is being updated. 
Only jobs in the + /// `RUNNING` state can be updated. After updating, the job goes back to the + /// `RUNNING` state. + /// - "JOB_STATE_PARTIALLY_SUCCEEDED" : The job is partially succeeded, some + /// results may be missing due to errors. + core.String? jobState; + + /// The name of the kernel to use during notebook execution. + /// + /// If unset, the default kernel is used. + core.String? kernelName; + + /// The labels with user-defined metadata to organize NotebookExecutionJobs. + /// + /// Label keys and values can be no longer than 64 characters (Unicode + /// codepoints), can only contain lowercase letters, numeric characters, + /// underscores and dashes. International characters are allowed. See + /// https://goo.gl/xmQnxf for more information and examples of labels. System + /// reserved label keys are prefixed with "aiplatform.googleapis.com/" and are + /// immutable. + core.Map? labels; + + /// The resource name of this NotebookExecutionJob. /// - /// The schema of the metrics is stored in metrics_schema_uri + /// Format: + /// `projects/{project_id}/locations/{location}/notebookExecutionJobs/{job_id}` /// /// Output only. - /// - /// The values for Object must be JSON objects. It can consist of `num`, - /// `String`, `bool` and `null` as well as `Map` and `List` values. - core.Object? metrics; + core.String? name; - /// Points to a YAML file stored on Google Cloud Storage describing the - /// metrics of this ModelEvaluationSlice. + /// The NotebookRuntimeTemplate to source compute configuration from. + core.String? notebookRuntimeTemplateResourceName; + + /// The Schedule resource name if this job is triggered by one. /// - /// The schema is defined as an OpenAPI 3.0.2 - /// [Schema Object](https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.2.md#schemaObject). + /// Format: + /// `projects/{project_id}/locations/{location}/schedules/{schedule_id}` /// /// Output only. - core.String? metricsSchemaUri; + core.String? scheduleResourceName; - /// Aggregated explanation metrics for the Model's prediction output over the - /// data this ModelEvaluation uses. + /// The service account to run the execution as. + core.String? serviceAccount; + + /// Populated when the NotebookExecutionJob is completed. /// - /// This field is populated only if the Model is evaluated with explanations, - /// and only for tabular Models. + /// When there is an error during notebook execution, the error details are + /// populated. /// /// Output only. - GoogleCloudAiplatformV1ModelExplanation? modelExplanation; + GoogleRpcStatus? status; - /// The resource name of the ModelEvaluationSlice. + /// Timestamp when this NotebookExecutionJob was most recently updated. /// /// Output only. - core.String? name; + core.String? updateTime; - /// The slice of the test data that is used to evaluate the Model. - /// - /// Output only. - GoogleCloudAiplatformV1ModelEvaluationSliceSlice? slice; + /// The Workbench runtime configuration to use for the notebook execution. + GoogleCloudAiplatformV1NotebookExecutionJobWorkbenchRuntime? 
workbenchRuntime; - GoogleCloudAiplatformV1ModelEvaluationSlice({ + GoogleCloudAiplatformV1NotebookExecutionJob({ this.createTime, - this.metrics, - this.metricsSchemaUri, - this.modelExplanation, + this.customEnvironmentSpec, + this.dataformRepositorySource, + this.directNotebookSource, + this.displayName, + this.encryptionSpec, + this.executionTimeout, + this.executionUser, + this.gcsNotebookSource, + this.gcsOutputUri, + this.jobState, + this.kernelName, + this.labels, this.name, - this.slice, + this.notebookRuntimeTemplateResourceName, + this.scheduleResourceName, + this.serviceAccount, + this.status, + this.updateTime, + this.workbenchRuntime, }); - GoogleCloudAiplatformV1ModelEvaluationSlice.fromJson(core.Map json_) + GoogleCloudAiplatformV1NotebookExecutionJob.fromJson(core.Map json_) : this( createTime: json_['createTime'] as core.String?, - metrics: json_['metrics'], - metricsSchemaUri: json_['metricsSchemaUri'] as core.String?, - modelExplanation: json_.containsKey('modelExplanation') - ? GoogleCloudAiplatformV1ModelExplanation.fromJson( - json_['modelExplanation'] + customEnvironmentSpec: json_.containsKey('customEnvironmentSpec') + ? GoogleCloudAiplatformV1NotebookExecutionJobCustomEnvironmentSpec + .fromJson(json_['customEnvironmentSpec'] + as core.Map) + : null, + dataformRepositorySource: json_ + .containsKey('dataformRepositorySource') + ? GoogleCloudAiplatformV1NotebookExecutionJobDataformRepositorySource + .fromJson(json_['dataformRepositorySource'] + as core.Map) + : null, + directNotebookSource: json_.containsKey('directNotebookSource') + ? GoogleCloudAiplatformV1NotebookExecutionJobDirectNotebookSource + .fromJson(json_['directNotebookSource'] + as core.Map) + : null, + displayName: json_['displayName'] as core.String?, + encryptionSpec: json_.containsKey('encryptionSpec') + ? GoogleCloudAiplatformV1EncryptionSpec.fromJson( + json_['encryptionSpec'] + as core.Map) + : null, + executionTimeout: json_['executionTimeout'] as core.String?, + executionUser: json_['executionUser'] as core.String?, + gcsNotebookSource: json_.containsKey('gcsNotebookSource') + ? GoogleCloudAiplatformV1NotebookExecutionJobGcsNotebookSource + .fromJson(json_['gcsNotebookSource'] as core.Map) : null, + gcsOutputUri: json_['gcsOutputUri'] as core.String?, + jobState: json_['jobState'] as core.String?, + kernelName: json_['kernelName'] as core.String?, + labels: + (json_['labels'] as core.Map?)?.map( + (key, value) => core.MapEntry( + key, + value as core.String, + ), + ), name: json_['name'] as core.String?, - slice: json_.containsKey('slice') - ? GoogleCloudAiplatformV1ModelEvaluationSliceSlice.fromJson( - json_['slice'] as core.Map) + notebookRuntimeTemplateResourceName: + json_['notebookRuntimeTemplateResourceName'] as core.String?, + scheduleResourceName: json_['scheduleResourceName'] as core.String?, + serviceAccount: json_['serviceAccount'] as core.String?, + status: json_.containsKey('status') + ? GoogleRpcStatus.fromJson( + json_['status'] as core.Map) + : null, + updateTime: json_['updateTime'] as core.String?, + workbenchRuntime: json_.containsKey('workbenchRuntime') + ? 
GoogleCloudAiplatformV1NotebookExecutionJobWorkbenchRuntime + .fromJson(json_['workbenchRuntime'] + as core.Map) : null, ); core.Map toJson() => { if (createTime != null) 'createTime': createTime!, - if (metrics != null) 'metrics': metrics!, - if (metricsSchemaUri != null) 'metricsSchemaUri': metricsSchemaUri!, - if (modelExplanation != null) 'modelExplanation': modelExplanation!, + if (customEnvironmentSpec != null) + 'customEnvironmentSpec': customEnvironmentSpec!, + if (dataformRepositorySource != null) + 'dataformRepositorySource': dataformRepositorySource!, + if (directNotebookSource != null) + 'directNotebookSource': directNotebookSource!, + if (displayName != null) 'displayName': displayName!, + if (encryptionSpec != null) 'encryptionSpec': encryptionSpec!, + if (executionTimeout != null) 'executionTimeout': executionTimeout!, + if (executionUser != null) 'executionUser': executionUser!, + if (gcsNotebookSource != null) 'gcsNotebookSource': gcsNotebookSource!, + if (gcsOutputUri != null) 'gcsOutputUri': gcsOutputUri!, + if (jobState != null) 'jobState': jobState!, + if (kernelName != null) 'kernelName': kernelName!, + if (labels != null) 'labels': labels!, if (name != null) 'name': name!, - if (slice != null) 'slice': slice!, + if (notebookRuntimeTemplateResourceName != null) + 'notebookRuntimeTemplateResourceName': + notebookRuntimeTemplateResourceName!, + if (scheduleResourceName != null) + 'scheduleResourceName': scheduleResourceName!, + if (serviceAccount != null) 'serviceAccount': serviceAccount!, + if (status != null) 'status': status!, + if (updateTime != null) 'updateTime': updateTime!, + if (workbenchRuntime != null) 'workbenchRuntime': workbenchRuntime!, }; } -/// Definition of a slice. -class GoogleCloudAiplatformV1ModelEvaluationSliceSlice { - /// The dimension of the slice. - /// - /// Well-known dimensions are: * `annotationSpec`: This slice is on the test - /// data that has either ground truth or prediction with - /// AnnotationSpec.display_name equals to value. * `slice`: This slice is a - /// user customized slice defined by its SliceSpec. - /// - /// Output only. - core.String? dimension; +/// Compute configuration to use for an execution job. +class GoogleCloudAiplatformV1NotebookExecutionJobCustomEnvironmentSpec { + /// The specification of a single machine for the execution job. + GoogleCloudAiplatformV1MachineSpec? machineSpec; - /// Specification for how the data was sliced. - /// - /// Output only. - GoogleCloudAiplatformV1ModelEvaluationSliceSliceSliceSpec? sliceSpec; + /// The network configuration to use for the execution job. + GoogleCloudAiplatformV1NetworkSpec? networkSpec; - /// The value of the dimension in this slice. - /// - /// Output only. - core.String? value; + /// The specification of a persistent disk to attach for the execution job. + GoogleCloudAiplatformV1PersistentDiskSpec? persistentDiskSpec; - GoogleCloudAiplatformV1ModelEvaluationSliceSlice({ - this.dimension, - this.sliceSpec, - this.value, + GoogleCloudAiplatformV1NotebookExecutionJobCustomEnvironmentSpec({ + this.machineSpec, + this.networkSpec, + this.persistentDiskSpec, }); - GoogleCloudAiplatformV1ModelEvaluationSliceSlice.fromJson(core.Map json_) + GoogleCloudAiplatformV1NotebookExecutionJobCustomEnvironmentSpec.fromJson( + core.Map json_) : this( - dimension: json_['dimension'] as core.String?, - sliceSpec: json_.containsKey('sliceSpec') - ? 
GoogleCloudAiplatformV1ModelEvaluationSliceSliceSliceSpec - .fromJson( - json_['sliceSpec'] as core.Map) + machineSpec: json_.containsKey('machineSpec') + ? GoogleCloudAiplatformV1MachineSpec.fromJson( + json_['machineSpec'] as core.Map) + : null, + networkSpec: json_.containsKey('networkSpec') + ? GoogleCloudAiplatformV1NetworkSpec.fromJson( + json_['networkSpec'] as core.Map) + : null, + persistentDiskSpec: json_.containsKey('persistentDiskSpec') + ? GoogleCloudAiplatformV1PersistentDiskSpec.fromJson( + json_['persistentDiskSpec'] + as core.Map) : null, - value: json_['value'] as core.String?, ); core.Map toJson() => { - if (dimension != null) 'dimension': dimension!, - if (sliceSpec != null) 'sliceSpec': sliceSpec!, - if (value != null) 'value': value!, + if (machineSpec != null) 'machineSpec': machineSpec!, + if (networkSpec != null) 'networkSpec': networkSpec!, + if (persistentDiskSpec != null) + 'persistentDiskSpec': persistentDiskSpec!, }; } -/// Specification for how the data should be sliced. -class GoogleCloudAiplatformV1ModelEvaluationSliceSliceSliceSpec { - /// Mapping configuration for this SliceSpec. +/// The Dataform Repository containing the input notebook. +class GoogleCloudAiplatformV1NotebookExecutionJobDataformRepositorySource { + /// The commit SHA to read repository with. /// - /// The key is the name of the feature. By default, the key will be prefixed - /// by "instance" as a dictionary prefix for Vertex Batch Predictions output - /// format. - core.Map? - configs; + /// If unset, the file will be read at HEAD. + core.String? commitSha; - GoogleCloudAiplatformV1ModelEvaluationSliceSliceSliceSpec({ - this.configs, + /// The resource name of the Dataform Repository. + /// + /// Format: + /// `projects/{project_id}/locations/{location}/repositories/{repository_id}` + core.String? dataformRepositoryResourceName; + + GoogleCloudAiplatformV1NotebookExecutionJobDataformRepositorySource({ + this.commitSha, + this.dataformRepositoryResourceName, }); - GoogleCloudAiplatformV1ModelEvaluationSliceSliceSliceSpec.fromJson( + GoogleCloudAiplatformV1NotebookExecutionJobDataformRepositorySource.fromJson( core.Map json_) : this( - configs: - (json_['configs'] as core.Map?)?.map( - (key, value) => core.MapEntry( - key, - GoogleCloudAiplatformV1ModelEvaluationSliceSliceSliceSpecSliceConfig - .fromJson(value as core.Map), - ), - ), + commitSha: json_['commitSha'] as core.String?, + dataformRepositoryResourceName: + json_['dataformRepositoryResourceName'] as core.String?, ); core.Map toJson() => { - if (configs != null) 'configs': configs!, + if (commitSha != null) 'commitSha': commitSha!, + if (dataformRepositoryResourceName != null) + 'dataformRepositoryResourceName': dataformRepositoryResourceName!, }; } -/// A range of values for slice(s). -/// -/// `low` is inclusive, `high` is exclusive. -class GoogleCloudAiplatformV1ModelEvaluationSliceSliceSliceSpecRange { - /// Exclusive high value for the range. - core.double? high; +/// The content of the input notebook in ipynb format. +class GoogleCloudAiplatformV1NotebookExecutionJobDirectNotebookSource { + /// The base64-encoded contents of the input notebook file. + core.String? content; + core.List get contentAsBytes => convert.base64.decode(content!); - /// Inclusive low value for the range. - core.double? 
low; + set contentAsBytes(core.List bytes_) { + content = + convert.base64.encode(bytes_).replaceAll('/', '_').replaceAll('+', '-'); + } - GoogleCloudAiplatformV1ModelEvaluationSliceSliceSliceSpecRange({ - this.high, - this.low, + GoogleCloudAiplatformV1NotebookExecutionJobDirectNotebookSource({ + this.content, }); - GoogleCloudAiplatformV1ModelEvaluationSliceSliceSliceSpecRange.fromJson( + GoogleCloudAiplatformV1NotebookExecutionJobDirectNotebookSource.fromJson( core.Map json_) : this( - high: (json_['high'] as core.num?)?.toDouble(), - low: (json_['low'] as core.num?)?.toDouble(), + content: json_['content'] as core.String?, ); core.Map toJson() => { - if (high != null) 'high': high!, - if (low != null) 'low': low!, + if (content != null) 'content': content!, }; } -/// Specification message containing the config for this SliceSpec. -/// -/// When `kind` is selected as `value` and/or `range`, only a single slice will -/// be computed. When `all_values` is present, a separate slice will be computed -/// for each possible label/value for the corresponding key in `config`. -/// Examples, with feature zip_code with values 12345, 23334, 88888 and feature -/// country with values "US", "Canada", "Mexico" in the dataset: Example 1: { -/// "zip_code": { "value": { "float_value": 12345.0 } } } A single slice for any -/// data with zip_code 12345 in the dataset. Example 2: { "zip_code": { "range": -/// { "low": 12345, "high": 20000 } } } A single slice containing data where the -/// zip_codes between 12345 and 20000 For this example, data with the zip_code -/// of 12345 will be in this slice. Example 3: { "zip_code": { "range": { "low": -/// 10000, "high": 20000 } }, "country": { "value": { "string_value": "US" } } } -/// A single slice containing data where the zip_codes between 10000 and 20000 -/// has the country "US". For this example, data with the zip_code of 12345 and -/// country "US" will be in this slice. Example 4: { "country": {"all_values": { -/// "value": true } } } Three slices are computed, one for each unique country -/// in the dataset. Example 5: { "country": { "all_values": { "value": true } }, -/// "zip_code": { "value": { "float_value": 12345.0 } } } Three slices are -/// computed, one for each unique country in the dataset where the zip_code is -/// also 12345. For this example, data with zip_code 12345 and country "US" will -/// be in one slice, zip_code 12345 and country "Canada" in another slice, and -/// zip_code 12345 and country "Mexico" in another slice, totaling 3 slices. -class GoogleCloudAiplatformV1ModelEvaluationSliceSliceSliceSpecSliceConfig { - /// If all_values is set to true, then all possible labels of the keyed - /// feature will have another slice computed. - /// - /// Example: `{"all_values":{"value":true}}` - core.bool? allValues; - - /// A range of values for a numerical feature. +/// The Cloud Storage uri for the input notebook. +class GoogleCloudAiplatformV1NotebookExecutionJobGcsNotebookSource { + /// The version of the Cloud Storage object to read. /// - /// Example: `{"range":{"low":10000.0,"high":50000.0}}` will capture 12345 and - /// 23334 in the slice. - GoogleCloudAiplatformV1ModelEvaluationSliceSliceSliceSpecRange? range; + /// If unset, the current version of the object is read. See + /// https://cloud.google.com/storage/docs/metadata#generation-number. + core.String? generation; - /// A unique specific value for a given feature. + /// The Cloud Storage uri pointing to the ipynb file. 
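// A minimal sketch of the contentAsBytes convenience setter defined above: it
// stores raw notebook bytes in the `content` field as URL-safe base64.
// Assumes the usual package:googleapis/aiplatform/v1.dart import; the notebook
// JSON below is a hypothetical stand-in for a real .ipynb file.
import 'dart:convert' show utf8;

import 'package:googleapis/aiplatform/v1.dart';

void main() {
  const notebookJson = '{"cells": [], "nbformat": 4, "nbformat_minor": 5}';
  final source =
      GoogleCloudAiplatformV1NotebookExecutionJobDirectNotebookSource()
        ..contentAsBytes = utf8.encode(notebookJson);
  // `content` now holds the URL-safe base64 form carried in the request body.
  print(source.content);
}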
/// - /// Example: `{ "value": { "string_value": "12345" } }` - GoogleCloudAiplatformV1ModelEvaluationSliceSliceSliceSpecValue? value; + /// Format: `gs://bucket/notebook_file.ipynb` + core.String? uri; - GoogleCloudAiplatformV1ModelEvaluationSliceSliceSliceSpecSliceConfig({ - this.allValues, - this.range, - this.value, + GoogleCloudAiplatformV1NotebookExecutionJobGcsNotebookSource({ + this.generation, + this.uri, }); - GoogleCloudAiplatformV1ModelEvaluationSliceSliceSliceSpecSliceConfig.fromJson( + GoogleCloudAiplatformV1NotebookExecutionJobGcsNotebookSource.fromJson( core.Map json_) : this( - allValues: json_['allValues'] as core.bool?, - range: json_.containsKey('range') - ? GoogleCloudAiplatformV1ModelEvaluationSliceSliceSliceSpecRange - .fromJson( - json_['range'] as core.Map) - : null, - value: json_.containsKey('value') - ? GoogleCloudAiplatformV1ModelEvaluationSliceSliceSliceSpecValue - .fromJson( - json_['value'] as core.Map) - : null, + generation: json_['generation'] as core.String?, + uri: json_['uri'] as core.String?, ); core.Map toJson() => { - if (allValues != null) 'allValues': allValues!, - if (range != null) 'range': range!, - if (value != null) 'value': value!, + if (generation != null) 'generation': generation!, + if (uri != null) 'uri': uri!, }; } -/// Single value that supports strings and floats. -class GoogleCloudAiplatformV1ModelEvaluationSliceSliceSliceSpecValue { - /// Float type. - core.double? floatValue; +/// Configuration for a Workbench Instances-based environment. +typedef GoogleCloudAiplatformV1NotebookExecutionJobWorkbenchRuntime = $Empty; - /// String type. - core.String? stringValue; +/// The idle shutdown configuration of NotebookRuntimeTemplate, which contains +/// the idle_timeout as required field. +class GoogleCloudAiplatformV1NotebookIdleShutdownConfig { + /// Whether Idle Shutdown is disabled in this NotebookRuntimeTemplate. + core.bool? idleShutdownDisabled; - GoogleCloudAiplatformV1ModelEvaluationSliceSliceSliceSpecValue({ - this.floatValue, - this.stringValue, + /// Duration is accurate to the second. + /// + /// In Notebook, Idle Timeout is accurate to minute so the range of + /// idle_timeout (second) is: 10 * 60 ~ 1440 * 60. + /// + /// Required. + core.String? idleTimeout; + + GoogleCloudAiplatformV1NotebookIdleShutdownConfig({ + this.idleShutdownDisabled, + this.idleTimeout, }); - GoogleCloudAiplatformV1ModelEvaluationSliceSliceSliceSpecValue.fromJson( - core.Map json_) + GoogleCloudAiplatformV1NotebookIdleShutdownConfig.fromJson(core.Map json_) : this( - floatValue: (json_['floatValue'] as core.num?)?.toDouble(), - stringValue: json_['stringValue'] as core.String?, + idleShutdownDisabled: json_['idleShutdownDisabled'] as core.bool?, + idleTimeout: json_['idleTimeout'] as core.String?, ); core.Map toJson() => { - if (floatValue != null) 'floatValue': floatValue!, - if (stringValue != null) 'stringValue': stringValue!, + if (idleShutdownDisabled != null) + 'idleShutdownDisabled': idleShutdownDisabled!, + if (idleTimeout != null) 'idleTimeout': idleTimeout!, }; } -/// Aggregated explanation metrics for a Model over a set of instances. -class GoogleCloudAiplatformV1ModelExplanation { - /// Aggregated attributions explaining the Model's prediction outputs over the - /// set of instances. +/// A runtime is a virtual machine allocated to a particular user for a +/// particular Notebook file on temporary basis with lifetime limited to 24 +/// hours. 
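// A minimal sketch for the idle-shutdown config defined above. idleTimeout is
// a Duration serialized as a string (typically of the form '3600s'), and per
// its doc comment must fall between 10 * 60 and 1440 * 60 seconds. Assumes the
// usual package:googleapis/aiplatform/v1.dart import; the one-hour timeout is
// a hypothetical choice.
import 'package:googleapis/aiplatform/v1.dart';

void main() {
  final idleConfig = GoogleCloudAiplatformV1NotebookIdleShutdownConfig(
    idleShutdownDisabled: false, // keep idle shutdown enabled
    idleTimeout: '3600s', // shut the runtime down after one idle hour
  );
  print(idleConfig.toJson());
}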
+class GoogleCloudAiplatformV1NotebookRuntime { + /// Timestamp when this NotebookRuntime was created. + /// + /// Output only. + core.String? createTime; + + /// The specification of persistent disk attached to the notebook runtime as + /// data disk storage. + /// + /// Output only. + GoogleCloudAiplatformV1PersistentDiskSpec? dataPersistentDiskSpec; + + /// The description of the NotebookRuntime. + core.String? description; + + /// The display name of the NotebookRuntime. + /// + /// The name can be up to 128 characters long and can consist of any UTF-8 + /// characters. + /// + /// Required. + core.String? displayName; + + /// Customer-managed encryption key spec for the notebook runtime. + /// + /// Output only. + GoogleCloudAiplatformV1EncryptionSpec? encryptionSpec; + + /// EUC configuration of the notebook runtime. + /// + /// Output only. + GoogleCloudAiplatformV1NotebookEucConfig? eucConfig; + + /// Timestamp when this NotebookRuntime will be expired: 1. + /// + /// System Predefined NotebookRuntime: 24 hours after creation. After + /// expiration, system predifined runtime will be deleted. 2. User created + /// NotebookRuntime: 6 months after last upgrade. After expiration, user + /// created runtime will be stopped and allowed for upgrade. + /// + /// Output only. + core.String? expirationTime; + + /// The health state of the NotebookRuntime. + /// + /// Output only. + /// Possible string values are: + /// - "HEALTH_STATE_UNSPECIFIED" : Unspecified health state. + /// - "HEALTHY" : NotebookRuntime is in healthy state. Applies to ACTIVE + /// state. + /// - "UNHEALTHY" : NotebookRuntime is in unhealthy state. Applies to ACTIVE + /// state. + core.String? healthState; + + /// The idle shutdown configuration of the notebook runtime. + /// + /// Output only. + GoogleCloudAiplatformV1NotebookIdleShutdownConfig? idleShutdownConfig; + + /// Whether NotebookRuntime is upgradable. + /// + /// Output only. + core.bool? isUpgradable; + + /// The labels with user-defined metadata to organize your NotebookRuntime. + /// + /// Label keys and values can be no longer than 64 characters (Unicode + /// codepoints), can only contain lowercase letters, numeric characters, + /// underscores and dashes. International characters are allowed. No more than + /// 64 user labels can be associated with one NotebookRuntime (System labels + /// are excluded). See https://goo.gl/xmQnxf for more information and examples + /// of labels. System reserved label keys are prefixed with + /// "aiplatform.googleapis.com/" and are immutable. Following system labels + /// exist for NotebookRuntime: * + /// "aiplatform.googleapis.com/notebook_runtime_gce_instance_id": output only, + /// its value is the Compute Engine instance id. * + /// "aiplatform.googleapis.com/colab_enterprise_entry_service": its value is + /// either "bigquery" or "vertex"; if absent, it should be "vertex". This is + /// to describe the entry service, either BigQuery or Vertex. + core.Map? labels; + + /// The specification of a single machine used by the notebook runtime. + /// + /// Output only. + GoogleCloudAiplatformV1MachineSpec? machineSpec; + + /// The resource name of the NotebookRuntime. + /// + /// Output only. + core.String? name; + + /// Network spec of the notebook runtime. + /// + /// Output only. + GoogleCloudAiplatformV1NetworkSpec? networkSpec; + + /// The Compute Engine tags to add to runtime (see + /// [Tagging instances](https://cloud.google.com/vpc/docs/add-remove-network-tags)). + /// + /// Optional. + core.List? 
networkTags; + + /// The pointer to NotebookRuntimeTemplate this NotebookRuntime is created + /// from. + /// + /// Output only. + GoogleCloudAiplatformV1NotebookRuntimeTemplateRef? notebookRuntimeTemplateRef; + + /// The type of the notebook runtime. + /// + /// Output only. + /// Possible string values are: + /// - "NOTEBOOK_RUNTIME_TYPE_UNSPECIFIED" : Unspecified notebook runtime type, + /// NotebookRuntimeType will default to USER_DEFINED. + /// - "USER_DEFINED" : runtime or template with coustomized configurations + /// from user. + /// - "ONE_CLICK" : runtime or template with system defined configurations. + core.String? notebookRuntimeType; + + /// The proxy endpoint used to access the NotebookRuntime. + /// + /// Output only. + core.String? proxyUri; + + /// The runtime (instance) state of the NotebookRuntime. + /// + /// Output only. + /// Possible string values are: + /// - "RUNTIME_STATE_UNSPECIFIED" : Unspecified runtime state. + /// - "RUNNING" : NotebookRuntime is in running state. + /// - "BEING_STARTED" : NotebookRuntime is in starting state. + /// - "BEING_STOPPED" : NotebookRuntime is in stopping state. + /// - "STOPPED" : NotebookRuntime is in stopped state. + /// - "BEING_UPGRADED" : NotebookRuntime is in upgrading state. It is in the + /// middle of upgrading process. + /// - "ERROR" : NotebookRuntime was unable to start/stop properly. + /// - "INVALID" : NotebookRuntime is in invalid state. Cannot be recovered. + core.String? runtimeState; + + /// The user email of the NotebookRuntime. /// - /// The attributions are grouped by outputs. For Models that predict only one - /// output, such as regression Models that predict only one score, there is - /// only one attibution that explains the predicted output. For Models that - /// predict multiple outputs, such as multiclass Models that predict multiple - /// classes, each element explains one specific item. Attribution.output_index - /// can be used to identify which output this attribution is explaining. The - /// baselineOutputValue, instanceOutputValue and featureAttributions fields - /// are averaged over the test data. NOTE: Currently AutoML tabular - /// classification Models produce only one attribution, which averages - /// attributions over all the classes it predicts. - /// Attribution.approximation_error is not populated. + /// Required. + core.String? runtimeUser; + + /// Reserved for future use. /// /// Output only. - core.List? meanAttributions; - - GoogleCloudAiplatformV1ModelExplanation({ - this.meanAttributions, - }); + core.bool? satisfiesPzi; - GoogleCloudAiplatformV1ModelExplanation.fromJson(core.Map json_) - : this( - meanAttributions: (json_['meanAttributions'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1Attribution.fromJson( - value as core.Map)) - .toList(), - ); + /// Reserved for future use. + /// + /// Output only. + core.bool? satisfiesPzs; - core.Map toJson() => { - if (meanAttributions != null) 'meanAttributions': meanAttributions!, - }; -} + /// Deprecated: This field is no longer used and the "Vertex AI Notebook + /// Service Account" + /// (service-PROJECT_NUMBER@gcp-sa-aiplatform-vm.iam.gserviceaccount.com) is + /// used for the runtime workload identity. + /// + /// See + /// https://cloud.google.com/iam/docs/service-agents#vertex-ai-notebook-service-account + /// for more details. The service account that the NotebookRuntime workload + /// runs as. + /// + /// Output only. + core.String? serviceAccount; -/// Represents export format supported by the Model. 
-/// -/// All formats export to Google Cloud Storage. -class GoogleCloudAiplatformV1ModelExportFormat { - /// The content of this Model that may be exported. + /// Runtime Shielded VM spec. /// /// Output only. - core.List? exportableContents; + GoogleCloudAiplatformV1ShieldedVmConfig? shieldedVmConfig; - /// The ID of the export format. + /// Timestamp when this NotebookRuntime was most recently updated. /// - /// The possible format IDs are: * `tflite` Used for Android mobile devices. * - /// `edgetpu-tflite` Used for [Edge TPU](https://cloud.google.com/edge-tpu/) - /// devices. * `tf-saved-model` A tensorflow model in SavedModel format. * - /// `tf-js` A [TensorFlow.js](https://www.tensorflow.org/js) model that can be - /// used in the browser and in Node.js using JavaScript. * `core-ml` Used for - /// iOS mobile devices. * `custom-trained` A Model that was uploaded or - /// trained by custom code. + /// Output only. + core.String? updateTime; + + /// The VM os image version of NotebookRuntime. /// /// Output only. - core.String? id; + core.String? version; - GoogleCloudAiplatformV1ModelExportFormat({ - this.exportableContents, - this.id, + GoogleCloudAiplatformV1NotebookRuntime({ + this.createTime, + this.dataPersistentDiskSpec, + this.description, + this.displayName, + this.encryptionSpec, + this.eucConfig, + this.expirationTime, + this.healthState, + this.idleShutdownConfig, + this.isUpgradable, + this.labels, + this.machineSpec, + this.name, + this.networkSpec, + this.networkTags, + this.notebookRuntimeTemplateRef, + this.notebookRuntimeType, + this.proxyUri, + this.runtimeState, + this.runtimeUser, + this.satisfiesPzi, + this.satisfiesPzs, + this.serviceAccount, + this.shieldedVmConfig, + this.updateTime, + this.version, }); - GoogleCloudAiplatformV1ModelExportFormat.fromJson(core.Map json_) + GoogleCloudAiplatformV1NotebookRuntime.fromJson(core.Map json_) : this( - exportableContents: (json_['exportableContents'] as core.List?) + createTime: json_['createTime'] as core.String?, + dataPersistentDiskSpec: json_.containsKey('dataPersistentDiskSpec') + ? GoogleCloudAiplatformV1PersistentDiskSpec.fromJson( + json_['dataPersistentDiskSpec'] + as core.Map) + : null, + description: json_['description'] as core.String?, + displayName: json_['displayName'] as core.String?, + encryptionSpec: json_.containsKey('encryptionSpec') + ? GoogleCloudAiplatformV1EncryptionSpec.fromJson( + json_['encryptionSpec'] + as core.Map) + : null, + eucConfig: json_.containsKey('eucConfig') + ? GoogleCloudAiplatformV1NotebookEucConfig.fromJson( + json_['eucConfig'] as core.Map) + : null, + expirationTime: json_['expirationTime'] as core.String?, + healthState: json_['healthState'] as core.String?, + idleShutdownConfig: json_.containsKey('idleShutdownConfig') + ? GoogleCloudAiplatformV1NotebookIdleShutdownConfig.fromJson( + json_['idleShutdownConfig'] + as core.Map) + : null, + isUpgradable: json_['isUpgradable'] as core.bool?, + labels: + (json_['labels'] as core.Map?)?.map( + (key, value) => core.MapEntry( + key, + value as core.String, + ), + ), + machineSpec: json_.containsKey('machineSpec') + ? GoogleCloudAiplatformV1MachineSpec.fromJson( + json_['machineSpec'] as core.Map) + : null, + name: json_['name'] as core.String?, + networkSpec: json_.containsKey('networkSpec') + ? GoogleCloudAiplatformV1NetworkSpec.fromJson( + json_['networkSpec'] as core.Map) + : null, + networkTags: (json_['networkTags'] as core.List?) 
?.map((value) => value as core.String) .toList(), - id: json_['id'] as core.String?, + notebookRuntimeTemplateRef: + json_.containsKey('notebookRuntimeTemplateRef') + ? GoogleCloudAiplatformV1NotebookRuntimeTemplateRef.fromJson( + json_['notebookRuntimeTemplateRef'] + as core.Map) + : null, + notebookRuntimeType: json_['notebookRuntimeType'] as core.String?, + proxyUri: json_['proxyUri'] as core.String?, + runtimeState: json_['runtimeState'] as core.String?, + runtimeUser: json_['runtimeUser'] as core.String?, + satisfiesPzi: json_['satisfiesPzi'] as core.bool?, + satisfiesPzs: json_['satisfiesPzs'] as core.bool?, + serviceAccount: json_['serviceAccount'] as core.String?, + shieldedVmConfig: json_.containsKey('shieldedVmConfig') + ? GoogleCloudAiplatformV1ShieldedVmConfig.fromJson( + json_['shieldedVmConfig'] + as core.Map) + : null, + updateTime: json_['updateTime'] as core.String?, + version: json_['version'] as core.String?, ); core.Map toJson() => { - if (exportableContents != null) - 'exportableContents': exportableContents!, - if (id != null) 'id': id!, + if (createTime != null) 'createTime': createTime!, + if (dataPersistentDiskSpec != null) + 'dataPersistentDiskSpec': dataPersistentDiskSpec!, + if (description != null) 'description': description!, + if (displayName != null) 'displayName': displayName!, + if (encryptionSpec != null) 'encryptionSpec': encryptionSpec!, + if (eucConfig != null) 'eucConfig': eucConfig!, + if (expirationTime != null) 'expirationTime': expirationTime!, + if (healthState != null) 'healthState': healthState!, + if (idleShutdownConfig != null) + 'idleShutdownConfig': idleShutdownConfig!, + if (isUpgradable != null) 'isUpgradable': isUpgradable!, + if (labels != null) 'labels': labels!, + if (machineSpec != null) 'machineSpec': machineSpec!, + if (name != null) 'name': name!, + if (networkSpec != null) 'networkSpec': networkSpec!, + if (networkTags != null) 'networkTags': networkTags!, + if (notebookRuntimeTemplateRef != null) + 'notebookRuntimeTemplateRef': notebookRuntimeTemplateRef!, + if (notebookRuntimeType != null) + 'notebookRuntimeType': notebookRuntimeType!, + if (proxyUri != null) 'proxyUri': proxyUri!, + if (runtimeState != null) 'runtimeState': runtimeState!, + if (runtimeUser != null) 'runtimeUser': runtimeUser!, + if (satisfiesPzi != null) 'satisfiesPzi': satisfiesPzi!, + if (satisfiesPzs != null) 'satisfiesPzs': satisfiesPzs!, + if (serviceAccount != null) 'serviceAccount': serviceAccount!, + if (shieldedVmConfig != null) 'shieldedVmConfig': shieldedVmConfig!, + if (updateTime != null) 'updateTime': updateTime!, + if (version != null) 'version': version!, }; } -/// Contains information about the source of the models generated from Model -/// Garden. -class GoogleCloudAiplatformV1ModelGardenSource { - /// The model garden source model resource name. +/// A template that specifies runtime configurations such as machine type, +/// runtime version, network configurations, etc. +/// +/// Multiple runtimes can be created from a runtime template. +class GoogleCloudAiplatformV1NotebookRuntimeTemplate { + /// Timestamp when this NotebookRuntimeTemplate was created. /// - /// Required. - core.String? publicModelName; + /// Output only. + core.String? createTime; - GoogleCloudAiplatformV1ModelGardenSource({ - this.publicModelName, - }); + /// The specification of persistent disk attached to the runtime as data disk + /// storage. + /// + /// Optional. + GoogleCloudAiplatformV1PersistentDiskSpec? 
dataPersistentDiskSpec; - GoogleCloudAiplatformV1ModelGardenSource.fromJson(core.Map json_) - : this( - publicModelName: json_['publicModelName'] as core.String?, - ); + /// The description of the NotebookRuntimeTemplate. + core.String? description; - core.Map toJson() => { - if (publicModelName != null) 'publicModelName': publicModelName!, - }; -} + /// The display name of the NotebookRuntimeTemplate. + /// + /// The name can be up to 128 characters long and can consist of any UTF-8 + /// characters. + /// + /// Required. + core.String? displayName; -/// The alert config for model monitoring. -class GoogleCloudAiplatformV1ModelMonitoringAlertConfig { - /// Email alert config. - GoogleCloudAiplatformV1ModelMonitoringAlertConfigEmailAlertConfig? - emailAlertConfig; + /// Customer-managed encryption key spec for the notebook runtime. + GoogleCloudAiplatformV1EncryptionSpec? encryptionSpec; - /// Dump the anomalies to Cloud Logging. + /// Used to perform consistent read-modify-write updates. /// - /// The anomalies will be put to json payload encoded from proto - /// google.cloud.aiplatform.logging.ModelMonitoringAnomaliesLogEntry. This can - /// be further sinked to Pub/Sub or any other services supported by Cloud - /// Logging. - core.bool? enableLogging; + /// If not set, a blind "overwrite" update happens. + core.String? etag; - /// Resource names of the NotificationChannels to send alert. - /// - /// Must be of the format `projects//notificationChannels/` - core.List? notificationChannels; + /// EUC configuration of the NotebookRuntimeTemplate. + GoogleCloudAiplatformV1NotebookEucConfig? eucConfig; - GoogleCloudAiplatformV1ModelMonitoringAlertConfig({ - this.emailAlertConfig, - this.enableLogging, - this.notificationChannels, - }); + /// The idle shutdown configuration of NotebookRuntimeTemplate. + /// + /// This config will only be set when idle shutdown is enabled. + GoogleCloudAiplatformV1NotebookIdleShutdownConfig? idleShutdownConfig; - GoogleCloudAiplatformV1ModelMonitoringAlertConfig.fromJson(core.Map json_) - : this( - emailAlertConfig: json_.containsKey('emailAlertConfig') - ? GoogleCloudAiplatformV1ModelMonitoringAlertConfigEmailAlertConfig - .fromJson(json_['emailAlertConfig'] - as core.Map) - : null, - enableLogging: json_['enableLogging'] as core.bool?, - notificationChannels: (json_['notificationChannels'] as core.List?) - ?.map((value) => value as core.String) - .toList(), - ); + /// Deprecated: This field has no behavior. + /// + /// Use notebook_runtime_type = 'ONE_CLICK' instead. The default template to + /// use if not specified. + /// + /// Output only. + @core.Deprecated( + 'Not supported. Member documentation may have more information.', + ) + core.bool? isDefault; - core.Map toJson() => { - if (emailAlertConfig != null) 'emailAlertConfig': emailAlertConfig!, - if (enableLogging != null) 'enableLogging': enableLogging!, - if (notificationChannels != null) - 'notificationChannels': notificationChannels!, - }; -} + /// The labels with user-defined metadata to organize the + /// NotebookRuntimeTemplates. + /// + /// Label keys and values can be no longer than 64 characters (Unicode + /// codepoints), can only contain lowercase letters, numeric characters, + /// underscores and dashes. International characters are allowed. See + /// https://goo.gl/xmQnxf for more information and examples of labels. + core.Map? labels; -/// The config for email alert. -class GoogleCloudAiplatformV1ModelMonitoringAlertConfigEmailAlertConfig { - /// The email addresses to send the alert. 
- core.List? userEmails; + /// The specification of a single machine for the template. + /// + /// Optional. Immutable. + GoogleCloudAiplatformV1MachineSpec? machineSpec; - GoogleCloudAiplatformV1ModelMonitoringAlertConfigEmailAlertConfig({ - this.userEmails, - }); + /// The resource name of the NotebookRuntimeTemplate. + core.String? name; - GoogleCloudAiplatformV1ModelMonitoringAlertConfigEmailAlertConfig.fromJson( - core.Map json_) - : this( - userEmails: (json_['userEmails'] as core.List?) - ?.map((value) => value as core.String) - .toList(), - ); + /// Network spec. + /// + /// Optional. + GoogleCloudAiplatformV1NetworkSpec? networkSpec; - core.Map toJson() => { - if (userEmails != null) 'userEmails': userEmails!, - }; -} + /// The Compute Engine tags to add to runtime (see + /// [Tagging instances](https://cloud.google.com/vpc/docs/add-remove-network-tags)). + /// + /// Optional. + core.List? networkTags; -/// The objective configuration for model monitoring, including the information -/// needed to detect anomalies for one particular model. -class GoogleCloudAiplatformV1ModelMonitoringObjectiveConfig { - /// The config for integrating with Vertex Explainable AI. - GoogleCloudAiplatformV1ModelMonitoringObjectiveConfigExplanationConfig? - explanationConfig; + /// The type of the notebook runtime template. + /// + /// Optional. Immutable. + /// Possible string values are: + /// - "NOTEBOOK_RUNTIME_TYPE_UNSPECIFIED" : Unspecified notebook runtime type, + /// NotebookRuntimeType will default to USER_DEFINED. + /// - "USER_DEFINED" : runtime or template with coustomized configurations + /// from user. + /// - "ONE_CLICK" : runtime or template with system defined configurations. + core.String? notebookRuntimeType; - /// The config for drift of prediction data. - GoogleCloudAiplatformV1ModelMonitoringObjectiveConfigPredictionDriftDetectionConfig? - predictionDriftDetectionConfig; + /// Deprecated: This field is ignored and the "Vertex AI Notebook Service + /// Account" + /// (service-PROJECT_NUMBER@gcp-sa-aiplatform-vm.iam.gserviceaccount.com) is + /// used for the runtime workload identity. + /// + /// See + /// https://cloud.google.com/iam/docs/service-agents#vertex-ai-notebook-service-account + /// for more details. For NotebookExecutionJob, use + /// NotebookExecutionJob.service_account instead. The service account that the + /// runtime workload runs as. You can use any service account within the same + /// project, but you must have the service account user permission to use the + /// instance. If not specified, the + /// [Compute Engine default service account](https://cloud.google.com/compute/docs/access/service-accounts#default_service_account) + /// is used. + @core.Deprecated( + 'Not supported. Member documentation may have more information.', + ) + core.String? serviceAccount; - /// Training dataset for models. + /// Runtime Shielded VM spec. /// - /// This field has to be set only if TrainingPredictionSkewDetectionConfig is - /// specified. - GoogleCloudAiplatformV1ModelMonitoringObjectiveConfigTrainingDataset? - trainingDataset; + /// Optional. Immutable. + GoogleCloudAiplatformV1ShieldedVmConfig? shieldedVmConfig; - /// The config for skew between training data and prediction data. - GoogleCloudAiplatformV1ModelMonitoringObjectiveConfigTrainingPredictionSkewDetectionConfig? - trainingPredictionSkewDetectionConfig; + /// Timestamp when this NotebookRuntimeTemplate was most recently updated. + /// + /// Output only. + core.String? 
updateTime; - GoogleCloudAiplatformV1ModelMonitoringObjectiveConfig({ - this.explanationConfig, - this.predictionDriftDetectionConfig, - this.trainingDataset, - this.trainingPredictionSkewDetectionConfig, + GoogleCloudAiplatformV1NotebookRuntimeTemplate({ + this.createTime, + this.dataPersistentDiskSpec, + this.description, + this.displayName, + this.encryptionSpec, + this.etag, + this.eucConfig, + this.idleShutdownConfig, + this.isDefault, + this.labels, + this.machineSpec, + this.name, + this.networkSpec, + this.networkTags, + this.notebookRuntimeType, + this.serviceAccount, + this.shieldedVmConfig, + this.updateTime, }); - GoogleCloudAiplatformV1ModelMonitoringObjectiveConfig.fromJson(core.Map json_) + GoogleCloudAiplatformV1NotebookRuntimeTemplate.fromJson(core.Map json_) : this( - explanationConfig: json_.containsKey('explanationConfig') - ? GoogleCloudAiplatformV1ModelMonitoringObjectiveConfigExplanationConfig - .fromJson(json_['explanationConfig'] + createTime: json_['createTime'] as core.String?, + dataPersistentDiskSpec: json_.containsKey('dataPersistentDiskSpec') + ? GoogleCloudAiplatformV1PersistentDiskSpec.fromJson( + json_['dataPersistentDiskSpec'] as core.Map) : null, - predictionDriftDetectionConfig: json_ - .containsKey('predictionDriftDetectionConfig') - ? GoogleCloudAiplatformV1ModelMonitoringObjectiveConfigPredictionDriftDetectionConfig - .fromJson(json_['predictionDriftDetectionConfig'] + description: json_['description'] as core.String?, + displayName: json_['displayName'] as core.String?, + encryptionSpec: json_.containsKey('encryptionSpec') + ? GoogleCloudAiplatformV1EncryptionSpec.fromJson( + json_['encryptionSpec'] as core.Map) : null, - trainingDataset: json_.containsKey('trainingDataset') - ? GoogleCloudAiplatformV1ModelMonitoringObjectiveConfigTrainingDataset - .fromJson(json_['trainingDataset'] + etag: json_['etag'] as core.String?, + eucConfig: json_.containsKey('eucConfig') + ? GoogleCloudAiplatformV1NotebookEucConfig.fromJson( + json_['eucConfig'] as core.Map) + : null, + idleShutdownConfig: json_.containsKey('idleShutdownConfig') + ? GoogleCloudAiplatformV1NotebookIdleShutdownConfig.fromJson( + json_['idleShutdownConfig'] as core.Map) : null, - trainingPredictionSkewDetectionConfig: json_ - .containsKey('trainingPredictionSkewDetectionConfig') - ? GoogleCloudAiplatformV1ModelMonitoringObjectiveConfigTrainingPredictionSkewDetectionConfig - .fromJson(json_['trainingPredictionSkewDetectionConfig'] + isDefault: json_['isDefault'] as core.bool?, + labels: + (json_['labels'] as core.Map?)?.map( + (key, value) => core.MapEntry( + key, + value as core.String, + ), + ), + machineSpec: json_.containsKey('machineSpec') + ? GoogleCloudAiplatformV1MachineSpec.fromJson( + json_['machineSpec'] as core.Map) + : null, + name: json_['name'] as core.String?, + networkSpec: json_.containsKey('networkSpec') + ? GoogleCloudAiplatformV1NetworkSpec.fromJson( + json_['networkSpec'] as core.Map) + : null, + networkTags: (json_['networkTags'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + notebookRuntimeType: json_['notebookRuntimeType'] as core.String?, + serviceAccount: json_['serviceAccount'] as core.String?, + shieldedVmConfig: json_.containsKey('shieldedVmConfig') + ? 
GoogleCloudAiplatformV1ShieldedVmConfig.fromJson( + json_['shieldedVmConfig'] as core.Map) : null, + updateTime: json_['updateTime'] as core.String?, ); core.Map toJson() => { - if (explanationConfig != null) 'explanationConfig': explanationConfig!, - if (predictionDriftDetectionConfig != null) - 'predictionDriftDetectionConfig': predictionDriftDetectionConfig!, - if (trainingDataset != null) 'trainingDataset': trainingDataset!, - if (trainingPredictionSkewDetectionConfig != null) - 'trainingPredictionSkewDetectionConfig': - trainingPredictionSkewDetectionConfig!, + if (createTime != null) 'createTime': createTime!, + if (dataPersistentDiskSpec != null) + 'dataPersistentDiskSpec': dataPersistentDiskSpec!, + if (description != null) 'description': description!, + if (displayName != null) 'displayName': displayName!, + if (encryptionSpec != null) 'encryptionSpec': encryptionSpec!, + if (etag != null) 'etag': etag!, + if (eucConfig != null) 'eucConfig': eucConfig!, + if (idleShutdownConfig != null) + 'idleShutdownConfig': idleShutdownConfig!, + if (isDefault != null) 'isDefault': isDefault!, + if (labels != null) 'labels': labels!, + if (machineSpec != null) 'machineSpec': machineSpec!, + if (name != null) 'name': name!, + if (networkSpec != null) 'networkSpec': networkSpec!, + if (networkTags != null) 'networkTags': networkTags!, + if (notebookRuntimeType != null) + 'notebookRuntimeType': notebookRuntimeType!, + if (serviceAccount != null) 'serviceAccount': serviceAccount!, + if (shieldedVmConfig != null) 'shieldedVmConfig': shieldedVmConfig!, + if (updateTime != null) 'updateTime': updateTime!, }; } -/// The config for integrating with Vertex Explainable AI. -/// -/// Only applicable if the Model has explanation_spec populated. -class GoogleCloudAiplatformV1ModelMonitoringObjectiveConfigExplanationConfig { - /// If want to analyze the Vertex Explainable AI feature attribute scores or - /// not. +/// Points to a NotebookRuntimeTemplateRef. +class GoogleCloudAiplatformV1NotebookRuntimeTemplateRef { + /// A resource name of the NotebookRuntimeTemplate. + /// + /// Immutable. + core.String? notebookRuntimeTemplate; + + GoogleCloudAiplatformV1NotebookRuntimeTemplateRef({ + this.notebookRuntimeTemplate, + }); + + GoogleCloudAiplatformV1NotebookRuntimeTemplateRef.fromJson(core.Map json_) + : this( + notebookRuntimeTemplate: + json_['notebookRuntimeTemplate'] as core.String?, + ); + + core.Map toJson() => { + if (notebookRuntimeTemplate != null) + 'notebookRuntimeTemplate': notebookRuntimeTemplate!, + }; +} + +/// PSC config that is used to automatically create forwarding rule via +/// ServiceConnectionMap. +class GoogleCloudAiplatformV1PSCAutomationConfig { + /// The full name of the Google Compute Engine + /// [network](https://cloud.google.com/compute/docs/networks-and-firewalls#networks). + /// + /// [Format](https://cloud.google.com/compute/docs/reference/rest/v1/networks/insert): + /// `projects/{project}/global/networks/{network}`. Where {project} is a + /// project number, as in '12345', and {network} is network name. /// - /// If set to true, Vertex AI will log the feature attributions from explain - /// response and do the skew/drift detection for them. - core.bool? enableFeatureAttributes; + /// Required. + core.String? network; - /// Predictions generated by the BatchPredictionJob using baseline dataset. - GoogleCloudAiplatformV1ModelMonitoringObjectiveConfigExplanationConfigExplanationBaseline? - explanationBaseline; + /// Project id used to create forwarding rule. 
+ /// + /// Required. + core.String? projectId; - GoogleCloudAiplatformV1ModelMonitoringObjectiveConfigExplanationConfig({ - this.enableFeatureAttributes, - this.explanationBaseline, + GoogleCloudAiplatformV1PSCAutomationConfig({ + this.network, + this.projectId, }); - GoogleCloudAiplatformV1ModelMonitoringObjectiveConfigExplanationConfig.fromJson( - core.Map json_) + GoogleCloudAiplatformV1PSCAutomationConfig.fromJson(core.Map json_) : this( - enableFeatureAttributes: - json_['enableFeatureAttributes'] as core.bool?, - explanationBaseline: json_.containsKey('explanationBaseline') - ? GoogleCloudAiplatformV1ModelMonitoringObjectiveConfigExplanationConfigExplanationBaseline - .fromJson(json_['explanationBaseline'] - as core.Map) - : null, + network: json_['network'] as core.String?, + projectId: json_['projectId'] as core.String?, ); core.Map toJson() => { - if (enableFeatureAttributes != null) - 'enableFeatureAttributes': enableFeatureAttributes!, - if (explanationBaseline != null) - 'explanationBaseline': explanationBaseline!, + if (network != null) 'network': network!, + if (projectId != null) 'projectId': projectId!, }; } -/// Output from BatchPredictionJob for Model Monitoring baseline dataset, which -/// can be used to generate baseline attribution scores. -class GoogleCloudAiplatformV1ModelMonitoringObjectiveConfigExplanationConfigExplanationBaseline { - /// BigQuery location for BatchExplain output. - GoogleCloudAiplatformV1BigQueryDestination? bigquery; - - /// Cloud Storage location for BatchExplain output. - GoogleCloudAiplatformV1GcsDestination? gcs; +/// Input for pairwise metric. +class GoogleCloudAiplatformV1PairwiseMetricInput { + /// Pairwise metric instance. + /// + /// Required. + GoogleCloudAiplatformV1PairwiseMetricInstance? instance; - /// The storage format of the predictions generated BatchPrediction job. - /// Possible string values are: - /// - "PREDICTION_FORMAT_UNSPECIFIED" : Should not be set. - /// - "JSONL" : Predictions are in JSONL files. - /// - "BIGQUERY" : Predictions are in BigQuery. - core.String? predictionFormat; + /// Spec for pairwise metric. + /// + /// Required. + GoogleCloudAiplatformV1PairwiseMetricSpec? metricSpec; - GoogleCloudAiplatformV1ModelMonitoringObjectiveConfigExplanationConfigExplanationBaseline({ - this.bigquery, - this.gcs, - this.predictionFormat, + GoogleCloudAiplatformV1PairwiseMetricInput({ + this.instance, + this.metricSpec, }); - GoogleCloudAiplatformV1ModelMonitoringObjectiveConfigExplanationConfigExplanationBaseline.fromJson( - core.Map json_) + GoogleCloudAiplatformV1PairwiseMetricInput.fromJson(core.Map json_) : this( - bigquery: json_.containsKey('bigquery') - ? GoogleCloudAiplatformV1BigQueryDestination.fromJson( - json_['bigquery'] as core.Map) + instance: json_.containsKey('instance') + ? GoogleCloudAiplatformV1PairwiseMetricInstance.fromJson( + json_['instance'] as core.Map) : null, - gcs: json_.containsKey('gcs') - ? GoogleCloudAiplatformV1GcsDestination.fromJson( - json_['gcs'] as core.Map) + metricSpec: json_.containsKey('metricSpec') + ? 
GoogleCloudAiplatformV1PairwiseMetricSpec.fromJson( + json_['metricSpec'] as core.Map) : null, - predictionFormat: json_['predictionFormat'] as core.String?, ); core.Map toJson() => { - if (bigquery != null) 'bigquery': bigquery!, - if (gcs != null) 'gcs': gcs!, - if (predictionFormat != null) 'predictionFormat': predictionFormat!, + if (instance != null) 'instance': instance!, + if (metricSpec != null) 'metricSpec': metricSpec!, }; } -/// The config for Prediction data drift detection. -class GoogleCloudAiplatformV1ModelMonitoringObjectiveConfigPredictionDriftDetectionConfig { - /// Key is the feature name and value is the threshold. - /// - /// The threshold here is against attribution score distance between different - /// time windows. - core.Map? - attributionScoreDriftThresholds; - - /// Drift anomaly detection threshold used by all features. - /// - /// When the per-feature thresholds are not set, this field can be used to - /// specify a threshold for all features. - GoogleCloudAiplatformV1ThresholdConfig? defaultDriftThreshold; - - /// Key is the feature name and value is the threshold. +/// Pairwise metric instance. +/// +/// Usually one instance corresponds to one row in an evaluation dataset. +class GoogleCloudAiplatformV1PairwiseMetricInstance { + /// Instance specified as a json string. /// - /// If a feature needs to be monitored for drift, a value threshold must be - /// configured for that feature. The threshold here is against feature - /// distribution distance between different time windws. - core.Map? - driftThresholds; + /// String key-value pairs are expected in the json_instance to render + /// PairwiseMetricSpec.instance_prompt_template. + core.String? jsonInstance; - GoogleCloudAiplatformV1ModelMonitoringObjectiveConfigPredictionDriftDetectionConfig({ - this.attributionScoreDriftThresholds, - this.defaultDriftThreshold, - this.driftThresholds, + GoogleCloudAiplatformV1PairwiseMetricInstance({ + this.jsonInstance, }); - GoogleCloudAiplatformV1ModelMonitoringObjectiveConfigPredictionDriftDetectionConfig.fromJson( - core.Map json_) + GoogleCloudAiplatformV1PairwiseMetricInstance.fromJson(core.Map json_) : this( - attributionScoreDriftThresholds: - (json_['attributionScoreDriftThresholds'] - as core.Map?) - ?.map( - (key, value) => core.MapEntry( - key, - GoogleCloudAiplatformV1ThresholdConfig.fromJson( - value as core.Map), - ), - ), - defaultDriftThreshold: json_.containsKey('defaultDriftThreshold') - ? GoogleCloudAiplatformV1ThresholdConfig.fromJson( - json_['defaultDriftThreshold'] - as core.Map) - : null, - driftThresholds: - (json_['driftThresholds'] as core.Map?) - ?.map( - (key, value) => core.MapEntry( - key, - GoogleCloudAiplatformV1ThresholdConfig.fromJson( - value as core.Map), - ), - ), + jsonInstance: json_['jsonInstance'] as core.String?, ); core.Map toJson() => { - if (attributionScoreDriftThresholds != null) - 'attributionScoreDriftThresholds': attributionScoreDriftThresholds!, - if (defaultDriftThreshold != null) - 'defaultDriftThreshold': defaultDriftThreshold!, - if (driftThresholds != null) 'driftThresholds': driftThresholds!, + if (jsonInstance != null) 'jsonInstance': jsonInstance!, }; } -/// Training Dataset information. -class GoogleCloudAiplatformV1ModelMonitoringObjectiveConfigTrainingDataset { - /// The BigQuery table of the unmanaged Dataset used to train this Model. - GoogleCloudAiplatformV1BigQuerySource? bigquerySource; +/// Spec for pairwise metric result. 
+class GoogleCloudAiplatformV1PairwiseMetricResult { + /// Explanation for pairwise metric score. + /// + /// Output only. + core.String? explanation; - /// Data format of the dataset, only applicable if the input is from Google - /// Cloud Storage. + /// Pairwise metric choice. /// - /// The possible formats are: "tf-record" The source file is a TFRecord file. - /// "csv" The source file is a CSV file. "jsonl" The source file is a JSONL - /// file. - core.String? dataFormat; + /// Output only. + /// Possible string values are: + /// - "PAIRWISE_CHOICE_UNSPECIFIED" : Unspecified prediction choice. + /// - "BASELINE" : Baseline prediction wins + /// - "CANDIDATE" : Candidate prediction wins + /// - "TIE" : Winner cannot be determined + core.String? pairwiseChoice; - /// The resource name of the Dataset used to train this Model. - core.String? dataset; + GoogleCloudAiplatformV1PairwiseMetricResult({ + this.explanation, + this.pairwiseChoice, + }); - /// The Google Cloud Storage uri of the unmanaged Dataset used to train this - /// Model. - GoogleCloudAiplatformV1GcsSource? gcsSource; + GoogleCloudAiplatformV1PairwiseMetricResult.fromJson(core.Map json_) + : this( + explanation: json_['explanation'] as core.String?, + pairwiseChoice: json_['pairwiseChoice'] as core.String?, + ); - /// Strategy to sample data from Training Dataset. - /// - /// If not set, we process the whole dataset. - GoogleCloudAiplatformV1SamplingStrategy? loggingSamplingStrategy; + core.Map toJson() => { + if (explanation != null) 'explanation': explanation!, + if (pairwiseChoice != null) 'pairwiseChoice': pairwiseChoice!, + }; +} - /// The target field name the model is to predict. +/// Spec for pairwise metric. +class GoogleCloudAiplatformV1PairwiseMetricSpec { + /// Metric prompt template for pairwise metric. /// - /// This field will be excluded when doing Predict and (or) Explain for the - /// training data. - core.String? targetField; + /// Required. + core.String? metricPromptTemplate; - GoogleCloudAiplatformV1ModelMonitoringObjectiveConfigTrainingDataset({ - this.bigquerySource, - this.dataFormat, - this.dataset, - this.gcsSource, - this.loggingSamplingStrategy, - this.targetField, + GoogleCloudAiplatformV1PairwiseMetricSpec({ + this.metricPromptTemplate, }); - GoogleCloudAiplatformV1ModelMonitoringObjectiveConfigTrainingDataset.fromJson( - core.Map json_) + GoogleCloudAiplatformV1PairwiseMetricSpec.fromJson(core.Map json_) : this( - bigquerySource: json_.containsKey('bigquerySource') - ? GoogleCloudAiplatformV1BigQuerySource.fromJson( - json_['bigquerySource'] - as core.Map) - : null, - dataFormat: json_['dataFormat'] as core.String?, - dataset: json_['dataset'] as core.String?, - gcsSource: json_.containsKey('gcsSource') - ? GoogleCloudAiplatformV1GcsSource.fromJson( - json_['gcsSource'] as core.Map) - : null, - loggingSamplingStrategy: json_.containsKey('loggingSamplingStrategy') - ? 
GoogleCloudAiplatformV1SamplingStrategy.fromJson( - json_['loggingSamplingStrategy'] - as core.Map) - : null, - targetField: json_['targetField'] as core.String?, + metricPromptTemplate: json_['metricPromptTemplate'] as core.String?, ); core.Map toJson() => { - if (bigquerySource != null) 'bigquerySource': bigquerySource!, - if (dataFormat != null) 'dataFormat': dataFormat!, - if (dataset != null) 'dataset': dataset!, - if (gcsSource != null) 'gcsSource': gcsSource!, - if (loggingSamplingStrategy != null) - 'loggingSamplingStrategy': loggingSamplingStrategy!, - if (targetField != null) 'targetField': targetField!, + if (metricPromptTemplate != null) + 'metricPromptTemplate': metricPromptTemplate!, }; } -/// The config for Training & Prediction data skew detection. -/// -/// It specifies the training dataset sources and the skew detection parameters. -class GoogleCloudAiplatformV1ModelMonitoringObjectiveConfigTrainingPredictionSkewDetectionConfig { - /// Key is the feature name and value is the threshold. - /// - /// The threshold here is against attribution score distance between the - /// training and prediction feature. - core.Map? - attributionScoreSkewThresholds; - - /// Skew anomaly detection threshold used by all features. +/// Input for pairwise question answering quality metric. +class GoogleCloudAiplatformV1PairwiseQuestionAnsweringQualityInput { + /// Pairwise question answering quality instance. /// - /// When the per-feature thresholds are not set, this field can be used to - /// specify a threshold for all features. - GoogleCloudAiplatformV1ThresholdConfig? defaultSkewThreshold; + /// Required. + GoogleCloudAiplatformV1PairwiseQuestionAnsweringQualityInstance? instance; - /// Key is the feature name and value is the threshold. + /// Spec for pairwise question answering quality score metric. /// - /// If a feature needs to be monitored for skew, a value threshold must be - /// configured for that feature. The threshold here is against feature - /// distribution distance between the training and prediction feature. - core.Map? skewThresholds; + /// Required. + GoogleCloudAiplatformV1PairwiseQuestionAnsweringQualitySpec? metricSpec; - GoogleCloudAiplatformV1ModelMonitoringObjectiveConfigTrainingPredictionSkewDetectionConfig({ - this.attributionScoreSkewThresholds, - this.defaultSkewThreshold, - this.skewThresholds, + GoogleCloudAiplatformV1PairwiseQuestionAnsweringQualityInput({ + this.instance, + this.metricSpec, }); - GoogleCloudAiplatformV1ModelMonitoringObjectiveConfigTrainingPredictionSkewDetectionConfig.fromJson( + GoogleCloudAiplatformV1PairwiseQuestionAnsweringQualityInput.fromJson( core.Map json_) : this( - attributionScoreSkewThresholds: - (json_['attributionScoreSkewThresholds'] - as core.Map?) - ?.map( - (key, value) => core.MapEntry( - key, - GoogleCloudAiplatformV1ThresholdConfig.fromJson( - value as core.Map), - ), - ), - defaultSkewThreshold: json_.containsKey('defaultSkewThreshold') - ? GoogleCloudAiplatformV1ThresholdConfig.fromJson( - json_['defaultSkewThreshold'] + instance: json_.containsKey('instance') + ? GoogleCloudAiplatformV1PairwiseQuestionAnsweringQualityInstance + .fromJson( + json_['instance'] as core.Map) + : null, + metricSpec: json_.containsKey('metricSpec') + ? GoogleCloudAiplatformV1PairwiseQuestionAnsweringQualitySpec + .fromJson(json_['metricSpec'] as core.Map) : null, - skewThresholds: - (json_['skewThresholds'] as core.Map?) 
- ?.map( - (key, value) => core.MapEntry( - key, - GoogleCloudAiplatformV1ThresholdConfig.fromJson( - value as core.Map), - ), - ), ); core.Map toJson() => { - if (attributionScoreSkewThresholds != null) - 'attributionScoreSkewThresholds': attributionScoreSkewThresholds!, - if (defaultSkewThreshold != null) - 'defaultSkewThreshold': defaultSkewThreshold!, - if (skewThresholds != null) 'skewThresholds': skewThresholds!, + if (instance != null) 'instance': instance!, + if (metricSpec != null) 'metricSpec': metricSpec!, }; } -/// Statistics and anomalies generated by Model Monitoring. -class GoogleCloudAiplatformV1ModelMonitoringStatsAnomalies { - /// Number of anomalies within all stats. - core.int? anomalyCount; +/// Spec for pairwise question answering quality instance. +class GoogleCloudAiplatformV1PairwiseQuestionAnsweringQualityInstance { + /// Output of the baseline model. + /// + /// Required. + core.String? baselinePrediction; - /// Deployed Model ID. - core.String? deployedModelId; + /// Text to answer the question. + /// + /// Required. + core.String? context; - /// A list of historical Stats and Anomalies generated for all Features. - core.List< - GoogleCloudAiplatformV1ModelMonitoringStatsAnomaliesFeatureHistoricStatsAnomalies>? - featureStats; + /// Question Answering prompt for LLM. + /// + /// Required. + core.String? instruction; - /// Model Monitoring Objective those stats and anomalies belonging to. - /// Possible string values are: - /// - "MODEL_DEPLOYMENT_MONITORING_OBJECTIVE_TYPE_UNSPECIFIED" : Default - /// value, should not be set. - /// - "RAW_FEATURE_SKEW" : Raw feature values' stats to detect skew between - /// Training-Prediction datasets. - /// - "RAW_FEATURE_DRIFT" : Raw feature values' stats to detect drift between - /// Serving-Prediction datasets. - /// - "FEATURE_ATTRIBUTION_SKEW" : Feature attribution scores to detect skew - /// between Training-Prediction datasets. - /// - "FEATURE_ATTRIBUTION_DRIFT" : Feature attribution scores to detect skew - /// between Prediction datasets collected within different time windows. - core.String? objective; + /// Output of the candidate model. + /// + /// Required. + core.String? prediction; - GoogleCloudAiplatformV1ModelMonitoringStatsAnomalies({ - this.anomalyCount, - this.deployedModelId, - this.featureStats, - this.objective, + /// Ground truth used to compare against the prediction. + /// + /// Optional. + core.String? reference; + + GoogleCloudAiplatformV1PairwiseQuestionAnsweringQualityInstance({ + this.baselinePrediction, + this.context, + this.instruction, + this.prediction, + this.reference, }); - GoogleCloudAiplatformV1ModelMonitoringStatsAnomalies.fromJson(core.Map json_) + GoogleCloudAiplatformV1PairwiseQuestionAnsweringQualityInstance.fromJson( + core.Map json_) : this( - anomalyCount: json_['anomalyCount'] as core.int?, - deployedModelId: json_['deployedModelId'] as core.String?, - featureStats: (json_['featureStats'] as core.List?) 
- ?.map((value) => - GoogleCloudAiplatformV1ModelMonitoringStatsAnomaliesFeatureHistoricStatsAnomalies - .fromJson(value as core.Map)) - .toList(), - objective: json_['objective'] as core.String?, + baselinePrediction: json_['baselinePrediction'] as core.String?, + context: json_['context'] as core.String?, + instruction: json_['instruction'] as core.String?, + prediction: json_['prediction'] as core.String?, + reference: json_['reference'] as core.String?, ); core.Map toJson() => { - if (anomalyCount != null) 'anomalyCount': anomalyCount!, - if (deployedModelId != null) 'deployedModelId': deployedModelId!, - if (featureStats != null) 'featureStats': featureStats!, - if (objective != null) 'objective': objective!, + if (baselinePrediction != null) + 'baselinePrediction': baselinePrediction!, + if (context != null) 'context': context!, + if (instruction != null) 'instruction': instruction!, + if (prediction != null) 'prediction': prediction!, + if (reference != null) 'reference': reference!, }; } -/// Historical Stats (and Anomalies) for a specific Feature. -class GoogleCloudAiplatformV1ModelMonitoringStatsAnomaliesFeatureHistoricStatsAnomalies { - /// Display Name of the Feature. - core.String? featureDisplayName; - - /// A list of historical stats generated by different time window's Prediction - /// Dataset. - core.List? predictionStats; +/// Spec for pairwise question answering quality result. +class GoogleCloudAiplatformV1PairwiseQuestionAnsweringQualityResult { + /// Confidence for question answering quality score. + /// + /// Output only. + core.double? confidence; - /// Threshold for anomaly detection. - GoogleCloudAiplatformV1ThresholdConfig? threshold; + /// Explanation for question answering quality score. + /// + /// Output only. + core.String? explanation; - /// Stats calculated for the Training Dataset. - GoogleCloudAiplatformV1FeatureStatsAnomaly? trainingStats; + /// Pairwise question answering prediction choice. + /// + /// Output only. + /// Possible string values are: + /// - "PAIRWISE_CHOICE_UNSPECIFIED" : Unspecified prediction choice. + /// - "BASELINE" : Baseline prediction wins + /// - "CANDIDATE" : Candidate prediction wins + /// - "TIE" : Winner cannot be determined + core.String? pairwiseChoice; - GoogleCloudAiplatformV1ModelMonitoringStatsAnomaliesFeatureHistoricStatsAnomalies({ - this.featureDisplayName, - this.predictionStats, - this.threshold, - this.trainingStats, + GoogleCloudAiplatformV1PairwiseQuestionAnsweringQualityResult({ + this.confidence, + this.explanation, + this.pairwiseChoice, }); - GoogleCloudAiplatformV1ModelMonitoringStatsAnomaliesFeatureHistoricStatsAnomalies.fromJson( + GoogleCloudAiplatformV1PairwiseQuestionAnsweringQualityResult.fromJson( core.Map json_) : this( - featureDisplayName: json_['featureDisplayName'] as core.String?, - predictionStats: (json_['predictionStats'] as core.List?) - ?.map((value) => - GoogleCloudAiplatformV1FeatureStatsAnomaly.fromJson( - value as core.Map)) - .toList(), - threshold: json_.containsKey('threshold') - ? GoogleCloudAiplatformV1ThresholdConfig.fromJson( - json_['threshold'] as core.Map) - : null, - trainingStats: json_.containsKey('trainingStats') - ? 
GoogleCloudAiplatformV1FeatureStatsAnomaly.fromJson( - json_['trainingStats'] as core.Map) - : null, + confidence: (json_['confidence'] as core.num?)?.toDouble(), + explanation: json_['explanation'] as core.String?, + pairwiseChoice: json_['pairwiseChoice'] as core.String?, ); core.Map toJson() => { - if (featureDisplayName != null) - 'featureDisplayName': featureDisplayName!, - if (predictionStats != null) 'predictionStats': predictionStats!, - if (threshold != null) 'threshold': threshold!, - if (trainingStats != null) 'trainingStats': trainingStats!, + if (confidence != null) 'confidence': confidence!, + if (explanation != null) 'explanation': explanation!, + if (pairwiseChoice != null) 'pairwiseChoice': pairwiseChoice!, }; } -/// Contains information about the original Model if this Model is a copy. -class GoogleCloudAiplatformV1ModelOriginalModelInfo { - /// The resource name of the Model this Model is a copy of, including the - /// revision. +/// Spec for pairwise question answering quality score metric. +typedef GoogleCloudAiplatformV1PairwiseQuestionAnsweringQualitySpec + = $QuestionAnsweringQualitySpec; + +/// Input for pairwise summarization quality metric. +class GoogleCloudAiplatformV1PairwiseSummarizationQualityInput { + /// Pairwise summarization quality instance. /// - /// Format: - /// `projects/{project}/locations/{location}/models/{model_id}@{version_id}` + /// Required. + GoogleCloudAiplatformV1PairwiseSummarizationQualityInstance? instance; + + /// Spec for pairwise summarization quality score metric. /// - /// Output only. - core.String? model; + /// Required. + GoogleCloudAiplatformV1PairwiseSummarizationQualitySpec? metricSpec; - GoogleCloudAiplatformV1ModelOriginalModelInfo({ - this.model, + GoogleCloudAiplatformV1PairwiseSummarizationQualityInput({ + this.instance, + this.metricSpec, }); - GoogleCloudAiplatformV1ModelOriginalModelInfo.fromJson(core.Map json_) + GoogleCloudAiplatformV1PairwiseSummarizationQualityInput.fromJson( + core.Map json_) : this( - model: json_['model'] as core.String?, + instance: json_.containsKey('instance') + ? GoogleCloudAiplatformV1PairwiseSummarizationQualityInstance + .fromJson( + json_['instance'] as core.Map) + : null, + metricSpec: json_.containsKey('metricSpec') + ? GoogleCloudAiplatformV1PairwiseSummarizationQualitySpec + .fromJson(json_['metricSpec'] + as core.Map) + : null, ); core.Map toJson() => { - if (model != null) 'model': model!, + if (instance != null) 'instance': instance!, + if (metricSpec != null) 'metricSpec': metricSpec!, }; } -/// Detail description of the source information of the model. -typedef GoogleCloudAiplatformV1ModelSourceInfo = $ModelSourceInfo; - -/// Request message for EndpointService.MutateDeployedModel. -class GoogleCloudAiplatformV1MutateDeployedModelRequest { - /// The DeployedModel to be mutated within the Endpoint. +/// Spec for pairwise summarization quality instance. +class GoogleCloudAiplatformV1PairwiseSummarizationQualityInstance { + /// Output of the baseline model. /// - /// Only the following fields can be mutated: * `min_replica_count` in either - /// DedicatedResources or AutomaticResources * `max_replica_count` in either - /// DedicatedResources or AutomaticResources * autoscaling_metric_specs * - /// `disable_container_logging` (v1 only) * `enable_container_logging` - /// (v1beta1 only) + /// Required. + core.String? baselinePrediction; + + /// Text to be summarized. /// /// Required. - GoogleCloudAiplatformV1DeployedModel? deployedModel; + core.String? 
context; - /// The update mask applies to the resource. + /// Summarization prompt for LLM. /// - /// See google.protobuf.FieldMask. + /// Required. + core.String? instruction; + + /// Output of the candidate model. /// /// Required. - core.String? updateMask; + core.String? prediction; - GoogleCloudAiplatformV1MutateDeployedModelRequest({ - this.deployedModel, - this.updateMask, + /// Ground truth used to compare against the prediction. + /// + /// Optional. + core.String? reference; + + GoogleCloudAiplatformV1PairwiseSummarizationQualityInstance({ + this.baselinePrediction, + this.context, + this.instruction, + this.prediction, + this.reference, }); - GoogleCloudAiplatformV1MutateDeployedModelRequest.fromJson(core.Map json_) + GoogleCloudAiplatformV1PairwiseSummarizationQualityInstance.fromJson( + core.Map json_) : this( - deployedModel: json_.containsKey('deployedModel') - ? GoogleCloudAiplatformV1DeployedModel.fromJson( - json_['deployedModel'] as core.Map) - : null, - updateMask: json_['updateMask'] as core.String?, + baselinePrediction: json_['baselinePrediction'] as core.String?, + context: json_['context'] as core.String?, + instruction: json_['instruction'] as core.String?, + prediction: json_['prediction'] as core.String?, + reference: json_['reference'] as core.String?, ); core.Map toJson() => { - if (deployedModel != null) 'deployedModel': deployedModel!, - if (updateMask != null) 'updateMask': updateMask!, + if (baselinePrediction != null) + 'baselinePrediction': baselinePrediction!, + if (context != null) 'context': context!, + if (instruction != null) 'instruction': instruction!, + if (prediction != null) 'prediction': prediction!, + if (reference != null) 'reference': reference!, }; } -/// Represents a Neural Architecture Search (NAS) job. -class GoogleCloudAiplatformV1NasJob { - /// Time when the NasJob was created. +/// Spec for pairwise summarization quality result. +class GoogleCloudAiplatformV1PairwiseSummarizationQualityResult { + /// Confidence for summarization quality score. /// /// Output only. - core.String? createTime; + core.double? confidence; - /// The display name of the NasJob. + /// Explanation for summarization quality score. /// - /// The name can be up to 128 characters long and can consist of any UTF-8 - /// characters. + /// Output only. + core.String? explanation; + + /// Pairwise summarization prediction choice. /// - /// Required. - core.String? displayName; + /// Output only. + /// Possible string values are: + /// - "PAIRWISE_CHOICE_UNSPECIFIED" : Unspecified prediction choice. + /// - "BASELINE" : Baseline prediction wins + /// - "CANDIDATE" : Candidate prediction wins + /// - "TIE" : Winner cannot be determined + core.String? pairwiseChoice; - /// Enable a separation of Custom model training and restricted image training - /// for tenant project. + GoogleCloudAiplatformV1PairwiseSummarizationQualityResult({ + this.confidence, + this.explanation, + this.pairwiseChoice, + }); + + GoogleCloudAiplatformV1PairwiseSummarizationQualityResult.fromJson( + core.Map json_) + : this( + confidence: (json_['confidence'] as core.num?)?.toDouble(), + explanation: json_['explanation'] as core.String?, + pairwiseChoice: json_['pairwiseChoice'] as core.String?, + ); + + core.Map toJson() => { + if (confidence != null) 'confidence': confidence!, + if (explanation != null) 'explanation': explanation!, + if (pairwiseChoice != null) 'pairwiseChoice': pairwiseChoice!, + }; +} + +/// Spec for pairwise summarization quality score metric. 
+class GoogleCloudAiplatformV1PairwiseSummarizationQualitySpec { + /// Whether to use instance.reference to compute pairwise summarization + /// quality. /// /// Optional. - @core.Deprecated( - 'Not supported. Member documentation may have more information.', - ) - core.bool? enableRestrictedImageTraining; + core.bool? useReference; - /// Customer-managed encryption key options for a NasJob. + /// Which version to use for evaluation. /// - /// If this is set, then all resources created by the NasJob will be encrypted - /// with the provided encryption key. - GoogleCloudAiplatformV1EncryptionSpec? encryptionSpec; + /// Optional. + core.int? version; - /// Time when the NasJob entered any of the following states: - /// `JOB_STATE_SUCCEEDED`, `JOB_STATE_FAILED`, `JOB_STATE_CANCELLED`. - /// - /// Output only. - core.String? endTime; + GoogleCloudAiplatformV1PairwiseSummarizationQualitySpec({ + this.useReference, + this.version, + }); - /// Only populated when job's state is JOB_STATE_FAILED or - /// JOB_STATE_CANCELLED. - /// - /// Output only. - GoogleRpcStatus? error; + GoogleCloudAiplatformV1PairwiseSummarizationQualitySpec.fromJson( + core.Map json_) + : this( + useReference: json_['useReference'] as core.bool?, + version: json_['version'] as core.int?, + ); - /// The labels with user-defined metadata to organize NasJobs. - /// - /// Label keys and values can be no longer than 64 characters (Unicode - /// codepoints), can only contain lowercase letters, numeric characters, - /// underscores and dashes. International characters are allowed. See - /// https://goo.gl/xmQnxf for more information and examples of labels. - core.Map? labels; + core.Map toJson() => { + if (useReference != null) 'useReference': useReference!, + if (version != null) 'version': version!, + }; +} - /// Resource name of the NasJob. +/// A datatype containing media that is part of a multi-part `Content` message. +/// +/// A `Part` consists of data which has an associated datatype. A `Part` can +/// only contain one of the accepted types in `Part.data`. A `Part` must have a +/// fixed IANA MIME type identifying the type and subtype of the media if +/// `inline_data` or `file_data` field is filled with raw bytes. +class GoogleCloudAiplatformV1Part { + /// URI based data. /// - /// Output only. - core.String? name; + /// Optional. + GoogleCloudAiplatformV1FileData? fileData; - /// Output of the NasJob. + /// A predicted \[FunctionCall\] returned from the model that contains a + /// string representing the \[FunctionDeclaration.name\] with the parameters + /// and their values. /// - /// Output only. - GoogleCloudAiplatformV1NasJobOutput? nasJobOutput; + /// Optional. + GoogleCloudAiplatformV1FunctionCall? functionCall; - /// The specification of a NasJob. + /// The result output of a \[FunctionCall\] that contains a string + /// representing the \[FunctionDeclaration.name\] and a structured JSON object + /// containing any output from the function call. /// - /// Required. - GoogleCloudAiplatformV1NasJobSpec? nasJobSpec; - - /// Reserved for future use. + /// It is used as context to the model. /// - /// Output only. - core.bool? satisfiesPzi; + /// Optional. + GoogleCloudAiplatformV1FunctionResponse? functionResponse; - /// Reserved for future use. + /// Inlined bytes data. /// - /// Output only. - core.bool? satisfiesPzs; + /// Optional. + GoogleCloudAiplatformV1Blob? inlineData; - /// Time when the NasJob for the first time entered the `JOB_STATE_RUNNING` - /// state. + /// Text part (can be code). 
/// - /// Output only. - core.String? startTime; + /// Optional. + core.String? text; - /// The detailed state of the job. + /// Video metadata. /// - /// Output only. - /// Possible string values are: - /// - "JOB_STATE_UNSPECIFIED" : The job state is unspecified. - /// - "JOB_STATE_QUEUED" : The job has been just created or resumed and - /// processing has not yet begun. - /// - "JOB_STATE_PENDING" : The service is preparing to run the job. - /// - "JOB_STATE_RUNNING" : The job is in progress. - /// - "JOB_STATE_SUCCEEDED" : The job completed successfully. - /// - "JOB_STATE_FAILED" : The job failed. - /// - "JOB_STATE_CANCELLING" : The job is being cancelled. From this state the - /// job may only go to either `JOB_STATE_SUCCEEDED`, `JOB_STATE_FAILED` or - /// `JOB_STATE_CANCELLED`. - /// - "JOB_STATE_CANCELLED" : The job has been cancelled. - /// - "JOB_STATE_PAUSED" : The job has been stopped, and can be resumed. - /// - "JOB_STATE_EXPIRED" : The job has expired. - /// - "JOB_STATE_UPDATING" : The job is being updated. Only jobs in the - /// `RUNNING` state can be updated. After updating, the job goes back to the - /// `RUNNING` state. - /// - "JOB_STATE_PARTIALLY_SUCCEEDED" : The job is partially succeeded, some - /// results may be missing due to errors. - core.String? state; - - /// Time when the NasJob was most recently updated. + /// The metadata should only be specified while the video data is presented in + /// inline_data or file_data. /// - /// Output only. - core.String? updateTime; + /// Optional. + GoogleCloudAiplatformV1VideoMetadata? videoMetadata; - GoogleCloudAiplatformV1NasJob({ - this.createTime, - this.displayName, - this.enableRestrictedImageTraining, - this.encryptionSpec, - this.endTime, - this.error, - this.labels, - this.name, - this.nasJobOutput, - this.nasJobSpec, - this.satisfiesPzi, - this.satisfiesPzs, - this.startTime, - this.state, - this.updateTime, + GoogleCloudAiplatformV1Part({ + this.fileData, + this.functionCall, + this.functionResponse, + this.inlineData, + this.text, + this.videoMetadata, }); - GoogleCloudAiplatformV1NasJob.fromJson(core.Map json_) + GoogleCloudAiplatformV1Part.fromJson(core.Map json_) : this( - createTime: json_['createTime'] as core.String?, - displayName: json_['displayName'] as core.String?, - enableRestrictedImageTraining: - json_['enableRestrictedImageTraining'] as core.bool?, - encryptionSpec: json_.containsKey('encryptionSpec') - ? GoogleCloudAiplatformV1EncryptionSpec.fromJson( - json_['encryptionSpec'] - as core.Map) + fileData: json_.containsKey('fileData') + ? GoogleCloudAiplatformV1FileData.fromJson( + json_['fileData'] as core.Map) : null, - endTime: json_['endTime'] as core.String?, - error: json_.containsKey('error') - ? GoogleRpcStatus.fromJson( - json_['error'] as core.Map) + functionCall: json_.containsKey('functionCall') + ? GoogleCloudAiplatformV1FunctionCall.fromJson( + json_['functionCall'] as core.Map) : null, - labels: - (json_['labels'] as core.Map?)?.map( - (key, value) => core.MapEntry( - key, - value as core.String, - ), - ), - name: json_['name'] as core.String?, - nasJobOutput: json_.containsKey('nasJobOutput') - ? GoogleCloudAiplatformV1NasJobOutput.fromJson( - json_['nasJobOutput'] as core.Map) + functionResponse: json_.containsKey('functionResponse') + ? GoogleCloudAiplatformV1FunctionResponse.fromJson( + json_['functionResponse'] + as core.Map) : null, - nasJobSpec: json_.containsKey('nasJobSpec') - ? 
GoogleCloudAiplatformV1NasJobSpec.fromJson( - json_['nasJobSpec'] as core.Map) + inlineData: json_.containsKey('inlineData') + ? GoogleCloudAiplatformV1Blob.fromJson( + json_['inlineData'] as core.Map) + : null, + text: json_['text'] as core.String?, + videoMetadata: json_.containsKey('videoMetadata') + ? GoogleCloudAiplatformV1VideoMetadata.fromJson( + json_['videoMetadata'] as core.Map) : null, - satisfiesPzi: json_['satisfiesPzi'] as core.bool?, - satisfiesPzs: json_['satisfiesPzs'] as core.bool?, - startTime: json_['startTime'] as core.String?, - state: json_['state'] as core.String?, - updateTime: json_['updateTime'] as core.String?, ); core.Map toJson() => { - if (createTime != null) 'createTime': createTime!, - if (displayName != null) 'displayName': displayName!, - if (enableRestrictedImageTraining != null) - 'enableRestrictedImageTraining': enableRestrictedImageTraining!, - if (encryptionSpec != null) 'encryptionSpec': encryptionSpec!, - if (endTime != null) 'endTime': endTime!, - if (error != null) 'error': error!, - if (labels != null) 'labels': labels!, - if (name != null) 'name': name!, - if (nasJobOutput != null) 'nasJobOutput': nasJobOutput!, - if (nasJobSpec != null) 'nasJobSpec': nasJobSpec!, - if (satisfiesPzi != null) 'satisfiesPzi': satisfiesPzi!, - if (satisfiesPzs != null) 'satisfiesPzs': satisfiesPzs!, - if (startTime != null) 'startTime': startTime!, - if (state != null) 'state': state!, - if (updateTime != null) 'updateTime': updateTime!, + if (fileData != null) 'fileData': fileData!, + if (functionCall != null) 'functionCall': functionCall!, + if (functionResponse != null) 'functionResponse': functionResponse!, + if (inlineData != null) 'inlineData': inlineData!, + if (text != null) 'text': text!, + if (videoMetadata != null) 'videoMetadata': videoMetadata!, }; } -/// Represents a uCAIP NasJob output. -class GoogleCloudAiplatformV1NasJobOutput { - /// The output of this multi-trial Neural Architecture Search (NAS) job. +/// Request message for JobService.PauseModelDeploymentMonitoringJob. +typedef GoogleCloudAiplatformV1PauseModelDeploymentMonitoringJobRequest + = $Empty; + +/// Request message for ScheduleService.PauseSchedule. +typedef GoogleCloudAiplatformV1PauseScheduleRequest = $Empty; + +/// Represents the spec of persistent disk options. +class GoogleCloudAiplatformV1PersistentDiskSpec { + /// Size in GB of the disk (default is 100GB). + core.String? diskSizeGb; + + /// Type of the disk (default is "pd-standard"). /// - /// Output only. - GoogleCloudAiplatformV1NasJobOutputMultiTrialJobOutput? multiTrialJobOutput; + /// Valid values: "pd-ssd" (Persistent Disk Solid State Drive) "pd-standard" + /// (Persistent Disk Hard Disk Drive) "pd-balanced" (Balanced Persistent Disk) + /// "pd-extreme" (Extreme Persistent Disk) + core.String? diskType; - GoogleCloudAiplatformV1NasJobOutput({ - this.multiTrialJobOutput, + GoogleCloudAiplatformV1PersistentDiskSpec({ + this.diskSizeGb, + this.diskType, }); - GoogleCloudAiplatformV1NasJobOutput.fromJson(core.Map json_) + GoogleCloudAiplatformV1PersistentDiskSpec.fromJson(core.Map json_) : this( - multiTrialJobOutput: json_.containsKey('multiTrialJobOutput') - ? 
GoogleCloudAiplatformV1NasJobOutputMultiTrialJobOutput.fromJson( - json_['multiTrialJobOutput'] - as core.Map) - : null, + diskSizeGb: json_['diskSizeGb'] as core.String?, + diskType: json_['diskType'] as core.String?, ); core.Map toJson() => { - if (multiTrialJobOutput != null) - 'multiTrialJobOutput': multiTrialJobOutput!, + if (diskSizeGb != null) 'diskSizeGb': diskSizeGb!, + if (diskType != null) 'diskType': diskType!, }; } -/// The output of a multi-trial Neural Architecture Search (NAS) jobs. -class GoogleCloudAiplatformV1NasJobOutputMultiTrialJobOutput { - /// List of NasTrials that were started as part of search stage. +/// Represents long-lasting resources that are dedicated to users to run custom +/// workloads. +/// +/// A PersistentResource can have multiple node pools and each node pool can +/// have its own machine spec. +class GoogleCloudAiplatformV1PersistentResource { + /// Time when the PersistentResource was created. /// /// Output only. - core.List? searchTrials; + core.String? createTime; - /// List of NasTrials that were started as part of train stage. + /// The display name of the PersistentResource. /// - /// Output only. - core.List? trainTrials; + /// The name can be up to 128 characters long and can consist of any UTF-8 + /// characters. + /// + /// Optional. + core.String? displayName; - GoogleCloudAiplatformV1NasJobOutputMultiTrialJobOutput({ - this.searchTrials, - this.trainTrials, - }); + /// Customer-managed encryption key spec for a PersistentResource. + /// + /// If set, this PersistentResource and all sub-resources of this + /// PersistentResource will be secured by this key. + /// + /// Optional. + GoogleCloudAiplatformV1EncryptionSpec? encryptionSpec; - GoogleCloudAiplatformV1NasJobOutputMultiTrialJobOutput.fromJson( - core.Map json_) : this( - searchTrials: (json_['searchTrials'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1NasTrial.fromJson( - value as core.Map)) - .toList(), - trainTrials: (json_['trainTrials'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1NasTrial.fromJson( - value as core.Map)) - .toList(), - ); + /// Only populated when persistent resource's state is `STOPPING` or `ERROR`. + /// + /// Output only. + GoogleRpcStatus? error; - core.Map toJson() => { - if (searchTrials != null) 'searchTrials': searchTrials!, - if (trainTrials != null) 'trainTrials': trainTrials!, - }; -} + /// The labels with user-defined metadata to organize PersistentResource. + /// + /// Label keys and values can be no longer than 64 characters (Unicode + /// codepoints), can only contain lowercase letters, numeric characters, + /// underscores and dashes. International characters are allowed. See + /// https://goo.gl/xmQnxf for more information and examples of labels. + /// + /// Optional. + core.Map? labels; -/// Represents the spec of a NasJob. -class GoogleCloudAiplatformV1NasJobSpec { - /// The spec of multi-trial algorithms. - GoogleCloudAiplatformV1NasJobSpecMultiTrialAlgorithmSpec? - multiTrialAlgorithmSpec; + /// Resource name of a PersistentResource. + /// + /// Immutable. + core.String? name; - /// The ID of the existing NasJob in the same Project and Location which will - /// be used to resume search. + /// The full name of the Compute Engine + /// \[network\](/compute/docs/networks-and-firewalls#networks) to be peered with + /// Vertex AI to host the persistent resources. /// - /// search_space_spec and nas_algorithm_spec are obtained from previous NasJob - /// hence should not provide them again for this NasJob. - core.String?
resumeNasJobId; + /// For example, `projects/12345/global/networks/myVPC`. + /// \[Format\](/compute/docs/reference/rest/v1/networks/insert) is of the form + /// `projects/{project}/global/networks/{network}`. Where {project} is a + /// project number, as in `12345`, and {network} is a network name. To specify + /// this field, you must have already + /// [configured VPC Network Peering for Vertex AI](https://cloud.google.com/vertex-ai/docs/general/vpc-peering). + /// If this field is left unspecified, the resources aren't peered with any + /// network. + /// + /// Optional. + core.String? network; - /// It defines the search space for Neural Architecture Search (NAS). - core.String? searchSpaceSpec; + /// A list of names for the reserved IP ranges under the VPC network that can + /// be used for this persistent resource. + /// + /// If set, we will deploy the persistent resource within the provided IP + /// ranges. Otherwise, the persistent resource is deployed to any IP ranges + /// under the provided VPC network. Example: \['vertex-ai-ip-range'\]. + /// + /// Optional. + core.List? reservedIpRanges; - GoogleCloudAiplatformV1NasJobSpec({ - this.multiTrialAlgorithmSpec, - this.resumeNasJobId, - this.searchSpaceSpec, - }); + /// The spec of the pools of different resources. + /// + /// Required. + core.List? resourcePools; - GoogleCloudAiplatformV1NasJobSpec.fromJson(core.Map json_) : this( - multiTrialAlgorithmSpec: json_.containsKey('multiTrialAlgorithmSpec') - ? GoogleCloudAiplatformV1NasJobSpecMultiTrialAlgorithmSpec - .fromJson(json_['multiTrialAlgorithmSpec'] - as core.Map) - : null, - resumeNasJobId: json_['resumeNasJobId'] as core.String?, - searchSpaceSpec: json_['searchSpaceSpec'] as core.String?, - ); + /// Runtime information of the Persistent Resource. + /// + /// Output only. + GoogleCloudAiplatformV1ResourceRuntime? resourceRuntime; + + /// Persistent Resource runtime spec. + /// + /// For example, used for Ray cluster configuration. + /// + /// Optional. + GoogleCloudAiplatformV1ResourceRuntimeSpec? resourceRuntimeSpec; - core.Map toJson() => { - if (multiTrialAlgorithmSpec != null) - 'multiTrialAlgorithmSpec': multiTrialAlgorithmSpec!, - if (resumeNasJobId != null) 'resumeNasJobId': resumeNasJobId!, - if (searchSpaceSpec != null) 'searchSpaceSpec': searchSpaceSpec!, - }; -} + /// Reserved for future use. + /// + /// Output only. + core.bool? satisfiesPzi; -/// The spec of multi-trial Neural Architecture Search (NAS). -class GoogleCloudAiplatformV1NasJobSpecMultiTrialAlgorithmSpec { - /// Metric specs for the NAS job. + /// Reserved for future use. /// - /// Validation for this field is done at `multi_trial_algorithm_spec` field. - GoogleCloudAiplatformV1NasJobSpecMultiTrialAlgorithmSpecMetricSpec? metric; + /// Output only. + core.bool? satisfiesPzs; - /// The multi-trial Neural Architecture Search (NAS) algorithm type. + /// Time when the PersistentResource for the first time entered the `RUNNING` + /// state. /// - /// Defaults to `REINFORCEMENT_LEARNING`. - /// Possible string values are: - /// - "MULTI_TRIAL_ALGORITHM_UNSPECIFIED" : Defaults to - /// `REINFORCEMENT_LEARNING`. - /// - "REINFORCEMENT_LEARNING" : The Reinforcement Learning Algorithm for - /// Multi-trial Neural Architecture Search (NAS). - /// - "GRID_SEARCH" : The Grid Search Algorithm for Multi-trial Neural - /// Architecture Search (NAS). - core.String? multiTrialAlgorithm; + /// Output only. + core.String? startTime; - /// Spec for search trials. + /// The detailed state of the PersistentResource.
/// - /// Required. - GoogleCloudAiplatformV1NasJobSpecMultiTrialAlgorithmSpecSearchTrialSpec? - searchTrialSpec; + /// Output only. + /// Possible string values are: + /// - "STATE_UNSPECIFIED" : Not set. + /// - "PROVISIONING" : The PROVISIONING state indicates the persistent + /// resources is being created. + /// - "RUNNING" : The RUNNING state indicates the persistent resource is + /// healthy and fully usable. + /// - "STOPPING" : The STOPPING state indicates the persistent resource is + /// being deleted. + /// - "ERROR" : The ERROR state indicates the persistent resource may be + /// unusable. Details can be found in the `error` field. + /// - "REBOOTING" : The REBOOTING state indicates the persistent resource is + /// being rebooted (PR is not available right now but is expected to be ready + /// again later). + /// - "UPDATING" : The UPDATING state indicates the persistent resource is + /// being updated. + core.String? state; - /// Spec for train trials. + /// Time when the PersistentResource was most recently updated. /// - /// Top N \[TrainTrialSpec.max_parallel_trial_count\] search trials will be - /// trained for every M \[TrainTrialSpec.frequency\] trials searched. - GoogleCloudAiplatformV1NasJobSpecMultiTrialAlgorithmSpecTrainTrialSpec? - trainTrialSpec; + /// Output only. + core.String? updateTime; - GoogleCloudAiplatformV1NasJobSpecMultiTrialAlgorithmSpec({ - this.metric, - this.multiTrialAlgorithm, - this.searchTrialSpec, - this.trainTrialSpec, + GoogleCloudAiplatformV1PersistentResource({ + this.createTime, + this.displayName, + this.encryptionSpec, + this.error, + this.labels, + this.name, + this.network, + this.reservedIpRanges, + this.resourcePools, + this.resourceRuntime, + this.resourceRuntimeSpec, + this.satisfiesPzi, + this.satisfiesPzs, + this.startTime, + this.state, + this.updateTime, }); - GoogleCloudAiplatformV1NasJobSpecMultiTrialAlgorithmSpec.fromJson( - core.Map json_) + GoogleCloudAiplatformV1PersistentResource.fromJson(core.Map json_) : this( - metric: json_.containsKey('metric') - ? GoogleCloudAiplatformV1NasJobSpecMultiTrialAlgorithmSpecMetricSpec - .fromJson( - json_['metric'] as core.Map) + createTime: json_['createTime'] as core.String?, + displayName: json_['displayName'] as core.String?, + encryptionSpec: json_.containsKey('encryptionSpec') + ? GoogleCloudAiplatformV1EncryptionSpec.fromJson( + json_['encryptionSpec'] + as core.Map) : null, - multiTrialAlgorithm: json_['multiTrialAlgorithm'] as core.String?, - searchTrialSpec: json_.containsKey('searchTrialSpec') - ? GoogleCloudAiplatformV1NasJobSpecMultiTrialAlgorithmSpecSearchTrialSpec - .fromJson(json_['searchTrialSpec'] + error: json_.containsKey('error') + ? GoogleRpcStatus.fromJson( + json_['error'] as core.Map) + : null, + labels: + (json_['labels'] as core.Map?)?.map( + (key, value) => core.MapEntry( + key, + value as core.String, + ), + ), + name: json_['name'] as core.String?, + network: json_['network'] as core.String?, + reservedIpRanges: (json_['reservedIpRanges'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + resourcePools: (json_['resourcePools'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1ResourcePool.fromJson( + value as core.Map)) + .toList(), + resourceRuntime: json_.containsKey('resourceRuntime') + ? GoogleCloudAiplatformV1ResourceRuntime.fromJson( + json_['resourceRuntime'] as core.Map) : null, - trainTrialSpec: json_.containsKey('trainTrialSpec') - ? 
GoogleCloudAiplatformV1NasJobSpecMultiTrialAlgorithmSpecTrainTrialSpec - .fromJson(json_['trainTrialSpec'] + resourceRuntimeSpec: json_.containsKey('resourceRuntimeSpec') + ? GoogleCloudAiplatformV1ResourceRuntimeSpec.fromJson( + json_['resourceRuntimeSpec'] as core.Map) : null, + satisfiesPzi: json_['satisfiesPzi'] as core.bool?, + satisfiesPzs: json_['satisfiesPzs'] as core.bool?, + startTime: json_['startTime'] as core.String?, + state: json_['state'] as core.String?, + updateTime: json_['updateTime'] as core.String?, ); core.Map toJson() => { - if (metric != null) 'metric': metric!, - if (multiTrialAlgorithm != null) - 'multiTrialAlgorithm': multiTrialAlgorithm!, - if (searchTrialSpec != null) 'searchTrialSpec': searchTrialSpec!, - if (trainTrialSpec != null) 'trainTrialSpec': trainTrialSpec!, + if (createTime != null) 'createTime': createTime!, + if (displayName != null) 'displayName': displayName!, + if (encryptionSpec != null) 'encryptionSpec': encryptionSpec!, + if (error != null) 'error': error!, + if (labels != null) 'labels': labels!, + if (name != null) 'name': name!, + if (network != null) 'network': network!, + if (reservedIpRanges != null) 'reservedIpRanges': reservedIpRanges!, + if (resourcePools != null) 'resourcePools': resourcePools!, + if (resourceRuntime != null) 'resourceRuntime': resourceRuntime!, + if (resourceRuntimeSpec != null) + 'resourceRuntimeSpec': resourceRuntimeSpec!, + if (satisfiesPzi != null) 'satisfiesPzi': satisfiesPzi!, + if (satisfiesPzs != null) 'satisfiesPzs': satisfiesPzs!, + if (startTime != null) 'startTime': startTime!, + if (state != null) 'state': state!, + if (updateTime != null) 'updateTime': updateTime!, }; } -/// Represents a metric to optimize. -class GoogleCloudAiplatformV1NasJobSpecMultiTrialAlgorithmSpecMetricSpec { - /// The optimization goal of the metric. +/// An instance of a machine learning PipelineJob. +class GoogleCloudAiplatformV1PipelineJob { + /// Pipeline creation time. /// - /// Required. - /// Possible string values are: - /// - "GOAL_TYPE_UNSPECIFIED" : Goal Type will default to maximize. - /// - "MAXIMIZE" : Maximize the goal metric. - /// - "MINIMIZE" : Minimize the goal metric. - core.String? goal; + /// Output only. + core.String? createTime; - /// The ID of the metric. - /// - /// Must not contain whitespaces. + /// The display name of the Pipeline. /// - /// Required. - core.String? metricId; - - GoogleCloudAiplatformV1NasJobSpecMultiTrialAlgorithmSpecMetricSpec({ - this.goal, - this.metricId, - }); - - GoogleCloudAiplatformV1NasJobSpecMultiTrialAlgorithmSpecMetricSpec.fromJson( - core.Map json_) - : this( - goal: json_['goal'] as core.String?, - metricId: json_['metricId'] as core.String?, - ); - - core.Map toJson() => { - if (goal != null) 'goal': goal!, - if (metricId != null) 'metricId': metricId!, - }; -} + /// The name can be up to 128 characters long and can consist of any UTF-8 + /// characters. + core.String? displayName; -/// Represent spec for search trials. -class GoogleCloudAiplatformV1NasJobSpecMultiTrialAlgorithmSpecSearchTrialSpec { - /// The number of failed trials that need to be seen before failing the - /// NasJob. + /// Customer-managed encryption key spec for a pipelineJob. /// - /// If set to 0, Vertex AI decides how many trials must fail before the whole - /// job fails. - core.int? maxFailedTrialCount; + /// If set, this PipelineJob and all of its sub-resources will be secured by + /// this key. + GoogleCloudAiplatformV1EncryptionSpec? 
encryptionSpec; - /// The maximum number of trials to run in parallel. + /// Pipeline end time. /// - /// Required. - core.int? maxParallelTrialCount; + /// Output only. + core.String? endTime; - /// The maximum number of Neural Architecture Search (NAS) trials to run. + /// The error that occurred during pipeline execution. /// - /// Required. - core.int? maxTrialCount; + /// Only populated when the pipeline's state is FAILED or CANCELLED. + /// + /// Output only. + GoogleRpcStatus? error; - /// The spec of a search trial job. + /// The details of pipeline run. /// - /// The same spec applies to all search trials. + /// Not available in the list view. /// - /// Required. - GoogleCloudAiplatformV1CustomJobSpec? searchTrialJobSpec; - - GoogleCloudAiplatformV1NasJobSpecMultiTrialAlgorithmSpecSearchTrialSpec({ - this.maxFailedTrialCount, - this.maxParallelTrialCount, - this.maxTrialCount, - this.searchTrialJobSpec, - }); - - GoogleCloudAiplatformV1NasJobSpecMultiTrialAlgorithmSpecSearchTrialSpec.fromJson( - core.Map json_) - : this( - maxFailedTrialCount: json_['maxFailedTrialCount'] as core.int?, - maxParallelTrialCount: json_['maxParallelTrialCount'] as core.int?, - maxTrialCount: json_['maxTrialCount'] as core.int?, - searchTrialJobSpec: json_.containsKey('searchTrialJobSpec') - ? GoogleCloudAiplatformV1CustomJobSpec.fromJson( - json_['searchTrialJobSpec'] - as core.Map) - : null, - ); + /// Output only. + GoogleCloudAiplatformV1PipelineJobDetail? jobDetail; - core.Map toJson() => { - if (maxFailedTrialCount != null) - 'maxFailedTrialCount': maxFailedTrialCount!, - if (maxParallelTrialCount != null) - 'maxParallelTrialCount': maxParallelTrialCount!, - if (maxTrialCount != null) 'maxTrialCount': maxTrialCount!, - if (searchTrialJobSpec != null) - 'searchTrialJobSpec': searchTrialJobSpec!, - }; -} + /// The labels with user-defined metadata to organize PipelineJob. + /// + /// Label keys and values can be no longer than 64 characters (Unicode + /// codepoints), can only contain lowercase letters, numeric characters, + /// underscores and dashes. International characters are allowed. See + /// https://goo.gl/xmQnxf for more information and examples of labels. Note + /// there is some reserved label key for Vertex AI Pipelines. - + /// `vertex-ai-pipelines-run-billing-id`, user set value will get overrided. + core.Map? labels; -/// Represent spec for train trials. -class GoogleCloudAiplatformV1NasJobSpecMultiTrialAlgorithmSpecTrainTrialSpec { - /// Frequency of search trials to start train stage. + /// The resource name of the PipelineJob. /// - /// Top N \[TrainTrialSpec.max_parallel_trial_count\] search trials will be - /// trained for every M \[TrainTrialSpec.frequency\] trials searched. + /// Output only. + core.String? name; + + /// The full name of the Compute Engine + /// \[network\](/compute/docs/networks-and-firewalls#networks) to which the + /// Pipeline Job's workload should be peered. /// - /// Required. - core.int? frequency; + /// For example, `projects/12345/global/networks/myVPC`. + /// \[Format\](/compute/docs/reference/rest/v1/networks/insert) is of the form + /// `projects/{project}/global/networks/{network}`. Where {project} is a + /// project number, as in `12345`, and {network} is a network name. Private + /// services access must already be configured for the network. Pipeline job + /// will apply the network configuration to the Google Cloud resources being + /// launched, if applied, such as Vertex AI Training or Dataflow job. 
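// Illustrative aside (not generated output): a minimal sketch, assuming only
// the GoogleCloudAiplatformV1PersistentResource model added earlier in this
// hunk, showing how a decoded resource's lifecycle state can be inspected.
// The state strings mirror the enum values documented on the `state` field.
void inspectPersistentResourceSketch(
    GoogleCloudAiplatformV1PersistentResource resource) {
  switch (resource.state) {
    case 'RUNNING':
      core.print('Persistent resource ready since ${resource.startTime}');
      break;
    case 'ERROR':
      // Per the field docs, details for the ERROR state live in `error`.
      core.print('Persistent resource unusable: ${resource.error?.message}');
      break;
    default:
      core.print('Persistent resource state: ${resource.state}');
  }
}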
If left + /// unspecified, the workload is not peered with any network. + core.String? network; - /// The maximum number of trials to run in parallel. + /// The spec of the pipeline. /// - /// Required. - core.int? maxParallelTrialCount; + /// The values for Object must be JSON objects. It can consist of `num`, + /// `String`, `bool` and `null` as well as `Map` and `List` values. + core.Map? pipelineSpec; - /// The spec of a train trial job. + /// Whether to do component level validations before job creation. /// - /// The same spec applies to all train trials. + /// Optional. + core.bool? preflightValidations; + + /// A list of names for the reserved ip ranges under the VPC network that can + /// be used for this Pipeline Job's workload. /// - /// Required. - GoogleCloudAiplatformV1CustomJobSpec? trainTrialJobSpec; + /// If set, we will deploy the Pipeline Job's workload within the provided ip + /// ranges. Otherwise, the job will be deployed to any ip ranges under the + /// provided VPC network. Example: \['vertex-ai-ip-range'\]. + core.List? reservedIpRanges; - GoogleCloudAiplatformV1NasJobSpecMultiTrialAlgorithmSpecTrainTrialSpec({ - this.frequency, - this.maxParallelTrialCount, - this.trainTrialJobSpec, - }); + /// Runtime config of the pipeline. + GoogleCloudAiplatformV1PipelineJobRuntimeConfig? runtimeConfig; - GoogleCloudAiplatformV1NasJobSpecMultiTrialAlgorithmSpecTrainTrialSpec.fromJson( - core.Map json_) - : this( - frequency: json_['frequency'] as core.int?, - maxParallelTrialCount: json_['maxParallelTrialCount'] as core.int?, - trainTrialJobSpec: json_.containsKey('trainTrialJobSpec') - ? GoogleCloudAiplatformV1CustomJobSpec.fromJson( - json_['trainTrialJobSpec'] - as core.Map) - : null, - ); + /// The schedule resource name. + /// + /// Only returned if the Pipeline is created by Schedule API. + /// + /// Output only. + core.String? scheduleName; - core.Map toJson() => { - if (frequency != null) 'frequency': frequency!, - if (maxParallelTrialCount != null) - 'maxParallelTrialCount': maxParallelTrialCount!, - if (trainTrialJobSpec != null) 'trainTrialJobSpec': trainTrialJobSpec!, - }; -} + /// The service account that the pipeline workload runs as. + /// + /// If not specified, the Compute Engine default service account in the + /// project will be used. See + /// https://cloud.google.com/compute/docs/access/service-accounts#default_service_account + /// Users starting the pipeline must have the `iam.serviceAccounts.actAs` + /// permission on this service account. + core.String? serviceAccount; -/// Represents a uCAIP NasJob trial. -class GoogleCloudAiplatformV1NasTrial { - /// Time when the NasTrial's status changed to `SUCCEEDED` or `INFEASIBLE`. + /// Pipeline start time. /// /// Output only. - core.String? endTime; + core.String? startTime; - /// The final measurement containing the objective value. + /// The detailed state of the job. /// /// Output only. - GoogleCloudAiplatformV1Measurement? finalMeasurement; + /// Possible string values are: + /// - "PIPELINE_STATE_UNSPECIFIED" : The pipeline state is unspecified. + /// - "PIPELINE_STATE_QUEUED" : The pipeline has been created or resumed, and + /// processing has not yet begun. + /// - "PIPELINE_STATE_PENDING" : The service is preparing to run the pipeline. + /// - "PIPELINE_STATE_RUNNING" : The pipeline is in progress. + /// - "PIPELINE_STATE_SUCCEEDED" : The pipeline completed successfully. + /// - "PIPELINE_STATE_FAILED" : The pipeline failed. + /// - "PIPELINE_STATE_CANCELLING" : The pipeline is being cancelled. 
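// Illustrative aside (not generated output): a minimal sketch that builds a
// GoogleCloudAiplatformV1PipelineJob using the runtime-config type defined
// later in this hunk. The display name, labels, bucket path, and parameter
// values are hypothetical placeholders.
GoogleCloudAiplatformV1PipelineJob buildExamplePipelineJobSketch() {
  final job = GoogleCloudAiplatformV1PipelineJob(
    displayName: 'example-pipeline',
    labels: {'team': 'ml-platform'},
    runtimeConfig: GoogleCloudAiplatformV1PipelineJobRuntimeConfig(
      gcsOutputDirectory: 'gs://my-bucket/pipeline-output',
      parameterValues: {'learning_rate': 0.01, 'epochs': 10},
    ),
  );
  // When sent as a request body the model is json-encoded (toJson() is
  // applied recursively), e.g. convert.json.encode(job).
  return job;
}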
From this + /// state, the pipeline may only go to either PIPELINE_STATE_SUCCEEDED, + /// PIPELINE_STATE_FAILED or PIPELINE_STATE_CANCELLED. + /// - "PIPELINE_STATE_CANCELLED" : The pipeline has been cancelled. + /// - "PIPELINE_STATE_PAUSED" : The pipeline has been stopped, and can be + /// resumed. + core.String? state; - /// The identifier of the NasTrial assigned by the service. + /// Pipeline template metadata. + /// + /// Will fill up fields if PipelineJob.template_uri is from supported template + /// registry. /// /// Output only. - core.String? id; + GoogleCloudAiplatformV1PipelineTemplateMetadata? templateMetadata; - /// Time when the NasTrial was started. + /// A template uri from where the PipelineJob.pipeline_spec, if empty, will be + /// downloaded. /// - /// Output only. - core.String? startTime; + /// Currently, only uri from Vertex Template Registry & Gallery is supported. + /// Reference to + /// https://cloud.google.com/vertex-ai/docs/pipelines/create-pipeline-template. + core.String? templateUri; - /// The detailed state of the NasTrial. + /// Timestamp when this PipelineJob was most recently updated. /// /// Output only. - /// Possible string values are: - /// - "STATE_UNSPECIFIED" : The NasTrial state is unspecified. - /// - "REQUESTED" : Indicates that a specific NasTrial has been requested, but - /// it has not yet been suggested by the service. - /// - "ACTIVE" : Indicates that the NasTrial has been suggested. - /// - "STOPPING" : Indicates that the NasTrial should stop according to the - /// service. - /// - "SUCCEEDED" : Indicates that the NasTrial is completed successfully. - /// - "INFEASIBLE" : Indicates that the NasTrial should not be attempted - /// again. The service will set a NasTrial to INFEASIBLE when it's done but - /// missing the final_measurement. - core.String? state; + core.String? updateTime; - GoogleCloudAiplatformV1NasTrial({ + GoogleCloudAiplatformV1PipelineJob({ + this.createTime, + this.displayName, + this.encryptionSpec, this.endTime, - this.finalMeasurement, - this.id, + this.error, + this.jobDetail, + this.labels, + this.name, + this.network, + this.pipelineSpec, + this.preflightValidations, + this.reservedIpRanges, + this.runtimeConfig, + this.scheduleName, + this.serviceAccount, this.startTime, this.state, + this.templateMetadata, + this.templateUri, + this.updateTime, }); - GoogleCloudAiplatformV1NasTrial.fromJson(core.Map json_) + GoogleCloudAiplatformV1PipelineJob.fromJson(core.Map json_) : this( - endTime: json_['endTime'] as core.String?, - finalMeasurement: json_.containsKey('finalMeasurement') - ? GoogleCloudAiplatformV1Measurement.fromJson( - json_['finalMeasurement'] + createTime: json_['createTime'] as core.String?, + displayName: json_['displayName'] as core.String?, + encryptionSpec: json_.containsKey('encryptionSpec') + ? GoogleCloudAiplatformV1EncryptionSpec.fromJson( + json_['encryptionSpec'] as core.Map) : null, - id: json_['id'] as core.String?, + endTime: json_['endTime'] as core.String?, + error: json_.containsKey('error') + ? GoogleRpcStatus.fromJson( + json_['error'] as core.Map) + : null, + jobDetail: json_.containsKey('jobDetail') + ? GoogleCloudAiplatformV1PipelineJobDetail.fromJson( + json_['jobDetail'] as core.Map) + : null, + labels: + (json_['labels'] as core.Map?)?.map( + (key, value) => core.MapEntry( + key, + value as core.String, + ), + ), + name: json_['name'] as core.String?, + network: json_['network'] as core.String?, + pipelineSpec: json_.containsKey('pipelineSpec') + ? 
json_['pipelineSpec'] as core.Map + : null, + preflightValidations: json_['preflightValidations'] as core.bool?, + reservedIpRanges: (json_['reservedIpRanges'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + runtimeConfig: json_.containsKey('runtimeConfig') + ? GoogleCloudAiplatformV1PipelineJobRuntimeConfig.fromJson( + json_['runtimeConfig'] as core.Map) + : null, + scheduleName: json_['scheduleName'] as core.String?, + serviceAccount: json_['serviceAccount'] as core.String?, startTime: json_['startTime'] as core.String?, state: json_['state'] as core.String?, + templateMetadata: json_.containsKey('templateMetadata') + ? GoogleCloudAiplatformV1PipelineTemplateMetadata.fromJson( + json_['templateMetadata'] + as core.Map) + : null, + templateUri: json_['templateUri'] as core.String?, + updateTime: json_['updateTime'] as core.String?, ); core.Map toJson() => { + if (createTime != null) 'createTime': createTime!, + if (displayName != null) 'displayName': displayName!, + if (encryptionSpec != null) 'encryptionSpec': encryptionSpec!, if (endTime != null) 'endTime': endTime!, - if (finalMeasurement != null) 'finalMeasurement': finalMeasurement!, - if (id != null) 'id': id!, + if (error != null) 'error': error!, + if (jobDetail != null) 'jobDetail': jobDetail!, + if (labels != null) 'labels': labels!, + if (name != null) 'name': name!, + if (network != null) 'network': network!, + if (pipelineSpec != null) 'pipelineSpec': pipelineSpec!, + if (preflightValidations != null) + 'preflightValidations': preflightValidations!, + if (reservedIpRanges != null) 'reservedIpRanges': reservedIpRanges!, + if (runtimeConfig != null) 'runtimeConfig': runtimeConfig!, + if (scheduleName != null) 'scheduleName': scheduleName!, + if (serviceAccount != null) 'serviceAccount': serviceAccount!, if (startTime != null) 'startTime': startTime!, if (state != null) 'state': state!, - }; -} - -/// Represents a NasTrial details along with its parameters. -/// -/// If there is a corresponding train NasTrial, the train NasTrial is also -/// returned. -class GoogleCloudAiplatformV1NasTrialDetail { - /// Resource name of the NasTrialDetail. + if (templateMetadata != null) 'templateMetadata': templateMetadata!, + if (templateUri != null) 'templateUri': templateUri!, + if (updateTime != null) 'updateTime': updateTime!, + }; +} + +/// The runtime detail of PipelineJob. +class GoogleCloudAiplatformV1PipelineJobDetail { + /// The context of the pipeline. /// /// Output only. - core.String? name; - - /// The parameters for the NasJob NasTrial. - core.String? parameters; + GoogleCloudAiplatformV1Context? pipelineContext; - /// The requested search NasTrial. - GoogleCloudAiplatformV1NasTrial? searchTrial; + /// The context of the current pipeline run. + /// + /// Output only. + GoogleCloudAiplatformV1Context? pipelineRunContext; - /// The train NasTrial corresponding to search_trial. + /// The runtime details of the tasks under the pipeline. /// - /// Only populated if search_trial is used for training. - GoogleCloudAiplatformV1NasTrial? trainTrial; + /// Output only. + core.List? 
taskDetails; - GoogleCloudAiplatformV1NasTrialDetail({ - this.name, - this.parameters, - this.searchTrial, - this.trainTrial, + GoogleCloudAiplatformV1PipelineJobDetail({ + this.pipelineContext, + this.pipelineRunContext, + this.taskDetails, }); - GoogleCloudAiplatformV1NasTrialDetail.fromJson(core.Map json_) + GoogleCloudAiplatformV1PipelineJobDetail.fromJson(core.Map json_) : this( - name: json_['name'] as core.String?, - parameters: json_['parameters'] as core.String?, - searchTrial: json_.containsKey('searchTrial') - ? GoogleCloudAiplatformV1NasTrial.fromJson( - json_['searchTrial'] as core.Map) + pipelineContext: json_.containsKey('pipelineContext') + ? GoogleCloudAiplatformV1Context.fromJson(json_['pipelineContext'] + as core.Map) : null, - trainTrial: json_.containsKey('trainTrial') - ? GoogleCloudAiplatformV1NasTrial.fromJson( - json_['trainTrial'] as core.Map) + pipelineRunContext: json_.containsKey('pipelineRunContext') + ? GoogleCloudAiplatformV1Context.fromJson( + json_['pipelineRunContext'] + as core.Map) : null, + taskDetails: (json_['taskDetails'] as core.List?) + ?.map((value) => + GoogleCloudAiplatformV1PipelineTaskDetail.fromJson( + value as core.Map)) + .toList(), ); core.Map toJson() => { - if (name != null) 'name': name!, - if (parameters != null) 'parameters': parameters!, - if (searchTrial != null) 'searchTrial': searchTrial!, - if (trainTrial != null) 'trainTrial': trainTrial!, + if (pipelineContext != null) 'pipelineContext': pipelineContext!, + if (pipelineRunContext != null) + 'pipelineRunContext': pipelineRunContext!, + if (taskDetails != null) 'taskDetails': taskDetails!, }; } -/// A query to find a number of similar entities. -class GoogleCloudAiplatformV1NearestNeighborQuery { - /// The embedding vector that be used for similar search. +/// The runtime config of a PipelineJob. +class GoogleCloudAiplatformV1PipelineJobRuntimeConfig { + /// Represents the failure policy of a pipeline. /// - /// Optional. - GoogleCloudAiplatformV1NearestNeighborQueryEmbedding? embedding; + /// Currently, the default of a pipeline is that the pipeline will continue to + /// run until no more tasks can be executed, also known as + /// PIPELINE_FAILURE_POLICY_FAIL_SLOW. However, if a pipeline is set to + /// PIPELINE_FAILURE_POLICY_FAIL_FAST, it will stop scheduling any new tasks + /// when a task has failed. Any scheduled tasks will continue to completion. + /// Possible string values are: + /// - "PIPELINE_FAILURE_POLICY_UNSPECIFIED" : Default value, and follows fail + /// slow behavior. + /// - "PIPELINE_FAILURE_POLICY_FAIL_SLOW" : Indicates that the pipeline should + /// continue to run until all possible tasks have been scheduled and + /// completed. + /// - "PIPELINE_FAILURE_POLICY_FAIL_FAST" : Indicates that the pipeline should + /// stop scheduling new tasks after a task has failed. + core.String? failurePolicy; - /// The entity id whose similar entities should be searched for. + /// A path in a Cloud Storage bucket, which will be treated as the root output + /// directory of the pipeline. /// - /// If embedding is set, search will use embedding instead of entity_id. + /// It is used by the system to generate the paths of output artifacts. The + /// artifact paths are generated with a sub-path pattern + /// `{job_id}/{task_id}/{output_key}` under the specified output directory. + /// The service account specified in this pipeline must have the + /// `storage.objects.get` and `storage.objects.create` permissions for this + /// bucket. /// - /// Optional. - core.String? 
entityId; + /// Required. + core.String? gcsOutputDirectory; - /// The number of similar entities to be retrieved from feature view for each - /// query. + /// The runtime artifacts of the PipelineJob. /// - /// Optional. - core.int? neighborCount; + /// The key will be the input artifact name and the value would be one of the + /// InputArtifact. + core.Map? + inputArtifacts; - /// The list of numeric filters. + /// The runtime parameters of the PipelineJob. /// - /// Optional. - core.List? - numericFilters; - - /// Parameters that can be set to tune query on the fly. + /// The parameters will be passed into PipelineJob.pipeline_spec to replace + /// the placeholders at runtime. This field is used by pipelines built using + /// `PipelineJob.pipeline_spec.schema_version` 2.1.0, such as pipelines built + /// using Kubeflow Pipelines SDK 1.9 or higher and the v2 DSL. /// - /// Optional. - GoogleCloudAiplatformV1NearestNeighborQueryParameters? parameters; + /// The values for Object must be JSON objects. It can consist of `num`, + /// `String`, `bool` and `null` as well as `Map` and `List` values. + core.Map? parameterValues; - /// Crowding is a constraint on a neighbor list produced by nearest neighbor - /// search requiring that no more than sper_crowding_attribute_neighbor_count - /// of the k neighbors returned have the same value of crowding_attribute. - /// - /// It's used for improving result diversity. + /// Use RuntimeConfig.parameter_values instead. /// - /// Optional. - core.int? perCrowdingAttributeNeighborCount; - - /// The list of string filters. + /// The runtime parameters of the PipelineJob. The parameters will be passed + /// into PipelineJob.pipeline_spec to replace the placeholders at runtime. + /// This field is used by pipelines built using + /// `PipelineJob.pipeline_spec.schema_version` 2.0.0 or lower, such as + /// pipelines built using Kubeflow Pipelines SDK 1.8 or lower. /// - /// Optional. - core.List? - stringFilters; + /// Deprecated. + @core.Deprecated( + 'Not supported. Member documentation may have more information.', + ) + core.Map? parameters; - GoogleCloudAiplatformV1NearestNeighborQuery({ - this.embedding, - this.entityId, - this.neighborCount, - this.numericFilters, + GoogleCloudAiplatformV1PipelineJobRuntimeConfig({ + this.failurePolicy, + this.gcsOutputDirectory, + this.inputArtifacts, + this.parameterValues, this.parameters, - this.perCrowdingAttributeNeighborCount, - this.stringFilters, }); - GoogleCloudAiplatformV1NearestNeighborQuery.fromJson(core.Map json_) + GoogleCloudAiplatformV1PipelineJobRuntimeConfig.fromJson(core.Map json_) : this( - embedding: json_.containsKey('embedding') - ? GoogleCloudAiplatformV1NearestNeighborQueryEmbedding.fromJson( - json_['embedding'] as core.Map) - : null, - entityId: json_['entityId'] as core.String?, - neighborCount: json_['neighborCount'] as core.int?, - numericFilters: (json_['numericFilters'] as core.List?) - ?.map((value) => - GoogleCloudAiplatformV1NearestNeighborQueryNumericFilter - .fromJson(value as core.Map)) - .toList(), - parameters: json_.containsKey('parameters') - ? GoogleCloudAiplatformV1NearestNeighborQueryParameters.fromJson( - json_['parameters'] as core.Map) + failurePolicy: json_['failurePolicy'] as core.String?, + gcsOutputDirectory: json_['gcsOutputDirectory'] as core.String?, + inputArtifacts: + (json_['inputArtifacts'] as core.Map?) 
+ ?.map( + (key, value) => core.MapEntry( + key, + GoogleCloudAiplatformV1PipelineJobRuntimeConfigInputArtifact + .fromJson(value as core.Map), + ), + ), + parameterValues: json_.containsKey('parameterValues') + ? json_['parameterValues'] as core.Map : null, - perCrowdingAttributeNeighborCount: - json_['perCrowdingAttributeNeighborCount'] as core.int?, - stringFilters: (json_['stringFilters'] as core.List?) - ?.map((value) => - GoogleCloudAiplatformV1NearestNeighborQueryStringFilter - .fromJson(value as core.Map)) - .toList(), + parameters: + (json_['parameters'] as core.Map?) + ?.map( + (key, value) => core.MapEntry( + key, + GoogleCloudAiplatformV1Value.fromJson( + value as core.Map), + ), + ), ); core.Map toJson() => { - if (embedding != null) 'embedding': embedding!, - if (entityId != null) 'entityId': entityId!, - if (neighborCount != null) 'neighborCount': neighborCount!, - if (numericFilters != null) 'numericFilters': numericFilters!, + if (failurePolicy != null) 'failurePolicy': failurePolicy!, + if (gcsOutputDirectory != null) + 'gcsOutputDirectory': gcsOutputDirectory!, + if (inputArtifacts != null) 'inputArtifacts': inputArtifacts!, + if (parameterValues != null) 'parameterValues': parameterValues!, if (parameters != null) 'parameters': parameters!, - if (perCrowdingAttributeNeighborCount != null) - 'perCrowdingAttributeNeighborCount': - perCrowdingAttributeNeighborCount!, - if (stringFilters != null) 'stringFilters': stringFilters!, }; } -/// The embedding vector. -class GoogleCloudAiplatformV1NearestNeighborQueryEmbedding { - /// Individual value in the embedding. +/// The type of an input artifact. +class GoogleCloudAiplatformV1PipelineJobRuntimeConfigInputArtifact { + /// Artifact resource id from MLMD. /// - /// Optional. - core.List? value; + /// Which is the last portion of an artifact resource name: + /// `projects/{project}/locations/{location}/metadataStores/default/artifacts/{artifact_id}`. + /// The artifact must stay within the same project, location and default + /// metadatastore as the pipeline. + core.String? artifactId; - GoogleCloudAiplatformV1NearestNeighborQueryEmbedding({ - this.value, + GoogleCloudAiplatformV1PipelineJobRuntimeConfigInputArtifact({ + this.artifactId, }); - GoogleCloudAiplatformV1NearestNeighborQueryEmbedding.fromJson(core.Map json_) + GoogleCloudAiplatformV1PipelineJobRuntimeConfigInputArtifact.fromJson( + core.Map json_) : this( - value: (json_['value'] as core.List?) - ?.map((value) => (value as core.num).toDouble()) - .toList(), + artifactId: json_['artifactId'] as core.String?, ); core.Map toJson() => { - if (value != null) 'value': value!, + if (artifactId != null) 'artifactId': artifactId!, }; } -/// Numeric filter is used to search a subset of the entities by using boolean -/// rules on numeric columns. -/// -/// For example: Database Point 0: {name: "a" value_int: 42} {name: "b" -/// value_float: 1.0} Database Point 1: {name: "a" value_int: 10} {name: "b" -/// value_float: 2.0} Database Point 2: {name: "a" value_int: -1} {name: "b" -/// value_float: 3.0} Query: {name: "a" value_int: 12 operator: LESS} // Matches -/// Point 1, 2 {name: "b" value_float: 2.0 operator: EQUAL} // Matches Point 1 -class GoogleCloudAiplatformV1NearestNeighborQueryNumericFilter { - /// Column name in BigQuery that used as filters. +/// The runtime detail of a task execution. +class GoogleCloudAiplatformV1PipelineTaskDetail { + /// Task create time. /// - /// Required. - core.String? name; + /// Output only. + core.String? 
createTime; - /// This MUST be specified for queries and must NOT be specified for database - /// points. + /// Task end time. /// - /// Optional. - /// Possible string values are: - /// - "OPERATOR_UNSPECIFIED" : Unspecified operator. - /// - "LESS" : Entities are eligible if their value is \< the query's. - /// - "LESS_EQUAL" : Entities are eligible if their value is \<= the query's. - /// - "EQUAL" : Entities are eligible if their value is == the query's. - /// - "GREATER_EQUAL" : Entities are eligible if their value is \>= the - /// query's. - /// - "GREATER" : Entities are eligible if their value is \> the query's. - /// - "NOT_EQUAL" : Entities are eligible if their value is != the query's. - core.String? op; - - /// double value type. - core.double? valueDouble; - - /// float value type. - core.double? valueFloat; + /// Output only. + core.String? endTime; - /// int value type. - core.String? valueInt; + /// The error that occurred during task execution. + /// + /// Only populated when the task's state is FAILED or CANCELLED. + /// + /// Output only. + GoogleRpcStatus? error; - GoogleCloudAiplatformV1NearestNeighborQueryNumericFilter({ - this.name, - this.op, - this.valueDouble, - this.valueFloat, - this.valueInt, - }); + /// The execution metadata of the task. + /// + /// Output only. + GoogleCloudAiplatformV1Execution? execution; - GoogleCloudAiplatformV1NearestNeighborQueryNumericFilter.fromJson( - core.Map json_) - : this( - name: json_['name'] as core.String?, - op: json_['op'] as core.String?, - valueDouble: (json_['valueDouble'] as core.num?)?.toDouble(), - valueFloat: (json_['valueFloat'] as core.num?)?.toDouble(), - valueInt: json_['valueInt'] as core.String?, - ); + /// The detailed execution info. + /// + /// Output only. + GoogleCloudAiplatformV1PipelineTaskExecutorDetail? executorDetail; - core.Map toJson() => { - if (name != null) 'name': name!, - if (op != null) 'op': op!, - if (valueDouble != null) 'valueDouble': valueDouble!, - if (valueFloat != null) 'valueFloat': valueFloat!, - if (valueInt != null) 'valueInt': valueInt!, - }; -} + /// The runtime input artifacts of the task. + /// + /// Output only. + core.Map? + inputs; -/// Parameters that can be overrided in each query to tune query latency and -/// recall. -class GoogleCloudAiplatformV1NearestNeighborQueryParameters { - /// The number of neighbors to find via approximate search before exact - /// reordering is performed; if set, this value must be \> neighbor_count. + /// The runtime output artifacts of the task. /// - /// Optional. - core.int? approximateNeighborCandidates; + /// Output only. + core.Map? + outputs; - /// The fraction of the number of leaves to search, set at query time allows - /// user to tune search performance. + /// The id of the parent task if the task is within a component scope. /// - /// This value increase result in both search accuracy and latency increase. - /// The value should be between 0.0 and 1.0. + /// Empty if the task is at the root level. /// - /// Optional. - core.double? 
leafNodesSearchFraction; - - GoogleCloudAiplatformV1NearestNeighborQueryParameters({ - this.approximateNeighborCandidates, - this.leafNodesSearchFraction, - }); - - GoogleCloudAiplatformV1NearestNeighborQueryParameters.fromJson(core.Map json_) - : this( - approximateNeighborCandidates: - json_['approximateNeighborCandidates'] as core.int?, - leafNodesSearchFraction: - (json_['leafNodesSearchFraction'] as core.num?)?.toDouble(), - ); - - core.Map toJson() => { - if (approximateNeighborCandidates != null) - 'approximateNeighborCandidates': approximateNeighborCandidates!, - if (leafNodesSearchFraction != null) - 'leafNodesSearchFraction': leafNodesSearchFraction!, - }; -} + /// Output only. + core.String? parentTaskId; -/// String filter is used to search a subset of the entities by using boolean -/// rules on string columns. -/// -/// For example: if a query specifies string filter with 'name = color, -/// allow_tokens = {red, blue}, deny_tokens = {purple}',' then that query will -/// match entities that are red or blue, but if those points are also purple, -/// then they will be excluded even if they are red/blue. Only string filter is -/// supported for now, numeric filter will be supported in the near future. -class GoogleCloudAiplatformV1NearestNeighborQueryStringFilter { - /// The allowed tokens. + /// A list of task status. /// - /// Optional. - core.List? allowTokens; - - /// The denied tokens. + /// This field keeps a record of task status evolving over time. /// - /// Optional. - core.List? denyTokens; + /// Output only. + core.List? + pipelineTaskStatus; - /// Column names in BigQuery that used as filters. + /// Task start time. /// - /// Required. - core.String? name; - - GoogleCloudAiplatformV1NearestNeighborQueryStringFilter({ - this.allowTokens, - this.denyTokens, - this.name, - }); + /// Output only. + core.String? startTime; - GoogleCloudAiplatformV1NearestNeighborQueryStringFilter.fromJson( - core.Map json_) - : this( - allowTokens: (json_['allowTokens'] as core.List?) - ?.map((value) => value as core.String) - .toList(), - denyTokens: (json_['denyTokens'] as core.List?) - ?.map((value) => value as core.String) - .toList(), - name: json_['name'] as core.String?, - ); + /// State of the task. + /// + /// Output only. + /// Possible string values are: + /// - "STATE_UNSPECIFIED" : Unspecified. + /// - "PENDING" : Specifies pending state for the task. + /// - "RUNNING" : Specifies task is being executed. + /// - "SUCCEEDED" : Specifies task completed successfully. + /// - "CANCEL_PENDING" : Specifies Task cancel is in pending state. + /// - "CANCELLING" : Specifies task is being cancelled. + /// - "CANCELLED" : Specifies task was cancelled. + /// - "FAILED" : Specifies task failed. + /// - "SKIPPED" : Specifies task was skipped due to cache hit. + /// - "NOT_TRIGGERED" : Specifies that the task was not triggered because the + /// task's trigger policy is not satisfied. The trigger policy is specified in + /// the `condition` field of PipelineJob.pipeline_spec. + core.String? state; - core.Map toJson() => { - if (allowTokens != null) 'allowTokens': allowTokens!, - if (denyTokens != null) 'denyTokens': denyTokens!, - if (name != null) 'name': name!, - }; -} + /// The system generated ID of the task. + /// + /// Output only. + core.String? taskId; -/// Nearest neighbors for one query. -class GoogleCloudAiplatformV1NearestNeighbors { - /// All its neighbors. - core.List? neighbors; + /// The user specified name of the task that is defined in pipeline_spec. + /// + /// Output only. 
+ core.String? taskName; - GoogleCloudAiplatformV1NearestNeighbors({ - this.neighbors, + GoogleCloudAiplatformV1PipelineTaskDetail({ + this.createTime, + this.endTime, + this.error, + this.execution, + this.executorDetail, + this.inputs, + this.outputs, + this.parentTaskId, + this.pipelineTaskStatus, + this.startTime, + this.state, + this.taskId, + this.taskName, }); - GoogleCloudAiplatformV1NearestNeighbors.fromJson(core.Map json_) + GoogleCloudAiplatformV1PipelineTaskDetail.fromJson(core.Map json_) : this( - neighbors: (json_['neighbors'] as core.List?) + createTime: json_['createTime'] as core.String?, + endTime: json_['endTime'] as core.String?, + error: json_.containsKey('error') + ? GoogleRpcStatus.fromJson( + json_['error'] as core.Map) + : null, + execution: json_.containsKey('execution') + ? GoogleCloudAiplatformV1Execution.fromJson( + json_['execution'] as core.Map) + : null, + executorDetail: json_.containsKey('executorDetail') + ? GoogleCloudAiplatformV1PipelineTaskExecutorDetail.fromJson( + json_['executorDetail'] + as core.Map) + : null, + inputs: + (json_['inputs'] as core.Map?)?.map( + (key, value) => core.MapEntry( + key, + GoogleCloudAiplatformV1PipelineTaskDetailArtifactList.fromJson( + value as core.Map), + ), + ), + outputs: + (json_['outputs'] as core.Map?)?.map( + (key, value) => core.MapEntry( + key, + GoogleCloudAiplatformV1PipelineTaskDetailArtifactList.fromJson( + value as core.Map), + ), + ), + parentTaskId: json_['parentTaskId'] as core.String?, + pipelineTaskStatus: (json_['pipelineTaskStatus'] as core.List?) ?.map((value) => - GoogleCloudAiplatformV1NearestNeighborsNeighbor.fromJson( - value as core.Map)) + GoogleCloudAiplatformV1PipelineTaskDetailPipelineTaskStatus + .fromJson(value as core.Map)) .toList(), + startTime: json_['startTime'] as core.String?, + state: json_['state'] as core.String?, + taskId: json_['taskId'] as core.String?, + taskName: json_['taskName'] as core.String?, ); core.Map toJson() => { - if (neighbors != null) 'neighbors': neighbors!, + if (createTime != null) 'createTime': createTime!, + if (endTime != null) 'endTime': endTime!, + if (error != null) 'error': error!, + if (execution != null) 'execution': execution!, + if (executorDetail != null) 'executorDetail': executorDetail!, + if (inputs != null) 'inputs': inputs!, + if (outputs != null) 'outputs': outputs!, + if (parentTaskId != null) 'parentTaskId': parentTaskId!, + if (pipelineTaskStatus != null) + 'pipelineTaskStatus': pipelineTaskStatus!, + if (startTime != null) 'startTime': startTime!, + if (state != null) 'state': state!, + if (taskId != null) 'taskId': taskId!, + if (taskName != null) 'taskName': taskName!, }; } -/// A neighbor of the query vector. -class GoogleCloudAiplatformV1NearestNeighborsNeighbor { - /// The distance between the neighbor and the query vector. - core.double? distance; - - /// The id of the similar entity. - core.String? entityId; - - /// The attributes of the neighbor, e.g. filters, crowding and metadata Note - /// that full entities are returned only when "return_full_entity" is set to - /// true. +/// A list of artifact metadata. +class GoogleCloudAiplatformV1PipelineTaskDetailArtifactList { + /// A list of artifact metadata. /// - /// Otherwise, only the "entity_id" and "distance" fields are populated. - GoogleCloudAiplatformV1FetchFeatureValuesResponse? entityKeyValues; + /// Output only. + core.List? 
artifacts; - GoogleCloudAiplatformV1NearestNeighborsNeighbor({ - this.distance, - this.entityId, - this.entityKeyValues, + GoogleCloudAiplatformV1PipelineTaskDetailArtifactList({ + this.artifacts, }); - GoogleCloudAiplatformV1NearestNeighborsNeighbor.fromJson(core.Map json_) + GoogleCloudAiplatformV1PipelineTaskDetailArtifactList.fromJson(core.Map json_) : this( - distance: (json_['distance'] as core.num?)?.toDouble(), - entityId: json_['entityId'] as core.String?, - entityKeyValues: json_.containsKey('entityKeyValues') - ? GoogleCloudAiplatformV1FetchFeatureValuesResponse.fromJson( - json_['entityKeyValues'] - as core.Map) - : null, + artifacts: (json_['artifacts'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1Artifact.fromJson( + value as core.Map)) + .toList(), ); core.Map toJson() => { - if (distance != null) 'distance': distance!, - if (entityId != null) 'entityId': entityId!, - if (entityKeyValues != null) 'entityKeyValues': entityKeyValues!, + if (artifacts != null) 'artifacts': artifacts!, }; } -/// Neighbors for example-based explanations. -class GoogleCloudAiplatformV1Neighbor { - /// The neighbor distance. +/// A single record of the task status. +class GoogleCloudAiplatformV1PipelineTaskDetailPipelineTaskStatus { + /// The error that occurred during the state. + /// + /// May be set when the state is any of the non-final state + /// (PENDING/RUNNING/CANCELLING) or FAILED state. If the state is FAILED, the + /// error here is final and not going to be retried. If the state is a + /// non-final state, the error indicates a system-error being retried. /// /// Output only. - core.double? neighborDistance; + GoogleRpcStatus? error; - /// The neighbor id. + /// The state of the task. /// /// Output only. - core.String? neighborId; + /// Possible string values are: + /// - "STATE_UNSPECIFIED" : Unspecified. + /// - "PENDING" : Specifies pending state for the task. + /// - "RUNNING" : Specifies task is being executed. + /// - "SUCCEEDED" : Specifies task completed successfully. + /// - "CANCEL_PENDING" : Specifies Task cancel is in pending state. + /// - "CANCELLING" : Specifies task is being cancelled. + /// - "CANCELLED" : Specifies task was cancelled. + /// - "FAILED" : Specifies task failed. + /// - "SKIPPED" : Specifies task was skipped due to cache hit. + /// - "NOT_TRIGGERED" : Specifies that the task was not triggered because the + /// task's trigger policy is not satisfied. The trigger policy is specified in + /// the `condition` field of PipelineJob.pipeline_spec. + core.String? state; - GoogleCloudAiplatformV1Neighbor({ - this.neighborDistance, - this.neighborId, + /// Update time of this status. + /// + /// Output only. + core.String? updateTime; + + GoogleCloudAiplatformV1PipelineTaskDetailPipelineTaskStatus({ + this.error, + this.state, + this.updateTime, }); - GoogleCloudAiplatformV1Neighbor.fromJson(core.Map json_) + GoogleCloudAiplatformV1PipelineTaskDetailPipelineTaskStatus.fromJson( + core.Map json_) : this( - neighborDistance: - (json_['neighborDistance'] as core.num?)?.toDouble(), - neighborId: json_['neighborId'] as core.String?, + error: json_.containsKey('error') + ? 
GoogleRpcStatus.fromJson( + json_['error'] as core.Map) + : null, + state: json_['state'] as core.String?, + updateTime: json_['updateTime'] as core.String?, ); core.Map toJson() => { - if (neighborDistance != null) 'neighborDistance': neighborDistance!, - if (neighborId != null) 'neighborId': neighborId!, + if (error != null) 'error': error!, + if (state != null) 'state': state!, + if (updateTime != null) 'updateTime': updateTime!, }; } -/// Network spec. -class GoogleCloudAiplatformV1NetworkSpec { - /// Whether to enable public internet access. +/// The runtime detail of a pipeline executor. +class GoogleCloudAiplatformV1PipelineTaskExecutorDetail { + /// The detailed info for a container executor. /// - /// Default false. - core.bool? enableInternetAccess; - - /// The full name of the Google Compute Engine - /// [network](https://cloud.google.com//compute/docs/networks-and-firewalls#networks) - core.String? network; + /// Output only. + GoogleCloudAiplatformV1PipelineTaskExecutorDetailContainerDetail? + containerDetail; - /// The name of the subnet that this instance is in. + /// The detailed info for a custom job executor. /// - /// Format: - /// `projects/{project_id_or_number}/regions/{region}/subnetworks/{subnetwork_id}` - core.String? subnetwork; + /// Output only. + GoogleCloudAiplatformV1PipelineTaskExecutorDetailCustomJobDetail? + customJobDetail; - GoogleCloudAiplatformV1NetworkSpec({ - this.enableInternetAccess, - this.network, - this.subnetwork, + GoogleCloudAiplatformV1PipelineTaskExecutorDetail({ + this.containerDetail, + this.customJobDetail, }); - GoogleCloudAiplatformV1NetworkSpec.fromJson(core.Map json_) + GoogleCloudAiplatformV1PipelineTaskExecutorDetail.fromJson(core.Map json_) : this( - enableInternetAccess: json_['enableInternetAccess'] as core.bool?, - network: json_['network'] as core.String?, - subnetwork: json_['subnetwork'] as core.String?, + containerDetail: json_.containsKey('containerDetail') + ? GoogleCloudAiplatformV1PipelineTaskExecutorDetailContainerDetail + .fromJson(json_['containerDetail'] + as core.Map) + : null, + customJobDetail: json_.containsKey('customJobDetail') + ? GoogleCloudAiplatformV1PipelineTaskExecutorDetailCustomJobDetail + .fromJson(json_['customJobDetail'] + as core.Map) + : null, ); core.Map toJson() => { - if (enableInternetAccess != null) - 'enableInternetAccess': enableInternetAccess!, - if (network != null) 'network': network!, - if (subnetwork != null) 'subnetwork': subnetwork!, + if (containerDetail != null) 'containerDetail': containerDetail!, + if (customJobDetail != null) 'customJobDetail': customJobDetail!, }; } -/// Represents a mount configuration for Network File System (NFS) to mount. -class GoogleCloudAiplatformV1NfsMount { - /// Destination mount path. +/// The detail of a container execution. +/// +/// It contains the job names of the lifecycle of a container execution. +class GoogleCloudAiplatformV1PipelineTaskExecutorDetailContainerDetail { + /// The names of the previously failed CustomJob for the main container + /// executions. /// - /// The NFS will be mounted for the user under /mnt/nfs/ + /// The list includes the all attempts in chronological order. /// - /// Required. - core.String? mountPoint; + /// Output only. + core.List? failedMainJobs; - /// Source path exported from NFS server. + /// The names of the previously failed CustomJob for the pre-caching-check + /// container executions. 
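// Illustrative aside (not generated output): a minimal sketch that walks the
// task details of a decoded pipeline run and reports failures, using only the
// PipelineJobDetail, PipelineTaskDetail, and PipelineTaskExecutorDetail
// fields defined in this hunk. The log format is arbitrary.
void reportFailedPipelineTasksSketch(GoogleCloudAiplatformV1PipelineJob job) {
  final tasks = job.jobDetail?.taskDetails ??
      const <GoogleCloudAiplatformV1PipelineTaskDetail>[];
  for (final task in tasks) {
    if (task.state != 'FAILED') continue;
    core.print('Task ${task.taskName} failed: ${task.error?.message}');
    // The container detail records the CustomJob names behind the execution,
    // including earlier failed attempts in chronological order.
    final failed = task.executorDetail?.containerDetail?.failedMainJobs;
    if (failed != null && failed.isNotEmpty) {
      core.print('Failed attempts: ${failed.join(", ")}');
    }
  }
}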
/// - /// Has to start with '/', and combined with the ip address, it indicates the - /// source mount path in the form of `server:path` + /// This job will be available if the PipelineJob.pipeline_spec specifies the + /// `pre_caching_check` hook in the lifecycle events. The list includes the + /// all attempts in chronological order. /// - /// Required. - core.String? path; + /// Output only. + core.List? failedPreCachingCheckJobs; - /// IP address of the NFS server. + /// The name of the CustomJob for the main container execution. /// - /// Required. - core.String? server; + /// Output only. + core.String? mainJob; - GoogleCloudAiplatformV1NfsMount({ - this.mountPoint, - this.path, - this.server, + /// The name of the CustomJob for the pre-caching-check container execution. + /// + /// This job will be available if the PipelineJob.pipeline_spec specifies the + /// `pre_caching_check` hook in the lifecycle events. + /// + /// Output only. + core.String? preCachingCheckJob; + + GoogleCloudAiplatformV1PipelineTaskExecutorDetailContainerDetail({ + this.failedMainJobs, + this.failedPreCachingCheckJobs, + this.mainJob, + this.preCachingCheckJob, }); - GoogleCloudAiplatformV1NfsMount.fromJson(core.Map json_) + GoogleCloudAiplatformV1PipelineTaskExecutorDetailContainerDetail.fromJson( + core.Map json_) : this( - mountPoint: json_['mountPoint'] as core.String?, - path: json_['path'] as core.String?, - server: json_['server'] as core.String?, + failedMainJobs: (json_['failedMainJobs'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + failedPreCachingCheckJobs: + (json_['failedPreCachingCheckJobs'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + mainJob: json_['mainJob'] as core.String?, + preCachingCheckJob: json_['preCachingCheckJob'] as core.String?, ); core.Map toJson() => { - if (mountPoint != null) 'mountPoint': mountPoint!, - if (path != null) 'path': path!, - if (server != null) 'server': server!, + if (failedMainJobs != null) 'failedMainJobs': failedMainJobs!, + if (failedPreCachingCheckJobs != null) + 'failedPreCachingCheckJobs': failedPreCachingCheckJobs!, + if (mainJob != null) 'mainJob': mainJob!, + if (preCachingCheckJob != null) + 'preCachingCheckJob': preCachingCheckJob!, }; } -/// The euc configuration of NotebookRuntimeTemplate. -class GoogleCloudAiplatformV1NotebookEucConfig { - /// Whether ActAs check is bypassed for service account attached to the VM. +/// The detailed info for a custom job executor. +class GoogleCloudAiplatformV1PipelineTaskExecutorDetailCustomJobDetail { + /// The names of the previously failed CustomJob. /// - /// If false, we need ActAs check for the default Compute Engine Service - /// account. When a Runtime is created, a VM is allocated using Default - /// Compute Engine Service Account. Any user requesting to use this Runtime - /// requires Service Account User (ActAs) permission over this SA. If true, - /// Runtime owner is using EUC and does not require the above permission as VM - /// no longer use default Compute Engine SA, but a P4SA. + /// The list includes the all attempts in chronological order. /// /// Output only. - core.bool? bypassActasCheck; + core.List? failedJobs; - /// Input only. + /// The name of the CustomJob. /// - /// Whether EUC is disabled in this NotebookRuntimeTemplate. In proto3, the - /// default value of a boolean is false. In this way, by default EUC will be - /// enabled for NotebookRuntimeTemplate. - core.bool? eucDisabled; + /// Output only. + core.String? 
job; - GoogleCloudAiplatformV1NotebookEucConfig({ - this.bypassActasCheck, - this.eucDisabled, + GoogleCloudAiplatformV1PipelineTaskExecutorDetailCustomJobDetail({ + this.failedJobs, + this.job, }); - GoogleCloudAiplatformV1NotebookEucConfig.fromJson(core.Map json_) + GoogleCloudAiplatformV1PipelineTaskExecutorDetailCustomJobDetail.fromJson( + core.Map json_) : this( - bypassActasCheck: json_['bypassActasCheck'] as core.bool?, - eucDisabled: json_['eucDisabled'] as core.bool?, + failedJobs: (json_['failedJobs'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + job: json_['job'] as core.String?, ); core.Map toJson() => { - if (bypassActasCheck != null) 'bypassActasCheck': bypassActasCheck!, - if (eucDisabled != null) 'eucDisabled': eucDisabled!, + if (failedJobs != null) 'failedJobs': failedJobs!, + if (job != null) 'job': job!, }; } -/// NotebookExecutionJob represents an instance of a notebook execution. -class GoogleCloudAiplatformV1NotebookExecutionJob { - /// Timestamp when this NotebookExecutionJob was created. - /// - /// Output only. - core.String? createTime; - - /// The Dataform Repository pointing to a single file notebook repository. - GoogleCloudAiplatformV1NotebookExecutionJobDataformRepositorySource? - dataformRepositorySource; - - /// The contents of an input notebook file. - GoogleCloudAiplatformV1NotebookExecutionJobDirectNotebookSource? - directNotebookSource; - - /// The display name of the NotebookExecutionJob. - /// - /// The name can be up to 128 characters long and can consist of any UTF-8 - /// characters. - core.String? displayName; - - /// Customer-managed encryption key spec for the notebook execution job. - /// - /// This field is auto-populated if the - /// NotebookService.NotebookRuntimeTemplate has an encryption spec. - GoogleCloudAiplatformV1EncryptionSpec? encryptionSpec; - - /// Max running time of the execution job in seconds (default 86400s / 24 - /// hrs). - core.String? executionTimeout; - - /// The user email to run the execution as. - /// - /// Only supported by Colab runtimes. - core.String? executionUser; - - /// The Cloud Storage url pointing to the ipynb file. - /// - /// Format: `gs://bucket/notebook_file.ipynb` - GoogleCloudAiplatformV1NotebookExecutionJobGcsNotebookSource? - gcsNotebookSource; - - /// The Cloud Storage location to upload the result to. - /// - /// Format: `gs://bucket-name` - core.String? gcsOutputUri; - - /// The state of the NotebookExecutionJob. - /// - /// Output only. - /// Possible string values are: - /// - "JOB_STATE_UNSPECIFIED" : The job state is unspecified. - /// - "JOB_STATE_QUEUED" : The job has been just created or resumed and - /// processing has not yet begun. - /// - "JOB_STATE_PENDING" : The service is preparing to run the job. - /// - "JOB_STATE_RUNNING" : The job is in progress. - /// - "JOB_STATE_SUCCEEDED" : The job completed successfully. - /// - "JOB_STATE_FAILED" : The job failed. - /// - "JOB_STATE_CANCELLING" : The job is being cancelled. From this state the - /// job may only go to either `JOB_STATE_SUCCEEDED`, `JOB_STATE_FAILED` or - /// `JOB_STATE_CANCELLED`. - /// - "JOB_STATE_CANCELLED" : The job has been cancelled. - /// - "JOB_STATE_PAUSED" : The job has been stopped, and can be resumed. - /// - "JOB_STATE_EXPIRED" : The job has expired. - /// - "JOB_STATE_UPDATING" : The job is being updated. Only jobs in the - /// `RUNNING` state can be updated. After updating, the job goes back to the - /// `RUNNING` state. 
- /// - "JOB_STATE_PARTIALLY_SUCCEEDED" : The job is partially succeeded, some - /// results may be missing due to errors. - core.String? jobState; - - /// The labels with user-defined metadata to organize NotebookExecutionJobs. - /// - /// Label keys and values can be no longer than 64 characters (Unicode - /// codepoints), can only contain lowercase letters, numeric characters, - /// underscores and dashes. International characters are allowed. See - /// https://goo.gl/xmQnxf for more information and examples of labels. System - /// reserved label keys are prefixed with "aiplatform.googleapis.com/" and are - /// immutable. - core.Map? labels; - - /// The resource name of this NotebookExecutionJob. - /// - /// Format: - /// `projects/{project_id}/locations/{location}/notebookExecutionJobs/{job_id}` +/// Pipeline template metadata if PipelineJob.template_uri is from supported +/// template registry. +/// +/// Currently, the only supported registry is Artifact Registry. +class GoogleCloudAiplatformV1PipelineTemplateMetadata { + /// The version_name in artifact registry. /// - /// Output only. - core.String? name; + /// Will always be presented in output if the PipelineJob.template_uri is from + /// supported template registry. Format is "sha256:abcdef123456...". + core.String? version; - /// The NotebookRuntimeTemplate to source compute configuration from. - core.String? notebookRuntimeTemplateResourceName; + GoogleCloudAiplatformV1PipelineTemplateMetadata({ + this.version, + }); - /// The Schedule resource name if this job is triggered by one. - /// - /// Format: - /// `projects/{project_id}/locations/{location}/schedules/{schedule_id}` - /// - /// Output only. - core.String? scheduleResourceName; + GoogleCloudAiplatformV1PipelineTemplateMetadata.fromJson(core.Map json_) + : this( + version: json_['version'] as core.String?, + ); - /// The service account to run the execution as. - core.String? serviceAccount; + core.Map toJson() => { + if (version != null) 'version': version!, + }; +} - /// Populated when the NotebookExecutionJob is completed. - /// - /// When there is an error during notebook execution, the error details are - /// populated. +/// Input for pointwise metric. +class GoogleCloudAiplatformV1PointwiseMetricInput { + /// Pointwise metric instance. /// - /// Output only. - GoogleRpcStatus? status; + /// Required. + GoogleCloudAiplatformV1PointwiseMetricInstance? instance; - /// Timestamp when this NotebookExecutionJob was most recently updated. + /// Spec for pointwise metric. /// - /// Output only. - core.String? updateTime; + /// Required. + GoogleCloudAiplatformV1PointwiseMetricSpec? metricSpec; - GoogleCloudAiplatformV1NotebookExecutionJob({ - this.createTime, - this.dataformRepositorySource, - this.directNotebookSource, - this.displayName, - this.encryptionSpec, - this.executionTimeout, - this.executionUser, - this.gcsNotebookSource, - this.gcsOutputUri, - this.jobState, - this.labels, - this.name, - this.notebookRuntimeTemplateResourceName, - this.scheduleResourceName, - this.serviceAccount, - this.status, - this.updateTime, + GoogleCloudAiplatformV1PointwiseMetricInput({ + this.instance, + this.metricSpec, }); - GoogleCloudAiplatformV1NotebookExecutionJob.fromJson(core.Map json_) + GoogleCloudAiplatformV1PointwiseMetricInput.fromJson(core.Map json_) : this( - createTime: json_['createTime'] as core.String?, - dataformRepositorySource: json_ - .containsKey('dataformRepositorySource') - ? 
GoogleCloudAiplatformV1NotebookExecutionJobDataformRepositorySource - .fromJson(json_['dataformRepositorySource'] - as core.Map) - : null, - directNotebookSource: json_.containsKey('directNotebookSource') - ? GoogleCloudAiplatformV1NotebookExecutionJobDirectNotebookSource - .fromJson(json_['directNotebookSource'] - as core.Map) - : null, - displayName: json_['displayName'] as core.String?, - encryptionSpec: json_.containsKey('encryptionSpec') - ? GoogleCloudAiplatformV1EncryptionSpec.fromJson( - json_['encryptionSpec'] - as core.Map) - : null, - executionTimeout: json_['executionTimeout'] as core.String?, - executionUser: json_['executionUser'] as core.String?, - gcsNotebookSource: json_.containsKey('gcsNotebookSource') - ? GoogleCloudAiplatformV1NotebookExecutionJobGcsNotebookSource - .fromJson(json_['gcsNotebookSource'] - as core.Map) + instance: json_.containsKey('instance') + ? GoogleCloudAiplatformV1PointwiseMetricInstance.fromJson( + json_['instance'] as core.Map) : null, - gcsOutputUri: json_['gcsOutputUri'] as core.String?, - jobState: json_['jobState'] as core.String?, - labels: - (json_['labels'] as core.Map?)?.map( - (key, value) => core.MapEntry( - key, - value as core.String, - ), - ), - name: json_['name'] as core.String?, - notebookRuntimeTemplateResourceName: - json_['notebookRuntimeTemplateResourceName'] as core.String?, - scheduleResourceName: json_['scheduleResourceName'] as core.String?, - serviceAccount: json_['serviceAccount'] as core.String?, - status: json_.containsKey('status') - ? GoogleRpcStatus.fromJson( - json_['status'] as core.Map) + metricSpec: json_.containsKey('metricSpec') + ? GoogleCloudAiplatformV1PointwiseMetricSpec.fromJson( + json_['metricSpec'] as core.Map) : null, - updateTime: json_['updateTime'] as core.String?, ); core.Map toJson() => { - if (createTime != null) 'createTime': createTime!, - if (dataformRepositorySource != null) - 'dataformRepositorySource': dataformRepositorySource!, - if (directNotebookSource != null) - 'directNotebookSource': directNotebookSource!, - if (displayName != null) 'displayName': displayName!, - if (encryptionSpec != null) 'encryptionSpec': encryptionSpec!, - if (executionTimeout != null) 'executionTimeout': executionTimeout!, - if (executionUser != null) 'executionUser': executionUser!, - if (gcsNotebookSource != null) 'gcsNotebookSource': gcsNotebookSource!, - if (gcsOutputUri != null) 'gcsOutputUri': gcsOutputUri!, - if (jobState != null) 'jobState': jobState!, - if (labels != null) 'labels': labels!, - if (name != null) 'name': name!, - if (notebookRuntimeTemplateResourceName != null) - 'notebookRuntimeTemplateResourceName': - notebookRuntimeTemplateResourceName!, - if (scheduleResourceName != null) - 'scheduleResourceName': scheduleResourceName!, - if (serviceAccount != null) 'serviceAccount': serviceAccount!, - if (status != null) 'status': status!, - if (updateTime != null) 'updateTime': updateTime!, + if (instance != null) 'instance': instance!, + if (metricSpec != null) 'metricSpec': metricSpec!, + }; +} + +/// Pointwise metric instance. +/// +/// Usually one instance corresponds to one row in an evaluation dataset. +class GoogleCloudAiplatformV1PointwiseMetricInstance { + /// Instance specified as a json string. + /// + /// String key-value pairs are expected in the json_instance to render + /// PointwiseMetricSpec.instance_prompt_template. + core.String? 
jsonInstance; + + GoogleCloudAiplatformV1PointwiseMetricInstance({ + this.jsonInstance, + }); + + GoogleCloudAiplatformV1PointwiseMetricInstance.fromJson(core.Map json_) + : this( + jsonInstance: json_['jsonInstance'] as core.String?, + ); + + core.Map toJson() => { + if (jsonInstance != null) 'jsonInstance': jsonInstance!, }; } -/// The Dataform Repository containing the input notebook. -class GoogleCloudAiplatformV1NotebookExecutionJobDataformRepositorySource { - /// The commit SHA to read repository with. +/// Spec for pointwise metric result. +class GoogleCloudAiplatformV1PointwiseMetricResult { + /// Explanation for pointwise metric score. /// - /// If unset, the file will be read at HEAD. - core.String? commitSha; + /// Output only. + core.String? explanation; - /// The resource name of the Dataform Repository. + /// Pointwise metric score. /// - /// Format: - /// `projects/{project_id}/locations/{location}/repositories/{repository_id}` - core.String? dataformRepositoryResourceName; + /// Output only. + core.double? score; - GoogleCloudAiplatformV1NotebookExecutionJobDataformRepositorySource({ - this.commitSha, - this.dataformRepositoryResourceName, + GoogleCloudAiplatformV1PointwiseMetricResult({ + this.explanation, + this.score, }); - GoogleCloudAiplatformV1NotebookExecutionJobDataformRepositorySource.fromJson( - core.Map json_) + GoogleCloudAiplatformV1PointwiseMetricResult.fromJson(core.Map json_) : this( - commitSha: json_['commitSha'] as core.String?, - dataformRepositoryResourceName: - json_['dataformRepositoryResourceName'] as core.String?, + explanation: json_['explanation'] as core.String?, + score: (json_['score'] as core.num?)?.toDouble(), ); core.Map toJson() => { - if (commitSha != null) 'commitSha': commitSha!, - if (dataformRepositoryResourceName != null) - 'dataformRepositoryResourceName': dataformRepositoryResourceName!, + if (explanation != null) 'explanation': explanation!, + if (score != null) 'score': score!, }; } -/// The content of the input notebook in ipynb format. -class GoogleCloudAiplatformV1NotebookExecutionJobDirectNotebookSource { - /// The base64-encoded contents of the input notebook file. - core.String? content; - core.List get contentAsBytes => convert.base64.decode(content!); - - set contentAsBytes(core.List bytes_) { - content = - convert.base64.encode(bytes_).replaceAll('/', '_').replaceAll('+', '-'); - } +/// Spec for pointwise metric. +class GoogleCloudAiplatformV1PointwiseMetricSpec { + /// Metric prompt template for pointwise metric. + /// + /// Required. + core.String? metricPromptTemplate; - GoogleCloudAiplatformV1NotebookExecutionJobDirectNotebookSource({ - this.content, + GoogleCloudAiplatformV1PointwiseMetricSpec({ + this.metricPromptTemplate, }); - GoogleCloudAiplatformV1NotebookExecutionJobDirectNotebookSource.fromJson( - core.Map json_) + GoogleCloudAiplatformV1PointwiseMetricSpec.fromJson(core.Map json_) : this( - content: json_['content'] as core.String?, + metricPromptTemplate: json_['metricPromptTemplate'] as core.String?, ); core.Map toJson() => { - if (content != null) 'content': content!, + if (metricPromptTemplate != null) + 'metricPromptTemplate': metricPromptTemplate!, }; } -/// The Cloud Storage uri for the input notebook. -class GoogleCloudAiplatformV1NotebookExecutionJobGcsNotebookSource { - /// The version of the Cloud Storage object to read. - /// - /// If unset, the current version of the object is read. See - /// https://cloud.google.com/storage/docs/metadata#generation-number. - core.String? 
generation; - - /// The Cloud Storage uri pointing to the ipynb file. +/// Represents a network port in a container. +class GoogleCloudAiplatformV1Port { + /// The number of the port to expose on the pod's IP address. /// - /// Format: `gs://bucket/notebook_file.ipynb` - core.String? uri; + /// Must be a valid port number, between 1 and 65535 inclusive. + core.int? containerPort; - GoogleCloudAiplatformV1NotebookExecutionJobGcsNotebookSource({ - this.generation, - this.uri, + GoogleCloudAiplatformV1Port({ + this.containerPort, }); - GoogleCloudAiplatformV1NotebookExecutionJobGcsNotebookSource.fromJson( - core.Map json_) + GoogleCloudAiplatformV1Port.fromJson(core.Map json_) : this( - generation: json_['generation'] as core.String?, - uri: json_['uri'] as core.String?, + containerPort: json_['containerPort'] as core.int?, ); core.Map toJson() => { - if (generation != null) 'generation': generation!, - if (uri != null) 'uri': uri!, + if (containerPort != null) 'containerPort': containerPort!, }; } -/// The idle shutdown configuration of NotebookRuntimeTemplate, which contains -/// the idle_timeout as required field. -class GoogleCloudAiplatformV1NotebookIdleShutdownConfig { - /// Whether Idle Shutdown is disabled in this NotebookRuntimeTemplate. - core.bool? idleShutdownDisabled; +/// The configuration for the prebuilt speaker to use. +class GoogleCloudAiplatformV1PrebuiltVoiceConfig { + /// The name of the preset voice to use. + core.String? voiceName; - /// Duration is accurate to the second. + GoogleCloudAiplatformV1PrebuiltVoiceConfig({ + this.voiceName, + }); + + GoogleCloudAiplatformV1PrebuiltVoiceConfig.fromJson(core.Map json_) + : this( + voiceName: json_['voiceName'] as core.String?, + ); + + core.Map toJson() => { + if (voiceName != null) 'voiceName': voiceName!, + }; +} + +/// Assigns input data to training, validation, and test sets based on the value +/// of a provided key. +/// +/// Supported only for tabular Datasets. +class GoogleCloudAiplatformV1PredefinedSplit { + /// The key is a name of one of the Dataset's data columns. /// - /// In Notebook, Idle Timeout is accurate to minute so the range of - /// idle_timeout (second) is: 10 * 60 ~ 1440 * 60. + /// The value of the key (either the label's value or value in the column) + /// must be one of {`training`, `validation`, `test`}, and it defines to which + /// set the given piece of data is assigned. If for a piece of data the key is + /// not present or has an invalid value, that piece is ignored by the + /// pipeline. /// /// Required. - core.String? idleTimeout; + core.String? key; - GoogleCloudAiplatformV1NotebookIdleShutdownConfig({ - this.idleShutdownDisabled, - this.idleTimeout, + GoogleCloudAiplatformV1PredefinedSplit({ + this.key, }); - GoogleCloudAiplatformV1NotebookIdleShutdownConfig.fromJson(core.Map json_) + GoogleCloudAiplatformV1PredefinedSplit.fromJson(core.Map json_) : this( - idleShutdownDisabled: json_['idleShutdownDisabled'] as core.bool?, - idleTimeout: json_['idleTimeout'] as core.String?, + key: json_['key'] as core.String?, ); core.Map toJson() => { - if (idleShutdownDisabled != null) - 'idleShutdownDisabled': idleShutdownDisabled!, - if (idleTimeout != null) 'idleTimeout': idleTimeout!, + if (key != null) 'key': key!, }; } -/// A runtime is a virtual machine allocated to a particular user for a -/// particular Notebook file on temporary basis with lifetime limited to 24 -/// hours. -class GoogleCloudAiplatformV1NotebookRuntime { - /// Timestamp when this NotebookRuntime was created. 
- /// - /// Output only. - core.String? createTime; - - /// The description of the NotebookRuntime. - core.String? description; - - /// The display name of the NotebookRuntime. +/// Request message for PredictionService.PredictLongRunning. +class GoogleCloudAiplatformV1PredictLongRunningRequest { + /// The instances that are the input to the prediction call. /// - /// The name can be up to 128 characters long and can consist of any UTF-8 - /// characters. + /// A DeployedModel may have an upper limit on the number of instances it + /// supports per request, and when it is exceeded the prediction call errors + /// in case of AutoML Models, or, in case of customer created Models, the + /// behaviour is as documented by that Model. The schema of any single + /// instance may be specified via Endpoint's DeployedModels' Model's + /// PredictSchemata's instance_schema_uri. /// /// Required. - core.String? displayName; - - /// Customer-managed encryption key spec for the notebook runtime. /// - /// Output only. - GoogleCloudAiplatformV1EncryptionSpec? encryptionSpec; + /// The values for Object must be JSON objects. It can consist of `num`, + /// `String`, `bool` and `null` as well as `Map` and `List` values. + core.List? instances; - /// Timestamp when this NotebookRuntime will be expired: 1. - /// - /// System Predefined NotebookRuntime: 24 hours after creation. After - /// expiration, system predifined runtime will be deleted. 2. User created - /// NotebookRuntime: 6 months after last upgrade. After expiration, user - /// created runtime will be stopped and allowed for upgrade. + /// The parameters that govern the prediction. /// - /// Output only. - core.String? expirationTime; - - /// The health state of the NotebookRuntime. + /// The schema of the parameters may be specified via Endpoint's + /// DeployedModels' Model's PredictSchemata's parameters_schema_uri. /// - /// Output only. - /// Possible string values are: - /// - "HEALTH_STATE_UNSPECIFIED" : Unspecified health state. - /// - "HEALTHY" : NotebookRuntime is in healthy state. Applies to ACTIVE - /// state. - /// - "UNHEALTHY" : NotebookRuntime is in unhealthy state. Applies to ACTIVE - /// state. - core.String? healthState; - - /// The idle shutdown configuration of the notebook runtime. + /// Optional. /// - /// Output only. - GoogleCloudAiplatformV1NotebookIdleShutdownConfig? idleShutdownConfig; + /// The values for Object must be JSON objects. It can consist of `num`, + /// `String`, `bool` and `null` as well as `Map` and `List` values. + core.Object? parameters; - /// Whether NotebookRuntime is upgradable. - /// - /// Output only. - core.bool? isUpgradable; + GoogleCloudAiplatformV1PredictLongRunningRequest({ + this.instances, + this.parameters, + }); - /// The labels with user-defined metadata to organize your NotebookRuntime. - /// - /// Label keys and values can be no longer than 64 characters (Unicode - /// codepoints), can only contain lowercase letters, numeric characters, - /// underscores and dashes. International characters are allowed. No more than - /// 64 user labels can be associated with one NotebookRuntime (System labels - /// are excluded). See https://goo.gl/xmQnxf for more information and examples - /// of labels. System reserved label keys are prefixed with - /// "aiplatform.googleapis.com/" and are immutable. Following system labels - /// exist for NotebookRuntime: * - /// "aiplatform.googleapis.com/notebook_runtime_gce_instance_id": output only, - /// its value is the Compute Engine instance id. 
* - /// "aiplatform.googleapis.com/colab_enterprise_entry_service": its value is - /// either "bigquery" or "vertex"; if absent, it should be "vertex". This is - /// to describe the entry service, either BigQuery or Vertex. - core.Map? labels; + GoogleCloudAiplatformV1PredictLongRunningRequest.fromJson(core.Map json_) + : this( + instances: json_.containsKey('instances') + ? json_['instances'] as core.List + : null, + parameters: json_['parameters'], + ); - /// The resource name of the NotebookRuntime. - /// - /// Output only. - core.String? name; + core.Map toJson() => { + if (instances != null) 'instances': instances!, + if (parameters != null) 'parameters': parameters!, + }; +} - /// The Compute Engine tags to add to runtime (see - /// [Tagging instances](https://cloud.google.com/vpc/docs/add-remove-network-tags)). +/// Request message for PredictionService.Predict. +class GoogleCloudAiplatformV1PredictRequest { + /// The instances that are the input to the prediction call. /// - /// Optional. - core.List? networkTags; - - /// The pointer to NotebookRuntimeTemplate this NotebookRuntime is created - /// from. + /// A DeployedModel may have an upper limit on the number of instances it + /// supports per request, and when it is exceeded the prediction call errors + /// in case of AutoML Models, or, in case of customer created Models, the + /// behaviour is as documented by that Model. The schema of any single + /// instance may be specified via Endpoint's DeployedModels' Model's + /// PredictSchemata's instance_schema_uri. /// - /// Output only. - GoogleCloudAiplatformV1NotebookRuntimeTemplateRef? notebookRuntimeTemplateRef; - - /// The type of the notebook runtime. + /// Required. /// - /// Output only. - /// Possible string values are: - /// - "NOTEBOOK_RUNTIME_TYPE_UNSPECIFIED" : Unspecified notebook runtime type, - /// NotebookRuntimeType will default to USER_DEFINED. - /// - "USER_DEFINED" : runtime or template with coustomized configurations - /// from user. - /// - "ONE_CLICK" : runtime or template with system defined configurations. - core.String? notebookRuntimeType; + /// The values for Object must be JSON objects. It can consist of `num`, + /// `String`, `bool` and `null` as well as `Map` and `List` values. + core.List? instances; - /// The proxy endpoint used to access the NotebookRuntime. + /// The parameters that govern the prediction. /// - /// Output only. - core.String? proxyUri; - - /// The runtime (instance) state of the NotebookRuntime. + /// The schema of the parameters may be specified via Endpoint's + /// DeployedModels' Model's PredictSchemata's parameters_schema_uri. /// - /// Output only. - /// Possible string values are: - /// - "RUNTIME_STATE_UNSPECIFIED" : Unspecified runtime state. - /// - "RUNNING" : NotebookRuntime is in running state. - /// - "BEING_STARTED" : NotebookRuntime is in starting state. - /// - "BEING_STOPPED" : NotebookRuntime is in stopping state. - /// - "STOPPED" : NotebookRuntime is in stopped state. - /// - "BEING_UPGRADED" : NotebookRuntime is in upgrading state. It is in the - /// middle of upgrading process. - /// - "ERROR" : NotebookRuntime was unable to start/stop properly. - /// - "INVALID" : NotebookRuntime is in invalid state. Cannot be recovered. - core.String? runtimeState; + /// The values for Object must be JSON objects. It can consist of `num`, + /// `String`, `bool` and `null` as well as `Map` and `List` values. + core.Object? parameters; - /// The user email of the NotebookRuntime. - /// - /// Required. - core.String? 
runtimeUser; + GoogleCloudAiplatformV1PredictRequest({ + this.instances, + this.parameters, + }); - /// Reserved for future use. - /// - /// Output only. - core.bool? satisfiesPzi; + GoogleCloudAiplatformV1PredictRequest.fromJson(core.Map json_) + : this( + instances: json_.containsKey('instances') + ? json_['instances'] as core.List + : null, + parameters: json_['parameters'], + ); - /// Reserved for future use. - /// - /// Output only. - core.bool? satisfiesPzs; + core.Map toJson() => { + if (instances != null) 'instances': instances!, + if (parameters != null) 'parameters': parameters!, + }; +} - /// The service account that the NotebookRuntime workload runs as. +/// Configuration for logging request-response to a BigQuery table. +class GoogleCloudAiplatformV1PredictRequestResponseLoggingConfig { + /// BigQuery table for logging. /// - /// Output only. - core.String? serviceAccount; + /// If only given a project, a new dataset will be created with name + /// `logging__` where will be made BigQuery-dataset-name compatible (e.g. most + /// special characters will become underscores). If no table name is given, a + /// new table will be created with name `request_response_logging` + GoogleCloudAiplatformV1BigQueryDestination? bigqueryDestination; - /// Timestamp when this NotebookRuntime was most recently updated. - /// - /// Output only. - core.String? updateTime; + /// If logging is enabled or not. + core.bool? enabled; - /// The VM os image version of NotebookRuntime. - /// - /// Output only. - core.String? version; + /// Percentage of requests to be logged, expressed as a fraction in + /// range(0,1\]. + core.double? samplingRate; - GoogleCloudAiplatformV1NotebookRuntime({ - this.createTime, - this.description, - this.displayName, - this.encryptionSpec, - this.expirationTime, - this.healthState, - this.idleShutdownConfig, - this.isUpgradable, - this.labels, - this.name, - this.networkTags, - this.notebookRuntimeTemplateRef, - this.notebookRuntimeType, - this.proxyUri, - this.runtimeState, - this.runtimeUser, - this.satisfiesPzi, - this.satisfiesPzs, - this.serviceAccount, - this.updateTime, - this.version, + GoogleCloudAiplatformV1PredictRequestResponseLoggingConfig({ + this.bigqueryDestination, + this.enabled, + this.samplingRate, }); - GoogleCloudAiplatformV1NotebookRuntime.fromJson(core.Map json_) + GoogleCloudAiplatformV1PredictRequestResponseLoggingConfig.fromJson( + core.Map json_) : this( - createTime: json_['createTime'] as core.String?, - description: json_['description'] as core.String?, - displayName: json_['displayName'] as core.String?, - encryptionSpec: json_.containsKey('encryptionSpec') - ? GoogleCloudAiplatformV1EncryptionSpec.fromJson( - json_['encryptionSpec'] - as core.Map) - : null, - expirationTime: json_['expirationTime'] as core.String?, - healthState: json_['healthState'] as core.String?, - idleShutdownConfig: json_.containsKey('idleShutdownConfig') - ? GoogleCloudAiplatformV1NotebookIdleShutdownConfig.fromJson( - json_['idleShutdownConfig'] + bigqueryDestination: json_.containsKey('bigqueryDestination') + ? GoogleCloudAiplatformV1BigQueryDestination.fromJson( + json_['bigqueryDestination'] as core.Map) : null, - isUpgradable: json_['isUpgradable'] as core.bool?, - labels: - (json_['labels'] as core.Map?)?.map( - (key, value) => core.MapEntry( - key, - value as core.String, - ), - ), - name: json_['name'] as core.String?, - networkTags: (json_['networkTags'] as core.List?) 
- ?.map((value) => value as core.String) - .toList(), - notebookRuntimeTemplateRef: - json_.containsKey('notebookRuntimeTemplateRef') - ? GoogleCloudAiplatformV1NotebookRuntimeTemplateRef.fromJson( - json_['notebookRuntimeTemplateRef'] - as core.Map) - : null, - notebookRuntimeType: json_['notebookRuntimeType'] as core.String?, - proxyUri: json_['proxyUri'] as core.String?, - runtimeState: json_['runtimeState'] as core.String?, - runtimeUser: json_['runtimeUser'] as core.String?, - satisfiesPzi: json_['satisfiesPzi'] as core.bool?, - satisfiesPzs: json_['satisfiesPzs'] as core.bool?, - serviceAccount: json_['serviceAccount'] as core.String?, - updateTime: json_['updateTime'] as core.String?, - version: json_['version'] as core.String?, + enabled: json_['enabled'] as core.bool?, + samplingRate: (json_['samplingRate'] as core.num?)?.toDouble(), ); core.Map toJson() => { - if (createTime != null) 'createTime': createTime!, - if (description != null) 'description': description!, - if (displayName != null) 'displayName': displayName!, - if (encryptionSpec != null) 'encryptionSpec': encryptionSpec!, - if (expirationTime != null) 'expirationTime': expirationTime!, - if (healthState != null) 'healthState': healthState!, - if (idleShutdownConfig != null) - 'idleShutdownConfig': idleShutdownConfig!, - if (isUpgradable != null) 'isUpgradable': isUpgradable!, - if (labels != null) 'labels': labels!, - if (name != null) 'name': name!, - if (networkTags != null) 'networkTags': networkTags!, - if (notebookRuntimeTemplateRef != null) - 'notebookRuntimeTemplateRef': notebookRuntimeTemplateRef!, - if (notebookRuntimeType != null) - 'notebookRuntimeType': notebookRuntimeType!, - if (proxyUri != null) 'proxyUri': proxyUri!, - if (runtimeState != null) 'runtimeState': runtimeState!, - if (runtimeUser != null) 'runtimeUser': runtimeUser!, - if (satisfiesPzi != null) 'satisfiesPzi': satisfiesPzi!, - if (satisfiesPzs != null) 'satisfiesPzs': satisfiesPzs!, - if (serviceAccount != null) 'serviceAccount': serviceAccount!, - if (updateTime != null) 'updateTime': updateTime!, - if (version != null) 'version': version!, + if (bigqueryDestination != null) + 'bigqueryDestination': bigqueryDestination!, + if (enabled != null) 'enabled': enabled!, + if (samplingRate != null) 'samplingRate': samplingRate!, }; } -/// A template that specifies runtime configurations such as machine type, -/// runtime version, network configurations, etc. -/// -/// Multiple runtimes can be created from a runtime template. -class GoogleCloudAiplatformV1NotebookRuntimeTemplate { - /// Timestamp when this NotebookRuntimeTemplate was created. - /// - /// Output only. - core.String? createTime; - - /// The specification of persistent disk attached to the runtime as data disk - /// storage. - /// - /// Optional. - GoogleCloudAiplatformV1PersistentDiskSpec? dataPersistentDiskSpec; - - /// The description of the NotebookRuntimeTemplate. - core.String? description; +/// Response message for PredictionService.Predict. +class GoogleCloudAiplatformV1PredictResponse { + /// ID of the Endpoint's DeployedModel that served this prediction. + core.String? deployedModelId; - /// The display name of the NotebookRuntimeTemplate. + /// Request-level metadata returned by the model. /// - /// The name can be up to 128 characters long and can consist of any UTF-8 - /// characters. + /// The metadata type will be dependent upon the model implementation. /// - /// Required. - core.String? 
displayName; - - /// Customer-managed encryption key spec for the notebook runtime. - GoogleCloudAiplatformV1EncryptionSpec? encryptionSpec; - - /// Used to perform consistent read-modify-write updates. + /// Output only. /// - /// If not set, a blind "overwrite" update happens. - core.String? etag; - - /// EUC configuration of the NotebookRuntimeTemplate. - GoogleCloudAiplatformV1NotebookEucConfig? eucConfig; + /// The values for Object must be JSON objects. It can consist of `num`, + /// `String`, `bool` and `null` as well as `Map` and `List` values. + core.Object? metadata; - /// The idle shutdown configuration of NotebookRuntimeTemplate. + /// The resource name of the Model which is deployed as the DeployedModel that + /// this prediction hits. /// - /// This config will only be set when idle shutdown is enabled. - GoogleCloudAiplatformV1NotebookIdleShutdownConfig? idleShutdownConfig; + /// Output only. + core.String? model; - /// The default template to use if not specified. + /// The display name of the Model which is deployed as the DeployedModel that + /// this prediction hits. /// /// Output only. - core.bool? isDefault; + core.String? modelDisplayName; - /// The labels with user-defined metadata to organize the - /// NotebookRuntimeTemplates. + /// The version ID of the Model which is deployed as the DeployedModel that + /// this prediction hits. /// - /// Label keys and values can be no longer than 64 characters (Unicode - /// codepoints), can only contain lowercase letters, numeric characters, - /// underscores and dashes. International characters are allowed. See - /// https://goo.gl/xmQnxf for more information and examples of labels. - core.Map? labels; + /// Output only. + core.String? modelVersionId; - /// The specification of a single machine for the template. + /// The predictions that are the output of the predictions call. /// - /// Optional. Immutable. - GoogleCloudAiplatformV1MachineSpec? machineSpec; + /// The schema of any single prediction may be specified via Endpoint's + /// DeployedModels' Model's PredictSchemata's prediction_schema_uri. + /// + /// The values for Object must be JSON objects. It can consist of `num`, + /// `String`, `bool` and `null` as well as `Map` and `List` values. + core.List? predictions; - /// The resource name of the NotebookRuntimeTemplate. - core.String? name; + GoogleCloudAiplatformV1PredictResponse({ + this.deployedModelId, + this.metadata, + this.model, + this.modelDisplayName, + this.modelVersionId, + this.predictions, + }); - /// Network spec. - /// - /// Optional. - GoogleCloudAiplatformV1NetworkSpec? networkSpec; + GoogleCloudAiplatformV1PredictResponse.fromJson(core.Map json_) + : this( + deployedModelId: json_['deployedModelId'] as core.String?, + metadata: json_['metadata'], + model: json_['model'] as core.String?, + modelDisplayName: json_['modelDisplayName'] as core.String?, + modelVersionId: json_['modelVersionId'] as core.String?, + predictions: json_.containsKey('predictions') + ? json_['predictions'] as core.List + : null, + ); - /// The Compute Engine tags to add to runtime (see - /// [Tagging instances](https://cloud.google.com/vpc/docs/add-remove-network-tags)). - /// - /// Optional. - core.List? 
networkTags; + core.Map toJson() => { + if (deployedModelId != null) 'deployedModelId': deployedModelId!, + if (metadata != null) 'metadata': metadata!, + if (model != null) 'model': model!, + if (modelDisplayName != null) 'modelDisplayName': modelDisplayName!, + if (modelVersionId != null) 'modelVersionId': modelVersionId!, + if (predictions != null) 'predictions': predictions!, + }; +} - /// The type of the notebook runtime template. +/// Contains the schemata used in Model's predictions and explanations via +/// PredictionService.Predict, PredictionService.Explain and BatchPredictionJob. +class GoogleCloudAiplatformV1PredictSchemata { + /// Points to a YAML file stored on Google Cloud Storage describing the format + /// of a single instance, which are used in PredictRequest.instances, + /// ExplainRequest.instances and BatchPredictionJob.input_config. /// - /// Optional. Immutable. - /// Possible string values are: - /// - "NOTEBOOK_RUNTIME_TYPE_UNSPECIFIED" : Unspecified notebook runtime type, - /// NotebookRuntimeType will default to USER_DEFINED. - /// - "USER_DEFINED" : runtime or template with coustomized configurations - /// from user. - /// - "ONE_CLICK" : runtime or template with system defined configurations. - core.String? notebookRuntimeType; - - /// The service account that the runtime workload runs as. + /// The schema is defined as an OpenAPI 3.0.2 + /// [Schema Object](https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.2.md#schemaObject). + /// AutoML Models always have this field populated by Vertex AI. Note: The URI + /// given on output will be immutable and probably different, including the + /// URI scheme, than the one given on input. The output URI will point to a + /// location where the user only has a read access. /// - /// You can use any service account within the same project, but you must have - /// the service account user permission to use the instance. If not specified, - /// the - /// [Compute Engine default service account](https://cloud.google.com/compute/docs/access/service-accounts#default_service_account) - /// is used. - core.String? serviceAccount; + /// Immutable. + core.String? instanceSchemaUri; - /// Runtime Shielded VM spec. + /// Points to a YAML file stored on Google Cloud Storage describing the + /// parameters of prediction and explanation via PredictRequest.parameters, + /// ExplainRequest.parameters and BatchPredictionJob.model_parameters. /// - /// Optional. Immutable. - GoogleCloudAiplatformV1ShieldedVmConfig? shieldedVmConfig; + /// The schema is defined as an OpenAPI 3.0.2 + /// [Schema Object](https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.2.md#schemaObject). + /// AutoML Models always have this field populated by Vertex AI, if no + /// parameters are supported, then it is set to an empty string. Note: The URI + /// given on output will be immutable and probably different, including the + /// URI scheme, than the one given on input. The output URI will point to a + /// location where the user only has a read access. + /// + /// Immutable. + core.String? parametersSchemaUri; - /// Timestamp when this NotebookRuntimeTemplate was most recently updated. + /// Points to a YAML file stored on Google Cloud Storage describing the format + /// of a single prediction produced by this Model, which are returned via + /// PredictResponse.predictions, ExplainResponse.explanations, and + /// BatchPredictionJob.output_config. /// - /// Output only. - core.String? 
updateTime; + /// The schema is defined as an OpenAPI 3.0.2 + /// [Schema Object](https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.2.md#schemaObject). + /// AutoML Models always have this field populated by Vertex AI. Note: The URI + /// given on output will be immutable and probably different, including the + /// URI scheme, than the one given on input. The output URI will point to a + /// location where the user only has a read access. + /// + /// Immutable. + core.String? predictionSchemaUri; - GoogleCloudAiplatformV1NotebookRuntimeTemplate({ - this.createTime, - this.dataPersistentDiskSpec, - this.description, - this.displayName, - this.encryptionSpec, - this.etag, - this.eucConfig, - this.idleShutdownConfig, - this.isDefault, - this.labels, - this.machineSpec, - this.name, - this.networkSpec, - this.networkTags, - this.notebookRuntimeType, - this.serviceAccount, - this.shieldedVmConfig, - this.updateTime, + GoogleCloudAiplatformV1PredictSchemata({ + this.instanceSchemaUri, + this.parametersSchemaUri, + this.predictionSchemaUri, }); - GoogleCloudAiplatformV1NotebookRuntimeTemplate.fromJson(core.Map json_) + GoogleCloudAiplatformV1PredictSchemata.fromJson(core.Map json_) : this( - createTime: json_['createTime'] as core.String?, - dataPersistentDiskSpec: json_.containsKey('dataPersistentDiskSpec') - ? GoogleCloudAiplatformV1PersistentDiskSpec.fromJson( - json_['dataPersistentDiskSpec'] - as core.Map) - : null, - description: json_['description'] as core.String?, - displayName: json_['displayName'] as core.String?, - encryptionSpec: json_.containsKey('encryptionSpec') - ? GoogleCloudAiplatformV1EncryptionSpec.fromJson( - json_['encryptionSpec'] - as core.Map) - : null, - etag: json_['etag'] as core.String?, - eucConfig: json_.containsKey('eucConfig') - ? GoogleCloudAiplatformV1NotebookEucConfig.fromJson( - json_['eucConfig'] as core.Map) - : null, - idleShutdownConfig: json_.containsKey('idleShutdownConfig') - ? GoogleCloudAiplatformV1NotebookIdleShutdownConfig.fromJson( - json_['idleShutdownConfig'] - as core.Map) - : null, - isDefault: json_['isDefault'] as core.bool?, - labels: - (json_['labels'] as core.Map?)?.map( - (key, value) => core.MapEntry( - key, - value as core.String, - ), - ), - machineSpec: json_.containsKey('machineSpec') - ? GoogleCloudAiplatformV1MachineSpec.fromJson( - json_['machineSpec'] as core.Map) - : null, - name: json_['name'] as core.String?, - networkSpec: json_.containsKey('networkSpec') - ? GoogleCloudAiplatformV1NetworkSpec.fromJson( - json_['networkSpec'] as core.Map) - : null, - networkTags: (json_['networkTags'] as core.List?) - ?.map((value) => value as core.String) - .toList(), - notebookRuntimeType: json_['notebookRuntimeType'] as core.String?, - serviceAccount: json_['serviceAccount'] as core.String?, - shieldedVmConfig: json_.containsKey('shieldedVmConfig') - ? 
GoogleCloudAiplatformV1ShieldedVmConfig.fromJson( - json_['shieldedVmConfig'] - as core.Map) - : null, - updateTime: json_['updateTime'] as core.String?, + instanceSchemaUri: json_['instanceSchemaUri'] as core.String?, + parametersSchemaUri: json_['parametersSchemaUri'] as core.String?, + predictionSchemaUri: json_['predictionSchemaUri'] as core.String?, ); core.Map toJson() => { - if (createTime != null) 'createTime': createTime!, - if (dataPersistentDiskSpec != null) - 'dataPersistentDiskSpec': dataPersistentDiskSpec!, - if (description != null) 'description': description!, - if (displayName != null) 'displayName': displayName!, - if (encryptionSpec != null) 'encryptionSpec': encryptionSpec!, - if (etag != null) 'etag': etag!, - if (eucConfig != null) 'eucConfig': eucConfig!, - if (idleShutdownConfig != null) - 'idleShutdownConfig': idleShutdownConfig!, - if (isDefault != null) 'isDefault': isDefault!, - if (labels != null) 'labels': labels!, - if (machineSpec != null) 'machineSpec': machineSpec!, - if (name != null) 'name': name!, - if (networkSpec != null) 'networkSpec': networkSpec!, - if (networkTags != null) 'networkTags': networkTags!, - if (notebookRuntimeType != null) - 'notebookRuntimeType': notebookRuntimeType!, - if (serviceAccount != null) 'serviceAccount': serviceAccount!, - if (shieldedVmConfig != null) 'shieldedVmConfig': shieldedVmConfig!, - if (updateTime != null) 'updateTime': updateTime!, + if (instanceSchemaUri != null) 'instanceSchemaUri': instanceSchemaUri!, + if (parametersSchemaUri != null) + 'parametersSchemaUri': parametersSchemaUri!, + if (predictionSchemaUri != null) + 'predictionSchemaUri': predictionSchemaUri!, }; } -/// Points to a NotebookRuntimeTemplateRef. -class GoogleCloudAiplatformV1NotebookRuntimeTemplateRef { - /// A resource name of the NotebookRuntimeTemplate. +/// Preset configuration for example-based explanations +class GoogleCloudAiplatformV1Presets { + /// The modality of the uploaded model, which automatically configures the + /// distance measurement and feature normalization for the underlying example + /// index and queries. /// - /// Immutable. - core.String? notebookRuntimeTemplate; + /// If your model does not precisely fit one of these types, it is okay to + /// choose the closest type. + /// Possible string values are: + /// - "MODALITY_UNSPECIFIED" : Should not be set. Added as a recommended best + /// practice for enums + /// - "IMAGE" : IMAGE modality + /// - "TEXT" : TEXT modality + /// - "TABULAR" : TABULAR modality + core.String? modality; - GoogleCloudAiplatformV1NotebookRuntimeTemplateRef({ - this.notebookRuntimeTemplate, + /// Preset option controlling parameters for speed-precision trade-off when + /// querying for examples. + /// + /// If omitted, defaults to `PRECISE`. + /// Possible string values are: + /// - "PRECISE" : More precise neighbors as a trade-off against slower + /// response. + /// - "FAST" : Faster response as a trade-off against less precise neighbors. + core.String? 
query; + + GoogleCloudAiplatformV1Presets({ + this.modality, + this.query, }); - GoogleCloudAiplatformV1NotebookRuntimeTemplateRef.fromJson(core.Map json_) + GoogleCloudAiplatformV1Presets.fromJson(core.Map json_) : this( - notebookRuntimeTemplate: - json_['notebookRuntimeTemplate'] as core.String?, + modality: json_['modality'] as core.String?, + query: json_['query'] as core.String?, ); core.Map toJson() => { - if (notebookRuntimeTemplate != null) - 'notebookRuntimeTemplate': notebookRuntimeTemplate!, + if (modality != null) 'modality': modality!, + if (query != null) 'query': query!, }; } -/// PSC config that is used to automatically create forwarding rule via -/// ServiceConnectionMap. -class GoogleCloudAiplatformV1PSCAutomationConfig { - /// The full name of the Google Compute Engine - /// [network](https://cloud.google.com/compute/docs/networks-and-firewalls#networks). +/// PrivateEndpoints proto is used to provide paths for users to send requests +/// privately. +/// +/// To send request via private service access, use predict_http_uri, +/// explain_http_uri or health_http_uri. To send request via private service +/// connect, use service_attachment. +class GoogleCloudAiplatformV1PrivateEndpoints { + /// Http(s) path to send explain requests. /// - /// [Format](https://cloud.google.com/compute/docs/reference/rest/v1/networks/insert): - /// `projects/{project}/global/networks/{network}`. Where {project} is a - /// project number, as in '12345', and {network} is network name. + /// Output only. + core.String? explainHttpUri; + + /// Http(s) path to send health check requests. /// - /// Required. - core.String? network; + /// Output only. + core.String? healthHttpUri; - /// Project id used to create forwarding rule. + /// Http(s) path to send prediction requests. /// - /// Required. - core.String? projectId; + /// Output only. + core.String? predictHttpUri; - GoogleCloudAiplatformV1PSCAutomationConfig({ - this.network, - this.projectId, + /// The name of the service attachment resource. + /// + /// Populated if private service connect is enabled. + /// + /// Output only. + core.String? serviceAttachment; + + GoogleCloudAiplatformV1PrivateEndpoints({ + this.explainHttpUri, + this.healthHttpUri, + this.predictHttpUri, + this.serviceAttachment, }); - GoogleCloudAiplatformV1PSCAutomationConfig.fromJson(core.Map json_) + GoogleCloudAiplatformV1PrivateEndpoints.fromJson(core.Map json_) : this( - network: json_['network'] as core.String?, - projectId: json_['projectId'] as core.String?, + explainHttpUri: json_['explainHttpUri'] as core.String?, + healthHttpUri: json_['healthHttpUri'] as core.String?, + predictHttpUri: json_['predictHttpUri'] as core.String?, + serviceAttachment: json_['serviceAttachment'] as core.String?, ); core.Map toJson() => { - if (network != null) 'network': network!, - if (projectId != null) 'projectId': projectId!, + if (explainHttpUri != null) 'explainHttpUri': explainHttpUri!, + if (healthHttpUri != null) 'healthHttpUri': healthHttpUri!, + if (predictHttpUri != null) 'predictHttpUri': predictHttpUri!, + if (serviceAttachment != null) 'serviceAttachment': serviceAttachment!, }; } -/// Input for pairwise metric. -class GoogleCloudAiplatformV1PairwiseMetricInput { - /// Pairwise metric instance. +/// Represents configuration for private service connect. +class GoogleCloudAiplatformV1PrivateServiceConnectConfig { + /// If true, expose the IndexEndpoint via private service connect. /// /// Required. - GoogleCloudAiplatformV1PairwiseMetricInstance? instance; + core.bool? 
enablePrivateServiceConnect; - /// Spec for pairwise metric. + /// A list of Projects from which the forwarding rule will target the service + /// attachment. + core.List? projectAllowlist; + + /// The name of the generated service attachment resource. /// - /// Required. - GoogleCloudAiplatformV1PairwiseMetricSpec? metricSpec; + /// This is only populated if the endpoint is deployed with + /// PrivateServiceConnect. + /// + /// Output only. + core.String? serviceAttachment; - GoogleCloudAiplatformV1PairwiseMetricInput({ - this.instance, - this.metricSpec, + GoogleCloudAiplatformV1PrivateServiceConnectConfig({ + this.enablePrivateServiceConnect, + this.projectAllowlist, + this.serviceAttachment, }); - GoogleCloudAiplatformV1PairwiseMetricInput.fromJson(core.Map json_) + GoogleCloudAiplatformV1PrivateServiceConnectConfig.fromJson(core.Map json_) : this( - instance: json_.containsKey('instance') - ? GoogleCloudAiplatformV1PairwiseMetricInstance.fromJson( - json_['instance'] as core.Map) - : null, - metricSpec: json_.containsKey('metricSpec') - ? GoogleCloudAiplatformV1PairwiseMetricSpec.fromJson( - json_['metricSpec'] as core.Map) - : null, + enablePrivateServiceConnect: + json_['enablePrivateServiceConnect'] as core.bool?, + projectAllowlist: (json_['projectAllowlist'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + serviceAttachment: json_['serviceAttachment'] as core.String?, ); core.Map toJson() => { - if (instance != null) 'instance': instance!, - if (metricSpec != null) 'metricSpec': metricSpec!, + if (enablePrivateServiceConnect != null) + 'enablePrivateServiceConnect': enablePrivateServiceConnect!, + if (projectAllowlist != null) 'projectAllowlist': projectAllowlist!, + if (serviceAttachment != null) 'serviceAttachment': serviceAttachment!, }; } -/// Pairwise metric instance. -/// -/// Usually one instance corresponds to one row in an evaluation dataset. -class GoogleCloudAiplatformV1PairwiseMetricInstance { - /// Instance specified as a json string. +/// Probe describes a health check to be performed against a container to +/// determine whether it is alive or ready to receive traffic. +class GoogleCloudAiplatformV1Probe { + /// ExecAction probes the health of a container by executing a command. + GoogleCloudAiplatformV1ProbeExecAction? exec; + + /// How often (in seconds) to perform the probe. /// - /// String key-value pairs are expected in the json_instance to render - /// PairwiseMetricSpec.instance_prompt_template. - core.String? jsonInstance; + /// Default to 10 seconds. Minimum value is 1. Must be less than + /// timeout_seconds. Maps to Kubernetes probe argument 'periodSeconds'. + core.int? periodSeconds; - GoogleCloudAiplatformV1PairwiseMetricInstance({ - this.jsonInstance, + /// Number of seconds after which the probe times out. + /// + /// Defaults to 1 second. Minimum value is 1. Must be greater or equal to + /// period_seconds. Maps to Kubernetes probe argument 'timeoutSeconds'. + core.int? timeoutSeconds; + + GoogleCloudAiplatformV1Probe({ + this.exec, + this.periodSeconds, + this.timeoutSeconds, }); - GoogleCloudAiplatformV1PairwiseMetricInstance.fromJson(core.Map json_) + GoogleCloudAiplatformV1Probe.fromJson(core.Map json_) : this( - jsonInstance: json_['jsonInstance'] as core.String?, + exec: json_.containsKey('exec') + ? 
GoogleCloudAiplatformV1ProbeExecAction.fromJson( + json_['exec'] as core.Map) + : null, + periodSeconds: json_['periodSeconds'] as core.int?, + timeoutSeconds: json_['timeoutSeconds'] as core.int?, ); core.Map toJson() => { - if (jsonInstance != null) 'jsonInstance': jsonInstance!, + if (exec != null) 'exec': exec!, + if (periodSeconds != null) 'periodSeconds': periodSeconds!, + if (timeoutSeconds != null) 'timeoutSeconds': timeoutSeconds!, }; } -/// Spec for pairwise metric result. -class GoogleCloudAiplatformV1PairwiseMetricResult { - /// Explanation for pairwise metric score. - /// - /// Output only. - core.String? explanation; +/// ExecAction specifies a command to execute. +typedef GoogleCloudAiplatformV1ProbeExecAction = $ExecAction; - /// Pairwise metric choice. - /// - /// Output only. - /// Possible string values are: - /// - "PAIRWISE_CHOICE_UNSPECIFIED" : Unspecified prediction choice. - /// - "BASELINE" : Baseline prediction wins - /// - "CANDIDATE" : Candidate prediction wins - /// - "TIE" : Winner cannot be determined - core.String? pairwiseChoice; +/// PscAutomatedEndpoints defines the output of the forwarding rule +/// automatically created by each PscAutomationConfig. +class GoogleCloudAiplatformV1PscAutomatedEndpoints { + /// Ip Address created by the automated forwarding rule. + core.String? matchAddress; - GoogleCloudAiplatformV1PairwiseMetricResult({ - this.explanation, - this.pairwiseChoice, + /// Corresponding network in pscAutomationConfigs. + core.String? network; + + /// Corresponding project_id in pscAutomationConfigs + core.String? projectId; + + GoogleCloudAiplatformV1PscAutomatedEndpoints({ + this.matchAddress, + this.network, + this.projectId, }); - GoogleCloudAiplatformV1PairwiseMetricResult.fromJson(core.Map json_) + GoogleCloudAiplatformV1PscAutomatedEndpoints.fromJson(core.Map json_) : this( - explanation: json_['explanation'] as core.String?, - pairwiseChoice: json_['pairwiseChoice'] as core.String?, + matchAddress: json_['matchAddress'] as core.String?, + network: json_['network'] as core.String?, + projectId: json_['projectId'] as core.String?, ); core.Map toJson() => { - if (explanation != null) 'explanation': explanation!, - if (pairwiseChoice != null) 'pairwiseChoice': pairwiseChoice!, + if (matchAddress != null) 'matchAddress': matchAddress!, + if (network != null) 'network': network!, + if (projectId != null) 'projectId': projectId!, }; } -/// Spec for pairwise metric. -class GoogleCloudAiplatformV1PairwiseMetricSpec { - /// Metric prompt template for pairwise metric. +/// A Model Garden Publisher Model. +class GoogleCloudAiplatformV1PublisherModel { + /// Additional information about the model's Frameworks. + /// + /// Optional. + core.List? frameworks; + + /// Indicates the launch stage of the model. + /// + /// Optional. + /// Possible string values are: + /// - "LAUNCH_STAGE_UNSPECIFIED" : The model launch stage is unspecified. + /// - "EXPERIMENTAL" : Used to indicate the PublisherModel is at Experimental + /// launch stage, available to a small set of customers. + /// - "PRIVATE_PREVIEW" : Used to indicate the PublisherModel is at Private + /// Preview launch stage, only available to a small set of customers, although + /// a larger set of customers than an Experimental launch. Previews are the + /// first launch stage used to get feedback from customers. + /// - "PUBLIC_PREVIEW" : Used to indicate the PublisherModel is at Public + /// Preview launch stage, available to all customers, although not supported + /// for production workloads. 
+ /// - "GA" : Used to indicate the PublisherModel is at GA launch stage, + /// available to all customers and ready for production workload. + core.String? launchStage; + + /// The resource name of the PublisherModel. + /// + /// Output only. + core.String? name; + + /// Indicates the open source category of the publisher model. + /// + /// Required. + /// Possible string values are: + /// - "OPEN_SOURCE_CATEGORY_UNSPECIFIED" : The open source category is + /// unspecified, which should not be used. + /// - "PROPRIETARY" : Used to indicate the PublisherModel is not open sourced. + /// - "GOOGLE_OWNED_OSS_WITH_GOOGLE_CHECKPOINT" : Used to indicate the + /// PublisherModel is a Google-owned open source model w/ Google checkpoint. + /// - "THIRD_PARTY_OWNED_OSS_WITH_GOOGLE_CHECKPOINT" : Used to indicate the + /// PublisherModel is a 3p-owned open source model w/ Google checkpoint. + /// - "GOOGLE_OWNED_OSS" : Used to indicate the PublisherModel is a + /// Google-owned pure open source model. + /// - "THIRD_PARTY_OWNED_OSS" : Used to indicate the PublisherModel is a + /// 3p-owned pure open source model. + core.String? openSourceCategory; + + /// The schemata that describes formats of the PublisherModel's predictions + /// and explanations as given and returned via PredictionService.Predict. /// - /// Required. - core.String? metricPromptTemplate; - - GoogleCloudAiplatformV1PairwiseMetricSpec({ - this.metricPromptTemplate, - }); + /// Optional. + GoogleCloudAiplatformV1PredictSchemata? predictSchemata; - GoogleCloudAiplatformV1PairwiseMetricSpec.fromJson(core.Map json_) - : this( - metricPromptTemplate: json_['metricPromptTemplate'] as core.String?, - ); + /// Used to indicate this model has a publisher model and provide the template + /// of the publisher model resource name. + /// + /// Optional. Output only. Immutable. + core.String? publisherModelTemplate; - core.Map toJson() => { - if (metricPromptTemplate != null) - 'metricPromptTemplate': metricPromptTemplate!, - }; -} + /// Supported call-to-action options. + /// + /// Optional. + GoogleCloudAiplatformV1PublisherModelCallToAction? supportedActions; -/// Input for pairwise question answering quality metric. -class GoogleCloudAiplatformV1PairwiseQuestionAnsweringQualityInput { - /// Pairwise question answering quality instance. + /// The version ID of the PublisherModel. /// - /// Required. - GoogleCloudAiplatformV1PairwiseQuestionAnsweringQualityInstance? instance; + /// A new version is committed when a new model version is uploaded under an + /// existing model id. It is an auto-incrementing decimal number in string + /// representation. + /// + /// Output only. Immutable. + core.String? versionId; - /// Spec for pairwise question answering quality score metric. + /// Indicates the state of the model version. /// - /// Required. - GoogleCloudAiplatformV1PairwiseQuestionAnsweringQualitySpec? metricSpec; + /// Optional. + /// Possible string values are: + /// - "VERSION_STATE_UNSPECIFIED" : The version state is unspecified. + /// - "VERSION_STATE_STABLE" : Used to indicate the version is stable. + /// - "VERSION_STATE_UNSTABLE" : Used to indicate the version is unstable. + core.String? 
versionState; - GoogleCloudAiplatformV1PairwiseQuestionAnsweringQualityInput({ - this.instance, - this.metricSpec, + GoogleCloudAiplatformV1PublisherModel({ + this.frameworks, + this.launchStage, + this.name, + this.openSourceCategory, + this.predictSchemata, + this.publisherModelTemplate, + this.supportedActions, + this.versionId, + this.versionState, }); - GoogleCloudAiplatformV1PairwiseQuestionAnsweringQualityInput.fromJson( - core.Map json_) + GoogleCloudAiplatformV1PublisherModel.fromJson(core.Map json_) : this( - instance: json_.containsKey('instance') - ? GoogleCloudAiplatformV1PairwiseQuestionAnsweringQualityInstance - .fromJson( - json_['instance'] as core.Map) + frameworks: (json_['frameworks'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + launchStage: json_['launchStage'] as core.String?, + name: json_['name'] as core.String?, + openSourceCategory: json_['openSourceCategory'] as core.String?, + predictSchemata: json_.containsKey('predictSchemata') + ? GoogleCloudAiplatformV1PredictSchemata.fromJson( + json_['predictSchemata'] + as core.Map) : null, - metricSpec: json_.containsKey('metricSpec') - ? GoogleCloudAiplatformV1PairwiseQuestionAnsweringQualitySpec - .fromJson(json_['metricSpec'] + publisherModelTemplate: + json_['publisherModelTemplate'] as core.String?, + supportedActions: json_.containsKey('supportedActions') + ? GoogleCloudAiplatformV1PublisherModelCallToAction.fromJson( + json_['supportedActions'] as core.Map) : null, + versionId: json_['versionId'] as core.String?, + versionState: json_['versionState'] as core.String?, ); core.Map toJson() => { - if (instance != null) 'instance': instance!, - if (metricSpec != null) 'metricSpec': metricSpec!, + if (frameworks != null) 'frameworks': frameworks!, + if (launchStage != null) 'launchStage': launchStage!, + if (name != null) 'name': name!, + if (openSourceCategory != null) + 'openSourceCategory': openSourceCategory!, + if (predictSchemata != null) 'predictSchemata': predictSchemata!, + if (publisherModelTemplate != null) + 'publisherModelTemplate': publisherModelTemplate!, + if (supportedActions != null) 'supportedActions': supportedActions!, + if (versionId != null) 'versionId': versionId!, + if (versionState != null) 'versionState': versionState!, }; } -/// Spec for pairwise question answering quality instance. -class GoogleCloudAiplatformV1PairwiseQuestionAnsweringQualityInstance { - /// Output of the baseline model. +/// Actions could take on this Publisher Model. +class GoogleCloudAiplatformV1PublisherModelCallToAction { + /// Create application using the PublisherModel. /// - /// Required. - core.String? baselinePrediction; + /// Optional. + GoogleCloudAiplatformV1PublisherModelCallToActionRegionalResourceReferences? + createApplication; - /// Text to answer the question. + /// Deploy the PublisherModel to Vertex Endpoint. /// - /// Required. - core.String? context; + /// Optional. + GoogleCloudAiplatformV1PublisherModelCallToActionDeploy? deploy; - /// Question Answering prompt for LLM. + /// Deploy PublisherModel to Google Kubernetes Engine. /// - /// Required. - core.String? instruction; + /// Optional. + GoogleCloudAiplatformV1PublisherModelCallToActionDeployGke? deployGke; - /// Output of the candidate model. + /// Multiple setups to deploy the PublisherModel to Vertex Endpoint. /// - /// Required. - core.String? prediction; + /// Optional. + GoogleCloudAiplatformV1PublisherModelCallToActionDeployVertex? + multiDeployVertex; - /// Ground truth used to compare against the prediction. 
+ /// Open evaluation pipeline of the PublisherModel. /// /// Optional. - core.String? reference; - - GoogleCloudAiplatformV1PairwiseQuestionAnsweringQualityInstance({ - this.baselinePrediction, - this.context, - this.instruction, - this.prediction, - this.reference, - }); - - GoogleCloudAiplatformV1PairwiseQuestionAnsweringQualityInstance.fromJson( - core.Map json_) - : this( - baselinePrediction: json_['baselinePrediction'] as core.String?, - context: json_['context'] as core.String?, - instruction: json_['instruction'] as core.String?, - prediction: json_['prediction'] as core.String?, - reference: json_['reference'] as core.String?, - ); - - core.Map toJson() => { - if (baselinePrediction != null) - 'baselinePrediction': baselinePrediction!, - if (context != null) 'context': context!, - if (instruction != null) 'instruction': instruction!, - if (prediction != null) 'prediction': prediction!, - if (reference != null) 'reference': reference!, - }; -} + GoogleCloudAiplatformV1PublisherModelCallToActionRegionalResourceReferences? + openEvaluationPipeline; -/// Spec for pairwise question answering quality result. -class GoogleCloudAiplatformV1PairwiseQuestionAnsweringQualityResult { - /// Confidence for question answering quality score. + /// Open fine-tuning pipeline of the PublisherModel. /// - /// Output only. - core.double? confidence; + /// Optional. + GoogleCloudAiplatformV1PublisherModelCallToActionRegionalResourceReferences? + openFineTuningPipeline; - /// Explanation for question answering quality score. + /// Open fine-tuning pipelines of the PublisherModel. /// - /// Output only. - core.String? explanation; + /// Optional. + GoogleCloudAiplatformV1PublisherModelCallToActionOpenFineTuningPipelines? + openFineTuningPipelines; - /// Pairwise question answering prediction choice. + /// Open in Generation AI Studio. /// - /// Output only. - /// Possible string values are: - /// - "PAIRWISE_CHOICE_UNSPECIFIED" : Unspecified prediction choice. - /// - "BASELINE" : Baseline prediction wins - /// - "CANDIDATE" : Candidate prediction wins - /// - "TIE" : Winner cannot be determined - core.String? pairwiseChoice; + /// Optional. + GoogleCloudAiplatformV1PublisherModelCallToActionRegionalResourceReferences? + openGenerationAiStudio; - GoogleCloudAiplatformV1PairwiseQuestionAnsweringQualityResult({ - this.confidence, - this.explanation, - this.pairwiseChoice, - }); + /// Open Genie / Playground. + /// + /// Optional. + GoogleCloudAiplatformV1PublisherModelCallToActionRegionalResourceReferences? + openGenie; - GoogleCloudAiplatformV1PairwiseQuestionAnsweringQualityResult.fromJson( - core.Map json_) - : this( - confidence: (json_['confidence'] as core.num?)?.toDouble(), - explanation: json_['explanation'] as core.String?, - pairwiseChoice: json_['pairwiseChoice'] as core.String?, - ); + /// Open notebook of the PublisherModel. + /// + /// Optional. + GoogleCloudAiplatformV1PublisherModelCallToActionRegionalResourceReferences? + openNotebook; - core.Map toJson() => { - if (confidence != null) 'confidence': confidence!, - if (explanation != null) 'explanation': explanation!, - if (pairwiseChoice != null) 'pairwiseChoice': pairwiseChoice!, - }; -} + /// Open notebooks of the PublisherModel. + /// + /// Optional. + GoogleCloudAiplatformV1PublisherModelCallToActionOpenNotebooks? openNotebooks; -/// Spec for pairwise question answering quality score metric. 
-typedef GoogleCloudAiplatformV1PairwiseQuestionAnsweringQualitySpec - = $QuestionAnsweringQualitySpec; + /// Open prompt-tuning pipeline of the PublisherModel. + /// + /// Optional. + GoogleCloudAiplatformV1PublisherModelCallToActionRegionalResourceReferences? + openPromptTuningPipeline; -/// Input for pairwise summarization quality metric. -class GoogleCloudAiplatformV1PairwiseSummarizationQualityInput { - /// Pairwise summarization quality instance. + /// Request for access. /// - /// Required. - GoogleCloudAiplatformV1PairwiseSummarizationQualityInstance? instance; + /// Optional. + GoogleCloudAiplatformV1PublisherModelCallToActionRegionalResourceReferences? + requestAccess; - /// Spec for pairwise summarization quality score metric. + /// To view Rest API docs. /// - /// Required. - GoogleCloudAiplatformV1PairwiseSummarizationQualitySpec? metricSpec; + /// Optional. + GoogleCloudAiplatformV1PublisherModelCallToActionViewRestApi? viewRestApi; - GoogleCloudAiplatformV1PairwiseSummarizationQualityInput({ - this.instance, - this.metricSpec, + GoogleCloudAiplatformV1PublisherModelCallToAction({ + this.createApplication, + this.deploy, + this.deployGke, + this.multiDeployVertex, + this.openEvaluationPipeline, + this.openFineTuningPipeline, + this.openFineTuningPipelines, + this.openGenerationAiStudio, + this.openGenie, + this.openNotebook, + this.openNotebooks, + this.openPromptTuningPipeline, + this.requestAccess, + this.viewRestApi, }); - GoogleCloudAiplatformV1PairwiseSummarizationQualityInput.fromJson( - core.Map json_) + GoogleCloudAiplatformV1PublisherModelCallToAction.fromJson(core.Map json_) : this( - instance: json_.containsKey('instance') - ? GoogleCloudAiplatformV1PairwiseSummarizationQualityInstance + createApplication: json_.containsKey('createApplication') + ? GoogleCloudAiplatformV1PublisherModelCallToActionRegionalResourceReferences + .fromJson(json_['createApplication'] + as core.Map) + : null, + deploy: json_.containsKey('deploy') + ? GoogleCloudAiplatformV1PublisherModelCallToActionDeploy .fromJson( - json_['instance'] as core.Map) + json_['deploy'] as core.Map) : null, - metricSpec: json_.containsKey('metricSpec') - ? GoogleCloudAiplatformV1PairwiseSummarizationQualitySpec - .fromJson(json_['metricSpec'] + deployGke: json_.containsKey('deployGke') + ? GoogleCloudAiplatformV1PublisherModelCallToActionDeployGke + .fromJson( + json_['deployGke'] as core.Map) + : null, + multiDeployVertex: json_.containsKey('multiDeployVertex') + ? GoogleCloudAiplatformV1PublisherModelCallToActionDeployVertex + .fromJson(json_['multiDeployVertex'] + as core.Map) + : null, + openEvaluationPipeline: json_.containsKey('openEvaluationPipeline') + ? GoogleCloudAiplatformV1PublisherModelCallToActionRegionalResourceReferences + .fromJson(json_['openEvaluationPipeline'] + as core.Map) + : null, + openFineTuningPipeline: json_.containsKey('openFineTuningPipeline') + ? GoogleCloudAiplatformV1PublisherModelCallToActionRegionalResourceReferences + .fromJson(json_['openFineTuningPipeline'] + as core.Map) + : null, + openFineTuningPipelines: json_.containsKey('openFineTuningPipelines') + ? GoogleCloudAiplatformV1PublisherModelCallToActionOpenFineTuningPipelines + .fromJson(json_['openFineTuningPipelines'] + as core.Map) + : null, + openGenerationAiStudio: json_.containsKey('openGenerationAiStudio') + ? GoogleCloudAiplatformV1PublisherModelCallToActionRegionalResourceReferences + .fromJson(json_['openGenerationAiStudio'] + as core.Map) + : null, + openGenie: json_.containsKey('openGenie') + ? 
GoogleCloudAiplatformV1PublisherModelCallToActionRegionalResourceReferences + .fromJson( + json_['openGenie'] as core.Map) + : null, + openNotebook: json_.containsKey('openNotebook') + ? GoogleCloudAiplatformV1PublisherModelCallToActionRegionalResourceReferences + .fromJson(json_['openNotebook'] + as core.Map) + : null, + openNotebooks: json_.containsKey('openNotebooks') + ? GoogleCloudAiplatformV1PublisherModelCallToActionOpenNotebooks + .fromJson(json_['openNotebooks'] + as core.Map) + : null, + openPromptTuningPipeline: json_ + .containsKey('openPromptTuningPipeline') + ? GoogleCloudAiplatformV1PublisherModelCallToActionRegionalResourceReferences + .fromJson(json_['openPromptTuningPipeline'] + as core.Map) + : null, + requestAccess: json_.containsKey('requestAccess') + ? GoogleCloudAiplatformV1PublisherModelCallToActionRegionalResourceReferences + .fromJson(json_['requestAccess'] + as core.Map) + : null, + viewRestApi: json_.containsKey('viewRestApi') + ? GoogleCloudAiplatformV1PublisherModelCallToActionViewRestApi + .fromJson(json_['viewRestApi'] as core.Map) : null, ); core.Map toJson() => { - if (instance != null) 'instance': instance!, - if (metricSpec != null) 'metricSpec': metricSpec!, + if (createApplication != null) 'createApplication': createApplication!, + if (deploy != null) 'deploy': deploy!, + if (deployGke != null) 'deployGke': deployGke!, + if (multiDeployVertex != null) 'multiDeployVertex': multiDeployVertex!, + if (openEvaluationPipeline != null) + 'openEvaluationPipeline': openEvaluationPipeline!, + if (openFineTuningPipeline != null) + 'openFineTuningPipeline': openFineTuningPipeline!, + if (openFineTuningPipelines != null) + 'openFineTuningPipelines': openFineTuningPipelines!, + if (openGenerationAiStudio != null) + 'openGenerationAiStudio': openGenerationAiStudio!, + if (openGenie != null) 'openGenie': openGenie!, + if (openNotebook != null) 'openNotebook': openNotebook!, + if (openNotebooks != null) 'openNotebooks': openNotebooks!, + if (openPromptTuningPipeline != null) + 'openPromptTuningPipeline': openPromptTuningPipeline!, + if (requestAccess != null) 'requestAccess': requestAccess!, + if (viewRestApi != null) 'viewRestApi': viewRestApi!, }; } -/// Spec for pairwise summarization quality instance. -class GoogleCloudAiplatformV1PairwiseSummarizationQualityInstance { - /// Output of the baseline model. +/// Model metadata that is needed for UploadModel or DeployModel/CreateEndpoint +/// requests. +class GoogleCloudAiplatformV1PublisherModelCallToActionDeploy { + /// The path to the directory containing the Model artifact and any of its + /// supporting files. /// - /// Required. - core.String? baselinePrediction; + /// Optional. + core.String? artifactUri; - /// Text to be summarized. + /// A description of resources that to large degree are decided by Vertex AI, + /// and require only a modest additional configuration. + GoogleCloudAiplatformV1AutomaticResources? automaticResources; + + /// The specification of the container that is to be used when deploying this + /// Model in Vertex AI. /// - /// Required. - core.String? context; + /// Not present for Large Models. + /// + /// Optional. + GoogleCloudAiplatformV1ModelContainerSpec? containerSpec; - /// Summarization prompt for LLM. + /// A description of resources that are dedicated to the DeployedModel, and + /// that need a higher degree of manual configuration. + GoogleCloudAiplatformV1DedicatedResources? dedicatedResources; + + /// Metadata information about this deployment config. /// - /// Required. 
- core.String? instruction; + /// Optional. + GoogleCloudAiplatformV1PublisherModelCallToActionDeployDeployMetadata? + deployMetadata; - /// Output of the candidate model. + /// The name of the deploy task (e.g., "text to image generation"). /// - /// Required. - core.String? prediction; + /// Optional. + core.String? deployTaskName; - /// Ground truth used to compare against the prediction. + /// Large model reference. + /// + /// When this is set, model_artifact_spec is not needed. /// /// Optional. - core.String? reference; + GoogleCloudAiplatformV1LargeModelReference? largeModelReference; - GoogleCloudAiplatformV1PairwiseSummarizationQualityInstance({ - this.baselinePrediction, - this.context, - this.instruction, - this.prediction, - this.reference, + /// Default model display name. + /// + /// Optional. + core.String? modelDisplayName; + + /// The signed URI for ephemeral Cloud Storage access to model artifact. + /// + /// Optional. + core.String? publicArtifactUri; + + /// The resource name of the shared DeploymentResourcePool to deploy on. + /// + /// Format: + /// `projects/{project}/locations/{location}/deploymentResourcePools/{deployment_resource_pool}` + core.String? sharedResources; + + /// The title of the regional resource reference. + /// + /// Required. + core.String? title; + + GoogleCloudAiplatformV1PublisherModelCallToActionDeploy({ + this.artifactUri, + this.automaticResources, + this.containerSpec, + this.dedicatedResources, + this.deployMetadata, + this.deployTaskName, + this.largeModelReference, + this.modelDisplayName, + this.publicArtifactUri, + this.sharedResources, + this.title, }); - GoogleCloudAiplatformV1PairwiseSummarizationQualityInstance.fromJson( + GoogleCloudAiplatformV1PublisherModelCallToActionDeploy.fromJson( core.Map json_) : this( - baselinePrediction: json_['baselinePrediction'] as core.String?, - context: json_['context'] as core.String?, - instruction: json_['instruction'] as core.String?, - prediction: json_['prediction'] as core.String?, - reference: json_['reference'] as core.String?, + artifactUri: json_['artifactUri'] as core.String?, + automaticResources: json_.containsKey('automaticResources') + ? GoogleCloudAiplatformV1AutomaticResources.fromJson( + json_['automaticResources'] + as core.Map) + : null, + containerSpec: json_.containsKey('containerSpec') + ? GoogleCloudAiplatformV1ModelContainerSpec.fromJson( + json_['containerSpec'] as core.Map) + : null, + dedicatedResources: json_.containsKey('dedicatedResources') + ? GoogleCloudAiplatformV1DedicatedResources.fromJson( + json_['dedicatedResources'] + as core.Map) + : null, + deployMetadata: json_.containsKey('deployMetadata') + ? GoogleCloudAiplatformV1PublisherModelCallToActionDeployDeployMetadata + .fromJson(json_['deployMetadata'] + as core.Map) + : null, + deployTaskName: json_['deployTaskName'] as core.String?, + largeModelReference: json_.containsKey('largeModelReference') + ? 
GoogleCloudAiplatformV1LargeModelReference.fromJson( + json_['largeModelReference'] + as core.Map) + : null, + modelDisplayName: json_['modelDisplayName'] as core.String?, + publicArtifactUri: json_['publicArtifactUri'] as core.String?, + sharedResources: json_['sharedResources'] as core.String?, + title: json_['title'] as core.String?, ); core.Map toJson() => { - if (baselinePrediction != null) - 'baselinePrediction': baselinePrediction!, - if (context != null) 'context': context!, - if (instruction != null) 'instruction': instruction!, - if (prediction != null) 'prediction': prediction!, - if (reference != null) 'reference': reference!, + if (artifactUri != null) 'artifactUri': artifactUri!, + if (automaticResources != null) + 'automaticResources': automaticResources!, + if (containerSpec != null) 'containerSpec': containerSpec!, + if (dedicatedResources != null) + 'dedicatedResources': dedicatedResources!, + if (deployMetadata != null) 'deployMetadata': deployMetadata!, + if (deployTaskName != null) 'deployTaskName': deployTaskName!, + if (largeModelReference != null) + 'largeModelReference': largeModelReference!, + if (modelDisplayName != null) 'modelDisplayName': modelDisplayName!, + if (publicArtifactUri != null) 'publicArtifactUri': publicArtifactUri!, + if (sharedResources != null) 'sharedResources': sharedResources!, + if (title != null) 'title': title!, }; } -/// Spec for pairwise summarization quality result. -class GoogleCloudAiplatformV1PairwiseSummarizationQualityResult { - /// Confidence for summarization quality score. +/// Metadata information about the deployment for managing deployment config. +class GoogleCloudAiplatformV1PublisherModelCallToActionDeployDeployMetadata { + /// Labels for the deployment config. /// - /// Output only. - core.double? confidence; - - /// Explanation for summarization quality score. + /// For managing deployment config like verifying, source of deployment + /// config, etc. /// - /// Output only. - core.String? explanation; + /// Optional. + core.Map? labels; - /// Pairwise summarization prediction choice. + /// Sample request for deployed endpoint. /// - /// Output only. - /// Possible string values are: - /// - "PAIRWISE_CHOICE_UNSPECIFIED" : Unspecified prediction choice. - /// - "BASELINE" : Baseline prediction wins - /// - "CANDIDATE" : Candidate prediction wins - /// - "TIE" : Winner cannot be determined - core.String? pairwiseChoice; + /// Optional. + core.String? 
sampleRequest; - GoogleCloudAiplatformV1PairwiseSummarizationQualityResult({ - this.confidence, - this.explanation, - this.pairwiseChoice, + GoogleCloudAiplatformV1PublisherModelCallToActionDeployDeployMetadata({ + this.labels, + this.sampleRequest, }); - GoogleCloudAiplatformV1PairwiseSummarizationQualityResult.fromJson( + GoogleCloudAiplatformV1PublisherModelCallToActionDeployDeployMetadata.fromJson( core.Map json_) : this( - confidence: (json_['confidence'] as core.num?)?.toDouble(), - explanation: json_['explanation'] as core.String?, - pairwiseChoice: json_['pairwiseChoice'] as core.String?, + labels: + (json_['labels'] as core.Map?)?.map( + (key, value) => core.MapEntry( + key, + value as core.String, + ), + ), + sampleRequest: json_['sampleRequest'] as core.String?, ); core.Map toJson() => { - if (confidence != null) 'confidence': confidence!, - if (explanation != null) 'explanation': explanation!, - if (pairwiseChoice != null) 'pairwiseChoice': pairwiseChoice!, + if (labels != null) 'labels': labels!, + if (sampleRequest != null) 'sampleRequest': sampleRequest!, }; } -/// Spec for pairwise summarization quality score metric. -class GoogleCloudAiplatformV1PairwiseSummarizationQualitySpec { - /// Whether to use instance.reference to compute pairwise summarization - /// quality. - /// - /// Optional. - core.bool? useReference; - - /// Which version to use for evaluation. +/// Configurations for PublisherModel GKE deployment +class GoogleCloudAiplatformV1PublisherModelCallToActionDeployGke { + /// GKE deployment configuration in yaml format. /// /// Optional. - core.int? version; + core.List? gkeYamlConfigs; - GoogleCloudAiplatformV1PairwiseSummarizationQualitySpec({ - this.useReference, - this.version, + GoogleCloudAiplatformV1PublisherModelCallToActionDeployGke({ + this.gkeYamlConfigs, }); - GoogleCloudAiplatformV1PairwiseSummarizationQualitySpec.fromJson( + GoogleCloudAiplatformV1PublisherModelCallToActionDeployGke.fromJson( core.Map json_) : this( - useReference: json_['useReference'] as core.bool?, - version: json_['version'] as core.int?, + gkeYamlConfigs: (json_['gkeYamlConfigs'] as core.List?) + ?.map((value) => value as core.String) + .toList(), ); core.Map toJson() => { - if (useReference != null) 'useReference': useReference!, - if (version != null) 'version': version!, + if (gkeYamlConfigs != null) 'gkeYamlConfigs': gkeYamlConfigs!, }; } -/// A datatype containing media that is part of a multi-part `Content` message. -/// -/// A `Part` consists of data which has an associated datatype. A `Part` can -/// only contain one of the accepted types in `Part.data`. A `Part` must have a -/// fixed IANA MIME type identifying the type and subtype of the media if -/// `inline_data` or `file_data` field is filled with raw bytes. -class GoogleCloudAiplatformV1Part { - /// URI based data. - /// - /// Optional. - GoogleCloudAiplatformV1FileData? fileData; - - /// A predicted \[FunctionCall\] returned from the model that contains a - /// string representing the \[FunctionDeclaration.name\] with the parameters - /// and their values. +/// Multiple setups to deploy the PublisherModel. +class GoogleCloudAiplatformV1PublisherModelCallToActionDeployVertex { + /// One click deployment configurations. /// /// Optional. - GoogleCloudAiplatformV1FunctionCall? functionCall; + core.List? 
+ multiDeployVertex; - /// The result output of a \[FunctionCall\] that contains a string - /// representing the \[FunctionDeclaration.name\] and a structured JSON object - /// containing any output from the function call. - /// - /// It is used as context to the model. - /// - /// Optional. - GoogleCloudAiplatformV1FunctionResponse? functionResponse; + GoogleCloudAiplatformV1PublisherModelCallToActionDeployVertex({ + this.multiDeployVertex, + }); - /// Inlined bytes data. - /// - /// Optional. - GoogleCloudAiplatformV1Blob? inlineData; + GoogleCloudAiplatformV1PublisherModelCallToActionDeployVertex.fromJson( + core.Map json_) + : this( + multiDeployVertex: (json_['multiDeployVertex'] as core.List?) + ?.map((value) => + GoogleCloudAiplatformV1PublisherModelCallToActionDeploy + .fromJson(value as core.Map)) + .toList(), + ); - /// Text part (can be code). - /// - /// Optional. - core.String? text; + core.Map toJson() => { + if (multiDeployVertex != null) 'multiDeployVertex': multiDeployVertex!, + }; +} - /// Video metadata. - /// - /// The metadata should only be specified while the video data is presented in - /// inline_data or file_data. +/// Open fine tuning pipelines. +class GoogleCloudAiplatformV1PublisherModelCallToActionOpenFineTuningPipelines { + /// Regional resource references to fine tuning pipelines. /// - /// Optional. - GoogleCloudAiplatformV1VideoMetadata? videoMetadata; + /// Required. + core.List< + GoogleCloudAiplatformV1PublisherModelCallToActionRegionalResourceReferences>? + fineTuningPipelines; - GoogleCloudAiplatformV1Part({ - this.fileData, - this.functionCall, - this.functionResponse, - this.inlineData, - this.text, - this.videoMetadata, + GoogleCloudAiplatformV1PublisherModelCallToActionOpenFineTuningPipelines({ + this.fineTuningPipelines, }); - GoogleCloudAiplatformV1Part.fromJson(core.Map json_) + GoogleCloudAiplatformV1PublisherModelCallToActionOpenFineTuningPipelines.fromJson( + core.Map json_) : this( - fileData: json_.containsKey('fileData') - ? GoogleCloudAiplatformV1FileData.fromJson( - json_['fileData'] as core.Map) - : null, - functionCall: json_.containsKey('functionCall') - ? GoogleCloudAiplatformV1FunctionCall.fromJson( - json_['functionCall'] as core.Map) - : null, - functionResponse: json_.containsKey('functionResponse') - ? GoogleCloudAiplatformV1FunctionResponse.fromJson( - json_['functionResponse'] - as core.Map) - : null, - inlineData: json_.containsKey('inlineData') - ? GoogleCloudAiplatformV1Blob.fromJson( - json_['inlineData'] as core.Map) - : null, - text: json_['text'] as core.String?, - videoMetadata: json_.containsKey('videoMetadata') - ? GoogleCloudAiplatformV1VideoMetadata.fromJson( - json_['videoMetadata'] as core.Map) - : null, + fineTuningPipelines: (json_['fineTuningPipelines'] as core.List?) + ?.map((value) => + GoogleCloudAiplatformV1PublisherModelCallToActionRegionalResourceReferences + .fromJson(value as core.Map)) + .toList(), ); core.Map toJson() => { - if (fileData != null) 'fileData': fileData!, - if (functionCall != null) 'functionCall': functionCall!, - if (functionResponse != null) 'functionResponse': functionResponse!, - if (inlineData != null) 'inlineData': inlineData!, - if (text != null) 'text': text!, - if (videoMetadata != null) 'videoMetadata': videoMetadata!, + if (fineTuningPipelines != null) + 'fineTuningPipelines': fineTuningPipelines!, }; } -/// Request message for JobService.PauseModelDeploymentMonitoringJob. 
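// ---------------------------------------------------------------------------
// Editorial sketch (not part of the generated diff): a minimal round trip
// through the newly added DeployGke message, assuming the usual
// package:googleapis layout (package:googleapis/aiplatform/v1.dart). The YAML
// string below is a made-up placeholder, not real deployment configuration.
import 'dart:convert';

import 'package:googleapis/aiplatform/v1.dart';

void main() {
  final gke = GoogleCloudAiplatformV1PublisherModelCallToActionDeployGke(
    gkeYamlConfigs: ['# placeholder: deployment.yaml contents'],
  );
  // The generated toJson()/fromJson() pair is symmetric, so the value should
  // survive a JSON round trip unchanged.
  final decoded =
      GoogleCloudAiplatformV1PublisherModelCallToActionDeployGke.fromJson(
          jsonDecode(jsonEncode(gke.toJson())) as Map<String, dynamic>);
  print(decoded.gkeYamlConfigs); // [# placeholder: deployment.yaml contents]
}
// ---------------------------------------------------------------------------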
-typedef GoogleCloudAiplatformV1PauseModelDeploymentMonitoringJobRequest - = $Empty; - -/// Request message for ScheduleService.PauseSchedule. -typedef GoogleCloudAiplatformV1PauseScheduleRequest = $Empty; - -/// Represents the spec of persistent disk options. -class GoogleCloudAiplatformV1PersistentDiskSpec { - /// Size in GB of the disk (default is 100GB). - core.String? diskSizeGb; - - /// Type of the disk (default is "pd-standard"). +/// Open notebooks. +class GoogleCloudAiplatformV1PublisherModelCallToActionOpenNotebooks { + /// Regional resource references to notebooks. /// - /// Valid values: "pd-ssd" (Persistent Disk Solid State Drive) "pd-standard" - /// (Persistent Disk Hard Disk Drive) "pd-balanced" (Balanced Persistent Disk) - /// "pd-extreme" (Extreme Persistent Disk) - core.String? diskType; + /// Required. + core.List< + GoogleCloudAiplatformV1PublisherModelCallToActionRegionalResourceReferences>? + notebooks; - GoogleCloudAiplatformV1PersistentDiskSpec({ - this.diskSizeGb, - this.diskType, + GoogleCloudAiplatformV1PublisherModelCallToActionOpenNotebooks({ + this.notebooks, }); - GoogleCloudAiplatformV1PersistentDiskSpec.fromJson(core.Map json_) + GoogleCloudAiplatformV1PublisherModelCallToActionOpenNotebooks.fromJson( + core.Map json_) : this( - diskSizeGb: json_['diskSizeGb'] as core.String?, - diskType: json_['diskType'] as core.String?, + notebooks: (json_['notebooks'] as core.List?) + ?.map((value) => + GoogleCloudAiplatformV1PublisherModelCallToActionRegionalResourceReferences + .fromJson(value as core.Map)) + .toList(), ); core.Map toJson() => { - if (diskSizeGb != null) 'diskSizeGb': diskSizeGb!, - if (diskType != null) 'diskType': diskType!, + if (notebooks != null) 'notebooks': notebooks!, }; } -/// Represents long-lasting resources that are dedicated to users to runs custom -/// workloads. +/// The regional resource name or the URI. /// -/// A PersistentResource can have multiple node pools and each node pool can -/// have its own machine spec. -class GoogleCloudAiplatformV1PersistentResource { - /// Time when the PersistentResource was created. - /// - /// Output only. - core.String? createTime; +/// Key is region, e.g., us-central1, europe-west2, global, etc.. +class GoogleCloudAiplatformV1PublisherModelCallToActionRegionalResourceReferences { + /// Required. + core.Map? + references; - /// The display name of the PersistentResource. - /// - /// The name can be up to 128 characters long and can consist of any UTF-8 - /// characters. + /// Description of the resource. /// /// Optional. - core.String? displayName; + core.String? resourceDescription; - /// Customer-managed encryption key spec for a PersistentResource. - /// - /// If set, this PersistentResource and all sub-resources of this - /// PersistentResource will be secured by this key. + /// Title of the resource. /// /// Optional. - GoogleCloudAiplatformV1EncryptionSpec? encryptionSpec; + core.String? resourceTitle; - /// Only populated when persistent resource's state is `STOPPING` or `ERROR`. + /// Use case (CUJ) of the resource. /// - /// Output only. - GoogleRpcStatus? error; + /// Optional. + core.String? resourceUseCase; - /// The labels with user-defined metadata to organize PersistentResource. /// - /// Label keys and values can be no longer than 64 characters (Unicode - /// codepoints), can only contain lowercase letters, numeric characters, - /// underscores and dashes. International characters are allowed. See - /// https://goo.gl/xmQnxf for more information and examples of labels. 
/// - /// Optional. - core.Map? labels; + /// Required. + core.String? title; - /// Resource name of a PersistentResource. - /// - /// Immutable. - core.String? name; + GoogleCloudAiplatformV1PublisherModelCallToActionRegionalResourceReferences({ + this.references, + this.resourceDescription, + this.resourceTitle, + this.resourceUseCase, + this.title, + }); - /// The full name of the Compute Engine - /// \[network\](/compute/docs/networks-and-firewalls#networks) to peered with - /// Vertex AI to host the persistent resources. - /// - /// For example, `projects/12345/global/networks/myVPC`. - /// \[Format\](/compute/docs/reference/rest/v1/networks/insert) is of the form - /// `projects/{project}/global/networks/{network}`. Where {project} is a - /// project number, as in `12345`, and {network} is a network name. To specify - /// this field, you must have already - /// [configured VPC Network Peering for Vertex AI](https://cloud.google.com/vertex-ai/docs/general/vpc-peering). - /// If this field is left unspecified, the resources aren't peered with any - /// network. - /// - /// Optional. - core.String? network; + GoogleCloudAiplatformV1PublisherModelCallToActionRegionalResourceReferences.fromJson( + core.Map json_) + : this( + references: + (json_['references'] as core.Map?) + ?.map( + (key, value) => core.MapEntry( + key, + GoogleCloudAiplatformV1PublisherModelResourceReference.fromJson( + value as core.Map), + ), + ), + resourceDescription: json_['resourceDescription'] as core.String?, + resourceTitle: json_['resourceTitle'] as core.String?, + resourceUseCase: json_['resourceUseCase'] as core.String?, + title: json_['title'] as core.String?, + ); - /// A list of names for the reserved IP ranges under the VPC network that can - /// be used for this persistent resource. - /// - /// If set, we will deploy the persistent resource within the provided IP - /// ranges. Otherwise, the persistent resource is deployed to any IP ranges - /// under the provided VPC network. Example: \['vertex-ai-ip-range'\]. - /// - /// Optional. - core.List? reservedIpRanges; + core.Map toJson() => { + if (references != null) 'references': references!, + if (resourceDescription != null) + 'resourceDescription': resourceDescription!, + if (resourceTitle != null) 'resourceTitle': resourceTitle!, + if (resourceUseCase != null) 'resourceUseCase': resourceUseCase!, + if (title != null) 'title': title!, + }; +} - /// The spec of the pools of different resources. - /// +/// Rest API docs. +class GoogleCloudAiplatformV1PublisherModelCallToActionViewRestApi { /// Required. - core.List? resourcePools; - - /// Runtime information of the Persistent Resource. - /// - /// Output only. - GoogleCloudAiplatformV1ResourceRuntime? resourceRuntime; + core.List? documentations; - /// Persistent Resource runtime spec. - /// - /// For example, used for Ray cluster configuration. + /// The title of the view rest API. /// - /// Optional. - GoogleCloudAiplatformV1ResourceRuntimeSpec? resourceRuntimeSpec; + /// Required. + core.String? title; - /// Reserved for future use. - /// - /// Output only. - core.bool? satisfiesPzi; + GoogleCloudAiplatformV1PublisherModelCallToActionViewRestApi({ + this.documentations, + this.title, + }); - /// Reserved for future use. - /// - /// Output only. - core.bool? satisfiesPzs; + GoogleCloudAiplatformV1PublisherModelCallToActionViewRestApi.fromJson( + core.Map json_) + : this( + documentations: (json_['documentations'] as core.List?) 
+ ?.map((value) => + GoogleCloudAiplatformV1PublisherModelDocumentation.fromJson( + value as core.Map)) + .toList(), + title: json_['title'] as core.String?, + ); - /// Time when the PersistentResource for the first time entered the `RUNNING` - /// state. - /// - /// Output only. - core.String? startTime; + core.Map toJson() => { + if (documentations != null) 'documentations': documentations!, + if (title != null) 'title': title!, + }; +} - /// The detailed state of a Study. +/// A named piece of documentation. +class GoogleCloudAiplatformV1PublisherModelDocumentation { + /// Content of this piece of document (in Markdown format). /// - /// Output only. - /// Possible string values are: - /// - "STATE_UNSPECIFIED" : Not set. - /// - "PROVISIONING" : The PROVISIONING state indicates the persistent - /// resources is being created. - /// - "RUNNING" : The RUNNING state indicates the persistent resource is - /// healthy and fully usable. - /// - "STOPPING" : The STOPPING state indicates the persistent resource is - /// being deleted. - /// - "ERROR" : The ERROR state indicates the persistent resource may be - /// unusable. Details can be found in the `error` field. - /// - "REBOOTING" : The REBOOTING state indicates the persistent resource is - /// being rebooted (PR is not available right now but is expected to be ready - /// again later). - /// - "UPDATING" : The UPDATING state indicates the persistent resource is - /// being updated. - core.String? state; + /// Required. + core.String? content; - /// Time when the PersistentResource was most recently updated. + /// E.g., OVERVIEW, USE CASES, DOCUMENTATION, SDK & SAMPLES, JAVA, NODE.JS, + /// etc.. /// - /// Output only. - core.String? updateTime; + /// Required. + core.String? title; - GoogleCloudAiplatformV1PersistentResource({ - this.createTime, - this.displayName, - this.encryptionSpec, - this.error, - this.labels, - this.name, - this.network, - this.reservedIpRanges, - this.resourcePools, - this.resourceRuntime, - this.resourceRuntimeSpec, - this.satisfiesPzi, - this.satisfiesPzs, - this.startTime, - this.state, - this.updateTime, + GoogleCloudAiplatformV1PublisherModelDocumentation({ + this.content, + this.title, }); - GoogleCloudAiplatformV1PersistentResource.fromJson(core.Map json_) + GoogleCloudAiplatformV1PublisherModelDocumentation.fromJson(core.Map json_) : this( - createTime: json_['createTime'] as core.String?, - displayName: json_['displayName'] as core.String?, - encryptionSpec: json_.containsKey('encryptionSpec') - ? GoogleCloudAiplatformV1EncryptionSpec.fromJson( - json_['encryptionSpec'] - as core.Map) - : null, - error: json_.containsKey('error') - ? GoogleRpcStatus.fromJson( - json_['error'] as core.Map) - : null, - labels: - (json_['labels'] as core.Map?)?.map( - (key, value) => core.MapEntry( - key, - value as core.String, - ), - ), - name: json_['name'] as core.String?, - network: json_['network'] as core.String?, - reservedIpRanges: (json_['reservedIpRanges'] as core.List?) - ?.map((value) => value as core.String) - .toList(), - resourcePools: (json_['resourcePools'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1ResourcePool.fromJson( - value as core.Map)) - .toList(), - resourceRuntime: json_.containsKey('resourceRuntime') - ? GoogleCloudAiplatformV1ResourceRuntime.fromJson( - json_['resourceRuntime'] - as core.Map) - : null, - resourceRuntimeSpec: json_.containsKey('resourceRuntimeSpec') - ? 
GoogleCloudAiplatformV1ResourceRuntimeSpec.fromJson( - json_['resourceRuntimeSpec'] - as core.Map) - : null, - satisfiesPzi: json_['satisfiesPzi'] as core.bool?, - satisfiesPzs: json_['satisfiesPzs'] as core.bool?, - startTime: json_['startTime'] as core.String?, - state: json_['state'] as core.String?, - updateTime: json_['updateTime'] as core.String?, + content: json_['content'] as core.String?, + title: json_['title'] as core.String?, + ); + + core.Map toJson() => { + if (content != null) 'content': content!, + if (title != null) 'title': title!, + }; +} + +/// Reference to a resource. +class GoogleCloudAiplatformV1PublisherModelResourceReference { + /// Description of the resource. + @core.Deprecated( + 'Not supported. Member documentation may have more information.', + ) + core.String? description; + + /// The resource name of the Google Cloud resource. + core.String? resourceName; + + /// The URI of the resource. + core.String? uri; + + /// Use case (CUJ) of the resource. + @core.Deprecated( + 'Not supported. Member documentation may have more information.', + ) + core.String? useCase; + + GoogleCloudAiplatformV1PublisherModelResourceReference({ + this.description, + this.resourceName, + this.uri, + this.useCase, + }); + + GoogleCloudAiplatformV1PublisherModelResourceReference.fromJson( + core.Map json_) + : this( + description: json_['description'] as core.String?, + resourceName: json_['resourceName'] as core.String?, + uri: json_['uri'] as core.String?, + useCase: json_['useCase'] as core.String?, ); core.Map toJson() => { - if (createTime != null) 'createTime': createTime!, - if (displayName != null) 'displayName': displayName!, - if (encryptionSpec != null) 'encryptionSpec': encryptionSpec!, - if (error != null) 'error': error!, - if (labels != null) 'labels': labels!, - if (name != null) 'name': name!, - if (network != null) 'network': network!, - if (reservedIpRanges != null) 'reservedIpRanges': reservedIpRanges!, - if (resourcePools != null) 'resourcePools': resourcePools!, - if (resourceRuntime != null) 'resourceRuntime': resourceRuntime!, - if (resourceRuntimeSpec != null) - 'resourceRuntimeSpec': resourceRuntimeSpec!, - if (satisfiesPzi != null) 'satisfiesPzi': satisfiesPzi!, - if (satisfiesPzs != null) 'satisfiesPzs': satisfiesPzs!, - if (startTime != null) 'startTime': startTime!, - if (state != null) 'state': state!, - if (updateTime != null) 'updateTime': updateTime!, + if (description != null) 'description': description!, + if (resourceName != null) 'resourceName': resourceName!, + if (uri != null) 'uri': uri!, + if (useCase != null) 'useCase': useCase!, }; } -/// An instance of a machine learning PipelineJob. -class GoogleCloudAiplatformV1PipelineJob { - /// Pipeline creation time. +/// Request message for MetadataService.PurgeArtifacts. +class GoogleCloudAiplatformV1PurgeArtifactsRequest { + /// A required filter matching the Artifacts to be purged. /// - /// Output only. - core.String? createTime; - - /// The display name of the Pipeline. + /// E.g., `update_time <= 2020-11-19T11:30:00-04:00`. /// - /// The name can be up to 128 characters long and can consist of any UTF-8 - /// characters. - core.String? displayName; + /// Required. + core.String? filter; - /// Customer-managed encryption key spec for a pipelineJob. + /// Flag to indicate to actually perform the purge. /// - /// If set, this PipelineJob and all of its sub-resources will be secured by - /// this key. - GoogleCloudAiplatformV1EncryptionSpec? encryptionSpec; - - /// Pipeline end time. 
+ /// If `force` is set to false, the method will return a sample of Artifact + /// names that would be deleted. /// - /// Output only. - core.String? endTime; + /// Optional. + core.bool? force; - /// The error that occurred during pipeline execution. - /// - /// Only populated when the pipeline's state is FAILED or CANCELLED. - /// - /// Output only. - GoogleRpcStatus? error; + GoogleCloudAiplatformV1PurgeArtifactsRequest({ + this.filter, + this.force, + }); - /// The details of pipeline run. - /// - /// Not available in the list view. - /// - /// Output only. - GoogleCloudAiplatformV1PipelineJobDetail? jobDetail; + GoogleCloudAiplatformV1PurgeArtifactsRequest.fromJson(core.Map json_) + : this( + filter: json_['filter'] as core.String?, + force: json_['force'] as core.bool?, + ); - /// The labels with user-defined metadata to organize PipelineJob. - /// - /// Label keys and values can be no longer than 64 characters (Unicode - /// codepoints), can only contain lowercase letters, numeric characters, - /// underscores and dashes. International characters are allowed. See - /// https://goo.gl/xmQnxf for more information and examples of labels. Note - /// there is some reserved label key for Vertex AI Pipelines. - - /// `vertex-ai-pipelines-run-billing-id`, user set value will get overrided. - core.Map? labels; + core.Map toJson() => { + if (filter != null) 'filter': filter!, + if (force != null) 'force': force!, + }; +} - /// The resource name of the PipelineJob. +/// Request message for MetadataService.PurgeContexts. +class GoogleCloudAiplatformV1PurgeContextsRequest { + /// A required filter matching the Contexts to be purged. /// - /// Output only. - core.String? name; - - /// The full name of the Compute Engine - /// \[network\](/compute/docs/networks-and-firewalls#networks) to which the - /// Pipeline Job's workload should be peered. + /// E.g., `update_time <= 2020-11-19T11:30:00-04:00`. /// - /// For example, `projects/12345/global/networks/myVPC`. - /// \[Format\](/compute/docs/reference/rest/v1/networks/insert) is of the form - /// `projects/{project}/global/networks/{network}`. Where {project} is a - /// project number, as in `12345`, and {network} is a network name. Private - /// services access must already be configured for the network. Pipeline job - /// will apply the network configuration to the Google Cloud resources being - /// launched, if applied, such as Vertex AI Training or Dataflow job. If left - /// unspecified, the workload is not peered with any network. - core.String? network; + /// Required. + core.String? filter; - /// The spec of the pipeline. + /// Flag to indicate to actually perform the purge. /// - /// The values for Object must be JSON objects. It can consist of `num`, - /// `String`, `bool` and `null` as well as `Map` and `List` values. - core.Map? pipelineSpec; - - /// Whether to do component level validations before job creation. + /// If `force` is set to false, the method will return a sample of Context + /// names that would be deleted. /// /// Optional. - core.bool? preflightValidations; + core.bool? force; - /// A list of names for the reserved ip ranges under the VPC network that can - /// be used for this Pipeline Job's workload. - /// - /// If set, we will deploy the Pipeline Job's workload within the provided ip - /// ranges. Otherwise, the job will be deployed to any ip ranges under the - /// provided VPC network. Example: \['vertex-ai-ip-range'\]. - core.List? 
reservedIpRanges; + GoogleCloudAiplatformV1PurgeContextsRequest({ + this.filter, + this.force, + }); - /// Runtime config of the pipeline. - GoogleCloudAiplatformV1PipelineJobRuntimeConfig? runtimeConfig; + GoogleCloudAiplatformV1PurgeContextsRequest.fromJson(core.Map json_) + : this( + filter: json_['filter'] as core.String?, + force: json_['force'] as core.bool?, + ); - /// The schedule resource name. + core.Map toJson() => { + if (filter != null) 'filter': filter!, + if (force != null) 'force': force!, + }; +} + +/// Request message for MetadataService.PurgeExecutions. +class GoogleCloudAiplatformV1PurgeExecutionsRequest { + /// A required filter matching the Executions to be purged. /// - /// Only returned if the Pipeline is created by Schedule API. + /// E.g., `update_time <= 2020-11-19T11:30:00-04:00`. /// - /// Output only. - core.String? scheduleName; + /// Required. + core.String? filter; - /// The service account that the pipeline workload runs as. + /// Flag to indicate to actually perform the purge. /// - /// If not specified, the Compute Engine default service account in the - /// project will be used. See - /// https://cloud.google.com/compute/docs/access/service-accounts#default_service_account - /// Users starting the pipeline must have the `iam.serviceAccounts.actAs` - /// permission on this service account. - core.String? serviceAccount; - - /// Pipeline start time. + /// If `force` is set to false, the method will return a sample of Execution + /// names that would be deleted. /// - /// Output only. - core.String? startTime; + /// Optional. + core.bool? force; - /// The detailed state of the job. + GoogleCloudAiplatformV1PurgeExecutionsRequest({ + this.filter, + this.force, + }); + + GoogleCloudAiplatformV1PurgeExecutionsRequest.fromJson(core.Map json_) + : this( + filter: json_['filter'] as core.String?, + force: json_['force'] as core.bool?, + ); + + core.Map toJson() => { + if (filter != null) 'filter': filter!, + if (force != null) 'force': force!, + }; +} + +/// The spec of a Python packaged code. +class GoogleCloudAiplatformV1PythonPackageSpec { + /// Command line arguments to be passed to the Python task. + core.List? args; + + /// Environment variables to be passed to the python module. /// - /// Output only. - /// Possible string values are: - /// - "PIPELINE_STATE_UNSPECIFIED" : The pipeline state is unspecified. - /// - "PIPELINE_STATE_QUEUED" : The pipeline has been created or resumed, and - /// processing has not yet begun. - /// - "PIPELINE_STATE_PENDING" : The service is preparing to run the pipeline. - /// - "PIPELINE_STATE_RUNNING" : The pipeline is in progress. - /// - "PIPELINE_STATE_SUCCEEDED" : The pipeline completed successfully. - /// - "PIPELINE_STATE_FAILED" : The pipeline failed. - /// - "PIPELINE_STATE_CANCELLING" : The pipeline is being cancelled. From this - /// state, the pipeline may only go to either PIPELINE_STATE_SUCCEEDED, - /// PIPELINE_STATE_FAILED or PIPELINE_STATE_CANCELLED. - /// - "PIPELINE_STATE_CANCELLED" : The pipeline has been cancelled. - /// - "PIPELINE_STATE_PAUSED" : The pipeline has been stopped, and can be - /// resumed. - core.String? state; + /// Maximum limit is 100. + core.List? env; - /// Pipeline template metadata. + /// The URI of a container image in Artifact Registry that will run the + /// provided Python package. /// - /// Will fill up fields if PipelineJob.template_uri is from supported template - /// registry. 
+ /// Vertex AI provides a wide range of executor images with pre-installed + /// packages to meet users' various use cases. See the list of \[pre-built + /// containers for + /// training\](https://cloud.google.com/vertex-ai/docs/training/pre-built-containers). + /// You must use an image from this list. /// - /// Output only. - GoogleCloudAiplatformV1PipelineTemplateMetadata? templateMetadata; + /// Required. + core.String? executorImageUri; - /// A template uri from where the PipelineJob.pipeline_spec, if empty, will be - /// downloaded. + /// The Google Cloud Storage location of the Python package files which are + /// the training program and its dependent packages. /// - /// Currently, only uri from Vertex Template Registry & Gallery is supported. - /// Reference to - /// https://cloud.google.com/vertex-ai/docs/pipelines/create-pipeline-template. - core.String? templateUri; + /// The maximum number of package URIs is 100. + /// + /// Required. + core.List? packageUris; - /// Timestamp when this PipelineJob was most recently updated. + /// The Python module name to run after installing the packages. /// - /// Output only. - core.String? updateTime; + /// Required. + core.String? pythonModule; - GoogleCloudAiplatformV1PipelineJob({ - this.createTime, - this.displayName, - this.encryptionSpec, - this.endTime, - this.error, - this.jobDetail, - this.labels, - this.name, - this.network, - this.pipelineSpec, - this.preflightValidations, - this.reservedIpRanges, - this.runtimeConfig, - this.scheduleName, - this.serviceAccount, - this.startTime, - this.state, - this.templateMetadata, - this.templateUri, - this.updateTime, + GoogleCloudAiplatformV1PythonPackageSpec({ + this.args, + this.env, + this.executorImageUri, + this.packageUris, + this.pythonModule, }); - GoogleCloudAiplatformV1PipelineJob.fromJson(core.Map json_) + GoogleCloudAiplatformV1PythonPackageSpec.fromJson(core.Map json_) : this( - createTime: json_['createTime'] as core.String?, - displayName: json_['displayName'] as core.String?, - encryptionSpec: json_.containsKey('encryptionSpec') - ? GoogleCloudAiplatformV1EncryptionSpec.fromJson( - json_['encryptionSpec'] - as core.Map) - : null, - endTime: json_['endTime'] as core.String?, - error: json_.containsKey('error') - ? GoogleRpcStatus.fromJson( - json_['error'] as core.Map) - : null, - jobDetail: json_.containsKey('jobDetail') - ? GoogleCloudAiplatformV1PipelineJobDetail.fromJson( - json_['jobDetail'] as core.Map) - : null, - labels: - (json_['labels'] as core.Map?)?.map( - (key, value) => core.MapEntry( - key, - value as core.String, - ), - ), - name: json_['name'] as core.String?, - network: json_['network'] as core.String?, - pipelineSpec: json_.containsKey('pipelineSpec') - ? json_['pipelineSpec'] as core.Map - : null, - preflightValidations: json_['preflightValidations'] as core.bool?, - reservedIpRanges: (json_['reservedIpRanges'] as core.List?) + args: (json_['args'] as core.List?) ?.map((value) => value as core.String) .toList(), - runtimeConfig: json_.containsKey('runtimeConfig') - ? GoogleCloudAiplatformV1PipelineJobRuntimeConfig.fromJson( - json_['runtimeConfig'] as core.Map) - : null, - scheduleName: json_['scheduleName'] as core.String?, - serviceAccount: json_['serviceAccount'] as core.String?, - startTime: json_['startTime'] as core.String?, - state: json_['state'] as core.String?, - templateMetadata: json_.containsKey('templateMetadata') - ? 
GoogleCloudAiplatformV1PipelineTemplateMetadata.fromJson( - json_['templateMetadata'] - as core.Map) - : null, - templateUri: json_['templateUri'] as core.String?, - updateTime: json_['updateTime'] as core.String?, + env: (json_['env'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1EnvVar.fromJson( + value as core.Map)) + .toList(), + executorImageUri: json_['executorImageUri'] as core.String?, + packageUris: (json_['packageUris'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + pythonModule: json_['pythonModule'] as core.String?, ); core.Map toJson() => { - if (createTime != null) 'createTime': createTime!, - if (displayName != null) 'displayName': displayName!, - if (encryptionSpec != null) 'encryptionSpec': encryptionSpec!, - if (endTime != null) 'endTime': endTime!, - if (error != null) 'error': error!, - if (jobDetail != null) 'jobDetail': jobDetail!, - if (labels != null) 'labels': labels!, - if (name != null) 'name': name!, - if (network != null) 'network': network!, - if (pipelineSpec != null) 'pipelineSpec': pipelineSpec!, - if (preflightValidations != null) - 'preflightValidations': preflightValidations!, - if (reservedIpRanges != null) 'reservedIpRanges': reservedIpRanges!, - if (runtimeConfig != null) 'runtimeConfig': runtimeConfig!, - if (scheduleName != null) 'scheduleName': scheduleName!, - if (serviceAccount != null) 'serviceAccount': serviceAccount!, - if (startTime != null) 'startTime': startTime!, - if (state != null) 'state': state!, - if (templateMetadata != null) 'templateMetadata': templateMetadata!, - if (templateUri != null) 'templateUri': templateUri!, - if (updateTime != null) 'updateTime': updateTime!, + if (args != null) 'args': args!, + if (env != null) 'env': env!, + if (executorImageUri != null) 'executorImageUri': executorImageUri!, + if (packageUris != null) 'packageUris': packageUris!, + if (pythonModule != null) 'pythonModule': pythonModule!, }; } -/// The runtime detail of PipelineJob. -class GoogleCloudAiplatformV1PipelineJobDetail { - /// The context of the pipeline. - /// - /// Output only. - GoogleCloudAiplatformV1Context? pipelineContext; +/// Response message for QueryDeployedModels method. +class GoogleCloudAiplatformV1QueryDeployedModelsResponse { + /// References to the DeployedModels that share the specified + /// deploymentResourcePool. + core.List? deployedModelRefs; - /// The context of the current pipeline run. - /// - /// Output only. - GoogleCloudAiplatformV1Context? pipelineRunContext; + /// DEPRECATED Use deployed_model_refs instead. + @core.Deprecated( + 'Not supported. Member documentation may have more information.', + ) + core.List? deployedModels; - /// The runtime details of the tasks under the pipeline. + /// A token, which can be sent as `page_token` to retrieve the next page. /// - /// Output only. - core.List? taskDetails; + /// If this field is omitted, there are no subsequent pages. + core.String? nextPageToken; - GoogleCloudAiplatformV1PipelineJobDetail({ - this.pipelineContext, - this.pipelineRunContext, - this.taskDetails, + /// The total number of DeployedModels on this DeploymentResourcePool. + core.int? totalDeployedModelCount; + + /// The total number of Endpoints that have DeployedModels on this + /// DeploymentResourcePool. + core.int? 
totalEndpointCount; + + GoogleCloudAiplatformV1QueryDeployedModelsResponse({ + this.deployedModelRefs, + this.deployedModels, + this.nextPageToken, + this.totalDeployedModelCount, + this.totalEndpointCount, }); - GoogleCloudAiplatformV1PipelineJobDetail.fromJson(core.Map json_) + GoogleCloudAiplatformV1QueryDeployedModelsResponse.fromJson(core.Map json_) : this( - pipelineContext: json_.containsKey('pipelineContext') - ? GoogleCloudAiplatformV1Context.fromJson(json_['pipelineContext'] - as core.Map) - : null, - pipelineRunContext: json_.containsKey('pipelineRunContext') - ? GoogleCloudAiplatformV1Context.fromJson( - json_['pipelineRunContext'] - as core.Map) - : null, - taskDetails: (json_['taskDetails'] as core.List?) - ?.map((value) => - GoogleCloudAiplatformV1PipelineTaskDetail.fromJson( - value as core.Map)) + deployedModelRefs: (json_['deployedModelRefs'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1DeployedModelRef.fromJson( + value as core.Map)) + .toList(), + deployedModels: (json_['deployedModels'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1DeployedModel.fromJson( + value as core.Map)) .toList(), + nextPageToken: json_['nextPageToken'] as core.String?, + totalDeployedModelCount: + json_['totalDeployedModelCount'] as core.int?, + totalEndpointCount: json_['totalEndpointCount'] as core.int?, ); core.Map toJson() => { - if (pipelineContext != null) 'pipelineContext': pipelineContext!, - if (pipelineRunContext != null) - 'pipelineRunContext': pipelineRunContext!, - if (taskDetails != null) 'taskDetails': taskDetails!, + if (deployedModelRefs != null) 'deployedModelRefs': deployedModelRefs!, + if (deployedModels != null) 'deployedModels': deployedModels!, + if (nextPageToken != null) 'nextPageToken': nextPageToken!, + if (totalDeployedModelCount != null) + 'totalDeployedModelCount': totalDeployedModelCount!, + if (totalEndpointCount != null) + 'totalEndpointCount': totalEndpointCount!, }; } -/// The runtime config of a PipelineJob. -class GoogleCloudAiplatformV1PipelineJobRuntimeConfig { - /// Represents the failure policy of a pipeline. - /// - /// Currently, the default of a pipeline is that the pipeline will continue to - /// run until no more tasks can be executed, also known as - /// PIPELINE_FAILURE_POLICY_FAIL_SLOW. However, if a pipeline is set to - /// PIPELINE_FAILURE_POLICY_FAIL_FAST, it will stop scheduling any new tasks - /// when a task has failed. Any scheduled tasks will continue to completion. - /// Possible string values are: - /// - "PIPELINE_FAILURE_POLICY_UNSPECIFIED" : Default value, and follows fail - /// slow behavior. - /// - "PIPELINE_FAILURE_POLICY_FAIL_SLOW" : Indicates that the pipeline should - /// continue to run until all possible tasks have been scheduled and - /// completed. - /// - "PIPELINE_FAILURE_POLICY_FAIL_FAST" : Indicates that the pipeline should - /// stop scheduling new tasks after a task has failed. - core.String? failurePolicy; - - /// A path in a Cloud Storage bucket, which will be treated as the root output - /// directory of the pipeline. +/// Request message for ReasoningEngineExecutionService.Query. +class GoogleCloudAiplatformV1QueryReasoningEngineRequest { + /// Class method to be used for the query. /// - /// It is used by the system to generate the paths of output artifacts. The - /// artifact paths are generated with a sub-path pattern - /// `{job_id}/{task_id}/{output_key}` under the specified output directory. 
- /// The service account specified in this pipeline must have the - /// `storage.objects.get` and `storage.objects.create` permissions for this - /// bucket. + /// It is optional and defaults to "query" if unspecified. /// - /// Required. - core.String? gcsOutputDirectory; + /// Optional. + core.String? classMethod; - /// The runtime artifacts of the PipelineJob. + /// Input content provided by users in JSON object format. /// - /// The key will be the input artifact name and the value would be one of the - /// InputArtifact. - core.Map? - inputArtifacts; - - /// The runtime parameters of the PipelineJob. + /// Examples include text query, function calling parameters, media bytes, + /// etc. /// - /// The parameters will be passed into PipelineJob.pipeline_spec to replace - /// the placeholders at runtime. This field is used by pipelines built using - /// `PipelineJob.pipeline_spec.schema_version` 2.1.0, such as pipelines built - /// using Kubeflow Pipelines SDK 1.9 or higher and the v2 DSL. + /// Optional. /// /// The values for Object must be JSON objects. It can consist of `num`, - /// `String`, `bool` and `null` as well as `Map` and `List` values. - core.Map? parameterValues; - - /// Use RuntimeConfig.parameter_values instead. - /// - /// The runtime parameters of the PipelineJob. The parameters will be passed - /// into PipelineJob.pipeline_spec to replace the placeholders at runtime. - /// This field is used by pipelines built using - /// `PipelineJob.pipeline_spec.schema_version` 2.0.0 or lower, such as - /// pipelines built using Kubeflow Pipelines SDK 1.8 or lower. - /// - /// Deprecated. - @core.Deprecated( - 'Not supported. Member documentation may have more information.', - ) - core.Map? parameters; + /// `String`, `bool` and `null` as well as `Map` and `List` values. + core.Map? input; - GoogleCloudAiplatformV1PipelineJobRuntimeConfig({ - this.failurePolicy, - this.gcsOutputDirectory, - this.inputArtifacts, - this.parameterValues, - this.parameters, + GoogleCloudAiplatformV1QueryReasoningEngineRequest({ + this.classMethod, + this.input, }); - GoogleCloudAiplatformV1PipelineJobRuntimeConfig.fromJson(core.Map json_) + GoogleCloudAiplatformV1QueryReasoningEngineRequest.fromJson(core.Map json_) : this( - failurePolicy: json_['failurePolicy'] as core.String?, - gcsOutputDirectory: json_['gcsOutputDirectory'] as core.String?, - inputArtifacts: - (json_['inputArtifacts'] as core.Map?) - ?.map( - (key, value) => core.MapEntry( - key, - GoogleCloudAiplatformV1PipelineJobRuntimeConfigInputArtifact - .fromJson(value as core.Map), - ), - ), - parameterValues: json_.containsKey('parameterValues') - ? json_['parameterValues'] as core.Map + classMethod: json_['classMethod'] as core.String?, + input: json_.containsKey('input') + ? json_['input'] as core.Map : null, - parameters: - (json_['parameters'] as core.Map?) - ?.map( - (key, value) => core.MapEntry( - key, - GoogleCloudAiplatformV1Value.fromJson( - value as core.Map), - ), - ), ); core.Map toJson() => { - if (failurePolicy != null) 'failurePolicy': failurePolicy!, - if (gcsOutputDirectory != null) - 'gcsOutputDirectory': gcsOutputDirectory!, - if (inputArtifacts != null) 'inputArtifacts': inputArtifacts!, - if (parameterValues != null) 'parameterValues': parameterValues!, - if (parameters != null) 'parameters': parameters!, + if (classMethod != null) 'classMethod': classMethod!, + if (input != null) 'input': input!, }; } -/// The type of an input artifact. 
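// ---------------------------------------------------------------------------
// Editorial sketch (not part of the generated diff): constructing the newly
// added QueryReasoningEngineRequest and serializing it, assuming the usual
// package:googleapis layout (package:googleapis/aiplatform/v1.dart). The
// classMethod and input values are illustrative placeholders only; no RPC is
// invoked here.
import 'dart:convert';

import 'package:googleapis/aiplatform/v1.dart';

void main() {
  final request = GoogleCloudAiplatformV1QueryReasoningEngineRequest(
    // Per the generated field docs, classMethod defaults to "query" when it
    // is left unspecified; it is spelled out here only for clarity.
    classMethod: 'query',
    input: {'text': 'What changed in this release?'},
  );
  // The generated toJson() omits null fields, so only the populated fields
  // appear in the encoded payload.
  print(jsonEncode(request.toJson()));
}
// ---------------------------------------------------------------------------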
-class GoogleCloudAiplatformV1PipelineJobRuntimeConfigInputArtifact { - /// Artifact resource id from MLMD. +/// Response message for ReasoningEngineExecutionService.Query +class GoogleCloudAiplatformV1QueryReasoningEngineResponse { + /// Response provided by users in JSON object format. /// - /// Which is the last portion of an artifact resource name: - /// `projects/{project}/locations/{location}/metadataStores/default/artifacts/{artifact_id}`. - /// The artifact must stay within the same project, location and default - /// metadatastore as the pipeline. - core.String? artifactId; + /// The values for Object must be JSON objects. It can consist of `num`, + /// `String`, `bool` and `null` as well as `Map` and `List` values. + core.Object? output; - GoogleCloudAiplatformV1PipelineJobRuntimeConfigInputArtifact({ - this.artifactId, + GoogleCloudAiplatformV1QueryReasoningEngineResponse({ + this.output, }); - GoogleCloudAiplatformV1PipelineJobRuntimeConfigInputArtifact.fromJson( - core.Map json_) + GoogleCloudAiplatformV1QueryReasoningEngineResponse.fromJson(core.Map json_) : this( - artifactId: json_['artifactId'] as core.String?, + output: json_['output'], ); core.Map toJson() => { - if (artifactId != null) 'artifactId': artifactId!, + if (output != null) 'output': output!, }; } -/// The runtime detail of a task execution. -class GoogleCloudAiplatformV1PipelineTaskDetail { - /// Task create time. +/// Input for question answering correctness metric. +class GoogleCloudAiplatformV1QuestionAnsweringCorrectnessInput { + /// Question answering correctness instance. /// - /// Output only. - core.String? createTime; + /// Required. + GoogleCloudAiplatformV1QuestionAnsweringCorrectnessInstance? instance; - /// Task end time. + /// Spec for question answering correctness score metric. /// - /// Output only. - core.String? endTime; + /// Required. + GoogleCloudAiplatformV1QuestionAnsweringCorrectnessSpec? metricSpec; - /// The error that occurred during task execution. - /// - /// Only populated when the task's state is FAILED or CANCELLED. - /// - /// Output only. - GoogleRpcStatus? error; + GoogleCloudAiplatformV1QuestionAnsweringCorrectnessInput({ + this.instance, + this.metricSpec, + }); - /// The execution metadata of the task. - /// - /// Output only. - GoogleCloudAiplatformV1Execution? execution; + GoogleCloudAiplatformV1QuestionAnsweringCorrectnessInput.fromJson( + core.Map json_) + : this( + instance: json_.containsKey('instance') + ? GoogleCloudAiplatformV1QuestionAnsweringCorrectnessInstance + .fromJson( + json_['instance'] as core.Map) + : null, + metricSpec: json_.containsKey('metricSpec') + ? GoogleCloudAiplatformV1QuestionAnsweringCorrectnessSpec + .fromJson(json_['metricSpec'] + as core.Map) + : null, + ); - /// The detailed execution info. - /// - /// Output only. - GoogleCloudAiplatformV1PipelineTaskExecutorDetail? executorDetail; + core.Map toJson() => { + if (instance != null) 'instance': instance!, + if (metricSpec != null) 'metricSpec': metricSpec!, + }; +} - /// The runtime input artifacts of the task. - /// - /// Output only. - core.Map? - inputs; +/// Spec for question answering correctness instance. +typedef GoogleCloudAiplatformV1QuestionAnsweringCorrectnessInstance + = $Instance02; - /// The runtime output artifacts of the task. +/// Spec for question answering correctness result. +class GoogleCloudAiplatformV1QuestionAnsweringCorrectnessResult { + /// Confidence for question answering correctness score. /// /// Output only. - core.Map? - outputs; + core.double? 
confidence; - /// The id of the parent task if the task is within a component scope. - /// - /// Empty if the task is at the root level. + /// Explanation for question answering correctness score. /// /// Output only. - core.String? parentTaskId; + core.String? explanation; - /// A list of task status. - /// - /// This field keeps a record of task status evolving over time. + /// Question Answering Correctness score. /// /// Output only. - core.List? - pipelineTaskStatus; + core.double? score; - /// Task start time. - /// - /// Output only. - core.String? startTime; + GoogleCloudAiplatformV1QuestionAnsweringCorrectnessResult({ + this.confidence, + this.explanation, + this.score, + }); - /// State of the task. - /// - /// Output only. - /// Possible string values are: - /// - "STATE_UNSPECIFIED" : Unspecified. - /// - "PENDING" : Specifies pending state for the task. - /// - "RUNNING" : Specifies task is being executed. - /// - "SUCCEEDED" : Specifies task completed successfully. - /// - "CANCEL_PENDING" : Specifies Task cancel is in pending state. - /// - "CANCELLING" : Specifies task is being cancelled. - /// - "CANCELLED" : Specifies task was cancelled. - /// - "FAILED" : Specifies task failed. - /// - "SKIPPED" : Specifies task was skipped due to cache hit. - /// - "NOT_TRIGGERED" : Specifies that the task was not triggered because the - /// task's trigger policy is not satisfied. The trigger policy is specified in - /// the `condition` field of PipelineJob.pipeline_spec. - core.String? state; + GoogleCloudAiplatformV1QuestionAnsweringCorrectnessResult.fromJson( + core.Map json_) + : this( + confidence: (json_['confidence'] as core.num?)?.toDouble(), + explanation: json_['explanation'] as core.String?, + score: (json_['score'] as core.num?)?.toDouble(), + ); - /// The system generated ID of the task. + core.Map toJson() => { + if (confidence != null) 'confidence': confidence!, + if (explanation != null) 'explanation': explanation!, + if (score != null) 'score': score!, + }; +} + +/// Spec for question answering correctness metric. +class GoogleCloudAiplatformV1QuestionAnsweringCorrectnessSpec { + /// Whether to use instance.reference to compute question answering + /// correctness. /// - /// Output only. - core.String? taskId; + /// Optional. + core.bool? useReference; - /// The user specified name of the task that is defined in pipeline_spec. + /// Which version to use for evaluation. /// - /// Output only. - core.String? taskName; + /// Optional. + core.int? version; - GoogleCloudAiplatformV1PipelineTaskDetail({ - this.createTime, - this.endTime, - this.error, - this.execution, - this.executorDetail, - this.inputs, - this.outputs, - this.parentTaskId, - this.pipelineTaskStatus, - this.startTime, - this.state, - this.taskId, - this.taskName, + GoogleCloudAiplatformV1QuestionAnsweringCorrectnessSpec({ + this.useReference, + this.version, }); - GoogleCloudAiplatformV1PipelineTaskDetail.fromJson(core.Map json_) + GoogleCloudAiplatformV1QuestionAnsweringCorrectnessSpec.fromJson( + core.Map json_) : this( - createTime: json_['createTime'] as core.String?, - endTime: json_['endTime'] as core.String?, - error: json_.containsKey('error') - ? GoogleRpcStatus.fromJson( - json_['error'] as core.Map) - : null, - execution: json_.containsKey('execution') - ? GoogleCloudAiplatformV1Execution.fromJson( - json_['execution'] as core.Map) - : null, - executorDetail: json_.containsKey('executorDetail') - ? 
GoogleCloudAiplatformV1PipelineTaskExecutorDetail.fromJson( - json_['executorDetail'] - as core.Map) - : null, - inputs: - (json_['inputs'] as core.Map?)?.map( - (key, value) => core.MapEntry( - key, - GoogleCloudAiplatformV1PipelineTaskDetailArtifactList.fromJson( - value as core.Map), - ), - ), - outputs: - (json_['outputs'] as core.Map?)?.map( - (key, value) => core.MapEntry( - key, - GoogleCloudAiplatformV1PipelineTaskDetailArtifactList.fromJson( - value as core.Map), - ), - ), - parentTaskId: json_['parentTaskId'] as core.String?, - pipelineTaskStatus: (json_['pipelineTaskStatus'] as core.List?) - ?.map((value) => - GoogleCloudAiplatformV1PipelineTaskDetailPipelineTaskStatus - .fromJson(value as core.Map)) - .toList(), - startTime: json_['startTime'] as core.String?, - state: json_['state'] as core.String?, - taskId: json_['taskId'] as core.String?, - taskName: json_['taskName'] as core.String?, + useReference: json_['useReference'] as core.bool?, + version: json_['version'] as core.int?, ); core.Map toJson() => { - if (createTime != null) 'createTime': createTime!, - if (endTime != null) 'endTime': endTime!, - if (error != null) 'error': error!, - if (execution != null) 'execution': execution!, - if (executorDetail != null) 'executorDetail': executorDetail!, - if (inputs != null) 'inputs': inputs!, - if (outputs != null) 'outputs': outputs!, - if (parentTaskId != null) 'parentTaskId': parentTaskId!, - if (pipelineTaskStatus != null) - 'pipelineTaskStatus': pipelineTaskStatus!, - if (startTime != null) 'startTime': startTime!, - if (state != null) 'state': state!, - if (taskId != null) 'taskId': taskId!, - if (taskName != null) 'taskName': taskName!, + if (useReference != null) 'useReference': useReference!, + if (version != null) 'version': version!, }; } -/// A list of artifact metadata. -class GoogleCloudAiplatformV1PipelineTaskDetailArtifactList { - /// A list of artifact metadata. +/// Input for question answering helpfulness metric. +class GoogleCloudAiplatformV1QuestionAnsweringHelpfulnessInput { + /// Question answering helpfulness instance. /// - /// Output only. - core.List? artifacts; + /// Required. + GoogleCloudAiplatformV1QuestionAnsweringHelpfulnessInstance? instance; - GoogleCloudAiplatformV1PipelineTaskDetailArtifactList({ - this.artifacts, + /// Spec for question answering helpfulness score metric. + /// + /// Required. + GoogleCloudAiplatformV1QuestionAnsweringHelpfulnessSpec? metricSpec; + + GoogleCloudAiplatformV1QuestionAnsweringHelpfulnessInput({ + this.instance, + this.metricSpec, }); - GoogleCloudAiplatformV1PipelineTaskDetailArtifactList.fromJson(core.Map json_) + GoogleCloudAiplatformV1QuestionAnsweringHelpfulnessInput.fromJson( + core.Map json_) : this( - artifacts: (json_['artifacts'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1Artifact.fromJson( - value as core.Map)) - .toList(), + instance: json_.containsKey('instance') + ? GoogleCloudAiplatformV1QuestionAnsweringHelpfulnessInstance + .fromJson( + json_['instance'] as core.Map) + : null, + metricSpec: json_.containsKey('metricSpec') + ? GoogleCloudAiplatformV1QuestionAnsweringHelpfulnessSpec + .fromJson(json_['metricSpec'] + as core.Map) + : null, ); core.Map toJson() => { - if (artifacts != null) 'artifacts': artifacts!, + if (instance != null) 'instance': instance!, + if (metricSpec != null) 'metricSpec': metricSpec!, }; } -/// A single record of the task status. -class GoogleCloudAiplatformV1PipelineTaskDetailPipelineTaskStatus { - /// The error that occurred during the state. 
- /// - /// May be set when the state is any of the non-final state - /// (PENDING/RUNNING/CANCELLING) or FAILED state. If the state is FAILED, the - /// error here is final and not going to be retried. If the state is a - /// non-final state, the error indicates a system-error being retried. +/// Spec for question answering helpfulness instance. +typedef GoogleCloudAiplatformV1QuestionAnsweringHelpfulnessInstance + = $Instance02; + +/// Spec for question answering helpfulness result. +class GoogleCloudAiplatformV1QuestionAnsweringHelpfulnessResult { + /// Confidence for question answering helpfulness score. /// /// Output only. - GoogleRpcStatus? error; + core.double? confidence; - /// The state of the task. + /// Explanation for question answering helpfulness score. /// /// Output only. - /// Possible string values are: - /// - "STATE_UNSPECIFIED" : Unspecified. - /// - "PENDING" : Specifies pending state for the task. - /// - "RUNNING" : Specifies task is being executed. - /// - "SUCCEEDED" : Specifies task completed successfully. - /// - "CANCEL_PENDING" : Specifies Task cancel is in pending state. - /// - "CANCELLING" : Specifies task is being cancelled. - /// - "CANCELLED" : Specifies task was cancelled. - /// - "FAILED" : Specifies task failed. - /// - "SKIPPED" : Specifies task was skipped due to cache hit. - /// - "NOT_TRIGGERED" : Specifies that the task was not triggered because the - /// task's trigger policy is not satisfied. The trigger policy is specified in - /// the `condition` field of PipelineJob.pipeline_spec. - core.String? state; + core.String? explanation; - /// Update time of this status. + /// Question Answering Helpfulness score. /// /// Output only. - core.String? updateTime; + core.double? score; - GoogleCloudAiplatformV1PipelineTaskDetailPipelineTaskStatus({ - this.error, - this.state, - this.updateTime, + GoogleCloudAiplatformV1QuestionAnsweringHelpfulnessResult({ + this.confidence, + this.explanation, + this.score, }); - GoogleCloudAiplatformV1PipelineTaskDetailPipelineTaskStatus.fromJson( + GoogleCloudAiplatformV1QuestionAnsweringHelpfulnessResult.fromJson( core.Map json_) : this( - error: json_.containsKey('error') - ? GoogleRpcStatus.fromJson( - json_['error'] as core.Map) - : null, - state: json_['state'] as core.String?, - updateTime: json_['updateTime'] as core.String?, + confidence: (json_['confidence'] as core.num?)?.toDouble(), + explanation: json_['explanation'] as core.String?, + score: (json_['score'] as core.num?)?.toDouble(), ); core.Map toJson() => { - if (error != null) 'error': error!, - if (state != null) 'state': state!, - if (updateTime != null) 'updateTime': updateTime!, + if (confidence != null) 'confidence': confidence!, + if (explanation != null) 'explanation': explanation!, + if (score != null) 'score': score!, }; } -/// The runtime detail of a pipeline executor. -class GoogleCloudAiplatformV1PipelineTaskExecutorDetail { - /// The detailed info for a container executor. +/// Spec for question answering helpfulness metric. +class GoogleCloudAiplatformV1QuestionAnsweringHelpfulnessSpec { + /// Whether to use instance.reference to compute question answering + /// helpfulness. /// - /// Output only. - GoogleCloudAiplatformV1PipelineTaskExecutorDetailContainerDetail? - containerDetail; + /// Optional. + core.bool? useReference; - /// The detailed info for a custom job executor. + /// Which version to use for evaluation. /// - /// Output only. - GoogleCloudAiplatformV1PipelineTaskExecutorDetailCustomJobDetail? 
- customJobDetail; + /// Optional. + core.int? version; - GoogleCloudAiplatformV1PipelineTaskExecutorDetail({ - this.containerDetail, - this.customJobDetail, + GoogleCloudAiplatformV1QuestionAnsweringHelpfulnessSpec({ + this.useReference, + this.version, }); - GoogleCloudAiplatformV1PipelineTaskExecutorDetail.fromJson(core.Map json_) + GoogleCloudAiplatformV1QuestionAnsweringHelpfulnessSpec.fromJson( + core.Map json_) : this( - containerDetail: json_.containsKey('containerDetail') - ? GoogleCloudAiplatformV1PipelineTaskExecutorDetailContainerDetail - .fromJson(json_['containerDetail'] - as core.Map) - : null, - customJobDetail: json_.containsKey('customJobDetail') - ? GoogleCloudAiplatformV1PipelineTaskExecutorDetailCustomJobDetail - .fromJson(json_['customJobDetail'] - as core.Map) - : null, + useReference: json_['useReference'] as core.bool?, + version: json_['version'] as core.int?, ); core.Map toJson() => { - if (containerDetail != null) 'containerDetail': containerDetail!, - if (customJobDetail != null) 'customJobDetail': customJobDetail!, + if (useReference != null) 'useReference': useReference!, + if (version != null) 'version': version!, }; } -/// The detail of a container execution. -/// -/// It contains the job names of the lifecycle of a container execution. -class GoogleCloudAiplatformV1PipelineTaskExecutorDetailContainerDetail { - /// The names of the previously failed CustomJob for the main container - /// executions. - /// - /// The list includes the all attempts in chronological order. - /// - /// Output only. - core.List? failedMainJobs; - - /// The names of the previously failed CustomJob for the pre-caching-check - /// container executions. - /// - /// This job will be available if the PipelineJob.pipeline_spec specifies the - /// `pre_caching_check` hook in the lifecycle events. The list includes the - /// all attempts in chronological order. - /// - /// Output only. - core.List? failedPreCachingCheckJobs; - - /// The name of the CustomJob for the main container execution. +/// Input for question answering quality metric. +class GoogleCloudAiplatformV1QuestionAnsweringQualityInput { + /// Question answering quality instance. /// - /// Output only. - core.String? mainJob; + /// Required. + GoogleCloudAiplatformV1QuestionAnsweringQualityInstance? instance; - /// The name of the CustomJob for the pre-caching-check container execution. - /// - /// This job will be available if the PipelineJob.pipeline_spec specifies the - /// `pre_caching_check` hook in the lifecycle events. + /// Spec for question answering quality score metric. /// - /// Output only. - core.String? preCachingCheckJob; + /// Required. + GoogleCloudAiplatformV1QuestionAnsweringQualitySpec? metricSpec; - GoogleCloudAiplatformV1PipelineTaskExecutorDetailContainerDetail({ - this.failedMainJobs, - this.failedPreCachingCheckJobs, - this.mainJob, - this.preCachingCheckJob, + GoogleCloudAiplatformV1QuestionAnsweringQualityInput({ + this.instance, + this.metricSpec, }); - GoogleCloudAiplatformV1PipelineTaskExecutorDetailContainerDetail.fromJson( - core.Map json_) + GoogleCloudAiplatformV1QuestionAnsweringQualityInput.fromJson(core.Map json_) : this( - failedMainJobs: (json_['failedMainJobs'] as core.List?) - ?.map((value) => value as core.String) - .toList(), - failedPreCachingCheckJobs: - (json_['failedPreCachingCheckJobs'] as core.List?) 
- ?.map((value) => value as core.String) - .toList(), - mainJob: json_['mainJob'] as core.String?, - preCachingCheckJob: json_['preCachingCheckJob'] as core.String?, + instance: json_.containsKey('instance') + ? GoogleCloudAiplatformV1QuestionAnsweringQualityInstance + .fromJson( + json_['instance'] as core.Map) + : null, + metricSpec: json_.containsKey('metricSpec') + ? GoogleCloudAiplatformV1QuestionAnsweringQualitySpec.fromJson( + json_['metricSpec'] as core.Map) + : null, ); core.Map toJson() => { - if (failedMainJobs != null) 'failedMainJobs': failedMainJobs!, - if (failedPreCachingCheckJobs != null) - 'failedPreCachingCheckJobs': failedPreCachingCheckJobs!, - if (mainJob != null) 'mainJob': mainJob!, - if (preCachingCheckJob != null) - 'preCachingCheckJob': preCachingCheckJob!, + if (instance != null) 'instance': instance!, + if (metricSpec != null) 'metricSpec': metricSpec!, }; } -/// The detailed info for a custom job executor. -class GoogleCloudAiplatformV1PipelineTaskExecutorDetailCustomJobDetail { - /// The names of the previously failed CustomJob. +/// Spec for question answering quality instance. +class GoogleCloudAiplatformV1QuestionAnsweringQualityInstance { + /// Text to answer the question. /// - /// The list includes the all attempts in chronological order. + /// Required. + core.String? context; + + /// Question Answering prompt for LLM. /// - /// Output only. - core.List? failedJobs; + /// Required. + core.String? instruction; + + /// Output of the evaluated model. + /// + /// Required. + core.String? prediction; - /// The name of the CustomJob. + /// Ground truth used to compare against the prediction. /// - /// Output only. - core.String? job; + /// Optional. + core.String? reference; - GoogleCloudAiplatformV1PipelineTaskExecutorDetailCustomJobDetail({ - this.failedJobs, - this.job, + GoogleCloudAiplatformV1QuestionAnsweringQualityInstance({ + this.context, + this.instruction, + this.prediction, + this.reference, }); - GoogleCloudAiplatformV1PipelineTaskExecutorDetailCustomJobDetail.fromJson( + GoogleCloudAiplatformV1QuestionAnsweringQualityInstance.fromJson( core.Map json_) : this( - failedJobs: (json_['failedJobs'] as core.List?) - ?.map((value) => value as core.String) - .toList(), - job: json_['job'] as core.String?, + context: json_['context'] as core.String?, + instruction: json_['instruction'] as core.String?, + prediction: json_['prediction'] as core.String?, + reference: json_['reference'] as core.String?, ); core.Map toJson() => { - if (failedJobs != null) 'failedJobs': failedJobs!, - if (job != null) 'job': job!, + if (context != null) 'context': context!, + if (instruction != null) 'instruction': instruction!, + if (prediction != null) 'prediction': prediction!, + if (reference != null) 'reference': reference!, }; } -/// Pipeline template metadata if PipelineJob.template_uri is from supported -/// template registry. -/// -/// Currently, the only supported registry is Artifact Registry. -class GoogleCloudAiplatformV1PipelineTemplateMetadata { - /// The version_name in artifact registry. +/// Spec for question answering quality result. +class GoogleCloudAiplatformV1QuestionAnsweringQualityResult { + /// Confidence for question answering quality score. /// - /// Will always be presented in output if the PipelineJob.template_uri is from - /// supported template registry. Format is "sha256:abcdef123456...". - core.String? version; + /// Output only. + core.double? 
confidence; - GoogleCloudAiplatformV1PipelineTemplateMetadata({ - this.version, + /// Explanation for question answering quality score. + /// + /// Output only. + core.String? explanation; + + /// Question Answering Quality score. + /// + /// Output only. + core.double? score; + + GoogleCloudAiplatformV1QuestionAnsweringQualityResult({ + this.confidence, + this.explanation, + this.score, }); - GoogleCloudAiplatformV1PipelineTemplateMetadata.fromJson(core.Map json_) + GoogleCloudAiplatformV1QuestionAnsweringQualityResult.fromJson(core.Map json_) : this( - version: json_['version'] as core.String?, + confidence: (json_['confidence'] as core.num?)?.toDouble(), + explanation: json_['explanation'] as core.String?, + score: (json_['score'] as core.num?)?.toDouble(), ); core.Map toJson() => { - if (version != null) 'version': version!, + if (confidence != null) 'confidence': confidence!, + if (explanation != null) 'explanation': explanation!, + if (score != null) 'score': score!, }; } -/// Input for pointwise metric. -class GoogleCloudAiplatformV1PointwiseMetricInput { - /// Pointwise metric instance. +/// Spec for question answering quality score metric. +typedef GoogleCloudAiplatformV1QuestionAnsweringQualitySpec + = $QuestionAnsweringQualitySpec; + +/// Input for question answering relevance metric. +class GoogleCloudAiplatformV1QuestionAnsweringRelevanceInput { + /// Question answering relevance instance. /// /// Required. - GoogleCloudAiplatformV1PointwiseMetricInstance? instance; + GoogleCloudAiplatformV1QuestionAnsweringRelevanceInstance? instance; - /// Spec for pointwise metric. + /// Spec for question answering relevance score metric. /// /// Required. - GoogleCloudAiplatformV1PointwiseMetricSpec? metricSpec; + GoogleCloudAiplatformV1QuestionAnsweringRelevanceSpec? metricSpec; - GoogleCloudAiplatformV1PointwiseMetricInput({ + GoogleCloudAiplatformV1QuestionAnsweringRelevanceInput({ this.instance, this.metricSpec, }); - GoogleCloudAiplatformV1PointwiseMetricInput.fromJson(core.Map json_) + GoogleCloudAiplatformV1QuestionAnsweringRelevanceInput.fromJson( + core.Map json_) : this( instance: json_.containsKey('instance') - ? GoogleCloudAiplatformV1PointwiseMetricInstance.fromJson( - json_['instance'] as core.Map) + ? GoogleCloudAiplatformV1QuestionAnsweringRelevanceInstance + .fromJson( + json_['instance'] as core.Map) : null, metricSpec: json_.containsKey('metricSpec') - ? GoogleCloudAiplatformV1PointwiseMetricSpec.fromJson( + ? GoogleCloudAiplatformV1QuestionAnsweringRelevanceSpec.fromJson( json_['metricSpec'] as core.Map) : null, ); @@ -50500,8031 +56661,8258 @@ class GoogleCloudAiplatformV1PointwiseMetricInput { }; } -/// Pointwise metric instance. -/// -/// Usually one instance corresponds to one row in an evaluation dataset. -class GoogleCloudAiplatformV1PointwiseMetricInstance { - /// Instance specified as a json string. - /// - /// String key-value pairs are expected in the json_instance to render - /// PointwiseMetricSpec.instance_prompt_template. - core.String? jsonInstance; - - GoogleCloudAiplatformV1PointwiseMetricInstance({ - this.jsonInstance, - }); - - GoogleCloudAiplatformV1PointwiseMetricInstance.fromJson(core.Map json_) - : this( - jsonInstance: json_['jsonInstance'] as core.String?, - ); +/// Spec for question answering relevance instance. +typedef GoogleCloudAiplatformV1QuestionAnsweringRelevanceInstance = $Instance02; - core.Map toJson() => { - if (jsonInstance != null) 'jsonInstance': jsonInstance!, - }; -} +/// Spec for question answering relevance result. 
+class GoogleCloudAiplatformV1QuestionAnsweringRelevanceResult { + /// Confidence for question answering relevance score. + /// + /// Output only. + core.double? confidence; -/// Spec for pointwise metric result. -class GoogleCloudAiplatformV1PointwiseMetricResult { - /// Explanation for pointwise metric score. + /// Explanation for question answering relevance score. /// /// Output only. core.String? explanation; - /// Pointwise metric score. + /// Question Answering Relevance score. /// /// Output only. core.double? score; - GoogleCloudAiplatformV1PointwiseMetricResult({ + GoogleCloudAiplatformV1QuestionAnsweringRelevanceResult({ + this.confidence, this.explanation, this.score, }); - GoogleCloudAiplatformV1PointwiseMetricResult.fromJson(core.Map json_) + GoogleCloudAiplatformV1QuestionAnsweringRelevanceResult.fromJson( + core.Map json_) : this( + confidence: (json_['confidence'] as core.num?)?.toDouble(), explanation: json_['explanation'] as core.String?, score: (json_['score'] as core.num?)?.toDouble(), ); core.Map toJson() => { + if (confidence != null) 'confidence': confidence!, if (explanation != null) 'explanation': explanation!, if (score != null) 'score': score!, }; } -/// Spec for pointwise metric. -class GoogleCloudAiplatformV1PointwiseMetricSpec { - /// Metric prompt template for pointwise metric. +/// Spec for question answering relevance metric. +class GoogleCloudAiplatformV1QuestionAnsweringRelevanceSpec { + /// Whether to use instance.reference to compute question answering relevance. /// - /// Required. - core.String? metricPromptTemplate; + /// Optional. + core.bool? useReference; - GoogleCloudAiplatformV1PointwiseMetricSpec({ - this.metricPromptTemplate, + /// Which version to use for evaluation. + /// + /// Optional. + core.int? version; + + GoogleCloudAiplatformV1QuestionAnsweringRelevanceSpec({ + this.useReference, + this.version, }); - GoogleCloudAiplatformV1PointwiseMetricSpec.fromJson(core.Map json_) + GoogleCloudAiplatformV1QuestionAnsweringRelevanceSpec.fromJson(core.Map json_) : this( - metricPromptTemplate: json_['metricPromptTemplate'] as core.String?, + useReference: json_['useReference'] as core.bool?, + version: json_['version'] as core.int?, ); core.Map toJson() => { - if (metricPromptTemplate != null) - 'metricPromptTemplate': metricPromptTemplate!, + if (useReference != null) 'useReference': useReference!, + if (version != null) 'version': version!, }; } -/// Represents a network port in a container. -class GoogleCloudAiplatformV1Port { - /// The number of the port to expose on the pod's IP address. - /// - /// Must be a valid port number, between 1 and 65535 inclusive. - core.int? containerPort; +/// Relevant contexts for one query. +class GoogleCloudAiplatformV1RagContexts { + /// All its contexts. + core.List? contexts; - GoogleCloudAiplatformV1Port({ - this.containerPort, + GoogleCloudAiplatformV1RagContexts({ + this.contexts, }); - GoogleCloudAiplatformV1Port.fromJson(core.Map json_) + GoogleCloudAiplatformV1RagContexts.fromJson(core.Map json_) : this( - containerPort: json_['containerPort'] as core.int?, + contexts: (json_['contexts'] as core.List?) + ?.map((value) => + GoogleCloudAiplatformV1RagContextsContext.fromJson( + value as core.Map)) + .toList(), ); core.Map toJson() => { - if (containerPort != null) 'containerPort': containerPort!, + if (contexts != null) 'contexts': contexts!, }; } -/// Assigns input data to training, validation, and test sets based on the value -/// of a provided key. -/// -/// Supported only for tabular Datasets. 
-class GoogleCloudAiplatformV1PredefinedSplit { - /// The key is a name of one of the Dataset's data columns. - /// - /// The value of the key (either the label's value or value in the column) - /// must be one of {`training`, `validation`, `test`}, and it defines to which - /// set the given piece of data is assigned. If for a piece of data the key is - /// not present or has an invalid value, that piece is ignored by the - /// pipeline. +/// A context of the query. +class GoogleCloudAiplatformV1RagContextsContext { + /// According to the underlying Vector DB and the selected metric type, the + /// score can be either the distance or the similarity between the query and + /// the context and its range depends on the metric type. /// - /// Required. - core.String? key; + /// For example, if the metric type is COSINE_DISTANCE, it represents the + /// distance between the query and the context. The larger the distance, the + /// less relevant the context is to the query. The range is \[0, 2\], while 0 + /// means the most relevant and 2 means the least relevant. + core.double? score; - GoogleCloudAiplatformV1PredefinedSplit({ - this.key, + /// The file display name. + core.String? sourceDisplayName; + + /// If the file is imported from Cloud Storage or Google Drive, source_uri + /// will be original file URI in Cloud Storage or Google Drive; if file is + /// uploaded, source_uri will be file display name. + core.String? sourceUri; + + /// The text chunk. + core.String? text; + + GoogleCloudAiplatformV1RagContextsContext({ + this.score, + this.sourceDisplayName, + this.sourceUri, + this.text, }); - GoogleCloudAiplatformV1PredefinedSplit.fromJson(core.Map json_) + GoogleCloudAiplatformV1RagContextsContext.fromJson(core.Map json_) : this( - key: json_['key'] as core.String?, + score: (json_['score'] as core.num?)?.toDouble(), + sourceDisplayName: json_['sourceDisplayName'] as core.String?, + sourceUri: json_['sourceUri'] as core.String?, + text: json_['text'] as core.String?, ); core.Map toJson() => { - if (key != null) 'key': key!, + if (score != null) 'score': score!, + if (sourceDisplayName != null) 'sourceDisplayName': sourceDisplayName!, + if (sourceUri != null) 'sourceUri': sourceUri!, + if (text != null) 'text': text!, }; } -/// Request message for PredictionService.Predict. -class GoogleCloudAiplatformV1PredictRequest { - /// The instances that are the input to the prediction call. +/// A RagCorpus is a RagFile container and a project can have multiple +/// RagCorpora. +class GoogleCloudAiplatformV1RagCorpus { + /// RagCorpus state. /// - /// A DeployedModel may have an upper limit on the number of instances it - /// supports per request, and when it is exceeded the prediction call errors - /// in case of AutoML Models, or, in case of customer created Models, the - /// behaviour is as documented by that Model. The schema of any single - /// instance may be specified via Endpoint's DeployedModels' Model's - /// PredictSchemata's instance_schema_uri. + /// Output only. + GoogleCloudAiplatformV1CorpusStatus? corpusStatus; + + /// Timestamp when this RagCorpus was created. + /// + /// Output only. + core.String? createTime; + + /// The description of the RagCorpus. + /// + /// Optional. + core.String? description; + + /// The display name of the RagCorpus. + /// + /// The name can be up to 128 characters long and can consist of any UTF-8 + /// characters. /// /// Required. + core.String? displayName; + + /// The resource name of the RagCorpus. 
/// - /// The values for Object must be JSON objects. It can consist of `num`, - /// `String`, `bool` and `null` as well as `Map` and `List` values. - core.List? instances; + /// Output only. + core.String? name; - /// The parameters that govern the prediction. + /// Timestamp when this RagCorpus was last updated. /// - /// The schema of the parameters may be specified via Endpoint's - /// DeployedModels' Model's PredictSchemata's parameters_schema_uri. + /// Output only. + core.String? updateTime; + + /// The config for the Vector DBs. /// - /// The values for Object must be JSON objects. It can consist of `num`, - /// `String`, `bool` and `null` as well as `Map` and `List` values. - core.Object? parameters; + /// Optional. Immutable. + GoogleCloudAiplatformV1RagVectorDbConfig? vectorDbConfig; - GoogleCloudAiplatformV1PredictRequest({ - this.instances, - this.parameters, + GoogleCloudAiplatformV1RagCorpus({ + this.corpusStatus, + this.createTime, + this.description, + this.displayName, + this.name, + this.updateTime, + this.vectorDbConfig, }); - GoogleCloudAiplatformV1PredictRequest.fromJson(core.Map json_) + GoogleCloudAiplatformV1RagCorpus.fromJson(core.Map json_) : this( - instances: json_.containsKey('instances') - ? json_['instances'] as core.List + corpusStatus: json_.containsKey('corpusStatus') + ? GoogleCloudAiplatformV1CorpusStatus.fromJson( + json_['corpusStatus'] as core.Map) + : null, + createTime: json_['createTime'] as core.String?, + description: json_['description'] as core.String?, + displayName: json_['displayName'] as core.String?, + name: json_['name'] as core.String?, + updateTime: json_['updateTime'] as core.String?, + vectorDbConfig: json_.containsKey('vectorDbConfig') + ? GoogleCloudAiplatformV1RagVectorDbConfig.fromJson( + json_['vectorDbConfig'] + as core.Map) : null, - parameters: json_['parameters'], ); core.Map toJson() => { - if (instances != null) 'instances': instances!, - if (parameters != null) 'parameters': parameters!, + if (corpusStatus != null) 'corpusStatus': corpusStatus!, + if (createTime != null) 'createTime': createTime!, + if (description != null) 'description': description!, + if (displayName != null) 'displayName': displayName!, + if (name != null) 'name': name!, + if (updateTime != null) 'updateTime': updateTime!, + if (vectorDbConfig != null) 'vectorDbConfig': vectorDbConfig!, }; } -/// Configuration for logging request-response to a BigQuery table. -class GoogleCloudAiplatformV1PredictRequestResponseLoggingConfig { - /// BigQuery table for logging. +/// Config for the embedding model to use for RAG. +class GoogleCloudAiplatformV1RagEmbeddingModelConfig { + /// The Vertex AI Prediction Endpoint that either refers to a publisher model + /// or an endpoint that is hosting a 1P fine-tuned text embedding model. /// - /// If only given a project, a new dataset will be created with name - /// `logging__` where will be made BigQuery-dataset-name compatible (e.g. most - /// special characters will become underscores). If no table name is given, a - /// new table will be created with name `request_response_logging` - GoogleCloudAiplatformV1BigQueryDestination? bigqueryDestination; - - /// If logging is enabled or not. - core.bool? enabled; + /// Endpoints hosting non-1P fine-tuned text embedding models are currently + /// not supported. This is used for dense vector search. + GoogleCloudAiplatformV1RagEmbeddingModelConfigVertexPredictionEndpoint? 
+ vertexPredictionEndpoint; - /// Percentage of requests to be logged, expressed as a fraction in - /// range(0,1\]. - core.double? samplingRate; - - GoogleCloudAiplatformV1PredictRequestResponseLoggingConfig({ - this.bigqueryDestination, - this.enabled, - this.samplingRate, + GoogleCloudAiplatformV1RagEmbeddingModelConfig({ + this.vertexPredictionEndpoint, }); - GoogleCloudAiplatformV1PredictRequestResponseLoggingConfig.fromJson( - core.Map json_) + GoogleCloudAiplatformV1RagEmbeddingModelConfig.fromJson(core.Map json_) : this( - bigqueryDestination: json_.containsKey('bigqueryDestination') - ? GoogleCloudAiplatformV1BigQueryDestination.fromJson( - json_['bigqueryDestination'] + vertexPredictionEndpoint: json_ + .containsKey('vertexPredictionEndpoint') + ? GoogleCloudAiplatformV1RagEmbeddingModelConfigVertexPredictionEndpoint + .fromJson(json_['vertexPredictionEndpoint'] as core.Map) : null, - enabled: json_['enabled'] as core.bool?, - samplingRate: (json_['samplingRate'] as core.num?)?.toDouble(), ); core.Map toJson() => { - if (bigqueryDestination != null) - 'bigqueryDestination': bigqueryDestination!, - if (enabled != null) 'enabled': enabled!, - if (samplingRate != null) 'samplingRate': samplingRate!, + if (vertexPredictionEndpoint != null) + 'vertexPredictionEndpoint': vertexPredictionEndpoint!, }; } -/// Response message for PredictionService.Predict. -class GoogleCloudAiplatformV1PredictResponse { - /// ID of the Endpoint's DeployedModel that served this prediction. - core.String? deployedModelId; - - /// Request-level metadata returned by the model. - /// - /// The metadata type will be dependent upon the model implementation. +/// Config representing a model hosted on Vertex Prediction Endpoint. +class GoogleCloudAiplatformV1RagEmbeddingModelConfigVertexPredictionEndpoint { + /// The endpoint resource name. /// - /// Output only. + /// Format: + /// `projects/{project}/locations/{location}/publishers/{publisher}/models/{model}` + /// or `projects/{project}/locations/{location}/endpoints/{endpoint}` /// - /// The values for Object must be JSON objects. It can consist of `num`, - /// `String`, `bool` and `null` as well as `Map` and `List` values. - core.Object? metadata; + /// Required. + core.String? endpoint; - /// The resource name of the Model which is deployed as the DeployedModel that - /// this prediction hits. + /// The resource name of the model that is deployed on the endpoint. + /// + /// Present only when the endpoint is not a publisher model. Pattern: + /// `projects/{project}/locations/{location}/models/{model}` /// /// Output only. core.String? model; - /// The display name of the Model which is deployed as the DeployedModel that - /// this prediction hits. + /// Version ID of the model that is deployed on the endpoint. /// - /// Output only. - core.String? modelDisplayName; - - /// The version ID of the Model which is deployed as the DeployedModel that - /// this prediction hits. + /// Present only when the endpoint is not a publisher model. /// /// Output only. core.String? modelVersionId; - /// The predictions that are the output of the predictions call. - /// - /// The schema of any single prediction may be specified via Endpoint's - /// DeployedModels' Model's PredictSchemata's prediction_schema_uri. - /// - /// The values for Object must be JSON objects. It can consist of `num`, - /// `String`, `bool` and `null` as well as `Map` and `List` values. - core.List? 
predictions; - - GoogleCloudAiplatformV1PredictResponse({ - this.deployedModelId, - this.metadata, + GoogleCloudAiplatformV1RagEmbeddingModelConfigVertexPredictionEndpoint({ + this.endpoint, this.model, - this.modelDisplayName, this.modelVersionId, - this.predictions, }); - GoogleCloudAiplatformV1PredictResponse.fromJson(core.Map json_) + GoogleCloudAiplatformV1RagEmbeddingModelConfigVertexPredictionEndpoint.fromJson( + core.Map json_) : this( - deployedModelId: json_['deployedModelId'] as core.String?, - metadata: json_['metadata'], + endpoint: json_['endpoint'] as core.String?, model: json_['model'] as core.String?, - modelDisplayName: json_['modelDisplayName'] as core.String?, modelVersionId: json_['modelVersionId'] as core.String?, - predictions: json_.containsKey('predictions') - ? json_['predictions'] as core.List - : null, ); core.Map toJson() => { - if (deployedModelId != null) 'deployedModelId': deployedModelId!, - if (metadata != null) 'metadata': metadata!, + if (endpoint != null) 'endpoint': endpoint!, if (model != null) 'model': model!, - if (modelDisplayName != null) 'modelDisplayName': modelDisplayName!, if (modelVersionId != null) 'modelVersionId': modelVersionId!, - if (predictions != null) 'predictions': predictions!, }; } -/// Contains the schemata used in Model's predictions and explanations via -/// PredictionService.Predict, PredictionService.Explain and BatchPredictionJob. -class GoogleCloudAiplatformV1PredictSchemata { - /// Points to a YAML file stored on Google Cloud Storage describing the format - /// of a single instance, which are used in PredictRequest.instances, - /// ExplainRequest.instances and BatchPredictionJob.input_config. +/// A RagFile contains user data for chunking, embedding and indexing. +class GoogleCloudAiplatformV1RagFile { + /// Timestamp when this RagFile was created. /// - /// The schema is defined as an OpenAPI 3.0.2 - /// [Schema Object](https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.2.md#schemaObject). - /// AutoML Models always have this field populated by Vertex AI. Note: The URI - /// given on output will be immutable and probably different, including the - /// URI scheme, than the one given on input. The output URI will point to a - /// location where the user only has a read access. + /// Output only. + core.String? createTime; + + /// The description of the RagFile. /// - /// Immutable. - core.String? instanceSchemaUri; + /// Optional. + core.String? description; - /// Points to a YAML file stored on Google Cloud Storage describing the - /// parameters of prediction and explanation via PredictRequest.parameters, - /// ExplainRequest.parameters and BatchPredictionJob.model_parameters. + /// The RagFile is encapsulated and uploaded in the UploadRagFile request. /// - /// The schema is defined as an OpenAPI 3.0.2 - /// [Schema Object](https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.2.md#schemaObject). - /// AutoML Models always have this field populated by Vertex AI, if no - /// parameters are supported, then it is set to an empty string. Note: The URI - /// given on output will be immutable and probably different, including the - /// URI scheme, than the one given on input. The output URI will point to a - /// location where the user only has a read access. + /// Output only. + GoogleCloudAiplatformV1DirectUploadSource? directUploadSource; + + /// The display name of the RagFile. /// - /// Immutable. - core.String? 
parametersSchemaUri; + /// The name can be up to 128 characters long and can consist of any UTF-8 + /// characters. + /// + /// Required. + core.String? displayName; - /// Points to a YAML file stored on Google Cloud Storage describing the format - /// of a single prediction produced by this Model, which are returned via - /// PredictResponse.predictions, ExplainResponse.explanations, and - /// BatchPredictionJob.output_config. + /// State of the RagFile. /// - /// The schema is defined as an OpenAPI 3.0.2 - /// [Schema Object](https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.2.md#schemaObject). - /// AutoML Models always have this field populated by Vertex AI. Note: The URI - /// given on output will be immutable and probably different, including the - /// URI scheme, than the one given on input. The output URI will point to a - /// location where the user only has a read access. + /// Output only. + GoogleCloudAiplatformV1FileStatus? fileStatus; + + /// Google Cloud Storage location of the RagFile. /// - /// Immutable. - core.String? predictionSchemaUri; + /// It does not support wildcards in the Cloud Storage uri for now. + /// + /// Output only. + GoogleCloudAiplatformV1GcsSource? gcsSource; - GoogleCloudAiplatformV1PredictSchemata({ - this.instanceSchemaUri, - this.parametersSchemaUri, - this.predictionSchemaUri, + /// Google Drive location. + /// + /// Supports importing individual files as well as Google Drive folders. + /// + /// Output only. + GoogleCloudAiplatformV1GoogleDriveSource? googleDriveSource; + + /// The RagFile is imported from a Jira query. + GoogleCloudAiplatformV1JiraSource? jiraSource; + + /// The resource name of the RagFile. + /// + /// Output only. + core.String? name; + + /// The RagFile is imported from a SharePoint source. + GoogleCloudAiplatformV1SharePointSources? sharePointSources; + + /// The RagFile is imported from a Slack channel. + GoogleCloudAiplatformV1SlackSource? slackSource; + + /// Timestamp when this RagFile was last updated. + /// + /// Output only. + core.String? updateTime; + + GoogleCloudAiplatformV1RagFile({ + this.createTime, + this.description, + this.directUploadSource, + this.displayName, + this.fileStatus, + this.gcsSource, + this.googleDriveSource, + this.jiraSource, + this.name, + this.sharePointSources, + this.slackSource, + this.updateTime, }); - GoogleCloudAiplatformV1PredictSchemata.fromJson(core.Map json_) + GoogleCloudAiplatformV1RagFile.fromJson(core.Map json_) : this( - instanceSchemaUri: json_['instanceSchemaUri'] as core.String?, - parametersSchemaUri: json_['parametersSchemaUri'] as core.String?, - predictionSchemaUri: json_['predictionSchemaUri'] as core.String?, + createTime: json_['createTime'] as core.String?, + description: json_['description'] as core.String?, + directUploadSource: json_.containsKey('directUploadSource') + ? GoogleCloudAiplatformV1DirectUploadSource.fromJson( + json_['directUploadSource'] + as core.Map) + : null, + displayName: json_['displayName'] as core.String?, + fileStatus: json_.containsKey('fileStatus') + ? GoogleCloudAiplatformV1FileStatus.fromJson( + json_['fileStatus'] as core.Map) + : null, + gcsSource: json_.containsKey('gcsSource') + ? GoogleCloudAiplatformV1GcsSource.fromJson( + json_['gcsSource'] as core.Map) + : null, + googleDriveSource: json_.containsKey('googleDriveSource') + ? GoogleCloudAiplatformV1GoogleDriveSource.fromJson( + json_['googleDriveSource'] + as core.Map) + : null, + jiraSource: json_.containsKey('jiraSource') + ? 
GoogleCloudAiplatformV1JiraSource.fromJson( + json_['jiraSource'] as core.Map) + : null, + name: json_['name'] as core.String?, + sharePointSources: json_.containsKey('sharePointSources') + ? GoogleCloudAiplatformV1SharePointSources.fromJson( + json_['sharePointSources'] + as core.Map) + : null, + slackSource: json_.containsKey('slackSource') + ? GoogleCloudAiplatformV1SlackSource.fromJson( + json_['slackSource'] as core.Map) + : null, + updateTime: json_['updateTime'] as core.String?, ); core.Map toJson() => { - if (instanceSchemaUri != null) 'instanceSchemaUri': instanceSchemaUri!, - if (parametersSchemaUri != null) - 'parametersSchemaUri': parametersSchemaUri!, - if (predictionSchemaUri != null) - 'predictionSchemaUri': predictionSchemaUri!, - }; -} - -/// Preset configuration for example-based explanations -class GoogleCloudAiplatformV1Presets { - /// The modality of the uploaded model, which automatically configures the - /// distance measurement and feature normalization for the underlying example - /// index and queries. - /// - /// If your model does not precisely fit one of these types, it is okay to - /// choose the closest type. - /// Possible string values are: - /// - "MODALITY_UNSPECIFIED" : Should not be set. Added as a recommended best - /// practice for enums - /// - "IMAGE" : IMAGE modality - /// - "TEXT" : TEXT modality - /// - "TABULAR" : TABULAR modality - core.String? modality; - - /// Preset option controlling parameters for speed-precision trade-off when - /// querying for examples. - /// - /// If omitted, defaults to `PRECISE`. - /// Possible string values are: - /// - "PRECISE" : More precise neighbors as a trade-off against slower - /// response. - /// - "FAST" : Faster response as a trade-off against less precise neighbors. - core.String? query; + if (createTime != null) 'createTime': createTime!, + if (description != null) 'description': description!, + if (directUploadSource != null) + 'directUploadSource': directUploadSource!, + if (displayName != null) 'displayName': displayName!, + if (fileStatus != null) 'fileStatus': fileStatus!, + if (gcsSource != null) 'gcsSource': gcsSource!, + if (googleDriveSource != null) 'googleDriveSource': googleDriveSource!, + if (jiraSource != null) 'jiraSource': jiraSource!, + if (name != null) 'name': name!, + if (sharePointSources != null) 'sharePointSources': sharePointSources!, + if (slackSource != null) 'slackSource': slackSource!, + if (updateTime != null) 'updateTime': updateTime!, + }; +} - GoogleCloudAiplatformV1Presets({ - this.modality, - this.query, +/// Specifies the size and overlap of chunks for RagFiles. +class GoogleCloudAiplatformV1RagFileChunkingConfig { + /// Specifies the fixed length chunking config. + GoogleCloudAiplatformV1RagFileChunkingConfigFixedLengthChunking? + fixedLengthChunking; + + GoogleCloudAiplatformV1RagFileChunkingConfig({ + this.fixedLengthChunking, }); - GoogleCloudAiplatformV1Presets.fromJson(core.Map json_) + GoogleCloudAiplatformV1RagFileChunkingConfig.fromJson(core.Map json_) : this( - modality: json_['modality'] as core.String?, - query: json_['query'] as core.String?, + fixedLengthChunking: json_.containsKey('fixedLengthChunking') + ? 
GoogleCloudAiplatformV1RagFileChunkingConfigFixedLengthChunking + .fromJson(json_['fixedLengthChunking'] + as core.Map) + : null, ); core.Map toJson() => { - if (modality != null) 'modality': modality!, - if (query != null) 'query': query!, + if (fixedLengthChunking != null) + 'fixedLengthChunking': fixedLengthChunking!, }; } -/// PrivateEndpoints proto is used to provide paths for users to send requests -/// privately. -/// -/// To send request via private service access, use predict_http_uri, -/// explain_http_uri or health_http_uri. To send request via private service -/// connect, use service_attachment. -class GoogleCloudAiplatformV1PrivateEndpoints { - /// Http(s) path to send explain requests. - /// - /// Output only. - core.String? explainHttpUri; - - /// Http(s) path to send health check requests. - /// - /// Output only. - core.String? healthHttpUri; - - /// Http(s) path to send prediction requests. - /// - /// Output only. - core.String? predictHttpUri; +/// Specifies the fixed length chunking config. +class GoogleCloudAiplatformV1RagFileChunkingConfigFixedLengthChunking { + /// The overlap between chunks. + core.int? chunkOverlap; - /// The name of the service attachment resource. - /// - /// Populated if private service connect is enabled. - /// - /// Output only. - core.String? serviceAttachment; + /// The size of the chunks. + core.int? chunkSize; - GoogleCloudAiplatformV1PrivateEndpoints({ - this.explainHttpUri, - this.healthHttpUri, - this.predictHttpUri, - this.serviceAttachment, + GoogleCloudAiplatformV1RagFileChunkingConfigFixedLengthChunking({ + this.chunkOverlap, + this.chunkSize, }); - GoogleCloudAiplatformV1PrivateEndpoints.fromJson(core.Map json_) + GoogleCloudAiplatformV1RagFileChunkingConfigFixedLengthChunking.fromJson( + core.Map json_) : this( - explainHttpUri: json_['explainHttpUri'] as core.String?, - healthHttpUri: json_['healthHttpUri'] as core.String?, - predictHttpUri: json_['predictHttpUri'] as core.String?, - serviceAttachment: json_['serviceAttachment'] as core.String?, + chunkOverlap: json_['chunkOverlap'] as core.int?, + chunkSize: json_['chunkSize'] as core.int?, ); core.Map toJson() => { - if (explainHttpUri != null) 'explainHttpUri': explainHttpUri!, - if (healthHttpUri != null) 'healthHttpUri': healthHttpUri!, - if (predictHttpUri != null) 'predictHttpUri': predictHttpUri!, - if (serviceAttachment != null) 'serviceAttachment': serviceAttachment!, + if (chunkOverlap != null) 'chunkOverlap': chunkOverlap!, + if (chunkSize != null) 'chunkSize': chunkSize!, }; } -/// Represents configuration for private service connect. -class GoogleCloudAiplatformV1PrivateServiceConnectConfig { - /// If true, expose the IndexEndpoint via private service connect. - /// - /// Required. - core.bool? enablePrivateServiceConnect; - - /// A list of Projects from which the forwarding rule will target the service - /// attachment. - core.List? projectAllowlist; - - /// The name of the generated service attachment resource. - /// - /// This is only populated if the endpoint is deployed with - /// PrivateServiceConnect. - /// - /// Output only. - core.String? serviceAttachment; +/// Specifies the transformation config for RagFiles. +class GoogleCloudAiplatformV1RagFileTransformationConfig { + /// Specifies the chunking config for RagFiles. + GoogleCloudAiplatformV1RagFileChunkingConfig? 
ragFileChunkingConfig; - GoogleCloudAiplatformV1PrivateServiceConnectConfig({ - this.enablePrivateServiceConnect, - this.projectAllowlist, - this.serviceAttachment, + GoogleCloudAiplatformV1RagFileTransformationConfig({ + this.ragFileChunkingConfig, }); - GoogleCloudAiplatformV1PrivateServiceConnectConfig.fromJson(core.Map json_) + GoogleCloudAiplatformV1RagFileTransformationConfig.fromJson(core.Map json_) : this( - enablePrivateServiceConnect: - json_['enablePrivateServiceConnect'] as core.bool?, - projectAllowlist: (json_['projectAllowlist'] as core.List?) - ?.map((value) => value as core.String) - .toList(), - serviceAttachment: json_['serviceAttachment'] as core.String?, + ragFileChunkingConfig: json_.containsKey('ragFileChunkingConfig') + ? GoogleCloudAiplatformV1RagFileChunkingConfig.fromJson( + json_['ragFileChunkingConfig'] + as core.Map) + : null, ); core.Map toJson() => { - if (enablePrivateServiceConnect != null) - 'enablePrivateServiceConnect': enablePrivateServiceConnect!, - if (projectAllowlist != null) 'projectAllowlist': projectAllowlist!, - if (serviceAttachment != null) 'serviceAttachment': serviceAttachment!, + if (ragFileChunkingConfig != null) + 'ragFileChunkingConfig': ragFileChunkingConfig!, }; } -/// Probe describes a health check to be performed against a container to -/// determine whether it is alive or ready to receive traffic. -class GoogleCloudAiplatformV1Probe { - /// ExecAction probes the health of a container by executing a command. - GoogleCloudAiplatformV1ProbeExecAction? exec; - - /// How often (in seconds) to perform the probe. +/// A query to retrieve relevant contexts. +class GoogleCloudAiplatformV1RagQuery { + /// The retrieval config for the query. /// - /// Default to 10 seconds. Minimum value is 1. Must be less than - /// timeout_seconds. Maps to Kubernetes probe argument 'periodSeconds'. - core.int? periodSeconds; + /// Optional. + GoogleCloudAiplatformV1RagRetrievalConfig? ragRetrievalConfig; - /// Number of seconds after which the probe times out. + /// The query in text format to get relevant contexts. /// - /// Defaults to 1 second. Minimum value is 1. Must be greater or equal to - /// period_seconds. Maps to Kubernetes probe argument 'timeoutSeconds'. - core.int? timeoutSeconds; + /// Optional. + core.String? text; - GoogleCloudAiplatformV1Probe({ - this.exec, - this.periodSeconds, - this.timeoutSeconds, + GoogleCloudAiplatformV1RagQuery({ + this.ragRetrievalConfig, + this.text, }); - GoogleCloudAiplatformV1Probe.fromJson(core.Map json_) + GoogleCloudAiplatformV1RagQuery.fromJson(core.Map json_) : this( - exec: json_.containsKey('exec') - ? GoogleCloudAiplatformV1ProbeExecAction.fromJson( - json_['exec'] as core.Map) + ragRetrievalConfig: json_.containsKey('ragRetrievalConfig') + ? GoogleCloudAiplatformV1RagRetrievalConfig.fromJson( + json_['ragRetrievalConfig'] + as core.Map) : null, - periodSeconds: json_['periodSeconds'] as core.int?, - timeoutSeconds: json_['timeoutSeconds'] as core.int?, + text: json_['text'] as core.String?, ); core.Map toJson() => { - if (exec != null) 'exec': exec!, - if (periodSeconds != null) 'periodSeconds': periodSeconds!, - if (timeoutSeconds != null) 'timeoutSeconds': timeoutSeconds!, + if (ragRetrievalConfig != null) + 'ragRetrievalConfig': ragRetrievalConfig!, + if (text != null) 'text': text!, }; } -/// ExecAction specifies a command to execute. 
-typedef GoogleCloudAiplatformV1ProbeExecAction = $ExecAction; - -/// PscAutomatedEndpoints defines the output of the forwarding rule -/// automatically created by each PscAutomationConfig. -class GoogleCloudAiplatformV1PscAutomatedEndpoints { - /// Ip Address created by the automated forwarding rule. - core.String? matchAddress; - - /// Corresponding network in pscAutomationConfigs. - core.String? network; +/// Specifies the context retrieval config. +class GoogleCloudAiplatformV1RagRetrievalConfig { + /// Config for filters. + /// + /// Optional. + GoogleCloudAiplatformV1RagRetrievalConfigFilter? filter; - /// Corresponding project_id in pscAutomationConfigs - core.String? projectId; + /// The number of contexts to retrieve. + /// + /// Optional. + core.int? topK; - GoogleCloudAiplatformV1PscAutomatedEndpoints({ - this.matchAddress, - this.network, - this.projectId, + GoogleCloudAiplatformV1RagRetrievalConfig({ + this.filter, + this.topK, }); - GoogleCloudAiplatformV1PscAutomatedEndpoints.fromJson(core.Map json_) + GoogleCloudAiplatformV1RagRetrievalConfig.fromJson(core.Map json_) : this( - matchAddress: json_['matchAddress'] as core.String?, - network: json_['network'] as core.String?, - projectId: json_['projectId'] as core.String?, + filter: json_.containsKey('filter') + ? GoogleCloudAiplatformV1RagRetrievalConfigFilter.fromJson( + json_['filter'] as core.Map) + : null, + topK: json_['topK'] as core.int?, ); core.Map toJson() => { - if (matchAddress != null) 'matchAddress': matchAddress!, - if (network != null) 'network': network!, - if (projectId != null) 'projectId': projectId!, + if (filter != null) 'filter': filter!, + if (topK != null) 'topK': topK!, }; } -/// Configuration for PSC-I. -typedef GoogleCloudAiplatformV1PscInterfaceConfig = $Empty; - -/// A Model Garden Publisher Model. -class GoogleCloudAiplatformV1PublisherModel { - /// Additional information about the model's Frameworks. +/// Config for filters. +class GoogleCloudAiplatformV1RagRetrievalConfigFilter { + /// String for metadata filtering. /// /// Optional. - core.List? frameworks; + core.String? metadataFilter; - /// Indicates the launch stage of the model. + /// Only returns contexts with vector distance smaller than the threshold. /// /// Optional. - /// Possible string values are: - /// - "LAUNCH_STAGE_UNSPECIFIED" : The model launch stage is unspecified. - /// - "EXPERIMENTAL" : Used to indicate the PublisherModel is at Experimental - /// launch stage, available to a small set of customers. - /// - "PRIVATE_PREVIEW" : Used to indicate the PublisherModel is at Private - /// Preview launch stage, only available to a small set of customers, although - /// a larger set of customers than an Experimental launch. Previews are the - /// first launch stage used to get feedback from customers. - /// - "PUBLIC_PREVIEW" : Used to indicate the PublisherModel is at Public - /// Preview launch stage, available to all customers, although not supported - /// for production workloads. - /// - "GA" : Used to indicate the PublisherModel is at GA launch stage, - /// available to all customers and ready for production workload. - core.String? launchStage; + core.double? vectorDistanceThreshold; - /// The resource name of the PublisherModel. + /// Only returns contexts with vector similarity larger than the threshold. /// - /// Output only. - core.String? name; + /// Optional. + core.double? vectorSimilarityThreshold; - /// Indicates the open source category of the publisher model. - /// - /// Required. 
- /// Possible string values are: - /// - "OPEN_SOURCE_CATEGORY_UNSPECIFIED" : The open source category is - /// unspecified, which should not be used. - /// - "PROPRIETARY" : Used to indicate the PublisherModel is not open sourced. - /// - "GOOGLE_OWNED_OSS_WITH_GOOGLE_CHECKPOINT" : Used to indicate the - /// PublisherModel is a Google-owned open source model w/ Google checkpoint. - /// - "THIRD_PARTY_OWNED_OSS_WITH_GOOGLE_CHECKPOINT" : Used to indicate the - /// PublisherModel is a 3p-owned open source model w/ Google checkpoint. - /// - "GOOGLE_OWNED_OSS" : Used to indicate the PublisherModel is a - /// Google-owned pure open source model. - /// - "THIRD_PARTY_OWNED_OSS" : Used to indicate the PublisherModel is a - /// 3p-owned pure open source model. - core.String? openSourceCategory; + GoogleCloudAiplatformV1RagRetrievalConfigFilter({ + this.metadataFilter, + this.vectorDistanceThreshold, + this.vectorSimilarityThreshold, + }); - /// The schemata that describes formats of the PublisherModel's predictions - /// and explanations as given and returned via PredictionService.Predict. - /// - /// Optional. - GoogleCloudAiplatformV1PredictSchemata? predictSchemata; + GoogleCloudAiplatformV1RagRetrievalConfigFilter.fromJson(core.Map json_) + : this( + metadataFilter: json_['metadataFilter'] as core.String?, + vectorDistanceThreshold: + (json_['vectorDistanceThreshold'] as core.num?)?.toDouble(), + vectorSimilarityThreshold: + (json_['vectorSimilarityThreshold'] as core.num?)?.toDouble(), + ); - /// Used to indicate this model has a publisher model and provide the template - /// of the publisher model resource name. - /// - /// Optional. Output only. Immutable. - core.String? publisherModelTemplate; + core.Map toJson() => { + if (metadataFilter != null) 'metadataFilter': metadataFilter!, + if (vectorDistanceThreshold != null) + 'vectorDistanceThreshold': vectorDistanceThreshold!, + if (vectorSimilarityThreshold != null) + 'vectorSimilarityThreshold': vectorSimilarityThreshold!, + }; +} - /// Supported call-to-action options. - /// - /// Optional. - GoogleCloudAiplatformV1PublisherModelCallToAction? supportedActions; +/// Config for the Vector DB to use for RAG. +class GoogleCloudAiplatformV1RagVectorDbConfig { + /// Authentication config for the chosen Vector DB. + GoogleCloudAiplatformV1ApiAuth? apiAuth; - /// The version ID of the PublisherModel. - /// - /// A new version is committed when a new model version is uploaded under an - /// existing model id. It is an auto-incrementing decimal number in string - /// representation. - /// - /// Output only. Immutable. - core.String? versionId; + /// The config for the Pinecone. + GoogleCloudAiplatformV1RagVectorDbConfigPinecone? pinecone; - /// Indicates the state of the model version. + /// The embedding model config of the Vector DB. /// - /// Optional. - /// Possible string values are: - /// - "VERSION_STATE_UNSPECIFIED" : The version state is unspecified. - /// - "VERSION_STATE_STABLE" : Used to indicate the version is stable. - /// - "VERSION_STATE_UNSTABLE" : Used to indicate the version is unstable. - core.String? versionState; + /// Optional. Immutable. + GoogleCloudAiplatformV1RagEmbeddingModelConfig? ragEmbeddingModelConfig; - GoogleCloudAiplatformV1PublisherModel({ - this.frameworks, - this.launchStage, - this.name, - this.openSourceCategory, - this.predictSchemata, - this.publisherModelTemplate, - this.supportedActions, - this.versionId, - this.versionState, + /// The config for the RAG-managed Vector DB. 
+ GoogleCloudAiplatformV1RagVectorDbConfigRagManagedDb? ragManagedDb; + + /// The config for the Vertex Vector Search. + GoogleCloudAiplatformV1RagVectorDbConfigVertexVectorSearch? + vertexVectorSearch; + + GoogleCloudAiplatformV1RagVectorDbConfig({ + this.apiAuth, + this.pinecone, + this.ragEmbeddingModelConfig, + this.ragManagedDb, + this.vertexVectorSearch, }); - GoogleCloudAiplatformV1PublisherModel.fromJson(core.Map json_) + GoogleCloudAiplatformV1RagVectorDbConfig.fromJson(core.Map json_) : this( - frameworks: (json_['frameworks'] as core.List?) - ?.map((value) => value as core.String) - .toList(), - launchStage: json_['launchStage'] as core.String?, - name: json_['name'] as core.String?, - openSourceCategory: json_['openSourceCategory'] as core.String?, - predictSchemata: json_.containsKey('predictSchemata') - ? GoogleCloudAiplatformV1PredictSchemata.fromJson( - json_['predictSchemata'] + apiAuth: json_.containsKey('apiAuth') + ? GoogleCloudAiplatformV1ApiAuth.fromJson( + json_['apiAuth'] as core.Map) + : null, + pinecone: json_.containsKey('pinecone') + ? GoogleCloudAiplatformV1RagVectorDbConfigPinecone.fromJson( + json_['pinecone'] as core.Map) + : null, + ragEmbeddingModelConfig: json_.containsKey('ragEmbeddingModelConfig') + ? GoogleCloudAiplatformV1RagEmbeddingModelConfig.fromJson( + json_['ragEmbeddingModelConfig'] as core.Map) : null, - publisherModelTemplate: - json_['publisherModelTemplate'] as core.String?, - supportedActions: json_.containsKey('supportedActions') - ? GoogleCloudAiplatformV1PublisherModelCallToAction.fromJson( - json_['supportedActions'] + ragManagedDb: json_.containsKey('ragManagedDb') + ? GoogleCloudAiplatformV1RagVectorDbConfigRagManagedDb.fromJson( + json_['ragManagedDb'] as core.Map) + : null, + vertexVectorSearch: json_.containsKey('vertexVectorSearch') + ? GoogleCloudAiplatformV1RagVectorDbConfigVertexVectorSearch + .fromJson(json_['vertexVectorSearch'] as core.Map) : null, - versionId: json_['versionId'] as core.String?, - versionState: json_['versionState'] as core.String?, ); core.Map toJson() => { - if (frameworks != null) 'frameworks': frameworks!, - if (launchStage != null) 'launchStage': launchStage!, - if (name != null) 'name': name!, - if (openSourceCategory != null) - 'openSourceCategory': openSourceCategory!, - if (predictSchemata != null) 'predictSchemata': predictSchemata!, - if (publisherModelTemplate != null) - 'publisherModelTemplate': publisherModelTemplate!, - if (supportedActions != null) 'supportedActions': supportedActions!, - if (versionId != null) 'versionId': versionId!, - if (versionState != null) 'versionState': versionState!, + if (apiAuth != null) 'apiAuth': apiAuth!, + if (pinecone != null) 'pinecone': pinecone!, + if (ragEmbeddingModelConfig != null) + 'ragEmbeddingModelConfig': ragEmbeddingModelConfig!, + if (ragManagedDb != null) 'ragManagedDb': ragManagedDb!, + if (vertexVectorSearch != null) + 'vertexVectorSearch': vertexVectorSearch!, }; } -/// Actions could take on this Publisher Model. -class GoogleCloudAiplatformV1PublisherModelCallToAction { - /// Create application using the PublisherModel. +/// The config for the Pinecone. +class GoogleCloudAiplatformV1RagVectorDbConfigPinecone { + /// Pinecone index name. /// - /// Optional. - GoogleCloudAiplatformV1PublisherModelCallToActionRegionalResourceReferences? - createApplication; + /// This value cannot be changed after it's set. + core.String? indexName; - /// Deploy the PublisherModel to Vertex Endpoint. - /// - /// Optional. 
- GoogleCloudAiplatformV1PublisherModelCallToActionDeploy? deploy; + GoogleCloudAiplatformV1RagVectorDbConfigPinecone({ + this.indexName, + }); - /// Deploy PublisherModel to Google Kubernetes Engine. - /// - /// Optional. - GoogleCloudAiplatformV1PublisherModelCallToActionDeployGke? deployGke; + GoogleCloudAiplatformV1RagVectorDbConfigPinecone.fromJson(core.Map json_) + : this( + indexName: json_['indexName'] as core.String?, + ); - /// Multiple setups to deploy the PublisherModel to Vertex Endpoint. - /// - /// Optional. - GoogleCloudAiplatformV1PublisherModelCallToActionDeployVertex? - multiDeployVertex; + core.Map toJson() => { + if (indexName != null) 'indexName': indexName!, + }; +} - /// Open evaluation pipeline of the PublisherModel. - /// - /// Optional. - GoogleCloudAiplatformV1PublisherModelCallToActionRegionalResourceReferences? - openEvaluationPipeline; +/// The config for the default RAG-managed Vector DB. +typedef GoogleCloudAiplatformV1RagVectorDbConfigRagManagedDb = $Empty; - /// Open fine-tuning pipeline of the PublisherModel. +/// The config for the Vertex Vector Search. +class GoogleCloudAiplatformV1RagVectorDbConfigVertexVectorSearch { + /// The resource name of the Index. /// - /// Optional. - GoogleCloudAiplatformV1PublisherModelCallToActionRegionalResourceReferences? - openFineTuningPipeline; + /// Format: `projects/{project}/locations/{location}/indexes/{index}` + core.String? index; - /// Open fine-tuning pipelines of the PublisherModel. + /// The resource name of the Index Endpoint. /// - /// Optional. - GoogleCloudAiplatformV1PublisherModelCallToActionOpenFineTuningPipelines? - openFineTuningPipelines; + /// Format: + /// `projects/{project}/locations/{location}/indexEndpoints/{index_endpoint}` + core.String? indexEndpoint; - /// Open in Generation AI Studio. - /// - /// Optional. - GoogleCloudAiplatformV1PublisherModelCallToActionRegionalResourceReferences? - openGenerationAiStudio; + GoogleCloudAiplatformV1RagVectorDbConfigVertexVectorSearch({ + this.index, + this.indexEndpoint, + }); - /// Open Genie / Playground. - /// - /// Optional. - GoogleCloudAiplatformV1PublisherModelCallToActionRegionalResourceReferences? - openGenie; + GoogleCloudAiplatformV1RagVectorDbConfigVertexVectorSearch.fromJson( + core.Map json_) + : this( + index: json_['index'] as core.String?, + indexEndpoint: json_['indexEndpoint'] as core.String?, + ); - /// Open notebook of the PublisherModel. - /// - /// Optional. - GoogleCloudAiplatformV1PublisherModelCallToActionRegionalResourceReferences? - openNotebook; + core.Map toJson() => { + if (index != null) 'index': index!, + if (indexEndpoint != null) 'indexEndpoint': indexEndpoint!, + }; +} - /// Open notebooks of the PublisherModel. +/// Request message for PredictionService.RawPredict. +class GoogleCloudAiplatformV1RawPredictRequest { + /// The prediction input. /// - /// Optional. - GoogleCloudAiplatformV1PublisherModelCallToActionOpenNotebooks? openNotebooks; + /// Supports HTTP headers and arbitrary data payload. A DeployedModel may have + /// an upper limit on the number of instances it supports per request. When + /// this limit it is exceeded for an AutoML model, the RawPredict method + /// returns an error. When this limit is exceeded for a custom-trained model, + /// the behavior varies depending on the model. You can specify the schema for + /// each instance in the predict_schemata.instance_schema_uri field when you + /// create a Model. 
This schema applies when you deploy the `Model` as a + /// `DeployedModel` to an Endpoint and use the `RawPredict` method. + GoogleApiHttpBody? httpBody; - /// Open prompt-tuning pipeline of the PublisherModel. - /// - /// Optional. - GoogleCloudAiplatformV1PublisherModelCallToActionRegionalResourceReferences? - openPromptTuningPipeline; + GoogleCloudAiplatformV1RawPredictRequest({ + this.httpBody, + }); - /// Request for access. - /// - /// Optional. - GoogleCloudAiplatformV1PublisherModelCallToActionRegionalResourceReferences? - requestAccess; + GoogleCloudAiplatformV1RawPredictRequest.fromJson(core.Map json_) + : this( + httpBody: json_.containsKey('httpBody') + ? GoogleApiHttpBody.fromJson( + json_['httpBody'] as core.Map) + : null, + ); - /// To view Rest API docs. + core.Map toJson() => { + if (httpBody != null) 'httpBody': httpBody!, + }; +} + +/// Configuration for the Ray OSS Logs. +class GoogleCloudAiplatformV1RayLogsSpec { + /// Flag to disable the export of Ray OSS logs to Cloud Logging. /// /// Optional. - GoogleCloudAiplatformV1PublisherModelCallToActionViewRestApi? viewRestApi; + core.bool? disabled; - GoogleCloudAiplatformV1PublisherModelCallToAction({ - this.createApplication, - this.deploy, - this.deployGke, - this.multiDeployVertex, - this.openEvaluationPipeline, - this.openFineTuningPipeline, - this.openFineTuningPipelines, - this.openGenerationAiStudio, - this.openGenie, - this.openNotebook, - this.openNotebooks, - this.openPromptTuningPipeline, - this.requestAccess, - this.viewRestApi, + GoogleCloudAiplatformV1RayLogsSpec({ + this.disabled, }); - GoogleCloudAiplatformV1PublisherModelCallToAction.fromJson(core.Map json_) + GoogleCloudAiplatformV1RayLogsSpec.fromJson(core.Map json_) : this( - createApplication: json_.containsKey('createApplication') - ? GoogleCloudAiplatformV1PublisherModelCallToActionRegionalResourceReferences - .fromJson(json_['createApplication'] - as core.Map) - : null, - deploy: json_.containsKey('deploy') - ? GoogleCloudAiplatformV1PublisherModelCallToActionDeploy - .fromJson( - json_['deploy'] as core.Map) - : null, - deployGke: json_.containsKey('deployGke') - ? GoogleCloudAiplatformV1PublisherModelCallToActionDeployGke - .fromJson( - json_['deployGke'] as core.Map) - : null, - multiDeployVertex: json_.containsKey('multiDeployVertex') - ? GoogleCloudAiplatformV1PublisherModelCallToActionDeployVertex - .fromJson(json_['multiDeployVertex'] - as core.Map) - : null, - openEvaluationPipeline: json_.containsKey('openEvaluationPipeline') - ? GoogleCloudAiplatformV1PublisherModelCallToActionRegionalResourceReferences - .fromJson(json_['openEvaluationPipeline'] - as core.Map) - : null, - openFineTuningPipeline: json_.containsKey('openFineTuningPipeline') - ? GoogleCloudAiplatformV1PublisherModelCallToActionRegionalResourceReferences - .fromJson(json_['openFineTuningPipeline'] - as core.Map) - : null, - openFineTuningPipelines: json_.containsKey('openFineTuningPipelines') - ? GoogleCloudAiplatformV1PublisherModelCallToActionOpenFineTuningPipelines - .fromJson(json_['openFineTuningPipelines'] - as core.Map) - : null, - openGenerationAiStudio: json_.containsKey('openGenerationAiStudio') - ? GoogleCloudAiplatformV1PublisherModelCallToActionRegionalResourceReferences - .fromJson(json_['openGenerationAiStudio'] - as core.Map) - : null, - openGenie: json_.containsKey('openGenie') - ? 
GoogleCloudAiplatformV1PublisherModelCallToActionRegionalResourceReferences - .fromJson( - json_['openGenie'] as core.Map) - : null, - openNotebook: json_.containsKey('openNotebook') - ? GoogleCloudAiplatformV1PublisherModelCallToActionRegionalResourceReferences - .fromJson(json_['openNotebook'] - as core.Map) - : null, - openNotebooks: json_.containsKey('openNotebooks') - ? GoogleCloudAiplatformV1PublisherModelCallToActionOpenNotebooks - .fromJson(json_['openNotebooks'] - as core.Map) - : null, - openPromptTuningPipeline: json_ - .containsKey('openPromptTuningPipeline') - ? GoogleCloudAiplatformV1PublisherModelCallToActionRegionalResourceReferences - .fromJson(json_['openPromptTuningPipeline'] - as core.Map) - : null, - requestAccess: json_.containsKey('requestAccess') - ? GoogleCloudAiplatformV1PublisherModelCallToActionRegionalResourceReferences - .fromJson(json_['requestAccess'] - as core.Map) - : null, - viewRestApi: json_.containsKey('viewRestApi') - ? GoogleCloudAiplatformV1PublisherModelCallToActionViewRestApi - .fromJson(json_['viewRestApi'] - as core.Map) - : null, + disabled: json_['disabled'] as core.bool?, ); - core.Map toJson() => { - if (createApplication != null) 'createApplication': createApplication!, - if (deploy != null) 'deploy': deploy!, - if (deployGke != null) 'deployGke': deployGke!, - if (multiDeployVertex != null) 'multiDeployVertex': multiDeployVertex!, - if (openEvaluationPipeline != null) - 'openEvaluationPipeline': openEvaluationPipeline!, - if (openFineTuningPipeline != null) - 'openFineTuningPipeline': openFineTuningPipeline!, - if (openFineTuningPipelines != null) - 'openFineTuningPipelines': openFineTuningPipelines!, - if (openGenerationAiStudio != null) - 'openGenerationAiStudio': openGenerationAiStudio!, - if (openGenie != null) 'openGenie': openGenie!, - if (openNotebook != null) 'openNotebook': openNotebook!, - if (openNotebooks != null) 'openNotebooks': openNotebooks!, - if (openPromptTuningPipeline != null) - 'openPromptTuningPipeline': openPromptTuningPipeline!, - if (requestAccess != null) 'requestAccess': requestAccess!, - if (viewRestApi != null) 'viewRestApi': viewRestApi!, + core.Map toJson() => { + if (disabled != null) 'disabled': disabled!, }; } -/// Model metadata that is needed for UploadModel or DeployModel/CreateEndpoint -/// requests. -class GoogleCloudAiplatformV1PublisherModelCallToActionDeploy { - /// The path to the directory containing the Model artifact and any of its - /// supporting files. +/// Configuration for the Ray metrics. +class GoogleCloudAiplatformV1RayMetricSpec { + /// Flag to disable the Ray metrics collection. /// /// Optional. - core.String? artifactUri; + core.bool? disabled; - /// A description of resources that to large degree are decided by Vertex AI, - /// and require only a modest additional configuration. - GoogleCloudAiplatformV1AutomaticResources? automaticResources; + GoogleCloudAiplatformV1RayMetricSpec({ + this.disabled, + }); - /// The specification of the container that is to be used when deploying this - /// Model in Vertex AI. - /// - /// Not present for Large Models. - /// - /// Optional. - GoogleCloudAiplatformV1ModelContainerSpec? containerSpec; + GoogleCloudAiplatformV1RayMetricSpec.fromJson(core.Map json_) + : this( + disabled: json_['disabled'] as core.bool?, + ); - /// A description of resources that are dedicated to the DeployedModel, and - /// that need a higher degree of manual configuration. - GoogleCloudAiplatformV1DedicatedResources? 
dedicatedResources; + core.Map toJson() => { + if (disabled != null) 'disabled': disabled!, + }; +} - /// Metadata information about this deployment config. +/// Configuration information for the Ray cluster. +/// +/// For experimental launch, Ray cluster creation and Persistent cluster +/// creation are 1:1 mapping: We will provision all the nodes within the +/// Persistent cluster as Ray nodes. +class GoogleCloudAiplatformV1RaySpec { + /// This will be used to indicate which resource pool will serve as the Ray + /// head node(the first node within that pool). /// - /// Optional. - GoogleCloudAiplatformV1PublisherModelCallToActionDeployDeployMetadata? - deployMetadata; - - /// The name of the deploy task (e.g., "text to image generation"). + /// Will use the machine from the first workerpool as the head node by default + /// if this field isn't set. /// /// Optional. - core.String? deployTaskName; + core.String? headNodeResourcePoolId; - /// Large model reference. + /// Default image for user to choose a preferred ML framework (for example, + /// TensorFlow or Pytorch) by choosing from + /// [Vertex prebuilt images](https://cloud.google.com/vertex-ai/docs/training/pre-built-containers). /// - /// When this is set, model_artifact_spec is not needed. + /// Either this or the resource_pool_images is required. Use this field if you + /// need all the resource pools to have the same Ray image. Otherwise, use the + /// {@code resource_pool_images} field. /// /// Optional. - GoogleCloudAiplatformV1LargeModelReference? largeModelReference; + core.String? imageUri; - /// Default model display name. + /// OSS Ray logging configurations. /// /// Optional. - core.String? modelDisplayName; + GoogleCloudAiplatformV1RayLogsSpec? rayLogsSpec; - /// The signed URI for ephemeral Cloud Storage access to model artifact. + /// Ray metrics configurations. /// /// Optional. - core.String? publicArtifactUri; + GoogleCloudAiplatformV1RayMetricSpec? rayMetricSpec; - /// The resource name of the shared DeploymentResourcePool to deploy on. + /// Required if image_uri isn't set. /// - /// Format: - /// `projects/{project}/locations/{location}/deploymentResourcePools/{deployment_resource_pool}` - core.String? sharedResources; - - /// The title of the regional resource reference. + /// A map of resource_pool_id to prebuild Ray image if user need to use + /// different images for different head/worker pools. This map needs to cover + /// all the resource pool ids. Example: { "ray_head_node_pool": "head image" + /// "ray_worker_node_pool1": "worker image" "ray_worker_node_pool2": "another + /// worker image" } /// - /// Required. - core.String? title; + /// Optional. + core.Map? resourcePoolImages; - GoogleCloudAiplatformV1PublisherModelCallToActionDeploy({ - this.artifactUri, - this.automaticResources, - this.containerSpec, - this.dedicatedResources, - this.deployMetadata, - this.deployTaskName, - this.largeModelReference, - this.modelDisplayName, - this.publicArtifactUri, - this.sharedResources, - this.title, + GoogleCloudAiplatformV1RaySpec({ + this.headNodeResourcePoolId, + this.imageUri, + this.rayLogsSpec, + this.rayMetricSpec, + this.resourcePoolImages, }); - GoogleCloudAiplatformV1PublisherModelCallToActionDeploy.fromJson( - core.Map json_) + GoogleCloudAiplatformV1RaySpec.fromJson(core.Map json_) : this( - artifactUri: json_['artifactUri'] as core.String?, - automaticResources: json_.containsKey('automaticResources') - ? 
GoogleCloudAiplatformV1AutomaticResources.fromJson( - json_['automaticResources'] - as core.Map) - : null, - containerSpec: json_.containsKey('containerSpec') - ? GoogleCloudAiplatformV1ModelContainerSpec.fromJson( - json_['containerSpec'] as core.Map) - : null, - dedicatedResources: json_.containsKey('dedicatedResources') - ? GoogleCloudAiplatformV1DedicatedResources.fromJson( - json_['dedicatedResources'] - as core.Map) - : null, - deployMetadata: json_.containsKey('deployMetadata') - ? GoogleCloudAiplatformV1PublisherModelCallToActionDeployDeployMetadata - .fromJson(json_['deployMetadata'] - as core.Map) + headNodeResourcePoolId: + json_['headNodeResourcePoolId'] as core.String?, + imageUri: json_['imageUri'] as core.String?, + rayLogsSpec: json_.containsKey('rayLogsSpec') + ? GoogleCloudAiplatformV1RayLogsSpec.fromJson( + json_['rayLogsSpec'] as core.Map) : null, - deployTaskName: json_['deployTaskName'] as core.String?, - largeModelReference: json_.containsKey('largeModelReference') - ? GoogleCloudAiplatformV1LargeModelReference.fromJson( - json_['largeModelReference'] - as core.Map) + rayMetricSpec: json_.containsKey('rayMetricSpec') + ? GoogleCloudAiplatformV1RayMetricSpec.fromJson( + json_['rayMetricSpec'] as core.Map) : null, - modelDisplayName: json_['modelDisplayName'] as core.String?, - publicArtifactUri: json_['publicArtifactUri'] as core.String?, - sharedResources: json_['sharedResources'] as core.String?, - title: json_['title'] as core.String?, + resourcePoolImages: (json_['resourcePoolImages'] + as core.Map?) + ?.map( + (key, value) => core.MapEntry( + key, + value as core.String, + ), + ), ); core.Map toJson() => { - if (artifactUri != null) 'artifactUri': artifactUri!, - if (automaticResources != null) - 'automaticResources': automaticResources!, - if (containerSpec != null) 'containerSpec': containerSpec!, - if (dedicatedResources != null) - 'dedicatedResources': dedicatedResources!, - if (deployMetadata != null) 'deployMetadata': deployMetadata!, - if (deployTaskName != null) 'deployTaskName': deployTaskName!, - if (largeModelReference != null) - 'largeModelReference': largeModelReference!, - if (modelDisplayName != null) 'modelDisplayName': modelDisplayName!, - if (publicArtifactUri != null) 'publicArtifactUri': publicArtifactUri!, - if (sharedResources != null) 'sharedResources': sharedResources!, - if (title != null) 'title': title!, + if (headNodeResourcePoolId != null) + 'headNodeResourcePoolId': headNodeResourcePoolId!, + if (imageUri != null) 'imageUri': imageUri!, + if (rayLogsSpec != null) 'rayLogsSpec': rayLogsSpec!, + if (rayMetricSpec != null) 'rayMetricSpec': rayMetricSpec!, + if (resourcePoolImages != null) + 'resourcePoolImages': resourcePoolImages!, }; } -/// Metadata information about the deployment for managing deployment config. -class GoogleCloudAiplatformV1PublisherModelCallToActionDeployDeployMetadata { - /// Labels for the deployment. +/// Request message for FeaturestoreOnlineServingService.ReadFeatureValues. +class GoogleCloudAiplatformV1ReadFeatureValuesRequest { + /// ID for a specific entity. /// - /// For managing deployment config like verifying, source of deployment - /// config, etc. + /// For example, for a machine learning model predicting user clicks on a + /// website, an entity ID could be `user_123`. /// - /// Optional. - core.Map? labels; + /// Required. + core.String? entityId; - /// Sample request for deployed endpoint. + /// Selector choosing Features of the target EntityType. /// - /// Optional. - core.String? 
sampleRequest; + /// Required. + GoogleCloudAiplatformV1FeatureSelector? featureSelector; - GoogleCloudAiplatformV1PublisherModelCallToActionDeployDeployMetadata({ - this.labels, - this.sampleRequest, + GoogleCloudAiplatformV1ReadFeatureValuesRequest({ + this.entityId, + this.featureSelector, }); - GoogleCloudAiplatformV1PublisherModelCallToActionDeployDeployMetadata.fromJson( - core.Map json_) + GoogleCloudAiplatformV1ReadFeatureValuesRequest.fromJson(core.Map json_) : this( - labels: - (json_['labels'] as core.Map?)?.map( - (key, value) => core.MapEntry( - key, - value as core.String, - ), - ), - sampleRequest: json_['sampleRequest'] as core.String?, + entityId: json_['entityId'] as core.String?, + featureSelector: json_.containsKey('featureSelector') + ? GoogleCloudAiplatformV1FeatureSelector.fromJson( + json_['featureSelector'] + as core.Map) + : null, ); core.Map toJson() => { - if (labels != null) 'labels': labels!, - if (sampleRequest != null) 'sampleRequest': sampleRequest!, + if (entityId != null) 'entityId': entityId!, + if (featureSelector != null) 'featureSelector': featureSelector!, }; } -/// Configurations for PublisherModel GKE deployment -class GoogleCloudAiplatformV1PublisherModelCallToActionDeployGke { - /// GKE deployment configuration in yaml format. +/// Response message for FeaturestoreOnlineServingService.ReadFeatureValues. +class GoogleCloudAiplatformV1ReadFeatureValuesResponse { + /// Entity view with Feature values. /// - /// Optional. - core.List? gkeYamlConfigs; + /// This may be the entity in the Featurestore if values for all Features were + /// requested, or a projection of the entity in the Featurestore if values for + /// only some Features were requested. + GoogleCloudAiplatformV1ReadFeatureValuesResponseEntityView? entityView; - GoogleCloudAiplatformV1PublisherModelCallToActionDeployGke({ - this.gkeYamlConfigs, + /// Response header. + GoogleCloudAiplatformV1ReadFeatureValuesResponseHeader? header; + + GoogleCloudAiplatformV1ReadFeatureValuesResponse({ + this.entityView, + this.header, }); - GoogleCloudAiplatformV1PublisherModelCallToActionDeployGke.fromJson( - core.Map json_) + GoogleCloudAiplatformV1ReadFeatureValuesResponse.fromJson(core.Map json_) : this( - gkeYamlConfigs: (json_['gkeYamlConfigs'] as core.List?) - ?.map((value) => value as core.String) - .toList(), + entityView: json_.containsKey('entityView') + ? GoogleCloudAiplatformV1ReadFeatureValuesResponseEntityView + .fromJson(json_['entityView'] + as core.Map) + : null, + header: json_.containsKey('header') + ? GoogleCloudAiplatformV1ReadFeatureValuesResponseHeader.fromJson( + json_['header'] as core.Map) + : null, ); core.Map toJson() => { - if (gkeYamlConfigs != null) 'gkeYamlConfigs': gkeYamlConfigs!, + if (entityView != null) 'entityView': entityView!, + if (header != null) 'header': header!, }; } -/// Multiple setups to deploy the PublisherModel. -class GoogleCloudAiplatformV1PublisherModelCallToActionDeployVertex { - /// One click deployment configurations. +/// Entity view with Feature values. +class GoogleCloudAiplatformV1ReadFeatureValuesResponseEntityView { + /// Each piece of data holds the k requested values for one requested Feature. /// - /// Optional. - core.List? - multiDeployVertex; + /// If no values for the requested Feature exist, the corresponding cell will + /// be empty. This has the same size and is in the same order as the features + /// from the header ReadFeatureValuesResponse.header. + core.List? 
+ data; - GoogleCloudAiplatformV1PublisherModelCallToActionDeployVertex({ - this.multiDeployVertex, + /// ID of the requested entity. + core.String? entityId; + + GoogleCloudAiplatformV1ReadFeatureValuesResponseEntityView({ + this.data, + this.entityId, }); - GoogleCloudAiplatformV1PublisherModelCallToActionDeployVertex.fromJson( + GoogleCloudAiplatformV1ReadFeatureValuesResponseEntityView.fromJson( core.Map json_) : this( - multiDeployVertex: (json_['multiDeployVertex'] as core.List?) + data: (json_['data'] as core.List?) ?.map((value) => - GoogleCloudAiplatformV1PublisherModelCallToActionDeploy + GoogleCloudAiplatformV1ReadFeatureValuesResponseEntityViewData .fromJson(value as core.Map)) .toList(), + entityId: json_['entityId'] as core.String?, ); core.Map toJson() => { - if (multiDeployVertex != null) 'multiDeployVertex': multiDeployVertex!, + if (data != null) 'data': data!, + if (entityId != null) 'entityId': entityId!, }; } -/// Open fine tuning pipelines. -class GoogleCloudAiplatformV1PublisherModelCallToActionOpenFineTuningPipelines { - /// Regional resource references to fine tuning pipelines. +/// Container to hold value(s), successive in time, for one Feature from the +/// request. +class GoogleCloudAiplatformV1ReadFeatureValuesResponseEntityViewData { + /// Feature value if a single value is requested. + GoogleCloudAiplatformV1FeatureValue? value; + + /// Feature values list if values, successive in time, are requested. /// - /// Required. - core.List< - GoogleCloudAiplatformV1PublisherModelCallToActionRegionalResourceReferences>? - fineTuningPipelines; + /// If the requested number of values is greater than the number of existing + /// Feature values, nonexistent values are omitted instead of being returned + /// as empty. + GoogleCloudAiplatformV1FeatureValueList? values; - GoogleCloudAiplatformV1PublisherModelCallToActionOpenFineTuningPipelines({ - this.fineTuningPipelines, + GoogleCloudAiplatformV1ReadFeatureValuesResponseEntityViewData({ + this.value, + this.values, }); - GoogleCloudAiplatformV1PublisherModelCallToActionOpenFineTuningPipelines.fromJson( + GoogleCloudAiplatformV1ReadFeatureValuesResponseEntityViewData.fromJson( core.Map json_) : this( - fineTuningPipelines: (json_['fineTuningPipelines'] as core.List?) - ?.map((value) => - GoogleCloudAiplatformV1PublisherModelCallToActionRegionalResourceReferences - .fromJson(value as core.Map)) - .toList(), + value: json_.containsKey('value') + ? GoogleCloudAiplatformV1FeatureValue.fromJson( + json_['value'] as core.Map) + : null, + values: json_.containsKey('values') + ? GoogleCloudAiplatformV1FeatureValueList.fromJson( + json_['values'] as core.Map) + : null, ); core.Map toJson() => { - if (fineTuningPipelines != null) - 'fineTuningPipelines': fineTuningPipelines!, + if (value != null) 'value': value!, + if (values != null) 'values': values!, }; } -/// Open notebooks. -class GoogleCloudAiplatformV1PublisherModelCallToActionOpenNotebooks { - /// Regional resource references to notebooks. +/// Metadata for requested Features. +class GoogleCloudAiplatformV1ReadFeatureValuesResponseFeatureDescriptor { + /// Feature ID. + core.String? 
id; + + GoogleCloudAiplatformV1ReadFeatureValuesResponseFeatureDescriptor({ + this.id, + }); + + GoogleCloudAiplatformV1ReadFeatureValuesResponseFeatureDescriptor.fromJson( + core.Map json_) + : this( + id: json_['id'] as core.String?, + ); + + core.Map toJson() => { + if (id != null) 'id': id!, + }; +} + +/// Response header with metadata for the requested +/// ReadFeatureValuesRequest.entity_type and Features. +class GoogleCloudAiplatformV1ReadFeatureValuesResponseHeader { + /// The resource name of the EntityType from the ReadFeatureValuesRequest. /// - /// Required. - core.List< - GoogleCloudAiplatformV1PublisherModelCallToActionRegionalResourceReferences>? - notebooks; + /// Value format: + /// `projects/{project}/locations/{location}/featurestores/{featurestore}/entityTypes/{entityType}`. + core.String? entityType; - GoogleCloudAiplatformV1PublisherModelCallToActionOpenNotebooks({ - this.notebooks, + /// List of Feature metadata corresponding to each piece of + /// ReadFeatureValuesResponse.EntityView.data. + core.List? + featureDescriptors; + + GoogleCloudAiplatformV1ReadFeatureValuesResponseHeader({ + this.entityType, + this.featureDescriptors, }); - GoogleCloudAiplatformV1PublisherModelCallToActionOpenNotebooks.fromJson( + GoogleCloudAiplatformV1ReadFeatureValuesResponseHeader.fromJson( core.Map json_) : this( - notebooks: (json_['notebooks'] as core.List?) + entityType: json_['entityType'] as core.String?, + featureDescriptors: (json_['featureDescriptors'] as core.List?) ?.map((value) => - GoogleCloudAiplatformV1PublisherModelCallToActionRegionalResourceReferences + GoogleCloudAiplatformV1ReadFeatureValuesResponseFeatureDescriptor .fromJson(value as core.Map)) .toList(), ); core.Map toJson() => { - if (notebooks != null) 'notebooks': notebooks!, + if (entityType != null) 'entityType': entityType!, + if (featureDescriptors != null) + 'featureDescriptors': featureDescriptors!, }; } -/// The regional resource name or the URI. -/// -/// Key is region, e.g., us-central1, europe-west2, global, etc.. -class GoogleCloudAiplatformV1PublisherModelCallToActionRegionalResourceReferences { - /// Required. - core.Map? - references; +/// The request message for MatchService.ReadIndexDatapoints. +class GoogleCloudAiplatformV1ReadIndexDatapointsRequest { + /// The ID of the DeployedIndex that will serve the request. + core.String? deployedIndexId; - /// Description of the resource. - /// - /// Optional. - core.String? resourceDescription; + /// IDs of the datapoints to be searched for. + core.List? ids; - /// Title of the resource. - /// - /// Optional. - core.String? resourceTitle; + GoogleCloudAiplatformV1ReadIndexDatapointsRequest({ + this.deployedIndexId, + this.ids, + }); - /// Use case (CUJ) of the resource. - /// - /// Optional. - core.String? resourceUseCase; + GoogleCloudAiplatformV1ReadIndexDatapointsRequest.fromJson(core.Map json_) + : this( + deployedIndexId: json_['deployedIndexId'] as core.String?, + ids: (json_['ids'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + ); - /// - /// - /// Required. - core.String? title; + core.Map toJson() => { + if (deployedIndexId != null) 'deployedIndexId': deployedIndexId!, + if (ids != null) 'ids': ids!, + }; +} - GoogleCloudAiplatformV1PublisherModelCallToActionRegionalResourceReferences({ - this.references, - this.resourceDescription, - this.resourceTitle, - this.resourceUseCase, - this.title, +/// The response message for MatchService.ReadIndexDatapoints. 
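A small sketch of the ReadIndexDatapoints request type added above, again assuming the package:googleapis/aiplatform/v1.dart import; the deployed-index and datapoint IDs are made up.

import 'dart:convert';

import 'package:googleapis/aiplatform/v1.dart';

void main() {
  // Look up two datapoints by ID on a (hypothetical) deployed index.
  final request = GoogleCloudAiplatformV1ReadIndexDatapointsRequest(
    deployedIndexId: 'my_deployed_index',
    ids: ['datapoint-1', 'datapoint-2'],
  );

  // Generated types round-trip through plain JSON maps.
  final encoded = jsonEncode(request.toJson());
  final decoded = GoogleCloudAiplatformV1ReadIndexDatapointsRequest.fromJson(
      jsonDecode(encoded) as Map<String, dynamic>);
  print(decoded.ids); // [datapoint-1, datapoint-2]
}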
+class GoogleCloudAiplatformV1ReadIndexDatapointsResponse { + /// The result list of datapoints. + core.List? datapoints; + + GoogleCloudAiplatformV1ReadIndexDatapointsResponse({ + this.datapoints, }); - GoogleCloudAiplatformV1PublisherModelCallToActionRegionalResourceReferences.fromJson( - core.Map json_) + GoogleCloudAiplatformV1ReadIndexDatapointsResponse.fromJson(core.Map json_) : this( - references: - (json_['references'] as core.Map?) - ?.map( - (key, value) => core.MapEntry( - key, - GoogleCloudAiplatformV1PublisherModelResourceReference.fromJson( - value as core.Map), - ), - ), - resourceDescription: json_['resourceDescription'] as core.String?, - resourceTitle: json_['resourceTitle'] as core.String?, - resourceUseCase: json_['resourceUseCase'] as core.String?, - title: json_['title'] as core.String?, + datapoints: (json_['datapoints'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1IndexDatapoint.fromJson( + value as core.Map)) + .toList(), ); core.Map toJson() => { - if (references != null) 'references': references!, - if (resourceDescription != null) - 'resourceDescription': resourceDescription!, - if (resourceTitle != null) 'resourceTitle': resourceTitle!, - if (resourceUseCase != null) 'resourceUseCase': resourceUseCase!, - if (title != null) 'title': title!, + if (datapoints != null) 'datapoints': datapoints!, }; } -/// Rest API docs. -class GoogleCloudAiplatformV1PublisherModelCallToActionViewRestApi { - /// Required. - core.List? documentations; - - /// The title of the view rest API. - /// - /// Required. - core.String? title; +/// Response message for TensorboardService.ReadTensorboardBlobData. +class GoogleCloudAiplatformV1ReadTensorboardBlobDataResponse { + /// Blob messages containing blob bytes. + core.List? blobs; - GoogleCloudAiplatformV1PublisherModelCallToActionViewRestApi({ - this.documentations, - this.title, + GoogleCloudAiplatformV1ReadTensorboardBlobDataResponse({ + this.blobs, }); - GoogleCloudAiplatformV1PublisherModelCallToActionViewRestApi.fromJson( + GoogleCloudAiplatformV1ReadTensorboardBlobDataResponse.fromJson( core.Map json_) : this( - documentations: (json_['documentations'] as core.List?) - ?.map((value) => - GoogleCloudAiplatformV1PublisherModelDocumentation.fromJson( - value as core.Map)) + blobs: (json_['blobs'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1TensorboardBlob.fromJson( + value as core.Map)) .toList(), - title: json_['title'] as core.String?, ); core.Map toJson() => { - if (documentations != null) 'documentations': documentations!, - if (title != null) 'title': title!, + if (blobs != null) 'blobs': blobs!, }; -} - -/// A named piece of documentation. -class GoogleCloudAiplatformV1PublisherModelDocumentation { - /// Content of this piece of document (in Markdown format). - /// - /// Required. - core.String? content; +} - /// E.g., OVERVIEW, USE CASES, DOCUMENTATION, SDK & SAMPLES, JAVA, NODE.JS, - /// etc.. - /// - /// Required. - core.String? title; +/// Response message for TensorboardService.ReadTensorboardSize. +class GoogleCloudAiplatformV1ReadTensorboardSizeResponse { + /// Payload storage size for the TensorBoard + core.String? 
storageSizeByte; - GoogleCloudAiplatformV1PublisherModelDocumentation({ - this.content, - this.title, + GoogleCloudAiplatformV1ReadTensorboardSizeResponse({ + this.storageSizeByte, }); - GoogleCloudAiplatformV1PublisherModelDocumentation.fromJson(core.Map json_) + GoogleCloudAiplatformV1ReadTensorboardSizeResponse.fromJson(core.Map json_) : this( - content: json_['content'] as core.String?, - title: json_['title'] as core.String?, + storageSizeByte: json_['storageSizeByte'] as core.String?, ); core.Map toJson() => { - if (content != null) 'content': content!, - if (title != null) 'title': title!, + if (storageSizeByte != null) 'storageSizeByte': storageSizeByte!, }; } -/// Reference to a resource. -class GoogleCloudAiplatformV1PublisherModelResourceReference { - /// Description of the resource. - @core.Deprecated( - 'Not supported. Member documentation may have more information.', - ) - core.String? description; - - /// The resource name of the Google Cloud resource. - core.String? resourceName; - - /// The URI of the resource. - core.String? uri; - - /// Use case (CUJ) of the resource. - @core.Deprecated( - 'Not supported. Member documentation may have more information.', - ) - core.String? useCase; +/// Response message for TensorboardService.ReadTensorboardTimeSeriesData. +class GoogleCloudAiplatformV1ReadTensorboardTimeSeriesDataResponse { + /// The returned time series data. + GoogleCloudAiplatformV1TimeSeriesData? timeSeriesData; - GoogleCloudAiplatformV1PublisherModelResourceReference({ - this.description, - this.resourceName, - this.uri, - this.useCase, + GoogleCloudAiplatformV1ReadTensorboardTimeSeriesDataResponse({ + this.timeSeriesData, }); - GoogleCloudAiplatformV1PublisherModelResourceReference.fromJson( + GoogleCloudAiplatformV1ReadTensorboardTimeSeriesDataResponse.fromJson( core.Map json_) : this( - description: json_['description'] as core.String?, - resourceName: json_['resourceName'] as core.String?, - uri: json_['uri'] as core.String?, - useCase: json_['useCase'] as core.String?, + timeSeriesData: json_.containsKey('timeSeriesData') + ? GoogleCloudAiplatformV1TimeSeriesData.fromJson( + json_['timeSeriesData'] + as core.Map) + : null, ); core.Map toJson() => { - if (description != null) 'description': description!, - if (resourceName != null) 'resourceName': resourceName!, - if (uri != null) 'uri': uri!, - if (useCase != null) 'useCase': useCase!, + if (timeSeriesData != null) 'timeSeriesData': timeSeriesData!, }; } -/// Request message for MetadataService.PurgeArtifacts. -class GoogleCloudAiplatformV1PurgeArtifactsRequest { - /// A required filter matching the Artifacts to be purged. - /// - /// E.g., `update_time <= 2020-11-19T11:30:00-04:00`. - /// - /// Required. - core.String? filter; - - /// Flag to indicate to actually perform the purge. - /// - /// If `force` is set to false, the method will return a sample of Artifact - /// names that would be deleted. - /// - /// Optional. - core.bool? force; +/// Response message for TensorboardService.ReadTensorboardUsage. +class GoogleCloudAiplatformV1ReadTensorboardUsageResponse { + /// Maps year-month (YYYYMM) string to per month usage data. + core.Map? 
+ monthlyUsageData; - GoogleCloudAiplatformV1PurgeArtifactsRequest({ - this.filter, - this.force, + GoogleCloudAiplatformV1ReadTensorboardUsageResponse({ + this.monthlyUsageData, }); - GoogleCloudAiplatformV1PurgeArtifactsRequest.fromJson(core.Map json_) + GoogleCloudAiplatformV1ReadTensorboardUsageResponse.fromJson(core.Map json_) : this( - filter: json_['filter'] as core.String?, - force: json_['force'] as core.bool?, + monthlyUsageData: (json_['monthlyUsageData'] + as core.Map?) + ?.map( + (key, value) => core.MapEntry( + key, + GoogleCloudAiplatformV1ReadTensorboardUsageResponsePerMonthUsageData + .fromJson(value as core.Map), + ), + ), ); core.Map toJson() => { - if (filter != null) 'filter': filter!, - if (force != null) 'force': force!, + if (monthlyUsageData != null) 'monthlyUsageData': monthlyUsageData!, }; } -/// Request message for MetadataService.PurgeContexts. -class GoogleCloudAiplatformV1PurgeContextsRequest { - /// A required filter matching the Contexts to be purged. - /// - /// E.g., `update_time <= 2020-11-19T11:30:00-04:00`. - /// - /// Required. - core.String? filter; - - /// Flag to indicate to actually perform the purge. - /// - /// If `force` is set to false, the method will return a sample of Context - /// names that would be deleted. - /// - /// Optional. - core.bool? force; +/// Per month usage data +class GoogleCloudAiplatformV1ReadTensorboardUsageResponsePerMonthUsageData { + /// Usage data for each user in the given month. + core.List< + GoogleCloudAiplatformV1ReadTensorboardUsageResponsePerUserUsageData>? + userUsageData; - GoogleCloudAiplatformV1PurgeContextsRequest({ - this.filter, - this.force, + GoogleCloudAiplatformV1ReadTensorboardUsageResponsePerMonthUsageData({ + this.userUsageData, }); - GoogleCloudAiplatformV1PurgeContextsRequest.fromJson(core.Map json_) + GoogleCloudAiplatformV1ReadTensorboardUsageResponsePerMonthUsageData.fromJson( + core.Map json_) : this( - filter: json_['filter'] as core.String?, - force: json_['force'] as core.bool?, + userUsageData: (json_['userUsageData'] as core.List?) + ?.map((value) => + GoogleCloudAiplatformV1ReadTensorboardUsageResponsePerUserUsageData + .fromJson(value as core.Map)) + .toList(), ); core.Map toJson() => { - if (filter != null) 'filter': filter!, - if (force != null) 'force': force!, + if (userUsageData != null) 'userUsageData': userUsageData!, }; } -/// Request message for MetadataService.PurgeExecutions. -class GoogleCloudAiplatformV1PurgeExecutionsRequest { - /// A required filter matching the Executions to be purged. - /// - /// E.g., `update_time <= 2020-11-19T11:30:00-04:00`. - /// - /// Required. - core.String? filter; +/// Per user usage data. +class GoogleCloudAiplatformV1ReadTensorboardUsageResponsePerUserUsageData { + /// User's username + core.String? username; - /// Flag to indicate to actually perform the purge. - /// - /// If `force` is set to false, the method will return a sample of Execution - /// names that would be deleted. - /// - /// Optional. - core.bool? force; + /// Number of times the user has read data within the Tensorboard. + core.String? 
viewCount; - GoogleCloudAiplatformV1PurgeExecutionsRequest({ - this.filter, - this.force, + GoogleCloudAiplatformV1ReadTensorboardUsageResponsePerUserUsageData({ + this.username, + this.viewCount, }); - GoogleCloudAiplatformV1PurgeExecutionsRequest.fromJson(core.Map json_) + GoogleCloudAiplatformV1ReadTensorboardUsageResponsePerUserUsageData.fromJson( + core.Map json_) : this( - filter: json_['filter'] as core.String?, - force: json_['force'] as core.bool?, + username: json_['username'] as core.String?, + viewCount: json_['viewCount'] as core.String?, ); core.Map toJson() => { - if (filter != null) 'filter': filter!, - if (force != null) 'force': force!, + if (username != null) 'username': username!, + if (viewCount != null) 'viewCount': viewCount!, }; } -/// The spec of a Python packaged code. -class GoogleCloudAiplatformV1PythonPackageSpec { - /// Command line arguments to be passed to the Python task. - core.List? args; - - /// Environment variables to be passed to the python module. +/// ReasoningEngine provides a customizable runtime for models to determine +/// which actions to take and in which order. +class GoogleCloudAiplatformV1ReasoningEngine { + /// Timestamp when this ReasoningEngine was created. /// - /// Maximum limit is 100. - core.List? env; + /// Output only. + core.String? createTime; - /// The URI of a container image in Artifact Registry that will run the - /// provided Python package. + /// The description of the ReasoningEngine. /// - /// Vertex AI provides a wide range of executor images with pre-installed - /// packages to meet users' various use cases. See the list of \[pre-built - /// containers for - /// training\](https://cloud.google.com/vertex-ai/docs/training/pre-built-containers). - /// You must use an image from this list. + /// Optional. + core.String? description; + + /// The display name of the ReasoningEngine. /// /// Required. - core.String? executorImageUri; + core.String? displayName; - /// The Google Cloud Storage location of the Python package files which are - /// the training program and its dependent packages. + /// Used to perform consistent read-modify-write updates. /// - /// The maximum number of package URIs is 100. + /// If not set, a blind "overwrite" update happens. /// - /// Required. - core.List? packageUris; + /// Optional. + core.String? etag; - /// The Python module name to run after installing the packages. + /// Identifier. /// - /// Required. - core.String? pythonModule; - - GoogleCloudAiplatformV1PythonPackageSpec({ - this.args, - this.env, - this.executorImageUri, - this.packageUris, - this.pythonModule, - }); - - GoogleCloudAiplatformV1PythonPackageSpec.fromJson(core.Map json_) - : this( - args: (json_['args'] as core.List?) - ?.map((value) => value as core.String) - .toList(), - env: (json_['env'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1EnvVar.fromJson( - value as core.Map)) - .toList(), - executorImageUri: json_['executorImageUri'] as core.String?, - packageUris: (json_['packageUris'] as core.List?) - ?.map((value) => value as core.String) - .toList(), - pythonModule: json_['pythonModule'] as core.String?, - ); - - core.Map toJson() => { - if (args != null) 'args': args!, - if (env != null) 'env': env!, - if (executorImageUri != null) 'executorImageUri': executorImageUri!, - if (packageUris != null) 'packageUris': packageUris!, - if (pythonModule != null) 'pythonModule': pythonModule!, - }; -} - -/// Response message for QueryDeployedModels method. 
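To make the Tensorboard usage types above concrete, a sketch that decodes a hand-written response body and walks the per-month data (import path as in the earlier sketches; all values are made up).

import 'dart:convert';

import 'package:googleapis/aiplatform/v1.dart';

void main() {
  // A hand-written payload in the shape described above.
  const payload = '''
  {
    "monthlyUsageData": {
      "202412": {
        "userUsageData": [
          {"username": "alice", "viewCount": "12"}
        ]
      }
    }
  }
  ''';

  final response = GoogleCloudAiplatformV1ReadTensorboardUsageResponse.fromJson(
      jsonDecode(payload) as Map<String, dynamic>);

  response.monthlyUsageData?.forEach((month, usage) {
    for (final user in usage.userUsageData ?? const []) {
      // int64 fields such as viewCount are carried as strings on the wire.
      print('$month: ${user.username} viewed ${user.viewCount} time(s)');
    }
  });
}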
-class GoogleCloudAiplatformV1QueryDeployedModelsResponse { - /// References to the DeployedModels that share the specified - /// deploymentResourcePool. - core.List? deployedModelRefs; - - /// DEPRECATED Use deployed_model_refs instead. - @core.Deprecated( - 'Not supported. Member documentation may have more information.', - ) - core.List? deployedModels; + /// The resource name of the ReasoningEngine. + core.String? name; - /// A token, which can be sent as `page_token` to retrieve the next page. + /// Configurations of the ReasoningEngine /// - /// If this field is omitted, there are no subsequent pages. - core.String? nextPageToken; - - /// The total number of DeployedModels on this DeploymentResourcePool. - core.int? totalDeployedModelCount; + /// Required. + GoogleCloudAiplatformV1ReasoningEngineSpec? spec; - /// The total number of Endpoints that have DeployedModels on this - /// DeploymentResourcePool. - core.int? totalEndpointCount; + /// Timestamp when this ReasoningEngine was most recently updated. + /// + /// Output only. + core.String? updateTime; - GoogleCloudAiplatformV1QueryDeployedModelsResponse({ - this.deployedModelRefs, - this.deployedModels, - this.nextPageToken, - this.totalDeployedModelCount, - this.totalEndpointCount, + GoogleCloudAiplatformV1ReasoningEngine({ + this.createTime, + this.description, + this.displayName, + this.etag, + this.name, + this.spec, + this.updateTime, }); - GoogleCloudAiplatformV1QueryDeployedModelsResponse.fromJson(core.Map json_) + GoogleCloudAiplatformV1ReasoningEngine.fromJson(core.Map json_) : this( - deployedModelRefs: (json_['deployedModelRefs'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1DeployedModelRef.fromJson( - value as core.Map)) - .toList(), - deployedModels: (json_['deployedModels'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1DeployedModel.fromJson( - value as core.Map)) - .toList(), - nextPageToken: json_['nextPageToken'] as core.String?, - totalDeployedModelCount: - json_['totalDeployedModelCount'] as core.int?, - totalEndpointCount: json_['totalEndpointCount'] as core.int?, + createTime: json_['createTime'] as core.String?, + description: json_['description'] as core.String?, + displayName: json_['displayName'] as core.String?, + etag: json_['etag'] as core.String?, + name: json_['name'] as core.String?, + spec: json_.containsKey('spec') + ? GoogleCloudAiplatformV1ReasoningEngineSpec.fromJson( + json_['spec'] as core.Map) + : null, + updateTime: json_['updateTime'] as core.String?, ); core.Map toJson() => { - if (deployedModelRefs != null) 'deployedModelRefs': deployedModelRefs!, - if (deployedModels != null) 'deployedModels': deployedModels!, - if (nextPageToken != null) 'nextPageToken': nextPageToken!, - if (totalDeployedModelCount != null) - 'totalDeployedModelCount': totalDeployedModelCount!, - if (totalEndpointCount != null) - 'totalEndpointCount': totalEndpointCount!, + if (createTime != null) 'createTime': createTime!, + if (description != null) 'description': description!, + if (displayName != null) 'displayName': displayName!, + if (etag != null) 'etag': etag!, + if (name != null) 'name': name!, + if (spec != null) 'spec': spec!, + if (updateTime != null) 'updateTime': updateTime!, }; } -/// Input for question answering correctness metric. -class GoogleCloudAiplatformV1QuestionAnsweringCorrectnessInput { - /// Question answering correctness instance. 
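A sketch of assembling the new ReasoningEngine resource as a request body; it uses the spec and package-spec types generated a little further down in this diff, the same hypothetical import as the sketches above, and made-up Cloud Storage URIs.

import 'package:googleapis/aiplatform/v1.dart';

void main() {
  // displayName and spec are the required pieces per the field comments above.
  final engine = GoogleCloudAiplatformV1ReasoningEngine(
    displayName: 'demo-agent',
    description: 'Illustrative ReasoningEngine resource body.',
    spec: GoogleCloudAiplatformV1ReasoningEngineSpec(
      packageSpec: GoogleCloudAiplatformV1ReasoningEngineSpecPackageSpec(
        pickleObjectGcsUri: 'gs://my-bucket/agent.pkl',
        requirementsGcsUri: 'gs://my-bucket/requirements.txt',
        pythonVersion: '3.10',
      ),
    ),
  );

  print(engine.toJson());
}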
+/// ReasoningEngine configurations +class GoogleCloudAiplatformV1ReasoningEngineSpec { + /// Declarations for object class methods in OpenAPI specification format. /// - /// Required. - GoogleCloudAiplatformV1QuestionAnsweringCorrectnessInstance? instance; + /// Optional. + /// + /// The values for Object must be JSON objects. It can consist of `num`, + /// `String`, `bool` and `null` as well as `Map` and `List` values. + core.List>? classMethods; - /// Spec for question answering correctness score metric. + /// User provided package spec of the ReasoningEngine. /// /// Required. - GoogleCloudAiplatformV1QuestionAnsweringCorrectnessSpec? metricSpec; + GoogleCloudAiplatformV1ReasoningEngineSpecPackageSpec? packageSpec; - GoogleCloudAiplatformV1QuestionAnsweringCorrectnessInput({ - this.instance, - this.metricSpec, + GoogleCloudAiplatformV1ReasoningEngineSpec({ + this.classMethods, + this.packageSpec, }); - GoogleCloudAiplatformV1QuestionAnsweringCorrectnessInput.fromJson( - core.Map json_) + GoogleCloudAiplatformV1ReasoningEngineSpec.fromJson(core.Map json_) : this( - instance: json_.containsKey('instance') - ? GoogleCloudAiplatformV1QuestionAnsweringCorrectnessInstance - .fromJson( - json_['instance'] as core.Map) - : null, - metricSpec: json_.containsKey('metricSpec') - ? GoogleCloudAiplatformV1QuestionAnsweringCorrectnessSpec - .fromJson(json_['metricSpec'] - as core.Map) + classMethods: (json_['classMethods'] as core.List?) + ?.map((value) => value as core.Map) + .toList(), + packageSpec: json_.containsKey('packageSpec') + ? GoogleCloudAiplatformV1ReasoningEngineSpecPackageSpec.fromJson( + json_['packageSpec'] as core.Map) : null, ); core.Map toJson() => { - if (instance != null) 'instance': instance!, - if (metricSpec != null) 'metricSpec': metricSpec!, + if (classMethods != null) 'classMethods': classMethods!, + if (packageSpec != null) 'packageSpec': packageSpec!, }; } -/// Spec for question answering correctness instance. -typedef GoogleCloudAiplatformV1QuestionAnsweringCorrectnessInstance - = $Instance02; +/// User provided package spec like pickled object and package requirements. +class GoogleCloudAiplatformV1ReasoningEngineSpecPackageSpec { + /// The Cloud Storage URI of the dependency files in tar.gz format. + /// + /// Optional. + core.String? dependencyFilesGcsUri; -/// Spec for question answering correctness result. -class GoogleCloudAiplatformV1QuestionAnsweringCorrectnessResult { - /// Confidence for question answering correctness score. + /// The Cloud Storage URI of the pickled python object. /// - /// Output only. - core.double? confidence; + /// Optional. + core.String? pickleObjectGcsUri; - /// Explanation for question answering correctness score. + /// The Python version. /// - /// Output only. - core.String? explanation; + /// Currently support 3.8, 3.9, 3.10, 3.11. If not specified, default value is + /// 3.10. + /// + /// Optional. + core.String? pythonVersion; - /// Question Answering Correctness score. + /// The Cloud Storage URI of the `requirements.txt` file /// - /// Output only. - core.double? score; + /// Optional. + core.String? 
requirementsGcsUri; - GoogleCloudAiplatformV1QuestionAnsweringCorrectnessResult({ - this.confidence, - this.explanation, - this.score, + GoogleCloudAiplatformV1ReasoningEngineSpecPackageSpec({ + this.dependencyFilesGcsUri, + this.pickleObjectGcsUri, + this.pythonVersion, + this.requirementsGcsUri, }); - GoogleCloudAiplatformV1QuestionAnsweringCorrectnessResult.fromJson( - core.Map json_) + GoogleCloudAiplatformV1ReasoningEngineSpecPackageSpec.fromJson(core.Map json_) : this( - confidence: (json_['confidence'] as core.num?)?.toDouble(), - explanation: json_['explanation'] as core.String?, - score: (json_['score'] as core.num?)?.toDouble(), + dependencyFilesGcsUri: json_['dependencyFilesGcsUri'] as core.String?, + pickleObjectGcsUri: json_['pickleObjectGcsUri'] as core.String?, + pythonVersion: json_['pythonVersion'] as core.String?, + requirementsGcsUri: json_['requirementsGcsUri'] as core.String?, ); core.Map toJson() => { - if (confidence != null) 'confidence': confidence!, - if (explanation != null) 'explanation': explanation!, - if (score != null) 'score': score!, + if (dependencyFilesGcsUri != null) + 'dependencyFilesGcsUri': dependencyFilesGcsUri!, + if (pickleObjectGcsUri != null) + 'pickleObjectGcsUri': pickleObjectGcsUri!, + if (pythonVersion != null) 'pythonVersion': pythonVersion!, + if (requirementsGcsUri != null) + 'requirementsGcsUri': requirementsGcsUri!, }; } -/// Spec for question answering correctness metric. -class GoogleCloudAiplatformV1QuestionAnsweringCorrectnessSpec { - /// Whether to use instance.reference to compute question answering - /// correctness. +/// Request message for GenAiTuningService.RebaseTunedModel. +class GoogleCloudAiplatformV1RebaseTunedModelRequest { + /// The Google Cloud Storage location to write the artifacts. /// /// Optional. - core.bool? useReference; + GoogleCloudAiplatformV1GcsDestination? artifactDestination; - /// Which version to use for evaluation. + /// By default, bison to gemini migration will always create new + /// model/endpoint, but for gemini-1.0 to gemini-1.5 migration, we default + /// deploy to the same endpoint. + /// + /// See details in this Section. /// /// Optional. - core.int? version; + core.bool? deployToSameEndpoint; - GoogleCloudAiplatformV1QuestionAnsweringCorrectnessSpec({ - this.useReference, - this.version, + /// TunedModel reference to retrieve the legacy model information. + /// + /// Required. + GoogleCloudAiplatformV1TunedModelRef? tunedModelRef; + + /// The TuningJob to be updated. + /// + /// Users can use this TuningJob field to overwrite tuning configs. + /// + /// Optional. + GoogleCloudAiplatformV1TuningJob? tuningJob; + + GoogleCloudAiplatformV1RebaseTunedModelRequest({ + this.artifactDestination, + this.deployToSameEndpoint, + this.tunedModelRef, + this.tuningJob, }); - GoogleCloudAiplatformV1QuestionAnsweringCorrectnessSpec.fromJson( - core.Map json_) + GoogleCloudAiplatformV1RebaseTunedModelRequest.fromJson(core.Map json_) : this( - useReference: json_['useReference'] as core.bool?, - version: json_['version'] as core.int?, + artifactDestination: json_.containsKey('artifactDestination') + ? GoogleCloudAiplatformV1GcsDestination.fromJson( + json_['artifactDestination'] + as core.Map) + : null, + deployToSameEndpoint: json_['deployToSameEndpoint'] as core.bool?, + tunedModelRef: json_.containsKey('tunedModelRef') + ? GoogleCloudAiplatformV1TunedModelRef.fromJson( + json_['tunedModelRef'] as core.Map) + : null, + tuningJob: json_.containsKey('tuningJob') + ? 
GoogleCloudAiplatformV1TuningJob.fromJson( + json_['tuningJob'] as core.Map) + : null, ); core.Map toJson() => { - if (useReference != null) 'useReference': useReference!, - if (version != null) 'version': version!, + if (artifactDestination != null) + 'artifactDestination': artifactDestination!, + if (deployToSameEndpoint != null) + 'deployToSameEndpoint': deployToSameEndpoint!, + if (tunedModelRef != null) 'tunedModelRef': tunedModelRef!, + if (tuningJob != null) 'tuningJob': tuningJob!, }; } -/// Input for question answering helpfulness metric. -class GoogleCloudAiplatformV1QuestionAnsweringHelpfulnessInput { - /// Question answering helpfulness instance. - /// - /// Required. - GoogleCloudAiplatformV1QuestionAnsweringHelpfulnessInstance? instance; +/// Request message for PersistentResourceService.RebootPersistentResource. +typedef GoogleCloudAiplatformV1RebootPersistentResourceRequest = $Empty; - /// Spec for question answering helpfulness score metric. - /// - /// Required. - GoogleCloudAiplatformV1QuestionAnsweringHelpfulnessSpec? metricSpec; +/// Request message for MetadataService.DeleteContextChildrenRequest. +typedef GoogleCloudAiplatformV1RemoveContextChildrenRequest + = $ContextChildrenRequest; - GoogleCloudAiplatformV1QuestionAnsweringHelpfulnessInput({ - this.instance, - this.metricSpec, +/// Response message for MetadataService.RemoveContextChildren. +typedef GoogleCloudAiplatformV1RemoveContextChildrenResponse = $Empty; + +/// Request message for IndexService.RemoveDatapoints +class GoogleCloudAiplatformV1RemoveDatapointsRequest { + /// A list of datapoint ids to be deleted. + core.List? datapointIds; + + GoogleCloudAiplatformV1RemoveDatapointsRequest({ + this.datapointIds, }); - GoogleCloudAiplatformV1QuestionAnsweringHelpfulnessInput.fromJson( - core.Map json_) + GoogleCloudAiplatformV1RemoveDatapointsRequest.fromJson(core.Map json_) : this( - instance: json_.containsKey('instance') - ? GoogleCloudAiplatformV1QuestionAnsweringHelpfulnessInstance - .fromJson( - json_['instance'] as core.Map) - : null, - metricSpec: json_.containsKey('metricSpec') - ? GoogleCloudAiplatformV1QuestionAnsweringHelpfulnessSpec - .fromJson(json_['metricSpec'] - as core.Map) - : null, + datapointIds: (json_['datapointIds'] as core.List?) + ?.map((value) => value as core.String) + .toList(), ); core.Map toJson() => { - if (instance != null) 'instance': instance!, - if (metricSpec != null) 'metricSpec': metricSpec!, + if (datapointIds != null) 'datapointIds': datapointIds!, }; } -/// Spec for question answering helpfulness instance. -typedef GoogleCloudAiplatformV1QuestionAnsweringHelpfulnessInstance - = $Instance02; +/// Response message for IndexService.RemoveDatapoints +typedef GoogleCloudAiplatformV1RemoveDatapointsResponse = $Empty; -/// Spec for question answering helpfulness result. -class GoogleCloudAiplatformV1QuestionAnsweringHelpfulnessResult { - /// Confidence for question answering helpfulness score. +/// A ReservationAffinity can be used to configure a Vertex AI resource (e.g., a +/// DeployedModel) to draw its Compute Engine resources from a Shared +/// Reservation, or exclusively from on-demand capacity. +class GoogleCloudAiplatformV1ReservationAffinity { + /// Corresponds to the label key of a reservation resource. /// - /// Output only. - core.double? confidence; + /// To target a SPECIFIC_RESERVATION by name, use + /// `compute.googleapis.com/reservation-name` as the key and specify the name + /// of your reservation as its value. + /// + /// Optional. + core.String? 
key; - /// Explanation for question answering helpfulness score. + /// Specifies the reservation affinity type. /// - /// Output only. - core.String? explanation; + /// Required. + /// Possible string values are: + /// - "TYPE_UNSPECIFIED" : Default value. This should not be used. + /// - "NO_RESERVATION" : Do not consume from any reserved capacity, only use + /// on-demand. + /// - "ANY_RESERVATION" : Consume any reservation available, falling back to + /// on-demand. + /// - "SPECIFIC_RESERVATION" : Consume from a specific reservation. When + /// chosen, the reservation must be identified via the `key` and `values` + /// fields. + core.String? reservationAffinityType; - /// Question Answering Helpfulness score. + /// Corresponds to the label values of a reservation resource. /// - /// Output only. - core.double? score; + /// This must be the full resource name of the reservation. + /// + /// Optional. + core.List? values; - GoogleCloudAiplatformV1QuestionAnsweringHelpfulnessResult({ - this.confidence, - this.explanation, - this.score, + GoogleCloudAiplatformV1ReservationAffinity({ + this.key, + this.reservationAffinityType, + this.values, }); - GoogleCloudAiplatformV1QuestionAnsweringHelpfulnessResult.fromJson( - core.Map json_) + GoogleCloudAiplatformV1ReservationAffinity.fromJson(core.Map json_) : this( - confidence: (json_['confidence'] as core.num?)?.toDouble(), - explanation: json_['explanation'] as core.String?, - score: (json_['score'] as core.num?)?.toDouble(), + key: json_['key'] as core.String?, + reservationAffinityType: + json_['reservationAffinityType'] as core.String?, + values: (json_['values'] as core.List?) + ?.map((value) => value as core.String) + .toList(), ); core.Map toJson() => { - if (confidence != null) 'confidence': confidence!, - if (explanation != null) 'explanation': explanation!, - if (score != null) 'score': score!, + if (key != null) 'key': key!, + if (reservationAffinityType != null) + 'reservationAffinityType': reservationAffinityType!, + if (values != null) 'values': values!, }; } -/// Spec for question answering helpfulness metric. -class GoogleCloudAiplatformV1QuestionAnsweringHelpfulnessSpec { - /// Whether to use instance.reference to compute question answering - /// helpfulness. +/// Represents the spec of a group of resources of the same type, for example +/// machine type, disk, and accelerators, in a PersistentResource. +class GoogleCloudAiplatformV1ResourcePool { + /// Optional spec to configure GKE or Ray-on-Vertex autoscaling /// /// Optional. - core.bool? useReference; + GoogleCloudAiplatformV1ResourcePoolAutoscalingSpec? autoscalingSpec; - /// Which version to use for evaluation. + /// Disk spec for the machine in this node pool. /// /// Optional. - core.int? version; - - GoogleCloudAiplatformV1QuestionAnsweringHelpfulnessSpec({ - this.useReference, - this.version, - }); + GoogleCloudAiplatformV1DiskSpec? diskSpec; - GoogleCloudAiplatformV1QuestionAnsweringHelpfulnessSpec.fromJson( - core.Map json_) - : this( - useReference: json_['useReference'] as core.bool?, - version: json_['version'] as core.int?, - ); + /// The unique ID in a PersistentResource for referring to this resource pool. + /// + /// User can specify it if necessary. Otherwise, it's generated automatically. + /// + /// Immutable. + core.String? id; - core.Map toJson() => { - if (useReference != null) 'useReference': useReference!, - if (version != null) 'version': version!, - }; -} + /// The specification of a single machine. + /// + /// Required. Immutable. 
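Since ReservationAffinity is new in this surface, a short sketch of the SPECIFIC_RESERVATION case described in the comments above; the reservation resource name is hypothetical.

import 'package:googleapis/aiplatform/v1.dart';

void main() {
  // Pin capacity to one specific shared reservation. The key is the documented
  // compute.googleapis.com/reservation-name label.
  final affinity = GoogleCloudAiplatformV1ReservationAffinity(
    reservationAffinityType: 'SPECIFIC_RESERVATION',
    key: 'compute.googleapis.com/reservation-name',
    values: [
      'projects/my-project/zones/us-central1-a/reservations/my-reservation',
    ],
  );

  print(affinity.toJson());
}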
+ GoogleCloudAiplatformV1MachineSpec? machineSpec; -/// Input for question answering quality metric. -class GoogleCloudAiplatformV1QuestionAnsweringQualityInput { - /// Question answering quality instance. + /// The total number of machines to use for this resource pool. /// - /// Required. - GoogleCloudAiplatformV1QuestionAnsweringQualityInstance? instance; + /// Optional. + core.String? replicaCount; - /// Spec for question answering quality score metric. + /// The number of machines currently in use by training jobs for this resource + /// pool. /// - /// Required. - GoogleCloudAiplatformV1QuestionAnsweringQualitySpec? metricSpec; + /// Will replace idle_replica_count. + /// + /// Output only. + core.String? usedReplicaCount; - GoogleCloudAiplatformV1QuestionAnsweringQualityInput({ - this.instance, - this.metricSpec, + GoogleCloudAiplatformV1ResourcePool({ + this.autoscalingSpec, + this.diskSpec, + this.id, + this.machineSpec, + this.replicaCount, + this.usedReplicaCount, }); - GoogleCloudAiplatformV1QuestionAnsweringQualityInput.fromJson(core.Map json_) + GoogleCloudAiplatformV1ResourcePool.fromJson(core.Map json_) : this( - instance: json_.containsKey('instance') - ? GoogleCloudAiplatformV1QuestionAnsweringQualityInstance - .fromJson( - json_['instance'] as core.Map) + autoscalingSpec: json_.containsKey('autoscalingSpec') + ? GoogleCloudAiplatformV1ResourcePoolAutoscalingSpec.fromJson( + json_['autoscalingSpec'] + as core.Map) : null, - metricSpec: json_.containsKey('metricSpec') - ? GoogleCloudAiplatformV1QuestionAnsweringQualitySpec.fromJson( - json_['metricSpec'] as core.Map) + diskSpec: json_.containsKey('diskSpec') + ? GoogleCloudAiplatformV1DiskSpec.fromJson( + json_['diskSpec'] as core.Map) + : null, + id: json_['id'] as core.String?, + machineSpec: json_.containsKey('machineSpec') + ? GoogleCloudAiplatformV1MachineSpec.fromJson( + json_['machineSpec'] as core.Map) : null, + replicaCount: json_['replicaCount'] as core.String?, + usedReplicaCount: json_['usedReplicaCount'] as core.String?, ); core.Map toJson() => { - if (instance != null) 'instance': instance!, - if (metricSpec != null) 'metricSpec': metricSpec!, + if (autoscalingSpec != null) 'autoscalingSpec': autoscalingSpec!, + if (diskSpec != null) 'diskSpec': diskSpec!, + if (id != null) 'id': id!, + if (machineSpec != null) 'machineSpec': machineSpec!, + if (replicaCount != null) 'replicaCount': replicaCount!, + if (usedReplicaCount != null) 'usedReplicaCount': usedReplicaCount!, }; } -/// Spec for question answering quality instance. -class GoogleCloudAiplatformV1QuestionAnsweringQualityInstance { - /// Text to answer the question. - /// - /// Required. - core.String? context; - - /// Question Answering prompt for LLM. +/// The min/max number of replicas allowed if enabling autoscaling +class GoogleCloudAiplatformV1ResourcePoolAutoscalingSpec { + /// max replicas in the node pool, must be ≥ replica_count and \> + /// min_replica_count or will throw error /// - /// Required. - core.String? instruction; + /// Optional. + core.String? maxReplicaCount; - /// Output of the evaluated model. + /// min replicas in the node pool, must be ≤ replica_count and \< + /// max_replica_count or will throw error. /// - /// Required. - core.String? prediction; - - /// Ground truth used to compare against the prediction. 
+ /// For autoscaling enabled Ray-on-Vertex, we allow min_replica_count of a + /// resource_pool to be 0 to match the OSS Ray + /// behavior(https://docs.ray.io/en/latest/cluster/vms/user-guides/configuring-autoscaling.html#cluster-config-parameters). + /// As for Persistent Resource, the min_replica_count must be \> 0, we added a + /// corresponding validation inside + /// CreatePersistentResourceRequestValidator.java. /// /// Optional. - core.String? reference; + core.String? minReplicaCount; - GoogleCloudAiplatformV1QuestionAnsweringQualityInstance({ - this.context, - this.instruction, - this.prediction, - this.reference, + GoogleCloudAiplatformV1ResourcePoolAutoscalingSpec({ + this.maxReplicaCount, + this.minReplicaCount, }); - GoogleCloudAiplatformV1QuestionAnsweringQualityInstance.fromJson( - core.Map json_) + GoogleCloudAiplatformV1ResourcePoolAutoscalingSpec.fromJson(core.Map json_) : this( - context: json_['context'] as core.String?, - instruction: json_['instruction'] as core.String?, - prediction: json_['prediction'] as core.String?, - reference: json_['reference'] as core.String?, + maxReplicaCount: json_['maxReplicaCount'] as core.String?, + minReplicaCount: json_['minReplicaCount'] as core.String?, ); core.Map toJson() => { - if (context != null) 'context': context!, - if (instruction != null) 'instruction': instruction!, - if (prediction != null) 'prediction': prediction!, - if (reference != null) 'reference': reference!, + if (maxReplicaCount != null) 'maxReplicaCount': maxReplicaCount!, + if (minReplicaCount != null) 'minReplicaCount': minReplicaCount!, }; } -/// Spec for question answering quality result. -class GoogleCloudAiplatformV1QuestionAnsweringQualityResult { - /// Confidence for question answering quality score. - /// - /// Output only. - core.double? confidence; - - /// Explanation for question answering quality score. +/// Persistent Cluster runtime information as output +class GoogleCloudAiplatformV1ResourceRuntime { + /// URIs for user to connect to the Cluster. /// - /// Output only. - core.String? explanation; - - /// Question Answering Quality score. + /// Example: { "RAY_HEAD_NODE_INTERNAL_IP": "head-node-IP:10001" + /// "RAY_DASHBOARD_URI": "ray-dashboard-address:8888" } /// /// Output only. - core.double? score; + core.Map? accessUris; - GoogleCloudAiplatformV1QuestionAnsweringQualityResult({ - this.confidence, - this.explanation, - this.score, + GoogleCloudAiplatformV1ResourceRuntime({ + this.accessUris, }); - GoogleCloudAiplatformV1QuestionAnsweringQualityResult.fromJson(core.Map json_) + GoogleCloudAiplatformV1ResourceRuntime.fromJson(core.Map json_) : this( - confidence: (json_['confidence'] as core.num?)?.toDouble(), - explanation: json_['explanation'] as core.String?, - score: (json_['score'] as core.num?)?.toDouble(), + accessUris: + (json_['accessUris'] as core.Map?) + ?.map( + (key, value) => core.MapEntry( + key, + value as core.String, + ), + ), ); core.Map toJson() => { - if (confidence != null) 'confidence': confidence!, - if (explanation != null) 'explanation': explanation!, - if (score != null) 'score': score!, + if (accessUris != null) 'accessUris': accessUris!, }; } -/// Spec for question answering quality score metric. -typedef GoogleCloudAiplatformV1QuestionAnsweringQualitySpec - = $QuestionAnsweringQualitySpec; - -/// Input for question answering relevance metric. -class GoogleCloudAiplatformV1QuestionAnsweringRelevanceInput { - /// Question answering relevance instance. 
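// A sketch of the ResourcePool and ResourcePoolAutoscalingSpec messages from
// the hunks above, as they might be assembled for a PersistentResource.
// Assumes the same package:googleapis/aiplatform/v1.dart import; the
// machineType field on MachineSpec is not part of this hunk and is assumed
// from the wider API surface, and all values are placeholders.
import 'package:googleapis/aiplatform/v1.dart';

final workerPool = GoogleCloudAiplatformV1ResourcePool(
  id: 'ray-worker-pool',
  machineSpec: GoogleCloudAiplatformV1MachineSpec(
    machineType: 'n1-standard-8', // assumed field name; placeholder value
  ),
  // int64 fields are surfaced as Dart Strings by the generator.
  replicaCount: '2',
  autoscalingSpec: GoogleCloudAiplatformV1ResourcePoolAutoscalingSpec(
    minReplicaCount: '1',
    maxReplicaCount: '5',
  ),
);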
+/// Configuration for the runtime on a PersistentResource instance, including +/// but not limited to: * Service accounts used to run the workloads. +/// +/// * Whether to make it a dedicated Ray Cluster. +class GoogleCloudAiplatformV1ResourceRuntimeSpec { + /// Ray cluster configuration. /// - /// Required. - GoogleCloudAiplatformV1QuestionAnsweringRelevanceInstance? instance; + /// Required when creating a dedicated RayCluster on the PersistentResource. + /// + /// Optional. + GoogleCloudAiplatformV1RaySpec? raySpec; - /// Spec for question answering relevance score metric. + /// Configure the use of workload identity on the PersistentResource /// - /// Required. - GoogleCloudAiplatformV1QuestionAnsweringRelevanceSpec? metricSpec; + /// Optional. + GoogleCloudAiplatformV1ServiceAccountSpec? serviceAccountSpec; - GoogleCloudAiplatformV1QuestionAnsweringRelevanceInput({ - this.instance, - this.metricSpec, + GoogleCloudAiplatformV1ResourceRuntimeSpec({ + this.raySpec, + this.serviceAccountSpec, }); - GoogleCloudAiplatformV1QuestionAnsweringRelevanceInput.fromJson( - core.Map json_) + GoogleCloudAiplatformV1ResourceRuntimeSpec.fromJson(core.Map json_) : this( - instance: json_.containsKey('instance') - ? GoogleCloudAiplatformV1QuestionAnsweringRelevanceInstance - .fromJson( - json_['instance'] as core.Map) + raySpec: json_.containsKey('raySpec') + ? GoogleCloudAiplatformV1RaySpec.fromJson( + json_['raySpec'] as core.Map) : null, - metricSpec: json_.containsKey('metricSpec') - ? GoogleCloudAiplatformV1QuestionAnsweringRelevanceSpec.fromJson( - json_['metricSpec'] as core.Map) + serviceAccountSpec: json_.containsKey('serviceAccountSpec') + ? GoogleCloudAiplatformV1ServiceAccountSpec.fromJson( + json_['serviceAccountSpec'] + as core.Map) : null, ); core.Map toJson() => { - if (instance != null) 'instance': instance!, - if (metricSpec != null) 'metricSpec': metricSpec!, + if (raySpec != null) 'raySpec': raySpec!, + if (serviceAccountSpec != null) + 'serviceAccountSpec': serviceAccountSpec!, }; } -/// Spec for question answering relevance instance. -typedef GoogleCloudAiplatformV1QuestionAnsweringRelevanceInstance = $Instance02; - -/// Spec for question answering relevance result. -class GoogleCloudAiplatformV1QuestionAnsweringRelevanceResult { - /// Confidence for question answering relevance score. - /// - /// Output only. - core.double? confidence; - - /// Explanation for question answering relevance score. +/// Statistics information about resource consumption. +class GoogleCloudAiplatformV1ResourcesConsumed { + /// The number of replica hours used. /// - /// Output only. - core.String? explanation; - - /// Question Answering Relevance score. + /// Note that many replicas may run in parallel, and additionally any given + /// work may be queued for some time. Therefore this value is not strictly + /// related to wall time. /// /// Output only. - core.double? score; + core.double? 
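// A hedged sketch of the ResourceRuntimeSpec added above. It assumes the
// RaySpec, RayLogsSpec, and RayMetricSpec messages keep the fields shown in
// this diff's other hunks (they appear reordered in the generated file rather
// than dropped, since ResourceRuntimeSpec still references RaySpec). Pool ids
// are placeholders.
import 'package:googleapis/aiplatform/v1.dart';

final runtimeSpec = GoogleCloudAiplatformV1ResourceRuntimeSpec(
  raySpec: GoogleCloudAiplatformV1RaySpec(
    headNodeResourcePoolId: 'ray-head-pool',
    // Keep OSS Ray log export and metrics collection enabled.
    rayLogsSpec: GoogleCloudAiplatformV1RayLogsSpec(disabled: false),
    rayMetricSpec: GoogleCloudAiplatformV1RayMetricSpec(disabled: false),
  ),
);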
replicaHours; - GoogleCloudAiplatformV1QuestionAnsweringRelevanceResult({ - this.confidence, - this.explanation, - this.score, + GoogleCloudAiplatformV1ResourcesConsumed({ + this.replicaHours, }); - GoogleCloudAiplatformV1QuestionAnsweringRelevanceResult.fromJson( - core.Map json_) + GoogleCloudAiplatformV1ResourcesConsumed.fromJson(core.Map json_) : this( - confidence: (json_['confidence'] as core.num?)?.toDouble(), - explanation: json_['explanation'] as core.String?, - score: (json_['score'] as core.num?)?.toDouble(), + replicaHours: (json_['replicaHours'] as core.num?)?.toDouble(), ); core.Map toJson() => { - if (confidence != null) 'confidence': confidence!, - if (explanation != null) 'explanation': explanation!, - if (score != null) 'score': score!, + if (replicaHours != null) 'replicaHours': replicaHours!, }; } -/// Spec for question answering relevance metric. -class GoogleCloudAiplatformV1QuestionAnsweringRelevanceSpec { - /// Whether to use instance.reference to compute question answering relevance. - /// - /// Optional. - core.bool? useReference; +/// Request message for JobService.ResumeModelDeploymentMonitoringJob. +typedef GoogleCloudAiplatformV1ResumeModelDeploymentMonitoringJobRequest + = $Empty; - /// Which version to use for evaluation. +/// Request message for ScheduleService.ResumeSchedule. +class GoogleCloudAiplatformV1ResumeScheduleRequest { + /// Whether to backfill missed runs when the schedule is resumed from PAUSED + /// state. + /// + /// If set to true, all missed runs will be scheduled. New runs will be + /// scheduled after the backfill is complete. This will also update + /// Schedule.catch_up field. Default to false. /// /// Optional. - core.int? version; + core.bool? catchUp; - GoogleCloudAiplatformV1QuestionAnsweringRelevanceSpec({ - this.useReference, - this.version, + GoogleCloudAiplatformV1ResumeScheduleRequest({ + this.catchUp, }); - GoogleCloudAiplatformV1QuestionAnsweringRelevanceSpec.fromJson(core.Map json_) + GoogleCloudAiplatformV1ResumeScheduleRequest.fromJson(core.Map json_) : this( - useReference: json_['useReference'] as core.bool?, - version: json_['version'] as core.int?, + catchUp: json_['catchUp'] as core.bool?, ); core.Map toJson() => { - if (useReference != null) 'useReference': useReference!, - if (version != null) 'version': version!, + if (catchUp != null) 'catchUp': catchUp!, }; } -/// Request message for PredictionService.RawPredict. -class GoogleCloudAiplatformV1RawPredictRequest { - /// The prediction input. +/// Defines a retrieval tool that model can call to access external knowledge. +class GoogleCloudAiplatformV1Retrieval { + /// This option is no longer supported. /// - /// Supports HTTP headers and arbitrary data payload. A DeployedModel may have - /// an upper limit on the number of instances it supports per request. When - /// this limit it is exceeded for an AutoML model, the RawPredict method - /// returns an error. When this limit is exceeded for a custom-trained model, - /// the behavior varies depending on the model. You can specify the schema for - /// each instance in the predict_schemata.instance_schema_uri field when you - /// create a Model. This schema applies when you deploy the `Model` as a - /// `DeployedModel` to an Endpoint and use the `RawPredict` method. - GoogleApiHttpBody? httpBody; + /// Optional. Deprecated. + @core.Deprecated( + 'Not supported. Member documentation may have more information.', + ) + core.bool? 
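// ResumeScheduleRequest above carries only the catchUp flag; the sketch below
// shows the JSON it serializes to. Same assumed import as the earlier sketches.
import 'package:googleapis/aiplatform/v1.dart';

void main() {
  final resume = GoogleCloudAiplatformV1ResumeScheduleRequest(catchUp: true);
  print(resume.toJson()); // {catchUp: true}
}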
disableAttribution; - GoogleCloudAiplatformV1RawPredictRequest({ - this.httpBody, + /// Set to use data source powered by Vertex AI Search. + GoogleCloudAiplatformV1VertexAISearch? vertexAiSearch; + + /// Set to use data source powered by Vertex RAG store. + /// + /// User data is uploaded via the VertexRagDataService. + GoogleCloudAiplatformV1VertexRagStore? vertexRagStore; + + GoogleCloudAiplatformV1Retrieval({ + this.disableAttribution, + this.vertexAiSearch, + this.vertexRagStore, }); - GoogleCloudAiplatformV1RawPredictRequest.fromJson(core.Map json_) + GoogleCloudAiplatformV1Retrieval.fromJson(core.Map json_) : this( - httpBody: json_.containsKey('httpBody') - ? GoogleApiHttpBody.fromJson( - json_['httpBody'] as core.Map) + disableAttribution: json_['disableAttribution'] as core.bool?, + vertexAiSearch: json_.containsKey('vertexAiSearch') + ? GoogleCloudAiplatformV1VertexAISearch.fromJson( + json_['vertexAiSearch'] + as core.Map) + : null, + vertexRagStore: json_.containsKey('vertexRagStore') + ? GoogleCloudAiplatformV1VertexRagStore.fromJson( + json_['vertexRagStore'] + as core.Map) : null, ); core.Map toJson() => { - if (httpBody != null) 'httpBody': httpBody!, + if (disableAttribution != null) + 'disableAttribution': disableAttribution!, + if (vertexAiSearch != null) 'vertexAiSearch': vertexAiSearch!, + if (vertexRagStore != null) 'vertexRagStore': vertexRagStore!, }; } -/// Configuration for the Ray OSS Logs. -class GoogleCloudAiplatformV1RayLogsSpec { - /// Flag to disable the export of Ray OSS logs to Cloud Logging. +/// Metadata related to retrieval in the grounding flow. +class GoogleCloudAiplatformV1RetrievalMetadata { + /// Score indicating how likely information from Google Search could help + /// answer the prompt. + /// + /// The score is in the range `[0, 1]`, where 0 is the least likely and 1 is + /// the most likely. This score is only populated when Google Search grounding + /// and dynamic retrieval is enabled. It will be compared to the threshold to + /// determine whether to trigger Google Search. /// /// Optional. - core.bool? disabled; + core.double? googleSearchDynamicRetrievalScore; - GoogleCloudAiplatformV1RayLogsSpec({ - this.disabled, + GoogleCloudAiplatformV1RetrievalMetadata({ + this.googleSearchDynamicRetrievalScore, }); - GoogleCloudAiplatformV1RayLogsSpec.fromJson(core.Map json_) + GoogleCloudAiplatformV1RetrievalMetadata.fromJson(core.Map json_) : this( - disabled: json_['disabled'] as core.bool?, + googleSearchDynamicRetrievalScore: + (json_['googleSearchDynamicRetrievalScore'] as core.num?) + ?.toDouble(), ); core.Map toJson() => { - if (disabled != null) 'disabled': disabled!, + if (googleSearchDynamicRetrievalScore != null) + 'googleSearchDynamicRetrievalScore': + googleSearchDynamicRetrievalScore!, }; } -/// Configuration for the Ray metrics. -class GoogleCloudAiplatformV1RayMetricSpec { - /// Flag to disable the Ray metrics collection. +/// Request message for VertexRagService.RetrieveContexts. +class GoogleCloudAiplatformV1RetrieveContextsRequest { + /// Single RAG retrieve query. /// - /// Optional. - core.bool? disabled; + /// Required. + GoogleCloudAiplatformV1RagQuery? query; - GoogleCloudAiplatformV1RayMetricSpec({ - this.disabled, + /// The data source for Vertex RagStore. + GoogleCloudAiplatformV1RetrieveContextsRequestVertexRagStore? 
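// RetrievalMetadata above arrives on the response side of the grounding flow;
// a small sketch of decoding it and reading the dynamic-retrieval score. The
// map literal is illustrative, not a recorded API response.
import 'package:googleapis/aiplatform/v1.dart';

void main() {
  final metadata = GoogleCloudAiplatformV1RetrievalMetadata.fromJson(
    {'googleSearchDynamicRetrievalScore': 0.72},
  );
  // Scores are in [0, 1]; higher means Google Search grounding is more likely
  // to help answer the prompt.
  print('dynamic retrieval score: ${metadata.googleSearchDynamicRetrievalScore}');
}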
vertexRagStore; + + GoogleCloudAiplatformV1RetrieveContextsRequest({ + this.query, + this.vertexRagStore, }); - GoogleCloudAiplatformV1RayMetricSpec.fromJson(core.Map json_) + GoogleCloudAiplatformV1RetrieveContextsRequest.fromJson(core.Map json_) : this( - disabled: json_['disabled'] as core.bool?, + query: json_.containsKey('query') + ? GoogleCloudAiplatformV1RagQuery.fromJson( + json_['query'] as core.Map) + : null, + vertexRagStore: json_.containsKey('vertexRagStore') + ? GoogleCloudAiplatformV1RetrieveContextsRequestVertexRagStore + .fromJson(json_['vertexRagStore'] + as core.Map) + : null, ); core.Map toJson() => { - if (disabled != null) 'disabled': disabled!, + if (query != null) 'query': query!, + if (vertexRagStore != null) 'vertexRagStore': vertexRagStore!, }; } -/// Configuration information for the Ray cluster. -/// -/// For experimental launch, Ray cluster creation and Persistent cluster -/// creation are 1:1 mapping: We will provision all the nodes within the -/// Persistent cluster as Ray nodes. -class GoogleCloudAiplatformV1RaySpec { - /// This will be used to indicate which resource pool will serve as the Ray - /// head node(the first node within that pool). - /// - /// Will use the machine from the first workerpool as the head node by default - /// if this field isn't set. - /// - /// Optional. - core.String? headNodeResourcePoolId; - - /// Default image for user to choose a preferred ML framework (for example, - /// TensorFlow or Pytorch) by choosing from - /// [Vertex prebuilt images](https://cloud.google.com/vertex-ai/docs/training/pre-built-containers). - /// - /// Either this or the resource_pool_images is required. Use this field if you - /// need all the resource pools to have the same Ray image. Otherwise, use the - /// {@code resource_pool_images} field. +/// The data source for Vertex RagStore. +class GoogleCloudAiplatformV1RetrieveContextsRequestVertexRagStore { + /// The representation of the rag source. /// - /// Optional. - core.String? imageUri; - - /// OSS Ray logging configurations. + /// It can be used to specify corpus only or ragfiles. Currently only support + /// one corpus or multiple files from one corpus. In the future we may open up + /// multiple corpora support. /// /// Optional. - GoogleCloudAiplatformV1RayLogsSpec? rayLogsSpec; + core.List< + GoogleCloudAiplatformV1RetrieveContextsRequestVertexRagStoreRagResource>? + ragResources; - /// Ray metrics configurations. + /// Only return contexts with vector distance smaller than the threshold. /// /// Optional. - GoogleCloudAiplatformV1RayMetricSpec? rayMetricSpec; + @core.Deprecated( + 'Not supported. Member documentation may have more information.', + ) + core.double? vectorDistanceThreshold; - /// Required if image_uri isn't set. - /// - /// A map of resource_pool_id to prebuild Ray image if user need to use - /// different images for different head/worker pools. This map needs to cover - /// all the resource pool ids. Example: { "ray_head_node_pool": "head image" - /// "ray_worker_node_pool1": "worker image" "ray_worker_node_pool2": "another - /// worker image" } - /// - /// Optional. - core.Map? 
resourcePoolImages; + GoogleCloudAiplatformV1RetrieveContextsRequestVertexRagStore({ + this.ragResources, + this.vectorDistanceThreshold, + }); - GoogleCloudAiplatformV1RaySpec({ - this.headNodeResourcePoolId, - this.imageUri, - this.rayLogsSpec, - this.rayMetricSpec, - this.resourcePoolImages, + GoogleCloudAiplatformV1RetrieveContextsRequestVertexRagStore.fromJson( + core.Map json_) + : this( + ragResources: (json_['ragResources'] as core.List?) + ?.map((value) => + GoogleCloudAiplatformV1RetrieveContextsRequestVertexRagStoreRagResource + .fromJson(value as core.Map)) + .toList(), + vectorDistanceThreshold: + (json_['vectorDistanceThreshold'] as core.num?)?.toDouble(), + ); + + core.Map toJson() => { + if (ragResources != null) 'ragResources': ragResources!, + if (vectorDistanceThreshold != null) + 'vectorDistanceThreshold': vectorDistanceThreshold!, + }; +} + +/// The definition of the Rag resource. +typedef GoogleCloudAiplatformV1RetrieveContextsRequestVertexRagStoreRagResource + = $VertexRagStoreRagResource; + +/// Response message for VertexRagService.RetrieveContexts. +class GoogleCloudAiplatformV1RetrieveContextsResponse { + /// The contexts of the query. + GoogleCloudAiplatformV1RagContexts? contexts; + + GoogleCloudAiplatformV1RetrieveContextsResponse({ + this.contexts, }); - GoogleCloudAiplatformV1RaySpec.fromJson(core.Map json_) + GoogleCloudAiplatformV1RetrieveContextsResponse.fromJson(core.Map json_) : this( - headNodeResourcePoolId: - json_['headNodeResourcePoolId'] as core.String?, - imageUri: json_['imageUri'] as core.String?, - rayLogsSpec: json_.containsKey('rayLogsSpec') - ? GoogleCloudAiplatformV1RayLogsSpec.fromJson( - json_['rayLogsSpec'] as core.Map) + contexts: json_.containsKey('contexts') + ? GoogleCloudAiplatformV1RagContexts.fromJson( + json_['contexts'] as core.Map) : null, - rayMetricSpec: json_.containsKey('rayMetricSpec') - ? GoogleCloudAiplatformV1RayMetricSpec.fromJson( - json_['rayMetricSpec'] as core.Map) - : null, - resourcePoolImages: (json_['resourcePoolImages'] - as core.Map?) - ?.map( - (key, value) => core.MapEntry( - key, - value as core.String, - ), - ), ); core.Map toJson() => { - if (headNodeResourcePoolId != null) - 'headNodeResourcePoolId': headNodeResourcePoolId!, - if (imageUri != null) 'imageUri': imageUri!, - if (rayLogsSpec != null) 'rayLogsSpec': rayLogsSpec!, - if (rayMetricSpec != null) 'rayMetricSpec': rayMetricSpec!, - if (resourcePoolImages != null) - 'resourcePoolImages': resourcePoolImages!, + if (contexts != null) 'contexts': contexts!, }; } -/// Request message for FeaturestoreOnlineServingService.ReadFeatureValues. -class GoogleCloudAiplatformV1ReadFeatureValuesRequest { - /// ID for a specific entity. - /// - /// For example, for a machine learning model predicting user clicks on a - /// website, an entity ID could be `user_123`. +/// Input for rouge metric. +class GoogleCloudAiplatformV1RougeInput { + /// Repeated rouge instances. /// /// Required. - core.String? entityId; + core.List? instances; - /// Selector choosing Features of the target EntityType. + /// Spec for rouge score metric. /// /// Required. - GoogleCloudAiplatformV1FeatureSelector? featureSelector; + GoogleCloudAiplatformV1RougeSpec? 
metricSpec; - GoogleCloudAiplatformV1ReadFeatureValuesRequest({ - this.entityId, - this.featureSelector, + GoogleCloudAiplatformV1RougeInput({ + this.instances, + this.metricSpec, }); - GoogleCloudAiplatformV1ReadFeatureValuesRequest.fromJson(core.Map json_) + GoogleCloudAiplatformV1RougeInput.fromJson(core.Map json_) : this( - entityId: json_['entityId'] as core.String?, - featureSelector: json_.containsKey('featureSelector') - ? GoogleCloudAiplatformV1FeatureSelector.fromJson( - json_['featureSelector'] - as core.Map) + instances: (json_['instances'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1RougeInstance.fromJson( + value as core.Map)) + .toList(), + metricSpec: json_.containsKey('metricSpec') + ? GoogleCloudAiplatformV1RougeSpec.fromJson( + json_['metricSpec'] as core.Map) : null, ); core.Map toJson() => { - if (entityId != null) 'entityId': entityId!, - if (featureSelector != null) 'featureSelector': featureSelector!, + if (instances != null) 'instances': instances!, + if (metricSpec != null) 'metricSpec': metricSpec!, }; } -/// Response message for FeaturestoreOnlineServingService.ReadFeatureValues. -class GoogleCloudAiplatformV1ReadFeatureValuesResponse { - /// Entity view with Feature values. - /// - /// This may be the entity in the Featurestore if values for all Features were - /// requested, or a projection of the entity in the Featurestore if values for - /// only some Features were requested. - GoogleCloudAiplatformV1ReadFeatureValuesResponseEntityView? entityView; +/// Spec for rouge instance. +typedef GoogleCloudAiplatformV1RougeInstance = $Instance00; - /// Response header. - GoogleCloudAiplatformV1ReadFeatureValuesResponseHeader? header; +/// Rouge metric value for an instance. +class GoogleCloudAiplatformV1RougeMetricValue { + /// Rouge score. + /// + /// Output only. + core.double? score; - GoogleCloudAiplatformV1ReadFeatureValuesResponse({ - this.entityView, - this.header, + GoogleCloudAiplatformV1RougeMetricValue({ + this.score, }); - GoogleCloudAiplatformV1ReadFeatureValuesResponse.fromJson(core.Map json_) + GoogleCloudAiplatformV1RougeMetricValue.fromJson(core.Map json_) : this( - entityView: json_.containsKey('entityView') - ? GoogleCloudAiplatformV1ReadFeatureValuesResponseEntityView - .fromJson(json_['entityView'] - as core.Map) - : null, - header: json_.containsKey('header') - ? GoogleCloudAiplatformV1ReadFeatureValuesResponseHeader.fromJson( - json_['header'] as core.Map) - : null, + score: (json_['score'] as core.num?)?.toDouble(), ); core.Map toJson() => { - if (entityView != null) 'entityView': entityView!, - if (header != null) 'header': header!, + if (score != null) 'score': score!, }; } -/// Entity view with Feature values. -class GoogleCloudAiplatformV1ReadFeatureValuesResponseEntityView { - /// Each piece of data holds the k requested values for one requested Feature. +/// Results for rouge metric. +class GoogleCloudAiplatformV1RougeResults { + /// Rouge metric values. /// - /// If no values for the requested Feature exist, the corresponding cell will - /// be empty. This has the same size and is in the same order as the features - /// from the header ReadFeatureValuesResponse.header. - core.List? - data; - - /// ID of the requested entity. - core.String? entityId; + /// Output only. + core.List? 
rougeMetricValues; - GoogleCloudAiplatformV1ReadFeatureValuesResponseEntityView({ - this.data, - this.entityId, + GoogleCloudAiplatformV1RougeResults({ + this.rougeMetricValues, }); - GoogleCloudAiplatformV1ReadFeatureValuesResponseEntityView.fromJson( - core.Map json_) + GoogleCloudAiplatformV1RougeResults.fromJson(core.Map json_) : this( - data: (json_['data'] as core.List?) - ?.map((value) => - GoogleCloudAiplatformV1ReadFeatureValuesResponseEntityViewData - .fromJson(value as core.Map)) + rougeMetricValues: (json_['rougeMetricValues'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1RougeMetricValue.fromJson( + value as core.Map)) .toList(), - entityId: json_['entityId'] as core.String?, ); core.Map toJson() => { - if (data != null) 'data': data!, - if (entityId != null) 'entityId': entityId!, + if (rougeMetricValues != null) 'rougeMetricValues': rougeMetricValues!, }; } -/// Container to hold value(s), successive in time, for one Feature from the -/// request. -class GoogleCloudAiplatformV1ReadFeatureValuesResponseEntityViewData { - /// Feature value if a single value is requested. - GoogleCloudAiplatformV1FeatureValue? value; +/// Spec for rouge score metric - calculates the recall of n-grams in prediction +/// as compared to reference - returns a score ranging between 0 and 1. +class GoogleCloudAiplatformV1RougeSpec { + /// Supported rouge types are rougen\[1-9\], rougeL, and rougeLsum. + /// + /// Optional. + core.String? rougeType; - /// Feature values list if values, successive in time, are requested. + /// Whether to split summaries while using rougeLsum. /// - /// If the requested number of values is greater than the number of existing - /// Feature values, nonexistent values are omitted instead of being returned - /// as empty. - GoogleCloudAiplatformV1FeatureValueList? values; + /// Optional. + core.bool? splitSummaries; - GoogleCloudAiplatformV1ReadFeatureValuesResponseEntityViewData({ - this.value, - this.values, + /// Whether to use stemmer to compute rouge score. + /// + /// Optional. + core.bool? useStemmer; + + GoogleCloudAiplatformV1RougeSpec({ + this.rougeType, + this.splitSummaries, + this.useStemmer, }); - GoogleCloudAiplatformV1ReadFeatureValuesResponseEntityViewData.fromJson( - core.Map json_) + GoogleCloudAiplatformV1RougeSpec.fromJson(core.Map json_) : this( - value: json_.containsKey('value') - ? GoogleCloudAiplatformV1FeatureValue.fromJson( - json_['value'] as core.Map) - : null, - values: json_.containsKey('values') - ? GoogleCloudAiplatformV1FeatureValueList.fromJson( - json_['values'] as core.Map) - : null, + rougeType: json_['rougeType'] as core.String?, + splitSummaries: json_['splitSummaries'] as core.bool?, + useStemmer: json_['useStemmer'] as core.bool?, ); core.Map toJson() => { - if (value != null) 'value': value!, - if (values != null) 'values': values!, + if (rougeType != null) 'rougeType': rougeType!, + if (splitSummaries != null) 'splitSummaries': splitSummaries!, + if (useStemmer != null) 'useStemmer': useStemmer!, }; } -/// Metadata for requested Features. -class GoogleCloudAiplatformV1ReadFeatureValuesResponseFeatureDescriptor { - /// Feature ID. - core.String? id; +/// Input for safety metric. +class GoogleCloudAiplatformV1SafetyInput { + /// Safety instance. + /// + /// Required. + GoogleCloudAiplatformV1SafetyInstance? instance; - GoogleCloudAiplatformV1ReadFeatureValuesResponseFeatureDescriptor({ - this.id, + /// Spec for safety metric. + /// + /// Required. + GoogleCloudAiplatformV1SafetySpec? 
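// A sketch pairing the request-side RougeSpec with the response-side
// RougeResults/RougeMetricValue messages defined above. The JSON map is
// illustrative, not a recorded API response; import assumed as before.
import 'package:googleapis/aiplatform/v1.dart';

void main() {
  // ROUGE-L with stemming, per the supported rouge types listed above.
  final spec = GoogleCloudAiplatformV1RougeSpec(
    rougeType: 'rougeL',
    useStemmer: true,
  );
  print(spec.toJson());

  // One score per evaluated instance comes back in rougeMetricValues.
  final results = GoogleCloudAiplatformV1RougeResults.fromJson({
    'rougeMetricValues': [
      {'score': 0.42},
      {'score': 0.58},
    ],
  });
  for (final value in results.rougeMetricValues ?? const []) {
    print(value.score);
  }
}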
metricSpec; + + GoogleCloudAiplatformV1SafetyInput({ + this.instance, + this.metricSpec, }); - GoogleCloudAiplatformV1ReadFeatureValuesResponseFeatureDescriptor.fromJson( - core.Map json_) + GoogleCloudAiplatformV1SafetyInput.fromJson(core.Map json_) : this( - id: json_['id'] as core.String?, + instance: json_.containsKey('instance') + ? GoogleCloudAiplatformV1SafetyInstance.fromJson( + json_['instance'] as core.Map) + : null, + metricSpec: json_.containsKey('metricSpec') + ? GoogleCloudAiplatformV1SafetySpec.fromJson( + json_['metricSpec'] as core.Map) + : null, ); core.Map toJson() => { - if (id != null) 'id': id!, + if (instance != null) 'instance': instance!, + if (metricSpec != null) 'metricSpec': metricSpec!, }; } -/// Response header with metadata for the requested -/// ReadFeatureValuesRequest.entity_type and Features. -class GoogleCloudAiplatformV1ReadFeatureValuesResponseHeader { - /// The resource name of the EntityType from the ReadFeatureValuesRequest. +/// Spec for safety instance. +typedef GoogleCloudAiplatformV1SafetyInstance = $Instance01; + +/// Safety rating corresponding to the generated content. +class GoogleCloudAiplatformV1SafetyRating { + /// Indicates whether the content was filtered out because of this rating. /// - /// Value format: - /// `projects/{project}/locations/{location}/featurestores/{featurestore}/entityTypes/{entityType}`. - core.String? entityType; + /// Output only. + core.bool? blocked; - /// List of Feature metadata corresponding to each piece of - /// ReadFeatureValuesResponse.EntityView.data. - core.List? - featureDescriptors; + /// Harm category. + /// + /// Output only. + /// Possible string values are: + /// - "HARM_CATEGORY_UNSPECIFIED" : The harm category is unspecified. + /// - "HARM_CATEGORY_HATE_SPEECH" : The harm category is hate speech. + /// - "HARM_CATEGORY_DANGEROUS_CONTENT" : The harm category is dangerous + /// content. + /// - "HARM_CATEGORY_HARASSMENT" : The harm category is harassment. + /// - "HARM_CATEGORY_SEXUALLY_EXPLICIT" : The harm category is sexually + /// explicit content. + /// - "HARM_CATEGORY_CIVIC_INTEGRITY" : The harm category is civic integrity. + core.String? category; - GoogleCloudAiplatformV1ReadFeatureValuesResponseHeader({ - this.entityType, - this.featureDescriptors, + /// Harm probability levels in the content. + /// + /// Output only. + /// Possible string values are: + /// - "HARM_PROBABILITY_UNSPECIFIED" : Harm probability unspecified. + /// - "NEGLIGIBLE" : Negligible level of harm. + /// - "LOW" : Low level of harm. + /// - "MEDIUM" : Medium level of harm. + /// - "HIGH" : High level of harm. + core.String? probability; + + /// Harm probability score. + /// + /// Output only. + core.double? probabilityScore; + + /// Harm severity levels in the content. + /// + /// Output only. + /// Possible string values are: + /// - "HARM_SEVERITY_UNSPECIFIED" : Harm severity unspecified. + /// - "HARM_SEVERITY_NEGLIGIBLE" : Negligible level of harm severity. + /// - "HARM_SEVERITY_LOW" : Low level of harm severity. + /// - "HARM_SEVERITY_MEDIUM" : Medium level of harm severity. + /// - "HARM_SEVERITY_HIGH" : High level of harm severity. + core.String? severity; + + /// Harm severity score. + /// + /// Output only. + core.double? 
severityScore; + + GoogleCloudAiplatformV1SafetyRating({ + this.blocked, + this.category, + this.probability, + this.probabilityScore, + this.severity, + this.severityScore, }); - GoogleCloudAiplatformV1ReadFeatureValuesResponseHeader.fromJson( - core.Map json_) + GoogleCloudAiplatformV1SafetyRating.fromJson(core.Map json_) : this( - entityType: json_['entityType'] as core.String?, - featureDescriptors: (json_['featureDescriptors'] as core.List?) - ?.map((value) => - GoogleCloudAiplatformV1ReadFeatureValuesResponseFeatureDescriptor - .fromJson(value as core.Map)) - .toList(), + blocked: json_['blocked'] as core.bool?, + category: json_['category'] as core.String?, + probability: json_['probability'] as core.String?, + probabilityScore: + (json_['probabilityScore'] as core.num?)?.toDouble(), + severity: json_['severity'] as core.String?, + severityScore: (json_['severityScore'] as core.num?)?.toDouble(), ); core.Map toJson() => { - if (entityType != null) 'entityType': entityType!, - if (featureDescriptors != null) - 'featureDescriptors': featureDescriptors!, + if (blocked != null) 'blocked': blocked!, + if (category != null) 'category': category!, + if (probability != null) 'probability': probability!, + if (probabilityScore != null) 'probabilityScore': probabilityScore!, + if (severity != null) 'severity': severity!, + if (severityScore != null) 'severityScore': severityScore!, }; } -/// The request message for MatchService.ReadIndexDatapoints. -class GoogleCloudAiplatformV1ReadIndexDatapointsRequest { - /// The ID of the DeployedIndex that will serve the request. - core.String? deployedIndexId; +/// Spec for safety result. +class GoogleCloudAiplatformV1SafetyResult { + /// Confidence for safety score. + /// + /// Output only. + core.double? confidence; - /// IDs of the datapoints to be searched for. - core.List? ids; + /// Explanation for safety score. + /// + /// Output only. + core.String? explanation; - GoogleCloudAiplatformV1ReadIndexDatapointsRequest({ - this.deployedIndexId, - this.ids, + /// Safety score. + /// + /// Output only. + core.double? score; + + GoogleCloudAiplatformV1SafetyResult({ + this.confidence, + this.explanation, + this.score, }); - GoogleCloudAiplatformV1ReadIndexDatapointsRequest.fromJson(core.Map json_) + GoogleCloudAiplatformV1SafetyResult.fromJson(core.Map json_) : this( - deployedIndexId: json_['deployedIndexId'] as core.String?, - ids: (json_['ids'] as core.List?) - ?.map((value) => value as core.String) - .toList(), + confidence: (json_['confidence'] as core.num?)?.toDouble(), + explanation: json_['explanation'] as core.String?, + score: (json_['score'] as core.num?)?.toDouble(), ); core.Map toJson() => { - if (deployedIndexId != null) 'deployedIndexId': deployedIndexId!, - if (ids != null) 'ids': ids!, + if (confidence != null) 'confidence': confidence!, + if (explanation != null) 'explanation': explanation!, + if (score != null) 'score': score!, }; } -/// The response message for MatchService.ReadIndexDatapoints. -class GoogleCloudAiplatformV1ReadIndexDatapointsResponse { - /// The result list of datapoints. - core.List? datapoints; +/// Safety settings. +class GoogleCloudAiplatformV1SafetySetting { + /// Harm category. + /// + /// Required. + /// Possible string values are: + /// - "HARM_CATEGORY_UNSPECIFIED" : The harm category is unspecified. + /// - "HARM_CATEGORY_HATE_SPEECH" : The harm category is hate speech. + /// - "HARM_CATEGORY_DANGEROUS_CONTENT" : The harm category is dangerous + /// content. 
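// SafetyRating above is output-only; this sketch decodes one rating from a
// response map and checks whether it blocked the content. The map literal is
// illustrative only; import assumed as before.
import 'package:googleapis/aiplatform/v1.dart';

void main() {
  final rating = GoogleCloudAiplatformV1SafetyRating.fromJson({
    'category': 'HARM_CATEGORY_HARASSMENT',
    'probability': 'LOW',
    'probabilityScore': 0.12,
    'severity': 'HARM_SEVERITY_NEGLIGIBLE',
    'severityScore': 0.05,
    'blocked': false,
  });
  if (rating.blocked ?? false) {
    print('Content was filtered for ${rating.category}');
  } else {
    print('${rating.category}: probability=${rating.probability}');
  }
}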
+ /// - "HARM_CATEGORY_HARASSMENT" : The harm category is harassment. + /// - "HARM_CATEGORY_SEXUALLY_EXPLICIT" : The harm category is sexually + /// explicit content. + /// - "HARM_CATEGORY_CIVIC_INTEGRITY" : The harm category is civic integrity. + core.String? category; - GoogleCloudAiplatformV1ReadIndexDatapointsResponse({ - this.datapoints, + /// Specify if the threshold is used for probability or severity score. + /// + /// If not specified, the threshold is used for probability score. + /// + /// Optional. + /// Possible string values are: + /// - "HARM_BLOCK_METHOD_UNSPECIFIED" : The harm block method is unspecified. + /// - "SEVERITY" : The harm block method uses both probability and severity + /// scores. + /// - "PROBABILITY" : The harm block method uses the probability score. + core.String? method; + + /// The harm block threshold. + /// + /// Required. + /// Possible string values are: + /// - "HARM_BLOCK_THRESHOLD_UNSPECIFIED" : Unspecified harm block threshold. + /// - "BLOCK_LOW_AND_ABOVE" : Block low threshold and above (i.e. block more). + /// - "BLOCK_MEDIUM_AND_ABOVE" : Block medium threshold and above. + /// - "BLOCK_ONLY_HIGH" : Block only high threshold (i.e. block less). + /// - "BLOCK_NONE" : Block none. + /// - "OFF" : Turn off the safety filter. + core.String? threshold; + + GoogleCloudAiplatformV1SafetySetting({ + this.category, + this.method, + this.threshold, }); - GoogleCloudAiplatformV1ReadIndexDatapointsResponse.fromJson(core.Map json_) + GoogleCloudAiplatformV1SafetySetting.fromJson(core.Map json_) : this( - datapoints: (json_['datapoints'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1IndexDatapoint.fromJson( - value as core.Map)) - .toList(), + category: json_['category'] as core.String?, + method: json_['method'] as core.String?, + threshold: json_['threshold'] as core.String?, ); core.Map toJson() => { - if (datapoints != null) 'datapoints': datapoints!, + if (category != null) 'category': category!, + if (method != null) 'method': method!, + if (threshold != null) 'threshold': threshold!, }; } -/// Response message for TensorboardService.ReadTensorboardBlobData. -class GoogleCloudAiplatformV1ReadTensorboardBlobDataResponse { - /// Blob messages containing blob bytes. - core.List? blobs; +/// Spec for safety metric. +typedef GoogleCloudAiplatformV1SafetySpec = $Spec; - GoogleCloudAiplatformV1ReadTensorboardBlobDataResponse({ - this.blobs, +/// Active learning data sampling config. +/// +/// For every active learning labeling iteration, it will select a batch of data +/// based on the sampling strategy. +class GoogleCloudAiplatformV1SampleConfig { + /// The percentage of data needed to be labeled in each following batch + /// (except the first batch). + core.int? followingBatchSamplePercentage; + + /// The percentage of data needed to be labeled in the first batch. + core.int? initialBatchSamplePercentage; + + /// Field to choose sampling strategy. + /// + /// Sampling strategy will decide which data should be selected for human + /// labeling in every batch. + /// Possible string values are: + /// - "SAMPLE_STRATEGY_UNSPECIFIED" : Default will be treated as UNCERTAINTY. + /// - "UNCERTAINTY" : Sample the most uncertain data to label. + core.String? 
sampleStrategy; + + GoogleCloudAiplatformV1SampleConfig({ + this.followingBatchSamplePercentage, + this.initialBatchSamplePercentage, + this.sampleStrategy, }); - GoogleCloudAiplatformV1ReadTensorboardBlobDataResponse.fromJson( - core.Map json_) + GoogleCloudAiplatformV1SampleConfig.fromJson(core.Map json_) : this( - blobs: (json_['blobs'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1TensorboardBlob.fromJson( - value as core.Map)) - .toList(), + followingBatchSamplePercentage: + json_['followingBatchSamplePercentage'] as core.int?, + initialBatchSamplePercentage: + json_['initialBatchSamplePercentage'] as core.int?, + sampleStrategy: json_['sampleStrategy'] as core.String?, ); core.Map toJson() => { - if (blobs != null) 'blobs': blobs!, + if (followingBatchSamplePercentage != null) + 'followingBatchSamplePercentage': followingBatchSamplePercentage!, + if (initialBatchSamplePercentage != null) + 'initialBatchSamplePercentage': initialBatchSamplePercentage!, + if (sampleStrategy != null) 'sampleStrategy': sampleStrategy!, }; } -/// Response message for TensorboardService.ReadTensorboardSize. -class GoogleCloudAiplatformV1ReadTensorboardSizeResponse { - /// Payload storage size for the TensorBoard - core.String? storageSizeByte; +/// An attribution method that approximates Shapley values for features that +/// contribute to the label being predicted. +/// +/// A sampling strategy is used to approximate the value rather than considering +/// all subsets of features. +class GoogleCloudAiplatformV1SampledShapleyAttribution { + /// The number of feature permutations to consider when approximating the + /// Shapley values. + /// + /// Valid range of its value is \[1, 50\], inclusively. + /// + /// Required. + core.int? pathCount; - GoogleCloudAiplatformV1ReadTensorboardSizeResponse({ - this.storageSizeByte, + GoogleCloudAiplatformV1SampledShapleyAttribution({ + this.pathCount, }); - GoogleCloudAiplatformV1ReadTensorboardSizeResponse.fromJson(core.Map json_) + GoogleCloudAiplatformV1SampledShapleyAttribution.fromJson(core.Map json_) : this( - storageSizeByte: json_['storageSizeByte'] as core.String?, + pathCount: json_['pathCount'] as core.int?, ); core.Map toJson() => { - if (storageSizeByte != null) 'storageSizeByte': storageSizeByte!, + if (pathCount != null) 'pathCount': pathCount!, }; } -/// Response message for TensorboardService.ReadTensorboardTimeSeriesData. -class GoogleCloudAiplatformV1ReadTensorboardTimeSeriesDataResponse { - /// The returned time series data. - GoogleCloudAiplatformV1TimeSeriesData? timeSeriesData; +/// Sampling Strategy for logging, can be for both training and prediction +/// dataset. +class GoogleCloudAiplatformV1SamplingStrategy { + /// Random sample config. + /// + /// Will support more sampling strategies later. + GoogleCloudAiplatformV1SamplingStrategyRandomSampleConfig? randomSampleConfig; - GoogleCloudAiplatformV1ReadTensorboardTimeSeriesDataResponse({ - this.timeSeriesData, + GoogleCloudAiplatformV1SamplingStrategy({ + this.randomSampleConfig, }); - GoogleCloudAiplatformV1ReadTensorboardTimeSeriesDataResponse.fromJson( - core.Map json_) + GoogleCloudAiplatformV1SamplingStrategy.fromJson(core.Map json_) : this( - timeSeriesData: json_.containsKey('timeSeriesData') - ? GoogleCloudAiplatformV1TimeSeriesData.fromJson( - json_['timeSeriesData'] + randomSampleConfig: json_.containsKey('randomSampleConfig') + ? 
GoogleCloudAiplatformV1SamplingStrategyRandomSampleConfig + .fromJson(json_['randomSampleConfig'] as core.Map) : null, ); core.Map toJson() => { - if (timeSeriesData != null) 'timeSeriesData': timeSeriesData!, + if (randomSampleConfig != null) + 'randomSampleConfig': randomSampleConfig!, }; } -/// Response message for TensorboardService.ReadTensorboardUsage. -class GoogleCloudAiplatformV1ReadTensorboardUsageResponse { - /// Maps year-month (YYYYMM) string to per month usage data. - core.Map? - monthlyUsageData; +/// Requests are randomly selected. +class GoogleCloudAiplatformV1SamplingStrategyRandomSampleConfig { + /// Sample rate (0, 1\] + core.double? sampleRate; - GoogleCloudAiplatformV1ReadTensorboardUsageResponse({ - this.monthlyUsageData, + GoogleCloudAiplatformV1SamplingStrategyRandomSampleConfig({ + this.sampleRate, }); - GoogleCloudAiplatformV1ReadTensorboardUsageResponse.fromJson(core.Map json_) + GoogleCloudAiplatformV1SamplingStrategyRandomSampleConfig.fromJson( + core.Map json_) : this( - monthlyUsageData: (json_['monthlyUsageData'] - as core.Map?) - ?.map( - (key, value) => core.MapEntry( - key, - GoogleCloudAiplatformV1ReadTensorboardUsageResponsePerMonthUsageData - .fromJson(value as core.Map), - ), - ), + sampleRate: (json_['sampleRate'] as core.num?)?.toDouble(), ); core.Map toJson() => { - if (monthlyUsageData != null) 'monthlyUsageData': monthlyUsageData!, + if (sampleRate != null) 'sampleRate': sampleRate!, }; } -/// Per month usage data -class GoogleCloudAiplatformV1ReadTensorboardUsageResponsePerMonthUsageData { - /// Usage data for each user in the given month. - core.List< - GoogleCloudAiplatformV1ReadTensorboardUsageResponsePerUserUsageData>? - userUsageData; +/// A SavedQuery is a view of the dataset. +/// +/// It references a subset of annotations by problem type and filters. +class GoogleCloudAiplatformV1SavedQuery { + /// Filters on the Annotations in the dataset. + /// + /// Output only. + core.String? annotationFilter; - GoogleCloudAiplatformV1ReadTensorboardUsageResponsePerMonthUsageData({ - this.userUsageData, + /// Number of AnnotationSpecs in the context of the SavedQuery. + /// + /// Output only. + core.int? annotationSpecCount; + + /// Timestamp when this SavedQuery was created. + /// + /// Output only. + core.String? createTime; + + /// The user-defined name of the SavedQuery. + /// + /// The name can be up to 128 characters long and can consist of any UTF-8 + /// characters. + /// + /// Required. + core.String? displayName; + + /// Used to perform a consistent read-modify-write update. + /// + /// If not set, a blind "overwrite" update happens. + core.String? etag; + + /// Some additional information about the SavedQuery. + /// + /// The values for Object must be JSON objects. It can consist of `num`, + /// `String`, `bool` and `null` as well as `Map` and `List` values. + core.Object? metadata; + + /// Resource name of the SavedQuery. + /// + /// Output only. + core.String? name; + + /// Problem type of the SavedQuery. + /// + /// Allowed values: * IMAGE_CLASSIFICATION_SINGLE_LABEL * + /// IMAGE_CLASSIFICATION_MULTI_LABEL * IMAGE_BOUNDING_POLY * + /// IMAGE_BOUNDING_BOX * TEXT_CLASSIFICATION_SINGLE_LABEL * + /// TEXT_CLASSIFICATION_MULTI_LABEL * TEXT_EXTRACTION * TEXT_SENTIMENT * + /// VIDEO_CLASSIFICATION * VIDEO_OBJECT_TRACKING + /// + /// Required. + core.String? problemType; + + /// If the Annotations belonging to the SavedQuery can be used for AutoML + /// training. + /// + /// Output only. + core.bool? 
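// Two small configuration sketches for the messages above: an active-learning
// SampleConfig and a model-monitoring SamplingStrategy with a random sample
// rate. All numbers are placeholders; import assumed as before.
import 'package:googleapis/aiplatform/v1.dart';

final sampleConfig = GoogleCloudAiplatformV1SampleConfig(
  initialBatchSamplePercentage: 20,
  followingBatchSamplePercentage: 10,
  sampleStrategy: 'UNCERTAINTY',
);

final loggingStrategy = GoogleCloudAiplatformV1SamplingStrategy(
  randomSampleConfig:
      GoogleCloudAiplatformV1SamplingStrategyRandomSampleConfig(
    sampleRate: 0.1, // must fall in (0, 1]
  ),
);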
supportAutomlTraining; + + /// Timestamp when SavedQuery was last updated. + /// + /// Output only. + core.String? updateTime; + + GoogleCloudAiplatformV1SavedQuery({ + this.annotationFilter, + this.annotationSpecCount, + this.createTime, + this.displayName, + this.etag, + this.metadata, + this.name, + this.problemType, + this.supportAutomlTraining, + this.updateTime, }); - GoogleCloudAiplatformV1ReadTensorboardUsageResponsePerMonthUsageData.fromJson( - core.Map json_) + GoogleCloudAiplatformV1SavedQuery.fromJson(core.Map json_) : this( - userUsageData: (json_['userUsageData'] as core.List?) - ?.map((value) => - GoogleCloudAiplatformV1ReadTensorboardUsageResponsePerUserUsageData - .fromJson(value as core.Map)) - .toList(), + annotationFilter: json_['annotationFilter'] as core.String?, + annotationSpecCount: json_['annotationSpecCount'] as core.int?, + createTime: json_['createTime'] as core.String?, + displayName: json_['displayName'] as core.String?, + etag: json_['etag'] as core.String?, + metadata: json_['metadata'], + name: json_['name'] as core.String?, + problemType: json_['problemType'] as core.String?, + supportAutomlTraining: json_['supportAutomlTraining'] as core.bool?, + updateTime: json_['updateTime'] as core.String?, ); core.Map toJson() => { - if (userUsageData != null) 'userUsageData': userUsageData!, + if (annotationFilter != null) 'annotationFilter': annotationFilter!, + if (annotationSpecCount != null) + 'annotationSpecCount': annotationSpecCount!, + if (createTime != null) 'createTime': createTime!, + if (displayName != null) 'displayName': displayName!, + if (etag != null) 'etag': etag!, + if (metadata != null) 'metadata': metadata!, + if (name != null) 'name': name!, + if (problemType != null) 'problemType': problemType!, + if (supportAutomlTraining != null) + 'supportAutomlTraining': supportAutomlTraining!, + if (updateTime != null) 'updateTime': updateTime!, }; } -/// Per user usage data. -class GoogleCloudAiplatformV1ReadTensorboardUsageResponsePerUserUsageData { - /// User's username - core.String? username; - - /// Number of times the user has read data within the Tensorboard. - core.String? viewCount; +/// One point viewable on a scalar metric plot. +class GoogleCloudAiplatformV1Scalar { + /// Value of the point at this step / timestamp. + core.double? value; - GoogleCloudAiplatformV1ReadTensorboardUsageResponsePerUserUsageData({ - this.username, - this.viewCount, + GoogleCloudAiplatformV1Scalar({ + this.value, }); - GoogleCloudAiplatformV1ReadTensorboardUsageResponsePerUserUsageData.fromJson( - core.Map json_) + GoogleCloudAiplatformV1Scalar.fromJson(core.Map json_) : this( - username: json_['username'] as core.String?, - viewCount: json_['viewCount'] as core.String?, + value: (json_['value'] as core.num?)?.toDouble(), ); core.Map toJson() => { - if (username != null) 'username': username!, - if (viewCount != null) 'viewCount': viewCount!, + if (value != null) 'value': value!, }; } -/// Request message for GenAiTuningService.RebaseTunedModel. -class GoogleCloudAiplatformV1RebaseTunedModelRequest { - /// The Google Cloud Storage location to write the artifacts. +/// An instance of a Schedule periodically schedules runs to make API calls +/// based on user specified time specification and API request type. +class GoogleCloudAiplatformV1Schedule { + /// Whether new scheduled runs can be queued when max_concurrent_runs limit is + /// reached. + /// + /// If set to true, new runs will be queued instead of skipped. Default to + /// false. /// /// Optional. 
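// SavedQuery above is largely output-only; this sketch decodes a list entry
// and reads its problem type and annotation-spec count. The map literal,
// including the resource name, is illustrative only.
import 'package:googleapis/aiplatform/v1.dart';

void main() {
  final savedQuery = GoogleCloudAiplatformV1SavedQuery.fromJson({
    'name': 'projects/123/locations/us-central1/datasets/456/savedQueries/789',
    'displayName': 'my-image-labels',
    'problemType': 'IMAGE_CLASSIFICATION_SINGLE_LABEL',
    'annotationSpecCount': 3,
    'supportAutomlTraining': true,
  });
  print('${savedQuery.displayName}: ${savedQuery.problemType} '
      '(${savedQuery.annotationSpecCount} annotation specs)');
}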
- GoogleCloudAiplatformV1GcsDestination? artifactDestination; + core.bool? allowQueueing; - /// By default, bison to gemini migration will always create new - /// model/endpoint, but for gemini-1.0 to gemini-1.5 migration, we default - /// deploy to the same endpoint. + /// Whether to backfill missed runs when the schedule is resumed from PAUSED + /// state. /// - /// See details in this Section. + /// If set to true, all missed runs will be scheduled. New runs will be + /// scheduled after the backfill is complete. Default to false. + /// + /// Output only. + core.bool? catchUp; + + /// Request for NotebookService.CreateNotebookExecutionJob. + GoogleCloudAiplatformV1CreateNotebookExecutionJobRequest? + createNotebookExecutionJobRequest; + + /// Request for PipelineService.CreatePipelineJob. + /// + /// CreatePipelineJobRequest.parent field is required (format: + /// projects/{project}/locations/{location}). + GoogleCloudAiplatformV1CreatePipelineJobRequest? createPipelineJobRequest; + + /// Timestamp when this Schedule was created. + /// + /// Output only. + core.String? createTime; + + /// Cron schedule (https://en.wikipedia.org/wiki/Cron) to launch scheduled + /// runs. + /// + /// To explicitly set a timezone to the cron tab, apply a prefix in the cron + /// tab: "CRON_TZ=${IANA_TIME_ZONE}" or "TZ=${IANA_TIME_ZONE}". The + /// ${IANA_TIME_ZONE} may only be a valid string from IANA time zone database. + /// For example, "CRON_TZ=America/New_York 1 * * * *", or "TZ=America/New_York + /// 1 * * * *". + core.String? cron; + + /// User provided name of the Schedule. + /// + /// The name can be up to 128 characters long and can consist of any UTF-8 + /// characters. + /// + /// Required. + core.String? displayName; + + /// Timestamp after which no new runs can be scheduled. + /// + /// If specified, The schedule will be completed when either end_time is + /// reached or when scheduled_run_count \>= max_run_count. If not specified, + /// new runs will keep getting scheduled until this Schedule is paused or + /// deleted. Already scheduled runs will be allowed to complete. Unset if not + /// specified. /// /// Optional. - core.bool? deployToSameEndpoint; + core.String? endTime; - /// TunedModel reference to retrieve the legacy model information. + /// Timestamp when this Schedule was last paused. + /// + /// Unset if never paused. + /// + /// Output only. + core.String? lastPauseTime; + + /// Timestamp when this Schedule was last resumed. + /// + /// Unset if never resumed from pause. + /// + /// Output only. + core.String? lastResumeTime; + + /// Response of the last scheduled run. + /// + /// This is the response for starting the scheduled requests and not the + /// execution of the operations/jobs created by the requests (if applicable). + /// Unset if no run has been scheduled yet. + /// + /// Output only. + GoogleCloudAiplatformV1ScheduleRunResponse? lastScheduledRunResponse; + + /// Maximum number of runs that can be started concurrently for this Schedule. + /// + /// This is the limit for starting the scheduled requests and not the + /// execution of the operations/jobs created by the requests (if applicable). /// /// Required. - GoogleCloudAiplatformV1TunedModelRef? tunedModelRef; + core.String? maxConcurrentRunCount; - /// The TuningJob to be updated. + /// Maximum run count of the schedule. /// - /// Users can use this TuningJob field to overwrite tuning configs. 
+ /// If specified, The schedule will be completed when either started_run_count + /// \>= max_run_count or when end_time is reached. If not specified, new runs + /// will keep getting scheduled until this Schedule is paused or deleted. + /// Already scheduled runs will be allowed to complete. Unset if not + /// specified. /// /// Optional. - GoogleCloudAiplatformV1TuningJob? tuningJob; + core.String? maxRunCount; - GoogleCloudAiplatformV1RebaseTunedModelRequest({ - this.artifactDestination, - this.deployToSameEndpoint, - this.tunedModelRef, - this.tuningJob, + /// The resource name of the Schedule. + /// + /// Immutable. + core.String? name; + + /// Timestamp when this Schedule should schedule the next run. + /// + /// Having a next_run_time in the past means the runs are being started behind + /// schedule. + /// + /// Output only. + core.String? nextRunTime; + + /// Timestamp after which the first run can be scheduled. + /// + /// Default to Schedule create time if not specified. + /// + /// Optional. + core.String? startTime; + + /// The number of runs started by this schedule. + /// + /// Output only. + core.String? startedRunCount; + + /// The state of this Schedule. + /// + /// Output only. + /// Possible string values are: + /// - "STATE_UNSPECIFIED" : Unspecified. + /// - "ACTIVE" : The Schedule is active. Runs are being scheduled on the + /// user-specified timespec. + /// - "PAUSED" : The schedule is paused. No new runs will be created until the + /// schedule is resumed. Already started runs will be allowed to complete. + /// - "COMPLETED" : The Schedule is completed. No new runs will be scheduled. + /// Already started runs will be allowed to complete. Schedules in completed + /// state cannot be paused or resumed. + core.String? state; + + /// Timestamp when this Schedule was updated. + /// + /// Output only. + core.String? updateTime; + + GoogleCloudAiplatformV1Schedule({ + this.allowQueueing, + this.catchUp, + this.createNotebookExecutionJobRequest, + this.createPipelineJobRequest, + this.createTime, + this.cron, + this.displayName, + this.endTime, + this.lastPauseTime, + this.lastResumeTime, + this.lastScheduledRunResponse, + this.maxConcurrentRunCount, + this.maxRunCount, + this.name, + this.nextRunTime, + this.startTime, + this.startedRunCount, + this.state, + this.updateTime, }); - GoogleCloudAiplatformV1RebaseTunedModelRequest.fromJson(core.Map json_) + GoogleCloudAiplatformV1Schedule.fromJson(core.Map json_) : this( - artifactDestination: json_.containsKey('artifactDestination') - ? GoogleCloudAiplatformV1GcsDestination.fromJson( - json_['artifactDestination'] - as core.Map) - : null, - deployToSameEndpoint: json_['deployToSameEndpoint'] as core.bool?, - tunedModelRef: json_.containsKey('tunedModelRef') - ? GoogleCloudAiplatformV1TunedModelRef.fromJson( - json_['tunedModelRef'] as core.Map) - : null, - tuningJob: json_.containsKey('tuningJob') - ? GoogleCloudAiplatformV1TuningJob.fromJson( - json_['tuningJob'] as core.Map) - : null, + allowQueueing: json_['allowQueueing'] as core.bool?, + catchUp: json_['catchUp'] as core.bool?, + createNotebookExecutionJobRequest: + json_.containsKey('createNotebookExecutionJobRequest') + ? GoogleCloudAiplatformV1CreateNotebookExecutionJobRequest + .fromJson(json_['createNotebookExecutionJobRequest'] + as core.Map) + : null, + createPipelineJobRequest: + json_.containsKey('createPipelineJobRequest') + ? 
GoogleCloudAiplatformV1CreatePipelineJobRequest.fromJson( + json_['createPipelineJobRequest'] + as core.Map) + : null, + createTime: json_['createTime'] as core.String?, + cron: json_['cron'] as core.String?, + displayName: json_['displayName'] as core.String?, + endTime: json_['endTime'] as core.String?, + lastPauseTime: json_['lastPauseTime'] as core.String?, + lastResumeTime: json_['lastResumeTime'] as core.String?, + lastScheduledRunResponse: + json_.containsKey('lastScheduledRunResponse') + ? GoogleCloudAiplatformV1ScheduleRunResponse.fromJson( + json_['lastScheduledRunResponse'] + as core.Map) + : null, + maxConcurrentRunCount: json_['maxConcurrentRunCount'] as core.String?, + maxRunCount: json_['maxRunCount'] as core.String?, + name: json_['name'] as core.String?, + nextRunTime: json_['nextRunTime'] as core.String?, + startTime: json_['startTime'] as core.String?, + startedRunCount: json_['startedRunCount'] as core.String?, + state: json_['state'] as core.String?, + updateTime: json_['updateTime'] as core.String?, ); core.Map toJson() => { - if (artifactDestination != null) - 'artifactDestination': artifactDestination!, - if (deployToSameEndpoint != null) - 'deployToSameEndpoint': deployToSameEndpoint!, - if (tunedModelRef != null) 'tunedModelRef': tunedModelRef!, - if (tuningJob != null) 'tuningJob': tuningJob!, + if (allowQueueing != null) 'allowQueueing': allowQueueing!, + if (catchUp != null) 'catchUp': catchUp!, + if (createNotebookExecutionJobRequest != null) + 'createNotebookExecutionJobRequest': + createNotebookExecutionJobRequest!, + if (createPipelineJobRequest != null) + 'createPipelineJobRequest': createPipelineJobRequest!, + if (createTime != null) 'createTime': createTime!, + if (cron != null) 'cron': cron!, + if (displayName != null) 'displayName': displayName!, + if (endTime != null) 'endTime': endTime!, + if (lastPauseTime != null) 'lastPauseTime': lastPauseTime!, + if (lastResumeTime != null) 'lastResumeTime': lastResumeTime!, + if (lastScheduledRunResponse != null) + 'lastScheduledRunResponse': lastScheduledRunResponse!, + if (maxConcurrentRunCount != null) + 'maxConcurrentRunCount': maxConcurrentRunCount!, + if (maxRunCount != null) 'maxRunCount': maxRunCount!, + if (name != null) 'name': name!, + if (nextRunTime != null) 'nextRunTime': nextRunTime!, + if (startTime != null) 'startTime': startTime!, + if (startedRunCount != null) 'startedRunCount': startedRunCount!, + if (state != null) 'state': state!, + if (updateTime != null) 'updateTime': updateTime!, }; } -/// Request message for PersistentResourceService.RebootPersistentResource. -typedef GoogleCloudAiplatformV1RebootPersistentResourceRequest = $Empty; - -/// Request message for MetadataService.DeleteContextChildrenRequest. -typedef GoogleCloudAiplatformV1RemoveContextChildrenRequest - = $ContextChildrenRequest; - -/// Response message for MetadataService.RemoveContextChildren. -typedef GoogleCloudAiplatformV1RemoveContextChildrenResponse = $Empty; +/// Status of a scheduled run. +class GoogleCloudAiplatformV1ScheduleRunResponse { + /// The response of the scheduled run. + core.String? runResponse; -/// Request message for IndexService.RemoveDatapoints -class GoogleCloudAiplatformV1RemoveDatapointsRequest { - /// A list of datapoint ids to be deleted. - core.List? datapointIds; + /// The scheduled run time based on the user-specified schedule. + core.String? 
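// A sketch of the Schedule message completed above, using the cron form its
// field docs describe. Only the parent of CreatePipelineJobRequest is shown
// (the docs above call it required); the rest of that request is elided and
// its field names are assumed from the wider API. Names are placeholders.
import 'package:googleapis/aiplatform/v1.dart';

final nightlySchedule = GoogleCloudAiplatformV1Schedule(
  displayName: 'nightly-pipeline',
  // Explicit time zone prefix, as described for the cron field above.
  cron: 'CRON_TZ=America/New_York 0 2 * * *',
  maxConcurrentRunCount: '1', // int64 fields are Strings in the Dart client
  allowQueueing: true,
  createPipelineJobRequest: GoogleCloudAiplatformV1CreatePipelineJobRequest(
    parent: 'projects/my-project/locations/us-central1',
    // pipelineJob: ...  (elided)
  ),
);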
scheduledRunTime; - GoogleCloudAiplatformV1RemoveDatapointsRequest({ - this.datapointIds, + GoogleCloudAiplatformV1ScheduleRunResponse({ + this.runResponse, + this.scheduledRunTime, }); - GoogleCloudAiplatformV1RemoveDatapointsRequest.fromJson(core.Map json_) + GoogleCloudAiplatformV1ScheduleRunResponse.fromJson(core.Map json_) : this( - datapointIds: (json_['datapointIds'] as core.List?) - ?.map((value) => value as core.String) - .toList(), + runResponse: json_['runResponse'] as core.String?, + scheduledRunTime: json_['scheduledRunTime'] as core.String?, ); core.Map toJson() => { - if (datapointIds != null) 'datapointIds': datapointIds!, + if (runResponse != null) 'runResponse': runResponse!, + if (scheduledRunTime != null) 'scheduledRunTime': scheduledRunTime!, }; } -/// Response message for IndexService.RemoveDatapoints -typedef GoogleCloudAiplatformV1RemoveDatapointsResponse = $Empty; +/// All parameters related to queuing and scheduling of custom jobs. +class GoogleCloudAiplatformV1Scheduling { + /// Indicates if the job should retry for internal errors after the job starts + /// running. + /// + /// If true, overrides `Scheduling.restart_job_on_worker_restart` to false. + /// + /// Optional. + core.bool? disableRetries; -/// A ReservationAffinity can be used to configure a Vertex AI resource (e.g., a -/// DeployedModel) to draw its Compute Engine resources from a Shared -/// Reservation, or exclusively from on-demand capacity. -class GoogleCloudAiplatformV1ReservationAffinity { - /// Corresponds to the label key of a reservation resource. + /// This is the maximum duration that a job will wait for the requested + /// resources to be provisioned if the scheduling strategy is set to + /// \[Strategy.DWS_FLEX_START\]. /// - /// To target a SPECIFIC_RESERVATION by name, use - /// `compute.googleapis.com/reservation-name` as the key and specify the name - /// of your reservation as its value. + /// If set to 0, the job will wait indefinitely. The default is 24 hours. /// /// Optional. - core.String? key; + core.String? maxWaitDuration; - /// Specifies the reservation affinity type. + /// Restarts the entire CustomJob if a worker gets restarted. /// - /// Required. + /// This feature can be used by distributed training jobs that are not + /// resilient to workers leaving and joining a job. + /// + /// Optional. + core.bool? restartJobOnWorkerRestart; + + /// This determines which type of scheduling strategy to use. + /// + /// Optional. /// Possible string values are: - /// - "TYPE_UNSPECIFIED" : Default value. This should not be used. - /// - "NO_RESERVATION" : Do not consume from any reserved capacity, only use - /// on-demand. - /// - "ANY_RESERVATION" : Consume any reservation available, falling back to - /// on-demand. - /// - "SPECIFIC_RESERVATION" : Consume from a specific reservation. When - /// chosen, the reservation must be identified via the `key` and `values` - /// fields. - core.String? reservationAffinityType; + /// - "STRATEGY_UNSPECIFIED" : Strategy will default to STANDARD. + /// - "ON_DEMAND" : Deprecated. Regular on-demand provisioning strategy. + /// - "LOW_COST" : Deprecated. Low cost by making potential use of spot + /// resources. + /// - "STANDARD" : Standard provisioning strategy uses regular on-demand + /// resources. + /// - "SPOT" : Spot provisioning strategy uses spot resources. + /// - "FLEX_START" : Flex Start strategy uses DWS to queue for resources. + core.String? strategy; - /// Corresponds to the label values of a reservation resource. 
+ /// The maximum job running time. /// - /// This must be the full resource name of the reservation. + /// The default is 7 days. /// /// Optional. - core.List? values; + core.String? timeout; - GoogleCloudAiplatformV1ReservationAffinity({ - this.key, - this.reservationAffinityType, - this.values, + GoogleCloudAiplatformV1Scheduling({ + this.disableRetries, + this.maxWaitDuration, + this.restartJobOnWorkerRestart, + this.strategy, + this.timeout, }); - GoogleCloudAiplatformV1ReservationAffinity.fromJson(core.Map json_) + GoogleCloudAiplatformV1Scheduling.fromJson(core.Map json_) : this( - key: json_['key'] as core.String?, - reservationAffinityType: - json_['reservationAffinityType'] as core.String?, - values: (json_['values'] as core.List?) - ?.map((value) => value as core.String) - .toList(), + disableRetries: json_['disableRetries'] as core.bool?, + maxWaitDuration: json_['maxWaitDuration'] as core.String?, + restartJobOnWorkerRestart: + json_['restartJobOnWorkerRestart'] as core.bool?, + strategy: json_['strategy'] as core.String?, + timeout: json_['timeout'] as core.String?, ); core.Map toJson() => { - if (key != null) 'key': key!, - if (reservationAffinityType != null) - 'reservationAffinityType': reservationAffinityType!, - if (values != null) 'values': values!, + if (disableRetries != null) 'disableRetries': disableRetries!, + if (maxWaitDuration != null) 'maxWaitDuration': maxWaitDuration!, + if (restartJobOnWorkerRestart != null) + 'restartJobOnWorkerRestart': restartJobOnWorkerRestart!, + if (strategy != null) 'strategy': strategy!, + if (timeout != null) 'timeout': timeout!, }; } -/// Represents the spec of a group of resources of the same type, for example -/// machine type, disk, and accelerators, in a PersistentResource. -class GoogleCloudAiplatformV1ResourcePool { - /// Optional spec to configure GKE or Ray-on-Vertex autoscaling +/// Schema is used to define the format of input/output data. +/// +/// Represents a select subset of an +/// [OpenAPI 3.0 schema object](https://spec.openapis.org/oas/v3.0.3#schema-object). +/// More fields may be added in the future as needed. +class GoogleCloudAiplatformV1Schema { + /// The value should be validated against any (one or more) of the subschemas + /// in the list. /// /// Optional. - GoogleCloudAiplatformV1ResourcePoolAutoscalingSpec? autoscalingSpec; + core.List? anyOf; - /// Disk spec for the machine in this node pool. + /// Default value of the data. /// /// Optional. - GoogleCloudAiplatformV1DiskSpec? diskSpec; - - /// The unique ID in a PersistentResource for referring to this resource pool. - /// - /// User can specify it if necessary. Otherwise, it's generated automatically. - /// - /// Immutable. - core.String? id; - - /// The specification of a single machine. /// - /// Required. Immutable. - GoogleCloudAiplatformV1MachineSpec? machineSpec; + /// The values for Object must be JSON objects. It can consist of `num`, + /// `String`, `bool` and `null` as well as `Map` and `List` values. + core.Object? default_; - /// The total number of machines to use for this resource pool. + /// The description of the data. /// /// Optional. - core.String? replicaCount; + core.String? description; - /// The number of machines currently in use by training jobs for this resource - /// pool. - /// - /// Will replace idle_replica_count. + /// Possible values of the element of primitive type with enum format. /// - /// Output only. - core.String? 
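A hedged sketch of the new GoogleCloudAiplatformV1Scheduling fields for a custom job, assuming the same import as above; the duration strings follow the usual protobuf JSON form ('86400s' is 24 hours) and only mirror the documented defaults.

// Flex Start queues for resources via DWS; the values below are illustrative.
final scheduling = GoogleCloudAiplatformV1Scheduling(
  strategy: 'FLEX_START',
  maxWaitDuration: '86400s', // documented default wait of 24 hours
  restartJobOnWorkerRestart: false,
  timeout: '604800s', // documented default running time of 7 days
);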
usedReplicaCount; - - GoogleCloudAiplatformV1ResourcePool({ - this.autoscalingSpec, - this.diskSpec, - this.id, - this.machineSpec, - this.replicaCount, - this.usedReplicaCount, - }); - - GoogleCloudAiplatformV1ResourcePool.fromJson(core.Map json_) - : this( - autoscalingSpec: json_.containsKey('autoscalingSpec') - ? GoogleCloudAiplatformV1ResourcePoolAutoscalingSpec.fromJson( - json_['autoscalingSpec'] - as core.Map) - : null, - diskSpec: json_.containsKey('diskSpec') - ? GoogleCloudAiplatformV1DiskSpec.fromJson( - json_['diskSpec'] as core.Map) - : null, - id: json_['id'] as core.String?, - machineSpec: json_.containsKey('machineSpec') - ? GoogleCloudAiplatformV1MachineSpec.fromJson( - json_['machineSpec'] as core.Map) - : null, - replicaCount: json_['replicaCount'] as core.String?, - usedReplicaCount: json_['usedReplicaCount'] as core.String?, - ); - - core.Map toJson() => { - if (autoscalingSpec != null) 'autoscalingSpec': autoscalingSpec!, - if (diskSpec != null) 'diskSpec': diskSpec!, - if (id != null) 'id': id!, - if (machineSpec != null) 'machineSpec': machineSpec!, - if (replicaCount != null) 'replicaCount': replicaCount!, - if (usedReplicaCount != null) 'usedReplicaCount': usedReplicaCount!, - }; -} - -/// The min/max number of replicas allowed if enabling autoscaling -class GoogleCloudAiplatformV1ResourcePoolAutoscalingSpec { - /// max replicas in the node pool, must be ≥ replica_count and \> - /// min_replica_count or will throw error + /// Examples: 1. We can define direction as : {type:STRING, format:enum, + /// enum:\["EAST", NORTH", "SOUTH", "WEST"\]} 2. We can define apartment + /// number as : {type:INTEGER, format:enum, enum:\["101", "201", "301"\]} /// /// Optional. - core.String? maxReplicaCount; + core.List? enum_; - /// min replicas in the node pool, must be ≤ replica_count and \< - /// max_replica_count or will throw error. + /// Example of the object. /// - /// For autoscaling enabled Ray-on-Vertex, we allow min_replica_count of a - /// resource_pool to be 0 to match the OSS Ray - /// behavior(https://docs.ray.io/en/latest/cluster/vms/user-guides/configuring-autoscaling.html#cluster-config-parameters). - /// As for Persistent Resource, the min_replica_count must be \> 0, we added a - /// corresponding validation inside - /// CreatePersistentResourceRequestValidator.java. + /// Will only populated when the object is the root. /// /// Optional. - core.String? minReplicaCount; - - GoogleCloudAiplatformV1ResourcePoolAutoscalingSpec({ - this.maxReplicaCount, - this.minReplicaCount, - }); - - GoogleCloudAiplatformV1ResourcePoolAutoscalingSpec.fromJson(core.Map json_) - : this( - maxReplicaCount: json_['maxReplicaCount'] as core.String?, - minReplicaCount: json_['minReplicaCount'] as core.String?, - ); - - core.Map toJson() => { - if (maxReplicaCount != null) 'maxReplicaCount': maxReplicaCount!, - if (minReplicaCount != null) 'minReplicaCount': minReplicaCount!, - }; -} - -/// Persistent Cluster runtime information as output -class GoogleCloudAiplatformV1ResourceRuntime { - /// URIs for user to connect to the Cluster. - /// - /// Example: { "RAY_HEAD_NODE_INTERNAL_IP": "head-node-IP:10001" - /// "RAY_DASHBOARD_URI": "ray-dashboard-address:8888" } /// - /// Output only. - core.Map? accessUris; - - GoogleCloudAiplatformV1ResourceRuntime({ - this.accessUris, - }); - - GoogleCloudAiplatformV1ResourceRuntime.fromJson(core.Map json_) - : this( - accessUris: - (json_['accessUris'] as core.Map?) 
- ?.map( - (key, value) => core.MapEntry( - key, - value as core.String, - ), - ), - ); - - core.Map toJson() => { - if (accessUris != null) 'accessUris': accessUris!, - }; -} + /// The values for Object must be JSON objects. It can consist of `num`, + /// `String`, `bool` and `null` as well as `Map` and `List` values. + core.Object? example; -/// Configuration for the runtime on a PersistentResource instance, including -/// but not limited to: * Service accounts used to run the workloads. -/// -/// * Whether to make it a dedicated Ray Cluster. -class GoogleCloudAiplatformV1ResourceRuntimeSpec { - /// Ray cluster configuration. + /// The format of the data. /// - /// Required when creating a dedicated RayCluster on the PersistentResource. + /// Supported formats: for NUMBER type: "float", "double" for INTEGER type: + /// "int32", "int64" for STRING type: "email", "byte", etc /// /// Optional. - GoogleCloudAiplatformV1RaySpec? raySpec; + core.String? format; - /// Configure the use of workload identity on the PersistentResource + /// SCHEMA FIELDS FOR TYPE ARRAY Schema of the elements of Type.ARRAY. /// /// Optional. - GoogleCloudAiplatformV1ServiceAccountSpec? serviceAccountSpec; - - GoogleCloudAiplatformV1ResourceRuntimeSpec({ - this.raySpec, - this.serviceAccountSpec, - }); - - GoogleCloudAiplatformV1ResourceRuntimeSpec.fromJson(core.Map json_) - : this( - raySpec: json_.containsKey('raySpec') - ? GoogleCloudAiplatformV1RaySpec.fromJson( - json_['raySpec'] as core.Map) - : null, - serviceAccountSpec: json_.containsKey('serviceAccountSpec') - ? GoogleCloudAiplatformV1ServiceAccountSpec.fromJson( - json_['serviceAccountSpec'] - as core.Map) - : null, - ); - - core.Map toJson() => { - if (raySpec != null) 'raySpec': raySpec!, - if (serviceAccountSpec != null) - 'serviceAccountSpec': serviceAccountSpec!, - }; -} + GoogleCloudAiplatformV1Schema? items; -/// Statistics information about resource consumption. -class GoogleCloudAiplatformV1ResourcesConsumed { - /// The number of replica hours used. - /// - /// Note that many replicas may run in parallel, and additionally any given - /// work may be queued for some time. Therefore this value is not strictly - /// related to wall time. + /// Maximum number of the elements for Type.ARRAY. /// - /// Output only. - core.double? replicaHours; - - GoogleCloudAiplatformV1ResourcesConsumed({ - this.replicaHours, - }); - - GoogleCloudAiplatformV1ResourcesConsumed.fromJson(core.Map json_) - : this( - replicaHours: (json_['replicaHours'] as core.num?)?.toDouble(), - ); - - core.Map toJson() => { - if (replicaHours != null) 'replicaHours': replicaHours!, - }; -} - -/// Request message for JobService.ResumeModelDeploymentMonitoringJob. -typedef GoogleCloudAiplatformV1ResumeModelDeploymentMonitoringJobRequest - = $Empty; + /// Optional. + core.String? maxItems; -/// Request message for ScheduleService.ResumeSchedule. -class GoogleCloudAiplatformV1ResumeScheduleRequest { - /// Whether to backfill missed runs when the schedule is resumed from PAUSED - /// state. - /// - /// If set to true, all missed runs will be scheduled. New runs will be - /// scheduled after the backfill is complete. This will also update - /// Schedule.catch_up field. Default to false. + /// Maximum length of the Type.STRING /// /// Optional. - core.bool? catchUp; + core.String? maxLength; - GoogleCloudAiplatformV1ResumeScheduleRequest({ - this.catchUp, - }); + /// Maximum number of the properties for Type.OBJECT. + /// + /// Optional. + core.String? 
maxProperties; - GoogleCloudAiplatformV1ResumeScheduleRequest.fromJson(core.Map json_) - : this( - catchUp: json_['catchUp'] as core.bool?, - ); + /// Maximum value of the Type.INTEGER and Type.NUMBER + /// + /// Optional. + core.double? maximum; - core.Map toJson() => { - if (catchUp != null) 'catchUp': catchUp!, - }; -} + /// Minimum number of the elements for Type.ARRAY. + /// + /// Optional. + core.String? minItems; -/// Defines a retrieval tool that model can call to access external knowledge. -class GoogleCloudAiplatformV1Retrieval { - /// This option is no longer supported. + /// SCHEMA FIELDS FOR TYPE STRING Minimum length of the Type.STRING /// - /// Optional. Deprecated. - @core.Deprecated( - 'Not supported. Member documentation may have more information.', - ) - core.bool? disableAttribution; + /// Optional. + core.String? minLength; - /// Set to use data source powered by Vertex AI Search. - GoogleCloudAiplatformV1VertexAISearch? vertexAiSearch; + /// Minimum number of the properties for Type.OBJECT. + /// + /// Optional. + core.String? minProperties; - /// Set to use data source powered by Vertex RAG store. + /// SCHEMA FIELDS FOR TYPE INTEGER and NUMBER Minimum value of the + /// Type.INTEGER and Type.NUMBER /// - /// User data is uploaded via the VertexRagDataService. - GoogleCloudAiplatformV1VertexRagStore? vertexRagStore; + /// Optional. + core.double? minimum; - GoogleCloudAiplatformV1Retrieval({ - this.disableAttribution, - this.vertexAiSearch, - this.vertexRagStore, - }); + /// Indicates if the value may be null. + /// + /// Optional. + core.bool? nullable; - GoogleCloudAiplatformV1Retrieval.fromJson(core.Map json_) - : this( - disableAttribution: json_['disableAttribution'] as core.bool?, - vertexAiSearch: json_.containsKey('vertexAiSearch') - ? GoogleCloudAiplatformV1VertexAISearch.fromJson( - json_['vertexAiSearch'] - as core.Map) - : null, - vertexRagStore: json_.containsKey('vertexRagStore') - ? GoogleCloudAiplatformV1VertexRagStore.fromJson( - json_['vertexRagStore'] - as core.Map) - : null, - ); + /// Pattern of the Type.STRING to restrict a string to a regular expression. + /// + /// Optional. + core.String? pattern; - core.Map toJson() => { - if (disableAttribution != null) - 'disableAttribution': disableAttribution!, - if (vertexAiSearch != null) 'vertexAiSearch': vertexAiSearch!, - if (vertexRagStore != null) 'vertexRagStore': vertexRagStore!, - }; -} + /// SCHEMA FIELDS FOR TYPE OBJECT Properties of Type.OBJECT. + /// + /// Optional. + core.Map? properties; -/// Metadata related to retrieval in the grounding flow. -class GoogleCloudAiplatformV1RetrievalMetadata { - /// Score indicating how likely information from Google Search could help - /// answer the prompt. + /// The order of the properties. /// - /// The score is in the range `[0, 1]`, where 0 is the least likely and 1 is - /// the most likely. This score is only populated when Google Search grounding - /// and dynamic retrieval is enabled. It will be compared to the threshold to - /// determine whether to trigger Google Search. + /// Not a standard field in open api spec. Only used to support the order of + /// the properties. /// /// Optional. - core.double? googleSearchDynamicRetrievalScore; - - GoogleCloudAiplatformV1RetrievalMetadata({ - this.googleSearchDynamicRetrievalScore, - }); - - GoogleCloudAiplatformV1RetrievalMetadata.fromJson(core.Map json_) - : this( - googleSearchDynamicRetrievalScore: - (json_['googleSearchDynamicRetrievalScore'] as core.num?) - ?.toDouble(), - ); + core.List? 
propertyOrdering; - core.Map toJson() => { - if (googleSearchDynamicRetrievalScore != null) - 'googleSearchDynamicRetrievalScore': - googleSearchDynamicRetrievalScore!, - }; -} + /// Required properties of Type.OBJECT. + /// + /// Optional. + core.List? required; -/// Input for rouge metric. -class GoogleCloudAiplatformV1RougeInput { - /// Repeated rouge instances. + /// The title of the Schema. /// - /// Required. - core.List? instances; + /// Optional. + core.String? title; - /// Spec for rouge score metric. + /// The type of the data. /// - /// Required. - GoogleCloudAiplatformV1RougeSpec? metricSpec; + /// Optional. + /// Possible string values are: + /// - "TYPE_UNSPECIFIED" : Not specified, should not be used. + /// - "STRING" : OpenAPI string type + /// - "NUMBER" : OpenAPI number type + /// - "INTEGER" : OpenAPI integer type + /// - "BOOLEAN" : OpenAPI boolean type + /// - "ARRAY" : OpenAPI array type + /// - "OBJECT" : OpenAPI object type + core.String? type; - GoogleCloudAiplatformV1RougeInput({ - this.instances, - this.metricSpec, + GoogleCloudAiplatformV1Schema({ + this.anyOf, + this.default_, + this.description, + this.enum_, + this.example, + this.format, + this.items, + this.maxItems, + this.maxLength, + this.maxProperties, + this.maximum, + this.minItems, + this.minLength, + this.minProperties, + this.minimum, + this.nullable, + this.pattern, + this.properties, + this.propertyOrdering, + this.required, + this.title, + this.type, }); - GoogleCloudAiplatformV1RougeInput.fromJson(core.Map json_) + GoogleCloudAiplatformV1Schema.fromJson(core.Map json_) : this( - instances: (json_['instances'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1RougeInstance.fromJson( + anyOf: (json_['anyOf'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1Schema.fromJson( value as core.Map)) .toList(), - metricSpec: json_.containsKey('metricSpec') - ? GoogleCloudAiplatformV1RougeSpec.fromJson( - json_['metricSpec'] as core.Map) + default_: json_['default'], + description: json_['description'] as core.String?, + enum_: (json_['enum'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + example: json_['example'], + format: json_['format'] as core.String?, + items: json_.containsKey('items') + ? GoogleCloudAiplatformV1Schema.fromJson( + json_['items'] as core.Map) : null, + maxItems: json_['maxItems'] as core.String?, + maxLength: json_['maxLength'] as core.String?, + maxProperties: json_['maxProperties'] as core.String?, + maximum: (json_['maximum'] as core.num?)?.toDouble(), + minItems: json_['minItems'] as core.String?, + minLength: json_['minLength'] as core.String?, + minProperties: json_['minProperties'] as core.String?, + minimum: (json_['minimum'] as core.num?)?.toDouble(), + nullable: json_['nullable'] as core.bool?, + pattern: json_['pattern'] as core.String?, + properties: + (json_['properties'] as core.Map?) + ?.map( + (key, value) => core.MapEntry( + key, + GoogleCloudAiplatformV1Schema.fromJson( + value as core.Map), + ), + ), + propertyOrdering: (json_['propertyOrdering'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + required: (json_['required'] as core.List?) 
+ ?.map((value) => value as core.String) + .toList(), + title: json_['title'] as core.String?, + type: json_['type'] as core.String?, ); core.Map toJson() => { - if (instances != null) 'instances': instances!, - if (metricSpec != null) 'metricSpec': metricSpec!, + if (anyOf != null) 'anyOf': anyOf!, + if (default_ != null) 'default': default_!, + if (description != null) 'description': description!, + if (enum_ != null) 'enum': enum_!, + if (example != null) 'example': example!, + if (format != null) 'format': format!, + if (items != null) 'items': items!, + if (maxItems != null) 'maxItems': maxItems!, + if (maxLength != null) 'maxLength': maxLength!, + if (maxProperties != null) 'maxProperties': maxProperties!, + if (maximum != null) 'maximum': maximum!, + if (minItems != null) 'minItems': minItems!, + if (minLength != null) 'minLength': minLength!, + if (minProperties != null) 'minProperties': minProperties!, + if (minimum != null) 'minimum': minimum!, + if (nullable != null) 'nullable': nullable!, + if (pattern != null) 'pattern': pattern!, + if (properties != null) 'properties': properties!, + if (propertyOrdering != null) 'propertyOrdering': propertyOrdering!, + if (required != null) 'required': required!, + if (title != null) 'title': title!, + if (type != null) 'type': type!, }; } -/// Spec for rouge instance. -typedef GoogleCloudAiplatformV1RougeInstance = $Instance00; - -/// Rouge metric value for an instance. -class GoogleCloudAiplatformV1RougeMetricValue { - /// Rouge score. - /// - /// Output only. - core.double? score; - - GoogleCloudAiplatformV1RougeMetricValue({ - this.score, - }); - - GoogleCloudAiplatformV1RougeMetricValue.fromJson(core.Map json_) - : this( - score: (json_['score'] as core.num?)?.toDouble(), - ); - - core.Map toJson() => { - if (score != null) 'score': score!, - }; -} +/// Response message for DatasetService.SearchDataItems. +class GoogleCloudAiplatformV1SearchDataItemsResponse { + /// The DataItemViews read. + core.List? dataItemViews; -/// Results for rouge metric. -class GoogleCloudAiplatformV1RougeResults { - /// Rouge metric values. + /// A token to retrieve next page of results. /// - /// Output only. - core.List? rougeMetricValues; + /// Pass to SearchDataItemsRequest.page_token to obtain that page. + core.String? nextPageToken; - GoogleCloudAiplatformV1RougeResults({ - this.rougeMetricValues, + GoogleCloudAiplatformV1SearchDataItemsResponse({ + this.dataItemViews, + this.nextPageToken, }); - GoogleCloudAiplatformV1RougeResults.fromJson(core.Map json_) + GoogleCloudAiplatformV1SearchDataItemsResponse.fromJson(core.Map json_) : this( - rougeMetricValues: (json_['rougeMetricValues'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1RougeMetricValue.fromJson( + dataItemViews: (json_['dataItemViews'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1DataItemView.fromJson( value as core.Map)) .toList(), + nextPageToken: json_['nextPageToken'] as core.String?, ); core.Map toJson() => { - if (rougeMetricValues != null) 'rougeMetricValues': rougeMetricValues!, + if (dataItemViews != null) 'dataItemViews': dataItemViews!, + if (nextPageToken != null) 'nextPageToken': nextPageToken!, }; } -/// Spec for rouge score metric - calculates the recall of n-grams in prediction -/// as compared to reference - returns a score ranging between 0 and 1. -class GoogleCloudAiplatformV1RougeSpec { - /// Supported rouge types are rougen\[1-9\], rougeL, and rougeLsum. +/// Google search entry point. 
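Since GoogleCloudAiplatformV1Schema mirrors a subset of the OpenAPI 3.0 schema object, a small sketch of a structured-output schema built from these fields (the property names are made up, and the same import is assumed):

final addressSchema = GoogleCloudAiplatformV1Schema(
  type: 'OBJECT',
  properties: {
    'street': GoogleCloudAiplatformV1Schema(type: 'STRING'),
    'zip': GoogleCloudAiplatformV1Schema(type: 'STRING', pattern: r'^\d{5}$'),
    'floor': GoogleCloudAiplatformV1Schema(
        type: 'INTEGER', format: 'int32', nullable: true),
  },
  required: ['street', 'zip'],
  // Not standard OpenAPI; only controls the ordering of properties.
  propertyOrdering: ['street', 'zip', 'floor'],
);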
+class GoogleCloudAiplatformV1SearchEntryPoint { + /// Web content snippet that can be embedded in a web page or an app webview. /// /// Optional. - core.String? rougeType; + core.String? renderedContent; - /// Whether to split summaries while using rougeLsum. + /// Base64 encoded JSON representing array of tuple. /// /// Optional. - core.bool? splitSummaries; + core.String? sdkBlob; + core.List get sdkBlobAsBytes => convert.base64.decode(sdkBlob!); - /// Whether to use stemmer to compute rouge score. - /// - /// Optional. - core.bool? useStemmer; + set sdkBlobAsBytes(core.List bytes_) { + sdkBlob = + convert.base64.encode(bytes_).replaceAll('/', '_').replaceAll('+', '-'); + } - GoogleCloudAiplatformV1RougeSpec({ - this.rougeType, - this.splitSummaries, - this.useStemmer, + GoogleCloudAiplatformV1SearchEntryPoint({ + this.renderedContent, + this.sdkBlob, }); - GoogleCloudAiplatformV1RougeSpec.fromJson(core.Map json_) + GoogleCloudAiplatformV1SearchEntryPoint.fromJson(core.Map json_) : this( - rougeType: json_['rougeType'] as core.String?, - splitSummaries: json_['splitSummaries'] as core.bool?, - useStemmer: json_['useStemmer'] as core.bool?, + renderedContent: json_['renderedContent'] as core.String?, + sdkBlob: json_['sdkBlob'] as core.String?, ); core.Map toJson() => { - if (rougeType != null) 'rougeType': rougeType!, - if (splitSummaries != null) 'splitSummaries': splitSummaries!, - if (useStemmer != null) 'useStemmer': useStemmer!, + if (renderedContent != null) 'renderedContent': renderedContent!, + if (sdkBlob != null) 'sdkBlob': sdkBlob!, }; } -/// Input for safety metric. -class GoogleCloudAiplatformV1SafetyInput { - /// Safety instance. +/// Response message for FeaturestoreService.SearchFeatures. +class GoogleCloudAiplatformV1SearchFeaturesResponse { + /// The Features matching the request. /// - /// Required. - GoogleCloudAiplatformV1SafetyInstance? instance; + /// Fields returned: * `name` * `description` * `labels` * `create_time` * + /// `update_time` + core.List? features; - /// Spec for safety metric. + /// A token, which can be sent as SearchFeaturesRequest.page_token to retrieve + /// the next page. /// - /// Required. - GoogleCloudAiplatformV1SafetySpec? metricSpec; + /// If this field is omitted, there are no subsequent pages. + core.String? nextPageToken; - GoogleCloudAiplatformV1SafetyInput({ - this.instance, - this.metricSpec, + GoogleCloudAiplatformV1SearchFeaturesResponse({ + this.features, + this.nextPageToken, }); - GoogleCloudAiplatformV1SafetyInput.fromJson(core.Map json_) + GoogleCloudAiplatformV1SearchFeaturesResponse.fromJson(core.Map json_) : this( - instance: json_.containsKey('instance') - ? GoogleCloudAiplatformV1SafetyInstance.fromJson( - json_['instance'] as core.Map) - : null, - metricSpec: json_.containsKey('metricSpec') - ? GoogleCloudAiplatformV1SafetySpec.fromJson( - json_['metricSpec'] as core.Map) - : null, + features: (json_['features'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1Feature.fromJson( + value as core.Map)) + .toList(), + nextPageToken: json_['nextPageToken'] as core.String?, ); core.Map toJson() => { - if (instance != null) 'instance': instance!, - if (metricSpec != null) 'metricSpec': metricSpec!, + if (features != null) 'features': features!, + if (nextPageToken != null) 'nextPageToken': nextPageToken!, }; } -/// Spec for safety instance. -typedef GoogleCloudAiplatformV1SafetyInstance = $Instance01; - -/// Safety rating corresponding to the generated content. 
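A short sketch of consuming a returned GoogleCloudAiplatformV1SearchEntryPoint: renderedContent can be embedded as-is, while the sdkBlobAsBytes getter decodes the base64 sdkBlob field.

void showEntryPoint(GoogleCloudAiplatformV1SearchEntryPoint entry) {
  if (entry.renderedContent != null) {
    // Web snippet suitable for a web page or app webview.
    print(entry.renderedContent);
  }
  if (entry.sdkBlob != null) {
    final bytes = entry.sdkBlobAsBytes; // decoded from base64
    print('sdkBlob carries ${bytes.length} bytes');
  }
}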
-class GoogleCloudAiplatformV1SafetyRating { - /// Indicates whether the content was filtered out because of this rating. - /// - /// Output only. - core.bool? blocked; - - /// Harm category. - /// - /// Output only. - /// Possible string values are: - /// - "HARM_CATEGORY_UNSPECIFIED" : The harm category is unspecified. - /// - "HARM_CATEGORY_HATE_SPEECH" : The harm category is hate speech. - /// - "HARM_CATEGORY_DANGEROUS_CONTENT" : The harm category is dangerous - /// content. - /// - "HARM_CATEGORY_HARASSMENT" : The harm category is harassment. - /// - "HARM_CATEGORY_SEXUALLY_EXPLICIT" : The harm category is sexually - /// explicit content. - /// - "HARM_CATEGORY_CIVIC_INTEGRITY" : The harm category is civic integrity. - core.String? category; - - /// Harm probability levels in the content. - /// - /// Output only. - /// Possible string values are: - /// - "HARM_PROBABILITY_UNSPECIFIED" : Harm probability unspecified. - /// - "NEGLIGIBLE" : Negligible level of harm. - /// - "LOW" : Low level of harm. - /// - "MEDIUM" : Medium level of harm. - /// - "HIGH" : High level of harm. - core.String? probability; - - /// Harm probability score. +/// Request message for MigrationService.SearchMigratableResources. +class GoogleCloudAiplatformV1SearchMigratableResourcesRequest { + /// A filter for your search. /// - /// Output only. - core.double? probabilityScore; + /// You can use the following types of filters: * Resource type filters. The + /// following strings filter for a specific type of MigratableResource: * + /// `ml_engine_model_version:*` * `automl_model:*` * `automl_dataset:*` * + /// `data_labeling_dataset:*` * "Migrated or not" filters. The following + /// strings filter for resources that either have or have not already been + /// migrated: * `last_migrate_time:*` filters for migrated resources. * `NOT + /// last_migrate_time:*` filters for not yet migrated resources. + core.String? filter; - /// Harm severity levels in the content. + /// The standard page size. /// - /// Output only. - /// Possible string values are: - /// - "HARM_SEVERITY_UNSPECIFIED" : Harm severity unspecified. - /// - "HARM_SEVERITY_NEGLIGIBLE" : Negligible level of harm severity. - /// - "HARM_SEVERITY_LOW" : Low level of harm severity. - /// - "HARM_SEVERITY_MEDIUM" : Medium level of harm severity. - /// - "HARM_SEVERITY_HIGH" : High level of harm severity. - core.String? severity; + /// The default and maximum value is 100. + core.int? pageSize; - /// Harm severity score. - /// - /// Output only. - core.double? severityScore; + /// The standard page token. + core.String? 
pageToken; - GoogleCloudAiplatformV1SafetyRating({ - this.blocked, - this.category, - this.probability, - this.probabilityScore, - this.severity, - this.severityScore, + GoogleCloudAiplatformV1SearchMigratableResourcesRequest({ + this.filter, + this.pageSize, + this.pageToken, }); - GoogleCloudAiplatformV1SafetyRating.fromJson(core.Map json_) + GoogleCloudAiplatformV1SearchMigratableResourcesRequest.fromJson( + core.Map json_) : this( - blocked: json_['blocked'] as core.bool?, - category: json_['category'] as core.String?, - probability: json_['probability'] as core.String?, - probabilityScore: - (json_['probabilityScore'] as core.num?)?.toDouble(), - severity: json_['severity'] as core.String?, - severityScore: (json_['severityScore'] as core.num?)?.toDouble(), + filter: json_['filter'] as core.String?, + pageSize: json_['pageSize'] as core.int?, + pageToken: json_['pageToken'] as core.String?, ); core.Map toJson() => { - if (blocked != null) 'blocked': blocked!, - if (category != null) 'category': category!, - if (probability != null) 'probability': probability!, - if (probabilityScore != null) 'probabilityScore': probabilityScore!, - if (severity != null) 'severity': severity!, - if (severityScore != null) 'severityScore': severityScore!, + if (filter != null) 'filter': filter!, + if (pageSize != null) 'pageSize': pageSize!, + if (pageToken != null) 'pageToken': pageToken!, }; } -/// Spec for safety result. -class GoogleCloudAiplatformV1SafetyResult { - /// Confidence for safety score. - /// - /// Output only. - core.double? confidence; - - /// Explanation for safety score. - /// - /// Output only. - core.String? explanation; +/// Response message for MigrationService.SearchMigratableResources. +class GoogleCloudAiplatformV1SearchMigratableResourcesResponse { + /// All migratable resources that can be migrated to the location specified in + /// the request. + core.List? migratableResources; - /// Safety score. + /// The standard next-page token. /// - /// Output only. - core.double? score; + /// The migratable_resources may not fill page_size in + /// SearchMigratableResourcesRequest even when there are subsequent pages. + core.String? nextPageToken; - GoogleCloudAiplatformV1SafetyResult({ - this.confidence, - this.explanation, - this.score, + GoogleCloudAiplatformV1SearchMigratableResourcesResponse({ + this.migratableResources, + this.nextPageToken, }); - GoogleCloudAiplatformV1SafetyResult.fromJson(core.Map json_) + GoogleCloudAiplatformV1SearchMigratableResourcesResponse.fromJson( + core.Map json_) : this( - confidence: (json_['confidence'] as core.num?)?.toDouble(), - explanation: json_['explanation'] as core.String?, - score: (json_['score'] as core.num?)?.toDouble(), + migratableResources: (json_['migratableResources'] as core.List?) + ?.map((value) => + GoogleCloudAiplatformV1MigratableResource.fromJson( + value as core.Map)) + .toList(), + nextPageToken: json_['nextPageToken'] as core.String?, ); core.Map toJson() => { - if (confidence != null) 'confidence': confidence!, - if (explanation != null) 'explanation': explanation!, - if (score != null) 'score': score!, + if (migratableResources != null) + 'migratableResources': migratableResources!, + if (nextPageToken != null) 'nextPageToken': nextPageToken!, }; } -/// Safety settings. -class GoogleCloudAiplatformV1SafetySetting { - /// Harm category. +/// Request message for +/// JobService.SearchModelDeploymentMonitoringStatsAnomalies. 
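The filter grammar in SearchMigratableResourcesRequest supports resource-type predicates and "migrated or not" predicates; a sketch of a request that lists only resources not yet migrated (same import assumed):

final migratableRequest =
    GoogleCloudAiplatformV1SearchMigratableResourcesRequest(
  filter: 'NOT last_migrate_time:*', // not yet migrated
  pageSize: 50, // default and maximum is 100
);
// Subsequent pages are fetched by echoing nextPageToken back as pageToken.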
+class GoogleCloudAiplatformV1SearchModelDeploymentMonitoringStatsAnomaliesRequest { + /// The DeployedModel ID of the + /// \[ModelDeploymentMonitoringObjectiveConfig.deployed_model_id\]. /// /// Required. - /// Possible string values are: - /// - "HARM_CATEGORY_UNSPECIFIED" : The harm category is unspecified. - /// - "HARM_CATEGORY_HATE_SPEECH" : The harm category is hate speech. - /// - "HARM_CATEGORY_DANGEROUS_CONTENT" : The harm category is dangerous - /// content. - /// - "HARM_CATEGORY_HARASSMENT" : The harm category is harassment. - /// - "HARM_CATEGORY_SEXUALLY_EXPLICIT" : The harm category is sexually - /// explicit content. - /// - "HARM_CATEGORY_CIVIC_INTEGRITY" : The harm category is civic integrity. - core.String? category; + core.String? deployedModelId; - /// Specify if the threshold is used for probability or severity score. + /// The latest timestamp of stats being generated. /// - /// If not specified, the threshold is used for probability score. + /// If not set, indicates feching stats till the latest possible one. + core.String? endTime; + + /// The feature display name. /// - /// Optional. - /// Possible string values are: - /// - "HARM_BLOCK_METHOD_UNSPECIFIED" : The harm block method is unspecified. - /// - "SEVERITY" : The harm block method uses both probability and severity - /// scores. - /// - "PROBABILITY" : The harm block method uses the probability score. - core.String? method; + /// If specified, only return the stats belonging to this feature. Format: + /// ModelMonitoringStatsAnomalies.FeatureHistoricStatsAnomalies.feature_display_name, + /// example: "user_destination". + core.String? featureDisplayName; - /// The harm block threshold. + /// Objectives of the stats to retrieve. /// /// Required. - /// Possible string values are: - /// - "HARM_BLOCK_THRESHOLD_UNSPECIFIED" : Unspecified harm block threshold. - /// - "BLOCK_LOW_AND_ABOVE" : Block low threshold and above (i.e. block more). - /// - "BLOCK_MEDIUM_AND_ABOVE" : Block medium threshold and above. - /// - "BLOCK_ONLY_HIGH" : Block only high threshold (i.e. block less). - /// - "BLOCK_NONE" : Block none. - /// - "OFF" : Turn off the safety filter. - core.String? threshold; + core.List< + GoogleCloudAiplatformV1SearchModelDeploymentMonitoringStatsAnomaliesRequestStatsAnomaliesObjective>? + objectives; - GoogleCloudAiplatformV1SafetySetting({ - this.category, - this.method, - this.threshold, + /// The standard list page size. + core.int? pageSize; + + /// A page token received from a previous + /// JobService.SearchModelDeploymentMonitoringStatsAnomalies call. + core.String? pageToken; + + /// The earliest timestamp of stats being generated. + /// + /// If not set, indicates fetching stats till the earliest possible one. + core.String? startTime; + + GoogleCloudAiplatformV1SearchModelDeploymentMonitoringStatsAnomaliesRequest({ + this.deployedModelId, + this.endTime, + this.featureDisplayName, + this.objectives, + this.pageSize, + this.pageToken, + this.startTime, }); - GoogleCloudAiplatformV1SafetySetting.fromJson(core.Map json_) + GoogleCloudAiplatformV1SearchModelDeploymentMonitoringStatsAnomaliesRequest.fromJson( + core.Map json_) : this( - category: json_['category'] as core.String?, - method: json_['method'] as core.String?, - threshold: json_['threshold'] as core.String?, + deployedModelId: json_['deployedModelId'] as core.String?, + endTime: json_['endTime'] as core.String?, + featureDisplayName: json_['featureDisplayName'] as core.String?, + objectives: (json_['objectives'] as core.List?) 
+ ?.map((value) => + GoogleCloudAiplatformV1SearchModelDeploymentMonitoringStatsAnomaliesRequestStatsAnomaliesObjective + .fromJson(value as core.Map)) + .toList(), + pageSize: json_['pageSize'] as core.int?, + pageToken: json_['pageToken'] as core.String?, + startTime: json_['startTime'] as core.String?, ); core.Map toJson() => { - if (category != null) 'category': category!, - if (method != null) 'method': method!, - if (threshold != null) 'threshold': threshold!, + if (deployedModelId != null) 'deployedModelId': deployedModelId!, + if (endTime != null) 'endTime': endTime!, + if (featureDisplayName != null) + 'featureDisplayName': featureDisplayName!, + if (objectives != null) 'objectives': objectives!, + if (pageSize != null) 'pageSize': pageSize!, + if (pageToken != null) 'pageToken': pageToken!, + if (startTime != null) 'startTime': startTime!, }; } -/// Spec for safety metric. -typedef GoogleCloudAiplatformV1SafetySpec = $Spec; - -/// Active learning data sampling config. -/// -/// For every active learning labeling iteration, it will select a batch of data -/// based on the sampling strategy. -class GoogleCloudAiplatformV1SampleConfig { - /// The percentage of data needed to be labeled in each following batch - /// (except the first batch). - core.int? followingBatchSamplePercentage; - - /// The percentage of data needed to be labeled in the first batch. - core.int? initialBatchSamplePercentage; +/// Stats requested for specific objective. +class GoogleCloudAiplatformV1SearchModelDeploymentMonitoringStatsAnomaliesRequestStatsAnomaliesObjective { + /// If set, all attribution scores between + /// SearchModelDeploymentMonitoringStatsAnomaliesRequest.start_time and + /// SearchModelDeploymentMonitoringStatsAnomaliesRequest.end_time are fetched, + /// and page token doesn't take effect in this case. + /// + /// Only used to retrieve attribution score for the top Features which has the + /// highest attribution score in the latest monitoring run. + core.int? topFeatureCount; - /// Field to choose sampling strategy. /// - /// Sampling strategy will decide which data should be selected for human - /// labeling in every batch. /// Possible string values are: - /// - "SAMPLE_STRATEGY_UNSPECIFIED" : Default will be treated as UNCERTAINTY. - /// - "UNCERTAINTY" : Sample the most uncertain data to label. - core.String? sampleStrategy; + /// - "MODEL_DEPLOYMENT_MONITORING_OBJECTIVE_TYPE_UNSPECIFIED" : Default + /// value, should not be set. + /// - "RAW_FEATURE_SKEW" : Raw feature values' stats to detect skew between + /// Training-Prediction datasets. + /// - "RAW_FEATURE_DRIFT" : Raw feature values' stats to detect drift between + /// Serving-Prediction datasets. + /// - "FEATURE_ATTRIBUTION_SKEW" : Feature attribution scores to detect skew + /// between Training-Prediction datasets. + /// - "FEATURE_ATTRIBUTION_DRIFT" : Feature attribution scores to detect skew + /// between Prediction datasets collected within different time windows. + core.String? 
type; - GoogleCloudAiplatformV1SampleConfig({ - this.followingBatchSamplePercentage, - this.initialBatchSamplePercentage, - this.sampleStrategy, + GoogleCloudAiplatformV1SearchModelDeploymentMonitoringStatsAnomaliesRequestStatsAnomaliesObjective({ + this.topFeatureCount, + this.type, }); - GoogleCloudAiplatformV1SampleConfig.fromJson(core.Map json_) + GoogleCloudAiplatformV1SearchModelDeploymentMonitoringStatsAnomaliesRequestStatsAnomaliesObjective.fromJson( + core.Map json_) : this( - followingBatchSamplePercentage: - json_['followingBatchSamplePercentage'] as core.int?, - initialBatchSamplePercentage: - json_['initialBatchSamplePercentage'] as core.int?, - sampleStrategy: json_['sampleStrategy'] as core.String?, + topFeatureCount: json_['topFeatureCount'] as core.int?, + type: json_['type'] as core.String?, ); core.Map toJson() => { - if (followingBatchSamplePercentage != null) - 'followingBatchSamplePercentage': followingBatchSamplePercentage!, - if (initialBatchSamplePercentage != null) - 'initialBatchSamplePercentage': initialBatchSamplePercentage!, - if (sampleStrategy != null) 'sampleStrategy': sampleStrategy!, + if (topFeatureCount != null) 'topFeatureCount': topFeatureCount!, + if (type != null) 'type': type!, }; } -/// An attribution method that approximates Shapley values for features that -/// contribute to the label being predicted. -/// -/// A sampling strategy is used to approximate the value rather than considering -/// all subsets of features. -class GoogleCloudAiplatformV1SampledShapleyAttribution { - /// The number of feature permutations to consider when approximating the - /// Shapley values. - /// - /// Valid range of its value is \[1, 50\], inclusively. +/// Response message for +/// JobService.SearchModelDeploymentMonitoringStatsAnomalies. +class GoogleCloudAiplatformV1SearchModelDeploymentMonitoringStatsAnomaliesResponse { + /// Stats retrieved for requested objectives. /// - /// Required. - core.int? pathCount; + /// There are at most 1000 + /// ModelMonitoringStatsAnomalies.FeatureHistoricStatsAnomalies.prediction_stats + /// in the response. + core.List? + monitoringStats; - GoogleCloudAiplatformV1SampledShapleyAttribution({ - this.pathCount, + /// The page token that can be used by the next + /// JobService.SearchModelDeploymentMonitoringStatsAnomalies call. + core.String? nextPageToken; + + GoogleCloudAiplatformV1SearchModelDeploymentMonitoringStatsAnomaliesResponse({ + this.monitoringStats, + this.nextPageToken, }); - GoogleCloudAiplatformV1SampledShapleyAttribution.fromJson(core.Map json_) + GoogleCloudAiplatformV1SearchModelDeploymentMonitoringStatsAnomaliesResponse.fromJson( + core.Map json_) : this( - pathCount: json_['pathCount'] as core.int?, + monitoringStats: (json_['monitoringStats'] as core.List?) + ?.map((value) => + GoogleCloudAiplatformV1ModelMonitoringStatsAnomalies.fromJson( + value as core.Map)) + .toList(), + nextPageToken: json_['nextPageToken'] as core.String?, ); core.Map toJson() => { - if (pathCount != null) 'pathCount': pathCount!, + if (monitoringStats != null) 'monitoringStats': monitoringStats!, + if (nextPageToken != null) 'nextPageToken': nextPageToken!, }; } -/// Sampling Strategy for logging, can be for both training and prediction -/// dataset. -class GoogleCloudAiplatformV1SamplingStrategy { - /// Random sample config. +/// The request message for FeatureOnlineStoreService.SearchNearestEntities. +class GoogleCloudAiplatformV1SearchNearestEntitiesRequest { + /// The query. /// - /// Will support more sampling strategies later. 
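A sketch of a monitoring-stats query built from these messages; the deployed model ID is a placeholder and the objective values come from the enum documented above.

final statsRequest =
    GoogleCloudAiplatformV1SearchModelDeploymentMonitoringStatsAnomaliesRequest(
  deployedModelId: 'DEPLOYED_MODEL_ID', // placeholder
  objectives: [
    GoogleCloudAiplatformV1SearchModelDeploymentMonitoringStatsAnomaliesRequestStatsAnomaliesObjective(
      type: 'RAW_FEATURE_DRIFT',
      topFeatureCount: 10,
    ),
  ],
  pageSize: 100,
);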
- GoogleCloudAiplatformV1SamplingStrategyRandomSampleConfig? randomSampleConfig; + /// Required. + GoogleCloudAiplatformV1NearestNeighborQuery? query; - GoogleCloudAiplatformV1SamplingStrategy({ - this.randomSampleConfig, + /// If set to true, the full entities (including all vector values and + /// metadata) of the nearest neighbors are returned; otherwise only entity id + /// of the nearest neighbors will be returned. + /// + /// Note that returning full entities will significantly increase the latency + /// and cost of the query. + /// + /// Optional. + core.bool? returnFullEntity; + + GoogleCloudAiplatformV1SearchNearestEntitiesRequest({ + this.query, + this.returnFullEntity, }); - GoogleCloudAiplatformV1SamplingStrategy.fromJson(core.Map json_) + GoogleCloudAiplatformV1SearchNearestEntitiesRequest.fromJson(core.Map json_) : this( - randomSampleConfig: json_.containsKey('randomSampleConfig') - ? GoogleCloudAiplatformV1SamplingStrategyRandomSampleConfig - .fromJson(json_['randomSampleConfig'] - as core.Map) + query: json_.containsKey('query') + ? GoogleCloudAiplatformV1NearestNeighborQuery.fromJson( + json_['query'] as core.Map) : null, + returnFullEntity: json_['returnFullEntity'] as core.bool?, ); core.Map toJson() => { - if (randomSampleConfig != null) - 'randomSampleConfig': randomSampleConfig!, + if (query != null) 'query': query!, + if (returnFullEntity != null) 'returnFullEntity': returnFullEntity!, }; } -/// Requests are randomly selected. -class GoogleCloudAiplatformV1SamplingStrategyRandomSampleConfig { - /// Sample rate (0, 1\] - core.double? sampleRate; +/// Response message for FeatureOnlineStoreService.SearchNearestEntities +class GoogleCloudAiplatformV1SearchNearestEntitiesResponse { + /// The nearest neighbors of the query entity. + GoogleCloudAiplatformV1NearestNeighbors? nearestNeighbors; - GoogleCloudAiplatformV1SamplingStrategyRandomSampleConfig({ - this.sampleRate, + GoogleCloudAiplatformV1SearchNearestEntitiesResponse({ + this.nearestNeighbors, }); - GoogleCloudAiplatformV1SamplingStrategyRandomSampleConfig.fromJson( - core.Map json_) + GoogleCloudAiplatformV1SearchNearestEntitiesResponse.fromJson(core.Map json_) : this( - sampleRate: (json_['sampleRate'] as core.num?)?.toDouble(), + nearestNeighbors: json_.containsKey('nearestNeighbors') + ? GoogleCloudAiplatformV1NearestNeighbors.fromJson( + json_['nearestNeighbors'] + as core.Map) + : null, ); core.Map toJson() => { - if (sampleRate != null) 'sampleRate': sampleRate!, + if (nearestNeighbors != null) 'nearestNeighbors': nearestNeighbors!, }; } -/// A SavedQuery is a view of the dataset. -/// -/// It references a subset of annotations by problem type and filters. -class GoogleCloudAiplatformV1SavedQuery { - /// Filters on the Annotations in the dataset. +/// Segment of the content. +class GoogleCloudAiplatformV1Segment { + /// End index in the given Part, measured in bytes. /// - /// Output only. - core.String? annotationFilter; - - /// Number of AnnotationSpecs in the context of the SavedQuery. + /// Offset from the start of the Part, exclusive, starting at zero. /// /// Output only. - core.int? annotationSpecCount; + core.int? endIndex; - /// Timestamp when this SavedQuery was created. + /// The index of a Part object within its parent Content object. /// /// Output only. - core.String? createTime; + core.int? partIndex; - /// The user-defined name of the SavedQuery. + /// Start index in the given Part, measured in bytes. 
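A sketch of building a nearest-entity search from an existing NearestNeighborQuery; only entity ids come back unless returnFullEntity is set, which the docs note is slower and more expensive.

GoogleCloudAiplatformV1SearchNearestEntitiesRequest neighborRequest(
    GoogleCloudAiplatformV1NearestNeighborQuery query) {
  return GoogleCloudAiplatformV1SearchNearestEntitiesRequest(
    query: query,
    // Full entities increase latency and cost, so keep ids-only by default.
    returnFullEntity: false,
  );
}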
/// - /// The name can be up to 128 characters long and can consist of any UTF-8 - /// characters. + /// Offset from the start of the Part, inclusive, starting at zero. /// - /// Required. - core.String? displayName; + /// Output only. + core.int? startIndex; - /// Used to perform a consistent read-modify-write update. + /// The text corresponding to the segment from the response. /// - /// If not set, a blind "overwrite" update happens. - core.String? etag; + /// Output only. + core.String? text; - /// Some additional information about the SavedQuery. - /// - /// The values for Object must be JSON objects. It can consist of `num`, - /// `String`, `bool` and `null` as well as `Map` and `List` values. - core.Object? metadata; + GoogleCloudAiplatformV1Segment({ + this.endIndex, + this.partIndex, + this.startIndex, + this.text, + }); - /// Resource name of the SavedQuery. - /// - /// Output only. - core.String? name; + GoogleCloudAiplatformV1Segment.fromJson(core.Map json_) + : this( + endIndex: json_['endIndex'] as core.int?, + partIndex: json_['partIndex'] as core.int?, + startIndex: json_['startIndex'] as core.int?, + text: json_['text'] as core.String?, + ); - /// Problem type of the SavedQuery. + core.Map toJson() => { + if (endIndex != null) 'endIndex': endIndex!, + if (partIndex != null) 'partIndex': partIndex!, + if (startIndex != null) 'startIndex': startIndex!, + if (text != null) 'text': text!, + }; +} + +/// Configuration for the use of custom service account to run the workloads. +class GoogleCloudAiplatformV1ServiceAccountSpec { + /// If true, custom user-managed service account is enforced to run any + /// workloads (for example, Vertex Jobs) on the resource. /// - /// Allowed values: * IMAGE_CLASSIFICATION_SINGLE_LABEL * - /// IMAGE_CLASSIFICATION_MULTI_LABEL * IMAGE_BOUNDING_POLY * - /// IMAGE_BOUNDING_BOX * TEXT_CLASSIFICATION_SINGLE_LABEL * - /// TEXT_CLASSIFICATION_MULTI_LABEL * TEXT_EXTRACTION * TEXT_SENTIMENT * - /// VIDEO_CLASSIFICATION * VIDEO_OBJECT_TRACKING + /// Otherwise, uses the + /// [Vertex AI Custom Code Service Agent](https://cloud.google.com/vertex-ai/docs/general/access-control#service-agents). /// /// Required. - core.String? problemType; + core.bool? enableCustomServiceAccount; - /// If the Annotations belonging to the SavedQuery can be used for AutoML - /// training. + /// Required when all below conditions are met * + /// `enable_custom_service_account` is true; * any runtime is specified via + /// `ResourceRuntimeSpec` on creation time, for example, Ray The users must + /// have `iam.serviceAccounts.actAs` permission on this service account and + /// then the specified runtime containers will run as it. /// - /// Output only. - core.bool? supportAutomlTraining; - - /// Timestamp when SavedQuery was last updated. + /// Do not set this field if you want to submit jobs using custom service + /// account to this PersistentResource after creation, but only specify the + /// `service_account` inside the job. /// - /// Output only. - core.String? updateTime; + /// Optional. + core.String? 
serviceAccount; - GoogleCloudAiplatformV1SavedQuery({ - this.annotationFilter, - this.annotationSpecCount, - this.createTime, - this.displayName, - this.etag, - this.metadata, - this.name, - this.problemType, - this.supportAutomlTraining, - this.updateTime, + GoogleCloudAiplatformV1ServiceAccountSpec({ + this.enableCustomServiceAccount, + this.serviceAccount, }); - GoogleCloudAiplatformV1SavedQuery.fromJson(core.Map json_) + GoogleCloudAiplatformV1ServiceAccountSpec.fromJson(core.Map json_) : this( - annotationFilter: json_['annotationFilter'] as core.String?, - annotationSpecCount: json_['annotationSpecCount'] as core.int?, - createTime: json_['createTime'] as core.String?, - displayName: json_['displayName'] as core.String?, - etag: json_['etag'] as core.String?, - metadata: json_['metadata'], - name: json_['name'] as core.String?, - problemType: json_['problemType'] as core.String?, - supportAutomlTraining: json_['supportAutomlTraining'] as core.bool?, - updateTime: json_['updateTime'] as core.String?, + enableCustomServiceAccount: + json_['enableCustomServiceAccount'] as core.bool?, + serviceAccount: json_['serviceAccount'] as core.String?, ); core.Map toJson() => { - if (annotationFilter != null) 'annotationFilter': annotationFilter!, - if (annotationSpecCount != null) - 'annotationSpecCount': annotationSpecCount!, - if (createTime != null) 'createTime': createTime!, - if (displayName != null) 'displayName': displayName!, - if (etag != null) 'etag': etag!, - if (metadata != null) 'metadata': metadata!, - if (name != null) 'name': name!, - if (problemType != null) 'problemType': problemType!, - if (supportAutomlTraining != null) - 'supportAutomlTraining': supportAutomlTraining!, - if (updateTime != null) 'updateTime': updateTime!, + if (enableCustomServiceAccount != null) + 'enableCustomServiceAccount': enableCustomServiceAccount!, + if (serviceAccount != null) 'serviceAccount': serviceAccount!, }; } -/// One point viewable on a scalar metric plot. -class GoogleCloudAiplatformV1Scalar { - /// Value of the point at this step / timestamp. - core.double? value; +/// The SharePointSources to pass to ImportRagFiles. +class GoogleCloudAiplatformV1SharePointSources { + /// The SharePoint sources. + core.List? + sharePointSources; - GoogleCloudAiplatformV1Scalar({ - this.value, + GoogleCloudAiplatformV1SharePointSources({ + this.sharePointSources, }); - GoogleCloudAiplatformV1Scalar.fromJson(core.Map json_) + GoogleCloudAiplatformV1SharePointSources.fromJson(core.Map json_) : this( - value: (json_['value'] as core.num?)?.toDouble(), + sharePointSources: (json_['sharePointSources'] as core.List?) + ?.map((value) => + GoogleCloudAiplatformV1SharePointSourcesSharePointSource + .fromJson(value as core.Map)) + .toList(), ); core.Map toJson() => { - if (value != null) 'value': value!, + if (sharePointSources != null) 'sharePointSources': sharePointSources!, }; } -/// An instance of a Schedule periodically schedules runs to make API calls -/// based on user specified time specification and API request type. -class GoogleCloudAiplatformV1Schedule { - /// Whether new scheduled runs can be queued when max_concurrent_runs limit is - /// reached. - /// - /// If set to true, new runs will be queued instead of skipped. Default to - /// false. - /// - /// Optional. - core.bool? allowQueueing; - - /// Whether to backfill missed runs when the schedule is resumed from PAUSED - /// state. - /// - /// If set to true, all missed runs will be scheduled. 
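A sketch of ServiceAccountSpec with a custom service account; the account name is a placeholder, and the caller needs iam.serviceAccounts.actAs on it as the field docs state.

final serviceAccountSpec = GoogleCloudAiplatformV1ServiceAccountSpec(
  enableCustomServiceAccount: true,
  // Placeholder account; runtime containers will run as this identity.
  serviceAccount: 'ray-runner@my-project.iam.gserviceaccount.com',
);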
New runs will be - /// scheduled after the backfill is complete. Default to false. - /// - /// Output only. - core.bool? catchUp; - - /// Request for NotebookService.CreateNotebookExecutionJob. - GoogleCloudAiplatformV1CreateNotebookExecutionJobRequest? - createNotebookExecutionJobRequest; - - /// Request for PipelineService.CreatePipelineJob. - /// - /// CreatePipelineJobRequest.parent field is required (format: - /// projects/{project}/locations/{location}). - GoogleCloudAiplatformV1CreatePipelineJobRequest? createPipelineJobRequest; - - /// Timestamp when this Schedule was created. - /// - /// Output only. - core.String? createTime; - - /// Cron schedule (https://en.wikipedia.org/wiki/Cron) to launch scheduled - /// runs. +/// An individual SharePointSource. +class GoogleCloudAiplatformV1SharePointSourcesSharePointSource { + /// The Application ID for the app registered in Microsoft Azure Portal. /// - /// To explicitly set a timezone to the cron tab, apply a prefix in the cron - /// tab: "CRON_TZ=${IANA_TIME_ZONE}" or "TZ=${IANA_TIME_ZONE}". The - /// ${IANA_TIME_ZONE} may only be a valid string from IANA time zone database. - /// For example, "CRON_TZ=America/New_York 1 * * * *", or "TZ=America/New_York - /// 1 * * * *". - core.String? cron; + /// The application must also be configured with MS Graph permissions + /// "Files.ReadAll", "Sites.ReadAll" and BrowserSiteLists.Read.All. + core.String? clientId; - /// User provided name of the Schedule. - /// - /// The name can be up to 128 characters long and can consist of any UTF-8 - /// characters. - /// - /// Required. - core.String? displayName; + /// The application secret for the app registered in Azure. + GoogleCloudAiplatformV1ApiAuthApiKeyConfig? clientSecret; - /// Timestamp after which no new runs can be scheduled. - /// - /// If specified, The schedule will be completed when either end_time is - /// reached or when scheduled_run_count \>= max_run_count. If not specified, - /// new runs will keep getting scheduled until this Schedule is paused or - /// deleted. Already scheduled runs will be allowed to complete. Unset if not - /// specified. - /// - /// Optional. - core.String? endTime; + /// The ID of the drive to download from. + core.String? driveId; - /// Timestamp when this Schedule was last paused. - /// - /// Unset if never paused. - /// - /// Output only. - core.String? lastPauseTime; + /// The name of the drive to download from. + core.String? driveName; - /// Timestamp when this Schedule was last resumed. - /// - /// Unset if never resumed from pause. + /// The SharePoint file id. /// /// Output only. - core.String? lastResumeTime; - - /// Response of the last scheduled run. - /// - /// This is the response for starting the scheduled requests and not the - /// execution of the operations/jobs created by the requests (if applicable). - /// Unset if no run has been scheduled yet. /// /// Output only. - GoogleCloudAiplatformV1ScheduleRunResponse? lastScheduledRunResponse; + core.String? fileId; - /// Maximum number of runs that can be started concurrently for this Schedule. - /// - /// This is the limit for starting the scheduled requests and not the - /// execution of the operations/jobs created by the requests (if applicable). - /// - /// Required. - core.String? maxConcurrentRunCount; + /// The ID of the SharePoint folder to download from. + core.String? sharepointFolderId; - /// Maximum run count of the schedule. 
- /// - /// If specified, The schedule will be completed when either started_run_count - /// \>= max_run_count or when end_time is reached. If not specified, new runs - /// will keep getting scheduled until this Schedule is paused or deleted. - /// Already scheduled runs will be allowed to complete. Unset if not - /// specified. - /// - /// Optional. - core.String? maxRunCount; + /// The path of the SharePoint folder to download from. + core.String? sharepointFolderPath; - /// The resource name of the Schedule. + /// The name of the SharePoint site to download from. /// - /// Immutable. - core.String? name; + /// This can be the site name or the site id. + core.String? sharepointSiteName; - /// Timestamp when this Schedule should schedule the next run. - /// - /// Having a next_run_time in the past means the runs are being started behind - /// schedule. - /// - /// Output only. - core.String? nextRunTime; + /// Unique identifier of the Azure Active Directory Instance. + core.String? tenantId; - /// Timestamp after which the first run can be scheduled. - /// - /// Default to Schedule create time if not specified. - /// - /// Optional. - core.String? startTime; + GoogleCloudAiplatformV1SharePointSourcesSharePointSource({ + this.clientId, + this.clientSecret, + this.driveId, + this.driveName, + this.fileId, + this.sharepointFolderId, + this.sharepointFolderPath, + this.sharepointSiteName, + this.tenantId, + }); - /// The number of runs started by this schedule. - /// - /// Output only. - core.String? startedRunCount; + GoogleCloudAiplatformV1SharePointSourcesSharePointSource.fromJson( + core.Map json_) + : this( + clientId: json_['clientId'] as core.String?, + clientSecret: json_.containsKey('clientSecret') + ? GoogleCloudAiplatformV1ApiAuthApiKeyConfig.fromJson( + json_['clientSecret'] as core.Map) + : null, + driveId: json_['driveId'] as core.String?, + driveName: json_['driveName'] as core.String?, + fileId: json_['fileId'] as core.String?, + sharepointFolderId: json_['sharepointFolderId'] as core.String?, + sharepointFolderPath: json_['sharepointFolderPath'] as core.String?, + sharepointSiteName: json_['sharepointSiteName'] as core.String?, + tenantId: json_['tenantId'] as core.String?, + ); - /// The state of this Schedule. - /// - /// Output only. - /// Possible string values are: - /// - "STATE_UNSPECIFIED" : Unspecified. - /// - "ACTIVE" : The Schedule is active. Runs are being scheduled on the - /// user-specified timespec. - /// - "PAUSED" : The schedule is paused. No new runs will be created until the - /// schedule is resumed. Already started runs will be allowed to complete. - /// - "COMPLETED" : The Schedule is completed. No new runs will be scheduled. - /// Already started runs will be allowed to complete. Schedules in completed - /// state cannot be paused or resumed. - core.String? state; + core.Map toJson() => { + if (clientId != null) 'clientId': clientId!, + if (clientSecret != null) 'clientSecret': clientSecret!, + if (driveId != null) 'driveId': driveId!, + if (driveName != null) 'driveName': driveName!, + if (fileId != null) 'fileId': fileId!, + if (sharepointFolderId != null) + 'sharepointFolderId': sharepointFolderId!, + if (sharepointFolderPath != null) + 'sharepointFolderPath': sharepointFolderPath!, + if (sharepointSiteName != null) + 'sharepointSiteName': sharepointSiteName!, + if (tenantId != null) 'tenantId': tenantId!, + }; +} - /// Timestamp when this Schedule was updated. +/// A set of Shielded Instance options. 
+/// +/// See +/// [Images using supported Shielded VM features](https://cloud.google.com/compute/docs/instances/modifying-shielded-vm). +class GoogleCloudAiplatformV1ShieldedVmConfig { + /// Defines whether the instance has + /// [Secure Boot](https://cloud.google.com/compute/shielded-vm/docs/shielded-vm#secure-boot) + /// enabled. /// - /// Output only. - core.String? updateTime; + /// Secure Boot helps ensure that the system only runs authentic software by + /// verifying the digital signature of all boot components, and halting the + /// boot process if signature verification fails. + core.bool? enableSecureBoot; - GoogleCloudAiplatformV1Schedule({ - this.allowQueueing, - this.catchUp, - this.createNotebookExecutionJobRequest, - this.createPipelineJobRequest, - this.createTime, - this.cron, - this.displayName, - this.endTime, - this.lastPauseTime, - this.lastResumeTime, - this.lastScheduledRunResponse, - this.maxConcurrentRunCount, - this.maxRunCount, - this.name, - this.nextRunTime, - this.startTime, - this.startedRunCount, - this.state, - this.updateTime, + GoogleCloudAiplatformV1ShieldedVmConfig({ + this.enableSecureBoot, }); - GoogleCloudAiplatformV1Schedule.fromJson(core.Map json_) + GoogleCloudAiplatformV1ShieldedVmConfig.fromJson(core.Map json_) : this( - allowQueueing: json_['allowQueueing'] as core.bool?, - catchUp: json_['catchUp'] as core.bool?, - createNotebookExecutionJobRequest: - json_.containsKey('createNotebookExecutionJobRequest') - ? GoogleCloudAiplatformV1CreateNotebookExecutionJobRequest - .fromJson(json_['createNotebookExecutionJobRequest'] - as core.Map) - : null, - createPipelineJobRequest: - json_.containsKey('createPipelineJobRequest') - ? GoogleCloudAiplatformV1CreatePipelineJobRequest.fromJson( - json_['createPipelineJobRequest'] - as core.Map) - : null, - createTime: json_['createTime'] as core.String?, - cron: json_['cron'] as core.String?, - displayName: json_['displayName'] as core.String?, - endTime: json_['endTime'] as core.String?, - lastPauseTime: json_['lastPauseTime'] as core.String?, - lastResumeTime: json_['lastResumeTime'] as core.String?, - lastScheduledRunResponse: - json_.containsKey('lastScheduledRunResponse') - ? 
GoogleCloudAiplatformV1ScheduleRunResponse.fromJson( - json_['lastScheduledRunResponse'] - as core.Map) - : null, - maxConcurrentRunCount: json_['maxConcurrentRunCount'] as core.String?, - maxRunCount: json_['maxRunCount'] as core.String?, - name: json_['name'] as core.String?, - nextRunTime: json_['nextRunTime'] as core.String?, - startTime: json_['startTime'] as core.String?, - startedRunCount: json_['startedRunCount'] as core.String?, - state: json_['state'] as core.String?, - updateTime: json_['updateTime'] as core.String?, + enableSecureBoot: json_['enableSecureBoot'] as core.bool?, ); core.Map toJson() => { - if (allowQueueing != null) 'allowQueueing': allowQueueing!, - if (catchUp != null) 'catchUp': catchUp!, - if (createNotebookExecutionJobRequest != null) - 'createNotebookExecutionJobRequest': - createNotebookExecutionJobRequest!, - if (createPipelineJobRequest != null) - 'createPipelineJobRequest': createPipelineJobRequest!, - if (createTime != null) 'createTime': createTime!, - if (cron != null) 'cron': cron!, - if (displayName != null) 'displayName': displayName!, - if (endTime != null) 'endTime': endTime!, - if (lastPauseTime != null) 'lastPauseTime': lastPauseTime!, - if (lastResumeTime != null) 'lastResumeTime': lastResumeTime!, - if (lastScheduledRunResponse != null) - 'lastScheduledRunResponse': lastScheduledRunResponse!, - if (maxConcurrentRunCount != null) - 'maxConcurrentRunCount': maxConcurrentRunCount!, - if (maxRunCount != null) 'maxRunCount': maxRunCount!, - if (name != null) 'name': name!, - if (nextRunTime != null) 'nextRunTime': nextRunTime!, - if (startTime != null) 'startTime': startTime!, - if (startedRunCount != null) 'startedRunCount': startedRunCount!, - if (state != null) 'state': state!, - if (updateTime != null) 'updateTime': updateTime!, + if (enableSecureBoot != null) 'enableSecureBoot': enableSecureBoot!, }; } -/// Status of a scheduled run. -class GoogleCloudAiplatformV1ScheduleRunResponse { - /// The response of the scheduled run. - core.String? runResponse; - - /// The scheduled run time based on the user-specified schedule. - core.String? scheduledRunTime; +/// The Slack source for the ImportRagFilesRequest. +class GoogleCloudAiplatformV1SlackSource { + /// The Slack channels. + /// + /// Required. + core.List? channels; - GoogleCloudAiplatformV1ScheduleRunResponse({ - this.runResponse, - this.scheduledRunTime, + GoogleCloudAiplatformV1SlackSource({ + this.channels, }); - GoogleCloudAiplatformV1ScheduleRunResponse.fromJson(core.Map json_) + GoogleCloudAiplatformV1SlackSource.fromJson(core.Map json_) : this( - runResponse: json_['runResponse'] as core.String?, - scheduledRunTime: json_['scheduledRunTime'] as core.String?, + channels: (json_['channels'] as core.List?) + ?.map((value) => + GoogleCloudAiplatformV1SlackSourceSlackChannels.fromJson( + value as core.Map)) + .toList(), ); core.Map toJson() => { - if (runResponse != null) 'runResponse': runResponse!, - if (scheduledRunTime != null) 'scheduledRunTime': scheduledRunTime!, + if (channels != null) 'channels': channels!, }; } -/// All parameters related to queuing and scheduling of custom jobs. -class GoogleCloudAiplatformV1Scheduling { - /// Indicates if the job should retry for internal errors after the job starts - /// running. +/// SlackChannels contains the Slack channels and corresponding access token. +class GoogleCloudAiplatformV1SlackSourceSlackChannels { + /// The SecretManager secret version resource name (e.g. 
+ /// projects/{project}/secrets/{secret}/versions/{version}) storing the Slack + /// channel access token that has access to the slack channel IDs. /// - /// If true, overrides `Scheduling.restart_job_on_worker_restart` to false. + /// See: https://api.slack.com/tutorials/tracks/getting-a-token. /// - /// Optional. - core.bool? disableRetries; + /// Required. + GoogleCloudAiplatformV1ApiAuthApiKeyConfig? apiKeyConfig; - /// This is the maximum duration that a job will wait for the requested - /// resources to be provisioned if the scheduling strategy is set to - /// \[Strategy.DWS_FLEX_START\]. - /// - /// If set to 0, the job will wait indefinitely. The default is 24 hours. + /// The Slack channel IDs. /// - /// Optional. - core.String? maxWaitDuration; + /// Required. + core.List? + channels; - /// Restarts the entire CustomJob if a worker gets restarted. + GoogleCloudAiplatformV1SlackSourceSlackChannels({ + this.apiKeyConfig, + this.channels, + }); + + GoogleCloudAiplatformV1SlackSourceSlackChannels.fromJson(core.Map json_) + : this( + apiKeyConfig: json_.containsKey('apiKeyConfig') + ? GoogleCloudAiplatformV1ApiAuthApiKeyConfig.fromJson( + json_['apiKeyConfig'] as core.Map) + : null, + channels: (json_['channels'] as core.List?) + ?.map((value) => + GoogleCloudAiplatformV1SlackSourceSlackChannelsSlackChannel + .fromJson(value as core.Map)) + .toList(), + ); + + core.Map toJson() => { + if (apiKeyConfig != null) 'apiKeyConfig': apiKeyConfig!, + if (channels != null) 'channels': channels!, + }; +} + +/// SlackChannel contains the Slack channel ID and the time range to import. +class GoogleCloudAiplatformV1SlackSourceSlackChannelsSlackChannel { + /// The Slack channel ID. /// - /// This feature can be used by distributed training jobs that are not - /// resilient to workers leaving and joining a job. - core.bool? restartJobOnWorkerRestart; + /// Required. + core.String? channelId; - /// This determines which type of scheduling strategy to use. + /// The ending timestamp for messages to import. /// /// Optional. - /// Possible string values are: - /// - "STRATEGY_UNSPECIFIED" : Strategy will default to STANDARD. - /// - "ON_DEMAND" : Deprecated. Regular on-demand provisioning strategy. - /// - "LOW_COST" : Deprecated. Low cost by making potential use of spot - /// resources. - /// - "STANDARD" : Standard provisioning strategy uses regular on-demand - /// resources. - /// - "SPOT" : Spot provisioning strategy uses spot resources. - /// - "FLEX_START" : Flex Start strategy uses DWS to queue for resources. - core.String? strategy; + core.String? endTime; - /// The maximum job running time. + /// The starting timestamp for messages to import. /// - /// The default is 7 days. - core.String? timeout; + /// Optional. + core.String? 
startTime; - GoogleCloudAiplatformV1Scheduling({ - this.disableRetries, - this.maxWaitDuration, - this.restartJobOnWorkerRestart, - this.strategy, - this.timeout, + GoogleCloudAiplatformV1SlackSourceSlackChannelsSlackChannel({ + this.channelId, + this.endTime, + this.startTime, }); - GoogleCloudAiplatformV1Scheduling.fromJson(core.Map json_) + GoogleCloudAiplatformV1SlackSourceSlackChannelsSlackChannel.fromJson( + core.Map json_) : this( - disableRetries: json_['disableRetries'] as core.bool?, - maxWaitDuration: json_['maxWaitDuration'] as core.String?, - restartJobOnWorkerRestart: - json_['restartJobOnWorkerRestart'] as core.bool?, - strategy: json_['strategy'] as core.String?, - timeout: json_['timeout'] as core.String?, + channelId: json_['channelId'] as core.String?, + endTime: json_['endTime'] as core.String?, + startTime: json_['startTime'] as core.String?, ); core.Map toJson() => { - if (disableRetries != null) 'disableRetries': disableRetries!, - if (maxWaitDuration != null) 'maxWaitDuration': maxWaitDuration!, - if (restartJobOnWorkerRestart != null) - 'restartJobOnWorkerRestart': restartJobOnWorkerRestart!, - if (strategy != null) 'strategy': strategy!, - if (timeout != null) 'timeout': timeout!, + if (channelId != null) 'channelId': channelId!, + if (endTime != null) 'endTime': endTime!, + if (startTime != null) 'startTime': startTime!, }; } -/// Schema is used to define the format of input/output data. +/// Config for SmoothGrad approximation of gradients. /// -/// Represents a select subset of an -/// [OpenAPI 3.0 schema object](https://spec.openapis.org/oas/v3.0.3#schema-object). -/// More fields may be added in the future as needed. -class GoogleCloudAiplatformV1Schema { - /// The value should be validated against any (one or more) of the subschemas - /// in the list. +/// When enabled, the gradients are approximated by averaging the gradients from +/// noisy samples in the vicinity of the inputs. Adding noise can help improve +/// the computed gradients. Refer to this paper for more details: +/// https://arxiv.org/pdf/1706.03825.pdf +class GoogleCloudAiplatformV1SmoothGradConfig { + /// This is similar to noise_sigma, but provides additional flexibility. /// - /// Optional. - core.List? anyOf; + /// A separate noise sigma can be provided for each feature, which is useful + /// if their distributions are different. No noise is added to features that + /// are not set. If this field is unset, noise_sigma will be used for all + /// features. + GoogleCloudAiplatformV1FeatureNoiseSigma? featureNoiseSigma; - /// Default value of the data. - /// - /// Optional. + /// This is a single float value and will be used to add noise to all the + /// features. /// - /// The values for Object must be JSON objects. It can consist of `num`, - /// `String`, `bool` and `null` as well as `Map` and `List` values. - core.Object? default_; + /// Use this field when all features are normalized to have the same + /// distribution: scale to range \[0, 1\], \[-1, 1\] or z-scoring, where + /// features are normalized to have 0-mean and 1-variance. Learn more about + /// [normalization](https://developers.google.com/machine-learning/data-prep/transform/normalization). + /// For best results the recommended value is about 10% - 20% of the standard + /// deviation of the input feature. Refer to section 3.2 of the SmoothGrad + /// paper: https://arxiv.org/pdf/1706.03825.pdf. Defaults to 0.1. If the + /// distribution is different per feature, set feature_noise_sigma instead for + /// each feature. 
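  // NOTE (editorial sketch, not part of the generated code): assuming the
  // generated constructor shape below, a SmoothGrad configuration for features
  // that all share a comparable scale might be built like this; the literal
  // values are illustrative only.
  //
  //   final smoothGrad = GoogleCloudAiplatformV1SmoothGradConfig(
  //     noiseSigma: 0.1, // roughly 10%-20% of the feature standard deviation
  //     noisySampleCount: 3, // more samples: better gradients, higher cost
  //   );
  //
  // When feature scales differ, `featureNoiseSigma` would be populated instead
  // of `noiseSigma`, as described in the surrounding field documentation.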
+ core.double? noiseSigma; - /// The description of the data. + /// The number of gradient samples to use for approximation. /// - /// Optional. - core.String? description; + /// The higher this number, the more accurate the gradient is, but the runtime + /// complexity increases by this factor as well. Valid range of its value is + /// \[1, 50\]. Defaults to 3. + core.int? noisySampleCount; - /// Possible values of the element of primitive type with enum format. - /// - /// Examples: 1. We can define direction as : {type:STRING, format:enum, - /// enum:\["EAST", NORTH", "SOUTH", "WEST"\]} 2. We can define apartment - /// number as : {type:INTEGER, format:enum, enum:\["101", "201", "301"\]} - /// - /// Optional. - core.List? enum_; + GoogleCloudAiplatformV1SmoothGradConfig({ + this.featureNoiseSigma, + this.noiseSigma, + this.noisySampleCount, + }); - /// Example of the object. - /// - /// Will only populated when the object is the root. - /// - /// Optional. - /// - /// The values for Object must be JSON objects. It can consist of `num`, - /// `String`, `bool` and `null` as well as `Map` and `List` values. - core.Object? example; + GoogleCloudAiplatformV1SmoothGradConfig.fromJson(core.Map json_) + : this( + featureNoiseSigma: json_.containsKey('featureNoiseSigma') + ? GoogleCloudAiplatformV1FeatureNoiseSigma.fromJson( + json_['featureNoiseSigma'] + as core.Map) + : null, + noiseSigma: (json_['noiseSigma'] as core.num?)?.toDouble(), + noisySampleCount: json_['noisySampleCount'] as core.int?, + ); - /// The format of the data. + core.Map toJson() => { + if (featureNoiseSigma != null) 'featureNoiseSigma': featureNoiseSigma!, + if (noiseSigma != null) 'noiseSigma': noiseSigma!, + if (noisySampleCount != null) 'noisySampleCount': noisySampleCount!, + }; +} + +/// SpecialistPool represents customers' own workforce to work on their data +/// labeling jobs. +/// +/// It includes a group of specialist managers and workers. Managers are +/// responsible for managing the workers in this pool as well as customers' data +/// labeling jobs associated with this pool. Customers create specialist pool as +/// well as start data labeling jobs on Cloud, managers and workers handle the +/// jobs using CrowdCompute console. +class GoogleCloudAiplatformV1SpecialistPool { + /// The user-defined name of the SpecialistPool. /// - /// Supported formats: for NUMBER type: "float", "double" for INTEGER type: - /// "int32", "int64" for STRING type: "email", "byte", etc + /// The name can be up to 128 characters long and can consist of any UTF-8 + /// characters. This field should be unique on project-level. /// - /// Optional. - core.String? format; + /// Required. + core.String? displayName; - /// SCHEMA FIELDS FOR TYPE ARRAY Schema of the elements of Type.ARRAY. + /// The resource name of the SpecialistPool. /// - /// Optional. - GoogleCloudAiplatformV1Schema? items; + /// Required. + core.String? name; - /// Maximum number of the elements for Type.ARRAY. + /// The resource name of the pending data labeling jobs. /// - /// Optional. - core.String? maxItems; + /// Output only. + core.List? pendingDataLabelingJobs; - /// Maximum length of the Type.STRING - /// - /// Optional. - core.String? maxLength; + /// The email addresses of the managers in the SpecialistPool. + core.List? specialistManagerEmails; - /// Maximum number of the properties for Type.OBJECT. + /// The number of managers in this SpecialistPool. /// - /// Optional. - core.String? maxProperties; + /// Output only. + core.int? 
specialistManagersCount; - /// Maximum value of the Type.INTEGER and Type.NUMBER - /// - /// Optional. - core.double? maximum; + /// The email addresses of workers in the SpecialistPool. + core.List? specialistWorkerEmails; - /// Minimum number of the elements for Type.ARRAY. - /// - /// Optional. - core.String? minItems; + GoogleCloudAiplatformV1SpecialistPool({ + this.displayName, + this.name, + this.pendingDataLabelingJobs, + this.specialistManagerEmails, + this.specialistManagersCount, + this.specialistWorkerEmails, + }); - /// SCHEMA FIELDS FOR TYPE STRING Minimum length of the Type.STRING - /// - /// Optional. - core.String? minLength; + GoogleCloudAiplatformV1SpecialistPool.fromJson(core.Map json_) + : this( + displayName: json_['displayName'] as core.String?, + name: json_['name'] as core.String?, + pendingDataLabelingJobs: + (json_['pendingDataLabelingJobs'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + specialistManagerEmails: + (json_['specialistManagerEmails'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + specialistManagersCount: + json_['specialistManagersCount'] as core.int?, + specialistWorkerEmails: + (json_['specialistWorkerEmails'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + ); - /// Minimum number of the properties for Type.OBJECT. - /// - /// Optional. - core.String? minProperties; + core.Map toJson() => { + if (displayName != null) 'displayName': displayName!, + if (name != null) 'name': name!, + if (pendingDataLabelingJobs != null) + 'pendingDataLabelingJobs': pendingDataLabelingJobs!, + if (specialistManagerEmails != null) + 'specialistManagerEmails': specialistManagerEmails!, + if (specialistManagersCount != null) + 'specialistManagersCount': specialistManagersCount!, + if (specialistWorkerEmails != null) + 'specialistWorkerEmails': specialistWorkerEmails!, + }; +} - /// SCHEMA FIELDS FOR TYPE INTEGER and NUMBER Minimum value of the - /// Type.INTEGER and Type.NUMBER - /// - /// Optional. - core.double? minimum; +/// The speech generation config. +class GoogleCloudAiplatformV1SpeechConfig { + /// The configuration for the speaker to use. + GoogleCloudAiplatformV1VoiceConfig? voiceConfig; - /// Indicates if the value may be null. - /// - /// Optional. - core.bool? nullable; + GoogleCloudAiplatformV1SpeechConfig({ + this.voiceConfig, + }); - /// Pattern of the Type.STRING to restrict a string to a regular expression. - /// - /// Optional. - core.String? pattern; + GoogleCloudAiplatformV1SpeechConfig.fromJson(core.Map json_) + : this( + voiceConfig: json_.containsKey('voiceConfig') + ? GoogleCloudAiplatformV1VoiceConfig.fromJson( + json_['voiceConfig'] as core.Map) + : null, + ); - /// SCHEMA FIELDS FOR TYPE OBJECT Properties of Type.OBJECT. - /// - /// Optional. - core.Map? properties; + core.Map toJson() => { + if (voiceConfig != null) 'voiceConfig': voiceConfig!, + }; +} - /// The order of the properties. +/// Request message for NotebookService.StartNotebookRuntime. +typedef GoogleCloudAiplatformV1StartNotebookRuntimeRequest = $Empty; + +/// Request message for NotebookService.StopNotebookRuntime. +typedef GoogleCloudAiplatformV1StopNotebookRuntimeRequest = $Empty; + +/// Request message for VizierService.StopTrial. +typedef GoogleCloudAiplatformV1StopTrialRequest = $Empty; + +/// Assigns input data to the training, validation, and test sets so that the +/// distribution of values found in the categorical column (as specified by the +/// `key` field) is mirrored within each split. 
+/// +/// The fraction values determine the relative sizes of the splits. For example, +/// if the specified column has three values, with 50% of the rows having value +/// "A", 25% value "B", and 25% value "C", and the split fractions are specified +/// as 80/10/10, then the training set will constitute 80% of the training data, +/// with about 50% of the training set rows having the value "A" for the +/// specified column, about 25% having the value "B", and about 25% having the +/// value "C". Only the top 500 occurring values are used; any values not in the +/// top 500 values are randomly assigned to a split. If less than three rows +/// contain a specific value, those rows are randomly assigned. Supported only +/// for tabular Datasets. +class GoogleCloudAiplatformV1StratifiedSplit { + /// The key is a name of one of the Dataset's data columns. /// - /// Not a standard field in open api spec. Only used to support the order of - /// the properties. + /// The key provided must be for a categorical column. /// - /// Optional. - core.List? propertyOrdering; + /// Required. + core.String? key; - /// Required properties of Type.OBJECT. - /// - /// Optional. - core.List? required; + /// The fraction of the input data that is to be used to evaluate the Model. + core.double? testFraction; - /// The title of the Schema. - /// - /// Optional. - core.String? title; + /// The fraction of the input data that is to be used to train the Model. + core.double? trainingFraction; - /// The type of the data. + /// The fraction of the input data that is to be used to validate the Model. + core.double? validationFraction; + + GoogleCloudAiplatformV1StratifiedSplit({ + this.key, + this.testFraction, + this.trainingFraction, + this.validationFraction, + }); + + GoogleCloudAiplatformV1StratifiedSplit.fromJson(core.Map json_) + : this( + key: json_['key'] as core.String?, + testFraction: (json_['testFraction'] as core.num?)?.toDouble(), + trainingFraction: + (json_['trainingFraction'] as core.num?)?.toDouble(), + validationFraction: + (json_['validationFraction'] as core.num?)?.toDouble(), + ); + + core.Map toJson() => { + if (key != null) 'key': key!, + if (testFraction != null) 'testFraction': testFraction!, + if (trainingFraction != null) 'trainingFraction': trainingFraction!, + if (validationFraction != null) + 'validationFraction': validationFraction!, + }; +} + +/// Request message for PredictionService.StreamRawPredict. +class GoogleCloudAiplatformV1StreamRawPredictRequest { + /// The prediction input. /// - /// Optional. - /// Possible string values are: - /// - "TYPE_UNSPECIFIED" : Not specified, should not be used. - /// - "STRING" : OpenAPI string type - /// - "NUMBER" : OpenAPI number type - /// - "INTEGER" : OpenAPI integer type - /// - "BOOLEAN" : OpenAPI boolean type - /// - "ARRAY" : OpenAPI array type - /// - "OBJECT" : OpenAPI object type - core.String? type; + /// Supports HTTP headers and arbitrary data payload. + GoogleApiHttpBody? 
httpBody; - GoogleCloudAiplatformV1Schema({ - this.anyOf, - this.default_, - this.description, - this.enum_, - this.example, - this.format, - this.items, - this.maxItems, - this.maxLength, - this.maxProperties, - this.maximum, - this.minItems, - this.minLength, - this.minProperties, - this.minimum, - this.nullable, - this.pattern, - this.properties, - this.propertyOrdering, - this.required, - this.title, - this.type, + GoogleCloudAiplatformV1StreamRawPredictRequest({ + this.httpBody, }); - GoogleCloudAiplatformV1Schema.fromJson(core.Map json_) + GoogleCloudAiplatformV1StreamRawPredictRequest.fromJson(core.Map json_) : this( - anyOf: (json_['anyOf'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1Schema.fromJson( - value as core.Map)) - .toList(), - default_: json_['default'], - description: json_['description'] as core.String?, - enum_: (json_['enum'] as core.List?) - ?.map((value) => value as core.String) - .toList(), - example: json_['example'], - format: json_['format'] as core.String?, - items: json_.containsKey('items') - ? GoogleCloudAiplatformV1Schema.fromJson( - json_['items'] as core.Map) + httpBody: json_.containsKey('httpBody') + ? GoogleApiHttpBody.fromJson( + json_['httpBody'] as core.Map) : null, - maxItems: json_['maxItems'] as core.String?, - maxLength: json_['maxLength'] as core.String?, - maxProperties: json_['maxProperties'] as core.String?, - maximum: (json_['maximum'] as core.num?)?.toDouble(), - minItems: json_['minItems'] as core.String?, - minLength: json_['minLength'] as core.String?, - minProperties: json_['minProperties'] as core.String?, - minimum: (json_['minimum'] as core.num?)?.toDouble(), - nullable: json_['nullable'] as core.bool?, - pattern: json_['pattern'] as core.String?, - properties: - (json_['properties'] as core.Map?) - ?.map( - (key, value) => core.MapEntry( - key, - GoogleCloudAiplatformV1Schema.fromJson( - value as core.Map), - ), - ), - propertyOrdering: (json_['propertyOrdering'] as core.List?) - ?.map((value) => value as core.String) - .toList(), - required: (json_['required'] as core.List?) - ?.map((value) => value as core.String) - .toList(), - title: json_['title'] as core.String?, - type: json_['type'] as core.String?, ); core.Map toJson() => { - if (anyOf != null) 'anyOf': anyOf!, - if (default_ != null) 'default': default_!, - if (description != null) 'description': description!, - if (enum_ != null) 'enum': enum_!, - if (example != null) 'example': example!, - if (format != null) 'format': format!, - if (items != null) 'items': items!, - if (maxItems != null) 'maxItems': maxItems!, - if (maxLength != null) 'maxLength': maxLength!, - if (maxProperties != null) 'maxProperties': maxProperties!, - if (maximum != null) 'maximum': maximum!, - if (minItems != null) 'minItems': minItems!, - if (minLength != null) 'minLength': minLength!, - if (minProperties != null) 'minProperties': minProperties!, - if (minimum != null) 'minimum': minimum!, - if (nullable != null) 'nullable': nullable!, - if (pattern != null) 'pattern': pattern!, - if (properties != null) 'properties': properties!, - if (propertyOrdering != null) 'propertyOrdering': propertyOrdering!, - if (required != null) 'required': required!, - if (title != null) 'title': title!, - if (type != null) 'type': type!, + if (httpBody != null) 'httpBody': httpBody!, + }; +} + +/// Request message for PredictionService.StreamingPredict. +/// +/// The first message must contain endpoint field and optionally input. The +/// subsequent messages must contain input. 
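// NOTE (editorial sketch, not part of the generated code): a minimal example
// of how the streaming prediction request defined below might be populated.
// The `stringVal` field on GoogleCloudAiplatformV1Tensor is assumed from the
// rest of this library, and the payload values are illustrative only.
//
//   final request = GoogleCloudAiplatformV1StreamingPredictRequest(
//     inputs: [
//       GoogleCloudAiplatformV1Tensor(stringVal: ['example input']),
//     ],
//     parameters: GoogleCloudAiplatformV1Tensor(),
//   );
//
// Per the doc comment above, the first streamed message identifies the
// endpoint (and may carry input); subsequent messages carry only input.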
+class GoogleCloudAiplatformV1StreamingPredictRequest { + /// The prediction input. + core.List? inputs; + + /// The parameters that govern the prediction. + GoogleCloudAiplatformV1Tensor? parameters; + + GoogleCloudAiplatformV1StreamingPredictRequest({ + this.inputs, + this.parameters, + }); + + GoogleCloudAiplatformV1StreamingPredictRequest.fromJson(core.Map json_) + : this( + inputs: (json_['inputs'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1Tensor.fromJson( + value as core.Map)) + .toList(), + parameters: json_.containsKey('parameters') + ? GoogleCloudAiplatformV1Tensor.fromJson( + json_['parameters'] as core.Map) + : null, + ); + + core.Map toJson() => { + if (inputs != null) 'inputs': inputs!, + if (parameters != null) 'parameters': parameters!, }; } -/// Response message for DatasetService.SearchDataItems. -class GoogleCloudAiplatformV1SearchDataItemsResponse { - /// The DataItemViews read. - core.List? dataItemViews; +/// Response message for PredictionService.StreamingPredict. +class GoogleCloudAiplatformV1StreamingPredictResponse { + /// The prediction output. + core.List? outputs; - /// A token to retrieve next page of results. - /// - /// Pass to SearchDataItemsRequest.page_token to obtain that page. - core.String? nextPageToken; + /// The parameters that govern the prediction. + GoogleCloudAiplatformV1Tensor? parameters; - GoogleCloudAiplatformV1SearchDataItemsResponse({ - this.dataItemViews, - this.nextPageToken, + GoogleCloudAiplatformV1StreamingPredictResponse({ + this.outputs, + this.parameters, }); - GoogleCloudAiplatformV1SearchDataItemsResponse.fromJson(core.Map json_) + GoogleCloudAiplatformV1StreamingPredictResponse.fromJson(core.Map json_) : this( - dataItemViews: (json_['dataItemViews'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1DataItemView.fromJson( + outputs: (json_['outputs'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1Tensor.fromJson( value as core.Map)) .toList(), - nextPageToken: json_['nextPageToken'] as core.String?, + parameters: json_.containsKey('parameters') + ? GoogleCloudAiplatformV1Tensor.fromJson( + json_['parameters'] as core.Map) + : null, ); core.Map toJson() => { - if (dataItemViews != null) 'dataItemViews': dataItemViews!, - if (nextPageToken != null) 'nextPageToken': nextPageToken!, + if (outputs != null) 'outputs': outputs!, + if (parameters != null) 'parameters': parameters!, }; } -/// Google search entry point. -class GoogleCloudAiplatformV1SearchEntryPoint { - /// Web content snippet that can be embedded in a web page or an app webview. +/// Request message for +/// FeaturestoreOnlineServingService.StreamingReadFeatureValues. +class GoogleCloudAiplatformV1StreamingReadFeatureValuesRequest { + /// IDs of entities to read Feature values of. /// - /// Optional. - core.String? renderedContent; - - /// Base64 encoded JSON representing array of tuple. + /// The maximum number of IDs is 100. For example, for a machine learning + /// model predicting user clicks on a website, an entity ID could be + /// `user_123`. /// - /// Optional. - core.String? sdkBlob; - core.List get sdkBlobAsBytes => convert.base64.decode(sdkBlob!); + /// Required. + core.List? entityIds; - set sdkBlobAsBytes(core.List bytes_) { - sdkBlob = - convert.base64.encode(bytes_).replaceAll('/', '_').replaceAll('+', '-'); - } + /// Selector choosing Features of the target EntityType. + /// + /// Feature IDs will be deduplicated. + /// + /// Required. + GoogleCloudAiplatformV1FeatureSelector? 
featureSelector; - GoogleCloudAiplatformV1SearchEntryPoint({ - this.renderedContent, - this.sdkBlob, + GoogleCloudAiplatformV1StreamingReadFeatureValuesRequest({ + this.entityIds, + this.featureSelector, }); - GoogleCloudAiplatformV1SearchEntryPoint.fromJson(core.Map json_) + GoogleCloudAiplatformV1StreamingReadFeatureValuesRequest.fromJson( + core.Map json_) : this( - renderedContent: json_['renderedContent'] as core.String?, - sdkBlob: json_['sdkBlob'] as core.String?, + entityIds: (json_['entityIds'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + featureSelector: json_.containsKey('featureSelector') + ? GoogleCloudAiplatformV1FeatureSelector.fromJson( + json_['featureSelector'] + as core.Map) + : null, ); core.Map toJson() => { - if (renderedContent != null) 'renderedContent': renderedContent!, - if (sdkBlob != null) 'sdkBlob': sdkBlob!, + if (entityIds != null) 'entityIds': entityIds!, + if (featureSelector != null) 'featureSelector': featureSelector!, }; } -/// Response message for FeaturestoreService.SearchFeatures. -class GoogleCloudAiplatformV1SearchFeaturesResponse { - /// The Features matching the request. - /// - /// Fields returned: * `name` * `description` * `labels` * `create_time` * - /// `update_time` - core.List? features; - - /// A token, which can be sent as SearchFeaturesRequest.page_token to retrieve - /// the next page. - /// - /// If this field is omitted, there are no subsequent pages. - core.String? nextPageToken; +/// A list of string values. +class GoogleCloudAiplatformV1StringArray { + /// A list of string values. + core.List? values; - GoogleCloudAiplatformV1SearchFeaturesResponse({ - this.features, - this.nextPageToken, + GoogleCloudAiplatformV1StringArray({ + this.values, }); - GoogleCloudAiplatformV1SearchFeaturesResponse.fromJson(core.Map json_) + GoogleCloudAiplatformV1StringArray.fromJson(core.Map json_) : this( - features: (json_['features'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1Feature.fromJson( - value as core.Map)) + values: (json_['values'] as core.List?) + ?.map((value) => value as core.String) .toList(), - nextPageToken: json_['nextPageToken'] as core.String?, ); core.Map toJson() => { - if (features != null) 'features': features!, - if (nextPageToken != null) 'nextPageToken': nextPageToken!, + if (values != null) 'values': values!, }; } -/// Request message for MigrationService.SearchMigratableResources. -class GoogleCloudAiplatformV1SearchMigratableResourcesRequest { - /// A filter for your search. - /// - /// You can use the following types of filters: * Resource type filters. The - /// following strings filter for a specific type of MigratableResource: * - /// `ml_engine_model_version:*` * `automl_model:*` * `automl_dataset:*` * - /// `data_labeling_dataset:*` * "Migrated or not" filters. The following - /// strings filter for resources that either have or have not already been - /// migrated: * `last_migrate_time:*` filters for migrated resources. * `NOT - /// last_migrate_time:*` filters for not yet migrated resources. - core.String? filter; - - /// The standard page size. - /// - /// The default and maximum value is 100. - core.int? pageSize; +/// One field of a Struct (or object) type feature value. +class GoogleCloudAiplatformV1StructFieldValue { + /// Name of the field in the struct feature. + core.String? name; - /// The standard page token. - core.String? pageToken; + /// The value for this field. + GoogleCloudAiplatformV1FeatureValue? 
value; - GoogleCloudAiplatformV1SearchMigratableResourcesRequest({ - this.filter, - this.pageSize, - this.pageToken, + GoogleCloudAiplatformV1StructFieldValue({ + this.name, + this.value, }); - GoogleCloudAiplatformV1SearchMigratableResourcesRequest.fromJson( - core.Map json_) + GoogleCloudAiplatformV1StructFieldValue.fromJson(core.Map json_) : this( - filter: json_['filter'] as core.String?, - pageSize: json_['pageSize'] as core.int?, - pageToken: json_['pageToken'] as core.String?, + name: json_['name'] as core.String?, + value: json_.containsKey('value') + ? GoogleCloudAiplatformV1FeatureValue.fromJson( + json_['value'] as core.Map) + : null, ); core.Map toJson() => { - if (filter != null) 'filter': filter!, - if (pageSize != null) 'pageSize': pageSize!, - if (pageToken != null) 'pageToken': pageToken!, + if (name != null) 'name': name!, + if (value != null) 'value': value!, }; } -/// Response message for MigrationService.SearchMigratableResources. -class GoogleCloudAiplatformV1SearchMigratableResourcesResponse { - /// All migratable resources that can be migrated to the location specified in - /// the request. - core.List? migratableResources; - - /// The standard next-page token. - /// - /// The migratable_resources may not fill page_size in - /// SearchMigratableResourcesRequest even when there are subsequent pages. - core.String? nextPageToken; +/// Struct (or object) type feature value. +class GoogleCloudAiplatformV1StructValue { + /// A list of field values. + core.List? values; - GoogleCloudAiplatformV1SearchMigratableResourcesResponse({ - this.migratableResources, - this.nextPageToken, + GoogleCloudAiplatformV1StructValue({ + this.values, }); - GoogleCloudAiplatformV1SearchMigratableResourcesResponse.fromJson( - core.Map json_) + GoogleCloudAiplatformV1StructValue.fromJson(core.Map json_) : this( - migratableResources: (json_['migratableResources'] as core.List?) - ?.map((value) => - GoogleCloudAiplatformV1MigratableResource.fromJson( - value as core.Map)) + values: (json_['values'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1StructFieldValue.fromJson( + value as core.Map)) .toList(), - nextPageToken: json_['nextPageToken'] as core.String?, ); core.Map toJson() => { - if (migratableResources != null) - 'migratableResources': migratableResources!, - if (nextPageToken != null) 'nextPageToken': nextPageToken!, + if (values != null) 'values': values!, }; } -/// Request message for -/// JobService.SearchModelDeploymentMonitoringStatsAnomalies. -class GoogleCloudAiplatformV1SearchModelDeploymentMonitoringStatsAnomaliesRequest { - /// The DeployedModel ID of the - /// \[ModelDeploymentMonitoringObjectiveConfig.deployed_model_id\]. +/// A message representing a Study. +class GoogleCloudAiplatformV1Study { + /// Time at which the study was created. /// - /// Required. - core.String? deployedModelId; + /// Output only. + core.String? createTime; - /// The latest timestamp of stats being generated. + /// Describes the Study, default value is empty string. /// - /// If not set, indicates feching stats till the latest possible one. - core.String? endTime; + /// Required. + core.String? displayName; - /// The feature display name. + /// A human readable reason why the Study is inactive. /// - /// If specified, only return the stats belonging to this feature. Format: - /// ModelMonitoringStatsAnomalies.FeatureHistoricStatsAnomalies.feature_display_name, - /// example: "user_destination". - core.String? featureDisplayName; - - /// Objectives of the stats to retrieve. 
+ /// This should be empty if a study is ACTIVE or COMPLETED. /// - /// Required. - core.List< - GoogleCloudAiplatformV1SearchModelDeploymentMonitoringStatsAnomaliesRequestStatsAnomaliesObjective>? - objectives; + /// Output only. + core.String? inactiveReason; - /// The standard list page size. - core.int? pageSize; + /// The name of a study. + /// + /// The study's globally unique identifier. Format: + /// `projects/{project}/locations/{location}/studies/{study}` + /// + /// Output only. + core.String? name; - /// A page token received from a previous - /// JobService.SearchModelDeploymentMonitoringStatsAnomalies call. - core.String? pageToken; + /// The detailed state of a Study. + /// + /// Output only. + /// Possible string values are: + /// - "STATE_UNSPECIFIED" : The study state is unspecified. + /// - "ACTIVE" : The study is active. + /// - "INACTIVE" : The study is stopped due to an internal error. + /// - "COMPLETED" : The study is done when the service exhausts the parameter + /// search space or max_trial_count is reached. + core.String? state; - /// The earliest timestamp of stats being generated. + /// Configuration of the Study. /// - /// If not set, indicates fetching stats till the earliest possible one. - core.String? startTime; + /// Required. + GoogleCloudAiplatformV1StudySpec? studySpec; - GoogleCloudAiplatformV1SearchModelDeploymentMonitoringStatsAnomaliesRequest({ - this.deployedModelId, - this.endTime, - this.featureDisplayName, - this.objectives, - this.pageSize, - this.pageToken, - this.startTime, + GoogleCloudAiplatformV1Study({ + this.createTime, + this.displayName, + this.inactiveReason, + this.name, + this.state, + this.studySpec, }); - GoogleCloudAiplatformV1SearchModelDeploymentMonitoringStatsAnomaliesRequest.fromJson( - core.Map json_) + GoogleCloudAiplatformV1Study.fromJson(core.Map json_) : this( - deployedModelId: json_['deployedModelId'] as core.String?, - endTime: json_['endTime'] as core.String?, - featureDisplayName: json_['featureDisplayName'] as core.String?, - objectives: (json_['objectives'] as core.List?) - ?.map((value) => - GoogleCloudAiplatformV1SearchModelDeploymentMonitoringStatsAnomaliesRequestStatsAnomaliesObjective - .fromJson(value as core.Map)) - .toList(), - pageSize: json_['pageSize'] as core.int?, - pageToken: json_['pageToken'] as core.String?, - startTime: json_['startTime'] as core.String?, + createTime: json_['createTime'] as core.String?, + displayName: json_['displayName'] as core.String?, + inactiveReason: json_['inactiveReason'] as core.String?, + name: json_['name'] as core.String?, + state: json_['state'] as core.String?, + studySpec: json_.containsKey('studySpec') + ? GoogleCloudAiplatformV1StudySpec.fromJson( + json_['studySpec'] as core.Map) + : null, ); core.Map toJson() => { - if (deployedModelId != null) 'deployedModelId': deployedModelId!, - if (endTime != null) 'endTime': endTime!, - if (featureDisplayName != null) - 'featureDisplayName': featureDisplayName!, - if (objectives != null) 'objectives': objectives!, - if (pageSize != null) 'pageSize': pageSize!, - if (pageToken != null) 'pageToken': pageToken!, - if (startTime != null) 'startTime': startTime!, + if (createTime != null) 'createTime': createTime!, + if (displayName != null) 'displayName': displayName!, + if (inactiveReason != null) 'inactiveReason': inactiveReason!, + if (name != null) 'name': name!, + if (state != null) 'state': state!, + if (studySpec != null) 'studySpec': studySpec!, }; } -/// Stats requested for specific objective. 
-class GoogleCloudAiplatformV1SearchModelDeploymentMonitoringStatsAnomaliesRequestStatsAnomaliesObjective { - /// If set, all attribution scores between - /// SearchModelDeploymentMonitoringStatsAnomaliesRequest.start_time and - /// SearchModelDeploymentMonitoringStatsAnomaliesRequest.end_time are fetched, - /// and page token doesn't take effect in this case. +/// Represents specification of a Study. +class GoogleCloudAiplatformV1StudySpec { + /// The search algorithm specified for the Study. + /// Possible string values are: + /// - "ALGORITHM_UNSPECIFIED" : The default algorithm used by Vertex AI for + /// [hyperparameter tuning](https://cloud.google.com/vertex-ai/docs/training/hyperparameter-tuning-overview) + /// and [Vertex AI Vizier](https://cloud.google.com/vertex-ai/docs/vizier). + /// - "GRID_SEARCH" : Simple grid search within the feasible space. To use + /// grid search, all parameters must be `INTEGER`, `CATEGORICAL`, or + /// `DISCRETE`. + /// - "RANDOM_SEARCH" : Simple random search within the feasible space. + core.String? algorithm; + + /// The automated early stopping spec using convex stopping rule. + GoogleCloudAiplatformV1StudySpecConvexAutomatedStoppingSpec? + convexAutomatedStoppingSpec; + + /// The automated early stopping spec using decay curve rule. + GoogleCloudAiplatformV1StudySpecDecayCurveAutomatedStoppingSpec? + decayCurveStoppingSpec; + + /// Describe which measurement selection type will be used + /// Possible string values are: + /// - "MEASUREMENT_SELECTION_TYPE_UNSPECIFIED" : Will be treated as + /// LAST_MEASUREMENT. + /// - "LAST_MEASUREMENT" : Use the last measurement reported. + /// - "BEST_MEASUREMENT" : Use the best measurement reported. + core.String? measurementSelectionType; + + /// The automated early stopping spec using median rule. + GoogleCloudAiplatformV1StudySpecMedianAutomatedStoppingSpec? + medianAutomatedStoppingSpec; + + /// Metric specs for the Study. /// - /// Only used to retrieve attribution score for the top Features which has the - /// highest attribution score in the latest monitoring run. - core.int? topFeatureCount; + /// Required. + core.List? metrics; + /// The observation noise level of the study. /// + /// Currently only supported by the Vertex AI Vizier service. Not supported by + /// HyperparameterTuningJob or TrainingPipeline. /// Possible string values are: - /// - "MODEL_DEPLOYMENT_MONITORING_OBJECTIVE_TYPE_UNSPECIFIED" : Default - /// value, should not be set. - /// - "RAW_FEATURE_SKEW" : Raw feature values' stats to detect skew between - /// Training-Prediction datasets. - /// - "RAW_FEATURE_DRIFT" : Raw feature values' stats to detect drift between - /// Serving-Prediction datasets. - /// - "FEATURE_ATTRIBUTION_SKEW" : Feature attribution scores to detect skew - /// between Training-Prediction datasets. - /// - "FEATURE_ATTRIBUTION_DRIFT" : Feature attribution scores to detect skew - /// between Prediction datasets collected within different time windows. - core.String? type; + /// - "OBSERVATION_NOISE_UNSPECIFIED" : The default noise level chosen by + /// Vertex AI. + /// - "LOW" : Vertex AI assumes that the objective function is (nearly) + /// perfectly reproducible, and will never repeat the same Trial parameters. + /// - "HIGH" : Vertex AI will estimate the amount of noise in metric + /// evaluations, it may repeat the same Trial parameters more than once. + core.String? 
observationNoise; - GoogleCloudAiplatformV1SearchModelDeploymentMonitoringStatsAnomaliesRequestStatsAnomaliesObjective({ - this.topFeatureCount, - this.type, + /// The set of parameters to tune. + /// + /// Required. + core.List? parameters; + + /// Conditions for automated stopping of a Study. + /// + /// Enable automated stopping by configuring at least one condition. + GoogleCloudAiplatformV1StudySpecStudyStoppingConfig? studyStoppingConfig; + + GoogleCloudAiplatformV1StudySpec({ + this.algorithm, + this.convexAutomatedStoppingSpec, + this.decayCurveStoppingSpec, + this.measurementSelectionType, + this.medianAutomatedStoppingSpec, + this.metrics, + this.observationNoise, + this.parameters, + this.studyStoppingConfig, }); - GoogleCloudAiplatformV1SearchModelDeploymentMonitoringStatsAnomaliesRequestStatsAnomaliesObjective.fromJson( - core.Map json_) + GoogleCloudAiplatformV1StudySpec.fromJson(core.Map json_) : this( - topFeatureCount: json_['topFeatureCount'] as core.int?, - type: json_['type'] as core.String?, + algorithm: json_['algorithm'] as core.String?, + convexAutomatedStoppingSpec: + json_.containsKey('convexAutomatedStoppingSpec') + ? GoogleCloudAiplatformV1StudySpecConvexAutomatedStoppingSpec + .fromJson(json_['convexAutomatedStoppingSpec'] + as core.Map) + : null, + decayCurveStoppingSpec: json_.containsKey('decayCurveStoppingSpec') + ? GoogleCloudAiplatformV1StudySpecDecayCurveAutomatedStoppingSpec + .fromJson(json_['decayCurveStoppingSpec'] + as core.Map) + : null, + measurementSelectionType: + json_['measurementSelectionType'] as core.String?, + medianAutomatedStoppingSpec: + json_.containsKey('medianAutomatedStoppingSpec') + ? GoogleCloudAiplatformV1StudySpecMedianAutomatedStoppingSpec + .fromJson(json_['medianAutomatedStoppingSpec'] + as core.Map) + : null, + metrics: (json_['metrics'] as core.List?) + ?.map((value) => + GoogleCloudAiplatformV1StudySpecMetricSpec.fromJson( + value as core.Map)) + .toList(), + observationNoise: json_['observationNoise'] as core.String?, + parameters: (json_['parameters'] as core.List?) + ?.map((value) => + GoogleCloudAiplatformV1StudySpecParameterSpec.fromJson( + value as core.Map)) + .toList(), + studyStoppingConfig: json_.containsKey('studyStoppingConfig') + ? GoogleCloudAiplatformV1StudySpecStudyStoppingConfig.fromJson( + json_['studyStoppingConfig'] + as core.Map) + : null, ); core.Map toJson() => { - if (topFeatureCount != null) 'topFeatureCount': topFeatureCount!, - if (type != null) 'type': type!, + if (algorithm != null) 'algorithm': algorithm!, + if (convexAutomatedStoppingSpec != null) + 'convexAutomatedStoppingSpec': convexAutomatedStoppingSpec!, + if (decayCurveStoppingSpec != null) + 'decayCurveStoppingSpec': decayCurveStoppingSpec!, + if (measurementSelectionType != null) + 'measurementSelectionType': measurementSelectionType!, + if (medianAutomatedStoppingSpec != null) + 'medianAutomatedStoppingSpec': medianAutomatedStoppingSpec!, + if (metrics != null) 'metrics': metrics!, + if (observationNoise != null) 'observationNoise': observationNoise!, + if (parameters != null) 'parameters': parameters!, + if (studyStoppingConfig != null) + 'studyStoppingConfig': studyStoppingConfig!, }; } -/// Response message for -/// JobService.SearchModelDeploymentMonitoringStatsAnomalies. -class GoogleCloudAiplatformV1SearchModelDeploymentMonitoringStatsAnomaliesResponse { - /// Stats retrieved for requested objectives. +/// Configuration for ConvexAutomatedStoppingSpec. 
+///
+/// When there are enough completed trials (configured by
+/// min_measurement_count), for pending trials with enough measurements and
+/// steps, the policy first computes an overestimate of the objective value at
+/// max_num_steps according to the slope of the incomplete objective value
+/// curve. No prediction can be made if the curve is completely flat. If the
+/// overestimation is worse than the best objective value of the completed
+/// trials, this pending trial will be early-stopped, but a last measurement
+/// will be added to the pending trial with max_num_steps and predicted
+/// objective value from the autoregression model.
+class GoogleCloudAiplatformV1StudySpecConvexAutomatedStoppingSpec {
+  /// The hyper-parameter name used in the tuning job that stands for learning
+  /// rate.
+  ///
+  /// Leave it blank if the learning rate is not a parameter in the tuning job.
+  /// The learning_rate is used to estimate the objective value of the ongoing
+  /// trial.
+  core.String? learningRateParameterName;
+
+  /// Steps used in predicting the final objective for early stopped trials.
+  ///
+  /// In general, it's set to be the same as the steps defined in training /
+  /// tuning. If not defined, it will be learned from the completed trials.
+  /// When use_steps is false, this field is set to the maximum elapsed
+  /// seconds.
+  core.String? maxStepCount;
+
+  /// The minimal number of measurements in a Trial.
+  ///
+  /// Early-stopping checks will not trigger if there are fewer than
+  /// min_measurement_count+1 completed trials, or for pending trials with
+  /// fewer than min_measurement_count measurements. If not defined, the
+  /// default value is 5.
+  core.String? minMeasurementCount;
+
+  /// Minimum number of steps for a trial to complete.
+  ///
+  /// Trials which do not have a measurement with step_count \> min_step_count
+  /// won't be considered for early stopping. It's OK to set it to 0, and a
+  /// trial can be early stopped at any stage. By default, min_step_count is set
+  /// to be one-tenth of the max_step_count. When use_elapsed_duration is true,
+  /// this field is set to the minimum elapsed seconds.
+  core.String? minStepCount;
+
+  /// ConvexAutomatedStoppingSpec by default only updates the trials that need
+  /// to be early stopped using a newly trained auto-regressive model.
   ///
-  /// There are at most 1000
-  /// ModelMonitoringStatsAnomalies.FeatureHistoricStatsAnomalies.prediction_stats
-  /// in the response.
-  core.List<GoogleCloudAiplatformV1ModelMonitoringStatsAnomalies>?
-      monitoringStats;
+  /// When this flag is set to True, all stopped trials from the beginning are
+  /// potentially updated in terms of their `final_measurement`. Also, note that
+  /// the training logic of autoregressive models is different in this case.
+  /// Enabling this option has shown better results and this may be the default
+  /// option in the future.
+  core.bool? updateAllStoppedTrials;
 
-  /// The page token that can be used by the next
-  /// JobService.SearchModelDeploymentMonitoringStatsAnomalies call.
-  core.String? nextPageToken;
+  /// This bool determines whether the rule is applied based on elapsed_secs or
+  /// steps.
+  ///
+  /// If use_elapsed_duration==false, the early stopping decision is made based
+  /// on the objective values predicted at the target steps. If
+  /// use_elapsed_duration==true, elapsed_secs is used instead of steps. Also,
+  /// in this case, the parameters max_num_steps and min_num_steps are
+  /// overloaded to contain max_elapsed_seconds and min_elapsed_seconds.
+  core.bool? 
useElapsedDuration; - GoogleCloudAiplatformV1SearchModelDeploymentMonitoringStatsAnomaliesResponse({ - this.monitoringStats, - this.nextPageToken, + GoogleCloudAiplatformV1StudySpecConvexAutomatedStoppingSpec({ + this.learningRateParameterName, + this.maxStepCount, + this.minMeasurementCount, + this.minStepCount, + this.updateAllStoppedTrials, + this.useElapsedDuration, }); - GoogleCloudAiplatformV1SearchModelDeploymentMonitoringStatsAnomaliesResponse.fromJson( + GoogleCloudAiplatformV1StudySpecConvexAutomatedStoppingSpec.fromJson( core.Map json_) : this( - monitoringStats: (json_['monitoringStats'] as core.List?) - ?.map((value) => - GoogleCloudAiplatformV1ModelMonitoringStatsAnomalies.fromJson( - value as core.Map)) - .toList(), - nextPageToken: json_['nextPageToken'] as core.String?, + learningRateParameterName: + json_['learningRateParameterName'] as core.String?, + maxStepCount: json_['maxStepCount'] as core.String?, + minMeasurementCount: json_['minMeasurementCount'] as core.String?, + minStepCount: json_['minStepCount'] as core.String?, + updateAllStoppedTrials: json_['updateAllStoppedTrials'] as core.bool?, + useElapsedDuration: json_['useElapsedDuration'] as core.bool?, ); core.Map toJson() => { - if (monitoringStats != null) 'monitoringStats': monitoringStats!, - if (nextPageToken != null) 'nextPageToken': nextPageToken!, + if (learningRateParameterName != null) + 'learningRateParameterName': learningRateParameterName!, + if (maxStepCount != null) 'maxStepCount': maxStepCount!, + if (minMeasurementCount != null) + 'minMeasurementCount': minMeasurementCount!, + if (minStepCount != null) 'minStepCount': minStepCount!, + if (updateAllStoppedTrials != null) + 'updateAllStoppedTrials': updateAllStoppedTrials!, + if (useElapsedDuration != null) + 'useElapsedDuration': useElapsedDuration!, }; } -/// The request message for FeatureOnlineStoreService.SearchNearestEntities. -class GoogleCloudAiplatformV1SearchNearestEntitiesRequest { - /// The query. - /// - /// Required. - GoogleCloudAiplatformV1NearestNeighborQuery? query; - - /// If set to true, the full entities (including all vector values and - /// metadata) of the nearest neighbors are returned; otherwise only entity id - /// of the nearest neighbors will be returned. - /// - /// Note that returning full entities will significantly increase the latency - /// and cost of the query. +/// The decay curve automated stopping rule builds a Gaussian Process Regressor +/// to predict the final objective value of a Trial based on the already +/// completed Trials and the intermediate measurements of the current Trial. +/// +/// Early stopping is requested for the current Trial if there is very low +/// probability to exceed the optimal value found so far. +class GoogleCloudAiplatformV1StudySpecDecayCurveAutomatedStoppingSpec { + /// True if Measurement.elapsed_duration is used as the x-axis of each Trials + /// Decay Curve. /// - /// Optional. - core.bool? returnFullEntity; + /// Otherwise, Measurement.step_count will be used as the x-axis. + core.bool? useElapsedDuration; - GoogleCloudAiplatformV1SearchNearestEntitiesRequest({ - this.query, - this.returnFullEntity, + GoogleCloudAiplatformV1StudySpecDecayCurveAutomatedStoppingSpec({ + this.useElapsedDuration, }); - GoogleCloudAiplatformV1SearchNearestEntitiesRequest.fromJson(core.Map json_) + GoogleCloudAiplatformV1StudySpecDecayCurveAutomatedStoppingSpec.fromJson( + core.Map json_) : this( - query: json_.containsKey('query') - ? 
GoogleCloudAiplatformV1NearestNeighborQuery.fromJson( - json_['query'] as core.Map) - : null, - returnFullEntity: json_['returnFullEntity'] as core.bool?, + useElapsedDuration: json_['useElapsedDuration'] as core.bool?, ); core.Map toJson() => { - if (query != null) 'query': query!, - if (returnFullEntity != null) 'returnFullEntity': returnFullEntity!, + if (useElapsedDuration != null) + 'useElapsedDuration': useElapsedDuration!, }; } -/// Response message for FeatureOnlineStoreService.SearchNearestEntities -class GoogleCloudAiplatformV1SearchNearestEntitiesResponse { - /// The nearest neighbors of the query entity. - GoogleCloudAiplatformV1NearestNeighbors? nearestNeighbors; +/// The median automated stopping rule stops a pending Trial if the Trial's best +/// objective_value is strictly below the median 'performance' of all completed +/// Trials reported up to the Trial's last measurement. +/// +/// Currently, 'performance' refers to the running average of the objective +/// values reported by the Trial in each measurement. +class GoogleCloudAiplatformV1StudySpecMedianAutomatedStoppingSpec { + /// True if median automated stopping rule applies on + /// Measurement.elapsed_duration. + /// + /// It means that elapsed_duration field of latest measurement of current + /// Trial is used to compute median objective value for each completed Trials. + core.bool? useElapsedDuration; - GoogleCloudAiplatformV1SearchNearestEntitiesResponse({ - this.nearestNeighbors, + GoogleCloudAiplatformV1StudySpecMedianAutomatedStoppingSpec({ + this.useElapsedDuration, }); - GoogleCloudAiplatformV1SearchNearestEntitiesResponse.fromJson(core.Map json_) + GoogleCloudAiplatformV1StudySpecMedianAutomatedStoppingSpec.fromJson( + core.Map json_) : this( - nearestNeighbors: json_.containsKey('nearestNeighbors') - ? GoogleCloudAiplatformV1NearestNeighbors.fromJson( - json_['nearestNeighbors'] - as core.Map) - : null, + useElapsedDuration: json_['useElapsedDuration'] as core.bool?, ); core.Map toJson() => { - if (nearestNeighbors != null) 'nearestNeighbors': nearestNeighbors!, + if (useElapsedDuration != null) + 'useElapsedDuration': useElapsedDuration!, }; } -/// Segment of the content. -class GoogleCloudAiplatformV1Segment { - /// End index in the given Part, measured in bytes. - /// - /// Offset from the start of the Part, exclusive, starting at zero. - /// - /// Output only. - core.int? endIndex; - - /// The index of a Part object within its parent Content object. +/// Represents a metric to optimize. +class GoogleCloudAiplatformV1StudySpecMetricSpec { + /// The optimization goal of the metric. /// - /// Output only. - core.int? partIndex; + /// Required. + /// Possible string values are: + /// - "GOAL_TYPE_UNSPECIFIED" : Goal Type will default to maximize. + /// - "MAXIMIZE" : Maximize the goal metric. + /// - "MINIMIZE" : Minimize the goal metric. + core.String? goal; - /// Start index in the given Part, measured in bytes. + /// The ID of the metric. /// - /// Offset from the start of the Part, inclusive, starting at zero. + /// Must not contain whitespaces and must be unique amongst all MetricSpecs. /// - /// Output only. - core.int? startIndex; + /// Required. + core.String? metricId; - /// The text corresponding to the segment from the response. + /// Used for safe search. /// - /// Output only. - core.String? text; + /// In the case, the metric will be a safety metric. You must provide a + /// separate metric for objective metric. + GoogleCloudAiplatformV1StudySpecMetricSpecSafetyMetricConfig? 
safetyConfig; - GoogleCloudAiplatformV1Segment({ - this.endIndex, - this.partIndex, - this.startIndex, - this.text, + GoogleCloudAiplatformV1StudySpecMetricSpec({ + this.goal, + this.metricId, + this.safetyConfig, }); - GoogleCloudAiplatformV1Segment.fromJson(core.Map json_) + GoogleCloudAiplatformV1StudySpecMetricSpec.fromJson(core.Map json_) : this( - endIndex: json_['endIndex'] as core.int?, - partIndex: json_['partIndex'] as core.int?, - startIndex: json_['startIndex'] as core.int?, - text: json_['text'] as core.String?, + goal: json_['goal'] as core.String?, + metricId: json_['metricId'] as core.String?, + safetyConfig: json_.containsKey('safetyConfig') + ? GoogleCloudAiplatformV1StudySpecMetricSpecSafetyMetricConfig + .fromJson(json_['safetyConfig'] + as core.Map) + : null, ); core.Map toJson() => { - if (endIndex != null) 'endIndex': endIndex!, - if (partIndex != null) 'partIndex': partIndex!, - if (startIndex != null) 'startIndex': startIndex!, - if (text != null) 'text': text!, + if (goal != null) 'goal': goal!, + if (metricId != null) 'metricId': metricId!, + if (safetyConfig != null) 'safetyConfig': safetyConfig!, }; } -/// Configuration for the use of custom service account to run the workloads. -class GoogleCloudAiplatformV1ServiceAccountSpec { - /// If true, custom user-managed service account is enforced to run any - /// workloads (for example, Vertex Jobs) on the resource. - /// - /// Otherwise, uses the - /// [Vertex AI Custom Code Service Agent](https://cloud.google.com/vertex-ai/docs/general/access-control#service-agents). +/// Used in safe optimization to specify threshold levels and risk tolerance. +class GoogleCloudAiplatformV1StudySpecMetricSpecSafetyMetricConfig { + /// Desired minimum fraction of safe trials (over total number of trials) that + /// should be targeted by the algorithm at any time during the study (best + /// effort). /// - /// Required. - core.bool? enableCustomServiceAccount; + /// This should be between 0.0 and 1.0 and a value of 0.0 means that there is + /// no minimum and an algorithm proceeds without targeting any specific + /// fraction. A value of 1.0 means that the algorithm attempts to only Suggest + /// safe Trials. + core.double? desiredMinSafeTrialsFraction; - /// Required when all below conditions are met * - /// `enable_custom_service_account` is true; * any runtime is specified via - /// `ResourceRuntimeSpec` on creation time, for example, Ray The users must - /// have `iam.serviceAccounts.actAs` permission on this service account and - /// then the specified runtime containers will run as it. - /// - /// Do not set this field if you want to submit jobs using custom service - /// account to this PersistentResource after creation, but only specify the - /// `service_account` inside the job. + /// Safety threshold (boundary value between safe and unsafe). /// - /// Optional. - core.String? serviceAccount; + /// NOTE that if you leave SafetyMetricConfig unset, a default value of 0 will + /// be used. + core.double? 
safetyThreshold; - GoogleCloudAiplatformV1ServiceAccountSpec({ - this.enableCustomServiceAccount, - this.serviceAccount, + GoogleCloudAiplatformV1StudySpecMetricSpecSafetyMetricConfig({ + this.desiredMinSafeTrialsFraction, + this.safetyThreshold, }); - GoogleCloudAiplatformV1ServiceAccountSpec.fromJson(core.Map json_) + GoogleCloudAiplatformV1StudySpecMetricSpecSafetyMetricConfig.fromJson( + core.Map json_) : this( - enableCustomServiceAccount: - json_['enableCustomServiceAccount'] as core.bool?, - serviceAccount: json_['serviceAccount'] as core.String?, + desiredMinSafeTrialsFraction: + (json_['desiredMinSafeTrialsFraction'] as core.num?)?.toDouble(), + safetyThreshold: (json_['safetyThreshold'] as core.num?)?.toDouble(), ); core.Map toJson() => { - if (enableCustomServiceAccount != null) - 'enableCustomServiceAccount': enableCustomServiceAccount!, - if (serviceAccount != null) 'serviceAccount': serviceAccount!, + if (desiredMinSafeTrialsFraction != null) + 'desiredMinSafeTrialsFraction': desiredMinSafeTrialsFraction!, + if (safetyThreshold != null) 'safetyThreshold': safetyThreshold!, }; } -/// A set of Shielded Instance options. -/// -/// See -/// [Images using supported Shielded VM features](https://cloud.google.com/compute/docs/instances/modifying-shielded-vm). -class GoogleCloudAiplatformV1ShieldedVmConfig { - /// Defines whether the instance has - /// [Secure Boot](https://cloud.google.com/compute/shielded-vm/docs/shielded-vm#secure-boot) - /// enabled. +/// Represents a single parameter to optimize. +class GoogleCloudAiplatformV1StudySpecParameterSpec { + /// The value spec for a 'CATEGORICAL' parameter. + GoogleCloudAiplatformV1StudySpecParameterSpecCategoricalValueSpec? + categoricalValueSpec; + + /// A conditional parameter node is active if the parameter's value matches + /// the conditional node's parent_value_condition. /// - /// Secure Boot helps ensure that the system only runs authentic software by - /// verifying the digital signature of all boot components, and halting the - /// boot process if signature verification fails. - core.bool? enableSecureBoot; + /// If two items in conditional_parameter_specs have the same name, they must + /// have disjoint parent_value_condition. + core.List< + GoogleCloudAiplatformV1StudySpecParameterSpecConditionalParameterSpec>? + conditionalParameterSpecs; - GoogleCloudAiplatformV1ShieldedVmConfig({ - this.enableSecureBoot, - }); + /// The value spec for a 'DISCRETE' parameter. + GoogleCloudAiplatformV1StudySpecParameterSpecDiscreteValueSpec? + discreteValueSpec; - GoogleCloudAiplatformV1ShieldedVmConfig.fromJson(core.Map json_) - : this( - enableSecureBoot: json_['enableSecureBoot'] as core.bool?, - ); + /// The value spec for a 'DOUBLE' parameter. + GoogleCloudAiplatformV1StudySpecParameterSpecDoubleValueSpec? doubleValueSpec; - core.Map toJson() => { - if (enableSecureBoot != null) 'enableSecureBoot': enableSecureBoot!, - }; -} + /// The value spec for an 'INTEGER' parameter. + GoogleCloudAiplatformV1StudySpecParameterSpecIntegerValueSpec? + integerValueSpec; -/// Config for SmoothGrad approximation of gradients. -/// -/// When enabled, the gradients are approximated by averaging the gradients from -/// noisy samples in the vicinity of the inputs. Adding noise can help improve -/// the computed gradients. Refer to this paper for more details: -/// https://arxiv.org/pdf/1706.03825.pdf -class GoogleCloudAiplatformV1SmoothGradConfig { - /// This is similar to noise_sigma, but provides additional flexibility. 
+ /// The ID of the parameter. /// - /// A separate noise sigma can be provided for each feature, which is useful - /// if their distributions are different. No noise is added to features that - /// are not set. If this field is unset, noise_sigma will be used for all - /// features. - GoogleCloudAiplatformV1FeatureNoiseSigma? featureNoiseSigma; - - /// This is a single float value and will be used to add noise to all the - /// features. + /// Must not contain whitespaces and must be unique amongst all + /// ParameterSpecs. /// - /// Use this field when all features are normalized to have the same - /// distribution: scale to range \[0, 1\], \[-1, 1\] or z-scoring, where - /// features are normalized to have 0-mean and 1-variance. Learn more about - /// [normalization](https://developers.google.com/machine-learning/data-prep/transform/normalization). - /// For best results the recommended value is about 10% - 20% of the standard - /// deviation of the input feature. Refer to section 3.2 of the SmoothGrad - /// paper: https://arxiv.org/pdf/1706.03825.pdf. Defaults to 0.1. If the - /// distribution is different per feature, set feature_noise_sigma instead for - /// each feature. - core.double? noiseSigma; + /// Required. + core.String? parameterId; - /// The number of gradient samples to use for approximation. + /// How the parameter should be scaled. /// - /// The higher this number, the more accurate the gradient is, but the runtime - /// complexity increases by this factor as well. Valid range of its value is - /// \[1, 50\]. Defaults to 3. - core.int? noisySampleCount; + /// Leave unset for `CATEGORICAL` parameters. + /// Possible string values are: + /// - "SCALE_TYPE_UNSPECIFIED" : By default, no scaling is applied. + /// - "UNIT_LINEAR_SCALE" : Scales the feasible space to (0, 1) linearly. + /// - "UNIT_LOG_SCALE" : Scales the feasible space logarithmically to (0, 1). + /// The entire feasible space must be strictly positive. + /// - "UNIT_REVERSE_LOG_SCALE" : Scales the feasible space "reverse" + /// logarithmically to (0, 1). The result is that values close to the top of + /// the feasible space are spread out more than points near the bottom. The + /// entire feasible space must be strictly positive. + core.String? scaleType; - GoogleCloudAiplatformV1SmoothGradConfig({ - this.featureNoiseSigma, - this.noiseSigma, - this.noisySampleCount, + GoogleCloudAiplatformV1StudySpecParameterSpec({ + this.categoricalValueSpec, + this.conditionalParameterSpecs, + this.discreteValueSpec, + this.doubleValueSpec, + this.integerValueSpec, + this.parameterId, + this.scaleType, }); - GoogleCloudAiplatformV1SmoothGradConfig.fromJson(core.Map json_) + GoogleCloudAiplatformV1StudySpecParameterSpec.fromJson(core.Map json_) : this( - featureNoiseSigma: json_.containsKey('featureNoiseSigma') - ? GoogleCloudAiplatformV1FeatureNoiseSigma.fromJson( - json_['featureNoiseSigma'] + categoricalValueSpec: json_.containsKey('categoricalValueSpec') + ? GoogleCloudAiplatformV1StudySpecParameterSpecCategoricalValueSpec + .fromJson(json_['categoricalValueSpec'] as core.Map) : null, - noiseSigma: (json_['noiseSigma'] as core.num?)?.toDouble(), - noisySampleCount: json_['noisySampleCount'] as core.int?, + conditionalParameterSpecs: (json_['conditionalParameterSpecs'] + as core.List?) + ?.map((value) => + GoogleCloudAiplatformV1StudySpecParameterSpecConditionalParameterSpec + .fromJson(value as core.Map)) + .toList(), + discreteValueSpec: json_.containsKey('discreteValueSpec') + ? 
GoogleCloudAiplatformV1StudySpecParameterSpecDiscreteValueSpec + .fromJson(json_['discreteValueSpec'] + as core.Map) + : null, + doubleValueSpec: json_.containsKey('doubleValueSpec') + ? GoogleCloudAiplatformV1StudySpecParameterSpecDoubleValueSpec + .fromJson(json_['doubleValueSpec'] + as core.Map) + : null, + integerValueSpec: json_.containsKey('integerValueSpec') + ? GoogleCloudAiplatformV1StudySpecParameterSpecIntegerValueSpec + .fromJson(json_['integerValueSpec'] + as core.Map) + : null, + parameterId: json_['parameterId'] as core.String?, + scaleType: json_['scaleType'] as core.String?, ); core.Map toJson() => { - if (featureNoiseSigma != null) 'featureNoiseSigma': featureNoiseSigma!, - if (noiseSigma != null) 'noiseSigma': noiseSigma!, - if (noisySampleCount != null) 'noisySampleCount': noisySampleCount!, + if (categoricalValueSpec != null) + 'categoricalValueSpec': categoricalValueSpec!, + if (conditionalParameterSpecs != null) + 'conditionalParameterSpecs': conditionalParameterSpecs!, + if (discreteValueSpec != null) 'discreteValueSpec': discreteValueSpec!, + if (doubleValueSpec != null) 'doubleValueSpec': doubleValueSpec!, + if (integerValueSpec != null) 'integerValueSpec': integerValueSpec!, + if (parameterId != null) 'parameterId': parameterId!, + if (scaleType != null) 'scaleType': scaleType!, }; } -/// SpecialistPool represents customers' own workforce to work on their data -/// labeling jobs. -/// -/// It includes a group of specialist managers and workers. Managers are -/// responsible for managing the workers in this pool as well as customers' data -/// labeling jobs associated with this pool. Customers create specialist pool as -/// well as start data labeling jobs on Cloud, managers and workers handle the -/// jobs using CrowdCompute console. -class GoogleCloudAiplatformV1SpecialistPool { - /// The user-defined name of the SpecialistPool. - /// - /// The name can be up to 128 characters long and can consist of any UTF-8 - /// characters. This field should be unique on project-level. +/// Value specification for a parameter in `CATEGORICAL` type. +class GoogleCloudAiplatformV1StudySpecParameterSpecCategoricalValueSpec { + /// A default value for a `CATEGORICAL` parameter that is assumed to be a + /// relatively good starting point. /// - /// Required. - core.String? displayName; + /// Unset value signals that there is no offered starting point. Currently + /// only supported by the Vertex AI Vizier service. Not supported by + /// HyperparameterTuningJob or TrainingPipeline. + core.String? defaultValue; - /// The resource name of the SpecialistPool. + /// The list of possible categories. /// /// Required. - core.String? name; + core.List? values; - /// The resource name of the pending data labeling jobs. - /// - /// Output only. - core.List? pendingDataLabelingJobs; + GoogleCloudAiplatformV1StudySpecParameterSpecCategoricalValueSpec({ + this.defaultValue, + this.values, + }); - /// The email addresses of the managers in the SpecialistPool. - core.List? specialistManagerEmails; + GoogleCloudAiplatformV1StudySpecParameterSpecCategoricalValueSpec.fromJson( + core.Map json_) + : this( + defaultValue: json_['defaultValue'] as core.String?, + values: (json_['values'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + ); - /// The number of managers in this SpecialistPool. 
+ core.Map toJson() => { + if (defaultValue != null) 'defaultValue': defaultValue!, + if (values != null) 'values': values!, + }; +} + +/// Represents a parameter spec with condition from its parent parameter. +class GoogleCloudAiplatformV1StudySpecParameterSpecConditionalParameterSpec { + /// The spec for a conditional parameter. /// - /// Output only. - core.int? specialistManagersCount; + /// Required. + GoogleCloudAiplatformV1StudySpecParameterSpec? parameterSpec; - /// The email addresses of workers in the SpecialistPool. - core.List? specialistWorkerEmails; + /// The spec for matching values from a parent parameter of `CATEGORICAL` + /// type. + GoogleCloudAiplatformV1StudySpecParameterSpecConditionalParameterSpecCategoricalValueCondition? + parentCategoricalValues; - GoogleCloudAiplatformV1SpecialistPool({ - this.displayName, - this.name, - this.pendingDataLabelingJobs, - this.specialistManagerEmails, - this.specialistManagersCount, - this.specialistWorkerEmails, + /// The spec for matching values from a parent parameter of `DISCRETE` type. + GoogleCloudAiplatformV1StudySpecParameterSpecConditionalParameterSpecDiscreteValueCondition? + parentDiscreteValues; + + /// The spec for matching values from a parent parameter of `INTEGER` type. + GoogleCloudAiplatformV1StudySpecParameterSpecConditionalParameterSpecIntValueCondition? + parentIntValues; + + GoogleCloudAiplatformV1StudySpecParameterSpecConditionalParameterSpec({ + this.parameterSpec, + this.parentCategoricalValues, + this.parentDiscreteValues, + this.parentIntValues, }); - GoogleCloudAiplatformV1SpecialistPool.fromJson(core.Map json_) - : this( - displayName: json_['displayName'] as core.String?, - name: json_['name'] as core.String?, - pendingDataLabelingJobs: - (json_['pendingDataLabelingJobs'] as core.List?) - ?.map((value) => value as core.String) - .toList(), - specialistManagerEmails: - (json_['specialistManagerEmails'] as core.List?) - ?.map((value) => value as core.String) - .toList(), - specialistManagersCount: - json_['specialistManagersCount'] as core.int?, - specialistWorkerEmails: - (json_['specialistWorkerEmails'] as core.List?) - ?.map((value) => value as core.String) - .toList(), + GoogleCloudAiplatformV1StudySpecParameterSpecConditionalParameterSpec.fromJson( + core.Map json_) + : this( + parameterSpec: json_.containsKey('parameterSpec') + ? GoogleCloudAiplatformV1StudySpecParameterSpec.fromJson( + json_['parameterSpec'] as core.Map) + : null, + parentCategoricalValues: json_.containsKey('parentCategoricalValues') + ? GoogleCloudAiplatformV1StudySpecParameterSpecConditionalParameterSpecCategoricalValueCondition + .fromJson(json_['parentCategoricalValues'] + as core.Map) + : null, + parentDiscreteValues: json_.containsKey('parentDiscreteValues') + ? GoogleCloudAiplatformV1StudySpecParameterSpecConditionalParameterSpecDiscreteValueCondition + .fromJson(json_['parentDiscreteValues'] + as core.Map) + : null, + parentIntValues: json_.containsKey('parentIntValues') + ? 
GoogleCloudAiplatformV1StudySpecParameterSpecConditionalParameterSpecIntValueCondition + .fromJson(json_['parentIntValues'] + as core.Map) + : null, ); core.Map toJson() => { - if (displayName != null) 'displayName': displayName!, - if (name != null) 'name': name!, - if (pendingDataLabelingJobs != null) - 'pendingDataLabelingJobs': pendingDataLabelingJobs!, - if (specialistManagerEmails != null) - 'specialistManagerEmails': specialistManagerEmails!, - if (specialistManagersCount != null) - 'specialistManagersCount': specialistManagersCount!, - if (specialistWorkerEmails != null) - 'specialistWorkerEmails': specialistWorkerEmails!, + if (parameterSpec != null) 'parameterSpec': parameterSpec!, + if (parentCategoricalValues != null) + 'parentCategoricalValues': parentCategoricalValues!, + if (parentDiscreteValues != null) + 'parentDiscreteValues': parentDiscreteValues!, + if (parentIntValues != null) 'parentIntValues': parentIntValues!, }; } -/// Request message for NotebookService.StartNotebookRuntime. -typedef GoogleCloudAiplatformV1StartNotebookRuntimeRequest = $Empty; - -/// Request message for VizierService.StopTrial. -typedef GoogleCloudAiplatformV1StopTrialRequest = $Empty; - -/// Assigns input data to the training, validation, and test sets so that the -/// distribution of values found in the categorical column (as specified by the -/// `key` field) is mirrored within each split. -/// -/// The fraction values determine the relative sizes of the splits. For example, -/// if the specified column has three values, with 50% of the rows having value -/// "A", 25% value "B", and 25% value "C", and the split fractions are specified -/// as 80/10/10, then the training set will constitute 80% of the training data, -/// with about 50% of the training set rows having the value "A" for the -/// specified column, about 25% having the value "B", and about 25% having the -/// value "C". Only the top 500 occurring values are used; any values not in the -/// top 500 values are randomly assigned to a split. If less than three rows -/// contain a specific value, those rows are randomly assigned. Supported only -/// for tabular Datasets. -class GoogleCloudAiplatformV1StratifiedSplit { - /// The key is a name of one of the Dataset's data columns. +/// Represents the spec to match categorical values from parent parameter. +class GoogleCloudAiplatformV1StudySpecParameterSpecConditionalParameterSpecCategoricalValueCondition { + /// Matches values of the parent parameter of 'CATEGORICAL' type. /// - /// The key provided must be for a categorical column. + /// All values must exist in `categorical_value_spec` of parent parameter. /// /// Required. - core.String? key; - - /// The fraction of the input data that is to be used to evaluate the Model. - core.double? testFraction; - - /// The fraction of the input data that is to be used to train the Model. - core.double? trainingFraction; - - /// The fraction of the input data that is to be used to validate the Model. - core.double? validationFraction; + core.List? 
values; - GoogleCloudAiplatformV1StratifiedSplit({ - this.key, - this.testFraction, - this.trainingFraction, - this.validationFraction, + GoogleCloudAiplatformV1StudySpecParameterSpecConditionalParameterSpecCategoricalValueCondition({ + this.values, }); - GoogleCloudAiplatformV1StratifiedSplit.fromJson(core.Map json_) + GoogleCloudAiplatformV1StudySpecParameterSpecConditionalParameterSpecCategoricalValueCondition.fromJson( + core.Map json_) : this( - key: json_['key'] as core.String?, - testFraction: (json_['testFraction'] as core.num?)?.toDouble(), - trainingFraction: - (json_['trainingFraction'] as core.num?)?.toDouble(), - validationFraction: - (json_['validationFraction'] as core.num?)?.toDouble(), + values: (json_['values'] as core.List?) + ?.map((value) => value as core.String) + .toList(), ); core.Map toJson() => { - if (key != null) 'key': key!, - if (testFraction != null) 'testFraction': testFraction!, - if (trainingFraction != null) 'trainingFraction': trainingFraction!, - if (validationFraction != null) - 'validationFraction': validationFraction!, + if (values != null) 'values': values!, }; } -/// Request message for PredictionService.StreamRawPredict. -class GoogleCloudAiplatformV1StreamRawPredictRequest { - /// The prediction input. +/// Represents the spec to match discrete values from parent parameter. +class GoogleCloudAiplatformV1StudySpecParameterSpecConditionalParameterSpecDiscreteValueCondition { + /// Matches values of the parent parameter of 'DISCRETE' type. /// - /// Supports HTTP headers and arbitrary data payload. - GoogleApiHttpBody? httpBody; + /// All values must exist in `discrete_value_spec` of parent parameter. The + /// Epsilon of the value matching is 1e-10. + /// + /// Required. + core.List? values; - GoogleCloudAiplatformV1StreamRawPredictRequest({ - this.httpBody, + GoogleCloudAiplatformV1StudySpecParameterSpecConditionalParameterSpecDiscreteValueCondition({ + this.values, }); - GoogleCloudAiplatformV1StreamRawPredictRequest.fromJson(core.Map json_) + GoogleCloudAiplatformV1StudySpecParameterSpecConditionalParameterSpecDiscreteValueCondition.fromJson( + core.Map json_) : this( - httpBody: json_.containsKey('httpBody') - ? GoogleApiHttpBody.fromJson( - json_['httpBody'] as core.Map) - : null, + values: (json_['values'] as core.List?) + ?.map((value) => (value as core.num).toDouble()) + .toList(), ); core.Map toJson() => { - if (httpBody != null) 'httpBody': httpBody!, + if (values != null) 'values': values!, }; } -/// Request message for PredictionService.StreamingPredict. -/// -/// The first message must contain endpoint field and optionally input. The -/// subsequent messages must contain input. -class GoogleCloudAiplatformV1StreamingPredictRequest { - /// The prediction input. - core.List? inputs; - - /// The parameters that govern the prediction. - GoogleCloudAiplatformV1Tensor? parameters; +/// Represents the spec to match integer values from parent parameter. +class GoogleCloudAiplatformV1StudySpecParameterSpecConditionalParameterSpecIntValueCondition { + /// Matches values of the parent parameter of 'INTEGER' type. + /// + /// All values must lie in `integer_value_spec` of parent parameter. + /// + /// Required. + core.List? 
values; - GoogleCloudAiplatformV1StreamingPredictRequest({ - this.inputs, - this.parameters, + GoogleCloudAiplatformV1StudySpecParameterSpecConditionalParameterSpecIntValueCondition({ + this.values, }); - GoogleCloudAiplatformV1StreamingPredictRequest.fromJson(core.Map json_) + GoogleCloudAiplatformV1StudySpecParameterSpecConditionalParameterSpecIntValueCondition.fromJson( + core.Map json_) : this( - inputs: (json_['inputs'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1Tensor.fromJson( - value as core.Map)) + values: (json_['values'] as core.List?) + ?.map((value) => value as core.String) .toList(), - parameters: json_.containsKey('parameters') - ? GoogleCloudAiplatformV1Tensor.fromJson( - json_['parameters'] as core.Map) - : null, ); core.Map toJson() => { - if (inputs != null) 'inputs': inputs!, - if (parameters != null) 'parameters': parameters!, + if (values != null) 'values': values!, }; } -/// Response message for PredictionService.StreamingPredict. -class GoogleCloudAiplatformV1StreamingPredictResponse { - /// The prediction output. - core.List? outputs; +/// Value specification for a parameter in `DISCRETE` type. +class GoogleCloudAiplatformV1StudySpecParameterSpecDiscreteValueSpec { + /// A default value for a `DISCRETE` parameter that is assumed to be a + /// relatively good starting point. + /// + /// Unset value signals that there is no offered starting point. It + /// automatically rounds to the nearest feasible discrete point. Currently + /// only supported by the Vertex AI Vizier service. Not supported by + /// HyperparameterTuningJob or TrainingPipeline. + core.double? defaultValue; - /// The parameters that govern the prediction. - GoogleCloudAiplatformV1Tensor? parameters; + /// A list of possible values. + /// + /// The list should be in increasing order and at least 1e-10 apart. For + /// instance, this parameter might have possible settings of 1.5, 2.5, and + /// 4.0. This list should not contain more than 1,000 values. + /// + /// Required. + core.List? values; - GoogleCloudAiplatformV1StreamingPredictResponse({ - this.outputs, - this.parameters, + GoogleCloudAiplatformV1StudySpecParameterSpecDiscreteValueSpec({ + this.defaultValue, + this.values, }); - GoogleCloudAiplatformV1StreamingPredictResponse.fromJson(core.Map json_) + GoogleCloudAiplatformV1StudySpecParameterSpecDiscreteValueSpec.fromJson( + core.Map json_) : this( - outputs: (json_['outputs'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1Tensor.fromJson( - value as core.Map)) + defaultValue: (json_['defaultValue'] as core.num?)?.toDouble(), + values: (json_['values'] as core.List?) + ?.map((value) => (value as core.num).toDouble()) .toList(), - parameters: json_.containsKey('parameters') - ? GoogleCloudAiplatformV1Tensor.fromJson( - json_['parameters'] as core.Map) - : null, ); core.Map toJson() => { - if (outputs != null) 'outputs': outputs!, - if (parameters != null) 'parameters': parameters!, + if (defaultValue != null) 'defaultValue': defaultValue!, + if (values != null) 'values': values!, }; } -/// Request message for -/// FeaturestoreOnlineServingService.StreamingFeatureValuesRead. -class GoogleCloudAiplatformV1StreamingReadFeatureValuesRequest { - /// IDs of entities to read Feature values of. +/// Value specification for a parameter in `DOUBLE` type. +class GoogleCloudAiplatformV1StudySpecParameterSpecDoubleValueSpec { + /// A default value for a `DOUBLE` parameter that is assumed to be a + /// relatively good starting point. /// - /// The maximum number of IDs is 100. 
For example, for a machine learning - /// model predicting user clicks on a website, an entity ID could be - /// `user_123`. + /// Unset value signals that there is no offered starting point. Currently + /// only supported by the Vertex AI Vizier service. Not supported by + /// HyperparameterTuningJob or TrainingPipeline. + core.double? defaultValue; + + /// Inclusive maximum value of the parameter. /// /// Required. - core.List? entityIds; + core.double? maxValue; - /// Selector choosing Features of the target EntityType. - /// - /// Feature IDs will be deduplicated. + /// Inclusive minimum value of the parameter. /// /// Required. - GoogleCloudAiplatformV1FeatureSelector? featureSelector; + core.double? minValue; - GoogleCloudAiplatformV1StreamingReadFeatureValuesRequest({ - this.entityIds, - this.featureSelector, + GoogleCloudAiplatformV1StudySpecParameterSpecDoubleValueSpec({ + this.defaultValue, + this.maxValue, + this.minValue, }); - GoogleCloudAiplatformV1StreamingReadFeatureValuesRequest.fromJson( + GoogleCloudAiplatformV1StudySpecParameterSpecDoubleValueSpec.fromJson( core.Map json_) : this( - entityIds: (json_['entityIds'] as core.List?) - ?.map((value) => value as core.String) - .toList(), - featureSelector: json_.containsKey('featureSelector') - ? GoogleCloudAiplatformV1FeatureSelector.fromJson( - json_['featureSelector'] - as core.Map) - : null, + defaultValue: (json_['defaultValue'] as core.num?)?.toDouble(), + maxValue: (json_['maxValue'] as core.num?)?.toDouble(), + minValue: (json_['minValue'] as core.num?)?.toDouble(), ); core.Map toJson() => { - if (entityIds != null) 'entityIds': entityIds!, - if (featureSelector != null) 'featureSelector': featureSelector!, + if (defaultValue != null) 'defaultValue': defaultValue!, + if (maxValue != null) 'maxValue': maxValue!, + if (minValue != null) 'minValue': minValue!, }; } -/// A list of string values. -class GoogleCloudAiplatformV1StringArray { - /// A list of string values. - core.List? values; +/// Value specification for a parameter in `INTEGER` type. +class GoogleCloudAiplatformV1StudySpecParameterSpecIntegerValueSpec { + /// A default value for an `INTEGER` parameter that is assumed to be a + /// relatively good starting point. + /// + /// Unset value signals that there is no offered starting point. Currently + /// only supported by the Vertex AI Vizier service. Not supported by + /// HyperparameterTuningJob or TrainingPipeline. + core.String? defaultValue; - GoogleCloudAiplatformV1StringArray({ - this.values, + /// Inclusive maximum value of the parameter. + /// + /// Required. + core.String? maxValue; + + /// Inclusive minimum value of the parameter. + /// + /// Required. + core.String? minValue; + + GoogleCloudAiplatformV1StudySpecParameterSpecIntegerValueSpec({ + this.defaultValue, + this.maxValue, + this.minValue, }); - GoogleCloudAiplatformV1StringArray.fromJson(core.Map json_) + GoogleCloudAiplatformV1StudySpecParameterSpecIntegerValueSpec.fromJson( + core.Map json_) : this( - values: (json_['values'] as core.List?) - ?.map((value) => value as core.String) - .toList(), + defaultValue: json_['defaultValue'] as core.String?, + maxValue: json_['maxValue'] as core.String?, + minValue: json_['minValue'] as core.String?, ); core.Map toJson() => { - if (values != null) 'values': values!, + if (defaultValue != null) 'defaultValue': defaultValue!, + if (maxValue != null) 'maxValue': maxValue!, + if (minValue != null) 'minValue': minValue!, }; } -/// One field of a Struct (or object) type feature value. 
-class GoogleCloudAiplatformV1StructFieldValue { - /// Name of the field in the struct feature. - core.String? name; +/// The configuration (stopping conditions) for automated stopping of a Study. +/// +/// Conditions include trial budgets, time budgets, and convergence detection. +class GoogleCloudAiplatformV1StudySpecStudyStoppingConfig { + /// If the objective value has not improved for this much time, stop the + /// study. + /// + /// WARNING: Effective only for single-objective studies. + core.String? maxDurationNoProgress; - /// The value for this field. - GoogleCloudAiplatformV1FeatureValue? value; + /// If there are more than this many trials, stop the study. + core.int? maxNumTrials; - GoogleCloudAiplatformV1StructFieldValue({ - this.name, - this.value, + /// If the objective value has not improved for this many consecutive trials, + /// stop the study. + /// + /// WARNING: Effective only for single-objective studies. + core.int? maxNumTrialsNoProgress; + + /// If the specified time or duration has passed, stop the study. + GoogleCloudAiplatformV1StudyTimeConstraint? maximumRuntimeConstraint; + + /// If there are fewer than this many COMPLETED trials, do not stop the study. + core.int? minNumTrials; + + /// Each "stopping rule" in this proto specifies an "if" condition. + /// + /// Before Vizier would generate a new suggestion, it first checks each + /// specified stopping rule, from top to bottom in this list. Note that the + /// first few rules (e.g. minimum_runtime_constraint, min_num_trials) will + /// prevent other stopping rules from being evaluated until they are met. For + /// example, setting `min_num_trials=5` and `always_stop_after= 1 hour` means + /// that the Study will ONLY stop after it has 5 COMPLETED trials, even if + /// more than an hour has passed since its creation. It follows the first + /// applicable rule (whose "if" condition is satisfied) to make a stopping + /// decision. If none of the specified rules are applicable, then Vizier + /// decides that the study should not stop. If Vizier decides that the study + /// should stop, the study enters STOPPING state (or STOPPING_ASAP if + /// should_stop_asap = true). IMPORTANT: The automatic study state transition + /// happens precisely as described above; that is, deleting trials or updating + /// StudyConfig NEVER automatically moves the study state back to ACTIVE. If + /// you want to _resume_ a Study that was stopped, 1) change the stopping + /// conditions if necessary, 2) activate the study, and then 3) ask for + /// suggestions. If the specified time or duration has not passed, do not stop + /// the study. + GoogleCloudAiplatformV1StudyTimeConstraint? minimumRuntimeConstraint; + + /// If true, a Study enters STOPPING_ASAP whenever it would normally enters + /// STOPPING state. + /// + /// The bottom line is: set to true if you want to interrupt on-going + /// evaluations of Trials as soon as the study stopping condition is met. + /// (Please see Study.State documentation for the source of truth). + core.bool? shouldStopAsap; + + GoogleCloudAiplatformV1StudySpecStudyStoppingConfig({ + this.maxDurationNoProgress, + this.maxNumTrials, + this.maxNumTrialsNoProgress, + this.maximumRuntimeConstraint, + this.minNumTrials, + this.minimumRuntimeConstraint, + this.shouldStopAsap, }); - GoogleCloudAiplatformV1StructFieldValue.fromJson(core.Map json_) + GoogleCloudAiplatformV1StudySpecStudyStoppingConfig.fromJson(core.Map json_) : this( - name: json_['name'] as core.String?, - value: json_.containsKey('value') - ? 
GoogleCloudAiplatformV1FeatureValue.fromJson( - json_['value'] as core.Map) - : null, + maxDurationNoProgress: json_['maxDurationNoProgress'] as core.String?, + maxNumTrials: json_['maxNumTrials'] as core.int?, + maxNumTrialsNoProgress: json_['maxNumTrialsNoProgress'] as core.int?, + maximumRuntimeConstraint: + json_.containsKey('maximumRuntimeConstraint') + ? GoogleCloudAiplatformV1StudyTimeConstraint.fromJson( + json_['maximumRuntimeConstraint'] + as core.Map) + : null, + minNumTrials: json_['minNumTrials'] as core.int?, + minimumRuntimeConstraint: + json_.containsKey('minimumRuntimeConstraint') + ? GoogleCloudAiplatformV1StudyTimeConstraint.fromJson( + json_['minimumRuntimeConstraint'] + as core.Map) + : null, + shouldStopAsap: json_['shouldStopAsap'] as core.bool?, ); core.Map toJson() => { - if (name != null) 'name': name!, - if (value != null) 'value': value!, + if (maxDurationNoProgress != null) + 'maxDurationNoProgress': maxDurationNoProgress!, + if (maxNumTrials != null) 'maxNumTrials': maxNumTrials!, + if (maxNumTrialsNoProgress != null) + 'maxNumTrialsNoProgress': maxNumTrialsNoProgress!, + if (maximumRuntimeConstraint != null) + 'maximumRuntimeConstraint': maximumRuntimeConstraint!, + if (minNumTrials != null) 'minNumTrials': minNumTrials!, + if (minimumRuntimeConstraint != null) + 'minimumRuntimeConstraint': minimumRuntimeConstraint!, + if (shouldStopAsap != null) 'shouldStopAsap': shouldStopAsap!, }; } -/// Struct (or object) type feature value. -class GoogleCloudAiplatformV1StructValue { - /// A list of field values. - core.List? values; +/// Time-based Constraint for Study +class GoogleCloudAiplatformV1StudyTimeConstraint { + /// Compares the wallclock time to this time. + /// + /// Must use UTC timezone. + core.String? endTime; - GoogleCloudAiplatformV1StructValue({ - this.values, + /// Counts the wallclock time passed since the creation of this Study. + core.String? maxDuration; + + GoogleCloudAiplatformV1StudyTimeConstraint({ + this.endTime, + this.maxDuration, }); - GoogleCloudAiplatformV1StructValue.fromJson(core.Map json_) + GoogleCloudAiplatformV1StudyTimeConstraint.fromJson(core.Map json_) : this( - values: (json_['values'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1StructFieldValue.fromJson( - value as core.Map)) - .toList(), + endTime: json_['endTime'] as core.String?, + maxDuration: json_['maxDuration'] as core.String?, ); core.Map toJson() => { - if (values != null) 'values': values!, + if (endTime != null) 'endTime': endTime!, + if (maxDuration != null) 'maxDuration': maxDuration!, }; } -/// A message representing a Study. -class GoogleCloudAiplatformV1Study { - /// Time at which the study was created. +/// Request message for VizierService.SuggestTrials. +class GoogleCloudAiplatformV1SuggestTrialsRequest { + /// The identifier of the client that is requesting the suggestion. /// - /// Output only. - core.String? createTime; - - /// Describes the Study, default value is empty string. + /// If multiple SuggestTrialsRequests have the same `client_id`, the service + /// will return the identical suggested Trial if the Trial is pending, and + /// provide a new Trial if the last suggested Trial was completed. /// /// Required. - core.String? displayName; - - /// A human readable reason why the Study is inactive. - /// - /// This should be empty if a study is ACTIVE or COMPLETED. - /// - /// Output only. - core.String? inactiveReason; + core.String? clientId; - /// The name of a study. - /// - /// The study's globally unique identifier. 
Format: - /// `projects/{project}/locations/{location}/studies/{study}` + /// This allows you to specify the "context" for a Trial; a context is a slice + /// (a subspace) of the search space. /// - /// Output only. - core.String? name; - - /// The detailed state of a Study. + /// Typical uses for contexts: 1) You are using Vizier to tune a server for + /// best performance, but there's a strong weekly cycle. The context specifies + /// the day-of-week. This allows Tuesday to generalize from Wednesday without + /// assuming that everything is identical. 2) Imagine you're optimizing some + /// medical treatment for people. As they walk in the door, you know certain + /// facts about them (e.g. sex, weight, height, blood-pressure). Put that + /// information in the context, and Vizier will adapt its suggestions to the + /// patient. 3) You want to do a fair A/B test efficiently. Specify the "A" + /// and "B" conditions as contexts, and Vizier will generalize between "A" and + /// "B" conditions. If they are similar, this will allow Vizier to converge to + /// the optimum faster than if "A" and "B" were separate Studies. NOTE: You + /// can also enter contexts as REQUESTED Trials, e.g. via the CreateTrial() + /// RPC; that's the asynchronous option where you don't need a close + /// association between contexts and suggestions. NOTE: All the Parameters you + /// set in a context MUST be defined in the Study. NOTE: You must supply 0 or + /// $suggestion_count contexts. If you don't supply any contexts, Vizier will + /// make suggestions from the full search space specified in the StudySpec; if + /// you supply a full set of context, each suggestion will match the + /// corresponding context. NOTE: A Context with no features set matches + /// anything, and allows suggestions from the full search space. NOTE: + /// Contexts MUST lie within the search space specified in the StudySpec. It's + /// an error if they don't. NOTE: Contexts preferentially match ACTIVE then + /// REQUESTED trials before new suggestions are generated. NOTE: Generation of + /// suggestions involves a match between a Context and (optionally) a + /// REQUESTED trial; if that match is not fully specified, a suggestion will + /// be geneated in the merged subspace. /// - /// Output only. - /// Possible string values are: - /// - "STATE_UNSPECIFIED" : The study state is unspecified. - /// - "ACTIVE" : The study is active. - /// - "INACTIVE" : The study is stopped due to an internal error. - /// - "COMPLETED" : The study is done when the service exhausts the parameter - /// search space or max_trial_count is reached. - core.String? state; + /// Optional. + core.List? contexts; - /// Configuration of the Study. + /// The number of suggestions requested. + /// + /// It must be positive. /// /// Required. - GoogleCloudAiplatformV1StudySpec? studySpec; + core.int? 
suggestionCount; - GoogleCloudAiplatformV1Study({ - this.createTime, - this.displayName, - this.inactiveReason, - this.name, - this.state, - this.studySpec, + GoogleCloudAiplatformV1SuggestTrialsRequest({ + this.clientId, + this.contexts, + this.suggestionCount, }); - GoogleCloudAiplatformV1Study.fromJson(core.Map json_) + GoogleCloudAiplatformV1SuggestTrialsRequest.fromJson(core.Map json_) : this( - createTime: json_['createTime'] as core.String?, - displayName: json_['displayName'] as core.String?, - inactiveReason: json_['inactiveReason'] as core.String?, - name: json_['name'] as core.String?, - state: json_['state'] as core.String?, - studySpec: json_.containsKey('studySpec') - ? GoogleCloudAiplatformV1StudySpec.fromJson( - json_['studySpec'] as core.Map) - : null, + clientId: json_['clientId'] as core.String?, + contexts: (json_['contexts'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1TrialContext.fromJson( + value as core.Map)) + .toList(), + suggestionCount: json_['suggestionCount'] as core.int?, ); core.Map toJson() => { - if (createTime != null) 'createTime': createTime!, - if (displayName != null) 'displayName': displayName!, - if (inactiveReason != null) 'inactiveReason': inactiveReason!, - if (name != null) 'name': name!, - if (state != null) 'state': state!, - if (studySpec != null) 'studySpec': studySpec!, + if (clientId != null) 'clientId': clientId!, + if (contexts != null) 'contexts': contexts!, + if (suggestionCount != null) 'suggestionCount': suggestionCount!, }; } -/// Represents specification of a Study. -class GoogleCloudAiplatformV1StudySpec { - /// The search algorithm specified for the Study. - /// Possible string values are: - /// - "ALGORITHM_UNSPECIFIED" : The default algorithm used by Vertex AI for - /// [hyperparameter tuning](https://cloud.google.com/vertex-ai/docs/training/hyperparameter-tuning-overview) - /// and [Vertex AI Vizier](https://cloud.google.com/vertex-ai/docs/vizier). - /// - "GRID_SEARCH" : Simple grid search within the feasible space. To use - /// grid search, all parameters must be `INTEGER`, `CATEGORICAL`, or - /// `DISCRETE`. - /// - "RANDOM_SEARCH" : Simple random search within the feasible space. - core.String? algorithm; - - /// The automated early stopping spec using convex stopping rule. - GoogleCloudAiplatformV1StudySpecConvexAutomatedStoppingSpec? - convexAutomatedStoppingSpec; - - /// The automated early stopping spec using decay curve rule. - GoogleCloudAiplatformV1StudySpecDecayCurveAutomatedStoppingSpec? - decayCurveStoppingSpec; - - /// Describe which measurement selection type will be used - /// Possible string values are: - /// - "MEASUREMENT_SELECTION_TYPE_UNSPECIFIED" : Will be treated as - /// LAST_MEASUREMENT. - /// - "LAST_MEASUREMENT" : Use the last measurement reported. - /// - "BEST_MEASUREMENT" : Use the best measurement reported. - core.String? measurementSelectionType; - - /// The automated early stopping spec using median rule. - GoogleCloudAiplatformV1StudySpecMedianAutomatedStoppingSpec? - medianAutomatedStoppingSpec; - - /// Metric specs for the Study. +/// Input for summarization helpfulness metric. +class GoogleCloudAiplatformV1SummarizationHelpfulnessInput { + /// Summarization helpfulness instance. /// /// Required. - core.List? metrics; - - /// The observation noise level of the study. - /// - /// Currently only supported by the Vertex AI Vizier service. Not supported by - /// HyperparameterTuningJob or TrainingPipeline. 
- /// Possible string values are: - /// - "OBSERVATION_NOISE_UNSPECIFIED" : The default noise level chosen by - /// Vertex AI. - /// - "LOW" : Vertex AI assumes that the objective function is (nearly) - /// perfectly reproducible, and will never repeat the same Trial parameters. - /// - "HIGH" : Vertex AI will estimate the amount of noise in metric - /// evaluations, it may repeat the same Trial parameters more than once. - core.String? observationNoise; + GoogleCloudAiplatformV1SummarizationHelpfulnessInstance? instance; - /// The set of parameters to tune. + /// Spec for summarization helpfulness score metric. /// /// Required. - core.List? parameters; - - /// Conditions for automated stopping of a Study. - /// - /// Enable automated stopping by configuring at least one condition. - GoogleCloudAiplatformV1StudySpecStudyStoppingConfig? studyStoppingConfig; + GoogleCloudAiplatformV1SummarizationHelpfulnessSpec? metricSpec; - GoogleCloudAiplatformV1StudySpec({ - this.algorithm, - this.convexAutomatedStoppingSpec, - this.decayCurveStoppingSpec, - this.measurementSelectionType, - this.medianAutomatedStoppingSpec, - this.metrics, - this.observationNoise, - this.parameters, - this.studyStoppingConfig, + GoogleCloudAiplatformV1SummarizationHelpfulnessInput({ + this.instance, + this.metricSpec, }); - GoogleCloudAiplatformV1StudySpec.fromJson(core.Map json_) + GoogleCloudAiplatformV1SummarizationHelpfulnessInput.fromJson(core.Map json_) : this( - algorithm: json_['algorithm'] as core.String?, - convexAutomatedStoppingSpec: - json_.containsKey('convexAutomatedStoppingSpec') - ? GoogleCloudAiplatformV1StudySpecConvexAutomatedStoppingSpec - .fromJson(json_['convexAutomatedStoppingSpec'] - as core.Map) - : null, - decayCurveStoppingSpec: json_.containsKey('decayCurveStoppingSpec') - ? GoogleCloudAiplatformV1StudySpecDecayCurveAutomatedStoppingSpec - .fromJson(json_['decayCurveStoppingSpec'] - as core.Map) + instance: json_.containsKey('instance') + ? GoogleCloudAiplatformV1SummarizationHelpfulnessInstance + .fromJson( + json_['instance'] as core.Map) : null, - measurementSelectionType: - json_['measurementSelectionType'] as core.String?, - medianAutomatedStoppingSpec: - json_.containsKey('medianAutomatedStoppingSpec') - ? GoogleCloudAiplatformV1StudySpecMedianAutomatedStoppingSpec - .fromJson(json_['medianAutomatedStoppingSpec'] - as core.Map) - : null, - metrics: (json_['metrics'] as core.List?) - ?.map((value) => - GoogleCloudAiplatformV1StudySpecMetricSpec.fromJson( - value as core.Map)) - .toList(), - observationNoise: json_['observationNoise'] as core.String?, - parameters: (json_['parameters'] as core.List?) - ?.map((value) => - GoogleCloudAiplatformV1StudySpecParameterSpec.fromJson( - value as core.Map)) - .toList(), - studyStoppingConfig: json_.containsKey('studyStoppingConfig') - ? GoogleCloudAiplatformV1StudySpecStudyStoppingConfig.fromJson( - json_['studyStoppingConfig'] - as core.Map) + metricSpec: json_.containsKey('metricSpec') + ? 
GoogleCloudAiplatformV1SummarizationHelpfulnessSpec.fromJson( + json_['metricSpec'] as core.Map) : null, ); core.Map toJson() => { - if (algorithm != null) 'algorithm': algorithm!, - if (convexAutomatedStoppingSpec != null) - 'convexAutomatedStoppingSpec': convexAutomatedStoppingSpec!, - if (decayCurveStoppingSpec != null) - 'decayCurveStoppingSpec': decayCurveStoppingSpec!, - if (measurementSelectionType != null) - 'measurementSelectionType': measurementSelectionType!, - if (medianAutomatedStoppingSpec != null) - 'medianAutomatedStoppingSpec': medianAutomatedStoppingSpec!, - if (metrics != null) 'metrics': metrics!, - if (observationNoise != null) 'observationNoise': observationNoise!, - if (parameters != null) 'parameters': parameters!, - if (studyStoppingConfig != null) - 'studyStoppingConfig': studyStoppingConfig!, + if (instance != null) 'instance': instance!, + if (metricSpec != null) 'metricSpec': metricSpec!, }; } -/// Configuration for ConvexAutomatedStoppingSpec. -/// -/// When there are enough completed trials (configured by -/// min_measurement_count), for pending trials with enough measurements and -/// steps, the policy first computes an overestimate of the objective value at -/// max_num_steps according to the slope of the incomplete objective value -/// curve. No prediction can be made if the curve is completely flat. If the -/// overestimation is worse than the best objective value of the completed -/// trials, this pending trial will be early-stopped, but a last measurement -/// will be added to the pending trial with max_num_steps and predicted -/// objective value from the autoregression model. -class GoogleCloudAiplatformV1StudySpecConvexAutomatedStoppingSpec { - /// The hyper-parameter name used in the tuning job that stands for learning - /// rate. - /// - /// Leave it blank if learning rate is not in a parameter in tuning. The - /// learning_rate is used to estimate the objective value of the ongoing - /// trial. - core.String? learningRateParameterName; - - /// Steps used in predicting the final objective for early stopped trials. - /// - /// In general, it's set to be the same as the defined steps in training / - /// tuning. If not defined, it will learn it from the completed trials. When - /// use_steps is false, this field is set to the maximum elapsed seconds. - core.String? maxStepCount; - - /// The minimal number of measurements in a Trial. - /// - /// Early-stopping checks will not trigger if less than - /// min_measurement_count+1 completed trials or pending trials with less than - /// min_measurement_count measurements. If not defined, the default value is - /// 5. - core.String? minMeasurementCount; +/// Spec for summarization helpfulness instance. +typedef GoogleCloudAiplatformV1SummarizationHelpfulnessInstance = $Instance04; - /// Minimum number of steps for a trial to complete. +/// Spec for summarization helpfulness result. +class GoogleCloudAiplatformV1SummarizationHelpfulnessResult { + /// Confidence for summarization helpfulness score. /// - /// Trials which do not have a measurement with step_count \> min_step_count - /// won't be considered for early stopping. It's ok to set it to 0, and a - /// trial can be early stopped at any stage. By default, min_step_count is set - /// to be one-tenth of the max_step_count. When use_elapsed_duration is true, - /// this field is set to the minimum elapsed seconds. - core.String? minStepCount; + /// Output only. + core.double? 
confidence; - /// ConvexAutomatedStoppingSpec by default only updates the trials that needs - /// to be early stopped using a newly trained auto-regressive model. + /// Explanation for summarization helpfulness score. /// - /// When this flag is set to True, all stopped trials from the beginning are - /// potentially updated in terms of their `final_measurement`. Also, note that - /// the training logic of autoregressive models is different in this case. - /// Enabling this option has shown better results and this may be the default - /// option in the future. - core.bool? updateAllStoppedTrials; + /// Output only. + core.String? explanation; - /// This bool determines whether or not the rule is applied based on - /// elapsed_secs or steps. + /// Summarization Helpfulness score. /// - /// If use_elapsed_duration==false, the early stopping decision is made - /// according to the predicted objective values according to the target steps. - /// If use_elapsed_duration==true, elapsed_secs is used instead of steps. - /// Also, in this case, the parameters max_num_steps and min_num_steps are - /// overloaded to contain max_elapsed_seconds and min_elapsed_seconds. - core.bool? useElapsedDuration; + /// Output only. + core.double? score; - GoogleCloudAiplatformV1StudySpecConvexAutomatedStoppingSpec({ - this.learningRateParameterName, - this.maxStepCount, - this.minMeasurementCount, - this.minStepCount, - this.updateAllStoppedTrials, - this.useElapsedDuration, + GoogleCloudAiplatformV1SummarizationHelpfulnessResult({ + this.confidence, + this.explanation, + this.score, }); - GoogleCloudAiplatformV1StudySpecConvexAutomatedStoppingSpec.fromJson( - core.Map json_) + GoogleCloudAiplatformV1SummarizationHelpfulnessResult.fromJson(core.Map json_) : this( - learningRateParameterName: - json_['learningRateParameterName'] as core.String?, - maxStepCount: json_['maxStepCount'] as core.String?, - minMeasurementCount: json_['minMeasurementCount'] as core.String?, - minStepCount: json_['minStepCount'] as core.String?, - updateAllStoppedTrials: json_['updateAllStoppedTrials'] as core.bool?, - useElapsedDuration: json_['useElapsedDuration'] as core.bool?, + confidence: (json_['confidence'] as core.num?)?.toDouble(), + explanation: json_['explanation'] as core.String?, + score: (json_['score'] as core.num?)?.toDouble(), ); core.Map toJson() => { - if (learningRateParameterName != null) - 'learningRateParameterName': learningRateParameterName!, - if (maxStepCount != null) 'maxStepCount': maxStepCount!, - if (minMeasurementCount != null) - 'minMeasurementCount': minMeasurementCount!, - if (minStepCount != null) 'minStepCount': minStepCount!, - if (updateAllStoppedTrials != null) - 'updateAllStoppedTrials': updateAllStoppedTrials!, - if (useElapsedDuration != null) - 'useElapsedDuration': useElapsedDuration!, + if (confidence != null) 'confidence': confidence!, + if (explanation != null) 'explanation': explanation!, + if (score != null) 'score': score!, }; } -/// The decay curve automated stopping rule builds a Gaussian Process Regressor -/// to predict the final objective value of a Trial based on the already -/// completed Trials and the intermediate measurements of the current Trial. -/// -/// Early stopping is requested for the current Trial if there is very low -/// probability to exceed the optimal value found so far. -class GoogleCloudAiplatformV1StudySpecDecayCurveAutomatedStoppingSpec { - /// True if Measurement.elapsed_duration is used as the x-axis of each Trials - /// Decay Curve. 
+/// Spec for summarization helpfulness score metric. +class GoogleCloudAiplatformV1SummarizationHelpfulnessSpec { + /// Whether to use instance.reference to compute summarization helpfulness. /// - /// Otherwise, Measurement.step_count will be used as the x-axis. - core.bool? useElapsedDuration; + /// Optional. + core.bool? useReference; - GoogleCloudAiplatformV1StudySpecDecayCurveAutomatedStoppingSpec({ - this.useElapsedDuration, + /// Which version to use for evaluation. + /// + /// Optional. + core.int? version; + + GoogleCloudAiplatformV1SummarizationHelpfulnessSpec({ + this.useReference, + this.version, }); - GoogleCloudAiplatformV1StudySpecDecayCurveAutomatedStoppingSpec.fromJson( - core.Map json_) + GoogleCloudAiplatformV1SummarizationHelpfulnessSpec.fromJson(core.Map json_) : this( - useElapsedDuration: json_['useElapsedDuration'] as core.bool?, + useReference: json_['useReference'] as core.bool?, + version: json_['version'] as core.int?, ); core.Map toJson() => { - if (useElapsedDuration != null) - 'useElapsedDuration': useElapsedDuration!, + if (useReference != null) 'useReference': useReference!, + if (version != null) 'version': version!, }; } -/// The median automated stopping rule stops a pending Trial if the Trial's best -/// objective_value is strictly below the median 'performance' of all completed -/// Trials reported up to the Trial's last measurement. -/// -/// Currently, 'performance' refers to the running average of the objective -/// values reported by the Trial in each measurement. -class GoogleCloudAiplatformV1StudySpecMedianAutomatedStoppingSpec { - /// True if median automated stopping rule applies on - /// Measurement.elapsed_duration. +/// Input for summarization quality metric. +class GoogleCloudAiplatformV1SummarizationQualityInput { + /// Summarization quality instance. /// - /// It means that elapsed_duration field of latest measurement of current - /// Trial is used to compute median objective value for each completed Trials. - core.bool? useElapsedDuration; + /// Required. + GoogleCloudAiplatformV1SummarizationQualityInstance? instance; - GoogleCloudAiplatformV1StudySpecMedianAutomatedStoppingSpec({ - this.useElapsedDuration, + /// Spec for summarization quality score metric. + /// + /// Required. + GoogleCloudAiplatformV1SummarizationQualitySpec? metricSpec; + + GoogleCloudAiplatformV1SummarizationQualityInput({ + this.instance, + this.metricSpec, }); - GoogleCloudAiplatformV1StudySpecMedianAutomatedStoppingSpec.fromJson( - core.Map json_) + GoogleCloudAiplatformV1SummarizationQualityInput.fromJson(core.Map json_) : this( - useElapsedDuration: json_['useElapsedDuration'] as core.bool?, + instance: json_.containsKey('instance') + ? GoogleCloudAiplatformV1SummarizationQualityInstance.fromJson( + json_['instance'] as core.Map) + : null, + metricSpec: json_.containsKey('metricSpec') + ? GoogleCloudAiplatformV1SummarizationQualitySpec.fromJson( + json_['metricSpec'] as core.Map) + : null, ); core.Map toJson() => { - if (useElapsedDuration != null) - 'useElapsedDuration': useElapsedDuration!, + if (instance != null) 'instance': instance!, + if (metricSpec != null) 'metricSpec': metricSpec!, }; } -/// Represents a metric to optimize. -class GoogleCloudAiplatformV1StudySpecMetricSpec { - /// The optimization goal of the metric. +/// Spec for summarization quality instance. +class GoogleCloudAiplatformV1SummarizationQualityInstance { + /// Text to be summarized. /// /// Required. 
- /// Possible string values are: - /// - "GOAL_TYPE_UNSPECIFIED" : Goal Type will default to maximize. - /// - "MAXIMIZE" : Maximize the goal metric. - /// - "MINIMIZE" : Minimize the goal metric. - core.String? goal; + core.String? context; - /// The ID of the metric. + /// Summarization prompt for LLM. /// - /// Must not contain whitespaces and must be unique amongst all MetricSpecs. + /// Required. + core.String? instruction; + + /// Output of the evaluated model. /// /// Required. - core.String? metricId; + core.String? prediction; - /// Used for safe search. + /// Ground truth used to compare against the prediction. /// - /// In the case, the metric will be a safety metric. You must provide a - /// separate metric for objective metric. - GoogleCloudAiplatformV1StudySpecMetricSpecSafetyMetricConfig? safetyConfig; + /// Optional. + core.String? reference; - GoogleCloudAiplatformV1StudySpecMetricSpec({ - this.goal, - this.metricId, - this.safetyConfig, + GoogleCloudAiplatformV1SummarizationQualityInstance({ + this.context, + this.instruction, + this.prediction, + this.reference, }); - GoogleCloudAiplatformV1StudySpecMetricSpec.fromJson(core.Map json_) + GoogleCloudAiplatformV1SummarizationQualityInstance.fromJson(core.Map json_) : this( - goal: json_['goal'] as core.String?, - metricId: json_['metricId'] as core.String?, - safetyConfig: json_.containsKey('safetyConfig') - ? GoogleCloudAiplatformV1StudySpecMetricSpecSafetyMetricConfig - .fromJson(json_['safetyConfig'] - as core.Map) - : null, + context: json_['context'] as core.String?, + instruction: json_['instruction'] as core.String?, + prediction: json_['prediction'] as core.String?, + reference: json_['reference'] as core.String?, ); core.Map toJson() => { - if (goal != null) 'goal': goal!, - if (metricId != null) 'metricId': metricId!, - if (safetyConfig != null) 'safetyConfig': safetyConfig!, + if (context != null) 'context': context!, + if (instruction != null) 'instruction': instruction!, + if (prediction != null) 'prediction': prediction!, + if (reference != null) 'reference': reference!, }; } -/// Used in safe optimization to specify threshold levels and risk tolerance. -class GoogleCloudAiplatformV1StudySpecMetricSpecSafetyMetricConfig { - /// Desired minimum fraction of safe trials (over total number of trials) that - /// should be targeted by the algorithm at any time during the study (best - /// effort). +/// Spec for summarization quality result. +class GoogleCloudAiplatformV1SummarizationQualityResult { + /// Confidence for summarization quality score. /// - /// This should be between 0.0 and 1.0 and a value of 0.0 means that there is - /// no minimum and an algorithm proceeds without targeting any specific - /// fraction. A value of 1.0 means that the algorithm attempts to only Suggest - /// safe Trials. - core.double? desiredMinSafeTrialsFraction; + /// Output only. + core.double? confidence; - /// Safety threshold (boundary value between safe and unsafe). + /// Explanation for summarization quality score. /// - /// NOTE that if you leave SafetyMetricConfig unset, a default value of 0 will - /// be used. - core.double? safetyThreshold; + /// Output only. + core.String? explanation; - GoogleCloudAiplatformV1StudySpecMetricSpecSafetyMetricConfig({ - this.desiredMinSafeTrialsFraction, - this.safetyThreshold, + /// Summarization Quality score. + /// + /// Output only. + core.double? 
score; + + GoogleCloudAiplatformV1SummarizationQualityResult({ + this.confidence, + this.explanation, + this.score, }); - GoogleCloudAiplatformV1StudySpecMetricSpecSafetyMetricConfig.fromJson( - core.Map json_) + GoogleCloudAiplatformV1SummarizationQualityResult.fromJson(core.Map json_) : this( - desiredMinSafeTrialsFraction: - (json_['desiredMinSafeTrialsFraction'] as core.num?)?.toDouble(), - safetyThreshold: (json_['safetyThreshold'] as core.num?)?.toDouble(), + confidence: (json_['confidence'] as core.num?)?.toDouble(), + explanation: json_['explanation'] as core.String?, + score: (json_['score'] as core.num?)?.toDouble(), ); core.Map toJson() => { - if (desiredMinSafeTrialsFraction != null) - 'desiredMinSafeTrialsFraction': desiredMinSafeTrialsFraction!, - if (safetyThreshold != null) 'safetyThreshold': safetyThreshold!, + if (confidence != null) 'confidence': confidence!, + if (explanation != null) 'explanation': explanation!, + if (score != null) 'score': score!, }; } -/// Represents a single parameter to optimize. -class GoogleCloudAiplatformV1StudySpecParameterSpec { - /// The value spec for a 'CATEGORICAL' parameter. - GoogleCloudAiplatformV1StudySpecParameterSpecCategoricalValueSpec? - categoricalValueSpec; - - /// A conditional parameter node is active if the parameter's value matches - /// the conditional node's parent_value_condition. - /// - /// If two items in conditional_parameter_specs have the same name, they must - /// have disjoint parent_value_condition. - core.List< - GoogleCloudAiplatformV1StudySpecParameterSpecConditionalParameterSpec>? - conditionalParameterSpecs; - - /// The value spec for a 'DISCRETE' parameter. - GoogleCloudAiplatformV1StudySpecParameterSpecDiscreteValueSpec? - discreteValueSpec; - - /// The value spec for a 'DOUBLE' parameter. - GoogleCloudAiplatformV1StudySpecParameterSpecDoubleValueSpec? doubleValueSpec; - - /// The value spec for an 'INTEGER' parameter. - GoogleCloudAiplatformV1StudySpecParameterSpecIntegerValueSpec? - integerValueSpec; - - /// The ID of the parameter. - /// - /// Must not contain whitespaces and must be unique amongst all - /// ParameterSpecs. - /// - /// Required. - core.String? parameterId; - - /// How the parameter should be scaled. - /// - /// Leave unset for `CATEGORICAL` parameters. - /// Possible string values are: - /// - "SCALE_TYPE_UNSPECIFIED" : By default, no scaling is applied. - /// - "UNIT_LINEAR_SCALE" : Scales the feasible space to (0, 1) linearly. - /// - "UNIT_LOG_SCALE" : Scales the feasible space logarithmically to (0, 1). - /// The entire feasible space must be strictly positive. - /// - "UNIT_REVERSE_LOG_SCALE" : Scales the feasible space "reverse" - /// logarithmically to (0, 1). The result is that values close to the top of - /// the feasible space are spread out more than points near the bottom. The - /// entire feasible space must be strictly positive. - core.String? scaleType; +/// Spec for summarization quality score metric. +class GoogleCloudAiplatformV1SummarizationQualitySpec { + /// Whether to use instance.reference to compute summarization quality. + /// + /// Optional. + core.bool? useReference; - GoogleCloudAiplatformV1StudySpecParameterSpec({ - this.categoricalValueSpec, - this.conditionalParameterSpecs, - this.discreteValueSpec, - this.doubleValueSpec, - this.integerValueSpec, - this.parameterId, - this.scaleType, + /// Which version to use for evaluation. + /// + /// Optional. + core.int? 
version; + + GoogleCloudAiplatformV1SummarizationQualitySpec({ + this.useReference, + this.version, }); - GoogleCloudAiplatformV1StudySpecParameterSpec.fromJson(core.Map json_) + GoogleCloudAiplatformV1SummarizationQualitySpec.fromJson(core.Map json_) : this( - categoricalValueSpec: json_.containsKey('categoricalValueSpec') - ? GoogleCloudAiplatformV1StudySpecParameterSpecCategoricalValueSpec - .fromJson(json_['categoricalValueSpec'] - as core.Map) - : null, - conditionalParameterSpecs: (json_['conditionalParameterSpecs'] - as core.List?) - ?.map((value) => - GoogleCloudAiplatformV1StudySpecParameterSpecConditionalParameterSpec - .fromJson(value as core.Map)) - .toList(), - discreteValueSpec: json_.containsKey('discreteValueSpec') - ? GoogleCloudAiplatformV1StudySpecParameterSpecDiscreteValueSpec - .fromJson(json_['discreteValueSpec'] - as core.Map) - : null, - doubleValueSpec: json_.containsKey('doubleValueSpec') - ? GoogleCloudAiplatformV1StudySpecParameterSpecDoubleValueSpec - .fromJson(json_['doubleValueSpec'] - as core.Map) - : null, - integerValueSpec: json_.containsKey('integerValueSpec') - ? GoogleCloudAiplatformV1StudySpecParameterSpecIntegerValueSpec - .fromJson(json_['integerValueSpec'] - as core.Map) - : null, - parameterId: json_['parameterId'] as core.String?, - scaleType: json_['scaleType'] as core.String?, + useReference: json_['useReference'] as core.bool?, + version: json_['version'] as core.int?, ); core.Map toJson() => { - if (categoricalValueSpec != null) - 'categoricalValueSpec': categoricalValueSpec!, - if (conditionalParameterSpecs != null) - 'conditionalParameterSpecs': conditionalParameterSpecs!, - if (discreteValueSpec != null) 'discreteValueSpec': discreteValueSpec!, - if (doubleValueSpec != null) 'doubleValueSpec': doubleValueSpec!, - if (integerValueSpec != null) 'integerValueSpec': integerValueSpec!, - if (parameterId != null) 'parameterId': parameterId!, - if (scaleType != null) 'scaleType': scaleType!, + if (useReference != null) 'useReference': useReference!, + if (version != null) 'version': version!, }; } -/// Value specification for a parameter in `CATEGORICAL` type. -class GoogleCloudAiplatformV1StudySpecParameterSpecCategoricalValueSpec { - /// A default value for a `CATEGORICAL` parameter that is assumed to be a - /// relatively good starting point. +/// Input for summarization verbosity metric. +class GoogleCloudAiplatformV1SummarizationVerbosityInput { + /// Summarization verbosity instance. /// - /// Unset value signals that there is no offered starting point. Currently - /// only supported by the Vertex AI Vizier service. Not supported by - /// HyperparameterTuningJob or TrainingPipeline. - core.String? defaultValue; + /// Required. + GoogleCloudAiplatformV1SummarizationVerbosityInstance? instance; - /// The list of possible categories. + /// Spec for summarization verbosity score metric. /// /// Required. - core.List? values; + GoogleCloudAiplatformV1SummarizationVerbositySpec? metricSpec; - GoogleCloudAiplatformV1StudySpecParameterSpecCategoricalValueSpec({ - this.defaultValue, - this.values, + GoogleCloudAiplatformV1SummarizationVerbosityInput({ + this.instance, + this.metricSpec, }); - GoogleCloudAiplatformV1StudySpecParameterSpecCategoricalValueSpec.fromJson( - core.Map json_) + GoogleCloudAiplatformV1SummarizationVerbosityInput.fromJson(core.Map json_) : this( - defaultValue: json_['defaultValue'] as core.String?, - values: (json_['values'] as core.List?) 
- ?.map((value) => value as core.String) - .toList(), + instance: json_.containsKey('instance') + ? GoogleCloudAiplatformV1SummarizationVerbosityInstance.fromJson( + json_['instance'] as core.Map) + : null, + metricSpec: json_.containsKey('metricSpec') + ? GoogleCloudAiplatformV1SummarizationVerbositySpec.fromJson( + json_['metricSpec'] as core.Map) + : null, ); core.Map toJson() => { - if (defaultValue != null) 'defaultValue': defaultValue!, - if (values != null) 'values': values!, + if (instance != null) 'instance': instance!, + if (metricSpec != null) 'metricSpec': metricSpec!, }; } -/// Represents a parameter spec with condition from its parent parameter. -class GoogleCloudAiplatformV1StudySpecParameterSpecConditionalParameterSpec { - /// The spec for a conditional parameter. - /// - /// Required. - GoogleCloudAiplatformV1StudySpecParameterSpec? parameterSpec; +/// Spec for summarization verbosity instance. +typedef GoogleCloudAiplatformV1SummarizationVerbosityInstance = $Instance04; - /// The spec for matching values from a parent parameter of `CATEGORICAL` - /// type. - GoogleCloudAiplatformV1StudySpecParameterSpecConditionalParameterSpecCategoricalValueCondition? - parentCategoricalValues; +/// Spec for summarization verbosity result. +class GoogleCloudAiplatformV1SummarizationVerbosityResult { + /// Confidence for summarization verbosity score. + /// + /// Output only. + core.double? confidence; - /// The spec for matching values from a parent parameter of `DISCRETE` type. - GoogleCloudAiplatformV1StudySpecParameterSpecConditionalParameterSpecDiscreteValueCondition? - parentDiscreteValues; + /// Explanation for summarization verbosity score. + /// + /// Output only. + core.String? explanation; - /// The spec for matching values from a parent parameter of `INTEGER` type. - GoogleCloudAiplatformV1StudySpecParameterSpecConditionalParameterSpecIntValueCondition? - parentIntValues; + /// Summarization Verbosity score. + /// + /// Output only. + core.double? score; - GoogleCloudAiplatformV1StudySpecParameterSpecConditionalParameterSpec({ - this.parameterSpec, - this.parentCategoricalValues, - this.parentDiscreteValues, - this.parentIntValues, + GoogleCloudAiplatformV1SummarizationVerbosityResult({ + this.confidence, + this.explanation, + this.score, }); - GoogleCloudAiplatformV1StudySpecParameterSpecConditionalParameterSpec.fromJson( - core.Map json_) + GoogleCloudAiplatformV1SummarizationVerbosityResult.fromJson(core.Map json_) : this( - parameterSpec: json_.containsKey('parameterSpec') - ? GoogleCloudAiplatformV1StudySpecParameterSpec.fromJson( - json_['parameterSpec'] as core.Map) - : null, - parentCategoricalValues: json_.containsKey('parentCategoricalValues') - ? GoogleCloudAiplatformV1StudySpecParameterSpecConditionalParameterSpecCategoricalValueCondition - .fromJson(json_['parentCategoricalValues'] - as core.Map) - : null, - parentDiscreteValues: json_.containsKey('parentDiscreteValues') - ? GoogleCloudAiplatformV1StudySpecParameterSpecConditionalParameterSpecDiscreteValueCondition - .fromJson(json_['parentDiscreteValues'] - as core.Map) - : null, - parentIntValues: json_.containsKey('parentIntValues') - ? 
GoogleCloudAiplatformV1StudySpecParameterSpecConditionalParameterSpecIntValueCondition - .fromJson(json_['parentIntValues'] - as core.Map) - : null, + confidence: (json_['confidence'] as core.num?)?.toDouble(), + explanation: json_['explanation'] as core.String?, + score: (json_['score'] as core.num?)?.toDouble(), ); core.Map toJson() => { - if (parameterSpec != null) 'parameterSpec': parameterSpec!, - if (parentCategoricalValues != null) - 'parentCategoricalValues': parentCategoricalValues!, - if (parentDiscreteValues != null) - 'parentDiscreteValues': parentDiscreteValues!, - if (parentIntValues != null) 'parentIntValues': parentIntValues!, + if (confidence != null) 'confidence': confidence!, + if (explanation != null) 'explanation': explanation!, + if (score != null) 'score': score!, }; } -/// Represents the spec to match categorical values from parent parameter. -class GoogleCloudAiplatformV1StudySpecParameterSpecConditionalParameterSpecCategoricalValueCondition { - /// Matches values of the parent parameter of 'CATEGORICAL' type. +/// Spec for summarization verbosity score metric. +class GoogleCloudAiplatformV1SummarizationVerbositySpec { + /// Whether to use instance.reference to compute summarization verbosity. /// - /// All values must exist in `categorical_value_spec` of parent parameter. + /// Optional. + core.bool? useReference; + + /// Which version to use for evaluation. /// - /// Required. - core.List? values; + /// Optional. + core.int? version; - GoogleCloudAiplatformV1StudySpecParameterSpecConditionalParameterSpecCategoricalValueCondition({ - this.values, + GoogleCloudAiplatformV1SummarizationVerbositySpec({ + this.useReference, + this.version, }); - GoogleCloudAiplatformV1StudySpecParameterSpecConditionalParameterSpecCategoricalValueCondition.fromJson( - core.Map json_) + GoogleCloudAiplatformV1SummarizationVerbositySpec.fromJson(core.Map json_) : this( - values: (json_['values'] as core.List?) - ?.map((value) => value as core.String) - .toList(), + useReference: json_['useReference'] as core.bool?, + version: json_['version'] as core.int?, ); core.Map toJson() => { - if (values != null) 'values': values!, + if (useReference != null) 'useReference': useReference!, + if (version != null) 'version': version!, }; } -/// Represents the spec to match discrete values from parent parameter. -class GoogleCloudAiplatformV1StudySpecParameterSpecConditionalParameterSpecDiscreteValueCondition { - /// Matches values of the parent parameter of 'DISCRETE' type. +/// Hyperparameters for SFT. +class GoogleCloudAiplatformV1SupervisedHyperParameters { + /// Adapter size for tuning. /// - /// All values must exist in `discrete_value_spec` of parent parameter. The - /// Epsilon of the value matching is 1e-10. + /// Optional. + /// Possible string values are: + /// - "ADAPTER_SIZE_UNSPECIFIED" : Adapter size is unspecified. + /// - "ADAPTER_SIZE_ONE" : Adapter size 1. + /// - "ADAPTER_SIZE_FOUR" : Adapter size 4. + /// - "ADAPTER_SIZE_EIGHT" : Adapter size 8. + /// - "ADAPTER_SIZE_SIXTEEN" : Adapter size 16. + /// - "ADAPTER_SIZE_THIRTY_TWO" : Adapter size 32. + core.String? adapterSize; + + /// Number of complete passes the model makes over the entire training dataset + /// during training. /// - /// Required. - core.List? values; + /// Optional. + core.String? epochCount; - GoogleCloudAiplatformV1StudySpecParameterSpecConditionalParameterSpecDiscreteValueCondition({ - this.values, + /// Multiplier for adjusting the default learning rate. + /// + /// Optional. + core.double? 
learningRateMultiplier; + + GoogleCloudAiplatformV1SupervisedHyperParameters({ + this.adapterSize, + this.epochCount, + this.learningRateMultiplier, }); - GoogleCloudAiplatformV1StudySpecParameterSpecConditionalParameterSpecDiscreteValueCondition.fromJson( - core.Map json_) + GoogleCloudAiplatformV1SupervisedHyperParameters.fromJson(core.Map json_) : this( - values: (json_['values'] as core.List?) - ?.map((value) => (value as core.num).toDouble()) - .toList(), + adapterSize: json_['adapterSize'] as core.String?, + epochCount: json_['epochCount'] as core.String?, + learningRateMultiplier: + (json_['learningRateMultiplier'] as core.num?)?.toDouble(), ); core.Map toJson() => { - if (values != null) 'values': values!, + if (adapterSize != null) 'adapterSize': adapterSize!, + if (epochCount != null) 'epochCount': epochCount!, + if (learningRateMultiplier != null) + 'learningRateMultiplier': learningRateMultiplier!, }; } -/// Represents the spec to match integer values from parent parameter. -class GoogleCloudAiplatformV1StudySpecParameterSpecConditionalParameterSpecIntValueCondition { - /// Matches values of the parent parameter of 'INTEGER' type. +/// Tuning data statistics for Supervised Tuning. +class GoogleCloudAiplatformV1SupervisedTuningDataStats { + /// Number of billable characters in the tuning dataset. /// - /// All values must lie in `integer_value_spec` of parent parameter. + /// Output only. + @core.Deprecated( + 'Not supported. Member documentation may have more information.', + ) + core.String? totalBillableCharacterCount; + + /// Number of billable tokens in the tuning dataset. /// - /// Required. - core.List? values; + /// Output only. + core.String? totalBillableTokenCount; - GoogleCloudAiplatformV1StudySpecParameterSpecConditionalParameterSpecIntValueCondition({ - this.values, + /// The number of examples in the dataset that have been truncated by any + /// amount. + core.String? totalTruncatedExampleCount; + + /// Number of tuning characters in the tuning dataset. + /// + /// Output only. + core.String? totalTuningCharacterCount; + + /// A partial sample of the indices (starting from 1) of the truncated + /// examples. + core.List? truncatedExampleIndices; + + /// Number of examples in the tuning dataset. + /// + /// Output only. + core.String? tuningDatasetExampleCount; + + /// Number of tuning steps for this Tuning Job. + /// + /// Output only. + core.String? tuningStepCount; + + /// Sample user messages in the training dataset uri. + /// + /// Output only. + core.List? userDatasetExamples; + + /// Dataset distributions for the user input tokens. + /// + /// Output only. + GoogleCloudAiplatformV1SupervisedTuningDatasetDistribution? + userInputTokenDistribution; + + /// Dataset distributions for the messages per example. + /// + /// Output only. + GoogleCloudAiplatformV1SupervisedTuningDatasetDistribution? + userMessagePerExampleDistribution; + + /// Dataset distributions for the user output tokens. + /// + /// Output only. + GoogleCloudAiplatformV1SupervisedTuningDatasetDistribution? 
+ userOutputTokenDistribution; + + GoogleCloudAiplatformV1SupervisedTuningDataStats({ + this.totalBillableCharacterCount, + this.totalBillableTokenCount, + this.totalTruncatedExampleCount, + this.totalTuningCharacterCount, + this.truncatedExampleIndices, + this.tuningDatasetExampleCount, + this.tuningStepCount, + this.userDatasetExamples, + this.userInputTokenDistribution, + this.userMessagePerExampleDistribution, + this.userOutputTokenDistribution, }); - GoogleCloudAiplatformV1StudySpecParameterSpecConditionalParameterSpecIntValueCondition.fromJson( - core.Map json_) + GoogleCloudAiplatformV1SupervisedTuningDataStats.fromJson(core.Map json_) : this( - values: (json_['values'] as core.List?) - ?.map((value) => value as core.String) + totalBillableCharacterCount: + json_['totalBillableCharacterCount'] as core.String?, + totalBillableTokenCount: + json_['totalBillableTokenCount'] as core.String?, + totalTruncatedExampleCount: + json_['totalTruncatedExampleCount'] as core.String?, + totalTuningCharacterCount: + json_['totalTuningCharacterCount'] as core.String?, + truncatedExampleIndices: + (json_['truncatedExampleIndices'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + tuningDatasetExampleCount: + json_['tuningDatasetExampleCount'] as core.String?, + tuningStepCount: json_['tuningStepCount'] as core.String?, + userDatasetExamples: (json_['userDatasetExamples'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1Content.fromJson( + value as core.Map)) .toList(), + userInputTokenDistribution: + json_.containsKey('userInputTokenDistribution') + ? GoogleCloudAiplatformV1SupervisedTuningDatasetDistribution + .fromJson(json_['userInputTokenDistribution'] + as core.Map) + : null, + userMessagePerExampleDistribution: + json_.containsKey('userMessagePerExampleDistribution') + ? GoogleCloudAiplatformV1SupervisedTuningDatasetDistribution + .fromJson(json_['userMessagePerExampleDistribution'] + as core.Map) + : null, + userOutputTokenDistribution: + json_.containsKey('userOutputTokenDistribution') + ? GoogleCloudAiplatformV1SupervisedTuningDatasetDistribution + .fromJson(json_['userOutputTokenDistribution'] + as core.Map) + : null, ); core.Map toJson() => { - if (values != null) 'values': values!, + if (totalBillableCharacterCount != null) + 'totalBillableCharacterCount': totalBillableCharacterCount!, + if (totalBillableTokenCount != null) + 'totalBillableTokenCount': totalBillableTokenCount!, + if (totalTruncatedExampleCount != null) + 'totalTruncatedExampleCount': totalTruncatedExampleCount!, + if (totalTuningCharacterCount != null) + 'totalTuningCharacterCount': totalTuningCharacterCount!, + if (truncatedExampleIndices != null) + 'truncatedExampleIndices': truncatedExampleIndices!, + if (tuningDatasetExampleCount != null) + 'tuningDatasetExampleCount': tuningDatasetExampleCount!, + if (tuningStepCount != null) 'tuningStepCount': tuningStepCount!, + if (userDatasetExamples != null) + 'userDatasetExamples': userDatasetExamples!, + if (userInputTokenDistribution != null) + 'userInputTokenDistribution': userInputTokenDistribution!, + if (userMessagePerExampleDistribution != null) + 'userMessagePerExampleDistribution': + userMessagePerExampleDistribution!, + if (userOutputTokenDistribution != null) + 'userOutputTokenDistribution': userOutputTokenDistribution!, }; } -/// Value specification for a parameter in `DISCRETE` type. 
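
Worth noting about the SupervisedTuningDataStats message just added above: its 64-bit counters (totalBillableTokenCount, tuningDatasetExampleCount, tuningStepCount, the truncated-example indices) surface as Dart Strings in this generated code, so callers parse them before doing arithmetic. A hedged sketch under that reading, again assuming the package:googleapis/aiplatform/v1.dart import and made-up numbers:

import 'package:googleapis/aiplatform/v1.dart';

void main() {
  // Illustrative response fragment; int64 counters arrive as strings.
  final stats = GoogleCloudAiplatformV1SupervisedTuningDataStats.fromJson({
    'totalBillableTokenCount': '1250000',
    'tuningDatasetExampleCount': '2048',
    'tuningStepCount': '600',
    'truncatedExampleIndices': ['17', '403'],
  });

  // Parse the string-encoded counters before using them numerically.
  final tokens = int.parse(stats.totalBillableTokenCount ?? '0');
  final examples = int.parse(stats.tuningDatasetExampleCount ?? '0');
  print('Avg billable tokens per example: ${tokens / examples}');

  // Truncated examples are reported as a partial sample of 1-based indices.
  for (final index in stats.truncatedExampleIndices ?? const <String>[]) {
    print('Example $index was truncated');
  }
}
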
-class GoogleCloudAiplatformV1StudySpecParameterSpecDiscreteValueSpec { - /// A default value for a `DISCRETE` parameter that is assumed to be a - /// relatively good starting point. +/// Dataset distribution for Supervised Tuning. +class GoogleCloudAiplatformV1SupervisedTuningDatasetDistribution { + /// Sum of a given population of values that are billable. /// - /// Unset value signals that there is no offered starting point. It - /// automatically rounds to the nearest feasible discrete point. Currently - /// only supported by the Vertex AI Vizier service. Not supported by - /// HyperparameterTuningJob or TrainingPipeline. - core.double? defaultValue; + /// Output only. + core.String? billableSum; - /// A list of possible values. + /// Defines the histogram bucket. /// - /// The list should be in increasing order and at least 1e-10 apart. For - /// instance, this parameter might have possible settings of 1.5, 2.5, and - /// 4.0. This list should not contain more than 1,000 values. + /// Output only. + core.List< + GoogleCloudAiplatformV1SupervisedTuningDatasetDistributionDatasetBucket>? + buckets; + + /// The maximum of the population values. /// - /// Required. - core.List? values; + /// Output only. + core.double? max; - GoogleCloudAiplatformV1StudySpecParameterSpecDiscreteValueSpec({ - this.defaultValue, - this.values, + /// The arithmetic mean of the values in the population. + /// + /// Output only. + core.double? mean; + + /// The median of the values in the population. + /// + /// Output only. + core.double? median; + + /// The minimum of the population values. + /// + /// Output only. + core.double? min; + + /// The 5th percentile of the values in the population. + /// + /// Output only. + core.double? p5; + + /// The 95th percentile of the values in the population. + /// + /// Output only. + core.double? p95; + + /// Sum of a given population of values. + /// + /// Output only. + core.String? sum; + + GoogleCloudAiplatformV1SupervisedTuningDatasetDistribution({ + this.billableSum, + this.buckets, + this.max, + this.mean, + this.median, + this.min, + this.p5, + this.p95, + this.sum, }); - GoogleCloudAiplatformV1StudySpecParameterSpecDiscreteValueSpec.fromJson( + GoogleCloudAiplatformV1SupervisedTuningDatasetDistribution.fromJson( core.Map json_) : this( - defaultValue: (json_['defaultValue'] as core.num?)?.toDouble(), - values: (json_['values'] as core.List?) - ?.map((value) => (value as core.num).toDouble()) + billableSum: json_['billableSum'] as core.String?, + buckets: (json_['buckets'] as core.List?) + ?.map((value) => + GoogleCloudAiplatformV1SupervisedTuningDatasetDistributionDatasetBucket + .fromJson(value as core.Map)) .toList(), + max: (json_['max'] as core.num?)?.toDouble(), + mean: (json_['mean'] as core.num?)?.toDouble(), + median: (json_['median'] as core.num?)?.toDouble(), + min: (json_['min'] as core.num?)?.toDouble(), + p5: (json_['p5'] as core.num?)?.toDouble(), + p95: (json_['p95'] as core.num?)?.toDouble(), + sum: json_['sum'] as core.String?, ); core.Map toJson() => { - if (defaultValue != null) 'defaultValue': defaultValue!, - if (values != null) 'values': values!, + if (billableSum != null) 'billableSum': billableSum!, + if (buckets != null) 'buckets': buckets!, + if (max != null) 'max': max!, + if (mean != null) 'mean': mean!, + if (median != null) 'median': median!, + if (min != null) 'min': min!, + if (p5 != null) 'p5': p5!, + if (p95 != null) 'p95': p95!, + if (sum != null) 'sum': sum!, }; } -/// Value specification for a parameter in `DOUBLE` type. 
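
The SupervisedTuningDatasetDistribution added above is a pre-bucketed histogram plus summary quantiles, so no client-side aggregation is needed to inspect, say, the input-token profile of a tuning dataset. A sketch of reading it (the bucket type used here is the DatasetBucket message added just below; values are made up, and the same package:googleapis/aiplatform/v1.dart import is assumed):

import 'package:googleapis/aiplatform/v1.dart';

void main() {
  final dist =
      GoogleCloudAiplatformV1SupervisedTuningDatasetDistribution.fromJson({
    'min': 12.0,
    'median': 180.0,
    'p95': 750.0,
    'max': 1024.0,
    'buckets': [
      {'left': 0.0, 'right': 256.0, 'count': 1500.0},
      {'left': 256.0, 'right': 1024.0, 'count': 548.0},
    ],
  });

  // Summary quantiles come precomputed on the message.
  print('median=${dist.median}, p95=${dist.p95}, max=${dist.max}');

  // Each bucket carries its bounds and the number of values that fell in it.
  // Non-null here because we set buckets above; real responses may omit it.
  for (final bucket in dist.buckets!) {
    print('left=${bucket.left} right=${bucket.right} count=${bucket.count}');
  }
}
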
-class GoogleCloudAiplatformV1StudySpecParameterSpecDoubleValueSpec { - /// A default value for a `DOUBLE` parameter that is assumed to be a - /// relatively good starting point. +/// Dataset bucket used to create a histogram for the distribution given a +/// population of values. +class GoogleCloudAiplatformV1SupervisedTuningDatasetDistributionDatasetBucket { + /// Number of values in the bucket. /// - /// Unset value signals that there is no offered starting point. Currently - /// only supported by the Vertex AI Vizier service. Not supported by - /// HyperparameterTuningJob or TrainingPipeline. - core.double? defaultValue; + /// Output only. + core.double? count; - /// Inclusive maximum value of the parameter. + /// Left bound of the bucket. /// - /// Required. - core.double? maxValue; + /// Output only. + core.double? left; - /// Inclusive minimum value of the parameter. + /// Right bound of the bucket. /// - /// Required. - core.double? minValue; + /// Output only. + core.double? right; - GoogleCloudAiplatformV1StudySpecParameterSpecDoubleValueSpec({ - this.defaultValue, - this.maxValue, - this.minValue, + GoogleCloudAiplatformV1SupervisedTuningDatasetDistributionDatasetBucket({ + this.count, + this.left, + this.right, }); - GoogleCloudAiplatformV1StudySpecParameterSpecDoubleValueSpec.fromJson( + GoogleCloudAiplatformV1SupervisedTuningDatasetDistributionDatasetBucket.fromJson( core.Map json_) : this( - defaultValue: (json_['defaultValue'] as core.num?)?.toDouble(), - maxValue: (json_['maxValue'] as core.num?)?.toDouble(), - minValue: (json_['minValue'] as core.num?)?.toDouble(), + count: (json_['count'] as core.num?)?.toDouble(), + left: (json_['left'] as core.num?)?.toDouble(), + right: (json_['right'] as core.num?)?.toDouble(), ); core.Map toJson() => { - if (defaultValue != null) 'defaultValue': defaultValue!, - if (maxValue != null) 'maxValue': maxValue!, - if (minValue != null) 'minValue': minValue!, + if (count != null) 'count': count!, + if (left != null) 'left': left!, + if (right != null) 'right': right!, }; } -/// Value specification for a parameter in `INTEGER` type. -class GoogleCloudAiplatformV1StudySpecParameterSpecIntegerValueSpec { - /// A default value for an `INTEGER` parameter that is assumed to be a - /// relatively good starting point. +/// Tuning Spec for Supervised Tuning for first party models. +class GoogleCloudAiplatformV1SupervisedTuningSpec { + /// Hyperparameters for SFT. /// - /// Unset value signals that there is no offered starting point. Currently - /// only supported by the Vertex AI Vizier service. Not supported by - /// HyperparameterTuningJob or TrainingPipeline. - core.String? defaultValue; + /// Optional. + GoogleCloudAiplatformV1SupervisedHyperParameters? hyperParameters; - /// Inclusive maximum value of the parameter. + /// Cloud Storage path to file containing training dataset for tuning. + /// + /// The dataset must be formatted as a JSONL file. /// /// Required. - core.String? maxValue; + core.String? trainingDatasetUri; - /// Inclusive minimum value of the parameter. + /// Cloud Storage path to file containing validation dataset for tuning. /// - /// Required. - core.String? minValue; + /// The dataset must be formatted as a JSONL file. + /// + /// Optional. + core.String? 
validationDatasetUri; - GoogleCloudAiplatformV1StudySpecParameterSpecIntegerValueSpec({ - this.defaultValue, - this.maxValue, - this.minValue, + GoogleCloudAiplatformV1SupervisedTuningSpec({ + this.hyperParameters, + this.trainingDatasetUri, + this.validationDatasetUri, }); - GoogleCloudAiplatformV1StudySpecParameterSpecIntegerValueSpec.fromJson( - core.Map json_) + GoogleCloudAiplatformV1SupervisedTuningSpec.fromJson(core.Map json_) : this( - defaultValue: json_['defaultValue'] as core.String?, - maxValue: json_['maxValue'] as core.String?, - minValue: json_['minValue'] as core.String?, + hyperParameters: json_.containsKey('hyperParameters') + ? GoogleCloudAiplatformV1SupervisedHyperParameters.fromJson( + json_['hyperParameters'] + as core.Map) + : null, + trainingDatasetUri: json_['trainingDatasetUri'] as core.String?, + validationDatasetUri: json_['validationDatasetUri'] as core.String?, ); core.Map toJson() => { - if (defaultValue != null) 'defaultValue': defaultValue!, - if (maxValue != null) 'maxValue': maxValue!, - if (minValue != null) 'minValue': minValue!, + if (hyperParameters != null) 'hyperParameters': hyperParameters!, + if (trainingDatasetUri != null) + 'trainingDatasetUri': trainingDatasetUri!, + if (validationDatasetUri != null) + 'validationDatasetUri': validationDatasetUri!, }; } -/// The configuration (stopping conditions) for automated stopping of a Study. -/// -/// Conditions include trial budgets, time budgets, and convergence detection. -class GoogleCloudAiplatformV1StudySpecStudyStoppingConfig { - /// If the objective value has not improved for this much time, stop the - /// study. - /// - /// WARNING: Effective only for single-objective studies. - core.String? maxDurationNoProgress; - - /// If there are more than this many trials, stop the study. - core.int? maxNumTrials; - - /// If the objective value has not improved for this many consecutive trials, - /// stop the study. - /// - /// WARNING: Effective only for single-objective studies. - core.int? maxNumTrialsNoProgress; - - /// If the specified time or duration has passed, stop the study. - GoogleCloudAiplatformV1StudyTimeConstraint? maximumRuntimeConstraint; - - /// If there are fewer than this many COMPLETED trials, do not stop the study. - core.int? minNumTrials; - - /// Each "stopping rule" in this proto specifies an "if" condition. - /// - /// Before Vizier would generate a new suggestion, it first checks each - /// specified stopping rule, from top to bottom in this list. Note that the - /// first few rules (e.g. minimum_runtime_constraint, min_num_trials) will - /// prevent other stopping rules from being evaluated until they are met. For - /// example, setting `min_num_trials=5` and `always_stop_after= 1 hour` means - /// that the Study will ONLY stop after it has 5 COMPLETED trials, even if - /// more than an hour has passed since its creation. It follows the first - /// applicable rule (whose "if" condition is satisfied) to make a stopping - /// decision. If none of the specified rules are applicable, then Vizier - /// decides that the study should not stop. If Vizier decides that the study - /// should stop, the study enters STOPPING state (or STOPPING_ASAP if - /// should_stop_asap = true). IMPORTANT: The automatic study state transition - /// happens precisely as described above; that is, deleting trials or updating - /// StudyConfig NEVER automatically moves the study state back to ACTIVE. 
If - /// you want to _resume_ a Study that was stopped, 1) change the stopping - /// conditions if necessary, 2) activate the study, and then 3) ask for - /// suggestions. If the specified time or duration has not passed, do not stop - /// the study. - GoogleCloudAiplatformV1StudyTimeConstraint? minimumRuntimeConstraint; +/// Request message for FeatureOnlineStoreAdminService.SyncFeatureView. +typedef GoogleCloudAiplatformV1SyncFeatureViewRequest = $Empty; - /// If true, a Study enters STOPPING_ASAP whenever it would normally enters - /// STOPPING state. - /// - /// The bottom line is: set to true if you want to interrupt on-going - /// evaluations of Trials as soon as the study stopping condition is met. - /// (Please see Study.State documentation for the source of truth). - core.bool? shouldStopAsap; +/// Response message for FeatureOnlineStoreAdminService.SyncFeatureView. +class GoogleCloudAiplatformV1SyncFeatureViewResponse { + /// Format: + /// `projects/{project}/locations/{location}/featureOnlineStores/{feature_online_store}/featureViews/{feature_view}/featureViewSyncs/{feature_view_sync}` + core.String? featureViewSync; - GoogleCloudAiplatformV1StudySpecStudyStoppingConfig({ - this.maxDurationNoProgress, - this.maxNumTrials, - this.maxNumTrialsNoProgress, - this.maximumRuntimeConstraint, - this.minNumTrials, - this.minimumRuntimeConstraint, - this.shouldStopAsap, + GoogleCloudAiplatformV1SyncFeatureViewResponse({ + this.featureViewSync, }); - GoogleCloudAiplatformV1StudySpecStudyStoppingConfig.fromJson(core.Map json_) + GoogleCloudAiplatformV1SyncFeatureViewResponse.fromJson(core.Map json_) : this( - maxDurationNoProgress: json_['maxDurationNoProgress'] as core.String?, - maxNumTrials: json_['maxNumTrials'] as core.int?, - maxNumTrialsNoProgress: json_['maxNumTrialsNoProgress'] as core.int?, - maximumRuntimeConstraint: - json_.containsKey('maximumRuntimeConstraint') - ? GoogleCloudAiplatformV1StudyTimeConstraint.fromJson( - json_['maximumRuntimeConstraint'] - as core.Map) - : null, - minNumTrials: json_['minNumTrials'] as core.int?, - minimumRuntimeConstraint: - json_.containsKey('minimumRuntimeConstraint') - ? GoogleCloudAiplatformV1StudyTimeConstraint.fromJson( - json_['minimumRuntimeConstraint'] - as core.Map) - : null, - shouldStopAsap: json_['shouldStopAsap'] as core.bool?, + featureViewSync: json_['featureViewSync'] as core.String?, ); core.Map toJson() => { - if (maxDurationNoProgress != null) - 'maxDurationNoProgress': maxDurationNoProgress!, - if (maxNumTrials != null) 'maxNumTrials': maxNumTrials!, - if (maxNumTrialsNoProgress != null) - 'maxNumTrialsNoProgress': maxNumTrialsNoProgress!, - if (maximumRuntimeConstraint != null) - 'maximumRuntimeConstraint': maximumRuntimeConstraint!, - if (minNumTrials != null) 'minNumTrials': minNumTrials!, - if (minimumRuntimeConstraint != null) - 'minimumRuntimeConstraint': minimumRuntimeConstraint!, - if (shouldStopAsap != null) 'shouldStopAsap': shouldStopAsap!, + if (featureViewSync != null) 'featureViewSync': featureViewSync!, }; } -/// Time-based Constraint for Study -class GoogleCloudAiplatformV1StudyTimeConstraint { - /// Compares the wallclock time to this time. +/// The storage details for TFRecord output content. +class GoogleCloudAiplatformV1TFRecordDestination { + /// Google Cloud Storage location. /// - /// Must use UTC timezone. - core.String? endTime; - - /// Counts the wallclock time passed since the creation of this Study. - core.String? maxDuration; + /// Required. + GoogleCloudAiplatformV1GcsDestination? 
gcsDestination; - GoogleCloudAiplatformV1StudyTimeConstraint({ - this.endTime, - this.maxDuration, + GoogleCloudAiplatformV1TFRecordDestination({ + this.gcsDestination, }); - GoogleCloudAiplatformV1StudyTimeConstraint.fromJson(core.Map json_) + GoogleCloudAiplatformV1TFRecordDestination.fromJson(core.Map json_) : this( - endTime: json_['endTime'] as core.String?, - maxDuration: json_['maxDuration'] as core.String?, + gcsDestination: json_.containsKey('gcsDestination') + ? GoogleCloudAiplatformV1GcsDestination.fromJson( + json_['gcsDestination'] + as core.Map) + : null, ); core.Map toJson() => { - if (endTime != null) 'endTime': endTime!, - if (maxDuration != null) 'maxDuration': maxDuration!, + if (gcsDestination != null) 'gcsDestination': gcsDestination!, }; } -/// Request message for VizierService.SuggestTrials. -class GoogleCloudAiplatformV1SuggestTrialsRequest { - /// The identifier of the client that is requesting the suggestion. - /// - /// If multiple SuggestTrialsRequests have the same `client_id`, the service - /// will return the identical suggested Trial if the Trial is pending, and - /// provide a new Trial if the last suggested Trial was completed. +/// A tensor value type. +class GoogleCloudAiplatformV1Tensor { + /// Type specific representations that make it easy to create tensor protos in + /// all languages. /// - /// Required. - core.String? clientId; + /// Only the representation corresponding to "dtype" can be set. The values + /// hold the flattened representation of the tensor in row major order. BOOL + core.List? boolVal; - /// This allows you to specify the "context" for a Trial; a context is a slice - /// (a subspace) of the search space. - /// - /// Typical uses for contexts: 1) You are using Vizier to tune a server for - /// best performance, but there's a strong weekly cycle. The context specifies - /// the day-of-week. This allows Tuesday to generalize from Wednesday without - /// assuming that everything is identical. 2) Imagine you're optimizing some - /// medical treatment for people. As they walk in the door, you know certain - /// facts about them (e.g. sex, weight, height, blood-pressure). Put that - /// information in the context, and Vizier will adapt its suggestions to the - /// patient. 3) You want to do a fair A/B test efficiently. Specify the "A" - /// and "B" conditions as contexts, and Vizier will generalize between "A" and - /// "B" conditions. If they are similar, this will allow Vizier to converge to - /// the optimum faster than if "A" and "B" were separate Studies. NOTE: You - /// can also enter contexts as REQUESTED Trials, e.g. via the CreateTrial() - /// RPC; that's the asynchronous option where you don't need a close - /// association between contexts and suggestions. NOTE: All the Parameters you - /// set in a context MUST be defined in the Study. NOTE: You must supply 0 or - /// $suggestion_count contexts. If you don't supply any contexts, Vizier will - /// make suggestions from the full search space specified in the StudySpec; if - /// you supply a full set of context, each suggestion will match the - /// corresponding context. NOTE: A Context with no features set matches - /// anything, and allows suggestions from the full search space. NOTE: - /// Contexts MUST lie within the search space specified in the StudySpec. It's - /// an error if they don't. NOTE: Contexts preferentially match ACTIVE then - /// REQUESTED trials before new suggestions are generated. 
NOTE: Generation of - /// suggestions involves a match between a Context and (optionally) a - /// REQUESTED trial; if that match is not fully specified, a suggestion will - /// be geneated in the merged subspace. - /// - /// Optional. - core.List? contexts; + /// STRING + core.List? bytesVal; - /// The number of suggestions requested. - /// - /// It must be positive. - /// - /// Required. - core.int? suggestionCount; + /// DOUBLE + core.List? doubleVal; - GoogleCloudAiplatformV1SuggestTrialsRequest({ - this.clientId, - this.contexts, - this.suggestionCount, + /// The data type of tensor. + /// Possible string values are: + /// - "DATA_TYPE_UNSPECIFIED" : Not a legal value for DataType. Used to + /// indicate a DataType field has not been set. + /// - "BOOL" : Data types that all computation devices are expected to be + /// capable to support. + /// - "STRING" + /// - "FLOAT" + /// - "DOUBLE" + /// - "INT8" + /// - "INT16" + /// - "INT32" + /// - "INT64" + /// - "UINT8" + /// - "UINT16" + /// - "UINT32" + /// - "UINT64" + core.String? dtype; + + /// FLOAT + core.List? floatVal; + + /// INT64 + core.List? int64Val; + + /// INT_8 INT_16 INT_32 + core.List? intVal; + + /// A list of tensor values. + core.List? listVal; + + /// Shape of the tensor. + core.List? shape; + + /// STRING + core.List? stringVal; + + /// A map of string to tensor. + core.Map? structVal; + + /// Serialized raw tensor content. + core.String? tensorVal; + core.List get tensorValAsBytes => convert.base64.decode(tensorVal!); + + set tensorValAsBytes(core.List bytes_) { + tensorVal = + convert.base64.encode(bytes_).replaceAll('/', '_').replaceAll('+', '-'); + } + + /// UINT64 + core.List? uint64Val; + + /// UINT8 UINT16 UINT32 + core.List? uintVal; + + GoogleCloudAiplatformV1Tensor({ + this.boolVal, + this.bytesVal, + this.doubleVal, + this.dtype, + this.floatVal, + this.int64Val, + this.intVal, + this.listVal, + this.shape, + this.stringVal, + this.structVal, + this.tensorVal, + this.uint64Val, + this.uintVal, }); - GoogleCloudAiplatformV1SuggestTrialsRequest.fromJson(core.Map json_) + GoogleCloudAiplatformV1Tensor.fromJson(core.Map json_) : this( - clientId: json_['clientId'] as core.String?, - contexts: (json_['contexts'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1TrialContext.fromJson( + boolVal: (json_['boolVal'] as core.List?) + ?.map((value) => value as core.bool) + .toList(), + bytesVal: (json_['bytesVal'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + doubleVal: (json_['doubleVal'] as core.List?) + ?.map((value) => (value as core.num).toDouble()) + .toList(), + dtype: json_['dtype'] as core.String?, + floatVal: (json_['floatVal'] as core.List?) + ?.map((value) => (value as core.num).toDouble()) + .toList(), + int64Val: (json_['int64Val'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + intVal: (json_['intVal'] as core.List?) + ?.map((value) => value as core.int) + .toList(), + listVal: (json_['listVal'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1Tensor.fromJson( value as core.Map)) .toList(), - suggestionCount: json_['suggestionCount'] as core.int?, + shape: (json_['shape'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + stringVal: (json_['stringVal'] as core.List?) 
+ ?.map((value) => value as core.String) + .toList(), + structVal: + (json_['structVal'] as core.Map?)?.map( + (key, value) => core.MapEntry( + key, + GoogleCloudAiplatformV1Tensor.fromJson( + value as core.Map), + ), + ), + tensorVal: json_['tensorVal'] as core.String?, + uint64Val: (json_['uint64Val'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + uintVal: (json_['uintVal'] as core.List?) + ?.map((value) => value as core.int) + .toList(), ); core.Map toJson() => { - if (clientId != null) 'clientId': clientId!, - if (contexts != null) 'contexts': contexts!, - if (suggestionCount != null) 'suggestionCount': suggestionCount!, + if (boolVal != null) 'boolVal': boolVal!, + if (bytesVal != null) 'bytesVal': bytesVal!, + if (doubleVal != null) 'doubleVal': doubleVal!, + if (dtype != null) 'dtype': dtype!, + if (floatVal != null) 'floatVal': floatVal!, + if (int64Val != null) 'int64Val': int64Val!, + if (intVal != null) 'intVal': intVal!, + if (listVal != null) 'listVal': listVal!, + if (shape != null) 'shape': shape!, + if (stringVal != null) 'stringVal': stringVal!, + if (structVal != null) 'structVal': structVal!, + if (tensorVal != null) 'tensorVal': tensorVal!, + if (uint64Val != null) 'uint64Val': uint64Val!, + if (uintVal != null) 'uintVal': uintVal!, }; } -/// Input for summarization helpfulness metric. -class GoogleCloudAiplatformV1SummarizationHelpfulnessInput { - /// Summarization helpfulness instance. +/// Tensorboard is a physical database that stores users' training metrics. +/// +/// A default Tensorboard is provided in each region of a Google Cloud project. +/// If needed users can also create extra Tensorboards in their projects. +class GoogleCloudAiplatformV1Tensorboard { + /// Consumer project Cloud Storage path prefix used to store blob data, which + /// can either be a bucket or directory. /// - /// Required. - GoogleCloudAiplatformV1SummarizationHelpfulnessInstance? instance; + /// Does not end with a '/'. + /// + /// Output only. + core.String? blobStoragePathPrefix; - /// Spec for summarization helpfulness score metric. + /// Timestamp when this Tensorboard was created. + /// + /// Output only. + core.String? createTime; + + /// Description of this Tensorboard. + core.String? description; + + /// User provided name of this Tensorboard. /// /// Required. - GoogleCloudAiplatformV1SummarizationHelpfulnessSpec? metricSpec; + core.String? displayName; - GoogleCloudAiplatformV1SummarizationHelpfulnessInput({ - this.instance, - this.metricSpec, - }); + /// Customer-managed encryption key spec for a Tensorboard. + /// + /// If set, this Tensorboard and all sub-resources of this Tensorboard will be + /// secured by this key. + GoogleCloudAiplatformV1EncryptionSpec? encryptionSpec; - GoogleCloudAiplatformV1SummarizationHelpfulnessInput.fromJson(core.Map json_) - : this( - instance: json_.containsKey('instance') - ? GoogleCloudAiplatformV1SummarizationHelpfulnessInstance - .fromJson( - json_['instance'] as core.Map) - : null, - metricSpec: json_.containsKey('metricSpec') - ? GoogleCloudAiplatformV1SummarizationHelpfulnessSpec.fromJson( - json_['metricSpec'] as core.Map) - : null, - ); + /// Used to perform a consistent read-modify-write updates. + /// + /// If not set, a blind "overwrite" update happens. + core.String? etag; - core.Map toJson() => { - if (instance != null) 'instance': instance!, - if (metricSpec != null) 'metricSpec': metricSpec!, - }; -} + /// Used to indicate if the TensorBoard instance is the default one. 
+ /// + /// Each project & region can have at most one default TensorBoard instance. + /// Creation of a default TensorBoard instance and updating an existing + /// TensorBoard instance to be default will mark all other TensorBoard + /// instances (if any) as non default. + core.bool? isDefault; -/// Spec for summarization helpfulness instance. -typedef GoogleCloudAiplatformV1SummarizationHelpfulnessInstance = $Instance03; + /// The labels with user-defined metadata to organize your Tensorboards. + /// + /// Label keys and values can be no longer than 64 characters (Unicode + /// codepoints), can only contain lowercase letters, numeric characters, + /// underscores and dashes. International characters are allowed. No more than + /// 64 user labels can be associated with one Tensorboard (System labels are + /// excluded). See https://goo.gl/xmQnxf for more information and examples of + /// labels. System reserved label keys are prefixed with + /// "aiplatform.googleapis.com/" and are immutable. + core.Map? labels; + + /// Name of the Tensorboard. + /// + /// Format: + /// `projects/{project}/locations/{location}/tensorboards/{tensorboard}` + /// + /// Output only. + core.String? name; + + /// The number of Runs stored in this Tensorboard. + /// + /// Output only. + core.int? runCount; -/// Spec for summarization helpfulness result. -class GoogleCloudAiplatformV1SummarizationHelpfulnessResult { - /// Confidence for summarization helpfulness score. + /// Reserved for future use. /// /// Output only. - core.double? confidence; + core.bool? satisfiesPzi; - /// Explanation for summarization helpfulness score. + /// Reserved for future use. /// /// Output only. - core.String? explanation; + core.bool? satisfiesPzs; - /// Summarization Helpfulness score. + /// Timestamp when this Tensorboard was last updated. /// /// Output only. - core.double? score; + core.String? updateTime; - GoogleCloudAiplatformV1SummarizationHelpfulnessResult({ - this.confidence, - this.explanation, - this.score, + GoogleCloudAiplatformV1Tensorboard({ + this.blobStoragePathPrefix, + this.createTime, + this.description, + this.displayName, + this.encryptionSpec, + this.etag, + this.isDefault, + this.labels, + this.name, + this.runCount, + this.satisfiesPzi, + this.satisfiesPzs, + this.updateTime, }); - GoogleCloudAiplatformV1SummarizationHelpfulnessResult.fromJson(core.Map json_) + GoogleCloudAiplatformV1Tensorboard.fromJson(core.Map json_) : this( - confidence: (json_['confidence'] as core.num?)?.toDouble(), - explanation: json_['explanation'] as core.String?, - score: (json_['score'] as core.num?)?.toDouble(), + blobStoragePathPrefix: json_['blobStoragePathPrefix'] as core.String?, + createTime: json_['createTime'] as core.String?, + description: json_['description'] as core.String?, + displayName: json_['displayName'] as core.String?, + encryptionSpec: json_.containsKey('encryptionSpec') + ? 
GoogleCloudAiplatformV1EncryptionSpec.fromJson( + json_['encryptionSpec'] + as core.Map) + : null, + etag: json_['etag'] as core.String?, + isDefault: json_['isDefault'] as core.bool?, + labels: + (json_['labels'] as core.Map?)?.map( + (key, value) => core.MapEntry( + key, + value as core.String, + ), + ), + name: json_['name'] as core.String?, + runCount: json_['runCount'] as core.int?, + satisfiesPzi: json_['satisfiesPzi'] as core.bool?, + satisfiesPzs: json_['satisfiesPzs'] as core.bool?, + updateTime: json_['updateTime'] as core.String?, ); core.Map toJson() => { - if (confidence != null) 'confidence': confidence!, - if (explanation != null) 'explanation': explanation!, - if (score != null) 'score': score!, + if (blobStoragePathPrefix != null) + 'blobStoragePathPrefix': blobStoragePathPrefix!, + if (createTime != null) 'createTime': createTime!, + if (description != null) 'description': description!, + if (displayName != null) 'displayName': displayName!, + if (encryptionSpec != null) 'encryptionSpec': encryptionSpec!, + if (etag != null) 'etag': etag!, + if (isDefault != null) 'isDefault': isDefault!, + if (labels != null) 'labels': labels!, + if (name != null) 'name': name!, + if (runCount != null) 'runCount': runCount!, + if (satisfiesPzi != null) 'satisfiesPzi': satisfiesPzi!, + if (satisfiesPzs != null) 'satisfiesPzs': satisfiesPzs!, + if (updateTime != null) 'updateTime': updateTime!, }; } -/// Spec for summarization helpfulness score metric. -class GoogleCloudAiplatformV1SummarizationHelpfulnessSpec { - /// Whether to use instance.reference to compute summarization helpfulness. +/// One blob (e.g, image, graph) viewable on a blob metric plot. +class GoogleCloudAiplatformV1TensorboardBlob { + /// The bytes of the blob is not present unless it's returned by the + /// ReadTensorboardBlobData endpoint. /// /// Optional. - core.bool? useReference; + core.String? data; + core.List get dataAsBytes => convert.base64.decode(data!); - /// Which version to use for evaluation. + set dataAsBytes(core.List bytes_) { + data = + convert.base64.encode(bytes_).replaceAll('/', '_').replaceAll('+', '-'); + } + + /// A URI safe key uniquely identifying a blob. /// - /// Optional. - core.int? version; + /// Can be used to locate the blob stored in the Cloud Storage bucket of the + /// consumer project. + /// + /// Output only. + core.String? id; - GoogleCloudAiplatformV1SummarizationHelpfulnessSpec({ - this.useReference, - this.version, + GoogleCloudAiplatformV1TensorboardBlob({ + this.data, + this.id, }); - GoogleCloudAiplatformV1SummarizationHelpfulnessSpec.fromJson(core.Map json_) + GoogleCloudAiplatformV1TensorboardBlob.fromJson(core.Map json_) : this( - useReference: json_['useReference'] as core.bool?, - version: json_['version'] as core.int?, + data: json_['data'] as core.String?, + id: json_['id'] as core.String?, ); core.Map toJson() => { - if (useReference != null) 'useReference': useReference!, - if (version != null) 'version': version!, + if (data != null) 'data': data!, + if (id != null) 'id': id!, }; } -/// Input for summarization quality metric. -class GoogleCloudAiplatformV1SummarizationQualityInput { - /// Summarization quality instance. - /// - /// Required. - GoogleCloudAiplatformV1SummarizationQualityInstance? instance; - - /// Spec for summarization quality score metric. - /// - /// Required. - GoogleCloudAiplatformV1SummarizationQualitySpec? 
metricSpec; +/// One point viewable on a blob metric plot, but mostly just a wrapper message +/// to work around repeated fields can't be used directly within `oneof` fields. +class GoogleCloudAiplatformV1TensorboardBlobSequence { + /// List of blobs contained within the sequence. + core.List? values; - GoogleCloudAiplatformV1SummarizationQualityInput({ - this.instance, - this.metricSpec, + GoogleCloudAiplatformV1TensorboardBlobSequence({ + this.values, }); - GoogleCloudAiplatformV1SummarizationQualityInput.fromJson(core.Map json_) + GoogleCloudAiplatformV1TensorboardBlobSequence.fromJson(core.Map json_) : this( - instance: json_.containsKey('instance') - ? GoogleCloudAiplatformV1SummarizationQualityInstance.fromJson( - json_['instance'] as core.Map) - : null, - metricSpec: json_.containsKey('metricSpec') - ? GoogleCloudAiplatformV1SummarizationQualitySpec.fromJson( - json_['metricSpec'] as core.Map) - : null, + values: (json_['values'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1TensorboardBlob.fromJson( + value as core.Map)) + .toList(), ); core.Map toJson() => { - if (instance != null) 'instance': instance!, - if (metricSpec != null) 'metricSpec': metricSpec!, + if (values != null) 'values': values!, }; } -/// Spec for summarization quality instance. -class GoogleCloudAiplatformV1SummarizationQualityInstance { - /// Text to be summarized. +/// A TensorboardExperiment is a group of TensorboardRuns, that are typically +/// the results of a training job run, in a Tensorboard. +class GoogleCloudAiplatformV1TensorboardExperiment { + /// Timestamp when this TensorboardExperiment was created. /// - /// Required. - core.String? context; + /// Output only. + core.String? createTime; - /// Summarization prompt for LLM. + /// Description of this TensorboardExperiment. + core.String? description; + + /// User provided name of this TensorboardExperiment. + core.String? displayName; + + /// Used to perform consistent read-modify-write updates. /// - /// Required. - core.String? instruction; + /// If not set, a blind "overwrite" update happens. + core.String? etag; - /// Output of the evaluated model. + /// The labels with user-defined metadata to organize your + /// TensorboardExperiment. /// - /// Required. - core.String? prediction; + /// Label keys and values cannot be longer than 64 characters (Unicode + /// codepoints), can only contain lowercase letters, numeric characters, + /// underscores and dashes. International characters are allowed. No more than + /// 64 user labels can be associated with one Dataset (System labels are + /// excluded). See https://goo.gl/xmQnxf for more information and examples of + /// labels. System reserved label keys are prefixed with + /// `aiplatform.googleapis.com/` and are immutable. The following system + /// labels exist for each Dataset: * + /// `aiplatform.googleapis.com/dataset_metadata_schema`: output only. Its + /// value is the metadata_schema's title. + core.Map? labels; - /// Ground truth used to compare against the prediction. + /// Name of the TensorboardExperiment. /// - /// Optional. - core.String? reference; + /// Format: + /// `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}` + /// + /// Output only. + core.String? name; - GoogleCloudAiplatformV1SummarizationQualityInstance({ - this.context, - this.instruction, - this.prediction, - this.reference, + /// Source of the TensorboardExperiment. + /// + /// Example: a custom training job. + /// + /// Immutable. + core.String? 
source; + + /// Timestamp when this TensorboardExperiment was last updated. + /// + /// Output only. + core.String? updateTime; + + GoogleCloudAiplatformV1TensorboardExperiment({ + this.createTime, + this.description, + this.displayName, + this.etag, + this.labels, + this.name, + this.source, + this.updateTime, }); - GoogleCloudAiplatformV1SummarizationQualityInstance.fromJson(core.Map json_) + GoogleCloudAiplatformV1TensorboardExperiment.fromJson(core.Map json_) : this( - context: json_['context'] as core.String?, - instruction: json_['instruction'] as core.String?, - prediction: json_['prediction'] as core.String?, - reference: json_['reference'] as core.String?, + createTime: json_['createTime'] as core.String?, + description: json_['description'] as core.String?, + displayName: json_['displayName'] as core.String?, + etag: json_['etag'] as core.String?, + labels: + (json_['labels'] as core.Map?)?.map( + (key, value) => core.MapEntry( + key, + value as core.String, + ), + ), + name: json_['name'] as core.String?, + source: json_['source'] as core.String?, + updateTime: json_['updateTime'] as core.String?, ); core.Map toJson() => { - if (context != null) 'context': context!, - if (instruction != null) 'instruction': instruction!, - if (prediction != null) 'prediction': prediction!, - if (reference != null) 'reference': reference!, + if (createTime != null) 'createTime': createTime!, + if (description != null) 'description': description!, + if (displayName != null) 'displayName': displayName!, + if (etag != null) 'etag': etag!, + if (labels != null) 'labels': labels!, + if (name != null) 'name': name!, + if (source != null) 'source': source!, + if (updateTime != null) 'updateTime': updateTime!, }; } -/// Spec for summarization quality result. -class GoogleCloudAiplatformV1SummarizationQualityResult { - /// Confidence for summarization quality score. +/// TensorboardRun maps to a specific execution of a training job with a given +/// set of hyperparameter values, model definition, dataset, etc +class GoogleCloudAiplatformV1TensorboardRun { + /// Timestamp when this TensorboardRun was created. /// /// Output only. - core.double? confidence; + core.String? createTime; - /// Explanation for summarization quality score. + /// Description of this TensorboardRun. + core.String? description; + + /// User provided name of this TensorboardRun. + /// + /// This value must be unique among all TensorboardRuns belonging to the same + /// parent TensorboardExperiment. + /// + /// Required. + core.String? displayName; + + /// Used to perform a consistent read-modify-write updates. + /// + /// If not set, a blind "overwrite" update happens. + core.String? etag; + + /// The labels with user-defined metadata to organize your TensorboardRuns. + /// + /// This field will be used to filter and visualize Runs in the Tensorboard + /// UI. For example, a Vertex AI training job can set a label + /// aiplatform.googleapis.com/training_job_id=xxxxx to all the runs created + /// within that job. An end user can set a label experiment_id=xxxxx for all + /// the runs produced in a Jupyter notebook. These runs can be grouped by a + /// label value and visualized together in the Tensorboard UI. Label keys and + /// values can be no longer than 64 characters (Unicode codepoints), can only + /// contain lowercase letters, numeric characters, underscores and dashes. + /// International characters are allowed. No more than 64 user labels can be + /// associated with one TensorboardRun (System labels are excluded). 
See + /// https://goo.gl/xmQnxf for more information and examples of labels. System + /// reserved label keys are prefixed with "aiplatform.googleapis.com/" and are + /// immutable. + core.Map? labels; + + /// Name of the TensorboardRun. + /// + /// Format: + /// `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}/runs/{run}` /// /// Output only. - core.String? explanation; + core.String? name; - /// Summarization Quality score. + /// Timestamp when this TensorboardRun was last updated. /// /// Output only. - core.double? score; + core.String? updateTime; - GoogleCloudAiplatformV1SummarizationQualityResult({ - this.confidence, - this.explanation, - this.score, + GoogleCloudAiplatformV1TensorboardRun({ + this.createTime, + this.description, + this.displayName, + this.etag, + this.labels, + this.name, + this.updateTime, }); - GoogleCloudAiplatformV1SummarizationQualityResult.fromJson(core.Map json_) + GoogleCloudAiplatformV1TensorboardRun.fromJson(core.Map json_) : this( - confidence: (json_['confidence'] as core.num?)?.toDouble(), - explanation: json_['explanation'] as core.String?, - score: (json_['score'] as core.num?)?.toDouble(), + createTime: json_['createTime'] as core.String?, + description: json_['description'] as core.String?, + displayName: json_['displayName'] as core.String?, + etag: json_['etag'] as core.String?, + labels: + (json_['labels'] as core.Map?)?.map( + (key, value) => core.MapEntry( + key, + value as core.String, + ), + ), + name: json_['name'] as core.String?, + updateTime: json_['updateTime'] as core.String?, ); core.Map toJson() => { - if (confidence != null) 'confidence': confidence!, - if (explanation != null) 'explanation': explanation!, - if (score != null) 'score': score!, + if (createTime != null) 'createTime': createTime!, + if (description != null) 'description': description!, + if (displayName != null) 'displayName': displayName!, + if (etag != null) 'etag': etag!, + if (labels != null) 'labels': labels!, + if (name != null) 'name': name!, + if (updateTime != null) 'updateTime': updateTime!, }; } -/// Spec for summarization quality score metric. -class GoogleCloudAiplatformV1SummarizationQualitySpec { - /// Whether to use instance.reference to compute summarization quality. +/// One point viewable on a tensor metric plot. +class GoogleCloudAiplatformV1TensorboardTensor { + /// Serialized form of + /// https://github.com/tensorflow/tensorflow/blob/master/tensorflow/core/framework/tensor.proto /// - /// Optional. - core.bool? useReference; + /// Required. + core.String? value; + core.List get valueAsBytes => convert.base64.decode(value!); - /// Which version to use for evaluation. + set valueAsBytes(core.List bytes_) { + value = + convert.base64.encode(bytes_).replaceAll('/', '_').replaceAll('+', '-'); + } + + /// Version number of TensorProto used to serialize value. /// /// Optional. - core.int? version; + core.int? 
versionNumber; - GoogleCloudAiplatformV1SummarizationQualitySpec({ - this.useReference, - this.version, + GoogleCloudAiplatformV1TensorboardTensor({ + this.value, + this.versionNumber, }); - GoogleCloudAiplatformV1SummarizationQualitySpec.fromJson(core.Map json_) + GoogleCloudAiplatformV1TensorboardTensor.fromJson(core.Map json_) : this( - useReference: json_['useReference'] as core.bool?, - version: json_['version'] as core.int?, + value: json_['value'] as core.String?, + versionNumber: json_['versionNumber'] as core.int?, ); core.Map toJson() => { - if (useReference != null) 'useReference': useReference!, - if (version != null) 'version': version!, + if (value != null) 'value': value!, + if (versionNumber != null) 'versionNumber': versionNumber!, }; } -/// Input for summarization verbosity metric. -class GoogleCloudAiplatformV1SummarizationVerbosityInput { - /// Summarization verbosity instance. +/// TensorboardTimeSeries maps to times series produced in training runs +class GoogleCloudAiplatformV1TensorboardTimeSeries { + /// Timestamp when this TensorboardTimeSeries was created. /// - /// Required. - GoogleCloudAiplatformV1SummarizationVerbosityInstance? instance; + /// Output only. + core.String? createTime; - /// Spec for summarization verbosity score metric. + /// Description of this TensorboardTimeSeries. + core.String? description; + + /// User provided name of this TensorboardTimeSeries. + /// + /// This value should be unique among all TensorboardTimeSeries resources + /// belonging to the same TensorboardRun resource (parent resource). /// /// Required. - GoogleCloudAiplatformV1SummarizationVerbositySpec? metricSpec; + core.String? displayName; - GoogleCloudAiplatformV1SummarizationVerbosityInput({ - this.instance, - this.metricSpec, + /// Used to perform a consistent read-modify-write updates. + /// + /// If not set, a blind "overwrite" update happens. + core.String? etag; + + /// Scalar, Tensor, or Blob metadata for this TensorboardTimeSeries. + /// + /// Output only. + GoogleCloudAiplatformV1TensorboardTimeSeriesMetadata? metadata; + + /// Name of the TensorboardTimeSeries. + /// + /// Output only. + core.String? name; + + /// Data of the current plugin, with the size limited to 65KB. + core.String? pluginData; + core.List get pluginDataAsBytes => + convert.base64.decode(pluginData!); + + set pluginDataAsBytes(core.List bytes_) { + pluginData = + convert.base64.encode(bytes_).replaceAll('/', '_').replaceAll('+', '-'); + } + + /// Name of the plugin this time series pertain to. + /// + /// Such as Scalar, Tensor, Blob + /// + /// Immutable. + core.String? pluginName; + + /// Timestamp when this TensorboardTimeSeries was last updated. + /// + /// Output only. + core.String? updateTime; + + /// Type of TensorboardTimeSeries value. + /// + /// Required. Immutable. + /// Possible string values are: + /// - "VALUE_TYPE_UNSPECIFIED" : The value type is unspecified. + /// - "SCALAR" : Used for TensorboardTimeSeries that is a list of scalars. + /// E.g. accuracy of a model over epochs/time. + /// - "TENSOR" : Used for TensorboardTimeSeries that is a list of tensors. + /// E.g. histograms of weights of layer in a model over epoch/time. + /// - "BLOB_SEQUENCE" : Used for TensorboardTimeSeries that is a list of blob + /// sequences. E.g. set of sample images with labels over epochs/time. + core.String? 
valueType; + + GoogleCloudAiplatformV1TensorboardTimeSeries({ + this.createTime, + this.description, + this.displayName, + this.etag, + this.metadata, + this.name, + this.pluginData, + this.pluginName, + this.updateTime, + this.valueType, }); - GoogleCloudAiplatformV1SummarizationVerbosityInput.fromJson(core.Map json_) + GoogleCloudAiplatformV1TensorboardTimeSeries.fromJson(core.Map json_) : this( - instance: json_.containsKey('instance') - ? GoogleCloudAiplatformV1SummarizationVerbosityInstance.fromJson( - json_['instance'] as core.Map) - : null, - metricSpec: json_.containsKey('metricSpec') - ? GoogleCloudAiplatformV1SummarizationVerbositySpec.fromJson( - json_['metricSpec'] as core.Map) + createTime: json_['createTime'] as core.String?, + description: json_['description'] as core.String?, + displayName: json_['displayName'] as core.String?, + etag: json_['etag'] as core.String?, + metadata: json_.containsKey('metadata') + ? GoogleCloudAiplatformV1TensorboardTimeSeriesMetadata.fromJson( + json_['metadata'] as core.Map) : null, + name: json_['name'] as core.String?, + pluginData: json_['pluginData'] as core.String?, + pluginName: json_['pluginName'] as core.String?, + updateTime: json_['updateTime'] as core.String?, + valueType: json_['valueType'] as core.String?, ); core.Map toJson() => { - if (instance != null) 'instance': instance!, - if (metricSpec != null) 'metricSpec': metricSpec!, + if (createTime != null) 'createTime': createTime!, + if (description != null) 'description': description!, + if (displayName != null) 'displayName': displayName!, + if (etag != null) 'etag': etag!, + if (metadata != null) 'metadata': metadata!, + if (name != null) 'name': name!, + if (pluginData != null) 'pluginData': pluginData!, + if (pluginName != null) 'pluginName': pluginName!, + if (updateTime != null) 'updateTime': updateTime!, + if (valueType != null) 'valueType': valueType!, }; } -/// Spec for summarization verbosity instance. -typedef GoogleCloudAiplatformV1SummarizationVerbosityInstance = $Instance03; - -/// Spec for summarization verbosity result. -class GoogleCloudAiplatformV1SummarizationVerbosityResult { - /// Confidence for summarization verbosity score. +/// Describes metadata for a TensorboardTimeSeries. +class GoogleCloudAiplatformV1TensorboardTimeSeriesMetadata { + /// The largest blob sequence length (number of blobs) of all data points in + /// this time series, if its ValueType is BLOB_SEQUENCE. /// /// Output only. - core.double? confidence; + core.String? maxBlobSequenceLength; - /// Explanation for summarization verbosity score. + /// Max step index of all data points within a TensorboardTimeSeries. /// /// Output only. - core.String? explanation; + core.String? maxStep; - /// Summarization Verbosity score. + /// Max wall clock timestamp of all data points within a + /// TensorboardTimeSeries. /// /// Output only. - core.double? score; + core.String? 
maxWallTime; - GoogleCloudAiplatformV1SummarizationVerbosityResult({ - this.confidence, - this.explanation, - this.score, + GoogleCloudAiplatformV1TensorboardTimeSeriesMetadata({ + this.maxBlobSequenceLength, + this.maxStep, + this.maxWallTime, }); - GoogleCloudAiplatformV1SummarizationVerbosityResult.fromJson(core.Map json_) + GoogleCloudAiplatformV1TensorboardTimeSeriesMetadata.fromJson(core.Map json_) : this( - confidence: (json_['confidence'] as core.num?)?.toDouble(), - explanation: json_['explanation'] as core.String?, - score: (json_['score'] as core.num?)?.toDouble(), + maxBlobSequenceLength: json_['maxBlobSequenceLength'] as core.String?, + maxStep: json_['maxStep'] as core.String?, + maxWallTime: json_['maxWallTime'] as core.String?, ); core.Map toJson() => { - if (confidence != null) 'confidence': confidence!, - if (explanation != null) 'explanation': explanation!, - if (score != null) 'score': score!, + if (maxBlobSequenceLength != null) + 'maxBlobSequenceLength': maxBlobSequenceLength!, + if (maxStep != null) 'maxStep': maxStep!, + if (maxWallTime != null) 'maxWallTime': maxWallTime!, }; } -/// Spec for summarization verbosity score metric. -class GoogleCloudAiplatformV1SummarizationVerbositySpec { - /// Whether to use instance.reference to compute summarization verbosity. - /// - /// Optional. - core.bool? useReference; - - /// Which version to use for evaluation. +/// The config for feature monitoring threshold. +class GoogleCloudAiplatformV1ThresholdConfig { + /// Specify a threshold value that can trigger the alert. /// - /// Optional. - core.int? version; + /// If this threshold config is for feature distribution distance: 1. For + /// categorical feature, the distribution distance is calculated by + /// L-inifinity norm. 2. For numerical feature, the distribution distance is + /// calculated by Jensen–Shannon divergence. Each feature must have a non-zero + /// threshold if they need to be monitored. Otherwise no alert will be + /// triggered for that feature. + core.double? value; - GoogleCloudAiplatformV1SummarizationVerbositySpec({ - this.useReference, - this.version, + GoogleCloudAiplatformV1ThresholdConfig({ + this.value, }); - GoogleCloudAiplatformV1SummarizationVerbositySpec.fromJson(core.Map json_) + GoogleCloudAiplatformV1ThresholdConfig.fromJson(core.Map json_) : this( - useReference: json_['useReference'] as core.bool?, - version: json_['version'] as core.int?, + value: (json_['value'] as core.num?)?.toDouble(), ); core.Map toJson() => { - if (useReference != null) 'useReference': useReference!, - if (version != null) 'version': version!, + if (value != null) 'value': value!, }; } -/// Hyperparameters for SFT. -class GoogleCloudAiplatformV1SupervisedHyperParameters { - /// Adapter size for tuning. +/// All the data stored in a TensorboardTimeSeries. +class GoogleCloudAiplatformV1TimeSeriesData { + /// The ID of the TensorboardTimeSeries, which will become the final component + /// of the TensorboardTimeSeries' resource name /// - /// Optional. - /// Possible string values are: - /// - "ADAPTER_SIZE_UNSPECIFIED" : Adapter size is unspecified. - /// - "ADAPTER_SIZE_ONE" : Adapter size 1. - /// - "ADAPTER_SIZE_FOUR" : Adapter size 4. - /// - "ADAPTER_SIZE_EIGHT" : Adapter size 8. - /// - "ADAPTER_SIZE_SIXTEEN" : Adapter size 16. - /// - "ADAPTER_SIZE_THIRTY_TWO" : Adapter size 32. - core.String? adapterSize; + /// Required. + core.String? 
tensorboardTimeSeriesId; - /// Number of complete passes the model makes over the entire training dataset - /// during training. + /// The value type of this time series. /// - /// Optional. - core.String? epochCount; + /// All the values in this time series data must match this value type. + /// + /// Required. Immutable. + /// Possible string values are: + /// - "VALUE_TYPE_UNSPECIFIED" : The value type is unspecified. + /// - "SCALAR" : Used for TensorboardTimeSeries that is a list of scalars. + /// E.g. accuracy of a model over epochs/time. + /// - "TENSOR" : Used for TensorboardTimeSeries that is a list of tensors. + /// E.g. histograms of weights of layer in a model over epoch/time. + /// - "BLOB_SEQUENCE" : Used for TensorboardTimeSeries that is a list of blob + /// sequences. E.g. set of sample images with labels over epochs/time. + core.String? valueType; - /// Multiplier for adjusting the default learning rate. + /// Data points in this time series. /// - /// Optional. - core.double? learningRateMultiplier; + /// Required. + core.List? values; - GoogleCloudAiplatformV1SupervisedHyperParameters({ - this.adapterSize, - this.epochCount, - this.learningRateMultiplier, + GoogleCloudAiplatformV1TimeSeriesData({ + this.tensorboardTimeSeriesId, + this.valueType, + this.values, }); - GoogleCloudAiplatformV1SupervisedHyperParameters.fromJson(core.Map json_) + GoogleCloudAiplatformV1TimeSeriesData.fromJson(core.Map json_) : this( - adapterSize: json_['adapterSize'] as core.String?, - epochCount: json_['epochCount'] as core.String?, - learningRateMultiplier: - (json_['learningRateMultiplier'] as core.num?)?.toDouble(), + tensorboardTimeSeriesId: + json_['tensorboardTimeSeriesId'] as core.String?, + valueType: json_['valueType'] as core.String?, + values: (json_['values'] as core.List?) + ?.map((value) => + GoogleCloudAiplatformV1TimeSeriesDataPoint.fromJson( + value as core.Map)) + .toList(), ); core.Map toJson() => { - if (adapterSize != null) 'adapterSize': adapterSize!, - if (epochCount != null) 'epochCount': epochCount!, - if (learningRateMultiplier != null) - 'learningRateMultiplier': learningRateMultiplier!, + if (tensorboardTimeSeriesId != null) + 'tensorboardTimeSeriesId': tensorboardTimeSeriesId!, + if (valueType != null) 'valueType': valueType!, + if (values != null) 'values': values!, }; } -/// Tuning data statistics for Supervised Tuning. -class GoogleCloudAiplatformV1SupervisedTuningDataStats { - /// Number of billable characters in the tuning dataset. - /// - /// Output only. - @core.Deprecated( - 'Not supported. Member documentation may have more information.', - ) - core.String? totalBillableCharacterCount; +/// A TensorboardTimeSeries data point. +class GoogleCloudAiplatformV1TimeSeriesDataPoint { + /// A blob sequence value. + GoogleCloudAiplatformV1TensorboardBlobSequence? blobs; - /// Number of billable tokens in the tuning dataset. - /// - /// Output only. - core.String? totalBillableTokenCount; + /// A scalar value. + GoogleCloudAiplatformV1Scalar? scalar; - /// The number of examples in the dataset that have been truncated by any - /// amount. - core.String? totalTruncatedExampleCount; + /// Step index of this data point within the run. + core.String? step; - /// Number of tuning characters in the tuning dataset. - /// - /// Output only. - core.String? totalTuningCharacterCount; + /// A tensor value. + GoogleCloudAiplatformV1TensorboardTensor? tensor; - /// A partial sample of the indices (starting from 1) of the truncated - /// examples. - core.List? 
truncatedExampleIndices; + /// Wall clock timestamp when this data point is generated by the end user. + core.String? wallTime; - /// Number of examples in the tuning dataset. - /// - /// Output only. - core.String? tuningDatasetExampleCount; + GoogleCloudAiplatformV1TimeSeriesDataPoint({ + this.blobs, + this.scalar, + this.step, + this.tensor, + this.wallTime, + }); - /// Number of tuning steps for this Tuning Job. - /// - /// Output only. - core.String? tuningStepCount; + GoogleCloudAiplatformV1TimeSeriesDataPoint.fromJson(core.Map json_) + : this( + blobs: json_.containsKey('blobs') + ? GoogleCloudAiplatformV1TensorboardBlobSequence.fromJson( + json_['blobs'] as core.Map) + : null, + scalar: json_.containsKey('scalar') + ? GoogleCloudAiplatformV1Scalar.fromJson( + json_['scalar'] as core.Map) + : null, + step: json_['step'] as core.String?, + tensor: json_.containsKey('tensor') + ? GoogleCloudAiplatformV1TensorboardTensor.fromJson( + json_['tensor'] as core.Map) + : null, + wallTime: json_['wallTime'] as core.String?, + ); - /// Sample user messages in the training dataset uri. - /// - /// Output only. - core.List? userDatasetExamples; + core.Map toJson() => { + if (blobs != null) 'blobs': blobs!, + if (scalar != null) 'scalar': scalar!, + if (step != null) 'step': step!, + if (tensor != null) 'tensor': tensor!, + if (wallTime != null) 'wallTime': wallTime!, + }; +} - /// Dataset distributions for the user input tokens. +/// Assigns input data to training, validation, and test sets based on a +/// provided timestamps. +/// +/// The youngest data pieces are assigned to training set, next to validation +/// set, and the oldest to the test set. Supported only for tabular Datasets. +class GoogleCloudAiplatformV1TimestampSplit { + /// The key is a name of one of the Dataset's data columns. /// - /// Output only. - GoogleCloudAiplatformV1SupervisedTuningDatasetDistribution? - userInputTokenDistribution; - - /// Dataset distributions for the messages per example. + /// The values of the key (the values in the column) must be in RFC 3339 + /// `date-time` format, where `time-offset` = `"Z"` (e.g. + /// 1985-04-12T23:20:50.52Z). If for a piece of data the key is not present or + /// has an invalid value, that piece is ignored by the pipeline. /// - /// Output only. - GoogleCloudAiplatformV1SupervisedTuningDatasetDistribution? - userMessagePerExampleDistribution; + /// Required. + core.String? key; - /// Dataset distributions for the user output tokens. - /// - /// Output only. - GoogleCloudAiplatformV1SupervisedTuningDatasetDistribution? - userOutputTokenDistribution; + /// The fraction of the input data that is to be used to evaluate the Model. + core.double? testFraction; - GoogleCloudAiplatformV1SupervisedTuningDataStats({ - this.totalBillableCharacterCount, - this.totalBillableTokenCount, - this.totalTruncatedExampleCount, - this.totalTuningCharacterCount, - this.truncatedExampleIndices, - this.tuningDatasetExampleCount, - this.tuningStepCount, - this.userDatasetExamples, - this.userInputTokenDistribution, - this.userMessagePerExampleDistribution, - this.userOutputTokenDistribution, + /// The fraction of the input data that is to be used to train the Model. + core.double? trainingFraction; + + /// The fraction of the input data that is to be used to validate the Model. + core.double? 
validationFraction; + + GoogleCloudAiplatformV1TimestampSplit({ + this.key, + this.testFraction, + this.trainingFraction, + this.validationFraction, }); - GoogleCloudAiplatformV1SupervisedTuningDataStats.fromJson(core.Map json_) + GoogleCloudAiplatformV1TimestampSplit.fromJson(core.Map json_) : this( - totalBillableCharacterCount: - json_['totalBillableCharacterCount'] as core.String?, - totalBillableTokenCount: - json_['totalBillableTokenCount'] as core.String?, - totalTruncatedExampleCount: - json_['totalTruncatedExampleCount'] as core.String?, - totalTuningCharacterCount: - json_['totalTuningCharacterCount'] as core.String?, - truncatedExampleIndices: - (json_['truncatedExampleIndices'] as core.List?) - ?.map((value) => value as core.String) - .toList(), - tuningDatasetExampleCount: - json_['tuningDatasetExampleCount'] as core.String?, - tuningStepCount: json_['tuningStepCount'] as core.String?, - userDatasetExamples: (json_['userDatasetExamples'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1Content.fromJson( - value as core.Map)) - .toList(), - userInputTokenDistribution: - json_.containsKey('userInputTokenDistribution') - ? GoogleCloudAiplatformV1SupervisedTuningDatasetDistribution - .fromJson(json_['userInputTokenDistribution'] - as core.Map) - : null, - userMessagePerExampleDistribution: - json_.containsKey('userMessagePerExampleDistribution') - ? GoogleCloudAiplatformV1SupervisedTuningDatasetDistribution - .fromJson(json_['userMessagePerExampleDistribution'] - as core.Map) - : null, - userOutputTokenDistribution: - json_.containsKey('userOutputTokenDistribution') - ? GoogleCloudAiplatformV1SupervisedTuningDatasetDistribution - .fromJson(json_['userOutputTokenDistribution'] - as core.Map) - : null, + key: json_['key'] as core.String?, + testFraction: (json_['testFraction'] as core.num?)?.toDouble(), + trainingFraction: + (json_['trainingFraction'] as core.num?)?.toDouble(), + validationFraction: + (json_['validationFraction'] as core.num?)?.toDouble(), ); core.Map toJson() => { - if (totalBillableCharacterCount != null) - 'totalBillableCharacterCount': totalBillableCharacterCount!, - if (totalBillableTokenCount != null) - 'totalBillableTokenCount': totalBillableTokenCount!, - if (totalTruncatedExampleCount != null) - 'totalTruncatedExampleCount': totalTruncatedExampleCount!, - if (totalTuningCharacterCount != null) - 'totalTuningCharacterCount': totalTuningCharacterCount!, - if (truncatedExampleIndices != null) - 'truncatedExampleIndices': truncatedExampleIndices!, - if (tuningDatasetExampleCount != null) - 'tuningDatasetExampleCount': tuningDatasetExampleCount!, - if (tuningStepCount != null) 'tuningStepCount': tuningStepCount!, - if (userDatasetExamples != null) - 'userDatasetExamples': userDatasetExamples!, - if (userInputTokenDistribution != null) - 'userInputTokenDistribution': userInputTokenDistribution!, - if (userMessagePerExampleDistribution != null) - 'userMessagePerExampleDistribution': - userMessagePerExampleDistribution!, - if (userOutputTokenDistribution != null) - 'userOutputTokenDistribution': userOutputTokenDistribution!, + if (key != null) 'key': key!, + if (testFraction != null) 'testFraction': testFraction!, + if (trainingFraction != null) 'trainingFraction': trainingFraction!, + if (validationFraction != null) + 'validationFraction': validationFraction!, }; } -/// Dataset distribution for Supervised Tuning. -class GoogleCloudAiplatformV1SupervisedTuningDatasetDistribution { - /// Sum of a given population of values that are billable. 
+/// Tokens info with a list of tokens and the corresponding list of token ids. +class GoogleCloudAiplatformV1TokensInfo { + /// Optional fields for the role from the corresponding Content. /// - /// Output only. - core.String? billableSum; + /// Optional. + core.String? role; - /// Defines the histogram bucket. - /// - /// Output only. - core.List< - GoogleCloudAiplatformV1SupervisedTuningDatasetDistributionDatasetBucket>? - buckets; + /// A list of token ids from the input. + core.List? tokenIds; - /// The maximum of the population values. - /// - /// Output only. - core.double? max; + /// A list of tokens from the input. + core.List? tokens; - /// The arithmetic mean of the values in the population. - /// - /// Output only. - core.double? mean; + GoogleCloudAiplatformV1TokensInfo({ + this.role, + this.tokenIds, + this.tokens, + }); - /// The median of the values in the population. - /// - /// Output only. - core.double? median; + GoogleCloudAiplatformV1TokensInfo.fromJson(core.Map json_) + : this( + role: json_['role'] as core.String?, + tokenIds: (json_['tokenIds'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + tokens: (json_['tokens'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + ); - /// The minimum of the population values. + core.Map toJson() => { + if (role != null) 'role': role!, + if (tokenIds != null) 'tokenIds': tokenIds!, + if (tokens != null) 'tokens': tokens!, + }; +} + +/// Tool details that the model may use to generate response. +/// +/// A `Tool` is a piece of code that enables the system to interact with +/// external systems to perform an action, or set of actions, outside of +/// knowledge and scope of the model. A Tool object should contain exactly one +/// type of Tool (e.g FunctionDeclaration, Retrieval or GoogleSearchRetrieval). +class GoogleCloudAiplatformV1Tool { + /// Function tool type. /// - /// Output only. - core.double? min; + /// One or more function declarations to be passed to the model along with the + /// current user query. Model may decide to call a subset of these functions + /// by populating FunctionCall in the response. User should provide a + /// FunctionResponse for each function call in the next turn. Based on the + /// function responses, Model will generate the final response back to the + /// user. Maximum 128 function declarations can be provided. + /// + /// Optional. + core.List? functionDeclarations; - /// The 5th percentile of the values in the population. + /// GoogleSearch tool type. /// - /// Output only. - core.double? p5; + /// Tool to support Google Search in Model. Powered by Google. + /// + /// Optional. + GoogleCloudAiplatformV1ToolGoogleSearch? googleSearch; - /// The 95th percentile of the values in the population. + /// GoogleSearchRetrieval tool type. /// - /// Output only. - core.double? p95; + /// Specialized retrieval tool that is powered by Google search. + /// + /// Optional. + GoogleCloudAiplatformV1GoogleSearchRetrieval? googleSearchRetrieval; - /// Sum of a given population of values. + /// Retrieval tool type. /// - /// Output only. - core.String? sum; + /// System will always execute the provided retrieval tool(s) to get external + /// knowledge to answer the prompt. Retrieval results are presented to the + /// model for generation. + /// + /// Optional. + GoogleCloudAiplatformV1Retrieval? 
retrieval; - GoogleCloudAiplatformV1SupervisedTuningDatasetDistribution({ - this.billableSum, - this.buckets, - this.max, - this.mean, - this.median, - this.min, - this.p5, - this.p95, - this.sum, + GoogleCloudAiplatformV1Tool({ + this.functionDeclarations, + this.googleSearch, + this.googleSearchRetrieval, + this.retrieval, }); - GoogleCloudAiplatformV1SupervisedTuningDatasetDistribution.fromJson( - core.Map json_) + GoogleCloudAiplatformV1Tool.fromJson(core.Map json_) : this( - billableSum: json_['billableSum'] as core.String?, - buckets: (json_['buckets'] as core.List?) + functionDeclarations: (json_['functionDeclarations'] as core.List?) ?.map((value) => - GoogleCloudAiplatformV1SupervisedTuningDatasetDistributionDatasetBucket - .fromJson(value as core.Map)) + GoogleCloudAiplatformV1FunctionDeclaration.fromJson( + value as core.Map)) .toList(), - max: (json_['max'] as core.num?)?.toDouble(), - mean: (json_['mean'] as core.num?)?.toDouble(), - median: (json_['median'] as core.num?)?.toDouble(), - min: (json_['min'] as core.num?)?.toDouble(), - p5: (json_['p5'] as core.num?)?.toDouble(), - p95: (json_['p95'] as core.num?)?.toDouble(), - sum: json_['sum'] as core.String?, + googleSearch: json_.containsKey('googleSearch') + ? GoogleCloudAiplatformV1ToolGoogleSearch.fromJson( + json_['googleSearch'] as core.Map) + : null, + googleSearchRetrieval: json_.containsKey('googleSearchRetrieval') + ? GoogleCloudAiplatformV1GoogleSearchRetrieval.fromJson( + json_['googleSearchRetrieval'] + as core.Map) + : null, + retrieval: json_.containsKey('retrieval') + ? GoogleCloudAiplatformV1Retrieval.fromJson( + json_['retrieval'] as core.Map) + : null, ); core.Map toJson() => { - if (billableSum != null) 'billableSum': billableSum!, - if (buckets != null) 'buckets': buckets!, - if (max != null) 'max': max!, - if (mean != null) 'mean': mean!, - if (median != null) 'median': median!, - if (min != null) 'min': min!, - if (p5 != null) 'p5': p5!, - if (p95 != null) 'p95': p95!, - if (sum != null) 'sum': sum!, + if (functionDeclarations != null) + 'functionDeclarations': functionDeclarations!, + if (googleSearch != null) 'googleSearch': googleSearch!, + if (googleSearchRetrieval != null) + 'googleSearchRetrieval': googleSearchRetrieval!, + if (retrieval != null) 'retrieval': retrieval!, }; } -/// Dataset bucket used to create a histogram for the distribution given a -/// population of values. -class GoogleCloudAiplatformV1SupervisedTuningDatasetDistributionDatasetBucket { - /// Number of values in the bucket. - /// - /// Output only. - core.double? count; - - /// Left bound of the bucket. +/// Spec for tool call. +class GoogleCloudAiplatformV1ToolCall { + /// Spec for tool input /// - /// Output only. - core.double? left; + /// Optional. + core.String? toolInput; - /// Right bound of the bucket. + /// Spec for tool name /// - /// Output only. - core.double? right; + /// Required. + core.String? 
toolName; - GoogleCloudAiplatformV1SupervisedTuningDatasetDistributionDatasetBucket({ - this.count, - this.left, - this.right, + GoogleCloudAiplatformV1ToolCall({ + this.toolInput, + this.toolName, }); - GoogleCloudAiplatformV1SupervisedTuningDatasetDistributionDatasetBucket.fromJson( - core.Map json_) + GoogleCloudAiplatformV1ToolCall.fromJson(core.Map json_) : this( - count: (json_['count'] as core.num?)?.toDouble(), - left: (json_['left'] as core.num?)?.toDouble(), - right: (json_['right'] as core.num?)?.toDouble(), + toolInput: json_['toolInput'] as core.String?, + toolName: json_['toolName'] as core.String?, ); core.Map toJson() => { - if (count != null) 'count': count!, - if (left != null) 'left': left!, - if (right != null) 'right': right!, + if (toolInput != null) 'toolInput': toolInput!, + if (toolName != null) 'toolName': toolName!, }; } -/// Tuning Spec for Supervised Tuning for first party models. -class GoogleCloudAiplatformV1SupervisedTuningSpec { - /// Hyperparameters for SFT. - /// - /// Optional. - GoogleCloudAiplatformV1SupervisedHyperParameters? hyperParameters; - - /// Cloud Storage path to file containing training dataset for tuning. - /// - /// The dataset must be formatted as a JSONL file. +/// Input for tool call valid metric. +class GoogleCloudAiplatformV1ToolCallValidInput { + /// Repeated tool call valid instances. /// /// Required. - core.String? trainingDatasetUri; + core.List? instances; - /// Cloud Storage path to file containing validation dataset for tuning. - /// - /// The dataset must be formatted as a JSONL file. + /// Spec for tool call valid metric. /// - /// Optional. - core.String? validationDatasetUri; + /// Required. + GoogleCloudAiplatformV1ToolCallValidSpec? metricSpec; - GoogleCloudAiplatformV1SupervisedTuningSpec({ - this.hyperParameters, - this.trainingDatasetUri, - this.validationDatasetUri, + GoogleCloudAiplatformV1ToolCallValidInput({ + this.instances, + this.metricSpec, }); - GoogleCloudAiplatformV1SupervisedTuningSpec.fromJson(core.Map json_) + GoogleCloudAiplatformV1ToolCallValidInput.fromJson(core.Map json_) : this( - hyperParameters: json_.containsKey('hyperParameters') - ? GoogleCloudAiplatformV1SupervisedHyperParameters.fromJson( - json_['hyperParameters'] - as core.Map) + instances: (json_['instances'] as core.List?) + ?.map((value) => + GoogleCloudAiplatformV1ToolCallValidInstance.fromJson( + value as core.Map)) + .toList(), + metricSpec: json_.containsKey('metricSpec') + ? GoogleCloudAiplatformV1ToolCallValidSpec.fromJson( + json_['metricSpec'] as core.Map) : null, - trainingDatasetUri: json_['trainingDatasetUri'] as core.String?, - validationDatasetUri: json_['validationDatasetUri'] as core.String?, ); core.Map toJson() => { - if (hyperParameters != null) 'hyperParameters': hyperParameters!, - if (trainingDatasetUri != null) - 'trainingDatasetUri': trainingDatasetUri!, - if (validationDatasetUri != null) - 'validationDatasetUri': validationDatasetUri!, + if (instances != null) 'instances': instances!, + if (metricSpec != null) 'metricSpec': metricSpec!, }; } -/// Request message for FeatureOnlineStoreAdminService.SyncFeatureView. -typedef GoogleCloudAiplatformV1SyncFeatureViewRequest = $Empty; +/// Spec for tool call valid instance. +typedef GoogleCloudAiplatformV1ToolCallValidInstance = $Instance00; -/// Response message for FeatureOnlineStoreAdminService.SyncFeatureView. 
-class GoogleCloudAiplatformV1SyncFeatureViewResponse { - /// Format: - /// `projects/{project}/locations/{location}/featureOnlineStores/{feature_online_store}/featureViews/{feature_view}/featureViewSyncs/{feature_view_sync}` - core.String? featureViewSync; +/// Tool call valid metric value for an instance. +class GoogleCloudAiplatformV1ToolCallValidMetricValue { + /// Tool call valid score. + /// + /// Output only. + core.double? score; - GoogleCloudAiplatformV1SyncFeatureViewResponse({ - this.featureViewSync, + GoogleCloudAiplatformV1ToolCallValidMetricValue({ + this.score, }); - GoogleCloudAiplatformV1SyncFeatureViewResponse.fromJson(core.Map json_) + GoogleCloudAiplatformV1ToolCallValidMetricValue.fromJson(core.Map json_) : this( - featureViewSync: json_['featureViewSync'] as core.String?, + score: (json_['score'] as core.num?)?.toDouble(), ); core.Map toJson() => { - if (featureViewSync != null) 'featureViewSync': featureViewSync!, + if (score != null) 'score': score!, }; } -/// The storage details for TFRecord output content. -class GoogleCloudAiplatformV1TFRecordDestination { - /// Google Cloud Storage location. +/// Results for tool call valid metric. +class GoogleCloudAiplatformV1ToolCallValidResults { + /// Tool call valid metric values. /// - /// Required. - GoogleCloudAiplatformV1GcsDestination? gcsDestination; + /// Output only. + core.List? + toolCallValidMetricValues; - GoogleCloudAiplatformV1TFRecordDestination({ - this.gcsDestination, + GoogleCloudAiplatformV1ToolCallValidResults({ + this.toolCallValidMetricValues, }); - GoogleCloudAiplatformV1TFRecordDestination.fromJson(core.Map json_) + GoogleCloudAiplatformV1ToolCallValidResults.fromJson(core.Map json_) : this( - gcsDestination: json_.containsKey('gcsDestination') - ? GoogleCloudAiplatformV1GcsDestination.fromJson( - json_['gcsDestination'] - as core.Map) - : null, + toolCallValidMetricValues: + (json_['toolCallValidMetricValues'] as core.List?) + ?.map((value) => + GoogleCloudAiplatformV1ToolCallValidMetricValue.fromJson( + value as core.Map)) + .toList(), ); core.Map toJson() => { - if (gcsDestination != null) 'gcsDestination': gcsDestination!, + if (toolCallValidMetricValues != null) + 'toolCallValidMetricValues': toolCallValidMetricValues!, }; } -/// A tensor value type. -class GoogleCloudAiplatformV1Tensor { - /// Type specific representations that make it easy to create tensor protos in - /// all languages. - /// - /// Only the representation corresponding to "dtype" can be set. The values - /// hold the flattened representation of the tensor in row major order. BOOL - core.List? boolVal; - - /// STRING - core.List? bytesVal; - - /// DOUBLE - core.List? doubleVal; - - /// The data type of tensor. - /// Possible string values are: - /// - "DATA_TYPE_UNSPECIFIED" : Not a legal value for DataType. Used to - /// indicate a DataType field has not been set. - /// - "BOOL" : Data types that all computation devices are expected to be - /// capable to support. - /// - "STRING" - /// - "FLOAT" - /// - "DOUBLE" - /// - "INT8" - /// - "INT16" - /// - "INT32" - /// - "INT64" - /// - "UINT8" - /// - "UINT16" - /// - "UINT32" - /// - "UINT64" - core.String? dtype; - - /// FLOAT - core.List? floatVal; - - /// INT64 - core.List? int64Val; - - /// INT_8 INT_16 INT_32 - core.List? intVal; - - /// A list of tensor values. - core.List? listVal; +/// Spec for tool call valid metric. +typedef GoogleCloudAiplatformV1ToolCallValidSpec = $Empty; - /// Shape of the tensor. - core.List? shape; +/// Tool config. 
+/// +/// This config is shared for all tools provided in the request. +class GoogleCloudAiplatformV1ToolConfig { + /// Function calling config. + /// + /// Optional. + GoogleCloudAiplatformV1FunctionCallingConfig? functionCallingConfig; - /// STRING - core.List? stringVal; + GoogleCloudAiplatformV1ToolConfig({ + this.functionCallingConfig, + }); - /// A map of string to tensor. - core.Map? structVal; + GoogleCloudAiplatformV1ToolConfig.fromJson(core.Map json_) + : this( + functionCallingConfig: json_.containsKey('functionCallingConfig') + ? GoogleCloudAiplatformV1FunctionCallingConfig.fromJson( + json_['functionCallingConfig'] + as core.Map) + : null, + ); - /// Serialized raw tensor content. - core.String? tensorVal; - core.List get tensorValAsBytes => convert.base64.decode(tensorVal!); + core.Map toJson() => { + if (functionCallingConfig != null) + 'functionCallingConfig': functionCallingConfig!, + }; +} - set tensorValAsBytes(core.List bytes_) { - tensorVal = - convert.base64.encode(bytes_).replaceAll('/', '_').replaceAll('+', '-'); - } +/// GoogleSearch tool type. +/// +/// Tool to support Google Search in Model. Powered by Google. +typedef GoogleCloudAiplatformV1ToolGoogleSearch = $Empty; - /// UINT64 - core.List? uint64Val; +/// Input for tool name match metric. +class GoogleCloudAiplatformV1ToolNameMatchInput { + /// Repeated tool name match instances. + /// + /// Required. + core.List? instances; - /// UINT8 UINT16 UINT32 - core.List? uintVal; + /// Spec for tool name match metric. + /// + /// Required. + GoogleCloudAiplatformV1ToolNameMatchSpec? metricSpec; - GoogleCloudAiplatformV1Tensor({ - this.boolVal, - this.bytesVal, - this.doubleVal, - this.dtype, - this.floatVal, - this.int64Val, - this.intVal, - this.listVal, - this.shape, - this.stringVal, - this.structVal, - this.tensorVal, - this.uint64Val, - this.uintVal, + GoogleCloudAiplatformV1ToolNameMatchInput({ + this.instances, + this.metricSpec, }); - GoogleCloudAiplatformV1Tensor.fromJson(core.Map json_) + GoogleCloudAiplatformV1ToolNameMatchInput.fromJson(core.Map json_) : this( - boolVal: (json_['boolVal'] as core.List?) - ?.map((value) => value as core.bool) - .toList(), - bytesVal: (json_['bytesVal'] as core.List?) - ?.map((value) => value as core.String) - .toList(), - doubleVal: (json_['doubleVal'] as core.List?) - ?.map((value) => (value as core.num).toDouble()) - .toList(), - dtype: json_['dtype'] as core.String?, - floatVal: (json_['floatVal'] as core.List?) - ?.map((value) => (value as core.num).toDouble()) - .toList(), - int64Val: (json_['int64Val'] as core.List?) - ?.map((value) => value as core.String) - .toList(), - intVal: (json_['intVal'] as core.List?) - ?.map((value) => value as core.int) - .toList(), - listVal: (json_['listVal'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1Tensor.fromJson( - value as core.Map)) - .toList(), - shape: (json_['shape'] as core.List?) - ?.map((value) => value as core.String) - .toList(), - stringVal: (json_['stringVal'] as core.List?) - ?.map((value) => value as core.String) - .toList(), - structVal: - (json_['structVal'] as core.Map?)?.map( - (key, value) => core.MapEntry( - key, - GoogleCloudAiplatformV1Tensor.fromJson( - value as core.Map), - ), - ), - tensorVal: json_['tensorVal'] as core.String?, - uint64Val: (json_['uint64Val'] as core.List?) - ?.map((value) => value as core.String) - .toList(), - uintVal: (json_['uintVal'] as core.List?) - ?.map((value) => value as core.int) + instances: (json_['instances'] as core.List?) 
+ ?.map((value) => + GoogleCloudAiplatformV1ToolNameMatchInstance.fromJson( + value as core.Map)) .toList(), + metricSpec: json_.containsKey('metricSpec') + ? GoogleCloudAiplatformV1ToolNameMatchSpec.fromJson( + json_['metricSpec'] as core.Map) + : null, ); core.Map toJson() => { - if (boolVal != null) 'boolVal': boolVal!, - if (bytesVal != null) 'bytesVal': bytesVal!, - if (doubleVal != null) 'doubleVal': doubleVal!, - if (dtype != null) 'dtype': dtype!, - if (floatVal != null) 'floatVal': floatVal!, - if (int64Val != null) 'int64Val': int64Val!, - if (intVal != null) 'intVal': intVal!, - if (listVal != null) 'listVal': listVal!, - if (shape != null) 'shape': shape!, - if (stringVal != null) 'stringVal': stringVal!, - if (structVal != null) 'structVal': structVal!, - if (tensorVal != null) 'tensorVal': tensorVal!, - if (uint64Val != null) 'uint64Val': uint64Val!, - if (uintVal != null) 'uintVal': uintVal!, + if (instances != null) 'instances': instances!, + if (metricSpec != null) 'metricSpec': metricSpec!, }; } -/// Tensorboard is a physical database that stores users' training metrics. -/// -/// A default Tensorboard is provided in each region of a Google Cloud project. -/// If needed users can also create extra Tensorboards in their projects. -class GoogleCloudAiplatformV1Tensorboard { - /// Consumer project Cloud Storage path prefix used to store blob data, which - /// can either be a bucket or directory. - /// - /// Does not end with a '/'. - /// - /// Output only. - core.String? blobStoragePathPrefix; +/// Spec for tool name match instance. +typedef GoogleCloudAiplatformV1ToolNameMatchInstance = $Instance00; - /// Timestamp when this Tensorboard was created. +/// Tool name match metric value for an instance. +class GoogleCloudAiplatformV1ToolNameMatchMetricValue { + /// Tool name match score. /// /// Output only. - core.String? createTime; - - /// Description of this Tensorboard. - core.String? description; + core.double? score; - /// User provided name of this Tensorboard. - /// - /// Required. - core.String? displayName; + GoogleCloudAiplatformV1ToolNameMatchMetricValue({ + this.score, + }); - /// Customer-managed encryption key spec for a Tensorboard. - /// - /// If set, this Tensorboard and all sub-resources of this Tensorboard will be - /// secured by this key. - GoogleCloudAiplatformV1EncryptionSpec? encryptionSpec; + GoogleCloudAiplatformV1ToolNameMatchMetricValue.fromJson(core.Map json_) + : this( + score: (json_['score'] as core.num?)?.toDouble(), + ); - /// Used to perform a consistent read-modify-write updates. - /// - /// If not set, a blind "overwrite" update happens. - core.String? etag; + core.Map toJson() => { + if (score != null) 'score': score!, + }; +} - /// Used to indicate if the TensorBoard instance is the default one. +/// Results for tool name match metric. +class GoogleCloudAiplatformV1ToolNameMatchResults { + /// Tool name match metric values. /// - /// Each project & region can have at most one default TensorBoard instance. - /// Creation of a default TensorBoard instance and updating an existing - /// TensorBoard instance to be default will mark all other TensorBoard - /// instances (if any) as non default. - core.bool? isDefault; + /// Output only. + core.List? + toolNameMatchMetricValues; - /// The labels with user-defined metadata to organize your Tensorboards. - /// - /// Label keys and values can be no longer than 64 characters (Unicode - /// codepoints), can only contain lowercase letters, numeric characters, - /// underscores and dashes. 
International characters are allowed. No more than - /// 64 user labels can be associated with one Tensorboard (System labels are - /// excluded). See https://goo.gl/xmQnxf for more information and examples of - /// labels. System reserved label keys are prefixed with - /// "aiplatform.googleapis.com/" and are immutable. - core.Map? labels; + GoogleCloudAiplatformV1ToolNameMatchResults({ + this.toolNameMatchMetricValues, + }); - /// Name of the Tensorboard. - /// - /// Format: - /// `projects/{project}/locations/{location}/tensorboards/{tensorboard}` - /// - /// Output only. - core.String? name; + GoogleCloudAiplatformV1ToolNameMatchResults.fromJson(core.Map json_) + : this( + toolNameMatchMetricValues: + (json_['toolNameMatchMetricValues'] as core.List?) + ?.map((value) => + GoogleCloudAiplatformV1ToolNameMatchMetricValue.fromJson( + value as core.Map)) + .toList(), + ); - /// The number of Runs stored in this Tensorboard. - /// - /// Output only. - core.int? runCount; + core.Map toJson() => { + if (toolNameMatchMetricValues != null) + 'toolNameMatchMetricValues': toolNameMatchMetricValues!, + }; +} - /// Reserved for future use. - /// - /// Output only. - core.bool? satisfiesPzi; +/// Spec for tool name match metric. +typedef GoogleCloudAiplatformV1ToolNameMatchSpec = $Empty; - /// Reserved for future use. +/// Input for tool parameter key value match metric. +class GoogleCloudAiplatformV1ToolParameterKVMatchInput { + /// Repeated tool parameter key value match instances. /// - /// Output only. - core.bool? satisfiesPzs; + /// Required. + core.List? instances; - /// Timestamp when this Tensorboard was last updated. + /// Spec for tool parameter key value match metric. /// - /// Output only. - core.String? updateTime; + /// Required. + GoogleCloudAiplatformV1ToolParameterKVMatchSpec? metricSpec; - GoogleCloudAiplatformV1Tensorboard({ - this.blobStoragePathPrefix, - this.createTime, - this.description, - this.displayName, - this.encryptionSpec, - this.etag, - this.isDefault, - this.labels, - this.name, - this.runCount, - this.satisfiesPzi, - this.satisfiesPzs, - this.updateTime, + GoogleCloudAiplatformV1ToolParameterKVMatchInput({ + this.instances, + this.metricSpec, }); - GoogleCloudAiplatformV1Tensorboard.fromJson(core.Map json_) + GoogleCloudAiplatformV1ToolParameterKVMatchInput.fromJson(core.Map json_) : this( - blobStoragePathPrefix: json_['blobStoragePathPrefix'] as core.String?, - createTime: json_['createTime'] as core.String?, - description: json_['description'] as core.String?, - displayName: json_['displayName'] as core.String?, - encryptionSpec: json_.containsKey('encryptionSpec') - ? GoogleCloudAiplatformV1EncryptionSpec.fromJson( - json_['encryptionSpec'] - as core.Map) + instances: (json_['instances'] as core.List?) + ?.map((value) => + GoogleCloudAiplatformV1ToolParameterKVMatchInstance.fromJson( + value as core.Map)) + .toList(), + metricSpec: json_.containsKey('metricSpec') + ? 
GoogleCloudAiplatformV1ToolParameterKVMatchSpec.fromJson( + json_['metricSpec'] as core.Map) : null, - etag: json_['etag'] as core.String?, - isDefault: json_['isDefault'] as core.bool?, - labels: - (json_['labels'] as core.Map?)?.map( - (key, value) => core.MapEntry( - key, - value as core.String, - ), - ), - name: json_['name'] as core.String?, - runCount: json_['runCount'] as core.int?, - satisfiesPzi: json_['satisfiesPzi'] as core.bool?, - satisfiesPzs: json_['satisfiesPzs'] as core.bool?, - updateTime: json_['updateTime'] as core.String?, ); core.Map toJson() => { - if (blobStoragePathPrefix != null) - 'blobStoragePathPrefix': blobStoragePathPrefix!, - if (createTime != null) 'createTime': createTime!, - if (description != null) 'description': description!, - if (displayName != null) 'displayName': displayName!, - if (encryptionSpec != null) 'encryptionSpec': encryptionSpec!, - if (etag != null) 'etag': etag!, - if (isDefault != null) 'isDefault': isDefault!, - if (labels != null) 'labels': labels!, - if (name != null) 'name': name!, - if (runCount != null) 'runCount': runCount!, - if (satisfiesPzi != null) 'satisfiesPzi': satisfiesPzi!, - if (satisfiesPzs != null) 'satisfiesPzs': satisfiesPzs!, - if (updateTime != null) 'updateTime': updateTime!, + if (instances != null) 'instances': instances!, + if (metricSpec != null) 'metricSpec': metricSpec!, }; } -/// One blob (e.g, image, graph) viewable on a blob metric plot. -class GoogleCloudAiplatformV1TensorboardBlob { - /// The bytes of the blob is not present unless it's returned by the - /// ReadTensorboardBlobData endpoint. - /// - /// Optional. - core.String? data; - core.List get dataAsBytes => convert.base64.decode(data!); - - set dataAsBytes(core.List bytes_) { - data = - convert.base64.encode(bytes_).replaceAll('/', '_').replaceAll('+', '-'); - } - - /// A URI safe key uniquely identifying a blob. - /// - /// Can be used to locate the blob stored in the Cloud Storage bucket of the - /// consumer project. +/// Spec for tool parameter key value match instance. +typedef GoogleCloudAiplatformV1ToolParameterKVMatchInstance = $Instance00; + +/// Tool parameter key value match metric value for an instance. +class GoogleCloudAiplatformV1ToolParameterKVMatchMetricValue { + /// Tool parameter key value match score. /// /// Output only. - core.String? id; + core.double? score; - GoogleCloudAiplatformV1TensorboardBlob({ - this.data, - this.id, + GoogleCloudAiplatformV1ToolParameterKVMatchMetricValue({ + this.score, }); - GoogleCloudAiplatformV1TensorboardBlob.fromJson(core.Map json_) + GoogleCloudAiplatformV1ToolParameterKVMatchMetricValue.fromJson( + core.Map json_) : this( - data: json_['data'] as core.String?, - id: json_['id'] as core.String?, + score: (json_['score'] as core.num?)?.toDouble(), ); core.Map toJson() => { - if (data != null) 'data': data!, - if (id != null) 'id': id!, + if (score != null) 'score': score!, }; } -/// One point viewable on a blob metric plot, but mostly just a wrapper message -/// to work around repeated fields can't be used directly within `oneof` fields. -class GoogleCloudAiplatformV1TensorboardBlobSequence { - /// List of blobs contained within the sequence. - core.List? values; +/// Results for tool parameter key value match metric. +class GoogleCloudAiplatformV1ToolParameterKVMatchResults { + /// Tool parameter key value match metric values. + /// + /// Output only. + core.List? 
+ toolParameterKvMatchMetricValues; - GoogleCloudAiplatformV1TensorboardBlobSequence({ - this.values, + GoogleCloudAiplatformV1ToolParameterKVMatchResults({ + this.toolParameterKvMatchMetricValues, }); - GoogleCloudAiplatformV1TensorboardBlobSequence.fromJson(core.Map json_) + GoogleCloudAiplatformV1ToolParameterKVMatchResults.fromJson(core.Map json_) : this( - values: (json_['values'] as core.List?) - ?.map((value) => GoogleCloudAiplatformV1TensorboardBlob.fromJson( - value as core.Map)) - .toList(), + toolParameterKvMatchMetricValues: + (json_['toolParameterKvMatchMetricValues'] as core.List?) + ?.map((value) => + GoogleCloudAiplatformV1ToolParameterKVMatchMetricValue + .fromJson( + value as core.Map)) + .toList(), ); core.Map toJson() => { - if (values != null) 'values': values!, + if (toolParameterKvMatchMetricValues != null) + 'toolParameterKvMatchMetricValues': toolParameterKvMatchMetricValues!, }; } -/// A TensorboardExperiment is a group of TensorboardRuns, that are typically -/// the results of a training job run, in a Tensorboard. -class GoogleCloudAiplatformV1TensorboardExperiment { - /// Timestamp when this TensorboardExperiment was created. +/// Spec for tool parameter key value match metric. +class GoogleCloudAiplatformV1ToolParameterKVMatchSpec { + /// Whether to use STRICT string match on parameter values. /// - /// Output only. - core.String? createTime; - - /// Description of this TensorboardExperiment. - core.String? description; - - /// User provided name of this TensorboardExperiment. - core.String? displayName; + /// Optional. + core.bool? useStrictStringMatch; - /// Used to perform consistent read-modify-write updates. - /// - /// If not set, a blind "overwrite" update happens. - core.String? etag; + GoogleCloudAiplatformV1ToolParameterKVMatchSpec({ + this.useStrictStringMatch, + }); - /// The labels with user-defined metadata to organize your - /// TensorboardExperiment. - /// - /// Label keys and values cannot be longer than 64 characters (Unicode - /// codepoints), can only contain lowercase letters, numeric characters, - /// underscores and dashes. International characters are allowed. No more than - /// 64 user labels can be associated with one Dataset (System labels are - /// excluded). See https://goo.gl/xmQnxf for more information and examples of - /// labels. System reserved label keys are prefixed with - /// `aiplatform.googleapis.com/` and are immutable. The following system - /// labels exist for each Dataset: * - /// `aiplatform.googleapis.com/dataset_metadata_schema`: output only. Its - /// value is the metadata_schema's title. - core.Map? labels; + GoogleCloudAiplatformV1ToolParameterKVMatchSpec.fromJson(core.Map json_) + : this( + useStrictStringMatch: json_['useStrictStringMatch'] as core.bool?, + ); - /// Name of the TensorboardExperiment. - /// - /// Format: - /// `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}` - /// - /// Output only. - core.String? name; + core.Map toJson() => { + if (useStrictStringMatch != null) + 'useStrictStringMatch': useStrictStringMatch!, + }; +} - /// Source of the TensorboardExperiment. - /// - /// Example: a custom training job. +/// Input for tool parameter key match metric. +class GoogleCloudAiplatformV1ToolParameterKeyMatchInput { + /// Repeated tool parameter key match instances. /// - /// Immutable. - core.String? source; + /// Required. + core.List? instances; - /// Timestamp when this TensorboardExperiment was last updated. 
+ /// Spec for tool parameter key match metric. /// - /// Output only. - core.String? updateTime; + /// Required. + GoogleCloudAiplatformV1ToolParameterKeyMatchSpec? metricSpec; - GoogleCloudAiplatformV1TensorboardExperiment({ - this.createTime, - this.description, - this.displayName, - this.etag, - this.labels, - this.name, - this.source, - this.updateTime, + GoogleCloudAiplatformV1ToolParameterKeyMatchInput({ + this.instances, + this.metricSpec, }); - GoogleCloudAiplatformV1TensorboardExperiment.fromJson(core.Map json_) + GoogleCloudAiplatformV1ToolParameterKeyMatchInput.fromJson(core.Map json_) : this( - createTime: json_['createTime'] as core.String?, - description: json_['description'] as core.String?, - displayName: json_['displayName'] as core.String?, - etag: json_['etag'] as core.String?, - labels: - (json_['labels'] as core.Map?)?.map( - (key, value) => core.MapEntry( - key, - value as core.String, - ), - ), - name: json_['name'] as core.String?, - source: json_['source'] as core.String?, - updateTime: json_['updateTime'] as core.String?, + instances: (json_['instances'] as core.List?) + ?.map((value) => + GoogleCloudAiplatformV1ToolParameterKeyMatchInstance.fromJson( + value as core.Map)) + .toList(), + metricSpec: json_.containsKey('metricSpec') + ? GoogleCloudAiplatformV1ToolParameterKeyMatchSpec.fromJson( + json_['metricSpec'] as core.Map) + : null, ); core.Map toJson() => { - if (createTime != null) 'createTime': createTime!, - if (description != null) 'description': description!, - if (displayName != null) 'displayName': displayName!, - if (etag != null) 'etag': etag!, - if (labels != null) 'labels': labels!, - if (name != null) 'name': name!, - if (source != null) 'source': source!, - if (updateTime != null) 'updateTime': updateTime!, + if (instances != null) 'instances': instances!, + if (metricSpec != null) 'metricSpec': metricSpec!, }; } -/// TensorboardRun maps to a specific execution of a training job with a given -/// set of hyperparameter values, model definition, dataset, etc -class GoogleCloudAiplatformV1TensorboardRun { - /// Timestamp when this TensorboardRun was created. - /// - /// Output only. - core.String? createTime; - - /// Description of this TensorboardRun. - core.String? description; +/// Spec for tool parameter key match instance. +typedef GoogleCloudAiplatformV1ToolParameterKeyMatchInstance = $Instance00; - /// User provided name of this TensorboardRun. - /// - /// This value must be unique among all TensorboardRuns belonging to the same - /// parent TensorboardExperiment. +/// Tool parameter key match metric value for an instance. +class GoogleCloudAiplatformV1ToolParameterKeyMatchMetricValue { + /// Tool parameter key match score. /// - /// Required. - core.String? displayName; + /// Output only. + core.double? score; - /// Used to perform a consistent read-modify-write updates. - /// - /// If not set, a blind "overwrite" update happens. - core.String? etag; + GoogleCloudAiplatformV1ToolParameterKeyMatchMetricValue({ + this.score, + }); - /// The labels with user-defined metadata to organize your TensorboardRuns. - /// - /// This field will be used to filter and visualize Runs in the Tensorboard - /// UI. For example, a Vertex AI training job can set a label - /// aiplatform.googleapis.com/training_job_id=xxxxx to all the runs created - /// within that job. An end user can set a label experiment_id=xxxxx for all - /// the runs produced in a Jupyter notebook. 
These runs can be grouped by a - /// label value and visualized together in the Tensorboard UI. Label keys and - /// values can be no longer than 64 characters (Unicode codepoints), can only - /// contain lowercase letters, numeric characters, underscores and dashes. - /// International characters are allowed. No more than 64 user labels can be - /// associated with one TensorboardRun (System labels are excluded). See - /// https://goo.gl/xmQnxf for more information and examples of labels. System - /// reserved label keys are prefixed with "aiplatform.googleapis.com/" and are - /// immutable. - core.Map? labels; + GoogleCloudAiplatformV1ToolParameterKeyMatchMetricValue.fromJson( + core.Map json_) + : this( + score: (json_['score'] as core.num?)?.toDouble(), + ); - /// Name of the TensorboardRun. - /// - /// Format: - /// `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}/runs/{run}` - /// - /// Output only. - core.String? name; + core.Map toJson() => { + if (score != null) 'score': score!, + }; +} - /// Timestamp when this TensorboardRun was last updated. +/// Results for tool parameter key match metric. +class GoogleCloudAiplatformV1ToolParameterKeyMatchResults { + /// Tool parameter key match metric values. /// /// Output only. - core.String? updateTime; + core.List? + toolParameterKeyMatchMetricValues; - GoogleCloudAiplatformV1TensorboardRun({ - this.createTime, - this.description, - this.displayName, - this.etag, - this.labels, - this.name, - this.updateTime, + GoogleCloudAiplatformV1ToolParameterKeyMatchResults({ + this.toolParameterKeyMatchMetricValues, }); - GoogleCloudAiplatformV1TensorboardRun.fromJson(core.Map json_) + GoogleCloudAiplatformV1ToolParameterKeyMatchResults.fromJson(core.Map json_) : this( - createTime: json_['createTime'] as core.String?, - description: json_['description'] as core.String?, - displayName: json_['displayName'] as core.String?, - etag: json_['etag'] as core.String?, - labels: - (json_['labels'] as core.Map?)?.map( - (key, value) => core.MapEntry( - key, - value as core.String, - ), - ), - name: json_['name'] as core.String?, - updateTime: json_['updateTime'] as core.String?, + toolParameterKeyMatchMetricValues: + (json_['toolParameterKeyMatchMetricValues'] as core.List?) + ?.map((value) => + GoogleCloudAiplatformV1ToolParameterKeyMatchMetricValue + .fromJson( + value as core.Map)) + .toList(), ); core.Map toJson() => { - if (createTime != null) 'createTime': createTime!, - if (description != null) 'description': description!, - if (displayName != null) 'displayName': displayName!, - if (etag != null) 'etag': etag!, - if (labels != null) 'labels': labels!, - if (name != null) 'name': name!, - if (updateTime != null) 'updateTime': updateTime!, + if (toolParameterKeyMatchMetricValues != null) + 'toolParameterKeyMatchMetricValues': + toolParameterKeyMatchMetricValues!, }; } -/// One point viewable on a tensor metric plot. -class GoogleCloudAiplatformV1TensorboardTensor { - /// Serialized form of - /// https://github.com/tensorflow/tensorflow/blob/master/tensorflow/core/framework/tensor.proto - /// - /// Required. - core.String? value; - core.List get valueAsBytes => convert.base64.decode(value!); - - set valueAsBytes(core.List bytes_) { - value = - convert.base64.encode(bytes_).replaceAll('/', '_').replaceAll('+', '-'); - } +/// Spec for tool parameter key match metric. +typedef GoogleCloudAiplatformV1ToolParameterKeyMatchSpec = $Empty; - /// Version number of TensorProto used to serialize value. 
- /// - /// Optional. - core.int? versionNumber; +/// CMLE training config. +/// +/// For every active learning labeling iteration, system will train a machine +/// learning model on CMLE. The trained model will be used by data sampling +/// algorithm to select DataItems. +class GoogleCloudAiplatformV1TrainingConfig { + /// The timeout hours for the CMLE training job, expressed in milli hours i.e. + /// 1,000 value in this field means 1 hour. + core.String? timeoutTrainingMilliHours; - GoogleCloudAiplatformV1TensorboardTensor({ - this.value, - this.versionNumber, + GoogleCloudAiplatformV1TrainingConfig({ + this.timeoutTrainingMilliHours, }); - GoogleCloudAiplatformV1TensorboardTensor.fromJson(core.Map json_) + GoogleCloudAiplatformV1TrainingConfig.fromJson(core.Map json_) : this( - value: json_['value'] as core.String?, - versionNumber: json_['versionNumber'] as core.int?, + timeoutTrainingMilliHours: + json_['timeoutTrainingMilliHours'] as core.String?, ); core.Map toJson() => { - if (value != null) 'value': value!, - if (versionNumber != null) 'versionNumber': versionNumber!, + if (timeoutTrainingMilliHours != null) + 'timeoutTrainingMilliHours': timeoutTrainingMilliHours!, }; } -/// TensorboardTimeSeries maps to times series produced in training runs -class GoogleCloudAiplatformV1TensorboardTimeSeries { - /// Timestamp when this TensorboardTimeSeries was created. +/// The TrainingPipeline orchestrates tasks associated with training a Model. +/// +/// It always executes the training task, and optionally may also export data +/// from Vertex AI's Dataset which becomes the training input, upload the Model +/// to Vertex AI, and evaluate the Model. +class GoogleCloudAiplatformV1TrainingPipeline { + /// Time when the TrainingPipeline was created. /// /// Output only. core.String? createTime; - /// Description of this TensorboardTimeSeries. - core.String? description; - - /// User provided name of this TensorboardTimeSeries. - /// - /// This value should be unique among all TensorboardTimeSeries resources - /// belonging to the same TensorboardRun resource (parent resource). + /// The user-defined name of this TrainingPipeline. /// /// Required. core.String? displayName; - /// Used to perform a consistent read-modify-write updates. + /// Customer-managed encryption key spec for a TrainingPipeline. /// - /// If not set, a blind "overwrite" update happens. - core.String? etag; + /// If set, this TrainingPipeline will be secured by this key. Note: Model + /// trained by this TrainingPipeline is also secured by this key if + /// model_to_upload is not set separately. + GoogleCloudAiplatformV1EncryptionSpec? encryptionSpec; - /// Scalar, Tensor, or Blob metadata for this TensorboardTimeSeries. + /// Time when the TrainingPipeline entered any of the following states: + /// `PIPELINE_STATE_SUCCEEDED`, `PIPELINE_STATE_FAILED`, + /// `PIPELINE_STATE_CANCELLED`. /// /// Output only. - GoogleCloudAiplatformV1TensorboardTimeSeriesMetadata? metadata; + core.String? endTime; - /// Name of the TensorboardTimeSeries. + /// Only populated when the pipeline's state is `PIPELINE_STATE_FAILED` or + /// `PIPELINE_STATE_CANCELLED`. /// /// Output only. - core.String? name; + GoogleRpcStatus? error; - /// Data of the current plugin, with the size limited to 65KB. - core.String? pluginData; - core.List get pluginDataAsBytes => - convert.base64.decode(pluginData!); + /// Specifies Vertex AI owned input data that may be used for training the + /// Model. 
+ /// + /// The TrainingPipeline's training_task_definition should make clear whether + /// this config is used and if there are any special requirements on how it + /// should be filled. If nothing about this config is mentioned in the + /// training_task_definition, then it should be assumed that the + /// TrainingPipeline does not depend on this configuration. + GoogleCloudAiplatformV1InputDataConfig? inputDataConfig; - set pluginDataAsBytes(core.List bytes_) { - pluginData = - convert.base64.encode(bytes_).replaceAll('/', '_').replaceAll('+', '-'); - } + /// The labels with user-defined metadata to organize TrainingPipelines. + /// + /// Label keys and values can be no longer than 64 characters (Unicode + /// codepoints), can only contain lowercase letters, numeric characters, + /// underscores and dashes. International characters are allowed. See + /// https://goo.gl/xmQnxf for more information and examples of labels. + core.Map? labels; - /// Name of the plugin this time series pertain to. + /// The ID to use for the uploaded Model, which will become the final + /// component of the model resource name. /// - /// Such as Scalar, Tensor, Blob + /// This value may be up to 63 characters, and valid characters are + /// `[a-z0-9_-]`. The first character cannot be a number or hyphen. /// - /// Immutable. - core.String? pluginName; + /// Optional. + core.String? modelId; - /// Timestamp when this TensorboardTimeSeries was last updated. + /// Describes the Model that may be uploaded (via ModelService.UploadModel) by + /// this TrainingPipeline. + /// + /// The TrainingPipeline's training_task_definition should make clear whether + /// this Model description should be populated, and if there are any special + /// requirements regarding how it should be filled. If nothing is mentioned in + /// the training_task_definition, then it should be assumed that this field + /// should not be filled and the training task either uploads the Model + /// without a need of this information, or that training task does not support + /// uploading a Model as part of the pipeline. When the Pipeline's state + /// becomes `PIPELINE_STATE_SUCCEEDED` and the trained Model had been uploaded + /// into Vertex AI, then the model_to_upload's resource name is populated. The + /// Model is always uploaded into the Project and Location in which this + /// pipeline is. + GoogleCloudAiplatformV1Model? modelToUpload; + + /// Resource name of the TrainingPipeline. /// /// Output only. - core.String? updateTime; + core.String? name; - /// Type of TensorboardTimeSeries value. + /// When specify this field, the `model_to_upload` will not be uploaded as a + /// new model, instead, it will become a new version of this `parent_model`. /// - /// Required. Immutable. + /// Optional. + core.String? parentModel; + + /// Time when the TrainingPipeline for the first time entered the + /// `PIPELINE_STATE_RUNNING` state. + /// + /// Output only. + core.String? startTime; + + /// The detailed state of the pipeline. + /// + /// Output only. /// Possible string values are: - /// - "VALUE_TYPE_UNSPECIFIED" : The value type is unspecified. - /// - "SCALAR" : Used for TensorboardTimeSeries that is a list of scalars. - /// E.g. accuracy of a model over epochs/time. - /// - "TENSOR" : Used for TensorboardTimeSeries that is a list of tensors. - /// E.g. histograms of weights of layer in a model over epoch/time. - /// - "BLOB_SEQUENCE" : Used for TensorboardTimeSeries that is a list of blob - /// sequences. E.g. 
set of sample images with labels over epochs/time. - core.String? valueType; + /// - "PIPELINE_STATE_UNSPECIFIED" : The pipeline state is unspecified. + /// - "PIPELINE_STATE_QUEUED" : The pipeline has been created or resumed, and + /// processing has not yet begun. + /// - "PIPELINE_STATE_PENDING" : The service is preparing to run the pipeline. + /// - "PIPELINE_STATE_RUNNING" : The pipeline is in progress. + /// - "PIPELINE_STATE_SUCCEEDED" : The pipeline completed successfully. + /// - "PIPELINE_STATE_FAILED" : The pipeline failed. + /// - "PIPELINE_STATE_CANCELLING" : The pipeline is being cancelled. From this + /// state, the pipeline may only go to either PIPELINE_STATE_SUCCEEDED, + /// PIPELINE_STATE_FAILED or PIPELINE_STATE_CANCELLED. + /// - "PIPELINE_STATE_CANCELLED" : The pipeline has been cancelled. + /// - "PIPELINE_STATE_PAUSED" : The pipeline has been stopped, and can be + /// resumed. + core.String? state; - GoogleCloudAiplatformV1TensorboardTimeSeries({ + /// A Google Cloud Storage path to the YAML file that defines the training + /// task which is responsible for producing the model artifact, and may also + /// include additional auxiliary work. + /// + /// The definition files that can be used here are found in + /// gs://google-cloud-aiplatform/schema/trainingjob/definition/. Note: The URI + /// given on output will be immutable and probably different, including the + /// URI scheme, than the one given on input. The output URI will point to a + /// location where the user only has a read access. + /// + /// Required. + core.String? trainingTaskDefinition; + + /// The training task's parameter(s), as specified in the + /// training_task_definition's `inputs`. + /// + /// Required. + /// + /// The values for Object must be JSON objects. It can consist of `num`, + /// `String`, `bool` and `null` as well as `Map` and `List` values. + core.Object? trainingTaskInputs; + + /// The metadata information as specified in the training_task_definition's + /// `metadata`. + /// + /// This metadata is an auxiliary runtime and final information about the + /// training task. While the pipeline is running this information is populated + /// only at a best effort basis. Only present if the pipeline's + /// training_task_definition contains `metadata` object. + /// + /// Output only. + /// + /// The values for Object must be JSON objects. It can consist of `num`, + /// `String`, `bool` and `null` as well as `Map` and `List` values. + core.Object? trainingTaskMetadata; + + /// Time when the TrainingPipeline was most recently updated. + /// + /// Output only. + core.String? updateTime; + + GoogleCloudAiplatformV1TrainingPipeline({ this.createTime, - this.description, this.displayName, - this.etag, - this.metadata, + this.encryptionSpec, + this.endTime, + this.error, + this.inputDataConfig, + this.labels, + this.modelId, + this.modelToUpload, this.name, - this.pluginData, - this.pluginName, + this.parentModel, + this.startTime, + this.state, + this.trainingTaskDefinition, + this.trainingTaskInputs, + this.trainingTaskMetadata, this.updateTime, - this.valueType, }); - GoogleCloudAiplatformV1TensorboardTimeSeries.fromJson(core.Map json_) + GoogleCloudAiplatformV1TrainingPipeline.fromJson(core.Map json_) : this( createTime: json_['createTime'] as core.String?, - description: json_['description'] as core.String?, displayName: json_['displayName'] as core.String?, - etag: json_['etag'] as core.String?, - metadata: json_.containsKey('metadata') - ? 
GoogleCloudAiplatformV1TensorboardTimeSeriesMetadata.fromJson( - json_['metadata'] as core.Map) + encryptionSpec: json_.containsKey('encryptionSpec') + ? GoogleCloudAiplatformV1EncryptionSpec.fromJson( + json_['encryptionSpec'] + as core.Map) + : null, + endTime: json_['endTime'] as core.String?, + error: json_.containsKey('error') + ? GoogleRpcStatus.fromJson( + json_['error'] as core.Map) + : null, + inputDataConfig: json_.containsKey('inputDataConfig') + ? GoogleCloudAiplatformV1InputDataConfig.fromJson( + json_['inputDataConfig'] + as core.Map) + : null, + labels: + (json_['labels'] as core.Map?)?.map( + (key, value) => core.MapEntry( + key, + value as core.String, + ), + ), + modelId: json_['modelId'] as core.String?, + modelToUpload: json_.containsKey('modelToUpload') + ? GoogleCloudAiplatformV1Model.fromJson( + json_['modelToUpload'] as core.Map) : null, name: json_['name'] as core.String?, - pluginData: json_['pluginData'] as core.String?, - pluginName: json_['pluginName'] as core.String?, + parentModel: json_['parentModel'] as core.String?, + startTime: json_['startTime'] as core.String?, + state: json_['state'] as core.String?, + trainingTaskDefinition: + json_['trainingTaskDefinition'] as core.String?, + trainingTaskInputs: json_['trainingTaskInputs'], + trainingTaskMetadata: json_['trainingTaskMetadata'], updateTime: json_['updateTime'] as core.String?, - valueType: json_['valueType'] as core.String?, ); core.Map toJson() => { if (createTime != null) 'createTime': createTime!, - if (description != null) 'description': description!, if (displayName != null) 'displayName': displayName!, - if (etag != null) 'etag': etag!, - if (metadata != null) 'metadata': metadata!, + if (encryptionSpec != null) 'encryptionSpec': encryptionSpec!, + if (endTime != null) 'endTime': endTime!, + if (error != null) 'error': error!, + if (inputDataConfig != null) 'inputDataConfig': inputDataConfig!, + if (labels != null) 'labels': labels!, + if (modelId != null) 'modelId': modelId!, + if (modelToUpload != null) 'modelToUpload': modelToUpload!, if (name != null) 'name': name!, - if (pluginData != null) 'pluginData': pluginData!, - if (pluginName != null) 'pluginName': pluginName!, + if (parentModel != null) 'parentModel': parentModel!, + if (startTime != null) 'startTime': startTime!, + if (state != null) 'state': state!, + if (trainingTaskDefinition != null) + 'trainingTaskDefinition': trainingTaskDefinition!, + if (trainingTaskInputs != null) + 'trainingTaskInputs': trainingTaskInputs!, + if (trainingTaskMetadata != null) + 'trainingTaskMetadata': trainingTaskMetadata!, if (updateTime != null) 'updateTime': updateTime!, - if (valueType != null) 'valueType': valueType!, }; } -/// Describes metadata for a TensorboardTimeSeries. -class GoogleCloudAiplatformV1TensorboardTimeSeriesMetadata { - /// The largest blob sequence length (number of blobs) of all data points in - /// this time series, if its ValueType is BLOB_SEQUENCE. - /// - /// Output only. - core.String? maxBlobSequenceLength; - - /// Max step index of all data points within a TensorboardTimeSeries. - /// - /// Output only. - core.String? maxStep; - - /// Max wall clock timestamp of all data points within a - /// TensorboardTimeSeries. +/// Spec for trajectory. +class GoogleCloudAiplatformV1Trajectory { + /// Tool calls in the trajectory. /// - /// Output only. - core.String? maxWallTime; + /// Required. + core.List? 
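// A minimal sketch of building a TrainingPipeline request body with fields
// shown in this hunk. The import path, the schema URI, and the
// `workerPoolSpecs` key are assumptions used only for illustration.
import 'dart:convert' show jsonEncode;
import 'package:googleapis/aiplatform/v1.dart'; // import path assumed

String buildTrainingPipelineJson() {
  final pipeline = GoogleCloudAiplatformV1TrainingPipeline(
    displayName: 'demo-pipeline',
    trainingTaskDefinition:
        'gs://google-cloud-aiplatform/schema/trainingjob/definition/custom_task_1.0.0.yaml',
    // trainingTaskInputs accepts any JSON-encodable object, per its doc comment.
    trainingTaskInputs: {'workerPoolSpecs': []},
  );
  // Output-only fields (state, startTime, trainingTaskMetadata, ...) stay unset.
  return jsonEncode(pipeline.toJson());
}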
toolCalls; - GoogleCloudAiplatformV1TensorboardTimeSeriesMetadata({ - this.maxBlobSequenceLength, - this.maxStep, - this.maxWallTime, + GoogleCloudAiplatformV1Trajectory({ + this.toolCalls, }); - GoogleCloudAiplatformV1TensorboardTimeSeriesMetadata.fromJson(core.Map json_) + GoogleCloudAiplatformV1Trajectory.fromJson(core.Map json_) : this( - maxBlobSequenceLength: json_['maxBlobSequenceLength'] as core.String?, - maxStep: json_['maxStep'] as core.String?, - maxWallTime: json_['maxWallTime'] as core.String?, + toolCalls: (json_['toolCalls'] as core.List?) + ?.map((value) => GoogleCloudAiplatformV1ToolCall.fromJson( + value as core.Map)) + .toList(), ); core.Map toJson() => { - if (maxBlobSequenceLength != null) - 'maxBlobSequenceLength': maxBlobSequenceLength!, - if (maxStep != null) 'maxStep': maxStep!, - if (maxWallTime != null) 'maxWallTime': maxWallTime!, + if (toolCalls != null) 'toolCalls': toolCalls!, }; } -/// The config for feature monitoring threshold. -class GoogleCloudAiplatformV1ThresholdConfig { - /// Specify a threshold value that can trigger the alert. +/// Instances and metric spec for TrajectoryAnyOrderMatch metric. +class GoogleCloudAiplatformV1TrajectoryAnyOrderMatchInput { + /// Repeated TrajectoryAnyOrderMatch instance. /// - /// If this threshold config is for feature distribution distance: 1. For - /// categorical feature, the distribution distance is calculated by - /// L-inifinity norm. 2. For numerical feature, the distribution distance is - /// calculated by Jensen–Shannon divergence. Each feature must have a non-zero - /// threshold if they need to be monitored. Otherwise no alert will be - /// triggered for that feature. - core.double? value; + /// Required. + core.List? instances; - GoogleCloudAiplatformV1ThresholdConfig({ - this.value, + /// Spec for TrajectoryAnyOrderMatch metric. + /// + /// Required. + GoogleCloudAiplatformV1TrajectoryAnyOrderMatchSpec? metricSpec; + + GoogleCloudAiplatformV1TrajectoryAnyOrderMatchInput({ + this.instances, + this.metricSpec, }); - GoogleCloudAiplatformV1ThresholdConfig.fromJson(core.Map json_) + GoogleCloudAiplatformV1TrajectoryAnyOrderMatchInput.fromJson(core.Map json_) : this( - value: (json_['value'] as core.num?)?.toDouble(), + instances: (json_['instances'] as core.List?) + ?.map((value) => + GoogleCloudAiplatformV1TrajectoryAnyOrderMatchInstance + .fromJson(value as core.Map)) + .toList(), + metricSpec: json_.containsKey('metricSpec') + ? GoogleCloudAiplatformV1TrajectoryAnyOrderMatchSpec.fromJson( + json_['metricSpec'] as core.Map) + : null, ); core.Map toJson() => { - if (value != null) 'value': value!, + if (instances != null) 'instances': instances!, + if (metricSpec != null) 'metricSpec': metricSpec!, }; } -/// All the data stored in a TensorboardTimeSeries. -class GoogleCloudAiplatformV1TimeSeriesData { - /// The ID of the TensorboardTimeSeries, which will become the final component - /// of the TensorboardTimeSeries' resource name +/// Spec for TrajectoryAnyOrderMatch instance. +class GoogleCloudAiplatformV1TrajectoryAnyOrderMatchInstance { + /// Spec for predicted tool call trajectory. /// /// Required. - core.String? tensorboardTimeSeriesId; - - /// The value type of this time series. - /// - /// All the values in this time series data must match this value type. - /// - /// Required. Immutable. - /// Possible string values are: - /// - "VALUE_TYPE_UNSPECIFIED" : The value type is unspecified. - /// - "SCALAR" : Used for TensorboardTimeSeries that is a list of scalars. - /// E.g. 
accuracy of a model over epochs/time. - /// - "TENSOR" : Used for TensorboardTimeSeries that is a list of tensors. - /// E.g. histograms of weights of layer in a model over epoch/time. - /// - "BLOB_SEQUENCE" : Used for TensorboardTimeSeries that is a list of blob - /// sequences. E.g. set of sample images with labels over epochs/time. - core.String? valueType; + GoogleCloudAiplatformV1Trajectory? predictedTrajectory; - /// Data points in this time series. + /// Spec for reference tool call trajectory. /// /// Required. - core.List? values; + GoogleCloudAiplatformV1Trajectory? referenceTrajectory; - GoogleCloudAiplatformV1TimeSeriesData({ - this.tensorboardTimeSeriesId, - this.valueType, - this.values, + GoogleCloudAiplatformV1TrajectoryAnyOrderMatchInstance({ + this.predictedTrajectory, + this.referenceTrajectory, }); - GoogleCloudAiplatformV1TimeSeriesData.fromJson(core.Map json_) + GoogleCloudAiplatformV1TrajectoryAnyOrderMatchInstance.fromJson( + core.Map json_) : this( - tensorboardTimeSeriesId: - json_['tensorboardTimeSeriesId'] as core.String?, - valueType: json_['valueType'] as core.String?, - values: (json_['values'] as core.List?) - ?.map((value) => - GoogleCloudAiplatformV1TimeSeriesDataPoint.fromJson( - value as core.Map)) - .toList(), + predictedTrajectory: json_.containsKey('predictedTrajectory') + ? GoogleCloudAiplatformV1Trajectory.fromJson( + json_['predictedTrajectory'] + as core.Map) + : null, + referenceTrajectory: json_.containsKey('referenceTrajectory') + ? GoogleCloudAiplatformV1Trajectory.fromJson( + json_['referenceTrajectory'] + as core.Map) + : null, ); core.Map toJson() => { - if (tensorboardTimeSeriesId != null) - 'tensorboardTimeSeriesId': tensorboardTimeSeriesId!, - if (valueType != null) 'valueType': valueType!, - if (values != null) 'values': values!, + if (predictedTrajectory != null) + 'predictedTrajectory': predictedTrajectory!, + if (referenceTrajectory != null) + 'referenceTrajectory': referenceTrajectory!, }; } -/// A TensorboardTimeSeries data point. -class GoogleCloudAiplatformV1TimeSeriesDataPoint { - /// A blob sequence value. - GoogleCloudAiplatformV1TensorboardBlobSequence? blobs; +/// TrajectoryAnyOrderMatch metric value for an instance. +class GoogleCloudAiplatformV1TrajectoryAnyOrderMatchMetricValue { + /// TrajectoryAnyOrderMatch score. + /// + /// Output only. + core.double? score; - /// A scalar value. - GoogleCloudAiplatformV1Scalar? scalar; + GoogleCloudAiplatformV1TrajectoryAnyOrderMatchMetricValue({ + this.score, + }); - /// Step index of this data point within the run. - core.String? step; + GoogleCloudAiplatformV1TrajectoryAnyOrderMatchMetricValue.fromJson( + core.Map json_) + : this( + score: (json_['score'] as core.num?)?.toDouble(), + ); - /// A tensor value. - GoogleCloudAiplatformV1TensorboardTensor? tensor; + core.Map toJson() => { + if (score != null) 'score': score!, + }; +} - /// Wall clock timestamp when this data point is generated by the end user. - core.String? wallTime; +/// Results for TrajectoryAnyOrderMatch metric. +class GoogleCloudAiplatformV1TrajectoryAnyOrderMatchResults { + /// TrajectoryAnyOrderMatch metric values. + /// + /// Output only. + core.List? 
+ trajectoryAnyOrderMatchMetricValues; - GoogleCloudAiplatformV1TimeSeriesDataPoint({ - this.blobs, - this.scalar, - this.step, - this.tensor, - this.wallTime, + GoogleCloudAiplatformV1TrajectoryAnyOrderMatchResults({ + this.trajectoryAnyOrderMatchMetricValues, }); - GoogleCloudAiplatformV1TimeSeriesDataPoint.fromJson(core.Map json_) + GoogleCloudAiplatformV1TrajectoryAnyOrderMatchResults.fromJson(core.Map json_) : this( - blobs: json_.containsKey('blobs') - ? GoogleCloudAiplatformV1TensorboardBlobSequence.fromJson( - json_['blobs'] as core.Map) - : null, - scalar: json_.containsKey('scalar') - ? GoogleCloudAiplatformV1Scalar.fromJson( - json_['scalar'] as core.Map) - : null, - step: json_['step'] as core.String?, - tensor: json_.containsKey('tensor') - ? GoogleCloudAiplatformV1TensorboardTensor.fromJson( - json_['tensor'] as core.Map) - : null, - wallTime: json_['wallTime'] as core.String?, + trajectoryAnyOrderMatchMetricValues: + (json_['trajectoryAnyOrderMatchMetricValues'] as core.List?) + ?.map((value) => + GoogleCloudAiplatformV1TrajectoryAnyOrderMatchMetricValue + .fromJson( + value as core.Map)) + .toList(), ); core.Map toJson() => { - if (blobs != null) 'blobs': blobs!, - if (scalar != null) 'scalar': scalar!, - if (step != null) 'step': step!, - if (tensor != null) 'tensor': tensor!, - if (wallTime != null) 'wallTime': wallTime!, + if (trajectoryAnyOrderMatchMetricValues != null) + 'trajectoryAnyOrderMatchMetricValues': + trajectoryAnyOrderMatchMetricValues!, }; } -/// Assigns input data to training, validation, and test sets based on a -/// provided timestamps. -/// -/// The youngest data pieces are assigned to training set, next to validation -/// set, and the oldest to the test set. Supported only for tabular Datasets. -class GoogleCloudAiplatformV1TimestampSplit { - /// The key is a name of one of the Dataset's data columns. - /// - /// The values of the key (the values in the column) must be in RFC 3339 - /// `date-time` format, where `time-offset` = `"Z"` (e.g. - /// 1985-04-12T23:20:50.52Z). If for a piece of data the key is not present or - /// has an invalid value, that piece is ignored by the pipeline. +/// Spec for TrajectoryAnyOrderMatch metric - returns 1 if all tool calls in the +/// reference trajectory appear in the predicted trajectory in any order, else +/// 0. +typedef GoogleCloudAiplatformV1TrajectoryAnyOrderMatchSpec = $Empty; + +/// Instances and metric spec for TrajectoryExactMatch metric. +class GoogleCloudAiplatformV1TrajectoryExactMatchInput { + /// Repeated TrajectoryExactMatch instance. /// /// Required. - core.String? key; - - /// The fraction of the input data that is to be used to evaluate the Model. - core.double? testFraction; + core.List? instances; - /// The fraction of the input data that is to be used to train the Model. - core.double? trainingFraction; - - /// The fraction of the input data that is to be used to validate the Model. - core.double? validationFraction; + /// Spec for TrajectoryExactMatch metric. + /// + /// Required. + GoogleCloudAiplatformV1TrajectoryExactMatchSpec? 
metricSpec; - GoogleCloudAiplatformV1TimestampSplit({ - this.key, - this.testFraction, - this.trainingFraction, - this.validationFraction, + GoogleCloudAiplatformV1TrajectoryExactMatchInput({ + this.instances, + this.metricSpec, }); - GoogleCloudAiplatformV1TimestampSplit.fromJson(core.Map json_) + GoogleCloudAiplatformV1TrajectoryExactMatchInput.fromJson(core.Map json_) : this( - key: json_['key'] as core.String?, - testFraction: (json_['testFraction'] as core.num?)?.toDouble(), - trainingFraction: - (json_['trainingFraction'] as core.num?)?.toDouble(), - validationFraction: - (json_['validationFraction'] as core.num?)?.toDouble(), + instances: (json_['instances'] as core.List?) + ?.map((value) => + GoogleCloudAiplatformV1TrajectoryExactMatchInstance.fromJson( + value as core.Map)) + .toList(), + metricSpec: json_.containsKey('metricSpec') + ? GoogleCloudAiplatformV1TrajectoryExactMatchSpec.fromJson( + json_['metricSpec'] as core.Map) + : null, ); core.Map toJson() => { - if (key != null) 'key': key!, - if (testFraction != null) 'testFraction': testFraction!, - if (trainingFraction != null) 'trainingFraction': trainingFraction!, - if (validationFraction != null) - 'validationFraction': validationFraction!, + if (instances != null) 'instances': instances!, + if (metricSpec != null) 'metricSpec': metricSpec!, }; } -/// Tokens info with a list of tokens and the corresponding list of token ids. -class GoogleCloudAiplatformV1TokensInfo { - /// Optional fields for the role from the corresponding Content. +/// Spec for TrajectoryExactMatch instance. +class GoogleCloudAiplatformV1TrajectoryExactMatchInstance { + /// Spec for predicted tool call trajectory. /// - /// Optional. - core.String? role; - - /// A list of token ids from the input. - core.List? tokenIds; + /// Required. + GoogleCloudAiplatformV1Trajectory? predictedTrajectory; - /// A list of tokens from the input. - core.List? tokens; + /// Spec for reference tool call trajectory. + /// + /// Required. + GoogleCloudAiplatformV1Trajectory? referenceTrajectory; - GoogleCloudAiplatformV1TokensInfo({ - this.role, - this.tokenIds, - this.tokens, + GoogleCloudAiplatformV1TrajectoryExactMatchInstance({ + this.predictedTrajectory, + this.referenceTrajectory, }); - GoogleCloudAiplatformV1TokensInfo.fromJson(core.Map json_) + GoogleCloudAiplatformV1TrajectoryExactMatchInstance.fromJson(core.Map json_) : this( - role: json_['role'] as core.String?, - tokenIds: (json_['tokenIds'] as core.List?) - ?.map((value) => value as core.String) - .toList(), - tokens: (json_['tokens'] as core.List?) - ?.map((value) => value as core.String) - .toList(), + predictedTrajectory: json_.containsKey('predictedTrajectory') + ? GoogleCloudAiplatformV1Trajectory.fromJson( + json_['predictedTrajectory'] + as core.Map) + : null, + referenceTrajectory: json_.containsKey('referenceTrajectory') + ? GoogleCloudAiplatformV1Trajectory.fromJson( + json_['referenceTrajectory'] + as core.Map) + : null, ); core.Map toJson() => { - if (role != null) 'role': role!, - if (tokenIds != null) 'tokenIds': tokenIds!, - if (tokens != null) 'tokens': tokens!, + if (predictedTrajectory != null) + 'predictedTrajectory': predictedTrajectory!, + if (referenceTrajectory != null) + 'referenceTrajectory': referenceTrajectory!, }; } -/// Tool details that the model may use to generate response. -/// -/// A `Tool` is a piece of code that enables the system to interact with -/// external systems to perform an action, or set of actions, outside of -/// knowledge and scope of the model. 
A Tool object should contain exactly one -/// type of Tool (e.g FunctionDeclaration, Retrieval or GoogleSearchRetrieval). -class GoogleCloudAiplatformV1Tool { - /// Function tool type. - /// - /// One or more function declarations to be passed to the model along with the - /// current user query. Model may decide to call a subset of these functions - /// by populating FunctionCall in the response. User should provide a - /// FunctionResponse for each function call in the next turn. Based on the - /// function responses, Model will generate the final response back to the - /// user. Maximum 128 function declarations can be provided. +/// TrajectoryExactMatch metric value for an instance. +class GoogleCloudAiplatformV1TrajectoryExactMatchMetricValue { + /// TrajectoryExactMatch score. /// - /// Optional. - core.List? functionDeclarations; + /// Output only. + core.double? score; - /// GoogleSearchRetrieval tool type. - /// - /// Specialized retrieval tool that is powered by Google search. - /// - /// Optional. - GoogleCloudAiplatformV1GoogleSearchRetrieval? googleSearchRetrieval; + GoogleCloudAiplatformV1TrajectoryExactMatchMetricValue({ + this.score, + }); - /// Retrieval tool type. - /// - /// System will always execute the provided retrieval tool(s) to get external - /// knowledge to answer the prompt. Retrieval results are presented to the - /// model for generation. + GoogleCloudAiplatformV1TrajectoryExactMatchMetricValue.fromJson( + core.Map json_) + : this( + score: (json_['score'] as core.num?)?.toDouble(), + ); + + core.Map toJson() => { + if (score != null) 'score': score!, + }; +} + +/// Results for TrajectoryExactMatch metric. +class GoogleCloudAiplatformV1TrajectoryExactMatchResults { + /// TrajectoryExactMatch metric values. /// - /// Optional. - GoogleCloudAiplatformV1Retrieval? retrieval; + /// Output only. + core.List? + trajectoryExactMatchMetricValues; - GoogleCloudAiplatformV1Tool({ - this.functionDeclarations, - this.googleSearchRetrieval, - this.retrieval, + GoogleCloudAiplatformV1TrajectoryExactMatchResults({ + this.trajectoryExactMatchMetricValues, }); - GoogleCloudAiplatformV1Tool.fromJson(core.Map json_) + GoogleCloudAiplatformV1TrajectoryExactMatchResults.fromJson(core.Map json_) : this( - functionDeclarations: (json_['functionDeclarations'] as core.List?) - ?.map((value) => - GoogleCloudAiplatformV1FunctionDeclaration.fromJson( - value as core.Map)) - .toList(), - googleSearchRetrieval: json_.containsKey('googleSearchRetrieval') - ? GoogleCloudAiplatformV1GoogleSearchRetrieval.fromJson( - json_['googleSearchRetrieval'] - as core.Map) - : null, - retrieval: json_.containsKey('retrieval') - ? GoogleCloudAiplatformV1Retrieval.fromJson( - json_['retrieval'] as core.Map) - : null, + trajectoryExactMatchMetricValues: + (json_['trajectoryExactMatchMetricValues'] as core.List?) + ?.map((value) => + GoogleCloudAiplatformV1TrajectoryExactMatchMetricValue + .fromJson( + value as core.Map)) + .toList(), ); core.Map toJson() => { - if (functionDeclarations != null) - 'functionDeclarations': functionDeclarations!, - if (googleSearchRetrieval != null) - 'googleSearchRetrieval': googleSearchRetrieval!, - if (retrieval != null) 'retrieval': retrieval!, + if (trajectoryExactMatchMetricValues != null) + 'trajectoryExactMatchMetricValues': trajectoryExactMatchMetricValues!, }; } -/// Input for tool call valid metric. -class GoogleCloudAiplatformV1ToolCallValidInput { - /// Repeated tool call valid instances. 
+/// Spec for TrajectoryExactMatch metric - returns 1 if tool calls in the +/// reference trajectory exactly match the predicted trajectory, else 0. +typedef GoogleCloudAiplatformV1TrajectoryExactMatchSpec = $Empty; + +/// Instances and metric spec for TrajectoryInOrderMatch metric. +class GoogleCloudAiplatformV1TrajectoryInOrderMatchInput { + /// Repeated TrajectoryInOrderMatch instance. /// /// Required. - core.List? instances; + core.List? instances; - /// Spec for tool call valid metric. + /// Spec for TrajectoryInOrderMatch metric. /// /// Required. - GoogleCloudAiplatformV1ToolCallValidSpec? metricSpec; + GoogleCloudAiplatformV1TrajectoryInOrderMatchSpec? metricSpec; - GoogleCloudAiplatformV1ToolCallValidInput({ + GoogleCloudAiplatformV1TrajectoryInOrderMatchInput({ this.instances, this.metricSpec, }); - GoogleCloudAiplatformV1ToolCallValidInput.fromJson(core.Map json_) + GoogleCloudAiplatformV1TrajectoryInOrderMatchInput.fromJson(core.Map json_) : this( instances: (json_['instances'] as core.List?) ?.map((value) => - GoogleCloudAiplatformV1ToolCallValidInstance.fromJson( - value as core.Map)) + GoogleCloudAiplatformV1TrajectoryInOrderMatchInstance + .fromJson(value as core.Map)) .toList(), metricSpec: json_.containsKey('metricSpec') - ? GoogleCloudAiplatformV1ToolCallValidSpec.fromJson( + ? GoogleCloudAiplatformV1TrajectoryInOrderMatchSpec.fromJson( json_['metricSpec'] as core.Map) : null, ); @@ -58535,115 +64923,128 @@ class GoogleCloudAiplatformV1ToolCallValidInput { }; } -/// Spec for tool call valid instance. -typedef GoogleCloudAiplatformV1ToolCallValidInstance = $Instance00; +/// Spec for TrajectoryInOrderMatch instance. +class GoogleCloudAiplatformV1TrajectoryInOrderMatchInstance { + /// Spec for predicted tool call trajectory. + /// + /// Required. + GoogleCloudAiplatformV1Trajectory? predictedTrajectory; -/// Tool call valid metric value for an instance. -class GoogleCloudAiplatformV1ToolCallValidMetricValue { - /// Tool call valid score. + /// Spec for reference tool call trajectory. /// - /// Output only. - core.double? score; + /// Required. + GoogleCloudAiplatformV1Trajectory? referenceTrajectory; - GoogleCloudAiplatformV1ToolCallValidMetricValue({ - this.score, + GoogleCloudAiplatformV1TrajectoryInOrderMatchInstance({ + this.predictedTrajectory, + this.referenceTrajectory, }); - GoogleCloudAiplatformV1ToolCallValidMetricValue.fromJson(core.Map json_) + GoogleCloudAiplatformV1TrajectoryInOrderMatchInstance.fromJson(core.Map json_) : this( - score: (json_['score'] as core.num?)?.toDouble(), + predictedTrajectory: json_.containsKey('predictedTrajectory') + ? GoogleCloudAiplatformV1Trajectory.fromJson( + json_['predictedTrajectory'] + as core.Map) + : null, + referenceTrajectory: json_.containsKey('referenceTrajectory') + ? GoogleCloudAiplatformV1Trajectory.fromJson( + json_['referenceTrajectory'] + as core.Map) + : null, ); core.Map toJson() => { - if (score != null) 'score': score!, + if (predictedTrajectory != null) + 'predictedTrajectory': predictedTrajectory!, + if (referenceTrajectory != null) + 'referenceTrajectory': referenceTrajectory!, }; } -/// Results for tool call valid metric. -class GoogleCloudAiplatformV1ToolCallValidResults { - /// Tool call valid metric values. +/// TrajectoryInOrderMatch metric value for an instance. +class GoogleCloudAiplatformV1TrajectoryInOrderMatchMetricValue { + /// TrajectoryInOrderMatch score. /// /// Output only. - core.List? - toolCallValidMetricValues; + core.double? 
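// A minimal sketch assembling a TrajectoryExactMatch input from the classes
// above. The ToolCall JSON keys ('toolName', 'toolInput') are assumptions;
// GoogleCloudAiplatformV1ToolCall itself is defined elsewhere in this file.
import 'package:googleapis/aiplatform/v1.dart'; // import path assumed

GoogleCloudAiplatformV1TrajectoryExactMatchInput buildExactMatchInput() {
  GoogleCloudAiplatformV1Trajectory trajectory() =>
      GoogleCloudAiplatformV1Trajectory(toolCalls: [
        GoogleCloudAiplatformV1ToolCall.fromJson(
            {'toolName': 'search', 'toolInput': '{"query": "weather"}'}),
      ]);
  return GoogleCloudAiplatformV1TrajectoryExactMatchInput(
    // The spec typedef resolves to the shared empty marker type.
    metricSpec: GoogleCloudAiplatformV1TrajectoryExactMatchSpec(),
    instances: [
      GoogleCloudAiplatformV1TrajectoryExactMatchInstance(
        predictedTrajectory: trajectory(),
        referenceTrajectory: trajectory(),
      ),
    ],
  );
}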
score; - GoogleCloudAiplatformV1ToolCallValidResults({ - this.toolCallValidMetricValues, + GoogleCloudAiplatformV1TrajectoryInOrderMatchMetricValue({ + this.score, }); - GoogleCloudAiplatformV1ToolCallValidResults.fromJson(core.Map json_) + GoogleCloudAiplatformV1TrajectoryInOrderMatchMetricValue.fromJson( + core.Map json_) : this( - toolCallValidMetricValues: - (json_['toolCallValidMetricValues'] as core.List?) - ?.map((value) => - GoogleCloudAiplatformV1ToolCallValidMetricValue.fromJson( - value as core.Map)) - .toList(), + score: (json_['score'] as core.num?)?.toDouble(), ); core.Map toJson() => { - if (toolCallValidMetricValues != null) - 'toolCallValidMetricValues': toolCallValidMetricValues!, + if (score != null) 'score': score!, }; } -/// Spec for tool call valid metric. -typedef GoogleCloudAiplatformV1ToolCallValidSpec = $Empty; - -/// Tool config. -/// -/// This config is shared for all tools provided in the request. -class GoogleCloudAiplatformV1ToolConfig { - /// Function calling config. +/// Results for TrajectoryInOrderMatch metric. +class GoogleCloudAiplatformV1TrajectoryInOrderMatchResults { + /// TrajectoryInOrderMatch metric values. /// - /// Optional. - GoogleCloudAiplatformV1FunctionCallingConfig? functionCallingConfig; + /// Output only. + core.List? + trajectoryInOrderMatchMetricValues; - GoogleCloudAiplatformV1ToolConfig({ - this.functionCallingConfig, + GoogleCloudAiplatformV1TrajectoryInOrderMatchResults({ + this.trajectoryInOrderMatchMetricValues, }); - GoogleCloudAiplatformV1ToolConfig.fromJson(core.Map json_) + GoogleCloudAiplatformV1TrajectoryInOrderMatchResults.fromJson(core.Map json_) : this( - functionCallingConfig: json_.containsKey('functionCallingConfig') - ? GoogleCloudAiplatformV1FunctionCallingConfig.fromJson( - json_['functionCallingConfig'] - as core.Map) - : null, + trajectoryInOrderMatchMetricValues: + (json_['trajectoryInOrderMatchMetricValues'] as core.List?) + ?.map((value) => + GoogleCloudAiplatformV1TrajectoryInOrderMatchMetricValue + .fromJson( + value as core.Map)) + .toList(), ); core.Map toJson() => { - if (functionCallingConfig != null) - 'functionCallingConfig': functionCallingConfig!, + if (trajectoryInOrderMatchMetricValues != null) + 'trajectoryInOrderMatchMetricValues': + trajectoryInOrderMatchMetricValues!, }; } -/// Input for tool name match metric. -class GoogleCloudAiplatformV1ToolNameMatchInput { - /// Repeated tool name match instances. +/// Spec for TrajectoryInOrderMatch metric - returns 1 if tool calls in the +/// reference trajectory appear in the predicted trajectory in the same order, +/// else 0. +typedef GoogleCloudAiplatformV1TrajectoryInOrderMatchSpec = $Empty; + +/// Instances and metric spec for TrajectoryPrecision metric. +class GoogleCloudAiplatformV1TrajectoryPrecisionInput { + /// Repeated TrajectoryPrecision instance. /// /// Required. - core.List? instances; + core.List? instances; - /// Spec for tool name match metric. + /// Spec for TrajectoryPrecision metric. /// /// Required. - GoogleCloudAiplatformV1ToolNameMatchSpec? metricSpec; + GoogleCloudAiplatformV1TrajectoryPrecisionSpec? metricSpec; - GoogleCloudAiplatformV1ToolNameMatchInput({ + GoogleCloudAiplatformV1TrajectoryPrecisionInput({ this.instances, this.metricSpec, }); - GoogleCloudAiplatformV1ToolNameMatchInput.fromJson(core.Map json_) + GoogleCloudAiplatformV1TrajectoryPrecisionInput.fromJson(core.Map json_) : this( instances: (json_['instances'] as core.List?) 
?.map((value) => - GoogleCloudAiplatformV1ToolNameMatchInstance.fromJson( + GoogleCloudAiplatformV1TrajectoryPrecisionInstance.fromJson( value as core.Map)) .toList(), metricSpec: json_.containsKey('metricSpec') - ? GoogleCloudAiplatformV1ToolNameMatchSpec.fromJson( + ? GoogleCloudAiplatformV1TrajectoryPrecisionSpec.fromJson( json_['metricSpec'] as core.Map) : null, ); @@ -58654,21 +65055,57 @@ class GoogleCloudAiplatformV1ToolNameMatchInput { }; } -/// Spec for tool name match instance. -typedef GoogleCloudAiplatformV1ToolNameMatchInstance = $Instance00; +/// Spec for TrajectoryPrecision instance. +class GoogleCloudAiplatformV1TrajectoryPrecisionInstance { + /// Spec for predicted tool call trajectory. + /// + /// Required. + GoogleCloudAiplatformV1Trajectory? predictedTrajectory; -/// Tool name match metric value for an instance. -class GoogleCloudAiplatformV1ToolNameMatchMetricValue { - /// Tool name match score. + /// Spec for reference tool call trajectory. + /// + /// Required. + GoogleCloudAiplatformV1Trajectory? referenceTrajectory; + + GoogleCloudAiplatformV1TrajectoryPrecisionInstance({ + this.predictedTrajectory, + this.referenceTrajectory, + }); + + GoogleCloudAiplatformV1TrajectoryPrecisionInstance.fromJson(core.Map json_) + : this( + predictedTrajectory: json_.containsKey('predictedTrajectory') + ? GoogleCloudAiplatformV1Trajectory.fromJson( + json_['predictedTrajectory'] + as core.Map) + : null, + referenceTrajectory: json_.containsKey('referenceTrajectory') + ? GoogleCloudAiplatformV1Trajectory.fromJson( + json_['referenceTrajectory'] + as core.Map) + : null, + ); + + core.Map toJson() => { + if (predictedTrajectory != null) + 'predictedTrajectory': predictedTrajectory!, + if (referenceTrajectory != null) + 'referenceTrajectory': referenceTrajectory!, + }; +} + +/// TrajectoryPrecision metric value for an instance. +class GoogleCloudAiplatformV1TrajectoryPrecisionMetricValue { + /// TrajectoryPrecision score. /// /// Output only. core.double? score; - GoogleCloudAiplatformV1ToolNameMatchMetricValue({ + GoogleCloudAiplatformV1TrajectoryPrecisionMetricValue({ this.score, }); - GoogleCloudAiplatformV1ToolNameMatchMetricValue.fromJson(core.Map json_) + GoogleCloudAiplatformV1TrajectoryPrecisionMetricValue.fromJson(core.Map json_) : this( score: (json_['score'] as core.num?)?.toDouble(), ); @@ -58678,63 +65115,65 @@ class GoogleCloudAiplatformV1ToolNameMatchMetricValue { }; } -/// Results for tool name match metric. -class GoogleCloudAiplatformV1ToolNameMatchResults { - /// Tool name match metric values. +/// Results for TrajectoryPrecision metric. +class GoogleCloudAiplatformV1TrajectoryPrecisionResults { + /// TrajectoryPrecision metric values. /// /// Output only. - core.List? - toolNameMatchMetricValues; + core.List? + trajectoryPrecisionMetricValues; - GoogleCloudAiplatformV1ToolNameMatchResults({ - this.toolNameMatchMetricValues, + GoogleCloudAiplatformV1TrajectoryPrecisionResults({ + this.trajectoryPrecisionMetricValues, }); - GoogleCloudAiplatformV1ToolNameMatchResults.fromJson(core.Map json_) + GoogleCloudAiplatformV1TrajectoryPrecisionResults.fromJson(core.Map json_) : this( - toolNameMatchMetricValues: - (json_['toolNameMatchMetricValues'] as core.List?) + trajectoryPrecisionMetricValues: + (json_['trajectoryPrecisionMetricValues'] as core.List?) 
?.map((value) => - GoogleCloudAiplatformV1ToolNameMatchMetricValue.fromJson( - value as core.Map)) + GoogleCloudAiplatformV1TrajectoryPrecisionMetricValue + .fromJson( + value as core.Map)) .toList(), ); core.Map toJson() => { - if (toolNameMatchMetricValues != null) - 'toolNameMatchMetricValues': toolNameMatchMetricValues!, + if (trajectoryPrecisionMetricValues != null) + 'trajectoryPrecisionMetricValues': trajectoryPrecisionMetricValues!, }; } -/// Spec for tool name match metric. -typedef GoogleCloudAiplatformV1ToolNameMatchSpec = $Empty; +/// Spec for TrajectoryPrecision metric - returns a float score based on average +/// precision of individual tool calls. +typedef GoogleCloudAiplatformV1TrajectoryPrecisionSpec = $Empty; -/// Input for tool parameter key value match metric. -class GoogleCloudAiplatformV1ToolParameterKVMatchInput { - /// Repeated tool parameter key value match instances. +/// Instances and metric spec for TrajectoryRecall metric. +class GoogleCloudAiplatformV1TrajectoryRecallInput { + /// Repeated TrajectoryRecall instance. /// /// Required. - core.List? instances; + core.List? instances; - /// Spec for tool parameter key value match metric. + /// Spec for TrajectoryRecall metric. /// /// Required. - GoogleCloudAiplatformV1ToolParameterKVMatchSpec? metricSpec; + GoogleCloudAiplatformV1TrajectoryRecallSpec? metricSpec; - GoogleCloudAiplatformV1ToolParameterKVMatchInput({ + GoogleCloudAiplatformV1TrajectoryRecallInput({ this.instances, this.metricSpec, }); - GoogleCloudAiplatformV1ToolParameterKVMatchInput.fromJson(core.Map json_) + GoogleCloudAiplatformV1TrajectoryRecallInput.fromJson(core.Map json_) : this( instances: (json_['instances'] as core.List?) ?.map((value) => - GoogleCloudAiplatformV1ToolParameterKVMatchInstance.fromJson( + GoogleCloudAiplatformV1TrajectoryRecallInstance.fromJson( value as core.Map)) .toList(), metricSpec: json_.containsKey('metricSpec') - ? GoogleCloudAiplatformV1ToolParameterKVMatchSpec.fromJson( + ? GoogleCloudAiplatformV1TrajectoryRecallSpec.fromJson( json_['metricSpec'] as core.Map) : null, ); @@ -58745,108 +65184,124 @@ class GoogleCloudAiplatformV1ToolParameterKVMatchInput { }; } -/// Spec for tool parameter key value match instance. -typedef GoogleCloudAiplatformV1ToolParameterKVMatchInstance = $Instance00; +/// Spec for TrajectoryRecall instance. +class GoogleCloudAiplatformV1TrajectoryRecallInstance { + /// Spec for predicted tool call trajectory. + /// + /// Required. + GoogleCloudAiplatformV1Trajectory? predictedTrajectory; -/// Tool parameter key value match metric value for an instance. -class GoogleCloudAiplatformV1ToolParameterKVMatchMetricValue { - /// Tool parameter key value match score. + /// Spec for reference tool call trajectory. /// - /// Output only. - core.double? score; + /// Required. + GoogleCloudAiplatformV1Trajectory? referenceTrajectory; - GoogleCloudAiplatformV1ToolParameterKVMatchMetricValue({ - this.score, + GoogleCloudAiplatformV1TrajectoryRecallInstance({ + this.predictedTrajectory, + this.referenceTrajectory, }); - GoogleCloudAiplatformV1ToolParameterKVMatchMetricValue.fromJson( - core.Map json_) + GoogleCloudAiplatformV1TrajectoryRecallInstance.fromJson(core.Map json_) : this( - score: (json_['score'] as core.num?)?.toDouble(), + predictedTrajectory: json_.containsKey('predictedTrajectory') + ? GoogleCloudAiplatformV1Trajectory.fromJson( + json_['predictedTrajectory'] + as core.Map) + : null, + referenceTrajectory: json_.containsKey('referenceTrajectory') + ? 
GoogleCloudAiplatformV1Trajectory.fromJson( + json_['referenceTrajectory'] + as core.Map) + : null, ); core.Map toJson() => { - if (score != null) 'score': score!, + if (predictedTrajectory != null) + 'predictedTrajectory': predictedTrajectory!, + if (referenceTrajectory != null) + 'referenceTrajectory': referenceTrajectory!, }; } -/// Results for tool parameter key value match metric. -class GoogleCloudAiplatformV1ToolParameterKVMatchResults { - /// Tool parameter key value match metric values. +/// TrajectoryRecall metric value for an instance. +class GoogleCloudAiplatformV1TrajectoryRecallMetricValue { + /// TrajectoryRecall score. /// /// Output only. - core.List? - toolParameterKvMatchMetricValues; + core.double? score; - GoogleCloudAiplatformV1ToolParameterKVMatchResults({ - this.toolParameterKvMatchMetricValues, + GoogleCloudAiplatformV1TrajectoryRecallMetricValue({ + this.score, }); - GoogleCloudAiplatformV1ToolParameterKVMatchResults.fromJson(core.Map json_) + GoogleCloudAiplatformV1TrajectoryRecallMetricValue.fromJson(core.Map json_) : this( - toolParameterKvMatchMetricValues: - (json_['toolParameterKvMatchMetricValues'] as core.List?) - ?.map((value) => - GoogleCloudAiplatformV1ToolParameterKVMatchMetricValue - .fromJson( - value as core.Map)) - .toList(), + score: (json_['score'] as core.num?)?.toDouble(), ); core.Map toJson() => { - if (toolParameterKvMatchMetricValues != null) - 'toolParameterKvMatchMetricValues': toolParameterKvMatchMetricValues!, + if (score != null) 'score': score!, }; } -/// Spec for tool parameter key value match metric. -class GoogleCloudAiplatformV1ToolParameterKVMatchSpec { - /// Whether to use STRICT string match on parameter values. +/// Results for TrajectoryRecall metric. +class GoogleCloudAiplatformV1TrajectoryRecallResults { + /// TrajectoryRecall metric values. /// - /// Optional. - core.bool? useStrictStringMatch; + /// Output only. + core.List? + trajectoryRecallMetricValues; - GoogleCloudAiplatformV1ToolParameterKVMatchSpec({ - this.useStrictStringMatch, + GoogleCloudAiplatformV1TrajectoryRecallResults({ + this.trajectoryRecallMetricValues, }); - GoogleCloudAiplatformV1ToolParameterKVMatchSpec.fromJson(core.Map json_) + GoogleCloudAiplatformV1TrajectoryRecallResults.fromJson(core.Map json_) : this( - useStrictStringMatch: json_['useStrictStringMatch'] as core.bool?, + trajectoryRecallMetricValues: (json_['trajectoryRecallMetricValues'] + as core.List?) + ?.map((value) => + GoogleCloudAiplatformV1TrajectoryRecallMetricValue.fromJson( + value as core.Map)) + .toList(), ); core.Map toJson() => { - if (useStrictStringMatch != null) - 'useStrictStringMatch': useStrictStringMatch!, + if (trajectoryRecallMetricValues != null) + 'trajectoryRecallMetricValues': trajectoryRecallMetricValues!, }; } -/// Input for tool parameter key match metric. -class GoogleCloudAiplatformV1ToolParameterKeyMatchInput { - /// Repeated tool parameter key match instances. +/// Spec for TrajectoryRecall metric - returns a float score based on average +/// recall of individual tool calls. +typedef GoogleCloudAiplatformV1TrajectoryRecallSpec = $Empty; + +/// Instances and metric spec for TrajectorySingleToolUse metric. +class GoogleCloudAiplatformV1TrajectorySingleToolUseInput { + /// Repeated TrajectorySingleToolUse instance. /// /// Required. - core.List? instances; + core.List? instances; - /// Spec for tool parameter key match metric. + /// Spec for TrajectorySingleToolUse metric. /// /// Required. - GoogleCloudAiplatformV1ToolParameterKeyMatchSpec? 
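// A minimal sketch of decoding TrajectoryRecall results the way the generated
// fromJson constructor above does. The response map is an illustrative shape,
// not a captured API response; the import path is assumed.
import 'package:googleapis/aiplatform/v1.dart'; // import path assumed

void readRecallScores() {
  final results = GoogleCloudAiplatformV1TrajectoryRecallResults.fromJson({
    'trajectoryRecallMetricValues': [
      {'score': 0.5},
      {'score': 1.0},
    ],
  });
  results.trajectoryRecallMetricValues?.forEach((value) {
    print(value.score); // 0.5, then 1.0
  });
}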
metricSpec; + GoogleCloudAiplatformV1TrajectorySingleToolUseSpec? metricSpec; - GoogleCloudAiplatformV1ToolParameterKeyMatchInput({ + GoogleCloudAiplatformV1TrajectorySingleToolUseInput({ this.instances, this.metricSpec, }); - GoogleCloudAiplatformV1ToolParameterKeyMatchInput.fromJson(core.Map json_) + GoogleCloudAiplatformV1TrajectorySingleToolUseInput.fromJson(core.Map json_) : this( instances: (json_['instances'] as core.List?) ?.map((value) => - GoogleCloudAiplatformV1ToolParameterKeyMatchInstance.fromJson( - value as core.Map)) + GoogleCloudAiplatformV1TrajectorySingleToolUseInstance + .fromJson(value as core.Map)) .toList(), metricSpec: json_.containsKey('metricSpec') - ? GoogleCloudAiplatformV1ToolParameterKeyMatchSpec.fromJson( + ? GoogleCloudAiplatformV1TrajectorySingleToolUseSpec.fromJson( json_['metricSpec'] as core.Map) : null, ); @@ -58857,329 +65312,104 @@ class GoogleCloudAiplatformV1ToolParameterKeyMatchInput { }; } -/// Spec for tool parameter key match instance. -typedef GoogleCloudAiplatformV1ToolParameterKeyMatchInstance = $Instance00; - -/// Tool parameter key match metric value for an instance. -class GoogleCloudAiplatformV1ToolParameterKeyMatchMetricValue { - /// Tool parameter key match score. +/// Spec for TrajectorySingleToolUse instance. +class GoogleCloudAiplatformV1TrajectorySingleToolUseInstance { + /// Spec for predicted tool call trajectory. /// - /// Output only. - core.double? score; + /// Required. + GoogleCloudAiplatformV1Trajectory? predictedTrajectory; - GoogleCloudAiplatformV1ToolParameterKeyMatchMetricValue({ - this.score, + GoogleCloudAiplatformV1TrajectorySingleToolUseInstance({ + this.predictedTrajectory, }); - GoogleCloudAiplatformV1ToolParameterKeyMatchMetricValue.fromJson( + GoogleCloudAiplatformV1TrajectorySingleToolUseInstance.fromJson( core.Map json_) : this( - score: (json_['score'] as core.num?)?.toDouble(), + predictedTrajectory: json_.containsKey('predictedTrajectory') + ? GoogleCloudAiplatformV1Trajectory.fromJson( + json_['predictedTrajectory'] + as core.Map) + : null, ); core.Map toJson() => { - if (score != null) 'score': score!, + if (predictedTrajectory != null) + 'predictedTrajectory': predictedTrajectory!, }; } -/// Results for tool parameter key match metric. -class GoogleCloudAiplatformV1ToolParameterKeyMatchResults { - /// Tool parameter key match metric values. +/// TrajectorySingleToolUse metric value for an instance. +class GoogleCloudAiplatformV1TrajectorySingleToolUseMetricValue { + /// TrajectorySingleToolUse score. /// /// Output only. - core.List? - toolParameterKeyMatchMetricValues; + core.double? score; - GoogleCloudAiplatformV1ToolParameterKeyMatchResults({ - this.toolParameterKeyMatchMetricValues, + GoogleCloudAiplatformV1TrajectorySingleToolUseMetricValue({ + this.score, }); - GoogleCloudAiplatformV1ToolParameterKeyMatchResults.fromJson(core.Map json_) + GoogleCloudAiplatformV1TrajectorySingleToolUseMetricValue.fromJson( + core.Map json_) : this( - toolParameterKeyMatchMetricValues: - (json_['toolParameterKeyMatchMetricValues'] as core.List?) - ?.map((value) => - GoogleCloudAiplatformV1ToolParameterKeyMatchMetricValue - .fromJson( - value as core.Map)) - .toList(), + score: (json_['score'] as core.num?)?.toDouble(), ); core.Map toJson() => { - if (toolParameterKeyMatchMetricValues != null) - 'toolParameterKeyMatchMetricValues': - toolParameterKeyMatchMetricValues!, + if (score != null) 'score': score!, }; } -/// Spec for tool parameter key match metric. 
-typedef GoogleCloudAiplatformV1ToolParameterKeyMatchSpec = $Empty; - -/// CMLE training config. -/// -/// For every active learning labeling iteration, system will train a machine -/// learning model on CMLE. The trained model will be used by data sampling -/// algorithm to select DataItems. -class GoogleCloudAiplatformV1TrainingConfig { - /// The timeout hours for the CMLE training job, expressed in milli hours i.e. - /// 1,000 value in this field means 1 hour. - core.String? timeoutTrainingMilliHours; +/// Results for TrajectorySingleToolUse metric. +class GoogleCloudAiplatformV1TrajectorySingleToolUseResults { + /// TrajectorySingleToolUse metric values. + /// + /// Output only. + core.List? + trajectorySingleToolUseMetricValues; - GoogleCloudAiplatformV1TrainingConfig({ - this.timeoutTrainingMilliHours, + GoogleCloudAiplatformV1TrajectorySingleToolUseResults({ + this.trajectorySingleToolUseMetricValues, }); - GoogleCloudAiplatformV1TrainingConfig.fromJson(core.Map json_) + GoogleCloudAiplatformV1TrajectorySingleToolUseResults.fromJson(core.Map json_) : this( - timeoutTrainingMilliHours: - json_['timeoutTrainingMilliHours'] as core.String?, + trajectorySingleToolUseMetricValues: + (json_['trajectorySingleToolUseMetricValues'] as core.List?) + ?.map((value) => + GoogleCloudAiplatformV1TrajectorySingleToolUseMetricValue + .fromJson( + value as core.Map)) + .toList(), ); core.Map toJson() => { - if (timeoutTrainingMilliHours != null) - 'timeoutTrainingMilliHours': timeoutTrainingMilliHours!, + if (trajectorySingleToolUseMetricValues != null) + 'trajectorySingleToolUseMetricValues': + trajectorySingleToolUseMetricValues!, }; } -/// The TrainingPipeline orchestrates tasks associated with training a Model. -/// -/// It always executes the training task, and optionally may also export data -/// from Vertex AI's Dataset which becomes the training input, upload the Model -/// to Vertex AI, and evaluate the Model. -class GoogleCloudAiplatformV1TrainingPipeline { - /// Time when the TrainingPipeline was created. - /// - /// Output only. - core.String? createTime; - - /// The user-defined name of this TrainingPipeline. - /// - /// Required. - core.String? displayName; - - /// Customer-managed encryption key spec for a TrainingPipeline. - /// - /// If set, this TrainingPipeline will be secured by this key. Note: Model - /// trained by this TrainingPipeline is also secured by this key if - /// model_to_upload is not set separately. - GoogleCloudAiplatformV1EncryptionSpec? encryptionSpec; - - /// Time when the TrainingPipeline entered any of the following states: - /// `PIPELINE_STATE_SUCCEEDED`, `PIPELINE_STATE_FAILED`, - /// `PIPELINE_STATE_CANCELLED`. - /// - /// Output only. - core.String? endTime; - - /// Only populated when the pipeline's state is `PIPELINE_STATE_FAILED` or - /// `PIPELINE_STATE_CANCELLED`. - /// - /// Output only. - GoogleRpcStatus? error; - - /// Specifies Vertex AI owned input data that may be used for training the - /// Model. - /// - /// The TrainingPipeline's training_task_definition should make clear whether - /// this config is used and if there are any special requirements on how it - /// should be filled. If nothing about this config is mentioned in the - /// training_task_definition, then it should be assumed that the - /// TrainingPipeline does not depend on this configuration. - GoogleCloudAiplatformV1InputDataConfig? inputDataConfig; - - /// The labels with user-defined metadata to organize TrainingPipelines. 
- /// - /// Label keys and values can be no longer than 64 characters (Unicode - /// codepoints), can only contain lowercase letters, numeric characters, - /// underscores and dashes. International characters are allowed. See - /// https://goo.gl/xmQnxf for more information and examples of labels. - core.Map? labels; - - /// The ID to use for the uploaded Model, which will become the final - /// component of the model resource name. - /// - /// This value may be up to 63 characters, and valid characters are - /// `[a-z0-9_-]`. The first character cannot be a number or hyphen. - /// - /// Optional. - core.String? modelId; - - /// Describes the Model that may be uploaded (via ModelService.UploadModel) by - /// this TrainingPipeline. - /// - /// The TrainingPipeline's training_task_definition should make clear whether - /// this Model description should be populated, and if there are any special - /// requirements regarding how it should be filled. If nothing is mentioned in - /// the training_task_definition, then it should be assumed that this field - /// should not be filled and the training task either uploads the Model - /// without a need of this information, or that training task does not support - /// uploading a Model as part of the pipeline. When the Pipeline's state - /// becomes `PIPELINE_STATE_SUCCEEDED` and the trained Model had been uploaded - /// into Vertex AI, then the model_to_upload's resource name is populated. The - /// Model is always uploaded into the Project and Location in which this - /// pipeline is. - GoogleCloudAiplatformV1Model? modelToUpload; - - /// Resource name of the TrainingPipeline. - /// - /// Output only. - core.String? name; - - /// When specify this field, the `model_to_upload` will not be uploaded as a - /// new model, instead, it will become a new version of this `parent_model`. - /// - /// Optional. - core.String? parentModel; - - /// Time when the TrainingPipeline for the first time entered the - /// `PIPELINE_STATE_RUNNING` state. - /// - /// Output only. - core.String? startTime; - - /// The detailed state of the pipeline. - /// - /// Output only. - /// Possible string values are: - /// - "PIPELINE_STATE_UNSPECIFIED" : The pipeline state is unspecified. - /// - "PIPELINE_STATE_QUEUED" : The pipeline has been created or resumed, and - /// processing has not yet begun. - /// - "PIPELINE_STATE_PENDING" : The service is preparing to run the pipeline. - /// - "PIPELINE_STATE_RUNNING" : The pipeline is in progress. - /// - "PIPELINE_STATE_SUCCEEDED" : The pipeline completed successfully. - /// - "PIPELINE_STATE_FAILED" : The pipeline failed. - /// - "PIPELINE_STATE_CANCELLING" : The pipeline is being cancelled. From this - /// state, the pipeline may only go to either PIPELINE_STATE_SUCCEEDED, - /// PIPELINE_STATE_FAILED or PIPELINE_STATE_CANCELLED. - /// - "PIPELINE_STATE_CANCELLED" : The pipeline has been cancelled. - /// - "PIPELINE_STATE_PAUSED" : The pipeline has been stopped, and can be - /// resumed. - core.String? state; - - /// A Google Cloud Storage path to the YAML file that defines the training - /// task which is responsible for producing the model artifact, and may also - /// include additional auxiliary work. - /// - /// The definition files that can be used here are found in - /// gs://google-cloud-aiplatform/schema/trainingjob/definition/. Note: The URI - /// given on output will be immutable and probably different, including the - /// URI scheme, than the one given on input. 
The output URI will point to a - /// location where the user only has a read access. - /// - /// Required. - core.String? trainingTaskDefinition; - - /// The training task's parameter(s), as specified in the - /// training_task_definition's `inputs`. +/// Spec for TrajectorySingleToolUse metric - returns 1 if tool is present in +/// the predicted trajectory, else 0. +class GoogleCloudAiplatformV1TrajectorySingleToolUseSpec { + /// Spec for tool name to be checked for in the predicted trajectory. /// /// Required. - /// - /// The values for Object must be JSON objects. It can consist of `num`, - /// `String`, `bool` and `null` as well as `Map` and `List` values. - core.Object? trainingTaskInputs; - - /// The metadata information as specified in the training_task_definition's - /// `metadata`. - /// - /// This metadata is an auxiliary runtime and final information about the - /// training task. While the pipeline is running this information is populated - /// only at a best effort basis. Only present if the pipeline's - /// training_task_definition contains `metadata` object. - /// - /// Output only. - /// - /// The values for Object must be JSON objects. It can consist of `num`, - /// `String`, `bool` and `null` as well as `Map` and `List` values. - core.Object? trainingTaskMetadata; - - /// Time when the TrainingPipeline was most recently updated. - /// - /// Output only. - core.String? updateTime; + core.String? toolName; - GoogleCloudAiplatformV1TrainingPipeline({ - this.createTime, - this.displayName, - this.encryptionSpec, - this.endTime, - this.error, - this.inputDataConfig, - this.labels, - this.modelId, - this.modelToUpload, - this.name, - this.parentModel, - this.startTime, - this.state, - this.trainingTaskDefinition, - this.trainingTaskInputs, - this.trainingTaskMetadata, - this.updateTime, + GoogleCloudAiplatformV1TrajectorySingleToolUseSpec({ + this.toolName, }); - GoogleCloudAiplatformV1TrainingPipeline.fromJson(core.Map json_) + GoogleCloudAiplatformV1TrajectorySingleToolUseSpec.fromJson(core.Map json_) : this( - createTime: json_['createTime'] as core.String?, - displayName: json_['displayName'] as core.String?, - encryptionSpec: json_.containsKey('encryptionSpec') - ? GoogleCloudAiplatformV1EncryptionSpec.fromJson( - json_['encryptionSpec'] - as core.Map) - : null, - endTime: json_['endTime'] as core.String?, - error: json_.containsKey('error') - ? GoogleRpcStatus.fromJson( - json_['error'] as core.Map) - : null, - inputDataConfig: json_.containsKey('inputDataConfig') - ? GoogleCloudAiplatformV1InputDataConfig.fromJson( - json_['inputDataConfig'] - as core.Map) - : null, - labels: - (json_['labels'] as core.Map?)?.map( - (key, value) => core.MapEntry( - key, - value as core.String, - ), - ), - modelId: json_['modelId'] as core.String?, - modelToUpload: json_.containsKey('modelToUpload') - ? 
GoogleCloudAiplatformV1Model.fromJson( - json_['modelToUpload'] as core.Map) - : null, - name: json_['name'] as core.String?, - parentModel: json_['parentModel'] as core.String?, - startTime: json_['startTime'] as core.String?, - state: json_['state'] as core.String?, - trainingTaskDefinition: - json_['trainingTaskDefinition'] as core.String?, - trainingTaskInputs: json_['trainingTaskInputs'], - trainingTaskMetadata: json_['trainingTaskMetadata'], - updateTime: json_['updateTime'] as core.String?, + toolName: json_['toolName'] as core.String?, ); core.Map toJson() => { - if (createTime != null) 'createTime': createTime!, - if (displayName != null) 'displayName': displayName!, - if (encryptionSpec != null) 'encryptionSpec': encryptionSpec!, - if (endTime != null) 'endTime': endTime!, - if (error != null) 'error': error!, - if (inputDataConfig != null) 'inputDataConfig': inputDataConfig!, - if (labels != null) 'labels': labels!, - if (modelId != null) 'modelId': modelId!, - if (modelToUpload != null) 'modelToUpload': modelToUpload!, - if (name != null) 'name': name!, - if (parentModel != null) 'parentModel': parentModel!, - if (startTime != null) 'startTime': startTime!, - if (state != null) 'state': state!, - if (trainingTaskDefinition != null) - 'trainingTaskDefinition': trainingTaskDefinition!, - if (trainingTaskInputs != null) - 'trainingTaskInputs': trainingTaskInputs!, - if (trainingTaskMetadata != null) - 'trainingTaskMetadata': trainingTaskMetadata!, - if (updateTime != null) 'updateTime': updateTime!, + if (toolName != null) 'toolName': toolName!, }; } @@ -59517,6 +65747,8 @@ class GoogleCloudAiplatformV1TuningDataStats { /// Represents a TuningJob that runs with Google owned models. class GoogleCloudAiplatformV1TuningJob { /// The base model that is being tuned, e.g., "gemini-1.0-pro-002". + /// + /// . core.String? baseModel; /// Time when the TuningJob was created. @@ -59572,6 +65804,15 @@ class GoogleCloudAiplatformV1TuningJob { /// Output only. core.String? name; + /// The service account that the tuningJob workload runs as. + /// + /// If not specified, the Vertex AI Secure Fine-Tuned Service Agent in the + /// project will be used. See + /// https://cloud.google.com/iam/docs/service-agents#vertex-ai-secure-fine-tuning-service-agent + /// Users starting the pipeline must have the `iam.serviceAccounts.actAs` + /// permission on this service account. + core.String? serviceAccount; + /// Time when the TuningJob for the first time entered the `JOB_STATE_RUNNING` /// state. 
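// --- Illustrative sketch, not part of the generated diff -----------------------
// A hedged example of how the new `serviceAccount` field on
// GoogleCloudAiplatformV1TuningJob might be populated; the project and
// service-account email below are hypothetical placeholders.
final sampleTuningJob = GoogleCloudAiplatformV1TuningJob(
  baseModel: 'gemini-1.0-pro-002',
  // Runs the tuning workload as a caller-chosen service account instead of the
  // default Vertex AI Secure Fine-Tuned Service Agent; per the field docs, the
  // caller needs `iam.serviceAccounts.actAs` on this account.
  serviceAccount: 'tuning-runner@my-project.iam.gserviceaccount.com',
);
// --------------------------------------------------------------------------------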
/// @@ -59638,6 +65879,7 @@ class GoogleCloudAiplatformV1TuningJob { this.experiment, this.labels, this.name, + this.serviceAccount, this.startTime, this.state, this.supervisedTuningSpec, @@ -59671,6 +65913,7 @@ class GoogleCloudAiplatformV1TuningJob { ), ), name: json_['name'] as core.String?, + serviceAccount: json_['serviceAccount'] as core.String?, startTime: json_['startTime'] as core.String?, state: json_['state'] as core.String?, supervisedTuningSpec: json_.containsKey('supervisedTuningSpec') @@ -59701,6 +65944,7 @@ class GoogleCloudAiplatformV1TuningJob { if (experiment != null) 'experiment': experiment!, if (labels != null) 'labels': labels!, if (name != null) 'name': name!, + if (serviceAccount != null) 'serviceAccount': serviceAccount!, if (startTime != null) 'startTime': startTime!, if (state != null) 'state': state!, if (supervisedTuningSpec != null) @@ -59818,6 +66062,34 @@ class GoogleCloudAiplatformV1UnmanagedContainerModel { }; } +/// Request message for EndpointService.UpdateEndpointLongRunning. +class GoogleCloudAiplatformV1UpdateEndpointLongRunningRequest { + /// The Endpoint which replaces the resource on the server. + /// + /// Currently we only support updating the `client_connection_config` field, + /// all the other fields' update will be blocked. + /// + /// Required. + GoogleCloudAiplatformV1Endpoint? endpoint; + + GoogleCloudAiplatformV1UpdateEndpointLongRunningRequest({ + this.endpoint, + }); + + GoogleCloudAiplatformV1UpdateEndpointLongRunningRequest.fromJson( + core.Map json_) + : this( + endpoint: json_.containsKey('endpoint') + ? GoogleCloudAiplatformV1Endpoint.fromJson( + json_['endpoint'] as core.Map) + : null, + ); + + core.Map toJson() => { + if (endpoint != null) 'endpoint': endpoint!, + }; +} + /// Request message for ModelService.UpdateExplanationDataset. class GoogleCloudAiplatformV1UpdateExplanationDatasetRequest { /// The example config containing the location of the dataset. @@ -59906,6 +66178,102 @@ class GoogleCloudAiplatformV1UploadModelRequest { }; } +/// Config for uploading RagFile. +class GoogleCloudAiplatformV1UploadRagFileConfig { + /// Specifies the transformation config for RagFiles. + GoogleCloudAiplatformV1RagFileTransformationConfig? + ragFileTransformationConfig; + + GoogleCloudAiplatformV1UploadRagFileConfig({ + this.ragFileTransformationConfig, + }); + + GoogleCloudAiplatformV1UploadRagFileConfig.fromJson(core.Map json_) + : this( + ragFileTransformationConfig: + json_.containsKey('ragFileTransformationConfig') + ? GoogleCloudAiplatformV1RagFileTransformationConfig.fromJson( + json_['ragFileTransformationConfig'] + as core.Map) + : null, + ); + + core.Map toJson() => { + if (ragFileTransformationConfig != null) + 'ragFileTransformationConfig': ragFileTransformationConfig!, + }; +} + +/// Request message for VertexRagDataService.UploadRagFile. +class GoogleCloudAiplatformV1UploadRagFileRequest { + /// The RagFile to upload. + /// + /// Required. + GoogleCloudAiplatformV1RagFile? ragFile; + + /// The config for the RagFiles to be uploaded into the RagCorpus. + /// + /// VertexRagDataService.UploadRagFile. + /// + /// Required. + GoogleCloudAiplatformV1UploadRagFileConfig? uploadRagFileConfig; + + GoogleCloudAiplatformV1UploadRagFileRequest({ + this.ragFile, + this.uploadRagFileConfig, + }); + + GoogleCloudAiplatformV1UploadRagFileRequest.fromJson(core.Map json_) + : this( + ragFile: json_.containsKey('ragFile') + ? 
GoogleCloudAiplatformV1RagFile.fromJson( + json_['ragFile'] as core.Map) + : null, + uploadRagFileConfig: json_.containsKey('uploadRagFileConfig') + ? GoogleCloudAiplatformV1UploadRagFileConfig.fromJson( + json_['uploadRagFileConfig'] + as core.Map) + : null, + ); + + core.Map toJson() => { + if (ragFile != null) 'ragFile': ragFile!, + if (uploadRagFileConfig != null) + 'uploadRagFileConfig': uploadRagFileConfig!, + }; +} + +/// Response message for VertexRagDataService.UploadRagFile. +class GoogleCloudAiplatformV1UploadRagFileResponse { + /// The error that occurred while processing the RagFile. + GoogleRpcStatus? error; + + /// The RagFile that had been uploaded into the RagCorpus. + GoogleCloudAiplatformV1RagFile? ragFile; + + GoogleCloudAiplatformV1UploadRagFileResponse({ + this.error, + this.ragFile, + }); + + GoogleCloudAiplatformV1UploadRagFileResponse.fromJson(core.Map json_) + : this( + error: json_.containsKey('error') + ? GoogleRpcStatus.fromJson( + json_['error'] as core.Map) + : null, + ragFile: json_.containsKey('ragFile') + ? GoogleCloudAiplatformV1RagFile.fromJson( + json_['ragFile'] as core.Map) + : null, + ); + + core.Map toJson() => { + if (error != null) 'error': error!, + if (ragFile != null) 'ragFile': ragFile!, + }; +} + /// Request message for IndexService.UpsertDatapoints class GoogleCloudAiplatformV1UpsertDatapointsRequest { /// A list of datapoints to be created/updated. @@ -60046,14 +66414,6 @@ class GoogleCloudAiplatformV1VertexAISearch { /// Retrieve from Vertex RAG Store for grounding. class GoogleCloudAiplatformV1VertexRagStore { - /// Please use rag_resources instead. - /// - /// Optional. Deprecated. - @core.Deprecated( - 'Not supported. Member documentation may have more information.', - ) - core.List? ragCorpora; - /// The representation of the rag source. /// /// It can be used to specify corpus only or ragfiles. Currently only support @@ -60063,41 +66423,55 @@ class GoogleCloudAiplatformV1VertexRagStore { /// Optional. core.List? ragResources; + /// The retrieval config for the Rag query. + /// + /// Optional. + GoogleCloudAiplatformV1RagRetrievalConfig? ragRetrievalConfig; + /// Number of top k results to return from the selected corpora. /// /// Optional. + @core.Deprecated( + 'Not supported. Member documentation may have more information.', + ) core.int? similarityTopK; /// Only return results with vector distance smaller than the threshold. /// /// Optional. + @core.Deprecated( + 'Not supported. Member documentation may have more information.', + ) core.double? vectorDistanceThreshold; GoogleCloudAiplatformV1VertexRagStore({ - this.ragCorpora, this.ragResources, + this.ragRetrievalConfig, this.similarityTopK, this.vectorDistanceThreshold, }); GoogleCloudAiplatformV1VertexRagStore.fromJson(core.Map json_) : this( - ragCorpora: (json_['ragCorpora'] as core.List?) - ?.map((value) => value as core.String) - .toList(), ragResources: (json_['ragResources'] as core.List?) ?.map((value) => GoogleCloudAiplatformV1VertexRagStoreRagResource.fromJson( value as core.Map)) .toList(), + ragRetrievalConfig: json_.containsKey('ragRetrievalConfig') + ? 
GoogleCloudAiplatformV1RagRetrievalConfig.fromJson( + json_['ragRetrievalConfig'] + as core.Map) + : null, similarityTopK: json_['similarityTopK'] as core.int?, vectorDistanceThreshold: (json_['vectorDistanceThreshold'] as core.num?)?.toDouble(), ); core.Map toJson() => { - if (ragCorpora != null) 'ragCorpora': ragCorpora!, if (ragResources != null) 'ragResources': ragResources!, + if (ragRetrievalConfig != null) + 'ragRetrievalConfig': ragRetrievalConfig!, if (similarityTopK != null) 'similarityTopK': similarityTopK!, if (vectorDistanceThreshold != null) 'vectorDistanceThreshold': vectorDistanceThreshold!, @@ -60105,39 +66479,8 @@ class GoogleCloudAiplatformV1VertexRagStore { } /// The definition of the Rag resource. -class GoogleCloudAiplatformV1VertexRagStoreRagResource { - /// RagCorpora resource name. - /// - /// Format: `projects/{project}/locations/{location}/ragCorpora/{rag_corpus}` - /// - /// Optional. - core.String? ragCorpus; - - /// rag_file_id. - /// - /// The files should be in the same rag_corpus set in rag_corpus field. - /// - /// Optional. - core.List? ragFileIds; - - GoogleCloudAiplatformV1VertexRagStoreRagResource({ - this.ragCorpus, - this.ragFileIds, - }); - - GoogleCloudAiplatformV1VertexRagStoreRagResource.fromJson(core.Map json_) - : this( - ragCorpus: json_['ragCorpus'] as core.String?, - ragFileIds: (json_['ragFileIds'] as core.List?) - ?.map((value) => value as core.String) - .toList(), - ); - - core.Map toJson() => { - if (ragCorpus != null) 'ragCorpus': ragCorpus!, - if (ragFileIds != null) 'ragFileIds': ragFileIds!, - }; -} +typedef GoogleCloudAiplatformV1VertexRagStoreRagResource + = $VertexRagStoreRagResource; /// Metadata describes the input video content. class GoogleCloudAiplatformV1VideoMetadata { @@ -60168,6 +66511,30 @@ class GoogleCloudAiplatformV1VideoMetadata { }; } +/// The configuration for the voice to use. +class GoogleCloudAiplatformV1VoiceConfig { + /// The configuration for the prebuilt voice to use. + GoogleCloudAiplatformV1PrebuiltVoiceConfig? prebuiltVoiceConfig; + + GoogleCloudAiplatformV1VoiceConfig({ + this.prebuiltVoiceConfig, + }); + + GoogleCloudAiplatformV1VoiceConfig.fromJson(core.Map json_) + : this( + prebuiltVoiceConfig: json_.containsKey('prebuiltVoiceConfig') + ? GoogleCloudAiplatformV1PrebuiltVoiceConfig.fromJson( + json_['prebuiltVoiceConfig'] + as core.Map) + : null, + ); + + core.Map toJson() => { + if (prebuiltVoiceConfig != null) + 'prebuiltVoiceConfig': prebuiltVoiceConfig!, + }; +} + /// Represents the spec of a worker pool in a job. class GoogleCloudAiplatformV1WorkerPoolSpec { /// The custom container task. diff --git a/generated/googleapis/lib/alloydb/v1.dart b/generated/googleapis/lib/alloydb/v1.dart index 1b6a28e50..1c8561ac3 100644 --- a/generated/googleapis/lib/alloydb/v1.dart +++ b/generated/googleapis/lib/alloydb/v1.dart @@ -709,6 +709,49 @@ class ProjectsLocationsClustersResource { return Operation.fromJson(response_ as core.Map); } + /// Exports data from the cluster. + /// + /// Imperative only. + /// + /// [request] - The metadata request object. + /// + /// Request parameters: + /// + /// [name] - Required. The resource name of the cluster. + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/clusters/\[^/\]+$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [Operation]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. 
+ /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future export( + ExportClusterRequest request, + core.String name, { + core.String? $fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name') + ':export'; + + final response_ = await _requester.request( + url_, + 'POST', + body: body_, + queryParams: queryParams_, + ); + return Operation.fromJson(response_ as core.Map); + } + /// Gets details of a single Cluster. /// /// Request parameters: @@ -1962,8 +2005,8 @@ class ProjectsLocationsOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// [request] - The metadata request object. /// @@ -3263,6 +3306,62 @@ class ContinuousBackupSource { }; } +/// Options for exporting data in CSV format. +class CsvExportOptions { + /// Specifies the character that should appear before a data character that + /// needs to be escaped. + /// + /// The default is the same as quote character. The value of this argument has + /// to be a character in Hex ASCII Code. + /// + /// Optional. + core.String? escapeCharacter; + + /// Specifies the character that separates columns within each row (line) of + /// the file. + /// + /// The default is comma. The value of this argument has to be a character in + /// Hex ASCII Code. + /// + /// Optional. + core.String? fieldDelimiter; + + /// Specifies the quoting character to be used when a data value is quoted. + /// + /// The default is double-quote. The value of this argument has to be a + /// character in Hex ASCII Code. + /// + /// Optional. + core.String? quoteCharacter; + + /// The SELECT query used to extract the data. + /// + /// Required. + core.String? selectQuery; + + CsvExportOptions({ + this.escapeCharacter, + this.fieldDelimiter, + this.quoteCharacter, + this.selectQuery, + }); + + CsvExportOptions.fromJson(core.Map json_) + : this( + escapeCharacter: json_['escapeCharacter'] as core.String?, + fieldDelimiter: json_['fieldDelimiter'] as core.String?, + quoteCharacter: json_['quoteCharacter'] as core.String?, + selectQuery: json_['selectQuery'] as core.String?, + ); + + core.Map toJson() => { + if (escapeCharacter != null) 'escapeCharacter': escapeCharacter!, + if (fieldDelimiter != null) 'fieldDelimiter': fieldDelimiter!, + if (quoteCharacter != null) 'quoteCharacter': quoteCharacter!, + if (selectQuery != null) 'selectQuery': selectQuery!, + }; +} + /// A generic empty message that you can re-use to avoid defining duplicated /// empty messages in your APIs. /// @@ -3316,9 +3415,92 @@ class EncryptionInfo { }; } +/// Export cluster request. +class ExportClusterRequest { + /// Options for exporting data in CSV format. + /// + /// Required field to be set for CSV file type. + CsvExportOptions? csvExportOptions; + + /// Name of the database where the export command will be executed. + /// + /// Note - Value provided should be the same as expected from `SELECT + /// current_database();` and NOT as a resource reference. 
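// --- Illustrative sketch, not part of the generated diff -----------------------
// A hedged example of the new clusters.export call: dump one table as CSV into
// Cloud Storage. The project, cluster, bucket, table and query names are
// hypothetical placeholders; only types defined in this file are used.
Future<Operation> exportClusterToCsvSketch(
    ProjectsLocationsClustersResource clusters) {
  final request = ExportClusterRequest(
    // Must match `SELECT current_database();` on the cluster, not a resource name.
    database: 'postgres',
    csvExportOptions: CsvExportOptions(selectQuery: 'SELECT * FROM my_table'),
    gcsDestination: GcsDestination(uri: 'gs://my-bucket/exports/my_table.csv'),
  );
  return clusters.export(
    request,
    'projects/my-project/locations/us-central1/clusters/my-cluster',
  );
}
// --------------------------------------------------------------------------------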
+ /// + /// Required. + core.String? database; + + /// Option to export data to cloud storage. + /// + /// Required. + GcsDestination? gcsDestination; + + /// Options for exporting data in SQL format. + /// + /// Required field to be set for SQL file type. + SqlExportOptions? sqlExportOptions; + + ExportClusterRequest({ + this.csvExportOptions, + this.database, + this.gcsDestination, + this.sqlExportOptions, + }); + + ExportClusterRequest.fromJson(core.Map json_) + : this( + csvExportOptions: json_.containsKey('csvExportOptions') + ? CsvExportOptions.fromJson(json_['csvExportOptions'] + as core.Map) + : null, + database: json_['database'] as core.String?, + gcsDestination: json_.containsKey('gcsDestination') + ? GcsDestination.fromJson(json_['gcsDestination'] + as core.Map) + : null, + sqlExportOptions: json_.containsKey('sqlExportOptions') + ? SqlExportOptions.fromJson(json_['sqlExportOptions'] + as core.Map) + : null, + ); + + core.Map toJson() => { + if (csvExportOptions != null) 'csvExportOptions': csvExportOptions!, + if (database != null) 'database': database!, + if (gcsDestination != null) 'gcsDestination': gcsDestination!, + if (sqlExportOptions != null) 'sqlExportOptions': sqlExportOptions!, + }; +} + /// Message for triggering failover on an Instance typedef FailoverInstanceRequest = $Request05; +/// Destination for Export. +/// +/// Export will be done to cloud storage. +class GcsDestination { + /// The path to the file in Google Cloud Storage where the export will be + /// stored. + /// + /// The URI is in the form `gs://bucketName/fileName`. + /// + /// Required. + core.String? uri; + + GcsDestination({ + this.uri, + }); + + GcsDestination.fromJson(core.Map json_) + : this( + uri: json_['uri'] as core.String?, + ); + + core.Map toJson() => { + if (uri != null) 'uri': uri!, + }; +} + /// The response message for Locations.ListLocations. class GoogleCloudLocationListLocationsResponse { /// A list of locations that matches the specified filter in the request. @@ -3443,16 +3625,18 @@ class Instance { /// Database flags. /// - /// Set at instance level. * They are copied from primary instance on read - /// instance creation. * Read instances can set new or override existing flags - /// that are relevant for reads, e.g. for enabling columnar cache on a read - /// instance. Flags set on read instance may or may not be present on primary. - /// This is a list of "key": "value" pairs. "key": The name of the flag. These - /// flags are passed at instance setup time, so include both server options - /// and system variables for Postgres. Flags are specified with underscores, - /// not hyphens. "value": The value of the flag. Booleans are set to **on** - /// for true and **off** for false. This field must be omitted if the flag - /// doesn't take a value. + /// Set at the instance level. They are copied from the primary instance on + /// secondary instance creation. Flags that have restrictions default to the + /// value at primary instance on read instances during creation. Read + /// instances can set new flags or override existing flags that are relevant + /// for reads, for example, for enabling columnar cache on a read instance. + /// Flags set on read instance might or might not be present on the primary + /// instance. This is a list of "key": "value" pairs. "key": The name of the + /// flag. These flags are passed at instance setup time, so include both + /// server options and system variables for Postgres. Flags are specified with + /// underscores, not hyphens. 
"value": The value of the flag. Booleans are set + /// to **on** for true and **off** for false. This field must be omitted if + /// the flag doesn't take a value. core.Map? databaseFlags; /// Delete time stamp @@ -4763,6 +4947,59 @@ class SecondaryConfig { }; } +/// Options for exporting data in SQL format. +class SqlExportOptions { + /// If true, output commands to DROP all the dumped database objects prior to + /// outputting the commands for creating them. + /// + /// Optional. + core.bool? cleanTargetObjects; + + /// If true, use DROP ... + /// + /// IF EXISTS commands to check for the object's existence before dropping it + /// in clean_target_objects mode. + /// + /// Optional. + core.bool? ifExistTargetObjects; + + /// If true, only export the schema. + /// + /// Optional. + core.bool? schemaOnly; + + /// Tables to export from. + /// + /// Optional. + core.List? tables; + + SqlExportOptions({ + this.cleanTargetObjects, + this.ifExistTargetObjects, + this.schemaOnly, + this.tables, + }); + + SqlExportOptions.fromJson(core.Map json_) + : this( + cleanTargetObjects: json_['cleanTargetObjects'] as core.bool?, + ifExistTargetObjects: json_['ifExistTargetObjects'] as core.bool?, + schemaOnly: json_['schemaOnly'] as core.bool?, + tables: (json_['tables'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + ); + + core.Map toJson() => { + if (cleanTargetObjects != null) + 'cleanTargetObjects': cleanTargetObjects!, + if (ifExistTargetObjects != null) + 'ifExistTargetObjects': ifExistTargetObjects!, + if (schemaOnly != null) 'schemaOnly': schemaOnly!, + if (tables != null) 'tables': tables!, + }; +} + /// SSL configuration. class SslConfig { /// Certificate Authority (CA) source. diff --git a/generated/googleapis/lib/analyticshub/v1.dart b/generated/googleapis/lib/analyticshub/v1.dart index 342e5058c..71bed8576 100644 --- a/generated/googleapis/lib/analyticshub/v1.dart +++ b/generated/googleapis/lib/analyticshub/v1.dart @@ -3453,6 +3453,11 @@ class SubscribeDataExchangeRequest { /// Required. core.String? destination; + /// BigQuery destination dataset to create for the subscriber. + /// + /// Optional. + DestinationDataset? destinationDataset; + /// Email of the subscriber. core.String? subscriberContact; @@ -3465,6 +3470,7 @@ class SubscribeDataExchangeRequest { SubscribeDataExchangeRequest({ this.destination, + this.destinationDataset, this.subscriberContact, this.subscription, }); @@ -3472,12 +3478,18 @@ class SubscribeDataExchangeRequest { SubscribeDataExchangeRequest.fromJson(core.Map json_) : this( destination: json_['destination'] as core.String?, + destinationDataset: json_.containsKey('destinationDataset') + ? 
DestinationDataset.fromJson(json_['destinationDataset'] + as core.Map) + : null, subscriberContact: json_['subscriberContact'] as core.String?, subscription: json_['subscription'] as core.String?, ); core.Map toJson() => { if (destination != null) 'destination': destination!, + if (destinationDataset != null) + 'destinationDataset': destinationDataset!, if (subscriberContact != null) 'subscriberContact': subscriberContact!, if (subscription != null) 'subscription': subscription!, }; diff --git a/generated/googleapis/lib/androiddeviceprovisioning/v1.dart b/generated/googleapis/lib/androiddeviceprovisioning/v1.dart index 480d3d3af..4ee4b3ef7 100644 --- a/generated/googleapis/lib/androiddeviceprovisioning/v1.dart +++ b/generated/googleapis/lib/androiddeviceprovisioning/v1.dart @@ -78,8 +78,8 @@ class CustomersResource { /// /// Request parameters: /// - /// [pageSize] - The maximum number of customers to show in a page of results. - /// A number between 1 and 100 (inclusive). + /// [pageSize] - Required. The maximum number of customers to show in a page + /// of results. A number between 1 and 100 (inclusive). /// /// [pageToken] - A token specifying which result page to return. This field /// has custom validations in ListCustomersRequestValidator @@ -429,8 +429,8 @@ class CustomersDevicesResource { /// name in the format `customers/[CUSTOMER_ID]`. /// Value must have pattern `^customers/\[^/\]+$`. /// - /// [pageSize] - The maximum number of devices to show in a page of results. - /// Must be between 1 and 100 inclusive. + /// [pageSize] - Required. The maximum number of devices to show in a page of + /// results. Must be between 1 and 100 inclusive. /// /// [pageToken] - A token specifying which result page to return. /// diff --git a/generated/googleapis/lib/androidenterprise/v1.dart b/generated/googleapis/lib/androidenterprise/v1.dart index 239fcce32..c8ad1c2ba 100644 --- a/generated/googleapis/lib/androidenterprise/v1.dart +++ b/generated/googleapis/lib/androidenterprise/v1.dart @@ -502,16 +502,33 @@ class EnterprisesResource { /// /// Request parameters: /// - /// [enterpriseId] - The ID of the enterprise. + /// [enterpriseId] - Required. The ID of the enterprise. /// - /// [deviceType] - Whether it’s a dedicated device or a knowledge worker - /// device. + /// [deviceType] - Deprecated: Use enrollment_token instead. this field will + /// be removed in the future. /// Possible string values are: /// - "unknown" : This value is unused /// - "dedicatedDevice" : This device is a dedicated device. /// - "knowledgeWorker" : This device is required to have an authenticated /// user. /// + /// [enrollmentToken_duration] - \[Optional\] The length of time the + /// enrollment token is valid, ranging from 1 minute to + /// \[`Durations.MAX_VALUE`\](https://developers.google.com/protocol-buffers/docs/reference/java/com/google/protobuf/util/Durations.html#MAX_VALUE), + /// approximately 10,000 years. If not specified, the default duration is 1 + /// hour. + /// + /// [enrollmentToken_enrollmentTokenType] - \[Required\] The type of the + /// enrollment token. + /// Possible string values are: + /// - "enrollmentTokenTypeUnspecified" : The value is unused. + /// - "userlessDevice" : The enrollment token is for a userless device. + /// - "userDevice" : The enrollment token is for a user device. + /// + /// [enrollmentToken_token] - The token value that's passed to the device and + /// authorizes the device to enroll. This is a read-only field generated by + /// the server. 
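// --- Illustrative sketch, not part of the generated diff -----------------------
// A hedged example of requesting a structured enrollment token through the new
// enrollmentToken.* query parameters rather than the deprecated deviceType
// field. The enterprise ID and duration value are hypothetical placeholders.
Future<EnrollmentToken?> createUserlessEnrollmentToken(
    EnterprisesResource enterprises) async {
  final response = await enterprises.createEnrollmentToken(
    'LC0123456789',
    enrollmentToken_enrollmentTokenType: 'userlessDevice',
    enrollmentToken_duration: '3600s',
  );
  // The deprecated flat `enrollmentToken` string is still returned; new callers
  // are expected to read the structured `token` field instead.
  return response.token;
}
// --------------------------------------------------------------------------------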
+ /// /// [$fields] - Selector specifying which fields to include in a partial /// response. /// @@ -525,10 +542,21 @@ class EnterprisesResource { async.Future createEnrollmentToken( core.String enterpriseId, { core.String? deviceType, + core.String? enrollmentToken_duration, + core.String? enrollmentToken_enrollmentTokenType, + core.String? enrollmentToken_token, core.String? $fields, }) async { final queryParams_ = >{ if (deviceType != null) 'deviceType': [deviceType], + if (enrollmentToken_duration != null) + 'enrollmentToken.duration': [enrollmentToken_duration], + if (enrollmentToken_enrollmentTokenType != null) + 'enrollmentToken.enrollmentTokenType': [ + enrollmentToken_enrollmentTokenType + ], + if (enrollmentToken_token != null) + 'enrollmentToken.token': [enrollmentToken_token], if ($fields != null) 'fields': [$fields], }; @@ -4531,20 +4559,36 @@ class ConfigurationVariables { /// Response message for create enrollment token. class CreateEnrollmentTokenResponse { - /// Enrollment token. + /// Deprecated: Use token instead. + /// + /// This field will be removed in the future. + @core.Deprecated( + 'Not supported. Member documentation may have more information.', + ) core.String? enrollmentToken; + /// The created enrollment token. + /// + /// Required. + EnrollmentToken? token; + CreateEnrollmentTokenResponse({ this.enrollmentToken, + this.token, }); CreateEnrollmentTokenResponse.fromJson(core.Map json_) : this( enrollmentToken: json_['enrollmentToken'] as core.String?, + token: json_.containsKey('token') + ? EnrollmentToken.fromJson( + json_['token'] as core.Map) + : null, ); core.Map toJson() => { if (enrollmentToken != null) 'enrollmentToken': enrollmentToken!, + if (token != null) 'token': token!, }; } @@ -4789,6 +4833,53 @@ class DevicesListResponse { }; } +/// A token used to enroll a device. +class EnrollmentToken { + /// The length of time the enrollment token is valid, ranging from 1 minute to + /// \[`Durations.MAX_VALUE`\](https://developers.google.com/protocol-buffers/docs/reference/java/com/google/protobuf/util/Durations.html#MAX_VALUE), + /// approximately 10,000 years. + /// + /// If not specified, the default duration is 1 hour. + /// + /// Optional. + core.String? duration; + + /// The type of the enrollment token. + /// + /// Required. + /// Possible string values are: + /// - "enrollmentTokenTypeUnspecified" : The value is unused. + /// - "userlessDevice" : The enrollment token is for a userless device. + /// - "userDevice" : The enrollment token is for a user device. + core.String? enrollmentTokenType; + + /// The token value that's passed to the device and authorizes the device to + /// enroll. + /// + /// This is a read-only field generated by the server. + core.String? token; + + EnrollmentToken({ + this.duration, + this.enrollmentTokenType, + this.token, + }); + + EnrollmentToken.fromJson(core.Map json_) + : this( + duration: json_['duration'] as core.String?, + enrollmentTokenType: json_['enrollmentTokenType'] as core.String?, + token: json_['token'] as core.String?, + ); + + core.Map toJson() => { + if (duration != null) 'duration': duration!, + if (enrollmentTokenType != null) + 'enrollmentTokenType': enrollmentTokenType!, + if (token != null) 'token': token!, + }; +} + /// An Enterprises resource represents the binding between an EMM and a specific /// organization. /// @@ -6003,10 +6094,12 @@ class Policy { /// Recommended alternative: autoUpdateMode which is set per app, provides /// greater flexibility around update frequency. 
When autoUpdateMode is set to /// AUTO_UPDATE_POSTPONED or AUTO_UPDATE_HIGH_PRIORITY, autoUpdatePolicy has - /// no effect. "choiceToTheUser" allows the device's user to configure the app - /// update policy. "always" enables auto updates. "never" disables auto - /// updates. "wifiOnly" enables auto updates only when the device is connected - /// to wifi. + /// no effect. - choiceToTheUser allows the device's user to configure the app + /// update policy. - always enables auto updates. - never disables auto + /// updates. - wifiOnly enables auto updates only when the device is connected + /// to wifi. *Important:* Changes to app update policies don't affect updates + /// that are in progress. Any policy changes will apply to subsequent app + /// updates. /// Possible string values are: /// - "autoUpdatePolicyUnspecified" : The auto update policy is not set. /// - "choiceToTheUser" : The user can control auto-updates. diff --git a/generated/googleapis/lib/androidmanagement/v1.dart b/generated/googleapis/lib/androidmanagement/v1.dart index d70a72b96..ae7427b86 100644 --- a/generated/googleapis/lib/androidmanagement/v1.dart +++ b/generated/googleapis/lib/androidmanagement/v1.dart @@ -2305,22 +2305,22 @@ class ApplicationPolicy { /// by default. /// - "PROMPT" : Prompt the user to grant a permission. /// - "GRANT" : Automatically grant a permission.On Android 12 and above, - /// Manifest.permission.READ_SMS + /// READ_SMS /// (https://developer.android.com/reference/android/Manifest.permission#READ_SMS) /// and following sensor-related permissions can only be granted on fully - /// managed devices: Manifest.permission.ACCESS_FINE_LOCATION + /// managed devices: ACCESS_FINE_LOCATION /// (https://developer.android.com/reference/android/Manifest.permission#ACCESS_FINE_LOCATION) - /// Manifest.permission.ACCESS_BACKGROUND_LOCATION + /// ACCESS_BACKGROUND_LOCATION /// (https://developer.android.com/reference/android/Manifest.permission#ACCESS_BACKGROUND_LOCATION) - /// Manifest.permission.ACCESS_COARSE_LOCATION + /// ACCESS_COARSE_LOCATION /// (https://developer.android.com/reference/android/Manifest.permission#ACCESS_COARSE_LOCATION) - /// Manifest.permission.CAMERA + /// CAMERA /// (https://developer.android.com/reference/android/Manifest.permission#CAMERA) - /// Manifest.permission.RECORD_AUDIO + /// RECORD_AUDIO /// (https://developer.android.com/reference/android/Manifest.permission#RECORD_AUDIO) - /// Manifest.permission.ACTIVITY_RECOGNITION + /// ACTIVITY_RECOGNITION /// (https://developer.android.com/reference/android/Manifest.permission#ACTIVITY_RECOGNITION) - /// Manifest.permission.BODY_SENSORS + /// BODY_SENSORS /// (https://developer.android.com/reference/android/Manifest.permission#BODY_SENSORS) /// - "DENY" : Automatically deny a permission. core.String? defaultPermissionPolicy; @@ -4584,12 +4584,12 @@ class Enterprise { /// Configuration to enable an app as an extension app, with the capability of /// interacting with Android Device Policy offline. /// -/// For Android versions 13 and above, extension apps are exempt from battery +/// For Android versions 11 and above, extension apps are exempt from battery /// restrictions so will not be placed into the restricted App Standby Bucket /// (https://developer.android.com/topic/performance/appstandby#restricted-bucket). 
/// Extensions apps are also protected against users clearing their data or /// force-closing the application, although admins can continue to use the clear -/// app data command on extension apps if needed for Android 13 and above. +/// app data command on extension apps if needed for Android 11 and above. class ExtensionConfig { /// Fully qualified class name of the receiver service class for Android /// Device Policy to notify the extension app of any local command status @@ -6545,22 +6545,22 @@ class PermissionGrant { /// by default. /// - "PROMPT" : Prompt the user to grant a permission. /// - "GRANT" : Automatically grant a permission.On Android 12 and above, - /// Manifest.permission.READ_SMS + /// READ_SMS /// (https://developer.android.com/reference/android/Manifest.permission#READ_SMS) /// and following sensor-related permissions can only be granted on fully - /// managed devices: Manifest.permission.ACCESS_FINE_LOCATION + /// managed devices: ACCESS_FINE_LOCATION /// (https://developer.android.com/reference/android/Manifest.permission#ACCESS_FINE_LOCATION) - /// Manifest.permission.ACCESS_BACKGROUND_LOCATION + /// ACCESS_BACKGROUND_LOCATION /// (https://developer.android.com/reference/android/Manifest.permission#ACCESS_BACKGROUND_LOCATION) - /// Manifest.permission.ACCESS_COARSE_LOCATION + /// ACCESS_COARSE_LOCATION /// (https://developer.android.com/reference/android/Manifest.permission#ACCESS_COARSE_LOCATION) - /// Manifest.permission.CAMERA + /// CAMERA /// (https://developer.android.com/reference/android/Manifest.permission#CAMERA) - /// Manifest.permission.RECORD_AUDIO + /// RECORD_AUDIO /// (https://developer.android.com/reference/android/Manifest.permission#RECORD_AUDIO) - /// Manifest.permission.ACTIVITY_RECOGNITION + /// ACTIVITY_RECOGNITION /// (https://developer.android.com/reference/android/Manifest.permission#ACTIVITY_RECOGNITION) - /// Manifest.permission.BODY_SENSORS + /// BODY_SENSORS /// (https://developer.android.com/reference/android/Manifest.permission#BODY_SENSORS) /// - "DENY" : Automatically deny a permission. core.String? policy; @@ -6704,6 +6704,18 @@ class PersonalUsagePolicies { /// personal profile. core.String? personalPlayStoreMode; + /// Controls whether a private space is allowed on the device. + /// + /// Optional. + /// Possible string values are: + /// - "PRIVATE_SPACE_POLICY_UNSPECIFIED" : Unspecified. Defaults to + /// PRIVATE_SPACE_ALLOWED. + /// - "PRIVATE_SPACE_ALLOWED" : Users can create a private space profile. + /// - "PRIVATE_SPACE_DISALLOWED" : Users cannot create a private space + /// profile. Supported only for company-owned devices with a work profile. + /// Caution: Any existing private space will be removed. + core.String? privateSpacePolicy; + /// If true, screen capture is disabled for all users. core.bool? 
screenCaptureDisabled; @@ -6713,6 +6725,7 @@ class PersonalUsagePolicies { this.maxDaysWithWorkOff, this.personalApplications, this.personalPlayStoreMode, + this.privateSpacePolicy, this.screenCaptureDisabled, }); @@ -6729,6 +6742,7 @@ class PersonalUsagePolicies { value as core.Map)) .toList(), personalPlayStoreMode: json_['personalPlayStoreMode'] as core.String?, + privateSpacePolicy: json_['privateSpacePolicy'] as core.String?, screenCaptureDisabled: json_['screenCaptureDisabled'] as core.bool?, ); @@ -6743,6 +6757,8 @@ class PersonalUsagePolicies { 'personalApplications': personalApplications!, if (personalPlayStoreMode != null) 'personalPlayStoreMode': personalPlayStoreMode!, + if (privateSpacePolicy != null) + 'privateSpacePolicy': privateSpacePolicy!, if (screenCaptureDisabled != null) 'screenCaptureDisabled': screenCaptureDisabled!, }; @@ -6755,6 +6771,9 @@ class Policy { core.List? accountTypesWithManagementDisabled; /// Whether adding new users and profiles is disabled. + /// + /// For devices where managementMode is DEVICE_OWNER this field is ignored and + /// the user is never allowed to add or remove users. core.bool? addUserDisabled; /// Whether adjusting the master volume is disabled. @@ -6964,22 +6983,22 @@ class Policy { /// by default. /// - "PROMPT" : Prompt the user to grant a permission. /// - "GRANT" : Automatically grant a permission.On Android 12 and above, - /// Manifest.permission.READ_SMS + /// READ_SMS /// (https://developer.android.com/reference/android/Manifest.permission#READ_SMS) /// and following sensor-related permissions can only be granted on fully - /// managed devices: Manifest.permission.ACCESS_FINE_LOCATION + /// managed devices: ACCESS_FINE_LOCATION /// (https://developer.android.com/reference/android/Manifest.permission#ACCESS_FINE_LOCATION) - /// Manifest.permission.ACCESS_BACKGROUND_LOCATION + /// ACCESS_BACKGROUND_LOCATION /// (https://developer.android.com/reference/android/Manifest.permission#ACCESS_BACKGROUND_LOCATION) - /// Manifest.permission.ACCESS_COARSE_LOCATION + /// ACCESS_COARSE_LOCATION /// (https://developer.android.com/reference/android/Manifest.permission#ACCESS_COARSE_LOCATION) - /// Manifest.permission.CAMERA + /// CAMERA /// (https://developer.android.com/reference/android/Manifest.permission#CAMERA) - /// Manifest.permission.RECORD_AUDIO + /// RECORD_AUDIO /// (https://developer.android.com/reference/android/Manifest.permission#RECORD_AUDIO) - /// Manifest.permission.ACTIVITY_RECOGNITION + /// ACTIVITY_RECOGNITION /// (https://developer.android.com/reference/android/Manifest.permission#ACTIVITY_RECOGNITION) - /// Manifest.permission.BODY_SENSORS + /// BODY_SENSORS /// (https://developer.android.com/reference/android/Manifest.permission#BODY_SENSORS) /// - "DENY" : Automatically deny a permission. core.String? defaultPermissionPolicy; @@ -7043,6 +7062,8 @@ class Policy { /// If true, this disables the Lock Screen /// (https://source.android.com/docs/core/display/multi_display/lock-screen) /// for primary and/or secondary displays. + /// + /// This policy is supported only in dedicated device management mode. core.bool? keyguardDisabled; /// Disabled keyguard customizations, such as widgets. @@ -9374,6 +9395,11 @@ class WifiRoamingSetting { /// Possible string values are: /// - "WIFI_ROAMING_MODE_UNSPECIFIED" : Unspecified. Defaults to /// WIFI_ROAMING_DEFAULT. + /// - "WIFI_ROAMING_DISABLED" : Wi-Fi roaming is disabled. 
Supported on + /// Android 15 and above on fully managed devices and work profiles on + /// company-owned devices. A nonComplianceDetail with MANAGEMENT_MODE is + /// reported for other management modes. A nonComplianceDetail with API_LEVEL + /// is reported if the Android version is less than 15. /// - "WIFI_ROAMING_DEFAULT" : Default Wi-Fi roaming mode of the device. /// - "WIFI_ROAMING_AGGRESSIVE" : Aggressive roaming mode which allows quicker /// Wi-Fi roaming. Supported on Android 15 and above on fully managed devices diff --git a/generated/googleapis/lib/androidpublisher/v3.dart b/generated/googleapis/lib/androidpublisher/v3.dart index 54e257687..855f4d3bc 100644 --- a/generated/googleapis/lib/androidpublisher/v3.dart +++ b/generated/googleapis/lib/androidpublisher/v3.dart @@ -11300,6 +11300,9 @@ class OfferTag { }; } +/// A single use promotion code. +typedef OneTimeCode = $Empty; + /// Represents a one-time transaction. class OneTimeExternalTransaction { /// Input only. @@ -12753,6 +12756,37 @@ class SdkVersionTargeting { }; } +/// The promotion applied on this item when purchased. +class SignupPromotion { + /// A one-time code was applied. + OneTimeCode? oneTimeCode; + + /// A vanity code was applied. + VanityCode? vanityCode; + + SignupPromotion({ + this.oneTimeCode, + this.vanityCode, + }); + + SignupPromotion.fromJson(core.Map json_) + : this( + oneTimeCode: json_.containsKey('oneTimeCode') + ? OneTimeCode.fromJson( + json_['oneTimeCode'] as core.Map) + : null, + vanityCode: json_.containsKey('vanityCode') + ? VanityCode.fromJson( + json_['vanityCode'] as core.Map) + : null, + ); + + core.Map toJson() => { + if (oneTimeCode != null) 'oneTimeCode': oneTimeCode!, + if (vanityCode != null) 'vanityCode': vanityCode!, + }; +} + /// Holds data specific to Split APKs. class SplitApkMetadata { /// Indicates whether this APK is the main split of the module. @@ -13749,6 +13783,11 @@ class SubscriptionPurchaseLineItem { /// The purchased product ID (for example, 'monthly001'). core.String? productId; + /// Promotion details about this item. + /// + /// Only set if a promotion was applied during signup. + SignupPromotion? signupPromotion; + SubscriptionPurchaseLineItem({ this.autoRenewingPlan, this.deferredItemReplacement, @@ -13756,6 +13795,7 @@ class SubscriptionPurchaseLineItem { this.offerDetails, this.prepaidPlan, this.productId, + this.signupPromotion, }); SubscriptionPurchaseLineItem.fromJson(core.Map json_) @@ -13779,6 +13819,10 @@ class SubscriptionPurchaseLineItem { json_['prepaidPlan'] as core.Map) : null, productId: json_['productId'] as core.String?, + signupPromotion: json_.containsKey('signupPromotion') + ? SignupPromotion.fromJson(json_['signupPromotion'] + as core.Map) + : null, ); core.Map toJson() => { @@ -13789,6 +13833,7 @@ class SubscriptionPurchaseLineItem { if (offerDetails != null) 'offerDetails': offerDetails!, if (prepaidPlan != null) 'prepaidPlan': prepaidPlan!, if (productId != null) 'productId': productId!, + if (signupPromotion != null) 'signupPromotion': signupPromotion!, }; } @@ -15388,6 +15433,25 @@ class UsesPermission { }; } +/// A multiple use, predefined promotion code. +class VanityCode { + /// The promotion code. + core.String? promotionCode; + + VanityCode({ + this.promotionCode, + }); + + VanityCode.fromJson(core.Map json_) + : this( + promotionCode: json_['promotionCode'] as core.String?, + ); + + core.Map toJson() => { + if (promotionCode != null) 'promotionCode': promotionCode!, + }; +} + /// APK that is suitable for inclusion in a system image. 
/// /// The resource of SystemApksService. diff --git a/generated/googleapis/lib/apigateway/v1.dart b/generated/googleapis/lib/apigateway/v1.dart index d2016c138..bfbfbfb0e 100644 --- a/generated/googleapis/lib/apigateway/v1.dart +++ b/generated/googleapis/lib/apigateway/v1.dart @@ -1367,8 +1367,8 @@ class ProjectsLocationsOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// [request] - The metadata request object. /// diff --git a/generated/googleapis/lib/apigee/v1.dart b/generated/googleapis/lib/apigee/v1.dart index ab3e54667..cca92c7fe 100644 --- a/generated/googleapis/lib/apigee/v1.dart +++ b/generated/googleapis/lib/apigee/v1.dart @@ -21715,6 +21715,10 @@ class GoogleCloudApigeeV1EnvironmentConfig { /// This is only used by Envoy-based gateways. core.String? arcConfigLocation; + /// The algorithm to resolve IP. + GoogleCloudApigeeV1EnvironmentConfigClientIPResolutionConfig? + clientIpResolutionConfig; + /// Time that the environment configuration was created. core.String? createTime; @@ -21802,6 +21806,7 @@ class GoogleCloudApigeeV1EnvironmentConfig { GoogleCloudApigeeV1EnvironmentConfig({ this.addonsConfig, this.arcConfigLocation, + this.clientIpResolutionConfig, this.createTime, this.dataCollectors, this.debugMask, @@ -21832,6 +21837,12 @@ class GoogleCloudApigeeV1EnvironmentConfig { json_['addonsConfig'] as core.Map) : null, arcConfigLocation: json_['arcConfigLocation'] as core.String?, + clientIpResolutionConfig: + json_.containsKey('clientIpResolutionConfig') + ? GoogleCloudApigeeV1EnvironmentConfigClientIPResolutionConfig + .fromJson(json_['clientIpResolutionConfig'] + as core.Map) + : null, createTime: json_['createTime'] as core.String?, dataCollectors: (json_['dataCollectors'] as core.List?) ?.map((value) => GoogleCloudApigeeV1DataCollectorConfig.fromJson( @@ -21896,6 +21907,8 @@ class GoogleCloudApigeeV1EnvironmentConfig { core.Map toJson() => { if (addonsConfig != null) 'addonsConfig': addonsConfig!, if (arcConfigLocation != null) 'arcConfigLocation': arcConfigLocation!, + if (clientIpResolutionConfig != null) + 'clientIpResolutionConfig': clientIpResolutionConfig!, if (createTime != null) 'createTime': createTime!, if (dataCollectors != null) 'dataCollectors': dataCollectors!, if (debugMask != null) 'debugMask': debugMask!, @@ -21923,6 +21936,60 @@ class GoogleCloudApigeeV1EnvironmentConfig { }; } +/// Configuration for resolving the client ip. +class GoogleCloudApigeeV1EnvironmentConfigClientIPResolutionConfig { + /// Resolves the client ip based on a custom header. + GoogleCloudApigeeV1EnvironmentConfigClientIPResolutionConfigHeaderIndexAlgorithm? + headerIndexAlgorithm; + + GoogleCloudApigeeV1EnvironmentConfigClientIPResolutionConfig({ + this.headerIndexAlgorithm, + }); + + GoogleCloudApigeeV1EnvironmentConfigClientIPResolutionConfig.fromJson( + core.Map json_) + : this( + headerIndexAlgorithm: json_.containsKey('headerIndexAlgorithm') + ? 
GoogleCloudApigeeV1EnvironmentConfigClientIPResolutionConfigHeaderIndexAlgorithm + .fromJson(json_['headerIndexAlgorithm'] + as core.Map) + : null, + ); + + core.Map toJson() => { + if (headerIndexAlgorithm != null) + 'headerIndexAlgorithm': headerIndexAlgorithm!, + }; +} + +/// Resolves the client ip based on a custom header. +class GoogleCloudApigeeV1EnvironmentConfigClientIPResolutionConfigHeaderIndexAlgorithm { + /// The index of the ip in the header. + /// + /// (By default, value is 0 if missing) + core.int? ipHeaderIndex; + + /// The name of the header to extract the client ip from. + core.String? ipHeaderName; + + GoogleCloudApigeeV1EnvironmentConfigClientIPResolutionConfigHeaderIndexAlgorithm({ + this.ipHeaderIndex, + this.ipHeaderName, + }); + + GoogleCloudApigeeV1EnvironmentConfigClientIPResolutionConfigHeaderIndexAlgorithm.fromJson( + core.Map json_) + : this( + ipHeaderIndex: json_['ipHeaderIndex'] as core.int?, + ipHeaderName: json_['ipHeaderName'] as core.String?, + ); + + core.Map toJson() => { + if (ipHeaderIndex != null) 'ipHeaderIndex': ipHeaderIndex!, + if (ipHeaderName != null) 'ipHeaderName': ipHeaderName!, + }; +} + /// EnvironmentGroup configuration. /// /// An environment group is used to group one or more Apigee environments under @@ -24951,11 +25018,18 @@ class GoogleCloudApigeeV1Organization { /// [Getting started with the Service Networking API](https://cloud.google.com/service-infrastructure/docs/service-networking/getting-started). /// Valid only when \[RuntimeType\](#RuntimeType) is set to `CLOUD`. The value /// must be set before the creation of a runtime instance and can be updated - /// only when there are no runtime instances. For example: `default`. Apigee - /// also supports shared VPC (that is, the host network project is not the - /// same as the one that is peering with Apigee). See - /// [Shared VPC overview](https://cloud.google.com/vpc/docs/shared-vpc). To - /// use a shared VPC network, use the following format: + /// only when there are no runtime instances. For example: `default`. When + /// changing authorizedNetwork, you must reconfigure VPC peering. After VPC + /// peering with previous network is deleted, + /// [run the following command](https://cloud.google.com/sdk/gcloud/reference/services/vpc-peerings/delete): + /// `gcloud services vpc-peerings delete --network=NETWORK`, where `NETWORK` + /// is the name of the previous network. This will delete the previous Service + /// Networking. Otherwise, you will get the following error: `The resource + /// 'projects/...-tp' is already linked to another shared VPC host + /// 'projects/...-tp`. Apigee also supports shared VPC (that is, the host + /// network project is not the same as the one that is peering with Apigee). + /// See [Shared VPC overview](https://cloud.google.com/vpc/docs/shared-vpc). + /// To use a shared VPC network, use the following format: /// `projects/{host-project-id}/{region}/networks/{network-name}`. For /// example: `projects/my-sharedvpc-host/global/networks/mynetwork` **Note:** /// Not supported for Apigee hybrid. 
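// --- Illustrative sketch, not part of the generated diff -----------------------
// A hedged example of the shape of the new client IP resolution config: resolve
// the client IP from the first entry of a custom header. The header name below
// is a hypothetical placeholder; in practice this config is read back as part of
// GoogleCloudApigeeV1EnvironmentConfig rather than hand-built.
final sampleClientIpResolutionConfig =
    GoogleCloudApigeeV1EnvironmentConfigClientIPResolutionConfig(
  headerIndexAlgorithm:
      GoogleCloudApigeeV1EnvironmentConfigClientIPResolutionConfigHeaderIndexAlgorithm(
    ipHeaderName: 'X-Forwarded-For',
    ipHeaderIndex: 0,
  ),
);
// --------------------------------------------------------------------------------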
diff --git a/generated/googleapis/lib/appengine/v1.dart b/generated/googleapis/lib/appengine/v1.dart index 93f4a5de4..0504b556f 100644 --- a/generated/googleapis/lib/appengine/v1.dart +++ b/generated/googleapis/lib/appengine/v1.dart @@ -2245,6 +2245,57 @@ class ProjectsLocationsApplicationsServicesResource { ProjectsLocationsApplicationsServicesResource(commons.ApiRequester client) : _requester = client; + + /// Deletes the specified service and all enclosed versions. + /// + /// Request parameters: + /// + /// [projectsId] - Part of `name`. Name of the resource requested. Example: + /// apps/myapp/services/default. + /// + /// [locationsId] - Part of `name`. See documentation of `projectsId`. + /// + /// [applicationsId] - Part of `name`. See documentation of `projectsId`. + /// + /// [servicesId] - Part of `name`. See documentation of `projectsId`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [Operation]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future delete( + core.String projectsId, + core.String locationsId, + core.String applicationsId, + core.String servicesId, { + core.String? $fields, + }) async { + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/projects/' + + commons.escapeVariable('$projectsId') + + '/locations/' + + commons.escapeVariable('$locationsId') + + '/applications/' + + commons.escapeVariable('$applicationsId') + + '/services/' + + commons.escapeVariable('$servicesId'); + + final response_ = await _requester.request( + url_, + 'DELETE', + queryParams: queryParams_, + ); + return Operation.fromJson(response_ as core.Map); + } } class ProjectsLocationsApplicationsServicesVersionsResource { diff --git a/generated/googleapis/lib/artifactregistry/v1.dart b/generated/googleapis/lib/artifactregistry/v1.dart index 3af569fd3..a306d9d48 100644 --- a/generated/googleapis/lib/artifactregistry/v1.dart +++ b/generated/googleapis/lib/artifactregistry/v1.dart @@ -5555,6 +5555,11 @@ class Repository { /// For each location in a project, repository names must be unique. core.String? name; + /// The repository endpoint, for example: `us-docker.pkg.dev/my-proj/my-repo`. + /// + /// Output only. + core.String? registryUri; + /// Configuration specific for a Remote Repository. RemoteRepositoryConfig? remoteRepositoryConfig; @@ -5603,6 +5608,7 @@ class Repository { this.mavenConfig, this.mode, this.name, + this.registryUri, this.remoteRepositoryConfig, this.satisfiesPzi, this.satisfiesPzs, @@ -5647,6 +5653,7 @@ class Repository { : null, mode: json_['mode'] as core.String?, name: json_['name'] as core.String?, + registryUri: json_['registryUri'] as core.String?, remoteRepositoryConfig: json_.containsKey('remoteRepositoryConfig') ? 
RemoteRepositoryConfig.fromJson(json_['remoteRepositoryConfig'] as core.Map) @@ -5683,6 +5690,7 @@ class Repository { if (mavenConfig != null) 'mavenConfig': mavenConfig!, if (mode != null) 'mode': mode!, if (name != null) 'name': name!, + if (registryUri != null) 'registryUri': registryUri!, if (remoteRepositoryConfig != null) 'remoteRepositoryConfig': remoteRepositoryConfig!, if (satisfiesPzi != null) 'satisfiesPzi': satisfiesPzi!, diff --git a/generated/googleapis/lib/assuredworkloads/v1.dart b/generated/googleapis/lib/assuredworkloads/v1.dart index 5e877f7d6..042a552ea 100644 --- a/generated/googleapis/lib/assuredworkloads/v1.dart +++ b/generated/googleapis/lib/assuredworkloads/v1.dart @@ -1575,8 +1575,9 @@ class GoogleCloudAssuredworkloadsV1Workload { /// - "FEDRAMP_MODERATE" : FedRAMP Moderate data protection controls /// - "US_REGIONAL_ACCESS" : Assured Workloads For US Regions data protection /// controls - /// - "HIPAA" : Health Insurance Portability and Accountability Act controls - /// - "HITRUST" : Health Information Trust Alliance controls + /// - "HIPAA" : \[DEPRECATED\] Health Insurance Portability and Accountability + /// Act controls + /// - "HITRUST" : \[DEPRECATED\] Health Information Trust Alliance controls /// - "EU_REGIONS_AND_SUPPORT" : Assured Workloads For EU Regions and Support /// controls /// - "CA_REGIONS_AND_SUPPORT" : Assured Workloads For Canada Regions and @@ -1599,6 +1600,7 @@ class GoogleCloudAssuredworkloadsV1Workload { /// - "HEALTHCARE_AND_LIFE_SCIENCES_CONTROLS_US_SUPPORT" : Healthcare and Life /// Science Controls with US Support /// - "IRS_1075" : Internal Revenue Service 1075 controls + /// - "CANADA_CONTROLLED_GOODS" : Canada Controlled Goods core.String? complianceRegime; /// Count of active Violations in the Workload. @@ -1979,6 +1981,8 @@ class GoogleCloudAssuredworkloadsV1WorkloadEkmProvisioningResponse { core.String? ekmProvisioningErrorMapping; /// Indicates Ekm enrollment Provisioning of a given workload. + /// + /// Output only. /// Possible string values are: /// - "EKM_PROVISIONING_STATE_UNSPECIFIED" : Default State for Ekm /// Provisioning @@ -2063,6 +2067,8 @@ class GoogleCloudAssuredworkloadsV1WorkloadPartnerPermissions { core.bool? assuredWorkloadsMonitoring; /// Allow the partner to view inspectability logs and monitoring violations. + /// + /// Optional. core.bool? dataLogsViewer; /// Allow partner to view access approval logs. @@ -2105,6 +2111,8 @@ class GoogleCloudAssuredworkloadsV1WorkloadResourceInfo { /// Resource identifier. /// /// For a project this represents project_number. + /// + /// Output only. core.String? resourceId; /// Indicates the type of resource. @@ -2190,6 +2198,8 @@ class GoogleCloudAssuredworkloadsV1WorkloadSaaEnrollmentResponse { core.List? setupErrors; /// Indicates SAA enrollment status of a given workload. + /// + /// Output only. /// Possible string values are: /// - "SETUP_STATE_UNSPECIFIED" : Unspecified. /// - "STATUS_PENDING" : SAA enrollment pending. 
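A short sketch (assumptions noted in the comments) of calling the `delete` method added to `ProjectsLocationsApplicationsServicesResource` in the appengine/v1.dart hunk above:

import 'package:googleapis/appengine/v1.dart';
import 'package:http/http.dart' as http;

// Sketch only: `client` is assumed to be an already-authorized HTTP client
// (for example from package:googleapis_auth); all IDs are placeholders, and
// the projects -> locations -> applications -> services getter chain is the
// usual layout of these generated libraries.
Future<void> deleteDefaultService(http.Client client) async {
  final api = AppengineApi(client);
  final operation = await api.projects.locations.applications.services.delete(
    'my-project', // projectsId (placeholder)
    'us-central1', // locationsId (placeholder)
    'my-project', // applicationsId (placeholder)
    'default', // servicesId (placeholder)
  );
  // Deletion runs as a long-running operation; poll it before assuming success.
  print('Started operation: ${operation.name}');
}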
diff --git a/generated/googleapis/lib/backupdr/v1.dart b/generated/googleapis/lib/backupdr/v1.dart index 4a5ebe818..0cb179906 100644 --- a/generated/googleapis/lib/backupdr/v1.dart +++ b/generated/googleapis/lib/backupdr/v1.dart @@ -28,6 +28,7 @@ /// - [ProjectsLocationsBackupVaultsDataSourcesBackupsResource] /// - [ProjectsLocationsManagementServersResource] /// - [ProjectsLocationsOperationsResource] +/// - [ProjectsLocationsServiceConfigResource] library; import 'dart:async' as async; @@ -82,6 +83,8 @@ class ProjectsLocationsResource { ProjectsLocationsManagementServersResource(_requester); ProjectsLocationsOperationsResource get operations => ProjectsLocationsOperationsResource(_requester); + ProjectsLocationsServiceConfigResource get serviceConfig => + ProjectsLocationsServiceConfigResource(_requester); ProjectsLocationsResource(commons.ApiRequester client) : _requester = client; @@ -2242,8 +2245,8 @@ class ProjectsLocationsOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// [request] - The metadata request object. /// @@ -2414,6 +2417,56 @@ class ProjectsLocationsOperationsResource { } } +class ProjectsLocationsServiceConfigResource { + final commons.ApiRequester _requester; + + ProjectsLocationsServiceConfigResource(commons.ApiRequester client) + : _requester = client; + + /// Initializes the service related config for a project. + /// + /// [request] - The metadata request object. + /// + /// Request parameters: + /// + /// [name] - Required. The resource name of the serviceConfig used to + /// initialize the service. Format: + /// `projects/{project_id}/locations/{location}/serviceConfig`. + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/serviceConfig$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [Operation]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future initialize( + InitializeServiceRequest request, + core.String name, { + core.String? $fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name') + ':initialize'; + + final response_ = await _requester.request( + url_, + 'POST', + body: body_, + queryParams: queryParams_, + ); + return Operation.fromJson(response_ as core.Map); + } +} + /// request message for AbandonBackup. class AbandonBackupRequest { /// An optional request ID to identify requests. @@ -3048,6 +3101,16 @@ class Backup { /// Output only. core.String? resourceSizeBytes; + /// Reserved for future use. + /// + /// Optional. Output only. + core.bool? satisfiesPzi; + + /// Reserved for future use. + /// + /// Optional. Output only. + core.bool? satisfiesPzs; + /// The list of BackupLocks taken by the service to prevent the deletion of /// the backup. 
/// @@ -3085,6 +3148,8 @@ class Backup { this.labels, this.name, this.resourceSizeBytes, + this.satisfiesPzi, + this.satisfiesPzs, this.serviceLocks, this.state, this.updateTime, @@ -3129,6 +3194,8 @@ class Backup { ), name: json_['name'] as core.String?, resourceSizeBytes: json_['resourceSizeBytes'] as core.String?, + satisfiesPzi: json_['satisfiesPzi'] as core.bool?, + satisfiesPzs: json_['satisfiesPzs'] as core.bool?, serviceLocks: (json_['serviceLocks'] as core.List?) ?.map((value) => BackupLock.fromJson( value as core.Map)) @@ -3156,6 +3223,8 @@ class Backup { if (labels != null) 'labels': labels!, if (name != null) 'name': name!, if (resourceSizeBytes != null) 'resourceSizeBytes': resourceSizeBytes!, + if (satisfiesPzi != null) 'satisfiesPzi': satisfiesPzi!, + if (satisfiesPzs != null) 'satisfiesPzs': satisfiesPzs!, if (serviceLocks != null) 'serviceLocks': serviceLocks!, if (state != null) 'state': state!, if (updateTime != null) 'updateTime': updateTime!, @@ -3518,8 +3587,8 @@ class BackupPlan { /// The resource type to which the `BackupPlan` will be applied. /// - /// Examples include, "compute.googleapis.com/Instance" and - /// "storage.googleapis.com/Bucket". + /// Examples include, "compute.googleapis.com/Instance", + /// "sqladmin.googleapis.com/Instance", or "alloydb.googleapis.com/Cluster". /// /// Required. core.String? resourceType; @@ -3634,7 +3703,7 @@ class BackupPlanAssociation { /// Resource type of workload on which backupplan is applied /// - /// Optional. + /// Required. Immutable. core.String? resourceType; /// The config info related to backup rules. @@ -3705,7 +3774,12 @@ class BackupRule { /// Configures the duration for which backup data will be kept. /// /// It is defined in “days”. The value should be greater than or equal to - /// minimum enforced retention of the backup vault. + /// minimum enforced retention of the backup vault. Minimum value is 1 and + /// maximum value is 90 for hourly backups. Minimum value is 1 and maximum + /// value is 90 for daily backups. Minimum value is 7 and maximum value is 186 + /// for weekly backups. Minimum value is 30 and maximum value is 732 for + /// monthly backups. Minimum value is 365 and maximum value is 36159 for + /// yearly backups. /// /// Required. core.int? backupRetentionDays; @@ -3754,17 +3828,22 @@ class BackupVault { /// Note: This field is added for future use case and will not be supported in /// the current release. /// - /// Optional. Access restriction for the backup vault. Default value is + /// Access restriction for the backup vault. Default value is /// WITHIN_ORGANIZATION if not provided during creation. /// /// Optional. /// Possible string values are: - /// - "ACCESS_RESTRICTION_UNSPECIFIED" : Access restriction not set. + /// - "ACCESS_RESTRICTION_UNSPECIFIED" : Access restriction not set. If user + /// does not provide any value or pass this value, it will be changed to + /// WITHIN_ORGANIZATION. /// - "WITHIN_PROJECT" : Access to or from resources outside your current /// project will be denied. /// - "WITHIN_ORGANIZATION" : Access to or from resources outside your current /// organization will be denied. /// - "UNRESTRICTED" : No access restriction. + /// - "WITHIN_ORG_BUT_UNRESTRICTED_FOR_BA" : Access to or from resources + /// outside your current organization will be denied except for backup + /// appliance. core.String? accessRestriction; /// User annotations. @@ -3856,7 +3935,7 @@ class BackupVault { /// Output only. core.String? 
totalStoredBytes; - /// Output only Immutable after resource creation until resource deletion. + /// Immutable after resource creation until resource deletion. /// /// Output only. core.String? uid; @@ -5384,6 +5463,49 @@ class InitializeParams { }; } +/// Request message for initializing the service. +class InitializeServiceRequest { + /// An optional request ID to identify requests. + /// + /// Specify a unique request ID so that if you must retry your request, the + /// server will know to ignore the request if it has already been completed. + /// The server will guarantee that for at least 60 minutes since the first + /// request. For example, consider a situation where you make an initial + /// request and t he request times out. If you make the request again with the + /// same request ID, the server can check if original operation with the same + /// request ID was received, and if so, will ignore the second request. This + /// prevents clients from accidentally creating duplicate commitments. The + /// request ID must be a valid UUID with the exception that zero UUID is not + /// supported (00000000-0000-0000-0000-000000000000). + /// + /// Optional. + core.String? requestId; + + /// The resource type to which the default service config will be applied. + /// + /// Examples include, "compute.googleapis.com/Instance" and + /// "storage.googleapis.com/Bucket". + /// + /// Required. + core.String? resourceType; + + InitializeServiceRequest({ + this.requestId, + this.resourceType, + }); + + InitializeServiceRequest.fromJson(core.Map json_) + : this( + requestId: json_['requestId'] as core.String?, + resourceType: json_['resourceType'] as core.String?, + ); + + core.Map toJson() => { + if (requestId != null) 'requestId': requestId!, + if (resourceType != null) 'resourceType': resourceType!, + }; +} + /// request message for InitiateBackup. class InitiateBackupRequest { /// Resource ID of the Backup resource. @@ -5857,9 +5979,10 @@ class ManagementServer { /// VPC networks to which the ManagementServer instance is connected. /// - /// For this version, only a single network is supported. + /// For this version, only a single network is supported. This field is + /// optional if MS is created without PSA /// - /// Required. + /// Optional. core.List? networks; /// The OAuth 2.0 client id is required to make API calls to the BackupDR diff --git a/generated/googleapis/lib/batch/v1.dart b/generated/googleapis/lib/batch/v1.dart index d2c053532..7605894e9 100644 --- a/generated/googleapis/lib/batch/v1.dart +++ b/generated/googleapis/lib/batch/v1.dart @@ -506,8 +506,8 @@ class ProjectsLocationsOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// [request] - The metadata request object. /// @@ -1628,7 +1628,7 @@ typedef CancelOperationRequest = $Empty; /// `CloudLoggingOption` contains additional settings for Cloud Logging logs /// generated by Batch job. 
class CloudLoggingOption { - /// Set this flag to true to change the + /// Set this field to `true` to change the /// [monitored resource type](https://cloud.google.com/monitoring/api/resources) /// for Cloud Logging logs generated by this Batch job from the /// \[`batch.googleapis.com/Job`\](https://cloud.google.com/monitoring/api/resources#tag_batch.googleapis.com/Job) @@ -2146,8 +2146,9 @@ class InstancePolicyOrTemplate { /// Name of an instance template used to create VMs. /// /// Named the field as 'instance_template' instead of 'template' to avoid C++ - /// keyword conflict. Batch only supports global instance templates. You can - /// specify the global instance template as a full or partial URL. + /// keyword conflict. Batch only supports global instance templates from the + /// same project as the job. You can specify the global instance template as a + /// full or partial URL. core.String? instanceTemplate; /// InstancePolicy. @@ -2669,28 +2670,37 @@ class LocationPolicy { }; } -/// LogsPolicy describes how outputs from a Job's Tasks (stdout/stderr) will be -/// preserved. +/// LogsPolicy describes if and how a job's logs are preserved. +/// +/// Logs include information that is automatically written by the Batch service +/// agent and any information that you configured the job's runnables to write +/// to the `stdout` or `stderr` streams. class LogsPolicy { - /// Additional settings for Cloud Logging. - /// - /// It will only take effect when the destination of `LogsPolicy` is set to - /// `CLOUD_LOGGING`. + /// When `destination` is set to `CLOUD_LOGGING`, you can optionally set this + /// field to configure additional settings for Cloud Logging. /// /// Optional. CloudLoggingOption? cloudLoggingOption; - /// Where logs should be saved. + /// If and where logs should be saved. /// Possible string values are: - /// - "DESTINATION_UNSPECIFIED" : Logs are not preserved. - /// - "CLOUD_LOGGING" : Logs are streamed to Cloud Logging. - /// - "PATH" : Logs are saved to a file path. + /// - "DESTINATION_UNSPECIFIED" : (Default) Logs are not preserved. + /// - "CLOUD_LOGGING" : Logs are streamed to Cloud Logging. Optionally, you + /// can configure additional settings in the `cloudLoggingOption` field. + /// - "PATH" : Logs are saved to the file path specified in the `logsPath` + /// field. core.String? destination; - /// The path to which logs are saved when the destination = PATH. + /// When `destination` is set to `PATH`, you must set this field to the path + /// where you want logs to be saved. /// - /// This can be a local file path on the VM, or under the mount point of a - /// Persistent Disk or Filestore, or a Cloud Storage path. + /// This path can point to a local directory on the VM or (if configured) a + /// directory under the mount path of any Cloud Storage bucket, network file + /// system (NFS), or writable persistent disk that is mounted to the job. For + /// example, if the job has a bucket with `mountPath` set to + /// `/mnt/disks/my-bucket`, you can write logs to the root directory of the + /// `remotePath` of that bucket by setting this field to + /// `/mnt/disks/my-bucket/`. core.String?

logsPath; LogsPolicy({ diff --git a/generated/googleapis/lib/beyondcorp/v1.dart b/generated/googleapis/lib/beyondcorp/v1.dart index d90f3a633..3ff250ba6 100644 --- a/generated/googleapis/lib/beyondcorp/v1.dart +++ b/generated/googleapis/lib/beyondcorp/v1.dart @@ -43,10 +43,6 @@ /// - [ProjectsLocationsOperationsResource] /// - [ProjectsLocationsSecurityGatewaysResource] /// - [ProjectsLocationsSecurityGatewaysApplicationsResource] -/// - [VResource] -/// - [VProjectsResource] -/// - [VProjectsLocationsResource] -/// - [VProjectsLocationsSecurityGatewaysResource] library; import 'dart:async' as async; @@ -77,7 +73,6 @@ class BeyondCorpApi { OrganizationsResource get organizations => OrganizationsResource(_requester); ProjectsResource get projects => ProjectsResource(_requester); - VResource get v => VResource(_requester); BeyondCorpApi(http.Client client, {core.String rootUrl = 'https://beyondcorp.googleapis.com/', @@ -635,8 +630,8 @@ class OrganizationsLocationsOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// [request] - The metadata request object. /// @@ -2972,8 +2967,8 @@ class ProjectsLocationsOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// [request] - The metadata request object. /// @@ -3489,34 +3484,35 @@ class ProjectsLocationsSecurityGatewaysResource { response_ as core.Map); } - /// This is a custom method to allow customers to create a peering connections - /// between Google network and customer networks. + /// Sets the access control policy on the specified resource. /// - /// This is enabled only for the allowlisted customers. + /// Replaces any existing policy. Can return `NOT_FOUND`, `INVALID_ARGUMENT`, + /// and `PERMISSION_DENIED` errors. /// /// [request] - The metadata request object. /// /// Request parameters: /// - /// [securityGateway] - Required. BeyondCorp SecurityGateway name using the - /// form: - /// `projects/{project}/locations/{location}/securityGateways/{security_gateway}` + /// [resource] - REQUIRED: The resource for which the policy is being + /// specified. See + /// [Resource names](https://cloud.google.com/apis/design/resource_names) for + /// the appropriate value for this field. /// Value must have pattern /// `^projects/\[^/\]+/locations/\[^/\]+/securityGateways/\[^/\]+$`. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. /// - /// Completes with a [GoogleLongrunningOperation]. + /// Completes with a [GoogleIamV1Policy]. /// /// Completes with a [commons.ApiRequestError] if the API endpoint returned an /// error. /// /// If the used [http.Client] completes with an error when making a REST call, /// this method will complete with the same error. 
- async.Future setPeering( - GoogleCloudBeyondcorpSecuritygatewaysV1SetPeeringRequest request, - core.String securityGateway, { + async.Future setIamPolicy( + GoogleIamV1SetIamPolicyRequest request, + core.String resource, { core.String? $fields, }) async { final body_ = convert.json.encode(request); @@ -3524,8 +3520,7 @@ class ProjectsLocationsSecurityGatewaysResource { if ($fields != null) 'fields': [$fields], }; - final url_ = - 'v1/' + core.Uri.encodeFull('$securityGateway') + ':setPeering'; + final url_ = 'v1/' + core.Uri.encodeFull('$resource') + ':setIamPolicy'; final response_ = await _requester.request( url_, @@ -3533,7 +3528,7 @@ class ProjectsLocationsSecurityGatewaysResource { body: body_, queryParams: queryParams_, ); - return GoogleLongrunningOperation.fromJson( + return GoogleIamV1Policy.fromJson( response_ as core.Map); } @@ -3864,87 +3859,6 @@ class ProjectsLocationsSecurityGatewaysApplicationsResource { } } -class VResource { - final commons.ApiRequester _requester; - - VProjectsResource get projects => VProjectsResource(_requester); - - VResource(commons.ApiRequester client) : _requester = client; -} - -class VProjectsResource { - final commons.ApiRequester _requester; - - VProjectsLocationsResource get locations => - VProjectsLocationsResource(_requester); - - VProjectsResource(commons.ApiRequester client) : _requester = client; -} - -class VProjectsLocationsResource { - final commons.ApiRequester _requester; - - VProjectsLocationsSecurityGatewaysResource get securityGateways => - VProjectsLocationsSecurityGatewaysResource(_requester); - - VProjectsLocationsResource(commons.ApiRequester client) : _requester = client; -} - -class VProjectsLocationsSecurityGatewaysResource { - final commons.ApiRequester _requester; - - VProjectsLocationsSecurityGatewaysResource(commons.ApiRequester client) - : _requester = client; - - /// Sets the access control policy on the specified resource. - /// - /// Replaces any existing policy. Can return `NOT_FOUND`, `INVALID_ARGUMENT`, - /// and `PERMISSION_DENIED` errors. - /// - /// [request] - The metadata request object. - /// - /// Request parameters: - /// - /// [resource] - REQUIRED: The resource for which the policy is being - /// specified. See - /// [Resource names](https://cloud.google.com/apis/design/resource_names) for - /// the appropriate value for this field. - /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/securityGateways/\[^/\]+$`. - /// - /// [$fields] - Selector specifying which fields to include in a partial - /// response. - /// - /// Completes with a [GoogleIamV1Policy]. - /// - /// Completes with a [commons.ApiRequestError] if the API endpoint returned an - /// error. - /// - /// If the used [http.Client] completes with an error when making a REST call, - /// this method will complete with the same error. - async.Future setIamPolicy( - GoogleIamV1SetIamPolicyRequest request, - core.String resource, { - core.String? $fields, - }) async { - final body_ = convert.json.encode(request); - final queryParams_ = >{ - if ($fields != null) 'fields': [$fields], - }; - - final url_ = 'v/' + core.Uri.encodeFull('$resource') + ':setIamPolicy'; - - final response_ = await _requester.request( - url_, - 'POST', - body: body_, - queryParams: queryParams_, - ); - return GoogleIamV1Policy.fromJson( - response_ as core.Map); - } -} - /// Allocated connection of the AppGateway. 
class AllocatedConnection { /// The ingress port of an allocated connection @@ -5211,38 +5125,6 @@ class GoogleCloudBeyondcorpSecuritygatewaysV1ListSecurityGatewaysResponse { }; } -/// VPC Peering details. -class GoogleCloudBeyondcorpSecuritygatewaysV1Peering { - /// List of DNS zones for DNS peering with the customer VPC network. - /// - /// Optional. - core.List? dnsZones; - - /// The name of the Target VPC network name in the format: - /// \`projects/{project}/global/networks/{network} - /// - /// Required. - core.String? targetNetwork; - - GoogleCloudBeyondcorpSecuritygatewaysV1Peering({ - this.dnsZones, - this.targetNetwork, - }); - - GoogleCloudBeyondcorpSecuritygatewaysV1Peering.fromJson(core.Map json_) - : this( - dnsZones: (json_['dnsZones'] as core.List?) - ?.map((value) => value as core.String) - .toList(), - targetNetwork: json_['targetNetwork'] as core.String?, - ); - - core.Map toJson() => { - if (dnsZones != null) 'dnsZones': dnsZones!, - if (targetNetwork != null) 'targetNetwork': targetNetwork!, - }; -} - /// Information about a BeyoncCorp SecurityGateway resource. class GoogleCloudBeyondcorpSecuritygatewaysV1SecurityGateway { /// Timestamp when the resource was created. @@ -5335,61 +5217,6 @@ class GoogleCloudBeyondcorpSecuritygatewaysV1SecurityGateway { }; } -/// Set Peering request for creating a VPC peering between Google network and -/// customer networks. -class GoogleCloudBeyondcorpSecuritygatewaysV1SetPeeringRequest { - /// List of Peering connection information. - /// - /// Required. - core.List? peerings; - - /// An optional request ID to identify requests. - /// - /// Specify a unique request ID so that if you must retry your request, the - /// server will know to ignore the request if it has already been completed. - /// The server will guarantee that for at least 60 minutes since the first - /// request. For example, consider a situation where you make an initial - /// request and the request times out. If you make the request again with the - /// same request ID, the server can check if original operation with the same - /// request ID was received, and if so, will ignore the second request. This - /// prevents clients from accidentally creating duplicate commitments. The - /// request ID must be a valid UUID with the exception that zero UUID is not - /// supported (00000000-0000-0000-0000-000000000000). - /// - /// Optional. - core.String? requestId; - - /// If set, validates request by executing a dry-run which would not alter the - /// resource in any way. - /// - /// Optional. - core.bool? validateOnly; - - GoogleCloudBeyondcorpSecuritygatewaysV1SetPeeringRequest({ - this.peerings, - this.requestId, - this.validateOnly, - }); - - GoogleCloudBeyondcorpSecuritygatewaysV1SetPeeringRequest.fromJson( - core.Map json_) - : this( - peerings: (json_['peerings'] as core.List?) - ?.map((value) => - GoogleCloudBeyondcorpSecuritygatewaysV1Peering.fromJson( - value as core.Map)) - .toList(), - requestId: json_['requestId'] as core.String?, - validateOnly: json_['validateOnly'] as core.bool?, - ); - - core.Map toJson() => { - if (peerings != null) 'peerings': peerings!, - if (requestId != null) 'requestId': requestId!, - if (validateOnly != null) 'validateOnly': validateOnly!, - }; -} - /// The response message for Locations.ListLocations. class GoogleCloudLocationListLocationsResponse { /// A list of locations that matches the specified filter in the request. 
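A hedged sketch of the `setIamPolicy` call that replaces the removed `setPeering`/`v/` surface in the beyondcorp/v1.dart hunks above; the getter chain and IAM request types follow the standard generated layout, and the resource name, member, and role are placeholders (the role name is not taken from this diff):

import 'package:googleapis/beyondcorp/v1.dart';
import 'package:http/http.dart' as http;

// Sketch only: `client` is assumed to be an authorized HTTP client.
Future<void> grantGatewayAccess(http.Client client) async {
  final api = BeyondCorpApi(client);
  final request = GoogleIamV1SetIamPolicyRequest(
    policy: GoogleIamV1Policy(
      bindings: [
        GoogleIamV1Binding(
          role: 'roles/beyondcorp.securityGatewayUser', // assumed role name
          members: ['user:alice@example.com'], // placeholder member
        ),
      ],
    ),
  );
  // setIamPolicy now lives on the regular securityGateways resource rather
  // than the removed `v/` surface.
  final policy = await api.projects.locations.securityGateways.setIamPolicy(
    request,
    'projects/my-project/locations/global/securityGateways/my-gateway',
  );
  print('Applied policy with etag ${policy.etag}');
}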
diff --git a/generated/googleapis/lib/bigquery/v2.dart b/generated/googleapis/lib/bigquery/v2.dart index 2a8ab36c2..643405878 100644 --- a/generated/googleapis/lib/bigquery/v2.dart +++ b/generated/googleapis/lib/bigquery/v2.dart @@ -173,7 +173,7 @@ class DatasetsResource { /// value will be rejected. Requests for conditional access policy binding in /// datasets must specify version 3. Dataset with no conditional role bindings /// in access policy may specify any valid value or leave the field unset. - /// This field will be maped to + /// This field will be mapped to /// [IAM Policy version](https://cloud.google.com/iam/docs/policies#versions) /// and will be used to fetch policy from IAM. If unset or if 0 or 1 value is /// used for dataset with conditional bindings, access entry with condition @@ -251,9 +251,9 @@ class DatasetsResource { /// datasets must specify version 3. * But dataset with no conditional role /// bindings in access policy may specify any valid value or leave the field /// unset. If unset or if 0 or 1 value is used for dataset with conditional - /// bindings, request will be rejected. This field will be maped to IAM Policy - /// version (https://cloud.google.com/iam/docs/policies#versions) and will be - /// used to set policy in IAM. + /// bindings, request will be rejected. This field will be mapped to IAM + /// Policy version (https://cloud.google.com/iam/docs/policies#versions) and + /// will be used to set policy in IAM. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. @@ -378,7 +378,7 @@ class DatasetsResource { /// condition. * But dataset with no conditional role bindings in access /// policy may specify any valid value or leave the field unset. If unset or /// if 0 or 1 value is used for dataset with conditional bindings, request - /// will be rejected. This field will be maped to IAM Policy version + /// will be rejected. This field will be mapped to IAM Policy version /// (https://cloud.google.com/iam/docs/policies#versions) and will be used to /// set policy in IAM. /// @@ -499,7 +499,7 @@ class DatasetsResource { /// condition. * But dataset with no conditional role bindings in access /// policy may specify any valid value or leave the field unset. If unset or /// if 0 or 1 value is used for dataset with conditional bindings, request - /// will be rejected. This field will be maped to IAM Policy version + /// will be rejected. This field will be mapped to IAM Policy version /// (https://cloud.google.com/iam/docs/policies#versions) and will be used to /// set policy in IAM. /// @@ -2608,10 +2608,10 @@ class Argument { /// - "FIXED_TYPE" : The argument is a variable with fully specified type, /// which can be a struct or an array, but not a table. /// - "ANY_TYPE" : The argument is any type, including struct or array, but - /// not a table. To be added: FIXED_TABLE, ANY_TABLE + /// not a table. core.String? argumentKind; - /// Required unless argument_kind = ANY_TYPE. + /// Set if argument_kind == FIXED_TYPE. StandardSqlDataType? dataType; /// Whether the argument is an aggregate function parameter. @@ -3281,12 +3281,12 @@ class BigLakeConfiguration { /// \`{project}.{location}.{connection_id}\` or /// \`projects/{project}/locations/{location}/connections/{connection_id}". /// - /// Required. + /// Optional. core.String? connectionId; /// The file format the table data is stored in. /// - /// Required. + /// Optional. /// Possible string values are: /// - "FILE_FORMAT_UNSPECIFIED" : Default Value. 
/// - "PARQUET" : Apache Parquet format. @@ -3298,12 +3298,12 @@ class BigLakeConfiguration { /// The '*' wildcard character is not allowed. The URI should be in the format /// `gs://bucket/path_to_table/` /// - /// Required. + /// Optional. core.String? storageUri; /// The table format the metadata only snapshots are stored in. /// - /// Required. + /// Optional. /// Possible string values are: /// - "TABLE_FORMAT_UNSPECIFIED" : Default Value. /// - "ICEBERG" : Apache Iceberg format. @@ -9483,7 +9483,7 @@ class JobStatistics { /// as ENTERPRISE. /// - "STANDARD" : Standard edition. /// - "ENTERPRISE" : Enterprise edition. - /// - "ENTERPRISE_PLUS" : Enterprise plus edition. + /// - "ENTERPRISE_PLUS" : Enterprise Plus edition. core.String? edition; /// End time of this job, in milliseconds since the epoch. @@ -14901,6 +14901,16 @@ class Table { /// Output only. core.String? location; + /// If set, overrides the default managed table type configured in the + /// dataset. + /// + /// Optional. + /// Possible string values are: + /// - "MANAGED_TABLE_TYPE_UNSPECIFIED" : No managed table type specified. + /// - "NATIVE" : The managed table is a native BigQuery table. + /// - "ICEBERG" : The managed table is a BigQuery table for Apache Iceberg. + core.String? managedTableType; + /// The materialized view definition. /// /// Optional. @@ -15139,6 +15149,7 @@ class Table { this.labels, this.lastModifiedTime, this.location, + this.managedTableType, this.materializedView, this.materializedViewStatus, this.maxStaleness, @@ -15223,6 +15234,7 @@ class Table { ), lastModifiedTime: json_['lastModifiedTime'] as core.String?, location: json_['location'] as core.String?, + managedTableType: json_['managedTableType'] as core.String?, materializedView: json_.containsKey('materializedView') ? MaterializedViewDefinition.fromJson(json_['materializedView'] as core.Map) @@ -15339,6 +15351,7 @@ class Table { if (labels != null) 'labels': labels!, if (lastModifiedTime != null) 'lastModifiedTime': lastModifiedTime!, if (location != null) 'location': location!, + if (managedTableType != null) 'managedTableType': managedTableType!, if (materializedView != null) 'materializedView': materializedView!, if (materializedViewStatus != null) 'materializedViewStatus': materializedViewStatus!, @@ -16643,7 +16656,7 @@ class TrainingOptions { /// /// Applies to contribution analysis models. Allowed formats supported are for /// summable and summable ratio contribution metrics. These include - /// expressions such as "SUM(x)" or "SUM(x)/SUM(y)", where x and y are column + /// expressions such as `SUM(x)` or `SUM(x)/SUM(y)`, where x and y are column /// names from the base table. core.String? contributionMetric; diff --git a/generated/googleapis/lib/bigqueryreservation/v1.dart b/generated/googleapis/lib/bigqueryreservation/v1.dart index 4ba995c22..bf493bcb3 100644 --- a/generated/googleapis/lib/bigqueryreservation/v1.dart +++ b/generated/googleapis/lib/bigqueryreservation/v1.dart @@ -755,7 +755,7 @@ class ProjectsLocationsReservationsResource { return Empty.fromJson(response_ as core.Map); } - /// Failover a reservation to the secondary location. + /// Fail over a reservation to the secondary location. /// /// The operation should be done in the current secondary location, which will /// be promoted to the new primary location for the reservation. Attempting to @@ -1225,6 +1225,19 @@ class Assignment { /// E.g. `projects/myproject`, `folders/123`, or `organizations/456`. core.String? 
assignee; + /// This field controls if "Gemini in BigQuery" + /// (https://cloud.google.com/gemini/docs/bigquery/overview) features should + /// be enabled for this reservation assignment, which is not on by default. + /// + /// "Gemini in BigQuery" has a distinct compliance posture from BigQuery. If + /// this field is set to true, the assignment job type is QUERY, and the + /// parent reservation edition is ENTERPRISE_PLUS, then the assignment will + /// give the grantee project/organization access to "Gemini in BigQuery" + /// features. + /// + /// Optional. + core.bool? enableGeminiInBigquery; + /// Which type of jobs will use the reservation. /// Possible string values are: /// - "JOB_TYPE_UNSPECIFIED" : Invalid type. Requests with this value will be @@ -1264,6 +1277,7 @@ class Assignment { Assignment({ this.assignee, + this.enableGeminiInBigquery, this.jobType, this.name, this.state, @@ -1272,6 +1286,7 @@ class Assignment { Assignment.fromJson(core.Map json_) : this( assignee: json_['assignee'] as core.String?, + enableGeminiInBigquery: json_['enableGeminiInBigquery'] as core.bool?, jobType: json_['jobType'] as core.String?, name: json_['name'] as core.String?, state: json_['state'] as core.String?, @@ -1279,6 +1294,8 @@ class Assignment { core.Map toJson() => { if (assignee != null) 'assignee': assignee!, + if (enableGeminiInBigquery != null) + 'enableGeminiInBigquery': enableGeminiInBigquery!, if (jobType != null) 'jobType': jobType!, if (name != null) 'name': name!, if (state != null) 'state': state!, @@ -1289,7 +1306,10 @@ class Assignment { class Autoscale { /// The slot capacity added to this reservation when autoscale happens. /// - /// Will be between \[0, max_slots\]. + /// Will be between \[0, max_slots\]. Note: after users reduce max_slots, it + /// may take a while before it can be propagated, so current_slots may stay in + /// the original value and could be larger than max_slots for that brief + /// period (less than one minute) /// /// Output only. core.String? currentSlots; @@ -1392,7 +1412,7 @@ class CapacityCommitment { /// ENTERPRISE. /// - "STANDARD" : Standard edition. /// - "ENTERPRISE" : Enterprise edition. - /// - "ENTERPRISE_PLUS" : Enterprise plus edition. + /// - "ENTERPRISE_PLUS" : Enterprise Plus edition. core.String? edition; /// For FAILED commitment plan, provides the reason of failure. @@ -1735,7 +1755,7 @@ class Reservation { /// optimizations for small queries. Default value is 0 which means that /// concurrency target will be automatically computed by the system. NOTE: /// this field is exposed as target job concurrency in the Information Schema, - /// DDL and BQ CLI. + /// DDL and BigQuery CLI. core.String? concurrency; /// Creation time of the reservation. @@ -1749,7 +1769,7 @@ class Reservation { /// ENTERPRISE. /// - "STANDARD" : Standard edition. /// - "ENTERPRISE" : Enterprise edition. - /// - "ENTERPRISE_PLUS" : Enterprise plus edition. + /// - "ENTERPRISE_PLUS" : Enterprise Plus edition. core.String? edition; /// If false, any query or pipeline job using this reservation will use idle @@ -1759,6 +1779,14 @@ class Reservation { /// the slot capacity specified in the slot_capacity field at most. core.bool? ignoreIdleSlots; + /// The labels associated with this reservation. + /// + /// You can use these to organize and group your reservations. You can set + /// this property when inserting or updating a reservation. + /// + /// Optional. + core.Map? 
labels; + /// Applicable only for reservations located within one of the BigQuery /// multi-regions (US or EU). /// @@ -1777,35 +1805,28 @@ class Reservation { /// maximum length is 64 characters. core.String? name; - /// The original primary location of the reservation which is set only during - /// its creation and remains unchanged afterwards. + /// The location where the reservation was originally created. /// - /// It can be used by the customer to answer questions about disaster recovery - /// billing. The field is output only for customers and should not be - /// specified, however, the google.api.field_behavior is not set to - /// OUTPUT_ONLY since these fields are set in rerouted requests sent across - /// regions. + /// This is set only during the failover reservation's creation. All billing + /// charges for the failover reservation will be applied to this location. /// - /// Optional. + /// Output only. core.String? originalPrimaryLocation; - /// The primary location of the reservation. + /// The current location of the reservation's primary replica. /// - /// The field is only meaningful for reservation used for cross region - /// disaster recovery. The field is output only for customers and should not - /// be specified, however, the google.api.field_behavior is not set to - /// OUTPUT_ONLY since these fields are set in rerouted requests sent across - /// regions. + /// This field is only set for reservations using the managed disaster + /// recovery feature. /// - /// Optional. + /// Output only. core.String? primaryLocation; - /// The secondary location of the reservation which is used for cross region - /// disaster recovery purposes. + /// The current location of the reservation's secondary replica. /// - /// Customer can set this in create/update reservation calls to create a - /// failover reservation or convert a non-failover reservation to a failover - /// reservation. + /// This field is only set for reservations using the managed disaster + /// recovery feature. Users can set this in create reservation calls to create + /// a failover reservation or in update reservation calls to convert a + /// non-failover reservation to a failover reservation(or vice versa). /// /// Optional. core.String? 
secondaryLocation; @@ -1841,6 +1862,7 @@ class Reservation { this.creationTime, this.edition, this.ignoreIdleSlots, + this.labels, this.multiRegionAuxiliary, this.name, this.originalPrimaryLocation, @@ -1860,6 +1882,13 @@ class Reservation { creationTime: json_['creationTime'] as core.String?, edition: json_['edition'] as core.String?, ignoreIdleSlots: json_['ignoreIdleSlots'] as core.bool?, + labels: + (json_['labels'] as core.Map?)?.map( + (key, value) => core.MapEntry( + key, + value as core.String, + ), + ), multiRegionAuxiliary: json_['multiRegionAuxiliary'] as core.bool?, name: json_['name'] as core.String?, originalPrimaryLocation: @@ -1876,6 +1905,7 @@ class Reservation { if (creationTime != null) 'creationTime': creationTime!, if (edition != null) 'edition': edition!, if (ignoreIdleSlots != null) 'ignoreIdleSlots': ignoreIdleSlots!, + if (labels != null) 'labels': labels!, if (multiRegionAuxiliary != null) 'multiRegionAuxiliary': multiRegionAuxiliary!, if (name != null) 'name': name!, diff --git a/generated/googleapis/lib/bigtableadmin/v2.dart b/generated/googleapis/lib/bigtableadmin/v2.dart index c6353d5c6..62819eb28 100644 --- a/generated/googleapis/lib/bigtableadmin/v2.dart +++ b/generated/googleapis/lib/bigtableadmin/v2.dart @@ -3094,7 +3094,7 @@ class AutomatedBackupPolicy { /// How long the automated backups should be retained. /// - /// The only supported value at this time is 3 days. + /// Values must be at least 3 days and at most 90 days. /// /// Required. core.String? retentionPeriod; diff --git a/generated/googleapis/lib/binaryauthorization/v1.dart b/generated/googleapis/lib/binaryauthorization/v1.dart index cbad9bf74..2ff420179 100644 --- a/generated/googleapis/lib/binaryauthorization/v1.dart +++ b/generated/googleapis/lib/binaryauthorization/v1.dart @@ -1133,17 +1133,6 @@ class AdmissionRule { /// all of the images in the pod spec. /// - "ALWAYS_DENY" : This rule denies all pod creations. core.String? evaluationMode; - - /// The resource names of the attestors that must attest to a container image, - /// in the format `projects / * /attestors / * `. - /// - /// Each attestor must exist before a policy can reference it. To add an - /// attestor to a policy the principal issuing the policy change request must - /// be able to read the attestor resource. Note: this field must be non-empty - /// when the `evaluation_mode` field specifies `REQUIRE_ATTESTATION`, - /// otherwise it must be empty. - /// - /// Optional. core.List? requireAttestationsBy; AdmissionRule({ diff --git a/generated/googleapis/lib/blockchainnodeengine/v1.dart b/generated/googleapis/lib/blockchainnodeengine/v1.dart index 8ee2f448c..0bbd39b66 100644 --- a/generated/googleapis/lib/blockchainnodeengine/v1.dart +++ b/generated/googleapis/lib/blockchainnodeengine/v1.dart @@ -449,8 +449,8 @@ class ProjectsLocationsOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// [request] - The metadata request object. 
/// diff --git a/generated/googleapis/lib/calendar/v3.dart b/generated/googleapis/lib/calendar/v3.dart index 7043fed75..5fd032182 100644 --- a/generated/googleapis/lib/calendar/v3.dart +++ b/generated/googleapis/lib/calendar/v3.dart @@ -3955,6 +3955,11 @@ class Event { /// to only update the participant's response. Optional. The default is False. core.bool? attendeesOmitted; + /// Birthday or special event data. + /// + /// Used if eventType is "birthday". Immutable. + EventBirthdayProperties? birthdayProperties; + /// The color of the event. /// /// This is an ID referring to an entry in the event section of the colors @@ -4227,6 +4232,7 @@ class Event { this.attachments, this.attendees, this.attendeesOmitted, + this.birthdayProperties, this.colorId, this.conferenceData, this.created, @@ -4279,6 +4285,10 @@ class Event { value as core.Map)) .toList(), attendeesOmitted: json_['attendeesOmitted'] as core.bool?, + birthdayProperties: json_.containsKey('birthdayProperties') + ? EventBirthdayProperties.fromJson(json_['birthdayProperties'] + as core.Map) + : null, colorId: json_['colorId'] as core.String?, conferenceData: json_.containsKey('conferenceData') ? ConferenceData.fromJson(json_['conferenceData'] @@ -4373,6 +4383,8 @@ class Event { if (attachments != null) 'attachments': attachments!, if (attendees != null) 'attendees': attendees!, if (attendeesOmitted != null) 'attendeesOmitted': attendeesOmitted!, + if (birthdayProperties != null) + 'birthdayProperties': birthdayProperties!, if (colorId != null) 'colorId': colorId!, if (conferenceData != null) 'conferenceData': conferenceData!, if (created != null) 'created': created!.toUtc().toIso8601String(), @@ -4579,6 +4591,54 @@ class EventAttendee { }; } +class EventBirthdayProperties { + /// Resource name of the contact this birthday event is linked to. + /// + /// This can be used to fetch contact details from People API. Format: + /// "people/c12345". Read-only. + core.String? contact; + + /// Custom type label specified for this event. + /// + /// This is populated if birthdayProperties.type is set to "custom". + /// Read-only. + core.String? customTypeName; + + /// Type of birthday or special event. + /// + /// Possible values are: + /// - "anniversary" - An anniversary other than birthday. Always has a + /// contact. + /// - "birthday" - A birthday event. This is the default value. + /// - "custom" - A special date whose label is further specified in the + /// customTypeName field. Always has a contact. + /// - "other" - A special date which does not fall into the other categories, + /// and does not have a custom label. Always has a contact. + /// - "self" - Calendar owner's own birthday. Cannot have a contact. The + /// Calendar API only supports creating events with the type "birthday". The + /// type cannot be changed after the event is created. + core.String? type; + + EventBirthdayProperties({ + this.contact, + this.customTypeName, + this.type, + }); + + EventBirthdayProperties.fromJson(core.Map json_) + : this( + contact: json_['contact'] as core.String?, + customTypeName: json_['customTypeName'] as core.String?, + type: json_['type'] as core.String?, + ); + + core.Map toJson() => { + if (contact != null) 'contact': contact!, + if (customTypeName != null) 'customTypeName': customTypeName!, + if (type != null) 'type': type!, + }; +} + class EventDateTime { /// The date, in the format "yyyy-mm-dd", if this is an all-day event. core.DateTime? 
date; diff --git a/generated/googleapis/lib/certificatemanager/v1.dart b/generated/googleapis/lib/certificatemanager/v1.dart index 47b0bbc8c..6342da2a2 100644 --- a/generated/googleapis/lib/certificatemanager/v1.dart +++ b/generated/googleapis/lib/certificatemanager/v1.dart @@ -1405,8 +1405,8 @@ class ProjectsLocationsOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// [request] - The metadata request object. /// @@ -1961,13 +1961,15 @@ class Certificate { /// /// Optional. Immutable. /// Possible string values are: - /// - "DEFAULT" : Certificates with default scope are served from core Google - /// data centers. If unsure, choose this option. - /// - "EDGE_CACHE" : Certificates with scope EDGE_CACHE are special-purposed - /// certificates, served from Edge Points of Presence. See - /// https://cloud.google.com/vpc/docs/edge-locations. - /// - "ALL_REGIONS" : Certificates with ALL_REGIONS scope are served from all - /// Google Cloud regions. See + /// - "DEFAULT" : Use the DEFAULT scope if you plan to use the certificate + /// with global external Application Load Balancer, global external proxy + /// Network Load Balancer, or any of the regional Google Cloud services. + /// - "EDGE_CACHE" : Use the EDGE_CACHE scope if you plan to use the + /// certificate with Media CDN. The certificates are served from Edge Points + /// of Presence. See https://cloud.google.com/vpc/docs/edge-locations. + /// - "ALL_REGIONS" : Use the ALL_REGIONS scope if you plan to use the + /// certificate with cross-region internal Application Load Balancer. The + /// certificates are served from all Google Cloud regions. See /// https://cloud.google.com/compute/docs/regions-zones. core.String? scope; diff --git a/generated/googleapis/lib/chat/v1.dart b/generated/googleapis/lib/chat/v1.dart index d1901fa16..6f9fd9178 100644 --- a/generated/googleapis/lib/chat/v1.dart +++ b/generated/googleapis/lib/chat/v1.dart @@ -371,17 +371,11 @@ class SpacesResource { response_ as core.Map); } - /// Creates a space with no members. + /// Creates a space. /// - /// Can be used to create a named space. Spaces grouped by topics aren't - /// supported. For an example, see + /// Can be used to create a named space, or a group chat in `Import mode`. For + /// an example, see /// [Create a space](https://developers.google.com/workspace/chat/create-spaces). - /// If you receive the error message `ALREADY_EXISTS` when creating a space, - /// try a different `displayName`. An existing space within the Google - /// Workspace organization might already use this display name. If you're a - /// member of the - /// [Developer Preview program](https://developers.google.com/workspace/preview), - /// you can create a group chat in import mode using `spaceType.GROUP_CHAT`. 
/// Supports the following types of /// [authentication](https://developers.google.com/workspace/chat/authenticate-authorize): /// - @@ -391,7 +385,13 @@ class SpacesResource { /// [Developer Preview](https://developers.google.com/workspace/preview) - /// [User authentication](https://developers.google.com/workspace/chat/authenticate-authorize-chat-user) /// When authenticating as an app, the `space.customer` field must be set in - /// the request. + /// the request. Space membership upon creation depends on whether the space + /// is created in `Import mode`: * **Import mode:** No members are created. * + /// **All other modes:** The calling user is added as a member. This is: * The + /// app itself when using app authentication. * The human user when using user + /// authentication. If you receive the error message `ALREADY_EXISTS` when + /// creating a space, try a different `displayName`. An existing space within + /// the Google Workspace organization might already use this display name. /// /// [request] - The metadata request object. /// @@ -458,9 +458,9 @@ class SpacesResource { /// `spaces/{space}` /// Value must have pattern `^spaces/\[^/\]+$`. /// - /// [useAdminAccess] - When `true`, the method runs using the user's Google - /// Workspace administrator privileges. The calling user must be a Google - /// Workspace administrator with the + /// [useAdminAccess] - Optional. When `true`, the method runs using the user's + /// Google Workspace administrator privileges. The calling user must be a + /// Google Workspace administrator with the /// [manage chat and spaces conversations privilege](https://support.google.com/a/answer/13369245). /// Requires the `chat.admin.delete` /// [OAuth 2.0 scope](https://developers.google.com/workspace/chat/authenticate-authorize#chat-api-scopes). @@ -575,9 +575,9 @@ class SpacesResource { /// `spaces/{space}`. Format: `spaces/{space}` /// Value must have pattern `^spaces/\[^/\]+$`. /// - /// [useAdminAccess] - When `true`, the method runs using the user's Google - /// Workspace administrator privileges. The calling user must be a Google - /// Workspace administrator with the + /// [useAdminAccess] - Optional. When `true`, the method runs using the user's + /// Google Workspace administrator privileges. The calling user must be a + /// Google Workspace administrator with the /// [manage chat and spaces conversations privilege](https://support.google.com/a/answer/13369245). /// Requires the `chat.admin.spaces` or `chat.admin.spaces.readonly` /// [OAuth 2.0 scopes](https://developers.google.com/workspace/chat/authenticate-authorize#chat-api-scopes). @@ -707,9 +707,9 @@ class SpacesResource { /// /// Request parameters: /// - /// [name] - Resource name of the space. Format: `spaces/{space}` Where - /// `{space}` represents the system-assigned ID for the space. You can obtain - /// the space ID by calling the + /// [name] - Identifier. Resource name of the space. Format: `spaces/{space}` + /// Where `{space}` represents the system-assigned ID for the space. You can + /// obtain the space ID by calling the /// \[`spaces.list()`\](https://developers.google.com/workspace/chat/api/reference/rest/v1/spaces/list) /// method or from the space URL. 
For example, if the space URL is /// `https://mail.google.com/mail/u/0/#chat/space/AAAAAAAAA`, the space ID is @@ -761,9 +761,9 @@ class SpacesResource { /// `permission_settings.useAtMentionAll` - `permission_settings.manageApps` - /// `permission_settings.manageWebhooks` - `permission_settings.replyMessages` /// - /// [useAdminAccess] - When `true`, the method runs using the user's Google - /// Workspace administrator privileges. The calling user must be a Google - /// Workspace administrator with the + /// [useAdminAccess] - Optional. When `true`, the method runs using the user's + /// Google Workspace administrator privileges. The calling user must be a + /// Google Workspace administrator with the /// [manage chat and spaces conversations privilege](https://support.google.com/a/answer/13369245). /// Requires the `chat.admin.spaces` /// [OAuth 2.0 scope](https://developers.google.com/workspace/chat/authenticate-authorize#chat-api-scopes). @@ -1035,9 +1035,9 @@ class SpacesMembersResource { /// the membership. Format: spaces/{space} /// Value must have pattern `^spaces/\[^/\]+$`. /// - /// [useAdminAccess] - When `true`, the method runs using the user's Google - /// Workspace administrator privileges. The calling user must be a Google - /// Workspace administrator with the + /// [useAdminAccess] - Optional. When `true`, the method runs using the user's + /// Google Workspace administrator privileges. The calling user must be a + /// Google Workspace administrator with the /// [manage chat and spaces conversations privilege](https://support.google.com/a/answer/13369245). /// Requires the `chat.admin.memberships` /// [OAuth 2.0 scope](https://developers.google.com/workspace/chat/authenticate-authorize#chat-api-scopes). @@ -1108,9 +1108,9 @@ class SpacesMembersResource { /// `spaces/{space}/members/app`. /// Value must have pattern `^spaces/\[^/\]+/members/\[^/\]+$`. /// - /// [useAdminAccess] - When `true`, the method runs using the user's Google - /// Workspace administrator privileges. The calling user must be a Google - /// Workspace administrator with the + /// [useAdminAccess] - Optional. When `true`, the method runs using the user's + /// Google Workspace administrator privileges. The calling user must be a + /// Google Workspace administrator with the /// [manage chat and spaces conversations privilege](https://support.google.com/a/answer/13369245). /// Requires the `chat.admin.memberships` /// [OAuth 2.0 scope](https://developers.google.com/workspace/chat/authenticate-authorize#chat-api-scopes). @@ -1172,9 +1172,9 @@ class SpacesMembersResource { /// the email of the Google Chat user. /// Value must have pattern `^spaces/\[^/\]+/members/\[^/\]+$`. /// - /// [useAdminAccess] - When `true`, the method runs using the user's Google - /// Workspace administrator privileges. The calling user must be a Google - /// Workspace administrator with the + /// [useAdminAccess] - Optional. When `true`, the method runs using the user's + /// Google Workspace administrator privileges. The calling user must be a + /// Google Workspace administrator with the /// [manage chat and spaces conversations privilege](https://support.google.com/a/answer/13369245). /// Requires the `chat.admin.memberships` or `chat.admin.memberships.readonly` /// [OAuth 2.0 scopes](https://developers.google.com/workspace/chat/authenticate-authorize#chat-api-scopes). @@ -1278,9 +1278,9 @@ class SpacesMembersResource { /// aren't returned. 
Currently requires /// [user authentication](https://developers.google.com/workspace/chat/authenticate-authorize-chat-user). /// - /// [useAdminAccess] - When `true`, the method runs using the user's Google - /// Workspace administrator privileges. The calling user must be a Google - /// Workspace administrator with the + /// [useAdminAccess] - Optional. When `true`, the method runs using the user's + /// Google Workspace administrator privileges. The calling user must be a + /// Google Workspace administrator with the /// [manage chat and spaces conversations privilege](https://support.google.com/a/answer/13369245). /// Requires either the `chat.admin.memberships.readonly` or /// `chat.admin.memberships` @@ -1348,17 +1348,17 @@ class SpacesMembersResource { /// /// Request parameters: /// - /// [name] - Resource name of the membership, assigned by the server. Format: - /// `spaces/{space}/members/{member}` + /// [name] - Identifier. Resource name of the membership, assigned by the + /// server. Format: `spaces/{space}/members/{member}` /// Value must have pattern `^spaces/\[^/\]+/members/\[^/\]+$`. /// /// [updateMask] - Required. The field paths to update. Separate multiple /// values with commas or use `*` to update all field paths. Currently /// supported field paths: - `role` /// - /// [useAdminAccess] - When `true`, the method runs using the user's Google - /// Workspace administrator privileges. The calling user must be a Google - /// Workspace administrator with the + /// [useAdminAccess] - Optional. When `true`, the method runs using the user's + /// Google Workspace administrator privileges. The calling user must be a + /// Google Workspace administrator with the /// [manage chat and spaces conversations privilege](https://support.google.com/a/answer/13369245). /// Requires the `chat.admin.memberships` /// [OAuth 2.0 scope](https://developers.google.com/workspace/chat/authenticate-authorize#chat-api-scopes). @@ -1431,6 +1431,11 @@ class SpacesMessagesResource { /// of message can only contain text (`text`). /// ![Message sent with user authentication](https://developers.google.com/workspace/chat/images/message-user-auth.svg) /// The maximum message size, including the message contents, is 32,000 bytes. + /// For + /// [webhook](https://developers.google.com/workspace/chat/quickstart/webhooks) + /// requests, the response doesn't contain the full message. The response only + /// populates the `name` and `thread.name` fields in addition to the + /// information that was in the request. /// /// [request] - The metadata request object. /// @@ -1452,7 +1457,11 @@ class SpacesMessagesResource { /// [Name a message](https://developers.google.com/workspace/chat/create-messages#name_a_created_message). /// /// [messageReplyOption] - Optional. Specifies whether a message starts a - /// thread or replies to one. Only supported in named spaces. + /// thread or replies to one. Only supported in named spaces. When + /// [responding to user interactions](https://developers.google.com/workspace/chat/receive-respond-interactions), + /// this field is ignored. For interactions within a thread, the reply is + /// created in the same thread. Otherwise, the reply is created as a new + /// thread. /// Possible string values are: /// - "MESSAGE_REPLY_OPTION_UNSPECIFIED" : Default. Starts a new thread. Using /// this option ignores any thread ID or `thread_key` that's included. 
@@ -1536,9 +1545,9 @@ class SpacesMessagesResource { /// [Name a message](https://developers.google.com/workspace/chat/create-messages#name_a_created_message). /// Value must have pattern `^spaces/\[^/\]+/messages/\[^/\]+$`. /// - /// [force] - When `true`, deleting a message also deletes its threaded - /// replies. When `false`, if a message has threaded replies, deletion fails. - /// Only applies when + /// [force] - Optional. When `true`, deleting a message also deletes its + /// threaded replies. When `false`, if a message has threaded replies, + /// deletion fails. Only applies when /// [authenticating as a user](https://developers.google.com/workspace/chat/authenticate-authorize-chat-user). /// Has no effect when /// [authenticating as a Chat app](https://developers.google.com/workspace/chat/authenticate-authorize-chat-app). @@ -1638,9 +1647,9 @@ class SpacesMessagesResource { /// Format: `spaces/{space}` /// Value must have pattern `^spaces/\[^/\]+$`. /// - /// [filter] - A query filter. You can filter messages by date (`create_time`) - /// and thread (`thread.name`). To filter messages by the date they were - /// created, specify the `create_time` with a timestamp in + /// [filter] - Optional. A query filter. You can filter messages by date + /// (`create_time`) and thread (`thread.name`). To filter messages by the date + /// they were created, specify the `create_time` with a timestamp in /// \[RFC-3339\](https://www.rfc-editor.org/rfc/rfc3339) format and double /// quotation marks. For example, `"2023-04-21T11:30:00-04:00"`. You can use /// the greater than operator `>` to list messages that were created after a @@ -1658,26 +1667,26 @@ class SpacesMessagesResource { /// spaces/AAAAAAAAAAA/threads/123 ``` Invalid queries are rejected by the /// server with an `INVALID_ARGUMENT` error. /// - /// [orderBy] - Optional, if resuming from a previous query. How the list of - /// messages is ordered. Specify a value to order by an ordering operation. - /// Valid ordering operation values are as follows: - `ASC` for ascending. - - /// `DESC` for descending. The default ordering is `create_time ASC`. + /// [orderBy] - Optional. How the list of messages is ordered. Specify a value + /// to order by an ordering operation. Valid ordering operation values are as + /// follows: - `ASC` for ascending. - `DESC` for descending. The default + /// ordering is `create_time ASC`. /// - /// [pageSize] - The maximum number of messages returned. The service might - /// return fewer messages than this value. If unspecified, at most 25 are - /// returned. The maximum value is 1000. If you use a value more than 1000, - /// it's automatically changed to 1000. Negative values return an + /// [pageSize] - Optional. The maximum number of messages returned. The + /// service might return fewer messages than this value. If unspecified, at + /// most 25 are returned. The maximum value is 1000. If you use a value more + /// than 1000, it's automatically changed to 1000. Negative values return an /// `INVALID_ARGUMENT` error. /// - /// [pageToken] - Optional, if resuming from a previous query. A page token - /// received from a previous list messages call. Provide this parameter to - /// retrieve the subsequent page. When paginating, all other parameters - /// provided should match the call that provided the page token. Passing - /// different values to the other parameters might lead to unexpected results. + /// [pageToken] - Optional. A page token received from a previous list + /// messages call. 
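The `filter`, `pageSize`, and `pageToken` parameters documented in this hunk combine as in the following hedged paging sketch; `HangoutsChatApi` and the `messages`/`nextPageToken` response fields are assumptions about the generated library.

```dart
// Hedged sketch: page through messages created after a timestamp in a thread.
import 'package:googleapis/chat/v1.dart' as chat;
import 'package:googleapis_auth/auth_io.dart' as auth;

Future<void> main() async {
  final client = await auth.clientViaApplicationDefaultCredentials(
    scopes: ['https://www.googleapis.com/auth/chat.messages.readonly'],
  );
  try {
    final api = chat.HangoutsChatApi(client); // Assumed generated class name.
    String? pageToken;
    do {
      final page = await api.spaces.messages.list(
        'spaces/AAAAAAAAAAA',
        // Filter syntax per the [filter] documentation above.
        filter: 'create_time > "2023-04-21T11:30:00-04:00" AND '
            'thread.name = spaces/AAAAAAAAAAA/threads/123',
        pageSize: 50,
        pageToken: pageToken,
      );
      for (final message in page.messages ?? const <chat.Message>[]) {
        print('${message.name}: ${message.text}');
      }
      pageToken = page.nextPageToken; // Assumed response field name.
    } while (pageToken != null);
  } finally {
    client.close();
  }
}
```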
Provide this parameter to retrieve the subsequent page. + /// When paginating, all other parameters provided should match the call that + /// provided the page token. Passing different values to the other parameters + /// might lead to unexpected results. /// - /// [showDeleted] - Whether to include deleted messages. Deleted messages - /// include deleted time and metadata about their deletion, but message - /// content is unavailable. + /// [showDeleted] - Optional. Whether to include deleted messages. Deleted + /// messages include deleted time and metadata about their deletion, but + /// message content is unavailable. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. @@ -1737,7 +1746,7 @@ class SpacesMessagesResource { /// /// Request parameters: /// - /// [name] - Resource name of the message. Format: + /// [name] - Identifier. Resource name of the message. Format: /// `spaces/{space}/messages/{message}` Where `{space}` is the ID of the space /// where the message is posted and `{message}` is a system-assigned ID for /// the message. For example, @@ -1817,7 +1826,7 @@ class SpacesMessagesResource { /// /// Request parameters: /// - /// [name] - Resource name of the message. Format: + /// [name] - Identifier. Resource name of the message. Format: /// `spaces/{space}/messages/{message}` Where `{space}` is the ID of the space /// where the message is posted and `{message}` is a system-assigned ID for /// the message. For example, @@ -2217,11 +2226,11 @@ class SpacesSpaceEventsResource { /// service might return fewer than this value. Negative values return an /// `INVALID_ARGUMENT` error. /// - /// [pageToken] - A page token, received from a previous list space events - /// call. Provide this to retrieve the subsequent page. When paginating, all - /// other parameters provided to list space events must match the call that - /// provided the page token. Passing different values to the other parameters - /// might lead to unexpected results. + /// [pageToken] - Optional. A page token, received from a previous list space + /// events call. Provide this to retrieve the subsequent page. When + /// paginating, all other parameters provided to list space events must match + /// the call that provided the page token. Passing different values to the + /// other parameters might lead to unexpected results. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. @@ -2815,7 +2824,10 @@ class AttachedGif { class Attachment { /// A reference to the attachment data. /// - /// This field is used with the media API to download the attachment data. + /// This field is used to create or update messages with attachments, or with + /// the media API to download the attachment data. + /// + /// Optional. AttachmentDataRef? attachmentDataRef; /// The original file name for the content, not the full path. @@ -2845,6 +2857,8 @@ class Attachment { /// Resource name of the attachment, in the form /// `spaces/{space}/messages/{message}/attachments/{attachment}`. + /// + /// Optional. core.String? name; /// The source of the attachment. @@ -2911,11 +2925,15 @@ class AttachmentDataRef { /// /// Treated by clients as an opaque string and used to create or update Chat /// messages with attachments. + /// + /// Optional. core.String? attachmentUploadToken; /// The resource name of the attachment data. /// /// This field is used with the media API to download the attachment data. + /// + /// Optional. core.String? 
resourceName; AttachmentDataRef({ @@ -3448,6 +3466,8 @@ class Emoji { CustomEmoji? customEmoji; /// A basic emoji represented by a unicode string. + /// + /// Optional. core.String? unicode; Emoji({ @@ -3473,9 +3493,13 @@ class Emoji { /// The number of people who reacted to a message with a specific emoji. class EmojiReactionSummary { /// Emoji associated with the reactions. + /// + /// Output only. Emoji? emoji; /// The total number of reactions using the associated emoji. + /// + /// Output only. core.int? reactionCount; EmojiReactionSummary({ @@ -3750,6 +3774,30 @@ class GoogleAppsCardV1Button { /// The text displayed inside the button. core.String? text; + /// The type of a button. + /// + /// If unset, button type defaults to `OUTLINED`. If the `color` field is set, + /// the button type is forced to `FILLED` and any value set for this field is + /// ignored. [Google Chat apps](https://developers.google.com/workspace/chat): + /// + /// Optional. + /// Possible string values are: + /// - "TYPE_UNSPECIFIED" : Don't use. Unspecified. + /// - "OUTLINED" : Outlined buttons are medium-emphasis buttons. They usually + /// contain actions that are important, but aren’t the primary action in a + /// Chat app or an add-on. + /// - "FILLED" : A filled button has a container with a solid color. It has + /// the most visual impact and is recommended for the important and primary + /// action in a Chat app or an add-on. + /// - "FILLED_TONAL" : A filled tonal button is an alternative middle ground + /// between filled and outlined buttons. They’re useful in contexts where a + /// lower-priority button requires slightly more emphasis than an outline + /// button would give. + /// - "BORDERLESS" : A button does not have an invisible container in its + /// default state. It is often used for the lowest priority actions, + /// especially when presenting multiple options. + core.String? type; + GoogleAppsCardV1Button({ this.altText, this.color, @@ -3757,6 +3805,7 @@ class GoogleAppsCardV1Button { this.icon, this.onClick, this.text, + this.type, }); GoogleAppsCardV1Button.fromJson(core.Map json_) @@ -3776,6 +3825,7 @@ class GoogleAppsCardV1Button { json_['onClick'] as core.Map) : null, text: json_['text'] as core.String?, + type: json_['type'] as core.String?, ); core.Map toJson() => { @@ -3785,6 +3835,7 @@ class GoogleAppsCardV1Button { if (icon != null) 'icon': icon!, if (onClick != null) 'onClick': onClick!, if (text != null) 'text': text!, + if (type != null) 'type': type!, }; } @@ -3826,7 +3877,10 @@ class GoogleAppsCardV1ButtonList { /// [Design the components of a card or dialog](https://developers.google.com/workspace/chat/design-components-card-dialog). /// * For Google Workspace Add-ons, see \[Card-based /// interfaces\](https://developers.google.com/apps-script/add-ons/concepts/cards). -/// **Example: Card message for a Google Chat app** +/// Note: You can add up to 100 widgets per card. Any widgets beyond this limit +/// are ignored. This limit applies to both card messages and dialogs in Google +/// Chat apps, and to cards in Google Workspace Add-ons. 
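The new `type` field on `GoogleAppsCardV1Button` can be tried without any authentication, since building the widget and printing its JSON is purely local. The `url` field on `GoogleAppsCardV1OpenLink` is assumed from the existing library; everything else below is declared in this file.

```dart
// Standalone sketch: a FILLED button that opens a link, serialized to JSON.
import 'dart:convert';

import 'package:googleapis/chat/v1.dart' as chat;

void main() {
  final button = chat.GoogleAppsCardV1Button(
    text: 'Open documentation',
    type: 'FILLED', // Ignored and forced to FILLED if `color` were also set.
    onClick: chat.GoogleAppsCardV1OnClick(
      openLink: chat.GoogleAppsCardV1OpenLink(
        url: 'https://developers.google.com/workspace/chat',
      ),
    ),
  );
  // Mirrors the JSON shape shown in the card examples in these doc comments.
  print(const JsonEncoder.withIndent('  ').convert(button.toJson()));
}
```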
**Example: Card message +/// for a Google Chat app** /// ![Example contact card](https://developers.google.com/workspace/chat/images/card_api_reference.png) /// To create the sample card message in Google Chat, use the following JSON: /// ``` { "cardsV2": [ { "cardId": "unique-card-id", "card": { "header": { @@ -4110,6 +4164,248 @@ class GoogleAppsCardV1CardHeader { }; } +/// [Developer Preview](https://developers.google.com/workspace/preview): A +/// carousel, also known as a slider, rotates and displays a list of widgets in +/// a slideshow format, with buttons navigating to the previous or next widget. +/// +/// For example, this is a JSON representation of a carousel that contains three +/// text paragraph widgets. ``` { "carouselCards": [ { "widgets": [ { +/// "textParagraph": { "text": "First text paragraph in carousel", } } ] }, { +/// "widgets": [ { "textParagraph": { "text": "Second text paragraph in +/// carousel", } } ] }, { "widgets": [ { "textParagraph": { "text": "Third text +/// paragraph in carousel", } } ] } ] } ``` +/// [Google Chat apps](https://developers.google.com/workspace/chat): +class GoogleAppsCardV1Carousel { + /// A list of cards included in the carousel. + core.List? carouselCards; + + GoogleAppsCardV1Carousel({ + this.carouselCards, + }); + + GoogleAppsCardV1Carousel.fromJson(core.Map json_) + : this( + carouselCards: (json_['carouselCards'] as core.List?) + ?.map((value) => GoogleAppsCardV1CarouselCard.fromJson( + value as core.Map)) + .toList(), + ); + + core.Map toJson() => { + if (carouselCards != null) 'carouselCards': carouselCards!, + }; +} + +/// [Developer Preview](https://developers.google.com/workspace/preview): A card +/// that can be displayed as a carousel item. +/// +/// [Google Chat apps](https://developers.google.com/workspace/chat): +class GoogleAppsCardV1CarouselCard { + /// A list of widgets displayed at the bottom of the carousel card. + /// + /// The widgets are displayed in the order that they are specified. + core.List? footerWidgets; + + /// A list of widgets displayed in the carousel card. + /// + /// The widgets are displayed in the order that they are specified. + core.List? widgets; + + GoogleAppsCardV1CarouselCard({ + this.footerWidgets, + this.widgets, + }); + + GoogleAppsCardV1CarouselCard.fromJson(core.Map json_) + : this( + footerWidgets: (json_['footerWidgets'] as core.List?) + ?.map((value) => GoogleAppsCardV1NestedWidget.fromJson( + value as core.Map)) + .toList(), + widgets: (json_['widgets'] as core.List?) + ?.map((value) => GoogleAppsCardV1NestedWidget.fromJson( + value as core.Map)) + .toList(), + ); + + core.Map toJson() => { + if (footerWidgets != null) 'footerWidgets': footerWidgets!, + if (widgets != null) 'widgets': widgets!, + }; +} + +/// A text, icon, or text and icon chip that users can click. +/// +/// [Google Chat apps](https://developers.google.com/workspace/chat): +class GoogleAppsCardV1Chip { + /// The alternative text that's used for accessibility. + /// + /// Set descriptive text that lets users know what the chip does. For example, + /// if a chip opens a hyperlink, write: "Opens a new browser tab and navigates + /// to the Google Chat developer documentation at + /// https://developers.google.com/workspace/chat". + core.String? altText; + + /// Whether the chip is in an inactive state and ignores user actions. + /// + /// Defaults to `false`. + core.bool? disabled; + + /// Whether the chip is in an active state and responds to user actions. + /// + /// Defaults to `true`. Deprecated. 
Use `disabled` instead. + @core.Deprecated( + 'Not supported. Member documentation may have more information.', + ) + core.bool? enabled; + + /// The icon image. + /// + /// If both `icon` and `text` are set, then the icon appears before the text. + GoogleAppsCardV1Icon? icon; + + /// The text displayed inside the chip. + core.String? label; + + /// The action to perform when a user clicks the chip, such as opening a + /// hyperlink or running a custom function. + /// + /// Optional. + GoogleAppsCardV1OnClick? onClick; + + GoogleAppsCardV1Chip({ + this.altText, + this.disabled, + this.enabled, + this.icon, + this.label, + this.onClick, + }); + + GoogleAppsCardV1Chip.fromJson(core.Map json_) + : this( + altText: json_['altText'] as core.String?, + disabled: json_['disabled'] as core.bool?, + enabled: json_['enabled'] as core.bool?, + icon: json_.containsKey('icon') + ? GoogleAppsCardV1Icon.fromJson( + json_['icon'] as core.Map) + : null, + label: json_['label'] as core.String?, + onClick: json_.containsKey('onClick') + ? GoogleAppsCardV1OnClick.fromJson( + json_['onClick'] as core.Map) + : null, + ); + + core.Map toJson() => { + if (altText != null) 'altText': altText!, + if (disabled != null) 'disabled': disabled!, + if (enabled != null) 'enabled': enabled!, + if (icon != null) 'icon': icon!, + if (label != null) 'label': label!, + if (onClick != null) 'onClick': onClick!, + }; +} + +/// A list of chips layed out horizontally, which can either scroll horizontally +/// or wrap to the next line. +/// +/// [Google Chat apps](https://developers.google.com/workspace/chat): +class GoogleAppsCardV1ChipList { + /// An array of chips. + core.List? chips; + + /// Specified chip list layout. + /// Possible string values are: + /// - "LAYOUT_UNSPECIFIED" : Don't use. Unspecified. + /// - "WRAPPED" : Default value. The chip list wraps to the next line if there + /// isn't enough horizontal space. + /// - "HORIZONTAL_SCROLLABLE" : The chips scroll horizontally if they don't + /// fit in the available space. + core.String? layout; + + GoogleAppsCardV1ChipList({ + this.chips, + this.layout, + }); + + GoogleAppsCardV1ChipList.fromJson(core.Map json_) + : this( + chips: (json_['chips'] as core.List?) + ?.map((value) => GoogleAppsCardV1Chip.fromJson( + value as core.Map)) + .toList(), + layout: json_['layout'] as core.String?, + ); + + core.Map toJson() => { + if (chips != null) 'chips': chips!, + if (layout != null) 'layout': layout!, + }; +} + +/// Represent an expand and collapse control. +/// +/// [Google Chat apps](https://developers.google.com/workspace/chat): +class GoogleAppsCardV1CollapseControl { + /// Define a customizable button to collapse the section. + /// + /// Both expand_button and collapse_button field must be set. Only one field + /// set will not take into effect. If this field isn't set, the default button + /// is used. + /// + /// Optional. + GoogleAppsCardV1Button? collapseButton; + + /// Define a customizable button to expand the section. + /// + /// Both expand_button and collapse_button field must be set. Only one field + /// set will not take into effect. If this field isn't set, the default button + /// is used. + /// + /// Optional. + GoogleAppsCardV1Button? expandButton; + + /// The horizontal alignment of the expand and collapse button. + /// Possible string values are: + /// - "HORIZONTAL_ALIGNMENT_UNSPECIFIED" : Don't use. Unspecified. + /// - "START" : Default value. Aligns widgets to the start position of the + /// column. For left-to-right layouts, aligns to the left. 
For right-to-left + /// layouts, aligns to the right. + /// - "CENTER" : Aligns widgets to the center of the column. + /// - "END" : Aligns widgets to the end position of the column. For + /// left-to-right layouts, aligns widgets to the right. For right-to-left + /// layouts, aligns widgets to the left. + core.String? horizontalAlignment; + + GoogleAppsCardV1CollapseControl({ + this.collapseButton, + this.expandButton, + this.horizontalAlignment, + }); + + GoogleAppsCardV1CollapseControl.fromJson(core.Map json_) + : this( + collapseButton: json_.containsKey('collapseButton') + ? GoogleAppsCardV1Button.fromJson(json_['collapseButton'] + as core.Map) + : null, + expandButton: json_.containsKey('expandButton') + ? GoogleAppsCardV1Button.fromJson( + json_['expandButton'] as core.Map) + : null, + horizontalAlignment: json_['horizontalAlignment'] as core.String?, + ); + + core.Map toJson() => { + if (collapseButton != null) 'collapseButton': collapseButton!, + if (expandButton != null) 'expandButton': expandButton!, + if (horizontalAlignment != null) + 'horizontalAlignment': horizontalAlignment!, + }; +} + /// A column. /// /// \[Google Workspace Add-ons and Chat @@ -4855,6 +5151,50 @@ class GoogleAppsCardV1MaterialIcon { }; } +/// [Developer Preview](https://developers.google.com/workspace/preview): A list +/// of widgets that can be displayed in a containing layout, such as a +/// `CarouselCard`. +/// +/// [Google Chat apps](https://developers.google.com/workspace/chat): +class GoogleAppsCardV1NestedWidget { + /// A button list widget. + GoogleAppsCardV1ButtonList? buttonList; + + /// An image widget. + GoogleAppsCardV1Image? image; + + /// A text paragraph widget. + GoogleAppsCardV1TextParagraph? textParagraph; + + GoogleAppsCardV1NestedWidget({ + this.buttonList, + this.image, + this.textParagraph, + }); + + GoogleAppsCardV1NestedWidget.fromJson(core.Map json_) + : this( + buttonList: json_.containsKey('buttonList') + ? GoogleAppsCardV1ButtonList.fromJson( + json_['buttonList'] as core.Map) + : null, + image: json_.containsKey('image') + ? GoogleAppsCardV1Image.fromJson( + json_['image'] as core.Map) + : null, + textParagraph: json_.containsKey('textParagraph') + ? GoogleAppsCardV1TextParagraph.fromJson( + json_['textParagraph'] as core.Map) + : null, + ); + + core.Map toJson() => { + if (buttonList != null) 'buttonList': buttonList!, + if (image != null) 'image': image!, + if (textParagraph != null) 'textParagraph': textParagraph!, + }; +} + /// Represents how to respond when users click an interactive element on a card, /// such as a button. /// @@ -4881,11 +5221,17 @@ class GoogleAppsCardV1OnClick { /// If specified, this `onClick` triggers an open link action. GoogleAppsCardV1OpenLink? openLink; + /// If specified, this `onClick` opens an overflow menu. + /// + /// [Google Chat apps](https://developers.google.com/workspace/chat): + GoogleAppsCardV1OverflowMenu? overflowMenu; + GoogleAppsCardV1OnClick({ this.action, this.card, this.openDynamicLinkAction, this.openLink, + this.overflowMenu, }); GoogleAppsCardV1OnClick.fromJson(core.Map json_) @@ -4906,6 +5252,10 @@ class GoogleAppsCardV1OnClick { ? GoogleAppsCardV1OpenLink.fromJson( json_['openLink'] as core.Map) : null, + overflowMenu: json_.containsKey('overflowMenu') + ? 
GoogleAppsCardV1OverflowMenu.fromJson( + json_['overflowMenu'] as core.Map) + : null, ); core.Map toJson() => { @@ -4914,6 +5264,7 @@ class GoogleAppsCardV1OnClick { if (openDynamicLinkAction != null) 'openDynamicLinkAction': openDynamicLinkAction!, if (openLink != null) 'openLink': openLink!, + if (overflowMenu != null) 'overflowMenu': overflowMenu!, }; } @@ -4969,6 +5320,90 @@ class GoogleAppsCardV1OpenLink { }; } +/// A widget that presents a pop-up menu with one or more actions that users can +/// invoke. +/// +/// For example, showing non-primary actions in a card. You can use this widget +/// when actions don't fit in the available space. To use, specify this widget +/// in the `OnClick` action of widgets that support it. For example, in a +/// `Button`. [Google Chat apps](https://developers.google.com/workspace/chat): +class GoogleAppsCardV1OverflowMenu { + /// The list of menu options. + /// + /// Required. + core.List? items; + + GoogleAppsCardV1OverflowMenu({ + this.items, + }); + + GoogleAppsCardV1OverflowMenu.fromJson(core.Map json_) + : this( + items: (json_['items'] as core.List?) + ?.map((value) => GoogleAppsCardV1OverflowMenuItem.fromJson( + value as core.Map)) + .toList(), + ); + + core.Map toJson() => { + if (items != null) 'items': items!, + }; +} + +/// An option that users can invoke in an overflow menu. +/// +/// [Google Chat apps](https://developers.google.com/workspace/chat): +class GoogleAppsCardV1OverflowMenuItem { + /// Whether the menu option is disabled. + /// + /// Defaults to false. + core.bool? disabled; + + /// The action invoked when a menu option is selected. + /// + /// This `OnClick` cannot contain an `OverflowMenu`, any specified + /// `OverflowMenu` is dropped and the menu item disabled. + /// + /// Required. + GoogleAppsCardV1OnClick? onClick; + + /// The icon displayed in front of the text. + GoogleAppsCardV1Icon? startIcon; + + /// The text that identifies or describes the item to users. + /// + /// Required. + core.String? text; + + GoogleAppsCardV1OverflowMenuItem({ + this.disabled, + this.onClick, + this.startIcon, + this.text, + }); + + GoogleAppsCardV1OverflowMenuItem.fromJson(core.Map json_) + : this( + disabled: json_['disabled'] as core.bool?, + onClick: json_.containsKey('onClick') + ? GoogleAppsCardV1OnClick.fromJson( + json_['onClick'] as core.Map) + : null, + startIcon: json_.containsKey('startIcon') + ? GoogleAppsCardV1Icon.fromJson( + json_['startIcon'] as core.Map) + : null, + text: json_['text'] as core.String?, + ); + + core.Map toJson() => { + if (disabled != null) 'disabled': disabled!, + if (onClick != null) 'onClick': onClick!, + if (startIcon != null) 'startIcon': startIcon!, + if (text != null) 'text': text!, + }; +} + /// For a `SelectionInput` widget that uses a multiselect menu, a data source /// from Google Workspace. /// @@ -5017,6 +5452,15 @@ class GoogleAppsCardV1PlatformDataSource { /// \[Google Workspace Add-ons and Chat /// apps\](https://developers.google.com/workspace/extend): class GoogleAppsCardV1Section { + /// Define the expand and collapse button of the section. + /// + /// This button will be shown only if the section is collapsible. If this + /// field isn't set, the default button is used. + /// [Google Chat apps](https://developers.google.com/workspace/chat): + /// + /// Optional. + GoogleAppsCardV1CollapseControl? collapseControl; + /// Indicates whether this section is collapsible. 
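The overflow menu added above hangs off a button's `onClick`; a standalone sketch using only the classes declared in this hunk (plus the assumed `url` field on `GoogleAppsCardV1OpenLink`) looks like this and simply prints the resulting JSON.

```dart
// Standalone sketch: a button whose onClick opens an overflow menu.
import 'dart:convert';

import 'package:googleapis/chat/v1.dart' as chat;

void main() {
  final moreActions = chat.GoogleAppsCardV1Button(
    text: 'More actions',
    onClick: chat.GoogleAppsCardV1OnClick(
      overflowMenu: chat.GoogleAppsCardV1OverflowMenu(
        items: [
          chat.GoogleAppsCardV1OverflowMenuItem(
            text: 'Open settings',
            // Per the docs above, this nested OnClick must not itself
            // contain another OverflowMenu.
            onClick: chat.GoogleAppsCardV1OnClick(
              openLink: chat.GoogleAppsCardV1OpenLink(
                url: 'https://example.com/settings',
              ),
            ),
          ),
          chat.GoogleAppsCardV1OverflowMenuItem(
            text: 'Archive',
            disabled: true, // Shown but not selectable.
            onClick: chat.GoogleAppsCardV1OnClick(
              openLink: chat.GoogleAppsCardV1OpenLink(
                url: 'https://example.com/archive',
              ),
            ),
          ),
        ],
      ),
    ),
  );
  print(jsonEncode(moreActions.toJson()));
}
```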
/// /// Collapsible sections hide some or all widgets, but users can expand the @@ -5050,6 +5494,7 @@ class GoogleAppsCardV1Section { core.List? widgets; GoogleAppsCardV1Section({ + this.collapseControl, this.collapsible, this.header, this.uncollapsibleWidgetsCount, @@ -5058,6 +5503,11 @@ class GoogleAppsCardV1Section { GoogleAppsCardV1Section.fromJson(core.Map json_) : this( + collapseControl: json_.containsKey('collapseControl') + ? GoogleAppsCardV1CollapseControl.fromJson( + json_['collapseControl'] + as core.Map) + : null, collapsible: json_['collapsible'] as core.bool?, header: json_['header'] as core.String?, uncollapsibleWidgetsCount: @@ -5069,6 +5519,7 @@ class GoogleAppsCardV1Section { ); core.Map toJson() => { + if (collapseControl != null) 'collapseControl': collapseControl!, if (collapsible != null) 'collapsible': collapsible!, if (header != null) 'header': header!, if (uncollapsibleWidgetsCount != null) @@ -5090,7 +5541,7 @@ class GoogleAppsCardV1Section { /// \[Google Workspace Add-ons and Chat /// apps\](https://developers.google.com/workspace/extend): class GoogleAppsCardV1SelectionInput { - /// An external data source, such as a relational data base. + /// An external data source, such as a relational database. GoogleAppsCardV1Action? externalDataSource; /// An array of selectable items. @@ -5113,11 +5564,13 @@ class GoogleAppsCardV1SelectionInput { core.int? multiSelectMaxSelectedItems; /// For multiselect menus, the number of text characters that a user inputs - /// before the app queries autocomplete and displays suggested items in the - /// menu. + /// before the menu returns suggested selection items. /// - /// If unspecified, defaults to 0 characters for static data sources and 3 - /// characters for external data sources. + /// If unset, the multiselect menu uses the following default values: * If the + /// menu uses a static array of `SelectionInput` items, defaults to 0 + /// characters and immediately populates items from the array. * If the menu + /// uses a dynamic data source (`multi_select_data_source`), defaults to 3 + /// characters before querying the data source to return suggested items. core.int? multiSelectMinQueryLength; /// The name that identifies the selection input in a form input event. @@ -5151,17 +5604,18 @@ class GoogleAppsCardV1SelectionInput { /// button. /// - "SWITCH" : A set of switches. Users can turn on one or more switches. /// - "DROPDOWN" : A dropdown menu. Users can select one item from the menu. - /// - "MULTI_SELECT" : A multiselect menu for static or dynamic data. From the - /// menu bar, users select one or more items. Users can also input values to - /// populate dynamic data. For example, users can start typing the name of a - /// Google Chat space and the widget autosuggests the space. To populate items - /// for a multiselect menu, you can use one of the following types of data - /// sources: * Static data: Items are specified as `SelectionItem` objects in - /// the widget. Up to 100 items. * Google Workspace data: Items are populated - /// using data from Google Workspace, such as Google Workspace users or Google - /// Chat spaces. * External data: Items are populated from an external data - /// source outside of Google Workspace. For examples of how to implement - /// multiselect menus, see + /// - "MULTI_SELECT" : A menu with a text box. Users can type and select one + /// or more items. For Google Workspace Add-ons, you must populate items using + /// a static array of `SelectionItem` objects. 
For Google Chat apps, you can + /// also populate items using a dynamic data source and autosuggest items as + /// users type in the menu. For example, users can start typing the name of a + /// Google Chat space and the widget autosuggests the space. To dynamically + /// populate items for a multiselect menu, use one of the following types of + /// data sources: * Google Workspace data: Items are populated using data from + /// Google Workspace, such as Google Workspace users or Google Chat spaces. * + /// External data: Items are populated from an external data source outside of + /// Google Workspace. For examples of how to implement multiselect menus for + /// Chat apps, see /// [Add a multiselect menu](https://developers.google.com/workspace/chat/design-interactive-card-dialog#multiselect-menu). /// \[Google Workspace Add-ons and Chat /// apps\](https://developers.google.com/workspace/extend): @@ -5227,7 +5681,7 @@ class GoogleAppsCardV1SelectionInput { /// An item that users can select in a selection input, such as a checkbox or /// switch. /// -/// \[Google Workspace Add-ons and Chat +/// Supports up to 100 items. \[Google Workspace Add-ons and Chat /// apps\](https://developers.google.com/workspace/extend): class GoogleAppsCardV1SelectionItem { /// For multiselect menus, a text description or label that's displayed below @@ -5557,19 +6011,32 @@ class GoogleAppsCardV1TextInput { /// \[Google Workspace Add-ons and Chat /// apps\](https://developers.google.com/workspace/extend): class GoogleAppsCardV1TextParagraph { + /// The maximum number of lines of text that are displayed in the widget. + /// + /// If the text exceeds the specified maximum number of lines, the excess + /// content is concealed behind a **show more** button. If the text is equal + /// or shorter than the specified maximum number of lines, a **show more** + /// button isn't displayed. The default value is 0, in which case all context + /// is displayed. Negative values are ignored. + /// [Google Chat apps](https://developers.google.com/workspace/chat): + core.int? maxLines; + /// The text that's shown in the widget. core.String? text; GoogleAppsCardV1TextParagraph({ + this.maxLines, this.text, }); GoogleAppsCardV1TextParagraph.fromJson(core.Map json_) : this( + maxLines: json_['maxLines'] as core.int?, text: json_['text'] as core.String?, ); core.Map toJson() => { + if (maxLines != null) 'maxLines': maxLines!, if (text != null) 'text': text!, }; } @@ -5589,6 +6056,24 @@ class GoogleAppsCardV1Widget { /// "https://example.com/calendar" } } } ] } ``` GoogleAppsCardV1ButtonList? buttonList; + /// A carousel contains a collection of nested widgets. + /// + /// For example, this is a JSON representation of a carousel that contains two + /// text paragraphs. ``` { "widgets": [ { "textParagraph": { "text": "First + /// text paragraph in the carousel." } }, { "textParagraph": { "text": "Second + /// text paragraph in the carousel." } } ] } ``` + GoogleAppsCardV1Carousel? carousel; + + /// A list of chips. + /// + /// For example, the following JSON creates two chips. The first is a text + /// chip and the second is an icon chip that opens a link: ``` "chipList": { + /// "chips": [ { "text": "Edit", "disabled": true, }, { "icon": { "knownIcon": + /// "INVITE", "altText": "check calendar" }, "onClick": { "openLink": { "url": + /// "https://example.com/calendar" } } } ] } ``` + /// [Google Chat apps](https://developers.google.com/workspace/chat): + GoogleAppsCardV1ChipList? chipList; + /// Displays up to 2 columns. 
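Several of the widgets introduced in this file compose naturally inside a section: a collapsible section with a custom `collapseControl`, a chip list, a carousel of nested text paragraphs, and a text paragraph capped with `maxLines`. The sketch below is standalone and only prints the section's JSON; all classes and fields it uses are declared in this diff.

```dart
// Standalone sketch: a collapsible section combining the new widgets.
import 'dart:convert';

import 'package:googleapis/chat/v1.dart' as chat;

void main() {
  final section = chat.GoogleAppsCardV1Section(
    header: 'Release notes',
    collapsible: true,
    uncollapsibleWidgetsCount: 1, // Keep the summary visible when collapsed.
    collapseControl: chat.GoogleAppsCardV1CollapseControl(
      horizontalAlignment: 'CENTER',
      // Both buttons must be set for custom controls to take effect.
      expandButton: chat.GoogleAppsCardV1Button(text: 'Show more'),
      collapseButton: chat.GoogleAppsCardV1Button(text: 'Show less'),
    ),
    widgets: [
      chat.GoogleAppsCardV1Widget(
        textParagraph: chat.GoogleAppsCardV1TextParagraph(
          text: 'A long summary that is truncated behind "show more".',
          maxLines: 2,
        ),
      ),
      chat.GoogleAppsCardV1Widget(
        chipList: chat.GoogleAppsCardV1ChipList(
          layout: 'HORIZONTAL_SCROLLABLE',
          chips: [
            chat.GoogleAppsCardV1Chip(label: 'Docs'),
            chat.GoogleAppsCardV1Chip(label: 'Changelog', disabled: true),
          ],
        ),
      ),
      chat.GoogleAppsCardV1Widget(
        carousel: chat.GoogleAppsCardV1Carousel(
          carouselCards: [
            chat.GoogleAppsCardV1CarouselCard(widgets: [
              chat.GoogleAppsCardV1NestedWidget(
                textParagraph: chat.GoogleAppsCardV1TextParagraph(
                    text: 'First carousel card'),
              ),
            ]),
            chat.GoogleAppsCardV1CarouselCard(widgets: [
              chat.GoogleAppsCardV1NestedWidget(
                textParagraph: chat.GoogleAppsCardV1TextParagraph(
                    text: 'Second carousel card'),
              ),
            ]),
          ],
        ),
      ),
    ],
  );
  print(const JsonEncoder.withIndent('  ').convert(section.toJson()));
}
```

Serializing with `toJson()` produces the same JSON shape as the samples embedded in these doc comments, which makes it easy to compare against the card builder output.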
/// /// To include more than 2 columns, or to use rows, use the `Grid` widget. For @@ -5696,6 +6181,8 @@ class GoogleAppsCardV1Widget { GoogleAppsCardV1Widget({ this.buttonList, + this.carousel, + this.chipList, this.columns, this.dateTimePicker, this.decoratedText, @@ -5714,6 +6201,14 @@ class GoogleAppsCardV1Widget { ? GoogleAppsCardV1ButtonList.fromJson( json_['buttonList'] as core.Map) : null, + carousel: json_.containsKey('carousel') + ? GoogleAppsCardV1Carousel.fromJson( + json_['carousel'] as core.Map) + : null, + chipList: json_.containsKey('chipList') + ? GoogleAppsCardV1ChipList.fromJson( + json_['chipList'] as core.Map) + : null, columns: json_.containsKey('columns') ? GoogleAppsCardV1Columns.fromJson( json_['columns'] as core.Map) @@ -5755,6 +6250,8 @@ class GoogleAppsCardV1Widget { core.Map toJson() => { if (buttonList != null) 'buttonList': buttonList!, + if (carousel != null) 'carousel': carousel!, + if (chipList != null) 'chipList': chipList!, if (columns != null) 'columns': columns!, if (dateTimePicker != null) 'dateTimePicker': dateTimePicker!, if (decoratedText != null) 'decoratedText': decoratedText!, @@ -5777,6 +6274,11 @@ class GoogleAppsCardV1Widgets { /// ButtonList widget. GoogleAppsCardV1ButtonList? buttonList; + /// ChipList widget. + /// + /// [Google Chat apps](https://developers.google.com/workspace/chat): + GoogleAppsCardV1ChipList? chipList; + /// DateTimePicker widget. GoogleAppsCardV1DateTimePicker? dateTimePicker; @@ -5797,6 +6299,7 @@ class GoogleAppsCardV1Widgets { GoogleAppsCardV1Widgets({ this.buttonList, + this.chipList, this.dateTimePicker, this.decoratedText, this.image, @@ -5811,6 +6314,10 @@ class GoogleAppsCardV1Widgets { ? GoogleAppsCardV1ButtonList.fromJson( json_['buttonList'] as core.Map) : null, + chipList: json_.containsKey('chipList') + ? GoogleAppsCardV1ChipList.fromJson( + json_['chipList'] as core.Map) + : null, dateTimePicker: json_.containsKey('dateTimePicker') ? GoogleAppsCardV1DateTimePicker.fromJson(json_['dateTimePicker'] as core.Map) @@ -5839,6 +6346,7 @@ class GoogleAppsCardV1Widgets { core.Map toJson() => { if (buttonList != null) 'buttonList': buttonList!, + if (chipList != null) 'chipList': chipList!, if (dateTimePicker != null) 'dateTimePicker': dateTimePicker!, if (decoratedText != null) 'decoratedText': decoratedText!, if (image != null) 'image': image!, @@ -6341,6 +6849,8 @@ class Membership { /// /// Reading or mutating memberships for Google Groups requires /// [user authentication](https://developers.google.com/workspace/chat/authenticate-authorize-chat-user). + /// + /// Optional. Group? groupMember; /// The Google Chat user or app the membership corresponds to. @@ -6350,11 +6860,14 @@ class Membership { /// the output populates the /// [user](https://developers.google.com/workspace/chat/api/reference/rest/v1/User) /// `name` and `type`. + /// + /// Optional. User? member; - /// Resource name of the membership, assigned by the server. + /// Identifier. /// - /// Format: `spaces/{space}/members/{member}` + /// Resource name of the membership, assigned by the server. Format: + /// `spaces/{space}/members/{member}` core.String? name; /// User's role within a Chat space, which determines their permitted actions @@ -6501,9 +7014,13 @@ class MembershipBatchUpdatedEventData { class MembershipCount { /// Count of human users that have directly joined the space, not counting /// users joined by having membership in a joined group. + /// + /// Output only. core.int? 
joinedDirectHumanUserCount; /// Count of all groups that have directly joined the space. + /// + /// Output only. core.int? joinedGroupCount; MembershipCount({ @@ -6609,6 +7126,8 @@ class Message { /// [Add interactive widgets at the bottom of a message](https://developers.google.com/workspace/chat/create-messages#add-accessory-widgets). /// Creating a message with accessory widgets requires /// [app authentication](https://developers.google.com/workspace/chat/authenticate-authorize-chat-app). + /// + /// Optional. core.List? accessoryWidgets; /// Input only. @@ -6633,6 +7152,8 @@ class Message { core.List? attachedGifs; /// User-uploaded attachment. + /// + /// Optional. core.List? attachment; /// Deprecated: Use `cards_v2` instead. @@ -6655,6 +7176,8 @@ class Message { /// contains cards, see /// [Send a message](https://developers.google.com/workspace/chat/create-messages). /// [Card builder](https://addons.gsuite.google.com/uikit/builder) + /// + /// Optional. core.List? cardsV2; /// A custom ID for the message. @@ -6699,6 +7222,8 @@ class Message { /// A plain-text description of the message's cards, used when the actual /// cards can't be displayed—for example, mobile notifications. + /// + /// Optional. core.String? fallbackText; /// Contains the message `text` with markups added to communicate formatting. @@ -6735,11 +7260,11 @@ class Message { /// Output only. MatchedUrl? matchedUrl; - /// Resource name of the message. + /// Identifier. /// - /// Format: `spaces/{space}/messages/{message}` Where `{space}` is the ID of - /// the space where the message is posted and `{message}` is a system-assigned - /// ID for the message. For example, + /// Resource name of the message. Format: `spaces/{space}/messages/{message}` + /// Where `{space}` is the ID of the space where the message is posted and + /// `{message}` is a system-assigned ID for the message. For example, /// `spaces/AAAAAAAAAAA/messages/BBBBBBBBBBB.BBBBBBBBBBB`. If you set a custom /// ID when you create a message, you can use this ID to specify the message /// in a request by replacing `{message}` with the value from the @@ -6761,7 +7286,7 @@ class Message { /// For details, see /// [Send a message privately](https://developers.google.com/workspace/chat/create-messages#private). /// - /// Immutable. + /// Optional. Immutable. User? privateMessageViewer; /// Information about a message that's quoted by a Google Chat user in a @@ -6790,9 +7315,11 @@ class Message { /// If your Chat app /// [authenticates as a user](https://developers.google.com/workspace/chat/authenticate-authorize-chat-user), - /// the output populates the + /// the output only populates the /// [space](https://developers.google.com/workspace/chat/api/reference/rest/v1/spaces) /// `name`. + /// + /// Output only. Space? space; /// Plain-text body of the message. @@ -6803,6 +7330,8 @@ class Message { /// user\](https://developers.google.com/workspace/chat/format-messages#messages-@mention), /// or everyone in the space. To learn about creating text messages, see /// [Send a message](https://developers.google.com/workspace/chat/create-messages). + /// + /// Optional. core.String? text; /// The thread the message belongs to. @@ -7164,9 +7693,13 @@ class OpenLink { /// Represents a space permission setting. class PermissionSetting { /// Whether spaces managers have this permission. + /// + /// Optional. core.bool? managersAllowed; /// Whether non-manager members have this permission. + /// + /// Optional. core.bool? 
membersAllowed; PermissionSetting({ @@ -7193,15 +7726,23 @@ class PermissionSetting { /// `PredefinedPermissionSettings` field in your request. class PermissionSettings { /// Setting for managing apps in a space. + /// + /// Optional. PermissionSetting? manageApps; /// Setting for managing members and groups in a space. + /// + /// Optional. PermissionSetting? manageMembersAndGroups; /// Setting for managing webhooks in a space. + /// + /// Optional. PermissionSetting? manageWebhooks; /// Setting for updating space name, avatar, description and guidelines. + /// + /// Optional. PermissionSetting? modifySpaceDetails; /// Setting for posting messages in a space. @@ -7210,12 +7751,18 @@ class PermissionSettings { PermissionSetting? postMessages; /// Setting for replying to messages in a space. + /// + /// Optional. PermissionSetting? replyMessages; /// Setting for toggling space history on and off. + /// + /// Optional. PermissionSetting? toggleHistory; /// Setting for using @all in a space. + /// + /// Optional. PermissionSetting? useAtMentionAll; PermissionSettings({ @@ -7314,11 +7861,14 @@ class QuotedMessageMetadata { /// A reaction to a message. class Reaction { /// The emoji used in the reaction. + /// + /// Required. Emoji? emoji; - /// The resource name of the reaction. + /// Identifier. /// - /// Format: `spaces/{space}/messages/{message}/reactions/{reaction}` + /// The resource name of the reaction. Format: + /// `spaces/{space}/messages/{message}/reactions/{reaction}` core.String? name; /// The user who created the reaction. @@ -7599,7 +8149,7 @@ class SetUpSpaceRequest { /// The Google Chat users or groups to invite to join the space. /// /// Omit the calling user, as they are added automatically. The set currently - /// allows up to 20 memberships (in addition to the caller). For human + /// allows up to 49 memberships (in addition to the caller). For human /// membership, the `Membership.member` field must contain a `user` with /// `name` populated (format: `users/{user}`) and `type` set to /// `User.Type.HUMAN`. You can only add human users when setting up a space @@ -7791,6 +8341,8 @@ class Space { /// a different `displayName`. An existing space within the Google Workspace /// organization might already use this display name. For direct messages, /// this field might be empty. Supports up to 128 characters. + /// + /// Optional. core.String? displayName; /// Whether this space permits any Google Chat user as a member. @@ -7801,7 +8353,7 @@ class Space { /// default, a space created by a consumer account permits any Google Chat /// user. For existing spaces, this field is output only. /// - /// Immutable. + /// Optional. Immutable. core.bool? externalUserAllowed; /// Whether this space is created in `Import Mode` as part of a data migration @@ -7814,6 +8366,16 @@ class Space { /// Optional. core.bool? importMode; + /// The time when the space will be automatically deleted by the system if it + /// remains in import mode. + /// + /// Each space created in import mode must exit this mode before this expire + /// time using `spaces.completeImport`. This field is only populated for + /// spaces that were created with import mode. + /// + /// Output only. + core.String? importModeExpireTime; + /// Timestamp of the last message in the space. /// /// Output only. @@ -7827,10 +8389,11 @@ class Space { /// Output only. MembershipCount? membershipCount; - /// Resource name of the space. + /// Identifier. 
/// - /// Format: `spaces/{space}` Where `{space}` represents the system-assigned ID - /// for the space. You can obtain the space ID by calling the + /// Resource name of the space. Format: `spaces/{space}` Where `{space}` + /// represents the system-assigned ID for the space. You can obtain the space + /// ID by calling the /// \[`spaces.list()`\](https://developers.google.com/workspace/chat/api/reference/rest/v1/spaces/list) /// method or from the space URL. For example, if the space URL is /// `https://mail.google.com/mail/u/0/#chat/space/AAAAAAAAA`, the space ID is @@ -7867,9 +8430,13 @@ class Space { core.bool? singleUserBotDm; /// Details about the space including description and rules. + /// + /// Optional. SpaceDetails? spaceDetails; /// The message history state for messages and threads in this space. + /// + /// Optional. /// Possible string values are: /// - "HISTORY_STATE_UNSPECIFIED" : Default value. Do not use. /// - "HISTORY_OFF" : History off. @@ -7897,6 +8464,8 @@ class Space { /// /// Required when creating a space or updating the space type of a space. /// Output only for other usage. + /// + /// Optional. /// Possible string values are: /// - "SPACE_TYPE_UNSPECIFIED" : Reserved. /// - "SPACE" : A place where people send messages, share files, and @@ -7945,6 +8514,7 @@ class Space { this.displayName, this.externalUserAllowed, this.importMode, + this.importModeExpireTime, this.lastActiveTime, this.membershipCount, this.name, @@ -7971,6 +8541,7 @@ class Space { displayName: json_['displayName'] as core.String?, externalUserAllowed: json_['externalUserAllowed'] as core.bool?, importMode: json_['importMode'] as core.bool?, + importModeExpireTime: json_['importModeExpireTime'] as core.String?, lastActiveTime: json_['lastActiveTime'] as core.String?, membershipCount: json_.containsKey('membershipCount') ? MembershipCount.fromJson(json_['membershipCount'] @@ -8004,6 +8575,8 @@ class Space { if (externalUserAllowed != null) 'externalUserAllowed': externalUserAllowed!, if (importMode != null) 'importMode': importMode!, + if (importModeExpireTime != null) + 'importModeExpireTime': importModeExpireTime!, if (lastActiveTime != null) 'lastActiveTime': lastActiveTime!, if (membershipCount != null) 'membershipCount': membershipCount!, if (name != null) 'name': name!, @@ -8538,9 +9111,9 @@ class TextParagraph { /// \[`messageReplyOption`\](https://developers.google.com/workspace/chat/api/reference/rest/v1/spaces.messages/create#messagereplyoption) /// field to determine what happens if no matching thread is found. class Thread { - /// Resource name of the thread. + /// Identifier. /// - /// Example: `spaces/{space}/threads/{thread}` + /// Resource name of the thread. Example: `spaces/{space}/threads/{thread}` core.String? name; /// Input for creating or updating a thread. @@ -8602,9 +9175,8 @@ class ThreadReadState { }; } -/// The response of the updated widget. -/// -/// Used to provide autocomplete options for a widget. +/// For `selectionInput` widgets, returns autocomplete suggestions for a +/// multiselect menu. class UpdatedWidget { /// List of widget autocomplete results SelectionItems? 
suggestions; diff --git a/generated/googleapis/lib/chromemanagement/v1.dart b/generated/googleapis/lib/chromemanagement/v1.dart index 1d54fae64..35139df29 100644 --- a/generated/googleapis/lib/chromemanagement/v1.dart +++ b/generated/googleapis/lib/chromemanagement/v1.dart @@ -27,6 +27,7 @@ /// - [CustomersAppsAndroidResource] /// - [CustomersAppsChromeResource] /// - [CustomersAppsWebResource] +/// - [CustomersProfilesResource] /// - [CustomersReportsResource] /// - [CustomersTelemetryResource] /// - [CustomersTelemetryDevicesResource] @@ -82,6 +83,8 @@ class CustomersResource { final commons.ApiRequester _requester; CustomersAppsResource get apps => CustomersAppsResource(_requester); + CustomersProfilesResource get profiles => + CustomersProfilesResource(_requester); CustomersReportsResource get reports => CustomersReportsResource(_requester); CustomersTelemetryResource get telemetry => CustomersTelemetryResource(_requester); @@ -423,6 +426,169 @@ class CustomersAppsWebResource { } } +class CustomersProfilesResource { + final commons.ApiRequester _requester; + + CustomersProfilesResource(commons.ApiRequester client) : _requester = client; + + /// Deletes the data collected from a Chrome browser profile. + /// + /// Request parameters: + /// + /// [name] - Required. Format: + /// customers/{customer_id}/profiles/{profile_permanent_id} + /// Value must have pattern `^customers/\[^/\]+/profiles/\[^/\]+$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [GoogleProtobufEmpty]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future delete( + core.String name, { + core.String? $fields, + }) async { + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name'); + + final response_ = await _requester.request( + url_, + 'DELETE', + queryParams: queryParams_, + ); + return GoogleProtobufEmpty.fromJson( + response_ as core.Map); + } + + /// Gets a Chrome browser profile with customer ID and profile permanent ID. + /// + /// Request parameters: + /// + /// [name] - Required. Format: + /// customers/{customer_id}/profiles/{profile_permanent_id} + /// Value must have pattern `^customers/\[^/\]+/profiles/\[^/\]+$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [GoogleChromeManagementVersionsV1ChromeBrowserProfile]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future get( + core.String name, { + core.String? $fields, + }) async { + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name'); + + final response_ = await _requester.request( + url_, + 'GET', + queryParams: queryParams_, + ); + return GoogleChromeManagementVersionsV1ChromeBrowserProfile.fromJson( + response_ as core.Map); + } + + /// Lists Chrome browser profiles of a customer based on the given search and + /// sorting criteria. + /// + /// Request parameters: + /// + /// [parent] - Required. 
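The new `CustomersProfilesResource` can be exercised with a hedged sketch like the one below, using the `list` method documented after this point. `ChromeManagementApi`, the `chrome.management.profiles.readonly` scope, and the `chromeBrowserProfiles` response field are assumptions about the generated library; the filter and ordering fields follow the `list` documentation.

```dart
// Hedged sketch: list managed Chrome browser profiles, newest report first.
import 'package:googleapis/chromemanagement/v1.dart' as cm;
import 'package:googleapis_auth/auth_io.dart' as auth;

Future<void> main() async {
  final client = await auth.clientViaApplicationDefaultCredentials(
    // Scope name assumed for the profiles API.
    scopes: [
      'https://www.googleapis.com/auth/chrome.management.profiles.readonly',
    ],
  );
  try {
    final api = cm.ChromeManagementApi(client); // Assumed generated class name.
    final response = await api.customers.profiles.list(
      'customers/my_customer', // Placeholder customer resource name.
      filter: 'policy_count >= 1', // Integer field with >= per the docs below.
      orderBy: 'last_status_report_time desc',
      pageSize: 100,
    );
    // `chromeBrowserProfiles` is an assumed field name on the list response.
    for (final profile in response.chromeBrowserProfiles ?? []) {
      print('${profile.name}: ${profile.displayName}');
    }
  } finally {
    client.close();
  }
}
```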
Format: customers/{customer_id} + /// Value must have pattern `^customers/\[^/\]+$`. + /// + /// [filter] - Optional. The filter used to filter profiles. The following + /// fields can be used in the filter: - profile_id - display_name - user_email + /// - last_activity_time - last_policy_sync_time - last_status_report_time - + /// first_enrollment_time - os_platform_type - os_version - browser_version - + /// browser_channel - policy_count - extension_count - identity_provider - + /// affiliation_state - ouId Any of the above fields can be used to specify a + /// filter, and filtering by multiple fields is supported with AND operator. + /// String type fields and enum type fields support '=' and '!=' operators. + /// The integer type and the timestamp type fields support '=', '!=', '\<', + /// '\>', '\<=' and '\>=' operators. Timestamps expect an RFC-3339 formatted + /// string (e.g. 2012-04-21T11:30:00-04:00). Wildcard '*' can be used with a + /// string type field filter. In addition, string literal filtering is also + /// supported, for example, 'ABC' as a filter maps to a filter that checks if + /// any of the filterable string type fields contains 'ABC'. Organization unit + /// number can be used as a filtering criteria here by specifying 'ouId = + /// ${your_org_unit_id}', please note that only single OU ID matching is + /// supported. + /// + /// [orderBy] - Optional. The fields used to specify the ordering of the + /// results. The supported fields are: - profile_id - display_name - + /// user_email - last_activity_time - last_policy_sync_time - + /// last_status_report_time - first_enrollment_time - os_platform_type - + /// os_version - browser_version - browser_channel - policy_count - + /// extension_count - identity_provider - affiliation_state By default, + /// sorting is in ascending order, to specify descending order for a field, a + /// suffix " desc" should be added to the field name. The default ordering is + /// the descending order of last_status_report_time. + /// + /// [pageSize] - Optional. The maximum number of profiles to return. The + /// default page size is 100 if page_size is unspecified, and the maximum page + /// size allowed is 200. + /// + /// [pageToken] - Optional. The page token used to retrieve a specific page of + /// the listing request. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a + /// [GoogleChromeManagementVersionsV1ListChromeBrowserProfilesResponse]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async + .Future + list( + core.String parent, { + core.String? filter, + core.String? orderBy, + core.int? pageSize, + core.String? pageToken, + core.String? 
$fields, + }) async { + final queryParams_ = >{ + if (filter != null) 'filter': [filter], + if (orderBy != null) 'orderBy': [orderBy], + if (pageSize != null) 'pageSize': ['${pageSize}'], + if (pageToken != null) 'pageToken': [pageToken], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/profiles'; + + final response_ = await _requester.request( + url_, + 'GET', + queryParams: queryParams_, + ); + return GoogleChromeManagementVersionsV1ListChromeBrowserProfilesResponse + .fromJson(response_ as core.Map); + } +} + class CustomersReportsResource { final commons.ApiRequester _requester; @@ -780,11 +946,11 @@ class CustomersReportsResource { /// syntax. Note: OR operations are not supported in this filter. Supported /// filter fields: * app_name * app_type * install_type * /// number_of_permissions * total_install_count * latest_profile_active_date * - /// permission_name * app_id * manifest_versions + /// permission_name * app_id * manifest_versions * risk_score /// /// [orderBy] - Field used to order results. Supported order by fields: * /// app_name * app_type * install_type * number_of_permissions * - /// total_install_count * app_id * manifest_versions + /// total_install_count * app_id * manifest_versions * risk_score /// /// [orgUnitId] - The ID of the organizational unit. /// @@ -1899,16 +2065,16 @@ class GoogleChromeManagementV1AppUsageData { /// - "APPLICATION_TYPE_WEB" : Application type web. /// - "APPLICATION_TYPE_MAC_OS" : Application type Mac OS. /// - "APPLICATION_TYPE_PLUGIN_VM" : Application type Plugin VM. - /// - "APPLICATION_TYPE_STANDALONE_BROWSER" : Application type standalone - /// browser (Lacros browser app). + /// - "APPLICATION_TYPE_STANDALONE_BROWSER" : Deprecated. This vaule is no + /// longer used. Application type standalone browser (Lacros browser app). /// - "APPLICATION_TYPE_REMOTE" : Application type remote. /// - "APPLICATION_TYPE_BOREALIS" : Application type borealis. /// - "APPLICATION_TYPE_SYSTEM_WEB" : Application type system web. - /// - "APPLICATION_TYPE_STANDALONE_BROWSER_CHROME_APP" : Application type - /// standalone browser chrome app (hosted in Lacros). + /// - "APPLICATION_TYPE_STANDALONE_BROWSER_CHROME_APP" : Deprecated. This + /// vaule is no longer used. Application type standalone browser chrome app. /// - "APPLICATION_TYPE_EXTENSION" : Application type extension. - /// - "APPLICATION_TYPE_STANDALONE_BROWSER_EXTENSION" : Application type - /// standalone browser extension. + /// - "APPLICATION_TYPE_STANDALONE_BROWSER_EXTENSION" : Deprecated. This vaule + /// is no longer used. Application type standalone browser extension. /// - "APPLICATION_TYPE_BRUSCHETTA" : Application type bruschetta. core.String? appType; @@ -4162,6 +4328,11 @@ class GoogleChromeManagementV1InstalledApp { /// Output only. core.List? permissions; + /// If available, the risk assessment data about this extension. + /// + /// Output only. + GoogleChromeManagementV1RiskAssessmentData? riskAssessment; + GoogleChromeManagementV1InstalledApp({ this.appId, this.appInstallType, @@ -4174,6 +4345,7 @@ class GoogleChromeManagementV1InstalledApp { this.homepageUri, this.osUserCount, this.permissions, + this.riskAssessment, }); GoogleChromeManagementV1InstalledApp.fromJson(core.Map json_) @@ -4191,6 +4363,11 @@ class GoogleChromeManagementV1InstalledApp { permissions: (json_['permissions'] as core.List?) ?.map((value) => value as core.String) .toList(), + riskAssessment: json_.containsKey('riskAssessment') + ? 
GoogleChromeManagementV1RiskAssessmentData.fromJson( + json_['riskAssessment'] + as core.Map) + : null, ); core.Map toJson() => { @@ -4206,6 +4383,7 @@ class GoogleChromeManagementV1InstalledApp { if (homepageUri != null) 'homepageUri': homepageUri!, if (osUserCount != null) 'osUserCount': osUserCount!, if (permissions != null) 'permissions': permissions!, + if (riskAssessment != null) 'riskAssessment': riskAssessment!, }; } @@ -5094,6 +5272,122 @@ class GoogleChromeManagementV1PrinterReport { }; } +/// Risk assessment for a Chrome extension. +class GoogleChromeManagementV1RiskAssessment { + /// Risk assessment for the extension. + /// + /// Currently, this is a numerical value, and its interpretation is specific + /// to each risk assessment provider. + core.String? assessment; + + /// A URL that a user can navigate to for more information about the risk + /// assessment. + core.String? detailsUrl; + + /// The version of the extension that this assessment applies to. + core.String? version; + + GoogleChromeManagementV1RiskAssessment({ + this.assessment, + this.detailsUrl, + this.version, + }); + + GoogleChromeManagementV1RiskAssessment.fromJson(core.Map json_) + : this( + assessment: json_['assessment'] as core.String?, + detailsUrl: json_['detailsUrl'] as core.String?, + version: json_['version'] as core.String?, + ); + + core.Map toJson() => { + if (assessment != null) 'assessment': assessment!, + if (detailsUrl != null) 'detailsUrl': detailsUrl!, + if (version != null) 'version': version!, + }; +} + +/// Risk assessment data about an extension/app. +class GoogleChromeManagementV1RiskAssessmentData { + /// Individual risk assessments. + core.List? entries; + + /// Overall assessed risk level across all entries. + /// + /// This will be the highest risk level from all entries. + /// Possible string values are: + /// - "RISK_LEVEL_UNSPECIFIED" : Risk level not specified. + /// - "RISK_LEVEL_LOW" : Extension that represents a low risk. + /// - "RISK_LEVEL_MEDIUM" : Extension that represents a medium risk. + /// - "RISK_LEVEL_HIGH" : Extension that represents a high risk. + core.String? overallRiskLevel; + + GoogleChromeManagementV1RiskAssessmentData({ + this.entries, + this.overallRiskLevel, + }); + + GoogleChromeManagementV1RiskAssessmentData.fromJson(core.Map json_) + : this( + entries: (json_['entries'] as core.List?) + ?.map((value) => + GoogleChromeManagementV1RiskAssessmentEntry.fromJson( + value as core.Map)) + .toList(), + overallRiskLevel: json_['overallRiskLevel'] as core.String?, + ); + + core.Map toJson() => { + if (entries != null) 'entries': entries!, + if (overallRiskLevel != null) 'overallRiskLevel': overallRiskLevel!, + }; +} + +/// One risk assessment entry. +class GoogleChromeManagementV1RiskAssessmentEntry { + /// The risk assessment provider from which this entry comes from. + /// Possible string values are: + /// - "RISK_ASSESSMENT_PROVIDER_UNSPECIFIED" : Default value when no provider + /// is specified. + /// - "RISK_ASSESSMENT_PROVIDER_CRXCAVATOR" : CRXcavator. + /// - "RISK_ASSESSMENT_PROVIDER_SPIN_AI" : Spin.Ai. + core.String? provider; + + /// The details of the provider's risk assessment. + GoogleChromeManagementV1RiskAssessment? riskAssessment; + + /// The bucketed risk level for the risk assessment. + /// Possible string values are: + /// - "RISK_LEVEL_UNSPECIFIED" : Risk level not specified. + /// - "RISK_LEVEL_LOW" : Extension that represents a low risk. + /// - "RISK_LEVEL_MEDIUM" : Extension that represents a medium risk. 
+ /// - "RISK_LEVEL_HIGH" : Extension that represents a high risk. + core.String? riskLevel; + + GoogleChromeManagementV1RiskAssessmentEntry({ + this.provider, + this.riskAssessment, + this.riskLevel, + }); + + GoogleChromeManagementV1RiskAssessmentEntry.fromJson(core.Map json_) + : this( + provider: json_['provider'] as core.String?, + riskAssessment: json_.containsKey('riskAssessment') + ? GoogleChromeManagementV1RiskAssessment.fromJson( + json_['riskAssessment'] + as core.Map) + : null, + riskLevel: json_['riskLevel'] as core.String?, + ); + + core.Map toJson() => { + if (provider != null) 'provider': provider!, + if (riskAssessment != null) 'riskAssessment': riskAssessment!, + if (riskLevel != null) 'riskLevel': riskLevel!, + }; +} + /// Runtime counters retrieved from CPU. /// /// Currently the runtime counters telemetry is only supported by Intel vPro PSR @@ -5333,16 +5627,16 @@ class GoogleChromeManagementV1TelemetryAppInstallEvent { /// - "APPLICATION_TYPE_WEB" : Application type web. /// - "APPLICATION_TYPE_MAC_OS" : Application type Mac OS. /// - "APPLICATION_TYPE_PLUGIN_VM" : Application type Plugin VM. - /// - "APPLICATION_TYPE_STANDALONE_BROWSER" : Application type standalone - /// browser (Lacros browser app). + /// - "APPLICATION_TYPE_STANDALONE_BROWSER" : Deprecated. This vaule is no + /// longer used. Application type standalone browser (Lacros browser app). /// - "APPLICATION_TYPE_REMOTE" : Application type remote. /// - "APPLICATION_TYPE_BOREALIS" : Application type borealis. /// - "APPLICATION_TYPE_SYSTEM_WEB" : Application type system web. - /// - "APPLICATION_TYPE_STANDALONE_BROWSER_CHROME_APP" : Application type - /// standalone browser chrome app (hosted in Lacros). + /// - "APPLICATION_TYPE_STANDALONE_BROWSER_CHROME_APP" : Deprecated. This + /// vaule is no longer used. Application type standalone browser chrome app. /// - "APPLICATION_TYPE_EXTENSION" : Application type extension. - /// - "APPLICATION_TYPE_STANDALONE_BROWSER_EXTENSION" : Application type - /// standalone browser extension. + /// - "APPLICATION_TYPE_STANDALONE_BROWSER_EXTENSION" : Deprecated. This vaule + /// is no longer used. Application type standalone browser extension. /// - "APPLICATION_TYPE_BRUSCHETTA" : Application type bruschetta. core.String? appType; @@ -5464,6 +5758,8 @@ class GoogleChromeManagementV1TelemetryAppLaunchEvent { /// experimental feature Sparky. /// - "APPLICATION_LAUNCH_SOURCE_NAVIGATION_CAPTURING" : Application launched /// from navigation capturing. + /// - "APPLICATION_LAUNCH_SOURCE_WEB_INSTALL_API" : Application launched from + /// web install API. core.String? appLaunchSource; /// Type of app. @@ -5476,16 +5772,16 @@ class GoogleChromeManagementV1TelemetryAppLaunchEvent { /// - "APPLICATION_TYPE_WEB" : Application type web. /// - "APPLICATION_TYPE_MAC_OS" : Application type Mac OS. /// - "APPLICATION_TYPE_PLUGIN_VM" : Application type Plugin VM. - /// - "APPLICATION_TYPE_STANDALONE_BROWSER" : Application type standalone - /// browser (Lacros browser app). + /// - "APPLICATION_TYPE_STANDALONE_BROWSER" : Deprecated. This vaule is no + /// longer used. Application type standalone browser (Lacros browser app). /// - "APPLICATION_TYPE_REMOTE" : Application type remote. /// - "APPLICATION_TYPE_BOREALIS" : Application type borealis. /// - "APPLICATION_TYPE_SYSTEM_WEB" : Application type system web. - /// - "APPLICATION_TYPE_STANDALONE_BROWSER_CHROME_APP" : Application type - /// standalone browser chrome app (hosted in Lacros). 
+ /// - "APPLICATION_TYPE_STANDALONE_BROWSER_CHROME_APP" : Deprecated. This + /// vaule is no longer used. Application type standalone browser chrome app. /// - "APPLICATION_TYPE_EXTENSION" : Application type extension. - /// - "APPLICATION_TYPE_STANDALONE_BROWSER_EXTENSION" : Application type - /// standalone browser extension. + /// - "APPLICATION_TYPE_STANDALONE_BROWSER_EXTENSION" : Deprecated. This vaule + /// is no longer used. Application type standalone browser extension. /// - "APPLICATION_TYPE_BRUSCHETTA" : Application type bruschetta. core.String? appType; @@ -5527,16 +5823,16 @@ class GoogleChromeManagementV1TelemetryAppUninstallEvent { /// - "APPLICATION_TYPE_WEB" : Application type web. /// - "APPLICATION_TYPE_MAC_OS" : Application type Mac OS. /// - "APPLICATION_TYPE_PLUGIN_VM" : Application type Plugin VM. - /// - "APPLICATION_TYPE_STANDALONE_BROWSER" : Application type standalone - /// browser (Lacros browser app). + /// - "APPLICATION_TYPE_STANDALONE_BROWSER" : Deprecated. This vaule is no + /// longer used. Application type standalone browser (Lacros browser app). /// - "APPLICATION_TYPE_REMOTE" : Application type remote. /// - "APPLICATION_TYPE_BOREALIS" : Application type borealis. /// - "APPLICATION_TYPE_SYSTEM_WEB" : Application type system web. - /// - "APPLICATION_TYPE_STANDALONE_BROWSER_CHROME_APP" : Application type - /// standalone browser chrome app (hosted in Lacros). + /// - "APPLICATION_TYPE_STANDALONE_BROWSER_CHROME_APP" : Deprecated. This + /// vaule is no longer used. Application type standalone browser chrome app. /// - "APPLICATION_TYPE_EXTENSION" : Application type extension. - /// - "APPLICATION_TYPE_STANDALONE_BROWSER_EXTENSION" : Application type - /// standalone browser extension. + /// - "APPLICATION_TYPE_STANDALONE_BROWSER_EXTENSION" : Deprecated. This vaule + /// is no longer used. Application type standalone browser extension. /// - "APPLICATION_TYPE_BRUSCHETTA" : Application type bruschetta. core.String? appType; @@ -6957,6 +7253,777 @@ class GoogleChromeManagementV1UserRequestingExtensionDetails { }; } +/// Information of public key associated with a Chrome browser profile. +class GoogleChromeManagementVersionsV1AttestationCredential { + /// Latest rotation timestamp of the public key rotation. + /// + /// Output only. + core.String? keyRotationTime; + + /// Trust level of the public key. + /// + /// Output only. + /// Possible string values are: + /// - "KEY_TRUST_LEVEL_UNSPECIFIED" : Represents an unspecified public key + /// trust level. + /// - "CHROME_BROWSER_HW_KEY" : Represents a HW key. + /// - "CHROME_BROWSER_OS_KEY" : Represents an OS key. + core.String? keyTrustLevel; + + /// Type of the public key. + /// + /// Output only. + /// Possible string values are: + /// - "KEY_TYPE_UNSPECIFIED" : Represents an unspecified public key type. + /// - "RSA_KEY" : Represents a RSA key. + /// - "EC_KEY" : Represents an EC key. + core.String? keyType; + + /// Value of the public key. + /// + /// Output only. + core.String? 
publicKey; + core.List get publicKeyAsBytes => convert.base64.decode(publicKey!); + + set publicKeyAsBytes(core.List bytes_) { + publicKey = + convert.base64.encode(bytes_).replaceAll('/', '_').replaceAll('+', '-'); + } + + GoogleChromeManagementVersionsV1AttestationCredential({ + this.keyRotationTime, + this.keyTrustLevel, + this.keyType, + this.publicKey, + }); + + GoogleChromeManagementVersionsV1AttestationCredential.fromJson(core.Map json_) + : this( + keyRotationTime: json_['keyRotationTime'] as core.String?, + keyTrustLevel: json_['keyTrustLevel'] as core.String?, + keyType: json_['keyType'] as core.String?, + publicKey: json_['publicKey'] as core.String?, + ); + + core.Map toJson() => { + if (keyRotationTime != null) 'keyRotationTime': keyRotationTime!, + if (keyTrustLevel != null) 'keyTrustLevel': keyTrustLevel!, + if (keyType != null) 'keyType': keyType!, + if (publicKey != null) 'publicKey': publicKey!, + }; +} + +/// A representation of a Chrome browser profile. +class GoogleChromeManagementVersionsV1ChromeBrowserProfile { + /// The specific affiliation state of the profile. + /// + /// Output only. + /// Possible string values are: + /// - "AFFILIATION_STATE_UNSPECIFIED" : Unspecified affiliation state. + /// - "UNAFFILIATED_GENERIC" : Unaffiliated - but we do not have the details + /// for the type of unaffiliated profile. + /// - "PROFILE_ONLY" : Unaffiliated - A managed profile that appears on a + /// totally unmanaged browser. + /// - "UNAFFILIATED_LOCAL_MACHINE" : Unaffiliated - A managed profile that + /// appears on a machine that is locally managed by a different organization + /// (through platform management mechanisms like GPO). + /// - "UNAFFILIATED_CLOUD_MACHINE" : Unaffiliated - A managed profile that + /// appears on a managed browser that is cloud managed by a different + /// organization (using Chrome Browser Cloud Management). + /// - "AFFILIATED_CLOUD_MANAGED" : Affiliated - Both the profile and the + /// managed browser are managed by the same organization. + core.String? affiliationState; + + /// Location of the profile annotated by the admin. + /// + /// Optional. + core.String? annotatedLocation; + + /// User of the profile annotated by the admin. + /// + /// Optional. + core.String? annotatedUser; + + /// Attestation credential information of the profile. + /// + /// Output only. + GoogleChromeManagementVersionsV1AttestationCredential? attestationCredential; + + /// Channel of the browser on which the profile exists. + /// + /// Output only. + core.String? browserChannel; + + /// Version of the browser on which the profile exists. + /// + /// Output only. + core.String? browserVersion; + + /// Basic information of the device on which the profile exists. + /// + /// This information is only available for the affiliated profiles. + /// + /// Output only. + GoogleChromeManagementVersionsV1DeviceInfo? deviceInfo; + + /// Profile display name set by client. + /// + /// Output only. + core.String? displayName; + + /// Etag of this ChromeBrowserProfile resource. + /// + /// This etag can be used with UPDATE operation to ensure consistency. + /// + /// Output only. + core.String? etag; + + /// Number of extensions installed on the profile. + /// + /// Output only. + core.String? extensionCount; + + /// Timestamp of the first enrollment of the profile. + /// + /// Output only. + core.String? firstEnrollmentTime; + + /// Identity provider of the profile. + /// + /// Output only. 
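// Editorial note, not part of the generated library: a usage sketch for the
// Chrome browser profiles surface added above. The resource getter path
// (`customers.profiles`), the parent format `customers/{customer}`, and the
// `chrome.management.profiles.readonly` scope string are assumptions; the
// list method signature and the ChromeBrowserProfile / AttestationCredential
// model types are the ones defined in this file.
import 'package:googleapis/chromemanagement/v1.dart';
import 'package:googleapis_auth/auth_io.dart';

Future<void> main() async {
  final client = await clientViaApplicationDefaultCredentials(
    scopes: [
      // Assumed scope for reading profiles; adjust to your deployment.
      'https://www.googleapis.com/auth/chrome.management.profiles.readonly',
    ],
  );
  try {
    final api = ChromeManagementApi(client);
    String? pageToken;
    do {
      // Pages through all profiles for the customer.
      final page = await api.customers.profiles.list(
        'customers/my_customer', // assumed parent format
        pageSize: 50,
        pageToken: pageToken,
      );
      for (final profile in page.chromeBrowserProfiles ??
          const <GoogleChromeManagementVersionsV1ChromeBrowserProfile>[]) {
        final key = profile.attestationCredential;
        // publicKeyAsBytes decodes the base64 value; guard against null first.
        final keyBytes = key?.publicKey == null ? null : key!.publicKeyAsBytes;
        print('${profile.displayName} (${profile.affiliationState}): '
            'keyType=${key?.keyType}, trust=${key?.keyTrustLevel}, '
            'publicKey=${keyBytes?.length ?? 0} bytes');
      }
      pageToken = page.nextPageToken;
    } while (pageToken != null);
  } finally {
    client.close();
  }
}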
+ /// Possible string values are: + /// - "IDENTITY_PROVIDER_UNSPECIFIED" : Represents an unspecified identity + /// provider. + /// - "GOOGLE_IDENTITY_PROVIDER" : Represents a Google identity provider. + /// - "EXTERNAL_IDENTITY_PROVIDER" : Represents an external identity provider. + core.String? identityProvider; + + /// Timestamp of the latest activity by the profile. + /// + /// Output only. + core.String? lastActivityTime; + + /// Timestamp of the latest policy fetch by the profile. + /// + /// Output only. + core.String? lastPolicyFetchTime; + + /// Timestamp of the latest policy sync by the profile. + /// + /// Output only. + core.String? lastPolicySyncTime; + + /// Timestamp of the latest status report by the profile. + /// + /// Output only. + core.String? lastStatusReportTime; + + /// Identifier. + /// + /// Format: customers/{customer_id}/profiles/{profile_permanent_id} + core.String? name; + + /// OS platform of the device on which the profile exists. + /// + /// Output only. + core.String? osPlatformType; + + /// Major OS version of the device on which the profile exists. + /// + /// (i.e. Windows 10) + /// + /// Output only. + core.String? osPlatformVersion; + + /// OS version of the device on which the profile exists. + /// + /// Output only. + core.String? osVersion; + + /// Number of policies applied on the profile. + /// + /// Output only. + core.String? policyCount; + + /// Chrome client side profile ID. + /// + /// Output only. + core.String? profileId; + + /// Profile permanent ID is the unique identifier of a profile within one + /// customer. + /// + /// Output only. + core.String? profilePermanentId; + + /// Detailed reporting data of the profile. + /// + /// This information is only available when the profile reporting policy is + /// enabled. + /// + /// Output only. + GoogleChromeManagementVersionsV1ReportingData? reportingData; + + /// Email address of the user to which the profile belongs. + /// + /// Output only. + core.String? userEmail; + + /// Unique Directory API ID of the user that can be used in Admin SDK Users + /// API. + /// + /// Output only. + core.String? userId; + + GoogleChromeManagementVersionsV1ChromeBrowserProfile({ + this.affiliationState, + this.annotatedLocation, + this.annotatedUser, + this.attestationCredential, + this.browserChannel, + this.browserVersion, + this.deviceInfo, + this.displayName, + this.etag, + this.extensionCount, + this.firstEnrollmentTime, + this.identityProvider, + this.lastActivityTime, + this.lastPolicyFetchTime, + this.lastPolicySyncTime, + this.lastStatusReportTime, + this.name, + this.osPlatformType, + this.osPlatformVersion, + this.osVersion, + this.policyCount, + this.profileId, + this.profilePermanentId, + this.reportingData, + this.userEmail, + this.userId, + }); + + GoogleChromeManagementVersionsV1ChromeBrowserProfile.fromJson(core.Map json_) + : this( + affiliationState: json_['affiliationState'] as core.String?, + annotatedLocation: json_['annotatedLocation'] as core.String?, + annotatedUser: json_['annotatedUser'] as core.String?, + attestationCredential: json_.containsKey('attestationCredential') + ? GoogleChromeManagementVersionsV1AttestationCredential.fromJson( + json_['attestationCredential'] + as core.Map) + : null, + browserChannel: json_['browserChannel'] as core.String?, + browserVersion: json_['browserVersion'] as core.String?, + deviceInfo: json_.containsKey('deviceInfo') + ? 
GoogleChromeManagementVersionsV1DeviceInfo.fromJson( + json_['deviceInfo'] as core.Map) + : null, + displayName: json_['displayName'] as core.String?, + etag: json_['etag'] as core.String?, + extensionCount: json_['extensionCount'] as core.String?, + firstEnrollmentTime: json_['firstEnrollmentTime'] as core.String?, + identityProvider: json_['identityProvider'] as core.String?, + lastActivityTime: json_['lastActivityTime'] as core.String?, + lastPolicyFetchTime: json_['lastPolicyFetchTime'] as core.String?, + lastPolicySyncTime: json_['lastPolicySyncTime'] as core.String?, + lastStatusReportTime: json_['lastStatusReportTime'] as core.String?, + name: json_['name'] as core.String?, + osPlatformType: json_['osPlatformType'] as core.String?, + osPlatformVersion: json_['osPlatformVersion'] as core.String?, + osVersion: json_['osVersion'] as core.String?, + policyCount: json_['policyCount'] as core.String?, + profileId: json_['profileId'] as core.String?, + profilePermanentId: json_['profilePermanentId'] as core.String?, + reportingData: json_.containsKey('reportingData') + ? GoogleChromeManagementVersionsV1ReportingData.fromJson( + json_['reportingData'] as core.Map) + : null, + userEmail: json_['userEmail'] as core.String?, + userId: json_['userId'] as core.String?, + ); + + core.Map toJson() => { + if (affiliationState != null) 'affiliationState': affiliationState!, + if (annotatedLocation != null) 'annotatedLocation': annotatedLocation!, + if (annotatedUser != null) 'annotatedUser': annotatedUser!, + if (attestationCredential != null) + 'attestationCredential': attestationCredential!, + if (browserChannel != null) 'browserChannel': browserChannel!, + if (browserVersion != null) 'browserVersion': browserVersion!, + if (deviceInfo != null) 'deviceInfo': deviceInfo!, + if (displayName != null) 'displayName': displayName!, + if (etag != null) 'etag': etag!, + if (extensionCount != null) 'extensionCount': extensionCount!, + if (firstEnrollmentTime != null) + 'firstEnrollmentTime': firstEnrollmentTime!, + if (identityProvider != null) 'identityProvider': identityProvider!, + if (lastActivityTime != null) 'lastActivityTime': lastActivityTime!, + if (lastPolicyFetchTime != null) + 'lastPolicyFetchTime': lastPolicyFetchTime!, + if (lastPolicySyncTime != null) + 'lastPolicySyncTime': lastPolicySyncTime!, + if (lastStatusReportTime != null) + 'lastStatusReportTime': lastStatusReportTime!, + if (name != null) 'name': name!, + if (osPlatformType != null) 'osPlatformType': osPlatformType!, + if (osPlatformVersion != null) 'osPlatformVersion': osPlatformVersion!, + if (osVersion != null) 'osVersion': osVersion!, + if (policyCount != null) 'policyCount': policyCount!, + if (profileId != null) 'profileId': profileId!, + if (profilePermanentId != null) + 'profilePermanentId': profilePermanentId!, + if (reportingData != null) 'reportingData': reportingData!, + if (userEmail != null) 'userEmail': userEmail!, + if (userId != null) 'userId': userId!, + }; +} + +/// Information of a device that runs a Chrome browser profile. +class GoogleChromeManagementVersionsV1DeviceInfo { + /// Device ID that identifies the affiliated device on which the profile + /// exists. + /// + /// If the device type is CHROME_BROWSER, then this represents a unique + /// Directory API ID of the device that can be used in Admin SDK Browsers API. + /// + /// Output only. + core.String? affiliatedDeviceId; + + /// Type of the device on which the profile exists. + /// + /// Output only. 
+ /// Possible string values are: + /// - "DEVICE_TYPE_UNSPECIFIED" : Represents an unspecified device type. + /// - "CHROME_BROWSER" : Represents a Chrome browser device. + core.String? deviceType; + + /// Hostname of the device on which the profile exists. + /// + /// Output only. + core.String? hostname; + + /// Machine name of the device on which the profile exists. + /// + /// On platforms which do not report the machine name (currently iOS and + /// Android) this is instead set to the browser's device_id - but note that + /// this is a different device_id than the |affiliated_device_id|. + /// + /// Output only. + core.String? machine; + + GoogleChromeManagementVersionsV1DeviceInfo({ + this.affiliatedDeviceId, + this.deviceType, + this.hostname, + this.machine, + }); + + GoogleChromeManagementVersionsV1DeviceInfo.fromJson(core.Map json_) + : this( + affiliatedDeviceId: json_['affiliatedDeviceId'] as core.String?, + deviceType: json_['deviceType'] as core.String?, + hostname: json_['hostname'] as core.String?, + machine: json_['machine'] as core.String?, + ); + + core.Map toJson() => { + if (affiliatedDeviceId != null) + 'affiliatedDeviceId': affiliatedDeviceId!, + if (deviceType != null) 'deviceType': deviceType!, + if (hostname != null) 'hostname': hostname!, + if (machine != null) 'machine': machine!, + }; +} + +/// Response to ListChromeBrowserProfiles method. +class GoogleChromeManagementVersionsV1ListChromeBrowserProfilesResponse { + /// The list of profiles returned. + core.List? + chromeBrowserProfiles; + + /// The pagination token that can be used to list the next page. + core.String? nextPageToken; + + /// Total size represents an estimated number of resources returned. + /// + /// Not guaranteed to be accurate above 10k profiles. + core.String? totalSize; + + GoogleChromeManagementVersionsV1ListChromeBrowserProfilesResponse({ + this.chromeBrowserProfiles, + this.nextPageToken, + this.totalSize, + }); + + GoogleChromeManagementVersionsV1ListChromeBrowserProfilesResponse.fromJson( + core.Map json_) + : this( + chromeBrowserProfiles: (json_['chromeBrowserProfiles'] as core.List?) + ?.map((value) => + GoogleChromeManagementVersionsV1ChromeBrowserProfile.fromJson( + value as core.Map)) + .toList(), + nextPageToken: json_['nextPageToken'] as core.String?, + totalSize: json_['totalSize'] as core.String?, + ); + + core.Map toJson() => { + if (chromeBrowserProfiles != null) + 'chromeBrowserProfiles': chromeBrowserProfiles!, + if (nextPageToken != null) 'nextPageToken': nextPageToken!, + if (totalSize != null) 'totalSize': totalSize!, + }; +} + +/// Reporting data of a Chrome browser profile. +class GoogleChromeManagementVersionsV1ReportingData { + /// Executable path of the installed Chrome browser. + /// + /// A valid path is included only in affiliated profiles. + /// + /// Output only. + core.String? browserExecutablePath; + + /// Information of the extensions installed on the profile. + /// + /// Output only. + core.List? + extensionData; + + /// Information of the policies applied on the extensions. + /// + /// Output only. + core.List? + extensionPolicyData; + + /// Updated version of a browser, if it is different from the active browser + /// version. + /// + /// Output only. + core.String? installedBrowserVersion; + + /// Information of the policies applied on the profile. + /// + /// Output only. + core.List? + policyData; + + /// Path of the profile. + /// + /// A valid path is included only in affiliated profiles. + /// + /// Output only. + core.String? 
profilePath; + + GoogleChromeManagementVersionsV1ReportingData({ + this.browserExecutablePath, + this.extensionData, + this.extensionPolicyData, + this.installedBrowserVersion, + this.policyData, + this.profilePath, + }); + + GoogleChromeManagementVersionsV1ReportingData.fromJson(core.Map json_) + : this( + browserExecutablePath: json_['browserExecutablePath'] as core.String?, + extensionData: (json_['extensionData'] as core.List?) + ?.map((value) => + GoogleChromeManagementVersionsV1ReportingDataExtensionData + .fromJson(value as core.Map)) + .toList(), + extensionPolicyData: (json_['extensionPolicyData'] as core.List?) + ?.map((value) => + GoogleChromeManagementVersionsV1ReportingDataExtensionPolicyData + .fromJson(value as core.Map)) + .toList(), + installedBrowserVersion: + json_['installedBrowserVersion'] as core.String?, + policyData: (json_['policyData'] as core.List?) + ?.map((value) => + GoogleChromeManagementVersionsV1ReportingDataPolicyData + .fromJson(value as core.Map)) + .toList(), + profilePath: json_['profilePath'] as core.String?, + ); + + core.Map toJson() => { + if (browserExecutablePath != null) + 'browserExecutablePath': browserExecutablePath!, + if (extensionData != null) 'extensionData': extensionData!, + if (extensionPolicyData != null) + 'extensionPolicyData': extensionPolicyData!, + if (installedBrowserVersion != null) + 'installedBrowserVersion': installedBrowserVersion!, + if (policyData != null) 'policyData': policyData!, + if (profilePath != null) 'profilePath': profilePath!, + }; +} + +/// Information of conflicting policy applied on a Chrome browser profile. +class GoogleChromeManagementVersionsV1ReportingDataConflictingPolicyData { + /// Source of the policy. + /// + /// Output only. + /// Possible string values are: + /// - "POLICY_SOURCE_UNSPECIFIED" : Represents an unspecified policy source. + /// - "MACHINE_PLATFORM" : Represents a machine level platform policy. + /// - "USER_PLATFORM" : Represents a user level platform policy. + /// - "MACHINE_LEVEL_USER_CLOUD" : Represents a machine level user cloud + /// policy. + /// - "USER_CLOUD" : Represents a user level cloud policy. + /// - "MACHINE_MERGED" : Represents a machine level merged policy. + core.String? source; + + GoogleChromeManagementVersionsV1ReportingDataConflictingPolicyData({ + this.source, + }); + + GoogleChromeManagementVersionsV1ReportingDataConflictingPolicyData.fromJson( + core.Map json_) + : this( + source: json_['source'] as core.String?, + ); + + core.Map toJson() => { + if (source != null) 'source': source!, + }; +} + +/// Information of an extension installed on a Chrome browser profile. +class GoogleChromeManagementVersionsV1ReportingDataExtensionData { + /// Description of the extension. + /// + /// Output only. + core.String? description; + + /// ID of the extension. + /// + /// Output only. + core.String? extensionId; + + /// Type of the extension. + /// + /// Output only. + /// Possible string values are: + /// - "EXTENSION_TYPE_UNSPECIFIED" : Represents an unspecified extension type. + /// - "EXTENSION" : Represents an extension. + /// - "APP" : Represents an app. + /// - "THEME" : Represents a theme. + /// - "HOSTED_APP" : Represents a hosted app. + core.String? extensionType; + + /// The URL of the homepage of the extension. + /// + /// Output only. + core.String? homepageUri; + + /// Installation type of the extension. + /// + /// Output only. + /// Possible string values are: + /// - "INSTALLATION_TYPE_UNSPECIFIED" : Represents an unspecified installation + /// type. 
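// Editorial note, not part of the generated library: a model-level sketch (no
// RPCs) showing how the per-profile reporting data defined above can be
// inspected once a ChromeBrowserProfile has been fetched. As the field docs
// note, reportingData is only populated when the profile reporting policy is
// enabled; everything referenced here is defined in this file.
import 'package:googleapis/chromemanagement/v1.dart';

void printReportingSummary(
    GoogleChromeManagementVersionsV1ChromeBrowserProfile profile) {
  final reporting = profile.reportingData;
  if (reporting == null) {
    print('${profile.name}: no reporting data (reporting policy disabled?)');
    return;
  }
  // Extensions that were not installed from the Chrome Web Store.
  for (final ext in reporting.extensionData ??
      const <GoogleChromeManagementVersionsV1ReportingDataExtensionData>[]) {
    if (ext.isWebstoreExtension == false) {
      print('non-webstore extension ${ext.extensionId} (${ext.name}) '
          'installed via ${ext.installationType}');
    }
  }
  // Profile-level policies that reported an error or a source conflict.
  for (final policy in reporting.policyData ??
      const <GoogleChromeManagementVersionsV1ReportingDataPolicyData>[]) {
    if (policy.error != null || (policy.conflicts?.isNotEmpty ?? false)) {
      print('policy ${policy.name} from ${policy.source}: '
          'error=${policy.error}, conflicts=${policy.conflicts?.length ?? 0}');
    }
  }
}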
+ /// - "MULTIPLE" : Represents instances of the extension having mixed + /// installation types. + /// - "NORMAL" : Represents a normal installation type. + /// - "ADMIN" : Represents an installation by admin. + /// - "DEVELOPMENT" : Represents a development installation type. + /// - "SIDELOAD" : Represents a sideload installation type. + /// - "OTHER" : Represents an installation type that is not covered in the + /// other options. + core.String? installationType; + + /// Represents whether the user disabled the extension. + /// + /// Output only. + core.bool? isDisabled; + + /// Represents whether the extension is from the webstore. + /// + /// Output only. + core.bool? isWebstoreExtension; + + /// Manifest version of the extension. + /// + /// Output only. + core.int? manifestVersion; + + /// Name of the extension. + /// + /// Output only. + core.String? name; + + /// Permissions requested by the extension. + /// + /// Output only. + core.List? permissions; + + /// Version of the extension. + /// + /// Output only. + core.String? version; + + GoogleChromeManagementVersionsV1ReportingDataExtensionData({ + this.description, + this.extensionId, + this.extensionType, + this.homepageUri, + this.installationType, + this.isDisabled, + this.isWebstoreExtension, + this.manifestVersion, + this.name, + this.permissions, + this.version, + }); + + GoogleChromeManagementVersionsV1ReportingDataExtensionData.fromJson( + core.Map json_) + : this( + description: json_['description'] as core.String?, + extensionId: json_['extensionId'] as core.String?, + extensionType: json_['extensionType'] as core.String?, + homepageUri: json_['homepageUri'] as core.String?, + installationType: json_['installationType'] as core.String?, + isDisabled: json_['isDisabled'] as core.bool?, + isWebstoreExtension: json_['isWebstoreExtension'] as core.bool?, + manifestVersion: json_['manifestVersion'] as core.int?, + name: json_['name'] as core.String?, + permissions: (json_['permissions'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + version: json_['version'] as core.String?, + ); + + core.Map toJson() => { + if (description != null) 'description': description!, + if (extensionId != null) 'extensionId': extensionId!, + if (extensionType != null) 'extensionType': extensionType!, + if (homepageUri != null) 'homepageUri': homepageUri!, + if (installationType != null) 'installationType': installationType!, + if (isDisabled != null) 'isDisabled': isDisabled!, + if (isWebstoreExtension != null) + 'isWebstoreExtension': isWebstoreExtension!, + if (manifestVersion != null) 'manifestVersion': manifestVersion!, + if (name != null) 'name': name!, + if (permissions != null) 'permissions': permissions!, + if (version != null) 'version': version!, + }; +} + +/// Information of the policies applied on an extension. +class GoogleChromeManagementVersionsV1ReportingDataExtensionPolicyData { + /// ID of the extension. + /// + /// Output only. + core.String? extensionId; + + /// Name of the extension. + /// + /// Output only. + core.String? extensionName; + + /// Information of the policies applied on the extension. + /// + /// Output only. + core.List? 
+ policyData; + + GoogleChromeManagementVersionsV1ReportingDataExtensionPolicyData({ + this.extensionId, + this.extensionName, + this.policyData, + }); + + GoogleChromeManagementVersionsV1ReportingDataExtensionPolicyData.fromJson( + core.Map json_) + : this( + extensionId: json_['extensionId'] as core.String?, + extensionName: json_['extensionName'] as core.String?, + policyData: (json_['policyData'] as core.List?) + ?.map((value) => + GoogleChromeManagementVersionsV1ReportingDataPolicyData + .fromJson(value as core.Map)) + .toList(), + ); + + core.Map toJson() => { + if (extensionId != null) 'extensionId': extensionId!, + if (extensionName != null) 'extensionName': extensionName!, + if (policyData != null) 'policyData': policyData!, + }; +} + +/// Information of a policy applied on a Chrome browser profile. +class GoogleChromeManagementVersionsV1ReportingDataPolicyData { + /// Conflicting policy information. + /// + /// Output only. + core.List? + conflicts; + + /// Error message of the policy, if any. + /// + /// Output only. + core.String? error; + + /// Name of the policy. + /// + /// Output only. + core.String? name; + + /// Source of the policy. + /// + /// Output only. + /// Possible string values are: + /// - "POLICY_SOURCE_UNSPECIFIED" : Represents an unspecified policy source. + /// - "MACHINE_PLATFORM" : Represents a machine level platform policy. + /// - "USER_PLATFORM" : Represents a user level platform policy. + /// - "MACHINE_LEVEL_USER_CLOUD" : Represents a machine level user cloud + /// policy. + /// - "USER_CLOUD" : Represents a user level cloud policy. + /// - "MACHINE_MERGED" : Represents a machine level merged policy. + core.String? source; + + /// Value of the policy. + /// + /// Output only. + core.String? value; + + GoogleChromeManagementVersionsV1ReportingDataPolicyData({ + this.conflicts, + this.error, + this.name, + this.source, + this.value, + }); + + GoogleChromeManagementVersionsV1ReportingDataPolicyData.fromJson( + core.Map json_) + : this( + conflicts: (json_['conflicts'] as core.List?) + ?.map((value) => + GoogleChromeManagementVersionsV1ReportingDataConflictingPolicyData + .fromJson(value as core.Map)) + .toList(), + error: json_['error'] as core.String?, + name: json_['name'] as core.String?, + source: json_['source'] as core.String?, + value: json_['value'] as core.String?, + ); + + core.Map toJson() => { + if (conflicts != null) 'conflicts': conflicts!, + if (error != null) 'error': error!, + if (name != null) 'name': name!, + if (source != null) 'source': source!, + if (value != null) 'value': value!, + }; +} + /// A generic empty message that you can re-use to avoid defining duplicated /// empty messages in your APIs. /// diff --git a/generated/googleapis/lib/chromepolicy/v1.dart b/generated/googleapis/lib/chromepolicy/v1.dart index ad51c5a27..3c1a0ac31 100644 --- a/generated/googleapis/lib/chromepolicy/v1.dart +++ b/generated/googleapis/lib/chromepolicy/v1.dart @@ -2737,7 +2737,10 @@ class Proto2FileDescriptorProto { /// The syntax of the proto file. /// /// The supported values are "proto2", "proto3", and "editions". If `edition` - /// is present, this value must be "editions". + /// is present, this value must be "editions". WARNING: This field should only + /// be used by protobuf plugins or special cases like the proto compiler. + /// Other uses are discouraged and developers should rely on the protoreflect + /// APIs for their client language. core.String? 
syntax; Proto2FileDescriptorProto({ diff --git a/generated/googleapis/lib/classroom/v1.dart b/generated/googleapis/lib/classroom/v1.dart index e2768fd9e..d58d77b50 100644 --- a/generated/googleapis/lib/classroom/v1.dart +++ b/generated/googleapis/lib/classroom/v1.dart @@ -27,6 +27,7 @@ /// - [CoursesCourseWorkResource] /// - [CoursesCourseWorkAddOnAttachmentsResource] /// - [CoursesCourseWorkAddOnAttachmentsStudentSubmissionsResource] +/// - [CoursesCourseWorkRubricsResource] /// - [CoursesCourseWorkStudentSubmissionsResource] /// - [CoursesCourseWorkMaterialsResource] /// - [CoursesCourseWorkMaterialsAddOnAttachmentsResource] @@ -1399,6 +1400,8 @@ class CoursesCourseWorkResource { CoursesCourseWorkAddOnAttachmentsResource get addOnAttachments => CoursesCourseWorkAddOnAttachmentsResource(_requester); + CoursesCourseWorkRubricsResource get rubrics => + CoursesCourseWorkRubricsResource(_requester); CoursesCourseWorkStudentSubmissionsResource get studentSubmissions => CoursesCourseWorkStudentSubmissionsResource(_requester); @@ -1829,6 +1832,88 @@ class CoursesCourseWorkResource { return CourseWork.fromJson( response_ as core.Map); } + + /// Updates a rubric. + /// + /// See google.classroom.v1.Rubric for details of which fields can be updated. + /// Rubric update capabilities are \[limited\](/classroom/rubrics/limitations) + /// once grading has started. The requesting user and course owner must have + /// rubrics creation capabilities. For details, see + /// [licensing requirements](https://developers.google.com/classroom/rubrics/limitations#license-requirements). + /// This request must be made by the Google Cloud console of the + /// [OAuth client ID](https://support.google.com/cloud/answer/6158849) used to + /// create the parent course work item. This method returns the following + /// error codes: * `PERMISSION_DENIED` if the requesting developer project + /// didn't create the corresponding course work, if the user isn't permitted + /// to make the requested modification to the rubric, or for access errors. + /// This error code is also returned if grading has already started on the + /// rubric. * `INVALID_ARGUMENT` if the request is malformed and for the + /// following request error: * `RubricCriteriaInvalidFormat` * `NOT_FOUND` if + /// the requested course, course work, or rubric doesn't exist or if the user + /// doesn't have access to the corresponding course work. * `INTERNAL` if + /// grading has already started on the rubric. + /// + /// [request] - The metadata request object. + /// + /// Request parameters: + /// + /// [courseId] - Required. Identifier of the course. + /// + /// [courseWorkId] - Required. Identifier of the course work. + /// + /// [id] - Optional. Identifier of the rubric. + /// + /// [updateMask] - Optional. Mask that identifies which fields on the rubric + /// to update. This field is required to do an update. The update fails if + /// invalid fields are specified. There are multiple options to define the + /// criteria of a rubric: the `source_spreadsheet_id` and the `criteria` list. + /// Only one of these can be used at a time to define a rubric. The rubric + /// `criteria` list is fully replaced by the rubric criteria specified in the + /// update request. For example, if a criterion or level is missing from the + /// request, it is deleted. New criteria and levels are added and an ID is + /// assigned. Existing criteria and levels retain the previously assigned ID + /// if the ID is specified in the request. 
The following fields can be + /// specified by teachers: * `criteria` * `source_spreadsheet_id` + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [Rubric]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future updateRubric( + Rubric request, + core.String courseId, + core.String courseWorkId, { + core.String? id, + core.String? updateMask, + core.String? $fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if (id != null) 'id': [id], + if (updateMask != null) 'updateMask': [updateMask], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/courses/' + + commons.escapeVariable('$courseId') + + '/courseWork/' + + commons.escapeVariable('$courseWorkId') + + '/rubric'; + + final response_ = await _requester.request( + url_, + 'PATCH', + body: body_, + queryParams: queryParams_, + ); + return Rubric.fromJson(response_ as core.Map); + } } class CoursesCourseWorkAddOnAttachmentsResource { @@ -2312,6 +2397,326 @@ class CoursesCourseWorkAddOnAttachmentsStudentSubmissionsResource { } } +class CoursesCourseWorkRubricsResource { + final commons.ApiRequester _requester; + + CoursesCourseWorkRubricsResource(commons.ApiRequester client) + : _requester = client; + + /// Creates a rubric. + /// + /// The requesting user and course owner must have rubrics creation + /// capabilities. For details, see + /// [licensing requirements](https://developers.google.com/classroom/rubrics/limitations#license-requirements). + /// For further details, see \[Rubrics structure and known + /// limitations\](/classroom/rubrics/limitations). This request must be made + /// by the Google Cloud console of the + /// [OAuth client ID](https://support.google.com/cloud/answer/6158849) used to + /// create the parent course work item. This method returns the following + /// error codes: * `PERMISSION_DENIED` if the requesting user isn't permitted + /// to create rubrics for course work in the requested course. * `INTERNAL` if + /// the request has insufficient OAuth scopes. * `INVALID_ARGUMENT` if the + /// request is malformed and for the following request error: * + /// `RubricCriteriaInvalidFormat` * `NOT_FOUND` if the requested course or + /// course work don't exist or the user doesn't have access to the course or + /// course work. * `FAILED_PRECONDITION` for the following request error: * + /// `AttachmentNotVisible` + /// + /// [request] - The metadata request object. + /// + /// Request parameters: + /// + /// [courseId] - Required. Identifier of the course. + /// + /// [courseWorkId] - Required. Identifier of the course work. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [Rubric]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future create( + Rubric request, + core.String courseId, + core.String courseWorkId, { + core.String? 
$fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/courses/' + + commons.escapeVariable('$courseId') + + '/courseWork/' + + commons.escapeVariable('$courseWorkId') + + '/rubrics'; + + final response_ = await _requester.request( + url_, + 'POST', + body: body_, + queryParams: queryParams_, + ); + return Rubric.fromJson(response_ as core.Map); + } + + /// Deletes a rubric. + /// + /// The requesting user and course owner must have rubrics creation + /// capabilities. For details, see + /// [licensing requirements](https://developers.google.com/classroom/rubrics/limitations#license-requirements). + /// This request must be made by the Google Cloud console of the + /// [OAuth client ID](https://support.google.com/cloud/answer/6158849) used to + /// create the corresponding rubric. This method returns the following error + /// codes: * `PERMISSION_DENIED` if the requesting developer project didn't + /// create the corresponding rubric, or if the requesting user isn't permitted + /// to delete the requested rubric. * `NOT_FOUND` if no rubric exists with the + /// requested ID or the user does not have access to the course, course work, + /// or rubric. * `INVALID_ARGUMENT` if grading has already started on the + /// rubric. + /// + /// Request parameters: + /// + /// [courseId] - Required. Identifier of the course. + /// + /// [courseWorkId] - Required. Identifier of the course work. + /// + /// [id] - Required. Identifier of the rubric. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [Empty]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future delete( + core.String courseId, + core.String courseWorkId, + core.String id, { + core.String? $fields, + }) async { + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/courses/' + + commons.escapeVariable('$courseId') + + '/courseWork/' + + commons.escapeVariable('$courseWorkId') + + '/rubrics/' + + commons.escapeVariable('$id'); + + final response_ = await _requester.request( + url_, + 'DELETE', + queryParams: queryParams_, + ); + return Empty.fromJson(response_ as core.Map); + } + + /// Returns a rubric. + /// + /// This method returns the following error codes: * `PERMISSION_DENIED` for + /// access errors. * `INVALID_ARGUMENT` if the request is malformed. * + /// `NOT_FOUND` if the requested course, course work, or rubric doesn't exist + /// or if the user doesn't have access to the corresponding course work. + /// + /// Request parameters: + /// + /// [courseId] - Required. Identifier of the course. + /// + /// [courseWorkId] - Required. Identifier of the course work. + /// + /// [id] - Required. Identifier of the rubric. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [Rubric]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future get( + core.String courseId, + core.String courseWorkId, + core.String id, { + core.String? 
$fields, + }) async { + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/courses/' + + commons.escapeVariable('$courseId') + + '/courseWork/' + + commons.escapeVariable('$courseWorkId') + + '/rubrics/' + + commons.escapeVariable('$id'); + + final response_ = await _requester.request( + url_, + 'GET', + queryParams: queryParams_, + ); + return Rubric.fromJson(response_ as core.Map); + } + + /// Returns a list of rubrics that the requester is permitted to view. + /// + /// This method returns the following error codes: * `PERMISSION_DENIED` for + /// access errors. * `INVALID_ARGUMENT` if the request is malformed. * + /// `NOT_FOUND` if the requested course or course work doesn't exist or if the + /// user doesn't have access to the corresponding course work. + /// + /// Request parameters: + /// + /// [courseId] - Required. Identifier of the course. + /// + /// [courseWorkId] - Required. Identifier of the course work. + /// + /// [pageSize] - The maximum number of rubrics to return. If unspecified, at + /// most 1 rubric is returned. The maximum value is 1; values above 1 are + /// coerced to 1. + /// + /// [pageToken] - nextPageToken value returned from a previous list call, + /// indicating that the subsequent page of results should be returned. The + /// list request must be otherwise identical to the one that resulted in this + /// token. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [ListRubricsResponse]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future list( + core.String courseId, + core.String courseWorkId, { + core.int? pageSize, + core.String? pageToken, + core.String? $fields, + }) async { + final queryParams_ = >{ + if (pageSize != null) 'pageSize': ['${pageSize}'], + if (pageToken != null) 'pageToken': [pageToken], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/courses/' + + commons.escapeVariable('$courseId') + + '/courseWork/' + + commons.escapeVariable('$courseWorkId') + + '/rubrics'; + + final response_ = await _requester.request( + url_, + 'GET', + queryParams: queryParams_, + ); + return ListRubricsResponse.fromJson( + response_ as core.Map); + } + + /// Updates a rubric. + /// + /// See google.classroom.v1.Rubric for details of which fields can be updated. + /// Rubric update capabilities are \[limited\](/classroom/rubrics/limitations) + /// once grading has started. The requesting user and course owner must have + /// rubrics creation capabilities. For details, see + /// [licensing requirements](https://developers.google.com/classroom/rubrics/limitations#license-requirements). + /// This request must be made by the Google Cloud console of the + /// [OAuth client ID](https://support.google.com/cloud/answer/6158849) used to + /// create the parent course work item. This method returns the following + /// error codes: * `PERMISSION_DENIED` if the requesting developer project + /// didn't create the corresponding course work, if the user isn't permitted + /// to make the requested modification to the rubric, or for access errors. + /// This error code is also returned if grading has already started on the + /// rubric. 
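// Editorial note, not part of the generated library: a sketch of creating a
// rubric with the CoursesCourseWorkRubricsResource.create method added above.
// The course/course-work IDs are placeholders and the OAuth scope is an
// assumption (rubric writes also require the licensing and same-project
// constraints described in the method docs); Rubric, Criterion, and Level are
// the model classes defined later in this file.
import 'package:googleapis/classroom/v1.dart';
import 'package:googleapis_auth/auth_io.dart';

Future<void> main() async {
  final client = await clientViaApplicationDefaultCredentials(
    // Assumed scope; use whichever course-work scope your project is granted.
    scopes: ['https://www.googleapis.com/auth/classroom.coursework.students'],
  );
  try {
    final api = ClassroomApi(client);
    final rubric = Rubric(
      criteria: [
        Criterion(
          title: 'Argument quality',
          description: 'How well the work supports its thesis.',
          levels: [
            Level(title: 'Excellent', points: 4),
            Level(title: 'Adequate', points: 2),
            Level(title: 'Missing', points: 0),
          ],
        ),
      ],
    );
    final created = await api.courses.courseWork.rubrics.create(
      rubric,
      'COURSE_ID', // placeholder course identifier
      'COURSE_WORK_ID', // placeholder course work identifier
    );
    print('Created rubric ${created.id} with '
        '${created.criteria?.length ?? 0} criteria');
  } finally {
    client.close();
  }
}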
* `INVALID_ARGUMENT` if the request is malformed and for the + /// following request error: * `RubricCriteriaInvalidFormat` * `NOT_FOUND` if + /// the requested course, course work, or rubric doesn't exist or if the user + /// doesn't have access to the corresponding course work. * `INTERNAL` if + /// grading has already started on the rubric. + /// + /// [request] - The metadata request object. + /// + /// Request parameters: + /// + /// [courseId] - Required. Identifier of the course. + /// + /// [courseWorkId] - Required. Identifier of the course work. + /// + /// [id] - Optional. Identifier of the rubric. + /// + /// [updateMask] - Optional. Mask that identifies which fields on the rubric + /// to update. This field is required to do an update. The update fails if + /// invalid fields are specified. There are multiple options to define the + /// criteria of a rubric: the `source_spreadsheet_id` and the `criteria` list. + /// Only one of these can be used at a time to define a rubric. The rubric + /// `criteria` list is fully replaced by the rubric criteria specified in the + /// update request. For example, if a criterion or level is missing from the + /// request, it is deleted. New criteria and levels are added and an ID is + /// assigned. Existing criteria and levels retain the previously assigned ID + /// if the ID is specified in the request. The following fields can be + /// specified by teachers: * `criteria` * `source_spreadsheet_id` + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [Rubric]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future patch( + Rubric request, + core.String courseId, + core.String courseWorkId, + core.String id, { + core.String? updateMask, + core.String? $fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if (updateMask != null) 'updateMask': [updateMask], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/courses/' + + commons.escapeVariable('$courseId') + + '/courseWork/' + + commons.escapeVariable('$courseWorkId') + + '/rubrics/' + + commons.escapeVariable('$id'); + + final response_ = await _requester.request( + url_, + 'PATCH', + body: body_, + queryParams: queryParams_, + ); + return Rubric.fromJson(response_ as core.Map); + } +} + class CoursesCourseWorkStudentSubmissionsResource { final commons.ApiRequester _requester; @@ -7001,6 +7406,50 @@ class CourseWorkMaterial { }; } +/// A rubric criterion. +/// +/// Each criterion is a dimension on which performance is rated. +class Criterion { + /// The description of the criterion. + core.String? description; + + /// The criterion ID. + /// + /// On creation, an ID is assigned. + core.String? id; + + /// The list of levels within this criterion. + core.List? levels; + + /// The title of the criterion. + core.String? title; + + Criterion({ + this.description, + this.id, + this.levels, + this.title, + }); + + Criterion.fromJson(core.Map json_) + : this( + description: json_['description'] as core.String?, + id: json_['id'] as core.String?, + levels: (json_['levels'] as core.List?) 
+ ?.map((value) => + Level.fromJson(value as core.Map)) + .toList(), + title: json_['title'] as core.String?, + ); + + core.Map toJson() => { + if (description != null) 'description': description!, + if (id != null) 'id': id!, + if (levels != null) 'levels': levels!, + if (title != null) 'title': title!, + }; +} + /// Represents a whole or partial calendar date, such as a birthday. /// /// The time of day and time zone are either specified elsewhere or are @@ -7587,6 +8036,51 @@ class Invitation { }; } +/// A level of the criterion. +class Level { + /// The description of the level. + core.String? description; + + /// The level ID. + /// + /// On creation, an ID is assigned. + core.String? id; + + /// Optional points associated with this level. + /// + /// If set, all levels within the rubric must specify points and the value + /// must be distinct across all levels within a single criterion. 0 is + /// distinct from no points. + core.double? points; + + /// The title of the level. + /// + /// If the level has no points set, title must be set. + core.String? title; + + Level({ + this.description, + this.id, + this.points, + this.title, + }); + + Level.fromJson(core.Map json_) + : this( + description: json_['description'] as core.String?, + id: json_['id'] as core.String?, + points: (json_['points'] as core.num?)?.toDouble(), + title: json_['title'] as core.String?, + ); + + core.Map toJson() => { + if (description != null) 'description': description!, + if (id != null) 'id': id!, + if (points != null) 'points': points!, + if (title != null) 'title': title!, + }; +} + /// URL item. class Link { /// URL of a thumbnail image of the target URL. @@ -7898,6 +8392,36 @@ class ListInvitationsResponse { }; } +/// Response when listing rubrics. +class ListRubricsResponse { + /// Token identifying the next page of results to return. + /// + /// If empty, no further results are available. + core.String? nextPageToken; + + /// Rubrics that match the request. + core.List? rubrics; + + ListRubricsResponse({ + this.nextPageToken, + this.rubrics, + }); + + ListRubricsResponse.fromJson(core.Map json_) + : this( + nextPageToken: json_['nextPageToken'] as core.String?, + rubrics: (json_['rubrics'] as core.List?) + ?.map((value) => + Rubric.fromJson(value as core.Map)) + .toList(), + ); + + core.Map toJson() => { + if (nextPageToken != null) 'nextPageToken': nextPageToken!, + if (rubrics != null) 'rubrics': rubrics!, + }; +} + /// Response when listing student submissions. class ListStudentSubmissionsResponse { /// Token identifying the next page of results to return. @@ -8341,6 +8865,136 @@ class Registration { /// Request to return a student submission. typedef ReturnStudentSubmissionRequest = $Empty; +/// The rubric of the course work. +/// +/// A rubric is a scoring guide used to evaluate student work and give feedback. +/// For further details, see \[Rubrics structure and known +/// limitations\](/classroom/rubrics/limitations). +class Rubric { + /// Identifier of the course. + /// + /// Read-only. + core.String? courseId; + + /// Identifier for the course work this corresponds to. + /// + /// Read-only. + core.String? courseWorkId; + + /// Timestamp when this rubric was created. + /// + /// Read-only. + /// + /// Output only. + core.String? creationTime; + + /// List of criteria. + /// + /// Each criterion is a dimension on which performance is rated. + core.List? criteria; + + /// Classroom-assigned identifier for the rubric. + /// + /// This is unique among rubrics for the relevant course work. 
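// Editorial note, not part of the generated library: a sketch of the
// read-modify-write flow implied by the updateMask documentation above. Fetch
// the rubric, keep the existing criteria (including their IDs, so they are
// preserved), append a new criterion, then PATCH with updateMask 'criteria';
// the criteria list is fully replaced, so omitting an existing criterion
// would delete it. IDs and titles here are placeholders.
import 'package:googleapis/classroom/v1.dart';

Future<Rubric> addCriterion(
  ClassroomApi api,
  String courseId,
  String courseWorkId,
  String rubricId,
) async {
  final rubrics = api.courses.courseWork.rubrics;
  final current = await rubrics.get(courseId, courseWorkId, rubricId);
  final updated = Rubric(
    criteria: [
      ...?current.criteria,
      Criterion(
        title: 'Citations',
        levels: [
          Level(title: 'Present', points: 1),
          Level(title: 'Absent', points: 0),
        ],
      ),
    ],
  );
  return rubrics.patch(
    updated,
    courseId,
    courseWorkId,
    rubricId,
    updateMask: 'criteria',
  );
}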
Read-only. + core.String? id; + + /// Input only. + /// + /// Immutable. Google Sheets ID of the spreadsheet. This spreadsheet must + /// contain formatted rubric settings. See + /// [Create or reuse a rubric for an assignment](https://support.google.com/edu/classroom/answer/9335069). + /// Use of this field requires the + /// `https://www.googleapis.com/auth/spreadsheets.readonly` or + /// `https://www.googleapis.com/auth/spreadsheets` scope. + core.String? sourceSpreadsheetId; + + /// Timestamp of the most recent change to this rubric. + /// + /// Read-only. + /// + /// Output only. + core.String? updateTime; + + Rubric({ + this.courseId, + this.courseWorkId, + this.creationTime, + this.criteria, + this.id, + this.sourceSpreadsheetId, + this.updateTime, + }); + + Rubric.fromJson(core.Map json_) + : this( + courseId: json_['courseId'] as core.String?, + courseWorkId: json_['courseWorkId'] as core.String?, + creationTime: json_['creationTime'] as core.String?, + criteria: (json_['criteria'] as core.List?) + ?.map((value) => Criterion.fromJson( + value as core.Map)) + .toList(), + id: json_['id'] as core.String?, + sourceSpreadsheetId: json_['sourceSpreadsheetId'] as core.String?, + updateTime: json_['updateTime'] as core.String?, + ); + + core.Map toJson() => { + if (courseId != null) 'courseId': courseId!, + if (courseWorkId != null) 'courseWorkId': courseWorkId!, + if (creationTime != null) 'creationTime': creationTime!, + if (criteria != null) 'criteria': criteria!, + if (id != null) 'id': id!, + if (sourceSpreadsheetId != null) + 'sourceSpreadsheetId': sourceSpreadsheetId!, + if (updateTime != null) 'updateTime': updateTime!, + }; +} + +/// A rubric grade set for the student submission. +/// +/// There is at most one entry per rubric criterion. +class RubricGrade { + /// Criterion ID. + /// + /// Optional. + core.String? criterionId; + + /// Optional level ID of the selected level. + /// + /// If empty, no level was selected. + /// + /// Optional. + core.String? levelId; + + /// Optional points assigned for this criterion, typically based on the level. + /// + /// Levels might or might not have points. If unset, no points were set for + /// this criterion. + /// + /// Optional. + core.double? points; + + RubricGrade({ + this.criterionId, + this.levelId, + this.points, + }); + + RubricGrade.fromJson(core.Map json_) + : this( + criterionId: json_['criterionId'] as core.String?, + levelId: json_['levelId'] as core.String?, + points: (json_['points'] as core.num?)?.toDouble(), + ); + + core.Map toJson() => { + if (criterionId != null) 'criterionId': criterionId!, + if (levelId != null) 'levelId': levelId!, + if (points != null) 'points': points!, + }; +} + /// Drive file that is used as material for course work. class SharedDriveFile { /// Drive file details. @@ -8531,6 +9185,14 @@ class StudentSubmission { /// places. This may be modified only by course teachers. core.double? assignedGrade; + /// Assigned rubric grades based on the rubric's Criteria. + /// + /// This map is empty if there is no rubric attached to this course work or if + /// a rubric is attached, but no grades have been set on any Criteria. Entries + /// are only populated for grades that have been set. Key: The rubric's + /// criterion ID. Read-only. + core.Map? assignedRubricGrades; + /// Submission content when course_work_type is ASSIGNMENT. /// /// Students can modify this content using ModifyAttachments. @@ -8575,6 +9237,14 @@ class StudentSubmission { /// places. 
This is only visible to and modifiable by course teachers. core.double? draftGrade; + /// Pending rubric grades based on the rubric's criteria. + /// + /// This map is empty if there is no rubric attached to this course work or if + /// a rubric is attached, but no grades have been set on any criteria. Entries + /// are only populated for grades that have been set. Key: The rubric's + /// criterion ID. Read-only. + core.Map? draftRubricGrades; + /// Classroom-assigned Identifier for the student submission. /// /// This is unique among submissions for the relevant course work. Read-only. @@ -8623,6 +9293,7 @@ class StudentSubmission { StudentSubmission({ this.alternateLink, this.assignedGrade, + this.assignedRubricGrades, this.assignmentSubmission, this.associatedWithDeveloper, this.courseId, @@ -8630,6 +9301,7 @@ class StudentSubmission { this.courseWorkType, this.creationTime, this.draftGrade, + this.draftRubricGrades, this.id, this.late, this.multipleChoiceSubmission, @@ -8644,6 +9316,15 @@ class StudentSubmission { : this( alternateLink: json_['alternateLink'] as core.String?, assignedGrade: (json_['assignedGrade'] as core.num?)?.toDouble(), + assignedRubricGrades: (json_['assignedRubricGrades'] + as core.Map?) + ?.map( + (key, value) => core.MapEntry( + key, + RubricGrade.fromJson( + value as core.Map), + ), + ), assignmentSubmission: json_.containsKey('assignmentSubmission') ? AssignmentSubmission.fromJson(json_['assignmentSubmission'] as core.Map) @@ -8655,6 +9336,15 @@ class StudentSubmission { courseWorkType: json_['courseWorkType'] as core.String?, creationTime: json_['creationTime'] as core.String?, draftGrade: (json_['draftGrade'] as core.num?)?.toDouble(), + draftRubricGrades: (json_['draftRubricGrades'] + as core.Map?) + ?.map( + (key, value) => core.MapEntry( + key, + RubricGrade.fromJson( + value as core.Map), + ), + ), id: json_['id'] as core.String?, late: json_['late'] as core.bool?, multipleChoiceSubmission: @@ -8679,6 +9369,8 @@ class StudentSubmission { core.Map toJson() => { if (alternateLink != null) 'alternateLink': alternateLink!, if (assignedGrade != null) 'assignedGrade': assignedGrade!, + if (assignedRubricGrades != null) + 'assignedRubricGrades': assignedRubricGrades!, if (assignmentSubmission != null) 'assignmentSubmission': assignmentSubmission!, if (associatedWithDeveloper != null) @@ -8688,6 +9380,7 @@ class StudentSubmission { if (courseWorkType != null) 'courseWorkType': courseWorkType!, if (creationTime != null) 'creationTime': creationTime!, if (draftGrade != null) 'draftGrade': draftGrade!, + if (draftRubricGrades != null) 'draftRubricGrades': draftRubricGrades!, if (id != null) 'id': id!, if (late != null) 'late': late!, if (multipleChoiceSubmission != null) diff --git a/generated/googleapis/lib/cloudasset/v1.dart b/generated/googleapis/lib/cloudasset/v1.dart index b52afd42c..e6d75d763 100644 --- a/generated/googleapis/lib/cloudasset/v1.dart +++ b/generated/googleapis/lib/cloudasset/v1.dart @@ -1947,7 +1947,8 @@ class AnalyzeIamPolicyResponse { IamPolicyAnalysis? mainAnalysis; /// The service account impersonation analysis if - /// AnalyzeIamPolicyRequest.analyze_service_account_impersonation is enabled. + /// IamPolicyAnalysisQuery.Options.analyze_service_account_impersonation is + /// enabled. core.List? 
serviceAccountImpersonationAnalysis; AnalyzeIamPolicyResponse({ @@ -2928,7 +2929,7 @@ class EffectiveIamPolicy { class EffectiveTagDetails { /// The /// [full resource name](https://cloud.google.com/asset-inventory/docs/resource-name-format) - /// of the ancestor from which an effective_tag is inherited, according to + /// of the ancestor from which effective_tags are inherited, according to /// [tag inheritance](https://cloud.google.com/resource-manager/docs/tags/tags-overview#inheritance). core.String? attachedResource; @@ -3437,8 +3438,8 @@ class GoogleCloudAssetV1AnalyzeOrgPolicyGovernedAssetsResponseGovernedAsset { governedResource; /// The ordered list of all organization policies from the - /// AnalyzeOrgPoliciesResponse.OrgPolicyResult.consolidated_policy.attached_resource - /// to the scope specified in the request. + /// consolidated_policy.attached_resource to the scope specified in the + /// request. /// /// If the constraint is defined with default policy, it will also appear in /// the list. @@ -3964,7 +3965,7 @@ class GoogleCloudAssetV1GovernedContainer { core.String? parent; /// The ordered list of all organization policies from the - /// AnalyzeOrgPoliciesResponse.OrgPolicyResult.consolidated_policy.attached_resource. + /// consolidated_policy.attached_resource. /// /// to the scope specified in the request. If the constraint is defined with /// default policy, it will also appear in the list. @@ -4884,9 +4885,19 @@ class GoogleIdentityAccesscontextmanagerV1EgressPolicy { /// cause this EgressPolicy to apply. GoogleIdentityAccesscontextmanagerV1EgressTo? egressTo; + /// Human-readable title for the egress rule. + /// + /// The title must be unique within the perimeter and can not exceed 100 + /// characters. Within the access policy, the combined length of all rule + /// titles must not exceed 240,000 characters. + /// + /// Optional. + core.String? title; + GoogleIdentityAccesscontextmanagerV1EgressPolicy({ this.egressFrom, this.egressTo, + this.title, }); GoogleIdentityAccesscontextmanagerV1EgressPolicy.fromJson(core.Map json_) @@ -4899,11 +4910,13 @@ class GoogleIdentityAccesscontextmanagerV1EgressPolicy { ? GoogleIdentityAccesscontextmanagerV1EgressTo.fromJson( json_['egressTo'] as core.Map) : null, + title: json_['title'] as core.String?, ); core.Map toJson() => { if (egressFrom != null) 'egressFrom': egressFrom!, if (egressTo != null) 'egressTo': egressTo!, + if (title != null) 'title': title!, }; } @@ -5053,9 +5066,19 @@ class GoogleIdentityAccesscontextmanagerV1IngressPolicy { /// cause this IngressPolicy to apply. GoogleIdentityAccesscontextmanagerV1IngressTo? ingressTo; + /// Human-readable title for the ingress rule. + /// + /// The title must be unique within the perimeter and can not exceed 100 + /// characters. Within the access policy, the combined length of all rule + /// titles must not exceed 240,000 characters. + /// + /// Optional. + core.String? title; + GoogleIdentityAccesscontextmanagerV1IngressPolicy({ this.ingressFrom, this.ingressTo, + this.title, }); GoogleIdentityAccesscontextmanagerV1IngressPolicy.fromJson(core.Map json_) @@ -5068,11 +5091,13 @@ class GoogleIdentityAccesscontextmanagerV1IngressPolicy { ? 
GoogleIdentityAccesscontextmanagerV1IngressTo.fromJson( json_['ingressTo'] as core.Map) : null, + title: json_['title'] as core.String?, ); core.Map toJson() => { if (ingressFrom != null) 'ingressFrom': ingressFrom!, if (ingressTo != null) 'ingressTo': ingressTo!, + if (title != null) 'title': title!, }; } @@ -5144,6 +5169,14 @@ class GoogleIdentityAccesscontextmanagerV1ServicePerimeter { /// Does not affect behavior. core.String? description; + /// An opaque identifier for the current version of the `ServicePerimeter`. + /// + /// This identifier does not follow any specific format. If an etag is not + /// provided, the operation will be performed as if a valid etag is provided. + /// + /// Optional. + core.String? etag; + /// Identifier. /// /// Resource name for the `ServicePerimeter`. Format: @@ -5200,6 +5233,7 @@ class GoogleIdentityAccesscontextmanagerV1ServicePerimeter { GoogleIdentityAccesscontextmanagerV1ServicePerimeter({ this.description, + this.etag, this.name, this.perimeterType, this.spec, @@ -5211,6 +5245,7 @@ class GoogleIdentityAccesscontextmanagerV1ServicePerimeter { GoogleIdentityAccesscontextmanagerV1ServicePerimeter.fromJson(core.Map json_) : this( description: json_['description'] as core.String?, + etag: json_['etag'] as core.String?, name: json_['name'] as core.String?, perimeterType: json_['perimeterType'] as core.String?, spec: json_.containsKey('spec') @@ -5229,6 +5264,7 @@ class GoogleIdentityAccesscontextmanagerV1ServicePerimeter { core.Map toJson() => { if (description != null) 'description': description!, + if (etag != null) 'etag': etag!, if (name != null) 'name': name!, if (perimeterType != null) 'perimeterType': perimeterType!, if (spec != null) 'spec': spec!, @@ -6360,8 +6396,7 @@ class OrgPolicyResult { /// The consolidated organization policy for the analyzed resource. /// /// The consolidated organization policy is computed by merging and evaluating - /// AnalyzeOrgPoliciesResponse.policy_bundle. The evaluation will respect the - /// organization policy + /// policy_bundle. The evaluation will respect the organization policy /// [hierarchy rules](https://cloud.google.com/resource-manager/docs/organization-policy/understanding-hierarchy). AnalyzerOrgPolicy? consolidatedPolicy; @@ -6380,7 +6415,7 @@ class OrgPolicyResult { core.String? organization; /// The ordered list of all organization policies from the - /// AnalyzeOrgPoliciesResponse.OrgPolicyResult.consolidated_policy.attached_resource. + /// consolidated_policy.attached_resource. /// /// to the scope specified in the request. If the constraint is defined with /// default policy, it will also appear in the list. diff --git a/generated/googleapis/lib/cloudbuild/v1.dart b/generated/googleapis/lib/cloudbuild/v1.dart index c501e182c..e8bd1f2f3 100644 --- a/generated/googleapis/lib/cloudbuild/v1.dart +++ b/generated/googleapis/lib/cloudbuild/v1.dart @@ -194,8 +194,8 @@ class OperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// [request] - The metadata request object. 
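The asset-inventory mirrors of Access Context Manager rules gain plain optional fields: `title` on ingress/egress policies and `etag` on the service perimeter. A small sketch of how they surface on the generated classes; all values below are illustrative placeholders:

```dart
import 'package:googleapis/cloudasset/v1.dart';

final egressRule = GoogleIdentityAccesscontextmanagerV1EgressPolicy(
  // Must be unique within the perimeter and at most 100 characters.
  title: 'allow-backup-export',
);

final perimeter = GoogleIdentityAccesscontextmanagerV1ServicePerimeter(
  name: 'accessPolicies/123/servicePerimeters/example',
  // Opaque version identifier; omitting it behaves as if a valid etag were provided.
  etag: 'BwWWja0YfJA=',
);
```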
/// @@ -2297,8 +2297,8 @@ class ProjectsLocationsOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// [request] - The metadata request object. /// @@ -6991,11 +6991,17 @@ class PrivatePoolV1Config { /// Network configuration for the pool. NetworkConfig? networkConfig; + /// Private Service Connect(PSC) Network configuration for the pool. + /// + /// Immutable. + PrivateServiceConnect? privateServiceConnect; + /// Machine configuration for the workers in the pool. WorkerConfig? workerConfig; PrivatePoolV1Config({ this.networkConfig, + this.privateServiceConnect, this.workerConfig, }); @@ -7005,6 +7011,10 @@ class PrivatePoolV1Config { ? NetworkConfig.fromJson( json_['networkConfig'] as core.Map) : null, + privateServiceConnect: json_.containsKey('privateServiceConnect') + ? PrivateServiceConnect.fromJson(json_['privateServiceConnect'] + as core.Map) + : null, workerConfig: json_.containsKey('workerConfig') ? WorkerConfig.fromJson( json_['workerConfig'] as core.Map) @@ -7013,10 +7023,69 @@ class PrivatePoolV1Config { core.Map toJson() => { if (networkConfig != null) 'networkConfig': networkConfig!, + if (privateServiceConnect != null) + 'privateServiceConnect': privateServiceConnect!, if (workerConfig != null) 'workerConfig': workerConfig!, }; } +/// Defines the Private Service Connect network configuration for the pool. +class PrivateServiceConnect { + /// The network attachment that the worker network interface is peered to. + /// + /// Must be in the format + /// `projects/{project}/regions/{region}/networkAttachments/{networkAttachment}`. + /// The region of network attachment must be the same as the worker pool. See + /// [Network Attachments](https://cloud.google.com/vpc/docs/about-network-attachments) + /// + /// Required. Immutable. + core.String? networkAttachment; + + /// Disable public IP on the primary network interface. + /// + /// If true, workers are created without any public address, which prevents + /// network egress to public IPs unless a network proxy is configured. If + /// false, workers are created with a public address which allows for public + /// internet egress. The public address only applies to traffic through the + /// primary network interface. If `route_all_traffic` is set to true, all + /// traffic will go through the non-primary network interface, this boolean + /// has no effect. + /// + /// Required. Immutable. + core.bool? publicIpAddressDisabled; + + /// Route all traffic through PSC interface. + /// + /// Enable this if you want full control of traffic in the private pool. + /// Configure Cloud NAT for the subnet of network attachment if you need to + /// access public Internet. If false, Only route private IPs, e.g. 10.0.0.0/8, + /// 172.16.0.0/12, and 192.168.0.0/16 through PSC interface. + /// + /// Immutable. + core.bool? 
routeAllTraffic; + + PrivateServiceConnect({ + this.networkAttachment, + this.publicIpAddressDisabled, + this.routeAllTraffic, + }); + + PrivateServiceConnect.fromJson(core.Map json_) + : this( + networkAttachment: json_['networkAttachment'] as core.String?, + publicIpAddressDisabled: + json_['publicIpAddressDisabled'] as core.bool?, + routeAllTraffic: json_['routeAllTraffic'] as core.bool?, + ); + + core.Map toJson() => { + if (networkAttachment != null) 'networkAttachment': networkAttachment!, + if (publicIpAddressDisabled != null) + 'publicIpAddressDisabled': publicIpAddressDisabled!, + if (routeAllTraffic != null) 'routeAllTraffic': routeAllTraffic!, + }; +} + /// PubsubConfig describes the configuration of a trigger that creates a build /// whenever a Pub/Sub message is published. class PubsubConfig { diff --git a/generated/googleapis/lib/cloudbuild/v2.dart b/generated/googleapis/lib/cloudbuild/v2.dart index 09c73dbd0..7d4535528 100644 --- a/generated/googleapis/lib/cloudbuild/v2.dart +++ b/generated/googleapis/lib/cloudbuild/v2.dart @@ -1063,8 +1063,8 @@ class ProjectsLocationsOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// [request] - The metadata request object. /// diff --git a/generated/googleapis/lib/cloudchannel/v1.dart b/generated/googleapis/lib/cloudchannel/v1.dart index 9e4ea8a69..5a58ad6f2 100644 --- a/generated/googleapis/lib/cloudchannel/v1.dart +++ b/generated/googleapis/lib/cloudchannel/v1.dart @@ -34,6 +34,7 @@ /// - [AccountsReportsResource] /// - [AccountsSkuGroupsResource] /// - [AccountsSkuGroupsBillableSkusResource] +/// - [IntegratorsResource] /// - [OperationsResource] /// - [ProductsResource] /// - [ProductsSkusResource] @@ -66,6 +67,7 @@ class CloudchannelApi { final commons.ApiRequester _requester; AccountsResource get accounts => AccountsResource(_requester); + IntegratorsResource get integrators => IntegratorsResource(_requester); OperationsResource get operations => OperationsResource(_requester); ProductsResource get products => ProductsResource(_requester); @@ -147,8 +149,8 @@ class AccountsResource { response_ as core.Map); } - /// Lists service accounts with subscriber privileges on the Cloud Pub/Sub - /// topic created for this Channel Services account. + /// Lists service accounts with subscriber privileges on the Pub/Sub topic + /// created for this Channel Services account or integrator. /// /// Possible error codes: * PERMISSION_DENIED: The reseller account making the /// request and the provided reseller account are different, or the @@ -161,9 +163,13 @@ class AccountsResource { /// /// Request parameters: /// - /// [account] - Optional. Resource name of the account. + /// [account] - Optional. Resource name of the account. Required if integrator + /// is not provided. Otherwise, leave this field empty/unset. /// Value must have pattern `^accounts/\[^/\]+$`. /// + /// [integrator] - Optional. Resource name of the integrator. Required if + /// account is not provided. Otherwise, leave this field empty/unset. + /// /// [pageSize] - Optional. The maximum number of service accounts to return. 
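The new `PrivateServiceConnect` block on `PrivatePoolV1Config` is a plain immutable configuration object. A minimal sketch, with hypothetical project and network attachment names:

```dart
import 'package:googleapis/cloudbuild/v1.dart';

final poolConfig = PrivatePoolV1Config(
  privateServiceConnect: PrivateServiceConnect(
    // Must be in the same region as the worker pool.
    networkAttachment:
        'projects/my-project/regions/us-central1/networkAttachments/my-attachment',
    // No public address on the primary interface; public egress then needs a proxy or NAT.
    publicIpAddressDisabled: true,
    // false: only private ranges (10/8, 172.16/12, 192.168/16) use the PSC interface.
    routeAllTraffic: false,
  ),
);
```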
/// The service may return fewer than this value. If unspecified, returns at /// most 100 service accounts. The maximum value is 1000; the server will @@ -186,11 +192,13 @@ class AccountsResource { /// this method will complete with the same error. async.Future listSubscribers( core.String account, { + core.String? integrator, core.int? pageSize, core.String? pageToken, core.String? $fields, }) async { final queryParams_ = >{ + if (integrator != null) 'integrator': [integrator], if (pageSize != null) 'pageSize': ['${pageSize}'], if (pageToken != null) 'pageToken': [pageToken], if ($fields != null) 'fields': [$fields], @@ -317,8 +325,8 @@ class AccountsResource { response_ as core.Map); } - /// Registers a service account with subscriber privileges on the Cloud - /// Pub/Sub topic for this Channel Services account. + /// Registers a service account with subscriber privileges on the Pub/Sub + /// topic for this Channel Services account or integrator. /// /// After you create a subscriber, you get the events through SubscriberEvent /// Possible error codes: * PERMISSION_DENIED: The reseller account making the @@ -334,7 +342,8 @@ class AccountsResource { /// /// Request parameters: /// - /// [account] - Optional. Resource name of the account. + /// [account] - Optional. Resource name of the account. Required if integrator + /// is not provided. Otherwise, leave this field empty/unset. /// Value must have pattern `^accounts/\[^/\]+$`. /// /// [$fields] - Selector specifying which fields to include in a partial @@ -369,8 +378,8 @@ class AccountsResource { response_ as core.Map); } - /// Unregisters a service account with subscriber privileges on the Cloud - /// Pub/Sub topic created for this Channel Services account. + /// Unregisters a service account with subscriber privileges on the Pub/Sub + /// topic created for this Channel Services account or integrator. /// /// If there are no service accounts left with subscriber privileges, this /// deletes the topic. You can call ListSubscribers to check for these @@ -389,7 +398,8 @@ class AccountsResource { /// /// Request parameters: /// - /// [account] - Optional. Resource name of the account. + /// [account] - Optional. Resource name of the account. Required if integrator + /// is not provided. Otherwise, leave this field empty/unset. /// Value must have pattern `^accounts/\[^/\]+$`. /// /// [$fields] - Selector specifying which fields to include in a partial @@ -3358,8 +3368,8 @@ class AccountsSkuGroupsResource { /// will be coerced to 1000. /// /// [pageToken] - Optional. A token identifying a page of results beyond the - /// first page. Obtained through ListSkuGroups.next_page_token of the previous - /// CloudChannelService.ListSkuGroups call. + /// first page. Obtained through ListSkuGroupsResponse.next_page_token of the + /// previous CloudChannelService.ListSkuGroups call. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. @@ -3424,8 +3434,9 @@ class AccountsSkuGroupsBillableSkusResource { /// coerced to 100000. /// /// [pageToken] - Optional. A token identifying a page of results beyond the - /// first page. Obtained through ListSkuGroupBillableSkus.next_page_token of - /// the previous CloudChannelService.ListSkuGroupBillableSkus call. + /// first page. Obtained through + /// ListSkuGroupBillableSkusResponse.next_page_token of the previous + /// CloudChannelService.ListSkuGroupBillableSkus call. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. 
@@ -3461,6 +3472,192 @@ class AccountsSkuGroupsBillableSkusResource { } } +class IntegratorsResource { + final commons.ApiRequester _requester; + + IntegratorsResource(commons.ApiRequester client) : _requester = client; + + /// Lists service accounts with subscriber privileges on the Pub/Sub topic + /// created for this Channel Services account or integrator. + /// + /// Possible error codes: * PERMISSION_DENIED: The reseller account making the + /// request and the provided reseller account are different, or the + /// impersonated user is not a super admin. * INVALID_ARGUMENT: Required + /// request parameters are missing or invalid. * NOT_FOUND: The topic resource + /// doesn't exist. * INTERNAL: Any non-user error related to a technical issue + /// in the backend. Contact Cloud Channel support. * UNKNOWN: Any non-user + /// error related to a technical issue in the backend. Contact Cloud Channel + /// support. Return value: A list of service email addresses. + /// + /// Request parameters: + /// + /// [integrator] - Optional. Resource name of the integrator. Required if + /// account is not provided. Otherwise, leave this field empty/unset. + /// Value must have pattern `^integrators/\[^/\]+$`. + /// + /// [account] - Optional. Resource name of the account. Required if integrator + /// is not provided. Otherwise, leave this field empty/unset. + /// + /// [pageSize] - Optional. The maximum number of service accounts to return. + /// The service may return fewer than this value. If unspecified, returns at + /// most 100 service accounts. The maximum value is 1000; the server will + /// coerce values above 1000. + /// + /// [pageToken] - Optional. A page token, received from a previous + /// `ListSubscribers` call. Provide this to retrieve the subsequent page. When + /// paginating, all other parameters provided to `ListSubscribers` must match + /// the call that provided the page token. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [GoogleCloudChannelV1ListSubscribersResponse]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future listSubscribers( + core.String integrator, { + core.String? account, + core.int? pageSize, + core.String? pageToken, + core.String? $fields, + }) async { + final queryParams_ = >{ + if (account != null) 'account': [account], + if (pageSize != null) 'pageSize': ['${pageSize}'], + if (pageToken != null) 'pageToken': [pageToken], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = + 'v1/' + core.Uri.encodeFull('$integrator') + ':listSubscribers'; + + final response_ = await _requester.request( + url_, + 'GET', + queryParams: queryParams_, + ); + return GoogleCloudChannelV1ListSubscribersResponse.fromJson( + response_ as core.Map); + } + + /// Registers a service account with subscriber privileges on the Pub/Sub + /// topic for this Channel Services account or integrator. + /// + /// After you create a subscriber, you get the events through SubscriberEvent + /// Possible error codes: * PERMISSION_DENIED: The reseller account making the + /// request and the provided reseller account are different, or the + /// impersonated user is not a super admin. * INVALID_ARGUMENT: Required + /// request parameters are missing or invalid. 
* INTERNAL: Any non-user error + /// related to a technical issue in the backend. Contact Cloud Channel + /// support. * UNKNOWN: Any non-user error related to a technical issue in the + /// backend. Contact Cloud Channel support. Return value: The topic name with + /// the registered service email address. + /// + /// [request] - The metadata request object. + /// + /// Request parameters: + /// + /// [integrator] - Optional. Resource name of the integrator. Required if + /// account is not provided. Otherwise, leave this field empty/unset. + /// Value must have pattern `^integrators/\[^/\]+$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [GoogleCloudChannelV1RegisterSubscriberResponse]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future + registerSubscriber( + GoogleCloudChannelV1RegisterSubscriberRequest request, + core.String integrator, { + core.String? $fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = + 'v1/' + core.Uri.encodeFull('$integrator') + ':registerSubscriber'; + + final response_ = await _requester.request( + url_, + 'POST', + body: body_, + queryParams: queryParams_, + ); + return GoogleCloudChannelV1RegisterSubscriberResponse.fromJson( + response_ as core.Map); + } + + /// Unregisters a service account with subscriber privileges on the Pub/Sub + /// topic created for this Channel Services account or integrator. + /// + /// If there are no service accounts left with subscriber privileges, this + /// deletes the topic. You can call ListSubscribers to check for these + /// accounts. Possible error codes: * PERMISSION_DENIED: The reseller account + /// making the request and the provided reseller account are different, or the + /// impersonated user is not a super admin. * INVALID_ARGUMENT: Required + /// request parameters are missing or invalid. * NOT_FOUND: The topic resource + /// doesn't exist. * INTERNAL: Any non-user error related to a technical issue + /// in the backend. Contact Cloud Channel support. * UNKNOWN: Any non-user + /// error related to a technical issue in the backend. Contact Cloud Channel + /// support. Return value: The topic name that unregistered the service email + /// address. Returns a success response if the service email address wasn't + /// registered with the topic. + /// + /// [request] - The metadata request object. + /// + /// Request parameters: + /// + /// [integrator] - Optional. Resource name of the integrator. Required if + /// account is not provided. Otherwise, leave this field empty/unset. + /// Value must have pattern `^integrators/\[^/\]+$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [GoogleCloudChannelV1UnregisterSubscriberResponse]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future + unregisterSubscriber( + GoogleCloudChannelV1UnregisterSubscriberRequest request, + core.String integrator, { + core.String? 
$fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = + 'v1/' + core.Uri.encodeFull('$integrator') + ':unregisterSubscriber'; + + final response_ = await _requester.request( + url_, + 'POST', + body: body_, + queryParams: queryParams_, + ); + return GoogleCloudChannelV1UnregisterSubscriberResponse.fromJson( + response_ as core.Map); + } +} + class OperationsResource { final commons.ApiRequester _requester; @@ -3474,8 +3671,8 @@ class OperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// [request] - The metadata request object. /// @@ -3980,6 +4177,16 @@ class GoogleCloudChannelV1ChangeOfferRequest { /// Optional. core.List? parameters; + /// Price reference ID for the offer. + /// + /// Optional field only for offers that require additional price information. + /// Used to guarantee that the pricing is consistent between quoting the offer + /// and placing the order. Not yet implemented: if populated in a request, + /// this field isn't evaluated in the API. + /// + /// Optional. + core.String? priceReferenceId; + /// Purchase order id provided by the reseller. /// /// Optional. @@ -4002,6 +4209,7 @@ class GoogleCloudChannelV1ChangeOfferRequest { this.billingAccount, this.offer, this.parameters, + this.priceReferenceId, this.purchaseOrderId, this.requestId, }); @@ -4014,6 +4222,7 @@ class GoogleCloudChannelV1ChangeOfferRequest { ?.map((value) => GoogleCloudChannelV1Parameter.fromJson( value as core.Map)) .toList(), + priceReferenceId: json_['priceReferenceId'] as core.String?, purchaseOrderId: json_['purchaseOrderId'] as core.String?, requestId: json_['requestId'] as core.String?, ); @@ -4022,12 +4231,13 @@ class GoogleCloudChannelV1ChangeOfferRequest { if (billingAccount != null) 'billingAccount': billingAccount!, if (offer != null) 'offer': offer!, if (parameters != null) 'parameters': parameters!, + if (priceReferenceId != null) 'priceReferenceId': priceReferenceId!, if (purchaseOrderId != null) 'purchaseOrderId': purchaseOrderId!, if (requestId != null) 'requestId': requestId!, }; } -/// Request message for CloudChannelService.ChangeParametersRequest. +/// Request message for CloudChannelService.ChangeParameters. class GoogleCloudChannelV1ChangeParametersRequest { /// Entitlement parameters to update. /// @@ -4354,7 +4564,8 @@ class GoogleCloudChannelV1CloudIdentityCustomerAccount { core.bool? existing; /// Returns true if the Cloud Identity account is associated with a customer - /// of the Channel Services partner. + /// of the Channel Services partner (with active subscriptions or purchase + /// consents). core.bool? owned; GoogleCloudChannelV1CloudIdentityCustomerAccount({ @@ -5130,6 +5341,16 @@ class GoogleCloudChannelV1Entitlement { /// the billing subaccount. core.List? parameters; + /// Price reference ID for the offer. + /// + /// Optional field only for offers that require additional price information. + /// Used to guarantee that the pricing is consistent between quoting the offer + /// and placing the order. 
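The new `IntegratorsResource` mirrors the account-level subscriber methods but addresses an `integrators/{integrator}` resource instead. A sketch of registering a Pub/Sub subscriber, assuming an already-authorized `http.Client`; the integrator ID and service account address are placeholders:

```dart
import 'package:googleapis/cloudchannel/v1.dart';
import 'package:http/http.dart' as http;

Future<void> registerIntegratorSubscriber(http.Client authedClient) async {
  final api = CloudchannelApi(authedClient);
  final response = await api.integrators.registerSubscriber(
    GoogleCloudChannelV1RegisterSubscriberRequest(
      serviceAccount: 'events-reader@my-project.iam.gserviceaccount.com',
    ),
    'integrators/my-integrator', // required; leave `account` unset when using this
  );
  print('Registered: ${response.toJson()}');
}
```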
Not yet implemented: if this field is populated in + /// a request, it isn't evaluated in the API. + /// + /// Optional. + core.String? priceReferenceId; + /// Service provisioning details for the entitlement. /// /// Output only. @@ -5177,6 +5398,7 @@ class GoogleCloudChannelV1Entitlement { this.name, this.offer, this.parameters, + this.priceReferenceId, this.provisionedService, this.provisioningState, this.purchaseOrderId, @@ -5205,6 +5427,7 @@ class GoogleCloudChannelV1Entitlement { ?.map((value) => GoogleCloudChannelV1Parameter.fromJson( value as core.Map)) .toList(), + priceReferenceId: json_['priceReferenceId'] as core.String?, provisionedService: json_.containsKey('provisionedService') ? GoogleCloudChannelV1ProvisionedService.fromJson( json_['provisionedService'] @@ -5231,6 +5454,7 @@ class GoogleCloudChannelV1Entitlement { if (name != null) 'name': name!, if (offer != null) 'offer': offer!, if (parameters != null) 'parameters': parameters!, + if (priceReferenceId != null) 'priceReferenceId': priceReferenceId!, if (provisionedService != null) 'provisionedService': provisionedService!, if (provisioningState != null) 'provisioningState': provisioningState!, @@ -5919,7 +6143,7 @@ class GoogleCloudChannelV1ListSkuGroupBillableSkusResponse { /// A token to retrieve the next page of results. /// - /// Pass to ListSkuGroupBillableSkus.page_token to obtain that page. + /// Pass to ListSkuGroupBillableSkusRequest.page_token to obtain that page. core.String? nextPageToken; GoogleCloudChannelV1ListSkuGroupBillableSkusResponse({ @@ -5946,7 +6170,7 @@ class GoogleCloudChannelV1ListSkuGroupBillableSkusResponse { class GoogleCloudChannelV1ListSkuGroupsResponse { /// A token to retrieve the next page of results. /// - /// Pass to ListSkuGroups.page_token to obtain that page. + /// Pass to ListSkuGroupsRequest.page_token to obtain that page. core.String? nextPageToken; /// The list of SKU groups requested. @@ -6969,8 +7193,18 @@ class GoogleCloudChannelV1PurchasableOffer { /// Offer. GoogleCloudChannelV1Offer? offer; + /// Price reference ID for the offer. + /// + /// Optional field only for offers that require additional price information. + /// Used to guarantee that the pricing is consistent between quoting the offer + /// and placing the order. + /// + /// Optional. + core.String? priceReferenceId; + GoogleCloudChannelV1PurchasableOffer({ this.offer, + this.priceReferenceId, }); GoogleCloudChannelV1PurchasableOffer.fromJson(core.Map json_) @@ -6979,10 +7213,12 @@ class GoogleCloudChannelV1PurchasableOffer { ? GoogleCloudChannelV1Offer.fromJson( json_['offer'] as core.Map) : null, + priceReferenceId: json_['priceReferenceId'] as core.String?, ); core.Map toJson() => { if (offer != null) 'offer': offer!, + if (priceReferenceId != null) 'priceReferenceId': priceReferenceId!, }; } @@ -7039,21 +7275,43 @@ class GoogleCloudChannelV1QueryEligibleBillingAccountsResponse { /// Request Message for RegisterSubscriber. class GoogleCloudChannelV1RegisterSubscriberRequest { + /// Resource name of the account. + /// + /// Required if integrator is not provided. Otherwise, leave this field + /// empty/unset. + /// + /// Optional. + core.String? account; + + /// Resource name of the integrator. + /// + /// Required if account is not provided. Otherwise, leave this field + /// empty/unset. + /// + /// Optional. + core.String? integrator; + /// Service account that provides subscriber access to the registered topic. /// /// Required. core.String? 
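`priceReferenceId` is surfaced on quoted offers and accepted on the change-offer request so that the price quoted can be echoed back when placing the order (the docs note the field is not yet evaluated by the API). A sketch of threading it through, assuming `GoogleCloudChannelV1Offer` exposes its resource name via `name` and that `ChangeOfferRequest.offer` takes that name as a string:

```dart
import 'package:googleapis/cloudchannel/v1.dart';

GoogleCloudChannelV1ChangeOfferRequest buildChangeOffer(
    GoogleCloudChannelV1PurchasableOffer quoted) {
  return GoogleCloudChannelV1ChangeOfferRequest(
    offer: quoted.offer?.name, // assumed: offer resource name from the quote
    priceReferenceId: quoted.priceReferenceId,
  );
}
```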
serviceAccount; GoogleCloudChannelV1RegisterSubscriberRequest({ + this.account, + this.integrator, this.serviceAccount, }); GoogleCloudChannelV1RegisterSubscriberRequest.fromJson(core.Map json_) : this( + account: json_['account'] as core.String?, + integrator: json_['integrator'] as core.String?, serviceAccount: json_['serviceAccount'] as core.String?, ); core.Map toJson() => { + if (account != null) 'account': account!, + if (integrator != null) 'integrator': integrator!, if (serviceAccount != null) 'serviceAccount': serviceAccount!, }; } @@ -7843,8 +8101,18 @@ class GoogleCloudChannelV1TransferableOffer { /// Offer with parameter constraints updated to allow the Transfer. GoogleCloudChannelV1Offer? offer; + /// Price reference ID for the offer. + /// + /// Optional field only for offers that require additional price information. + /// Used to guarantee that the pricing is consistent between quoting the offer + /// and placing the order. + /// + /// Optional. + core.String? priceReferenceId; + GoogleCloudChannelV1TransferableOffer({ this.offer, + this.priceReferenceId, }); GoogleCloudChannelV1TransferableOffer.fromJson(core.Map json_) @@ -7853,10 +8121,12 @@ class GoogleCloudChannelV1TransferableOffer { ? GoogleCloudChannelV1Offer.fromJson( json_['offer'] as core.Map) : null, + priceReferenceId: json_['priceReferenceId'] as core.String?, ); core.Map toJson() => { if (offer != null) 'offer': offer!, + if (priceReferenceId != null) 'priceReferenceId': priceReferenceId!, }; } @@ -7940,21 +8210,43 @@ class GoogleCloudChannelV1TrialSettings { /// Request Message for UnregisterSubscriber. class GoogleCloudChannelV1UnregisterSubscriberRequest { + /// Resource name of the account. + /// + /// Required if integrator is not provided. Otherwise, leave this field + /// empty/unset. + /// + /// Optional. + core.String? account; + + /// Resource name of the integrator. + /// + /// Required if account is not provided. Otherwise, leave this field + /// empty/unset. + /// + /// Optional. + core.String? integrator; + /// Service account to unregister from subscriber access to the topic. /// /// Required. core.String? serviceAccount; GoogleCloudChannelV1UnregisterSubscriberRequest({ + this.account, + this.integrator, this.serviceAccount, }); GoogleCloudChannelV1UnregisterSubscriberRequest.fromJson(core.Map json_) : this( + account: json_['account'] as core.String?, + integrator: json_['integrator'] as core.String?, serviceAccount: json_['serviceAccount'] as core.String?, ); core.Map toJson() => { + if (account != null) 'account': account!, + if (integrator != null) 'integrator': integrator!, if (serviceAccount != null) 'serviceAccount': serviceAccount!, }; } @@ -8323,9 +8615,10 @@ class GoogleTypeDateTime { /// A representation of a decimal value, such as 2.5. /// /// Clients may convert values into language-native decimal formats, such as -/// Java's BigDecimal or Python's decimal.Decimal. \[BigDecimal\]: -/// https://docs.oracle.com/en/java/javase/11/docs/api/java.base/java/math/BigDecimal.html -/// \[decimal.Decimal\]: https://docs.python.org/3/library/decimal.html +/// Java's +/// [BigDecimal](https://docs.oracle.com/en/java/javase/11/docs/api/java.base/java/math/BigDecimal.html) +/// or Python's +/// [decimal.Decimal](https://docs.python.org/3/library/decimal.html). class GoogleTypeDecimal { /// The decimal value, as a string. /// @@ -8385,19 +8678,19 @@ class GoogleTypeDecimal { /// Represents an amount of money with its currency type. 
typedef GoogleTypeMoney = $Money; -/// Represents a postal address, e.g. for postal delivery or payments addresses. +/// Represents a postal address. /// -/// Given a postal address, a postal service can deliver items to a premise, -/// P.O. Box or similar. It is not intended to model geographical locations -/// (roads, towns, mountains). In typical usage an address would be created via -/// user input or from importing existing data, depending on the type of -/// process. Advice on address input / editing: - Use an -/// internationalization-ready address widget such as -/// https://github.com/google/libaddressinput) - Users should not be presented -/// with UI elements for input or editing of fields outside countries where that -/// field is used. For more guidance on how to use this schema, please see: +/// For example for postal delivery or payments addresses. Given a postal +/// address, a postal service can deliver items to a premise, P.O. Box or +/// similar. It is not intended to model geographical locations (roads, towns, +/// mountains). In typical usage an address would be created by user input or +/// from importing existing data, depending on the type of process. Advice on +/// address input / editing: - Use an internationalization-ready address widget +/// such as https://github.com/google/libaddressinput) - Users should not be +/// presented with UI elements for input or editing of fields outside countries +/// where that field is used. For more guidance on how to use this schema, see: /// https://support.google.com/business/answer/6397478 -typedef GoogleTypePostalAddress = $PostalAddress; +typedef GoogleTypePostalAddress = $PostalAddress00; /// Represents a time zone from the /// [IANA Time Zone Database](https://www.iana.org/time-zones). diff --git a/generated/googleapis/lib/cloudcontrolspartner/v1.dart b/generated/googleapis/lib/cloudcontrolspartner/v1.dart index e02051d5d..f2d218738 100644 --- a/generated/googleapis/lib/cloudcontrolspartner/v1.dart +++ b/generated/googleapis/lib/cloudcontrolspartner/v1.dart @@ -32,6 +32,7 @@ library; import 'dart:async' as async; +import 'dart:convert' as convert; import 'dart:core' as core; import 'package:_discoveryapis_commons/_discoveryapis_commons.dart' as commons; @@ -127,6 +128,90 @@ class OrganizationsLocationsCustomersResource { OrganizationsLocationsCustomersResource(commons.ApiRequester client) : _requester = client; + /// Creates a new customer. + /// + /// [request] - The metadata request object. + /// + /// Request parameters: + /// + /// [parent] - Required. Parent resource Format: + /// `organizations/{organization}/locations/{location}` + /// Value must have pattern `^organizations/\[^/\]+/locations/\[^/\]+$`. + /// + /// [customerId] - Required. The customer id to use for the customer, which + /// will become the final component of the customer's resource name. The + /// specified value must be a valid Google cloud organization id. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [Customer]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future create( + Customer request, + core.String parent, { + core.String? customerId, + core.String? 
$fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if (customerId != null) 'customerId': [customerId], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/customers'; + + final response_ = await _requester.request( + url_, + 'POST', + body: body_, + queryParams: queryParams_, + ); + return Customer.fromJson(response_ as core.Map); + } + + /// Delete details of a single customer + /// + /// Request parameters: + /// + /// [name] - Required. name of the resource to be deleted format: + /// name=organizations / * /locations / * /customers / * + /// Value must have pattern + /// `^organizations/\[^/\]+/locations/\[^/\]+/customers/\[^/\]+$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [Empty]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future delete( + core.String name, { + core.String? $fields, + }) async { + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name'); + + final response_ = await _requester.request( + url_, + 'DELETE', + queryParams: queryParams_, + ); + return Empty.fromJson(response_ as core.Map); + } + /// Gets details of a single customer /// /// Request parameters: @@ -220,6 +305,52 @@ class OrganizationsLocationsCustomersResource { return ListCustomersResponse.fromJson( response_ as core.Map); } + + /// Update details of a single customer + /// + /// [request] - The metadata request object. + /// + /// Request parameters: + /// + /// [name] - Identifier. Format: + /// `organizations/{organization}/locations/{location}/customers/{customer}` + /// Value must have pattern + /// `^organizations/\[^/\]+/locations/\[^/\]+/customers/\[^/\]+$`. + /// + /// [updateMask] - Optional. The list of fields to update + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [Customer]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future patch( + Customer request, + core.String name, { + core.String? updateMask, + core.String? $fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if (updateMask != null) 'updateMask': [updateMask], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name'); + + final response_ = await _requester.request( + url_, + 'PATCH', + body: body_, + queryParams: queryParams_, + ); + return Customer.fromJson(response_ as core.Map); + } } class OrganizationsLocationsCustomersWorkloadsResource { @@ -752,11 +883,20 @@ class Customer { /// `organizations/{organization}/locations/{location}/customers/{customer}` core.String? name; + /// The customer organization domain, extracted from CRM Organization’s + /// display_name field. + /// + /// e.g. "google.com" + /// + /// Output only. + core.String? 
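Customer management in `cloudcontrolspartner/v1` gains `create`, `patch`, and `delete`. A sketch of creating a customer against the new methods; the parent, ID, and display name are placeholders, and the resource is assumed to be reached through the generated client's `organizations.locations.customers` getter chain:

```dart
import 'package:googleapis/cloudcontrolspartner/v1.dart';

Future<Customer> createExampleCustomer(
    OrganizationsLocationsCustomersResource customers) async {
  return customers.create(
    Customer(displayName: 'Example Corp'),
    'organizations/123456/locations/us-central1',
    // Becomes the final path segment; must be a valid Google Cloud
    // organization id (placeholder value here).
    customerId: '987654321',
  );
}
```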
organizationDomain; + Customer({ this.customerOnboardingState, this.displayName, this.isOnboarded, this.name, + this.organizationDomain, }); Customer.fromJson(core.Map json_) @@ -769,6 +909,7 @@ class Customer { displayName: json_['displayName'] as core.String?, isOnboarded: json_['isOnboarded'] as core.bool?, name: json_['name'] as core.String?, + organizationDomain: json_['organizationDomain'] as core.String?, ); core.Map toJson() => { @@ -777,6 +918,8 @@ class Customer { if (displayName != null) 'displayName': displayName!, if (isOnboarded != null) 'isOnboarded': isOnboarded!, if (name != null) 'name': name!, + if (organizationDomain != null) + 'organizationDomain': organizationDomain!, }; } @@ -957,6 +1100,14 @@ class EkmMetadata { }; } +/// A generic empty message that you can re-use to avoid defining duplicated +/// empty messages in your APIs. +/// +/// A typical example is to use it as the request or the response type of an API +/// method. For instance: service Foo { rpc Bar(google.protobuf.Empty) returns +/// (google.protobuf.Empty); } +typedef Empty = $Empty; + /// Remediation instructions to resolve violation via gcloud cli typedef Gcloud = $Gcloud; diff --git a/generated/googleapis/lib/clouddeploy/v1.dart b/generated/googleapis/lib/clouddeploy/v1.dart index 95c4781e1..1edd752e0 100644 --- a/generated/googleapis/lib/clouddeploy/v1.dart +++ b/generated/googleapis/lib/clouddeploy/v1.dart @@ -2747,8 +2747,8 @@ class ProjectsLocationsOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// [request] - The metadata request object. /// @@ -3979,10 +3979,17 @@ class AutomationRule { /// Optional. RepairRolloutRule? repairRolloutRule; + /// The `TimedPromoteReleaseRule` will automatically promote a release from + /// the current target(s) to the specified target(s) on a configured schedule. + /// + /// Optional. + TimedPromoteReleaseRule? timedPromoteReleaseRule; + AutomationRule({ this.advanceRolloutRule, this.promoteReleaseRule, this.repairRolloutRule, + this.timedPromoteReleaseRule, }); AutomationRule.fromJson(core.Map json_) @@ -3999,6 +4006,11 @@ class AutomationRule { ? RepairRolloutRule.fromJson(json_['repairRolloutRule'] as core.Map) : null, + timedPromoteReleaseRule: json_.containsKey('timedPromoteReleaseRule') + ? TimedPromoteReleaseRule.fromJson( + json_['timedPromoteReleaseRule'] + as core.Map) + : null, ); core.Map toJson() => { @@ -4007,6 +4019,8 @@ class AutomationRule { if (promoteReleaseRule != null) 'promoteReleaseRule': promoteReleaseRule!, if (repairRolloutRule != null) 'repairRolloutRule': repairRolloutRule!, + if (timedPromoteReleaseRule != null) + 'timedPromoteReleaseRule': timedPromoteReleaseRule!, }; } @@ -4018,8 +4032,15 @@ class AutomationRuleCondition { /// Optional. TargetsPresentCondition? targetsPresentCondition; + /// TimedPromoteReleaseCondition contains rule conditions specific to a an + /// Automation with a timed promote release rule defined. + /// + /// Optional. + TimedPromoteReleaseCondition? 
timedPromoteReleaseCondition; + AutomationRuleCondition({ this.targetsPresentCondition, + this.timedPromoteReleaseCondition, }); AutomationRuleCondition.fromJson(core.Map json_) @@ -4029,11 +4050,19 @@ class AutomationRuleCondition { json_['targetsPresentCondition'] as core.Map) : null, + timedPromoteReleaseCondition: + json_.containsKey('timedPromoteReleaseCondition') + ? TimedPromoteReleaseCondition.fromJson( + json_['timedPromoteReleaseCondition'] + as core.Map) + : null, ); core.Map toJson() => { if (targetsPresentCondition != null) 'targetsPresentCondition': targetsPresentCondition!, + if (timedPromoteReleaseCondition != null) + 'timedPromoteReleaseCondition': timedPromoteReleaseCondition!, }; } @@ -4132,14 +4161,19 @@ class AutomationRun { /// Output only. core.String? stateDescription; - /// The ID of the target that represents the promotion stage that initiates - /// the `AutomationRun`. + /// The ID of the source target that initiates the `AutomationRun`. /// /// The value of this field is the last segment of a target name. /// /// Output only. core.String? targetId; + /// Promotes a release to a specified 'Target' as defined in a Timed Promote + /// Release rule. + /// + /// Output only. + TimedPromoteReleaseOperation? timedPromoteReleaseOperation; + /// Time at which the automationRun was updated. /// /// Output only. @@ -4168,6 +4202,7 @@ class AutomationRun { this.state, this.stateDescription, this.targetId, + this.timedPromoteReleaseOperation, this.updateTime, this.waitUntilTime, }); @@ -4206,6 +4241,12 @@ class AutomationRun { state: json_['state'] as core.String?, stateDescription: json_['stateDescription'] as core.String?, targetId: json_['targetId'] as core.String?, + timedPromoteReleaseOperation: + json_.containsKey('timedPromoteReleaseOperation') + ? TimedPromoteReleaseOperation.fromJson( + json_['timedPromoteReleaseOperation'] + as core.Map) + : null, updateTime: json_['updateTime'] as core.String?, waitUntilTime: json_['waitUntilTime'] as core.String?, ); @@ -4230,6 +4271,8 @@ class AutomationRun { if (state != null) 'state': state!, if (stateDescription != null) 'stateDescription': stateDescription!, if (targetId != null) 'targetId': targetId!, + if (timedPromoteReleaseOperation != null) + 'timedPromoteReleaseOperation': timedPromoteReleaseOperation!, if (updateTime != null) 'updateTime': updateTime!, if (waitUntilTime != null) 'waitUntilTime': waitUntilTime!, }; @@ -7907,6 +7950,8 @@ class Release { /// Not all versions are valid; Cloud Deploy supports a specific set of /// versions. If unset, the most recent supported Skaffold version will be /// used. + /// + /// Optional. core.String? skaffoldVersion; /// Map from target ID to the target artifacts created during the render @@ -10152,6 +10197,36 @@ class TargetRender { }; } +/// The targets involved in a single timed promotion. +class Targets { + /// The destination target ID. + /// + /// Optional. + core.String? destinationTargetId; + + /// The source target ID. + /// + /// Optional. + core.String? 
sourceTargetId; + + Targets({ + this.destinationTargetId, + this.sourceTargetId, + }); + + Targets.fromJson(core.Map json_) + : this( + destinationTargetId: json_['destinationTargetId'] as core.String?, + sourceTargetId: json_['sourceTargetId'] as core.String?, + ); + + core.Map toJson() => { + if (destinationTargetId != null) + 'destinationTargetId': destinationTargetId!, + if (sourceTargetId != null) 'sourceTargetId': sourceTargetId!, + }; +} + /// `TargetsPresentCondition` contains information on any Targets referenced in /// the Delivery Pipeline that do not actually exist. class TargetsPresentCondition { @@ -10286,6 +10361,158 @@ class TimeWindows { }; } +/// `TimedPromoteReleaseCondition` contains conditions specific to an Automation +/// with a Timed Promote Release rule defined. +class TimedPromoteReleaseCondition { + /// When the next scheduled promotion(s) will occur. + /// + /// Output only. + core.String? nextPromotionTime; + + /// A list of targets involved in the upcoming timed promotion(s). + /// + /// Output only. + core.List? targetsList; + + TimedPromoteReleaseCondition({ + this.nextPromotionTime, + this.targetsList, + }); + + TimedPromoteReleaseCondition.fromJson(core.Map json_) + : this( + nextPromotionTime: json_['nextPromotionTime'] as core.String?, + targetsList: (json_['targetsList'] as core.List?) + ?.map((value) => Targets.fromJson( + value as core.Map)) + .toList(), + ); + + core.Map toJson() => { + if (nextPromotionTime != null) 'nextPromotionTime': nextPromotionTime!, + if (targetsList != null) 'targetsList': targetsList!, + }; +} + +/// Contains the information of an automated timed promote-release operation. +class TimedPromoteReleaseOperation { + /// The starting phase of the rollout created by this operation. + /// + /// Output only. + core.String? phase; + + /// The name of the release to be promoted. + /// + /// Output only. + core.String? release; + + /// The ID of the target that represents the promotion stage to which the + /// release will be promoted. + /// + /// The value of this field is the last segment of a target name. + /// + /// Output only. + core.String? targetId; + + TimedPromoteReleaseOperation({ + this.phase, + this.release, + this.targetId, + }); + + TimedPromoteReleaseOperation.fromJson(core.Map json_) + : this( + phase: json_['phase'] as core.String?, + release: json_['release'] as core.String?, + targetId: json_['targetId'] as core.String?, + ); + + core.Map toJson() => { + if (phase != null) 'phase': phase!, + if (release != null) 'release': release!, + if (targetId != null) 'targetId': targetId!, + }; +} + +/// The `TimedPromoteReleaseRule` will automatically promote a release from the +/// current target(s) to the specified target(s) on a configured schedule. +class TimedPromoteReleaseRule { + /// Information around the state of the Automation rule. + /// + /// Output only. + AutomationRuleCondition? condition; + + /// The starting phase of the rollout created by this rule. + /// + /// Default to the first phase. + /// + /// Optional. + core.String? destinationPhase; + + /// The ID of the stage in the pipeline to which this `Release` is deploying. + /// + /// If unspecified, default it to the next stage in the promotion flow. The + /// value of this field could be one of the following: * The last segment of a + /// target name * "@next", the next target in the promotion sequence + /// + /// Optional. + core.String? destinationTargetId; + + /// ID of the rule. 
+ /// + /// This ID must be unique in the `Automation` resource to which this rule + /// belongs. The format is `[a-z]([a-z0-9-]{0,61}[a-z0-9])?`. + /// + /// Required. + core.String? id; + + /// Schedule in crontab format. + /// + /// e.g. "0 9 * * 1" for every Monday at 9am. + /// + /// Required. + core.String? schedule; + + /// The time zone in IANA format + /// [IANA Time Zone Database](https://www.iana.org/time-zones) (e.g. + /// America/New_York). + /// + /// Required. + core.String? timeZone; + + TimedPromoteReleaseRule({ + this.condition, + this.destinationPhase, + this.destinationTargetId, + this.id, + this.schedule, + this.timeZone, + }); + + TimedPromoteReleaseRule.fromJson(core.Map json_) + : this( + condition: json_.containsKey('condition') + ? AutomationRuleCondition.fromJson( + json_['condition'] as core.Map) + : null, + destinationPhase: json_['destinationPhase'] as core.String?, + destinationTargetId: json_['destinationTargetId'] as core.String?, + id: json_['id'] as core.String?, + schedule: json_['schedule'] as core.String?, + timeZone: json_['timeZone'] as core.String?, + ); + + core.Map toJson() => { + if (condition != null) 'condition': condition!, + if (destinationPhase != null) 'destinationPhase': destinationPhase!, + if (destinationTargetId != null) + 'destinationTargetId': destinationTargetId!, + if (id != null) 'id': id!, + if (schedule != null) 'schedule': schedule!, + if (timeZone != null) 'timeZone': timeZone!, + }; +} + /// A verify Job. typedef VerifyJob = $Empty; diff --git a/generated/googleapis/lib/cloudfunctions/v2.dart b/generated/googleapis/lib/cloudfunctions/v2.dart index 9c47a6d58..d2d79e288 100644 --- a/generated/googleapis/lib/cloudfunctions/v2.dart +++ b/generated/googleapis/lib/cloudfunctions/v2.dart @@ -1229,8 +1229,7 @@ class BuildConfig { /// managed encryption key. Otherwise, GCF will create and use a repository /// named 'gcf-artifacts' for every deployed region. It must match the pattern /// `projects/{project}/locations/{location}/repositories/{repository}`. - /// Cross-project repositories are not supported. Cross-location repositories - /// are not supported. Repository format must be 'DOCKER'. + /// Repository format must be 'DOCKER'. core.String? dockerRepository; /// The name of the function (as defined in source code) that will be @@ -2883,7 +2882,9 @@ typedef TestIamPermissionsRequest = $TestIamPermissionsRequest00; typedef TestIamPermissionsResponse = $PermissionsResponse; /// Information related to: * A function's eligibility for 1st Gen to 2nd Gen -/// migration * Current state of migration for function undergoing migration. +/// migration and 2nd Gen to CRf detach. +/// +/// * Current state of migration for function undergoing migration/detach. class UpgradeInfo { /// Describes the Build step of the function that builds a container to /// prepare for 2nd gen upgrade. @@ -2921,6 +2922,8 @@ class UpgradeInfo { /// RollbackFunctionUpgradeTraffic API was un-successful. /// - "COMMIT_FUNCTION_UPGRADE_ERROR" : CommitFunctionUpgrade API was /// un-successful. + /// - "DETACH_IN_PROGRESS" : Function is requested to be detached from 2nd Gen + /// to CRf. core.String? 
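A `TimedPromoteReleaseRule` is attached to an Automation through `AutomationRule`, alongside the existing promote/advance/repair rules. A minimal sketch with placeholder IDs:

```dart
import 'package:googleapis/clouddeploy/v1.dart';

final rule = AutomationRule(
  timedPromoteReleaseRule: TimedPromoteReleaseRule(
    id: 'nightly-promote', // must be unique within the Automation
    schedule: '0 9 * * 1', // crontab: every Monday at 09:00
    timeZone: 'America/New_York', // IANA time zone name
    destinationTargetId: '@next', // or the last segment of a target name
    destinationPhase: 'stable', // optional; defaults to the first phase
  ),
);
```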
upgradeState; UpgradeInfo({ diff --git a/generated/googleapis/lib/cloudidentity/v1.dart b/generated/googleapis/lib/cloudidentity/v1.dart index 424b6e394..12b10f1a0 100644 --- a/generated/googleapis/lib/cloudidentity/v1.dart +++ b/generated/googleapis/lib/cloudidentity/v1.dart @@ -30,6 +30,7 @@ /// - [InboundSamlSsoProfilesResource] /// - [InboundSamlSsoProfilesIdpCredentialsResource] /// - [InboundSsoAssignmentsResource] +/// - [PoliciesResource] library; import 'dart:async' as async; @@ -70,6 +71,24 @@ class CloudIdentityApi { static const cloudIdentityGroupsReadonlyScope = 'https://www.googleapis.com/auth/cloud-identity.groups.readonly'; + /// See and edit all of the Inbound SSO profiles and their assignments to any + /// Org Units or Google Groups in your Cloud Identity Organization. + static const cloudIdentityInboundssoScope = + 'https://www.googleapis.com/auth/cloud-identity.inboundsso'; + + /// See all of the Inbound SSO profiles and their assignments to any Org Units + /// or Google Groups in your Cloud Identity Organization. + static const cloudIdentityInboundssoReadonlyScope = + 'https://www.googleapis.com/auth/cloud-identity.inboundsso.readonly'; + + /// See and edit policies in your Cloud Identity Organization. + static const cloudIdentityPoliciesScope = + 'https://www.googleapis.com/auth/cloud-identity.policies'; + + /// See policies in your Cloud Identity Organization. + static const cloudIdentityPoliciesReadonlyScope = + 'https://www.googleapis.com/auth/cloud-identity.policies.readonly'; + /// See, edit, configure, and delete your Google Cloud data and see the email /// address for your Google Account. static const cloudPlatformScope = @@ -84,6 +103,7 @@ class CloudIdentityApi { InboundSamlSsoProfilesResource(_requester); InboundSsoAssignmentsResource get inboundSsoAssignments => InboundSsoAssignmentsResource(_requester); + PoliciesResource get policies => PoliciesResource(_requester); CloudIdentityApi(http.Client client, {core.String rootUrl = 'https://cloudidentity.googleapis.com/', @@ -2230,7 +2250,7 @@ class GroupsMembershipsResource { /// /// [pageSize] - The default page size is 200 (max 1000). /// - /// [pageToken] - The next_page_token value returned from a previous list + /// [pageToken] - The `next_page_token` value returned from a previous list /// request, if any /// /// [query] - Required. A CEL expression that MUST include member @@ -2300,7 +2320,7 @@ class GroupsMembershipsResource { /// /// [pageSize] - The default page size is 200 (max 1000). /// - /// [pageToken] - The next_page_token value returned from a previous list + /// [pageToken] - The `next_page_token` value returned from a previous list /// request, if any. /// /// [query] - Required. A CEL expression that MUST include member @@ -2313,7 +2333,7 @@ class GroupsMembershipsResource { /// operators on the parent of the group restricting the search within a /// particular customer, e.g. `parent == 'customers/{customer_id}'`. The /// `customer_id` must begin with "C" (for example, 'C046psxkn'). This - /// filtering is only supported for Admins with groups read permissons on the + /// filtering is only supported for Admins with groups read permissions on the /// input customer. Example query: `member_key_id == 'member_key_id_value' && /// in labels && parent == 'customers/C046psxkn'` /// @@ -2373,7 +2393,7 @@ class GroupsMembershipsResource { /// /// [pageSize] - The default page size is 200 (max 1000). 
/// - /// [pageToken] - The next_page_token value returned from a previous list + /// [pageToken] - The `next_page_token` value returned from a previous list /// request, if any. /// /// [$fields] - Selector specifying which fields to include in a partial @@ -3065,6 +3085,109 @@ class InboundSsoAssignmentsResource { } } +class PoliciesResource { + final commons.ApiRequester _requester; + + PoliciesResource(commons.ApiRequester client) : _requester = client; + + /// Get a Policy + /// + /// Request parameters: + /// + /// [name] - Required. The name of the policy to retrieve. Format: + /// "policies/{policy}". + /// Value must have pattern `^policies/\[^/\]+$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [Policy]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future get( + core.String name, { + core.String? $fields, + }) async { + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name'); + + final response_ = await _requester.request( + url_, + 'GET', + queryParams: queryParams_, + ); + return Policy.fromJson(response_ as core.Map); + } + + /// List Policies + /// + /// Request parameters: + /// + /// [filter] - Optional. A CEL expression for filtering the results. Policies + /// can be filtered by application with this expression: setting.name = + /// 'settings/gmail.*' Policies can be filtered by setting type with this + /// expression: setting.name = '*.service_status' A maximum of one of the + /// above setting.name clauses can be used. Policies can be filtered by + /// customer with this expression: customer = "customers/{customer}" Where + /// `customer` is the `id` from the \[Admin SDK `Customer` + /// resource\](https://developers.google.com/admin-sdk/directory/reference/rest/v1/customers). + /// You may use `customers/my_customer` to specify your own organization. When + /// no customer is mentioned it will be default to customers/my_customer. A + /// maximum of one customer clause can be used. The above clauses can only be + /// combined together in a single filter expression with the `&&` operator. + /// + /// [pageSize] - Optional. The maximum number of results to return. The + /// service can return fewer than this number. If omitted or set to 0, the + /// default is 50 results per page. The maximum allowed value is 100. + /// `page_size` values greater than 100 default to 100. + /// + /// [pageToken] - Optional. The pagination token received from a prior call to + /// PoliciesService.ListPolicies to retrieve the next page of results. When + /// paginating, all other parameters provided to `ListPoliciesRequest` must + /// match the call that provided the page token. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [ListPoliciesResponse]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future list({ + core.String? filter, + core.int? pageSize, + core.String? pageToken, + core.String? 
$fields, + }) async { + final queryParams_ = >{ + if (filter != null) 'filter': [filter], + if (pageSize != null) 'pageSize': ['${pageSize}'], + if (pageToken != null) 'pageToken': [pageToken], + if ($fields != null) 'fields': [$fields], + }; + + const url_ = 'v1/policies'; + + final response_ = await _requester.request( + url_, + 'GET', + queryParams: queryParams_, + ); + return ListPoliciesResponse.fromJson( + response_ as core.Map); + } +} + /// The request for creating an IdpCredential with its associated payload. /// /// An InboundSamlSsoProfile can own up to 2 credentials. @@ -4070,6 +4193,10 @@ class GoogleAppsCloudidentityDevicesV1Device { /// the Device in format: `devices/{device}`, where device is the unique id /// assigned to the Device. /// + /// Important: Device API scopes require that you use domain-wide delegation + /// to access the API. For more information, see + /// [Set up the Devices API](https://cloud.google.com/identity/docs/how-to/setup-devices). + /// /// Output only. core.String? name; @@ -5189,6 +5316,36 @@ class ListMembershipsResponse { }; } +/// The response message for PoliciesService.ListPolicies. +class ListPoliciesResponse { + /// The pagination token to retrieve the next page of results. + /// + /// If this field is empty, there are no subsequent pages. + core.String? nextPageToken; + + /// The results + core.List? policies; + + ListPoliciesResponse({ + this.nextPageToken, + this.policies, + }); + + ListPoliciesResponse.fromJson(core.Map json_) + : this( + nextPageToken: json_['nextPageToken'] as core.String?, + policies: (json_['policies'] as core.List?) + ?.map((value) => + Policy.fromJson(value as core.Map)) + .toList(), + ); + + core.Map toJson() => { + if (nextPageToken != null) 'nextPageToken': nextPageToken!, + if (policies != null) 'policies': policies!, + }; +} + /// Response message for UserInvitation listing request. class ListUserInvitationsResponse { /// The token for the next page. @@ -5756,6 +5913,148 @@ class Operation { }; } +/// A Policy resource binds an instance of a single Setting with the scope of a +/// PolicyQuery. +/// +/// The Setting instance will be applied to all entities that satisfy the query. +class Policy { + /// Customer that the Policy belongs to. + /// + /// The value is in the format 'customers/{customerId}'. The `customerId` must + /// begin with "C" To find your customer ID in Admin Console see + /// https://support.google.com/a/answer/10070793. + /// + /// Immutable. + core.String? customer; + + /// Identifier. + /// + /// The [resource name](https://cloud.google.com/apis/design/resource_names) + /// of the Policy. Format: policies/{policy}. + /// + /// Output only. + core.String? name; + + /// The PolicyQuery the Setting applies to. + /// + /// Required. + PolicyQuery? policyQuery; + + /// The Setting configured by this Policy. + /// + /// Required. + Setting? setting; + + /// The type of the policy. + /// + /// Output only. + /// Possible string values are: + /// - "POLICY_TYPE_UNSPECIFIED" : Unspecified policy type. + /// - "SYSTEM" : Policy type denoting the system-configured policies. + /// - "ADMIN" : Policy type denoting the admin-configurable policies. + core.String? type; + + Policy({ + this.customer, + this.name, + this.policyQuery, + this.setting, + this.type, + }); + + Policy.fromJson(core.Map json_) + : this( + customer: json_['customer'] as core.String?, + name: json_['name'] as core.String?, + policyQuery: json_.containsKey('policyQuery') + ? 
PolicyQuery.fromJson( + json_['policyQuery'] as core.Map) + : null, + setting: json_.containsKey('setting') + ? Setting.fromJson( + json_['setting'] as core.Map) + : null, + type: json_['type'] as core.String?, + ); + + core.Map toJson() => { + if (customer != null) 'customer': customer!, + if (name != null) 'name': name!, + if (policyQuery != null) 'policyQuery': policyQuery!, + if (setting != null) 'setting': setting!, + if (type != null) 'type': type!, + }; +} + +/// PolicyQuery +class PolicyQuery { + /// The group that the query applies to. + /// + /// This field is only set if there is a single value for group that satisfies + /// all clauses of the query. If no group applies, this will be the empty + /// string. + /// + /// Immutable. + core.String? group; + + /// Non-empty default. + /// + /// The OrgUnit the query applies to. This field is only set if there is a + /// single value for org_unit that satisfies all clauses of the query. + /// + /// Required. Immutable. + core.String? orgUnit; + + /// The CEL query that defines which entities the Policy applies to (ex. + /// + /// a User entity). For details about CEL see + /// https://opensource.google.com/projects/cel. The OrgUnits the Policy + /// applies to are represented by a clause like so: + /// entity.org_units.exists(org_unit, org_unit.org_unit_id == + /// orgUnitId('{orgUnitId}')) The Group the Policy applies to are represented + /// by a clause like so: entity.groups.exists(group, group.group_id == + /// groupId('{groupId}')) The Licenses the Policy applies to are represented + /// by a clause like so: entity.licenses.exists(license, license in + /// \['/product/{productId}/sku/{skuId}'\]) The above clauses can be present + /// in any combination, and used in conjunction with the &&, || and ! + /// operators. The org_unit and group fields below are helper fields that + /// contain the corresponding value(s) as the query to make the query easier + /// to use. + /// + /// Immutable. + core.String? query; + + /// The decimal sort order of this PolicyQuery. + /// + /// The value is relative to all other policies with the same setting type for + /// the customer. (There are no duplicates within this set). + /// + /// Output only. + core.double? sortOrder; + + PolicyQuery({ + this.group, + this.orgUnit, + this.query, + this.sortOrder, + }); + + PolicyQuery.fromJson(core.Map json_) + : this( + group: json_['group'] as core.String?, + orgUnit: json_['orgUnit'] as core.String?, + query: json_['query'] as core.String?, + sortOrder: (json_['sortOrder'] as core.num?)?.toDouble(), + ); + + core.Map toJson() => { + if (group != null) 'group': group!, + if (orgUnit != null) 'orgUnit': orgUnit!, + if (query != null) 'query': query!, + if (sortOrder != null) 'sortOrder': sortOrder!, + }; +} + /// The evaluated state of this restriction. class RestrictionEvaluation { /// The current state of the restriction @@ -6098,6 +6397,42 @@ class SecuritySettings { /// UserInvitation. typedef SendUserInvitationRequest = $Empty; +/// Setting +class Setting { + /// The type of the Setting. + /// + /// . + /// + /// Required. Immutable. + core.String? type; + + /// The value of the Setting. + /// + /// Required. + /// + /// The values for Object must be JSON objects. It can consist of `num`, + /// `String`, `bool` and `null` as well as `Map` and `List` values. + core.Map? value; + + Setting({ + this.type, + this.value, + }); + + Setting.fromJson(core.Map json_) + : this( + type: json_['type'] as core.String?, + value: json_.containsKey('value') + ? 
json_['value'] as core.Map + : null, + ); + + core.Map toJson() => { + if (type != null) 'type': type!, + if (value != null) 'value': value!, + }; +} + /// Controls sign-in behavior. class SignInBehavior { /// When to redirect sign-ins to the IdP. diff --git a/generated/googleapis/lib/cloudkms/v1.dart b/generated/googleapis/lib/cloudkms/v1.dart index 420f0b69e..46c60e1b5 100644 --- a/generated/googleapis/lib/cloudkms/v1.dart +++ b/generated/googleapis/lib/cloudkms/v1.dart @@ -5644,7 +5644,7 @@ class MacVerifyRequest { /// received MacVerifyRequest.mac using this checksum. KeyManagementService /// will report an error if the checksum verification fails. If you receive a /// checksum error, your client should verify that - /// CRC32C(MacVerifyRequest.tag) is equal to MacVerifyRequest.mac_crc32c, and + /// CRC32C(MacVerifyRequest.mac) is equal to MacVerifyRequest.mac_crc32c, and /// if so, perform a limited number of retries. A persistent mismatch may /// indicate an issue in your computation of the CRC32C checksum. Note: This /// field is defined as int64 for reasons of compatibility across different diff --git a/generated/googleapis/lib/cloudscheduler/v1.dart b/generated/googleapis/lib/cloudscheduler/v1.dart index 3fe1ee6c5..9b6d8e34d 100644 --- a/generated/googleapis/lib/cloudscheduler/v1.dart +++ b/generated/googleapis/lib/cloudscheduler/v1.dart @@ -71,8 +71,8 @@ class OperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// [request] - The metadata request object. /// @@ -482,9 +482,7 @@ class ProjectsLocationsJobsResource { /// [pageToken] - A token identifying a page of results the server will /// return. To request the first page results, page_token must be empty. To /// request the next page of results, page_token must be the value of - /// next_page_token returned from the previous call to ListJobs. It is an - /// error to switch the value of filter or order_by while iterating through - /// pages. + /// next_page_token returned from the previous call to ListJobs. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. diff --git a/generated/googleapis/lib/cloudshell/v1.dart b/generated/googleapis/lib/cloudshell/v1.dart index 07899672f..8376e6859 100644 --- a/generated/googleapis/lib/cloudshell/v1.dart +++ b/generated/googleapis/lib/cloudshell/v1.dart @@ -72,8 +72,8 @@ class OperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// [request] - The metadata request object. 
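A minimal usage sketch for the PoliciesResource surface added to package:googleapis/cloudidentity/v1.dart above, assuming an already-authenticated http.Client carrying the cloud-identity.policies.readonly scope (obtaining it, e.g. via package:googleapis_auth, is outside this sketch). The CEL filter string follows the PoliciesResource.list documentation; everything else uses only names shown in the generated code above.

import 'package:googleapis/cloudidentity/v1.dart' as cloudidentity;
import 'package:http/http.dart' as http;

/// Pages through Gmail-related policies, printing their names and setting types.
Future<void> listGmailPolicies(http.Client authedClient) async {
  final api = cloudidentity.CloudIdentityApi(authedClient);
  String? pageToken;
  do {
    final response = await api.policies.list(
      // CEL filter, per the PoliciesResource.list documentation.
      filter: "setting.name = 'settings/gmail.*'",
      pageSize: 50,
      pageToken: pageToken,
    );
    for (final policy in response.policies ?? const <cloudidentity.Policy>[]) {
      print('${policy.name} (${policy.type}): ${policy.setting?.type}');
    }
    pageToken = response.nextPageToken;
  } while (pageToken != null && pageToken.isNotEmpty);
}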
/// diff --git a/generated/googleapis/lib/cloudsupport/v2.dart b/generated/googleapis/lib/cloudsupport/v2.dart index 5cf3b602a..10e7af60f 100644 --- a/generated/googleapis/lib/cloudsupport/v2.dart +++ b/generated/googleapis/lib/cloudsupport/v2.dart @@ -613,7 +613,9 @@ class CasesAttachmentsResource { /// /// [pageSize] - The maximum number of attachments fetched with each request. /// If not provided, the default is 10. The maximum page size that will be - /// returned is 100. + /// returned is 100. The size of each page can be smaller than the requested + /// page size and can include zero. For example, you could request 100 + /// attachments on one page, receive 0, and then on the next page, receive 90. /// /// [pageToken] - A token identifying the page of results to return. If /// unspecified, the first page is retrieved. diff --git a/generated/googleapis/lib/cloudtasks/v2.dart b/generated/googleapis/lib/cloudtasks/v2.dart index 32bbf3052..3b63b7aea 100644 --- a/generated/googleapis/lib/cloudtasks/v2.dart +++ b/generated/googleapis/lib/cloudtasks/v2.dart @@ -1123,14 +1123,12 @@ class ProjectsLocationsQueuesTasksResource { /// the task is already running, the queue has reached its RateLimits or is /// PAUSED. This command is meant to be used for manual debugging. For /// example, RunTask can be used to retry a failed task after a fix has been - /// made or to manually force a task to be dispatched now. The dispatched task - /// is returned. That is, the task that is returned contains the status after - /// the task is dispatched but before the task is received by its target. If - /// Cloud Tasks receives a successful response from the task's target, then - /// the task will be deleted; otherwise the task's schedule_time will be reset - /// to the time that RunTask was called plus the retry delay specified in the - /// queue's RetryConfig. RunTask returns NOT_FOUND when it is called on a task - /// that has already succeeded or permanently failed. + /// made or to manually force a task to be dispatched now. If Cloud Tasks + /// receives a successful response from the task's target, then the task will + /// be deleted; otherwise the task's schedule_time will be reset to the time + /// that RunTask was called plus the retry delay specified in the queue's + /// RetryConfig. RunTask returns NOT_FOUND when it is called on a task that + /// has already succeeded or permanently failed. /// /// [request] - The metadata request object. /// diff --git a/generated/googleapis/lib/composer/v1.dart b/generated/googleapis/lib/composer/v1.dart index af4dbf1b8..4b352c628 100644 --- a/generated/googleapis/lib/composer/v1.dart +++ b/generated/googleapis/lib/composer/v1.dart @@ -1905,6 +1905,8 @@ class EncryptionConfig { /// An environment for running orchestration tasks. class Environment { /// Configuration parameters for this environment. + /// + /// Optional. EnvironmentConfig? config; /// The time at which this environment was created. @@ -1923,6 +1925,8 @@ class Environment { /// Optional. core.Map? labels; + /// Identifier. + /// /// The resource name of the environment, in the form: /// "projects/{projectId}/locations/{locationId}/environments/{environmentId}" /// EnvironmentId must start with a lowercase letter followed by up to 63 @@ -2113,6 +2117,8 @@ class EnvironmentConfig { MasterAuthorizedNetworksConfig? masterAuthorizedNetworksConfig; /// The configuration used for the Kubernetes Engine cluster. + /// + /// Optional. NodeConfig? 
nodeConfig; /// The number of nodes in the Kubernetes Engine cluster that will be used to @@ -2123,6 +2129,8 @@ class EnvironmentConfig { core.int? nodeCount; /// The configuration used for the Private IP Cloud Composer environment. + /// + /// Optional. PrivateEnvironmentConfig? privateEnvironmentConfig; /// The Recovery settings configuration of an environment. @@ -2147,6 +2155,8 @@ class EnvironmentConfig { core.String? resilienceMode; /// The configuration settings for software inside the environment. + /// + /// Optional. SoftwareConfig? softwareConfig; /// The configuration settings for the Airflow web server App Engine instance. @@ -3726,6 +3736,8 @@ class SoftwareConfig { /// list\](/composer/docs/concepts/versioning/composer-versions) and /// \[versioning /// overview\](/composer/docs/concepts/versioning/composer-versioning-overview). + /// + /// Optional. core.String? imageVersion; /// Custom Python Package Index (PyPI) packages to be installed in the @@ -4004,7 +4016,8 @@ class UserWorkloadsConfigMap { /// The "data" field of Kubernetes ConfigMap, organized in key-value pairs. /// /// For details see: - /// https://kubernetes.io/docs/concepts/configuration/configmap/ + /// https://kubernetes.io/docs/concepts/configuration/configmap/ Example: { + /// "example_key": "example_value", "another_key": "another_value" } /// /// Optional. core.Map? data; @@ -4044,7 +4057,9 @@ class UserWorkloadsSecret { /// can contain sensitive values such as a password, a token, or a key. /// /// The values for all keys have to be base64-encoded strings. For details - /// see: https://kubernetes.io/docs/concepts/configuration/secret/ + /// see: https://kubernetes.io/docs/concepts/configuration/secret/ Example: { + /// "example": "ZXhhbXBsZV92YWx1ZQ==", "another-example": + /// "YW5vdGhlcl9leGFtcGxlX3ZhbHVl" } /// /// Optional. core.Map? data; diff --git a/generated/googleapis/lib/compute/v1.dart b/generated/googleapis/lib/compute/v1.dart index abed4a3d0..36896ff27 100644 --- a/generated/googleapis/lib/compute/v1.dart +++ b/generated/googleapis/lib/compute/v1.dart @@ -61,6 +61,7 @@ /// - [NetworkEdgeSecurityServicesResource] /// - [NetworkEndpointGroupsResource] /// - [NetworkFirewallPoliciesResource] +/// - [NetworkProfilesResource] /// - [NetworksResource] /// - [NodeGroupsResource] /// - [NodeTemplatesResource] @@ -235,6 +236,8 @@ class ComputeApi { NetworkEndpointGroupsResource(_requester); NetworkFirewallPoliciesResource get networkFirewallPolicies => NetworkFirewallPoliciesResource(_requester); + NetworkProfilesResource get networkProfiles => + NetworkProfilesResource(_requester); NetworksResource get networks => NetworksResource(_requester); NodeGroupsResource get nodeGroups => NodeGroupsResource(_requester); NodeTemplatesResource get nodeTemplates => NodeTemplatesResource(_requester); @@ -13818,6 +13821,85 @@ class InstanceGroupManagersResource { return Operation.fromJson(response_ as core.Map); } + /// Flags the specified instances in the managed instance group to be resumed. + /// + /// This method increases the targetSize and decreases the targetSuspendedSize + /// of the managed instance group by the number of instances that you resume. + /// The resumeInstances operation is marked DONE if the resumeInstances + /// request is successful. The underlying actions take additional time. You + /// must separately verify the status of the RESUMING action with the + /// listmanagedinstances method. In this request, you can only specify + /// instances that are suspended. 
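A minimal sketch for the UserWorkloadsSecret documentation change above in package:googleapis/composer/v1.dart: every secret value must be a base64-encoded string, so a caller encodes each plain value with dart:convert before filling the data map. The helper name is hypothetical, and the constructor shape is assumed from the data field shown above.

import 'dart:convert' show base64Encode, utf8;

import 'package:googleapis/composer/v1.dart' as composer;

/// Builds a UserWorkloadsSecret whose values are base64-encoded, matching the
/// documented example: {"example": "ZXhhbXBsZV92YWx1ZQ=="}.
composer.UserWorkloadsSecret buildUserWorkloadsSecret(
    Map<String, String> plainValues) {
  return composer.UserWorkloadsSecret(
    data: plainValues.map(
      (key, value) => MapEntry(key, base64Encode(utf8.encode(value))),
    ),
  );
}

// buildUserWorkloadsSecret({'example': 'example_value'}).data
// => {'example': 'ZXhhbXBsZV92YWx1ZQ=='}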
For example, if an instance was previously + /// suspended using the suspendInstances method, it can be resumed using the + /// resumeInstances method. If a health check is attached to the managed + /// instance group, the specified instances will be verified as healthy after + /// they are resumed. You can specify a maximum of 1000 instances with this + /// method per request. + /// + /// [request] - The metadata request object. + /// + /// Request parameters: + /// + /// [project] - Project ID for this request. + /// Value must have pattern + /// `(?:(?:\[-a-z0-9\]{1,63}\.)*(?:\[a-z\](?:\[-a-z0-9\]{0,61}\[a-z0-9\])?):)?(?:\[0-9\]{1,19}|(?:\[a-z0-9\](?:\[-a-z0-9\]{0,61}\[a-z0-9\])?))`. + /// + /// [zone] - The name of the zone where the managed instance group is located. + /// + /// [instanceGroupManager] - The name of the managed instance group. + /// + /// [requestId] - An optional request ID to identify requests. Specify a + /// unique request ID so that if you must retry your request, the server will + /// know to ignore the request if it has already been completed. For example, + /// consider a situation where you make an initial request and the request + /// times out. If you make the request again with the same request ID, the + /// server can check if original operation with the same request ID was + /// received, and if so, will ignore the second request. This prevents clients + /// from accidentally creating duplicate commitments. The request ID must be a + /// valid UUID with the exception that zero UUID is not supported ( + /// 00000000-0000-0000-0000-000000000000). + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [Operation]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future resumeInstances( + InstanceGroupManagersResumeInstancesRequest request, + core.String project, + core.String zone, + core.String instanceGroupManager, { + core.String? requestId, + core.String? $fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if (requestId != null) 'requestId': [requestId], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'projects/' + + commons.escapeVariable('$project') + + '/zones/' + + commons.escapeVariable('$zone') + + '/instanceGroupManagers/' + + commons.escapeVariable('$instanceGroupManager') + + '/resumeInstances'; + + final response_ = await _requester.request( + url_, + 'POST', + body: body_, + queryParams: queryParams_, + ); + return Operation.fromJson(response_ as core.Map); + } + /// Specifies the instance template to use when creating new instances in this /// group. /// @@ -13962,6 +14044,256 @@ class InstanceGroupManagersResource { return Operation.fromJson(response_ as core.Map); } + /// Flags the specified instances in the managed instance group to be started. + /// + /// This method increases the targetSize and decreases the targetStoppedSize + /// of the managed instance group by the number of instances that you start. + /// The startInstances operation is marked DONE if the startInstances request + /// is successful. The underlying actions take additional time. You must + /// separately verify the status of the STARTING action with the + /// listmanagedinstances method. 
In this request, you can only specify + /// instances that are stopped. For example, if an instance was previously + /// stopped using the stopInstances method, it can be started using the + /// startInstances method. If a health check is attached to the managed + /// instance group, the specified instances will be verified as healthy after + /// they are started. You can specify a maximum of 1000 instances with this + /// method per request. + /// + /// [request] - The metadata request object. + /// + /// Request parameters: + /// + /// [project] - Project ID for this request. + /// Value must have pattern + /// `(?:(?:\[-a-z0-9\]{1,63}\.)*(?:\[a-z\](?:\[-a-z0-9\]{0,61}\[a-z0-9\])?):)?(?:\[0-9\]{1,19}|(?:\[a-z0-9\](?:\[-a-z0-9\]{0,61}\[a-z0-9\])?))`. + /// + /// [zone] - The name of the zone where the managed instance group is located. + /// + /// [instanceGroupManager] - The name of the managed instance group. + /// + /// [requestId] - An optional request ID to identify requests. Specify a + /// unique request ID so that if you must retry your request, the server will + /// know to ignore the request if it has already been completed. For example, + /// consider a situation where you make an initial request and the request + /// times out. If you make the request again with the same request ID, the + /// server can check if original operation with the same request ID was + /// received, and if so, will ignore the second request. This prevents clients + /// from accidentally creating duplicate commitments. The request ID must be a + /// valid UUID with the exception that zero UUID is not supported ( + /// 00000000-0000-0000-0000-000000000000). + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [Operation]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future startInstances( + InstanceGroupManagersStartInstancesRequest request, + core.String project, + core.String zone, + core.String instanceGroupManager, { + core.String? requestId, + core.String? $fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if (requestId != null) 'requestId': [requestId], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'projects/' + + commons.escapeVariable('$project') + + '/zones/' + + commons.escapeVariable('$zone') + + '/instanceGroupManagers/' + + commons.escapeVariable('$instanceGroupManager') + + '/startInstances'; + + final response_ = await _requester.request( + url_, + 'POST', + body: body_, + queryParams: queryParams_, + ); + return Operation.fromJson(response_ as core.Map); + } + + /// Flags the specified instances in the managed instance group to be + /// immediately stopped. + /// + /// You can only specify instances that are running in this request. This + /// method reduces the targetSize and increases the targetStoppedSize of the + /// managed instance group by the number of instances that you stop. The + /// stopInstances operation is marked DONE if the stopInstances request is + /// successful. The underlying actions take additional time. You must + /// separately verify the status of the STOPPING action with the + /// listmanagedinstances method. 
If the standbyPolicy.initialDelaySec field is + /// set, the group delays stopping the instances until initialDelaySec have + /// passed from instance.creationTimestamp (that is, when the instance was + /// created). This delay gives your application time to set itself up and + /// initialize on the instance. If more than initialDelaySec seconds have + /// passed since instance.creationTimestamp when this method is called, there + /// will be zero delay. If the group is part of a backend service that has + /// enabled connection draining, it can take up to 60 seconds after the + /// connection draining duration has elapsed before the VM instance is + /// stopped. Stopped instances can be started using the startInstances method. + /// You can specify a maximum of 1000 instances with this method per request. + /// + /// [request] - The metadata request object. + /// + /// Request parameters: + /// + /// [project] - Project ID for this request. + /// Value must have pattern + /// `(?:(?:\[-a-z0-9\]{1,63}\.)*(?:\[a-z\](?:\[-a-z0-9\]{0,61}\[a-z0-9\])?):)?(?:\[0-9\]{1,19}|(?:\[a-z0-9\](?:\[-a-z0-9\]{0,61}\[a-z0-9\])?))`. + /// + /// [zone] - The name of the zone where the managed instance group is located. + /// + /// [instanceGroupManager] - The name of the managed instance group. + /// + /// [requestId] - An optional request ID to identify requests. Specify a + /// unique request ID so that if you must retry your request, the server will + /// know to ignore the request if it has already been completed. For example, + /// consider a situation where you make an initial request and the request + /// times out. If you make the request again with the same request ID, the + /// server can check if original operation with the same request ID was + /// received, and if so, will ignore the second request. This prevents clients + /// from accidentally creating duplicate commitments. The request ID must be a + /// valid UUID with the exception that zero UUID is not supported ( + /// 00000000-0000-0000-0000-000000000000). + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [Operation]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future stopInstances( + InstanceGroupManagersStopInstancesRequest request, + core.String project, + core.String zone, + core.String instanceGroupManager, { + core.String? requestId, + core.String? $fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if (requestId != null) 'requestId': [requestId], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'projects/' + + commons.escapeVariable('$project') + + '/zones/' + + commons.escapeVariable('$zone') + + '/instanceGroupManagers/' + + commons.escapeVariable('$instanceGroupManager') + + '/stopInstances'; + + final response_ = await _requester.request( + url_, + 'POST', + body: body_, + queryParams: queryParams_, + ); + return Operation.fromJson(response_ as core.Map); + } + + /// Flags the specified instances in the managed instance group to be + /// immediately suspended. + /// + /// You can only specify instances that are running in this request. This + /// method reduces the targetSize and increases the targetSuspendedSize of the + /// managed instance group by the number of instances that you suspend. 
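A minimal sketch of the zonal stopInstances/startInstances round trip described above, assuming an authenticated http.Client and assuming the request messages expose an instances list of instance URLs (those request classes are referenced above, but their fields are not part of this excerpt).

import 'package:googleapis/compute/v1.dart' as compute;
import 'package:http/http.dart' as http;

/// Flags one managed instance to stop, then (after the STOPPING action has
/// finished) flags it to start again. Polling the returned Operations and the
/// listManagedInstances status is left out of this sketch.
Future<void> stopThenStartInstance(
  http.Client authedClient, {
  required String project,
  required String zone,
  required String instanceGroupManager,
  required String instanceUrl,
}) async {
  final api = compute.ComputeApi(authedClient);

  // Assumed field: `instances` on the stop/start request messages.
  final stopOp = await api.instanceGroupManagers.stopInstances(
    compute.InstanceGroupManagersStopInstancesRequest(
        instances: [instanceUrl]),
    project,
    zone,
    instanceGroupManager,
  );
  print('stopInstances operation: ${stopOp.name}');

  final startOp = await api.instanceGroupManagers.startInstances(
    compute.InstanceGroupManagersStartInstancesRequest(
        instances: [instanceUrl]),
    project,
    zone,
    instanceGroupManager,
  );
  print('startInstances operation: ${startOp.name}');
}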
The + /// suspendInstances operation is marked DONE if the suspendInstances request + /// is successful. The underlying actions take additional time. You must + /// separately verify the status of the SUSPENDING action with the + /// listmanagedinstances method. If the standbyPolicy.initialDelaySec field is + /// set, the group delays suspension of the instances until initialDelaySec + /// have passed from instance.creationTimestamp (that is, when the instance + /// was created). This delay gives your application time to set itself up and + /// initialize on the instance. If more than initialDelaySec seconds have + /// passed since instance.creationTimestamp when this method is called, there + /// will be zero delay. If the group is part of a backend service that has + /// enabled connection draining, it can take up to 60 seconds after the + /// connection draining duration has elapsed before the VM instance is + /// suspended. Suspended instances can be resumed using the resumeInstances + /// method. You can specify a maximum of 1000 instances with this method per + /// request. + /// + /// [request] - The metadata request object. + /// + /// Request parameters: + /// + /// [project] - Project ID for this request. + /// Value must have pattern + /// `(?:(?:\[-a-z0-9\]{1,63}\.)*(?:\[a-z\](?:\[-a-z0-9\]{0,61}\[a-z0-9\])?):)?(?:\[0-9\]{1,19}|(?:\[a-z0-9\](?:\[-a-z0-9\]{0,61}\[a-z0-9\])?))`. + /// + /// [zone] - The name of the zone where the managed instance group is located. + /// + /// [instanceGroupManager] - The name of the managed instance group. + /// + /// [requestId] - An optional request ID to identify requests. Specify a + /// unique request ID so that if you must retry your request, the server will + /// know to ignore the request if it has already been completed. For example, + /// consider a situation where you make an initial request and the request + /// times out. If you make the request again with the same request ID, the + /// server can check if original operation with the same request ID was + /// received, and if so, will ignore the second request. This prevents clients + /// from accidentally creating duplicate commitments. The request ID must be a + /// valid UUID with the exception that zero UUID is not supported ( + /// 00000000-0000-0000-0000-000000000000). + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [Operation]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future suspendInstances( + InstanceGroupManagersSuspendInstancesRequest request, + core.String project, + core.String zone, + core.String instanceGroupManager, { + core.String? requestId, + core.String? 
$fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if (requestId != null) 'requestId': [requestId], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'projects/' + + commons.escapeVariable('$project') + + '/zones/' + + commons.escapeVariable('$zone') + + '/instanceGroupManagers/' + + commons.escapeVariable('$instanceGroupManager') + + '/suspendInstances'; + + final response_ = await _requester.request( + url_, + 'POST', + body: body_, + queryParams: queryParams_, + ); + return Operation.fromJson(response_ as core.Map); + } + /// Inserts or updates per-instance configurations for the managed instance /// group. /// @@ -16421,7 +16753,9 @@ class InstancesResource { /// returned as the `start` property value. You can also provide a negative /// start position, which translates to the most recent number of bytes /// written to the serial port. For example, -3 is interpreted as the most - /// recent 3 bytes written to the serial console. + /// recent 3 bytes written to the serial console. Note that the negative start + /// is bounded by the retained buffer size, and the returned serial console + /// output will not exceed the max buffer size. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. @@ -25133,6 +25467,165 @@ class NetworkFirewallPoliciesResource { } } +class NetworkProfilesResource { + final commons.ApiRequester _requester; + + NetworkProfilesResource(commons.ApiRequester client) : _requester = client; + + /// Returns the specified network profile. + /// + /// Request parameters: + /// + /// [project] - Project ID for this request. + /// Value must have pattern + /// `(?:(?:\[-a-z0-9\]{1,63}\.)*(?:\[a-z\](?:\[-a-z0-9\]{0,61}\[a-z0-9\])?):)?(?:\[0-9\]{1,19}|(?:\[a-z0-9\](?:\[-a-z0-9\]{0,61}\[a-z0-9\])?))`. + /// + /// [networkProfile] - Name of the network profile to return. + /// Value must have pattern + /// `\[a-z\](?:\[-a-z0-9\]{0,61}\[a-z0-9\])?|\[1-9\]\[0-9\]{0,19}`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [NetworkProfile]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future get( + core.String project, + core.String networkProfile, { + core.String? $fields, + }) async { + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'projects/' + + commons.escapeVariable('$project') + + '/global/networkProfiles/' + + commons.escapeVariable('$networkProfile'); + + final response_ = await _requester.request( + url_, + 'GET', + queryParams: queryParams_, + ); + return NetworkProfile.fromJson( + response_ as core.Map); + } + + /// Retrieves a list of network profiles available to the specified project. + /// + /// Request parameters: + /// + /// [project] - Project ID for this request. + /// Value must have pattern + /// `(?:(?:\[-a-z0-9\]{1,63}\.)*(?:\[a-z\](?:\[-a-z0-9\]{0,61}\[a-z0-9\])?):)?(?:\[0-9\]{1,19}|(?:\[a-z0-9\](?:\[-a-z0-9\]{0,61}\[a-z0-9\])?))`. + /// + /// [filter] - A filter expression that filters resources listed in the + /// response. Most Compute resources support two types of filter expressions: + /// expressions that support regular expressions and expressions that follow + /// API improvement proposal AIP-160. 
These two types of filter expressions + /// cannot be mixed in one request. If you want to use AIP-160, your + /// expression must specify the field name, an operator, and the value that + /// you want to use for filtering. The value must be a string, a number, or a + /// boolean. The operator must be either `=`, `!=`, `>`, `<`, `<=`, `>=` or + /// `:`. For example, if you are filtering Compute Engine instances, you can + /// exclude instances named `example-instance` by specifying `name != + /// example-instance`. The `:*` comparison can be used to test whether a key + /// has been defined. For example, to find all objects with `owner` label use: + /// ``` labels.owner:* ``` You can also filter nested fields. For example, you + /// could specify `scheduling.automaticRestart = false` to include instances + /// only if they are not scheduled for automatic restarts. You can use + /// filtering on nested fields to filter based on resource labels. To filter + /// on multiple expressions, provide each separate expression within + /// parentheses. For example: ``` (scheduling.automaticRestart = true) + /// (cpuPlatform = "Intel Skylake") ``` By default, each expression is an + /// `AND` expression. However, you can include `AND` and `OR` expressions + /// explicitly. For example: ``` (cpuPlatform = "Intel Skylake") OR + /// (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true) + /// ``` If you want to use a regular expression, use the `eq` (equal) or `ne` + /// (not equal) operator against a single un-parenthesized expression with or + /// without quotes or against multiple parenthesized expressions. Examples: + /// `fieldname eq unquoted literal` `fieldname eq 'single quoted literal'` + /// `fieldname eq "double quoted literal"` `(fieldname1 eq literal) + /// (fieldname2 ne "literal")` The literal value is interpreted as a regular + /// expression using Google RE2 library syntax. The literal value must match + /// the entire field. For example, to filter for instances that do not end + /// with name "instance", you would use `name ne .*instance`. You cannot + /// combine constraints on multiple fields using regular expressions. + /// + /// [maxResults] - The maximum number of results per page that should be + /// returned. If the number of available results is larger than `maxResults`, + /// Compute Engine returns a `nextPageToken` that can be used to get the next + /// page of results in subsequent list requests. Acceptable values are `0` to + /// `500`, inclusive. (Default: `500`) + /// + /// [orderBy] - Sorts list results by a certain order. By default, results are + /// returned in alphanumerical order based on the resource name. You can also + /// sort results in descending order based on the creation timestamp using + /// `orderBy="creationTimestamp desc"`. This sorts results based on the + /// `creationTimestamp` field in reverse chronological order (newest result + /// first). Use this to sort resources like operations so that the newest + /// operation is returned first. Currently, only sorting by `name` or + /// `creationTimestamp desc` is supported. + /// + /// [pageToken] - Specifies a page token to use. Set `pageToken` to the + /// `nextPageToken` returned by a previous list request to get the next page + /// of results. + /// + /// [returnPartialSuccess] - Opt-in for partial success behavior which + /// provides partial results in case of failure. The default value is false. 
+ /// For example, when partial success behavior is enabled, aggregatedList for + /// a single zone scope either returns all resources in the zone or no + /// resources, with an error code. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [NetworkProfilesListResponse]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future list( + core.String project, { + core.String? filter, + core.int? maxResults, + core.String? orderBy, + core.String? pageToken, + core.bool? returnPartialSuccess, + core.String? $fields, + }) async { + final queryParams_ = >{ + if (filter != null) 'filter': [filter], + if (maxResults != null) 'maxResults': ['${maxResults}'], + if (orderBy != null) 'orderBy': [orderBy], + if (pageToken != null) 'pageToken': [pageToken], + if (returnPartialSuccess != null) + 'returnPartialSuccess': ['${returnPartialSuccess}'], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'projects/' + + commons.escapeVariable('$project') + + '/global/networkProfiles'; + + final response_ = await _requester.request( + url_, + 'GET', + queryParams: queryParams_, + ); + return NetworkProfilesListResponse.fromJson( + response_ as core.Map); + } +} + class NetworksResource { final commons.ApiRequester _requester; @@ -35660,6 +36153,85 @@ class RegionInstanceGroupManagersResource { return Operation.fromJson(response_ as core.Map); } + /// Flags the specified instances in the managed instance group to be resumed. + /// + /// This method increases the targetSize and decreases the targetSuspendedSize + /// of the managed instance group by the number of instances that you resume. + /// The resumeInstances operation is marked DONE if the resumeInstances + /// request is successful. The underlying actions take additional time. You + /// must separately verify the status of the RESUMING action with the + /// listmanagedinstances method. In this request, you can only specify + /// instances that are suspended. For example, if an instance was previously + /// suspended using the suspendInstances method, it can be resumed using the + /// resumeInstances method. If a health check is attached to the managed + /// instance group, the specified instances will be verified as healthy after + /// they are resumed. You can specify a maximum of 1000 instances with this + /// method per request. + /// + /// [request] - The metadata request object. + /// + /// Request parameters: + /// + /// [project] - Project ID for this request. + /// Value must have pattern + /// `(?:(?:\[-a-z0-9\]{1,63}\.)*(?:\[a-z\](?:\[-a-z0-9\]{0,61}\[a-z0-9\])?):)?(?:\[0-9\]{1,19}|(?:\[a-z0-9\](?:\[-a-z0-9\]{0,61}\[a-z0-9\])?))`. + /// + /// [region] - Name of the region scoping this request. + /// + /// [instanceGroupManager] - Name of the managed instance group. + /// + /// [requestId] - An optional request ID to identify requests. Specify a + /// unique request ID so that if you must retry your request, the server will + /// know to ignore the request if it has already been completed. For example, + /// consider a situation where you make an initial request and the request + /// times out. 
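A minimal sketch for the new NetworkProfilesResource above, assuming an authenticated http.Client and assuming NetworkProfilesListResponse follows the usual Compute list shape with items and nextPageToken (only the list call itself appears in this excerpt).

import 'package:googleapis/compute/v1.dart' as compute;
import 'package:http/http.dart' as http;

/// Prints the network profiles visible to a project, following page tokens.
Future<void> printNetworkProfiles(
    http.Client authedClient, String project) async {
  final api = compute.ComputeApi(authedClient);
  String? pageToken;
  do {
    final page = await api.networkProfiles.list(
      project,
      maxResults: 100,
      pageToken: pageToken,
    );
    for (final profile in page.items ?? const <compute.NetworkProfile>[]) {
      print(profile.name); // assumed `name` field on NetworkProfile
    }
    pageToken = page.nextPageToken;
  } while (pageToken != null && pageToken.isNotEmpty);
}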
If you make the request again with the same request ID, the + /// server can check if original operation with the same request ID was + /// received, and if so, will ignore the second request. This prevents clients + /// from accidentally creating duplicate commitments. The request ID must be a + /// valid UUID with the exception that zero UUID is not supported ( + /// 00000000-0000-0000-0000-000000000000). + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [Operation]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future resumeInstances( + RegionInstanceGroupManagersResumeInstancesRequest request, + core.String project, + core.String region, + core.String instanceGroupManager, { + core.String? requestId, + core.String? $fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if (requestId != null) 'requestId': [requestId], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'projects/' + + commons.escapeVariable('$project') + + '/regions/' + + commons.escapeVariable('$region') + + '/instanceGroupManagers/' + + commons.escapeVariable('$instanceGroupManager') + + '/resumeInstances'; + + final response_ = await _requester.request( + url_, + 'POST', + body: body_, + queryParams: queryParams_, + ); + return Operation.fromJson(response_ as core.Map); + } + /// Sets the instance template to use when creating new instances or /// recreating instances in this group. /// @@ -35798,6 +36370,256 @@ class RegionInstanceGroupManagersResource { return Operation.fromJson(response_ as core.Map); } + /// Flags the specified instances in the managed instance group to be started. + /// + /// This method increases the targetSize and decreases the targetStoppedSize + /// of the managed instance group by the number of instances that you start. + /// The startInstances operation is marked DONE if the startInstances request + /// is successful. The underlying actions take additional time. You must + /// separately verify the status of the STARTING action with the + /// listmanagedinstances method. In this request, you can only specify + /// instances that are stopped. For example, if an instance was previously + /// stopped using the stopInstances method, it can be started using the + /// startInstances method. If a health check is attached to the managed + /// instance group, the specified instances will be verified as healthy after + /// they are started. You can specify a maximum of 1000 instances with this + /// method per request. + /// + /// [request] - The metadata request object. + /// + /// Request parameters: + /// + /// [project] - Project ID for this request. + /// Value must have pattern + /// `(?:(?:\[-a-z0-9\]{1,63}\.)*(?:\[a-z\](?:\[-a-z0-9\]{0,61}\[a-z0-9\])?):)?(?:\[0-9\]{1,19}|(?:\[a-z0-9\](?:\[-a-z0-9\]{0,61}\[a-z0-9\])?))`. + /// + /// [region] - Name of the region scoping this request. + /// + /// [instanceGroupManager] - Name of the managed instance group. + /// + /// [requestId] - An optional request ID to identify requests. Specify a + /// unique request ID so that if you must retry your request, the server will + /// know to ignore the request if it has already been completed. 
For example, + /// consider a situation where you make an initial request and the request + /// times out. If you make the request again with the same request ID, the + /// server can check if original operation with the same request ID was + /// received, and if so, will ignore the second request. This prevents clients + /// from accidentally creating duplicate commitments. The request ID must be a + /// valid UUID with the exception that zero UUID is not supported ( + /// 00000000-0000-0000-0000-000000000000). + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [Operation]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future startInstances( + RegionInstanceGroupManagersStartInstancesRequest request, + core.String project, + core.String region, + core.String instanceGroupManager, { + core.String? requestId, + core.String? $fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if (requestId != null) 'requestId': [requestId], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'projects/' + + commons.escapeVariable('$project') + + '/regions/' + + commons.escapeVariable('$region') + + '/instanceGroupManagers/' + + commons.escapeVariable('$instanceGroupManager') + + '/startInstances'; + + final response_ = await _requester.request( + url_, + 'POST', + body: body_, + queryParams: queryParams_, + ); + return Operation.fromJson(response_ as core.Map); + } + + /// Flags the specified instances in the managed instance group to be + /// immediately stopped. + /// + /// You can only specify instances that are running in this request. This + /// method reduces the targetSize and increases the targetStoppedSize of the + /// managed instance group by the number of instances that you stop. The + /// stopInstances operation is marked DONE if the stopInstances request is + /// successful. The underlying actions take additional time. You must + /// separately verify the status of the STOPPING action with the + /// listmanagedinstances method. If the standbyPolicy.initialDelaySec field is + /// set, the group delays stopping the instances until initialDelaySec have + /// passed from instance.creationTimestamp (that is, when the instance was + /// created). This delay gives your application time to set itself up and + /// initialize on the instance. If more than initialDelaySec seconds have + /// passed since instance.creationTimestamp when this method is called, there + /// will be zero delay. If the group is part of a backend service that has + /// enabled connection draining, it can take up to 60 seconds after the + /// connection draining duration has elapsed before the VM instance is + /// stopped. Stopped instances can be started using the startInstances method. + /// You can specify a maximum of 1000 instances with this method per request. + /// + /// [request] - The metadata request object. + /// + /// Request parameters: + /// + /// [project] - Project ID for this request. + /// Value must have pattern + /// `(?:(?:\[-a-z0-9\]{1,63}\.)*(?:\[a-z\](?:\[-a-z0-9\]{0,61}\[a-z0-9\])?):)?(?:\[0-9\]{1,19}|(?:\[a-z0-9\](?:\[-a-z0-9\]{0,61}\[a-z0-9\])?))`. + /// + /// [region] - Name of the region scoping this request. 
+ /// + /// [instanceGroupManager] - The name of the managed instance group. + /// + /// [requestId] - An optional request ID to identify requests. Specify a + /// unique request ID so that if you must retry your request, the server will + /// know to ignore the request if it has already been completed. For example, + /// consider a situation where you make an initial request and the request + /// times out. If you make the request again with the same request ID, the + /// server can check if original operation with the same request ID was + /// received, and if so, will ignore the second request. This prevents clients + /// from accidentally creating duplicate commitments. The request ID must be a + /// valid UUID with the exception that zero UUID is not supported ( + /// 00000000-0000-0000-0000-000000000000). + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [Operation]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future stopInstances( + RegionInstanceGroupManagersStopInstancesRequest request, + core.String project, + core.String region, + core.String instanceGroupManager, { + core.String? requestId, + core.String? $fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if (requestId != null) 'requestId': [requestId], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'projects/' + + commons.escapeVariable('$project') + + '/regions/' + + commons.escapeVariable('$region') + + '/instanceGroupManagers/' + + commons.escapeVariable('$instanceGroupManager') + + '/stopInstances'; + + final response_ = await _requester.request( + url_, + 'POST', + body: body_, + queryParams: queryParams_, + ); + return Operation.fromJson(response_ as core.Map); + } + + /// Flags the specified instances in the managed instance group to be + /// immediately suspended. + /// + /// You can only specify instances that are running in this request. This + /// method reduces the targetSize and increases the targetSuspendedSize of the + /// managed instance group by the number of instances that you suspend. The + /// suspendInstances operation is marked DONE if the suspendInstances request + /// is successful. The underlying actions take additional time. You must + /// separately verify the status of the SUSPENDING action with the + /// listmanagedinstances method. If the standbyPolicy.initialDelaySec field is + /// set, the group delays suspension of the instances until initialDelaySec + /// have passed from instance.creationTimestamp (that is, when the instance + /// was created). This delay gives your application time to set itself up and + /// initialize on the instance. If more than initialDelaySec seconds have + /// passed since instance.creationTimestamp when this method is called, there + /// will be zero delay. If the group is part of a backend service that has + /// enabled connection draining, it can take up to 60 seconds after the + /// connection draining duration has elapsed before the VM instance is + /// suspended. Suspended instances can be resumed using the resumeInstances + /// method. You can specify a maximum of 1000 instances with this method per + /// request. + /// + /// [request] - The metadata request object. 
+ /// + /// Request parameters: + /// + /// [project] - Project ID for this request. + /// Value must have pattern + /// `(?:(?:\[-a-z0-9\]{1,63}\.)*(?:\[a-z\](?:\[-a-z0-9\]{0,61}\[a-z0-9\])?):)?(?:\[0-9\]{1,19}|(?:\[a-z0-9\](?:\[-a-z0-9\]{0,61}\[a-z0-9\])?))`. + /// + /// [region] - Name of the region scoping this request. + /// + /// [instanceGroupManager] - Name of the managed instance group. + /// + /// [requestId] - An optional request ID to identify requests. Specify a + /// unique request ID so that if you must retry your request, the server will + /// know to ignore the request if it has already been completed. For example, + /// consider a situation where you make an initial request and the request + /// times out. If you make the request again with the same request ID, the + /// server can check if original operation with the same request ID was + /// received, and if so, will ignore the second request. This prevents clients + /// from accidentally creating duplicate commitments. The request ID must be a + /// valid UUID with the exception that zero UUID is not supported ( + /// 00000000-0000-0000-0000-000000000000). + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [Operation]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future suspendInstances( + RegionInstanceGroupManagersSuspendInstancesRequest request, + core.String project, + core.String region, + core.String instanceGroupManager, { + core.String? requestId, + core.String? $fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if (requestId != null) 'requestId': [requestId], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'projects/' + + commons.escapeVariable('$project') + + '/regions/' + + commons.escapeVariable('$region') + + '/instanceGroupManagers/' + + commons.escapeVariable('$instanceGroupManager') + + '/suspendInstances'; + + final response_ = await _requester.request( + url_, + 'POST', + body: body_, + queryParams: queryParams_, + ); + return Operation.fromJson(response_ as core.Map); + } + /// Inserts or updates per-instance configurations for the managed instance /// group. /// @@ -40059,6 +40881,77 @@ class RegionSecurityPoliciesResource { ); return Operation.fromJson(response_ as core.Map); } + + /// Sets the labels on a security policy. + /// + /// To learn more about labels, read the Labeling Resources documentation. + /// + /// [request] - The metadata request object. + /// + /// Request parameters: + /// + /// [project] - Project ID for this request. + /// Value must have pattern + /// `(?:(?:\[-a-z0-9\]{1,63}\.)*(?:\[a-z\](?:\[-a-z0-9\]{0,61}\[a-z0-9\])?):)?(?:\[0-9\]{1,19}|(?:\[a-z0-9\](?:\[-a-z0-9\]{0,61}\[a-z0-9\])?))`. + /// + /// [region] - The region for this request. + /// Value must have pattern `\[a-z\](?:\[-a-z0-9\]{0,61}\[a-z0-9\])?`. + /// + /// [resource] - Name or id of the resource for this request. + /// Value must have pattern + /// `\[a-z\](?:\[-a-z0-9\]{0,61}\[a-z0-9\])?|\[1-9\]\[0-9\]{0,19}`. + /// + /// [requestId] - An optional request ID to identify requests. Specify a + /// unique request ID so that if you must retry your request, the server will + /// know to ignore the request if it has already been completed. 
For example, + /// consider a situation where you make an initial request and the request + /// times out. If you make the request again with the same request ID, the + /// server can check if original operation with the same request ID was + /// received, and if so, will ignore the second request. This prevents clients + /// from accidentally creating duplicate commitments. The request ID must be a + /// valid UUID with the exception that zero UUID is not supported ( + /// 00000000-0000-0000-0000-000000000000). + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [Operation]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future setLabels( + RegionSetLabelsRequest request, + core.String project, + core.String region, + core.String resource, { + core.String? requestId, + core.String? $fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if (requestId != null) 'requestId': [requestId], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'projects/' + + commons.escapeVariable('$project') + + '/regions/' + + commons.escapeVariable('$region') + + '/securityPolicies/' + + commons.escapeVariable('$resource') + + '/setLabels'; + + final response_ = await _requester.request( + url_, + 'POST', + body: body_, + queryParams: queryParams_, + ); + return Operation.fromJson(response_ as core.Map); + } } class RegionSslCertificatesResource { @@ -57617,6 +58510,8 @@ class AcceleratorTypeAggregatedListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -57842,6 +58737,8 @@ class AcceleratorTypeListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -58054,6 +58951,8 @@ class AcceleratorTypesScopedListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -58641,6 +59540,8 @@ class AddressAggregatedListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. 
/// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -58864,6 +59765,8 @@ class AddressListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -59076,6 +59979,8 @@ class AddressesScopedListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -60108,8 +61013,8 @@ class AttachedDiskInitializeParams { /// "audit_log_configs": \[ { "log_type": "DATA_READ" }, { "log_type": /// "DATA_WRITE", "exempted_members": \[ "user:aliya@example.com" \] } \] } \] } /// For sampleservice, this policy enables DATA_READ, DATA_WRITE and ADMIN_READ -/// logging. It also exempts jose@example.com from DATA_READ logging, and -/// aliya@example.com from DATA_WRITE logging. +/// logging. It also exempts `jose@example.com` from DATA_READ logging, and +/// `aliya@example.com` from DATA_WRITE logging. class AuditConfig { /// The configuration for logging of each type of permission. core.List? auditLogConfigs; @@ -60451,6 +61356,8 @@ class AutoscalerAggregatedListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -60677,6 +61584,8 @@ class AutoscalerListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -60995,6 +61904,8 @@ class AutoscalersScopedListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. 
/// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -61822,7 +62733,8 @@ class BackendBucketCdnPolicy { /// content, including common image formats, media (video and audio), and web /// assets (JavaScript and CSS). Requests and responses that are marked as /// uncacheable, as well as dynamic content (including HTML), will not be - /// cached. + /// cached. If no value is provided for cdnPolicy.cacheMode, it defaults to + /// CACHE_ALL_STATIC. /// Possible string values are: /// - "CACHE_ALL_STATIC" : Automatically cache static content, including /// common image formats, media (video and audio), and web assets (JavaScript @@ -62137,6 +63049,8 @@ class BackendBucketListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -63078,6 +63992,8 @@ class BackendServiceAggregatedListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -63236,7 +64152,8 @@ class BackendServiceCdnPolicy { /// content, including common image formats, media (video and audio), and web /// assets (JavaScript and CSS). Requests and responses that are marked as /// uncacheable, as well as dynamic content (including HTML), will not be - /// cached. + /// cached. If no value is provided for cdnPolicy.cacheMode, it defaults to + /// CACHE_ALL_STATIC. /// Possible string values are: /// - "CACHE_ALL_STATIC" : Automatically cache static content, including /// common image formats, media (video and audio), and web assets (JavaScript @@ -63793,6 +64710,8 @@ class BackendServiceListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -64004,6 +64923,8 @@ class BackendServiceListUsableWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. 
/// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -64436,6 +65357,8 @@ class BackendServicesScopedListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -64852,11 +65775,6 @@ class BfdStatusPacketCounts { /// Associates `members`, or principals, with a `role`. class Binding { - /// This is deprecated and has no effect. - /// - /// Do not use. - core.String? bindingId; - /// The condition that is associated with this binding. /// /// If the condition evaluates to `true`, then this binding applies to the @@ -64936,7 +65854,6 @@ class Binding { core.String? role; Binding({ - this.bindingId, this.condition, this.members, this.role, @@ -64944,7 +65861,6 @@ class Binding { Binding.fromJson(core.Map json_) : this( - bindingId: json_['bindingId'] as core.String?, condition: json_.containsKey('condition') ? Expr.fromJson( json_['condition'] as core.Map) @@ -64956,7 +65872,6 @@ class Binding { ); core.Map toJson() => { - if (bindingId != null) 'bindingId': bindingId!, if (condition != null) 'condition': condition!, if (members != null) 'members': members!, if (role != null) 'role': role!, @@ -65533,6 +66448,7 @@ class Commitment { /// - "COMPUTE_OPTIMIZED_H3" /// - "GENERAL_PURPOSE" /// - "GENERAL_PURPOSE_C4" + /// - "GENERAL_PURPOSE_C4A" /// - "GENERAL_PURPOSE_E2" /// - "GENERAL_PURPOSE_N2" /// - "GENERAL_PURPOSE_N2D" @@ -65541,6 +66457,9 @@ class Commitment { /// - "GRAPHICS_OPTIMIZED" /// - "MEMORY_OPTIMIZED" /// - "MEMORY_OPTIMIZED_M3" + /// - "MEMORY_OPTIMIZED_X4_16TB" + /// - "MEMORY_OPTIMIZED_X4_24TB" + /// - "MEMORY_OPTIMIZED_X4_32TB" /// - "STORAGE_OPTIMIZED_Z3" /// - "TYPE_UNSPECIFIED" core.String? type; @@ -65734,6 +66653,8 @@ class CommitmentAggregatedListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -65958,6 +66879,8 @@ class CommitmentListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. 
/// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -66200,6 +67123,8 @@ class CommitmentsScopedListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -66286,85 +67211,6 @@ class CommitmentsScopedList { }; } -/// This is deprecated and has no effect. -/// -/// Do not use. -class Condition { - /// This is deprecated and has no effect. - /// - /// Do not use. - /// Possible string values are: - /// - "APPROVER" : This is deprecated and has no effect. Do not use. - /// - "ATTRIBUTION" : This is deprecated and has no effect. Do not use. - /// - "AUTHORITY" : This is deprecated and has no effect. Do not use. - /// - "CREDENTIALS_TYPE" : This is deprecated and has no effect. Do not use. - /// - "CREDS_ASSERTION" : This is deprecated and has no effect. Do not use. - /// - "JUSTIFICATION_TYPE" : This is deprecated and has no effect. Do not use. - /// - "NO_ATTR" : This is deprecated and has no effect. Do not use. - /// - "SECURITY_REALM" : This is deprecated and has no effect. Do not use. - core.String? iam; - - /// This is deprecated and has no effect. - /// - /// Do not use. - /// Possible string values are: - /// - "DISCHARGED" : This is deprecated and has no effect. Do not use. - /// - "EQUALS" : This is deprecated and has no effect. Do not use. - /// - "IN" : This is deprecated and has no effect. Do not use. - /// - "NOT_EQUALS" : This is deprecated and has no effect. Do not use. - /// - "NOT_IN" : This is deprecated and has no effect. Do not use. - /// - "NO_OP" : This is deprecated and has no effect. Do not use. - core.String? op; - - /// This is deprecated and has no effect. - /// - /// Do not use. - core.String? svc; - - /// This is deprecated and has no effect. - /// - /// Do not use. - /// Possible string values are: - /// - "IP" : This is deprecated and has no effect. Do not use. - /// - "NAME" : This is deprecated and has no effect. Do not use. - /// - "NO_ATTR" : This is deprecated and has no effect. Do not use. - /// - "REGION" : This is deprecated and has no effect. Do not use. - /// - "SERVICE" : This is deprecated and has no effect. Do not use. - core.String? sys; - - /// This is deprecated and has no effect. - /// - /// Do not use. - core.List? values; - - Condition({ - this.iam, - this.op, - this.svc, - this.sys, - this.values, - }); - - Condition.fromJson(core.Map json_) - : this( - iam: json_['iam'] as core.String?, - op: json_['op'] as core.String?, - svc: json_['svc'] as core.String?, - sys: json_['sys'] as core.String?, - values: (json_['values'] as core.List?) - ?.map((value) => value as core.String) - .toList(), - ); - - core.Map toJson() => { - if (iam != null) 'iam': iam!, - if (op != null) 'op': op!, - if (svc != null) 'svc': svc!, - if (sys != null) 'sys': sys!, - if (values != null) 'values': values!, - }; -} - /// A set of Confidential Instance options. class ConfidentialInstanceConfig { /// Defines the type of technology used by the confidential instance. @@ -67601,6 +68447,8 @@ class DiskAggregatedListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. 
/// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -67980,6 +68828,8 @@ class DiskListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -68446,6 +69296,8 @@ class DiskTypeAggregatedListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -68669,6 +69521,8 @@ class DiskTypeListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -68881,6 +69735,8 @@ class DiskTypesScopedListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -69099,6 +69955,8 @@ class DisksScopedListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -69543,6 +70401,8 @@ class ExchangedPeeringRoutesListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). 
/// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -69976,6 +70836,8 @@ class ExternalVpnGatewayListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -70549,6 +71411,8 @@ class FirewallListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -71061,6 +71925,8 @@ class FirewallPolicyListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -72226,6 +73092,8 @@ class ForwardingRuleAggregatedListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -72449,6 +73317,8 @@ class ForwardingRuleListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -72714,6 +73584,8 @@ class ForwardingRulesScopedListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. 
/// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -73835,6 +74707,8 @@ class HealthCheckListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -74289,6 +75163,8 @@ class HealthCheckServicesListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -74499,6 +75375,8 @@ class HealthChecksAggregatedListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -74717,6 +75595,8 @@ class HealthChecksScopedListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -75592,6 +76472,8 @@ class HttpHealthCheckListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -76135,30 +77017,24 @@ class HttpRouteRule { /// routing actions, such as URL rewrites and header transformations, before /// forwarding the request to the selected backend. /// - /// If routeAction specifies any weightedBackendServices, service must not be - /// set. Conversely if service is set, routeAction cannot contain any - /// weightedBackendServices. Only one of urlRedirect, service or - /// routeAction.weightedBackendService must be set. URL maps for classic - /// Application Load Balancers only support the urlRewrite action within a - /// route rule's routeAction. + /// Only one of urlRedirect, service or routeAction.weightedBackendService can + /// be set. 
URL maps for classic Application Load Balancers only support the + /// urlRewrite action within a route rule's routeAction. HttpRouteAction? routeAction; /// The full or partial URL of the backend service resource to which traffic /// is directed if this rule is matched. /// /// If routeAction is also specified, advanced routing actions, such as URL - /// rewrites, take effect before sending the request to the backend. However, - /// if service is specified, routeAction cannot contain any - /// weightedBackendServices. Conversely, if routeAction specifies any - /// weightedBackendServices, service must not be specified. Only one of - /// urlRedirect, service or routeAction.weightedBackendService must be set. + /// rewrites, take effect before sending the request to the backend. Only one + /// of urlRedirect, service or routeAction.weightedBackendService can be set. core.String? service; /// When this rule is matched, the request is redirected to a URL specified by /// urlRedirect. /// - /// If urlRedirect is specified, service or routeAction must not be set. Not - /// supported when the URL map is bound to a target gRPC proxy. + /// Only one of urlRedirect, service or routeAction.weightedBackendService can + /// be set. Not supported when the URL map is bound to a target gRPC proxy. HttpRedirectAction? urlRedirect; HttpRouteRule({ @@ -76556,6 +77432,8 @@ class HttpsHealthCheckListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -77238,6 +78116,8 @@ class ImageListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -78040,6 +78920,8 @@ class InstanceAggregatedListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -78484,6 +79366,8 @@ class InstanceGroupAggregatedListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. 
/// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -78706,6 +79590,8 @@ class InstanceGroupListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -78899,6 +79785,13 @@ class InstanceGroupManager { /// Output only. core.String? id; + /// Instance flexibility allowing MIG to create VMs from multiple types of + /// machines. + /// + /// Instance flexibility configuration on MIG overrides instance template + /// configuration. + InstanceGroupManagerInstanceFlexibilityPolicy? instanceFlexibilityPolicy; + /// The URL of the Instance Group resource. /// /// Output only. @@ -78937,8 +79830,10 @@ class InstanceGroupManager { /// The name must be 1-63 characters long, and comply with RFC1035. core.String? name; - /// Named ports configured for the Instance Groups complementary to this + /// Named ports configured on the Instance Groups complementary to this /// Instance Group Manager. + /// + /// Output only. core.List? namedPorts; /// The URL of the region where the managed instance group resides (for @@ -78964,6 +79859,9 @@ class InstanceGroupManager { /// Output only. core.String? selfLink; + /// Standby policy for stopped and suspended instances. + InstanceGroupManagerStandbyPolicy? standbyPolicy; + /// Stateful configuration for this Instanced Group Manager StatefulPolicy? statefulPolicy; @@ -78986,6 +79884,20 @@ class InstanceGroupManager { /// changes this number. core.int? targetSize; + /// The target number of stopped instances for this managed instance group. + /// + /// This number changes when you: - Stop instance using the stopInstances + /// method or start instances using the startInstances method. - Manually + /// change the targetStoppedSize using the update method. + core.int? targetStoppedSize; + + /// The target number of suspended instances for this managed instance group. + /// + /// This number changes when you: - Suspend instance using the + /// suspendInstances method or resume instances using the resumeInstances + /// method. - Manually change the targetSuspendedSize using the update method. + core.int? targetSuspendedSize; + /// The update policy for this managed instance group. InstanceGroupManagerUpdatePolicy? updatePolicy; @@ -79016,6 +79928,7 @@ class InstanceGroupManager { this.distributionPolicy, this.fingerprint, this.id, + this.instanceFlexibilityPolicy, this.instanceGroup, this.instanceLifecyclePolicy, this.instanceTemplate, @@ -79027,10 +79940,13 @@ class InstanceGroupManager { this.satisfiesPzi, this.satisfiesPzs, this.selfLink, + this.standbyPolicy, this.statefulPolicy, this.status, this.targetPools, this.targetSize, + this.targetStoppedSize, + this.targetSuspendedSize, this.updatePolicy, this.versions, this.zone, @@ -79061,6 +79977,12 @@ class InstanceGroupManager { : null, fingerprint: json_['fingerprint'] as core.String?, id: json_['id'] as core.String?, + instanceFlexibilityPolicy: + json_.containsKey('instanceFlexibilityPolicy') + ? 
InstanceGroupManagerInstanceFlexibilityPolicy.fromJson( + json_['instanceFlexibilityPolicy'] + as core.Map) + : null, instanceGroup: json_['instanceGroup'] as core.String?, instanceLifecyclePolicy: json_.containsKey('instanceLifecyclePolicy') ? InstanceGroupManagerInstanceLifecyclePolicy.fromJson( @@ -79080,6 +80002,10 @@ class InstanceGroupManager { satisfiesPzi: json_['satisfiesPzi'] as core.bool?, satisfiesPzs: json_['satisfiesPzs'] as core.bool?, selfLink: json_['selfLink'] as core.String?, + standbyPolicy: json_.containsKey('standbyPolicy') + ? InstanceGroupManagerStandbyPolicy.fromJson( + json_['standbyPolicy'] as core.Map) + : null, statefulPolicy: json_.containsKey('statefulPolicy') ? StatefulPolicy.fromJson(json_['statefulPolicy'] as core.Map) @@ -79092,6 +80018,8 @@ class InstanceGroupManager { ?.map((value) => value as core.String) .toList(), targetSize: json_['targetSize'] as core.int?, + targetStoppedSize: json_['targetStoppedSize'] as core.int?, + targetSuspendedSize: json_['targetSuspendedSize'] as core.int?, updatePolicy: json_.containsKey('updatePolicy') ? InstanceGroupManagerUpdatePolicy.fromJson( json_['updatePolicy'] as core.Map) @@ -79116,6 +80044,8 @@ class InstanceGroupManager { 'distributionPolicy': distributionPolicy!, if (fingerprint != null) 'fingerprint': fingerprint!, if (id != null) 'id': id!, + if (instanceFlexibilityPolicy != null) + 'instanceFlexibilityPolicy': instanceFlexibilityPolicy!, if (instanceGroup != null) 'instanceGroup': instanceGroup!, if (instanceLifecyclePolicy != null) 'instanceLifecyclePolicy': instanceLifecyclePolicy!, @@ -79129,10 +80059,14 @@ class InstanceGroupManager { if (satisfiesPzi != null) 'satisfiesPzi': satisfiesPzi!, if (satisfiesPzs != null) 'satisfiesPzs': satisfiesPzs!, if (selfLink != null) 'selfLink': selfLink!, + if (standbyPolicy != null) 'standbyPolicy': standbyPolicy!, if (statefulPolicy != null) 'statefulPolicy': statefulPolicy!, if (status != null) 'status': status!, if (targetPools != null) 'targetPools': targetPools!, if (targetSize != null) 'targetSize': targetSize!, + if (targetStoppedSize != null) 'targetStoppedSize': targetStoppedSize!, + if (targetSuspendedSize != null) + 'targetSuspendedSize': targetSuspendedSize!, if (updatePolicy != null) 'updatePolicy': updatePolicy!, if (versions != null) 'versions': versions!, if (zone != null) 'zone': zone!, @@ -79379,6 +80313,8 @@ class InstanceGroupManagerAggregatedListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -79574,6 +80510,68 @@ class InstanceGroupManagerAutoHealingPolicy { }; } +class InstanceGroupManagerInstanceFlexibilityPolicy { + /// Named instance selections configuring properties that the group will use + /// when creating new VMs. + core.Map? + instanceSelections; + + InstanceGroupManagerInstanceFlexibilityPolicy({ + this.instanceSelections, + }); + + InstanceGroupManagerInstanceFlexibilityPolicy.fromJson(core.Map json_) + : this( + instanceSelections: (json_['instanceSelections'] + as core.Map?) 
+ ?.map( + (key, value) => core.MapEntry( + key, + InstanceGroupManagerInstanceFlexibilityPolicyInstanceSelection + .fromJson(value as core.Map), + ), + ), + ); + + core.Map toJson() => { + if (instanceSelections != null) + 'instanceSelections': instanceSelections!, + }; +} + +class InstanceGroupManagerInstanceFlexibilityPolicyInstanceSelection { + /// Full machine-type names, e.g. "n1-standard-16". + core.List? machineTypes; + + /// Preference of this instance selection. + /// + /// Lower number means higher preference. MIG will first try to create a VM + /// based on the machine-type with lowest rank and fallback to next rank based + /// on availability. Machine types and instance selections with the same rank + /// have the same preference. + core.int? rank; + + InstanceGroupManagerInstanceFlexibilityPolicyInstanceSelection({ + this.machineTypes, + this.rank, + }); + + InstanceGroupManagerInstanceFlexibilityPolicyInstanceSelection.fromJson( + core.Map json_) + : this( + machineTypes: (json_['machineTypes'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + rank: json_['rank'] as core.int?, + ); + + core.Map toJson() => { + if (machineTypes != null) 'machineTypes': machineTypes!, + if (rank != null) 'rank': rank!, + }; +} + class InstanceGroupManagerInstanceLifecyclePolicy { /// The action that a MIG performs on a failed or an unhealthy VM. /// @@ -79709,6 +80707,8 @@ class InstanceGroupManagerListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -80382,6 +81382,8 @@ class InstanceGroupManagerResizeRequestsListResponseWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -80509,6 +81511,44 @@ class InstanceGroupManagerResizeRequestsListResponse { }; } +class InstanceGroupManagerStandbyPolicy { + /// Specifies the number of seconds that the MIG should wait to suspend or + /// stop a VM after that VM was created. + /// + /// The initial delay gives the initialization script the time to prepare your + /// VM for a quick scale out. The value of initial delay must be between 0 and + /// 3600 seconds. The default value is 0. + core.int? initialDelaySec; + + /// Defines how a MIG resumes or starts VMs from a standby pool when the group + /// scales out. + /// + /// The default mode is `MANUAL`. + /// Possible string values are: + /// - "MANUAL" : MIG does not automatically resume or start VMs in the standby + /// pool when the group scales out. + /// - "SCALE_OUT_POOL" : MIG automatically resumes or starts VMs in the + /// standby pool when the group scales out, and replenishes the standby pool + /// afterwards. + core.String? 
mode; + + InstanceGroupManagerStandbyPolicy({ + this.initialDelaySec, + this.mode, + }); + + InstanceGroupManagerStandbyPolicy.fromJson(core.Map json_) + : this( + initialDelaySec: json_['initialDelaySec'] as core.int?, + mode: json_['mode'] as core.String?, + ); + + core.Map toJson() => { + if (initialDelaySec != null) 'initialDelaySec': initialDelaySec!, + if (mode != null) 'mode': mode!, + }; +} + class InstanceGroupManagerStatus { /// Status of all-instances configuration on the group. /// @@ -81120,6 +82160,8 @@ class InstanceGroupManagersListPerInstanceConfigsRespWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -81245,6 +82287,8 @@ class InstanceGroupManagersPatchPerInstanceConfigsReq { } typedef InstanceGroupManagersRecreateInstancesRequest = $Request08; +typedef InstanceGroupManagersResumeInstancesRequest + = $InstanceGroupManagersResumeInstancesRequest; class InstanceGroupManagersScopedListWarningData { /// A key that provides more detail on the warning being returned. @@ -81335,6 +82379,8 @@ class InstanceGroupManagersScopedListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -81492,6 +82538,13 @@ class InstanceGroupManagersSetTargetPoolsRequest { }; } +typedef InstanceGroupManagersStartInstancesRequest + = $InstanceGroupManagersStartInstancesRequest; +typedef InstanceGroupManagersStopInstancesRequest + = $InstanceGroupManagersStopInstancesRequest; +typedef InstanceGroupManagersSuspendInstancesRequest + = $InstanceGroupManagersSuspendInstancesRequest; + /// InstanceGroupManagers.updatePerInstanceConfigs class InstanceGroupManagersUpdatePerInstanceConfigsReq { /// The list of per-instance configurations to insert or patch on this managed @@ -81625,6 +82678,8 @@ class InstanceGroupsListInstancesWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -81882,6 +82937,8 @@ class InstanceGroupsScopedListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). 
/// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -82097,6 +83154,8 @@ class InstanceListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -82308,6 +83367,8 @@ class InstanceListReferrersWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -83302,6 +84363,8 @@ class InstanceTemplateAggregatedListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -83513,6 +84576,8 @@ class InstanceTemplateListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -83724,6 +84789,8 @@ class InstanceTemplatesScopedListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -84141,6 +85208,8 @@ class InstancesScopedListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. 
/// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -84735,6 +85804,8 @@ class InstantSnapshotAggregatedListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -84960,6 +86031,8 @@ class InstantSnapshotListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -85188,6 +86261,8 @@ class InstantSnapshotsScopedListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -86318,6 +87393,8 @@ class InterconnectAttachmentAggregatedListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -86620,6 +87697,8 @@ class InterconnectAttachmentListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -86900,6 +87979,8 @@ class InterconnectAttachmentsScopedListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. 
/// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -87405,6 +88486,8 @@ class InterconnectListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -87549,7 +88632,7 @@ class InterconnectLocation { core.String? availabilityZone; /// List of features available at this InterconnectLocation, which can take - /// one of the following values: - MACSEC + /// one of the following values: - IF_MACSEC /// /// Output only. core.List? availableFeatures; @@ -87827,6 +88910,8 @@ class InterconnectLocationListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -88685,6 +89770,8 @@ class InterconnectRemoteLocationListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -88924,6 +90011,8 @@ class License { /// /// The name must be 1-63 characters long and comply with RFC1035. core.String? name; + + /// \[Input Only\] Deprecated. LicenseResourceRequirements? resourceRequirements; /// Server-defined URL for the resource. @@ -89151,14 +90240,16 @@ class LicenseResourceCommitment { } class LicenseResourceRequirements { - /// Minimum number of guest cpus required to use the Instance. + /// \[Input Only\] Deprecated. /// - /// Enforced at Instance creation and Instance start. + /// This field no longer reflects the minimum number of guest cpus required to + /// use the Instance. core.int? minGuestCpuCount; - /// Minimum memory required to use the Instance. + /// \[Input Only\] Deprecated. /// - /// Enforced at Instance creation and Instance start. + /// This field no longer reflects the minimum memory required to use the + /// Instance. core.int? minMemoryMb; LicenseResourceRequirements({ @@ -89266,6 +90357,8 @@ class LicensesListResponseWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. 
/// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -89521,182 +90614,6 @@ class LocationPolicyLocationConstraints { }; } -/// This is deprecated and has no effect. -/// -/// Do not use. -class LogConfig { - /// This is deprecated and has no effect. - /// - /// Do not use. - LogConfigCloudAuditOptions? cloudAudit; - - /// This is deprecated and has no effect. - /// - /// Do not use. - LogConfigCounterOptions? counter; - - /// This is deprecated and has no effect. - /// - /// Do not use. - LogConfigDataAccessOptions? dataAccess; - - LogConfig({ - this.cloudAudit, - this.counter, - this.dataAccess, - }); - - LogConfig.fromJson(core.Map json_) - : this( - cloudAudit: json_.containsKey('cloudAudit') - ? LogConfigCloudAuditOptions.fromJson( - json_['cloudAudit'] as core.Map) - : null, - counter: json_.containsKey('counter') - ? LogConfigCounterOptions.fromJson( - json_['counter'] as core.Map) - : null, - dataAccess: json_.containsKey('dataAccess') - ? LogConfigDataAccessOptions.fromJson( - json_['dataAccess'] as core.Map) - : null, - ); - - core.Map toJson() => { - if (cloudAudit != null) 'cloudAudit': cloudAudit!, - if (counter != null) 'counter': counter!, - if (dataAccess != null) 'dataAccess': dataAccess!, - }; -} - -/// This is deprecated and has no effect. -/// -/// Do not use. -class LogConfigCloudAuditOptions { - /// This is deprecated and has no effect. - /// - /// Do not use. - /// Possible string values are: - /// - "ADMIN_ACTIVITY" : This is deprecated and has no effect. Do not use. - /// - "DATA_ACCESS" : This is deprecated and has no effect. Do not use. - /// - "UNSPECIFIED_LOG_NAME" : This is deprecated and has no effect. Do not - /// use. - core.String? logName; - - LogConfigCloudAuditOptions({ - this.logName, - }); - - LogConfigCloudAuditOptions.fromJson(core.Map json_) - : this( - logName: json_['logName'] as core.String?, - ); - - core.Map toJson() => { - if (logName != null) 'logName': logName!, - }; -} - -/// This is deprecated and has no effect. -/// -/// Do not use. -class LogConfigCounterOptions { - /// This is deprecated and has no effect. - /// - /// Do not use. - core.List? customFields; - - /// This is deprecated and has no effect. - /// - /// Do not use. - core.String? field; - - /// This is deprecated and has no effect. - /// - /// Do not use. - core.String? metric; - - LogConfigCounterOptions({ - this.customFields, - this.field, - this.metric, - }); - - LogConfigCounterOptions.fromJson(core.Map json_) - : this( - customFields: (json_['customFields'] as core.List?) - ?.map((value) => LogConfigCounterOptionsCustomField.fromJson( - value as core.Map)) - .toList(), - field: json_['field'] as core.String?, - metric: json_['metric'] as core.String?, - ); - - core.Map toJson() => { - if (customFields != null) 'customFields': customFields!, - if (field != null) 'field': field!, - if (metric != null) 'metric': metric!, - }; -} - -/// This is deprecated and has no effect. -/// -/// Do not use. -class LogConfigCounterOptionsCustomField { - /// This is deprecated and has no effect. - /// - /// Do not use. - core.String? name; - - /// This is deprecated and has no effect. - /// - /// Do not use. - core.String? 
value; - - LogConfigCounterOptionsCustomField({ - this.name, - this.value, - }); - - LogConfigCounterOptionsCustomField.fromJson(core.Map json_) - : this( - name: json_['name'] as core.String?, - value: json_['value'] as core.String?, - ); - - core.Map toJson() => { - if (name != null) 'name': name!, - if (value != null) 'value': value!, - }; -} - -/// This is deprecated and has no effect. -/// -/// Do not use. -class LogConfigDataAccessOptions { - /// This is deprecated and has no effect. - /// - /// Do not use. - /// Possible string values are: - /// - "LOG_FAIL_CLOSED" : This is deprecated and has no effect. Do not use. - /// - "LOG_MODE_UNSPECIFIED" : This is deprecated and has no effect. Do not - /// use. - core.String? logMode; - - LogConfigDataAccessOptions({ - this.logMode, - }); - - LogConfigDataAccessOptions.fromJson(core.Map json_) - : this( - logMode: json_['logMode'] as core.String?, - ); - - core.Map toJson() => { - if (logMode != null) 'logMode': logMode!, - }; -} - /// Represents a machine image resource. /// /// A machine image is a Compute Engine resource that stores all the @@ -90003,6 +90920,8 @@ class MachineImageListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -90437,6 +91356,8 @@ class MachineTypeAggregatedListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -90661,6 +91582,8 @@ class MachineTypeListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -90872,6 +91795,8 @@ class MachineTypesScopedListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -91081,6 +92006,13 @@ class ManagedInstance { /// Output only. PreservedState? preservedStateFromPolicy; + /// Instance properties selected for this instance resulting from + /// InstanceFlexibilityPolicy. + /// + /// Output only. 
+ ManagedInstancePropertiesFromFlexibilityPolicy? + propertiesFromFlexibilityPolicy; + /// Intended version of this instance. /// /// Output only. @@ -91096,6 +92028,7 @@ class ManagedInstance { this.name, this.preservedStateFromConfig, this.preservedStateFromPolicy, + this.propertiesFromFlexibilityPolicy, this.version, }); @@ -91124,6 +92057,12 @@ class ManagedInstance { ? PreservedState.fromJson(json_['preservedStateFromPolicy'] as core.Map) : null, + propertiesFromFlexibilityPolicy: + json_.containsKey('propertiesFromFlexibilityPolicy') + ? ManagedInstancePropertiesFromFlexibilityPolicy.fromJson( + json_['propertiesFromFlexibilityPolicy'] + as core.Map) + : null, version: json_.containsKey('version') ? ManagedInstanceVersion.fromJson( json_['version'] as core.Map) @@ -91142,6 +92081,8 @@ class ManagedInstance { 'preservedStateFromConfig': preservedStateFromConfig!, if (preservedStateFromPolicy != null) 'preservedStateFromPolicy': preservedStateFromPolicy!, + if (propertiesFromFlexibilityPolicy != null) + 'propertiesFromFlexibilityPolicy': propertiesFromFlexibilityPolicy!, if (version != null) 'version': version!, }; } @@ -91334,6 +92275,24 @@ class ManagedInstanceLastAttempt { }; } +class ManagedInstancePropertiesFromFlexibilityPolicy { + /// The machine type to be used for this instance. + core.String? machineType; + + ManagedInstancePropertiesFromFlexibilityPolicy({ + this.machineType, + }); + + ManagedInstancePropertiesFromFlexibilityPolicy.fromJson(core.Map json_) + : this( + machineType: json_['machineType'] as core.String?, + ); + + core.Map toJson() => { + if (machineType != null) 'machineType': machineType!, + }; +} + class ManagedInstanceVersion { /// The intended template of the instance. /// @@ -91763,6 +92722,14 @@ class Network { /// - "BEFORE_CLASSIC_FIREWALL" core.String? networkFirewallPolicyEnforcementOrder; + /// A full or partial URL of the network profile to apply to this network. + /// + /// This field can be set only at resource creation time. For example, the + /// following are valid URLs: - + /// https://www.googleapis.com/compute/{api_version}/projects/{project_id}/global/networkProfiles/{network_profile_name} + /// - projects/{project_id}/global/networkProfiles/{network_profile_name} + core.String? networkProfile; + /// A list of network peerings for the resource. /// /// Output only. @@ -91804,6 +92771,7 @@ class Network { this.mtu, this.name, this.networkFirewallPolicyEnforcementOrder, + this.networkProfile, this.peerings, this.routingConfig, this.selfLink, @@ -91827,6 +92795,7 @@ class Network { name: json_['name'] as core.String?, networkFirewallPolicyEnforcementOrder: json_['networkFirewallPolicyEnforcementOrder'] as core.String?, + networkProfile: json_['networkProfile'] as core.String?, peerings: (json_['peerings'] as core.List?) ?.map((value) => NetworkPeering.fromJson( value as core.Map)) @@ -91860,6 +92829,7 @@ class Network { if (networkFirewallPolicyEnforcementOrder != null) 'networkFirewallPolicyEnforcementOrder': networkFirewallPolicyEnforcementOrder!, + if (networkProfile != null) 'networkProfile': networkProfile!, if (peerings != null) 'peerings': peerings!, if (routingConfig != null) 'routingConfig': routingConfig!, if (selfLink != null) 'selfLink': selfLink!, @@ -92130,6 +93100,8 @@ class NetworkAttachmentAggregatedListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. 
/// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -92420,6 +93392,8 @@ class NetworkAttachmentListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -92623,6 +93597,8 @@ class NetworkAttachmentsScopedListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -92910,6 +93886,8 @@ class NetworkEdgeSecurityServiceAggregatedListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -93139,6 +94117,8 @@ class NetworkEdgeSecurityServicesScopedListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -93616,6 +94596,8 @@ class NetworkEndpointGroupAggregatedListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -93983,6 +94965,8 @@ class NetworkEndpointGroupListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. 
/// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -94321,6 +95305,8 @@ class NetworkEndpointGroupsListNetworkEndpointsWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -94525,6 +95511,8 @@ class NetworkEndpointGroupsScopedListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -94754,6 +95742,8 @@ class NetworkInterface { /// Possible string values are: /// - "GVNIC" : GVNIC /// - "IDPF" : IDPF + /// - "IRDMA" : IRDMA + /// - "MRDMA" : MRDMA /// - "UNSPECIFIED_NIC_TYPE" : No type specified. /// - "VIRTIO_NET" : VIRTIO core.String? nicType; @@ -94949,6 +95939,748 @@ class NetworkListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). + /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that + /// requires a TOS they have not accepted. + /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is + /// in use. + /// - "RESOURCE_NOT_DELETED" : One or more of the resources set to auto-delete + /// could not be deleted because they were in use. + /// - "SCHEMA_VALIDATION_IGNORED" : When a resource schema validation is + /// ignored. + /// - "SINGLE_INSTANCE_PROPERTY_TEMPLATE" : Instance template used in instance + /// group manager is valid as such, but its application does not make a lot of + /// sense, because it allows only single instance in instance group. + /// - "UNDECLARED_PROPERTIES" : When undeclared properties in the schema are + /// present + /// - "UNREACHABLE" : A given scope cannot be reached. + core.String? code; + + /// Metadata about this warning in key: value format. + /// + /// For example: "data": \[ { "key": "scope", "value": "zones/us-east1-d" } + /// + /// Output only. + core.List? data; + + /// A human-readable description of the warning code. + /// + /// Output only. + core.String? message; + + NetworkListWarning({ + this.code, + this.data, + this.message, + }); + + NetworkListWarning.fromJson(core.Map json_) + : this( + code: json_['code'] as core.String?, + data: (json_['data'] as core.List?) 
+ ?.map((value) => NetworkListWarningData.fromJson( + value as core.Map)) + .toList(), + message: json_['message'] as core.String?, + ); + + core.Map toJson() => { + if (code != null) 'code': code!, + if (data != null) 'data': data!, + if (message != null) 'message': message!, + }; +} + +/// Contains a list of networks. +class NetworkList { + /// Unique identifier for the resource; defined by the server. + /// + /// Output only. + core.String? id; + + /// A list of Network resources. + core.List? items; + + /// Type of resource. + /// + /// Always compute#networkList for lists of networks. + /// + /// Output only. + core.String? kind; + + /// This token allows you to get the next page of results for list requests. + /// + /// If the number of results is larger than maxResults, use the nextPageToken + /// as a value for the query parameter pageToken in the next list request. + /// Subsequent list requests will have their own nextPageToken to continue + /// paging through the results. + /// + /// Output only. + core.String? nextPageToken; + + /// Server-defined URL for this resource. + /// + /// Output only. + core.String? selfLink; + + /// Informational warning message. + /// + /// Output only. + NetworkListWarning? warning; + + NetworkList({ + this.id, + this.items, + this.kind, + this.nextPageToken, + this.selfLink, + this.warning, + }); + + NetworkList.fromJson(core.Map json_) + : this( + id: json_['id'] as core.String?, + items: (json_['items'] as core.List?) + ?.map((value) => Network.fromJson( + value as core.Map)) + .toList(), + kind: json_['kind'] as core.String?, + nextPageToken: json_['nextPageToken'] as core.String?, + selfLink: json_['selfLink'] as core.String?, + warning: json_.containsKey('warning') + ? NetworkListWarning.fromJson( + json_['warning'] as core.Map) + : null, + ); + + core.Map toJson() => { + if (id != null) 'id': id!, + if (items != null) 'items': items!, + if (kind != null) 'kind': kind!, + if (nextPageToken != null) 'nextPageToken': nextPageToken!, + if (selfLink != null) 'selfLink': selfLink!, + if (warning != null) 'warning': warning!, + }; +} + +/// A network peering attached to a network resource. +/// +/// The message includes the peering name, peer network, peering state, and a +/// flag indicating whether Google Compute Engine should automatically create +/// routes for the peering. +class NetworkPeering { + /// This field will be deprecated soon. + /// + /// Use the exchange_subnet_routes field instead. Indicates whether full mesh + /// connectivity is created and managed automatically between peered networks. + /// Currently this field should always be true since Google Compute Engine + /// will automatically create and manage subnetwork routes between two + /// networks when peering state is ACTIVE. + core.bool? autoCreateRoutes; + + /// Indicates whether full mesh connectivity is created and managed + /// automatically between peered networks. + /// + /// Currently this field should always be true since Google Compute Engine + /// will automatically create and manage subnetwork routes between two + /// networks when peering state is ACTIVE. + core.bool? exchangeSubnetRoutes; + + /// Whether to export the custom routes to peer network. + /// + /// The default value is false. + core.bool? exportCustomRoutes; + + /// Whether subnet routes with public IP range are exported. + /// + /// The default value is true, all subnet routes are exported. IPv4 + /// special-use ranges are always exported to peers and are not controlled by + /// this field. 
+ core.bool? exportSubnetRoutesWithPublicIp; + + /// Whether to import the custom routes from peer network. + /// + /// The default value is false. + core.bool? importCustomRoutes; + + /// Whether subnet routes with public IP range are imported. + /// + /// The default value is false. IPv4 special-use ranges are always imported + /// from peers and are not controlled by this field. + core.bool? importSubnetRoutesWithPublicIp; + + /// Name of this peering. + /// + /// Provided by the client when the peering is created. The name must comply + /// with RFC1035. Specifically, the name must be 1-63 characters long and + /// match regular expression `[a-z]([-a-z0-9]*[a-z0-9])?`. The first character + /// must be a lowercase letter, and all the following characters must be a + /// dash, lowercase letter, or digit, except the last character, which cannot + /// be a dash. + core.String? name; + + /// The URL of the peer network. + /// + /// It can be either full URL or partial URL. The peer network may belong to a + /// different project. If the partial URL does not contain project, it is + /// assumed that the peer network is in the same project as the current + /// network. + core.String? network; + + /// Maximum Transmission Unit in bytes. + core.int? peerMtu; + + /// Which IP version(s) of traffic and routes are allowed to be imported or + /// exported between peer networks. + /// + /// The default value is IPV4_ONLY. + /// Possible string values are: + /// - "IPV4_IPV6" : This Peering will allow IPv4 traffic and routes to be + /// exchanged. Additionally if the matching peering is IPV4_IPV6, IPv6 traffic + /// and routes will be exchanged as well. + /// - "IPV4_ONLY" : This Peering will only allow IPv4 traffic and routes to be + /// exchanged, even if the matching peering is IPV4_IPV6. + core.String? stackType; + + /// State for the peering, either `ACTIVE` or `INACTIVE`. + /// + /// The peering is `ACTIVE` when there's a matching configuration in the peer + /// network. + /// + /// Output only. + /// Possible string values are: + /// - "ACTIVE" : Matching configuration exists on the peer. + /// - "INACTIVE" : There is no matching configuration on the peer, including + /// the case when peer does not exist. + core.String? state; + + /// Details about the current state of the peering. + /// + /// Output only. + core.String? 
stateDetails; + + NetworkPeering({ + this.autoCreateRoutes, + this.exchangeSubnetRoutes, + this.exportCustomRoutes, + this.exportSubnetRoutesWithPublicIp, + this.importCustomRoutes, + this.importSubnetRoutesWithPublicIp, + this.name, + this.network, + this.peerMtu, + this.stackType, + this.state, + this.stateDetails, + }); + + NetworkPeering.fromJson(core.Map json_) + : this( + autoCreateRoutes: json_['autoCreateRoutes'] as core.bool?, + exchangeSubnetRoutes: json_['exchangeSubnetRoutes'] as core.bool?, + exportCustomRoutes: json_['exportCustomRoutes'] as core.bool?, + exportSubnetRoutesWithPublicIp: + json_['exportSubnetRoutesWithPublicIp'] as core.bool?, + importCustomRoutes: json_['importCustomRoutes'] as core.bool?, + importSubnetRoutesWithPublicIp: + json_['importSubnetRoutesWithPublicIp'] as core.bool?, + name: json_['name'] as core.String?, + network: json_['network'] as core.String?, + peerMtu: json_['peerMtu'] as core.int?, + stackType: json_['stackType'] as core.String?, + state: json_['state'] as core.String?, + stateDetails: json_['stateDetails'] as core.String?, + ); + + core.Map toJson() => { + if (autoCreateRoutes != null) 'autoCreateRoutes': autoCreateRoutes!, + if (exchangeSubnetRoutes != null) + 'exchangeSubnetRoutes': exchangeSubnetRoutes!, + if (exportCustomRoutes != null) + 'exportCustomRoutes': exportCustomRoutes!, + if (exportSubnetRoutesWithPublicIp != null) + 'exportSubnetRoutesWithPublicIp': exportSubnetRoutesWithPublicIp!, + if (importCustomRoutes != null) + 'importCustomRoutes': importCustomRoutes!, + if (importSubnetRoutesWithPublicIp != null) + 'importSubnetRoutesWithPublicIp': importSubnetRoutesWithPublicIp!, + if (name != null) 'name': name!, + if (network != null) 'network': network!, + if (peerMtu != null) 'peerMtu': peerMtu!, + if (stackType != null) 'stackType': stackType!, + if (state != null) 'state': state!, + if (stateDetails != null) 'stateDetails': stateDetails!, + }; +} + +class NetworkPerformanceConfig { + /// + /// Possible string values are: + /// - "DEFAULT" + /// - "TIER_1" + core.String? totalEgressBandwidthTier; + + NetworkPerformanceConfig({ + this.totalEgressBandwidthTier, + }); + + NetworkPerformanceConfig.fromJson(core.Map json_) + : this( + totalEgressBandwidthTier: + json_['totalEgressBandwidthTier'] as core.String?, + ); + + core.Map toJson() => { + if (totalEgressBandwidthTier != null) + 'totalEgressBandwidthTier': totalEgressBandwidthTier!, + }; +} + +/// NetworkProfile represents a Google managed network profile resource. +class NetworkProfile { + /// Creation timestamp in RFC3339 text format. + /// + /// Output only. + core.String? creationTimestamp; + + /// An optional description of this resource. + /// + /// Output only. + core.String? description; + + /// Features supported by the network. + /// + /// Output only. + NetworkProfileNetworkFeatures? features; + + /// The unique identifier for the resource. + /// + /// This identifier is defined by the server. + /// + /// Output only. + core.String? id; + + /// Type of the resource. + /// + /// Always compute#networkProfile for network profiles. + /// + /// Output only. + core.String? kind; + + /// Location to which the network is restricted. + /// + /// Output only. + NetworkProfileLocation? location; + + /// Name of the resource. + /// + /// Output only. + core.String? name; + + /// Server-defined URL for the resource. + /// + /// Output only. + core.String? selfLink; + + /// Server-defined URL for this resource with the resource id. + /// + /// Output only. + core.String? 
selfLinkWithId; + + /// Zone to which the network is restricted. + /// + /// Output only. + core.String? zone; + + NetworkProfile({ + this.creationTimestamp, + this.description, + this.features, + this.id, + this.kind, + this.location, + this.name, + this.selfLink, + this.selfLinkWithId, + this.zone, + }); + + NetworkProfile.fromJson(core.Map json_) + : this( + creationTimestamp: json_['creationTimestamp'] as core.String?, + description: json_['description'] as core.String?, + features: json_.containsKey('features') + ? NetworkProfileNetworkFeatures.fromJson( + json_['features'] as core.Map) + : null, + id: json_['id'] as core.String?, + kind: json_['kind'] as core.String?, + location: json_.containsKey('location') + ? NetworkProfileLocation.fromJson( + json_['location'] as core.Map) + : null, + name: json_['name'] as core.String?, + selfLink: json_['selfLink'] as core.String?, + selfLinkWithId: json_['selfLinkWithId'] as core.String?, + zone: json_['zone'] as core.String?, + ); + + core.Map toJson() => { + if (creationTimestamp != null) 'creationTimestamp': creationTimestamp!, + if (description != null) 'description': description!, + if (features != null) 'features': features!, + if (id != null) 'id': id!, + if (kind != null) 'kind': kind!, + if (location != null) 'location': location!, + if (name != null) 'name': name!, + if (selfLink != null) 'selfLink': selfLink!, + if (selfLinkWithId != null) 'selfLinkWithId': selfLinkWithId!, + if (zone != null) 'zone': zone!, + }; +} + +class NetworkProfileLocation { + core.String? name; + + /// + /// Possible string values are: + /// - "REGION" + /// - "ZONE" + core.String? scope; + + NetworkProfileLocation({ + this.name, + this.scope, + }); + + NetworkProfileLocation.fromJson(core.Map json_) + : this( + name: json_['name'] as core.String?, + scope: json_['scope'] as core.String?, + ); + + core.Map toJson() => { + if (name != null) 'name': name!, + if (scope != null) 'scope': scope!, + }; +} + +class NetworkProfileNetworkFeatures { + /// Specifies what address purposes are supported. + /// + /// If empty, all address purposes are supported. + core.List? addressPurposes; + + /// Specifies whether alias IP ranges (and secondary address ranges) are + /// allowed. + /// Possible string values are: + /// - "ALIAS_IP_RANGES_ALLOWED" + /// - "ALIAS_IP_RANGES_BLOCKED" + core.String? allowAliasIpRanges; + + /// Specifies whether auto mode subnet creation is allowed. + /// Possible string values are: + /// - "AUTO_MODE_SUBNET_ALLOWED" + /// - "AUTO_MODE_SUBNET_BLOCKED" + core.String? allowAutoModeSubnet; + + /// Specifies whether firewalls for Class D address ranges are supported. + /// Possible string values are: + /// - "CLASS_D_FIREWALLS_ALLOWED" + /// - "CLASS_D_FIREWALLS_BLOCKED" + core.String? allowClassDFirewalls; + + /// Specifies whether cloud NAT creation is allowed. + /// Possible string values are: + /// - "CLOUD_NAT_ALLOWED" + /// - "CLOUD_NAT_BLOCKED" + core.String? allowCloudNat; + + /// Specifies whether cloud router creation is allowed. + /// Possible string values are: + /// - "CLOUD_ROUTER_ALLOWED" + /// - "CLOUD_ROUTER_BLOCKED" + core.String? allowCloudRouter; + + /// Specifies whether VMs are allowed to have external IP access on network + /// interfaces connected to this VPC. + /// Possible string values are: + /// - "EXTERNAL_IP_ACCESS_ALLOWED" + /// - "EXTERNAL_IP_ACCESS_BLOCKED" + core.String? allowExternalIpAccess; + + /// Specifies whether Cloud Interconnect creation is allowed. 
+ /// Possible string values are: + /// - "INTERCONNECT_ALLOWED" + /// - "INTERCONNECT_BLOCKED" + core.String? allowInterconnect; + + /// Specifies whether cloud load balancing is allowed. + /// Possible string values are: + /// - "LOAD_BALANCING_ALLOWED" + /// - "LOAD_BALANCING_BLOCKED" + core.String? allowLoadBalancing; + + /// Specifies whether multi-nic in the same network is allowed. + /// Possible string values are: + /// - "MULTI_NIC_IN_SAME_NETWORK_ALLOWED" + /// - "MULTI_NIC_IN_SAME_NETWORK_BLOCKED" + core.String? allowMultiNicInSameNetwork; + + /// Specifies whether Packet Mirroring 1.0 is supported. + /// Possible string values are: + /// - "PACKET_MIRRORING_ALLOWED" + /// - "PACKET_MIRRORING_BLOCKED" + core.String? allowPacketMirroring; + + /// Specifies whether private Google access is allowed. + /// Possible string values are: + /// - "PRIVATE_GOOGLE_ACCESS_ALLOWED" + /// - "PRIVATE_GOOGLE_ACCESS_BLOCKED" + core.String? allowPrivateGoogleAccess; + + /// Specifies whether PSC creation is allowed. + /// Possible string values are: + /// - "PSC_ALLOWED" + /// - "PSC_BLOCKED" + core.String? allowPsc; + + /// Specifies whether unicast within the same network is allowed. + /// Possible string values are: + /// - "SAME_NETWORK_UNICAST_ALLOWED" + /// - "SAME_NETWORK_UNICAST_BLOCKED" + core.String? allowSameNetworkUnicast; + + /// Specifies whether static route creation is allowed. + /// Possible string values are: + /// - "STATIC_ROUTES_ALLOWED" + /// - "STATIC_ROUTES_BLOCKED" + core.String? allowStaticRoutes; + + /// Specifies whether sub interfaces are allowed. + /// Possible string values are: + /// - "SUBINTERFACES_ALLOWED" + /// - "SUBINTERFACES_BLOCKED" + core.String? allowSubInterfaces; + + /// Specifies whether VPC peering is allowed. + /// Possible string values are: + /// - "VPC_PEERING_ALLOWED" + /// - "VPC_PEERING_BLOCKED" + core.String? allowVpcPeering; + + /// Specifies whether VPN creation is allowed. + /// Possible string values are: + /// - "VPN_ALLOWED" + /// - "VPN_BLOCKED" + core.String? allowVpn; + + /// If set, limits the interface types that the network supports. + /// + /// If empty, all interface types are supported. + core.List? interfaceTypes; + + /// Specifies which subnetwork purposes are supported. + core.List? subnetPurposes; + + /// Specifies which subnetwork stack types are supported. + core.List? subnetStackTypes; + + /// Specifies which type of unicast is supported. + /// Possible string values are: + /// - "UNICAST_SDN" + /// - "UNICAST_ULL" + core.String? unicast; + + NetworkProfileNetworkFeatures({ + this.addressPurposes, + this.allowAliasIpRanges, + this.allowAutoModeSubnet, + this.allowClassDFirewalls, + this.allowCloudNat, + this.allowCloudRouter, + this.allowExternalIpAccess, + this.allowInterconnect, + this.allowLoadBalancing, + this.allowMultiNicInSameNetwork, + this.allowPacketMirroring, + this.allowPrivateGoogleAccess, + this.allowPsc, + this.allowSameNetworkUnicast, + this.allowStaticRoutes, + this.allowSubInterfaces, + this.allowVpcPeering, + this.allowVpn, + this.interfaceTypes, + this.subnetPurposes, + this.subnetStackTypes, + this.unicast, + }); + + NetworkProfileNetworkFeatures.fromJson(core.Map json_) + : this( + addressPurposes: (json_['addressPurposes'] as core.List?) 
+ ?.map((value) => value as core.String) + .toList(), + allowAliasIpRanges: json_['allowAliasIpRanges'] as core.String?, + allowAutoModeSubnet: json_['allowAutoModeSubnet'] as core.String?, + allowClassDFirewalls: json_['allowClassDFirewalls'] as core.String?, + allowCloudNat: json_['allowCloudNat'] as core.String?, + allowCloudRouter: json_['allowCloudRouter'] as core.String?, + allowExternalIpAccess: json_['allowExternalIpAccess'] as core.String?, + allowInterconnect: json_['allowInterconnect'] as core.String?, + allowLoadBalancing: json_['allowLoadBalancing'] as core.String?, + allowMultiNicInSameNetwork: + json_['allowMultiNicInSameNetwork'] as core.String?, + allowPacketMirroring: json_['allowPacketMirroring'] as core.String?, + allowPrivateGoogleAccess: + json_['allowPrivateGoogleAccess'] as core.String?, + allowPsc: json_['allowPsc'] as core.String?, + allowSameNetworkUnicast: + json_['allowSameNetworkUnicast'] as core.String?, + allowStaticRoutes: json_['allowStaticRoutes'] as core.String?, + allowSubInterfaces: json_['allowSubInterfaces'] as core.String?, + allowVpcPeering: json_['allowVpcPeering'] as core.String?, + allowVpn: json_['allowVpn'] as core.String?, + interfaceTypes: (json_['interfaceTypes'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + subnetPurposes: (json_['subnetPurposes'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + subnetStackTypes: (json_['subnetStackTypes'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + unicast: json_['unicast'] as core.String?, + ); + + core.Map toJson() => { + if (addressPurposes != null) 'addressPurposes': addressPurposes!, + if (allowAliasIpRanges != null) + 'allowAliasIpRanges': allowAliasIpRanges!, + if (allowAutoModeSubnet != null) + 'allowAutoModeSubnet': allowAutoModeSubnet!, + if (allowClassDFirewalls != null) + 'allowClassDFirewalls': allowClassDFirewalls!, + if (allowCloudNat != null) 'allowCloudNat': allowCloudNat!, + if (allowCloudRouter != null) 'allowCloudRouter': allowCloudRouter!, + if (allowExternalIpAccess != null) + 'allowExternalIpAccess': allowExternalIpAccess!, + if (allowInterconnect != null) 'allowInterconnect': allowInterconnect!, + if (allowLoadBalancing != null) + 'allowLoadBalancing': allowLoadBalancing!, + if (allowMultiNicInSameNetwork != null) + 'allowMultiNicInSameNetwork': allowMultiNicInSameNetwork!, + if (allowPacketMirroring != null) + 'allowPacketMirroring': allowPacketMirroring!, + if (allowPrivateGoogleAccess != null) + 'allowPrivateGoogleAccess': allowPrivateGoogleAccess!, + if (allowPsc != null) 'allowPsc': allowPsc!, + if (allowSameNetworkUnicast != null) + 'allowSameNetworkUnicast': allowSameNetworkUnicast!, + if (allowStaticRoutes != null) 'allowStaticRoutes': allowStaticRoutes!, + if (allowSubInterfaces != null) + 'allowSubInterfaces': allowSubInterfaces!, + if (allowVpcPeering != null) 'allowVpcPeering': allowVpcPeering!, + if (allowVpn != null) 'allowVpn': allowVpn!, + if (interfaceTypes != null) 'interfaceTypes': interfaceTypes!, + if (subnetPurposes != null) 'subnetPurposes': subnetPurposes!, + if (subnetStackTypes != null) 'subnetStackTypes': subnetStackTypes!, + if (unicast != null) 'unicast': unicast!, + }; +} + +class NetworkProfilesListResponseWarningData { + /// A key that provides more detail on the warning being returned. + /// + /// For example, for warnings where there are no results in a list request for + /// a particular zone, this key might be scope and the key value might be the + /// zone name. 
Other examples might be a key indicating a deprecated resource + /// and a suggested replacement, or a warning about invalid network settings + /// (for example, if an instance attempts to perform IP forwarding but is not + /// enabled for IP forwarding). + /// + /// Output only. + core.String? key; + + /// A warning data value corresponding to the key. + /// + /// Output only. + core.String? value; + + NetworkProfilesListResponseWarningData({ + this.key, + this.value, + }); + + NetworkProfilesListResponseWarningData.fromJson(core.Map json_) + : this( + key: json_['key'] as core.String?, + value: json_['value'] as core.String?, + ); + + core.Map toJson() => { + if (key != null) 'key': key!, + if (value != null) 'value': value!, + }; +} + +/// Informational warning message. +/// +/// Output only. +class NetworkProfilesListResponseWarning { + /// A warning code, if applicable. + /// + /// For example, Compute Engine returns NO_RESULTS_ON_PAGE if there are no + /// results in the response. + /// + /// Output only. + /// Possible string values are: + /// - "CLEANUP_FAILED" : Warning about failed cleanup of transient changes + /// made by a failed operation. + /// - "DEPRECATED_RESOURCE_USED" : A link to a deprecated resource was + /// created. + /// - "DEPRECATED_TYPE_USED" : When deploying and at least one of the + /// resources has a type marked as deprecated + /// - "DISK_SIZE_LARGER_THAN_IMAGE_SIZE" : The user created a boot disk that + /// is larger than image size. + /// - "EXPERIMENTAL_TYPE_USED" : When deploying and at least one of the + /// resources has a type marked as experimental + /// - "EXTERNAL_API_WARNING" : Warning that is present in an external api call + /// - "FIELD_VALUE_OVERRIDEN" : Warning that value of a field has been + /// overridden. Deprecated unused field. + /// - "INJECTED_KERNELS_DEPRECATED" : The operation involved use of an + /// injected kernel, which is deprecated. + /// - "INVALID_HEALTH_CHECK_FOR_DYNAMIC_WIEGHTED_LB" : A WEIGHTED_MAGLEV + /// backend service is associated with a health check that is not of type + /// HTTP/HTTPS/HTTP2. + /// - "LARGE_DEPLOYMENT_WARNING" : When deploying a deployment with a + /// exceedingly large number of resources + /// - "LIST_OVERHEAD_QUOTA_EXCEED" : Resource can't be retrieved due to list + /// overhead quota exceed which captures the amount of resources filtered out + /// by user-defined list filter. + /// - "MISSING_TYPE_DEPENDENCY" : A resource depends on a missing type + /// - "NEXT_HOP_ADDRESS_NOT_ASSIGNED" : The route's nextHopIp address is not + /// assigned to an instance on the network. + /// - "NEXT_HOP_CANNOT_IP_FORWARD" : The route's next hop instance cannot ip + /// forward. + /// - "NEXT_HOP_INSTANCE_HAS_NO_IPV6_INTERFACE" : The route's nextHopInstance + /// URL refers to an instance that does not have an ipv6 interface on the same + /// network as the route. + /// - "NEXT_HOP_INSTANCE_NOT_FOUND" : The route's nextHopInstance URL refers + /// to an instance that does not exist. + /// - "NEXT_HOP_INSTANCE_NOT_ON_NETWORK" : The route's nextHopInstance URL + /// refers to an instance that is not on the same network as the route. + /// - "NEXT_HOP_NOT_RUNNING" : The route's next hop instance does not have a + /// status of RUNNING. + /// - "NOT_CRITICAL_ERROR" : Error which is not critical. We decided to + /// continue the process despite the mentioned error. + /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. 
+ /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing + /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -94970,24 +96702,24 @@ class NetworkListWarning { /// For example: "data": \[ { "key": "scope", "value": "zones/us-east1-d" } /// /// Output only. - core.List? data; + core.List? data; /// A human-readable description of the warning code. /// /// Output only. core.String? message; - NetworkListWarning({ + NetworkProfilesListResponseWarning({ this.code, this.data, this.message, }); - NetworkListWarning.fromJson(core.Map json_) + NetworkProfilesListResponseWarning.fromJson(core.Map json_) : this( code: json_['code'] as core.String?, data: (json_['data'] as core.List?) - ?.map((value) => NetworkListWarningData.fromJson( + ?.map((value) => NetworkProfilesListResponseWarningData.fromJson( value as core.Map)) .toList(), message: json_['message'] as core.String?, @@ -95000,19 +96732,21 @@ class NetworkListWarning { }; } -/// Contains a list of networks. -class NetworkList { +/// Contains a list of network profiles. +class NetworkProfilesListResponse { + core.String? etag; + /// Unique identifier for the resource; defined by the server. /// /// Output only. core.String? id; - /// A list of Network resources. - core.List? items; + /// A list of NetworkProfile resources. + core.List? items; /// Type of resource. /// - /// Always compute#networkList for lists of networks. + /// Always compute#networkProfileList for network profiles. /// /// Output only. core.String? kind; @@ -95032,224 +96766,90 @@ class NetworkList { /// Output only. core.String? selfLink; + /// Unreachable resources. + /// + /// end_interface: MixerListResponseWithEtagBuilder + /// + /// Output only. + core.List? unreachables; + /// Informational warning message. /// /// Output only. - NetworkListWarning? warning; + NetworkProfilesListResponseWarning? warning; - NetworkList({ + NetworkProfilesListResponse({ + this.etag, this.id, this.items, this.kind, this.nextPageToken, this.selfLink, + this.unreachables, this.warning, }); - NetworkList.fromJson(core.Map json_) + NetworkProfilesListResponse.fromJson(core.Map json_) : this( + etag: json_['etag'] as core.String?, id: json_['id'] as core.String?, items: (json_['items'] as core.List?) - ?.map((value) => Network.fromJson( + ?.map((value) => NetworkProfile.fromJson( value as core.Map)) .toList(), kind: json_['kind'] as core.String?, nextPageToken: json_['nextPageToken'] as core.String?, selfLink: json_['selfLink'] as core.String?, + unreachables: (json_['unreachables'] as core.List?) + ?.map((value) => value as core.String) + .toList(), warning: json_.containsKey('warning') - ? NetworkListWarning.fromJson( + ? NetworkProfilesListResponseWarning.fromJson( json_['warning'] as core.Map) : null, ); core.Map toJson() => { + if (etag != null) 'etag': etag!, if (id != null) 'id': id!, if (items != null) 'items': items!, if (kind != null) 'kind': kind!, if (nextPageToken != null) 'nextPageToken': nextPageToken!, if (selfLink != null) 'selfLink': selfLink!, + if (unreachables != null) 'unreachables': unreachables!, if (warning != null) 'warning': warning!, }; } -/// A network peering attached to a network resource. +/// A routing configuration attached to a network resource. 
/// -/// The message includes the peering name, peer network, peering state, and a -/// flag indicating whether Google Compute Engine should automatically create -/// routes for the peering. -class NetworkPeering { - /// This field will be deprecated soon. - /// - /// Use the exchange_subnet_routes field instead. Indicates whether full mesh - /// connectivity is created and managed automatically between peered networks. - /// Currently this field should always be true since Google Compute Engine - /// will automatically create and manage subnetwork routes between two - /// networks when peering state is ACTIVE. - core.bool? autoCreateRoutes; - - /// Indicates whether full mesh connectivity is created and managed - /// automatically between peered networks. - /// - /// Currently this field should always be true since Google Compute Engine - /// will automatically create and manage subnetwork routes between two - /// networks when peering state is ACTIVE. - core.bool? exchangeSubnetRoutes; - - /// Whether to export the custom routes to peer network. - /// - /// The default value is false. - core.bool? exportCustomRoutes; - - /// Whether subnet routes with public IP range are exported. - /// - /// The default value is true, all subnet routes are exported. IPv4 - /// special-use ranges are always exported to peers and are not controlled by - /// this field. - core.bool? exportSubnetRoutesWithPublicIp; - - /// Whether to import the custom routes from peer network. - /// - /// The default value is false. - core.bool? importCustomRoutes; - - /// Whether subnet routes with public IP range are imported. - /// - /// The default value is false. IPv4 special-use ranges are always imported - /// from peers and are not controlled by this field. - core.bool? importSubnetRoutesWithPublicIp; - - /// Name of this peering. - /// - /// Provided by the client when the peering is created. The name must comply - /// with RFC1035. Specifically, the name must be 1-63 characters long and - /// match regular expression `[a-z]([-a-z0-9]*[a-z0-9])?`. The first character - /// must be a lowercase letter, and all the following characters must be a - /// dash, lowercase letter, or digit, except the last character, which cannot - /// be a dash. - core.String? name; - - /// The URL of the peer network. - /// - /// It can be either full URL or partial URL. The peer network may belong to a - /// different project. If the partial URL does not contain project, it is - /// assumed that the peer network is in the same project as the current - /// network. - core.String? network; - - /// Maximum Transmission Unit in bytes. - core.int? peerMtu; - - /// Which IP version(s) of traffic and routes are allowed to be imported or - /// exported between peer networks. - /// - /// The default value is IPV4_ONLY. - /// Possible string values are: - /// - "IPV4_IPV6" : This Peering will allow IPv4 traffic and routes to be - /// exchanged. Additionally if the matching peering is IPV4_IPV6, IPv6 traffic - /// and routes will be exchanged as well. - /// - "IPV4_ONLY" : This Peering will only allow IPv4 traffic and routes to be - /// exchanged, even if the matching peering is IPV4_IPV6. - core.String? stackType; +/// The message includes the list of routers associated with the network, and a +/// flag indicating the type of routing behavior to enforce network-wide. 
+class NetworkRoutingConfig { + /// Enable comparison of Multi-Exit Discriminators (MED) across routes with + /// different neighbor ASNs when using the STANDARD BGP best path selection + /// algorithm. + core.bool? bgpAlwaysCompareMed; - /// State for the peering, either `ACTIVE` or `INACTIVE`. - /// - /// The peering is `ACTIVE` when there's a matching configuration in the peer - /// network. + /// The BGP best path selection algorithm to be employed within this network + /// for dynamic routes learned by Cloud Routers. /// - /// Output only. + /// Can be LEGACY (default) or STANDARD. /// Possible string values are: - /// - "ACTIVE" : Matching configuration exists on the peer. - /// - "INACTIVE" : There is no matching configuration on the peer, including - /// the case when peer does not exist. - core.String? state; - - /// Details about the current state of the peering. - /// - /// Output only. - core.String? stateDetails; - - NetworkPeering({ - this.autoCreateRoutes, - this.exchangeSubnetRoutes, - this.exportCustomRoutes, - this.exportSubnetRoutesWithPublicIp, - this.importCustomRoutes, - this.importSubnetRoutesWithPublicIp, - this.name, - this.network, - this.peerMtu, - this.stackType, - this.state, - this.stateDetails, - }); - - NetworkPeering.fromJson(core.Map json_) - : this( - autoCreateRoutes: json_['autoCreateRoutes'] as core.bool?, - exchangeSubnetRoutes: json_['exchangeSubnetRoutes'] as core.bool?, - exportCustomRoutes: json_['exportCustomRoutes'] as core.bool?, - exportSubnetRoutesWithPublicIp: - json_['exportSubnetRoutesWithPublicIp'] as core.bool?, - importCustomRoutes: json_['importCustomRoutes'] as core.bool?, - importSubnetRoutesWithPublicIp: - json_['importSubnetRoutesWithPublicIp'] as core.bool?, - name: json_['name'] as core.String?, - network: json_['network'] as core.String?, - peerMtu: json_['peerMtu'] as core.int?, - stackType: json_['stackType'] as core.String?, - state: json_['state'] as core.String?, - stateDetails: json_['stateDetails'] as core.String?, - ); - - core.Map toJson() => { - if (autoCreateRoutes != null) 'autoCreateRoutes': autoCreateRoutes!, - if (exchangeSubnetRoutes != null) - 'exchangeSubnetRoutes': exchangeSubnetRoutes!, - if (exportCustomRoutes != null) - 'exportCustomRoutes': exportCustomRoutes!, - if (exportSubnetRoutesWithPublicIp != null) - 'exportSubnetRoutesWithPublicIp': exportSubnetRoutesWithPublicIp!, - if (importCustomRoutes != null) - 'importCustomRoutes': importCustomRoutes!, - if (importSubnetRoutesWithPublicIp != null) - 'importSubnetRoutesWithPublicIp': importSubnetRoutesWithPublicIp!, - if (name != null) 'name': name!, - if (network != null) 'network': network!, - if (peerMtu != null) 'peerMtu': peerMtu!, - if (stackType != null) 'stackType': stackType!, - if (state != null) 'state': state!, - if (stateDetails != null) 'stateDetails': stateDetails!, - }; -} + /// - "LEGACY" + /// - "STANDARD" + core.String? bgpBestPathSelectionMode; -class NetworkPerformanceConfig { + /// Allows to define a preferred approach for handling inter-region cost in + /// the selection process when using the STANDARD BGP best path selection + /// algorithm. /// + /// Can be DEFAULT or ADD_COST_TO_MED. /// Possible string values are: + /// - "ADD_COST_TO_MED" /// - "DEFAULT" - /// - "TIER_1" - core.String? 
totalEgressBandwidthTier; - - NetworkPerformanceConfig({ - this.totalEgressBandwidthTier, - }); - - NetworkPerformanceConfig.fromJson(core.Map json_) - : this( - totalEgressBandwidthTier: - json_['totalEgressBandwidthTier'] as core.String?, - ); + core.String? bgpInterRegionCost; - core.Map toJson() => { - if (totalEgressBandwidthTier != null) - 'totalEgressBandwidthTier': totalEgressBandwidthTier!, - }; -} - -/// A routing configuration attached to a network resource. -/// -/// The message includes the list of routers associated with the network, and a -/// flag indicating the type of routing behavior to enforce network-wide. -class NetworkRoutingConfig { /// The network-wide routing mode to use. /// /// If set to REGIONAL, this network's Cloud Routers will only advertise @@ -95262,15 +96862,28 @@ class NetworkRoutingConfig { core.String? routingMode; NetworkRoutingConfig({ + this.bgpAlwaysCompareMed, + this.bgpBestPathSelectionMode, + this.bgpInterRegionCost, this.routingMode, }); NetworkRoutingConfig.fromJson(core.Map json_) : this( + bgpAlwaysCompareMed: json_['bgpAlwaysCompareMed'] as core.bool?, + bgpBestPathSelectionMode: + json_['bgpBestPathSelectionMode'] as core.String?, + bgpInterRegionCost: json_['bgpInterRegionCost'] as core.String?, routingMode: json_['routingMode'] as core.String?, ); core.Map toJson() => { + if (bgpAlwaysCompareMed != null) + 'bgpAlwaysCompareMed': bgpAlwaysCompareMed!, + if (bgpBestPathSelectionMode != null) + 'bgpBestPathSelectionMode': bgpBestPathSelectionMode!, + if (bgpInterRegionCost != null) + 'bgpInterRegionCost': bgpInterRegionCost!, if (routingMode != null) 'routingMode': routingMode!, }; } @@ -95770,6 +97383,8 @@ class NodeGroupAggregatedListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -96033,6 +97648,8 @@ class NodeGroupListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -96454,6 +98071,8 @@ class NodeGroupsListNodesWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -96693,6 +98312,8 @@ class NodeGroupsScopedListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. 
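The new BGP best-path-selection fields on NetworkRoutingConfig above are plain data members of the generated model, so using them is just object construction; a minimal sketch follows (the variable names and the chosen enum values are illustrative — only the class, field names, and value strings come from the generated code in this diff):

    import 'package:googleapis/compute/v1.dart';

    void main() {
      // Opt a network's dynamic routing into the STANDARD best-path
      // selection algorithm, with MED comparison across neighbor ASNs
      // and inter-region cost added to the MED.
      final routingConfig = NetworkRoutingConfig(
        routingMode: 'REGIONAL',
        bgpBestPathSelectionMode: 'STANDARD',
        bgpAlwaysCompareMed: true,
        bgpInterRegionCost: 'ADD_COST_TO_MED',
      );
      // toJson() emits only the non-null keys; the resulting map is what
      // gets attached to a Network payload via its routingConfig field.
      print(routingConfig.toJson());
    }
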
/// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -97084,6 +98705,8 @@ class NodeTemplateAggregatedListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -97306,6 +98929,8 @@ class NodeTemplateListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -97541,6 +99166,8 @@ class NodeTemplatesScopedListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -97837,6 +99464,8 @@ class NodeTypeAggregatedListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -98059,6 +99688,8 @@ class NodeTypeListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -98268,6 +99899,8 @@ class NodeTypesScopedListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). 
/// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -98604,6 +100237,8 @@ class NotificationEndpointListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -98932,6 +100567,8 @@ class OperationWarnings { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -99362,6 +100999,8 @@ class OperationAggregatedListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -99590,6 +101229,8 @@ class OperationListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -99806,6 +101447,8 @@ class OperationsScopedListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -100351,6 +101994,8 @@ class PacketMirroringAggregatedListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. 
/// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -100649,6 +102294,8 @@ class PacketMirroringListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -100990,6 +102637,8 @@ class PacketMirroringsScopedListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -101111,13 +102760,10 @@ class PathMatcher { /// /// The load balancer performs advanced routing actions, such as URL rewrites /// and header transformations, before forwarding the request to the selected - /// backend. If defaultRouteAction specifies any weightedBackendServices, - /// defaultService must not be set. Conversely if defaultService is set, - /// defaultRouteAction cannot contain any weightedBackendServices. If - /// defaultRouteAction is specified, don't set defaultUrlRedirect. If - /// defaultRouteAction.weightedBackendServices is specified, don't set - /// defaultService. URL maps for classic Application Load Balancers only - /// support the urlRewrite action within a path matcher's defaultRouteAction. + /// backend. Only one of defaultUrlRedirect, defaultService or + /// defaultRouteAction.weightedBackendService can be set. URL maps for classic + /// Application Load Balancers only support the urlRewrite action within a + /// path matcher's defaultRouteAction. HttpRouteAction? defaultRouteAction; /// The full or partial URL to the BackendService resource. @@ -101130,23 +102776,19 @@ class PathMatcher { /// compute/v1/projects/project/global/backendServices/backendService - /// global/backendServices/backendService If defaultRouteAction is also /// specified, advanced routing actions, such as URL rewrites, take effect - /// before sending the request to the backend. However, if defaultService is - /// specified, defaultRouteAction cannot contain any weightedBackendServices. - /// Conversely, if defaultRouteAction specifies any weightedBackendServices, - /// defaultService must not be specified. If defaultService is specified, then - /// set either defaultUrlRedirect or - /// defaultRouteAction.weightedBackendService. Don't set both. Authorization - /// requires one or more of the following Google IAM permissions on the - /// specified resource default_service: - compute.backendBuckets.use - + /// before sending the request to the backend. Only one of defaultUrlRedirect, + /// defaultService or defaultRouteAction.weightedBackendService can be set. + /// Authorization requires one or more of the following Google IAM permissions + /// on the specified resource default_service: - compute.backendBuckets.use - /// compute.backendServices.use core.String? 
defaultService; /// When none of the specified pathRules or routeRules match, the request is /// redirected to a URL specified by defaultUrlRedirect. /// - /// If defaultUrlRedirect is specified, then set either defaultService or - /// defaultRouteAction. Don't set both. Not supported when the URL map is - /// bound to a target gRPC proxy. + /// Only one of defaultUrlRedirect, defaultService or + /// defaultRouteAction.weightedBackendService can be set. Not supported when + /// the URL map is bound to a target gRPC proxy. HttpRedirectAction? defaultUrlRedirect; /// An optional description of this resource. @@ -101286,10 +102928,8 @@ class PathRule { /// routing actions, such as URL rewrites and header transformations, before /// forwarding the request to the selected backend. /// - /// If routeAction specifies any weightedBackendServices, service must not be - /// set. Conversely if service is set, routeAction cannot contain any - /// weightedBackendServices. Only one of routeAction or urlRedirect must be - /// set. URL maps for classic Application Load Balancers only support the + /// Only one of urlRedirect, service or routeAction.weightedBackendService can + /// be set. URL maps for classic Application Load Balancers only support the /// urlRewrite action within a path rule's routeAction. HttpRouteAction? routeAction; @@ -101297,18 +102937,15 @@ class PathRule { /// is directed if this rule is matched. /// /// If routeAction is also specified, advanced routing actions, such as URL - /// rewrites, take effect before sending the request to the backend. However, - /// if service is specified, routeAction cannot contain any - /// weightedBackendServices. Conversely, if routeAction specifies any - /// weightedBackendServices, service must not be specified. Only one of - /// urlRedirect, service or routeAction.weightedBackendService must be set. + /// rewrites, take effect before sending the request to the backend. Only one + /// of urlRedirect, service or routeAction.weightedBackendService can be set. core.String? service; /// When a path pattern is matched, the request is redirected to a URL /// specified by urlRedirect. /// - /// If urlRedirect is specified, service or routeAction must not be set. Not - /// supported when the URL map is bound to a target gRPC proxy. + /// Only one of urlRedirect, service or routeAction.weightedBackendService can + /// be set. Not supported when the URL map is bound to a target gRPC proxy. HttpRedirectAction? urlRedirect; PathRule({ @@ -101372,9 +103009,9 @@ class PerInstanceConfig { /// Serves as a merge key during UpdatePerInstanceConfigs operations, that is, /// if a per-instance configuration with the same name exists then it will be /// updated, otherwise a new one will be created for the VM instance with the - /// same name. An attempt to create a per-instance configconfiguration for a - /// VM instance that either doesn't exist or is not part of the group will - /// result in an error. + /// same name. An attempt to create a per-instance configuration for a VM + /// instance that either doesn't exist or is not part of the group will result + /// in an error. core.String? name; /// The intended preserved state for the given instance. @@ -101494,11 +103131,6 @@ class Policy { convert.base64.encode(bytes_).replaceAll('/', '_').replaceAll('+', '-'); } - /// This is deprecated and has no effect. - /// - /// Do not use. - core.List? rules; - /// Specifies the format of the policy. /// /// Valid values are `0`, `1`, and `3`. 
Requests that specify an invalid value @@ -101522,7 +103154,6 @@ class Policy { this.auditConfigs, this.bindings, this.etag, - this.rules, this.version, }); @@ -101537,10 +103168,6 @@ class Policy { value as core.Map)) .toList(), etag: json_['etag'] as core.String?, - rules: (json_['rules'] as core.List?) - ?.map((value) => - Rule.fromJson(value as core.Map)) - .toList(), version: json_['version'] as core.int?, ); @@ -101548,7 +103175,6 @@ class Policy { if (auditConfigs != null) 'auditConfigs': auditConfigs!, if (bindings != null) 'bindings': bindings!, if (etag != null) 'etag': etag!, - if (rules != null) 'rules': rules!, if (version != null) 'version': version!, }; } @@ -101801,7 +103427,7 @@ class Project { /// An optional textual description of the resource. core.String? description; - /// Restricted features enabled for use on this project. + /// An optional list of restricted features enabled for use on this project. core.List? enabledFeatures; /// The unique identifier for the resource. @@ -101835,8 +103461,8 @@ class Project { /// Output only. core.String? selfLink; - /// The naming prefix for daily usage reports and the Google Cloud Storage - /// bucket where they are stored. + /// An optional naming prefix for daily usage reports and the Google Cloud + /// Storage bucket where they are stored. UsageExportLocation? usageExportLocation; /// Default internal DNS setting used by VMs running in this project. @@ -102366,6 +103992,8 @@ class PublicAdvertisedPrefixListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -102829,6 +104457,8 @@ class PublicDelegatedPrefixAggregatedListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -103054,6 +104684,8 @@ class PublicDelegatedPrefixListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -103353,6 +104985,8 @@ class PublicDelegatedPrefixesScopedListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). 
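The `Policy` hunks above drop the long-deprecated, no-effect `rules` field (the matching `Rule` class is removed later in this diff). A hedged sketch of the remaining surface, using the `Policy.fromJson`/`toJson` shape shown above; every value is illustrative:

```dart
import 'package:googleapis/compute/v1.dart';

// Sketch only. Code that still populated the deprecated `rules` field
// should simply drop it — bindings, etag and version remain.
void main() {
  final policy = Policy.fromJson({
    'bindings': [
      {
        'role': 'roles/compute.viewer',
        'members': ['user:alice@example.com'], // hypothetical principal
      },
    ],
    'etag': 'BwXhqDRz1Zk=', // hypothetical etag from a getIamPolicy response
    'version': 3,
  });
  assert(!policy.toJson().containsKey('rules')); // field no longer exists
}
```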
/// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -103853,6 +105487,8 @@ class RegionQuotaStatusWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -104158,6 +105794,8 @@ class RegionAutoscalerListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -104365,6 +106003,8 @@ class RegionDiskTypeListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -104619,6 +106259,8 @@ class RegionInstanceGroupListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -104829,6 +106471,8 @@ class RegionInstanceGroupManagerListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -105194,6 +106838,8 @@ class RegionInstanceGroupManagersListInstanceConfigsRespWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. 
/// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -105331,6 +106977,8 @@ class RegionInstanceGroupManagersListInstancesResponse { } typedef RegionInstanceGroupManagersRecreateRequest = $Request08; +typedef RegionInstanceGroupManagersResumeInstancesRequest + = $InstanceGroupManagersResumeInstancesRequest; class RegionInstanceGroupManagersSetTargetPoolsRequest { /// Fingerprint of the target pools information, which is a hash of the @@ -105392,6 +107040,13 @@ class RegionInstanceGroupManagersSetTemplateRequest { }; } +typedef RegionInstanceGroupManagersStartInstancesRequest + = $InstanceGroupManagersStartInstancesRequest; +typedef RegionInstanceGroupManagersStopInstancesRequest + = $InstanceGroupManagersStopInstancesRequest; +typedef RegionInstanceGroupManagersSuspendInstancesRequest + = $InstanceGroupManagersSuspendInstancesRequest; + class RegionInstanceGroupsListInstancesWarningData { /// A key that provides more detail on the warning being returned. /// @@ -105480,6 +107135,8 @@ class RegionInstanceGroupsListInstancesWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -105761,6 +107418,8 @@ class RegionListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -106481,6 +108140,8 @@ class ReservationAggregatedListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -106701,6 +108362,8 @@ class ReservationListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -106931,6 +108594,8 @@ class ReservationsScopedListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. 
/// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -107166,6 +108831,8 @@ class ResourcePoliciesScopedListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -107481,6 +109148,8 @@ class ResourcePolicyAggregatedListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -107909,6 +109578,8 @@ class ResourcePolicyListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -108350,7 +110021,10 @@ class ResourcePolicyWeeklyCycleDayOfWeek { /// compared to the value requested by the user (intent) in their instance CRUD /// calls. class ResourceStatus { - /// An opaque ID of the host on which the VM is running. + /// The precise location of your instance within the zone's data center, + /// including the block, sub-block, and host. + /// + /// The field is formatted as follows: blockId/subBlockId/hostId. /// /// Output only. core.String? physicalHost; @@ -108491,6 +110165,8 @@ class RouteWarnings { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -108630,6 +110306,13 @@ class Route { /// https://www.googleapis.com/compute/v1/projects/project/zones/zone/instances/ core.String? nextHopInstance; + /// Internal fixed region-to-region cost that Google Cloud calculates based on + /// factors such as network performance, distance, and available bandwidth + /// between regions. + /// + /// Output only. + core.int? 
nextHopInterRegionCost; + /// The network IP address of an instance that should handle matching packets. /// /// Both IPv6 address and IPv4 addresses are supported. Must specify an IPv4 @@ -108639,9 +110322,27 @@ class Route { /// 2001:db8::2d9:51:0:0). Should never be an IPv4-mapped IPv6 address. core.String? nextHopIp; + /// Multi-Exit Discriminator, a BGP route metric that indicates the + /// desirability of a particular route in a network. + /// + /// Output only. + core.int? nextHopMed; + /// The URL of the local network if it should handle matching packets. core.String? nextHopNetwork; + /// Indicates the origin of the route. + /// + /// Can be IGP (Interior Gateway Protocol), EGP (Exterior Gateway Protocol), + /// or INCOMPLETE. + /// + /// Output only. + /// Possible string values are: + /// - "EGP" + /// - "IGP" + /// - "INCOMPLETE" + core.String? nextHopOrigin; + /// The network peering name that should handle matching packets, which should /// conform to RFC1035. /// @@ -108715,8 +110416,11 @@ class Route { this.nextHopHub, this.nextHopIlb, this.nextHopInstance, + this.nextHopInterRegionCost, this.nextHopIp, + this.nextHopMed, this.nextHopNetwork, + this.nextHopOrigin, this.nextHopPeering, this.nextHopVpnTunnel, this.priority, @@ -108744,8 +110448,11 @@ class Route { nextHopHub: json_['nextHopHub'] as core.String?, nextHopIlb: json_['nextHopIlb'] as core.String?, nextHopInstance: json_['nextHopInstance'] as core.String?, + nextHopInterRegionCost: json_['nextHopInterRegionCost'] as core.int?, nextHopIp: json_['nextHopIp'] as core.String?, + nextHopMed: json_['nextHopMed'] as core.int?, nextHopNetwork: json_['nextHopNetwork'] as core.String?, + nextHopOrigin: json_['nextHopOrigin'] as core.String?, nextHopPeering: json_['nextHopPeering'] as core.String?, nextHopVpnTunnel: json_['nextHopVpnTunnel'] as core.String?, priority: json_['priority'] as core.int?, @@ -108774,8 +110481,12 @@ class Route { if (nextHopHub != null) 'nextHopHub': nextHopHub!, if (nextHopIlb != null) 'nextHopIlb': nextHopIlb!, if (nextHopInstance != null) 'nextHopInstance': nextHopInstance!, + if (nextHopInterRegionCost != null) + 'nextHopInterRegionCost': nextHopInterRegionCost!, if (nextHopIp != null) 'nextHopIp': nextHopIp!, + if (nextHopMed != null) 'nextHopMed': nextHopMed!, if (nextHopNetwork != null) 'nextHopNetwork': nextHopNetwork!, + if (nextHopOrigin != null) 'nextHopOrigin': nextHopOrigin!, if (nextHopPeering != null) 'nextHopPeering': nextHopPeering!, if (nextHopVpnTunnel != null) 'nextHopVpnTunnel': nextHopVpnTunnel!, if (priority != null) 'priority': priority!, @@ -108916,6 +110627,8 @@ class RouteListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -109304,6 +111017,8 @@ class RouterAggregatedListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). 
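The `Route` hunks above add three output-only BGP-related fields: `nextHopInterRegionCost`, `nextHopMed`, and `nextHopOrigin`. A small sketch that round-trips them through the `Route.fromJson`/`toJson` code shown above; the values are illustrative:

```dart
import 'package:googleapis/compute/v1.dart';

// Uses only field names that appear in the Route hunks above.
void main() {
  final route = Route.fromJson({
    'nextHopIp': '10.128.0.1',
    'priority': 900,
    'nextHopInterRegionCost': 315, // Google-calculated inter-region cost
    'nextHopMed': 100,             // BGP Multi-Exit Discriminator
    'nextHopOrigin': 'IGP',        // one of EGP, IGP, INCOMPLETE
  });
  print('cost=${route.nextHopInterRegionCost} '
      'med=${route.nextHopMed} origin=${route.nextHopOrigin}');
}
```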
/// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -110096,6 +111811,8 @@ class RouterListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -111226,6 +112943,8 @@ class RoutersScopedListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -111308,96 +113027,6 @@ class RoutersScopedList { }; } -/// This is deprecated and has no effect. -/// -/// Do not use. -class Rule { - /// This is deprecated and has no effect. - /// - /// Do not use. - /// Possible string values are: - /// - "ALLOW" : This is deprecated and has no effect. Do not use. - /// - "ALLOW_WITH_LOG" : This is deprecated and has no effect. Do not use. - /// - "DENY" : This is deprecated and has no effect. Do not use. - /// - "DENY_WITH_LOG" : This is deprecated and has no effect. Do not use. - /// - "LOG" : This is deprecated and has no effect. Do not use. - /// - "NO_ACTION" : This is deprecated and has no effect. Do not use. - core.String? action; - - /// This is deprecated and has no effect. - /// - /// Do not use. - core.List? conditions; - - /// This is deprecated and has no effect. - /// - /// Do not use. - core.String? description; - - /// This is deprecated and has no effect. - /// - /// Do not use. - core.List? ins; - - /// This is deprecated and has no effect. - /// - /// Do not use. - core.List? logConfigs; - - /// This is deprecated and has no effect. - /// - /// Do not use. - core.List? notIns; - - /// This is deprecated and has no effect. - /// - /// Do not use. - core.List? permissions; - - Rule({ - this.action, - this.conditions, - this.description, - this.ins, - this.logConfigs, - this.notIns, - this.permissions, - }); - - Rule.fromJson(core.Map json_) - : this( - action: json_['action'] as core.String?, - conditions: (json_['conditions'] as core.List?) - ?.map((value) => Condition.fromJson( - value as core.Map)) - .toList(), - description: json_['description'] as core.String?, - ins: (json_['ins'] as core.List?) - ?.map((value) => value as core.String) - .toList(), - logConfigs: (json_['logConfigs'] as core.List?) - ?.map((value) => LogConfig.fromJson( - value as core.Map)) - .toList(), - notIns: (json_['notIns'] as core.List?) - ?.map((value) => value as core.String) - .toList(), - permissions: (json_['permissions'] as core.List?) 
- ?.map((value) => value as core.String) - .toList(), - ); - - core.Map toJson() => { - if (action != null) 'action': action!, - if (conditions != null) 'conditions': conditions!, - if (description != null) 'description': description!, - if (ins != null) 'ins': ins!, - if (logConfigs != null) 'logConfigs': logConfigs!, - if (notIns != null) 'notIns': notIns!, - if (permissions != null) 'permissions': permissions!, - }; -} - class SSLHealthCheck { /// The TCP port number to which the health check prober sends packets. /// @@ -112111,6 +113740,8 @@ class SecurityPoliciesAggregatedListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -112364,6 +113995,8 @@ class SecurityPoliciesScopedListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -113085,6 +114718,8 @@ class SecurityPolicyListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -113917,7 +115552,9 @@ class SecurityPolicyRuleRateLimitOptions { /// defaults to ALL. - USER_IP: The IP address of the originating client, /// which is resolved based on "userIpRequestHeaders" configured with the /// security policy. If there is no "userIpRequestHeaders" configuration or an - /// IP address cannot be resolved from it, the key type defaults to IP. + /// IP address cannot be resolved from it, the key type defaults to IP. - + /// TLS_JA4_FINGERPRINT: JA4 TLS/SSL fingerprint if the client connects using + /// HTTPS, HTTP/2 or HTTP/3. If not available, the key type defaults to ALL. /// Possible string values are: /// - "ALL" /// - "HTTP_COOKIE" @@ -113927,6 +115564,7 @@ class SecurityPolicyRuleRateLimitOptions { /// - "REGION_CODE" /// - "SNI" /// - "TLS_JA3_FINGERPRINT" + /// - "TLS_JA4_FINGERPRINT" /// - "USER_IP" /// - "XFF_IP" core.String? enforceOnKey; @@ -114058,7 +115696,9 @@ class SecurityPolicyRuleRateLimitOptionsEnforceOnKeyConfig { /// defaults to ALL. - USER_IP: The IP address of the originating client, /// which is resolved based on "userIpRequestHeaders" configured with the /// security policy. If there is no "userIpRequestHeaders" configuration or an - /// IP address cannot be resolved from it, the key type defaults to IP. + /// IP address cannot be resolved from it, the key type defaults to IP. 
- + /// TLS_JA4_FINGERPRINT: JA4 TLS/SSL fingerprint if the client connects using + /// HTTPS, HTTP/2 or HTTP/3. If not available, the key type defaults to ALL. /// Possible string values are: /// - "ALL" /// - "HTTP_COOKIE" @@ -114068,6 +115708,7 @@ class SecurityPolicyRuleRateLimitOptionsEnforceOnKeyConfig { /// - "REGION_CODE" /// - "SNI" /// - "TLS_JA3_FINGERPRINT" + /// - "TLS_JA4_FINGERPRINT" /// - "USER_IP" /// - "XFF_IP" core.String? enforceOnKeyType; @@ -114707,6 +116348,8 @@ class ServiceAttachmentAggregatedListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -115016,6 +116659,8 @@ class ServiceAttachmentListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -115225,6 +116870,8 @@ class ServiceAttachmentsScopedListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -116131,6 +117778,8 @@ class SnapshotListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -116831,6 +118480,8 @@ class SslCertificateAggregatedListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -117054,6 +118705,8 @@ class SslCertificateListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. 
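The rate-limit hunks above add `TLS_JA4_FINGERPRINT` as an `enforceOnKey`/`enforceOnKeyType` value. A hedged sketch of selecting it; constructor arguments beyond the ones shown are assumed to follow the usual generated named-argument pattern:

```dart
import 'package:googleapis/compute/v1.dart';

// Sketch only; other rate-limit settings are elided.
final rateLimitOptions = SecurityPolicyRuleRateLimitOptions(
  enforceOnKey: 'TLS_JA4_FINGERPRINT', // falls back to ALL when no JA4 is available
);

final keyConfig = SecurityPolicyRuleRateLimitOptionsEnforceOnKeyConfig(
  enforceOnKeyType: 'TLS_JA4_FINGERPRINT',
);
```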
/// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -117358,6 +119011,8 @@ class SslCertificatesScopedListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -117528,6 +119183,8 @@ class SslPoliciesAggregatedListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -117756,6 +119413,8 @@ class SslPoliciesListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -117984,6 +119643,8 @@ class SslPoliciesScopedListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -118151,6 +119812,8 @@ class SslPolicyWarnings { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -118854,6 +120517,8 @@ class StoragePoolAggregatedListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). 
/// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -119196,6 +120861,8 @@ class StoragePoolListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -119424,6 +121091,8 @@ class StoragePoolListDisksWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -119945,6 +121614,8 @@ class StoragePoolTypeAggregatedListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -120159,6 +121830,8 @@ class StoragePoolTypeListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -120371,6 +122044,8 @@ class StoragePoolTypesScopedListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -120546,6 +122221,8 @@ class StoragePoolsScopedListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. 
/// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -120771,14 +122448,16 @@ class Subnetwork { /// The purpose of the resource. /// /// This field can be either PRIVATE, GLOBAL_MANAGED_PROXY, - /// REGIONAL_MANAGED_PROXY, PRIVATE_SERVICE_CONNECT, or PRIVATE is the default - /// purpose for user-created subnets or subnets that are automatically created - /// in auto mode networks. Subnets with purpose set to GLOBAL_MANAGED_PROXY or - /// REGIONAL_MANAGED_PROXY are user-created subnetworks that are reserved for - /// Envoy-based load balancers. A subnet with purpose set to - /// PRIVATE_SERVICE_CONNECT is used to publish services using Private Service - /// Connect. If unspecified, the subnet purpose defaults to PRIVATE. The - /// enableFlowLogs field isn't supported if the subnet purpose field is set to + /// REGIONAL_MANAGED_PROXY, PEER_MIGRATION or PRIVATE_SERVICE_CONNECT. PRIVATE + /// is the default purpose for user-created subnets or subnets that are + /// automatically created in auto mode networks. Subnets with purpose set to + /// GLOBAL_MANAGED_PROXY or REGIONAL_MANAGED_PROXY are user-created + /// subnetworks that are reserved for Envoy-based load balancers. A subnet + /// with purpose set to PRIVATE_SERVICE_CONNECT is used to publish services + /// using Private Service Connect. A subnet with purpose set to PEER_MIGRATION + /// is used for subnet migration from one peered VPC to another. If + /// unspecified, the subnet purpose defaults to PRIVATE. The enableFlowLogs + /// field isn't supported if the subnet purpose field is set to /// GLOBAL_MANAGED_PROXY or REGIONAL_MANAGED_PROXY. /// Possible string values are: /// - "GLOBAL_MANAGED_PROXY" : Subnet reserved for Global Envoy-based Load @@ -120786,6 +122465,9 @@ class Subnetwork { /// - "INTERNAL_HTTPS_LOAD_BALANCER" : Subnet reserved for Internal HTTP(S) /// Load Balancing. This is a legacy purpose, please use /// REGIONAL_MANAGED_PROXY instead. + /// - "PEER_MIGRATION" : Subnetwork will be used for Migration from one peered + /// VPC to another. (a transient state of subnetwork while migrating resources + /// from one project to another). /// - "PRIVATE" : Regular user created or automatically created subnet. /// - "PRIVATE_NAT" : Subnetwork used as source range for Private NAT /// Gateways. @@ -121046,6 +122728,8 @@ class SubnetworkAggregatedListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -121270,6 +122954,8 @@ class SubnetworkListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. 
/// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -121629,6 +123315,8 @@ class SubnetworksScopedListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -122113,6 +123801,8 @@ class TargetGrpcProxyListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -122322,6 +124012,8 @@ class TargetHttpProxiesScopedListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -122713,6 +124405,8 @@ class TargetHttpProxyListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -122921,6 +124615,8 @@ class TargetHttpsProxiesScopedListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -123079,7 +124775,7 @@ class TargetHttpsProxiesSetSslCertificatesRequest { /// Google Compute Engine has two Target HTTPS Proxy resources: * /// \[Global\](/compute/docs/reference/rest/v1/targetHttpsProxies) * /// \[Regional\](/compute/docs/reference/rest/v1/regionTargetHttpsProxies) A -/// target HTTPS proxy is a component of GCP HTTPS load balancers. * +/// target HTTPS proxy is a component of Google Cloud HTTPS load balancers. * /// targetHttpProxies are used by global external Application Load Balancers, /// classic Application Load Balancers, cross-region internal Application Load /// Balancers, and Traffic Director. 
* regionTargetHttpProxies are used by @@ -123280,6 +124976,12 @@ class TargetHttpsProxy { /// without query parameters. Requests that send Early Data with /// non-idempotent HTTP methods or with query parameters will be rejected with /// a HTTP 425. + /// - "UNRESTRICTED" : This enables TLS 1.3 Early Data for requests with any + /// HTTP method including non-idempotent methods list POST. This mode does not + /// enforce any other limitations. This may be valuable for gRPC use cases. + /// However, we do not recommend this method unless you have evaluated your + /// security stance and mitigated the risk of replay attacks using other + /// mechanisms. core.String? tlsEarlyData; /// A fully-qualified or valid partial URL to the UrlMap resource that defines @@ -123448,6 +125150,8 @@ class TargetHttpsProxyAggregatedListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -123673,6 +125377,8 @@ class TargetHttpsProxyListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -124011,6 +125717,8 @@ class TargetInstanceAggregatedListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -124230,6 +125938,8 @@ class TargetInstanceListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -124436,6 +126146,8 @@ class TargetInstancesScopedListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. 
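The `TargetHttpsProxy` hunk above adds the `UNRESTRICTED` value for `tlsEarlyData`. A hedged sketch; `name` and `urlMap` are assumed fields whose declarations fall outside this excerpt, and the replay-attack caveat quoted in the field documentation above applies:

```dart
import 'package:googleapis/compute/v1.dart';

// Sketch only; opt into UNRESTRICTED only after weighing the replay-attack
// caveat in the tlsEarlyData documentation above.
final grpcFrontend = TargetHttpsProxy(
  name: 'grpc-frontend',        // hypothetical, assumed field
  urlMap: 'global/urlMaps/grpc-map',
  tlsEarlyData: 'UNRESTRICTED', // 0-RTT even for non-idempotent methods
);
```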
/// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -124797,6 +126509,8 @@ class TargetPoolAggregatedListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -125052,6 +126766,8 @@ class TargetPoolListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -125359,6 +127075,8 @@ class TargetPoolsScopedListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -125739,6 +127457,8 @@ class TargetSslProxyListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -125945,6 +127665,8 @@ class TargetTcpProxiesScopedListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -126252,6 +127974,8 @@ class TargetTcpProxyAggregatedListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -126476,6 +128200,8 @@ class TargetTcpProxyListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. 
/// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -126850,6 +128576,8 @@ class TargetVpnGatewayAggregatedListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -127074,6 +128802,8 @@ class TargetVpnGatewayListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -127286,6 +129016,8 @@ class TargetVpnGatewaysScopedListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -127597,10 +129329,8 @@ class UrlMap { /// /// The load balancer performs advanced routing actions, such as URL rewrites /// and header transformations, before forwarding the request to the selected - /// backend. If defaultRouteAction specifies any weightedBackendServices, - /// defaultService must not be set. Conversely if defaultService is set, - /// defaultRouteAction cannot contain any weightedBackendServices. Only one of - /// defaultRouteAction or defaultUrlRedirect must be set. URL maps for classic + /// backend. Only one of defaultUrlRedirect, defaultService or + /// defaultRouteAction.weightedBackendService can be set. URL maps for classic /// Application Load Balancers only support the urlRewrite action within /// defaultRouteAction. defaultRouteAction has no effect when the URL map is /// bound to a target gRPC proxy that has the validateForProxyless field set @@ -127611,23 +129341,19 @@ class UrlMap { /// directed if none of the hostRules match. /// /// If defaultRouteAction is also specified, advanced routing actions, such as - /// URL rewrites, take effect before sending the request to the backend. - /// However, if defaultService is specified, defaultRouteAction cannot contain - /// any defaultRouteAction.weightedBackendServices. Conversely, if - /// defaultRouteAction specifies any - /// defaultRouteAction.weightedBackendServices, defaultService must not be - /// specified. 
If defaultService is specified, then set either - /// defaultUrlRedirect , or defaultRouteAction.weightedBackendService Don't - /// set both. defaultService has no effect when the URL map is bound to a - /// target gRPC proxy that has the validateForProxyless field set to true. + /// URL rewrites, take effect before sending the request to the backend. Only + /// one of defaultUrlRedirect, defaultService or + /// defaultRouteAction.weightedBackendService can be set. defaultService has + /// no effect when the URL map is bound to a target gRPC proxy that has the + /// validateForProxyless field set to true. core.String? defaultService; /// When none of the specified hostRules match, the request is redirected to a /// URL specified by defaultUrlRedirect. /// - /// If defaultUrlRedirect is specified, defaultService or defaultRouteAction - /// must not be set. Not supported when the URL map is bound to a target gRPC - /// proxy. + /// Only one of defaultUrlRedirect, defaultService or + /// defaultRouteAction.weightedBackendService can be set. Not supported when + /// the URL map is bound to a target gRPC proxy. HttpRedirectAction? defaultUrlRedirect; /// An optional description of this resource. @@ -127886,6 +129612,8 @@ class UrlMapListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -128243,6 +129971,8 @@ class UrlMapsAggregatedListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -128461,6 +130191,8 @@ class UrlMapsScopedListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -128690,14 +130422,16 @@ class UsableSubnetwork { /// The purpose of the resource. /// /// This field can be either PRIVATE, GLOBAL_MANAGED_PROXY, - /// REGIONAL_MANAGED_PROXY, PRIVATE_SERVICE_CONNECT, or PRIVATE is the default - /// purpose for user-created subnets or subnets that are automatically created - /// in auto mode networks. Subnets with purpose set to GLOBAL_MANAGED_PROXY or - /// REGIONAL_MANAGED_PROXY are user-created subnetworks that are reserved for - /// Envoy-based load balancers. A subnet with purpose set to - /// PRIVATE_SERVICE_CONNECT is used to publish services using Private Service - /// Connect. If unspecified, the subnet purpose defaults to PRIVATE. 
The - /// enableFlowLogs field isn't supported if the subnet purpose field is set to + /// REGIONAL_MANAGED_PROXY, PEER_MIGRATION or PRIVATE_SERVICE_CONNECT. PRIVATE + /// is the default purpose for user-created subnets or subnets that are + /// automatically created in auto mode networks. Subnets with purpose set to + /// GLOBAL_MANAGED_PROXY or REGIONAL_MANAGED_PROXY are user-created + /// subnetworks that are reserved for Envoy-based load balancers. A subnet + /// with purpose set to PRIVATE_SERVICE_CONNECT is used to publish services + /// using Private Service Connect. A subnet with purpose set to PEER_MIGRATION + /// is used for subnet migration from one peered VPC to another. If + /// unspecified, the subnet purpose defaults to PRIVATE. The enableFlowLogs + /// field isn't supported if the subnet purpose field is set to /// GLOBAL_MANAGED_PROXY or REGIONAL_MANAGED_PROXY. /// Possible string values are: /// - "GLOBAL_MANAGED_PROXY" : Subnet reserved for Global Envoy-based Load @@ -128705,6 +130439,9 @@ class UsableSubnetwork { /// - "INTERNAL_HTTPS_LOAD_BALANCER" : Subnet reserved for Internal HTTP(S) /// Load Balancing. This is a legacy purpose, please use /// REGIONAL_MANAGED_PROXY instead. + /// - "PEER_MIGRATION" : Subnetwork will be used for Migration from one peered + /// VPC to another. (a transient state of subnetwork while migrating resources + /// from one project to another). /// - "PRIVATE" : Regular user created or automatically created subnet. /// - "PRIVATE_NAT" : Subnetwork used as source range for Private NAT /// Gateways. @@ -128913,6 +130650,8 @@ class UsableSubnetworksAggregatedListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -129343,6 +131082,8 @@ class VmEndpointNatMappingsListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -129722,6 +131463,8 @@ class VpnGatewayAggregatedListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -129945,6 +131688,8 @@ class VpnGatewayListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. 
/// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -130391,6 +132136,8 @@ class VpnGatewaysScopedListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -130868,6 +132615,8 @@ class VpnTunnelAggregatedListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -131091,6 +132840,8 @@ class VpnTunnelListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -131301,6 +133052,8 @@ class VpnTunnelsScopedListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -131603,6 +133356,8 @@ class XpnHostListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). /// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is @@ -131964,6 +133719,8 @@ class ZoneListWarning { /// - "NO_RESULTS_ON_PAGE" : No results are present on a particular list page. /// - "PARTIAL_SUCCESS" : Success is reported, but some results may be missing /// due to errors + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). 
/// - "REQUIRED_TOS_AGREEMENT" : The user attempted to use a resource that /// requires a TOS they have not accepted. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is diff --git a/generated/googleapis/lib/config/v1.dart b/generated/googleapis/lib/config/v1.dart index ef477cc2e..4af830308 100644 --- a/generated/googleapis/lib/config/v1.dart +++ b/generated/googleapis/lib/config/v1.dart @@ -1173,8 +1173,8 @@ class ProjectsLocationsOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// [request] - The metadata request object. /// @@ -2082,7 +2082,7 @@ class Deployment { /// /// Format: `projects/{projectID}/serviceAccounts/{serviceAccount}` /// - /// Optional. + /// Required. core.String? serviceAccount; /// Current state of the deployment. @@ -2504,7 +2504,7 @@ class ListPreviewsResponse { /// obtain the next set of results. core.String? nextPageToken; - /// List of Previewss. + /// List of Previews. core.List? previews; /// Locations that could not be reached. @@ -2546,7 +2546,7 @@ class ListResourcesResponse { /// return. core.String? nextPageToken; - /// List of Resourcess. + /// List of Resources. core.List? resources; /// Locations that could not be reached. @@ -3017,7 +3017,7 @@ class Preview { /// /// Format: `projects/{projectID}/serviceAccounts/{serviceAccount}` /// - /// Optional. + /// Required. core.String? serviceAccount; /// Current state of the preview. @@ -3705,6 +3705,8 @@ class TerraformBlueprint { GitSource? gitSource; /// Input variable values for the Terraform blueprint. + /// + /// Optional. core.Map? inputValues; TerraformBlueprint({ @@ -3741,6 +3743,8 @@ class TerraformBlueprint { /// Errors encountered during actuation using Terraform class TerraformError { /// Original error response from underlying Google API, if available. + /// + /// Output only. Status? error; /// A human-readable error description. @@ -3815,6 +3819,8 @@ class TerraformOutput { class TerraformVariable { /// Input variable value. /// + /// Optional. + /// /// The values for Object must be JSON objects. It can consist of `num`, /// `String`, `bool` and `null` as well as `Map` and `List` values. core.Object? inputValue; diff --git a/generated/googleapis/lib/connectors/v1.dart b/generated/googleapis/lib/connectors/v1.dart index c380cfad6..65c5061ed 100644 --- a/generated/googleapis/lib/connectors/v1.dart +++ b/generated/googleapis/lib/connectors/v1.dart @@ -1705,6 +1705,95 @@ class ProjectsLocationsCustomConnectorsCustomConnectorVersionsResource { ); return Operation.fromJson(response_ as core.Map); } + + /// Publish request for the CustomConnectorVersion. + /// + /// Once approved, the CustomConnectorVersion will be published as + /// PartnerConnector. + /// + /// [request] - The metadata request object. + /// + /// Request parameters: + /// + /// [name] - Required. Resource name of the form: + /// `projects/{project}/locations/{location}/customConnectors/{custom_connector}/customConnectorVersions/{custom_connector_version}` + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/customConnectors/\[^/\]+/customConnectorVersions/\[^/\]+$`. 
+ /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [Operation]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future publish( + PublishCustomConnectorVersionRequest request, + core.String name, { + core.String? $fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name') + ':publish'; + + final response_ = await _requester.request( + url_, + 'POST', + body: body_, + queryParams: queryParams_, + ); + return Operation.fromJson(response_ as core.Map); + } + + /// Withdraw the publish request for the CustomConnectorVersion. + /// + /// This can only be used before the CustomConnectorVersion is published. + /// + /// [request] - The metadata request object. + /// + /// Request parameters: + /// + /// [name] - Required. Resource name of the form: + /// `projects/{project}/locations/{location}/customConnectors/{custom_connector}/customConnectorVersions/{custom_connector_version}` + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/customConnectors/\[^/\]+/customConnectorVersions/\[^/\]+$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [Operation]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future withdraw( + WithdrawCustomConnectorVersionRequest request, + core.String name, { + core.String? $fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name') + ':withdraw'; + + final response_ = await _requester.request( + url_, + 'POST', + body: body_, + queryParams: queryParams_, + ); + return Operation.fromJson(response_ as core.Map); + } } class ProjectsLocationsEndpointAttachmentsResource { @@ -2684,8 +2773,8 @@ class ProjectsLocationsOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// [request] - The metadata request object. /// @@ -3880,6 +3969,8 @@ class AuthSchema { class AuthorizationCodeLink { /// The client ID assigned to the Google Cloud Connectors OAuth app for the /// connector data source. + /// + /// Optional. core.String? clientId; /// The client secret assigned to the Google Cloud Connectors OAuth app for @@ -3889,6 +3980,8 @@ class AuthorizationCodeLink { Secret? clientSecret; /// Whether to enable PKCE for the auth code flow. + /// + /// Optional. core.bool? enablePkce; /// Omit query params from the redirect URI. 
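Aside (not part of the diff): the publish and withdraw methods added above take the request/message types introduced later in this diff (PublishCustomConnectorVersionRequest, PartnerMetadata, WithdrawCustomConnectorVersionRequest). A minimal usage sketch, assuming the usual package:googleapis conventions: a ConnectorsApi entry point and the projects.locations.customConnectors.customConnectorVersions accessor chain implied by the resource class names; project, location, connector names and metadata values are placeholders.

import 'package:googleapis/connectors/v1.dart' as connectors;
import 'package:http/http.dart' as http;

Future<void> publishThenWithdraw(http.Client authedClient) async {
  final api = connectors.ConnectorsApi(authedClient);
  final versions =
      api.projects.locations.customConnectors.customConnectorVersions;
  const name = 'projects/my-project/locations/us-central1/'
      'customConnectors/my-connector/customConnectorVersions/1';

  // Request publication as a partner connector; PartnerMetadata carries the
  // submission details the API marks as required.
  final publishOp = await versions.publish(
    connectors.PublishCustomConnectorVersionRequest(
      partnerMetadata: connectors.PartnerMetadata(
        acceptGcpTos: true,
        confirmPartnerRequirements: true,
        partner: 'ExamplePartner',
        partnerConnectorDisplayName: 'Example Connector',
        demoUri: 'https://example.com/demo',
      ),
    ),
    name,
  );
  print('publish operation: ${publishOp.name}');

  // A pending publish request can be withdrawn before it is published.
  final withdrawOp = await versions.withdraw(
    connectors.WithdrawCustomConnectorVersionRequest(),
    name,
  );
  print('withdraw operation: ${withdrawOp.name}');
}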
@@ -3898,10 +3991,14 @@ class AuthorizationCodeLink { /// The scopes for which the user will authorize Google Cloud Connectors on /// the connector data source. + /// + /// Optional. core.List? scopes; /// The base URI the user must click to trigger the authorization code login /// flow. + /// + /// Optional. core.String? uri; AuthorizationCodeLink({ @@ -4113,17 +4210,25 @@ class ConfigVariableTemplate { /// Authorization code link options. /// /// To be populated if `ValueType` is `AUTHORIZATION_CODE` + /// + /// Optional. AuthorizationCodeLink? authorizationCodeLink; /// Description. + /// + /// Optional. core.String? description; /// Display name of the parameter. + /// + /// Optional. core.String? displayName; /// Enum options. /// /// To be populated if `ValueType` is `ENUM` + /// + /// Optional. core.List? enumOptions; /// enum source denotes the source of api to fill the enum options @@ -4135,9 +4240,13 @@ class ConfigVariableTemplate { core.String? enumSource; /// Indicates if current template is part of advanced settings + /// + /// Optional. core.bool? isAdvanced; /// Key of the config variable. + /// + /// Optional. core.String? key; /// Location Tyep denotes where this value should be sent in BYOC connections. @@ -4159,17 +4268,25 @@ class ConfigVariableTemplate { /// Flag represents that this `ConfigVariable` must be provided for a /// connection. + /// + /// Optional. core.bool? required; /// Condition under which a field would be required. /// /// The condition can be represented in the form of a logical expression. + /// + /// Optional. LogicalExpression? requiredCondition; /// Role grant configuration for the config variable. + /// + /// Optional. RoleGrant? roleGrant; /// State of the config variable. + /// + /// Output only. /// Possible string values are: /// - "STATE_UNSPECIFIED" : Status is unspecified. /// - "ACTIVE" : Config variable is active @@ -4178,11 +4295,15 @@ class ConfigVariableTemplate { /// Regular expression in RE2 syntax used for validating the `value` of a /// `ConfigVariable`. + /// + /// Optional. core.String? validationRegex; /// Type of the parameter: string, int, bool etc. /// /// consider custom type for the benefit for the validation. + /// + /// Optional. /// Possible string values are: /// - "VALUE_TYPE_UNSPECIFIED" : Value type is not specified. /// - "STRING" : Value type is string. @@ -4332,6 +4453,7 @@ class Connection { /// - "PREVIEW" : PREVIEW. /// - "GA" : GA. /// - "DEPRECATED" : DEPRECATED. + /// - "TEST" : TEST. /// - "PRIVATE_PREVIEW" : PRIVATE_PREVIEW. core.String? connectorVersionLaunchStage; @@ -4793,9 +4915,17 @@ class Connector { /// - "PREVIEW" : PREVIEW. /// - "GA" : GA. /// - "DEPRECATED" : DEPRECATED. + /// - "TEST" : TEST. /// - "PRIVATE_PREVIEW" : PRIVATE_PREVIEW. core.String? launchStage; + /// Marketplace connector details. + /// + /// Will be null if the connector is not marketplace connector. + /// + /// Output only. + MarketplaceConnectorDetails? marketplaceConnectorDetails; + /// Resource name of the Connector. /// /// Format: @@ -4831,6 +4961,7 @@ class Connector { this.externalUri, this.labels, this.launchStage, + this.marketplaceConnectorDetails, this.name, this.tags, this.updateTime, @@ -4858,6 +4989,12 @@ class Connector { ), ), launchStage: json_['launchStage'] as core.String?, + marketplaceConnectorDetails: + json_.containsKey('marketplaceConnectorDetails') + ? 
MarketplaceConnectorDetails.fromJson( + json_['marketplaceConnectorDetails'] + as core.Map) + : null, name: json_['name'] as core.String?, tags: (json_['tags'] as core.List?) ?.map((value) => value as core.String) @@ -4877,6 +5014,8 @@ class Connector { if (externalUri != null) 'externalUri': externalUri!, if (labels != null) 'labels': labels!, if (launchStage != null) 'launchStage': launchStage!, + if (marketplaceConnectorDetails != null) + 'marketplaceConnectorDetails': marketplaceConnectorDetails!, if (name != null) 'name': name!, if (tags != null) 'tags': tags!, if (updateTime != null) 'updateTime': updateTime!, @@ -5085,6 +5224,7 @@ class ConnectorVersion { /// - "PREVIEW" : PREVIEW. /// - "GA" : GA. /// - "DEPRECATED" : DEPRECATED. + /// - "TEST" : TEST. /// - "PRIVATE_PREVIEW" : PRIVATE_PREVIEW. core.String? launchStage; @@ -5315,7 +5455,7 @@ class ConnectorVersionInfraConfig { /// Indicates whether connector is deployed on GKE/CloudRun /// - /// Optional. + /// Output only. /// Possible string values are: /// - "DEPLOYMENT_MODEL_UNSPECIFIED" : Deployment model is not specified. /// - "GKE_MST" : Default model gke mst. @@ -5458,6 +5598,11 @@ class CustomConnector { /// Output only. core.List? allConnectorVersions; + /// All marketplace versions. + /// + /// Output only. + core.List? allMarketplaceVersions; + /// Created time. /// /// Output only. @@ -5501,6 +5646,11 @@ class CustomConnector { /// projects/{project}/locations/{location}/customConnectors/{connector} core.String? name; + /// Published marketplace versions. + /// + /// Output only. + core.List? publishedMarketplaceVersions; + /// Updated time. /// /// Output only. @@ -5509,6 +5659,7 @@ class CustomConnector { CustomConnector({ this.activeConnectorVersions, this.allConnectorVersions, + this.allMarketplaceVersions, this.createTime, this.customConnectorType, this.description, @@ -5516,6 +5667,7 @@ class CustomConnector { this.labels, this.logo, this.name, + this.publishedMarketplaceVersions, this.updateTime, }); @@ -5528,6 +5680,10 @@ class CustomConnector { allConnectorVersions: (json_['allConnectorVersions'] as core.List?) ?.map((value) => value as core.String) .toList(), + allMarketplaceVersions: + (json_['allMarketplaceVersions'] as core.List?) + ?.map((value) => value as core.String) + .toList(), createTime: json_['createTime'] as core.String?, customConnectorType: json_['customConnectorType'] as core.String?, description: json_['description'] as core.String?, @@ -5541,6 +5697,10 @@ class CustomConnector { ), logo: json_['logo'] as core.String?, name: json_['name'] as core.String?, + publishedMarketplaceVersions: + (json_['publishedMarketplaceVersions'] as core.List?) 
+ ?.map((value) => value as core.String) + .toList(), updateTime: json_['updateTime'] as core.String?, ); @@ -5549,6 +5709,8 @@ class CustomConnector { 'activeConnectorVersions': activeConnectorVersions!, if (allConnectorVersions != null) 'allConnectorVersions': allConnectorVersions!, + if (allMarketplaceVersions != null) + 'allMarketplaceVersions': allMarketplaceVersions!, if (createTime != null) 'createTime': createTime!, if (customConnectorType != null) 'customConnectorType': customConnectorType!, @@ -5557,6 +5719,8 @@ class CustomConnector { if (labels != null) 'labels': labels!, if (logo != null) 'logo': logo!, if (name != null) 'name': name!, + if (publishedMarketplaceVersions != null) + 'publishedMarketplaceVersions': publishedMarketplaceVersions!, if (updateTime != null) 'updateTime': updateTime!, }; } @@ -5611,6 +5775,19 @@ class CustomConnectorVersion { /// Output only. core.String? name; + /// Partner metadata details. + /// + /// This should be populated only when publishing the custom connector to + /// partner connector. + /// + /// Optional. + PartnerMetadata? partnerMetadata; + + /// Publish status of a custom connector. + /// + /// Output only. + PublishStatus? publishStatus; + /// Service account used by runtime plane to access auth config secrets. /// /// Optional. @@ -5651,6 +5828,8 @@ class CustomConnectorVersion { this.enableBackendDestinationConfig, this.labels, this.name, + this.partnerMetadata, + this.publishStatus, this.serviceAccount, this.specLocation, this.specServerUrls, @@ -5684,6 +5863,14 @@ class CustomConnectorVersion { ), ), name: json_['name'] as core.String?, + partnerMetadata: json_.containsKey('partnerMetadata') + ? PartnerMetadata.fromJson(json_['partnerMetadata'] + as core.Map) + : null, + publishStatus: json_.containsKey('publishStatus') + ? PublishStatus.fromJson( + json_['publishStatus'] as core.Map) + : null, serviceAccount: json_['serviceAccount'] as core.String?, specLocation: json_['specLocation'] as core.String?, specServerUrls: (json_['specServerUrls'] as core.List?) @@ -5704,6 +5891,8 @@ class CustomConnectorVersion { 'enableBackendDestinationConfig': enableBackendDestinationConfig!, if (labels != null) 'labels': labels!, if (name != null) 'name': name!, + if (partnerMetadata != null) 'partnerMetadata': partnerMetadata!, + if (publishStatus != null) 'publishStatus': publishStatus!, if (serviceAccount != null) 'serviceAccount': serviceAccount!, if (specLocation != null) 'specLocation': specLocation!, if (specServerUrls != null) 'specServerUrls': specServerUrls!, @@ -6063,9 +6252,13 @@ class EndpointAttachment { /// EnumOption definition class EnumOption { /// Display name of the option. + /// + /// Optional. core.String? displayName; /// Id of the option. + /// + /// Optional. core.String? id; EnumOption({ @@ -6200,6 +6393,9 @@ class EventSubscriptionDestination { /// OPTION 1: Hit an endpoint when we receive an event. EndPoint? endpoint; + /// OPTION 2: Write the event to Cloud Storage bucket. + GSUtil? gsutil; + /// Service account needed for runtime plane to trigger IP workflow. core.String? serviceAccount; @@ -6207,10 +6403,13 @@ class EventSubscriptionDestination { /// Possible string values are: /// - "TYPE_UNSPECIFIED" : Default state. /// - "ENDPOINT" : Endpoint - Hit the value of endpoint when event is received + /// - "GCS" : Cloud Storage - Write the event to Cloud Storage bucket + /// - "PUBSUB" : Pub/Sub - Write the event to Pub/Sub topic core.String? 
type; EventSubscriptionDestination({ this.endpoint, + this.gsutil, this.serviceAccount, this.type, }); @@ -6221,12 +6420,17 @@ class EventSubscriptionDestination { ? EndPoint.fromJson( json_['endpoint'] as core.Map) : null, + gsutil: json_.containsKey('gsutil') + ? GSUtil.fromJson( + json_['gsutil'] as core.Map) + : null, serviceAccount: json_['serviceAccount'] as core.String?, type: json_['type'] as core.String?, ); core.Map toJson() => { if (endpoint != null) 'endpoint': endpoint!, + if (gsutil != null) 'gsutil': gsutil!, if (serviceAccount != null) 'serviceAccount': serviceAccount!, if (type != null) 'type': type!, }; @@ -6364,9 +6568,13 @@ class EventType { /// Eventing Configuration of a connection class EventingConfig { /// Additional eventing related field values + /// + /// Optional. core.List? additionalVariables; /// Auth details for the webhook adapter. + /// + /// Optional. AuthConfig? authConfig; /// Dead letter configuration for eventing of a connection. @@ -6375,6 +6583,8 @@ class EventingConfig { DeadLetterConfig? deadLetterConfig; /// Enrichment Enabled. + /// + /// Optional. core.bool? enrichmentEnabled; /// Ingress endpoint of the event listener. @@ -6400,6 +6610,8 @@ class EventingConfig { DestinationConfig? proxyDestinationConfig; /// Registration endpoint for auto registration. + /// + /// Optional. DestinationConfig? registrationDestinationConfig; EventingConfig({ @@ -6626,6 +6838,7 @@ class EventingDetails { /// - "PREVIEW" : PREVIEW. /// - "GA" : GA. /// - "DEPRECATED" : DEPRECATED. + /// - "TEST" : TEST. /// - "PRIVATE_PREVIEW" : PRIVATE_PREVIEW. core.String? launchStage; @@ -6713,11 +6926,17 @@ class EventingRuntimeData { /// Output only. WebhookData? webhookData; + /// Webhook subscriptions. + /// + /// Output only. + WebhookSubscriptions? webhookSubscriptions; + EventingRuntimeData({ this.eventsListenerEndpoint, this.eventsListenerPscSa, this.status, this.webhookData, + this.webhookSubscriptions, }); EventingRuntimeData.fromJson(core.Map json_) @@ -6733,6 +6952,10 @@ class EventingRuntimeData { ? WebhookData.fromJson( json_['webhookData'] as core.Map) : null, + webhookSubscriptions: json_.containsKey('webhookSubscriptions') + ? WebhookSubscriptions.fromJson(json_['webhookSubscriptions'] + as core.Map) + : null, ); core.Map toJson() => { @@ -6742,6 +6965,8 @@ class EventingRuntimeData { 'eventsListenerPscSa': eventsListenerPscSa!, if (status != null) 'status': status!, if (webhookData != null) 'webhookData': webhookData!, + if (webhookSubscriptions != null) + 'webhookSubscriptions': webhookSubscriptions!, }; } @@ -6992,6 +7217,8 @@ class FieldComparison { core.bool? boolValue; /// Comparator to use for comparing the field value. + /// + /// Optional. /// Possible string values are: /// - "COMPARATOR_UNSPECIFIED" : The default value. /// - "EQUALS" : The field value must be equal to the specified value. @@ -7002,6 +7229,8 @@ class FieldComparison { core.String? intValue; /// Key of the field. + /// + /// Optional. core.String? key; /// String value @@ -7033,6 +7262,27 @@ class FieldComparison { }; } +/// GSUtil message includes details of the Destination Cloud Storage bucket. +class GSUtil { + /// The URI of the Cloud Storage bucket. + /// + /// Required. + core.String? gsutilUri; + + GSUtil({ + this.gsutilUri, + }); + + GSUtil.fromJson(core.Map json_) + : this( + gsutilUri: json_['gsutilUri'] as core.String?, + ); + + core.Map toJson() => { + if (gsutilUri != null) 'gsutilUri': gsutilUri!, + }; +} + /// Autoscaling config for connector deployment system metrics. 
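Aside (not part of the diff): a short sketch of the new Cloud Storage event destination added above, built from the GSUtil message and the GCS destination type; the bucket URI and service account are placeholders.

import 'package:googleapis/connectors/v1.dart' as connectors;

// Sketch: route event subscription deliveries to a Cloud Storage bucket
// using the new gsutil destination option.
final gcsDestination = connectors.EventSubscriptionDestination(
  type: 'GCS',
  gsutil: connectors.GSUtil(gsutilUri: 'gs://example-bucket/events'),
  serviceAccount: 'eventing-runtime@my-project.iam.gserviceaccount.com',
);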
typedef HPAConfig = $HPAConfig; @@ -7936,12 +8186,18 @@ typedef LockConfig = $LockConfig; /// Struct for representing boolean expressions. class LogicalExpression { /// A list of fields to be compared. + /// + /// Optional. core.List? fieldComparisons; /// A list of nested conditions to be compared. + /// + /// Optional. core.List? logicalExpressions; /// The logical operator to use between the fields and conditions. + /// + /// Optional. /// Possible string values are: /// - "OPERATOR_UNSPECIFIED" : The default value. /// - "AND" : AND operator; The conditions must all be true. @@ -8063,6 +8319,46 @@ class ManagedZone { }; } +/// Marketplace connector details. +class MarketplaceConnectorDetails { + /// Marketplace product name. + core.String? marketplaceProduct; + + /// Marketplace product ID. + core.String? marketplaceProductId; + + /// Marketplace product URL. + core.String? marketplaceProductUri; + + /// The name of the partner. + core.String? partner; + + MarketplaceConnectorDetails({ + this.marketplaceProduct, + this.marketplaceProductId, + this.marketplaceProductUri, + this.partner, + }); + + MarketplaceConnectorDetails.fromJson(core.Map json_) + : this( + marketplaceProduct: json_['marketplaceProduct'] as core.String?, + marketplaceProductId: json_['marketplaceProductId'] as core.String?, + marketplaceProductUri: json_['marketplaceProductUri'] as core.String?, + partner: json_['partner'] as core.String?, + ); + + core.Map toJson() => { + if (marketplaceProduct != null) + 'marketplaceProduct': marketplaceProduct!, + if (marketplaceProductId != null) + 'marketplaceProductId': marketplaceProductId!, + if (marketplaceProductUri != null) + 'marketplaceProductUri': marketplaceProductUri!, + if (partner != null) 'partner': partner!, + }; +} + /// MultipleSelectConfig represents the multiple options for a config variable. class MultipleSelectConfig { /// Allow custom values. @@ -8409,6 +8705,161 @@ class Operation { }; } +/// Partner metadata details. +/// +/// This will be populated when publishing the custom connector as a partner +/// connector version. On publishing, parntner connector version will be created +/// using the fields in PartnerMetadata. +class PartnerMetadata { + /// Whether the user has accepted the Google Cloud Platform Terms of Service + /// (https://cloud.google.com/terms/) and the Google Cloud Marketplace Terms + /// of Service (https://cloud.google.com/terms/marketplace/launcher?hl=en). + /// + /// Required. + core.bool? acceptGcpTos; + + /// Additional comments for the submission. + /// + /// Optional. + core.String? additionalComments; + + /// Confirmation that connector meets all applicable requirements mentioned in + /// the Partner Connector Publishing requirements list and Partner onboardiong + /// requirements list + /// (https://cloud.google.com/marketplace/docs/partners/get-started#requirements). + /// + /// Required. + core.bool? confirmPartnerRequirements; + + /// Public URL for the demo video. + /// + /// Required. + core.String? demoUri; + + /// Integration example templates for the custom connector. + /// + /// Required. + core.String? integrationTemplates; + + /// Marketplace product name. + /// + /// Optional. + core.String? marketplaceProduct; + + /// Marketplace product ID. + /// + /// Required. + core.String? marketplaceProductId; + + /// Marketplace product project ID. + /// + /// Optional. + core.String? marketplaceProductProjectId; + + /// Marketplace product URL. + /// + /// Optional. + core.String? marketplaceProductUri; + + /// Partner name. 
+ /// + /// Required. + core.String? partner; + + /// Partner connector display name. + /// + /// Required. + core.String? partnerConnectorDisplayName; + + /// Publish request time. + /// + /// Output only. + core.String? publishRequestTime; + + /// Target application for which partner connector is built. + /// + /// Required. + core.String? targetApplication; + + /// Target customer segment for the partner connector. + /// + /// Required. + core.String? targetCustomerSegment; + + /// Details about partner connector use cases. + /// + /// Required. + core.String? useCases; + + PartnerMetadata({ + this.acceptGcpTos, + this.additionalComments, + this.confirmPartnerRequirements, + this.demoUri, + this.integrationTemplates, + this.marketplaceProduct, + this.marketplaceProductId, + this.marketplaceProductProjectId, + this.marketplaceProductUri, + this.partner, + this.partnerConnectorDisplayName, + this.publishRequestTime, + this.targetApplication, + this.targetCustomerSegment, + this.useCases, + }); + + PartnerMetadata.fromJson(core.Map json_) + : this( + acceptGcpTos: json_['acceptGcpTos'] as core.bool?, + additionalComments: json_['additionalComments'] as core.String?, + confirmPartnerRequirements: + json_['confirmPartnerRequirements'] as core.bool?, + demoUri: json_['demoUri'] as core.String?, + integrationTemplates: json_['integrationTemplates'] as core.String?, + marketplaceProduct: json_['marketplaceProduct'] as core.String?, + marketplaceProductId: json_['marketplaceProductId'] as core.String?, + marketplaceProductProjectId: + json_['marketplaceProductProjectId'] as core.String?, + marketplaceProductUri: json_['marketplaceProductUri'] as core.String?, + partner: json_['partner'] as core.String?, + partnerConnectorDisplayName: + json_['partnerConnectorDisplayName'] as core.String?, + publishRequestTime: json_['publishRequestTime'] as core.String?, + targetApplication: json_['targetApplication'] as core.String?, + targetCustomerSegment: json_['targetCustomerSegment'] as core.String?, + useCases: json_['useCases'] as core.String?, + ); + + core.Map toJson() => { + if (acceptGcpTos != null) 'acceptGcpTos': acceptGcpTos!, + if (additionalComments != null) + 'additionalComments': additionalComments!, + if (confirmPartnerRequirements != null) + 'confirmPartnerRequirements': confirmPartnerRequirements!, + if (demoUri != null) 'demoUri': demoUri!, + if (integrationTemplates != null) + 'integrationTemplates': integrationTemplates!, + if (marketplaceProduct != null) + 'marketplaceProduct': marketplaceProduct!, + if (marketplaceProductId != null) + 'marketplaceProductId': marketplaceProductId!, + if (marketplaceProductProjectId != null) + 'marketplaceProductProjectId': marketplaceProductProjectId!, + if (marketplaceProductUri != null) + 'marketplaceProductUri': marketplaceProductUri!, + if (partner != null) 'partner': partner!, + if (partnerConnectorDisplayName != null) + 'partnerConnectorDisplayName': partnerConnectorDisplayName!, + if (publishRequestTime != null) + 'publishRequestTime': publishRequestTime!, + if (targetApplication != null) 'targetApplication': targetApplication!, + if (targetCustomerSegment != null) + 'targetCustomerSegment': targetCustomerSegment!, + if (useCases != null) 'useCases': useCases!, + }; +} + /// An Identity and Access Management (IAM) policy, which specifies access /// controls for Google Cloud resources. /// @@ -8567,6 +9018,7 @@ class Provider { /// - "PREVIEW" : PREVIEW. /// - "GA" : GA. /// - "DEPRECATED" : DEPRECATED. + /// - "TEST" : TEST. 
/// - "PRIVATE_PREVIEW" : PRIVATE_PREVIEW. core.String? launchStage; @@ -8635,6 +9087,90 @@ class Provider { }; } +/// Request message for ConnectorsService.PublishCustomConnectorVersion +class PublishCustomConnectorVersionRequest { + /// Partner metadata details for validating and publishing the custom + /// connector as a partner connector version. + /// + /// Required. + PartnerMetadata? partnerMetadata; + + PublishCustomConnectorVersionRequest({ + this.partnerMetadata, + }); + + PublishCustomConnectorVersionRequest.fromJson(core.Map json_) + : this( + partnerMetadata: json_.containsKey('partnerMetadata') + ? PartnerMetadata.fromJson(json_['partnerMetadata'] + as core.Map) + : null, + ); + + core.Map toJson() => { + if (partnerMetadata != null) 'partnerMetadata': partnerMetadata!, + }; +} + +/// Publish status of a custom connector. +class PublishStatus { + /// Publish state of the custom connector. + /// + /// Output only. + /// Possible string values are: + /// - "PUBLISH_STATE_UNSPECIFIED" : State Unspecified. + /// - "PUBLISHED" : Connector version has been published as partner connector + /// version. + /// - "PUBLISH_IN_PROGRESS" : Connector version is in the process of being + /// published as partner connector version. + /// - "UNPUBLISHED" : Connector version has been unpublished as partner + /// connector version + core.String? publishState; + + /// Publish time. + /// + /// Output only. + core.String? publishTime; + + /// Partner connector name. + /// + /// Will be set on the custom connector. Format: + /// providers/partner/connectors//versions/ + /// + /// Output only. + core.String? publishedAs; + + /// Custom connector name. + /// + /// Will be set on the partner connector. Format: + /// providers/customconnectors/connectors//versions/ + /// + /// Output only. + core.String? publishedSource; + + PublishStatus({ + this.publishState, + this.publishTime, + this.publishedAs, + this.publishedSource, + }); + + PublishStatus.fromJson(core.Map json_) + : this( + publishState: json_['publishState'] as core.String?, + publishTime: json_['publishTime'] as core.String?, + publishedAs: json_['publishedAs'] as core.String?, + publishedSource: json_['publishedSource'] as core.String?, + ); + + core.Map toJson() => { + if (publishState != null) 'publishState': publishState!, + if (publishTime != null) 'publishTime': publishTime!, + if (publishedAs != null) 'publishedAs': publishedAs!, + if (publishedSource != null) 'publishedSource': publishedSource!, + }; +} + /// Request message for ConnectorsService.RefreshConnectionSchemaMetadata. typedef RefreshConnectionSchemaMetadataRequest = $Empty; @@ -8699,9 +9235,13 @@ class Resource { /// Template to uniquely represent a Google Cloud resource in a format IAM /// expects This is a template that can have references to other values /// provided in the config variable template. + /// + /// Optional. core.String? pathTemplate; /// Different types of resource supported. + /// + /// Optional. /// Possible string values are: /// - "TYPE_UNSPECIFIED" : Value type is not specified. /// - "GCP_PROJECT" : Google Cloud Project Resource. @@ -8852,9 +9392,13 @@ typedef RetryEventSubscriptionRequest = $Empty; /// a 'grant' button to do the same on behalf of the user. class RoleGrant { /// Template that UI can use to provide helper text to customers. + /// + /// Optional. core.String? helperTextTemplate; /// Prinicipal/Identity for whom the role need to assigned. + /// + /// Optional. 
/// Possible string values are: /// - "PRINCIPAL_UNSPECIFIED" : Value type is not specified. /// - "CONNECTOR_SA" : Service Account used for Connector workload identity @@ -8863,9 +9407,13 @@ class RoleGrant { core.String? principal; /// Resource on which the roles needs to be granted for the principal. + /// + /// Optional. Resource? resource; /// List of roles that need to be granted. + /// + /// Optional. core.List? roles; RoleGrant({ @@ -9442,39 +9990,55 @@ class SshPublicKey { /// SSL Configuration of a connection class SslConfig { /// Additional SSL related field values + /// + /// Optional. core.List? additionalVariables; /// Type of Client Cert (PEM/JKS/.. /// /// etc.) + /// + /// Optional. /// Possible string values are: /// - "CERT_TYPE_UNSPECIFIED" : Cert type unspecified. /// - "PEM" : Privacy Enhanced Mail (PEM) Type core.String? clientCertType; /// Client Certificate + /// + /// Optional. Secret? clientCertificate; /// Client Private Key + /// + /// Optional. Secret? clientPrivateKey; /// Secret containing the passphrase protecting the Client Private Key + /// + /// Optional. Secret? clientPrivateKeyPass; /// Private Server Certificate. /// /// Needs to be specified if trust model is `PRIVATE`. + /// + /// Optional. Secret? privateServerCertificate; /// Type of Server Cert (PEM/JKS/.. /// /// etc.) + /// + /// Optional. /// Possible string values are: /// - "CERT_TYPE_UNSPECIFIED" : Cert type unspecified. /// - "PEM" : Privacy Enhanced Mail (PEM) Type core.String? serverCertType; /// Trust Model of the SSL connection + /// + /// Optional. /// Possible string values are: /// - "PUBLIC" : Public Trust Model. Takes the Default Java trust store. /// - "PRIVATE" : Private Trust Model. Takes custom/private trust store. @@ -9482,6 +10046,8 @@ class SslConfig { core.String? trustModel; /// Controls the ssl type for the given connector version. + /// + /// Optional. /// Possible string values are: /// - "SSL_TYPE_UNSPECIFIED" : No SSL configuration required. /// - "TLS" : TLS Handshake @@ -9489,6 +10055,8 @@ class SslConfig { core.String? type; /// Bool for enabling SSL + /// + /// Optional. core.bool? useSsl; SslConfig({ @@ -9849,3 +10417,30 @@ class WebhookData { if (updateTime != null) 'updateTime': updateTime!, }; } + +/// WebhookSubscriptions has details of webhook subscriptions. +class WebhookSubscriptions { + /// Webhook data. + /// + /// Output only. + core.List? webhookData; + + WebhookSubscriptions({ + this.webhookData, + }); + + WebhookSubscriptions.fromJson(core.Map json_) + : this( + webhookData: (json_['webhookData'] as core.List?) 
+ ?.map((value) => WebhookData.fromJson( + value as core.Map)) + .toList(), + ); + + core.Map toJson() => { + if (webhookData != null) 'webhookData': webhookData!, + }; +} + +/// Request message for ConnectorsService.WithdrawCustomConnectorVersion +typedef WithdrawCustomConnectorVersionRequest = $Empty; diff --git a/generated/googleapis/lib/contactcenterinsights/v1.dart b/generated/googleapis/lib/contactcenterinsights/v1.dart index 31e315b7a..c4a2b452b 100644 --- a/generated/googleapis/lib/contactcenterinsights/v1.dart +++ b/generated/googleapis/lib/contactcenterinsights/v1.dart @@ -22,18 +22,9 @@ /// - [ProjectsResource] /// - [ProjectsLocationsResource] /// - [ProjectsLocationsAnalysisRulesResource] -/// - [ProjectsLocationsAuthorizedViewSetResource] -/// - [ProjectsLocationsAuthorizedViewSetAuthorizedViewResource] -/// - [ProjectsLocationsAuthorizedViewSetAuthorizedViewsResource] -/// - [ProjectsLocationsAuthorizedViewSetAuthorizedViewsConversationsResource] -/// - -/// [ProjectsLocationsAuthorizedViewSetAuthorizedViewsConversationsFeedbackLabelsResource] /// - [ProjectsLocationsAuthorizedViewSetsResource] /// - [ProjectsLocationsAuthorizedViewSetsAuthorizedViewsResource] /// - [ProjectsLocationsAuthorizedViewSetsAuthorizedViewsConversationsResource] -/// - -/// [ProjectsLocationsAuthorizedViewSetsAuthorizedViewsConversationsAnalysesResource] -/// - [ProjectsLocationsAuthorizedViewSetsAuthorizedViewsOperationsResource] /// - [ProjectsLocationsConversationsResource] /// - [ProjectsLocationsConversationsAnalysesResource] /// - [ProjectsLocationsConversationsFeedbackLabelsResource] @@ -93,8 +84,6 @@ class ProjectsLocationsResource { ProjectsLocationsAnalysisRulesResource get analysisRules => ProjectsLocationsAnalysisRulesResource(_requester); - ProjectsLocationsAuthorizedViewSetResource get authorizedViewSet => - ProjectsLocationsAuthorizedViewSetResource(_requester); ProjectsLocationsAuthorizedViewSetsResource get authorizedViewSets => ProjectsLocationsAuthorizedViewSetsResource(_requester); ProjectsLocationsConversationsResource get conversations => @@ -650,1255 +639,125 @@ class ProjectsLocationsAnalysisRulesResource { } } -class ProjectsLocationsAuthorizedViewSetResource { - final commons.ApiRequester _requester; - - ProjectsLocationsAuthorizedViewSetAuthorizedViewResource get authorizedView => - ProjectsLocationsAuthorizedViewSetAuthorizedViewResource(_requester); - ProjectsLocationsAuthorizedViewSetAuthorizedViewsResource - get authorizedViews => - ProjectsLocationsAuthorizedViewSetAuthorizedViewsResource(_requester); - - ProjectsLocationsAuthorizedViewSetResource(commons.ApiRequester client) - : _requester = client; -} - -class ProjectsLocationsAuthorizedViewSetAuthorizedViewResource { - final commons.ApiRequester _requester; - - ProjectsLocationsAuthorizedViewSetAuthorizedViewResource( - commons.ApiRequester client) - : _requester = client; - - /// Gets conversation statistics. - /// - /// Request parameters: - /// - /// [location] - Required. The location of the conversations. - /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/authorizedViewSet/\[^/\]+/authorizedView/\[^/\]+$`. - /// - /// [filter] - A filter to reduce results to a specific subset. This field is - /// useful for getting statistics about conversations with specific - /// properties. - /// - /// [$fields] - Selector specifying which fields to include in a partial - /// response. - /// - /// Completes with a - /// [GoogleCloudContactcenterinsightsV1CalculateStatsResponse]. 
- /// - /// Completes with a [commons.ApiRequestError] if the API endpoint returned an - /// error. - /// - /// If the used [http.Client] completes with an error when making a REST call, - /// this method will complete with the same error. - async.Future - calculateStats( - core.String location, { - core.String? filter, - core.String? $fields, - }) async { - final queryParams_ = >{ - if (filter != null) 'filter': [filter], - if ($fields != null) 'fields': [$fields], - }; - - final url_ = 'v1/' + core.Uri.encodeFull('$location') + ':calculateStats'; - - final response_ = await _requester.request( - url_, - 'GET', - queryParams: queryParams_, - ); - return GoogleCloudContactcenterinsightsV1CalculateStatsResponse.fromJson( - response_ as core.Map); - } - - /// Query metrics. - /// - /// [request] - The metadata request object. - /// - /// Request parameters: - /// - /// [location] - Required. The location of the data. - /// "projects/{project}/locations/{location}" - /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/authorizedViewSet/\[^/\]+/authorizedView/\[^/\]+$`. - /// - /// [$fields] - Selector specifying which fields to include in a partial - /// response. - /// - /// Completes with a [GoogleLongrunningOperation]. - /// - /// Completes with a [commons.ApiRequestError] if the API endpoint returned an - /// error. - /// - /// If the used [http.Client] completes with an error when making a REST call, - /// this method will complete with the same error. - async.Future queryMetrics( - GoogleCloudContactcenterinsightsV1QueryMetricsRequest request, - core.String location, { - core.String? $fields, - }) async { - final body_ = convert.json.encode(request); - final queryParams_ = >{ - if ($fields != null) 'fields': [$fields], - }; - - final url_ = 'v1/' + core.Uri.encodeFull('$location') + ':queryMetrics'; - - final response_ = await _requester.request( - url_, - 'POST', - body: body_, - queryParams: queryParams_, - ); - return GoogleLongrunningOperation.fromJson( - response_ as core.Map); - } -} - -class ProjectsLocationsAuthorizedViewSetAuthorizedViewsResource { - final commons.ApiRequester _requester; - - ProjectsLocationsAuthorizedViewSetAuthorizedViewsConversationsResource - get conversations => - ProjectsLocationsAuthorizedViewSetAuthorizedViewsConversationsResource( - _requester); - - ProjectsLocationsAuthorizedViewSetAuthorizedViewsResource( - commons.ApiRequester client) - : _requester = client; -} - -class ProjectsLocationsAuthorizedViewSetAuthorizedViewsConversationsResource { - final commons.ApiRequester _requester; - - ProjectsLocationsAuthorizedViewSetAuthorizedViewsConversationsFeedbackLabelsResource - get feedbackLabels => - ProjectsLocationsAuthorizedViewSetAuthorizedViewsConversationsFeedbackLabelsResource( - _requester); - - ProjectsLocationsAuthorizedViewSetAuthorizedViewsConversationsResource( - commons.ApiRequester client) - : _requester = client; -} - -class ProjectsLocationsAuthorizedViewSetAuthorizedViewsConversationsFeedbackLabelsResource { - final commons.ApiRequester _requester; - - ProjectsLocationsAuthorizedViewSetAuthorizedViewsConversationsFeedbackLabelsResource( - commons.ApiRequester client) - : _requester = client; - - /// Create feedback label. - /// - /// [request] - The metadata request object. - /// - /// Request parameters: - /// - /// [parent] - Required. The parent resource of the feedback label. 
- /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/authorizedViewSet/\[^/\]+/authorizedViews/\[^/\]+/conversations/\[^/\]+$`. - /// - /// [feedbackLabelId] - Optional. The ID of the feedback label to create. If - /// one is not specified it will be generated by the server. - /// - /// [$fields] - Selector specifying which fields to include in a partial - /// response. - /// - /// Completes with a [GoogleCloudContactcenterinsightsV1FeedbackLabel]. - /// - /// Completes with a [commons.ApiRequestError] if the API endpoint returned an - /// error. - /// - /// If the used [http.Client] completes with an error when making a REST call, - /// this method will complete with the same error. - async.Future create( - GoogleCloudContactcenterinsightsV1FeedbackLabel request, - core.String parent, { - core.String? feedbackLabelId, - core.String? $fields, - }) async { - final body_ = convert.json.encode(request); - final queryParams_ = >{ - if (feedbackLabelId != null) 'feedbackLabelId': [feedbackLabelId], - if ($fields != null) 'fields': [$fields], - }; - - final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/feedbackLabels'; - - final response_ = await _requester.request( - url_, - 'POST', - body: body_, - queryParams: queryParams_, - ); - return GoogleCloudContactcenterinsightsV1FeedbackLabel.fromJson( - response_ as core.Map); - } - - /// Delete feedback label. - /// - /// Request parameters: - /// - /// [name] - Required. The name of the feedback label to delete. - /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/authorizedViewSet/\[^/\]+/authorizedViews/\[^/\]+/conversations/\[^/\]+/feedbackLabels/\[^/\]+$`. - /// - /// [$fields] - Selector specifying which fields to include in a partial - /// response. - /// - /// Completes with a [GoogleProtobufEmpty]. - /// - /// Completes with a [commons.ApiRequestError] if the API endpoint returned an - /// error. - /// - /// If the used [http.Client] completes with an error when making a REST call, - /// this method will complete with the same error. - async.Future delete( - core.String name, { - core.String? $fields, - }) async { - final queryParams_ = >{ - if ($fields != null) 'fields': [$fields], - }; - - final url_ = 'v1/' + core.Uri.encodeFull('$name'); - - final response_ = await _requester.request( - url_, - 'DELETE', - queryParams: queryParams_, - ); - return GoogleProtobufEmpty.fromJson( - response_ as core.Map); - } - - /// Get feedback label. - /// - /// Request parameters: - /// - /// [name] - Required. The name of the feedback label to get. - /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/authorizedViewSet/\[^/\]+/authorizedViews/\[^/\]+/conversations/\[^/\]+/feedbackLabels/\[^/\]+$`. - /// - /// [$fields] - Selector specifying which fields to include in a partial - /// response. - /// - /// Completes with a [GoogleCloudContactcenterinsightsV1FeedbackLabel]. - /// - /// Completes with a [commons.ApiRequestError] if the API endpoint returned an - /// error. - /// - /// If the used [http.Client] completes with an error when making a REST call, - /// this method will complete with the same error. - async.Future get( - core.String name, { - core.String? 
$fields, - }) async { - final queryParams_ = >{ - if ($fields != null) 'fields': [$fields], - }; - - final url_ = 'v1/' + core.Uri.encodeFull('$name'); - - final response_ = await _requester.request( - url_, - 'GET', - queryParams: queryParams_, - ); - return GoogleCloudContactcenterinsightsV1FeedbackLabel.fromJson( - response_ as core.Map); - } - - /// List feedback labels. - /// - /// Request parameters: - /// - /// [parent] - Required. The parent resource of the feedback labels. - /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/authorizedViewSet/\[^/\]+/authorizedViews/\[^/\]+/conversations/\[^/\]+$`. - /// - /// [filter] - Optional. A filter to reduce results to a specific subset. - /// Supports disjunctions (OR) and conjunctions (AND). Automatically sorts by - /// conversation ID. To sort by all feedback labels in a project see - /// ListAllFeedbackLabels. Supported fields: * `issue_model_id` * - /// `qa_question_id` * `qa_scorecard_id` * `min_create_time` * - /// `max_create_time` * `min_update_time` * `max_update_time` * - /// `feedback_label_type`: QUALITY_AI, TOPIC_MODELING - /// - /// [pageSize] - Optional. The maximum number of feedback labels to return in - /// the response. A valid page size ranges from 0 to 100,000 inclusive. If the - /// page size is zero or unspecified, a default page size of 100 will be - /// chosen. Note that a call might return fewer results than the requested - /// page size. - /// - /// [pageToken] - Optional. The value returned by the last - /// `ListFeedbackLabelsResponse`. This value indicates that this is a - /// continuation of a prior `ListFeedbackLabels` call and that the system - /// should return the next page of data. - /// - /// [$fields] - Selector specifying which fields to include in a partial - /// response. - /// - /// Completes with a - /// [GoogleCloudContactcenterinsightsV1ListFeedbackLabelsResponse]. - /// - /// Completes with a [commons.ApiRequestError] if the API endpoint returned an - /// error. - /// - /// If the used [http.Client] completes with an error when making a REST call, - /// this method will complete with the same error. - async.Future - list( - core.String parent, { - core.String? filter, - core.int? pageSize, - core.String? pageToken, - core.String? $fields, - }) async { - final queryParams_ = >{ - if (filter != null) 'filter': [filter], - if (pageSize != null) 'pageSize': ['${pageSize}'], - if (pageToken != null) 'pageToken': [pageToken], - if ($fields != null) 'fields': [$fields], - }; - - final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/feedbackLabels'; - - final response_ = await _requester.request( - url_, - 'GET', - queryParams: queryParams_, - ); - return GoogleCloudContactcenterinsightsV1ListFeedbackLabelsResponse - .fromJson(response_ as core.Map); - } - - /// Update feedback label. - /// - /// [request] - The metadata request object. - /// - /// Request parameters: - /// - /// [name] - Immutable. Resource name of the FeedbackLabel. Format: - /// projects/{project}/locations/{location}/conversations/{conversation}/feedbackLabels/{feedback_label} - /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/authorizedViewSet/\[^/\]+/authorizedViews/\[^/\]+/conversations/\[^/\]+/feedbackLabels/\[^/\]+$`. - /// - /// [updateMask] - Required. The list of fields to be updated. - /// - /// [$fields] - Selector specifying which fields to include in a partial - /// response. - /// - /// Completes with a [GoogleCloudContactcenterinsightsV1FeedbackLabel]. 
- /// - /// Completes with a [commons.ApiRequestError] if the API endpoint returned an - /// error. - /// - /// If the used [http.Client] completes with an error when making a REST call, - /// this method will complete with the same error. - async.Future patch( - GoogleCloudContactcenterinsightsV1FeedbackLabel request, - core.String name, { - core.String? updateMask, - core.String? $fields, - }) async { - final body_ = convert.json.encode(request); - final queryParams_ = >{ - if (updateMask != null) 'updateMask': [updateMask], - if ($fields != null) 'fields': [$fields], - }; - - final url_ = 'v1/' + core.Uri.encodeFull('$name'); - - final response_ = await _requester.request( - url_, - 'PATCH', - body: body_, - queryParams: queryParams_, - ); - return GoogleCloudContactcenterinsightsV1FeedbackLabel.fromJson( - response_ as core.Map); - } -} - -class ProjectsLocationsAuthorizedViewSetsResource { - final commons.ApiRequester _requester; - - ProjectsLocationsAuthorizedViewSetsAuthorizedViewsResource - get authorizedViews => - ProjectsLocationsAuthorizedViewSetsAuthorizedViewsResource( - _requester); - - ProjectsLocationsAuthorizedViewSetsResource(commons.ApiRequester client) - : _requester = client; -} - -class ProjectsLocationsAuthorizedViewSetsAuthorizedViewsResource { - final commons.ApiRequester _requester; - - ProjectsLocationsAuthorizedViewSetsAuthorizedViewsConversationsResource - get conversations => - ProjectsLocationsAuthorizedViewSetsAuthorizedViewsConversationsResource( - _requester); - ProjectsLocationsAuthorizedViewSetsAuthorizedViewsOperationsResource - get operations => - ProjectsLocationsAuthorizedViewSetsAuthorizedViewsOperationsResource( - _requester); - - ProjectsLocationsAuthorizedViewSetsAuthorizedViewsResource( - commons.ApiRequester client) - : _requester = client; - - /// Gets the access control policy for a resource. - /// - /// Returns an empty policy if the resource exists and does not have a policy - /// set. - /// - /// Request parameters: - /// - /// [resource] - REQUIRED: The resource for which the policy is being - /// requested. See - /// [Resource names](https://cloud.google.com/apis/design/resource_names) for - /// the appropriate value for this field. - /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/authorizedViewSets/\[^/\]+/authorizedViews/\[^/\]+$`. - /// - /// [options_requestedPolicyVersion] - Optional. The maximum policy version - /// that will be used to format the policy. Valid values are 0, 1, and 3. - /// Requests specifying an invalid value will be rejected. Requests for - /// policies with any conditional role bindings must specify version 3. - /// Policies with no conditional role bindings may specify any valid value or - /// leave the field unset. The policy in the response might use the policy - /// version that you specified, or it might use a lower policy version. For - /// example, if you specify version 3, but the policy has no conditional role - /// bindings, the response uses version 1. To learn which resources support - /// conditions in their IAM policies, see the - /// [IAM documentation](https://cloud.google.com/iam/help/conditions/resource-policies). - /// - /// [$fields] - Selector specifying which fields to include in a partial - /// response. - /// - /// Completes with a [GoogleIamV1Policy]. - /// - /// Completes with a [commons.ApiRequestError] if the API endpoint returned an - /// error. 
- /// - /// If the used [http.Client] completes with an error when making a REST call, - /// this method will complete with the same error. - async.Future getIamPolicy( - core.String resource, { - core.int? options_requestedPolicyVersion, - core.String? $fields, - }) async { - final queryParams_ = >{ - if (options_requestedPolicyVersion != null) - 'options.requestedPolicyVersion': ['${options_requestedPolicyVersion}'], - if ($fields != null) 'fields': [$fields], - }; - - final url_ = 'v1/' + core.Uri.encodeFull('$resource') + ':getIamPolicy'; - - final response_ = await _requester.request( - url_, - 'GET', - queryParams: queryParams_, - ); - return GoogleIamV1Policy.fromJson( - response_ as core.Map); - } - - /// Sets the access control policy on the specified resource. - /// - /// Replaces any existing policy. Can return `NOT_FOUND`, `INVALID_ARGUMENT`, - /// and `PERMISSION_DENIED` errors. - /// - /// [request] - The metadata request object. - /// - /// Request parameters: - /// - /// [resource] - REQUIRED: The resource for which the policy is being - /// specified. See - /// [Resource names](https://cloud.google.com/apis/design/resource_names) for - /// the appropriate value for this field. - /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/authorizedViewSets/\[^/\]+/authorizedViews/\[^/\]+$`. - /// - /// [$fields] - Selector specifying which fields to include in a partial - /// response. - /// - /// Completes with a [GoogleIamV1Policy]. - /// - /// Completes with a [commons.ApiRequestError] if the API endpoint returned an - /// error. - /// - /// If the used [http.Client] completes with an error when making a REST call, - /// this method will complete with the same error. - async.Future setIamPolicy( - GoogleIamV1SetIamPolicyRequest request, - core.String resource, { - core.String? $fields, - }) async { - final body_ = convert.json.encode(request); - final queryParams_ = >{ - if ($fields != null) 'fields': [$fields], - }; - - final url_ = 'v1/' + core.Uri.encodeFull('$resource') + ':setIamPolicy'; - - final response_ = await _requester.request( - url_, - 'POST', - body: body_, - queryParams: queryParams_, - ); - return GoogleIamV1Policy.fromJson( - response_ as core.Map); - } - - /// Returns permissions that a caller has on the specified resource. - /// - /// If the resource does not exist, this will return an empty set of - /// permissions, not a `NOT_FOUND` error. Note: This operation is designed to - /// be used for building permission-aware UIs and command-line tools, not for - /// authorization checking. This operation may "fail open" without warning. - /// - /// [request] - The metadata request object. - /// - /// Request parameters: - /// - /// [resource] - REQUIRED: The resource for which the policy detail is being - /// requested. See - /// [Resource names](https://cloud.google.com/apis/design/resource_names) for - /// the appropriate value for this field. - /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/authorizedViewSets/\[^/\]+/authorizedViews/\[^/\]+$`. - /// - /// [$fields] - Selector specifying which fields to include in a partial - /// response. - /// - /// Completes with a [GoogleIamV1TestIamPermissionsResponse]. - /// - /// Completes with a [commons.ApiRequestError] if the API endpoint returned an - /// error. - /// - /// If the used [http.Client] completes with an error when making a REST call, - /// this method will complete with the same error. 
- async.Future testIamPermissions( - GoogleIamV1TestIamPermissionsRequest request, - core.String resource, { - core.String? $fields, - }) async { - final body_ = convert.json.encode(request); - final queryParams_ = >{ - if ($fields != null) 'fields': [$fields], - }; - - final url_ = - 'v1/' + core.Uri.encodeFull('$resource') + ':testIamPermissions'; - - final response_ = await _requester.request( - url_, - 'POST', - body: body_, - queryParams: queryParams_, - ); - return GoogleIamV1TestIamPermissionsResponse.fromJson( - response_ as core.Map); - } -} - -class ProjectsLocationsAuthorizedViewSetsAuthorizedViewsConversationsResource { - final commons.ApiRequester _requester; - - ProjectsLocationsAuthorizedViewSetsAuthorizedViewsConversationsAnalysesResource - get analyses => - ProjectsLocationsAuthorizedViewSetsAuthorizedViewsConversationsAnalysesResource( - _requester); - - ProjectsLocationsAuthorizedViewSetsAuthorizedViewsConversationsResource( - commons.ApiRequester client) - : _requester = client; - - /// Creates a conversation. - /// - /// Note that this method does not support audio transcription or redaction. - /// Use `conversations.upload` instead. - /// - /// [request] - The metadata request object. - /// - /// Request parameters: - /// - /// [parent] - Required. The parent resource of the conversation. - /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/authorizedViewSets/\[^/\]+/authorizedViews/\[^/\]+$`. - /// - /// [conversationId] - A unique ID for the new conversation. This ID will - /// become the final component of the conversation's resource name. If no ID - /// is specified, a server-generated ID will be used. This value should be - /// 4-64 characters and must match the regular expression `^[a-z0-9-]{4,64}$`. - /// Valid characters are `a-z-` - /// - /// [$fields] - Selector specifying which fields to include in a partial - /// response. - /// - /// Completes with a [GoogleCloudContactcenterinsightsV1Conversation]. - /// - /// Completes with a [commons.ApiRequestError] if the API endpoint returned an - /// error. - /// - /// If the used [http.Client] completes with an error when making a REST call, - /// this method will complete with the same error. - async.Future create( - GoogleCloudContactcenterinsightsV1Conversation request, - core.String parent, { - core.String? conversationId, - core.String? $fields, - }) async { - final body_ = convert.json.encode(request); - final queryParams_ = >{ - if (conversationId != null) 'conversationId': [conversationId], - if ($fields != null) 'fields': [$fields], - }; - - final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/conversations'; - - final response_ = await _requester.request( - url_, - 'POST', - body: body_, - queryParams: queryParams_, - ); - return GoogleCloudContactcenterinsightsV1Conversation.fromJson( - response_ as core.Map); - } - - /// Deletes a conversation. - /// - /// Request parameters: - /// - /// [name] - Required. The name of the conversation to delete. - /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/authorizedViewSets/\[^/\]+/authorizedViews/\[^/\]+/conversations/\[^/\]+$`. - /// - /// [force] - If set to true, all of this conversation's analyses will also be - /// deleted. Otherwise, the request will only succeed if the conversation has - /// no analyses. - /// - /// [$fields] - Selector specifying which fields to include in a partial - /// response. - /// - /// Completes with a [GoogleProtobufEmpty]. 
- /// - /// Completes with a [commons.ApiRequestError] if the API endpoint returned an - /// error. - /// - /// If the used [http.Client] completes with an error when making a REST call, - /// this method will complete with the same error. - async.Future delete( - core.String name, { - core.bool? force, - core.String? $fields, - }) async { - final queryParams_ = >{ - if (force != null) 'force': ['${force}'], - if ($fields != null) 'fields': [$fields], - }; - - final url_ = 'v1/' + core.Uri.encodeFull('$name'); - - final response_ = await _requester.request( - url_, - 'DELETE', - queryParams: queryParams_, - ); - return GoogleProtobufEmpty.fromJson( - response_ as core.Map); - } - - /// Gets a conversation. - /// - /// Request parameters: - /// - /// [name] - Required. The name of the conversation to get. - /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/authorizedViewSets/\[^/\]+/authorizedViews/\[^/\]+/conversations/\[^/\]+$`. - /// - /// [view] - The level of details of the conversation. Default is `FULL`. - /// Possible string values are: - /// - "CONVERSATION_VIEW_UNSPECIFIED" : The conversation view is not - /// specified. * Defaults to `FULL` in `GetConversationRequest`. * Defaults to - /// `BASIC` in `ListConversationsRequest`. - /// - "FULL" : Populates all fields in the conversation. - /// - "BASIC" : Populates all fields in the conversation except the - /// transcript. - /// - /// [$fields] - Selector specifying which fields to include in a partial - /// response. - /// - /// Completes with a [GoogleCloudContactcenterinsightsV1Conversation]. - /// - /// Completes with a [commons.ApiRequestError] if the API endpoint returned an - /// error. - /// - /// If the used [http.Client] completes with an error when making a REST call, - /// this method will complete with the same error. - async.Future get( - core.String name, { - core.String? view, - core.String? $fields, - }) async { - final queryParams_ = >{ - if (view != null) 'view': [view], - if ($fields != null) 'fields': [$fields], - }; - - final url_ = 'v1/' + core.Uri.encodeFull('$name'); - - final response_ = await _requester.request( - url_, - 'GET', - queryParams: queryParams_, - ); - return GoogleCloudContactcenterinsightsV1Conversation.fromJson( - response_ as core.Map); - } - - /// Lists conversations. - /// - /// Request parameters: - /// - /// [parent] - Required. The parent resource of the conversation. - /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/authorizedViewSets/\[^/\]+/authorizedViews/\[^/\]+$`. - /// - /// [filter] - A filter to reduce results to a specific subset. Useful for - /// querying conversations with specific properties. - /// - /// [orderBy] - Optional. The attribute by which to order conversations in the - /// response. If empty, conversations will be ordered by descending creation - /// time. Supported values are one of the following: * create_time * - /// customer_satisfaction_rating * duration * latest_analysis * start_time * - /// turn_count The default sort order is ascending. To specify order, append - /// `asc` or `desc` (`create_time desc`). For more details, see - /// [Google AIPs Ordering](https://google.aip.dev/132#ordering). - /// - /// [pageSize] - The maximum number of conversations to return in the - /// response. A valid page size ranges from 0 to 100,000 inclusive. If the - /// page size is zero or unspecified, a default page size of 100 will be - /// chosen. Note that a call might return fewer results than the requested - /// page size. 
- /// - /// [pageToken] - The value returned by the last `ListConversationsResponse`. - /// This value indicates that this is a continuation of a prior - /// `ListConversations` call and that the system should return the next page - /// of data. - /// - /// [view] - The level of details of the conversation. Default is `BASIC`. - /// Possible string values are: - /// - "CONVERSATION_VIEW_UNSPECIFIED" : The conversation view is not - /// specified. * Defaults to `FULL` in `GetConversationRequest`. * Defaults to - /// `BASIC` in `ListConversationsRequest`. - /// - "FULL" : Populates all fields in the conversation. - /// - "BASIC" : Populates all fields in the conversation except the - /// transcript. - /// - /// [$fields] - Selector specifying which fields to include in a partial - /// response. - /// - /// Completes with a - /// [GoogleCloudContactcenterinsightsV1ListConversationsResponse]. - /// - /// Completes with a [commons.ApiRequestError] if the API endpoint returned an - /// error. - /// - /// If the used [http.Client] completes with an error when making a REST call, - /// this method will complete with the same error. - async.Future - list( - core.String parent, { - core.String? filter, - core.String? orderBy, - core.int? pageSize, - core.String? pageToken, - core.String? view, - core.String? $fields, - }) async { - final queryParams_ = >{ - if (filter != null) 'filter': [filter], - if (orderBy != null) 'orderBy': [orderBy], - if (pageSize != null) 'pageSize': ['${pageSize}'], - if (pageToken != null) 'pageToken': [pageToken], - if (view != null) 'view': [view], - if ($fields != null) 'fields': [$fields], - }; - - final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/conversations'; - - final response_ = await _requester.request( - url_, - 'GET', - queryParams: queryParams_, - ); - return GoogleCloudContactcenterinsightsV1ListConversationsResponse.fromJson( - response_ as core.Map); - } - - /// Updates a conversation. - /// - /// [request] - The metadata request object. - /// - /// Request parameters: - /// - /// [name] - Immutable. The resource name of the conversation. Format: - /// projects/{project}/locations/{location}/conversations/{conversation} - /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/authorizedViewSets/\[^/\]+/authorizedViews/\[^/\]+/conversations/\[^/\]+$`. - /// - /// [updateMask] - The list of fields to be updated. All possible fields can - /// be updated by passing `*`, or a subset of the following updateable fields - /// can be provided: * `agent_id` * `language_code` * `labels` * `metadata` * - /// `quality_metadata` * `call_metadata` * `start_time` * `expire_time` or - /// `ttl` * `data_source.gcs_source.audio_uri` or - /// `data_source.dialogflow_source.audio_uri` - /// - /// [$fields] - Selector specifying which fields to include in a partial - /// response. - /// - /// Completes with a [GoogleCloudContactcenterinsightsV1Conversation]. - /// - /// Completes with a [commons.ApiRequestError] if the API endpoint returned an - /// error. - /// - /// If the used [http.Client] completes with an error when making a REST call, - /// this method will complete with the same error. - async.Future patch( - GoogleCloudContactcenterinsightsV1Conversation request, - core.String name, { - core.String? updateMask, - core.String? 
$fields, - }) async { - final body_ = convert.json.encode(request); - final queryParams_ = >{ - if (updateMask != null) 'updateMask': [updateMask], - if ($fields != null) 'fields': [$fields], - }; - - final url_ = 'v1/' + core.Uri.encodeFull('$name'); - - final response_ = await _requester.request( - url_, - 'PATCH', - body: body_, - queryParams: queryParams_, - ); - return GoogleCloudContactcenterinsightsV1Conversation.fromJson( - response_ as core.Map); - } - - /// Create a long-running conversation upload operation. - /// - /// This method differs from `CreateConversation` by allowing audio - /// transcription and optional DLP redaction. - /// - /// [request] - The metadata request object. - /// - /// Request parameters: - /// - /// [parent] - Required. The parent resource of the conversation. - /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/authorizedViewSets/\[^/\]+/authorizedViews/\[^/\]+$`. - /// - /// [conversationId] - Optional. A unique ID for the new conversation. This ID - /// will become the final component of the conversation's resource name. If no - /// ID is specified, a server-generated ID will be used. This value should be - /// 4-64 characters and must match the regular expression `^[a-z0-9-]{4,64}$`. - /// Valid characters are `a-z-` - /// - /// [redactionConfig_deidentifyTemplate] - The fully-qualified DLP deidentify - /// template resource name. Format: - /// `projects/{project}/deidentifyTemplates/{template}` - /// - /// [redactionConfig_inspectTemplate] - The fully-qualified DLP inspect - /// template resource name. Format: - /// `projects/{project}/locations/{location}/inspectTemplates/{template}` - /// - /// [speechConfig_speechRecognizer] - The fully-qualified Speech Recognizer - /// resource name. Format: - /// `projects/{project_id}/locations/{location}/recognizer/{recognizer}` - /// - /// [$fields] - Selector specifying which fields to include in a partial - /// response. - /// - /// Completes with a [GoogleLongrunningOperation]. - /// - /// Completes with a [commons.ApiRequestError] if the API endpoint returned an - /// error. - /// - /// If the used [http.Client] completes with an error when making a REST call, - /// this method will complete with the same error. - async.Future upload( - GoogleCloudContactcenterinsightsV1Conversation request, - core.String parent, { - core.String? conversationId, - core.String? redactionConfig_deidentifyTemplate, - core.String? redactionConfig_inspectTemplate, - core.String? speechConfig_speechRecognizer, - core.String? 
$fields, - }) async { - final body_ = convert.json.encode(request); - final queryParams_ = >{ - if (conversationId != null) 'conversationId': [conversationId], - if (redactionConfig_deidentifyTemplate != null) - 'redactionConfig.deidentifyTemplate': [ - redactionConfig_deidentifyTemplate - ], - if (redactionConfig_inspectTemplate != null) - 'redactionConfig.inspectTemplate': [redactionConfig_inspectTemplate], - if (speechConfig_speechRecognizer != null) - 'speechConfig.speechRecognizer': [speechConfig_speechRecognizer], - if ($fields != null) 'fields': [$fields], - }; - - final url_ = - 'v1/' + core.Uri.encodeFull('$parent') + '/conversations:upload'; - - final response_ = await _requester.request( - url_, - 'POST', - body: body_, - queryParams: queryParams_, - ); - return GoogleLongrunningOperation.fromJson( - response_ as core.Map); - } -} - -class ProjectsLocationsAuthorizedViewSetsAuthorizedViewsConversationsAnalysesResource { - final commons.ApiRequester _requester; - - ProjectsLocationsAuthorizedViewSetsAuthorizedViewsConversationsAnalysesResource( - commons.ApiRequester client) - : _requester = client; - - /// Creates an analysis. - /// - /// The long running operation is done when the analysis has completed. - /// - /// [request] - The metadata request object. - /// - /// Request parameters: - /// - /// [parent] - Required. The parent resource of the analysis. - /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/authorizedViewSets/\[^/\]+/authorizedViews/\[^/\]+/conversations/\[^/\]+$`. - /// - /// [$fields] - Selector specifying which fields to include in a partial - /// response. - /// - /// Completes with a [GoogleLongrunningOperation]. - /// - /// Completes with a [commons.ApiRequestError] if the API endpoint returned an - /// error. - /// - /// If the used [http.Client] completes with an error when making a REST call, - /// this method will complete with the same error. - async.Future create( - GoogleCloudContactcenterinsightsV1Analysis request, - core.String parent, { - core.String? $fields, - }) async { - final body_ = convert.json.encode(request); - final queryParams_ = >{ - if ($fields != null) 'fields': [$fields], - }; - - final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/analyses'; - - final response_ = await _requester.request( - url_, - 'POST', - body: body_, - queryParams: queryParams_, - ); - return GoogleLongrunningOperation.fromJson( - response_ as core.Map); - } - - /// Deletes an analysis. - /// - /// Request parameters: - /// - /// [name] - Required. The name of the analysis to delete. - /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/authorizedViewSets/\[^/\]+/authorizedViews/\[^/\]+/conversations/\[^/\]+/analyses/\[^/\]+$`. - /// - /// [$fields] - Selector specifying which fields to include in a partial - /// response. - /// - /// Completes with a [GoogleProtobufEmpty]. - /// - /// Completes with a [commons.ApiRequestError] if the API endpoint returned an - /// error. - /// - /// If the used [http.Client] completes with an error when making a REST call, - /// this method will complete with the same error. - async.Future delete( - core.String name, { - core.String? $fields, - }) async { - final queryParams_ = >{ - if ($fields != null) 'fields': [$fields], - }; - - final url_ = 'v1/' + core.Uri.encodeFull('$name'); - - final response_ = await _requester.request( - url_, - 'DELETE', - queryParams: queryParams_, - ); - return GoogleProtobufEmpty.fromJson( - response_ as core.Map); - } - - /// Gets an analysis. 
- /// - /// Request parameters: - /// - /// [name] - Required. The name of the analysis to get. - /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/authorizedViewSets/\[^/\]+/authorizedViews/\[^/\]+/conversations/\[^/\]+/analyses/\[^/\]+$`. - /// - /// [$fields] - Selector specifying which fields to include in a partial - /// response. - /// - /// Completes with a [GoogleCloudContactcenterinsightsV1Analysis]. - /// - /// Completes with a [commons.ApiRequestError] if the API endpoint returned an - /// error. - /// - /// If the used [http.Client] completes with an error when making a REST call, - /// this method will complete with the same error. - async.Future get( - core.String name, { - core.String? $fields, - }) async { - final queryParams_ = >{ - if ($fields != null) 'fields': [$fields], - }; - - final url_ = 'v1/' + core.Uri.encodeFull('$name'); - - final response_ = await _requester.request( - url_, - 'GET', - queryParams: queryParams_, - ); - return GoogleCloudContactcenterinsightsV1Analysis.fromJson( - response_ as core.Map); - } - - /// Lists analyses. - /// - /// Request parameters: - /// - /// [parent] - Required. The parent resource of the analyses. - /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/authorizedViewSets/\[^/\]+/authorizedViews/\[^/\]+/conversations/\[^/\]+$`. - /// - /// [filter] - A filter to reduce results to a specific subset. Useful for - /// querying conversations with specific properties. - /// - /// [pageSize] - The maximum number of analyses to return in the response. If - /// this value is zero, the service will select a default size. A call might - /// return fewer objects than requested. A non-empty `next_page_token` in the - /// response indicates that more data is available. - /// - /// [pageToken] - The value returned by the last `ListAnalysesResponse`; - /// indicates that this is a continuation of a prior `ListAnalyses` call and - /// the system should return the next page of data. - /// - /// [$fields] - Selector specifying which fields to include in a partial - /// response. - /// - /// Completes with a [GoogleCloudContactcenterinsightsV1ListAnalysesResponse]. - /// - /// Completes with a [commons.ApiRequestError] if the API endpoint returned an - /// error. - /// - /// If the used [http.Client] completes with an error when making a REST call, - /// this method will complete with the same error. - async.Future list( - core.String parent, { - core.String? filter, - core.int? pageSize, - core.String? pageToken, - core.String? 
$fields, - }) async { - final queryParams_ = >{ - if (filter != null) 'filter': [filter], - if (pageSize != null) 'pageSize': ['${pageSize}'], - if (pageToken != null) 'pageToken': [pageToken], - if ($fields != null) 'fields': [$fields], - }; +class ProjectsLocationsAuthorizedViewSetsResource { + final commons.ApiRequester _requester; - final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/analyses'; + ProjectsLocationsAuthorizedViewSetsAuthorizedViewsResource + get authorizedViews => + ProjectsLocationsAuthorizedViewSetsAuthorizedViewsResource( + _requester); - final response_ = await _requester.request( - url_, - 'GET', - queryParams: queryParams_, - ); - return GoogleCloudContactcenterinsightsV1ListAnalysesResponse.fromJson( - response_ as core.Map); - } + ProjectsLocationsAuthorizedViewSetsResource(commons.ApiRequester client) + : _requester = client; } -class ProjectsLocationsAuthorizedViewSetsAuthorizedViewsOperationsResource { +class ProjectsLocationsAuthorizedViewSetsAuthorizedViewsResource { final commons.ApiRequester _requester; - ProjectsLocationsAuthorizedViewSetsAuthorizedViewsOperationsResource( + ProjectsLocationsAuthorizedViewSetsAuthorizedViewsConversationsResource + get conversations => + ProjectsLocationsAuthorizedViewSetsAuthorizedViewsConversationsResource( + _requester); + + ProjectsLocationsAuthorizedViewSetsAuthorizedViewsResource( commons.ApiRequester client) : _requester = client; - /// Starts asynchronous cancellation on a long-running operation. + /// Query metrics. /// - /// The server makes a best effort to cancel the operation, but success is not - /// guaranteed. If the server doesn't support this method, it returns - /// `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation - /// or other methods to check whether the cancellation succeeded or whether - /// the operation completed despite cancellation. On successful cancellation, - /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// [request] - The metadata request object. /// /// Request parameters: /// - /// [name] - The name of the operation resource to be cancelled. + /// [location] - Required. The location of the data. + /// "projects/{project}/locations/{location}" /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/authorizedViewSets/\[^/\]+/authorizedViews/\[^/\]+/operations/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/authorizedViewSets/\[^/\]+/authorizedViews/\[^/\]+$`. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. /// - /// Completes with a [GoogleProtobufEmpty]. + /// Completes with a [GoogleLongrunningOperation]. /// /// Completes with a [commons.ApiRequestError] if the API endpoint returned an /// error. /// /// If the used [http.Client] completes with an error when making a REST call, /// this method will complete with the same error. - async.Future cancel( - core.String name, { + async.Future queryMetrics( + GoogleCloudContactcenterinsightsV1QueryMetricsRequest request, + core.String location, { core.String? 
$fields, }) async { + final body_ = convert.json.encode(request); final queryParams_ = >{ if ($fields != null) 'fields': [$fields], }; - final url_ = 'v1/' + core.Uri.encodeFull('$name') + ':cancel'; + final url_ = 'v1/' + core.Uri.encodeFull('$location') + ':queryMetrics'; final response_ = await _requester.request( url_, 'POST', + body: body_, queryParams: queryParams_, ); - return GoogleProtobufEmpty.fromJson( + return GoogleLongrunningOperation.fromJson( response_ as core.Map); } +} - /// Gets the latest state of a long-running operation. - /// - /// Clients can use this method to poll the operation result at intervals as - /// recommended by the API service. - /// - /// Request parameters: - /// - /// [name] - The name of the operation resource. - /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/authorizedViewSets/\[^/\]+/authorizedViews/\[^/\]+/operations/\[^/\]+$`. - /// - /// [$fields] - Selector specifying which fields to include in a partial - /// response. - /// - /// Completes with a [GoogleLongrunningOperation]. - /// - /// Completes with a [commons.ApiRequestError] if the API endpoint returned an - /// error. - /// - /// If the used [http.Client] completes with an error when making a REST call, - /// this method will complete with the same error. - async.Future get( - core.String name, { - core.String? $fields, - }) async { - final queryParams_ = >{ - if ($fields != null) 'fields': [$fields], - }; - - final url_ = 'v1/' + core.Uri.encodeFull('$name'); +class ProjectsLocationsAuthorizedViewSetsAuthorizedViewsConversationsResource { + final commons.ApiRequester _requester; - final response_ = await _requester.request( - url_, - 'GET', - queryParams: queryParams_, - ); - return GoogleLongrunningOperation.fromJson( - response_ as core.Map); - } + ProjectsLocationsAuthorizedViewSetsAuthorizedViewsConversationsResource( + commons.ApiRequester client) + : _requester = client; - /// Lists operations that match the specified filter in the request. - /// - /// If the server doesn't support this method, it returns `UNIMPLEMENTED`. + /// Gets conversation statistics. /// /// Request parameters: /// - /// [name] - The name of the operation's parent resource. + /// [location] - Required. The location of the conversations. /// Value must have pattern /// `^projects/\[^/\]+/locations/\[^/\]+/authorizedViewSets/\[^/\]+/authorizedViews/\[^/\]+$`. /// - /// [filter] - The standard list filter. - /// - /// [pageSize] - The standard list page size. - /// - /// [pageToken] - The standard list page token. + /// [filter] - A filter to reduce results to a specific subset. This field is + /// useful for getting statistics about conversations with specific + /// properties. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. /// - /// Completes with a [GoogleLongrunningListOperationsResponse]. + /// Completes with a + /// [GoogleCloudContactcenterinsightsV1CalculateStatsResponse]. /// /// Completes with a [commons.ApiRequestError] if the API endpoint returned an /// error. /// /// If the used [http.Client] completes with an error when making a REST call, /// this method will complete with the same error. - async.Future list( - core.String name, { + async.Future + calculateStats( + core.String location, { core.String? filter, - core.int? pageSize, - core.String? pageToken, core.String? 
$fields, }) async { final queryParams_ = >{ if (filter != null) 'filter': [filter], - if (pageSize != null) 'pageSize': ['${pageSize}'], - if (pageToken != null) 'pageToken': [pageToken], if ($fields != null) 'fields': [$fields], }; - final url_ = 'v1/' + core.Uri.encodeFull('$name') + '/operations'; + final url_ = 'v1/' + + core.Uri.encodeFull('$location') + + '/conversations:calculateStats'; final response_ = await _requester.request( url_, 'GET', queryParams: queryParams_, ); - return GoogleLongrunningListOperationsResponse.fromJson( + return GoogleCloudContactcenterinsightsV1CalculateStatsResponse.fromJson( response_ as core.Map); } } @@ -3535,8 +2394,8 @@ class ProjectsLocationsOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// Request parameters: /// @@ -6261,7 +5120,7 @@ class GoogleCloudContactcenterinsightsV1Conversation { /// A map for the user to specify any custom fields. /// - /// A maximum of 20 labels per conversation is allowed, with a maximum of 256 + /// A maximum of 100 labels per conversation is allowed, with a maximum of 256 /// characters per entry. core.Map? labels; @@ -6696,6 +5555,16 @@ class GoogleCloudContactcenterinsightsV1ConversationQualityMetadataAgentInfo { /// A user-specified string representing the agent. core.String? agentId; + /// The agent type, e.g. HUMAN_AGENT. + /// Possible string values are: + /// - "ROLE_UNSPECIFIED" : Participant's role is not set. + /// - "HUMAN_AGENT" : Participant is a human agent. + /// - "AUTOMATED_AGENT" : Participant is an automated agent. + /// - "END_USER" : Participant is an end user who conversed with the contact + /// center. + /// - "ANY_AGENT" : Participant is either a human or automated agent. + core.String? agentType; + /// The agent's name. core.String? displayName; @@ -6703,30 +5572,52 @@ class GoogleCloudContactcenterinsightsV1ConversationQualityMetadataAgentInfo { /// the call. core.String? dispositionCode; + /// The agent's location. + core.String? location; + /// A user-specified string representing the agent's team. + /// + /// Deprecated in favor of the `teams` field. + @core.Deprecated( + 'Not supported. Member documentation may have more information.', + ) core.String? team; + /// User-specified strings representing the agent's teams. + core.List? teams; + GoogleCloudContactcenterinsightsV1ConversationQualityMetadataAgentInfo({ this.agentId, + this.agentType, this.displayName, this.dispositionCode, + this.location, this.team, + this.teams, }); GoogleCloudContactcenterinsightsV1ConversationQualityMetadataAgentInfo.fromJson( core.Map json_) : this( agentId: json_['agentId'] as core.String?, + agentType: json_['agentType'] as core.String?, displayName: json_['displayName'] as core.String?, dispositionCode: json_['dispositionCode'] as core.String?, + location: json_['location'] as core.String?, team: json_['team'] as core.String?, + teams: (json_['teams'] as core.List?) 
+ ?.map((value) => value as core.String) + .toList(), ); core.Map toJson() => { if (agentId != null) 'agentId': agentId!, + if (agentType != null) 'agentType': agentType!, if (displayName != null) 'displayName': displayName!, if (dispositionCode != null) 'dispositionCode': dispositionCode!, + if (location != null) 'location': location!, if (team != null) 'team': team!, + if (teams != null) 'teams': teams!, }; } @@ -8070,7 +6961,7 @@ class GoogleCloudContactcenterinsightsV1IngestConversationsRequestGcsSource { /// `metadata_bucket_uri`. /// /// Keys not included in this field will be ignored. Note that there is a - /// limit of 20 labels per conversation. + /// limit of 100 labels per conversation. /// /// Optional. core.List? customMetadataKeys; @@ -9254,11 +8145,6 @@ class GoogleCloudContactcenterinsightsV1QaAnswer { /// The conversation the answer applies to. core.String? conversation; - /// The maximum potential score of the question. - /// - /// If the question was answered using `na_value`, this field will be zero. - core.double? potentialScore; - /// The QaQuestion answered by this answer. core.String? qaQuestion; @@ -9277,7 +8163,6 @@ class GoogleCloudContactcenterinsightsV1QaAnswer { this.answerSources, this.answerValue, this.conversation, - this.potentialScore, this.qaQuestion, this.questionBody, this.tags, @@ -9295,7 +8180,6 @@ class GoogleCloudContactcenterinsightsV1QaAnswer { json_['answerValue'] as core.Map) : null, conversation: json_['conversation'] as core.String?, - potentialScore: (json_['potentialScore'] as core.num?)?.toDouble(), qaQuestion: json_['qaQuestion'] as core.String?, questionBody: json_['questionBody'] as core.String?, tags: (json_['tags'] as core.List?) @@ -9307,7 +8191,6 @@ class GoogleCloudContactcenterinsightsV1QaAnswer { if (answerSources != null) 'answerSources': answerSources!, if (answerValue != null) 'answerValue': answerValue!, if (conversation != null) 'conversation': conversation!, - if (potentialScore != null) 'potentialScore': potentialScore!, if (qaQuestion != null) 'qaQuestion': qaQuestion!, if (questionBody != null) 'questionBody': questionBody!, if (tags != null) 'tags': tags!, @@ -9351,7 +8234,7 @@ class GoogleCloudContactcenterinsightsV1QaAnswerAnswerSource { }; } -/// Message for holding the value of the answer. +/// Message for holding the value of a QaAnswer. /// /// QaQuestion.AnswerChoice defines the possible answer values for a question. class GoogleCloudContactcenterinsightsV1QaAnswerAnswerValue { @@ -9364,12 +8247,28 @@ class GoogleCloudContactcenterinsightsV1QaAnswerAnswerValue { core.String? key; /// A value of "Not Applicable (N/A)". + /// + /// Should only ever be `true`. core.bool? naValue; + /// Normalized score of the questions. + /// + /// Calculated as score / potential_score. + /// + /// Output only. + core.double? normalizedScore; + /// Numerical value. core.double? numValue; + /// The maximum potential score of the question. + /// + /// Output only. + core.double? potentialScore; + /// Numerical score of the answer. + /// + /// Output only. core.double? score; /// String value. 
@@ -9379,7 +8278,9 @@ class GoogleCloudContactcenterinsightsV1QaAnswerAnswerValue { this.boolValue, this.key, this.naValue, + this.normalizedScore, this.numValue, + this.potentialScore, this.score, this.strValue, }); @@ -9389,7 +8290,9 @@ class GoogleCloudContactcenterinsightsV1QaAnswerAnswerValue { boolValue: json_['boolValue'] as core.bool?, key: json_['key'] as core.String?, naValue: json_['naValue'] as core.bool?, + normalizedScore: (json_['normalizedScore'] as core.num?)?.toDouble(), numValue: (json_['numValue'] as core.num?)?.toDouble(), + potentialScore: (json_['potentialScore'] as core.num?)?.toDouble(), score: (json_['score'] as core.num?)?.toDouble(), strValue: json_['strValue'] as core.String?, ); @@ -9398,7 +8301,9 @@ class GoogleCloudContactcenterinsightsV1QaAnswerAnswerValue { if (boolValue != null) 'boolValue': boolValue!, if (key != null) 'key': key!, if (naValue != null) 'naValue': naValue!, + if (normalizedScore != null) 'normalizedScore': normalizedScore!, if (numValue != null) 'numValue': numValue!, + if (potentialScore != null) 'potentialScore': potentialScore!, if (score != null) 'score': score!, if (strValue != null) 'strValue': strValue!, }; @@ -10015,6 +8920,8 @@ class GoogleCloudContactcenterinsightsV1QueryMetricsRequest { /// granularity. PER_MINUTE means \[01:00 to 01:01). /// - "PER_5_MINUTES" : Data points in the time series will aggregate at a 1 /// minute granularity. PER_5_MINUTES means \[01:00 to 01:05). + /// - "MONTHLY" : Data points in the time series will aggregate at a monthly + /// granularity. 1 MONTH means \[01st of the month to 1st of the next month). core.String? timeGranularity; GoogleCloudContactcenterinsightsV1QueryMetricsRequest({ @@ -10782,322 +9689,6 @@ class GoogleCloudContactcenterinsightsV1View { }; } -/// Specifies the audit configuration for a service. -/// -/// The configuration determines which permission types are logged, and what -/// identities, if any, are exempted from logging. An AuditConfig must have one -/// or more AuditLogConfigs. If there are AuditConfigs for both `allServices` -/// and a specific service, the union of the two AuditConfigs is used for that -/// service: the log_types specified in each AuditConfig are enabled, and the -/// exempted_members in each AuditLogConfig are exempted. Example Policy with -/// multiple AuditConfigs: { "audit_configs": \[ { "service": "allServices", -/// "audit_log_configs": \[ { "log_type": "DATA_READ", "exempted_members": \[ -/// "user:jose@example.com" \] }, { "log_type": "DATA_WRITE" }, { "log_type": -/// "ADMIN_READ" } \] }, { "service": "sampleservice.googleapis.com", -/// "audit_log_configs": \[ { "log_type": "DATA_READ" }, { "log_type": -/// "DATA_WRITE", "exempted_members": \[ "user:aliya@example.com" \] } \] } \] } -/// For sampleservice, this policy enables DATA_READ, DATA_WRITE and ADMIN_READ -/// logging. It also exempts `jose@example.com` from DATA_READ logging, and -/// `aliya@example.com` from DATA_WRITE logging. -class GoogleIamV1AuditConfig { - /// The configuration for logging of each type of permission. - core.List? auditLogConfigs; - - /// Specifies a service that will be enabled for audit logging. - /// - /// For example, `storage.googleapis.com`, `cloudsql.googleapis.com`. - /// `allServices` is a special value that covers all services. - core.String? service; - - GoogleIamV1AuditConfig({ - this.auditLogConfigs, - this.service, - }); - - GoogleIamV1AuditConfig.fromJson(core.Map json_) - : this( - auditLogConfigs: (json_['auditLogConfigs'] as core.List?) 
- ?.map((value) => GoogleIamV1AuditLogConfig.fromJson( - value as core.Map)) - .toList(), - service: json_['service'] as core.String?, - ); - - core.Map toJson() => { - if (auditLogConfigs != null) 'auditLogConfigs': auditLogConfigs!, - if (service != null) 'service': service!, - }; -} - -/// Provides the configuration for logging a type of permissions. -/// -/// Example: { "audit_log_configs": \[ { "log_type": "DATA_READ", -/// "exempted_members": \[ "user:jose@example.com" \] }, { "log_type": -/// "DATA_WRITE" } \] } This enables 'DATA_READ' and 'DATA_WRITE' logging, while -/// exempting jose@example.com from DATA_READ logging. -typedef GoogleIamV1AuditLogConfig = $AuditLogConfig; - -/// Associates `members`, or principals, with a `role`. -class GoogleIamV1Binding { - /// The condition that is associated with this binding. - /// - /// If the condition evaluates to `true`, then this binding applies to the - /// current request. If the condition evaluates to `false`, then this binding - /// does not apply to the current request. However, a different role binding - /// might grant the same role to one or more of the principals in this - /// binding. To learn which resources support conditions in their IAM - /// policies, see the - /// [IAM documentation](https://cloud.google.com/iam/help/conditions/resource-policies). - GoogleTypeExpr? condition; - - /// Specifies the principals requesting access for a Google Cloud resource. - /// - /// `members` can have the following values: * `allUsers`: A special - /// identifier that represents anyone who is on the internet; with or without - /// a Google account. * `allAuthenticatedUsers`: A special identifier that - /// represents anyone who is authenticated with a Google account or a service - /// account. Does not include identities that come from external identity - /// providers (IdPs) through identity federation. * `user:{emailid}`: An email - /// address that represents a specific Google account. For example, - /// `alice@example.com` . * `serviceAccount:{emailid}`: An email address that - /// represents a Google service account. For example, - /// `my-other-app@appspot.gserviceaccount.com`. * - /// `serviceAccount:{projectid}.svc.id.goog[{namespace}/{kubernetes-sa}]`: An - /// identifier for a - /// [Kubernetes service account](https://cloud.google.com/kubernetes-engine/docs/how-to/kubernetes-service-accounts). - /// For example, `my-project.svc.id.goog[my-namespace/my-kubernetes-sa]`. * - /// `group:{emailid}`: An email address that represents a Google group. For - /// example, `admins@example.com`. * `domain:{domain}`: The G Suite domain - /// (primary) that represents all the users of that domain. For example, - /// `google.com` or `example.com`. * - /// `principal://iam.googleapis.com/locations/global/workforcePools/{pool_id}/subject/{subject_attribute_value}`: - /// A single identity in a workforce identity pool. * - /// `principalSet://iam.googleapis.com/locations/global/workforcePools/{pool_id}/group/{group_id}`: - /// All workforce identities in a group. * - /// `principalSet://iam.googleapis.com/locations/global/workforcePools/{pool_id}/attribute.{attribute_name}/{attribute_value}`: - /// All workforce identities with a specific attribute value. * - /// `principalSet://iam.googleapis.com/locations/global/workforcePools/{pool_id} - /// / * `: All identities in a workforce identity pool. 
* - /// `principal://iam.googleapis.com/projects/{project_number}/locations/global/workloadIdentityPools/{pool_id}/subject/{subject_attribute_value}`: - /// A single identity in a workload identity pool. * - /// `principalSet://iam.googleapis.com/projects/{project_number}/locations/global/workloadIdentityPools/{pool_id}/group/{group_id}`: - /// A workload identity pool group. * - /// `principalSet://iam.googleapis.com/projects/{project_number}/locations/global/workloadIdentityPools/{pool_id}/attribute.{attribute_name}/{attribute_value}`: - /// All identities in a workload identity pool with a certain attribute. * - /// `principalSet://iam.googleapis.com/projects/{project_number}/locations/global/workloadIdentityPools/{pool_id} - /// / * `: All identities in a workload identity pool. * - /// `deleted:user:{emailid}?uid={uniqueid}`: An email address (plus unique - /// identifier) representing a user that has been recently deleted. For - /// example, `alice@example.com?uid=123456789012345678901`. If the user is - /// recovered, this value reverts to `user:{emailid}` and the recovered user - /// retains the role in the binding. * - /// `deleted:serviceAccount:{emailid}?uid={uniqueid}`: An email address (plus - /// unique identifier) representing a service account that has been recently - /// deleted. For example, - /// `my-other-app@appspot.gserviceaccount.com?uid=123456789012345678901`. If - /// the service account is undeleted, this value reverts to - /// `serviceAccount:{emailid}` and the undeleted service account retains the - /// role in the binding. * `deleted:group:{emailid}?uid={uniqueid}`: An email - /// address (plus unique identifier) representing a Google group that has been - /// recently deleted. For example, - /// `admins@example.com?uid=123456789012345678901`. If the group is recovered, - /// this value reverts to `group:{emailid}` and the recovered group retains - /// the role in the binding. * - /// `deleted:principal://iam.googleapis.com/locations/global/workforcePools/{pool_id}/subject/{subject_attribute_value}`: - /// Deleted single identity in a workforce identity pool. For example, - /// `deleted:principal://iam.googleapis.com/locations/global/workforcePools/my-pool-id/subject/my-subject-attribute-value`. - core.List? members; - - /// Role that is assigned to the list of `members`, or principals. - /// - /// For example, `roles/viewer`, `roles/editor`, or `roles/owner`. For an - /// overview of the IAM roles and permissions, see the - /// [IAM documentation](https://cloud.google.com/iam/docs/roles-overview). For - /// a list of the available pre-defined roles, see - /// [here](https://cloud.google.com/iam/docs/understanding-roles). - core.String? role; - - GoogleIamV1Binding({ - this.condition, - this.members, - this.role, - }); - - GoogleIamV1Binding.fromJson(core.Map json_) - : this( - condition: json_.containsKey('condition') - ? GoogleTypeExpr.fromJson( - json_['condition'] as core.Map) - : null, - members: (json_['members'] as core.List?) - ?.map((value) => value as core.String) - .toList(), - role: json_['role'] as core.String?, - ); - - core.Map toJson() => { - if (condition != null) 'condition': condition!, - if (members != null) 'members': members!, - if (role != null) 'role': role!, - }; -} - -/// An Identity and Access Management (IAM) policy, which specifies access -/// controls for Google Cloud resources. -/// -/// A `Policy` is a collection of `bindings`. A `binding` binds one or more -/// `members`, or principals, to a single `role`. 
Principals can be user -/// accounts, service accounts, Google groups, and domains (such as G Suite). A -/// `role` is a named list of permissions; each `role` can be an IAM predefined -/// role or a user-created custom role. For some types of Google Cloud -/// resources, a `binding` can also specify a `condition`, which is a logical -/// expression that allows access to a resource only if the expression evaluates -/// to `true`. A condition can add constraints based on attributes of the -/// request, the resource, or both. To learn which resources support conditions -/// in their IAM policies, see the -/// [IAM documentation](https://cloud.google.com/iam/help/conditions/resource-policies). -/// **JSON example:** ``` { "bindings": [ { "role": -/// "roles/resourcemanager.organizationAdmin", "members": [ -/// "user:mike@example.com", "group:admins@example.com", "domain:google.com", -/// "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": -/// "roles/resourcemanager.organizationViewer", "members": [ -/// "user:eve@example.com" ], "condition": { "title": "expirable access", -/// "description": "Does not grant access after Sep 2020", "expression": -/// "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": -/// "BwWWja0YfJA=", "version": 3 } ``` **YAML example:** ``` bindings: - -/// members: - user:mike@example.com - group:admins@example.com - -/// domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com -/// role: roles/resourcemanager.organizationAdmin - members: - -/// user:eve@example.com role: roles/resourcemanager.organizationViewer -/// condition: title: expirable access description: Does not grant access after -/// Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') -/// etag: BwWWja0YfJA= version: 3 ``` For a description of IAM and its features, -/// see the [IAM documentation](https://cloud.google.com/iam/docs/). -class GoogleIamV1Policy { - /// Specifies cloud audit logging configuration for this policy. - core.List? auditConfigs; - - /// Associates a list of `members`, or principals, with a `role`. - /// - /// Optionally, may specify a `condition` that determines how and when the - /// `bindings` are applied. Each of the `bindings` must contain at least one - /// principal. The `bindings` in a `Policy` can refer to up to 1,500 - /// principals; up to 250 of these principals can be Google groups. Each - /// occurrence of a principal counts towards these limits. For example, if the - /// `bindings` grant 50 different roles to `user:alice@example.com`, and not - /// to any other principal, then you can add another 1,450 principals to the - /// `bindings` in the `Policy`. - core.List? bindings; - - /// `etag` is used for optimistic concurrency control as a way to help prevent - /// simultaneous updates of a policy from overwriting each other. - /// - /// It is strongly suggested that systems make use of the `etag` in the - /// read-modify-write cycle to perform policy updates in order to avoid race - /// conditions: An `etag` is returned in the response to `getIamPolicy`, and - /// systems are expected to put that etag in the request to `setIamPolicy` to - /// ensure that their change will be applied to the same version of the - /// policy. **Important:** If you use IAM Conditions, you must include the - /// `etag` field whenever you call `setIamPolicy`. 
If you omit this field, - /// then IAM allows you to overwrite a version `3` policy with a version `1` - /// policy, and all of the conditions in the version `3` policy are lost. - core.String? etag; - core.List get etagAsBytes => convert.base64.decode(etag!); - - set etagAsBytes(core.List bytes_) { - etag = - convert.base64.encode(bytes_).replaceAll('/', '_').replaceAll('+', '-'); - } - - /// Specifies the format of the policy. - /// - /// Valid values are `0`, `1`, and `3`. Requests that specify an invalid value - /// are rejected. Any operation that affects conditional role bindings must - /// specify version `3`. This requirement applies to the following operations: - /// * Getting a policy that includes a conditional role binding * Adding a - /// conditional role binding to a policy * Changing a conditional role binding - /// in a policy * Removing any role binding, with or without a condition, from - /// a policy that includes conditions **Important:** If you use IAM - /// Conditions, you must include the `etag` field whenever you call - /// `setIamPolicy`. If you omit this field, then IAM allows you to overwrite a - /// version `3` policy with a version `1` policy, and all of the conditions in - /// the version `3` policy are lost. If a policy does not include any - /// conditions, operations on that policy may specify any valid version or - /// leave the field unset. To learn which resources support conditions in - /// their IAM policies, see the - /// [IAM documentation](https://cloud.google.com/iam/help/conditions/resource-policies). - core.int? version; - - GoogleIamV1Policy({ - this.auditConfigs, - this.bindings, - this.etag, - this.version, - }); - - GoogleIamV1Policy.fromJson(core.Map json_) - : this( - auditConfigs: (json_['auditConfigs'] as core.List?) - ?.map((value) => GoogleIamV1AuditConfig.fromJson( - value as core.Map)) - .toList(), - bindings: (json_['bindings'] as core.List?) - ?.map((value) => GoogleIamV1Binding.fromJson( - value as core.Map)) - .toList(), - etag: json_['etag'] as core.String?, - version: json_['version'] as core.int?, - ); - - core.Map toJson() => { - if (auditConfigs != null) 'auditConfigs': auditConfigs!, - if (bindings != null) 'bindings': bindings!, - if (etag != null) 'etag': etag!, - if (version != null) 'version': version!, - }; -} - -/// Request message for `SetIamPolicy` method. -class GoogleIamV1SetIamPolicyRequest { - /// REQUIRED: The complete policy to be applied to the `resource`. - /// - /// The size of the policy is limited to a few 10s of KB. An empty policy is a - /// valid policy but certain Google Cloud services (such as Projects) might - /// reject them. - GoogleIamV1Policy? policy; - - /// OPTIONAL: A FieldMask specifying which fields of the policy to modify. - /// - /// Only the fields in the mask will be modified. If no mask is provided, the - /// following default mask is used: `paths: "bindings, etag"` - core.String? updateMask; - - GoogleIamV1SetIamPolicyRequest({ - this.policy, - this.updateMask, - }); - - GoogleIamV1SetIamPolicyRequest.fromJson(core.Map json_) - : this( - policy: json_.containsKey('policy') - ? GoogleIamV1Policy.fromJson( - json_['policy'] as core.Map) - : null, - updateMask: json_['updateMask'] as core.String?, - ); - - core.Map toJson() => { - if (policy != null) 'policy': policy!, - if (updateMask != null) 'updateMask': updateMask!, - }; -} - -/// Request message for `TestIamPermissions` method. 
-typedef GoogleIamV1TestIamPermissionsRequest = $TestIamPermissionsRequest00; - -/// Response message for `TestIamPermissions` method. -typedef GoogleIamV1TestIamPermissionsResponse = $PermissionsResponse; - /// The response message for Operations.ListOperations. class GoogleLongrunningListOperationsResponse { /// The standard List next-page token. @@ -11218,23 +9809,3 @@ typedef GoogleProtobufEmpty = $Empty; /// You can find out more about this error model and how to work with it in the /// [API Design Guide](https://cloud.google.com/apis/design/errors). typedef GoogleRpcStatus = $Status00; - -/// Represents a textual expression in the Common Expression Language (CEL) -/// syntax. -/// -/// CEL is a C-like expression language. The syntax and semantics of CEL are -/// documented at https://github.com/google/cel-spec. Example (Comparison): -/// title: "Summary size limit" description: "Determines if a summary is less -/// than 100 chars" expression: "document.summary.size() \< 100" Example -/// (Equality): title: "Requestor is owner" description: "Determines if -/// requestor is the document owner" expression: "document.owner == -/// request.auth.claims.email" Example (Logic): title: "Public documents" -/// description: "Determine whether the document should be publicly visible" -/// expression: "document.type != 'private' && document.type != 'internal'" -/// Example (Data Manipulation): title: "Notification string" description: -/// "Create a notification string with a timestamp." expression: "'New message -/// received at ' + string(document.create_time)" The exact variables and -/// functions that may be referenced within an expression are determined by the -/// service that evaluates it. See the service documentation for additional -/// information. -typedef GoogleTypeExpr = $Expr; diff --git a/generated/googleapis/lib/container/v1.dart b/generated/googleapis/lib/container/v1.dart index 9f0f25d89..95db050bd 100644 --- a/generated/googleapis/lib/container/v1.dart +++ b/generated/googleapis/lib/container/v1.dart @@ -4060,7 +4060,7 @@ class Autopilot { /// Enable Autopilot core.bool? enabled; - /// Workload policy configuration for Autopilot. + /// WorkloadPolicyConfig is the configuration related to GCW workload policy WorkloadPolicyConfig? workloadPolicyConfig; Autopilot({ @@ -5628,7 +5628,7 @@ class ClusterUpdate { /// The desired authenticator groups config for the cluster. AuthenticatorGroupsConfig? desiredAuthenticatorGroupsConfig; - /// The desired workload policy configuration for the autopilot cluster. + /// WorkloadPolicyConfig is the configuration related to GCW workload policy WorkloadPolicyConfig? desiredAutopilotWorkloadPolicyConfig; /// The desired configuration options for the Binary Authorization feature. @@ -5693,6 +5693,9 @@ class ClusterUpdate { ) core.bool? desiredEnablePrivateEndpoint; + /// The desired enterprise configuration for the cluster. + DesiredEnterpriseConfig? desiredEnterpriseConfig; + /// The desired fleet configuration for the cluster. Fleet? desiredFleet; @@ -5799,6 +5802,12 @@ class ClusterUpdate { /// autopilot clusters and node auto-provisioning enabled clusters. NodeKubeletConfig? desiredNodePoolAutoConfigKubeletConfig; + /// The desired Linux node config for all auto-provisioned node pools in + /// autopilot clusters and node auto-provisioning enabled clusters. + /// + /// Currently only `cgroup_mode` can be set here. + LinuxNodeConfig? 
desiredNodePoolAutoConfigLinuxNodeConfig; + /// The desired network tags that apply to all auto-provisioned node pools in /// autopilot clusters and node auto-provisioning enabled clusters. NetworkTags? desiredNodePoolAutoConfigNetworkTags; @@ -5943,6 +5952,7 @@ class ClusterUpdate { this.desiredEnableFqdnNetworkPolicy, this.desiredEnableMultiNetworking, this.desiredEnablePrivateEndpoint, + this.desiredEnterpriseConfig, this.desiredFleet, this.desiredGatewayApiConfig, this.desiredGcfsConfig, @@ -5963,6 +5973,7 @@ class ClusterUpdate { this.desiredNetworkPerformanceConfig, this.desiredNodeKubeletConfig, this.desiredNodePoolAutoConfigKubeletConfig, + this.desiredNodePoolAutoConfigLinuxNodeConfig, this.desiredNodePoolAutoConfigNetworkTags, this.desiredNodePoolAutoConfigResourceManagerTags, this.desiredNodePoolAutoscaling, @@ -6072,6 +6083,11 @@ class ClusterUpdate { json_['desiredEnableMultiNetworking'] as core.bool?, desiredEnablePrivateEndpoint: json_['desiredEnablePrivateEndpoint'] as core.bool?, + desiredEnterpriseConfig: json_.containsKey('desiredEnterpriseConfig') + ? DesiredEnterpriseConfig.fromJson( + json_['desiredEnterpriseConfig'] + as core.Map) + : null, desiredFleet: json_.containsKey('desiredFleet') ? Fleet.fromJson( json_['desiredFleet'] as core.Map) @@ -6151,6 +6167,12 @@ class ClusterUpdate { json_['desiredNodePoolAutoConfigKubeletConfig'] as core.Map) : null, + desiredNodePoolAutoConfigLinuxNodeConfig: + json_.containsKey('desiredNodePoolAutoConfigLinuxNodeConfig') + ? LinuxNodeConfig.fromJson( + json_['desiredNodePoolAutoConfigLinuxNodeConfig'] + as core.Map) + : null, desiredNodePoolAutoConfigNetworkTags: json_.containsKey('desiredNodePoolAutoConfigNetworkTags') ? NetworkTags.fromJson( @@ -6301,6 +6323,8 @@ class ClusterUpdate { 'desiredEnableMultiNetworking': desiredEnableMultiNetworking!, if (desiredEnablePrivateEndpoint != null) 'desiredEnablePrivateEndpoint': desiredEnablePrivateEndpoint!, + if (desiredEnterpriseConfig != null) + 'desiredEnterpriseConfig': desiredEnterpriseConfig!, if (desiredFleet != null) 'desiredFleet': desiredFleet!, if (desiredGatewayApiConfig != null) 'desiredGatewayApiConfig': desiredGatewayApiConfig!, @@ -6339,6 +6363,9 @@ class ClusterUpdate { if (desiredNodePoolAutoConfigKubeletConfig != null) 'desiredNodePoolAutoConfigKubeletConfig': desiredNodePoolAutoConfigKubeletConfig!, + if (desiredNodePoolAutoConfigLinuxNodeConfig != null) + 'desiredNodePoolAutoConfigLinuxNodeConfig': + desiredNodePoolAutoConfigLinuxNodeConfig!, if (desiredNodePoolAutoConfigNetworkTags != null) 'desiredNodePoolAutoConfigNetworkTags': desiredNodePoolAutoConfigNetworkTags!, @@ -6988,6 +7015,30 @@ class DefaultSnatStatus { }; } +/// DesiredEnterpriseConfig is a wrapper used for updating enterprise_config. +class DesiredEnterpriseConfig { + /// desired_tier specifies the desired tier of the cluster. + /// Possible string values are: + /// - "CLUSTER_TIER_UNSPECIFIED" : CLUSTER_TIER_UNSPECIFIED is when + /// cluster_tier is not set. + /// - "STANDARD" : STANDARD indicates a standard GKE cluster. + /// - "ENTERPRISE" : ENTERPRISE indicates a GKE Enterprise cluster. + core.String? 
desiredTier; + + DesiredEnterpriseConfig({ + this.desiredTier, + }); + + DesiredEnterpriseConfig.fromJson(core.Map json_) + : this( + desiredTier: json_['desiredTier'] as core.String?, + ); + + core.Map toJson() => { + if (desiredTier != null) 'desiredTier': desiredTier!, + }; +} + /// Configuration for NodeLocal DNSCache class DnsCacheConfig { /// Whether NodeLocal DNSCache is enabled for this cluster. @@ -7027,17 +7078,28 @@ class EnterpriseConfig { /// - "ENTERPRISE" : ENTERPRISE indicates a GKE Enterprise cluster. core.String? clusterTier; + /// desired_tier specifies the desired tier of the cluster. + /// Possible string values are: + /// - "CLUSTER_TIER_UNSPECIFIED" : CLUSTER_TIER_UNSPECIFIED is when + /// cluster_tier is not set. + /// - "STANDARD" : STANDARD indicates a standard GKE cluster. + /// - "ENTERPRISE" : ENTERPRISE indicates a GKE Enterprise cluster. + core.String? desiredTier; + EnterpriseConfig({ this.clusterTier, + this.desiredTier, }); EnterpriseConfig.fromJson(core.Map json_) : this( clusterTier: json_['clusterTier'] as core.String?, + desiredTier: json_['desiredTier'] as core.String?, ); core.Map toJson() => { if (clusterTier != null) 'clusterTier': clusterTier!, + if (desiredTier != null) 'desiredTier': desiredTier!, }; } @@ -8146,7 +8208,7 @@ class LinuxNodeConfig { /// net.core.busy_read net.core.netdev_max_backlog net.core.rmem_max /// net.core.wmem_default net.core.wmem_max net.core.optmem_max /// net.core.somaxconn net.ipv4.tcp_rmem net.ipv4.tcp_wmem - /// net.ipv4.tcp_tw_reuse + /// net.ipv4.tcp_tw_reuse kernel.shmmni kernel.shmmax kernel.shmall core.Map? sysctls; LinuxNodeConfig({ @@ -9275,6 +9337,20 @@ class NodeConfig { /// information. core.int? localSsdCount; + /// Specifies which method should be used for encrypting the Local SSDs + /// attahced to the node. + /// Possible string values are: + /// - "LOCAL_SSD_ENCRYPTION_MODE_UNSPECIFIED" : The given node will be + /// encrypted using keys managed by Google infrastructure and the keys will be + /// deleted when the node is deleted. + /// - "STANDARD_ENCRYPTION" : The given node will be encrypted using keys + /// managed by Google infrastructure and the keys will be deleted when the + /// node is deleted. + /// - "EPHEMERAL_KEY_ENCRYPTION" : The given node will opt-in for using + /// ephemeral key for encryption of Local SSDs. The Local SSDs will not be + /// able to recover data in case of node crash. + core.String? localSsdEncryptionMode; + /// Logging configuration. NodePoolLoggingConfig? loggingConfig; @@ -9283,6 +9359,11 @@ class NodeConfig { /// unspecified, the default machine type is `e2-medium`. core.String? machineType; + /// The maximum duration for the nodes to exist. + /// + /// If unspecified, the nodes can exist indefinitely. + core.String? maxRunDuration; + /// The metadata key/value pairs assigned to instances in the cluster. /// /// Keys must conform to the regexp `[a-zA-Z0-9-_]+` and be less than 128 @@ -9417,8 +9498,10 @@ class NodeConfig { this.linuxNodeConfig, this.localNvmeSsdBlockConfig, this.localSsdCount, + this.localSsdEncryptionMode, this.loggingConfig, this.machineType, + this.maxRunDuration, this.metadata, this.minCpuPlatform, this.nodeGroup, @@ -9506,11 +9589,14 @@ class NodeConfig { as core.Map) : null, localSsdCount: json_['localSsdCount'] as core.int?, + localSsdEncryptionMode: + json_['localSsdEncryptionMode'] as core.String?, loggingConfig: json_.containsKey('loggingConfig') ? 
NodePoolLoggingConfig.fromJson( json_['loggingConfig'] as core.Map) : null, machineType: json_['machineType'] as core.String?, + maxRunDuration: json_['maxRunDuration'] as core.String?, metadata: (json_['metadata'] as core.Map?)?.map( (key, value) => core.MapEntry( @@ -9609,8 +9695,11 @@ class NodeConfig { if (localNvmeSsdBlockConfig != null) 'localNvmeSsdBlockConfig': localNvmeSsdBlockConfig!, if (localSsdCount != null) 'localSsdCount': localSsdCount!, + if (localSsdEncryptionMode != null) + 'localSsdEncryptionMode': localSsdEncryptionMode!, if (loggingConfig != null) 'loggingConfig': loggingConfig!, if (machineType != null) 'machineType': machineType!, + if (maxRunDuration != null) 'maxRunDuration': maxRunDuration!, if (metadata != null) 'metadata': metadata!, if (minCpuPlatform != null) 'minCpuPlatform': minCpuPlatform!, if (nodeGroup != null) 'nodeGroup': nodeGroup!, @@ -10191,6 +10280,11 @@ class NodePool { /// Node pool configs that apply to all auto-provisioned node pools in autopilot /// clusters and node auto-provisioning enabled clusters. class NodePoolAutoConfig { + /// Configuration options for Linux nodes. + /// + /// Output only. + LinuxNodeConfig? linuxNodeConfig; + /// The list of instance tags applied to all nodes. /// /// Tags are used to identify valid sources or targets for network firewalls @@ -10208,6 +10302,7 @@ class NodePoolAutoConfig { ResourceManagerTags? resourceManagerTags; NodePoolAutoConfig({ + this.linuxNodeConfig, this.networkTags, this.nodeKubeletConfig, this.resourceManagerTags, @@ -10215,6 +10310,10 @@ class NodePoolAutoConfig { NodePoolAutoConfig.fromJson(core.Map json_) : this( + linuxNodeConfig: json_.containsKey('linuxNodeConfig') + ? LinuxNodeConfig.fromJson(json_['linuxNodeConfig'] + as core.Map) + : null, networkTags: json_.containsKey('networkTags') ? NetworkTags.fromJson( json_['networkTags'] as core.Map) @@ -10230,6 +10329,7 @@ class NodePoolAutoConfig { ); core.Map toJson() => { + if (linuxNodeConfig != null) 'linuxNodeConfig': linuxNodeConfig!, if (networkTags != null) 'networkTags': networkTags!, if (nodeKubeletConfig != null) 'nodeKubeletConfig': nodeKubeletConfig!, if (resourceManagerTags != null) @@ -10254,29 +10354,30 @@ class NodePoolAutoscaling { /// - "ANY" : ANY policy picks zones that have the highest capacity available. core.String? locationPolicy; - /// Maximum number of nodes for one location in the NodePool. + /// Maximum number of nodes for one location in the node pool. /// /// Must be \>= min_node_count. There has to be enough quota to scale up the /// cluster. core.int? maxNodeCount; - /// Minimum number of nodes for one location in the NodePool. + /// Minimum number of nodes for one location in the node pool. /// - /// Must be \>= 1 and \<= max_node_count. + /// Must be greater than or equal to 0 and less than or equal to + /// max_node_count. core.int? minNodeCount; /// Maximum number of nodes in the node pool. /// - /// Must be greater than total_min_node_count. There has to be enough quota to - /// scale up the cluster. The total_*_node_count fields are mutually exclusive - /// with the *_node_count fields. + /// Must be greater than or equal to total_min_node_count. There has to be + /// enough quota to scale up the cluster. The total_*_node_count fields are + /// mutually exclusive with the *_node_count fields. core.int? totalMaxNodeCount; /// Minimum number of nodes in the node pool. /// - /// Must be greater than 1 less than total_max_node_count. 
The - /// total_*_node_count fields are mutually exclusive with the *_node_count - /// fields. + /// Must be greater than or equal to 0 and less than or equal to + /// total_max_node_count. The total_*_node_count fields are mutually exclusive + /// with the *_node_count fields. core.int? totalMinNodeCount; NodePoolAutoscaling({ @@ -13473,6 +13574,11 @@ class UpdateNodePoolRequest { /// Optional. core.String? machineType; + /// The maximum duration for the nodes to exist. + /// + /// If unspecified, the nodes can exist indefinitely. + core.String? maxRunDuration; + /// The name (project, location, cluster, node pool) of the node pool to /// update. /// @@ -13585,6 +13691,7 @@ class UpdateNodePoolRequest { this.locations, this.loggingConfig, this.machineType, + this.maxRunDuration, this.name, this.nodeNetworkConfig, this.nodePoolId, @@ -13653,6 +13760,7 @@ class UpdateNodePoolRequest { json_['loggingConfig'] as core.Map) : null, machineType: json_['machineType'] as core.String?, + maxRunDuration: json_['maxRunDuration'] as core.String?, name: json_['name'] as core.String?, nodeNetworkConfig: json_.containsKey('nodeNetworkConfig') ? NodeNetworkConfig.fromJson(json_['nodeNetworkConfig'] @@ -13717,6 +13825,7 @@ class UpdateNodePoolRequest { if (locations != null) 'locations': locations!, if (loggingConfig != null) 'loggingConfig': loggingConfig!, if (machineType != null) 'machineType': machineType!, + if (maxRunDuration != null) 'maxRunDuration': maxRunDuration!, if (name != null) 'name': name!, if (nodeNetworkConfig != null) 'nodeNetworkConfig': nodeNetworkConfig!, if (nodePoolId != null) 'nodePoolId': nodePoolId!, @@ -14125,8 +14234,7 @@ class WorkloadMetadataConfig { }; } -/// WorkloadPolicyConfig is the configuration of workload policy for autopilot -/// clusters. +/// WorkloadPolicyConfig is the configuration related to GCW workload policy class WorkloadPolicyConfig { /// If true, workloads can use NET_ADMIN capability. core.bool? allowNetAdmin; diff --git a/generated/googleapis/lib/content/v2_1.dart b/generated/googleapis/lib/content/v2_1.dart index 6d6ecf79d..f23d2731c 100644 --- a/generated/googleapis/lib/content/v2_1.dart +++ b/generated/googleapis/lib/content/v2_1.dart @@ -53,8 +53,6 @@ /// - [ReturnaddressResource] /// - [ReturnpolicyResource] /// - [ReturnpolicyonlineResource] -/// - [SettlementreportsResource] -/// - [SettlementtransactionsResource] /// - [ShippingsettingsResource] /// - [ShoppingadsprogramResource] library; @@ -121,10 +119,6 @@ class ShoppingContentApi { ReturnpolicyResource get returnpolicy => ReturnpolicyResource(_requester); ReturnpolicyonlineResource get returnpolicyonline => ReturnpolicyonlineResource(_requester); - SettlementreportsResource get settlementreports => - SettlementreportsResource(_requester); - SettlementtransactionsResource get settlementtransactions => - SettlementtransactionsResource(_requester); ShippingsettingsResource get shippingsettings => ShippingsettingsResource(_requester); ShoppingadsprogramResource get shoppingadsprogram => @@ -5963,170 +5957,6 @@ class ReturnpolicyonlineResource { } } -class SettlementreportsResource { - final commons.ApiRequester _requester; - - SettlementreportsResource(commons.ApiRequester client) : _requester = client; - - /// Retrieves a settlement report from your Merchant Center account. - /// - /// Request parameters: - /// - /// [merchantId] - The Merchant Center account of the settlement report. - /// - /// [settlementId] - The Google-provided ID of the settlement. 
- /// - /// [$fields] - Selector specifying which fields to include in a partial - /// response. - /// - /// Completes with a [SettlementReport]. - /// - /// Completes with a [commons.ApiRequestError] if the API endpoint returned an - /// error. - /// - /// If the used [http.Client] completes with an error when making a REST call, - /// this method will complete with the same error. - async.Future get( - core.String merchantId, - core.String settlementId, { - core.String? $fields, - }) async { - final queryParams_ = >{ - if ($fields != null) 'fields': [$fields], - }; - - final url_ = commons.escapeVariable('$merchantId') + - '/settlementreports/' + - commons.escapeVariable('$settlementId'); - - final response_ = await _requester.request( - url_, - 'GET', - queryParams: queryParams_, - ); - return SettlementReport.fromJson( - response_ as core.Map); - } - - /// Retrieves a list of settlement reports from your Merchant Center account. - /// - /// Request parameters: - /// - /// [merchantId] - The Merchant Center account to list settlements for. - /// - /// [maxResults] - The maximum number of settlements to return in the - /// response, used for paging. The default value is 200 returns per page, and - /// the maximum allowed value is 5000 returns per page. - /// - /// [pageToken] - The token returned by the previous request. - /// - /// [transferEndDate] - Obtains settlements which have transactions before - /// this date (inclusively), in ISO 8601 format. - /// - /// [transferStartDate] - Obtains settlements which have transactions after - /// this date (inclusively), in ISO 8601 format. - /// - /// [$fields] - Selector specifying which fields to include in a partial - /// response. - /// - /// Completes with a [SettlementreportsListResponse]. - /// - /// Completes with a [commons.ApiRequestError] if the API endpoint returned an - /// error. - /// - /// If the used [http.Client] completes with an error when making a REST call, - /// this method will complete with the same error. - async.Future list( - core.String merchantId, { - core.int? maxResults, - core.String? pageToken, - core.String? transferEndDate, - core.String? transferStartDate, - core.String? $fields, - }) async { - final queryParams_ = >{ - if (maxResults != null) 'maxResults': ['${maxResults}'], - if (pageToken != null) 'pageToken': [pageToken], - if (transferEndDate != null) 'transferEndDate': [transferEndDate], - if (transferStartDate != null) 'transferStartDate': [transferStartDate], - if ($fields != null) 'fields': [$fields], - }; - - final url_ = commons.escapeVariable('$merchantId') + '/settlementreports'; - - final response_ = await _requester.request( - url_, - 'GET', - queryParams: queryParams_, - ); - return SettlementreportsListResponse.fromJson( - response_ as core.Map); - } -} - -class SettlementtransactionsResource { - final commons.ApiRequester _requester; - - SettlementtransactionsResource(commons.ApiRequester client) - : _requester = client; - - /// Retrieves a list of transactions for the settlement. - /// - /// Request parameters: - /// - /// [merchantId] - The Merchant Center account to list transactions for. - /// - /// [settlementId] - The Google-provided ID of the settlement. - /// - /// [maxResults] - The maximum number of transactions to return in the - /// response, used for paging. The default value is 200 transactions per page, - /// and the maximum allowed value is 5000 transactions per page. - /// - /// [pageToken] - The token returned by the previous request. 
- /// - /// [transactionIds] - The list of transactions to return. If not set, all - /// transactions will be returned. - /// - /// [$fields] - Selector specifying which fields to include in a partial - /// response. - /// - /// Completes with a [SettlementtransactionsListResponse]. - /// - /// Completes with a [commons.ApiRequestError] if the API endpoint returned an - /// error. - /// - /// If the used [http.Client] completes with an error when making a REST call, - /// this method will complete with the same error. - async.Future list( - core.String merchantId, - core.String settlementId, { - core.int? maxResults, - core.String? pageToken, - core.List? transactionIds, - core.String? $fields, - }) async { - final queryParams_ = >{ - if (maxResults != null) 'maxResults': ['${maxResults}'], - if (pageToken != null) 'pageToken': [pageToken], - if (transactionIds != null) 'transactionIds': transactionIds, - if ($fields != null) 'fields': [$fields], - }; - - final url_ = commons.escapeVariable('$merchantId') + - '/settlementreports/' + - commons.escapeVariable('$settlementId') + - '/transactions'; - - final response_ = await _requester.request( - url_, - 'GET', - queryParams: queryParams_, - ); - return SettlementtransactionsListResponse.fromJson( - response_ as core.Map); - } -} - class ShippingsettingsResource { final commons.ApiRequester _requester; @@ -14654,6 +14484,15 @@ class LoyaltyProgram { /// Required. core.String? programLabel; + /// The shipping label for the loyalty program. + /// + /// You can use this label to indicate whether this offer has the loyalty + /// shipping benefit. If not specified, the item is not eligible for loyalty + /// shipping for the given loyalty tier. + /// + /// Optional. + core.String? shippingLabel; + /// The label of the tier within the loyalty program. /// /// Must match one of the labels within the program. @@ -14667,6 +14506,7 @@ class LoyaltyProgram { this.memberPriceEffectiveDate, this.price, this.programLabel, + this.shippingLabel, this.tierLabel, }); @@ -14684,6 +14524,7 @@ class LoyaltyProgram { json_['price'] as core.Map) : null, programLabel: json_['programLabel'] as core.String?, + shippingLabel: json_['shippingLabel'] as core.String?, tierLabel: json_['tierLabel'] as core.String?, ); @@ -14695,6 +14536,7 @@ class LoyaltyProgram { 'memberPriceEffectiveDate': memberPriceEffectiveDate!, if (price != null) 'price': price!, if (programLabel != null) 'programLabel': programLabel!, + if (shippingLabel != null) 'shippingLabel': shippingLabel!, if (tierLabel != null) 'tierLabel': tierLabel!, }; } @@ -22878,395 +22720,6 @@ class ServiceStoreConfigCutoffConfigLocalCutoffTime { }; } -/// Settlement reports detail order-level and item-level credits and debits -/// between you and Google. -class SettlementReport { - /// The end date on which all transactions are included in the report, in ISO - /// 8601 format. - core.String? endDate; - - /// Identifies what kind of resource this is. - /// - /// Value: the fixed string "`content#settlementReport`" - core.String? kind; - - /// The residual amount from the previous invoice. - /// - /// This is set only if the previous invoices are not paid because of negative - /// balance. - Price? previousBalance; - - /// The ID of the settlement report. - core.String? settlementId; - - /// The start date on which all transactions are included in the report, in - /// ISO 8601 format. - core.String? startDate; - - /// The money due to the merchant. - Price? 
transferAmount; - - /// Date on which transfer for this payment was initiated by Google, in ISO - /// 8601 format. - core.String? transferDate; - - /// The list of bank identifiers used for the transfer. - /// - /// For example, Trace ID for Federal Automated Clearing House (ACH). This may - /// also be known as the Wire ID. - core.List? transferIds; - - SettlementReport({ - this.endDate, - this.kind, - this.previousBalance, - this.settlementId, - this.startDate, - this.transferAmount, - this.transferDate, - this.transferIds, - }); - - SettlementReport.fromJson(core.Map json_) - : this( - endDate: json_['endDate'] as core.String?, - kind: json_['kind'] as core.String?, - previousBalance: json_.containsKey('previousBalance') - ? Price.fromJson(json_['previousBalance'] - as core.Map) - : null, - settlementId: json_['settlementId'] as core.String?, - startDate: json_['startDate'] as core.String?, - transferAmount: json_.containsKey('transferAmount') - ? Price.fromJson(json_['transferAmount'] - as core.Map) - : null, - transferDate: json_['transferDate'] as core.String?, - transferIds: (json_['transferIds'] as core.List?) - ?.map((value) => value as core.String) - .toList(), - ); - - core.Map toJson() => { - if (endDate != null) 'endDate': endDate!, - if (kind != null) 'kind': kind!, - if (previousBalance != null) 'previousBalance': previousBalance!, - if (settlementId != null) 'settlementId': settlementId!, - if (startDate != null) 'startDate': startDate!, - if (transferAmount != null) 'transferAmount': transferAmount!, - if (transferDate != null) 'transferDate': transferDate!, - if (transferIds != null) 'transferIds': transferIds!, - }; -} - -/// Settlement transactions give a detailed breakdown of the settlement report. -class SettlementTransaction { - /// The amount for the transaction. - SettlementTransactionAmount? amount; - - /// Identifiers of the transaction. - SettlementTransactionIdentifiers? identifiers; - - /// Identifies what kind of resource this is. - /// - /// Value: the fixed string "`content#settlementTransaction`" - core.String? kind; - - /// Details of the transaction. - SettlementTransactionTransaction? transaction; - - SettlementTransaction({ - this.amount, - this.identifiers, - this.kind, - this.transaction, - }); - - SettlementTransaction.fromJson(core.Map json_) - : this( - amount: json_.containsKey('amount') - ? SettlementTransactionAmount.fromJson( - json_['amount'] as core.Map) - : null, - identifiers: json_.containsKey('identifiers') - ? SettlementTransactionIdentifiers.fromJson( - json_['identifiers'] as core.Map) - : null, - kind: json_['kind'] as core.String?, - transaction: json_.containsKey('transaction') - ? SettlementTransactionTransaction.fromJson( - json_['transaction'] as core.Map) - : null, - ); - - core.Map toJson() => { - if (amount != null) 'amount': amount!, - if (identifiers != null) 'identifiers': identifiers!, - if (kind != null) 'kind': kind!, - if (transaction != null) 'transaction': transaction!, - }; -} - -class SettlementTransactionAmount { - SettlementTransactionAmountCommission? commission; - - /// The description of the event. 
- /// - /// Acceptable values are: - "`taxWithhold`" - "`principal`" - - /// "`principalAdjustment`" - "`shippingFee`" - "`merchantRemittedSalesTax`" - - /// "`googleRemittedSalesTax`" - "`merchantCoupon`" - "`merchantCouponTax`" - - /// "`merchantRemittedDisposalTax`" - "`googleRemittedDisposalTax`" - - /// "`merchantRemittedRedemptionFee`" - "`googleRemittedRedemptionFee`" - - /// "`eeeEcoFee`" - "`furnitureEcoFee`" - "`copyPrivateFee`" - - /// "`eeeEcoFeeCommission`" - "`furnitureEcoFeeCommission`" - - /// "`copyPrivateFeeCommission`" - "`principalRefund`" - - /// "`principalRefundTax`" - "`itemCommission`" - "`adjustmentCommission`" - - /// "`shippingFeeCommission`" - "`commissionRefund`" - "`damaged`" - - /// "`damagedOrDefectiveItem`" - "`expiredItem`" - "`faultyItem`" - - /// "`incorrectItemReceived`" - "`itemMissing`" - "`qualityNotExpected`" - - /// "`receivedTooLate`" - "`storePackageMissing`" - "`transitPackageMissing`" - /// - "`unsuccessfulDeliveryUndeliverable`" - "`wrongChargeInStore`" - - /// "`wrongItem`" - "`returns`" - "`undeliverable`" - - /// "`issueRelatedRefundAndReplacementAmountDescription`" - - /// "`refundFromMerchant`" - "`returnLabelShippingFee`" - - /// "`lumpSumCorrection`" - "`pspFee`" - "`principalRefundDoesNotFit`" - - /// "`principalRefundOrderedWrongItem`" - - /// "`principalRefundQualityNotExpected`" - - /// "`principalRefundBetterPriceFound`" - "`principalRefundNoLongerNeeded`" - - /// "`principalRefundChangedMind`" - "`principalRefundReceivedTooLate`" - - /// "`principalRefundIncorrectItemReceived`" - - /// "`principalRefundDamagedOrDefectiveItem`" - - /// "`principalRefundDidNotMatchDescription`" - "`principalRefundExpiredItem`" - core.String? description; - - /// The amount that contributes to the line item price. - Price? transactionAmount; - - /// The type of the amount. - /// - /// Acceptable values are: - "`itemPrice`" - "`orderPrice`" - "`refund`" - - /// "`earlyRefund`" - "`courtesyRefund`" - "`returnRefund`" - - /// "`returnLabelShippingFeeAmount`" - "`lumpSumCorrectionAmount`" - core.String? type; - - SettlementTransactionAmount({ - this.commission, - this.description, - this.transactionAmount, - this.type, - }); - - SettlementTransactionAmount.fromJson(core.Map json_) - : this( - commission: json_.containsKey('commission') - ? SettlementTransactionAmountCommission.fromJson( - json_['commission'] as core.Map) - : null, - description: json_['description'] as core.String?, - transactionAmount: json_.containsKey('transactionAmount') - ? Price.fromJson(json_['transactionAmount'] - as core.Map) - : null, - type: json_['type'] as core.String?, - ); - - core.Map toJson() => { - if (commission != null) 'commission': commission!, - if (description != null) 'description': description!, - if (transactionAmount != null) 'transactionAmount': transactionAmount!, - if (type != null) 'type': type!, - }; -} - -class SettlementTransactionAmountCommission { - /// The category of the commission. 
- /// - /// Acceptable values are: - "`animalsAndPetSupplies`" - - /// "`dogCatFoodAndCatLitter`" - "`apparelAndAccessories`" - - /// "`shoesHandbagsAndSunglasses`" - "`costumesAndAccessories`" - "`jewelry`" - /// - "`watches`" - "`hobbiesArtsAndCrafts`" - "`homeAndGarden`" - - /// "`entertainmentCollectibles`" - "`collectibleCoins`" - - /// "`sportsCollectibles`" - "`sportingGoods`" - "`toysAndGames`" - - /// "`musicalInstruments`" - "`giftCards`" - "`babyAndToddler`" - - /// "`babyFoodWipesAndDiapers`" - "`businessAndIndustrial`" - - /// "`camerasOpticsAndPhotography`" - "`consumerElectronics`" - - /// "`electronicsAccessories`" - "`personalComputers`" - "`videoGameConsoles`" - /// - "`foodAndGrocery`" - "`beverages`" - "`tobaccoProducts`" - "`furniture`" - /// - "`hardware`" - "`buildingMaterials`" - "`tools`" - - /// "`healthAndPersonalCare`" - "`beauty`" - "`householdSupplies`" - - /// "`kitchenAndDining`" - "`majorAppliances`" - "`luggageAndBags`" - - /// "`media`" - "`officeSupplies`" - "`softwareAndVideoGames`" - - /// "`vehiclePartsAndAccessories`" - "`vehicleTiresAndWheels`" - "`vehicles`" - /// - "`everythingElse`" - core.String? category; - - /// Rate of the commission in percentage. - core.String? rate; - - SettlementTransactionAmountCommission({ - this.category, - this.rate, - }); - - SettlementTransactionAmountCommission.fromJson(core.Map json_) - : this( - category: json_['category'] as core.String?, - rate: json_['rate'] as core.String?, - ); - - core.Map toJson() => { - if (category != null) 'category': category!, - if (rate != null) 'rate': rate!, - }; -} - -class SettlementTransactionIdentifiers { - /// The identifier of the adjustments, if it's available. - core.String? adjustmentId; - - /// The merchant provided order ID. - core.String? merchantOrderId; - - /// The identifier of the item. - core.String? orderItemId; - - /// The unique ID of the settlement transaction entry. - core.String? settlementEntryId; - - /// The shipment ids for the item. - core.List? shipmentIds; - - /// The Google transaction ID. - core.String? transactionId; - - SettlementTransactionIdentifiers({ - this.adjustmentId, - this.merchantOrderId, - this.orderItemId, - this.settlementEntryId, - this.shipmentIds, - this.transactionId, - }); - - SettlementTransactionIdentifiers.fromJson(core.Map json_) - : this( - adjustmentId: json_['adjustmentId'] as core.String?, - merchantOrderId: json_['merchantOrderId'] as core.String?, - orderItemId: json_['orderItemId'] as core.String?, - settlementEntryId: json_['settlementEntryId'] as core.String?, - shipmentIds: (json_['shipmentIds'] as core.List?) - ?.map((value) => value as core.String) - .toList(), - transactionId: json_['transactionId'] as core.String?, - ); - - core.Map toJson() => { - if (adjustmentId != null) 'adjustmentId': adjustmentId!, - if (merchantOrderId != null) 'merchantOrderId': merchantOrderId!, - if (orderItemId != null) 'orderItemId': orderItemId!, - if (settlementEntryId != null) 'settlementEntryId': settlementEntryId!, - if (shipmentIds != null) 'shipmentIds': shipmentIds!, - if (transactionId != null) 'transactionId': transactionId!, - }; -} - -class SettlementTransactionTransaction { - /// The time on which the event occurred in ISO 8601 format. - core.String? postDate; - - /// The type of the transaction that occurred. 
- /// - /// Acceptable values are: - "`order`" - "`reversal`" - "`orderRefund`" - - /// "`reversalRefund`" - "`issueRelatedRefundAndReplacement`" - - /// "`returnLabelShippingFeeTransaction`" - - /// "`reversalIssueRelatedRefundAndReplacement`" - - /// "`reversalReturnLabelShippingFeeTransaction`" - - /// "`lumpSumCorrectionTransaction`" - core.String? type; - - SettlementTransactionTransaction({ - this.postDate, - this.type, - }); - - SettlementTransactionTransaction.fromJson(core.Map json_) - : this( - postDate: json_['postDate'] as core.String?, - type: json_['type'] as core.String?, - ); - - core.Map toJson() => { - if (postDate != null) 'postDate': postDate!, - if (type != null) 'type': type!, - }; -} - -class SettlementreportsListResponse { - /// Identifies what kind of resource this is. - /// - /// Value: the fixed string "`content#settlementreportsListResponse`". - core.String? kind; - - /// The token for the retrieval of the next page of returns. - core.String? nextPageToken; - core.List? resources; - - SettlementreportsListResponse({ - this.kind, - this.nextPageToken, - this.resources, - }); - - SettlementreportsListResponse.fromJson(core.Map json_) - : this( - kind: json_['kind'] as core.String?, - nextPageToken: json_['nextPageToken'] as core.String?, - resources: (json_['resources'] as core.List?) - ?.map((value) => SettlementReport.fromJson( - value as core.Map)) - .toList(), - ); - - core.Map toJson() => { - if (kind != null) 'kind': kind!, - if (nextPageToken != null) 'nextPageToken': nextPageToken!, - if (resources != null) 'resources': resources!, - }; -} - -class SettlementtransactionsListResponse { - /// Identifies what kind of resource this is. - /// - /// Value: the fixed string "`content#settlementtransactionsListResponse`". - core.String? kind; - - /// The token for the retrieval of the next page of returns. - core.String? nextPageToken; - core.List? resources; - - SettlementtransactionsListResponse({ - this.kind, - this.nextPageToken, - this.resources, - }); - - SettlementtransactionsListResponse.fromJson(core.Map json_) - : this( - kind: json_['kind'] as core.String?, - nextPageToken: json_['nextPageToken'] as core.String?, - resources: (json_['resources'] as core.List?) - ?.map((value) => SettlementTransaction.fromJson( - value as core.Map)) - .toList(), - ); - - core.Map toJson() => { - if (kind != null) 'kind': kind!, - if (nextPageToken != null) 'nextPageToken': nextPageToken!, - if (resources != null) 'resources': resources!, - }; -} - /// The merchant account's shipping settings. /// /// All methods except getsupportedcarriers and getsupportedholidays require the diff --git a/generated/googleapis/lib/contentwarehouse/v1.dart b/generated/googleapis/lib/contentwarehouse/v1.dart index 57273d9fc..f9f064fc8 100644 --- a/generated/googleapis/lib/contentwarehouse/v1.dart +++ b/generated/googleapis/lib/contentwarehouse/v1.dart @@ -8904,19 +8904,19 @@ typedef GoogleTypeInterval = $Interval; /// Represents an amount of money with its currency type. typedef GoogleTypeMoney = $Money; -/// Represents a postal address, e.g. for postal delivery or payments addresses. +/// Represents a postal address. /// -/// Given a postal address, a postal service can deliver items to a premise, -/// P.O. Box or similar. It is not intended to model geographical locations -/// (roads, towns, mountains). In typical usage an address would be created via -/// user input or from importing existing data, depending on the type of -/// process. 
Advice on address input / editing: - Use an -/// internationalization-ready address widget such as -/// https://github.com/google/libaddressinput) - Users should not be presented -/// with UI elements for input or editing of fields outside countries where that -/// field is used. For more guidance on how to use this schema, please see: +/// For example for postal delivery or payments addresses. Given a postal +/// address, a postal service can deliver items to a premise, P.O. Box or +/// similar. It is not intended to model geographical locations (roads, towns, +/// mountains). In typical usage an address would be created by user input or +/// from importing existing data, depending on the type of process. Advice on +/// address input / editing: - Use an internationalization-ready address widget +/// such as https://github.com/google/libaddressinput) - Users should not be +/// presented with UI elements for input or editing of fields outside countries +/// where that field is used. For more guidance on how to use this schema, see: /// https://support.google.com/business/answer/6397478 -typedef GoogleTypePostalAddress = $PostalAddress; +typedef GoogleTypePostalAddress = $PostalAddress00; /// Represents a time zone from the /// [IANA Time Zone Database](https://www.iana.org/time-zones). diff --git a/generated/googleapis/lib/css/v1.dart b/generated/googleapis/lib/css/v1.dart index 858bb3ba2..a79bc7d02 100644 --- a/generated/googleapis/lib/css/v1.dart +++ b/generated/googleapis/lib/css/v1.dart @@ -128,8 +128,8 @@ class AccountsResource { /// /// [pageSize] - Optional. The maximum number of accounts to return. The /// service may return fewer than this value. If unspecified, at most 50 - /// accounts will be returned. The maximum value is 1000; values above 1000 - /// will be coerced to 1000. + /// accounts will be returned. The maximum value is 100; values above 100 will + /// be coerced to 100. /// /// [pageToken] - Optional. A page token, received from a previous /// `ListChildAccounts` call. Provide this to retrieve the subsequent page. @@ -282,7 +282,7 @@ class AccountsCssProductInputsResource { /// Format: accounts/{account} /// Value must have pattern `^accounts/\[^/\]+$`. /// - /// [feedId] - Required. The primary or supplemental feed id. If CSS Product + /// [feedId] - Optional. The primary or supplemental feed id. If CSS Product /// already exists and feed id provided is different, then the CSS Product /// will be moved to a new feed. Note: For now, CSSs do not need to provide /// feed ids as we create feeds on the fly. We do not have supplemental feed @@ -508,7 +508,7 @@ class AccountsLabelsResource { return Empty.fromJson(response_ as core.Map); } - /// Lists the labels assigned to an account. + /// Lists the labels owned by an account. /// /// Request parameters: /// @@ -841,17 +841,17 @@ class Attributes { /// Mobile Link to the headline offer. core.String? headlineOfferMobileLink; - /// Headline Price of the aggregate offer. + /// Headline Price of the CSS Product. Price? headlineOfferPrice; - /// Headline Price of the aggregate offer. + /// Headline Price of the CSS Product. Price? headlineOfferShippingPrice; /// Number of periods (months or years) and amount of payment per period for /// an item with an associated subscription contract. HeadlineOfferSubscriptionCost? headlineOfferSubscriptionCost; - /// High Price of the aggregate offer. + /// High Price of the CSS Product. Price? highPrice; /// URL of an image of the item. 
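// Usage sketch for the css/v1 changes above: with `feedId` now documented as
// Optional, a CssProductInput can be inserted without one. This is only a
// sketch against the generated library; field names such as `rawProvidedId`,
// `contentLanguage`, `feedLabel`, `title`, `amountMicros`, and the account id
// used below are assumptions, not values taken from this diff.
import 'package:googleapis/css/v1.dart' as css;
import 'package:http/http.dart' as http;

Future<css.CssProductInput> insertHeadlineOffer(http.Client authedClient) {
  final api = css.CssApi(authedClient);
  final input = css.CssProductInput(
    rawProvidedId: 'tag:example.com,2024:offer-123', // assumed field name
    contentLanguage: 'en',
    feedLabel: 'US',
    attributes: css.Attributes(
      title: 'Example product', // assumed field name
      headlineOfferPrice:
          css.Price(amountMicros: '9990000', currencyCode: 'USD'),
    ),
  );
  // `feedId:` is simply omitted, matching the Optional annotation above.
  return api.accounts.cssProductInputs.insert(input, 'accounts/123');
}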
@@ -873,7 +873,7 @@ class Attributes { /// Shared identifier for all variants of the same product. core.String? itemGroupId; - /// Low Price of the aggregate offer. + /// Low Price of the CSS Product. Price? lowPrice; /// The material of which the item is made. @@ -887,7 +887,7 @@ class Attributes { /// The number of identical products in a merchant-defined multipack. core.String? multipack; - /// The number of aggregate offers. + /// The number of CSS Products. core.String? numberOfOffers; /// The item's pattern (e.g. polka dots). @@ -1220,7 +1220,7 @@ class Certification { }; } -/// The processed CSS Product(a.k.a Aggregate Offer internally). +/// The processed CSS Product. class CssProduct { /// A list of product attributes. /// @@ -1355,6 +1355,9 @@ class CssProductInput { /// will not be prevented and the last update time will default to when this /// request was received by the CSS API. If the operation is prevented, the /// aborted exception will be thrown. + @core.Deprecated( + 'Not supported. Member documentation may have more information.', + ) core.String? freshnessTime; /// The name of the CSS Product input. @@ -1510,18 +1513,18 @@ class CustomAttribute { /// The destination status of the product status. class DestinationStatus { - /// List of country codes (ISO 3166-1 alpha-2) where the aggregate offer is + /// List of country codes (ISO 3166-1 alpha-2) where the CSS Product is /// approved. core.List? approvedCountries; /// The name of the destination core.String? destination; - /// List of country codes (ISO 3166-1 alpha-2) where the aggregate offer is + /// List of country codes (ISO 3166-1 alpha-2) where the CSS Product is /// disapproved. core.List? disapprovedCountries; - /// List of country codes (ISO 3166-1 alpha-2) where the aggregate offer is + /// List of country codes (ISO 3166-1 alpha-2) where the CSS Product is /// pending approval. core.List? pendingCountries; @@ -1643,8 +1646,8 @@ class HeadlineOfferSubscriptionCost { /// The ItemLevelIssue of the product status. class ItemLevelIssue { - /// List of country codes (ISO 3166-1 alpha-2) where issue applies to the - /// aggregate offer. + /// List of country codes (ISO 3166-1 alpha-2) where issue applies to the CSS + /// Product. core.List? applicableCountries; /// The attribute's name, if the issue is caused by a single attribute. @@ -1668,7 +1671,7 @@ class ItemLevelIssue { /// Whether the issue can be resolved by the merchant. core.String? resolution; - /// How this issue affects serving of the aggregate offer. + /// How this issue affects serving of the CSS Product. core.String? 
servability; ItemLevelIssue({ diff --git a/generated/googleapis/lib/datacatalog/v1.dart b/generated/googleapis/lib/datacatalog/v1.dart index 88433c748..aade0ae86 100644 --- a/generated/googleapis/lib/datacatalog/v1.dart +++ b/generated/googleapis/lib/datacatalog/v1.dart @@ -23,6 +23,8 @@ /// /// - [CatalogResource] /// - [EntriesResource] +/// - [OrganizationsResource] +/// - [OrganizationsLocationsResource] /// - [ProjectsResource] /// - [ProjectsLocationsResource] /// - [ProjectsLocationsEntryGroupsResource] @@ -62,6 +64,7 @@ class DataCatalogApi { CatalogResource get catalog => CatalogResource(_requester); EntriesResource get entries => EntriesResource(_requester); + OrganizationsResource get organizations => OrganizationsResource(_requester); ProjectsResource get projects => ProjectsResource(_requester); DataCatalogApi(http.Client client, @@ -211,6 +214,145 @@ class EntriesResource { } } +class OrganizationsResource { + final commons.ApiRequester _requester; + + OrganizationsLocationsResource get locations => + OrganizationsLocationsResource(_requester); + + OrganizationsResource(commons.ApiRequester client) : _requester = client; +} + +class OrganizationsLocationsResource { + final commons.ApiRequester _requester; + + OrganizationsLocationsResource(commons.ApiRequester client) + : _requester = client; + + /// Retrieves the configuration related to the migration from Data Catalog to + /// Dataplex for a specific organization, including all the projects under it + /// which have a separate configuration set. + /// + /// Request parameters: + /// + /// [name] - Required. The organization whose config is being retrieved. + /// Value must have pattern `^organizations/\[^/\]+/locations/\[^/\]+$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [GoogleCloudDatacatalogV1OrganizationConfig]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future retrieveConfig( + core.String name, { + core.String? $fields, + }) async { + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name') + ':retrieveConfig'; + + final response_ = await _requester.request( + url_, + 'GET', + queryParams: queryParams_, + ); + return GoogleCloudDatacatalogV1OrganizationConfig.fromJson( + response_ as core.Map); + } + + /// Retrieves the effective configuration related to the migration from Data + /// Catalog to Dataplex for a specific organization or project. + /// + /// If there is no specific configuration set for the resource, the setting is + /// checked hierarchicahlly through the ancestors of the resource, starting + /// from the resource itself. + /// + /// Request parameters: + /// + /// [name] - Required. The resource whose effective config is being retrieved. + /// Value must have pattern `^organizations/\[^/\]+/locations/\[^/\]+$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [GoogleCloudDatacatalogV1MigrationConfig]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. 
+ async.Future retrieveEffectiveConfig( + core.String name, { + core.String? $fields, + }) async { + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = + 'v1/' + core.Uri.encodeFull('$name') + ':retrieveEffectiveConfig'; + + final response_ = await _requester.request( + url_, + 'GET', + queryParams: queryParams_, + ); + return GoogleCloudDatacatalogV1MigrationConfig.fromJson( + response_ as core.Map); + } + + /// Sets the configuration related to the migration to Dataplex for an + /// organization or project. + /// + /// [request] - The metadata request object. + /// + /// Request parameters: + /// + /// [name] - Required. The organization or project whose config is being + /// specified. + /// Value must have pattern `^organizations/\[^/\]+/locations/\[^/\]+$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [GoogleCloudDatacatalogV1MigrationConfig]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future setConfig( + GoogleCloudDatacatalogV1SetConfigRequest request, + core.String name, { + core.String? $fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name') + ':setConfig'; + + final response_ = await _requester.request( + url_, + 'POST', + body: body_, + queryParams: queryParams_, + ); + return GoogleCloudDatacatalogV1MigrationConfig.fromJson( + response_ as core.Map); + } +} + class ProjectsResource { final commons.ApiRequester _requester; @@ -233,6 +375,91 @@ class ProjectsLocationsResource { ProjectsLocationsTaxonomiesResource(_requester); ProjectsLocationsResource(commons.ApiRequester client) : _requester = client; + + /// Retrieves the effective configuration related to the migration from Data + /// Catalog to Dataplex for a specific organization or project. + /// + /// If there is no specific configuration set for the resource, the setting is + /// checked hierarchicahlly through the ancestors of the resource, starting + /// from the resource itself. + /// + /// Request parameters: + /// + /// [name] - Required. The resource whose effective config is being retrieved. + /// Value must have pattern `^projects/\[^/\]+/locations/\[^/\]+$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [GoogleCloudDatacatalogV1MigrationConfig]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future retrieveEffectiveConfig( + core.String name, { + core.String? $fields, + }) async { + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = + 'v1/' + core.Uri.encodeFull('$name') + ':retrieveEffectiveConfig'; + + final response_ = await _requester.request( + url_, + 'GET', + queryParams: queryParams_, + ); + return GoogleCloudDatacatalogV1MigrationConfig.fromJson( + response_ as core.Map); + } + + /// Sets the configuration related to the migration to Dataplex for an + /// organization or project. + /// + /// [request] - The metadata request object. 
+ /// + /// Request parameters: + /// + /// [name] - Required. The organization or project whose config is being + /// specified. + /// Value must have pattern `^projects/\[^/\]+/locations/\[^/\]+$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [GoogleCloudDatacatalogV1MigrationConfig]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future setConfig( + GoogleCloudDatacatalogV1SetConfigRequest request, + core.String name, { + core.String? $fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name') + ':setConfig'; + + final response_ = await _requester.request( + url_, + 'POST', + body: body_, + queryParams: queryParams_, + ); + return GoogleCloudDatacatalogV1MigrationConfig.fromJson( + response_ as core.Map); + } } class ProjectsLocationsEntryGroupsResource { @@ -1723,8 +1950,8 @@ class ProjectsLocationsOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// Request parameters: /// @@ -4966,11 +5193,22 @@ class GoogleCloudDatacatalogV1EntryGroup { /// specified in its name. core.String? name; + /// When set to \[true\], it means DataCatalog EntryGroup was transferred to + /// Dataplex Catalog Service. + /// + /// It makes EntryGroup and its Entries to be read-only in DataCatalog. + /// However, new Tags on EntryGroup and its Entries can be created. After + /// setting the flag to \[true\] it cannot be unset. + /// + /// Optional. + core.bool? transferredToDataplex; + GoogleCloudDatacatalogV1EntryGroup({ this.dataCatalogTimestamps, this.description, this.displayName, this.name, + this.transferredToDataplex, }); GoogleCloudDatacatalogV1EntryGroup.fromJson(core.Map json_) @@ -4983,6 +5221,7 @@ class GoogleCloudDatacatalogV1EntryGroup { description: json_['description'] as core.String?, displayName: json_['displayName'] as core.String?, name: json_['name'] as core.String?, + transferredToDataplex: json_['transferredToDataplex'] as core.bool?, ); core.Map toJson() => { @@ -4991,6 +5230,8 @@ class GoogleCloudDatacatalogV1EntryGroup { if (description != null) 'description': description!, if (displayName != null) 'displayName': displayName!, if (name != null) 'name': name!, + if (transferredToDataplex != null) + 'transferredToDataplex': transferredToDataplex!, }; } @@ -5602,6 +5843,12 @@ class GoogleCloudDatacatalogV1LookerSystemSpec { }; } +/// The configuration related to the migration to Dataplex applied to an +/// organization or project. +/// +/// It is the response message for SetConfig and RetrieveEffectiveConfig. +typedef GoogleCloudDatacatalogV1MigrationConfig = $Shared11; + /// Specification that applies to a model. /// /// Valid only for entries with the `MODEL` type. 
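// Usage sketch for the Data Catalog -> Dataplex migration-config surface added
// in this file: `retrieveConfig` on an organization location plus the
// hierarchically resolved `retrieveEffectiveConfig`. Method names, resource
// getters, and return types mirror the generated code above; the
// organization/location values are placeholder assumptions.
import 'package:googleapis/datacatalog/v1.dart' as dc;
import 'package:http/http.dart' as http;

Future<void> inspectMigrationConfig(http.Client authedClient) async {
  final api = dc.DataCatalogApi(authedClient);
  const name = 'organizations/123456/locations/us-central1';

  // Organization-wide view, including projects that set their own config.
  final orgConfig = await api.organizations.locations.retrieveConfig(name);
  print(orgConfig.config?.keys);

  // Effective config for this resource, resolved through its ancestors.
  final effective =
      await api.organizations.locations.retrieveEffectiveConfig(name);
  print(effective.toJson());
}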
@@ -5675,6 +5922,38 @@ class GoogleCloudDatacatalogV1ModifyEntryOverviewRequest { }; } +/// The configuration related to the migration from Data Catalog to Dataplex +/// that has been applied to an organization and any projects under it. +/// +/// It is the response message for RetrieveConfig. +class GoogleCloudDatacatalogV1OrganizationConfig { + /// Map of organizations and project resource names and their configuration. + /// + /// The format for the map keys is `organizations/{organizationId}` or + /// `projects/{projectId}`. + core.Map? config; + + GoogleCloudDatacatalogV1OrganizationConfig({ + this.config, + }); + + GoogleCloudDatacatalogV1OrganizationConfig.fromJson(core.Map json_) + : this( + config: + (json_['config'] as core.Map?)?.map( + (key, value) => core.MapEntry( + key, + GoogleCloudDatacatalogV1MigrationConfig.fromJson( + value as core.Map), + ), + ), + ); + + core.Map toJson() => { + if (config != null) 'config': config!, + }; +} + /// Entry metadata relevant only to the user and private to them. class GoogleCloudDatacatalogV1PersonalDetails { /// Set if the entry is starred; unset otherwise. @@ -6644,6 +6923,9 @@ class GoogleCloudDatacatalogV1ServiceSpec { }; } +/// Request message for SetConfig. +typedef GoogleCloudDatacatalogV1SetConfigRequest = $Shared11; + /// Specification that applies to entries that are part `SQL_DATABASE` system /// (user_specified_type) class GoogleCloudDatacatalogV1SqlDatabaseSystemSpec { @@ -6810,6 +7092,21 @@ class GoogleCloudDatacatalogV1Tag { /// dot (`.`). Example: `column.nested_column`. core.String? column; + /// Denotes the transfer status of the Tag Template. + /// + /// Output only. + /// Possible string values are: + /// - "DATAPLEX_TRANSFER_STATUS_UNSPECIFIED" : Default value. TagTemplate and + /// its tags are only visible and editable in DataCatalog. + /// - "MIGRATED" : TagTemplate and its tags are auto-copied to Dataplex + /// service. Visible in both services. Editable in DataCatalog, read-only in + /// Dataplex. Deprecated: Individual TagTemplate migration is deprecated in + /// favor of organization or project wide TagTemplate migration opt-in. + /// - "TRANSFERRED" : TagTemplate and its tags are auto-copied to Dataplex + /// service. Visible in both services. Editable in Dataplex, read-only in + /// DataCatalog. + core.String? dataplexTransferStatus; + /// Maps the ID of a tag field to its value and additional information about /// that field. /// @@ -6842,6 +7139,7 @@ class GoogleCloudDatacatalogV1Tag { GoogleCloudDatacatalogV1Tag({ this.column, + this.dataplexTransferStatus, this.fields, this.name, this.template, @@ -6851,6 +7149,8 @@ class GoogleCloudDatacatalogV1Tag { GoogleCloudDatacatalogV1Tag.fromJson(core.Map json_) : this( column: json_['column'] as core.String?, + dataplexTransferStatus: + json_['dataplexTransferStatus'] as core.String?, fields: (json_['fields'] as core.Map?)?.map( (key, value) => core.MapEntry( @@ -6866,6 +7166,8 @@ class GoogleCloudDatacatalogV1Tag { core.Map toJson() => { if (column != null) 'column': column!, + if (dataplexTransferStatus != null) + 'dataplexTransferStatus': dataplexTransferStatus!, if (fields != null) 'fields': fields!, if (name != null) 'name': name!, if (template != null) 'template': template!, @@ -6993,6 +7295,9 @@ class GoogleCloudDatacatalogV1TagTemplate { /// service. Visible in both services. Editable in DataCatalog, read-only in /// Dataplex. 
Deprecated: Individual TagTemplate migration is deprecated in /// favor of organization or project wide TagTemplate migration opt-in. + /// - "TRANSFERRED" : TagTemplate and its tags are auto-copied to Dataplex + /// service. Visible in both services. Editable in Dataplex, read-only in + /// DataCatalog. core.String? dataplexTransferStatus; /// Display name for this template. diff --git a/generated/googleapis/lib/datafusion/v1.dart b/generated/googleapis/lib/datafusion/v1.dart index f05ba5f62..90f30bcc3 100644 --- a/generated/googleapis/lib/datafusion/v1.dart +++ b/generated/googleapis/lib/datafusion/v1.dart @@ -778,8 +778,8 @@ class ProjectsLocationsOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// [request] - The metadata request object. /// @@ -1228,6 +1228,8 @@ class CryptoKeyConfig { /// /// For key in Cloud KMS, the key should be in the format of `projects / * /// /locations / * /keyRings / * /cryptoKeys / * `. + /// + /// Optional. core.String? keyReference; CryptoKeyConfig({ @@ -1259,12 +1261,10 @@ class DnsPeering { /// Required. core.String? domain; - /// The resource name of the dns peering zone. + /// Identifier. /// - /// Format: + /// The resource name of the dns peering zone. Format: /// projects/{project}/locations/{location}/instances/{instance}/dnsPeerings/{dns_peering} - /// - /// Required. core.String? name; /// Optional target network to which dns peering should happen. @@ -1388,6 +1388,8 @@ class Instance { /// The crypto key configuration. /// /// This field is used by the Customer-Managed Encryption Keys (CMEK) feature. + /// + /// Optional. CryptoKeyConfig? cryptoKeyConfig; /// Option to enable the Dataplex Lineage Integration feature. @@ -1400,9 +1402,13 @@ class Instance { /// /// This allows users to have fine-grained access control on Dataproc's /// accesses to cloud resources. + /// + /// Optional. core.String? dataprocServiceAccount; /// A description of this instance. + /// + /// Optional. core.String? description; /// If the instance state is DISABLED, the reason for disabling the instance. @@ -1411,21 +1417,33 @@ class Instance { core.List? disabledReason; /// Display name for an instance. + /// + /// Optional. core.String? displayName; /// Option to enable granular role-based access control. + /// + /// Optional. core.bool? enableRbac; /// Option to enable Stackdriver Logging. + /// + /// Optional. core.bool? enableStackdriverLogging; /// Option to enable Stackdriver Monitoring. + /// + /// Optional. core.bool? enableStackdriverMonitoring; /// Option to enable granular zone separation. + /// + /// Output only. core.bool? enableZoneSeparation; /// Option to enable and pass metadata for event publishing. + /// + /// Optional. EventPublishConfig? eventPublishConfig; /// Cloud Storage bucket generated by Data Fusion in the customer project. @@ -1453,6 +1471,8 @@ class Instance { /// Network configuration options. /// /// These are required when a private Data Fusion instance is to be created. + /// + /// Optional. NetworkConfig? 
networkConfig; /// Map of additional options used to configure the behavior of Data Fusion @@ -1473,6 +1493,8 @@ class Instance { /// /// If set to true, all Data Fusion nodes will have private IP addresses and /// will not be able to access the public internet. + /// + /// Optional. core.bool? privateInstance; /// Reserved for future use. @@ -1548,6 +1570,8 @@ class Instance { /// Current version of the Data Fusion. /// /// Only specifiable in Update. + /// + /// Optional. core.String? version; /// Endpoint on which the Data Fusion UI is accessible to third-party users @@ -1558,6 +1582,8 @@ class Instance { /// Name of the zone in which the Data Fusion instance will be created. /// /// Only DEVELOPER instances use this field. + /// + /// Optional. core.String? zone; Instance({ @@ -1724,15 +1750,24 @@ class Instance { /// Response message for the list available versions request. class ListAvailableVersionsResponse { /// Represents a list of versions that are supported. + /// + /// Deprecated: Use versions field instead. + @core.Deprecated( + 'Not supported. Member documentation may have more information.', + ) core.List? availableVersions; /// Token to retrieve the next page of results or empty if there are no more /// results in the list. core.String? nextPageToken; + /// Represents a list of all versions. + core.List? versions; + ListAvailableVersionsResponse({ this.availableVersions, this.nextPageToken, + this.versions, }); ListAvailableVersionsResponse.fromJson(core.Map json_) @@ -1742,11 +1777,16 @@ class ListAvailableVersionsResponse { value as core.Map)) .toList(), nextPageToken: json_['nextPageToken'] as core.String?, + versions: (json_['versions'] as core.List?) + ?.map((value) => Version.fromJson( + value as core.Map)) + .toList(), ); core.Map toJson() => { if (availableVersions != null) 'availableVersions': availableVersions!, if (nextPageToken != null) 'nextPageToken': nextPageToken!, + if (versions != null) 'versions': versions!, }; } @@ -2421,6 +2461,7 @@ class Version { /// - "TYPE_UNSPECIFIED" : Version does not have availability yet /// - "TYPE_PREVIEW" : Version is under development and not considered stable /// - "TYPE_GENERAL_AVAILABILITY" : Version is available for public use + /// - "TYPE_DEPRECATED" : Version is no longer supported. core.String? type; /// The version number of the Data Fusion instance, such as '6.0.1.0'. diff --git a/generated/googleapis/lib/datalineage/v1.dart b/generated/googleapis/lib/datalineage/v1.dart index 72610069d..359bb2329 100644 --- a/generated/googleapis/lib/datalineage/v1.dart +++ b/generated/googleapis/lib/datalineage/v1.dart @@ -242,8 +242,8 @@ class ProjectsLocationsOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// [request] - The metadata request object. /// @@ -1470,8 +1470,9 @@ class GoogleCloudDatacatalogLineageV1Origin { /// Type of the source. /// /// Use of a source_type other than `CUSTOM` for process creation or updating - /// is highly discouraged. It may cause additional billing costs and be - /// restricted in the future without notice. + /// is highly discouraged. 
It might be restricted in the future without + /// notice. There will be increase in cost if you use any of the source types + /// other than `CUSTOM`. /// Possible string values are: /// - "SOURCE_TYPE_UNSPECIFIED" : Source is Unspecified /// - "CUSTOM" : A custom source diff --git a/generated/googleapis/lib/datamigration/v1.dart b/generated/googleapis/lib/datamigration/v1.dart index 8aefb6f45..89eb9ddcd 100644 --- a/generated/googleapis/lib/datamigration/v1.dart +++ b/generated/googleapis/lib/datamigration/v1.dart @@ -1930,6 +1930,47 @@ class ProjectsLocationsMigrationJobsResource { return Operation.fromJson(response_ as core.Map); } + /// Retrieves objects from the source database that can be selected for data + /// migration. + /// + /// This is applicable for the following migrations: 1. PostgreSQL to Cloud + /// SQL for PostgreSQL 2. PostgreSQL to AlloyDB for PostgreSQL. + /// + /// Request parameters: + /// + /// [name] - Required. The resource name for the migration job for which + /// source objects should be returned. + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/migrationJobs/\[^/\]+$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [Operation]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future fetchSourceObjects( + core.String name, { + core.String? $fields, + }) async { + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name') + ':fetchSourceObjects'; + + final response_ = await _requester.request( + url_, + 'GET', + queryParams: queryParams_, + ); + return Operation.fromJson(response_ as core.Map); + } + /// Generate a SSH configuration script to configure the reverse SSH /// connectivity. /// @@ -2594,6 +2635,43 @@ class ProjectsLocationsMigrationJobsObjectsResource { ProjectsLocationsMigrationJobsObjectsResource(commons.ApiRequester client) : _requester = client; + /// Use this method to get details about a migration job object. + /// + /// Request parameters: + /// + /// [name] - Required. The name of the migration job object resource to get. + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/migrationJobs/\[^/\]+/objects/\[^/\]+$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [MigrationJobObject]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future get( + core.String name, { + core.String? $fields, + }) async { + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name'); + + final response_ = await _requester.request( + url_, + 'GET', + queryParams: queryParams_, + ); + return MigrationJobObject.fromJson( + response_ as core.Map); + } + /// Gets the access control policy for a resource. 
/// /// Returns an empty policy if the resource exists and does not have a policy @@ -2651,6 +2729,101 @@ class ProjectsLocationsMigrationJobsObjectsResource { return Policy.fromJson(response_ as core.Map); } + /// Use this method to list the objects of a specific migration job. + /// + /// Request parameters: + /// + /// [parent] - Required. The parent migration job that owns the collection of + /// objects. + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/migrationJobs/\[^/\]+$`. + /// + /// [pageSize] - Maximum number of objects to return. Default is 50. The + /// maximum value is 1000; values above 1000 will be coerced to 1000. + /// + /// [pageToken] - Page token received from a previous + /// `ListMigrationJObObjectsRequest` call. Provide this to retrieve the + /// subsequent page. When paginating, all other parameters provided to + /// `ListMigrationJobObjectsRequest` must match the call that provided the + /// page token. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [ListMigrationJobObjectsResponse]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future list( + core.String parent, { + core.int? pageSize, + core.String? pageToken, + core.String? $fields, + }) async { + final queryParams_ = >{ + if (pageSize != null) 'pageSize': ['${pageSize}'], + if (pageToken != null) 'pageToken': [pageToken], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/objects'; + + final response_ = await _requester.request( + url_, + 'GET', + queryParams: queryParams_, + ); + return ListMigrationJobObjectsResponse.fromJson( + response_ as core.Map); + } + + /// Use this method to look up a migration job object by its source object + /// identifier. + /// + /// [request] - The metadata request object. + /// + /// Request parameters: + /// + /// [parent] - Required. The parent migration job that owns the collection of + /// objects. + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/migrationJobs/\[^/\]+$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [MigrationJobObject]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future lookup( + LookupMigrationJobObjectRequest request, + core.String parent, { + core.String? $fields, + }) async { + final body_ = convert_1.json.encode(request); + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/objects:lookup'; + + final response_ = await _requester.request( + url_, + 'POST', + body: body_, + queryParams: queryParams_, + ); + return MigrationJobObject.fromJson( + response_ as core.Map); + } + /// Sets the access control policy on the specified resource. /// /// Replaces any existing policy. Can return `NOT_FOUND`, `INVALID_ARGUMENT`, @@ -2764,8 +2937,8 @@ class ProjectsLocationsOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. 
On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// [request] - The metadata request object. /// @@ -6283,6 +6456,35 @@ class ListMappingRulesResponse { }; } +/// Response containing the objects for a migration job. +class ListMigrationJobObjectsResponse { + /// List of migration job objects. + core.List? migrationJobObjects; + + /// A token, which can be sent as `page_token` to retrieve the next page. + core.String? nextPageToken; + + ListMigrationJobObjectsResponse({ + this.migrationJobObjects, + this.nextPageToken, + }); + + ListMigrationJobObjectsResponse.fromJson(core.Map json_) + : this( + migrationJobObjects: (json_['migrationJobObjects'] as core.List?) + ?.map((value) => MigrationJobObject.fromJson( + value as core.Map)) + .toList(), + nextPageToken: json_['nextPageToken'] as core.String?, + ); + + core.Map toJson() => { + if (migrationJobObjects != null) + 'migrationJobObjects': migrationJobObjects!, + if (nextPageToken != null) 'nextPageToken': nextPageToken!, + }; +} + /// Response message for 'ListMigrationJobs' request. class ListMigrationJobsResponse { /// The list of migration jobs objects. @@ -6392,39 +6594,37 @@ class ListPrivateConnectionsResponse { typedef Location = $Location00; /// Configuration to specify the Oracle directories to access the log files. -class LogFileDirectories { - /// Oracle directory for archived logs. - /// - /// Required. - core.String? archivedLogDirectory; +typedef LogFileDirectories = $LogFileDirectories; + +/// Configuration to use LogMiner CDC method. +typedef LogMiner = $Empty; - /// Oracle directory for online logs. +/// Request for looking up a specific migration job object by its source object +/// identifier. +class LookupMigrationJobObjectRequest { + /// The source object identifier which maps to the migration job object. /// /// Required. - core.String? onlineLogDirectory; + SourceObjectIdentifier? sourceObjectIdentifier; - LogFileDirectories({ - this.archivedLogDirectory, - this.onlineLogDirectory, + LookupMigrationJobObjectRequest({ + this.sourceObjectIdentifier, }); - LogFileDirectories.fromJson(core.Map json_) + LookupMigrationJobObjectRequest.fromJson(core.Map json_) : this( - archivedLogDirectory: json_['archivedLogDirectory'] as core.String?, - onlineLogDirectory: json_['onlineLogDirectory'] as core.String?, + sourceObjectIdentifier: json_.containsKey('sourceObjectIdentifier') + ? SourceObjectIdentifier.fromJson(json_['sourceObjectIdentifier'] + as core.Map) + : null, ); core.Map toJson() => { - if (archivedLogDirectory != null) - 'archivedLogDirectory': archivedLogDirectory!, - if (onlineLogDirectory != null) - 'onlineLogDirectory': onlineLogDirectory!, + if (sourceObjectIdentifier != null) + 'sourceObjectIdentifier': sourceObjectIdentifier!, }; } -/// Configuration to use LogMiner CDC method. -typedef LogMiner = $Empty; - /// MachineConfig describes the configuration of a machine. typedef MachineConfig = $MachineConfig; @@ -6756,11 +6956,15 @@ class MaterializedViewEntity { /// `String`, `bool` and `null` as well as `Map` and `List` values. core.Map? customFeatures; + /// View indices. + core.List? indices; + /// The SQL code which creates the view. core.String? 
sqlCode; MaterializedViewEntity({ this.customFeatures, + this.indices, this.sqlCode, }); @@ -6769,11 +6973,16 @@ class MaterializedViewEntity { customFeatures: json_.containsKey('customFeatures') ? json_['customFeatures'] as core.Map : null, + indices: (json_['indices'] as core.List?) + ?.map((value) => IndexEntity.fromJson( + value as core.Map)) + .toList(), sqlCode: json_['sqlCode'] as core.String?, ); core.Map toJson() => { if (customFeatures != null) 'customFeatures': customFeatures!, + if (indices != null) 'indices': indices!, if (sqlCode != null) 'sqlCode': sqlCode!, }; } @@ -6874,6 +7083,11 @@ class MigrationJob { /// projects/{project}/locations/{location}/migrationJobs/{migrationJob}. core.String? name; + /// The objects that need to be migrated. + /// + /// Optional. + MigrationJobObjectsConfig? objectsConfig; + /// Configuration for heterogeneous **Oracle to Cloud SQL for PostgreSQL** and /// **Oracle to AlloyDB for PostgreSQL** migrations. OracleToPostgresConfig? oracleToPostgresConfig; @@ -6976,6 +7190,7 @@ class MigrationJob { this.filter, this.labels, this.name, + this.objectsConfig, this.oracleToPostgresConfig, this.performanceConfig, this.phase, @@ -7025,6 +7240,10 @@ class MigrationJob { ), ), name: json_['name'] as core.String?, + objectsConfig: json_.containsKey('objectsConfig') + ? MigrationJobObjectsConfig.fromJson( + json_['objectsConfig'] as core.Map) + : null, oracleToPostgresConfig: json_.containsKey('oracleToPostgresConfig') ? OracleToPostgresConfig.fromJson(json_['oracleToPostgresConfig'] as core.Map) @@ -7080,6 +7299,7 @@ class MigrationJob { if (filter != null) 'filter': filter!, if (labels != null) 'labels': labels!, if (name != null) 'name': name!, + if (objectsConfig != null) 'objectsConfig': objectsConfig!, if (oracleToPostgresConfig != null) 'oracleToPostgresConfig': oracleToPostgresConfig!, if (performanceConfig != null) 'performanceConfig': performanceConfig!, @@ -7101,6 +7321,119 @@ class MigrationJob { }; } +/// A specific Migration Job Object (e.g. a specifc DB Table) +class MigrationJobObject { + /// The creation time of the migration job object. + /// + /// Output only. + core.String? createTime; + + /// The error details in case of failure. + /// + /// Output only. + Status? error; + + /// The object's name. + core.String? name; + + /// The phase of the migration job object. + /// + /// Output only. + /// Possible string values are: + /// - "PHASE_UNSPECIFIED" : The phase of the migration job is unknown. + /// - "FULL_DUMP" : The migration job object is in the full dump phase. + /// - "CDC" : The migration job object is in CDC phase. + /// - "READY_FOR_PROMOTE" : The migration job object is ready to be promoted. + /// - "PROMOTE_IN_PROGRESS" : The migration job object is in running the + /// promote phase. + /// - "PROMOTED" : The migration job is promoted. + /// - "DIFF_BACKUP" : The migration job object is in the differential backup + /// phase. + core.String? phase; + + /// The object identifier in the data source. + SourceObjectIdentifier? sourceObject; + + /// The state of the migration job object. + /// Possible string values are: + /// - "STATE_UNSPECIFIED" : The state of the migration job object is unknown. + /// - "NOT_STARTED" : The migration job object is not started. + /// - "RUNNING" : The migration job object is running. + /// - "STOPPING" : The migration job object is being stopped. + /// - "STOPPED" : The migration job object is currently stopped. + /// - "RESTARTING" : The migration job object is restarting. 
+ /// - "FAILED" : The migration job object failed. + /// - "REMOVING" : The migration job object is deleting. + /// - "NOT_SELECTED" : The migration job object is not selected for migration. + /// - "COMPLETED" : The migration job object is completed. + core.String? state; + + /// The last update time of the migration job object. + /// + /// Output only. + core.String? updateTime; + + MigrationJobObject({ + this.createTime, + this.error, + this.name, + this.phase, + this.sourceObject, + this.state, + this.updateTime, + }); + + MigrationJobObject.fromJson(core.Map json_) + : this( + createTime: json_['createTime'] as core.String?, + error: json_.containsKey('error') + ? Status.fromJson( + json_['error'] as core.Map) + : null, + name: json_['name'] as core.String?, + phase: json_['phase'] as core.String?, + sourceObject: json_.containsKey('sourceObject') + ? SourceObjectIdentifier.fromJson( + json_['sourceObject'] as core.Map) + : null, + state: json_['state'] as core.String?, + updateTime: json_['updateTime'] as core.String?, + ); + + core.Map toJson() => { + if (createTime != null) 'createTime': createTime!, + if (error != null) 'error': error!, + if (name != null) 'name': name!, + if (phase != null) 'phase': phase!, + if (sourceObject != null) 'sourceObject': sourceObject!, + if (state != null) 'state': state!, + if (updateTime != null) 'updateTime': updateTime!, + }; +} + +/// Configuration for the objects to be migrated. +class MigrationJobObjectsConfig { + /// The list of the migration job objects. + SourceObjectsConfig? sourceObjectsConfig; + + MigrationJobObjectsConfig({ + this.sourceObjectsConfig, + }); + + MigrationJobObjectsConfig.fromJson(core.Map json_) + : this( + sourceObjectsConfig: json_.containsKey('sourceObjectsConfig') + ? SourceObjectsConfig.fromJson(json_['sourceObjectsConfig'] + as core.Map) + : null, + ); + + core.Map toJson() => { + if (sourceObjectsConfig != null) + 'sourceObjectsConfig': sourceObjectsConfig!, + }; +} + /// Options to configure rule type MultiColumnDatatypeChange. /// /// The rule is used to change the data type and associated properties of @@ -7961,6 +8294,11 @@ class PostgreSqlConnectionProfile { /// SQL instance ID of the source. core.String? cloudSqlId; + /// The name of the specific database within the host. + /// + /// Optional. + core.String? database; + /// The IP or hostname of the source PostgreSQL database. /// /// Required. @@ -8017,6 +8355,7 @@ class PostgreSqlConnectionProfile { PostgreSqlConnectionProfile({ this.alloydbClusterId, this.cloudSqlId, + this.database, this.host, this.networkArchitecture, this.password, @@ -8032,6 +8371,7 @@ class PostgreSqlConnectionProfile { : this( alloydbClusterId: json_['alloydbClusterId'] as core.String?, cloudSqlId: json_['cloudSqlId'] as core.String?, + database: json_['database'] as core.String?, host: json_['host'] as core.String?, networkArchitecture: json_['networkArchitecture'] as core.String?, password: json_['password'] as core.String?, @@ -8057,6 +8397,7 @@ class PostgreSqlConnectionProfile { core.Map toJson() => { if (alloydbClusterId != null) 'alloydbClusterId': alloydbClusterId!, if (cloudSqlId != null) 'cloudSqlId': cloudSqlId!, + if (database != null) 'database': database!, if (host != null) 'host': host!, if (networkArchitecture != null) 'networkArchitecture': networkArchitecture!, @@ -8344,10 +8685,36 @@ class PrivateServiceConnectConnectivity { } /// Request message for 'PromoteMigrationJob' request. 
-typedef PromoteMigrationJobRequest = $Empty; +class PromoteMigrationJobRequest { + /// The object filter to apply to the migration job. + /// + /// Optional. + MigrationJobObjectsConfig? objectsFilter; + + PromoteMigrationJobRequest({ + this.objectsFilter, + }); + + PromoteMigrationJobRequest.fromJson(core.Map json_) + : this( + objectsFilter: json_.containsKey('objectsFilter') + ? MigrationJobObjectsConfig.fromJson( + json_['objectsFilter'] as core.Map) + : null, + ); + + core.Map toJson() => { + if (objectsFilter != null) 'objectsFilter': objectsFilter!, + }; +} /// Request message for 'RestartMigrationJob' request. class RestartMigrationJobRequest { + /// The object filter to apply to the migration job. + /// + /// Optional. + MigrationJobObjectsConfig? objectsFilter; + /// Restart the migration job without running prior configuration /// verification. /// @@ -8357,15 +8724,21 @@ class RestartMigrationJobRequest { core.bool? skipValidation; RestartMigrationJobRequest({ + this.objectsFilter, this.skipValidation, }); RestartMigrationJobRequest.fromJson(core.Map json_) : this( + objectsFilter: json_.containsKey('objectsFilter') + ? MigrationJobObjectsConfig.fromJson( + json_['objectsFilter'] as core.Map) + : null, skipValidation: json_['skipValidation'] as core.bool?, ); core.Map toJson() => { + if (objectsFilter != null) 'objectsFilter': objectsFilter!, if (skipValidation != null) 'skipValidation': skipValidation!, }; } @@ -9038,6 +9411,98 @@ class SourceNumericFilter { }; } +/// Config for a single migration job object. +class SourceObjectConfig { + /// The object identifier. + SourceObjectIdentifier? objectIdentifier; + + SourceObjectConfig({ + this.objectIdentifier, + }); + + SourceObjectConfig.fromJson(core.Map json_) + : this( + objectIdentifier: json_.containsKey('objectIdentifier') + ? SourceObjectIdentifier.fromJson(json_['objectIdentifier'] + as core.Map) + : null, + ); + + core.Map toJson() => { + if (objectIdentifier != null) 'objectIdentifier': objectIdentifier!, + }; +} + +/// An identifier for the Migration Job Object. +class SourceObjectIdentifier { + /// The database name. + /// + /// This will be required only if the object uses a database name as part of + /// its unique identifier. + core.String? database; + + /// The type of the migration job object. + /// + /// Required. + /// Possible string values are: + /// - "MIGRATION_JOB_OBJECT_TYPE_UNSPECIFIED" : The type of the migration job + /// object is unknown. + /// - "DATABASE" : The migration job object is a database. + core.String? type; + + SourceObjectIdentifier({ + this.database, + this.type, + }); + + SourceObjectIdentifier.fromJson(core.Map json_) + : this( + database: json_['database'] as core.String?, + type: json_['type'] as core.String?, + ); + + core.Map toJson() => { + if (database != null) 'database': database!, + if (type != null) 'type': type!, + }; +} + +/// List of configurations for the source objects to be migrated. +class SourceObjectsConfig { + /// The list of the objects to be migrated. + core.List? objectConfigs; + + /// The objects selection type of the migration job. + /// + /// Optional. + /// Possible string values are: + /// - "OBJECTS_SELECTION_TYPE_UNSPECIFIED" : The type of the objects selection + /// is unknown, indicating that the migration job is at instance level. + /// - "ALL_OBJECTS" : Migrate all of the objects. + /// - "SPECIFIED_OBJECTS" : Migrate specific objects. + core.String? 
objectsSelectionType; + + SourceObjectsConfig({ + this.objectConfigs, + this.objectsSelectionType, + }); + + SourceObjectsConfig.fromJson(core.Map json_) + : this( + objectConfigs: (json_['objectConfigs'] as core.List?) + ?.map((value) => SourceObjectConfig.fromJson( + value as core.Map)) + .toList(), + objectsSelectionType: json_['objectsSelectionType'] as core.String?, + ); + + core.Map toJson() => { + if (objectConfigs != null) 'objectConfigs': objectConfigs!, + if (objectsSelectionType != null) + 'objectsSelectionType': objectsSelectionType!, + }; +} + /// Options to configure rule type SourceSqlChange. /// /// The rule is used to alter the sql code for database entities. The rule @@ -9401,19 +9866,27 @@ class SqlServerDatabaseBackup { /// Encryption settings for the SQL Server database. class SqlServerEncryptionOptions { - /// Path to certificate. + /// Path to the Certificate (.cer) in Cloud Storage, in the form + /// `gs://bucketName/fileName`. + /// + /// The instance must have write permissions to the bucket and read access to + /// the file. /// /// Required. core.String? certPath; /// Input only. /// - /// Private key password. + /// Password that encrypts the private key. /// /// Required. core.String? pvkPassword; - /// Path to certificate private key. + /// Path to the Certificate Private Key (.pvk) in Cloud Storage, in the form + /// `gs://bucketName/fileName`. + /// + /// The instance must have write permissions to the bucket and read access to + /// the file. /// /// Required. core.String? pvkPath; @@ -9548,6 +10021,9 @@ class SslConfig { /// - "SERVER_ONLY" : Only 'ca_certificate' specified. /// - "SERVER_CLIENT" : Both server ('ca_certificate'), and client /// ('client_key', 'client_certificate') specified. + /// - "REQUIRED" : Mandates SSL encryption for all connections. This doesn’t + /// require certificate verification. + /// - "NONE" : Connection is not encrypted. core.String? type; SslConfig({ diff --git a/generated/googleapis/lib/dataplex/v1.dart b/generated/googleapis/lib/dataplex/v1.dart index 794e0b3fc..a11c6697c 100644 --- a/generated/googleapis/lib/dataplex/v1.dart +++ b/generated/googleapis/lib/dataplex/v1.dart @@ -670,7 +670,9 @@ class ProjectsLocationsResource { /// in the following form: projects/{project}/locations/{location}. /// Value must have pattern `^projects/\[^/\]+/locations/\[^/\]+$`. /// - /// [orderBy] - Optional. Specifies the ordering of results. + /// [orderBy] - Optional. Specifies the ordering of results. Supported values + /// are: * relevance (default) * last_modified_timestamp * + /// last_modified_timestamp asc /// /// [pageSize] - Optional. Number of results in the search page. If \<=0, then /// defaults to 10. Max limit for page_size is 1000. Throws an invalid @@ -680,7 +682,8 @@ class ProjectsLocationsResource { /// call. Provide this to retrieve the subsequent page. /// /// [query] - Required. The query against which entries in scope should be - /// matched. + /// matched. The query syntax is defined in Search syntax for Dataplex Catalog + /// (https://cloud.google.com/dataplex/docs/search-syntax). /// /// [scope] - Optional. The scope under which the search should be operating. /// It must either be organizations/ or projects/. If it is unspecified, it @@ -1640,6 +1643,10 @@ class ProjectsLocationsDataScansResource { /// Value must have pattern /// `^projects/\[^/\]+/locations/\[^/\]+/dataScans/\[^/\]+$`. /// + /// [force] - Optional. If set to true, any child resources of this data scan + /// will also be deleted. 
(Otherwise, the request will only work if the data + /// scan has no child resources.) + /// /// [$fields] - Selector specifying which fields to include in a partial /// response. /// @@ -1652,9 +1659,11 @@ class ProjectsLocationsDataScansResource { /// this method will complete with the same error. async.Future delete( core.String name, { + core.bool? force, core.String? $fields, }) async { final queryParams_ = >{ + if (force != null) 'force': ['${force}'], if ($fields != null) 'fields': [$fields], }; @@ -1891,14 +1900,15 @@ class ProjectsLocationsDataScansResource { /// /// Request parameters: /// - /// [name] - Output only. The relative resource name of the scan, of the form: + /// [name] - Output only. Identifier. The relative resource name of the scan, + /// of the form: /// projects/{project}/locations/{location_id}/dataScans/{datascan_id}, where /// project refers to a project_id or project_number and location_id refers to /// a GCP region. /// Value must have pattern /// `^projects/\[^/\]+/locations/\[^/\]+/dataScans/\[^/\]+$`. /// - /// [updateMask] - Required. Mask of fields to update. + /// [updateMask] - Optional. Mask of fields to update. /// /// [validateOnly] - Optional. Only validate the request, but do not perform /// mutations. The default is false. @@ -3757,8 +3767,8 @@ class ProjectsLocationsEntryGroupsEntriesResource { /// should modify. It supports the following syntaxes: - matches an aspect of /// the given type and empty path. @path - matches an aspect of the given type /// and specified path. For example, to attach an aspect to a field that is - /// specified by the schema aspect, the path should have the format Schema.. * - /// - matches aspects of the given type for all paths. *@path - matches + /// specified by the schema aspect, the path should have the format Schema.. + /// @* - matches aspects of the given type for all paths. *@path - matches /// aspects of all types on the given path.The service will not remove /// existing aspects matching the syntax unless delete_missing_aspects is set /// to true.If this field is left empty, the service treats it as specifying @@ -8931,6 +8941,9 @@ class ProjectsLocationsMetadataJobsResource { /// [metadataJobId] - Optional. The metadata job ID. If not provided, a unique /// ID is generated with the prefix metadata-job-. /// + /// [validateOnly] - Optional. The service validates the request without + /// performing any mutations. The default is false. + /// /// [$fields] - Selector specifying which fields to include in a partial /// response. /// @@ -8945,11 +8958,13 @@ class ProjectsLocationsMetadataJobsResource { GoogleCloudDataplexV1MetadataJob request, core.String parent, { core.String? metadataJobId, + core.bool? validateOnly, core.String? $fields, }) async { final body_ = convert.json.encode(request); final queryParams_ = >{ if (metadataJobId != null) 'metadataJobId': [metadataJobId], + if (validateOnly != null) 'validateOnly': ['${validateOnly}'], if ($fields != null) 'fields': [$fields], }; @@ -11176,6 +11191,306 @@ class GoogleCloudDataplexV1DataAttributeBindingPath { }; } +/// The output of a data discovery scan. +class GoogleCloudDataplexV1DataDiscoveryResult { + /// Configuration for metadata publishing. + /// + /// Output only. + GoogleCloudDataplexV1DataDiscoveryResultBigQueryPublishing? 
+ bigqueryPublishing; + + GoogleCloudDataplexV1DataDiscoveryResult({ + this.bigqueryPublishing, + }); + + GoogleCloudDataplexV1DataDiscoveryResult.fromJson(core.Map json_) + : this( + bigqueryPublishing: json_.containsKey('bigqueryPublishing') + ? GoogleCloudDataplexV1DataDiscoveryResultBigQueryPublishing + .fromJson(json_['bigqueryPublishing'] + as core.Map) + : null, + ); + + core.Map toJson() => { + if (bigqueryPublishing != null) + 'bigqueryPublishing': bigqueryPublishing!, + }; +} + +/// Describes BigQuery publishing configurations. +class GoogleCloudDataplexV1DataDiscoveryResultBigQueryPublishing { + /// The BigQuery dataset to publish to. + /// + /// It takes the form projects/{project_id}/datasets/{dataset_id}. If not set, + /// the service creates a default publishing dataset. + /// + /// Output only. + core.String? dataset; + + GoogleCloudDataplexV1DataDiscoveryResultBigQueryPublishing({ + this.dataset, + }); + + GoogleCloudDataplexV1DataDiscoveryResultBigQueryPublishing.fromJson( + core.Map json_) + : this( + dataset: json_['dataset'] as core.String?, + ); + + core.Map toJson() => { + if (dataset != null) 'dataset': dataset!, + }; +} + +/// Spec for a data discovery scan. +class GoogleCloudDataplexV1DataDiscoverySpec { + /// Configuration for metadata publishing. + /// + /// Optional. + GoogleCloudDataplexV1DataDiscoverySpecBigQueryPublishingConfig? + bigqueryPublishingConfig; + + /// Cloud Storage related configurations. + GoogleCloudDataplexV1DataDiscoverySpecStorageConfig? storageConfig; + + GoogleCloudDataplexV1DataDiscoverySpec({ + this.bigqueryPublishingConfig, + this.storageConfig, + }); + + GoogleCloudDataplexV1DataDiscoverySpec.fromJson(core.Map json_) + : this( + bigqueryPublishingConfig: json_ + .containsKey('bigqueryPublishingConfig') + ? GoogleCloudDataplexV1DataDiscoverySpecBigQueryPublishingConfig + .fromJson(json_['bigqueryPublishingConfig'] + as core.Map) + : null, + storageConfig: json_.containsKey('storageConfig') + ? GoogleCloudDataplexV1DataDiscoverySpecStorageConfig.fromJson( + json_['storageConfig'] as core.Map) + : null, + ); + + core.Map toJson() => { + if (bigqueryPublishingConfig != null) + 'bigqueryPublishingConfig': bigqueryPublishingConfig!, + if (storageConfig != null) 'storageConfig': storageConfig!, + }; +} + +/// Describes BigQuery publishing configurations. +class GoogleCloudDataplexV1DataDiscoverySpecBigQueryPublishingConfig { + /// The BigQuery connection used to create BigLake tables. + /// + /// Must be in the form + /// projects/{project_id}/locations/{location_id}/connections/{connection_id} + /// + /// Optional. + core.String? connection; + + /// Determines whether to publish discovered tables as BigLake external tables + /// or non-BigLake external tables. + /// + /// Optional. + /// Possible string values are: + /// - "TABLE_TYPE_UNSPECIFIED" : Table type unspecified. + /// - "EXTERNAL" : Default. Discovered tables are published as BigQuery + /// external tables whose data is accessed using the credentials of the user + /// querying the table. + /// - "BIGLAKE" : Discovered tables are published as BigLake external tables + /// whose data is accessed using the credentials of the associated BigQuery + /// connection. + core.String? 
tableType; + + GoogleCloudDataplexV1DataDiscoverySpecBigQueryPublishingConfig({ + this.connection, + this.tableType, + }); + + GoogleCloudDataplexV1DataDiscoverySpecBigQueryPublishingConfig.fromJson( + core.Map json_) + : this( + connection: json_['connection'] as core.String?, + tableType: json_['tableType'] as core.String?, + ); + + core.Map toJson() => { + if (connection != null) 'connection': connection!, + if (tableType != null) 'tableType': tableType!, + }; +} + +/// Configurations related to Cloud Storage as the data source. +class GoogleCloudDataplexV1DataDiscoverySpecStorageConfig { + /// Configuration for CSV data. + /// + /// Optional. + GoogleCloudDataplexV1DataDiscoverySpecStorageConfigCsvOptions? csvOptions; + + /// Defines the data to exclude during discovery. + /// + /// Provide a list of patterns that identify the data to exclude. For Cloud + /// Storage bucket assets, these patterns are interpreted as glob patterns + /// used to match object names. For BigQuery dataset assets, these patterns + /// are interpreted as patterns to match table names. + /// + /// Optional. + core.List? excludePatterns; + + /// Defines the data to include during discovery when only a subset of the + /// data should be considered. + /// + /// Provide a list of patterns that identify the data to include. For Cloud + /// Storage bucket assets, these patterns are interpreted as glob patterns + /// used to match object names. For BigQuery dataset assets, these patterns + /// are interpreted as patterns to match table names. + /// + /// Optional. + core.List? includePatterns; + + /// Configuration for JSON data. + /// + /// Optional. + GoogleCloudDataplexV1DataDiscoverySpecStorageConfigJsonOptions? jsonOptions; + + GoogleCloudDataplexV1DataDiscoverySpecStorageConfig({ + this.csvOptions, + this.excludePatterns, + this.includePatterns, + this.jsonOptions, + }); + + GoogleCloudDataplexV1DataDiscoverySpecStorageConfig.fromJson(core.Map json_) + : this( + csvOptions: json_.containsKey('csvOptions') + ? GoogleCloudDataplexV1DataDiscoverySpecStorageConfigCsvOptions + .fromJson(json_['csvOptions'] + as core.Map) + : null, + excludePatterns: (json_['excludePatterns'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + includePatterns: (json_['includePatterns'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + jsonOptions: json_.containsKey('jsonOptions') + ? GoogleCloudDataplexV1DataDiscoverySpecStorageConfigJsonOptions + .fromJson(json_['jsonOptions'] + as core.Map) + : null, + ); + + core.Map toJson() => { + if (csvOptions != null) 'csvOptions': csvOptions!, + if (excludePatterns != null) 'excludePatterns': excludePatterns!, + if (includePatterns != null) 'includePatterns': includePatterns!, + if (jsonOptions != null) 'jsonOptions': jsonOptions!, + }; +} + +/// Describes CSV and similar semi-structured data formats. +class GoogleCloudDataplexV1DataDiscoverySpecStorageConfigCsvOptions { + /// The delimiter that is used to separate values. + /// + /// The default is , (comma). + /// + /// Optional. + core.String? delimiter; + + /// The character encoding of the data. + /// + /// The default is UTF-8. + /// + /// Optional. + core.String? encoding; + + /// The number of rows to interpret as header rows that should be skipped when + /// reading data rows. + /// + /// Optional. + core.int? headerRows; + + /// The character used to quote column values. + /// + /// Accepts " (double quotation mark) or ' (single quotation mark). 
If + /// unspecified, defaults to " (double quotation mark). + /// + /// Optional. + core.String? quote; + + /// Whether to disable the inference of data types for CSV data. + /// + /// If true, all columns are registered as strings. + /// + /// Optional. + core.bool? typeInferenceDisabled; + + GoogleCloudDataplexV1DataDiscoverySpecStorageConfigCsvOptions({ + this.delimiter, + this.encoding, + this.headerRows, + this.quote, + this.typeInferenceDisabled, + }); + + GoogleCloudDataplexV1DataDiscoverySpecStorageConfigCsvOptions.fromJson( + core.Map json_) + : this( + delimiter: json_['delimiter'] as core.String?, + encoding: json_['encoding'] as core.String?, + headerRows: json_['headerRows'] as core.int?, + quote: json_['quote'] as core.String?, + typeInferenceDisabled: json_['typeInferenceDisabled'] as core.bool?, + ); + + core.Map toJson() => { + if (delimiter != null) 'delimiter': delimiter!, + if (encoding != null) 'encoding': encoding!, + if (headerRows != null) 'headerRows': headerRows!, + if (quote != null) 'quote': quote!, + if (typeInferenceDisabled != null) + 'typeInferenceDisabled': typeInferenceDisabled!, + }; +} + +/// Describes JSON data format. +class GoogleCloudDataplexV1DataDiscoverySpecStorageConfigJsonOptions { + /// The character encoding of the data. + /// + /// The default is UTF-8. + /// + /// Optional. + core.String? encoding; + + /// Whether to disable the inference of data types for JSON data. + /// + /// If true, all columns are registered as their primitive types (strings, + /// number, or boolean). + /// + /// Optional. + core.bool? typeInferenceDisabled; + + GoogleCloudDataplexV1DataDiscoverySpecStorageConfigJsonOptions({ + this.encoding, + this.typeInferenceDisabled, + }); + + GoogleCloudDataplexV1DataDiscoverySpecStorageConfigJsonOptions.fromJson( + core.Map json_) + : this( + encoding: json_['encoding'] as core.String?, + typeInferenceDisabled: json_['typeInferenceDisabled'] as core.bool?, + ); + + core.Map toJson() => { + if (encoding != null) 'encoding': encoding!, + if (typeInferenceDisabled != null) + 'typeInferenceDisabled': typeInferenceDisabled!, + }; +} + /// DataProfileResult defines the output of DataProfileScan. /// /// Each field of the table will have field type specific profile result. @@ -12743,6 +13058,14 @@ class GoogleCloudDataplexV1DataScan { /// Required. GoogleCloudDataplexV1DataSource? data; + /// The result of a data discovery scan. + /// + /// Output only. + GoogleCloudDataplexV1DataDiscoveryResult? dataDiscoveryResult; + + /// Settings for a data discovery scan. + GoogleCloudDataplexV1DataDiscoverySpec? dataDiscoverySpec; + /// The result of a data profile scan. /// /// Output only. @@ -12789,6 +13112,8 @@ class GoogleCloudDataplexV1DataScan { /// Optional. core.Map? labels; + /// Identifier. + /// /// The relative resource name of the scan, of the form: /// projects/{project}/locations/{location_id}/dataScans/{datascan_id}, where /// project refers to a project_id or project_number and location_id refers to @@ -12815,6 +13140,7 @@ class GoogleCloudDataplexV1DataScan { /// - "DATA_SCAN_TYPE_UNSPECIFIED" : The data scan type is unspecified. /// - "DATA_QUALITY" : Data quality scan. /// - "DATA_PROFILE" : Data profile scan. + /// - "DATA_DISCOVERY" : Data discovery scan. core.String? type; /// System generated globally unique ID for the scan. 
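// A minimal sketch of how the data discovery scan classes generated above
// might be assembled from Dart. The import path follows the generated file
// touched in this diff; the project, location, and connection names are
// placeholders, and actually submitting the payload through the DataScans
// `create` method of the generated client is assumed rather than shown in
// this hunk.
import 'dart:convert' show jsonEncode;

import 'package:googleapis/dataplex/v1.dart';

void main() {
  final scan = GoogleCloudDataplexV1DataScan(
    // `labels` is optional; the required `data` source is omitted here and
    // would be set before the request is actually sent.
    labels: {'env': 'dev'},
    dataDiscoverySpec: GoogleCloudDataplexV1DataDiscoverySpec(
      bigqueryPublishingConfig:
          GoogleCloudDataplexV1DataDiscoverySpecBigQueryPublishingConfig(
        // Placeholder connection name, in the format described by the field
        // documentation above.
        connection:
            'projects/my-project/locations/us-central1/connections/my-connection',
        tableType: 'BIGLAKE',
      ),
      storageConfig: GoogleCloudDataplexV1DataDiscoverySpecStorageConfig(
        includePatterns: ['**/*.csv'],
        csvOptions:
            GoogleCloudDataplexV1DataDiscoverySpecStorageConfigCsvOptions(
          delimiter: ',',
          headerRows: 1,
          typeInferenceDisabled: false,
        ),
      ),
    ),
  );

  // The generated `toJson` methods serialize the nested spec into the wire
  // format that the client would send.
  print(jsonEncode(scan.toJson()));
}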
@@ -12833,6 +13159,8 @@ class GoogleCloudDataplexV1DataScan { GoogleCloudDataplexV1DataScan({ this.createTime, this.data, + this.dataDiscoveryResult, + this.dataDiscoverySpec, this.dataProfileResult, this.dataProfileSpec, this.dataQualityResult, @@ -12856,6 +13184,16 @@ class GoogleCloudDataplexV1DataScan { ? GoogleCloudDataplexV1DataSource.fromJson( json_['data'] as core.Map) : null, + dataDiscoveryResult: json_.containsKey('dataDiscoveryResult') + ? GoogleCloudDataplexV1DataDiscoveryResult.fromJson( + json_['dataDiscoveryResult'] + as core.Map) + : null, + dataDiscoverySpec: json_.containsKey('dataDiscoverySpec') + ? GoogleCloudDataplexV1DataDiscoverySpec.fromJson( + json_['dataDiscoverySpec'] + as core.Map) + : null, dataProfileResult: json_.containsKey('dataProfileResult') ? GoogleCloudDataplexV1DataProfileResult.fromJson( json_['dataProfileResult'] @@ -12904,6 +13242,9 @@ class GoogleCloudDataplexV1DataScan { core.Map toJson() => { if (createTime != null) 'createTime': createTime!, if (data != null) 'data': data!, + if (dataDiscoveryResult != null) + 'dataDiscoveryResult': dataDiscoveryResult!, + if (dataDiscoverySpec != null) 'dataDiscoverySpec': dataDiscoverySpec!, if (dataProfileResult != null) 'dataProfileResult': dataProfileResult!, if (dataProfileSpec != null) 'dataProfileSpec': dataProfileSpec!, if (dataQualityResult != null) 'dataQualityResult': dataQualityResult!, @@ -12965,9 +13306,13 @@ class GoogleCloudDataplexV1DataScanExecutionStatus { core.String? latestJobCreateTime; /// The time when the latest DataScanJob ended. + /// + /// Optional. core.String? latestJobEndTime; /// The time when the latest DataScanJob started. + /// + /// Optional. core.String? latestJobStartTime; GoogleCloudDataplexV1DataScanExecutionStatus({ @@ -12999,6 +13344,16 @@ class GoogleCloudDataplexV1DataScanJob { /// Output only. core.String? createTime; + /// The result of a data discovery scan. + /// + /// Output only. + GoogleCloudDataplexV1DataDiscoveryResult? dataDiscoveryResult; + + /// Settings for a data discovery scan. + /// + /// Output only. + GoogleCloudDataplexV1DataDiscoverySpec? dataDiscoverySpec; + /// The result of a data profile scan. /// /// Output only. @@ -13029,6 +13384,8 @@ class GoogleCloudDataplexV1DataScanJob { /// Output only. core.String? message; + /// Identifier. + /// /// The relative resource name of the DataScanJob, of the form: /// projects/{project}/locations/{location_id}/dataScans/{datascan_id}/jobs/{job_id}, /// where project refers to a project_id or project_number and location_id @@ -13062,6 +13419,7 @@ class GoogleCloudDataplexV1DataScanJob { /// - "DATA_SCAN_TYPE_UNSPECIFIED" : The data scan type is unspecified. /// - "DATA_QUALITY" : Data quality scan. /// - "DATA_PROFILE" : Data profile scan. + /// - "DATA_DISCOVERY" : Data discovery scan. core.String? type; /// System generated globally unique ID for the DataScanJob. @@ -13071,6 +13429,8 @@ class GoogleCloudDataplexV1DataScanJob { GoogleCloudDataplexV1DataScanJob({ this.createTime, + this.dataDiscoveryResult, + this.dataDiscoverySpec, this.dataProfileResult, this.dataProfileSpec, this.dataQualityResult, @@ -13087,6 +13447,16 @@ class GoogleCloudDataplexV1DataScanJob { GoogleCloudDataplexV1DataScanJob.fromJson(core.Map json_) : this( createTime: json_['createTime'] as core.String?, + dataDiscoveryResult: json_.containsKey('dataDiscoveryResult') + ? 
GoogleCloudDataplexV1DataDiscoveryResult.fromJson( + json_['dataDiscoveryResult'] + as core.Map) + : null, + dataDiscoverySpec: json_.containsKey('dataDiscoverySpec') + ? GoogleCloudDataplexV1DataDiscoverySpec.fromJson( + json_['dataDiscoverySpec'] + as core.Map) + : null, dataProfileResult: json_.containsKey('dataProfileResult') ? GoogleCloudDataplexV1DataProfileResult.fromJson( json_['dataProfileResult'] @@ -13118,6 +13488,9 @@ class GoogleCloudDataplexV1DataScanJob { core.Map toJson() => { if (createTime != null) 'createTime': createTime!, + if (dataDiscoveryResult != null) + 'dataDiscoveryResult': dataDiscoveryResult!, + if (dataDiscoverySpec != null) 'dataDiscoverySpec': dataDiscoverySpec!, if (dataProfileResult != null) 'dataProfileResult': dataProfileResult!, if (dataProfileSpec != null) 'dataProfileSpec': dataProfileSpec!, if (dataQualityResult != null) 'dataQualityResult': dataQualityResult!, diff --git a/generated/googleapis/lib/dataproc/v1.dart b/generated/googleapis/lib/dataproc/v1.dart index c2a04ef28..f0950d5e1 100644 --- a/generated/googleapis/lib/dataproc/v1.dart +++ b/generated/googleapis/lib/dataproc/v1.dart @@ -14128,7 +14128,7 @@ class PySparkBatch { } /// A Dataproc job for running Apache PySpark -/// (https://spark.apache.org/docs/0.9.0/python-programming-guide.html) +/// (https://spark.apache.org/docs/latest/api/python/index.html#pyspark-overview) /// applications on YARN. class PySparkJob { /// HCFS URIs of archives to be extracted into the working directory of each diff --git a/generated/googleapis/lib/datastore/v1.dart b/generated/googleapis/lib/datastore/v1.dart index 396cc9452..5fc17af17 100644 --- a/generated/googleapis/lib/datastore/v1.dart +++ b/generated/googleapis/lib/datastore/v1.dart @@ -720,8 +720,8 @@ class ProjectsOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// Request parameters: /// @@ -1607,6 +1607,114 @@ class Filter { }; } +/// Nearest Neighbors search config. +/// +/// The ordering provided by FindNearest supersedes the order_by stage. If +/// multiple documents have the same vector distance, the returned document +/// order is not guaranteed to be stable between queries. +class FindNearest { + /// The Distance Measure to use, required. + /// + /// Required. + /// Possible string values are: + /// - "DISTANCE_MEASURE_UNSPECIFIED" : Should not be set. + /// - "EUCLIDEAN" : Measures the EUCLIDEAN distance between the vectors. See + /// [Euclidean](https://en.wikipedia.org/wiki/Euclidean_distance) to learn + /// more. The resulting distance decreases the more similar two vectors are. + /// - "COSINE" : COSINE distance compares vectors based on the angle between + /// them, which allows you to measure similarity that isn't based on the + /// vectors magnitude. We recommend using DOT_PRODUCT with unit normalized + /// vectors instead of COSINE distance, which is mathematically equivalent + /// with better performance. See + /// [Cosine Similarity](https://en.wikipedia.org/wiki/Cosine_similarity) to + /// learn more about COSINE similarity and COSINE distance. The resulting + /// COSINE distance decreases the more similar two vectors are. 
+ /// - "DOT_PRODUCT" : Similar to cosine but is affected by the magnitude of + /// the vectors. See [Dot Product](https://en.wikipedia.org/wiki/Dot_product) + /// to learn more. The resulting distance increases the more similar two + /// vectors are. + core.String? distanceMeasure; + + /// Optional name of the field to output the result of the vector distance + /// calculation. + /// + /// Must conform to entity property limitations. + /// + /// Optional. + core.String? distanceResultProperty; + + /// Option to specify a threshold for which no less similar documents will be + /// returned. + /// + /// The behavior of the specified `distance_measure` will affect the meaning + /// of the distance threshold. Since DOT_PRODUCT distances increase when the + /// vectors are more similar, the comparison is inverted. * For EUCLIDEAN, + /// COSINE: WHERE distance \<= distance_threshold * For DOT_PRODUCT: WHERE + /// distance \>= distance_threshold + /// + /// Optional. + core.double? distanceThreshold; + + /// The number of nearest neighbors to return. + /// + /// Must be a positive integer of no more than 100. + /// + /// Required. + core.int? limit; + + /// The query vector that we are searching on. + /// + /// Must be a vector of no more than 2048 dimensions. + /// + /// Required. + Value? queryVector; + + /// An indexed vector property to search upon. + /// + /// Only documents which contain vectors whose dimensionality match the + /// query_vector can be returned. + /// + /// Required. + PropertyReference? vectorProperty; + + FindNearest({ + this.distanceMeasure, + this.distanceResultProperty, + this.distanceThreshold, + this.limit, + this.queryVector, + this.vectorProperty, + }); + + FindNearest.fromJson(core.Map json_) + : this( + distanceMeasure: json_['distanceMeasure'] as core.String?, + distanceResultProperty: + json_['distanceResultProperty'] as core.String?, + distanceThreshold: + (json_['distanceThreshold'] as core.num?)?.toDouble(), + limit: json_['limit'] as core.int?, + queryVector: json_.containsKey('queryVector') + ? Value.fromJson( + json_['queryVector'] as core.Map) + : null, + vectorProperty: json_.containsKey('vectorProperty') + ? PropertyReference.fromJson(json_['vectorProperty'] + as core.Map) + : null, + ); + + core.Map toJson() => { + if (distanceMeasure != null) 'distanceMeasure': distanceMeasure!, + if (distanceResultProperty != null) + 'distanceResultProperty': distanceResultProperty!, + if (distanceThreshold != null) 'distanceThreshold': distanceThreshold!, + if (limit != null) 'limit': limit!, + if (queryVector != null) 'queryVector': queryVector!, + if (vectorProperty != null) 'vectorProperty': vectorProperty!, + }; +} + /// Identifies a subset of entities in a project. /// /// This is specified as combinations of kinds and namespaces (either or both of @@ -2897,6 +3005,10 @@ class PropertyTransform { } /// A query for entities. +/// +/// The query stages are executed in the following order: 1. kind 2. filter 3. +/// projection 4. order + start_cursor + end_cursor 5. offset 6. limit 7. +/// find_nearest class Query { /// The properties to make distinct. /// @@ -2921,6 +3033,14 @@ class Query { /// The filter to apply. Filter? filter; + /// A potential Nearest Neighbors Search. + /// + /// Applies after all other filters and ordering. Finds the closest vector + /// embeddings to the given query vector. + /// + /// Optional. + FindNearest? findNearest; + /// The kinds to query (if empty, returns entities of all kinds). 
/// /// Currently at most 1 kind may be specified. @@ -2963,6 +3083,7 @@ class Query { this.distinctOn, this.endCursor, this.filter, + this.findNearest, this.kind, this.limit, this.offset, @@ -2982,6 +3103,10 @@ class Query { ? Filter.fromJson( json_['filter'] as core.Map) : null, + findNearest: json_.containsKey('findNearest') + ? FindNearest.fromJson( + json_['findNearest'] as core.Map) + : null, kind: (json_['kind'] as core.List?) ?.map((value) => KindExpression.fromJson( value as core.Map)) @@ -3003,6 +3128,7 @@ class Query { if (distinctOn != null) 'distinctOn': distinctOn!, if (endCursor != null) 'endCursor': endCursor!, if (filter != null) 'filter': filter!, + if (findNearest != null) 'findNearest': findNearest!, if (kind != null) 'kind': kind!, if (limit != null) 'limit': limit!, if (offset != null) 'offset': offset!, diff --git a/generated/googleapis/lib/datastream/v1.dart b/generated/googleapis/lib/datastream/v1.dart index e24dc8376..380f678e0 100644 --- a/generated/googleapis/lib/datastream/v1.dart +++ b/generated/googleapis/lib/datastream/v1.dart @@ -491,7 +491,7 @@ class ProjectsLocationsConnectionProfilesResource { /// /// Request parameters: /// - /// [name] - Output only. The resource's name. + /// [name] - Output only. Identifier. The resource's name. /// Value must have pattern /// `^projects/\[^/\]+/locations/\[^/\]+/connectionProfiles/\[^/\]+$`. /// @@ -572,8 +572,8 @@ class ProjectsLocationsOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// [request] - The metadata request object. /// @@ -1409,7 +1409,7 @@ class ProjectsLocationsStreamsResource { /// /// Request parameters: /// - /// [name] - Output only. The stream's name. + /// [name] - Output only. Identifier. The stream's name. /// Value must have pattern /// `^projects/\[^/\]+/locations/\[^/\]+/streams/\[^/\]+$`. /// @@ -1950,6 +1950,42 @@ class BigQueryDestinationConfig { /// BigQuery warehouse profile. typedef BigQueryProfile = $Empty; +/// Configuration to use Binary Log Parser CDC technique. +class BinaryLogParser { + /// Use Oracle directories. + LogFileDirectories? logFileDirectories; + + /// Use Oracle ASM. + OracleAsmLogFileAccess? oracleAsmLogFileAccess; + + BinaryLogParser({ + this.logFileDirectories, + this.oracleAsmLogFileAccess, + }); + + BinaryLogParser.fromJson(core.Map json_) + : this( + logFileDirectories: json_.containsKey('logFileDirectories') + ? LogFileDirectories.fromJson(json_['logFileDirectories'] + as core.Map) + : null, + oracleAsmLogFileAccess: json_.containsKey('oracleAsmLogFileAccess') + ? OracleAsmLogFileAccess.fromJson(json_['oracleAsmLogFileAccess'] + as core.Map) + : null, + ); + + core.Map toJson() => { + if (logFileDirectories != null) + 'logFileDirectories': logFileDirectories!, + if (oracleAsmLogFileAccess != null) + 'oracleAsmLogFileAccess': oracleAsmLogFileAccess!, + }; +} + +/// Use Binary log position based replication. +typedef BinaryLogPosition = $Empty; + /// The request message for Operations.CancelOperation. typedef CancelOperationRequest = $Empty; @@ -2033,6 +2069,8 @@ class ConnectionProfile { /// MySQL ConnectionProfile configuration. MysqlProfile? 
mysqlProfile; + /// Identifier. + /// /// The resource's name. /// /// Output only. @@ -2544,6 +2582,9 @@ class GcsProfile { }; } +/// Use GTID based replication. +typedef Gtid = $Empty; + /// JSON file format configuration. class JsonFileFormat { /// Compression of the loaded JSON file. @@ -2818,6 +2859,12 @@ class ListStreamsResponse { /// A resource that represents a Google Cloud location. typedef Location = $Location00; +/// Configuration to specify the Oracle directories to access the log files. +typedef LogFileDirectories = $LogFileDirectories; + +/// Configuration to use LogMiner CDC method. +typedef LogMiner = $Empty; + /// Request for looking up a specific stream object by its source object /// identifier. class LookupStreamObjectRequest { @@ -3091,9 +3138,15 @@ class MysqlRdbms { /// MySQL source configuration class MysqlSourceConfig { + /// Use Binary log position based replication. + BinaryLogPosition? binaryLogPosition; + /// MySQL objects to exclude from the stream. MysqlRdbms? excludeObjects; + /// Use GTID based replication. + Gtid? gtid; + /// MySQL objects to retrieve from the source. MysqlRdbms? includeObjects; @@ -3110,7 +3163,9 @@ class MysqlSourceConfig { core.int? maxConcurrentCdcTasks; MysqlSourceConfig({ + this.binaryLogPosition, this.excludeObjects, + this.gtid, this.includeObjects, this.maxConcurrentBackfillTasks, this.maxConcurrentCdcTasks, @@ -3118,10 +3173,18 @@ class MysqlSourceConfig { MysqlSourceConfig.fromJson(core.Map json_) : this( + binaryLogPosition: json_.containsKey('binaryLogPosition') + ? BinaryLogPosition.fromJson(json_['binaryLogPosition'] + as core.Map) + : null, excludeObjects: json_.containsKey('excludeObjects') ? MysqlRdbms.fromJson(json_['excludeObjects'] as core.Map) : null, + gtid: json_.containsKey('gtid') + ? Gtid.fromJson( + json_['gtid'] as core.Map) + : null, includeObjects: json_.containsKey('includeObjects') ? MysqlRdbms.fromJson(json_['includeObjects'] as core.Map) @@ -3132,7 +3195,9 @@ class MysqlSourceConfig { ); core.Map toJson() => { + if (binaryLogPosition != null) 'binaryLogPosition': binaryLogPosition!, if (excludeObjects != null) 'excludeObjects': excludeObjects!, + if (gtid != null) 'gtid': gtid!, if (includeObjects != null) 'includeObjects': includeObjects!, if (maxConcurrentBackfillTasks != null) 'maxConcurrentBackfillTasks': maxConcurrentBackfillTasks!, @@ -3319,6 +3384,91 @@ class Operation { }; } +/// Configuration for Oracle Automatic Storage Management (ASM) connection. +/// +/// . +class OracleAsmConfig { + /// ASM service name for the Oracle ASM connection. + /// + /// Required. + core.String? asmService; + + /// Connection string attributes + /// + /// Optional. + core.Map? connectionAttributes; + + /// Hostname for the Oracle ASM connection. + /// + /// Required. + core.String? hostname; + + /// SSL configuration for the Oracle connection. + /// + /// Optional. + OracleSslConfig? oracleSslConfig; + + /// Password for the Oracle ASM connection. + /// + /// Optional. + core.String? password; + + /// Port for the Oracle ASM connection. + /// + /// Required. + core.int? port; + + /// Username for the Oracle ASM connection. + /// + /// Required. + core.String? username; + + OracleAsmConfig({ + this.asmService, + this.connectionAttributes, + this.hostname, + this.oracleSslConfig, + this.password, + this.port, + this.username, + }); + + OracleAsmConfig.fromJson(core.Map json_) + : this( + asmService: json_['asmService'] as core.String?, + connectionAttributes: (json_['connectionAttributes'] + as core.Map?) 
+ ?.map( + (key, value) => core.MapEntry( + key, + value as core.String, + ), + ), + hostname: json_['hostname'] as core.String?, + oracleSslConfig: json_.containsKey('oracleSslConfig') + ? OracleSslConfig.fromJson(json_['oracleSslConfig'] + as core.Map) + : null, + password: json_['password'] as core.String?, + port: json_['port'] as core.int?, + username: json_['username'] as core.String?, + ); + + core.Map toJson() => { + if (asmService != null) 'asmService': asmService!, + if (connectionAttributes != null) + 'connectionAttributes': connectionAttributes!, + if (hostname != null) 'hostname': hostname!, + if (oracleSslConfig != null) 'oracleSslConfig': oracleSslConfig!, + if (password != null) 'password': password!, + if (port != null) 'port': port!, + if (username != null) 'username': username!, + }; +} + +/// Configuration to use Oracle ASM to access the log files. +typedef OracleAsmLogFileAccess = $Empty; + /// Oracle Column. class OracleColumn { /// Column name. @@ -3406,6 +3556,11 @@ class OracleProfile { /// Required. core.String? hostname; + /// Configuration for Oracle ASM connection. + /// + /// Optional. + OracleAsmConfig? oracleAsmConfig; + /// SSL configuration for the Oracle connection. /// /// Optional. @@ -3421,6 +3576,14 @@ class OracleProfile { /// Port for the Oracle connection, default value is 1521. core.int? port; + /// A reference to a Secret Manager resource name storing the Oracle + /// connection password. + /// + /// Mutually exclusive with the `password` field. + /// + /// Optional. + core.String? secretManagerStoredPassword; + /// Username for the Oracle connection. /// /// Required. @@ -3430,9 +3593,11 @@ class OracleProfile { this.connectionAttributes, this.databaseService, this.hostname, + this.oracleAsmConfig, this.oracleSslConfig, this.password, this.port, + this.secretManagerStoredPassword, this.username, }); @@ -3448,12 +3613,18 @@ class OracleProfile { ), databaseService: json_['databaseService'] as core.String?, hostname: json_['hostname'] as core.String?, + oracleAsmConfig: json_.containsKey('oracleAsmConfig') + ? OracleAsmConfig.fromJson(json_['oracleAsmConfig'] + as core.Map) + : null, oracleSslConfig: json_.containsKey('oracleSslConfig') ? OracleSslConfig.fromJson(json_['oracleSslConfig'] as core.Map) : null, password: json_['password'] as core.String?, port: json_['port'] as core.int?, + secretManagerStoredPassword: + json_['secretManagerStoredPassword'] as core.String?, username: json_['username'] as core.String?, ); @@ -3462,9 +3633,12 @@ class OracleProfile { 'connectionAttributes': connectionAttributes!, if (databaseService != null) 'databaseService': databaseService!, if (hostname != null) 'hostname': hostname!, + if (oracleAsmConfig != null) 'oracleAsmConfig': oracleAsmConfig!, if (oracleSslConfig != null) 'oracleSslConfig': oracleSslConfig!, if (password != null) 'password': password!, if (port != null) 'port': port!, + if (secretManagerStoredPassword != null) + 'secretManagerStoredPassword': secretManagerStoredPassword!, if (username != null) 'username': username!, }; } @@ -3542,6 +3716,9 @@ class OracleScnPosition { /// Oracle data source configuration class OracleSourceConfig { + /// Use Binary Log Parser. + BinaryLogParser? binaryLogParser; + /// Drop large object values. DropLargeObjects? dropLargeObjects; @@ -3551,6 +3728,9 @@ class OracleSourceConfig { /// Oracle objects to include in the stream. OracleRdbms? includeObjects; + /// Use LogMiner. + LogMiner? logMiner; + /// Maximum number of concurrent backfill tasks. 
/// /// The number should be non-negative. If not set (or set to 0), the system's @@ -3567,9 +3747,11 @@ class OracleSourceConfig { StreamLargeObjects? streamLargeObjects; OracleSourceConfig({ + this.binaryLogParser, this.dropLargeObjects, this.excludeObjects, this.includeObjects, + this.logMiner, this.maxConcurrentBackfillTasks, this.maxConcurrentCdcTasks, this.streamLargeObjects, @@ -3577,6 +3759,10 @@ class OracleSourceConfig { OracleSourceConfig.fromJson(core.Map json_) : this( + binaryLogParser: json_.containsKey('binaryLogParser') + ? BinaryLogParser.fromJson(json_['binaryLogParser'] + as core.Map) + : null, dropLargeObjects: json_.containsKey('dropLargeObjects') ? DropLargeObjects.fromJson(json_['dropLargeObjects'] as core.Map) @@ -3589,6 +3775,10 @@ class OracleSourceConfig { ? OracleRdbms.fromJson(json_['includeObjects'] as core.Map) : null, + logMiner: json_.containsKey('logMiner') + ? LogMiner.fromJson( + json_['logMiner'] as core.Map) + : null, maxConcurrentBackfillTasks: json_['maxConcurrentBackfillTasks'] as core.int?, maxConcurrentCdcTasks: json_['maxConcurrentCdcTasks'] as core.int?, @@ -3599,9 +3789,11 @@ class OracleSourceConfig { ); core.Map toJson() => { + if (binaryLogParser != null) 'binaryLogParser': binaryLogParser!, if (dropLargeObjects != null) 'dropLargeObjects': dropLargeObjects!, if (excludeObjects != null) 'excludeObjects': excludeObjects!, if (includeObjects != null) 'includeObjects': includeObjects!, + if (logMiner != null) 'logMiner': logMiner!, if (maxConcurrentBackfillTasks != null) 'maxConcurrentBackfillTasks': maxConcurrentBackfillTasks!, if (maxConcurrentCdcTasks != null) @@ -3738,8 +3930,6 @@ class PostgresqlColumn { typedef PostgresqlObjectIdentifier = $ObjectIdentifier; /// PostgreSQL database profile. -/// -/// Next ID: 7. class PostgresqlProfile { /// Database for the PostgreSQL connection. /// @@ -3954,6 +4144,8 @@ class PrivateConnection { /// Labels. core.Map? labels; + /// Identifier. + /// /// The resource's name. /// /// Output only. @@ -4077,6 +4269,8 @@ class Route { /// Labels. core.Map? labels; + /// Identifier. + /// /// The resource's name. /// /// Output only. @@ -4330,9 +4524,13 @@ class SpecificStartPosition { /// Oracle SCN to start replicating from. OracleScnPosition? oracleScnPosition; + /// SqlServer LSN to start replicating from. + SqlServerLsnPosition? sqlServerLsnPosition; + SpecificStartPosition({ this.mysqlLogPosition, this.oracleScnPosition, + this.sqlServerLsnPosition, }); SpecificStartPosition.fromJson(core.Map json_) @@ -4345,11 +4543,17 @@ class SpecificStartPosition { ? OracleScnPosition.fromJson(json_['oracleScnPosition'] as core.Map) : null, + sqlServerLsnPosition: json_.containsKey('sqlServerLsnPosition') + ? SqlServerLsnPosition.fromJson(json_['sqlServerLsnPosition'] + as core.Map) + : null, ); core.Map toJson() => { if (mysqlLogPosition != null) 'mysqlLogPosition': mysqlLogPosition!, if (oracleScnPosition != null) 'oracleScnPosition': oracleScnPosition!, + if (sqlServerLsnPosition != null) + 'sqlServerLsnPosition': sqlServerLsnPosition!, }; } @@ -4417,6 +4621,27 @@ class SqlServerColumn { }; } +/// SQL Server LSN position +class SqlServerLsnPosition { + /// Log sequence number (LSN) from where Logs will be read + /// + /// Required. + core.String? lsn; + + SqlServerLsnPosition({ + this.lsn, + }); + + SqlServerLsnPosition.fromJson(core.Map json_) + : this( + lsn: json_['lsn'] as core.String?, + ); + + core.Map toJson() => { + if (lsn != null) 'lsn': lsn!, + }; +} + /// SQLServer data source object identifier. 
typedef SqlServerObjectIdentifier = $ObjectIdentifier; @@ -4741,6 +4966,8 @@ class Stream { /// Output only. core.String? lastRecoveryTime; + /// Identifier. + /// /// The stream's name. /// /// Output only. @@ -4869,6 +5096,8 @@ class StreamObject { /// Output only. core.List? errors; + /// Identifier. + /// /// The object resource's name. /// /// Output only. diff --git a/generated/googleapis/lib/deploymentmanager/v2.dart b/generated/googleapis/lib/deploymentmanager/v2.dart index b16583fae..697c7a277 100644 --- a/generated/googleapis/lib/deploymentmanager/v2.dart +++ b/generated/googleapis/lib/deploymentmanager/v2.dart @@ -150,6 +150,8 @@ class DeploymentsResource { /// - "DELETE" /// - "ABANDON" /// + /// [header_bypassBillingFilter] - null + /// /// [$fields] - Selector specifying which fields to include in a partial /// response. /// @@ -164,10 +166,13 @@ class DeploymentsResource { core.String project, core.String deployment, { core.String? deletePolicy, + core.bool? header_bypassBillingFilter, core.String? $fields, }) async { final queryParams_ = >{ if (deletePolicy != null) 'deletePolicy': [deletePolicy], + if (header_bypassBillingFilter != null) + 'header.bypassBillingFilter': ['${header_bypassBillingFilter}'], if ($fields != null) 'fields': [$fields], }; @@ -195,6 +200,8 @@ class DeploymentsResource { /// [deployment] - The name of the deployment for this request. /// Value must have pattern `\[a-z\](?:\[-a-z0-9\]{0,61}\[a-z0-9\])?`. /// + /// [header_bypassBillingFilter] - null + /// /// [$fields] - Selector specifying which fields to include in a partial /// response. /// @@ -208,9 +215,12 @@ class DeploymentsResource { async.Future get( core.String project, core.String deployment, { + core.bool? header_bypassBillingFilter, core.String? $fields, }) async { final queryParams_ = >{ + if (header_bypassBillingFilter != null) + 'header.bypassBillingFilter': ['${header_bypassBillingFilter}'], if ($fields != null) 'fields': [$fields], }; @@ -242,6 +252,8 @@ class DeploymentsResource { /// Value must have pattern /// `\[a-z\](?:\[-a-z0-9_\]{0,61}\[a-z0-9\])?|\[1-9\]\[0-9\]{0,19}`. /// + /// [header_bypassBillingFilter] - null + /// /// [optionsRequestedPolicyVersion] - Requested IAM Policy version. /// /// [$fields] - Selector specifying which fields to include in a partial @@ -257,10 +269,13 @@ class DeploymentsResource { async.Future getIamPolicy( core.String project, core.String resource, { + core.bool? header_bypassBillingFilter, core.int? optionsRequestedPolicyVersion, core.String? $fields, }) async { final queryParams_ = >{ + if (header_bypassBillingFilter != null) + 'header.bypassBillingFilter': ['${header_bypassBillingFilter}'], if (optionsRequestedPolicyVersion != null) 'optionsRequestedPolicyVersion': ['${optionsRequestedPolicyVersion}'], if ($fields != null) 'fields': [$fields], @@ -296,6 +311,8 @@ class DeploymentsResource { /// - "CREATE_OR_ACQUIRE" /// - "ACQUIRE" /// + /// [header_bypassBillingFilter] - null + /// /// [preview] - If set to true, creates a deployment and creates "shell" /// resources but does not actually instantiate these resources. This allows /// you to preview what your deployment looks like. After previewing a @@ -319,12 +336,15 @@ class DeploymentsResource { Deployment request, core.String project, { core.String? createPolicy, + core.bool? header_bypassBillingFilter, core.bool? preview, core.String? 
$fields, }) async { final body_ = convert.json.encode(request); final queryParams_ = >{ if (createPolicy != null) 'createPolicy': [createPolicy], + if (header_bypassBillingFilter != null) + 'header.bypassBillingFilter': ['${header_bypassBillingFilter}'], if (preview != null) 'preview': ['${preview}'], if ($fields != null) 'fields': [$fields], }; @@ -465,6 +485,8 @@ class DeploymentsResource { /// - "DELETE" /// - "ABANDON" /// + /// [header_bypassBillingFilter] - null + /// /// [preview] - If set to true, updates the deployment and creates and updates /// the "shell" resources but does not actually alter or instantiate these /// resources. This allows you to preview what your deployment will look like. @@ -492,6 +514,7 @@ class DeploymentsResource { core.String deployment, { core.String? createPolicy, core.String? deletePolicy, + core.bool? header_bypassBillingFilter, core.bool? preview, core.String? $fields, }) async { @@ -499,6 +522,8 @@ class DeploymentsResource { final queryParams_ = >{ if (createPolicy != null) 'createPolicy': [createPolicy], if (deletePolicy != null) 'deletePolicy': [deletePolicy], + if (header_bypassBillingFilter != null) + 'header.bypassBillingFilter': ['${header_bypassBillingFilter}'], if (preview != null) 'preview': ['${preview}'], if ($fields != null) 'fields': [$fields], }; @@ -635,6 +660,8 @@ class DeploymentsResource { /// Value must have pattern /// `\[a-z\](?:\[-a-z0-9_\]{0,61}\[a-z0-9\])?|\[1-9\]\[0-9\]{0,19}`. /// + /// [header_bypassBillingFilter] - null + /// /// [$fields] - Selector specifying which fields to include in a partial /// response. /// @@ -649,10 +676,13 @@ class DeploymentsResource { TestPermissionsRequest request, core.String project, core.String resource, { + core.bool? header_bypassBillingFilter, core.String? $fields, }) async { final body_ = convert.json.encode(request); final queryParams_ = >{ + if (header_bypassBillingFilter != null) + 'header.bypassBillingFilter': ['${header_bypassBillingFilter}'], if ($fields != null) 'fields': [$fields], }; @@ -696,6 +726,8 @@ class DeploymentsResource { /// - "DELETE" /// - "ABANDON" /// + /// [header_bypassBillingFilter] - null + /// /// [preview] - If set to true, updates the deployment and creates and updates /// the "shell" resources but does not actually alter or instantiate these /// resources. This allows you to preview what your deployment will look like. @@ -723,6 +755,7 @@ class DeploymentsResource { core.String deployment, { core.String? createPolicy, core.String? deletePolicy, + core.bool? header_bypassBillingFilter, core.bool? preview, core.String? $fields, }) async { @@ -730,6 +763,8 @@ class DeploymentsResource { final queryParams_ = >{ if (createPolicy != null) 'createPolicy': [createPolicy], if (deletePolicy != null) 'deletePolicy': [deletePolicy], + if (header_bypassBillingFilter != null) + 'header.bypassBillingFilter': ['${header_bypassBillingFilter}'], if (preview != null) 'preview': ['${preview}'], if ($fields != null) 'fields': [$fields], }; @@ -768,6 +803,8 @@ class ManifestsResource { /// [manifest] - The name of the manifest for this request. /// Value must have pattern `\[a-z\](?:\[-a-z0-9\]{0,61}\[a-z0-9\])?`. /// + /// [header_bypassBillingFilter] - null + /// /// [$fields] - Selector specifying which fields to include in a partial /// response. /// @@ -782,9 +819,12 @@ class ManifestsResource { core.String project, core.String deployment, core.String manifest, { + core.bool? header_bypassBillingFilter, core.String? 
$fields, }) async { final queryParams_ = >{ + if (header_bypassBillingFilter != null) + 'header.bypassBillingFilter': ['${header_bypassBillingFilter}'], if ($fields != null) 'fields': [$fields], }; @@ -924,6 +964,8 @@ class OperationsResource { /// /// [operation] - The name of the operation for this request. /// + /// [header_bypassBillingFilter] - null + /// /// [$fields] - Selector specifying which fields to include in a partial /// response. /// @@ -937,9 +979,12 @@ class OperationsResource { async.Future get( core.String project, core.String operation, { + core.bool? header_bypassBillingFilter, core.String? $fields, }) async { final queryParams_ = >{ + if (header_bypassBillingFilter != null) + 'header.bypassBillingFilter': ['${header_bypassBillingFilter}'], if ($fields != null) 'fields': [$fields], }; @@ -1074,6 +1119,8 @@ class ResourcesResource { /// /// [resource] - The name of the resource for this request. /// + /// [header_bypassBillingFilter] - null + /// /// [$fields] - Selector specifying which fields to include in a partial /// response. /// @@ -1088,9 +1135,12 @@ class ResourcesResource { core.String project, core.String deployment, core.String resource, { + core.bool? header_bypassBillingFilter, core.String? $fields, }) async { final queryParams_ = >{ + if (header_bypassBillingFilter != null) + 'header.bypassBillingFilter': ['${header_bypassBillingFilter}'], if ($fields != null) 'fields': [$fields], }; @@ -1557,6 +1607,33 @@ class ConfigFile { }; } +/// Describes additional debugging info. +class DebugInfo { + /// Additional debugging information provided by the server. + core.String? detail; + + /// The stack trace entries indicating where the error occurred. + core.List? stackEntries; + + DebugInfo({ + this.detail, + this.stackEntries, + }); + + DebugInfo.fromJson(core.Map json_) + : this( + detail: json_['detail'] as core.String?, + stackEntries: (json_['stackEntries'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + ); + + core.Map toJson() => { + if (detail != null) 'detail': detail!, + if (stackEntries != null) 'stackEntries': stackEntries!, + }; +} + class Deployment { /// An optional user-provided description of the deployment. core.String? description; @@ -1850,6 +1927,18 @@ class DeploymentsStopRequest { }; } +/// Describes the cause of the error with structured details. +/// +/// Example of an error when contacting the "pubsub.googleapis.com" API when it +/// is not enabled: { "reason": "API_DISABLED" "domain": "googleapis.com" +/// "metadata": { "resource": "projects/123", "service": "pubsub.googleapis.com" +/// } } This response indicates that the pubsub.googleapis.com API is not +/// enabled. Example of an error that is returned when attempting to create a +/// Spanner instance in a region that is out of stock: { "reason": "STOCKOUT" +/// "domain": "spanner.googleapis.com", "metadata": { "availableRegions": +/// "us-central1,us-east2" } } +typedef ErrorInfo = $ErrorInfo; + /// Represents a textual expression in the Common Expression Language (CEL) /// syntax. /// @@ -1893,11 +1982,13 @@ class GlobalSetPolicyRequest { /// in general a valid policy but certain services (like Projects) might /// reject them. Policy? policy; + core.String? updateMask; GlobalSetPolicyRequest({ this.bindings, this.etag, this.policy, + this.updateMask, }); GlobalSetPolicyRequest.fromJson(core.Map json_) @@ -1911,15 +2002,46 @@ class GlobalSetPolicyRequest { ? 
Policy.fromJson( json_['policy'] as core.Map) : null, + updateMask: json_['updateMask'] as core.String?, ); core.Map toJson() => { if (bindings != null) 'bindings': bindings!, if (etag != null) 'etag': etag!, if (policy != null) 'policy': policy!, + if (updateMask != null) 'updateMask': updateMask!, }; } +/// Provides links to documentation or for performing an out of band action. +/// +/// For example, if a quota check failed with an error indicating the calling +/// project hasn't enabled the accessed service, this can contain a URL pointing +/// directly to the right place in the developer console to flip the bit. +class Help { + /// URL(s) pointing to additional information on handling the current error. + core.List? links; + + Help({ + this.links, + }); + + Help.fromJson(core.Map json_) + : this( + links: (json_['links'] as core.List?) + ?.map((value) => HelpLink.fromJson( + value as core.Map)) + .toList(), + ); + + core.Map toJson() => { + if (links != null) 'links': links!, + }; +} + +/// Describes a URL link. +typedef HelpLink = $Link; + class ImportFile { /// The contents of the file. core.String? content; @@ -1972,6 +2094,10 @@ class InstancesBulkInsertOperationMetadata { }; } +/// Provides a localized error message that is safe to return to the user which +/// can be attached to an RPC error. +typedef LocalizedMessage = $LocalizedMessage; + class Manifest { /// The YAML configuration for this manifest. /// @@ -2102,11 +2228,70 @@ class ManifestsListResponse { }; } +class OperationErrorErrorsErrorDetails { + ErrorInfo? errorInfo; + Help? help; + LocalizedMessage? localizedMessage; + QuotaExceededInfo? quotaInfo; + + OperationErrorErrorsErrorDetails({ + this.errorInfo, + this.help, + this.localizedMessage, + this.quotaInfo, + }); + + OperationErrorErrorsErrorDetails.fromJson(core.Map json_) + : this( + errorInfo: json_.containsKey('errorInfo') + ? ErrorInfo.fromJson( + json_['errorInfo'] as core.Map) + : null, + help: json_.containsKey('help') + ? Help.fromJson( + json_['help'] as core.Map) + : null, + localizedMessage: json_.containsKey('localizedMessage') + ? LocalizedMessage.fromJson(json_['localizedMessage'] + as core.Map) + : null, + quotaInfo: json_.containsKey('quotaInfo') + ? QuotaExceededInfo.fromJson( + json_['quotaInfo'] as core.Map) + : null, + ); + + core.Map toJson() => { + if (errorInfo != null) 'errorInfo': errorInfo!, + if (help != null) 'help': help!, + if (localizedMessage != null) 'localizedMessage': localizedMessage!, + if (quotaInfo != null) 'quotaInfo': quotaInfo!, + }; +} + class OperationErrorErrors { + /// Optional error details WARNING: DO NOT MAKE VISIBLE This is for internal + /// use-only (like componentization) (thus the visibility "none") and in case + /// of public exposure it is strongly recommended to follow pattern of: + /// https://aip.dev/193 and expose as details field. + /// + /// Output only. + core.List? arguments; + /// The error type identifier for this error. /// /// Output only. core.String? code; + DebugInfo? debugInfo; + + /// An optional list of messages that contain the error details. + /// + /// There is a set of defined message types to use for providing details.The + /// syntax depends on the error code. For example, QuotaExceededInfo will have + /// details when the error code is QUOTA_EXCEEDED. + /// + /// Output only. + core.List? errorDetails; /// Indicates the field in the request that caused the error. /// @@ -2121,20 +2306,37 @@ class OperationErrorErrors { core.String? 
message; OperationErrorErrors({ + this.arguments, this.code, + this.debugInfo, + this.errorDetails, this.location, this.message, }); OperationErrorErrors.fromJson(core.Map json_) : this( + arguments: (json_['arguments'] as core.List?) + ?.map((value) => value as core.String) + .toList(), code: json_['code'] as core.String?, + debugInfo: json_.containsKey('debugInfo') + ? DebugInfo.fromJson( + json_['debugInfo'] as core.Map) + : null, + errorDetails: (json_['errorDetails'] as core.List?) + ?.map((value) => OperationErrorErrorsErrorDetails.fromJson( + value as core.Map)) + .toList(), location: json_['location'] as core.String?, message: json_['message'] as core.String?, ); core.Map toJson() => { + if (arguments != null) 'arguments': arguments!, if (code != null) 'code': code!, + if (debugInfo != null) 'debugInfo': debugInfo!, + if (errorDetails != null) 'errorDetails': errorDetails!, if (location != null) 'location': location!, if (message != null) 'message': message!, }; @@ -2243,6 +2445,14 @@ class OperationWarnings { /// overridden. Deprecated unused field. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is /// in use. + /// - "NETWORK_ENDPOINT_NOT_DETACHED" : Warning that network endpoint was not + /// detached. + /// - "PAGE_MISSING_RESULTS" : Current page contains less results than + /// requested but a next page token exists. + /// - "SSL_POLICY_ENABLED_FEATURES_NOT_FETCHED" : Warning that SSL policy + /// resource in the response does not contain information about the list of + /// enabled features. + /// - "RESOURCE_NOT_FOUND_WARNING" : Warning that a resource is not found. /// - "MISSING_TYPE_DEPENDENCY" : A resource depends on a missing type /// - "EXTERNAL_API_WARNING" : Warning that is present in an external api call /// - "SCHEMA_VALIDATION_IGNORED" : When a resource schema validation is @@ -2266,6 +2476,19 @@ class OperationWarnings { /// - "LIST_OVERHEAD_QUOTA_EXCEED" : Resource can't be retrieved due to list /// overhead quota exceed which captures the amount of resources filtered out /// by user-defined list filter. + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). + /// - "RESOURCE_USES_GLOBAL_DNS" : Indicates that a VM is using global DNS. + /// Can also be used to indicate that a resource has attributes that could + /// result in the creation of a VM that uses global DNS. + /// - "RATE_LIMIT_EXCEEDED" : Resource can't be retrieved due to api quota + /// exceeded. + /// - "RESERVED_ENTRY_135" : Reserved entries for quickly adding new warnings + /// without breaking dependent clients. + /// - "RESERVED_ENTRY_136" + /// - "RESERVED_ENTRY_139" + /// - "RESERVED_ENTRY_141" + /// - "RESERVED_ENTRY_142" core.String? code; /// Metadata about this warning in key: value format. @@ -2422,6 +2645,11 @@ class Operation { /// Output only. core.String? selfLink; + /// Server-defined URL for this resource with the resource id. + /// + /// Output only. + core.String? selfLinkWithId; + /// If the operation is for projects.setCommonInstanceMetadata, this field /// will contain information on all underlying zonal actions and their state. /// @@ -2459,8 +2687,8 @@ class Operation { /// The URL of the resource that the operation modifies. /// - /// For operations related to creating a snapshot, this points to the - /// persistent disk that the snapshot was created from. + /// For operations related to creating a snapshot, this points to the disk + /// that the snapshot was created from. 
/// /// Output only. core.String? targetLink; @@ -2502,6 +2730,7 @@ class Operation { this.progress, this.region, this.selfLink, + this.selfLinkWithId, this.setCommonInstanceMetadataOperationMetadata, this.startTime, this.status, @@ -2540,6 +2769,7 @@ class Operation { progress: json_['progress'] as core.int?, region: json_['region'] as core.String?, selfLink: json_['selfLink'] as core.String?, + selfLinkWithId: json_['selfLinkWithId'] as core.String?, setCommonInstanceMetadataOperationMetadata: json_.containsKey('setCommonInstanceMetadataOperationMetadata') ? SetCommonInstanceMetadataOperationMetadata.fromJson( @@ -2580,6 +2810,7 @@ class Operation { if (progress != null) 'progress': progress!, if (region != null) 'region': region!, if (selfLink != null) 'selfLink': selfLink!, + if (selfLinkWithId != null) 'selfLinkWithId': selfLinkWithId!, if (setCommonInstanceMetadataOperationMetadata != null) 'setCommonInstanceMetadataOperationMetadata': setCommonInstanceMetadataOperationMetadata!, @@ -2742,6 +2973,71 @@ class Policy { }; } +/// Additional details for quota exceeded error for resource quota. +class QuotaExceededInfo { + /// The map holding related quota dimensions. + core.Map? dimensions; + + /// Future quota limit being rolled out. + /// + /// The limit's unit depends on the quota type or metric. + core.double? futureLimit; + + /// Current effective quota limit. + /// + /// The limit's unit depends on the quota type or metric. + core.double? limit; + + /// The name of the quota limit. + core.String? limitName; + + /// The Compute Engine quota metric name. + core.String? metricName; + + /// Rollout status of the future quota limit. + /// Possible string values are: + /// - "ROLLOUT_STATUS_UNSPECIFIED" : ROLLOUT_STATUS_UNSPECIFIED - Rollout + /// status is not specified. The default value. + /// - "IN_PROGRESS" : IN_PROGRESS - A rollout is in process which will change + /// the limit value to future limit. + core.String? rolloutStatus; + + QuotaExceededInfo({ + this.dimensions, + this.futureLimit, + this.limit, + this.limitName, + this.metricName, + this.rolloutStatus, + }); + + QuotaExceededInfo.fromJson(core.Map json_) + : this( + dimensions: + (json_['dimensions'] as core.Map?) + ?.map( + (key, value) => core.MapEntry( + key, + value as core.String, + ), + ), + futureLimit: (json_['futureLimit'] as core.num?)?.toDouble(), + limit: (json_['limit'] as core.num?)?.toDouble(), + limitName: json_['limitName'] as core.String?, + metricName: json_['metricName'] as core.String?, + rolloutStatus: json_['rolloutStatus'] as core.String?, + ); + + core.Map toJson() => { + if (dimensions != null) 'dimensions': dimensions!, + if (futureLimit != null) 'futureLimit': futureLimit!, + if (limit != null) 'limit': limit!, + if (limitName != null) 'limitName': limitName!, + if (metricName != null) 'metricName': metricName!, + if (rolloutStatus != null) 'rolloutStatus': rolloutStatus!, + }; +} + class ResourceWarningsData { /// A key that provides more detail on the warning being returned. /// @@ -2818,6 +3114,14 @@ class ResourceWarnings { /// overridden. Deprecated unused field. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is /// in use. + /// - "NETWORK_ENDPOINT_NOT_DETACHED" : Warning that network endpoint was not + /// detached. + /// - "PAGE_MISSING_RESULTS" : Current page contains less results than + /// requested but a next page token exists. 
+ /// - "SSL_POLICY_ENABLED_FEATURES_NOT_FETCHED" : Warning that SSL policy + /// resource in the response does not contain information about the list of + /// enabled features. + /// - "RESOURCE_NOT_FOUND_WARNING" : Warning that a resource is not found. /// - "MISSING_TYPE_DEPENDENCY" : A resource depends on a missing type /// - "EXTERNAL_API_WARNING" : Warning that is present in an external api call /// - "SCHEMA_VALIDATION_IGNORED" : When a resource schema validation is @@ -2841,6 +3145,19 @@ class ResourceWarnings { /// - "LIST_OVERHEAD_QUOTA_EXCEED" : Resource can't be retrieved due to list /// overhead quota exceed which captures the amount of resources filtered out /// by user-defined list filter. + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). + /// - "RESOURCE_USES_GLOBAL_DNS" : Indicates that a VM is using global DNS. + /// Can also be used to indicate that a resource has attributes that could + /// result in the creation of a VM that uses global DNS. + /// - "RATE_LIMIT_EXCEEDED" : Resource can't be retrieved due to api quota + /// exceeded. + /// - "RESERVED_ENTRY_135" : Reserved entries for quickly adding new warnings + /// without breaking dependent clients. + /// - "RESERVED_ENTRY_136" + /// - "RESERVED_ENTRY_139" + /// - "RESERVED_ENTRY_141" + /// - "RESERVED_ENTRY_142" core.String? code; /// Metadata about this warning in key: value format. @@ -3017,11 +3334,70 @@ class ResourceAccessControl { }; } +class ResourceUpdateErrorErrorsErrorDetails { + ErrorInfo? errorInfo; + Help? help; + LocalizedMessage? localizedMessage; + QuotaExceededInfo? quotaInfo; + + ResourceUpdateErrorErrorsErrorDetails({ + this.errorInfo, + this.help, + this.localizedMessage, + this.quotaInfo, + }); + + ResourceUpdateErrorErrorsErrorDetails.fromJson(core.Map json_) + : this( + errorInfo: json_.containsKey('errorInfo') + ? ErrorInfo.fromJson( + json_['errorInfo'] as core.Map) + : null, + help: json_.containsKey('help') + ? Help.fromJson( + json_['help'] as core.Map) + : null, + localizedMessage: json_.containsKey('localizedMessage') + ? LocalizedMessage.fromJson(json_['localizedMessage'] + as core.Map) + : null, + quotaInfo: json_.containsKey('quotaInfo') + ? QuotaExceededInfo.fromJson( + json_['quotaInfo'] as core.Map) + : null, + ); + + core.Map toJson() => { + if (errorInfo != null) 'errorInfo': errorInfo!, + if (help != null) 'help': help!, + if (localizedMessage != null) 'localizedMessage': localizedMessage!, + if (quotaInfo != null) 'quotaInfo': quotaInfo!, + }; +} + class ResourceUpdateErrorErrors { + /// Optional error details WARNING: DO NOT MAKE VISIBLE This is for internal + /// use-only (like componentization) (thus the visibility "none") and in case + /// of public exposure it is strongly recommended to follow pattern of: + /// https://aip.dev/193 and expose as details field. + /// + /// Output only. + core.List? arguments; + /// The error type identifier for this error. /// /// Output only. core.String? code; + DebugInfo? debugInfo; + + /// An optional list of messages that contain the error details. + /// + /// There is a set of defined message types to use for providing details.The + /// syntax depends on the error code. For example, QuotaExceededInfo will have + /// details when the error code is QUOTA_EXCEEDED. + /// + /// Output only. + core.List? errorDetails; /// Indicates the field in the request that caused the error. /// @@ -3036,20 +3412,37 @@ class ResourceUpdateErrorErrors { core.String? 
message; ResourceUpdateErrorErrors({ + this.arguments, this.code, + this.debugInfo, + this.errorDetails, this.location, this.message, }); ResourceUpdateErrorErrors.fromJson(core.Map json_) : this( + arguments: (json_['arguments'] as core.List?) + ?.map((value) => value as core.String) + .toList(), code: json_['code'] as core.String?, + debugInfo: json_.containsKey('debugInfo') + ? DebugInfo.fromJson( + json_['debugInfo'] as core.Map) + : null, + errorDetails: (json_['errorDetails'] as core.List?) + ?.map((value) => ResourceUpdateErrorErrorsErrorDetails.fromJson( + value as core.Map)) + .toList(), location: json_['location'] as core.String?, message: json_['message'] as core.String?, ); core.Map toJson() => { + if (arguments != null) 'arguments': arguments!, if (code != null) 'code': code!, + if (debugInfo != null) 'debugInfo': debugInfo!, + if (errorDetails != null) 'errorDetails': errorDetails!, if (location != null) 'location': location!, if (message != null) 'message': message!, }; @@ -3158,6 +3551,14 @@ class ResourceUpdateWarnings { /// overridden. Deprecated unused field. /// - "RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING" : Warning that a resource is /// in use. + /// - "NETWORK_ENDPOINT_NOT_DETACHED" : Warning that network endpoint was not + /// detached. + /// - "PAGE_MISSING_RESULTS" : Current page contains less results than + /// requested but a next page token exists. + /// - "SSL_POLICY_ENABLED_FEATURES_NOT_FETCHED" : Warning that SSL policy + /// resource in the response does not contain information about the list of + /// enabled features. + /// - "RESOURCE_NOT_FOUND_WARNING" : Warning that a resource is not found. /// - "MISSING_TYPE_DEPENDENCY" : A resource depends on a missing type /// - "EXTERNAL_API_WARNING" : Warning that is present in an external api call /// - "SCHEMA_VALIDATION_IGNORED" : When a resource schema validation is @@ -3181,6 +3582,19 @@ class ResourceUpdateWarnings { /// - "LIST_OVERHEAD_QUOTA_EXCEED" : Resource can't be retrieved due to list /// overhead quota exceed which captures the amount of resources filtered out /// by user-defined list filter. + /// - "QUOTA_INFO_UNAVAILABLE" : Quota information is not available to client + /// requests (e.g: regions.list). + /// - "RESOURCE_USES_GLOBAL_DNS" : Indicates that a VM is using global DNS. + /// Can also be used to indicate that a resource has attributes that could + /// result in the creation of a VM that uses global DNS. + /// - "RATE_LIMIT_EXCEEDED" : Resource can't be retrieved due to api quota + /// exceeded. + /// - "RESERVED_ENTRY_135" : Reserved entries for quickly adding new warnings + /// without breaking dependent clients. + /// - "RESERVED_ENTRY_136" + /// - "RESERVED_ENTRY_139" + /// - "RESERVED_ENTRY_141" + /// - "RESERVED_ENTRY_142" core.String? code; /// Metadata about this warning in key: value format. diff --git a/generated/googleapis/lib/developerconnect/v1.dart b/generated/googleapis/lib/developerconnect/v1.dart index eab983350..25f764921 100644 --- a/generated/googleapis/lib/developerconnect/v1.dart +++ b/generated/googleapis/lib/developerconnect/v1.dart @@ -1095,8 +1095,8 @@ class ProjectsLocationsOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. 
+ /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// [request] - The metadata request object. /// @@ -1343,7 +1343,7 @@ class Connection { /// Output only. core.bool? reconciling; - /// A system-assigned unique identifier for a the GitRepositoryLink. + /// A system-assigned unique identifier for the Connection. /// /// Output only. core.String? uid; @@ -1922,7 +1922,7 @@ class GitRepositoryLink { /// Output only. core.bool? reconciling; - /// A system-assigned unique identifier for a the GitRepositoryLink. + /// A system-assigned unique identifier for the GitRepositoryLink. /// /// Output only. core.String? uid; diff --git a/generated/googleapis/lib/dialogflow/v2.dart b/generated/googleapis/lib/dialogflow/v2.dart index afa024f4c..bb647bec6 100644 --- a/generated/googleapis/lib/dialogflow/v2.dart +++ b/generated/googleapis/lib/dialogflow/v2.dart @@ -4727,7 +4727,7 @@ class ProjectsConversationProfilesResource { /// Creates a conversation profile in the specified project. /// - /// ConversationProfile.CreateTime and ConversationProfile.UpdateTime aren't + /// ConversationProfile.create_time and ConversationProfile.update_time aren't /// populated in the response. You can retrieve them via /// GetConversationProfile API. /// @@ -4897,7 +4897,7 @@ class ProjectsConversationProfilesResource { /// Updates the specified conversation profile. /// - /// ConversationProfile.CreateTime and ConversationProfile.UpdateTime aren't + /// ConversationProfile.create_time and ConversationProfile.update_time aren't /// populated in the response. You can retrieve them via /// GetConversationProfile API. /// @@ -10925,7 +10925,7 @@ class ProjectsLocationsConversationProfilesResource { /// Creates a conversation profile in the specified project. /// - /// ConversationProfile.CreateTime and ConversationProfile.UpdateTime aren't + /// ConversationProfile.create_time and ConversationProfile.update_time aren't /// populated in the response. You can retrieve them via /// GetConversationProfile API. /// @@ -11097,7 +11097,7 @@ class ProjectsLocationsConversationProfilesResource { /// Updates the specified conversation profile. /// - /// ConversationProfile.CreateTime and ConversationProfile.UpdateTime aren't + /// ConversationProfile.create_time and ConversationProfile.update_time aren't /// populated in the response. You can retrieve them via /// GetConversationProfile API. /// @@ -12975,8 +12975,8 @@ class ProjectsLocationsOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// Request parameters: /// @@ -13268,8 +13268,8 @@ class ProjectsOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. 
/// /// Request parameters: /// @@ -14043,9 +14043,10 @@ class GoogleCloudDialogflowV2AnalyzeContentRequest { class GoogleCloudDialogflowV2AnalyzeContentResponse { /// Only set if a Dialogflow automated agent has responded. /// - /// Note that: AutomatedAgentReply.detect_intent_response.output_audio and - /// AutomatedAgentReply.detect_intent_response.output_audio_config are always - /// empty, use reply_audio instead. + /// Note that in AutomatedAgentReply.DetectIntentResponse, + /// Sessions.DetectIntentResponse.output_audio and + /// Sessions.DetectIntentResponse.output_audio_config are always empty, use + /// reply_audio instead. GoogleCloudDialogflowV2AutomatedAgentReply? automatedAgentReply; /// Indicates the parameters of DTMF. @@ -14277,8 +14278,8 @@ class GoogleCloudDialogflowV2AnswerFeedback { /// is: 1. For human agent assistant, customers get suggestion via /// ListSuggestions API. Together with the answers, AnswerRecord.name are /// returned to the customers. 2. The customer uses the AnswerRecord.name to -/// call the UpdateAnswerRecord method to send feedback about a specific answer -/// that they believe is wrong. +/// call the AnswerRecords.UpdateAnswerRecord method to send feedback about a +/// specific answer that they believe is wrong. class GoogleCloudDialogflowV2AnswerRecord { /// The record for human agent assistant. /// @@ -14842,7 +14843,7 @@ class GoogleCloudDialogflowV2BatchUpdateIntentsRequest { }; } -/// The request message for ConversationProfiles.ClearFeature. +/// The request message for ConversationProfiles.ClearSuggestionFeatureConfig. class GoogleCloudDialogflowV2ClearSuggestionFeatureConfigRequest { /// The participant role to remove the suggestion feature config. /// @@ -16746,8 +16747,6 @@ class GoogleCloudDialogflowV2FaqAnswer { /// Providing examples in the generator (i.e. building a few-shot generator) /// helps convey the desired format of the LLM response. -/// -/// NEXT_ID: 10 class GoogleCloudDialogflowV2FewShotExample { /// Conversation transcripts. /// @@ -17219,12 +17218,12 @@ class GoogleCloudDialogflowV2GenerateStatelessSummaryRequestMinimalConversation /// The response message for Conversations.GenerateStatelessSummary. class GoogleCloudDialogflowV2GenerateStatelessSummaryResponse { - /// Number of messages prior to and including last_conversation_message used - /// to compile the suggestion. + /// Number of messages prior to and including latest_message used to compile + /// the suggestion. /// - /// It may be smaller than the GenerateStatelessSummaryRequest.context_size - /// field in the request if there weren't that many messages in the - /// conversation. + /// It may be smaller than the + /// GenerateStatelessSummaryRequest.max_context_size field in the request if + /// there weren't that many messages in the conversation. core.int? contextSize; /// The name of the latest conversation message used as context for compiling @@ -17823,6 +17822,13 @@ class GoogleCloudDialogflowV2HumanAgentAssistantConfigSuggestionQueryConfig { GoogleCloudDialogflowV2HumanAgentAssistantConfigSuggestionQueryConfigContextFilterSettings? contextFilterSettings; + /// The number of recent messages to include in the context. + /// + /// Supported features: KNOWLEDGE_ASSIST. + /// + /// Optional. + core.int? contextSize; + /// Query from Dialogflow agent. /// /// It is used by DIALOGFLOW_ASSIST. 
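// --- Illustrative usage sketch; not part of the generated diff. ---
// The hunk above adds an optional `contextSize` field to
// GoogleCloudDialogflowV2HumanAgentAssistantConfigSuggestionQueryConfig
// (how many recent messages feed KNOWLEDGE_ASSIST suggestions). A minimal
// sketch of how a caller might populate it, assuming the published import
// path `package:googleapis/dialogflow/v2.dart`; the literal values below
// are arbitrary examples, not API defaults.

import 'package:googleapis/dialogflow/v2.dart' as dialogflow;

void suggestionQueryConfigSketch() {
  final queryConfig = dialogflow
      .GoogleCloudDialogflowV2HumanAgentAssistantConfigSuggestionQueryConfig(
    contextSize: 5, // new optional field; supported for KNOWLEDGE_ASSIST
    confidenceThreshold: 0.4,
  );
  // toJson() serializes only the fields that were set, e.g.
  // {confidenceThreshold: 0.4, contextSize: 5}.
  print(queryConfig.toJson());
}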
@@ -17856,6 +17862,7 @@ class GoogleCloudDialogflowV2HumanAgentAssistantConfigSuggestionQueryConfig { GoogleCloudDialogflowV2HumanAgentAssistantConfigSuggestionQueryConfig({ this.confidenceThreshold, this.contextFilterSettings, + this.contextSize, this.dialogflowQuerySource, this.documentQuerySource, this.knowledgeBaseQuerySource, @@ -17873,6 +17880,7 @@ class GoogleCloudDialogflowV2HumanAgentAssistantConfigSuggestionQueryConfig { .fromJson(json_['contextFilterSettings'] as core.Map) : null, + contextSize: json_['contextSize'] as core.int?, dialogflowQuerySource: json_.containsKey('dialogflowQuerySource') ? GoogleCloudDialogflowV2HumanAgentAssistantConfigSuggestionQueryConfigDialogflowQuerySource .fromJson(json_['dialogflowQuerySource'] @@ -17902,6 +17910,7 @@ class GoogleCloudDialogflowV2HumanAgentAssistantConfigSuggestionQueryConfig { 'confidenceThreshold': confidenceThreshold!, if (contextFilterSettings != null) 'contextFilterSettings': contextFilterSettings!, + if (contextSize != null) 'contextSize': contextSize!, if (dialogflowQuerySource != null) 'dialogflowQuerySource': dialogflowQuerySource!, if (documentQuerySource != null) @@ -18623,6 +18632,9 @@ class GoogleCloudDialogflowV2InputAudioConfig { ) core.List? phraseHints; + /// A collection of phrase set resources to use for speech adaptation. + core.List? phraseSets; + /// Sample rate (in Hertz) of the audio content sent in the query. /// /// Refer to @@ -18661,6 +18673,7 @@ class GoogleCloudDialogflowV2InputAudioConfig { this.modelVariant, this.optOutConformerModelMigration, this.phraseHints, + this.phraseSets, this.sampleRateHertz, this.singleUtterance, this.speechContexts, @@ -18682,6 +18695,9 @@ class GoogleCloudDialogflowV2InputAudioConfig { phraseHints: (json_['phraseHints'] as core.List?) ?.map((value) => value as core.String) .toList(), + phraseSets: (json_['phraseSets'] as core.List?) + ?.map((value) => value as core.String) + .toList(), sampleRateHertz: json_['sampleRateHertz'] as core.int?, singleUtterance: json_['singleUtterance'] as core.bool?, speechContexts: (json_['speechContexts'] as core.List?) @@ -18703,6 +18719,7 @@ class GoogleCloudDialogflowV2InputAudioConfig { if (optOutConformerModelMigration != null) 'optOutConformerModelMigration': optOutConformerModelMigration!, if (phraseHints != null) 'phraseHints': phraseHints!, + if (phraseSets != null) 'phraseSets': phraseSets!, if (sampleRateHertz != null) 'sampleRateHertz': sampleRateHertz!, if (singleUtterance != null) 'singleUtterance': singleUtterance!, if (speechContexts != null) 'speechContexts': speechContexts!, @@ -20702,6 +20719,12 @@ class GoogleCloudDialogflowV2KnowledgeAssistAnswerKnowledgeAnswerGenerativeSourc /// Snippet Source for a Generative Prediction. class GoogleCloudDialogflowV2KnowledgeAssistAnswerKnowledgeAnswerGenerativeSourceSnippet { + /// Metadata of the document. + /// + /// The values for Object must be JSON objects. It can consist of `num`, + /// `String`, `bool` and `null` as well as `Map` and `List` values. + core.Map? metadata; + /// Text taken from that URI. core.String? text; @@ -20712,6 +20735,7 @@ class GoogleCloudDialogflowV2KnowledgeAssistAnswerKnowledgeAnswerGenerativeSourc core.String? 
uri; GoogleCloudDialogflowV2KnowledgeAssistAnswerKnowledgeAnswerGenerativeSourceSnippet({ + this.metadata, this.text, this.title, this.uri, @@ -20720,12 +20744,16 @@ class GoogleCloudDialogflowV2KnowledgeAssistAnswerKnowledgeAnswerGenerativeSourc GoogleCloudDialogflowV2KnowledgeAssistAnswerKnowledgeAnswerGenerativeSourceSnippet.fromJson( core.Map json_) : this( + metadata: json_.containsKey('metadata') + ? json_['metadata'] as core.Map + : null, text: json_['text'] as core.String?, title: json_['title'] as core.String?, uri: json_['uri'] as core.String?, ); core.Map toJson() => { + if (metadata != null) 'metadata': metadata!, if (text != null) 'text': text!, if (title != null) 'title': title!, if (uri != null) 'uri': uri!, @@ -22344,6 +22372,12 @@ class GoogleCloudDialogflowV2SearchKnowledgeAnswer { /// The sources of the answers. class GoogleCloudDialogflowV2SearchKnowledgeAnswerAnswerSource { + /// Metadata associated with the article. + /// + /// The values for Object must be JSON objects. It can consist of `num`, + /// `String`, `bool` and `null` as well as `Map` and `List` values. + core.Map? metadata; + /// The relevant snippet of the article. core.String? snippet; @@ -22354,6 +22388,7 @@ class GoogleCloudDialogflowV2SearchKnowledgeAnswerAnswerSource { core.String? uri; GoogleCloudDialogflowV2SearchKnowledgeAnswerAnswerSource({ + this.metadata, this.snippet, this.title, this.uri, @@ -22362,12 +22397,16 @@ class GoogleCloudDialogflowV2SearchKnowledgeAnswerAnswerSource { GoogleCloudDialogflowV2SearchKnowledgeAnswerAnswerSource.fromJson( core.Map json_) : this( + metadata: json_.containsKey('metadata') + ? json_['metadata'] as core.Map + : null, snippet: json_['snippet'] as core.String?, title: json_['title'] as core.String?, uri: json_['uri'] as core.String?, ); core.Map toJson() => { + if (metadata != null) 'metadata': metadata!, if (snippet != null) 'snippet': snippet!, if (title != null) 'title': title!, if (uri != null) 'uri': uri!, @@ -22391,6 +22430,26 @@ class GoogleCloudDialogflowV2SearchKnowledgeRequest { /// Required. core.String? conversationProfile; + /// Information about the end-user to improve the relevance and accuracy of + /// generative answers. + /// + /// This will be interpreted and used by a language model, so, for good + /// results, the data should be self-descriptive, and in a simple structure. + /// Example: ```json { "subscription plan": "Business Premium Plus", "devices + /// owned": [ {"model": "Google Pixel 7"}, {"model": "Google Pixel Tablet"} ] + /// } ``` + /// + /// Optional. + /// + /// The values for Object must be JSON objects. It can consist of `num`, + /// `String`, `bool` and `null` as well as `Map` and `List` values. + core.Map? endUserMetadata; + + /// Whether to search the query exactly without query rewrite. + /// + /// Optional. + core.bool? exactSearch; + /// The name of the latest conversation message when the request is triggered. /// /// Format: `projects//locations//conversations//messages/`. @@ -22409,6 +22468,21 @@ class GoogleCloudDialogflowV2SearchKnowledgeRequest { /// Required. GoogleCloudDialogflowV2TextInput? query; + /// The source of the query in the request. + /// + /// Optional. + /// Possible string values are: + /// - "QUERY_SOURCE_UNSPECIFIED" : Unknown query source. + /// - "AGENT_QUERY" : The query is from agents. + /// - "SUGGESTED_QUERY" : The query is a suggested query from + /// Participants.SuggestKnowledgeAssist. + core.String? querySource; + + /// Configuration specific to search queries with data stores. 
+ /// + /// Optional. + GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfig? searchConfig; + /// The ID of the search session. /// /// The session_id can be combined with Dialogflow V3 Agent ID retrieved from @@ -22424,9 +22498,13 @@ class GoogleCloudDialogflowV2SearchKnowledgeRequest { GoogleCloudDialogflowV2SearchKnowledgeRequest({ this.conversation, this.conversationProfile, + this.endUserMetadata, + this.exactSearch, this.latestMessage, this.parent, this.query, + this.querySource, + this.searchConfig, this.sessionId, }); @@ -22434,12 +22512,22 @@ class GoogleCloudDialogflowV2SearchKnowledgeRequest { : this( conversation: json_['conversation'] as core.String?, conversationProfile: json_['conversationProfile'] as core.String?, + endUserMetadata: json_.containsKey('endUserMetadata') + ? json_['endUserMetadata'] as core.Map + : null, + exactSearch: json_['exactSearch'] as core.bool?, latestMessage: json_['latestMessage'] as core.String?, parent: json_['parent'] as core.String?, query: json_.containsKey('query') ? GoogleCloudDialogflowV2TextInput.fromJson( json_['query'] as core.Map) : null, + querySource: json_['querySource'] as core.String?, + searchConfig: json_.containsKey('searchConfig') + ? GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfig + .fromJson(json_['searchConfig'] + as core.Map) + : null, sessionId: json_['sessionId'] as core.String?, ); @@ -22447,13 +22535,326 @@ class GoogleCloudDialogflowV2SearchKnowledgeRequest { if (conversation != null) 'conversation': conversation!, if (conversationProfile != null) 'conversationProfile': conversationProfile!, + if (endUserMetadata != null) 'endUserMetadata': endUserMetadata!, + if (exactSearch != null) 'exactSearch': exactSearch!, if (latestMessage != null) 'latestMessage': latestMessage!, if (parent != null) 'parent': parent!, if (query != null) 'query': query!, + if (querySource != null) 'querySource': querySource!, + if (searchConfig != null) 'searchConfig': searchConfig!, if (sessionId != null) 'sessionId': sessionId!, }; } +/// Configuration specific to search queries with data stores. +class GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfig { + /// Boost specifications for data stores. + /// + /// Optional. + core.List< + GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecs>? + boostSpecs; + + /// Filter specification for data store queries. + /// + /// Optional. + core.List< + GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigFilterSpecs>? + filterSpecs; + + GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfig({ + this.boostSpecs, + this.filterSpecs, + }); + + GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfig.fromJson( + core.Map json_) + : this( + boostSpecs: (json_['boostSpecs'] as core.List?) + ?.map((value) => + GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecs + .fromJson(value as core.Map)) + .toList(), + filterSpecs: (json_['filterSpecs'] as core.List?) + ?.map((value) => + GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigFilterSpecs + .fromJson(value as core.Map)) + .toList(), + ); + + core.Map toJson() => { + if (boostSpecs != null) 'boostSpecs': boostSpecs!, + if (filterSpecs != null) 'filterSpecs': filterSpecs!, + }; +} + +/// Boost specifications for data stores. +class GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecs { + /// Data Stores where the boosting configuration is applied. + /// + /// The full names of the referenced data stores. 
Formats: + /// `projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}` + /// `projects/{project}/locations/{location}/dataStores/{data_store}` + /// + /// Optional. + core.List? dataStores; + + /// A list of boosting specifications. + /// + /// Optional. + core.List< + GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpec>? + spec; + + GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecs({ + this.dataStores, + this.spec, + }); + + GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecs.fromJson( + core.Map json_) + : this( + dataStores: (json_['dataStores'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + spec: (json_['spec'] as core.List?) + ?.map((value) => + GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpec + .fromJson(value as core.Map)) + .toList(), + ); + + core.Map toJson() => { + if (dataStores != null) 'dataStores': dataStores!, + if (spec != null) 'spec': spec!, + }; +} + +/// Boost specification to boost certain documents. +/// +/// A copy of google.cloud.discoveryengine.v1main.BoostSpec, field documentation +/// is available at +/// https://cloud.google.com/generative-ai-app-builder/docs/reference/rest/v1alpha/BoostSpec +class GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpec { + /// Condition boost specifications. + /// + /// If a document matches multiple conditions in the specifictions, boost + /// scores from these specifications are all applied and combined in a + /// non-linear way. Maximum number of specifications is 20. + /// + /// Optional. + core.List< + GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpecConditionBoostSpec>? + conditionBoostSpecs; + + GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpec({ + this.conditionBoostSpecs, + }); + + GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpec.fromJson( + core.Map json_) + : this( + conditionBoostSpecs: (json_['conditionBoostSpecs'] as core.List?) + ?.map((value) => + GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpecConditionBoostSpec + .fromJson(value as core.Map)) + .toList(), + ); + + core.Map toJson() => { + if (conditionBoostSpecs != null) + 'conditionBoostSpecs': conditionBoostSpecs!, + }; +} + +/// Boost applies to documents which match a condition. +class GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpecConditionBoostSpec { + /// Strength of the condition boost, which should be in \[-1, 1\]. + /// + /// Negative boost means demotion. Default is 0.0. Setting to 1.0 gives the + /// document a big promotion. However, it does not necessarily mean that the + /// boosted document will be the top result at all times, nor that other + /// documents will be excluded. Results could still be shown even when none of + /// them matches the condition. And results that are significantly more + /// relevant to the search query can still trump your heavily favored but + /// irrelevant documents. Setting to -1.0 gives the document a big demotion. + /// However, results that are deeply relevant might still be shown. The + /// document will have an upstream battle to get a fairly high ranking, but it + /// is not blocked out completely. Setting to 0.0 means no boost applied. The + /// boosting condition is ignored. + /// + /// Optional. + core.double? 
boost; + + /// Complex specification for custom ranking based on customer defined + /// attribute value. + /// + /// Optional. + GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpecConditionBoostSpecBoostControlSpec? + boostControlSpec; + + /// An expression which specifies a boost condition. + /// + /// The syntax and supported fields are the same as a filter expression. + /// Examples: * To boost documents with document ID "doc_1" or "doc_2", and + /// color "Red" or "Blue": * (id: ANY("doc_1", "doc_2")) AND (color: + /// ANY("Red","Blue")) + /// + /// Optional. + core.String? condition; + + GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpecConditionBoostSpec({ + this.boost, + this.boostControlSpec, + this.condition, + }); + + GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpecConditionBoostSpec.fromJson( + core.Map json_) + : this( + boost: (json_['boost'] as core.num?)?.toDouble(), + boostControlSpec: json_.containsKey('boostControlSpec') + ? GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpecConditionBoostSpecBoostControlSpec + .fromJson(json_['boostControlSpec'] + as core.Map) + : null, + condition: json_['condition'] as core.String?, + ); + + core.Map toJson() => { + if (boost != null) 'boost': boost!, + if (boostControlSpec != null) 'boostControlSpec': boostControlSpec!, + if (condition != null) 'condition': condition!, + }; +} + +/// Specification for custom ranking based on customer specified attribute +/// value. +/// +/// It provides more controls for customized ranking than the simple (condition, +/// boost) combination above. +class GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpecConditionBoostSpecBoostControlSpec { + /// The attribute type to be used to determine the boost amount. + /// + /// The attribute value can be derived from the field value of the specified + /// field_name. In the case of numerical it is straightforward i.e. + /// attribute_value = numerical_field_value. In the case of freshness however, + /// attribute_value = (time.now() - datetime_field_value). + /// + /// Optional. + /// Possible string values are: + /// - "ATTRIBUTE_TYPE_UNSPECIFIED" : Unspecified AttributeType. + /// - "NUMERICAL" : The value of the numerical field will be used to + /// dynamically update the boost amount. In this case, the attribute_value + /// (the x value) of the control point will be the actual value of the + /// numerical field for which the boost_amount is specified. + /// - "FRESHNESS" : For the freshness use case the attribute value will be the + /// duration between the current time and the date in the datetime field + /// specified. The value must be formatted as an XSD `dayTimeDuration` value + /// (a restricted subset of an ISO 8601 duration value). The pattern for this + /// is: `nDnM]`. E.g. `5D`, `3DT12H30M`, `T24H`. + core.String? attributeType; + + /// The control points used to define the curve. + /// + /// The monotonic function (defined through the interpolation_type above) + /// passes through the control points listed here. + /// + /// Optional. + core.List< + GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpecConditionBoostSpecBoostControlSpecControlPoint>? + controlPoints; + + /// The name of the field whose value will be used to determine the boost + /// amount. + /// + /// Optional. + core.String? fieldName; + + /// The interpolation type to be applied to connect the control points listed + /// below. 
+ /// + /// Optional. + /// Possible string values are: + /// - "INTERPOLATION_TYPE_UNSPECIFIED" : Interpolation type is unspecified. In + /// this case, it defaults to Linear. + /// - "LINEAR" : Piecewise linear interpolation will be applied. + core.String? interpolationType; + + GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpecConditionBoostSpecBoostControlSpec({ + this.attributeType, + this.controlPoints, + this.fieldName, + this.interpolationType, + }); + + GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpecConditionBoostSpecBoostControlSpec.fromJson( + core.Map json_) + : this( + attributeType: json_['attributeType'] as core.String?, + controlPoints: (json_['controlPoints'] as core.List?) + ?.map((value) => + GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpecConditionBoostSpecBoostControlSpecControlPoint + .fromJson(value as core.Map)) + .toList(), + fieldName: json_['fieldName'] as core.String?, + interpolationType: json_['interpolationType'] as core.String?, + ); + + core.Map toJson() => { + if (attributeType != null) 'attributeType': attributeType!, + if (controlPoints != null) 'controlPoints': controlPoints!, + if (fieldName != null) 'fieldName': fieldName!, + if (interpolationType != null) 'interpolationType': interpolationType!, + }; +} + +/// The control points used to define the curve. +/// +/// The curve defined through these control points can only be monotonically +/// increasing or decreasing(constant values are acceptable). +typedef GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpecConditionBoostSpecBoostControlSpecControlPoint + = $BoostSpecConditionBoostSpecBoostControlSpecControlPoint; + +/// Filter specification for data store queries. +class GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigFilterSpecs { + /// The data store where the filter configuration is applied. + /// + /// Full resource name of data store, such as + /// projects/{project}/locations/{location}/collections/{collectionId}/ + /// dataStores/{dataStoreId}. + /// + /// Optional. + core.List? dataStores; + + /// The filter expression to be applied. + /// + /// Expression syntax is documented at + /// https://cloud.google.com/generative-ai-app-builder/docs/filter-search-metadata#filter-expression-syntax + /// + /// Optional. + core.String? filter; + + GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigFilterSpecs({ + this.dataStores, + this.filter, + }); + + GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigFilterSpecs.fromJson( + core.Map json_) + : this( + dataStores: (json_['dataStores'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + filter: json_['filter'] as core.String?, + ); + + core.Map toJson() => { + if (dataStores != null) 'dataStores': dataStores!, + if (filter != null) 'filter': filter!, + }; +} + /// The response message for Conversations.SearchKnowledge. class GoogleCloudDialogflowV2SearchKnowledgeResponse { /// Most relevant snippets extracted from articles in the given knowledge @@ -22490,7 +22891,7 @@ class GoogleCloudDialogflowV2SearchKnowledgeResponse { /// See: /// https://cloud.google.com/natural-language/docs/basics#interpreting_sentiment_analysis_values /// for how to interpret the result. -typedef GoogleCloudDialogflowV2Sentiment = $Shared11; +typedef GoogleCloudDialogflowV2Sentiment = $Shared12; /// Configures the types of sentiment analysis to perform. 
class GoogleCloudDialogflowV2SentimentAnalysisRequestConfig { @@ -22519,10 +22920,9 @@ class GoogleCloudDialogflowV2SentimentAnalysisRequestConfig { /// /// Sentiment analysis inspects user input and identifies the prevailing /// subjective opinion, especially to determine a user's attitude as positive, -/// negative, or neutral. For Participants.DetectIntent, it needs to be -/// configured in DetectIntentRequest.query_params. For -/// Participants.StreamingDetectIntent, it needs to be configured in -/// StreamingDetectIntentRequest.query_params. And for +/// negative, or neutral. For DetectIntent, it needs to be configured in +/// DetectIntentRequest.query_params. For StreamingDetectIntent, it needs to be +/// configured in StreamingDetectIntentRequest.query_params. And for /// Participants.AnalyzeContent and Participants.StreamingAnalyzeContent, it /// needs to be configured in ConversationProfile.human_agent_assistant_config class GoogleCloudDialogflowV2SentimentAnalysisResult { @@ -22618,7 +23018,7 @@ class GoogleCloudDialogflowV2SessionEntityType { }; } -/// The request message for ConversationProfiles.SetSuggestionFeature. +/// The request message for ConversationProfiles.SetSuggestionFeatureConfig. class GoogleCloudDialogflowV2SetSuggestionFeatureConfigRequest { /// The participant role to add or update the suggestion feature config. /// @@ -22914,6 +23314,9 @@ class GoogleCloudDialogflowV2SpeechToTextConfig { /// for model selection. core.String? model; + /// List of names of Cloud Speech phrase sets that are used for transcription. + core.List? phraseSets; + /// Sample rate (in Hertz) of the audio content sent in the query. /// /// Refer to @@ -22958,6 +23361,7 @@ class GoogleCloudDialogflowV2SpeechToTextConfig { this.enableWordInfo, this.languageCode, this.model, + this.phraseSets, this.sampleRateHertz, this.speechModelVariant, this.useTimeoutBasedEndpointing, @@ -22969,6 +23373,9 @@ class GoogleCloudDialogflowV2SpeechToTextConfig { enableWordInfo: json_['enableWordInfo'] as core.bool?, languageCode: json_['languageCode'] as core.String?, model: json_['model'] as core.String?, + phraseSets: (json_['phraseSets'] as core.List?) + ?.map((value) => value as core.String) + .toList(), sampleRateHertz: json_['sampleRateHertz'] as core.int?, speechModelVariant: json_['speechModelVariant'] as core.String?, useTimeoutBasedEndpointing: @@ -22980,6 +23387,7 @@ class GoogleCloudDialogflowV2SpeechToTextConfig { if (enableWordInfo != null) 'enableWordInfo': enableWordInfo!, if (languageCode != null) 'languageCode': languageCode!, if (model != null) 'model': model!, + if (phraseSets != null) 'phraseSets': phraseSets!, if (sampleRateHertz != null) 'sampleRateHertz': sampleRateHertz!, if (speechModelVariant != null) 'speechModelVariant': speechModelVariant!, @@ -23127,11 +23535,12 @@ class GoogleCloudDialogflowV2SuggestConversationSummaryRequest { /// The response message for Conversations.SuggestConversationSummary. class GoogleCloudDialogflowV2SuggestConversationSummaryResponse { - /// Number of messages prior to and including last_conversation_message used - /// to compile the suggestion. + /// Number of messages prior to and including latest_message used to compile + /// the suggestion. /// - /// It may be smaller than the SuggestSummaryRequest.context_size field in the - /// request if there weren't that many messages in the conversation. 
+ /// It may be smaller than the SuggestConversationSummaryRequest.context_size + /// field in the request if there weren't that many messages in the + /// conversation. core.int? contextSize; /// The name of the latest conversation message used as context for compiling diff --git a/generated/googleapis/lib/dialogflow/v3.dart b/generated/googleapis/lib/dialogflow/v3.dart index 388af9b12..43922431d 100644 --- a/generated/googleapis/lib/dialogflow/v3.dart +++ b/generated/googleapis/lib/dialogflow/v3.dart @@ -6278,8 +6278,8 @@ class ProjectsLocationsOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// Request parameters: /// @@ -6644,8 +6644,8 @@ class ProjectsOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// Request parameters: /// @@ -7619,7 +7619,7 @@ class GoogleCloudDialogflowCxV3AudioInput { /// speech detection and may inform the client that an utterance has been /// detected. Note that no-speech event is not expected in this phase. The /// client provides this configuration in terms of the durations of those two -/// phases. The durations are measured in terms of the audio length from the the +/// phases. The durations are measured in terms of the audio length from the /// start of the input audio. No-speech event is a response with /// END_OF_UTTERANCE without any transcript following up. class GoogleCloudDialogflowCxV3BargeInConfig { @@ -7883,39 +7883,8 @@ class GoogleCloudDialogflowCxV3BoostSpecConditionBoostSpecBoostControlSpec { /// /// The curve defined through these control points can only be monotonically /// increasing or decreasing(constant values are acceptable). -class GoogleCloudDialogflowCxV3BoostSpecConditionBoostSpecBoostControlSpecControlPoint { - /// Can be one of: 1. - /// - /// The numerical field value. 2. The duration spec for freshness: The value - /// must be formatted as an XSD `dayTimeDuration` value (a restricted subset - /// of an ISO 8601 duration value). The pattern for this is: `nDnM]`. - /// - /// Optional. - core.String? attributeValue; - - /// The value between -1 to 1 by which to boost the score if the - /// attribute_value evaluates to the value specified above. - /// - /// Optional. - core.double? 
boostAmount; - - GoogleCloudDialogflowCxV3BoostSpecConditionBoostSpecBoostControlSpecControlPoint({ - this.attributeValue, - this.boostAmount, - }); - - GoogleCloudDialogflowCxV3BoostSpecConditionBoostSpecBoostControlSpecControlPoint.fromJson( - core.Map json_) - : this( - attributeValue: json_['attributeValue'] as core.String?, - boostAmount: (json_['boostAmount'] as core.num?)?.toDouble(), - ); - - core.Map toJson() => { - if (attributeValue != null) 'attributeValue': attributeValue!, - if (boostAmount != null) 'boostAmount': boostAmount!, - }; -} +typedef GoogleCloudDialogflowCxV3BoostSpecConditionBoostSpecBoostControlSpecControlPoint + = $BoostSpecConditionBoostSpecBoostControlSpecControlPoint; /// Boost specifications for data stores. class GoogleCloudDialogflowCxV3BoostSpecs { @@ -11246,6 +11215,9 @@ class GoogleCloudDialogflowCxV3Generator { /// Required. core.String? displayName; + /// Parameters passed to the LLM to configure its behavior. + GoogleCloudDialogflowCxV3GeneratorModelParameter? modelParameter; + /// The unique identifier of the generator. /// /// Must be set for the Generators.UpdateGenerator method. @@ -11265,6 +11237,7 @@ class GoogleCloudDialogflowCxV3Generator { GoogleCloudDialogflowCxV3Generator({ this.displayName, + this.modelParameter, this.name, this.placeholders, this.promptText, @@ -11273,6 +11246,11 @@ class GoogleCloudDialogflowCxV3Generator { GoogleCloudDialogflowCxV3Generator.fromJson(core.Map json_) : this( displayName: json_['displayName'] as core.String?, + modelParameter: json_.containsKey('modelParameter') + ? GoogleCloudDialogflowCxV3GeneratorModelParameter.fromJson( + json_['modelParameter'] + as core.Map) + : null, name: json_['name'] as core.String?, placeholders: (json_['placeholders'] as core.List?) ?.map((value) => @@ -11287,12 +11265,65 @@ class GoogleCloudDialogflowCxV3Generator { core.Map toJson() => { if (displayName != null) 'displayName': displayName!, + if (modelParameter != null) 'modelParameter': modelParameter!, if (name != null) 'name': name!, if (placeholders != null) 'placeholders': placeholders!, if (promptText != null) 'promptText': promptText!, }; } +/// Parameters to be passed to the LLM. +/// +/// If not set, default values will be used. +class GoogleCloudDialogflowCxV3GeneratorModelParameter { + /// The maximum number of tokens to generate. + core.int? maxDecodeSteps; + + /// The temperature used for sampling. + /// + /// Temperature sampling occurs after both topP and topK have been applied. + /// Valid range: \[0.0, 1.0\] Low temperature = less random. High temperature + /// = more random. + core.double? temperature; + + /// If set, the sampling process in each step is limited to the top_k tokens + /// with highest probabilities. + /// + /// Valid range: \[1, 40\] or 1000+. Small topK = less random. Large topK = + /// more random. + core.int? topK; + + /// If set, only the tokens comprising the top top_p probability mass are + /// considered. + /// + /// If both top_p and top_k are set, top_p will be used for further refining + /// candidates selected with top_k. Valid range: (0.0, 1.0\]. Small topP = + /// less random. Large topP = more random. + core.double? 
topP; + + GoogleCloudDialogflowCxV3GeneratorModelParameter({ + this.maxDecodeSteps, + this.temperature, + this.topK, + this.topP, + }); + + GoogleCloudDialogflowCxV3GeneratorModelParameter.fromJson(core.Map json_) + : this( + maxDecodeSteps: json_['maxDecodeSteps'] as core.int?, + temperature: (json_['temperature'] as core.num?)?.toDouble(), + topK: json_['topK'] as core.int?, + topP: (json_['topP'] as core.num?)?.toDouble(), + ); + + core.Map toJson() => { + if (maxDecodeSteps != null) 'maxDecodeSteps': maxDecodeSteps!, + if (temperature != null) 'temperature': temperature!, + if (topK != null) 'topK': topK!, + if (topP != null) 'topP': topP!, + }; +} + /// Represents a custom placeholder in the prompt text. class GoogleCloudDialogflowCxV3GeneratorPlaceholder { /// Unique ID used to map custom placeholder to parameters in fulfillment. @@ -14867,7 +14898,7 @@ class GoogleCloudDialogflowCxV3SecuritySettingsInsightsExportSettings { /// Sentiment analysis inspects user input and identifies the prevailing /// subjective opinion, especially to determine a user's attitude as positive, /// negative, or neutral. -typedef GoogleCloudDialogflowCxV3SentimentAnalysisResult = $Shared11; +typedef GoogleCloudDialogflowCxV3SentimentAnalysisResult = $Shared12; /// Session entity types are referred to as **User** entity types and are /// entities that are built for an individual user such as favorites, diff --git a/generated/googleapis/lib/digitalassetlinks/v1.dart b/generated/googleapis/lib/digitalassetlinks/v1.dart index 9a3deaf05..1014b03d5 100644 --- a/generated/googleapis/lib/digitalassetlinks/v1.dart +++ b/generated/googleapis/lib/digitalassetlinks/v1.dart @@ -27,7 +27,6 @@ library; import 'dart:async' as async; -import 'dart:convert' as convert; import 'dart:core' as core; import 'package:_discoveryapis_commons/_discoveryapis_commons.dart' as commons; @@ -58,48 +57,6 @@ class AssetlinksResource { AssetlinksResource(commons.ApiRequester client) : _requester = client; - /// Send a bundle of statement checks in a single RPC to minimize latency and - /// service load. - /// - /// Statements need not be all for the same source and/or target. We recommend - /// using this method when you need to check more than one statement in a - /// short period of time. - /// - /// [request] - The metadata request object. - /// - /// Request parameters: - /// - /// [$fields] - Selector specifying which fields to include in a partial - /// response. - /// - /// Completes with a [BulkCheckResponse]. - /// - /// Completes with a [commons.ApiRequestError] if the API endpoint returned an - /// error. - /// - /// If the used [http.Client] completes with an error when making a REST call, - /// this method will complete with the same error. - async.Future bulkCheck( - BulkCheckRequest request, { - core.String? $fields, - }) async { - final body_ = convert.json.encode(request); - final queryParams_ = >{ - if ($fields != null) 'fields': [$fields], - }; - - const url_ = 'v1/assetlinks:bulkCheck'; - - final response_ = await _requester.request( - url_, - 'POST', - body: body_, - queryParams: queryParams_, - ); - return BulkCheckResponse.fromJson( - response_ as core.Map); - } - /// Determines whether the specified (directional) relationship exists between /// the specified source and target assets. /// @@ -441,129 +398,6 @@ class Asset { }; } -/// Message used to check for the existence of multiple digital asset links -/// within a single RPC. 
-class BulkCheckRequest { - /// Same configuration as in Check request, all statements checks will use - /// same configurations. - core.bool? allowGoogleInternalDataSources; - - /// If specified, will be used in any given template statement that doesn’t - /// specify a relation. - core.String? defaultRelation; - - /// If specified, will be used in any given template statement that doesn’t - /// specify a source. - Asset? defaultSource; - - /// If specified, will be used in any given template statement that doesn’t - /// specify a target. - Asset? defaultTarget; - - /// Same configuration as in Check request, all statements checks will use - /// same configurations. - core.bool? skipCacheLookup; - - /// List of statements to check. - /// - /// For each statement, you can omit a field if the corresponding default_* - /// field below was supplied. Minimum 1 statement; maximum 1,000 statements. - /// Any additional statements will be ignored. - core.List? statements; - - BulkCheckRequest({ - this.allowGoogleInternalDataSources, - this.defaultRelation, - this.defaultSource, - this.defaultTarget, - this.skipCacheLookup, - this.statements, - }); - - BulkCheckRequest.fromJson(core.Map json_) - : this( - allowGoogleInternalDataSources: - json_['allowGoogleInternalDataSources'] as core.bool?, - defaultRelation: json_['defaultRelation'] as core.String?, - defaultSource: json_.containsKey('defaultSource') - ? Asset.fromJson( - json_['defaultSource'] as core.Map) - : null, - defaultTarget: json_.containsKey('defaultTarget') - ? Asset.fromJson( - json_['defaultTarget'] as core.Map) - : null, - skipCacheLookup: json_['skipCacheLookup'] as core.bool?, - statements: (json_['statements'] as core.List?) - ?.map((value) => StatementTemplate.fromJson( - value as core.Map)) - .toList(), - ); - - core.Map toJson() => { - if (allowGoogleInternalDataSources != null) - 'allowGoogleInternalDataSources': allowGoogleInternalDataSources!, - if (defaultRelation != null) 'defaultRelation': defaultRelation!, - if (defaultSource != null) 'defaultSource': defaultSource!, - if (defaultTarget != null) 'defaultTarget': defaultTarget!, - if (skipCacheLookup != null) 'skipCacheLookup': skipCacheLookup!, - if (statements != null) 'statements': statements!, - }; -} - -/// Response for BulkCheck call. -/// -/// Results are sent in a list in the same order in which they were sent. -/// Individual check errors are described in the appropriate check_results -/// entry. If the entire call fails, the response will include a bulk_error_code -/// field describing the error. -class BulkCheckResponse { - /// Error code for the entire request. - /// - /// Present only if the entire request failed. Individual check errors will - /// not trigger the presence of this field. - /// Possible string values are: - /// - "ERROR_CODE_UNSPECIFIED" - /// - "ERROR_CODE_INVALID_QUERY" : Unable to parse query. - /// - "ERROR_CODE_FETCH_ERROR" : Unable to fetch the asset links data. - /// - "ERROR_CODE_FAILED_SSL_VALIDATION" : Invalid HTTPS certificate . - /// - "ERROR_CODE_REDIRECT" : HTTP redirects (e.g, 301) are not allowed. - /// - "ERROR_CODE_TOO_LARGE" : Asset links data exceeds maximum size. - /// - "ERROR_CODE_MALFORMED_HTTP_RESPONSE" : Can't parse HTTP response. - /// - "ERROR_CODE_WRONG_CONTENT_TYPE" : HTTP Content-type should be - /// application/json. - /// - "ERROR_CODE_MALFORMED_CONTENT" : JSON content is malformed. - /// - "ERROR_CODE_SECURE_ASSET_INCLUDES_INSECURE" : A secure asset includes an - /// insecure asset (security downgrade). 
- /// - "ERROR_CODE_FETCH_BUDGET_EXHAUSTED" : Too many includes (maybe a loop). - core.String? bulkErrorCode; - - /// List of results for each check request. - /// - /// Results are returned in the same order in which they were sent in the - /// request. - core.List? checkResults; - - BulkCheckResponse({ - this.bulkErrorCode, - this.checkResults, - }); - - BulkCheckResponse.fromJson(core.Map json_) - : this( - bulkErrorCode: json_['bulkErrorCode'] as core.String?, - checkResults: (json_['checkResults'] as core.List?) - ?.map((value) => CheckResponse.fromJson( - value as core.Map)) - .toList(), - ); - - core.Map toJson() => { - if (bulkErrorCode != null) 'bulkErrorCode': bulkErrorCode!, - if (checkResults != null) 'checkResults': checkResults!, - }; -} - /// Describes an X509 certificate. class CertificateInfo { /// The uppercase SHA-265 fingerprint of the certificate. @@ -755,54 +589,6 @@ class Statement { }; } -/// A single statement to check in a bulk call using BulkCheck. -/// -/// See CheckRequest for details about each field. -class StatementTemplate { - /// The relationship being asserted between the source and target. - /// - /// If omitted, you must specify a BulkCheckRequest.default_relation value to - /// use here. - core.String? relation; - - /// The source asset that is asserting the statement. - /// - /// If omitted, you must specify a BulkCheckRequest.default_source value to - /// use here. - Asset? source; - - /// The target that the source is declaring the relationship with. - /// - /// If omitted, you must specify a BulkCheckRequest.default_target to use - /// here. - Asset? target; - - StatementTemplate({ - this.relation, - this.source, - this.target, - }); - - StatementTemplate.fromJson(core.Map json_) - : this( - relation: json_['relation'] as core.String?, - source: json_.containsKey('source') - ? Asset.fromJson( - json_['source'] as core.Map) - : null, - target: json_.containsKey('target') - ? Asset.fromJson( - json_['target'] as core.Map) - : null, - ); - - core.Map toJson() => { - if (relation != null) 'relation': relation!, - if (source != null) 'source': source!, - if (target != null) 'target': target!, - }; -} - /// Describes a web asset. class WebAsset { /// Web assets are identified by a URL that contains only the scheme, hostname diff --git a/generated/googleapis/lib/displayvideo/v2.dart b/generated/googleapis/lib/displayvideo/v2.dart index b0cf926cb..d84c3869c 100644 --- a/generated/googleapis/lib/displayvideo/v2.dart +++ b/generated/googleapis/lib/displayvideo/v2.dart @@ -8740,9 +8740,9 @@ class FirstAndThirdPartyAudiencesResource { /// suffix "desc" should be added to the field name. Example: `displayName /// desc`. /// - /// [pageSize] - Requested page size. Must be between `1` and `200`. If - /// unspecified will default to `100`. Returns error code `INVALID_ARGUMENT` - /// if an invalid value is specified. + /// [pageSize] - Requested page size. Must be between `1` and `5000`. If + /// unspecified, this value defaults to `100`. Returns error code + /// `INVALID_ARGUMENT` if an invalid value is specified. /// /// [pageToken] - A token identifying a page of results the server should /// return. Typically, this is the value of next_page_token returned from the @@ -12802,9 +12802,9 @@ typedef ActiveViewVideoViewabilityMetricConfig /// Additional URLs related to the ad, including beacons. typedef AdUrl = $AdUrl; -/// Details of Adloox settings. +/// Details of Adloox brand safety settings. class Adloox { - /// Adloox's brand safety settings. 
+ /// Adloox categories to exclude. core.List? excludedAdlooxCategories; Adloox({ @@ -12841,7 +12841,7 @@ class Advertiser { /// Billing related settings of the advertiser. /// - /// Optional. Required. + /// Required. AdvertiserBillingConfig? billingConfig; /// Creative related settings of the advertiser. @@ -13219,6 +13219,21 @@ class AssetAssociation { /// - "ASSET_ROLE_COVER_IMAGE" : The cover image of a native video creative. /// This role is only supported and required in following creative_type: * /// `CREATIVE_TYPE_VIDEO` + /// - "ASSET_ROLE_BACKGROUND_COLOR" : The main color to use in a creative. + /// This role is only supported and required in following creative_type: * + /// `CREATIVE_TYPE_ASSET_BASED_CREATIVE` + /// - "ASSET_ROLE_ACCENT_COLOR" : The accent color to use in a creative. This + /// role is only supported and required in following creative_type: * + /// `CREATIVE_TYPE_ASSET_BASED_CREATIVE` + /// - "ASSET_ROLE_REQUIRE_LOGO" : Whether the creative must use a logo asset. + /// This role is only supported and required in following creative_type: * + /// `CREATIVE_TYPE_ASSET_BASED_CREATIVE` + /// - "ASSET_ROLE_REQUIRE_IMAGE" : Whether the creative must use an image + /// asset. This role is only supported and required in following + /// creative_type: * `CREATIVE_TYPE_ASSET_BASED_CREATIVE` + /// - "ASSET_ROLE_ENABLE_ASSET_ENHANCEMENTS" : Whether asset enhancements can + /// be applied to the creative. This role is only supported and required in + /// following creative_type: * `CREATIVE_TYPE_ASSET_BASED_CREATIVE` core.String? role; AssetAssociation({ @@ -16270,8 +16285,9 @@ class CreateSdfDownloadTaskRequest { /// - "SDF_VERSION_7_1" : SDF version 7.1. Read the \[v7 migration /// guide\](/display-video/api/structured-data-file/v7-migration-guide) before /// migrating to this version. - /// - "SDF_VERSION_8" : SDF version 8. **This SDF version is in beta. It is - /// only available to a subset of users.** + /// - "SDF_VERSION_8" : SDF version 8. Read the \[v8 migration + /// guide\](/display-video/api/structured-data-file/v8-migration-guide) before + /// migrating to this version. core.String? version; CreateSdfDownloadTaskRequest({ @@ -16439,6 +16455,9 @@ class Creative { /// - "CREATIVE_TYPE_TEMPLATED_APP_INSTALL_VIDEO" : Templated app install /// mobile video creative. Create and update methods are **not** supported for /// this creative type. + /// - "CREATIVE_TYPE_ASSET_BASED_CREATIVE" : Asset based creative. Create and + /// update methods are supported for this creative type if the hosting_source + /// is `HOSTING_SOURCE_HOSTED`. core.String? creativeType; /// Primary dimensions of the creative. @@ -17048,6 +17067,9 @@ class CreativeConfig { /// - "CREATIVE_TYPE_TEMPLATED_APP_INSTALL_VIDEO" : Templated app install /// mobile video creative. Create and update methods are **not** supported for /// this creative type. + /// - "CREATIVE_TYPE_ASSET_BASED_CREATIVE" : Asset based creative. Create and + /// update methods are supported for this creative type if the hosting_source + /// is `HOSTING_SOURCE_HOSTED`. core.String? creativeType; /// The configuration for display creatives. @@ -18624,6 +18646,8 @@ class GuaranteedOrder { /// - "EXCHANGE_ADMOST_GBID" : AdMost. /// - "EXCHANGE_TOPON_GBID" : TopOn. /// - "EXCHANGE_NETFLIX" : Netflix. + /// - "EXCHANGE_CORE" : Core. + /// - "EXCHANGE_TUBI" : Tubi. core.String? exchange; /// The unique identifier of the guaranteed order. 
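To put the FirstAndThirdPartyAudiences paging change above in context (the documented `pageSize` ceiling rises from `200` to `5000`, with `100` still the default, in both the v2 and v3 surfaces), here is a minimal, non-authoritative sketch of paging through audiences with the regenerated v2 client. The `DisplayVideoApi` entry point and the `firstAndThirdPartyAudiences` getter follow the package's usual naming, and the `advertiserId` parameter plus the response field names (`firstAndThirdPartyAudiences`, `nextPageToken`) are assumptions that do not appear in the hunks above; only `pageSize`, `pageToken`, and `orderBy` are documented there.

import 'package:googleapis/displayvideo/v2.dart';
import 'package:http/http.dart' as http;

Future<void> listAudiences(http.Client client, String advertiserId) async {
  final dv360 = DisplayVideoApi(client);

  String? pageToken;
  do {
    // The documented ceiling for pageSize is now 5000; 100 remains the default.
    final page = await dv360.firstAndThirdPartyAudiences.list(
      advertiserId: advertiserId, // assumed query parameter, not shown in the hunks above
      orderBy: 'displayName desc',
      pageSize: 5000,
      pageToken: pageToken,
    );
    for (final audience in page.firstAndThirdPartyAudiences ?? const []) {
      print(audience.displayName);
    }
    pageToken = page.nextPageToken;
  } while (pageToken != null);
}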
@@ -19019,10 +19043,8 @@ class InsertionOrder { /// The budget spending speed setting of the insertion order. /// - /// *Warning*: Starting on **November 5, 2024**, pacing_type - /// `PACING_TYPE_ASAP` will no longer be compatible with pacing_period - /// `PACING_PERIOD_FLIGHT`. \[Read more about this announced - /// change\](/display-video/api/deprecations#features.io_asap). + /// pacing_type `PACING_TYPE_ASAP` is not compatible with pacing_period + /// `PACING_PERIOD_FLIGHT`. /// /// Required. Pacing? pacing; @@ -19406,6 +19428,8 @@ class InventorySource { /// - "EXCHANGE_ADMOST_GBID" : AdMost. /// - "EXCHANGE_TOPON_GBID" : TopOn. /// - "EXCHANGE_NETFLIX" : Netflix. + /// - "EXCHANGE_CORE" : Core. + /// - "EXCHANGE_TUBI" : Tubi. core.String? exchange; /// The ID of the guaranteed order that this inventory source belongs to. diff --git a/generated/googleapis/lib/displayvideo/v3.dart b/generated/googleapis/lib/displayvideo/v3.dart index 1829c8267..5bcac88ba 100644 --- a/generated/googleapis/lib/displayvideo/v3.dart +++ b/generated/googleapis/lib/displayvideo/v3.dart @@ -8623,9 +8623,9 @@ class FirstAndThirdPartyAudiencesResource { /// suffix "desc" should be added to the field name. Example: `displayName /// desc`. /// - /// [pageSize] - Requested page size. Must be between `1` and `200`. If - /// unspecified will default to `100`. Returns error code `INVALID_ARGUMENT` - /// if an invalid value is specified. + /// [pageSize] - Requested page size. Must be between `1` and `5000`. If + /// unspecified, this value defaults to `100`. Returns error code + /// `INVALID_ARGUMENT` if an invalid value is specified. /// /// [pageToken] - A token identifying a page of results the server should /// return. Typically, this is the value of next_page_token returned from the @@ -13004,79 +13004,89 @@ class AdGroupAssignedTargetingOption { /// Additional URLs related to the ad, including beacons. typedef AdUrl = $AdUrl; -/// Details of Adloox settings. +/// Details of Adloox brand safety settings. class Adloox { - /// Adult explicit sexual content. + /// Adult and Explicit Sexual Content + /// [GARM](https://wfanet.org/leadership/garm/about-garm) risk ranges to + /// exclude. /// /// Optional. /// Possible string values are: /// - "GARM_RISK_EXCLUSION_UNSPECIFIED" : This enum is only a placeholder and - /// it doesn't specify any GARM risk level option. + /// it doesn't specify any GARM risk exclusion option. /// - "GARM_RISK_EXCLUSION_FLOOR" : Exclude floor risk. /// - "GARM_RISK_EXCLUSION_HIGH" : Exclude high and floor risk. - /// - "GARM_RISK_EXCLUSION_MEDIUM" : Exclude high, medium, and floor risk. - /// - "GARM_RISK_EXCLUSION_LOW" : Exclude All Level of Risk (Low, Medium, High - /// and Floor). + /// - "GARM_RISK_EXCLUSION_MEDIUM" : Exclude medium, high, and floor risk. + /// - "GARM_RISK_EXCLUSION_LOW" : Exclude all levels of risk (low, medium, + /// high and floor). core.String? adultExplicitSexualContent; - /// Arms ammunition content. + /// Arms and Ammunition Content + /// [GARM](https://wfanet.org/leadership/garm/about-garm) risk ranges to + /// exclude. /// /// Optional. /// Possible string values are: /// - "GARM_RISK_EXCLUSION_UNSPECIFIED" : This enum is only a placeholder and - /// it doesn't specify any GARM risk level option. + /// it doesn't specify any GARM risk exclusion option. /// - "GARM_RISK_EXCLUSION_FLOOR" : Exclude floor risk. /// - "GARM_RISK_EXCLUSION_HIGH" : Exclude high and floor risk. - /// - "GARM_RISK_EXCLUSION_MEDIUM" : Exclude high, medium, and floor risk. 
- /// - "GARM_RISK_EXCLUSION_LOW" : Exclude All Level of Risk (Low, Medium, High - /// and Floor). + /// - "GARM_RISK_EXCLUSION_MEDIUM" : Exclude medium, high, and floor risk. + /// - "GARM_RISK_EXCLUSION_LOW" : Exclude all levels of risk (low, medium, + /// high and floor). core.String? armsAmmunitionContent; - /// Crime harmful acts to individuals society human rights violations content. + /// Crime and Harmful Acts Content + /// [GARM](https://wfanet.org/leadership/garm/about-garm) risk ranges to + /// exclude. /// /// Optional. /// Possible string values are: /// - "GARM_RISK_EXCLUSION_UNSPECIFIED" : This enum is only a placeholder and - /// it doesn't specify any GARM risk level option. + /// it doesn't specify any GARM risk exclusion option. /// - "GARM_RISK_EXCLUSION_FLOOR" : Exclude floor risk. /// - "GARM_RISK_EXCLUSION_HIGH" : Exclude high and floor risk. - /// - "GARM_RISK_EXCLUSION_MEDIUM" : Exclude high, medium, and floor risk. - /// - "GARM_RISK_EXCLUSION_LOW" : Exclude All Level of Risk (Low, Medium, High - /// and Floor). + /// - "GARM_RISK_EXCLUSION_MEDIUM" : Exclude medium, high, and floor risk. + /// - "GARM_RISK_EXCLUSION_LOW" : Exclude all levels of risk (low, medium, + /// high and floor). core.String? crimeHarmfulActsIndividualsSocietyHumanRightsViolationsContent; - /// Death injury military conflict content. + /// Death, Injury, or Military Conflict Content + /// [GARM](https://wfanet.org/leadership/garm/about-garm) risk ranges to + /// exclude. /// /// Optional. /// Possible string values are: /// - "GARM_RISK_EXCLUSION_UNSPECIFIED" : This enum is only a placeholder and - /// it doesn't specify any GARM risk level option. + /// it doesn't specify any GARM risk exclusion option. /// - "GARM_RISK_EXCLUSION_FLOOR" : Exclude floor risk. /// - "GARM_RISK_EXCLUSION_HIGH" : Exclude high and floor risk. - /// - "GARM_RISK_EXCLUSION_MEDIUM" : Exclude high, medium, and floor risk. - /// - "GARM_RISK_EXCLUSION_LOW" : Exclude All Level of Risk (Low, Medium, High - /// and Floor). + /// - "GARM_RISK_EXCLUSION_MEDIUM" : Exclude medium, high, and floor risk. + /// - "GARM_RISK_EXCLUSION_LOW" : Exclude all levels of risk (low, medium, + /// high and floor). core.String? deathInjuryMilitaryConflictContent; - /// Debated sensitive social issue content. + /// Debated Sensitive Social Issue Content + /// [GARM](https://wfanet.org/leadership/garm/about-garm) risk ranges to + /// exclude. /// /// Optional. /// Possible string values are: /// - "GARM_RISK_EXCLUSION_UNSPECIFIED" : This enum is only a placeholder and - /// it doesn't specify any GARM risk level option. + /// it doesn't specify any GARM risk exclusion option. /// - "GARM_RISK_EXCLUSION_FLOOR" : Exclude floor risk. /// - "GARM_RISK_EXCLUSION_HIGH" : Exclude high and floor risk. - /// - "GARM_RISK_EXCLUSION_MEDIUM" : Exclude high, medium, and floor risk. - /// - "GARM_RISK_EXCLUSION_LOW" : Exclude All Level of Risk (Low, Medium, High - /// and Floor). + /// - "GARM_RISK_EXCLUSION_MEDIUM" : Exclude medium, high, and floor risk. + /// - "GARM_RISK_EXCLUSION_LOW" : Exclude all levels of risk (low, medium, + /// high and floor). core.String? debatedSensitiveSocialIssueContent; - /// Display IAB viewability. + /// IAB viewability threshold for display ads. /// /// Optional. /// Possible string values are: - /// - "DISPLAY_IAB_VIEWABILITY_UNSPECIFIED" : This enum is only a placeholder - /// and it doesn't specify any display viewability options. 
+ /// - "DISPLAY_IAB_VIEWABILITY_UNSPECIFIED" : Default value when not specified + /// or is unknown in this version. /// - "DISPLAY_IAB_VIEWABILITY_10" : 10%+ in view (IAB display viewability /// standard). /// - "DISPLAY_IAB_VIEWABILITY_20" : 20%+ in view (IAB display viewability @@ -13089,111 +13099,124 @@ class Adloox { /// standard). core.String? displayIabViewability; - /// Adloox's brand safety settings. + /// Adloox categories to exclude. core.List? excludedAdlooxCategories; - /// Adloox's fraud IVT MFA settings. + /// Adloox's fraud IVT MFA categories to exclude. /// /// Optional. core.List? excludedFraudIvtMfaCategories; - /// Hate speech acts of aggression content. + /// Hate Speech and Acts of Aggression Content + /// [GARM](https://wfanet.org/leadership/garm/about-garm) risk ranges to + /// exclude. /// /// Optional. /// Possible string values are: /// - "GARM_RISK_EXCLUSION_UNSPECIFIED" : This enum is only a placeholder and - /// it doesn't specify any GARM risk level option. + /// it doesn't specify any GARM risk exclusion option. /// - "GARM_RISK_EXCLUSION_FLOOR" : Exclude floor risk. /// - "GARM_RISK_EXCLUSION_HIGH" : Exclude high and floor risk. - /// - "GARM_RISK_EXCLUSION_MEDIUM" : Exclude high, medium, and floor risk. - /// - "GARM_RISK_EXCLUSION_LOW" : Exclude All Level of Risk (Low, Medium, High - /// and Floor). + /// - "GARM_RISK_EXCLUSION_MEDIUM" : Exclude medium, high, and floor risk. + /// - "GARM_RISK_EXCLUSION_LOW" : Exclude all levels of risk (low, medium, + /// high and floor). core.String? hateSpeechActsAggressionContent; - /// Illegal drugs tobacco ecigarettes vaping alcohol content. + /// Illegal Drugs/Alcohol Content + /// [GARM](https://wfanet.org/leadership/garm/about-garm) risk ranges to + /// exclude. /// /// Optional. /// Possible string values are: /// - "GARM_RISK_EXCLUSION_UNSPECIFIED" : This enum is only a placeholder and - /// it doesn't specify any GARM risk level option. + /// it doesn't specify any GARM risk exclusion option. /// - "GARM_RISK_EXCLUSION_FLOOR" : Exclude floor risk. /// - "GARM_RISK_EXCLUSION_HIGH" : Exclude high and floor risk. - /// - "GARM_RISK_EXCLUSION_MEDIUM" : Exclude high, medium, and floor risk. - /// - "GARM_RISK_EXCLUSION_LOW" : Exclude All Level of Risk (Low, Medium, High - /// and Floor). + /// - "GARM_RISK_EXCLUSION_MEDIUM" : Exclude medium, high, and floor risk. + /// - "GARM_RISK_EXCLUSION_LOW" : Exclude all levels of risk (low, medium, + /// high and floor). core.String? illegalDrugsTobaccoEcigarettesVapingAlcoholContent; - /// Misinformation content. + /// Misinformation Content + /// [GARM](https://wfanet.org/leadership/garm/about-garm) risk ranges to + /// exclude. /// /// Optional. /// Possible string values are: /// - "GARM_RISK_EXCLUSION_UNSPECIFIED" : This enum is only a placeholder and - /// it doesn't specify any GARM risk level option. + /// it doesn't specify any GARM risk exclusion option. /// - "GARM_RISK_EXCLUSION_FLOOR" : Exclude floor risk. /// - "GARM_RISK_EXCLUSION_HIGH" : Exclude high and floor risk. - /// - "GARM_RISK_EXCLUSION_MEDIUM" : Exclude high, medium, and floor risk. - /// - "GARM_RISK_EXCLUSION_LOW" : Exclude All Level of Risk (Low, Medium, High - /// and Floor). + /// - "GARM_RISK_EXCLUSION_MEDIUM" : Exclude medium, high, and floor risk. + /// - "GARM_RISK_EXCLUSION_LOW" : Exclude all levels of risk (low, medium, + /// high and floor). core.String? misinformationContent; - /// Obscenity profanity content. 
+ /// Obscenity and Profanity Content + /// [GARM](https://wfanet.org/leadership/garm/about-garm) risk ranges to + /// exclude. /// /// Optional. /// Possible string values are: /// - "GARM_RISK_EXCLUSION_UNSPECIFIED" : This enum is only a placeholder and - /// it doesn't specify any GARM risk level option. + /// it doesn't specify any GARM risk exclusion option. /// - "GARM_RISK_EXCLUSION_FLOOR" : Exclude floor risk. /// - "GARM_RISK_EXCLUSION_HIGH" : Exclude high and floor risk. - /// - "GARM_RISK_EXCLUSION_MEDIUM" : Exclude high, medium, and floor risk. - /// - "GARM_RISK_EXCLUSION_LOW" : Exclude All Level of Risk (Low, Medium, High - /// and Floor). + /// - "GARM_RISK_EXCLUSION_MEDIUM" : Exclude medium, high, and floor risk. + /// - "GARM_RISK_EXCLUSION_LOW" : Exclude all levels of risk (low, medium, + /// high and floor). core.String? obscenityProfanityContent; - /// Online piracy content. + /// Online Piracy Content + /// [GARM](https://wfanet.org/leadership/garm/about-garm) risk ranges to + /// exclude. /// /// Optional. /// Possible string values are: /// - "GARM_RISK_EXCLUSION_UNSPECIFIED" : This enum is only a placeholder and - /// it doesn't specify any GARM risk level option. + /// it doesn't specify any GARM risk exclusion option. /// - "GARM_RISK_EXCLUSION_FLOOR" : Exclude floor risk. /// - "GARM_RISK_EXCLUSION_HIGH" : Exclude high and floor risk. - /// - "GARM_RISK_EXCLUSION_MEDIUM" : Exclude high, medium, and floor risk. - /// - "GARM_RISK_EXCLUSION_LOW" : Exclude All Level of Risk (Low, Medium, High - /// and Floor). + /// - "GARM_RISK_EXCLUSION_MEDIUM" : Exclude medium, high, and floor risk. + /// - "GARM_RISK_EXCLUSION_LOW" : Exclude all levels of risk (low, medium, + /// high and floor). core.String? onlinePiracyContent; - /// Spam harmful content. + /// Spam or Harmful Content + /// [GARM](https://wfanet.org/leadership/garm/about-garm) risk ranges to + /// exclude. /// /// Optional. /// Possible string values are: /// - "GARM_RISK_EXCLUSION_UNSPECIFIED" : This enum is only a placeholder and - /// it doesn't specify any GARM risk level option. + /// it doesn't specify any GARM risk exclusion option. /// - "GARM_RISK_EXCLUSION_FLOOR" : Exclude floor risk. /// - "GARM_RISK_EXCLUSION_HIGH" : Exclude high and floor risk. - /// - "GARM_RISK_EXCLUSION_MEDIUM" : Exclude high, medium, and floor risk. - /// - "GARM_RISK_EXCLUSION_LOW" : Exclude All Level of Risk (Low, Medium, High - /// and Floor). + /// - "GARM_RISK_EXCLUSION_MEDIUM" : Exclude medium, high, and floor risk. + /// - "GARM_RISK_EXCLUSION_LOW" : Exclude all levels of risk (low, medium, + /// high and floor). core.String? spamHarmfulContent; - /// Terrorism content. + /// Terrorism Content [GARM](https://wfanet.org/leadership/garm/about-garm) + /// risk ranges to exclude. /// /// Optional. /// Possible string values are: /// - "GARM_RISK_EXCLUSION_UNSPECIFIED" : This enum is only a placeholder and - /// it doesn't specify any GARM risk level option. + /// it doesn't specify any GARM risk exclusion option. /// - "GARM_RISK_EXCLUSION_FLOOR" : Exclude floor risk. /// - "GARM_RISK_EXCLUSION_HIGH" : Exclude high and floor risk. - /// - "GARM_RISK_EXCLUSION_MEDIUM" : Exclude high, medium, and floor risk. - /// - "GARM_RISK_EXCLUSION_LOW" : Exclude All Level of Risk (Low, Medium, High - /// and Floor). + /// - "GARM_RISK_EXCLUSION_MEDIUM" : Exclude medium, high, and floor risk. + /// - "GARM_RISK_EXCLUSION_LOW" : Exclude all levels of risk (low, medium, + /// high and floor). core.String? 
terrorismContent; - /// Video IAB viewability. + /// IAB viewability threshold for video ads. /// /// Optional. /// Possible string values are: - /// - "VIDEO_IAB_VIEWABILITY_UNSPECIFIED" : This enum is only a placeholder - /// and it doesn't specify any video viewability options. + /// - "VIDEO_IAB_VIEWABILITY_UNSPECIFIED" : Default value when not specified + /// or is unknown in this version. /// - "VIDEO_IAB_VIEWABILITY_10" : 10%+ in view (IAB video viewability /// standard). /// - "VIDEO_IAB_VIEWABILITY_20" : 20%+ in view (IAB video viewability @@ -13316,7 +13339,7 @@ class Advertiser { /// Billing related settings of the advertiser. /// - /// Optional. Required. + /// Required. AdvertiserBillingConfig? billingConfig; /// Creative related settings of the advertiser. @@ -13694,6 +13717,21 @@ class AssetAssociation { /// - "ASSET_ROLE_COVER_IMAGE" : The cover image of a native video creative. /// This role is only supported and required in following creative_type: * /// `CREATIVE_TYPE_VIDEO` + /// - "ASSET_ROLE_BACKGROUND_COLOR" : The main color to use in a creative. + /// This role is only supported and required in following creative_type: * + /// `CREATIVE_TYPE_ASSET_BASED_CREATIVE` + /// - "ASSET_ROLE_ACCENT_COLOR" : The accent color to use in a creative. This + /// role is only supported and required in following creative_type: * + /// `CREATIVE_TYPE_ASSET_BASED_CREATIVE` + /// - "ASSET_ROLE_REQUIRE_LOGO" : Whether the creative must use a logo asset. + /// This role is only supported and required in following creative_type: * + /// `CREATIVE_TYPE_ASSET_BASED_CREATIVE` + /// - "ASSET_ROLE_REQUIRE_IMAGE" : Whether the creative must use an image + /// asset. This role is only supported and required in following + /// creative_type: * `CREATIVE_TYPE_ASSET_BASED_CREATIVE` + /// - "ASSET_ROLE_ENABLE_ASSET_ENHANCEMENTS" : Whether asset enhancements can + /// be applied to the creative. This role is only supported and required in + /// following creative_type: * `CREATIVE_TYPE_ASSET_BASED_CREATIVE` core.String? role; AssetAssociation({ @@ -16756,8 +16794,9 @@ class CreateSdfDownloadTaskRequest { /// - "SDF_VERSION_7_1" : SDF version 7.1. Read the \[v7 migration /// guide\](/display-video/api/structured-data-file/v7-migration-guide) before /// migrating to this version. - /// - "SDF_VERSION_8" : SDF version 8. **This SDF version is in beta. It is - /// only available to a subset of users.** + /// - "SDF_VERSION_8" : SDF version 8. Read the \[v8 migration + /// guide\](/display-video/api/structured-data-file/v8-migration-guide) before + /// migrating to this version. core.String? version; CreateSdfDownloadTaskRequest({ @@ -16925,6 +16964,9 @@ class Creative { /// - "CREATIVE_TYPE_TEMPLATED_APP_INSTALL_VIDEO" : Templated app install /// mobile video creative. Create and update methods are **not** supported for /// this creative type. + /// - "CREATIVE_TYPE_ASSET_BASED_CREATIVE" : Asset based creative. Create and + /// update methods are supported for this creative type if the hosting_source + /// is `HOSTING_SOURCE_HOSTED`. core.String? creativeType; /// Primary dimensions of the creative. @@ -17534,6 +17576,9 @@ class CreativeConfig { /// - "CREATIVE_TYPE_TEMPLATED_APP_INSTALL_VIDEO" : Templated app install /// mobile video creative. Create and update methods are **not** supported for /// this creative type. + /// - "CREATIVE_TYPE_ASSET_BASED_CREATIVE" : Asset based creative. 
Create and + /// update methods are supported for this creative type if the hosting_source + /// is `HOSTING_SOURCE_HOSTED`. core.String? creativeType; /// The configuration for display creatives. @@ -19263,6 +19308,8 @@ class GuaranteedOrder { /// - "EXCHANGE_ADMOST_GBID" : AdMost. /// - "EXCHANGE_TOPON_GBID" : TopOn. /// - "EXCHANGE_NETFLIX" : Netflix. + /// - "EXCHANGE_CORE" : Core. + /// - "EXCHANGE_TUBI" : Tubi. core.String? exchange; /// The unique identifier of the guaranteed order. @@ -19700,10 +19747,8 @@ class InsertionOrder { /// The budget spending speed setting of the insertion order. /// - /// *Warning*: Starting on **November 5, 2024**, pacing_type - /// `PACING_TYPE_ASAP` will no longer be compatible with pacing_period - /// `PACING_PERIOD_FLIGHT`. \[Read more about this announced - /// change\](/display-video/api/deprecations#features.io_asap). + /// pacing_type `PACING_TYPE_ASAP` is not compatible with pacing_period + /// `PACING_PERIOD_FLIGHT`. /// /// Required. Pacing? pacing; @@ -20083,6 +20128,8 @@ class InventorySource { /// - "EXCHANGE_ADMOST_GBID" : AdMost. /// - "EXCHANGE_TOPON_GBID" : TopOn. /// - "EXCHANGE_NETFLIX" : Netflix. + /// - "EXCHANGE_CORE" : Core. + /// - "EXCHANGE_TUBI" : Tubi. core.String? exchange; /// The ID of the guaranteed order that this inventory source belongs to. diff --git a/generated/googleapis/lib/dlp/v2.dart b/generated/googleapis/lib/dlp/v2.dart index ff3ddc5a0..cf38e407d 100644 --- a/generated/googleapis/lib/dlp/v2.dart +++ b/generated/googleapis/lib/dlp/v2.dart @@ -108,7 +108,8 @@ class InfoTypesResource { InfoTypesResource(commons.ApiRequester client) : _requester = client; - /// Returns a list of the sensitive information types that DLP API supports. + /// Returns a list of the sensitive information types that the DLP API + /// supports. /// /// See /// https://cloud.google.com/sensitive-data-protection/docs/infotypes-reference @@ -179,7 +180,8 @@ class LocationsInfoTypesResource { LocationsInfoTypesResource(commons.ApiRequester client) : _requester = client; - /// Returns a list of the sensitive information types that DLP API supports. + /// Returns a list of the sensitive information types that the DLP API + /// supports. /// /// See /// https://cloud.google.com/sensitive-data-protection/docs/infotypes-reference @@ -8941,7 +8943,7 @@ class GooglePrivacyDlpV2BigQueryTable { /// Dataset ID of the table. core.String? datasetId; - /// The Google Cloud Platform project ID of the project containing the table. + /// The Google Cloud project ID of the project containing the table. /// /// If omitted, project ID is inferred from the API call. core.String? projectId; @@ -9165,6 +9167,7 @@ class GooglePrivacyDlpV2ByteContentItem { /// - "AUDIO" : Audio file types. Only used for profiling. /// - "VIDEO" : Video file types. Only used for profiling. /// - "EXECUTABLE" : Executable file types. Only used for profiling. + /// - "AI_MODEL" : AI model file types. Only used for profiling. core.String? type; GooglePrivacyDlpV2ByteContentItem({ @@ -9499,7 +9502,7 @@ class GooglePrivacyDlpV2CloudSqlProperties { /// - "DATABASE_ENGINE_POSTGRES" : Cloud SQL for PostgreSQL instance. core.String? databaseEngine; - /// DLP will limit its connections to max_connections. + /// The DLP API will limit its connections to max_connections. /// /// Must be 2 or greater. /// @@ -10207,8 +10210,8 @@ class GooglePrivacyDlpV2Conditions { }; } -/// A data connection to allow DLP to profile data in locations that require -/// additional configuration. 
+/// A data connection to allow the DLP API to profile data in locations that +/// require additional configuration. class GooglePrivacyDlpV2Connection { /// Connect to a Cloud SQL instance. GooglePrivacyDlpV2CloudSqlProperties? cloudSql; @@ -10231,8 +10234,9 @@ class GooglePrivacyDlpV2Connection { /// Required. /// Possible string values are: /// - "CONNECTION_STATE_UNSPECIFIED" : Unused - /// - "MISSING_CREDENTIALS" : DLP automatically created this connection during - /// an initial scan, and it is awaiting full configuration by a user. + /// - "MISSING_CREDENTIALS" : The DLP API automatically created this + /// connection during an initial scan, and it is awaiting full configuration + /// by a user. /// - "AVAILABLE" : A configured connection that has not encountered any /// errors. /// - "ERROR" : A configured connection that encountered errors during its @@ -11176,7 +11180,7 @@ class GooglePrivacyDlpV2DataProfileAction { /// analytics\](https://cloud.google.com/chronicle/docs/detection/usecase-dlp-high-risk-user-download). GooglePrivacyDlpV2PublishToChronicle? publishToChronicle; - /// Publishes findings to SCC for each data profile. + /// Publishes findings to Security Command Center for each data profile. GooglePrivacyDlpV2PublishToSecurityCommandCenter? publishToScc; /// Tags the profiled resources with the specified tag values. @@ -11328,7 +11332,7 @@ class GooglePrivacyDlpV2DataProfileJobConfig { /// The project that will run the scan. /// /// The DLP service account that exists within this project must have access - /// to all resources that are profiled, and the Cloud DLP API must be enabled. + /// to all resources that are profiled, and the DLP API must be enabled. core.String? projectId; GooglePrivacyDlpV2DataProfileJobConfig({ @@ -14140,6 +14144,7 @@ class GooglePrivacyDlpV2FileClusterType { /// - "CLUSTER_ARCHIVE" : Archives and containers like .zip, .tar etc. /// - "CLUSTER_MULTIMEDIA" : Multimedia like .mp4, .avi etc. /// - "CLUSTER_EXECUTABLE" : Executable files like .exe, .class, .apk etc. + /// - "CLUSTER_AI_MODEL" : AI models like .tflite etc. core.String? cluster; GooglePrivacyDlpV2FileClusterType({ @@ -15450,6 +15455,9 @@ class GooglePrivacyDlpV2InfoTypeDescription { /// Human readable form of the infoType name. core.String? displayName; + /// A sample true positive for this infoType. + core.String? example; + /// Internal name of the infoType. core.String? name; @@ -15466,6 +15474,7 @@ class GooglePrivacyDlpV2InfoTypeDescription { this.categories, this.description, this.displayName, + this.example, this.name, this.sensitivityScore, this.supportedBy, @@ -15480,6 +15489,7 @@ class GooglePrivacyDlpV2InfoTypeDescription { .toList(), description: json_['description'] as core.String?, displayName: json_['displayName'] as core.String?, + example: json_['example'] as core.String?, name: json_['name'] as core.String?, sensitivityScore: json_.containsKey('sensitivityScore') ? GooglePrivacyDlpV2SensitivityScore.fromJson( @@ -15499,6 +15509,7 @@ class GooglePrivacyDlpV2InfoTypeDescription { if (categories != null) 'categories': categories!, if (description != null) 'description': description!, if (displayName != null) 'displayName': displayName!, + if (example != null) 'example': example!, if (name != null) 'name': name!, if (sensitivityScore != null) 'sensitivityScore': sensitivityScore!, if (supportedBy != null) 'supportedBy': supportedBy!, @@ -17552,7 +17563,7 @@ class GooglePrivacyDlpV2OrgConfig { /// The project that will run the scan. 
/// /// The DLP service account that exists within this project must have access - /// to all resources that are profiled, and the Cloud DLP API must be enabled. + /// to all resources that are profiled, and the DLP API must be enabled. core.String? projectId; GooglePrivacyDlpV2OrgConfig({ @@ -18496,7 +18507,8 @@ class GooglePrivacyDlpV2PublishToPubSub { }; } -/// If set, a summary finding will be created/updated in SCC for each profile. +/// If set, a summary finding will be created or updated in Security Command +/// Center for each profile. typedef GooglePrivacyDlpV2PublishToSecurityCommandCenter = $Empty; /// Enable Stackdriver metric dlp.googleapis.com/finding_count. diff --git a/generated/googleapis/lib/documentai/v1.dart b/generated/googleapis/lib/documentai/v1.dart index e1cef0dab..19df31a22 100644 --- a/generated/googleapis/lib/documentai/v1.dart +++ b/generated/googleapis/lib/documentai/v1.dart @@ -282,8 +282,8 @@ class ProjectsLocationsOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// Request parameters: /// @@ -6564,19 +6564,19 @@ class GoogleTypeDateTime { /// Represents an amount of money with its currency type. typedef GoogleTypeMoney = $Money; -/// Represents a postal address, e.g. for postal delivery or payments addresses. +/// Represents a postal address. /// -/// Given a postal address, a postal service can deliver items to a premise, -/// P.O. Box or similar. It is not intended to model geographical locations -/// (roads, towns, mountains). In typical usage an address would be created via -/// user input or from importing existing data, depending on the type of -/// process. Advice on address input / editing: - Use an -/// internationalization-ready address widget such as -/// https://github.com/google/libaddressinput) - Users should not be presented -/// with UI elements for input or editing of fields outside countries where that -/// field is used. For more guidance on how to use this schema, please see: +/// For example for postal delivery or payments addresses. Given a postal +/// address, a postal service can deliver items to a premise, P.O. Box or +/// similar. It is not intended to model geographical locations (roads, towns, +/// mountains). In typical usage an address would be created by user input or +/// from importing existing data, depending on the type of process. Advice on +/// address input / editing: - Use an internationalization-ready address widget +/// such as https://github.com/google/libaddressinput) - Users should not be +/// presented with UI elements for input or editing of fields outside countries +/// where that field is used. For more guidance on how to use this schema, see: /// https://support.google.com/business/answer/6397478 -typedef GoogleTypePostalAddress = $PostalAddress; +typedef GoogleTypePostalAddress = $PostalAddress00; /// Represents a time zone from the /// [IANA Time Zone Database](https://www.iana.org/time-zones). 
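The dlp/v2 hunks above add an `example` field (a sample true positive) to `GooglePrivacyDlpV2InfoTypeDescription`. A minimal sketch of surfacing that field when listing the built-in infoTypes follows; the `DlpApi` entry point, the no-argument `infoTypes.list()` call, and the `infoTypes` field on the list response are assumed from the package's usual conventions rather than shown in the diff, so treat this as illustrative only.

import 'package:googleapis/dlp/v2.dart';
import 'package:http/http.dart' as http;

Future<void> printInfoTypeExamples(http.Client client) async {
  final dlp = DlpApi(client);

  // Each InfoTypeDescription may now carry a sample true positive in `example`.
  final response = await dlp.infoTypes.list();
  for (final info in response.infoTypes ?? const <GooglePrivacyDlpV2InfoTypeDescription>[]) {
    final sample = info.example ?? '(no example published)';
    print('${info.name}: $sample');
  }
}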
diff --git a/generated/googleapis/lib/domains/v1.dart b/generated/googleapis/lib/domains/v1.dart index b3af75e31..42b0742b0 100644 --- a/generated/googleapis/lib/domains/v1.dart +++ b/generated/googleapis/lib/domains/v1.dart @@ -3076,7 +3076,7 @@ class Policy { /// with UI elements for input or editing of fields outside countries where that /// field is used. For more guidance on how to use this schema, please see: /// https://support.google.com/business/answer/6397478 -typedef PostalAddress = $PostalAddress; +typedef PostalAddress = $PostalAddress01; /// Configures a RRSetRoutingPolicy such that all queries are responded with the /// primary_targets if they are healthy. diff --git a/generated/googleapis/lib/drive/v3.dart b/generated/googleapis/lib/drive/v3.dart index 1e386ed1b..c9bb221aa 100644 --- a/generated/googleapis/lib/drive/v3.dart +++ b/generated/googleapis/lib/drive/v3.dart @@ -21,13 +21,13 @@ /// Create an instance of [DriveApi] to access these resources: /// /// - [AboutResource] +/// - [AccessproposalsResource] /// - [AppsResource] /// - [ChangesResource] /// - [ChannelsResource] /// - [CommentsResource] /// - [DrivesResource] /// - [FilesResource] -/// - [FilesAccessproposalsResource] /// - [OperationResource] /// - [OperationsResource] /// - [PermissionsResource] @@ -102,6 +102,8 @@ class DriveApi { final commons.ApiRequester _requester; AboutResource get about => AboutResource(_requester); + AccessproposalsResource get accessproposals => + AccessproposalsResource(_requester); AppsResource get apps => AppsResource(_requester); ChangesResource get changes => ChangesResource(_requester); ChannelsResource get channels => ChannelsResource(_requester); @@ -130,6 +132,12 @@ class AboutResource { /// Gets information about the user, the user's Drive, and system /// capabilities. /// + /// For more information, see + /// [Return user info](https://developers.google.com/drive/api/guides/user-info). + /// Required: The `fields` parameter must be set. To return the exact fields + /// you need, see + /// [Return specific fields](https://developers.google.com/drive/api/guides/fields-parameter). + /// /// Request parameters: /// /// [$fields] - Selector specifying which fields to include in a partial @@ -160,6 +168,145 @@ class AboutResource { } } +class AccessproposalsResource { + final commons.ApiRequester _requester; + + AccessproposalsResource(commons.ApiRequester client) : _requester = client; + + /// Retrieves an AccessProposal by ID. + /// + /// Request parameters: + /// + /// [fileId] - Required. The id of the item the request is on. + /// + /// [proposalId] - Required. The id of the access proposal to resolve. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [AccessProposal]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future get( + core.String fileId, + core.String proposalId, { + core.String? 
$fields, + }) async { + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'files/' + + commons.escapeVariable('$fileId') + + '/accessproposals/' + + commons.escapeVariable('$proposalId'); + + final response_ = await _requester.request( + url_, + 'GET', + queryParams: queryParams_, + ); + return AccessProposal.fromJson( + response_ as core.Map); + } + + /// List the AccessProposals on a file. + /// + /// Note: Only approvers are able to list AccessProposals on a file. If the + /// user is not an approver, returns a 403. + /// + /// Request parameters: + /// + /// [fileId] - Required. The id of the item the request is on. + /// + /// [pageSize] - Optional. The number of results per page + /// + /// [pageToken] - Optional. The continuation token on the list of access + /// requests. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [ListAccessProposalsResponse]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future list( + core.String fileId, { + core.int? pageSize, + core.String? pageToken, + core.String? $fields, + }) async { + final queryParams_ = >{ + if (pageSize != null) 'pageSize': ['${pageSize}'], + if (pageToken != null) 'pageToken': [pageToken], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = + 'files/' + commons.escapeVariable('$fileId') + '/accessproposals'; + + final response_ = await _requester.request( + url_, + 'GET', + queryParams: queryParams_, + ); + return ListAccessProposalsResponse.fromJson( + response_ as core.Map); + } + + /// Used to approve or deny an Access Proposal. + /// + /// [request] - The metadata request object. + /// + /// Request parameters: + /// + /// [fileId] - Required. The id of the item the request is on. + /// + /// [proposalId] - Required. The id of the access proposal to resolve. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future resolve( + ResolveAccessProposalRequest request, + core.String fileId, + core.String proposalId, { + core.String? $fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'files/' + + commons.escapeVariable('$fileId') + + '/accessproposals/' + + commons.escapeVariable('$proposalId') + + ':resolve'; + + await _requester.request( + url_, + 'POST', + body: body_, + queryParams: queryParams_, + downloadOptions: null, + ); + } +} + class AppsResource { final commons.ApiRequester _requester; @@ -167,6 +314,9 @@ class AppsResource { /// Gets a specific app. /// + /// For more information, see + /// [Return user info](https://developers.google.com/drive/api/guides/user-info). + /// /// Request parameters: /// /// [appId] - The ID of the app. @@ -201,6 +351,9 @@ class AppsResource { /// Lists a user's installed apps. /// + /// For more information, see + /// [Return user info](https://developers.google.com/drive/api/guides/user-info). 
+ /// /// Request parameters: /// /// [appFilterExtensions] - A comma-separated list of file extensions to limit @@ -262,6 +415,9 @@ class ChangesResource { /// Gets the starting pageToken for listing future changes. /// + /// For more information, see + /// [Retrieve changes](https://developers.google.com/drive/api/guides/manage-changes). + /// /// Request parameters: /// /// [driveId] - The ID of the shared drive for which the starting pageToken @@ -314,6 +470,9 @@ class ChangesResource { /// Lists the changes for a user or shared drive. /// + /// For more information, see + /// [Retrieve changes](https://developers.google.com/drive/api/guides/manage-changes). + /// /// Request parameters: /// /// [pageToken] - The token for continuing a previous list request on the next @@ -428,6 +587,9 @@ class ChangesResource { /// Subscribes to changes for a user. /// + /// For more information, see + /// [Notifications for resource changes](https://developers.google.com/drive/api/guides/push). + /// /// [request] - The metadata request object. /// /// Request parameters: @@ -552,6 +714,9 @@ class ChannelsResource { /// Stops watching resources through this channel. /// + /// For more information, see + /// [Notifications for resource changes](https://developers.google.com/drive/api/guides/push). + /// /// [request] - The metadata request object. /// /// Request parameters: @@ -592,6 +757,12 @@ class CommentsResource { /// Creates a comment on a file. /// + /// For more information, see + /// [Manage comments and replies](https://developers.google.com/drive/api/guides/manage-comments). + /// Required: The `fields` parameter must be set. To return the exact fields + /// you need, see + /// [Return specific fields](https://developers.google.com/drive/api/guides/fields-parameter). + /// /// [request] - The metadata request object. /// /// Request parameters: @@ -631,6 +802,12 @@ class CommentsResource { /// Deletes a comment. /// + /// For more information, see + /// [Manage comments and replies](https://developers.google.com/drive/api/guides/manage-comments). + /// Required: The `fields` parameter must be set. To return the exact fields + /// you need, see + /// [Return specific fields](https://developers.google.com/drive/api/guides/fields-parameter). + /// /// Request parameters: /// /// [fileId] - The ID of the file. @@ -669,6 +846,12 @@ class CommentsResource { /// Gets a comment by ID. /// + /// For more information, see + /// [Manage comments and replies](https://developers.google.com/drive/api/guides/manage-comments). + /// Required: The `fields` parameter must be set. To return the exact fields + /// you need, see + /// [Return specific fields](https://developers.google.com/drive/api/guides/fields-parameter). + /// /// Request parameters: /// /// [fileId] - The ID of the file. @@ -714,6 +897,12 @@ class CommentsResource { /// Lists a file's comments. /// + /// For more information, see + /// [Manage comments and replies](https://developers.google.com/drive/api/guides/manage-comments). + /// Required: The `fields` parameter must be set. To return the exact fields + /// you need, see + /// [Return specific fields](https://developers.google.com/drive/api/guides/fields-parameter). + /// /// Request parameters: /// /// [fileId] - The ID of the file. @@ -770,6 +959,12 @@ class CommentsResource { /// Updates a comment with patch semantics. /// + /// For more information, see + /// [Manage comments and replies](https://developers.google.com/drive/api/guides/manage-comments). 
+ /// Required: The `fields` parameter must be set. To return the exact fields + /// you need, see + /// [Return specific fields](https://developers.google.com/drive/api/guides/fields-parameter). + /// /// [request] - The metadata request object. /// /// Request parameters: @@ -1125,9 +1320,6 @@ class DrivesResource { class FilesResource { final commons.ApiRequester _requester; - FilesAccessproposalsResource get accessproposals => - FilesAccessproposalsResource(_requester); - FilesResource(commons.ApiRequester client) : _requester = client; /// Creates a copy of a file and applies any requested updates with patch @@ -2096,124 +2288,6 @@ class FilesResource { } } -class FilesAccessproposalsResource { - final commons.ApiRequester _requester; - - FilesAccessproposalsResource(commons.ApiRequester client) - : _requester = client; - - /// List the AccessProposals on a file. - /// - /// Note: Only approvers are able to list AccessProposals on a file. If the - /// user is not an approver, returns a 403. - /// - /// Request parameters: - /// - /// [fileId] - Required. The id of the item the request is on. - /// - /// [pageSize] - Optional. The number of results per page - /// - /// [pageToken] - Optional. The continuation token on the list of access - /// requests. - /// - /// [$fields] - Selector specifying which fields to include in a partial - /// response. - /// - /// Completes with a [ListAccessProposalsResponse]. - /// - /// Completes with a [commons.ApiRequestError] if the API endpoint returned an - /// error. - /// - /// If the used [http.Client] completes with an error when making a REST call, - /// this method will complete with the same error. - async.Future list( - core.String fileId, { - core.int? pageSize, - core.String? pageToken, - core.String? $fields, - }) async { - final queryParams_ = >{ - if (pageSize != null) 'pageSize': ['${pageSize}'], - if (pageToken != null) 'pageToken': [pageToken], - if ($fields != null) 'fields': [$fields], - }; - - final url_ = - 'files/' + commons.escapeVariable('$fileId') + '/accessproposals'; - - final response_ = await _requester.request( - url_, - 'GET', - queryParams: queryParams_, - ); - return ListAccessProposalsResponse.fromJson( - response_ as core.Map); - } - - /// Used to approve or deny an Access Proposal. - /// - /// Request parameters: - /// - /// [fileId] - Required. The id of the item the request is on. - /// - /// [proposalId] - Required. The id of the access proposal to resolve. - /// - /// [action] - Required. The action to take on the AccessProposal. - /// Possible string values are: - /// - "ACTION_UNSPECIFIED" : Unspecified action - /// - "ACCEPT" : The user accepts the proposal - /// - "DENY" : The user denies the proposal - /// - /// [role] - Optional. The roles the approver has allowed, if any. Note: This - /// field is required for the `ACCEPT` action. - /// - /// [sendNotification] - Optional. Whether to send an email to the requester - /// when the AccessProposal is denied or accepted. - /// - /// [view] - Optional. Indicates the view for this access proposal. This - /// should only be set when the proposal belongs to a view. `published` is the - /// only supported value. - /// - /// [$fields] - Selector specifying which fields to include in a partial - /// response. - /// - /// Completes with a [commons.ApiRequestError] if the API endpoint returned an - /// error. - /// - /// If the used [http.Client] completes with an error when making a REST call, - /// this method will complete with the same error. 
- async.Future resolve( - core.String fileId, - core.String proposalId, { - core.String? action, - core.List? role, - core.bool? sendNotification, - core.String? view, - core.String? $fields, - }) async { - final queryParams_ = >{ - if (action != null) 'action': [action], - if (role != null) 'role': role, - if (sendNotification != null) 'sendNotification': ['${sendNotification}'], - if (view != null) 'view': [view], - if ($fields != null) 'fields': [$fields], - }; - - final url_ = 'files/' + - commons.escapeVariable('$fileId') + - '/accessproposals/' + - commons.escapeVariable('$proposalId') + - ':resolve'; - - await _requester.request( - url_, - 'POST', - queryParams: queryParams_, - downloadOptions: null, - ); - } -} - class OperationResource { final commons.ApiRequester _requester; @@ -2227,8 +2301,8 @@ class OperationResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// Request parameters: /// @@ -7345,9 +7419,13 @@ class PermissionTeamDrivePermissionDetails { /// A permission for a file. /// /// A permission grants a user, group, domain, or the world access to a file or -/// a folder hierarchy. Some resource methods (such as `permissions.update`) -/// require a `permissionId`. Use the `permissions.list` method to retrieve the -/// ID for a file, folder, or shared drive. +/// a folder hierarchy. By default, permissions requests only return a subset of +/// fields. Permission kind, ID, type, and role are always returned. To retrieve +/// specific fields, see +/// https://developers.google.com/drive/api/guides/fields-parameter. Some +/// resource methods (such as `permissions.update`) require a `permissionId`. +/// Use the `permissions.list` method to retrieve the ID for a file, folder, or +/// shared drive. class Permission { /// Whether the permission allows the file to be discovered through search. /// @@ -7709,6 +7787,64 @@ class ReplyList { }; } +/// Request message for resolving an AccessProposal on a file. +class ResolveAccessProposalRequest { + /// The action to take on the AccessProposal. + /// + /// Required. + /// Possible string values are: + /// - "ACTION_UNSPECIFIED" : Unspecified action + /// - "ACCEPT" : The user accepts the proposal. Note: If this action is used, + /// the `role` field must have at least one value. + /// - "DENY" : The user denies the proposal + core.String? action; + + /// The roles the approver has allowed, if any. + /// + /// Note: This field is required for the `ACCEPT` action. + /// + /// Optional. + core.List? role; + + /// Whether to send an email to the requester when the AccessProposal is + /// denied or accepted. + /// + /// Optional. + core.bool? sendNotification; + + /// Indicates the view for this access proposal. + /// + /// This should only be set when the proposal belongs to a view. `published` + /// is the only supported value. + /// + /// Optional. + core.String? view; + + ResolveAccessProposalRequest({ + this.action, + this.role, + this.sendNotification, + this.view, + }); + + ResolveAccessProposalRequest.fromJson(core.Map json_) + : this( + action: json_['action'] as core.String?, + role: (json_['role'] as core.List?) 
+ ?.map((value) => value as core.String) + .toList(), + sendNotification: json_['sendNotification'] as core.bool?, + view: json_['view'] as core.String?, + ); + + core.Map toJson() => { + if (action != null) 'action': action!, + if (role != null) 'role': role!, + if (sendNotification != null) 'sendNotification': sendNotification!, + if (view != null) 'view': view!, + }; +} + /// The metadata for a revision to a file. /// /// Some resource methods (such as `revisions.update`) require a `revisionId`. diff --git a/generated/googleapis/lib/eventarc/v1.dart b/generated/googleapis/lib/eventarc/v1.dart index 9e6164a34..c93d78f99 100644 --- a/generated/googleapis/lib/eventarc/v1.dart +++ b/generated/googleapis/lib/eventarc/v1.dart @@ -1011,167 +1011,147 @@ class ProjectsLocationsEnrollmentsResource { ProjectsLocationsEnrollmentsResource(commons.ApiRequester client) : _requester = client; - /// Gets the access control policy for a resource. + /// Create a new Enrollment in a particular project and location. /// - /// Returns an empty policy if the resource exists and does not have a policy - /// set. + /// [request] - The metadata request object. /// /// Request parameters: /// - /// [resource] - REQUIRED: The resource for which the policy is being - /// requested. See - /// [Resource names](https://cloud.google.com/apis/design/resource_names) for - /// the appropriate value for this field. - /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/enrollments/\[^/\]+$`. + /// [parent] - Required. The parent collection in which to add this + /// enrollment. + /// Value must have pattern `^projects/\[^/\]+/locations/\[^/\]+$`. /// - /// [options_requestedPolicyVersion] - Optional. The maximum policy version - /// that will be used to format the policy. Valid values are 0, 1, and 3. - /// Requests specifying an invalid value will be rejected. Requests for - /// policies with any conditional role bindings must specify version 3. - /// Policies with no conditional role bindings may specify any valid value or - /// leave the field unset. The policy in the response might use the policy - /// version that you specified, or it might use a lower policy version. For - /// example, if you specify version 3, but the policy has no conditional role - /// bindings, the response uses version 1. To learn which resources support - /// conditions in their IAM policies, see the - /// [IAM documentation](https://cloud.google.com/iam/help/conditions/resource-policies). + /// [enrollmentId] - Required. The user-provided ID to be assigned to the + /// Enrollment. It should match the format + /// `^[a-z]([a-z0-9-]{0,61}[a-z0-9])?$`. + /// + /// [validateOnly] - Optional. If set, validate the request and preview the + /// review, but do not post it. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. /// - /// Completes with a [Policy]. + /// Completes with a [GoogleLongrunningOperation]. /// /// Completes with a [commons.ApiRequestError] if the API endpoint returned an /// error. /// /// If the used [http.Client] completes with an error when making a REST call, /// this method will complete with the same error. - async.Future getIamPolicy( - core.String resource, { - core.int? options_requestedPolicyVersion, + async.Future create( + Enrollment request, + core.String parent, { + core.String? enrollmentId, + core.bool? validateOnly, core.String? 
$fields, }) async { + final body_ = convert.json.encode(request); final queryParams_ = >{ - if (options_requestedPolicyVersion != null) - 'options.requestedPolicyVersion': ['${options_requestedPolicyVersion}'], + if (enrollmentId != null) 'enrollmentId': [enrollmentId], + if (validateOnly != null) 'validateOnly': ['${validateOnly}'], if ($fields != null) 'fields': [$fields], }; - final url_ = 'v1/' + core.Uri.encodeFull('$resource') + ':getIamPolicy'; + final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/enrollments'; final response_ = await _requester.request( url_, - 'GET', + 'POST', + body: body_, queryParams: queryParams_, ); - return Policy.fromJson(response_ as core.Map); + return GoogleLongrunningOperation.fromJson( + response_ as core.Map); } - /// Sets the access control policy on the specified resource. - /// - /// Replaces any existing policy. Can return `NOT_FOUND`, `INVALID_ARGUMENT`, - /// and `PERMISSION_DENIED` errors. - /// - /// [request] - The metadata request object. + /// Delete a single Enrollment. /// /// Request parameters: /// - /// [resource] - REQUIRED: The resource for which the policy is being - /// specified. See - /// [Resource names](https://cloud.google.com/apis/design/resource_names) for - /// the appropriate value for this field. + /// [name] - Required. The name of the Enrollment to be deleted. /// Value must have pattern /// `^projects/\[^/\]+/locations/\[^/\]+/enrollments/\[^/\]+$`. /// + /// [allowMissing] - Optional. If set to true, and the Enrollment is not + /// found, the request will succeed but no action will be taken on the server. + /// + /// [etag] - Optional. If provided, the Enrollment will only be deleted if the + /// etag matches the current etag on the resource. + /// + /// [validateOnly] - Optional. If set, validate the request and preview the + /// review, but do not post it. + /// /// [$fields] - Selector specifying which fields to include in a partial /// response. /// - /// Completes with a [Policy]. + /// Completes with a [GoogleLongrunningOperation]. /// /// Completes with a [commons.ApiRequestError] if the API endpoint returned an /// error. /// /// If the used [http.Client] completes with an error when making a REST call, /// this method will complete with the same error. - async.Future setIamPolicy( - SetIamPolicyRequest request, - core.String resource, { + async.Future delete( + core.String name, { + core.bool? allowMissing, + core.String? etag, + core.bool? validateOnly, core.String? $fields, }) async { - final body_ = convert.json.encode(request); final queryParams_ = >{ + if (allowMissing != null) 'allowMissing': ['${allowMissing}'], + if (etag != null) 'etag': [etag], + if (validateOnly != null) 'validateOnly': ['${validateOnly}'], if ($fields != null) 'fields': [$fields], }; - final url_ = 'v1/' + core.Uri.encodeFull('$resource') + ':setIamPolicy'; + final url_ = 'v1/' + core.Uri.encodeFull('$name'); final response_ = await _requester.request( url_, - 'POST', - body: body_, + 'DELETE', queryParams: queryParams_, ); - return Policy.fromJson(response_ as core.Map); + return GoogleLongrunningOperation.fromJson( + response_ as core.Map); } - /// Returns permissions that a caller has on the specified resource. - /// - /// If the resource does not exist, this will return an empty set of - /// permissions, not a `NOT_FOUND` error. Note: This operation is designed to - /// be used for building permission-aware UIs and command-line tools, not for - /// authorization checking. This operation may "fail open" without warning. 
- /// - /// [request] - The metadata request object. + /// Get a single Enrollment. /// /// Request parameters: /// - /// [resource] - REQUIRED: The resource for which the policy detail is being - /// requested. See - /// [Resource names](https://cloud.google.com/apis/design/resource_names) for - /// the appropriate value for this field. + /// [name] - Required. The name of the Enrollment to get. /// Value must have pattern /// `^projects/\[^/\]+/locations/\[^/\]+/enrollments/\[^/\]+$`. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. /// - /// Completes with a [TestIamPermissionsResponse]. + /// Completes with a [Enrollment]. /// /// Completes with a [commons.ApiRequestError] if the API endpoint returned an /// error. /// /// If the used [http.Client] completes with an error when making a REST call, /// this method will complete with the same error. - async.Future testIamPermissions( - TestIamPermissionsRequest request, - core.String resource, { + async.Future get( + core.String name, { core.String? $fields, }) async { - final body_ = convert.json.encode(request); final queryParams_ = >{ if ($fields != null) 'fields': [$fields], }; - final url_ = - 'v1/' + core.Uri.encodeFull('$resource') + ':testIamPermissions'; + final url_ = 'v1/' + core.Uri.encodeFull('$name'); final response_ = await _requester.request( url_, - 'POST', - body: body_, + 'GET', queryParams: queryParams_, ); - return TestIamPermissionsResponse.fromJson( + return Enrollment.fromJson( response_ as core.Map); } -} - -class ProjectsLocationsGoogleApiSourcesResource { - final commons.ApiRequester _requester; - - ProjectsLocationsGoogleApiSourcesResource(commons.ApiRequester client) - : _requester = client; /// Gets the access control policy for a resource. /// @@ -1185,7 +1165,7 @@ class ProjectsLocationsGoogleApiSourcesResource { /// [Resource names](https://cloud.google.com/apis/design/resource_names) for /// the appropriate value for this field. /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/googleApiSources/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/enrollments/\[^/\]+$`. /// /// [options_requestedPolicyVersion] - Optional. The maximum policy version /// that will be used to format the policy. Valid values are 0, 1, and 3. @@ -1230,166 +1210,125 @@ class ProjectsLocationsGoogleApiSourcesResource { return Policy.fromJson(response_ as core.Map); } - /// Sets the access control policy on the specified resource. - /// - /// Replaces any existing policy. Can return `NOT_FOUND`, `INVALID_ARGUMENT`, - /// and `PERMISSION_DENIED` errors. - /// - /// [request] - The metadata request object. + /// List Enrollments. /// /// Request parameters: /// - /// [resource] - REQUIRED: The resource for which the policy is being - /// specified. See - /// [Resource names](https://cloud.google.com/apis/design/resource_names) for - /// the appropriate value for this field. - /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/googleApiSources/\[^/\]+$`. - /// - /// [$fields] - Selector specifying which fields to include in a partial - /// response. - /// - /// Completes with a [Policy]. - /// - /// Completes with a [commons.ApiRequestError] if the API endpoint returned an - /// error. - /// - /// If the used [http.Client] completes with an error when making a REST call, - /// this method will complete with the same error. - async.Future setIamPolicy( - SetIamPolicyRequest request, - core.String resource, { - core.String? 
$fields, - }) async { - final body_ = convert.json.encode(request); - final queryParams_ = >{ - if ($fields != null) 'fields': [$fields], - }; - - final url_ = 'v1/' + core.Uri.encodeFull('$resource') + ':setIamPolicy'; - - final response_ = await _requester.request( - url_, - 'POST', - body: body_, - queryParams: queryParams_, - ); - return Policy.fromJson(response_ as core.Map); - } - - /// Returns permissions that a caller has on the specified resource. + /// [parent] - Required. The parent collection to list triggers on. + /// Value must have pattern `^projects/\[^/\]+/locations/\[^/\]+$`. /// - /// If the resource does not exist, this will return an empty set of - /// permissions, not a `NOT_FOUND` error. Note: This operation is designed to - /// be used for building permission-aware UIs and command-line tools, not for - /// authorization checking. This operation may "fail open" without warning. + /// [filter] - Optional. The filter field that the list request will filter + /// on. Possible filtersare described in https://google.aip.dev/160. /// - /// [request] - The metadata request object. + /// [orderBy] - Optional. The sorting order of the resources returned. Value + /// should be a comma-separated list of fields. The default sorting order is + /// ascending. To specify descending order for a field, append a `desc` + /// suffix; for example: `name desc, update_time`. /// - /// Request parameters: + /// [pageSize] - Optional. The maximum number of results to return on each + /// page. Note: The service may send fewer. /// - /// [resource] - REQUIRED: The resource for which the policy detail is being - /// requested. See - /// [Resource names](https://cloud.google.com/apis/design/resource_names) for - /// the appropriate value for this field. - /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/googleApiSources/\[^/\]+$`. + /// [pageToken] - Optional. The page token; provide the value from the + /// `next_page_token` field in a previous call to retrieve the subsequent + /// page. When paginating, all other parameters provided must match the + /// previous call that provided the page token. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. /// - /// Completes with a [TestIamPermissionsResponse]. + /// Completes with a [ListEnrollmentsResponse]. /// /// Completes with a [commons.ApiRequestError] if the API endpoint returned an /// error. /// /// If the used [http.Client] completes with an error when making a REST call, /// this method will complete with the same error. - async.Future testIamPermissions( - TestIamPermissionsRequest request, - core.String resource, { + async.Future list( + core.String parent, { + core.String? filter, + core.String? orderBy, + core.int? pageSize, + core.String? pageToken, core.String? 
$fields, }) async { - final body_ = convert.json.encode(request); final queryParams_ = >{ + if (filter != null) 'filter': [filter], + if (orderBy != null) 'orderBy': [orderBy], + if (pageSize != null) 'pageSize': ['${pageSize}'], + if (pageToken != null) 'pageToken': [pageToken], if ($fields != null) 'fields': [$fields], }; - final url_ = - 'v1/' + core.Uri.encodeFull('$resource') + ':testIamPermissions'; + final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/enrollments'; final response_ = await _requester.request( url_, - 'POST', - body: body_, + 'GET', queryParams: queryParams_, ); - return TestIamPermissionsResponse.fromJson( + return ListEnrollmentsResponse.fromJson( response_ as core.Map); } -} - -class ProjectsLocationsMessageBusesResource { - final commons.ApiRequester _requester; - - ProjectsLocationsMessageBusesResource(commons.ApiRequester client) - : _requester = client; - /// Gets the access control policy for a resource. + /// Update a single Enrollment. /// - /// Returns an empty policy if the resource exists and does not have a policy - /// set. + /// [request] - The metadata request object. /// /// Request parameters: /// - /// [resource] - REQUIRED: The resource for which the policy is being - /// requested. See - /// [Resource names](https://cloud.google.com/apis/design/resource_names) for - /// the appropriate value for this field. + /// [name] - Identifier. Resource name of the form + /// projects/{project}/locations/{location}/enrollments/{enrollment} /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/messageBuses/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/enrollments/\[^/\]+$`. /// - /// [options_requestedPolicyVersion] - Optional. The maximum policy version - /// that will be used to format the policy. Valid values are 0, 1, and 3. - /// Requests specifying an invalid value will be rejected. Requests for - /// policies with any conditional role bindings must specify version 3. - /// Policies with no conditional role bindings may specify any valid value or - /// leave the field unset. The policy in the response might use the policy - /// version that you specified, or it might use a lower policy version. For - /// example, if you specify version 3, but the policy has no conditional role - /// bindings, the response uses version 1. To learn which resources support - /// conditions in their IAM policies, see the - /// [IAM documentation](https://cloud.google.com/iam/help/conditions/resource-policies). + /// [allowMissing] - Optional. If set to true, and the Enrollment is not + /// found, a new Enrollment will be created. In this situation, `update_mask` + /// is ignored. + /// + /// [updateMask] - Optional. The fields to be updated; only fields explicitly + /// provided are updated. If no field mask is provided, all provided fields in + /// the request are updated. To update all fields, provide a field mask of + /// "*". + /// + /// [validateOnly] - Optional. If set, validate the request and preview the + /// review, but do not post it. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. /// - /// Completes with a [Policy]. + /// Completes with a [GoogleLongrunningOperation]. /// /// Completes with a [commons.ApiRequestError] if the API endpoint returned an /// error. /// /// If the used [http.Client] completes with an error when making a REST call, /// this method will complete with the same error. - async.Future getIamPolicy( - core.String resource, { - core.int? 
options_requestedPolicyVersion, + async.Future patch( + Enrollment request, + core.String name, { + core.bool? allowMissing, + core.String? updateMask, + core.bool? validateOnly, core.String? $fields, }) async { + final body_ = convert.json.encode(request); final queryParams_ = >{ - if (options_requestedPolicyVersion != null) - 'options.requestedPolicyVersion': ['${options_requestedPolicyVersion}'], + if (allowMissing != null) 'allowMissing': ['${allowMissing}'], + if (updateMask != null) 'updateMask': [updateMask], + if (validateOnly != null) 'validateOnly': ['${validateOnly}'], if ($fields != null) 'fields': [$fields], }; - final url_ = 'v1/' + core.Uri.encodeFull('$resource') + ':getIamPolicy'; + final url_ = 'v1/' + core.Uri.encodeFull('$name'); final response_ = await _requester.request( url_, - 'GET', + 'PATCH', + body: body_, queryParams: queryParams_, ); - return Policy.fromJson(response_ as core.Map); + return GoogleLongrunningOperation.fromJson( + response_ as core.Map); } /// Sets the access control policy on the specified resource. @@ -1406,7 +1345,7 @@ class ProjectsLocationsMessageBusesResource { /// [Resource names](https://cloud.google.com/apis/design/resource_names) for /// the appropriate value for this field. /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/messageBuses/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/enrollments/\[^/\]+$`. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. @@ -1455,7 +1394,7 @@ class ProjectsLocationsMessageBusesResource { /// [Resource names](https://cloud.google.com/apis/design/resource_names) for /// the appropriate value for this field. /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/messageBuses/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/enrollments/\[^/\]+$`. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. @@ -1491,52 +1430,54 @@ class ProjectsLocationsMessageBusesResource { } } -class ProjectsLocationsOperationsResource { +class ProjectsLocationsGoogleApiSourcesResource { final commons.ApiRequester _requester; - ProjectsLocationsOperationsResource(commons.ApiRequester client) + ProjectsLocationsGoogleApiSourcesResource(commons.ApiRequester client) : _requester = client; - /// Starts asynchronous cancellation on a long-running operation. - /// - /// The server makes a best effort to cancel the operation, but success is not - /// guaranteed. If the server doesn't support this method, it returns - /// `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation - /// or other methods to check whether the cancellation succeeded or whether - /// the operation completed despite cancellation. On successful cancellation, - /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Create a new GoogleApiSource in a particular project and location. /// /// [request] - The metadata request object. /// /// Request parameters: /// - /// [name] - The name of the operation resource to be cancelled. - /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/operations/\[^/\]+$`. + /// [parent] - Required. The parent collection in which to add this google api + /// source. + /// Value must have pattern `^projects/\[^/\]+/locations/\[^/\]+$`. + /// + /// [googleApiSourceId] - Required. The user-provided ID to be assigned to the + /// GoogleApiSource. 
It should match the format + /// `^[a-z]([a-z0-9-]{0,61}[a-z0-9])?$`. + /// + /// [validateOnly] - Optional. If set, validate the request and preview the + /// review, but do not post it. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. /// - /// Completes with a [Empty]. + /// Completes with a [GoogleLongrunningOperation]. /// /// Completes with a [commons.ApiRequestError] if the API endpoint returned an /// error. /// /// If the used [http.Client] completes with an error when making a REST call, /// this method will complete with the same error. - async.Future cancel( - GoogleLongrunningCancelOperationRequest request, - core.String name, { + async.Future create( + GoogleApiSource request, + core.String parent, { + core.String? googleApiSourceId, + core.bool? validateOnly, core.String? $fields, }) async { final body_ = convert.json.encode(request); final queryParams_ = >{ + if (googleApiSourceId != null) 'googleApiSourceId': [googleApiSourceId], + if (validateOnly != null) 'validateOnly': ['${validateOnly}'], if ($fields != null) 'fields': [$fields], }; - final url_ = 'v1/' + core.Uri.encodeFull('$name') + ':cancel'; + final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/googleApiSources'; final response_ = await _requester.request( url_, @@ -1544,36 +1485,48 @@ class ProjectsLocationsOperationsResource { body: body_, queryParams: queryParams_, ); - return Empty.fromJson(response_ as core.Map); + return GoogleLongrunningOperation.fromJson( + response_ as core.Map); } - /// Deletes a long-running operation. - /// - /// This method indicates that the client is no longer interested in the - /// operation result. It does not cancel the operation. If the server doesn't - /// support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. + /// Delete a single GoogleApiSource. /// /// Request parameters: /// - /// [name] - The name of the operation resource to be deleted. + /// [name] - Required. The name of the GoogleApiSource to be deleted. /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/operations/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/googleApiSources/\[^/\]+$`. + /// + /// [allowMissing] - Optional. If set to true, and the MessageBus is not + /// found, the request will succeed but no action will be taken on the server. + /// + /// [etag] - Optional. If provided, the MessageBus will only be deleted if the + /// etag matches the current etag on the resource. + /// + /// [validateOnly] - Optional. If set, validate the request and preview the + /// review, but do not post it. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. /// - /// Completes with a [Empty]. + /// Completes with a [GoogleLongrunningOperation]. /// /// Completes with a [commons.ApiRequestError] if the API endpoint returned an /// error. /// /// If the used [http.Client] completes with an error when making a REST call, /// this method will complete with the same error. - async.Future delete( + async.Future delete( core.String name, { + core.bool? allowMissing, + core.String? etag, + core.bool? validateOnly, core.String? 
$fields, }) async { final queryParams_ = >{ + if (allowMissing != null) 'allowMissing': ['${allowMissing}'], + if (etag != null) 'etag': [etag], + if (validateOnly != null) 'validateOnly': ['${validateOnly}'], if ($fields != null) 'fields': [$fields], }; @@ -1584,31 +1537,29 @@ class ProjectsLocationsOperationsResource { 'DELETE', queryParams: queryParams_, ); - return Empty.fromJson(response_ as core.Map); + return GoogleLongrunningOperation.fromJson( + response_ as core.Map); } - /// Gets the latest state of a long-running operation. - /// - /// Clients can use this method to poll the operation result at intervals as - /// recommended by the API service. + /// Get a single GoogleApiSource. /// /// Request parameters: /// - /// [name] - The name of the operation resource. + /// [name] - Required. The name of the google api source to get. /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/operations/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/googleApiSources/\[^/\]+$`. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. /// - /// Completes with a [GoogleLongrunningOperation]. + /// Completes with a [GoogleApiSource]. /// /// Completes with a [commons.ApiRequestError] if the API endpoint returned an /// error. /// /// If the used [http.Client] completes with an error when making a REST call, /// this method will complete with the same error. - async.Future get( + async.Future get( core.String name, { core.String? $fields, }) async { @@ -1623,122 +1574,186 @@ class ProjectsLocationsOperationsResource { 'GET', queryParams: queryParams_, ); - return GoogleLongrunningOperation.fromJson( + return GoogleApiSource.fromJson( response_ as core.Map); } - /// Lists operations that match the specified filter in the request. + /// Gets the access control policy for a resource. /// - /// If the server doesn't support this method, it returns `UNIMPLEMENTED`. + /// Returns an empty policy if the resource exists and does not have a policy + /// set. /// /// Request parameters: /// - /// [name] - The name of the operation's parent resource. + /// [resource] - REQUIRED: The resource for which the policy is being + /// requested. See + /// [Resource names](https://cloud.google.com/apis/design/resource_names) for + /// the appropriate value for this field. + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/googleApiSources/\[^/\]+$`. + /// + /// [options_requestedPolicyVersion] - Optional. The maximum policy version + /// that will be used to format the policy. Valid values are 0, 1, and 3. + /// Requests specifying an invalid value will be rejected. Requests for + /// policies with any conditional role bindings must specify version 3. + /// Policies with no conditional role bindings may specify any valid value or + /// leave the field unset. The policy in the response might use the policy + /// version that you specified, or it might use a lower policy version. For + /// example, if you specify version 3, but the policy has no conditional role + /// bindings, the response uses version 1. To learn which resources support + /// conditions in their IAM policies, see the + /// [IAM documentation](https://cloud.google.com/iam/help/conditions/resource-policies). + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [Policy]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. 
+ /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future getIamPolicy( + core.String resource, { + core.int? options_requestedPolicyVersion, + core.String? $fields, + }) async { + final queryParams_ = >{ + if (options_requestedPolicyVersion != null) + 'options.requestedPolicyVersion': ['${options_requestedPolicyVersion}'], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$resource') + ':getIamPolicy'; + + final response_ = await _requester.request( + url_, + 'GET', + queryParams: queryParams_, + ); + return Policy.fromJson(response_ as core.Map); + } + + /// List GoogleApiSources. + /// + /// Request parameters: + /// + /// [parent] - Required. The parent collection to list GoogleApiSources on. /// Value must have pattern `^projects/\[^/\]+/locations/\[^/\]+$`. /// - /// [filter] - The standard list filter. + /// [filter] - Optional. The filter field that the list request will filter + /// on. Possible filtersare described in https://google.aip.dev/160. /// - /// [pageSize] - The standard list page size. + /// [orderBy] - Optional. The sorting order of the resources returned. Value + /// should be a comma-separated list of fields. The default sorting order is + /// ascending. To specify descending order for a field, append a `desc` + /// suffix; for example: `name desc, update_time`. /// - /// [pageToken] - The standard list page token. + /// [pageSize] - Optional. The maximum number of results to return on each + /// page. Note: The service may send fewer. + /// + /// [pageToken] - Optional. The page token; provide the value from the + /// `next_page_token` field in a previous call to retrieve the subsequent + /// page. When paginating, all other parameters provided must match the + /// previous call that provided the page token. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. /// - /// Completes with a [GoogleLongrunningListOperationsResponse]. + /// Completes with a [ListGoogleApiSourcesResponse]. /// /// Completes with a [commons.ApiRequestError] if the API endpoint returned an /// error. /// /// If the used [http.Client] completes with an error when making a REST call, /// this method will complete with the same error. - async.Future list( - core.String name, { + async.Future list( + core.String parent, { core.String? filter, + core.String? orderBy, core.int? pageSize, core.String? pageToken, core.String? $fields, }) async { final queryParams_ = >{ if (filter != null) 'filter': [filter], + if (orderBy != null) 'orderBy': [orderBy], if (pageSize != null) 'pageSize': ['${pageSize}'], if (pageToken != null) 'pageToken': [pageToken], if ($fields != null) 'fields': [$fields], }; - final url_ = 'v1/' + core.Uri.encodeFull('$name') + '/operations'; + final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/googleApiSources'; final response_ = await _requester.request( url_, 'GET', queryParams: queryParams_, ); - return GoogleLongrunningListOperationsResponse.fromJson( + return ListGoogleApiSourcesResponse.fromJson( response_ as core.Map); } -} - -class ProjectsLocationsPipelinesResource { - final commons.ApiRequester _requester; - - ProjectsLocationsPipelinesResource(commons.ApiRequester client) - : _requester = client; - /// Gets the access control policy for a resource. + /// Update a single GoogleApiSource. /// - /// Returns an empty policy if the resource exists and does not have a policy - /// set. 
+ /// [request] - The metadata request object. /// /// Request parameters: /// - /// [resource] - REQUIRED: The resource for which the policy is being - /// requested. See - /// [Resource names](https://cloud.google.com/apis/design/resource_names) for - /// the appropriate value for this field. + /// [name] - Identifier. Resource name of the form + /// projects/{project}/locations/{location}/googleApiSources/{google_api_source} /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/pipelines/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/googleApiSources/\[^/\]+$`. /// - /// [options_requestedPolicyVersion] - Optional. The maximum policy version - /// that will be used to format the policy. Valid values are 0, 1, and 3. - /// Requests specifying an invalid value will be rejected. Requests for - /// policies with any conditional role bindings must specify version 3. - /// Policies with no conditional role bindings may specify any valid value or - /// leave the field unset. The policy in the response might use the policy - /// version that you specified, or it might use a lower policy version. For - /// example, if you specify version 3, but the policy has no conditional role - /// bindings, the response uses version 1. To learn which resources support - /// conditions in their IAM policies, see the - /// [IAM documentation](https://cloud.google.com/iam/help/conditions/resource-policies). + /// [allowMissing] - Optional. If set to true, and the GoogleApiSource is not + /// found, a new GoogleApiSource will be created. In this situation, + /// `update_mask` is ignored. + /// + /// [updateMask] - Optional. The fields to be updated; only fields explicitly + /// provided are updated. If no field mask is provided, all provided fields in + /// the request are updated. To update all fields, provide a field mask of + /// "*". + /// + /// [validateOnly] - Optional. If set, validate the request and preview the + /// review, but do not post it. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. /// - /// Completes with a [Policy]. + /// Completes with a [GoogleLongrunningOperation]. /// /// Completes with a [commons.ApiRequestError] if the API endpoint returned an /// error. /// /// If the used [http.Client] completes with an error when making a REST call, /// this method will complete with the same error. - async.Future getIamPolicy( - core.String resource, { - core.int? options_requestedPolicyVersion, + async.Future patch( + GoogleApiSource request, + core.String name, { + core.bool? allowMissing, + core.String? updateMask, + core.bool? validateOnly, core.String? $fields, }) async { + final body_ = convert.json.encode(request); final queryParams_ = >{ - if (options_requestedPolicyVersion != null) - 'options.requestedPolicyVersion': ['${options_requestedPolicyVersion}'], + if (allowMissing != null) 'allowMissing': ['${allowMissing}'], + if (updateMask != null) 'updateMask': [updateMask], + if (validateOnly != null) 'validateOnly': ['${validateOnly}'], if ($fields != null) 'fields': [$fields], }; - final url_ = 'v1/' + core.Uri.encodeFull('$resource') + ':getIamPolicy'; + final url_ = 'v1/' + core.Uri.encodeFull('$name'); final response_ = await _requester.request( url_, - 'GET', + 'PATCH', + body: body_, queryParams: queryParams_, ); - return Policy.fromJson(response_ as core.Map); + return GoogleLongrunningOperation.fromJson( + response_ as core.Map); } /// Sets the access control policy on the specified resource. 
@@ -1755,7 +1770,7 @@ class ProjectsLocationsPipelinesResource { /// [Resource names](https://cloud.google.com/apis/design/resource_names) for /// the appropriate value for this field. /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/pipelines/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/googleApiSources/\[^/\]+$`. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. @@ -1804,7 +1819,7 @@ class ProjectsLocationsPipelinesResource { /// [Resource names](https://cloud.google.com/apis/design/resource_names) for /// the appropriate value for this field. /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/pipelines/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/googleApiSources/\[^/\]+$`. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. @@ -1840,185 +1855,86 @@ class ProjectsLocationsPipelinesResource { } } -class ProjectsLocationsProvidersResource { +class ProjectsLocationsMessageBusesResource { final commons.ApiRequester _requester; - ProjectsLocationsProvidersResource(commons.ApiRequester client) + ProjectsLocationsMessageBusesResource(commons.ApiRequester client) : _requester = client; - /// Get a single Provider. + /// Create a new MessageBus in a particular project and location. + /// + /// [request] - The metadata request object. /// /// Request parameters: /// - /// [name] - Required. The name of the provider to get. - /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/providers/\[^/\]+$`. + /// [parent] - Required. The parent collection in which to add this message + /// bus. + /// Value must have pattern `^projects/\[^/\]+/locations/\[^/\]+$`. + /// + /// [messageBusId] - Required. The user-provided ID to be assigned to the + /// MessageBus. It should match the format + /// `^[a-z]([a-z0-9-]{0,61}[a-z0-9])?$`. + /// + /// [validateOnly] - Optional. If set, validate the request and preview the + /// review, but do not post it. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. /// - /// Completes with a [Provider]. + /// Completes with a [GoogleLongrunningOperation]. /// /// Completes with a [commons.ApiRequestError] if the API endpoint returned an /// error. /// /// If the used [http.Client] completes with an error when making a REST call, /// this method will complete with the same error. - async.Future get( - core.String name, { + async.Future create( + MessageBus request, + core.String parent, { + core.String? messageBusId, + core.bool? validateOnly, core.String? $fields, }) async { + final body_ = convert.json.encode(request); final queryParams_ = >{ + if (messageBusId != null) 'messageBusId': [messageBusId], + if (validateOnly != null) 'validateOnly': ['${validateOnly}'], if ($fields != null) 'fields': [$fields], }; - final url_ = 'v1/' + core.Uri.encodeFull('$name'); + final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/messageBuses'; final response_ = await _requester.request( url_, - 'GET', + 'POST', + body: body_, queryParams: queryParams_, ); - return Provider.fromJson(response_ as core.Map); + return GoogleLongrunningOperation.fromJson( + response_ as core.Map); } - /// List providers. + /// Delete a single message bus. /// /// Request parameters: /// - /// [parent] - Required. The parent of the provider to get. - /// Value must have pattern `^projects/\[^/\]+/locations/\[^/\]+$`. - /// - /// [filter] - The filter field that the list request will filter on. 
+ /// [name] - Required. The name of the MessageBus to be deleted. + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/messageBuses/\[^/\]+$`. /// - /// [orderBy] - The sorting order of the resources returned. Value should be a - /// comma-separated list of fields. The default sorting oder is ascending. To - /// specify descending order for a field, append a `desc` suffix; for example: - /// `name desc, _id`. + /// [allowMissing] - Optional. If set to true, and the MessageBus is not + /// found, the request will succeed but no action will be taken on the server. /// - /// [pageSize] - The maximum number of providers to return on each page. + /// [etag] - Optional. If provided, the MessageBus will only be deleted if the + /// etag matches the current etag on the resource. /// - /// [pageToken] - The page token; provide the value from the `next_page_token` - /// field in a previous `ListProviders` call to retrieve the subsequent page. - /// When paginating, all other parameters provided to `ListProviders` must - /// match the call that provided the page token. + /// [validateOnly] - Optional. If set, validate the request and preview the + /// review, but do not post it. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. /// - /// Completes with a [ListProvidersResponse]. - /// - /// Completes with a [commons.ApiRequestError] if the API endpoint returned an - /// error. - /// - /// If the used [http.Client] completes with an error when making a REST call, - /// this method will complete with the same error. - async.Future list( - core.String parent, { - core.String? filter, - core.String? orderBy, - core.int? pageSize, - core.String? pageToken, - core.String? $fields, - }) async { - final queryParams_ = >{ - if (filter != null) 'filter': [filter], - if (orderBy != null) 'orderBy': [orderBy], - if (pageSize != null) 'pageSize': ['${pageSize}'], - if (pageToken != null) 'pageToken': [pageToken], - if ($fields != null) 'fields': [$fields], - }; - - final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/providers'; - - final response_ = await _requester.request( - url_, - 'GET', - queryParams: queryParams_, - ); - return ListProvidersResponse.fromJson( - response_ as core.Map); - } -} - -class ProjectsLocationsTriggersResource { - final commons.ApiRequester _requester; - - ProjectsLocationsTriggersResource(commons.ApiRequester client) - : _requester = client; - - /// Create a new trigger in a particular project and location. - /// - /// [request] - The metadata request object. - /// - /// Request parameters: - /// - /// [parent] - Required. The parent collection in which to add this trigger. - /// Value must have pattern `^projects/\[^/\]+/locations/\[^/\]+$`. - /// - /// [triggerId] - Required. The user-provided ID to be assigned to the - /// trigger. - /// - /// [validateOnly] - Optional. If set, validate the request and preview the - /// review, but do not post it. - /// - /// [$fields] - Selector specifying which fields to include in a partial - /// response. - /// - /// Completes with a [GoogleLongrunningOperation]. - /// - /// Completes with a [commons.ApiRequestError] if the API endpoint returned an - /// error. - /// - /// If the used [http.Client] completes with an error when making a REST call, - /// this method will complete with the same error. - async.Future create( - Trigger request, - core.String parent, { - core.String? triggerId, - core.bool? validateOnly, - core.String? 
$fields, - }) async { - final body_ = convert.json.encode(request); - final queryParams_ = >{ - if (triggerId != null) 'triggerId': [triggerId], - if (validateOnly != null) 'validateOnly': ['${validateOnly}'], - if ($fields != null) 'fields': [$fields], - }; - - final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/triggers'; - - final response_ = await _requester.request( - url_, - 'POST', - body: body_, - queryParams: queryParams_, - ); - return GoogleLongrunningOperation.fromJson( - response_ as core.Map); - } - - /// Delete a single trigger. - /// - /// Request parameters: - /// - /// [name] - Required. The name of the trigger to be deleted. - /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/triggers/\[^/\]+$`. - /// - /// [allowMissing] - If set to true, and the trigger is not found, the request - /// will succeed but no action will be taken on the server. - /// - /// [etag] - If provided, the trigger will only be deleted if the etag matches - /// the current etag on the resource. - /// - /// [validateOnly] - Optional. If set, validate the request and preview the - /// review, but do not post it. - /// - /// [$fields] - Selector specifying which fields to include in a partial - /// response. - /// - /// Completes with a [GoogleLongrunningOperation]. + /// Completes with a [GoogleLongrunningOperation]. /// /// Completes with a [commons.ApiRequestError] if the API endpoint returned an /// error. @@ -2050,25 +1966,25 @@ class ProjectsLocationsTriggersResource { response_ as core.Map); } - /// Get a single trigger. + /// Get a single MessageBus. /// /// Request parameters: /// - /// [name] - Required. The name of the trigger to get. + /// [name] - Required. The name of the message bus to get. /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/triggers/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/messageBuses/\[^/\]+$`. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. /// - /// Completes with a [Trigger]. + /// Completes with a [MessageBus]. /// /// Completes with a [commons.ApiRequestError] if the API endpoint returned an /// error. /// /// If the used [http.Client] completes with an error when making a REST call, /// this method will complete with the same error. - async.Future get( + async.Future get( core.String name, { core.String? $fields, }) async { @@ -2083,7 +1999,8 @@ class ProjectsLocationsTriggersResource { 'GET', queryParams: queryParams_, ); - return Trigger.fromJson(response_ as core.Map); + return MessageBus.fromJson( + response_ as core.Map); } /// Gets the access control policy for a resource. @@ -2098,7 +2015,7 @@ class ProjectsLocationsTriggersResource { /// [Resource names](https://cloud.google.com/apis/design/resource_names) for /// the appropriate value for this field. /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/triggers/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/messageBuses/\[^/\]+$`. /// /// [options_requestedPolicyVersion] - Optional. The maximum policy version /// that will be used to format the policy. Valid values are 0, 1, and 3. @@ -2143,42 +2060,40 @@ class ProjectsLocationsTriggersResource { return Policy.fromJson(response_ as core.Map); } - /// List triggers. + /// List message buses. /// /// Request parameters: /// - /// [parent] - Required. The parent collection to list triggers on. + /// [parent] - Required. The parent collection to list message buses on. /// Value must have pattern `^projects/\[^/\]+/locations/\[^/\]+$`. 
/// - /// [filter] - Filter field. Used to filter the Triggers to be listed. - /// Possible filters are described in https://google.aip.dev/160. For example, - /// using "?filter=destination:gke" would list only Triggers with a gke - /// destination. + /// [filter] - Optional. The filter field that the list request will filter + /// on. Possible filtersare described in https://google.aip.dev/160. /// - /// [orderBy] - The sorting order of the resources returned. Value should be a - /// comma-separated list of fields. The default sorting order is ascending. To - /// specify descending order for a field, append a `desc` suffix; for example: - /// `name desc, trigger_id`. + /// [orderBy] - Optional. The sorting order of the resources returned. Value + /// should be a comma-separated list of fields. The default sorting order is + /// ascending. To specify descending order for a field, append a `desc` + /// suffix; for example: `name desc, update_time`. /// - /// [pageSize] - The maximum number of triggers to return on each page. Note: - /// The service may send fewer. + /// [pageSize] - Optional. The maximum number of results to return on each + /// page. Note: The service may send fewer. /// - /// [pageToken] - The page token; provide the value from the `next_page_token` - /// field in a previous `ListTriggers` call to retrieve the subsequent page. - /// When paginating, all other parameters provided to `ListTriggers` must - /// match the call that provided the page token. + /// [pageToken] - Optional. The page token; provide the value from the + /// `next_page_token` field in a previous call to retrieve the subsequent + /// page. When paginating, all other parameters provided must match the + /// previous call that provided the page token. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. /// - /// Completes with a [ListTriggersResponse]. + /// Completes with a [ListMessageBusesResponse]. /// /// Completes with a [commons.ApiRequestError] if the API endpoint returned an /// error. /// /// If the used [http.Client] completes with an error when making a REST call, /// this method will complete with the same error. - async.Future list( + async.Future list( core.String parent, { core.String? filter, core.String? orderBy, @@ -2194,35 +2109,85 @@ class ProjectsLocationsTriggersResource { if ($fields != null) 'fields': [$fields], }; - final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/triggers'; + final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/messageBuses'; final response_ = await _requester.request( url_, 'GET', queryParams: queryParams_, ); - return ListTriggersResponse.fromJson( + return ListMessageBusesResponse.fromJson( response_ as core.Map); } - /// Update a single trigger. + /// List message bus enrollments. + /// + /// Request parameters: + /// + /// [parent] - Required. The parent message bus to list enrollments on. + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/messageBuses/\[^/\]+$`. + /// + /// [pageSize] - Optional. The maximum number of results to return on each + /// page. Note: The service may send fewer. + /// + /// [pageToken] - Optional. The page token; provide the value from the + /// `next_page_token` field in a previous call to retrieve the subsequent + /// page. When paginating, all other parameters provided must match the + /// previous call that provided the page token. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. 
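Editor's sketch (not part of the generated diff): the list method above is paginated via `pageToken`/`next_page_token`, so callers typically loop until no token is returned. The response field names `messageBuses` and `nextPageToken` are assumed from the generator's conventions for `ListMessageBusesResponse`.

import 'package:googleapis/eventarc/v1.dart';

/// Collects every MessageBus under [parent] by following `nextPageToken`.
Future<List<MessageBus>> listAllMessageBuses(
    EventarcApi api, String parent) async {
  final buses = <MessageBus>[];
  String? pageToken;
  do {
    final page = await api.projects.locations.messageBuses.list(
      parent, // 'projects/my-project/locations/us-central1'
      pageSize: 50,
      pageToken: pageToken, // null on the first call
    );
    buses.addAll(page.messageBuses ?? const <MessageBus>[]);
    pageToken = page.nextPageToken;
  } while (pageToken != null);
  return buses;
}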
+ /// + /// Completes with a [ListMessageBusEnrollmentsResponse]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future listEnrollments( + core.String parent, { + core.int? pageSize, + core.String? pageToken, + core.String? $fields, + }) async { + final queryParams_ = >{ + if (pageSize != null) 'pageSize': ['${pageSize}'], + if (pageToken != null) 'pageToken': [pageToken], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$parent') + ':listEnrollments'; + + final response_ = await _requester.request( + url_, + 'GET', + queryParams: queryParams_, + ); + return ListMessageBusEnrollmentsResponse.fromJson( + response_ as core.Map); + } + + /// Update a single message bus. /// /// [request] - The metadata request object. /// /// Request parameters: /// - /// [name] - Required. The resource name of the trigger. Must be unique within - /// the location of the project and must be in - /// `projects/{project}/locations/{location}/triggers/{trigger}` format. + /// [name] - Identifier. Resource name of the form + /// projects/{project}/locations/{location}/messageBuses/{message_bus} /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/triggers/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/messageBuses/\[^/\]+$`. /// - /// [allowMissing] - If set to true, and the trigger is not found, a new - /// trigger will be created. In this situation, `update_mask` is ignored. + /// [allowMissing] - Optional. If set to true, and the MessageBus is not + /// found, a new MessageBus will be created. In this situation, `update_mask` + /// is ignored. /// - /// [updateMask] - The fields to be updated; only fields explicitly provided - /// are updated. If no field mask is provided, all provided fields in the - /// request are updated. To update all fields, provide a field mask of "*". + /// [updateMask] - Optional. The fields to be updated; only fields explicitly + /// provided are updated. If no field mask is provided, all provided fields in + /// the request are updated. To update all fields, provide a field mask of + /// "*". /// /// [validateOnly] - Optional. If set, validate the request and preview the /// review, but do not post it. @@ -2238,7 +2203,7 @@ class ProjectsLocationsTriggersResource { /// If the used [http.Client] completes with an error when making a REST call, /// this method will complete with the same error. async.Future patch( - Trigger request, + MessageBus request, core.String name, { core.bool? allowMissing, core.String? updateMask, @@ -2279,7 +2244,7 @@ class ProjectsLocationsTriggersResource { /// [Resource names](https://cloud.google.com/apis/design/resource_names) for /// the appropriate value for this field. /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/triggers/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/messageBuses/\[^/\]+$`. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. @@ -2328,7 +2293,7 @@ class ProjectsLocationsTriggersResource { /// [Resource names](https://cloud.google.com/apis/design/resource_names) for /// the appropriate value for this field. /// Value must have pattern - /// `^projects/\[^/\]+/locations/\[^/\]+/triggers/\[^/\]+$`. + /// `^projects/\[^/\]+/locations/\[^/\]+/messageBuses/\[^/\]+$`. 
/// /// [$fields] - Selector specifying which fields to include in a partial /// response. @@ -2364,179 +2329,2040 @@ class ProjectsLocationsTriggersResource { } } -/// Specifies the audit configuration for a service. -/// -/// The configuration determines which permission types are logged, and what -/// identities, if any, are exempted from logging. An AuditConfig must have one -/// or more AuditLogConfigs. If there are AuditConfigs for both `allServices` -/// and a specific service, the union of the two AuditConfigs is used for that -/// service: the log_types specified in each AuditConfig are enabled, and the -/// exempted_members in each AuditLogConfig are exempted. Example Policy with -/// multiple AuditConfigs: { "audit_configs": \[ { "service": "allServices", -/// "audit_log_configs": \[ { "log_type": "DATA_READ", "exempted_members": \[ -/// "user:jose@example.com" \] }, { "log_type": "DATA_WRITE" }, { "log_type": -/// "ADMIN_READ" } \] }, { "service": "sampleservice.googleapis.com", -/// "audit_log_configs": \[ { "log_type": "DATA_READ" }, { "log_type": -/// "DATA_WRITE", "exempted_members": \[ "user:aliya@example.com" \] } \] } \] } -/// For sampleservice, this policy enables DATA_READ, DATA_WRITE and ADMIN_READ -/// logging. It also exempts `jose@example.com` from DATA_READ logging, and -/// `aliya@example.com` from DATA_WRITE logging. -class AuditConfig { - /// The configuration for logging of each type of permission. - core.List? auditLogConfigs; - - /// Specifies a service that will be enabled for audit logging. - /// - /// For example, `storage.googleapis.com`, `cloudsql.googleapis.com`. - /// `allServices` is a special value that covers all services. - core.String? service; - - AuditConfig({ - this.auditLogConfigs, - this.service, - }); - - AuditConfig.fromJson(core.Map json_) - : this( - auditLogConfigs: (json_['auditLogConfigs'] as core.List?) - ?.map((value) => AuditLogConfig.fromJson( - value as core.Map)) - .toList(), - service: json_['service'] as core.String?, - ); - - core.Map toJson() => { - if (auditLogConfigs != null) 'auditLogConfigs': auditLogConfigs!, - if (service != null) 'service': service!, - }; -} +class ProjectsLocationsOperationsResource { + final commons.ApiRequester _requester; -/// Provides the configuration for logging a type of permissions. -/// -/// Example: { "audit_log_configs": \[ { "log_type": "DATA_READ", -/// "exempted_members": \[ "user:jose@example.com" \] }, { "log_type": -/// "DATA_WRITE" } \] } This enables 'DATA_READ' and 'DATA_WRITE' logging, while -/// exempting jose@example.com from DATA_READ logging. -typedef AuditLogConfig = $AuditLogConfig; + ProjectsLocationsOperationsResource(commons.ApiRequester client) + : _requester = client; -/// Associates `members`, or principals, with a `role`. -class Binding { - /// The condition that is associated with this binding. + /// Starts asynchronous cancellation on a long-running operation. /// - /// If the condition evaluates to `true`, then this binding applies to the - /// current request. If the condition evaluates to `false`, then this binding - /// does not apply to the current request. However, a different role binding - /// might grant the same role to one or more of the principals in this - /// binding. To learn which resources support conditions in their IAM - /// policies, see the - /// [IAM documentation](https://cloud.google.com/iam/help/conditions/resource-policies). + /// The server makes a best effort to cancel the operation, but success is not + /// guaranteed. 
If the server doesn't support this method, it returns + /// `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation + /// or other methods to check whether the cancellation succeeded or whether + /// the operation completed despite cancellation. On successful cancellation, + /// the operation is not deleted; instead, it becomes an operation with an + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. + /// + /// [request] - The metadata request object. + /// + /// Request parameters: + /// + /// [name] - The name of the operation resource to be cancelled. + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/operations/\[^/\]+$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [Empty]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future cancel( + GoogleLongrunningCancelOperationRequest request, + core.String name, { + core.String? $fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name') + ':cancel'; + + final response_ = await _requester.request( + url_, + 'POST', + body: body_, + queryParams: queryParams_, + ); + return Empty.fromJson(response_ as core.Map); + } + + /// Deletes a long-running operation. + /// + /// This method indicates that the client is no longer interested in the + /// operation result. It does not cancel the operation. If the server doesn't + /// support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. + /// + /// Request parameters: + /// + /// [name] - The name of the operation resource to be deleted. + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/operations/\[^/\]+$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [Empty]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future delete( + core.String name, { + core.String? $fields, + }) async { + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name'); + + final response_ = await _requester.request( + url_, + 'DELETE', + queryParams: queryParams_, + ); + return Empty.fromJson(response_ as core.Map); + } + + /// Gets the latest state of a long-running operation. + /// + /// Clients can use this method to poll the operation result at intervals as + /// recommended by the API service. + /// + /// Request parameters: + /// + /// [name] - The name of the operation resource. + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/operations/\[^/\]+$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [GoogleLongrunningOperation]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. 
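Editor's sketch (not part of the generated diff): many methods in this client return a `GoogleLongrunningOperation`, and the `operations.get` method above is the documented way to poll it. The `error.message` access and the five-second poll interval below are assumptions for illustration only.

import 'package:googleapis/eventarc/v1.dart';

/// Polls `operations.get` until the long-running operation finishes.
Future<GoogleLongrunningOperation> waitForOperation(
    EventarcApi api, String operationName) async {
  while (true) {
    final op = await api.projects.locations.operations.get(operationName);
    if (op.done ?? false) {
      if (op.error != null) {
        throw StateError('operation failed: ${op.error!.message}');
      }
      return op; // op.response carries the resulting resource, if any
    }
    await Future<void>.delayed(const Duration(seconds: 5));
  }
}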
+ /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future get( + core.String name, { + core.String? $fields, + }) async { + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name'); + + final response_ = await _requester.request( + url_, + 'GET', + queryParams: queryParams_, + ); + return GoogleLongrunningOperation.fromJson( + response_ as core.Map); + } + + /// Lists operations that match the specified filter in the request. + /// + /// If the server doesn't support this method, it returns `UNIMPLEMENTED`. + /// + /// Request parameters: + /// + /// [name] - The name of the operation's parent resource. + /// Value must have pattern `^projects/\[^/\]+/locations/\[^/\]+$`. + /// + /// [filter] - The standard list filter. + /// + /// [pageSize] - The standard list page size. + /// + /// [pageToken] - The standard list page token. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [GoogleLongrunningListOperationsResponse]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future list( + core.String name, { + core.String? filter, + core.int? pageSize, + core.String? pageToken, + core.String? $fields, + }) async { + final queryParams_ = >{ + if (filter != null) 'filter': [filter], + if (pageSize != null) 'pageSize': ['${pageSize}'], + if (pageToken != null) 'pageToken': [pageToken], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name') + '/operations'; + + final response_ = await _requester.request( + url_, + 'GET', + queryParams: queryParams_, + ); + return GoogleLongrunningListOperationsResponse.fromJson( + response_ as core.Map); + } +} + +class ProjectsLocationsPipelinesResource { + final commons.ApiRequester _requester; + + ProjectsLocationsPipelinesResource(commons.ApiRequester client) + : _requester = client; + + /// Create a new Pipeline in a particular project and location. + /// + /// [request] - The metadata request object. + /// + /// Request parameters: + /// + /// [parent] - Required. The parent collection in which to add this pipeline. + /// Value must have pattern `^projects/\[^/\]+/locations/\[^/\]+$`. + /// + /// [pipelineId] - Required. The user-provided ID to be assigned to the + /// Pipeline. It should match the format `^[a-z]([a-z0-9-]{0,61}[a-z0-9])?$`. + /// + /// [validateOnly] - Optional. If set, validate the request and preview the + /// review, but do not post it. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [GoogleLongrunningOperation]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future create( + Pipeline request, + core.String parent, { + core.String? pipelineId, + core.bool? validateOnly, + core.String? 
$fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if (pipelineId != null) 'pipelineId': [pipelineId], + if (validateOnly != null) 'validateOnly': ['${validateOnly}'], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/pipelines'; + + final response_ = await _requester.request( + url_, + 'POST', + body: body_, + queryParams: queryParams_, + ); + return GoogleLongrunningOperation.fromJson( + response_ as core.Map); + } + + /// Delete a single pipeline. + /// + /// Request parameters: + /// + /// [name] - Required. The name of the Pipeline to be deleted. + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/pipelines/\[^/\]+$`. + /// + /// [allowMissing] - Optional. If set to true, and the Pipeline is not found, + /// the request will succeed but no action will be taken on the server. + /// + /// [etag] - Optional. If provided, the Pipeline will only be deleted if the + /// etag matches the current etag on the resource. + /// + /// [validateOnly] - Optional. If set, validate the request and preview the + /// review, but do not post it. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [GoogleLongrunningOperation]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future delete( + core.String name, { + core.bool? allowMissing, + core.String? etag, + core.bool? validateOnly, + core.String? $fields, + }) async { + final queryParams_ = >{ + if (allowMissing != null) 'allowMissing': ['${allowMissing}'], + if (etag != null) 'etag': [etag], + if (validateOnly != null) 'validateOnly': ['${validateOnly}'], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name'); + + final response_ = await _requester.request( + url_, + 'DELETE', + queryParams: queryParams_, + ); + return GoogleLongrunningOperation.fromJson( + response_ as core.Map); + } + + /// Get a single Pipeline. + /// + /// Request parameters: + /// + /// [name] - Required. The name of the pipeline to get. + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/pipelines/\[^/\]+$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [Pipeline]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future get( + core.String name, { + core.String? $fields, + }) async { + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name'); + + final response_ = await _requester.request( + url_, + 'GET', + queryParams: queryParams_, + ); + return Pipeline.fromJson(response_ as core.Map); + } + + /// Gets the access control policy for a resource. + /// + /// Returns an empty policy if the resource exists and does not have a policy + /// set. + /// + /// Request parameters: + /// + /// [resource] - REQUIRED: The resource for which the policy is being + /// requested. 
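Editor's sketch (not part of the generated diff): the `pipelines.create` method above takes the new resource as the request body plus a `pipelineId` and optional `validateOnly` flag, and resolves to a long-running operation. The empty `Pipeline()` below is a placeholder; its real fields are defined elsewhere in this file.

import 'package:googleapis/eventarc/v1.dart';

/// Asks the service to validate (but not create) a new Pipeline.
Future<GoogleLongrunningOperation> previewCreatePipeline(
    EventarcApi api, String parent) {
  final pipeline = Pipeline(); // populate destinations etc. per the model
  return api.projects.locations.pipelines.create(
    pipeline,
    parent, // 'projects/my-project/locations/us-central1'
    pipelineId: 'my-pipeline', // must match ^[a-z]([a-z0-9-]{0,61}[a-z0-9])?$
    validateOnly: true, // drop this to actually create the resource
  );
}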
See + /// [Resource names](https://cloud.google.com/apis/design/resource_names) for + /// the appropriate value for this field. + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/pipelines/\[^/\]+$`. + /// + /// [options_requestedPolicyVersion] - Optional. The maximum policy version + /// that will be used to format the policy. Valid values are 0, 1, and 3. + /// Requests specifying an invalid value will be rejected. Requests for + /// policies with any conditional role bindings must specify version 3. + /// Policies with no conditional role bindings may specify any valid value or + /// leave the field unset. The policy in the response might use the policy + /// version that you specified, or it might use a lower policy version. For + /// example, if you specify version 3, but the policy has no conditional role + /// bindings, the response uses version 1. To learn which resources support + /// conditions in their IAM policies, see the + /// [IAM documentation](https://cloud.google.com/iam/help/conditions/resource-policies). + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [Policy]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future getIamPolicy( + core.String resource, { + core.int? options_requestedPolicyVersion, + core.String? $fields, + }) async { + final queryParams_ = >{ + if (options_requestedPolicyVersion != null) + 'options.requestedPolicyVersion': ['${options_requestedPolicyVersion}'], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$resource') + ':getIamPolicy'; + + final response_ = await _requester.request( + url_, + 'GET', + queryParams: queryParams_, + ); + return Policy.fromJson(response_ as core.Map); + } + + /// List pipelines. + /// + /// Request parameters: + /// + /// [parent] - Required. The parent collection to list pipelines on. + /// Value must have pattern `^projects/\[^/\]+/locations/\[^/\]+$`. + /// + /// [filter] - Optional. The filter field that the list request will filter + /// on. Possible filters are described in https://google.aip.dev/160. + /// + /// [orderBy] - Optional. The sorting order of the resources returned. Value + /// should be a comma-separated list of fields. The default sorting order is + /// ascending. To specify descending order for a field, append a `desc` + /// suffix; for example: `name desc, update_time`. + /// + /// [pageSize] - Optional. The maximum number of results to return on each + /// page. Note: The service may send fewer. + /// + /// [pageToken] - Optional. The page token; provide the value from the + /// `next_page_token` field in a previous call to retrieve the subsequent + /// page. When paginating, all other parameters provided must match the + /// previous call that provided the page token. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [ListPipelinesResponse]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future list( + core.String parent, { + core.String? filter, + core.String? orderBy, + core.int? 
pageSize, + core.String? pageToken, + core.String? $fields, + }) async { + final queryParams_ = >{ + if (filter != null) 'filter': [filter], + if (orderBy != null) 'orderBy': [orderBy], + if (pageSize != null) 'pageSize': ['${pageSize}'], + if (pageToken != null) 'pageToken': [pageToken], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/pipelines'; + + final response_ = await _requester.request( + url_, + 'GET', + queryParams: queryParams_, + ); + return ListPipelinesResponse.fromJson( + response_ as core.Map); + } + + /// Update a single pipeline. + /// + /// [request] - The metadata request object. + /// + /// Request parameters: + /// + /// [name] - Identifier. The resource name of the Pipeline. Must be unique + /// within the location of the project and must be in + /// `projects/{project}/locations/{location}/pipelines/{pipeline}` format. + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/pipelines/\[^/\]+$`. + /// + /// [allowMissing] - Optional. If set to true, and the Pipeline is not found, + /// a new Pipeline will be created. In this situation, `update_mask` is + /// ignored. + /// + /// [updateMask] - Optional. The fields to be updated; only fields explicitly + /// provided are updated. If no field mask is provided, all provided fields in + /// the request are updated. To update all fields, provide a field mask of + /// "*". + /// + /// [validateOnly] - Optional. If set, validate the request and preview the + /// review, but do not post it. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [GoogleLongrunningOperation]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future patch( + Pipeline request, + core.String name, { + core.bool? allowMissing, + core.String? updateMask, + core.bool? validateOnly, + core.String? $fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if (allowMissing != null) 'allowMissing': ['${allowMissing}'], + if (updateMask != null) 'updateMask': [updateMask], + if (validateOnly != null) 'validateOnly': ['${validateOnly}'], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name'); + + final response_ = await _requester.request( + url_, + 'PATCH', + body: body_, + queryParams: queryParams_, + ); + return GoogleLongrunningOperation.fromJson( + response_ as core.Map); + } + + /// Sets the access control policy on the specified resource. + /// + /// Replaces any existing policy. Can return `NOT_FOUND`, `INVALID_ARGUMENT`, + /// and `PERMISSION_DENIED` errors. + /// + /// [request] - The metadata request object. + /// + /// Request parameters: + /// + /// [resource] - REQUIRED: The resource for which the policy is being + /// specified. See + /// [Resource names](https://cloud.google.com/apis/design/resource_names) for + /// the appropriate value for this field. + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/pipelines/\[^/\]+$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [Policy]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. 
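Editor's sketch (not part of the generated diff): per the `patch` documentation above, only the fields named in `updateMask` are changed, and `allowMissing` turns the call into an upsert. The `labels` field on `Pipeline` is an assumption made for illustration.

import 'package:googleapis/eventarc/v1.dart';

/// Updates only the labels of an existing Pipeline.
Future<GoogleLongrunningOperation> relabelPipeline(
    EventarcApi api, String name) {
  final patch = Pipeline(labels: {'env': 'prod'});
  return api.projects.locations.pipelines.patch(
    patch,
    name, // 'projects/my-project/locations/us-central1/pipelines/my-pipeline'
    updateMask: 'labels', // only the listed fields are touched
    // allowMissing: true would create the Pipeline if it does not exist
    // (update_mask is then ignored, per the doc comment above).
  );
}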
+ /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future setIamPolicy( + SetIamPolicyRequest request, + core.String resource, { + core.String? $fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$resource') + ':setIamPolicy'; + + final response_ = await _requester.request( + url_, + 'POST', + body: body_, + queryParams: queryParams_, + ); + return Policy.fromJson(response_ as core.Map); + } + + /// Returns permissions that a caller has on the specified resource. + /// + /// If the resource does not exist, this will return an empty set of + /// permissions, not a `NOT_FOUND` error. Note: This operation is designed to + /// be used for building permission-aware UIs and command-line tools, not for + /// authorization checking. This operation may "fail open" without warning. + /// + /// [request] - The metadata request object. + /// + /// Request parameters: + /// + /// [resource] - REQUIRED: The resource for which the policy detail is being + /// requested. See + /// [Resource names](https://cloud.google.com/apis/design/resource_names) for + /// the appropriate value for this field. + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/pipelines/\[^/\]+$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [TestIamPermissionsResponse]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future testIamPermissions( + TestIamPermissionsRequest request, + core.String resource, { + core.String? $fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = + 'v1/' + core.Uri.encodeFull('$resource') + ':testIamPermissions'; + + final response_ = await _requester.request( + url_, + 'POST', + body: body_, + queryParams: queryParams_, + ); + return TestIamPermissionsResponse.fromJson( + response_ as core.Map); + } +} + +class ProjectsLocationsProvidersResource { + final commons.ApiRequester _requester; + + ProjectsLocationsProvidersResource(commons.ApiRequester client) + : _requester = client; + + /// Get a single Provider. + /// + /// Request parameters: + /// + /// [name] - Required. The name of the provider to get. + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/providers/\[^/\]+$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [Provider]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future get( + core.String name, { + core.String? $fields, + }) async { + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name'); + + final response_ = await _requester.request( + url_, + 'GET', + queryParams: queryParams_, + ); + return Provider.fromJson(response_ as core.Map); + } + + /// List providers. 
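Editor's sketch (not part of the generated diff): the `providers.list` method below this point supports `orderBy`, `pageSize`, and an AIP-160 `filter`. The `providers` field on `ListProvidersResponse` is assumed from the generator's conventions.

import 'package:googleapis/eventarc/v1.dart';

/// Prints provider names in descending name order.
Future<void> printProviders(EventarcApi api, String parent) async {
  final response = await api.projects.locations.providers.list(
    parent, // 'projects/my-project/locations/us-central1'
    orderBy: 'name desc',
    pageSize: 20,
  );
  for (final provider in response.providers ?? const <Provider>[]) {
    print(provider.name);
  }
}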
+  ///
+  /// Request parameters:
+  ///
+  /// [parent] - Required. The parent of the provider to get.
+  /// Value must have pattern `^projects/\[^/\]+/locations/\[^/\]+$`.
+  ///
+  /// [filter] - The filter field that the list request will filter on.
+  ///
+  /// [orderBy] - The sorting order of the resources returned. Value should be a
+  /// comma-separated list of fields. The default sorting order is ascending. To
+  /// specify descending order for a field, append a `desc` suffix; for example:
+  /// `name desc, _id`.
+  ///
+  /// [pageSize] - The maximum number of providers to return on each page.
+  ///
+  /// [pageToken] - The page token; provide the value from the `next_page_token`
+  /// field in a previous `ListProviders` call to retrieve the subsequent page.
+  /// When paginating, all other parameters provided to `ListProviders` must
+  /// match the call that provided the page token.
+  ///
+  /// [$fields] - Selector specifying which fields to include in a partial
+  /// response.
+  ///
+  /// Completes with a [ListProvidersResponse].
+  ///
+  /// Completes with a [commons.ApiRequestError] if the API endpoint returned an
+  /// error.
+  ///
+  /// If the used [http.Client] completes with an error when making a REST call,
+  /// this method will complete with the same error.
+  async.Future<ListProvidersResponse> list(
+    core.String parent, {
+    core.String? filter,
+    core.String? orderBy,
+    core.int? pageSize,
+    core.String? pageToken,
+    core.String? $fields,
+  }) async {
+    final queryParams_ = <core.String, core.List<core.String>>{
+      if (filter != null) 'filter': [filter],
+      if (orderBy != null) 'orderBy': [orderBy],
+      if (pageSize != null) 'pageSize': ['${pageSize}'],
+      if (pageToken != null) 'pageToken': [pageToken],
+      if ($fields != null) 'fields': [$fields],
+    };
+
+    final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/providers';
+
+    final response_ = await _requester.request(
+      url_,
+      'GET',
+      queryParams: queryParams_,
+    );
+    return ListProvidersResponse.fromJson(
+        response_ as core.Map<core.String, core.dynamic>);
+  }
+}
+
+class ProjectsLocationsTriggersResource {
+  final commons.ApiRequester _requester;
+
+  ProjectsLocationsTriggersResource(commons.ApiRequester client)
+      : _requester = client;
+
+  /// Create a new trigger in a particular project and location.
+  ///
+  /// [request] - The metadata request object.
+  ///
+  /// Request parameters:
+  ///
+  /// [parent] - Required. The parent collection in which to add this trigger.
+  /// Value must have pattern `^projects/\[^/\]+/locations/\[^/\]+$`.
+  ///
+  /// [triggerId] - Required. The user-provided ID to be assigned to the
+  /// trigger.
+  ///
+  /// [validateOnly] - Optional. If set, validate the request and preview the
+  /// review, but do not post it.
+  ///
+  /// [$fields] - Selector specifying which fields to include in a partial
+  /// response.
+  ///
+  /// Completes with a [GoogleLongrunningOperation].
+  ///
+  /// Completes with a [commons.ApiRequestError] if the API endpoint returned an
+  /// error.
+  ///
+  /// If the used [http.Client] completes with an error when making a REST call,
+  /// this method will complete with the same error.
+  async.Future<GoogleLongrunningOperation> create(
+    Trigger request,
+    core.String parent, {
+    core.String? triggerId,
+    core.bool? validateOnly,
+    core.String?
$fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if (triggerId != null) 'triggerId': [triggerId], + if (validateOnly != null) 'validateOnly': ['${validateOnly}'], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/triggers'; + + final response_ = await _requester.request( + url_, + 'POST', + body: body_, + queryParams: queryParams_, + ); + return GoogleLongrunningOperation.fromJson( + response_ as core.Map); + } + + /// Delete a single trigger. + /// + /// Request parameters: + /// + /// [name] - Required. The name of the trigger to be deleted. + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/triggers/\[^/\]+$`. + /// + /// [allowMissing] - If set to true, and the trigger is not found, the request + /// will succeed but no action will be taken on the server. + /// + /// [etag] - If provided, the trigger will only be deleted if the etag matches + /// the current etag on the resource. + /// + /// [validateOnly] - Optional. If set, validate the request and preview the + /// review, but do not post it. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [GoogleLongrunningOperation]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future delete( + core.String name, { + core.bool? allowMissing, + core.String? etag, + core.bool? validateOnly, + core.String? $fields, + }) async { + final queryParams_ = >{ + if (allowMissing != null) 'allowMissing': ['${allowMissing}'], + if (etag != null) 'etag': [etag], + if (validateOnly != null) 'validateOnly': ['${validateOnly}'], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name'); + + final response_ = await _requester.request( + url_, + 'DELETE', + queryParams: queryParams_, + ); + return GoogleLongrunningOperation.fromJson( + response_ as core.Map); + } + + /// Get a single trigger. + /// + /// Request parameters: + /// + /// [name] - Required. The name of the trigger to get. + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/triggers/\[^/\]+$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [Trigger]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future get( + core.String name, { + core.String? $fields, + }) async { + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name'); + + final response_ = await _requester.request( + url_, + 'GET', + queryParams: queryParams_, + ); + return Trigger.fromJson(response_ as core.Map); + } + + /// Gets the access control policy for a resource. + /// + /// Returns an empty policy if the resource exists and does not have a policy + /// set. + /// + /// Request parameters: + /// + /// [resource] - REQUIRED: The resource for which the policy is being + /// requested. See + /// [Resource names](https://cloud.google.com/apis/design/resource_names) for + /// the appropriate value for this field. 
+ /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/triggers/\[^/\]+$`. + /// + /// [options_requestedPolicyVersion] - Optional. The maximum policy version + /// that will be used to format the policy. Valid values are 0, 1, and 3. + /// Requests specifying an invalid value will be rejected. Requests for + /// policies with any conditional role bindings must specify version 3. + /// Policies with no conditional role bindings may specify any valid value or + /// leave the field unset. The policy in the response might use the policy + /// version that you specified, or it might use a lower policy version. For + /// example, if you specify version 3, but the policy has no conditional role + /// bindings, the response uses version 1. To learn which resources support + /// conditions in their IAM policies, see the + /// [IAM documentation](https://cloud.google.com/iam/help/conditions/resource-policies). + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [Policy]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future getIamPolicy( + core.String resource, { + core.int? options_requestedPolicyVersion, + core.String? $fields, + }) async { + final queryParams_ = >{ + if (options_requestedPolicyVersion != null) + 'options.requestedPolicyVersion': ['${options_requestedPolicyVersion}'], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$resource') + ':getIamPolicy'; + + final response_ = await _requester.request( + url_, + 'GET', + queryParams: queryParams_, + ); + return Policy.fromJson(response_ as core.Map); + } + + /// List triggers. + /// + /// Request parameters: + /// + /// [parent] - Required. The parent collection to list triggers on. + /// Value must have pattern `^projects/\[^/\]+/locations/\[^/\]+$`. + /// + /// [filter] - Filter field. Used to filter the Triggers to be listed. + /// Possible filters are described in https://google.aip.dev/160. For example, + /// using "?filter=destination:gke" would list only Triggers with a gke + /// destination. + /// + /// [orderBy] - The sorting order of the resources returned. Value should be a + /// comma-separated list of fields. The default sorting order is ascending. To + /// specify descending order for a field, append a `desc` suffix; for example: + /// `name desc, trigger_id`. + /// + /// [pageSize] - The maximum number of triggers to return on each page. Note: + /// The service may send fewer. + /// + /// [pageToken] - The page token; provide the value from the `next_page_token` + /// field in a previous `ListTriggers` call to retrieve the subsequent page. + /// When paginating, all other parameters provided to `ListTriggers` must + /// match the call that provided the page token. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [ListTriggersResponse]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future list( + core.String parent, { + core.String? filter, + core.String? orderBy, + core.int? pageSize, + core.String? 
pageToken, + core.String? $fields, + }) async { + final queryParams_ = >{ + if (filter != null) 'filter': [filter], + if (orderBy != null) 'orderBy': [orderBy], + if (pageSize != null) 'pageSize': ['${pageSize}'], + if (pageToken != null) 'pageToken': [pageToken], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/triggers'; + + final response_ = await _requester.request( + url_, + 'GET', + queryParams: queryParams_, + ); + return ListTriggersResponse.fromJson( + response_ as core.Map); + } + + /// Update a single trigger. + /// + /// [request] - The metadata request object. + /// + /// Request parameters: + /// + /// [name] - Required. The resource name of the trigger. Must be unique within + /// the location of the project and must be in + /// `projects/{project}/locations/{location}/triggers/{trigger}` format. + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/triggers/\[^/\]+$`. + /// + /// [allowMissing] - If set to true, and the trigger is not found, a new + /// trigger will be created. In this situation, `update_mask` is ignored. + /// + /// [updateMask] - The fields to be updated; only fields explicitly provided + /// are updated. If no field mask is provided, all provided fields in the + /// request are updated. To update all fields, provide a field mask of "*". + /// + /// [validateOnly] - Optional. If set, validate the request and preview the + /// review, but do not post it. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [GoogleLongrunningOperation]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future patch( + Trigger request, + core.String name, { + core.bool? allowMissing, + core.String? updateMask, + core.bool? validateOnly, + core.String? $fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if (allowMissing != null) 'allowMissing': ['${allowMissing}'], + if (updateMask != null) 'updateMask': [updateMask], + if (validateOnly != null) 'validateOnly': ['${validateOnly}'], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name'); + + final response_ = await _requester.request( + url_, + 'PATCH', + body: body_, + queryParams: queryParams_, + ); + return GoogleLongrunningOperation.fromJson( + response_ as core.Map); + } + + /// Sets the access control policy on the specified resource. + /// + /// Replaces any existing policy. Can return `NOT_FOUND`, `INVALID_ARGUMENT`, + /// and `PERMISSION_DENIED` errors. + /// + /// [request] - The metadata request object. + /// + /// Request parameters: + /// + /// [resource] - REQUIRED: The resource for which the policy is being + /// specified. See + /// [Resource names](https://cloud.google.com/apis/design/resource_names) for + /// the appropriate value for this field. + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/triggers/\[^/\]+$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [Policy]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. 
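Editor's sketch (not part of the generated diff): the trigger resource's IAM helpers shown here follow the standard pattern; `testIamPermissions` is documented above as a UI/CLI aid rather than an authorization check. The permission strings below are representative Eventarc permissions, not values taken from this diff.

import 'package:googleapis/eventarc/v1.dart';

/// Checks which of the given permissions the caller holds on a trigger.
Future<void> checkTriggerAccess(EventarcApi api, String resource) async {
  final response = await api.projects.locations.triggers.testIamPermissions(
    TestIamPermissionsRequest(
      permissions: ['eventarc.triggers.update', 'eventarc.triggers.delete'],
    ),
    resource, // 'projects/my-project/locations/us-central1/triggers/my-trigger'
  );
  print('granted: ${response.permissions}');
}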
+ /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future setIamPolicy( + SetIamPolicyRequest request, + core.String resource, { + core.String? $fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$resource') + ':setIamPolicy'; + + final response_ = await _requester.request( + url_, + 'POST', + body: body_, + queryParams: queryParams_, + ); + return Policy.fromJson(response_ as core.Map); + } + + /// Returns permissions that a caller has on the specified resource. + /// + /// If the resource does not exist, this will return an empty set of + /// permissions, not a `NOT_FOUND` error. Note: This operation is designed to + /// be used for building permission-aware UIs and command-line tools, not for + /// authorization checking. This operation may "fail open" without warning. + /// + /// [request] - The metadata request object. + /// + /// Request parameters: + /// + /// [resource] - REQUIRED: The resource for which the policy detail is being + /// requested. See + /// [Resource names](https://cloud.google.com/apis/design/resource_names) for + /// the appropriate value for this field. + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/triggers/\[^/\]+$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [TestIamPermissionsResponse]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future testIamPermissions( + TestIamPermissionsRequest request, + core.String resource, { + core.String? $fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = + 'v1/' + core.Uri.encodeFull('$resource') + ':testIamPermissions'; + + final response_ = await _requester.request( + url_, + 'POST', + body: body_, + queryParams: queryParams_, + ); + return TestIamPermissionsResponse.fromJson( + response_ as core.Map); + } +} + +/// Specifies the audit configuration for a service. +/// +/// The configuration determines which permission types are logged, and what +/// identities, if any, are exempted from logging. An AuditConfig must have one +/// or more AuditLogConfigs. If there are AuditConfigs for both `allServices` +/// and a specific service, the union of the two AuditConfigs is used for that +/// service: the log_types specified in each AuditConfig are enabled, and the +/// exempted_members in each AuditLogConfig are exempted. Example Policy with +/// multiple AuditConfigs: { "audit_configs": \[ { "service": "allServices", +/// "audit_log_configs": \[ { "log_type": "DATA_READ", "exempted_members": \[ +/// "user:jose@example.com" \] }, { "log_type": "DATA_WRITE" }, { "log_type": +/// "ADMIN_READ" } \] }, { "service": "sampleservice.googleapis.com", +/// "audit_log_configs": \[ { "log_type": "DATA_READ" }, { "log_type": +/// "DATA_WRITE", "exempted_members": \[ "user:aliya@example.com" \] } \] } \] } +/// For sampleservice, this policy enables DATA_READ, DATA_WRITE and ADMIN_READ +/// logging. 
It also exempts `jose@example.com` from DATA_READ logging, and +/// `aliya@example.com` from DATA_WRITE logging. +class AuditConfig { + /// The configuration for logging of each type of permission. + core.List? auditLogConfigs; + + /// Specifies a service that will be enabled for audit logging. + /// + /// For example, `storage.googleapis.com`, `cloudsql.googleapis.com`. + /// `allServices` is a special value that covers all services. + core.String? service; + + AuditConfig({ + this.auditLogConfigs, + this.service, + }); + + AuditConfig.fromJson(core.Map json_) + : this( + auditLogConfigs: (json_['auditLogConfigs'] as core.List?) + ?.map((value) => AuditLogConfig.fromJson( + value as core.Map)) + .toList(), + service: json_['service'] as core.String?, + ); + + core.Map toJson() => { + if (auditLogConfigs != null) 'auditLogConfigs': auditLogConfigs!, + if (service != null) 'service': service!, + }; +} + +/// Provides the configuration for logging a type of permissions. +/// +/// Example: { "audit_log_configs": \[ { "log_type": "DATA_READ", +/// "exempted_members": \[ "user:jose@example.com" \] }, { "log_type": +/// "DATA_WRITE" } \] } This enables 'DATA_READ' and 'DATA_WRITE' logging, while +/// exempting jose@example.com from DATA_READ logging. +typedef AuditLogConfig = $AuditLogConfig; + +/// Associates `members`, or principals, with a `role`. +class Binding { + /// The condition that is associated with this binding. + /// + /// If the condition evaluates to `true`, then this binding applies to the + /// current request. If the condition evaluates to `false`, then this binding + /// does not apply to the current request. However, a different role binding + /// might grant the same role to one or more of the principals in this + /// binding. To learn which resources support conditions in their IAM + /// policies, see the + /// [IAM documentation](https://cloud.google.com/iam/help/conditions/resource-policies). Expr? condition; - /// Specifies the principals requesting access for a Google Cloud resource. + /// Specifies the principals requesting access for a Google Cloud resource. + /// + /// `members` can have the following values: * `allUsers`: A special + /// identifier that represents anyone who is on the internet; with or without + /// a Google account. * `allAuthenticatedUsers`: A special identifier that + /// represents anyone who is authenticated with a Google account or a service + /// account. Does not include identities that come from external identity + /// providers (IdPs) through identity federation. * `user:{emailid}`: An email + /// address that represents a specific Google account. For example, + /// `alice@example.com` . * `serviceAccount:{emailid}`: An email address that + /// represents a Google service account. For example, + /// `my-other-app@appspot.gserviceaccount.com`. * + /// `serviceAccount:{projectid}.svc.id.goog[{namespace}/{kubernetes-sa}]`: An + /// identifier for a + /// [Kubernetes service account](https://cloud.google.com/kubernetes-engine/docs/how-to/kubernetes-service-accounts). + /// For example, `my-project.svc.id.goog[my-namespace/my-kubernetes-sa]`. * + /// `group:{emailid}`: An email address that represents a Google group. For + /// example, `admins@example.com`. * `domain:{domain}`: The G Suite domain + /// (primary) that represents all the users of that domain. For example, + /// `google.com` or `example.com`. 
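Editor's sketch (not part of the generated diff): a `Binding` like the one defined below is typically added via a read-modify-write cycle with `getIamPolicy`/`setIamPolicy`, requesting policy version 3 as the doc comments above recommend when conditions may be present. The role name is a representative Eventarc role, not taken from this diff.

import 'package:googleapis/eventarc/v1.dart';

/// Grants `roles/eventarc.viewer` to one user on a trigger.
Future<Policy> grantViewer(
    EventarcApi api, String resource, String userEmail) async {
  final policy = await api.projects.locations.triggers.getIamPolicy(
    resource,
    options_requestedPolicyVersion: 3, // needed for conditional bindings
  );
  (policy.bindings ??= []).add(Binding(
    role: 'roles/eventarc.viewer',
    members: ['user:$userEmail'],
  ));
  return api.projects.locations.triggers.setIamPolicy(
    SetIamPolicyRequest(policy: policy),
    resource,
  );
}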
* + /// `principal://iam.googleapis.com/locations/global/workforcePools/{pool_id}/subject/{subject_attribute_value}`: + /// A single identity in a workforce identity pool. * + /// `principalSet://iam.googleapis.com/locations/global/workforcePools/{pool_id}/group/{group_id}`: + /// All workforce identities in a group. * + /// `principalSet://iam.googleapis.com/locations/global/workforcePools/{pool_id}/attribute.{attribute_name}/{attribute_value}`: + /// All workforce identities with a specific attribute value. * + /// `principalSet://iam.googleapis.com/locations/global/workforcePools/{pool_id} + /// / * `: All identities in a workforce identity pool. * + /// `principal://iam.googleapis.com/projects/{project_number}/locations/global/workloadIdentityPools/{pool_id}/subject/{subject_attribute_value}`: + /// A single identity in a workload identity pool. * + /// `principalSet://iam.googleapis.com/projects/{project_number}/locations/global/workloadIdentityPools/{pool_id}/group/{group_id}`: + /// A workload identity pool group. * + /// `principalSet://iam.googleapis.com/projects/{project_number}/locations/global/workloadIdentityPools/{pool_id}/attribute.{attribute_name}/{attribute_value}`: + /// All identities in a workload identity pool with a certain attribute. * + /// `principalSet://iam.googleapis.com/projects/{project_number}/locations/global/workloadIdentityPools/{pool_id} + /// / * `: All identities in a workload identity pool. * + /// `deleted:user:{emailid}?uid={uniqueid}`: An email address (plus unique + /// identifier) representing a user that has been recently deleted. For + /// example, `alice@example.com?uid=123456789012345678901`. If the user is + /// recovered, this value reverts to `user:{emailid}` and the recovered user + /// retains the role in the binding. * + /// `deleted:serviceAccount:{emailid}?uid={uniqueid}`: An email address (plus + /// unique identifier) representing a service account that has been recently + /// deleted. For example, + /// `my-other-app@appspot.gserviceaccount.com?uid=123456789012345678901`. If + /// the service account is undeleted, this value reverts to + /// `serviceAccount:{emailid}` and the undeleted service account retains the + /// role in the binding. * `deleted:group:{emailid}?uid={uniqueid}`: An email + /// address (plus unique identifier) representing a Google group that has been + /// recently deleted. For example, + /// `admins@example.com?uid=123456789012345678901`. If the group is recovered, + /// this value reverts to `group:{emailid}` and the recovered group retains + /// the role in the binding. * + /// `deleted:principal://iam.googleapis.com/locations/global/workforcePools/{pool_id}/subject/{subject_attribute_value}`: + /// Deleted single identity in a workforce identity pool. For example, + /// `deleted:principal://iam.googleapis.com/locations/global/workforcePools/my-pool-id/subject/my-subject-attribute-value`. + core.List? members; + + /// Role that is assigned to the list of `members`, or principals. + /// + /// For example, `roles/viewer`, `roles/editor`, or `roles/owner`. For an + /// overview of the IAM roles and permissions, see the + /// [IAM documentation](https://cloud.google.com/iam/docs/roles-overview). For + /// a list of the available pre-defined roles, see + /// [here](https://cloud.google.com/iam/docs/understanding-roles). + core.String? role; + + Binding({ + this.condition, + this.members, + this.role, + }); + + Binding.fromJson(core.Map json_) + : this( + condition: json_.containsKey('condition') + ? 
Expr.fromJson( + json_['condition'] as core.Map) + : null, + members: (json_['members'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + role: json_['role'] as core.String?, + ); + + core.Map toJson() => { + if (condition != null) 'condition': condition!, + if (members != null) 'members': members!, + if (role != null) 'role': role!, + }; +} + +/// A representation of the Channel resource. +/// +/// A Channel is a resource on which event providers publish their events. The +/// published events are delivered through the transport associated with the +/// channel. Note that a channel is associated with exactly one event provider. +class Channel { + /// The activation token for the channel. + /// + /// The token must be used by the provider to register the channel for + /// publishing. + /// + /// Output only. + core.String? activationToken; + + /// The creation time. + /// + /// Output only. + core.String? createTime; + + /// Resource name of a KMS crypto key (managed by the user) used to + /// encrypt/decrypt their event data. + /// + /// It must match the pattern `projects / * /locations / * /keyRings / * + /// /cryptoKeys / * `. + core.String? cryptoKeyName; + + /// Resource labels. + /// + /// Optional. + core.Map? labels; + + /// The resource name of the channel. + /// + /// Must be unique within the location on the project and must be in + /// `projects/{project}/locations/{location}/channels/{channel_id}` format. + /// + /// Required. + core.String? name; + + /// The name of the event provider (e.g. Eventarc SaaS partner) associated + /// with the channel. + /// + /// This provider will be granted permissions to publish events to the + /// channel. Format: + /// `projects/{project}/locations/{location}/providers/{provider_id}`. + core.String? provider; + + /// The name of the Pub/Sub topic created and managed by Eventarc system as a + /// transport for the event delivery. + /// + /// Format: `projects/{project}/topics/{topic_id}`. + /// + /// Output only. + core.String? pubsubTopic; + + /// Whether or not this Channel satisfies the requirements of physical zone + /// separation + /// + /// Output only. + core.bool? satisfiesPzs; + + /// The state of a Channel. + /// + /// Output only. + /// Possible string values are: + /// - "STATE_UNSPECIFIED" : Default value. This value is unused. + /// - "PENDING" : The PENDING state indicates that a Channel has been created + /// successfully and there is a new activation token available for the + /// subscriber to use to convey the Channel to the provider in order to create + /// a Connection. + /// - "ACTIVE" : The ACTIVE state indicates that a Channel has been + /// successfully connected with the event provider. An ACTIVE Channel is ready + /// to receive and route events from the event provider. + /// - "INACTIVE" : The INACTIVE state indicates that the Channel cannot + /// receive events permanently. There are two possible cases this state can + /// happen: 1. The SaaS provider disconnected from this Channel. 2. The + /// Channel activation token has expired but the SaaS provider wasn't + /// connected. To re-establish a Connection with a provider, the subscriber + /// should create a new Channel and give it to the provider. + core.String? state; + + /// Server assigned unique identifier for the channel. + /// + /// The value is a UUID4 string and guaranteed to remain unchanged until the + /// resource is deleted. + /// + /// Output only. + core.String? uid; + + /// The last-modified time. + /// + /// Output only. 
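Editor's sketch (not part of the generated diff): the `Channel` model defined in this class provides `fromJson`/`toJson`, and `toJson` emits only non-null fields, so a sparsely populated Channel serializes to a small JSON object. The resource names below are placeholders.

import 'dart:convert' show jsonDecode, jsonEncode;
import 'package:googleapis/eventarc/v1.dart';

/// Round-trips a sparse Channel through JSON; null fields are omitted.
void channelJsonRoundTrip() {
  final channel = Channel(
    name: 'projects/my-project/locations/us-central1/channels/my-channel',
    provider:
        'projects/my-project/locations/us-central1/providers/my-provider',
  );
  final encoded = jsonEncode(channel.toJson()); // only non-null keys appear
  final decoded =
      Channel.fromJson(jsonDecode(encoded) as Map<String, dynamic>);
  print(decoded.provider);
}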
+ core.String? updateTime; + + Channel({ + this.activationToken, + this.createTime, + this.cryptoKeyName, + this.labels, + this.name, + this.provider, + this.pubsubTopic, + this.satisfiesPzs, + this.state, + this.uid, + this.updateTime, + }); + + Channel.fromJson(core.Map json_) + : this( + activationToken: json_['activationToken'] as core.String?, + createTime: json_['createTime'] as core.String?, + cryptoKeyName: json_['cryptoKeyName'] as core.String?, + labels: + (json_['labels'] as core.Map?)?.map( + (key, value) => core.MapEntry( + key, + value as core.String, + ), + ), + name: json_['name'] as core.String?, + provider: json_['provider'] as core.String?, + pubsubTopic: json_['pubsubTopic'] as core.String?, + satisfiesPzs: json_['satisfiesPzs'] as core.bool?, + state: json_['state'] as core.String?, + uid: json_['uid'] as core.String?, + updateTime: json_['updateTime'] as core.String?, + ); + + core.Map toJson() => { + if (activationToken != null) 'activationToken': activationToken!, + if (createTime != null) 'createTime': createTime!, + if (cryptoKeyName != null) 'cryptoKeyName': cryptoKeyName!, + if (labels != null) 'labels': labels!, + if (name != null) 'name': name!, + if (provider != null) 'provider': provider!, + if (pubsubTopic != null) 'pubsubTopic': pubsubTopic!, + if (satisfiesPzs != null) 'satisfiesPzs': satisfiesPzs!, + if (state != null) 'state': state!, + if (uid != null) 'uid': uid!, + if (updateTime != null) 'updateTime': updateTime!, + }; +} + +/// A representation of the ChannelConnection resource. +/// +/// A ChannelConnection is a resource which event providers create during the +/// activation process to establish a connection between the provider and the +/// subscriber channel. +class ChannelConnection { + /// Input only. + /// + /// Activation token for the channel. The token will be used during the + /// creation of ChannelConnection to bind the channel with the provider + /// project. This field will not be stored in the provider resource. + core.String? activationToken; + + /// The name of the connected subscriber Channel. + /// + /// This is a weak reference to avoid cross project and cross accounts + /// references. This must be in + /// `projects/{project}/location/{location}/channels/{channel_id}` format. + /// + /// Required. + core.String? channel; + + /// The creation time. + /// + /// Output only. + core.String? createTime; + + /// Resource labels. + /// + /// Optional. + core.Map? labels; + + /// The name of the connection. + /// + /// Required. + core.String? name; + + /// Server assigned ID of the resource. + /// + /// The server guarantees uniqueness and immutability until deleted. + /// + /// Output only. + core.String? uid; + + /// The last-modified time. + /// + /// Output only. + core.String? 
updateTime; + + ChannelConnection({ + this.activationToken, + this.channel, + this.createTime, + this.labels, + this.name, + this.uid, + this.updateTime, + }); + + ChannelConnection.fromJson(core.Map json_) + : this( + activationToken: json_['activationToken'] as core.String?, + channel: json_['channel'] as core.String?, + createTime: json_['createTime'] as core.String?, + labels: + (json_['labels'] as core.Map?)?.map( + (key, value) => core.MapEntry( + key, + value as core.String, + ), + ), + name: json_['name'] as core.String?, + uid: json_['uid'] as core.String?, + updateTime: json_['updateTime'] as core.String?, + ); + + core.Map toJson() => { + if (activationToken != null) 'activationToken': activationToken!, + if (channel != null) 'channel': channel!, + if (createTime != null) 'createTime': createTime!, + if (labels != null) 'labels': labels!, + if (name != null) 'name': name!, + if (uid != null) 'uid': uid!, + if (updateTime != null) 'updateTime': updateTime!, + }; +} + +/// Represents a Cloud Run destination. +class CloudRun { + /// The relative path on the Cloud Run service the events should be sent to. + /// + /// The value must conform to the definition of a URI path segment (section + /// 3.3 of RFC2396). Examples: "/route", "route", "route/subroute". + /// + /// Optional. + core.String? path; + + /// The region the Cloud Run service is deployed in. + /// + /// Required. + core.String? region; + + /// The name of the Cloud Run service being addressed. + /// + /// See + /// https://cloud.google.com/run/docs/reference/rest/v1/namespaces.services. + /// Only services located in the same project as the trigger object can be + /// addressed. + /// + /// Required. + core.String? service; + + CloudRun({ + this.path, + this.region, + this.service, + }); + + CloudRun.fromJson(core.Map json_) + : this( + path: json_['path'] as core.String?, + region: json_['region'] as core.String?, + service: json_['service'] as core.String?, + ); + + core.Map toJson() => { + if (path != null) 'path': path!, + if (region != null) 'region': region!, + if (service != null) 'service': service!, + }; +} + +/// Represents a target of an invocation over HTTP. +class Destination { + /// The Cloud Function resource name. + /// + /// Cloud Functions V1 and V2 are supported. Format: + /// `projects/{project}/locations/{location}/functions/{function}` This is a + /// read-only field. Creating Cloud Functions V1/V2 triggers is only supported + /// via the Cloud Functions product. An error will be returned if the user + /// sets this value. + core.String? cloudFunction; + + /// Cloud Run fully-managed resource that receives the events. + /// + /// The resource should be in the same project as the trigger. + CloudRun? cloudRun; + + /// A GKE service capable of receiving events. + /// + /// The service should be running in the same project as the trigger. + GKE? gke; + + /// An HTTP endpoint destination described by an URI. + HttpEndpoint? httpEndpoint; + + /// Network config is used to configure how Eventarc resolves and connect to a + /// destination. + /// + /// This should only be used with HttpEndpoint destination type. + /// + /// Optional. + NetworkConfig? networkConfig; + + /// The resource name of the Workflow whose Executions are triggered by the + /// events. + /// + /// The Workflow resource should be deployed in the same project as the + /// trigger. Format: + /// `projects/{project}/locations/{location}/workflows/{workflow}` + core.String? 
workflow; + + Destination({ + this.cloudFunction, + this.cloudRun, + this.gke, + this.httpEndpoint, + this.networkConfig, + this.workflow, + }); + + Destination.fromJson(core.Map json_) + : this( + cloudFunction: json_['cloudFunction'] as core.String?, + cloudRun: json_.containsKey('cloudRun') + ? CloudRun.fromJson( + json_['cloudRun'] as core.Map) + : null, + gke: json_.containsKey('gke') + ? GKE + .fromJson(json_['gke'] as core.Map) + : null, + httpEndpoint: json_.containsKey('httpEndpoint') + ? HttpEndpoint.fromJson( + json_['httpEndpoint'] as core.Map) + : null, + networkConfig: json_.containsKey('networkConfig') + ? NetworkConfig.fromJson( + json_['networkConfig'] as core.Map) + : null, + workflow: json_['workflow'] as core.String?, + ); + + core.Map toJson() => { + if (cloudFunction != null) 'cloudFunction': cloudFunction!, + if (cloudRun != null) 'cloudRun': cloudRun!, + if (gke != null) 'gke': gke!, + if (httpEndpoint != null) 'httpEndpoint': httpEndpoint!, + if (networkConfig != null) 'networkConfig': networkConfig!, + if (workflow != null) 'workflow': workflow!, + }; +} + +/// A generic empty message that you can re-use to avoid defining duplicated +/// empty messages in your APIs. +/// +/// A typical example is to use it as the request or the response type of an API +/// method. For instance: service Foo { rpc Bar(google.protobuf.Empty) returns +/// (google.protobuf.Empty); } +typedef Empty = $Empty; + +/// An enrollment represents a subscription for messages on a particular message +/// bus. +/// +/// It defines a matching criteria for messages on the bus and the subscriber +/// endpoint where matched messages should be delivered. +class Enrollment { + /// Resource annotations. + /// + /// Optional. + core.Map? annotations; + + /// A CEL expression identifying which messages this enrollment applies to. + /// + /// Required. + core.String? celMatch; + + /// The creation time. + /// + /// Output only. + core.String? createTime; + + /// Destination is the Pipeline that the Enrollment is delivering to. + /// + /// It must point to the full resource name of a Pipeline. Format: + /// "projects/{PROJECT_ID}/locations/{region}/pipelines/{PIPELINE_ID)" + /// + /// Required. + core.String? destination; + + /// Resource display name. + /// + /// Optional. + core.String? displayName; + + /// This checksum is computed by the server based on the value of other + /// fields, and might be sent only on update and delete requests to ensure + /// that the client has an up-to-date value before proceeding. + /// + /// Output only. + core.String? etag; + + /// Resource labels. + /// + /// Optional. + core.Map? labels; + + /// Resource name of the message bus identifying the source of the messages. + /// + /// It matches the form + /// projects/{project}/locations/{location}/messageBuses/{messageBus}. + /// + /// Required. + core.String? messageBus; + + /// Identifier. + /// + /// Resource name of the form + /// projects/{project}/locations/{location}/enrollments/{enrollment} + core.String? name; + + /// Server assigned unique identifier for the channel. + /// + /// The value is a UUID4 string and guaranteed to remain unchanged until the + /// resource is deleted. + /// + /// Output only. + core.String? uid; + + /// The last-modified time. + /// + /// Output only. + core.String? 
updateTime; + + Enrollment({ + this.annotations, + this.celMatch, + this.createTime, + this.destination, + this.displayName, + this.etag, + this.labels, + this.messageBus, + this.name, + this.uid, + this.updateTime, + }); + + Enrollment.fromJson(core.Map json_) + : this( + annotations: + (json_['annotations'] as core.Map?) + ?.map( + (key, value) => core.MapEntry( + key, + value as core.String, + ), + ), + celMatch: json_['celMatch'] as core.String?, + createTime: json_['createTime'] as core.String?, + destination: json_['destination'] as core.String?, + displayName: json_['displayName'] as core.String?, + etag: json_['etag'] as core.String?, + labels: + (json_['labels'] as core.Map?)?.map( + (key, value) => core.MapEntry( + key, + value as core.String, + ), + ), + messageBus: json_['messageBus'] as core.String?, + name: json_['name'] as core.String?, + uid: json_['uid'] as core.String?, + updateTime: json_['updateTime'] as core.String?, + ); + + core.Map toJson() => { + if (annotations != null) 'annotations': annotations!, + if (celMatch != null) 'celMatch': celMatch!, + if (createTime != null) 'createTime': createTime!, + if (destination != null) 'destination': destination!, + if (displayName != null) 'displayName': displayName!, + if (etag != null) 'etag': etag!, + if (labels != null) 'labels': labels!, + if (messageBus != null) 'messageBus': messageBus!, + if (name != null) 'name': name!, + if (uid != null) 'uid': uid!, + if (updateTime != null) 'updateTime': updateTime!, + }; +} + +/// Filters events based on exact matches on the CloudEvents attributes. +class EventFilter { + /// The name of a CloudEvents attribute. /// - /// `members` can have the following values: * `allUsers`: A special - /// identifier that represents anyone who is on the internet; with or without - /// a Google account. * `allAuthenticatedUsers`: A special identifier that - /// represents anyone who is authenticated with a Google account or a service - /// account. Does not include identities that come from external identity - /// providers (IdPs) through identity federation. * `user:{emailid}`: An email - /// address that represents a specific Google account. For example, - /// `alice@example.com` . * `serviceAccount:{emailid}`: An email address that - /// represents a Google service account. For example, - /// `my-other-app@appspot.gserviceaccount.com`. * - /// `serviceAccount:{projectid}.svc.id.goog[{namespace}/{kubernetes-sa}]`: An - /// identifier for a - /// [Kubernetes service account](https://cloud.google.com/kubernetes-engine/docs/how-to/kubernetes-service-accounts). - /// For example, `my-project.svc.id.goog[my-namespace/my-kubernetes-sa]`. * - /// `group:{emailid}`: An email address that represents a Google group. For - /// example, `admins@example.com`. * `domain:{domain}`: The G Suite domain - /// (primary) that represents all the users of that domain. For example, - /// `google.com` or `example.com`. * - /// `principal://iam.googleapis.com/locations/global/workforcePools/{pool_id}/subject/{subject_attribute_value}`: - /// A single identity in a workforce identity pool. * - /// `principalSet://iam.googleapis.com/locations/global/workforcePools/{pool_id}/group/{group_id}`: - /// All workforce identities in a group. * - /// `principalSet://iam.googleapis.com/locations/global/workforcePools/{pool_id}/attribute.{attribute_name}/{attribute_value}`: - /// All workforce identities with a specific attribute value. 
* - /// `principalSet://iam.googleapis.com/locations/global/workforcePools/{pool_id} - /// / * `: All identities in a workforce identity pool. * - /// `principal://iam.googleapis.com/projects/{project_number}/locations/global/workloadIdentityPools/{pool_id}/subject/{subject_attribute_value}`: - /// A single identity in a workload identity pool. * - /// `principalSet://iam.googleapis.com/projects/{project_number}/locations/global/workloadIdentityPools/{pool_id}/group/{group_id}`: - /// A workload identity pool group. * - /// `principalSet://iam.googleapis.com/projects/{project_number}/locations/global/workloadIdentityPools/{pool_id}/attribute.{attribute_name}/{attribute_value}`: - /// All identities in a workload identity pool with a certain attribute. * - /// `principalSet://iam.googleapis.com/projects/{project_number}/locations/global/workloadIdentityPools/{pool_id} - /// / * `: All identities in a workload identity pool. * - /// `deleted:user:{emailid}?uid={uniqueid}`: An email address (plus unique - /// identifier) representing a user that has been recently deleted. For - /// example, `alice@example.com?uid=123456789012345678901`. If the user is - /// recovered, this value reverts to `user:{emailid}` and the recovered user - /// retains the role in the binding. * - /// `deleted:serviceAccount:{emailid}?uid={uniqueid}`: An email address (plus - /// unique identifier) representing a service account that has been recently - /// deleted. For example, - /// `my-other-app@appspot.gserviceaccount.com?uid=123456789012345678901`. If - /// the service account is undeleted, this value reverts to - /// `serviceAccount:{emailid}` and the undeleted service account retains the - /// role in the binding. * `deleted:group:{emailid}?uid={uniqueid}`: An email - /// address (plus unique identifier) representing a Google group that has been - /// recently deleted. For example, - /// `admins@example.com?uid=123456789012345678901`. If the group is recovered, - /// this value reverts to `group:{emailid}` and the recovered group retains - /// the role in the binding. * - /// `deleted:principal://iam.googleapis.com/locations/global/workforcePools/{pool_id}/subject/{subject_attribute_value}`: - /// Deleted single identity in a workforce identity pool. For example, - /// `deleted:principal://iam.googleapis.com/locations/global/workforcePools/my-pool-id/subject/my-subject-attribute-value`. - core.List? members; + /// Currently, only a subset of attributes are supported for filtering. You + /// can \[retrieve a specific provider's supported event + /// types\](/eventarc/docs/list-providers#describe-provider). All triggers + /// MUST provide a filter for the 'type' attribute. + /// + /// Required. + core.String? attribute; + + /// The operator used for matching the events with the value of the filter. + /// + /// If not specified, only events that have an exact key-value pair specified + /// in the filter are matched. The allowed values are `path_pattern` and + /// `match-path-pattern`. `path_pattern` is only allowed for GCFv1 triggers. + /// + /// Optional. + core.String? operator; + + /// The value for the attribute. + /// + /// Required. + core.String? 
value; + + EventFilter({ + this.attribute, + this.operator, + this.value, + }); + + EventFilter.fromJson(core.Map json_) + : this( + attribute: json_['attribute'] as core.String?, + operator: json_['operator'] as core.String?, + value: json_['value'] as core.String?, + ); + + core.Map toJson() => { + if (attribute != null) 'attribute': attribute!, + if (operator != null) 'operator': operator!, + if (value != null) 'value': value!, + }; +} + +/// A representation of the event type resource. +class EventType { + /// Human friendly description of what the event type is about. + /// + /// For example "Bucket created in Cloud Storage". + /// + /// Output only. + core.String? description; + + /// URI for the event schema. + /// + /// For example + /// "https://github.com/googleapis/google-cloudevents/blob/master/proto/google/events/cloud/storage/v1/events.proto" + /// + /// Output only. + core.String? eventSchemaUri; + + /// Filtering attributes for the event type. + /// + /// Output only. + core.List? filteringAttributes; + + /// The full name of the event type (for example, + /// "google.cloud.storage.object.v1.finalized"). + /// + /// In the form of {provider-specific-prefix}.{resource}.{version}.{verb}. + /// Types MUST be versioned and event schemas are guaranteed to remain + /// backward compatible within one version. Note that event type versions and + /// API versions do not need to match. + /// + /// Output only. + core.String? type; + + EventType({ + this.description, + this.eventSchemaUri, + this.filteringAttributes, + this.type, + }); + + EventType.fromJson(core.Map json_) + : this( + description: json_['description'] as core.String?, + eventSchemaUri: json_['eventSchemaUri'] as core.String?, + filteringAttributes: (json_['filteringAttributes'] as core.List?) + ?.map((value) => FilteringAttribute.fromJson( + value as core.Map)) + .toList(), + type: json_['type'] as core.String?, + ); + + core.Map toJson() => { + if (description != null) 'description': description!, + if (eventSchemaUri != null) 'eventSchemaUri': eventSchemaUri!, + if (filteringAttributes != null) + 'filteringAttributes': filteringAttributes!, + if (type != null) 'type': type!, + }; +} + +/// Represents a textual expression in the Common Expression Language (CEL) +/// syntax. +/// +/// CEL is a C-like expression language. The syntax and semantics of CEL are +/// documented at https://github.com/google/cel-spec. Example (Comparison): +/// title: "Summary size limit" description: "Determines if a summary is less +/// than 100 chars" expression: "document.summary.size() \< 100" Example +/// (Equality): title: "Requestor is owner" description: "Determines if +/// requestor is the document owner" expression: "document.owner == +/// request.auth.claims.email" Example (Logic): title: "Public documents" +/// description: "Determine whether the document should be publicly visible" +/// expression: "document.type != 'private' && document.type != 'internal'" +/// Example (Data Manipulation): title: "Notification string" description: +/// "Create a notification string with a timestamp." expression: "'New message +/// received at ' + string(document.create_time)" The exact variables and +/// functions that may be referenced within an expression are determined by the +/// service that evaluates it. See the service documentation for additional +/// information. +typedef Expr = $Expr; + +/// A representation of the FilteringAttribute resource. +/// +/// Filtering attributes are per event type. 
+class FilteringAttribute { + /// Attribute used for filtering the event type. + /// + /// Output only. + core.String? attribute; + + /// Description of the purpose of the attribute. + /// + /// Output only. + core.String? description; + + /// If true, the attribute accepts matching expressions in the Eventarc + /// PathPattern format. + /// + /// Output only. + core.bool? pathPatternSupported; + + /// If true, the triggers for this provider should always specify a filter on + /// these attributes. + /// + /// Trigger creation will fail otherwise. + /// + /// Output only. + core.bool? required; + + FilteringAttribute({ + this.attribute, + this.description, + this.pathPatternSupported, + this.required, + }); + + FilteringAttribute.fromJson(core.Map json_) + : this( + attribute: json_['attribute'] as core.String?, + description: json_['description'] as core.String?, + pathPatternSupported: json_['pathPatternSupported'] as core.bool?, + required: json_['required'] as core.bool?, + ); + + core.Map toJson() => { + if (attribute != null) 'attribute': attribute!, + if (description != null) 'description': description!, + if (pathPatternSupported != null) + 'pathPatternSupported': pathPatternSupported!, + if (required != null) 'required': required!, + }; +} + +/// Represents a GKE destination. +class GKE { + /// The name of the cluster the GKE service is running in. + /// + /// The cluster must be running in the same project as the trigger being + /// created. + /// + /// Required. + core.String? cluster; + + /// The name of the Google Compute Engine in which the cluster resides, which + /// can either be compute zone (for example, us-central1-a) for the zonal + /// clusters or region (for example, us-central1) for regional clusters. + /// + /// Required. + core.String? location; + + /// The namespace the GKE service is running in. + /// + /// Required. + core.String? namespace; - /// Role that is assigned to the list of `members`, or principals. + /// The relative path on the GKE service the events should be sent to. /// - /// For example, `roles/viewer`, `roles/editor`, or `roles/owner`. For an - /// overview of the IAM roles and permissions, see the - /// [IAM documentation](https://cloud.google.com/iam/docs/roles-overview). For - /// a list of the available pre-defined roles, see - /// [here](https://cloud.google.com/iam/docs/understanding-roles). - core.String? role; + /// The value must conform to the definition of a URI path segment (section + /// 3.3 of RFC2396). Examples: "/route", "route", "route/subroute". + /// + /// Optional. + core.String? path; - Binding({ - this.condition, - this.members, - this.role, + /// Name of the GKE service. + /// + /// Required. + core.String? service; + + GKE({ + this.cluster, + this.location, + this.namespace, + this.path, + this.service, }); - Binding.fromJson(core.Map json_) + GKE.fromJson(core.Map json_) : this( - condition: json_.containsKey('condition') - ? Expr.fromJson( - json_['condition'] as core.Map) - : null, - members: (json_['members'] as core.List?) 
- ?.map((value) => value as core.String) - .toList(), - role: json_['role'] as core.String?, + cluster: json_['cluster'] as core.String?, + location: json_['location'] as core.String?, + namespace: json_['namespace'] as core.String?, + path: json_['path'] as core.String?, + service: json_['service'] as core.String?, ); core.Map toJson() => { - if (condition != null) 'condition': condition!, - if (members != null) 'members': members!, - if (role != null) 'role': role!, + if (cluster != null) 'cluster': cluster!, + if (location != null) 'location': location!, + if (namespace != null) 'namespace': namespace!, + if (path != null) 'path': path!, + if (service != null) 'service': service!, }; } -/// A representation of the Channel resource. -/// -/// A Channel is a resource on which event providers publish their events. The -/// published events are delivered through the transport associated with the -/// channel. Note that a channel is associated with exactly one event provider. -class Channel { - /// The activation token for the channel. - /// - /// The token must be used by the provider to register the channel for - /// publishing. +/// A GoogleApiSource represents a subscription of 1P events from a MessageBus. +class GoogleApiSource { + /// Resource annotations. /// - /// Output only. - core.String? activationToken; + /// Optional. + core.Map? annotations; /// The creation time. /// @@ -2548,57 +4374,45 @@ class Channel { /// /// It must match the pattern `projects / * /locations / * /keyRings / * /// /cryptoKeys / * `. + /// + /// Optional. core.String? cryptoKeyName; - /// The resource name of the channel. + /// Destination is the message bus that the GoogleApiSource is delivering to. /// - /// Must be unique within the location on the project and must be in - /// `projects/{project}/locations/{location}/channels/{channel_id}` format. + /// It must be point to the full resource name of a MessageBus. Format: + /// "projects/{PROJECT_ID}/locations/{region}/messagesBuses/{MESSAGE_BUS_ID) /// /// Required. - core.String? name; + core.String? destination; - /// The name of the event provider (e.g. Eventarc SaaS partner) associated - /// with the channel. + /// Resource display name. /// - /// This provider will be granted permissions to publish events to the - /// channel. Format: - /// `projects/{project}/locations/{location}/providers/{provider_id}`. - core.String? provider; + /// Optional. + core.String? displayName; - /// The name of the Pub/Sub topic created and managed by Eventarc system as a - /// transport for the event delivery. - /// - /// Format: `projects/{project}/topics/{topic_id}`. + /// This checksum is computed by the server based on the value of other + /// fields, and might be sent only on update and delete requests to ensure + /// that the client has an up-to-date value before proceeding. /// /// Output only. - core.String? pubsubTopic; + core.String? etag; - /// Whether or not this Channel satisfies the requirements of physical zone - /// separation + /// Resource labels. /// - /// Output only. - core.bool? satisfiesPzs; + /// Optional. + core.Map? labels; - /// The state of a Channel. + /// Config to control Platform logging for the GoogleApiSource. /// - /// Output only. - /// Possible string values are: - /// - "STATE_UNSPECIFIED" : Default value. This value is unused. 
- /// - "PENDING" : The PENDING state indicates that a Channel has been created - /// successfully and there is a new activation token available for the - /// subscriber to use to convey the Channel to the provider in order to create - /// a Connection. - /// - "ACTIVE" : The ACTIVE state indicates that a Channel has been - /// successfully connected with the event provider. An ACTIVE Channel is ready - /// to receive and route events from the event provider. - /// - "INACTIVE" : The INACTIVE state indicates that the Channel cannot - /// receive events permanently. There are two possible cases this state can - /// happen: 1. The SaaS provider disconnected from this Channel. 2. The - /// Channel activation token has expired but the SaaS provider wasn't - /// connected. To re-establish a Connection with a provider, the subscriber - /// should create a new Channel and give it to the provider. - core.String? state; + /// Optional. + LoggingConfig? loggingConfig; + + /// Identifier. + /// + /// Resource name of the form + /// projects/{project}/locations/{location}/googleApiSources/{google_api_source} + core.String? name; /// Server assigned unique identifier for the channel. /// @@ -2613,539 +4427,725 @@ class Channel { /// Output only. core.String? updateTime; - Channel({ - this.activationToken, + GoogleApiSource({ + this.annotations, this.createTime, this.cryptoKeyName, + this.destination, + this.displayName, + this.etag, + this.labels, + this.loggingConfig, this.name, - this.provider, - this.pubsubTopic, - this.satisfiesPzs, - this.state, this.uid, this.updateTime, }); - Channel.fromJson(core.Map json_) + GoogleApiSource.fromJson(core.Map json_) : this( - activationToken: json_['activationToken'] as core.String?, + annotations: + (json_['annotations'] as core.Map?) + ?.map( + (key, value) => core.MapEntry( + key, + value as core.String, + ), + ), createTime: json_['createTime'] as core.String?, cryptoKeyName: json_['cryptoKeyName'] as core.String?, + destination: json_['destination'] as core.String?, + displayName: json_['displayName'] as core.String?, + etag: json_['etag'] as core.String?, + labels: + (json_['labels'] as core.Map?)?.map( + (key, value) => core.MapEntry( + key, + value as core.String, + ), + ), + loggingConfig: json_.containsKey('loggingConfig') + ? 
LoggingConfig.fromJson( + json_['loggingConfig'] as core.Map) + : null, name: json_['name'] as core.String?, - provider: json_['provider'] as core.String?, - pubsubTopic: json_['pubsubTopic'] as core.String?, - satisfiesPzs: json_['satisfiesPzs'] as core.bool?, - state: json_['state'] as core.String?, uid: json_['uid'] as core.String?, updateTime: json_['updateTime'] as core.String?, ); core.Map toJson() => { - if (activationToken != null) 'activationToken': activationToken!, + if (annotations != null) 'annotations': annotations!, if (createTime != null) 'createTime': createTime!, if (cryptoKeyName != null) 'cryptoKeyName': cryptoKeyName!, + if (destination != null) 'destination': destination!, + if (displayName != null) 'displayName': displayName!, + if (etag != null) 'etag': etag!, + if (labels != null) 'labels': labels!, + if (loggingConfig != null) 'loggingConfig': loggingConfig!, if (name != null) 'name': name!, - if (provider != null) 'provider': provider!, - if (pubsubTopic != null) 'pubsubTopic': pubsubTopic!, - if (satisfiesPzs != null) 'satisfiesPzs': satisfiesPzs!, - if (state != null) 'state': state!, if (uid != null) 'uid': uid!, if (updateTime != null) 'updateTime': updateTime!, }; } -/// A representation of the ChannelConnection resource. +/// A GoogleChannelConfig is a resource that stores the custom settings +/// respected by Eventarc first-party triggers in the matching region. /// -/// A ChannelConnection is a resource which event providers create during the -/// activation process to establish a connection between the provider and the -/// subscriber channel. -class ChannelConnection { - /// Input only. - /// - /// Activation token for the channel. The token will be used during the - /// creation of ChannelConnection to bind the channel with the provider - /// project. This field will not be stored in the provider resource. - core.String? activationToken; - - /// The name of the connected subscriber Channel. +/// Once configured, first-party event data will be protected using the +/// specified custom managed encryption key instead of Google-managed encryption +/// keys. +class GoogleChannelConfig { + /// Resource name of a KMS crypto key (managed by the user) used to + /// encrypt/decrypt their event data. /// - /// This is a weak reference to avoid cross project and cross accounts - /// references. This must be in - /// `projects/{project}/location/{location}/channels/{channel_id}` format. + /// It must match the pattern `projects / * /locations / * /keyRings / * + /// /cryptoKeys / * `. /// - /// Required. - core.String? channel; + /// Optional. + core.String? cryptoKeyName; - /// The creation time. + /// The resource name of the config. /// - /// Output only. - core.String? createTime; - - /// The name of the connection. + /// Must be in the format of, + /// `projects/{project}/locations/{location}/googleChannelConfig`. /// /// Required. core.String? name; - /// Server assigned ID of the resource. - /// - /// The server guarantees uniqueness and immutability until deleted. - /// - /// Output only. - core.String? uid; - /// The last-modified time. /// - /// Output only. - core.String? 
updateTime; - - ChannelConnection({ - this.activationToken, - this.channel, - this.createTime, - this.name, - this.uid, - this.updateTime, - }); - - ChannelConnection.fromJson(core.Map json_) - : this( - activationToken: json_['activationToken'] as core.String?, - channel: json_['channel'] as core.String?, - createTime: json_['createTime'] as core.String?, - name: json_['name'] as core.String?, - uid: json_['uid'] as core.String?, - updateTime: json_['updateTime'] as core.String?, - ); - - core.Map toJson() => { - if (activationToken != null) 'activationToken': activationToken!, - if (channel != null) 'channel': channel!, - if (createTime != null) 'createTime': createTime!, - if (name != null) 'name': name!, - if (uid != null) 'uid': uid!, - if (updateTime != null) 'updateTime': updateTime!, - }; -} - -/// Represents a Cloud Run destination. -class CloudRun { - /// The relative path on the Cloud Run service the events should be sent to. - /// - /// The value must conform to the definition of a URI path segment (section - /// 3.3 of RFC2396). Examples: "/route", "route", "route/subroute". - /// - /// Optional. - core.String? path; - - /// The region the Cloud Run service is deployed in. - /// - /// Required. - core.String? region; - - /// The name of the Cloud Run service being addressed. - /// - /// See - /// https://cloud.google.com/run/docs/reference/rest/v1/namespaces.services. - /// Only services located in the same project as the trigger object can be - /// addressed. - /// - /// Required. - core.String? service; + /// Output only. + core.String? updateTime; - CloudRun({ - this.path, - this.region, - this.service, + GoogleChannelConfig({ + this.cryptoKeyName, + this.name, + this.updateTime, }); - CloudRun.fromJson(core.Map json_) + GoogleChannelConfig.fromJson(core.Map json_) : this( - path: json_['path'] as core.String?, - region: json_['region'] as core.String?, - service: json_['service'] as core.String?, + cryptoKeyName: json_['cryptoKeyName'] as core.String?, + name: json_['name'] as core.String?, + updateTime: json_['updateTime'] as core.String?, ); core.Map toJson() => { - if (path != null) 'path': path!, - if (region != null) 'region': region!, - if (service != null) 'service': service!, + if (cryptoKeyName != null) 'cryptoKeyName': cryptoKeyName!, + if (name != null) 'name': name!, + if (updateTime != null) 'updateTime': updateTime!, }; } /// Represents a target of an invocation over HTTP. -class Destination { - /// The Cloud Function resource name. +class GoogleCloudEventarcV1PipelineDestination { + /// An authentication config used to authenticate message requests, such that + /// destinations can verify the source. /// - /// Cloud Functions V1 and V2 are supported. Format: - /// `projects/{project}/locations/{location}/functions/{function}` This is a - /// read-only field. Creating Cloud Functions V1/V2 triggers is only supported - /// via the Cloud Functions product. An error will be returned if the user - /// sets this value. - core.String? cloudFunction; + /// For example, this can be used with private GCP destinations that require + /// GCP credentials to access like Cloud Run. This field is optional and + /// should be set only by users interested in authenticated push + /// + /// Optional. + GoogleCloudEventarcV1PipelineDestinationAuthenticationConfig? + authenticationConfig; - /// Cloud Run fully-managed resource that receives the events. + /// An HTTP endpoint destination described by an URI. /// - /// The resource should be in the same project as the trigger. 
- CloudRun? cloudRun; + /// If a DNS FQDN is provided as the endpoint, Pipeline will create a peering + /// zone to the consumer VPC and forward DNS requests to the VPC specified by + /// network config to resolve the service endpoint. See: + /// https://cloud.google.com/dns/docs/zones/zones-overview#peering_zones + /// + /// Optional. + GoogleCloudEventarcV1PipelineDestinationHttpEndpoint? httpEndpoint; - /// A GKE service capable of receiving events. + /// The resource name of the Message Bus to which events should be published. /// - /// The service should be running in the same project as the trigger. - GKE? gke; + /// The Message Bus resource should exist in the same project as the Pipeline. + /// Format: + /// `projects/{project}/locations/{location}/messageBuses/{message_bus}` + /// + /// Optional. + core.String? messageBus; - /// An HTTP endpoint destination described by an URI. - HttpEndpoint? httpEndpoint; + /// Network config is used to configure how Pipeline resolves and connects to + /// a destination. + /// + /// Optional. + GoogleCloudEventarcV1PipelineDestinationNetworkConfig? networkConfig; - /// Network config is used to configure how Eventarc resolves and connect to a - /// destination. + /// The message format before it is delivered to the destination. /// - /// This should only be used with HttpEndpoint destination type. + /// If not set, the message will be delivered in the format it was originally + /// delivered to the Pipeline. This field can only be set if + /// Pipeline.input_payload_format is also set. /// /// Optional. - NetworkConfig? networkConfig; + GoogleCloudEventarcV1PipelineMessagePayloadFormat? outputPayloadFormat; + + /// The resource name of the Pub/Sub topic to which events should be + /// published. + /// + /// Format: `projects/{project}/locations/{location}/topics/{topic}` + /// + /// Optional. + core.String? topic; /// The resource name of the Workflow whose Executions are triggered by the /// events. /// /// The Workflow resource should be deployed in the same project as the - /// trigger. Format: + /// Pipeline. Format: /// `projects/{project}/locations/{location}/workflows/{workflow}` + /// + /// Optional. core.String? workflow; - Destination({ - this.cloudFunction, - this.cloudRun, - this.gke, + GoogleCloudEventarcV1PipelineDestination({ + this.authenticationConfig, this.httpEndpoint, + this.messageBus, this.networkConfig, + this.outputPayloadFormat, + this.topic, this.workflow, }); - Destination.fromJson(core.Map json_) + GoogleCloudEventarcV1PipelineDestination.fromJson(core.Map json_) : this( - cloudFunction: json_['cloudFunction'] as core.String?, - cloudRun: json_.containsKey('cloudRun') - ? CloudRun.fromJson( - json_['cloudRun'] as core.Map) - : null, - gke: json_.containsKey('gke') - ? GKE - .fromJson(json_['gke'] as core.Map) + authenticationConfig: json_.containsKey('authenticationConfig') + ? GoogleCloudEventarcV1PipelineDestinationAuthenticationConfig + .fromJson(json_['authenticationConfig'] + as core.Map) : null, httpEndpoint: json_.containsKey('httpEndpoint') - ? HttpEndpoint.fromJson( + ? GoogleCloudEventarcV1PipelineDestinationHttpEndpoint.fromJson( json_['httpEndpoint'] as core.Map) : null, + messageBus: json_['messageBus'] as core.String?, networkConfig: json_.containsKey('networkConfig') - ? NetworkConfig.fromJson( + ? GoogleCloudEventarcV1PipelineDestinationNetworkConfig.fromJson( json_['networkConfig'] as core.Map) : null, + outputPayloadFormat: json_.containsKey('outputPayloadFormat') + ? 
GoogleCloudEventarcV1PipelineMessagePayloadFormat.fromJson( + json_['outputPayloadFormat'] + as core.Map) + : null, + topic: json_['topic'] as core.String?, workflow: json_['workflow'] as core.String?, ); core.Map toJson() => { - if (cloudFunction != null) 'cloudFunction': cloudFunction!, - if (cloudRun != null) 'cloudRun': cloudRun!, - if (gke != null) 'gke': gke!, + if (authenticationConfig != null) + 'authenticationConfig': authenticationConfig!, if (httpEndpoint != null) 'httpEndpoint': httpEndpoint!, + if (messageBus != null) 'messageBus': messageBus!, if (networkConfig != null) 'networkConfig': networkConfig!, + if (outputPayloadFormat != null) + 'outputPayloadFormat': outputPayloadFormat!, + if (topic != null) 'topic': topic!, if (workflow != null) 'workflow': workflow!, }; } -/// A generic empty message that you can re-use to avoid defining duplicated -/// empty messages in your APIs. -/// -/// A typical example is to use it as the request or the response type of an API -/// method. For instance: service Foo { rpc Bar(google.protobuf.Empty) returns -/// (google.protobuf.Empty); } -typedef Empty = $Empty; +/// Represents a config used to authenticate message requests. +class GoogleCloudEventarcV1PipelineDestinationAuthenticationConfig { + /// This authenticate method will apply Google OIDC tokens signed by a GCP + /// service account to the requests. + /// + /// Optional. + GoogleCloudEventarcV1PipelineDestinationAuthenticationConfigOidcToken? + googleOidc; -/// Filters events based on exact matches on the CloudEvents attributes. -class EventFilter { - /// The name of a CloudEvents attribute. + /// If specified, an + /// [OAuth token](https://developers.google.com/identity/protocols/OAuth2) + /// will be generated and attached as an `Authorization` header in the HTTP + /// request. /// - /// Currently, only a subset of attributes are supported for filtering. You - /// can \[retrieve a specific provider's supported event - /// types\](/eventarc/docs/list-providers#describe-provider). All triggers - /// MUST provide a filter for the 'type' attribute. + /// This type of authorization should generally only be used when calling + /// Google APIs hosted on *.googleapis.com. /// - /// Required. - core.String? attribute; + /// Optional. + GoogleCloudEventarcV1PipelineDestinationAuthenticationConfigOAuthToken? + oauthToken; - /// The operator used for matching the events with the value of the filter. + GoogleCloudEventarcV1PipelineDestinationAuthenticationConfig({ + this.googleOidc, + this.oauthToken, + }); + + GoogleCloudEventarcV1PipelineDestinationAuthenticationConfig.fromJson( + core.Map json_) + : this( + googleOidc: json_.containsKey('googleOidc') + ? GoogleCloudEventarcV1PipelineDestinationAuthenticationConfigOidcToken + .fromJson(json_['googleOidc'] + as core.Map) + : null, + oauthToken: json_.containsKey('oauthToken') + ? GoogleCloudEventarcV1PipelineDestinationAuthenticationConfigOAuthToken + .fromJson(json_['oauthToken'] + as core.Map) + : null, + ); + + core.Map toJson() => { + if (googleOidc != null) 'googleOidc': googleOidc!, + if (oauthToken != null) 'oauthToken': oauthToken!, + }; +} + +/// Contains information needed for generating an +/// [OAuth token](https://developers.google.com/identity/protocols/OAuth2). +/// +/// This type of authorization should generally only be used when calling Google +/// APIs hosted on *.googleapis.com. +class GoogleCloudEventarcV1PipelineDestinationAuthenticationConfigOAuthToken { + /// OAuth scope to be used for generating OAuth access token. 
/// - /// If not specified, only events that have an exact key-value pair specified - /// in the filter are matched. The allowed values are `path_pattern` and - /// `match-path-pattern`. `path_pattern` is only allowed for GCFv1 triggers. + /// If not specified, "https://www.googleapis.com/auth/cloud-platform" will be + /// used. /// /// Optional. - core.String? operator; + core.String? scope; - /// The value for the attribute. + /// Service account email used to generate the + /// [OAuth token](https://developers.google.com/identity/protocols/OAuth2). + /// + /// The principal who calls this API must have iam.serviceAccounts.actAs + /// permission in the service account. See + /// https://cloud.google.com/iam/docs/understanding-service-accounts for more + /// information. Eventarc service agents must have + /// roles/roles/iam.serviceAccountTokenCreator role to allow Pipeline to + /// create OAuth2 tokens for authenticated requests. /// /// Required. - core.String? value; + core.String? serviceAccount; - EventFilter({ - this.attribute, - this.operator, - this.value, + GoogleCloudEventarcV1PipelineDestinationAuthenticationConfigOAuthToken({ + this.scope, + this.serviceAccount, }); - EventFilter.fromJson(core.Map json_) + GoogleCloudEventarcV1PipelineDestinationAuthenticationConfigOAuthToken.fromJson( + core.Map json_) : this( - attribute: json_['attribute'] as core.String?, - operator: json_['operator'] as core.String?, - value: json_['value'] as core.String?, + scope: json_['scope'] as core.String?, + serviceAccount: json_['serviceAccount'] as core.String?, ); core.Map toJson() => { - if (attribute != null) 'attribute': attribute!, - if (operator != null) 'operator': operator!, - if (value != null) 'value': value!, + if (scope != null) 'scope': scope!, + if (serviceAccount != null) 'serviceAccount': serviceAccount!, }; } -/// A representation of the event type resource. -class EventType { - /// Human friendly description of what the event type is about. +/// Represents a config used to authenticate with a Google OIDC token using a +/// GCP service account. +/// +/// Use this authentication method to invoke your Cloud Run and Cloud Functions +/// destinations or HTTP endpoints that support Google OIDC. +class GoogleCloudEventarcV1PipelineDestinationAuthenticationConfigOidcToken { + /// Audience to be used to generate the OIDC Token. /// - /// For example "Bucket created in Cloud Storage". + /// The audience claim identifies the recipient that the JWT is intended for. + /// If unspecified, the destination URI will be used. /// - /// Output only. - core.String? description; + /// Optional. + core.String? audience; - /// URI for the event schema. + /// Service account email used to generate the OIDC Token. /// - /// For example - /// "https://github.com/googleapis/google-cloudevents/blob/master/proto/google/events/cloud/storage/v1/events.proto" + /// The principal who calls this API must have iam.serviceAccounts.actAs + /// permission in the service account. See + /// https://cloud.google.com/iam/docs/understanding-service-accounts for more + /// information. Eventarc service agents must have + /// roles/roles/iam.serviceAccountTokenCreator role to allow the Pipeline to + /// create OpenID tokens for authenticated requests. /// - /// Output only. - core.String? eventSchemaUri; + /// Required. + core.String? serviceAccount; - /// Filtering attributes for the event type. 
+ GoogleCloudEventarcV1PipelineDestinationAuthenticationConfigOidcToken({ + this.audience, + this.serviceAccount, + }); + + GoogleCloudEventarcV1PipelineDestinationAuthenticationConfigOidcToken.fromJson( + core.Map json_) + : this( + audience: json_['audience'] as core.String?, + serviceAccount: json_['serviceAccount'] as core.String?, + ); + + core.Map toJson() => { + if (audience != null) 'audience': audience!, + if (serviceAccount != null) 'serviceAccount': serviceAccount!, + }; +} + +/// Represents a HTTP endpoint destination. +class GoogleCloudEventarcV1PipelineDestinationHttpEndpoint { + /// The CEL expression used to modify how the destination-bound HTTP request + /// is constructed. + /// + /// If a binding expression is not specified here, the message is treated as a + /// CloudEvent and is mapped to the HTTP request according to the CloudEvent + /// HTTP Protocol Binding Binary Content Mode + /// (https://github.com/cloudevents/spec/blob/main/cloudevents/bindings/http-protocol-binding.md#31-binary-content-mode). + /// In this representation, all fields except the `data` and `datacontenttype` + /// field on the message are mapped to HTTP request headers with a prefix of + /// `ce-`. To construct the HTTP request payload and the value of the + /// content-type HTTP header, the payload format is defined as follows: 1) Use + /// the output_payload_format_type on the Pipeline.Destination if it is set, + /// else: 2) Use the input_payload_format_type on the Pipeline if it is set, + /// else: 3) Treat the payload as opaque binary data. The `data` field of the + /// message is converted to the payload format or left as-is for case 3) and + /// then attached as the payload of the HTTP request. The `content-type` + /// header on the HTTP request is set to the payload format type or left empty + /// for case 3). However, if a mediation has updated the `datacontenttype` + /// field on the message so that it is not the same as the payload format type + /// but it is still a prefix of the payload format type, then the + /// `content-type` header on the HTTP request is set to this `datacontenttype` + /// value. For example, if the `datacontenttype` is "application/json" and the + /// payload format type is "application/json; charset=utf-8", then the + /// `content-type` header on the HTTP request is set to "application/json; + /// charset=utf-8". If a non-empty binding expression is specified then this + /// expression is used to modify the default CloudEvent HTTP Protocol Binding + /// Binary Content representation. The result of the CEL expression must be a + /// map of key/value pairs which is used as follows: - If a map named + /// `headers` exists on the result of the expression, then its key/value pairs + /// are directly mapped to the HTTP request headers. The headers values are + /// constructed from the corresponding value type's canonical representation. + /// If the `headers` field doesn't exist then the resulting HTTP request will + /// be the headers of the CloudEvent HTTP Binding Binary Content Mode + /// representation of the final message. Note: If the specified binding + /// expression, has updated the `datacontenttype` field on the message so that + /// it is not the same as the payload format type but it is still a prefix of + /// the payload format type, then the `content-type` header in the `headers` + /// map is set to this `datacontenttype` value. 
- If a field named `body` + /// exists on the result of the expression then its value is directly mapped + /// to the body of the request. If the value of the `body` field is of type + /// bytes or string then it is used for the HTTP request body as-is, with no + /// conversion. If the body field is of any other type then it is converted to + /// a JSON string. If the body field does not exist then the resulting payload + /// of the HTTP request will be the data value of the CloudEvent HTTP Binding + /// Binary Content Mode representation of the final message as described + /// earlier. - Any other fields in the resulting expression will be ignored. + /// The CEL expression may access the incoming CloudEvent message in its + /// definition, as follows: - The `data` field of the incoming CloudEvent + /// message can be accessed using the `message.data` value. Subfields of + /// `message.data` may also be accessed if an input_payload_format has been + /// specified on the Pipeline. - Each attribute of the incoming CloudEvent + /// message can be accessed using the `message.` value, where is replaced with + /// the name of the attribute. - Existing headers can be accessed in the CEL + /// expression using the `headers` variable. The `headers` variable defines a + /// map of key/value pairs corresponding to the HTTP headers of the CloudEvent + /// HTTP Binding Binary Content Mode representation of the final message as + /// described earlier. For example, the following CEL expression can be used + /// to construct an HTTP request by adding an additional header to the HTTP + /// headers of the CloudEvent HTTP Binding Binary Content Mode representation + /// of the final message and by overwriting the body of the request: ``` { + /// "headers": headers.merge({"new-header-key": "new-header-value"}), "body": + /// "new-body" } ``` - The default binding for the message payload can be + /// accessed using the `body` variable. It contains a string representation of + /// the message payload in the format specified by the `output_payload_format` + /// field. If the `input_payload_format` field is not set, the `body` variable + /// contains the same message payload bytes that were published. Additionally, + /// the following CEL extension functions are provided for use in this CEL + /// expression: - toBase64Url: map.toBase64Url() -\> string - Converts a + /// CelValue to a base64url encoded string - toJsonString: map.toJsonString() + /// -\> string - Converts a CelValue to a JSON string - merge: + /// map1.merge(map2) -\> map3 - Merges the passed CEL map with the existing + /// CEL map the function is applied to. - If the same key exists in both maps, + /// if the key's value is type map both maps are merged else the value from + /// the passed map is used. - denormalize: map.denormalize() -\> map - + /// Denormalizes a CEL map such that every value of type map or key in the map + /// is expanded to return a single level map. - The resulting keys are "." + /// separated indices of the map keys. - For example: { "a": 1, "b": { "c": 2, + /// "d": 3 } "e": \[4, 5\] } .denormalize() -\> { "a": 1, "b.c": 2, "b.d": 3, + /// "e.0": 4, "e.1": 5 } - setField: map.setField(key, value) -\> message - + /// Sets the field of the message with the given key to the given value. - If + /// the field is not present it will be added. - If the field is present it + /// will be overwritten. - The key can be a dot separated path to set a field + /// in a nested message. - Key must be of type string. 
- Value may be any + /// valid type. - removeFields: map.removeFields(\[key1, key2, ...\]) -\> + /// message - Removes the fields of the map with the given keys. - The keys + /// can be a dot separated path to remove a field in a nested message. - If a + /// key is not found it will be ignored. - Keys must be of type string. - + /// toMap: \[map1, map2, ...\].toMap() -\> map - Converts a CEL list of CEL + /// maps to a single CEL map - toCloudEventJsonWithPayloadFormat: + /// message.toCloudEventJsonWithPayloadFormat() -\> map - Converts a message + /// to the corresponding structure of JSON format for CloudEvents. - It + /// converts `data` to destination payload format specified in + /// `output_payload_format`. If `output_payload_format` is not set, the data + /// will remain unchanged. - It also sets the corresponding datacontenttype of + /// the CloudEvent, as indicated by `output_payload_format`. If no + /// `output_payload_format` is set it will use the value of the + /// "datacontenttype" attribute on the CloudEvent if present, else remove + /// "datacontenttype" attribute. - This function expects that the content of + /// the message will adhere to the standard CloudEvent format. If it doesn't + /// then this function will fail. - The result is a CEL map that corresponds + /// to the JSON representation of the CloudEvent. To convert that data to a + /// JSON string it can be chained with the toJsonString function. The Pipeline + /// expects that the message it receives adheres to the standard CloudEvent + /// format. If it doesn't then the outgoing message request may fail with a + /// persistent error. /// - /// Output only. - core.List? filteringAttributes; + /// Optional. + core.String? messageBindingTemplate; - /// The full name of the event type (for example, - /// "google.cloud.storage.object.v1.finalized"). + /// The URI of the HTTP endpoint. /// - /// In the form of {provider-specific-prefix}.{resource}.{version}.{verb}. - /// Types MUST be versioned and event schemas are guaranteed to remain - /// backward compatible within one version. Note that event type versions and - /// API versions do not need to match. + /// The value must be a RFC2396 URI string. Examples: + /// `https://svc.us-central1.p.local:8080/route`. Only the HTTPS protocol is + /// supported. /// - /// Output only. - core.String? type; + /// Required. + core.String? uri; - EventType({ - this.description, - this.eventSchemaUri, - this.filteringAttributes, - this.type, + GoogleCloudEventarcV1PipelineDestinationHttpEndpoint({ + this.messageBindingTemplate, + this.uri, }); - EventType.fromJson(core.Map json_) + GoogleCloudEventarcV1PipelineDestinationHttpEndpoint.fromJson(core.Map json_) : this( - description: json_['description'] as core.String?, - eventSchemaUri: json_['eventSchemaUri'] as core.String?, - filteringAttributes: (json_['filteringAttributes'] as core.List?) 
- ?.map((value) => FilteringAttribute.fromJson( - value as core.Map)) - .toList(), - type: json_['type'] as core.String?, + messageBindingTemplate: + json_['messageBindingTemplate'] as core.String?, + uri: json_['uri'] as core.String?, ); core.Map toJson() => { - if (description != null) 'description': description!, - if (eventSchemaUri != null) 'eventSchemaUri': eventSchemaUri!, - if (filteringAttributes != null) - 'filteringAttributes': filteringAttributes!, - if (type != null) 'type': type!, + if (messageBindingTemplate != null) + 'messageBindingTemplate': messageBindingTemplate!, + if (uri != null) 'uri': uri!, }; } -/// Represents a textual expression in the Common Expression Language (CEL) -/// syntax. -/// -/// CEL is a C-like expression language. The syntax and semantics of CEL are -/// documented at https://github.com/google/cel-spec. Example (Comparison): -/// title: "Summary size limit" description: "Determines if a summary is less -/// than 100 chars" expression: "document.summary.size() \< 100" Example -/// (Equality): title: "Requestor is owner" description: "Determines if -/// requestor is the document owner" expression: "document.owner == -/// request.auth.claims.email" Example (Logic): title: "Public documents" -/// description: "Determine whether the document should be publicly visible" -/// expression: "document.type != 'private' && document.type != 'internal'" -/// Example (Data Manipulation): title: "Notification string" description: -/// "Create a notification string with a timestamp." expression: "'New message -/// received at ' + string(document.create_time)" The exact variables and -/// functions that may be referenced within an expression are determined by the -/// service that evaluates it. See the service documentation for additional -/// information. -typedef Expr = $Expr; - -/// A representation of the FilteringAttribute resource. -/// -/// Filtering attributes are per event type. -class FilteringAttribute { - /// Attribute used for filtering the event type. +/// Represents a network config to be used for destination resolution and +/// connectivity. +class GoogleCloudEventarcV1PipelineDestinationNetworkConfig { + /// Name of the NetworkAttachment that allows access to the consumer VPC. /// - /// Output only. - core.String? attribute; - - /// Description of the purpose of the attribute. + /// Format: + /// `projects/{PROJECT_ID}/regions/{REGION}/networkAttachments/{NETWORK_ATTACHMENT_NAME}` /// - /// Output only. - core.String? description; + /// Required. + core.String? networkAttachment; - /// If true, the attribute accepts matching expressions in the Eventarc - /// PathPattern format. - /// - /// Output only. - core.bool? pathPatternSupported; + GoogleCloudEventarcV1PipelineDestinationNetworkConfig({ + this.networkAttachment, + }); + + GoogleCloudEventarcV1PipelineDestinationNetworkConfig.fromJson(core.Map json_) + : this( + networkAttachment: json_['networkAttachment'] as core.String?, + ); + + core.Map toJson() => { + if (networkAttachment != null) 'networkAttachment': networkAttachment!, + }; +} - /// If true, the triggers for this provider should always specify a filter on - /// these attributes. - /// - /// Trigger creation will fail otherwise. +/// Mediation defines different ways to modify the Pipeline. +class GoogleCloudEventarcV1PipelineMediation { + /// How the Pipeline is to transform messages /// - /// Output only. - core.bool? required; + /// Optional. + GoogleCloudEventarcV1PipelineMediationTransformation? 
transformation; - FilteringAttribute({ - this.attribute, - this.description, - this.pathPatternSupported, - this.required, + GoogleCloudEventarcV1PipelineMediation({ + this.transformation, }); - FilteringAttribute.fromJson(core.Map json_) + GoogleCloudEventarcV1PipelineMediation.fromJson(core.Map json_) : this( - attribute: json_['attribute'] as core.String?, - description: json_['description'] as core.String?, - pathPatternSupported: json_['pathPatternSupported'] as core.bool?, - required: json_['required'] as core.bool?, + transformation: json_.containsKey('transformation') + ? GoogleCloudEventarcV1PipelineMediationTransformation.fromJson( + json_['transformation'] + as core.Map) + : null, ); core.Map toJson() => { - if (attribute != null) 'attribute': attribute!, - if (description != null) 'description': description!, - if (pathPatternSupported != null) - 'pathPatternSupported': pathPatternSupported!, - if (required != null) 'required': required!, + if (transformation != null) 'transformation': transformation!, }; } -/// Represents a GKE destination. -class GKE { - /// The name of the cluster the GKE service is running in. - /// - /// The cluster must be running in the same project as the trigger being - /// created. +/// Transformation defines the way to transform an incoming message. +class GoogleCloudEventarcV1PipelineMediationTransformation { + /// The CEL expression template to apply to transform messages. + /// + /// The following CEL extension functions are provided for use in this CEL + /// expression: - merge: map1.merge(map2) -\> map3 - Merges the passed CEL map + /// with the existing CEL map the function is applied to. - If the same key + /// exists in both maps, if the key's value is type map both maps are merged + /// else the value from the passed map is used. - denormalize: + /// map.denormalize() -\> map - Denormalizes a CEL map such that every value + /// of type map or key in the map is expanded to return a single level map. - + /// The resulting keys are "." separated indices of the map keys. - For + /// example: { "a": 1, "b": { "c": 2, "d": 3 } "e": \[4, 5\] } .denormalize() + /// -\> { "a": 1, "b.c": 2, "b.d": 3, "e.0": 4, "e.1": 5 } - setField: + /// map.setField(key, value) -\> message - Sets the field of the message with + /// the given key to the given value. - If the field is not present it will be + /// added. - If the field is present it will be overwritten. - The key can be + /// a dot separated path to set a field in a nested message. - Key must be of + /// type string. - Value may be any valid type. - removeFields: + /// map.removeFields(\[key1, key2, ...\]) -\> message - Removes the fields of + /// the map with the given keys. - The keys can be a dot separated path to + /// remove a field in a nested message. - If a key is not found it will be + /// ignored. - Keys must be of type string. - toMap: \[map1, map2, + /// ...\].toMap() -\> map - Converts a CEL list of CEL maps to a single CEL + /// map - toDestinationPayloadFormat(): + /// message.data.toDestinationPayloadFormat() -\> string or bytes - Converts + /// the message data to the destination payload format specified in + /// Pipeline.Destination.output_payload_format - This function is meant to be + /// applied to the message.data field. - If the destination payload format is + /// not set, the function will return the message data unchanged. 
- + /// toCloudEventJsonWithPayloadFormat: + /// message.toCloudEventJsonWithPayloadFormat() -\> map - Converts a message + /// to the corresponding structure of JSON format for CloudEvents - This + /// function applies toDestinationPayloadFormat() to the message data. It also + /// sets the corresponding datacontenttype of the CloudEvent, as indicated by + /// Pipeline.Destination.output_payload_format. If no output_payload_format is + /// set it will use the existing datacontenttype on the CloudEvent if present, + /// else leave datacontenttype absent. - This function expects that the + /// content of the message will adhere to the standard CloudEvent format. If + /// it doesn't then this function will fail. - The result is a CEL map that + /// corresponds to the JSON representation of the CloudEvent. To convert that + /// data to a JSON string it can be chained with the toJsonString function. /// - /// Required. - core.String? cluster; + /// Optional. + core.String? transformationTemplate; - /// The name of the Google Compute Engine in which the cluster resides, which - /// can either be compute zone (for example, us-central1-a) for the zonal - /// clusters or region (for example, us-central1) for regional clusters. - /// - /// Required. - core.String? location; + GoogleCloudEventarcV1PipelineMediationTransformation({ + this.transformationTemplate, + }); - /// The namespace the GKE service is running in. - /// - /// Required. - core.String? namespace; + GoogleCloudEventarcV1PipelineMediationTransformation.fromJson(core.Map json_) + : this( + transformationTemplate: + json_['transformationTemplate'] as core.String?, + ); - /// The relative path on the GKE service the events should be sent to. + core.Map toJson() => { + if (transformationTemplate != null) + 'transformationTemplate': transformationTemplate!, + }; +} + +/// Represents the format of message data. +class GoogleCloudEventarcV1PipelineMessagePayloadFormat { + /// Avro format. /// - /// The value must conform to the definition of a URI path segment (section - /// 3.3 of RFC2396). Examples: "/route", "route", "route/subroute". + /// Optional. + GoogleCloudEventarcV1PipelineMessagePayloadFormatAvroFormat? avro; + + /// JSON format. /// /// Optional. - core.String? path; + GoogleCloudEventarcV1PipelineMessagePayloadFormatJsonFormat? json; - /// Name of the GKE service. + /// Protobuf format. /// - /// Required. - core.String? service; + /// Optional. + GoogleCloudEventarcV1PipelineMessagePayloadFormatProtobufFormat? protobuf; - GKE({ - this.cluster, - this.location, - this.namespace, - this.path, - this.service, + GoogleCloudEventarcV1PipelineMessagePayloadFormat({ + this.avro, + this.json, + this.protobuf, }); - GKE.fromJson(core.Map json_) + GoogleCloudEventarcV1PipelineMessagePayloadFormat.fromJson(core.Map json_) : this( - cluster: json_['cluster'] as core.String?, - location: json_['location'] as core.String?, - namespace: json_['namespace'] as core.String?, - path: json_['path'] as core.String?, - service: json_['service'] as core.String?, + avro: json_.containsKey('avro') + ? GoogleCloudEventarcV1PipelineMessagePayloadFormatAvroFormat + .fromJson( + json_['avro'] as core.Map) + : null, + json: json_.containsKey('json') + ? GoogleCloudEventarcV1PipelineMessagePayloadFormatJsonFormat + .fromJson( + json_['json'] as core.Map) + : null, + protobuf: json_.containsKey('protobuf') + ? 
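A sketch of a single transformation mediation built from the classes above; the CEL template only illustrates the setField/removeFields extension functions described in the doc comment and is not taken from the source.

import 'dart:convert' show jsonEncode;

import 'package:googleapis/eventarc/v1.dart';

void main() {
  final mediation = GoogleCloudEventarcV1PipelineMediation(
    transformation: GoogleCloudEventarcV1PipelineMediationTransformation(
      // Add a field and drop another; keys are dot-separated paths.
      transformationTemplate:
          'message.setField("data.processed", true)'
          '.removeFields(["data.debug"])',
    ),
  );

  print(jsonEncode(mediation.toJson()));
}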
GoogleCloudEventarcV1PipelineMessagePayloadFormatProtobufFormat + .fromJson( + json_['protobuf'] as core.Map) + : null, ); core.Map toJson() => { - if (cluster != null) 'cluster': cluster!, - if (location != null) 'location': location!, - if (namespace != null) 'namespace': namespace!, - if (path != null) 'path': path!, - if (service != null) 'service': service!, + if (avro != null) 'avro': avro!, + if (json != null) 'json': json!, + if (protobuf != null) 'protobuf': protobuf!, }; } -/// A GoogleChannelConfig is a resource that stores the custom settings -/// respected by Eventarc first-party triggers in the matching region. +/// The format of an AVRO message payload. +typedef GoogleCloudEventarcV1PipelineMessagePayloadFormatAvroFormat = $Format; + +/// The format of a JSON message payload. +typedef GoogleCloudEventarcV1PipelineMessagePayloadFormatJsonFormat = $Empty; + +/// The format of a Protobuf message payload. +typedef GoogleCloudEventarcV1PipelineMessagePayloadFormatProtobufFormat + = $Format; + +/// The retry policy configuration for the Pipeline. /// -/// Once configured, first-party event data will be protected using the -/// specified custom managed encryption key instead of Google-managed encryption -/// keys. -class GoogleChannelConfig { - /// Resource name of a KMS crypto key (managed by the user) used to - /// encrypt/decrypt their event data. - /// - /// It must match the pattern `projects / * /locations / * /keyRings / * - /// /cryptoKeys / * `. +/// The pipeline exponentially backs off in case the destination is non +/// responsive or returns a retryable error code. The default semantics are as +/// follows: The backoff starts with a 5 second delay and doubles the delay +/// after each failed attempt (10 seconds, 20 seconds, 40 seconds, etc.). The +/// delay is capped at 60 seconds by default. Please note that if you set the +/// min_retry_delay and max_retry_delay fields to the same value this will make +/// the duration between retries constant. +class GoogleCloudEventarcV1PipelineRetryPolicy { + /// The maximum number of delivery attempts for any message. + /// + /// The value must be between 1 and 100. The default value for this field is + /// 5. /// /// Optional. - core.String? cryptoKeyName; + core.int? maxAttempts; - /// The resource name of the config. + /// The maximum amount of seconds to wait between retry attempts. /// - /// Must be in the format of, - /// `projects/{project}/locations/{location}/googleChannelConfig`. + /// The value must be between 1 and 600. The default value for this field is + /// 60. /// - /// Required. - core.String? name; + /// Optional. + core.String? maxRetryDelay; - /// The last-modified time. + /// The minimum amount of seconds to wait between retry attempts. /// - /// Output only. - core.String? updateTime; + /// The value must be between 1 and 600. The default value for this field is + /// 5. + /// + /// Optional. + core.String? 
minRetryDelay; - GoogleChannelConfig({ - this.cryptoKeyName, - this.name, - this.updateTime, + GoogleCloudEventarcV1PipelineRetryPolicy({ + this.maxAttempts, + this.maxRetryDelay, + this.minRetryDelay, }); - GoogleChannelConfig.fromJson(core.Map json_) + GoogleCloudEventarcV1PipelineRetryPolicy.fromJson(core.Map json_) : this( - cryptoKeyName: json_['cryptoKeyName'] as core.String?, - name: json_['name'] as core.String?, - updateTime: json_['updateTime'] as core.String?, + maxAttempts: json_['maxAttempts'] as core.int?, + maxRetryDelay: json_['maxRetryDelay'] as core.String?, + minRetryDelay: json_['minRetryDelay'] as core.String?, ); core.Map toJson() => { - if (cryptoKeyName != null) 'cryptoKeyName': cryptoKeyName!, - if (name != null) 'name': name!, - if (updateTime != null) 'updateTime': updateTime!, + if (maxAttempts != null) 'maxAttempts': maxAttempts!, + if (maxRetryDelay != null) 'maxRetryDelay': maxRetryDelay!, + if (minRetryDelay != null) 'minRetryDelay': minRetryDelay!, }; } @@ -3352,10 +5352,193 @@ class ListChannelsResponse { this.unreachable, }); - ListChannelsResponse.fromJson(core.Map json_) + ListChannelsResponse.fromJson(core.Map json_) + : this( + channels: (json_['channels'] as core.List?) + ?.map((value) => Channel.fromJson( + value as core.Map)) + .toList(), + nextPageToken: json_['nextPageToken'] as core.String?, + unreachable: (json_['unreachable'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + ); + + core.Map toJson() => { + if (channels != null) 'channels': channels!, + if (nextPageToken != null) 'nextPageToken': nextPageToken!, + if (unreachable != null) 'unreachable': unreachable!, + }; +} + +/// The response message for the `ListEnrollments` method. +class ListEnrollmentsResponse { + /// The requested Enrollments, up to the number specified in `page_size`. + core.List? enrollments; + + /// A page token that can be sent to `ListEnrollments` to request the next + /// page. + /// + /// If this is empty, then there are no more pages. + core.String? nextPageToken; + + /// Unreachable resources, if any. + core.List? unreachable; + + ListEnrollmentsResponse({ + this.enrollments, + this.nextPageToken, + this.unreachable, + }); + + ListEnrollmentsResponse.fromJson(core.Map json_) + : this( + enrollments: (json_['enrollments'] as core.List?) + ?.map((value) => Enrollment.fromJson( + value as core.Map)) + .toList(), + nextPageToken: json_['nextPageToken'] as core.String?, + unreachable: (json_['unreachable'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + ); + + core.Map toJson() => { + if (enrollments != null) 'enrollments': enrollments!, + if (nextPageToken != null) 'nextPageToken': nextPageToken!, + if (unreachable != null) 'unreachable': unreachable!, + }; +} + +/// The response message for the `ListGoogleApiSources` method. +class ListGoogleApiSourcesResponse { + /// The requested GoogleApiSources, up to the number specified in `page_size`. + core.List? googleApiSources; + + /// A page token that can be sent to `ListMessageBusEnrollments` to request + /// the next page. + /// + /// If this is empty, then there are no more pages. + core.String? nextPageToken; + + /// Unreachable resources, if any. + core.List? unreachable; + + ListGoogleApiSourcesResponse({ + this.googleApiSources, + this.nextPageToken, + this.unreachable, + }); + + ListGoogleApiSourcesResponse.fromJson(core.Map json_) + : this( + googleApiSources: (json_['googleApiSources'] as core.List?) 
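The retry policy above, written out as a sketch with its documented defaults; the "5s"/"60s" spellings assume the protobuf Duration JSON encoding for these string fields.

import 'dart:convert' show jsonEncode;

import 'package:googleapis/eventarc/v1.dart';

void main() {
  final retry = GoogleCloudEventarcV1PipelineRetryPolicy(
    maxAttempts: 5, // 1..100, default 5
    minRetryDelay: '5s', // first backoff delay
    maxRetryDelay: '60s', // cap; equal min/max gives a constant delay
  );

  print(jsonEncode(retry.toJson()));
}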
+ ?.map((value) => GoogleApiSource.fromJson( + value as core.Map)) + .toList(), + nextPageToken: json_['nextPageToken'] as core.String?, + unreachable: (json_['unreachable'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + ); + + core.Map toJson() => { + if (googleApiSources != null) 'googleApiSources': googleApiSources!, + if (nextPageToken != null) 'nextPageToken': nextPageToken!, + if (unreachable != null) 'unreachable': unreachable!, + }; +} + +/// The response message for Locations.ListLocations. +class ListLocationsResponse { + /// A list of locations that matches the specified filter in the request. + core.List? locations; + + /// The standard List next-page token. + core.String? nextPageToken; + + ListLocationsResponse({ + this.locations, + this.nextPageToken, + }); + + ListLocationsResponse.fromJson(core.Map json_) + : this( + locations: (json_['locations'] as core.List?) + ?.map((value) => Location.fromJson( + value as core.Map)) + .toList(), + nextPageToken: json_['nextPageToken'] as core.String?, + ); + + core.Map toJson() => { + if (locations != null) 'locations': locations!, + if (nextPageToken != null) 'nextPageToken': nextPageToken!, + }; +} + +/// The response message for the \`ListMessageBusEnrollments\` method.\` +class ListMessageBusEnrollmentsResponse { + /// The requested enrollments, up to the number specified in `page_size`. + core.List? enrollments; + + /// A page token that can be sent to `ListMessageBusEnrollments` to request + /// the next page. + /// + /// If this is empty, then there are no more pages. + core.String? nextPageToken; + + /// Unreachable resources, if any. + core.List? unreachable; + + ListMessageBusEnrollmentsResponse({ + this.enrollments, + this.nextPageToken, + this.unreachable, + }); + + ListMessageBusEnrollmentsResponse.fromJson(core.Map json_) + : this( + enrollments: (json_['enrollments'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + nextPageToken: json_['nextPageToken'] as core.String?, + unreachable: (json_['unreachable'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + ); + + core.Map toJson() => { + if (enrollments != null) 'enrollments': enrollments!, + if (nextPageToken != null) 'nextPageToken': nextPageToken!, + if (unreachable != null) 'unreachable': unreachable!, + }; +} + +/// The response message for the `ListMessageBuses` method. +class ListMessageBusesResponse { + /// The requested message buses, up to the number specified in `page_size`. + core.List? messageBuses; + + /// A page token that can be sent to `ListMessageBuses` to request the next + /// page. + /// + /// If this is empty, then there are no more pages. + core.String? nextPageToken; + + /// Unreachable resources, if any. + core.List? unreachable; + + ListMessageBusesResponse({ + this.messageBuses, + this.nextPageToken, + this.unreachable, + }); + + ListMessageBusesResponse.fromJson(core.Map json_) : this( - channels: (json_['channels'] as core.List?) - ?.map((value) => Channel.fromJson( + messageBuses: (json_['messageBuses'] as core.List?) 
+ ?.map((value) => MessageBus.fromJson( value as core.Map)) .toList(), nextPageToken: json_['nextPageToken'] as core.String?, @@ -3365,37 +5548,47 @@ class ListChannelsResponse { ); core.Map toJson() => { - if (channels != null) 'channels': channels!, + if (messageBuses != null) 'messageBuses': messageBuses!, if (nextPageToken != null) 'nextPageToken': nextPageToken!, if (unreachable != null) 'unreachable': unreachable!, }; } -/// The response message for Locations.ListLocations. -class ListLocationsResponse { - /// A list of locations that matches the specified filter in the request. - core.List? locations; - - /// The standard List next-page token. +/// The response message for the ListPipelines method. +class ListPipelinesResponse { + /// A page token that can be sent to `ListPipelines` to request the next page. + /// + /// If this is empty, then there are no more pages. core.String? nextPageToken; - ListLocationsResponse({ - this.locations, + /// The requested pipelines, up to the number specified in `page_size`. + core.List? pipelines; + + /// Unreachable resources, if any. + core.List? unreachable; + + ListPipelinesResponse({ this.nextPageToken, + this.pipelines, + this.unreachable, }); - ListLocationsResponse.fromJson(core.Map json_) + ListPipelinesResponse.fromJson(core.Map json_) : this( - locations: (json_['locations'] as core.List?) - ?.map((value) => Location.fromJson( + nextPageToken: json_['nextPageToken'] as core.String?, + pipelines: (json_['pipelines'] as core.List?) + ?.map((value) => Pipeline.fromJson( value as core.Map)) .toList(), - nextPageToken: json_['nextPageToken'] as core.String?, + unreachable: (json_['unreachable'] as core.List?) + ?.map((value) => value as core.String) + .toList(), ); core.Map toJson() => { - if (locations != null) 'locations': locations!, if (nextPageToken != null) 'nextPageToken': nextPageToken!, + if (pipelines != null) 'pipelines': pipelines!, + if (unreachable != null) 'unreachable': unreachable!, }; } @@ -3478,6 +5671,172 @@ class ListTriggersResponse { /// A resource that represents a Google Cloud location. typedef Location = $Location00; +/// The configuration for Platform Telemetry logging for Eventarc Advanced +/// resources. +class LoggingConfig { + /// The minimum severity of logs that will be sent to Stackdriver/Platform + /// Telemetry. + /// + /// Logs at severitiy ≥ this value will be sent, unless it is NONE. + /// + /// Optional. + /// Possible string values are: + /// - "LOG_SEVERITY_UNSPECIFIED" : Log severity is not specified. This value + /// is treated the same as NONE, but is used to distinguish between no update + /// and update to NONE in update_masks. + /// - "NONE" : Default value at resource creation, presence of this value must + /// be treated as no logging/disable logging. + /// - "DEBUG" : Debug or trace level logging. + /// - "INFO" : Routine information, such as ongoing status or performance. + /// - "NOTICE" : Normal but significant events, such as start up, shut down, + /// or a configuration change. + /// - "WARNING" : Warning events might cause problems. + /// - "ERROR" : Error events are likely to cause problems. + /// - "CRITICAL" : Critical events cause more severe problems or outages. + /// - "ALERT" : A person must take action immediately. + /// - "EMERGENCY" : One or more systems are unusable. + core.String? 
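A sketch of draining the paginated ListPipelines results described above; the page fetcher is taken as a callback so that no particular resource accessor has to be assumed here.

import 'package:googleapis/eventarc/v1.dart';

Future<List<Pipeline>> listAllPipelines(
  Future<ListPipelinesResponse> Function(String? pageToken) fetchPage,
) async {
  final pipelines = <Pipeline>[];
  String? pageToken;
  do {
    final page = await fetchPage(pageToken);
    pipelines.addAll(page.pipelines ?? const []);
    for (final location in page.unreachable ?? const <String>[]) {
      print('warning: unreachable location: $location');
    }
    // An empty next_page_token means there are no more pages.
    pageToken = page.nextPageToken;
  } while (pageToken != null && pageToken.isNotEmpty);
  return pipelines;
}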
logSeverity; + + LoggingConfig({ + this.logSeverity, + }); + + LoggingConfig.fromJson(core.Map json_) + : this( + logSeverity: json_['logSeverity'] as core.String?, + ); + + core.Map toJson() => { + if (logSeverity != null) 'logSeverity': logSeverity!, + }; +} + +/// MessageBus for the messages flowing through the system. +/// +/// The admin has visibility and control over the messages being published and +/// consumed and can restrict publishers and subscribers to only a subset of +/// data available in the system by defining authorization policies. +class MessageBus { + /// Resource annotations. + /// + /// Optional. + core.Map? annotations; + + /// The creation time. + /// + /// Output only. + core.String? createTime; + + /// Resource name of a KMS crypto key (managed by the user) used to + /// encrypt/decrypt their event data. + /// + /// It must match the pattern `projects / * /locations / * /keyRings / * + /// /cryptoKeys / * `. + /// + /// Optional. + core.String? cryptoKeyName; + + /// Resource display name. + /// + /// Optional. + core.String? displayName; + + /// This checksum is computed by the server based on the value of other + /// fields, and might be sent only on update and delete requests to ensure + /// that the client has an up-to-date value before proceeding. + /// + /// Output only. + core.String? etag; + + /// Resource labels. + /// + /// Optional. + core.Map? labels; + + /// Config to control Platform logging for the Message Bus. + /// + /// This log configuration is applied to the Message Bus itself, and all the + /// Enrollments attached to it. + /// + /// Optional. + LoggingConfig? loggingConfig; + + /// Identifier. + /// + /// Resource name of the form + /// projects/{project}/locations/{location}/messageBuses/{message_bus} + core.String? name; + + /// Server assigned unique identifier for the channel. + /// + /// The value is a UUID4 string and guaranteed to remain unchanged until the + /// resource is deleted. + /// + /// Output only. + core.String? uid; + + /// The last-modified time. + /// + /// Output only. + core.String? updateTime; + + MessageBus({ + this.annotations, + this.createTime, + this.cryptoKeyName, + this.displayName, + this.etag, + this.labels, + this.loggingConfig, + this.name, + this.uid, + this.updateTime, + }); + + MessageBus.fromJson(core.Map json_) + : this( + annotations: + (json_['annotations'] as core.Map?) + ?.map( + (key, value) => core.MapEntry( + key, + value as core.String, + ), + ), + createTime: json_['createTime'] as core.String?, + cryptoKeyName: json_['cryptoKeyName'] as core.String?, + displayName: json_['displayName'] as core.String?, + etag: json_['etag'] as core.String?, + labels: + (json_['labels'] as core.Map?)?.map( + (key, value) => core.MapEntry( + key, + value as core.String, + ), + ), + loggingConfig: json_.containsKey('loggingConfig') + ? 
LoggingConfig.fromJson( + json_['loggingConfig'] as core.Map) + : null, + name: json_['name'] as core.String?, + uid: json_['uid'] as core.String?, + updateTime: json_['updateTime'] as core.String?, + ); + + core.Map toJson() => { + if (annotations != null) 'annotations': annotations!, + if (createTime != null) 'createTime': createTime!, + if (cryptoKeyName != null) 'cryptoKeyName': cryptoKeyName!, + if (displayName != null) 'displayName': displayName!, + if (etag != null) 'etag': etag!, + if (labels != null) 'labels': labels!, + if (loggingConfig != null) 'loggingConfig': loggingConfig!, + if (name != null) 'name': name!, + if (uid != null) 'uid': uid!, + if (updateTime != null) 'updateTime': updateTime!, + }; +} + /// Network Configuration that can be inherited by other protos. class NetworkConfig { /// Name of the NetworkAttachment that allows access to the customer's VPC. @@ -3502,6 +5861,199 @@ class NetworkConfig { }; } +/// A representation of the Pipeline resource. +class Pipeline { + /// User-defined annotations. + /// + /// See https://google.aip.dev/128#annotations. + /// + /// Optional. + core.Map? annotations; + + /// The creation time. + /// + /// A timestamp in RFC3339 UTC "Zulu" format, with nanosecond resolution and + /// up to nine fractional digits. Examples: "2014-10-02T15:01:23Z" and + /// "2014-10-02T15:01:23.045123456Z". + /// + /// Output only. + core.String? createTime; + + /// Resource name of a KMS crypto key (managed by the user) used to + /// encrypt/decrypt the event data. + /// + /// If not set, an internal Google-owned key will be used to encrypt messages. + /// It must match the pattern + /// "projects/{project}/locations/{location}/keyRings/{keyring}/cryptoKeys/{key}". + /// + /// Optional. + core.String? cryptoKeyName; + + /// List of destinations to which messages will be forwarded. + /// + /// Currently, exactly one destination is supported per Pipeline. + /// + /// Required. + core.List? destinations; + + /// Display name of resource. + /// + /// Optional. + core.String? displayName; + + /// This checksum is computed by the server based on the value of other + /// fields, and might be sent only on create requests to ensure that the + /// client has an up-to-date value before proceeding. + /// + /// Output only. + core.String? etag; + + /// The payload format expected for the messages received by the Pipeline. + /// + /// If input_payload_format is set then any messages not matching this format + /// will be treated as persistent errors. If input_payload_format is not set, + /// then the message data will be treated as an opaque binary and no output + /// format can be set on the Pipeline through the + /// Pipeline.Destination.output_payload_format field. Any Mediations on the + /// Pipeline that involve access to the data field will fail as persistent + /// errors. + /// + /// Optional. + GoogleCloudEventarcV1PipelineMessagePayloadFormat? inputPayloadFormat; + + /// User labels attached to the Pipeline that can be used to group resources. + /// + /// An object containing a list of "key": value pairs. Example: { "name": + /// "wrench", "mass": "1.3kg", "count": "3" }. + /// + /// Optional. + core.Map? labels; + + /// Config to control Platform Logging for Pipelines. + /// + /// Optional. + LoggingConfig? loggingConfig; + + /// List of mediation operations to be performed on the message. + /// + /// Currently, only one Transformation operation is allowed in each Pipeline. + /// + /// Optional. + core.List? mediations; + + /// Identifier. 
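A sketch of a MessageBus with Platform logging enabled, using the LoggingConfig and MessageBus classes above; the project and location values are placeholders.

import 'dart:convert' show jsonEncode;

import 'package:googleapis/eventarc/v1.dart';

void main() {
  final bus = MessageBus(
    name: 'projects/my-project/locations/us-central1/messageBuses/my-bus',
    displayName: 'Orders message bus',
    labels: {'env': 'prod'},
    // Log NOTICE and above; NONE disables logging.
    loggingConfig: LoggingConfig(logSeverity: 'NOTICE'),
  );

  print(jsonEncode(bus.toJson()));
}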
+ /// + /// The resource name of the Pipeline. Must be unique within the location of + /// the project and must be in + /// `projects/{project}/locations/{location}/pipelines/{pipeline}` format. + core.String? name; + + /// The retry policy to use in the pipeline. + /// + /// Optional. + GoogleCloudEventarcV1PipelineRetryPolicy? retryPolicy; + + /// Server-assigned unique identifier for the Pipeline. + /// + /// The value is a UUID4 string and guaranteed to remain unchanged until the + /// resource is deleted. + /// + /// Output only. + core.String? uid; + + /// The last-modified time. + /// + /// A timestamp in RFC3339 UTC "Zulu" format, with nanosecond resolution and + /// up to nine fractional digits. Examples: "2014-10-02T15:01:23Z" and + /// "2014-10-02T15:01:23.045123456Z". + /// + /// Output only. + core.String? updateTime; + + Pipeline({ + this.annotations, + this.createTime, + this.cryptoKeyName, + this.destinations, + this.displayName, + this.etag, + this.inputPayloadFormat, + this.labels, + this.loggingConfig, + this.mediations, + this.name, + this.retryPolicy, + this.uid, + this.updateTime, + }); + + Pipeline.fromJson(core.Map json_) + : this( + annotations: + (json_['annotations'] as core.Map?) + ?.map( + (key, value) => core.MapEntry( + key, + value as core.String, + ), + ), + createTime: json_['createTime'] as core.String?, + cryptoKeyName: json_['cryptoKeyName'] as core.String?, + destinations: (json_['destinations'] as core.List?) + ?.map((value) => + GoogleCloudEventarcV1PipelineDestination.fromJson( + value as core.Map)) + .toList(), + displayName: json_['displayName'] as core.String?, + etag: json_['etag'] as core.String?, + inputPayloadFormat: json_.containsKey('inputPayloadFormat') + ? GoogleCloudEventarcV1PipelineMessagePayloadFormat.fromJson( + json_['inputPayloadFormat'] + as core.Map) + : null, + labels: + (json_['labels'] as core.Map?)?.map( + (key, value) => core.MapEntry( + key, + value as core.String, + ), + ), + loggingConfig: json_.containsKey('loggingConfig') + ? LoggingConfig.fromJson( + json_['loggingConfig'] as core.Map) + : null, + mediations: (json_['mediations'] as core.List?) + ?.map((value) => GoogleCloudEventarcV1PipelineMediation.fromJson( + value as core.Map)) + .toList(), + name: json_['name'] as core.String?, + retryPolicy: json_.containsKey('retryPolicy') + ? GoogleCloudEventarcV1PipelineRetryPolicy.fromJson( + json_['retryPolicy'] as core.Map) + : null, + uid: json_['uid'] as core.String?, + updateTime: json_['updateTime'] as core.String?, + ); + + core.Map toJson() => { + if (annotations != null) 'annotations': annotations!, + if (createTime != null) 'createTime': createTime!, + if (cryptoKeyName != null) 'cryptoKeyName': cryptoKeyName!, + if (destinations != null) 'destinations': destinations!, + if (displayName != null) 'displayName': displayName!, + if (etag != null) 'etag': etag!, + if (inputPayloadFormat != null) + 'inputPayloadFormat': inputPayloadFormat!, + if (labels != null) 'labels': labels!, + if (loggingConfig != null) 'loggingConfig': loggingConfig!, + if (mediations != null) 'mediations': mediations!, + if (name != null) 'name': name!, + if (retryPolicy != null) 'retryPolicy': retryPolicy!, + if (uid != null) 'uid': uid!, + if (updateTime != null) 'updateTime': updateTime!, + }; +} + /// An Identity and Access Management (IAM) policy, which specifies access /// controls for Google Cloud resources. 
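A sketch that assembles a complete Pipeline from the pieces defined above. The httpEndpoint and networkConfig field names on GoogleCloudEventarcV1PipelineDestination are assumed from the underlying message (that class body sits outside this hunk); everything else appears above.

import 'dart:convert' show jsonEncode;

import 'package:googleapis/eventarc/v1.dart';

void main() {
  final pipeline = Pipeline(
    name: 'projects/my-project/locations/us-central1/pipelines/my-pipeline',
    displayName: 'Orders pipeline',
    // Incoming messages are expected to be JSON.
    inputPayloadFormat: GoogleCloudEventarcV1PipelineMessagePayloadFormat(
      json: GoogleCloudEventarcV1PipelineMessagePayloadFormatJsonFormat(),
    ),
    // Exactly one destination is supported per Pipeline.
    destinations: [
      GoogleCloudEventarcV1PipelineDestination(
        httpEndpoint: GoogleCloudEventarcV1PipelineDestinationHttpEndpoint(
          uri: 'https://svc.us-central1.p.local:8080/route',
        ),
        networkConfig: GoogleCloudEventarcV1PipelineDestinationNetworkConfig(
          networkAttachment:
              'projects/my-project/regions/us-central1/networkAttachments/my-attachment',
        ),
      ),
    ],
    retryPolicy: GoogleCloudEventarcV1PipelineRetryPolicy(maxAttempts: 3),
    loggingConfig: LoggingConfig(logSeverity: 'INFO'),
  );

  print(jsonEncode(pipeline.toJson()));
}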
/// diff --git a/generated/googleapis/lib/fcm/v1.dart b/generated/googleapis/lib/fcm/v1.dart index 571443258..2a43ae36c 100644 --- a/generated/googleapis/lib/fcm/v1.dart +++ b/generated/googleapis/lib/fcm/v1.dart @@ -588,6 +588,21 @@ class ApnsConfig { /// default value for `apns-priority` of 10 if not explicitly set. core.Map? headers; + /// [Apple Live Activity](https://developer.apple.com/design/human-interface-guidelines/live-activities) + /// token to send updates to. + /// + /// This token can either be a push token or + /// \[push-to-start\](https://developer.apple.com/documentation/activitykit/activity/pushtostarttoken) + /// token from Apple. To start, update, or end a live activity remotely using + /// FCM, construct an \[`aps + /// payload`\](https://developer.apple.com/documentation/activitykit/starting-and-updating-live-activities-with-activitykit-push-notifications#Construct-the-payload-that-starts-a-Live-Activity) + /// and put it in the + /// \[`apns.payload`\](https://firebase.google.com/docs/reference/fcm/rest/v1/projects.messages#ApnsConfig) + /// field. + /// + /// Optional. + core.String? liveActivityToken; + /// APNs payload as a JSON object, including both `aps` dictionary and custom /// payload. /// @@ -603,6 +618,7 @@ class ApnsConfig { ApnsConfig({ this.fcmOptions, this.headers, + this.liveActivityToken, this.payload, }); @@ -619,6 +635,7 @@ class ApnsConfig { value as core.String, ), ), + liveActivityToken: json_['liveActivityToken'] as core.String?, payload: json_.containsKey('payload') ? json_['payload'] as core.Map : null, @@ -627,6 +644,7 @@ class ApnsConfig { core.Map toJson() => { if (fcmOptions != null) 'fcmOptions': fcmOptions!, if (headers != null) 'headers': headers!, + if (liveActivityToken != null) 'liveActivityToken': liveActivityToken!, if (payload != null) 'payload': payload!, }; } diff --git a/generated/googleapis/lib/file/v1.dart b/generated/googleapis/lib/file/v1.dart index 43afecc12..5dfe5e393 100644 --- a/generated/googleapis/lib/file/v1.dart +++ b/generated/googleapis/lib/file/v1.dart @@ -946,6 +946,9 @@ class ProjectsLocationsInstancesSnapshotsResource { /// [pageToken] - The next_page_token value to use if there are additional /// results to retrieve for this list request. /// + /// [returnPartialSuccess] - Optional. If true, allow partial responses for + /// multi-regional Aggregated List requests. + /// /// [$fields] - Selector specifying which fields to include in a partial /// response. /// @@ -962,6 +965,7 @@ class ProjectsLocationsInstancesSnapshotsResource { core.String? orderBy, core.int? pageSize, core.String? pageToken, + core.bool? returnPartialSuccess, core.String? $fields, }) async { final queryParams_ = >{ @@ -969,6 +973,8 @@ class ProjectsLocationsInstancesSnapshotsResource { if (orderBy != null) 'orderBy': [orderBy], if (pageSize != null) 'pageSize': ['${pageSize}'], if (pageToken != null) 'pageToken': [pageToken], + if (returnPartialSuccess != null) + 'returnPartialSuccess': ['${returnPartialSuccess}'], if ($fields != null) 'fields': [$fields], }; @@ -1045,8 +1051,8 @@ class ProjectsLocationsOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. 
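A sketch of the new ApnsConfig.liveActivityToken field from the fcm/v1.dart hunk above; the aps payload shape only illustrates the structure described in Apple's ActivityKit documentation, and the token value is a placeholder.

import 'dart:convert' show jsonEncode;

import 'package:googleapis/fcm/v1.dart';

void main() {
  final apns = ApnsConfig(
    // Push token or push-to-start token obtained from ActivityKit.
    liveActivityToken: '<live-activity-token>',
    headers: {'apns-priority': '10'},
    payload: {
      'aps': {
        'timestamp': DateTime.now().millisecondsSinceEpoch ~/ 1000,
        'event': 'update',
        'content-state': {'status': 'out_for_delivery'},
      },
    },
  );

  print(jsonEncode(apns.toJson()));
}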
+ /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// [request] - The metadata request object. /// @@ -1336,8 +1342,13 @@ class Backup { /// Input only. /// - /// Immutable. Tag key-value pairs are bound to this resource. For example: - /// "123/environment": "production", "123/costCenter": "marketing" + /// Immutable. Tag key-value pairs bound to this resource. Each key must be a + /// namespaced name and each value a short name. Example: + /// "123456789012/environment" : "production", "123456789013/costCenter" : + /// "marketing" See the documentation for more information: - Namespaced name: + /// https://cloud.google.com/resource-manager/docs/tags/tags-creating-and-managing#retrieving_tag_key + /// - Short name: + /// https://cloud.google.com/resource-manager/docs/tags/tags-creating-and-managing#retrieving_tag_value /// /// Optional. core.Map? tags; @@ -1484,30 +1495,17 @@ class FixedIOPS { /// Required. core.String? maxIops; - /// Deprecated: `max_iops` should be used instead of this parameter. - /// - /// Maximum raw read IOPS. - /// - /// Optional. - @core.Deprecated( - 'Not supported. Member documentation may have more information.', - ) - core.String? maxReadIops; - FixedIOPS({ this.maxIops, - this.maxReadIops, }); FixedIOPS.fromJson(core.Map json_) : this( maxIops: json_['maxIops'] as core.String?, - maxReadIops: json_['maxReadIops'] as core.String?, ); core.Map toJson() => { if (maxIops != null) 'maxIops': maxIops!, - if (maxReadIops != null) 'maxReadIops': maxReadIops!, }; } @@ -1520,30 +1518,17 @@ class IOPSPerTB { /// Required. core.String? maxIopsPerTb; - /// Deprecated: `max_iops_per_tb` should be used instead of this parameter. - /// - /// Maximum read IOPS per TiB. - /// - /// Optional. - @core.Deprecated( - 'Not supported. Member documentation may have more information.', - ) - core.String? maxReadIopsPerTb; - IOPSPerTB({ this.maxIopsPerTb, - this.maxReadIopsPerTb, }); IOPSPerTB.fromJson(core.Map json_) : this( maxIopsPerTb: json_['maxIopsPerTb'] as core.String?, - maxReadIopsPerTb: json_['maxReadIopsPerTb'] as core.String?, ); core.Map toJson() => { if (maxIopsPerTb != null) 'maxIopsPerTb': maxIopsPerTb!, - if (maxReadIopsPerTb != null) 'maxReadIopsPerTb': maxReadIopsPerTb!, }; } @@ -1673,8 +1658,13 @@ class Instance { /// Input only. /// - /// Immutable. Tag key-value pairs are bound to this resource. For example: - /// "123/environment": "production", "123/costCenter": "marketing" + /// Immutable. Tag key-value pairs bound to this resource. Each key must be a + /// namespaced name and each value a short name. Example: + /// "123456789012/environment" : "production", "123456789013/costCenter" : + /// "marketing" See the documentation for more information: - Namespaced name: + /// https://cloud.google.com/resource-manager/docs/tags/tags-creating-and-managing#retrieving_tag_key + /// - Short name: + /// https://cloud.google.com/resource-manager/docs/tags/tags-creating-and-managing#retrieving_tag_value /// /// Optional. core.Map? tags; @@ -1828,6 +1818,8 @@ class ListBackupsResponse { /// Not returned if there are no more results in the list. core.String? nextPageToken; + /// Unordered list. + /// /// Locations that could not be reached. core.List? unreachable; @@ -1872,6 +1864,8 @@ class ListInstancesResponse { /// Not returned if there are no more results in the list. core.String? nextPageToken; + /// Unordered list. + /// /// Locations that could not be reached. core.List? 
unreachable; @@ -1966,9 +1960,15 @@ class ListSnapshotsResponse { /// A list of snapshots in the project for the specified instance. core.List? snapshots; + /// Unordered list. + /// + /// Locations that could not be reached. + core.List? unreachable; + ListSnapshotsResponse({ this.nextPageToken, this.snapshots, + this.unreachable, }); ListSnapshotsResponse.fromJson(core.Map json_) @@ -1978,11 +1978,15 @@ class ListSnapshotsResponse { ?.map((value) => Snapshot.fromJson( value as core.Map)) .toList(), + unreachable: (json_['unreachable'] as core.List?) + ?.map((value) => value as core.String) + .toList(), ); core.Map toJson() => { if (nextPageToken != null) 'nextPageToken': nextPageToken!, if (snapshots != null) 'snapshots': snapshots!, + if (unreachable != null) 'unreachable': unreachable!, }; } @@ -2516,8 +2520,13 @@ class Snapshot { /// Input only. /// - /// Immutable. Tag key-value pairs are bound to this resource. For example: - /// "123/environment": "production", "123/costCenter": "marketing" + /// Immutable. Tag key-value pairs bound to this resource. Each key must be a + /// namespaced name and each value a short name. Example: + /// "123456789012/environment" : "production", "123456789013/costCenter" : + /// "marketing" See the documentation for more information: - Namespaced name: + /// https://cloud.google.com/resource-manager/docs/tags/tags-creating-and-managing#retrieving_tag_key + /// - Short name: + /// https://cloud.google.com/resource-manager/docs/tags/tags-creating-and-managing#retrieving_tag_value /// /// Optional. core.Map? tags; diff --git a/generated/googleapis/lib/firebaseappdistribution/v1.dart b/generated/googleapis/lib/firebaseappdistribution/v1.dart index 56abdfb93..f78c0766d 100644 --- a/generated/googleapis/lib/firebaseappdistribution/v1.dart +++ b/generated/googleapis/lib/firebaseappdistribution/v1.dart @@ -601,8 +601,8 @@ class ProjectsAppsReleasesOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// [request] - The metadata request object. /// diff --git a/generated/googleapis/lib/firebasehosting/v1.dart b/generated/googleapis/lib/firebasehosting/v1.dart index c2207c9b2..ec31eb12f 100644 --- a/generated/googleapis/lib/firebasehosting/v1.dart +++ b/generated/googleapis/lib/firebasehosting/v1.dart @@ -82,8 +82,8 @@ class OperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// [request] - The metadata request object. 
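A sketch of paging through Filestore snapshots with the new returnPartialSuccess flag, surfacing unreachable locations from ListSnapshotsResponse as shown above; the name of the required positional argument (parent) is assumed, since the full method signature sits outside this hunk.

import 'package:googleapis/file/v1.dart';

Future<List<Snapshot>> listAllSnapshots(
  ProjectsLocationsInstancesSnapshotsResource snapshots,
  String parent, // e.g. projects/my-project/locations/-/instances/my-instance
) async {
  final result = <Snapshot>[];
  String? pageToken;
  do {
    final page = await snapshots.list(
      parent,
      pageSize: 100,
      pageToken: pageToken,
      returnPartialSuccess: true, // allow partial multi-regional results
    );
    result.addAll(page.snapshots ?? const []);
    for (final location in page.unreachable ?? const <String>[]) {
      print('warning: unreachable location: $location');
    }
    pageToken = page.nextPageToken;
  } while (pageToken != null && pageToken.isNotEmpty);
  return result;
}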
/// diff --git a/generated/googleapis/lib/firebaseml/v1.dart b/generated/googleapis/lib/firebaseml/v1.dart index 3bba303a0..e4b71a8f0 100644 --- a/generated/googleapis/lib/firebaseml/v1.dart +++ b/generated/googleapis/lib/firebaseml/v1.dart @@ -67,8 +67,8 @@ class OperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// [request] - The metadata request object. /// diff --git a/generated/googleapis/lib/firebaserules/v1.dart b/generated/googleapis/lib/firebaserules/v1.dart index 46d70f216..4c806544d 100644 --- a/generated/googleapis/lib/firebaserules/v1.dart +++ b/generated/googleapis/lib/firebaserules/v1.dart @@ -273,8 +273,8 @@ class ProjectsReleasesResource { /// `projects/{project_id}/releases/{release_id}` /// Value must have pattern `^projects/\[^/\]+/releases/.*$`. /// - /// [executableVersion] - The requested runtime executable version. Defaults - /// to FIREBASE_RULES_EXECUTABLE_V1. + /// [executableVersion] - Optional. The requested runtime executable version. + /// Defaults to FIREBASE_RULES_EXECUTABLE_V1. /// Possible string values are: /// - "RELEASE_EXECUTABLE_VERSION_UNSPECIFIED" : Executable format /// unspecified. Defaults to FIREBASE_RULES_EXECUTABLE_V1 @@ -325,10 +325,10 @@ class ProjectsReleasesResource { /// `projects/{project_id}` /// Value must have pattern `^projects/\[^/\]+$`. /// - /// [filter] - `Release` filter. The list method supports filters with - /// restrictions on the `Release.name`, and `Release.ruleset_name`. Example 1: - /// A filter of 'name=prod*' might return `Release`s with names within - /// 'projects/foo' prefixed with 'prod': Name -\> Ruleset Name: * + /// [filter] - Optional. `Release` filter. The list method supports filters + /// with restrictions on the `Release.name`, and `Release.ruleset_name`. + /// Example 1: A filter of 'name=prod*' might return `Release`s with names + /// within 'projects/foo' prefixed with 'prod': Name -\> Ruleset Name: * /// projects/foo/releases/prod -\> projects/foo/rulesets/uuid1234 * /// projects/foo/releases/prod/v1 -\> projects/foo/rulesets/uuid1234 * /// projects/foo/releases/prod/v2 -\> projects/foo/rulesets/uuid8888 Example @@ -340,13 +340,14 @@ class ProjectsReleasesResource { /// examples, the filter parameters refer to the search filters are relative /// to the project. Fully qualified prefixed may also be used. /// - /// [pageSize] - Page size to load. Maximum of 100. Defaults to 10. Note: - /// `page_size` is just a hint and the service may choose to load fewer than - /// `page_size` results due to the size of the output. To traverse all of the - /// releases, the caller should iterate until the `page_token` on the response - /// is empty. + /// [pageSize] - Optional. Page size to load. Maximum of 100. Defaults to 10. + /// Note: `page_size` is just a hint and the service may choose to load fewer + /// than `page_size` results due to the size of the output. To traverse all of + /// the releases, the caller should iterate until the `page_token` on the + /// response is empty. /// - /// [pageToken] - Next page token for the next batch of `Release` instances. + /// [pageToken] - Optional. 
Next page token for the next batch of `Release` + /// instances. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. @@ -565,19 +566,19 @@ class ProjectsRulesetsResource { /// `projects/{project_id}` /// Value must have pattern `^projects/\[^/\]+$`. /// - /// [filter] - `Ruleset` filter. The list method supports filters with - /// restrictions on `Ruleset.name`. Filters on `Ruleset.create_time` should - /// use the `date` function which parses strings that conform to the RFC 3339 - /// date/time specifications. Example: `create_time > + /// [filter] - Optional. `Ruleset` filter. The list method supports filters + /// with restrictions on `Ruleset.name`. Filters on `Ruleset.create_time` + /// should use the `date` function which parses strings that conform to the + /// RFC 3339 date/time specifications. Example: `create_time > /// date("2017-01-01T00:00:00Z") AND name=UUID-*` /// - /// [pageSize] - Page size to load. Maximum of 100. Defaults to 10. Note: - /// `page_size` is just a hint and the service may choose to load less than - /// `page_size` due to the size of the output. To traverse all of the + /// [pageSize] - Optional. Page size to load. Maximum of 100. Defaults to 10. + /// Note: `page_size` is just a hint and the service may choose to load less + /// than `page_size` due to the size of the output. To traverse all of the /// releases, caller should iterate until the `page_token` is empty. /// - /// [pageToken] - Next page token for loading the next batch of `Ruleset` - /// instances. + /// [pageToken] - Optional. Next page token for loading the next batch of + /// `Ruleset` instances. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. @@ -1429,6 +1430,8 @@ class TestRulesetRequest { /// Optional `Source` to be checked for correctness. /// /// This field must not be set when the resource name refers to a `Ruleset`. + /// + /// Optional. Source? source; /// The tests to execute against the `Source`. @@ -1436,6 +1439,8 @@ class TestRulesetRequest { /// When `Source` is provided inline, the test cases will only be run if the /// `Source` is syntactically and semantically valid. Inline `TestSuite` to /// run. + /// + /// Required. TestSuite? testSuite; TestRulesetRequest({ @@ -1531,6 +1536,8 @@ class UpdateReleaseRequest { Release? release; /// Specifies which fields to update. + /// + /// Optional. core.String? updateMask; UpdateReleaseRequest({ diff --git a/generated/googleapis/lib/firestore/v1.dart b/generated/googleapis/lib/firestore/v1.dart index 5a01ff04e..ad5ab1c1e 100644 --- a/generated/googleapis/lib/firestore/v1.dart +++ b/generated/googleapis/lib/firestore/v1.dart @@ -2000,8 +2000,8 @@ class ProjectsDatabasesOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// [request] - The metadata request object. /// @@ -3460,8 +3460,8 @@ class FindNearest { /// /// The behavior of the specified `distance_measure` will affect the meaning /// of the distance threshold. Since DOT_PRODUCT distances increase when the - /// vectors are more similar, the comparison is inverted. 
For EUCLIDEAN, - /// COSINE: WHERE distance \<= distance_threshold For DOT_PRODUCT: WHERE + /// vectors are more similar, the comparison is inverted. * For EUCLIDEAN, + /// COSINE: WHERE distance \<= distance_threshold * For DOT_PRODUCT: WHERE /// distance \>= distance_threshold /// /// Optional. @@ -5816,7 +5816,7 @@ class StructuredAggregationQuery { /// A Firestore query. /// /// The query stages are executed in the following order: 1. from 2. where 3. -/// select 4. order_by + start_at + end_at 5. offset 6. limit +/// select 4. order_by + start_at + end_at 5. offset 6. limit 7. find_nearest class StructuredQuery { /// A potential prefix of a position in the result set to end the query at. /// diff --git a/generated/googleapis/lib/forms/v1.dart b/generated/googleapis/lib/forms/v1.dart index 77cec5782..083be6e30 100644 --- a/generated/googleapis/lib/forms/v1.dart +++ b/generated/googleapis/lib/forms/v1.dart @@ -1876,6 +1876,9 @@ class Question { /// already used in the form. If not provided, a new ID is assigned. core.String? questionId; + /// A respondent can choose a rating from a pre-defined set of icons. + RatingQuestion? ratingQuestion; + /// Whether the question must be answered in order for a respondent to submit /// their response. core.bool? required; @@ -1898,6 +1901,7 @@ class Question { this.fileUploadQuestion, this.grading, this.questionId, + this.ratingQuestion, this.required, this.rowQuestion, this.scaleQuestion, @@ -1924,6 +1928,10 @@ class Question { json_['grading'] as core.Map) : null, questionId: json_['questionId'] as core.String?, + ratingQuestion: json_.containsKey('ratingQuestion') + ? RatingQuestion.fromJson(json_['ratingQuestion'] + as core.Map) + : null, required: json_['required'] as core.bool?, rowQuestion: json_.containsKey('rowQuestion') ? RowQuestion.fromJson( @@ -1950,6 +1958,7 @@ class Question { 'fileUploadQuestion': fileUploadQuestion!, if (grading != null) 'grading': grading!, if (questionId != null) 'questionId': questionId!, + if (ratingQuestion != null) 'ratingQuestion': ratingQuestion!, if (required != null) 'required': required!, if (rowQuestion != null) 'rowQuestion': rowQuestion!, if (scaleQuestion != null) 'scaleQuestion': scaleQuestion!, @@ -2064,6 +2073,42 @@ class QuizSettings { }; } +/// A rating question. +/// +/// The user has a range of icons to choose from. +class RatingQuestion { + /// The icon type to use for the rating. + /// + /// Required. + /// Possible string values are: + /// - "RATING_ICON_TYPE_UNSPECIFIED" : Default value. Unused. + /// - "STAR" : A star icon. + /// - "HEART" : A heart icon. + /// - "THUMB_UP" : A thumbs down icon. + core.String? iconType; + + /// The rating scale level of the rating question. + /// + /// Required. + core.int? ratingScaleLevel; + + RatingQuestion({ + this.iconType, + this.ratingScaleLevel, + }); + + RatingQuestion.fromJson(core.Map json_) + : this( + iconType: json_['iconType'] as core.String?, + ratingScaleLevel: json_['ratingScaleLevel'] as core.int?, + ); + + core.Map toJson() => { + if (iconType != null) 'iconType': iconType!, + if (ratingScaleLevel != null) 'ratingScaleLevel': ratingScaleLevel!, + }; +} + /// Renew an existing Watch for seven days. 
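A sketch of a required rating question using the new RatingQuestion type from the forms/v1.dart hunk above (a five-level star scale).

import 'dart:convert' show jsonEncode;

import 'package:googleapis/forms/v1.dart';

void main() {
  final question = Question(
    required: true,
    ratingQuestion: RatingQuestion(
      iconType: 'STAR',
      ratingScaleLevel: 5,
    ),
  );

  print(jsonEncode(question.toJson()));
}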
typedef RenewWatchRequest = $Empty; diff --git a/generated/googleapis/lib/games/v1.dart b/generated/googleapis/lib/games/v1.dart index bbfe2f946..66bf5daf1 100644 --- a/generated/googleapis/lib/games/v1.dart +++ b/generated/googleapis/lib/games/v1.dart @@ -14,7 +14,7 @@ /// Google Play Game Services - v1 /// -/// The Google Play games service allows developers to enhance games with social +/// The Google Play Games Service allows developers to enhance games with social /// leaderboards, achievements, game state, sign-in with Google, and more. /// /// For more information, see @@ -48,7 +48,7 @@ import '../src/user_agent.dart'; export 'package:_discoveryapis_commons/_discoveryapis_commons.dart' show ApiRequestError, DetailedApiRequestError; -/// The Google Play games service allows developers to enhance games with social +/// The Google Play Games Service allows developers to enhance games with social /// leaderboards, achievements, game state, sign-in with Google, and more. class GamesApi { /// View and manage your Google Play Developer account diff --git a/generated/googleapis/lib/gamesmanagement/v1management.dart b/generated/googleapis/lib/gamesmanagement/v1management.dart index 1693ef3fb..7666f79c3 100644 --- a/generated/googleapis/lib/gamesmanagement/v1management.dart +++ b/generated/googleapis/lib/gamesmanagement/v1management.dart @@ -14,7 +14,7 @@ /// Google Play Game Management - v1management /// -/// The Google Play Game Management API allows developers to manage resources +/// The Google Play Games Management API allows developers to manage resources /// from the Google Play Game service. /// /// For more information, see @@ -40,7 +40,7 @@ import '../src/user_agent.dart'; export 'package:_discoveryapis_commons/_discoveryapis_commons.dart' show ApiRequestError, DetailedApiRequestError; -/// The Google Play Game Management API allows developers to manage resources +/// The Google Play Games Management API allows developers to manage resources /// from the Google Play Game service. class GamesManagementApi { /// Create, edit, and delete your Google Play Games activity diff --git a/generated/googleapis/lib/gkebackup/v1.dart b/generated/googleapis/lib/gkebackup/v1.dart index 4387d580e..768667084 100644 --- a/generated/googleapis/lib/gkebackup/v1.dart +++ b/generated/googleapis/lib/gkebackup/v1.dart @@ -1320,8 +1320,8 @@ class ProjectsLocationsOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// [request] - The metadata request object. /// @@ -3426,13 +3426,14 @@ class ClusterMetadata { /// /// Some group kinds are not reasonable choices for a restore, and will cause an /// error if selected here. Any scope selection that would restore "all valid" -/// resources automatically excludes these group kinds. - -/// gkebackup.gke.io/BackupJob - gkebackup.gke.io/RestoreJob - +/// resources automatically excludes these group kinds. 
- Node - ComponentStatus +/// - gkebackup.gke.io/BackupJob - gkebackup.gke.io/RestoreJob - /// metrics.k8s.io/NodeMetrics - migration.k8s.io/StorageState - -/// migration.k8s.io/StorageVersionMigration - Node - -/// snapshot.storage.k8s.io/VolumeSnapshotContent - storage.k8s.io/CSINode Some -/// group kinds are driven by restore configuration elsewhere, and will cause an -/// error if selected here. - Namespace - PersistentVolume +/// migration.k8s.io/StorageVersionMigration - +/// snapshot.storage.k8s.io/VolumeSnapshotContent - storage.k8s.io/CSINode - +/// storage.k8s.io/VolumeAttachment Some group kinds are driven by restore +/// configuration elsewhere, and will cause an error if selected here. - +/// Namespace - PersistentVolume class ClusterResourceRestoreScope { /// If True, all valid cluster-scoped resources will be restored. /// diff --git a/generated/googleapis/lib/gkehub/v1.dart b/generated/googleapis/lib/gkehub/v1.dart index 029d91381..ce4a78371 100644 --- a/generated/googleapis/lib/gkehub/v1.dart +++ b/generated/googleapis/lib/gkehub/v1.dart @@ -2013,8 +2013,8 @@ class ProjectsLocationsOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// [request] - The metadata request object. /// @@ -3286,6 +3286,18 @@ class Authority { convert.base64.encode(bytes_).replaceAll('/', '_').replaceAll('+', '-'); } + /// The identity provider for the scope-tenancy workload identity pool. + /// + /// Optional. Output only. + core.String? scopeTenancyIdentityProvider; + + /// The name of the scope-tenancy workload identity pool. + /// + /// This pool is set in the fleet-level feature. + /// + /// Optional. Output only. + core.String? scopeTenancyWorkloadIdentityPool; + /// The name of the workload identity pool in which `issuer` will be /// recognized. /// @@ -3301,6 +3313,8 @@ class Authority { this.identityProvider, this.issuer, this.oidcJwks, + this.scopeTenancyIdentityProvider, + this.scopeTenancyWorkloadIdentityPool, this.workloadIdentityPool, }); @@ -3309,6 +3323,10 @@ class Authority { identityProvider: json_['identityProvider'] as core.String?, issuer: json_['issuer'] as core.String?, oidcJwks: json_['oidcJwks'] as core.String?, + scopeTenancyIdentityProvider: + json_['scopeTenancyIdentityProvider'] as core.String?, + scopeTenancyWorkloadIdentityPool: + json_['scopeTenancyWorkloadIdentityPool'] as core.String?, workloadIdentityPool: json_['workloadIdentityPool'] as core.String?, ); @@ -3316,6 +3334,10 @@ class Authority { if (identityProvider != null) 'identityProvider': identityProvider!, if (issuer != null) 'issuer': issuer!, if (oidcJwks != null) 'oidcJwks': oidcJwks!, + if (scopeTenancyIdentityProvider != null) + 'scopeTenancyIdentityProvider': scopeTenancyIdentityProvider!, + if (scopeTenancyWorkloadIdentityPool != null) + 'scopeTenancyWorkloadIdentityPool': scopeTenancyWorkloadIdentityPool!, if (workloadIdentityPool != null) 'workloadIdentityPool': workloadIdentityPool!, }; @@ -4084,7 +4106,13 @@ class ConfigManagementConfigSync { /// The GSA should have the Monitoring Metric Writer /// (roles/monitoring.metricWriter) IAM role. 
The Kubernetes ServiceAccount /// `default` in the namespace `config-management-monitoring` should be bound - /// to the GSA. + /// to the GSA. Deprecated: If Workload Identity Federation for GKE is + /// enabled, Google Cloud Service Account is no longer needed for exporting + /// Config Sync metrics: + /// https://cloud.google.com/kubernetes-engine/enterprise/config-sync/docs/how-to/monitor-config-sync-cloud-monitoring#custom-monitoring. + @core.Deprecated( + 'Not supported. Member documentation may have more information.', + ) core.String? metricsGcpServiceAccountEmail; /// OCI repo configuration for the cluster @@ -4100,6 +4128,11 @@ class ConfigManagementConfigSync { /// "unstructured" mode. core.String? sourceFormat; + /// Set to true to stop syncing configs for a single cluster. + /// + /// Default to false. + core.bool? stopSyncing; + ConfigManagementConfigSync({ this.allowVerticalScale, this.enabled, @@ -4108,6 +4141,7 @@ class ConfigManagementConfigSync { this.oci, this.preventDrift, this.sourceFormat, + this.stopSyncing, }); ConfigManagementConfigSync.fromJson(core.Map json_) @@ -4126,6 +4160,7 @@ class ConfigManagementConfigSync { : null, preventDrift: json_['preventDrift'] as core.bool?, sourceFormat: json_['sourceFormat'] as core.String?, + stopSyncing: json_['stopSyncing'] as core.bool?, ); core.Map toJson() => { @@ -4138,6 +4173,7 @@ class ConfigManagementConfigSync { if (oci != null) 'oci': oci!, if (preventDrift != null) 'preventDrift': preventDrift!, if (sourceFormat != null) 'sourceFormat': sourceFormat!, + if (stopSyncing != null) 'stopSyncing': stopSyncing!, }; } @@ -4280,6 +4316,11 @@ class ConfigManagementConfigSyncState { /// level. core.String? clusterLevelStopSyncingState; + /// The number of RootSync and RepoSync CRs in the cluster. + /// + /// Output only. + core.int? crCount; + /// Information about the deployment of ConfigSync, including the version of /// the various Pods deployed ConfigManagementConfigSyncDeploymentState? deploymentState; @@ -4325,6 +4366,7 @@ class ConfigManagementConfigSyncState { ConfigManagementConfigSyncState({ this.clusterLevelStopSyncingState, + this.crCount, this.deploymentState, this.errors, this.reposyncCrd, @@ -4338,6 +4380,7 @@ class ConfigManagementConfigSyncState { : this( clusterLevelStopSyncingState: json_['clusterLevelStopSyncingState'] as core.String?, + crCount: json_['crCount'] as core.int?, deploymentState: json_.containsKey('deploymentState') ? ConfigManagementConfigSyncDeploymentState.fromJson( json_['deploymentState'] @@ -4363,6 +4406,7 @@ class ConfigManagementConfigSyncState { core.Map toJson() => { if (clusterLevelStopSyncingState != null) 'clusterLevelStopSyncingState': clusterLevelStopSyncingState!, + if (crCount != null) 'crCount': crCount!, if (deploymentState != null) 'deploymentState': deploymentState!, if (errors != null) 'errors': errors!, if (reposyncCrd != null) 'reposyncCrd': reposyncCrd!, @@ -6887,6 +6931,15 @@ class Membership { /// Optional. Authority? authority; + /// The tier of the cluster. + /// + /// Output only. + /// Possible string values are: + /// - "CLUSTER_TIER_UNSPECIFIED" : The ClusterTier is not set. + /// - "STANDARD" : The ClusterTier is standard. + /// - "ENTERPRISE" : The ClusterTier is enterprise. + core.String? clusterTier; + /// When the Membership was created. /// /// Output only. @@ -6973,6 +7026,7 @@ class Membership { Membership({ this.authority, + this.clusterTier, this.createTime, this.deleteTime, this.description, @@ -6993,6 +7047,7 @@ class Membership { ? 
Authority.fromJson( json_['authority'] as core.Map) : null, + clusterTier: json_['clusterTier'] as core.String?, createTime: json_['createTime'] as core.String?, deleteTime: json_['deleteTime'] as core.String?, description: json_['description'] as core.String?, @@ -7024,6 +7079,7 @@ class Membership { core.Map toJson() => { if (authority != null) 'authority': authority!, + if (clusterTier != null) 'clusterTier': clusterTier!, if (createTime != null) 'createTime': createTime!, if (deleteTime != null) 'deleteTime': deleteTime!, if (description != null) 'description': description!, @@ -9054,6 +9110,15 @@ class ServiceMeshDataPlaneManagement { /// **Service Mesh**: Spec for a single Membership for the servicemesh feature class ServiceMeshMembershipSpec { + /// Specifies the API that will be used for configuring the mesh workloads. + /// + /// Optional. + /// Possible string values are: + /// - "CONFIG_API_UNSPECIFIED" : Unspecified + /// - "CONFIG_API_ISTIO" : Use the Istio API for configuration. + /// - "CONFIG_API_GATEWAY" : Use the K8s Gateway API for configuration. + core.String? configApi; + /// Deprecated: use `management` instead Enables automatic control plane /// management. /// Possible string values are: @@ -9081,17 +9146,20 @@ class ServiceMeshMembershipSpec { core.String? management; ServiceMeshMembershipSpec({ + this.configApi, this.controlPlane, this.management, }); ServiceMeshMembershipSpec.fromJson(core.Map json_) : this( + configApi: json_['configApi'] as core.String?, controlPlane: json_['controlPlane'] as core.String?, management: json_['management'] as core.String?, ); core.Map toJson() => { + if (configApi != null) 'configApi': configApi!, if (controlPlane != null) 'controlPlane': controlPlane!, if (management != null) 'management': management!, }; diff --git a/generated/googleapis/lib/gkehub/v2.dart b/generated/googleapis/lib/gkehub/v2.dart index 5611556fb..5c4564a99 100644 --- a/generated/googleapis/lib/gkehub/v2.dart +++ b/generated/googleapis/lib/gkehub/v2.dart @@ -450,8 +450,8 @@ class ProjectsLocationsOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// [request] - The metadata request object. /// @@ -844,7 +844,13 @@ class ConfigManagementConfigSync { /// The GSA should have the Monitoring Metric Writer /// (roles/monitoring.metricWriter) IAM role. The Kubernetes ServiceAccount /// `default` in the namespace `config-management-monitoring` should be bound - /// to the GSA. + /// to the GSA. Deprecated: If Workload Identity Federation for GKE is + /// enabled, Google Cloud Service Account is no longer needed for exporting + /// Config Sync metrics: + /// https://cloud.google.com/kubernetes-engine/enterprise/config-sync/docs/how-to/monitor-config-sync-cloud-monitoring#custom-monitoring. + @core.Deprecated( + 'Not supported. Member documentation may have more information.', + ) core.String? metricsGcpServiceAccountEmail; /// OCI repo configuration for the cluster. @@ -860,6 +866,11 @@ class ConfigManagementConfigSync { /// "unstructured" mode. core.String? sourceFormat; + /// Set to true to stop syncing configs for a single cluster. 
+ /// + /// Default to false. + core.bool? stopSyncing; + ConfigManagementConfigSync({ this.allowVerticalScale, this.enabled, @@ -868,6 +879,7 @@ class ConfigManagementConfigSync { this.oci, this.preventDrift, this.sourceFormat, + this.stopSyncing, }); ConfigManagementConfigSync.fromJson(core.Map json_) @@ -886,6 +898,7 @@ class ConfigManagementConfigSync { : null, preventDrift: json_['preventDrift'] as core.bool?, sourceFormat: json_['sourceFormat'] as core.String?, + stopSyncing: json_['stopSyncing'] as core.bool?, ); core.Map toJson() => { @@ -898,6 +911,7 @@ class ConfigManagementConfigSync { if (oci != null) 'oci': oci!, if (preventDrift != null) 'preventDrift': preventDrift!, if (sourceFormat != null) 'sourceFormat': sourceFormat!, + if (stopSyncing != null) 'stopSyncing': stopSyncing!, }; } @@ -1049,6 +1063,11 @@ class ConfigManagementConfigSyncState { /// level. core.String? clusterLevelStopSyncingState; + /// The number of RootSync and RepoSync CRs in the cluster. + /// + /// Output only. + core.int? crCount; + /// Information about the deployment of ConfigSync, including the version. /// /// of the various Pods deployed @@ -1095,6 +1114,7 @@ class ConfigManagementConfigSyncState { ConfigManagementConfigSyncState({ this.clusterLevelStopSyncingState, + this.crCount, this.deploymentState, this.errors, this.reposyncCrd, @@ -1108,6 +1128,7 @@ class ConfigManagementConfigSyncState { : this( clusterLevelStopSyncingState: json_['clusterLevelStopSyncingState'] as core.String?, + crCount: json_['crCount'] as core.int?, deploymentState: json_.containsKey('deploymentState') ? ConfigManagementConfigSyncDeploymentState.fromJson( json_['deploymentState'] @@ -1133,6 +1154,7 @@ class ConfigManagementConfigSyncState { core.Map toJson() => { if (clusterLevelStopSyncingState != null) 'clusterLevelStopSyncingState': clusterLevelStopSyncingState!, + if (crCount != null) 'crCount': crCount!, if (deploymentState != null) 'deploymentState': deploymentState!, if (errors != null) 'errors': errors!, if (reposyncCrd != null) 'reposyncCrd': reposyncCrd!, @@ -1923,44 +1945,6 @@ class ConfigManagementSyncState { /// (google.protobuf.Empty); } typedef Empty = $Empty; -/// Information of the FeatureConfig applied on the MembershipFeature. -class FeatureConfigRef { - /// Input only. - /// - /// Resource name of FeatureConfig, in the format: - /// `projects/{project}/locations/global/featureConfigs/{feature_config}`. - core.String? config; - - /// When the FeatureConfig was last applied and copied to FeatureSpec. - /// - /// Output only. - core.String? configUpdateTime; - - /// An id that uniquely identify a FeatureConfig object. - /// - /// Output only. - core.String? uuid; - - FeatureConfigRef({ - this.config, - this.configUpdateTime, - this.uuid, - }); - - FeatureConfigRef.fromJson(core.Map json_) - : this( - config: json_['config'] as core.String?, - configUpdateTime: json_['configUpdateTime'] as core.String?, - uuid: json_['uuid'] as core.String?, - ); - - core.Map toJson() => { - if (config != null) 'config': config!, - if (configUpdateTime != null) 'configUpdateTime': configUpdateTime!, - if (uuid != null) 'uuid': uuid!, - }; -} - /// FeatureSpec contains user input per-feature spec information. class FeatureSpec { /// Cloudbuild-specific FeatureSpec. @@ -2592,10 +2576,6 @@ class MembershipFeature { /// Output only. core.String? deleteTime; - /// Reference information for a FeatureConfig applied on the - /// MembershipFeature. - FeatureConfigRef? featureConfigRef; - /// GCP labels for this MembershipFeature. 
core.Map? labels; @@ -2614,6 +2594,8 @@ class MembershipFeature { core.String? name; /// Spec of this membershipFeature. + /// + /// Optional. FeatureSpec? spec; /// State of the this membershipFeature. @@ -2629,7 +2611,6 @@ class MembershipFeature { MembershipFeature({ this.createTime, this.deleteTime, - this.featureConfigRef, this.labels, this.lifecycleState, this.name, @@ -2642,10 +2623,6 @@ class MembershipFeature { : this( createTime: json_['createTime'] as core.String?, deleteTime: json_['deleteTime'] as core.String?, - featureConfigRef: json_.containsKey('featureConfigRef') - ? FeatureConfigRef.fromJson(json_['featureConfigRef'] - as core.Map) - : null, labels: (json_['labels'] as core.Map?)?.map( (key, value) => core.MapEntry( @@ -2672,7 +2649,6 @@ class MembershipFeature { core.Map toJson() => { if (createTime != null) 'createTime': createTime!, if (deleteTime != null) 'deleteTime': deleteTime!, - if (featureConfigRef != null) 'featureConfigRef': featureConfigRef!, if (labels != null) 'labels': labels!, if (lifecycleState != null) 'lifecycleState': lifecycleState!, if (name != null) 'name': name!, diff --git a/generated/googleapis/lib/gkeonprem/v1.dart b/generated/googleapis/lib/gkeonprem/v1.dart index ace7c3828..de21cb4ab 100644 --- a/generated/googleapis/lib/gkeonprem/v1.dart +++ b/generated/googleapis/lib/gkeonprem/v1.dart @@ -2309,8 +2309,8 @@ class ProjectsLocationsOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// [request] - The metadata request object. /// @@ -2490,6 +2490,71 @@ class ProjectsLocationsVmwareAdminClustersResource { ProjectsLocationsVmwareAdminClustersResource(commons.ApiRequester client) : _requester = client; + /// Creates a new VMware admin cluster in a given project and location. + /// + /// The API needs to be combined with creating a bootstrap cluster to work. + /// + /// [request] - The metadata request object. + /// + /// Request parameters: + /// + /// [parent] - Required. The parent of the project and location where the + /// cluster is created in. Format: "projects/{project}/locations/{location}" + /// Value must have pattern `^projects/\[^/\]+/locations/\[^/\]+$`. + /// + /// [allowPreflightFailure] - Optional. If set to true, CLM will force CCFE to + /// persist the cluster resource in RMS when the creation fails during + /// standalone preflight checks. In that case the subsequent create call will + /// fail with "cluster already exists" error and hence a update cluster is + /// required to fix the cluster. + /// + /// [validateOnly] - Validate the request without actually doing any updates. + /// + /// [vmwareAdminClusterId] - Required. User provided identifier that is used + /// as part of the resource name; must conform to RFC-1034 and additionally + /// restrict to lower-cased letters. This comes out roughly to: + /// /^a-z+\[a-z0-9\]$/ + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [Operation]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. 
+ /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future create( + VmwareAdminCluster request, + core.String parent, { + core.bool? allowPreflightFailure, + core.bool? validateOnly, + core.String? vmwareAdminClusterId, + core.String? $fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if (allowPreflightFailure != null) + 'allowPreflightFailure': ['${allowPreflightFailure}'], + if (validateOnly != null) 'validateOnly': ['${validateOnly}'], + if (vmwareAdminClusterId != null) + 'vmwareAdminClusterId': [vmwareAdminClusterId], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = + 'v1/' + core.Uri.encodeFull('$parent') + '/vmwareAdminClusters'; + + final response_ = await _requester.request( + url_, + 'POST', + body: body_, + queryParams: queryParams_, + ); + return Operation.fromJson(response_ as core.Map); + } + /// Enrolls an existing VMware admin cluster to the Anthos On-Prem API within /// a given project and location. /// @@ -8800,7 +8865,23 @@ class VmwareAdminManualLbConfig { /// load balancer. /// /// For admin clusters, currently no configurations is needed. -typedef VmwareAdminMetalLbConfig = $Empty; +class VmwareAdminMetalLbConfig { + /// Whether MetalLB is enabled. + core.bool? enabled; + + VmwareAdminMetalLbConfig({ + this.enabled, + }); + + VmwareAdminMetalLbConfig.fromJson(core.Map json_) + : this( + enabled: json_['enabled'] as core.bool?, + ); + + core.Map toJson() => { + if (enabled != null) 'enabled': enabled!, + }; +} /// VmwareAdminNetworkConfig contains network configuration for VMware admin /// cluster. diff --git a/generated/googleapis/lib/healthcare/v1.dart b/generated/googleapis/lib/healthcare/v1.dart index b7f50b4d7..13f41c627 100644 --- a/generated/googleapis/lib/healthcare/v1.dart +++ b/generated/googleapis/lib/healthcare/v1.dart @@ -4695,6 +4695,120 @@ class ProjectsLocationsDatasetsFhirStoresResource { ProjectsLocationsDatasetsFhirStoresResource(commons.ApiRequester client) : _requester = client; + /// Applies the admin Consent resources for the FHIR store and reindexes the + /// underlying resources in the FHIR store according to the aggregate + /// consents. + /// + /// This method also updates the `consent_config.enforced_admin_consents` + /// field of the FhirStore unless `validate_only=true` in + /// ApplyAdminConsentsRequest. Any admin Consent resource change after this + /// operation execution (including deletion) requires you to call + /// ApplyAdminConsents again for the change to take effect. This method + /// returns an Operation that can be used to track the progress of the + /// resources that were reindexed, by calling GetOperation. Upon completion, + /// the ApplyAdminConsentsResponse additionally contains the number of + /// resources that were reindexed. If at least one Consent resource contains + /// an error or fails be be enforced for any reason, the method returns an + /// error instead of an Operation. No resources will be reindexed and the + /// `consent_config.enforced_admin_consents` field will be unchanged. To + /// enforce a consent check for data access, `consent_config.access_enforced` + /// must be set to true for the FhirStore. + /// + /// [request] - The metadata request object. + /// + /// Request parameters: + /// + /// [name] - Required. 
The name of the FHIR store to enforce, in the format + /// `projects/{project_id}/locations/{location_id}/datasets/{dataset_id}/fhirStores/{fhir_store_id}`. + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/datasets/\[^/\]+/fhirStores/\[^/\]+$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [Operation]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future applyAdminConsents( + ApplyAdminConsentsRequest request, + core.String name, { + core.String? $fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name') + ':applyAdminConsents'; + + final response_ = await _requester.request( + url_, + 'POST', + body: body_, + queryParams: queryParams_, + ); + return Operation.fromJson(response_ as core.Map); + } + + /// Apply the Consent resources for the FHIR store and reindex the underlying + /// resources in the FHIR store according to the aggregate consent. + /// + /// The aggregate consent of the patient in scope in this request replaces any + /// previous call of this method. Any Consent resource change after this + /// operation execution (including deletion) requires you to call + /// ApplyConsents again to have effect. This method returns an Operation that + /// can be used to track the progress of the consent resources that were + /// processed by calling GetOperation. Upon completion, the + /// ApplyConsentsResponse additionally contains the number of resources that + /// was reindexed. Errors are logged to Cloud Logging (see + /// [Viewing error logs in Cloud Logging](https://cloud.google.com/healthcare/docs/how-tos/logging)). + /// To enforce consent check for data access, `consent_config.access_enforced` + /// must be set to true for the FhirStore. + /// + /// [request] - The metadata request object. + /// + /// Request parameters: + /// + /// [name] - Required. The name of the FHIR store to enforce, in the format + /// `projects/{project_id}/locations/{location_id}/datasets/{dataset_id}/fhirStores/{fhir_store_id}`. + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/datasets/\[^/\]+/fhirStores/\[^/\]+$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [Operation]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future applyConsents( + ApplyConsentsRequest request, + core.String name, { + core.String? $fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name') + ':applyConsents'; + + final response_ = await _requester.request( + url_, + 'POST', + body: body_, + queryParams: queryParams_, + ); + return Operation.fromJson(response_ as core.Map); + } + /// Creates a new FHIR store within the parent dataset. /// /// [request] - The metadata request object. 
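The hunk above adds `applyAdminConsents` and `applyConsents` to the FHIR store resource; both reindex the store against the given Consent resources and return a long-running Operation. As a rough usage sketch (not part of the generated sources): the project, location, dataset, store, and Consent IDs below are placeholders, and an authenticated `ProjectsLocationsDatasetsFhirStoresResource` is assumed to come from an existing client.

import 'package:googleapis/healthcare/v1.dart';

/// Dry-runs enforcement of a single admin Consent on a FHIR store.
/// All resource names below are placeholders.
Future<void> validateAdminConsents(
    ProjectsLocationsDatasetsFhirStoresResource fhirStores) async {
  const store = 'projects/my-project/locations/us-central1/'
      'datasets/my-dataset/fhirStores/my-store';
  final request = ApplyAdminConsentsRequest(
    newConsentsList: AdminConsents(
      names: ['$store/fhir/Consent/admin-consent-id/_history/1'],
    ),
    // With validateOnly=true the method only checks that the Consents are
    // supported; no resources are reindexed and enforced_admin_consents
    // is left unchanged.
    validateOnly: true,
  );
  final operation = await fhirStores.applyAdminConsents(request, store);
  print('Started operation: ${operation.name}');
}
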
@@ -4826,6 +4940,50 @@ class ProjectsLocationsDatasetsFhirStoresResource { return Empty.fromJson(response_ as core.Map); } + /// Explains all the permitted/denied actor, purpose and environment for a + /// given resource. + /// + /// Request parameters: + /// + /// [name] - Required. The name of the FHIR store to enforce, in the format + /// `projects/{project_id}/locations/{location_id}/datasets/{dataset_id}/fhirStores/{fhir_store_id}`. + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/datasets/\[^/\]+/fhirStores/\[^/\]+$`. + /// + /// [resourceId] - Required. The ID (`{resourceType}/{id}`) of the resource to + /// explain data access on. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [ExplainDataAccessResponse]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future explainDataAccess( + core.String name, { + core.String? resourceId, + core.String? $fields, + }) async { + final queryParams_ = >{ + if (resourceId != null) 'resourceId': [resourceId], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name') + ':explainDataAccess'; + + final response_ = await _requester.request( + url_, + 'GET', + queryParams: queryParams_, + ); + return ExplainDataAccessResponse.fromJson( + response_ as core.Map); + } + /// Export resources from the FHIR store to the specified destination. /// /// This method returns an Operation that can be used to track the status of @@ -5402,7 +5560,7 @@ class ProjectsLocationsDatasetsFhirStoresFhirResource { /// `DSTU2`) will be populated from the `X-Security-Context` header if it /// exists. At this time `securityContext` has no special behavior in the /// Cloud Healthcare API. Note: the limit on data ingested through this method - /// is 2 GB. For best performance, use a non-FHIR data type instead of + /// is 1 GB. For best performance, use a non-FHIR data type instead of /// wrapping the data in a Binary resource. Some of the Healthcare API /// features, such as /// [exporting to BigQuery](https://cloud.google.com/healthcare-api/docs/how-tos/fhir-export-bigquery) @@ -5626,6 +5784,108 @@ class ProjectsLocationsDatasetsFhirStoresFhirResource { return HttpBody.fromJson(response_ as core.Map); } + /// Returns the consent enforcement status of a single consent resource. + /// + /// On success, the response body contains a JSON-encoded representation of a + /// `Parameters` (http://hl7.org/fhir/parameters.html) FHIR resource, + /// containing the current enforcement status. Does not support DSTU2. + /// + /// Request parameters: + /// + /// [name] - Required. The name of the consent resource to find enforcement + /// status, in the format + /// `projects/{project_id}/locations/{location_id}/datasets/{dataset_id}/fhirStores/{fhir_store_id}/fhir/Consent/{consent_id}` + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/datasets/\[^/\]+/fhirStores/\[^/\]+/fhir/Consent/\[^/\]+$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [HttpBody]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. 
+ /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future ConsentEnforcementStatus( + core.String name, { + core.String? $fields, + }) async { + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = + 'v1/' + core.Uri.encodeFull('$name') + '/\$consent-enforcement-status'; + + final response_ = await _requester.request( + url_, + 'GET', + queryParams: queryParams_, + ); + return HttpBody.fromJson(response_ as core.Map); + } + + /// Returns the consent enforcement status of all consent resources for a + /// patient. + /// + /// On success, the response body contains a JSON-encoded representation of a + /// bundle of `Parameters` (http://hl7.org/fhir/parameters.html) FHIR + /// resources, containing the current enforcement status for each consent + /// resource of the patient. Does not support DSTU2. + /// + /// Request parameters: + /// + /// [name] - Required. The name of the patient to find enforcement statuses, + /// in the format + /// `projects/{project_id}/locations/{location_id}/datasets/{dataset_id}/fhirStores/{fhir_store_id}/fhir/Patient/{patient_id}` + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/datasets/\[^/\]+/fhirStores/\[^/\]+/fhir/Patient/\[^/\]+$`. + /// + /// [P_count] - Optional. The maximum number of results on a page. If not + /// specified, 100 is used. May not be larger than 1000. + /// + /// [P_pageToken] - Optional. Used to retrieve the first, previous, next, or + /// last page of consent enforcement statuses when using pagination. Value + /// should be set to the value of `_page_token` set in next or previous page + /// links' URLs. Next and previous page are returned in the response bundle's + /// links field, where `link.relation` is "previous" or "next". Omit + /// `_page_token` if no previous request has been made. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [HttpBody]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future PatientConsentEnforcementStatus( + core.String name, { + core.int? P_count, + core.String? P_pageToken, + core.String? $fields, + }) async { + final queryParams_ = >{ + if (P_count != null) '_count': ['${P_count}'], + if (P_pageToken != null) '_page_token': [P_pageToken], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = + 'v1/' + core.Uri.encodeFull('$name') + '/\$consent-enforcement-status'; + + final response_ = await _requester.request( + url_, + 'GET', + queryParams: queryParams_, + ); + return HttpBody.fromJson(response_ as core.Map); + } + /// Retrieves a Patient resource and resources related to that patient. /// /// Implements the FHIR extended operation Patient-everything @@ -7838,8 +8098,8 @@ class ProjectsLocationsDatasetsOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. 
/// /// [request] - The metadata request object. /// @@ -8036,6 +8296,51 @@ class ProjectsLocationsServicesNlpResource { } } +/// Configures consent audit log config for FHIR create, read, update, and +/// delete (CRUD) operations. +/// +/// Cloud audit log for healthcare API must be +/// [enabled](https://cloud.google.com/logging/docs/audit/configure-data-access#config-console-enable). +/// The consent-related logs are included as part of `protoPayload.metadata`. +class AccessDeterminationLogConfig { + /// Controls the amount of detail to include as part of the audit logs. + /// + /// Optional. + /// Possible string values are: + /// - "LOG_LEVEL_UNSPECIFIED" : No log level specified. This value is unused. + /// - "DISABLED" : No additional consent-related logging is added to audit + /// logs. + /// - "MINIMUM" : The following information is included: * One of the + /// following + /// \[`consentMode`\](https://cloud.google.com/healthcare-api/docs/fhir-consent#audit_logs) + /// fields: (`off`|`emptyScope`|`enforced`|`btg`|`bypass`). * The accessor's + /// request headers * The `log_level` of the AccessDeterminationLogConfig * + /// The final consent evaluation (`PERMIT`, `DENY`, or `NO_CONSENT`) * A + /// human-readable summary of the evaluation + /// - "VERBOSE" : Includes `MINIMUM` and, for each resource owner, returns: * + /// The resource owner's name * Most specific part of the `X-Consent-Scope` + /// resulting in consensual determination * Timestamp of the applied + /// enforcement leading to the decision * Enforcement version at the time the + /// applicable consents were applied * The Consent resource name * The + /// timestamp of the Consent resource used for enforcement * Policy type + /// (`PATIENT` or `ADMIN`) Note that this mode adds some overhead to CRUD + /// operations. + core.String? logLevel; + + AccessDeterminationLogConfig({ + this.logLevel, + }); + + AccessDeterminationLogConfig.fromJson(core.Map json_) + : this( + logLevel: json_['logLevel'] as core.String?, + ); + + core.Map toJson() => { + if (logLevel != null) 'logLevel': logLevel!, + }; +} + /// Activates the latest revision of the specified Consent by committing a new /// revision with `state` updated to `ACTIVE`. /// @@ -8079,6 +8384,33 @@ class ActivateConsentRequest { }; } +/// List of admin Consent resources to be applied. +class AdminConsents { + /// The versioned names of the admin Consent resource(s), in the format + /// `projects/{project_id}/locations/{location}/datasets/{dataset_id}/fhirStores/{fhir_store_id}/fhir/Consent/{resource_id}/_history/{version_id}`. + /// + /// For FHIR stores with `disable_resource_versioning=true`, the format is + /// `projects/{project_id}/locations/{location}/datasets/{dataset_id}/fhirStores/{fhir_store_id}/fhir/Consent/{resource_id}`. + /// + /// Optional. + core.List? names; + + AdminConsents({ + this.names, + }); + + AdminConsents.fromJson(core.Map json_) + : this( + names: (json_['names'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + ); + + core.Map toJson() => { + if (names != null) 'names': names!, + }; +} + /// The request to analyze healthcare entities in a document. class AnalyzeEntitiesRequest { /// Alternative output format to be generated based on the results of @@ -8176,6 +8508,119 @@ class AnalyzeEntitiesResponse { }; } +/// Request to apply the admin Consent resources for the specified FHIR store. +class ApplyAdminConsentsRequest { + /// A new list of admin Consent resources to be applied. 
+ /// + /// Any existing enforced Consents, which are specified in + /// `consent_config.enforced_admin_consents` of the FhirStore, that are not + /// part of this list will be disabled. An empty list is equivalent to + /// clearing or disabling all Consents enforced on the FHIR store. When a FHIR + /// store has `disable_resource_versioning=true` and this list contains a + /// Consent resource that exists in `consent_config.enforced_admin_consents`, + /// the method enforces any updates to the existing resource since the last + /// enforcement. If the existing resource hasn't been updated since the last + /// enforcement, the resource is unaffected. After the method finishes, the + /// resulting consent enforcement model is determined by the contents of the + /// Consent resource(s) when the method was called: * When + /// `disable_resource_versioning=true`, the result is identical to the current + /// resource(s) in the FHIR store. * When `disable_resource_versioning=false`, + /// the result is based on the historical version(s) of the Consent + /// resource(s) at the point in time when the method was called. At most 200 + /// Consents can be specified. + AdminConsents? newConsentsList; + + /// If true, the method only validates Consent resources to make sure they are + /// supported. + /// + /// Otherwise, the method applies the aggregate consent information to update + /// the enforcement model and reindex the FHIR resources. If all Consent + /// resources can be applied successfully, the ApplyAdminConsentsResponse is + /// returned containing the following fields: * `consent_apply_success` to + /// indicate the number of Consent resources applied. * `affected_resources` + /// to indicate the number of resources that might have had their consent + /// access changed. If, however, one or more Consent resources are unsupported + /// or cannot be applied, the method fails and ApplyAdminConsentsErrorDetail + /// is is returned with details about the unsupported Consent resources. + /// + /// Optional. + core.bool? validateOnly; + + ApplyAdminConsentsRequest({ + this.newConsentsList, + this.validateOnly, + }); + + ApplyAdminConsentsRequest.fromJson(core.Map json_) + : this( + newConsentsList: json_.containsKey('newConsentsList') + ? AdminConsents.fromJson(json_['newConsentsList'] + as core.Map) + : null, + validateOnly: json_['validateOnly'] as core.bool?, + ); + + core.Map toJson() => { + if (newConsentsList != null) 'newConsentsList': newConsentsList!, + if (validateOnly != null) 'validateOnly': validateOnly!, + }; +} + +/// Request to apply the Consent resources for the specified FHIR store. +class ApplyConsentsRequest { + /// Scope down to a list of patients. + /// + /// Optional. + PatientScope? patientScope; + + /// Scope down to patients whose most recent consent changes are in the time + /// range. + /// + /// Can only be used with a versioning store (i.e. when + /// disable_resource_versioning is set to false). + /// + /// Optional. + TimeRange? timeRange; + + /// If true, the method only validates Consent resources to make sure they are + /// supported. + /// + /// When the operation completes, ApplyConsentsResponse is returned where + /// `consent_apply_success` and `consent_apply_failure` indicate supported and + /// unsupported (or invalid) Consent resources, respectively. Otherwise, the + /// method propagates the aggregate consensual information to the patient's + /// resources. 
Upon success, `affected_resources` in the ApplyConsentsResponse + /// indicates the number of resources that may have consensual access changed. + /// + /// Optional. + core.bool? validateOnly; + + ApplyConsentsRequest({ + this.patientScope, + this.timeRange, + this.validateOnly, + }); + + ApplyConsentsRequest.fromJson(core.Map json_) + : this( + patientScope: json_.containsKey('patientScope') + ? PatientScope.fromJson( + json_['patientScope'] as core.Map) + : null, + timeRange: json_.containsKey('timeRange') + ? TimeRange.fromJson( + json_['timeRange'] as core.Map) + : null, + validateOnly: json_['validateOnly'] as core.bool?, + ); + + core.Map toJson() => { + if (patientScope != null) 'patientScope': patientScope!, + if (timeRange != null) 'timeRange': timeRange!, + if (validateOnly != null) 'validateOnly': validateOnly!, + }; +} + /// Archives the specified User data mapping. typedef ArchiveUserDataMappingRequest = $Empty; @@ -8557,6 +9002,8 @@ class CharacterMaskConfig { /// Character to mask the sensitive values. /// /// If not supplied, defaults to "*". + /// + /// Optional. core.String? maskingCharacter; CharacterMaskConfig({ @@ -8814,6 +9261,47 @@ class Consent { }; } +/// The accessor scope that describes who can access, for what purpose, in which +/// environment. +class ConsentAccessorScope { + /// An individual, group, or access role that identifies the accessor or a + /// characteristic of the accessor. + /// + /// This can be a resource ID (such as `{resourceType}/{id}`) or an external + /// URI. This value must be present. + core.String? actor; + + /// An abstract identifier that describes the environment or conditions under + /// which the accessor is acting. + /// + /// Can be "*" if it applies to all environments. + core.String? environment; + + /// The intent of data use. + /// + /// Can be "*" if it applies to all purposes. + core.String? purpose; + + ConsentAccessorScope({ + this.actor, + this.environment, + this.purpose, + }); + + ConsentAccessorScope.fromJson(core.Map json_) + : this( + actor: json_['actor'] as core.String?, + environment: json_['environment'] as core.String?, + purpose: json_['purpose'] as core.String?, + ); + + core.Map toJson() => { + if (actor != null) 'actor': actor!, + if (environment != null) 'environment': environment!, + if (purpose != null) 'purpose': purpose!, + }; +} + /// Documentation of a user's consent. class ConsentArtifact { /// Screenshots, PDFs, or other binary information documenting the user's @@ -8918,6 +9406,98 @@ class ConsentArtifact { }; } +/// Configures whether to enforce consent for the FHIR store and which consent +/// enforcement version is being used. +class ConsentConfig { + /// Specifies how the server logs the consent-aware requests. + /// + /// If not specified, the `AccessDeterminationLogConfig.LogLevel.MINIMUM` + /// option is used. + /// + /// Optional. + AccessDeterminationLogConfig? accessDeterminationLogConfig; + + /// The default value is false. + /// + /// If set to true, when accessing FHIR resources, the consent headers will be + /// verified against consents given by patients. See the + /// ConsentEnforcementVersion for the supported consent headers. + /// + /// Optional. + core.bool? accessEnforced; + + /// Different options to configure the behaviour of the server when handling + /// the `X-Consent-Scope` header. + /// + /// Optional. + ConsentHeaderHandling? 
consentHeaderHandling; + + /// The versioned names of the enforced admin Consent resource(s), in the + /// format + /// `projects/{project_id}/locations/{location}/datasets/{dataset_id}/fhirStores/{fhir_store_id}/fhir/Consent/{resource_id}/_history/{version_id}`. + /// + /// For FHIR stores with `disable_resource_versioning=true`, the format is + /// `projects/{project_id}/locations/{location}/datasets/{dataset_id}/fhirStores/{fhir_store_id}/fhir/Consent/{resource_id}`. + /// This field can only be updated using ApplyAdminConsents. + /// + /// Output only. + core.List? enforcedAdminConsents; + + /// Specifies which consent enforcement version is being used for this FHIR + /// store. + /// + /// This field can only be set once by either CreateFhirStore or + /// UpdateFhirStore. After that, you must call ApplyConsents to change the + /// version. + /// + /// Required. + /// Possible string values are: + /// - "CONSENT_ENFORCEMENT_VERSION_UNSPECIFIED" : Users must specify an + /// enforcement version or an error is returned. + /// - "V1" : Enforcement version 1. See the + /// [FHIR Consent resources in the Cloud Healthcare API](https://cloud.google.com/healthcare-api/docs/fhir-consent) + /// guide for more details. + core.String? version; + + ConsentConfig({ + this.accessDeterminationLogConfig, + this.accessEnforced, + this.consentHeaderHandling, + this.enforcedAdminConsents, + this.version, + }); + + ConsentConfig.fromJson(core.Map json_) + : this( + accessDeterminationLogConfig: + json_.containsKey('accessDeterminationLogConfig') + ? AccessDeterminationLogConfig.fromJson( + json_['accessDeterminationLogConfig'] + as core.Map) + : null, + accessEnforced: json_['accessEnforced'] as core.bool?, + consentHeaderHandling: json_.containsKey('consentHeaderHandling') + ? ConsentHeaderHandling.fromJson(json_['consentHeaderHandling'] + as core.Map) + : null, + enforcedAdminConsents: (json_['enforcedAdminConsents'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + version: json_['version'] as core.String?, + ); + + core.Map toJson() => { + if (accessDeterminationLogConfig != null) + 'accessDeterminationLogConfig': accessDeterminationLogConfig!, + if (accessEnforced != null) 'accessEnforced': accessEnforced!, + if (consentHeaderHandling != null) + 'consentHeaderHandling': consentHeaderHandling!, + if (enforcedAdminConsents != null) + 'enforcedAdminConsents': enforcedAdminConsents!, + if (version != null) 'version': version!, + }; +} + /// The detailed evaluation of a particular Consent. class ConsentEvaluation { /// The evaluation result. @@ -8952,6 +9532,43 @@ class ConsentEvaluation { }; } +/// How the server handles the consent header. +class ConsentHeaderHandling { + /// Specifies the default server behavior when the header is empty. + /// + /// If not specified, the `ScopeProfile.PERMIT_EMPTY_SCOPE` option is used. + /// + /// Optional. + /// Possible string values are: + /// - "SCOPE_PROFILE_UNSPECIFIED" : If not specified, the default value + /// `PERMIT_EMPTY_SCOPE` is used. + /// - "PERMIT_EMPTY_SCOPE" : When no consent scopes are provided (for example, + /// if there's an empty or missing header), then consent check is disabled, + /// similar to when `access_enforced` is `false`. You can use audit logs to + /// differentiate these two cases by looking at the value of + /// `protopayload.metadata.consentMode`. If consents scopes are present, they + /// must be valid and within the allowed limits, otherwise the request will be + /// rejected with a `4xx` code. 
+ /// - "REQUIRED_ON_READ" : The consent header must be non-empty when + /// performing read and search operations, otherwise the request is rejected + /// with a `4xx` code. Additionally, invalid consent scopes or scopes + /// exceeding the allowed limits are rejected. + core.String? profile; + + ConsentHeaderHandling({ + this.profile, + }); + + ConsentHeaderHandling.fromJson(core.Map json_) + : this( + profile: json_['profile'] as core.String?, + ); + + core.Map toJson() => { + if (profile != null) 'profile': profile!, + }; +} + /// List of resource names of Consent resources. class ConsentList { /// The resource names of the Consents to evaluate against, of the form @@ -9214,6 +9831,8 @@ class DateShiftConfig { class DeidentifiedStoreDestination { /// The configuration to use when de-identifying resources that are added to /// this store. + /// + /// Optional. DeidentifyConfig? config; /// The full resource name of a Cloud Healthcare FHIR store, for example, @@ -9249,17 +9868,25 @@ class DeidentifiedStoreDestination { /// applied in a nested manner at runtime. class DeidentifyConfig { /// Configures de-id of application/DICOM content. + /// + /// Optional. DicomConfig? dicom; /// Configures de-id of application/FHIR content. + /// + /// Optional. FhirConfig? fhir; /// Configures de-identification of image pixels wherever they are found in /// the source_dataset. + /// + /// Optional. ImageConfig? image; /// Configures de-identification of text wherever it is found in the /// source_dataset. + /// + /// Optional. TextConfig? text; /// Ensures in-flight data remains in the region of origin during @@ -9270,6 +9897,8 @@ class DeidentifyConfig { /// `ORGANIZATION_NAME` infoTypes. `LOCATION` must be excluded within /// TextConfig, and must also be excluded within ImageConfig if image /// redaction is required. + /// + /// Optional. core.bool? useRegionalDataProcessing; DeidentifyConfig({ @@ -9534,6 +10163,8 @@ class DicomConfig { /// to a database of the original images containing the UIDs, it would be /// possible to recover the individual's identity." /// http://dicom.nema.org/medical/dicom/current/output/chtml/part15/sect_E.3.9.html + /// + /// Optional. core.bool? skipIdRedaction; DicomConfig({ @@ -10077,6 +10708,172 @@ class EvaluateUserConsentsResponse { }; } +/// The enforcing consent's metadata. +class ExplainDataAccessConsentInfo { + /// The compartment base resources that matched a cascading policy. + /// + /// Each resource has the following format: + /// `projects/{project_id}/locations/{location_id}/datasets/{dataset_id}/fhirStores/{fhir_store_id}/fhir/{resource_type}/{resource_id}` + core.List? cascadeOrigins; + + /// The resource name of this consent resource, in the format: + /// `projects/{project_id}/locations/{location}/datasets/{dataset_id}/fhirStores/{fhir_store_id}/fhir/Consent/{resource_id}`. + core.String? consentResource; + + /// Last enforcement timestamp of this consent resource. + core.String? enforcementTime; + + /// A list of all the matching accessor scopes of this consent policy that + /// enforced ExplainDataAccessConsentScope.accessor_scope. + core.List? matchingAccessorScopes; + + /// The patient owning the consent (only applicable for patient consents), in + /// the format: + /// `projects/{project_id}/locations/{location_id}/datasets/{dataset_id}/fhirStores/{fhir_store_id}/fhir/Patient/{patient_id}` + core.String? patientConsentOwner; + + /// The policy type of consent resource (e.g. PATIENT, ADMIN). 
+ /// Possible string values are: + /// - "CONSENT_POLICY_TYPE_UNSPECIFIED" : Unspecified policy type. + /// - "CONSENT_POLICY_TYPE_PATIENT" : Consent represent a patient consent. + /// - "CONSENT_POLICY_TYPE_ADMIN" : Consent represent an admin consent. + core.String? type; + + /// The consent's variant combinations. + /// + /// A single consent may have multiple variants. + core.List? variants; + + ExplainDataAccessConsentInfo({ + this.cascadeOrigins, + this.consentResource, + this.enforcementTime, + this.matchingAccessorScopes, + this.patientConsentOwner, + this.type, + this.variants, + }); + + ExplainDataAccessConsentInfo.fromJson(core.Map json_) + : this( + cascadeOrigins: (json_['cascadeOrigins'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + consentResource: json_['consentResource'] as core.String?, + enforcementTime: json_['enforcementTime'] as core.String?, + matchingAccessorScopes: + (json_['matchingAccessorScopes'] as core.List?) + ?.map((value) => ConsentAccessorScope.fromJson( + value as core.Map)) + .toList(), + patientConsentOwner: json_['patientConsentOwner'] as core.String?, + type: json_['type'] as core.String?, + variants: (json_['variants'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + ); + + core.Map toJson() => { + if (cascadeOrigins != null) 'cascadeOrigins': cascadeOrigins!, + if (consentResource != null) 'consentResource': consentResource!, + if (enforcementTime != null) 'enforcementTime': enforcementTime!, + if (matchingAccessorScopes != null) + 'matchingAccessorScopes': matchingAccessorScopes!, + if (patientConsentOwner != null) + 'patientConsentOwner': patientConsentOwner!, + if (type != null) 'type': type!, + if (variants != null) 'variants': variants!, + }; +} + +/// A single consent scope that provides info on who has access to the requested +/// resource scope for a particular purpose and environment, enforced by which +/// consent. +class ExplainDataAccessConsentScope { + /// The accessor scope that describes who can access, for what purpose, and in + /// which environment. + ConsentAccessorScope? accessorScope; + + /// Whether the current consent scope is permitted or denied access on the + /// requested resource. + /// Possible string values are: + /// - "CONSENT_DECISION_TYPE_UNSPECIFIED" : Unspecified consent decision type. + /// - "CONSENT_DECISION_TYPE_PERMIT" : Consent permitted access. + /// - "CONSENT_DECISION_TYPE_DENY" : Consent denied access. + core.String? decision; + + /// Metadata of the consent resources that enforce the consent scope's access. + core.List? enforcingConsents; + + /// Other consent scopes that created exceptions within this scope. + core.List? exceptions; + + ExplainDataAccessConsentScope({ + this.accessorScope, + this.decision, + this.enforcingConsents, + this.exceptions, + }); + + ExplainDataAccessConsentScope.fromJson(core.Map json_) + : this( + accessorScope: json_.containsKey('accessorScope') + ? ConsentAccessorScope.fromJson( + json_['accessorScope'] as core.Map) + : null, + decision: json_['decision'] as core.String?, + enforcingConsents: (json_['enforcingConsents'] as core.List?) + ?.map((value) => ExplainDataAccessConsentInfo.fromJson( + value as core.Map)) + .toList(), + exceptions: (json_['exceptions'] as core.List?) 
+ ?.map((value) => ExplainDataAccessConsentScope.fromJson( + value as core.Map)) + .toList(), + ); + + core.Map toJson() => { + if (accessorScope != null) 'accessorScope': accessorScope!, + if (decision != null) 'decision': decision!, + if (enforcingConsents != null) 'enforcingConsents': enforcingConsents!, + if (exceptions != null) 'exceptions': exceptions!, + }; +} + +/// List of consent scopes that are applicable to the explained access on a +/// given resource. +class ExplainDataAccessResponse { + /// List of applicable consent scopes. + /// + /// Sorted in order of actor such that scopes belonging to the same actor will + /// be adjacent to each other in the list. + core.List? consentScopes; + + /// Warnings associated with this response. + /// + /// It inform user with exceeded scope limit errors. + core.String? warning; + + ExplainDataAccessResponse({ + this.consentScopes, + this.warning, + }); + + ExplainDataAccessResponse.fromJson(core.Map json_) + : this( + consentScopes: (json_['consentScopes'] as core.List?) + ?.map((value) => ExplainDataAccessConsentScope.fromJson( + value as core.Map)) + .toList(), + warning: json_['warning'] as core.String?, + ); + + core.Map toJson() => { + if (consentScopes != null) 'consentScopes': consentScopes!, + if (warning != null) 'warning': warning!, + }; +} + /// Exports data from the specified DICOM store. /// /// If a given resource, such as a DICOM object with the same SOPInstance UID, @@ -10349,6 +11146,8 @@ class FhirConfig { /// If true, all extensions are preserved during de-identification by default. /// If false or unspecified, all extensions are removed during /// de-identification by default. + /// + /// Optional. core.bool? defaultKeepExtensions; /// Specifies FHIR paths to match and how to transform them. @@ -10356,6 +11155,8 @@ class FhirConfig { /// Any field that is not matched by a FieldMetadata is passed through to the /// output dataset unmodified. All extensions will be processed according to /// `default_keep_extensions`. + /// + /// Optional. core.List? fieldMetadataList; FhirConfig({ @@ -10491,6 +11292,13 @@ class FhirStore { /// - "ENABLED" : References in complex data types are parsed. core.String? complexDataTypeReferenceParsing; + /// Specifies whether this store has consent enforcement. + /// + /// Not available for DSTU2 FHIR version due to absence of Consent resources. + /// + /// Optional. + ConsentConfig? consentConfig; + /// If true, overrides the default search behavior for this FHIR store to /// `handling=strict` which returns an error for unrecognized search /// parameters. @@ -10624,6 +11432,7 @@ class FhirStore { FhirStore({ this.complexDataTypeReferenceParsing, + this.consentConfig, this.defaultSearchHandlingStrict, this.disableReferentialIntegrity, this.disableResourceVersioning, @@ -10641,6 +11450,10 @@ class FhirStore { : this( complexDataTypeReferenceParsing: json_['complexDataTypeReferenceParsing'] as core.String?, + consentConfig: json_.containsKey('consentConfig') + ? 
ConsentConfig.fromJson( + json_['consentConfig'] as core.Map) + : null, defaultSearchHandlingStrict: json_['defaultSearchHandlingStrict'] as core.bool?, disableReferentialIntegrity: @@ -10678,6 +11491,7 @@ class FhirStore { core.Map toJson() => { if (complexDataTypeReferenceParsing != null) 'complexDataTypeReferenceParsing': complexDataTypeReferenceParsing!, + if (consentConfig != null) 'consentConfig': consentConfig!, if (defaultSearchHandlingStrict != null) 'defaultSearchHandlingStrict': defaultSearchHandlingStrict!, if (disableReferentialIntegrity != null) @@ -10817,8 +11631,11 @@ class Field { /// matching fields. class FieldMetadata { /// Deidentify action for one field. + /// + /// Optional. /// Possible string values are: - /// - "ACTION_UNSPECIFIED" : No action specified. + /// - "ACTION_UNSPECIFIED" : No action specified. Defaults to + /// DO_NOT_TRANSFORM. /// - "TRANSFORM" : Transform the entire field. /// - "INSPECT_AND_TRANSFORM" : Inspect and transform any found PHI. /// - "DO_NOT_TRANSFORM" : Do not transform. @@ -10834,6 +11651,8 @@ class FieldMetadata { /// Base64Binary, Boolean, Code, Date, DateTime, Decimal, HumanName, Id, /// Instant, Integer, LanguageCode, Markdown, Oid, PositiveInt, String, /// UnsignedInt, Uri, Uuid, Xhtml. + /// + /// Optional. core.List? paths; FieldMetadata({ @@ -11661,6 +12480,8 @@ class Image { /// Specifies how to handle de-identification of image pixels. class ImageConfig { /// Determines how to redact text from image. + /// + /// Optional. /// Possible string values are: /// - "TEXT_REDACTION_MODE_UNSPECIFIED" : No text redaction specified. Same as /// REDACT_NO_TEXT. @@ -11814,6 +12635,8 @@ class InfoTypeTransformation { /// InfoTypes to apply this transformation to. /// /// If this is not specified, the transformation applies to any info_type. + /// + /// Optional. core.List? infoTypes; /// Config for text redaction. @@ -12697,6 +13520,8 @@ class ParserConfig { /// /// If this is unset, '\r' is used as segment terminator, matching the HL7 /// version 2 specification. + /// + /// Optional. core.String? segmentTerminator; core.List get segmentTerminatorAsBytes => convert.base64.decode(segmentTerminator!); @@ -12782,6 +13607,32 @@ class PatientId { }; } +/// Apply consents given by a list of patients. +class PatientScope { + /// The list of patient IDs whose Consent resources will be enforced. + /// + /// At most 10,000 patients can be specified. An empty list is equivalent to + /// all patients (meaning the entire FHIR store). + /// + /// Optional. + core.List? patientIds; + + PatientScope({ + this.patientIds, + }); + + PatientScope.fromJson(core.Map json_) + : this( + patientIds: (json_['patientIds'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + ); + + core.Map toJson() => { + if (patientIds != null) 'patientIds': patientIds!, + }; +} + /// An Identity and Access Management (IAM) policy, which specifies access /// controls for Google Cloud resources. /// @@ -14113,6 +14964,8 @@ class TagFilterList { /// http://dicom.nema.org/medical/dicom/current/output/html/part06.html#table_6-1,. /// They may be provided by "Keyword" or "Tag". For example "PatientID", /// "00100010". + /// + /// Optional. core.List? tags; TagFilterList({ @@ -14140,14 +14993,20 @@ typedef TestIamPermissionsResponse = $PermissionsResponse; class TextConfig { /// Transformations to apply to the detected data, overridden by /// `exclude_info_types`. + /// + /// Optional. core.List? 
additionalTransformations; /// InfoTypes to skip transforming, overriding `additional_transformations`. + /// + /// Optional. core.List? excludeInfoTypes; /// The transformations to apply to the detected data. /// /// Deprecated. Use `additional_transformations` instead. + /// + /// Optional. core.List? transformations; TextConfig({ @@ -14236,6 +15095,42 @@ class TimePartitioning { }; } +/// Apply consents given by patients whose most recent consent changes are in +/// the time range. +/// +/// Note that after identifying these patients, the server applies all Consent +/// resources given by those patients, not just the Consent resources within the +/// timestamp in the range. +class TimeRange { + /// The latest consent change time, in format YYYY-MM-DDThh:mm:ss.sss+zz:zz If + /// not specified, the system uses the time when ApplyConsents was called. + /// + /// Optional. + core.String? end; + + /// The earliest consent change time, in format YYYY-MM-DDThh:mm:ss.sss+zz:zz + /// If not specified, the system uses the FHIR store creation time. + /// + /// Optional. + core.String? start; + + TimeRange({ + this.end, + this.start, + }); + + TimeRange.fromJson(core.Map json_) + : this( + end: json_['end'] as core.String?, + start: json_['start'] as core.String?, + ); + + core.Map toJson() => { + if (end != null) 'end': end!, + if (start != null) 'start': start!, + }; +} + /// A type definition for some HL7v2 type (incl. /// /// Segments and Datatypes). diff --git a/generated/googleapis/lib/iam/v1.dart b/generated/googleapis/lib/iam/v1.dart index 50ede9588..c3da3aa9d 100644 --- a/generated/googleapis/lib/iam/v1.dart +++ b/generated/googleapis/lib/iam/v1.dart @@ -224,15 +224,15 @@ class LocationsWorkforcePoolsResource { /// /// Request parameters: /// - /// [location] - The location of the pool to create. Format: + /// [location] - Optional. The location of the pool to create. Format: /// `locations/{location}`. /// Value must have pattern `^locations/\[^/\]+$`. /// - /// [workforcePoolId] - The ID to use for the pool, which becomes the final - /// component of the resource name. The IDs must be a globally unique string - /// of 6 to 63 lowercase letters, digits, or hyphens. It must start with a - /// letter, and cannot have a trailing hyphen. The prefix `gcp-` is reserved - /// for use by Google, and may not be specified. + /// [workforcePoolId] - Optional. The ID to use for the pool, which becomes + /// the final component of the resource name. The IDs must be a globally + /// unique string of 6 to 63 lowercase letters, digits, or hyphens. It must + /// start with a letter, and cannot have a trailing hyphen. The prefix `gcp-` + /// is reserved for use by Google, and may not be specified. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. @@ -2132,7 +2132,7 @@ class ProjectsLocationsOauthClientsResource { /// /// Request parameters: /// - /// [name] - Immutable. The resource name of the OauthClient. + /// [name] - Immutable. Identifier. The resource name of the OauthClient. /// Format:`projects/{project}/locations/{location}/oauthClients/{oauth_client}`. /// Value must have pattern /// `^projects/\[^/\]+/locations/\[^/\]+/oauthClients/\[^/\]+$`. @@ -2398,8 +2398,8 @@ class ProjectsLocationsOauthClientsCredentialsResource { /// /// Request parameters: /// - /// [name] - Immutable. The resource name of the OauthClientCredential. - /// Format: + /// [name] - Immutable. Identifier. The resource name of the + /// OauthClientCredential. 
Format: /// `projects/{project}/locations/{location}/oauthClients/{oauth_client}/credentials/{credential}` /// Value must have pattern /// `^projects/\[^/\]+/locations/\[^/\]+/oauthClients/\[^/\]+/credentials/\[^/\]+$`. @@ -5973,6 +5973,8 @@ class GoogleIamAdminV1WorkforcePoolProviderOidc { /// The optional client secret. /// /// Required to enable Authorization Code flow for web sign-in. + /// + /// Optional. GoogleIamAdminV1WorkforcePoolProviderOidcClientSecret? clientSecret; /// The OIDC issuer URI. @@ -5991,6 +5993,8 @@ class GoogleIamAdminV1WorkforcePoolProviderOidc { /// must use following format and include only the following fields: { "keys": /// \[ { "kty": "RSA/EC", "alg": "", "use": "sig", "kid": "", "n": "", "e": /// "", "x": "", "y": "", "crv": "" } \] } + /// + /// Optional. core.String? jwksJson; /// Configuration for web single sign-on for the OIDC provider. @@ -6062,6 +6066,8 @@ class GoogleIamAdminV1WorkforcePoolProviderOidcClientSecretValue { /// /// The plain text of the client secret value. For security reasons, this /// field is only used for input and will never be populated in any response. + /// + /// Optional. core.String? plainText; /// A thumbprint to represent the current client secret value. @@ -6095,6 +6101,8 @@ class GoogleIamAdminV1WorkforcePoolProviderOidcWebSsoConfig { /// By default, the `openid`, `profile` and `email` scopes that are supported /// by the identity provider are requested. Each additional scope may be at /// most 256 characters. A maximum of 10 additional scopes may be configured. + /// + /// Optional. core.List? additionalScopes; /// The behavior for how OIDC Claims are included in the `assertion` object @@ -6797,8 +6805,9 @@ class OauthClient { /// Output only. core.String? expireTime; - /// The resource name of the OauthClient. + /// Identifier. /// + /// The resource name of the OauthClient. /// Format:`projects/{project}/locations/{location}/oauthClients/{oauth_client}`. /// /// Immutable. @@ -6894,9 +6903,9 @@ class OauthClientCredential { /// Optional. core.String? displayName; - /// The resource name of the OauthClientCredential. + /// Identifier. /// - /// Format: + /// The resource name of the OauthClientCredential. Format: /// `projects/{project}/locations/{location}/oauthClients/{oauth_client}/credentials/{credential}` /// /// Immutable. @@ -6938,6 +6947,8 @@ class Oidc { /// //iam.googleapis.com/projects//locations//workloadIdentityPools//providers/ /// https://iam.googleapis.com/projects//locations//workloadIdentityPools//providers/ /// ``` + /// + /// Optional. core.List? allowedAudiences; /// The OIDC issuer URL. @@ -8259,6 +8270,8 @@ class WorkforcePool { /// A user-specified description of the pool. /// /// Cannot exceed 256 characters. + /// + /// Optional. core.String? description; /// Disables the workforce pool. @@ -8266,11 +8279,15 @@ class WorkforcePool { /// You cannot use a disabled pool to exchange tokens, or use existing tokens /// to access resources. If the pool is re-enabled, existing tokens grant /// access again. + /// + /// Optional. core.bool? disabled; /// A user-specified display name of the pool in Google Cloud Console. /// /// Cannot exceed 32 characters. + /// + /// Optional. core.String? displayName; /// Time after which the workforce pool will be permanently purged and cannot @@ -8301,6 +8318,8 @@ class WorkforcePool { /// duration of one hour (3600s). 
For SAML providers, the lifetime of the /// token is the minimum of the `session_duration` and the /// `SessionNotOnOrAfter` claim in the SAML assertion. + /// + /// Optional. core.String? sessionDuration; /// The state of the pool. @@ -8376,6 +8395,8 @@ class WorkforcePoolProvider { /// If unspecified, all valid authentication credentials will be accepted. The /// following example shows how to only allow credentials with a mapped /// `google.groups` value of `admins`: ``` "'admins' in google.groups" ``` + /// + /// Optional. core.String? attributeCondition; /// Maps attributes from the authentication credentials issued by an external @@ -8432,17 +8453,23 @@ class WorkforcePoolProvider { /// A user-specified description of the provider. /// /// Cannot exceed 256 characters. + /// + /// Optional. core.String? description; /// Disables the workforce pool provider. /// /// You cannot use a disabled provider to exchange tokens. However, existing /// tokens still grant access. + /// + /// Optional. core.bool? disabled; /// A user-specified display name for the provider. /// /// Cannot exceed 32 characters. + /// + /// Optional. core.String? displayName; /// Time after which the workload pool provider will be permanently purged and @@ -8633,6 +8660,8 @@ class WorkloadIdentityPool { /// A description of the pool. /// /// Cannot exceed 256 characters. + /// + /// Optional. core.String? description; /// Whether the pool is disabled. @@ -8640,11 +8669,15 @@ class WorkloadIdentityPool { /// You cannot use a disabled pool to exchange tokens, or use existing tokens /// to access resources. If the pool is re-enabled, existing tokens grant /// access again. + /// + /// Optional. core.bool? disabled; /// A display name for the pool. /// /// Cannot exceed 32 characters. + /// + /// Optional. core.String? displayName; /// Time after which the workload identity pool will be permanently purged and @@ -8718,6 +8751,8 @@ class WorkloadIdentityPoolProvider { /// valid authentication credential are accepted. The following example shows /// how to only allow credentials with a mapped `google.groups` value of /// `admins`: ``` "'admins' in google.groups" ``` + /// + /// Optional. core.String? attributeCondition; /// Maps attributes from authentication credentials issued by an external @@ -8762,6 +8797,8 @@ class WorkloadIdentityPoolProvider { /// `google.subject` attribute. For example, the following maps the `sub` /// claim of the incoming credential to the `subject` attribute on a Google /// token: ``` {"google.subject": "assertion.sub"} ``` + /// + /// Optional. core.Map? attributeMapping; /// An Amazon Web Services identity provider. @@ -8770,17 +8807,23 @@ class WorkloadIdentityPoolProvider { /// A description for the provider. /// /// Cannot exceed 256 characters. + /// + /// Optional. core.String? description; /// Whether the provider is disabled. /// /// You cannot use a disabled provider to exchange tokens. However, existing /// tokens still grant access. + /// + /// Optional. core.bool? disabled; /// A display name for the provider. /// /// Cannot exceed 32 characters. + /// + /// Optional. core.String? 
displayName; /// Time after which the workload identity pool provider will be permanently diff --git a/generated/googleapis/lib/iamcredentials/v1.dart b/generated/googleapis/lib/iamcredentials/v1.dart index ef0d96fa3..86bfc05ae 100644 --- a/generated/googleapis/lib/iamcredentials/v1.dart +++ b/generated/googleapis/lib/iamcredentials/v1.dart @@ -163,6 +163,42 @@ class ProjectsServiceAccountsResource { response_ as core.Map); } + /// Returns the trust boundary info for a given service account. + /// + /// Request parameters: + /// + /// [name] - Required. Resource name of service account. + /// Value must have pattern `^projects/\[^/\]+/serviceAccounts/\[^/\]+$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [ServiceAccountAllowedLocations]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future getAllowedLocations( + core.String name, { + core.String? $fields, + }) async { + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name') + '/allowedLocations'; + + final response_ = await _requester.request( + url_, + 'GET', + queryParams: queryParams_, + ); + return ServiceAccountAllowedLocations.fromJson( + response_ as core.Map); + } + /// Signs a blob using a service account's system-managed private key. /// /// [request] - The metadata request object. @@ -403,6 +439,39 @@ class GenerateIdTokenResponse { }; } +/// Represents a list of allowed locations for given service account. +class ServiceAccountAllowedLocations { + /// The hex encoded bitmap of the trust boundary locations + /// + /// Output only. + core.String? encodedLocations; + + /// The human readable trust boundary locations. + /// + /// For example, \["us-central1", "europe-west1"\] + /// + /// Output only. + core.List? locations; + + ServiceAccountAllowedLocations({ + this.encodedLocations, + this.locations, + }); + + ServiceAccountAllowedLocations.fromJson(core.Map json_) + : this( + encodedLocations: json_['encodedLocations'] as core.String?, + locations: (json_['locations'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + ); + + core.Map toJson() => { + if (encodedLocations != null) 'encodedLocations': encodedLocations!, + if (locations != null) 'locations': locations!, + }; +} + class SignBlobRequest { /// The sequence of service accounts in a delegation chain. /// diff --git a/generated/googleapis/lib/iap/v1.dart b/generated/googleapis/lib/iap/v1.dart index 96da1a354..351651a66 100644 --- a/generated/googleapis/lib/iap/v1.dart +++ b/generated/googleapis/lib/iap/v1.dart @@ -2134,10 +2134,8 @@ class Resource { /// Services integrated with custom org policy must populate this field for /// all requests where the API call changes the state of the resource. Custom /// org policy backend uses these attributes to enforce custom org policies. - /// When a proto is wrapped, it is generally the One Platform API proto. When - /// a JSON string is wrapped, use `google.protobuf.StringValue` for the inner - /// value. For create operations, GCP service is expected to pass resource - /// from customer request as is. For update/patch operations, GCP service is + /// For create operations, GCP service is expected to pass resource from + /// customer request as is. 
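The new getAllowedLocations call in iamcredentials v1 above is easiest to see end to end; this is a sketch under the assumption that the generated API class is IAMCredentialsApi and that application-default credentials are available. The project and service-account names are placeholders.

import 'package:googleapis/iamcredentials/v1.dart' as iamcredentials;
import 'package:googleapis_auth/auth_io.dart' as auth;

Future<void> main() async {
  final client = await auth.clientViaApplicationDefaultCredentials(
    scopes: [iamcredentials.IAMCredentialsApi.cloudPlatformScope],
  );
  try {
    final api = iamcredentials.IAMCredentialsApi(client);

    // Trust boundary info for a service account (resource name is a placeholder).
    final allowed = await api.projects.serviceAccounts.getAllowedLocations(
      'projects/-/serviceAccounts/my-sa@my-project.iam.gserviceaccount.com',
    );

    print(allowed.encodedLocations); // hex encoded bitmap, output only
    print(allowed.locations); // e.g. ["us-central1", "europe-west1"]
  } finally {
    client.close();
  }
}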
For update/patch operations, GCP service is /// expected to compute the next state with the patch provided by the user. /// See go/custom-constraints-org-policy-integration-guide for additional /// details. diff --git a/generated/googleapis/lib/identitytoolkit/v1.dart b/generated/googleapis/lib/identitytoolkit/v1.dart index acd5723d3..9ff1200c9 100644 --- a/generated/googleapis/lib/identitytoolkit/v1.dart +++ b/generated/googleapis/lib/identitytoolkit/v1.dart @@ -4062,6 +4062,8 @@ class GoogleCloudIdentitytoolkitV1ResetPasswordRequest { /// /// Specify this and the old password in order to change an account's password /// without using an out-of-band code. + /// + /// Optional. core.String? email; /// The new password to be set for this account. @@ -4086,6 +4088,8 @@ class GoogleCloudIdentitytoolkitV1ResetPasswordRequest { core.String? oobCode; /// The tenant ID of the Identity Platform tenant the account belongs to. + /// + /// Optional. core.String? tenantId; GoogleCloudIdentitytoolkitV1ResetPasswordRequest({ @@ -4172,9 +4176,10 @@ class GoogleCloudIdentitytoolkitV1ResetPasswordResponse { /// Request message for SendVerificationCode. /// -/// At least one of (`ios_receipt` and `ios_secret`), `recaptcha_token`, or -/// `safety_net_token` must be specified to verify the verification code is -/// being sent on behalf of a real app and not an emulator. +/// 'captcha_response' is required when reCAPTCHA enterprise is enabled, or +/// otherwise at least one of (`ios_receipt` and `ios_secret`), +/// `recaptcha_token`, or `safety_net_token` must be specified to verify the +/// verification code is being sent on behalf of a real app and not an emulator. class GoogleCloudIdentitytoolkitV1SendVerificationCodeRequest { /// Android only. /// @@ -4205,9 +4210,10 @@ class GoogleCloudIdentitytoolkitV1SendVerificationCodeRequest { /// /// At least one of (`ios_receipt` and `ios_secret`), `recaptcha_token`, or /// `safety_net_token` must be specified to verify the verification code is - /// being sent on behalf of a real app and not an emulator. This should come - /// from the response of verifyIosClient. If present, the caller should also - /// provide the `ios_secret`, as well as a bundle ID in the + /// being sent on behalf of a real app and not an emulator, if + /// 'captcha_response' is not used (reCAPTCHA enterprise is not enabled). This + /// should come from the response of verifyIosClient. If present, the caller + /// should also provide the `ios_secret`, as well as a bundle ID in the /// `x-ios-bundle-identifier` header, which must match the bundle ID from the /// verifyIosClient request. core.String? iosReceipt; @@ -4227,7 +4233,8 @@ class GoogleCloudIdentitytoolkitV1SendVerificationCodeRequest { /// safety_net_token). At least one of (`ios_receipt` and `ios_secret`), /// `recaptcha_token`, , or `play_integrity_token` must be specified to verify /// the verification code is being sent on behalf of a real app and not an - /// emulator. A Play Integrity Token can be generated via the + /// emulator, if 'captcha_response' is not used (reCAPTCHA enterprise is not + /// enabled). A Play Integrity Token can be generated via the /// [PlayIntegrity API](https://developer.android.com/google/play/integrity) /// with applying SHA256 to the `phone_number` field as the nonce. core.String? 
playIntegrityToken; @@ -4236,13 +4243,17 @@ class GoogleCloudIdentitytoolkitV1SendVerificationCodeRequest { /// /// At least one of (`ios_receipt` and `ios_secret`), `recaptcha_token`, or /// `safety_net_token` must be specified to verify the verification code is - /// being sent on behalf of a real app and not an emulator. The recaptcha - /// should be generated by calling getRecaptchaParams and the recaptcha token - /// will be generated on user completion of the recaptcha challenge. + /// being sent on behalf of a real app and not an emulator, if + /// 'captcha_response' is not used (reCAPTCHA enterprise is not enabled). The + /// recaptcha should be generated by calling getRecaptchaParams and the + /// recaptcha token will be generated on user completion of the recaptcha + /// challenge. core.String? recaptchaToken; /// The reCAPTCHA version of the reCAPTCHA token in the captcha_response. /// + /// Required when reCAPTCHA Enterprise is enabled. + /// /// Optional. /// Possible string values are: /// - "RECAPTCHA_VERSION_UNSPECIFIED" : The reCAPTCHA version is not @@ -4255,8 +4266,9 @@ class GoogleCloudIdentitytoolkitV1SendVerificationCodeRequest { /// Used to assert application identity in place of a recaptcha token. At /// least one of (`ios_receipt` and `ios_secret`), `recaptcha_token`, or /// `safety_net_token` must be specified to verify the verification code is - /// being sent on behalf of a real app and not an emulator. A SafetyNet Token - /// can be generated via the + /// being sent on behalf of a real app and not an emulator, if + /// 'captcha_response' is not used (reCAPTCHA enterprise is not enabled). A + /// SafetyNet Token can be generated via the /// [SafetyNet Android Attestation API](https://developer.android.com/training/safetynet/attestation.html), /// with the Base64 encoding of the `phone_number` field as the nonce. core.String? safetyNetToken; diff --git a/generated/googleapis/lib/identitytoolkit/v2.dart b/generated/googleapis/lib/identitytoolkit/v2.dart index fba54eb74..7591d768e 100644 --- a/generated/googleapis/lib/identitytoolkit/v2.dart +++ b/generated/googleapis/lib/identitytoolkit/v2.dart @@ -3945,8 +3945,8 @@ class GoogleCloudIdentitytoolkitAdminV2RecaptchaConfig { /// The reCAPTCHA config for email/password provider, containing the /// enforcement status. /// - /// The email/password provider contains all related user flows protected by - /// reCAPTCHA. + /// The email/password provider contains all email related user flows + /// protected by reCAPTCHA. /// Possible string values are: /// - "RECAPTCHA_PROVIDER_ENFORCEMENT_STATE_UNSPECIFIED" : Enforcement state /// has not been set. @@ -3965,7 +3965,8 @@ class GoogleCloudIdentitytoolkitAdminV2RecaptchaConfig { /// The reCAPTCHA config for phone provider, containing the enforcement /// status. /// - /// The phone provider contains all related user flows protected by reCAPTCHA. + /// The phone provider contains all SMS related user flows protected by + /// reCAPTCHA. /// Possible string values are: /// - "RECAPTCHA_PROVIDER_ENFORCEMENT_STATE_UNSPECIFIED" : Enforcement state /// has not been set. @@ -3978,10 +3979,12 @@ class GoogleCloudIdentitytoolkitAdminV2RecaptchaConfig { /// The reCAPTCHA keys. core.List? recaptchaKeys; - /// The managed rules for toll fraud provider, containing the enforcement - /// status. + /// The managed rules for the authentication action based on reCAPTCHA toll + /// fraud risk scores. /// - /// The toll fraud provider contains all SMS related user flows. 
+ /// Toll fraud managed rules will only take effect when the + /// phone_enforcement_state is AUDIT or ENFORCE and + /// use_sms_toll_fraud_protection is true. core.List? tollFraudManagedRules; @@ -3990,6 +3993,17 @@ class GoogleCloudIdentitytoolkitAdminV2RecaptchaConfig { /// Defaults to `false`. core.bool? useAccountDefender; + /// Whether to use the rCE bot score for reCAPTCHA phone provider. + /// + /// Can only be true when the phone_enforcement_state is AUDIT or ENFORCE. + core.bool? useSmsBotScore; + + /// Whether to use the rCE sms toll fraud protection risk score for reCAPTCHA + /// phone provider. + /// + /// Can only be true when the phone_enforcement_state is AUDIT or ENFORCE. + core.bool? useSmsTollFraudProtection; + GoogleCloudIdentitytoolkitAdminV2RecaptchaConfig({ this.emailPasswordEnforcementState, this.managedRules, @@ -3997,6 +4011,8 @@ class GoogleCloudIdentitytoolkitAdminV2RecaptchaConfig { this.recaptchaKeys, this.tollFraudManagedRules, this.useAccountDefender, + this.useSmsBotScore, + this.useSmsTollFraudProtection, }); GoogleCloudIdentitytoolkitAdminV2RecaptchaConfig.fromJson(core.Map json_) @@ -4020,6 +4036,9 @@ class GoogleCloudIdentitytoolkitAdminV2RecaptchaConfig { .fromJson(value as core.Map)) .toList(), useAccountDefender: json_['useAccountDefender'] as core.bool?, + useSmsBotScore: json_['useSmsBotScore'] as core.bool?, + useSmsTollFraudProtection: + json_['useSmsTollFraudProtection'] as core.bool?, ); core.Map toJson() => { @@ -4033,6 +4052,9 @@ class GoogleCloudIdentitytoolkitAdminV2RecaptchaConfig { 'tollFraudManagedRules': tollFraudManagedRules!, if (useAccountDefender != null) 'useAccountDefender': useAccountDefender!, + if (useSmsBotScore != null) 'useSmsBotScore': useSmsBotScore!, + if (useSmsTollFraudProtection != null) + 'useSmsTollFraudProtection': useSmsTollFraudProtection!, }; } @@ -4110,8 +4132,9 @@ class GoogleCloudIdentitytoolkitAdminV2RecaptchaManagedRule { /// The config for a reCAPTCHA toll fraud assessment managed rule. /// -/// Models a single interval \[start_score, end_score\]. The start_score is -/// maximum_allowed_score. End score is 1.0. +/// Models a single interval \[start_score, end_score\]. The end_score is +/// implicit. It is either the closest smaller end_score (if one is available) +/// or 0. Intervals in aggregate span \[0, 1\] without overlapping. class GoogleCloudIdentitytoolkitAdminV2RecaptchaTollFraudManagedRule { /// The action taken if the reCAPTCHA score of a request is within the /// interval \[start_score, end_score\]. @@ -4122,8 +4145,10 @@ class GoogleCloudIdentitytoolkitAdminV2RecaptchaTollFraudManagedRule { /// The start score (inclusive) for an action. /// - /// A score of 0.0 indicates the safest request (likely legitimate), whereas - /// 1.0 indicates the riskiest request (likely toll fraud). See + /// Must be a value between 0.0 and 1.0, at 11 discrete values; e.g. 0, 0.1, + /// 0.2, 0.3, ... 0.9, 1.0. A score of 0.0 indicates the safest request + /// (likely legitimate), whereas 1.0 indicates the riskiest request (likely + /// toll fraud). See /// https://cloud.google.com/recaptcha-enterprise/docs/sms-fraud-detection#create-assessment-sms. core.double? startScore; @@ -5223,9 +5248,18 @@ class GoogleCloudIdentitytoolkitV2RecaptchaConfig { /// or ENFORCE on at least one of the reCAPTCHA providers. core.String? recaptchaKey; + /// Whether to use the rCE bot score for reCAPTCHA phone provider. + core.bool? 
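Putting the two new flags together with a toll fraud rule, as a sketch of the admin v2 reCAPTCHA config; only fields visible in this hunk are set, and the note about the phone provider's enforcement state restates the field documentation above.

import 'package:googleapis/identitytoolkit/v2.dart' as identitytoolkit;

final recaptcha =
    identitytoolkit.GoogleCloudIdentitytoolkitAdminV2RecaptchaConfig(
  // Both flags only take effect while the phone provider's enforcement
  // state is AUDIT or ENFORCE.
  useSmsBotScore: true,
  useSmsTollFraudProtection: true,
  tollFraudManagedRules: [
    identitytoolkit.GoogleCloudIdentitytoolkitAdminV2RecaptchaTollFraudManagedRule(
      // start_score must be one of the 11 discrete values 0.0, 0.1, ..., 1.0;
      // the matching interval's end score is implicit.
      startScore: 0.7,
    ),
  ],
);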
useSmsBotScore; + + /// Whether to use the rCE sms toll fraud protection risk score for reCAPTCHA + /// phone provider. + core.bool? useSmsTollFraudProtection; + GoogleCloudIdentitytoolkitV2RecaptchaConfig({ this.recaptchaEnforcementState, this.recaptchaKey, + this.useSmsBotScore, + this.useSmsTollFraudProtection, }); GoogleCloudIdentitytoolkitV2RecaptchaConfig.fromJson(core.Map json_) @@ -5237,12 +5271,18 @@ class GoogleCloudIdentitytoolkitV2RecaptchaConfig { .fromJson(value as core.Map)) .toList(), recaptchaKey: json_['recaptchaKey'] as core.String?, + useSmsBotScore: json_['useSmsBotScore'] as core.bool?, + useSmsTollFraudProtection: + json_['useSmsTollFraudProtection'] as core.bool?, ); core.Map toJson() => { if (recaptchaEnforcementState != null) 'recaptchaEnforcementState': recaptchaEnforcementState!, if (recaptchaKey != null) 'recaptchaKey': recaptchaKey!, + if (useSmsBotScore != null) 'useSmsBotScore': useSmsBotScore!, + if (useSmsTollFraudProtection != null) + 'useSmsTollFraudProtection': useSmsTollFraudProtection!, }; } @@ -5504,6 +5544,8 @@ class GoogleCloudIdentitytoolkitV2StartMfaPhoneRequestInfo { core.String? recaptchaToken; /// The reCAPTCHA version of the reCAPTCHA token in the captcha_response. + /// + /// Required when reCAPTCHA Enterprise is enabled. /// Possible string values are: /// - "RECAPTCHA_VERSION_UNSPECIFIED" : The reCAPTCHA version is not /// specified. diff --git a/generated/googleapis/lib/integrations/v1.dart b/generated/googleapis/lib/integrations/v1.dart index 8ca36fe7c..6e0ea9da5 100644 --- a/generated/googleapis/lib/integrations/v1.dart +++ b/generated/googleapis/lib/integrations/v1.dart @@ -266,6 +266,50 @@ class ProjectsLocationsResource { ProjectsLocationsResource(commons.ApiRequester client) : _requester = client; + /// Generate OpenAPI spec for the requested integrations and api triggers + /// + /// [request] - The metadata request object. + /// + /// Request parameters: + /// + /// [name] - Required. Project and location from which the integrations should + /// be fetched. Format: projects/{project}/location/{location} + /// Value must have pattern `^projects/\[^/\]+/locations/\[^/\]+$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a + /// [GoogleCloudIntegrationsV1alphaGenerateOpenApiSpecResponse]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future + generateOpenApiSpec( + GoogleCloudIntegrationsV1alphaGenerateOpenApiSpecRequest request, + core.String name, { + core.String? $fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name') + ':generateOpenApiSpec'; + + final response_ = await _requester.request( + url_, + 'POST', + body: body_, + queryParams: queryParams_, + ); + return GoogleCloudIntegrationsV1alphaGenerateOpenApiSpecResponse.fromJson( + response_ as core.Map); + } + /// Gets the client configuration for the given project and location resource /// name /// @@ -2080,7 +2124,7 @@ class ProjectsLocationsIntegrationsExecutionsResource { /// /// Request parameters: /// - /// [name] - Required. Next ID: 3 The execution resource name. Format: + /// [name] - Required. Next ID: 6 The execution resource name. 
Format: /// projects/{gcp_project_id}/locations/{location}/integrations/{integration}/executions/{execution_id} /// Value must have pattern /// `^projects/\[^/\]+/locations/\[^/\]+/integrations/\[^/\]+/executions/\[^/\]+$`. @@ -10012,8 +10056,56 @@ class EnterpriseCrmFrontendsEventbusProtoEventExecutionInfo { /// Contains the details of the execution info: this includes the replay reason /// and replay tree connecting executions in a parent-child relationship -typedef EnterpriseCrmFrontendsEventbusProtoEventExecutionInfoReplayInfo - = $ReplayInfo; +class EnterpriseCrmFrontendsEventbusProtoEventExecutionInfoReplayInfo { + /// If this execution is a replay of another execution, then this field + /// contains the original execution id. + core.String? originalExecutionInfoId; + + /// Replay mode for the execution + /// Possible string values are: + /// - "REPLAY_MODE_UNSPECIFIED" + /// - "REPLAY_MODE_FROM_BEGINNING" : Replay the original execution from the + /// beginning. + /// - "REPLAY_MODE_POINT_OF_FAILURE" : Replay the execution from the first + /// failed task. + core.String? replayMode; + + /// reason for replay + core.String? replayReason; + + /// If this execution has been replayed, then this field contains the + /// execution ids of the replayed executions. + core.List? replayedExecutionInfoIds; + + EnterpriseCrmFrontendsEventbusProtoEventExecutionInfoReplayInfo({ + this.originalExecutionInfoId, + this.replayMode, + this.replayReason, + this.replayedExecutionInfoIds, + }); + + EnterpriseCrmFrontendsEventbusProtoEventExecutionInfoReplayInfo.fromJson( + core.Map json_) + : this( + originalExecutionInfoId: + json_['originalExecutionInfoId'] as core.String?, + replayMode: json_['replayMode'] as core.String?, + replayReason: json_['replayReason'] as core.String?, + replayedExecutionInfoIds: + (json_['replayedExecutionInfoIds'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + ); + + core.Map toJson() => { + if (originalExecutionInfoId != null) + 'originalExecutionInfoId': originalExecutionInfoId!, + if (replayMode != null) 'replayMode': replayMode!, + if (replayReason != null) 'replayReason': replayReason!, + if (replayedExecutionInfoIds != null) + 'replayedExecutionInfoIds': replayedExecutionInfoIds!, + }; +} class EnterpriseCrmFrontendsEventbusProtoEventExecutionSnapshot { /// Indicates "right after which checkpoint task's execution" this snapshot is @@ -10963,7 +11055,7 @@ class EnterpriseCrmFrontendsEventbusProtoTriggerConfig { /// List of input variables for the api trigger. /// /// Optional. - core.List? inputVariables; + EnterpriseCrmFrontendsEventbusProtoTriggerConfigVariables? inputVariables; /// The user created label for a particular trigger. core.String? label; @@ -10980,7 +11072,7 @@ class EnterpriseCrmFrontendsEventbusProtoTriggerConfig { /// List of output variables for the api trigger. /// /// Optional. - core.List? outputVariables; + EnterpriseCrmFrontendsEventbusProtoTriggerConfigVariables? outputVariables; /// If set to true, any upcoming requests for this trigger config will be /// paused and the executions will be resumed later when the flag is reset. @@ -11094,15 +11186,19 @@ class EnterpriseCrmFrontendsEventbusProtoTriggerConfig { ?.map((value) => value as core.String) .toList(), errorCatcherId: json_['errorCatcherId'] as core.String?, - inputVariables: (json_['inputVariables'] as core.List?) - ?.map((value) => value as core.String) - .toList(), + inputVariables: json_.containsKey('inputVariables') + ? 
EnterpriseCrmFrontendsEventbusProtoTriggerConfigVariables + .fromJson(json_['inputVariables'] + as core.Map) + : null, label: json_['label'] as core.String?, nextTasksExecutionPolicy: json_['nextTasksExecutionPolicy'] as core.String?, - outputVariables: (json_['outputVariables'] as core.List?) - ?.map((value) => value as core.String) - .toList(), + outputVariables: json_.containsKey('outputVariables') + ? EnterpriseCrmFrontendsEventbusProtoTriggerConfigVariables + .fromJson(json_['outputVariables'] + as core.Map) + : null, pauseWorkflowExecutions: json_['pauseWorkflowExecutions'] as core.bool?, position: json_.containsKey('position') @@ -11157,6 +11253,10 @@ class EnterpriseCrmFrontendsEventbusProtoTriggerConfig { }; } +/// Variables names mapped to api trigger. +typedef EnterpriseCrmFrontendsEventbusProtoTriggerConfigVariables + = $TriggerConfigVariables; + class EnterpriseCrmFrontendsEventbusProtoWorkflowParameterEntry { /// Metadata information about the parameters. EnterpriseCrmEventbusProtoAttributes? attributes; @@ -11819,6 +11919,7 @@ class GoogleCloudConnectorsV1Connection { /// - "PREVIEW" : PREVIEW. /// - "GA" : GA. /// - "DEPRECATED" : DEPRECATED. + /// - "TEST" : TEST. /// - "PRIVATE_PREVIEW" : PRIVATE_PREVIEW. core.String? connectorVersionLaunchStage; @@ -12141,7 +12242,7 @@ class GoogleCloudConnectorsV1ConnectorVersionInfraConfig { /// Indicates whether connector is deployed on GKE/CloudRun /// - /// Optional. + /// Output only. /// Possible string values are: /// - "DEPLOYMENT_MODEL_UNSPECIFIED" : Deployment model is not specified. /// - "GKE_MST" : Default model gke mst. @@ -12306,9 +12407,13 @@ typedef GoogleCloudConnectorsV1EncryptionKey = $EncryptionKey; /// Eventing Configuration of a connection class GoogleCloudConnectorsV1EventingConfig { /// Additional eventing related field values + /// + /// Optional. core.List? additionalVariables; /// Auth details for the webhook adapter. + /// + /// Optional. GoogleCloudConnectorsV1AuthConfig? authConfig; /// Dead letter configuration for eventing of a connection. @@ -12317,6 +12422,8 @@ class GoogleCloudConnectorsV1EventingConfig { GoogleCloudConnectorsV1EventingConfigDeadLetterConfig? deadLetterConfig; /// Enrichment Enabled. + /// + /// Optional. core.bool? enrichmentEnabled; /// Ingress endpoint of the event listener. @@ -12342,6 +12449,8 @@ class GoogleCloudConnectorsV1EventingConfig { GoogleCloudConnectorsV1DestinationConfig? proxyDestinationConfig; /// Registration endpoint for auto registration. + /// + /// Optional. GoogleCloudConnectorsV1DestinationConfig? registrationDestinationConfig; GoogleCloudConnectorsV1EventingConfig({ @@ -12445,11 +12554,18 @@ class GoogleCloudConnectorsV1EventingRuntimeData { /// Output only. GoogleCloudConnectorsV1EventingRuntimeDataWebhookData? webhookData; + /// Webhook subscriptions. + /// + /// Output only. + GoogleCloudConnectorsV1EventingRuntimeDataWebhookSubscriptions? + webhookSubscriptions; + GoogleCloudConnectorsV1EventingRuntimeData({ this.eventsListenerEndpoint, this.eventsListenerPscSa, this.status, this.webhookData, + this.webhookSubscriptions, }); GoogleCloudConnectorsV1EventingRuntimeData.fromJson(core.Map json_) @@ -12465,6 +12581,11 @@ class GoogleCloudConnectorsV1EventingRuntimeData { ? GoogleCloudConnectorsV1EventingRuntimeDataWebhookData.fromJson( json_['webhookData'] as core.Map) : null, + webhookSubscriptions: json_.containsKey('webhookSubscriptions') + ? 
GoogleCloudConnectorsV1EventingRuntimeDataWebhookSubscriptions + .fromJson(json_['webhookSubscriptions'] + as core.Map) + : null, ); core.Map toJson() => { @@ -12474,6 +12595,8 @@ class GoogleCloudConnectorsV1EventingRuntimeData { 'eventsListenerPscSa': eventsListenerPscSa!, if (status != null) 'status': status!, if (webhookData != null) 'webhookData': webhookData!, + if (webhookSubscriptions != null) + 'webhookSubscriptions': webhookSubscriptions!, }; } @@ -12544,6 +12667,32 @@ class GoogleCloudConnectorsV1EventingRuntimeDataWebhookData { }; } +/// WebhookSubscriptions has details of webhook subscriptions. +class GoogleCloudConnectorsV1EventingRuntimeDataWebhookSubscriptions { + /// Webhook data. + /// + /// Output only. + core.List? webhookData; + + GoogleCloudConnectorsV1EventingRuntimeDataWebhookSubscriptions({ + this.webhookData, + }); + + GoogleCloudConnectorsV1EventingRuntimeDataWebhookSubscriptions.fromJson( + core.Map json_) + : this( + webhookData: (json_['webhookData'] as core.List?) + ?.map((value) => + GoogleCloudConnectorsV1EventingRuntimeDataWebhookData + .fromJson(value as core.Map)) + .toList(), + ); + + core.Map toJson() => { + if (webhookData != null) 'webhookData': webhookData!, + }; +} + /// EventingStatus indicates the state of eventing. typedef GoogleCloudConnectorsV1EventingStatus = $EventingStatus; @@ -12573,39 +12722,55 @@ typedef GoogleCloudConnectorsV1Secret = $Secret; /// SSL Configuration of a connection class GoogleCloudConnectorsV1SslConfig { /// Additional SSL related field values + /// + /// Optional. core.List? additionalVariables; /// Type of Client Cert (PEM/JKS/.. /// /// etc.) + /// + /// Optional. /// Possible string values are: /// - "CERT_TYPE_UNSPECIFIED" : Cert type unspecified. /// - "PEM" : Privacy Enhanced Mail (PEM) Type core.String? clientCertType; /// Client Certificate + /// + /// Optional. GoogleCloudConnectorsV1Secret? clientCertificate; /// Client Private Key + /// + /// Optional. GoogleCloudConnectorsV1Secret? clientPrivateKey; /// Secret containing the passphrase protecting the Client Private Key + /// + /// Optional. GoogleCloudConnectorsV1Secret? clientPrivateKeyPass; /// Private Server Certificate. /// /// Needs to be specified if trust model is `PRIVATE`. + /// + /// Optional. GoogleCloudConnectorsV1Secret? privateServerCertificate; /// Type of Server Cert (PEM/JKS/.. /// /// etc.) + /// + /// Optional. /// Possible string values are: /// - "CERT_TYPE_UNSPECIFIED" : Cert type unspecified. /// - "PEM" : Privacy Enhanced Mail (PEM) Type core.String? serverCertType; /// Trust Model of the SSL connection + /// + /// Optional. /// Possible string values are: /// - "PUBLIC" : Public Trust Model. Takes the Default Java trust store. /// - "PRIVATE" : Private Trust Model. Takes custom/private trust store. @@ -12613,6 +12778,8 @@ class GoogleCloudConnectorsV1SslConfig { core.String? trustModel; /// Controls the ssl type for the given connector version. + /// + /// Optional. /// Possible string values are: /// - "SSL_TYPE_UNSPECIFIED" : No SSL configuration required. /// - "TLS" : TLS Handshake @@ -12620,6 +12787,8 @@ class GoogleCloudConnectorsV1SslConfig { core.String? type; /// Bool for enabling SSL + /// + /// Optional. core.bool? useSsl; GoogleCloudConnectorsV1SslConfig({ @@ -12737,10 +12906,44 @@ class GoogleCloudIntegrationsV1alphaAccessToken { }; } +/// List of API triggerID and their workflow resource name. +class GoogleCloudIntegrationsV1alphaApiTriggerResource { + /// Integration where the API is published + /// + /// Required. 
+ core.String? integrationResource; + + /// Trigger Id of the API trigger(s) in the integration + /// + /// Required. + core.List? triggerId; + + GoogleCloudIntegrationsV1alphaApiTriggerResource({ + this.integrationResource, + this.triggerId, + }); + + GoogleCloudIntegrationsV1alphaApiTriggerResource.fromJson(core.Map json_) + : this( + integrationResource: json_['integrationResource'] as core.String?, + triggerId: (json_['triggerId'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + ); + + core.Map toJson() => { + if (integrationResource != null) + 'integrationResource': integrationResource!, + if (triggerId != null) 'triggerId': triggerId!, + }; +} + /// An assertion which will check for a condition over task execution status or -/// an expression for task output variables Next available id: 5 +/// an expression for task output variables class GoogleCloudIntegrationsV1alphaAssertion { /// The type of assertion to perform. + /// + /// Optional. /// Possible string values are: /// - "ASSERTION_STRATEGY_UNSPECIFIED" : Unspecified Assertion strategy /// - "ASSERT_SUCCESSFUL_EXECUTION" : Test a successful execution @@ -14677,7 +14880,55 @@ class GoogleCloudIntegrationsV1alphaExecutionDetails { /// Contains the details of the execution info: this includes the replay reason /// and replay tree connecting executions in a parent-child relationship -typedef GoogleCloudIntegrationsV1alphaExecutionReplayInfo = $ReplayInfo; +class GoogleCloudIntegrationsV1alphaExecutionReplayInfo { + /// If this execution is a replay of another execution, then this field + /// contains the original execution id. + core.String? originalExecutionInfoId; + + /// Replay mode for the execution + /// Possible string values are: + /// - "REPLAY_MODE_UNSPECIFIED" : Default value. + /// - "REPLAY_MODE_FROM_BEGINNING" : Replay the original execution from the + /// beginning. + /// - "REPLAY_MODE_POINT_OF_FAILURE" : Replay the execution from the first + /// failed task. + core.String? replayMode; + + /// reason for replay + core.String? replayReason; + + /// If this execution has been replayed, then this field contains the + /// execution ids of the replayed executions. + core.List? replayedExecutionInfoIds; + + GoogleCloudIntegrationsV1alphaExecutionReplayInfo({ + this.originalExecutionInfoId, + this.replayMode, + this.replayReason, + this.replayedExecutionInfoIds, + }); + + GoogleCloudIntegrationsV1alphaExecutionReplayInfo.fromJson(core.Map json_) + : this( + originalExecutionInfoId: + json_['originalExecutionInfoId'] as core.String?, + replayMode: json_['replayMode'] as core.String?, + replayReason: json_['replayReason'] as core.String?, + replayedExecutionInfoIds: + (json_['replayedExecutionInfoIds'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + ); + + core.Map toJson() => { + if (originalExecutionInfoId != null) + 'originalExecutionInfoId': originalExecutionInfoId!, + if (replayMode != null) 'replayMode': replayMode!, + if (replayReason != null) 'replayReason': replayReason!, + if (replayedExecutionInfoIds != null) + 'replayedExecutionInfoIds': replayedExecutionInfoIds!, + }; +} /// Contains the snapshot of the execution for a given checkpoint. class GoogleCloudIntegrationsV1alphaExecutionSnapshot { @@ -14940,6 +15191,66 @@ class GoogleCloudIntegrationsV1alphaFile { }; } +/// Request for GenerateOpenApiSpec. +class GoogleCloudIntegrationsV1alphaGenerateOpenApiSpecRequest { + /// List of api triggers + /// + /// Required. + core.List? 
+ apiTriggerResources; + + /// File format for generated spec. + /// + /// Required. + /// Possible string values are: + /// - "FILE_FORMAT_UNSPECIFIED" : Unspecified file format + /// - "JSON" : JSON File Format + /// - "YAML" : YAML File Format + core.String? fileFormat; + + GoogleCloudIntegrationsV1alphaGenerateOpenApiSpecRequest({ + this.apiTriggerResources, + this.fileFormat, + }); + + GoogleCloudIntegrationsV1alphaGenerateOpenApiSpecRequest.fromJson( + core.Map json_) + : this( + apiTriggerResources: (json_['apiTriggerResources'] as core.List?) + ?.map((value) => + GoogleCloudIntegrationsV1alphaApiTriggerResource.fromJson( + value as core.Map)) + .toList(), + fileFormat: json_['fileFormat'] as core.String?, + ); + + core.Map toJson() => { + if (apiTriggerResources != null) + 'apiTriggerResources': apiTriggerResources!, + if (fileFormat != null) 'fileFormat': fileFormat!, + }; +} + +/// Response of the GenerateOpenApiSpec API. +class GoogleCloudIntegrationsV1alphaGenerateOpenApiSpecResponse { + /// Open API spec as per the required format + core.String? openApiSpec; + + GoogleCloudIntegrationsV1alphaGenerateOpenApiSpecResponse({ + this.openApiSpec, + }); + + GoogleCloudIntegrationsV1alphaGenerateOpenApiSpecResponse.fromJson( + core.Map json_) + : this( + openApiSpec: json_['openApiSpec'] as core.String?, + ); + + core.Map toJson() => { + if (openApiSpec != null) 'openApiSpec': openApiSpec!, + }; +} + /// Returns success or error message class GoogleCloudIntegrationsV1alphaGenerateTokenResponse { /// The message that notifies the user if the request succeeded or not. @@ -17380,8 +17691,6 @@ class GoogleCloudIntegrationsV1alphaRuntimeEntitySchema { } /// The request for scheduling an integration. -/// -/// Next available id: 11 class GoogleCloudIntegrationsV1alphaScheduleIntegrationsRequest { /// Input parameters used by integration execution. /// @@ -19108,7 +19417,7 @@ class GoogleCloudIntegrationsV1alphaTriggerConfig { /// List of input variables for the api trigger. /// /// Optional. - core.List? inputVariables; + GoogleCloudIntegrationsV1alphaTriggerConfigVariables? inputVariables; /// The user created label for a particular trigger. /// @@ -19129,7 +19438,7 @@ class GoogleCloudIntegrationsV1alphaTriggerConfig { /// List of output variables for the api trigger. /// /// Optional. - core.List? outputVariables; + GoogleCloudIntegrationsV1alphaTriggerConfigVariables? outputVariables; /// Informs the front-end application where to draw this error catcher config /// on the UI. @@ -19229,15 +19538,19 @@ class GoogleCloudIntegrationsV1alphaTriggerConfig { : null, description: json_['description'] as core.String?, errorCatcherId: json_['errorCatcherId'] as core.String?, - inputVariables: (json_['inputVariables'] as core.List?) - ?.map((value) => value as core.String) - .toList(), + inputVariables: json_.containsKey('inputVariables') + ? GoogleCloudIntegrationsV1alphaTriggerConfigVariables.fromJson( + json_['inputVariables'] + as core.Map) + : null, label: json_['label'] as core.String?, nextTasksExecutionPolicy: json_['nextTasksExecutionPolicy'] as core.String?, - outputVariables: (json_['outputVariables'] as core.List?) - ?.map((value) => value as core.String) - .toList(), + outputVariables: json_.containsKey('outputVariables') + ? GoogleCloudIntegrationsV1alphaTriggerConfigVariables.fromJson( + json_['outputVariables'] + as core.Map) + : null, position: json_.containsKey('position') ? 
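A sketch tying the GenerateOpenApiSpec request/response types to the generateOpenApiSpec method added earlier in this file. The API class name (IntegrationsApi), the resource names, and the trigger id are assumptions; `client` is an authenticated http client as in the earlier iamcredentials sketch.

import 'package:googleapis/integrations/v1.dart' as integrations;
import 'package:http/http.dart' as http;

Future<void> printOpenApiSpec(http.Client client) async {
  final api = integrations.IntegrationsApi(client);

  final request =
      integrations.GoogleCloudIntegrationsV1alphaGenerateOpenApiSpecRequest(
    fileFormat: 'YAML',
    apiTriggerResources: [
      integrations.GoogleCloudIntegrationsV1alphaApiTriggerResource(
        // Hypothetical integration and API trigger ids.
        integrationResource:
            'projects/my-project/locations/us-central1/integrations/my-integration',
        triggerId: ['api_trigger/my-api-trigger'],
      ),
    ],
  );

  final response = await api.projects.locations.generateOpenApiSpec(
    request,
    'projects/my-project/locations/us-central1',
  );
  print(response.openApiSpec);
}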
GoogleCloudIntegrationsV1alphaCoordinate.fromJson( json_['position'] as core.Map) @@ -19281,6 +19594,10 @@ class GoogleCloudIntegrationsV1alphaTriggerConfig { }; } +/// Variables names mapped to api trigger. +typedef GoogleCloudIntegrationsV1alphaTriggerConfigVariables + = $TriggerConfigVariables; + /// Request for UnpublishIntegrationVersion. typedef GoogleCloudIntegrationsV1alphaUnpublishIntegrationVersionRequest = $Empty; diff --git a/generated/googleapis/lib/jobs/v3.dart b/generated/googleapis/lib/jobs/v3.dart index d37cf9037..51a432408 100644 --- a/generated/googleapis/lib/jobs/v3.dart +++ b/generated/googleapis/lib/jobs/v3.dart @@ -3552,7 +3552,7 @@ class NumericBucketingResult { /// with UI elements for input or editing of fields outside countries where that /// field is used. For more guidance on how to use this schema, please see: /// https://support.google.com/business/answer/6397478 -typedef PostalAddress = $PostalAddress; +typedef PostalAddress = $PostalAddress01; /// Input only. /// diff --git a/generated/googleapis/lib/jobs/v4.dart b/generated/googleapis/lib/jobs/v4.dart index 84b3b4e83..4e62fb657 100644 --- a/generated/googleapis/lib/jobs/v4.dart +++ b/generated/googleapis/lib/jobs/v4.dart @@ -3487,7 +3487,7 @@ class Operation { /// with UI elements for input or editing of fields outside countries where that /// field is used. For more guidance on how to use this schema, please see: /// https://support.google.com/business/answer/6397478 -typedef PostalAddress = $PostalAddress; +typedef PostalAddress = $PostalAddress01; /// Options for job processing. class ProcessingOptions { diff --git a/generated/googleapis/lib/language/v1.dart b/generated/googleapis/lib/language/v1.dart index f0da85caf..5b2d79f53 100644 --- a/generated/googleapis/lib/language/v1.dart +++ b/generated/googleapis/lib/language/v1.dart @@ -1615,7 +1615,32 @@ class Sentence { /// Represents the feeling associated with the entire text or entities in the /// text. -typedef Sentiment = $Sentiment; +class Sentiment { + /// A non-negative number in the \[0, +inf) range, which represents the + /// absolute magnitude of sentiment regardless of score (positive or + /// negative). + core.double? magnitude; + + /// Sentiment score between -1.0 (negative sentiment) and 1.0 (positive + /// sentiment). + core.double? score; + + Sentiment({ + this.magnitude, + this.score, + }); + + Sentiment.fromJson(core.Map json_) + : this( + magnitude: (json_['magnitude'] as core.num?)?.toDouble(), + score: (json_['score'] as core.num?)?.toDouble(), + ); + + core.Map toJson() => { + if (magnitude != null) 'magnitude': magnitude!, + if (score != null) 'score': score!, + }; +} /// Represents a text span in the input document. typedef TextSpan = $TextSpan; diff --git a/generated/googleapis/lib/language/v2.dart b/generated/googleapis/lib/language/v2.dart index 8086fe87f..897e54be9 100644 --- a/generated/googleapis/lib/language/v2.dart +++ b/generated/googleapis/lib/language/v2.dart @@ -305,7 +305,7 @@ class AnalyzeEntitiesResponse { /// The language of the text, which will be the same as the language specified /// in the request or, if not specified, the automatically-detected language. /// - /// See Document.language field for more details. + /// See Document.language_code field for more details. core.String? languageCode; /// Whether the language is officially supported. 
@@ -387,7 +387,7 @@ class AnalyzeSentimentResponse { /// The language of the text, which will be the same as the language specified /// in the request or, if not specified, the automatically-detected language. /// - /// See Document.language field for more details. + /// See Document.language_code field for more details. core.String? languageCode; /// Whether the language is officially supported. @@ -545,14 +545,13 @@ class AnnotateTextResponse { /// Entities, along with their semantic information, in the input document. /// /// Populated if the user enables - /// AnnotateTextRequest.Features.extract_entities or - /// AnnotateTextRequest.Features.extract_entity_sentiment. + /// AnnotateTextRequest.Features.extract_entities . core.List? entities; /// The language of the text, which will be the same as the language specified /// in the request or, if not specified, the automatically-detected language. /// - /// See Document.language field for more details. + /// See Document.language_code field for more details. core.String? languageCode; /// Whether the language is officially supported by all requested features. @@ -689,7 +688,7 @@ class ClassifyTextResponse { /// The language of the text, which will be the same as the language specified /// in the request or, if not specified, the automatically-detected language. /// - /// See Document.language field for more details. + /// See Document.language_code field for more details. core.String? languageCode; /// Whether the language is officially supported. @@ -801,10 +800,8 @@ class Entity { /// The representative name for the entity. core.String? name; - /// For calls to AnalyzeEntitySentiment or if - /// AnnotateTextRequest.Features.extract_entity_sentiment is set to true, this - /// field will contain the aggregate sentiment expressed for this entity in - /// the provided document. + /// For calls to AnalyzeEntitySentiment this field will contain the aggregate + /// sentiment expressed for this entity in the provided document. Sentiment? sentiment; /// The entity type. @@ -886,10 +883,8 @@ class EntityMention { /// type. The score is in (0, 1\] range. core.double? probability; - /// For calls to AnalyzeEntitySentiment or if - /// AnnotateTextRequest.Features.extract_entity_sentiment is set to true, this - /// field will contain the sentiment expressed for this mention of the entity - /// in the provided document. + /// For calls to AnalyzeEntitySentiment this field will contain the sentiment + /// expressed for this mention of the entity in the provided document. Sentiment? sentiment; /// The mention text. @@ -976,7 +971,7 @@ class ModerateTextResponse { /// The language of the text, which will be the same as the language specified /// in the request or, if not specified, the automatically-detected language. /// - /// See Document.language field for more details. + /// See Document.language_code field for more details. core.String? languageCode; /// Whether the language is officially supported. @@ -1047,7 +1042,32 @@ class Sentence { /// Represents the feeling associated with the entire text or entities in the /// text. -typedef Sentiment = $Sentiment; +class Sentiment { + /// A non-negative number in the \[0, +inf\] range, which represents the + /// absolute magnitude of sentiment regardless of score (positive or + /// negative). + core.double? magnitude; + + /// Sentiment score between -1.0 (negative sentiment) and 1.0 (positive + /// sentiment). + core.double? 
score; + + Sentiment({ + this.magnitude, + this.score, + }); + + Sentiment.fromJson(core.Map json_) + : this( + magnitude: (json_['magnitude'] as core.num?)?.toDouble(), + score: (json_['score'] as core.num?)?.toDouble(), + ); + + core.Map toJson() => { + if (magnitude != null) 'magnitude': magnitude!, + if (score != null) 'score': score!, + }; +} /// Represents a text span in the input document. typedef TextSpan = $TextSpan; diff --git a/generated/googleapis/lib/logging/v2.dart b/generated/googleapis/lib/logging/v2.dart index 43342d71a..0de879173 100644 --- a/generated/googleapis/lib/logging/v2.dart +++ b/generated/googleapis/lib/logging/v2.dart @@ -15996,7 +15996,7 @@ class LogScope { /// Names of one or more parent resources: projects/\[PROJECT_ID\]May /// alternatively be one or more views: /// projects/\[PROJECT_ID\]/locations/\[LOCATION_ID\]/buckets/\[BUCKET_ID\]/views/\[VIEW_ID\]A - /// log scope can include a maximum of 50 projects and a maximum of 100 + /// log scope can include a maximum of 5 projects and a maximum of 100 /// resources in total. /// /// Required. diff --git a/generated/googleapis/lib/looker/v1.dart b/generated/googleapis/lib/looker/v1.dart index 86f28165d..2f656d7e0 100644 --- a/generated/googleapis/lib/looker/v1.dart +++ b/generated/googleapis/lib/looker/v1.dart @@ -848,8 +848,8 @@ class ProjectsLocationsOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// [request] - The metadata request object. /// @@ -1538,6 +1538,10 @@ class Instance { /// - "LOOKER_CORE_STANDARD_ANNUAL" : Subscription Standard. /// - "LOOKER_CORE_ENTERPRISE_ANNUAL" : Subscription Enterprise. /// - "LOOKER_CORE_EMBED_ANNUAL" : Subscription Embed. + /// - "LOOKER_CORE_NONPROD_STANDARD_ANNUAL" : Nonprod Subscription Standard. + /// - "LOOKER_CORE_NONPROD_ENTERPRISE_ANNUAL" : Nonprod Subscription + /// Enterprise. + /// - "LOOKER_CORE_NONPROD_EMBED_ANNUAL" : Nonprod Subscription Embed. core.String? platformEdition; /// Whether private IP is enabled on the Looker instance. @@ -1587,6 +1591,8 @@ class Instance { core.String? updateTime; /// User metadata. + /// + /// Optional. UserMetadata? userMetadata; Instance({ @@ -2269,7 +2275,7 @@ typedef TestIamPermissionsResponse = $PermissionsResponse; /// The date and time zone are either not significant or are specified /// elsewhere. An API may choose to allow leap seconds. Related types are /// google.type.Date and `google.protobuf.Timestamp`. -typedef TimeOfDay = $TimeOfDay01; +typedef TimeOfDay = $TimeOfDay00; /// Metadata about users for a Looker instance. class UserMetadata { diff --git a/generated/googleapis/lib/metastore/v1.dart b/generated/googleapis/lib/metastore/v1.dart index de7fff94a..871c0a146 100644 --- a/generated/googleapis/lib/metastore/v1.dart +++ b/generated/googleapis/lib/metastore/v1.dart @@ -2602,89 +2602,11 @@ class ProjectsLocationsServicesMigrationExecutionsResource { } /// Request message for DataprocMetastore.AlterMetadataResourceLocation. -class AlterMetadataResourceLocationRequest { - /// The new location URI for the metadata resource. - /// - /// Required. - core.String? 
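The Sentiment message is now generated inline for both language API versions; here is a small sketch of how its two fields read, using the JSON shape from the fromJson constructor above (the numbers are made up).

import 'package:googleapis/language/v2.dart' as language;

void main() {
  final sentiment = language.Sentiment.fromJson({
    'score': -0.6, // sign carries the direction: negative vs. positive
    'magnitude': 1.8, // overall emotional weight, regardless of sign
  });

  final leaning = (sentiment.score ?? 0) < 0 ? 'negative' : 'positive';
  print('$leaning (score=${sentiment.score}, magnitude=${sentiment.magnitude})');
}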
locationUri; - - /// The relative metadata resource name in the following - /// format.databases/{database_id} or - /// databases/{database_id}/tables/{table_id} or - /// databases/{database_id}/tables/{table_id}/partitions/{partition_id} - /// - /// Required. - core.String? resourceName; - - AlterMetadataResourceLocationRequest({ - this.locationUri, - this.resourceName, - }); - - AlterMetadataResourceLocationRequest.fromJson(core.Map json_) - : this( - locationUri: json_['locationUri'] as core.String?, - resourceName: json_['resourceName'] as core.String?, - ); - - core.Map toJson() => { - if (locationUri != null) 'locationUri': locationUri!, - if (resourceName != null) 'resourceName': resourceName!, - }; -} +typedef AlterMetadataResourceLocationRequest + = $AlterMetadataResourceLocationRequest; /// Request message for DataprocMetastore.AlterTableProperties. -class AlterTablePropertiesRequest { - /// A map that describes the desired values to mutate. - /// - /// If update_mask is empty, the properties will not update. Otherwise, the - /// properties only alters the value whose associated paths exist in the - /// update mask - core.Map? properties; - - /// The name of the table containing the properties you're altering in the - /// following format.databases/{database_id}/tables/{table_id} - /// - /// Required. - core.String? tableName; - - /// A field mask that specifies the metadata table properties that are - /// overwritten by the update. - /// - /// Fields specified in the update_mask are relative to the resource (not to - /// the full request). A field is overwritten if it is in the mask.For - /// example, given the target properties: properties { a: 1 b: 2 } And an - /// update properties: properties { a: 2 b: 3 c: 4 } then if the field mask - /// is:paths: "properties.b", "properties.c"then the result will be: - /// properties { a: 1 b: 3 c: 4 } - core.String? updateMask; - - AlterTablePropertiesRequest({ - this.properties, - this.tableName, - this.updateMask, - }); - - AlterTablePropertiesRequest.fromJson(core.Map json_) - : this( - properties: - (json_['properties'] as core.Map?) - ?.map( - (key, value) => core.MapEntry( - key, - value as core.String, - ), - ), - tableName: json_['tableName'] as core.String?, - updateMask: json_['updateMask'] as core.String?, - ); - - core.Map toJson() => { - if (properties != null) 'properties': properties!, - if (tableName != null) 'tableName': tableName!, - if (updateMask != null) 'updateMask': updateMask!, - }; -} +typedef AlterTablePropertiesRequest = $AlterTablePropertiesRequest; /// Specifies the audit configuration for a service. /// @@ -3347,28 +3269,7 @@ class Consumer { /// Specifies how metastore metadata should be integrated with the Data Catalog /// service. -class DataCatalogConfig { - /// Defines whether the metastore metadata should be synced to Data Catalog. - /// - /// The default value is to disable syncing metastore metadata to Data - /// Catalog. - /// - /// Optional. - core.bool? enabled; - - DataCatalogConfig({ - this.enabled, - }); - - DataCatalogConfig.fromJson(core.Map json_) - : this( - enabled: json_['enabled'] as core.bool?, - ); - - core.Map toJson() => { - if (enabled != null) 'enabled': enabled!, - }; -} +typedef DataCatalogConfig = $DataCatalogConfig; /// A specification of the location of and metadata about a database dump from a /// relational database management system. @@ -3459,57 +3360,7 @@ class EncryptionConfig { } /// Request message for DataprocMetastore.ExportMetadata. 
-class ExportMetadataRequest { - /// The type of the database dump. - /// - /// If unspecified, defaults to MYSQL. - /// - /// Optional. - /// Possible string values are: - /// - "TYPE_UNSPECIFIED" : The type of the database dump is unknown. - /// - "MYSQL" : Database dump is a MySQL dump file. - /// - "AVRO" : Database dump contains Avro files. - core.String? databaseDumpType; - - /// A Cloud Storage URI of a folder, in the format gs:///. - /// - /// A sub-folder containing exported files will be created below it. - core.String? destinationGcsFolder; - - /// A request ID. - /// - /// Specify a unique request ID to allow the server to ignore the request if - /// it has completed. The server will ignore subsequent requests that provide - /// a duplicate request ID for at least 60 minutes after the first request.For - /// example, if an initial request times out, followed by another request with - /// the same request ID, the server ignores the second request to prevent the - /// creation of duplicate commitments.The request ID must be a valid UUID - /// (https://en.wikipedia.org/wiki/Universally_unique_identifier#Format). A - /// zero UUID (00000000-0000-0000-0000-000000000000) is not supported. - /// - /// Optional. - core.String? requestId; - - ExportMetadataRequest({ - this.databaseDumpType, - this.destinationGcsFolder, - this.requestId, - }); - - ExportMetadataRequest.fromJson(core.Map json_) - : this( - databaseDumpType: json_['databaseDumpType'] as core.String?, - destinationGcsFolder: json_['destinationGcsFolder'] as core.String?, - requestId: json_['requestId'] as core.String?, - ); - - core.Map toJson() => { - if (databaseDumpType != null) 'databaseDumpType': databaseDumpType!, - if (destinationGcsFolder != null) - 'destinationGcsFolder': destinationGcsFolder!, - if (requestId != null) 'requestId': requestId!, - }; -} +typedef ExportMetadataRequest = $ExportMetadataRequest; /// Represents a textual expression in the Common Expression Language (CEL) /// syntax. @@ -3792,56 +3643,7 @@ class KerberosConfig { } /// The details of the latest scheduled backup. -class LatestBackup { - /// The ID of an in-progress scheduled backup. - /// - /// Empty if no backup is in progress. - /// - /// Output only. - core.String? backupId; - - /// The duration of the backup completion. - /// - /// Output only. - core.String? duration; - - /// The time when the backup was started. - /// - /// Output only. - core.String? startTime; - - /// The current state of the backup. - /// - /// Output only. - /// Possible string values are: - /// - "STATE_UNSPECIFIED" : The state of the backup is unknown. - /// - "IN_PROGRESS" : The backup is in progress. - /// - "SUCCEEDED" : The backup completed. - /// - "FAILED" : The backup failed. - core.String? state; - - LatestBackup({ - this.backupId, - this.duration, - this.startTime, - this.state, - }); - - LatestBackup.fromJson(core.Map json_) - : this( - backupId: json_['backupId'] as core.String?, - duration: json_['duration'] as core.String?, - startTime: json_['startTime'] as core.String?, - state: json_['state'] as core.String?, - ); - - core.Map toJson() => { - if (backupId != null) 'backupId': backupId!, - if (duration != null) 'duration': duration!, - if (startTime != null) 'startTime': startTime!, - if (state != null) 'state': state!, - }; -} +typedef LatestBackup = $LatestBackup; /// Represents the autoscaling limit configuration of a metastore service. 
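The classes collapsed into shared typedefs in this metastore v1 file keep their wire shape; as a sketch, ExportMetadataRequest would still be constructed with the fields the removed inline class had, assuming the shared $ExportMetadataRequest mirrors them (which is what the consolidation implies). The bucket path is a placeholder.

import 'package:googleapis/metastore/v1.dart' as metastore;

// Same field names as before the typedef swap.
final exportRequest = metastore.ExportMetadataRequest(
  destinationGcsFolder: 'gs://my-bucket/metastore-exports/',
  databaseDumpType: 'MYSQL',
);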
class LimitConfig { @@ -4463,41 +4265,7 @@ class MigrationExecution { } /// Request message for DataprocMetastore.MoveTableToDatabase. -class MoveTableToDatabaseRequest { - /// The name of the database where the table resides. - /// - /// Required. - core.String? dbName; - - /// The name of the database where the table should be moved. - /// - /// Required. - core.String? destinationDbName; - - /// The name of the table to be moved. - /// - /// Required. - core.String? tableName; - - MoveTableToDatabaseRequest({ - this.dbName, - this.destinationDbName, - this.tableName, - }); - - MoveTableToDatabaseRequest.fromJson(core.Map json_) - : this( - dbName: json_['dbName'] as core.String?, - destinationDbName: json_['destinationDbName'] as core.String?, - tableName: json_['tableName'] as core.String?, - ); - - core.Map toJson() => { - if (dbName != null) 'dbName': dbName!, - if (destinationDbName != null) 'destinationDbName': destinationDbName!, - if (tableName != null) 'tableName': tableName!, - }; -} +typedef MoveTableToDatabaseRequest = $MoveTableToDatabaseRequest; /// Network configuration for the Dataproc Metastore service. class NetworkConfig { @@ -4714,27 +4482,7 @@ class Policy { } /// Request message for DataprocMetastore.QueryMetadata. -class QueryMetadataRequest { - /// A read-only SQL query to execute against the metadata database. - /// - /// The query cannot change or mutate the data. - /// - /// Required. - core.String? query; - - QueryMetadataRequest({ - this.query, - }); - - QueryMetadataRequest.fromJson(core.Map json_) - : this( - query: json_['query'] as core.String?, - ); - - core.Map toJson() => { - if (query != null) 'query': query!, - }; -} +typedef QueryMetadataRequest = $QueryMetadataRequest; /// The details of a metadata restore operation. class Restore { @@ -4819,74 +4567,8 @@ class Restore { }; } -/// Request message for DataprocMetastore.Restore. -class RestoreServiceRequest { - /// The relative resource name of the metastore service backup to restore - /// from, in the following - /// form:projects/{project_id}/locations/{location_id}/services/{service_id}/backups/{backup_id}. - /// - /// Mutually exclusive with backup_location, and exactly one of the two must - /// be set. - /// - /// Optional. - core.String? backup; - - /// A Cloud Storage URI specifying the location of the backup artifacts, - /// namely - backup avro files under "avro/", backup_metastore.json and - /// service.json, in the following form:gs://. - /// - /// Mutually exclusive with backup, and exactly one of the two must be set. - /// - /// Optional. - core.String? backupLocation; - - /// A request ID. - /// - /// Specify a unique request ID to allow the server to ignore the request if - /// it has completed. The server will ignore subsequent requests that provide - /// a duplicate request ID for at least 60 minutes after the first request.For - /// example, if an initial request times out, followed by another request with - /// the same request ID, the server ignores the second request to prevent the - /// creation of duplicate commitments.The request ID must be a valid UUID - /// (https://en.wikipedia.org/wiki/Universally_unique_identifier#Format). A - /// zero UUID (00000000-0000-0000-0000-000000000000) is not supported. - /// - /// Optional. - core.String? requestId; - - /// The type of restore. - /// - /// If unspecified, defaults to METADATA_ONLY. - /// - /// Optional. - /// Possible string values are: - /// - "RESTORE_TYPE_UNSPECIFIED" : The restore type is unknown. 
- /// - "FULL" : The service's metadata and configuration are restored. - /// - "METADATA_ONLY" : Only the service's metadata is restored. - core.String? restoreType; - - RestoreServiceRequest({ - this.backup, - this.backupLocation, - this.requestId, - this.restoreType, - }); - - RestoreServiceRequest.fromJson(core.Map json_) - : this( - backup: json_['backup'] as core.String?, - backupLocation: json_['backupLocation'] as core.String?, - requestId: json_['requestId'] as core.String?, - restoreType: json_['restoreType'] as core.String?, - ); - - core.Map toJson() => { - if (backup != null) 'backup': backup!, - if (backupLocation != null) 'backupLocation': backupLocation!, - if (requestId != null) 'requestId': requestId!, - if (restoreType != null) 'restoreType': restoreType!, - }; -} +/// Request message for DataprocMetastore.RestoreService. +typedef RestoreServiceRequest = $RestoreServiceRequest; /// Represents the scaling configuration of a metastore service. class ScalingConfig { diff --git a/generated/googleapis/lib/metastore/v2.dart b/generated/googleapis/lib/metastore/v2.dart new file mode 100644 index 000000000..20bd729c1 --- /dev/null +++ b/generated/googleapis/lib/metastore/v2.dart @@ -0,0 +1,1733 @@ +// This is a generated file (see the discoveryapis_generator project). + +// ignore_for_file: camel_case_types +// ignore_for_file: comment_references +// ignore_for_file: deprecated_member_use_from_same_package +// ignore_for_file: doc_directive_unknown +// ignore_for_file: lines_longer_than_80_chars +// ignore_for_file: non_constant_identifier_names +// ignore_for_file: prefer_interpolation_to_compose_strings +// ignore_for_file: unintended_html_in_doc_comment +// ignore_for_file: unnecessary_brace_in_string_interps +// ignore_for_file: unnecessary_lambdas +// ignore_for_file: unnecessary_string_interpolations + +/// Dataproc Metastore API - v2 +/// +/// The Dataproc Metastore API is used to manage the lifecycle and configuration +/// of metastore services. +/// +/// For more information, see +/// +/// Create an instance of [DataprocMetastoreApi] to access these resources: +/// +/// - [ProjectsResource] +/// - [ProjectsLocationsResource] +/// - [ProjectsLocationsServicesResource] +/// - [ProjectsLocationsServicesBackupsResource] +library; + +import 'dart:async' as async; +import 'dart:convert' as convert; +import 'dart:core' as core; + +import 'package:_discoveryapis_commons/_discoveryapis_commons.dart' as commons; +import 'package:http/http.dart' as http; + +import '../shared.dart'; +import '../src/user_agent.dart'; + +export 'package:_discoveryapis_commons/_discoveryapis_commons.dart' + show ApiRequestError, DetailedApiRequestError; + +/// The Dataproc Metastore API is used to manage the lifecycle and configuration +/// of metastore services. +class DataprocMetastoreApi { + /// See, edit, configure, and delete your Google Cloud data and see the email + /// address for your Google Account. 
+  static const cloudPlatformScope =
+      'https://www.googleapis.com/auth/cloud-platform';
+
+  final commons.ApiRequester _requester;
+
+  ProjectsResource get projects => ProjectsResource(_requester);
+
+  DataprocMetastoreApi(http.Client client,
+      {core.String rootUrl = 'https://metastore.googleapis.com/',
+      core.String servicePath = ''})
+      : _requester =
+            commons.ApiRequester(client, rootUrl, servicePath, requestHeaders);
+}
+
+class ProjectsResource {
+  final commons.ApiRequester _requester;
+
+  ProjectsLocationsResource get locations =>
+      ProjectsLocationsResource(_requester);
+
+  ProjectsResource(commons.ApiRequester client) : _requester = client;
+}
+
+class ProjectsLocationsResource {
+  final commons.ApiRequester _requester;
+
+  ProjectsLocationsServicesResource get services =>
+      ProjectsLocationsServicesResource(_requester);
+
+  ProjectsLocationsResource(commons.ApiRequester client) : _requester = client;
+}
+
+class ProjectsLocationsServicesResource {
+  final commons.ApiRequester _requester;
+
+  ProjectsLocationsServicesBackupsResource get backups =>
+      ProjectsLocationsServicesBackupsResource(_requester);
+
+  ProjectsLocationsServicesResource(commons.ApiRequester client)
+      : _requester = client;
+
+  /// Alter metadata resource location.
+  ///
+  /// The metadata resource can be a database, table, or partition. This
+  /// functionality only updates the parent directory for the respective
+  /// metadata resource and does not transfer any existing data to the new
+  /// location.
+  ///
+  /// [request] - The metadata request object.
+  ///
+  /// Request parameters:
+  ///
+  /// [service] - Required. The relative resource name of the metastore service
+  /// to mutate metadata, in the following
+  /// format:projects/{project_id}/locations/{location_id}/services/{service_id}.
+  /// Value must have pattern
+  /// `^projects/\[^/\]+/locations/\[^/\]+/services/\[^/\]+$`.
+  ///
+  /// [$fields] - Selector specifying which fields to include in a partial
+  /// response.
+  ///
+  /// Completes with a [GoogleLongrunningOperation].
+  ///
+  /// Completes with a [commons.ApiRequestError] if the API endpoint returned an
+  /// error.
+  ///
+  /// If the used [http.Client] completes with an error when making a REST call,
+  /// this method will complete with the same error.
+  async.Future<GoogleLongrunningOperation> alterLocation(
+    GoogleCloudMetastoreV2AlterMetadataResourceLocationRequest request,
+    core.String service, {
+    core.String? $fields,
+  }) async {
+    final body_ = convert.json.encode(request);
+    final queryParams_ = <core.String, core.List<core.String>>{
+      if ($fields != null) 'fields': [$fields],
+    };
+
+    final url_ = 'v2/' + core.Uri.encodeFull('$service') + ':alterLocation';
+
+    final response_ = await _requester.request(
+      url_,
+      'POST',
+      body: body_,
+      queryParams: queryParams_,
+    );
+    return GoogleLongrunningOperation.fromJson(
+        response_ as core.Map<core.String, core.dynamic>);
+  }
+
+  /// Alter metadata table properties.
+  ///
+  /// [request] - The metadata request object.
+  ///
+  /// Request parameters:
+  ///
+  /// [service] - Required. The relative resource name of the Dataproc Metastore
+  /// service that's being used to mutate metadata table properties, in the
+  /// following
+  /// format:projects/{project_id}/locations/{location_id}/services/{service_id}.
+  /// Value must have pattern
+  /// `^projects/\[^/\]+/locations/\[^/\]+/services/\[^/\]+$`.
+  ///
+  /// [$fields] - Selector specifying which fields to include in a partial
+  /// response.
+  ///
+  /// Completes with a [GoogleLongrunningOperation].
+ /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future alterTableProperties( + GoogleCloudMetastoreV2AlterTablePropertiesRequest request, + core.String service, { + core.String? $fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = + 'v2/' + core.Uri.encodeFull('$service') + ':alterTableProperties'; + + final response_ = await _requester.request( + url_, + 'POST', + body: body_, + queryParams: queryParams_, + ); + return GoogleLongrunningOperation.fromJson( + response_ as core.Map); + } + + /// Creates a metastore service in a project and location. + /// + /// [request] - The metadata request object. + /// + /// Request parameters: + /// + /// [parent] - Required. The relative resource name of the location in which + /// to create a metastore service, in the following + /// form:projects/{project_number}/locations/{location_id}. + /// Value must have pattern `^projects/\[^/\]+/locations/\[^/\]+$`. + /// + /// [requestId] - Optional. A request ID. Specify a unique request ID to allow + /// the server to ignore the request if it has completed. The server will + /// ignore subsequent requests that provide a duplicate request ID for at + /// least 60 minutes after the first request.For example, if an initial + /// request times out, followed by another request with the same request ID, + /// the server ignores the second request to prevent the creation of duplicate + /// commitments.The request ID must be a valid UUID + /// (https://en.wikipedia.org/wiki/Universally_unique_identifier#Format) A + /// zero UUID (00000000-0000-0000-0000-000000000000) is not supported. + /// + /// [serviceId] - Required. The ID of the metastore service, which is used as + /// the final component of the metastore service's name.This value must be + /// between 2 and 63 characters long inclusive, begin with a letter, end with + /// a letter or number, and consist of alpha-numeric ASCII characters or + /// hyphens. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [GoogleLongrunningOperation]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future create( + GoogleCloudMetastoreV2Service request, + core.String parent, { + core.String? requestId, + core.String? serviceId, + core.String? $fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if (requestId != null) 'requestId': [requestId], + if (serviceId != null) 'serviceId': [serviceId], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v2/' + core.Uri.encodeFull('$parent') + '/services'; + + final response_ = await _requester.request( + url_, + 'POST', + body: body_, + queryParams: queryParams_, + ); + return GoogleLongrunningOperation.fromJson( + response_ as core.Map); + } + + /// Deletes a single service. + /// + /// Request parameters: + /// + /// [name] - Required. The relative resource name of the metastore service to + /// delete, in the following + /// form:projects/{project_number}/locations/{location_id}/services/{service_id}. 
+ /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/services/\[^/\]+$`. + /// + /// [requestId] - Optional. A request ID. Specify a unique request ID to allow + /// the server to ignore the request if it has completed. The server will + /// ignore subsequent requests that provide a duplicate request ID for at + /// least 60 minutes after the first request.For example, if an initial + /// request times out, followed by another request with the same request ID, + /// the server ignores the second request to prevent the creation of duplicate + /// commitments.The request ID must be a valid UUID + /// (https://en.wikipedia.org/wiki/Universally_unique_identifier#Format) A + /// zero UUID (00000000-0000-0000-0000-000000000000) is not supported. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [GoogleLongrunningOperation]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future delete( + core.String name, { + core.String? requestId, + core.String? $fields, + }) async { + final queryParams_ = >{ + if (requestId != null) 'requestId': [requestId], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v2/' + core.Uri.encodeFull('$name'); + + final response_ = await _requester.request( + url_, + 'DELETE', + queryParams: queryParams_, + ); + return GoogleLongrunningOperation.fromJson( + response_ as core.Map); + } + + /// Exports metadata from a service. + /// + /// [request] - The metadata request object. + /// + /// Request parameters: + /// + /// [service] - Required. The relative resource name of the metastore service + /// to run export, in the following + /// form:projects/{project_id}/locations/{location_id}/services/{service_id}. + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/services/\[^/\]+$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [GoogleLongrunningOperation]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future exportMetadata( + GoogleCloudMetastoreV2ExportMetadataRequest request, + core.String service, { + core.String? $fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v2/' + core.Uri.encodeFull('$service') + ':exportMetadata'; + + final response_ = await _requester.request( + url_, + 'POST', + body: body_, + queryParams: queryParams_, + ); + return GoogleLongrunningOperation.fromJson( + response_ as core.Map); + } + + /// Gets the details of a single service. + /// + /// Request parameters: + /// + /// [name] - Required. The relative resource name of the metastore service to + /// retrieve, in the following + /// form:projects/{project_number}/locations/{location_id}/services/{service_id}. + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/services/\[^/\]+$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [GoogleCloudMetastoreV2Service]. 
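+  ///
+  /// A brief usage sketch (not part of the generated surface; `client` is
+  /// assumed to be an already-authenticated [http.Client], for example from
+  /// `package:googleapis_auth`, and the resource name is a placeholder):
+  ///
+  /// ```dart
+  /// final api = DataprocMetastoreApi(client);
+  /// final service = await api.projects.locations.services.get(
+  ///   'projects/my-project/locations/us-central1/services/my-service',
+  /// );
+  /// print('${service.name}: ${service.state}');
+  /// ```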
+ /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future get( + core.String name, { + core.String? $fields, + }) async { + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v2/' + core.Uri.encodeFull('$name'); + + final response_ = await _requester.request( + url_, + 'GET', + queryParams: queryParams_, + ); + return GoogleCloudMetastoreV2Service.fromJson( + response_ as core.Map); + } + + /// Imports Metadata into a Dataproc Metastore service. + /// + /// [request] - The metadata request object. + /// + /// Request parameters: + /// + /// [name] - Immutable. The relative resource name of the metastore service to + /// run import, in the following + /// form:projects/{project_id}/locations/{location_id}/services/{service_id}. + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/services/\[^/\]+$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [GoogleLongrunningOperation]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future importMetadata( + GoogleCloudMetastoreV2ImportMetadataRequest request, + core.String name, { + core.String? $fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v2/' + core.Uri.encodeFull('$name') + ':importMetadata'; + + final response_ = await _requester.request( + url_, + 'POST', + body: body_, + queryParams: queryParams_, + ); + return GoogleLongrunningOperation.fromJson( + response_ as core.Map); + } + + /// Lists services in a project and location. + /// + /// Request parameters: + /// + /// [parent] - Required. The relative resource name of the location of + /// metastore services to list, in the following + /// form:projects/{project_number}/locations/{location_id}. + /// Value must have pattern `^projects/\[^/\]+/locations/\[^/\]+$`. + /// + /// [filter] - Optional. The filter to apply to list results. + /// + /// [orderBy] - Optional. Specify the ordering of results as described in + /// Sorting Order + /// (https://cloud.google.com/apis/design/design_patterns#sorting_order). If + /// not specified, the results will be sorted in the default order. + /// + /// [pageSize] - Optional. The maximum number of services to return. The + /// response may contain less than the maximum number. If unspecified, no more + /// than 500 services are returned. The maximum value is 1000; values above + /// 1000 are changed to 1000. + /// + /// [pageToken] - Optional. A page token, received from a previous + /// DataprocMetastore.ListServices call. Provide this token to retrieve the + /// subsequent page.To retrieve the first page, supply an empty page + /// token.When paginating, other parameters provided to + /// DataprocMetastore.ListServices must match the call that provided the page + /// token. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [GoogleCloudMetastoreV2ListServicesResponse]. 
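+  ///
+  /// A brief pagination sketch (not part of the generated surface; `client`
+  /// is assumed to be an authenticated [http.Client] and the parent name is a
+  /// placeholder). It follows `nextPageToken` until all pages are consumed:
+  ///
+  /// ```dart
+  /// final api = DataprocMetastoreApi(client);
+  /// String? pageToken;
+  /// do {
+  ///   final page = await api.projects.locations.services.list(
+  ///     'projects/my-project/locations/us-central1',
+  ///     pageToken: pageToken,
+  ///   );
+  ///   for (final service
+  ///       in page.services ?? const <GoogleCloudMetastoreV2Service>[]) {
+  ///     print(service.name);
+  ///   }
+  ///   pageToken = page.nextPageToken;
+  /// } while (pageToken != null);
+  /// ```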
+ /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future list( + core.String parent, { + core.String? filter, + core.String? orderBy, + core.int? pageSize, + core.String? pageToken, + core.String? $fields, + }) async { + final queryParams_ = >{ + if (filter != null) 'filter': [filter], + if (orderBy != null) 'orderBy': [orderBy], + if (pageSize != null) 'pageSize': ['${pageSize}'], + if (pageToken != null) 'pageToken': [pageToken], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v2/' + core.Uri.encodeFull('$parent') + '/services'; + + final response_ = await _requester.request( + url_, + 'GET', + queryParams: queryParams_, + ); + return GoogleCloudMetastoreV2ListServicesResponse.fromJson( + response_ as core.Map); + } + + /// Move a table to another database. + /// + /// [request] - The metadata request object. + /// + /// Request parameters: + /// + /// [service] - Required. The relative resource name of the metastore service + /// to mutate metadata, in the following + /// format:projects/{project_id}/locations/{location_id}/services/{service_id}. + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/services/\[^/\]+$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [GoogleLongrunningOperation]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future moveTableToDatabase( + GoogleCloudMetastoreV2MoveTableToDatabaseRequest request, + core.String service, { + core.String? $fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = + 'v2/' + core.Uri.encodeFull('$service') + ':moveTableToDatabase'; + + final response_ = await _requester.request( + url_, + 'POST', + body: body_, + queryParams: queryParams_, + ); + return GoogleLongrunningOperation.fromJson( + response_ as core.Map); + } + + /// Updates the parameters of a single service. + /// + /// [request] - The metadata request object. + /// + /// Request parameters: + /// + /// [name] - Immutable. The relative resource name of the metastore service, + /// in the following + /// format:projects/{project_number}/locations/{location_id}/services/{service_id}. + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/services/\[^/\]+$`. + /// + /// [requestId] - Optional. A request ID. Specify a unique request ID to allow + /// the server to ignore the request if it has completed. The server will + /// ignore subsequent requests that provide a duplicate request ID for at + /// least 60 minutes after the first request.For example, if an initial + /// request times out, followed by another request with the same request ID, + /// the server ignores the second request to prevent the creation of duplicate + /// commitments.The request ID must be a valid UUID + /// (https://en.wikipedia.org/wiki/Universally_unique_identifier#Format) A + /// zero UUID (00000000-0000-0000-0000-000000000000) is not supported. + /// + /// [updateMask] - Required. 
A field mask used to specify the fields to be + /// overwritten in the metastore service resource by the update. Fields + /// specified in the update_mask are relative to the resource (not to the full + /// request). A field is overwritten if it is in the mask. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [GoogleLongrunningOperation]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future patch( + GoogleCloudMetastoreV2Service request, + core.String name, { + core.String? requestId, + core.String? updateMask, + core.String? $fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if (requestId != null) 'requestId': [requestId], + if (updateMask != null) 'updateMask': [updateMask], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v2/' + core.Uri.encodeFull('$name'); + + final response_ = await _requester.request( + url_, + 'PATCH', + body: body_, + queryParams: queryParams_, + ); + return GoogleLongrunningOperation.fromJson( + response_ as core.Map); + } + + /// Query Dataproc Metastore metadata. + /// + /// [request] - The metadata request object. + /// + /// Request parameters: + /// + /// [service] - Required. The relative resource name of the metastore service + /// to query metadata, in the following + /// format:projects/{project_id}/locations/{location_id}/services/{service_id}. + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/services/\[^/\]+$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [GoogleLongrunningOperation]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future queryMetadata( + GoogleCloudMetastoreV2QueryMetadataRequest request, + core.String service, { + core.String? $fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v2/' + core.Uri.encodeFull('$service') + ':queryMetadata'; + + final response_ = await _requester.request( + url_, + 'POST', + body: body_, + queryParams: queryParams_, + ); + return GoogleLongrunningOperation.fromJson( + response_ as core.Map); + } + + /// Restores a service from a backup. + /// + /// [request] - The metadata request object. + /// + /// Request parameters: + /// + /// [service] - Required. The relative resource name of the metastore service + /// to run restore, in the following + /// form:projects/{project_id}/locations/{location_id}/services/{service_id}. + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/services/\[^/\]+$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [GoogleLongrunningOperation]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. 
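+  ///
+  /// A brief usage sketch (not part of the generated surface; `client` is
+  /// assumed to be an authenticated [http.Client], the service and backup
+  /// names are placeholders, and the `backup` field comes from the shared
+  /// request message):
+  ///
+  /// ```dart
+  /// final api = DataprocMetastoreApi(client);
+  /// final operation = await api.projects.locations.services.restore(
+  ///   GoogleCloudMetastoreV2RestoreServiceRequest(
+  ///     backup: 'projects/my-project/locations/us-central1'
+  ///         '/services/my-service/backups/my-backup',
+  ///   ),
+  ///   'projects/my-project/locations/us-central1/services/my-service',
+  /// );
+  /// print(operation.name);
+  /// ```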
+ async.Future restore( + GoogleCloudMetastoreV2RestoreServiceRequest request, + core.String service, { + core.String? $fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v2/' + core.Uri.encodeFull('$service') + ':restore'; + + final response_ = await _requester.request( + url_, + 'POST', + body: body_, + queryParams: queryParams_, + ); + return GoogleLongrunningOperation.fromJson( + response_ as core.Map); + } +} + +class ProjectsLocationsServicesBackupsResource { + final commons.ApiRequester _requester; + + ProjectsLocationsServicesBackupsResource(commons.ApiRequester client) + : _requester = client; + + /// Creates a new backup in a given project and location. + /// + /// [request] - The metadata request object. + /// + /// Request parameters: + /// + /// [parent] - Required. The relative resource name of the service in which to + /// create a backup of the following + /// form:projects/{project_number}/locations/{location_id}/services/{service_id}. + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/services/\[^/\]+$`. + /// + /// [backupId] - Required. The ID of the backup, which is used as the final + /// component of the backup's name.This value must be between 1 and 64 + /// characters long, begin with a letter, end with a letter or number, and + /// consist of alpha-numeric ASCII characters or hyphens. + /// + /// [requestId] - Optional. A request ID. Specify a unique request ID to allow + /// the server to ignore the request if it has completed. The server will + /// ignore subsequent requests that provide a duplicate request ID for at + /// least 60 minutes after the first request.For example, if an initial + /// request times out, followed by another request with the same request ID, + /// the server ignores the second request to prevent the creation of duplicate + /// commitments.The request ID must be a valid UUID + /// (https://en.wikipedia.org/wiki/Universally_unique_identifier#Format) A + /// zero UUID (00000000-0000-0000-0000-000000000000) is not supported. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [GoogleLongrunningOperation]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future create( + GoogleCloudMetastoreV2Backup request, + core.String parent, { + core.String? backupId, + core.String? requestId, + core.String? $fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if (backupId != null) 'backupId': [backupId], + if (requestId != null) 'requestId': [requestId], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v2/' + core.Uri.encodeFull('$parent') + '/backups'; + + final response_ = await _requester.request( + url_, + 'POST', + body: body_, + queryParams: queryParams_, + ); + return GoogleLongrunningOperation.fromJson( + response_ as core.Map); + } + + /// Deletes a single backup. + /// + /// Request parameters: + /// + /// [name] - Required. The relative resource name of the backup to delete, in + /// the following + /// form:projects/{project_number}/locations/{location_id}/services/{service_id}/backups/{backup_id}. 
+ /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/services/\[^/\]+/backups/\[^/\]+$`. + /// + /// [requestId] - Optional. A request ID. Specify a unique request ID to allow + /// the server to ignore the request if it has completed. The server will + /// ignore subsequent requests that provide a duplicate request ID for at + /// least 60 minutes after the first request.For example, if an initial + /// request times out, followed by another request with the same request ID, + /// the server ignores the second request to prevent the creation of duplicate + /// commitments.The request ID must be a valid UUID + /// (https://en.wikipedia.org/wiki/Universally_unique_identifier#Format) A + /// zero UUID (00000000-0000-0000-0000-000000000000) is not supported. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [GoogleLongrunningOperation]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future delete( + core.String name, { + core.String? requestId, + core.String? $fields, + }) async { + final queryParams_ = >{ + if (requestId != null) 'requestId': [requestId], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v2/' + core.Uri.encodeFull('$name'); + + final response_ = await _requester.request( + url_, + 'DELETE', + queryParams: queryParams_, + ); + return GoogleLongrunningOperation.fromJson( + response_ as core.Map); + } + + /// Gets details of a single backup. + /// + /// Request parameters: + /// + /// [name] - Required. The relative resource name of the backup to retrieve, + /// in the following + /// form:projects/{project_number}/locations/{location_id}/services/{service_id}/backups/{backup_id}. + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/services/\[^/\]+/backups/\[^/\]+$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [GoogleCloudMetastoreV2Backup]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future get( + core.String name, { + core.String? $fields, + }) async { + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v2/' + core.Uri.encodeFull('$name'); + + final response_ = await _requester.request( + url_, + 'GET', + queryParams: queryParams_, + ); + return GoogleCloudMetastoreV2Backup.fromJson( + response_ as core.Map); + } + + /// Lists backups in a service. + /// + /// Request parameters: + /// + /// [parent] - Required. The relative resource name of the service whose + /// backups to list, in the following + /// form:projects/{project_number}/locations/{location_id}/services/{service_id}/backups. + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/services/\[^/\]+$`. + /// + /// [filter] - Optional. The filter to apply to list results. + /// + /// [orderBy] - Optional. Specify the ordering of results as described in + /// Sorting Order + /// (https://cloud.google.com/apis/design/design_patterns#sorting_order). If + /// not specified, the results will be sorted in the default order. 
+ /// + /// [pageSize] - Optional. The maximum number of backups to return. The + /// response may contain less than the maximum number. If unspecified, no more + /// than 500 backups are returned. The maximum value is 1000; values above + /// 1000 are changed to 1000. + /// + /// [pageToken] - Optional. A page token, received from a previous + /// DataprocMetastore.ListBackups call. Provide this token to retrieve the + /// subsequent page.To retrieve the first page, supply an empty page + /// token.When paginating, other parameters provided to + /// DataprocMetastore.ListBackups must match the call that provided the page + /// token. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [GoogleCloudMetastoreV2ListBackupsResponse]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future list( + core.String parent, { + core.String? filter, + core.String? orderBy, + core.int? pageSize, + core.String? pageToken, + core.String? $fields, + }) async { + final queryParams_ = >{ + if (filter != null) 'filter': [filter], + if (orderBy != null) 'orderBy': [orderBy], + if (pageSize != null) 'pageSize': ['${pageSize}'], + if (pageToken != null) 'pageToken': [pageToken], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v2/' + core.Uri.encodeFull('$parent') + '/backups'; + + final response_ = await _requester.request( + url_, + 'GET', + queryParams: queryParams_, + ); + return GoogleCloudMetastoreV2ListBackupsResponse.fromJson( + response_ as core.Map); + } +} + +/// Request message for DataprocMetastore.AlterMetadataResourceLocation. +typedef GoogleCloudMetastoreV2AlterMetadataResourceLocationRequest + = $AlterMetadataResourceLocationRequest; + +/// Request message for DataprocMetastore.AlterTableProperties. +typedef GoogleCloudMetastoreV2AlterTablePropertiesRequest + = $AlterTablePropertiesRequest; + +/// Configuration information for the auxiliary service versions. +class GoogleCloudMetastoreV2AuxiliaryVersionConfig { + /// A mapping of Hive metastore configuration key-value pairs to apply to the + /// auxiliary Hive metastore (configured in hive-site.xml) in addition to the + /// primary version's overrides. + /// + /// If keys are present in both the auxiliary version's overrides and the + /// primary version's overrides, the value from the auxiliary version's + /// overrides takes precedence. + core.Map? configOverrides; + + /// The list of endpoints used to access the auxiliary metastore service, + /// includes version and region data. + /// + /// Output only. + core.List? endpoints; + + /// The Hive metastore version of the auxiliary service. + /// + /// It must be less than the primary Hive metastore service's version. + core.String? version; + + GoogleCloudMetastoreV2AuxiliaryVersionConfig({ + this.configOverrides, + this.endpoints, + this.version, + }); + + GoogleCloudMetastoreV2AuxiliaryVersionConfig.fromJson(core.Map json_) + : this( + configOverrides: + (json_['configOverrides'] as core.Map?) + ?.map( + (key, value) => core.MapEntry( + key, + value as core.String, + ), + ), + endpoints: (json_['endpoints'] as core.List?) 
+ ?.map((value) => GoogleCloudMetastoreV2Endpoint.fromJson( + value as core.Map)) + .toList(), + version: json_['version'] as core.String?, + ); + + core.Map toJson() => { + if (configOverrides != null) 'configOverrides': configOverrides!, + if (endpoints != null) 'endpoints': endpoints!, + if (version != null) 'version': version!, + }; +} + +/// The details of a backup resource. +class GoogleCloudMetastoreV2Backup { + /// The time when the backup was started. + /// + /// Output only. + core.String? createTime; + + /// The description of the backup. + core.String? description; + + /// The time when the backup finished creating. + /// + /// Output only. + core.String? endTime; + + /// The relative resource name of the backup, in the following + /// form:projects/{project_number}/locations/{location_id}/services/{service_id}/backups/{backup_id} + /// + /// Immutable. + core.String? name; + + /// Services that are restoring from the backup. + /// + /// Output only. + core.List? restoringServices; + + /// The revision of the service at the time of backup. + /// + /// Output only. + GoogleCloudMetastoreV2Service? serviceRevision; + + /// The current state of the backup. + /// + /// Output only. + /// Possible string values are: + /// - "STATE_UNSPECIFIED" : The state of the backup is unknown. + /// - "CREATING" : The backup is being created. + /// - "DELETING" : The backup is being deleted. + /// - "ACTIVE" : The backup is active and ready to use. + /// - "FAILED" : The backup failed. + /// - "RESTORING" : The backup is being restored. + core.String? state; + + GoogleCloudMetastoreV2Backup({ + this.createTime, + this.description, + this.endTime, + this.name, + this.restoringServices, + this.serviceRevision, + this.state, + }); + + GoogleCloudMetastoreV2Backup.fromJson(core.Map json_) + : this( + createTime: json_['createTime'] as core.String?, + description: json_['description'] as core.String?, + endTime: json_['endTime'] as core.String?, + name: json_['name'] as core.String?, + restoringServices: (json_['restoringServices'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + serviceRevision: json_.containsKey('serviceRevision') + ? GoogleCloudMetastoreV2Service.fromJson(json_['serviceRevision'] + as core.Map) + : null, + state: json_['state'] as core.String?, + ); + + core.Map toJson() => { + if (createTime != null) 'createTime': createTime!, + if (description != null) 'description': description!, + if (endTime != null) 'endTime': endTime!, + if (name != null) 'name': name!, + if (restoringServices != null) 'restoringServices': restoringServices!, + if (serviceRevision != null) 'serviceRevision': serviceRevision!, + if (state != null) 'state': state!, + }; +} + +/// Specifies how metastore metadata should be integrated with the Data Catalog +/// service. +typedef GoogleCloudMetastoreV2DataCatalogConfig = $DataCatalogConfig; + +/// A specification of the location of and metadata about a database dump from a +/// relational database management system. +class GoogleCloudMetastoreV2DatabaseDump { + /// A Cloud Storage object or folder URI that specifies the source from which + /// to import metadata. + /// + /// It must begin with gs://. + /// + /// Required. + core.String? gcsUri; + + /// The type of the database dump. + /// + /// If unspecified, defaults to MYSQL. + /// + /// Optional. + /// Possible string values are: + /// - "TYPE_UNSPECIFIED" : The type of the database dump is unknown. + /// - "MYSQL" : Database dump is a MySQL dump file. 
+ /// - "AVRO" : Database dump contains Avro files. + core.String? type; + + GoogleCloudMetastoreV2DatabaseDump({ + this.gcsUri, + this.type, + }); + + GoogleCloudMetastoreV2DatabaseDump.fromJson(core.Map json_) + : this( + gcsUri: json_['gcsUri'] as core.String?, + type: json_['type'] as core.String?, + ); + + core.Map toJson() => { + if (gcsUri != null) 'gcsUri': gcsUri!, + if (type != null) 'type': type!, + }; +} + +/// Encryption settings for the service. +typedef GoogleCloudMetastoreV2EncryptionConfig = $Empty; + +/// An endpoint used to access the metastore service. +class GoogleCloudMetastoreV2Endpoint { + /// The URI of the endpoint used to access the metastore service. + /// + /// Output only. + core.String? endpointUri; + + /// The region where the endpoint is located. + /// + /// Output only. + core.String? region; + + GoogleCloudMetastoreV2Endpoint({ + this.endpointUri, + this.region, + }); + + GoogleCloudMetastoreV2Endpoint.fromJson(core.Map json_) + : this( + endpointUri: json_['endpointUri'] as core.String?, + region: json_['region'] as core.String?, + ); + + core.Map toJson() => { + if (endpointUri != null) 'endpointUri': endpointUri!, + if (region != null) 'region': region!, + }; +} + +/// Request message for DataprocMetastore.ExportMetadata. +typedef GoogleCloudMetastoreV2ExportMetadataRequest = $ExportMetadataRequest; + +/// Specifies configuration information specific to running Hive metastore +/// software as the metastore service. +class GoogleCloudMetastoreV2HiveMetastoreConfig { + /// A mapping of Hive metastore version to the auxiliary version + /// configuration. + /// + /// When specified, a secondary Hive metastore service is created along with + /// the primary service. All auxiliary versions must be less than the + /// service's primary version. The key is the auxiliary service name and it + /// must match the regular expression a-z?. This means that the first + /// character must be a lowercase letter, and all the following characters + /// must be hyphens, lowercase letters, or digits, except the last character, + /// which cannot be a hyphen. + /// + /// Optional. + core.Map? + auxiliaryVersions; + + /// A mapping of Hive metastore configuration key-value pairs to apply to the + /// Hive metastore (configured in hive-site.xml). + /// + /// The mappings override system defaults (some keys cannot be overridden). + /// These overrides are also applied to auxiliary versions and can be further + /// customized in the auxiliary version's AuxiliaryVersionConfig. + /// + /// Optional. + core.Map? configOverrides; + + /// The protocol to use for the metastore service endpoint. + /// + /// If unspecified, defaults to GRPC. + /// + /// Optional. + /// Possible string values are: + /// - "ENDPOINT_PROTOCOL_UNSPECIFIED" : The protocol is not set. + /// - "THRIFT" : Use the legacy Apache Thrift protocol for the metastore + /// service endpoint. + /// - "GRPC" : Use the modernized gRPC protocol for the metastore service + /// endpoint. + core.String? endpointProtocol; + + /// The Hive metastore schema version. + /// + /// Immutable. + core.String? version; + + GoogleCloudMetastoreV2HiveMetastoreConfig({ + this.auxiliaryVersions, + this.configOverrides, + this.endpointProtocol, + this.version, + }); + + GoogleCloudMetastoreV2HiveMetastoreConfig.fromJson(core.Map json_) + : this( + auxiliaryVersions: (json_['auxiliaryVersions'] + as core.Map?) 
+ ?.map( + (key, value) => core.MapEntry( + key, + GoogleCloudMetastoreV2AuxiliaryVersionConfig.fromJson( + value as core.Map), + ), + ), + configOverrides: + (json_['configOverrides'] as core.Map?) + ?.map( + (key, value) => core.MapEntry( + key, + value as core.String, + ), + ), + endpointProtocol: json_['endpointProtocol'] as core.String?, + version: json_['version'] as core.String?, + ); + + core.Map toJson() => { + if (auxiliaryVersions != null) 'auxiliaryVersions': auxiliaryVersions!, + if (configOverrides != null) 'configOverrides': configOverrides!, + if (endpointProtocol != null) 'endpointProtocol': endpointProtocol!, + if (version != null) 'version': version!, + }; +} + +/// Request message for DataprocMetastore.CreateMetadataImport. +class GoogleCloudMetastoreV2ImportMetadataRequest { + /// A database dump from a pre-existing metastore's database. + /// + /// Immutable. + GoogleCloudMetastoreV2DatabaseDump? databaseDump; + + /// The description of the metadata import. + /// + /// Optional. + core.String? description; + + /// A request ID. + /// + /// Specify a unique request ID to allow the server to ignore the request if + /// it has completed. The server will ignore subsequent requests that provide + /// a duplicate request ID for at least 60 minutes after the first request.For + /// example, if an initial request times out, followed by another request with + /// the same request ID, the server ignores the second request to prevent the + /// creation of duplicate commitments.The request ID must be a valid UUID + /// (https://en.wikipedia.org/wiki/Universally_unique_identifier#Format). A + /// zero UUID (00000000-0000-0000-0000-000000000000) is not supported. + /// + /// Optional. + core.String? requestId; + + GoogleCloudMetastoreV2ImportMetadataRequest({ + this.databaseDump, + this.description, + this.requestId, + }); + + GoogleCloudMetastoreV2ImportMetadataRequest.fromJson(core.Map json_) + : this( + databaseDump: json_.containsKey('databaseDump') + ? GoogleCloudMetastoreV2DatabaseDump.fromJson( + json_['databaseDump'] as core.Map) + : null, + description: json_['description'] as core.String?, + requestId: json_['requestId'] as core.String?, + ); + + core.Map toJson() => { + if (databaseDump != null) 'databaseDump': databaseDump!, + if (description != null) 'description': description!, + if (requestId != null) 'requestId': requestId!, + }; +} + +/// The details of the latest scheduled backup. +typedef GoogleCloudMetastoreV2LatestBackup = $LatestBackup; + +/// Response message for DataprocMetastore.ListBackups. +class GoogleCloudMetastoreV2ListBackupsResponse { + /// The backups of the specified service. + core.List? backups; + + /// A token that can be sent as page_token to retrieve the next page. + /// + /// If this field is omitted, there are no subsequent pages. + core.String? nextPageToken; + + /// Locations that could not be reached. + core.List? unreachable; + + GoogleCloudMetastoreV2ListBackupsResponse({ + this.backups, + this.nextPageToken, + this.unreachable, + }); + + GoogleCloudMetastoreV2ListBackupsResponse.fromJson(core.Map json_) + : this( + backups: (json_['backups'] as core.List?) + ?.map((value) => GoogleCloudMetastoreV2Backup.fromJson( + value as core.Map)) + .toList(), + nextPageToken: json_['nextPageToken'] as core.String?, + unreachable: (json_['unreachable'] as core.List?) 
+ ?.map((value) => value as core.String) + .toList(), + ); + + core.Map toJson() => { + if (backups != null) 'backups': backups!, + if (nextPageToken != null) 'nextPageToken': nextPageToken!, + if (unreachable != null) 'unreachable': unreachable!, + }; +} + +/// Response message for DataprocMetastore.ListServices. +class GoogleCloudMetastoreV2ListServicesResponse { + /// A token that can be sent as page_token to retrieve the next page. + /// + /// If this field is omitted, there are no subsequent pages. + core.String? nextPageToken; + + /// The services in the specified location. + core.List? services; + + /// Locations that could not be reached. + core.List? unreachable; + + GoogleCloudMetastoreV2ListServicesResponse({ + this.nextPageToken, + this.services, + this.unreachable, + }); + + GoogleCloudMetastoreV2ListServicesResponse.fromJson(core.Map json_) + : this( + nextPageToken: json_['nextPageToken'] as core.String?, + services: (json_['services'] as core.List?) + ?.map((value) => GoogleCloudMetastoreV2Service.fromJson( + value as core.Map)) + .toList(), + unreachable: (json_['unreachable'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + ); + + core.Map toJson() => { + if (nextPageToken != null) 'nextPageToken': nextPageToken!, + if (services != null) 'services': services!, + if (unreachable != null) 'unreachable': unreachable!, + }; +} + +/// Specifies how metastore metadata should be integrated with external +/// services. +class GoogleCloudMetastoreV2MetadataIntegration { + /// The integration config for the Data Catalog service. + /// + /// Optional. + GoogleCloudMetastoreV2DataCatalogConfig? dataCatalogConfig; + + GoogleCloudMetastoreV2MetadataIntegration({ + this.dataCatalogConfig, + }); + + GoogleCloudMetastoreV2MetadataIntegration.fromJson(core.Map json_) + : this( + dataCatalogConfig: json_.containsKey('dataCatalogConfig') + ? GoogleCloudMetastoreV2DataCatalogConfig.fromJson( + json_['dataCatalogConfig'] + as core.Map) + : null, + ); + + core.Map toJson() => { + if (dataCatalogConfig != null) 'dataCatalogConfig': dataCatalogConfig!, + }; +} + +/// Request message for DataprocMetastore.MoveTableToDatabase. +typedef GoogleCloudMetastoreV2MoveTableToDatabaseRequest + = $MoveTableToDatabaseRequest; + +/// Request message for DataprocMetastore.QueryMetadata. +typedef GoogleCloudMetastoreV2QueryMetadataRequest = $QueryMetadataRequest; + +/// Request message for DataprocMetastore.Restore. +typedef GoogleCloudMetastoreV2RestoreServiceRequest = $RestoreServiceRequest; + +/// Represents the scaling configuration of a metastore service. +class GoogleCloudMetastoreV2ScalingConfig { + /// Scaling factor from 1 to 5, increments of 1. + /// + /// Optional. + core.int? scalingFactor; + + GoogleCloudMetastoreV2ScalingConfig({ + this.scalingFactor, + }); + + GoogleCloudMetastoreV2ScalingConfig.fromJson(core.Map json_) + : this( + scalingFactor: json_['scalingFactor'] as core.int?, + ); + + core.Map toJson() => { + if (scalingFactor != null) 'scalingFactor': scalingFactor!, + }; +} + +/// This specifies the configuration of scheduled backup. +class GoogleCloudMetastoreV2ScheduledBackup { + /// A Cloud Storage URI of a folder, in the format gs:///. + /// + /// A sub-folder containing backup files will be stored below it. + /// + /// Optional. + core.String? backupLocation; + + /// The scheduled interval in Cron format, see + /// https://en.wikipedia.org/wiki/Cron The default is empty: scheduled backup + /// is not enabled. 
+ /// + /// Must be specified to enable scheduled backups. + /// + /// Optional. + core.String? cronSchedule; + + /// Defines whether the scheduled backup is enabled. + /// + /// The default value is false. + /// + /// Optional. + core.bool? enabled; + + /// The details of the latest scheduled backup. + /// + /// Output only. + GoogleCloudMetastoreV2LatestBackup? latestBackup; + + /// The time when the next backups execution is scheduled to start. + /// + /// Output only. + core.String? nextScheduledTime; + + /// Specifies the time zone to be used when interpreting cron_schedule. + /// + /// Must be a time zone name from the time zone database + /// (https://en.wikipedia.org/wiki/List_of_tz_database_time_zones), e.g. + /// America/Los_Angeles or Africa/Abidjan. If left unspecified, the default is + /// UTC. + /// + /// Optional. + core.String? timeZone; + + GoogleCloudMetastoreV2ScheduledBackup({ + this.backupLocation, + this.cronSchedule, + this.enabled, + this.latestBackup, + this.nextScheduledTime, + this.timeZone, + }); + + GoogleCloudMetastoreV2ScheduledBackup.fromJson(core.Map json_) + : this( + backupLocation: json_['backupLocation'] as core.String?, + cronSchedule: json_['cronSchedule'] as core.String?, + enabled: json_['enabled'] as core.bool?, + latestBackup: json_.containsKey('latestBackup') + ? GoogleCloudMetastoreV2LatestBackup.fromJson( + json_['latestBackup'] as core.Map) + : null, + nextScheduledTime: json_['nextScheduledTime'] as core.String?, + timeZone: json_['timeZone'] as core.String?, + ); + + core.Map toJson() => { + if (backupLocation != null) 'backupLocation': backupLocation!, + if (cronSchedule != null) 'cronSchedule': cronSchedule!, + if (enabled != null) 'enabled': enabled!, + if (latestBackup != null) 'latestBackup': latestBackup!, + if (nextScheduledTime != null) 'nextScheduledTime': nextScheduledTime!, + if (timeZone != null) 'timeZone': timeZone!, + }; +} + +/// A managed metastore service that serves metadata queries. +class GoogleCloudMetastoreV2Service { + /// The time when the metastore service was created. + /// + /// Output only. + core.String? createTime; + + /// Information used to configure the Dataproc Metastore service to encrypt + /// customer data at rest. + /// + /// Cannot be updated. + /// + /// Immutable. + GoogleCloudMetastoreV2EncryptionConfig? encryptionConfig; + + /// The list of endpoints used to access the metastore service. + /// + /// Output only. + core.List? endpoints; + + /// Configuration information specific to running Hive metastore software as + /// the metastore service. + GoogleCloudMetastoreV2HiveMetastoreConfig? hiveMetastoreConfig; + + /// User-defined labels for the metastore service. + core.Map? labels; + + /// The setting that defines how metastore metadata should be integrated with + /// external services and systems. + /// + /// Optional. + GoogleCloudMetastoreV2MetadataIntegration? metadataIntegration; + + /// The relative resource name of the metastore service, in the following + /// format:projects/{project_number}/locations/{location_id}/services/{service_id}. + /// + /// Immutable. + core.String? name; + + /// Scaling configuration of the metastore service. + /// + /// Optional. + GoogleCloudMetastoreV2ScalingConfig? scalingConfig; + + /// The configuration of scheduled backup for the metastore service. + /// + /// Optional. + GoogleCloudMetastoreV2ScheduledBackup? scheduledBackup; + + /// The current state of the metastore service. + /// + /// Output only. 
+ /// Possible string values are: + /// - "STATE_UNSPECIFIED" : The state of the metastore service is unknown. + /// - "CREATING" : The metastore service is in the process of being created. + /// - "ACTIVE" : The metastore service is running and ready to serve queries. + /// - "SUSPENDING" : The metastore service is entering suspension. Its + /// query-serving availability may cease unexpectedly. + /// - "SUSPENDED" : The metastore service is suspended and unable to serve + /// queries. + /// - "UPDATING" : The metastore service is being updated. It remains usable + /// but cannot accept additional update requests or be deleted at this time. + /// - "DELETING" : The metastore service is undergoing deletion. It cannot be + /// used. + /// - "ERROR" : The metastore service has encountered an error and cannot be + /// used. The metastore service should be deleted. + core.String? state; + + /// Additional information about the current state of the metastore service, + /// if available. + /// + /// Output only. + core.String? stateMessage; + + /// The globally unique resource identifier of the metastore service. + /// + /// Output only. + core.String? uid; + + /// The time when the metastore service was last updated. + /// + /// Output only. + core.String? updateTime; + + /// A Cloud Storage URI (starting with gs://) that specifies the default + /// warehouse directory of the Hive Metastore. + /// + /// Required. + core.String? warehouseGcsUri; + + GoogleCloudMetastoreV2Service({ + this.createTime, + this.encryptionConfig, + this.endpoints, + this.hiveMetastoreConfig, + this.labels, + this.metadataIntegration, + this.name, + this.scalingConfig, + this.scheduledBackup, + this.state, + this.stateMessage, + this.uid, + this.updateTime, + this.warehouseGcsUri, + }); + + GoogleCloudMetastoreV2Service.fromJson(core.Map json_) + : this( + createTime: json_['createTime'] as core.String?, + encryptionConfig: json_.containsKey('encryptionConfig') + ? GoogleCloudMetastoreV2EncryptionConfig.fromJson( + json_['encryptionConfig'] + as core.Map) + : null, + endpoints: (json_['endpoints'] as core.List?) + ?.map((value) => GoogleCloudMetastoreV2Endpoint.fromJson( + value as core.Map)) + .toList(), + hiveMetastoreConfig: json_.containsKey('hiveMetastoreConfig') + ? GoogleCloudMetastoreV2HiveMetastoreConfig.fromJson( + json_['hiveMetastoreConfig'] + as core.Map) + : null, + labels: + (json_['labels'] as core.Map?)?.map( + (key, value) => core.MapEntry( + key, + value as core.String, + ), + ), + metadataIntegration: json_.containsKey('metadataIntegration') + ? GoogleCloudMetastoreV2MetadataIntegration.fromJson( + json_['metadataIntegration'] + as core.Map) + : null, + name: json_['name'] as core.String?, + scalingConfig: json_.containsKey('scalingConfig') + ? GoogleCloudMetastoreV2ScalingConfig.fromJson( + json_['scalingConfig'] as core.Map) + : null, + scheduledBackup: json_.containsKey('scheduledBackup') + ? 
GoogleCloudMetastoreV2ScheduledBackup.fromJson( + json_['scheduledBackup'] + as core.Map) + : null, + state: json_['state'] as core.String?, + stateMessage: json_['stateMessage'] as core.String?, + uid: json_['uid'] as core.String?, + updateTime: json_['updateTime'] as core.String?, + warehouseGcsUri: json_['warehouseGcsUri'] as core.String?, + ); + + core.Map toJson() => { + if (createTime != null) 'createTime': createTime!, + if (encryptionConfig != null) 'encryptionConfig': encryptionConfig!, + if (endpoints != null) 'endpoints': endpoints!, + if (hiveMetastoreConfig != null) + 'hiveMetastoreConfig': hiveMetastoreConfig!, + if (labels != null) 'labels': labels!, + if (metadataIntegration != null) + 'metadataIntegration': metadataIntegration!, + if (name != null) 'name': name!, + if (scalingConfig != null) 'scalingConfig': scalingConfig!, + if (scheduledBackup != null) 'scheduledBackup': scheduledBackup!, + if (state != null) 'state': state!, + if (stateMessage != null) 'stateMessage': stateMessage!, + if (uid != null) 'uid': uid!, + if (updateTime != null) 'updateTime': updateTime!, + if (warehouseGcsUri != null) 'warehouseGcsUri': warehouseGcsUri!, + }; +} + +/// This resource represents a long-running operation that is the result of a +/// network API call. +class GoogleLongrunningOperation { + /// If the value is false, it means the operation is still in progress. + /// + /// If true, the operation is completed, and either error or response is + /// available. + core.bool? done; + + /// The error result of the operation in case of failure or cancellation. + GoogleRpcStatus? error; + + /// Service-specific metadata associated with the operation. + /// + /// It typically contains progress information and common metadata such as + /// create time. Some services might not provide such metadata. Any method + /// that returns a long-running operation should document the metadata type, + /// if any. + /// + /// The values for Object must be JSON objects. It can consist of `num`, + /// `String`, `bool` and `null` as well as `Map` and `List` values. + core.Map? metadata; + + /// The server-assigned name, which is only unique within the same service + /// that originally returns it. + /// + /// If you use the default HTTP mapping, the name should be a resource name + /// ending with operations/{unique_id}. + core.String? name; + + /// The normal, successful response of the operation. + /// + /// If the original method returns no data on success, such as Delete, the + /// response is google.protobuf.Empty. If the original method is standard + /// Get/Create/Update, the response should be the resource. For other methods, + /// the response should have the type XxxResponse, where Xxx is the original + /// method name. For example, if the original method name is TakeSnapshot(), + /// the inferred response type is TakeSnapshotResponse. + /// + /// The values for Object must be JSON objects. It can consist of `num`, + /// `String`, `bool` and `null` as well as `Map` and `List` values. + core.Map? response; + + GoogleLongrunningOperation({ + this.done, + this.error, + this.metadata, + this.name, + this.response, + }); + + GoogleLongrunningOperation.fromJson(core.Map json_) + : this( + done: json_['done'] as core.bool?, + error: json_.containsKey('error') + ? GoogleRpcStatus.fromJson( + json_['error'] as core.Map) + : null, + metadata: json_.containsKey('metadata') + ? json_['metadata'] as core.Map + : null, + name: json_['name'] as core.String?, + response: json_.containsKey('response') + ? 
json_['response'] as core.Map + : null, + ); + + core.Map toJson() => { + if (done != null) 'done': done!, + if (error != null) 'error': error!, + if (metadata != null) 'metadata': metadata!, + if (name != null) 'name': name!, + if (response != null) 'response': response!, + }; +} + +/// The Status type defines a logical error model that is suitable for different +/// programming environments, including REST APIs and RPC APIs. +/// +/// It is used by gRPC (https://github.com/grpc). Each Status message contains +/// three pieces of data: error code, error message, and error details.You can +/// find out more about this error model and how to work with it in the API +/// Design Guide (https://cloud.google.com/apis/design/errors). +typedef GoogleRpcStatus = $Status00; diff --git a/generated/googleapis/lib/migrationcenter/v1.dart b/generated/googleapis/lib/migrationcenter/v1.dart index bf172853f..8a5884a5e 100644 --- a/generated/googleapis/lib/migrationcenter/v1.dart +++ b/generated/googleapis/lib/migrationcenter/v1.dart @@ -1973,8 +1973,8 @@ class ProjectsLocationsOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// [request] - The metadata request object. /// @@ -3661,6 +3661,16 @@ class Asset { /// Output only. core.String? createTime; + /// Asset information specific for database deployments. + /// + /// Output only. + DatabaseDeploymentDetails? databaseDeploymentDetails; + + /// Asset information specific for logical databases. + /// + /// Output only. + DatabaseDetails? databaseDetails; + /// The list of insights associated with the asset. /// /// Output only. @@ -3689,6 +3699,11 @@ class Asset { /// Output only. core.List? sources; + /// Server generated human readable name of the asset. + /// + /// Output only. + core.String? title; + /// The timestamp when the asset was last updated. /// /// Output only. @@ -3698,12 +3713,15 @@ class Asset { this.assignedGroups, this.attributes, this.createTime, + this.databaseDeploymentDetails, + this.databaseDetails, this.insightList, this.labels, this.machineDetails, this.name, this.performanceData, this.sources, + this.title, this.updateTime, }); @@ -3721,6 +3739,16 @@ class Asset { ), ), createTime: json_['createTime'] as core.String?, + databaseDeploymentDetails: + json_.containsKey('databaseDeploymentDetails') + ? DatabaseDeploymentDetails.fromJson( + json_['databaseDeploymentDetails'] + as core.Map) + : null, + databaseDetails: json_.containsKey('databaseDetails') + ? DatabaseDetails.fromJson(json_['databaseDetails'] + as core.Map) + : null, insightList: json_.containsKey('insightList') ? InsightList.fromJson( json_['insightList'] as core.Map) @@ -3744,6 +3772,7 @@ class Asset { sources: (json_['sources'] as core.List?) 
?.map((value) => value as core.String) .toList(), + title: json_['title'] as core.String?, updateTime: json_['updateTime'] as core.String?, ); @@ -3751,12 +3780,16 @@ class Asset { if (assignedGroups != null) 'assignedGroups': assignedGroups!, if (attributes != null) 'attributes': attributes!, if (createTime != null) 'createTime': createTime!, + if (databaseDeploymentDetails != null) + 'databaseDeploymentDetails': databaseDeploymentDetails!, + if (databaseDetails != null) 'databaseDetails': databaseDetails!, if (insightList != null) 'insightList': insightList!, if (labels != null) 'labels': labels!, if (machineDetails != null) 'machineDetails': machineDetails!, if (name != null) 'name': name!, if (performanceData != null) 'performanceData': performanceData!, if (sources != null) 'sources': sources!, + if (title != null) 'title': title!, if (updateTime != null) 'updateTime': updateTime!, }; } @@ -3779,6 +3812,12 @@ class AssetFrame { /// - "SOURCE_TYPE_DISCOVERY_CLIENT" : Discovery clients core.String? collectionType; + /// Asset information specific for database deployments. + DatabaseDeploymentDetails? databaseDeploymentDetails; + + /// Asset information specific for logical databases. + DatabaseDetails? databaseDetails; + /// Labels as key value pairs. core.Map? labels; @@ -3802,6 +3841,8 @@ class AssetFrame { AssetFrame({ this.attributes, this.collectionType, + this.databaseDeploymentDetails, + this.databaseDetails, this.labels, this.machineDetails, this.performanceSamples, @@ -3820,6 +3861,16 @@ class AssetFrame { ), ), collectionType: json_['collectionType'] as core.String?, + databaseDeploymentDetails: + json_.containsKey('databaseDeploymentDetails') + ? DatabaseDeploymentDetails.fromJson( + json_['databaseDeploymentDetails'] + as core.Map) + : null, + databaseDetails: json_.containsKey('databaseDetails') + ? DatabaseDetails.fromJson(json_['databaseDetails'] + as core.Map) + : null, labels: (json_['labels'] as core.Map?)?.map( (key, value) => core.MapEntry( @@ -3842,6 +3893,9 @@ class AssetFrame { core.Map toJson() => { if (attributes != null) 'attributes': attributes!, if (collectionType != null) 'collectionType': collectionType!, + if (databaseDeploymentDetails != null) + 'databaseDeploymentDetails': databaseDeploymentDetails!, + if (databaseDetails != null) 'databaseDetails': databaseDetails!, if (labels != null) 'labels': labels!, if (machineDetails != null) 'machineDetails': machineDetails!, if (performanceSamples != null) @@ -4177,9 +4231,8 @@ class ComputeEnginePreferences { /// If unspecified (default), all types are considered, based on available /// usage data. /// Possible string values are: - /// - "PERSISTENT_DISK_TYPE_UNSPECIFIED" : Unspecified (default value). - /// Selecting this value allows the system to use any disk type according to - /// reported usage. This a good value to start with. + /// - "PERSISTENT_DISK_TYPE_UNSPECIFIED" : Unspecified. Fallback to default + /// value based on context. /// - "PERSISTENT_DISK_TYPE_STANDARD" : Standard HDD Persistent Disk. /// - "PERSISTENT_DISK_TYPE_BALANCED" : Balanced Persistent Disk. /// - "PERSISTENT_DISK_TYPE_SSD" : SSD Persistent Disk. @@ -4283,9 +4336,8 @@ class ComputeStorageDescriptor { /// /// Output only. /// Possible string values are: - /// - "PERSISTENT_DISK_TYPE_UNSPECIFIED" : Unspecified (default value). - /// Selecting this value allows the system to use any disk type according to - /// reported usage. This a good value to start with. + /// - "PERSISTENT_DISK_TYPE_UNSPECIFIED" : Unspecified. 
Fallback to default + /// value based on context. /// - "PERSISTENT_DISK_TYPE_STANDARD" : Standard HDD Persistent Disk. /// - "PERSISTENT_DISK_TYPE_BALANCED" : Balanced Persistent Disk. /// - "PERSISTENT_DISK_TYPE_SSD" : SSD Persistent Disk. @@ -4419,10 +4471,24 @@ class DailyResourceUsageAggregationCPU { /// Statistical aggregation of disk usage. class DailyResourceUsageAggregationDisk { /// Disk I/O operations per second. + /// + /// Optional. DailyResourceUsageAggregationStats? iops; + /// Disk read I/O operations per second. + /// + /// Optional. + DailyResourceUsageAggregationStats? readIops; + + /// Disk write I/O operations per second. + /// + /// Optional. + DailyResourceUsageAggregationStats? writeIops; + DailyResourceUsageAggregationDisk({ this.iops, + this.readIops, + this.writeIops, }); DailyResourceUsageAggregationDisk.fromJson(core.Map json_) @@ -4431,10 +4497,20 @@ class DailyResourceUsageAggregationDisk { ? DailyResourceUsageAggregationStats.fromJson( json_['iops'] as core.Map) : null, + readIops: json_.containsKey('readIops') + ? DailyResourceUsageAggregationStats.fromJson( + json_['readIops'] as core.Map) + : null, + writeIops: json_.containsKey('writeIops') + ? DailyResourceUsageAggregationStats.fromJson( + json_['writeIops'] as core.Map) + : null, ); core.Map toJson() => { if (iops != null) 'iops': iops!, + if (readIops != null) 'readIops': readIops!, + if (writeIops != null) 'writeIops': writeIops!, }; } @@ -4532,6 +4608,497 @@ class DailyResourceUsageAggregationStats { }; } +/// The details of a database deployment asset. +class DatabaseDeploymentDetails { + /// Aggregated stats for the database deployment. + /// + /// Output only. + DatabaseDeploymentDetailsAggregatedStats? aggregatedStats; + + /// The database deployment edition. + /// + /// Optional. + core.String? edition; + + /// The database deployment generated ID. + /// + /// Optional. + core.String? generatedId; + + /// A manual unique ID set by the user. + /// + /// Optional. + core.String? manualUniqueId; + + /// Details of a MYSQL database deployment. + /// + /// Optional. + MysqlDatabaseDeployment? mysql; + + /// Details of a PostgreSQL database deployment. + /// + /// Optional. + PostgreSqlDatabaseDeployment? postgresql; + + /// Details of a Microsoft SQL Server database deployment. + /// + /// Optional. + SqlServerDatabaseDeployment? sqlServer; + + /// Details of the database deployment topology. + /// + /// Optional. + DatabaseDeploymentTopology? topology; + + /// The database deployment version. + /// + /// Optional. + core.String? version; + + DatabaseDeploymentDetails({ + this.aggregatedStats, + this.edition, + this.generatedId, + this.manualUniqueId, + this.mysql, + this.postgresql, + this.sqlServer, + this.topology, + this.version, + }); + + DatabaseDeploymentDetails.fromJson(core.Map json_) + : this( + aggregatedStats: json_.containsKey('aggregatedStats') + ? DatabaseDeploymentDetailsAggregatedStats.fromJson( + json_['aggregatedStats'] + as core.Map) + : null, + edition: json_['edition'] as core.String?, + generatedId: json_['generatedId'] as core.String?, + manualUniqueId: json_['manualUniqueId'] as core.String?, + mysql: json_.containsKey('mysql') + ? MysqlDatabaseDeployment.fromJson( + json_['mysql'] as core.Map) + : null, + postgresql: json_.containsKey('postgresql') + ? PostgreSqlDatabaseDeployment.fromJson( + json_['postgresql'] as core.Map) + : null, + sqlServer: json_.containsKey('sqlServer') + ? 
SqlServerDatabaseDeployment.fromJson( + json_['sqlServer'] as core.Map) + : null, + topology: json_.containsKey('topology') + ? DatabaseDeploymentTopology.fromJson( + json_['topology'] as core.Map) + : null, + version: json_['version'] as core.String?, + ); + + core.Map toJson() => { + if (aggregatedStats != null) 'aggregatedStats': aggregatedStats!, + if (edition != null) 'edition': edition!, + if (generatedId != null) 'generatedId': generatedId!, + if (manualUniqueId != null) 'manualUniqueId': manualUniqueId!, + if (mysql != null) 'mysql': mysql!, + if (postgresql != null) 'postgresql': postgresql!, + if (sqlServer != null) 'sqlServer': sqlServer!, + if (topology != null) 'topology': topology!, + if (version != null) 'version': version!, + }; +} + +/// Aggregated stats for the database deployment. +class DatabaseDeploymentDetailsAggregatedStats { + /// The number of databases in the deployment. + /// + /// Output only. + core.int? databaseCount; + + DatabaseDeploymentDetailsAggregatedStats({ + this.databaseCount, + }); + + DatabaseDeploymentDetailsAggregatedStats.fromJson(core.Map json_) + : this( + databaseCount: json_['databaseCount'] as core.int?, + ); + + core.Map toJson() => { + if (databaseCount != null) 'databaseCount': databaseCount!, + }; +} + +/// Details of database deployment's topology. +class DatabaseDeploymentTopology { + /// Number of total logical cores. + /// + /// Optional. + core.int? coreCount; + + /// Number of total logical cores limited by db deployment. + /// + /// Optional. + core.int? coreLimit; + + /// Disk allocated in bytes. + /// + /// Optional. + core.String? diskAllocatedBytes; + + /// Disk used in bytes. + /// + /// Optional. + core.String? diskUsedBytes; + + /// List of database instances. + /// + /// Optional. + core.List? instances; + + /// Total memory in bytes. + /// + /// Optional. + core.String? memoryBytes; + + /// Total memory in bytes limited by db deployment. + /// + /// Optional. + core.String? memoryLimitBytes; + + /// Number of total physical cores. + /// + /// Optional. + core.int? physicalCoreCount; + + /// Number of total physical cores limited by db deployment. + /// + /// Optional. + core.int? physicalCoreLimit; + + DatabaseDeploymentTopology({ + this.coreCount, + this.coreLimit, + this.diskAllocatedBytes, + this.diskUsedBytes, + this.instances, + this.memoryBytes, + this.memoryLimitBytes, + this.physicalCoreCount, + this.physicalCoreLimit, + }); + + DatabaseDeploymentTopology.fromJson(core.Map json_) + : this( + coreCount: json_['coreCount'] as core.int?, + coreLimit: json_['coreLimit'] as core.int?, + diskAllocatedBytes: json_['diskAllocatedBytes'] as core.String?, + diskUsedBytes: json_['diskUsedBytes'] as core.String?, + instances: (json_['instances'] as core.List?) 
+ ?.map((value) => DatabaseInstance.fromJson( + value as core.Map)) + .toList(), + memoryBytes: json_['memoryBytes'] as core.String?, + memoryLimitBytes: json_['memoryLimitBytes'] as core.String?, + physicalCoreCount: json_['physicalCoreCount'] as core.int?, + physicalCoreLimit: json_['physicalCoreLimit'] as core.int?, + ); + + core.Map toJson() => { + if (coreCount != null) 'coreCount': coreCount!, + if (coreLimit != null) 'coreLimit': coreLimit!, + if (diskAllocatedBytes != null) + 'diskAllocatedBytes': diskAllocatedBytes!, + if (diskUsedBytes != null) 'diskUsedBytes': diskUsedBytes!, + if (instances != null) 'instances': instances!, + if (memoryBytes != null) 'memoryBytes': memoryBytes!, + if (memoryLimitBytes != null) 'memoryLimitBytes': memoryLimitBytes!, + if (physicalCoreCount != null) 'physicalCoreCount': physicalCoreCount!, + if (physicalCoreLimit != null) 'physicalCoreLimit': physicalCoreLimit!, + }; +} + +/// Details of a logical database. +class DatabaseDetails { + /// The allocated storage for the database in bytes. + /// + /// Optional. + core.String? allocatedStorageBytes; + + /// The name of the database. + /// + /// Required. + core.String? databaseName; + + /// The parent database deployment that contains the logical database. + /// + /// Required. + DatabaseDetailsParentDatabaseDeployment? parentDatabaseDeployment; + + /// The database schemas. + /// + /// Optional. + core.List? schemas; + + DatabaseDetails({ + this.allocatedStorageBytes, + this.databaseName, + this.parentDatabaseDeployment, + this.schemas, + }); + + DatabaseDetails.fromJson(core.Map json_) + : this( + allocatedStorageBytes: json_['allocatedStorageBytes'] as core.String?, + databaseName: json_['databaseName'] as core.String?, + parentDatabaseDeployment: + json_.containsKey('parentDatabaseDeployment') + ? DatabaseDetailsParentDatabaseDeployment.fromJson( + json_['parentDatabaseDeployment'] + as core.Map) + : null, + schemas: (json_['schemas'] as core.List?) + ?.map((value) => DatabaseSchema.fromJson( + value as core.Map)) + .toList(), + ); + + core.Map toJson() => { + if (allocatedStorageBytes != null) + 'allocatedStorageBytes': allocatedStorageBytes!, + if (databaseName != null) 'databaseName': databaseName!, + if (parentDatabaseDeployment != null) + 'parentDatabaseDeployment': parentDatabaseDeployment!, + if (schemas != null) 'schemas': schemas!, + }; +} + +/// The identifiers of the parent database deployment. +class DatabaseDetailsParentDatabaseDeployment { + /// The parent database deployment generated ID. + /// + /// Optional. + core.String? generatedId; + + /// The parent database deployment optional manual unique ID set by the user. + /// + /// Optional. + core.String? manualUniqueId; + + DatabaseDetailsParentDatabaseDeployment({ + this.generatedId, + this.manualUniqueId, + }); + + DatabaseDetailsParentDatabaseDeployment.fromJson(core.Map json_) + : this( + generatedId: json_['generatedId'] as core.String?, + manualUniqueId: json_['manualUniqueId'] as core.String?, + ); + + core.Map toJson() => { + if (generatedId != null) 'generatedId': generatedId!, + if (manualUniqueId != null) 'manualUniqueId': manualUniqueId!, + }; +} + +/// Details of a database instance. +class DatabaseInstance { + /// The instance's name. + /// + /// Optional. + core.String? instanceName; + + /// Networking details. + /// + /// Optional. + DatabaseInstanceNetwork? network; + + /// The instance role in the database engine. + /// + /// Optional. + /// Possible string values are: + /// - "ROLE_UNSPECIFIED" : Unspecified. 
+ /// - "PRIMARY" : Primary. + /// - "SECONDARY" : Secondary. + /// - "ARBITER" : Arbiter. + core.String? role; + + DatabaseInstance({ + this.instanceName, + this.network, + this.role, + }); + + DatabaseInstance.fromJson(core.Map json_) + : this( + instanceName: json_['instanceName'] as core.String?, + network: json_.containsKey('network') + ? DatabaseInstanceNetwork.fromJson( + json_['network'] as core.Map) + : null, + role: json_['role'] as core.String?, + ); + + core.Map toJson() => { + if (instanceName != null) 'instanceName': instanceName!, + if (network != null) 'network': network!, + if (role != null) 'role': role!, + }; +} + +/// Network details of a database instance. +class DatabaseInstanceNetwork { + /// The instance's host names. + /// + /// Optional. + core.List? hostNames; + + /// The instance's IP addresses. + /// + /// Optional. + core.List? ipAddresses; + + /// The instance's primary MAC address. + /// + /// Optional. + core.String? primaryMacAddress; + + DatabaseInstanceNetwork({ + this.hostNames, + this.ipAddresses, + this.primaryMacAddress, + }); + + DatabaseInstanceNetwork.fromJson(core.Map json_) + : this( + hostNames: (json_['hostNames'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + ipAddresses: (json_['ipAddresses'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + primaryMacAddress: json_['primaryMacAddress'] as core.String?, + ); + + core.Map toJson() => { + if (hostNames != null) 'hostNames': hostNames!, + if (ipAddresses != null) 'ipAddresses': ipAddresses!, + if (primaryMacAddress != null) 'primaryMacAddress': primaryMacAddress!, + }; +} + +/// Details of a group of database objects. +class DatabaseObjects { + /// The category of the objects. + /// + /// Optional. + /// Possible string values are: + /// - "CATEGORY_UNSPECIFIED" : Unspecified type. + /// - "TABLE" : Table. + /// - "INDEX" : Index. + /// - "CONSTRAINTS" : Constraints. + /// - "VIEWS" : Views. + /// - "SOURCE_CODE" : Source code, e.g. procedures. + /// - "OTHER" : Uncategorized objects. + core.String? category; + + /// The number of objects. + /// + /// Optional. + core.String? count; + + DatabaseObjects({ + this.category, + this.count, + }); + + DatabaseObjects.fromJson(core.Map json_) + : this( + category: json_['category'] as core.String?, + count: json_['count'] as core.String?, + ); + + core.Map toJson() => { + if (category != null) 'category': category!, + if (count != null) 'count': count!, + }; +} + +/// Details of a database schema. +class DatabaseSchema { + /// Details of a Mysql schema. + /// + /// Optional. + MySqlSchemaDetails? mysql; + + /// List of details of objects by category. + /// + /// Optional. + core.List? objects; + + /// Details of a PostgreSql schema. + /// + /// Optional. + PostgreSqlSchemaDetails? postgresql; + + /// The name of the schema. + /// + /// Required. + core.String? schemaName; + + /// Details of a SqlServer schema. + /// + /// Optional. + SqlServerSchemaDetails? sqlServer; + + /// The total size of tables in bytes. + /// + /// Optional. + core.String? tablesSizeBytes; + + DatabaseSchema({ + this.mysql, + this.objects, + this.postgresql, + this.schemaName, + this.sqlServer, + this.tablesSizeBytes, + }); + + DatabaseSchema.fromJson(core.Map json_) + : this( + mysql: json_.containsKey('mysql') + ? MySqlSchemaDetails.fromJson( + json_['mysql'] as core.Map) + : null, + objects: (json_['objects'] as core.List?) 
+ ?.map((value) => DatabaseObjects.fromJson( + value as core.Map)) + .toList(), + postgresql: json_.containsKey('postgresql') + ? PostgreSqlSchemaDetails.fromJson( + json_['postgresql'] as core.Map) + : null, + schemaName: json_['schemaName'] as core.String?, + sqlServer: json_.containsKey('sqlServer') + ? SqlServerSchemaDetails.fromJson( + json_['sqlServer'] as core.Map) + : null, + tablesSizeBytes: json_['tablesSizeBytes'] as core.String?, + ); + + core.Map toJson() => { + if (mysql != null) 'mysql': mysql!, + if (objects != null) 'objects': objects!, + if (postgresql != null) 'postgresql': postgresql!, + if (schemaName != null) 'schemaName': schemaName!, + if (sqlServer != null) 'sqlServer': sqlServer!, + if (tablesSizeBytes != null) 'tablesSizeBytes': tablesSizeBytes!, + }; +} + /// Represents a whole or partial calendar date, such as a birthday. /// /// The time of day and time zone are either specified elsewhere or are @@ -4896,20 +5463,44 @@ class DiskPartitionList { class DiskUsageSample { /// Average IOPS sampled over a short window. /// - /// Must be non-negative. + /// Must be non-negative. Must be equal to the sum of read and write if one of + /// them is positive. if both read and write are zero they are ignored. + /// + /// Optional. core.double? averageIops; + /// Average read IOPS sampled over a short window. + /// + /// Must be non-negative. + /// + /// Optional. + core.double? averageReadIops; + + /// Average write IOPS sampled over a short window. + /// + /// Must be non-negative. + /// + /// Optional. + core.double? averageWriteIops; + DiskUsageSample({ this.averageIops, + this.averageReadIops, + this.averageWriteIops, }); DiskUsageSample.fromJson(core.Map json_) : this( averageIops: (json_['averageIops'] as core.num?)?.toDouble(), + averageReadIops: (json_['averageReadIops'] as core.num?)?.toDouble(), + averageWriteIops: + (json_['averageWriteIops'] as core.num?)?.toDouble(), ); core.Map toJson() => { if (averageIops != null) 'averageIops': averageIops!, + if (averageReadIops != null) 'averageReadIops': averageReadIops!, + if (averageWriteIops != null) 'averageWriteIops': averageWriteIops!, }; } @@ -5685,6 +6276,8 @@ class ImportDataFile { /// - "IMPORT_JOB_FORMAT_STRATOZONE_CSV" : CSV format created manually and /// following the StratoZone format. For more information, see Manually create /// and upload data tables. + /// - "IMPORT_JOB_FORMAT_DATABASE_ZIP" : ZIP file with nested CSV files + /// generated by a database collector. core.String? format; /// The name of the file. @@ -5886,6 +6479,14 @@ class ImportJob { /// A resource that reports the import job errors at row level. class ImportRowError { + /// Error details for an archive file. + ImportRowErrorArchiveErrorDetails? archiveError; + + /// The asset title. + /// + /// Output only. + core.String? assetTitle; + /// Error details for a CSV file. ImportRowErrorCsvErrorDetails? csvError; @@ -5908,6 +6509,8 @@ class ImportRowError { ImportRowErrorXlsxErrorDetails? xlsxError; ImportRowError({ + this.archiveError, + this.assetTitle, this.csvError, this.errors, this.rowNumber, @@ -5918,6 +6521,11 @@ class ImportRowError { ImportRowError.fromJson(core.Map json_) : this( + archiveError: json_.containsKey('archiveError') + ? ImportRowErrorArchiveErrorDetails.fromJson( + json_['archiveError'] as core.Map) + : null, + assetTitle: json_['assetTitle'] as core.String?, csvError: json_.containsKey('csvError') ? 
ImportRowErrorCsvErrorDetails.fromJson( json_['csvError'] as core.Map) @@ -5936,6 +6544,8 @@ class ImportRowError { ); core.Map toJson() => { + if (archiveError != null) 'archiveError': archiveError!, + if (assetTitle != null) 'assetTitle': assetTitle!, if (csvError != null) 'csvError': csvError!, if (errors != null) 'errors': errors!, if (rowNumber != null) 'rowNumber': rowNumber!, @@ -5945,6 +6555,36 @@ class ImportRowError { }; } +/// Error details for an archive file. +class ImportRowErrorArchiveErrorDetails { + /// Error details for a CSV file. + ImportRowErrorCsvErrorDetails? csvError; + + /// The file path inside the archive where the error was detected. + /// + /// Output only. + core.String? filePath; + + ImportRowErrorArchiveErrorDetails({ + this.csvError, + this.filePath, + }); + + ImportRowErrorArchiveErrorDetails.fromJson(core.Map json_) + : this( + csvError: json_.containsKey('csvError') + ? ImportRowErrorCsvErrorDetails.fromJson( + json_['csvError'] as core.Map) + : null, + filePath: json_['filePath'] as core.String?, + ); + + core.Map toJson() => { + if (csvError != null) 'csvError': csvError!, + if (filePath != null) 'filePath': filePath!, + }; +} + /// Error details for a CSV file. class ImportRowErrorCsvErrorDetails { /// The row number where the error was detected. @@ -5991,7 +6631,7 @@ class ImportRowErrorXlsxErrorDetails { /// An insight about an asset. class Insight { - /// A generic insight about an asset + /// A generic insight about an asset. /// /// Output only. GenericInsight? genericInsight; @@ -6487,6 +7127,11 @@ class MachineArchitectureDetails { /// CPU architecture, e.g., "x64-based PC", "x86_64", "i686" etc. core.String? cpuArchitecture; + /// CPU manufacturer, e.g., "Intel", "AMD". + /// + /// Optional. + core.String? cpuManufacturer; + /// CPU name, e.g., "Intel Xeon E5-2690", "AMD EPYC 7571" etc. core.String? cpuName; @@ -6521,6 +7166,7 @@ class MachineArchitectureDetails { MachineArchitectureDetails({ this.bios, this.cpuArchitecture, + this.cpuManufacturer, this.cpuName, this.cpuSocketCount, this.cpuThreadCount, @@ -6536,6 +7182,7 @@ class MachineArchitectureDetails { json_['bios'] as core.Map) : null, cpuArchitecture: json_['cpuArchitecture'] as core.String?, + cpuManufacturer: json_['cpuManufacturer'] as core.String?, cpuName: json_['cpuName'] as core.String?, cpuSocketCount: json_['cpuSocketCount'] as core.int?, cpuThreadCount: json_['cpuThreadCount'] as core.int?, @@ -6547,6 +7194,7 @@ class MachineArchitectureDetails { core.Map toJson() => { if (bios != null) 'bios': bios!, if (cpuArchitecture != null) 'cpuArchitecture': cpuArchitecture!, + if (cpuManufacturer != null) 'cpuManufacturer': cpuManufacturer!, if (cpuName != null) 'cpuName': cpuName!, if (cpuSocketCount != null) 'cpuSocketCount': cpuSocketCount!, if (cpuThreadCount != null) 'cpuThreadCount': cpuThreadCount!, @@ -6781,84 +7429,291 @@ class MachineSeries { /// VMware Engine: https://cloud.google.com/vmware-engine/pricing core.String? code; - MachineSeries({ - this.code, + MachineSeries({ + this.code, + }); + + MachineSeries.fromJson(core.Map json_) + : this( + code: json_['code'] as core.String?, + ); + + core.Map toJson() => { + if (code != null) 'code': code!, + }; +} + +/// Memory usage sample. +class MemoryUsageSample { + /// Percentage of system memory utilized. + /// + /// Must be in the interval \[0, 100\]. + core.double? 
utilizedPercentage; + + MemoryUsageSample({ + this.utilizedPercentage, + }); + + MemoryUsageSample.fromJson(core.Map json_) + : this( + utilizedPercentage: + (json_['utilizedPercentage'] as core.num?)?.toDouble(), + ); + + core.Map toJson() => { + if (utilizedPercentage != null) + 'utilizedPercentage': utilizedPercentage!, + }; +} + +/// An insight about potential migrations for an asset. +class MigrationInsight { + /// A Google Compute Engine target. + /// + /// Output only. + ComputeEngineMigrationTarget? computeEngineTarget; + + /// Description of how well the asset this insight is associated with fits the + /// proposed migration. + /// + /// Output only. + FitDescriptor? fit; + + MigrationInsight({ + this.computeEngineTarget, + this.fit, + }); + + MigrationInsight.fromJson(core.Map json_) + : this( + computeEngineTarget: json_.containsKey('computeEngineTarget') + ? ComputeEngineMigrationTarget.fromJson( + json_['computeEngineTarget'] + as core.Map) + : null, + fit: json_.containsKey('fit') + ? FitDescriptor.fromJson( + json_['fit'] as core.Map) + : null, + ); + + core.Map toJson() => { + if (computeEngineTarget != null) + 'computeEngineTarget': computeEngineTarget!, + if (fit != null) 'fit': fit!, + }; +} + +/// Represents an amount of money with its currency type. +typedef Money = $Money; + +/// MySql plugin. +class MySqlPlugin { + /// The plugin is active. + /// + /// Required. + core.bool? enabled; + + /// The plugin name. + /// + /// Required. + core.String? plugin; + + /// The plugin version. + /// + /// Required. + core.String? version; + + MySqlPlugin({ + this.enabled, + this.plugin, + this.version, + }); + + MySqlPlugin.fromJson(core.Map json_) + : this( + enabled: json_['enabled'] as core.bool?, + plugin: json_['plugin'] as core.String?, + version: json_['version'] as core.String?, + ); + + core.Map toJson() => { + if (enabled != null) 'enabled': enabled!, + if (plugin != null) 'plugin': plugin!, + if (version != null) 'version': version!, + }; +} + +/// MySql property. +typedef MySqlProperty = $SqlProperty; + +/// Specific details for a Mysql database. +class MySqlSchemaDetails { + /// Mysql storage engine tables. + /// + /// Optional. + core.List? storageEngines; + + MySqlSchemaDetails({ + this.storageEngines, + }); + + MySqlSchemaDetails.fromJson(core.Map json_) + : this( + storageEngines: (json_['storageEngines'] as core.List?) + ?.map((value) => MySqlStorageEngineDetails.fromJson( + value as core.Map)) + .toList(), + ); + + core.Map toJson() => { + if (storageEngines != null) 'storageEngines': storageEngines!, + }; +} + +/// Mysql storage engine tables. +class MySqlStorageEngineDetails { + /// The number of encrypted tables. + /// + /// Optional. + core.int? encryptedTableCount; + + /// The storage engine. + /// + /// Required. + /// Possible string values are: + /// - "ENGINE_UNSPECIFIED" : Unspecified storage engine. + /// - "INNODB" : InnoDB. + /// - "MYISAM" : MyISAM. + /// - "MEMORY" : Memory. + /// - "CSV" : CSV. + /// - "ARCHIVE" : Archive. + /// - "BLACKHOLE" : Blackhole. + /// - "NDB" : NDB. + /// - "MERGE" : Merge. + /// - "FEDERATED" : Federated. + /// - "EXAMPLE" : Example. + /// - "OTHER" : Other. + core.String? engine; + + /// The number of tables. + /// + /// Optional. + core.int? 
tableCount; + + MySqlStorageEngineDetails({ + this.encryptedTableCount, + this.engine, + this.tableCount, }); - MachineSeries.fromJson(core.Map json_) + MySqlStorageEngineDetails.fromJson(core.Map json_) : this( - code: json_['code'] as core.String?, + encryptedTableCount: json_['encryptedTableCount'] as core.int?, + engine: json_['engine'] as core.String?, + tableCount: json_['tableCount'] as core.int?, ); core.Map toJson() => { - if (code != null) 'code': code!, + if (encryptedTableCount != null) + 'encryptedTableCount': encryptedTableCount!, + if (engine != null) 'engine': engine!, + if (tableCount != null) 'tableCount': tableCount!, }; } -/// Memory usage sample. -class MemoryUsageSample { - /// Percentage of system memory utilized. +/// MySql variable. +class MySqlVariable { + /// The variable category. /// - /// Must be in the interval \[0, 100\]. - core.double? utilizedPercentage; + /// Required. + core.String? category; - MemoryUsageSample({ - this.utilizedPercentage, + /// The variable value. + /// + /// Required. + core.String? value; + + /// The variable name. + /// + /// Required. + core.String? variable; + + MySqlVariable({ + this.category, + this.value, + this.variable, }); - MemoryUsageSample.fromJson(core.Map json_) + MySqlVariable.fromJson(core.Map json_) : this( - utilizedPercentage: - (json_['utilizedPercentage'] as core.num?)?.toDouble(), + category: json_['category'] as core.String?, + value: json_['value'] as core.String?, + variable: json_['variable'] as core.String?, ); core.Map toJson() => { - if (utilizedPercentage != null) - 'utilizedPercentage': utilizedPercentage!, + if (category != null) 'category': category!, + if (value != null) 'value': value!, + if (variable != null) 'variable': variable!, }; } -/// An insight about potential migrations for an asset. -class MigrationInsight { - /// A Google Compute Engine target. +/// Specific details for a Mysql database deployment. +class MysqlDatabaseDeployment { + /// List of MySql plugins. /// - /// Output only. - ComputeEngineMigrationTarget? computeEngineTarget; + /// Optional. + core.List? plugins; - /// Description of how well the asset this insight is associated with fits the - /// proposed migration. + /// List of MySql properties. /// - /// Output only. - FitDescriptor? fit; + /// Optional. + core.List? properties; - MigrationInsight({ - this.computeEngineTarget, - this.fit, + /// Number of resource groups. + /// + /// Optional. + core.int? resourceGroupsCount; + + /// List of MySql variables. + /// + /// Optional. + core.List? variables; + + MysqlDatabaseDeployment({ + this.plugins, + this.properties, + this.resourceGroupsCount, + this.variables, }); - MigrationInsight.fromJson(core.Map json_) + MysqlDatabaseDeployment.fromJson(core.Map json_) : this( - computeEngineTarget: json_.containsKey('computeEngineTarget') - ? ComputeEngineMigrationTarget.fromJson( - json_['computeEngineTarget'] - as core.Map) - : null, - fit: json_.containsKey('fit') - ? FitDescriptor.fromJson( - json_['fit'] as core.Map) - : null, + plugins: (json_['plugins'] as core.List?) + ?.map((value) => MySqlPlugin.fromJson( + value as core.Map)) + .toList(), + properties: (json_['properties'] as core.List?) + ?.map((value) => MySqlProperty.fromJson( + value as core.Map)) + .toList(), + resourceGroupsCount: json_['resourceGroupsCount'] as core.int?, + variables: (json_['variables'] as core.List?) 
+ ?.map((value) => MySqlVariable.fromJson( + value as core.Map)) + .toList(), ); core.Map toJson() => { - if (computeEngineTarget != null) - 'computeEngineTarget': computeEngineTarget!, - if (fit != null) 'fit': fit!, + if (plugins != null) 'plugins': plugins!, + if (properties != null) 'properties': properties!, + if (resourceGroupsCount != null) + 'resourceGroupsCount': resourceGroupsCount!, + if (variables != null) 'variables': variables!, }; } -/// Represents an amount of money with its currency type. -typedef Money = $Money; - /// Details of network adapter. class NetworkAdapterDetails { /// Network adapter type (e.g. VMXNET3). @@ -7440,6 +8295,178 @@ class PlatformDetails { }; } +/// Specific details for a PostgreSQL database deployment. +class PostgreSqlDatabaseDeployment { + /// List of PostgreSql properties. + /// + /// Optional. + core.List? properties; + + /// List of PostgreSql settings. + /// + /// Optional. + core.List? settings; + + PostgreSqlDatabaseDeployment({ + this.properties, + this.settings, + }); + + PostgreSqlDatabaseDeployment.fromJson(core.Map json_) + : this( + properties: (json_['properties'] as core.List?) + ?.map((value) => PostgreSqlProperty.fromJson( + value as core.Map)) + .toList(), + settings: (json_['settings'] as core.List?) + ?.map((value) => PostgreSqlSetting.fromJson( + value as core.Map)) + .toList(), + ); + + core.Map toJson() => { + if (properties != null) 'properties': properties!, + if (settings != null) 'settings': settings!, + }; +} + +/// PostgreSql extension. +class PostgreSqlExtension { + /// The extension name. + /// + /// Required. + core.String? extension; + + /// The extension version. + /// + /// Required. + core.String? version; + + PostgreSqlExtension({ + this.extension, + this.version, + }); + + PostgreSqlExtension.fromJson(core.Map json_) + : this( + extension: json_['extension'] as core.String?, + version: json_['version'] as core.String?, + ); + + core.Map toJson() => { + if (extension != null) 'extension': extension!, + if (version != null) 'version': version!, + }; +} + +/// PostgreSql property. +typedef PostgreSqlProperty = $SqlProperty; + +/// Specific details for a PostgreSql schema. +class PostgreSqlSchemaDetails { + /// PostgreSql foreign tables. + /// + /// Optional. + core.int? foreignTablesCount; + + /// PostgreSql extensions. + /// + /// Optional. + core.List? postgresqlExtensions; + + PostgreSqlSchemaDetails({ + this.foreignTablesCount, + this.postgresqlExtensions, + }); + + PostgreSqlSchemaDetails.fromJson(core.Map json_) + : this( + foreignTablesCount: json_['foreignTablesCount'] as core.int?, + postgresqlExtensions: (json_['postgresqlExtensions'] as core.List?) + ?.map((value) => PostgreSqlExtension.fromJson( + value as core.Map)) + .toList(), + ); + + core.Map toJson() => { + if (foreignTablesCount != null) + 'foreignTablesCount': foreignTablesCount!, + if (postgresqlExtensions != null) + 'postgresqlExtensions': postgresqlExtensions!, + }; +} + +/// PostgreSql setting. +class PostgreSqlSetting { + /// The setting boolean value. + /// + /// Required. + core.bool? boolValue; + + /// The setting int value. + /// + /// Required. + core.String? intValue; + + /// The setting real value. + /// + /// Required. + core.double? realValue; + + /// The setting name. + /// + /// Required. + core.String? setting; + + /// The setting source. + /// + /// Required. + core.String? source; + + /// The setting string value. + /// + /// Notice that enum values are stored as strings. + /// + /// Required. + core.String? 
stringValue; + + /// The setting unit. + /// + /// Optional. + core.String? unit; + + PostgreSqlSetting({ + this.boolValue, + this.intValue, + this.realValue, + this.setting, + this.source, + this.stringValue, + this.unit, + }); + + PostgreSqlSetting.fromJson(core.Map json_) + : this( + boolValue: json_['boolValue'] as core.bool?, + intValue: json_['intValue'] as core.String?, + realValue: (json_['realValue'] as core.num?)?.toDouble(), + setting: json_['setting'] as core.String?, + source: json_['source'] as core.String?, + stringValue: json_['stringValue'] as core.String?, + unit: json_['unit'] as core.String?, + ); + + core.Map toJson() => { + if (boolValue != null) 'boolValue': boolValue!, + if (intValue != null) 'intValue': intValue!, + if (realValue != null) 'realValue': realValue!, + if (setting != null) 'setting': setting!, + if (source != null) 'source': source!, + if (stringValue != null) 'stringValue': stringValue!, + if (unit != null) 'unit': unit!, + }; +} + /// The preferences that apply to all assets in a given context. class PreferenceSet { /// The timestamp when the preference set was created. @@ -8914,6 +9941,177 @@ class Source { }; } +/// Specific details for a Microsoft SQL Server database deployment. +class SqlServerDatabaseDeployment { + /// List of SQL Server features. + /// + /// Optional. + core.List? features; + + /// List of SQL Server server flags. + /// + /// Optional. + core.List? serverFlags; + + /// List of SQL Server trace flags. + /// + /// Optional. + core.List? traceFlags; + + SqlServerDatabaseDeployment({ + this.features, + this.serverFlags, + this.traceFlags, + }); + + SqlServerDatabaseDeployment.fromJson(core.Map json_) + : this( + features: (json_['features'] as core.List?) + ?.map((value) => SqlServerFeature.fromJson( + value as core.Map)) + .toList(), + serverFlags: (json_['serverFlags'] as core.List?) + ?.map((value) => SqlServerServerFlag.fromJson( + value as core.Map)) + .toList(), + traceFlags: (json_['traceFlags'] as core.List?) + ?.map((value) => SqlServerTraceFlag.fromJson( + value as core.Map)) + .toList(), + ); + + core.Map toJson() => { + if (features != null) 'features': features!, + if (serverFlags != null) 'serverFlags': serverFlags!, + if (traceFlags != null) 'traceFlags': traceFlags!, + }; +} + +/// SQL Server feature details. +class SqlServerFeature { + /// Field enabled is set when a feature is used on the source deployment. + /// + /// Required. + core.bool? enabled; + + /// The feature name. + /// + /// Required. + core.String? featureName; + + SqlServerFeature({ + this.enabled, + this.featureName, + }); + + SqlServerFeature.fromJson(core.Map json_) + : this( + enabled: json_['enabled'] as core.bool?, + featureName: json_['featureName'] as core.String?, + ); + + core.Map toJson() => { + if (enabled != null) 'enabled': enabled!, + if (featureName != null) 'featureName': featureName!, + }; +} + +/// Specific details for a SqlServer database. +class SqlServerSchemaDetails { + /// SqlServer number of CLR objects. + /// + /// Optional. + core.int? clrObjectCount; + + SqlServerSchemaDetails({ + this.clrObjectCount, + }); + + SqlServerSchemaDetails.fromJson(core.Map json_) + : this( + clrObjectCount: json_['clrObjectCount'] as core.int?, + ); + + core.Map toJson() => { + if (clrObjectCount != null) 'clrObjectCount': clrObjectCount!, + }; +} + +/// SQL Server server flag details. +class SqlServerServerFlag { + /// The server flag name. + /// + /// Required. + core.String? serverFlagName; + + /// The server flag value set by the user. 
+ /// + /// Required. + core.String? value; + + /// The server flag actual value. + /// + /// If `value_in_use` is different from `value` it means that either the + /// configuration change was not applied or it is an expected behavior. See + /// SQL Server documentation for more details. + /// + /// Required. + core.String? valueInUse; + + SqlServerServerFlag({ + this.serverFlagName, + this.value, + this.valueInUse, + }); + + SqlServerServerFlag.fromJson(core.Map json_) + : this( + serverFlagName: json_['serverFlagName'] as core.String?, + value: json_['value'] as core.String?, + valueInUse: json_['valueInUse'] as core.String?, + ); + + core.Map toJson() => { + if (serverFlagName != null) 'serverFlagName': serverFlagName!, + if (value != null) 'value': value!, + if (valueInUse != null) 'valueInUse': valueInUse!, + }; +} + +/// SQL Server trace flag details. +class SqlServerTraceFlag { + /// The trace flag scope. + /// + /// Required. + /// Possible string values are: + /// - "SCOPE_UNSPECIFIED" : Unspecified. + /// - "OFF" : Off. + /// - "GLOBAL" : Global. + /// - "SESSION" : Session. + core.String? scope; + + /// The trace flag name. + /// + /// Required. + core.String? traceFlagName; + + SqlServerTraceFlag({ + this.scope, + this.traceFlagName, + }); + + SqlServerTraceFlag.fromJson(core.Map json_) + : this( + scope: json_['scope'] as core.String?, + traceFlagName: json_['traceFlagName'] as core.String?, + ); + + core.Map toJson() => { + if (scope != null) 'scope': scope!, + if (traceFlagName != null) 'traceFlagName': traceFlagName!, + }; +} + /// The `Status` type defines a logical error model that is suitable for /// different programming environments, including REST APIs and RPC APIs. /// @@ -9117,7 +10315,7 @@ class VirtualMachinePreferences { /// - "COMPUTE_MIGRATION_TARGET_PRODUCT_COMPUTE_ENGINE" : Prefer to migrate to /// Google Cloud Compute Engine. /// - "COMPUTE_MIGRATION_TARGET_PRODUCT_VMWARE_ENGINE" : Prefer to migrate to - /// Google Cloud VMware Engine. + /// Google Cloud VMware Engine.6278 /// - "COMPUTE_MIGRATION_TARGET_PRODUCT_SOLE_TENANCY" : Prefer to migrate to /// Google Cloud Sole Tenant Nodes. core.String? targetProduct; diff --git a/generated/googleapis/lib/ml/v1.dart b/generated/googleapis/lib/ml/v1.dart index 895553c34..7ae2c2fd4 100644 --- a/generated/googleapis/lib/ml/v1.dart +++ b/generated/googleapis/lib/ml/v1.dart @@ -720,8 +720,8 @@ class ProjectsLocationsOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// Request parameters: /// @@ -2110,8 +2110,8 @@ class ProjectsOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. 
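// --- Illustrative sketch only; not part of the generated library. ---
// The cancellation note above says a cancelled long-running operation is not
// deleted: it completes with `done: true` and an Operation.error carrying a
// google.rpc.Status with code 1 (Code.CANCELLED). A minimal helper, assuming
// `operationJson` is the decoded JSON of an Operation fetched via
// Operations.GetOperation after a cancel request; the map literal in main()
// is a made-up example.
bool wasCancelled(Map<String, dynamic> operationJson) {
  if (operationJson['done'] != true) return false; // still in progress
  final error = operationJson['error'] as Map<String, dynamic>?;
  return error != null && error['code'] == 1; // 1 == Code.CANCELLED
}

void main() {
  final cancelled = {
    'done': true,
    'error': {'code': 1, 'message': 'Operation was cancelled.'},
  };
  print(wasCancelled(cancelled)); // true
}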
/// /// Request parameters: /// diff --git a/generated/googleapis/lib/monitoring/v3.dart b/generated/googleapis/lib/monitoring/v3.dart index 3b620b2c1..b6df0a283 100644 --- a/generated/googleapis/lib/monitoring/v3.dart +++ b/generated/googleapis/lib/monitoring/v3.dart @@ -2017,22 +2017,23 @@ class ProjectsMetricDescriptorsResource { /// execute the request. The format is: projects/\[PROJECT_ID_OR_NUMBER\] /// Value must have pattern `^projects/\[^/\]+$`. /// - /// [filter] - If this field is empty, all custom and system-defined metric - /// descriptors are returned. Otherwise, the filter + /// [filter] - Optional. If this field is empty, all custom and system-defined + /// metric descriptors are returned. Otherwise, the filter /// (https://cloud.google.com/monitoring/api/v3/filters) specifies which /// metric descriptors are to be returned. For example, the following filter /// matches all custom metrics /// (https://cloud.google.com/monitoring/custom-metrics): metric.type = /// starts_with("custom.googleapis.com/") /// - /// [pageSize] - A positive number that is the maximum number of results to - /// return. The default and maximum value is 10,000. If a page_size \<= 0 or - /// \> 10,000 is submitted, will instead return a maximum of 10,000 results. + /// [pageSize] - Optional. A positive number that is the maximum number of + /// results to return. The default and maximum value is 10,000. If a page_size + /// \<= 0 or \> 10,000 is submitted, will instead return a maximum of 10,000 + /// results. /// - /// [pageToken] - If this field is not empty then it must contain the - /// nextPageToken value returned by a previous call to this method. Using this - /// field causes the method to return additional results from the previous - /// method call. + /// [pageToken] - Optional. If this field is not empty then it must contain + /// the nextPageToken value returned by a previous call to this method. Using + /// this field causes the method to return additional results from the + /// previous method call. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. @@ -9089,6 +9090,16 @@ class PrometheusQueryLanguageCondition { /// Optional. core.String? alertRule; + /// Whether to disable metric existence validation for this condition.This + /// allows alerting policies to be defined on metrics that do not yet exist, + /// improving advanced customer workflows such as configuring alerting + /// policies using Terraform.Users with the monitoring.alertPolicyViewer role + /// are able to see the name of the non-existent metric in the alerting policy + /// condition. + /// + /// Optional. + core.bool? disableMetricValidation; + /// Alerts are considered firing once their PromQL expression was evaluated to /// be "true" for this long. 
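// --- Illustrative sketch only; not part of the generated library. ---
// A minimal example of building a PromQL condition that sets the new
// `disableMetricValidation` flag documented above, so an alerting policy can
// reference a metric that does not exist yet (e.g. when provisioning policies
// with Terraform). Assumes `package:googleapis/monitoring/v3.dart`; the rule
// name, durations, and labels are made-up values, and the condition's PromQL
// expression itself is omitted because it is outside this hunk.
import 'package:googleapis/monitoring/v3.dart' as monitoring;

monitoring.PrometheusQueryLanguageCondition buildCondition() =>
    monitoring.PrometheusQueryLanguageCondition(
      alertRule: 'InstanceDown',
      duration: '300s',
      evaluationInterval: '60s',
      labels: {'severity': 'page'},
      // Skip metric existence validation for this condition.
      disableMetricValidation: true,
    );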
/// @@ -9146,6 +9157,7 @@ class PrometheusQueryLanguageCondition { PrometheusQueryLanguageCondition({ this.alertRule, + this.disableMetricValidation, this.duration, this.evaluationInterval, this.labels, @@ -9156,6 +9168,8 @@ class PrometheusQueryLanguageCondition { PrometheusQueryLanguageCondition.fromJson(core.Map json_) : this( alertRule: json_['alertRule'] as core.String?, + disableMetricValidation: + json_['disableMetricValidation'] as core.bool?, duration: json_['duration'] as core.String?, evaluationInterval: json_['evaluationInterval'] as core.String?, labels: @@ -9171,6 +9185,8 @@ class PrometheusQueryLanguageCondition { core.Map toJson() => { if (alertRule != null) 'alertRule': alertRule!, + if (disableMetricValidation != null) + 'disableMetricValidation': disableMetricValidation!, if (duration != null) 'duration': duration!, if (evaluationInterval != null) 'evaluationInterval': evaluationInterval!, diff --git a/generated/googleapis/lib/mybusinessaccountmanagement/v1.dart b/generated/googleapis/lib/mybusinessaccountmanagement/v1.dart index d760e18ae..fd7419c2c 100644 --- a/generated/googleapis/lib/mybusinessaccountmanagement/v1.dart +++ b/generated/googleapis/lib/mybusinessaccountmanagement/v1.dart @@ -1261,19 +1261,19 @@ class OrganizationInfo { }; } -/// Represents a postal address, e.g. for postal delivery or payments addresses. +/// Represents a postal address. /// -/// Given a postal address, a postal service can deliver items to a premise, -/// P.O. Box or similar. It is not intended to model geographical locations -/// (roads, towns, mountains). In typical usage an address would be created via -/// user input or from importing existing data, depending on the type of -/// process. Advice on address input / editing: - Use an -/// internationalization-ready address widget such as -/// https://github.com/google/libaddressinput) - Users should not be presented -/// with UI elements for input or editing of fields outside countries where that -/// field is used. For more guidance on how to use this schema, please see: +/// For example for postal delivery or payments addresses. Given a postal +/// address, a postal service can deliver items to a premise, P.O. Box or +/// similar. It is not intended to model geographical locations (roads, towns, +/// mountains). In typical usage an address would be created by user input or +/// from importing existing data, depending on the type of process. Advice on +/// address input / editing: - Use an internationalization-ready address widget +/// such as https://github.com/google/libaddressinput) - Users should not be +/// presented with UI elements for input or editing of fields outside countries +/// where that field is used. For more guidance on how to use this schema, see: /// https://support.google.com/business/answer/6397478 -typedef PostalAddress = $PostalAddress; +typedef PostalAddress = $PostalAddress00; /// Represents a target location for a pending invitation. class TargetLocation { diff --git a/generated/googleapis/lib/mybusinessbusinessinformation/v1.dart b/generated/googleapis/lib/mybusinessbusinessinformation/v1.dart index 2fbca4e52..1941585cf 100644 --- a/generated/googleapis/lib/mybusinessbusinessinformation/v1.dart +++ b/generated/googleapis/lib/mybusinessbusinessinformation/v1.dart @@ -2314,19 +2314,19 @@ class Places { }; } -/// Represents a postal address, e.g. for postal delivery or payments addresses. +/// Represents a postal address. 
/// -/// Given a postal address, a postal service can deliver items to a premise, -/// P.O. Box or similar. It is not intended to model geographical locations -/// (roads, towns, mountains). In typical usage an address would be created via -/// user input or from importing existing data, depending on the type of -/// process. Advice on address input / editing: - Use an -/// internationalization-ready address widget such as -/// https://github.com/google/libaddressinput) - Users should not be presented -/// with UI elements for input or editing of fields outside countries where that -/// field is used. For more guidance on how to use this schema, please see: +/// For example for postal delivery or payments addresses. Given a postal +/// address, a postal service can deliver items to a premise, P.O. Box or +/// similar. It is not intended to model geographical locations (roads, towns, +/// mountains). In typical usage an address would be created by user input or +/// from importing existing data, depending on the type of process. Advice on +/// address input / editing: - Use an internationalization-ready address widget +/// such as https://github.com/google/libaddressinput) - Users should not be +/// presented with UI elements for input or editing of fields outside countries +/// where that field is used. For more guidance on how to use this schema, see: /// https://support.google.com/business/answer/6397478 -typedef PostalAddress = $PostalAddress; +typedef PostalAddress = $PostalAddress00; /// All information pertaining to the location's profile. class Profile { diff --git a/generated/googleapis/lib/mybusinessverifications/v1.dart b/generated/googleapis/lib/mybusinessverifications/v1.dart index 26090abf5..9c1e715cc 100644 --- a/generated/googleapis/lib/mybusinessverifications/v1.dart +++ b/generated/googleapis/lib/mybusinessverifications/v1.dart @@ -520,19 +520,19 @@ class ListVerificationsResponse { }; } -/// Represents a postal address, e.g. for postal delivery or payments addresses. +/// Represents a postal address. /// -/// Given a postal address, a postal service can deliver items to a premise, -/// P.O. Box or similar. It is not intended to model geographical locations -/// (roads, towns, mountains). In typical usage an address would be created via -/// user input or from importing existing data, depending on the type of -/// process. Advice on address input / editing: - Use an -/// internationalization-ready address widget such as -/// https://github.com/google/libaddressinput) - Users should not be presented -/// with UI elements for input or editing of fields outside countries where that -/// field is used. For more guidance on how to use this schema, please see: +/// For example for postal delivery or payments addresses. Given a postal +/// address, a postal service can deliver items to a premise, P.O. Box or +/// similar. It is not intended to model geographical locations (roads, towns, +/// mountains). In typical usage an address would be created by user input or +/// from importing existing data, depending on the type of process. Advice on +/// address input / editing: - Use an internationalization-ready address widget +/// such as https://github.com/google/libaddressinput) - Users should not be +/// presented with UI elements for input or editing of fields outside countries +/// where that field is used. 
For more guidance on how to use this schema, see: /// https://support.google.com/business/answer/6397478 -typedef PostalAddress = $PostalAddress; +typedef PostalAddress = $PostalAddress00; /// Indicates that the location duplicates another location that is in good /// standing. diff --git a/generated/googleapis/lib/netapp/v1.dart b/generated/googleapis/lib/netapp/v1.dart index 35b276ba5..070e2ff98 100644 --- a/generated/googleapis/lib/netapp/v1.dart +++ b/generated/googleapis/lib/netapp/v1.dart @@ -1468,8 +1468,8 @@ class ProjectsLocationsOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// [request] - The metadata request object. /// @@ -1912,6 +1912,49 @@ class ProjectsLocationsStoragePoolsResource { ); return Operation.fromJson(response_ as core.Map); } + + /// ValidateDirectoryService does a connectivity check for a directory service + /// policy attached to the storage pool. + /// + /// [request] - The metadata request object. + /// + /// Request parameters: + /// + /// [name] - Required. Name of the storage pool + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/storagePools/\[^/\]+$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [Operation]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future validateDirectoryService( + ValidateDirectoryServiceRequest request, + core.String name, { + core.String? $fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = + 'v1/' + core.Uri.encodeFull('$name') + ':validateDirectoryService'; + + final response_ = await _requester.request( + url_, + 'POST', + body: body_, + queryParams: queryParams_, + ); + return Operation.fromJson(response_ as core.Map); + } } class ProjectsLocationsVolumesResource { @@ -2292,6 +2335,48 @@ class ProjectsLocationsVolumesReplicationsResource { return Operation.fromJson(response_ as core.Map); } + /// Establish replication peering. + /// + /// [request] - The metadata request object. + /// + /// Request parameters: + /// + /// [name] - Required. The resource name of the replication, in the format of + /// projects/{project_id}/locations/{location}/volumes/{volume_id}/replications/{replication_id}. + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/volumes/\[^/\]+/replications/\[^/\]+$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [Operation]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future establishPeering( + EstablishPeeringRequest request, + core.String name, { + core.String? 
$fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name') + ':establishPeering'; + + final response_ = await _requester.request( + url_, + 'POST', + body: body_, + queryParams: queryParams_, + ); + return Operation.fromJson(response_ as core.Map); + } + /// Describe a replication for a volume. /// /// Request parameters: @@ -2561,6 +2646,50 @@ class ProjectsLocationsVolumesReplicationsResource { ); return Operation.fromJson(response_ as core.Map); } + + /// Syncs the replication. + /// + /// This will invoke one time volume data transfer from source to destination. + /// + /// [request] - The metadata request object. + /// + /// Request parameters: + /// + /// [name] - Required. The resource name of the replication, in the format of + /// projects/{project_id}/locations/{location}/volumes/{volume_id}/replications/{replication_id}. + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/volumes/\[^/\]+/replications/\[^/\]+$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [Operation]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future sync( + SyncReplicationRequest request, + core.String name, { + core.String? $fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name') + ':sync'; + + final response_ = await _requester.request( + url_, + 'POST', + body: body_, + queryParams: queryParams_, + ); + return Operation.fromJson(response_ as core.Map); + } } class ProjectsLocationsVolumesSnapshotsResource { @@ -3395,6 +3524,11 @@ class DestinationVolumeParameters { /// Required. core.String? storagePool; + /// Tiering policy for the volume. + /// + /// Optional. + TieringPolicy? tieringPolicy; + /// Desired destination volume resource id. /// /// If not specified, source volume's resource id will be used. This value @@ -3406,6 +3540,7 @@ class DestinationVolumeParameters { this.description, this.shareName, this.storagePool, + this.tieringPolicy, this.volumeId, }); @@ -3414,6 +3549,10 @@ class DestinationVolumeParameters { description: json_['description'] as core.String?, shareName: json_['shareName'] as core.String?, storagePool: json_['storagePool'] as core.String?, + tieringPolicy: json_.containsKey('tieringPolicy') + ? TieringPolicy.fromJson( + json_['tieringPolicy'] as core.Map) + : null, volumeId: json_['volumeId'] as core.String?, ); @@ -3421,6 +3560,7 @@ class DestinationVolumeParameters { if (description != null) 'description': description!, if (shareName != null) 'shareName': shareName!, if (storagePool != null) 'storagePool': storagePool!, + if (tieringPolicy != null) 'tieringPolicy': tieringPolicy!, if (volumeId != null) 'volumeId': volumeId!, }; } @@ -3428,6 +3568,57 @@ class DestinationVolumeParameters { /// EncryptVolumesRequest specifies the KMS config to encrypt existing volumes. typedef EncryptVolumesRequest = $Empty; +/// EstablishPeeringRequest establishes cluster and svm peerings between the +/// source and the destination replications. 
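Not part of the generated diff: a minimal usage sketch for the new hybrid-replication calls added to netapp/v1 (establishPeering and sync on the replications resource), using the EstablishPeeringRequest message defined immediately below. The way the resource handle is obtained (the client's projects.locations.volumes.replications getters) and all resource names are assumptions for illustration only.

import 'package:googleapis/netapp/v1.dart';

Future<void> peerAndSync(
    ProjectsLocationsVolumesReplicationsResource replications) async {
  // Placeholder replication name; format is
  // projects/{project}/locations/{location}/volumes/{volume}/replications/{replication}.
  const name = 'projects/my-project/locations/us-central1/'
      'volumes/my-volume/replications/my-replication';

  // Establish cluster and SVM peering with the on-premises ONTAP source.
  final peerOp = await replications.establishPeering(
    EstablishPeeringRequest(
      peerClusterName: 'onprem-cluster',
      peerSvmName: 'svm1',
      peerVolumeName: 'vol1',
      peerIpAddresses: ['10.0.0.10', '10.0.0.11'],
    ),
    name,
  );
  print('establishPeering operation: ${peerOp.name}');

  // SyncReplicationRequest is an empty message; sync() triggers a one-time
  // data transfer from source to destination.
  final syncOp = await replications.sync(SyncReplicationRequest(), name);
  print('sync operation: ${syncOp.name}');
}

Both calls complete with a long-running Operation that can be polled through the existing operations resource.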
+class EstablishPeeringRequest { + /// Name of the user's local source cluster to be peered with the destination + /// cluster. + /// + /// Required. + core.String? peerClusterName; + + /// List of IPv4 ip addresses to be used for peering. + /// + /// Optional. + core.List? peerIpAddresses; + + /// Name of the user's local source vserver svm to be peered with the + /// destination vserver svm. + /// + /// Required. + core.String? peerSvmName; + + /// Name of the user's local source volume to be peered with the destination + /// volume. + /// + /// Required. + core.String? peerVolumeName; + + EstablishPeeringRequest({ + this.peerClusterName, + this.peerIpAddresses, + this.peerSvmName, + this.peerVolumeName, + }); + + EstablishPeeringRequest.fromJson(core.Map json_) + : this( + peerClusterName: json_['peerClusterName'] as core.String?, + peerIpAddresses: (json_['peerIpAddresses'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + peerSvmName: json_['peerSvmName'] as core.String?, + peerVolumeName: json_['peerVolumeName'] as core.String?, + ); + + core.Map toJson() => { + if (peerClusterName != null) 'peerClusterName': peerClusterName!, + if (peerIpAddresses != null) 'peerIpAddresses': peerIpAddresses!, + if (peerSvmName != null) 'peerSvmName': peerSvmName!, + if (peerVolumeName != null) 'peerVolumeName': peerVolumeName!, + }; +} + /// Defines the export policy for the volume. class ExportPolicy { /// List of export policy rules @@ -3486,6 +3677,142 @@ class HourlySchedule { }; } +/// HybridPeeringDetails contains details about the hybrid peering. +class HybridPeeringDetails { + /// Copy-paste-able commands to be used on user's ONTAP to accept peering + /// requests. + /// + /// Optional. + core.String? command; + + /// Expiration time for the peering command to be executed on user's ONTAP. + /// + /// Optional. + core.String? commandExpiryTime; + + /// Temporary passphrase generated to accept cluster peering command. + /// + /// Optional. + core.String? passphrase; + + /// IP address of the subnet. + /// + /// Optional. + core.String? subnetIp; + + HybridPeeringDetails({ + this.command, + this.commandExpiryTime, + this.passphrase, + this.subnetIp, + }); + + HybridPeeringDetails.fromJson(core.Map json_) + : this( + command: json_['command'] as core.String?, + commandExpiryTime: json_['commandExpiryTime'] as core.String?, + passphrase: json_['passphrase'] as core.String?, + subnetIp: json_['subnetIp'] as core.String?, + ); + + core.Map toJson() => { + if (command != null) 'command': command!, + if (commandExpiryTime != null) 'commandExpiryTime': commandExpiryTime!, + if (passphrase != null) 'passphrase': passphrase!, + if (subnetIp != null) 'subnetIp': subnetIp!, + }; +} + +/// The Hybrid Replication parameters for the volume. +class HybridReplicationParameters { + /// Name of source cluster location associated with the Hybrid replication. + /// + /// This is a free-form field for the display purpose only. + /// + /// Optional. + core.String? clusterLocation; + + /// Description of the replication. + /// + /// Optional. + core.String? description; + + /// Labels to be added to the replication as the key value pairs. + /// + /// Optional. + core.Map? labels; + + /// Name of the user's local source cluster to be peered with the destination + /// cluster. + /// + /// Required. + core.String? peerClusterName; + + /// List of node ip addresses to be peered with. + /// + /// Required. + core.List? 
peerIpAddresses; + + /// Name of the user's local source vserver svm to be peered with the + /// destination vserver svm. + /// + /// Required. + core.String? peerSvmName; + + /// Name of the user's local source volume to be peered with the destination + /// volume. + /// + /// Required. + core.String? peerVolumeName; + + /// Desired name for the replication of this volume. + /// + /// Required. + core.String? replication; + + HybridReplicationParameters({ + this.clusterLocation, + this.description, + this.labels, + this.peerClusterName, + this.peerIpAddresses, + this.peerSvmName, + this.peerVolumeName, + this.replication, + }); + + HybridReplicationParameters.fromJson(core.Map json_) + : this( + clusterLocation: json_['clusterLocation'] as core.String?, + description: json_['description'] as core.String?, + labels: + (json_['labels'] as core.Map?)?.map( + (key, value) => core.MapEntry( + key, + value as core.String, + ), + ), + peerClusterName: json_['peerClusterName'] as core.String?, + peerIpAddresses: (json_['peerIpAddresses'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + peerSvmName: json_['peerSvmName'] as core.String?, + peerVolumeName: json_['peerVolumeName'] as core.String?, + replication: json_['replication'] as core.String?, + ); + + core.Map toJson() => { + if (clusterLocation != null) 'clusterLocation': clusterLocation!, + if (description != null) 'description': description!, + if (labels != null) 'labels': labels!, + if (peerClusterName != null) 'peerClusterName': peerClusterName!, + if (peerIpAddresses != null) 'peerIpAddresses': peerIpAddresses!, + if (peerSvmName != null) 'peerSvmName': peerSvmName!, + if (peerVolumeName != null) 'peerVolumeName': peerVolumeName!, + if (replication != null) 'replication': replication!, + }; +} + /// KmsConfig is the customer managed encryption key(CMEK) configuration. class KmsConfig { /// Create time of the KmsConfig. @@ -4146,6 +4473,11 @@ class Operation { /// Replication is a nested resource under Volume, that describes a cross-region /// replication relationship between 2 volumes in different regions. class Replication { + /// Location of the user cluster. + /// + /// Optional. + core.String? clusterLocation; + /// Replication create time. /// /// Output only. @@ -4178,6 +4510,22 @@ class Replication { /// Output only. core.bool? healthy; + /// Hybrid peering details. + /// + /// Output only. + HybridPeeringDetails? hybridPeeringDetails; + + /// Type of the hybrid replication. + /// + /// Output only. + /// Possible string values are: + /// - "HYBRID_REPLICATION_TYPE_UNSPECIFIED" : Unspecified hybrid replication + /// type. + /// - "MIGRATION" : Hybrid replication type for migration. + /// - "CONTINUOUS_REPLICATION" : Hybrid replication type for continuous + /// replication. + core.String? hybridReplicationType; + /// Resource labels to represent user provided metadata. core.Map? labels; @@ -4191,6 +4539,8 @@ class Replication { /// receive replication transfers. /// - "STOPPED" : Destination volume is not receiving replication transfers. /// - "TRANSFERRING" : Incremental replication is in progress. + /// - "BASELINE_TRANSFERRING" : Baseline replication is in progress. + /// - "ABORTED" : Replication is aborted. core.String? mirrorState; /// Identifier. @@ -4235,6 +4585,10 @@ class Replication { /// - "UPDATING" : Replication is updating. /// - "DELETING" : Replication is deleting. /// - "ERROR" : Replication is in error state. 
+ /// - "PENDING_CLUSTER_PEERING" : Replication is waiting for cluster peering + /// to be established. + /// - "PENDING_SVM_PEERING" : Replication is waiting for SVM peering to be + /// established. core.String? state; /// State details of the replication. @@ -4248,11 +4602,14 @@ class Replication { TransferStats? transferStats; Replication({ + this.clusterLocation, this.createTime, this.description, this.destinationVolume, this.destinationVolumeParameters, this.healthy, + this.hybridPeeringDetails, + this.hybridReplicationType, this.labels, this.mirrorState, this.name, @@ -4266,6 +4623,7 @@ class Replication { Replication.fromJson(core.Map json_) : this( + clusterLocation: json_['clusterLocation'] as core.String?, createTime: json_['createTime'] as core.String?, description: json_['description'] as core.String?, destinationVolume: json_['destinationVolume'] as core.String?, @@ -4276,6 +4634,11 @@ class Replication { as core.Map) : null, healthy: json_['healthy'] as core.bool?, + hybridPeeringDetails: json_.containsKey('hybridPeeringDetails') + ? HybridPeeringDetails.fromJson(json_['hybridPeeringDetails'] + as core.Map) + : null, + hybridReplicationType: json_['hybridReplicationType'] as core.String?, labels: (json_['labels'] as core.Map?)?.map( (key, value) => core.MapEntry( @@ -4297,12 +4660,17 @@ class Replication { ); core.Map toJson() => { + if (clusterLocation != null) 'clusterLocation': clusterLocation!, if (createTime != null) 'createTime': createTime!, if (description != null) 'description': description!, if (destinationVolume != null) 'destinationVolume': destinationVolume!, if (destinationVolumeParameters != null) 'destinationVolumeParameters': destinationVolumeParameters!, if (healthy != null) 'healthy': healthy!, + if (hybridPeeringDetails != null) + 'hybridPeeringDetails': hybridPeeringDetails!, + if (hybridReplicationType != null) + 'hybridReplicationType': hybridReplicationType!, if (labels != null) 'labels': labels!, if (mirrorState != null) 'mirrorState': mirrorState!, if (name != null) 'name': name!, @@ -4879,6 +5247,9 @@ class StoragePool { /// storagePool. typedef SwitchActiveReplicaZoneRequest = $Empty; +/// SyncReplicationRequest syncs the replication from source to destination. +typedef SyncReplicationRequest = $Empty; + /// Defines tiering policy for the volume. class TieringPolicy { /// Time in days to mark the volume's data block as cold and make it eligible @@ -4985,6 +5356,32 @@ class TransferStats { }; } +/// ValidateDirectoryServiceRequest validates the directory service policy +/// attached to the storage pool. +class ValidateDirectoryServiceRequest { + /// Type of directory service policy attached to the storage pool. + /// Possible string values are: + /// - "DIRECTORY_SERVICE_TYPE_UNSPECIFIED" : Directory service type is not + /// specified. + /// - "ACTIVE_DIRECTORY" : Active directory policy attached to the storage + /// pool. + core.String? directoryServiceType; + + ValidateDirectoryServiceRequest({ + this.directoryServiceType, + }); + + ValidateDirectoryServiceRequest.fromJson(core.Map json_) + : this( + directoryServiceType: json_['directoryServiceType'] as core.String?, + ); + + core.Map toJson() => { + if (directoryServiceType != null) + 'directoryServiceType': directoryServiceType!, + }; +} + /// VerifyKmsConfigRequest specifies the KMS config to be validated. typedef VerifyKmsConfigRequest = $Empty; @@ -5077,6 +5474,11 @@ class Volume { /// Output only. core.bool? hasReplication; + /// The Hybrid Replication parameters for the volume. 
+ /// + /// Optional. + HybridReplicationParameters? hybridReplicationParameters; + /// Flag indicating if the volume is a kerberos volume or not, export policy /// rules control kerberos security modes (krb5, krb5i, krb5p). /// @@ -5219,6 +5621,11 @@ class Volume { /// - "RESTORING" : Volume State is Restoring /// - "DISABLED" : Volume State is Disabled /// - "ERROR" : Volume State is Error + /// - "PREPARING" : Volume State is Preparing. Note that this is different + /// from CREATING where CREATING means the volume is being created, while + /// PREPARING means the volume is created and now being prepared for the + /// replication. + /// - "READ_ONLY" : Volume State is Read Only core.String? state; /// State details of the volume @@ -5265,6 +5672,7 @@ class Volume { this.encryptionType, this.exportPolicy, this.hasReplication, + this.hybridReplicationParameters, this.kerberosEnabled, this.kmsConfig, this.labels, @@ -5312,6 +5720,12 @@ class Volume { json_['exportPolicy'] as core.Map) : null, hasReplication: json_['hasReplication'] as core.bool?, + hybridReplicationParameters: + json_.containsKey('hybridReplicationParameters') + ? HybridReplicationParameters.fromJson( + json_['hybridReplicationParameters'] + as core.Map) + : null, kerberosEnabled: json_['kerberosEnabled'] as core.bool?, kmsConfig: json_['kmsConfig'] as core.String?, labels: @@ -5376,6 +5790,8 @@ class Volume { if (encryptionType != null) 'encryptionType': encryptionType!, if (exportPolicy != null) 'exportPolicy': exportPolicy!, if (hasReplication != null) 'hasReplication': hasReplication!, + if (hybridReplicationParameters != null) + 'hybridReplicationParameters': hybridReplicationParameters!, if (kerberosEnabled != null) 'kerberosEnabled': kerberosEnabled!, if (kmsConfig != null) 'kmsConfig': kmsConfig!, if (labels != null) 'labels': labels!, diff --git a/generated/googleapis/lib/networkconnectivity/v1.dart b/generated/googleapis/lib/networkconnectivity/v1.dart index 1b029e219..a81dde87a 100644 --- a/generated/googleapis/lib/networkconnectivity/v1.dart +++ b/generated/googleapis/lib/networkconnectivity/v1.dart @@ -663,6 +663,81 @@ class ProjectsLocationsGlobalHubsResource { response_ as core.Map); } + /// Query PSC propagation status the status of a Network Connectivity Center + /// hub. + /// + /// Request parameters: + /// + /// [name] - Required. The name of the hub. + /// Value must have pattern + /// `^projects/\[^/\]+/locations/global/hubs/\[^/\]+$`. + /// + /// [filter] - Optional. An expression that filters the list of results. The + /// filter can be used to filter the results by the following fields: * + /// psc_propagation_status.source_spoke * psc_propagation_status.source_group + /// * psc_propagation_status.source_forwarding_rule * + /// psc_propagation_status.target_spoke * psc_propagation_status.target_group + /// * psc_propagation_status.code * psc_propagation_status.message + /// + /// [groupBy] - Optional. A field that counts are grouped by. A + /// comma-separated list of any of these fields: * + /// psc_propagation_status.source_spoke * psc_propagation_status.source_group + /// * psc_propagation_status.source_forwarding_rule * + /// psc_propagation_status.target_spoke * psc_propagation_status.target_group + /// * psc_propagation_status.code + /// + /// [orderBy] - Optional. Sort the results in the ascending order by specific + /// fields returned in the response. 
A comma-separated list of any of these + /// fields: * psc_propagation_status.source_spoke * + /// psc_propagation_status.source_group * + /// psc_propagation_status.source_forwarding_rule * + /// psc_propagation_status.target_spoke * psc_propagation_status.target_group + /// * psc_propagation_status.code If `group_by` is set, the value of the + /// `order_by` field must be the same as or a subset of the `group_by` field. + /// + /// [pageSize] - Optional. The maximum number of results to return per page. + /// + /// [pageToken] - Optional. The page token. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [QueryHubStatusResponse]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future queryStatus( + core.String name, { + core.String? filter, + core.String? groupBy, + core.String? orderBy, + core.int? pageSize, + core.String? pageToken, + core.String? $fields, + }) async { + final queryParams_ = >{ + if (filter != null) 'filter': [filter], + if (groupBy != null) 'groupBy': [groupBy], + if (orderBy != null) 'orderBy': [orderBy], + if (pageSize != null) 'pageSize': ['${pageSize}'], + if (pageToken != null) 'pageToken': [pageToken], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name') + ':queryStatus'; + + final response_ = await _requester.request( + url_, + 'GET', + queryParams: queryParams_, + ); + return QueryHubStatusResponse.fromJson( + response_ as core.Map); + } + /// Rejects a Network Connectivity Center spoke from being attached to a hub. /// /// If the spoke was previously in the `ACTIVE` state, it transitions to the @@ -5022,60 +5097,7 @@ class GoogleLongrunningOperation { /// Spanner instance in a region that is out of stock: { "reason": "STOCKOUT" /// "domain": "spanner.googleapis.com", "metadata": { "availableRegions": /// "us-central1,us-east2" } } -class GoogleRpcErrorInfo { - /// The logical grouping to which the "reason" belongs. - /// - /// The error domain is typically the registered service name of the tool or - /// product that generates the error. Example: "pubsub.googleapis.com". If the - /// error is generated by some common infrastructure, the error domain must be - /// a globally unique value that identifies the infrastructure. For Google API - /// infrastructure, the error domain is "googleapis.com". - core.String? domain; - - /// Additional structured details about this error. - /// - /// Keys must match /a-z+/ but should ideally be lowerCamelCase. Also they - /// must be limited to 64 characters in length. When identifying the current - /// value of an exceeded limit, the units should be contained in the key, not - /// the value. For example, rather than {"instanceLimit": "100/request"}, - /// should be returned as, {"instanceLimitPerRequest": "100"}, if the client - /// exceeds the number of instances that can be created in a single (batch) - /// request. - core.Map? metadata; - - /// The reason of the error. - /// - /// This is a constant value that identifies the proximate cause of the error. - /// Error reasons are unique within a particular domain of errors. This should - /// be at most 63 characters and match a regular expression of `A-Z+[A-Z0-9]`, - /// which represents UPPER_SNAKE_CASE. - core.String? 
reason; - - GoogleRpcErrorInfo({ - this.domain, - this.metadata, - this.reason, - }); - - GoogleRpcErrorInfo.fromJson(core.Map json_) - : this( - domain: json_['domain'] as core.String?, - metadata: - (json_['metadata'] as core.Map?)?.map( - (key, value) => core.MapEntry( - key, - value as core.String, - ), - ), - reason: json_['reason'] as core.String?, - ); - - core.Map toJson() => { - if (domain != null) 'domain': domain!, - if (metadata != null) 'metadata': metadata!, - if (reason != null) 'reason': reason!, - }; -} +typedef GoogleRpcErrorInfo = $ErrorInfo; /// The `Status` type defines a logical error model that is suitable for /// different programming environments, including REST APIs and RPC APIs. @@ -5394,6 +5416,43 @@ class Hub { }; } +/// The hub status entry. +class HubStatusEntry { + /// The number of status. + /// + /// If group_by is not set in the request, the default is 1. + core.int? count; + + /// The same group_by field from the request. + core.String? groupBy; + + /// The PSC propagation status. + PscPropagationStatus? pscPropagationStatus; + + HubStatusEntry({ + this.count, + this.groupBy, + this.pscPropagationStatus, + }); + + HubStatusEntry.fromJson(core.Map json_) + : this( + count: json_['count'] as core.int?, + groupBy: json_['groupBy'] as core.String?, + pscPropagationStatus: json_.containsKey('pscPropagationStatus') + ? PscPropagationStatus.fromJson(json_['pscPropagationStatus'] + as core.Map) + : null, + ); + + core.Map toJson() => { + if (count != null) 'count': count!, + if (groupBy != null) 'groupBy': groupBy!, + if (pscPropagationStatus != null) + 'pscPropagationStatus': pscPropagationStatus!, + }; +} + /// InterconnectAttachment that this route applies to. class InterconnectAttachment { /// Cloud region to install this policy-based route on interconnect @@ -5675,13 +5734,17 @@ class LinkedInterconnectAttachments { }; } -/// Next ID: 7 class LinkedProducerVpcNetwork { /// IP ranges encompassing the subnets to be excluded from peering. /// /// Optional. core.List? excludeExportRanges; + /// IP ranges allowed to be included from peering. + /// + /// Optional. + core.List? includeExportRanges; + /// The URI of the Service Consumer VPC that the Producer VPC is peered with. /// /// Immutable. @@ -5708,6 +5771,7 @@ class LinkedProducerVpcNetwork { LinkedProducerVpcNetwork({ this.excludeExportRanges, + this.includeExportRanges, this.network, this.peering, this.producerNetwork, @@ -5719,6 +5783,9 @@ class LinkedProducerVpcNetwork { excludeExportRanges: (json_['excludeExportRanges'] as core.List?) ?.map((value) => value as core.String) .toList(), + includeExportRanges: (json_['includeExportRanges'] as core.List?) + ?.map((value) => value as core.String) + .toList(), network: json_['network'] as core.String?, peering: json_['peering'] as core.String?, producerNetwork: json_['producerNetwork'] as core.String?, @@ -5729,6 +5796,8 @@ class LinkedProducerVpcNetwork { core.Map toJson() => { if (excludeExportRanges != null) 'excludeExportRanges': excludeExportRanges!, + if (includeExportRanges != null) + 'includeExportRanges': includeExportRanges!, if (network != null) 'network': network!, if (peering != null) 'peering': peering!, if (producerNetwork != null) 'producerNetwork': producerNetwork!, @@ -6099,7 +6168,7 @@ class ListLocationsResponse { }; } -/// Response for PolicyBasedRouting.ListPolicyBasedRoutes method. +/// Response for PolicyBasedRoutingService.ListPolicyBasedRoutes method. 
class ListPolicyBasedRoutesResponse { /// The next pagination token in the List response. /// @@ -7189,6 +7258,111 @@ class PscConnection { }; } +/// The PSC propagation status in a hub. +class PscPropagationStatus { + /// The propagation status. + /// Possible string values are: + /// - "CODE_UNSPECIFIED" : The code is unspecified. + /// - "READY" : The propagated PSC connection is ready. + /// - "PROPAGATING" : PSC connection is propagating. This is a transient + /// state. + /// - "ERROR_PRODUCER_PROPAGATED_CONNECTION_LIMIT_EXCEEDED" : The PSC + /// connection propagation failed because the VPC network or the project of + /// the target spoke has exceeded the connection limit set by the producer. + /// - "ERROR_PRODUCER_NAT_IP_SPACE_EXHAUSTED" : The PSC connection propagation + /// failed because the NAT IP subnet space has been exhausted. It is + /// equivalent to the `Needs attention` status of the PSC connection. See + /// https://cloud.google.com/vpc/docs/about-accessing-vpc-hosted-services-endpoints#connection-statuses. + /// - "ERROR_PRODUCER_QUOTA_EXCEEDED" : PSC connection propagation failed + /// because the `PSC_ILB_CONSUMER_FORWARDING_RULES_PER_PRODUCER_NETWORK` quota + /// in the producer VPC network has been exceeded. + /// - "ERROR_CONSUMER_QUOTA_EXCEEDED" : The PSC connection propagation failed + /// because the `PSC_PROPAGATED_CONNECTIONS_PER_VPC_NETWORK` quota in the + /// consumer VPC network has been exceeded. + core.String? code; + + /// The human-readable summary of the PSC connection propagation status. + core.String? message; + + /// The name of the forwarding rule exported to the hub. + core.String? sourceForwardingRule; + + /// The name of the group that the source spoke belongs to. + core.String? sourceGroup; + + /// The name of the spoke that the source forwarding rule belongs to. + core.String? sourceSpoke; + + /// The name of the group that the target spoke belongs to. + core.String? targetGroup; + + /// The name of the spoke that the source forwarding rule propagates to. + core.String? targetSpoke; + + PscPropagationStatus({ + this.code, + this.message, + this.sourceForwardingRule, + this.sourceGroup, + this.sourceSpoke, + this.targetGroup, + this.targetSpoke, + }); + + PscPropagationStatus.fromJson(core.Map json_) + : this( + code: json_['code'] as core.String?, + message: json_['message'] as core.String?, + sourceForwardingRule: json_['sourceForwardingRule'] as core.String?, + sourceGroup: json_['sourceGroup'] as core.String?, + sourceSpoke: json_['sourceSpoke'] as core.String?, + targetGroup: json_['targetGroup'] as core.String?, + targetSpoke: json_['targetSpoke'] as core.String?, + ); + + core.Map toJson() => { + if (code != null) 'code': code!, + if (message != null) 'message': message!, + if (sourceForwardingRule != null) + 'sourceForwardingRule': sourceForwardingRule!, + if (sourceGroup != null) 'sourceGroup': sourceGroup!, + if (sourceSpoke != null) 'sourceSpoke': sourceSpoke!, + if (targetGroup != null) 'targetGroup': targetGroup!, + if (targetSpoke != null) 'targetSpoke': targetSpoke!, + }; +} + +/// The response for HubService.QueryHubStatus. +class QueryHubStatusResponse { + /// The list of hub status. + core.List? hubStatusEntries; + + /// The token for the next page of the response. + /// + /// To see more results, use this value as the page_token for your next + /// request. If this value is empty, there are no more results. + core.String? 
nextPageToken; + + QueryHubStatusResponse({ + this.hubStatusEntries, + this.nextPageToken, + }); + + QueryHubStatusResponse.fromJson(core.Map json_) + : this( + hubStatusEntries: (json_['hubStatusEntries'] as core.List?) + ?.map((value) => HubStatusEntry.fromJson( + value as core.Map)) + .toList(), + nextPageToken: json_['nextPageToken'] as core.String?, + ); + + core.Map toJson() => { + if (hubStatusEntries != null) 'hubStatusEntries': hubStatusEntries!, + if (nextPageToken != null) 'nextPageToken': nextPageToken!, + }; +} + /// The RegionalEndpoint resource. class RegionalEndpoint { /// The access type of this regional endpoint. @@ -7209,7 +7383,8 @@ class RegionalEndpoint { /// When no address is provided, an IP from the subnetwork is allocated. Use /// one of the following formats: * IPv4 address as in `10.0.0.1` * Address /// resource URI as in - /// `projects/{project}/regions/{region}/addresses/{address_name}` + /// `projects/{project}/regions/{region}/addresses/{address_name}` for an IPv4 + /// or IPv6 address. /// /// Optional. core.String? address; @@ -7741,8 +7916,6 @@ class RoutingVPC { } /// The ServiceClass resource. -/// -/// Next id: 9 class ServiceClass { /// Time when the ServiceClass was created. /// @@ -7822,8 +7995,6 @@ class ServiceClass { } /// The ServiceConnectionMap resource. -/// -/// Next id: 15 class ServiceConnectionMap { /// The PSC configurations on consumer side. core.List? consumerPscConfigs; @@ -7964,8 +8135,6 @@ class ServiceConnectionMap { } /// The ServiceConnectionPolicy resource. -/// -/// Next id: 12 class ServiceConnectionPolicy { /// Time when the ServiceConnectionPolicy was created. /// @@ -8089,8 +8258,6 @@ class ServiceConnectionPolicy { } /// The ServiceConnectionToken resource. -/// -/// Next id: 10 class ServiceConnectionToken { /// Time when the ServiceConnectionToken was created. /// @@ -8278,8 +8445,6 @@ class Spoke { /// The reasons for current state of the spoke. /// - /// Only present when the spoke is in the `INACTIVE` state. - /// /// Output only. core.List? reasons; @@ -8479,6 +8644,11 @@ class SpokeStateReasonCount { /// - "PAUSED" : The spoke has been deactivated internally. /// - "FAILED" : Network Connectivity Center encountered errors while /// accepting the spoke. + /// - "UPDATE_PENDING_REVIEW" : The proposed spoke update is pending review. + /// - "UPDATE_REJECTED" : The proposed spoke update has been rejected by the + /// hub administrator. + /// - "UPDATE_FAILED" : Network Connectivity Center encountered errors while + /// accepting the spoke update. core.String? stateReasonCode; SpokeStateReasonCount({ @@ -8604,6 +8774,11 @@ class StateReason { /// - "PAUSED" : The spoke has been deactivated internally. /// - "FAILED" : Network Connectivity Center encountered errors while /// accepting the spoke. + /// - "UPDATE_PENDING_REVIEW" : The proposed spoke update is pending review. + /// - "UPDATE_REJECTED" : The proposed spoke update has been rejected by the + /// hub administrator. + /// - "UPDATE_FAILED" : Network Connectivity Center encountered errors while + /// accepting the spoke update. core.String? code; /// Human-readable details about this reason. 
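Not part of the diff: a hedged sketch of calling the new HubService.QueryHubStatus surface in networkconnectivity/v1 shown above (queryStatus on the hubs resource together with QueryHubStatusResponse, HubStatusEntry, and PscPropagationStatus). The accessor chain used to obtain the resource, the hub name, and the filter string are illustrative assumptions.

import 'package:googleapis/networkconnectivity/v1.dart';

Future<void> printPscPropagationProblems(
    ProjectsLocationsGlobalHubsResource hubs) async {
  // Placeholder hub name.
  const hub = 'projects/my-project/locations/global/hubs/my-hub';

  String? pageToken;
  do {
    final QueryHubStatusResponse page = await hubs.queryStatus(
      hub,
      // Example filter: only entries whose propagation code is not READY.
      filter: 'psc_propagation_status.code!=READY',
      groupBy: 'psc_propagation_status.code',
      pageToken: pageToken,
    );
    for (final entry in page.hubStatusEntries ?? <HubStatusEntry>[]) {
      final status = entry.pscPropagationStatus;
      print('${entry.count} entries with ${status?.code}: ${status?.message}');
    }
    pageToken = page.nextPageToken;
  } while (pageToken != null && pageToken.isNotEmpty);
}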
diff --git a/generated/googleapis/lib/networkmanagement/v1.dart b/generated/googleapis/lib/networkmanagement/v1.dart index a855c15af..238f1d2ca 100644 --- a/generated/googleapis/lib/networkmanagement/v1.dart +++ b/generated/googleapis/lib/networkmanagement/v1.dart @@ -26,6 +26,7 @@ /// - [ProjectsLocationsGlobalResource] /// - [ProjectsLocationsGlobalConnectivityTestsResource] /// - [ProjectsLocationsGlobalOperationsResource] +/// - [ProjectsLocationsVpcFlowLogsConfigsResource] library; import 'dart:async' as async; @@ -74,6 +75,8 @@ class ProjectsLocationsResource { ProjectsLocationsGlobalResource get global => ProjectsLocationsGlobalResource(_requester); + ProjectsLocationsVpcFlowLogsConfigsResource get vpcFlowLogsConfigs => + ProjectsLocationsVpcFlowLogsConfigsResource(_requester); ProjectsLocationsResource(commons.ApiRequester client) : _requester = client; @@ -659,8 +662,8 @@ class ProjectsLocationsGlobalOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// [request] - The metadata request object. /// @@ -831,6 +834,253 @@ class ProjectsLocationsGlobalOperationsResource { } } +class ProjectsLocationsVpcFlowLogsConfigsResource { + final commons.ApiRequester _requester; + + ProjectsLocationsVpcFlowLogsConfigsResource(commons.ApiRequester client) + : _requester = client; + + /// Creates a new `VpcFlowLogsConfig`. + /// + /// If a configuration with the exact same settings already exists (even if + /// the ID is different), the creation fails. Notes: 1. Creating a + /// configuration with state=DISABLED will fail 2. The following fields are + /// not considered as `settings` for the purpose of the check mentioned above, + /// therefore - creating another configuration with the same fields but + /// different values for the following fields will fail as well: * name * + /// create_time * update_time * labels * description + /// + /// [request] - The metadata request object. + /// + /// Request parameters: + /// + /// [parent] - Required. The parent resource of the VPC Flow Logs + /// configuration to create: `projects/{project_id}/locations/global` + /// Value must have pattern `^projects/\[^/\]+/locations/\[^/\]+$`. + /// + /// [vpcFlowLogsConfigId] - Required. ID of the `VpcFlowLogsConfig`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [Operation]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future create( + VpcFlowLogsConfig request, + core.String parent, { + core.String? vpcFlowLogsConfigId, + core.String? 
$fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if (vpcFlowLogsConfigId != null) + 'vpcFlowLogsConfigId': [vpcFlowLogsConfigId], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/vpcFlowLogsConfigs'; + + final response_ = await _requester.request( + url_, + 'POST', + body: body_, + queryParams: queryParams_, + ); + return Operation.fromJson(response_ as core.Map); + } + + /// Deletes a specific `VpcFlowLogsConfig`. + /// + /// Request parameters: + /// + /// [name] - Required. `VpcFlowLogsConfig` resource name using the form: + /// `projects/{project_id}/locations/global/vpcFlowLogsConfigs/{vpc_flow_logs_config}` + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/vpcFlowLogsConfigs/\[^/\]+$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [Operation]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future delete( + core.String name, { + core.String? $fields, + }) async { + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name'); + + final response_ = await _requester.request( + url_, + 'DELETE', + queryParams: queryParams_, + ); + return Operation.fromJson(response_ as core.Map); + } + + /// Gets the details of a specific `VpcFlowLogsConfig`. + /// + /// Request parameters: + /// + /// [name] - Required. `VpcFlowLogsConfig` resource name using the form: + /// `projects/{project_id}/locations/global/vpcFlowLogsConfigs/{vpc_flow_logs_config}` + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/vpcFlowLogsConfigs/\[^/\]+$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [VpcFlowLogsConfig]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future get( + core.String name, { + core.String? $fields, + }) async { + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name'); + + final response_ = await _requester.request( + url_, + 'GET', + queryParams: queryParams_, + ); + return VpcFlowLogsConfig.fromJson( + response_ as core.Map); + } + + /// Lists all `VpcFlowLogsConfigs` in a given project. + /// + /// Request parameters: + /// + /// [parent] - Required. The parent resource of the VpcFlowLogsConfig: + /// `projects/{project_id}/locations/global` + /// Value must have pattern `^projects/\[^/\]+/locations/\[^/\]+$`. + /// + /// [filter] - Optional. Lists the `VpcFlowLogsConfigs` that match the filter + /// expression. A filter expression must use the supported + /// [CEL logic operators](https://cloud.google.com/vpc/docs/about-flow-logs-records#supported_cel_logic_operators). + /// + /// [orderBy] - Optional. Field to use to sort the list. + /// + /// [pageSize] - Optional. Number of `VpcFlowLogsConfigs` to return. + /// + /// [pageToken] - Optional. Page token from an earlier query, as returned in + /// `next_page_token`. 
+ /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [ListVpcFlowLogsConfigsResponse]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future list( + core.String parent, { + core.String? filter, + core.String? orderBy, + core.int? pageSize, + core.String? pageToken, + core.String? $fields, + }) async { + final queryParams_ = >{ + if (filter != null) 'filter': [filter], + if (orderBy != null) 'orderBy': [orderBy], + if (pageSize != null) 'pageSize': ['${pageSize}'], + if (pageToken != null) 'pageToken': [pageToken], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/vpcFlowLogsConfigs'; + + final response_ = await _requester.request( + url_, + 'GET', + queryParams: queryParams_, + ); + return ListVpcFlowLogsConfigsResponse.fromJson( + response_ as core.Map); + } + + /// Updates an existing `VpcFlowLogsConfig`. + /// + /// If a configuration with the exact same settings already exists (even if + /// the ID is different), the creation fails. Notes: 1. Updating a + /// configuration with state=DISABLED will fail. 2. The following fields are + /// not considered as `settings` for the purpose of the check mentioned above, + /// therefore - updating another configuration with the same fields but + /// different values for the following fields will fail as well: * name * + /// create_time * update_time * labels * description + /// + /// [request] - The metadata request object. + /// + /// Request parameters: + /// + /// [name] - Identifier. Unique name of the configuration using the form: + /// `projects/{project_id}/locations/global/vpcFlowLogsConfigs/{vpc_flow_logs_config_id}` + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/vpcFlowLogsConfigs/\[^/\]+$`. + /// + /// [updateMask] - Required. Mask of fields to update. At least one path must + /// be supplied in this field. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [Operation]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future patch( + VpcFlowLogsConfig request, + core.String name, { + core.String? updateMask, + core.String? $fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if (updateMask != null) 'updateMask': [updateMask], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name'); + + final response_ = await _requester.request( + url_, + 'PATCH', + body: body_, + queryParams: queryParams_, + ); + return Operation.fromJson(response_ as core.Map); + } +} + /// Details of the final state "abort" and associated resource. class AbortInfo { /// Causes that the analysis is aborted. @@ -867,6 +1117,8 @@ class AbortInfo { /// - "PERMISSION_DENIED_NO_NEG_ENDPOINT_CONFIGS" : Aborted because user lacks /// permission to access Network endpoint group endpoint configs required to /// run the test. 
+ /// - "PERMISSION_DENIED_NO_CLOUD_ROUTER_CONFIGS" : Aborted because user lacks + /// permission to access Cloud Router configs required to run the test. /// - "NO_SOURCE_LOCATION" : Aborted because no valid source or destination /// endpoint is derived from the input test request. /// - "INVALID_ARGUMENT" : Aborted because the source or destination endpoint @@ -1421,6 +1673,20 @@ class ConnectivityTest { /// This is applicable to scenarios where a test can cross project boundaries. core.List? relatedProjects; + /// The reachability details of this test from the latest run for the return + /// path. + /// + /// The details are updated when creating a new test, updating an existing + /// test, or triggering a one-time rerun of an existing test. + /// + /// Output only. + ReachabilityDetails? returnReachabilityDetails; + + /// Whether run analysis for the return path from destination to source. + /// + /// Default value is false. + core.bool? roundTrip; + /// Source specification of the Connectivity Test. /// /// You can use a combination of source IP address, virtual machine (VM) @@ -1457,6 +1723,8 @@ class ConnectivityTest { this.protocol, this.reachabilityDetails, this.relatedProjects, + this.returnReachabilityDetails, + this.roundTrip, this.source, this.updateTime, }); @@ -1491,6 +1759,12 @@ class ConnectivityTest { relatedProjects: (json_['relatedProjects'] as core.List?) ?.map((value) => value as core.String) .toList(), + returnReachabilityDetails: json_ + .containsKey('returnReachabilityDetails') + ? ReachabilityDetails.fromJson(json_['returnReachabilityDetails'] + as core.Map) + : null, + roundTrip: json_['roundTrip'] as core.bool?, source: json_.containsKey('source') ? Endpoint.fromJson( json_['source'] as core.Map) @@ -1512,6 +1786,9 @@ class ConnectivityTest { if (reachabilityDetails != null) 'reachabilityDetails': reachabilityDetails!, if (relatedProjects != null) 'relatedProjects': relatedProjects!, + if (returnReachabilityDetails != null) + 'returnReachabilityDetails': returnReachabilityDetails!, + if (roundTrip != null) 'roundTrip': roundTrip!, if (source != null) 'source': source!, if (updateTime != null) 'updateTime': updateTime!, }; @@ -1801,6 +2078,9 @@ class DropInfo { /// - "NO_KNOWN_ROUTE_FROM_PEERED_NETWORK_TO_DESTINATION" : Packet from the /// unknown peered network is dropped due to no known route from the source /// network to the destination IP address. + /// - "PRIVATE_NAT_TO_PSC_ENDPOINT_UNSUPPORTED" : Sending packets processed by + /// the Private NAT Gateways to the Private Service Connect endpoints is not + /// supported. core.String? cause; /// Destination IP address of the dropped packet (if relevant). @@ -1907,8 +2187,15 @@ class Endpoint { /// - "PSC" : Forwarding Rule is a Private Service Connect endpoint. core.String? forwardingRuleTarget; + /// DNS endpoint of + /// [Google Kubernetes Engine cluster control plane](https://cloud.google.com/kubernetes-engine/docs/concepts/cluster-architecture). + /// + /// Requires gke_master_cluster to be set, can't be used simultaneoulsly with + /// ip_address or network. Applicable only to destination endpoint. + core.String? fqdn; + /// A cluster URI for - /// [Google Kubernetes Engine master](https://cloud.google.com/kubernetes-engine/docs/concepts/cluster-architecture). + /// [Google Kubernetes Engine cluster control plane](https://cloud.google.com/kubernetes-engine/docs/concepts/cluster-architecture). core.String? gkeMasterCluster; /// A Compute Engine instance URI. 
@@ -1988,6 +2275,7 @@ class Endpoint { this.cloudSqlInstance, this.forwardingRule, this.forwardingRuleTarget, + this.fqdn, this.gkeMasterCluster, this.instance, this.ipAddress, @@ -2018,6 +2306,7 @@ class Endpoint { cloudSqlInstance: json_['cloudSqlInstance'] as core.String?, forwardingRule: json_['forwardingRule'] as core.String?, forwardingRuleTarget: json_['forwardingRuleTarget'] as core.String?, + fqdn: json_['fqdn'] as core.String?, gkeMasterCluster: json_['gkeMasterCluster'] as core.String?, instance: json_['instance'] as core.String?, ipAddress: json_['ipAddress'] as core.String?, @@ -2039,6 +2328,7 @@ class Endpoint { if (forwardingRule != null) 'forwardingRule': forwardingRule!, if (forwardingRuleTarget != null) 'forwardingRuleTarget': forwardingRuleTarget!, + if (fqdn != null) 'fqdn': fqdn!, if (gkeMasterCluster != null) 'gkeMasterCluster': gkeMasterCluster!, if (instance != null) 'instance': instance!, if (ipAddress != null) 'ipAddress': ipAddress!, @@ -2415,15 +2705,19 @@ class GKEMasterInfo { /// URI of a GKE cluster. core.String? clusterUri; - /// External IP address of a GKE cluster master. + /// DNS endpoint of a GKE cluster control plane. + core.String? dnsEndpoint; + + /// External IP address of a GKE cluster control plane. core.String? externalIp; - /// Internal IP address of a GKE cluster master. + /// Internal IP address of a GKE cluster control plane. core.String? internalIp; GKEMasterInfo({ this.clusterNetworkUri, this.clusterUri, + this.dnsEndpoint, this.externalIp, this.internalIp, }); @@ -2432,6 +2726,7 @@ class GKEMasterInfo { : this( clusterNetworkUri: json_['clusterNetworkUri'] as core.String?, clusterUri: json_['clusterUri'] as core.String?, + dnsEndpoint: json_['dnsEndpoint'] as core.String?, externalIp: json_['externalIp'] as core.String?, internalIp: json_['internalIp'] as core.String?, ); @@ -2439,6 +2734,7 @@ class GKEMasterInfo { core.Map toJson() => { if (clusterNetworkUri != null) 'clusterNetworkUri': clusterNetworkUri!, if (clusterUri != null) 'clusterUri': clusterUri!, + if (dnsEndpoint != null) 'dnsEndpoint': dnsEndpoint!, if (externalIp != null) 'externalIp': externalIp!, if (internalIp != null) 'internalIp': internalIp!, }; @@ -2711,6 +3007,44 @@ class ListOperationsResponse { }; } +/// Response for the `ListVpcFlowLogsConfigs` method. +class ListVpcFlowLogsConfigsResponse { + /// Page token to fetch the next set of configurations. + core.String? nextPageToken; + + /// Locations that could not be reached (when querying all locations with + /// `-`). + core.List? unreachable; + + /// List of VPC Flow Log configurations. + core.List? vpcFlowLogsConfigs; + + ListVpcFlowLogsConfigsResponse({ + this.nextPageToken, + this.unreachable, + this.vpcFlowLogsConfigs, + }); + + ListVpcFlowLogsConfigsResponse.fromJson(core.Map json_) + : this( + nextPageToken: json_['nextPageToken'] as core.String?, + unreachable: (json_['unreachable'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + vpcFlowLogsConfigs: (json_['vpcFlowLogsConfigs'] as core.List?) + ?.map((value) => VpcFlowLogsConfig.fromJson( + value as core.Map)) + .toList(), + ); + + core.Map toJson() => { + if (nextPageToken != null) 'nextPageToken': nextPageToken!, + if (unreachable != null) 'unreachable': unreachable!, + if (vpcFlowLogsConfigs != null) + 'vpcFlowLogsConfigs': vpcFlowLogsConfigs!, + }; +} + /// For display only. /// /// Metadata associated with a specific load balancer backend. 
@@ -3688,13 +4022,18 @@ typedef RerunConnectivityTestRequest = $Empty; /// /// Metadata associated with a Compute Engine route. class RouteInfo { - /// For advertised routes, the URI of their next hop, i.e. the URI of the + /// For ADVERTISED routes, the URI of their next hop, i.e. the URI of the /// hybrid endpoint (VPN tunnel, Interconnect attachment, NCC router /// appliance) the advertised prefix is advertised through, or URI of the /// source peered network. + /// + /// Deprecated in favor of the next_hop_uri field, not used in new tests. + @core.Deprecated( + 'Not supported. Member documentation may have more information.', + ) core.String? advertisedRouteNextHopUri; - /// For advertised dynamic routes, the URI of the Cloud Router that advertised + /// For ADVERTISED dynamic routes, the URI of the Cloud Router that advertised /// the corresponding IP prefix. core.String? advertisedRouteSourceRouterUri; @@ -3703,7 +4042,7 @@ class RouteInfo { /// Destination port ranges of the route. /// - /// Policy based routes only. + /// POLICY_BASED routes only. core.List? destPortRanges; /// Name of a route. @@ -3712,31 +4051,46 @@ class RouteInfo { /// Instance tags of the route. core.List? instanceTags; - /// URI of a NCC Hub. + /// For PEERING_SUBNET and PEERING_DYNAMIC routes that are advertised by NCC + /// Hub, the URI of the corresponding route in NCC Hub's routing table. + core.String? nccHubRouteUri; + + /// URI of the NCC Hub the route is advertised by. /// - /// NCC_HUB routes only. + /// PEERING_SUBNET and PEERING_DYNAMIC routes that are advertised by NCC Hub + /// only. core.String? nccHubUri; - /// URI of a NCC Spoke. + /// URI of the destination NCC Spoke. /// - /// NCC_HUB routes only. + /// PEERING_SUBNET and PEERING_DYNAMIC routes that are advertised by NCC Hub + /// only. core.String? nccSpokeUri; - /// URI of a Compute Engine network. - /// - /// NETWORK routes only. + /// URI of a VPC network where route is located. core.String? networkUri; - /// Next hop of the route. + /// String type of the next hop of the route (for example, "VPN tunnel"). + /// + /// Deprecated in favor of the next_hop_type and next_hop_uri fields, not used + /// in new tests. + @core.Deprecated( + 'Not supported. Member documentation may have more information.', + ) core.String? nextHop; + /// URI of a VPC network where the next hop resource is located. + core.String? nextHopNetworkUri; + /// Type of next hop. /// Possible string values are: /// - "NEXT_HOP_TYPE_UNSPECIFIED" : Unspecified type. Default value. /// - "NEXT_HOP_IP" : Next hop is an IP address. /// - "NEXT_HOP_INSTANCE" : Next hop is a Compute Engine instance. /// - "NEXT_HOP_NETWORK" : Next hop is a VPC network gateway. - /// - "NEXT_HOP_PEERING" : Next hop is a peering VPC. + /// - "NEXT_HOP_PEERING" : Next hop is a peering VPC. This scenario only + /// happens when the user doesn't have permissions to the project where the + /// next hop resource is located. /// - "NEXT_HOP_INTERCONNECT" : Next hop is an interconnect. /// - "NEXT_HOP_VPN_TUNNEL" : Next hop is a VPN tunnel. /// - "NEXT_HOP_VPN_GATEWAY" : Next hop is a VPN gateway. This scenario only @@ -3746,31 +4100,54 @@ class RouteInfo { /// gateway. /// - "NEXT_HOP_INTERNET_GATEWAY" : Next hop is an internet gateway. /// - "NEXT_HOP_BLACKHOLE" : Next hop is blackhole; that is, the next hop - /// either does not exist or is not running. + /// either does not exist or is unusable. /// - "NEXT_HOP_ILB" : Next hop is the forwarding rule of an Internal Load /// Balancer. 
/// - "NEXT_HOP_ROUTER_APPLIANCE" : Next hop is a /// [router appliance instance](https://cloud.google.com/network-connectivity/docs/network-connectivity-center/concepts/ra-overview). - /// - "NEXT_HOP_NCC_HUB" : Next hop is an NCC hub. + /// - "NEXT_HOP_NCC_HUB" : Next hop is an NCC hub. This scenario only happens + /// when the user doesn't have permissions to the project where the next hop + /// resource is located. core.String? nextHopType; + /// URI of the next hop resource. + core.String? nextHopUri; + + /// For PEERING_SUBNET, PEERING_STATIC and PEERING_DYNAMIC routes, the name of + /// the originating SUBNET/STATIC/DYNAMIC route. + core.String? originatingRouteDisplayName; + + /// For PEERING_SUBNET and PEERING_STATIC routes, the URI of the originating + /// SUBNET/STATIC route. + core.String? originatingRouteUri; + /// Priority of the route. core.int? priority; /// Protocols of the route. /// - /// Policy based routes only. + /// POLICY_BASED routes only. core.List? protocols; - /// Region of the route (if applicable). + /// Region of the route. + /// + /// DYNAMIC, PEERING_DYNAMIC, POLICY_BASED and ADVERTISED routes only. If set + /// for POLICY_BASED route, this is a region of VLAN attachments for Cloud + /// Interconnect the route applies to. core.String? region; /// Indicates where route is applicable. + /// + /// Deprecated, routes with NCC_HUB scope are not included in the trace in new + /// tests. /// Possible string values are: /// - "ROUTE_SCOPE_UNSPECIFIED" : Unspecified scope. Default value. /// - "NETWORK" : Route is applicable to packets in Network. /// - "NCC_HUB" : Route is applicable to packets using NCC Hub's routing /// table. + @core.Deprecated( + 'Not supported. Member documentation may have more information.', + ) core.String? routeScope; /// Type of route. @@ -3780,9 +4157,11 @@ class RouteInfo { /// - "STATIC" : Static route created by the user, including the default route /// to the internet. /// - "DYNAMIC" : Dynamic route exchanged between BGP peers. - /// - "PEERING_SUBNET" : A subnet route received from peering network. + /// - "PEERING_SUBNET" : A subnet route received from peering network or NCC + /// Hub. /// - "PEERING_STATIC" : A static route received from peering network. - /// - "PEERING_DYNAMIC" : A dynamic route received from peering network. + /// - "PEERING_DYNAMIC" : A dynamic route received from peering network or NCC + /// Hub. /// - "POLICY_BASED" : Policy based route. /// - "ADVERTISED" : Advertised route. Synthetic route which is used to /// transition from the StartFromPrivateNetwork state in Connectivity tests. @@ -3790,15 +4169,18 @@ class RouteInfo { /// Source IP address range of the route. /// - /// Policy based routes only. + /// POLICY_BASED routes only. core.String? srcIpRange; /// Source port ranges of the route. /// - /// Policy based routes only. + /// POLICY_BASED routes only. core.List? srcPortRanges; - /// URI of a route (if applicable). + /// URI of a route. + /// + /// SUBNET, STATIC, PEERING_SUBNET (only for peering network) and POLICY_BASED + /// routes only. core.String? 
uri; RouteInfo({ @@ -3808,11 +4190,16 @@ class RouteInfo { this.destPortRanges, this.displayName, this.instanceTags, + this.nccHubRouteUri, this.nccHubUri, this.nccSpokeUri, this.networkUri, this.nextHop, + this.nextHopNetworkUri, this.nextHopType, + this.nextHopUri, + this.originatingRouteDisplayName, + this.originatingRouteUri, this.priority, this.protocols, this.region, @@ -3837,11 +4224,17 @@ class RouteInfo { instanceTags: (json_['instanceTags'] as core.List?) ?.map((value) => value as core.String) .toList(), + nccHubRouteUri: json_['nccHubRouteUri'] as core.String?, nccHubUri: json_['nccHubUri'] as core.String?, nccSpokeUri: json_['nccSpokeUri'] as core.String?, networkUri: json_['networkUri'] as core.String?, nextHop: json_['nextHop'] as core.String?, + nextHopNetworkUri: json_['nextHopNetworkUri'] as core.String?, nextHopType: json_['nextHopType'] as core.String?, + nextHopUri: json_['nextHopUri'] as core.String?, + originatingRouteDisplayName: + json_['originatingRouteDisplayName'] as core.String?, + originatingRouteUri: json_['originatingRouteUri'] as core.String?, priority: json_['priority'] as core.int?, protocols: (json_['protocols'] as core.List?) ?.map((value) => value as core.String) @@ -3865,11 +4258,18 @@ class RouteInfo { if (destPortRanges != null) 'destPortRanges': destPortRanges!, if (displayName != null) 'displayName': displayName!, if (instanceTags != null) 'instanceTags': instanceTags!, + if (nccHubRouteUri != null) 'nccHubRouteUri': nccHubRouteUri!, if (nccHubUri != null) 'nccHubUri': nccHubUri!, if (nccSpokeUri != null) 'nccSpokeUri': nccSpokeUri!, if (networkUri != null) 'networkUri': networkUri!, if (nextHop != null) 'nextHop': nextHop!, + if (nextHopNetworkUri != null) 'nextHopNetworkUri': nextHopNetworkUri!, if (nextHopType != null) 'nextHopType': nextHopType!, + if (nextHopUri != null) 'nextHopUri': nextHopUri!, + if (originatingRouteDisplayName != null) + 'originatingRouteDisplayName': originatingRouteDisplayName!, + if (originatingRouteUri != null) + 'originatingRouteUri': originatingRouteUri!, if (priority != null) 'priority': priority!, if (protocols != null) 'protocols': protocols!, if (region != null) 'region': region!, @@ -4438,6 +4838,187 @@ class VpcConnectorInfo { }; } +/// A configuration to generate VPC Flow Logs. +class VpcFlowLogsConfig { + /// The aggregation interval for the logs. + /// + /// Default value is INTERVAL_5_SEC. + /// + /// Optional. + /// Possible string values are: + /// - "AGGREGATION_INTERVAL_UNSPECIFIED" : If not specified, will default to + /// INTERVAL_5_SEC. + /// - "INTERVAL_5_SEC" : Aggregate logs in 5s intervals. + /// - "INTERVAL_30_SEC" : Aggregate logs in 30s intervals. + /// - "INTERVAL_1_MIN" : Aggregate logs in 1m intervals. + /// - "INTERVAL_5_MIN" : Aggregate logs in 5m intervals. + /// - "INTERVAL_10_MIN" : Aggregate logs in 10m intervals. + /// - "INTERVAL_15_MIN" : Aggregate logs in 15m intervals. + core.String? aggregationInterval; + + /// The time the config was created. + /// + /// Output only. + core.String? createTime; + + /// The user-supplied description of the VPC Flow Logs configuration. + /// + /// Maximum of 512 characters. + /// + /// Optional. + core.String? description; + + /// Export filter used to define which VPC Flow Logs should be logged. + /// + /// Optional. + core.String? filterExpr; + + /// The value of the field must be in (0, 1\]. + /// + /// The sampling rate of VPC Flow Logs where 1.0 means all collected logs are + /// reported. Setting the sampling rate to 0.0 is not allowed. 
If you want to + /// disable VPC Flow Logs, use the state field instead. Default value is 1.0. + /// + /// Optional. + core.double? flowSampling; + + /// Traffic will be logged from the Interconnect Attachment. + /// + /// Format: + /// projects/{project_id}/regions/{region}/interconnectAttachments/{name} + core.String? interconnectAttachment; + + /// Resource labels to represent user-provided metadata. + /// + /// Optional. + core.Map? labels; + + /// Configures whether all, none or a subset of metadata fields should be + /// added to the reported VPC flow logs. + /// + /// Default value is INCLUDE_ALL_METADATA. + /// + /// Optional. + /// Possible string values are: + /// - "METADATA_UNSPECIFIED" : If not specified, will default to + /// INCLUDE_ALL_METADATA. + /// - "INCLUDE_ALL_METADATA" : Include all metadata fields. + /// - "EXCLUDE_ALL_METADATA" : Exclude all metadata fields. + /// - "CUSTOM_METADATA" : Include only custom fields (specified in + /// metadata_fields). + core.String? metadata; + + /// Custom metadata fields to include in the reported VPC flow logs. + /// + /// Can only be specified if "metadata" was set to CUSTOM_METADATA. + /// + /// Optional. + core.List? metadataFields; + + /// Identifier. + /// + /// Unique name of the configuration using the form: + /// `projects/{project_id}/locations/global/vpcFlowLogsConfigs/{vpc_flow_logs_config_id}` + core.String? name; + + /// The state of the VPC Flow Log configuration. + /// + /// Default value is ENABLED. When creating a new configuration, it must be + /// enabled. + /// + /// Optional. + /// Possible string values are: + /// - "STATE_UNSPECIFIED" : If not specified, will default to ENABLED. + /// - "ENABLED" : When ENABLED, this configuration will generate logs. + /// - "DISABLED" : When DISABLED, this configuration will not generate logs. + core.String? state; + + /// A diagnostic bit - describes the state of the configured target resource + /// for diagnostic purposes. + /// + /// Output only. + /// Possible string values are: + /// - "TARGET_RESOURCE_STATE_UNSPECIFIED" : Unspecified target resource state. + /// - "TARGET_RESOURCE_EXISTS" : Indicates that the target resource exists. + /// - "TARGET_RESOURCE_DOES_NOT_EXIST" : Indicates that the target resource + /// does not exist. + core.String? targetResourceState; + + /// The time the config was updated. + /// + /// Output only. + core.String? updateTime; + + /// Traffic will be logged from the VPN Tunnel. + /// + /// Format: projects/{project_id}/regions/{region}/vpnTunnels/{name} + core.String? vpnTunnel; + + VpcFlowLogsConfig({ + this.aggregationInterval, + this.createTime, + this.description, + this.filterExpr, + this.flowSampling, + this.interconnectAttachment, + this.labels, + this.metadata, + this.metadataFields, + this.name, + this.state, + this.targetResourceState, + this.updateTime, + this.vpnTunnel, + }); + + VpcFlowLogsConfig.fromJson(core.Map json_) + : this( + aggregationInterval: json_['aggregationInterval'] as core.String?, + createTime: json_['createTime'] as core.String?, + description: json_['description'] as core.String?, + filterExpr: json_['filterExpr'] as core.String?, + flowSampling: (json_['flowSampling'] as core.num?)?.toDouble(), + interconnectAttachment: + json_['interconnectAttachment'] as core.String?, + labels: + (json_['labels'] as core.Map?)?.map( + (key, value) => core.MapEntry( + key, + value as core.String, + ), + ), + metadata: json_['metadata'] as core.String?, + metadataFields: (json_['metadataFields'] as core.List?) 
+ ?.map((value) => value as core.String) + .toList(), + name: json_['name'] as core.String?, + state: json_['state'] as core.String?, + targetResourceState: json_['targetResourceState'] as core.String?, + updateTime: json_['updateTime'] as core.String?, + vpnTunnel: json_['vpnTunnel'] as core.String?, + ); + + core.Map toJson() => { + if (aggregationInterval != null) + 'aggregationInterval': aggregationInterval!, + if (createTime != null) 'createTime': createTime!, + if (description != null) 'description': description!, + if (filterExpr != null) 'filterExpr': filterExpr!, + if (flowSampling != null) 'flowSampling': flowSampling!, + if (interconnectAttachment != null) + 'interconnectAttachment': interconnectAttachment!, + if (labels != null) 'labels': labels!, + if (metadata != null) 'metadata': metadata!, + if (metadataFields != null) 'metadataFields': metadataFields!, + if (name != null) 'name': name!, + if (state != null) 'state': state!, + if (targetResourceState != null) + 'targetResourceState': targetResourceState!, + if (updateTime != null) 'updateTime': updateTime!, + if (vpnTunnel != null) 'vpnTunnel': vpnTunnel!, + }; +} + /// For display only. /// /// Metadata associated with a Compute Engine VPN gateway. diff --git a/generated/googleapis/lib/networksecurity/v1.dart b/generated/googleapis/lib/networksecurity/v1.dart index 4b7710d6f..2a7c69aed 100644 --- a/generated/googleapis/lib/networksecurity/v1.dart +++ b/generated/googleapis/lib/networksecurity/v1.dart @@ -836,8 +836,8 @@ class OrganizationsLocationsOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// [request] - The metadata request object. /// @@ -2615,6 +2615,157 @@ class ProjectsLocationsAuthzPoliciesResource { ProjectsLocationsAuthzPoliciesResource(commons.ApiRequester client) : _requester = client; + /// Creates a new AuthzPolicy in a given project and location. + /// + /// [request] - The metadata request object. + /// + /// Request parameters: + /// + /// [parent] - Required. The parent resource of the `AuthzPolicy` resource. + /// Must be in the format `projects/{project}/locations/{location}`. + /// Value must have pattern `^projects/\[^/\]+/locations/\[^/\]+$`. + /// + /// [authzPolicyId] - Required. User-provided ID of the `AuthzPolicy` resource + /// to be created. + /// + /// [requestId] - Optional. An optional request ID to identify requests. + /// Specify a unique request ID so that if you must retry your request, the + /// server can ignore the request if it has already been completed. The server + /// guarantees that for at least 60 minutes since the first request. For + /// example, consider a situation where you make an initial request and the + /// request times out. If you make the request again with the same request ID, + /// the server can check if original operation with the same request ID was + /// received, and if so, ignores the second request. This prevents clients + /// from accidentally creating duplicate commitments. The request ID must be a + /// valid UUID with the exception that zero UUID is not supported + /// (00000000-0000-0000-0000-000000000000). 
+ /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [Operation]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future create( + AuthzPolicy request, + core.String parent, { + core.String? authzPolicyId, + core.String? requestId, + core.String? $fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if (authzPolicyId != null) 'authzPolicyId': [authzPolicyId], + if (requestId != null) 'requestId': [requestId], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/authzPolicies'; + + final response_ = await _requester.request( + url_, + 'POST', + body: body_, + queryParams: queryParams_, + ); + return Operation.fromJson(response_ as core.Map); + } + + /// Deletes a single AuthzPolicy. + /// + /// Request parameters: + /// + /// [name] - Required. The name of the `AuthzPolicy` resource to delete. Must + /// be in the format + /// `projects/{project}/locations/{location}/authzPolicies/{authz_policy}`. + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/authzPolicies/\[^/\]+$`. + /// + /// [requestId] - Optional. An optional request ID to identify requests. + /// Specify a unique request ID so that if you must retry your request, the + /// server can ignore the request if it has already been completed. The server + /// guarantees that for at least 60 minutes after the first request. For + /// example, consider a situation where you make an initial request and the + /// request times out. If you make the request again with the same request ID, + /// the server can check if original operation with the same request ID was + /// received, and if so, ignores the second request. This prevents clients + /// from accidentally creating duplicate commitments. The request ID must be a + /// valid UUID with the exception that zero UUID is not supported + /// (00000000-0000-0000-0000-000000000000). + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [Operation]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future delete( + core.String name, { + core.String? requestId, + core.String? $fields, + }) async { + final queryParams_ = >{ + if (requestId != null) 'requestId': [requestId], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name'); + + final response_ = await _requester.request( + url_, + 'DELETE', + queryParams: queryParams_, + ); + return Operation.fromJson(response_ as core.Map); + } + + /// Gets details of a single AuthzPolicy. + /// + /// Request parameters: + /// + /// [name] - Required. A name of the `AuthzPolicy` resource to get. Must be in + /// the format + /// `projects/{project}/locations/{location}/authzPolicies/{authz_policy}`. + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/authzPolicies/\[^/\]+$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [AuthzPolicy]. 
+ /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future get( + core.String name, { + core.String? $fields, + }) async { + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name'); + + final response_ = await _requester.request( + url_, + 'GET', + queryParams: queryParams_, + ); + return AuthzPolicy.fromJson( + response_ as core.Map); + } + /// Gets the access control policy for a resource. /// /// Returns an empty policy if the resource exists and does not have a policy @@ -2673,6 +2824,128 @@ class ProjectsLocationsAuthzPoliciesResource { response_ as core.Map); } + /// Lists AuthzPolicies in a given project and location. + /// + /// Request parameters: + /// + /// [parent] - Required. The project and location from which the `AuthzPolicy` + /// resources are listed, specified in the following format: + /// `projects/{project}/locations/{location}`. + /// Value must have pattern `^projects/\[^/\]+/locations/\[^/\]+$`. + /// + /// [filter] - Optional. Filtering results. + /// + /// [orderBy] - Optional. Hint for how to order the results. + /// + /// [pageSize] - Optional. Requested page size. The server might return fewer + /// items than requested. If unspecified, the server picks an appropriate + /// default. + /// + /// [pageToken] - Optional. A token identifying a page of results that the + /// server returns. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [ListAuthzPoliciesResponse]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future list( + core.String parent, { + core.String? filter, + core.String? orderBy, + core.int? pageSize, + core.String? pageToken, + core.String? $fields, + }) async { + final queryParams_ = >{ + if (filter != null) 'filter': [filter], + if (orderBy != null) 'orderBy': [orderBy], + if (pageSize != null) 'pageSize': ['${pageSize}'], + if (pageToken != null) 'pageToken': [pageToken], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/authzPolicies'; + + final response_ = await _requester.request( + url_, + 'GET', + queryParams: queryParams_, + ); + return ListAuthzPoliciesResponse.fromJson( + response_ as core.Map); + } + + /// Updates the parameters of a single AuthzPolicy. + /// + /// [request] - The metadata request object. + /// + /// Request parameters: + /// + /// [name] - Required. Identifier. Name of the `AuthzPolicy` resource in the + /// following format: + /// `projects/{project}/locations/{location}/authzPolicies/{authz_policy}`. + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/authzPolicies/\[^/\]+$`. + /// + /// [requestId] - Optional. An optional request ID to identify requests. + /// Specify a unique request ID so that if you must retry your request, the + /// server can ignore the request if it has already been completed. The server + /// guarantees that for at least 60 minutes since the first request. For + /// example, consider a situation where you make an initial request and the + /// request times out. 
If you make the request again with the same request ID, + /// the server can check if original operation with the same request ID was + /// received, and if so, ignores the second request. This prevents clients + /// from accidentally creating duplicate commitments. The request ID must be a + /// valid UUID with the exception that zero UUID is not supported + /// (00000000-0000-0000-0000-000000000000). + /// + /// [updateMask] - Required. Used to specify the fields to be overwritten in + /// the `AuthzPolicy` resource by the update. The fields specified in the + /// `update_mask` are relative to the resource, not the full request. A field + /// is overwritten if it is in the mask. If the user does not specify a mask, + /// then all fields are overwritten. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [Operation]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future patch( + AuthzPolicy request, + core.String name, { + core.String? requestId, + core.String? updateMask, + core.String? $fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if (requestId != null) 'requestId': [requestId], + if (updateMask != null) 'updateMask': [updateMask], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name'); + + final response_ = await _requester.request( + url_, + 'PATCH', + body: body_, + queryParams: queryParams_, + ); + return Operation.fromJson(response_ as core.Map); + } + /// Sets the access control policy on the specified resource. /// /// Replaces any existing policy. Can return `NOT_FOUND`, `INVALID_ARGUMENT`, @@ -3930,8 +4203,8 @@ class ProjectsLocationsOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// [request] - The metadata request object. /// @@ -5211,67 +5484,769 @@ class AuthorizationPolicy { }; } -/// The request message for Operations.CancelOperation. -typedef CancelOperationRequest = $Empty; - -/// Specification of a TLS certificate provider instance. -/// -/// Workloads may have one or more CertificateProvider instances (plugins) and -/// one of them is enabled and configured by specifying this message. Workloads -/// use the values from this message to locate and load the CertificateProvider -/// instance configuration. -class CertificateProviderInstance { - /// Plugin instance name, used to locate and load CertificateProvider instance - /// configuration. - /// - /// Set to "google_cloud_private_spiffe" to use Certificate Authority Service - /// certificate provider instance. +/// `AuthzPolicy` is a resource that allows to forward traffic to a callout +/// backend designed to scan the traffic for security purposes. +class AuthzPolicy { + /// Can be one of `ALLOW`, `DENY`, `CUSTOM`. + /// + /// When the action is `CUSTOM`, `customProvider` must be specified. 
When the + /// action is `ALLOW`, only requests matching the policy will be allowed. When + /// the action is `DENY`, only requests matching the policy will be denied. + /// When a request arrives, the policies are evaluated in the following order: + /// 1. If there is a `CUSTOM` policy that matches the request, the `CUSTOM` + /// policy is evaluated using the custom authorization providers and the + /// request is denied if the provider rejects the request. 2. If there are any + /// `DENY` policies that match the request, the request is denied. 3. If there + /// are no `ALLOW` policies for the resource or if any of the `ALLOW` policies + /// match the request, the request is allowed. 4. Else the request is denied + /// by default if none of the configured AuthzPolicies with `ALLOW` action + /// match the request. /// /// Required. - core.String? pluginInstance; - - CertificateProviderInstance({ - this.pluginInstance, - }); - - CertificateProviderInstance.fromJson(core.Map json_) - : this( - pluginInstance: json_['pluginInstance'] as core.String?, - ); - - core.Map toJson() => { - if (pluginInstance != null) 'pluginInstance': pluginInstance!, - }; -} - -/// ClientTlsPolicy is a resource that specifies how a client should -/// authenticate connections to backends of a service. -/// -/// This resource itself does not affect configuration unless it is attached to -/// a backend service resource. -class ClientTlsPolicy { - /// Defines a mechanism to provision client identity (public and private keys) - /// for peer to peer authentication. - /// - /// The presence of this dictates mTLS. - /// - /// Optional. - GoogleCloudNetworksecurityV1CertificateProvider? clientCertificate; + /// Possible string values are: + /// - "AUTHZ_ACTION_UNSPECIFIED" : Unspecified action. + /// - "ALLOW" : Allow request to pass through to the backend. + /// - "DENY" : Deny the request and return a HTTP 404 to the client. + /// - "CUSTOM" : Delegate the authorization decision to an external + /// authorization engine. + core.String? action; /// The timestamp when the resource was created. /// /// Output only. core.String? createTime; - /// Free-text description of the resource. + /// Required if the action is `CUSTOM`. + /// + /// Allows delegating authorization decisions to Cloud IAP or to Service + /// Extensions. One of `cloudIap` or `authzExtension` must be specified. /// /// Optional. - core.String? description; + AuthzPolicyCustomProvider? customProvider; - /// Set of label tags associated with the resource. + /// A human-readable description of the resource. /// /// Optional. - core.Map? labels; + core.String? description; + + /// A list of authorization HTTP rules to match against the incoming request. + /// + /// A policy match occurs when at least one HTTP rule matches the request or + /// when no HTTP rules are specified in the policy. At least one HTTP Rule is + /// required for Allow or Deny Action. Limited to 5 rules. + /// + /// Optional. + core.List? httpRules; + + /// Set of labels associated with the `AuthzPolicy` resource. + /// + /// The format must comply with \[the following + /// requirements\](/compute/docs/labeling-resources#requirements). + /// + /// Optional. + core.Map? labels; + + /// Identifier. + /// + /// Name of the `AuthzPolicy` resource in the following format: + /// `projects/{project}/locations/{location}/authzPolicies/{authz_policy}`. + /// + /// Required. + core.String? name; + + /// Specifies the set of resources to which this policy should be applied to. + /// + /// Required. 
+ AuthzPolicyTarget? target; + + /// The timestamp when the resource was updated. + /// + /// Output only. + core.String? updateTime; + + AuthzPolicy({ + this.action, + this.createTime, + this.customProvider, + this.description, + this.httpRules, + this.labels, + this.name, + this.target, + this.updateTime, + }); + + AuthzPolicy.fromJson(core.Map json_) + : this( + action: json_['action'] as core.String?, + createTime: json_['createTime'] as core.String?, + customProvider: json_.containsKey('customProvider') + ? AuthzPolicyCustomProvider.fromJson(json_['customProvider'] + as core.Map) + : null, + description: json_['description'] as core.String?, + httpRules: (json_['httpRules'] as core.List?) + ?.map((value) => AuthzPolicyAuthzRule.fromJson( + value as core.Map)) + .toList(), + labels: + (json_['labels'] as core.Map?)?.map( + (key, value) => core.MapEntry( + key, + value as core.String, + ), + ), + name: json_['name'] as core.String?, + target: json_.containsKey('target') + ? AuthzPolicyTarget.fromJson( + json_['target'] as core.Map) + : null, + updateTime: json_['updateTime'] as core.String?, + ); + + core.Map toJson() => { + if (action != null) 'action': action!, + if (createTime != null) 'createTime': createTime!, + if (customProvider != null) 'customProvider': customProvider!, + if (description != null) 'description': description!, + if (httpRules != null) 'httpRules': httpRules!, + if (labels != null) 'labels': labels!, + if (name != null) 'name': name!, + if (target != null) 'target': target!, + if (updateTime != null) 'updateTime': updateTime!, + }; +} + +/// Conditions to match against the incoming request. +class AuthzPolicyAuthzRule { + /// Describes properties of a source of a request. + /// + /// Optional. + AuthzPolicyAuthzRuleFrom? from; + + /// Describes properties of a target of a request. + /// + /// Optional. + AuthzPolicyAuthzRuleTo? to; + + /// CEL expression that describes the conditions to be satisfied for the + /// action. + /// + /// The result of the CEL expression is ANDed with the from and to. Refer to + /// the CEL language reference for a list of available attributes. + /// + /// Optional. + core.String? when; + + AuthzPolicyAuthzRule({ + this.from, + this.to, + this.when, + }); + + AuthzPolicyAuthzRule.fromJson(core.Map json_) + : this( + from: json_.containsKey('from') + ? AuthzPolicyAuthzRuleFrom.fromJson( + json_['from'] as core.Map) + : null, + to: json_.containsKey('to') + ? AuthzPolicyAuthzRuleTo.fromJson( + json_['to'] as core.Map) + : null, + when: json_['when'] as core.String?, + ); + + core.Map toJson() => { + if (from != null) 'from': from!, + if (to != null) 'to': to!, + if (when != null) 'when': when!, + }; +} + +/// Describes properties of one or more sources of a request. +class AuthzPolicyAuthzRuleFrom { + /// Describes the negated properties of request sources. + /// + /// Matches requests from sources that do not match the criteria specified in + /// this field. At least one of sources or notSources must be specified. + /// + /// Optional. + core.List? notSources; + + /// Describes the properties of a request's sources. + /// + /// At least one of sources or notSources must be specified. Limited to 1 + /// source. A match occurs when ANY source (in sources or notSources) matches + /// the request. Within a single source, the match follows AND semantics + /// across fields and OR semantics within a single field, i.e. a match occurs + /// when ANY principal matches AND ANY ipBlocks match. + /// + /// Optional. + core.List? 
sources; + + AuthzPolicyAuthzRuleFrom({ + this.notSources, + this.sources, + }); + + AuthzPolicyAuthzRuleFrom.fromJson(core.Map json_) + : this( + notSources: (json_['notSources'] as core.List?) + ?.map((value) => AuthzPolicyAuthzRuleFromRequestSource.fromJson( + value as core.Map)) + .toList(), + sources: (json_['sources'] as core.List?) + ?.map((value) => AuthzPolicyAuthzRuleFromRequestSource.fromJson( + value as core.Map)) + .toList(), + ); + + core.Map toJson() => { + if (notSources != null) 'notSources': notSources!, + if (sources != null) 'sources': sources!, + }; +} + +/// Describes the properties of a single source. +class AuthzPolicyAuthzRuleFromRequestSource { + /// A list of identities derived from the client's certificate. + /// + /// This field will not match on a request unless mutual TLS is enabled for + /// the Forwarding rule or Gateway. Each identity is a string whose value is + /// matched against the URI SAN, or DNS SAN or the subject field in the + /// client's certificate. The match can be exact, prefix, suffix or a + /// substring match. One of exact, prefix, suffix or contains must be + /// specified. Limited to 5 principals. + /// + /// Optional. + core.List? principals; + + /// A list of resources to match against the resource of the source VM of a + /// request. + /// + /// Limited to 5 resources. + /// + /// Optional. + core.List? resources; + + AuthzPolicyAuthzRuleFromRequestSource({ + this.principals, + this.resources, + }); + + AuthzPolicyAuthzRuleFromRequestSource.fromJson(core.Map json_) + : this( + principals: (json_['principals'] as core.List?) + ?.map((value) => AuthzPolicyAuthzRuleStringMatch.fromJson( + value as core.Map)) + .toList(), + resources: (json_['resources'] as core.List?) + ?.map((value) => AuthzPolicyAuthzRuleRequestResource.fromJson( + value as core.Map)) + .toList(), + ); + + core.Map toJson() => { + if (principals != null) 'principals': principals!, + if (resources != null) 'resources': resources!, + }; +} + +/// Determines how a HTTP header should be matched. +class AuthzPolicyAuthzRuleHeaderMatch { + /// Specifies the name of the header in the request. + /// + /// Optional. + core.String? name; + + /// Specifies how the header match will be performed. + /// + /// Optional. + AuthzPolicyAuthzRuleStringMatch? value; + + AuthzPolicyAuthzRuleHeaderMatch({ + this.name, + this.value, + }); + + AuthzPolicyAuthzRuleHeaderMatch.fromJson(core.Map json_) + : this( + name: json_['name'] as core.String?, + value: json_.containsKey('value') + ? AuthzPolicyAuthzRuleStringMatch.fromJson( + json_['value'] as core.Map) + : null, + ); + + core.Map toJson() => { + if (name != null) 'name': name!, + if (value != null) 'value': value!, + }; +} + +/// Describes the properties of a client VM resource accessing the internal +/// application load balancers. +class AuthzPolicyAuthzRuleRequestResource { + /// An IAM service account to match against the source service account of the + /// VM sending the request. + /// + /// Optional. + AuthzPolicyAuthzRuleStringMatch? iamServiceAccount; + + /// A list of resource tag value permanent IDs to match against the resource + /// manager tags value associated with the source VM of a request. + /// + /// Optional. + AuthzPolicyAuthzRuleRequestResourceTagValueIdSet? tagValueIdSet; + + AuthzPolicyAuthzRuleRequestResource({ + this.iamServiceAccount, + this.tagValueIdSet, + }); + + AuthzPolicyAuthzRuleRequestResource.fromJson(core.Map json_) + : this( + iamServiceAccount: json_.containsKey('iamServiceAccount') + ? 
AuthzPolicyAuthzRuleStringMatch.fromJson( + json_['iamServiceAccount'] + as core.Map) + : null, + tagValueIdSet: json_.containsKey('tagValueIdSet') + ? AuthzPolicyAuthzRuleRequestResourceTagValueIdSet.fromJson( + json_['tagValueIdSet'] as core.Map) + : null, + ); + + core.Map toJson() => { + if (iamServiceAccount != null) 'iamServiceAccount': iamServiceAccount!, + if (tagValueIdSet != null) 'tagValueIdSet': tagValueIdSet!, + }; +} + +/// Describes a set of resource tag value permanent IDs to match against the +/// resource manager tags value associated with the source VM of a request. +class AuthzPolicyAuthzRuleRequestResourceTagValueIdSet { + /// A list of resource tag value permanent IDs to match against the resource + /// manager tags value associated with the source VM of a request. + /// + /// The match follows AND semantics which means all the ids must match. + /// Limited to 5 matches. + /// + /// Required. + core.List? ids; + + AuthzPolicyAuthzRuleRequestResourceTagValueIdSet({ + this.ids, + }); + + AuthzPolicyAuthzRuleRequestResourceTagValueIdSet.fromJson(core.Map json_) + : this( + ids: (json_['ids'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + ); + + core.Map toJson() => { + if (ids != null) 'ids': ids!, + }; +} + +/// Determines how a string value should be matched. +class AuthzPolicyAuthzRuleStringMatch { + /// The input string must have the substring specified here. + /// + /// Note: empty contains match is not allowed, please use regex instead. + /// Examples: * ``abc`` matches the value ``xyz.abc.def`` + core.String? contains; + + /// The input string must match exactly the string specified here. + /// + /// Examples: * ``abc`` only matches the value ``abc``. + core.String? exact; + + /// If true, indicates the exact/prefix/suffix/contains matching should be + /// case insensitive. + /// + /// For example, the matcher ``data`` will match both input string ``Data`` + /// and ``data`` if set to true. + core.bool? ignoreCase; + + /// The input string must have the prefix specified here. + /// + /// Note: empty prefix is not allowed, please use regex instead. Examples: * + /// ``abc`` matches the value ``abc.xyz`` + core.String? prefix; + + /// The input string must have the suffix specified here. + /// + /// Note: empty suffix is not allowed, please use regex instead. Examples: * + /// ``abc`` matches the value ``xyz.abc`` + core.String? suffix; + + AuthzPolicyAuthzRuleStringMatch({ + this.contains, + this.exact, + this.ignoreCase, + this.prefix, + this.suffix, + }); + + AuthzPolicyAuthzRuleStringMatch.fromJson(core.Map json_) + : this( + contains: json_['contains'] as core.String?, + exact: json_['exact'] as core.String?, + ignoreCase: json_['ignoreCase'] as core.bool?, + prefix: json_['prefix'] as core.String?, + suffix: json_['suffix'] as core.String?, + ); + + core.Map toJson() => { + if (contains != null) 'contains': contains!, + if (exact != null) 'exact': exact!, + if (ignoreCase != null) 'ignoreCase': ignoreCase!, + if (prefix != null) 'prefix': prefix!, + if (suffix != null) 'suffix': suffix!, + }; +} + +/// Describes properties of one or more targets of a request. +class AuthzPolicyAuthzRuleTo { + /// Describes the negated properties of the targets of a request. + /// + /// Matches requests for operations that do not match the criteria specified + /// in this field. At least one of operations or notOperations must be + /// specified. + /// + /// Optional. + core.List? 
notOperations; + + /// Describes properties of one or more targets of a request. + /// + /// At least one of operations or notOperations must be specified. Limited to + /// 1 operation. A match occurs when ANY operation (in operations or + /// notOperations) matches. Within an operation, the match follows AND + /// semantics across fields and OR semantics within a field, i.e. a match + /// occurs when ANY path matches AND ANY header matches and ANY method + /// matches. + /// + /// Optional. + core.List? operations; + + AuthzPolicyAuthzRuleTo({ + this.notOperations, + this.operations, + }); + + AuthzPolicyAuthzRuleTo.fromJson(core.Map json_) + : this( + notOperations: (json_['notOperations'] as core.List?) + ?.map((value) => AuthzPolicyAuthzRuleToRequestOperation.fromJson( + value as core.Map)) + .toList(), + operations: (json_['operations'] as core.List?) + ?.map((value) => AuthzPolicyAuthzRuleToRequestOperation.fromJson( + value as core.Map)) + .toList(), + ); + + core.Map toJson() => { + if (notOperations != null) 'notOperations': notOperations!, + if (operations != null) 'operations': operations!, + }; +} + +/// Describes properties of one or more targets of a request. +class AuthzPolicyAuthzRuleToRequestOperation { + /// A list of headers to match against in http header. + /// + /// Optional. + AuthzPolicyAuthzRuleToRequestOperationHeaderSet? headerSet; + + /// A list of HTTP Hosts to match against. + /// + /// The match can be one of exact, prefix, suffix, or contains (substring + /// match). Matches are always case sensitive unless the ignoreCase is set. + /// Limited to 5 matches. + /// + /// Optional. + core.List? hosts; + + /// A list of HTTP methods to match against. + /// + /// Each entry must be a valid HTTP method name (GET, PUT, POST, HEAD, PATCH, + /// DELETE, OPTIONS). It only allows exact match and is always case sensitive. + /// + /// Optional. + core.List? methods; + + /// A list of paths to match against. + /// + /// The match can be one of exact, prefix, suffix, or contains (substring + /// match). Matches are always case sensitive unless the ignoreCase is set. + /// Limited to 5 matches. Note that this path match includes the query + /// parameters. For gRPC services, this should be a fully-qualified name of + /// the form /package.service/method. + /// + /// Optional. + core.List? paths; + + AuthzPolicyAuthzRuleToRequestOperation({ + this.headerSet, + this.hosts, + this.methods, + this.paths, + }); + + AuthzPolicyAuthzRuleToRequestOperation.fromJson(core.Map json_) + : this( + headerSet: json_.containsKey('headerSet') + ? AuthzPolicyAuthzRuleToRequestOperationHeaderSet.fromJson( + json_['headerSet'] as core.Map) + : null, + hosts: (json_['hosts'] as core.List?) + ?.map((value) => AuthzPolicyAuthzRuleStringMatch.fromJson( + value as core.Map)) + .toList(), + methods: (json_['methods'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + paths: (json_['paths'] as core.List?) + ?.map((value) => AuthzPolicyAuthzRuleStringMatch.fromJson( + value as core.Map)) + .toList(), + ); + + core.Map toJson() => { + if (headerSet != null) 'headerSet': headerSet!, + if (hosts != null) 'hosts': hosts!, + if (methods != null) 'methods': methods!, + if (paths != null) 'paths': paths!, + }; +} + +/// Describes a set of HTTP headers to match against. +class AuthzPolicyAuthzRuleToRequestOperationHeaderSet { + /// A list of headers to match against in http header. + /// + /// The match can be one of exact, prefix, suffix, or contains (substring + /// match). 
The match follows AND semantics which means all the headers must + /// match. Matches are always case sensitive unless the ignoreCase is set. + /// Limited to 5 matches. + /// + /// Required. + core.List? headers; + + AuthzPolicyAuthzRuleToRequestOperationHeaderSet({ + this.headers, + }); + + AuthzPolicyAuthzRuleToRequestOperationHeaderSet.fromJson(core.Map json_) + : this( + headers: (json_['headers'] as core.List?) + ?.map((value) => AuthzPolicyAuthzRuleHeaderMatch.fromJson( + value as core.Map)) + .toList(), + ); + + core.Map toJson() => { + if (headers != null) 'headers': headers!, + }; +} + +/// Allows delegating authorization decisions to Cloud IAP or to Service +/// Extensions. +class AuthzPolicyCustomProvider { + /// Delegate authorization decision to user authored Service Extension. + /// + /// Only one of cloudIap or authzExtension can be specified. + /// + /// Optional. + AuthzPolicyCustomProviderAuthzExtension? authzExtension; + + /// Delegates authorization decisions to Cloud IAP. + /// + /// Applicable only for managed load balancers. Enabling Cloud IAP at the + /// AuthzPolicy level is not compatible with Cloud IAP settings in the + /// BackendService. Enabling IAP in both places will result in request + /// failure. Ensure that IAP is enabled in either the AuthzPolicy or the + /// BackendService but not in both places. + /// + /// Optional. + AuthzPolicyCustomProviderCloudIap? cloudIap; + + AuthzPolicyCustomProvider({ + this.authzExtension, + this.cloudIap, + }); + + AuthzPolicyCustomProvider.fromJson(core.Map json_) + : this( + authzExtension: json_.containsKey('authzExtension') + ? AuthzPolicyCustomProviderAuthzExtension.fromJson( + json_['authzExtension'] + as core.Map) + : null, + cloudIap: json_.containsKey('cloudIap') + ? AuthzPolicyCustomProviderCloudIap.fromJson( + json_['cloudIap'] as core.Map) + : null, + ); + + core.Map toJson() => { + if (authzExtension != null) 'authzExtension': authzExtension!, + if (cloudIap != null) 'cloudIap': cloudIap!, + }; +} + +/// Delegate authorization decision to user authored extension. +/// +/// Only one of cloudIap or authzExtension can be specified. +/// +/// Optional. +class AuthzPolicyCustomProviderAuthzExtension { + /// A list of references to authorization extensions that will be invoked for + /// requests matching this policy. + /// + /// Limited to 1 custom provider. + /// + /// Required. + core.List? resources; + + AuthzPolicyCustomProviderAuthzExtension({ + this.resources, + }); + + AuthzPolicyCustomProviderAuthzExtension.fromJson(core.Map json_) + : this( + resources: (json_['resources'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + ); + + core.Map toJson() => { + if (resources != null) 'resources': resources!, + }; +} + +/// Delegates authorization decisions to Cloud IAP. +/// +/// Applicable only for managed load balancers. Enabling Cloud IAP at the +/// AuthzPolicy level is not compatible with Cloud IAP settings in the +/// BackendService. Enabling IAP in both places will result in request failure. +/// Ensure that IAP is enabled in either the AuthzPolicy or the BackendService +/// but not in both places. +/// +/// Optional. +typedef AuthzPolicyCustomProviderCloudIap = $Empty; + +/// Specifies the set of targets to which this policy should be applied to. +class AuthzPolicyTarget { + /// All gateways and forwarding rules referenced by this policy and extensions + /// must share the same load balancing scheme. + /// + /// Supported values: `INTERNAL_MANAGED` and `EXTERNAL_MANAGED`. 
For more + /// information, refer to + /// [Backend services overview](https://cloud.google.com/load-balancing/docs/backend-service). + /// + /// Required. + /// Possible string values are: + /// - "LOAD_BALANCING_SCHEME_UNSPECIFIED" : Default value. Do not use. + /// - "INTERNAL_MANAGED" : Signifies that this is used for Regional internal + /// or Cross-region internal Application Load Balancing. + /// - "EXTERNAL_MANAGED" : Signifies that this is used for Global external or + /// Regional external Application Load Balancing. + /// - "INTERNAL_SELF_MANAGED" : Signifies that this is used for Cloud Service + /// Mesh. Meant for use by CSM GKE controller only. + core.String? loadBalancingScheme; + + /// A list of references to the Forwarding Rules on which this policy will be + /// applied. + /// + /// Required. + core.List? resources; + + AuthzPolicyTarget({ + this.loadBalancingScheme, + this.resources, + }); + + AuthzPolicyTarget.fromJson(core.Map json_) + : this( + loadBalancingScheme: json_['loadBalancingScheme'] as core.String?, + resources: (json_['resources'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + ); + + core.Map toJson() => { + if (loadBalancingScheme != null) + 'loadBalancingScheme': loadBalancingScheme!, + if (resources != null) 'resources': resources!, + }; +} + +/// The request message for Operations.CancelOperation. +typedef CancelOperationRequest = $Empty; + +/// Specification of a TLS certificate provider instance. +/// +/// Workloads may have one or more CertificateProvider instances (plugins) and +/// one of them is enabled and configured by specifying this message. Workloads +/// use the values from this message to locate and load the CertificateProvider +/// instance configuration. +class CertificateProviderInstance { + /// Plugin instance name, used to locate and load CertificateProvider instance + /// configuration. + /// + /// Set to "google_cloud_private_spiffe" to use Certificate Authority Service + /// certificate provider instance. + /// + /// Required. + core.String? pluginInstance; + + CertificateProviderInstance({ + this.pluginInstance, + }); + + CertificateProviderInstance.fromJson(core.Map json_) + : this( + pluginInstance: json_['pluginInstance'] as core.String?, + ); + + core.Map toJson() => { + if (pluginInstance != null) 'pluginInstance': pluginInstance!, + }; +} + +/// ClientTlsPolicy is a resource that specifies how a client should +/// authenticate connections to backends of a service. +/// +/// This resource itself does not affect configuration unless it is attached to +/// a backend service resource. +class ClientTlsPolicy { + /// Defines a mechanism to provision client identity (public and private keys) + /// for peer to peer authentication. + /// + /// The presence of this dictates mTLS. + /// + /// Optional. + GoogleCloudNetworksecurityV1CertificateProvider? clientCertificate; + + /// The timestamp when the resource was created. + /// + /// Output only. + core.String? createTime; + + /// Free-text description of the resource. + /// + /// Optional. + core.String? description; + + /// Set of label tags associated with the resource. + /// + /// Optional. + core.Map? labels; /// Name of the ClientTlsPolicy resource. /// @@ -5392,6 +6367,31 @@ class CloneAddressGroupItemsRequest { }; } +/// CustomInterceptProfile defines the Packet Intercept Endpoint Group used to +/// intercept traffic to a third-party firewall in a Firewall rule. 
+class CustomInterceptProfile { + /// The InterceptEndpointGroup to which traffic associated with the SP should + /// be mirrored. + /// + /// Required. + core.String? interceptEndpointGroup; + + CustomInterceptProfile({ + this.interceptEndpointGroup, + }); + + CustomInterceptProfile.fromJson(core.Map json_) + : this( + interceptEndpointGroup: + json_['interceptEndpointGroup'] as core.String?, + ); + + core.Map toJson() => { + if (interceptEndpointGroup != null) + 'interceptEndpointGroup': interceptEndpointGroup!, + }; +} + /// CustomMirroringProfile defines an action for mirroring traffic to a /// collector's EndpointGroup class CustomMirroringProfile { @@ -5690,7 +6690,7 @@ class FirewallEndpointAssociation { /// - "ACTIVE" : Active and ready for traffic. /// - "DELETING" : Being deleted. /// - "INACTIVE" : Down or in an error state. - /// - "ORPHAN" : The GCP project that housed the association has been deleted. + /// - "ORPHAN" : The project that housed the association has been deleted. core.String? state; /// The URL of the TlsInspectionPolicy that is being associated. @@ -6507,6 +7507,42 @@ class ListAuthorizationPoliciesResponse { }; } +/// Message for response to listing `AuthzPolicy` resources. +class ListAuthzPoliciesResponse { + /// The list of `AuthzPolicy` resources. + core.List? authzPolicies; + + /// A token identifying a page of results that the server returns. + core.String? nextPageToken; + + /// Locations that could not be reached. + core.List? unreachable; + + ListAuthzPoliciesResponse({ + this.authzPolicies, + this.nextPageToken, + this.unreachable, + }); + + ListAuthzPoliciesResponse.fromJson(core.Map json_) + : this( + authzPolicies: (json_['authzPolicies'] as core.List?) + ?.map((value) => AuthzPolicy.fromJson( + value as core.Map)) + .toList(), + nextPageToken: json_['nextPageToken'] as core.String?, + unreachable: (json_['unreachable'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + ); + + core.Map toJson() => { + if (authzPolicies != null) 'authzPolicies': authzPolicies!, + if (nextPageToken != null) 'nextPageToken': nextPageToken!, + if (unreachable != null) 'unreachable': unreachable!, + }; +} + /// Response returned by the ListClientTlsPolicies method. class ListClientTlsPoliciesResponse { /// List of ClientTlsPolicy resources. @@ -7163,14 +8199,15 @@ class Rule { /// SecurityProfile is a resource that defines the behavior for one of many /// ProfileTypes. -/// -/// Next ID: 12 class SecurityProfile { /// Resource creation timestamp. /// /// Output only. core.String? createTime; + /// The custom TPPI configuration for the SecurityProfile. + CustomInterceptProfile? customInterceptProfile; + /// The custom Packet Mirroring v2 configuration for the SecurityProfile. CustomMirroringProfile? customMirroringProfile; @@ -7212,6 +8249,7 @@ class SecurityProfile { /// - "PROFILE_TYPE_UNSPECIFIED" : Profile type not specified. /// - "THREAT_PREVENTION" : Profile type for threat prevention. /// - "CUSTOM_MIRRORING" : Profile type for packet mirroring v2 + /// - "CUSTOM_INTERCEPT" : Profile type for TPPI. core.String? type; /// Last resource update timestamp. @@ -7221,6 +8259,7 @@ class SecurityProfile { SecurityProfile({ this.createTime, + this.customInterceptProfile, this.customMirroringProfile, this.description, this.etag, @@ -7234,6 +8273,10 @@ class SecurityProfile { SecurityProfile.fromJson(core.Map json_) : this( createTime: json_['createTime'] as core.String?, + customInterceptProfile: json_.containsKey('customInterceptProfile') + ? 
CustomInterceptProfile.fromJson(json_['customInterceptProfile'] + as core.Map) + : null, customMirroringProfile: json_.containsKey('customMirroringProfile') ? CustomMirroringProfile.fromJson(json_['customMirroringProfile'] as core.Map) @@ -7259,6 +8302,8 @@ class SecurityProfile { core.Map toJson() => { if (createTime != null) 'createTime': createTime!, + if (customInterceptProfile != null) + 'customInterceptProfile': customInterceptProfile!, if (customMirroringProfile != null) 'customMirroringProfile': customMirroringProfile!, if (description != null) 'description': description!, @@ -7274,14 +8319,17 @@ class SecurityProfile { /// SecurityProfileGroup is a resource that defines the behavior for various /// ProfileTypes. -/// -/// Next ID: 11 class SecurityProfileGroup { /// Resource creation timestamp. /// /// Output only. core.String? createTime; + /// Reference to a SecurityProfile with the CustomIntercept configuration. + /// + /// Optional. + core.String? customInterceptProfile; + /// Reference to a SecurityProfile with the CustomMirroring configuration. /// /// Optional. @@ -7327,6 +8375,7 @@ class SecurityProfileGroup { SecurityProfileGroup({ this.createTime, + this.customInterceptProfile, this.customMirroringProfile, this.description, this.etag, @@ -7339,6 +8388,8 @@ class SecurityProfileGroup { SecurityProfileGroup.fromJson(core.Map json_) : this( createTime: json_['createTime'] as core.String?, + customInterceptProfile: + json_['customInterceptProfile'] as core.String?, customMirroringProfile: json_['customMirroringProfile'] as core.String?, description: json_['description'] as core.String?, @@ -7358,6 +8409,8 @@ class SecurityProfileGroup { core.Map toJson() => { if (createTime != null) 'createTime': createTime!, + if (customInterceptProfile != null) + 'customInterceptProfile': customInterceptProfile!, if (customMirroringProfile != null) 'customMirroringProfile': customMirroringProfile!, if (description != null) 'description': description!, diff --git a/generated/googleapis/lib/networkservices/v1.dart b/generated/googleapis/lib/networkservices/v1.dart index c301502f2..9dca14b82 100644 --- a/generated/googleapis/lib/networkservices/v1.dart +++ b/generated/googleapis/lib/networkservices/v1.dart @@ -20,21 +20,26 @@ /// /// - [ProjectsResource] /// - [ProjectsLocationsResource] +/// - [ProjectsLocationsAuthzExtensionsResource] /// - [ProjectsLocationsEdgeCacheKeysetsResource] /// - [ProjectsLocationsEdgeCacheOriginsResource] /// - [ProjectsLocationsEdgeCacheServicesResource] /// - [ProjectsLocationsEndpointPoliciesResource] /// - [ProjectsLocationsGatewaysResource] +/// - [ProjectsLocationsGatewaysRouteViewsResource] /// - [ProjectsLocationsGrpcRoutesResource] /// - [ProjectsLocationsHttpRoutesResource] /// - [ProjectsLocationsLbRouteExtensionsResource] /// - [ProjectsLocationsLbTrafficExtensionsResource] /// - [ProjectsLocationsMeshesResource] +/// - [ProjectsLocationsMeshesRouteViewsResource] /// - [ProjectsLocationsOperationsResource] /// - [ProjectsLocationsServiceBindingsResource] /// - [ProjectsLocationsServiceLbPoliciesResource] /// - [ProjectsLocationsTcpRoutesResource] /// - [ProjectsLocationsTlsRoutesResource] +/// - [ProjectsLocationsWasmPluginsResource] +/// - [ProjectsLocationsWasmPluginsVersionsResource] library; import 'dart:async' as async; @@ -79,6 +84,8 @@ class ProjectsResource { class ProjectsLocationsResource { final commons.ApiRequester _requester; + ProjectsLocationsAuthzExtensionsResource get authzExtensions => + 
ProjectsLocationsAuthzExtensionsResource(_requester); ProjectsLocationsEdgeCacheKeysetsResource get edgeCacheKeysets => ProjectsLocationsEdgeCacheKeysetsResource(_requester); ProjectsLocationsEdgeCacheOriginsResource get edgeCacheOrigins => @@ -109,6 +116,8 @@ class ProjectsLocationsResource { ProjectsLocationsTcpRoutesResource(_requester); ProjectsLocationsTlsRoutesResource get tlsRoutes => ProjectsLocationsTlsRoutesResource(_requester); + ProjectsLocationsWasmPluginsResource get wasmPlugins => + ProjectsLocationsWasmPluginsResource(_requester); ProjectsLocationsResource(commons.ApiRequester client) : _requester = client; @@ -200,6 +209,286 @@ class ProjectsLocationsResource { } } +class ProjectsLocationsAuthzExtensionsResource { + final commons.ApiRequester _requester; + + ProjectsLocationsAuthzExtensionsResource(commons.ApiRequester client) + : _requester = client; + + /// Creates a new `AuthzExtension` resource in a given project and location. + /// + /// [request] - The metadata request object. + /// + /// Request parameters: + /// + /// [parent] - Required. The parent resource of the `AuthzExtension` resource. + /// Must be in the format `projects/{project}/locations/{location}`. + /// Value must have pattern `^projects/\[^/\]+/locations/\[^/\]+$`. + /// + /// [authzExtensionId] - Required. User-provided ID of the `AuthzExtension` + /// resource to be created. + /// + /// [requestId] - Optional. An optional request ID to identify requests. + /// Specify a unique request ID so that if you must retry your request, the + /// server can ignore the request if it has already been completed. The server + /// guarantees that for at least 60 minutes since the first request. For + /// example, consider a situation where you make an initial request and the + /// request times out. If you make the request again with the same request ID, + /// the server can check if original operation with the same request ID was + /// received, and if so, ignores the second request. This prevents clients + /// from accidentally creating duplicate commitments. The request ID must be a + /// valid UUID with the exception that zero UUID is not supported + /// (00000000-0000-0000-0000-000000000000). + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [Operation]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future create( + AuthzExtension request, + core.String parent, { + core.String? authzExtensionId, + core.String? requestId, + core.String? $fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if (authzExtensionId != null) 'authzExtensionId': [authzExtensionId], + if (requestId != null) 'requestId': [requestId], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/authzExtensions'; + + final response_ = await _requester.request( + url_, + 'POST', + body: body_, + queryParams: queryParams_, + ); + return Operation.fromJson(response_ as core.Map); + } + + /// Deletes the specified `AuthzExtension` resource. + /// + /// Request parameters: + /// + /// [name] - Required. The name of the `AuthzExtension` resource to delete. + /// Must be in the format + /// `projects/{project}/locations/{location}/authzExtensions/{authz_extension}`. 
+ /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/authzExtensions/\[^/\]+$`. + /// + /// [requestId] - Optional. An optional request ID to identify requests. + /// Specify a unique request ID so that if you must retry your request, the + /// server can ignore the request if it has already been completed. The server + /// guarantees that for at least 60 minutes after the first request. For + /// example, consider a situation where you make an initial request and the + /// request times out. If you make the request again with the same request ID, + /// the server can check if original operation with the same request ID was + /// received, and if so, ignores the second request. This prevents clients + /// from accidentally creating duplicate commitments. The request ID must be a + /// valid UUID with the exception that zero UUID is not supported + /// (00000000-0000-0000-0000-000000000000). + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [Operation]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future delete( + core.String name, { + core.String? requestId, + core.String? $fields, + }) async { + final queryParams_ = >{ + if (requestId != null) 'requestId': [requestId], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name'); + + final response_ = await _requester.request( + url_, + 'DELETE', + queryParams: queryParams_, + ); + return Operation.fromJson(response_ as core.Map); + } + + /// Gets details of the specified `AuthzExtension` resource. + /// + /// Request parameters: + /// + /// [name] - Required. A name of the `AuthzExtension` resource to get. Must be + /// in the format + /// `projects/{project}/locations/{location}/authzExtensions/{authz_extension}`. + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/authzExtensions/\[^/\]+$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [AuthzExtension]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future get( + core.String name, { + core.String? $fields, + }) async { + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name'); + + final response_ = await _requester.request( + url_, + 'GET', + queryParams: queryParams_, + ); + return AuthzExtension.fromJson( + response_ as core.Map); + } + + /// Lists `AuthzExtension` resources in a given project and location. + /// + /// Request parameters: + /// + /// [parent] - Required. The project and location from which the + /// `AuthzExtension` resources are listed, specified in the following format: + /// `projects/{project}/locations/{location}`. + /// Value must have pattern `^projects/\[^/\]+/locations/\[^/\]+$`. + /// + /// [filter] - Optional. Filtering results. + /// + /// [orderBy] - Optional. Hint for how to order the results. + /// + /// [pageSize] - Optional. Requested page size. The server might return fewer + /// items than requested. 
If unspecified, the server picks an appropriate + /// default. + /// + /// [pageToken] - Optional. A token identifying a page of results that the + /// server returns. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [ListAuthzExtensionsResponse]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future list( + core.String parent, { + core.String? filter, + core.String? orderBy, + core.int? pageSize, + core.String? pageToken, + core.String? $fields, + }) async { + final queryParams_ = >{ + if (filter != null) 'filter': [filter], + if (orderBy != null) 'orderBy': [orderBy], + if (pageSize != null) 'pageSize': ['${pageSize}'], + if (pageToken != null) 'pageToken': [pageToken], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/authzExtensions'; + + final response_ = await _requester.request( + url_, + 'GET', + queryParams: queryParams_, + ); + return ListAuthzExtensionsResponse.fromJson( + response_ as core.Map); + } + + /// Updates the parameters of the specified `AuthzExtension` resource. + /// + /// [request] - The metadata request object. + /// + /// Request parameters: + /// + /// [name] - Required. Identifier. Name of the `AuthzExtension` resource in + /// the following format: + /// `projects/{project}/locations/{location}/authzExtensions/{authz_extension}`. + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/authzExtensions/\[^/\]+$`. + /// + /// [requestId] - Optional. An optional request ID to identify requests. + /// Specify a unique request ID so that if you must retry your request, the + /// server can ignore the request if it has already been completed. The server + /// guarantees that for at least 60 minutes since the first request. For + /// example, consider a situation where you make an initial request and the + /// request times out. If you make the request again with the same request ID, + /// the server can check if original operation with the same request ID was + /// received, and if so, ignores the second request. This prevents clients + /// from accidentally creating duplicate commitments. The request ID must be a + /// valid UUID with the exception that zero UUID is not supported + /// (00000000-0000-0000-0000-000000000000). + /// + /// [updateMask] - Required. Used to specify the fields to be overwritten in + /// the `AuthzExtension` resource by the update. The fields specified in the + /// `update_mask` are relative to the resource, not the full request. A field + /// is overwritten if it is in the mask. If the user does not specify a mask, + /// then all fields are overwritten. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [Operation]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future patch( + AuthzExtension request, + core.String name, { + core.String? requestId, + core.String? updateMask, + core.String? 
$fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if (requestId != null) 'requestId': [requestId], + if (updateMask != null) 'updateMask': [updateMask], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name'); + + final response_ = await _requester.request( + url_, + 'PATCH', + body: body_, + queryParams: queryParams_, + ); + return Operation.fromJson(response_ as core.Map); + } +} + class ProjectsLocationsEdgeCacheKeysetsResource { final commons.ApiRequester _requester; @@ -917,6 +1206,9 @@ class ProjectsLocationsEndpointPoliciesResource { class ProjectsLocationsGatewaysResource { final commons.ApiRequester _requester; + ProjectsLocationsGatewaysRouteViewsResource get routeViews => + ProjectsLocationsGatewaysRouteViewsResource(_requester); + ProjectsLocationsGatewaysResource(commons.ApiRequester client) : _requester = client; @@ -1137,24 +1429,118 @@ class ProjectsLocationsGatewaysResource { } } -class ProjectsLocationsGrpcRoutesResource { +class ProjectsLocationsGatewaysRouteViewsResource { final commons.ApiRequester _requester; - ProjectsLocationsGrpcRoutesResource(commons.ApiRequester client) + ProjectsLocationsGatewaysRouteViewsResource(commons.ApiRequester client) : _requester = client; - /// Creates a new GrpcRoute in a given project and location. - /// - /// [request] - The metadata request object. + /// Get a single RouteView of a Gateway. /// /// Request parameters: /// - /// [parent] - Required. The parent resource of the GrpcRoute. Must be in the - /// format `projects / * /locations/global`. - /// Value must have pattern `^projects/\[^/\]+/locations/\[^/\]+$`. + /// [name] - Required. Name of the GatewayRouteView resource. Formats: + /// projects/{project_number}/locations/{location}/gateways/{gateway_name}/routeViews/{route_view_name} + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/gateways/\[^/\]+/routeViews/\[^/\]+$`. /// - /// [grpcRouteId] - Required. Short name of the GrpcRoute resource to be - /// created. + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [GatewayRouteView]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future get( + core.String name, { + core.String? $fields, + }) async { + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name'); + + final response_ = await _requester.request( + url_, + 'GET', + queryParams: queryParams_, + ); + return GatewayRouteView.fromJson( + response_ as core.Map); + } + + /// Lists RouteViews + /// + /// Request parameters: + /// + /// [parent] - Required. The Gateway to which a Route is associated. Formats: + /// projects/{project_number}/locations/{location}/gateways/{gateway_name} + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/gateways/\[^/\]+$`. + /// + /// [pageSize] - Maximum number of GatewayRouteViews to return per call. + /// + /// [pageToken] - The value returned by the last + /// `ListGatewayRouteViewsResponse` Indicates that this is a continuation of a + /// prior `ListGatewayRouteViews` call, and that the system should return the + /// next page of data. 
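// Editor's note: sketch of the pagination loop implied by the pageToken
// description above (not part of the generated diff). Assumes the
// networkservices v1 client with root class NetworkServicesApi and an `api`
// instance built elsewhere; the gateway name is a placeholder and the
// resource accessor chain follows the generator's usual pattern.
import 'package:googleapis/networkservices/v1.dart';

Future<List<GatewayRouteView>> allGatewayRouteViews(
    NetworkServicesApi api) async {
  const parent = 'projects/my-project/locations/global/gateways/my-gateway';
  final views = <GatewayRouteView>[];
  String? pageToken;
  do {
    // Pass the previous nextPageToken back as pageToken to continue the scan.
    final page = await api.projects.locations.gateways.routeViews
        .list(parent, pageSize: 100, pageToken: pageToken);
    views.addAll(page.gatewayRouteViews ?? []);
    pageToken = page.nextPageToken;
  } while (pageToken != null);
  return views;
}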
+ /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [ListGatewayRouteViewsResponse]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future list( + core.String parent, { + core.int? pageSize, + core.String? pageToken, + core.String? $fields, + }) async { + final queryParams_ = >{ + if (pageSize != null) 'pageSize': ['${pageSize}'], + if (pageToken != null) 'pageToken': [pageToken], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/routeViews'; + + final response_ = await _requester.request( + url_, + 'GET', + queryParams: queryParams_, + ); + return ListGatewayRouteViewsResponse.fromJson( + response_ as core.Map); + } +} + +class ProjectsLocationsGrpcRoutesResource { + final commons.ApiRequester _requester; + + ProjectsLocationsGrpcRoutesResource(commons.ApiRequester client) + : _requester = client; + + /// Creates a new GrpcRoute in a given project and location. + /// + /// [request] - The metadata request object. + /// + /// Request parameters: + /// + /// [parent] - Required. The parent resource of the GrpcRoute. Must be in the + /// format `projects / * /locations/global`. + /// Value must have pattern `^projects/\[^/\]+/locations/\[^/\]+$`. + /// + /// [grpcRouteId] - Required. Short name of the GrpcRoute resource to be + /// created. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. @@ -2155,6 +2541,9 @@ class ProjectsLocationsLbTrafficExtensionsResource { class ProjectsLocationsMeshesResource { final commons.ApiRequester _requester; + ProjectsLocationsMeshesRouteViewsResource get routeViews => + ProjectsLocationsMeshesRouteViewsResource(_requester); + ProjectsLocationsMeshesResource(commons.ApiRequester client) : _requester = client; @@ -2375,6 +2764,99 @@ class ProjectsLocationsMeshesResource { } } +class ProjectsLocationsMeshesRouteViewsResource { + final commons.ApiRequester _requester; + + ProjectsLocationsMeshesRouteViewsResource(commons.ApiRequester client) + : _requester = client; + + /// Get a single RouteView of a Mesh. + /// + /// Request parameters: + /// + /// [name] - Required. Name of the MeshRouteView resource. Format: + /// projects/{project_number}/locations/{location}/meshes/{mesh_name}/routeViews/{route_view_name} + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/meshes/\[^/\]+/routeViews/\[^/\]+$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [MeshRouteView]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future get( + core.String name, { + core.String? $fields, + }) async { + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name'); + + final response_ = await _requester.request( + url_, + 'GET', + queryParams: queryParams_, + ); + return MeshRouteView.fromJson( + response_ as core.Map); + } + + /// Lists RouteViews + /// + /// Request parameters: + /// + /// [parent] - Required. The Mesh to which a Route is associated. 
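// Editor's note: small sketch of reading one MeshRouteView via the get call
// defined above (not part of the generated diff). NetworkServicesApi, the
// accessor chain and the resource name are assumptions/placeholders.
import 'package:googleapis/networkservices/v1.dart';

Future<void> describeMeshRoute(NetworkServicesApi api) async {
  const name =
      'projects/123456789/locations/global/meshes/my-mesh/routeViews/my-route';
  final view = await api.projects.locations.meshes.routeViews.get(name);
  // A route view exposes the route's id, location, project number and type
  // (HttpRoute, GrpcRoute, TcpRoute or TlsRoute).
  print('${view.routeType}: ${view.routeId} in ${view.routeLocation}');
}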
Format: + /// projects/{project_number}/locations/{location}/meshes/{mesh_name} + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/meshes/\[^/\]+$`. + /// + /// [pageSize] - Maximum number of MeshRouteViews to return per call. + /// + /// [pageToken] - The value returned by the last `ListMeshRouteViewsResponse` + /// Indicates that this is a continuation of a prior `ListMeshRouteViews` + /// call, and that the system should return the next page of data. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [ListMeshRouteViewsResponse]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future list( + core.String parent, { + core.int? pageSize, + core.String? pageToken, + core.String? $fields, + }) async { + final queryParams_ = >{ + if (pageSize != null) 'pageSize': ['${pageSize}'], + if (pageToken != null) 'pageToken': [pageToken], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/routeViews'; + + final response_ = await _requester.request( + url_, + 'GET', + queryParams: queryParams_, + ); + return ListMeshRouteViewsResponse.fromJson( + response_ as core.Map); + } +} + class ProjectsLocationsOperationsResource { final commons.ApiRequester _requester; @@ -2389,8 +2871,8 @@ class ProjectsLocationsOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// [request] - The metadata request object. /// @@ -2921,7 +3403,7 @@ class ProjectsLocationsServiceLbPoliciesResource { /// /// Request parameters: /// - /// [name] - Required. Name of the ServiceLbPolicy resource. It matches + /// [name] - Identifier. Name of the ServiceLbPolicy resource. It matches /// pattern /// `projects/{project}/locations/{location}/serviceLbPolicies/{service_lb_policy_name}`. /// Value must have pattern @@ -3417,61 +3899,688 @@ class ProjectsLocationsTlsRoutesResource { } } -/// Specifies the audit configuration for a service. -/// -/// The configuration determines which permission types are logged, and what -/// identities, if any, are exempted from logging. An AuditConfig must have one -/// or more AuditLogConfigs. If there are AuditConfigs for both `allServices` -/// and a specific service, the union of the two AuditConfigs is used for that -/// service: the log_types specified in each AuditConfig are enabled, and the -/// exempted_members in each AuditLogConfig are exempted. 
Example Policy with -/// multiple AuditConfigs: { "audit_configs": \[ { "service": "allServices", -/// "audit_log_configs": \[ { "log_type": "DATA_READ", "exempted_members": \[ -/// "user:jose@example.com" \] }, { "log_type": "DATA_WRITE" }, { "log_type": -/// "ADMIN_READ" } \] }, { "service": "sampleservice.googleapis.com", -/// "audit_log_configs": \[ { "log_type": "DATA_READ" }, { "log_type": -/// "DATA_WRITE", "exempted_members": \[ "user:aliya@example.com" \] } \] } \] } -/// For sampleservice, this policy enables DATA_READ, DATA_WRITE and ADMIN_READ -/// logging. It also exempts `jose@example.com` from DATA_READ logging, and -/// `aliya@example.com` from DATA_WRITE logging. -class AuditConfig { - /// The configuration for logging of each type of permission. - core.List? auditLogConfigs; +class ProjectsLocationsWasmPluginsResource { + final commons.ApiRequester _requester; + + ProjectsLocationsWasmPluginsVersionsResource get versions => + ProjectsLocationsWasmPluginsVersionsResource(_requester); + + ProjectsLocationsWasmPluginsResource(commons.ApiRequester client) + : _requester = client; + + /// Creates a new `WasmPlugin` resource in a given project and location. + /// + /// [request] - The metadata request object. + /// + /// Request parameters: + /// + /// [parent] - Required. The parent resource of the `WasmPlugin` resource. + /// Must be in the format `projects/{project}/locations/global`. + /// Value must have pattern `^projects/\[^/\]+/locations/\[^/\]+$`. + /// + /// [wasmPluginId] - Required. User-provided ID of the `WasmPlugin` resource + /// to be created. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [Operation]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future create( + WasmPlugin request, + core.String parent, { + core.String? wasmPluginId, + core.String? $fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if (wasmPluginId != null) 'wasmPluginId': [wasmPluginId], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/wasmPlugins'; + + final response_ = await _requester.request( + url_, + 'POST', + body: body_, + queryParams: queryParams_, + ); + return Operation.fromJson(response_ as core.Map); + } + + /// Deletes the specified `WasmPlugin` resource. + /// + /// Request parameters: + /// + /// [name] - Required. A name of the `WasmPlugin` resource to delete. Must be + /// in the format + /// `projects/{project}/locations/global/wasmPlugins/{wasm_plugin}`. + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/wasmPlugins/\[^/\]+$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [Operation]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future delete( + core.String name, { + core.String? 
$fields, + }) async { + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name'); + + final response_ = await _requester.request( + url_, + 'DELETE', + queryParams: queryParams_, + ); + return Operation.fromJson(response_ as core.Map); + } + + /// Gets details of the specified `WasmPlugin` resource. + /// + /// Request parameters: + /// + /// [name] - Required. A name of the `WasmPlugin` resource to get. Must be in + /// the format + /// `projects/{project}/locations/global/wasmPlugins/{wasm_plugin}`. + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/wasmPlugins/\[^/\]+$`. + /// + /// [view] - Determines how much data must be returned in the response. See + /// \[AIP-157\](https://google.aip.dev/157). + /// Possible string values are: + /// - "WASM_PLUGIN_VIEW_UNSPECIFIED" : Unspecified value. Do not use. + /// - "WASM_PLUGIN_VIEW_BASIC" : If specified in the `GET` request for a + /// `WasmPlugin` resource, the server's response includes just the + /// `WasmPlugin` resource. + /// - "WASM_PLUGIN_VIEW_FULL" : If specified in the `GET` request for a + /// `WasmPlugin` resource, the server's response includes the `WasmPlugin` + /// resource with all its versions. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [WasmPlugin]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future get( + core.String name, { + core.String? view, + core.String? $fields, + }) async { + final queryParams_ = >{ + if (view != null) 'view': [view], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name'); + + final response_ = await _requester.request( + url_, + 'GET', + queryParams: queryParams_, + ); + return WasmPlugin.fromJson( + response_ as core.Map); + } + + /// Lists `WasmPlugin` resources in a given project and location. + /// + /// Request parameters: + /// + /// [parent] - Required. The project and location from which the `WasmPlugin` + /// resources are listed, specified in the following format: + /// `projects/{project}/locations/global`. + /// Value must have pattern `^projects/\[^/\]+/locations/\[^/\]+$`. + /// + /// [pageSize] - Maximum number of `WasmPlugin` resources to return per call. + /// If not specified, at most 50 `WasmPlugin` resources are returned. The + /// maximum value is 1000; values above 1000 are coerced to 1000. + /// + /// [pageToken] - The value returned by the last `ListWasmPluginsResponse` + /// call. Indicates that this is a continuation of a prior `ListWasmPlugins` + /// call, and that the next page of data is to be returned. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [ListWasmPluginsResponse]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future list( + core.String parent, { + core.int? pageSize, + core.String? pageToken, + core.String? 
$fields, + }) async { + final queryParams_ = >{ + if (pageSize != null) 'pageSize': ['${pageSize}'], + if (pageToken != null) 'pageToken': [pageToken], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/wasmPlugins'; + + final response_ = await _requester.request( + url_, + 'GET', + queryParams: queryParams_, + ); + return ListWasmPluginsResponse.fromJson( + response_ as core.Map); + } + + /// Updates the parameters of the specified `WasmPlugin` resource. + /// + /// [request] - The metadata request object. + /// + /// Request parameters: + /// + /// [name] - Identifier. Name of the `WasmPlugin` resource in the following + /// format: + /// `projects/{project}/locations/{location}/wasmPlugins/{wasm_plugin}`. + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/wasmPlugins/\[^/\]+$`. + /// + /// [updateMask] - Optional. Used to specify the fields to be overwritten in + /// the `WasmPlugin` resource by the update. The fields specified in the + /// `update_mask` field are relative to the resource, not the full request. An + /// omitted `update_mask` field is treated as an implied `update_mask` field + /// equivalent to all fields that are populated (that have a non-empty value). + /// The `update_mask` field supports a special value `*`, which means that + /// each field in the given `WasmPlugin` resource (including the empty ones) + /// replaces the current value. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [Operation]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future patch( + WasmPlugin request, + core.String name, { + core.String? updateMask, + core.String? $fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if (updateMask != null) 'updateMask': [updateMask], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name'); + + final response_ = await _requester.request( + url_, + 'PATCH', + body: body_, + queryParams: queryParams_, + ); + return Operation.fromJson(response_ as core.Map); + } +} + +class ProjectsLocationsWasmPluginsVersionsResource { + final commons.ApiRequester _requester; + + ProjectsLocationsWasmPluginsVersionsResource(commons.ApiRequester client) + : _requester = client; + + /// Creates a new `WasmPluginVersion` resource in a given project and + /// location. + /// + /// [request] - The metadata request object. + /// + /// Request parameters: + /// + /// [parent] - Required. The parent resource of the `WasmPluginVersion` + /// resource. Must be in the format + /// `projects/{project}/locations/global/wasmPlugins/{wasm_plugin}`. + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/wasmPlugins/\[^/\]+$`. + /// + /// [wasmPluginVersionId] - Required. User-provided ID of the + /// `WasmPluginVersion` resource to be created. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [Operation]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. 
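// Editor's note: sketch of the wasmPlugins.patch call defined above, used to
// point mainVersionId at an existing version (not part of the generated
// diff). NetworkServicesApi and the names are assumptions/placeholders; the
// update mask limits the change to that single field, so the versions map is
// left untouched.
import 'package:googleapis/networkservices/v1.dart';

Future<void> promotePluginVersion(NetworkServicesApi api) async {
  const name = 'projects/my-project/locations/global/wasmPlugins/my-plugin';
  final op = await api.projects.locations.wasmPlugins.patch(
    WasmPlugin(name: name, mainVersionId: 'v2'),
    name,
    updateMask: 'mainVersionId',
  );
  print('patch started: ${op.name}');
}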
+ async.Future create( + WasmPluginVersion request, + core.String parent, { + core.String? wasmPluginVersionId, + core.String? $fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if (wasmPluginVersionId != null) + 'wasmPluginVersionId': [wasmPluginVersionId], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/versions'; + + final response_ = await _requester.request( + url_, + 'POST', + body: body_, + queryParams: queryParams_, + ); + return Operation.fromJson(response_ as core.Map); + } + + /// Deletes the specified `WasmPluginVersion` resource. + /// + /// Request parameters: + /// + /// [name] - Required. A name of the `WasmPluginVersion` resource to delete. + /// Must be in the format + /// `projects/{project}/locations/global/wasmPlugins/{wasm_plugin}/versions/{wasm_plugin_version}`. + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/wasmPlugins/\[^/\]+/versions/\[^/\]+$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [Operation]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future delete( + core.String name, { + core.String? $fields, + }) async { + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name'); + + final response_ = await _requester.request( + url_, + 'DELETE', + queryParams: queryParams_, + ); + return Operation.fromJson(response_ as core.Map); + } + + /// Gets details of the specified `WasmPluginVersion` resource. + /// + /// Request parameters: + /// + /// [name] - Required. A name of the `WasmPluginVersion` resource to get. Must + /// be in the format + /// `projects/{project}/locations/global/wasmPlugins/{wasm_plugin}/versions/{wasm_plugin_version}`. + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/wasmPlugins/\[^/\]+/versions/\[^/\]+$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [WasmPluginVersion]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future get( + core.String name, { + core.String? $fields, + }) async { + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name'); + + final response_ = await _requester.request( + url_, + 'GET', + queryParams: queryParams_, + ); + return WasmPluginVersion.fromJson( + response_ as core.Map); + } + + /// Lists `WasmPluginVersion` resources in a given project and location. + /// + /// Request parameters: + /// + /// [parent] - Required. The `WasmPlugin` resource whose `WasmPluginVersion`s + /// are listed, specified in the following format: + /// `projects/{project}/locations/global/wasmPlugins/{wasm_plugin}`. + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/wasmPlugins/\[^/\]+$`. + /// + /// [pageSize] - Maximum number of `WasmPluginVersion` resources to return per + /// call. If not specified, at most 50 `WasmPluginVersion` resources are + /// returned. 
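// Editor's note: sketch of the versions.create call shown above (not part of
// the generated diff). NetworkServicesApi, the image URI and the version id
// are assumptions/placeholders; per the field docs later in this file, the
// container image digest is resolved by the server when the version is
// created.
import 'package:googleapis/networkservices/v1.dart';

Future<void> createPluginVersion(NetworkServicesApi api) async {
  const parent = 'projects/my-project/locations/global/wasmPlugins/my-plugin';
  final version = WasmPluginVersion(
    description: 'first build',
    imageUri: 'us-docker.pkg.dev/my-project/my-repo/my-plugin:v1',
  );
  final op = await api.projects.locations.wasmPlugins.versions
      .create(version, parent, wasmPluginVersionId: 'v1');
  print('version create started: ${op.name}');
}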
The maximum value is 1000; values above 1000 are coerced to + /// 1000. + /// + /// [pageToken] - The value returned by the last + /// `ListWasmPluginVersionsResponse` call. Indicates that this is a + /// continuation of a prior `ListWasmPluginVersions` call, and that the next + /// page of data is to be returned. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [ListWasmPluginVersionsResponse]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future list( + core.String parent, { + core.int? pageSize, + core.String? pageToken, + core.String? $fields, + }) async { + final queryParams_ = >{ + if (pageSize != null) 'pageSize': ['${pageSize}'], + if (pageToken != null) 'pageToken': [pageToken], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/versions'; + + final response_ = await _requester.request( + url_, + 'GET', + queryParams: queryParams_, + ); + return ListWasmPluginVersionsResponse.fromJson( + response_ as core.Map); + } +} + +/// Specifies the audit configuration for a service. +/// +/// The configuration determines which permission types are logged, and what +/// identities, if any, are exempted from logging. An AuditConfig must have one +/// or more AuditLogConfigs. If there are AuditConfigs for both `allServices` +/// and a specific service, the union of the two AuditConfigs is used for that +/// service: the log_types specified in each AuditConfig are enabled, and the +/// exempted_members in each AuditLogConfig are exempted. Example Policy with +/// multiple AuditConfigs: { "audit_configs": \[ { "service": "allServices", +/// "audit_log_configs": \[ { "log_type": "DATA_READ", "exempted_members": \[ +/// "user:jose@example.com" \] }, { "log_type": "DATA_WRITE" }, { "log_type": +/// "ADMIN_READ" } \] }, { "service": "sampleservice.googleapis.com", +/// "audit_log_configs": \[ { "log_type": "DATA_READ" }, { "log_type": +/// "DATA_WRITE", "exempted_members": \[ "user:aliya@example.com" \] } \] } \] } +/// For sampleservice, this policy enables DATA_READ, DATA_WRITE and ADMIN_READ +/// logging. It also exempts `jose@example.com` from DATA_READ logging, and +/// `aliya@example.com` from DATA_WRITE logging. +class AuditConfig { + /// The configuration for logging of each type of permission. + core.List? auditLogConfigs; + + /// Specifies a service that will be enabled for audit logging. + /// + /// For example, `storage.googleapis.com`, `cloudsql.googleapis.com`. + /// `allServices` is a special value that covers all services. + core.String? service; + + AuditConfig({ + this.auditLogConfigs, + this.service, + }); + + AuditConfig.fromJson(core.Map json_) + : this( + auditLogConfigs: (json_['auditLogConfigs'] as core.List?) + ?.map((value) => AuditLogConfig.fromJson( + value as core.Map)) + .toList(), + service: json_['service'] as core.String?, + ); + + core.Map toJson() => { + if (auditLogConfigs != null) 'auditLogConfigs': auditLogConfigs!, + if (service != null) 'service': service!, + }; +} + +/// Provides the configuration for logging a type of permissions. 
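// Editor's note: the example policy quoted in the AuditConfig comment above,
// rewritten as Dart construction of the same objects (not part of the
// generated diff). This assumes the shared AuditLogConfig typedef exposes
// `logType` and `exemptedMembers`, which is not spelled out in this hunk.
import 'package:googleapis/networkservices/v1.dart';

final auditConfigs = [
  AuditConfig(
    service: 'allServices',
    auditLogConfigs: [
      AuditLogConfig(
          logType: 'DATA_READ', exemptedMembers: ['user:jose@example.com']),
      AuditLogConfig(logType: 'DATA_WRITE'),
      AuditLogConfig(logType: 'ADMIN_READ'),
    ],
  ),
  AuditConfig(
    service: 'sampleservice.googleapis.com',
    auditLogConfigs: [
      AuditLogConfig(logType: 'DATA_READ'),
      AuditLogConfig(
          logType: 'DATA_WRITE', exemptedMembers: ['user:aliya@example.com']),
    ],
  ),
];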
+/// +/// Example: { "audit_log_configs": \[ { "log_type": "DATA_READ", +/// "exempted_members": \[ "user:jose@example.com" \] }, { "log_type": +/// "DATA_WRITE" } \] } This enables 'DATA_READ' and 'DATA_WRITE' logging, while +/// exempting jose@example.com from DATA_READ logging. +typedef AuditLogConfig = $AuditLogConfig; + +/// `AuthzExtension` is a resource that allows traffic forwarding to a callout +/// backend service to make an authorization decision. +class AuthzExtension { + /// The `:authority` header in the gRPC request sent from Envoy to the + /// extension service. + /// + /// Required. + core.String? authority; + + /// The timestamp when the resource was created. + /// + /// Output only. + core.String? createTime; + + /// A human-readable description of the resource. + /// + /// Optional. + core.String? description; + + /// Determines how the proxy behaves if the call to the extension fails or + /// times out. + /// + /// When set to `TRUE`, request or response processing continues without + /// error. Any subsequent extensions in the extension chain are also executed. + /// When set to `FALSE` or the default setting of `FALSE` is used, one of the + /// following happens: * If response headers have not been delivered to the + /// downstream client, a generic 500 error is returned to the client. The + /// error response can be tailored by configuring a custom error response in + /// the load balancer. * If response headers have been delivered, then the + /// HTTP stream to the downstream client is reset. + /// + /// Optional. + core.bool? failOpen; + + /// List of the HTTP headers to forward to the extension (from the client). + /// + /// If omitted, all headers are sent. Each element is a string indicating the + /// header name. + /// + /// Optional. + core.List? forwardHeaders; + + /// Set of labels associated with the `AuthzExtension` resource. + /// + /// The format must comply with \[the requirements for + /// labels\](/compute/docs/labeling-resources#requirements) for Google Cloud + /// resources. + /// + /// Optional. + core.Map? labels; + + /// All backend services and forwarding rules referenced by this extension + /// must share the same load balancing scheme. + /// + /// Supported values: `INTERNAL_MANAGED`, `EXTERNAL_MANAGED`. For more + /// information, refer to + /// [Backend services overview](https://cloud.google.com/load-balancing/docs/backend-service). + /// + /// Required. + /// Possible string values are: + /// - "LOAD_BALANCING_SCHEME_UNSPECIFIED" : Default value. Do not use. + /// - "INTERNAL_MANAGED" : Signifies that this is used for Internal HTTP(S) + /// Load Balancing. + /// - "EXTERNAL_MANAGED" : Signifies that this is used for External Managed + /// HTTP(S) Load Balancing. + core.String? loadBalancingScheme; + + /// The metadata provided here is included as part of the `metadata_context` + /// (of type `google.protobuf.Struct`) in the `ProcessingRequest` message sent + /// to the extension server. + /// + /// The metadata is available under the namespace + /// `com.google.authz_extension.`. The following variables are supported in + /// the metadata Struct: `{forwarding_rule_id}` - substituted with the + /// forwarding rule's fully qualified resource name. + /// + /// Optional. + /// + /// The values for Object must be JSON objects. It can consist of `num`, + /// `String`, `bool` and `null` as well as `Map` and `List` values. + core.Map? metadata; + + /// Identifier. 
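// Editor's note: sketch of assembling an AuthzExtension and sending it via
// the patch call defined earlier in this file (not part of the generated
// diff). NetworkServicesApi and the resource names are assumptions or
// placeholders; the update mask restricts the write to the listed fields.
import 'package:googleapis/networkservices/v1.dart';

Future<void> retargetAuthzExtension(NetworkServicesApi api) async {
  const name =
      'projects/my-project/locations/us-central1/authzExtensions/my-ext';
  final ext = AuthzExtension(
    name: name,
    authority: 'authz.example.com',
    loadBalancingScheme: 'INTERNAL_MANAGED',
    service:
        'https://www.googleapis.com/compute/v1/projects/my-project/regions/us-central1/backendServices/authz-backend',
    // Per the field doc, the per-message timeout must be 10-10000 ms,
    // expressed as a Duration string.
    timeout: '0.100s',
    failOpen: false,
  );
  final op = await api.projects.locations.authzExtensions
      .patch(ext, name, updateMask: 'service,timeout,failOpen');
  print('patch started: ${op.name}');
}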
+ /// + /// Name of the `AuthzExtension` resource in the following format: + /// `projects/{project}/locations/{location}/authzExtensions/{authz_extension}`. + /// + /// Required. + core.String? name; + + /// The reference to the service that runs the extension. + /// + /// To configure a callout extension, `service` must be a fully-qualified + /// reference to a + /// [backend service](https://cloud.google.com/compute/docs/reference/rest/v1/backendServices) + /// in the format: + /// `https://www.googleapis.com/compute/v1/projects/{project}/regions/{region}/backendServices/{backendService}` + /// or + /// `https://www.googleapis.com/compute/v1/projects/{project}/global/backendServices/{backendService}`. + /// + /// Required. + core.String? service; + + /// Specifies the timeout for each individual message on the stream. + /// + /// The timeout must be between 10-10000 milliseconds. + /// + /// Required. + core.String? timeout; - /// Specifies a service that will be enabled for audit logging. + /// The timestamp when the resource was updated. /// - /// For example, `storage.googleapis.com`, `cloudsql.googleapis.com`. - /// `allServices` is a special value that covers all services. - core.String? service; + /// Output only. + core.String? updateTime; - AuditConfig({ - this.auditLogConfigs, + /// The format of communication supported by the callout extension. + /// + /// If not specified, the default is `EXT_PROC_GRPC`. + /// + /// Optional. + /// Possible string values are: + /// - "WIRE_FORMAT_UNSPECIFIED" : Not specified. + /// - "EXT_PROC_GRPC" : The extension service uses ExtProc GRPC API over a + /// gRPC stream. This is the default value if the wire format is not + /// specified. The backend service for the extension must use HTTP2 or H2C as + /// the protocol. All `supported_events` for a client request will be sent as + /// part of the same gRPC stream. + core.String? wireFormat; + + AuthzExtension({ + this.authority, + this.createTime, + this.description, + this.failOpen, + this.forwardHeaders, + this.labels, + this.loadBalancingScheme, + this.metadata, + this.name, this.service, + this.timeout, + this.updateTime, + this.wireFormat, }); - AuditConfig.fromJson(core.Map json_) + AuthzExtension.fromJson(core.Map json_) : this( - auditLogConfigs: (json_['auditLogConfigs'] as core.List?) - ?.map((value) => AuditLogConfig.fromJson( - value as core.Map)) + authority: json_['authority'] as core.String?, + createTime: json_['createTime'] as core.String?, + description: json_['description'] as core.String?, + failOpen: json_['failOpen'] as core.bool?, + forwardHeaders: (json_['forwardHeaders'] as core.List?) + ?.map((value) => value as core.String) .toList(), + labels: + (json_['labels'] as core.Map?)?.map( + (key, value) => core.MapEntry( + key, + value as core.String, + ), + ), + loadBalancingScheme: json_['loadBalancingScheme'] as core.String?, + metadata: json_.containsKey('metadata') + ? 
json_['metadata'] as core.Map + : null, + name: json_['name'] as core.String?, service: json_['service'] as core.String?, + timeout: json_['timeout'] as core.String?, + updateTime: json_['updateTime'] as core.String?, + wireFormat: json_['wireFormat'] as core.String?, ); core.Map toJson() => { - if (auditLogConfigs != null) 'auditLogConfigs': auditLogConfigs!, + if (authority != null) 'authority': authority!, + if (createTime != null) 'createTime': createTime!, + if (description != null) 'description': description!, + if (failOpen != null) 'failOpen': failOpen!, + if (forwardHeaders != null) 'forwardHeaders': forwardHeaders!, + if (labels != null) 'labels': labels!, + if (loadBalancingScheme != null) + 'loadBalancingScheme': loadBalancingScheme!, + if (metadata != null) 'metadata': metadata!, + if (name != null) 'name': name!, if (service != null) 'service': service!, + if (timeout != null) 'timeout': timeout!, + if (updateTime != null) 'updateTime': updateTime!, + if (wireFormat != null) 'wireFormat': wireFormat!, }; } -/// Provides the configuration for logging a type of permissions. -/// -/// Example: { "audit_log_configs": \[ { "log_type": "DATA_READ", -/// "exempted_members": \[ "user:jose@example.com" \] }, { "log_type": -/// "DATA_WRITE" } \] } This enables 'DATA_READ' and 'DATA_WRITE' logging, while -/// exempting jose@example.com from DATA_READ logging. -typedef AuditLogConfig = $AuditLogConfig; - /// Associates `members`, or principals, with a `role`. class Binding { /// The condition that is associated with this binding. @@ -3924,7 +5033,8 @@ class ExtensionChainExtension { /// The `:authority` header in the gRPC request sent from Envoy to the /// extension service. /// - /// Required for Callout extensions. + /// Required for Callout extensions. This field is not supported for plugin + /// extensions. Setting it results in a validation error. /// /// Optional. core.String? authority; @@ -3953,6 +5063,24 @@ class ExtensionChainExtension { /// Optional. core.List? forwardHeaders; + /// The metadata provided here is included as part of the `metadata_context` + /// (of type `google.protobuf.Struct`) in the `ProcessingRequest` message sent + /// to the extension server. + /// + /// The metadata is available under the namespace `com.google....`. For + /// example: + /// `com.google.lb_traffic_extension.lbtrafficextension1.chain1.ext1`. The + /// following variables are supported in the metadata: `{forwarding_rule_id}` + /// - substituted with the forwarding rule's fully qualified resource name. + /// This field is not supported for plugin extensions. Setting it results in a + /// validation error. + /// + /// Optional. + /// + /// The values for Object must be JSON objects. It can consist of `num`, + /// `String`, `bool` and `null` as well as `Map` and `List` values. + core.Map? metadata; + /// The name for this extension. /// /// The name is logged as part of the HTTP request logs. The name must conform @@ -3965,13 +5093,21 @@ class ExtensionChainExtension { /// The reference to the service that runs the extension. /// - /// Currently only callout extensions are supported here. 
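// Editor's note: sketch of an ExtensionChainExtension configured as a callout
// extension using the new `metadata` field described above (not part of the
// generated diff). Names, the backend service and the event value are
// placeholders/assumptions; per the surrounding docs, `authority`, `timeout`
// and `metadata` must be omitted for plugin extensions, where `service`
// references a WasmPlugin resource instead of a backend service.
import 'package:googleapis/networkservices/v1.dart';

final calloutExtension = ExtensionChainExtension(
  name: 'ext1',
  authority: 'callout.example.com',
  service:
      'https://www.googleapis.com/compute/v1/projects/my-project/regions/us-central1/backendServices/callout-backend',
  // Required for LbTrafficExtension chains; must not be set for
  // LbRouteExtension (see the supportedEvents doc below).
  supportedEvents: ['REQUEST_HEADERS'],
  timeout: '0.050s',
  failOpen: false,
  // Surfaced to the extension server under the com.google.... namespace in
  // metadata_context, as described in the metadata field doc above.
  metadata: {'tenant': 'my-tenant'},
);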
To configure a - /// callout extension, `service` must be a fully-qualified reference to a + /// To configure a callout extension, `service` must be a fully-qualified + /// reference to a /// [backend service](https://cloud.google.com/compute/docs/reference/rest/v1/backendServices) /// in the format: /// `https://www.googleapis.com/compute/v1/projects/{project}/regions/{region}/backendServices/{backendService}` /// or /// `https://www.googleapis.com/compute/v1/projects/{project}/global/backendServices/{backendService}`. + /// To configure a plugin extension, `service` must be a reference to a + /// \[`WasmPlugin` + /// resource\](https://cloud.google.com/service-extensions/docs/reference/rest/v1beta1/projects.locations.wasmPlugins) + /// in the format: + /// `projects/{project}/locations/{location}/wasmPlugins/{plugin}` or + /// `//networkservices.googleapis.com/projects/{project}/locations/{location}/wasmPlugins/{wasmPlugin}`. + /// Plugin extensions are currently supported for the `LbTrafficExtension` and + /// the `LbRouteExtension` resources. /// /// Required. core.String? service; @@ -3980,15 +5116,17 @@ class ExtensionChainExtension { /// extension is called. /// /// This field is required for the `LbTrafficExtension` resource. It must not - /// be set for the `LbRouteExtension` resource. + /// be set for the `LbRouteExtension` resource, otherwise a validation error + /// is returned. /// /// Optional. core.List? supportedEvents; /// Specifies the timeout for each individual message on the stream. /// - /// The timeout must be between 10-1000 milliseconds. Required for Callout - /// extensions. + /// The timeout must be between `10`-`1000` milliseconds. Required for callout + /// extensions. This field is not supported for plugin extensions. Setting it + /// results in a validation error. /// /// Optional. core.String? timeout; @@ -3997,6 +5135,7 @@ class ExtensionChainExtension { this.authority, this.failOpen, this.forwardHeaders, + this.metadata, this.name, this.service, this.supportedEvents, @@ -4010,6 +5149,9 @@ class ExtensionChainExtension { forwardHeaders: (json_['forwardHeaders'] as core.List?) ?.map((value) => value as core.String) .toList(), + metadata: json_.containsKey('metadata') + ? json_['metadata'] as core.Map + : null, name: json_['name'] as core.String?, service: json_['service'] as core.String?, supportedEvents: (json_['supportedEvents'] as core.List?) @@ -4022,6 +5164,7 @@ class ExtensionChainExtension { if (authority != null) 'authority': authority!, if (failOpen != null) 'failOpen': failOpen!, if (forwardHeaders != null) 'forwardHeaders': forwardHeaders!, + if (metadata != null) 'metadata': metadata!, if (name != null) 'name': name!, if (service != null) 'service': service!, if (supportedEvents != null) 'supportedEvents': supportedEvents!, @@ -4306,6 +5449,63 @@ class Gateway { }; } +/// GatewayRouteView defines view-only resource for Routes to a Gateway +class GatewayRouteView { + /// Identifier. + /// + /// Full path name of the GatewayRouteView resource. Format: + /// projects/{project_number}/locations/{location}/gateways/{gateway_name}/routeViews/{route_view_name} + /// + /// Output only. + core.String? name; + + /// The resource id for the route. + /// + /// Output only. + core.String? routeId; + + /// Location where the route exists. + /// + /// Output only. + core.String? routeLocation; + + /// Project number where the route exists. + /// + /// Output only. + core.String? 
routeProjectNumber; + + /// Type of the route: HttpRoute,GrpcRoute,TcpRoute, or TlsRoute + /// + /// Output only. + core.String? routeType; + + GatewayRouteView({ + this.name, + this.routeId, + this.routeLocation, + this.routeProjectNumber, + this.routeType, + }); + + GatewayRouteView.fromJson(core.Map json_) + : this( + name: json_['name'] as core.String?, + routeId: json_['routeId'] as core.String?, + routeLocation: json_['routeLocation'] as core.String?, + routeProjectNumber: json_['routeProjectNumber'] as core.String?, + routeType: json_['routeType'] as core.String?, + ); + + core.Map toJson() => { + if (name != null) 'name': name!, + if (routeId != null) 'routeId': routeId!, + if (routeLocation != null) 'routeLocation': routeLocation!, + if (routeProjectNumber != null) + 'routeProjectNumber': routeProjectNumber!, + if (routeType != null) 'routeType': routeType!, + }; +} + /// GrpcRoute is the resource defining how gRPC traffic routed by a Mesh or /// Gateway resource is routed. class GrpcRoute { @@ -5971,7 +7171,7 @@ class LbRouteExtension { core.List? extensionChains; /// A list of references to the forwarding rules to which this service - /// extension is attached to. + /// extension is attached. /// /// At least one forwarding rule is required. There can be only one /// `LbRouteExtension` resource per forwarding rule. @@ -6011,7 +7211,8 @@ class LbRouteExtension { /// The metadata is available under the namespace /// `com.google.lb_route_extension.`. The following variables are supported in /// the metadata Struct: `{forwarding_rule_id}` - substituted with the - /// forwarding rule's fully qualified resource name. + /// forwarding rule's fully qualified resource name. This field is not + /// supported for plugin extensions. Setting it results in a validation error. /// /// Optional. /// @@ -6111,12 +7312,12 @@ class LbTrafficExtension { core.List? extensionChains; /// A list of references to the forwarding rules to which this service - /// extension is attached to. + /// extension is attached. /// /// At least one forwarding rule is required. There can be only one /// `LBTrafficExtension` resource per forwarding rule. /// - /// Required. + /// Optional. core.List? forwardingRules; /// Set of labels associated with the `LbTrafficExtension` resource. @@ -6150,7 +7351,8 @@ class LbTrafficExtension { /// The metadata is available under the key /// `com.google.lb_traffic_extension.`. The following variables are supported /// in the metadata: `{forwarding_rule_id}` - substituted with the forwarding - /// rule's fully qualified resource name. + /// rule's fully qualified resource name. This field is not supported for + /// plugin extensions. Setting it results in a validation error. /// /// Optional. /// @@ -6223,6 +7425,42 @@ class LbTrafficExtension { }; } +/// Message for response to listing `AuthzExtension` resources. +class ListAuthzExtensionsResponse { + /// The list of `AuthzExtension` resources. + core.List? authzExtensions; + + /// A token identifying a page of results that the server returns. + core.String? nextPageToken; + + /// Locations that could not be reached. + core.List? unreachable; + + ListAuthzExtensionsResponse({ + this.authzExtensions, + this.nextPageToken, + this.unreachable, + }); + + ListAuthzExtensionsResponse.fromJson(core.Map json_) + : this( + authzExtensions: (json_['authzExtensions'] as core.List?) 
+ ?.map((value) => AuthzExtension.fromJson( + value as core.Map)) + .toList(), + nextPageToken: json_['nextPageToken'] as core.String?, + unreachable: (json_['unreachable'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + ); + + core.Map toJson() => { + if (authzExtensions != null) 'authzExtensions': authzExtensions!, + if (nextPageToken != null) 'nextPageToken': nextPageToken!, + if (unreachable != null) 'unreachable': unreachable!, + }; +} + /// Response returned by the ListEndpointPolicies method. class ListEndpointPoliciesResponse { /// List of EndpointPolicy resources. @@ -6255,6 +7493,36 @@ class ListEndpointPoliciesResponse { }; } +/// Response returned by the ListGatewayRouteViews method. +class ListGatewayRouteViewsResponse { + /// List of GatewayRouteView resources. + core.List? gatewayRouteViews; + + /// A token, which can be sent as `page_token` to retrieve the next page. + /// + /// If this field is omitted, there are no subsequent pages. + core.String? nextPageToken; + + ListGatewayRouteViewsResponse({ + this.gatewayRouteViews, + this.nextPageToken, + }); + + ListGatewayRouteViewsResponse.fromJson(core.Map json_) + : this( + gatewayRouteViews: (json_['gatewayRouteViews'] as core.List?) + ?.map((value) => GatewayRouteView.fromJson( + value as core.Map)) + .toList(), + nextPageToken: json_['nextPageToken'] as core.String?, + ); + + core.Map toJson() => { + if (gatewayRouteViews != null) 'gatewayRouteViews': gatewayRouteViews!, + if (nextPageToken != null) 'nextPageToken': nextPageToken!, + }; +} + /// Response returned by the ListGateways method. class ListGatewaysResponse { /// List of Gateway resources. @@ -6460,6 +7728,36 @@ class ListLocationsResponse { }; } +/// Response returned by the ListMeshRouteViews method. +class ListMeshRouteViewsResponse { + /// List of MeshRouteView resources. + core.List? meshRouteViews; + + /// A token, which can be sent as `page_token` to retrieve the next page. + /// + /// If this field is omitted, there are no subsequent pages. + core.String? nextPageToken; + + ListMeshRouteViewsResponse({ + this.meshRouteViews, + this.nextPageToken, + }); + + ListMeshRouteViewsResponse.fromJson(core.Map json_) + : this( + meshRouteViews: (json_['meshRouteViews'] as core.List?) + ?.map((value) => MeshRouteView.fromJson( + value as core.Map)) + .toList(), + nextPageToken: json_['nextPageToken'] as core.String?, + ); + + core.Map toJson() => { + if (meshRouteViews != null) 'meshRouteViews': meshRouteViews!, + if (nextPageToken != null) 'nextPageToken': nextPageToken!, + }; +} + /// Response returned by the ListMeshes method. class ListMeshesResponse { /// List of Mesh resources. @@ -6648,6 +7946,71 @@ class ListTlsRoutesResponse { }; } +/// Response returned by the `ListWasmPluginVersions` method. +class ListWasmPluginVersionsResponse { + /// If there might be more results than those appearing in this response, then + /// `next_page_token` is included. + /// + /// To get the next set of results, call this method again using the value of + /// `next_page_token` as `page_token`. + core.String? nextPageToken; + + /// List of `WasmPluginVersion` resources. + core.List? wasmPluginVersions; + + ListWasmPluginVersionsResponse({ + this.nextPageToken, + this.wasmPluginVersions, + }); + + ListWasmPluginVersionsResponse.fromJson(core.Map json_) + : this( + nextPageToken: json_['nextPageToken'] as core.String?, + wasmPluginVersions: (json_['wasmPluginVersions'] as core.List?) 
+ ?.map((value) => WasmPluginVersion.fromJson( + value as core.Map)) + .toList(), + ); + + core.Map toJson() => { + if (nextPageToken != null) 'nextPageToken': nextPageToken!, + if (wasmPluginVersions != null) + 'wasmPluginVersions': wasmPluginVersions!, + }; +} + +/// Response returned by the `ListWasmPlugins` method. +class ListWasmPluginsResponse { + /// If there might be more results than those appearing in this response, then + /// `next_page_token` is included. + /// + /// To get the next set of results, call this method again using the value of + /// `next_page_token` as `page_token`. + core.String? nextPageToken; + + /// List of `WasmPlugin` resources. + core.List? wasmPlugins; + + ListWasmPluginsResponse({ + this.nextPageToken, + this.wasmPlugins, + }); + + ListWasmPluginsResponse.fromJson(core.Map json_) + : this( + nextPageToken: json_['nextPageToken'] as core.String?, + wasmPlugins: (json_['wasmPlugins'] as core.List?) + ?.map((value) => WasmPlugin.fromJson( + value as core.Map)) + .toList(), + ); + + core.Map toJson() => { + if (nextPageToken != null) 'nextPageToken': nextPageToken!, + if (wasmPlugins != null) 'wasmPlugins': wasmPlugins!, + }; +} + /// A resource that represents a Google Cloud location. typedef Location = $Location00; @@ -6758,6 +8121,63 @@ class Mesh { }; } +/// MeshRouteView defines view-only resource for Routes to a Mesh +class MeshRouteView { + /// Identifier. + /// + /// Full path name of the MeshRouteView resource. Format: + /// projects/{project_number}/locations/{location}/meshes/{mesh_name}/routeViews/{route_view_name} + /// + /// Output only. + core.String? name; + + /// The resource id for the route. + /// + /// Output only. + core.String? routeId; + + /// Location where the route exists. + /// + /// Output only. + core.String? routeLocation; + + /// Project number where the route exists. + /// + /// Output only. + core.String? routeProjectNumber; + + /// Type of the route: HttpRoute,GrpcRoute,TcpRoute, or TlsRoute + /// + /// Output only. + core.String? routeType; + + MeshRouteView({ + this.name, + this.routeId, + this.routeLocation, + this.routeProjectNumber, + this.routeType, + }); + + MeshRouteView.fromJson(core.Map json_) + : this( + name: json_['name'] as core.String?, + routeId: json_['routeId'] as core.String?, + routeLocation: json_['routeLocation'] as core.String?, + routeProjectNumber: json_['routeProjectNumber'] as core.String?, + routeType: json_['routeType'] as core.String?, + ); + + core.Map toJson() => { + if (name != null) 'name': name!, + if (routeId != null) 'routeId': routeId!, + if (routeLocation != null) 'routeLocation': routeLocation!, + if (routeProjectNumber != null) + 'routeProjectNumber': routeProjectNumber!, + if (routeType != null) 'routeType': routeType!, + }; +} + /// This resource represents a long-running operation that is the result of a /// network API call. class Operation { @@ -7085,12 +8505,10 @@ class ServiceLbPolicy { /// to the client, before spilling over to other zones. core.String? loadBalancingAlgorithm; - /// Name of the ServiceLbPolicy resource. + /// Identifier. /// - /// It matches pattern + /// Name of the ServiceLbPolicy resource. It matches pattern /// `projects/{project}/locations/{location}/serviceLbPolicies/{service_lb_policy_name}`. - /// - /// Required. core.String? name; /// The timestamp when this resource was last updated. 
@@ -7834,3 +9252,465 @@ class TrafficPortSelector { if (ports != null) 'ports': ports!, }; } + +/// `WasmPlugin` is a resource representing a service executing a +/// customer-provided Wasm module. +class WasmPlugin { + /// The timestamp when the resource was created. + /// + /// Output only. + core.String? createTime; + + /// A human-readable description of the resource. + /// + /// Optional. + core.String? description; + + /// Set of labels associated with the `WasmPlugin` resource. + /// + /// The format must comply with \[the following + /// requirements\](/compute/docs/labeling-resources#requirements). + /// + /// Optional. + core.Map? labels; + + /// Specifies the logging options for the activity performed by this plugin. + /// + /// If logging is enabled, plugin logs are exported to Cloud Logging. Note + /// that the settings relate to the logs generated by using logging statements + /// in your Wasm code. + /// + /// Optional. + WasmPluginLogConfig? logConfig; + + /// The ID of the `WasmPluginVersion` resource that is the currently serving + /// one. + /// + /// The version referred to must be a child of this `WasmPlugin` resource. + /// + /// Optional. + core.String? mainVersionId; + + /// Identifier. + /// + /// Name of the `WasmPlugin` resource in the following format: + /// `projects/{project}/locations/{location}/wasmPlugins/{wasm_plugin}`. + core.String? name; + + /// The timestamp when the resource was updated. + /// + /// Output only. + core.String? updateTime; + + /// List of all + /// [extensions](https://cloud.google.com/service-extensions/docs/overview) + /// that use this `WasmPlugin` resource. + /// + /// Output only. + core.List? usedBy; + + /// All versions of this `WasmPlugin` resource in the key-value format. + /// + /// The key is the resource ID, and the value is the `VersionDetails` object. + /// Lets you create or update a `WasmPlugin` resource and its versions in a + /// single request. When the `main_version_id` field is not empty, it must + /// point to one of the `VersionDetails` objects in the map. If provided in a + /// `PATCH` request, the new versions replace the previous set. Any version + /// omitted from the `versions` field is removed. Because the + /// `WasmPluginVersion` resource is immutable, if a `WasmPluginVersion` + /// resource with the same name already exists and differs, the request fails. + /// Note: In a `GET` request, this field is populated only if the field + /// `GetWasmPluginRequest.view` is set to `WASM_PLUGIN_VIEW_FULL`. + /// + /// Optional. + core.Map? versions; + + WasmPlugin({ + this.createTime, + this.description, + this.labels, + this.logConfig, + this.mainVersionId, + this.name, + this.updateTime, + this.usedBy, + this.versions, + }); + + WasmPlugin.fromJson(core.Map json_) + : this( + createTime: json_['createTime'] as core.String?, + description: json_['description'] as core.String?, + labels: + (json_['labels'] as core.Map?)?.map( + (key, value) => core.MapEntry( + key, + value as core.String, + ), + ), + logConfig: json_.containsKey('logConfig') + ? WasmPluginLogConfig.fromJson( + json_['logConfig'] as core.Map) + : null, + mainVersionId: json_['mainVersionId'] as core.String?, + name: json_['name'] as core.String?, + updateTime: json_['updateTime'] as core.String?, + usedBy: (json_['usedBy'] as core.List?) 
+ ?.map((value) => WasmPluginUsedBy.fromJson( + value as core.Map)) + .toList(), + versions: + (json_['versions'] as core.Map?)?.map( + (key, value) => core.MapEntry( + key, + WasmPluginVersionDetails.fromJson( + value as core.Map), + ), + ), + ); + + core.Map toJson() => { + if (createTime != null) 'createTime': createTime!, + if (description != null) 'description': description!, + if (labels != null) 'labels': labels!, + if (logConfig != null) 'logConfig': logConfig!, + if (mainVersionId != null) 'mainVersionId': mainVersionId!, + if (name != null) 'name': name!, + if (updateTime != null) 'updateTime': updateTime!, + if (usedBy != null) 'usedBy': usedBy!, + if (versions != null) 'versions': versions!, + }; +} + +/// Specifies the logging options for the activity performed by this plugin. +/// +/// If logging is enabled, plugin logs are exported to Cloud Logging. +class WasmPluginLogConfig { + /// Specifies whether to enable logging for activity by this plugin. + /// + /// Defaults to `false`. + /// + /// Optional. + core.bool? enable; + + /// Non-empty default. + /// + /// Specificies the lowest level of the plugin logs that are exported to Cloud + /// Logging. This setting relates to the logs generated by using logging + /// statements in your Wasm code. This field is can be set only if logging is + /// enabled for the plugin. If the field is not provided when logging is + /// enabled, it is set to `INFO` by default. + /// Possible string values are: + /// - "LOG_LEVEL_UNSPECIFIED" : Unspecified value. Defaults to + /// `LogLevel.INFO`. + /// - "TRACE" : Report logs with TRACE level and above. + /// - "DEBUG" : Report logs with DEBUG level and above. + /// - "INFO" : Report logs with INFO level and above. + /// - "WARN" : Report logs with WARN level and above. + /// - "ERROR" : Report logs with ERROR level and above. + /// - "CRITICAL" : Report logs with CRITICAL level only. + core.String? minLogLevel; + + /// Non-empty default. + /// + /// Configures the sampling rate of activity logs, where `1.0` means all + /// logged activity is reported and `0.0` means no activity is reported. A + /// floating point value between `0.0` and `1.0` indicates that a percentage + /// of log messages is stored. The default value when logging is enabled is + /// `1.0`. The value of the field must be between `0` and `1` (inclusive). + /// This field can be specified only if logging is enabled for this plugin. + core.double? sampleRate; + + WasmPluginLogConfig({ + this.enable, + this.minLogLevel, + this.sampleRate, + }); + + WasmPluginLogConfig.fromJson(core.Map json_) + : this( + enable: json_['enable'] as core.bool?, + minLogLevel: json_['minLogLevel'] as core.String?, + sampleRate: (json_['sampleRate'] as core.num?)?.toDouble(), + ); + + core.Map toJson() => { + if (enable != null) 'enable': enable!, + if (minLogLevel != null) 'minLogLevel': minLogLevel!, + if (sampleRate != null) 'sampleRate': sampleRate!, + }; +} + +/// Defines a resource that uses the `WasmPlugin` resource. +class WasmPluginUsedBy { + /// Full name of the resource https://google.aip.dev/122#full-resource-names, + /// for example + /// `//networkservices.googleapis.com/projects/{project}/locations/{location}/lbRouteExtensions/{extension}` + /// + /// Output only. + core.String? 
name; + + WasmPluginUsedBy({ + this.name, + }); + + WasmPluginUsedBy.fromJson(core.Map json_) + : this( + name: json_['name'] as core.String?, + ); + + core.Map toJson() => { + if (name != null) 'name': name!, + }; +} + +/// A single immutable version of a `WasmPlugin` resource. +/// +/// Defines the Wasm module used and optionally its runtime config. +class WasmPluginVersion { + /// The timestamp when the resource was created. + /// + /// Output only. + core.String? createTime; + + /// A human-readable description of the resource. + /// + /// Optional. + core.String? description; + + /// The resolved digest for the image specified in the `image` field. + /// + /// The digest is resolved during the creation of `WasmPluginVersion` + /// resource. This field holds the digest value, regardless of whether a tag + /// or digest was originally specified in the `image` field. + /// + /// Output only. + core.String? imageDigest; + + /// URI of the container image containing the plugin, stored in the Artifact + /// Registry. + /// + /// When a new `WasmPluginVersion` resource is created, the digest of the + /// container image is saved in the `image_digest` field. When downloading an + /// image, the digest value is used instead of an image tag. + /// + /// Optional. + core.String? imageUri; + + /// Set of labels associated with the `WasmPluginVersion` resource. + /// + /// Optional. + core.Map? labels; + + /// Identifier. + /// + /// Name of the `WasmPluginVersion` resource in the following format: + /// `projects/{project}/locations/{location}/wasmPlugins/{wasm_plugin}/ + /// versions/{wasm_plugin_version}`. + core.String? name; + + /// Configuration for the plugin. + /// + /// The configuration is provided to the plugin at runtime through the + /// `ON_CONFIGURE` callback. When a new `WasmPluginVersion` resource is + /// created, the digest of the contents is saved in the `plugin_config_digest` + /// field. + core.String? pluginConfigData; + core.List get pluginConfigDataAsBytes => + convert.base64.decode(pluginConfigData!); + + set pluginConfigDataAsBytes(core.List bytes_) { + pluginConfigData = + convert.base64.encode(bytes_).replaceAll('/', '_').replaceAll('+', '-'); + } + + /// This field holds the digest (usually checksum) value for the plugin + /// configuration. + /// + /// The value is calculated based on the contents of `plugin_config_data` or + /// the container image defined by the `plugin_config_uri` field. + /// + /// Output only. + core.String? pluginConfigDigest; + + /// URI of the plugin configuration stored in the Artifact Registry. + /// + /// The configuration is provided to the plugin at runtime through the + /// `ON_CONFIGURE` callback. The container image must contain only a single + /// file with the name `plugin.config`. When a new `WasmPluginVersion` + /// resource is created, the digest of the container image is saved in the + /// `plugin_config_digest` field. + core.String? pluginConfigUri; + + /// The timestamp when the resource was updated. + /// + /// Output only. + core.String? 
updateTime; + + WasmPluginVersion({ + this.createTime, + this.description, + this.imageDigest, + this.imageUri, + this.labels, + this.name, + this.pluginConfigData, + this.pluginConfigDigest, + this.pluginConfigUri, + this.updateTime, + }); + + WasmPluginVersion.fromJson(core.Map json_) + : this( + createTime: json_['createTime'] as core.String?, + description: json_['description'] as core.String?, + imageDigest: json_['imageDigest'] as core.String?, + imageUri: json_['imageUri'] as core.String?, + labels: + (json_['labels'] as core.Map?)?.map( + (key, value) => core.MapEntry( + key, + value as core.String, + ), + ), + name: json_['name'] as core.String?, + pluginConfigData: json_['pluginConfigData'] as core.String?, + pluginConfigDigest: json_['pluginConfigDigest'] as core.String?, + pluginConfigUri: json_['pluginConfigUri'] as core.String?, + updateTime: json_['updateTime'] as core.String?, + ); + + core.Map toJson() => { + if (createTime != null) 'createTime': createTime!, + if (description != null) 'description': description!, + if (imageDigest != null) 'imageDigest': imageDigest!, + if (imageUri != null) 'imageUri': imageUri!, + if (labels != null) 'labels': labels!, + if (name != null) 'name': name!, + if (pluginConfigData != null) 'pluginConfigData': pluginConfigData!, + if (pluginConfigDigest != null) + 'pluginConfigDigest': pluginConfigDigest!, + if (pluginConfigUri != null) 'pluginConfigUri': pluginConfigUri!, + if (updateTime != null) 'updateTime': updateTime!, + }; +} + +/// Details of a `WasmPluginVersion` resource to be inlined in the `WasmPlugin` +/// resource. +class WasmPluginVersionDetails { + /// The timestamp when the resource was created. + /// + /// Output only. + core.String? createTime; + + /// A human-readable description of the resource. + /// + /// Optional. + core.String? description; + + /// The resolved digest for the image specified in `image`. + /// + /// The digest is resolved during the creation of a `WasmPluginVersion` + /// resource. This field holds the digest value regardless of whether a tag or + /// digest was originally specified in the `image` field. + /// + /// Output only. + core.String? imageDigest; + + /// URI of the container image containing the Wasm module, stored in the + /// Artifact Registry. + /// + /// The container image must contain only a single file with the name + /// `plugin.wasm`. When a new `WasmPluginVersion` resource is created, the URI + /// gets resolved to an image digest and saved in the `image_digest` field. + /// + /// Optional. + core.String? imageUri; + + /// Set of labels associated with the `WasmPluginVersion` resource. + /// + /// Optional. + core.Map? labels; + + /// Configuration for the plugin. + /// + /// The configuration is provided to the plugin at runtime through the + /// `ON_CONFIGURE` callback. When a new `WasmPluginVersion` version is + /// created, the digest of the contents is saved in the `plugin_config_digest` + /// field. + core.String? pluginConfigData; + core.List get pluginConfigDataAsBytes => + convert.base64.decode(pluginConfigData!); + + set pluginConfigDataAsBytes(core.List bytes_) { + pluginConfigData = + convert.base64.encode(bytes_).replaceAll('/', '_').replaceAll('+', '-'); + } + + /// This field holds the digest (usually checksum) value for the plugin + /// configuration. + /// + /// The value is calculated based on the contents of the `plugin_config_data` + /// field or the container image defined by the `plugin_config_uri` field. + /// + /// Output only. + core.String? 
pluginConfigDigest; + + /// URI of the plugin configuration stored in the Artifact Registry. + /// + /// The configuration is provided to the plugin at runtime through the + /// `ON_CONFIGURE` callback. The container image must contain only a single + /// file with the name `plugin.config`. When a new `WasmPluginVersion` + /// resource is created, the digest of the container image is saved in the + /// `plugin_config_digest` field. + core.String? pluginConfigUri; + + /// The timestamp when the resource was updated. + /// + /// Output only. + core.String? updateTime; + + WasmPluginVersionDetails({ + this.createTime, + this.description, + this.imageDigest, + this.imageUri, + this.labels, + this.pluginConfigData, + this.pluginConfigDigest, + this.pluginConfigUri, + this.updateTime, + }); + + WasmPluginVersionDetails.fromJson(core.Map json_) + : this( + createTime: json_['createTime'] as core.String?, + description: json_['description'] as core.String?, + imageDigest: json_['imageDigest'] as core.String?, + imageUri: json_['imageUri'] as core.String?, + labels: + (json_['labels'] as core.Map?)?.map( + (key, value) => core.MapEntry( + key, + value as core.String, + ), + ), + pluginConfigData: json_['pluginConfigData'] as core.String?, + pluginConfigDigest: json_['pluginConfigDigest'] as core.String?, + pluginConfigUri: json_['pluginConfigUri'] as core.String?, + updateTime: json_['updateTime'] as core.String?, + ); + + core.Map toJson() => { + if (createTime != null) 'createTime': createTime!, + if (description != null) 'description': description!, + if (imageDigest != null) 'imageDigest': imageDigest!, + if (imageUri != null) 'imageUri': imageUri!, + if (labels != null) 'labels': labels!, + if (pluginConfigData != null) 'pluginConfigData': pluginConfigData!, + if (pluginConfigDigest != null) + 'pluginConfigDigest': pluginConfigDigest!, + if (pluginConfigUri != null) 'pluginConfigUri': pluginConfigUri!, + if (updateTime != null) 'updateTime': updateTime!, + }; +} diff --git a/generated/googleapis/lib/notebooks/v1.dart b/generated/googleapis/lib/notebooks/v1.dart index cbeac0772..22c78f40f 100644 --- a/generated/googleapis/lib/notebooks/v1.dart +++ b/generated/googleapis/lib/notebooks/v1.dart @@ -1695,8 +1695,8 @@ class ProjectsLocationsOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// [request] - The metadata request object. /// @@ -3108,7 +3108,7 @@ class DiagnoseInstanceRequest { /// Required. DiagnosticConfig? diagnosticConfig; - /// Maxmium amount of time in minutes before the operation times out. + /// Maximum amount of time in minutes before the operation times out. /// /// Optional. core.int? timeoutMinutes; @@ -3140,7 +3140,7 @@ class DiagnoseRuntimeRequest { /// Required. DiagnosticConfig? diagnosticConfig; - /// Maxmium amount of time in minutes before the operation times out. + /// Maximum amount of time in minutes before the operation times out. /// /// Optional. core.int? timeoutMinutes; @@ -4030,7 +4030,7 @@ class Instance { core.Map? 
labels; /// The - /// [Compute Engine machine type](https://cloud.google.com/compute/docs/machine-types) + /// [Compute Engine machine type](https://cloud.google.com/compute/docs/machine-resource) /// of this instance. /// /// Required. @@ -5970,12 +5970,15 @@ class Schedule { /// - "STATE_UNSPECIFIED" : Unspecified state. /// - "ENABLED" : The job is executing normally. /// - "PAUSED" : The job is paused by the user. It will not execute. A user - /// can intentionally pause the job using PauseJobRequest. + /// can intentionally pause the job using + /// [Cloud Scheduler](https://cloud.google.com/scheduler/docs/creating#pause). /// - "DISABLED" : The job is disabled by the system due to error. The user /// cannot directly set a job to be disabled. /// - "UPDATE_FAILED" : The job state resulting from a failed - /// CloudScheduler.UpdateJob operation. To recover a job from this state, - /// retry CloudScheduler.UpdateJob until a successful response is received. + /// [CloudScheduler.UpdateJob](https://cloud.google.com/scheduler/docs/creating#edit) + /// operation. To recover a job from this state, retry + /// [CloudScheduler.UpdateJob](https://cloud.google.com/scheduler/docs/creating#edit) + /// until a successful response is received. /// - "INITIALIZING" : The schedule resource is being created. /// - "DELETING" : The schedule resource is being deleted. core.String? state; @@ -6191,7 +6194,7 @@ class SetInstanceLabelsRequest { /// Request for setting instance machine type. class SetInstanceMachineTypeRequest { /// The - /// [Compute Engine machine type](https://cloud.google.com/compute/docs/machine-types). + /// [Compute Engine machine type](https://cloud.google.com/compute/docs/machine-resource). /// /// Required. core.String? machineType; diff --git a/generated/googleapis/lib/notebooks/v2.dart b/generated/googleapis/lib/notebooks/v2.dart index f29fc5581..4fd7bbf74 100644 --- a/generated/googleapis/lib/notebooks/v2.dart +++ b/generated/googleapis/lib/notebooks/v2.dart @@ -1079,8 +1079,8 @@ class ProjectsLocationsOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// [request] - The metadata request object. /// @@ -1676,7 +1676,7 @@ class DiagnoseInstanceRequest { /// Required. DiagnosticConfig? diagnosticConfig; - /// Maxmium amount of time in minutes before the operation times out. + /// Maximum amount of time in minutes before the operation times out. /// /// Optional. core.int? timeoutMinutes; @@ -2130,6 +2130,12 @@ class Instance { /// Optional. core.bool? disableProxyAccess; + /// Flag that specifies that a notebook can be accessed with third party + /// identity provider. + /// + /// Optional. + core.bool? enableThirdPartyIdentity; + /// Compute Engine setup for the notebook. /// /// Uses notebook-defined fields. 
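The `enableThirdPartyIdentity` flag added in the hunk above is a plain optional bool on the generated notebooks v2 `Instance` model, so it round-trips through the generated `fromJson`/`toJson` helpers like any other field. A minimal sketch, assuming only the model code shown in this hunk and the package's usual import path:

// Hypothetical usage sketch for the new notebooks v2 field; not part of the
// generated diff itself.
import 'package:googleapis/notebooks/v2.dart';

void main() {
  // The flag is only serialized when set, matching the `toJson` change above.
  final instance = Instance()..enableThirdPartyIdentity = true;
  assert(instance.toJson()['enableThirdPartyIdentity'] == true);

  // Round-trip through the generated fromJson constructor.
  final copy = Instance.fromJson(instance.toJson());
  assert(copy.enableThirdPartyIdentity == true);
}

In a real request you would set this flag on the `Instance` passed to the instances create or patch methods, which this hunk does not show.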
@@ -2243,6 +2249,7 @@ class Instance { this.createTime, this.creator, this.disableProxyAccess, + this.enableThirdPartyIdentity, this.gceSetup, this.healthInfo, this.healthState, @@ -2264,6 +2271,8 @@ class Instance { createTime: json_['createTime'] as core.String?, creator: json_['creator'] as core.String?, disableProxyAccess: json_['disableProxyAccess'] as core.bool?, + enableThirdPartyIdentity: + json_['enableThirdPartyIdentity'] as core.bool?, gceSetup: json_.containsKey('gceSetup') ? GceSetup.fromJson( json_['gceSetup'] as core.Map) @@ -2306,6 +2315,8 @@ class Instance { if (creator != null) 'creator': creator!, if (disableProxyAccess != null) 'disableProxyAccess': disableProxyAccess!, + if (enableThirdPartyIdentity != null) + 'enableThirdPartyIdentity': enableThirdPartyIdentity!, if (gceSetup != null) 'gceSetup': gceSetup!, if (healthInfo != null) 'healthInfo': healthInfo!, if (healthState != null) 'healthState': healthState!, diff --git a/generated/googleapis/lib/ondemandscanning/v1.dart b/generated/googleapis/lib/ondemandscanning/v1.dart index 7a668559f..8211f8572 100644 --- a/generated/googleapis/lib/ondemandscanning/v1.dart +++ b/generated/googleapis/lib/ondemandscanning/v1.dart @@ -93,8 +93,8 @@ class ProjectsLocationsOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// Request parameters: /// diff --git a/generated/googleapis/lib/oracledatabase/v1.dart b/generated/googleapis/lib/oracledatabase/v1.dart index c6efe3ac6..624e9704e 100644 --- a/generated/googleapis/lib/oracledatabase/v1.dart +++ b/generated/googleapis/lib/oracledatabase/v1.dart @@ -1408,8 +1408,8 @@ class ProjectsLocationsOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// [request] - The metadata request object. /// @@ -3801,7 +3801,7 @@ class CloudVmClusterProperties { /// Operating system version of the image. /// - /// Output only. + /// Optional. core.String? systemVersion; /// Time zone of VM Cluster to set. @@ -4542,6 +4542,7 @@ class Entitlement { /// - "ACCOUNT_NOT_LINKED" : Account not linked. /// - "ACCOUNT_NOT_ACTIVE" : Account is linked but not active. /// - "ACTIVE" : Entitlement and Account are active. + /// - "ACCOUNT_SUSPENDED" : Account is suspended. core.String? state; Entitlement({ diff --git a/generated/googleapis/lib/orgpolicy/v2.dart b/generated/googleapis/lib/orgpolicy/v2.dart index fe9b64c31..bb66a4ddd 100644 --- a/generated/googleapis/lib/orgpolicy/v2.dart +++ b/generated/googleapis/lib/orgpolicy/v2.dart @@ -1500,6 +1500,9 @@ class GoogleCloudOrgpolicyV2Constraint { /// Shows if dry run is supported for this constraint or not. core.bool? supportsDryRun; + /// Shows if simulation is supported for this constraint or not. + core.bool? 
supportsSimulation; + GoogleCloudOrgpolicyV2Constraint({ this.booleanConstraint, this.constraintDefault, @@ -1508,6 +1511,7 @@ class GoogleCloudOrgpolicyV2Constraint { this.listConstraint, this.name, this.supportsDryRun, + this.supportsSimulation, }); GoogleCloudOrgpolicyV2Constraint.fromJson(core.Map json_) @@ -1527,6 +1531,7 @@ class GoogleCloudOrgpolicyV2Constraint { : null, name: json_['name'] as core.String?, supportsDryRun: json_['supportsDryRun'] as core.bool?, + supportsSimulation: json_['supportsSimulation'] as core.bool?, ); core.Map toJson() => { @@ -1537,6 +1542,8 @@ class GoogleCloudOrgpolicyV2Constraint { if (listConstraint != null) 'listConstraint': listConstraint!, if (name != null) 'name': name!, if (supportsDryRun != null) 'supportsDryRun': supportsDryRun!, + if (supportsSimulation != null) + 'supportsSimulation': supportsSimulation!, }; } @@ -2159,6 +2166,19 @@ class GoogleCloudOrgpolicyV2PolicySpecPolicyRule { /// only in policies for boolean constraints. core.bool? enforce; + /// Required for GMCs if parameters defined in constraints. + /// + /// Pass parameter values when policy enforcement is enabled. Ensure that + /// parameter value types match those defined in the constraint definition. + /// For example: { "allowedLocations" : \["us-east1", "us-west1"\], "allowAll" + /// : true } + /// + /// Optional. + /// + /// The values for Object must be JSON objects. It can consist of `num`, + /// `String`, `bool` and `null` as well as `Map` and `List` values. + core.Map? parameters; + /// List of values to be used for this policy rule. /// /// This field can be set only in policies for list constraints. @@ -2169,6 +2189,7 @@ class GoogleCloudOrgpolicyV2PolicySpecPolicyRule { this.condition, this.denyAll, this.enforce, + this.parameters, this.values, }); @@ -2181,6 +2202,9 @@ class GoogleCloudOrgpolicyV2PolicySpecPolicyRule { : null, denyAll: json_['denyAll'] as core.bool?, enforce: json_['enforce'] as core.bool?, + parameters: json_.containsKey('parameters') + ? json_['parameters'] as core.Map + : null, values: json_.containsKey('values') ? GoogleCloudOrgpolicyV2PolicySpecPolicyRuleStringValues.fromJson( json_['values'] as core.Map) @@ -2192,6 +2216,7 @@ class GoogleCloudOrgpolicyV2PolicySpecPolicyRule { if (condition != null) 'condition': condition!, if (denyAll != null) 'denyAll': denyAll!, if (enforce != null) 'enforce': enforce!, + if (parameters != null) 'parameters': parameters!, if (values != null) 'values': values!, }; } diff --git a/generated/googleapis/lib/osconfig/v1.dart b/generated/googleapis/lib/osconfig/v1.dart index 4ff0e2ec0..5d5bdda1c 100644 --- a/generated/googleapis/lib/osconfig/v1.dart +++ b/generated/googleapis/lib/osconfig/v1.dart @@ -923,8 +923,8 @@ class ProjectsLocationsOsPolicyAssignmentsOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// [request] - The metadata request object. /// @@ -3030,7 +3030,7 @@ class OSPolicyAssignmentInstanceFilter { } /// VM inventory details. -typedef OSPolicyAssignmentInstanceFilterInventory = $Shared12; +typedef OSPolicyAssignmentInstanceFilterInventory = $Shared13; /// Message representing label set. 
/// @@ -3395,7 +3395,7 @@ class OSPolicyAssignmentRollout { } /// Filtering criteria to select VMs based on inventory details. -typedef OSPolicyInventoryFilter = $Shared12; +typedef OSPolicyInventoryFilter = $Shared13; /// An OS policy resource is used to define the desired state configuration and /// provides a specific functionality like installing/removing packages, diff --git a/generated/googleapis/lib/parallelstore/v1.dart b/generated/googleapis/lib/parallelstore/v1.dart new file mode 100644 index 000000000..eb03c99cd --- /dev/null +++ b/generated/googleapis/lib/parallelstore/v1.dart @@ -0,0 +1,1236 @@ +// This is a generated file (see the discoveryapis_generator project). + +// ignore_for_file: camel_case_types +// ignore_for_file: comment_references +// ignore_for_file: deprecated_member_use_from_same_package +// ignore_for_file: doc_directive_unknown +// ignore_for_file: lines_longer_than_80_chars +// ignore_for_file: non_constant_identifier_names +// ignore_for_file: prefer_interpolation_to_compose_strings +// ignore_for_file: unintended_html_in_doc_comment +// ignore_for_file: unnecessary_brace_in_string_interps +// ignore_for_file: unnecessary_lambdas +// ignore_for_file: unnecessary_string_interpolations + +/// Parallelstore API - v1 +/// +/// For more information, see +/// +/// Create an instance of [ParallelstoreApi] to access these resources: +/// +/// - [ProjectsResource] +/// - [ProjectsLocationsResource] +/// - [ProjectsLocationsInstancesResource] +/// - [ProjectsLocationsOperationsResource] +library; + +import 'dart:async' as async; +import 'dart:convert' as convert; +import 'dart:core' as core; + +import 'package:_discoveryapis_commons/_discoveryapis_commons.dart' as commons; +import 'package:http/http.dart' as http; + +import '../shared.dart'; +import '../src/user_agent.dart'; + +export 'package:_discoveryapis_commons/_discoveryapis_commons.dart' + show ApiRequestError, DetailedApiRequestError; + +class ParallelstoreApi { + /// See, edit, configure, and delete your Google Cloud data and see the email + /// address for your Google Account. + static const cloudPlatformScope = + 'https://www.googleapis.com/auth/cloud-platform'; + + final commons.ApiRequester _requester; + + ProjectsResource get projects => ProjectsResource(_requester); + + ParallelstoreApi(http.Client client, + {core.String rootUrl = 'https://parallelstore.googleapis.com/', + core.String servicePath = ''}) + : _requester = + commons.ApiRequester(client, rootUrl, servicePath, requestHeaders); +} + +class ProjectsResource { + final commons.ApiRequester _requester; + + ProjectsLocationsResource get locations => + ProjectsLocationsResource(_requester); + + ProjectsResource(commons.ApiRequester client) : _requester = client; +} + +class ProjectsLocationsResource { + final commons.ApiRequester _requester; + + ProjectsLocationsInstancesResource get instances => + ProjectsLocationsInstancesResource(_requester); + ProjectsLocationsOperationsResource get operations => + ProjectsLocationsOperationsResource(_requester); + + ProjectsLocationsResource(commons.ApiRequester client) : _requester = client; + + /// Gets information about a location. + /// + /// Request parameters: + /// + /// [name] - Resource name for the location. + /// Value must have pattern `^projects/\[^/\]+/locations/\[^/\]+$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [Location]. 
+ /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future get( + core.String name, { + core.String? $fields, + }) async { + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name'); + + final response_ = await _requester.request( + url_, + 'GET', + queryParams: queryParams_, + ); + return Location.fromJson(response_ as core.Map); + } + + /// Lists information about the supported locations for this service. + /// + /// Request parameters: + /// + /// [name] - The resource that owns the locations collection, if applicable. + /// Value must have pattern `^projects/\[^/\]+$`. + /// + /// [filter] - A filter to narrow down results to a preferred subset. The + /// filtering language accepts strings like `"displayName=tokyo"`, and is + /// documented in more detail in \[AIP-160\](https://google.aip.dev/160). + /// + /// [pageSize] - The maximum number of results to return. If not set, the + /// service selects a default. + /// + /// [pageToken] - A page token received from the `next_page_token` field in + /// the response. Send that page token to receive the subsequent page. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [ListLocationsResponse]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future list( + core.String name, { + core.String? filter, + core.int? pageSize, + core.String? pageToken, + core.String? $fields, + }) async { + final queryParams_ = >{ + if (filter != null) 'filter': [filter], + if (pageSize != null) 'pageSize': ['${pageSize}'], + if (pageToken != null) 'pageToken': [pageToken], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name') + '/locations'; + + final response_ = await _requester.request( + url_, + 'GET', + queryParams: queryParams_, + ); + return ListLocationsResponse.fromJson( + response_ as core.Map); + } +} + +class ProjectsLocationsInstancesResource { + final commons.ApiRequester _requester; + + ProjectsLocationsInstancesResource(commons.ApiRequester client) + : _requester = client; + + /// Creates a Parallelstore instance in a given project and location. + /// + /// [request] - The metadata request object. + /// + /// Request parameters: + /// + /// [parent] - Required. The instance's project and location, in the format + /// `projects/{project}/locations/{location}`. Locations map to Google Cloud + /// zones; for example, `us-west1-b`. + /// Value must have pattern `^projects/\[^/\]+/locations/\[^/\]+$`. + /// + /// [instanceId] - Required. The name of the Parallelstore instance. * Must + /// contain only lowercase letters, numbers, and hyphens. * Must start with a + /// letter. * Must be between 1-63 characters. * Must end with a number or a + /// letter. * Must be unique within the customer project / location + /// + /// [requestId] - Optional. An optional request ID to identify requests. + /// Specify a unique request ID so that if you must retry your request, the + /// server will know to ignore the request if it has already been completed. 
+ /// The server will guarantee that for at least 60 minutes since the first + /// request. For example, consider a situation where you make an initial + /// request and t he request times out. If you make the request again with the + /// same request ID, the server can check if original operation with the same + /// request ID was received, and if so, will ignore the second request. This + /// prevents clients from accidentally creating duplicate commitments. The + /// request ID must be a valid UUID with the exception that zero UUID is not + /// supported (00000000-0000-0000-0000-000000000000). + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [Operation]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future create( + Instance request, + core.String parent, { + core.String? instanceId, + core.String? requestId, + core.String? $fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if (instanceId != null) 'instanceId': [instanceId], + if (requestId != null) 'requestId': [requestId], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/instances'; + + final response_ = await _requester.request( + url_, + 'POST', + body: body_, + queryParams: queryParams_, + ); + return Operation.fromJson(response_ as core.Map); + } + + /// Deletes a single instance. + /// + /// Request parameters: + /// + /// [name] - Required. Name of the resource + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/instances/\[^/\]+$`. + /// + /// [requestId] - Optional. An optional request ID to identify requests. + /// Specify a unique request ID so that if you must retry your request, the + /// server will know to ignore the request if it has already been completed. + /// The server will guarantee that for at least 60 minutes after the first + /// request. For example, consider a situation where you make an initial + /// request and t he request times out. If you make the request again with the + /// same request ID, the server can check if original operation with the same + /// request ID was received, and if so, will ignore the second request. This + /// prevents clients from accidentally creating duplicate commitments. The + /// request ID must be a valid UUID with the exception that zero UUID is not + /// supported (00000000-0000-0000-0000-000000000000). + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [Operation]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future delete( + core.String name, { + core.String? requestId, + core.String? $fields, + }) async { + final queryParams_ = >{ + if (requestId != null) 'requestId': [requestId], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name'); + + final response_ = await _requester.request( + url_, + 'DELETE', + queryParams: queryParams_, + ); + return Operation.fromJson(response_ as core.Map); + } + + /// Copies data from Parallelstore to Cloud Storage. 
+ /// + /// [request] - The metadata request object. + /// + /// Request parameters: + /// + /// [name] - Required. Name of the resource. + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/instances/\[^/\]+$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [Operation]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future exportData( + ExportDataRequest request, + core.String name, { + core.String? $fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name') + ':exportData'; + + final response_ = await _requester.request( + url_, + 'POST', + body: body_, + queryParams: queryParams_, + ); + return Operation.fromJson(response_ as core.Map); + } + + /// Gets details of a single instance. + /// + /// Request parameters: + /// + /// [name] - Required. The instance resource name, in the format + /// `projects/{project_id}/locations/{location}/instances/{instance_id}`. + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/instances/\[^/\]+$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [Instance]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future get( + core.String name, { + core.String? $fields, + }) async { + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name'); + + final response_ = await _requester.request( + url_, + 'GET', + queryParams: queryParams_, + ); + return Instance.fromJson(response_ as core.Map); + } + + /// Copies data from Cloud Storage to Parallelstore. + /// + /// [request] - The metadata request object. + /// + /// Request parameters: + /// + /// [name] - Required. Name of the resource. + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/instances/\[^/\]+$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [Operation]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future importData( + ImportDataRequest request, + core.String name, { + core.String? $fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name') + ':importData'; + + final response_ = await _requester.request( + url_, + 'POST', + body: body_, + queryParams: queryParams_, + ); + return Operation.fromJson(response_ as core.Map); + } + + /// Lists all instances in a given project and location. + /// + /// Request parameters: + /// + /// [parent] - Required. 
The project and location for which to retrieve + /// instance information, in the format + /// `projects/{project_id}/locations/{location}`. To retrieve instance + /// information for all locations, use "-" as the value of `{location}`. + /// Value must have pattern `^projects/\[^/\]+/locations/\[^/\]+$`. + /// + /// [filter] - Optional. Filtering results. + /// + /// [orderBy] - Optional. Hint for how to order the results. + /// + /// [pageSize] - Optional. Requested page size. Server may return fewer items + /// than requested. If unspecified, the server will pick an appropriate + /// default. + /// + /// [pageToken] - Optional. A token identifying a page of results the server + /// should return. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [ListInstancesResponse]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future list( + core.String parent, { + core.String? filter, + core.String? orderBy, + core.int? pageSize, + core.String? pageToken, + core.String? $fields, + }) async { + final queryParams_ = >{ + if (filter != null) 'filter': [filter], + if (orderBy != null) 'orderBy': [orderBy], + if (pageSize != null) 'pageSize': ['${pageSize}'], + if (pageToken != null) 'pageToken': [pageToken], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/instances'; + + final response_ = await _requester.request( + url_, + 'GET', + queryParams: queryParams_, + ); + return ListInstancesResponse.fromJson( + response_ as core.Map); + } + + /// Updates the parameters of a single instance. + /// + /// [request] - The metadata request object. + /// + /// Request parameters: + /// + /// [name] - Identifier. The resource name of the instance, in the format + /// `projects/{project}/locations/{location}/instances/{instance_id}`. + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/instances/\[^/\]+$`. + /// + /// [requestId] - Optional. An optional request ID to identify requests. + /// Specify a unique request ID so that if you must retry your request, the + /// server will know to ignore the request if it has already been completed. + /// The server will guarantee that for at least 60 minutes since the first + /// request. For example, consider a situation where you make an initial + /// request and t he request times out. If you make the request again with the + /// same request ID, the server can check if original operation with the same + /// request ID was received, and if so, will ignore the second request. This + /// prevents clients from accidentally creating duplicate commitments. The + /// request ID must be a valid UUID with the exception that zero UUID is not + /// supported (00000000-0000-0000-0000-000000000000). + /// + /// [updateMask] - Required. Mask of fields to update. Field mask is used to + /// specify the fields to be overwritten in the Instance resource by the + /// update. At least one path must be supplied in this field. The fields + /// specified in the update_mask are relative to the resource, not the full + /// request. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [Operation]. 
+ /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future patch( + Instance request, + core.String name, { + core.String? requestId, + core.String? updateMask, + core.String? $fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if (requestId != null) 'requestId': [requestId], + if (updateMask != null) 'updateMask': [updateMask], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name'); + + final response_ = await _requester.request( + url_, + 'PATCH', + body: body_, + queryParams: queryParams_, + ); + return Operation.fromJson(response_ as core.Map); + } +} + +class ProjectsLocationsOperationsResource { + final commons.ApiRequester _requester; + + ProjectsLocationsOperationsResource(commons.ApiRequester client) + : _requester = client; + + /// Starts asynchronous cancellation on a long-running operation. + /// + /// The server makes a best effort to cancel the operation, but success is not + /// guaranteed. If the server doesn't support this method, it returns + /// `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation + /// or other methods to check whether the cancellation succeeded or whether + /// the operation completed despite cancellation. On successful cancellation, + /// the operation is not deleted; instead, it becomes an operation with an + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. + /// + /// [request] - The metadata request object. + /// + /// Request parameters: + /// + /// [name] - The name of the operation resource to be cancelled. + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/operations/\[^/\]+$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [GoogleProtobufEmpty]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future cancel( + CancelOperationRequest request, + core.String name, { + core.String? $fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name') + ':cancel'; + + final response_ = await _requester.request( + url_, + 'POST', + body: body_, + queryParams: queryParams_, + ); + return GoogleProtobufEmpty.fromJson( + response_ as core.Map); + } + + /// Deletes a long-running operation. + /// + /// This method indicates that the client is no longer interested in the + /// operation result. It does not cancel the operation. If the server doesn't + /// support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. + /// + /// Request parameters: + /// + /// [name] - The name of the operation resource to be deleted. + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/operations/\[^/\]+$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [GoogleProtobufEmpty]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. 
+ /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future delete( + core.String name, { + core.String? $fields, + }) async { + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name'); + + final response_ = await _requester.request( + url_, + 'DELETE', + queryParams: queryParams_, + ); + return GoogleProtobufEmpty.fromJson( + response_ as core.Map); + } + + /// Gets the latest state of a long-running operation. + /// + /// Clients can use this method to poll the operation result at intervals as + /// recommended by the API service. + /// + /// Request parameters: + /// + /// [name] - The name of the operation resource. + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/operations/\[^/\]+$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [Operation]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future get( + core.String name, { + core.String? $fields, + }) async { + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name'); + + final response_ = await _requester.request( + url_, + 'GET', + queryParams: queryParams_, + ); + return Operation.fromJson(response_ as core.Map); + } + + /// Lists operations that match the specified filter in the request. + /// + /// If the server doesn't support this method, it returns `UNIMPLEMENTED`. + /// + /// Request parameters: + /// + /// [name] - The name of the operation's parent resource. + /// Value must have pattern `^projects/\[^/\]+/locations/\[^/\]+$`. + /// + /// [filter] - The standard list filter. + /// + /// [pageSize] - The standard list page size. + /// + /// [pageToken] - The standard list page token. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [ListOperationsResponse]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future list( + core.String name, { + core.String? filter, + core.int? pageSize, + core.String? pageToken, + core.String? $fields, + }) async { + final queryParams_ = >{ + if (filter != null) 'filter': [filter], + if (pageSize != null) 'pageSize': ['${pageSize}'], + if (pageToken != null) 'pageToken': [pageToken], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name') + '/operations'; + + final response_ = await _requester.request( + url_, + 'GET', + queryParams: queryParams_, + ); + return ListOperationsResponse.fromJson( + response_ as core.Map); + } +} + +/// The request message for Operations.CancelOperation. +typedef CancelOperationRequest = $Empty; + +/// Cloud Storage as the destination of a data transfer. +typedef DestinationGcsBucket = $GcsBucket; + +/// Parallelstore as the destination of a data transfer. +typedef DestinationParallelstore = $Parallelstore; + +/// Export data from Parallelstore to Cloud Storage. 
+class ExportDataRequest { + /// Cloud Storage destination. + DestinationGcsBucket? destinationGcsBucket; + + /// An optional request ID to identify requests. + /// + /// Specify a unique request ID so that if you must retry your request, the + /// server will know to ignore the request if it has already been completed. + /// The server will guarantee that for at least 60 minutes since the first + /// request. For example, consider a situation where you make an initial + /// request and t he request times out. If you make the request again with the + /// same request ID, the server can check if original operation with the same + /// request ID was received, and if so, will ignore the second request. This + /// prevents clients from accidentally creating duplicate commitments. The + /// request ID must be a valid UUID with the exception that zero UUID is not + /// supported (00000000-0000-0000-0000-000000000000). + /// + /// Optional. + core.String? requestId; + + /// User-specified Service Account (SA) credentials to be used when performing + /// the transfer. + /// + /// Use one of the following formats: * `{EMAIL_ADDRESS_OR_UNIQUE_ID}` * + /// `projects/{PROJECT_ID_OR_NUMBER}/serviceAccounts/{EMAIL_ADDRESS_OR_UNIQUE_ID}` + /// * `projects/-/serviceAccounts/{EMAIL_ADDRESS_OR_UNIQUE_ID}` If + /// unspecified, the Parallelstore service agent is used: + /// `service-@gcp-sa-parallelstore.iam.gserviceaccount.com` + /// + /// Optional. + core.String? serviceAccount; + + /// Parallelstore source. + SourceParallelstore? sourceParallelstore; + + ExportDataRequest({ + this.destinationGcsBucket, + this.requestId, + this.serviceAccount, + this.sourceParallelstore, + }); + + ExportDataRequest.fromJson(core.Map json_) + : this( + destinationGcsBucket: json_.containsKey('destinationGcsBucket') + ? DestinationGcsBucket.fromJson(json_['destinationGcsBucket'] + as core.Map) + : null, + requestId: json_['requestId'] as core.String?, + serviceAccount: json_['serviceAccount'] as core.String?, + sourceParallelstore: json_.containsKey('sourceParallelstore') + ? SourceParallelstore.fromJson(json_['sourceParallelstore'] + as core.Map) + : null, + ); + + core.Map toJson() => { + if (destinationGcsBucket != null) + 'destinationGcsBucket': destinationGcsBucket!, + if (requestId != null) 'requestId': requestId!, + if (serviceAccount != null) 'serviceAccount': serviceAccount!, + if (sourceParallelstore != null) + 'sourceParallelstore': sourceParallelstore!, + }; +} + +/// A generic empty message that you can re-use to avoid defining duplicated +/// empty messages in your APIs. +/// +/// A typical example is to use it as the request or the response type of an API +/// method. For instance: service Foo { rpc Bar(google.protobuf.Empty) returns +/// (google.protobuf.Empty); } +typedef GoogleProtobufEmpty = $Empty; + +/// Import data from Cloud Storage into a Parallelstore instance. +class ImportDataRequest { + /// Parallelstore destination. + DestinationParallelstore? destinationParallelstore; + + /// An optional request ID to identify requests. + /// + /// Specify a unique request ID so that if you must retry your request, the + /// server will know to ignore the request if it has already been completed. + /// The server will guarantee that for at least 60 minutes since the first + /// request. For example, consider a situation where you make an initial + /// request and t he request times out. 
If you make the request again with the + /// same request ID, the server can check if original operation with the same + /// request ID was received, and if so, will ignore the second request. This + /// prevents clients from accidentally creating duplicate commitments. The + /// request ID must be a valid UUID with the exception that zero UUID is not + /// supported (00000000-0000-0000-0000-000000000000). + /// + /// Optional. + core.String? requestId; + + /// User-specified service account credentials to be used when performing the + /// transfer. + /// + /// Use one of the following formats: * `{EMAIL_ADDRESS_OR_UNIQUE_ID}` * + /// `projects/{PROJECT_ID_OR_NUMBER}/serviceAccounts/{EMAIL_ADDRESS_OR_UNIQUE_ID}` + /// * `projects/-/serviceAccounts/{EMAIL_ADDRESS_OR_UNIQUE_ID}` If + /// unspecified, the Parallelstore service agent is used: + /// `service-@gcp-sa-parallelstore.iam.gserviceaccount.com` + /// + /// Optional. + core.String? serviceAccount; + + /// The Cloud Storage source bucket and, optionally, path inside the bucket. + SourceGcsBucket? sourceGcsBucket; + + ImportDataRequest({ + this.destinationParallelstore, + this.requestId, + this.serviceAccount, + this.sourceGcsBucket, + }); + + ImportDataRequest.fromJson(core.Map json_) + : this( + destinationParallelstore: + json_.containsKey('destinationParallelstore') + ? DestinationParallelstore.fromJson( + json_['destinationParallelstore'] + as core.Map) + : null, + requestId: json_['requestId'] as core.String?, + serviceAccount: json_['serviceAccount'] as core.String?, + sourceGcsBucket: json_.containsKey('sourceGcsBucket') + ? SourceGcsBucket.fromJson(json_['sourceGcsBucket'] + as core.Map) + : null, + ); + + core.Map toJson() => { + if (destinationParallelstore != null) + 'destinationParallelstore': destinationParallelstore!, + if (requestId != null) 'requestId': requestId!, + if (serviceAccount != null) 'serviceAccount': serviceAccount!, + if (sourceGcsBucket != null) 'sourceGcsBucket': sourceGcsBucket!, + }; +} + +/// A Parallelstore instance. +class Instance { + /// A list of IPv4 addresses used for client side configuration. + /// + /// Output only. + core.List? accessPoints; + + /// The instance's storage capacity in Gibibytes (GiB). + /// + /// Allowed values are between 12000 and 100000, in multiples of 4000; e.g., + /// 12000, 16000, 20000, ... + /// + /// Required. Immutable. + core.String? capacityGib; + + /// The time when the instance was created. + /// + /// Output only. + core.String? createTime; + + /// The version of DAOS software running in the instance. + /// + /// Output only. + core.String? daosVersion; + + /// The description of the instance. + /// + /// 2048 characters or less. + /// + /// Optional. + core.String? description; + + /// Stripe level for directories. + /// + /// Allowed values are: * `DIRECTORY_STRIPE_LEVEL_MIN`: recommended when + /// directories contain a small number of files. * + /// `DIRECTORY_STRIPE_LEVEL_BALANCED`: balances performance for workloads + /// involving a mix of small and large directories. * + /// `DIRECTORY_STRIPE_LEVEL_MAX`: recommended for directories with a large + /// number of files. + /// + /// Optional. 
+ /// Possible string values are: + /// - "DIRECTORY_STRIPE_LEVEL_UNSPECIFIED" : If not set, DirectoryStripeLevel + /// will default to DIRECTORY_STRIPE_LEVEL_MAX + /// - "DIRECTORY_STRIPE_LEVEL_MIN" : Minimum directory striping + /// - "DIRECTORY_STRIPE_LEVEL_BALANCED" : Medium directory striping + /// - "DIRECTORY_STRIPE_LEVEL_MAX" : Maximum directory striping + core.String? directoryStripeLevel; + + /// The ID of the IP address range being used by the instance's VPC network. + /// + /// This field is populated by the service and contains the value currently + /// used by the service. + /// + /// Output only. Immutable. + core.String? effectiveReservedIpRange; + + /// Stripe level for files. + /// + /// Allowed values are: * `FILE_STRIPE_LEVEL_MIN`: offers the best performance + /// for small size files. * `FILE_STRIPE_LEVEL_BALANCED`: balances performance + /// for workloads involving a mix of small and large files. * + /// `FILE_STRIPE_LEVEL_MAX`: higher throughput performance for larger files. + /// + /// Optional. + /// Possible string values are: + /// - "FILE_STRIPE_LEVEL_UNSPECIFIED" : If not set, FileStripeLevel will + /// default to FILE_STRIPE_LEVEL_BALANCED + /// - "FILE_STRIPE_LEVEL_MIN" : Minimum file striping + /// - "FILE_STRIPE_LEVEL_BALANCED" : Medium file striping + /// - "FILE_STRIPE_LEVEL_MAX" : Maximum file striping + core.String? fileStripeLevel; + + /// Cloud Labels are a flexible and lightweight mechanism for organizing cloud + /// resources into groups that reflect a customer's organizational needs and + /// deployment strategies. + /// + /// See https://cloud.google.com/resource-manager/docs/labels-overview for + /// details. + /// + /// Optional. + core.Map? labels; + + /// Identifier. + /// + /// The resource name of the instance, in the format + /// `projects/{project}/locations/{location}/instances/{instance_id}`. + core.String? name; + + /// The name of the Compute Engine + /// [VPC network](https://cloud.google.com/vpc/docs/vpc) to which the instance + /// is connected. + /// + /// Optional. Immutable. + core.String? network; + + /// The ID of the IP address range being used by the instance's VPC network. + /// + /// See + /// [Configure a VPC network](https://cloud.google.com/parallelstore/docs/vpc#create_and_configure_the_vpc). + /// If no ID is provided, all ranges are considered. + /// + /// Optional. Immutable. + core.String? reservedIpRange; + + /// The instance state. + /// + /// Output only. + /// Possible string values are: + /// - "STATE_UNSPECIFIED" : Not set. + /// - "CREATING" : The instance is being created. + /// - "ACTIVE" : The instance is available for use. + /// - "DELETING" : The instance is being deleted. + /// - "FAILED" : The instance is not usable. + /// - "UPGRADING" : The instance is being upgraded. + core.String? state; + + /// The time when the instance was updated. + /// + /// Output only. + core.String? updateTime; + + Instance({ + this.accessPoints, + this.capacityGib, + this.createTime, + this.daosVersion, + this.description, + this.directoryStripeLevel, + this.effectiveReservedIpRange, + this.fileStripeLevel, + this.labels, + this.name, + this.network, + this.reservedIpRange, + this.state, + this.updateTime, + }); + + Instance.fromJson(core.Map json_) + : this( + accessPoints: (json_['accessPoints'] as core.List?) 
+ ?.map((value) => value as core.String) + .toList(), + capacityGib: json_['capacityGib'] as core.String?, + createTime: json_['createTime'] as core.String?, + daosVersion: json_['daosVersion'] as core.String?, + description: json_['description'] as core.String?, + directoryStripeLevel: json_['directoryStripeLevel'] as core.String?, + effectiveReservedIpRange: + json_['effectiveReservedIpRange'] as core.String?, + fileStripeLevel: json_['fileStripeLevel'] as core.String?, + labels: + (json_['labels'] as core.Map?)?.map( + (key, value) => core.MapEntry( + key, + value as core.String, + ), + ), + name: json_['name'] as core.String?, + network: json_['network'] as core.String?, + reservedIpRange: json_['reservedIpRange'] as core.String?, + state: json_['state'] as core.String?, + updateTime: json_['updateTime'] as core.String?, + ); + + core.Map toJson() => { + if (accessPoints != null) 'accessPoints': accessPoints!, + if (capacityGib != null) 'capacityGib': capacityGib!, + if (createTime != null) 'createTime': createTime!, + if (daosVersion != null) 'daosVersion': daosVersion!, + if (description != null) 'description': description!, + if (directoryStripeLevel != null) + 'directoryStripeLevel': directoryStripeLevel!, + if (effectiveReservedIpRange != null) + 'effectiveReservedIpRange': effectiveReservedIpRange!, + if (fileStripeLevel != null) 'fileStripeLevel': fileStripeLevel!, + if (labels != null) 'labels': labels!, + if (name != null) 'name': name!, + if (network != null) 'network': network!, + if (reservedIpRange != null) 'reservedIpRange': reservedIpRange!, + if (state != null) 'state': state!, + if (updateTime != null) 'updateTime': updateTime!, + }; +} + +/// Response from ListInstances. +class ListInstancesResponse { + /// The list of Parallelstore instances. + core.List? instances; + + /// A token identifying a page of results the server should return. + core.String? nextPageToken; + + /// Locations that could not be reached. + core.List? unreachable; + + ListInstancesResponse({ + this.instances, + this.nextPageToken, + this.unreachable, + }); + + ListInstancesResponse.fromJson(core.Map json_) + : this( + instances: (json_['instances'] as core.List?) + ?.map((value) => Instance.fromJson( + value as core.Map)) + .toList(), + nextPageToken: json_['nextPageToken'] as core.String?, + unreachable: (json_['unreachable'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + ); + + core.Map toJson() => { + if (instances != null) 'instances': instances!, + if (nextPageToken != null) 'nextPageToken': nextPageToken!, + if (unreachable != null) 'unreachable': unreachable!, + }; +} + +/// The response message for Locations.ListLocations. +class ListLocationsResponse { + /// A list of locations that matches the specified filter in the request. + core.List? locations; + + /// The standard List next-page token. + core.String? nextPageToken; + + ListLocationsResponse({ + this.locations, + this.nextPageToken, + }); + + ListLocationsResponse.fromJson(core.Map json_) + : this( + locations: (json_['locations'] as core.List?) + ?.map((value) => Location.fromJson( + value as core.Map)) + .toList(), + nextPageToken: json_['nextPageToken'] as core.String?, + ); + + core.Map toJson() => { + if (locations != null) 'locations': locations!, + if (nextPageToken != null) 'nextPageToken': nextPageToken!, + }; +} + +/// The response message for Operations.ListOperations. +class ListOperationsResponse { + /// The standard List next-page token. + core.String? 
nextPageToken; + + /// A list of operations that matches the specified filter in the request. + core.List? operations; + + ListOperationsResponse({ + this.nextPageToken, + this.operations, + }); + + ListOperationsResponse.fromJson(core.Map json_) + : this( + nextPageToken: json_['nextPageToken'] as core.String?, + operations: (json_['operations'] as core.List?) + ?.map((value) => Operation.fromJson( + value as core.Map)) + .toList(), + ); + + core.Map toJson() => { + if (nextPageToken != null) 'nextPageToken': nextPageToken!, + if (operations != null) 'operations': operations!, + }; +} + +/// A resource that represents a Google Cloud location. +typedef Location = $Location00; + +/// This resource represents a long-running operation that is the result of a +/// network API call. +class Operation { + /// If the value is `false`, it means the operation is still in progress. + /// + /// If `true`, the operation is completed, and either `error` or `response` is + /// available. + core.bool? done; + + /// The error result of the operation in case of failure or cancellation. + Status? error; + + /// Service-specific metadata associated with the operation. + /// + /// It typically contains progress information and common metadata such as + /// create time. Some services might not provide such metadata. Any method + /// that returns a long-running operation should document the metadata type, + /// if any. + /// + /// The values for Object must be JSON objects. It can consist of `num`, + /// `String`, `bool` and `null` as well as `Map` and `List` values. + core.Map? metadata; + + /// The server-assigned name, which is only unique within the same service + /// that originally returns it. + /// + /// If you use the default HTTP mapping, the `name` should be a resource name + /// ending with `operations/{unique_id}`. + core.String? name; + + /// The normal, successful response of the operation. + /// + /// If the original method returns no data on success, such as `Delete`, the + /// response is `google.protobuf.Empty`. If the original method is standard + /// `Get`/`Create`/`Update`, the response should be the resource. For other + /// methods, the response should have the type `XxxResponse`, where `Xxx` is + /// the original method name. For example, if the original method name is + /// `TakeSnapshot()`, the inferred response type is `TakeSnapshotResponse`. + /// + /// The values for Object must be JSON objects. It can consist of `num`, + /// `String`, `bool` and `null` as well as `Map` and `List` values. + core.Map? response; + + Operation({ + this.done, + this.error, + this.metadata, + this.name, + this.response, + }); + + Operation.fromJson(core.Map json_) + : this( + done: json_['done'] as core.bool?, + error: json_.containsKey('error') + ? Status.fromJson( + json_['error'] as core.Map) + : null, + metadata: json_.containsKey('metadata') + ? json_['metadata'] as core.Map + : null, + name: json_['name'] as core.String?, + response: json_.containsKey('response') + ? json_['response'] as core.Map + : null, + ); + + core.Map toJson() => { + if (done != null) 'done': done!, + if (error != null) 'error': error!, + if (metadata != null) 'metadata': metadata!, + if (name != null) 'name': name!, + if (response != null) 'response': response!, + }; +} + +/// Cloud Storage as the source of a data transfer. +typedef SourceGcsBucket = $GcsBucket; + +/// Parallelstore as the source of a data transfer. 
+typedef SourceParallelstore = $Parallelstore; + +/// The `Status` type defines a logical error model that is suitable for +/// different programming environments, including REST APIs and RPC APIs. +/// +/// It is used by [gRPC](https://github.com/grpc). Each `Status` message +/// contains three pieces of data: error code, error message, and error details. +/// You can find out more about this error model and how to work with it in the +/// [API Design Guide](https://cloud.google.com/apis/design/errors). +typedef Status = $Status00; diff --git a/generated/googleapis/lib/paymentsresellersubscription/v1.dart b/generated/googleapis/lib/paymentsresellersubscription/v1.dart index 991c41415..9d1a0ef0a 100644 --- a/generated/googleapis/lib/paymentsresellersubscription/v1.dart +++ b/generated/googleapis/lib/paymentsresellersubscription/v1.dart @@ -24,6 +24,7 @@ /// - [PartnersProductsResource] /// - [PartnersPromotionsResource] /// - [PartnersSubscriptionsResource] +/// - [PartnersUserSessionsResource] library; import 'dart:async' as async; @@ -63,6 +64,8 @@ class PartnersResource { PartnersPromotionsResource(_requester); PartnersSubscriptionsResource get subscriptions => PartnersSubscriptionsResource(_requester); + PartnersUserSessionsResource get userSessions => + PartnersUserSessionsResource(_requester); PartnersResource(commons.ApiRequester client) : _requester = client; } @@ -625,6 +628,66 @@ class PartnersSubscriptionsResource { } } +class PartnersUserSessionsResource { + final commons.ApiRequester _requester; + + PartnersUserSessionsResource(commons.ApiRequester client) + : _requester = client; + + /// This API replaces the user-authorized, OAuth-consent-based APIs (Create, + /// Entitle). + /// + /// Generates a short-lived token for a user session based on the user intent. + /// You can use the session token to redirect the user to Google to finish the + /// signup flow. You can regenerate new session tokens repeatedly for the same + /// request if necessary, regardless of whether the previous tokens have + /// expired. + /// + /// [request] - The metadata request object. + /// + /// Request parameters: + /// + /// [parent] - Required. The parent, the partner that can resell. Format: + /// partners/{partner} + /// Value must have pattern `^partners/\[^/\]+$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a + /// [GoogleCloudPaymentsResellerSubscriptionV1GenerateUserSessionResponse]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future< + GoogleCloudPaymentsResellerSubscriptionV1GenerateUserSessionResponse> + generate( + GoogleCloudPaymentsResellerSubscriptionV1GenerateUserSessionRequest request, + core.String parent, { + core.String? $fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = + 'v1/' + core.Uri.encodeFull('$parent') + '/userSessions:generate'; + + final response_ = await _requester.request( + url_, + 'POST', + body: body_, + queryParams: queryParams_, + ); + return GoogleCloudPaymentsResellerSubscriptionV1GenerateUserSessionResponse + .fromJson(response_ as core.Map); + } +} +
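// Editor's note: an illustrative usage sketch, not part of the generated diff.
// It shows one way the new `userSessions:generate` call might be invoked,
// assuming the library's top-level `PaymentsResellerSubscriptionApi` entry
// point exposes the `partners` resource shown above; that entry-point name and
// the example resource names below are assumptions, not taken from this diff.

import 'package:googleapis/paymentsresellersubscription/v1.dart';

// Generates a short-lived user session token and prints it. The `api` object
// is assumed to have been built elsewhere with authorized credentials.
Future<void> generateSessionToken(PaymentsResellerSubscriptionApi api) async {
  final request =
      GoogleCloudPaymentsResellerSubscriptionV1GenerateUserSessionRequest(
    intentPayload: GoogleCloudPaymentsResellerSubscriptionV1IntentPayload(
      // Entitle an existing subscription; alternatively, set `createIntent`.
      entitleIntent:
          GoogleCloudPaymentsResellerSubscriptionV1EntitleSubscriptionIntent(
        // Hypothetical subscription resource name.
        name: 'partners/example-partner/subscriptions/example-subscription',
      ),
    ),
  );

  final response = await api.partners.userSessions.generate(
    request,
    'partners/example-partner',
  );

  // The returned short-lived token is what gets passed along when redirecting
  // the user to Google to finish the signup flow.
  print(response.userSession?.token);
  print(response.userSession?.expireTime);
}

 /// Describes the amount unit including the currency code.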
class GoogleCloudPaymentsResellerSubscriptionV1Amount { /// Amount in micros (1_000_000 micros = 1 currency unit) @@ -735,6 +798,51 @@ class GoogleCloudPaymentsResellerSubscriptionV1CancelSubscriptionResponse { }; } +/// Intent message for creating a Subscription resource. +class GoogleCloudPaymentsResellerSubscriptionV1CreateSubscriptionIntent { + /// The parent resource name, which is the identifier of the partner. + /// + /// Required. + core.String? parent; + + /// The Subscription to be created. + /// + /// Required. + GoogleCloudPaymentsResellerSubscriptionV1Subscription? subscription; + + /// Identifies the subscription resource on the Partner side. + /// + /// The value is restricted to 63 ASCII characters at the maximum. If a + /// subscription was previously created with the same subscription_id, we will + /// directly return that one. + /// + /// Required. + core.String? subscriptionId; + + GoogleCloudPaymentsResellerSubscriptionV1CreateSubscriptionIntent({ + this.parent, + this.subscription, + this.subscriptionId, + }); + + GoogleCloudPaymentsResellerSubscriptionV1CreateSubscriptionIntent.fromJson( + core.Map json_) + : this( + parent: json_['parent'] as core.String?, + subscription: json_.containsKey('subscription') + ? GoogleCloudPaymentsResellerSubscriptionV1Subscription.fromJson( + json_['subscription'] as core.Map) + : null, + subscriptionId: json_['subscriptionId'] as core.String?, + ); + + core.Map toJson() => { + if (parent != null) 'parent': parent!, + if (subscription != null) 'subscription': subscription!, + if (subscriptionId != null) 'subscriptionId': subscriptionId!, + }; +} + /// Describes the length of a period of a time. class GoogleCloudPaymentsResellerSubscriptionV1Duration { /// number of duration units to be included. @@ -766,8 +874,31 @@ class GoogleCloudPaymentsResellerSubscriptionV1Duration { }; } -/// LINT.IfChange Partner request for entitling the previously provisioned -/// subscription to an end user. +/// Intent for entitling the previously provisioned subscription to an end user. +class GoogleCloudPaymentsResellerSubscriptionV1EntitleSubscriptionIntent { + /// The name of the subscription resource that is entitled to the current end + /// user. + /// + /// Required. + core.String? name; + + GoogleCloudPaymentsResellerSubscriptionV1EntitleSubscriptionIntent({ + this.name, + }); + + GoogleCloudPaymentsResellerSubscriptionV1EntitleSubscriptionIntent.fromJson( + core.Map json_) + : this( + name: json_['name'] as core.String?, + ); + + core.Map toJson() => { + if (name != null) 'name': name!, + }; +} + +/// Partner request for entitling the previously provisioned subscription to an +/// end user. /// /// The end user identity is inferred from the request OAuth context. class GoogleCloudPaymentsResellerSubscriptionV1EntitleSubscriptionRequest { @@ -1096,6 +1227,55 @@ class GoogleCloudPaymentsResellerSubscriptionV1FiniteBillingCycleDetails { }; } +/// \[Preview only\] Request to generate a user session. +class GoogleCloudPaymentsResellerSubscriptionV1GenerateUserSessionRequest { + /// The user intent to generate the user session. + GoogleCloudPaymentsResellerSubscriptionV1IntentPayload? intentPayload; + + GoogleCloudPaymentsResellerSubscriptionV1GenerateUserSessionRequest({ + this.intentPayload, + }); + + GoogleCloudPaymentsResellerSubscriptionV1GenerateUserSessionRequest.fromJson( + core.Map json_) + : this( + intentPayload: json_.containsKey('intentPayload') + ? 
GoogleCloudPaymentsResellerSubscriptionV1IntentPayload.fromJson( + json_['intentPayload'] as core.Map) + : null, + ); + + core.Map toJson() => { + if (intentPayload != null) 'intentPayload': intentPayload!, + }; +} + +/// \[Preview only\] Response that contains the details for generated user +/// session. +class GoogleCloudPaymentsResellerSubscriptionV1GenerateUserSessionResponse { + /// The generated user session. + /// + /// The token size is proportional to the size of the intent payload. + GoogleCloudPaymentsResellerSubscriptionV1UserSession? userSession; + + GoogleCloudPaymentsResellerSubscriptionV1GenerateUserSessionResponse({ + this.userSession, + }); + + GoogleCloudPaymentsResellerSubscriptionV1GenerateUserSessionResponse.fromJson( + core.Map json_) + : this( + userSession: json_.containsKey('userSession') + ? GoogleCloudPaymentsResellerSubscriptionV1UserSession.fromJson( + json_['userSession'] as core.Map) + : null, + ); + + core.Map toJson() => { + if (userSession != null) 'userSession': userSession!, + }; +} + /// Payload specific to Google One products. class GoogleCloudPaymentsResellerSubscriptionV1GoogleOnePayload { /// Campaign attributed to sales of this subscription. @@ -1157,6 +1337,42 @@ class GoogleCloudPaymentsResellerSubscriptionV1GoogleOnePayload { }; } +/// The payload that describes the user intent. +class GoogleCloudPaymentsResellerSubscriptionV1IntentPayload { + /// The request to create a subscription. + GoogleCloudPaymentsResellerSubscriptionV1CreateSubscriptionIntent? + createIntent; + + /// The request to entitle a subscription. + GoogleCloudPaymentsResellerSubscriptionV1EntitleSubscriptionIntent? + entitleIntent; + + GoogleCloudPaymentsResellerSubscriptionV1IntentPayload({ + this.createIntent, + this.entitleIntent, + }); + + GoogleCloudPaymentsResellerSubscriptionV1IntentPayload.fromJson( + core.Map json_) + : this( + createIntent: json_.containsKey('createIntent') + ? GoogleCloudPaymentsResellerSubscriptionV1CreateSubscriptionIntent + .fromJson(json_['createIntent'] + as core.Map) + : null, + entitleIntent: json_.containsKey('entitleIntent') + ? GoogleCloudPaymentsResellerSubscriptionV1EntitleSubscriptionIntent + .fromJson(json_['entitleIntent'] + as core.Map) + : null, + ); + + core.Map toJson() => { + if (createIntent != null) 'createIntent': createIntent!, + if (entitleIntent != null) 'entitleIntent': entitleIntent!, + }; +} + /// Response that contains the products. class GoogleCloudPaymentsResellerSubscriptionV1ListProductsResponse { /// A token, which can be sent as `page_token` to retrieve the next page. @@ -1755,6 +1971,14 @@ class GoogleCloudPaymentsResellerSubscriptionV1Subscription { core.List? lineItems; + /// Describes the details of the migrated subscription. + /// + /// Only populated if this subscription is migrated from another system. + /// + /// Output only. + GoogleCloudPaymentsResellerSubscriptionV1SubscriptionMigrationDetails? + migrationDetails; + /// Identifier. /// /// Resource name of the subscription. It will have the format of @@ -1887,6 +2111,7 @@ class GoogleCloudPaymentsResellerSubscriptionV1Subscription { this.endUserEntitled, this.freeTrialEndTime, this.lineItems, + this.migrationDetails, this.name, this.partnerUserToken, this.processingState, @@ -1918,6 +2143,11 @@ class GoogleCloudPaymentsResellerSubscriptionV1Subscription { GoogleCloudPaymentsResellerSubscriptionV1SubscriptionLineItem .fromJson(value as core.Map)) .toList(), + migrationDetails: json_.containsKey('migrationDetails') + ? 
GoogleCloudPaymentsResellerSubscriptionV1SubscriptionMigrationDetails + .fromJson(json_['migrationDetails'] + as core.Map) + : null, name: json_['name'] as core.String?, partnerUserToken: json_['partnerUserToken'] as core.String?, processingState: json_['processingState'] as core.String?, @@ -1957,6 +2187,7 @@ class GoogleCloudPaymentsResellerSubscriptionV1Subscription { if (endUserEntitled != null) 'endUserEntitled': endUserEntitled!, if (freeTrialEndTime != null) 'freeTrialEndTime': freeTrialEndTime!, if (lineItems != null) 'lineItems': lineItems!, + if (migrationDetails != null) 'migrationDetails': migrationDetails!, if (name != null) 'name': name!, if (partnerUserToken != null) 'partnerUserToken': partnerUserToken!, if (processingState != null) 'processingState': processingState!, @@ -2259,6 +2490,30 @@ class GoogleCloudPaymentsResellerSubscriptionV1SubscriptionLineItemOneTimeRecurr }; } +/// Describes the details of the migrated subscription. +class GoogleCloudPaymentsResellerSubscriptionV1SubscriptionMigrationDetails { + /// The migrated subscription id in the legacy system. + /// + /// Output only. + core.String? migratedSubscriptionId; + + GoogleCloudPaymentsResellerSubscriptionV1SubscriptionMigrationDetails({ + this.migratedSubscriptionId, + }); + + GoogleCloudPaymentsResellerSubscriptionV1SubscriptionMigrationDetails.fromJson( + core.Map json_) + : this( + migratedSubscriptionId: + json_['migratedSubscriptionId'] as core.String?, + ); + + core.Map toJson() => { + if (migratedSubscriptionId != null) + 'migratedSubscriptionId': migratedSubscriptionId!, + }; +} + /// Describes the spec for one promotion. class GoogleCloudPaymentsResellerSubscriptionV1SubscriptionPromotionSpec { /// The duration of the free trial if the promotion is of type FREE_TRIAL. @@ -2394,6 +2649,50 @@ class GoogleCloudPaymentsResellerSubscriptionV1UndoCancelSubscriptionResponse { }; } +/// A user session contains a short-lived token that includes information +/// required to interact with the Google Payments Reseller Platform using the +/// following web endpoints. +/// +/// - A user session token should be generated dynamically for an authenticated +/// user. You should refrain from sharing a token directly with a user in an +/// unauthenticated context, such as SMS or email. - You can regenerate new +/// session tokens repeatedly for the same `generate` request if necessary, +/// regardless of whether the previous tokens have expired. You don't need to +/// worry about multiple sessions resulting in duplicate fulfillments; +/// deduplication is guaranteed by the same subscription id. Please refer to the \[Google Managed +/// Signup\](/payments/reseller/subscription/reference/index/User.Signup.Integration/Google.Managed.Signup.\(In.Preview\)) +/// documentation for additional integration details. +class GoogleCloudPaymentsResellerSubscriptionV1UserSession { + /// The time at which the user session expires. + /// + /// Output only. + core.String? expireTime; + + /// The encrypted token of the user session, including information about the + /// user's intent and request. + /// + /// This token should be provided when redirecting the user to Google. + /// + /// Output only. + core.String?
token; + + GoogleCloudPaymentsResellerSubscriptionV1UserSession({ + this.expireTime, + this.token, + }); + + GoogleCloudPaymentsResellerSubscriptionV1UserSession.fromJson(core.Map json_) + : this( + expireTime: json_['expireTime'] as core.String?, + token: json_['token'] as core.String?, + ); + + core.Map toJson() => { + if (expireTime != null) 'expireTime': expireTime!, + if (token != null) 'token': token!, + }; +} + /// Payload specific to Youtube products. class GoogleCloudPaymentsResellerSubscriptionV1YoutubePayload { /// The access expiration time for this line item. diff --git a/generated/googleapis/lib/places/v1.dart b/generated/googleapis/lib/places/v1.dart index 8c4d7c489..aa71c5651 100644 --- a/generated/googleapis/lib/places/v1.dart +++ b/generated/googleapis/lib/places/v1.dart @@ -532,14 +532,18 @@ class GoogleMapsPlacesV1AddressDescriptorLandmark { /// street entrances. core.String? spatialRelationship; - /// The straight line distance in meters between the target location and the - /// landmark. + /// The straight line distance, in meters, between the center point of the + /// target and the center point of the landmark. + /// + /// In some situations, this value can be longer than + /// `travel_distance_meters`. core.double? straightLineDistanceMeters; - /// The travel distance in meters along the road network if known. + /// The travel distance, in meters, along the road network from the target to + /// the landmark, if known. /// - /// This does not take into account the mode of transportation - /// (walking/driving). + /// This value does not take into account the mode of transportation, such as + /// walking, driving, or biking. core.double? travelDistanceMeters; /// A set of type tags for this landmark. @@ -625,6 +629,18 @@ class GoogleMapsPlacesV1AuthorAttribution { /// Request proto for AutocompletePlaces. class GoogleMapsPlacesV1AutocompletePlacesRequest { + /// Include pure service area businesses if the field is set to true. + /// + /// Pure service area business is a business that visits or delivers to + /// customers directly but does not serve customers at their business address. + /// For example, businesses like cleaning services or plumbers. Those + /// businesses do not have a physical address or location on Google Maps. + /// Places will not return fields including `location`, `plus_code`, and other + /// location related fields for these businesses. + /// + /// Optional. + core.bool? includePureServiceAreaBusinesses; + /// If true, the response will include both Place and query predictions. /// /// Otherwise the response will only return Place predictions. @@ -741,6 +757,7 @@ class GoogleMapsPlacesV1AutocompletePlacesRequest { core.String? sessionToken; GoogleMapsPlacesV1AutocompletePlacesRequest({ + this.includePureServiceAreaBusinesses, this.includeQueryPredictions, this.includedPrimaryTypes, this.includedRegionCodes, @@ -756,6 +773,8 @@ class GoogleMapsPlacesV1AutocompletePlacesRequest { GoogleMapsPlacesV1AutocompletePlacesRequest.fromJson(core.Map json_) : this( + includePureServiceAreaBusinesses: + json_['includePureServiceAreaBusinesses'] as core.bool?, includeQueryPredictions: json_['includeQueryPredictions'] as core.bool?, includedPrimaryTypes: (json_['includedPrimaryTypes'] as core.List?) 
@@ -786,6 +805,8 @@ class GoogleMapsPlacesV1AutocompletePlacesRequest { ); core.Map toJson() => { + if (includePureServiceAreaBusinesses != null) + 'includePureServiceAreaBusinesses': includePureServiceAreaBusinesses!, if (includeQueryPredictions != null) 'includeQueryPredictions': includeQueryPredictions!, if (includedPrimaryTypes != null) @@ -1699,6 +1720,12 @@ class GoogleMapsPlacesV1Photo { /// This photo's authors. core.List? authorAttributions; + /// A link where users can flag a problem with the photo. + core.String? flagContentUri; + + /// A link to show the photo on Google Maps. + core.String? googleMapsUri; + /// The maximum available height, in pixels. core.int? heightPx; @@ -1714,6 +1741,8 @@ class GoogleMapsPlacesV1Photo { GoogleMapsPlacesV1Photo({ this.authorAttributions, + this.flagContentUri, + this.googleMapsUri, this.heightPx, this.name, this.widthPx, @@ -1725,6 +1754,8 @@ class GoogleMapsPlacesV1Photo { ?.map((value) => GoogleMapsPlacesV1AuthorAttribution.fromJson( value as core.Map)) .toList(), + flagContentUri: json_['flagContentUri'] as core.String?, + googleMapsUri: json_['googleMapsUri'] as core.String?, heightPx: json_['heightPx'] as core.int?, name: json_['name'] as core.String?, widthPx: json_['widthPx'] as core.int?, @@ -1733,6 +1764,8 @@ class GoogleMapsPlacesV1Photo { core.Map toJson() => { if (authorAttributions != null) 'authorAttributions': authorAttributions!, + if (flagContentUri != null) 'flagContentUri': flagContentUri!, + if (googleMapsUri != null) 'googleMapsUri': googleMapsUri!, if (heightPx != null) 'heightPx': heightPx!, if (name != null) 'name': name!, if (widthPx != null) 'widthPx': widthPx!, @@ -1819,6 +1852,9 @@ class GoogleMapsPlacesV1Place { /// - "CLOSED_PERMANENTLY" : The establishment is permanently closed. core.String? businessStatus; + /// List of places in which the current place is located. + core.List? containingPlaces; + /// Specifies if the business supports curbside pickup. core.bool? curbsidePickup; @@ -1887,6 +1923,9 @@ class GoogleMapsPlacesV1Place { /// Place is suitable for watching sports. core.bool? goodForWatchingSports; + /// Links to trigger different Google Maps actions. + GoogleMapsPlacesV1PlaceGoogleMapsLinks? googleMapsLinks; + /// A URL providing more information about this place. core.String? googleMapsUri; @@ -1952,6 +1991,9 @@ class GoogleMapsPlacesV1Place { /// - "PRICE_LEVEL_VERY_EXPENSIVE" : Place provides very expensive services. core.String? priceLevel; + /// The price range associated with a Place. + GoogleMapsPlacesV1PriceRange? priceRange; + /// The primary type of the given result. /// /// This type must one of the Places API supported types. For example, @@ -1968,10 +2010,23 @@ class GoogleMapsPlacesV1Place { /// https://developers.google.com/maps/documentation/places/web-service/place-types GoogleTypeLocalizedText? primaryTypeDisplayName; + /// Indicates whether the place is a pure service area business. + /// + /// Pure service area business is a business that visits or delivers to + /// customers directly but does not serve customers at their business address. + /// For example, businesses like cleaning services or plumbers. Those + /// businesses may not have a physical address or location on Google Maps. + core.bool? pureServiceAreaBusiness; + /// A rating between 1.0 and 5.0, based on user reviews of this place. core.double? rating; /// The regular hours of operation. + /// + /// Note that if a place is always open (24 hours), the `close` field will not + /// be set. 
Clients can rely on always open (24 hours) being represented as an + /// `open` period containing day with value `0`, hour with value `0`, and + /// minute with value `0`. GoogleMapsPlacesV1PlaceOpeningHours? regularOpeningHours; /// Contains an array of entries for information about regular secondary hours @@ -2072,6 +2127,7 @@ class GoogleMapsPlacesV1Place { this.areaSummary, this.attributions, this.businessStatus, + this.containingPlaces, this.curbsidePickup, this.currentOpeningHours, this.currentSecondaryOpeningHours, @@ -2086,6 +2142,7 @@ class GoogleMapsPlacesV1Place { this.goodForChildren, this.goodForGroups, this.goodForWatchingSports, + this.googleMapsLinks, this.googleMapsUri, this.iconBackgroundColor, this.iconMaskBaseUri, @@ -2102,8 +2159,10 @@ class GoogleMapsPlacesV1Place { this.photos, this.plusCode, this.priceLevel, + this.priceRange, this.primaryType, this.primaryTypeDisplayName, + this.pureServiceAreaBusiness, this.rating, this.regularOpeningHours, this.regularSecondaryOpeningHours, @@ -2157,6 +2216,10 @@ class GoogleMapsPlacesV1Place { value as core.Map)) .toList(), businessStatus: json_['businessStatus'] as core.String?, + containingPlaces: (json_['containingPlaces'] as core.List?) + ?.map((value) => GoogleMapsPlacesV1PlaceContainingPlace.fromJson( + value as core.Map)) + .toList(), curbsidePickup: json_['curbsidePickup'] as core.bool?, currentOpeningHours: json_.containsKey('currentOpeningHours') ? GoogleMapsPlacesV1PlaceOpeningHours.fromJson( @@ -2196,6 +2259,11 @@ class GoogleMapsPlacesV1Place { goodForChildren: json_['goodForChildren'] as core.bool?, goodForGroups: json_['goodForGroups'] as core.bool?, goodForWatchingSports: json_['goodForWatchingSports'] as core.bool?, + googleMapsLinks: json_.containsKey('googleMapsLinks') + ? GoogleMapsPlacesV1PlaceGoogleMapsLinks.fromJson( + json_['googleMapsLinks'] + as core.Map) + : null, googleMapsUri: json_['googleMapsUri'] as core.String?, iconBackgroundColor: json_['iconBackgroundColor'] as core.String?, iconMaskBaseUri: json_['iconMaskBaseUri'] as core.String?, @@ -2230,11 +2298,17 @@ class GoogleMapsPlacesV1Place { json_['plusCode'] as core.Map) : null, priceLevel: json_['priceLevel'] as core.String?, + priceRange: json_.containsKey('priceRange') + ? GoogleMapsPlacesV1PriceRange.fromJson( + json_['priceRange'] as core.Map) + : null, primaryType: json_['primaryType'] as core.String?, primaryTypeDisplayName: json_.containsKey('primaryTypeDisplayName') ? GoogleTypeLocalizedText.fromJson(json_['primaryTypeDisplayName'] as core.Map) : null, + pureServiceAreaBusiness: + json_['pureServiceAreaBusiness'] as core.bool?, rating: (json_['rating'] as core.num?)?.toDouble(), regularOpeningHours: json_.containsKey('regularOpeningHours') ? 
GoogleMapsPlacesV1PlaceOpeningHours.fromJson( @@ -2290,6 +2364,7 @@ class GoogleMapsPlacesV1Place { if (areaSummary != null) 'areaSummary': areaSummary!, if (attributions != null) 'attributions': attributions!, if (businessStatus != null) 'businessStatus': businessStatus!, + if (containingPlaces != null) 'containingPlaces': containingPlaces!, if (curbsidePickup != null) 'curbsidePickup': curbsidePickup!, if (currentOpeningHours != null) 'currentOpeningHours': currentOpeningHours!, @@ -2307,6 +2382,7 @@ class GoogleMapsPlacesV1Place { if (goodForGroups != null) 'goodForGroups': goodForGroups!, if (goodForWatchingSports != null) 'goodForWatchingSports': goodForWatchingSports!, + if (googleMapsLinks != null) 'googleMapsLinks': googleMapsLinks!, if (googleMapsUri != null) 'googleMapsUri': googleMapsUri!, if (iconBackgroundColor != null) 'iconBackgroundColor': iconBackgroundColor!, @@ -2326,9 +2402,12 @@ class GoogleMapsPlacesV1Place { if (photos != null) 'photos': photos!, if (plusCode != null) 'plusCode': plusCode!, if (priceLevel != null) 'priceLevel': priceLevel!, + if (priceRange != null) 'priceRange': priceRange!, if (primaryType != null) 'primaryType': primaryType!, if (primaryTypeDisplayName != null) 'primaryTypeDisplayName': primaryTypeDisplayName!, + if (pureServiceAreaBusiness != null) + 'pureServiceAreaBusiness': pureServiceAreaBusiness!, if (rating != null) 'rating': rating!, if (regularOpeningHours != null) 'regularOpeningHours': regularOpeningHours!, @@ -2462,8 +2541,12 @@ class GoogleMapsPlacesV1PlaceAreaSummary { /// Each block has a separate topic about the area. core.List? contentBlocks; + /// A link where users can flag a problem with the summary. + core.String? flagContentUri; + GoogleMapsPlacesV1PlaceAreaSummary({ this.contentBlocks, + this.flagContentUri, }); GoogleMapsPlacesV1PlaceAreaSummary.fromJson(core.Map json_) @@ -2472,10 +2555,12 @@ class GoogleMapsPlacesV1PlaceAreaSummary { ?.map((value) => GoogleMapsPlacesV1ContentBlock.fromJson( value as core.Map)) .toList(), + flagContentUri: json_['flagContentUri'] as core.String?, ); core.Map toJson() => { if (contentBlocks != null) 'contentBlocks': contentBlocks!, + if (flagContentUri != null) 'flagContentUri': flagContentUri!, }; } @@ -2504,6 +2589,31 @@ class GoogleMapsPlacesV1PlaceAttribution { }; } +/// Info about the place in which this place is located. +class GoogleMapsPlacesV1PlaceContainingPlace { + /// The place id of the place in which this place is located. + core.String? id; + + /// The resource name of the place in which this place is located. + core.String? name; + + GoogleMapsPlacesV1PlaceContainingPlace({ + this.id, + this.name, + }); + + GoogleMapsPlacesV1PlaceContainingPlace.fromJson(core.Map json_) + : this( + id: json_['id'] as core.String?, + name: json_['name'] as core.String?, + ); + + core.Map toJson() => { + if (id != null) 'id': id!, + if (name != null) 'name': name!, + }; +} + /// Experimental: See /// https://developers.google.com/maps/documentation/places/web-service/experimental/places-generative /// for more details. @@ -2513,15 +2623,23 @@ class GoogleMapsPlacesV1PlaceGenerativeSummary { /// The detailed description of the place. GoogleTypeLocalizedText? description; + /// A link where users can flag a problem with the description summary. + core.String? descriptionFlagContentUri; + /// The overview of the place. GoogleTypeLocalizedText? overview; + /// A link where users can flag a problem with the overview summary. + core.String? 
overviewFlagContentUri; + /// References that are used to generate the summary description. GoogleMapsPlacesV1References? references; GoogleMapsPlacesV1PlaceGenerativeSummary({ this.description, + this.descriptionFlagContentUri, this.overview, + this.overviewFlagContentUri, this.references, }); @@ -2531,10 +2649,14 @@ class GoogleMapsPlacesV1PlaceGenerativeSummary { ? GoogleTypeLocalizedText.fromJson( json_['description'] as core.Map) : null, + descriptionFlagContentUri: + json_['descriptionFlagContentUri'] as core.String?, overview: json_.containsKey('overview') ? GoogleTypeLocalizedText.fromJson( json_['overview'] as core.Map) : null, + overviewFlagContentUri: + json_['overviewFlagContentUri'] as core.String?, references: json_.containsKey('references') ? GoogleMapsPlacesV1References.fromJson( json_['references'] as core.Map) @@ -2543,13 +2665,86 @@ class GoogleMapsPlacesV1PlaceGenerativeSummary { core.Map toJson() => { if (description != null) 'description': description!, + if (descriptionFlagContentUri != null) + 'descriptionFlagContentUri': descriptionFlagContentUri!, if (overview != null) 'overview': overview!, + if (overviewFlagContentUri != null) + 'overviewFlagContentUri': overviewFlagContentUri!, if (references != null) 'references': references!, }; } +/// Links to trigger different Google Maps actions. +class GoogleMapsPlacesV1PlaceGoogleMapsLinks { + /// A link to show the directions to the place. + /// + /// The link only populates the destination location and uses the default + /// travel mode `DRIVE`. + core.String? directionsUri; + + /// A link to show photos of this place. + /// + /// This link is currently not supported on Google Maps Mobile and only works + /// on the web version of Google Maps. + core.String? photosUri; + + /// A link to show this place. + core.String? placeUri; + + /// A link to show reviews of this place. + /// + /// This link is currently not supported on Google Maps Mobile and only works + /// on the web version of Google Maps. + core.String? reviewsUri; + + /// A link to write a review for this place. + /// + /// This link is currently not supported on Google Maps Mobile and only works + /// on the web version of Google Maps. + core.String? writeAReviewUri; + + GoogleMapsPlacesV1PlaceGoogleMapsLinks({ + this.directionsUri, + this.photosUri, + this.placeUri, + this.reviewsUri, + this.writeAReviewUri, + }); + + GoogleMapsPlacesV1PlaceGoogleMapsLinks.fromJson(core.Map json_) + : this( + directionsUri: json_['directionsUri'] as core.String?, + photosUri: json_['photosUri'] as core.String?, + placeUri: json_['placeUri'] as core.String?, + reviewsUri: json_['reviewsUri'] as core.String?, + writeAReviewUri: json_['writeAReviewUri'] as core.String?, + ); + + core.Map toJson() => { + if (directionsUri != null) 'directionsUri': directionsUri!, + if (photosUri != null) 'photosUri': photosUri!, + if (placeUri != null) 'placeUri': placeUri!, + if (reviewsUri != null) 'reviewsUri': reviewsUri!, + if (writeAReviewUri != null) 'writeAReviewUri': writeAReviewUri!, + }; +} + /// Information about business hour of the place. class GoogleMapsPlacesV1PlaceOpeningHours { + /// The next time the current opening hours period ends up to 7 days in the + /// future. + /// + /// This field is only populated if the opening hours period is active at the + /// time of serving the request. + core.String? nextCloseTime; + + /// The next time the current opening hours period starts up to 7 days in the + /// future. 
+ /// + /// This field is only populated if the opening hours period is not active at + /// the time of serving the request. + core.String? nextOpenTime; + /// Whether the opening hours period is currently active. /// /// For regular opening hours and current opening hours, this field means @@ -2601,6 +2796,8 @@ class GoogleMapsPlacesV1PlaceOpeningHours { core.List? weekdayDescriptions; GoogleMapsPlacesV1PlaceOpeningHours({ + this.nextCloseTime, + this.nextOpenTime, this.openNow, this.periods, this.secondaryHoursType, @@ -2610,6 +2807,8 @@ class GoogleMapsPlacesV1PlaceOpeningHours { GoogleMapsPlacesV1PlaceOpeningHours.fromJson(core.Map json_) : this( + nextCloseTime: json_['nextCloseTime'] as core.String?, + nextOpenTime: json_['nextOpenTime'] as core.String?, openNow: json_['openNow'] as core.bool?, periods: (json_['periods'] as core.List?) ?.map((value) => @@ -2628,6 +2827,8 @@ class GoogleMapsPlacesV1PlaceOpeningHours { ); core.Map toJson() => { + if (nextCloseTime != null) 'nextCloseTime': nextCloseTime!, + if (nextOpenTime != null) 'nextOpenTime': nextOpenTime!, if (openNow != null) 'openNow': openNow!, if (periods != null) 'periods': periods!, if (secondaryHoursType != null) @@ -2679,14 +2880,14 @@ class GoogleMapsPlacesV1PlaceOpeningHoursPeriodPoint { /// 0 is Sunday, 1 is Monday, etc. core.int? day; - /// The hour in 2 digits. + /// The hour in 24 hour format. /// - /// Ranges from 00 to 23. + /// Ranges from 0 to 23. core.int? hour; - /// The minute in 2 digits. + /// The minute. /// - /// Ranges from 00 to 59. + /// Ranges from 0 to 59. core.int? minute; /// Whether or not this endpoint was truncated. @@ -2914,6 +3115,44 @@ class GoogleMapsPlacesV1Polyline { }; } +/// The price range associated with a Place. +/// +/// `end_price` could be unset, which indicates a range without upper bound +/// (e.g. "More than $100"). +class GoogleMapsPlacesV1PriceRange { + /// The high end of the price range (exclusive). + /// + /// Price should be lower than this amount. + GoogleTypeMoney? endPrice; + + /// The low end of the price range (inclusive). + /// + /// Price should be at or above this amount. + GoogleTypeMoney? startPrice; + + GoogleMapsPlacesV1PriceRange({ + this.endPrice, + this.startPrice, + }); + + GoogleMapsPlacesV1PriceRange.fromJson(core.Map json_) + : this( + endPrice: json_.containsKey('endPrice') + ? GoogleTypeMoney.fromJson( + json_['endPrice'] as core.Map) + : null, + startPrice: json_.containsKey('startPrice') + ? GoogleTypeMoney.fromJson( + json_['startPrice'] as core.Map) + : null, + ); + + core.Map toJson() => { + if (endPrice != null) 'endPrice': endPrice!, + if (startPrice != null) 'startPrice': startPrice!, + }; +} + /// Experimental: See /// https://developers.google.com/maps/documentation/places/web-service/experimental/places-generative /// for more details. @@ -2955,6 +3194,12 @@ class GoogleMapsPlacesV1Review { /// This review's author. GoogleMapsPlacesV1AuthorAttribution? authorAttribution; + /// A link where users can flag a problem with the review. + core.String? flagContentUri; + + /// A link to show the review on Google Maps. + core.String? googleMapsUri; + /// A reference representing this place review which may be used to look up /// this place review again (also called the API "resource" name: /// `places/{place_id}/reviews/{review}`). 
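// Editor's note: an illustrative sketch, not part of the generated diff. It
// reads a few of the Places fields added above (`nextCloseTime`,
// `nextOpenTime`, `priceRange`, `googleMapsLinks`) from an already-decoded
// Place response body; the `placeJson` map, and how it is fetched, are
// hypothetical.

import 'package:googleapis/places/v1.dart';

void describePlace(Map<String, dynamic> placeJson) {
  final place = GoogleMapsPlacesV1Place.fromJson(placeJson);

  final hours = place.regularOpeningHours;
  if (hours != null && hours.openNow == true) {
    // nextCloseTime is only populated while an opening-hours period is active.
    print('Open now, closes at: ${hours.nextCloseTime}');
  } else {
    // nextOpenTime is only populated while no opening-hours period is active.
    print('Closed, next opens at: ${hours?.nextOpenTime}');
  }

  // endPrice may be unset for a range without an upper bound.
  final priceRange = place.priceRange;
  if (priceRange != null) {
    print('Price range: ${priceRange.toJson()}');
  }

  // New deep links into Google Maps, e.g. for a directions button.
  print('Directions: ${place.googleMapsLinks?.directionsUri}');
}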
@@ -2978,6 +3223,8 @@ class GoogleMapsPlacesV1Review { GoogleMapsPlacesV1Review({ this.authorAttribution, + this.flagContentUri, + this.googleMapsUri, this.name, this.originalText, this.publishTime, @@ -2993,6 +3240,8 @@ class GoogleMapsPlacesV1Review { json_['authorAttribution'] as core.Map) : null, + flagContentUri: json_['flagContentUri'] as core.String?, + googleMapsUri: json_['googleMapsUri'] as core.String?, name: json_['name'] as core.String?, originalText: json_.containsKey('originalText') ? GoogleTypeLocalizedText.fromJson( @@ -3010,6 +3259,8 @@ class GoogleMapsPlacesV1Review { core.Map toJson() => { if (authorAttribution != null) 'authorAttribution': authorAttribution!, + if (flagContentUri != null) 'flagContentUri': flagContentUri!, + if (googleMapsUri != null) 'googleMapsUri': googleMapsUri!, if (name != null) 'name': name!, if (originalText != null) 'originalText': originalText!, if (publishTime != null) 'publishTime': publishTime!, @@ -3180,6 +3431,16 @@ class GoogleMapsPlacesV1RoutingParameters { /// `searchAlongRouteParameters.polyline.encodedPolyline` parameter in the /// request causes an error. class GoogleMapsPlacesV1RoutingSummary { + /// A link to show directions on Google Maps using the waypoints from the + /// given routing summary. + /// + /// The route generated by this link is not guaranteed to be the same as the + /// route used to generate the routing summary. The link uses information + /// provided in the request, from fields including `routingParameters` and + /// `searchAlongRouteParameters` when applicable, to generate the directions + /// link. + core.String? directionsUri; + /// The legs of the trip. /// /// When you calculate travel duration and distance from a set origin, `legs` @@ -3190,11 +3451,13 @@ class GoogleMapsPlacesV1RoutingSummary { core.List? legs; GoogleMapsPlacesV1RoutingSummary({ + this.directionsUri, this.legs, }); GoogleMapsPlacesV1RoutingSummary.fromJson(core.Map json_) : this( + directionsUri: json_['directionsUri'] as core.String?, legs: (json_['legs'] as core.List?) ?.map((value) => GoogleMapsPlacesV1RoutingSummaryLeg.fromJson( value as core.Map)) @@ -3202,6 +3465,7 @@ class GoogleMapsPlacesV1RoutingSummary { ); core.Map toJson() => { + if (directionsUri != null) 'directionsUri': directionsUri!, if (legs != null) 'legs': legs!, }; } @@ -3468,6 +3732,18 @@ class GoogleMapsPlacesV1SearchTextRequest { /// Optional. GoogleMapsPlacesV1SearchTextRequestEVOptions? evOptions; + /// Include pure service area businesses if the field is set to true. + /// + /// Pure service area business is a business that visits or delivers to + /// customers directly but does not serve customers at their business address. + /// For example, businesses like cleaning services or plumbers. Those + /// businesses do not have a physical address or location on Google Maps. + /// Places will not return fields including `location`, `plus_code`, and other + /// location related fields for these businesses. + /// + /// Optional. + core.bool? includePureServiceAreaBusinesses; + /// The requested place type. /// /// Full list of types supported: @@ -3599,6 +3875,7 @@ class GoogleMapsPlacesV1SearchTextRequest { GoogleMapsPlacesV1SearchTextRequest({ this.evOptions, + this.includePureServiceAreaBusinesses, this.includedType, this.languageCode, this.locationBias, @@ -3623,6 +3900,8 @@ class GoogleMapsPlacesV1SearchTextRequest { ? 
GoogleMapsPlacesV1SearchTextRequestEVOptions.fromJson( json_['evOptions'] as core.Map) : null, + includePureServiceAreaBusinesses: + json_['includePureServiceAreaBusinesses'] as core.bool?, includedType: json_['includedType'] as core.String?, languageCode: json_['languageCode'] as core.String?, locationBias: json_.containsKey('locationBias') @@ -3661,6 +3940,8 @@ class GoogleMapsPlacesV1SearchTextRequest { core.Map toJson() => { if (evOptions != null) 'evOptions': evOptions!, + if (includePureServiceAreaBusinesses != null) + 'includePureServiceAreaBusinesses': includePureServiceAreaBusinesses!, if (includedType != null) 'includedType': includedType!, if (languageCode != null) 'languageCode': languageCode!, if (locationBias != null) 'locationBias': locationBias!, @@ -3862,11 +4143,16 @@ class GoogleMapsPlacesV1SearchTextResponse { /// places if requested. core.List? routingSummaries; + /// A link allows the user to search with the same text query as specified in + /// the request on Google Maps. + core.String? searchUri; + GoogleMapsPlacesV1SearchTextResponse({ this.contextualContents, this.nextPageToken, this.places, this.routingSummaries, + this.searchUri, }); GoogleMapsPlacesV1SearchTextResponse.fromJson(core.Map json_) @@ -3884,6 +4170,7 @@ class GoogleMapsPlacesV1SearchTextResponse { ?.map((value) => GoogleMapsPlacesV1RoutingSummary.fromJson( value as core.Map)) .toList(), + searchUri: json_['searchUri'] as core.String?, ); core.Map toJson() => { @@ -3892,6 +4179,7 @@ class GoogleMapsPlacesV1SearchTextResponse { if (nextPageToken != null) 'nextPageToken': nextPageToken!, if (places != null) 'places': places!, if (routingSummaries != null) 'routingSummaries': routingSummaries!, + if (searchUri != null) 'searchUri': searchUri!, }; } diff --git a/generated/googleapis/lib/playintegrity/v1.dart b/generated/googleapis/lib/playintegrity/v1.dart index 53dbb835b..0ca08b52c 100644 --- a/generated/googleapis/lib/playintegrity/v1.dart +++ b/generated/googleapis/lib/playintegrity/v1.dart @@ -420,25 +420,67 @@ class DecodeIntegrityTokenResponse { }; } +/// Contains information about the device for which the integrity token was +/// generated, e.g. Android SDK version. +class DeviceAttributes { + /// Android SDK version of the device, as defined in the public Android + /// documentation: + /// https://developer.android.com/reference/android/os/Build.VERSION_CODES. + /// + /// It won't be set if a necessary requirement was missed. For example + /// DeviceIntegrity did not meet the minimum bar. + core.int? sdkVersion; + + DeviceAttributes({ + this.sdkVersion, + }); + + DeviceAttributes.fromJson(core.Map json_) + : this( + sdkVersion: json_['sdkVersion'] as core.int?, + ); + + core.Map toJson() => { + if (sdkVersion != null) 'sdkVersion': sdkVersion!, + }; +} + /// Contains the device attestation information. class DeviceIntegrity { + /// Attributes of the device where the integrity token was generated. + DeviceAttributes? deviceAttributes; + /// Details about the device recall bits set by the developer. DeviceRecall? deviceRecall; /// Details about the integrity of the device the app is running on. core.List? deviceRecognitionVerdict; + /// Contains legacy details about the integrity of the device the app is + /// running on. + /// + /// Only for devices with Android version T or higher and only for apps opted + /// in to the new verdicts. Only available during the transition period to the + /// new verdicts system and will be removed afterwards. + core.List? 
legacyDeviceRecognitionVerdict; + /// Details about the device activity of the device the app is running on. RecentDeviceActivity? recentDeviceActivity; DeviceIntegrity({ + this.deviceAttributes, this.deviceRecall, this.deviceRecognitionVerdict, + this.legacyDeviceRecognitionVerdict, this.recentDeviceActivity, }); DeviceIntegrity.fromJson(core.Map json_) : this( + deviceAttributes: json_.containsKey('deviceAttributes') + ? DeviceAttributes.fromJson(json_['deviceAttributes'] + as core.Map) + : null, deviceRecall: json_.containsKey('deviceRecall') ? DeviceRecall.fromJson( json_['deviceRecall'] as core.Map) @@ -447,6 +489,10 @@ class DeviceIntegrity { (json_['deviceRecognitionVerdict'] as core.List?) ?.map((value) => value as core.String) .toList(), + legacyDeviceRecognitionVerdict: + (json_['legacyDeviceRecognitionVerdict'] as core.List?) + ?.map((value) => value as core.String) + .toList(), recentDeviceActivity: json_.containsKey('recentDeviceActivity') ? RecentDeviceActivity.fromJson(json_['recentDeviceActivity'] as core.Map) @@ -454,9 +500,12 @@ class DeviceIntegrity { ); core.Map toJson() => { + if (deviceAttributes != null) 'deviceAttributes': deviceAttributes!, if (deviceRecall != null) 'deviceRecall': deviceRecall!, if (deviceRecognitionVerdict != null) 'deviceRecognitionVerdict': deviceRecognitionVerdict!, + if (legacyDeviceRecognitionVerdict != null) + 'legacyDeviceRecognitionVerdict': legacyDeviceRecognitionVerdict!, if (recentDeviceActivity != null) 'recentDeviceActivity': recentDeviceActivity!, }; diff --git a/generated/googleapis/lib/privateca/v1.dart b/generated/googleapis/lib/privateca/v1.dart index 58c1f3ac5..c4e09c06c 100644 --- a/generated/googleapis/lib/privateca/v1.dart +++ b/generated/googleapis/lib/privateca/v1.dart @@ -510,8 +510,8 @@ class ProjectsLocationsCaPoolsResource { /// /// Request parameters: /// - /// [name] - Output only. Identifier. The resource name for this CaPool in the - /// format `projects / * /locations / * /caPools / * `. + /// [name] - Identifier. The resource name for this CaPool in the format + /// `projects / * /locations / * /caPools / * `. /// Value must have pattern /// `^projects/\[^/\]+/locations/\[^/\]+/caPools/\[^/\]+$`. /// @@ -1096,9 +1096,9 @@ class ProjectsLocationsCaPoolsCertificateAuthoritiesResource { /// /// Request parameters: /// - /// [name] - Output only. Identifier. The resource name for this - /// CertificateAuthority in the format `projects / * /locations / * /caPools / - /// * /certificateAuthorities / * `. + /// [name] - Identifier. The resource name for this CertificateAuthority in + /// the format `projects / * /locations / * /caPools / * + /// /certificateAuthorities / * `. /// Value must have pattern /// `^projects/\[^/\]+/locations/\[^/\]+/caPools/\[^/\]+/certificateAuthorities/\[^/\]+$`. /// @@ -1364,7 +1364,7 @@ class ProjectsLocationsCaPoolsCertificateAuthoritiesCertificateRevocationListsRe /// /// Request parameters: /// - /// [name] - Output only. The resource name for this CertificateRevocationList + /// [name] - Identifier. The resource name for this CertificateRevocationList /// in the format `projects / * /locations / * /caPools / * /// certificateAuthorities / * / certificateRevocationLists / * `. /// Value must have pattern @@ -1538,7 +1538,7 @@ class ProjectsLocationsCaPoolsCertificatesResource { /// /// [certificateId] - Optional. It must be unique within a location and match /// the regular expression `[a-zA-Z0-9_-]{1,63}`. 
This field is required when - /// using a CertificateAuthority in the Enterprise CertificateAuthority.Tier, + /// using a CertificateAuthority in the Enterprise CertificateAuthority.tier, /// but is optional and its value is ignored otherwise. /// /// [issuingCertificateAuthorityId] - Optional. The resource ID of the @@ -1718,7 +1718,7 @@ class ProjectsLocationsCaPoolsCertificatesResource { /// /// Request parameters: /// - /// [name] - Output only. The resource name for this Certificate in the format + /// [name] - Identifier. The resource name for this Certificate in the format /// `projects / * /locations / * /caPools / * /certificates / * `. /// Value must have pattern /// `^projects/\[^/\]+/locations/\[^/\]+/caPools/\[^/\]+/certificates/\[^/\]+$`. @@ -2096,8 +2096,8 @@ class ProjectsLocationsCertificateTemplatesResource { /// /// Request parameters: /// - /// [name] - Output only. The resource name for this CertificateTemplate in - /// the format `projects / * /locations / * /certificateTemplates / * `. + /// [name] - Identifier. The resource name for this CertificateTemplate in the + /// format `projects / * /locations / * /certificateTemplates / * `. /// Value must have pattern /// `^projects/\[^/\]+/locations/\[^/\]+/certificateTemplates/\[^/\]+$`. /// @@ -2263,8 +2263,8 @@ class ProjectsLocationsOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// [request] - The metadata request object. /// @@ -2779,8 +2779,6 @@ class CaPool { /// /// The resource name for this CaPool in the format `projects / * /locations / /// * /caPools / * `. - /// - /// Output only. core.String? name; /// The PublishingOptions to follow when issuing Certificates from any @@ -2890,10 +2888,10 @@ class Certificate { /// Required. Immutable. core.String? lifetime; + /// Identifier. + /// /// The resource name for this Certificate in the format `projects / * /// /locations / * /caPools / * /certificates / * `. - /// - /// Output only. core.String? name; /// The pem-encoded, signed X.509 certificate. @@ -3097,8 +3095,6 @@ class CertificateAuthority { /// /// The resource name for this CertificateAuthority in the format `projects / /// * /locations / * /caPools / * /certificateAuthorities / * `. - /// - /// Output only. core.String? name; /// This CertificateAuthority's certificate chain, including the current @@ -3602,11 +3598,11 @@ class CertificateRevocationList { /// Optional. core.Map? labels; + /// Identifier. + /// /// The resource name for this CertificateRevocationList in the format /// `projects / * /locations / * /caPools / * certificateAuthorities / * / /// certificateRevocationLists / * `. - /// - /// Output only. core.String? name; /// The PEM-encoded X.509 CRL. @@ -3736,10 +3732,10 @@ class CertificateTemplate { /// Optional. core.String? maximumLifetime; + /// Identifier. + /// /// The resource name for this CertificateTemplate in the format `projects / * /// /locations / * /certificateTemplates / * `. - /// - /// Output only. core.String? 
name; /// Describes the set of X.509 extensions that may appear in a Certificate @@ -4391,7 +4387,7 @@ class ListCaPoolsResponse { /// A token to retrieve next page of results. /// - /// Pass this value in ListCertificateAuthoritiesRequest.next_page_token to + /// Pass this value in ListCertificateAuthoritiesRequest.page_token to /// retrieve the next page of results. core.String? nextPageToken; @@ -4430,7 +4426,7 @@ class ListCertificateAuthoritiesResponse { /// A token to retrieve next page of results. /// - /// Pass this value in ListCertificateAuthoritiesRequest.next_page_token to + /// Pass this value in ListCertificateAuthoritiesRequest.page_token to /// retrieve the next page of results. core.String? nextPageToken; @@ -4472,8 +4468,8 @@ class ListCertificateRevocationListsResponse { /// A token to retrieve next page of results. /// - /// Pass this value in ListCertificateRevocationListsRequest.next_page_token - /// to retrieve the next page of results. + /// Pass this value in ListCertificateRevocationListsRequest.page_token to + /// retrieve the next page of results. core.String? nextPageToken; /// A list of locations (e.g. "us-west1") that could not be reached. @@ -4513,8 +4509,8 @@ class ListCertificateTemplatesResponse { /// A token to retrieve next page of results. /// - /// Pass this value in ListCertificateTemplatesRequest.next_page_token to - /// retrieve the next page of results. + /// Pass this value in ListCertificateTemplatesRequest.page_token to retrieve + /// the next page of results. core.String? nextPageToken; /// A list of locations (e.g. "us-west1") that could not be reached. @@ -4553,8 +4549,8 @@ class ListCertificatesResponse { /// A token to retrieve next page of results. /// - /// Pass this value in ListCertificatesRequest.next_page_token to retrieve the - /// next page of results. + /// Pass this value in ListCertificatesRequest.page_token to retrieve the next + /// page of results. core.String? nextPageToken; /// A list of locations (e.g. "us-west1") that could not be reached. diff --git a/generated/googleapis/lib/pubsublite/v1.dart b/generated/googleapis/lib/pubsublite/v1.dart index b84895d60..8f1215af7 100644 --- a/generated/googleapis/lib/pubsublite/v1.dart +++ b/generated/googleapis/lib/pubsublite/v1.dart @@ -117,8 +117,8 @@ class AdminProjectsLocationsOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// [request] - The metadata request object. /// diff --git a/generated/googleapis/lib/rapidmigrationassessment/v1.dart b/generated/googleapis/lib/rapidmigrationassessment/v1.dart index 229817d8d..8d8eb47c6 100644 --- a/generated/googleapis/lib/rapidmigrationassessment/v1.dart +++ b/generated/googleapis/lib/rapidmigrationassessment/v1.dart @@ -656,8 +656,8 @@ class ProjectsLocationsOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. 
+ /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// [request] - The metadata request object. /// diff --git a/generated/googleapis/lib/recaptchaenterprise/v1.dart b/generated/googleapis/lib/recaptchaenterprise/v1.dart index 522c0b993..c2f11b6aa 100644 --- a/generated/googleapis/lib/recaptchaenterprise/v1.dart +++ b/generated/googleapis/lib/recaptchaenterprise/v1.dart @@ -1772,15 +1772,13 @@ class GoogleCloudRecaptchaenterpriseV1Event { /// /// Optional. /// Possible string values are: - /// - "FRAUD_PREVENTION_UNSPECIFIED" : Default, unspecified setting. If opted - /// in for automatic detection, `fraud_prevention_assessment` is returned - /// based on the request. Otherwise, `fraud_prevention_assessment` is returned - /// if `transaction_data` is present in the `Event` and Fraud Prevention is - /// enabled in the Google Cloud console. + /// - "FRAUD_PREVENTION_UNSPECIFIED" : Default, unspecified setting. + /// `fraud_prevention_assessment` is returned if `transaction_data` is present + /// in `Event` and Fraud Prevention is enabled in the Google Cloud console. /// - "ENABLED" : Enable Fraud Prevention for this assessment, if Fraud /// Prevention is enabled in the Google Cloud console. /// - "DISABLED" : Disable Fraud Prevention for this assessment, regardless of - /// opt-in status or Google Cloud console settings. + /// Google Cloud console settings. core.String? fraudPrevention; /// Deprecated: use `user_info.account_id` instead. @@ -3104,7 +3102,7 @@ class GoogleCloudRecaptchaenterpriseV1RelatedAccountGroupMembership { }; } -/// The removeIpOverride request message. +/// The RemoveIpOverride request message. class GoogleCloudRecaptchaenterpriseV1RemoveIpOverrideRequest { /// IP override to be removed from the key. /// @@ -3192,7 +3190,7 @@ class GoogleCloudRecaptchaenterpriseV1RetrieveLegacySecretKeyResponse { /// Risk analysis result for an event. class GoogleCloudRecaptchaenterpriseV1RiskAnalysis { - /// Challenge information for SCORE_AND_CHALLENGE keys + /// Challenge information for SCORE_AND_CHALLENGE and INVISIBLE keys /// /// Output only. /// Possible string values are: diff --git a/generated/googleapis/lib/redis/v1.dart b/generated/googleapis/lib/redis/v1.dart index 0fd3e6999..a78dbe3ae 100644 --- a/generated/googleapis/lib/redis/v1.dart +++ b/generated/googleapis/lib/redis/v1.dart @@ -22,6 +22,8 @@ /// /// - [ProjectsResource] /// - [ProjectsLocationsResource] +/// - [ProjectsLocationsBackupCollectionsResource] +/// - [ProjectsLocationsBackupCollectionsBackupsResource] /// - [ProjectsLocationsClustersResource] /// - [ProjectsLocationsInstancesResource] /// - [ProjectsLocationsOperationsResource] @@ -70,6 +72,8 @@ class ProjectsResource { class ProjectsLocationsResource { final commons.ApiRequester _requester; + ProjectsLocationsBackupCollectionsResource get backupCollections => + ProjectsLocationsBackupCollectionsResource(_requester); ProjectsLocationsClustersResource get clusters => ProjectsLocationsClustersResource(_requester); ProjectsLocationsInstancesResource get instances => @@ -167,12 +171,350 @@ class ProjectsLocationsResource { } } +class ProjectsLocationsBackupCollectionsResource { + final commons.ApiRequester _requester; + + ProjectsLocationsBackupCollectionsBackupsResource get backups => + ProjectsLocationsBackupCollectionsBackupsResource(_requester); + + ProjectsLocationsBackupCollectionsResource(commons.ApiRequester client) + : _requester = client; + + /// Get a backup collection. 
+ /// + /// Request parameters: + /// + /// [name] - Required. Redis backupCollection resource name using the form: + /// `projects/{project_id}/locations/{location_id}/backupCollections/{backup_collection_id}` + /// where `location_id` refers to a GCP region. + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/backupCollections/\[^/\]+$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [BackupCollection]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future get( + core.String name, { + core.String? $fields, + }) async { + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name'); + + final response_ = await _requester.request( + url_, + 'GET', + queryParams: queryParams_, + ); + return BackupCollection.fromJson( + response_ as core.Map); + } + + /// Lists all backup collections owned by a consumer project in either the + /// specified location (region) or all locations. + /// + /// If `location_id` is specified as `-` (wildcard), then all regions + /// available to the project are queried, and the results are aggregated. + /// + /// Request parameters: + /// + /// [parent] - Required. The resource name of the backupCollection location + /// using the form: `projects/{project_id}/locations/{location_id}` where + /// `location_id` refers to a GCP region. + /// Value must have pattern `^projects/\[^/\]+/locations/\[^/\]+$`. + /// + /// [pageSize] - Optional. The maximum number of items to return. If not + /// specified, a default value of 1000 will be used by the service. Regardless + /// of the page_size value, the response may include a partial list and a + /// caller should only rely on response's `next_page_token` to determine if + /// there are more clusters left to be queried. + /// + /// [pageToken] - Optional. The `next_page_token` value returned from a + /// previous \[ListBackupCollections\] request, if any. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [ListBackupCollectionsResponse]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future list( + core.String parent, { + core.int? pageSize, + core.String? pageToken, + core.String? $fields, + }) async { + final queryParams_ = >{ + if (pageSize != null) 'pageSize': ['${pageSize}'], + if (pageToken != null) 'pageToken': [pageToken], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/backupCollections'; + + final response_ = await _requester.request( + url_, + 'GET', + queryParams: queryParams_, + ); + return ListBackupCollectionsResponse.fromJson( + response_ as core.Map); + } +} + +class ProjectsLocationsBackupCollectionsBackupsResource { + final commons.ApiRequester _requester; + + ProjectsLocationsBackupCollectionsBackupsResource(commons.ApiRequester client) + : _requester = client; + + /// Deletes a specific backup. + /// + /// Request parameters: + /// + /// [name] - Required. 
Redis backup resource name using the form: + /// `projects/{project_id}/locations/{location_id}/backupCollections/{backup_collection_id}/backups/{backup_id}` + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/backupCollections/\[^/\]+/backups/\[^/\]+$`. + /// + /// [requestId] - Optional. Idempotent request UUID. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [Operation]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future delete( + core.String name, { + core.String? requestId, + core.String? $fields, + }) async { + final queryParams_ = >{ + if (requestId != null) 'requestId': [requestId], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name'); + + final response_ = await _requester.request( + url_, + 'DELETE', + queryParams: queryParams_, + ); + return Operation.fromJson(response_ as core.Map); + } + + /// Exports a specific backup to a customer target Cloud Storage URI. + /// + /// [request] - The metadata request object. + /// + /// Request parameters: + /// + /// [name] - Required. Redis backup resource name using the form: + /// `projects/{project_id}/locations/{location_id}/backupCollections/{backup_collection_id}/backups/{backup_id}` + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/backupCollections/\[^/\]+/backups/\[^/\]+$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [Operation]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future export( + ExportBackupRequest request, + core.String name, { + core.String? $fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name') + ':export'; + + final response_ = await _requester.request( + url_, + 'POST', + body: body_, + queryParams: queryParams_, + ); + return Operation.fromJson(response_ as core.Map); + } + + /// Gets the details of a specific backup. + /// + /// Request parameters: + /// + /// [name] - Required. Redis backup resource name using the form: + /// `projects/{project_id}/locations/{location_id}/backupCollections/{backup_collection_id}/backups/{backup_id}` + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/backupCollections/\[^/\]+/backups/\[^/\]+$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [Backup]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future get( + core.String name, { + core.String? 
$fields, + }) async { + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name'); + + final response_ = await _requester.request( + url_, + 'GET', + queryParams: queryParams_, + ); + return Backup.fromJson(response_ as core.Map); + } + + /// Lists all backups owned by a backup collection. + /// + /// Request parameters: + /// + /// [parent] - Required. The resource name of the backupCollection using the + /// form: + /// `projects/{project_id}/locations/{location_id}/backupCollections/{backup_collection_id}` + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/backupCollections/\[^/\]+$`. + /// + /// [pageSize] - Optional. The maximum number of items to return. If not + /// specified, a default value of 1000 will be used by the service. Regardless + /// of the page_size value, the response may include a partial list and a + /// caller should only rely on response's `next_page_token` to determine if + /// there are more clusters left to be queried. + /// + /// [pageToken] - Optional. The `next_page_token` value returned from a + /// previous \[ListBackupCollections\] request, if any. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [ListBackupsResponse]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future list( + core.String parent, { + core.int? pageSize, + core.String? pageToken, + core.String? $fields, + }) async { + final queryParams_ = >{ + if (pageSize != null) 'pageSize': ['${pageSize}'], + if (pageToken != null) 'pageToken': [pageToken], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/backups'; + + final response_ = await _requester.request( + url_, + 'GET', + queryParams: queryParams_, + ); + return ListBackupsResponse.fromJson( + response_ as core.Map); + } +} + class ProjectsLocationsClustersResource { final commons.ApiRequester _requester; ProjectsLocationsClustersResource(commons.ApiRequester client) : _requester = client; + /// Backup Redis Cluster. + /// + /// If this is the first time a backup is being created, a backup collection + /// will be created at the backend, and this backup belongs to this + /// collection. Both collection and backup will have a resource name. Backup + /// will be executed for each shard. A replica (primary if nonHA) will be + /// selected to perform the execution. Backup call will be rejected if there + /// is an ongoing backup or update operation. Be aware that during preview, if + /// the cluster's internal software version is too old, critical update will + /// be performed before actual backup. Once the internal software version is + /// updated to the minimum version required by the backup feature, subsequent + /// backups will not require critical update. After preview, there will be no + /// critical update needed for backup. + /// + /// [request] - The metadata request object. + /// + /// Request parameters: + /// + /// [name] - Required. Redis cluster resource name using the form: + /// `projects/{project_id}/locations/{location_id}/clusters/{cluster_id}` + /// where `location_id` refers to a GCP region. + /// Value must have pattern + /// `^projects/\[^/\]+/locations/\[^/\]+/clusters/\[^/\]+$`. 
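// Illustrative usage sketch (editorial; not part of the generated diff) for
// the `backup` RPC documented above: trigger an on-demand backup and poll the
// returned long-running operation. The `CloudRedisApi` entry point and the
// standard `operations.get` method are assumed rather than shown in this hunk;
// the cluster name, backup id, and TTL are placeholders.
import 'package:googleapis/redis/v1.dart';

Future<void> backupCluster(CloudRedisApi api) async {
  final request = BackupClusterRequest(
    backupId: 'nightly-20241209', // Optional; the server generates one if omitted.
    ttl: '2592000s', // Optional; keep this backup for roughly 30 days.
  );
  var operation = await api.projects.locations.clusters.backup(
    request,
    'projects/my-project/locations/us-central1/clusters/my-cluster',
  );
  // Poll until the backend marks the operation as done.
  while (!(operation.done ?? false)) {
    await Future.delayed(const Duration(seconds: 10));
    operation = await api.projects.locations.operations.get(operation.name!);
  }
  print(operation.error == null ? 'Backup finished' : 'Backup failed');
}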
+ /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [Operation]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future backup( + BackupClusterRequest request, + core.String name, { + core.String? $fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name') + ':backup'; + + final response_ = await _requester.request( + url_, + 'POST', + body: body_, + queryParams: queryParams_, + ); + return Operation.fromJson(response_ as core.Map); + } + /// Creates a Redis cluster based on the specified properties. /// /// The creation is executed asynchronously and callers may check the returned @@ -1059,8 +1401,8 @@ class ProjectsLocationsOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// Request parameters: /// @@ -1230,28 +1572,346 @@ class ProjectsLocationsOperationsResource { class AOFConfig { /// fsync configuration. /// - /// Optional. - /// Possible string values are: - /// - "APPEND_FSYNC_UNSPECIFIED" : Not set. Default: EVERYSEC - /// - "NO" : Never fsync. Normally Linux will flush data every 30 seconds with - /// this configuration, but it's up to the kernel's exact tuning. - /// - "EVERYSEC" : fsync every second. Fast enough, and you may lose 1 second - /// of data if there is a disaster - /// - "ALWAYS" : fsync every time new write commands are appended to the AOF. - /// It has the best data loss protection at the cost of performance - core.String? appendFsync; + /// Optional. + /// Possible string values are: + /// - "APPEND_FSYNC_UNSPECIFIED" : Not set. Default: EVERYSEC + /// - "NO" : Never fsync. Normally Linux will flush data every 30 seconds with + /// this configuration, but it's up to the kernel's exact tuning. + /// - "EVERYSEC" : fsync every second. Fast enough, and you may lose 1 second + /// of data if there is a disaster + /// - "ALWAYS" : fsync every time new write commands are appended to the AOF. + /// It has the best data loss protection at the cost of performance + core.String? appendFsync; + + AOFConfig({ + this.appendFsync, + }); + + AOFConfig.fromJson(core.Map json_) + : this( + appendFsync: json_['appendFsync'] as core.String?, + ); + + core.Map toJson() => { + if (appendFsync != null) 'appendFsync': appendFsync!, + }; +} + +/// The automated backup config for a cluster. +class AutomatedBackupConfig { + /// The automated backup mode. + /// + /// If the mode is disabled, the other fields will be ignored. + /// + /// Optional. + /// Possible string values are: + /// - "AUTOMATED_BACKUP_MODE_UNSPECIFIED" : Default value. Automated backup + /// config is not specified. + /// - "DISABLED" : Automated backup config disabled. + /// - "ENABLED" : Automated backup config enabled. + core.String? 
automatedBackupMode; + + /// Trigger automated backups at a fixed frequency. + /// + /// Optional. + FixedFrequencySchedule? fixedFrequencySchedule; + + /// How long to keep automated backups before the backups are deleted. + /// + /// The value should be between 1 day and 365 days. If not specified, the + /// default value is 35 days. + /// + /// Optional. + core.String? retention; + + AutomatedBackupConfig({ + this.automatedBackupMode, + this.fixedFrequencySchedule, + this.retention, + }); + + AutomatedBackupConfig.fromJson(core.Map json_) + : this( + automatedBackupMode: json_['automatedBackupMode'] as core.String?, + fixedFrequencySchedule: json_.containsKey('fixedFrequencySchedule') + ? FixedFrequencySchedule.fromJson(json_['fixedFrequencySchedule'] + as core.Map) + : null, + retention: json_['retention'] as core.String?, + ); + + core.Map toJson() => { + if (automatedBackupMode != null) + 'automatedBackupMode': automatedBackupMode!, + if (fixedFrequencySchedule != null) + 'fixedFrequencySchedule': fixedFrequencySchedule!, + if (retention != null) 'retention': retention!, + }; +} + +/// Backup of a cluster. +class Backup { + /// List of backup files of the backup. + /// + /// Output only. + core.List? backupFiles; + + /// Type of the backup. + /// + /// Output only. + /// Possible string values are: + /// - "BACKUP_TYPE_UNSPECIFIED" : The default value, not set. + /// - "ON_DEMAND" : On-demand backup. + /// - "AUTOMATED" : Automated backup. + core.String? backupType; + + /// Cluster resource path of this backup. + /// + /// Output only. + core.String? cluster; + + /// Cluster uid of this backup. + /// + /// Output only. + core.String? clusterUid; + + /// The time when the backup was created. + /// + /// Output only. + core.String? createTime; + + /// redis-7.2, valkey-7.5 + /// + /// Output only. + core.String? engineVersion; + + /// The time when the backup will expire. + /// + /// Output only. + core.String? expireTime; + + /// Identifier. + /// + /// Full resource path of the backup. the last part of the name is the backup + /// id with the following format: \[YYYYMMDDHHMMSS\]_\[Shorted Cluster UID\] + /// OR customer specified while backup cluster. Example: 20240515123000_1234 + core.String? name; + + /// Node type of the cluster. + /// + /// Output only. + /// Possible string values are: + /// - "NODE_TYPE_UNSPECIFIED" : Node type unspecified + /// - "REDIS_SHARED_CORE_NANO" : Redis shared core nano node_type. + /// - "REDIS_HIGHMEM_MEDIUM" : Redis highmem medium node_type. + /// - "REDIS_HIGHMEM_XLARGE" : Redis highmem xlarge node_type. + /// - "REDIS_STANDARD_SMALL" : Redis standard small node_type. + core.String? nodeType; + + /// Number of replicas for the cluster. + /// + /// Output only. + core.int? replicaCount; + + /// Number of shards for the cluster. + /// + /// Output only. + core.int? shardCount; + + /// State of the backup. + /// + /// Output only. + /// Possible string values are: + /// - "STATE_UNSPECIFIED" : The default value, not set. + /// - "CREATING" : The backup is being created. + /// - "ACTIVE" : The backup is active to be used. + /// - "DELETING" : The backup is being deleted. + /// - "SUSPENDED" : The backup is currently suspended due to reasons like + /// project deletion, billing account closure, etc. + core.String? state; + + /// Total size of the backup in bytes. + /// + /// Output only. + core.String? totalSizeBytes; + + /// System assigned unique identifier of the backup. + /// + /// Output only. + core.String? 
uid; + + Backup({ + this.backupFiles, + this.backupType, + this.cluster, + this.clusterUid, + this.createTime, + this.engineVersion, + this.expireTime, + this.name, + this.nodeType, + this.replicaCount, + this.shardCount, + this.state, + this.totalSizeBytes, + this.uid, + }); + + Backup.fromJson(core.Map json_) + : this( + backupFiles: (json_['backupFiles'] as core.List?) + ?.map((value) => BackupFile.fromJson( + value as core.Map)) + .toList(), + backupType: json_['backupType'] as core.String?, + cluster: json_['cluster'] as core.String?, + clusterUid: json_['clusterUid'] as core.String?, + createTime: json_['createTime'] as core.String?, + engineVersion: json_['engineVersion'] as core.String?, + expireTime: json_['expireTime'] as core.String?, + name: json_['name'] as core.String?, + nodeType: json_['nodeType'] as core.String?, + replicaCount: json_['replicaCount'] as core.int?, + shardCount: json_['shardCount'] as core.int?, + state: json_['state'] as core.String?, + totalSizeBytes: json_['totalSizeBytes'] as core.String?, + uid: json_['uid'] as core.String?, + ); + + core.Map toJson() => { + if (backupFiles != null) 'backupFiles': backupFiles!, + if (backupType != null) 'backupType': backupType!, + if (cluster != null) 'cluster': cluster!, + if (clusterUid != null) 'clusterUid': clusterUid!, + if (createTime != null) 'createTime': createTime!, + if (engineVersion != null) 'engineVersion': engineVersion!, + if (expireTime != null) 'expireTime': expireTime!, + if (name != null) 'name': name!, + if (nodeType != null) 'nodeType': nodeType!, + if (replicaCount != null) 'replicaCount': replicaCount!, + if (shardCount != null) 'shardCount': shardCount!, + if (state != null) 'state': state!, + if (totalSizeBytes != null) 'totalSizeBytes': totalSizeBytes!, + if (uid != null) 'uid': uid!, + }; +} + +/// Request for \[BackupCluster\]. +class BackupClusterRequest { + /// The id of the backup to be created. + /// + /// If not specified, the default value (\[YYYYMMDDHHMMSS\]_\[Shortened + /// Cluster UID\] is used. + /// + /// Optional. + core.String? backupId; + + /// TTL for the backup to expire. + /// + /// Value range is 1 day to 100 years. If not specified, the default value is + /// 100 years. + /// + /// Optional. + core.String? ttl; + + BackupClusterRequest({ + this.backupId, + this.ttl, + }); + + BackupClusterRequest.fromJson(core.Map json_) + : this( + backupId: json_['backupId'] as core.String?, + ttl: json_['ttl'] as core.String?, + ); + + core.Map toJson() => { + if (backupId != null) 'backupId': backupId!, + if (ttl != null) 'ttl': ttl!, + }; +} + +/// BackupCollection of a cluster. +class BackupCollection { + /// The full resource path of the cluster the backup collection belongs to. + /// + /// Example: projects/{project}/locations/{location}/clusters/{cluster} + /// + /// Output only. + core.String? cluster; + + /// The cluster uid of the backup collection. + /// + /// Output only. + core.String? clusterUid; + + /// Identifier. + /// + /// Full resource path of the backup collection. + core.String? name; + + /// System assigned unique identifier of the backup collection. + /// + /// Output only. + core.String? 
uid; + + BackupCollection({ + this.cluster, + this.clusterUid, + this.name, + this.uid, + }); + + BackupCollection.fromJson(core.Map json_) + : this( + cluster: json_['cluster'] as core.String?, + clusterUid: json_['clusterUid'] as core.String?, + name: json_['name'] as core.String?, + uid: json_['uid'] as core.String?, + ); + + core.Map toJson() => { + if (cluster != null) 'cluster': cluster!, + if (clusterUid != null) 'clusterUid': clusterUid!, + if (name != null) 'name': name!, + if (uid != null) 'uid': uid!, + }; +} + +/// Backup is consisted of multiple backup files. +class BackupFile { + /// The time when the backup file was created. + /// + /// Output only. + core.String? createTime; + + /// e.g: .rdb + /// + /// Output only. + core.String? fileName; + + /// Size of the backup file in bytes. + /// + /// Output only. + core.String? sizeBytes; - AOFConfig({ - this.appendFsync, + BackupFile({ + this.createTime, + this.fileName, + this.sizeBytes, }); - AOFConfig.fromJson(core.Map json_) + BackupFile.fromJson(core.Map json_) : this( - appendFsync: json_['appendFsync'] as core.String?, + createTime: json_['createTime'] as core.String?, + fileName: json_['fileName'] as core.String?, + sizeBytes: json_['sizeBytes'] as core.String?, ); core.Map toJson() => { - if (appendFsync != null) 'appendFsync': appendFsync!, + if (createTime != null) 'createTime': createTime!, + if (fileName != null) 'fileName': fileName!, + if (sizeBytes != null) 'sizeBytes': sizeBytes!, }; } @@ -1301,6 +1961,19 @@ class Cluster { /// - "AUTH_MODE_DISABLED" : Authorization disabled mode core.String? authorizationMode; + /// The automated backup config for the cluster. + /// + /// Optional. + AutomatedBackupConfig? automatedBackupConfig; + + /// The backup collection full resource name. + /// + /// Example: + /// projects/{project}/locations/{location}/backupCollections/{collection} + /// + /// Optional. Output only. + core.String? backupCollection; + /// A list of cluster enpoints. /// /// Optional. @@ -1329,6 +2002,14 @@ class Cluster { /// Output only. core.List? discoveryEndpoints; + /// Backups stored in Cloud Storage buckets. + /// + /// The Cloud Storage buckets need to be the same region as the clusters. Read + /// permission is required to import from the provided Cloud Storage objects. + /// + /// Optional. + GcsBackupSource? gcsSource; + /// ClusterMaintenancePolicy determines when to allow or deny updates. /// /// Optional. @@ -1339,6 +2020,11 @@ class Cluster { /// Output only. ClusterMaintenanceSchedule? maintenanceSchedule; + /// Backups generated and managed by memorystore service. + /// + /// Optional. + ManagedBackupSource? managedBackupSource; + /// Identifier. /// /// Unique name of the resource in this scope including project and location @@ -1455,13 +2141,17 @@ class Cluster { Cluster({ this.authorizationMode, + this.automatedBackupConfig, + this.backupCollection, this.clusterEndpoints, this.createTime, this.crossClusterReplicationConfig, this.deletionProtectionEnabled, this.discoveryEndpoints, + this.gcsSource, this.maintenancePolicy, this.maintenanceSchedule, + this.managedBackupSource, this.name, this.nodeType, this.persistenceConfig, @@ -1483,6 +2173,11 @@ class Cluster { Cluster.fromJson(core.Map json_) : this( authorizationMode: json_['authorizationMode'] as core.String?, + automatedBackupConfig: json_.containsKey('automatedBackupConfig') + ? 
AutomatedBackupConfig.fromJson(json_['automatedBackupConfig'] + as core.Map) + : null, + backupCollection: json_['backupCollection'] as core.String?, clusterEndpoints: (json_['clusterEndpoints'] as core.List?) ?.map((value) => ClusterEndpoint.fromJson( value as core.Map)) @@ -1500,6 +2195,10 @@ class Cluster { ?.map((value) => DiscoveryEndpoint.fromJson( value as core.Map)) .toList(), + gcsSource: json_.containsKey('gcsSource') + ? GcsBackupSource.fromJson( + json_['gcsSource'] as core.Map) + : null, maintenancePolicy: json_.containsKey('maintenancePolicy') ? ClusterMaintenancePolicy.fromJson(json_['maintenancePolicy'] as core.Map) @@ -1508,6 +2207,10 @@ class Cluster { ? ClusterMaintenanceSchedule.fromJson(json_['maintenanceSchedule'] as core.Map) : null, + managedBackupSource: json_.containsKey('managedBackupSource') + ? ManagedBackupSource.fromJson(json_['managedBackupSource'] + as core.Map) + : null, name: json_['name'] as core.String?, nodeType: json_['nodeType'] as core.String?, persistenceConfig: json_.containsKey('persistenceConfig') @@ -1553,6 +2256,9 @@ class Cluster { core.Map toJson() => { if (authorizationMode != null) 'authorizationMode': authorizationMode!, + if (automatedBackupConfig != null) + 'automatedBackupConfig': automatedBackupConfig!, + if (backupCollection != null) 'backupCollection': backupCollection!, if (clusterEndpoints != null) 'clusterEndpoints': clusterEndpoints!, if (createTime != null) 'createTime': createTime!, if (crossClusterReplicationConfig != null) @@ -1561,9 +2267,12 @@ class Cluster { 'deletionProtectionEnabled': deletionProtectionEnabled!, if (discoveryEndpoints != null) 'discoveryEndpoints': discoveryEndpoints!, + if (gcsSource != null) 'gcsSource': gcsSource!, if (maintenancePolicy != null) 'maintenancePolicy': maintenancePolicy!, if (maintenanceSchedule != null) 'maintenanceSchedule': maintenanceSchedule!, + if (managedBackupSource != null) + 'managedBackupSource': managedBackupSource!, if (name != null) 'name': name!, if (nodeType != null) 'nodeType': nodeType!, if (persistenceConfig != null) 'persistenceConfig': persistenceConfig!, @@ -1782,16 +2491,25 @@ class ClusterWeeklyMaintenanceWindow { /// Detailed information of each PSC connection. class ConnectionDetail { + /// Detailed information of a PSC connection that is created through service + /// connectivity automation. + PscAutoConnection? pscAutoConnection; + /// Detailed information of a PSC connection that is created by the customer /// who owns the cluster. PscConnection? pscConnection; ConnectionDetail({ + this.pscAutoConnection, this.pscConnection, }); ConnectionDetail.fromJson(core.Map json_) : this( + pscAutoConnection: json_.containsKey('pscAutoConnection') + ? PscAutoConnection.fromJson(json_['pscAutoConnection'] + as core.Map) + : null, pscConnection: json_.containsKey('pscConnection') ? PscConnection.fromJson( json_['pscConnection'] as core.Map) @@ -1799,6 +2517,7 @@ class ConnectionDetail { ); core.Map toJson() => { + if (pscAutoConnection != null) 'pscAutoConnection': pscAutoConnection!, if (pscConnection != null) 'pscConnection': pscConnection!, }; } @@ -1937,6 +2656,25 @@ class DiscoveryEndpoint { /// (google.protobuf.Empty); } typedef Empty = $Empty; +/// Request for \[ExportBackup\]. +class ExportBackupRequest { + /// Google Cloud Storage bucket, like "my-bucket". + core.String? 
gcsBucket; + + ExportBackupRequest({ + this.gcsBucket, + }); + + ExportBackupRequest.fromJson(core.Map json_) + : this( + gcsBucket: json_['gcsBucket'] as core.String?, + ); + + core.Map toJson() => { + if (gcsBucket != null) 'gcsBucket': gcsBucket!, + }; +} + /// Request for Export. class ExportInstanceRequest { /// Specify data to be exported. @@ -1995,6 +2733,60 @@ class FailoverInstanceRequest { }; } +/// This schedule allows the backup to be triggered at a fixed frequency +/// (currently only daily is supported). +class FixedFrequencySchedule { + /// The start time of every automated backup in UTC. + /// + /// It must be set to the start of an hour. This field is required. + /// + /// Required. + TimeOfDay? startTime; + + FixedFrequencySchedule({ + this.startTime, + }); + + FixedFrequencySchedule.fromJson(core.Map json_) + : this( + startTime: json_.containsKey('startTime') + ? TimeOfDay.fromJson( + json_['startTime'] as core.Map) + : null, + ); + + core.Map toJson() => { + if (startTime != null) 'startTime': startTime!, + }; +} + +/// Backups stored in Cloud Storage buckets. +/// +/// The Cloud Storage buckets need to be the same region as the clusters. +class GcsBackupSource { + /// URIs of the GCS objects to import. + /// + /// Example: gs://bucket1/object1, gs://bucket2/folder2/object2 + /// + /// Optional. + core.List? uris; + + GcsBackupSource({ + this.uris, + }); + + GcsBackupSource.fromJson(core.Map json_) + : this( + uris: (json_['uris'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + ); + + core.Map toJson() => { + if (uris != null) 'uris': uris!, + }; +} + /// The Cloud Storage location for the output content class GcsDestination { /// Data destination URI (e.g. 'gs://my_bucket/my_object'). @@ -2572,6 +3364,89 @@ class InstanceAuthString { }; } +/// Response for \[ListBackupCollections\]. +class ListBackupCollectionsResponse { + /// A list of backupCollections in the project. + /// + /// If the `location_id` in the parent field of the request is "-", all + /// regions available to the project are queried, and the results aggregated. + /// If in such an aggregated query a location is unavailable, a placeholder + /// backupCollection entry is included in the response with the `name` field + /// set to a value of the form + /// `projects/{project_id}/locations/{location_id}/backupCollections/`- and + /// the `status` field set to ERROR and `status_message` field set to + /// "location not available for ListBackupCollections". + core.List? backupCollections; + + /// Token to retrieve the next page of results, or empty if there are no more + /// results in the list. + core.String? nextPageToken; + + /// Locations that could not be reached. + core.List? unreachable; + + ListBackupCollectionsResponse({ + this.backupCollections, + this.nextPageToken, + this.unreachable, + }); + + ListBackupCollectionsResponse.fromJson(core.Map json_) + : this( + backupCollections: (json_['backupCollections'] as core.List?) + ?.map((value) => BackupCollection.fromJson( + value as core.Map)) + .toList(), + nextPageToken: json_['nextPageToken'] as core.String?, + unreachable: (json_['unreachable'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + ); + + core.Map toJson() => { + if (backupCollections != null) 'backupCollections': backupCollections!, + if (nextPageToken != null) 'nextPageToken': nextPageToken!, + if (unreachable != null) 'unreachable': unreachable!, + }; +} + +/// Response for \[ListBackups\]. 
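// Illustrative pagination sketch (editorial; not part of the generated diff):
// walk every page of backups in a collection by feeding `nextPageToken` back
// into `pageToken`, as the `list` methods earlier in this file describe. The
// `CloudRedisApi` entry point is assumed; the parent name is a placeholder.
import 'package:googleapis/redis/v1.dart';

Future<List<Backup>> listAllBackups(CloudRedisApi api) async {
  const parent =
      'projects/my-project/locations/us-central1/backupCollections/my-coll';
  final backups = <Backup>[];
  String? pageToken;
  do {
    final response = await api.projects.locations.backupCollections.backups
        .list(parent, pageSize: 100, pageToken: pageToken);
    backups.addAll(response.backups ?? const []);
    pageToken = response.nextPageToken;
  } while (pageToken != null && pageToken.isNotEmpty);
  return backups;
}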
+class ListBackupsResponse { + /// A list of backups in the project. + core.List? backups; + + /// Token to retrieve the next page of results, or empty if there are no more + /// results in the list. + core.String? nextPageToken; + + /// Backups that could not be reached. + core.List? unreachable; + + ListBackupsResponse({ + this.backups, + this.nextPageToken, + this.unreachable, + }); + + ListBackupsResponse.fromJson(core.Map json_) + : this( + backups: (json_['backups'] as core.List?) + ?.map((value) => + Backup.fromJson(value as core.Map)) + .toList(), + nextPageToken: json_['nextPageToken'] as core.String?, + unreachable: (json_['unreachable'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + ); + + core.Map toJson() => { + if (backups != null) 'backups': backups!, + if (nextPageToken != null) 'nextPageToken': nextPageToken!, + if (unreachable != null) 'unreachable': unreachable!, + }; +} + /// Response for ListClusters. class ListClustersResponse { /// A list of Redis clusters in the project in the specified location, or @@ -2896,6 +3771,32 @@ class MaintenanceSchedule { }; } +/// Backups that generated and managed by memorystore. +class ManagedBackupSource { + /// Example: + /// //redis.googleapis.com/projects/{project}/locations/{location}/backupCollections/{collection}/backups/{backup} + /// A shorter version (without the prefix) of the backup name is also + /// supported, like + /// projects/{project}/locations/{location}/backupCollections/{collection}/backups/{backup_id} + /// In this case, it assumes the backup is under redis.googleapis.com. + /// + /// Optional. + core.String? backup; + + ManagedBackupSource({ + this.backup, + }); + + ManagedBackupSource.fromJson(core.Map json_) + : this( + backup: json_['backup'] as core.String?, + ); + + core.Map toJson() => { + if (backup != null) 'backup': backup!, + }; +} + class ManagedCertificateAuthority { /// The PEM encoded CA certificate chains for redis managed server /// authentication @@ -3158,6 +4059,108 @@ class PersistenceConfig { }; } +/// Details of consumer resources in a PSC connection that is created through +/// Service Connectivity Automation. +class PscAutoConnection { + /// The IP allocated on the consumer network for the PSC forwarding rule. + /// + /// Output only. + core.String? address; + + /// Type of the PSC connection. + /// + /// Output only. + /// Possible string values are: + /// - "CONNECTION_TYPE_UNSPECIFIED" : Cluster endpoint Type is not set + /// - "CONNECTION_TYPE_DISCOVERY" : Cluster endpoint that will be used as for + /// cluster topology discovery. + /// - "CONNECTION_TYPE_PRIMARY" : Cluster endpoint that will be used as + /// primary endpoint to access primary. + /// - "CONNECTION_TYPE_READER" : Cluster endpoint that will be used as reader + /// endpoint to access replicas. + core.String? connectionType; + + /// The URI of the consumer side forwarding rule. + /// + /// Example: + /// projects/{projectNumOrId}/regions/us-east1/forwardingRules/{resourceId}. + /// + /// Output only. + core.String? forwardingRule; + + /// The consumer network where the IP address resides, in the form of + /// projects/{project_id}/global/networks/{network_id}. + /// + /// Required. + core.String? network; + + /// The consumer project_id where the forwarding rule is created from. + /// + /// Required. + core.String? projectId; + + /// The PSC connection id of the forwarding rule connected to the service + /// attachment. + /// + /// Output only. + core.String? 
pscConnectionId; + + /// The status of the PSC connection. + /// + /// Please note that this value is updated periodically. Please use Private + /// Service Connect APIs for the latest status. + /// + /// Output only. + /// Possible string values are: + /// - "PSC_CONNECTION_STATUS_UNSPECIFIED" : PSC connection status is not + /// specified. + /// - "PSC_CONNECTION_STATUS_ACTIVE" : The connection is active + /// - "PSC_CONNECTION_STATUS_NOT_FOUND" : Connection not found + core.String? pscConnectionStatus; + + /// The service attachment which is the target of the PSC connection, in the + /// form of + /// projects/{project-id}/regions/{region}/serviceAttachments/{service-attachment-id}. + /// + /// Output only. + core.String? serviceAttachment; + + PscAutoConnection({ + this.address, + this.connectionType, + this.forwardingRule, + this.network, + this.projectId, + this.pscConnectionId, + this.pscConnectionStatus, + this.serviceAttachment, + }); + + PscAutoConnection.fromJson(core.Map json_) + : this( + address: json_['address'] as core.String?, + connectionType: json_['connectionType'] as core.String?, + forwardingRule: json_['forwardingRule'] as core.String?, + network: json_['network'] as core.String?, + projectId: json_['projectId'] as core.String?, + pscConnectionId: json_['pscConnectionId'] as core.String?, + pscConnectionStatus: json_['pscConnectionStatus'] as core.String?, + serviceAttachment: json_['serviceAttachment'] as core.String?, + ); + + core.Map toJson() => { + if (address != null) 'address': address!, + if (connectionType != null) 'connectionType': connectionType!, + if (forwardingRule != null) 'forwardingRule': forwardingRule!, + if (network != null) 'network': network!, + if (projectId != null) 'projectId': projectId!, + if (pscConnectionId != null) 'pscConnectionId': pscConnectionId!, + if (pscConnectionStatus != null) + 'pscConnectionStatus': pscConnectionStatus!, + if (serviceAttachment != null) 'serviceAttachment': serviceAttachment!, + }; +} + class PscConfig { /// The network where the IP address of the discovery endpoint will be /// reserved, in the form of diff --git a/generated/googleapis/lib/retail/v2.dart b/generated/googleapis/lib/retail/v2.dart index 1993e25c5..e5cec8d9e 100644 --- a/generated/googleapis/lib/retail/v2.dart +++ b/generated/googleapis/lib/retail/v2.dart @@ -3826,7 +3826,7 @@ class GoogleCloudRetailV2AttributesConfig { /// - "PRODUCT_LEVEL_ATTRIBUTE_CONFIG" : At this level, we honor the attribute /// configurations set in Product.attributes. /// - "CATALOG_LEVEL_ATTRIBUTE_CONFIG" : At this level, we honor the attribute - /// configurations set in CatalogConfig.attribute_configs. + /// configurations set in `CatalogConfig.attribute_configs`. core.String? attributeConfigLevel; /// Enable attribute(s) config at catalog level. @@ -4008,8 +4008,6 @@ class GoogleCloudRetailV2BigQuerySource { core.String? gcsStagingDir; /// BigQuery time partitioned table's _PARTITIONDATE in YYYY-MM-DD format. - /// - /// Only supported in ImportProductsRequest. GoogleTypeDate? partitionDate; /// The project ID (can be project # or ID) that the BigQuery source is in @@ -4141,8 +4139,7 @@ class GoogleCloudRetailV2CatalogAttribute { /// /// `True` if at least one Product is using this attribute in /// Product.attributes. Otherwise, this field is `False`. 
CatalogAttribute can - /// be pre-loaded by using CatalogService.AddCatalogAttribute, - /// CatalogService.ImportCatalogAttributes, or + /// be pre-loaded by using CatalogService.AddCatalogAttribute or /// CatalogService.UpdateAttributesConfig APIs. This field is `False` for /// pre-loaded CatalogAttributes. Only pre-loaded catalog attributes that are /// neither in use by products nor predefined can be deleted. Catalog @@ -5205,7 +5202,7 @@ class GoogleCloudRetailV2CustomAttribute { }; } -/// Metadata for active A/B testing Experiment. +/// Metadata for active A/B testing experiment. class GoogleCloudRetailV2ExperimentInfo { /// The fully qualified resource name of the experiment that provides the /// serving config under test, should an active experiment exist. @@ -5243,7 +5240,7 @@ class GoogleCloudRetailV2ExperimentInfo { /// Metadata for active serving config A/B tests. class GoogleCloudRetailV2ExperimentInfoServingConfigExperiment { /// The fully qualified resource name of the serving config - /// Experiment.VariantArm.serving_config_id responsible for generating the + /// `Experiment.VariantArm.serving_config_id` responsible for generating the /// search response. /// /// For example: `projects / * /locations / * /catalogs / * /servingConfigs / @@ -5563,9 +5560,10 @@ class GoogleCloudRetailV2GetDefaultBranchResponse { /// Product image. /// -/// Recommendations AI and Retail Search do not use product images to improve -/// prediction and search results. However, product images can be returned in -/// results, and are shown in prediction or search previews in the console. +/// Recommendations AI and Retail Search use product images to improve +/// prediction and search results. Product images can be returned in results, +/// and are shown in prediction or search previews in the console. Please try to +/// provide correct product images and avoid using images with size too small. class GoogleCloudRetailV2Image { /// Height of the image in number of pixels. /// @@ -6020,32 +6018,63 @@ class GoogleCloudRetailV2LocalInventory { /// search. The `searchable` field should be unset or set to false. * The max /// summed total bytes of custom attribute keys and values per product is /// 5MiB. + /// + /// Optional. core.Map? attributes; - /// Input only. + /// The availability of the Product at this place_id. /// - /// Supported fulfillment types. Valid fulfillment type values include - /// commonly used types (such as pickup in store and same day delivery), and - /// custom types. Customers have to map custom types to their display names - /// before rendering UI. Supported values: * "pickup-in-store" * - /// "ship-to-store" * "same-day-delivery" * "next-day-delivery" * - /// "custom-type-1" * "custom-type-2" * "custom-type-3" * "custom-type-4" * - /// "custom-type-5" If this field is set to an invalid value other than these, - /// an INVALID_ARGUMENT error is returned. All the elements must be distinct. - /// Otherwise, an INVALID_ARGUMENT error is returned. + /// Default to Availability.IN_STOCK. For primary products with variants set + /// the availability of the primary as Availability.OUT_OF_STOCK and set the + /// true availability at the variant level. This way the primary product will + /// be considered "in stock" as long as it has at least one variant in stock. + /// For primary products with no variants set the true availability at the + /// primary level. 
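// Illustrative sketch (editorial; not part of the generated diff): a local
// inventory entry using the new place-level `availability` field introduced in
// this hunk. Field names come from the GoogleCloudRetailV2LocalInventory class
// in this file; the place id and fulfillment types are placeholders.
import 'package:googleapis/retail/v2.dart';

void main() {
  final inventory = GoogleCloudRetailV2LocalInventory(
    placeId: 'store-1234',
    availability: 'OUT_OF_STOCK',
    fulfillmentTypes: ['pickup-in-store', 'same-day-delivery'],
  );
  // toJson() produces the JSON wire format for this message.
  print(inventory.toJson());
}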
Corresponding properties: Google Merchant Center property + /// [availability](https://support.google.com/merchants/answer/6324448). + /// Schema.org property [Offer.availability](https://schema.org/availability). + /// + /// Optional. + /// Possible string values are: + /// - "AVAILABILITY_UNSPECIFIED" : Default product availability. Default to + /// Availability.IN_STOCK if unset. + /// - "IN_STOCK" : Product in stock. + /// - "OUT_OF_STOCK" : Product out of stock. + /// - "PREORDER" : Product that is in pre-order state. + /// - "BACKORDER" : Product that is back-ordered (i.e. temporarily out of + /// stock). + core.String? availability; + + /// Supported fulfillment types. + /// + /// Valid fulfillment type values include commonly used types (such as pickup + /// in store and same day delivery), and custom types. Customers have to map + /// custom types to their display names before rendering UI. Supported values: + /// * "pickup-in-store" * "ship-to-store" * "same-day-delivery" * + /// "next-day-delivery" * "custom-type-1" * "custom-type-2" * "custom-type-3" + /// * "custom-type-4" * "custom-type-5" If this field is set to an invalid + /// value other than these, an INVALID_ARGUMENT error is returned. All the + /// elements must be distinct. Otherwise, an INVALID_ARGUMENT error is + /// returned. + /// + /// Optional. core.List? fulfillmentTypes; /// The place ID for the current set of inventory information. + /// + /// Required. core.String? placeId; /// Product price and cost information. /// /// Google Merchant Center property /// [price](https://support.google.com/merchants/answer/6324371). + /// + /// Optional. GoogleCloudRetailV2PriceInfo? priceInfo; GoogleCloudRetailV2LocalInventory({ this.attributes, + this.availability, this.fulfillmentTypes, this.placeId, this.priceInfo, @@ -6062,6 +6091,7 @@ class GoogleCloudRetailV2LocalInventory { value as core.Map), ), ), + availability: json_['availability'] as core.String?, fulfillmentTypes: (json_['fulfillmentTypes'] as core.List?) ?.map((value) => value as core.String) .toList(), @@ -6074,6 +6104,7 @@ class GoogleCloudRetailV2LocalInventory { core.Map toJson() => { if (attributes != null) 'attributes': attributes!, + if (availability != null) 'availability': availability!, if (fulfillmentTypes != null) 'fulfillmentTypes': fulfillmentTypes!, if (placeId != null) 'placeId': placeId!, if (priceInfo != null) 'priceInfo': priceInfo!, @@ -6507,6 +6538,73 @@ class GoogleCloudRetailV2OutputConfigGcsDestination { /// Request for pausing training of a model. typedef GoogleCloudRetailV2PauseModelRequest = $Empty; +/// Metadata for pinning to be returned in the response. +/// +/// This is used for distinguishing between applied vs dropped pins. +class GoogleCloudRetailV2PinControlMetadata { + /// Map of all matched pins, keyed by pin position. + core.Map? + allMatchedPins; + + /// Map of pins that were dropped due to overlap with other matching pins, + /// keyed by pin position. + core.Map? + droppedPins; + + GoogleCloudRetailV2PinControlMetadata({ + this.allMatchedPins, + this.droppedPins, + }); + + GoogleCloudRetailV2PinControlMetadata.fromJson(core.Map json_) + : this( + allMatchedPins: + (json_['allMatchedPins'] as core.Map?) + ?.map( + (key, value) => core.MapEntry( + key, + GoogleCloudRetailV2PinControlMetadataProductPins.fromJson( + value as core.Map), + ), + ), + droppedPins: + (json_['droppedPins'] as core.Map?) 
+ ?.map( + (key, value) => core.MapEntry( + key, + GoogleCloudRetailV2PinControlMetadataProductPins.fromJson( + value as core.Map), + ), + ), + ); + + core.Map toJson() => { + if (allMatchedPins != null) 'allMatchedPins': allMatchedPins!, + if (droppedPins != null) 'droppedPins': droppedPins!, + }; +} + +/// List of product ids which have associated pins. +class GoogleCloudRetailV2PinControlMetadataProductPins { + /// List of product ids which have associated pins. + core.List? productId; + + GoogleCloudRetailV2PinControlMetadataProductPins({ + this.productId, + }); + + GoogleCloudRetailV2PinControlMetadataProductPins.fromJson(core.Map json_) + : this( + productId: (json_['productId'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + ); + + core.Map toJson() => { + if (productId != null) 'productId': productId!, + }; +} + /// Request message for Predict method. class GoogleCloudRetailV2PredictRequest { /// Filter for restricting prediction results with a length limit of 5,000 @@ -9575,7 +9673,7 @@ class GoogleCloudRetailV2SearchResponse { /// based on corrected_query. Otherwise the original query is used for search. core.String? correctedQuery; - /// Metadata related to A/B testing Experiment associated with this response. + /// Metadata related to A/B testing experiment associated with this response. /// /// Only exists when an experiment is triggered. core.List? experimentInfo; @@ -9594,6 +9692,14 @@ class GoogleCloudRetailV2SearchResponse { /// If this field is omitted, there are no subsequent pages. core.String? nextPageToken; + /// Metadata for pin controls which were applicable to the request. + /// + /// This contains two map fields, one for all matched pins and one for pins + /// which were matched but not applied. The two maps are keyed by pin + /// position, and the values are the product ids which were matched to that + /// pin. + GoogleCloudRetailV2PinControlMetadata? pinControlMetadata; + /// Query expansion information for the returned results. GoogleCloudRetailV2SearchResponseQueryExpansionInfo? queryExpansionInfo; @@ -9627,6 +9733,7 @@ class GoogleCloudRetailV2SearchResponse { this.facets, this.invalidConditionBoostSpecs, this.nextPageToken, + this.pinControlMetadata, this.queryExpansionInfo, this.redirectUri, this.results, @@ -9662,6 +9769,11 @@ class GoogleCloudRetailV2SearchResponse { .fromJson(value as core.Map)) .toList(), nextPageToken: json_['nextPageToken'] as core.String?, + pinControlMetadata: json_.containsKey('pinControlMetadata') + ? GoogleCloudRetailV2PinControlMetadata.fromJson( + json_['pinControlMetadata'] + as core.Map) + : null, queryExpansionInfo: json_.containsKey('queryExpansionInfo') ? GoogleCloudRetailV2SearchResponseQueryExpansionInfo.fromJson( json_['queryExpansionInfo'] @@ -9692,6 +9804,8 @@ class GoogleCloudRetailV2SearchResponse { if (invalidConditionBoostSpecs != null) 'invalidConditionBoostSpecs': invalidConditionBoostSpecs!, if (nextPageToken != null) 'nextPageToken': nextPageToken!, + if (pinControlMetadata != null) + 'pinControlMetadata': pinControlMetadata!, if (queryExpansionInfo != null) 'queryExpansionInfo': queryExpansionInfo!, if (redirectUri != null) 'redirectUri': redirectUri!, @@ -10995,11 +11109,11 @@ class GoogleCloudRetailV2UserInfo { /// User agent as included in the HTTP header. /// - /// Required for getting SearchResponse.sponsored_results. The field must be a - /// UTF-8 encoded string with a length limit of 1,000 characters. Otherwise, - /// an INVALID_ARGUMENT error is returned. 
This should not be set when using - /// the client side event reporting with GTM or JavaScript tag in - /// UserEventService.CollectUserEvent or if direct_user_request is set. + /// The field must be a UTF-8 encoded string with a length limit of 1,000 + /// characters. Otherwise, an INVALID_ARGUMENT error is returned. This should + /// not be set when using the client side event reporting with GTM or + /// JavaScript tag in UserEventService.CollectUserEvent or if + /// direct_user_request is set. core.String? userAgent; /// Highly recommended for logged-in users. diff --git a/generated/googleapis/lib/run/v2.dart b/generated/googleapis/lib/run/v2.dart index 45a686a76..9f1fa1526 100644 --- a/generated/googleapis/lib/run/v2.dart +++ b/generated/googleapis/lib/run/v2.dart @@ -2025,6 +2025,14 @@ class GoogleCloudRunV2BuildpacksBuild { /// Optional. core.String? functionTarget; + /// project_descriptor stores the path to the project descriptor file. + /// + /// When empty, it means that there is no project descriptor file in the + /// source. + /// + /// Optional. + core.String? projectDescriptor; + /// The runtime name, e.g. 'go113'. /// /// Leave blank for generic builds. @@ -2039,6 +2047,7 @@ class GoogleCloudRunV2BuildpacksBuild { this.enableAutomaticUpdates, this.environmentVariables, this.functionTarget, + this.projectDescriptor, this.runtime, }); @@ -2056,6 +2065,7 @@ class GoogleCloudRunV2BuildpacksBuild { ), ), functionTarget: json_['functionTarget'] as core.String?, + projectDescriptor: json_['projectDescriptor'] as core.String?, runtime: json_['runtime'] as core.String?, ); @@ -2067,6 +2077,7 @@ class GoogleCloudRunV2BuildpacksBuild { if (environmentVariables != null) 'environmentVariables': environmentVariables!, if (functionTarget != null) 'functionTarget': functionTarget!, + if (projectDescriptor != null) 'projectDescriptor': projectDescriptor!, if (runtime != null) 'runtime': runtime!, }; } @@ -3174,22 +3185,32 @@ class GoogleCloudRunV2GCSVolumeSource { /// Cloud Storage Bucket name. core.String? bucket; + /// A list of additional flags to pass to the gcsfuse CLI. + /// + /// Options should be specified without the leading "--". + core.List? mountOptions; + /// If true, the volume will be mounted as read only for all mounts. core.bool? readOnly; GoogleCloudRunV2GCSVolumeSource({ this.bucket, + this.mountOptions, this.readOnly, }); GoogleCloudRunV2GCSVolumeSource.fromJson(core.Map json_) : this( bucket: json_['bucket'] as core.String?, + mountOptions: (json_['mountOptions'] as core.List?) + ?.map((value) => value as core.String) + .toList(), readOnly: json_['readOnly'] as core.bool?, ); core.Map toJson() => { if (bucket != null) 'bucket': bucket!, + if (mountOptions != null) 'mountOptions': mountOptions!, if (readOnly != null) 'readOnly': readOnly!, }; } @@ -4618,6 +4639,24 @@ class GoogleCloudRunV2RevisionTemplate { /// https://cloud.google.com/run/docs/securing/using-cmek core.String? encryptionKey; + /// The action to take if the encryption key is revoked. + /// + /// Optional. + /// Possible string values are: + /// - "ENCRYPTION_KEY_REVOCATION_ACTION_UNSPECIFIED" : Unspecified + /// - "PREVENT_NEW" : Prevents the creation of new instances. + /// - "SHUTDOWN" : Shuts down existing instances, and prevents creation of new + /// ones. + core.String? encryptionKeyRevocationAction; + + /// If encryption_key_revocation_action is SHUTDOWN, the duration before + /// shutting down all instances. + /// + /// The minimum increment is 1 hour. + /// + /// Optional. + core.String? 
encryptionKeyShutdownDuration; + /// The sandbox environment to host this Revision. /// /// Optional. @@ -4651,8 +4690,8 @@ class GoogleCloudRunV2RevisionTemplate { /// Sets the maximum number of requests that each serving instance can /// receive. /// - /// If not specified or 0, defaults to 80 when requested `CPU >= 1` and - /// defaults to 1 when requested `CPU < 1`. + /// If not specified or 0, concurrency defaults to 80 when requested `CPU >= + /// 1` and defaults to 1 when requested `CPU < 1`. /// /// Optional. core.int? maxInstanceRequestConcurrency; @@ -4717,6 +4756,8 @@ class GoogleCloudRunV2RevisionTemplate { this.annotations, this.containers, this.encryptionKey, + this.encryptionKeyRevocationAction, + this.encryptionKeyShutdownDuration, this.executionEnvironment, this.healthCheckDisabled, this.labels, @@ -4747,6 +4788,10 @@ class GoogleCloudRunV2RevisionTemplate { value as core.Map)) .toList(), encryptionKey: json_['encryptionKey'] as core.String?, + encryptionKeyRevocationAction: + json_['encryptionKeyRevocationAction'] as core.String?, + encryptionKeyShutdownDuration: + json_['encryptionKeyShutdownDuration'] as core.String?, executionEnvironment: json_['executionEnvironment'] as core.String?, healthCheckDisabled: json_['healthCheckDisabled'] as core.bool?, labels: @@ -4788,6 +4833,10 @@ class GoogleCloudRunV2RevisionTemplate { if (annotations != null) 'annotations': annotations!, if (containers != null) 'containers': containers!, if (encryptionKey != null) 'encryptionKey': encryptionKey!, + if (encryptionKeyRevocationAction != null) + 'encryptionKeyRevocationAction': encryptionKeyRevocationAction!, + if (encryptionKeyShutdownDuration != null) + 'encryptionKeyShutdownDuration': encryptionKeyShutdownDuration!, if (executionEnvironment != null) 'executionEnvironment': executionEnvironment!, if (healthCheckDisabled != null) @@ -4893,13 +4942,12 @@ class GoogleCloudRunV2SecretVolumeSource { /// Internally, a umask of 0222 will be applied to any non-zero value. * This /// is an integer representation of the mode bits. So, the octal integer value /// should look exactly as the chmod numeric notation with a leading zero. - /// Some examples: for chmod 777 (a=rwx), set to 0777 (octal) or 511 - /// (base-10). For chmod 640 (u=rw,g=r), set to 0640 (octal) or 416 (base-10). - /// For chmod 755 (u=rwx,g=rx,o=rx), set to 0755 (octal) or 493 (base-10). * - /// This might be in conflict with other options that affect the file mode, - /// like fsGroup, and the result can be other mode bits set. This might be in - /// conflict with other options that affect the file mode, like fsGroup, and - /// as a result, other mode bits could be set. + /// Some examples: for chmod 640 (u=rw,g=r), set to 0640 (octal) or 416 + /// (base-10). For chmod 755 (u=rwx,g=rx,o=rx), set to 0755 (octal) or 493 + /// (base-10). * This might be in conflict with other options that affect the + /// file mode, like fsGroup, and the result can be other mode bits set. This + /// might be in conflict with other options that affect the file mode, like + /// fsGroup, and as a result, other mode bits could be set. core.int? defaultMode; /// If unspecified, the volume will expose a file whose name is the secret, @@ -5436,6 +5484,14 @@ class GoogleCloudRunV2ServiceMesh { /// Scaling settings applied at the service level rather than at the revision /// level. class GoogleCloudRunV2ServiceScaling { + /// total instance count for the service in manual scaling mode. 
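// Illustrative sketch (editorial; not part of the generated diff): pinning a
// Cloud Run service to a fixed instance count with the new
// `manualInstanceCount` field introduced in this hunk. The 'MANUAL'
// scaling-mode value is an assumption, since the enum values for `scalingMode`
// are not listed here.
import 'package:googleapis/run/v2.dart';

void main() {
  final scaling = GoogleCloudRunV2ServiceScaling(
    scalingMode: 'MANUAL',
    manualInstanceCount: 3,
  );
  print(scaling.toJson()); // {scalingMode: MANUAL, manualInstanceCount: 3}
}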
+ /// + /// This number of instances is divided among all revisions with specified + /// traffic based on the percent of traffic they are receiving. + /// + /// Optional. + core.int? manualInstanceCount; + /// total min instances for the service. /// /// This number of instances is divided among all revisions with specified @@ -5454,17 +5510,21 @@ class GoogleCloudRunV2ServiceScaling { core.String? scalingMode; GoogleCloudRunV2ServiceScaling({ + this.manualInstanceCount, this.minInstanceCount, this.scalingMode, }); GoogleCloudRunV2ServiceScaling.fromJson(core.Map json_) : this( + manualInstanceCount: json_['manualInstanceCount'] as core.int?, minInstanceCount: json_['minInstanceCount'] as core.int?, scalingMode: json_['scalingMode'] as core.String?, ); core.Map toJson() => { + if (manualInstanceCount != null) + 'manualInstanceCount': manualInstanceCount!, if (minInstanceCount != null) 'minInstanceCount': minInstanceCount!, if (scalingMode != null) 'scalingMode': scalingMode!, }; @@ -6259,11 +6319,10 @@ class GoogleCloudRunV2VersionToPath { /// Internally, a umask of 0222 will be applied to any non-zero value. * This /// is an integer representation of the mode bits. So, the octal integer value /// should look exactly as the chmod numeric notation with a leading zero. - /// Some examples: for chmod 777 (a=rwx), set to 0777 (octal) or 511 - /// (base-10). For chmod 640 (u=rw,g=r), set to 0640 (octal) or 416 (base-10). - /// For chmod 755 (u=rwx,g=rx,o=rx), set to 0755 (octal) or 493 (base-10). * - /// This might be in conflict with other options that affect the file mode, - /// like fsGroup, and the result can be other mode bits set. + /// Some examples: for chmod 640 (u=rw,g=r), set to 0640 (octal) or 416 + /// (base-10). For chmod 755 (u=rwx,g=rx,o=rx), set to 0755 (octal) or 493 + /// (base-10). * This might be in conflict with other options that affect the + /// file mode, like fsGroup, and the result can be other mode bits set. core.int? mode; /// The relative path of the secret in the container. diff --git a/generated/googleapis/lib/secretmanager/v1.dart b/generated/googleapis/lib/secretmanager/v1.dart index c96b43598..373ab0e69 100644 --- a/generated/googleapis/lib/secretmanager/v1.dart +++ b/generated/googleapis/lib/secretmanager/v1.dart @@ -2835,7 +2835,7 @@ class Topic { } /// A replication policy that replicates the Secret payload into the locations -/// specified in Secret.replication.user_managed.replicas +/// specified in Replication.UserManaged.replicas class UserManaged { /// The list of Replicas for this Secret. 
/// diff --git a/generated/googleapis/lib/securitycenter/v1.dart b/generated/googleapis/lib/securitycenter/v1.dart index 9a263de51..a7994f3c9 100644 --- a/generated/googleapis/lib/securitycenter/v1.dart +++ b/generated/googleapis/lib/securitycenter/v1.dart @@ -40,6 +40,7 @@ /// - [FoldersSourcesFindingsExternalSystemsResource] /// - [OrganizationsResource] /// - [OrganizationsAssetsResource] +/// - [OrganizationsAttackPathsResource] /// - [OrganizationsBigQueryExportsResource] /// - [OrganizationsEventThreatDetectionSettingsResource] /// - [OrganizationsEventThreatDetectionSettingsCustomModulesResource] @@ -2834,6 +2835,8 @@ class OrganizationsResource { OrganizationsAssetsResource get assets => OrganizationsAssetsResource(_requester); + OrganizationsAttackPathsResource get attackPaths => + OrganizationsAttackPathsResource(_requester); OrganizationsBigQueryExportsResource get bigQueryExports => OrganizationsBigQueryExportsResource(_requester); OrganizationsEventThreatDetectionSettingsResource @@ -3253,6 +3256,70 @@ class OrganizationsAssetsResource { } } +class OrganizationsAttackPathsResource { + final commons.ApiRequester _requester; + + OrganizationsAttackPathsResource(commons.ApiRequester client) + : _requester = client; + + /// Lists the attack paths for a set of simulation results or valued resources + /// and filter. + /// + /// Request parameters: + /// + /// [parent] - Required. Name of parent to list attack paths. Valid formats: + /// `organizations/{organization}`, + /// `organizations/{organization}/simulations/{simulation}` + /// `organizations/{organization}/simulations/{simulation}/attackExposureResults/{attack_exposure_result_v2}` + /// `organizations/{organization}/simulations/{simulation}/valuedResources/{valued_resource}` + /// Value must have pattern `^organizations/\[^/\]+$`. + /// + /// [filter] - The filter expression that filters the attack path in the + /// response. Supported fields: * `valued_resources` supports = + /// + /// [pageSize] - The maximum number of results to return in a single response. + /// Default is 10, minimum is 1, maximum is 1000. + /// + /// [pageToken] - The value returned by the last `ListAttackPathsResponse`; + /// indicates that this is a continuation of a prior `ListAttackPaths` call, + /// and that the system should return the next page of data. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [ListAttackPathsResponse]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future list( + core.String parent, { + core.String? filter, + core.int? pageSize, + core.String? pageToken, + core.String? 
$fields, + }) async { + final queryParams_ = >{ + if (filter != null) 'filter': [filter], + if (pageSize != null) 'pageSize': ['${pageSize}'], + if (pageToken != null) 'pageToken': [pageToken], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/attackPaths'; + + final response_ = await _requester.request( + url_, + 'GET', + queryParams: queryParams_, + ); + return ListAttackPathsResponse.fromJson( + response_ as core.Map); + } +} + class OrganizationsBigQueryExportsResource { final commons.ApiRequester _requester; @@ -4698,8 +4765,8 @@ class OrganizationsOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// Request parameters: /// @@ -11861,6 +11928,68 @@ class DataFlowEvent { }; } +/// Details about data retention deletion violations, in which the data is +/// non-compliant based on their retention or deletion time, as defined in the +/// applicable data security policy. +/// +/// The Data Retention Deletion (DRD) control is a control of the DSPM (Data +/// Security Posture Management) suite that enables organizations to manage data +/// retention and deletion policies in compliance with regulations, such as GDPR +/// and CRPA. DRD supports two primary policy types: maximum storage length (max +/// TTL) and minimum storage length (min TTL). Both are aimed at helping +/// organizations meet regulatory and data management commitments. +class DataRetentionDeletionEvent { + /// Number of objects that violated the policy for this resource. + /// + /// If the number is less than 1,000, then the value of this field is the + /// exact number. If the number of objects that violated the policy is greater + /// than or equal to 1,000, then the value of this field is 1000. + core.String? dataObjectCount; + + /// Timestamp indicating when the event was detected. + core.String? eventDetectionTime; + + /// Type of the DRD event. + /// Possible string values are: + /// - "EVENT_TYPE_UNSPECIFIED" : Unspecified event type. + /// - "EVENT_TYPE_MAX_TTL_EXCEEDED" : The maximum retention time has been + /// exceeded. + core.String? eventType; + + /// Maximum duration of retention allowed from the DRD control. + /// + /// This comes from the DRD control where users set a max TTL for their data. + /// For example, suppose that a user set the max TTL for a Cloud Storage + /// bucket to 90 days. However, an object in that bucket is 100 days old. In + /// this case, a DataRetentionDeletionEvent will be generated for that Cloud + /// Storage bucket, and the max_retention_allowed is 90 days. + core.String? 
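// --- Hedged example (editor's note, not part of the generated diff) ---------
// A minimal sketch of the new organizations.attackPaths.list call, using
// application-default credentials from package:googleapis_auth. The
// organization ID and page size are placeholders, and the v1 client class is
// assumed to be SecurityCommandCenterApi; the response is printed via
// toJson() so no ListAttackPathsResponse field names are assumed.
import 'package:googleapis/securitycenter/v1.dart' as scc;
import 'package:googleapis_auth/auth_io.dart' as auth;

Future<void> listAttackPaths() async {
  final client = await auth.clientViaApplicationDefaultCredentials(
      scopes: [scc.SecurityCommandCenterApi.cloudPlatformScope]);
  try {
    final api = scc.SecurityCommandCenterApi(client);
    final response = await api.organizations.attackPaths.list(
      'organizations/123', // placeholder; see the parent formats listed above
      pageSize: 10,
    );
    print(response.toJson());
  } finally {
    client.close();
  }
}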
maxRetentionAllowed; + + DataRetentionDeletionEvent({ + this.dataObjectCount, + this.eventDetectionTime, + this.eventType, + this.maxRetentionAllowed, + }); + + DataRetentionDeletionEvent.fromJson(core.Map json_) + : this( + dataObjectCount: json_['dataObjectCount'] as core.String?, + eventDetectionTime: json_['eventDetectionTime'] as core.String?, + eventType: json_['eventType'] as core.String?, + maxRetentionAllowed: json_['maxRetentionAllowed'] as core.String?, + ); + + core.Map toJson() => { + if (dataObjectCount != null) 'dataObjectCount': dataObjectCount!, + if (eventDetectionTime != null) + 'eventDetectionTime': eventDetectionTime!, + if (eventType != null) 'eventType': eventType!, + if (maxRetentionAllowed != null) + 'maxRetentionAllowed': maxRetentionAllowed!, + }; +} + /// Represents database access information, such as queries. /// /// A database may be a sub-resource of an instance (as in the case of Cloud SQL @@ -11962,6 +12091,26 @@ class Detection { }; } +/// Contains information about the disk associated with the finding. +class Disk { + /// The name of the disk, for example, + /// "https://www.googleapis.com/compute/v1/projects/project-id/zones/zone-id/disks/disk-id". + core.String? name; + + Disk({ + this.name, + }); + + Disk.fromJson(core.Map json_) + : this( + name: json_['name'] as core.String?, + ); + + core.Map toJson() => { + if (name != null) 'name': name!, + }; +} + /// Path of the file in terms of underlying disk/partition identifiers. class DiskPath { /// UUID of the partition (format @@ -12030,6 +12179,14 @@ class DynamicMuteRecord { /// `enablement_state` for the module in all child folders or projects is also /// `enabled`. EffectiveEventThreatDetectionCustomModule is read-only. class EffectiveEventThreatDetectionCustomModule { + /// The cloud provider of the custom module. + /// Possible string values are: + /// - "CLOUD_PROVIDER_UNSPECIFIED" : Unspecified cloud provider. + /// - "GOOGLE_CLOUD_PLATFORM" : Google Cloud Platform. + /// - "AMAZON_WEB_SERVICES" : Amazon Web Services. + /// - "MICROSOFT_AZURE" : Microsoft Azure. + core.String? cloudProvider; + /// Config for the effective module. /// /// Output only. @@ -12078,6 +12235,7 @@ class EffectiveEventThreatDetectionCustomModule { core.String? type; EffectiveEventThreatDetectionCustomModule({ + this.cloudProvider, this.config, this.description, this.displayName, @@ -12088,6 +12246,7 @@ class EffectiveEventThreatDetectionCustomModule { EffectiveEventThreatDetectionCustomModule.fromJson(core.Map json_) : this( + cloudProvider: json_['cloudProvider'] as core.String?, config: json_.containsKey('config') ? json_['config'] as core.Map : null, @@ -12099,6 +12258,7 @@ class EffectiveEventThreatDetectionCustomModule { ); core.Map toJson() => { + if (cloudProvider != null) 'cloudProvider': cloudProvider!, if (config != null) 'config': config!, if (description != null) 'description': description!, if (displayName != null) 'displayName': displayName!, @@ -12158,6 +12318,14 @@ class EventThreatDetectionCustomModule { /// Output only. core.String? ancestorModule; + /// The cloud provider of the custom module. + /// Possible string values are: + /// - "CLOUD_PROVIDER_UNSPECIFIED" : Unspecified cloud provider. + /// - "GOOGLE_CLOUD_PLATFORM" : Google Cloud. + /// - "AMAZON_WEB_SERVICES" : Amazon Web Services (AWS). + /// - "MICROSOFT_AZURE" : Microsoft Azure. + core.String? cloudProvider; + /// Config for the module. /// /// For the resident module, its config value is defined at this level. 
For @@ -12211,6 +12379,7 @@ class EventThreatDetectionCustomModule { EventThreatDetectionCustomModule({ this.ancestorModule, + this.cloudProvider, this.config, this.description, this.displayName, @@ -12224,6 +12393,7 @@ class EventThreatDetectionCustomModule { EventThreatDetectionCustomModule.fromJson(core.Map json_) : this( ancestorModule: json_['ancestorModule'] as core.String?, + cloudProvider: json_['cloudProvider'] as core.String?, config: json_.containsKey('config') ? json_['config'] as core.Map : null, @@ -12238,6 +12408,7 @@ class EventThreatDetectionCustomModule { core.Map toJson() => { if (ancestorModule != null) 'ancestorModule': ancestorModule!, + if (cloudProvider != null) 'cloudProvider': cloudProvider!, if (config != null) 'config': config!, if (description != null) 'description': description!, if (displayName != null) 'displayName': displayName!, @@ -12494,12 +12665,18 @@ class Finding { /// Data flow events associated with the finding. core.List? dataFlowEvents; + /// Data retention deletion events associated with the finding. + core.List? dataRetentionDeletionEvents; + /// Database associated with the finding. Database? database; /// Contains more details about the finding. core.String? description; + /// Disk associated with the finding. + Disk? disk; + /// The time the finding was first detected. /// /// If an existing finding is updated, then this is the time the update @@ -12769,8 +12946,10 @@ class Finding { this.createTime, this.dataAccessEvents, this.dataFlowEvents, + this.dataRetentionDeletionEvents, this.database, this.description, + this.disk, this.eventTime, this.exfiltration, this.externalSystems, @@ -12868,11 +13047,20 @@ class Finding { ?.map((value) => DataFlowEvent.fromJson( value as core.Map)) .toList(), + dataRetentionDeletionEvents: + (json_['dataRetentionDeletionEvents'] as core.List?) + ?.map((value) => DataRetentionDeletionEvent.fromJson( + value as core.Map)) + .toList(), database: json_.containsKey('database') ? Database.fromJson( json_['database'] as core.Map) : null, description: json_['description'] as core.String?, + disk: json_.containsKey('disk') + ? Disk.fromJson( + json_['disk'] as core.Map) + : null, eventTime: json_['eventTime'] as core.String?, exfiltration: json_.containsKey('exfiltration') ? Exfiltration.fromJson( @@ -12993,8 +13181,11 @@ class Finding { if (createTime != null) 'createTime': createTime!, if (dataAccessEvents != null) 'dataAccessEvents': dataAccessEvents!, if (dataFlowEvents != null) 'dataFlowEvents': dataFlowEvents!, + if (dataRetentionDeletionEvents != null) + 'dataRetentionDeletionEvents': dataRetentionDeletionEvents!, if (database != null) 'database': database!, if (description != null) 'description': description!, + if (disk != null) 'disk': disk!, if (eventTime != null) 'eventTime': eventTime!, if (exfiltration != null) 'exfiltration': exfiltration!, if (externalSystems != null) 'externalSystems': externalSystems!, @@ -13376,6 +13567,14 @@ class GoogleCloudSecuritycenterV1CustomOutputSpec { /// enablement_state for the module in all child folders or projects is also /// `enabled`. EffectiveSecurityHealthAnalyticsCustomModule is read-only. class GoogleCloudSecuritycenterV1EffectiveSecurityHealthAnalyticsCustomModule { + /// The cloud provider of the custom module. + /// Possible string values are: + /// - "CLOUD_PROVIDER_UNSPECIFIED" : Unspecified cloud provider. + /// - "GOOGLE_CLOUD_PLATFORM" : Google Cloud Platform. + /// - "AMAZON_WEB_SERVICES" : Amazon Web Services. + /// - "MICROSOFT_AZURE" : Microsoft Azure. 
+ core.String? cloudProvider; + /// The user-specified configuration for the module. /// /// Output only. @@ -13412,6 +13611,7 @@ class GoogleCloudSecuritycenterV1EffectiveSecurityHealthAnalyticsCustomModule { core.String? name; GoogleCloudSecuritycenterV1EffectiveSecurityHealthAnalyticsCustomModule({ + this.cloudProvider, this.customConfig, this.displayName, this.enablementState, @@ -13421,6 +13621,7 @@ class GoogleCloudSecuritycenterV1EffectiveSecurityHealthAnalyticsCustomModule { GoogleCloudSecuritycenterV1EffectiveSecurityHealthAnalyticsCustomModule.fromJson( core.Map json_) : this( + cloudProvider: json_['cloudProvider'] as core.String?, customConfig: json_.containsKey('customConfig') ? GoogleCloudSecuritycenterV1CustomConfig.fromJson( json_['customConfig'] as core.Map) @@ -13431,6 +13632,7 @@ class GoogleCloudSecuritycenterV1EffectiveSecurityHealthAnalyticsCustomModule { ); core.Map toJson() => { + if (cloudProvider != null) 'cloudProvider': cloudProvider!, if (customConfig != null) 'customConfig': customConfig!, if (displayName != null) 'displayName': displayName!, if (enablementState != null) 'enablementState': enablementState!, @@ -13778,8 +13980,9 @@ class GoogleCloudSecuritycenterV1ResourceValueConfig { /// Tag values combined with `AND` to check against. /// - /// Values in the form "tagValues/123" Example: `[ "tagValues/123", - /// "tagValues/456", "tagValues/789" ]` + /// For Google Cloud resources, they are tag value IDs in the form of + /// "tagValues/123". Example: `[ "tagValues/123", "tagValues/456", + /// "tagValues/789" ]` /// https://cloud.google.com/resource-manager/docs/tags/tags-creating-and-managing /// /// Required. @@ -13868,6 +14071,14 @@ class GoogleCloudSecuritycenterV1SecurityHealthAnalyticsCustomModule { /// Output only. core.String? ancestorModule; + /// The cloud provider of the custom module. + /// Possible string values are: + /// - "CLOUD_PROVIDER_UNSPECIFIED" : Unspecified cloud provider. + /// - "GOOGLE_CLOUD_PLATFORM" : Google Cloud. + /// - "AMAZON_WEB_SERVICES" : Amazon Web Services (AWS). + /// - "MICROSOFT_AZURE" : Microsoft Azure. + core.String? cloudProvider; + /// The user specified custom configuration for the module. GoogleCloudSecuritycenterV1CustomConfig? customConfig; @@ -13915,6 +14126,7 @@ class GoogleCloudSecuritycenterV1SecurityHealthAnalyticsCustomModule { GoogleCloudSecuritycenterV1SecurityHealthAnalyticsCustomModule({ this.ancestorModule, + this.cloudProvider, this.customConfig, this.displayName, this.enablementState, @@ -13927,6 +14139,7 @@ class GoogleCloudSecuritycenterV1SecurityHealthAnalyticsCustomModule { core.Map json_) : this( ancestorModule: json_['ancestorModule'] as core.String?, + cloudProvider: json_['cloudProvider'] as core.String?, customConfig: json_.containsKey('customConfig') ? GoogleCloudSecuritycenterV1CustomConfig.fromJson( json_['customConfig'] as core.Map) @@ -13940,6 +14153,7 @@ class GoogleCloudSecuritycenterV1SecurityHealthAnalyticsCustomModule { core.Map toJson() => { if (ancestorModule != null) 'ancestorModule': ancestorModule!, + if (cloudProvider != null) 'cloudProvider': cloudProvider!, if (customConfig != null) 'customConfig': customConfig!, if (displayName != null) 'displayName': displayName!, if (enablementState != null) 'enablementState': enablementState!, @@ -14190,6 +14404,9 @@ class GroupFindingsRequest { /// only possible state_change is "UNUSED", which will be the state_change set /// for all findings present at read_time. 
If this field is set then /// `state_change` must be a specified field in `group_by`. + @core.Deprecated( + 'Not supported. Member documentation may have more information.', + ) core.String? compareDuration; /// Expression that defines the filter to apply across findings. @@ -14245,6 +14462,9 @@ class GroupFindingsRequest { /// The filter is limited to findings existing at the supplied time and their /// values are those at that specific time. Absence of this field will default /// to the API's version of NOW. + @core.Deprecated( + 'Not supported. Member documentation may have more information.', + ) core.String? readTime; GroupFindingsRequest({ @@ -16975,6 +17195,9 @@ class SetFindingStateRequest { /// If unset, defaults to the request time. /// /// Optional. + @core.Deprecated( + 'Not supported. Member documentation may have more information.', + ) core.String? startTime; /// The desired State of the finding. diff --git a/generated/googleapis/lib/securityposture/v1.dart b/generated/googleapis/lib/securityposture/v1.dart new file mode 100644 index 000000000..9e420328a --- /dev/null +++ b/generated/googleapis/lib/securityposture/v1.dart @@ -0,0 +1,3181 @@ +// This is a generated file (see the discoveryapis_generator project). + +// ignore_for_file: camel_case_types +// ignore_for_file: comment_references +// ignore_for_file: deprecated_member_use_from_same_package +// ignore_for_file: doc_directive_unknown +// ignore_for_file: lines_longer_than_80_chars +// ignore_for_file: non_constant_identifier_names +// ignore_for_file: prefer_interpolation_to_compose_strings +// ignore_for_file: unintended_html_in_doc_comment +// ignore_for_file: unnecessary_brace_in_string_interps +// ignore_for_file: unnecessary_lambdas +// ignore_for_file: unnecessary_string_interpolations + +/// Security Posture API - v1 +/// +/// Defines, assesses, and monitors the overall status of your security in +/// Google Cloud. You can use security postures to evaluate your current cloud +/// security against defined benchmarks and help maintain the level of security +/// that your organization requires. +/// +/// For more information, see +/// +/// Create an instance of [SecurityPostureApi] to access these resources: +/// +/// - [OrganizationsResource] +/// - [OrganizationsLocationsResource] +/// - [OrganizationsLocationsOperationsResource] +/// - [OrganizationsLocationsPostureDeploymentsResource] +/// - [OrganizationsLocationsPostureTemplatesResource] +/// - [OrganizationsLocationsPosturesResource] +/// - [OrganizationsLocationsReportsResource] +/// - [ProjectsResource] +/// - [ProjectsLocationsResource] +library; + +import 'dart:async' as async; +import 'dart:convert' as convert; +import 'dart:core' as core; + +import 'package:_discoveryapis_commons/_discoveryapis_commons.dart' as commons; +import 'package:http/http.dart' as http; + +import '../shared.dart'; +import '../src/user_agent.dart'; + +export 'package:_discoveryapis_commons/_discoveryapis_commons.dart' + show ApiRequestError, DetailedApiRequestError; + +/// Defines, assesses, and monitors the overall status of your security in +/// Google Cloud. +/// +/// You can use security postures to evaluate your current cloud security +/// against defined benchmarks and help maintain the level of security that your +/// organization requires. +class SecurityPostureApi { + /// See, edit, configure, and delete your Google Cloud data and see the email + /// address for your Google Account. 
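// --- Hedged example (editor's note, not part of the generated diff) ---------
// A small sketch that reads the Finding fields added in this revision
// (dataRetentionDeletionEvents, disk); all field names come from the hunks
// above.
import 'package:googleapis/securitycenter/v1.dart' as scc;

void describeRetentionViolations(scc.Finding finding) {
  for (final event in
      finding.dataRetentionDeletionEvents ?? <scc.DataRetentionDeletionEvent>[]) {
    print('${event.eventType}: ${event.dataObjectCount} object(s), '
        'max retention allowed ${event.maxRetentionAllowed}');
  }
  if (finding.disk != null) {
    print('Affected disk: ${finding.disk!.name}');
  }
}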
+ static const cloudPlatformScope = + 'https://www.googleapis.com/auth/cloud-platform'; + + final commons.ApiRequester _requester; + + OrganizationsResource get organizations => OrganizationsResource(_requester); + ProjectsResource get projects => ProjectsResource(_requester); + + SecurityPostureApi(http.Client client, + {core.String rootUrl = 'https://securityposture.googleapis.com/', + core.String servicePath = ''}) + : _requester = + commons.ApiRequester(client, rootUrl, servicePath, requestHeaders); +} + +class OrganizationsResource { + final commons.ApiRequester _requester; + + OrganizationsLocationsResource get locations => + OrganizationsLocationsResource(_requester); + + OrganizationsResource(commons.ApiRequester client) : _requester = client; +} + +class OrganizationsLocationsResource { + final commons.ApiRequester _requester; + + OrganizationsLocationsOperationsResource get operations => + OrganizationsLocationsOperationsResource(_requester); + OrganizationsLocationsPostureDeploymentsResource get postureDeployments => + OrganizationsLocationsPostureDeploymentsResource(_requester); + OrganizationsLocationsPostureTemplatesResource get postureTemplates => + OrganizationsLocationsPostureTemplatesResource(_requester); + OrganizationsLocationsPosturesResource get postures => + OrganizationsLocationsPosturesResource(_requester); + OrganizationsLocationsReportsResource get reports => + OrganizationsLocationsReportsResource(_requester); + + OrganizationsLocationsResource(commons.ApiRequester client) + : _requester = client; +} + +class OrganizationsLocationsOperationsResource { + final commons.ApiRequester _requester; + + OrganizationsLocationsOperationsResource(commons.ApiRequester client) + : _requester = client; + + /// Starts asynchronous cancellation on a long-running operation. + /// + /// The server makes a best effort to cancel the operation, but success is not + /// guaranteed. If the server doesn't support this method, it returns + /// `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation + /// or other methods to check whether the cancellation succeeded or whether + /// the operation completed despite cancellation. On successful cancellation, + /// the operation is not deleted; instead, it becomes an operation with an + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. + /// + /// [request] - The metadata request object. + /// + /// Request parameters: + /// + /// [name] - The name of the operation resource to be cancelled. + /// Value must have pattern + /// `^organizations/\[^/\]+/locations/\[^/\]+/operations/\[^/\]+$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [Empty]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future cancel( + CancelOperationRequest request, + core.String name, { + core.String? $fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name') + ':cancel'; + + final response_ = await _requester.request( + url_, + 'POST', + body: body_, + queryParams: queryParams_, + ); + return Empty.fromJson(response_ as core.Map); + } + + /// Deletes a long-running operation. 
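// --- Hedged example (editor's note, not part of the generated diff) ---------
// A minimal bootstrap sketch for the new securityposture/v1 surface, assuming
// package:googleapis_auth is available for application-default credentials.
// The callback style keeps client cleanup in one place.
import 'package:googleapis/securityposture/v1.dart' as sp;
import 'package:googleapis_auth/auth_io.dart' as auth;

Future<void> withSecurityPostureApi(
    Future<void> Function(sp.SecurityPostureApi api) body) async {
  final client = await auth.clientViaApplicationDefaultCredentials(
      scopes: [sp.SecurityPostureApi.cloudPlatformScope]);
  try {
    await body(sp.SecurityPostureApi(client));
  } finally {
    client.close();
  }
}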
+ /// + /// This method indicates that the client is no longer interested in the + /// operation result. It does not cancel the operation. If the server doesn't + /// support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. + /// + /// Request parameters: + /// + /// [name] - The name of the operation resource to be deleted. + /// Value must have pattern + /// `^organizations/\[^/\]+/locations/\[^/\]+/operations/\[^/\]+$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [Empty]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future delete( + core.String name, { + core.String? $fields, + }) async { + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name'); + + final response_ = await _requester.request( + url_, + 'DELETE', + queryParams: queryParams_, + ); + return Empty.fromJson(response_ as core.Map); + } + + /// Gets the latest state of a long-running operation. + /// + /// Clients can use this method to poll the operation result at intervals as + /// recommended by the API service. + /// + /// Request parameters: + /// + /// [name] - The name of the operation resource. + /// Value must have pattern + /// `^organizations/\[^/\]+/locations/\[^/\]+/operations/\[^/\]+$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [Operation]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future get( + core.String name, { + core.String? $fields, + }) async { + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name'); + + final response_ = await _requester.request( + url_, + 'GET', + queryParams: queryParams_, + ); + return Operation.fromJson(response_ as core.Map); + } + + /// Lists operations that match the specified filter in the request. + /// + /// If the server doesn't support this method, it returns `UNIMPLEMENTED`. + /// + /// Request parameters: + /// + /// [name] - The name of the operation's parent resource. + /// Value must have pattern `^organizations/\[^/\]+/locations/\[^/\]+$`. + /// + /// [filter] - The standard list filter. + /// + /// [pageSize] - The standard list page size. + /// + /// [pageToken] - The standard list page token. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [ListOperationsResponse]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future list( + core.String name, { + core.String? filter, + core.int? pageSize, + core.String? pageToken, + core.String? 
$fields, + }) async { + final queryParams_ = >{ + if (filter != null) 'filter': [filter], + if (pageSize != null) 'pageSize': ['${pageSize}'], + if (pageToken != null) 'pageToken': [pageToken], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name') + '/operations'; + + final response_ = await _requester.request( + url_, + 'GET', + queryParams: queryParams_, + ); + return ListOperationsResponse.fromJson( + response_ as core.Map); + } +} + +class OrganizationsLocationsPostureDeploymentsResource { + final commons.ApiRequester _requester; + + OrganizationsLocationsPostureDeploymentsResource(commons.ApiRequester client) + : _requester = client; + + /// Creates a new PostureDeployment in a given project and location. + /// + /// [request] - The metadata request object. + /// + /// Request parameters: + /// + /// [parent] - Required. The parent resource name, in the format + /// `organizations/{organization}/locations/global`. + /// Value must have pattern `^organizations/\[^/\]+/locations/\[^/\]+$`. + /// + /// [postureDeploymentId] - Required. An identifier for the posture + /// deployment. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [Operation]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future create( + PostureDeployment request, + core.String parent, { + core.String? postureDeploymentId, + core.String? $fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if (postureDeploymentId != null) + 'postureDeploymentId': [postureDeploymentId], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/postureDeployments'; + + final response_ = await _requester.request( + url_, + 'POST', + body: body_, + queryParams: queryParams_, + ); + return Operation.fromJson(response_ as core.Map); + } + + /// Deletes a PostureDeployment. + /// + /// Request parameters: + /// + /// [name] - Required. The name of the posture deployment, in the format + /// `organizations/{organization}/locations/global/postureDeployments/{posture_id}`. + /// Value must have pattern + /// `^organizations/\[^/\]+/locations/\[^/\]+/postureDeployments/\[^/\]+$`. + /// + /// [etag] - Optional. An opaque identifier for the current version of the + /// posture deployment. If you provide this value, then it must match the + /// existing value. If the values don't match, then the request fails with an + /// ABORTED error. If you omit this value, then the posture deployment is + /// deleted regardless of its current `etag` value. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [Operation]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future delete( + core.String name, { + core.String? etag, + core.String? 
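// --- Hedged example (editor's note, not part of the generated diff) ---------
// Most mutating securityposture calls above return a long-running Operation.
// This sketch polls operations.get until `done` is set; the 5-second delay is
// arbitrary, and the Operation/Status field names (done, name, error, message)
// follow the standard google.longrunning shape.
import 'package:googleapis/securityposture/v1.dart' as sp;

Future<sp.Operation> waitForOperation(
    sp.SecurityPostureApi api, sp.Operation operation) async {
  var current = operation;
  while (current.done != true) {
    await Future<void>.delayed(const Duration(seconds: 5));
    current = await api.organizations.locations.operations.get(current.name!);
  }
  final error = current.error;
  if (error != null) {
    throw StateError('Operation ${current.name} failed: ${error.message}');
  }
  return current;
}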
$fields, + }) async { + final queryParams_ = >{ + if (etag != null) 'etag': [etag], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name'); + + final response_ = await _requester.request( + url_, + 'DELETE', + queryParams: queryParams_, + ); + return Operation.fromJson(response_ as core.Map); + } + + /// Gets details for a PostureDeployment. + /// + /// Request parameters: + /// + /// [name] - Required. The name of the PostureDeployment, in the format + /// `organizations/{organization}/locations/global/postureDeployments/{posture_deployment_id}`. + /// Value must have pattern + /// `^organizations/\[^/\]+/locations/\[^/\]+/postureDeployments/\[^/\]+$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [PostureDeployment]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future get( + core.String name, { + core.String? $fields, + }) async { + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name'); + + final response_ = await _requester.request( + url_, + 'GET', + queryParams: queryParams_, + ); + return PostureDeployment.fromJson( + response_ as core.Map); + } + + /// Lists every PostureDeployment in a project and location. + /// + /// Request parameters: + /// + /// [parent] - Required. The parent resource name, in the format + /// `organizations/{organization}/locations/global`. + /// Value must have pattern `^organizations/\[^/\]+/locations/\[^/\]+$`. + /// + /// [filter] - Optional. A filter to apply to the list of postures, in the + /// format defined in \[AIP-160: Filtering\](https://google.aip.dev/160). + /// + /// [pageSize] - Optional. The maximum number of posture deployments to + /// return. The default value is `500`. If you exceed the maximum value of + /// `1000`, then the service uses the maximum value. + /// + /// [pageToken] - Optional. A pagination token returned from a previous + /// request to list posture deployments. Provide this token to retrieve the + /// next page of results. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [ListPostureDeploymentsResponse]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future list( + core.String parent, { + core.String? filter, + core.int? pageSize, + core.String? pageToken, + core.String? $fields, + }) async { + final queryParams_ = >{ + if (filter != null) 'filter': [filter], + if (pageSize != null) 'pageSize': ['${pageSize}'], + if (pageToken != null) 'pageToken': [pageToken], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/postureDeployments'; + + final response_ = await _requester.request( + url_, + 'GET', + queryParams: queryParams_, + ); + return ListPostureDeploymentsResponse.fromJson( + response_ as core.Map); + } + + /// Updates an existing PostureDeployment. 
+ /// + /// To prevent concurrent updates from overwriting each other, always follow + /// the read-modify-write pattern when you update a posture deployment: 1. + /// Call GetPostureDeployment to get the current version of the deployment. 2. + /// Update the fields in the deployment as needed. 3. Call + /// UpdatePostureDeployment to update the deployment. Ensure that your request + /// includes the `etag` value from the GetPostureDeployment response. + /// **Important:** If you omit the `etag` when you call + /// UpdatePostureDeployment, then the updated deployment unconditionally + /// overwrites the existing deployment. + /// + /// [request] - The metadata request object. + /// + /// Request parameters: + /// + /// [name] - Required. Identifier. The name of the posture deployment, in the + /// format + /// `organizations/{organization}/locations/global/postureDeployments/{deployment_id}`. + /// Value must have pattern + /// `^organizations/\[^/\]+/locations/\[^/\]+/postureDeployments/\[^/\]+$`. + /// + /// [updateMask] - Required. The fields in the PostureDeployment to update. + /// You can update only the following fields: * PostureDeployment.posture_id * + /// PostureDeployment.posture_revision_id + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [Operation]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future patch( + PostureDeployment request, + core.String name, { + core.String? updateMask, + core.String? $fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if (updateMask != null) 'updateMask': [updateMask], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name'); + + final response_ = await _requester.request( + url_, + 'PATCH', + body: body_, + queryParams: queryParams_, + ); + return Operation.fromJson(response_ as core.Map); + } +} + +class OrganizationsLocationsPostureTemplatesResource { + final commons.ApiRequester _requester; + + OrganizationsLocationsPostureTemplatesResource(commons.ApiRequester client) + : _requester = client; + + /// Gets a single revision of a PostureTemplate. + /// + /// Request parameters: + /// + /// [name] - Required. The name of the PostureTemplate, in the format + /// `organizations/{organization}/locations/global/postureTemplates/{posture_template}`. + /// Value must have pattern + /// `^organizations/\[^/\]+/locations/\[^/\]+/postureTemplates/\[^/\]+$`. + /// + /// [revisionId] - Optional. The posture template revision to retrieve. If not + /// specified, the most recently updated revision is retrieved. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [PostureTemplate]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future get( + core.String name, { + core.String? revisionId, + core.String? 
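// --- Hedged example (editor's note, not part of the generated diff) ---------
// A sketch of the read-modify-write flow documented on patch() above. The
// deployment name, posture name, and revision ID are placeholders, and the
// PostureDeployment field names (postureId, postureRevisionId) are assumed
// from the update-mask documentation rather than shown in this hunk.
import 'package:googleapis/securityposture/v1.dart' as sp;

Future<sp.Operation> repointPostureDeployment(sp.SecurityPostureApi api) async {
  const name =
      'organizations/123/locations/global/postureDeployments/my-deployment';
  // 1. Read the current deployment; the returned object carries its `etag`.
  final deployment =
      await api.organizations.locations.postureDeployments.get(name);
  // 2. Change only the fields allowed by the update mask.
  deployment.postureId =
      'organizations/123/locations/global/postures/my-posture';
  deployment.postureRevisionId = 'abcdef0';
  // 3. Patch with the fetched object so the etag from step 1 is sent back.
  return api.organizations.locations.postureDeployments.patch(
    deployment,
    name,
    updateMask: 'posture_id,posture_revision_id',
  );
}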
$fields, + }) async { + final queryParams_ = >{ + if (revisionId != null) 'revisionId': [revisionId], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name'); + + final response_ = await _requester.request( + url_, + 'GET', + queryParams: queryParams_, + ); + return PostureTemplate.fromJson( + response_ as core.Map); + } + + /// Lists every PostureTemplate in a given organization and location. + /// + /// Request parameters: + /// + /// [parent] - Required. The parent resource name, in the format + /// `organizations/{organization}/locations/global`. + /// Value must have pattern `^organizations/\[^/\]+/locations/\[^/\]+$`. + /// + /// [filter] - Optional. A filter to apply to the list of postures, in the + /// format defined in \[AIP-160: Filtering\](https://google.aip.dev/160). + /// + /// [pageSize] - Optional. The maximum number of posture templates to return. + /// The default value is `500`. If you exceed the maximum value of `1000`, + /// then the service uses the maximum value. + /// + /// [pageToken] - Optional. A pagination token returned from a previous + /// request to list posture templates. Provide this token to retrieve the next + /// page of results. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [ListPostureTemplatesResponse]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future list( + core.String parent, { + core.String? filter, + core.int? pageSize, + core.String? pageToken, + core.String? $fields, + }) async { + final queryParams_ = >{ + if (filter != null) 'filter': [filter], + if (pageSize != null) 'pageSize': ['${pageSize}'], + if (pageToken != null) 'pageToken': [pageToken], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/postureTemplates'; + + final response_ = await _requester.request( + url_, + 'GET', + queryParams: queryParams_, + ); + return ListPostureTemplatesResponse.fromJson( + response_ as core.Map); + } +} + +class OrganizationsLocationsPosturesResource { + final commons.ApiRequester _requester; + + OrganizationsLocationsPosturesResource(commons.ApiRequester client) + : _requester = client; + + /// Creates a new Posture. + /// + /// [request] - The metadata request object. + /// + /// Request parameters: + /// + /// [parent] - Required. The parent resource name, in the format + /// `organizations/{organization}/locations/global`. + /// Value must have pattern `^organizations/\[^/\]+/locations/\[^/\]+$`. + /// + /// [postureId] - Required. An identifier for the posture. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [Operation]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future create( + Posture request, + core.String parent, { + core.String? postureId, + core.String? 
$fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if (postureId != null) 'postureId': [postureId], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/postures'; + + final response_ = await _requester.request( + url_, + 'POST', + body: body_, + queryParams: queryParams_, + ); + return Operation.fromJson(response_ as core.Map); + } + + /// Deletes all revisions of a Posture. + /// + /// You can only delete a posture if none of its revisions are deployed. + /// + /// Request parameters: + /// + /// [name] - Required. The name of the Posture, in the format + /// `organizations/{organization}/locations/global/postures/{posture_id}`. + /// Value must have pattern + /// `^organizations/\[^/\]+/locations/\[^/\]+/postures/\[^/\]+$`. + /// + /// [etag] - Optional. An opaque identifier for the current version of the + /// posture. If you provide this value, then it must match the existing value. + /// If the values don't match, then the request fails with an ABORTED error. + /// If you omit this value, then the posture is deleted regardless of its + /// current `etag` value. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [Operation]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future delete( + core.String name, { + core.String? etag, + core.String? $fields, + }) async { + final queryParams_ = >{ + if (etag != null) 'etag': [etag], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name'); + + final response_ = await _requester.request( + url_, + 'DELETE', + queryParams: queryParams_, + ); + return Operation.fromJson(response_ as core.Map); + } + + /// Extracts existing policies from an organization, folder, or project, and + /// applies them to another organization, folder, or project as a Posture. + /// + /// If the other organization, folder, or project already has a posture, then + /// the result of the long-running operation is an ALREADY_EXISTS error. + /// + /// [request] - The metadata request object. + /// + /// Request parameters: + /// + /// [parent] - Required. The parent resource name, in the format + /// `organizations/{organization}/locations/global`. + /// Value must have pattern `^organizations/\[^/\]+/locations/\[^/\]+$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [Operation]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future extract( + ExtractPostureRequest request, + core.String parent, { + core.String? $fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/postures:extract'; + + final response_ = await _requester.request( + url_, + 'POST', + body: body_, + queryParams: queryParams_, + ); + return Operation.fromJson(response_ as core.Map); + } + + /// Gets a single revision of a Posture. 
+ /// + /// Request parameters: + /// + /// [name] - Required. The name of the Posture, in the format + /// `organizations/{organization}/locations/global/postures/{posture_id}`. + /// Value must have pattern + /// `^organizations/\[^/\]+/locations/\[^/\]+/postures/\[^/\]+$`. + /// + /// [revisionId] - Optional. The posture revision to retrieve. If not + /// specified, the most recently updated revision is retrieved. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [Posture]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future get( + core.String name, { + core.String? revisionId, + core.String? $fields, + }) async { + final queryParams_ = >{ + if (revisionId != null) 'revisionId': [revisionId], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name'); + + final response_ = await _requester.request( + url_, + 'GET', + queryParams: queryParams_, + ); + return Posture.fromJson(response_ as core.Map); + } + + /// Lists the most recent revisions of all Posture resources in a specified + /// organization and location. + /// + /// Request parameters: + /// + /// [parent] - Required. The parent resource name, in the format + /// `organizations/{organization}/locations/global`. + /// Value must have pattern `^organizations/\[^/\]+/locations/\[^/\]+$`. + /// + /// [filter] - Optional. A filter to apply to the list of postures, in the + /// format defined in \[AIP-160: Filtering\](https://google.aip.dev/160). + /// + /// [pageSize] - The maximum number of postures to return. The default value + /// is `500`. If you exceed the maximum value of `1000`, then the service uses + /// the maximum value. + /// + /// [pageToken] - A pagination token returned from a previous request to list + /// postures. Provide this token to retrieve the next page of results. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [ListPosturesResponse]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future list( + core.String parent, { + core.String? filter, + core.int? pageSize, + core.String? pageToken, + core.String? $fields, + }) async { + final queryParams_ = >{ + if (filter != null) 'filter': [filter], + if (pageSize != null) 'pageSize': ['${pageSize}'], + if (pageToken != null) 'pageToken': [pageToken], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/postures'; + + final response_ = await _requester.request( + url_, + 'GET', + queryParams: queryParams_, + ); + return ListPosturesResponse.fromJson( + response_ as core.Map); + } + + /// Lists all revisions of a single Posture. + /// + /// Request parameters: + /// + /// [name] - Required. The name of the Posture, in the format + /// `organizations/{organization}/locations/global/postures/{posture_id}`. + /// Value must have pattern + /// `^organizations/\[^/\]+/locations/\[^/\]+/postures/\[^/\]+$`. + /// + /// [pageSize] - Optional. The maximum number of posture revisions to return. + /// The default value is `500`. 
If you exceed the maximum value of `1000`, + /// then the service uses the maximum value. + /// + /// [pageToken] - Optional. A pagination token from a previous request to list + /// posture revisions. Provide this token to retrieve the next page of + /// results. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [ListPostureRevisionsResponse]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future listRevisions( + core.String name, { + core.int? pageSize, + core.String? pageToken, + core.String? $fields, + }) async { + final queryParams_ = >{ + if (pageSize != null) 'pageSize': ['${pageSize}'], + if (pageToken != null) 'pageToken': [pageToken], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name') + ':listRevisions'; + + final response_ = await _requester.request( + url_, + 'GET', + queryParams: queryParams_, + ); + return ListPostureRevisionsResponse.fromJson( + response_ as core.Map); + } + + /// Updates a revision of an existing Posture. + /// + /// If the posture revision that you update is currently deployed, then a new + /// revision of the posture is created. To prevent concurrent updates from + /// overwriting each other, always follow the read-modify-write pattern when + /// you update a posture: 1. Call GetPosture to get the current version of the + /// posture. 2. Update the fields in the posture as needed. 3. Call + /// UpdatePosture to update the posture. Ensure that your request includes the + /// `etag` value from the GetPosture response. **Important:** If you omit the + /// `etag` when you call UpdatePosture, then the updated posture + /// unconditionally overwrites the existing posture. + /// + /// [request] - The metadata request object. + /// + /// Request parameters: + /// + /// [name] - Required. Identifier. The name of the posture, in the format + /// `organizations/{organization}/locations/global/postures/{posture_id}`. + /// Value must have pattern + /// `^organizations/\[^/\]+/locations/\[^/\]+/postures/\[^/\]+$`. + /// + /// [revisionId] - Required. The revision ID of the posture to update. If the + /// posture revision that you update is currently deployed, then a new + /// revision of the posture is created. + /// + /// [updateMask] - Required. The fields in the Posture to update. You can + /// update only the following fields: * Posture.description * + /// Posture.policy_sets * Posture.state + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [Operation]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future patch( + Posture request, + core.String name, { + core.String? revisionId, + core.String? updateMask, + core.String? 
$fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if (revisionId != null) 'revisionId': [revisionId], + if (updateMask != null) 'updateMask': [updateMask], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name'); + + final response_ = await _requester.request( + url_, + 'PATCH', + body: body_, + queryParams: queryParams_, + ); + return Operation.fromJson(response_ as core.Map); + } +} + +class OrganizationsLocationsReportsResource { + final commons.ApiRequester _requester; + + OrganizationsLocationsReportsResource(commons.ApiRequester client) + : _requester = client; + + /// Validates a specified infrastructure-as-code (IaC) configuration, and + /// creates a Report with the validation results. + /// + /// Only Terraform configurations are supported. Only modified assets are + /// validated. + /// + /// [request] - The metadata request object. + /// + /// Request parameters: + /// + /// [parent] - Required. The parent resource name, in the format + /// `organizations/{organization}/locations/global`. + /// Value must have pattern `^organizations/\[^/\]+/locations/\[^/\]+$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [Operation]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future createIaCValidationReport( + CreateIaCValidationReportRequest request, + core.String parent, { + core.String? $fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + + core.Uri.encodeFull('$parent') + + '/reports:createIaCValidationReport'; + + final response_ = await _requester.request( + url_, + 'POST', + body: body_, + queryParams: queryParams_, + ); + return Operation.fromJson(response_ as core.Map); + } + + /// Gets details for a Report. + /// + /// Request parameters: + /// + /// [name] - Required. The name of the report, in the format + /// `organizations/{organization}/locations/global/reports/{report_id}`. + /// Value must have pattern + /// `^organizations/\[^/\]+/locations/\[^/\]+/reports/\[^/\]+$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [Report]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future get( + core.String name, { + core.String? $fields, + }) async { + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name'); + + final response_ = await _requester.request( + url_, + 'GET', + queryParams: queryParams_, + ); + return Report.fromJson(response_ as core.Map); + } + + /// Lists every Report in a given organization and location. + /// + /// Request parameters: + /// + /// [parent] - Required. The parent resource name, in the format + /// `organizations/{organization}/locations/global`. + /// Value must have pattern `^organizations/\[^/\]+/locations/\[^/\]+$`. + /// + /// [filter] - Optional. 
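// --- Hedged example (editor's note, not part of the generated diff) ---------
// A paging sketch for the postures.list call above. The organization ID is a
// placeholder, and the ListPosturesResponse field names (postures,
// nextPageToken) plus Posture.name follow the usual generated shapes but are
// not shown in this hunk, so treat them as assumptions.
import 'package:googleapis/securityposture/v1.dart' as sp;

Future<void> printLatestPostures(sp.SecurityPostureApi api) async {
  const parent = 'organizations/123/locations/global'; // placeholder
  String? pageToken;
  do {
    final page = await api.organizations.locations.postures.list(
      parent,
      pageSize: 100,
      pageToken: pageToken,
    );
    for (final posture in page.postures ?? <sp.Posture>[]) {
      print(posture.name);
    }
    pageToken = page.nextPageToken;
  } while (pageToken != null && pageToken.isNotEmpty);
}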
A filter to apply to the list of reports, in the + /// format defined in \[AIP-160: Filtering\](https://google.aip.dev/160). + /// + /// [pageSize] - Optional. The maximum number of reports to return. The + /// default value is `500`. If you exceed the maximum value of `1000`, then + /// the service uses the maximum value. + /// + /// [pageToken] - Optional. A pagination token returned from a previous + /// request to list reports. Provide this token to retrieve the next page of + /// results. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [ListReportsResponse]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future list( + core.String parent, { + core.String? filter, + core.int? pageSize, + core.String? pageToken, + core.String? $fields, + }) async { + final queryParams_ = >{ + if (filter != null) 'filter': [filter], + if (pageSize != null) 'pageSize': ['${pageSize}'], + if (pageToken != null) 'pageToken': [pageToken], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/reports'; + + final response_ = await _requester.request( + url_, + 'GET', + queryParams: queryParams_, + ); + return ListReportsResponse.fromJson( + response_ as core.Map); + } +} + +class ProjectsResource { + final commons.ApiRequester _requester; + + ProjectsLocationsResource get locations => + ProjectsLocationsResource(_requester); + + ProjectsResource(commons.ApiRequester client) : _requester = client; +} + +class ProjectsLocationsResource { + final commons.ApiRequester _requester; + + ProjectsLocationsResource(commons.ApiRequester client) : _requester = client; + + /// Gets information about a location. + /// + /// Request parameters: + /// + /// [name] - Resource name for the location. + /// Value must have pattern `^projects/\[^/\]+/locations/\[^/\]+$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [Location]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future get( + core.String name, { + core.String? $fields, + }) async { + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name'); + + final response_ = await _requester.request( + url_, + 'GET', + queryParams: queryParams_, + ); + return Location.fromJson(response_ as core.Map); + } + + /// Lists information about the supported locations for this service. + /// + /// Request parameters: + /// + /// [name] - The resource that owns the locations collection, if applicable. + /// Value must have pattern `^projects/\[^/\]+$`. + /// + /// [filter] - A filter to narrow down results to a preferred subset. The + /// filtering language accepts strings like `"displayName=tokyo"`, and is + /// documented in more detail in \[AIP-160\](https://google.aip.dev/160). + /// + /// [pageSize] - The maximum number of results to return. If not set, the + /// service selects a default. + /// + /// [pageToken] - A page token received from the `next_page_token` field in + /// the response. 
Send that page token to receive the subsequent page. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [ListLocationsResponse]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future list( + core.String name, { + core.String? filter, + core.int? pageSize, + core.String? pageToken, + core.String? $fields, + }) async { + final queryParams_ = >{ + if (filter != null) 'filter': [filter], + if (pageSize != null) 'pageSize': ['${pageSize}'], + if (pageToken != null) 'pageToken': [pageToken], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1/' + core.Uri.encodeFull('$name') + '/locations'; + + final response_ = await _requester.request( + url_, + 'GET', + queryParams: queryParams_, + ); + return ListLocationsResponse.fromJson( + response_ as core.Map); + } +} + +/// Details of a Cloud Asset Inventory asset that caused a violation. +class AssetDetails { + /// Information about the Cloud Asset Inventory asset that violated a policy. + /// + /// The format of this information can change at any time without prior + /// notice. Your application must not depend on this information in any way. + core.String? asset; + + /// The type of Cloud Asset Inventory asset. + /// + /// For a list of asset types, see + /// [Supported asset types](https://cloud.google.com/asset-inventory/docs/supported-asset-types). + core.String? assetType; + + AssetDetails({ + this.asset, + this.assetType, + }); + + AssetDetails.fromJson(core.Map json_) + : this( + asset: json_['asset'] as core.String?, + assetType: json_['assetType'] as core.String?, + ); + + core.Map toJson() => { + if (asset != null) 'asset': asset!, + if (assetType != null) 'assetType': assetType!, + }; +} + +/// The request message for Operations.CancelOperation. +typedef CancelOperationRequest = $Empty; + +/// Information about a compliance standard that the policy helps enforce. +class ComplianceStandard { + /// The control in the compliance standard that the policy helps enforce. + /// + /// For example, `AC-3`. + /// + /// Optional. + core.String? control; + + /// The compliance standard that the policy helps enforce. + /// + /// For example, `NIST SP 800-53`. + /// + /// Optional. + core.String? standard; + + ComplianceStandard({ + this.control, + this.standard, + }); + + ComplianceStandard.fromJson(core.Map json_) + : this( + control: json_['control'] as core.String?, + standard: json_['standard'] as core.String?, + ); + + core.Map toJson() => { + if (control != null) 'control': control!, + if (standard != null) 'standard': standard!, + }; +} + +/// Metadata for a constraint in a Policy. +class Constraint { + /// A predefined organization policy constraint. + /// + /// Optional. + OrgPolicyConstraint? orgPolicyConstraint; + + /// A custom organization policy constraint. + /// + /// Optional. + OrgPolicyConstraintCustom? orgPolicyConstraintCustom; + + /// A custom module for Security Health Analytics. + /// + /// Optional. + SecurityHealthAnalyticsCustomModule? securityHealthAnalyticsCustomModule; + + /// A built-in detector for Security Health Analytics. + /// + /// Optional. + SecurityHealthAnalyticsModule? 
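// Editorial sketch (not generated code): the list methods defined above follow
// the standard pagination pattern, so a caller walks pages by feeding each
// response's nextPageToken back into the next request. `reportsResource` and
// the parent name below are illustrative placeholders.
//
//   const parent = 'organizations/123456789012/locations/global';
//   String? pageToken;
//   do {
//     final page = await reportsResource.list(
//       parent,
//       pageSize: 500,
//       pageToken: pageToken,
//     );
//     for (final report in page.reports ?? const <Report>[]) {
//       // Inspect each Report here.
//     }
//     pageToken = page.nextPageToken;
//   } while (pageToken != null);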
securityHealthAnalyticsModule; + + Constraint({ + this.orgPolicyConstraint, + this.orgPolicyConstraintCustom, + this.securityHealthAnalyticsCustomModule, + this.securityHealthAnalyticsModule, + }); + + Constraint.fromJson(core.Map json_) + : this( + orgPolicyConstraint: json_.containsKey('orgPolicyConstraint') + ? OrgPolicyConstraint.fromJson(json_['orgPolicyConstraint'] + as core.Map) + : null, + orgPolicyConstraintCustom: + json_.containsKey('orgPolicyConstraintCustom') + ? OrgPolicyConstraintCustom.fromJson( + json_['orgPolicyConstraintCustom'] + as core.Map) + : null, + securityHealthAnalyticsCustomModule: + json_.containsKey('securityHealthAnalyticsCustomModule') + ? SecurityHealthAnalyticsCustomModule.fromJson( + json_['securityHealthAnalyticsCustomModule'] + as core.Map) + : null, + securityHealthAnalyticsModule: + json_.containsKey('securityHealthAnalyticsModule') + ? SecurityHealthAnalyticsModule.fromJson( + json_['securityHealthAnalyticsModule'] + as core.Map) + : null, + ); + + core.Map toJson() => { + if (orgPolicyConstraint != null) + 'orgPolicyConstraint': orgPolicyConstraint!, + if (orgPolicyConstraintCustom != null) + 'orgPolicyConstraintCustom': orgPolicyConstraintCustom!, + if (securityHealthAnalyticsCustomModule != null) + 'securityHealthAnalyticsCustomModule': + securityHealthAnalyticsCustomModule!, + if (securityHealthAnalyticsModule != null) + 'securityHealthAnalyticsModule': securityHealthAnalyticsModule!, + }; +} + +/// Request message for CreateIaCValidationReport. +class CreateIaCValidationReportRequest { + /// The infrastructure-as-code (IaC) configuration to validate. + /// + /// Required. + IaC? iac; + + CreateIaCValidationReportRequest({ + this.iac, + }); + + CreateIaCValidationReportRequest.fromJson(core.Map json_) + : this( + iac: json_.containsKey('iac') + ? IaC.fromJson( + json_['iac'] as core.Map) + : null, + ); + + core.Map toJson() => { + if (iac != null) 'iac': iac!, + }; +} + +/// A custom module configuration for Security Health Analytics. +/// +/// Use `CustomConfig` to create custom detectors that generate custom findings +/// for resources that you specify. +class CustomConfig { + /// Definitions of custom source properties to include in findings. + /// + /// Optional. + CustomOutputSpec? customOutput; + + /// A description of the vulnerability or misconfiguration that the custom + /// module detects. + /// + /// The description appears in each finding. Provide enough information to + /// help an investigator understand the finding. The value must be enclosed in + /// quotation marks. + /// + /// Optional. + core.String? description; + + /// The Common Expression Language (CEL) expression to evaluate. + /// + /// When the expression evaluates to `true` for a resource, a finding is + /// generated. + /// + /// Required. + Expr? predicate; + + /// An explanation of the steps that security teams can take to resolve the + /// detected issue. + /// + /// The explanation appears in each finding. + /// + /// Required. + core.String? recommendation; + + /// The resource types that the custom module operates on. + /// + /// Required. + ResourceSelector? resourceSelector; + + /// The severity of findings generated by the custom module. + /// + /// Required. + /// Possible string values are: + /// - "SEVERITY_UNSPECIFIED" : Default value. This value is unused. + /// - "CRITICAL" : Critical severity. + /// - "HIGH" : High severity. + /// - "MEDIUM" : Medium severity. + /// - "LOW" : Low severity. + core.String? 
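// Editorial sketch (not generated code): a Constraint carries one of the four
// kinds of checks listed above, so only the field that applies is set. A
// predefined organization policy constraint might look like this; the
// constraint ID is illustrative.
//
//   final constraint = Constraint(
//     orgPolicyConstraint: OrgPolicyConstraint(
//       cannedConstraintId: 'compute.disableSerialPortAccess',
//       policyRules: [
//         GoogleCloudSecuritypostureV1PolicyRule(enforce: true),
//       ],
//     ),
//   );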
severity; + + CustomConfig({ + this.customOutput, + this.description, + this.predicate, + this.recommendation, + this.resourceSelector, + this.severity, + }); + + CustomConfig.fromJson(core.Map json_) + : this( + customOutput: json_.containsKey('customOutput') + ? CustomOutputSpec.fromJson( + json_['customOutput'] as core.Map) + : null, + description: json_['description'] as core.String?, + predicate: json_.containsKey('predicate') + ? Expr.fromJson( + json_['predicate'] as core.Map) + : null, + recommendation: json_['recommendation'] as core.String?, + resourceSelector: json_.containsKey('resourceSelector') + ? ResourceSelector.fromJson(json_['resourceSelector'] + as core.Map) + : null, + severity: json_['severity'] as core.String?, + ); + + core.Map toJson() => { + if (customOutput != null) 'customOutput': customOutput!, + if (description != null) 'description': description!, + if (predicate != null) 'predicate': predicate!, + if (recommendation != null) 'recommendation': recommendation!, + if (resourceSelector != null) 'resourceSelector': resourceSelector!, + if (severity != null) 'severity': severity!, + }; +} + +/// Definitions of custom source properties that can appear in findings. +class CustomOutputSpec { + /// The custom source properties that can appear in findings. + /// + /// Optional. + core.List? properties; + + CustomOutputSpec({ + this.properties, + }); + + CustomOutputSpec.fromJson(core.Map json_) + : this( + properties: (json_['properties'] as core.List?) + ?.map((value) => Property.fromJson( + value as core.Map)) + .toList(), + ); + + core.Map toJson() => { + if (properties != null) 'properties': properties!, + }; +} + +/// A generic empty message that you can re-use to avoid defining duplicated +/// empty messages in your APIs. +/// +/// A typical example is to use it as the request or the response type of an API +/// method. For instance: service Foo { rpc Bar(google.protobuf.Empty) returns +/// (google.protobuf.Empty); } +typedef Empty = $Empty; + +/// Represents a textual expression in the Common Expression Language (CEL) +/// syntax. +/// +/// CEL is a C-like expression language. The syntax and semantics of CEL are +/// documented at https://github.com/google/cel-spec. Example (Comparison): +/// title: "Summary size limit" description: "Determines if a summary is less +/// than 100 chars" expression: "document.summary.size() \< 100" Example +/// (Equality): title: "Requestor is owner" description: "Determines if +/// requestor is the document owner" expression: "document.owner == +/// request.auth.claims.email" Example (Logic): title: "Public documents" +/// description: "Determine whether the document should be publicly visible" +/// expression: "document.type != 'private' && document.type != 'internal'" +/// Example (Data Manipulation): title: "Notification string" description: +/// "Create a notification string with a timestamp." expression: "'New message +/// received at ' + string(document.create_time)" The exact variables and +/// functions that may be referenced within an expression are determined by the +/// service that evaluates it. See the service documentation for additional +/// information. +typedef Expr = $Expr; + +/// Request message for ExtractPosture. +class ExtractPostureRequest { + /// An identifier for the posture. + /// + /// Required. + core.String? postureId; + + /// The organization, folder, or project from which policies are extracted. + /// + /// Must be within the organization defined in parent. 
Use one of the + /// following formats: * `organization/{organization_number}` * + /// `folder/{folder_number}` * `project/{project_number}` + /// + /// Required. + core.String? workload; + + ExtractPostureRequest({ + this.postureId, + this.workload, + }); + + ExtractPostureRequest.fromJson(core.Map json_) + : this( + postureId: json_['postureId'] as core.String?, + workload: json_['workload'] as core.String?, + ); + + core.Map toJson() => { + if (postureId != null) 'postureId': postureId!, + if (workload != null) 'workload': workload!, + }; +} + +/// A custom, user-defined constraint. +/// +/// You can apply the constraint only to the resource types specified in the +/// constraint, and only within the organization where the constraint is +/// defined. _When you create a custom constraint, it is not enforced +/// automatically._ You must use an organization policy to +/// [enforce the constraint](https://cloud.google.com/resource-manager/help/organization-policy/constraints/enforce). +class GoogleCloudSecuritypostureV1CustomConstraint { + /// Whether to allow or deny the action. + /// Possible string values are: + /// - "ACTION_TYPE_UNSPECIFIED" : Default value. This value is unused. + /// - "ALLOW" : Allow the action. + /// - "DENY" : Deny the action. + core.String? actionType; + + /// A Common Expression Language (CEL) condition expression that must evaluate + /// to `true` for the constraint to be enforced. + /// + /// The maximum length is 1000 characters. For example: + + /// `resource.instanceName.matches('(production|test)_(.+_)?[\d]+')`: + /// Evaluates to `true` if the resource's `instanceName` attribute contains + /// the following: + The prefix `production` or `test` + An underscore (`_`) + + /// Optional: One or more characters, followed by an underscore (`_`) + One or + /// more digits + `resource.management.auto_upgrade == true`: Evaluates to + /// `true` if the resource's `management.auto_upgrade` attribute is `true`. + core.String? condition; + + /// A description of the constraint. + /// + /// The maximum length is 2000 characters. + core.String? description; + + /// A display name for the constraint. + /// + /// The maximum length is 200 characters. + core.String? displayName; + + /// The types of operations that the constraint applies to. + core.List? methodTypes; + + /// The name of the constraint, in the format + /// `organizations/{organization_id}/customConstraints/custom.{custom_constraint_id}`. + /// + /// For example, + /// `organizations/123456789012/customConstraints/custom.createOnlyE2TypeVms`. + /// Must contain 1 to 62 characters, excluding the prefix + /// `organizations/{organization_id}/customConstraints/custom.`. + /// + /// Immutable. + core.String? name; + + /// The resource type that the constraint applies to, in the format + /// `{canonical_service_name}/{resource_type_name}`. + /// + /// For example, `compute.googleapis.com/Instance`. + /// + /// Immutable. + core.List? resourceTypes; + + /// The last time at which the constraint was updated or created. + /// + /// Output only. + core.String? 
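// Editorial sketch (not generated code): a custom organization policy
// constraint, reusing the example identifier from the field documentation
// above. The CEL condition and display name are illustrative only.
//
//   final customConstraint = GoogleCloudSecuritypostureV1CustomConstraint(
//     name: 'organizations/123456789012/customConstraints/'
//         'custom.createOnlyE2TypeVms',
//     displayName: 'Create only E2 VM instances',
//     actionType: 'ALLOW',
//     methodTypes: ['CREATE'],
//     resourceTypes: ['compute.googleapis.com/Instance'],
//     condition: Expr(expression: "resource.machineType.contains('/e2-')"),
//   );
//
// Such a constraint is then wrapped in an OrgPolicyConstraintCustom together
// with its policy rules (defined later in this file).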
updateTime; + + GoogleCloudSecuritypostureV1CustomConstraint({ + this.actionType, + this.condition, + this.description, + this.displayName, + this.methodTypes, + this.name, + this.resourceTypes, + this.updateTime, + }); + + GoogleCloudSecuritypostureV1CustomConstraint.fromJson(core.Map json_) + : this( + actionType: json_['actionType'] as core.String?, + condition: json_['condition'] as core.String?, + description: json_['description'] as core.String?, + displayName: json_['displayName'] as core.String?, + methodTypes: (json_['methodTypes'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + name: json_['name'] as core.String?, + resourceTypes: (json_['resourceTypes'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + updateTime: json_['updateTime'] as core.String?, + ); + + core.Map toJson() => { + if (actionType != null) 'actionType': actionType!, + if (condition != null) 'condition': condition!, + if (description != null) 'description': description!, + if (displayName != null) 'displayName': displayName!, + if (methodTypes != null) 'methodTypes': methodTypes!, + if (name != null) 'name': name!, + if (resourceTypes != null) 'resourceTypes': resourceTypes!, + if (updateTime != null) 'updateTime': updateTime!, + }; +} + +/// A rule that defines the allowed and denied values for an organization policy +/// constraint. +class GoogleCloudSecuritypostureV1PolicyRule { + /// Whether to allow any value for a list constraint. + /// + /// Valid only for list constraints. + core.bool? allowAll; + + /// A condition that determines whether this rule is used to evaluate the + /// policy. + /// + /// When set, the google.type.Expr.expression field must contain 1 to 10 + /// subexpressions, joined by the `||` or `&&` operators. Each subexpression + /// must use the `resource.matchTag()` or `resource.matchTagId()` Common + /// Expression Language (CEL) function. The `resource.matchTag()` function + /// takes the following arguments: * `key_name`: the namespaced name of the + /// tag key, with the organization ID and a slash (`/`) as a prefix; for + /// example, `123456789012/environment` * `value_name`: the short name of the + /// tag value For example: `resource.matchTag('123456789012/environment, + /// 'prod')` The `resource.matchTagId()` function takes the following + /// arguments: * `key_id`: the permanent ID of the tag key; for example, + /// `tagKeys/123456789012` * `value_id`: the permanent ID of the tag value; + /// for example, `tagValues/567890123456` For example: + /// `resource.matchTagId('tagKeys/123456789012', 'tagValues/567890123456')` + Expr? condition; + + /// Whether to deny all values for a list constraint. + /// + /// Valid only for list constraints. + core.bool? denyAll; + + /// Whether to enforce the constraint. + /// + /// Valid only for boolean constraints. + core.bool? enforce; + + /// Required for GMCs if parameters defined in constraints. + /// + /// Pass parameter values when policy enforcement is enabled. Ensure that + /// parameter value types match those defined in the constraint definition. + /// For example: { "allowedLocations" : \["us-east1", "us-west1"\], "allowAll" + /// : true } + /// + /// Optional. + /// + /// The values for Object must be JSON objects. It can consist of `num`, + /// `String`, `bool` and `null` as well as `Map` and `List` values. + core.Map? parameters; + + /// The resource types policy can support, only used for Google managed + /// constraint and method type is GOVERN_TAGS. + /// + /// Optional. 
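// Editorial sketch (not generated code): a PolicyRule whose condition uses the
// resource.matchTag() form described above, so the rule applies only to
// resources tagged environment=prod. The organization ID and tag names are
// placeholders.
//
//   final rule = GoogleCloudSecuritypostureV1PolicyRule(
//     enforce: true,
//     condition: Expr(
//       expression: "resource.matchTag('123456789012/environment', 'prod')",
//     ),
//   );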
+ ResourceTypes? resourceTypes; + + /// The allowed and denied values for a list constraint. + /// + /// Valid only for list constraints. + GoogleCloudSecuritypostureV1PolicyRuleStringValues? values; + + GoogleCloudSecuritypostureV1PolicyRule({ + this.allowAll, + this.condition, + this.denyAll, + this.enforce, + this.parameters, + this.resourceTypes, + this.values, + }); + + GoogleCloudSecuritypostureV1PolicyRule.fromJson(core.Map json_) + : this( + allowAll: json_['allowAll'] as core.bool?, + condition: json_.containsKey('condition') + ? Expr.fromJson( + json_['condition'] as core.Map) + : null, + denyAll: json_['denyAll'] as core.bool?, + enforce: json_['enforce'] as core.bool?, + parameters: json_.containsKey('parameters') + ? json_['parameters'] as core.Map + : null, + resourceTypes: json_.containsKey('resourceTypes') + ? ResourceTypes.fromJson( + json_['resourceTypes'] as core.Map) + : null, + values: json_.containsKey('values') + ? GoogleCloudSecuritypostureV1PolicyRuleStringValues.fromJson( + json_['values'] as core.Map) + : null, + ); + + core.Map toJson() => { + if (allowAll != null) 'allowAll': allowAll!, + if (condition != null) 'condition': condition!, + if (denyAll != null) 'denyAll': denyAll!, + if (enforce != null) 'enforce': enforce!, + if (parameters != null) 'parameters': parameters!, + if (resourceTypes != null) 'resourceTypes': resourceTypes!, + if (values != null) 'values': values!, + }; +} + +/// The allowed and denied values for a list constraint. +/// +/// For all constraints, these fields can contain literal values. Optionally, +/// you can add the `is:` prefix to these values. If the value contains a colon +/// (`:`), then the `is:` prefix is required. Some constraints allow you to +/// specify a portion of the resource hierarchy, known as a +/// [_hierarchy subtree_](https://cloud.google.com/resource-manager/help/organization-policy/hierarchy-subtree), +/// that the constraint applies to. To specify a hierarchy subtree, use the +/// `under:` prefix, followed by a value with one of these formats: - +/// `projects/{project_id}` (for example, `projects/tokyo-rain-123`) - +/// `folders/{folder_id}` (for example, `folders/1234567890123`) - +/// `organizations/{organization_id}` (for example, +/// `organizations/123456789012`) A constraint's `supports_under` field +/// indicates whether you can specify a hierarchy subtree. To learn which +/// predefined constraints let you specify a hierarchy subtree, see the +/// [constraints reference](https://cloud.google.com/resource-manager/help/organization-policy/constraints/reference). +class GoogleCloudSecuritypostureV1PolicyRuleStringValues { + /// The allowed values for the constraint. + core.List? allowedValues; + + /// The denied values for the constraint. + core.List? deniedValues; + + GoogleCloudSecuritypostureV1PolicyRuleStringValues({ + this.allowedValues, + this.deniedValues, + }); + + GoogleCloudSecuritypostureV1PolicyRuleStringValues.fromJson(core.Map json_) + : this( + allowedValues: (json_['allowedValues'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + deniedValues: (json_['deniedValues'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + ); + + core.Map toJson() => { + if (allowedValues != null) 'allowedValues': allowedValues!, + if (deniedValues != null) 'deniedValues': deniedValues!, + }; +} + +/// Details of an infrastructure-as-code (IaC) configuration. +class IaC { + /// A Terraform plan file, formatted as a stringified JSON object. 
+ /// + /// To learn how to generate a Terraform plan file in JSON format, see + /// [JSON output format](https://developer.hashicorp.com/terraform/internals/json-format) + /// in the Terraform documentation. + /// + /// Optional. + core.String? tfPlan; + core.List get tfPlanAsBytes => convert.base64.decode(tfPlan!); + + set tfPlanAsBytes(core.List bytes_) { + tfPlan = + convert.base64.encode(bytes_).replaceAll('/', '_').replaceAll('+', '-'); + } + + IaC({ + this.tfPlan, + }); + + IaC.fromJson(core.Map json_) + : this( + tfPlan: json_['tfPlan'] as core.String?, + ); + + core.Map toJson() => { + if (tfPlan != null) 'tfPlan': tfPlan!, + }; +} + +/// Details of an infrastructure-as-code (IaC) validation report. +class IaCValidationReport { + /// Additional information about the report. + core.String? note; + + /// A list of every Violation found in the IaC configuration. + core.List? violations; + + IaCValidationReport({ + this.note, + this.violations, + }); + + IaCValidationReport.fromJson(core.Map json_) + : this( + note: json_['note'] as core.String?, + violations: (json_['violations'] as core.List?) + ?.map((value) => Violation.fromJson( + value as core.Map)) + .toList(), + ); + + core.Map toJson() => { + if (note != null) 'note': note!, + if (violations != null) 'violations': violations!, + }; +} + +/// The response message for Locations.ListLocations. +class ListLocationsResponse { + /// A list of locations that matches the specified filter in the request. + core.List? locations; + + /// The standard List next-page token. + core.String? nextPageToken; + + ListLocationsResponse({ + this.locations, + this.nextPageToken, + }); + + ListLocationsResponse.fromJson(core.Map json_) + : this( + locations: (json_['locations'] as core.List?) + ?.map((value) => Location.fromJson( + value as core.Map)) + .toList(), + nextPageToken: json_['nextPageToken'] as core.String?, + ); + + core.Map toJson() => { + if (locations != null) 'locations': locations!, + if (nextPageToken != null) 'nextPageToken': nextPageToken!, + }; +} + +/// The response message for Operations.ListOperations. +class ListOperationsResponse { + /// The standard List next-page token. + core.String? nextPageToken; + + /// A list of operations that matches the specified filter in the request. + core.List? operations; + + ListOperationsResponse({ + this.nextPageToken, + this.operations, + }); + + ListOperationsResponse.fromJson(core.Map json_) + : this( + nextPageToken: json_['nextPageToken'] as core.String?, + operations: (json_['operations'] as core.List?) + ?.map((value) => Operation.fromJson( + value as core.Map)) + .toList(), + ); + + core.Map toJson() => { + if (nextPageToken != null) 'nextPageToken': nextPageToken!, + if (operations != null) 'operations': operations!, + }; +} + +/// Response message for ListPostureDeployments. +class ListPostureDeploymentsResponse { + /// A pagination token. + /// + /// To retrieve the next page of results, call the method again with this + /// token. + core.String? nextPageToken; + + /// The list of PostureDeployment resources. + core.List? postureDeployments; + + /// Locations that were temporarily unavailable and could not be reached. + core.List? unreachable; + + ListPostureDeploymentsResponse({ + this.nextPageToken, + this.postureDeployments, + this.unreachable, + }); + + ListPostureDeploymentsResponse.fromJson(core.Map json_) + : this( + nextPageToken: json_['nextPageToken'] as core.String?, + postureDeployments: (json_['postureDeployments'] as core.List?) 
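// Editorial sketch (not generated code): validating a Terraform plan with the
// createIaCValidationReport method defined earlier in this file. `reports` is
// assumed to be an OrganizationsLocationsReportsResource and `planJson` the
// JSON output of `terraform show -json`; both are placeholders. The
// tfPlanAsBytes setter above takes care of the base64url encoding.
//
//   import 'dart:convert' show utf8;
//
//   final request = CreateIaCValidationReportRequest(
//     iac: IaC()..tfPlanAsBytes = utf8.encode(planJson),
//   );
//   final operation = await reports.createIaCValidationReport(
//     request,
//     'organizations/123456789012/locations/global',
//   );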
+ ?.map((value) => PostureDeployment.fromJson( + value as core.Map)) + .toList(), + unreachable: (json_['unreachable'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + ); + + core.Map toJson() => { + if (nextPageToken != null) 'nextPageToken': nextPageToken!, + if (postureDeployments != null) + 'postureDeployments': postureDeployments!, + if (unreachable != null) 'unreachable': unreachable!, + }; +} + +/// Response message for ListPostureRevisions. +class ListPostureRevisionsResponse { + /// A pagination token. + /// + /// To retrieve the next page of results, call the method again with this + /// token. + core.String? nextPageToken; + + /// The list of revisions for the Posture. + core.List? revisions; + + ListPostureRevisionsResponse({ + this.nextPageToken, + this.revisions, + }); + + ListPostureRevisionsResponse.fromJson(core.Map json_) + : this( + nextPageToken: json_['nextPageToken'] as core.String?, + revisions: (json_['revisions'] as core.List?) + ?.map((value) => Posture.fromJson( + value as core.Map)) + .toList(), + ); + + core.Map toJson() => { + if (nextPageToken != null) 'nextPageToken': nextPageToken!, + if (revisions != null) 'revisions': revisions!, + }; +} + +/// Response message for ListPostureTemplates. +class ListPostureTemplatesResponse { + /// A pagination token. + /// + /// To retrieve the next page of results, call the method again with this + /// token. + core.String? nextPageToken; + + /// The list of PostureTemplate resources. + core.List? postureTemplates; + + ListPostureTemplatesResponse({ + this.nextPageToken, + this.postureTemplates, + }); + + ListPostureTemplatesResponse.fromJson(core.Map json_) + : this( + nextPageToken: json_['nextPageToken'] as core.String?, + postureTemplates: (json_['postureTemplates'] as core.List?) + ?.map((value) => PostureTemplate.fromJson( + value as core.Map)) + .toList(), + ); + + core.Map toJson() => { + if (nextPageToken != null) 'nextPageToken': nextPageToken!, + if (postureTemplates != null) 'postureTemplates': postureTemplates!, + }; +} + +/// Response message for ListPostures. +class ListPosturesResponse { + /// A pagination token. + /// + /// To retrieve the next page of results, call the method again with this + /// token. + core.String? nextPageToken; + + /// The list of Posture resources. + core.List? postures; + + /// Locations that were temporarily unavailable and could not be reached. + core.List? unreachable; + + ListPosturesResponse({ + this.nextPageToken, + this.postures, + this.unreachable, + }); + + ListPosturesResponse.fromJson(core.Map json_) + : this( + nextPageToken: json_['nextPageToken'] as core.String?, + postures: (json_['postures'] as core.List?) + ?.map((value) => Posture.fromJson( + value as core.Map)) + .toList(), + unreachable: (json_['unreachable'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + ); + + core.Map toJson() => { + if (nextPageToken != null) 'nextPageToken': nextPageToken!, + if (postures != null) 'postures': postures!, + if (unreachable != null) 'unreachable': unreachable!, + }; +} + +/// Response message for ListReports. +class ListReportsResponse { + /// A pagination token. + /// + /// To retrieve the next page of results, call the method again with this + /// token. + core.String? nextPageToken; + + /// The list of Report resources. + core.List? reports; + + /// Locations that were temporarily unavailable and could not be reached. + core.List? 
unreachable; + + ListReportsResponse({ + this.nextPageToken, + this.reports, + this.unreachable, + }); + + ListReportsResponse.fromJson(core.Map json_) + : this( + nextPageToken: json_['nextPageToken'] as core.String?, + reports: (json_['reports'] as core.List?) + ?.map((value) => + Report.fromJson(value as core.Map)) + .toList(), + unreachable: (json_['unreachable'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + ); + + core.Map toJson() => { + if (nextPageToken != null) 'nextPageToken': nextPageToken!, + if (reports != null) 'reports': reports!, + if (unreachable != null) 'unreachable': unreachable!, + }; +} + +/// A resource that represents a Google Cloud location. +typedef Location = $Location00; + +/// This resource represents a long-running operation that is the result of a +/// network API call. +class Operation { + /// If the value is `false`, it means the operation is still in progress. + /// + /// If `true`, the operation is completed, and either `error` or `response` is + /// available. + core.bool? done; + + /// The error result of the operation in case of failure or cancellation. + Status? error; + + /// Service-specific metadata associated with the operation. + /// + /// It typically contains progress information and common metadata such as + /// create time. Some services might not provide such metadata. Any method + /// that returns a long-running operation should document the metadata type, + /// if any. + /// + /// The values for Object must be JSON objects. It can consist of `num`, + /// `String`, `bool` and `null` as well as `Map` and `List` values. + core.Map? metadata; + + /// The server-assigned name, which is only unique within the same service + /// that originally returns it. + /// + /// If you use the default HTTP mapping, the `name` should be a resource name + /// ending with `operations/{unique_id}`. + core.String? name; + + /// The normal, successful response of the operation. + /// + /// If the original method returns no data on success, such as `Delete`, the + /// response is `google.protobuf.Empty`. If the original method is standard + /// `Get`/`Create`/`Update`, the response should be the resource. For other + /// methods, the response should have the type `XxxResponse`, where `Xxx` is + /// the original method name. For example, if the original method name is + /// `TakeSnapshot()`, the inferred response type is `TakeSnapshotResponse`. + /// + /// The values for Object must be JSON objects. It can consist of `num`, + /// `String`, `bool` and `null` as well as `Map` and `List` values. + core.Map? response; + + Operation({ + this.done, + this.error, + this.metadata, + this.name, + this.response, + }); + + Operation.fromJson(core.Map json_) + : this( + done: json_['done'] as core.bool?, + error: json_.containsKey('error') + ? Status.fromJson( + json_['error'] as core.Map) + : null, + metadata: json_.containsKey('metadata') + ? json_['metadata'] as core.Map + : null, + name: json_['name'] as core.String?, + response: json_.containsKey('response') + ? json_['response'] as core.Map + : null, + ); + + core.Map toJson() => { + if (done != null) 'done': done!, + if (error != null) 'error': error!, + if (metadata != null) 'metadata': metadata!, + if (name != null) 'name': name!, + if (response != null) 'response': response!, + }; +} + +/// A predefined organization policy constraint. +class OrgPolicyConstraint { + /// A unique identifier for the constraint. + /// + /// Required. + core.String? 
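// Editorial sketch (not generated code): createIaCValidationReport returns a
// long-running Operation. Once the operation is done, and assuming its
// response payload is a Report (as the method description suggests), the
// validation results can be read like this:
//
//   if (operation.done == true && operation.error == null) {
//     final report = Report.fromJson(operation.response!);
//     final violations =
//         report.iacValidationReport?.violations ?? const <Violation>[];
//     for (final v in violations) {
//       print('${v.severity} ${v.policyId}: ${v.nextSteps}');
//     }
//   }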
cannedConstraintId; + + /// The rules enforced by the constraint. + /// + /// Required. + core.List? policyRules; + + OrgPolicyConstraint({ + this.cannedConstraintId, + this.policyRules, + }); + + OrgPolicyConstraint.fromJson(core.Map json_) + : this( + cannedConstraintId: json_['cannedConstraintId'] as core.String?, + policyRules: (json_['policyRules'] as core.List?) + ?.map((value) => GoogleCloudSecuritypostureV1PolicyRule.fromJson( + value as core.Map)) + .toList(), + ); + + core.Map toJson() => { + if (cannedConstraintId != null) + 'cannedConstraintId': cannedConstraintId!, + if (policyRules != null) 'policyRules': policyRules!, + }; +} + +/// A custom organization policy constraint. +class OrgPolicyConstraintCustom { + /// Metadata for the constraint. + /// + /// Required. + GoogleCloudSecuritypostureV1CustomConstraint? customConstraint; + + /// The rules enforced by the constraint. + /// + /// Required. + core.List? policyRules; + + OrgPolicyConstraintCustom({ + this.customConstraint, + this.policyRules, + }); + + OrgPolicyConstraintCustom.fromJson(core.Map json_) + : this( + customConstraint: json_.containsKey('customConstraint') + ? GoogleCloudSecuritypostureV1CustomConstraint.fromJson( + json_['customConstraint'] + as core.Map) + : null, + policyRules: (json_['policyRules'] as core.List?) + ?.map((value) => GoogleCloudSecuritypostureV1PolicyRule.fromJson( + value as core.Map)) + .toList(), + ); + + core.Map toJson() => { + if (customConstraint != null) 'customConstraint': customConstraint!, + if (policyRules != null) 'policyRules': policyRules!, + }; +} + +/// The details of a policy, including the constraints that it includes. +class Policy { + /// The compliance standards that the policy helps enforce. + /// + /// Optional. + core.List? complianceStandards; + + /// The constraints that the policy includes. + /// + /// Required. + Constraint? constraint; + + /// A description of the policy. + /// + /// Optional. + core.String? description; + + /// A user-specified identifier for the policy. + /// + /// In a PolicySet, each policy must have a unique identifier. + /// + /// Required. + core.String? policyId; + + Policy({ + this.complianceStandards, + this.constraint, + this.description, + this.policyId, + }); + + Policy.fromJson(core.Map json_) + : this( + complianceStandards: (json_['complianceStandards'] as core.List?) + ?.map((value) => ComplianceStandard.fromJson( + value as core.Map)) + .toList(), + constraint: json_.containsKey('constraint') + ? Constraint.fromJson( + json_['constraint'] as core.Map) + : null, + description: json_['description'] as core.String?, + policyId: json_['policyId'] as core.String?, + ); + + core.Map toJson() => { + if (complianceStandards != null) + 'complianceStandards': complianceStandards!, + if (constraint != null) 'constraint': constraint!, + if (description != null) 'description': description!, + if (policyId != null) 'policyId': policyId!, + }; +} + +/// Details of a policy that was violated. +class PolicyDetails { + /// The compliance standards that the policy maps to. + /// + /// For example, `CIS-2.0 1.15`. + core.List? complianceStandards; + + /// Information about the constraint that was violated. + /// + /// The format of this information can change at any time without prior + /// notice. Your application must not depend on this information in any way. + core.String? constraint; + + /// The type of constraint that was violated. + /// Possible string values are: + /// - "CONSTRAINT_TYPE_UNSPECIFIED" : Default value. This value is unused. 
+ /// - "SECURITY_HEALTH_ANALYTICS_CUSTOM_MODULE" : A custom module for Security + /// Health Analytics. + /// - "ORG_POLICY_CUSTOM" : A custom organization policy constraint. + /// - "SECURITY_HEALTH_ANALYTICS_MODULE" : A built-in detector for Security + /// Health Analytics. + /// - "ORG_POLICY" : A predefined organization policy constraint. + /// - "REGO_POLICY" : A custom rego policy constraint. + core.String? constraintType; + + /// A description of the policy. + core.String? description; + + PolicyDetails({ + this.complianceStandards, + this.constraint, + this.constraintType, + this.description, + }); + + PolicyDetails.fromJson(core.Map json_) + : this( + complianceStandards: (json_['complianceStandards'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + constraint: json_['constraint'] as core.String?, + constraintType: json_['constraintType'] as core.String?, + description: json_['description'] as core.String?, + ); + + core.Map toJson() => { + if (complianceStandards != null) + 'complianceStandards': complianceStandards!, + if (constraint != null) 'constraint': constraint!, + if (constraintType != null) 'constraintType': constraintType!, + if (description != null) 'description': description!, + }; +} + +/// A group of one or more Policy resources. +class PolicySet { + /// A description of the policy set. + /// + /// Optional. + core.String? description; + + /// The Policy resources in the policy set. + /// + /// Each policy must have a policy_id that's unique within the policy set. + /// + /// Required. + core.List? policies; + + /// An identifier for the policy set. + /// + /// Required. + core.String? policySetId; + + PolicySet({ + this.description, + this.policies, + this.policySetId, + }); + + PolicySet.fromJson(core.Map json_) + : this( + description: json_['description'] as core.String?, + policies: (json_['policies'] as core.List?) + ?.map((value) => + Policy.fromJson(value as core.Map)) + .toList(), + policySetId: json_['policySetId'] as core.String?, + ); + + core.Map toJson() => { + if (description != null) 'description': description!, + if (policies != null) 'policies': policies!, + if (policySetId != null) 'policySetId': policySetId!, + }; +} + +/// The details of a posture. +class Posture { + /// The user-specified annotations for the posture. + /// + /// For details about the values you can use in an annotation, see \[AIP-148: + /// Standard fields\](https://google.aip.dev/148#annotations). + /// + /// Optional. + core.Map? annotations; + + /// The categories that the posture belongs to, as determined by the Security + /// Posture API. + /// + /// Output only. + core.List? categories; + + /// The time at which the posture was created. + /// + /// Output only. + core.String? createTime; + + /// A description of the posture. + /// + /// Optional. + core.String? description; + + /// An opaque identifier for the current version of the posture at the + /// specified `revision_id`. + /// + /// To prevent concurrent updates from overwriting each other, always provide + /// the `etag` when you update a posture. You can also provide the `etag` when + /// you delete a posture, to help ensure that you're deleting the intended + /// version of the posture. + /// + /// Optional. + core.String? etag; + + /// Identifier. + /// + /// The name of the posture, in the format + /// `organizations/{organization}/locations/global/postures/{posture_id}`. + /// + /// Required. + core.String? name; + + /// The PolicySet resources that the posture includes. 
+ /// + /// Required. + core.List? policySets; + + /// Whether the posture is in the process of being updated. + /// + /// Output only. + core.bool? reconciling; + + /// An opaque eight-character string that identifies the revision of the + /// posture. + /// + /// A posture can have multiple revisions; when you deploy a posture, you + /// deploy a specific revision of the posture. + /// + /// Output only. Immutable. + core.String? revisionId; + + /// The state of the posture at the specified `revision_id`. + /// + /// Required. + /// Possible string values are: + /// - "STATE_UNSPECIFIED" : Default value. This value is unused. + /// - "DEPRECATED" : The posture is deprecated and can no longer be deployed. + /// - "DRAFT" : The posture is a draft and is not ready to deploy. + /// - "ACTIVE" : The posture is complete and ready to deploy. + core.String? state; + + /// The time at which the posture was last updated. + /// + /// Output only. + core.String? updateTime; + + Posture({ + this.annotations, + this.categories, + this.createTime, + this.description, + this.etag, + this.name, + this.policySets, + this.reconciling, + this.revisionId, + this.state, + this.updateTime, + }); + + Posture.fromJson(core.Map json_) + : this( + annotations: + (json_['annotations'] as core.Map?) + ?.map( + (key, value) => core.MapEntry( + key, + value as core.String, + ), + ), + categories: (json_['categories'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + createTime: json_['createTime'] as core.String?, + description: json_['description'] as core.String?, + etag: json_['etag'] as core.String?, + name: json_['name'] as core.String?, + policySets: (json_['policySets'] as core.List?) + ?.map((value) => PolicySet.fromJson( + value as core.Map)) + .toList(), + reconciling: json_['reconciling'] as core.bool?, + revisionId: json_['revisionId'] as core.String?, + state: json_['state'] as core.String?, + updateTime: json_['updateTime'] as core.String?, + ); + + core.Map toJson() => { + if (annotations != null) 'annotations': annotations!, + if (categories != null) 'categories': categories!, + if (createTime != null) 'createTime': createTime!, + if (description != null) 'description': description!, + if (etag != null) 'etag': etag!, + if (name != null) 'name': name!, + if (policySets != null) 'policySets': policySets!, + if (reconciling != null) 'reconciling': reconciling!, + if (revisionId != null) 'revisionId': revisionId!, + if (state != null) 'state': state!, + if (updateTime != null) 'updateTime': updateTime!, + }; +} + +/// Details for a Posture deployment on an organization, folder, or project. +/// +/// You can deploy at most one posture to each organization, folder, or project. +/// The parent resource for a posture deployment is always the organization, +/// even if the deployment applies to a folder or project. +class PostureDeployment { + /// The user-specified annotations for the posture deployment. + /// + /// For details about the values you can use in an annotation, see \[AIP-148: + /// Standard fields\](https://google.aip.dev/148#annotations). + /// + /// Optional. + core.Map? annotations; + + /// The categories that the posture deployment belongs to, as determined by + /// the Security Posture API. + /// + /// Output only. + core.List? categories; + + /// The time at which the posture deployment was created. + /// + /// Output only. + core.String? createTime; + + /// A description of the posture deployment. + /// + /// Optional. + core.String? 
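// Editorial sketch (not generated code): a draft Posture groups policies into
// named PolicySets. `constraint` is assumed to be a Constraint built as in the
// earlier sketch; all identifiers are illustrative.
//
//   final posture = Posture(
//     name: 'organizations/123456789012/locations/global/postures/my-posture',
//     state: 'DRAFT',
//     policySets: [
//       PolicySet(
//         policySetId: 'baseline',
//         policies: [
//           Policy(policyId: 'disable-serial-port', constraint: constraint),
//         ],
//       ),
//     ],
//   );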
description; + + /// The posture ID that was specified for the deployment. + /// + /// Present only if the posture deployment is in a failed state. + /// + /// Output only. + core.String? desiredPostureId; + + /// The revision ID of the posture that was specified for the deployment. + /// + /// Present only if the deployment is in a failed state. + /// + /// Output only. + core.String? desiredPostureRevisionId; + + /// An opaque identifier for the current version of the posture deployment. + /// + /// To prevent concurrent updates from overwriting each other, always provide + /// the `etag` when you update a posture deployment. You can also provide the + /// `etag` when you delete a posture deployment, to help ensure that you're + /// deleting the intended posture deployment. + /// + /// Optional. + core.String? etag; + + /// A description of why the posture deployment failed. + /// + /// Present only if the deployment is in a failed state. + /// + /// Output only. + core.String? failureMessage; + + /// Identifier. + /// + /// The name of the posture deployment, in the format + /// `organizations/{organization}/locations/global/postureDeployments/{deployment_id}`. + /// + /// Required. + core.String? name; + + /// The posture used in the deployment, in the format + /// `organizations/{organization}/locations/global/postures/{posture_id}`. + /// + /// Required. + core.String? postureId; + + /// The revision ID of the posture used in the deployment. + /// + /// Required. + core.String? postureRevisionId; + + /// Whether the posture deployment is in the process of being updated. + /// + /// Output only. + core.bool? reconciling; + + /// The state of the posture deployment. + /// + /// Output only. + /// Possible string values are: + /// - "STATE_UNSPECIFIED" : Default value. This value is unused. + /// - "CREATING" : The posture deployment is being created. + /// - "DELETING" : The posture deployment is being deleted. + /// - "UPDATING" : The posture deployment is being updated. + /// - "ACTIVE" : The posture deployment is active and in use. + /// - "CREATE_FAILED" : The posture deployment could not be created. + /// - "UPDATE_FAILED" : The posture deployment could not be updated. + /// - "DELETE_FAILED" : The posture deployment could not be deleted. + core.String? state; + + /// The organization, folder, or project where the posture is deployed. + /// + /// Uses one of the following formats: * `organizations/{organization_number}` + /// * `folders/{folder_number}` * `projects/{project_number}` + /// + /// Required. + core.String? targetResource; + + /// The time at which the posture deployment was last updated. + /// + /// Output only. + core.String? updateTime; + + PostureDeployment({ + this.annotations, + this.categories, + this.createTime, + this.description, + this.desiredPostureId, + this.desiredPostureRevisionId, + this.etag, + this.failureMessage, + this.name, + this.postureId, + this.postureRevisionId, + this.reconciling, + this.state, + this.targetResource, + this.updateTime, + }); + + PostureDeployment.fromJson(core.Map json_) + : this( + annotations: + (json_['annotations'] as core.Map?) + ?.map( + (key, value) => core.MapEntry( + key, + value as core.String, + ), + ), + categories: (json_['categories'] as core.List?) 
+ ?.map((value) => value as core.String) + .toList(), + createTime: json_['createTime'] as core.String?, + description: json_['description'] as core.String?, + desiredPostureId: json_['desiredPostureId'] as core.String?, + desiredPostureRevisionId: + json_['desiredPostureRevisionId'] as core.String?, + etag: json_['etag'] as core.String?, + failureMessage: json_['failureMessage'] as core.String?, + name: json_['name'] as core.String?, + postureId: json_['postureId'] as core.String?, + postureRevisionId: json_['postureRevisionId'] as core.String?, + reconciling: json_['reconciling'] as core.bool?, + state: json_['state'] as core.String?, + targetResource: json_['targetResource'] as core.String?, + updateTime: json_['updateTime'] as core.String?, + ); + + core.Map toJson() => { + if (annotations != null) 'annotations': annotations!, + if (categories != null) 'categories': categories!, + if (createTime != null) 'createTime': createTime!, + if (description != null) 'description': description!, + if (desiredPostureId != null) 'desiredPostureId': desiredPostureId!, + if (desiredPostureRevisionId != null) + 'desiredPostureRevisionId': desiredPostureRevisionId!, + if (etag != null) 'etag': etag!, + if (failureMessage != null) 'failureMessage': failureMessage!, + if (name != null) 'name': name!, + if (postureId != null) 'postureId': postureId!, + if (postureRevisionId != null) 'postureRevisionId': postureRevisionId!, + if (reconciling != null) 'reconciling': reconciling!, + if (state != null) 'state': state!, + if (targetResource != null) 'targetResource': targetResource!, + if (updateTime != null) 'updateTime': updateTime!, + }; +} + +/// Details of a posture deployment. +class PostureDetails { + /// The identifier for the PolicySet that the relevant policy belongs to. + core.String? policySet; + + /// The posture used in the deployment, in the format + /// `organizations/{organization}/locations/global/postures/{posture_id}`. + core.String? posture; + + /// The name of the posture deployment, in the format + /// `organizations/{organization}/locations/global/postureDeployments/{deployment_id}`. + core.String? postureDeployment; + + /// The organization, folder, or project where the posture is deployed. + /// + /// Uses one of the following formats: * `organizations/{organization_number}` + /// * `folders/{folder_number}` * `projects/{project_number}` + core.String? postureDeploymentTargetResource; + + /// The revision ID of the posture used in the deployment. + core.String? postureRevisionId; + + PostureDetails({ + this.policySet, + this.posture, + this.postureDeployment, + this.postureDeploymentTargetResource, + this.postureRevisionId, + }); + + PostureDetails.fromJson(core.Map json_) + : this( + policySet: json_['policySet'] as core.String?, + posture: json_['posture'] as core.String?, + postureDeployment: json_['postureDeployment'] as core.String?, + postureDeploymentTargetResource: + json_['postureDeploymentTargetResource'] as core.String?, + postureRevisionId: json_['postureRevisionId'] as core.String?, + ); + + core.Map toJson() => { + if (policySet != null) 'policySet': policySet!, + if (posture != null) 'posture': posture!, + if (postureDeployment != null) 'postureDeployment': postureDeployment!, + if (postureDeploymentTargetResource != null) + 'postureDeploymentTargetResource': postureDeploymentTargetResource!, + if (postureRevisionId != null) 'postureRevisionId': postureRevisionId!, + }; +} + +/// The details of a posture template. 
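// Editorial sketch (not generated code): a PostureDeployment ties one posture
// revision to a single organization, folder, or project. The resource names
// and revision ID below are placeholders.
//
//   final deployment = PostureDeployment(
//     postureId:
//         'organizations/123456789012/locations/global/postures/my-posture',
//     postureRevisionId: 'abcd1234',
//     targetResource: 'projects/456789012345',
//   );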
+class PostureTemplate { + /// The categories that the posture template belongs to, as determined by the + /// Security Posture API. + /// + /// Output only. + core.List? categories; + + /// A description of the posture template. + /// + /// Output only. + core.String? description; + + /// Identifier. + /// + /// The name of the posture template, in the format + /// `organizations/{organization}/locations/global/postureTemplates/{posture_template}`. + /// + /// Output only. + core.String? name; + + /// The PolicySet resources that the posture template includes. + /// + /// Output only. + core.List? policySets; + + /// A string that identifies the revision of the posture template. + /// + /// Output only. + core.String? revisionId; + + /// The state of the posture template at the specified `revision_id`. + /// + /// Output only. + /// Possible string values are: + /// - "STATE_UNSPECIFIED" : Default value. This value is unused. + /// - "ACTIVE" : The posture template follows the latest controls and + /// standards. + /// - "DEPRECATED" : The posture template uses outdated controls and + /// standards. We recommend that you use a newer revision of the posture + /// template. + core.String? state; + + PostureTemplate({ + this.categories, + this.description, + this.name, + this.policySets, + this.revisionId, + this.state, + }); + + PostureTemplate.fromJson(core.Map json_) + : this( + categories: (json_['categories'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + description: json_['description'] as core.String?, + name: json_['name'] as core.String?, + policySets: (json_['policySets'] as core.List?) + ?.map((value) => PolicySet.fromJson( + value as core.Map)) + .toList(), + revisionId: json_['revisionId'] as core.String?, + state: json_['state'] as core.String?, + ); + + core.Map toJson() => { + if (categories != null) 'categories': categories!, + if (description != null) 'description': description!, + if (name != null) 'name': name!, + if (policySets != null) 'policySets': policySets!, + if (revisionId != null) 'revisionId': revisionId!, + if (state != null) 'state': state!, + }; +} + +/// A name-value pair used as a custom source property. +class Property { + /// The name of the custom source property. + /// + /// Required. + core.String? name; + + /// The CEL expression for the value of the custom source property. + /// + /// For resource properties, you can return the value of the property or a + /// string enclosed in quotation marks. + /// + /// Optional. + Expr? valueExpression; + + Property({ + this.name, + this.valueExpression, + }); + + Property.fromJson(core.Map json_) + : this( + name: json_['name'] as core.String?, + valueExpression: json_.containsKey('valueExpression') + ? Expr.fromJson(json_['valueExpression'] + as core.Map) + : null, + ); + + core.Map toJson() => { + if (name != null) 'name': name!, + if (valueExpression != null) 'valueExpression': valueExpression!, + }; +} + +/// Details of a report. +class Report { + /// The time at which the report was created. + /// + /// Output only. + core.String? createTime; + + /// An infrastructure-as-code (IaC) validation report. + /// + /// Output only. + IaCValidationReport? iacValidationReport; + + /// The name of the report, in the format + /// `organizations/{organization}/locations/global/reports/{report_id}`. + /// + /// Required. + core.String? name; + + /// The time at which the report was last updated. + /// + /// Output only. + core.String? 
updateTime; + + Report({ + this.createTime, + this.iacValidationReport, + this.name, + this.updateTime, + }); + + Report.fromJson(core.Map json_) + : this( + createTime: json_['createTime'] as core.String?, + iacValidationReport: json_.containsKey('iacValidationReport') + ? IaCValidationReport.fromJson(json_['iacValidationReport'] + as core.Map) + : null, + name: json_['name'] as core.String?, + updateTime: json_['updateTime'] as core.String?, + ); + + core.Map toJson() => { + if (createTime != null) 'createTime': createTime!, + if (iacValidationReport != null) + 'iacValidationReport': iacValidationReport!, + if (name != null) 'name': name!, + if (updateTime != null) 'updateTime': updateTime!, + }; +} + +/// A selector for the resource types to run the detector on. +class ResourceSelector { + /// The resource types to run the detector on. + /// + /// Each custom module can specify up to 5 resource types. + /// + /// Required. + core.List? resourceTypes; + + ResourceSelector({ + this.resourceTypes, + }); + + ResourceSelector.fromJson(core.Map json_) + : this( + resourceTypes: (json_['resourceTypes'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + ); + + core.Map toJson() => { + if (resourceTypes != null) 'resourceTypes': resourceTypes!, + }; +} + +/// Set multiple resource types for one policy, eg: resourceTypes: included: - +/// compute.googleapis.com/Instance - compute.googleapis.com/Disk Constraint +/// definition contains an empty resource type in order to support multiple +/// resource types in the policy. +/// +/// Only support Google managed constriaint and method type is GOVERN_TAGS Refer +/// go/multi-resource-support-force-tags-gmc to get more details. +class ResourceTypes { + /// The resource type we currently support. + /// + /// cloud/orgpolicy/customconstraintconfig/prod/resource_types.prototext + /// + /// Optional. + core.List? included; + + ResourceTypes({ + this.included, + }); + + ResourceTypes.fromJson(core.Map json_) + : this( + included: (json_['included'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + ); + + core.Map toJson() => { + if (included != null) 'included': included!, + }; +} + +/// A custom module for Security Health Analytics. +class SecurityHealthAnalyticsCustomModule { + /// Configuration settings for the custom module. + /// + /// Required. + CustomConfig? config; + + /// The display name of the custom module. + /// + /// This value is used as the finding category for all the asset violation + /// findings that the custom module returns. The display name must contain + /// between 1 and 128 alphanumeric characters or underscores, and it must + /// start with a lowercase letter. + /// + /// Optional. + core.String? displayName; + + /// The unique identifier for the custom module. + /// + /// Contains 1 to 20 digits. + /// + /// Output only. Immutable. + core.String? id; + + /// Whether the custom module is enabled at a specified level of the resource + /// hierarchy. + /// Possible string values are: + /// - "ENABLEMENT_STATE_UNSPECIFIED" : Default value. This value is unused. + /// - "ENABLED" : The detector or custom module is enabled. + /// - "DISABLED" : The detector or custom module is disabled. + core.String? moduleEnablementState; + + SecurityHealthAnalyticsCustomModule({ + this.config, + this.displayName, + this.id, + this.moduleEnablementState, + }); + + SecurityHealthAnalyticsCustomModule.fromJson(core.Map json_) + : this( + config: json_.containsKey('config') + ? 
CustomConfig.fromJson( + json_['config'] as core.Map) + : null, + displayName: json_['displayName'] as core.String?, + id: json_['id'] as core.String?, + moduleEnablementState: json_['moduleEnablementState'] as core.String?, + ); + + core.Map toJson() => { + if (config != null) 'config': config!, + if (displayName != null) 'displayName': displayName!, + if (id != null) 'id': id!, + if (moduleEnablementState != null) + 'moduleEnablementState': moduleEnablementState!, + }; +} + +/// A built-in detector for Security Health Analytics. +class SecurityHealthAnalyticsModule { + /// Whether the detector is enabled at a specified level of the resource + /// hierarchy. + /// Possible string values are: + /// - "ENABLEMENT_STATE_UNSPECIFIED" : Default value. This value is unused. + /// - "ENABLED" : The detector or custom module is enabled. + /// - "DISABLED" : The detector or custom module is disabled. + core.String? moduleEnablementState; + + /// The name of the detector. + /// + /// For example, `BIGQUERY_TABLE_CMEK_DISABLED`. This field is also used as + /// the finding category for all the asset violation findings that the + /// detector returns. + /// + /// Required. + core.String? moduleName; + + SecurityHealthAnalyticsModule({ + this.moduleEnablementState, + this.moduleName, + }); + + SecurityHealthAnalyticsModule.fromJson(core.Map json_) + : this( + moduleEnablementState: json_['moduleEnablementState'] as core.String?, + moduleName: json_['moduleName'] as core.String?, + ); + + core.Map toJson() => { + if (moduleEnablementState != null) + 'moduleEnablementState': moduleEnablementState!, + if (moduleName != null) 'moduleName': moduleName!, + }; +} + +/// The `Status` type defines a logical error model that is suitable for +/// different programming environments, including REST APIs and RPC APIs. +/// +/// It is used by [gRPC](https://github.com/grpc). Each `Status` message +/// contains three pieces of data: error code, error message, and error details. +/// You can find out more about this error model and how to work with it in the +/// [API Design Guide](https://cloud.google.com/apis/design/errors). +typedef Status = $Status00; + +/// Details of a violation. +class Violation { + /// The full resource name of the asset that caused the violation. + /// + /// For details about the format of the full resource name for each asset + /// type, see + /// [Resource name format](https://cloud.google.com/asset-inventory/docs/resource-name-format). + core.String? assetId; + + /// A description of the steps that you can take to fix the violation. + core.String? nextSteps; + + /// The policy that was violated. + core.String? policyId; + + /// The severity of the violation. + /// Possible string values are: + /// - "SEVERITY_UNSPECIFIED" : Default value. This value is unused. + /// - "CRITICAL" : Critical severity. + /// - "HIGH" : High severity. + /// - "MEDIUM" : Medium severity. + /// - "LOW" : Low severity. + core.String? severity; + + /// Details of the Cloud Asset Inventory asset that caused the violation. + AssetDetails? violatedAsset; + + /// Details of the policy that was violated. + PolicyDetails? violatedPolicy; + + /// Details for the posture that was violated. + /// + /// This field is present only if the violated policy belongs to a deployed + /// posture. + PostureDetails? 
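// Editorial sketch (not generated code): a SecurityHealthAnalyticsCustomModule
// wraps the CustomConfig message defined earlier in this file. The CEL
// predicate, resource type, display name, and recommendation text below are
// illustrative only.
//
//   final module = SecurityHealthAnalyticsCustomModule(
//     displayName: 'cryptokey_rotation_overdue',
//     moduleEnablementState: 'ENABLED',
//     config: CustomConfig(
//       predicate: Expr(
//         expression: "resource.rotationPeriod > duration('7776000s')",
//       ),
//       resourceSelector: ResourceSelector(
//         resourceTypes: ['cloudkms.googleapis.com/CryptoKey'],
//       ),
//       severity: 'HIGH',
//       recommendation: 'Rotate the key at least every 90 days.',
//     ),
//   );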
violatedPosture; + + Violation({ + this.assetId, + this.nextSteps, + this.policyId, + this.severity, + this.violatedAsset, + this.violatedPolicy, + this.violatedPosture, + }); + + Violation.fromJson(core.Map json_) + : this( + assetId: json_['assetId'] as core.String?, + nextSteps: json_['nextSteps'] as core.String?, + policyId: json_['policyId'] as core.String?, + severity: json_['severity'] as core.String?, + violatedAsset: json_.containsKey('violatedAsset') + ? AssetDetails.fromJson( + json_['violatedAsset'] as core.Map) + : null, + violatedPolicy: json_.containsKey('violatedPolicy') + ? PolicyDetails.fromJson(json_['violatedPolicy'] + as core.Map) + : null, + violatedPosture: json_.containsKey('violatedPosture') + ? PostureDetails.fromJson(json_['violatedPosture'] + as core.Map) + : null, + ); + + core.Map toJson() => { + if (assetId != null) 'assetId': assetId!, + if (nextSteps != null) 'nextSteps': nextSteps!, + if (policyId != null) 'policyId': policyId!, + if (severity != null) 'severity': severity!, + if (violatedAsset != null) 'violatedAsset': violatedAsset!, + if (violatedPolicy != null) 'violatedPolicy': violatedPolicy!, + if (violatedPosture != null) 'violatedPosture': violatedPosture!, + }; +} diff --git a/generated/googleapis/lib/serviceconsumermanagement/v1.dart b/generated/googleapis/lib/serviceconsumermanagement/v1.dart index c21ba8a1d..2b7180c0c 100644 --- a/generated/googleapis/lib/serviceconsumermanagement/v1.dart +++ b/generated/googleapis/lib/serviceconsumermanagement/v1.dart @@ -73,8 +73,8 @@ class OperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// [request] - The metadata request object. /// diff --git a/generated/googleapis/lib/servicemanagement/v1.dart b/generated/googleapis/lib/servicemanagement/v1.dart index 2cbe63fae..828de5f4f 100644 --- a/generated/googleapis/lib/servicemanagement/v1.dart +++ b/generated/googleapis/lib/servicemanagement/v1.dart @@ -2573,6 +2573,12 @@ class Diagnostic { /// directive `suppress_warning` does not directly affect documentation and is /// documented together with service config validation. class Documentation { + /// Optional information about the IAM configuration. + /// + /// This is typically used to link to documentation about a product's IAM + /// roles and permissions. + core.String? additionalIamInfo; + /// The URL to the root of documentation. core.String? documentationRootUrl; @@ -2614,6 +2620,7 @@ class Documentation { core.String? summary; Documentation({ + this.additionalIamInfo, this.documentationRootUrl, this.overview, this.pages, @@ -2625,6 +2632,7 @@ class Documentation { Documentation.fromJson(core.Map json_) : this( + additionalIamInfo: json_['additionalIamInfo'] as core.String?, documentationRootUrl: json_['documentationRootUrl'] as core.String?, overview: json_['overview'] as core.String?, pages: (json_['pages'] as core.List?) 
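// A minimal round-trip sketch for the `additionalIamInfo` field added to
// `Documentation` in the servicemanagement/v1.dart hunks above and below.
// The class, field, and method names come from this diff; the sample values
// are hypothetical and only illustrate that `toJson()` emits the field when
// it is set and that `fromJson()` reads it back.
import 'package:googleapis/servicemanagement/v1.dart' show Documentation;

void main() {
  final doc = Documentation(
    additionalIamInfo:
        'See the product IAM documentation for available roles and permissions.',
    documentationRootUrl: 'https://example.com/docs',
    summary: 'Example service documentation.',
  );

  // Only non-null fields are serialized, mirroring the generated toJson().
  final json = doc.toJson();
  final decoded = Documentation.fromJson(json);
  print(decoded.additionalIamInfo); // same string as above
}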
@@ -2644,6 +2652,7 @@ class Documentation { ); core.Map toJson() => { + if (additionalIamInfo != null) 'additionalIamInfo': additionalIamInfo!, if (documentationRootUrl != null) 'documentationRootUrl': documentationRootUrl!, if (overview != null) 'overview': overview!, @@ -2866,6 +2875,13 @@ class EnumValue { /// These fields will be deprecated once the feature graduates and is enabled by /// default. class ExperimentalFeatures { + /// Enables generation of protobuf code using new types that are more Pythonic + /// which are included in `protobuf>=5.29.x`. + /// + /// This feature will be enabled by default 1 month after launching the + /// feature in preview packages. + core.bool? protobufPythonicTypesEnabled; + /// Enables generation of asynchronous REST clients if `rest` transport is /// enabled. /// @@ -2875,15 +2891,20 @@ class ExperimentalFeatures { core.bool? restAsyncIoEnabled; ExperimentalFeatures({ + this.protobufPythonicTypesEnabled, this.restAsyncIoEnabled, }); ExperimentalFeatures.fromJson(core.Map json_) : this( + protobufPythonicTypesEnabled: + json_['protobufPythonicTypesEnabled'] as core.bool?, restAsyncIoEnabled: json_['restAsyncIoEnabled'] as core.bool?, ); core.Map toJson() => { + if (protobufPythonicTypesEnabled != null) + 'protobufPythonicTypesEnabled': protobufPythonicTypesEnabled!, if (restAsyncIoEnabled != null) 'restAsyncIoEnabled': restAsyncIoEnabled!, }; @@ -3191,8 +3212,16 @@ class GoSettings { /// Some settings. CommonLanguageSettings? common; + /// Map of service names to renamed services. + /// + /// Keys are the package relative service names and values are the name to be + /// used for the service client and call options. publishing: go_settings: + /// renamed_services: Publisher: TopicAdmin + core.Map? renamedServices; + GoSettings({ this.common, + this.renamedServices, }); GoSettings.fromJson(core.Map json_) @@ -3201,10 +3230,19 @@ class GoSettings { ? CommonLanguageSettings.fromJson( json_['common'] as core.Map) : null, + renamedServices: + (json_['renamedServices'] as core.Map?) + ?.map( + (key, value) => core.MapEntry( + key, + value as core.String, + ), + ), ); core.Map toJson() => { if (common != null) 'common': common!, + if (renamedServices != null) 'renamedServices': renamedServices!, }; } diff --git a/generated/googleapis/lib/servicenetworking/v1.dart b/generated/googleapis/lib/servicenetworking/v1.dart index 86ab120a6..3a75f8650 100644 --- a/generated/googleapis/lib/servicenetworking/v1.dart +++ b/generated/googleapis/lib/servicenetworking/v1.dart @@ -85,8 +85,8 @@ class OperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// [request] - The metadata request object. /// diff --git a/generated/googleapis/lib/serviceusage/v1.dart b/generated/googleapis/lib/serviceusage/v1.dart index 73d2ee3c2..89299bb83 100644 --- a/generated/googleapis/lib/serviceusage/v1.dart +++ b/generated/googleapis/lib/serviceusage/v1.dart @@ -82,8 +82,8 @@ class OperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. 
On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// [request] - The metadata request object. /// @@ -961,6 +961,12 @@ class DisableServiceRequest { /// directive `suppress_warning` does not directly affect documentation and is /// documented together with service config validation. class Documentation { + /// Optional information about the IAM configuration. + /// + /// This is typically used to link to documentation about a product's IAM + /// roles and permissions. + core.String? additionalIamInfo; + /// The URL to the root of documentation. core.String? documentationRootUrl; @@ -1002,6 +1008,7 @@ class Documentation { core.String? summary; Documentation({ + this.additionalIamInfo, this.documentationRootUrl, this.overview, this.pages, @@ -1013,6 +1020,7 @@ class Documentation { Documentation.fromJson(core.Map json_) : this( + additionalIamInfo: json_['additionalIamInfo'] as core.String?, documentationRootUrl: json_['documentationRootUrl'] as core.String?, overview: json_['overview'] as core.String?, pages: (json_['pages'] as core.List?) @@ -1032,6 +1040,7 @@ class Documentation { ); core.Map toJson() => { + if (additionalIamInfo != null) 'additionalIamInfo': additionalIamInfo!, if (documentationRootUrl != null) 'documentationRootUrl': documentationRootUrl!, if (overview != null) 'overview': overview!, diff --git a/generated/googleapis/lib/shared.dart b/generated/googleapis/lib/shared.dart index 447100a9c..959ce6f4c 100644 --- a/generated/googleapis/lib/shared.dart +++ b/generated/googleapis/lib/shared.dart @@ -334,7 +334,7 @@ class $AddressPool { class $AdvertiserBillingConfig { /// The ID of a billing profile assigned to the advertiser. /// - /// Optional. + /// Required. core.String? billingProfileId; $AdvertiserBillingConfig({ @@ -662,6 +662,97 @@ class $AliasContext { }; } +/// Used by: +/// +/// - metastore:v1 : AlterMetadataResourceLocationRequest +/// - metastore:v2 : GoogleCloudMetastoreV2AlterMetadataResourceLocationRequest +class $AlterMetadataResourceLocationRequest { + /// The new location URI for the metadata resource. + /// + /// Required. + core.String? locationUri; + + /// The relative metadata resource name in the following + /// format.databases/{database_id} or + /// databases/{database_id}/tables/{table_id} or + /// databases/{database_id}/tables/{table_id}/partitions/{partition_id} + /// + /// Required. + core.String? resourceName; + + $AlterMetadataResourceLocationRequest({ + this.locationUri, + this.resourceName, + }); + + $AlterMetadataResourceLocationRequest.fromJson(core.Map json_) + : this( + locationUri: json_['locationUri'] as core.String?, + resourceName: json_['resourceName'] as core.String?, + ); + + core.Map toJson() => { + if (locationUri != null) 'locationUri': locationUri!, + if (resourceName != null) 'resourceName': resourceName!, + }; +} + +/// Used by: +/// +/// - metastore:v1 : AlterTablePropertiesRequest +/// - metastore:v2 : GoogleCloudMetastoreV2AlterTablePropertiesRequest +class $AlterTablePropertiesRequest { + /// A map that describes the desired values to mutate. + /// + /// If update_mask is empty, the properties will not update. Otherwise, the + /// properties only alters the value whose associated paths exist in the + /// update mask + core.Map? 
properties; + + /// The name of the table containing the properties you're altering in the + /// following format.databases/{database_id}/tables/{table_id} + /// + /// Required. + core.String? tableName; + + /// A field mask that specifies the metadata table properties that are + /// overwritten by the update. + /// + /// Fields specified in the update_mask are relative to the resource (not to + /// the full request). A field is overwritten if it is in the mask.For + /// example, given the target properties: properties { a: 1 b: 2 } And an + /// update properties: properties { a: 2 b: 3 c: 4 } then if the field mask + /// is:paths: "properties.b", "properties.c"then the result will be: + /// properties { a: 1 b: 3 c: 4 } + core.String? updateMask; + + $AlterTablePropertiesRequest({ + this.properties, + this.tableName, + this.updateMask, + }); + + $AlterTablePropertiesRequest.fromJson(core.Map json_) + : this( + properties: + (json_['properties'] as core.Map?) + ?.map( + (key, value) => core.MapEntry( + key, + value as core.String, + ), + ), + tableName: json_['tableName'] as core.String?, + updateMask: json_['updateMask'] as core.String?, + ); + + core.Map toJson() => { + if (properties != null) 'properties': properties!, + if (tableName != null) 'tableName': tableName!, + if (updateMask != null) 'updateMask': updateMask!, + }; +} + /// Used by: /// /// - containeranalysis:v1 : AnalysisCompleted @@ -1388,7 +1479,6 @@ class $AuditAdvertiserResponse { /// - cloudresourcemanager:v3 : AuditLogConfig /// - config:v1 : AuditLogConfig /// - connectors:v1 : AuditLogConfig -/// - contactcenterinsights:v1 : GoogleIamV1AuditLogConfig /// - contentwarehouse:v1 : GoogleIamV1AuditLogConfig /// - datafusion:v1 : AuditLogConfig /// - datamigration:v1 : AuditLogConfig @@ -1912,6 +2002,44 @@ class $BooleanPolicy { }; } +/// Used by: +/// +/// - dialogflow:v2 : GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpecConditionBoostSpecBoostControlSpecControlPoint +/// - dialogflow:v3 : GoogleCloudDialogflowCxV3BoostSpecConditionBoostSpecBoostControlSpecControlPoint +class $BoostSpecConditionBoostSpecBoostControlSpecControlPoint { + /// Can be one of: 1. + /// + /// The numerical field value. 2. The duration spec for freshness: The value + /// must be formatted as an XSD `dayTimeDuration` value (a restricted subset + /// of an ISO 8601 duration value). The pattern for this is: `nDnM]`. + /// + /// Optional. + core.String? attributeValue; + + /// The value between -1 to 1 by which to boost the score if the + /// attribute_value evaluates to the value specified above. + /// + /// Optional. + core.double? boostAmount; + + $BoostSpecConditionBoostSpecBoostControlSpecControlPoint({ + this.attributeValue, + this.boostAmount, + }); + + $BoostSpecConditionBoostSpecBoostControlSpecControlPoint.fromJson( + core.Map json_) + : this( + attributeValue: json_['attributeValue'] as core.String?, + boostAmount: (json_['boostAmount'] as core.num?)?.toDouble(), + ); + + core.Map toJson() => { + if (attributeValue != null) 'attributeValue': attributeValue!, + if (boostAmount != null) 'boostAmount': boostAmount!, + }; +} + /// Used by: /// /// - displayvideo:v2 : BrowserAssignedTargetingOptionDetails @@ -4815,6 +4943,33 @@ class $CustomListTargetingSetting { }; } +/// Used by: +/// +/// - metastore:v1 : DataCatalogConfig +/// - metastore:v2 : GoogleCloudMetastoreV2DataCatalogConfig +class $DataCatalogConfig { + /// Defines whether the metastore metadata should be synced to Data Catalog. 
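// A sketch of the update-mask semantics documented for
// $AlterTablePropertiesRequest above, assuming metastore:v1 exposes the
// shared message as `AlterTablePropertiesRequest` (as the "Used by" note
// indicates). The database and table names are hypothetical.
import 'package:googleapis/metastore/v1.dart' show AlterTablePropertiesRequest;

void main() {
  // With existing table properties {a: 1, b: 2}, this request yields
  // {a: 1, b: 3, c: 4}: only the masked paths properties.b and properties.c
  // are overwritten, so `a` keeps its current value (per the doc above).
  final request = AlterTablePropertiesRequest(
    tableName: 'databases/my_db/tables/my_table',
    properties: {'a': '2', 'b': '3', 'c': '4'},
    updateMask: 'properties.b,properties.c',
  );
  print(request.toJson());
}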
+ /// + /// The default value is to disable syncing metastore metadata to Data + /// Catalog. + /// + /// Optional. + core.bool? enabled; + + $DataCatalogConfig({ + this.enabled, + }); + + $DataCatalogConfig.fromJson(core.Map json_) + : this( + enabled: json_['enabled'] as core.bool?, + ); + + core.Map toJson() => { + if (enabled != null) 'enabled': enabled!, + }; +} + /// Used by: /// /// - admob:v1 : Date @@ -6562,23 +6717,32 @@ class $EgressSource { /// - aiplatform:v1 : GoogleCloudAiplatformV1CancelTrainingPipelineRequest /// - aiplatform:v1 : GoogleCloudAiplatformV1CancelTuningJobRequest /// - aiplatform:v1 : GoogleCloudAiplatformV1CheckTrialEarlyStoppingStateRequest +/// - aiplatform:v1 : GoogleCloudAiplatformV1DirectUploadSource /// - aiplatform:v1 : GoogleCloudAiplatformV1ExactMatchSpec /// - aiplatform:v1 : GoogleCloudAiplatformV1FeatureOnlineStoreOptimized /// - aiplatform:v1 : GoogleCloudAiplatformV1FeatureViewIndexConfigBruteForceConfig /// - aiplatform:v1 : GoogleCloudAiplatformV1ListOptimalTrialsRequest +/// - aiplatform:v1 : GoogleCloudAiplatformV1NotebookExecutionJobWorkbenchRuntime /// - aiplatform:v1 : GoogleCloudAiplatformV1PauseModelDeploymentMonitoringJobRequest /// - aiplatform:v1 : GoogleCloudAiplatformV1PauseScheduleRequest -/// - aiplatform:v1 : GoogleCloudAiplatformV1PscInterfaceConfig +/// - aiplatform:v1 : GoogleCloudAiplatformV1RagVectorDbConfigRagManagedDb /// - aiplatform:v1 : GoogleCloudAiplatformV1RebootPersistentResourceRequest /// - aiplatform:v1 : GoogleCloudAiplatformV1RemoveContextChildrenResponse /// - aiplatform:v1 : GoogleCloudAiplatformV1RemoveDatapointsResponse /// - aiplatform:v1 : GoogleCloudAiplatformV1ResumeModelDeploymentMonitoringJobRequest /// - aiplatform:v1 : GoogleCloudAiplatformV1StartNotebookRuntimeRequest +/// - aiplatform:v1 : GoogleCloudAiplatformV1StopNotebookRuntimeRequest /// - aiplatform:v1 : GoogleCloudAiplatformV1StopTrialRequest /// - aiplatform:v1 : GoogleCloudAiplatformV1SyncFeatureViewRequest /// - aiplatform:v1 : GoogleCloudAiplatformV1ToolCallValidSpec +/// - aiplatform:v1 : GoogleCloudAiplatformV1ToolGoogleSearch /// - aiplatform:v1 : GoogleCloudAiplatformV1ToolNameMatchSpec /// - aiplatform:v1 : GoogleCloudAiplatformV1ToolParameterKeyMatchSpec +/// - aiplatform:v1 : GoogleCloudAiplatformV1TrajectoryAnyOrderMatchSpec +/// - aiplatform:v1 : GoogleCloudAiplatformV1TrajectoryExactMatchSpec +/// - aiplatform:v1 : GoogleCloudAiplatformV1TrajectoryInOrderMatchSpec +/// - aiplatform:v1 : GoogleCloudAiplatformV1TrajectoryPrecisionSpec +/// - aiplatform:v1 : GoogleCloudAiplatformV1TrajectoryRecallSpec /// - aiplatform:v1 : GoogleCloudAiplatformV1UpgradeNotebookRuntimeRequest /// - aiplatform:v1 : GoogleCloudAiplatformV1UpsertDatapointsResponse /// - aiplatform:v1 : GoogleCloudAiplatformV1WriteFeatureValuesResponse @@ -6606,6 +6770,7 @@ class $EgressSource { /// - androidpublisher:v3 : ExternalTransactionTestPurchase /// - androidpublisher:v3 : FullRefund /// - androidpublisher:v3 : MigrateBasePlanPricesResponse +/// - androidpublisher:v3 : OneTimeCode /// - androidpublisher:v3 : OtherRecurringProduct /// - androidpublisher:v3 : OtherRegionsSubscriptionOfferPhaseFreePriceOverride /// - androidpublisher:v3 : PendingCancellation @@ -6741,6 +6906,7 @@ class $EgressSource { /// - cloudbuild:v2 : FetchReadWriteTokenRequest /// - cloudchannel:v1 : GoogleLongrunningCancelOperationRequest /// - cloudchannel:v1 : GoogleProtobufEmpty +/// - cloudcontrolspartner:v1 : Empty /// - clouddeploy:v1 : AbandonReleaseRequest /// - clouddeploy:v1 : 
AbandonReleaseResponse /// - clouddeploy:v1 : AdvanceChildRolloutJob @@ -6811,6 +6977,7 @@ class $EgressSource { /// - connectors:v1 : RefreshConnectionSchemaMetadataRequest /// - connectors:v1 : RepairEventingRequest /// - connectors:v1 : RetryEventSubscriptionRequest +/// - connectors:v1 : WithdrawCustomConnectorVersionRequest /// - connectors:v2 : Empty /// - contactcenterinsights:v1 : GoogleCloudContactcenterinsightsV1DeployQaScorecardRevisionRequest /// - contactcenterinsights:v1 : GoogleCloudContactcenterinsightsV1HoldData @@ -6851,7 +7018,6 @@ class $EgressSource { /// - datamigration:v1 : Empty /// - datamigration:v1 : LogMiner /// - datamigration:v1 : OracleAsmLogFileAccess -/// - datamigration:v1 : PromoteMigrationJobRequest /// - datamigration:v1 : RollbackConversionWorkspaceRequest /// - datamigration:v1 : StaticIpConnectivity /// - datamigration:v1 : StaticServiceIpConnectivity @@ -6889,12 +7055,16 @@ class $EgressSource { /// - datastream:v1 : AvroFileFormat /// - datastream:v1 : BackfillNoneStrategy /// - datastream:v1 : BigQueryProfile +/// - datastream:v1 : BinaryLogPosition /// - datastream:v1 : CancelOperationRequest /// - datastream:v1 : DropLargeObjects /// - datastream:v1 : Empty +/// - datastream:v1 : Gtid +/// - datastream:v1 : LogMiner /// - datastream:v1 : Merge /// - datastream:v1 : MostRecentStartPosition /// - datastream:v1 : NextAvailableStartPosition +/// - datastream:v1 : OracleAsmLogFileAccess /// - datastream:v1 : SqlServerChangeTables /// - datastream:v1 : SqlServerTransactionLogs /// - datastream:v1 : StartBackfillJobRequest @@ -6978,6 +7148,7 @@ class $EgressSource { /// - drivelabels:v2 : GoogleProtobufEmpty /// - essentialcontacts:v1 : GoogleProtobufEmpty /// - eventarc:v1 : Empty +/// - eventarc:v1 : GoogleCloudEventarcV1PipelineMessagePayloadFormatJsonFormat /// - eventarc:v1 : GoogleLongrunningCancelOperationRequest /// - file:v1 : CancelOperationRequest /// - file:v1 : Empty @@ -7015,7 +7186,6 @@ class $EgressSource { /// - gkehub:v2 : Empty /// - gkeonprem:v1 : CancelOperationRequest /// - gkeonprem:v1 : Empty -/// - gkeonprem:v1 : VmwareAdminMetalLbConfig /// - gmail:v1 : DisableCseKeyPairRequest /// - gmail:v1 : EnableCseKeyPairRequest /// - gmail:v1 : ObliterateCseKeyPairRequest @@ -7086,6 +7256,7 @@ class $EgressSource { /// - metastore:v1 : CancelOperationRequest /// - metastore:v1 : CompleteMigrationRequest /// - metastore:v1 : Empty +/// - metastore:v2 : GoogleCloudMetastoreV2EncryptionConfig /// - migrationcenter:v1 : AggregationCount /// - migrationcenter:v1 : AggregationFrequency /// - migrationcenter:v1 : AggregationSum @@ -7116,12 +7287,14 @@ class $EgressSource { /// - netapp:v1 : ResumeReplicationRequest /// - netapp:v1 : ReverseReplicationDirectionRequest /// - netapp:v1 : SwitchActiveReplicaZoneRequest +/// - netapp:v1 : SyncReplicationRequest /// - netapp:v1 : VerifyKmsConfigRequest /// - networkconnectivity:v1 : Empty /// - networkconnectivity:v1 : GoogleLongrunningCancelOperationRequest /// - networkmanagement:v1 : CancelOperationRequest /// - networkmanagement:v1 : Empty /// - networkmanagement:v1 : RerunConnectivityTestRequest +/// - networksecurity:v1 : AuthzPolicyCustomProviderCloudIap /// - networksecurity:v1 : CancelOperationRequest /// - networksecurity:v1 : Empty /// - networkservices:v1 : CancelOperationRequest @@ -7150,6 +7323,8 @@ class $EgressSource { /// - osconfig:v1 : PausePatchDeploymentRequest /// - osconfig:v1 : ResumePatchDeploymentRequest /// - oslogin:v1 : Empty +/// - parallelstore:v1 : 
CancelOperationRequest +/// - parallelstore:v1 : GoogleProtobufEmpty /// - paymentsresellersubscription:v1 : GoogleCloudPaymentsResellerSubscriptionV1UndoCancelSubscriptionRequest /// - people:v1 : Empty /// - playintegrity:v1 : WriteDeviceRecallResponse @@ -7203,6 +7378,8 @@ class $EgressSource { /// - secretmanager:v1 : Empty /// - securitycenter:v1 : Empty /// - securitycenter:v1 : RunAssetDiscoveryRequest +/// - securityposture:v1 : CancelOperationRequest +/// - securityposture:v1 : Empty /// - serviceconsumermanagement:v1 : CancelOperationRequest /// - serviceconsumermanagement:v1 : Empty /// - servicedirectory:v1 : Empty @@ -7604,6 +7781,65 @@ class $Error { }; } +/// Used by: +/// +/// - deploymentmanager:v2 : ErrorInfo +/// - networkconnectivity:v1 : GoogleRpcErrorInfo +class $ErrorInfo { + /// The logical grouping to which the "reason" belongs. + /// + /// The error domain is typically the registered service name of the tool or + /// product that generates the error. Example: "pubsub.googleapis.com". If the + /// error is generated by some common infrastructure, the error domain must be + /// a globally unique value that identifies the infrastructure. For Google API + /// infrastructure, the error domain is "googleapis.com". + core.String? domain; + + /// Additional structured details about this error. + /// + /// Keys must match /a-z+/ but should ideally be lowerCamelCase. Also they + /// must be limited to 64 characters in length. When identifying the current + /// value of an exceeded limit, the units should be contained in the key, not + /// the value. For example, rather than {"instanceLimit": "100/request"}, + /// should be returned as, {"instanceLimitPerRequest": "100"}, if the client + /// exceeds the number of instances that can be created in a single (batch) + /// request. + core.Map? metadata; + + /// The reason of the error. + /// + /// This is a constant value that identifies the proximate cause of the error. + /// Error reasons are unique within a particular domain of errors. This should + /// be at most 63 characters and match a regular expression of `A-Z+[A-Z0-9]`, + /// which represents UPPER_SNAKE_CASE. + core.String? reason; + + $ErrorInfo({ + this.domain, + this.metadata, + this.reason, + }); + + $ErrorInfo.fromJson(core.Map json_) + : this( + domain: json_['domain'] as core.String?, + metadata: + (json_['metadata'] as core.Map?)?.map( + (key, value) => core.MapEntry( + key, + value as core.String, + ), + ), + reason: json_['reason'] as core.String?, + ); + + core.Map toJson() => { + if (domain != null) 'domain': domain!, + if (metadata != null) 'metadata': metadata!, + if (reason != null) 'reason': reason!, + }; +} + /// Used by: /// /// - integrations:v1 : EnterpriseCrmEventbusProtoBooleanParameterArray @@ -7882,6 +8118,8 @@ class $ExchangeAssignedTargetingOptionDetails { /// - "EXCHANGE_ADMOST_GBID" : AdMost. /// - "EXCHANGE_TOPON_GBID" : TopOn. /// - "EXCHANGE_NETFLIX" : Netflix. + /// - "EXCHANGE_CORE" : Core. + /// - "EXCHANGE_TUBI" : Tubi. core.String? exchange; $ExchangeAssignedTargetingOptionDetails({ @@ -7990,6 +8228,8 @@ class $ExchangeConfigEnabledExchange { /// - "EXCHANGE_ADMOST_GBID" : AdMost. /// - "EXCHANGE_TOPON_GBID" : TopOn. /// - "EXCHANGE_NETFLIX" : Netflix. + /// - "EXCHANGE_CORE" : Core. + /// - "EXCHANGE_TUBI" : Tubi. core.String? exchange; /// Agency ID of Google Ad Manager. @@ -8130,6 +8370,8 @@ class $ExchangeReviewStatus { /// - "EXCHANGE_ADMOST_GBID" : AdMost. /// - "EXCHANGE_TOPON_GBID" : TopOn. /// - "EXCHANGE_NETFLIX" : Netflix. 
+ /// - "EXCHANGE_CORE" : Core. + /// - "EXCHANGE_TUBI" : Tubi. core.String? exchange; /// Status of the exchange review. @@ -8252,6 +8494,8 @@ class $ExchangeTargetingOptionDetails { /// - "EXCHANGE_ADMOST_GBID" : AdMost. /// - "EXCHANGE_TOPON_GBID" : TopOn. /// - "EXCHANGE_NETFLIX" : Netflix. + /// - "EXCHANGE_CORE" : Core. + /// - "EXCHANGE_TUBI" : Tubi. core.String? exchange; $ExchangeTargetingOptionDetails({ @@ -8524,6 +8768,62 @@ class $Exponential { }; } +/// Used by: +/// +/// - metastore:v1 : ExportMetadataRequest +/// - metastore:v2 : GoogleCloudMetastoreV2ExportMetadataRequest +class $ExportMetadataRequest { + /// The type of the database dump. + /// + /// If unspecified, defaults to MYSQL. + /// + /// Optional. + /// Possible string values are: + /// - "TYPE_UNSPECIFIED" : The type of the database dump is unknown. + /// - "MYSQL" : Database dump is a MySQL dump file. + /// - "AVRO" : Database dump contains Avro files. + core.String? databaseDumpType; + + /// A Cloud Storage URI of a folder, in the format gs:///. + /// + /// A sub-folder containing exported files will be created below it. + core.String? destinationGcsFolder; + + /// A request ID. + /// + /// Specify a unique request ID to allow the server to ignore the request if + /// it has completed. The server will ignore subsequent requests that provide + /// a duplicate request ID for at least 60 minutes after the first request.For + /// example, if an initial request times out, followed by another request with + /// the same request ID, the server ignores the second request to prevent the + /// creation of duplicate commitments.The request ID must be a valid UUID + /// (https://en.wikipedia.org/wiki/Universally_unique_identifier#Format). A + /// zero UUID (00000000-0000-0000-0000-000000000000) is not supported. + /// + /// Optional. + core.String? requestId; + + $ExportMetadataRequest({ + this.databaseDumpType, + this.destinationGcsFolder, + this.requestId, + }); + + $ExportMetadataRequest.fromJson(core.Map json_) + : this( + databaseDumpType: json_['databaseDumpType'] as core.String?, + destinationGcsFolder: json_['destinationGcsFolder'] as core.String?, + requestId: json_['requestId'] as core.String?, + ); + + core.Map toJson() => { + if (databaseDumpType != null) 'databaseDumpType': databaseDumpType!, + if (destinationGcsFolder != null) + 'destinationGcsFolder': destinationGcsFolder!, + if (requestId != null) 'requestId': requestId!, + }; +} + /// Used by: /// /// - vault:v1 : CalendarExportOptions @@ -8613,7 +8913,6 @@ class $ExportOptions01 { /// - compute:v1 : Expr /// - config:v1 : Expr /// - connectors:v1 : Expr -/// - contactcenterinsights:v1 : GoogleTypeExpr /// - containeranalysis:v1 : Expr /// - contentwarehouse:v1 : GoogleTypeExpr /// - datacatalog:v1 : Expr @@ -8654,6 +8953,7 @@ class $ExportOptions01 { /// - run:v2 : GoogleTypeExpr /// - secretmanager:v1 : Expr /// - securitycenter:v1 : Expr +/// - securityposture:v1 : Expr /// - servicedirectory:v1 : Expr /// - servicemanagement:v1 : Expr /// - spanner:v1 : Expr @@ -9056,6 +9356,30 @@ class $FixedBidStrategy { }; } +/// Used by: +/// +/// - eventarc:v1 : GoogleCloudEventarcV1PipelineMessagePayloadFormatAvroFormat +/// - eventarc:v1 : GoogleCloudEventarcV1PipelineMessagePayloadFormatProtobufFormat +class $Format { + /// The entire schema definition is stored in this field. + /// + /// Optional. + core.String? 
schemaDefinition; + + $Format({ + this.schemaDefinition, + }); + + $Format.fromJson(core.Map json_) + : this( + schemaDefinition: json_['schemaDefinition'] as core.String?, + ); + + core.Map toJson() => { + if (schemaDefinition != null) 'schemaDefinition': schemaDefinition!, + }; +} + /// Used by: /// /// - datamigration:v1 : ForwardSshTunnelConnectivity @@ -9270,6 +9594,32 @@ class $Gcloud { }; } +/// Used by: +/// +/// - parallelstore:v1 : DestinationGcsBucket +/// - parallelstore:v1 : SourceGcsBucket +class $GcsBucket { + /// URI to a Cloud Storage bucket in the format: `gs:///`. + /// + /// The path inside the bucket is optional. + /// + /// Required. + core.String? uri; + + $GcsBucket({ + this.uri, + }); + + $GcsBucket.fromJson(core.Map json_) + : this( + uri: json_['uri'] as core.String?, + ); + + core.Map toJson() => { + if (uri != null) 'uri': uri!, + }; +} + /// Used by: /// /// - healthcare:v1 : GcsSource @@ -11758,11 +12108,51 @@ class $Instance02 { }; } +/// Used by: +/// +/// - aiplatform:v1 : GoogleCloudAiplatformV1CometInstance +/// - aiplatform:v1 : GoogleCloudAiplatformV1MetricxInstance +class $Instance03 { + /// Output of the evaluated model. + /// + /// Required. + core.String? prediction; + + /// Ground truth used to compare against the prediction. + /// + /// Optional. + core.String? reference; + + /// Source text in original language. + /// + /// Optional. + core.String? source; + + $Instance03({ + this.prediction, + this.reference, + this.source, + }); + + $Instance03.fromJson(core.Map json_) + : this( + prediction: json_['prediction'] as core.String?, + reference: json_['reference'] as core.String?, + source: json_['source'] as core.String?, + ); + + core.Map toJson() => { + if (prediction != null) 'prediction': prediction!, + if (reference != null) 'reference': reference!, + if (source != null) 'source': source!, + }; +} + /// Used by: /// /// - aiplatform:v1 : GoogleCloudAiplatformV1SummarizationHelpfulnessInstance /// - aiplatform:v1 : GoogleCloudAiplatformV1SummarizationVerbosityInstance -class $Instance03 { +class $Instance04 { /// Text to be summarized. /// /// Required. @@ -11783,14 +12173,14 @@ class $Instance03 { /// Optional. core.String? reference; - $Instance03({ + $Instance04({ this.context, this.instruction, this.prediction, this.reference, }); - $Instance03.fromJson(core.Map json_) + $Instance04.fromJson(core.Map json_) : this( context: json_['context'] as core.String?, instruction: json_['instruction'] as core.String?, @@ -11912,40 +12302,162 @@ class $InstanceGroupManagersApplyUpdatesRequest { /// Used by: /// -/// - displayvideo:v2 : IntegralAdScience -/// - displayvideo:v3 : IntegralAdScience -class $IntegralAdScience { - /// The custom segment ID provided by Integral Ad Science. +/// - compute:v1 : InstanceGroupManagersResumeInstancesRequest +/// - compute:v1 : RegionInstanceGroupManagersResumeInstancesRequest +class $InstanceGroupManagersResumeInstancesRequest { + /// The URLs of one or more instances to resume. /// - /// The ID must be between `1000001` and `1999999`, inclusive. - core.List? customSegmentId; + /// This can be a full URL or a partial URL, such as + /// zones/\[ZONE\]/instances/\[INSTANCE_NAME\]. + core.List? instances; - /// Display Viewability section (applicable to display line items only). - /// Possible string values are: - /// - "PERFORMANCE_VIEWABILITY_UNSPECIFIED" : This enum is only a placeholder - /// and it doesn't specify any display viewability options. 
- /// - "PERFORMANCE_VIEWABILITY_40" : Target 40% Viewability or Higher. - /// - "PERFORMANCE_VIEWABILITY_50" : Target 50% Viewability or Higher. - /// - "PERFORMANCE_VIEWABILITY_60" : Target 60% Viewability or Higher. - /// - "PERFORMANCE_VIEWABILITY_70" : Target 70% Viewability or Higher. - core.String? displayViewability; + $InstanceGroupManagersResumeInstancesRequest({ + this.instances, + }); - /// Brand Safety - **Unrateable**. - core.bool? excludeUnrateable; + $InstanceGroupManagersResumeInstancesRequest.fromJson(core.Map json_) + : this( + instances: (json_['instances'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + ); - /// Ad Fraud settings. - /// Possible string values are: - /// - "SUSPICIOUS_ACTIVITY_UNSPECIFIED" : This enum is only a placeholder and - /// it doesn't specify any ad fraud prevention options. - /// - "SUSPICIOUS_ACTIVITY_HR" : Ad Fraud - Exclude High Risk. - /// - "SUSPICIOUS_ACTIVITY_HMR" : Ad Fraud - Exclude High and Moderate Risk. - core.String? excludedAdFraudRisk; + core.Map toJson() => { + if (instances != null) 'instances': instances!, + }; +} - /// Brand Safety - **Adult content**. - /// Possible string values are: - /// - "ADULT_UNSPECIFIED" : This enum is only a placeholder and it doesn't - /// specify any adult options. - /// - "ADULT_HR" : Adult - Exclude High Risk. +/// Used by: +/// +/// - compute:v1 : InstanceGroupManagersStartInstancesRequest +/// - compute:v1 : RegionInstanceGroupManagersStartInstancesRequest +class $InstanceGroupManagersStartInstancesRequest { + /// The URLs of one or more instances to start. + /// + /// This can be a full URL or a partial URL, such as + /// zones/\[ZONE\]/instances/\[INSTANCE_NAME\]. + core.List? instances; + + $InstanceGroupManagersStartInstancesRequest({ + this.instances, + }); + + $InstanceGroupManagersStartInstancesRequest.fromJson(core.Map json_) + : this( + instances: (json_['instances'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + ); + + core.Map toJson() => { + if (instances != null) 'instances': instances!, + }; +} + +/// Used by: +/// +/// - compute:v1 : InstanceGroupManagersStopInstancesRequest +/// - compute:v1 : RegionInstanceGroupManagersStopInstancesRequest +class $InstanceGroupManagersStopInstancesRequest { + /// If this flag is set to true, the Instance Group Manager will proceed to + /// stop the instances, skipping initialization on them. + core.bool? forceStop; + + /// The URLs of one or more instances to stop. + /// + /// This can be a full URL or a partial URL, such as + /// zones/\[ZONE\]/instances/\[INSTANCE_NAME\]. + core.List? instances; + + $InstanceGroupManagersStopInstancesRequest({ + this.forceStop, + this.instances, + }); + + $InstanceGroupManagersStopInstancesRequest.fromJson(core.Map json_) + : this( + forceStop: json_['forceStop'] as core.bool?, + instances: (json_['instances'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + ); + + core.Map toJson() => { + if (forceStop != null) 'forceStop': forceStop!, + if (instances != null) 'instances': instances!, + }; +} + +/// Used by: +/// +/// - compute:v1 : InstanceGroupManagersSuspendInstancesRequest +/// - compute:v1 : RegionInstanceGroupManagersSuspendInstancesRequest +class $InstanceGroupManagersSuspendInstancesRequest { + /// If this flag is set to true, the Instance Group Manager will proceed to + /// suspend the instances, skipping initialization on them. + core.bool? forceSuspend; + + /// The URLs of one or more instances to suspend. 
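// A minimal sketch of the stop/suspend request payloads defined above,
// assuming compute:v1 exposes the shared messages under the names listed in
// the "Used by" notes. The zone and instance names are hypothetical; the
// partial URLs follow the zones/[ZONE]/instances/[INSTANCE_NAME] form
// documented for these fields.
import 'package:googleapis/compute/v1.dart'
    show
        InstanceGroupManagersStopInstancesRequest,
        InstanceGroupManagersSuspendInstancesRequest;

void main() {
  final stop = InstanceGroupManagersStopInstancesRequest(
    // Per the field doc above, forceStop skips initialization on the listed
    // instances before stopping them.
    forceStop: true,
    instances: ['zones/us-central1-a/instances/my-mig-vm-0'],
  );
  final suspend = InstanceGroupManagersSuspendInstancesRequest(
    forceSuspend: false,
    instances: ['zones/us-central1-a/instances/my-mig-vm-1'],
  );
  print(stop.toJson());
  print(suspend.toJson());
}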
+ /// + /// This can be a full URL or a partial URL, such as + /// zones/\[ZONE\]/instances/\[INSTANCE_NAME\]. + core.List? instances; + + $InstanceGroupManagersSuspendInstancesRequest({ + this.forceSuspend, + this.instances, + }); + + $InstanceGroupManagersSuspendInstancesRequest.fromJson(core.Map json_) + : this( + forceSuspend: json_['forceSuspend'] as core.bool?, + instances: (json_['instances'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + ); + + core.Map toJson() => { + if (forceSuspend != null) 'forceSuspend': forceSuspend!, + if (instances != null) 'instances': instances!, + }; +} + +/// Used by: +/// +/// - displayvideo:v2 : IntegralAdScience +/// - displayvideo:v3 : IntegralAdScience +class $IntegralAdScience { + /// The custom segment ID provided by Integral Ad Science. + /// + /// The ID must be between `1000001` and `1999999`, inclusive. + core.List? customSegmentId; + + /// Display Viewability section (applicable to display line items only). + /// Possible string values are: + /// - "PERFORMANCE_VIEWABILITY_UNSPECIFIED" : This enum is only a placeholder + /// and it doesn't specify any display viewability options. + /// - "PERFORMANCE_VIEWABILITY_40" : Target 40% Viewability or Higher. + /// - "PERFORMANCE_VIEWABILITY_50" : Target 50% Viewability or Higher. + /// - "PERFORMANCE_VIEWABILITY_60" : Target 60% Viewability or Higher. + /// - "PERFORMANCE_VIEWABILITY_70" : Target 70% Viewability or Higher. + core.String? displayViewability; + + /// Brand Safety - **Unrateable**. + core.bool? excludeUnrateable; + + /// Ad Fraud settings. + /// Possible string values are: + /// - "SUSPICIOUS_ACTIVITY_UNSPECIFIED" : This enum is only a placeholder and + /// it doesn't specify any ad fraud prevention options. + /// - "SUSPICIOUS_ACTIVITY_HR" : Ad Fraud - Exclude High Risk. + /// - "SUSPICIOUS_ACTIVITY_HMR" : Ad Fraud - Exclude High and Moderate Risk. + core.String? excludedAdFraudRisk; + + /// Brand Safety - **Adult content**. + /// Possible string values are: + /// - "ADULT_UNSPECIFIED" : This enum is only a placeholder and it doesn't + /// specify any adult options. + /// - "ADULT_HR" : Adult - Exclude High Risk. /// - "ADULT_HMR" : Adult - Exclude High and Moderate Risk. core.String? excludedAdultRisk; @@ -13000,6 +13512,61 @@ class $LatLng { }; } +/// Used by: +/// +/// - metastore:v1 : LatestBackup +/// - metastore:v2 : GoogleCloudMetastoreV2LatestBackup +class $LatestBackup { + /// The ID of an in-progress scheduled backup. + /// + /// Empty if no backup is in progress. + /// + /// Output only. + core.String? backupId; + + /// The duration of the backup completion. + /// + /// Output only. + core.String? duration; + + /// The time when the backup was started. + /// + /// Output only. + core.String? startTime; + + /// The current state of the backup. + /// + /// Output only. + /// Possible string values are: + /// - "STATE_UNSPECIFIED" : The state of the backup is unknown. + /// - "IN_PROGRESS" : The backup is in progress. + /// - "SUCCEEDED" : The backup completed. + /// - "FAILED" : The backup failed. + core.String? 
state; + + $LatestBackup({ + this.backupId, + this.duration, + this.startTime, + this.state, + }); + + $LatestBackup.fromJson(core.Map json_) + : this( + backupId: json_['backupId'] as core.String?, + duration: json_['duration'] as core.String?, + startTime: json_['startTime'] as core.String?, + state: json_['state'] as core.String?, + ); + + core.Map toJson() => { + if (backupId != null) 'backupId': backupId!, + if (duration != null) 'duration': duration!, + if (startTime != null) 'startTime': startTime!, + if (state != null) 'state': state!, + }; +} + /// Used by: /// /// - containeranalysis:v1 : Layer @@ -13241,6 +13808,7 @@ class $Linear { /// Used by: /// /// - compute:v1 : HelpLink +/// - deploymentmanager:v2 : HelpLink /// - vmmigration:v1 : Link class $Link { /// Describes what the link offers. @@ -13435,6 +14003,7 @@ class $Locality { /// Used by: /// /// - compute:v1 : LocalizedMessage +/// - deploymentmanager:v2 : LocalizedMessage /// - vmmigration:v1 : LocalizedMessage class $LocalizedMessage { /// The locale used following the specification defined at @@ -13517,10 +14086,12 @@ class $LocalizedMessage { /// - notebooks:v1 : Location /// - notebooks:v2 : Location /// - oracledatabase:v1 : Location +/// - parallelstore:v1 : Location /// - privateca:v1 : Location /// - rapidmigrationassessment:v1 : Location /// - run:v1 : Location /// - secretmanager:v1 : Location +/// - securityposture:v1 : Location /// - servicedirectory:v1 : Location /// - tpu:v1 : Location /// - tpu:v2 : Location @@ -13794,6 +14365,40 @@ class $LogConfig { }; } +/// Used by: +/// +/// - datamigration:v1 : LogFileDirectories +/// - datastream:v1 : LogFileDirectories +class $LogFileDirectories { + /// Oracle directory for archived logs. + /// + /// Required. + core.String? archivedLogDirectory; + + /// Oracle directory for online logs. + /// + /// Required. + core.String? onlineLogDirectory; + + $LogFileDirectories({ + this.archivedLogDirectory, + this.onlineLogDirectory, + }); + + $LogFileDirectories.fromJson(core.Map json_) + : this( + archivedLogDirectory: json_['archivedLogDirectory'] as core.String?, + onlineLogDirectory: json_['onlineLogDirectory'] as core.String?, + ); + + core.Map toJson() => { + if (archivedLogDirectory != null) + 'archivedLogDirectory': archivedLogDirectory!, + if (onlineLogDirectory != null) + 'onlineLogDirectory': onlineLogDirectory!, + }; +} + /// Used by: /// /// - displayvideo:v2 : LookbackWindow @@ -14550,6 +15155,46 @@ class $MonitoringDestination { }; } +/// Used by: +/// +/// - metastore:v1 : MoveTableToDatabaseRequest +/// - metastore:v2 : GoogleCloudMetastoreV2MoveTableToDatabaseRequest +class $MoveTableToDatabaseRequest { + /// The name of the database where the table resides. + /// + /// Required. + core.String? dbName; + + /// The name of the database where the table should be moved. + /// + /// Required. + core.String? destinationDbName; + + /// The name of the table to be moved. + /// + /// Required. + core.String? 
tableName; + + $MoveTableToDatabaseRequest({ + this.dbName, + this.destinationDbName, + this.tableName, + }); + + $MoveTableToDatabaseRequest.fromJson(core.Map json_) + : this( + dbName: json_['dbName'] as core.String?, + destinationDbName: json_['destinationDbName'] as core.String?, + tableName: json_['tableName'] as core.String?, + ); + + core.Map toJson() => { + if (dbName != null) 'dbName': dbName!, + if (destinationDbName != null) 'destinationDbName': destinationDbName!, + if (tableName != null) 'tableName': tableName!, + }; +} + /// Used by: /// /// - displayvideo:v2 : NativeContentPositionAssignedTargetingOptionDetails @@ -15512,10 +16157,8 @@ class $Pacing { /// The type of pacing that defines how the budget amount will be spent across /// the pacing_period. /// - /// *Warning*: Starting on **November 5, 2024**, `PACING_TYPE_ASAP` will no - /// longer be compatible with pacing_period `PACING_PERIOD_FLIGHT` for - /// insertion orders. \[Read more about this announced - /// change\](/display-video/api/deprecations#features.io_asap). + /// `PACING_TYPE_ASAP` is not compatible with pacing_period + /// `PACING_PERIOD_FLIGHT` for insertion orders. /// /// Required. /// Possible string values are: @@ -15591,6 +16234,32 @@ class $PageInfo { }; } +/// Used by: +/// +/// - parallelstore:v1 : DestinationParallelstore +/// - parallelstore:v1 : SourceParallelstore +class $Parallelstore { + /// Root directory path to the Paralellstore filesystem, starting with `/`. + /// + /// Defaults to `/` if unset. + /// + /// Optional. + core.String? path; + + $Parallelstore({ + this.path, + }); + + $Parallelstore.fromJson(core.Map json_) + : this( + path: json_['path'] as core.String?, + ); + + core.Map toJson() => { + if (path != null) 'path': path!, + }; +} + /// Used by: /// /// - displayvideo:v2 : ParentalStatusAssignedTargetingOptionDetails @@ -16174,7 +16843,6 @@ class $Permissions { /// - compute:v1 : TestPermissionsResponse /// - config:v1 : TestIamPermissionsResponse /// - connectors:v1 : TestIamPermissionsResponse -/// - contactcenterinsights:v1 : GoogleIamV1TestIamPermissionsResponse /// - containeranalysis:v1 : TestIamPermissionsResponse /// - datacatalog:v1 : TestIamPermissionsResponse /// - datafusion:v1 : TestIamPermissionsResponse @@ -16587,35 +17255,197 @@ class $PortConfig { /// The port that control plane hosted load balancers will listen on. core.int? controlPlaneLoadBalancerPort; - $PortConfig({ - this.controlPlaneLoadBalancerPort, + $PortConfig({ + this.controlPlaneLoadBalancerPort, + }); + + $PortConfig.fromJson(core.Map json_) + : this( + controlPlaneLoadBalancerPort: + json_['controlPlaneLoadBalancerPort'] as core.int?, + ); + + core.Map toJson() => { + if (controlPlaneLoadBalancerPort != null) + 'controlPlaneLoadBalancerPort': controlPlaneLoadBalancerPort!, + }; +} + +/// Used by: +/// +/// - addressvalidation:v1 : GoogleTypePostalAddress +/// - cloudchannel:v1 : GoogleTypePostalAddress +/// - contentwarehouse:v1 : GoogleTypePostalAddress +/// - documentai:v1 : GoogleTypePostalAddress +/// - mybusinessaccountmanagement:v1 : PostalAddress +/// - mybusinessbusinessinformation:v1 : PostalAddress +/// - mybusinessverifications:v1 : PostalAddress +class $PostalAddress00 { + /// Unstructured address lines describing the lower levels of an address. + /// + /// Because values in address_lines do not have type information and may + /// sometimes contain multiple values in a single field (For example "Austin, + /// TX"), it is important that the line order is clear. 
The order of address + /// lines should be "envelope order" for the country/region of the address. In + /// places where this can vary (For example Japan), address_language is used + /// to make it explicit (For example "ja" for large-to-small ordering and + /// "ja-Latn" or "en" for small-to-large). This way, the most specific line of + /// an address can be selected based on the language. The minimum permitted + /// structural representation of an address consists of a region_code with all + /// remaining information placed in the address_lines. It would be possible to + /// format such an address very approximately without geocoding, but no + /// semantic reasoning could be made about any of the address components until + /// it was at least partially resolved. Creating an address only containing a + /// region_code and address_lines, and then geocoding is the recommended way + /// to handle completely unstructured addresses (as opposed to guessing which + /// parts of the address should be localities or administrative areas). + core.List? addressLines; + + /// Highest administrative subdivision which is used for postal addresses of a + /// country or region. + /// + /// For example, this can be a state, a province, an oblast, or a prefecture. + /// Specifically, for Spain this is the province and not the autonomous + /// community (For example "Barcelona" and not "Catalonia"). Many countries + /// don't use an administrative area in postal addresses. For example in + /// Switzerland this should be left unpopulated. + /// + /// Optional. + core.String? administrativeArea; + + /// BCP-47 language code of the contents of this address (if known). + /// + /// This is often the UI language of the input form or is expected to match + /// one of the languages used in the address' country/region, or their + /// transliterated equivalents. This can affect formatting in certain + /// countries, but is not critical to the correctness of the data and will + /// never affect any validation or other non-formatting related operations. If + /// this value is not known, it should be omitted (rather than specifying a + /// possibly incorrect default). Examples: "zh-Hant", "ja", "ja-Latn", "en". + /// + /// Optional. + core.String? languageCode; + + /// Generally refers to the city/town portion of the address. + /// + /// Examples: US city, IT comune, UK post town. In regions of the world where + /// localities are not well defined or do not fit into this structure well, + /// leave locality empty and use address_lines. + /// + /// Optional. + core.String? locality; + + /// The name of the organization at the address. + /// + /// Optional. + core.String? organization; + + /// Postal code of the address. + /// + /// Not all countries use or require postal codes to be present, but where + /// they are used, they may trigger additional validation with other parts of + /// the address (For example state/zip validation in the U.S.A.). + /// + /// Optional. + core.String? postalCode; + + /// The recipient at the address. + /// + /// This field may, under certain circumstances, contain multiline + /// information. For example, it might contain "care of" information. + /// + /// Optional. + core.List? recipients; + + /// CLDR region code of the country/region of the address. + /// + /// This is never inferred and it is up to the user to ensure the value is + /// correct. See https://cldr.unicode.org/ and + /// https://www.unicode.org/cldr/charts/30/supplemental/territory_information.html + /// for details. 
Example: "CH" for Switzerland. + /// + /// Required. + core.String? regionCode; + + /// The schema revision of the `PostalAddress`. + /// + /// This must be set to 0, which is the latest revision. All new revisions + /// **must** be backward compatible with old revisions. + core.int? revision; + + /// Additional, country-specific, sorting code. + /// + /// This is not used in most regions. Where it is used, the value is either a + /// string like "CEDEX", optionally followed by a number (For example "CEDEX + /// 7"), or just a number alone, representing the "sector code" (Jamaica), + /// "delivery area indicator" (Malawi) or "post office indicator" (For example + /// Côte d'Ivoire). + /// + /// Optional. + core.String? sortingCode; + + /// Sublocality of the address. + /// + /// For example, this can be neighborhoods, boroughs, districts. + /// + /// Optional. + core.String? sublocality; + + $PostalAddress00({ + this.addressLines, + this.administrativeArea, + this.languageCode, + this.locality, + this.organization, + this.postalCode, + this.recipients, + this.regionCode, + this.revision, + this.sortingCode, + this.sublocality, }); - $PortConfig.fromJson(core.Map json_) + $PostalAddress00.fromJson(core.Map json_) : this( - controlPlaneLoadBalancerPort: - json_['controlPlaneLoadBalancerPort'] as core.int?, + addressLines: (json_['addressLines'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + administrativeArea: json_['administrativeArea'] as core.String?, + languageCode: json_['languageCode'] as core.String?, + locality: json_['locality'] as core.String?, + organization: json_['organization'] as core.String?, + postalCode: json_['postalCode'] as core.String?, + recipients: (json_['recipients'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + regionCode: json_['regionCode'] as core.String?, + revision: json_['revision'] as core.int?, + sortingCode: json_['sortingCode'] as core.String?, + sublocality: json_['sublocality'] as core.String?, ); core.Map toJson() => { - if (controlPlaneLoadBalancerPort != null) - 'controlPlaneLoadBalancerPort': controlPlaneLoadBalancerPort!, + if (addressLines != null) 'addressLines': addressLines!, + if (administrativeArea != null) + 'administrativeArea': administrativeArea!, + if (languageCode != null) 'languageCode': languageCode!, + if (locality != null) 'locality': locality!, + if (organization != null) 'organization': organization!, + if (postalCode != null) 'postalCode': postalCode!, + if (recipients != null) 'recipients': recipients!, + if (regionCode != null) 'regionCode': regionCode!, + if (revision != null) 'revision': revision!, + if (sortingCode != null) 'sortingCode': sortingCode!, + if (sublocality != null) 'sublocality': sublocality!, }; } /// Used by: /// -/// - addressvalidation:v1 : GoogleTypePostalAddress -/// - cloudchannel:v1 : GoogleTypePostalAddress -/// - contentwarehouse:v1 : GoogleTypePostalAddress -/// - documentai:v1 : GoogleTypePostalAddress /// - domains:v1 : PostalAddress /// - jobs:v3 : PostalAddress /// - jobs:v4 : PostalAddress -/// - mybusinessaccountmanagement:v1 : PostalAddress -/// - mybusinessbusinessinformation:v1 : PostalAddress -/// - mybusinessverifications:v1 : PostalAddress -class $PostalAddress { +class $PostalAddress01 { /// Unstructured address lines describing the lower levels of an address. /// /// Because values in address_lines do not have type information and may @@ -16725,7 +17555,7 @@ class $PostalAddress { /// Optional. core.String? 
sublocality; - $PostalAddress({ + $PostalAddress01({ this.addressLines, this.administrativeArea, this.languageCode, @@ -16739,7 +17569,7 @@ class $PostalAddress { this.sublocality, }); - $PostalAddress.fromJson(core.Map json_) + $PostalAddress01.fromJson(core.Map json_) : this( addressLines: (json_['addressLines'] as core.List?) ?.map((value) => value as core.String) @@ -17158,6 +17988,32 @@ class $Query { }; } +/// Used by: +/// +/// - metastore:v1 : QueryMetadataRequest +/// - metastore:v2 : GoogleCloudMetastoreV2QueryMetadataRequest +class $QueryMetadataRequest { + /// A read-only SQL query to execute against the metadata database. + /// + /// The query cannot change or mutate the data. + /// + /// Required. + core.String? query; + + $QueryMetadataRequest({ + this.query, + }); + + $QueryMetadataRequest.fromJson(core.Map json_) + : this( + query: json_['query'] as core.String?, + ); + + core.Map toJson() => { + if (query != null) 'query': query!, + }; +} + /// Used by: /// /// - aiplatform:v1 : GoogleCloudAiplatformV1PairwiseQuestionAnsweringQualitySpec @@ -17260,8 +18116,8 @@ class $QuotaLimit { /// Specify the unit of the quota limit. /// - /// It uses the same syntax as Metric.unit. The supported unit kinds are - /// determined by the quota backend system. Here are some examples: * + /// It uses the same syntax as MetricDescriptor.unit. The supported unit kinds + /// are determined by the quota backend system. Here are some examples: * /// "1/min/{project}" for quota per minute per project. Note: the order of /// unit components is insignificant. The "1" at the beginning is required to /// follow the metric unit syntax. @@ -17547,48 +18403,6 @@ class $ReplaceAllTextResponse { }; } -/// Used by: -/// -/// - integrations:v1 : EnterpriseCrmFrontendsEventbusProtoEventExecutionInfoReplayInfo -/// - integrations:v1 : GoogleCloudIntegrationsV1alphaExecutionReplayInfo -class $ReplayInfo { - /// If this execution is a replay of another execution, then this field - /// contains the original execution id. - core.String? originalExecutionInfoId; - - /// reason for replay - core.String? replayReason; - - /// If this execution has been replayed, then this field contains the - /// execution ids of the replayed executions. - core.List? replayedExecutionInfoIds; - - $ReplayInfo({ - this.originalExecutionInfoId, - this.replayReason, - this.replayedExecutionInfoIds, - }); - - $ReplayInfo.fromJson(core.Map json_) - : this( - originalExecutionInfoId: - json_['originalExecutionInfoId'] as core.String?, - replayReason: json_['replayReason'] as core.String?, - replayedExecutionInfoIds: - (json_['replayedExecutionInfoIds'] as core.List?) - ?.map((value) => value as core.String) - .toList(), - ); - - core.Map toJson() => { - if (originalExecutionInfoId != null) - 'originalExecutionInfoId': originalExecutionInfoId!, - if (replayReason != null) 'replayReason': replayReason!, - if (replayedExecutionInfoIds != null) - 'replayedExecutionInfoIds': replayedExecutionInfoIds!, - }; -} - /// Used by: /// /// - compute:v1 : InstanceGroupManagersDeletePerInstanceConfigsReq @@ -18206,6 +19020,78 @@ class $ResponseMetadata { }; } +/// Used by: +/// +/// - metastore:v1 : RestoreServiceRequest +/// - metastore:v2 : GoogleCloudMetastoreV2RestoreServiceRequest +class $RestoreServiceRequest { + /// The relative resource name of the metastore service backup to restore + /// from, in the following + /// form:projects/{project_id}/locations/{location_id}/services/{service_id}/backups/{backup_id}. 
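// A hedged sketch of the $RestoreServiceRequest message documented here,
// assuming metastore:v1 exposes it as `RestoreServiceRequest` (per the
// "Used by" note). The backup resource name and request ID are hypothetical;
// `backup` and `backupLocation` are mutually exclusive, so only one is set.
import 'package:googleapis/metastore/v1.dart' show RestoreServiceRequest;

void main() {
  final request = RestoreServiceRequest(
    backup:
        'projects/my-project/locations/us-central1/services/my-service/backups/my-backup',
    restoreType: 'METADATA_ONLY',
    // Any valid, non-zero UUID works as the idempotency request ID.
    requestId: '3c9f9f6e-1b2a-4c3d-8e4f-000000000001',
  );
  print(request.toJson());
}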
+ /// + /// Mutually exclusive with backup_location, and exactly one of the two must + /// be set. + /// + /// Optional. + core.String? backup; + + /// A Cloud Storage URI specifying the location of the backup artifacts, + /// namely - backup avro files under "avro/", backup_metastore.json and + /// service.json, in the following form:gs://. + /// + /// Mutually exclusive with backup, and exactly one of the two must be set. + /// + /// Optional. + core.String? backupLocation; + + /// A request ID. + /// + /// Specify a unique request ID to allow the server to ignore the request if + /// it has completed. The server will ignore subsequent requests that provide + /// a duplicate request ID for at least 60 minutes after the first request.For + /// example, if an initial request times out, followed by another request with + /// the same request ID, the server ignores the second request to prevent the + /// creation of duplicate commitments.The request ID must be a valid UUID + /// (https://en.wikipedia.org/wiki/Universally_unique_identifier#Format). A + /// zero UUID (00000000-0000-0000-0000-000000000000) is not supported. + /// + /// Optional. + core.String? requestId; + + /// The type of restore. + /// + /// If unspecified, defaults to METADATA_ONLY. + /// + /// Optional. + /// Possible string values are: + /// - "RESTORE_TYPE_UNSPECIFIED" : The restore type is unknown. + /// - "FULL" : The service's metadata and configuration are restored. + /// - "METADATA_ONLY" : Only the service's metadata is restored. + core.String? restoreType; + + $RestoreServiceRequest({ + this.backup, + this.backupLocation, + this.requestId, + this.restoreType, + }); + + $RestoreServiceRequest.fromJson(core.Map json_) + : this( + backup: json_['backup'] as core.String?, + backupLocation: json_['backupLocation'] as core.String?, + requestId: json_['requestId'] as core.String?, + restoreType: json_['restoreType'] as core.String?, + ); + + core.Map toJson() => { + if (backup != null) 'backup': backup!, + if (backupLocation != null) 'backupLocation': backupLocation!, + if (requestId != null) 'requestId': requestId!, + if (restoreType != null) 'restoreType': restoreType!, + }; +} + /// Used by: /// /// - dataplex:v1 : GoogleCloudDataplexV1DataProfileResultPostScanActionsResultBigQueryExportResult @@ -18612,8 +19498,9 @@ class $SdfConfig { /// - "SDF_VERSION_7_1" : SDF version 7.1. Read the \[v7 migration /// guide\](/display-video/api/structured-data-file/v7-migration-guide) before /// migrating to this version. - /// - "SDF_VERSION_8" : SDF version 8. **This SDF version is in beta. It is - /// only available to a subset of users.** + /// - "SDF_VERSION_8" : SDF version 8. Read the \[v8 migration + /// guide\](/display-video/api/structured-data-file/v8-migration-guide) before + /// migrating to this version. core.String? version; $SdfConfig({ @@ -18916,37 +19803,6 @@ class $SensitiveCategoryTargetingOptionDetails { }; } -/// Used by: -/// -/// - language:v1 : Sentiment -/// - language:v2 : Sentiment -class $Sentiment { - /// A non-negative number in the \[0, +inf) range, which represents the - /// absolute magnitude of sentiment regardless of score (positive or - /// negative). - core.double? magnitude; - - /// Sentiment score between -1.0 (negative sentiment) and 1.0 (positive - /// sentiment). - core.double? 
score; - - $Sentiment({ - this.magnitude, - this.score, - }); - - $Sentiment.fromJson(core.Map json_) - : this( - magnitude: (json_['magnitude'] as core.num?)?.toDouble(), - score: (json_['score'] as core.num?)?.toDouble(), - ); - - core.Map toJson() => { - if (magnitude != null) 'magnitude': magnitude!, - if (score != null) 'score': score!, - }; -} - /// Used by: /// /// - cloudbuild:v2 : GoogleDevtoolsCloudbuildV2ServiceDirectoryConfig @@ -19490,11 +20346,53 @@ class $Shared10 { }; } +/// Used by: +/// +/// - datacatalog:v1 : GoogleCloudDatacatalogV1MigrationConfig +/// - datacatalog:v1 : GoogleCloudDatacatalogV1SetConfigRequest +class $Shared11 { + /// Opt-in status for the UI switch to Dataplex. + /// Possible string values are: + /// - "CATALOG_UI_EXPERIENCE_UNSPECIFIED" : Default value. The default UI is + /// Dataplex. + /// - "CATALOG_UI_EXPERIENCE_ENABLED" : The UI is Dataplex. + /// - "CATALOG_UI_EXPERIENCE_DISABLED" : The UI is Data Catalog. + core.String? catalogUiExperience; + + /// Opt-in status for the migration of Tag Templates to Dataplex. + /// Possible string values are: + /// - "TAG_TEMPLATE_MIGRATION_UNSPECIFIED" : Default value. Migration of Tag + /// Templates from Data Catalog to Dataplex is not performed. + /// - "TAG_TEMPLATE_MIGRATION_ENABLED" : Migration of Tag Templates from Data + /// Catalog to Dataplex is enabled. + /// - "TAG_TEMPLATE_MIGRATION_DISABLED" : Migration of Tag Templates from Data + /// Catalog to Dataplex is disabled. + core.String? tagTemplateMigration; + + $Shared11({ + this.catalogUiExperience, + this.tagTemplateMigration, + }); + + $Shared11.fromJson(core.Map json_) + : this( + catalogUiExperience: json_['catalogUiExperience'] as core.String?, + tagTemplateMigration: json_['tagTemplateMigration'] as core.String?, + ); + + core.Map toJson() => { + if (catalogUiExperience != null) + 'catalogUiExperience': catalogUiExperience!, + if (tagTemplateMigration != null) + 'tagTemplateMigration': tagTemplateMigration!, + }; +} + /// Used by: /// /// - dialogflow:v2 : GoogleCloudDialogflowV2Sentiment /// - dialogflow:v3 : GoogleCloudDialogflowCxV3SentimentAnalysisResult -class $Shared11 { +class $Shared12 { /// A non-negative number in the \[0, +inf) range, which represents the /// absolute magnitude of sentiment, regardless of score (positive or /// negative). @@ -19504,12 +20402,12 @@ class $Shared11 { /// sentiment). core.double? score; - $Shared11({ + $Shared12({ this.magnitude, this.score, }); - $Shared11.fromJson(core.Map json_) + $Shared12.fromJson(core.Map json_) : this( magnitude: (json_['magnitude'] as core.num?)?.toDouble(), score: (json_['score'] as core.num?)?.toDouble(), @@ -19525,7 +20423,7 @@ class $Shared11 { /// /// - osconfig:v1 : OSPolicyAssignmentInstanceFilterInventory /// - osconfig:v1 : OSPolicyInventoryFilter -class $Shared12 { +class $Shared13 { /// The OS short name /// /// Required. @@ -19539,12 +20437,12 @@ class $Shared12 { /// versions. core.String? osVersion; - $Shared12({ + $Shared13({ this.osShortName, this.osVersion, }); - $Shared12.fromJson(core.Map json_) + $Shared13.fromJson(core.Map json_) : this( osShortName: json_['osShortName'] as core.String?, osVersion: json_['osVersion'] as core.String?, @@ -19560,7 +20458,7 @@ class $Shared12 { /// /// - vmwareengine:v1 : Nsx /// - vmwareengine:v1 : Vcenter -class $Shared13 { +class $Shared14 { /// Fully qualified domain name of the appliance. core.String? fqdn; @@ -19580,14 +20478,14 @@ class $Shared13 { /// Version of the appliance. core.String? 
version; - $Shared13({ + $Shared14({ this.fqdn, this.internalIp, this.state, this.version, }); - $Shared13.fromJson(core.Map json_) + $Shared14.fromJson(core.Map json_) : this( fqdn: json_['fqdn'] as core.String?, internalIp: json_['internalIp'] as core.String?, @@ -19607,14 +20505,14 @@ class $Shared13 { /// /// - youtube:v3 : CommentSnippetAuthorChannelId /// - youtube:v3 : LanguageTag -class $Shared14 { +class $Shared15 { core.String? value; - $Shared14({ + $Shared15({ this.value, }); - $Shared14.fromJson(core.Map json_) + $Shared15.fromJson(core.Map json_) : this( value: json_['value'] as core.String?, ); @@ -20069,6 +20967,46 @@ class $Spec { }; } +/// Used by: +/// +/// - migrationcenter:v1 : MySqlProperty +/// - migrationcenter:v1 : PostgreSqlProperty +class $SqlProperty { + /// The property is enabled. + /// + /// Required. + core.bool? enabled; + + /// The property numeric value. + /// + /// Required. + core.String? numericValue; + + /// The property name. + /// + /// Required. + core.String? property; + + $SqlProperty({ + this.enabled, + this.numericValue, + this.property, + }); + + $SqlProperty.fromJson(core.Map json_) + : this( + enabled: json_['enabled'] as core.bool?, + numericValue: json_['numericValue'] as core.String?, + property: json_['property'] as core.String?, + ); + + core.Map toJson() => { + if (enabled != null) 'enabled': enabled!, + if (numericValue != null) 'numericValue': numericValue!, + if (property != null) 'property': property!, + }; +} + /// Used by: /// /// - workflowexecutions:v1 : StateError @@ -20290,6 +21228,7 @@ class $StaticRouteConfig { /// - managedidentities:v1 : Status /// - memcache:v1 : Status /// - metastore:v1 : Status +/// - metastore:v2 : GoogleRpcStatus /// - migrationcenter:v1 : Status /// - ml:v1 : GoogleRpc__Status /// - monitoring:v3 : Status @@ -20303,6 +21242,7 @@ class $StaticRouteConfig { /// - ondemandscanning:v1 : Status /// - oracledatabase:v1 : Status /// - osconfig:v1 : Status +/// - parallelstore:v1 : Status /// - people:v1 : Status /// - policysimulator:v1 : GoogleRpcStatus /// - policytroubleshooter:v1 : GoogleRpcStatus @@ -20316,6 +21256,7 @@ class $StaticRouteConfig { /// - run:v2 : GoogleRpcStatus /// - runtimeconfig:v1 : Status /// - securitycenter:v1 : Status +/// - securityposture:v1 : Status /// - serviceconsumermanagement:v1 : Status /// - servicecontrol:v1 : Status /// - servicecontrol:v2 : Status @@ -20723,7 +21664,6 @@ class $TenantProjectRequest { /// - cloudtasks:v2 : TestIamPermissionsRequest /// - config:v1 : TestIamPermissionsRequest /// - connectors:v1 : TestIamPermissionsRequest -/// - contactcenterinsights:v1 : GoogleIamV1TestIamPermissionsRequest /// - containeranalysis:v1 : TestIamPermissionsRequest /// - datacatalog:v1 : TestIamPermissionsRequest /// - datafusion:v1 : TestIamPermissionsRequest @@ -21035,6 +21975,7 @@ class $ThirdPartyVendorConfig { /// - clouddeploy:v1 : TimeOfDay /// - dlp:v2 : GoogleTypeTimeOfDay /// - gkebackup:v1 : TimeOfDay +/// - looker:v1 : TimeOfDay /// - memcache:v1 : TimeOfDay /// - monitoring:v3 : TimeOfDay /// - mybusinessbusinessinformation:v1 : TimeOfDay @@ -21095,7 +22036,6 @@ class $TimeOfDay00 { /// /// - jobs:v3 : TimeOfDay /// - jobs:v4 : TimeOfDay -/// - looker:v1 : TimeOfDay class $TimeOfDay01 { /// Hours of day in 24 hour format. /// @@ -21187,10 +22127,14 @@ class $TimeRange { /// - oracledatabase:v1 : TimeZone /// - osconfig:v1 : TimeZone class $TimeZone { - /// IANA Time Zone Database time zone, e.g. "America/New_York". + /// IANA Time Zone Database time zone. 
+ /// + /// For example "America/New_York". core.String? id; - /// IANA Time Zone Database version number, e.g. "2019a". + /// IANA Time Zone Database version number. + /// + /// For example "2019a". /// /// Optional. core.String? version; @@ -21431,6 +22375,32 @@ class $TrackingFloodlightActivityConfig { }; } +/// Used by: +/// +/// - integrations:v1 : EnterpriseCrmFrontendsEventbusProtoTriggerConfigVariables +/// - integrations:v1 : GoogleCloudIntegrationsV1alphaTriggerConfigVariables +class $TriggerConfigVariables { + /// List of variable names. + /// + /// Optional. + core.List? names; + + $TriggerConfigVariables({ + this.names, + }); + + $TriggerConfigVariables.fromJson(core.Map json_) + : this( + names: (json_['names'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + ); + + core.Map toJson() => { + if (names != null) 'names': names!, + }; +} + /// Used by: /// /// - cloudtrace:v2 : TruncatableString @@ -21931,6 +22901,44 @@ class $VersionedPackage { }; } +/// Used by: +/// +/// - aiplatform:v1 : GoogleCloudAiplatformV1RetrieveContextsRequestVertexRagStoreRagResource +/// - aiplatform:v1 : GoogleCloudAiplatformV1VertexRagStoreRagResource +class $VertexRagStoreRagResource { + /// RagCorpora resource name. + /// + /// Format: `projects/{project}/locations/{location}/ragCorpora/{rag_corpus}` + /// + /// Optional. + core.String? ragCorpus; + + /// rag_file_id. + /// + /// The files should be in the same rag_corpus set in rag_corpus field. + /// + /// Optional. + core.List? ragFileIds; + + $VertexRagStoreRagResource({ + this.ragCorpus, + this.ragFileIds, + }); + + $VertexRagStoreRagResource.fromJson(core.Map json_) + : this( + ragCorpus: json_['ragCorpus'] as core.String?, + ragFileIds: (json_['ragFileIds'] as core.List?) + ?.map((value) => value as core.String) + .toList(), + ); + + core.Map toJson() => { + if (ragCorpus != null) 'ragCorpus': ragCorpus!, + if (ragFileIds != null) 'ragFileIds': ragFileIds!, + }; +} + /// Used by: /// /// - displayvideo:v2 : VideoAdSequenceStep diff --git a/generated/googleapis/lib/sheets/v4.dart b/generated/googleapis/lib/sheets/v4.dart index fd9344768..e9ea4f73b 100644 --- a/generated/googleapis/lib/sheets/v4.dart +++ b/generated/googleapis/lib/sheets/v4.dart @@ -11370,6 +11370,12 @@ class SetBasicFilterRequest { /// /// To clear validation in a range, call this with no rule specified. class SetDataValidationRequest { + /// If true, the data validation rule will be applied to the filtered rows as + /// well. + /// + /// Optional. + core.bool? filteredRowsIncluded; + /// The range the data validation rule should apply to. GridRange? range; @@ -11378,12 +11384,14 @@ class SetDataValidationRequest { DataValidationRule? rule; SetDataValidationRequest({ + this.filteredRowsIncluded, this.range, this.rule, }); SetDataValidationRequest.fromJson(core.Map json_) : this( + filteredRowsIncluded: json_['filteredRowsIncluded'] as core.bool?, range: json_.containsKey('range') ? GridRange.fromJson( json_['range'] as core.Map) @@ -11395,6 +11403,8 @@ class SetDataValidationRequest { ); core.Map toJson() => { + if (filteredRowsIncluded != null) + 'filteredRowsIncluded': filteredRowsIncluded!, if (range != null) 'range': range!, if (rule != null) 'rule': rule!, }; diff --git a/generated/googleapis/lib/solar/v1.dart b/generated/googleapis/lib/solar/v1.dart index cf5d97707..41d8a10de 100644 --- a/generated/googleapis/lib/solar/v1.dart +++ b/generated/googleapis/lib/solar/v1.dart @@ -328,7 +328,7 @@ class BuildingInsights { /// at 0.25 m/pixel. 
core.String? imageryQuality; - /// The resource name for the building, of the format `building/`. + /// The resource name for the building, of the format `buildings/{place_id}`. core.String? name; /// Postal code (e.g., US zip code) this building is contained by. diff --git a/generated/googleapis/lib/spanner/v1.dart b/generated/googleapis/lib/spanner/v1.dart index efde295ca..3d901a657 100644 --- a/generated/googleapis/lib/spanner/v1.dart +++ b/generated/googleapis/lib/spanner/v1.dart @@ -1567,15 +1567,16 @@ class ProjectsInstancesBackupsResource { /// [encryptionConfig_kmsKeyNames] - Optional. Specifies the KMS configuration /// for the one or more keys used to protect the backup. Values are of the /// form `projects//locations//keyRings//cryptoKeys/`. The keys referenced by - /// kms_key_names must fully cover all regions of the backup's instance - /// configuration. Some examples: * For single region instance configs, - /// specify a single regional location KMS key. * For multi-regional instance - /// configs of type GOOGLE_MANAGED, either specify a multi-regional location - /// KMS key or multiple regional location KMS keys that cover all regions in - /// the instance config. * For an instance config of type USER_MANAGED, please - /// specify only regional location KMS keys to cover each region in the - /// instance config. Multi-regional location KMS keys are not supported for - /// USER_MANAGED instance configs. + /// `kms_key_names` must fully cover all regions of the backup's instance + /// configuration. Some examples: * For regional (single-region) instance + /// configurations, specify a regional location KMS key. * For multi-region + /// instance configurations of type `GOOGLE_MANAGED`, either specify a + /// multi-region location KMS key or multiple regional location KMS keys that + /// cover all regions in the instance configuration. * For an instance + /// configuration of type `USER_MANAGED`, specify only regional location KMS + /// keys to cover each region in the instance configuration. Multi-region + /// location KMS keys aren't supported for `USER_MANAGED` type instance + /// configurations. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. @@ -2188,7 +2189,7 @@ class ProjectsInstancesDatabaseOperationsResource { /// value for filtering. The value must be a string, a number, or a boolean. /// The comparison operator must be one of: `<`, `>`, `<=`, `>=`, `!=`, `=`, /// or `:`. Colon `:` is the contains operator. Filter rules are not case - /// sensitive. The following fields in the Operation are eligible for + /// sensitive. The following fields in the operation are eligible for /// filtering: * `name` - The name of the long-running operation * `done` - /// False if the operation is in progress, else true. * `metadata.@type` - the /// type of metadata. For example, the type string for RestoreDatabaseMetadata @@ -4452,7 +4453,8 @@ class ProjectsInstancesInstancePartitionOperationsResource { /// [instancePartitionDeadline] - Optional. Deadline used while retrieving /// metadata for instance partition operations. Instance partitions whose /// operation metadata cannot be retrieved within this deadline will be added - /// to unreachable in ListInstancePartitionOperationsResponse. + /// to unreachable_instance_partitions in + /// ListInstancePartitionOperationsResponse. /// /// [pageSize] - Optional. Number of operations to be returned in the /// response. 
If 0 or less, defaults to the server's maximum allowed page @@ -5505,9 +5507,9 @@ class Backup { /// or more KMS keys. /// /// The information includes all Cloud KMS key versions used to encrypt the - /// backup. The \`encryption_status' field inside of each \`EncryptionInfo\` - /// is not populated. At least one of the key versions must be available for - /// the backup to be restored. If a key version is revoked in the middle of a + /// backup. The `encryption_status` field inside of each `EncryptionInfo` is + /// not populated. At least one of the key versions must be available for the + /// backup to be restored. If a key version is revoked in the middle of a /// restore, the restore behavior is undefined. /// /// Output only. @@ -5762,8 +5764,6 @@ class BackupInfo { /// BackupSchedule expresses the automated backup creation specification for a /// Spanner database. -/// -/// Next ID: 10 class BackupSchedule { /// The encryption configuration that will be used to encrypt the backup. /// @@ -6530,17 +6530,17 @@ class CopyBackupEncryptionConfig { /// Specifies the KMS configuration for the one or more keys used to protect /// the backup. /// - /// Values are of the form `projects//locations//keyRings//cryptoKeys/`. Kms - /// keys specified can be in any order. The keys referenced by kms_key_names + /// Values are of the form `projects//locations//keyRings//cryptoKeys/`. KMS + /// keys specified can be in any order. The keys referenced by `kms_key_names` /// must fully cover all regions of the backup's instance configuration. Some - /// examples: * For single region instance configs, specify a single regional - /// location KMS key. * For multi-regional instance configs of type - /// GOOGLE_MANAGED, either specify a multi-regional location KMS key or + /// examples: * For regional (single-region) instance configurations, specify + /// a regional location KMS key. * For multi-region instance configurations of + /// type `GOOGLE_MANAGED`, either specify a multi-region location KMS key or /// multiple regional location KMS keys that cover all regions in the instance - /// config. * For an instance config of type USER_MANAGED, please specify only - /// regional location KMS keys to cover each region in the instance config. - /// Multi-regional location KMS keys are not supported for USER_MANAGED - /// instance configs. + /// configuration. * For an instance configuration of type `USER_MANAGED`, + /// specify only regional location KMS keys to cover each region in the + /// instance configuration. Multi-region location KMS keys aren't supported + /// for `USER_MANAGED` type instance configurations. /// /// Optional. core.List? kmsKeyNames; @@ -6661,15 +6661,16 @@ class CreateBackupEncryptionConfig { /// the backup. /// /// Values are of the form `projects//locations//keyRings//cryptoKeys/`. The - /// keys referenced by kms_key_names must fully cover all regions of the - /// backup's instance configuration. Some examples: * For single region - /// instance configs, specify a single regional location KMS key. * For - /// multi-regional instance configs of type GOOGLE_MANAGED, either specify a - /// multi-regional location KMS key or multiple regional location KMS keys - /// that cover all regions in the instance config. * For an instance config of - /// type USER_MANAGED, please specify only regional location KMS keys to cover - /// each region in the instance config. Multi-regional location KMS keys are - /// not supported for USER_MANAGED instance configs. 
+ /// keys referenced by `kms_key_names` must fully cover all regions of the + /// backup's instance configuration. Some examples: * For regional + /// (single-region) instance configurations, specify a regional location KMS + /// key. * For multi-region instance configurations of type `GOOGLE_MANAGED`, + /// either specify a multi-region location KMS key or multiple regional + /// location KMS keys that cover all regions in the instance configuration. * + /// For an instance configuration of type `USER_MANAGED`, specify only + /// regional location KMS keys to cover each region in the instance + /// configuration. Multi-region location KMS keys aren't supported for + /// `USER_MANAGED` type instance configurations. /// /// Optional. core.List? kmsKeyNames; @@ -6791,7 +6792,7 @@ class CreateDatabaseRequest { }; } -/// The request for CreateInstanceConfigRequest. +/// The request for CreateInstanceConfig. class CreateInstanceConfigRequest { /// The `InstanceConfig` proto of the configuration to create. /// @@ -7052,7 +7053,7 @@ class Database { /// contains the encryption information for the database, such as all Cloud /// KMS key versions that are in use. /// - /// The \`encryption_status' field inside of each \`EncryptionInfo\` is not + /// The `encryption_status` field inside of each `EncryptionInfo` is not /// populated. For databases that are using Google default or other types of /// encryption, this field is empty. This field is propagated lazily from the /// backend. There might be a delay from when a key version is being used and @@ -7407,20 +7408,20 @@ class EncryptionConfig { /// Values are of the form `projects//locations//keyRings//cryptoKeys/`. core.String? kmsKeyName; - /// Specifies the KMS configuration for the one or more keys used to encrypt - /// the database. + /// Specifies the KMS configuration for one or more keys used to encrypt the + /// database. /// /// Values are of the form `projects//locations//keyRings//cryptoKeys/`. The - /// keys referenced by kms_key_names must fully cover all regions of the - /// database instance configuration. Some examples: * For single region - /// database instance configs, specify a single regional location KMS key. * - /// For multi-regional database instance configs of type GOOGLE_MANAGED, - /// either specify a multi-regional location KMS key or multiple regional - /// location KMS keys that cover all regions in the instance config. * For a - /// database instance config of type USER_MANAGED, please specify only - /// regional location KMS keys to cover each region in the instance config. - /// Multi-regional location KMS keys are not supported for USER_MANAGED - /// instance configs. + /// keys referenced by `kms_key_names` must fully cover all regions of the + /// database's instance configuration. Some examples: * For regional + /// (single-region) instance configurations, specify a regional location KMS + /// key. * For multi-region instance configurations of type `GOOGLE_MANAGED`, + /// either specify a multi-region location KMS key or multiple regional + /// location KMS keys that cover all regions in the instance configuration. * + /// For an instance configuration of type `USER_MANAGED`, specify only + /// regional location KMS keys to cover each region in the instance + /// configuration. Multi-region location KMS keys aren't supported for + /// `USER_MANAGED` type instance configurations. core.List? kmsKeyNames; EncryptionConfig({ @@ -8185,6 +8186,27 @@ class Instance { /// Output only. core.String? 
createTime; + /// Controls the default backup behavior for new databases within the + /// instance. + /// + /// Note that `AUTOMATIC` is not permitted for free instances, as backups and + /// backup schedules are not allowed for free instances. In the `GetInstance` + /// or `ListInstances` response, if the value of default_backup_schedule_type + /// is unset or NONE, no default backup schedule will be created for new + /// databases within the instance. + /// + /// Optional. + /// Possible string values are: + /// - "DEFAULT_BACKUP_SCHEDULE_TYPE_UNSPECIFIED" : Not specified. + /// - "NONE" : No default backup schedule will be created automatically on + /// creation of a database within the instance. + /// - "AUTOMATIC" : A default backup schedule will be created automatically on + /// creation of a database within the instance. Once created, the default + /// backup schedule can be edited or deleted just like any other backup + /// schedule. Currently, the default backup schedule creates a full backup + /// every 24 hours and retains the backup for a period of 7 days. + core.String? defaultBackupScheduleType; + /// The descriptive name for this instance as it appears in UIs. /// /// Must be unique per project and between 4 and 30 characters in length. @@ -8258,10 +8280,8 @@ class Instance { /// the target number of nodes allocated to the instance. If autoscaling is /// enabled, `node_count` is treated as an `OUTPUT_ONLY` field and reflects /// the current number of nodes allocated to the instance. This might be zero - /// in API responses for instances that are not yet in the `READY` state. If - /// the instance has varying node count across replicas (achieved by setting - /// asymmetric_autoscaling_options in autoscaling config), the node_count here - /// is the maximum node count across all replicas. For more information, see + /// in API responses for instances that are not yet in the `READY` state. For + /// more information, see /// [Compute capacity, nodes, and processing units](https://cloud.google.com/spanner/docs/compute-capacity). core.int? nodeCount; @@ -8273,11 +8293,7 @@ class Instance { /// If autoscaling is enabled, `processing_units` is treated as an /// `OUTPUT_ONLY` field and reflects the current number of processing units /// allocated to the instance. This might be zero in API responses for - /// instances that are not yet in the `READY` state. If the instance has - /// varying processing units per replica (achieved by setting - /// asymmetric_autoscaling_options in autoscaling config), the - /// processing_units here is the maximum processing units across all replicas. - /// For more information, see + /// instances that are not yet in the `READY` state. For more information, see /// [Compute capacity, nodes and processing units](https://cloud.google.com/spanner/docs/compute-capacity). core.int? processingUnits; @@ -8313,6 +8329,7 @@ class Instance { this.autoscalingConfig, this.config, this.createTime, + this.defaultBackupScheduleType, this.displayName, this.edition, this.endpointUris, @@ -8335,6 +8352,8 @@ class Instance { : null, config: json_['config'] as core.String?, createTime: json_['createTime'] as core.String?, + defaultBackupScheduleType: + json_['defaultBackupScheduleType'] as core.String?, displayName: json_['displayName'] as core.String?, edition: json_['edition'] as core.String?, endpointUris: (json_['endpointUris'] as core.List?) 
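The hunk above introduces Instance.defaultBackupScheduleType in spanner/v1.dart. A minimal usage sketch with the generated Spanner client follows; the project and instance names are placeholders and the create call simply follows the existing generated surface, so only the new field itself comes from this diff.

import 'package:googleapis/spanner/v1.dart';
import 'package:googleapis_auth/auth_io.dart';

Future<void> main() async {
  // Assumes Application Default Credentials are configured in the environment.
  final client = await clientViaApplicationDefaultCredentials(
      scopes: [SpannerApi.cloudPlatformScope]);
  final spanner = SpannerApi(client);

  // New field from this diff: opt databases created in this instance into an
  // automatically created default backup schedule.
  final instance = Instance(
    config: 'projects/my-project/instanceConfigs/regional-us-central1',
    displayName: 'Example instance',
    nodeCount: 1,
    defaultBackupScheduleType: 'AUTOMATIC',
  );

  // Hypothetical create call; request and response shapes follow the existing
  // generated API surface.
  final operation = await spanner.projects.instances.create(
    CreateInstanceRequest(instanceId: 'example-instance', instance: instance),
    'projects/my-project',
  );
  print(operation.name);
  client.close();
}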
@@ -8368,6 +8387,8 @@ class Instance { if (autoscalingConfig != null) 'autoscalingConfig': autoscalingConfig!, if (config != null) 'config': config!, if (createTime != null) 'createTime': createTime!, + if (defaultBackupScheduleType != null) + 'defaultBackupScheduleType': defaultBackupScheduleType!, if (displayName != null) 'displayName': displayName!, if (edition != null) 'edition': edition!, if (endpointUris != null) 'endpointUris': endpointUris!, @@ -11380,20 +11401,20 @@ class RestoreDatabaseEncryptionConfig { /// Optional. core.String? kmsKeyName; - /// Specifies the KMS configuration for the one or more keys used to encrypt - /// the database. + /// Specifies the KMS configuration for one or more keys used to encrypt the + /// database. /// /// Values have the form `projects//locations//keyRings//cryptoKeys/`. The - /// keys referenced by kms_key_names must fully cover all regions of the - /// database instance configuration. Some examples: * For single region - /// database instance configurations, specify a single regional location KMS - /// key. * For multi-regional database instance configurations of type - /// `GOOGLE_MANAGED`, either specify a multi-regional location KMS key or - /// multiple regional location KMS keys that cover all regions in the instance - /// configuration. * For a database instance configuration of type - /// `USER_MANAGED`, please specify only regional location KMS keys to cover - /// each region in the instance configuration. Multi-regional location KMS - /// keys are not supported for USER_MANAGED instance configurations. + /// keys referenced by `kms_key_names` must fully cover all regions of the + /// database's instance configuration. Some examples: * For regional + /// (single-region) instance configurations, specify a regional location KMS + /// key. * For multi-region instance configurations of type `GOOGLE_MANAGED`, + /// either specify a multi-region location KMS key or multiple regional + /// location KMS keys that cover all regions in the instance configuration. * + /// For an instance configuration of type `USER_MANAGED`, specify only + /// regional location KMS keys to cover each region in the instance + /// configuration. Multi-region location KMS keys aren't supported for + /// `USER_MANAGED` type instance configurations. /// /// Optional. core.List? kmsKeyNames; @@ -12614,11 +12635,11 @@ class UpdateDatabaseDdlRequest { /// Operation. Specifying an explicit operation ID simplifies determining /// whether the statements were executed in the event that the /// UpdateDatabaseDdl call is replayed, or the return value is otherwise lost: - /// the database and `operation_id` fields can be combined to form the name of - /// the resulting longrunning.Operation: `/operations/`. `operation_id` should - /// be unique within the database, and must be a valid identifier: `a-z*`. - /// Note that automatically-generated operation IDs always begin with an - /// underscore. If the named operation already exists, UpdateDatabaseDdl + /// the database and `operation_id` fields can be combined to form the `name` + /// of the resulting longrunning.Operation: `/operations/`. `operation_id` + /// should be unique within the database, and must be a valid identifier: + /// `a-z*`. Note that automatically-generated operation IDs always begin with + /// an underscore. If the named operation already exists, UpdateDatabaseDdl /// returns `ALREADY_EXISTS`. core.String? 
operationId; @@ -12671,7 +12692,7 @@ class UpdateDatabaseDdlRequest { }; } -/// The request for UpdateInstanceConfigRequest. +/// The request for UpdateInstanceConfig. class UpdateInstanceConfigRequest { /// The user instance configuration to update, which must always include the /// instance configuration name. diff --git a/generated/googleapis/lib/storage/v1.dart b/generated/googleapis/lib/storage/v1.dart index 93999d2e1..d333ac2ee 100644 --- a/generated/googleapis/lib/storage/v1.dart +++ b/generated/googleapis/lib/storage/v1.dart @@ -1272,37 +1272,46 @@ class BucketsResource { /// /// [generation] - Generation of a bucket. /// + /// [projection] - Set of properties to return. Defaults to full. + /// Possible string values are: + /// - "full" : Include all properties. + /// - "noAcl" : Omit owner, acl and defaultObjectAcl properties. + /// /// [userProject] - The project to be billed for this request. Required for /// Requester Pays buckets. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. /// + /// Completes with a [Bucket]. + /// /// Completes with a [commons.ApiRequestError] if the API endpoint returned an /// error. /// /// If the used [http.Client] completes with an error when making a REST call, /// this method will complete with the same error. - async.Future restore( + async.Future restore( core.String bucket, core.String generation, { + core.String? projection, core.String? userProject, core.String? $fields, }) async { final queryParams_ = >{ 'generation': [generation], + if (projection != null) 'projection': [projection], if (userProject != null) 'userProject': [userProject], if ($fields != null) 'fields': [$fields], }; final url_ = 'b/' + commons.escapeVariable('$bucket') + '/restore'; - await _requester.request( + final response_ = await _requester.request( url_, 'POST', queryParams: queryParams_, - downloadOptions: null, ); + return Bucket.fromJson(response_ as core.Map); } /// Updates an IAM policy for the specified bucket. @@ -3847,6 +3856,134 @@ class ObjectsResource { return Objects.fromJson(response_ as core.Map); } + /// Moves the source object to the destination object in the same bucket. + /// + /// Request parameters: + /// + /// [bucket] - Name of the bucket in which the object resides. + /// + /// [sourceObject] - Name of the source object. For information about how to + /// URL encode object names to be path safe, see + /// [Encoding URI Path Parts](https://cloud.google.com/storage/docs/request-endpoints#encoding). + /// + /// [destinationObject] - Name of the destination object. For information + /// about how to URL encode object names to be path safe, see + /// [Encoding URI Path Parts](https://cloud.google.com/storage/docs/request-endpoints#encoding). + /// + /// [ifGenerationMatch] - Makes the operation conditional on whether the + /// destination object's current generation matches the given value. Setting + /// to 0 makes the operation succeed only if there are no live versions of the + /// object. `ifGenerationMatch` and `ifGenerationNotMatch` conditions are + /// mutually exclusive: it's an error for both of them to be set in the + /// request. + /// + /// [ifGenerationNotMatch] - Makes the operation conditional on whether the + /// destination object's current generation does not match the given value. If + /// no live object exists, the precondition fails. 
Setting to 0 makes the + /// operation succeed only if there is a live version of the + /// object.`ifGenerationMatch` and `ifGenerationNotMatch` conditions are + /// mutually exclusive: it's an error for both of them to be set in the + /// request. + /// + /// [ifMetagenerationMatch] - Makes the operation conditional on whether the + /// destination object's current metageneration matches the given value. + /// `ifMetagenerationMatch` and `ifMetagenerationNotMatch` conditions are + /// mutually exclusive: it's an error for both of them to be set in the + /// request. + /// + /// [ifMetagenerationNotMatch] - Makes the operation conditional on whether + /// the destination object's current metageneration does not match the given + /// value. `ifMetagenerationMatch` and `ifMetagenerationNotMatch` conditions + /// are mutually exclusive: it's an error for both of them to be set in the + /// request. + /// + /// [ifSourceGenerationMatch] - Makes the operation conditional on whether the + /// source object's current generation matches the given value. + /// `ifSourceGenerationMatch` and `ifSourceGenerationNotMatch` conditions are + /// mutually exclusive: it's an error for both of them to be set in the + /// request. + /// + /// [ifSourceGenerationNotMatch] - Makes the operation conditional on whether + /// the source object's current generation does not match the given value. + /// `ifSourceGenerationMatch` and `ifSourceGenerationNotMatch` conditions are + /// mutually exclusive: it's an error for both of them to be set in the + /// request. + /// + /// [ifSourceMetagenerationMatch] - Makes the operation conditional on whether + /// the source object's current metageneration matches the given value. + /// `ifSourceMetagenerationMatch` and `ifSourceMetagenerationNotMatch` + /// conditions are mutually exclusive: it's an error for both of them to be + /// set in the request. + /// + /// [ifSourceMetagenerationNotMatch] - Makes the operation conditional on + /// whether the source object's current metageneration does not match the + /// given value. `ifSourceMetagenerationMatch` and + /// `ifSourceMetagenerationNotMatch` conditions are mutually exclusive: it's + /// an error for both of them to be set in the request. + /// + /// [userProject] - The project to be billed for this request. Required for + /// Requester Pays buckets. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [Object]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future move( + core.String bucket, + core.String sourceObject, + core.String destinationObject, { + core.String? ifGenerationMatch, + core.String? ifGenerationNotMatch, + core.String? ifMetagenerationMatch, + core.String? ifMetagenerationNotMatch, + core.String? ifSourceGenerationMatch, + core.String? ifSourceGenerationNotMatch, + core.String? ifSourceMetagenerationMatch, + core.String? ifSourceMetagenerationNotMatch, + core.String? userProject, + core.String? 
$fields, + }) async { + final queryParams_ = >{ + if (ifGenerationMatch != null) 'ifGenerationMatch': [ifGenerationMatch], + if (ifGenerationNotMatch != null) + 'ifGenerationNotMatch': [ifGenerationNotMatch], + if (ifMetagenerationMatch != null) + 'ifMetagenerationMatch': [ifMetagenerationMatch], + if (ifMetagenerationNotMatch != null) + 'ifMetagenerationNotMatch': [ifMetagenerationNotMatch], + if (ifSourceGenerationMatch != null) + 'ifSourceGenerationMatch': [ifSourceGenerationMatch], + if (ifSourceGenerationNotMatch != null) + 'ifSourceGenerationNotMatch': [ifSourceGenerationNotMatch], + if (ifSourceMetagenerationMatch != null) + 'ifSourceMetagenerationMatch': [ifSourceMetagenerationMatch], + if (ifSourceMetagenerationNotMatch != null) + 'ifSourceMetagenerationNotMatch': [ifSourceMetagenerationNotMatch], + if (userProject != null) 'userProject': [userProject], + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'b/' + + commons.escapeVariable('$bucket') + + '/o/' + + commons.escapeVariable('$sourceObject') + + '/moveTo/o/' + + commons.escapeVariable('$destinationObject'); + + final response_ = await _requester.request( + url_, + 'POST', + queryParams: queryParams_, + ); + return Object.fromJson(response_ as core.Map); + } + /// Patches an object's metadata. /// /// [request] - The metadata request object. @@ -8042,6 +8179,9 @@ class Object { /// deleted. core.DateTime? timeDeleted; + /// The time when the object was finalized. + core.DateTime? timeFinalized; + /// The time at which the object's storage class was last changed. /// /// When the object is initially created, it will be set to timeCreated. @@ -8091,6 +8231,7 @@ class Object { this.temporaryHold, this.timeCreated, this.timeDeleted, + this.timeFinalized, this.timeStorageClassUpdated, this.updated, }); @@ -8162,6 +8303,9 @@ class Object { timeDeleted: json_.containsKey('timeDeleted') ? core.DateTime.parse(json_['timeDeleted'] as core.String) : null, + timeFinalized: json_.containsKey('timeFinalized') + ? core.DateTime.parse(json_['timeFinalized'] as core.String) + : null, timeStorageClassUpdated: json_.containsKey('timeStorageClassUpdated') ? core.DateTime.parse( json_['timeStorageClassUpdated'] as core.String) @@ -8215,6 +8359,8 @@ class Object { 'timeCreated': timeCreated!.toUtc().toIso8601String(), if (timeDeleted != null) 'timeDeleted': timeDeleted!.toUtc().toIso8601String(), + if (timeFinalized != null) + 'timeFinalized': timeFinalized!.toUtc().toIso8601String(), if (timeStorageClassUpdated != null) 'timeStorageClassUpdated': timeStorageClassUpdated!.toUtc().toIso8601String(), diff --git a/generated/googleapis/lib/sts/v1.dart b/generated/googleapis/lib/sts/v1.dart index 070b757c2..f73cae6e5 100644 --- a/generated/googleapis/lib/sts/v1.dart +++ b/generated/googleapis/lib/sts/v1.dart @@ -302,9 +302,10 @@ class GoogleIdentityStsV1ExchangeTokenResponse { /// The amount of time, in seconds, between the time when the access token was /// issued and the time when the access token will expire. /// - /// This field is absent when the `subject_token` in the request is a - /// Google-issued, short-lived access token. In this case, the access token - /// has the same expiration time as the `subject_token`. + /// This field is absent when the `subject_token` in the request is a a + /// short-lived access token for a Cloud Identity or Google Workspace user + /// account. In this case, the access token has the same expiration time as + /// the `subject_token`. core.int? expiresIn; /// The token type. 
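storage/v1.dart above adds ObjectsResource.move and surfaces Object.timeFinalized. A minimal sketch of the new call, assuming Application Default Credentials; the bucket and object names are placeholders, and only the method signature and the new field come from this diff.

import 'package:googleapis/storage/v1.dart';
import 'package:googleapis_auth/auth_io.dart';

Future<void> main() async {
  final client = await clientViaApplicationDefaultCredentials(
      scopes: [StorageApi.devstorageReadWriteScope]);
  final storage = StorageApi(client);

  // Server-side move within the same bucket, new in this diff. The
  // ifGenerationMatch: '0' precondition makes the call fail if a live object
  // already exists at the destination name.
  final moved = await storage.objects.move(
    'example-bucket',
    'staging/report.csv',
    'published/report.csv',
    ifGenerationMatch: '0',
  );

  // timeFinalized is also newly exposed on Object in this diff.
  print('${moved.name} finalized at ${moved.timeFinalized}');
  client.close();
}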
diff --git a/generated/googleapis/lib/tagmanager/v2.dart b/generated/googleapis/lib/tagmanager/v2.dart index e0d081f9c..1a119f5ef 100644 --- a/generated/googleapis/lib/tagmanager/v2.dart +++ b/generated/googleapis/lib/tagmanager/v2.dart @@ -6366,6 +6366,11 @@ class GalleryReference { /// gallery. core.String? signature; + /// The developer id of the community gallery template. + /// + /// This value is set whenever the template is created from the gallery. + core.String? templateDeveloperId; + /// The version of the community gallery template. core.String? version; @@ -6375,6 +6380,7 @@ class GalleryReference { this.owner, this.repository, this.signature, + this.templateDeveloperId, this.version, }); @@ -6385,6 +6391,7 @@ class GalleryReference { owner: json_['owner'] as core.String?, repository: json_['repository'] as core.String?, signature: json_['signature'] as core.String?, + templateDeveloperId: json_['templateDeveloperId'] as core.String?, version: json_['version'] as core.String?, ); @@ -6394,6 +6401,8 @@ class GalleryReference { if (owner != null) 'owner': owner!, if (repository != null) 'repository': repository!, if (signature != null) 'signature': signature!, + if (templateDeveloperId != null) + 'templateDeveloperId': templateDeveloperId!, if (version != null) 'version': version!, }; } diff --git a/generated/googleapis/lib/testing/v1.dart b/generated/googleapis/lib/testing/v1.dart index 83c1d2e85..966c847ad 100644 --- a/generated/googleapis/lib/testing/v1.dart +++ b/generated/googleapis/lib/testing/v1.dart @@ -1448,9 +1448,10 @@ class ApkManifest { /// Feature usage tags defined in the manifest. core.List? usesFeature; + core.List? usesPermission; /// Permissions declared to be used by the application - core.List? usesPermission; + core.List? usesPermissionTags; /// Version number used internally by the app. core.String? versionCode; @@ -1469,6 +1470,7 @@ class ApkManifest { this.targetSdkVersion, this.usesFeature, this.usesPermission, + this.usesPermissionTags, this.versionCode, this.versionName, }); @@ -1499,6 +1501,10 @@ class ApkManifest { usesPermission: (json_['usesPermission'] as core.List?) ?.map((value) => value as core.String) .toList(), + usesPermissionTags: (json_['usesPermissionTags'] as core.List?) + ?.map((value) => UsesPermissionTag.fromJson( + value as core.Map)) + .toList(), versionCode: json_['versionCode'] as core.String?, versionName: json_['versionName'] as core.String?, ); @@ -1514,6 +1520,8 @@ class ApkManifest { if (targetSdkVersion != null) 'targetSdkVersion': targetSdkVersion!, if (usesFeature != null) 'usesFeature': usesFeature!, if (usesPermission != null) 'usesPermission': usesPermission!, + if (usesPermissionTags != null) + 'usesPermissionTags': usesPermissionTags!, if (versionCode != null) 'versionCode': versionCode!, if (versionName != null) 'versionName': versionName!, }; @@ -4724,6 +4732,33 @@ class UsesFeature { }; } +/// The tag within a manifest. +/// +/// https://developer.android.com/guide/topics/manifest/uses-permission-element.html +class UsesPermissionTag { + /// The android:name value + core.int? maxSdkVersion; + + /// The android:name value + core.String? 
name; + + UsesPermissionTag({ + this.maxSdkVersion, + this.name, + }); + + UsesPermissionTag.fromJson(core.Map json_) + : this( + maxSdkVersion: json_['maxSdkVersion'] as core.int?, + name: json_['name'] as core.String?, + ); + + core.Map toJson() => { + if (maxSdkVersion != null) 'maxSdkVersion': maxSdkVersion!, + if (name != null) 'name': name!, + }; +} + /// An Xcode version that an iOS version is compatible with. class XcodeVersion { /// Tags for this Xcode version. diff --git a/generated/googleapis/lib/texttospeech/v1.dart b/generated/googleapis/lib/texttospeech/v1.dart index de478b2ad..aeb1f65a1 100644 --- a/generated/googleapis/lib/texttospeech/v1.dart +++ b/generated/googleapis/lib/texttospeech/v1.dart @@ -77,8 +77,8 @@ class OperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// [request] - The metadata request object. /// @@ -408,7 +408,7 @@ class VoicesResource { /// Used for advanced voice options. class AdvancedVoiceOptions { - /// Only for Jounrney voices. + /// Only for Journey voices. /// /// If false, the synthesis will be context aware and have higher latency. core.bool? lowLatencyJourneySynthesis; @@ -703,6 +703,30 @@ class ListVoicesResponse { }; } +/// A collection of turns for multi-speaker synthesis. +class MultiSpeakerMarkup { + /// Speaker turns. + /// + /// Required. + core.List? turns; + + MultiSpeakerMarkup({ + this.turns, + }); + + MultiSpeakerMarkup.fromJson(core.Map json_) + : this( + turns: (json_['turns'] as core.List?) + ?.map((value) => + Turn.fromJson(value as core.Map)) + .toList(), + ); + + core.Map toJson() => { + if (turns != null) 'turns': turns!, + }; +} + /// This resource represents a long-running operation that is the result of a /// network API call. class Operation { @@ -806,6 +830,11 @@ class SynthesisInput { /// Optional. CustomPronunciations? customPronunciations; + /// The multi-speaker input to be synthesized. + /// + /// Only applicable for multi-speaker synthesis. + MultiSpeakerMarkup? multiSpeakerMarkup; + /// The SSML document to be synthesized. /// /// The SSML document must be valid and well-formed. Otherwise the RPC will @@ -818,6 +847,7 @@ class SynthesisInput { SynthesisInput({ this.customPronunciations, + this.multiSpeakerMarkup, this.ssml, this.text, }); @@ -828,6 +858,10 @@ class SynthesisInput { ? CustomPronunciations.fromJson(json_['customPronunciations'] as core.Map) : null, + multiSpeakerMarkup: json_.containsKey('multiSpeakerMarkup') + ? MultiSpeakerMarkup.fromJson(json_['multiSpeakerMarkup'] + as core.Map) + : null, ssml: json_['ssml'] as core.String?, text: json_['text'] as core.String?, ); @@ -835,6 +869,8 @@ class SynthesisInput { core.Map toJson() => { if (customPronunciations != null) 'customPronunciations': customPronunciations!, + if (multiSpeakerMarkup != null) + 'multiSpeakerMarkup': multiSpeakerMarkup!, if (ssml != null) 'ssml': ssml!, if (text != null) 'text': text!, }; @@ -900,7 +936,7 @@ class SynthesizeLongAudioRequest { /// The top-level message sent by the client for the `SynthesizeSpeech` method. class SynthesizeSpeechRequest { - /// Adnanced voice options. + /// Advanced voice options. 
AdvancedVoiceOptions? advancedVoiceOptions; /// The configuration of the synthesized audio. @@ -985,6 +1021,37 @@ class SynthesizeSpeechResponse { }; } +/// A Multi-speaker turn. +class Turn { + /// The speaker of the turn, for example, 'O' or 'Q'. + /// + /// Please refer to documentation for available speakers. + /// + /// Required. + core.String? speaker; + + /// The text to speak. + /// + /// Required. + core.String? text; + + Turn({ + this.speaker, + this.text, + }); + + Turn.fromJson(core.Map json_) + : this( + speaker: json_['speaker'] as core.String?, + text: json_['text'] as core.String?, + ); + + core.Map toJson() => { + if (speaker != null) 'speaker': speaker!, + if (text != null) 'text': text!, + }; +} + /// Description of a voice supported by the TTS service. class Voice { /// The languages that this voice supports, expressed as @@ -1038,6 +1105,27 @@ class Voice { }; } +/// The configuration of Voice Clone feature. +class VoiceCloneParams { + /// Created by GenerateVoiceCloningKey. + /// + /// Required. + core.String? voiceCloningKey; + + VoiceCloneParams({ + this.voiceCloningKey, + }); + + VoiceCloneParams.fromJson(core.Map json_) + : this( + voiceCloningKey: json_['voiceCloningKey'] as core.String?, + ); + + core.Map toJson() => { + if (voiceCloningKey != null) 'voiceCloningKey': voiceCloningKey!, + }; +} + /// Description of which voice to use for a synthesis request. class VoiceSelectionParams { /// The configuration for a custom voice. @@ -1086,11 +1174,20 @@ class VoiceSelectionParams { /// - "NEUTRAL" : A gender-neutral voice. This voice is not yet supported. core.String? ssmlGender; + /// The configuration for a voice clone. + /// + /// If \[VoiceCloneParams.voice_clone_key\] is set, the service will choose + /// the voice clone matching the specified configuration. + /// + /// Optional. + VoiceCloneParams? voiceClone; + VoiceSelectionParams({ this.customVoice, this.languageCode, this.name, this.ssmlGender, + this.voiceClone, }); VoiceSelectionParams.fromJson(core.Map json_) @@ -1102,6 +1199,10 @@ class VoiceSelectionParams { languageCode: json_['languageCode'] as core.String?, name: json_['name'] as core.String?, ssmlGender: json_['ssmlGender'] as core.String?, + voiceClone: json_.containsKey('voiceClone') + ? VoiceCloneParams.fromJson( + json_['voiceClone'] as core.Map) + : null, ); core.Map toJson() => { @@ -1109,5 +1210,6 @@ class VoiceSelectionParams { if (languageCode != null) 'languageCode': languageCode!, if (name != null) 'name': name!, if (ssmlGender != null) 'ssmlGender': ssmlGender!, + if (voiceClone != null) 'voiceClone': voiceClone!, }; } diff --git a/generated/googleapis/lib/tpu/v1.dart b/generated/googleapis/lib/tpu/v1.dart index 11856de20..58533632b 100644 --- a/generated/googleapis/lib/tpu/v1.dart +++ b/generated/googleapis/lib/tpu/v1.dart @@ -572,8 +572,8 @@ class ProjectsLocationsOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. 
/// /// Request parameters: /// diff --git a/generated/googleapis/lib/tpu/v2.dart b/generated/googleapis/lib/tpu/v2.dart index 6c7de32bb..4678cf895 100644 --- a/generated/googleapis/lib/tpu/v2.dart +++ b/generated/googleapis/lib/tpu/v2.dart @@ -667,8 +667,8 @@ class ProjectsLocationsOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// Request parameters: /// @@ -1175,6 +1175,7 @@ class AcceleratorConfig { /// - "V4" : TPU v4. /// - "V5LITE_POD" : TPU v5lite pod. /// - "V5P" : TPU v5p + /// - "V6E" : TPU v6e core.String? type; AcceleratorConfig({ diff --git a/generated/googleapis/lib/travelimpactmodel/v1.dart b/generated/googleapis/lib/travelimpactmodel/v1.dart index 9c51707be..1c9d5e7ca 100644 --- a/generated/googleapis/lib/travelimpactmodel/v1.dart +++ b/generated/googleapis/lib/travelimpactmodel/v1.dart @@ -288,9 +288,8 @@ class FlightWithEmissions { /// /// Will not be present if emissions could not be computed. For the list of /// reasons why emissions could not be computed, see ComputeFlightEmissions. - /// Note this field is currently equivalent to ttw_emissions_grams_per_pax - /// until TIM version 1.X.0 which will update this to be total wtw emissions - /// aka wtt_emissions_grams_per_pax + ttw_emissions_grams_per_pax. + /// This field uses wtw emissions aka ttw_emissions_grams_per_pax + + /// wtt_emissions_grams_per_pax. /// /// Optional. EmissionsGramsPerPax? emissionsGramsPerPax; diff --git a/generated/googleapis/lib/vault/v1.dart b/generated/googleapis/lib/vault/v1.dart index b5e372624..def2b9853 100644 --- a/generated/googleapis/lib/vault/v1.dart +++ b/generated/googleapis/lib/vault/v1.dart @@ -1417,8 +1417,8 @@ class OperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// [request] - The metadata request object. /// diff --git a/generated/googleapis/lib/verifiedaccess/v2.dart b/generated/googleapis/lib/verifiedaccess/v2.dart index 4bdb321ff..1c42bad4b 100644 --- a/generated/googleapis/lib/verifiedaccess/v2.dart +++ b/generated/googleapis/lib/verifiedaccess/v2.dart @@ -277,8 +277,8 @@ class DeviceSignals { /// The state of the OS level firewall. /// /// On ChromeOS, the value will always be ENABLED on regular devices and - /// UNKNOWN on devices in developer mode. The signal is currently not - /// available on MacOS 15 (Sequoia) and later. + /// UNKNOWN on devices in developer mode. Support for MacOS 15 (Sequoia) and + /// later has been introduced in Chrome M131. /// Possible string values are: /// - "OS_FIREWALL_UNSPECIFIED" : Unspecified. /// - "OS_FIREWALL_UNKNOWN" : Chrome could not evaluate the OS firewall state. 
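Returning to the texttospeech/v1.dart changes above: MultiSpeakerMarkup, Turn, and SynthesisInput.multiSpeakerMarkup enable multi-speaker requests. A minimal sketch under the assumption that the existing text.synthesize method accepts the new input shape; the voice name is hypothetical and the speaker labels 'O' and 'Q' are taken from the Turn field documentation in the diff.

import 'dart:io';

import 'package:googleapis/texttospeech/v1.dart';
import 'package:googleapis_auth/auth_io.dart';

Future<void> main() async {
  final client = await clientViaApplicationDefaultCredentials(
      scopes: [TexttospeechApi.cloudPlatformScope]);
  final tts = TexttospeechApi(client);

  final request = SynthesizeSpeechRequest(
    // New in this diff: build the input from speaker turns rather than plain
    // text or SSML.
    input: SynthesisInput(
      multiSpeakerMarkup: MultiSpeakerMarkup(turns: [
        Turn(speaker: 'O', text: 'Did the rollout finish?'),
        Turn(speaker: 'Q', text: 'Yes, it completed about an hour ago.'),
      ]),
    ),
    // Hypothetical voice name; consult the service documentation for voices
    // that support multi-speaker input.
    voice: VoiceSelectionParams(
        languageCode: 'en-US', name: 'en-US-Studio-MultiSpeaker'),
    audioConfig: AudioConfig(audioEncoding: 'MP3'),
  );

  final response = await tts.text.synthesize(request);
  // audioContent is base64 encoded; the generated audioContentAsBytes getter
  // decodes it to raw bytes.
  File('dialogue.mp3').writeAsBytesSync(response.audioContentAsBytes);
  client.close();
}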
diff --git a/generated/googleapis/lib/versionhistory/v1.dart b/generated/googleapis/lib/versionhistory/v1.dart index 75b8af32e..8eaaaff8b 100644 --- a/generated/googleapis/lib/versionhistory/v1.dart +++ b/generated/googleapis/lib/versionhistory/v1.dart @@ -17,7 +17,7 @@ /// Version History API - Prod /// /// For more information, see -/// +/// /// /// Create an instance of [VersionHistoryApi] to access these resources: /// diff --git a/generated/googleapis/lib/vision/v1.dart b/generated/googleapis/lib/vision/v1.dart index d1d9525d9..be6881474 100644 --- a/generated/googleapis/lib/vision/v1.dart +++ b/generated/googleapis/lib/vision/v1.dart @@ -327,8 +327,8 @@ class OperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// [request] - The metadata request object. /// diff --git a/generated/googleapis/lib/vmmigration/v1.dart b/generated/googleapis/lib/vmmigration/v1.dart index dab6d6cbc..1a758278e 100644 --- a/generated/googleapis/lib/vmmigration/v1.dart +++ b/generated/googleapis/lib/vmmigration/v1.dart @@ -914,8 +914,8 @@ class ProjectsLocationsOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// [request] - The metadata request object. /// diff --git a/generated/googleapis/lib/vmwareengine/v1.dart b/generated/googleapis/lib/vmwareengine/v1.dart index aac04a97a..66f0bdf25 100644 --- a/generated/googleapis/lib/vmwareengine/v1.dart +++ b/generated/googleapis/lib/vmwareengine/v1.dart @@ -8094,7 +8094,7 @@ class NodeTypeConfig { } /// Details about a NSX Manager appliance. -typedef Nsx = $Shared13; +typedef Nsx = $Shared14; /// This resource represents a long-running operation that is the result of a /// network API call. @@ -9132,7 +9132,7 @@ class UndeletePrivateCloudRequest { } /// Details about a vCenter Server management appliance. -typedef Vcenter = $Shared13; +typedef Vcenter = $Shared14; /// VMware Engine network resource that provides connectivity for VMware Engine /// private clouds. diff --git a/generated/googleapis/lib/walletobjects/v1.dart b/generated/googleapis/lib/walletobjects/v1.dart index fd68ebe8c..8264427ac 100644 --- a/generated/googleapis/lib/walletobjects/v1.dart +++ b/generated/googleapis/lib/walletobjects/v1.dart @@ -4622,6 +4622,11 @@ class AppLinkData { /// Optional information about the partner app link. AppLinkDataAppLinkInfo? androidAppLinkInfo; + /// Optional display text for the app link button. + /// + /// Character limit is 30. + LocalizedString? displayText; + /// Links to open iOS apps are not supported. /// /// Deprecated. @@ -4635,6 +4640,7 @@ class AppLinkData { AppLinkData({ this.androidAppLinkInfo, + this.displayText, this.iosAppLinkInfo, this.webAppLinkInfo, }); @@ -4645,6 +4651,10 @@ class AppLinkData { ? 
AppLinkDataAppLinkInfo.fromJson(json_['androidAppLinkInfo'] as core.Map) : null, + displayText: json_.containsKey('displayText') + ? LocalizedString.fromJson( + json_['displayText'] as core.Map) + : null, iosAppLinkInfo: json_.containsKey('iosAppLinkInfo') ? AppLinkDataAppLinkInfo.fromJson(json_['iosAppLinkInfo'] as core.Map) @@ -4658,6 +4668,7 @@ class AppLinkData { core.Map toJson() => { if (androidAppLinkInfo != null) 'androidAppLinkInfo': androidAppLinkInfo!, + if (displayText != null) 'displayText': displayText!, if (iosAppLinkInfo != null) 'iosAppLinkInfo': iosAppLinkInfo!, if (webAppLinkInfo != null) 'webAppLinkInfo': webAppLinkInfo!, }; @@ -5840,7 +5851,7 @@ class DiffUploadResponse { typedef DiffVersionResponse = $DiffVersionResponse; /// Information about how a class may be discovered and instantiated from within -/// the Android Pay app. +/// the Google Wallet app. /// /// This is done by searching for a loyalty or gift card program and scanning or /// manually entering. @@ -6439,6 +6450,14 @@ class EventTicketClass { /// This image is displayed in the card detail view of the app. Image? logo; + /// Merchant locations. + /// + /// There is a maximum of ten on the class. Any additional MerchantLocations + /// added beyond the 10 will be rejected. These locations will trigger a + /// notification when a user enters within a Google-set radius of the point. + /// This field replaces the deprecated LatLongPoints. + core.List? merchantLocations; + /// An array of messages displayed in the app. /// /// All users of this object will receive its associated messages. The maximum @@ -6643,6 +6662,7 @@ class EventTicketClass { this.localizedIssuerName, this.locations, this.logo, + this.merchantLocations, this.messages, this.multipleDevicesAndHoldersAllowedStatus, this.notifyPreference, @@ -6752,6 +6772,10 @@ class EventTicketClass { ? Image.fromJson( json_['logo'] as core.Map) : null, + merchantLocations: (json_['merchantLocations'] as core.List?) + ?.map((value) => MerchantLocation.fromJson( + value as core.Map)) + .toList(), messages: (json_['messages'] as core.List?) ?.map((value) => Message.fromJson( value as core.Map)) @@ -6834,6 +6858,7 @@ class EventTicketClass { 'localizedIssuerName': localizedIssuerName!, if (locations != null) 'locations': locations!, if (logo != null) 'logo': logo!, + if (merchantLocations != null) 'merchantLocations': merchantLocations!, if (messages != null) 'messages': messages!, if (multipleDevicesAndHoldersAllowedStatus != null) 'multipleDevicesAndHoldersAllowedStatus': @@ -7039,6 +7064,14 @@ class EventTicketObject { ) core.List? locations; + /// Merchant locations. + /// + /// There is a maximum of ten on the object. Any additional MerchantLocations + /// added beyond the 10 will be rejected. These locations will trigger a + /// notification when a user enters within a Google-set radius of the point. + /// This field replaces the deprecated LatLongPoints. + core.List? merchantLocations; + /// An array of messages displayed in the app. /// /// All users of this object will receive its associated messages. The maximum @@ -7171,6 +7204,7 @@ class EventTicketObject { this.linkedOfferIds, this.linksModuleData, this.locations, + this.merchantLocations, this.messages, this.notifyPreference, this.passConstraints, @@ -7245,6 +7279,10 @@ class EventTicketObject { ?.map((value) => LatLongPoint.fromJson( value as core.Map)) .toList(), + merchantLocations: (json_['merchantLocations'] as core.List?) 
+ ?.map((value) => MerchantLocation.fromJson( + value as core.Map)) + .toList(), messages: (json_['messages'] as core.List?) ?.map((value) => Message.fromJson( value as core.Map)) @@ -7316,6 +7354,7 @@ class EventTicketObject { if (linkedOfferIds != null) 'linkedOfferIds': linkedOfferIds!, if (linksModuleData != null) 'linksModuleData': linksModuleData!, if (locations != null) 'locations': locations!, + if (merchantLocations != null) 'merchantLocations': merchantLocations!, if (messages != null) 'messages': messages!, if (notifyPreference != null) 'notifyPreference': notifyPreference!, if (passConstraints != null) 'passConstraints': passConstraints!, @@ -7895,6 +7934,14 @@ class FlightClass { ) core.List? locations; + /// Merchant locations. + /// + /// There is a maximum of ten on the class. Any additional MerchantLocations + /// added beyond the 10 will be rejected by the validator. These locations + /// will trigger a notification when a user enters within a Google-set radius + /// of the point. This field replaces the deprecated LatLongPoints. + core.List? merchantLocations; + /// An array of messages displayed in the app. /// /// All users of this object will receive its associated messages. The maximum @@ -8050,6 +8097,7 @@ class FlightClass { this.localScheduledDepartureDateTime, this.localizedIssuerName, this.locations, + this.merchantLocations, this.messages, this.multipleDevicesAndHoldersAllowedStatus, this.notifyPreference, @@ -8142,6 +8190,10 @@ class FlightClass { ?.map((value) => LatLongPoint.fromJson( value as core.Map)) .toList(), + merchantLocations: (json_['merchantLocations'] as core.List?) + ?.map((value) => MerchantLocation.fromJson( + value as core.Map)) + .toList(), messages: (json_['messages'] as core.List?) ?.map((value) => Message.fromJson( value as core.Map)) @@ -8222,6 +8274,7 @@ class FlightClass { if (localizedIssuerName != null) 'localizedIssuerName': localizedIssuerName!, if (locations != null) 'locations': locations!, + if (merchantLocations != null) 'merchantLocations': merchantLocations!, if (messages != null) 'messages': messages!, if (multipleDevicesAndHoldersAllowedStatus != null) 'multipleDevicesAndHoldersAllowedStatus': @@ -8488,6 +8541,14 @@ class FlightObject { ) core.List? locations; + /// Merchant locations. + /// + /// There is a maximum of ten on the object. Any additional MerchantLocations + /// added beyond the 10 will be rejected. These locations will trigger a + /// notification when a user enters within a Google-set radius of the point. + /// This field replaces the deprecated LatLongPoints. + core.List? merchantLocations; + /// An array of messages displayed in the app. /// /// All users of this object will receive its associated messages. The maximum @@ -8609,6 +8670,7 @@ class FlightObject { this.linkedObjectIds, this.linksModuleData, this.locations, + this.merchantLocations, this.messages, this.notifyPreference, this.passConstraints, @@ -8678,6 +8740,10 @@ class FlightObject { ?.map((value) => LatLongPoint.fromJson( value as core.Map)) .toList(), + merchantLocations: (json_['merchantLocations'] as core.List?) + ?.map((value) => MerchantLocation.fromJson( + value as core.Map)) + .toList(), messages: (json_['messages'] as core.List?) 
?.map((value) => Message.fromJson( value as core.Map)) @@ -8744,6 +8810,7 @@ class FlightObject { if (linkedObjectIds != null) 'linkedObjectIds': linkedObjectIds!, if (linksModuleData != null) 'linksModuleData': linksModuleData!, if (locations != null) 'locations': locations!, + if (merchantLocations != null) 'merchantLocations': merchantLocations!, if (messages != null) 'messages': messages!, if (notifyPreference != null) 'notifyPreference': notifyPreference!, if (passConstraints != null) 'passConstraints': passConstraints!, @@ -8909,6 +8976,14 @@ class GenericClass { /// and 10 from object. LinksModuleData? linksModuleData; + /// Merchant locations. + /// + /// There is a maximum of ten on the class. Any additional MerchantLocations + /// added beyond the 10 will be rejected. These locations will trigger a + /// notification when a user enters within a Google-set radius of the point. + /// This field replaces the deprecated LatLongPoints. + core.List? merchantLocations; + /// An array of messages displayed in the app. /// /// All users of this object will receive its associated messages. The maximum @@ -8983,6 +9058,7 @@ class GenericClass { this.id, this.imageModulesData, this.linksModuleData, + this.merchantLocations, this.messages, this.multipleDevicesAndHoldersAllowedStatus, this.redemptionIssuers, @@ -9016,6 +9092,10 @@ class GenericClass { ? LinksModuleData.fromJson(json_['linksModuleData'] as core.Map) : null, + merchantLocations: (json_['merchantLocations'] as core.List?) + ?.map((value) => MerchantLocation.fromJson( + value as core.Map)) + .toList(), messages: (json_['messages'] as core.List?) ?.map((value) => Message.fromJson( value as core.Map)) @@ -9048,6 +9128,7 @@ class GenericClass { if (id != null) 'id': id!, if (imageModulesData != null) 'imageModulesData': imageModulesData!, if (linksModuleData != null) 'linksModuleData': linksModuleData!, + if (merchantLocations != null) 'merchantLocations': merchantLocations!, if (messages != null) 'messages': messages!, if (multipleDevicesAndHoldersAllowedStatus != null) 'multipleDevicesAndHoldersAllowedStatus': @@ -9165,6 +9246,10 @@ class GenericObject { /// - "GENERIC_HOME_INSURANCE" : Home-insurance cards /// - "GENERIC_ENTRY_TICKET" : Entry tickets /// - "GENERIC_RECEIPT" : Receipts + /// - "GENERIC_LOYALTY_CARD" : Loyalty cards. Please note that it is advisable + /// to use a dedicated Loyalty card pass type instead of this generic type. A + /// dedicated loyalty card pass type offers more features and functionality + /// than a generic pass type. /// - "GENERIC_OTHER" : Other type core.String? genericType; @@ -9243,6 +9328,14 @@ class GenericObject { /// `cardTitle` would be shown as logo. Image? logo; + /// Merchant locations. + /// + /// There is a maximum of ten on the object. Any additional MerchantLocations + /// added beyond the 10 will be rejected. These locations will trigger a + /// notification when a user enters within a Google-set radius of the point. + /// This field replaces the deprecated LatLongPoints. + core.List? merchantLocations; + /// An array of messages displayed in the app. /// /// All users of this object will receive its associated messages. The maximum @@ -9338,6 +9431,7 @@ class GenericObject { this.linkedObjectIds, this.linksModuleData, this.logo, + this.merchantLocations, this.messages, this.notifications, this.passConstraints, @@ -9398,6 +9492,10 @@ class GenericObject { ? Image.fromJson( json_['logo'] as core.Map) : null, + merchantLocations: (json_['merchantLocations'] as core.List?) 
+ ?.map((value) => MerchantLocation.fromJson( + value as core.Map)) + .toList(), messages: (json_['messages'] as core.List?) ?.map((value) => Message.fromJson( value as core.Map)) @@ -9460,6 +9558,7 @@ class GenericObject { if (linkedObjectIds != null) 'linkedObjectIds': linkedObjectIds!, if (linksModuleData != null) 'linksModuleData': linksModuleData!, if (logo != null) 'logo': logo!, + if (merchantLocations != null) 'merchantLocations': merchantLocations!, if (messages != null) 'messages': messages!, if (notifications != null) 'notifications': notifications!, if (passConstraints != null) 'passConstraints': passConstraints!, @@ -9680,6 +9779,14 @@ class GiftCardClass { ) core.List? locations; + /// Merchant locations. + /// + /// There is a maximum of ten on the class. Any additional MerchantLocations + /// added beyond the 10 will be rejected. These locations will trigger a + /// notification when a user enters within a Google-set radius of the point. + /// This field replaces the deprecated LatLongPoints. + core.List? merchantLocations; + /// Merchant name, such as "Adam's Apparel". /// /// The app may display an ellipsis after the first 20 characters to ensure @@ -9846,6 +9953,7 @@ class GiftCardClass { this.localizedMerchantName, this.localizedPinLabel, this.locations, + this.merchantLocations, this.merchantName, this.messages, this.multipleDevicesAndHoldersAllowedStatus, @@ -9935,6 +10043,10 @@ class GiftCardClass { ?.map((value) => LatLongPoint.fromJson( value as core.Map)) .toList(), + merchantLocations: (json_['merchantLocations'] as core.List?) + ?.map((value) => MerchantLocation.fromJson( + value as core.Map)) + .toList(), merchantName: json_['merchantName'] as core.String?, messages: (json_['messages'] as core.List?) ?.map((value) => Message.fromJson( @@ -10012,6 +10124,7 @@ class GiftCardClass { 'localizedMerchantName': localizedMerchantName!, if (localizedPinLabel != null) 'localizedPinLabel': localizedPinLabel!, if (locations != null) 'locations': locations!, + if (merchantLocations != null) 'merchantLocations': merchantLocations!, if (merchantName != null) 'merchantName': merchantName!, if (messages != null) 'messages': messages!, if (multipleDevicesAndHoldersAllowedStatus != null) @@ -10213,6 +10326,14 @@ class GiftCardObject { ) core.List? locations; + /// Merchant locations. + /// + /// There is a maximum of ten on the object. Any additional MerchantLocations + /// added beyond the 10 will be rejected. These locations will trigger a + /// notification when a user enters within a Google-set radius of the point. + /// This field replaces the deprecated LatLongPoints. + core.List? merchantLocations; + /// An array of messages displayed in the app. /// /// All users of this object will receive its associated messages. The maximum @@ -10324,6 +10445,7 @@ class GiftCardObject { this.linkedObjectIds, this.linksModuleData, this.locations, + this.merchantLocations, this.messages, this.notifyPreference, this.passConstraints, @@ -10396,6 +10518,10 @@ class GiftCardObject { ?.map((value) => LatLongPoint.fromJson( value as core.Map)) .toList(), + merchantLocations: (json_['merchantLocations'] as core.List?) + ?.map((value) => MerchantLocation.fromJson( + value as core.Map)) + .toList(), messages: (json_['messages'] as core.List?) 
?.map((value) => Message.fromJson( value as core.Map)) @@ -10454,6 +10580,7 @@ class GiftCardObject { if (linkedObjectIds != null) 'linkedObjectIds': linkedObjectIds!, if (linksModuleData != null) 'linksModuleData': linksModuleData!, if (locations != null) 'locations': locations!, + if (merchantLocations != null) 'merchantLocations': merchantLocations!, if (messages != null) 'messages': messages!, if (notifyPreference != null) 'notifyPreference': notifyPreference!, if (passConstraints != null) 'passConstraints': passConstraints!, @@ -11326,6 +11453,14 @@ class LoyaltyClass { ) core.List? locations; + /// Merchant locations. + /// + /// There is a maximum of ten on the class. Any additional MerchantLocations + /// added beyond the 10 will be rejected. These locations will trigger a + /// notification when a user enters within a Google-set radius of the point. + /// This field replaces the deprecated LatLongPoints. + core.List? merchantLocations; + /// An array of messages displayed in the app. /// /// All users of this object will receive its associated messages. The maximum @@ -11511,6 +11646,7 @@ class LoyaltyClass { this.localizedSecondaryRewardsTier, this.localizedSecondaryRewardsTierLabel, this.locations, + this.merchantLocations, this.messages, this.multipleDevicesAndHoldersAllowedStatus, this.notifyPreference, @@ -11621,6 +11757,10 @@ class LoyaltyClass { ?.map((value) => LatLongPoint.fromJson( value as core.Map)) .toList(), + merchantLocations: (json_['merchantLocations'] as core.List?) + ?.map((value) => MerchantLocation.fromJson( + value as core.Map)) + .toList(), messages: (json_['messages'] as core.List?) ?.map((value) => Message.fromJson( value as core.Map)) @@ -11710,6 +11850,7 @@ class LoyaltyClass { 'localizedSecondaryRewardsTierLabel': localizedSecondaryRewardsTierLabel!, if (locations != null) 'locations': locations!, + if (merchantLocations != null) 'merchantLocations': merchantLocations!, if (messages != null) 'messages': messages!, if (multipleDevicesAndHoldersAllowedStatus != null) 'multipleDevicesAndHoldersAllowedStatus': @@ -11919,6 +12060,14 @@ class LoyaltyObject { /// The loyalty reward points label, balance, and type. LoyaltyPoints? loyaltyPoints; + /// Merchant locations. + /// + /// There is a maximum of ten on the object. Any additional MerchantLocations + /// added beyond the 10 will be rejected. These locations will trigger a + /// notification when a user enters within a Google-set radius of the point. + /// This field replaces the deprecated LatLongPoints. + core.List? merchantLocations; + /// An array of messages displayed in the app. /// /// All users of this object will receive its associated messages. The maximum @@ -12034,6 +12183,7 @@ class LoyaltyObject { this.linksModuleData, this.locations, this.loyaltyPoints, + this.merchantLocations, this.messages, this.notifyPreference, this.passConstraints, @@ -12105,6 +12255,10 @@ class LoyaltyObject { ? LoyaltyPoints.fromJson( json_['loyaltyPoints'] as core.Map) : null, + merchantLocations: (json_['merchantLocations'] as core.List?) + ?.map((value) => MerchantLocation.fromJson( + value as core.Map)) + .toList(), messages: (json_['messages'] as core.List?) 
?.map((value) => Message.fromJson( value as core.Map)) @@ -12166,6 +12320,7 @@ class LoyaltyObject { if (linksModuleData != null) 'linksModuleData': linksModuleData!, if (locations != null) 'locations': locations!, if (loyaltyPoints != null) 'loyaltyPoints': loyaltyPoints!, + if (merchantLocations != null) 'merchantLocations': merchantLocations!, if (messages != null) 'messages': messages!, if (notifyPreference != null) 'notifyPreference': notifyPreference!, if (passConstraints != null) 'passConstraints': passConstraints!, @@ -12811,6 +12966,42 @@ class MediaRequestInfo { }; } +/// Locations of interest for this class or object. +/// +/// Currently, this location is used for geofenced notifications. When a user is +/// within a set radius of this lat/long, and dwells there, Google will trigger +/// a notification. When a user exits this radius, the notification will be +/// hidden. +class MerchantLocation { + /// The latitude specified as any value in the range of -90.0 through +90.0, + /// both inclusive. + /// + /// Values outside these bounds will be rejected. + core.double? latitude; + + /// The longitude specified in the range -180.0 through +180.0, both + /// inclusive. + /// + /// Values outside these bounds will be rejected. + core.double? longitude; + + MerchantLocation({ + this.latitude, + this.longitude, + }); + + MerchantLocation.fromJson(core.Map json_) + : this( + latitude: (json_['latitude'] as core.num?)?.toDouble(), + longitude: (json_['longitude'] as core.num?)?.toDouble(), + ); + + core.Map toJson() => { + if (latitude != null) 'latitude': latitude!, + if (longitude != null) 'longitude': longitude!, + }; +} + /// A message that will be displayed with a Valuable class Message { /// The message body. @@ -13220,6 +13411,14 @@ class OfferClass { ) core.List? locations; + /// Merchant locations. + /// + /// There is a maximum of ten on the class. Any additional MerchantLocations + /// added beyond the 10 will be rejected. These locations will trigger a + /// notification when a user enters within a Google-set radius of the point. + /// This field replaces the deprecated LatLongPoints. + core.List? merchantLocations; + /// An array of messages displayed in the app. /// /// All users of this object will receive its associated messages. The maximum @@ -13416,6 +13615,7 @@ class OfferClass { this.localizedShortTitle, this.localizedTitle, this.locations, + this.merchantLocations, this.messages, this.multipleDevicesAndHoldersAllowedStatus, this.notifyPreference, @@ -13512,6 +13712,10 @@ class OfferClass { ?.map((value) => LatLongPoint.fromJson( value as core.Map)) .toList(), + merchantLocations: (json_['merchantLocations'] as core.List?) + ?.map((value) => MerchantLocation.fromJson( + value as core.Map)) + .toList(), messages: (json_['messages'] as core.List?) ?.map((value) => Message.fromJson( value as core.Map)) @@ -13590,6 +13794,7 @@ class OfferClass { 'localizedShortTitle': localizedShortTitle!, if (localizedTitle != null) 'localizedTitle': localizedTitle!, if (locations != null) 'locations': locations!, + if (merchantLocations != null) 'merchantLocations': merchantLocations!, if (messages != null) 'messages': messages!, if (multipleDevicesAndHoldersAllowedStatus != null) 'multipleDevicesAndHoldersAllowedStatus': @@ -13773,6 +13978,14 @@ class OfferObject { /// Note: This field is currently not supported to trigger geo notifications. core.List? locations; + /// Merchant locations. + /// + /// There is a maximum of ten on the object. 
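The MerchantLocation schema above is shared by every Wallet class and object that now carries a merchantLocations list. A minimal sketch, assuming a placeholder issuer/class id; the latitude/longitude bounds and the ten-location limit come from the field documentation:

```dart
import 'package:googleapis/walletobjects/v1.dart';

/// Builds an OfferClass with a single geofenced merchant location.
OfferClass buildOfferClassWithMerchantLocations() {
  return OfferClass(
    id: 'ISSUER_ID.OFFER_CLASS_ID', // placeholder id
    merchantLocations: [
      // Latitude must lie in [-90.0, +90.0] and longitude in [-180.0, +180.0];
      // at most ten MerchantLocations are accepted per class or object.
      MerchantLocation(latitude: 37.422, longitude: -122.084),
    ],
  );
}
```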
Any additional MerchantLocations + /// added beyond the 10 will be rejected. These locations will trigger a + /// notification when a user enters within a Google-set radius of the point. + /// This field replaces the deprecated LatLongPoints. + core.List? merchantLocations; + /// An array of messages displayed in the app. /// /// All users of this object will receive its associated messages. The maximum @@ -13877,6 +14090,7 @@ class OfferObject { this.linkedObjectIds, this.linksModuleData, this.locations, + this.merchantLocations, this.messages, this.notifyPreference, this.passConstraints, @@ -13938,6 +14152,10 @@ class OfferObject { ?.map((value) => LatLongPoint.fromJson( value as core.Map)) .toList(), + merchantLocations: (json_['merchantLocations'] as core.List?) + ?.map((value) => MerchantLocation.fromJson( + value as core.Map)) + .toList(), messages: (json_['messages'] as core.List?) ?.map((value) => Message.fromJson( value as core.Map)) @@ -13991,6 +14209,7 @@ class OfferObject { if (linkedObjectIds != null) 'linkedObjectIds': linkedObjectIds!, if (linksModuleData != null) 'linksModuleData': linksModuleData!, if (locations != null) 'locations': locations!, + if (merchantLocations != null) 'merchantLocations': merchantLocations!, if (messages != null) 'messages': messages!, if (notifyPreference != null) 'notifyPreference': notifyPreference!, if (passConstraints != null) 'passConstraints': passConstraints!, @@ -15590,6 +15809,14 @@ class TransitClass { /// Required. Image? logo; + /// Merchant locations. + /// + /// There is a maximum of ten on the class. Any additional MerchantLocations + /// added beyond the 10 will be rejected. These locations will trigger a + /// notification when a user enters within a Google-set radius of the point. + /// This field replaces the deprecated LatLongPoints. + core.List? merchantLocations; + /// An array of messages displayed in the app. /// /// All users of this object will receive its associated messages. The maximum @@ -15781,6 +16008,7 @@ class TransitClass { this.localizedIssuerName, this.locations, this.logo, + this.merchantLocations, this.messages, this.multipleDevicesAndHoldersAllowedStatus, this.notifyPreference, @@ -15948,6 +16176,10 @@ class TransitClass { ? Image.fromJson( json_['logo'] as core.Map) : null, + merchantLocations: (json_['merchantLocations'] as core.List?) + ?.map((value) => MerchantLocation.fromJson( + value as core.Map)) + .toList(), messages: (json_['messages'] as core.List?) ?.map((value) => Message.fromJson( value as core.Map)) @@ -16057,6 +16289,7 @@ class TransitClass { 'localizedIssuerName': localizedIssuerName!, if (locations != null) 'locations': locations!, if (logo != null) 'logo': logo!, + if (merchantLocations != null) 'merchantLocations': merchantLocations!, if (messages != null) 'messages': messages!, if (multipleDevicesAndHoldersAllowedStatus != null) 'multipleDevicesAndHoldersAllowedStatus': @@ -16274,6 +16507,14 @@ class TransitObject { ) core.List? locations; + /// Merchant locations. + /// + /// There is a maximum of ten on the object. Any additional MerchantLocations + /// added beyond the 10 will be rejected. These locations will trigger a + /// notification when a user enters within a Google-set radius of the point. + /// This field replaces the deprecated LatLongPoints. + core.List? merchantLocations; + /// An array of messages displayed in the app. /// /// All users of this object will receive its associated messages. 
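The same walletobjects revision also added the optional displayText to AppLinkData earlier in this file. A sketch, assuming LocalizedString and TranslatedString keep their usual defaultValue/language/value shape in the generated library:

```dart
import 'package:googleapis/walletobjects/v1.dart';

/// Supplies custom button text for a partner app link.
AppLinkData buildAppLinkData() {
  return AppLinkData(
    displayText: LocalizedString(
      defaultValue: TranslatedString(
        language: 'en-US',
        value: 'Open in app', // must stay within the 30-character limit
      ),
    ),
  );
}
```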
The maximum @@ -16461,6 +16702,7 @@ class TransitObject { this.linkedObjectIds, this.linksModuleData, this.locations, + this.merchantLocations, this.messages, this.notifyPreference, this.passConstraints, @@ -16550,6 +16792,10 @@ class TransitObject { ?.map((value) => LatLongPoint.fromJson( value as core.Map)) .toList(), + merchantLocations: (json_['merchantLocations'] as core.List?) + ?.map((value) => MerchantLocation.fromJson( + value as core.Map)) + .toList(), messages: (json_['messages'] as core.List?) ?.map((value) => Message.fromJson( value as core.Map)) @@ -16634,6 +16880,7 @@ class TransitObject { if (linkedObjectIds != null) 'linkedObjectIds': linkedObjectIds!, if (linksModuleData != null) 'linksModuleData': linksModuleData!, if (locations != null) 'locations': locations!, + if (merchantLocations != null) 'merchantLocations': merchantLocations!, if (messages != null) 'messages': messages!, if (notifyPreference != null) 'notifyPreference': notifyPreference!, if (passConstraints != null) 'passConstraints': passConstraints!, diff --git a/generated/googleapis/lib/webrisk/v1.dart b/generated/googleapis/lib/webrisk/v1.dart index 4d9f2f43d..d07b89cd7 100644 --- a/generated/googleapis/lib/webrisk/v1.dart +++ b/generated/googleapis/lib/webrisk/v1.dart @@ -138,8 +138,8 @@ class ProjectsOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// [request] - The metadata request object. /// diff --git a/generated/googleapis/lib/workflows/v1.dart b/generated/googleapis/lib/workflows/v1.dart index 62cb1efb5..d00dc7032 100644 --- a/generated/googleapis/lib/workflows/v1.dart +++ b/generated/googleapis/lib/workflows/v1.dart @@ -983,6 +983,13 @@ class Workflow { /// Output only. StateError? stateError; + /// Input only. + /// + /// Immutable. Tags associated with this workflow. + /// + /// Optional. + core.Map? tags; + /// The timestamp for when the workflow was last updated. /// /// This is a workflow-wide field and is not tied to a specific revision. @@ -1015,6 +1022,7 @@ class Workflow { this.sourceContents, this.state, this.stateError, + this.tags, this.updateTime, this.userEnvVars, }); @@ -1050,6 +1058,12 @@ class Workflow { ? StateError.fromJson( json_['stateError'] as core.Map) : null, + tags: (json_['tags'] as core.Map?)?.map( + (key, value) => core.MapEntry( + key, + value as core.String, + ), + ), updateTime: json_['updateTime'] as core.String?, userEnvVars: (json_['userEnvVars'] as core.Map?) 
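The new tags map on Workflow is input-only and immutable, so it can only be supplied when the workflow is created. A minimal sketch; the tag key/value pair is a placeholder:

```dart
import 'package:googleapis/workflows/v1.dart';

/// A trivial workflow definition with resource tags attached at creation.
Workflow buildTaggedWorkflow() {
  return Workflow(
    sourceContents: 'main:\n'
        '  steps:\n'
        '    - done:\n'
        '        return: "ok"\n',
    tags: {'environment': 'dev'}, // placeholder tag binding
  );
}
```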
@@ -1081,6 +1095,7 @@ class Workflow { if (sourceContents != null) 'sourceContents': sourceContents!, if (state != null) 'state': state!, if (stateError != null) 'stateError': stateError!, + if (tags != null) 'tags': tags!, if (updateTime != null) 'updateTime': updateTime!, if (userEnvVars != null) 'userEnvVars': userEnvVars!, }; diff --git a/generated/googleapis/lib/workloadmanager/v1.dart b/generated/googleapis/lib/workloadmanager/v1.dart index bc4dd4f26..81c226126 100644 --- a/generated/googleapis/lib/workloadmanager/v1.dart +++ b/generated/googleapis/lib/workloadmanager/v1.dart @@ -783,8 +783,8 @@ class ProjectsLocationsOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// [request] - The metadata request object. /// @@ -1143,6 +1143,15 @@ class Evaluation { /// Description of the Evaluation core.String? description; + /// Evaluation type + /// Possible string values are: + /// - "EVALUATION_TYPE_UNSPECIFIED" : Not specified + /// - "SAP" : SAP best practices + /// - "SQL_SERVER" : SQL best practices + /// - "OTHER" : Customized best practices + /// - "SCC_IAC" : SCC IaC (Infra as Code) best practices + core.String? evaluationType; + /// Labels as key value pairs core.Map? labels; @@ -1181,6 +1190,7 @@ class Evaluation { this.createTime, this.customRulesBucket, this.description, + this.evaluationType, this.labels, this.name, this.resourceFilter, @@ -1200,6 +1210,7 @@ class Evaluation { createTime: json_['createTime'] as core.String?, customRulesBucket: json_['customRulesBucket'] as core.String?, description: json_['description'] as core.String?, + evaluationType: json_['evaluationType'] as core.String?, labels: (json_['labels'] as core.Map?)?.map( (key, value) => core.MapEntry( @@ -1232,6 +1243,7 @@ class Evaluation { if (createTime != null) 'createTime': createTime!, if (customRulesBucket != null) 'customRulesBucket': customRulesBucket!, if (description != null) 'description': description!, + if (evaluationType != null) 'evaluationType': evaluationType!, if (labels != null) 'labels': labels!, if (name != null) 'name': name!, if (resourceFilter != null) 'resourceFilter': resourceFilter!, @@ -1274,6 +1286,16 @@ class Execution { /// projects/{project}/locations/{location}/evaluations/{evaluation}/executions/{execution} core.String? name; + /// Additional information generated by the execution + /// + /// Output only. + core.List? notices; + + /// Result summary + /// + /// Output only. + Summary? resultSummary; + /// execution result summary per rule /// /// Output only. @@ -1309,6 +1331,8 @@ class Execution { this.inventoryTime, this.labels, this.name, + this.notices, + this.resultSummary, this.ruleResults, this.runType, this.startTime, @@ -1332,6 +1356,14 @@ class Execution { ), ), name: json_['name'] as core.String?, + notices: (json_['notices'] as core.List?) + ?.map((value) => + Notice.fromJson(value as core.Map)) + .toList(), + resultSummary: json_.containsKey('resultSummary') + ? Summary.fromJson( + json_['resultSummary'] as core.Map) + : null, ruleResults: (json_['ruleResults'] as core.List?) 
?.map((value) => RuleExecutionResult.fromJson( value as core.Map)) @@ -1349,6 +1381,8 @@ class Execution { if (inventoryTime != null) 'inventoryTime': inventoryTime!, if (labels != null) 'labels': labels!, if (name != null) 'name': name!, + if (notices != null) 'notices': notices!, + if (resultSummary != null) 'resultSummary': resultSummary!, if (ruleResults != null) 'ruleResults': ruleResults!, if (runType != null) 'runType': runType!, if (startTime != null) 'startTime': startTime!, @@ -1530,12 +1564,16 @@ class Insight { /// The insights data for the sqlserver workload validation. SqlserverValidation? sqlserverValidation; + /// The insights data for workload validation of torso workloads. + TorsoValidation? torsoValidation; + Insight({ this.instanceId, this.sapDiscovery, this.sapValidation, this.sentTime, this.sqlserverValidation, + this.torsoValidation, }); Insight.fromJson(core.Map json_) @@ -1554,6 +1592,10 @@ class Insight { ? SqlserverValidation.fromJson(json_['sqlserverValidation'] as core.Map) : null, + torsoValidation: json_.containsKey('torsoValidation') + ? TorsoValidation.fromJson(json_['torsoValidation'] + as core.Map) + : null, ); core.Map toJson() => { @@ -1563,6 +1605,7 @@ class Insight { if (sentTime != null) 'sentTime': sentTime!, if (sqlserverValidation != null) 'sqlserverValidation': sqlserverValidation!, + if (torsoValidation != null) 'torsoValidation': torsoValidation!, }; } @@ -1785,6 +1828,27 @@ class ListScannedResourcesResponse { /// A resource that represents a Google Cloud location. typedef Location = $Location00; +/// Message for additional information generated by the execution +class Notice { + /// Message of the notice + /// + /// Output only. + core.String? message; + + Notice({ + this.message, + }); + + Notice.fromJson(core.Map json_) + : this( + message: json_['message'] as core.String?, + ); + + core.Map toJson() => { + if (message != null) 'message': message!, + }; +} + /// This resource represents a long-running operation that is the result of a /// network API call. class Operation { @@ -3186,6 +3250,111 @@ class SqlserverValidationValidationDetail { /// [API Design Guide](https://cloud.google.com/apis/design/errors). typedef Status = $Status00; +/// Message for execution summary +class Summary { + /// Number of failures + /// + /// Output only. + core.String? failures; + + /// Number of new failures compared to the previous execution + /// + /// Output only. + core.String? newFailures; + + /// Number of new fixes compared to the previous execution + /// + /// Output only. + core.String? newFixes; + + Summary({ + this.failures, + this.newFailures, + this.newFixes, + }); + + Summary.fromJson(core.Map json_) + : this( + failures: json_['failures'] as core.String?, + newFailures: json_['newFailures'] as core.String?, + newFixes: json_['newFixes'] as core.String?, + ); + + core.Map toJson() => { + if (failures != null) 'failures': failures!, + if (newFailures != null) 'newFailures': newFailures!, + if (newFixes != null) 'newFixes': newFixes!, + }; +} + +/// The schema of torso workload validation data. +class TorsoValidation { + /// agent_version lists the version of the agent that collected this data. + /// + /// Required. + core.String? agentVersion; + + /// instance_name lists the human readable name of the instance that the data + /// comes from. + /// + /// Required. + core.String? instanceName; + + /// project_id lists the human readable cloud project that the data comes + /// from. + /// + /// Required. + core.String? 
projectId; + + /// validation_details contains the pairs of validation data: field name & + /// field value. + /// + /// Required. + core.Map? validationDetails; + + /// workload_type specifies the type of torso workload. + /// + /// Required. + /// Possible string values are: + /// - "WORKLOAD_TYPE_UNSPECIFIED" : Unspecified workload type. + /// - "MYSQL" : MySQL workload. + /// - "ORACLE" : Oracle workload. + /// - "REDIS" : Redis workload. + core.String? workloadType; + + TorsoValidation({ + this.agentVersion, + this.instanceName, + this.projectId, + this.validationDetails, + this.workloadType, + }); + + TorsoValidation.fromJson(core.Map json_) + : this( + agentVersion: json_['agentVersion'] as core.String?, + instanceName: json_['instanceName'] as core.String?, + projectId: json_['projectId'] as core.String?, + validationDetails: (json_['validationDetails'] + as core.Map?) + ?.map( + (key, value) => core.MapEntry( + key, + value as core.String, + ), + ), + workloadType: json_['workloadType'] as core.String?, + ); + + core.Map toJson() => { + if (agentVersion != null) 'agentVersion': agentVersion!, + if (instanceName != null) 'instanceName': instanceName!, + if (projectId != null) 'projectId': projectId!, + if (validationDetails != null) 'validationDetails': validationDetails!, + if (workloadType != null) 'workloadType': workloadType!, + }; +} + /// Message describing the violation in an evaluation result. class ViolationDetails { /// The name of the asset. diff --git a/generated/googleapis/lib/workspaceevents/v1.dart b/generated/googleapis/lib/workspaceevents/v1.dart index 933803594..48252883f 100644 --- a/generated/googleapis/lib/workspaceevents/v1.dart +++ b/generated/googleapis/lib/workspaceevents/v1.dart @@ -78,6 +78,25 @@ class WorkspaceEventsApi { static const chatSpacesReadonlyScope = 'https://www.googleapis.com/auth/chat.spaces.readonly'; + /// See, edit, create, and delete all of your Google Drive files + static const driveScope = 'https://www.googleapis.com/auth/drive'; + + /// See, edit, create, and delete only the specific Google Drive files you use + /// with this app + static const driveFileScope = 'https://www.googleapis.com/auth/drive.file'; + + /// View and manage metadata of files in your Google Drive + static const driveMetadataScope = + 'https://www.googleapis.com/auth/drive.metadata'; + + /// See information about your Google Drive files + static const driveMetadataReadonlyScope = + 'https://www.googleapis.com/auth/drive.metadata.readonly'; + + /// See and download all your Google Drive files + static const driveReadonlyScope = + 'https://www.googleapis.com/auth/drive.readonly'; + /// Create, edit, and see information about your Google Meet conferences /// created by the app. static const meetingsSpaceCreatedScope = @@ -424,7 +443,8 @@ class SubscriptionsResource { /// /// This method resets your subscription's `State` field to `ACTIVE`. Before /// you use this method, you must fix the error that suspended the - /// subscription. To learn how to use this method, see + /// subscription. This method will ignore or reject any subscription that + /// isn't currently in a suspended state. To learn how to use this method, see /// [Reactivate a Google Workspace subscription](https://developers.google.com/workspace/events/guides/reactivate-subscription). /// /// [request] - The metadata request object. 
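For the workloadmanager v1 additions above (Summary, Notice, TorsoValidation), two small sketches: reading the new per-execution summary, and packaging torso workload data into an Insight. Concrete values are placeholders.

```dart
import 'package:googleapis/workloadmanager/v1.dart';

/// Prints the new result summary and notices of an execution.
void printExecutionSummary(Execution execution) {
  // Summary counts are int64 values encoded as strings.
  final summary = execution.resultSummary;
  if (summary != null) {
    print('failures=${summary.failures} '
        'newFailures=${summary.newFailures} newFixes=${summary.newFixes}');
  }
  for (final notice in execution.notices ?? const <Notice>[]) {
    print('notice: ${notice.message}');
  }
}

/// Wraps torso workload validation data in an Insight.
Insight buildTorsoInsight() {
  return Insight(
    instanceId: 'mysql-primary-1', // placeholder
    torsoValidation: TorsoValidation(
      agentVersion: '3.6',
      instanceName: 'mysql-primary-1',
      projectId: 'my-project',
      workloadType: 'MYSQL', // MYSQL, ORACLE, or REDIS
      validationDetails: {'innodb_buffer_pool_size': '8589934592'},
    ),
  );
}
```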
diff --git a/generated/googleapis/lib/workstations/v1.dart b/generated/googleapis/lib/workstations/v1.dart index e4f18d8f9..3b2e6e55b 100644 --- a/generated/googleapis/lib/workstations/v1.dart +++ b/generated/googleapis/lib/workstations/v1.dart @@ -181,8 +181,8 @@ class ProjectsLocationsOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// [request] - The metadata request object. /// @@ -1178,6 +1178,9 @@ class ProjectsLocationsWorkstationClustersWorkstationConfigsWorkstationsResource /// Returns a short-lived credential that can be used to send authenticated /// and authorized traffic to a workstation. /// + /// Once generated this token cannot be revoked and is good for the lifetime + /// of the token. + /// /// [request] - The metadata request object. /// /// Request parameters: @@ -1836,7 +1839,14 @@ class Binding { }; } -/// A configuration that workstations can boost to. +/// A boost configuration is a set of resources that a workstation can use to +/// increase its performance. +/// +/// If a boost configuration is specified, when starting a workstation, users +/// can choose to use a VM provisioned under the boost config by passing the +/// boost config id in the start request. If no boost config id is provided in +/// the start request, the system will choose a VM from the pool provisioned +/// under the default config. class BoostConfig { /// A list of the type and count of accelerator cards attached to the boost /// instance. @@ -1877,7 +1887,7 @@ class BoostConfig { /// The id to be used for the boost configuration. /// - /// Optional. Required. + /// Required. core.String? id; /// The type of machine that boosted VM instances will use—for example, @@ -2169,6 +2179,9 @@ class GceInstance { /// A list of the boost configurations that workstations created using this /// workstation configuration are allowed to use. /// + /// If specified, users will have the option to choose from the list of boost + /// configs when starting a workstation. + /// /// Optional. core.List? boostConfigs; diff --git a/generated/googleapis/lib/youtube/v3.dart b/generated/googleapis/lib/youtube/v3.dart index f4347d078..5bc5a0a19 100644 --- a/generated/googleapis/lib/youtube/v3.dart +++ b/generated/googleapis/lib/youtube/v3.dart @@ -53,8 +53,6 @@ /// - [WatermarksResource] /// - [YoutubeResource] /// - [YoutubeV3Resource] -/// - [YoutubeV3LiveChatResource] -/// - [YoutubeV3LiveChatMessagesResource] library; import 'dart:async' as async; @@ -5800,9 +5798,6 @@ class YoutubeResource { class YoutubeV3Resource { final commons.ApiRequester _requester; - YoutubeV3LiveChatResource get liveChat => - YoutubeV3LiveChatResource(_requester); - YoutubeV3Resource(commons.ApiRequester client) : _requester = client; /// Updates an existing resource. 
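The reworked BoostConfig documentation in the workstations diff above describes declaring boost VMs on a workstation config and selecting one by id at start time. A sketch under those assumptions; the machineType field name on BoostConfig and the id values are assumptions/placeholders, while boostConfigs and the required id come from the diff itself:

```dart
import 'package:googleapis/workstations/v1.dart';

/// A GceInstance offering one boost configuration that users can select
/// by passing its id when starting a workstation.
GceInstance buildGceInstanceWithBoost() {
  return GceInstance(
    machineType: 'e2-standard-4', // default pool machine type
    boostConfigs: [
      BoostConfig(
        id: 'beefy', // required; referenced from the start request
        machineType: 'n1-standard-16', // assumed field name
      ),
    ],
  );
}
```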
@@ -5850,89 +5845,6 @@ class YoutubeV3Resource { } } -class YoutubeV3LiveChatResource { - final commons.ApiRequester _requester; - - YoutubeV3LiveChatMessagesResource get messages => - YoutubeV3LiveChatMessagesResource(_requester); - - YoutubeV3LiveChatResource(commons.ApiRequester client) : _requester = client; -} - -class YoutubeV3LiveChatMessagesResource { - final commons.ApiRequester _requester; - - YoutubeV3LiveChatMessagesResource(commons.ApiRequester client) - : _requester = client; - - /// Allows a user to load live chat through a server-streamed RPC. - /// - /// Request parameters: - /// - /// [hl] - Specifies the localization language in which the system messages - /// should be returned. - /// - /// [liveChatId] - The id of the live chat for which comments should be - /// returned. - /// - /// [maxResults] - The *maxResults* parameter specifies the maximum number of - /// items that should be returned in the result set. Not used in the streaming - /// RPC. - /// Value must be between "200" and "2000". - /// - /// [pageToken] - The *pageToken* parameter identifies a specific page in the - /// result set that should be returned. In an API response, the nextPageToken - /// property identify other pages that could be retrieved. - /// - /// [part] - The *part* parameter specifies the liveChatComment resource parts - /// that the API response will include. Supported values are id, snippet, and - /// authorDetails. - /// - /// [profileImageSize] - Specifies the size of the profile image that should - /// be returned for each user. - /// Value must be between "16" and "720". - /// - /// [$fields] - Selector specifying which fields to include in a partial - /// response. - /// - /// Completes with a [LiveChatMessageListResponse]. - /// - /// Completes with a [commons.ApiRequestError] if the API endpoint returned an - /// error. - /// - /// If the used [http.Client] completes with an error when making a REST call, - /// this method will complete with the same error. - async.Future stream({ - core.String? hl, - core.String? liveChatId, - core.int? maxResults, - core.String? pageToken, - core.List? part, - core.int? profileImageSize, - core.String? $fields, - }) async { - final queryParams_ = >{ - if (hl != null) 'hl': [hl], - if (liveChatId != null) 'liveChatId': [liveChatId], - if (maxResults != null) 'maxResults': ['${maxResults}'], - if (pageToken != null) 'pageToken': [pageToken], - if (part != null) 'part': part, - if (profileImageSize != null) 'profileImageSize': ['${profileImageSize}'], - if ($fields != null) 'fields': [$fields], - }; - - const url_ = 'youtube/v3/liveChat/messages/stream'; - - final response_ = await _requester.request( - url_, - 'GET', - queryParams: queryParams_, - ); - return LiveChatMessageListResponse.fromJson( - response_ as core.Map); - } -} - class AbuseReport { core.List? abuseTypes; core.String? description; @@ -8308,6 +8220,10 @@ class ChannelToStoreLinkDetails { /// Information specific to billing (read-only). ChannelToStoreLinkDetailsBillingDetails? billingDetails; + /// Information specific to merchant affiliate program (read-only). + ChannelToStoreLinkDetailsMerchantAffiliateProgramDetails? + merchantAffiliateProgramDetails; + /// Google Merchant Center id of the store. core.String? 
merchantId; @@ -8319,6 +8235,7 @@ class ChannelToStoreLinkDetails { ChannelToStoreLinkDetails({ this.billingDetails, + this.merchantAffiliateProgramDetails, this.merchantId, this.storeName, this.storeUrl, @@ -8331,6 +8248,12 @@ class ChannelToStoreLinkDetails { json_['billingDetails'] as core.Map) : null, + merchantAffiliateProgramDetails: + json_.containsKey('merchantAffiliateProgramDetails') + ? ChannelToStoreLinkDetailsMerchantAffiliateProgramDetails + .fromJson(json_['merchantAffiliateProgramDetails'] + as core.Map) + : null, merchantId: json_['merchantId'] as core.String?, storeName: json_['storeName'] as core.String?, storeUrl: json_['storeUrl'] as core.String?, @@ -8338,6 +8261,8 @@ class ChannelToStoreLinkDetails { core.Map toJson() => { if (billingDetails != null) 'billingDetails': billingDetails!, + if (merchantAffiliateProgramDetails != null) + 'merchantAffiliateProgramDetails': merchantAffiliateProgramDetails!, if (merchantId != null) 'merchantId': merchantId!, if (storeName != null) 'storeName': storeName!, if (storeUrl != null) 'storeUrl': storeUrl!, @@ -8368,6 +8293,34 @@ class ChannelToStoreLinkDetailsBillingDetails { }; } +/// Information specific to merchant affiliate program. +class ChannelToStoreLinkDetailsMerchantAffiliateProgramDetails { + /// The current merchant affiliate program status. + /// Possible string values are: + /// - "merchantAffiliateProgramStatusUnspecified" : Unspecified status. + /// - "merchantAffiliateProgramStatusEligible" : Merchant is eligible for the + /// merchant affiliate program. + /// - "merchantAffiliateProgramStatusActive" : Merchant affiliate program is + /// active. + /// - "merchantAffiliateProgramStatusPaused" : Merchant affiliate program is + /// paused. + core.String? status; + + ChannelToStoreLinkDetailsMerchantAffiliateProgramDetails({ + this.status, + }); + + ChannelToStoreLinkDetailsMerchantAffiliateProgramDetails.fromJson( + core.Map json_) + : this( + status: json_['status'] as core.String?, + ); + + core.Map toJson() => { + if (status != null) 'status': status!, + }; +} + /// Freebase topic information related to the channel. class ChannelTopicDetails { /// A list of Wikipedia URLs that describe the channel's content. @@ -8662,7 +8615,7 @@ class CommentSnippet { } /// The id of the author's YouTube channel, if any. -typedef CommentSnippetAuthorChannelId = $Shared14; +typedef CommentSnippetAuthorChannelId = $Shared15; /// A *comment thread* represents information that applies to a top level /// comment and all its replies. @@ -10942,7 +10895,7 @@ class InvideoTiming { }; } -typedef LanguageTag = $Shared14; +typedef LanguageTag = $Shared15; class LevelDetails { /// The name that should be used when referring to this level. @@ -14068,8 +14021,7 @@ class PlaylistItemListResponse { /// Identifies what kind of resource this is. /// - /// Value: the fixed string "youtube#playlistItemListResponse". Etag of this - /// resource. + /// Value: the fixed string "youtube#playlistItemListResponse". core.String? kind; /// The token that can be used as the value of the pageToken parameter to @@ -16878,7 +16830,7 @@ class VideoLiveStreamingDetails { /// The time that the broadcast is scheduled to end. /// /// If the value is empty or the property is not present, then the broadcast - /// is scheduled to contiue indefinitely. + /// is scheduled to continue indefinitely. core.DateTime? scheduledEndTime; /// The time that the broadcast is scheduled to begin. 
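A short sketch for the merchant affiliate program details added to ChannelToStoreLinkDetails above:

```dart
import 'package:googleapis/youtube/v3.dart';

/// True when the linked store's affiliate program is currently active.
bool isAffiliateProgramActive(ChannelToStoreLinkDetails details) {
  return details.merchantAffiliateProgramDetails?.status ==
      'merchantAffiliateProgramStatusActive';
}
```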
@@ -17446,6 +17398,9 @@ class VideoStatistics { /// /// Next Id: 19 class VideoStatus { + /// Indicates if the video contains altered or synthetic media. + core.bool? containsSyntheticMedia; + /// This value indicates if the video can be embedded on another website. /// /// @mutable youtube.videos.insert youtube.videos.update @@ -17520,6 +17475,7 @@ class VideoStatus { core.String? uploadStatus; VideoStatus({ + this.containsSyntheticMedia, this.embeddable, this.failureReason, this.license, @@ -17534,6 +17490,7 @@ class VideoStatus { VideoStatus.fromJson(core.Map json_) : this( + containsSyntheticMedia: json_['containsSyntheticMedia'] as core.bool?, embeddable: json_['embeddable'] as core.bool?, failureReason: json_['failureReason'] as core.String?, license: json_['license'] as core.String?, @@ -17550,6 +17507,8 @@ class VideoStatus { ); core.Map toJson() => { + if (containsSyntheticMedia != null) + 'containsSyntheticMedia': containsSyntheticMedia!, if (embeddable != null) 'embeddable': embeddable!, if (failureReason != null) 'failureReason': failureReason!, if (license != null) 'license': license!, diff --git a/generated/googleapis/test/accesscontextmanager/v1_test.dart b/generated/googleapis/test/accesscontextmanager/v1_test.dart index d8709efaa..b38fabe5b 100644 --- a/generated/googleapis/test/accesscontextmanager/v1_test.dart +++ b/generated/googleapis/test/accesscontextmanager/v1_test.dart @@ -160,7 +160,7 @@ api.AccessSettings buildAccessSettings() { buildCounterAccessSettings++; if (buildCounterAccessSettings < 3) { o.accessLevels = buildUnnamed1(); - o.reauthSettings = buildReauthSettings(); + o.sessionSettings = buildSessionSettings(); } buildCounterAccessSettings--; return o; @@ -170,7 +170,7 @@ void checkAccessSettings(api.AccessSettings o) { buildCounterAccessSettings++; if (buildCounterAccessSettings < 3) { checkUnnamed1(o.accessLevels!); - checkReauthSettings(o.reauthSettings!); + checkSessionSettings(o.sessionSettings!); } buildCounterAccessSettings--; } @@ -772,6 +772,7 @@ api.EgressPolicy buildEgressPolicy() { if (buildCounterEgressPolicy < 3) { o.egressFrom = buildEgressFrom(); o.egressTo = buildEgressTo(); + o.title = 'foo'; } buildCounterEgressPolicy--; return o; @@ -782,6 +783,10 @@ void checkEgressPolicy(api.EgressPolicy o) { if (buildCounterEgressPolicy < 3) { checkEgressFrom(o.egressFrom!); checkEgressTo(o.egressTo!); + unittest.expect( + o.title!, + unittest.equals('foo'), + ); } buildCounterEgressPolicy--; } @@ -993,9 +998,9 @@ api.GcpUserAccessBinding buildGcpUserAccessBinding() { o.dryRunAccessLevels = buildUnnamed22(); o.groupKey = 'foo'; o.name = 'foo'; - o.reauthSettings = buildReauthSettings(); o.restrictedClientApplications = buildUnnamed23(); o.scopedAccessSettings = buildUnnamed24(); + o.sessionSettings = buildSessionSettings(); } buildCounterGcpUserAccessBinding--; return o; @@ -1014,9 +1019,9 @@ void checkGcpUserAccessBinding(api.GcpUserAccessBinding o) { o.name!, unittest.equals('foo'), ); - checkReauthSettings(o.reauthSettings!); checkUnnamed23(o.restrictedClientApplications!); checkUnnamed24(o.scopedAccessSettings!); + checkSessionSettings(o.sessionSettings!); } buildCounterGcpUserAccessBinding--; } @@ -1123,6 +1128,7 @@ api.IngressPolicy buildIngressPolicy() { if (buildCounterIngressPolicy < 3) { o.ingressFrom = buildIngressFrom(); o.ingressTo = buildIngressTo(); + o.title = 'foo'; } buildCounterIngressPolicy--; return o; @@ -1133,6 +1139,10 @@ void checkIngressPolicy(api.IngressPolicy o) { if (buildCounterIngressPolicy < 3) { 
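A sketch for the new containsSyntheticMedia flag on VideoStatus; the Video/videos.update call reflects the library's usual surface and is an assumption here, only the flag itself comes from the change above:

```dart
import 'package:googleapis/youtube/v3.dart';

/// Marks a video as containing altered or synthetic media and pushes the
/// status part back to the API.
Future<Video> markAsSyntheticMedia(YouTubeApi youtube, Video video) {
  video.status ??= VideoStatus();
  video.status!.containsSyntheticMedia = true;
  return youtube.videos.update(video, ['status']);
}
```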
checkIngressFrom(o.ingressFrom!); checkIngressTo(o.ingressTo!); + unittest.expect( + o.title!, + unittest.equals('foo'), + ); } buildCounterIngressPolicy--; } @@ -1689,42 +1699,6 @@ void checkPolicy(api.Policy o) { buildCounterPolicy--; } -core.int buildCounterReauthSettings = 0; -api.ReauthSettings buildReauthSettings() { - final o = api.ReauthSettings(); - buildCounterReauthSettings++; - if (buildCounterReauthSettings < 3) { - o.maxInactivity = 'foo'; - o.reauthMethod = 'foo'; - o.sessionLength = 'foo'; - o.sessionLengthEnabled = true; - o.useOidcMaxAge = true; - } - buildCounterReauthSettings--; - return o; -} - -void checkReauthSettings(api.ReauthSettings o) { - buildCounterReauthSettings++; - if (buildCounterReauthSettings < 3) { - unittest.expect( - o.maxInactivity!, - unittest.equals('foo'), - ); - unittest.expect( - o.reauthMethod!, - unittest.equals('foo'), - ); - unittest.expect( - o.sessionLength!, - unittest.equals('foo'), - ); - unittest.expect(o.sessionLengthEnabled!, unittest.isTrue); - unittest.expect(o.useOidcMaxAge!, unittest.isTrue); - } - buildCounterReauthSettings--; -} - core.List buildUnnamed40() => [ buildAccessLevel(), buildAccessLevel(), @@ -1825,6 +1799,7 @@ api.ServicePerimeter buildServicePerimeter() { buildCounterServicePerimeter++; if (buildCounterServicePerimeter < 3) { o.description = 'foo'; + o.etag = 'foo'; o.name = 'foo'; o.perimeterType = 'foo'; o.spec = buildServicePerimeterConfig(); @@ -1843,6 +1818,10 @@ void checkServicePerimeter(api.ServicePerimeter o) { o.description!, unittest.equals('foo'), ); + unittest.expect( + o.etag!, + unittest.equals('foo'), + ); unittest.expect( o.name!, unittest.equals('foo'), @@ -1964,6 +1943,42 @@ void checkServicePerimeterConfig(api.ServicePerimeterConfig o) { buildCounterServicePerimeterConfig--; } +core.int buildCounterSessionSettings = 0; +api.SessionSettings buildSessionSettings() { + final o = api.SessionSettings(); + buildCounterSessionSettings++; + if (buildCounterSessionSettings < 3) { + o.maxInactivity = 'foo'; + o.sessionLength = 'foo'; + o.sessionLengthEnabled = true; + o.sessionReauthMethod = 'foo'; + o.useOidcMaxAge = true; + } + buildCounterSessionSettings--; + return o; +} + +void checkSessionSettings(api.SessionSettings o) { + buildCounterSessionSettings++; + if (buildCounterSessionSettings < 3) { + unittest.expect( + o.maxInactivity!, + unittest.equals('foo'), + ); + unittest.expect( + o.sessionLength!, + unittest.equals('foo'), + ); + unittest.expect(o.sessionLengthEnabled!, unittest.isTrue); + unittest.expect( + o.sessionReauthMethod!, + unittest.equals('foo'), + ); + unittest.expect(o.useOidcMaxAge!, unittest.isTrue); + } + buildCounterSessionSettings--; +} + core.int buildCounterSetIamPolicyRequest = 0; api.SetIamPolicyRequest buildSetIamPolicyRequest() { final o = api.SetIamPolicyRequest(); @@ -2708,16 +2723,6 @@ void main() { }); }); - unittest.group('obj-schema-ReauthSettings', () { - unittest.test('to-json--from-json', () async { - final o = buildReauthSettings(); - final oJson = convert.jsonDecode(convert.jsonEncode(o)); - final od = api.ReauthSettings.fromJson( - oJson as core.Map); - checkReauthSettings(od); - }); - }); - unittest.group('obj-schema-ReplaceAccessLevelsRequest', () { unittest.test('to-json--from-json', () async { final o = buildReplaceAccessLevelsRequest(); @@ -2768,6 +2773,16 @@ void main() { }); }); + unittest.group('obj-schema-SessionSettings', () { + unittest.test('to-json--from-json', () async { + final o = buildSessionSettings(); + final oJson = 
convert.jsonDecode(convert.jsonEncode(o)); + final od = api.SessionSettings.fromJson( + oJson as core.Map); + checkSessionSettings(od); + }); + }); + unittest.group('obj-schema-SetIamPolicyRequest', () { unittest.test('to-json--from-json', () async { final o = buildSetIamPolicyRequest(); diff --git a/generated/googleapis/test/admin/directory_v1_test.dart b/generated/googleapis/test/admin/directory_v1_test.dart index 3687e700a..b86912255 100644 --- a/generated/googleapis/test/admin/directory_v1_test.dart +++ b/generated/googleapis/test/admin/directory_v1_test.dart @@ -899,6 +899,33 @@ void checkBuildings(api.Buildings o) { buildCounterBuildings--; } +core.int buildCounterByteUsage = 0; +api.ByteUsage buildByteUsage() { + final o = api.ByteUsage(); + buildCounterByteUsage++; + if (buildCounterByteUsage < 3) { + o.capacityBytes = 'foo'; + o.usedBytes = 'foo'; + } + buildCounterByteUsage--; + return o; +} + +void checkByteUsage(api.ByteUsage o) { + buildCounterByteUsage++; + if (buildCounterByteUsage < 3) { + unittest.expect( + o.capacityBytes!, + unittest.equals('foo'), + ); + unittest.expect( + o.usedBytes!, + unittest.equals('foo'), + ); + } + buildCounterByteUsage--; +} + core.int buildCounterCalendarResource = 0; api.CalendarResource buildCalendarResource() { final o = api.CalendarResource(); @@ -1829,6 +1856,7 @@ api.ChromeOsDevice buildChromeOsDevice() { o.deviceFiles = buildUnnamed29(); o.deviceId = 'foo'; o.deviceLicenseType = 'foo'; + o.diskSpaceUsage = buildByteUsage(); o.diskVolumeReports = buildUnnamed31(); o.dockMacAddress = 'foo'; o.etag = 'foo'; @@ -1918,6 +1946,7 @@ void checkChromeOsDevice(api.ChromeOsDevice o) { o.deviceLicenseType!, unittest.equals('foo'), ); + checkByteUsage(o.diskSpaceUsage!); checkUnnamed31(o.diskVolumeReports!); unittest.expect( o.dockMacAddress!, @@ -4064,6 +4093,7 @@ api.RoleAssignment buildRoleAssignment() { if (buildCounterRoleAssignment < 3) { o.assignedTo = 'foo'; o.assigneeType = 'foo'; + o.condition = 'foo'; o.etag = 'foo'; o.kind = 'foo'; o.orgUnitId = 'foo'; @@ -4086,6 +4116,10 @@ void checkRoleAssignment(api.RoleAssignment o) { o.assigneeType!, unittest.equals('foo'), ); + unittest.expect( + o.condition!, + unittest.equals('foo'), + ); unittest.expect( o.etag!, unittest.equals('foo'), @@ -5542,6 +5576,16 @@ void main() { }); }); + unittest.group('obj-schema-ByteUsage', () { + unittest.test('to-json--from-json', () async { + final o = buildByteUsage(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.ByteUsage.fromJson(oJson as core.Map); + checkByteUsage(od); + }); + }); + unittest.group('obj-schema-CalendarResource', () { unittest.test('to-json--from-json', () async { final o = buildCalendarResource(); diff --git a/generated/googleapis/test/aiplatform/v1_test.dart b/generated/googleapis/test/aiplatform/v1_test.dart index 368ab5932..16e171749 100644 --- a/generated/googleapis/test/aiplatform/v1_test.dart +++ b/generated/googleapis/test/aiplatform/v1_test.dart @@ -480,6 +480,49 @@ void checkGoogleCloudAiplatformV1AnnotationSpec( buildCounterGoogleCloudAiplatformV1AnnotationSpec--; } +core.int buildCounterGoogleCloudAiplatformV1ApiAuth = 0; +api.GoogleCloudAiplatformV1ApiAuth buildGoogleCloudAiplatformV1ApiAuth() { + final o = api.GoogleCloudAiplatformV1ApiAuth(); + buildCounterGoogleCloudAiplatformV1ApiAuth++; + if (buildCounterGoogleCloudAiplatformV1ApiAuth < 3) { + o.apiKeyConfig = buildGoogleCloudAiplatformV1ApiAuthApiKeyConfig(); + } + buildCounterGoogleCloudAiplatformV1ApiAuth--; + return o; +} + +void 
checkGoogleCloudAiplatformV1ApiAuth(api.GoogleCloudAiplatformV1ApiAuth o) { + buildCounterGoogleCloudAiplatformV1ApiAuth++; + if (buildCounterGoogleCloudAiplatformV1ApiAuth < 3) { + checkGoogleCloudAiplatformV1ApiAuthApiKeyConfig(o.apiKeyConfig!); + } + buildCounterGoogleCloudAiplatformV1ApiAuth--; +} + +core.int buildCounterGoogleCloudAiplatformV1ApiAuthApiKeyConfig = 0; +api.GoogleCloudAiplatformV1ApiAuthApiKeyConfig + buildGoogleCloudAiplatformV1ApiAuthApiKeyConfig() { + final o = api.GoogleCloudAiplatformV1ApiAuthApiKeyConfig(); + buildCounterGoogleCloudAiplatformV1ApiAuthApiKeyConfig++; + if (buildCounterGoogleCloudAiplatformV1ApiAuthApiKeyConfig < 3) { + o.apiKeySecretVersion = 'foo'; + } + buildCounterGoogleCloudAiplatformV1ApiAuthApiKeyConfig--; + return o; +} + +void checkGoogleCloudAiplatformV1ApiAuthApiKeyConfig( + api.GoogleCloudAiplatformV1ApiAuthApiKeyConfig o) { + buildCounterGoogleCloudAiplatformV1ApiAuthApiKeyConfig++; + if (buildCounterGoogleCloudAiplatformV1ApiAuthApiKeyConfig < 3) { + unittest.expect( + o.apiKeySecretVersion!, + unittest.equals('foo'), + ); + } + buildCounterGoogleCloudAiplatformV1ApiAuthApiKeyConfig--; +} + core.Map buildUnnamed7() => { 'x': 'foo', 'y': 'foo', @@ -727,6 +770,116 @@ void checkGoogleCloudAiplatformV1Attribution( buildCounterGoogleCloudAiplatformV1Attribution--; } +core.List buildUnnamed10() => [ + buildGoogleCloudAiplatformV1Content(), + buildGoogleCloudAiplatformV1Content(), + ]; + +void checkUnnamed10(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkGoogleCloudAiplatformV1Content(o[0]); + checkGoogleCloudAiplatformV1Content(o[1]); +} + +core.int buildCounterGoogleCloudAiplatformV1AugmentPromptRequest = 0; +api.GoogleCloudAiplatformV1AugmentPromptRequest + buildGoogleCloudAiplatformV1AugmentPromptRequest() { + final o = api.GoogleCloudAiplatformV1AugmentPromptRequest(); + buildCounterGoogleCloudAiplatformV1AugmentPromptRequest++; + if (buildCounterGoogleCloudAiplatformV1AugmentPromptRequest < 3) { + o.contents = buildUnnamed10(); + o.model = buildGoogleCloudAiplatformV1AugmentPromptRequestModel(); + o.vertexRagStore = buildGoogleCloudAiplatformV1VertexRagStore(); + } + buildCounterGoogleCloudAiplatformV1AugmentPromptRequest--; + return o; +} + +void checkGoogleCloudAiplatformV1AugmentPromptRequest( + api.GoogleCloudAiplatformV1AugmentPromptRequest o) { + buildCounterGoogleCloudAiplatformV1AugmentPromptRequest++; + if (buildCounterGoogleCloudAiplatformV1AugmentPromptRequest < 3) { + checkUnnamed10(o.contents!); + checkGoogleCloudAiplatformV1AugmentPromptRequestModel(o.model!); + checkGoogleCloudAiplatformV1VertexRagStore(o.vertexRagStore!); + } + buildCounterGoogleCloudAiplatformV1AugmentPromptRequest--; +} + +core.int buildCounterGoogleCloudAiplatformV1AugmentPromptRequestModel = 0; +api.GoogleCloudAiplatformV1AugmentPromptRequestModel + buildGoogleCloudAiplatformV1AugmentPromptRequestModel() { + final o = api.GoogleCloudAiplatformV1AugmentPromptRequestModel(); + buildCounterGoogleCloudAiplatformV1AugmentPromptRequestModel++; + if (buildCounterGoogleCloudAiplatformV1AugmentPromptRequestModel < 3) { + o.model = 'foo'; + o.modelVersion = 'foo'; + } + buildCounterGoogleCloudAiplatformV1AugmentPromptRequestModel--; + return o; +} + +void checkGoogleCloudAiplatformV1AugmentPromptRequestModel( + api.GoogleCloudAiplatformV1AugmentPromptRequestModel o) { + buildCounterGoogleCloudAiplatformV1AugmentPromptRequestModel++; + if (buildCounterGoogleCloudAiplatformV1AugmentPromptRequestModel < 3) { + unittest.expect( + o.model!, + 
unittest.equals('foo'), + ); + unittest.expect( + o.modelVersion!, + unittest.equals('foo'), + ); + } + buildCounterGoogleCloudAiplatformV1AugmentPromptRequestModel--; +} + +core.List buildUnnamed11() => [ + buildGoogleCloudAiplatformV1Content(), + buildGoogleCloudAiplatformV1Content(), + ]; + +void checkUnnamed11(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkGoogleCloudAiplatformV1Content(o[0]); + checkGoogleCloudAiplatformV1Content(o[1]); +} + +core.List buildUnnamed12() => [ + buildGoogleCloudAiplatformV1Fact(), + buildGoogleCloudAiplatformV1Fact(), + ]; + +void checkUnnamed12(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkGoogleCloudAiplatformV1Fact(o[0]); + checkGoogleCloudAiplatformV1Fact(o[1]); +} + +core.int buildCounterGoogleCloudAiplatformV1AugmentPromptResponse = 0; +api.GoogleCloudAiplatformV1AugmentPromptResponse + buildGoogleCloudAiplatformV1AugmentPromptResponse() { + final o = api.GoogleCloudAiplatformV1AugmentPromptResponse(); + buildCounterGoogleCloudAiplatformV1AugmentPromptResponse++; + if (buildCounterGoogleCloudAiplatformV1AugmentPromptResponse < 3) { + o.augmentedPrompt = buildUnnamed11(); + o.facts = buildUnnamed12(); + } + buildCounterGoogleCloudAiplatformV1AugmentPromptResponse--; + return o; +} + +void checkGoogleCloudAiplatformV1AugmentPromptResponse( + api.GoogleCloudAiplatformV1AugmentPromptResponse o) { + buildCounterGoogleCloudAiplatformV1AugmentPromptResponse++; + if (buildCounterGoogleCloudAiplatformV1AugmentPromptResponse < 3) { + checkUnnamed11(o.augmentedPrompt!); + checkUnnamed12(o.facts!); + } + buildCounterGoogleCloudAiplatformV1AugmentPromptResponse--; +} + core.int buildCounterGoogleCloudAiplatformV1AutomaticResources = 0; api.GoogleCloudAiplatformV1AutomaticResources buildGoogleCloudAiplatformV1AutomaticResources() { @@ -805,12 +958,12 @@ void checkGoogleCloudAiplatformV1AvroSource( buildCounterGoogleCloudAiplatformV1AvroSource--; } -core.List buildUnnamed10() => [ +core.List buildUnnamed13() => [ 'foo', 'foo', ]; -void checkUnnamed10(core.List o) { +void checkUnnamed13(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -828,7 +981,7 @@ api.GoogleCloudAiplatformV1BatchCancelPipelineJobsRequest final o = api.GoogleCloudAiplatformV1BatchCancelPipelineJobsRequest(); buildCounterGoogleCloudAiplatformV1BatchCancelPipelineJobsRequest++; if (buildCounterGoogleCloudAiplatformV1BatchCancelPipelineJobsRequest < 3) { - o.names = buildUnnamed10(); + o.names = buildUnnamed13(); } buildCounterGoogleCloudAiplatformV1BatchCancelPipelineJobsRequest--; return o; @@ -838,17 +991,17 @@ void checkGoogleCloudAiplatformV1BatchCancelPipelineJobsRequest( api.GoogleCloudAiplatformV1BatchCancelPipelineJobsRequest o) { buildCounterGoogleCloudAiplatformV1BatchCancelPipelineJobsRequest++; if (buildCounterGoogleCloudAiplatformV1BatchCancelPipelineJobsRequest < 3) { - checkUnnamed10(o.names!); + checkUnnamed13(o.names!); } buildCounterGoogleCloudAiplatformV1BatchCancelPipelineJobsRequest--; } -core.List buildUnnamed11() => [ +core.List buildUnnamed14() => [ buildGoogleCloudAiplatformV1CreateFeatureRequest(), buildGoogleCloudAiplatformV1CreateFeatureRequest(), ]; -void checkUnnamed11( +void checkUnnamed14( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1CreateFeatureRequest(o[0]); @@ -861,7 +1014,7 @@ api.GoogleCloudAiplatformV1BatchCreateFeaturesRequest final o = api.GoogleCloudAiplatformV1BatchCreateFeaturesRequest(); 
buildCounterGoogleCloudAiplatformV1BatchCreateFeaturesRequest++; if (buildCounterGoogleCloudAiplatformV1BatchCreateFeaturesRequest < 3) { - o.requests = buildUnnamed11(); + o.requests = buildUnnamed14(); } buildCounterGoogleCloudAiplatformV1BatchCreateFeaturesRequest--; return o; @@ -871,18 +1024,18 @@ void checkGoogleCloudAiplatformV1BatchCreateFeaturesRequest( api.GoogleCloudAiplatformV1BatchCreateFeaturesRequest o) { buildCounterGoogleCloudAiplatformV1BatchCreateFeaturesRequest++; if (buildCounterGoogleCloudAiplatformV1BatchCreateFeaturesRequest < 3) { - checkUnnamed11(o.requests!); + checkUnnamed14(o.requests!); } buildCounterGoogleCloudAiplatformV1BatchCreateFeaturesRequest--; } core.List - buildUnnamed12() => [ + buildUnnamed15() => [ buildGoogleCloudAiplatformV1CreateTensorboardRunRequest(), buildGoogleCloudAiplatformV1CreateTensorboardRunRequest(), ]; -void checkUnnamed12( +void checkUnnamed15( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1CreateTensorboardRunRequest(o[0]); @@ -897,7 +1050,7 @@ api.GoogleCloudAiplatformV1BatchCreateTensorboardRunsRequest buildCounterGoogleCloudAiplatformV1BatchCreateTensorboardRunsRequest++; if (buildCounterGoogleCloudAiplatformV1BatchCreateTensorboardRunsRequest < 3) { - o.requests = buildUnnamed12(); + o.requests = buildUnnamed15(); } buildCounterGoogleCloudAiplatformV1BatchCreateTensorboardRunsRequest--; return o; @@ -908,17 +1061,17 @@ void checkGoogleCloudAiplatformV1BatchCreateTensorboardRunsRequest( buildCounterGoogleCloudAiplatformV1BatchCreateTensorboardRunsRequest++; if (buildCounterGoogleCloudAiplatformV1BatchCreateTensorboardRunsRequest < 3) { - checkUnnamed12(o.requests!); + checkUnnamed15(o.requests!); } buildCounterGoogleCloudAiplatformV1BatchCreateTensorboardRunsRequest--; } -core.List buildUnnamed13() => [ +core.List buildUnnamed16() => [ buildGoogleCloudAiplatformV1TensorboardRun(), buildGoogleCloudAiplatformV1TensorboardRun(), ]; -void checkUnnamed13(core.List o) { +void checkUnnamed16(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1TensorboardRun(o[0]); checkGoogleCloudAiplatformV1TensorboardRun(o[1]); @@ -932,7 +1085,7 @@ api.GoogleCloudAiplatformV1BatchCreateTensorboardRunsResponse buildCounterGoogleCloudAiplatformV1BatchCreateTensorboardRunsResponse++; if (buildCounterGoogleCloudAiplatformV1BatchCreateTensorboardRunsResponse < 3) { - o.tensorboardRuns = buildUnnamed13(); + o.tensorboardRuns = buildUnnamed16(); } buildCounterGoogleCloudAiplatformV1BatchCreateTensorboardRunsResponse--; return o; @@ -943,18 +1096,18 @@ void checkGoogleCloudAiplatformV1BatchCreateTensorboardRunsResponse( buildCounterGoogleCloudAiplatformV1BatchCreateTensorboardRunsResponse++; if (buildCounterGoogleCloudAiplatformV1BatchCreateTensorboardRunsResponse < 3) { - checkUnnamed13(o.tensorboardRuns!); + checkUnnamed16(o.tensorboardRuns!); } buildCounterGoogleCloudAiplatformV1BatchCreateTensorboardRunsResponse--; } core.List - buildUnnamed14() => [ + buildUnnamed17() => [ buildGoogleCloudAiplatformV1CreateTensorboardTimeSeriesRequest(), buildGoogleCloudAiplatformV1CreateTensorboardTimeSeriesRequest(), ]; -void checkUnnamed14( +void checkUnnamed17( core.List o) { unittest.expect(o, unittest.hasLength(2)); @@ -972,7 +1125,7 @@ api.GoogleCloudAiplatformV1BatchCreateTensorboardTimeSeriesRequest buildCounterGoogleCloudAiplatformV1BatchCreateTensorboardTimeSeriesRequest++; if (buildCounterGoogleCloudAiplatformV1BatchCreateTensorboardTimeSeriesRequest < 3) { - o.requests = 
buildUnnamed14(); + o.requests = buildUnnamed17(); } buildCounterGoogleCloudAiplatformV1BatchCreateTensorboardTimeSeriesRequest--; return o; @@ -983,18 +1136,18 @@ void checkGoogleCloudAiplatformV1BatchCreateTensorboardTimeSeriesRequest( buildCounterGoogleCloudAiplatformV1BatchCreateTensorboardTimeSeriesRequest++; if (buildCounterGoogleCloudAiplatformV1BatchCreateTensorboardTimeSeriesRequest < 3) { - checkUnnamed14(o.requests!); + checkUnnamed17(o.requests!); } buildCounterGoogleCloudAiplatformV1BatchCreateTensorboardTimeSeriesRequest--; } -core.List buildUnnamed15() => +core.List buildUnnamed18() => [ buildGoogleCloudAiplatformV1TensorboardTimeSeries(), buildGoogleCloudAiplatformV1TensorboardTimeSeries(), ]; -void checkUnnamed15( +void checkUnnamed18( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1TensorboardTimeSeries(o[0]); @@ -1011,7 +1164,7 @@ api.GoogleCloudAiplatformV1BatchCreateTensorboardTimeSeriesResponse buildCounterGoogleCloudAiplatformV1BatchCreateTensorboardTimeSeriesResponse++; if (buildCounterGoogleCloudAiplatformV1BatchCreateTensorboardTimeSeriesResponse < 3) { - o.tensorboardTimeSeries = buildUnnamed15(); + o.tensorboardTimeSeries = buildUnnamed18(); } buildCounterGoogleCloudAiplatformV1BatchCreateTensorboardTimeSeriesResponse--; return o; @@ -1022,7 +1175,7 @@ void checkGoogleCloudAiplatformV1BatchCreateTensorboardTimeSeriesResponse( buildCounterGoogleCloudAiplatformV1BatchCreateTensorboardTimeSeriesResponse++; if (buildCounterGoogleCloudAiplatformV1BatchCreateTensorboardTimeSeriesResponse < 3) { - checkUnnamed15(o.tensorboardTimeSeries!); + checkUnnamed18(o.tensorboardTimeSeries!); } buildCounterGoogleCloudAiplatformV1BatchCreateTensorboardTimeSeriesResponse--; } @@ -1058,12 +1211,12 @@ void checkGoogleCloudAiplatformV1BatchDedicatedResources( buildCounterGoogleCloudAiplatformV1BatchDedicatedResources--; } -core.List buildUnnamed16() => [ +core.List buildUnnamed19() => [ 'foo', 'foo', ]; -void checkUnnamed16(core.List o) { +void checkUnnamed19(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -1081,7 +1234,7 @@ api.GoogleCloudAiplatformV1BatchDeletePipelineJobsRequest final o = api.GoogleCloudAiplatformV1BatchDeletePipelineJobsRequest(); buildCounterGoogleCloudAiplatformV1BatchDeletePipelineJobsRequest++; if (buildCounterGoogleCloudAiplatformV1BatchDeletePipelineJobsRequest < 3) { - o.names = buildUnnamed16(); + o.names = buildUnnamed19(); } buildCounterGoogleCloudAiplatformV1BatchDeletePipelineJobsRequest--; return o; @@ -1091,17 +1244,17 @@ void checkGoogleCloudAiplatformV1BatchDeletePipelineJobsRequest( api.GoogleCloudAiplatformV1BatchDeletePipelineJobsRequest o) { buildCounterGoogleCloudAiplatformV1BatchDeletePipelineJobsRequest++; if (buildCounterGoogleCloudAiplatformV1BatchDeletePipelineJobsRequest < 3) { - checkUnnamed16(o.names!); + checkUnnamed19(o.names!); } buildCounterGoogleCloudAiplatformV1BatchDeletePipelineJobsRequest--; } -core.List buildUnnamed17() => [ +core.List buildUnnamed20() => [ buildGoogleCloudAiplatformV1EvaluatedAnnotation(), buildGoogleCloudAiplatformV1EvaluatedAnnotation(), ]; -void checkUnnamed17( +void checkUnnamed20( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1EvaluatedAnnotation(o[0]); @@ -1117,7 +1270,7 @@ api.GoogleCloudAiplatformV1BatchImportEvaluatedAnnotationsRequest buildCounterGoogleCloudAiplatformV1BatchImportEvaluatedAnnotationsRequest++; if (buildCounterGoogleCloudAiplatformV1BatchImportEvaluatedAnnotationsRequest 
< 3) { - o.evaluatedAnnotations = buildUnnamed17(); + o.evaluatedAnnotations = buildUnnamed20(); } buildCounterGoogleCloudAiplatformV1BatchImportEvaluatedAnnotationsRequest--; return o; @@ -1128,7 +1281,7 @@ void checkGoogleCloudAiplatformV1BatchImportEvaluatedAnnotationsRequest( buildCounterGoogleCloudAiplatformV1BatchImportEvaluatedAnnotationsRequest++; if (buildCounterGoogleCloudAiplatformV1BatchImportEvaluatedAnnotationsRequest < 3) { - checkUnnamed17(o.evaluatedAnnotations!); + checkUnnamed20(o.evaluatedAnnotations!); } buildCounterGoogleCloudAiplatformV1BatchImportEvaluatedAnnotationsRequest--; } @@ -1162,13 +1315,13 @@ void checkGoogleCloudAiplatformV1BatchImportEvaluatedAnnotationsResponse( buildCounterGoogleCloudAiplatformV1BatchImportEvaluatedAnnotationsResponse--; } -core.List buildUnnamed18() => +core.List buildUnnamed21() => [ buildGoogleCloudAiplatformV1MigrateResourceRequest(), buildGoogleCloudAiplatformV1MigrateResourceRequest(), ]; -void checkUnnamed18( +void checkUnnamed21( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1MigrateResourceRequest(o[0]); @@ -1181,7 +1334,7 @@ api.GoogleCloudAiplatformV1BatchMigrateResourcesRequest final o = api.GoogleCloudAiplatformV1BatchMigrateResourcesRequest(); buildCounterGoogleCloudAiplatformV1BatchMigrateResourcesRequest++; if (buildCounterGoogleCloudAiplatformV1BatchMigrateResourcesRequest < 3) { - o.migrateResourceRequests = buildUnnamed18(); + o.migrateResourceRequests = buildUnnamed21(); } buildCounterGoogleCloudAiplatformV1BatchMigrateResourcesRequest--; return o; @@ -1191,17 +1344,17 @@ void checkGoogleCloudAiplatformV1BatchMigrateResourcesRequest( api.GoogleCloudAiplatformV1BatchMigrateResourcesRequest o) { buildCounterGoogleCloudAiplatformV1BatchMigrateResourcesRequest++; if (buildCounterGoogleCloudAiplatformV1BatchMigrateResourcesRequest < 3) { - checkUnnamed18(o.migrateResourceRequests!); + checkUnnamed21(o.migrateResourceRequests!); } buildCounterGoogleCloudAiplatformV1BatchMigrateResourcesRequest--; } -core.Map buildUnnamed19() => { +core.Map buildUnnamed22() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed19(core.Map o) { +void checkUnnamed22(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -1213,12 +1366,12 @@ void checkUnnamed19(core.Map o) { ); } -core.List buildUnnamed20() => [ +core.List buildUnnamed23() => [ buildGoogleRpcStatus(), buildGoogleRpcStatus(), ]; -void checkUnnamed20(core.List o) { +void checkUnnamed23(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleRpcStatus(o[0]); checkGoogleRpcStatus(o[1]); @@ -1244,7 +1397,7 @@ api.GoogleCloudAiplatformV1BatchPredictionJob o.inputConfig = buildGoogleCloudAiplatformV1BatchPredictionJobInputConfig(); o.instanceConfig = buildGoogleCloudAiplatformV1BatchPredictionJobInstanceConfig(); - o.labels = buildUnnamed19(); + o.labels = buildUnnamed22(); o.manualBatchTuningParameters = buildGoogleCloudAiplatformV1ManualBatchTuningParameters(); o.model = 'foo'; @@ -1258,7 +1411,7 @@ api.GoogleCloudAiplatformV1BatchPredictionJob o.outputConfig = buildGoogleCloudAiplatformV1BatchPredictionJobOutputConfig(); o.outputInfo = buildGoogleCloudAiplatformV1BatchPredictionJobOutputInfo(); - o.partialFailures = buildUnnamed20(); + o.partialFailures = buildUnnamed23(); o.resourcesConsumed = buildGoogleCloudAiplatformV1ResourcesConsumed(); o.satisfiesPzi = true; o.satisfiesPzs = true; @@ -1299,7 +1452,7 @@ void checkGoogleCloudAiplatformV1BatchPredictionJob( 
checkGoogleCloudAiplatformV1BatchPredictionJobInputConfig(o.inputConfig!); checkGoogleCloudAiplatformV1BatchPredictionJobInstanceConfig( o.instanceConfig!); - checkUnnamed19(o.labels!); + checkUnnamed22(o.labels!); checkGoogleCloudAiplatformV1ManualBatchTuningParameters( o.manualBatchTuningParameters!); unittest.expect( @@ -1330,7 +1483,7 @@ void checkGoogleCloudAiplatformV1BatchPredictionJob( ); checkGoogleCloudAiplatformV1BatchPredictionJobOutputConfig(o.outputConfig!); checkGoogleCloudAiplatformV1BatchPredictionJobOutputInfo(o.outputInfo!); - checkUnnamed20(o.partialFailures!); + checkUnnamed23(o.partialFailures!); checkGoogleCloudAiplatformV1ResourcesConsumed(o.resourcesConsumed!); unittest.expect(o.satisfiesPzi!, unittest.isTrue); unittest.expect(o.satisfiesPzs!, unittest.isTrue); @@ -1384,12 +1537,12 @@ void checkGoogleCloudAiplatformV1BatchPredictionJobInputConfig( buildCounterGoogleCloudAiplatformV1BatchPredictionJobInputConfig--; } -core.List buildUnnamed21() => [ +core.List buildUnnamed24() => [ 'foo', 'foo', ]; -void checkUnnamed21(core.List o) { +void checkUnnamed24(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -1401,12 +1554,12 @@ void checkUnnamed21(core.List o) { ); } -core.List buildUnnamed22() => [ +core.List buildUnnamed25() => [ 'foo', 'foo', ]; -void checkUnnamed22(core.List o) { +void checkUnnamed25(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -1425,8 +1578,8 @@ api.GoogleCloudAiplatformV1BatchPredictionJobInstanceConfig final o = api.GoogleCloudAiplatformV1BatchPredictionJobInstanceConfig(); buildCounterGoogleCloudAiplatformV1BatchPredictionJobInstanceConfig++; if (buildCounterGoogleCloudAiplatformV1BatchPredictionJobInstanceConfig < 3) { - o.excludedFields = buildUnnamed21(); - o.includedFields = buildUnnamed22(); + o.excludedFields = buildUnnamed24(); + o.includedFields = buildUnnamed25(); o.instanceType = 'foo'; o.keyField = 'foo'; } @@ -1438,8 +1591,8 @@ void checkGoogleCloudAiplatformV1BatchPredictionJobInstanceConfig( api.GoogleCloudAiplatformV1BatchPredictionJobInstanceConfig o) { buildCounterGoogleCloudAiplatformV1BatchPredictionJobInstanceConfig++; if (buildCounterGoogleCloudAiplatformV1BatchPredictionJobInstanceConfig < 3) { - checkUnnamed21(o.excludedFields!); - checkUnnamed22(o.includedFields!); + checkUnnamed24(o.excludedFields!); + checkUnnamed25(o.includedFields!); unittest.expect( o.instanceType!, unittest.equals('foo'), @@ -1516,12 +1669,12 @@ void checkGoogleCloudAiplatformV1BatchPredictionJobOutputInfo( core.List< api.GoogleCloudAiplatformV1BatchReadFeatureValuesRequestEntityTypeSpec> - buildUnnamed23() => [ + buildUnnamed26() => [ buildGoogleCloudAiplatformV1BatchReadFeatureValuesRequestEntityTypeSpec(), buildGoogleCloudAiplatformV1BatchReadFeatureValuesRequestEntityTypeSpec(), ]; -void checkUnnamed23( +void checkUnnamed26( core.List< api .GoogleCloudAiplatformV1BatchReadFeatureValuesRequestEntityTypeSpec> @@ -1534,12 +1687,12 @@ void checkUnnamed23( core.List< api .GoogleCloudAiplatformV1BatchReadFeatureValuesRequestPassThroughField> - buildUnnamed24() => [ + buildUnnamed27() => [ buildGoogleCloudAiplatformV1BatchReadFeatureValuesRequestPassThroughField(), buildGoogleCloudAiplatformV1BatchReadFeatureValuesRequestPassThroughField(), ]; -void checkUnnamed24( +void checkUnnamed27( core.List< api .GoogleCloudAiplatformV1BatchReadFeatureValuesRequestPassThroughField> @@ -1560,8 +1713,8 @@ api.GoogleCloudAiplatformV1BatchReadFeatureValuesRequest o.bigqueryReadInstances = 
buildGoogleCloudAiplatformV1BigQuerySource(); o.csvReadInstances = buildGoogleCloudAiplatformV1CsvSource(); o.destination = buildGoogleCloudAiplatformV1FeatureValueDestination(); - o.entityTypeSpecs = buildUnnamed23(); - o.passThroughFields = buildUnnamed24(); + o.entityTypeSpecs = buildUnnamed26(); + o.passThroughFields = buildUnnamed27(); o.startTime = 'foo'; } buildCounterGoogleCloudAiplatformV1BatchReadFeatureValuesRequest--; @@ -1575,8 +1728,8 @@ void checkGoogleCloudAiplatformV1BatchReadFeatureValuesRequest( checkGoogleCloudAiplatformV1BigQuerySource(o.bigqueryReadInstances!); checkGoogleCloudAiplatformV1CsvSource(o.csvReadInstances!); checkGoogleCloudAiplatformV1FeatureValueDestination(o.destination!); - checkUnnamed23(o.entityTypeSpecs!); - checkUnnamed24(o.passThroughFields!); + checkUnnamed26(o.entityTypeSpecs!); + checkUnnamed27(o.passThroughFields!); unittest.expect( o.startTime!, unittest.equals('foo'), @@ -1586,12 +1739,12 @@ void checkGoogleCloudAiplatformV1BatchReadFeatureValuesRequest( } core.List - buildUnnamed25() => [ + buildUnnamed28() => [ buildGoogleCloudAiplatformV1DestinationFeatureSetting(), buildGoogleCloudAiplatformV1DestinationFeatureSetting(), ]; -void checkUnnamed25( +void checkUnnamed28( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1DestinationFeatureSetting(o[0]); @@ -1610,7 +1763,7 @@ api.GoogleCloudAiplatformV1BatchReadFeatureValuesRequestEntityTypeSpec 3) { o.entityTypeId = 'foo'; o.featureSelector = buildGoogleCloudAiplatformV1FeatureSelector(); - o.settings = buildUnnamed25(); + o.settings = buildUnnamed28(); } buildCounterGoogleCloudAiplatformV1BatchReadFeatureValuesRequestEntityTypeSpec--; return o; @@ -1626,7 +1779,7 @@ void checkGoogleCloudAiplatformV1BatchReadFeatureValuesRequestEntityTypeSpec( unittest.equals('foo'), ); checkGoogleCloudAiplatformV1FeatureSelector(o.featureSelector!); - checkUnnamed25(o.settings!); + checkUnnamed28(o.settings!); } buildCounterGoogleCloudAiplatformV1BatchReadFeatureValuesRequestEntityTypeSpec--; } @@ -1661,12 +1814,12 @@ void checkGoogleCloudAiplatformV1BatchReadFeatureValuesRequestPassThroughField( buildCounterGoogleCloudAiplatformV1BatchReadFeatureValuesRequestPassThroughField--; } -core.List buildUnnamed26() => [ +core.List buildUnnamed29() => [ buildGoogleCloudAiplatformV1TimeSeriesData(), buildGoogleCloudAiplatformV1TimeSeriesData(), ]; -void checkUnnamed26(core.List o) { +void checkUnnamed29(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1TimeSeriesData(o[0]); checkGoogleCloudAiplatformV1TimeSeriesData(o[1]); @@ -1682,7 +1835,7 @@ api.GoogleCloudAiplatformV1BatchReadTensorboardTimeSeriesDataResponse buildCounterGoogleCloudAiplatformV1BatchReadTensorboardTimeSeriesDataResponse++; if (buildCounterGoogleCloudAiplatformV1BatchReadTensorboardTimeSeriesDataResponse < 3) { - o.timeSeriesData = buildUnnamed26(); + o.timeSeriesData = buildUnnamed29(); } buildCounterGoogleCloudAiplatformV1BatchReadTensorboardTimeSeriesDataResponse--; return o; @@ -1693,7 +1846,7 @@ void checkGoogleCloudAiplatformV1BatchReadTensorboardTimeSeriesDataResponse( buildCounterGoogleCloudAiplatformV1BatchReadTensorboardTimeSeriesDataResponse++; if (buildCounterGoogleCloudAiplatformV1BatchReadTensorboardTimeSeriesDataResponse < 3) { - checkUnnamed26(o.timeSeriesData!); + checkUnnamed29(o.timeSeriesData!); } buildCounterGoogleCloudAiplatformV1BatchReadTensorboardTimeSeriesDataResponse--; } @@ -1746,12 +1899,12 @@ void checkGoogleCloudAiplatformV1BigQuerySource( 
buildCounterGoogleCloudAiplatformV1BigQuerySource--; } -core.List buildUnnamed27() => [ +core.List buildUnnamed30() => [ buildGoogleCloudAiplatformV1BleuInstance(), buildGoogleCloudAiplatformV1BleuInstance(), ]; -void checkUnnamed27(core.List o) { +void checkUnnamed30(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1BleuInstance(o[0]); checkGoogleCloudAiplatformV1BleuInstance(o[1]); @@ -1762,7 +1915,7 @@ api.GoogleCloudAiplatformV1BleuInput buildGoogleCloudAiplatformV1BleuInput() { final o = api.GoogleCloudAiplatformV1BleuInput(); buildCounterGoogleCloudAiplatformV1BleuInput++; if (buildCounterGoogleCloudAiplatformV1BleuInput < 3) { - o.instances = buildUnnamed27(); + o.instances = buildUnnamed30(); o.metricSpec = buildGoogleCloudAiplatformV1BleuSpec(); } buildCounterGoogleCloudAiplatformV1BleuInput--; @@ -1773,7 +1926,7 @@ void checkGoogleCloudAiplatformV1BleuInput( api.GoogleCloudAiplatformV1BleuInput o) { buildCounterGoogleCloudAiplatformV1BleuInput++; if (buildCounterGoogleCloudAiplatformV1BleuInput < 3) { - checkUnnamed27(o.instances!); + checkUnnamed30(o.instances!); checkGoogleCloudAiplatformV1BleuSpec(o.metricSpec!); } buildCounterGoogleCloudAiplatformV1BleuInput--; @@ -1832,12 +1985,12 @@ void checkGoogleCloudAiplatformV1BleuMetricValue( buildCounterGoogleCloudAiplatformV1BleuMetricValue--; } -core.List buildUnnamed28() => [ +core.List buildUnnamed31() => [ buildGoogleCloudAiplatformV1BleuMetricValue(), buildGoogleCloudAiplatformV1BleuMetricValue(), ]; -void checkUnnamed28(core.List o) { +void checkUnnamed31(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1BleuMetricValue(o[0]); checkGoogleCloudAiplatformV1BleuMetricValue(o[1]); @@ -1849,7 +2002,7 @@ api.GoogleCloudAiplatformV1BleuResults final o = api.GoogleCloudAiplatformV1BleuResults(); buildCounterGoogleCloudAiplatformV1BleuResults++; if (buildCounterGoogleCloudAiplatformV1BleuResults < 3) { - o.bleuMetricValues = buildUnnamed28(); + o.bleuMetricValues = buildUnnamed31(); } buildCounterGoogleCloudAiplatformV1BleuResults--; return o; @@ -1859,7 +2012,7 @@ void checkGoogleCloudAiplatformV1BleuResults( api.GoogleCloudAiplatformV1BleuResults o) { buildCounterGoogleCloudAiplatformV1BleuResults++; if (buildCounterGoogleCloudAiplatformV1BleuResults < 3) { - checkUnnamed28(o.bleuMetricValues!); + checkUnnamed31(o.bleuMetricValues!); } buildCounterGoogleCloudAiplatformV1BleuResults--; } @@ -1935,12 +2088,12 @@ void checkGoogleCloudAiplatformV1BlurBaselineConfig( buildCounterGoogleCloudAiplatformV1BlurBaselineConfig--; } -core.List buildUnnamed29() => [ +core.List buildUnnamed32() => [ true, true, ]; -void checkUnnamed29(core.List o) { +void checkUnnamed32(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect(o[0], unittest.isTrue); unittest.expect(o[1], unittest.isTrue); @@ -1951,7 +2104,7 @@ api.GoogleCloudAiplatformV1BoolArray buildGoogleCloudAiplatformV1BoolArray() { final o = api.GoogleCloudAiplatformV1BoolArray(); buildCounterGoogleCloudAiplatformV1BoolArray++; if (buildCounterGoogleCloudAiplatformV1BoolArray < 3) { - o.values = buildUnnamed29(); + o.values = buildUnnamed32(); } buildCounterGoogleCloudAiplatformV1BoolArray--; return o; @@ -1961,11 +2114,167 @@ void checkGoogleCloudAiplatformV1BoolArray( api.GoogleCloudAiplatformV1BoolArray o) { buildCounterGoogleCloudAiplatformV1BoolArray++; if (buildCounterGoogleCloudAiplatformV1BoolArray < 3) { - checkUnnamed29(o.values!); + checkUnnamed32(o.values!); } 
buildCounterGoogleCloudAiplatformV1BoolArray--; } +core.int buildCounterGoogleCloudAiplatformV1CacheConfig = 0; +api.GoogleCloudAiplatformV1CacheConfig + buildGoogleCloudAiplatformV1CacheConfig() { + final o = api.GoogleCloudAiplatformV1CacheConfig(); + buildCounterGoogleCloudAiplatformV1CacheConfig++; + if (buildCounterGoogleCloudAiplatformV1CacheConfig < 3) { + o.disableCache = true; + o.name = 'foo'; + } + buildCounterGoogleCloudAiplatformV1CacheConfig--; + return o; +} + +void checkGoogleCloudAiplatformV1CacheConfig( + api.GoogleCloudAiplatformV1CacheConfig o) { + buildCounterGoogleCloudAiplatformV1CacheConfig++; + if (buildCounterGoogleCloudAiplatformV1CacheConfig < 3) { + unittest.expect(o.disableCache!, unittest.isTrue); + unittest.expect( + o.name!, + unittest.equals('foo'), + ); + } + buildCounterGoogleCloudAiplatformV1CacheConfig--; +} + +core.List buildUnnamed33() => [ + buildGoogleCloudAiplatformV1Content(), + buildGoogleCloudAiplatformV1Content(), + ]; + +void checkUnnamed33(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkGoogleCloudAiplatformV1Content(o[0]); + checkGoogleCloudAiplatformV1Content(o[1]); +} + +core.List buildUnnamed34() => [ + buildGoogleCloudAiplatformV1Tool(), + buildGoogleCloudAiplatformV1Tool(), + ]; + +void checkUnnamed34(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkGoogleCloudAiplatformV1Tool(o[0]); + checkGoogleCloudAiplatformV1Tool(o[1]); +} + +core.int buildCounterGoogleCloudAiplatformV1CachedContent = 0; +api.GoogleCloudAiplatformV1CachedContent + buildGoogleCloudAiplatformV1CachedContent() { + final o = api.GoogleCloudAiplatformV1CachedContent(); + buildCounterGoogleCloudAiplatformV1CachedContent++; + if (buildCounterGoogleCloudAiplatformV1CachedContent < 3) { + o.contents = buildUnnamed33(); + o.createTime = 'foo'; + o.displayName = 'foo'; + o.expireTime = 'foo'; + o.model = 'foo'; + o.name = 'foo'; + o.systemInstruction = buildGoogleCloudAiplatformV1Content(); + o.toolConfig = buildGoogleCloudAiplatformV1ToolConfig(); + o.tools = buildUnnamed34(); + o.ttl = 'foo'; + o.updateTime = 'foo'; + o.usageMetadata = buildGoogleCloudAiplatformV1CachedContentUsageMetadata(); + } + buildCounterGoogleCloudAiplatformV1CachedContent--; + return o; +} + +void checkGoogleCloudAiplatformV1CachedContent( + api.GoogleCloudAiplatformV1CachedContent o) { + buildCounterGoogleCloudAiplatformV1CachedContent++; + if (buildCounterGoogleCloudAiplatformV1CachedContent < 3) { + checkUnnamed33(o.contents!); + unittest.expect( + o.createTime!, + unittest.equals('foo'), + ); + unittest.expect( + o.displayName!, + unittest.equals('foo'), + ); + unittest.expect( + o.expireTime!, + unittest.equals('foo'), + ); + unittest.expect( + o.model!, + unittest.equals('foo'), + ); + unittest.expect( + o.name!, + unittest.equals('foo'), + ); + checkGoogleCloudAiplatformV1Content(o.systemInstruction!); + checkGoogleCloudAiplatformV1ToolConfig(o.toolConfig!); + checkUnnamed34(o.tools!); + unittest.expect( + o.ttl!, + unittest.equals('foo'), + ); + unittest.expect( + o.updateTime!, + unittest.equals('foo'), + ); + checkGoogleCloudAiplatformV1CachedContentUsageMetadata(o.usageMetadata!); + } + buildCounterGoogleCloudAiplatformV1CachedContent--; +} + +core.int buildCounterGoogleCloudAiplatformV1CachedContentUsageMetadata = 0; +api.GoogleCloudAiplatformV1CachedContentUsageMetadata + buildGoogleCloudAiplatformV1CachedContentUsageMetadata() { + final o = api.GoogleCloudAiplatformV1CachedContentUsageMetadata(); + 
buildCounterGoogleCloudAiplatformV1CachedContentUsageMetadata++; + if (buildCounterGoogleCloudAiplatformV1CachedContentUsageMetadata < 3) { + o.audioDurationSeconds = 42; + o.imageCount = 42; + o.textCount = 42; + o.totalTokenCount = 42; + o.videoDurationSeconds = 42; + } + buildCounterGoogleCloudAiplatformV1CachedContentUsageMetadata--; + return o; +} + +void checkGoogleCloudAiplatformV1CachedContentUsageMetadata( + api.GoogleCloudAiplatformV1CachedContentUsageMetadata o) { + buildCounterGoogleCloudAiplatformV1CachedContentUsageMetadata++; + if (buildCounterGoogleCloudAiplatformV1CachedContentUsageMetadata < 3) { + unittest.expect( + o.audioDurationSeconds!, + unittest.equals(42), + ); + unittest.expect( + o.imageCount!, + unittest.equals(42), + ); + unittest.expect( + o.textCount!, + unittest.equals(42), + ); + unittest.expect( + o.totalTokenCount!, + unittest.equals(42), + ); + unittest.expect( + o.videoDurationSeconds!, + unittest.equals(42), + ); + } + buildCounterGoogleCloudAiplatformV1CachedContentUsageMetadata--; +} + core.int buildCounterGoogleCloudAiplatformV1CancelBatchPredictionJobRequest = 0; api.GoogleCloudAiplatformV1CancelBatchPredictionJobRequest buildGoogleCloudAiplatformV1CancelBatchPredictionJobRequest() { @@ -2105,12 +2414,12 @@ void checkGoogleCloudAiplatformV1CancelTuningJobRequest( buildCounterGoogleCloudAiplatformV1CancelTuningJobRequest--; } -core.List buildUnnamed30() => [ +core.List buildUnnamed35() => [ buildGoogleCloudAiplatformV1SafetyRating(), buildGoogleCloudAiplatformV1SafetyRating(), ]; -void checkUnnamed30(core.List o) { +void checkUnnamed35(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1SafetyRating(o[0]); checkGoogleCloudAiplatformV1SafetyRating(o[1]); @@ -2129,7 +2438,7 @@ api.GoogleCloudAiplatformV1Candidate buildGoogleCloudAiplatformV1Candidate() { o.groundingMetadata = buildGoogleCloudAiplatformV1GroundingMetadata(); o.index = 42; o.logprobsResult = buildGoogleCloudAiplatformV1LogprobsResult(); - o.safetyRatings = buildUnnamed30(); + o.safetyRatings = buildUnnamed35(); } buildCounterGoogleCloudAiplatformV1Candidate--; return o; @@ -2159,7 +2468,7 @@ void checkGoogleCloudAiplatformV1Candidate( unittest.equals(42), ); checkGoogleCloudAiplatformV1LogprobsResult(o.logprobsResult!); - checkUnnamed30(o.safetyRatings!); + checkUnnamed35(o.safetyRatings!); } buildCounterGoogleCloudAiplatformV1Candidate--; } @@ -2229,12 +2538,12 @@ void checkGoogleCloudAiplatformV1Citation( buildCounterGoogleCloudAiplatformV1Citation--; } -core.List buildUnnamed31() => [ +core.List buildUnnamed36() => [ buildGoogleCloudAiplatformV1Citation(), buildGoogleCloudAiplatformV1Citation(), ]; -void checkUnnamed31(core.List o) { +void checkUnnamed36(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1Citation(o[0]); checkGoogleCloudAiplatformV1Citation(o[1]); @@ -2246,7 +2555,7 @@ api.GoogleCloudAiplatformV1CitationMetadata final o = api.GoogleCloudAiplatformV1CitationMetadata(); buildCounterGoogleCloudAiplatformV1CitationMetadata++; if (buildCounterGoogleCloudAiplatformV1CitationMetadata < 3) { - o.citations = buildUnnamed31(); + o.citations = buildUnnamed36(); } buildCounterGoogleCloudAiplatformV1CitationMetadata--; return o; @@ -2256,11 +2565,86 @@ void checkGoogleCloudAiplatformV1CitationMetadata( api.GoogleCloudAiplatformV1CitationMetadata o) { buildCounterGoogleCloudAiplatformV1CitationMetadata++; if (buildCounterGoogleCloudAiplatformV1CitationMetadata < 3) { - checkUnnamed31(o.citations!); + 
checkUnnamed36(o.citations!); } buildCounterGoogleCloudAiplatformV1CitationMetadata--; } +core.List buildUnnamed37() => [ + 42, + 42, + ]; + +void checkUnnamed37(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o[0], + unittest.equals(42), + ); + unittest.expect( + o[1], + unittest.equals(42), + ); +} + +core.int buildCounterGoogleCloudAiplatformV1Claim = 0; +api.GoogleCloudAiplatformV1Claim buildGoogleCloudAiplatformV1Claim() { + final o = api.GoogleCloudAiplatformV1Claim(); + buildCounterGoogleCloudAiplatformV1Claim++; + if (buildCounterGoogleCloudAiplatformV1Claim < 3) { + o.endIndex = 42; + o.factIndexes = buildUnnamed37(); + o.score = 42.0; + o.startIndex = 42; + } + buildCounterGoogleCloudAiplatformV1Claim--; + return o; +} + +void checkGoogleCloudAiplatformV1Claim(api.GoogleCloudAiplatformV1Claim o) { + buildCounterGoogleCloudAiplatformV1Claim++; + if (buildCounterGoogleCloudAiplatformV1Claim < 3) { + unittest.expect( + o.endIndex!, + unittest.equals(42), + ); + checkUnnamed37(o.factIndexes!); + unittest.expect( + o.score!, + unittest.equals(42.0), + ); + unittest.expect( + o.startIndex!, + unittest.equals(42), + ); + } + buildCounterGoogleCloudAiplatformV1Claim--; +} + +core.int buildCounterGoogleCloudAiplatformV1ClientConnectionConfig = 0; +api.GoogleCloudAiplatformV1ClientConnectionConfig + buildGoogleCloudAiplatformV1ClientConnectionConfig() { + final o = api.GoogleCloudAiplatformV1ClientConnectionConfig(); + buildCounterGoogleCloudAiplatformV1ClientConnectionConfig++; + if (buildCounterGoogleCloudAiplatformV1ClientConnectionConfig < 3) { + o.inferenceTimeout = 'foo'; + } + buildCounterGoogleCloudAiplatformV1ClientConnectionConfig--; + return o; +} + +void checkGoogleCloudAiplatformV1ClientConnectionConfig( + api.GoogleCloudAiplatformV1ClientConnectionConfig o) { + buildCounterGoogleCloudAiplatformV1ClientConnectionConfig++; + if (buildCounterGoogleCloudAiplatformV1ClientConnectionConfig < 3) { + unittest.expect( + o.inferenceTimeout!, + unittest.equals('foo'), + ); + } + buildCounterGoogleCloudAiplatformV1ClientConnectionConfig--; +} + core.int buildCounterGoogleCloudAiplatformV1CoherenceInput = 0; api.GoogleCloudAiplatformV1CoherenceInput buildGoogleCloudAiplatformV1CoherenceInput() { @@ -2366,6 +2750,119 @@ void checkGoogleCloudAiplatformV1CoherenceSpec( buildCounterGoogleCloudAiplatformV1CoherenceSpec--; } +core.int buildCounterGoogleCloudAiplatformV1CometInput = 0; +api.GoogleCloudAiplatformV1CometInput buildGoogleCloudAiplatformV1CometInput() { + final o = api.GoogleCloudAiplatformV1CometInput(); + buildCounterGoogleCloudAiplatformV1CometInput++; + if (buildCounterGoogleCloudAiplatformV1CometInput < 3) { + o.instance = buildGoogleCloudAiplatformV1CometInstance(); + o.metricSpec = buildGoogleCloudAiplatformV1CometSpec(); + } + buildCounterGoogleCloudAiplatformV1CometInput--; + return o; +} + +void checkGoogleCloudAiplatformV1CometInput( + api.GoogleCloudAiplatformV1CometInput o) { + buildCounterGoogleCloudAiplatformV1CometInput++; + if (buildCounterGoogleCloudAiplatformV1CometInput < 3) { + checkGoogleCloudAiplatformV1CometInstance(o.instance!); + checkGoogleCloudAiplatformV1CometSpec(o.metricSpec!); + } + buildCounterGoogleCloudAiplatformV1CometInput--; +} + +core.int buildCounterGoogleCloudAiplatformV1CometInstance = 0; +api.GoogleCloudAiplatformV1CometInstance + buildGoogleCloudAiplatformV1CometInstance() { + final o = api.GoogleCloudAiplatformV1CometInstance(); + buildCounterGoogleCloudAiplatformV1CometInstance++; + if 
(buildCounterGoogleCloudAiplatformV1CometInstance < 3) { + o.prediction = 'foo'; + o.reference = 'foo'; + o.source = 'foo'; + } + buildCounterGoogleCloudAiplatformV1CometInstance--; + return o; +} + +void checkGoogleCloudAiplatformV1CometInstance( + api.GoogleCloudAiplatformV1CometInstance o) { + buildCounterGoogleCloudAiplatformV1CometInstance++; + if (buildCounterGoogleCloudAiplatformV1CometInstance < 3) { + unittest.expect( + o.prediction!, + unittest.equals('foo'), + ); + unittest.expect( + o.reference!, + unittest.equals('foo'), + ); + unittest.expect( + o.source!, + unittest.equals('foo'), + ); + } + buildCounterGoogleCloudAiplatformV1CometInstance--; +} + +core.int buildCounterGoogleCloudAiplatformV1CometResult = 0; +api.GoogleCloudAiplatformV1CometResult + buildGoogleCloudAiplatformV1CometResult() { + final o = api.GoogleCloudAiplatformV1CometResult(); + buildCounterGoogleCloudAiplatformV1CometResult++; + if (buildCounterGoogleCloudAiplatformV1CometResult < 3) { + o.score = 42.0; + } + buildCounterGoogleCloudAiplatformV1CometResult--; + return o; +} + +void checkGoogleCloudAiplatformV1CometResult( + api.GoogleCloudAiplatformV1CometResult o) { + buildCounterGoogleCloudAiplatformV1CometResult++; + if (buildCounterGoogleCloudAiplatformV1CometResult < 3) { + unittest.expect( + o.score!, + unittest.equals(42.0), + ); + } + buildCounterGoogleCloudAiplatformV1CometResult--; +} + +core.int buildCounterGoogleCloudAiplatformV1CometSpec = 0; +api.GoogleCloudAiplatformV1CometSpec buildGoogleCloudAiplatformV1CometSpec() { + final o = api.GoogleCloudAiplatformV1CometSpec(); + buildCounterGoogleCloudAiplatformV1CometSpec++; + if (buildCounterGoogleCloudAiplatformV1CometSpec < 3) { + o.sourceLanguage = 'foo'; + o.targetLanguage = 'foo'; + o.version = 'foo'; + } + buildCounterGoogleCloudAiplatformV1CometSpec--; + return o; +} + +void checkGoogleCloudAiplatformV1CometSpec( + api.GoogleCloudAiplatformV1CometSpec o) { + buildCounterGoogleCloudAiplatformV1CometSpec++; + if (buildCounterGoogleCloudAiplatformV1CometSpec < 3) { + unittest.expect( + o.sourceLanguage!, + unittest.equals('foo'), + ); + unittest.expect( + o.targetLanguage!, + unittest.equals('foo'), + ); + unittest.expect( + o.version!, + unittest.equals('foo'), + ); + } + buildCounterGoogleCloudAiplatformV1CometSpec--; +} + core.int buildCounterGoogleCloudAiplatformV1CompleteTrialRequest = 0; api.GoogleCloudAiplatformV1CompleteTrialRequest buildGoogleCloudAiplatformV1CompleteTrialRequest() { @@ -2433,18 +2930,18 @@ void checkGoogleCloudAiplatformV1CompletionStats( buildCounterGoogleCloudAiplatformV1CompletionStats--; } -core.List buildUnnamed32() => [ +core.List buildUnnamed38() => [ buildGoogleCloudAiplatformV1Content(), buildGoogleCloudAiplatformV1Content(), ]; -void checkUnnamed32(core.List o) { +void checkUnnamed38(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1Content(o[0]); checkGoogleCloudAiplatformV1Content(o[1]); } -core.List buildUnnamed33() => [ +core.List buildUnnamed39() => [ { 'list': [1, 2, 3], 'bool': true, @@ -2457,7 +2954,7 @@ core.List buildUnnamed33() => [ }, ]; -void checkUnnamed33(core.List o) { +void checkUnnamed39(core.List o) { unittest.expect(o, unittest.hasLength(2)); var casted8 = (o[0]) as core.Map; unittest.expect(casted8, unittest.hasLength(3)); @@ -2495,8 +2992,8 @@ api.GoogleCloudAiplatformV1ComputeTokensRequest final o = api.GoogleCloudAiplatformV1ComputeTokensRequest(); buildCounterGoogleCloudAiplatformV1ComputeTokensRequest++; if 
(buildCounterGoogleCloudAiplatformV1ComputeTokensRequest < 3) { - o.contents = buildUnnamed32(); - o.instances = buildUnnamed33(); + o.contents = buildUnnamed38(); + o.instances = buildUnnamed39(); o.model = 'foo'; } buildCounterGoogleCloudAiplatformV1ComputeTokensRequest--; @@ -2507,8 +3004,8 @@ void checkGoogleCloudAiplatformV1ComputeTokensRequest( api.GoogleCloudAiplatformV1ComputeTokensRequest o) { buildCounterGoogleCloudAiplatformV1ComputeTokensRequest++; if (buildCounterGoogleCloudAiplatformV1ComputeTokensRequest < 3) { - checkUnnamed32(o.contents!); - checkUnnamed33(o.instances!); + checkUnnamed38(o.contents!); + checkUnnamed39(o.instances!); unittest.expect( o.model!, unittest.equals('foo'), @@ -2517,12 +3014,12 @@ void checkGoogleCloudAiplatformV1ComputeTokensRequest( buildCounterGoogleCloudAiplatformV1ComputeTokensRequest--; } -core.List buildUnnamed34() => [ +core.List buildUnnamed40() => [ buildGoogleCloudAiplatformV1TokensInfo(), buildGoogleCloudAiplatformV1TokensInfo(), ]; -void checkUnnamed34(core.List o) { +void checkUnnamed40(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1TokensInfo(o[0]); checkGoogleCloudAiplatformV1TokensInfo(o[1]); @@ -2534,7 +3031,7 @@ api.GoogleCloudAiplatformV1ComputeTokensResponse final o = api.GoogleCloudAiplatformV1ComputeTokensResponse(); buildCounterGoogleCloudAiplatformV1ComputeTokensResponse++; if (buildCounterGoogleCloudAiplatformV1ComputeTokensResponse < 3) { - o.tokensInfo = buildUnnamed34(); + o.tokensInfo = buildUnnamed40(); } buildCounterGoogleCloudAiplatformV1ComputeTokensResponse--; return o; @@ -2544,7 +3041,7 @@ void checkGoogleCloudAiplatformV1ComputeTokensResponse( api.GoogleCloudAiplatformV1ComputeTokensResponse o) { buildCounterGoogleCloudAiplatformV1ComputeTokensResponse++; if (buildCounterGoogleCloudAiplatformV1ComputeTokensResponse < 3) { - checkUnnamed34(o.tokensInfo!); + checkUnnamed40(o.tokensInfo!); } buildCounterGoogleCloudAiplatformV1ComputeTokensResponse--; } @@ -2573,12 +3070,12 @@ void checkGoogleCloudAiplatformV1ContainerRegistryDestination( buildCounterGoogleCloudAiplatformV1ContainerRegistryDestination--; } -core.List buildUnnamed35() => [ +core.List buildUnnamed41() => [ 'foo', 'foo', ]; -void checkUnnamed35(core.List o) { +void checkUnnamed41(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -2590,12 +3087,12 @@ void checkUnnamed35(core.List o) { ); } -core.List buildUnnamed36() => [ +core.List buildUnnamed42() => [ 'foo', 'foo', ]; -void checkUnnamed36(core.List o) { +void checkUnnamed42(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -2607,12 +3104,12 @@ void checkUnnamed36(core.List o) { ); } -core.List buildUnnamed37() => [ +core.List buildUnnamed43() => [ buildGoogleCloudAiplatformV1EnvVar(), buildGoogleCloudAiplatformV1EnvVar(), ]; -void checkUnnamed37(core.List o) { +void checkUnnamed43(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1EnvVar(o[0]); checkGoogleCloudAiplatformV1EnvVar(o[1]); @@ -2624,9 +3121,9 @@ api.GoogleCloudAiplatformV1ContainerSpec final o = api.GoogleCloudAiplatformV1ContainerSpec(); buildCounterGoogleCloudAiplatformV1ContainerSpec++; if (buildCounterGoogleCloudAiplatformV1ContainerSpec < 3) { - o.args = buildUnnamed35(); - o.command = buildUnnamed36(); - o.env = buildUnnamed37(); + o.args = buildUnnamed41(); + o.command = buildUnnamed42(); + o.env = buildUnnamed43(); o.imageUri = 'foo'; } buildCounterGoogleCloudAiplatformV1ContainerSpec--; 
@@ -2637,9 +3134,9 @@ void checkGoogleCloudAiplatformV1ContainerSpec( api.GoogleCloudAiplatformV1ContainerSpec o) { buildCounterGoogleCloudAiplatformV1ContainerSpec++; if (buildCounterGoogleCloudAiplatformV1ContainerSpec < 3) { - checkUnnamed35(o.args!); - checkUnnamed36(o.command!); - checkUnnamed37(o.env!); + checkUnnamed41(o.args!); + checkUnnamed42(o.command!); + checkUnnamed43(o.env!); unittest.expect( o.imageUri!, unittest.equals('foo'), @@ -2648,12 +3145,12 @@ void checkGoogleCloudAiplatformV1ContainerSpec( buildCounterGoogleCloudAiplatformV1ContainerSpec--; } -core.List buildUnnamed38() => [ +core.List buildUnnamed44() => [ buildGoogleCloudAiplatformV1Part(), buildGoogleCloudAiplatformV1Part(), ]; -void checkUnnamed38(core.List o) { +void checkUnnamed44(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1Part(o[0]); checkGoogleCloudAiplatformV1Part(o[1]); @@ -2664,7 +3161,7 @@ api.GoogleCloudAiplatformV1Content buildGoogleCloudAiplatformV1Content() { final o = api.GoogleCloudAiplatformV1Content(); buildCounterGoogleCloudAiplatformV1Content++; if (buildCounterGoogleCloudAiplatformV1Content < 3) { - o.parts = buildUnnamed38(); + o.parts = buildUnnamed44(); o.role = 'foo'; } buildCounterGoogleCloudAiplatformV1Content--; @@ -2674,7 +3171,7 @@ api.GoogleCloudAiplatformV1Content buildGoogleCloudAiplatformV1Content() { void checkGoogleCloudAiplatformV1Content(api.GoogleCloudAiplatformV1Content o) { buildCounterGoogleCloudAiplatformV1Content++; if (buildCounterGoogleCloudAiplatformV1Content < 3) { - checkUnnamed38(o.parts!); + checkUnnamed44(o.parts!); unittest.expect( o.role!, unittest.equals('foo'), @@ -2683,12 +3180,12 @@ void checkGoogleCloudAiplatformV1Content(api.GoogleCloudAiplatformV1Content o) { buildCounterGoogleCloudAiplatformV1Content--; } -core.Map buildUnnamed39() => { +core.Map buildUnnamed45() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed39(core.Map o) { +void checkUnnamed45(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -2700,7 +3197,7 @@ void checkUnnamed39(core.Map o) { ); } -core.Map buildUnnamed40() => { +core.Map buildUnnamed46() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -2713,7 +3210,7 @@ core.Map buildUnnamed40() => { }, }; -void checkUnnamed40(core.Map o) { +void checkUnnamed46(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted10 = (o['x']!) 
as core.Map; unittest.expect(casted10, unittest.hasLength(3)); @@ -2745,12 +3242,12 @@ void checkUnnamed40(core.Map o) { ); } -core.List buildUnnamed41() => [ +core.List buildUnnamed47() => [ 'foo', 'foo', ]; -void checkUnnamed41(core.List o) { +void checkUnnamed47(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -2771,10 +3268,10 @@ api.GoogleCloudAiplatformV1Context buildGoogleCloudAiplatformV1Context() { o.description = 'foo'; o.displayName = 'foo'; o.etag = 'foo'; - o.labels = buildUnnamed39(); - o.metadata = buildUnnamed40(); + o.labels = buildUnnamed45(); + o.metadata = buildUnnamed46(); o.name = 'foo'; - o.parentContexts = buildUnnamed41(); + o.parentContexts = buildUnnamed47(); o.schemaTitle = 'foo'; o.schemaVersion = 'foo'; o.updateTime = 'foo'; @@ -2802,13 +3299,13 @@ void checkGoogleCloudAiplatformV1Context(api.GoogleCloudAiplatformV1Context o) { o.etag!, unittest.equals('foo'), ); - checkUnnamed39(o.labels!); - checkUnnamed40(o.metadata!); + checkUnnamed45(o.labels!); + checkUnnamed46(o.metadata!); unittest.expect( o.name!, unittest.equals('foo'), ); - checkUnnamed41(o.parentContexts!); + checkUnnamed47(o.parentContexts!); unittest.expect( o.schemaTitle!, unittest.equals('foo'), @@ -2861,18 +3358,149 @@ void checkGoogleCloudAiplatformV1CopyModelRequest( buildCounterGoogleCloudAiplatformV1CopyModelRequest--; } -core.List buildUnnamed42() => [ +core.int buildCounterGoogleCloudAiplatformV1CorpusStatus = 0; +api.GoogleCloudAiplatformV1CorpusStatus + buildGoogleCloudAiplatformV1CorpusStatus() { + final o = api.GoogleCloudAiplatformV1CorpusStatus(); + buildCounterGoogleCloudAiplatformV1CorpusStatus++; + if (buildCounterGoogleCloudAiplatformV1CorpusStatus < 3) { + o.errorStatus = 'foo'; + o.state = 'foo'; + } + buildCounterGoogleCloudAiplatformV1CorpusStatus--; + return o; +} + +void checkGoogleCloudAiplatformV1CorpusStatus( + api.GoogleCloudAiplatformV1CorpusStatus o) { + buildCounterGoogleCloudAiplatformV1CorpusStatus++; + if (buildCounterGoogleCloudAiplatformV1CorpusStatus < 3) { + unittest.expect( + o.errorStatus!, + unittest.equals('foo'), + ); + unittest.expect( + o.state!, + unittest.equals('foo'), + ); + } + buildCounterGoogleCloudAiplatformV1CorpusStatus--; +} + +core.List buildUnnamed48() => [ + buildGoogleCloudAiplatformV1Fact(), + buildGoogleCloudAiplatformV1Fact(), + ]; + +void checkUnnamed48(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkGoogleCloudAiplatformV1Fact(o[0]); + checkGoogleCloudAiplatformV1Fact(o[1]); +} + +core.int buildCounterGoogleCloudAiplatformV1CorroborateContentRequest = 0; +api.GoogleCloudAiplatformV1CorroborateContentRequest + buildGoogleCloudAiplatformV1CorroborateContentRequest() { + final o = api.GoogleCloudAiplatformV1CorroborateContentRequest(); + buildCounterGoogleCloudAiplatformV1CorroborateContentRequest++; + if (buildCounterGoogleCloudAiplatformV1CorroborateContentRequest < 3) { + o.content = buildGoogleCloudAiplatformV1Content(); + o.facts = buildUnnamed48(); + o.parameters = + buildGoogleCloudAiplatformV1CorroborateContentRequestParameters(); + } + buildCounterGoogleCloudAiplatformV1CorroborateContentRequest--; + return o; +} + +void checkGoogleCloudAiplatformV1CorroborateContentRequest( + api.GoogleCloudAiplatformV1CorroborateContentRequest o) { + buildCounterGoogleCloudAiplatformV1CorroborateContentRequest++; + if (buildCounterGoogleCloudAiplatformV1CorroborateContentRequest < 3) { + checkGoogleCloudAiplatformV1Content(o.content!); + checkUnnamed48(o.facts!); + 
checkGoogleCloudAiplatformV1CorroborateContentRequestParameters( + o.parameters!); + } + buildCounterGoogleCloudAiplatformV1CorroborateContentRequest--; +} + +core.int + buildCounterGoogleCloudAiplatformV1CorroborateContentRequestParameters = 0; +api.GoogleCloudAiplatformV1CorroborateContentRequestParameters + buildGoogleCloudAiplatformV1CorroborateContentRequestParameters() { + final o = api.GoogleCloudAiplatformV1CorroborateContentRequestParameters(); + buildCounterGoogleCloudAiplatformV1CorroborateContentRequestParameters++; + if (buildCounterGoogleCloudAiplatformV1CorroborateContentRequestParameters < + 3) { + o.citationThreshold = 42.0; + } + buildCounterGoogleCloudAiplatformV1CorroborateContentRequestParameters--; + return o; +} + +void checkGoogleCloudAiplatformV1CorroborateContentRequestParameters( + api.GoogleCloudAiplatformV1CorroborateContentRequestParameters o) { + buildCounterGoogleCloudAiplatformV1CorroborateContentRequestParameters++; + if (buildCounterGoogleCloudAiplatformV1CorroborateContentRequestParameters < + 3) { + unittest.expect( + o.citationThreshold!, + unittest.equals(42.0), + ); + } + buildCounterGoogleCloudAiplatformV1CorroborateContentRequestParameters--; +} + +core.List buildUnnamed49() => [ + buildGoogleCloudAiplatformV1Claim(), + buildGoogleCloudAiplatformV1Claim(), + ]; + +void checkUnnamed49(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkGoogleCloudAiplatformV1Claim(o[0]); + checkGoogleCloudAiplatformV1Claim(o[1]); +} + +core.int buildCounterGoogleCloudAiplatformV1CorroborateContentResponse = 0; +api.GoogleCloudAiplatformV1CorroborateContentResponse + buildGoogleCloudAiplatformV1CorroborateContentResponse() { + final o = api.GoogleCloudAiplatformV1CorroborateContentResponse(); + buildCounterGoogleCloudAiplatformV1CorroborateContentResponse++; + if (buildCounterGoogleCloudAiplatformV1CorroborateContentResponse < 3) { + o.claims = buildUnnamed49(); + o.corroborationScore = 42.0; + } + buildCounterGoogleCloudAiplatformV1CorroborateContentResponse--; + return o; +} + +void checkGoogleCloudAiplatformV1CorroborateContentResponse( + api.GoogleCloudAiplatformV1CorroborateContentResponse o) { + buildCounterGoogleCloudAiplatformV1CorroborateContentResponse++; + if (buildCounterGoogleCloudAiplatformV1CorroborateContentResponse < 3) { + checkUnnamed49(o.claims!); + unittest.expect( + o.corroborationScore!, + unittest.equals(42.0), + ); + } + buildCounterGoogleCloudAiplatformV1CorroborateContentResponse--; +} + +core.List buildUnnamed50() => [ buildGoogleCloudAiplatformV1Content(), buildGoogleCloudAiplatformV1Content(), ]; -void checkUnnamed42(core.List o) { +void checkUnnamed50(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1Content(o[0]); checkGoogleCloudAiplatformV1Content(o[1]); } -core.List buildUnnamed43() => [ +core.List buildUnnamed51() => [ { 'list': [1, 2, 3], 'bool': true, @@ -2885,7 +3513,7 @@ core.List buildUnnamed43() => [ }, ]; -void checkUnnamed43(core.List o) { +void checkUnnamed51(core.List o) { unittest.expect(o, unittest.hasLength(2)); var casted12 = (o[0]) as core.Map; unittest.expect(casted12, unittest.hasLength(3)); @@ -2917,12 +3545,12 @@ void checkUnnamed43(core.List o) { ); } -core.List buildUnnamed44() => [ +core.List buildUnnamed52() => [ buildGoogleCloudAiplatformV1Tool(), buildGoogleCloudAiplatformV1Tool(), ]; -void checkUnnamed44(core.List o) { +void checkUnnamed52(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1Tool(o[0]); 
checkGoogleCloudAiplatformV1Tool(o[1]); @@ -2934,12 +3562,12 @@ api.GoogleCloudAiplatformV1CountTokensRequest final o = api.GoogleCloudAiplatformV1CountTokensRequest(); buildCounterGoogleCloudAiplatformV1CountTokensRequest++; if (buildCounterGoogleCloudAiplatformV1CountTokensRequest < 3) { - o.contents = buildUnnamed42(); + o.contents = buildUnnamed50(); o.generationConfig = buildGoogleCloudAiplatformV1GenerationConfig(); - o.instances = buildUnnamed43(); + o.instances = buildUnnamed51(); o.model = 'foo'; o.systemInstruction = buildGoogleCloudAiplatformV1Content(); - o.tools = buildUnnamed44(); + o.tools = buildUnnamed52(); } buildCounterGoogleCloudAiplatformV1CountTokensRequest--; return o; @@ -2949,15 +3577,15 @@ void checkGoogleCloudAiplatformV1CountTokensRequest( api.GoogleCloudAiplatformV1CountTokensRequest o) { buildCounterGoogleCloudAiplatformV1CountTokensRequest++; if (buildCounterGoogleCloudAiplatformV1CountTokensRequest < 3) { - checkUnnamed42(o.contents!); + checkUnnamed50(o.contents!); checkGoogleCloudAiplatformV1GenerationConfig(o.generationConfig!); - checkUnnamed43(o.instances!); + checkUnnamed51(o.instances!); unittest.expect( o.model!, unittest.equals('foo'), ); checkGoogleCloudAiplatformV1Content(o.systemInstruction!); - checkUnnamed44(o.tools!); + checkUnnamed52(o.tools!); } buildCounterGoogleCloudAiplatformV1CountTokensRequest--; } @@ -3225,12 +3853,12 @@ void checkGoogleCloudAiplatformV1CsvSource( buildCounterGoogleCloudAiplatformV1CsvSource--; } -core.Map buildUnnamed45() => { +core.Map buildUnnamed53() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed45(core.Map o) { +void checkUnnamed53(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -3242,12 +3870,12 @@ void checkUnnamed45(core.Map o) { ); } -core.Map buildUnnamed46() => { +core.Map buildUnnamed54() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed46(core.Map o) { +void checkUnnamed54(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -3270,14 +3898,14 @@ api.GoogleCloudAiplatformV1CustomJob buildGoogleCloudAiplatformV1CustomJob() { o.endTime = 'foo'; o.error = buildGoogleRpcStatus(); o.jobSpec = buildGoogleCloudAiplatformV1CustomJobSpec(); - o.labels = buildUnnamed45(); + o.labels = buildUnnamed53(); o.name = 'foo'; o.satisfiesPzi = true; o.satisfiesPzs = true; o.startTime = 'foo'; o.state = 'foo'; o.updateTime = 'foo'; - o.webAccessUris = buildUnnamed46(); + o.webAccessUris = buildUnnamed54(); } buildCounterGoogleCloudAiplatformV1CustomJob--; return o; @@ -3302,7 +3930,7 @@ void checkGoogleCloudAiplatformV1CustomJob( ); checkGoogleRpcStatus(o.error!); checkGoogleCloudAiplatformV1CustomJobSpec(o.jobSpec!); - checkUnnamed45(o.labels!); + checkUnnamed53(o.labels!); unittest.expect( o.name!, unittest.equals('foo'), @@ -3321,17 +3949,17 @@ void checkGoogleCloudAiplatformV1CustomJob( o.updateTime!, unittest.equals('foo'), ); - checkUnnamed46(o.webAccessUris!); + checkUnnamed54(o.webAccessUris!); } buildCounterGoogleCloudAiplatformV1CustomJob--; } -core.List buildUnnamed47() => [ +core.List buildUnnamed55() => [ 'foo', 'foo', ]; -void checkUnnamed47(core.List o) { +void checkUnnamed55(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -3343,12 +3971,12 @@ void checkUnnamed47(core.List o) { ); } -core.List buildUnnamed48() => [ +core.List buildUnnamed56() => [ 'foo', 'foo', ]; -void checkUnnamed48(core.List o) { +void checkUnnamed56(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ 
-3360,12 +3988,12 @@ void checkUnnamed48(core.List o) { ); } -core.List buildUnnamed49() => [ +core.List buildUnnamed57() => [ buildGoogleCloudAiplatformV1WorkerPoolSpec(), buildGoogleCloudAiplatformV1WorkerPoolSpec(), ]; -void checkUnnamed49(core.List o) { +void checkUnnamed57(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1WorkerPoolSpec(o[0]); checkGoogleCloudAiplatformV1WorkerPoolSpec(o[1]); @@ -3382,16 +4010,15 @@ api.GoogleCloudAiplatformV1CustomJobSpec o.enableWebAccess = true; o.experiment = 'foo'; o.experimentRun = 'foo'; - o.models = buildUnnamed47(); + o.models = buildUnnamed55(); o.network = 'foo'; o.persistentResourceId = 'foo'; o.protectedArtifactLocationId = 'foo'; - o.pscInterfaceConfig = buildGoogleCloudAiplatformV1PscInterfaceConfig(); - o.reservedIpRanges = buildUnnamed48(); + o.reservedIpRanges = buildUnnamed56(); o.scheduling = buildGoogleCloudAiplatformV1Scheduling(); o.serviceAccount = 'foo'; o.tensorboard = 'foo'; - o.workerPoolSpecs = buildUnnamed49(); + o.workerPoolSpecs = buildUnnamed57(); } buildCounterGoogleCloudAiplatformV1CustomJobSpec--; return o; @@ -3412,7 +4039,7 @@ void checkGoogleCloudAiplatformV1CustomJobSpec( o.experimentRun!, unittest.equals('foo'), ); - checkUnnamed47(o.models!); + checkUnnamed55(o.models!); unittest.expect( o.network!, unittest.equals('foo'), @@ -3425,8 +4052,7 @@ void checkGoogleCloudAiplatformV1CustomJobSpec( o.protectedArtifactLocationId!, unittest.equals('foo'), ); - checkGoogleCloudAiplatformV1PscInterfaceConfig(o.pscInterfaceConfig!); - checkUnnamed48(o.reservedIpRanges!); + checkUnnamed56(o.reservedIpRanges!); checkGoogleCloudAiplatformV1Scheduling(o.scheduling!); unittest.expect( o.serviceAccount!, @@ -3436,17 +4062,17 @@ void checkGoogleCloudAiplatformV1CustomJobSpec( o.tensorboard!, unittest.equals('foo'), ); - checkUnnamed49(o.workerPoolSpecs!); + checkUnnamed57(o.workerPoolSpecs!); } buildCounterGoogleCloudAiplatformV1CustomJobSpec--; } -core.Map buildUnnamed50() => { +core.Map buildUnnamed58() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed50(core.Map o) { +void checkUnnamed58(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -3465,7 +4091,7 @@ api.GoogleCloudAiplatformV1DataItem buildGoogleCloudAiplatformV1DataItem() { if (buildCounterGoogleCloudAiplatformV1DataItem < 3) { o.createTime = 'foo'; o.etag = 'foo'; - o.labels = buildUnnamed50(); + o.labels = buildUnnamed58(); o.name = 'foo'; o.payload = { 'list': [1, 2, 3], @@ -3492,7 +4118,7 @@ void checkGoogleCloudAiplatformV1DataItem( o.etag!, unittest.equals('foo'), ); - checkUnnamed50(o.labels!); + checkUnnamed58(o.labels!); unittest.expect( o.name!, unittest.equals('foo'), @@ -3521,12 +4147,12 @@ void checkGoogleCloudAiplatformV1DataItem( buildCounterGoogleCloudAiplatformV1DataItem--; } -core.List buildUnnamed51() => [ +core.List buildUnnamed59() => [ buildGoogleCloudAiplatformV1Annotation(), buildGoogleCloudAiplatformV1Annotation(), ]; -void checkUnnamed51(core.List o) { +void checkUnnamed59(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1Annotation(o[0]); checkGoogleCloudAiplatformV1Annotation(o[1]); @@ -3538,7 +4164,7 @@ api.GoogleCloudAiplatformV1DataItemView final o = api.GoogleCloudAiplatformV1DataItemView(); buildCounterGoogleCloudAiplatformV1DataItemView++; if (buildCounterGoogleCloudAiplatformV1DataItemView < 3) { - o.annotations = buildUnnamed51(); + o.annotations = buildUnnamed59(); o.dataItem = buildGoogleCloudAiplatformV1DataItem(); 
o.hasTruncatedAnnotations = true; } @@ -3550,19 +4176,19 @@ void checkGoogleCloudAiplatformV1DataItemView( api.GoogleCloudAiplatformV1DataItemView o) { buildCounterGoogleCloudAiplatformV1DataItemView++; if (buildCounterGoogleCloudAiplatformV1DataItemView < 3) { - checkUnnamed51(o.annotations!); + checkUnnamed59(o.annotations!); checkGoogleCloudAiplatformV1DataItem(o.dataItem!); unittest.expect(o.hasTruncatedAnnotations!, unittest.isTrue); } buildCounterGoogleCloudAiplatformV1DataItemView--; } -core.Map buildUnnamed52() => { +core.Map buildUnnamed60() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed52(core.Map o) { +void checkUnnamed60(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -3574,12 +4200,12 @@ void checkUnnamed52(core.Map o) { ); } -core.List buildUnnamed53() => [ +core.List buildUnnamed61() => [ 'foo', 'foo', ]; -void checkUnnamed53(core.List o) { +void checkUnnamed61(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -3591,12 +4217,12 @@ void checkUnnamed53(core.List o) { ); } -core.Map buildUnnamed54() => { +core.Map buildUnnamed62() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed54(core.Map o) { +void checkUnnamed62(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -3608,12 +4234,12 @@ void checkUnnamed54(core.Map o) { ); } -core.List buildUnnamed55() => [ +core.List buildUnnamed63() => [ 'foo', 'foo', ]; -void checkUnnamed55(core.List o) { +void checkUnnamed63(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -3632,10 +4258,10 @@ api.GoogleCloudAiplatformV1DataLabelingJob buildCounterGoogleCloudAiplatformV1DataLabelingJob++; if (buildCounterGoogleCloudAiplatformV1DataLabelingJob < 3) { o.activeLearningConfig = buildGoogleCloudAiplatformV1ActiveLearningConfig(); - o.annotationLabels = buildUnnamed52(); + o.annotationLabels = buildUnnamed60(); o.createTime = 'foo'; o.currentSpend = buildGoogleTypeMoney(); - o.datasets = buildUnnamed53(); + o.datasets = buildUnnamed61(); o.displayName = 'foo'; o.encryptionSpec = buildGoogleCloudAiplatformV1EncryptionSpec(); o.error = buildGoogleRpcStatus(); @@ -3648,9 +4274,9 @@ api.GoogleCloudAiplatformV1DataLabelingJob o.instructionUri = 'foo'; o.labelerCount = 42; o.labelingProgress = 42; - o.labels = buildUnnamed54(); + o.labels = buildUnnamed62(); o.name = 'foo'; - o.specialistPools = buildUnnamed55(); + o.specialistPools = buildUnnamed63(); o.state = 'foo'; o.updateTime = 'foo'; } @@ -3663,13 +4289,13 @@ void checkGoogleCloudAiplatformV1DataLabelingJob( buildCounterGoogleCloudAiplatformV1DataLabelingJob++; if (buildCounterGoogleCloudAiplatformV1DataLabelingJob < 3) { checkGoogleCloudAiplatformV1ActiveLearningConfig(o.activeLearningConfig!); - checkUnnamed52(o.annotationLabels!); + checkUnnamed60(o.annotationLabels!); unittest.expect( o.createTime!, unittest.equals('foo'), ); checkGoogleTypeMoney(o.currentSpend!); - checkUnnamed53(o.datasets!); + checkUnnamed61(o.datasets!); unittest.expect( o.displayName!, unittest.equals('foo'), @@ -3706,12 +4332,12 @@ void checkGoogleCloudAiplatformV1DataLabelingJob( o.labelingProgress!, unittest.equals(42), ); - checkUnnamed54(o.labels!); + checkUnnamed62(o.labels!); unittest.expect( o.name!, unittest.equals('foo'), ); - checkUnnamed55(o.specialistPools!); + checkUnnamed63(o.specialistPools!); unittest.expect( o.state!, unittest.equals('foo'), @@ -3724,12 +4350,12 @@ void checkGoogleCloudAiplatformV1DataLabelingJob( 
buildCounterGoogleCloudAiplatformV1DataLabelingJob--; } -core.Map buildUnnamed56() => { +core.Map buildUnnamed64() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed56(core.Map o) { +void checkUnnamed64(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -3741,12 +4367,12 @@ void checkUnnamed56(core.Map o) { ); } -core.List buildUnnamed57() => [ +core.List buildUnnamed65() => [ buildGoogleCloudAiplatformV1SavedQuery(), buildGoogleCloudAiplatformV1SavedQuery(), ]; -void checkUnnamed57(core.List o) { +void checkUnnamed65(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1SavedQuery(o[0]); checkGoogleCloudAiplatformV1SavedQuery(o[1]); @@ -3763,7 +4389,7 @@ api.GoogleCloudAiplatformV1Dataset buildGoogleCloudAiplatformV1Dataset() { o.displayName = 'foo'; o.encryptionSpec = buildGoogleCloudAiplatformV1EncryptionSpec(); o.etag = 'foo'; - o.labels = buildUnnamed56(); + o.labels = buildUnnamed64(); o.metadata = { 'list': [1, 2, 3], 'bool': true, @@ -3775,7 +4401,7 @@ api.GoogleCloudAiplatformV1Dataset buildGoogleCloudAiplatformV1Dataset() { o.name = 'foo'; o.satisfiesPzi = true; o.satisfiesPzs = true; - o.savedQueries = buildUnnamed57(); + o.savedQueries = buildUnnamed65(); o.updateTime = 'foo'; } buildCounterGoogleCloudAiplatformV1Dataset--; @@ -3806,7 +4432,7 @@ void checkGoogleCloudAiplatformV1Dataset(api.GoogleCloudAiplatformV1Dataset o) { o.etag!, unittest.equals('foo'), ); - checkUnnamed56(o.labels!); + checkUnnamed64(o.labels!); var casted16 = (o.metadata!) as core.Map; unittest.expect(casted16, unittest.hasLength(3)); unittest.expect( @@ -3839,7 +4465,7 @@ void checkGoogleCloudAiplatformV1Dataset(api.GoogleCloudAiplatformV1Dataset o) { ); unittest.expect(o.satisfiesPzi!, unittest.isTrue); unittest.expect(o.satisfiesPzs!, unittest.isTrue); - checkUnnamed57(o.savedQueries!); + checkUnnamed65(o.savedQueries!); unittest.expect( o.updateTime!, unittest.equals('foo'), @@ -3925,13 +4551,13 @@ void checkGoogleCloudAiplatformV1DatasetVersion( buildCounterGoogleCloudAiplatformV1DatasetVersion--; } -core.List buildUnnamed58() => +core.List buildUnnamed66() => [ buildGoogleCloudAiplatformV1AutoscalingMetricSpec(), buildGoogleCloudAiplatformV1AutoscalingMetricSpec(), ]; -void checkUnnamed58( +void checkUnnamed66( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1AutoscalingMetricSpec(o[0]); @@ -3944,10 +4570,11 @@ api.GoogleCloudAiplatformV1DedicatedResources final o = api.GoogleCloudAiplatformV1DedicatedResources(); buildCounterGoogleCloudAiplatformV1DedicatedResources++; if (buildCounterGoogleCloudAiplatformV1DedicatedResources < 3) { - o.autoscalingMetricSpecs = buildUnnamed58(); + o.autoscalingMetricSpecs = buildUnnamed66(); o.machineSpec = buildGoogleCloudAiplatformV1MachineSpec(); o.maxReplicaCount = 42; o.minReplicaCount = 42; + o.requiredReplicaCount = 42; o.spot = true; } buildCounterGoogleCloudAiplatformV1DedicatedResources--; @@ -3958,7 +4585,7 @@ void checkGoogleCloudAiplatformV1DedicatedResources( api.GoogleCloudAiplatformV1DedicatedResources o) { buildCounterGoogleCloudAiplatformV1DedicatedResources++; if (buildCounterGoogleCloudAiplatformV1DedicatedResources < 3) { - checkUnnamed58(o.autoscalingMetricSpecs!); + checkUnnamed66(o.autoscalingMetricSpecs!); checkGoogleCloudAiplatformV1MachineSpec(o.machineSpec!); unittest.expect( o.maxReplicaCount!, @@ -3968,6 +4595,10 @@ void checkGoogleCloudAiplatformV1DedicatedResources( o.minReplicaCount!, unittest.equals(42), ); + unittest.expect( + 
o.requiredReplicaCount!, + unittest.equals(42), + ); unittest.expect(o.spot!, unittest.isTrue); } buildCounterGoogleCloudAiplatformV1DedicatedResources--; @@ -4077,12 +4708,12 @@ void checkGoogleCloudAiplatformV1DeployIndexRequest( buildCounterGoogleCloudAiplatformV1DeployIndexRequest--; } -core.Map buildUnnamed59() => { +core.Map buildUnnamed67() => { 'x': 42, 'y': 42, }; -void checkUnnamed59(core.Map o) { +void checkUnnamed67(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -4101,7 +4732,7 @@ api.GoogleCloudAiplatformV1DeployModelRequest buildCounterGoogleCloudAiplatformV1DeployModelRequest++; if (buildCounterGoogleCloudAiplatformV1DeployModelRequest < 3) { o.deployedModel = buildGoogleCloudAiplatformV1DeployedModel(); - o.trafficSplit = buildUnnamed59(); + o.trafficSplit = buildUnnamed67(); } buildCounterGoogleCloudAiplatformV1DeployModelRequest--; return o; @@ -4112,29 +4743,29 @@ void checkGoogleCloudAiplatformV1DeployModelRequest( buildCounterGoogleCloudAiplatformV1DeployModelRequest++; if (buildCounterGoogleCloudAiplatformV1DeployModelRequest < 3) { checkGoogleCloudAiplatformV1DeployedModel(o.deployedModel!); - checkUnnamed59(o.trafficSplit!); + checkUnnamed67(o.trafficSplit!); } buildCounterGoogleCloudAiplatformV1DeployModelRequest--; } -core.List buildUnnamed60() => [ +core.List buildUnnamed68() => [ buildGoogleCloudAiplatformV1PSCAutomationConfig(), buildGoogleCloudAiplatformV1PSCAutomationConfig(), ]; -void checkUnnamed60( +void checkUnnamed68( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1PSCAutomationConfig(o[0]); checkGoogleCloudAiplatformV1PSCAutomationConfig(o[1]); } -core.List buildUnnamed61() => [ +core.List buildUnnamed69() => [ 'foo', 'foo', ]; -void checkUnnamed61(core.List o) { +void checkUnnamed69(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -4164,8 +4795,8 @@ api.GoogleCloudAiplatformV1DeployedIndex o.index = 'foo'; o.indexSyncTime = 'foo'; o.privateEndpoints = buildGoogleCloudAiplatformV1IndexPrivateEndpoints(); - o.pscAutomationConfigs = buildUnnamed60(); - o.reservedIpRanges = buildUnnamed61(); + o.pscAutomationConfigs = buildUnnamed68(); + o.reservedIpRanges = buildUnnamed69(); } buildCounterGoogleCloudAiplatformV1DeployedIndex--; return o; @@ -4205,8 +4836,8 @@ void checkGoogleCloudAiplatformV1DeployedIndex( unittest.equals('foo'), ); checkGoogleCloudAiplatformV1IndexPrivateEndpoints(o.privateEndpoints!); - checkUnnamed60(o.pscAutomationConfigs!); - checkUnnamed61(o.reservedIpRanges!); + checkUnnamed68(o.pscAutomationConfigs!); + checkUnnamed69(o.reservedIpRanges!); } buildCounterGoogleCloudAiplatformV1DeployedIndex--; } @@ -4234,12 +4865,12 @@ void checkGoogleCloudAiplatformV1DeployedIndexAuthConfig( buildCounterGoogleCloudAiplatformV1DeployedIndexAuthConfig--; } -core.List buildUnnamed62() => [ +core.List buildUnnamed70() => [ 'foo', 'foo', ]; -void checkUnnamed62(core.List o) { +void checkUnnamed70(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -4251,12 +4882,12 @@ void checkUnnamed62(core.List o) { ); } -core.List buildUnnamed63() => [ +core.List buildUnnamed71() => [ 'foo', 'foo', ]; -void checkUnnamed63(core.List o) { +void checkUnnamed71(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -4276,8 +4907,8 @@ api.GoogleCloudAiplatformV1DeployedIndexAuthConfigAuthProvider buildCounterGoogleCloudAiplatformV1DeployedIndexAuthConfigAuthProvider++; if 
(buildCounterGoogleCloudAiplatformV1DeployedIndexAuthConfigAuthProvider < 3) { - o.allowedIssuers = buildUnnamed62(); - o.audiences = buildUnnamed63(); + o.allowedIssuers = buildUnnamed70(); + o.audiences = buildUnnamed71(); } buildCounterGoogleCloudAiplatformV1DeployedIndexAuthConfigAuthProvider--; return o; @@ -4288,8 +4919,8 @@ void checkGoogleCloudAiplatformV1DeployedIndexAuthConfigAuthProvider( buildCounterGoogleCloudAiplatformV1DeployedIndexAuthConfigAuthProvider++; if (buildCounterGoogleCloudAiplatformV1DeployedIndexAuthConfigAuthProvider < 3) { - checkUnnamed62(o.allowedIssuers!); - checkUnnamed63(o.audiences!); + checkUnnamed70(o.allowedIssuers!); + checkUnnamed71(o.audiences!); } buildCounterGoogleCloudAiplatformV1DeployedIndexAuthConfigAuthProvider--; } @@ -4328,6 +4959,23 @@ void checkGoogleCloudAiplatformV1DeployedIndexRef( buildCounterGoogleCloudAiplatformV1DeployedIndexRef--; } +core.Map buildUnnamed72() => { + 'x': 'foo', + 'y': 'foo', + }; + +void checkUnnamed72(core.Map o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o['x']!, + unittest.equals('foo'), + ); + unittest.expect( + o['y']!, + unittest.equals('foo'), + ); +} + core.int buildCounterGoogleCloudAiplatformV1DeployedModel = 0; api.GoogleCloudAiplatformV1DeployedModel buildGoogleCloudAiplatformV1DeployedModel() { @@ -4342,12 +4990,16 @@ api.GoogleCloudAiplatformV1DeployedModel o.displayName = 'foo'; o.enableAccessLogging = true; o.explanationSpec = buildGoogleCloudAiplatformV1ExplanationSpec(); + o.fasterDeploymentConfig = + buildGoogleCloudAiplatformV1FasterDeploymentConfig(); o.id = 'foo'; o.model = 'foo'; o.modelVersionId = 'foo'; o.privateEndpoints = buildGoogleCloudAiplatformV1PrivateEndpoints(); o.serviceAccount = 'foo'; o.sharedResources = 'foo'; + o.status = buildGoogleCloudAiplatformV1DeployedModelStatus(); + o.systemLabels = buildUnnamed72(); } buildCounterGoogleCloudAiplatformV1DeployedModel--; return o; @@ -4371,6 +5023,8 @@ void checkGoogleCloudAiplatformV1DeployedModel( ); unittest.expect(o.enableAccessLogging!, unittest.isTrue); checkGoogleCloudAiplatformV1ExplanationSpec(o.explanationSpec!); + checkGoogleCloudAiplatformV1FasterDeploymentConfig( + o.fasterDeploymentConfig!); unittest.expect( o.id!, unittest.equals('foo'), @@ -4392,6 +5046,8 @@ void checkGoogleCloudAiplatformV1DeployedModel( o.sharedResources!, unittest.equals('foo'), ); + checkGoogleCloudAiplatformV1DeployedModelStatus(o.status!); + checkUnnamed72(o.systemLabels!); } buildCounterGoogleCloudAiplatformV1DeployedModel--; } @@ -4425,6 +5081,40 @@ void checkGoogleCloudAiplatformV1DeployedModelRef( buildCounterGoogleCloudAiplatformV1DeployedModelRef--; } +core.int buildCounterGoogleCloudAiplatformV1DeployedModelStatus = 0; +api.GoogleCloudAiplatformV1DeployedModelStatus + buildGoogleCloudAiplatformV1DeployedModelStatus() { + final o = api.GoogleCloudAiplatformV1DeployedModelStatus(); + buildCounterGoogleCloudAiplatformV1DeployedModelStatus++; + if (buildCounterGoogleCloudAiplatformV1DeployedModelStatus < 3) { + o.availableReplicaCount = 42; + o.lastUpdateTime = 'foo'; + o.message = 'foo'; + } + buildCounterGoogleCloudAiplatformV1DeployedModelStatus--; + return o; +} + +void checkGoogleCloudAiplatformV1DeployedModelStatus( + api.GoogleCloudAiplatformV1DeployedModelStatus o) { + buildCounterGoogleCloudAiplatformV1DeployedModelStatus++; + if (buildCounterGoogleCloudAiplatformV1DeployedModelStatus < 3) { + unittest.expect( + o.availableReplicaCount!, + unittest.equals(42), + ); + unittest.expect( + o.lastUpdateTime!, + 
unittest.equals('foo'), + ); + unittest.expect( + o.message!, + unittest.equals('foo'), + ); + } + buildCounterGoogleCloudAiplatformV1DeployedModelStatus--; +} + core.int buildCounterGoogleCloudAiplatformV1DeploymentResourcePool = 0; api.GoogleCloudAiplatformV1DeploymentResourcePool buildGoogleCloudAiplatformV1DeploymentResourcePool() { @@ -4498,12 +5188,12 @@ void checkGoogleCloudAiplatformV1DestinationFeatureSetting( buildCounterGoogleCloudAiplatformV1DestinationFeatureSetting--; } -core.List buildUnnamed64() => [ +core.List buildUnnamed73() => [ buildGoogleCloudAiplatformV1Tensor(), buildGoogleCloudAiplatformV1Tensor(), ]; -void checkUnnamed64(core.List o) { +void checkUnnamed73(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1Tensor(o[0]); checkGoogleCloudAiplatformV1Tensor(o[1]); @@ -4515,7 +5205,7 @@ api.GoogleCloudAiplatformV1DirectPredictRequest final o = api.GoogleCloudAiplatformV1DirectPredictRequest(); buildCounterGoogleCloudAiplatformV1DirectPredictRequest++; if (buildCounterGoogleCloudAiplatformV1DirectPredictRequest < 3) { - o.inputs = buildUnnamed64(); + o.inputs = buildUnnamed73(); o.parameters = buildGoogleCloudAiplatformV1Tensor(); } buildCounterGoogleCloudAiplatformV1DirectPredictRequest--; @@ -4526,18 +5216,18 @@ void checkGoogleCloudAiplatformV1DirectPredictRequest( api.GoogleCloudAiplatformV1DirectPredictRequest o) { buildCounterGoogleCloudAiplatformV1DirectPredictRequest++; if (buildCounterGoogleCloudAiplatformV1DirectPredictRequest < 3) { - checkUnnamed64(o.inputs!); + checkUnnamed73(o.inputs!); checkGoogleCloudAiplatformV1Tensor(o.parameters!); } buildCounterGoogleCloudAiplatformV1DirectPredictRequest--; } -core.List buildUnnamed65() => [ +core.List buildUnnamed74() => [ buildGoogleCloudAiplatformV1Tensor(), buildGoogleCloudAiplatformV1Tensor(), ]; -void checkUnnamed65(core.List o) { +void checkUnnamed74(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1Tensor(o[0]); checkGoogleCloudAiplatformV1Tensor(o[1]); @@ -4549,7 +5239,7 @@ api.GoogleCloudAiplatformV1DirectPredictResponse final o = api.GoogleCloudAiplatformV1DirectPredictResponse(); buildCounterGoogleCloudAiplatformV1DirectPredictResponse++; if (buildCounterGoogleCloudAiplatformV1DirectPredictResponse < 3) { - o.outputs = buildUnnamed65(); + o.outputs = buildUnnamed74(); o.parameters = buildGoogleCloudAiplatformV1Tensor(); } buildCounterGoogleCloudAiplatformV1DirectPredictResponse--; @@ -4560,7 +5250,7 @@ void checkGoogleCloudAiplatformV1DirectPredictResponse( api.GoogleCloudAiplatformV1DirectPredictResponse o) { buildCounterGoogleCloudAiplatformV1DirectPredictResponse++; if (buildCounterGoogleCloudAiplatformV1DirectPredictResponse < 3) { - checkUnnamed65(o.outputs!); + checkUnnamed74(o.outputs!); checkGoogleCloudAiplatformV1Tensor(o.parameters!); } buildCounterGoogleCloudAiplatformV1DirectPredictResponse--; @@ -4619,6 +5309,23 @@ void checkGoogleCloudAiplatformV1DirectRawPredictResponse( buildCounterGoogleCloudAiplatformV1DirectRawPredictResponse--; } +core.int buildCounterGoogleCloudAiplatformV1DirectUploadSource = 0; +api.GoogleCloudAiplatformV1DirectUploadSource + buildGoogleCloudAiplatformV1DirectUploadSource() { + final o = api.GoogleCloudAiplatformV1DirectUploadSource(); + buildCounterGoogleCloudAiplatformV1DirectUploadSource++; + if (buildCounterGoogleCloudAiplatformV1DirectUploadSource < 3) {} + buildCounterGoogleCloudAiplatformV1DirectUploadSource--; + return o; +} + +void checkGoogleCloudAiplatformV1DirectUploadSource( + 
api.GoogleCloudAiplatformV1DirectUploadSource o) { + buildCounterGoogleCloudAiplatformV1DirectUploadSource++; + if (buildCounterGoogleCloudAiplatformV1DirectUploadSource < 3) {} + buildCounterGoogleCloudAiplatformV1DirectUploadSource--; +} + core.int buildCounterGoogleCloudAiplatformV1DiskSpec = 0; api.GoogleCloudAiplatformV1DiskSpec buildGoogleCloudAiplatformV1DiskSpec() { final o = api.GoogleCloudAiplatformV1DiskSpec(); @@ -4647,12 +5354,12 @@ void checkGoogleCloudAiplatformV1DiskSpec( buildCounterGoogleCloudAiplatformV1DiskSpec--; } -core.List buildUnnamed66() => [ +core.List buildUnnamed75() => [ 42.0, 42.0, ]; -void checkUnnamed66(core.List o) { +void checkUnnamed75(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -4670,7 +5377,7 @@ api.GoogleCloudAiplatformV1DoubleArray final o = api.GoogleCloudAiplatformV1DoubleArray(); buildCounterGoogleCloudAiplatformV1DoubleArray++; if (buildCounterGoogleCloudAiplatformV1DoubleArray < 3) { - o.values = buildUnnamed66(); + o.values = buildUnnamed75(); } buildCounterGoogleCloudAiplatformV1DoubleArray--; return o; @@ -4680,7 +5387,7 @@ void checkGoogleCloudAiplatformV1DoubleArray( api.GoogleCloudAiplatformV1DoubleArray o) { buildCounterGoogleCloudAiplatformV1DoubleArray++; if (buildCounterGoogleCloudAiplatformV1DoubleArray < 3) { - checkUnnamed66(o.values!); + checkUnnamed75(o.values!); } buildCounterGoogleCloudAiplatformV1DoubleArray--; } @@ -4738,23 +5445,23 @@ void checkGoogleCloudAiplatformV1EncryptionSpec( buildCounterGoogleCloudAiplatformV1EncryptionSpec--; } -core.List buildUnnamed67() => [ +core.List buildUnnamed76() => [ buildGoogleCloudAiplatformV1DeployedModel(), buildGoogleCloudAiplatformV1DeployedModel(), ]; -void checkUnnamed67(core.List o) { +void checkUnnamed76(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1DeployedModel(o[0]); checkGoogleCloudAiplatformV1DeployedModel(o[1]); } -core.Map buildUnnamed68() => { +core.Map buildUnnamed77() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed68(core.Map o) { +void checkUnnamed77(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -4766,12 +5473,12 @@ void checkUnnamed68(core.Map o) { ); } -core.Map buildUnnamed69() => { +core.Map buildUnnamed78() => { 'x': 42, 'y': 42, }; -void checkUnnamed69(core.Map o) { +void checkUnnamed78(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -4788,16 +5495,18 @@ api.GoogleCloudAiplatformV1Endpoint buildGoogleCloudAiplatformV1Endpoint() { final o = api.GoogleCloudAiplatformV1Endpoint(); buildCounterGoogleCloudAiplatformV1Endpoint++; if (buildCounterGoogleCloudAiplatformV1Endpoint < 3) { + o.clientConnectionConfig = + buildGoogleCloudAiplatformV1ClientConnectionConfig(); o.createTime = 'foo'; o.dedicatedEndpointDns = 'foo'; o.dedicatedEndpointEnabled = true; - o.deployedModels = buildUnnamed67(); + o.deployedModels = buildUnnamed76(); o.description = 'foo'; o.displayName = 'foo'; o.enablePrivateServiceConnect = true; o.encryptionSpec = buildGoogleCloudAiplatformV1EncryptionSpec(); o.etag = 'foo'; - o.labels = buildUnnamed68(); + o.labels = buildUnnamed77(); o.modelDeploymentMonitoringJob = 'foo'; o.name = 'foo'; o.network = 'foo'; @@ -4807,7 +5516,7 @@ api.GoogleCloudAiplatformV1Endpoint buildGoogleCloudAiplatformV1Endpoint() { buildGoogleCloudAiplatformV1PrivateServiceConnectConfig(); o.satisfiesPzi = true; o.satisfiesPzs = true; - o.trafficSplit = buildUnnamed69(); + o.trafficSplit = buildUnnamed78(); o.updateTime 
= 'foo'; } buildCounterGoogleCloudAiplatformV1Endpoint--; @@ -4818,6 +5527,8 @@ void checkGoogleCloudAiplatformV1Endpoint( api.GoogleCloudAiplatformV1Endpoint o) { buildCounterGoogleCloudAiplatformV1Endpoint++; if (buildCounterGoogleCloudAiplatformV1Endpoint < 3) { + checkGoogleCloudAiplatformV1ClientConnectionConfig( + o.clientConnectionConfig!); unittest.expect( o.createTime!, unittest.equals('foo'), @@ -4827,7 +5538,7 @@ void checkGoogleCloudAiplatformV1Endpoint( unittest.equals('foo'), ); unittest.expect(o.dedicatedEndpointEnabled!, unittest.isTrue); - checkUnnamed67(o.deployedModels!); + checkUnnamed76(o.deployedModels!); unittest.expect( o.description!, unittest.equals('foo'), @@ -4842,7 +5553,7 @@ void checkGoogleCloudAiplatformV1Endpoint( o.etag!, unittest.equals('foo'), ); - checkUnnamed68(o.labels!); + checkUnnamed77(o.labels!); unittest.expect( o.modelDeploymentMonitoringJob!, unittest.equals('foo'), @@ -4861,7 +5572,7 @@ void checkGoogleCloudAiplatformV1Endpoint( o.privateServiceConnectConfig!); unittest.expect(o.satisfiesPzi!, unittest.isTrue); unittest.expect(o.satisfiesPzs!, unittest.isTrue); - checkUnnamed69(o.trafficSplit!); + checkUnnamed78(o.trafficSplit!); unittest.expect( o.updateTime!, unittest.equals('foo'), @@ -4896,12 +5607,12 @@ void checkGoogleCloudAiplatformV1EntityIdSelector( buildCounterGoogleCloudAiplatformV1EntityIdSelector--; } -core.Map buildUnnamed70() => { +core.Map buildUnnamed79() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed70(core.Map o) { +void checkUnnamed79(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -4921,7 +5632,7 @@ api.GoogleCloudAiplatformV1EntityType buildGoogleCloudAiplatformV1EntityType() { o.createTime = 'foo'; o.description = 'foo'; o.etag = 'foo'; - o.labels = buildUnnamed70(); + o.labels = buildUnnamed79(); o.monitoringConfig = buildGoogleCloudAiplatformV1FeaturestoreMonitoringConfig(); o.name = 'foo'; @@ -4950,7 +5661,7 @@ void checkGoogleCloudAiplatformV1EntityType( o.etag!, unittest.equals('foo'), ); - checkUnnamed70(o.labels!); + checkUnnamed79(o.labels!); checkGoogleCloudAiplatformV1FeaturestoreMonitoringConfig( o.monitoringConfig!); unittest.expect( @@ -4999,12 +5710,12 @@ void checkGoogleCloudAiplatformV1EnvVar(api.GoogleCloudAiplatformV1EnvVar o) { } core.List - buildUnnamed71() => [ + buildUnnamed80() => [ buildGoogleCloudAiplatformV1ErrorAnalysisAnnotationAttributedItem(), buildGoogleCloudAiplatformV1ErrorAnalysisAnnotationAttributedItem(), ]; -void checkUnnamed71( +void checkUnnamed80( core.List o) { unittest.expect(o, unittest.hasLength(2)); @@ -5018,7 +5729,7 @@ api.GoogleCloudAiplatformV1ErrorAnalysisAnnotation final o = api.GoogleCloudAiplatformV1ErrorAnalysisAnnotation(); buildCounterGoogleCloudAiplatformV1ErrorAnalysisAnnotation++; if (buildCounterGoogleCloudAiplatformV1ErrorAnalysisAnnotation < 3) { - o.attributedItems = buildUnnamed71(); + o.attributedItems = buildUnnamed80(); o.outlierScore = 42.0; o.outlierThreshold = 42.0; o.queryType = 'foo'; @@ -5031,7 +5742,7 @@ void checkGoogleCloudAiplatformV1ErrorAnalysisAnnotation( api.GoogleCloudAiplatformV1ErrorAnalysisAnnotation o) { buildCounterGoogleCloudAiplatformV1ErrorAnalysisAnnotation++; if (buildCounterGoogleCloudAiplatformV1ErrorAnalysisAnnotation < 3) { - checkUnnamed71(o.attributedItems!); + checkUnnamed80(o.attributedItems!); unittest.expect( o.outlierScore!, unittest.equals(42.0), @@ -5089,10 +5800,12 @@ api.GoogleCloudAiplatformV1EvaluateInstancesRequest if 
(buildCounterGoogleCloudAiplatformV1EvaluateInstancesRequest < 3) { o.bleuInput = buildGoogleCloudAiplatformV1BleuInput(); o.coherenceInput = buildGoogleCloudAiplatformV1CoherenceInput(); + o.cometInput = buildGoogleCloudAiplatformV1CometInput(); o.exactMatchInput = buildGoogleCloudAiplatformV1ExactMatchInput(); o.fluencyInput = buildGoogleCloudAiplatformV1FluencyInput(); o.fulfillmentInput = buildGoogleCloudAiplatformV1FulfillmentInput(); o.groundednessInput = buildGoogleCloudAiplatformV1GroundednessInput(); + o.metricxInput = buildGoogleCloudAiplatformV1MetricxInput(); o.pairwiseMetricInput = buildGoogleCloudAiplatformV1PairwiseMetricInput(); o.pairwiseQuestionAnsweringQualityInput = buildGoogleCloudAiplatformV1PairwiseQuestionAnsweringQualityInput(); @@ -5121,6 +5834,18 @@ api.GoogleCloudAiplatformV1EvaluateInstancesRequest buildGoogleCloudAiplatformV1ToolParameterKeyMatchInput(); o.toolParameterKvMatchInput = buildGoogleCloudAiplatformV1ToolParameterKVMatchInput(); + o.trajectoryAnyOrderMatchInput = + buildGoogleCloudAiplatformV1TrajectoryAnyOrderMatchInput(); + o.trajectoryExactMatchInput = + buildGoogleCloudAiplatformV1TrajectoryExactMatchInput(); + o.trajectoryInOrderMatchInput = + buildGoogleCloudAiplatformV1TrajectoryInOrderMatchInput(); + o.trajectoryPrecisionInput = + buildGoogleCloudAiplatformV1TrajectoryPrecisionInput(); + o.trajectoryRecallInput = + buildGoogleCloudAiplatformV1TrajectoryRecallInput(); + o.trajectorySingleToolUseInput = + buildGoogleCloudAiplatformV1TrajectorySingleToolUseInput(); } buildCounterGoogleCloudAiplatformV1EvaluateInstancesRequest--; return o; @@ -5132,10 +5857,12 @@ void checkGoogleCloudAiplatformV1EvaluateInstancesRequest( if (buildCounterGoogleCloudAiplatformV1EvaluateInstancesRequest < 3) { checkGoogleCloudAiplatformV1BleuInput(o.bleuInput!); checkGoogleCloudAiplatformV1CoherenceInput(o.coherenceInput!); + checkGoogleCloudAiplatformV1CometInput(o.cometInput!); checkGoogleCloudAiplatformV1ExactMatchInput(o.exactMatchInput!); checkGoogleCloudAiplatformV1FluencyInput(o.fluencyInput!); checkGoogleCloudAiplatformV1FulfillmentInput(o.fulfillmentInput!); checkGoogleCloudAiplatformV1GroundednessInput(o.groundednessInput!); + checkGoogleCloudAiplatformV1MetricxInput(o.metricxInput!); checkGoogleCloudAiplatformV1PairwiseMetricInput(o.pairwiseMetricInput!); checkGoogleCloudAiplatformV1PairwiseQuestionAnsweringQualityInput( o.pairwiseQuestionAnsweringQualityInput!); @@ -5164,6 +5891,17 @@ void checkGoogleCloudAiplatformV1EvaluateInstancesRequest( o.toolParameterKeyMatchInput!); checkGoogleCloudAiplatformV1ToolParameterKVMatchInput( o.toolParameterKvMatchInput!); + checkGoogleCloudAiplatformV1TrajectoryAnyOrderMatchInput( + o.trajectoryAnyOrderMatchInput!); + checkGoogleCloudAiplatformV1TrajectoryExactMatchInput( + o.trajectoryExactMatchInput!); + checkGoogleCloudAiplatformV1TrajectoryInOrderMatchInput( + o.trajectoryInOrderMatchInput!); + checkGoogleCloudAiplatformV1TrajectoryPrecisionInput( + o.trajectoryPrecisionInput!); + checkGoogleCloudAiplatformV1TrajectoryRecallInput(o.trajectoryRecallInput!); + checkGoogleCloudAiplatformV1TrajectorySingleToolUseInput( + o.trajectorySingleToolUseInput!); } buildCounterGoogleCloudAiplatformV1EvaluateInstancesRequest--; } @@ -5176,10 +5914,12 @@ api.GoogleCloudAiplatformV1EvaluateInstancesResponse if (buildCounterGoogleCloudAiplatformV1EvaluateInstancesResponse < 3) { o.bleuResults = buildGoogleCloudAiplatformV1BleuResults(); o.coherenceResult = buildGoogleCloudAiplatformV1CoherenceResult(); + o.cometResult = 
buildGoogleCloudAiplatformV1CometResult(); o.exactMatchResults = buildGoogleCloudAiplatformV1ExactMatchResults(); o.fluencyResult = buildGoogleCloudAiplatformV1FluencyResult(); o.fulfillmentResult = buildGoogleCloudAiplatformV1FulfillmentResult(); o.groundednessResult = buildGoogleCloudAiplatformV1GroundednessResult(); + o.metricxResult = buildGoogleCloudAiplatformV1MetricxResult(); o.pairwiseMetricResult = buildGoogleCloudAiplatformV1PairwiseMetricResult(); o.pairwiseQuestionAnsweringQualityResult = buildGoogleCloudAiplatformV1PairwiseQuestionAnsweringQualityResult(); @@ -5209,6 +5949,18 @@ api.GoogleCloudAiplatformV1EvaluateInstancesResponse buildGoogleCloudAiplatformV1ToolParameterKeyMatchResults(); o.toolParameterKvMatchResults = buildGoogleCloudAiplatformV1ToolParameterKVMatchResults(); + o.trajectoryAnyOrderMatchResults = + buildGoogleCloudAiplatformV1TrajectoryAnyOrderMatchResults(); + o.trajectoryExactMatchResults = + buildGoogleCloudAiplatformV1TrajectoryExactMatchResults(); + o.trajectoryInOrderMatchResults = + buildGoogleCloudAiplatformV1TrajectoryInOrderMatchResults(); + o.trajectoryPrecisionResults = + buildGoogleCloudAiplatformV1TrajectoryPrecisionResults(); + o.trajectoryRecallResults = + buildGoogleCloudAiplatformV1TrajectoryRecallResults(); + o.trajectorySingleToolUseResults = + buildGoogleCloudAiplatformV1TrajectorySingleToolUseResults(); } buildCounterGoogleCloudAiplatformV1EvaluateInstancesResponse--; return o; @@ -5220,10 +5972,12 @@ void checkGoogleCloudAiplatformV1EvaluateInstancesResponse( if (buildCounterGoogleCloudAiplatformV1EvaluateInstancesResponse < 3) { checkGoogleCloudAiplatformV1BleuResults(o.bleuResults!); checkGoogleCloudAiplatformV1CoherenceResult(o.coherenceResult!); + checkGoogleCloudAiplatformV1CometResult(o.cometResult!); checkGoogleCloudAiplatformV1ExactMatchResults(o.exactMatchResults!); checkGoogleCloudAiplatformV1FluencyResult(o.fluencyResult!); checkGoogleCloudAiplatformV1FulfillmentResult(o.fulfillmentResult!); checkGoogleCloudAiplatformV1GroundednessResult(o.groundednessResult!); + checkGoogleCloudAiplatformV1MetricxResult(o.metricxResult!); checkGoogleCloudAiplatformV1PairwiseMetricResult(o.pairwiseMetricResult!); checkGoogleCloudAiplatformV1PairwiseQuestionAnsweringQualityResult( o.pairwiseQuestionAnsweringQualityResult!); @@ -5252,17 +6006,29 @@ void checkGoogleCloudAiplatformV1EvaluateInstancesResponse( o.toolParameterKeyMatchResults!); checkGoogleCloudAiplatformV1ToolParameterKVMatchResults( o.toolParameterKvMatchResults!); + checkGoogleCloudAiplatformV1TrajectoryAnyOrderMatchResults( + o.trajectoryAnyOrderMatchResults!); + checkGoogleCloudAiplatformV1TrajectoryExactMatchResults( + o.trajectoryExactMatchResults!); + checkGoogleCloudAiplatformV1TrajectoryInOrderMatchResults( + o.trajectoryInOrderMatchResults!); + checkGoogleCloudAiplatformV1TrajectoryPrecisionResults( + o.trajectoryPrecisionResults!); + checkGoogleCloudAiplatformV1TrajectoryRecallResults( + o.trajectoryRecallResults!); + checkGoogleCloudAiplatformV1TrajectorySingleToolUseResults( + o.trajectorySingleToolUseResults!); } buildCounterGoogleCloudAiplatformV1EvaluateInstancesResponse--; } core.List - buildUnnamed72() => [ + buildUnnamed81() => [ buildGoogleCloudAiplatformV1ErrorAnalysisAnnotation(), buildGoogleCloudAiplatformV1ErrorAnalysisAnnotation(), ]; -void checkUnnamed72( +void checkUnnamed81( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1ErrorAnalysisAnnotation(o[0]); @@ -5270,19 +6036,19 @@ void checkUnnamed72( } core.List - 
buildUnnamed73() => [ + buildUnnamed82() => [ buildGoogleCloudAiplatformV1EvaluatedAnnotationExplanation(), buildGoogleCloudAiplatformV1EvaluatedAnnotationExplanation(), ]; -void checkUnnamed73( +void checkUnnamed82( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1EvaluatedAnnotationExplanation(o[0]); checkGoogleCloudAiplatformV1EvaluatedAnnotationExplanation(o[1]); } -core.List buildUnnamed74() => [ +core.List buildUnnamed83() => [ { 'list': [1, 2, 3], 'bool': true, @@ -5295,7 +6061,7 @@ core.List buildUnnamed74() => [ }, ]; -void checkUnnamed74(core.List o) { +void checkUnnamed83(core.List o) { unittest.expect(o, unittest.hasLength(2)); var casted18 = (o[0]) as core.Map; unittest.expect(casted18, unittest.hasLength(3)); @@ -5327,7 +6093,7 @@ void checkUnnamed74(core.List o) { ); } -core.List buildUnnamed75() => [ +core.List buildUnnamed84() => [ { 'list': [1, 2, 3], 'bool': true, @@ -5340,7 +6106,7 @@ core.List buildUnnamed75() => [ }, ]; -void checkUnnamed75(core.List o) { +void checkUnnamed84(core.List o) { unittest.expect(o, unittest.hasLength(2)); var casted20 = (o[0]) as core.Map; unittest.expect(casted20, unittest.hasLength(3)); @@ -5383,11 +6149,11 @@ api.GoogleCloudAiplatformV1EvaluatedAnnotation 'bool': true, 'string': 'foo' }; - o.errorAnalysisAnnotations = buildUnnamed72(); + o.errorAnalysisAnnotations = buildUnnamed81(); o.evaluatedDataItemViewId = 'foo'; - o.explanations = buildUnnamed73(); - o.groundTruths = buildUnnamed74(); - o.predictions = buildUnnamed75(); + o.explanations = buildUnnamed82(); + o.groundTruths = buildUnnamed83(); + o.predictions = buildUnnamed84(); o.type = 'foo'; } buildCounterGoogleCloudAiplatformV1EvaluatedAnnotation--; @@ -5412,14 +6178,14 @@ void checkGoogleCloudAiplatformV1EvaluatedAnnotation( casted22['string'], unittest.equals('foo'), ); - checkUnnamed72(o.errorAnalysisAnnotations!); + checkUnnamed81(o.errorAnalysisAnnotations!); unittest.expect( o.evaluatedDataItemViewId!, unittest.equals('foo'), ); - checkUnnamed73(o.explanations!); - checkUnnamed74(o.groundTruths!); - checkUnnamed75(o.predictions!); + checkUnnamed82(o.explanations!); + checkUnnamed83(o.groundTruths!); + checkUnnamed84(o.predictions!); unittest.expect( o.type!, unittest.equals('foo'), @@ -5454,12 +6220,12 @@ void checkGoogleCloudAiplatformV1EvaluatedAnnotationExplanation( buildCounterGoogleCloudAiplatformV1EvaluatedAnnotationExplanation--; } -core.Map buildUnnamed76() => { +core.Map buildUnnamed85() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed76(core.Map o) { +void checkUnnamed85(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -5479,7 +6245,7 @@ api.GoogleCloudAiplatformV1Event buildGoogleCloudAiplatformV1Event() { o.artifact = 'foo'; o.eventTime = 'foo'; o.execution = 'foo'; - o.labels = buildUnnamed76(); + o.labels = buildUnnamed85(); o.type = 'foo'; } buildCounterGoogleCloudAiplatformV1Event--; @@ -5501,7 +6267,7 @@ void checkGoogleCloudAiplatformV1Event(api.GoogleCloudAiplatformV1Event o) { o.execution!, unittest.equals('foo'), ); - checkUnnamed76(o.labels!); + checkUnnamed85(o.labels!); unittest.expect( o.type!, unittest.equals('foo'), @@ -5510,12 +6276,12 @@ void checkGoogleCloudAiplatformV1Event(api.GoogleCloudAiplatformV1Event o) { buildCounterGoogleCloudAiplatformV1Event--; } -core.List buildUnnamed77() => [ +core.List buildUnnamed86() => [ buildGoogleCloudAiplatformV1ExactMatchInstance(), buildGoogleCloudAiplatformV1ExactMatchInstance(), ]; -void checkUnnamed77( +void checkUnnamed86( 
core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1ExactMatchInstance(o[0]); @@ -5528,7 +6294,7 @@ api.GoogleCloudAiplatformV1ExactMatchInput final o = api.GoogleCloudAiplatformV1ExactMatchInput(); buildCounterGoogleCloudAiplatformV1ExactMatchInput++; if (buildCounterGoogleCloudAiplatformV1ExactMatchInput < 3) { - o.instances = buildUnnamed77(); + o.instances = buildUnnamed86(); o.metricSpec = buildGoogleCloudAiplatformV1ExactMatchSpec(); } buildCounterGoogleCloudAiplatformV1ExactMatchInput--; @@ -5539,7 +6305,7 @@ void checkGoogleCloudAiplatformV1ExactMatchInput( api.GoogleCloudAiplatformV1ExactMatchInput o) { buildCounterGoogleCloudAiplatformV1ExactMatchInput++; if (buildCounterGoogleCloudAiplatformV1ExactMatchInput < 3) { - checkUnnamed77(o.instances!); + checkUnnamed86(o.instances!); checkGoogleCloudAiplatformV1ExactMatchSpec(o.metricSpec!); } buildCounterGoogleCloudAiplatformV1ExactMatchInput--; @@ -5598,13 +6364,13 @@ void checkGoogleCloudAiplatformV1ExactMatchMetricValue( buildCounterGoogleCloudAiplatformV1ExactMatchMetricValue--; } -core.List buildUnnamed78() => +core.List buildUnnamed87() => [ buildGoogleCloudAiplatformV1ExactMatchMetricValue(), buildGoogleCloudAiplatformV1ExactMatchMetricValue(), ]; -void checkUnnamed78( +void checkUnnamed87( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1ExactMatchMetricValue(o[0]); @@ -5617,7 +6383,7 @@ api.GoogleCloudAiplatformV1ExactMatchResults final o = api.GoogleCloudAiplatformV1ExactMatchResults(); buildCounterGoogleCloudAiplatformV1ExactMatchResults++; if (buildCounterGoogleCloudAiplatformV1ExactMatchResults < 3) { - o.exactMatchMetricValues = buildUnnamed78(); + o.exactMatchMetricValues = buildUnnamed87(); } buildCounterGoogleCloudAiplatformV1ExactMatchResults--; return o; @@ -5627,7 +6393,7 @@ void checkGoogleCloudAiplatformV1ExactMatchResults( api.GoogleCloudAiplatformV1ExactMatchResults o) { buildCounterGoogleCloudAiplatformV1ExactMatchResults++; if (buildCounterGoogleCloudAiplatformV1ExactMatchResults < 3) { - checkUnnamed78(o.exactMatchMetricValues!); + checkUnnamed87(o.exactMatchMetricValues!); } buildCounterGoogleCloudAiplatformV1ExactMatchResults--; } @@ -5722,12 +6488,12 @@ void checkGoogleCloudAiplatformV1ExamplesExampleGcsSource( } core.List - buildUnnamed79() => [ + buildUnnamed88() => [ buildGoogleCloudAiplatformV1ExamplesRestrictionsNamespace(), buildGoogleCloudAiplatformV1ExamplesRestrictionsNamespace(), ]; -void checkUnnamed79( +void checkUnnamed88( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1ExamplesRestrictionsNamespace(o[0]); @@ -5743,7 +6509,7 @@ api.GoogleCloudAiplatformV1ExamplesOverride o.crowdingCount = 42; o.dataFormat = 'foo'; o.neighborCount = 42; - o.restrictions = buildUnnamed79(); + o.restrictions = buildUnnamed88(); o.returnEmbeddings = true; } buildCounterGoogleCloudAiplatformV1ExamplesOverride--; @@ -5766,18 +6532,18 @@ void checkGoogleCloudAiplatformV1ExamplesOverride( o.neighborCount!, unittest.equals(42), ); - checkUnnamed79(o.restrictions!); + checkUnnamed88(o.restrictions!); unittest.expect(o.returnEmbeddings!, unittest.isTrue); } buildCounterGoogleCloudAiplatformV1ExamplesOverride--; } -core.List buildUnnamed80() => [ +core.List buildUnnamed89() => [ 'foo', 'foo', ]; -void checkUnnamed80(core.List o) { +void checkUnnamed89(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -5789,12 +6555,12 @@ void checkUnnamed80(core.List o) { ); } -core.List 
buildUnnamed81() => [ +core.List buildUnnamed90() => [ 'foo', 'foo', ]; -void checkUnnamed81(core.List o) { +void checkUnnamed90(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -5812,8 +6578,8 @@ api.GoogleCloudAiplatformV1ExamplesRestrictionsNamespace final o = api.GoogleCloudAiplatformV1ExamplesRestrictionsNamespace(); buildCounterGoogleCloudAiplatformV1ExamplesRestrictionsNamespace++; if (buildCounterGoogleCloudAiplatformV1ExamplesRestrictionsNamespace < 3) { - o.allow = buildUnnamed80(); - o.deny = buildUnnamed81(); + o.allow = buildUnnamed89(); + o.deny = buildUnnamed90(); o.namespaceName = 'foo'; } buildCounterGoogleCloudAiplatformV1ExamplesRestrictionsNamespace--; @@ -5824,8 +6590,8 @@ void checkGoogleCloudAiplatformV1ExamplesRestrictionsNamespace( api.GoogleCloudAiplatformV1ExamplesRestrictionsNamespace o) { buildCounterGoogleCloudAiplatformV1ExamplesRestrictionsNamespace++; if (buildCounterGoogleCloudAiplatformV1ExamplesRestrictionsNamespace < 3) { - checkUnnamed80(o.allow!); - checkUnnamed81(o.deny!); + checkUnnamed89(o.allow!); + checkUnnamed90(o.deny!); unittest.expect( o.namespaceName!, unittest.equals('foo'), @@ -5834,12 +6600,12 @@ void checkGoogleCloudAiplatformV1ExamplesRestrictionsNamespace( buildCounterGoogleCloudAiplatformV1ExamplesRestrictionsNamespace--; } -core.Map buildUnnamed82() => { +core.Map buildUnnamed91() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed82(core.Map o) { +void checkUnnamed91(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -5851,7 +6617,7 @@ void checkUnnamed82(core.Map o) { ); } -core.Map buildUnnamed83() => { +core.Map buildUnnamed92() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -5864,7 +6630,7 @@ core.Map buildUnnamed83() => { }, }; -void checkUnnamed83(core.Map o) { +void checkUnnamed92(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted24 = (o['x']!) 
as core.Map; unittest.expect(casted24, unittest.hasLength(3)); @@ -5905,8 +6671,8 @@ api.GoogleCloudAiplatformV1Execution buildGoogleCloudAiplatformV1Execution() { o.description = 'foo'; o.displayName = 'foo'; o.etag = 'foo'; - o.labels = buildUnnamed82(); - o.metadata = buildUnnamed83(); + o.labels = buildUnnamed91(); + o.metadata = buildUnnamed92(); o.name = 'foo'; o.schemaTitle = 'foo'; o.schemaVersion = 'foo'; @@ -5937,8 +6703,8 @@ void checkGoogleCloudAiplatformV1Execution( o.etag!, unittest.equals('foo'), ); - checkUnnamed82(o.labels!); - checkUnnamed83(o.metadata!); + checkUnnamed91(o.labels!); + checkUnnamed92(o.metadata!); unittest.expect( o.name!, unittest.equals('foo'), @@ -5963,7 +6729,7 @@ void checkGoogleCloudAiplatformV1Execution( buildCounterGoogleCloudAiplatformV1Execution--; } -core.List buildUnnamed84() => [ +core.List buildUnnamed93() => [ { 'list': [1, 2, 3], 'bool': true, @@ -5976,7 +6742,7 @@ core.List buildUnnamed84() => [ }, ]; -void checkUnnamed84(core.List o) { +void checkUnnamed93(core.List o) { unittest.expect(o, unittest.hasLength(2)); var casted26 = (o[0]) as core.Map; unittest.expect(casted26, unittest.hasLength(3)); @@ -6017,7 +6783,7 @@ api.GoogleCloudAiplatformV1ExplainRequest o.deployedModelId = 'foo'; o.explanationSpecOverride = buildGoogleCloudAiplatformV1ExplanationSpecOverride(); - o.instances = buildUnnamed84(); + o.instances = buildUnnamed93(); o.parameters = { 'list': [1, 2, 3], 'bool': true, @@ -6038,7 +6804,7 @@ void checkGoogleCloudAiplatformV1ExplainRequest( ); checkGoogleCloudAiplatformV1ExplanationSpecOverride( o.explanationSpecOverride!); - checkUnnamed84(o.instances!); + checkUnnamed93(o.instances!); var casted28 = (o.parameters!) as core.Map; unittest.expect(casted28, unittest.hasLength(3)); unittest.expect( @@ -6057,18 +6823,18 @@ void checkGoogleCloudAiplatformV1ExplainRequest( buildCounterGoogleCloudAiplatformV1ExplainRequest--; } -core.List buildUnnamed85() => [ +core.List buildUnnamed94() => [ buildGoogleCloudAiplatformV1Explanation(), buildGoogleCloudAiplatformV1Explanation(), ]; -void checkUnnamed85(core.List o) { +void checkUnnamed94(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1Explanation(o[0]); checkGoogleCloudAiplatformV1Explanation(o[1]); } -core.List buildUnnamed86() => [ +core.List buildUnnamed95() => [ { 'list': [1, 2, 3], 'bool': true, @@ -6081,7 +6847,7 @@ core.List buildUnnamed86() => [ }, ]; -void checkUnnamed86(core.List o) { +void checkUnnamed95(core.List o) { unittest.expect(o, unittest.hasLength(2)); var casted29 = (o[0]) as core.Map; unittest.expect(casted29, unittest.hasLength(3)); @@ -6120,8 +6886,8 @@ api.GoogleCloudAiplatformV1ExplainResponse buildCounterGoogleCloudAiplatformV1ExplainResponse++; if (buildCounterGoogleCloudAiplatformV1ExplainResponse < 3) { o.deployedModelId = 'foo'; - o.explanations = buildUnnamed85(); - o.predictions = buildUnnamed86(); + o.explanations = buildUnnamed94(); + o.predictions = buildUnnamed95(); } buildCounterGoogleCloudAiplatformV1ExplainResponse--; return o; @@ -6135,29 +6901,29 @@ void checkGoogleCloudAiplatformV1ExplainResponse( o.deployedModelId!, unittest.equals('foo'), ); - checkUnnamed85(o.explanations!); - checkUnnamed86(o.predictions!); + checkUnnamed94(o.explanations!); + checkUnnamed95(o.predictions!); } buildCounterGoogleCloudAiplatformV1ExplainResponse--; } -core.List buildUnnamed87() => [ +core.List buildUnnamed96() => [ buildGoogleCloudAiplatformV1Attribution(), buildGoogleCloudAiplatformV1Attribution(), ]; -void 
checkUnnamed87(core.List o) { +void checkUnnamed96(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1Attribution(o[0]); checkGoogleCloudAiplatformV1Attribution(o[1]); } -core.List buildUnnamed88() => [ +core.List buildUnnamed97() => [ buildGoogleCloudAiplatformV1Neighbor(), buildGoogleCloudAiplatformV1Neighbor(), ]; -void checkUnnamed88(core.List o) { +void checkUnnamed97(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1Neighbor(o[0]); checkGoogleCloudAiplatformV1Neighbor(o[1]); @@ -6169,8 +6935,8 @@ api.GoogleCloudAiplatformV1Explanation final o = api.GoogleCloudAiplatformV1Explanation(); buildCounterGoogleCloudAiplatformV1Explanation++; if (buildCounterGoogleCloudAiplatformV1Explanation < 3) { - o.attributions = buildUnnamed87(); - o.neighbors = buildUnnamed88(); + o.attributions = buildUnnamed96(); + o.neighbors = buildUnnamed97(); } buildCounterGoogleCloudAiplatformV1Explanation--; return o; @@ -6180,20 +6946,20 @@ void checkGoogleCloudAiplatformV1Explanation( api.GoogleCloudAiplatformV1Explanation o) { buildCounterGoogleCloudAiplatformV1Explanation++; if (buildCounterGoogleCloudAiplatformV1Explanation < 3) { - checkUnnamed87(o.attributions!); - checkUnnamed88(o.neighbors!); + checkUnnamed96(o.attributions!); + checkUnnamed97(o.neighbors!); } buildCounterGoogleCloudAiplatformV1Explanation--; } core.Map - buildUnnamed89() => { + buildUnnamed98() => { 'x': buildGoogleCloudAiplatformV1ExplanationMetadataInputMetadata(), 'y': buildGoogleCloudAiplatformV1ExplanationMetadataInputMetadata(), }; -void checkUnnamed89( +void checkUnnamed98( core.Map o) { @@ -6204,12 +6970,12 @@ void checkUnnamed89( core.Map - buildUnnamed90() => { + buildUnnamed99() => { 'x': buildGoogleCloudAiplatformV1ExplanationMetadataOutputMetadata(), 'y': buildGoogleCloudAiplatformV1ExplanationMetadataOutputMetadata(), }; -void checkUnnamed90( +void checkUnnamed99( core.Map o) { @@ -6225,9 +6991,9 @@ api.GoogleCloudAiplatformV1ExplanationMetadata buildCounterGoogleCloudAiplatformV1ExplanationMetadata++; if (buildCounterGoogleCloudAiplatformV1ExplanationMetadata < 3) { o.featureAttributionsSchemaUri = 'foo'; - o.inputs = buildUnnamed89(); + o.inputs = buildUnnamed98(); o.latentSpaceSource = 'foo'; - o.outputs = buildUnnamed90(); + o.outputs = buildUnnamed99(); } buildCounterGoogleCloudAiplatformV1ExplanationMetadata--; return o; @@ -6241,17 +7007,17 @@ void checkGoogleCloudAiplatformV1ExplanationMetadata( o.featureAttributionsSchemaUri!, unittest.equals('foo'), ); - checkUnnamed89(o.inputs!); + checkUnnamed98(o.inputs!); unittest.expect( o.latentSpaceSource!, unittest.equals('foo'), ); - checkUnnamed90(o.outputs!); + checkUnnamed99(o.outputs!); } buildCounterGoogleCloudAiplatformV1ExplanationMetadata--; } -core.List buildUnnamed91() => [ +core.List buildUnnamed100() => [ { 'list': [1, 2, 3], 'bool': true, @@ -6264,7 +7030,7 @@ core.List buildUnnamed91() => [ }, ]; -void checkUnnamed91(core.List o) { +void checkUnnamed100(core.List o) { unittest.expect(o, unittest.hasLength(2)); var casted31 = (o[0]) as core.Map; unittest.expect(casted31, unittest.hasLength(3)); @@ -6296,12 +7062,12 @@ void checkUnnamed91(core.List o) { ); } -core.List buildUnnamed92() => [ +core.List buildUnnamed101() => [ 'foo', 'foo', ]; -void checkUnnamed92(core.List o) { +void checkUnnamed101(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -6313,7 +7079,7 @@ void checkUnnamed92(core.List o) { ); } -core.List buildUnnamed93() => [ +core.List 
buildUnnamed102() => [ { 'list': [1, 2, 3], 'bool': true, @@ -6326,7 +7092,7 @@ core.List buildUnnamed93() => [ }, ]; -void checkUnnamed93(core.List o) { +void checkUnnamed102(core.List o) { unittest.expect(o, unittest.hasLength(2)); var casted33 = (o[0]) as core.Map; unittest.expect(casted33, unittest.hasLength(3)); @@ -6366,15 +7132,15 @@ api.GoogleCloudAiplatformV1ExplanationMetadataInputMetadata buildCounterGoogleCloudAiplatformV1ExplanationMetadataInputMetadata++; if (buildCounterGoogleCloudAiplatformV1ExplanationMetadataInputMetadata < 3) { o.denseShapeTensorName = 'foo'; - o.encodedBaselines = buildUnnamed91(); + o.encodedBaselines = buildUnnamed100(); o.encodedTensorName = 'foo'; o.encoding = 'foo'; o.featureValueDomain = buildGoogleCloudAiplatformV1ExplanationMetadataInputMetadataFeatureValueDomain(); o.groupName = 'foo'; - o.indexFeatureMapping = buildUnnamed92(); + o.indexFeatureMapping = buildUnnamed101(); o.indicesTensorName = 'foo'; - o.inputBaselines = buildUnnamed93(); + o.inputBaselines = buildUnnamed102(); o.inputTensorName = 'foo'; o.modality = 'foo'; o.visualization = @@ -6392,7 +7158,7 @@ void checkGoogleCloudAiplatformV1ExplanationMetadataInputMetadata( o.denseShapeTensorName!, unittest.equals('foo'), ); - checkUnnamed91(o.encodedBaselines!); + checkUnnamed100(o.encodedBaselines!); unittest.expect( o.encodedTensorName!, unittest.equals('foo'), @@ -6407,12 +7173,12 @@ void checkGoogleCloudAiplatformV1ExplanationMetadataInputMetadata( o.groupName!, unittest.equals('foo'), ); - checkUnnamed92(o.indexFeatureMapping!); + checkUnnamed101(o.indexFeatureMapping!); unittest.expect( o.indicesTensorName!, unittest.equals('foo'), ); - checkUnnamed93(o.inputBaselines!); + checkUnnamed102(o.inputBaselines!); unittest.expect( o.inputTensorName!, unittest.equals('foo'), @@ -6582,14 +7348,14 @@ core.Map< core.String, api .GoogleCloudAiplatformV1ExplanationMetadataOverrideInputMetadataOverride> - buildUnnamed94() => { + buildUnnamed103() => { 'x': buildGoogleCloudAiplatformV1ExplanationMetadataOverrideInputMetadataOverride(), 'y': buildGoogleCloudAiplatformV1ExplanationMetadataOverrideInputMetadataOverride(), }; -void checkUnnamed94( +void checkUnnamed103( core.Map< core.String, api @@ -6608,7 +7374,7 @@ api.GoogleCloudAiplatformV1ExplanationMetadataOverride final o = api.GoogleCloudAiplatformV1ExplanationMetadataOverride(); buildCounterGoogleCloudAiplatformV1ExplanationMetadataOverride++; if (buildCounterGoogleCloudAiplatformV1ExplanationMetadataOverride < 3) { - o.inputs = buildUnnamed94(); + o.inputs = buildUnnamed103(); } buildCounterGoogleCloudAiplatformV1ExplanationMetadataOverride--; return o; @@ -6618,12 +7384,12 @@ void checkGoogleCloudAiplatformV1ExplanationMetadataOverride( api.GoogleCloudAiplatformV1ExplanationMetadataOverride o) { buildCounterGoogleCloudAiplatformV1ExplanationMetadataOverride++; if (buildCounterGoogleCloudAiplatformV1ExplanationMetadataOverride < 3) { - checkUnnamed94(o.inputs!); + checkUnnamed103(o.inputs!); } buildCounterGoogleCloudAiplatformV1ExplanationMetadataOverride--; } -core.List buildUnnamed95() => [ +core.List buildUnnamed104() => [ { 'list': [1, 2, 3], 'bool': true, @@ -6636,7 +7402,7 @@ core.List buildUnnamed95() => [ }, ]; -void checkUnnamed95(core.List o) { +void checkUnnamed104(core.List o) { unittest.expect(o, unittest.hasLength(2)); var casted36 = (o[0]) as core.Map; unittest.expect(casted36, unittest.hasLength(3)); @@ -6678,7 +7444,7 @@ api.GoogleCloudAiplatformV1ExplanationMetadataOverrideInputMetadataOverride 
buildCounterGoogleCloudAiplatformV1ExplanationMetadataOverrideInputMetadataOverride++; if (buildCounterGoogleCloudAiplatformV1ExplanationMetadataOverrideInputMetadataOverride < 3) { - o.inputBaselines = buildUnnamed95(); + o.inputBaselines = buildUnnamed104(); } buildCounterGoogleCloudAiplatformV1ExplanationMetadataOverrideInputMetadataOverride--; return o; @@ -6690,12 +7456,12 @@ void checkGoogleCloudAiplatformV1ExplanationMetadataOverrideInputMetadataOverrid buildCounterGoogleCloudAiplatformV1ExplanationMetadataOverrideInputMetadataOverride++; if (buildCounterGoogleCloudAiplatformV1ExplanationMetadataOverrideInputMetadataOverride < 3) { - checkUnnamed95(o.inputBaselines!); + checkUnnamed104(o.inputBaselines!); } buildCounterGoogleCloudAiplatformV1ExplanationMetadataOverrideInputMetadataOverride--; } -core.List buildUnnamed96() => [ +core.List buildUnnamed105() => [ { 'list': [1, 2, 3], 'bool': true, @@ -6708,7 +7474,7 @@ core.List buildUnnamed96() => [ }, ]; -void checkUnnamed96(core.List o) { +void checkUnnamed105(core.List o) { unittest.expect(o, unittest.hasLength(2)); var casted38 = (o[0]) as core.Map; unittest.expect(casted38, unittest.hasLength(3)); @@ -6749,7 +7515,7 @@ api.GoogleCloudAiplatformV1ExplanationParameters o.examples = buildGoogleCloudAiplatformV1Examples(); o.integratedGradientsAttribution = buildGoogleCloudAiplatformV1IntegratedGradientsAttribution(); - o.outputIndices = buildUnnamed96(); + o.outputIndices = buildUnnamed105(); o.sampledShapleyAttribution = buildGoogleCloudAiplatformV1SampledShapleyAttribution(); o.topK = 42; @@ -6766,7 +7532,7 @@ void checkGoogleCloudAiplatformV1ExplanationParameters( checkGoogleCloudAiplatformV1Examples(o.examples!); checkGoogleCloudAiplatformV1IntegratedGradientsAttribution( o.integratedGradientsAttribution!); - checkUnnamed96(o.outputIndices!); + checkUnnamed105(o.outputIndices!); checkGoogleCloudAiplatformV1SampledShapleyAttribution( o.sampledShapleyAttribution!); unittest.expect( @@ -6893,12 +7659,12 @@ void checkGoogleCloudAiplatformV1ExportDataRequest( } core.List - buildUnnamed97() => [ + buildUnnamed106() => [ buildGoogleCloudAiplatformV1DestinationFeatureSetting(), buildGoogleCloudAiplatformV1DestinationFeatureSetting(), ]; -void checkUnnamed97( +void checkUnnamed106( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1DestinationFeatureSetting(o[0]); @@ -6915,7 +7681,7 @@ api.GoogleCloudAiplatformV1ExportFeatureValuesRequest o.featureSelector = buildGoogleCloudAiplatformV1FeatureSelector(); o.fullExport = buildGoogleCloudAiplatformV1ExportFeatureValuesRequestFullExport(); - o.settings = buildUnnamed97(); + o.settings = buildUnnamed106(); o.snapshotExport = buildGoogleCloudAiplatformV1ExportFeatureValuesRequestSnapshotExport(); } @@ -6931,7 +7697,7 @@ void checkGoogleCloudAiplatformV1ExportFeatureValuesRequest( checkGoogleCloudAiplatformV1FeatureSelector(o.featureSelector!); checkGoogleCloudAiplatformV1ExportFeatureValuesRequestFullExport( o.fullExport!); - checkUnnamed97(o.settings!); + checkUnnamed106(o.settings!); checkGoogleCloudAiplatformV1ExportFeatureValuesRequestSnapshotExport( o.snapshotExport!); } @@ -7167,12 +7933,12 @@ void checkGoogleCloudAiplatformV1ExportTensorboardTimeSeriesDataRequest( buildCounterGoogleCloudAiplatformV1ExportTensorboardTimeSeriesDataRequest--; } -core.List buildUnnamed98() => [ +core.List buildUnnamed107() => [ buildGoogleCloudAiplatformV1TimeSeriesDataPoint(), buildGoogleCloudAiplatformV1TimeSeriesDataPoint(), ]; -void checkUnnamed98( +void 
checkUnnamed107( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1TimeSeriesDataPoint(o[0]); @@ -7190,7 +7956,7 @@ api.GoogleCloudAiplatformV1ExportTensorboardTimeSeriesDataResponse if (buildCounterGoogleCloudAiplatformV1ExportTensorboardTimeSeriesDataResponse < 3) { o.nextPageToken = 'foo'; - o.timeSeriesDataPoints = buildUnnamed98(); + o.timeSeriesDataPoints = buildUnnamed107(); } buildCounterGoogleCloudAiplatformV1ExportTensorboardTimeSeriesDataResponse--; return o; @@ -7205,17 +7971,85 @@ void checkGoogleCloudAiplatformV1ExportTensorboardTimeSeriesDataResponse( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed98(o.timeSeriesDataPoints!); + checkUnnamed107(o.timeSeriesDataPoints!); } buildCounterGoogleCloudAiplatformV1ExportTensorboardTimeSeriesDataResponse--; } -core.Map buildUnnamed99() => { +core.int buildCounterGoogleCloudAiplatformV1Fact = 0; +api.GoogleCloudAiplatformV1Fact buildGoogleCloudAiplatformV1Fact() { + final o = api.GoogleCloudAiplatformV1Fact(); + buildCounterGoogleCloudAiplatformV1Fact++; + if (buildCounterGoogleCloudAiplatformV1Fact < 3) { + o.query = 'foo'; + o.score = 42.0; + o.summary = 'foo'; + o.title = 'foo'; + o.uri = 'foo'; + o.vectorDistance = 42.0; + } + buildCounterGoogleCloudAiplatformV1Fact--; + return o; +} + +void checkGoogleCloudAiplatformV1Fact(api.GoogleCloudAiplatformV1Fact o) { + buildCounterGoogleCloudAiplatformV1Fact++; + if (buildCounterGoogleCloudAiplatformV1Fact < 3) { + unittest.expect( + o.query!, + unittest.equals('foo'), + ); + unittest.expect( + o.score!, + unittest.equals(42.0), + ); + unittest.expect( + o.summary!, + unittest.equals('foo'), + ); + unittest.expect( + o.title!, + unittest.equals('foo'), + ); + unittest.expect( + o.uri!, + unittest.equals('foo'), + ); + unittest.expect( + o.vectorDistance!, + unittest.equals(42.0), + ); + } + buildCounterGoogleCloudAiplatformV1Fact--; +} + +core.int buildCounterGoogleCloudAiplatformV1FasterDeploymentConfig = 0; +api.GoogleCloudAiplatformV1FasterDeploymentConfig + buildGoogleCloudAiplatformV1FasterDeploymentConfig() { + final o = api.GoogleCloudAiplatformV1FasterDeploymentConfig(); + buildCounterGoogleCloudAiplatformV1FasterDeploymentConfig++; + if (buildCounterGoogleCloudAiplatformV1FasterDeploymentConfig < 3) { + o.fastTryoutEnabled = true; + } + buildCounterGoogleCloudAiplatformV1FasterDeploymentConfig--; + return o; +} + +void checkGoogleCloudAiplatformV1FasterDeploymentConfig( + api.GoogleCloudAiplatformV1FasterDeploymentConfig o) { + buildCounterGoogleCloudAiplatformV1FasterDeploymentConfig++; + if (buildCounterGoogleCloudAiplatformV1FasterDeploymentConfig < 3) { + unittest.expect(o.fastTryoutEnabled!, unittest.isTrue); + } + buildCounterGoogleCloudAiplatformV1FasterDeploymentConfig--; +} + +core.Map buildUnnamed108() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed99(core.Map o) { +void checkUnnamed108(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -7228,12 +8062,12 @@ void checkUnnamed99(core.Map o) { } core.List - buildUnnamed100() => [ + buildUnnamed109() => [ buildGoogleCloudAiplatformV1FeatureMonitoringStatsAnomaly(), buildGoogleCloudAiplatformV1FeatureMonitoringStatsAnomaly(), ]; -void checkUnnamed100( +void checkUnnamed109( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1FeatureMonitoringStatsAnomaly(o[0]); @@ -7249,8 +8083,8 @@ api.GoogleCloudAiplatformV1Feature buildGoogleCloudAiplatformV1Feature() { o.description = 'foo'; o.disableMonitoring = true; 
o.etag = 'foo'; - o.labels = buildUnnamed99(); - o.monitoringStatsAnomalies = buildUnnamed100(); + o.labels = buildUnnamed108(); + o.monitoringStatsAnomalies = buildUnnamed109(); o.name = 'foo'; o.pointOfContact = 'foo'; o.updateTime = 'foo'; @@ -7277,8 +8111,8 @@ void checkGoogleCloudAiplatformV1Feature(api.GoogleCloudAiplatformV1Feature o) { o.etag!, unittest.equals('foo'), ); - checkUnnamed99(o.labels!); - checkUnnamed100(o.monitoringStatsAnomalies!); + checkUnnamed108(o.labels!); + checkUnnamed109(o.monitoringStatsAnomalies!); unittest.expect( o.name!, unittest.equals('foo'), @@ -7303,12 +8137,12 @@ void checkGoogleCloudAiplatformV1Feature(api.GoogleCloudAiplatformV1Feature o) { buildCounterGoogleCloudAiplatformV1Feature--; } -core.Map buildUnnamed101() => { +core.Map buildUnnamed110() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed101(core.Map o) { +void checkUnnamed110(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -7330,7 +8164,7 @@ api.GoogleCloudAiplatformV1FeatureGroup o.createTime = 'foo'; o.description = 'foo'; o.etag = 'foo'; - o.labels = buildUnnamed101(); + o.labels = buildUnnamed110(); o.name = 'foo'; o.updateTime = 'foo'; } @@ -7355,7 +8189,7 @@ void checkGoogleCloudAiplatformV1FeatureGroup( o.etag!, unittest.equals('foo'), ); - checkUnnamed101(o.labels!); + checkUnnamed110(o.labels!); unittest.expect( o.name!, unittest.equals('foo'), @@ -7368,12 +8202,12 @@ void checkGoogleCloudAiplatformV1FeatureGroup( buildCounterGoogleCloudAiplatformV1FeatureGroup--; } -core.List buildUnnamed102() => [ +core.List buildUnnamed111() => [ 'foo', 'foo', ]; -void checkUnnamed102(core.List o) { +void checkUnnamed111(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -7393,7 +8227,7 @@ api.GoogleCloudAiplatformV1FeatureGroupBigQuery if (buildCounterGoogleCloudAiplatformV1FeatureGroupBigQuery < 3) { o.bigQuerySource = buildGoogleCloudAiplatformV1BigQuerySource(); o.dense = true; - o.entityIdColumns = buildUnnamed102(); + o.entityIdColumns = buildUnnamed111(); o.staticDataSource = true; o.timeSeries = buildGoogleCloudAiplatformV1FeatureGroupBigQueryTimeSeries(); } @@ -7407,7 +8241,7 @@ void checkGoogleCloudAiplatformV1FeatureGroupBigQuery( if (buildCounterGoogleCloudAiplatformV1FeatureGroupBigQuery < 3) { checkGoogleCloudAiplatformV1BigQuerySource(o.bigQuerySource!); unittest.expect(o.dense!, unittest.isTrue); - checkUnnamed102(o.entityIdColumns!); + checkUnnamed111(o.entityIdColumns!); unittest.expect(o.staticDataSource!, unittest.isTrue); checkGoogleCloudAiplatformV1FeatureGroupBigQueryTimeSeries(o.timeSeries!); } @@ -7465,12 +8299,12 @@ void checkGoogleCloudAiplatformV1FeatureMonitoringStatsAnomaly( } core.List - buildUnnamed103() => [ + buildUnnamed112() => [ buildGoogleCloudAiplatformV1FeatureNoiseSigmaNoiseSigmaForFeature(), buildGoogleCloudAiplatformV1FeatureNoiseSigmaNoiseSigmaForFeature(), ]; -void checkUnnamed103( +void checkUnnamed112( core.List o) { unittest.expect(o, unittest.hasLength(2)); @@ -7484,7 +8318,7 @@ api.GoogleCloudAiplatformV1FeatureNoiseSigma final o = api.GoogleCloudAiplatformV1FeatureNoiseSigma(); buildCounterGoogleCloudAiplatformV1FeatureNoiseSigma++; if (buildCounterGoogleCloudAiplatformV1FeatureNoiseSigma < 3) { - o.noiseSigma = buildUnnamed103(); + o.noiseSigma = buildUnnamed112(); } buildCounterGoogleCloudAiplatformV1FeatureNoiseSigma--; return o; @@ -7494,7 +8328,7 @@ void checkGoogleCloudAiplatformV1FeatureNoiseSigma( api.GoogleCloudAiplatformV1FeatureNoiseSigma o) { 
buildCounterGoogleCloudAiplatformV1FeatureNoiseSigma++; if (buildCounterGoogleCloudAiplatformV1FeatureNoiseSigma < 3) { - checkUnnamed103(o.noiseSigma!); + checkUnnamed112(o.noiseSigma!); } buildCounterGoogleCloudAiplatformV1FeatureNoiseSigma--; } @@ -7532,12 +8366,12 @@ void checkGoogleCloudAiplatformV1FeatureNoiseSigmaNoiseSigmaForFeature( buildCounterGoogleCloudAiplatformV1FeatureNoiseSigmaNoiseSigmaForFeature--; } -core.Map buildUnnamed104() => { +core.Map buildUnnamed113() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed104(core.Map o) { +void checkUnnamed113(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -7561,7 +8395,7 @@ api.GoogleCloudAiplatformV1FeatureOnlineStore buildGoogleCloudAiplatformV1FeatureOnlineStoreDedicatedServingEndpoint(); o.encryptionSpec = buildGoogleCloudAiplatformV1EncryptionSpec(); o.etag = 'foo'; - o.labels = buildUnnamed104(); + o.labels = buildUnnamed113(); o.name = 'foo'; o.optimized = buildGoogleCloudAiplatformV1FeatureOnlineStoreOptimized(); o.satisfiesPzi = true; @@ -7589,7 +8423,7 @@ void checkGoogleCloudAiplatformV1FeatureOnlineStore( o.etag!, unittest.equals('foo'), ); - checkUnnamed104(o.labels!); + checkUnnamed113(o.labels!); unittest.expect( o.name!, unittest.equals('foo'), @@ -7878,12 +8712,12 @@ void checkGoogleCloudAiplatformV1FeatureValueDestination( buildCounterGoogleCloudAiplatformV1FeatureValueDestination--; } -core.List buildUnnamed105() => [ +core.List buildUnnamed114() => [ buildGoogleCloudAiplatformV1FeatureValue(), buildGoogleCloudAiplatformV1FeatureValue(), ]; -void checkUnnamed105(core.List o) { +void checkUnnamed114(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1FeatureValue(o[0]); checkGoogleCloudAiplatformV1FeatureValue(o[1]); @@ -7895,7 +8729,7 @@ api.GoogleCloudAiplatformV1FeatureValueList final o = api.GoogleCloudAiplatformV1FeatureValueList(); buildCounterGoogleCloudAiplatformV1FeatureValueList++; if (buildCounterGoogleCloudAiplatformV1FeatureValueList < 3) { - o.values = buildUnnamed105(); + o.values = buildUnnamed114(); } buildCounterGoogleCloudAiplatformV1FeatureValueList--; return o; @@ -7905,7 +8739,7 @@ void checkGoogleCloudAiplatformV1FeatureValueList( api.GoogleCloudAiplatformV1FeatureValueList o) { buildCounterGoogleCloudAiplatformV1FeatureValueList++; if (buildCounterGoogleCloudAiplatformV1FeatureValueList < 3) { - checkUnnamed105(o.values!); + checkUnnamed114(o.values!); } buildCounterGoogleCloudAiplatformV1FeatureValueList--; } @@ -7934,12 +8768,12 @@ void checkGoogleCloudAiplatformV1FeatureValueMetadata( buildCounterGoogleCloudAiplatformV1FeatureValueMetadata--; } -core.Map buildUnnamed106() => { +core.Map buildUnnamed115() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed106(core.Map o) { +void checkUnnamed115(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -7963,8 +8797,10 @@ api.GoogleCloudAiplatformV1FeatureView o.featureRegistrySource = buildGoogleCloudAiplatformV1FeatureViewFeatureRegistrySource(); o.indexConfig = buildGoogleCloudAiplatformV1FeatureViewIndexConfig(); - o.labels = buildUnnamed106(); + o.labels = buildUnnamed115(); o.name = 'foo'; + o.optimizedConfig = + buildGoogleCloudAiplatformV1FeatureViewOptimizedConfig(); o.satisfiesPzi = true; o.satisfiesPzs = true; o.syncConfig = buildGoogleCloudAiplatformV1FeatureViewSyncConfig(); @@ -7992,11 +8828,12 @@ void checkGoogleCloudAiplatformV1FeatureView( checkGoogleCloudAiplatformV1FeatureViewFeatureRegistrySource( 
o.featureRegistrySource!); checkGoogleCloudAiplatformV1FeatureViewIndexConfig(o.indexConfig!); - checkUnnamed106(o.labels!); + checkUnnamed115(o.labels!); unittest.expect( o.name!, unittest.equals('foo'), ); + checkGoogleCloudAiplatformV1FeatureViewOptimizedConfig(o.optimizedConfig!); unittest.expect(o.satisfiesPzi!, unittest.isTrue); unittest.expect(o.satisfiesPzs!, unittest.isTrue); checkGoogleCloudAiplatformV1FeatureViewSyncConfig(o.syncConfig!); @@ -8009,12 +8846,12 @@ void checkGoogleCloudAiplatformV1FeatureView( buildCounterGoogleCloudAiplatformV1FeatureView--; } -core.List buildUnnamed107() => [ +core.List buildUnnamed116() => [ 'foo', 'foo', ]; -void checkUnnamed107(core.List o) { +void checkUnnamed116(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -8032,7 +8869,7 @@ api.GoogleCloudAiplatformV1FeatureViewBigQuerySource final o = api.GoogleCloudAiplatformV1FeatureViewBigQuerySource(); buildCounterGoogleCloudAiplatformV1FeatureViewBigQuerySource++; if (buildCounterGoogleCloudAiplatformV1FeatureViewBigQuerySource < 3) { - o.entityIdColumns = buildUnnamed107(); + o.entityIdColumns = buildUnnamed116(); o.uri = 'foo'; } buildCounterGoogleCloudAiplatformV1FeatureViewBigQuerySource--; @@ -8043,7 +8880,7 @@ void checkGoogleCloudAiplatformV1FeatureViewBigQuerySource( api.GoogleCloudAiplatformV1FeatureViewBigQuerySource o) { buildCounterGoogleCloudAiplatformV1FeatureViewBigQuerySource++; if (buildCounterGoogleCloudAiplatformV1FeatureViewBigQuerySource < 3) { - checkUnnamed107(o.entityIdColumns!); + checkUnnamed116(o.entityIdColumns!); unittest.expect( o.uri!, unittest.equals('foo'), @@ -8079,12 +8916,12 @@ void checkGoogleCloudAiplatformV1FeatureViewDataKey( buildCounterGoogleCloudAiplatformV1FeatureViewDataKey--; } -core.List buildUnnamed108() => [ +core.List buildUnnamed117() => [ 'foo', 'foo', ]; -void checkUnnamed108(core.List o) { +void checkUnnamed117(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -8102,7 +8939,7 @@ api.GoogleCloudAiplatformV1FeatureViewDataKeyCompositeKey final o = api.GoogleCloudAiplatformV1FeatureViewDataKeyCompositeKey(); buildCounterGoogleCloudAiplatformV1FeatureViewDataKeyCompositeKey++; if (buildCounterGoogleCloudAiplatformV1FeatureViewDataKeyCompositeKey < 3) { - o.parts = buildUnnamed108(); + o.parts = buildUnnamed117(); } buildCounterGoogleCloudAiplatformV1FeatureViewDataKeyCompositeKey--; return o; @@ -8112,19 +8949,19 @@ void checkGoogleCloudAiplatformV1FeatureViewDataKeyCompositeKey( api.GoogleCloudAiplatformV1FeatureViewDataKeyCompositeKey o) { buildCounterGoogleCloudAiplatformV1FeatureViewDataKeyCompositeKey++; if (buildCounterGoogleCloudAiplatformV1FeatureViewDataKeyCompositeKey < 3) { - checkUnnamed108(o.parts!); + checkUnnamed117(o.parts!); } buildCounterGoogleCloudAiplatformV1FeatureViewDataKeyCompositeKey--; } core.List< api.GoogleCloudAiplatformV1FeatureViewFeatureRegistrySourceFeatureGroup> - buildUnnamed109() => [ + buildUnnamed118() => [ buildGoogleCloudAiplatformV1FeatureViewFeatureRegistrySourceFeatureGroup(), buildGoogleCloudAiplatformV1FeatureViewFeatureRegistrySourceFeatureGroup(), ]; -void checkUnnamed109( +void checkUnnamed118( core.List< api .GoogleCloudAiplatformV1FeatureViewFeatureRegistrySourceFeatureGroup> @@ -8143,7 +8980,7 @@ api.GoogleCloudAiplatformV1FeatureViewFeatureRegistrySource final o = api.GoogleCloudAiplatformV1FeatureViewFeatureRegistrySource(); buildCounterGoogleCloudAiplatformV1FeatureViewFeatureRegistrySource++; if 
(buildCounterGoogleCloudAiplatformV1FeatureViewFeatureRegistrySource < 3) { - o.featureGroups = buildUnnamed109(); + o.featureGroups = buildUnnamed118(); o.projectNumber = 'foo'; } buildCounterGoogleCloudAiplatformV1FeatureViewFeatureRegistrySource--; @@ -8154,7 +8991,7 @@ void checkGoogleCloudAiplatformV1FeatureViewFeatureRegistrySource( api.GoogleCloudAiplatformV1FeatureViewFeatureRegistrySource o) { buildCounterGoogleCloudAiplatformV1FeatureViewFeatureRegistrySource++; if (buildCounterGoogleCloudAiplatformV1FeatureViewFeatureRegistrySource < 3) { - checkUnnamed109(o.featureGroups!); + checkUnnamed118(o.featureGroups!); unittest.expect( o.projectNumber!, unittest.equals('foo'), @@ -8163,12 +9000,12 @@ void checkGoogleCloudAiplatformV1FeatureViewFeatureRegistrySource( buildCounterGoogleCloudAiplatformV1FeatureViewFeatureRegistrySource--; } -core.List buildUnnamed110() => [ +core.List buildUnnamed119() => [ 'foo', 'foo', ]; -void checkUnnamed110(core.List o) { +void checkUnnamed119(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -8191,7 +9028,7 @@ api.GoogleCloudAiplatformV1FeatureViewFeatureRegistrySourceFeatureGroup if (buildCounterGoogleCloudAiplatformV1FeatureViewFeatureRegistrySourceFeatureGroup < 3) { o.featureGroupId = 'foo'; - o.featureIds = buildUnnamed110(); + o.featureIds = buildUnnamed119(); } buildCounterGoogleCloudAiplatformV1FeatureViewFeatureRegistrySourceFeatureGroup--; return o; @@ -8206,17 +9043,17 @@ void checkGoogleCloudAiplatformV1FeatureViewFeatureRegistrySourceFeatureGroup( o.featureGroupId!, unittest.equals('foo'), ); - checkUnnamed110(o.featureIds!); + checkUnnamed119(o.featureIds!); } buildCounterGoogleCloudAiplatformV1FeatureViewFeatureRegistrySourceFeatureGroup--; } -core.List buildUnnamed111() => [ +core.List buildUnnamed120() => [ 'foo', 'foo', ]; -void checkUnnamed111(core.List o) { +void checkUnnamed120(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -8240,7 +9077,7 @@ api.GoogleCloudAiplatformV1FeatureViewIndexConfig o.distanceMeasureType = 'foo'; o.embeddingColumn = 'foo'; o.embeddingDimension = 42; - o.filterColumns = buildUnnamed111(); + o.filterColumns = buildUnnamed120(); o.treeAhConfig = buildGoogleCloudAiplatformV1FeatureViewIndexConfigTreeAHConfig(); } @@ -8270,7 +9107,7 @@ void checkGoogleCloudAiplatformV1FeatureViewIndexConfig( o.embeddingDimension!, unittest.equals(42), ); - checkUnnamed111(o.filterColumns!); + checkUnnamed120(o.filterColumns!); checkGoogleCloudAiplatformV1FeatureViewIndexConfigTreeAHConfig( o.treeAhConfig!); } @@ -8325,6 +9162,27 @@ void checkGoogleCloudAiplatformV1FeatureViewIndexConfigTreeAHConfig( buildCounterGoogleCloudAiplatformV1FeatureViewIndexConfigTreeAHConfig--; } +core.int buildCounterGoogleCloudAiplatformV1FeatureViewOptimizedConfig = 0; +api.GoogleCloudAiplatformV1FeatureViewOptimizedConfig + buildGoogleCloudAiplatformV1FeatureViewOptimizedConfig() { + final o = api.GoogleCloudAiplatformV1FeatureViewOptimizedConfig(); + buildCounterGoogleCloudAiplatformV1FeatureViewOptimizedConfig++; + if (buildCounterGoogleCloudAiplatformV1FeatureViewOptimizedConfig < 3) { + o.automaticResources = buildGoogleCloudAiplatformV1AutomaticResources(); + } + buildCounterGoogleCloudAiplatformV1FeatureViewOptimizedConfig--; + return o; +} + +void checkGoogleCloudAiplatformV1FeatureViewOptimizedConfig( + api.GoogleCloudAiplatformV1FeatureViewOptimizedConfig o) { + buildCounterGoogleCloudAiplatformV1FeatureViewOptimizedConfig++; + if 
(buildCounterGoogleCloudAiplatformV1FeatureViewOptimizedConfig < 3) { + checkGoogleCloudAiplatformV1AutomaticResources(o.automaticResources!); + } + buildCounterGoogleCloudAiplatformV1FeatureViewOptimizedConfig--; +} + core.int buildCounterGoogleCloudAiplatformV1FeatureViewSync = 0; api.GoogleCloudAiplatformV1FeatureViewSync buildGoogleCloudAiplatformV1FeatureViewSync() { @@ -8453,12 +9311,12 @@ void checkGoogleCloudAiplatformV1FeatureViewVertexRagSource( buildCounterGoogleCloudAiplatformV1FeatureViewVertexRagSource--; } -core.Map buildUnnamed112() => { +core.Map buildUnnamed121() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed112(core.Map o) { +void checkUnnamed121(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -8479,7 +9337,7 @@ api.GoogleCloudAiplatformV1Featurestore o.createTime = 'foo'; o.encryptionSpec = buildGoogleCloudAiplatformV1EncryptionSpec(); o.etag = 'foo'; - o.labels = buildUnnamed112(); + o.labels = buildUnnamed121(); o.name = 'foo'; o.onlineServingConfig = buildGoogleCloudAiplatformV1FeaturestoreOnlineServingConfig(); @@ -8506,7 +9364,7 @@ void checkGoogleCloudAiplatformV1Featurestore( o.etag!, unittest.equals('foo'), ); - checkUnnamed112(o.labels!); + checkUnnamed121(o.labels!); unittest.expect( o.name!, unittest.equals('foo'), @@ -8758,7 +9616,7 @@ void checkGoogleCloudAiplatformV1FetchFeatureValuesRequest( buildCounterGoogleCloudAiplatformV1FetchFeatureValuesRequest--; } -core.Map buildUnnamed113() => { +core.Map buildUnnamed122() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -8771,7 +9629,7 @@ core.Map buildUnnamed113() => { }, }; -void checkUnnamed113(core.Map o) { +void checkUnnamed122(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted40 = (o['x']!) as core.Map; unittest.expect(casted40, unittest.hasLength(3)); @@ -8812,7 +9670,7 @@ api.GoogleCloudAiplatformV1FetchFeatureValuesResponse o.dataKey = buildGoogleCloudAiplatformV1FeatureViewDataKey(); o.keyValues = buildGoogleCloudAiplatformV1FetchFeatureValuesResponseFeatureNameValuePairList(); - o.protoStruct = buildUnnamed113(); + o.protoStruct = buildUnnamed122(); } buildCounterGoogleCloudAiplatformV1FetchFeatureValuesResponse--; return o; @@ -8825,7 +9683,7 @@ void checkGoogleCloudAiplatformV1FetchFeatureValuesResponse( checkGoogleCloudAiplatformV1FeatureViewDataKey(o.dataKey!); checkGoogleCloudAiplatformV1FetchFeatureValuesResponseFeatureNameValuePairList( o.keyValues!); - checkUnnamed113(o.protoStruct!); + checkUnnamed122(o.protoStruct!); } buildCounterGoogleCloudAiplatformV1FetchFeatureValuesResponse--; } @@ -8833,12 +9691,12 @@ void checkGoogleCloudAiplatformV1FetchFeatureValuesResponse( core.List< api .GoogleCloudAiplatformV1FetchFeatureValuesResponseFeatureNameValuePairListFeatureNameValuePair> - buildUnnamed114() => [ + buildUnnamed123() => [ buildGoogleCloudAiplatformV1FetchFeatureValuesResponseFeatureNameValuePairListFeatureNameValuePair(), buildGoogleCloudAiplatformV1FetchFeatureValuesResponseFeatureNameValuePairListFeatureNameValuePair(), ]; -void checkUnnamed114( +void checkUnnamed123( core.List< api .GoogleCloudAiplatformV1FetchFeatureValuesResponseFeatureNameValuePairListFeatureNameValuePair> @@ -8860,7 +9718,7 @@ api.GoogleCloudAiplatformV1FetchFeatureValuesResponseFeatureNameValuePairList buildCounterGoogleCloudAiplatformV1FetchFeatureValuesResponseFeatureNameValuePairList++; if (buildCounterGoogleCloudAiplatformV1FetchFeatureValuesResponseFeatureNameValuePairList < 3) { - o.features = buildUnnamed114(); + o.features = buildUnnamed123(); } 
buildCounterGoogleCloudAiplatformV1FetchFeatureValuesResponseFeatureNameValuePairList--; return o; @@ -8872,7 +9730,7 @@ void checkGoogleCloudAiplatformV1FetchFeatureValuesResponseFeatureNameValuePairL buildCounterGoogleCloudAiplatformV1FetchFeatureValuesResponseFeatureNameValuePairList++; if (buildCounterGoogleCloudAiplatformV1FetchFeatureValuesResponseFeatureNameValuePairList < 3) { - checkUnnamed114(o.features!); + checkUnnamed123(o.features!); } buildCounterGoogleCloudAiplatformV1FetchFeatureValuesResponseFeatureNameValuePairList--; } @@ -8909,6 +9767,30 @@ void checkGoogleCloudAiplatformV1FetchFeatureValuesResponseFeatureNameValuePairL buildCounterGoogleCloudAiplatformV1FetchFeatureValuesResponseFeatureNameValuePairListFeatureNameValuePair--; } +core.int buildCounterGoogleCloudAiplatformV1FetchPredictOperationRequest = 0; +api.GoogleCloudAiplatformV1FetchPredictOperationRequest + buildGoogleCloudAiplatformV1FetchPredictOperationRequest() { + final o = api.GoogleCloudAiplatformV1FetchPredictOperationRequest(); + buildCounterGoogleCloudAiplatformV1FetchPredictOperationRequest++; + if (buildCounterGoogleCloudAiplatformV1FetchPredictOperationRequest < 3) { + o.operationName = 'foo'; + } + buildCounterGoogleCloudAiplatformV1FetchPredictOperationRequest--; + return o; +} + +void checkGoogleCloudAiplatformV1FetchPredictOperationRequest( + api.GoogleCloudAiplatformV1FetchPredictOperationRequest o) { + buildCounterGoogleCloudAiplatformV1FetchPredictOperationRequest++; + if (buildCounterGoogleCloudAiplatformV1FetchPredictOperationRequest < 3) { + unittest.expect( + o.operationName!, + unittest.equals('foo'), + ); + } + buildCounterGoogleCloudAiplatformV1FetchPredictOperationRequest--; +} + core.int buildCounterGoogleCloudAiplatformV1FileData = 0; api.GoogleCloudAiplatformV1FileData buildGoogleCloudAiplatformV1FileData() { final o = api.GoogleCloudAiplatformV1FileData(); @@ -8937,6 +9819,34 @@ void checkGoogleCloudAiplatformV1FileData( buildCounterGoogleCloudAiplatformV1FileData--; } +core.int buildCounterGoogleCloudAiplatformV1FileStatus = 0; +api.GoogleCloudAiplatformV1FileStatus buildGoogleCloudAiplatformV1FileStatus() { + final o = api.GoogleCloudAiplatformV1FileStatus(); + buildCounterGoogleCloudAiplatformV1FileStatus++; + if (buildCounterGoogleCloudAiplatformV1FileStatus < 3) { + o.errorStatus = 'foo'; + o.state = 'foo'; + } + buildCounterGoogleCloudAiplatformV1FileStatus--; + return o; +} + +void checkGoogleCloudAiplatformV1FileStatus( + api.GoogleCloudAiplatformV1FileStatus o) { + buildCounterGoogleCloudAiplatformV1FileStatus++; + if (buildCounterGoogleCloudAiplatformV1FileStatus < 3) { + unittest.expect( + o.errorStatus!, + unittest.equals('foo'), + ); + unittest.expect( + o.state!, + unittest.equals('foo'), + ); + } + buildCounterGoogleCloudAiplatformV1FileStatus--; +} + core.int buildCounterGoogleCloudAiplatformV1FilterSplit = 0; api.GoogleCloudAiplatformV1FilterSplit buildGoogleCloudAiplatformV1FilterSplit() { @@ -8972,12 +9882,12 @@ void checkGoogleCloudAiplatformV1FilterSplit( } core.List - buildUnnamed115() => [ + buildUnnamed124() => [ buildGoogleCloudAiplatformV1FindNeighborsRequestQuery(), buildGoogleCloudAiplatformV1FindNeighborsRequestQuery(), ]; -void checkUnnamed115( +void checkUnnamed124( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1FindNeighborsRequestQuery(o[0]); @@ -8991,7 +9901,7 @@ api.GoogleCloudAiplatformV1FindNeighborsRequest buildCounterGoogleCloudAiplatformV1FindNeighborsRequest++; if 
(buildCounterGoogleCloudAiplatformV1FindNeighborsRequest < 3) { o.deployedIndexId = 'foo'; - o.queries = buildUnnamed115(); + o.queries = buildUnnamed124(); o.returnFullDatapoint = true; } buildCounterGoogleCloudAiplatformV1FindNeighborsRequest--; @@ -9006,7 +9916,7 @@ void checkGoogleCloudAiplatformV1FindNeighborsRequest( o.deployedIndexId!, unittest.equals('foo'), ); - checkUnnamed115(o.queries!); + checkUnnamed124(o.queries!); unittest.expect(o.returnFullDatapoint!, unittest.isTrue); } buildCounterGoogleCloudAiplatformV1FindNeighborsRequest--; @@ -9080,12 +9990,12 @@ void checkGoogleCloudAiplatformV1FindNeighborsRequestQueryRRF( } core.List - buildUnnamed116() => [ + buildUnnamed125() => [ buildGoogleCloudAiplatformV1FindNeighborsResponseNearestNeighbors(), buildGoogleCloudAiplatformV1FindNeighborsResponseNearestNeighbors(), ]; -void checkUnnamed116( +void checkUnnamed125( core.List o) { unittest.expect(o, unittest.hasLength(2)); @@ -9099,7 +10009,7 @@ api.GoogleCloudAiplatformV1FindNeighborsResponse final o = api.GoogleCloudAiplatformV1FindNeighborsResponse(); buildCounterGoogleCloudAiplatformV1FindNeighborsResponse++; if (buildCounterGoogleCloudAiplatformV1FindNeighborsResponse < 3) { - o.nearestNeighbors = buildUnnamed116(); + o.nearestNeighbors = buildUnnamed125(); } buildCounterGoogleCloudAiplatformV1FindNeighborsResponse--; return o; @@ -9109,18 +10019,18 @@ void checkGoogleCloudAiplatformV1FindNeighborsResponse( api.GoogleCloudAiplatformV1FindNeighborsResponse o) { buildCounterGoogleCloudAiplatformV1FindNeighborsResponse++; if (buildCounterGoogleCloudAiplatformV1FindNeighborsResponse < 3) { - checkUnnamed116(o.nearestNeighbors!); + checkUnnamed125(o.nearestNeighbors!); } buildCounterGoogleCloudAiplatformV1FindNeighborsResponse--; } core.List - buildUnnamed117() => [ + buildUnnamed126() => [ buildGoogleCloudAiplatformV1FindNeighborsResponseNeighbor(), buildGoogleCloudAiplatformV1FindNeighborsResponseNeighbor(), ]; -void checkUnnamed117( +void checkUnnamed126( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1FindNeighborsResponseNeighbor(o[0]); @@ -9137,7 +10047,7 @@ api.GoogleCloudAiplatformV1FindNeighborsResponseNearestNeighbors if (buildCounterGoogleCloudAiplatformV1FindNeighborsResponseNearestNeighbors < 3) { o.id = 'foo'; - o.neighbors = buildUnnamed117(); + o.neighbors = buildUnnamed126(); } buildCounterGoogleCloudAiplatformV1FindNeighborsResponseNearestNeighbors--; return o; @@ -9152,7 +10062,7 @@ void checkGoogleCloudAiplatformV1FindNeighborsResponseNearestNeighbors( o.id!, unittest.equals('foo'), ); - checkUnnamed117(o.neighbors!); + checkUnnamed126(o.neighbors!); } buildCounterGoogleCloudAiplatformV1FindNeighborsResponseNearestNeighbors--; } @@ -9437,7 +10347,7 @@ void checkGoogleCloudAiplatformV1FulfillmentSpec( buildCounterGoogleCloudAiplatformV1FulfillmentSpec--; } -core.Map buildUnnamed118() => { +core.Map buildUnnamed127() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -9450,7 +10360,7 @@ core.Map buildUnnamed118() => { }, }; -void checkUnnamed118(core.Map o) { +void checkUnnamed127(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted42 = (o['x']!) 
as core.Map; unittest.expect(casted42, unittest.hasLength(3)); @@ -9488,7 +10398,7 @@ api.GoogleCloudAiplatformV1FunctionCall final o = api.GoogleCloudAiplatformV1FunctionCall(); buildCounterGoogleCloudAiplatformV1FunctionCall++; if (buildCounterGoogleCloudAiplatformV1FunctionCall < 3) { - o.args = buildUnnamed118(); + o.args = buildUnnamed127(); o.name = 'foo'; } buildCounterGoogleCloudAiplatformV1FunctionCall--; @@ -9499,7 +10409,7 @@ void checkGoogleCloudAiplatformV1FunctionCall( api.GoogleCloudAiplatformV1FunctionCall o) { buildCounterGoogleCloudAiplatformV1FunctionCall++; if (buildCounterGoogleCloudAiplatformV1FunctionCall < 3) { - checkUnnamed118(o.args!); + checkUnnamed127(o.args!); unittest.expect( o.name!, unittest.equals('foo'), @@ -9508,12 +10418,12 @@ void checkGoogleCloudAiplatformV1FunctionCall( buildCounterGoogleCloudAiplatformV1FunctionCall--; } -core.List buildUnnamed119() => [ +core.List buildUnnamed128() => [ 'foo', 'foo', ]; -void checkUnnamed119(core.List o) { +void checkUnnamed128(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -9531,7 +10441,7 @@ api.GoogleCloudAiplatformV1FunctionCallingConfig final o = api.GoogleCloudAiplatformV1FunctionCallingConfig(); buildCounterGoogleCloudAiplatformV1FunctionCallingConfig++; if (buildCounterGoogleCloudAiplatformV1FunctionCallingConfig < 3) { - o.allowedFunctionNames = buildUnnamed119(); + o.allowedFunctionNames = buildUnnamed128(); o.mode = 'foo'; } buildCounterGoogleCloudAiplatformV1FunctionCallingConfig--; @@ -9542,7 +10452,7 @@ void checkGoogleCloudAiplatformV1FunctionCallingConfig( api.GoogleCloudAiplatformV1FunctionCallingConfig o) { buildCounterGoogleCloudAiplatformV1FunctionCallingConfig++; if (buildCounterGoogleCloudAiplatformV1FunctionCallingConfig < 3) { - checkUnnamed119(o.allowedFunctionNames!); + checkUnnamed128(o.allowedFunctionNames!); unittest.expect( o.mode!, unittest.equals('foo'), @@ -9584,7 +10494,7 @@ void checkGoogleCloudAiplatformV1FunctionDeclaration( buildCounterGoogleCloudAiplatformV1FunctionDeclaration--; } -core.Map buildUnnamed120() => { +core.Map buildUnnamed129() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -9597,7 +10507,7 @@ core.Map buildUnnamed120() => { }, }; -void checkUnnamed120(core.Map o) { +void checkUnnamed129(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted44 = (o['x']!) 
as core.Map; unittest.expect(casted44, unittest.hasLength(3)); @@ -9636,7 +10546,7 @@ api.GoogleCloudAiplatformV1FunctionResponse buildCounterGoogleCloudAiplatformV1FunctionResponse++; if (buildCounterGoogleCloudAiplatformV1FunctionResponse < 3) { o.name = 'foo'; - o.response = buildUnnamed120(); + o.response = buildUnnamed129(); } buildCounterGoogleCloudAiplatformV1FunctionResponse--; return o; @@ -9650,7 +10560,7 @@ void checkGoogleCloudAiplatformV1FunctionResponse( o.name!, unittest.equals('foo'), ); - checkUnnamed120(o.response!); + checkUnnamed129(o.response!); } buildCounterGoogleCloudAiplatformV1FunctionResponse--; } @@ -9679,12 +10589,12 @@ void checkGoogleCloudAiplatformV1GcsDestination( buildCounterGoogleCloudAiplatformV1GcsDestination--; } -core.List buildUnnamed121() => [ +core.List buildUnnamed130() => [ 'foo', 'foo', ]; -void checkUnnamed121(core.List o) { +void checkUnnamed130(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -9701,7 +10611,7 @@ api.GoogleCloudAiplatformV1GcsSource buildGoogleCloudAiplatformV1GcsSource() { final o = api.GoogleCloudAiplatformV1GcsSource(); buildCounterGoogleCloudAiplatformV1GcsSource++; if (buildCounterGoogleCloudAiplatformV1GcsSource < 3) { - o.uris = buildUnnamed121(); + o.uris = buildUnnamed130(); } buildCounterGoogleCloudAiplatformV1GcsSource--; return o; @@ -9711,28 +10621,28 @@ void checkGoogleCloudAiplatformV1GcsSource( api.GoogleCloudAiplatformV1GcsSource o) { buildCounterGoogleCloudAiplatformV1GcsSource++; if (buildCounterGoogleCloudAiplatformV1GcsSource < 3) { - checkUnnamed121(o.uris!); + checkUnnamed130(o.uris!); } buildCounterGoogleCloudAiplatformV1GcsSource--; } -core.List buildUnnamed122() => [ +core.List buildUnnamed131() => [ buildGoogleCloudAiplatformV1Content(), buildGoogleCloudAiplatformV1Content(), ]; -void checkUnnamed122(core.List o) { +void checkUnnamed131(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1Content(o[0]); checkGoogleCloudAiplatformV1Content(o[1]); } -core.Map buildUnnamed123() => { +core.Map buildUnnamed132() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed123(core.Map o) { +void checkUnnamed132(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -9744,23 +10654,23 @@ void checkUnnamed123(core.Map o) { ); } -core.List buildUnnamed124() => [ +core.List buildUnnamed133() => [ buildGoogleCloudAiplatformV1SafetySetting(), buildGoogleCloudAiplatformV1SafetySetting(), ]; -void checkUnnamed124(core.List o) { +void checkUnnamed133(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1SafetySetting(o[0]); checkGoogleCloudAiplatformV1SafetySetting(o[1]); } -core.List buildUnnamed125() => [ +core.List buildUnnamed134() => [ buildGoogleCloudAiplatformV1Tool(), buildGoogleCloudAiplatformV1Tool(), ]; -void checkUnnamed125(core.List o) { +void checkUnnamed134(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1Tool(o[0]); checkGoogleCloudAiplatformV1Tool(o[1]); @@ -9772,13 +10682,14 @@ api.GoogleCloudAiplatformV1GenerateContentRequest final o = api.GoogleCloudAiplatformV1GenerateContentRequest(); buildCounterGoogleCloudAiplatformV1GenerateContentRequest++; if (buildCounterGoogleCloudAiplatformV1GenerateContentRequest < 3) { - o.contents = buildUnnamed122(); + o.cachedContent = 'foo'; + o.contents = buildUnnamed131(); o.generationConfig = buildGoogleCloudAiplatformV1GenerationConfig(); - o.labels = buildUnnamed123(); - o.safetySettings = 
buildUnnamed124(); + o.labels = buildUnnamed132(); + o.safetySettings = buildUnnamed133(); o.systemInstruction = buildGoogleCloudAiplatformV1Content(); o.toolConfig = buildGoogleCloudAiplatformV1ToolConfig(); - o.tools = buildUnnamed125(); + o.tools = buildUnnamed134(); } buildCounterGoogleCloudAiplatformV1GenerateContentRequest--; return o; @@ -9788,23 +10699,27 @@ void checkGoogleCloudAiplatformV1GenerateContentRequest( api.GoogleCloudAiplatformV1GenerateContentRequest o) { buildCounterGoogleCloudAiplatformV1GenerateContentRequest++; if (buildCounterGoogleCloudAiplatformV1GenerateContentRequest < 3) { - checkUnnamed122(o.contents!); + unittest.expect( + o.cachedContent!, + unittest.equals('foo'), + ); + checkUnnamed131(o.contents!); checkGoogleCloudAiplatformV1GenerationConfig(o.generationConfig!); - checkUnnamed123(o.labels!); - checkUnnamed124(o.safetySettings!); + checkUnnamed132(o.labels!); + checkUnnamed133(o.safetySettings!); checkGoogleCloudAiplatformV1Content(o.systemInstruction!); checkGoogleCloudAiplatformV1ToolConfig(o.toolConfig!); - checkUnnamed125(o.tools!); + checkUnnamed134(o.tools!); } buildCounterGoogleCloudAiplatformV1GenerateContentRequest--; } -core.List buildUnnamed126() => [ +core.List buildUnnamed135() => [ buildGoogleCloudAiplatformV1Candidate(), buildGoogleCloudAiplatformV1Candidate(), ]; -void checkUnnamed126(core.List o) { +void checkUnnamed135(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1Candidate(o[0]); checkGoogleCloudAiplatformV1Candidate(o[1]); @@ -9816,7 +10731,7 @@ api.GoogleCloudAiplatformV1GenerateContentResponse final o = api.GoogleCloudAiplatformV1GenerateContentResponse(); buildCounterGoogleCloudAiplatformV1GenerateContentResponse++; if (buildCounterGoogleCloudAiplatformV1GenerateContentResponse < 3) { - o.candidates = buildUnnamed126(); + o.candidates = buildUnnamed135(); o.modelVersion = 'foo'; o.promptFeedback = buildGoogleCloudAiplatformV1GenerateContentResponsePromptFeedback(); @@ -9831,7 +10746,7 @@ void checkGoogleCloudAiplatformV1GenerateContentResponse( api.GoogleCloudAiplatformV1GenerateContentResponse o) { buildCounterGoogleCloudAiplatformV1GenerateContentResponse++; if (buildCounterGoogleCloudAiplatformV1GenerateContentResponse < 3) { - checkUnnamed126(o.candidates!); + checkUnnamed135(o.candidates!); unittest.expect( o.modelVersion!, unittest.equals('foo'), @@ -9844,12 +10759,12 @@ void checkGoogleCloudAiplatformV1GenerateContentResponse( buildCounterGoogleCloudAiplatformV1GenerateContentResponse--; } -core.List buildUnnamed127() => [ +core.List buildUnnamed136() => [ buildGoogleCloudAiplatformV1SafetyRating(), buildGoogleCloudAiplatformV1SafetyRating(), ]; -void checkUnnamed127(core.List o) { +void checkUnnamed136(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1SafetyRating(o[0]); checkGoogleCloudAiplatformV1SafetyRating(o[1]); @@ -9866,7 +10781,7 @@ api.GoogleCloudAiplatformV1GenerateContentResponsePromptFeedback 3) { o.blockReason = 'foo'; o.blockReasonMessage = 'foo'; - o.safetyRatings = buildUnnamed127(); + o.safetyRatings = buildUnnamed136(); } buildCounterGoogleCloudAiplatformV1GenerateContentResponsePromptFeedback--; return o; @@ -9885,7 +10800,7 @@ void checkGoogleCloudAiplatformV1GenerateContentResponsePromptFeedback( o.blockReasonMessage!, unittest.equals('foo'), ); - checkUnnamed127(o.safetyRatings!); + checkUnnamed136(o.safetyRatings!); } buildCounterGoogleCloudAiplatformV1GenerateContentResponsePromptFeedback--; } @@ -9898,6 +10813,7 @@ 
api.GoogleCloudAiplatformV1GenerateContentResponseUsageMetadata buildCounterGoogleCloudAiplatformV1GenerateContentResponseUsageMetadata++; if (buildCounterGoogleCloudAiplatformV1GenerateContentResponseUsageMetadata < 3) { + o.cachedContentTokenCount = 42; o.candidatesTokenCount = 42; o.promptTokenCount = 42; o.totalTokenCount = 42; @@ -9911,6 +10827,10 @@ void checkGoogleCloudAiplatformV1GenerateContentResponseUsageMetadata( buildCounterGoogleCloudAiplatformV1GenerateContentResponseUsageMetadata++; if (buildCounterGoogleCloudAiplatformV1GenerateContentResponseUsageMetadata < 3) { + unittest.expect( + o.cachedContentTokenCount!, + unittest.equals(42), + ); unittest.expect( o.candidatesTokenCount!, unittest.equals(42), @@ -9927,12 +10847,29 @@ void checkGoogleCloudAiplatformV1GenerateContentResponseUsageMetadata( buildCounterGoogleCloudAiplatformV1GenerateContentResponseUsageMetadata--; } -core.List buildUnnamed128() => [ +core.List buildUnnamed137() => [ 'foo', 'foo', ]; -void checkUnnamed128(core.List o) { +void checkUnnamed137(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o[0], + unittest.equals('foo'), + ); + unittest.expect( + o[1], + unittest.equals('foo'), + ); +} + +core.List buildUnnamed138() => [ + 'foo', + 'foo', + ]; + +void checkUnnamed138(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -9958,12 +10895,15 @@ api.GoogleCloudAiplatformV1GenerationConfig o.presencePenalty = 42.0; o.responseLogprobs = true; o.responseMimeType = 'foo'; + o.responseModalities = buildUnnamed137(); o.responseSchema = buildGoogleCloudAiplatformV1Schema(); o.routingConfig = buildGoogleCloudAiplatformV1GenerationConfigRoutingConfig(); o.seed = 42; - o.stopSequences = buildUnnamed128(); + o.speechConfig = buildGoogleCloudAiplatformV1SpeechConfig(); + o.stopSequences = buildUnnamed138(); o.temperature = 42.0; + o.tokenResolution = 'foo'; o.topK = 42.0; o.topP = 42.0; } @@ -10001,17 +10941,23 @@ void checkGoogleCloudAiplatformV1GenerationConfig( o.responseMimeType!, unittest.equals('foo'), ); + checkUnnamed137(o.responseModalities!); checkGoogleCloudAiplatformV1Schema(o.responseSchema!); checkGoogleCloudAiplatformV1GenerationConfigRoutingConfig(o.routingConfig!); unittest.expect( o.seed!, unittest.equals(42), ); - checkUnnamed128(o.stopSequences!); + checkGoogleCloudAiplatformV1SpeechConfig(o.speechConfig!); + checkUnnamed138(o.stopSequences!); unittest.expect( o.temperature!, unittest.equals(42.0), ); + unittest.expect( + o.tokenResolution!, + unittest.equals('foo'), + ); unittest.expect( o.topK!, unittest.equals(42.0), @@ -10134,6 +11080,69 @@ void checkGoogleCloudAiplatformV1GenieSource( buildCounterGoogleCloudAiplatformV1GenieSource--; } +core.List + buildUnnamed139() => [ + buildGoogleCloudAiplatformV1GoogleDriveSourceResourceId(), + buildGoogleCloudAiplatformV1GoogleDriveSourceResourceId(), + ]; + +void checkUnnamed139( + core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkGoogleCloudAiplatformV1GoogleDriveSourceResourceId(o[0]); + checkGoogleCloudAiplatformV1GoogleDriveSourceResourceId(o[1]); +} + +core.int buildCounterGoogleCloudAiplatformV1GoogleDriveSource = 0; +api.GoogleCloudAiplatformV1GoogleDriveSource + buildGoogleCloudAiplatformV1GoogleDriveSource() { + final o = api.GoogleCloudAiplatformV1GoogleDriveSource(); + buildCounterGoogleCloudAiplatformV1GoogleDriveSource++; + if (buildCounterGoogleCloudAiplatformV1GoogleDriveSource < 3) { + o.resourceIds = buildUnnamed139(); + } + 
buildCounterGoogleCloudAiplatformV1GoogleDriveSource--; + return o; +} + +void checkGoogleCloudAiplatformV1GoogleDriveSource( + api.GoogleCloudAiplatformV1GoogleDriveSource o) { + buildCounterGoogleCloudAiplatformV1GoogleDriveSource++; + if (buildCounterGoogleCloudAiplatformV1GoogleDriveSource < 3) { + checkUnnamed139(o.resourceIds!); + } + buildCounterGoogleCloudAiplatformV1GoogleDriveSource--; +} + +core.int buildCounterGoogleCloudAiplatformV1GoogleDriveSourceResourceId = 0; +api.GoogleCloudAiplatformV1GoogleDriveSourceResourceId + buildGoogleCloudAiplatformV1GoogleDriveSourceResourceId() { + final o = api.GoogleCloudAiplatformV1GoogleDriveSourceResourceId(); + buildCounterGoogleCloudAiplatformV1GoogleDriveSourceResourceId++; + if (buildCounterGoogleCloudAiplatformV1GoogleDriveSourceResourceId < 3) { + o.resourceId = 'foo'; + o.resourceType = 'foo'; + } + buildCounterGoogleCloudAiplatformV1GoogleDriveSourceResourceId--; + return o; +} + +void checkGoogleCloudAiplatformV1GoogleDriveSourceResourceId( + api.GoogleCloudAiplatformV1GoogleDriveSourceResourceId o) { + buildCounterGoogleCloudAiplatformV1GoogleDriveSourceResourceId++; + if (buildCounterGoogleCloudAiplatformV1GoogleDriveSourceResourceId < 3) { + unittest.expect( + o.resourceId!, + unittest.equals('foo'), + ); + unittest.expect( + o.resourceType!, + unittest.equals('foo'), + ); + } + buildCounterGoogleCloudAiplatformV1GoogleDriveSourceResourceId--; +} + core.int buildCounterGoogleCloudAiplatformV1GoogleSearchRetrieval = 0; api.GoogleCloudAiplatformV1GoogleSearchRetrieval buildGoogleCloudAiplatformV1GoogleSearchRetrieval() { @@ -10298,6 +11307,7 @@ api.GoogleCloudAiplatformV1GroundingChunkRetrievedContext final o = api.GoogleCloudAiplatformV1GroundingChunkRetrievedContext(); buildCounterGoogleCloudAiplatformV1GroundingChunkRetrievedContext++; if (buildCounterGoogleCloudAiplatformV1GroundingChunkRetrievedContext < 3) { + o.text = 'foo'; o.title = 'foo'; o.uri = 'foo'; } @@ -10309,6 +11319,10 @@ void checkGoogleCloudAiplatformV1GroundingChunkRetrievedContext( api.GoogleCloudAiplatformV1GroundingChunkRetrievedContext o) { buildCounterGoogleCloudAiplatformV1GroundingChunkRetrievedContext++; if (buildCounterGoogleCloudAiplatformV1GroundingChunkRetrievedContext < 3) { + unittest.expect( + o.text!, + unittest.equals('foo'), + ); unittest.expect( o.title!, unittest.equals('foo'), @@ -10350,34 +11364,34 @@ void checkGoogleCloudAiplatformV1GroundingChunkWeb( buildCounterGoogleCloudAiplatformV1GroundingChunkWeb--; } -core.List buildUnnamed129() => [ +core.List buildUnnamed140() => [ buildGoogleCloudAiplatformV1GroundingChunk(), buildGoogleCloudAiplatformV1GroundingChunk(), ]; -void checkUnnamed129(core.List o) { +void checkUnnamed140(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1GroundingChunk(o[0]); checkGoogleCloudAiplatformV1GroundingChunk(o[1]); } -core.List buildUnnamed130() => [ +core.List buildUnnamed141() => [ buildGoogleCloudAiplatformV1GroundingSupport(), buildGoogleCloudAiplatformV1GroundingSupport(), ]; -void checkUnnamed130(core.List o) { +void checkUnnamed141(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1GroundingSupport(o[0]); checkGoogleCloudAiplatformV1GroundingSupport(o[1]); } -core.List buildUnnamed131() => [ +core.List buildUnnamed142() => [ 'foo', 'foo', ]; -void checkUnnamed131(core.List o) { +void checkUnnamed142(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -10395,11 +11409,11 @@ 
api.GoogleCloudAiplatformV1GroundingMetadata final o = api.GoogleCloudAiplatformV1GroundingMetadata(); buildCounterGoogleCloudAiplatformV1GroundingMetadata++; if (buildCounterGoogleCloudAiplatformV1GroundingMetadata < 3) { - o.groundingChunks = buildUnnamed129(); - o.groundingSupports = buildUnnamed130(); + o.groundingChunks = buildUnnamed140(); + o.groundingSupports = buildUnnamed141(); o.retrievalMetadata = buildGoogleCloudAiplatformV1RetrievalMetadata(); o.searchEntryPoint = buildGoogleCloudAiplatformV1SearchEntryPoint(); - o.webSearchQueries = buildUnnamed131(); + o.webSearchQueries = buildUnnamed142(); } buildCounterGoogleCloudAiplatformV1GroundingMetadata--; return o; @@ -10409,21 +11423,21 @@ void checkGoogleCloudAiplatformV1GroundingMetadata( api.GoogleCloudAiplatformV1GroundingMetadata o) { buildCounterGoogleCloudAiplatformV1GroundingMetadata++; if (buildCounterGoogleCloudAiplatformV1GroundingMetadata < 3) { - checkUnnamed129(o.groundingChunks!); - checkUnnamed130(o.groundingSupports!); + checkUnnamed140(o.groundingChunks!); + checkUnnamed141(o.groundingSupports!); checkGoogleCloudAiplatformV1RetrievalMetadata(o.retrievalMetadata!); checkGoogleCloudAiplatformV1SearchEntryPoint(o.searchEntryPoint!); - checkUnnamed131(o.webSearchQueries!); + checkUnnamed142(o.webSearchQueries!); } buildCounterGoogleCloudAiplatformV1GroundingMetadata--; } -core.List buildUnnamed132() => [ +core.List buildUnnamed143() => [ 42.0, 42.0, ]; -void checkUnnamed132(core.List o) { +void checkUnnamed143(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -10435,12 +11449,12 @@ void checkUnnamed132(core.List o) { ); } -core.List buildUnnamed133() => [ +core.List buildUnnamed144() => [ 42, 42, ]; -void checkUnnamed133(core.List o) { +void checkUnnamed144(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -10458,8 +11472,8 @@ api.GoogleCloudAiplatformV1GroundingSupport final o = api.GoogleCloudAiplatformV1GroundingSupport(); buildCounterGoogleCloudAiplatformV1GroundingSupport++; if (buildCounterGoogleCloudAiplatformV1GroundingSupport < 3) { - o.confidenceScores = buildUnnamed132(); - o.groundingChunkIndices = buildUnnamed133(); + o.confidenceScores = buildUnnamed143(); + o.groundingChunkIndices = buildUnnamed144(); o.segment = buildGoogleCloudAiplatformV1Segment(); } buildCounterGoogleCloudAiplatformV1GroundingSupport--; @@ -10470,19 +11484,19 @@ void checkGoogleCloudAiplatformV1GroundingSupport( api.GoogleCloudAiplatformV1GroundingSupport o) { buildCounterGoogleCloudAiplatformV1GroundingSupport++; if (buildCounterGoogleCloudAiplatformV1GroundingSupport < 3) { - checkUnnamed132(o.confidenceScores!); - checkUnnamed133(o.groundingChunkIndices!); + checkUnnamed143(o.confidenceScores!); + checkUnnamed144(o.groundingChunkIndices!); checkGoogleCloudAiplatformV1Segment(o.segment!); } buildCounterGoogleCloudAiplatformV1GroundingSupport--; } -core.Map buildUnnamed134() => { +core.Map buildUnnamed145() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed134(core.Map o) { +void checkUnnamed145(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -10494,12 +11508,12 @@ void checkUnnamed134(core.Map o) { ); } -core.List buildUnnamed135() => [ +core.List buildUnnamed146() => [ buildGoogleCloudAiplatformV1Trial(), buildGoogleCloudAiplatformV1Trial(), ]; -void checkUnnamed135(core.List o) { +void checkUnnamed146(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1Trial(o[0]); 
checkGoogleCloudAiplatformV1Trial(o[1]); @@ -10516,7 +11530,7 @@ api.GoogleCloudAiplatformV1HyperparameterTuningJob o.encryptionSpec = buildGoogleCloudAiplatformV1EncryptionSpec(); o.endTime = 'foo'; o.error = buildGoogleRpcStatus(); - o.labels = buildUnnamed134(); + o.labels = buildUnnamed145(); o.maxFailedTrialCount = 42; o.maxTrialCount = 42; o.name = 'foo'; @@ -10527,7 +11541,7 @@ api.GoogleCloudAiplatformV1HyperparameterTuningJob o.state = 'foo'; o.studySpec = buildGoogleCloudAiplatformV1StudySpec(); o.trialJobSpec = buildGoogleCloudAiplatformV1CustomJobSpec(); - o.trials = buildUnnamed135(); + o.trials = buildUnnamed146(); o.updateTime = 'foo'; } buildCounterGoogleCloudAiplatformV1HyperparameterTuningJob--; @@ -10552,7 +11566,7 @@ void checkGoogleCloudAiplatformV1HyperparameterTuningJob( unittest.equals('foo'), ); checkGoogleRpcStatus(o.error!); - checkUnnamed134(o.labels!); + checkUnnamed145(o.labels!); unittest.expect( o.maxFailedTrialCount!, unittest.equals(42), @@ -10581,7 +11595,7 @@ void checkGoogleCloudAiplatformV1HyperparameterTuningJob( ); checkGoogleCloudAiplatformV1StudySpec(o.studySpec!); checkGoogleCloudAiplatformV1CustomJobSpec(o.trialJobSpec!); - checkUnnamed135(o.trials!); + checkUnnamed146(o.trials!); unittest.expect( o.updateTime!, unittest.equals('foo'), @@ -10590,12 +11604,12 @@ void checkGoogleCloudAiplatformV1HyperparameterTuningJob( buildCounterGoogleCloudAiplatformV1HyperparameterTuningJob--; } -core.List buildUnnamed136() => [ +core.List buildUnnamed147() => [ 'foo', 'foo', ]; -void checkUnnamed136(core.List o) { +void checkUnnamed147(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -10612,7 +11626,7 @@ api.GoogleCloudAiplatformV1IdMatcher buildGoogleCloudAiplatformV1IdMatcher() { final o = api.GoogleCloudAiplatformV1IdMatcher(); buildCounterGoogleCloudAiplatformV1IdMatcher++; if (buildCounterGoogleCloudAiplatformV1IdMatcher < 3) { - o.ids = buildUnnamed136(); + o.ids = buildUnnamed147(); } buildCounterGoogleCloudAiplatformV1IdMatcher--; return o; @@ -10622,17 +11636,17 @@ void checkGoogleCloudAiplatformV1IdMatcher( api.GoogleCloudAiplatformV1IdMatcher o) { buildCounterGoogleCloudAiplatformV1IdMatcher++; if (buildCounterGoogleCloudAiplatformV1IdMatcher < 3) { - checkUnnamed136(o.ids!); + checkUnnamed147(o.ids!); } buildCounterGoogleCloudAiplatformV1IdMatcher--; } -core.Map buildUnnamed137() => { +core.Map buildUnnamed148() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed137(core.Map o) { +void checkUnnamed148(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -10644,12 +11658,12 @@ void checkUnnamed137(core.Map o) { ); } -core.Map buildUnnamed138() => { +core.Map buildUnnamed149() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed138(core.Map o) { +void checkUnnamed149(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -10667,8 +11681,8 @@ api.GoogleCloudAiplatformV1ImportDataConfig final o = api.GoogleCloudAiplatformV1ImportDataConfig(); buildCounterGoogleCloudAiplatformV1ImportDataConfig++; if (buildCounterGoogleCloudAiplatformV1ImportDataConfig < 3) { - o.annotationLabels = buildUnnamed137(); - o.dataItemLabels = buildUnnamed138(); + o.annotationLabels = buildUnnamed148(); + o.dataItemLabels = buildUnnamed149(); o.gcsSource = buildGoogleCloudAiplatformV1GcsSource(); o.importSchemaUri = 'foo'; } @@ -10680,8 +11694,8 @@ void checkGoogleCloudAiplatformV1ImportDataConfig( api.GoogleCloudAiplatformV1ImportDataConfig o) { 
buildCounterGoogleCloudAiplatformV1ImportDataConfig++; if (buildCounterGoogleCloudAiplatformV1ImportDataConfig < 3) { - checkUnnamed137(o.annotationLabels!); - checkUnnamed138(o.dataItemLabels!); + checkUnnamed148(o.annotationLabels!); + checkUnnamed149(o.dataItemLabels!); checkGoogleCloudAiplatformV1GcsSource(o.gcsSource!); unittest.expect( o.importSchemaUri!, @@ -10691,12 +11705,12 @@ void checkGoogleCloudAiplatformV1ImportDataConfig( buildCounterGoogleCloudAiplatformV1ImportDataConfig--; } -core.List buildUnnamed139() => [ +core.List buildUnnamed150() => [ buildGoogleCloudAiplatformV1ImportDataConfig(), buildGoogleCloudAiplatformV1ImportDataConfig(), ]; -void checkUnnamed139(core.List o) { +void checkUnnamed150(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1ImportDataConfig(o[0]); checkGoogleCloudAiplatformV1ImportDataConfig(o[1]); @@ -10708,7 +11722,7 @@ api.GoogleCloudAiplatformV1ImportDataRequest final o = api.GoogleCloudAiplatformV1ImportDataRequest(); buildCounterGoogleCloudAiplatformV1ImportDataRequest++; if (buildCounterGoogleCloudAiplatformV1ImportDataRequest < 3) { - o.importConfigs = buildUnnamed139(); + o.importConfigs = buildUnnamed150(); } buildCounterGoogleCloudAiplatformV1ImportDataRequest--; return o; @@ -10718,18 +11732,18 @@ void checkGoogleCloudAiplatformV1ImportDataRequest( api.GoogleCloudAiplatformV1ImportDataRequest o) { buildCounterGoogleCloudAiplatformV1ImportDataRequest++; if (buildCounterGoogleCloudAiplatformV1ImportDataRequest < 3) { - checkUnnamed139(o.importConfigs!); + checkUnnamed150(o.importConfigs!); } buildCounterGoogleCloudAiplatformV1ImportDataRequest--; } core.List - buildUnnamed140() => [ + buildUnnamed151() => [ buildGoogleCloudAiplatformV1ImportFeatureValuesRequestFeatureSpec(), buildGoogleCloudAiplatformV1ImportFeatureValuesRequestFeatureSpec(), ]; -void checkUnnamed140( +void checkUnnamed151( core.List o) { unittest.expect(o, unittest.hasLength(2)); @@ -10749,7 +11763,7 @@ api.GoogleCloudAiplatformV1ImportFeatureValuesRequest o.disableIngestionAnalysis = true; o.disableOnlineServing = true; o.entityIdField = 'foo'; - o.featureSpecs = buildUnnamed140(); + o.featureSpecs = buildUnnamed151(); o.featureTime = 'foo'; o.featureTimeField = 'foo'; o.workerCount = 42; @@ -10771,7 +11785,7 @@ void checkGoogleCloudAiplatformV1ImportFeatureValuesRequest( o.entityIdField!, unittest.equals('foo'), ); - checkUnnamed140(o.featureSpecs!); + checkUnnamed151(o.featureSpecs!); unittest.expect( o.featureTime!, unittest.equals('foo'), @@ -10842,23 +11856,88 @@ void checkGoogleCloudAiplatformV1ImportModelEvaluationRequest( buildCounterGoogleCloudAiplatformV1ImportModelEvaluationRequest--; } -core.List buildUnnamed141() => [ +core.int buildCounterGoogleCloudAiplatformV1ImportRagFilesConfig = 0; +api.GoogleCloudAiplatformV1ImportRagFilesConfig + buildGoogleCloudAiplatformV1ImportRagFilesConfig() { + final o = api.GoogleCloudAiplatformV1ImportRagFilesConfig(); + buildCounterGoogleCloudAiplatformV1ImportRagFilesConfig++; + if (buildCounterGoogleCloudAiplatformV1ImportRagFilesConfig < 3) { + o.gcsSource = buildGoogleCloudAiplatformV1GcsSource(); + o.googleDriveSource = buildGoogleCloudAiplatformV1GoogleDriveSource(); + o.jiraSource = buildGoogleCloudAiplatformV1JiraSource(); + o.maxEmbeddingRequestsPerMin = 42; + o.partialFailureBigquerySink = + buildGoogleCloudAiplatformV1BigQueryDestination(); + o.partialFailureGcsSink = buildGoogleCloudAiplatformV1GcsDestination(); + o.ragFileTransformationConfig = + 
buildGoogleCloudAiplatformV1RagFileTransformationConfig(); + o.sharePointSources = buildGoogleCloudAiplatformV1SharePointSources(); + o.slackSource = buildGoogleCloudAiplatformV1SlackSource(); + } + buildCounterGoogleCloudAiplatformV1ImportRagFilesConfig--; + return o; +} + +void checkGoogleCloudAiplatformV1ImportRagFilesConfig( + api.GoogleCloudAiplatformV1ImportRagFilesConfig o) { + buildCounterGoogleCloudAiplatformV1ImportRagFilesConfig++; + if (buildCounterGoogleCloudAiplatformV1ImportRagFilesConfig < 3) { + checkGoogleCloudAiplatformV1GcsSource(o.gcsSource!); + checkGoogleCloudAiplatformV1GoogleDriveSource(o.googleDriveSource!); + checkGoogleCloudAiplatformV1JiraSource(o.jiraSource!); + unittest.expect( + o.maxEmbeddingRequestsPerMin!, + unittest.equals(42), + ); + checkGoogleCloudAiplatformV1BigQueryDestination( + o.partialFailureBigquerySink!); + checkGoogleCloudAiplatformV1GcsDestination(o.partialFailureGcsSink!); + checkGoogleCloudAiplatformV1RagFileTransformationConfig( + o.ragFileTransformationConfig!); + checkGoogleCloudAiplatformV1SharePointSources(o.sharePointSources!); + checkGoogleCloudAiplatformV1SlackSource(o.slackSource!); + } + buildCounterGoogleCloudAiplatformV1ImportRagFilesConfig--; +} + +core.int buildCounterGoogleCloudAiplatformV1ImportRagFilesRequest = 0; +api.GoogleCloudAiplatformV1ImportRagFilesRequest + buildGoogleCloudAiplatformV1ImportRagFilesRequest() { + final o = api.GoogleCloudAiplatformV1ImportRagFilesRequest(); + buildCounterGoogleCloudAiplatformV1ImportRagFilesRequest++; + if (buildCounterGoogleCloudAiplatformV1ImportRagFilesRequest < 3) { + o.importRagFilesConfig = buildGoogleCloudAiplatformV1ImportRagFilesConfig(); + } + buildCounterGoogleCloudAiplatformV1ImportRagFilesRequest--; + return o; +} + +void checkGoogleCloudAiplatformV1ImportRagFilesRequest( + api.GoogleCloudAiplatformV1ImportRagFilesRequest o) { + buildCounterGoogleCloudAiplatformV1ImportRagFilesRequest++; + if (buildCounterGoogleCloudAiplatformV1ImportRagFilesRequest < 3) { + checkGoogleCloudAiplatformV1ImportRagFilesConfig(o.importRagFilesConfig!); + } + buildCounterGoogleCloudAiplatformV1ImportRagFilesRequest--; +} + +core.List buildUnnamed152() => [ buildGoogleCloudAiplatformV1DeployedIndexRef(), buildGoogleCloudAiplatformV1DeployedIndexRef(), ]; -void checkUnnamed141(core.List o) { +void checkUnnamed152(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1DeployedIndexRef(o[0]); checkGoogleCloudAiplatformV1DeployedIndexRef(o[1]); } -core.Map buildUnnamed142() => { +core.Map buildUnnamed153() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed142(core.Map o) { +void checkUnnamed153(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -10876,14 +11955,14 @@ api.GoogleCloudAiplatformV1Index buildGoogleCloudAiplatformV1Index() { buildCounterGoogleCloudAiplatformV1Index++; if (buildCounterGoogleCloudAiplatformV1Index < 3) { o.createTime = 'foo'; - o.deployedIndexes = buildUnnamed141(); + o.deployedIndexes = buildUnnamed152(); o.description = 'foo'; o.displayName = 'foo'; o.encryptionSpec = buildGoogleCloudAiplatformV1EncryptionSpec(); o.etag = 'foo'; o.indexStats = buildGoogleCloudAiplatformV1IndexStats(); o.indexUpdateMethod = 'foo'; - o.labels = buildUnnamed142(); + o.labels = buildUnnamed153(); o.metadata = { 'list': [1, 2, 3], 'bool': true, @@ -10906,7 +11985,7 @@ void checkGoogleCloudAiplatformV1Index(api.GoogleCloudAiplatformV1Index o) { o.createTime!, unittest.equals('foo'), ); - checkUnnamed141(o.deployedIndexes!); + 
checkUnnamed152(o.deployedIndexes!); unittest.expect( o.description!, unittest.equals('foo'), @@ -10925,7 +12004,7 @@ void checkGoogleCloudAiplatformV1Index(api.GoogleCloudAiplatformV1Index o) { o.indexUpdateMethod!, unittest.equals('foo'), ); - checkUnnamed142(o.labels!); + checkUnnamed153(o.labels!); var casted46 = (o.metadata!) as core.Map; unittest.expect(casted46, unittest.hasLength(3)); unittest.expect( @@ -10958,12 +12037,12 @@ void checkGoogleCloudAiplatformV1Index(api.GoogleCloudAiplatformV1Index o) { buildCounterGoogleCloudAiplatformV1Index--; } -core.List buildUnnamed143() => [ +core.List buildUnnamed154() => [ 42.0, 42.0, ]; -void checkUnnamed143(core.List o) { +void checkUnnamed154(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -10976,12 +12055,12 @@ void checkUnnamed143(core.List o) { } core.List - buildUnnamed144() => [ + buildUnnamed155() => [ buildGoogleCloudAiplatformV1IndexDatapointNumericRestriction(), buildGoogleCloudAiplatformV1IndexDatapointNumericRestriction(), ]; -void checkUnnamed144( +void checkUnnamed155( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1IndexDatapointNumericRestriction(o[0]); @@ -10989,12 +12068,12 @@ void checkUnnamed144( } core.List - buildUnnamed145() => [ + buildUnnamed156() => [ buildGoogleCloudAiplatformV1IndexDatapointRestriction(), buildGoogleCloudAiplatformV1IndexDatapointRestriction(), ]; -void checkUnnamed145( +void checkUnnamed156( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1IndexDatapointRestriction(o[0]); @@ -11009,9 +12088,9 @@ api.GoogleCloudAiplatformV1IndexDatapoint if (buildCounterGoogleCloudAiplatformV1IndexDatapoint < 3) { o.crowdingTag = buildGoogleCloudAiplatformV1IndexDatapointCrowdingTag(); o.datapointId = 'foo'; - o.featureVector = buildUnnamed143(); - o.numericRestricts = buildUnnamed144(); - o.restricts = buildUnnamed145(); + o.featureVector = buildUnnamed154(); + o.numericRestricts = buildUnnamed155(); + o.restricts = buildUnnamed156(); o.sparseEmbedding = buildGoogleCloudAiplatformV1IndexDatapointSparseEmbedding(); } @@ -11028,9 +12107,9 @@ void checkGoogleCloudAiplatformV1IndexDatapoint( o.datapointId!, unittest.equals('foo'), ); - checkUnnamed143(o.featureVector!); - checkUnnamed144(o.numericRestricts!); - checkUnnamed145(o.restricts!); + checkUnnamed154(o.featureVector!); + checkUnnamed155(o.numericRestricts!); + checkUnnamed156(o.restricts!); checkGoogleCloudAiplatformV1IndexDatapointSparseEmbedding( o.sparseEmbedding!); } @@ -11106,12 +12185,12 @@ void checkGoogleCloudAiplatformV1IndexDatapointNumericRestriction( buildCounterGoogleCloudAiplatformV1IndexDatapointNumericRestriction--; } -core.List buildUnnamed146() => [ +core.List buildUnnamed157() => [ 'foo', 'foo', ]; -void checkUnnamed146(core.List o) { +void checkUnnamed157(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -11123,12 +12202,12 @@ void checkUnnamed146(core.List o) { ); } -core.List buildUnnamed147() => [ +core.List buildUnnamed158() => [ 'foo', 'foo', ]; -void checkUnnamed147(core.List o) { +void checkUnnamed158(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -11146,8 +12225,8 @@ api.GoogleCloudAiplatformV1IndexDatapointRestriction final o = api.GoogleCloudAiplatformV1IndexDatapointRestriction(); buildCounterGoogleCloudAiplatformV1IndexDatapointRestriction++; if (buildCounterGoogleCloudAiplatformV1IndexDatapointRestriction < 3) { - o.allowList = 
buildUnnamed146(); - o.denyList = buildUnnamed147(); + o.allowList = buildUnnamed157(); + o.denyList = buildUnnamed158(); o.namespace = 'foo'; } buildCounterGoogleCloudAiplatformV1IndexDatapointRestriction--; @@ -11158,8 +12237,8 @@ void checkGoogleCloudAiplatformV1IndexDatapointRestriction( api.GoogleCloudAiplatformV1IndexDatapointRestriction o) { buildCounterGoogleCloudAiplatformV1IndexDatapointRestriction++; if (buildCounterGoogleCloudAiplatformV1IndexDatapointRestriction < 3) { - checkUnnamed146(o.allowList!); - checkUnnamed147(o.denyList!); + checkUnnamed157(o.allowList!); + checkUnnamed158(o.denyList!); unittest.expect( o.namespace!, unittest.equals('foo'), @@ -11168,12 +12247,12 @@ void checkGoogleCloudAiplatformV1IndexDatapointRestriction( buildCounterGoogleCloudAiplatformV1IndexDatapointRestriction--; } -core.List buildUnnamed148() => [ +core.List buildUnnamed159() => [ 'foo', 'foo', ]; -void checkUnnamed148(core.List o) { +void checkUnnamed159(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -11185,12 +12264,12 @@ void checkUnnamed148(core.List o) { ); } -core.List buildUnnamed149() => [ +core.List buildUnnamed160() => [ 42.0, 42.0, ]; -void checkUnnamed149(core.List o) { +void checkUnnamed160(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -11208,8 +12287,8 @@ api.GoogleCloudAiplatformV1IndexDatapointSparseEmbedding final o = api.GoogleCloudAiplatformV1IndexDatapointSparseEmbedding(); buildCounterGoogleCloudAiplatformV1IndexDatapointSparseEmbedding++; if (buildCounterGoogleCloudAiplatformV1IndexDatapointSparseEmbedding < 3) { - o.dimensions = buildUnnamed148(); - o.values = buildUnnamed149(); + o.dimensions = buildUnnamed159(); + o.values = buildUnnamed160(); } buildCounterGoogleCloudAiplatformV1IndexDatapointSparseEmbedding--; return o; @@ -11219,29 +12298,29 @@ void checkGoogleCloudAiplatformV1IndexDatapointSparseEmbedding( api.GoogleCloudAiplatformV1IndexDatapointSparseEmbedding o) { buildCounterGoogleCloudAiplatformV1IndexDatapointSparseEmbedding++; if (buildCounterGoogleCloudAiplatformV1IndexDatapointSparseEmbedding < 3) { - checkUnnamed148(o.dimensions!); - checkUnnamed149(o.values!); + checkUnnamed159(o.dimensions!); + checkUnnamed160(o.values!); } buildCounterGoogleCloudAiplatformV1IndexDatapointSparseEmbedding--; } -core.List buildUnnamed150() => [ +core.List buildUnnamed161() => [ buildGoogleCloudAiplatformV1DeployedIndex(), buildGoogleCloudAiplatformV1DeployedIndex(), ]; -void checkUnnamed150(core.List o) { +void checkUnnamed161(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1DeployedIndex(o[0]); checkGoogleCloudAiplatformV1DeployedIndex(o[1]); } -core.Map buildUnnamed151() => { +core.Map buildUnnamed162() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed151(core.Map o) { +void checkUnnamed162(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -11260,13 +12339,13 @@ api.GoogleCloudAiplatformV1IndexEndpoint buildCounterGoogleCloudAiplatformV1IndexEndpoint++; if (buildCounterGoogleCloudAiplatformV1IndexEndpoint < 3) { o.createTime = 'foo'; - o.deployedIndexes = buildUnnamed150(); + o.deployedIndexes = buildUnnamed161(); o.description = 'foo'; o.displayName = 'foo'; o.enablePrivateServiceConnect = true; o.encryptionSpec = buildGoogleCloudAiplatformV1EncryptionSpec(); o.etag = 'foo'; - o.labels = buildUnnamed151(); + o.labels = buildUnnamed162(); o.name = 'foo'; o.network = 'foo'; o.privateServiceConnectConfig = @@ -11289,7 
+12368,7 @@ void checkGoogleCloudAiplatformV1IndexEndpoint( o.createTime!, unittest.equals('foo'), ); - checkUnnamed150(o.deployedIndexes!); + checkUnnamed161(o.deployedIndexes!); unittest.expect( o.description!, unittest.equals('foo'), @@ -11304,7 +12383,7 @@ void checkGoogleCloudAiplatformV1IndexEndpoint( o.etag!, unittest.equals('foo'), ); - checkUnnamed151(o.labels!); + checkUnnamed162(o.labels!); unittest.expect( o.name!, unittest.equals('foo'), @@ -11330,13 +12409,13 @@ void checkGoogleCloudAiplatformV1IndexEndpoint( buildCounterGoogleCloudAiplatformV1IndexEndpoint--; } -core.List buildUnnamed152() => +core.List buildUnnamed163() => [ buildGoogleCloudAiplatformV1PscAutomatedEndpoints(), buildGoogleCloudAiplatformV1PscAutomatedEndpoints(), ]; -void checkUnnamed152( +void checkUnnamed163( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1PscAutomatedEndpoints(o[0]); @@ -11350,7 +12429,7 @@ api.GoogleCloudAiplatformV1IndexPrivateEndpoints buildCounterGoogleCloudAiplatformV1IndexPrivateEndpoints++; if (buildCounterGoogleCloudAiplatformV1IndexPrivateEndpoints < 3) { o.matchGrpcAddress = 'foo'; - o.pscAutomatedEndpoints = buildUnnamed152(); + o.pscAutomatedEndpoints = buildUnnamed163(); o.serviceAttachment = 'foo'; } buildCounterGoogleCloudAiplatformV1IndexPrivateEndpoints--; @@ -11365,7 +12444,7 @@ void checkGoogleCloudAiplatformV1IndexPrivateEndpoints( o.matchGrpcAddress!, unittest.equals('foo'), ); - checkUnnamed152(o.pscAutomatedEndpoints!); + checkUnnamed163(o.pscAutomatedEndpoints!); unittest.expect( o.serviceAttachment!, unittest.equals('foo'), @@ -11462,12 +12541,12 @@ void checkGoogleCloudAiplatformV1InputDataConfig( buildCounterGoogleCloudAiplatformV1InputDataConfig--; } -core.List buildUnnamed153() => [ +core.List buildUnnamed164() => [ 'foo', 'foo', ]; -void checkUnnamed153(core.List o) { +void checkUnnamed164(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -11484,7 +12563,7 @@ api.GoogleCloudAiplatformV1Int64Array buildGoogleCloudAiplatformV1Int64Array() { final o = api.GoogleCloudAiplatformV1Int64Array(); buildCounterGoogleCloudAiplatformV1Int64Array++; if (buildCounterGoogleCloudAiplatformV1Int64Array < 3) { - o.values = buildUnnamed153(); + o.values = buildUnnamed164(); } buildCounterGoogleCloudAiplatformV1Int64Array--; return o; @@ -11494,7 +12573,7 @@ void checkGoogleCloudAiplatformV1Int64Array( api.GoogleCloudAiplatformV1Int64Array o) { buildCounterGoogleCloudAiplatformV1Int64Array++; if (buildCounterGoogleCloudAiplatformV1Int64Array < 3) { - checkUnnamed153(o.values!); + checkUnnamed164(o.values!); } buildCounterGoogleCloudAiplatformV1Int64Array--; } @@ -11527,6 +12606,108 @@ void checkGoogleCloudAiplatformV1IntegratedGradientsAttribution( buildCounterGoogleCloudAiplatformV1IntegratedGradientsAttribution--; } +core.List buildUnnamed165() => + [ + buildGoogleCloudAiplatformV1JiraSourceJiraQueries(), + buildGoogleCloudAiplatformV1JiraSourceJiraQueries(), + ]; + +void checkUnnamed165( + core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkGoogleCloudAiplatformV1JiraSourceJiraQueries(o[0]); + checkGoogleCloudAiplatformV1JiraSourceJiraQueries(o[1]); +} + +core.int buildCounterGoogleCloudAiplatformV1JiraSource = 0; +api.GoogleCloudAiplatformV1JiraSource buildGoogleCloudAiplatformV1JiraSource() { + final o = api.GoogleCloudAiplatformV1JiraSource(); + buildCounterGoogleCloudAiplatformV1JiraSource++; + if (buildCounterGoogleCloudAiplatformV1JiraSource < 3) { + o.jiraQueries = buildUnnamed165(); 
+ } + buildCounterGoogleCloudAiplatformV1JiraSource--; + return o; +} + +void checkGoogleCloudAiplatformV1JiraSource( + api.GoogleCloudAiplatformV1JiraSource o) { + buildCounterGoogleCloudAiplatformV1JiraSource++; + if (buildCounterGoogleCloudAiplatformV1JiraSource < 3) { + checkUnnamed165(o.jiraQueries!); + } + buildCounterGoogleCloudAiplatformV1JiraSource--; +} + +core.List buildUnnamed166() => [ + 'foo', + 'foo', + ]; + +void checkUnnamed166(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o[0], + unittest.equals('foo'), + ); + unittest.expect( + o[1], + unittest.equals('foo'), + ); +} + +core.List buildUnnamed167() => [ + 'foo', + 'foo', + ]; + +void checkUnnamed167(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o[0], + unittest.equals('foo'), + ); + unittest.expect( + o[1], + unittest.equals('foo'), + ); +} + +core.int buildCounterGoogleCloudAiplatformV1JiraSourceJiraQueries = 0; +api.GoogleCloudAiplatformV1JiraSourceJiraQueries + buildGoogleCloudAiplatformV1JiraSourceJiraQueries() { + final o = api.GoogleCloudAiplatformV1JiraSourceJiraQueries(); + buildCounterGoogleCloudAiplatformV1JiraSourceJiraQueries++; + if (buildCounterGoogleCloudAiplatformV1JiraSourceJiraQueries < 3) { + o.apiKeyConfig = buildGoogleCloudAiplatformV1ApiAuthApiKeyConfig(); + o.customQueries = buildUnnamed166(); + o.email = 'foo'; + o.projects = buildUnnamed167(); + o.serverUri = 'foo'; + } + buildCounterGoogleCloudAiplatformV1JiraSourceJiraQueries--; + return o; +} + +void checkGoogleCloudAiplatformV1JiraSourceJiraQueries( + api.GoogleCloudAiplatformV1JiraSourceJiraQueries o) { + buildCounterGoogleCloudAiplatformV1JiraSourceJiraQueries++; + if (buildCounterGoogleCloudAiplatformV1JiraSourceJiraQueries < 3) { + checkGoogleCloudAiplatformV1ApiAuthApiKeyConfig(o.apiKeyConfig!); + checkUnnamed166(o.customQueries!); + unittest.expect( + o.email!, + unittest.equals('foo'), + ); + checkUnnamed167(o.projects!); + unittest.expect( + o.serverUri!, + unittest.equals('foo'), + ); + } + buildCounterGoogleCloudAiplatformV1JiraSourceJiraQueries--; +} + core.int buildCounterGoogleCloudAiplatformV1LargeModelReference = 0; api.GoogleCloudAiplatformV1LargeModelReference buildGoogleCloudAiplatformV1LargeModelReference() { @@ -11551,34 +12732,34 @@ void checkGoogleCloudAiplatformV1LargeModelReference( buildCounterGoogleCloudAiplatformV1LargeModelReference--; } -core.List buildUnnamed154() => [ +core.List buildUnnamed168() => [ buildGoogleCloudAiplatformV1Artifact(), buildGoogleCloudAiplatformV1Artifact(), ]; -void checkUnnamed154(core.List o) { +void checkUnnamed168(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1Artifact(o[0]); checkGoogleCloudAiplatformV1Artifact(o[1]); } -core.List buildUnnamed155() => [ +core.List buildUnnamed169() => [ buildGoogleCloudAiplatformV1Event(), buildGoogleCloudAiplatformV1Event(), ]; -void checkUnnamed155(core.List o) { +void checkUnnamed169(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1Event(o[0]); checkGoogleCloudAiplatformV1Event(o[1]); } -core.List buildUnnamed156() => [ +core.List buildUnnamed170() => [ buildGoogleCloudAiplatformV1Execution(), buildGoogleCloudAiplatformV1Execution(), ]; -void checkUnnamed156(core.List o) { +void checkUnnamed170(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1Execution(o[0]); checkGoogleCloudAiplatformV1Execution(o[1]); @@ -11590,9 +12771,9 @@ api.GoogleCloudAiplatformV1LineageSubgraph 
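The `JiraSource` / `JiraSourceJiraQueries` builders added above expose the full field surface of the new Jira ingestion source for RAG imports. As a rough illustration of how those fields fit together outside the test harness, the sketch below hand-builds one query; all values are placeholders, and the `ApiAuthApiKeyConfig` is left empty because its own fields are not part of this hunk.

```dart
// Illustrative only: field names come from the builders above, values are
// hypothetical.
final jiraSource = api.GoogleCloudAiplatformV1JiraSource()
  ..jiraQueries = [
    api.GoogleCloudAiplatformV1JiraSourceJiraQueries()
      ..serverUri = 'example.atlassian.net' // hypothetical Jira server
      ..email = 'ingest-bot@example.com' // hypothetical account email
      ..projects = ['DOCS'] // import whole projects...
      ..customQueries = ['project = DOCS AND updated >= -7d'] // ...and/or JQL
      ..apiKeyConfig = api.GoogleCloudAiplatformV1ApiAuthApiKeyConfig(),
  ];

// The source then slots into the import config exercised at the top of this
// section:
final importConfig = api.GoogleCloudAiplatformV1ImportRagFilesConfig()
  ..jiraSource = jiraSource;
```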
final o = api.GoogleCloudAiplatformV1LineageSubgraph(); buildCounterGoogleCloudAiplatformV1LineageSubgraph++; if (buildCounterGoogleCloudAiplatformV1LineageSubgraph < 3) { - o.artifacts = buildUnnamed154(); - o.events = buildUnnamed155(); - o.executions = buildUnnamed156(); + o.artifacts = buildUnnamed168(); + o.events = buildUnnamed169(); + o.executions = buildUnnamed170(); } buildCounterGoogleCloudAiplatformV1LineageSubgraph--; return o; @@ -11602,19 +12783,19 @@ void checkGoogleCloudAiplatformV1LineageSubgraph( api.GoogleCloudAiplatformV1LineageSubgraph o) { buildCounterGoogleCloudAiplatformV1LineageSubgraph++; if (buildCounterGoogleCloudAiplatformV1LineageSubgraph < 3) { - checkUnnamed154(o.artifacts!); - checkUnnamed155(o.events!); - checkUnnamed156(o.executions!); + checkUnnamed168(o.artifacts!); + checkUnnamed169(o.events!); + checkUnnamed170(o.executions!); } buildCounterGoogleCloudAiplatformV1LineageSubgraph--; } -core.List buildUnnamed157() => [ +core.List buildUnnamed171() => [ buildGoogleCloudAiplatformV1Annotation(), buildGoogleCloudAiplatformV1Annotation(), ]; -void checkUnnamed157(core.List o) { +void checkUnnamed171(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1Annotation(o[0]); checkGoogleCloudAiplatformV1Annotation(o[1]); @@ -11626,7 +12807,7 @@ api.GoogleCloudAiplatformV1ListAnnotationsResponse final o = api.GoogleCloudAiplatformV1ListAnnotationsResponse(); buildCounterGoogleCloudAiplatformV1ListAnnotationsResponse++; if (buildCounterGoogleCloudAiplatformV1ListAnnotationsResponse < 3) { - o.annotations = buildUnnamed157(); + o.annotations = buildUnnamed171(); o.nextPageToken = 'foo'; } buildCounterGoogleCloudAiplatformV1ListAnnotationsResponse--; @@ -11637,7 +12818,7 @@ void checkGoogleCloudAiplatformV1ListAnnotationsResponse( api.GoogleCloudAiplatformV1ListAnnotationsResponse o) { buildCounterGoogleCloudAiplatformV1ListAnnotationsResponse++; if (buildCounterGoogleCloudAiplatformV1ListAnnotationsResponse < 3) { - checkUnnamed157(o.annotations!); + checkUnnamed171(o.annotations!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -11646,12 +12827,12 @@ void checkGoogleCloudAiplatformV1ListAnnotationsResponse( buildCounterGoogleCloudAiplatformV1ListAnnotationsResponse--; } -core.List buildUnnamed158() => [ +core.List buildUnnamed172() => [ buildGoogleCloudAiplatformV1Artifact(), buildGoogleCloudAiplatformV1Artifact(), ]; -void checkUnnamed158(core.List o) { +void checkUnnamed172(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1Artifact(o[0]); checkGoogleCloudAiplatformV1Artifact(o[1]); @@ -11663,7 +12844,7 @@ api.GoogleCloudAiplatformV1ListArtifactsResponse final o = api.GoogleCloudAiplatformV1ListArtifactsResponse(); buildCounterGoogleCloudAiplatformV1ListArtifactsResponse++; if (buildCounterGoogleCloudAiplatformV1ListArtifactsResponse < 3) { - o.artifacts = buildUnnamed158(); + o.artifacts = buildUnnamed172(); o.nextPageToken = 'foo'; } buildCounterGoogleCloudAiplatformV1ListArtifactsResponse--; @@ -11674,7 +12855,7 @@ void checkGoogleCloudAiplatformV1ListArtifactsResponse( api.GoogleCloudAiplatformV1ListArtifactsResponse o) { buildCounterGoogleCloudAiplatformV1ListArtifactsResponse++; if (buildCounterGoogleCloudAiplatformV1ListArtifactsResponse < 3) { - checkUnnamed158(o.artifacts!); + checkUnnamed172(o.artifacts!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -11683,12 +12864,12 @@ void checkGoogleCloudAiplatformV1ListArtifactsResponse( 
buildCounterGoogleCloudAiplatformV1ListArtifactsResponse--; } -core.List buildUnnamed159() => [ +core.List buildUnnamed173() => [ buildGoogleCloudAiplatformV1BatchPredictionJob(), buildGoogleCloudAiplatformV1BatchPredictionJob(), ]; -void checkUnnamed159( +void checkUnnamed173( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1BatchPredictionJob(o[0]); @@ -11701,7 +12882,7 @@ api.GoogleCloudAiplatformV1ListBatchPredictionJobsResponse final o = api.GoogleCloudAiplatformV1ListBatchPredictionJobsResponse(); buildCounterGoogleCloudAiplatformV1ListBatchPredictionJobsResponse++; if (buildCounterGoogleCloudAiplatformV1ListBatchPredictionJobsResponse < 3) { - o.batchPredictionJobs = buildUnnamed159(); + o.batchPredictionJobs = buildUnnamed173(); o.nextPageToken = 'foo'; } buildCounterGoogleCloudAiplatformV1ListBatchPredictionJobsResponse--; @@ -11712,7 +12893,7 @@ void checkGoogleCloudAiplatformV1ListBatchPredictionJobsResponse( api.GoogleCloudAiplatformV1ListBatchPredictionJobsResponse o) { buildCounterGoogleCloudAiplatformV1ListBatchPredictionJobsResponse++; if (buildCounterGoogleCloudAiplatformV1ListBatchPredictionJobsResponse < 3) { - checkUnnamed159(o.batchPredictionJobs!); + checkUnnamed173(o.batchPredictionJobs!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -11721,12 +12902,49 @@ void checkGoogleCloudAiplatformV1ListBatchPredictionJobsResponse( buildCounterGoogleCloudAiplatformV1ListBatchPredictionJobsResponse--; } -core.List buildUnnamed160() => [ +core.List buildUnnamed174() => [ + buildGoogleCloudAiplatformV1CachedContent(), + buildGoogleCloudAiplatformV1CachedContent(), + ]; + +void checkUnnamed174(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkGoogleCloudAiplatformV1CachedContent(o[0]); + checkGoogleCloudAiplatformV1CachedContent(o[1]); +} + +core.int buildCounterGoogleCloudAiplatformV1ListCachedContentsResponse = 0; +api.GoogleCloudAiplatformV1ListCachedContentsResponse + buildGoogleCloudAiplatformV1ListCachedContentsResponse() { + final o = api.GoogleCloudAiplatformV1ListCachedContentsResponse(); + buildCounterGoogleCloudAiplatformV1ListCachedContentsResponse++; + if (buildCounterGoogleCloudAiplatformV1ListCachedContentsResponse < 3) { + o.cachedContents = buildUnnamed174(); + o.nextPageToken = 'foo'; + } + buildCounterGoogleCloudAiplatformV1ListCachedContentsResponse--; + return o; +} + +void checkGoogleCloudAiplatformV1ListCachedContentsResponse( + api.GoogleCloudAiplatformV1ListCachedContentsResponse o) { + buildCounterGoogleCloudAiplatformV1ListCachedContentsResponse++; + if (buildCounterGoogleCloudAiplatformV1ListCachedContentsResponse < 3) { + checkUnnamed174(o.cachedContents!); + unittest.expect( + o.nextPageToken!, + unittest.equals('foo'), + ); + } + buildCounterGoogleCloudAiplatformV1ListCachedContentsResponse--; +} + +core.List buildUnnamed175() => [ buildGoogleCloudAiplatformV1Context(), buildGoogleCloudAiplatformV1Context(), ]; -void checkUnnamed160(core.List o) { +void checkUnnamed175(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1Context(o[0]); checkGoogleCloudAiplatformV1Context(o[1]); @@ -11738,7 +12956,7 @@ api.GoogleCloudAiplatformV1ListContextsResponse final o = api.GoogleCloudAiplatformV1ListContextsResponse(); buildCounterGoogleCloudAiplatformV1ListContextsResponse++; if (buildCounterGoogleCloudAiplatformV1ListContextsResponse < 3) { - o.contexts = buildUnnamed160(); + o.contexts = buildUnnamed175(); o.nextPageToken = 'foo'; } 
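`ListCachedContentsResponse` is another message that is new in this revision; the builder and checker above confirm it carries only a `cachedContents` page plus a `nextPageToken`. Below is a hedged sketch of consuming one decoded page, written with the file's import prefixes; `CachedContent`'s own fields sit outside this hunk, so entries are just re-serialized.

```dart
// Sketch: walk one decoded page of a cached-contents listing, touching only
// the two fields the checker above verifies.
void dumpCachedContentsPage(core.Map<core.String, core.dynamic> json) {
  final page =
      api.GoogleCloudAiplatformV1ListCachedContentsResponse.fromJson(json);
  for (final content
      in page.cachedContents ?? <api.GoogleCloudAiplatformV1CachedContent>[]) {
    core.print(convert.jsonEncode(content.toJson())); // dump each entry
  }
  if ((page.nextPageToken ?? '').isNotEmpty) {
    // A non-empty token means another page should be requested.
  }
}
```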
buildCounterGoogleCloudAiplatformV1ListContextsResponse--; @@ -11749,7 +12967,7 @@ void checkGoogleCloudAiplatformV1ListContextsResponse( api.GoogleCloudAiplatformV1ListContextsResponse o) { buildCounterGoogleCloudAiplatformV1ListContextsResponse++; if (buildCounterGoogleCloudAiplatformV1ListContextsResponse < 3) { - checkUnnamed160(o.contexts!); + checkUnnamed175(o.contexts!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -11758,12 +12976,12 @@ void checkGoogleCloudAiplatformV1ListContextsResponse( buildCounterGoogleCloudAiplatformV1ListContextsResponse--; } -core.List buildUnnamed161() => [ +core.List buildUnnamed176() => [ buildGoogleCloudAiplatformV1CustomJob(), buildGoogleCloudAiplatformV1CustomJob(), ]; -void checkUnnamed161(core.List o) { +void checkUnnamed176(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1CustomJob(o[0]); checkGoogleCloudAiplatformV1CustomJob(o[1]); @@ -11775,7 +12993,7 @@ api.GoogleCloudAiplatformV1ListCustomJobsResponse final o = api.GoogleCloudAiplatformV1ListCustomJobsResponse(); buildCounterGoogleCloudAiplatformV1ListCustomJobsResponse++; if (buildCounterGoogleCloudAiplatformV1ListCustomJobsResponse < 3) { - o.customJobs = buildUnnamed161(); + o.customJobs = buildUnnamed176(); o.nextPageToken = 'foo'; } buildCounterGoogleCloudAiplatformV1ListCustomJobsResponse--; @@ -11786,7 +13004,7 @@ void checkGoogleCloudAiplatformV1ListCustomJobsResponse( api.GoogleCloudAiplatformV1ListCustomJobsResponse o) { buildCounterGoogleCloudAiplatformV1ListCustomJobsResponse++; if (buildCounterGoogleCloudAiplatformV1ListCustomJobsResponse < 3) { - checkUnnamed161(o.customJobs!); + checkUnnamed176(o.customJobs!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -11795,12 +13013,12 @@ void checkGoogleCloudAiplatformV1ListCustomJobsResponse( buildCounterGoogleCloudAiplatformV1ListCustomJobsResponse--; } -core.List buildUnnamed162() => [ +core.List buildUnnamed177() => [ buildGoogleCloudAiplatformV1DataItem(), buildGoogleCloudAiplatformV1DataItem(), ]; -void checkUnnamed162(core.List o) { +void checkUnnamed177(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1DataItem(o[0]); checkGoogleCloudAiplatformV1DataItem(o[1]); @@ -11812,7 +13030,7 @@ api.GoogleCloudAiplatformV1ListDataItemsResponse final o = api.GoogleCloudAiplatformV1ListDataItemsResponse(); buildCounterGoogleCloudAiplatformV1ListDataItemsResponse++; if (buildCounterGoogleCloudAiplatformV1ListDataItemsResponse < 3) { - o.dataItems = buildUnnamed162(); + o.dataItems = buildUnnamed177(); o.nextPageToken = 'foo'; } buildCounterGoogleCloudAiplatformV1ListDataItemsResponse--; @@ -11823,7 +13041,7 @@ void checkGoogleCloudAiplatformV1ListDataItemsResponse( api.GoogleCloudAiplatformV1ListDataItemsResponse o) { buildCounterGoogleCloudAiplatformV1ListDataItemsResponse++; if (buildCounterGoogleCloudAiplatformV1ListDataItemsResponse < 3) { - checkUnnamed162(o.dataItems!); + checkUnnamed177(o.dataItems!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -11832,12 +13050,12 @@ void checkGoogleCloudAiplatformV1ListDataItemsResponse( buildCounterGoogleCloudAiplatformV1ListDataItemsResponse--; } -core.List buildUnnamed163() => [ +core.List buildUnnamed178() => [ buildGoogleCloudAiplatformV1DataLabelingJob(), buildGoogleCloudAiplatformV1DataLabelingJob(), ]; -void checkUnnamed163(core.List o) { +void checkUnnamed178(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1DataLabelingJob(o[0]); 
checkGoogleCloudAiplatformV1DataLabelingJob(o[1]); @@ -11849,7 +13067,7 @@ api.GoogleCloudAiplatformV1ListDataLabelingJobsResponse final o = api.GoogleCloudAiplatformV1ListDataLabelingJobsResponse(); buildCounterGoogleCloudAiplatformV1ListDataLabelingJobsResponse++; if (buildCounterGoogleCloudAiplatformV1ListDataLabelingJobsResponse < 3) { - o.dataLabelingJobs = buildUnnamed163(); + o.dataLabelingJobs = buildUnnamed178(); o.nextPageToken = 'foo'; } buildCounterGoogleCloudAiplatformV1ListDataLabelingJobsResponse--; @@ -11860,7 +13078,7 @@ void checkGoogleCloudAiplatformV1ListDataLabelingJobsResponse( api.GoogleCloudAiplatformV1ListDataLabelingJobsResponse o) { buildCounterGoogleCloudAiplatformV1ListDataLabelingJobsResponse++; if (buildCounterGoogleCloudAiplatformV1ListDataLabelingJobsResponse < 3) { - checkUnnamed163(o.dataLabelingJobs!); + checkUnnamed178(o.dataLabelingJobs!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -11869,12 +13087,12 @@ void checkGoogleCloudAiplatformV1ListDataLabelingJobsResponse( buildCounterGoogleCloudAiplatformV1ListDataLabelingJobsResponse--; } -core.List buildUnnamed164() => [ +core.List buildUnnamed179() => [ buildGoogleCloudAiplatformV1DatasetVersion(), buildGoogleCloudAiplatformV1DatasetVersion(), ]; -void checkUnnamed164(core.List o) { +void checkUnnamed179(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1DatasetVersion(o[0]); checkGoogleCloudAiplatformV1DatasetVersion(o[1]); @@ -11886,7 +13104,7 @@ api.GoogleCloudAiplatformV1ListDatasetVersionsResponse final o = api.GoogleCloudAiplatformV1ListDatasetVersionsResponse(); buildCounterGoogleCloudAiplatformV1ListDatasetVersionsResponse++; if (buildCounterGoogleCloudAiplatformV1ListDatasetVersionsResponse < 3) { - o.datasetVersions = buildUnnamed164(); + o.datasetVersions = buildUnnamed179(); o.nextPageToken = 'foo'; } buildCounterGoogleCloudAiplatformV1ListDatasetVersionsResponse--; @@ -11897,7 +13115,7 @@ void checkGoogleCloudAiplatformV1ListDatasetVersionsResponse( api.GoogleCloudAiplatformV1ListDatasetVersionsResponse o) { buildCounterGoogleCloudAiplatformV1ListDatasetVersionsResponse++; if (buildCounterGoogleCloudAiplatformV1ListDatasetVersionsResponse < 3) { - checkUnnamed164(o.datasetVersions!); + checkUnnamed179(o.datasetVersions!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -11906,12 +13124,12 @@ void checkGoogleCloudAiplatformV1ListDatasetVersionsResponse( buildCounterGoogleCloudAiplatformV1ListDatasetVersionsResponse--; } -core.List buildUnnamed165() => [ +core.List buildUnnamed180() => [ buildGoogleCloudAiplatformV1Dataset(), buildGoogleCloudAiplatformV1Dataset(), ]; -void checkUnnamed165(core.List o) { +void checkUnnamed180(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1Dataset(o[0]); checkGoogleCloudAiplatformV1Dataset(o[1]); @@ -11923,7 +13141,7 @@ api.GoogleCloudAiplatformV1ListDatasetsResponse final o = api.GoogleCloudAiplatformV1ListDatasetsResponse(); buildCounterGoogleCloudAiplatformV1ListDatasetsResponse++; if (buildCounterGoogleCloudAiplatformV1ListDatasetsResponse < 3) { - o.datasets = buildUnnamed165(); + o.datasets = buildUnnamed180(); o.nextPageToken = 'foo'; } buildCounterGoogleCloudAiplatformV1ListDatasetsResponse--; @@ -11934,7 +13152,7 @@ void checkGoogleCloudAiplatformV1ListDatasetsResponse( api.GoogleCloudAiplatformV1ListDatasetsResponse o) { buildCounterGoogleCloudAiplatformV1ListDatasetsResponse++; if (buildCounterGoogleCloudAiplatformV1ListDatasetsResponse < 3) { - 
checkUnnamed165(o.datasets!); + checkUnnamed180(o.datasets!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -11944,12 +13162,12 @@ void checkGoogleCloudAiplatformV1ListDatasetsResponse( } core.List - buildUnnamed166() => [ + buildUnnamed181() => [ buildGoogleCloudAiplatformV1DeploymentResourcePool(), buildGoogleCloudAiplatformV1DeploymentResourcePool(), ]; -void checkUnnamed166( +void checkUnnamed181( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1DeploymentResourcePool(o[0]); @@ -11964,7 +13182,7 @@ api.GoogleCloudAiplatformV1ListDeploymentResourcePoolsResponse buildCounterGoogleCloudAiplatformV1ListDeploymentResourcePoolsResponse++; if (buildCounterGoogleCloudAiplatformV1ListDeploymentResourcePoolsResponse < 3) { - o.deploymentResourcePools = buildUnnamed166(); + o.deploymentResourcePools = buildUnnamed181(); o.nextPageToken = 'foo'; } buildCounterGoogleCloudAiplatformV1ListDeploymentResourcePoolsResponse--; @@ -11976,7 +13194,7 @@ void checkGoogleCloudAiplatformV1ListDeploymentResourcePoolsResponse( buildCounterGoogleCloudAiplatformV1ListDeploymentResourcePoolsResponse++; if (buildCounterGoogleCloudAiplatformV1ListDeploymentResourcePoolsResponse < 3) { - checkUnnamed166(o.deploymentResourcePools!); + checkUnnamed181(o.deploymentResourcePools!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -11985,12 +13203,12 @@ void checkGoogleCloudAiplatformV1ListDeploymentResourcePoolsResponse( buildCounterGoogleCloudAiplatformV1ListDeploymentResourcePoolsResponse--; } -core.List buildUnnamed167() => [ +core.List buildUnnamed182() => [ buildGoogleCloudAiplatformV1Endpoint(), buildGoogleCloudAiplatformV1Endpoint(), ]; -void checkUnnamed167(core.List o) { +void checkUnnamed182(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1Endpoint(o[0]); checkGoogleCloudAiplatformV1Endpoint(o[1]); @@ -12002,7 +13220,7 @@ api.GoogleCloudAiplatformV1ListEndpointsResponse final o = api.GoogleCloudAiplatformV1ListEndpointsResponse(); buildCounterGoogleCloudAiplatformV1ListEndpointsResponse++; if (buildCounterGoogleCloudAiplatformV1ListEndpointsResponse < 3) { - o.endpoints = buildUnnamed167(); + o.endpoints = buildUnnamed182(); o.nextPageToken = 'foo'; } buildCounterGoogleCloudAiplatformV1ListEndpointsResponse--; @@ -12013,7 +13231,7 @@ void checkGoogleCloudAiplatformV1ListEndpointsResponse( api.GoogleCloudAiplatformV1ListEndpointsResponse o) { buildCounterGoogleCloudAiplatformV1ListEndpointsResponse++; if (buildCounterGoogleCloudAiplatformV1ListEndpointsResponse < 3) { - checkUnnamed167(o.endpoints!); + checkUnnamed182(o.endpoints!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -12022,12 +13240,12 @@ void checkGoogleCloudAiplatformV1ListEndpointsResponse( buildCounterGoogleCloudAiplatformV1ListEndpointsResponse--; } -core.List buildUnnamed168() => [ +core.List buildUnnamed183() => [ buildGoogleCloudAiplatformV1EntityType(), buildGoogleCloudAiplatformV1EntityType(), ]; -void checkUnnamed168(core.List o) { +void checkUnnamed183(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1EntityType(o[0]); checkGoogleCloudAiplatformV1EntityType(o[1]); @@ -12039,7 +13257,7 @@ api.GoogleCloudAiplatformV1ListEntityTypesResponse final o = api.GoogleCloudAiplatformV1ListEntityTypesResponse(); buildCounterGoogleCloudAiplatformV1ListEntityTypesResponse++; if (buildCounterGoogleCloudAiplatformV1ListEntityTypesResponse < 3) { - o.entityTypes = buildUnnamed168(); + o.entityTypes = 
buildUnnamed183(); o.nextPageToken = 'foo'; } buildCounterGoogleCloudAiplatformV1ListEntityTypesResponse--; @@ -12050,7 +13268,7 @@ void checkGoogleCloudAiplatformV1ListEntityTypesResponse( api.GoogleCloudAiplatformV1ListEntityTypesResponse o) { buildCounterGoogleCloudAiplatformV1ListEntityTypesResponse++; if (buildCounterGoogleCloudAiplatformV1ListEntityTypesResponse < 3) { - checkUnnamed168(o.entityTypes!); + checkUnnamed183(o.entityTypes!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -12059,12 +13277,12 @@ void checkGoogleCloudAiplatformV1ListEntityTypesResponse( buildCounterGoogleCloudAiplatformV1ListEntityTypesResponse--; } -core.List buildUnnamed169() => [ +core.List buildUnnamed184() => [ buildGoogleCloudAiplatformV1Execution(), buildGoogleCloudAiplatformV1Execution(), ]; -void checkUnnamed169(core.List o) { +void checkUnnamed184(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1Execution(o[0]); checkGoogleCloudAiplatformV1Execution(o[1]); @@ -12076,7 +13294,7 @@ api.GoogleCloudAiplatformV1ListExecutionsResponse final o = api.GoogleCloudAiplatformV1ListExecutionsResponse(); buildCounterGoogleCloudAiplatformV1ListExecutionsResponse++; if (buildCounterGoogleCloudAiplatformV1ListExecutionsResponse < 3) { - o.executions = buildUnnamed169(); + o.executions = buildUnnamed184(); o.nextPageToken = 'foo'; } buildCounterGoogleCloudAiplatformV1ListExecutionsResponse--; @@ -12087,7 +13305,7 @@ void checkGoogleCloudAiplatformV1ListExecutionsResponse( api.GoogleCloudAiplatformV1ListExecutionsResponse o) { buildCounterGoogleCloudAiplatformV1ListExecutionsResponse++; if (buildCounterGoogleCloudAiplatformV1ListExecutionsResponse < 3) { - checkUnnamed169(o.executions!); + checkUnnamed184(o.executions!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -12096,12 +13314,12 @@ void checkGoogleCloudAiplatformV1ListExecutionsResponse( buildCounterGoogleCloudAiplatformV1ListExecutionsResponse--; } -core.List buildUnnamed170() => [ +core.List buildUnnamed185() => [ buildGoogleCloudAiplatformV1FeatureGroup(), buildGoogleCloudAiplatformV1FeatureGroup(), ]; -void checkUnnamed170(core.List o) { +void checkUnnamed185(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1FeatureGroup(o[0]); checkGoogleCloudAiplatformV1FeatureGroup(o[1]); @@ -12113,7 +13331,7 @@ api.GoogleCloudAiplatformV1ListFeatureGroupsResponse final o = api.GoogleCloudAiplatformV1ListFeatureGroupsResponse(); buildCounterGoogleCloudAiplatformV1ListFeatureGroupsResponse++; if (buildCounterGoogleCloudAiplatformV1ListFeatureGroupsResponse < 3) { - o.featureGroups = buildUnnamed170(); + o.featureGroups = buildUnnamed185(); o.nextPageToken = 'foo'; } buildCounterGoogleCloudAiplatformV1ListFeatureGroupsResponse--; @@ -12124,7 +13342,7 @@ void checkGoogleCloudAiplatformV1ListFeatureGroupsResponse( api.GoogleCloudAiplatformV1ListFeatureGroupsResponse o) { buildCounterGoogleCloudAiplatformV1ListFeatureGroupsResponse++; if (buildCounterGoogleCloudAiplatformV1ListFeatureGroupsResponse < 3) { - checkUnnamed170(o.featureGroups!); + checkUnnamed185(o.featureGroups!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -12133,12 +13351,12 @@ void checkGoogleCloudAiplatformV1ListFeatureGroupsResponse( buildCounterGoogleCloudAiplatformV1ListFeatureGroupsResponse--; } -core.List buildUnnamed171() => [ +core.List buildUnnamed186() => [ buildGoogleCloudAiplatformV1FeatureOnlineStore(), buildGoogleCloudAiplatformV1FeatureOnlineStore(), ]; -void 
checkUnnamed171( +void checkUnnamed186( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1FeatureOnlineStore(o[0]); @@ -12151,7 +13369,7 @@ api.GoogleCloudAiplatformV1ListFeatureOnlineStoresResponse final o = api.GoogleCloudAiplatformV1ListFeatureOnlineStoresResponse(); buildCounterGoogleCloudAiplatformV1ListFeatureOnlineStoresResponse++; if (buildCounterGoogleCloudAiplatformV1ListFeatureOnlineStoresResponse < 3) { - o.featureOnlineStores = buildUnnamed171(); + o.featureOnlineStores = buildUnnamed186(); o.nextPageToken = 'foo'; } buildCounterGoogleCloudAiplatformV1ListFeatureOnlineStoresResponse--; @@ -12162,7 +13380,7 @@ void checkGoogleCloudAiplatformV1ListFeatureOnlineStoresResponse( api.GoogleCloudAiplatformV1ListFeatureOnlineStoresResponse o) { buildCounterGoogleCloudAiplatformV1ListFeatureOnlineStoresResponse++; if (buildCounterGoogleCloudAiplatformV1ListFeatureOnlineStoresResponse < 3) { - checkUnnamed171(o.featureOnlineStores!); + checkUnnamed186(o.featureOnlineStores!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -12171,12 +13389,12 @@ void checkGoogleCloudAiplatformV1ListFeatureOnlineStoresResponse( buildCounterGoogleCloudAiplatformV1ListFeatureOnlineStoresResponse--; } -core.List buildUnnamed172() => [ +core.List buildUnnamed187() => [ buildGoogleCloudAiplatformV1FeatureViewSync(), buildGoogleCloudAiplatformV1FeatureViewSync(), ]; -void checkUnnamed172(core.List o) { +void checkUnnamed187(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1FeatureViewSync(o[0]); checkGoogleCloudAiplatformV1FeatureViewSync(o[1]); @@ -12188,7 +13406,7 @@ api.GoogleCloudAiplatformV1ListFeatureViewSyncsResponse final o = api.GoogleCloudAiplatformV1ListFeatureViewSyncsResponse(); buildCounterGoogleCloudAiplatformV1ListFeatureViewSyncsResponse++; if (buildCounterGoogleCloudAiplatformV1ListFeatureViewSyncsResponse < 3) { - o.featureViewSyncs = buildUnnamed172(); + o.featureViewSyncs = buildUnnamed187(); o.nextPageToken = 'foo'; } buildCounterGoogleCloudAiplatformV1ListFeatureViewSyncsResponse--; @@ -12199,7 +13417,7 @@ void checkGoogleCloudAiplatformV1ListFeatureViewSyncsResponse( api.GoogleCloudAiplatformV1ListFeatureViewSyncsResponse o) { buildCounterGoogleCloudAiplatformV1ListFeatureViewSyncsResponse++; if (buildCounterGoogleCloudAiplatformV1ListFeatureViewSyncsResponse < 3) { - checkUnnamed172(o.featureViewSyncs!); + checkUnnamed187(o.featureViewSyncs!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -12208,12 +13426,12 @@ void checkGoogleCloudAiplatformV1ListFeatureViewSyncsResponse( buildCounterGoogleCloudAiplatformV1ListFeatureViewSyncsResponse--; } -core.List buildUnnamed173() => [ +core.List buildUnnamed188() => [ buildGoogleCloudAiplatformV1FeatureView(), buildGoogleCloudAiplatformV1FeatureView(), ]; -void checkUnnamed173(core.List o) { +void checkUnnamed188(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1FeatureView(o[0]); checkGoogleCloudAiplatformV1FeatureView(o[1]); @@ -12225,7 +13443,7 @@ api.GoogleCloudAiplatformV1ListFeatureViewsResponse final o = api.GoogleCloudAiplatformV1ListFeatureViewsResponse(); buildCounterGoogleCloudAiplatformV1ListFeatureViewsResponse++; if (buildCounterGoogleCloudAiplatformV1ListFeatureViewsResponse < 3) { - o.featureViews = buildUnnamed173(); + o.featureViews = buildUnnamed188(); o.nextPageToken = 'foo'; } buildCounterGoogleCloudAiplatformV1ListFeatureViewsResponse--; @@ -12236,7 +13454,7 @@ void 
checkGoogleCloudAiplatformV1ListFeatureViewsResponse( api.GoogleCloudAiplatformV1ListFeatureViewsResponse o) { buildCounterGoogleCloudAiplatformV1ListFeatureViewsResponse++; if (buildCounterGoogleCloudAiplatformV1ListFeatureViewsResponse < 3) { - checkUnnamed173(o.featureViews!); + checkUnnamed188(o.featureViews!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -12245,12 +13463,12 @@ void checkGoogleCloudAiplatformV1ListFeatureViewsResponse( buildCounterGoogleCloudAiplatformV1ListFeatureViewsResponse--; } -core.List buildUnnamed174() => [ +core.List buildUnnamed189() => [ buildGoogleCloudAiplatformV1Feature(), buildGoogleCloudAiplatformV1Feature(), ]; -void checkUnnamed174(core.List o) { +void checkUnnamed189(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1Feature(o[0]); checkGoogleCloudAiplatformV1Feature(o[1]); @@ -12262,7 +13480,7 @@ api.GoogleCloudAiplatformV1ListFeaturesResponse final o = api.GoogleCloudAiplatformV1ListFeaturesResponse(); buildCounterGoogleCloudAiplatformV1ListFeaturesResponse++; if (buildCounterGoogleCloudAiplatformV1ListFeaturesResponse < 3) { - o.features = buildUnnamed174(); + o.features = buildUnnamed189(); o.nextPageToken = 'foo'; } buildCounterGoogleCloudAiplatformV1ListFeaturesResponse--; @@ -12273,7 +13491,7 @@ void checkGoogleCloudAiplatformV1ListFeaturesResponse( api.GoogleCloudAiplatformV1ListFeaturesResponse o) { buildCounterGoogleCloudAiplatformV1ListFeaturesResponse++; if (buildCounterGoogleCloudAiplatformV1ListFeaturesResponse < 3) { - checkUnnamed174(o.features!); + checkUnnamed189(o.features!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -12282,12 +13500,12 @@ void checkGoogleCloudAiplatformV1ListFeaturesResponse( buildCounterGoogleCloudAiplatformV1ListFeaturesResponse--; } -core.List buildUnnamed175() => [ +core.List buildUnnamed190() => [ buildGoogleCloudAiplatformV1Featurestore(), buildGoogleCloudAiplatformV1Featurestore(), ]; -void checkUnnamed175(core.List o) { +void checkUnnamed190(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1Featurestore(o[0]); checkGoogleCloudAiplatformV1Featurestore(o[1]); @@ -12299,7 +13517,7 @@ api.GoogleCloudAiplatformV1ListFeaturestoresResponse final o = api.GoogleCloudAiplatformV1ListFeaturestoresResponse(); buildCounterGoogleCloudAiplatformV1ListFeaturestoresResponse++; if (buildCounterGoogleCloudAiplatformV1ListFeaturestoresResponse < 3) { - o.featurestores = buildUnnamed175(); + o.featurestores = buildUnnamed190(); o.nextPageToken = 'foo'; } buildCounterGoogleCloudAiplatformV1ListFeaturestoresResponse--; @@ -12310,7 +13528,7 @@ void checkGoogleCloudAiplatformV1ListFeaturestoresResponse( api.GoogleCloudAiplatformV1ListFeaturestoresResponse o) { buildCounterGoogleCloudAiplatformV1ListFeaturestoresResponse++; if (buildCounterGoogleCloudAiplatformV1ListFeaturestoresResponse < 3) { - checkUnnamed175(o.featurestores!); + checkUnnamed190(o.featurestores!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -12320,12 +13538,12 @@ void checkGoogleCloudAiplatformV1ListFeaturestoresResponse( } core.List - buildUnnamed176() => [ + buildUnnamed191() => [ buildGoogleCloudAiplatformV1HyperparameterTuningJob(), buildGoogleCloudAiplatformV1HyperparameterTuningJob(), ]; -void checkUnnamed176( +void checkUnnamed191( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1HyperparameterTuningJob(o[0]); @@ -12340,7 +13558,7 @@ 
api.GoogleCloudAiplatformV1ListHyperparameterTuningJobsResponse buildCounterGoogleCloudAiplatformV1ListHyperparameterTuningJobsResponse++; if (buildCounterGoogleCloudAiplatformV1ListHyperparameterTuningJobsResponse < 3) { - o.hyperparameterTuningJobs = buildUnnamed176(); + o.hyperparameterTuningJobs = buildUnnamed191(); o.nextPageToken = 'foo'; } buildCounterGoogleCloudAiplatformV1ListHyperparameterTuningJobsResponse--; @@ -12352,7 +13570,7 @@ void checkGoogleCloudAiplatformV1ListHyperparameterTuningJobsResponse( buildCounterGoogleCloudAiplatformV1ListHyperparameterTuningJobsResponse++; if (buildCounterGoogleCloudAiplatformV1ListHyperparameterTuningJobsResponse < 3) { - checkUnnamed176(o.hyperparameterTuningJobs!); + checkUnnamed191(o.hyperparameterTuningJobs!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -12361,12 +13579,12 @@ void checkGoogleCloudAiplatformV1ListHyperparameterTuningJobsResponse( buildCounterGoogleCloudAiplatformV1ListHyperparameterTuningJobsResponse--; } -core.List buildUnnamed177() => [ +core.List buildUnnamed192() => [ buildGoogleCloudAiplatformV1IndexEndpoint(), buildGoogleCloudAiplatformV1IndexEndpoint(), ]; -void checkUnnamed177(core.List o) { +void checkUnnamed192(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1IndexEndpoint(o[0]); checkGoogleCloudAiplatformV1IndexEndpoint(o[1]); @@ -12378,7 +13596,7 @@ api.GoogleCloudAiplatformV1ListIndexEndpointsResponse final o = api.GoogleCloudAiplatformV1ListIndexEndpointsResponse(); buildCounterGoogleCloudAiplatformV1ListIndexEndpointsResponse++; if (buildCounterGoogleCloudAiplatformV1ListIndexEndpointsResponse < 3) { - o.indexEndpoints = buildUnnamed177(); + o.indexEndpoints = buildUnnamed192(); o.nextPageToken = 'foo'; } buildCounterGoogleCloudAiplatformV1ListIndexEndpointsResponse--; @@ -12389,7 +13607,7 @@ void checkGoogleCloudAiplatformV1ListIndexEndpointsResponse( api.GoogleCloudAiplatformV1ListIndexEndpointsResponse o) { buildCounterGoogleCloudAiplatformV1ListIndexEndpointsResponse++; if (buildCounterGoogleCloudAiplatformV1ListIndexEndpointsResponse < 3) { - checkUnnamed177(o.indexEndpoints!); + checkUnnamed192(o.indexEndpoints!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -12398,12 +13616,12 @@ void checkGoogleCloudAiplatformV1ListIndexEndpointsResponse( buildCounterGoogleCloudAiplatformV1ListIndexEndpointsResponse--; } -core.List buildUnnamed178() => [ +core.List buildUnnamed193() => [ buildGoogleCloudAiplatformV1Index(), buildGoogleCloudAiplatformV1Index(), ]; -void checkUnnamed178(core.List o) { +void checkUnnamed193(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1Index(o[0]); checkGoogleCloudAiplatformV1Index(o[1]); @@ -12415,7 +13633,7 @@ api.GoogleCloudAiplatformV1ListIndexesResponse final o = api.GoogleCloudAiplatformV1ListIndexesResponse(); buildCounterGoogleCloudAiplatformV1ListIndexesResponse++; if (buildCounterGoogleCloudAiplatformV1ListIndexesResponse < 3) { - o.indexes = buildUnnamed178(); + o.indexes = buildUnnamed193(); o.nextPageToken = 'foo'; } buildCounterGoogleCloudAiplatformV1ListIndexesResponse--; @@ -12426,7 +13644,7 @@ void checkGoogleCloudAiplatformV1ListIndexesResponse( api.GoogleCloudAiplatformV1ListIndexesResponse o) { buildCounterGoogleCloudAiplatformV1ListIndexesResponse++; if (buildCounterGoogleCloudAiplatformV1ListIndexesResponse < 3) { - checkUnnamed178(o.indexes!); + checkUnnamed193(o.indexes!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -12435,12 
+13653,12 @@ void checkGoogleCloudAiplatformV1ListIndexesResponse( buildCounterGoogleCloudAiplatformV1ListIndexesResponse--; } -core.List buildUnnamed179() => [ +core.List buildUnnamed194() => [ buildGoogleCloudAiplatformV1MetadataSchema(), buildGoogleCloudAiplatformV1MetadataSchema(), ]; -void checkUnnamed179(core.List o) { +void checkUnnamed194(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1MetadataSchema(o[0]); checkGoogleCloudAiplatformV1MetadataSchema(o[1]); @@ -12452,7 +13670,7 @@ api.GoogleCloudAiplatformV1ListMetadataSchemasResponse final o = api.GoogleCloudAiplatformV1ListMetadataSchemasResponse(); buildCounterGoogleCloudAiplatformV1ListMetadataSchemasResponse++; if (buildCounterGoogleCloudAiplatformV1ListMetadataSchemasResponse < 3) { - o.metadataSchemas = buildUnnamed179(); + o.metadataSchemas = buildUnnamed194(); o.nextPageToken = 'foo'; } buildCounterGoogleCloudAiplatformV1ListMetadataSchemasResponse--; @@ -12463,7 +13681,7 @@ void checkGoogleCloudAiplatformV1ListMetadataSchemasResponse( api.GoogleCloudAiplatformV1ListMetadataSchemasResponse o) { buildCounterGoogleCloudAiplatformV1ListMetadataSchemasResponse++; if (buildCounterGoogleCloudAiplatformV1ListMetadataSchemasResponse < 3) { - checkUnnamed179(o.metadataSchemas!); + checkUnnamed194(o.metadataSchemas!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -12472,12 +13690,12 @@ void checkGoogleCloudAiplatformV1ListMetadataSchemasResponse( buildCounterGoogleCloudAiplatformV1ListMetadataSchemasResponse--; } -core.List buildUnnamed180() => [ +core.List buildUnnamed195() => [ buildGoogleCloudAiplatformV1MetadataStore(), buildGoogleCloudAiplatformV1MetadataStore(), ]; -void checkUnnamed180(core.List o) { +void checkUnnamed195(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1MetadataStore(o[0]); checkGoogleCloudAiplatformV1MetadataStore(o[1]); @@ -12489,7 +13707,7 @@ api.GoogleCloudAiplatformV1ListMetadataStoresResponse final o = api.GoogleCloudAiplatformV1ListMetadataStoresResponse(); buildCounterGoogleCloudAiplatformV1ListMetadataStoresResponse++; if (buildCounterGoogleCloudAiplatformV1ListMetadataStoresResponse < 3) { - o.metadataStores = buildUnnamed180(); + o.metadataStores = buildUnnamed195(); o.nextPageToken = 'foo'; } buildCounterGoogleCloudAiplatformV1ListMetadataStoresResponse--; @@ -12500,7 +13718,7 @@ void checkGoogleCloudAiplatformV1ListMetadataStoresResponse( api.GoogleCloudAiplatformV1ListMetadataStoresResponse o) { buildCounterGoogleCloudAiplatformV1ListMetadataStoresResponse++; if (buildCounterGoogleCloudAiplatformV1ListMetadataStoresResponse < 3) { - checkUnnamed180(o.metadataStores!); + checkUnnamed195(o.metadataStores!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -12510,12 +13728,12 @@ void checkGoogleCloudAiplatformV1ListMetadataStoresResponse( } core.List - buildUnnamed181() => [ + buildUnnamed196() => [ buildGoogleCloudAiplatformV1ModelDeploymentMonitoringJob(), buildGoogleCloudAiplatformV1ModelDeploymentMonitoringJob(), ]; -void checkUnnamed181( +void checkUnnamed196( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1ModelDeploymentMonitoringJob(o[0]); @@ -12532,7 +13750,7 @@ api.GoogleCloudAiplatformV1ListModelDeploymentMonitoringJobsResponse buildCounterGoogleCloudAiplatformV1ListModelDeploymentMonitoringJobsResponse++; if (buildCounterGoogleCloudAiplatformV1ListModelDeploymentMonitoringJobsResponse < 3) { - o.modelDeploymentMonitoringJobs = buildUnnamed181(); + 
o.modelDeploymentMonitoringJobs = buildUnnamed196(); o.nextPageToken = 'foo'; } buildCounterGoogleCloudAiplatformV1ListModelDeploymentMonitoringJobsResponse--; @@ -12544,7 +13762,7 @@ void checkGoogleCloudAiplatformV1ListModelDeploymentMonitoringJobsResponse( buildCounterGoogleCloudAiplatformV1ListModelDeploymentMonitoringJobsResponse++; if (buildCounterGoogleCloudAiplatformV1ListModelDeploymentMonitoringJobsResponse < 3) { - checkUnnamed181(o.modelDeploymentMonitoringJobs!); + checkUnnamed196(o.modelDeploymentMonitoringJobs!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -12553,13 +13771,13 @@ void checkGoogleCloudAiplatformV1ListModelDeploymentMonitoringJobsResponse( buildCounterGoogleCloudAiplatformV1ListModelDeploymentMonitoringJobsResponse--; } -core.List buildUnnamed182() => +core.List buildUnnamed197() => [ buildGoogleCloudAiplatformV1ModelEvaluationSlice(), buildGoogleCloudAiplatformV1ModelEvaluationSlice(), ]; -void checkUnnamed182( +void checkUnnamed197( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1ModelEvaluationSlice(o[0]); @@ -12574,7 +13792,7 @@ api.GoogleCloudAiplatformV1ListModelEvaluationSlicesResponse buildCounterGoogleCloudAiplatformV1ListModelEvaluationSlicesResponse++; if (buildCounterGoogleCloudAiplatformV1ListModelEvaluationSlicesResponse < 3) { - o.modelEvaluationSlices = buildUnnamed182(); + o.modelEvaluationSlices = buildUnnamed197(); o.nextPageToken = 'foo'; } buildCounterGoogleCloudAiplatformV1ListModelEvaluationSlicesResponse--; @@ -12586,7 +13804,7 @@ void checkGoogleCloudAiplatformV1ListModelEvaluationSlicesResponse( buildCounterGoogleCloudAiplatformV1ListModelEvaluationSlicesResponse++; if (buildCounterGoogleCloudAiplatformV1ListModelEvaluationSlicesResponse < 3) { - checkUnnamed182(o.modelEvaluationSlices!); + checkUnnamed197(o.modelEvaluationSlices!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -12595,12 +13813,12 @@ void checkGoogleCloudAiplatformV1ListModelEvaluationSlicesResponse( buildCounterGoogleCloudAiplatformV1ListModelEvaluationSlicesResponse--; } -core.List buildUnnamed183() => [ +core.List buildUnnamed198() => [ buildGoogleCloudAiplatformV1ModelEvaluation(), buildGoogleCloudAiplatformV1ModelEvaluation(), ]; -void checkUnnamed183(core.List o) { +void checkUnnamed198(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1ModelEvaluation(o[0]); checkGoogleCloudAiplatformV1ModelEvaluation(o[1]); @@ -12612,7 +13830,7 @@ api.GoogleCloudAiplatformV1ListModelEvaluationsResponse final o = api.GoogleCloudAiplatformV1ListModelEvaluationsResponse(); buildCounterGoogleCloudAiplatformV1ListModelEvaluationsResponse++; if (buildCounterGoogleCloudAiplatformV1ListModelEvaluationsResponse < 3) { - o.modelEvaluations = buildUnnamed183(); + o.modelEvaluations = buildUnnamed198(); o.nextPageToken = 'foo'; } buildCounterGoogleCloudAiplatformV1ListModelEvaluationsResponse--; @@ -12623,7 +13841,7 @@ void checkGoogleCloudAiplatformV1ListModelEvaluationsResponse( api.GoogleCloudAiplatformV1ListModelEvaluationsResponse o) { buildCounterGoogleCloudAiplatformV1ListModelEvaluationsResponse++; if (buildCounterGoogleCloudAiplatformV1ListModelEvaluationsResponse < 3) { - checkUnnamed183(o.modelEvaluations!); + checkUnnamed198(o.modelEvaluations!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -12632,12 +13850,12 @@ void checkGoogleCloudAiplatformV1ListModelEvaluationsResponse( buildCounterGoogleCloudAiplatformV1ListModelEvaluationsResponse--; } -core.List 
buildUnnamed184() => [ +core.List buildUnnamed199() => [ buildGoogleCloudAiplatformV1Model(), buildGoogleCloudAiplatformV1Model(), ]; -void checkUnnamed184(core.List o) { +void checkUnnamed199(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1Model(o[0]); checkGoogleCloudAiplatformV1Model(o[1]); @@ -12649,7 +13867,7 @@ api.GoogleCloudAiplatformV1ListModelVersionsResponse final o = api.GoogleCloudAiplatformV1ListModelVersionsResponse(); buildCounterGoogleCloudAiplatformV1ListModelVersionsResponse++; if (buildCounterGoogleCloudAiplatformV1ListModelVersionsResponse < 3) { - o.models = buildUnnamed184(); + o.models = buildUnnamed199(); o.nextPageToken = 'foo'; } buildCounterGoogleCloudAiplatformV1ListModelVersionsResponse--; @@ -12660,7 +13878,7 @@ void checkGoogleCloudAiplatformV1ListModelVersionsResponse( api.GoogleCloudAiplatformV1ListModelVersionsResponse o) { buildCounterGoogleCloudAiplatformV1ListModelVersionsResponse++; if (buildCounterGoogleCloudAiplatformV1ListModelVersionsResponse < 3) { - checkUnnamed184(o.models!); + checkUnnamed199(o.models!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -12669,12 +13887,12 @@ void checkGoogleCloudAiplatformV1ListModelVersionsResponse( buildCounterGoogleCloudAiplatformV1ListModelVersionsResponse--; } -core.List buildUnnamed185() => [ +core.List buildUnnamed200() => [ buildGoogleCloudAiplatformV1Model(), buildGoogleCloudAiplatformV1Model(), ]; -void checkUnnamed185(core.List o) { +void checkUnnamed200(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1Model(o[0]); checkGoogleCloudAiplatformV1Model(o[1]); @@ -12686,7 +13904,7 @@ api.GoogleCloudAiplatformV1ListModelsResponse final o = api.GoogleCloudAiplatformV1ListModelsResponse(); buildCounterGoogleCloudAiplatformV1ListModelsResponse++; if (buildCounterGoogleCloudAiplatformV1ListModelsResponse < 3) { - o.models = buildUnnamed185(); + o.models = buildUnnamed200(); o.nextPageToken = 'foo'; } buildCounterGoogleCloudAiplatformV1ListModelsResponse--; @@ -12697,7 +13915,7 @@ void checkGoogleCloudAiplatformV1ListModelsResponse( api.GoogleCloudAiplatformV1ListModelsResponse o) { buildCounterGoogleCloudAiplatformV1ListModelsResponse++; if (buildCounterGoogleCloudAiplatformV1ListModelsResponse < 3) { - checkUnnamed185(o.models!); + checkUnnamed200(o.models!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -12706,12 +13924,12 @@ void checkGoogleCloudAiplatformV1ListModelsResponse( buildCounterGoogleCloudAiplatformV1ListModelsResponse--; } -core.List buildUnnamed186() => [ +core.List buildUnnamed201() => [ buildGoogleCloudAiplatformV1NasJob(), buildGoogleCloudAiplatformV1NasJob(), ]; -void checkUnnamed186(core.List o) { +void checkUnnamed201(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1NasJob(o[0]); checkGoogleCloudAiplatformV1NasJob(o[1]); @@ -12723,7 +13941,7 @@ api.GoogleCloudAiplatformV1ListNasJobsResponse final o = api.GoogleCloudAiplatformV1ListNasJobsResponse(); buildCounterGoogleCloudAiplatformV1ListNasJobsResponse++; if (buildCounterGoogleCloudAiplatformV1ListNasJobsResponse < 3) { - o.nasJobs = buildUnnamed186(); + o.nasJobs = buildUnnamed201(); o.nextPageToken = 'foo'; } buildCounterGoogleCloudAiplatformV1ListNasJobsResponse--; @@ -12734,7 +13952,7 @@ void checkGoogleCloudAiplatformV1ListNasJobsResponse( api.GoogleCloudAiplatformV1ListNasJobsResponse o) { buildCounterGoogleCloudAiplatformV1ListNasJobsResponse++; if 
(buildCounterGoogleCloudAiplatformV1ListNasJobsResponse < 3) { - checkUnnamed186(o.nasJobs!); + checkUnnamed201(o.nasJobs!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -12743,12 +13961,12 @@ void checkGoogleCloudAiplatformV1ListNasJobsResponse( buildCounterGoogleCloudAiplatformV1ListNasJobsResponse--; } -core.List buildUnnamed187() => [ +core.List buildUnnamed202() => [ buildGoogleCloudAiplatformV1NasTrialDetail(), buildGoogleCloudAiplatformV1NasTrialDetail(), ]; -void checkUnnamed187(core.List o) { +void checkUnnamed202(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1NasTrialDetail(o[0]); checkGoogleCloudAiplatformV1NasTrialDetail(o[1]); @@ -12760,7 +13978,7 @@ api.GoogleCloudAiplatformV1ListNasTrialDetailsResponse final o = api.GoogleCloudAiplatformV1ListNasTrialDetailsResponse(); buildCounterGoogleCloudAiplatformV1ListNasTrialDetailsResponse++; if (buildCounterGoogleCloudAiplatformV1ListNasTrialDetailsResponse < 3) { - o.nasTrialDetails = buildUnnamed187(); + o.nasTrialDetails = buildUnnamed202(); o.nextPageToken = 'foo'; } buildCounterGoogleCloudAiplatformV1ListNasTrialDetailsResponse--; @@ -12771,7 +13989,7 @@ void checkGoogleCloudAiplatformV1ListNasTrialDetailsResponse( api.GoogleCloudAiplatformV1ListNasTrialDetailsResponse o) { buildCounterGoogleCloudAiplatformV1ListNasTrialDetailsResponse++; if (buildCounterGoogleCloudAiplatformV1ListNasTrialDetailsResponse < 3) { - checkUnnamed187(o.nasTrialDetails!); + checkUnnamed202(o.nasTrialDetails!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -12780,13 +13998,13 @@ void checkGoogleCloudAiplatformV1ListNasTrialDetailsResponse( buildCounterGoogleCloudAiplatformV1ListNasTrialDetailsResponse--; } -core.List buildUnnamed188() => +core.List buildUnnamed203() => [ buildGoogleCloudAiplatformV1NotebookExecutionJob(), buildGoogleCloudAiplatformV1NotebookExecutionJob(), ]; -void checkUnnamed188( +void checkUnnamed203( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1NotebookExecutionJob(o[0]); @@ -12802,7 +14020,7 @@ api.GoogleCloudAiplatformV1ListNotebookExecutionJobsResponse if (buildCounterGoogleCloudAiplatformV1ListNotebookExecutionJobsResponse < 3) { o.nextPageToken = 'foo'; - o.notebookExecutionJobs = buildUnnamed188(); + o.notebookExecutionJobs = buildUnnamed203(); } buildCounterGoogleCloudAiplatformV1ListNotebookExecutionJobsResponse--; return o; @@ -12817,18 +14035,18 @@ void checkGoogleCloudAiplatformV1ListNotebookExecutionJobsResponse( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed188(o.notebookExecutionJobs!); + checkUnnamed203(o.notebookExecutionJobs!); } buildCounterGoogleCloudAiplatformV1ListNotebookExecutionJobsResponse--; } core.List - buildUnnamed189() => [ + buildUnnamed204() => [ buildGoogleCloudAiplatformV1NotebookRuntimeTemplate(), buildGoogleCloudAiplatformV1NotebookRuntimeTemplate(), ]; -void checkUnnamed189( +void checkUnnamed204( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1NotebookRuntimeTemplate(o[0]); @@ -12844,7 +14062,7 @@ api.GoogleCloudAiplatformV1ListNotebookRuntimeTemplatesResponse if (buildCounterGoogleCloudAiplatformV1ListNotebookRuntimeTemplatesResponse < 3) { o.nextPageToken = 'foo'; - o.notebookRuntimeTemplates = buildUnnamed189(); + o.notebookRuntimeTemplates = buildUnnamed204(); } buildCounterGoogleCloudAiplatformV1ListNotebookRuntimeTemplatesResponse--; return o; @@ -12859,17 +14077,17 @@ void 
checkGoogleCloudAiplatformV1ListNotebookRuntimeTemplatesResponse( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed189(o.notebookRuntimeTemplates!); + checkUnnamed204(o.notebookRuntimeTemplates!); } buildCounterGoogleCloudAiplatformV1ListNotebookRuntimeTemplatesResponse--; } -core.List buildUnnamed190() => [ +core.List buildUnnamed205() => [ buildGoogleCloudAiplatformV1NotebookRuntime(), buildGoogleCloudAiplatformV1NotebookRuntime(), ]; -void checkUnnamed190(core.List o) { +void checkUnnamed205(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1NotebookRuntime(o[0]); checkGoogleCloudAiplatformV1NotebookRuntime(o[1]); @@ -12882,7 +14100,7 @@ api.GoogleCloudAiplatformV1ListNotebookRuntimesResponse buildCounterGoogleCloudAiplatformV1ListNotebookRuntimesResponse++; if (buildCounterGoogleCloudAiplatformV1ListNotebookRuntimesResponse < 3) { o.nextPageToken = 'foo'; - o.notebookRuntimes = buildUnnamed190(); + o.notebookRuntimes = buildUnnamed205(); } buildCounterGoogleCloudAiplatformV1ListNotebookRuntimesResponse--; return o; @@ -12896,7 +14114,7 @@ void checkGoogleCloudAiplatformV1ListNotebookRuntimesResponse( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed190(o.notebookRuntimes!); + checkUnnamed205(o.notebookRuntimes!); } buildCounterGoogleCloudAiplatformV1ListNotebookRuntimesResponse--; } @@ -12918,12 +14136,12 @@ void checkGoogleCloudAiplatformV1ListOptimalTrialsRequest( buildCounterGoogleCloudAiplatformV1ListOptimalTrialsRequest--; } -core.List buildUnnamed191() => [ +core.List buildUnnamed206() => [ buildGoogleCloudAiplatformV1Trial(), buildGoogleCloudAiplatformV1Trial(), ]; -void checkUnnamed191(core.List o) { +void checkUnnamed206(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1Trial(o[0]); checkGoogleCloudAiplatformV1Trial(o[1]); @@ -12935,7 +14153,7 @@ api.GoogleCloudAiplatformV1ListOptimalTrialsResponse final o = api.GoogleCloudAiplatformV1ListOptimalTrialsResponse(); buildCounterGoogleCloudAiplatformV1ListOptimalTrialsResponse++; if (buildCounterGoogleCloudAiplatformV1ListOptimalTrialsResponse < 3) { - o.optimalTrials = buildUnnamed191(); + o.optimalTrials = buildUnnamed206(); } buildCounterGoogleCloudAiplatformV1ListOptimalTrialsResponse--; return o; @@ -12945,17 +14163,17 @@ void checkGoogleCloudAiplatformV1ListOptimalTrialsResponse( api.GoogleCloudAiplatformV1ListOptimalTrialsResponse o) { buildCounterGoogleCloudAiplatformV1ListOptimalTrialsResponse++; if (buildCounterGoogleCloudAiplatformV1ListOptimalTrialsResponse < 3) { - checkUnnamed191(o.optimalTrials!); + checkUnnamed206(o.optimalTrials!); } buildCounterGoogleCloudAiplatformV1ListOptimalTrialsResponse--; } -core.List buildUnnamed192() => [ +core.List buildUnnamed207() => [ buildGoogleCloudAiplatformV1PersistentResource(), buildGoogleCloudAiplatformV1PersistentResource(), ]; -void checkUnnamed192( +void checkUnnamed207( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1PersistentResource(o[0]); @@ -12969,7 +14187,7 @@ api.GoogleCloudAiplatformV1ListPersistentResourcesResponse buildCounterGoogleCloudAiplatformV1ListPersistentResourcesResponse++; if (buildCounterGoogleCloudAiplatformV1ListPersistentResourcesResponse < 3) { o.nextPageToken = 'foo'; - o.persistentResources = buildUnnamed192(); + o.persistentResources = buildUnnamed207(); } buildCounterGoogleCloudAiplatformV1ListPersistentResourcesResponse--; return o; @@ -12983,17 +14201,17 @@ void 
checkGoogleCloudAiplatformV1ListPersistentResourcesResponse( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed192(o.persistentResources!); + checkUnnamed207(o.persistentResources!); } buildCounterGoogleCloudAiplatformV1ListPersistentResourcesResponse--; } -core.List buildUnnamed193() => [ +core.List buildUnnamed208() => [ buildGoogleCloudAiplatformV1PipelineJob(), buildGoogleCloudAiplatformV1PipelineJob(), ]; -void checkUnnamed193(core.List o) { +void checkUnnamed208(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1PipelineJob(o[0]); checkGoogleCloudAiplatformV1PipelineJob(o[1]); @@ -13006,7 +14224,7 @@ api.GoogleCloudAiplatformV1ListPipelineJobsResponse buildCounterGoogleCloudAiplatformV1ListPipelineJobsResponse++; if (buildCounterGoogleCloudAiplatformV1ListPipelineJobsResponse < 3) { o.nextPageToken = 'foo'; - o.pipelineJobs = buildUnnamed193(); + o.pipelineJobs = buildUnnamed208(); } buildCounterGoogleCloudAiplatformV1ListPipelineJobsResponse--; return o; @@ -13020,17 +14238,128 @@ void checkGoogleCloudAiplatformV1ListPipelineJobsResponse( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed193(o.pipelineJobs!); + checkUnnamed208(o.pipelineJobs!); } buildCounterGoogleCloudAiplatformV1ListPipelineJobsResponse--; } -core.List buildUnnamed194() => [ +core.List buildUnnamed209() => [ + buildGoogleCloudAiplatformV1RagCorpus(), + buildGoogleCloudAiplatformV1RagCorpus(), + ]; + +void checkUnnamed209(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkGoogleCloudAiplatformV1RagCorpus(o[0]); + checkGoogleCloudAiplatformV1RagCorpus(o[1]); +} + +core.int buildCounterGoogleCloudAiplatformV1ListRagCorporaResponse = 0; +api.GoogleCloudAiplatformV1ListRagCorporaResponse + buildGoogleCloudAiplatformV1ListRagCorporaResponse() { + final o = api.GoogleCloudAiplatformV1ListRagCorporaResponse(); + buildCounterGoogleCloudAiplatformV1ListRagCorporaResponse++; + if (buildCounterGoogleCloudAiplatformV1ListRagCorporaResponse < 3) { + o.nextPageToken = 'foo'; + o.ragCorpora = buildUnnamed209(); + } + buildCounterGoogleCloudAiplatformV1ListRagCorporaResponse--; + return o; +} + +void checkGoogleCloudAiplatformV1ListRagCorporaResponse( + api.GoogleCloudAiplatformV1ListRagCorporaResponse o) { + buildCounterGoogleCloudAiplatformV1ListRagCorporaResponse++; + if (buildCounterGoogleCloudAiplatformV1ListRagCorporaResponse < 3) { + unittest.expect( + o.nextPageToken!, + unittest.equals('foo'), + ); + checkUnnamed209(o.ragCorpora!); + } + buildCounterGoogleCloudAiplatformV1ListRagCorporaResponse--; +} + +core.List buildUnnamed210() => [ + buildGoogleCloudAiplatformV1RagFile(), + buildGoogleCloudAiplatformV1RagFile(), + ]; + +void checkUnnamed210(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkGoogleCloudAiplatformV1RagFile(o[0]); + checkGoogleCloudAiplatformV1RagFile(o[1]); +} + +core.int buildCounterGoogleCloudAiplatformV1ListRagFilesResponse = 0; +api.GoogleCloudAiplatformV1ListRagFilesResponse + buildGoogleCloudAiplatformV1ListRagFilesResponse() { + final o = api.GoogleCloudAiplatformV1ListRagFilesResponse(); + buildCounterGoogleCloudAiplatformV1ListRagFilesResponse++; + if (buildCounterGoogleCloudAiplatformV1ListRagFilesResponse < 3) { + o.nextPageToken = 'foo'; + o.ragFiles = buildUnnamed210(); + } + buildCounterGoogleCloudAiplatformV1ListRagFilesResponse--; + return o; +} + +void checkGoogleCloudAiplatformV1ListRagFilesResponse( + api.GoogleCloudAiplatformV1ListRagFilesResponse o) { + 
buildCounterGoogleCloudAiplatformV1ListRagFilesResponse++; + if (buildCounterGoogleCloudAiplatformV1ListRagFilesResponse < 3) { + unittest.expect( + o.nextPageToken!, + unittest.equals('foo'), + ); + checkUnnamed210(o.ragFiles!); + } + buildCounterGoogleCloudAiplatformV1ListRagFilesResponse--; +} + +core.List buildUnnamed211() => [ + buildGoogleCloudAiplatformV1ReasoningEngine(), + buildGoogleCloudAiplatformV1ReasoningEngine(), + ]; + +void checkUnnamed211(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkGoogleCloudAiplatformV1ReasoningEngine(o[0]); + checkGoogleCloudAiplatformV1ReasoningEngine(o[1]); +} + +core.int buildCounterGoogleCloudAiplatformV1ListReasoningEnginesResponse = 0; +api.GoogleCloudAiplatformV1ListReasoningEnginesResponse + buildGoogleCloudAiplatformV1ListReasoningEnginesResponse() { + final o = api.GoogleCloudAiplatformV1ListReasoningEnginesResponse(); + buildCounterGoogleCloudAiplatformV1ListReasoningEnginesResponse++; + if (buildCounterGoogleCloudAiplatformV1ListReasoningEnginesResponse < 3) { + o.nextPageToken = 'foo'; + o.reasoningEngines = buildUnnamed211(); + } + buildCounterGoogleCloudAiplatformV1ListReasoningEnginesResponse--; + return o; +} + +void checkGoogleCloudAiplatformV1ListReasoningEnginesResponse( + api.GoogleCloudAiplatformV1ListReasoningEnginesResponse o) { + buildCounterGoogleCloudAiplatformV1ListReasoningEnginesResponse++; + if (buildCounterGoogleCloudAiplatformV1ListReasoningEnginesResponse < 3) { + unittest.expect( + o.nextPageToken!, + unittest.equals('foo'), + ); + checkUnnamed211(o.reasoningEngines!); + } + buildCounterGoogleCloudAiplatformV1ListReasoningEnginesResponse--; +} + +core.List buildUnnamed212() => [ buildGoogleCloudAiplatformV1SavedQuery(), buildGoogleCloudAiplatformV1SavedQuery(), ]; -void checkUnnamed194(core.List o) { +void checkUnnamed212(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1SavedQuery(o[0]); checkGoogleCloudAiplatformV1SavedQuery(o[1]); @@ -13043,7 +14372,7 @@ api.GoogleCloudAiplatformV1ListSavedQueriesResponse buildCounterGoogleCloudAiplatformV1ListSavedQueriesResponse++; if (buildCounterGoogleCloudAiplatformV1ListSavedQueriesResponse < 3) { o.nextPageToken = 'foo'; - o.savedQueries = buildUnnamed194(); + o.savedQueries = buildUnnamed212(); } buildCounterGoogleCloudAiplatformV1ListSavedQueriesResponse--; return o; @@ -13057,17 +14386,17 @@ void checkGoogleCloudAiplatformV1ListSavedQueriesResponse( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed194(o.savedQueries!); + checkUnnamed212(o.savedQueries!); } buildCounterGoogleCloudAiplatformV1ListSavedQueriesResponse--; } -core.List buildUnnamed195() => [ +core.List buildUnnamed213() => [ buildGoogleCloudAiplatformV1Schedule(), buildGoogleCloudAiplatformV1Schedule(), ]; -void checkUnnamed195(core.List o) { +void checkUnnamed213(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1Schedule(o[0]); checkGoogleCloudAiplatformV1Schedule(o[1]); @@ -13080,7 +14409,7 @@ api.GoogleCloudAiplatformV1ListSchedulesResponse buildCounterGoogleCloudAiplatformV1ListSchedulesResponse++; if (buildCounterGoogleCloudAiplatformV1ListSchedulesResponse < 3) { o.nextPageToken = 'foo'; - o.schedules = buildUnnamed195(); + o.schedules = buildUnnamed213(); } buildCounterGoogleCloudAiplatformV1ListSchedulesResponse--; return o; @@ -13094,17 +14423,17 @@ void checkGoogleCloudAiplatformV1ListSchedulesResponse( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed195(o.schedules!); + 
checkUnnamed213(o.schedules!); } buildCounterGoogleCloudAiplatformV1ListSchedulesResponse--; } -core.List buildUnnamed196() => [ +core.List buildUnnamed214() => [ buildGoogleCloudAiplatformV1SpecialistPool(), buildGoogleCloudAiplatformV1SpecialistPool(), ]; -void checkUnnamed196(core.List o) { +void checkUnnamed214(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1SpecialistPool(o[0]); checkGoogleCloudAiplatformV1SpecialistPool(o[1]); @@ -13117,7 +14446,7 @@ api.GoogleCloudAiplatformV1ListSpecialistPoolsResponse buildCounterGoogleCloudAiplatformV1ListSpecialistPoolsResponse++; if (buildCounterGoogleCloudAiplatformV1ListSpecialistPoolsResponse < 3) { o.nextPageToken = 'foo'; - o.specialistPools = buildUnnamed196(); + o.specialistPools = buildUnnamed214(); } buildCounterGoogleCloudAiplatformV1ListSpecialistPoolsResponse--; return o; @@ -13131,17 +14460,17 @@ void checkGoogleCloudAiplatformV1ListSpecialistPoolsResponse( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed196(o.specialistPools!); + checkUnnamed214(o.specialistPools!); } buildCounterGoogleCloudAiplatformV1ListSpecialistPoolsResponse--; } -core.List buildUnnamed197() => [ +core.List buildUnnamed215() => [ buildGoogleCloudAiplatformV1Study(), buildGoogleCloudAiplatformV1Study(), ]; -void checkUnnamed197(core.List o) { +void checkUnnamed215(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1Study(o[0]); checkGoogleCloudAiplatformV1Study(o[1]); @@ -13154,7 +14483,7 @@ api.GoogleCloudAiplatformV1ListStudiesResponse buildCounterGoogleCloudAiplatformV1ListStudiesResponse++; if (buildCounterGoogleCloudAiplatformV1ListStudiesResponse < 3) { o.nextPageToken = 'foo'; - o.studies = buildUnnamed197(); + o.studies = buildUnnamed215(); } buildCounterGoogleCloudAiplatformV1ListStudiesResponse--; return o; @@ -13168,18 +14497,18 @@ void checkGoogleCloudAiplatformV1ListStudiesResponse( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed197(o.studies!); + checkUnnamed215(o.studies!); } buildCounterGoogleCloudAiplatformV1ListStudiesResponse--; } -core.List buildUnnamed198() => +core.List buildUnnamed216() => [ buildGoogleCloudAiplatformV1TensorboardExperiment(), buildGoogleCloudAiplatformV1TensorboardExperiment(), ]; -void checkUnnamed198( +void checkUnnamed216( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1TensorboardExperiment(o[0]); @@ -13195,7 +14524,7 @@ api.GoogleCloudAiplatformV1ListTensorboardExperimentsResponse if (buildCounterGoogleCloudAiplatformV1ListTensorboardExperimentsResponse < 3) { o.nextPageToken = 'foo'; - o.tensorboardExperiments = buildUnnamed198(); + o.tensorboardExperiments = buildUnnamed216(); } buildCounterGoogleCloudAiplatformV1ListTensorboardExperimentsResponse--; return o; @@ -13210,17 +14539,17 @@ void checkGoogleCloudAiplatformV1ListTensorboardExperimentsResponse( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed198(o.tensorboardExperiments!); + checkUnnamed216(o.tensorboardExperiments!); } buildCounterGoogleCloudAiplatformV1ListTensorboardExperimentsResponse--; } -core.List buildUnnamed199() => [ +core.List buildUnnamed217() => [ buildGoogleCloudAiplatformV1TensorboardRun(), buildGoogleCloudAiplatformV1TensorboardRun(), ]; -void checkUnnamed199(core.List o) { +void checkUnnamed217(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1TensorboardRun(o[0]); checkGoogleCloudAiplatformV1TensorboardRun(o[1]); @@ -13233,7 +14562,7 @@ 
api.GoogleCloudAiplatformV1ListTensorboardRunsResponse buildCounterGoogleCloudAiplatformV1ListTensorboardRunsResponse++; if (buildCounterGoogleCloudAiplatformV1ListTensorboardRunsResponse < 3) { o.nextPageToken = 'foo'; - o.tensorboardRuns = buildUnnamed199(); + o.tensorboardRuns = buildUnnamed217(); } buildCounterGoogleCloudAiplatformV1ListTensorboardRunsResponse--; return o; @@ -13247,18 +14576,18 @@ void checkGoogleCloudAiplatformV1ListTensorboardRunsResponse( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed199(o.tensorboardRuns!); + checkUnnamed217(o.tensorboardRuns!); } buildCounterGoogleCloudAiplatformV1ListTensorboardRunsResponse--; } -core.List buildUnnamed200() => +core.List buildUnnamed218() => [ buildGoogleCloudAiplatformV1TensorboardTimeSeries(), buildGoogleCloudAiplatformV1TensorboardTimeSeries(), ]; -void checkUnnamed200( +void checkUnnamed218( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1TensorboardTimeSeries(o[0]); @@ -13274,7 +14603,7 @@ api.GoogleCloudAiplatformV1ListTensorboardTimeSeriesResponse if (buildCounterGoogleCloudAiplatformV1ListTensorboardTimeSeriesResponse < 3) { o.nextPageToken = 'foo'; - o.tensorboardTimeSeries = buildUnnamed200(); + o.tensorboardTimeSeries = buildUnnamed218(); } buildCounterGoogleCloudAiplatformV1ListTensorboardTimeSeriesResponse--; return o; @@ -13289,17 +14618,17 @@ void checkGoogleCloudAiplatformV1ListTensorboardTimeSeriesResponse( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed200(o.tensorboardTimeSeries!); + checkUnnamed218(o.tensorboardTimeSeries!); } buildCounterGoogleCloudAiplatformV1ListTensorboardTimeSeriesResponse--; } -core.List buildUnnamed201() => [ +core.List buildUnnamed219() => [ buildGoogleCloudAiplatformV1Tensorboard(), buildGoogleCloudAiplatformV1Tensorboard(), ]; -void checkUnnamed201(core.List o) { +void checkUnnamed219(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1Tensorboard(o[0]); checkGoogleCloudAiplatformV1Tensorboard(o[1]); @@ -13312,7 +14641,7 @@ api.GoogleCloudAiplatformV1ListTensorboardsResponse buildCounterGoogleCloudAiplatformV1ListTensorboardsResponse++; if (buildCounterGoogleCloudAiplatformV1ListTensorboardsResponse < 3) { o.nextPageToken = 'foo'; - o.tensorboards = buildUnnamed201(); + o.tensorboards = buildUnnamed219(); } buildCounterGoogleCloudAiplatformV1ListTensorboardsResponse--; return o; @@ -13326,17 +14655,17 @@ void checkGoogleCloudAiplatformV1ListTensorboardsResponse( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed201(o.tensorboards!); + checkUnnamed219(o.tensorboards!); } buildCounterGoogleCloudAiplatformV1ListTensorboardsResponse--; } -core.List buildUnnamed202() => [ +core.List buildUnnamed220() => [ buildGoogleCloudAiplatformV1TrainingPipeline(), buildGoogleCloudAiplatformV1TrainingPipeline(), ]; -void checkUnnamed202(core.List o) { +void checkUnnamed220(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1TrainingPipeline(o[0]); checkGoogleCloudAiplatformV1TrainingPipeline(o[1]); @@ -13349,7 +14678,7 @@ api.GoogleCloudAiplatformV1ListTrainingPipelinesResponse buildCounterGoogleCloudAiplatformV1ListTrainingPipelinesResponse++; if (buildCounterGoogleCloudAiplatformV1ListTrainingPipelinesResponse < 3) { o.nextPageToken = 'foo'; - o.trainingPipelines = buildUnnamed202(); + o.trainingPipelines = buildUnnamed220(); } buildCounterGoogleCloudAiplatformV1ListTrainingPipelinesResponse--; return o; @@ -13363,17 +14692,17 @@ void 
checkGoogleCloudAiplatformV1ListTrainingPipelinesResponse( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed202(o.trainingPipelines!); + checkUnnamed220(o.trainingPipelines!); } buildCounterGoogleCloudAiplatformV1ListTrainingPipelinesResponse--; } -core.List buildUnnamed203() => [ +core.List buildUnnamed221() => [ buildGoogleCloudAiplatformV1Trial(), buildGoogleCloudAiplatformV1Trial(), ]; -void checkUnnamed203(core.List o) { +void checkUnnamed221(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1Trial(o[0]); checkGoogleCloudAiplatformV1Trial(o[1]); @@ -13386,7 +14715,7 @@ api.GoogleCloudAiplatformV1ListTrialsResponse buildCounterGoogleCloudAiplatformV1ListTrialsResponse++; if (buildCounterGoogleCloudAiplatformV1ListTrialsResponse < 3) { o.nextPageToken = 'foo'; - o.trials = buildUnnamed203(); + o.trials = buildUnnamed221(); } buildCounterGoogleCloudAiplatformV1ListTrialsResponse--; return o; @@ -13400,17 +14729,17 @@ void checkGoogleCloudAiplatformV1ListTrialsResponse( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed203(o.trials!); + checkUnnamed221(o.trials!); } buildCounterGoogleCloudAiplatformV1ListTrialsResponse--; } -core.List buildUnnamed204() => [ +core.List buildUnnamed222() => [ buildGoogleCloudAiplatformV1TuningJob(), buildGoogleCloudAiplatformV1TuningJob(), ]; -void checkUnnamed204(core.List o) { +void checkUnnamed222(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1TuningJob(o[0]); checkGoogleCloudAiplatformV1TuningJob(o[1]); @@ -13423,7 +14752,7 @@ api.GoogleCloudAiplatformV1ListTuningJobsResponse buildCounterGoogleCloudAiplatformV1ListTuningJobsResponse++; if (buildCounterGoogleCloudAiplatformV1ListTuningJobsResponse < 3) { o.nextPageToken = 'foo'; - o.tuningJobs = buildUnnamed204(); + o.tuningJobs = buildUnnamed222(); } buildCounterGoogleCloudAiplatformV1ListTuningJobsResponse--; return o; @@ -13437,18 +14766,18 @@ void checkGoogleCloudAiplatformV1ListTuningJobsResponse( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed204(o.tuningJobs!); + checkUnnamed222(o.tuningJobs!); } buildCounterGoogleCloudAiplatformV1ListTuningJobsResponse--; } core.List - buildUnnamed205() => [ + buildUnnamed223() => [ buildGoogleCloudAiplatformV1LogprobsResultCandidate(), buildGoogleCloudAiplatformV1LogprobsResultCandidate(), ]; -void checkUnnamed205( +void checkUnnamed223( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1LogprobsResultCandidate(o[0]); @@ -13456,12 +14785,12 @@ void checkUnnamed205( } core.List - buildUnnamed206() => [ + buildUnnamed224() => [ buildGoogleCloudAiplatformV1LogprobsResultTopCandidates(), buildGoogleCloudAiplatformV1LogprobsResultTopCandidates(), ]; -void checkUnnamed206( +void checkUnnamed224( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1LogprobsResultTopCandidates(o[0]); @@ -13474,8 +14803,8 @@ api.GoogleCloudAiplatformV1LogprobsResult final o = api.GoogleCloudAiplatformV1LogprobsResult(); buildCounterGoogleCloudAiplatformV1LogprobsResult++; if (buildCounterGoogleCloudAiplatformV1LogprobsResult < 3) { - o.chosenCandidates = buildUnnamed205(); - o.topCandidates = buildUnnamed206(); + o.chosenCandidates = buildUnnamed223(); + o.topCandidates = buildUnnamed224(); } buildCounterGoogleCloudAiplatformV1LogprobsResult--; return o; @@ -13485,8 +14814,8 @@ void checkGoogleCloudAiplatformV1LogprobsResult( api.GoogleCloudAiplatformV1LogprobsResult o) { 
buildCounterGoogleCloudAiplatformV1LogprobsResult++; if (buildCounterGoogleCloudAiplatformV1LogprobsResult < 3) { - checkUnnamed205(o.chosenCandidates!); - checkUnnamed206(o.topCandidates!); + checkUnnamed223(o.chosenCandidates!); + checkUnnamed224(o.topCandidates!); } buildCounterGoogleCloudAiplatformV1LogprobsResult--; } @@ -13526,12 +14855,12 @@ void checkGoogleCloudAiplatformV1LogprobsResultCandidate( } core.List - buildUnnamed207() => [ + buildUnnamed225() => [ buildGoogleCloudAiplatformV1LogprobsResultCandidate(), buildGoogleCloudAiplatformV1LogprobsResultCandidate(), ]; -void checkUnnamed207( +void checkUnnamed225( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1LogprobsResultCandidate(o[0]); @@ -13544,7 +14873,7 @@ api.GoogleCloudAiplatformV1LogprobsResultTopCandidates final o = api.GoogleCloudAiplatformV1LogprobsResultTopCandidates(); buildCounterGoogleCloudAiplatformV1LogprobsResultTopCandidates++; if (buildCounterGoogleCloudAiplatformV1LogprobsResultTopCandidates < 3) { - o.candidates = buildUnnamed207(); + o.candidates = buildUnnamed225(); } buildCounterGoogleCloudAiplatformV1LogprobsResultTopCandidates--; return o; @@ -13554,7 +14883,7 @@ void checkGoogleCloudAiplatformV1LogprobsResultTopCandidates( api.GoogleCloudAiplatformV1LogprobsResultTopCandidates o) { buildCounterGoogleCloudAiplatformV1LogprobsResultTopCandidates++; if (buildCounterGoogleCloudAiplatformV1LogprobsResultTopCandidates < 3) { - checkUnnamed207(o.candidates!); + checkUnnamed225(o.candidates!); } buildCounterGoogleCloudAiplatformV1LogprobsResultTopCandidates--; } @@ -13648,12 +14977,12 @@ void checkGoogleCloudAiplatformV1ManualBatchTuningParameters( buildCounterGoogleCloudAiplatformV1ManualBatchTuningParameters--; } -core.List buildUnnamed208() => [ +core.List buildUnnamed226() => [ buildGoogleCloudAiplatformV1MeasurementMetric(), buildGoogleCloudAiplatformV1MeasurementMetric(), ]; -void checkUnnamed208( +void checkUnnamed226( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1MeasurementMetric(o[0]); @@ -13667,7 +14996,7 @@ api.GoogleCloudAiplatformV1Measurement buildCounterGoogleCloudAiplatformV1Measurement++; if (buildCounterGoogleCloudAiplatformV1Measurement < 3) { o.elapsedDuration = 'foo'; - o.metrics = buildUnnamed208(); + o.metrics = buildUnnamed226(); o.stepCount = 'foo'; } buildCounterGoogleCloudAiplatformV1Measurement--; @@ -13682,7 +15011,7 @@ void checkGoogleCloudAiplatformV1Measurement( o.elapsedDuration!, unittest.equals('foo'), ); - checkUnnamed208(o.metrics!); + checkUnnamed226(o.metrics!); unittest.expect( o.stepCount!, unittest.equals('foo'), @@ -13720,12 +15049,12 @@ void checkGoogleCloudAiplatformV1MeasurementMetric( buildCounterGoogleCloudAiplatformV1MeasurementMetric--; } -core.List buildUnnamed209() => [ +core.List buildUnnamed227() => [ 'foo', 'foo', ]; -void checkUnnamed209(core.List o) { +void checkUnnamed227(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -13743,7 +15072,7 @@ api.GoogleCloudAiplatformV1MergeVersionAliasesRequest final o = api.GoogleCloudAiplatformV1MergeVersionAliasesRequest(); buildCounterGoogleCloudAiplatformV1MergeVersionAliasesRequest++; if (buildCounterGoogleCloudAiplatformV1MergeVersionAliasesRequest < 3) { - o.versionAliases = buildUnnamed209(); + o.versionAliases = buildUnnamed227(); } buildCounterGoogleCloudAiplatformV1MergeVersionAliasesRequest--; return o; @@ -13753,7 +15082,7 @@ void checkGoogleCloudAiplatformV1MergeVersionAliasesRequest( 
api.GoogleCloudAiplatformV1MergeVersionAliasesRequest o) { buildCounterGoogleCloudAiplatformV1MergeVersionAliasesRequest++; if (buildCounterGoogleCloudAiplatformV1MergeVersionAliasesRequest < 3) { - checkUnnamed209(o.versionAliases!); + checkUnnamed227(o.versionAliases!); } buildCounterGoogleCloudAiplatformV1MergeVersionAliasesRequest--; } @@ -13898,6 +15227,121 @@ void checkGoogleCloudAiplatformV1MetadataStoreMetadataStoreState( buildCounterGoogleCloudAiplatformV1MetadataStoreMetadataStoreState--; } +core.int buildCounterGoogleCloudAiplatformV1MetricxInput = 0; +api.GoogleCloudAiplatformV1MetricxInput + buildGoogleCloudAiplatformV1MetricxInput() { + final o = api.GoogleCloudAiplatformV1MetricxInput(); + buildCounterGoogleCloudAiplatformV1MetricxInput++; + if (buildCounterGoogleCloudAiplatformV1MetricxInput < 3) { + o.instance = buildGoogleCloudAiplatformV1MetricxInstance(); + o.metricSpec = buildGoogleCloudAiplatformV1MetricxSpec(); + } + buildCounterGoogleCloudAiplatformV1MetricxInput--; + return o; +} + +void checkGoogleCloudAiplatformV1MetricxInput( + api.GoogleCloudAiplatformV1MetricxInput o) { + buildCounterGoogleCloudAiplatformV1MetricxInput++; + if (buildCounterGoogleCloudAiplatformV1MetricxInput < 3) { + checkGoogleCloudAiplatformV1MetricxInstance(o.instance!); + checkGoogleCloudAiplatformV1MetricxSpec(o.metricSpec!); + } + buildCounterGoogleCloudAiplatformV1MetricxInput--; +} + +core.int buildCounterGoogleCloudAiplatformV1MetricxInstance = 0; +api.GoogleCloudAiplatformV1MetricxInstance + buildGoogleCloudAiplatformV1MetricxInstance() { + final o = api.GoogleCloudAiplatformV1MetricxInstance(); + buildCounterGoogleCloudAiplatformV1MetricxInstance++; + if (buildCounterGoogleCloudAiplatformV1MetricxInstance < 3) { + o.prediction = 'foo'; + o.reference = 'foo'; + o.source = 'foo'; + } + buildCounterGoogleCloudAiplatformV1MetricxInstance--; + return o; +} + +void checkGoogleCloudAiplatformV1MetricxInstance( + api.GoogleCloudAiplatformV1MetricxInstance o) { + buildCounterGoogleCloudAiplatformV1MetricxInstance++; + if (buildCounterGoogleCloudAiplatformV1MetricxInstance < 3) { + unittest.expect( + o.prediction!, + unittest.equals('foo'), + ); + unittest.expect( + o.reference!, + unittest.equals('foo'), + ); + unittest.expect( + o.source!, + unittest.equals('foo'), + ); + } + buildCounterGoogleCloudAiplatformV1MetricxInstance--; +} + +core.int buildCounterGoogleCloudAiplatformV1MetricxResult = 0; +api.GoogleCloudAiplatformV1MetricxResult + buildGoogleCloudAiplatformV1MetricxResult() { + final o = api.GoogleCloudAiplatformV1MetricxResult(); + buildCounterGoogleCloudAiplatformV1MetricxResult++; + if (buildCounterGoogleCloudAiplatformV1MetricxResult < 3) { + o.score = 42.0; + } + buildCounterGoogleCloudAiplatformV1MetricxResult--; + return o; +} + +void checkGoogleCloudAiplatformV1MetricxResult( + api.GoogleCloudAiplatformV1MetricxResult o) { + buildCounterGoogleCloudAiplatformV1MetricxResult++; + if (buildCounterGoogleCloudAiplatformV1MetricxResult < 3) { + unittest.expect( + o.score!, + unittest.equals(42.0), + ); + } + buildCounterGoogleCloudAiplatformV1MetricxResult--; +} + +core.int buildCounterGoogleCloudAiplatformV1MetricxSpec = 0; +api.GoogleCloudAiplatformV1MetricxSpec + buildGoogleCloudAiplatformV1MetricxSpec() { + final o = api.GoogleCloudAiplatformV1MetricxSpec(); + buildCounterGoogleCloudAiplatformV1MetricxSpec++; + if (buildCounterGoogleCloudAiplatformV1MetricxSpec < 3) { + o.sourceLanguage = 'foo'; + o.targetLanguage = 'foo'; + o.version = 'foo'; + } + 
buildCounterGoogleCloudAiplatformV1MetricxSpec--; + return o; +} + +void checkGoogleCloudAiplatformV1MetricxSpec( + api.GoogleCloudAiplatformV1MetricxSpec o) { + buildCounterGoogleCloudAiplatformV1MetricxSpec++; + if (buildCounterGoogleCloudAiplatformV1MetricxSpec < 3) { + unittest.expect( + o.sourceLanguage!, + unittest.equals('foo'), + ); + unittest.expect( + o.targetLanguage!, + unittest.equals('foo'), + ); + unittest.expect( + o.version!, + unittest.equals('foo'), + ); + } + buildCounterGoogleCloudAiplatformV1MetricxSpec--; +} + core.int buildCounterGoogleCloudAiplatformV1MigratableResource = 0; api.GoogleCloudAiplatformV1MigratableResource buildGoogleCloudAiplatformV1MigratableResource() { @@ -14002,12 +15446,12 @@ void checkGoogleCloudAiplatformV1MigratableResourceAutomlModel( core.List< api .GoogleCloudAiplatformV1MigratableResourceDataLabelingDatasetDataLabelingAnnotatedDataset> - buildUnnamed210() => [ + buildUnnamed228() => [ buildGoogleCloudAiplatformV1MigratableResourceDataLabelingDatasetDataLabelingAnnotatedDataset(), buildGoogleCloudAiplatformV1MigratableResourceDataLabelingDatasetDataLabelingAnnotatedDataset(), ]; -void checkUnnamed210( +void checkUnnamed228( core.List< api .GoogleCloudAiplatformV1MigratableResourceDataLabelingDatasetDataLabelingAnnotatedDataset> @@ -14028,7 +15472,7 @@ api.GoogleCloudAiplatformV1MigratableResourceDataLabelingDataset buildCounterGoogleCloudAiplatformV1MigratableResourceDataLabelingDataset++; if (buildCounterGoogleCloudAiplatformV1MigratableResourceDataLabelingDataset < 3) { - o.dataLabelingAnnotatedDatasets = buildUnnamed210(); + o.dataLabelingAnnotatedDatasets = buildUnnamed228(); o.dataset = 'foo'; o.datasetDisplayName = 'foo'; } @@ -14041,7 +15485,7 @@ void checkGoogleCloudAiplatformV1MigratableResourceDataLabelingDataset( buildCounterGoogleCloudAiplatformV1MigratableResourceDataLabelingDataset++; if (buildCounterGoogleCloudAiplatformV1MigratableResourceDataLabelingDataset < 3) { - checkUnnamed210(o.dataLabelingAnnotatedDatasets!); + checkUnnamed228(o.dataLabelingAnnotatedDatasets!); unittest.expect( o.dataset!, unittest.equals('foo'), @@ -14230,12 +15674,12 @@ void checkGoogleCloudAiplatformV1MigrateResourceRequestMigrateAutomlModelConfig( core.List< api .GoogleCloudAiplatformV1MigrateResourceRequestMigrateDataLabelingDatasetConfigMigrateDataLabelingAnnotatedDatasetConfig> - buildUnnamed211() => [ + buildUnnamed229() => [ buildGoogleCloudAiplatformV1MigrateResourceRequestMigrateDataLabelingDatasetConfigMigrateDataLabelingAnnotatedDatasetConfig(), buildGoogleCloudAiplatformV1MigrateResourceRequestMigrateDataLabelingDatasetConfigMigrateDataLabelingAnnotatedDatasetConfig(), ]; -void checkUnnamed211( +void checkUnnamed229( core.List< api .GoogleCloudAiplatformV1MigrateResourceRequestMigrateDataLabelingDatasetConfigMigrateDataLabelingAnnotatedDatasetConfig> @@ -14259,7 +15703,7 @@ api.GoogleCloudAiplatformV1MigrateResourceRequestMigrateDataLabelingDatasetConfi 3) { o.dataset = 'foo'; o.datasetDisplayName = 'foo'; - o.migrateDataLabelingAnnotatedDatasetConfigs = buildUnnamed211(); + o.migrateDataLabelingAnnotatedDatasetConfigs = buildUnnamed229(); } buildCounterGoogleCloudAiplatformV1MigrateResourceRequestMigrateDataLabelingDatasetConfig--; return o; @@ -14279,7 +15723,7 @@ void checkGoogleCloudAiplatformV1MigrateResourceRequestMigrateDataLabelingDatase o.datasetDisplayName!, unittest.equals('foo'), ); - checkUnnamed211(o.migrateDataLabelingAnnotatedDatasetConfigs!); + checkUnnamed229(o.migrateDataLabelingAnnotatedDatasetConfigs!); } 
buildCounterGoogleCloudAiplatformV1MigrateResourceRequestMigrateDataLabelingDatasetConfig--; } @@ -14354,23 +15798,23 @@ void checkGoogleCloudAiplatformV1MigrateResourceRequestMigrateMlEngineModelVersi buildCounterGoogleCloudAiplatformV1MigrateResourceRequestMigrateMlEngineModelVersionConfig--; } -core.List buildUnnamed212() => [ +core.List buildUnnamed230() => [ buildGoogleCloudAiplatformV1DeployedModelRef(), buildGoogleCloudAiplatformV1DeployedModelRef(), ]; -void checkUnnamed212(core.List o) { +void checkUnnamed230(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1DeployedModelRef(o[0]); checkGoogleCloudAiplatformV1DeployedModelRef(o[1]); } -core.Map buildUnnamed213() => { +core.Map buildUnnamed231() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed213(core.Map o) { +void checkUnnamed231(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -14382,12 +15826,12 @@ void checkUnnamed213(core.Map o) { ); } -core.List buildUnnamed214() => [ +core.List buildUnnamed232() => [ 'foo', 'foo', ]; -void checkUnnamed214(core.List o) { +void checkUnnamed232(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -14399,24 +15843,24 @@ void checkUnnamed214(core.List o) { ); } -core.List buildUnnamed215() => [ +core.List buildUnnamed233() => [ buildGoogleCloudAiplatformV1ModelExportFormat(), buildGoogleCloudAiplatformV1ModelExportFormat(), ]; -void checkUnnamed215( +void checkUnnamed233( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1ModelExportFormat(o[0]); checkGoogleCloudAiplatformV1ModelExportFormat(o[1]); } -core.List buildUnnamed216() => [ +core.List buildUnnamed234() => [ 'foo', 'foo', ]; -void checkUnnamed216(core.List o) { +void checkUnnamed234(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -14428,12 +15872,12 @@ void checkUnnamed216(core.List o) { ); } -core.List buildUnnamed217() => [ +core.List buildUnnamed235() => [ 'foo', 'foo', ]; -void checkUnnamed217(core.List o) { +void checkUnnamed235(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -14445,12 +15889,12 @@ void checkUnnamed217(core.List o) { ); } -core.List buildUnnamed218() => [ +core.List buildUnnamed236() => [ 'foo', 'foo', ]; -void checkUnnamed218(core.List o) { +void checkUnnamed236(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -14472,13 +15916,13 @@ api.GoogleCloudAiplatformV1Model buildGoogleCloudAiplatformV1Model() { o.containerSpec = buildGoogleCloudAiplatformV1ModelContainerSpec(); o.createTime = 'foo'; o.dataStats = buildGoogleCloudAiplatformV1ModelDataStats(); - o.deployedModels = buildUnnamed212(); + o.deployedModels = buildUnnamed230(); o.description = 'foo'; o.displayName = 'foo'; o.encryptionSpec = buildGoogleCloudAiplatformV1EncryptionSpec(); o.etag = 'foo'; o.explanationSpec = buildGoogleCloudAiplatformV1ExplanationSpec(); - o.labels = buildUnnamed213(); + o.labels = buildUnnamed231(); o.metadata = { 'list': [1, 2, 3], 'bool': true, @@ -14493,13 +15937,13 @@ api.GoogleCloudAiplatformV1Model buildGoogleCloudAiplatformV1Model() { o.predictSchemata = buildGoogleCloudAiplatformV1PredictSchemata(); o.satisfiesPzi = true; o.satisfiesPzs = true; - o.supportedDeploymentResourcesTypes = buildUnnamed214(); - o.supportedExportFormats = buildUnnamed215(); - o.supportedInputStorageFormats = buildUnnamed216(); - o.supportedOutputStorageFormats = buildUnnamed217(); + 
o.supportedDeploymentResourcesTypes = buildUnnamed232(); + o.supportedExportFormats = buildUnnamed233(); + o.supportedInputStorageFormats = buildUnnamed234(); + o.supportedOutputStorageFormats = buildUnnamed235(); o.trainingPipeline = 'foo'; o.updateTime = 'foo'; - o.versionAliases = buildUnnamed218(); + o.versionAliases = buildUnnamed236(); o.versionCreateTime = 'foo'; o.versionDescription = 'foo'; o.versionId = 'foo'; @@ -14523,7 +15967,7 @@ void checkGoogleCloudAiplatformV1Model(api.GoogleCloudAiplatformV1Model o) { unittest.equals('foo'), ); checkGoogleCloudAiplatformV1ModelDataStats(o.dataStats!); - checkUnnamed212(o.deployedModels!); + checkUnnamed230(o.deployedModels!); unittest.expect( o.description!, unittest.equals('foo'), @@ -14538,7 +15982,7 @@ void checkGoogleCloudAiplatformV1Model(api.GoogleCloudAiplatformV1Model o) { unittest.equals('foo'), ); checkGoogleCloudAiplatformV1ExplanationSpec(o.explanationSpec!); - checkUnnamed213(o.labels!); + checkUnnamed231(o.labels!); var casted47 = (o.metadata!) as core.Map; unittest.expect(casted47, unittest.hasLength(3)); unittest.expect( @@ -14574,10 +16018,10 @@ void checkGoogleCloudAiplatformV1Model(api.GoogleCloudAiplatformV1Model o) { checkGoogleCloudAiplatformV1PredictSchemata(o.predictSchemata!); unittest.expect(o.satisfiesPzi!, unittest.isTrue); unittest.expect(o.satisfiesPzs!, unittest.isTrue); - checkUnnamed214(o.supportedDeploymentResourcesTypes!); - checkUnnamed215(o.supportedExportFormats!); - checkUnnamed216(o.supportedInputStorageFormats!); - checkUnnamed217(o.supportedOutputStorageFormats!); + checkUnnamed232(o.supportedDeploymentResourcesTypes!); + checkUnnamed233(o.supportedExportFormats!); + checkUnnamed234(o.supportedInputStorageFormats!); + checkUnnamed235(o.supportedOutputStorageFormats!); unittest.expect( o.trainingPipeline!, unittest.equals('foo'), @@ -14586,7 +16030,7 @@ void checkGoogleCloudAiplatformV1Model(api.GoogleCloudAiplatformV1Model o) { o.updateTime!, unittest.equals('foo'), ); - checkUnnamed218(o.versionAliases!); + checkUnnamed236(o.versionAliases!); unittest.expect( o.versionCreateTime!, unittest.equals('foo'), @@ -14630,12 +16074,12 @@ void checkGoogleCloudAiplatformV1ModelBaseModelSource( buildCounterGoogleCloudAiplatformV1ModelBaseModelSource--; } -core.List buildUnnamed219() => [ +core.List buildUnnamed237() => [ 'foo', 'foo', ]; -void checkUnnamed219(core.List o) { +void checkUnnamed237(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -14647,12 +16091,12 @@ void checkUnnamed219(core.List o) { ); } -core.List buildUnnamed220() => [ +core.List buildUnnamed238() => [ 'foo', 'foo', ]; -void checkUnnamed220(core.List o) { +void checkUnnamed238(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -14664,34 +16108,34 @@ void checkUnnamed220(core.List o) { ); } -core.List buildUnnamed221() => [ +core.List buildUnnamed239() => [ buildGoogleCloudAiplatformV1EnvVar(), buildGoogleCloudAiplatformV1EnvVar(), ]; -void checkUnnamed221(core.List o) { +void checkUnnamed239(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1EnvVar(o[0]); checkGoogleCloudAiplatformV1EnvVar(o[1]); } -core.List buildUnnamed222() => [ +core.List buildUnnamed240() => [ buildGoogleCloudAiplatformV1Port(), buildGoogleCloudAiplatformV1Port(), ]; -void checkUnnamed222(core.List o) { +void checkUnnamed240(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1Port(o[0]); checkGoogleCloudAiplatformV1Port(o[1]); } 
-core.List buildUnnamed223() => [ +core.List buildUnnamed241() => [ buildGoogleCloudAiplatformV1Port(), buildGoogleCloudAiplatformV1Port(), ]; -void checkUnnamed223(core.List o) { +void checkUnnamed241(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1Port(o[0]); checkGoogleCloudAiplatformV1Port(o[1]); @@ -14703,15 +16147,15 @@ api.GoogleCloudAiplatformV1ModelContainerSpec final o = api.GoogleCloudAiplatformV1ModelContainerSpec(); buildCounterGoogleCloudAiplatformV1ModelContainerSpec++; if (buildCounterGoogleCloudAiplatformV1ModelContainerSpec < 3) { - o.args = buildUnnamed219(); - o.command = buildUnnamed220(); + o.args = buildUnnamed237(); + o.command = buildUnnamed238(); o.deploymentTimeout = 'foo'; - o.env = buildUnnamed221(); - o.grpcPorts = buildUnnamed222(); + o.env = buildUnnamed239(); + o.grpcPorts = buildUnnamed240(); o.healthProbe = buildGoogleCloudAiplatformV1Probe(); o.healthRoute = 'foo'; o.imageUri = 'foo'; - o.ports = buildUnnamed223(); + o.ports = buildUnnamed241(); o.predictRoute = 'foo'; o.sharedMemorySizeMb = 'foo'; o.startupProbe = buildGoogleCloudAiplatformV1Probe(); @@ -14724,14 +16168,14 @@ void checkGoogleCloudAiplatformV1ModelContainerSpec( api.GoogleCloudAiplatformV1ModelContainerSpec o) { buildCounterGoogleCloudAiplatformV1ModelContainerSpec++; if (buildCounterGoogleCloudAiplatformV1ModelContainerSpec < 3) { - checkUnnamed219(o.args!); - checkUnnamed220(o.command!); + checkUnnamed237(o.args!); + checkUnnamed238(o.command!); unittest.expect( o.deploymentTimeout!, unittest.equals('foo'), ); - checkUnnamed221(o.env!); - checkUnnamed222(o.grpcPorts!); + checkUnnamed239(o.env!); + checkUnnamed240(o.grpcPorts!); checkGoogleCloudAiplatformV1Probe(o.healthProbe!); unittest.expect( o.healthRoute!, @@ -14741,7 +16185,7 @@ void checkGoogleCloudAiplatformV1ModelContainerSpec( o.imageUri!, unittest.equals('foo'), ); - checkUnnamed223(o.ports!); + checkUnnamed241(o.ports!); unittest.expect( o.predictRoute!, unittest.equals('foo'), @@ -14848,12 +16292,12 @@ void checkGoogleCloudAiplatformV1ModelDeploymentMonitoringBigQueryTable( } core.List - buildUnnamed224() => [ + buildUnnamed242() => [ buildGoogleCloudAiplatformV1ModelDeploymentMonitoringBigQueryTable(), buildGoogleCloudAiplatformV1ModelDeploymentMonitoringBigQueryTable(), ]; -void checkUnnamed224( +void checkUnnamed242( core.List o) { unittest.expect(o, unittest.hasLength(2)); @@ -14861,12 +16305,12 @@ void checkUnnamed224( checkGoogleCloudAiplatformV1ModelDeploymentMonitoringBigQueryTable(o[1]); } -core.Map buildUnnamed225() => { +core.Map buildUnnamed243() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed225(core.Map o) { +void checkUnnamed243(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -14879,12 +16323,12 @@ void checkUnnamed225(core.Map o) { } core.List - buildUnnamed226() => [ + buildUnnamed244() => [ buildGoogleCloudAiplatformV1ModelDeploymentMonitoringObjectiveConfig(), buildGoogleCloudAiplatformV1ModelDeploymentMonitoringObjectiveConfig(), ]; -void checkUnnamed226( +void checkUnnamed244( core.List< api.GoogleCloudAiplatformV1ModelDeploymentMonitoringObjectiveConfig> o) { @@ -14900,19 +16344,19 @@ api.GoogleCloudAiplatformV1ModelDeploymentMonitoringJob buildCounterGoogleCloudAiplatformV1ModelDeploymentMonitoringJob++; if (buildCounterGoogleCloudAiplatformV1ModelDeploymentMonitoringJob < 3) { o.analysisInstanceSchemaUri = 'foo'; - o.bigqueryTables = buildUnnamed224(); + o.bigqueryTables = buildUnnamed242(); o.createTime = 'foo'; 
o.displayName = 'foo'; o.enableMonitoringPipelineLogs = true; o.encryptionSpec = buildGoogleCloudAiplatformV1EncryptionSpec(); o.endpoint = 'foo'; o.error = buildGoogleRpcStatus(); - o.labels = buildUnnamed225(); + o.labels = buildUnnamed243(); o.latestMonitoringPipelineMetadata = buildGoogleCloudAiplatformV1ModelDeploymentMonitoringJobLatestMonitoringPipelineMetadata(); o.logTtl = 'foo'; o.loggingSamplingStrategy = buildGoogleCloudAiplatformV1SamplingStrategy(); - o.modelDeploymentMonitoringObjectiveConfigs = buildUnnamed226(); + o.modelDeploymentMonitoringObjectiveConfigs = buildUnnamed244(); o.modelDeploymentMonitoringScheduleConfig = buildGoogleCloudAiplatformV1ModelDeploymentMonitoringScheduleConfig(); o.modelMonitoringAlertConfig = @@ -14945,7 +16389,7 @@ void checkGoogleCloudAiplatformV1ModelDeploymentMonitoringJob( o.analysisInstanceSchemaUri!, unittest.equals('foo'), ); - checkUnnamed224(o.bigqueryTables!); + checkUnnamed242(o.bigqueryTables!); unittest.expect( o.createTime!, unittest.equals('foo'), @@ -14961,7 +16405,7 @@ void checkGoogleCloudAiplatformV1ModelDeploymentMonitoringJob( unittest.equals('foo'), ); checkGoogleRpcStatus(o.error!); - checkUnnamed225(o.labels!); + checkUnnamed243(o.labels!); checkGoogleCloudAiplatformV1ModelDeploymentMonitoringJobLatestMonitoringPipelineMetadata( o.latestMonitoringPipelineMetadata!); unittest.expect( @@ -14969,7 +16413,7 @@ void checkGoogleCloudAiplatformV1ModelDeploymentMonitoringJob( unittest.equals('foo'), ); checkGoogleCloudAiplatformV1SamplingStrategy(o.loggingSamplingStrategy!); - checkUnnamed226(o.modelDeploymentMonitoringObjectiveConfigs!); + checkUnnamed244(o.modelDeploymentMonitoringObjectiveConfigs!); checkGoogleCloudAiplatformV1ModelDeploymentMonitoringScheduleConfig( o.modelDeploymentMonitoringScheduleConfig!); checkGoogleCloudAiplatformV1ModelMonitoringAlertConfig( @@ -15121,12 +16565,12 @@ void checkGoogleCloudAiplatformV1ModelDeploymentMonitoringScheduleConfig( core.List< api .GoogleCloudAiplatformV1ModelEvaluationModelEvaluationExplanationSpec> - buildUnnamed227() => [ + buildUnnamed245() => [ buildGoogleCloudAiplatformV1ModelEvaluationModelEvaluationExplanationSpec(), buildGoogleCloudAiplatformV1ModelEvaluationModelEvaluationExplanationSpec(), ]; -void checkUnnamed227( +void checkUnnamed245( core.List< api .GoogleCloudAiplatformV1ModelEvaluationModelEvaluationExplanationSpec> @@ -15138,12 +16582,12 @@ void checkUnnamed227( o[1]); } -core.List buildUnnamed228() => [ +core.List buildUnnamed246() => [ 'foo', 'foo', ]; -void checkUnnamed228(core.List o) { +void checkUnnamed246(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -15165,7 +16609,7 @@ api.GoogleCloudAiplatformV1ModelEvaluation o.createTime = 'foo'; o.dataItemSchemaUri = 'foo'; o.displayName = 'foo'; - o.explanationSpecs = buildUnnamed227(); + o.explanationSpecs = buildUnnamed245(); o.metadata = { 'list': [1, 2, 3], 'bool': true, @@ -15179,7 +16623,7 @@ api.GoogleCloudAiplatformV1ModelEvaluation o.metricsSchemaUri = 'foo'; o.modelExplanation = buildGoogleCloudAiplatformV1ModelExplanation(); o.name = 'foo'; - o.sliceDimensions = buildUnnamed228(); + o.sliceDimensions = buildUnnamed246(); } buildCounterGoogleCloudAiplatformV1ModelEvaluation--; return o; @@ -15205,7 +16649,7 @@ void checkGoogleCloudAiplatformV1ModelEvaluation( o.displayName!, unittest.equals('foo'), ); - checkUnnamed227(o.explanationSpecs!); + checkUnnamed245(o.explanationSpecs!); var casted49 = (o.metadata!) 
as core.Map; unittest.expect(casted49, unittest.hasLength(3)); unittest.expect( @@ -15243,7 +16687,7 @@ void checkGoogleCloudAiplatformV1ModelEvaluation( o.name!, unittest.equals('foo'), ); - checkUnnamed228(o.sliceDimensions!); + checkUnnamed246(o.sliceDimensions!); } buildCounterGoogleCloudAiplatformV1ModelEvaluation--; } @@ -15374,14 +16818,14 @@ core.Map< core.String, api .GoogleCloudAiplatformV1ModelEvaluationSliceSliceSliceSpecSliceConfig> - buildUnnamed229() => { + buildUnnamed247() => { 'x': buildGoogleCloudAiplatformV1ModelEvaluationSliceSliceSliceSpecSliceConfig(), 'y': buildGoogleCloudAiplatformV1ModelEvaluationSliceSliceSliceSpecSliceConfig(), }; -void checkUnnamed229( +void checkUnnamed247( core.Map< core.String, api @@ -15402,7 +16846,7 @@ api.GoogleCloudAiplatformV1ModelEvaluationSliceSliceSliceSpec buildCounterGoogleCloudAiplatformV1ModelEvaluationSliceSliceSliceSpec++; if (buildCounterGoogleCloudAiplatformV1ModelEvaluationSliceSliceSliceSpec < 3) { - o.configs = buildUnnamed229(); + o.configs = buildUnnamed247(); } buildCounterGoogleCloudAiplatformV1ModelEvaluationSliceSliceSliceSpec--; return o; @@ -15413,7 +16857,7 @@ void checkGoogleCloudAiplatformV1ModelEvaluationSliceSliceSliceSpec( buildCounterGoogleCloudAiplatformV1ModelEvaluationSliceSliceSliceSpec++; if (buildCounterGoogleCloudAiplatformV1ModelEvaluationSliceSliceSliceSpec < 3) { - checkUnnamed229(o.configs!); + checkUnnamed247(o.configs!); } buildCounterGoogleCloudAiplatformV1ModelEvaluationSliceSliceSliceSpec--; } @@ -15521,12 +16965,12 @@ void checkGoogleCloudAiplatformV1ModelEvaluationSliceSliceSliceSpecValue( buildCounterGoogleCloudAiplatformV1ModelEvaluationSliceSliceSliceSpecValue--; } -core.List buildUnnamed230() => [ +core.List buildUnnamed248() => [ buildGoogleCloudAiplatformV1Attribution(), buildGoogleCloudAiplatformV1Attribution(), ]; -void checkUnnamed230(core.List o) { +void checkUnnamed248(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1Attribution(o[0]); checkGoogleCloudAiplatformV1Attribution(o[1]); @@ -15538,7 +16982,7 @@ api.GoogleCloudAiplatformV1ModelExplanation final o = api.GoogleCloudAiplatformV1ModelExplanation(); buildCounterGoogleCloudAiplatformV1ModelExplanation++; if (buildCounterGoogleCloudAiplatformV1ModelExplanation < 3) { - o.meanAttributions = buildUnnamed230(); + o.meanAttributions = buildUnnamed248(); } buildCounterGoogleCloudAiplatformV1ModelExplanation--; return o; @@ -15548,17 +16992,17 @@ void checkGoogleCloudAiplatformV1ModelExplanation( api.GoogleCloudAiplatformV1ModelExplanation o) { buildCounterGoogleCloudAiplatformV1ModelExplanation++; if (buildCounterGoogleCloudAiplatformV1ModelExplanation < 3) { - checkUnnamed230(o.meanAttributions!); + checkUnnamed248(o.meanAttributions!); } buildCounterGoogleCloudAiplatformV1ModelExplanation--; } -core.List buildUnnamed231() => [ +core.List buildUnnamed249() => [ 'foo', 'foo', ]; -void checkUnnamed231(core.List o) { +void checkUnnamed249(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -15576,7 +17020,7 @@ api.GoogleCloudAiplatformV1ModelExportFormat final o = api.GoogleCloudAiplatformV1ModelExportFormat(); buildCounterGoogleCloudAiplatformV1ModelExportFormat++; if (buildCounterGoogleCloudAiplatformV1ModelExportFormat < 3) { - o.exportableContents = buildUnnamed231(); + o.exportableContents = buildUnnamed249(); o.id = 'foo'; } buildCounterGoogleCloudAiplatformV1ModelExportFormat--; @@ -15587,7 +17031,7 @@ void checkGoogleCloudAiplatformV1ModelExportFormat( 
api.GoogleCloudAiplatformV1ModelExportFormat o) { buildCounterGoogleCloudAiplatformV1ModelExportFormat++; if (buildCounterGoogleCloudAiplatformV1ModelExportFormat < 3) { - checkUnnamed231(o.exportableContents!); + checkUnnamed249(o.exportableContents!); unittest.expect( o.id!, unittest.equals('foo'), @@ -15620,12 +17064,12 @@ void checkGoogleCloudAiplatformV1ModelGardenSource( buildCounterGoogleCloudAiplatformV1ModelGardenSource--; } -core.List buildUnnamed232() => [ +core.List buildUnnamed250() => [ 'foo', 'foo', ]; -void checkUnnamed232(core.List o) { +void checkUnnamed250(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -15646,7 +17090,7 @@ api.GoogleCloudAiplatformV1ModelMonitoringAlertConfig o.emailAlertConfig = buildGoogleCloudAiplatformV1ModelMonitoringAlertConfigEmailAlertConfig(); o.enableLogging = true; - o.notificationChannels = buildUnnamed232(); + o.notificationChannels = buildUnnamed250(); } buildCounterGoogleCloudAiplatformV1ModelMonitoringAlertConfig--; return o; @@ -15659,17 +17103,17 @@ void checkGoogleCloudAiplatformV1ModelMonitoringAlertConfig( checkGoogleCloudAiplatformV1ModelMonitoringAlertConfigEmailAlertConfig( o.emailAlertConfig!); unittest.expect(o.enableLogging!, unittest.isTrue); - checkUnnamed232(o.notificationChannels!); + checkUnnamed250(o.notificationChannels!); } buildCounterGoogleCloudAiplatformV1ModelMonitoringAlertConfig--; } -core.List buildUnnamed233() => [ +core.List buildUnnamed251() => [ 'foo', 'foo', ]; -void checkUnnamed233(core.List o) { +void checkUnnamed251(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -15691,7 +17135,7 @@ api.GoogleCloudAiplatformV1ModelMonitoringAlertConfigEmailAlertConfig buildCounterGoogleCloudAiplatformV1ModelMonitoringAlertConfigEmailAlertConfig++; if (buildCounterGoogleCloudAiplatformV1ModelMonitoringAlertConfigEmailAlertConfig < 3) { - o.userEmails = buildUnnamed233(); + o.userEmails = buildUnnamed251(); } buildCounterGoogleCloudAiplatformV1ModelMonitoringAlertConfigEmailAlertConfig--; return o; @@ -15702,7 +17146,7 @@ void checkGoogleCloudAiplatformV1ModelMonitoringAlertConfigEmailAlertConfig( buildCounterGoogleCloudAiplatformV1ModelMonitoringAlertConfigEmailAlertConfig++; if (buildCounterGoogleCloudAiplatformV1ModelMonitoringAlertConfigEmailAlertConfig < 3) { - checkUnnamed233(o.userEmails!); + checkUnnamed251(o.userEmails!); } buildCounterGoogleCloudAiplatformV1ModelMonitoringAlertConfigEmailAlertConfig--; } @@ -15808,12 +17252,12 @@ void checkGoogleCloudAiplatformV1ModelMonitoringObjectiveConfigExplanationConfig } core.Map - buildUnnamed234() => { + buildUnnamed252() => { 'x': buildGoogleCloudAiplatformV1ThresholdConfig(), 'y': buildGoogleCloudAiplatformV1ThresholdConfig(), }; -void checkUnnamed234( +void checkUnnamed252( core.Map o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1ThresholdConfig(o['x']!); @@ -15821,12 +17265,12 @@ void checkUnnamed234( } core.Map - buildUnnamed235() => { + buildUnnamed253() => { 'x': buildGoogleCloudAiplatformV1ThresholdConfig(), 'y': buildGoogleCloudAiplatformV1ThresholdConfig(), }; -void checkUnnamed235( +void checkUnnamed253( core.Map o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1ThresholdConfig(o['x']!); @@ -15843,9 +17287,9 @@ api.GoogleCloudAiplatformV1ModelMonitoringObjectiveConfigPredictionDriftDetectio buildCounterGoogleCloudAiplatformV1ModelMonitoringObjectiveConfigPredictionDriftDetectionConfig++; if 
(buildCounterGoogleCloudAiplatformV1ModelMonitoringObjectiveConfigPredictionDriftDetectionConfig < 3) { - o.attributionScoreDriftThresholds = buildUnnamed234(); + o.attributionScoreDriftThresholds = buildUnnamed252(); o.defaultDriftThreshold = buildGoogleCloudAiplatformV1ThresholdConfig(); - o.driftThresholds = buildUnnamed235(); + o.driftThresholds = buildUnnamed253(); } buildCounterGoogleCloudAiplatformV1ModelMonitoringObjectiveConfigPredictionDriftDetectionConfig--; return o; @@ -15857,9 +17301,9 @@ void checkGoogleCloudAiplatformV1ModelMonitoringObjectiveConfigPredictionDriftDe buildCounterGoogleCloudAiplatformV1ModelMonitoringObjectiveConfigPredictionDriftDetectionConfig++; if (buildCounterGoogleCloudAiplatformV1ModelMonitoringObjectiveConfigPredictionDriftDetectionConfig < 3) { - checkUnnamed234(o.attributionScoreDriftThresholds!); + checkUnnamed252(o.attributionScoreDriftThresholds!); checkGoogleCloudAiplatformV1ThresholdConfig(o.defaultDriftThreshold!); - checkUnnamed235(o.driftThresholds!); + checkUnnamed253(o.driftThresholds!); } buildCounterGoogleCloudAiplatformV1ModelMonitoringObjectiveConfigPredictionDriftDetectionConfig--; } @@ -15911,12 +17355,12 @@ void checkGoogleCloudAiplatformV1ModelMonitoringObjectiveConfigTrainingDataset( } core.Map - buildUnnamed236() => { + buildUnnamed254() => { 'x': buildGoogleCloudAiplatformV1ThresholdConfig(), 'y': buildGoogleCloudAiplatformV1ThresholdConfig(), }; -void checkUnnamed236( +void checkUnnamed254( core.Map o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1ThresholdConfig(o['x']!); @@ -15924,12 +17368,12 @@ void checkUnnamed236( } core.Map - buildUnnamed237() => { + buildUnnamed255() => { 'x': buildGoogleCloudAiplatformV1ThresholdConfig(), 'y': buildGoogleCloudAiplatformV1ThresholdConfig(), }; -void checkUnnamed237( +void checkUnnamed255( core.Map o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1ThresholdConfig(o['x']!); @@ -15946,9 +17390,9 @@ api.GoogleCloudAiplatformV1ModelMonitoringObjectiveConfigTrainingPredictionSkewD buildCounterGoogleCloudAiplatformV1ModelMonitoringObjectiveConfigTrainingPredictionSkewDetectionConfig++; if (buildCounterGoogleCloudAiplatformV1ModelMonitoringObjectiveConfigTrainingPredictionSkewDetectionConfig < 3) { - o.attributionScoreSkewThresholds = buildUnnamed236(); + o.attributionScoreSkewThresholds = buildUnnamed254(); o.defaultSkewThreshold = buildGoogleCloudAiplatformV1ThresholdConfig(); - o.skewThresholds = buildUnnamed237(); + o.skewThresholds = buildUnnamed255(); } buildCounterGoogleCloudAiplatformV1ModelMonitoringObjectiveConfigTrainingPredictionSkewDetectionConfig--; return o; @@ -15960,9 +17404,9 @@ void checkGoogleCloudAiplatformV1ModelMonitoringObjectiveConfigTrainingPredictio buildCounterGoogleCloudAiplatformV1ModelMonitoringObjectiveConfigTrainingPredictionSkewDetectionConfig++; if (buildCounterGoogleCloudAiplatformV1ModelMonitoringObjectiveConfigTrainingPredictionSkewDetectionConfig < 3) { - checkUnnamed236(o.attributionScoreSkewThresholds!); + checkUnnamed254(o.attributionScoreSkewThresholds!); checkGoogleCloudAiplatformV1ThresholdConfig(o.defaultSkewThreshold!); - checkUnnamed237(o.skewThresholds!); + checkUnnamed255(o.skewThresholds!); } buildCounterGoogleCloudAiplatformV1ModelMonitoringObjectiveConfigTrainingPredictionSkewDetectionConfig--; } @@ -15970,12 +17414,12 @@ void checkGoogleCloudAiplatformV1ModelMonitoringObjectiveConfigTrainingPredictio core.List< api 
.GoogleCloudAiplatformV1ModelMonitoringStatsAnomaliesFeatureHistoricStatsAnomalies> - buildUnnamed238() => [ + buildUnnamed256() => [ buildGoogleCloudAiplatformV1ModelMonitoringStatsAnomaliesFeatureHistoricStatsAnomalies(), buildGoogleCloudAiplatformV1ModelMonitoringStatsAnomaliesFeatureHistoricStatsAnomalies(), ]; -void checkUnnamed238( +void checkUnnamed256( core.List< api .GoogleCloudAiplatformV1ModelMonitoringStatsAnomaliesFeatureHistoricStatsAnomalies> @@ -15995,7 +17439,7 @@ api.GoogleCloudAiplatformV1ModelMonitoringStatsAnomalies if (buildCounterGoogleCloudAiplatformV1ModelMonitoringStatsAnomalies < 3) { o.anomalyCount = 42; o.deployedModelId = 'foo'; - o.featureStats = buildUnnamed238(); + o.featureStats = buildUnnamed256(); o.objective = 'foo'; } buildCounterGoogleCloudAiplatformV1ModelMonitoringStatsAnomalies--; @@ -16014,7 +17458,7 @@ void checkGoogleCloudAiplatformV1ModelMonitoringStatsAnomalies( o.deployedModelId!, unittest.equals('foo'), ); - checkUnnamed238(o.featureStats!); + checkUnnamed256(o.featureStats!); unittest.expect( o.objective!, unittest.equals('foo'), @@ -16023,12 +17467,12 @@ void checkGoogleCloudAiplatformV1ModelMonitoringStatsAnomalies( buildCounterGoogleCloudAiplatformV1ModelMonitoringStatsAnomalies--; } -core.List buildUnnamed239() => [ +core.List buildUnnamed257() => [ buildGoogleCloudAiplatformV1FeatureStatsAnomaly(), buildGoogleCloudAiplatformV1FeatureStatsAnomaly(), ]; -void checkUnnamed239( +void checkUnnamed257( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1FeatureStatsAnomaly(o[0]); @@ -16046,7 +17490,7 @@ api.GoogleCloudAiplatformV1ModelMonitoringStatsAnomaliesFeatureHistoricStatsAnom if (buildCounterGoogleCloudAiplatformV1ModelMonitoringStatsAnomaliesFeatureHistoricStatsAnomalies < 3) { o.featureDisplayName = 'foo'; - o.predictionStats = buildUnnamed239(); + o.predictionStats = buildUnnamed257(); o.threshold = buildGoogleCloudAiplatformV1ThresholdConfig(); o.trainingStats = buildGoogleCloudAiplatformV1FeatureStatsAnomaly(); } @@ -16064,7 +17508,7 @@ void checkGoogleCloudAiplatformV1ModelMonitoringStatsAnomaliesFeatureHistoricSta o.featureDisplayName!, unittest.equals('foo'), ); - checkUnnamed239(o.predictionStats!); + checkUnnamed257(o.predictionStats!); checkGoogleCloudAiplatformV1ThresholdConfig(o.threshold!); checkGoogleCloudAiplatformV1FeatureStatsAnomaly(o.trainingStats!); } @@ -16147,12 +17591,12 @@ void checkGoogleCloudAiplatformV1MutateDeployedModelRequest( buildCounterGoogleCloudAiplatformV1MutateDeployedModelRequest--; } -core.Map buildUnnamed240() => { +core.Map buildUnnamed258() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed240(core.Map o) { +void checkUnnamed258(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -16175,7 +17619,7 @@ api.GoogleCloudAiplatformV1NasJob buildGoogleCloudAiplatformV1NasJob() { o.encryptionSpec = buildGoogleCloudAiplatformV1EncryptionSpec(); o.endTime = 'foo'; o.error = buildGoogleRpcStatus(); - o.labels = buildUnnamed240(); + o.labels = buildUnnamed258(); o.name = 'foo'; o.nasJobOutput = buildGoogleCloudAiplatformV1NasJobOutput(); o.nasJobSpec = buildGoogleCloudAiplatformV1NasJobSpec(); @@ -16207,7 +17651,7 @@ void checkGoogleCloudAiplatformV1NasJob(api.GoogleCloudAiplatformV1NasJob o) { unittest.equals('foo'), ); checkGoogleRpcStatus(o.error!); - checkUnnamed240(o.labels!); + checkUnnamed258(o.labels!); unittest.expect( o.name!, unittest.equals('foo'), @@ -16255,23 +17699,23 @@ void checkGoogleCloudAiplatformV1NasJobOutput( 
buildCounterGoogleCloudAiplatformV1NasJobOutput--; } -core.List buildUnnamed241() => [ +core.List buildUnnamed259() => [ buildGoogleCloudAiplatformV1NasTrial(), buildGoogleCloudAiplatformV1NasTrial(), ]; -void checkUnnamed241(core.List o) { +void checkUnnamed259(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1NasTrial(o[0]); checkGoogleCloudAiplatformV1NasTrial(o[1]); } -core.List buildUnnamed242() => [ +core.List buildUnnamed260() => [ buildGoogleCloudAiplatformV1NasTrial(), buildGoogleCloudAiplatformV1NasTrial(), ]; -void checkUnnamed242(core.List o) { +void checkUnnamed260(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1NasTrial(o[0]); checkGoogleCloudAiplatformV1NasTrial(o[1]); @@ -16283,8 +17727,8 @@ api.GoogleCloudAiplatformV1NasJobOutputMultiTrialJobOutput final o = api.GoogleCloudAiplatformV1NasJobOutputMultiTrialJobOutput(); buildCounterGoogleCloudAiplatformV1NasJobOutputMultiTrialJobOutput++; if (buildCounterGoogleCloudAiplatformV1NasJobOutputMultiTrialJobOutput < 3) { - o.searchTrials = buildUnnamed241(); - o.trainTrials = buildUnnamed242(); + o.searchTrials = buildUnnamed259(); + o.trainTrials = buildUnnamed260(); } buildCounterGoogleCloudAiplatformV1NasJobOutputMultiTrialJobOutput--; return o; @@ -16294,8 +17738,8 @@ void checkGoogleCloudAiplatformV1NasJobOutputMultiTrialJobOutput( api.GoogleCloudAiplatformV1NasJobOutputMultiTrialJobOutput o) { buildCounterGoogleCloudAiplatformV1NasJobOutputMultiTrialJobOutput++; if (buildCounterGoogleCloudAiplatformV1NasJobOutputMultiTrialJobOutput < 3) { - checkUnnamed241(o.searchTrials!); - checkUnnamed242(o.trainTrials!); + checkUnnamed259(o.searchTrials!); + checkUnnamed260(o.trainTrials!); } buildCounterGoogleCloudAiplatformV1NasJobOutputMultiTrialJobOutput--; } @@ -16558,12 +18002,12 @@ void checkGoogleCloudAiplatformV1NasTrialDetail( } core.List - buildUnnamed243() => [ + buildUnnamed261() => [ buildGoogleCloudAiplatformV1NearestNeighborQueryNumericFilter(), buildGoogleCloudAiplatformV1NearestNeighborQueryNumericFilter(), ]; -void checkUnnamed243( +void checkUnnamed261( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1NearestNeighborQueryNumericFilter(o[0]); @@ -16571,12 +18015,12 @@ void checkUnnamed243( } core.List - buildUnnamed244() => [ + buildUnnamed262() => [ buildGoogleCloudAiplatformV1NearestNeighborQueryStringFilter(), buildGoogleCloudAiplatformV1NearestNeighborQueryStringFilter(), ]; -void checkUnnamed244( +void checkUnnamed262( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1NearestNeighborQueryStringFilter(o[0]); @@ -16592,10 +18036,10 @@ api.GoogleCloudAiplatformV1NearestNeighborQuery o.embedding = buildGoogleCloudAiplatformV1NearestNeighborQueryEmbedding(); o.entityId = 'foo'; o.neighborCount = 42; - o.numericFilters = buildUnnamed243(); + o.numericFilters = buildUnnamed261(); o.parameters = buildGoogleCloudAiplatformV1NearestNeighborQueryParameters(); o.perCrowdingAttributeNeighborCount = 42; - o.stringFilters = buildUnnamed244(); + o.stringFilters = buildUnnamed262(); } buildCounterGoogleCloudAiplatformV1NearestNeighborQuery--; return o; @@ -16614,23 +18058,23 @@ void checkGoogleCloudAiplatformV1NearestNeighborQuery( o.neighborCount!, unittest.equals(42), ); - checkUnnamed243(o.numericFilters!); + checkUnnamed261(o.numericFilters!); checkGoogleCloudAiplatformV1NearestNeighborQueryParameters(o.parameters!); unittest.expect( o.perCrowdingAttributeNeighborCount!, 
unittest.equals(42), ); - checkUnnamed244(o.stringFilters!); + checkUnnamed262(o.stringFilters!); } buildCounterGoogleCloudAiplatformV1NearestNeighborQuery--; } -core.List buildUnnamed245() => [ +core.List buildUnnamed263() => [ 42.0, 42.0, ]; -void checkUnnamed245(core.List o) { +void checkUnnamed263(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -16648,7 +18092,7 @@ api.GoogleCloudAiplatformV1NearestNeighborQueryEmbedding final o = api.GoogleCloudAiplatformV1NearestNeighborQueryEmbedding(); buildCounterGoogleCloudAiplatformV1NearestNeighborQueryEmbedding++; if (buildCounterGoogleCloudAiplatformV1NearestNeighborQueryEmbedding < 3) { - o.value = buildUnnamed245(); + o.value = buildUnnamed263(); } buildCounterGoogleCloudAiplatformV1NearestNeighborQueryEmbedding--; return o; @@ -16658,7 +18102,7 @@ void checkGoogleCloudAiplatformV1NearestNeighborQueryEmbedding( api.GoogleCloudAiplatformV1NearestNeighborQueryEmbedding o) { buildCounterGoogleCloudAiplatformV1NearestNeighborQueryEmbedding++; if (buildCounterGoogleCloudAiplatformV1NearestNeighborQueryEmbedding < 3) { - checkUnnamed245(o.value!); + checkUnnamed263(o.value!); } buildCounterGoogleCloudAiplatformV1NearestNeighborQueryEmbedding--; } @@ -16739,12 +18183,12 @@ void checkGoogleCloudAiplatformV1NearestNeighborQueryParameters( buildCounterGoogleCloudAiplatformV1NearestNeighborQueryParameters--; } -core.List buildUnnamed246() => [ +core.List buildUnnamed264() => [ 'foo', 'foo', ]; -void checkUnnamed246(core.List o) { +void checkUnnamed264(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -16756,12 +18200,12 @@ void checkUnnamed246(core.List o) { ); } -core.List buildUnnamed247() => [ +core.List buildUnnamed265() => [ 'foo', 'foo', ]; -void checkUnnamed247(core.List o) { +void checkUnnamed265(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -16780,8 +18224,8 @@ api.GoogleCloudAiplatformV1NearestNeighborQueryStringFilter final o = api.GoogleCloudAiplatformV1NearestNeighborQueryStringFilter(); buildCounterGoogleCloudAiplatformV1NearestNeighborQueryStringFilter++; if (buildCounterGoogleCloudAiplatformV1NearestNeighborQueryStringFilter < 3) { - o.allowTokens = buildUnnamed246(); - o.denyTokens = buildUnnamed247(); + o.allowTokens = buildUnnamed264(); + o.denyTokens = buildUnnamed265(); o.name = 'foo'; } buildCounterGoogleCloudAiplatformV1NearestNeighborQueryStringFilter--; @@ -16792,8 +18236,8 @@ void checkGoogleCloudAiplatformV1NearestNeighborQueryStringFilter( api.GoogleCloudAiplatformV1NearestNeighborQueryStringFilter o) { buildCounterGoogleCloudAiplatformV1NearestNeighborQueryStringFilter++; if (buildCounterGoogleCloudAiplatformV1NearestNeighborQueryStringFilter < 3) { - checkUnnamed246(o.allowTokens!); - checkUnnamed247(o.denyTokens!); + checkUnnamed264(o.allowTokens!); + checkUnnamed265(o.denyTokens!); unittest.expect( o.name!, unittest.equals('foo'), @@ -16803,12 +18247,12 @@ void checkGoogleCloudAiplatformV1NearestNeighborQueryStringFilter( } core.List - buildUnnamed248() => [ + buildUnnamed266() => [ buildGoogleCloudAiplatformV1NearestNeighborsNeighbor(), buildGoogleCloudAiplatformV1NearestNeighborsNeighbor(), ]; -void checkUnnamed248( +void checkUnnamed266( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1NearestNeighborsNeighbor(o[0]); @@ -16821,7 +18265,7 @@ api.GoogleCloudAiplatformV1NearestNeighbors final o = api.GoogleCloudAiplatformV1NearestNeighbors(); 
buildCounterGoogleCloudAiplatformV1NearestNeighbors++; if (buildCounterGoogleCloudAiplatformV1NearestNeighbors < 3) { - o.neighbors = buildUnnamed248(); + o.neighbors = buildUnnamed266(); } buildCounterGoogleCloudAiplatformV1NearestNeighbors--; return o; @@ -16831,7 +18275,7 @@ void checkGoogleCloudAiplatformV1NearestNeighbors( api.GoogleCloudAiplatformV1NearestNeighbors o) { buildCounterGoogleCloudAiplatformV1NearestNeighbors++; if (buildCounterGoogleCloudAiplatformV1NearestNeighbors < 3) { - checkUnnamed248(o.neighbors!); + checkUnnamed266(o.neighbors!); } buildCounterGoogleCloudAiplatformV1NearestNeighbors--; } @@ -16983,12 +18427,12 @@ void checkGoogleCloudAiplatformV1NotebookEucConfig( buildCounterGoogleCloudAiplatformV1NotebookEucConfig--; } -core.Map buildUnnamed249() => { +core.Map buildUnnamed267() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed249(core.Map o) { +void checkUnnamed267(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -17007,6 +18451,8 @@ api.GoogleCloudAiplatformV1NotebookExecutionJob buildCounterGoogleCloudAiplatformV1NotebookExecutionJob++; if (buildCounterGoogleCloudAiplatformV1NotebookExecutionJob < 3) { o.createTime = 'foo'; + o.customEnvironmentSpec = + buildGoogleCloudAiplatformV1NotebookExecutionJobCustomEnvironmentSpec(); o.dataformRepositorySource = buildGoogleCloudAiplatformV1NotebookExecutionJobDataformRepositorySource(); o.directNotebookSource = @@ -17019,13 +18465,16 @@ api.GoogleCloudAiplatformV1NotebookExecutionJob buildGoogleCloudAiplatformV1NotebookExecutionJobGcsNotebookSource(); o.gcsOutputUri = 'foo'; o.jobState = 'foo'; - o.labels = buildUnnamed249(); + o.kernelName = 'foo'; + o.labels = buildUnnamed267(); o.name = 'foo'; o.notebookRuntimeTemplateResourceName = 'foo'; o.scheduleResourceName = 'foo'; o.serviceAccount = 'foo'; o.status = buildGoogleRpcStatus(); o.updateTime = 'foo'; + o.workbenchRuntime = + buildGoogleCloudAiplatformV1NotebookExecutionJobWorkbenchRuntime(); } buildCounterGoogleCloudAiplatformV1NotebookExecutionJob--; return o; @@ -17039,6 +18488,8 @@ void checkGoogleCloudAiplatformV1NotebookExecutionJob( o.createTime!, unittest.equals('foo'), ); + checkGoogleCloudAiplatformV1NotebookExecutionJobCustomEnvironmentSpec( + o.customEnvironmentSpec!); checkGoogleCloudAiplatformV1NotebookExecutionJobDataformRepositorySource( o.dataformRepositorySource!); checkGoogleCloudAiplatformV1NotebookExecutionJobDirectNotebookSource( @@ -17066,7 +18517,11 @@ void checkGoogleCloudAiplatformV1NotebookExecutionJob( o.jobState!, unittest.equals('foo'), ); - checkUnnamed249(o.labels!); + unittest.expect( + o.kernelName!, + unittest.equals('foo'), + ); + checkUnnamed267(o.labels!); unittest.expect( o.name!, unittest.equals('foo'), @@ -17088,10 +18543,42 @@ void checkGoogleCloudAiplatformV1NotebookExecutionJob( o.updateTime!, unittest.equals('foo'), ); + checkGoogleCloudAiplatformV1NotebookExecutionJobWorkbenchRuntime( + o.workbenchRuntime!); } buildCounterGoogleCloudAiplatformV1NotebookExecutionJob--; } +core.int + buildCounterGoogleCloudAiplatformV1NotebookExecutionJobCustomEnvironmentSpec = + 0; +api.GoogleCloudAiplatformV1NotebookExecutionJobCustomEnvironmentSpec + buildGoogleCloudAiplatformV1NotebookExecutionJobCustomEnvironmentSpec() { + final o = + api.GoogleCloudAiplatformV1NotebookExecutionJobCustomEnvironmentSpec(); + buildCounterGoogleCloudAiplatformV1NotebookExecutionJobCustomEnvironmentSpec++; + if (buildCounterGoogleCloudAiplatformV1NotebookExecutionJobCustomEnvironmentSpec < + 3) { + o.machineSpec = 
buildGoogleCloudAiplatformV1MachineSpec(); + o.networkSpec = buildGoogleCloudAiplatformV1NetworkSpec(); + o.persistentDiskSpec = buildGoogleCloudAiplatformV1PersistentDiskSpec(); + } + buildCounterGoogleCloudAiplatformV1NotebookExecutionJobCustomEnvironmentSpec--; + return o; +} + +void checkGoogleCloudAiplatformV1NotebookExecutionJobCustomEnvironmentSpec( + api.GoogleCloudAiplatformV1NotebookExecutionJobCustomEnvironmentSpec o) { + buildCounterGoogleCloudAiplatformV1NotebookExecutionJobCustomEnvironmentSpec++; + if (buildCounterGoogleCloudAiplatformV1NotebookExecutionJobCustomEnvironmentSpec < + 3) { + checkGoogleCloudAiplatformV1MachineSpec(o.machineSpec!); + checkGoogleCloudAiplatformV1NetworkSpec(o.networkSpec!); + checkGoogleCloudAiplatformV1PersistentDiskSpec(o.persistentDiskSpec!); + } + buildCounterGoogleCloudAiplatformV1NotebookExecutionJobCustomEnvironmentSpec--; +} + core.int buildCounterGoogleCloudAiplatformV1NotebookExecutionJobDataformRepositorySource = 0; @@ -17188,6 +18675,26 @@ void checkGoogleCloudAiplatformV1NotebookExecutionJobGcsNotebookSource( buildCounterGoogleCloudAiplatformV1NotebookExecutionJobGcsNotebookSource--; } +core.int + buildCounterGoogleCloudAiplatformV1NotebookExecutionJobWorkbenchRuntime = 0; +api.GoogleCloudAiplatformV1NotebookExecutionJobWorkbenchRuntime + buildGoogleCloudAiplatformV1NotebookExecutionJobWorkbenchRuntime() { + final o = api.GoogleCloudAiplatformV1NotebookExecutionJobWorkbenchRuntime(); + buildCounterGoogleCloudAiplatformV1NotebookExecutionJobWorkbenchRuntime++; + if (buildCounterGoogleCloudAiplatformV1NotebookExecutionJobWorkbenchRuntime < + 3) {} + buildCounterGoogleCloudAiplatformV1NotebookExecutionJobWorkbenchRuntime--; + return o; +} + +void checkGoogleCloudAiplatformV1NotebookExecutionJobWorkbenchRuntime( + api.GoogleCloudAiplatformV1NotebookExecutionJobWorkbenchRuntime o) { + buildCounterGoogleCloudAiplatformV1NotebookExecutionJobWorkbenchRuntime++; + if (buildCounterGoogleCloudAiplatformV1NotebookExecutionJobWorkbenchRuntime < + 3) {} + buildCounterGoogleCloudAiplatformV1NotebookExecutionJobWorkbenchRuntime--; +} + core.int buildCounterGoogleCloudAiplatformV1NotebookIdleShutdownConfig = 0; api.GoogleCloudAiplatformV1NotebookIdleShutdownConfig buildGoogleCloudAiplatformV1NotebookIdleShutdownConfig() { @@ -17214,12 +18721,12 @@ void checkGoogleCloudAiplatformV1NotebookIdleShutdownConfig( buildCounterGoogleCloudAiplatformV1NotebookIdleShutdownConfig--; } -core.Map buildUnnamed250() => { +core.Map buildUnnamed268() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed250(core.Map o) { +void checkUnnamed268(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -17231,12 +18738,12 @@ void checkUnnamed250(core.Map o) { ); } -core.List buildUnnamed251() => [ +core.List buildUnnamed269() => [ 'foo', 'foo', ]; -void checkUnnamed251(core.List o) { +void checkUnnamed269(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -17255,17 +18762,21 @@ api.GoogleCloudAiplatformV1NotebookRuntime buildCounterGoogleCloudAiplatformV1NotebookRuntime++; if (buildCounterGoogleCloudAiplatformV1NotebookRuntime < 3) { o.createTime = 'foo'; + o.dataPersistentDiskSpec = buildGoogleCloudAiplatformV1PersistentDiskSpec(); o.description = 'foo'; o.displayName = 'foo'; o.encryptionSpec = buildGoogleCloudAiplatformV1EncryptionSpec(); + o.eucConfig = buildGoogleCloudAiplatformV1NotebookEucConfig(); o.expirationTime = 'foo'; o.healthState = 'foo'; o.idleShutdownConfig = 
buildGoogleCloudAiplatformV1NotebookIdleShutdownConfig(); o.isUpgradable = true; - o.labels = buildUnnamed250(); + o.labels = buildUnnamed268(); + o.machineSpec = buildGoogleCloudAiplatformV1MachineSpec(); o.name = 'foo'; - o.networkTags = buildUnnamed251(); + o.networkSpec = buildGoogleCloudAiplatformV1NetworkSpec(); + o.networkTags = buildUnnamed269(); o.notebookRuntimeTemplateRef = buildGoogleCloudAiplatformV1NotebookRuntimeTemplateRef(); o.notebookRuntimeType = 'foo'; @@ -17275,6 +18786,7 @@ api.GoogleCloudAiplatformV1NotebookRuntime o.satisfiesPzi = true; o.satisfiesPzs = true; o.serviceAccount = 'foo'; + o.shieldedVmConfig = buildGoogleCloudAiplatformV1ShieldedVmConfig(); o.updateTime = 'foo'; o.version = 'foo'; } @@ -17290,6 +18802,7 @@ void checkGoogleCloudAiplatformV1NotebookRuntime( o.createTime!, unittest.equals('foo'), ); + checkGoogleCloudAiplatformV1PersistentDiskSpec(o.dataPersistentDiskSpec!); unittest.expect( o.description!, unittest.equals('foo'), @@ -17299,6 +18812,7 @@ void checkGoogleCloudAiplatformV1NotebookRuntime( unittest.equals('foo'), ); checkGoogleCloudAiplatformV1EncryptionSpec(o.encryptionSpec!); + checkGoogleCloudAiplatformV1NotebookEucConfig(o.eucConfig!); unittest.expect( o.expirationTime!, unittest.equals('foo'), @@ -17310,12 +18824,14 @@ void checkGoogleCloudAiplatformV1NotebookRuntime( checkGoogleCloudAiplatformV1NotebookIdleShutdownConfig( o.idleShutdownConfig!); unittest.expect(o.isUpgradable!, unittest.isTrue); - checkUnnamed250(o.labels!); + checkUnnamed268(o.labels!); + checkGoogleCloudAiplatformV1MachineSpec(o.machineSpec!); unittest.expect( o.name!, unittest.equals('foo'), ); - checkUnnamed251(o.networkTags!); + checkGoogleCloudAiplatformV1NetworkSpec(o.networkSpec!); + checkUnnamed269(o.networkTags!); checkGoogleCloudAiplatformV1NotebookRuntimeTemplateRef( o.notebookRuntimeTemplateRef!); unittest.expect( @@ -17340,6 +18856,7 @@ void checkGoogleCloudAiplatformV1NotebookRuntime( o.serviceAccount!, unittest.equals('foo'), ); + checkGoogleCloudAiplatformV1ShieldedVmConfig(o.shieldedVmConfig!); unittest.expect( o.updateTime!, unittest.equals('foo'), @@ -17352,12 +18869,12 @@ void checkGoogleCloudAiplatformV1NotebookRuntime( buildCounterGoogleCloudAiplatformV1NotebookRuntime--; } -core.Map buildUnnamed252() => { +core.Map buildUnnamed270() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed252(core.Map o) { +void checkUnnamed270(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -17369,12 +18886,12 @@ void checkUnnamed252(core.Map o) { ); } -core.List buildUnnamed253() => [ +core.List buildUnnamed271() => [ 'foo', 'foo', ]; -void checkUnnamed253(core.List o) { +void checkUnnamed271(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -17402,11 +18919,11 @@ api.GoogleCloudAiplatformV1NotebookRuntimeTemplate o.idleShutdownConfig = buildGoogleCloudAiplatformV1NotebookIdleShutdownConfig(); o.isDefault = true; - o.labels = buildUnnamed252(); + o.labels = buildUnnamed270(); o.machineSpec = buildGoogleCloudAiplatformV1MachineSpec(); o.name = 'foo'; o.networkSpec = buildGoogleCloudAiplatformV1NetworkSpec(); - o.networkTags = buildUnnamed253(); + o.networkTags = buildUnnamed271(); o.notebookRuntimeType = 'foo'; o.serviceAccount = 'foo'; o.shieldedVmConfig = buildGoogleCloudAiplatformV1ShieldedVmConfig(); @@ -17442,14 +18959,14 @@ void checkGoogleCloudAiplatformV1NotebookRuntimeTemplate( checkGoogleCloudAiplatformV1NotebookIdleShutdownConfig( o.idleShutdownConfig!); unittest.expect(o.isDefault!, 
unittest.isTrue); - checkUnnamed252(o.labels!); + checkUnnamed270(o.labels!); checkGoogleCloudAiplatformV1MachineSpec(o.machineSpec!); unittest.expect( o.name!, unittest.equals('foo'), ); checkGoogleCloudAiplatformV1NetworkSpec(o.networkSpec!); - checkUnnamed253(o.networkTags!); + checkUnnamed271(o.networkTags!); unittest.expect( o.notebookRuntimeType!, unittest.equals('foo'), @@ -18007,12 +19524,12 @@ void checkGoogleCloudAiplatformV1PersistentDiskSpec( buildCounterGoogleCloudAiplatformV1PersistentDiskSpec--; } -core.Map buildUnnamed254() => { +core.Map buildUnnamed272() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed254(core.Map o) { +void checkUnnamed272(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -18024,12 +19541,12 @@ void checkUnnamed254(core.Map o) { ); } -core.List buildUnnamed255() => [ +core.List buildUnnamed273() => [ 'foo', 'foo', ]; -void checkUnnamed255(core.List o) { +void checkUnnamed273(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -18041,12 +19558,12 @@ void checkUnnamed255(core.List o) { ); } -core.List buildUnnamed256() => [ +core.List buildUnnamed274() => [ buildGoogleCloudAiplatformV1ResourcePool(), buildGoogleCloudAiplatformV1ResourcePool(), ]; -void checkUnnamed256(core.List o) { +void checkUnnamed274(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1ResourcePool(o[0]); checkGoogleCloudAiplatformV1ResourcePool(o[1]); @@ -18062,11 +19579,11 @@ api.GoogleCloudAiplatformV1PersistentResource o.displayName = 'foo'; o.encryptionSpec = buildGoogleCloudAiplatformV1EncryptionSpec(); o.error = buildGoogleRpcStatus(); - o.labels = buildUnnamed254(); + o.labels = buildUnnamed272(); o.name = 'foo'; o.network = 'foo'; - o.reservedIpRanges = buildUnnamed255(); - o.resourcePools = buildUnnamed256(); + o.reservedIpRanges = buildUnnamed273(); + o.resourcePools = buildUnnamed274(); o.resourceRuntime = buildGoogleCloudAiplatformV1ResourceRuntime(); o.resourceRuntimeSpec = buildGoogleCloudAiplatformV1ResourceRuntimeSpec(); o.satisfiesPzi = true; @@ -18093,7 +19610,7 @@ void checkGoogleCloudAiplatformV1PersistentResource( ); checkGoogleCloudAiplatformV1EncryptionSpec(o.encryptionSpec!); checkGoogleRpcStatus(o.error!); - checkUnnamed254(o.labels!); + checkUnnamed272(o.labels!); unittest.expect( o.name!, unittest.equals('foo'), @@ -18102,8 +19619,8 @@ void checkGoogleCloudAiplatformV1PersistentResource( o.network!, unittest.equals('foo'), ); - checkUnnamed255(o.reservedIpRanges!); - checkUnnamed256(o.resourcePools!); + checkUnnamed273(o.reservedIpRanges!); + checkUnnamed274(o.resourcePools!); checkGoogleCloudAiplatformV1ResourceRuntime(o.resourceRuntime!); checkGoogleCloudAiplatformV1ResourceRuntimeSpec(o.resourceRuntimeSpec!); unittest.expect(o.satisfiesPzi!, unittest.isTrue); @@ -18124,12 +19641,12 @@ void checkGoogleCloudAiplatformV1PersistentResource( buildCounterGoogleCloudAiplatformV1PersistentResource--; } -core.Map buildUnnamed257() => { +core.Map buildUnnamed275() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed257(core.Map o) { +void checkUnnamed275(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -18141,7 +19658,7 @@ void checkUnnamed257(core.Map o) { ); } -core.Map buildUnnamed258() => { +core.Map buildUnnamed276() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -18154,7 +19671,7 @@ core.Map buildUnnamed258() => { }, }; -void checkUnnamed258(core.Map o) { +void checkUnnamed276(core.Map o) { unittest.expect(o, 
unittest.hasLength(2)); var casted52 = (o['x']!) as core.Map; unittest.expect(casted52, unittest.hasLength(3)); @@ -18186,12 +19703,12 @@ void checkUnnamed258(core.Map o) { ); } -core.List buildUnnamed259() => [ +core.List buildUnnamed277() => [ 'foo', 'foo', ]; -void checkUnnamed259(core.List o) { +void checkUnnamed277(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -18215,12 +19732,12 @@ api.GoogleCloudAiplatformV1PipelineJob o.endTime = 'foo'; o.error = buildGoogleRpcStatus(); o.jobDetail = buildGoogleCloudAiplatformV1PipelineJobDetail(); - o.labels = buildUnnamed257(); + o.labels = buildUnnamed275(); o.name = 'foo'; o.network = 'foo'; - o.pipelineSpec = buildUnnamed258(); + o.pipelineSpec = buildUnnamed276(); o.preflightValidations = true; - o.reservedIpRanges = buildUnnamed259(); + o.reservedIpRanges = buildUnnamed277(); o.runtimeConfig = buildGoogleCloudAiplatformV1PipelineJobRuntimeConfig(); o.scheduleName = 'foo'; o.serviceAccount = 'foo'; @@ -18253,7 +19770,7 @@ void checkGoogleCloudAiplatformV1PipelineJob( ); checkGoogleRpcStatus(o.error!); checkGoogleCloudAiplatformV1PipelineJobDetail(o.jobDetail!); - checkUnnamed257(o.labels!); + checkUnnamed275(o.labels!); unittest.expect( o.name!, unittest.equals('foo'), @@ -18262,9 +19779,9 @@ void checkGoogleCloudAiplatformV1PipelineJob( o.network!, unittest.equals('foo'), ); - checkUnnamed258(o.pipelineSpec!); + checkUnnamed276(o.pipelineSpec!); unittest.expect(o.preflightValidations!, unittest.isTrue); - checkUnnamed259(o.reservedIpRanges!); + checkUnnamed277(o.reservedIpRanges!); checkGoogleCloudAiplatformV1PipelineJobRuntimeConfig(o.runtimeConfig!); unittest.expect( o.scheduleName!, @@ -18295,12 +19812,12 @@ void checkGoogleCloudAiplatformV1PipelineJob( buildCounterGoogleCloudAiplatformV1PipelineJob--; } -core.List buildUnnamed260() => [ +core.List buildUnnamed278() => [ buildGoogleCloudAiplatformV1PipelineTaskDetail(), buildGoogleCloudAiplatformV1PipelineTaskDetail(), ]; -void checkUnnamed260( +void checkUnnamed278( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1PipelineTaskDetail(o[0]); @@ -18315,7 +19832,7 @@ api.GoogleCloudAiplatformV1PipelineJobDetail if (buildCounterGoogleCloudAiplatformV1PipelineJobDetail < 3) { o.pipelineContext = buildGoogleCloudAiplatformV1Context(); o.pipelineRunContext = buildGoogleCloudAiplatformV1Context(); - o.taskDetails = buildUnnamed260(); + o.taskDetails = buildUnnamed278(); } buildCounterGoogleCloudAiplatformV1PipelineJobDetail--; return o; @@ -18327,21 +19844,21 @@ void checkGoogleCloudAiplatformV1PipelineJobDetail( if (buildCounterGoogleCloudAiplatformV1PipelineJobDetail < 3) { checkGoogleCloudAiplatformV1Context(o.pipelineContext!); checkGoogleCloudAiplatformV1Context(o.pipelineRunContext!); - checkUnnamed260(o.taskDetails!); + checkUnnamed278(o.taskDetails!); } buildCounterGoogleCloudAiplatformV1PipelineJobDetail--; } core.Map - buildUnnamed261() => { + buildUnnamed279() => { 'x': buildGoogleCloudAiplatformV1PipelineJobRuntimeConfigInputArtifact(), 'y': buildGoogleCloudAiplatformV1PipelineJobRuntimeConfigInputArtifact(), }; -void checkUnnamed261( +void checkUnnamed279( core.Map o) { @@ -18350,7 +19867,7 @@ void checkUnnamed261( checkGoogleCloudAiplatformV1PipelineJobRuntimeConfigInputArtifact(o['y']!); } -core.Map buildUnnamed262() => { +core.Map buildUnnamed280() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -18363,7 +19880,7 @@ core.Map buildUnnamed262() => { }, }; -void checkUnnamed262(core.Map o) { +void 
checkUnnamed280(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted54 = (o['x']!) as core.Map; unittest.expect(casted54, unittest.hasLength(3)); @@ -18395,12 +19912,12 @@ void checkUnnamed262(core.Map o) { ); } -core.Map buildUnnamed263() => { +core.Map buildUnnamed281() => { 'x': buildGoogleCloudAiplatformV1Value(), 'y': buildGoogleCloudAiplatformV1Value(), }; -void checkUnnamed263( +void checkUnnamed281( core.Map o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1Value(o['x']!); @@ -18415,9 +19932,9 @@ api.GoogleCloudAiplatformV1PipelineJobRuntimeConfig if (buildCounterGoogleCloudAiplatformV1PipelineJobRuntimeConfig < 3) { o.failurePolicy = 'foo'; o.gcsOutputDirectory = 'foo'; - o.inputArtifacts = buildUnnamed261(); - o.parameterValues = buildUnnamed262(); - o.parameters = buildUnnamed263(); + o.inputArtifacts = buildUnnamed279(); + o.parameterValues = buildUnnamed280(); + o.parameters = buildUnnamed281(); } buildCounterGoogleCloudAiplatformV1PipelineJobRuntimeConfig--; return o; @@ -18435,9 +19952,9 @@ void checkGoogleCloudAiplatformV1PipelineJobRuntimeConfig( o.gcsOutputDirectory!, unittest.equals('foo'), ); - checkUnnamed261(o.inputArtifacts!); - checkUnnamed262(o.parameterValues!); - checkUnnamed263(o.parameters!); + checkUnnamed279(o.inputArtifacts!); + checkUnnamed280(o.parameterValues!); + checkUnnamed281(o.parameters!); } buildCounterGoogleCloudAiplatformV1PipelineJobRuntimeConfig--; } @@ -18471,12 +19988,12 @@ void checkGoogleCloudAiplatformV1PipelineJobRuntimeConfigInputArtifact( } core.Map - buildUnnamed264() => { + buildUnnamed282() => { 'x': buildGoogleCloudAiplatformV1PipelineTaskDetailArtifactList(), 'y': buildGoogleCloudAiplatformV1PipelineTaskDetailArtifactList(), }; -void checkUnnamed264( +void checkUnnamed282( core.Map o) { @@ -18486,12 +20003,12 @@ void checkUnnamed264( } core.Map - buildUnnamed265() => { + buildUnnamed283() => { 'x': buildGoogleCloudAiplatformV1PipelineTaskDetailArtifactList(), 'y': buildGoogleCloudAiplatformV1PipelineTaskDetailArtifactList(), }; -void checkUnnamed265( +void checkUnnamed283( core.Map o) { @@ -18501,12 +20018,12 @@ void checkUnnamed265( } core.List - buildUnnamed266() => [ + buildUnnamed284() => [ buildGoogleCloudAiplatformV1PipelineTaskDetailPipelineTaskStatus(), buildGoogleCloudAiplatformV1PipelineTaskDetailPipelineTaskStatus(), ]; -void checkUnnamed266( +void checkUnnamed284( core.List o) { unittest.expect(o, unittest.hasLength(2)); @@ -18525,10 +20042,10 @@ api.GoogleCloudAiplatformV1PipelineTaskDetail o.error = buildGoogleRpcStatus(); o.execution = buildGoogleCloudAiplatformV1Execution(); o.executorDetail = buildGoogleCloudAiplatformV1PipelineTaskExecutorDetail(); - o.inputs = buildUnnamed264(); - o.outputs = buildUnnamed265(); + o.inputs = buildUnnamed282(); + o.outputs = buildUnnamed283(); o.parentTaskId = 'foo'; - o.pipelineTaskStatus = buildUnnamed266(); + o.pipelineTaskStatus = buildUnnamed284(); o.startTime = 'foo'; o.state = 'foo'; o.taskId = 'foo'; @@ -18553,13 +20070,13 @@ void checkGoogleCloudAiplatformV1PipelineTaskDetail( checkGoogleRpcStatus(o.error!); checkGoogleCloudAiplatformV1Execution(o.execution!); checkGoogleCloudAiplatformV1PipelineTaskExecutorDetail(o.executorDetail!); - checkUnnamed264(o.inputs!); - checkUnnamed265(o.outputs!); + checkUnnamed282(o.inputs!); + checkUnnamed283(o.outputs!); unittest.expect( o.parentTaskId!, unittest.equals('foo'), ); - checkUnnamed266(o.pipelineTaskStatus!); + checkUnnamed284(o.pipelineTaskStatus!); unittest.expect( o.startTime!, 
unittest.equals('foo'), @@ -18580,12 +20097,12 @@ void checkGoogleCloudAiplatformV1PipelineTaskDetail( buildCounterGoogleCloudAiplatformV1PipelineTaskDetail--; } -core.List buildUnnamed267() => [ +core.List buildUnnamed285() => [ buildGoogleCloudAiplatformV1Artifact(), buildGoogleCloudAiplatformV1Artifact(), ]; -void checkUnnamed267(core.List o) { +void checkUnnamed285(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1Artifact(o[0]); checkGoogleCloudAiplatformV1Artifact(o[1]); @@ -18597,7 +20114,7 @@ api.GoogleCloudAiplatformV1PipelineTaskDetailArtifactList final o = api.GoogleCloudAiplatformV1PipelineTaskDetailArtifactList(); buildCounterGoogleCloudAiplatformV1PipelineTaskDetailArtifactList++; if (buildCounterGoogleCloudAiplatformV1PipelineTaskDetailArtifactList < 3) { - o.artifacts = buildUnnamed267(); + o.artifacts = buildUnnamed285(); } buildCounterGoogleCloudAiplatformV1PipelineTaskDetailArtifactList--; return o; @@ -18607,7 +20124,7 @@ void checkGoogleCloudAiplatformV1PipelineTaskDetailArtifactList( api.GoogleCloudAiplatformV1PipelineTaskDetailArtifactList o) { buildCounterGoogleCloudAiplatformV1PipelineTaskDetailArtifactList++; if (buildCounterGoogleCloudAiplatformV1PipelineTaskDetailArtifactList < 3) { - checkUnnamed267(o.artifacts!); + checkUnnamed285(o.artifacts!); } buildCounterGoogleCloudAiplatformV1PipelineTaskDetailArtifactList--; } @@ -18673,12 +20190,12 @@ void checkGoogleCloudAiplatformV1PipelineTaskExecutorDetail( buildCounterGoogleCloudAiplatformV1PipelineTaskExecutorDetail--; } -core.List buildUnnamed268() => [ +core.List buildUnnamed286() => [ 'foo', 'foo', ]; -void checkUnnamed268(core.List o) { +void checkUnnamed286(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -18690,12 +20207,12 @@ void checkUnnamed268(core.List o) { ); } -core.List buildUnnamed269() => [ +core.List buildUnnamed287() => [ 'foo', 'foo', ]; -void checkUnnamed269(core.List o) { +void checkUnnamed287(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -18717,8 +20234,8 @@ api.GoogleCloudAiplatformV1PipelineTaskExecutorDetailContainerDetail buildCounterGoogleCloudAiplatformV1PipelineTaskExecutorDetailContainerDetail++; if (buildCounterGoogleCloudAiplatformV1PipelineTaskExecutorDetailContainerDetail < 3) { - o.failedMainJobs = buildUnnamed268(); - o.failedPreCachingCheckJobs = buildUnnamed269(); + o.failedMainJobs = buildUnnamed286(); + o.failedPreCachingCheckJobs = buildUnnamed287(); o.mainJob = 'foo'; o.preCachingCheckJob = 'foo'; } @@ -18731,8 +20248,8 @@ void checkGoogleCloudAiplatformV1PipelineTaskExecutorDetailContainerDetail( buildCounterGoogleCloudAiplatformV1PipelineTaskExecutorDetailContainerDetail++; if (buildCounterGoogleCloudAiplatformV1PipelineTaskExecutorDetailContainerDetail < 3) { - checkUnnamed268(o.failedMainJobs!); - checkUnnamed269(o.failedPreCachingCheckJobs!); + checkUnnamed286(o.failedMainJobs!); + checkUnnamed287(o.failedPreCachingCheckJobs!); unittest.expect( o.mainJob!, unittest.equals('foo'), @@ -18745,12 +20262,12 @@ void checkGoogleCloudAiplatformV1PipelineTaskExecutorDetailContainerDetail( buildCounterGoogleCloudAiplatformV1PipelineTaskExecutorDetailContainerDetail--; } -core.List buildUnnamed270() => [ +core.List buildUnnamed288() => [ 'foo', 'foo', ]; -void checkUnnamed270(core.List o) { +void checkUnnamed288(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -18772,7 +20289,7 @@ 
api.GoogleCloudAiplatformV1PipelineTaskExecutorDetailCustomJobDetail buildCounterGoogleCloudAiplatformV1PipelineTaskExecutorDetailCustomJobDetail++; if (buildCounterGoogleCloudAiplatformV1PipelineTaskExecutorDetailCustomJobDetail < 3) { - o.failedJobs = buildUnnamed270(); + o.failedJobs = buildUnnamed288(); o.job = 'foo'; } buildCounterGoogleCloudAiplatformV1PipelineTaskExecutorDetailCustomJobDetail--; @@ -18784,7 +20301,7 @@ void checkGoogleCloudAiplatformV1PipelineTaskExecutorDetailCustomJobDetail( buildCounterGoogleCloudAiplatformV1PipelineTaskExecutorDetailCustomJobDetail++; if (buildCounterGoogleCloudAiplatformV1PipelineTaskExecutorDetailCustomJobDetail < 3) { - checkUnnamed270(o.failedJobs!); + checkUnnamed288(o.failedJobs!); unittest.expect( o.job!, unittest.equals('foo'), @@ -18939,6 +20456,30 @@ void checkGoogleCloudAiplatformV1Port(api.GoogleCloudAiplatformV1Port o) { buildCounterGoogleCloudAiplatformV1Port--; } +core.int buildCounterGoogleCloudAiplatformV1PrebuiltVoiceConfig = 0; +api.GoogleCloudAiplatformV1PrebuiltVoiceConfig + buildGoogleCloudAiplatformV1PrebuiltVoiceConfig() { + final o = api.GoogleCloudAiplatformV1PrebuiltVoiceConfig(); + buildCounterGoogleCloudAiplatformV1PrebuiltVoiceConfig++; + if (buildCounterGoogleCloudAiplatformV1PrebuiltVoiceConfig < 3) { + o.voiceName = 'foo'; + } + buildCounterGoogleCloudAiplatformV1PrebuiltVoiceConfig--; + return o; +} + +void checkGoogleCloudAiplatformV1PrebuiltVoiceConfig( + api.GoogleCloudAiplatformV1PrebuiltVoiceConfig o) { + buildCounterGoogleCloudAiplatformV1PrebuiltVoiceConfig++; + if (buildCounterGoogleCloudAiplatformV1PrebuiltVoiceConfig < 3) { + unittest.expect( + o.voiceName!, + unittest.equals('foo'), + ); + } + buildCounterGoogleCloudAiplatformV1PrebuiltVoiceConfig--; +} + core.int buildCounterGoogleCloudAiplatformV1PredefinedSplit = 0; api.GoogleCloudAiplatformV1PredefinedSplit buildGoogleCloudAiplatformV1PredefinedSplit() { @@ -18963,7 +20504,7 @@ void checkGoogleCloudAiplatformV1PredefinedSplit( buildCounterGoogleCloudAiplatformV1PredefinedSplit--; } -core.List buildUnnamed271() => [ +core.List buildUnnamed289() => [ { 'list': [1, 2, 3], 'bool': true, @@ -18976,7 +20517,7 @@ core.List buildUnnamed271() => [ }, ]; -void checkUnnamed271(core.List o) { +void checkUnnamed289(core.List o) { unittest.expect(o, unittest.hasLength(2)); var casted56 = (o[0]) as core.Map; unittest.expect(casted56, unittest.hasLength(3)); @@ -19008,13 +20549,98 @@ void checkUnnamed271(core.List o) { ); } +core.int buildCounterGoogleCloudAiplatformV1PredictLongRunningRequest = 0; +api.GoogleCloudAiplatformV1PredictLongRunningRequest + buildGoogleCloudAiplatformV1PredictLongRunningRequest() { + final o = api.GoogleCloudAiplatformV1PredictLongRunningRequest(); + buildCounterGoogleCloudAiplatformV1PredictLongRunningRequest++; + if (buildCounterGoogleCloudAiplatformV1PredictLongRunningRequest < 3) { + o.instances = buildUnnamed289(); + o.parameters = { + 'list': [1, 2, 3], + 'bool': true, + 'string': 'foo' + }; + } + buildCounterGoogleCloudAiplatformV1PredictLongRunningRequest--; + return o; +} + +void checkGoogleCloudAiplatformV1PredictLongRunningRequest( + api.GoogleCloudAiplatformV1PredictLongRunningRequest o) { + buildCounterGoogleCloudAiplatformV1PredictLongRunningRequest++; + if (buildCounterGoogleCloudAiplatformV1PredictLongRunningRequest < 3) { + checkUnnamed289(o.instances!); + var casted58 = (o.parameters!) 
as core.Map; + unittest.expect(casted58, unittest.hasLength(3)); + unittest.expect( + casted58['list'], + unittest.equals([1, 2, 3]), + ); + unittest.expect( + casted58['bool'], + unittest.equals(true), + ); + unittest.expect( + casted58['string'], + unittest.equals('foo'), + ); + } + buildCounterGoogleCloudAiplatformV1PredictLongRunningRequest--; +} + +core.List buildUnnamed290() => [ + { + 'list': [1, 2, 3], + 'bool': true, + 'string': 'foo' + }, + { + 'list': [1, 2, 3], + 'bool': true, + 'string': 'foo' + }, + ]; + +void checkUnnamed290(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + var casted59 = (o[0]) as core.Map; + unittest.expect(casted59, unittest.hasLength(3)); + unittest.expect( + casted59['list'], + unittest.equals([1, 2, 3]), + ); + unittest.expect( + casted59['bool'], + unittest.equals(true), + ); + unittest.expect( + casted59['string'], + unittest.equals('foo'), + ); + var casted60 = (o[1]) as core.Map; + unittest.expect(casted60, unittest.hasLength(3)); + unittest.expect( + casted60['list'], + unittest.equals([1, 2, 3]), + ); + unittest.expect( + casted60['bool'], + unittest.equals(true), + ); + unittest.expect( + casted60['string'], + unittest.equals('foo'), + ); +} + core.int buildCounterGoogleCloudAiplatformV1PredictRequest = 0; api.GoogleCloudAiplatformV1PredictRequest buildGoogleCloudAiplatformV1PredictRequest() { final o = api.GoogleCloudAiplatformV1PredictRequest(); buildCounterGoogleCloudAiplatformV1PredictRequest++; if (buildCounterGoogleCloudAiplatformV1PredictRequest < 3) { - o.instances = buildUnnamed271(); + o.instances = buildUnnamed290(); o.parameters = { 'list': [1, 2, 3], 'bool': true, @@ -19029,19 +20655,19 @@ void checkGoogleCloudAiplatformV1PredictRequest( api.GoogleCloudAiplatformV1PredictRequest o) { buildCounterGoogleCloudAiplatformV1PredictRequest++; if (buildCounterGoogleCloudAiplatformV1PredictRequest < 3) { - checkUnnamed271(o.instances!); - var casted58 = (o.parameters!) as core.Map; - unittest.expect(casted58, unittest.hasLength(3)); + checkUnnamed290(o.instances!); + var casted61 = (o.parameters!) 
as core.Map; + unittest.expect(casted61, unittest.hasLength(3)); unittest.expect( - casted58['list'], + casted61['list'], unittest.equals([1, 2, 3]), ); unittest.expect( - casted58['bool'], + casted61['bool'], unittest.equals(true), ); unittest.expect( - casted58['string'], + casted61['string'], unittest.equals('foo'), ); } @@ -19079,7 +20705,7 @@ void checkGoogleCloudAiplatformV1PredictRequestResponseLoggingConfig( buildCounterGoogleCloudAiplatformV1PredictRequestResponseLoggingConfig--; } -core.List buildUnnamed272() => [ +core.List buildUnnamed291() => [ { 'list': [1, 2, 3], 'bool': true, @@ -19092,34 +20718,34 @@ core.List buildUnnamed272() => [ }, ]; -void checkUnnamed272(core.List o) { +void checkUnnamed291(core.List o) { unittest.expect(o, unittest.hasLength(2)); - var casted59 = (o[0]) as core.Map; - unittest.expect(casted59, unittest.hasLength(3)); + var casted62 = (o[0]) as core.Map; + unittest.expect(casted62, unittest.hasLength(3)); unittest.expect( - casted59['list'], + casted62['list'], unittest.equals([1, 2, 3]), ); unittest.expect( - casted59['bool'], + casted62['bool'], unittest.equals(true), ); unittest.expect( - casted59['string'], + casted62['string'], unittest.equals('foo'), ); - var casted60 = (o[1]) as core.Map; - unittest.expect(casted60, unittest.hasLength(3)); + var casted63 = (o[1]) as core.Map; + unittest.expect(casted63, unittest.hasLength(3)); unittest.expect( - casted60['list'], + casted63['list'], unittest.equals([1, 2, 3]), ); unittest.expect( - casted60['bool'], + casted63['bool'], unittest.equals(true), ); unittest.expect( - casted60['string'], + casted63['string'], unittest.equals('foo'), ); } @@ -19139,7 +20765,7 @@ api.GoogleCloudAiplatformV1PredictResponse o.model = 'foo'; o.modelDisplayName = 'foo'; o.modelVersionId = 'foo'; - o.predictions = buildUnnamed272(); + o.predictions = buildUnnamed291(); } buildCounterGoogleCloudAiplatformV1PredictResponse--; return o; @@ -19153,18 +20779,18 @@ void checkGoogleCloudAiplatformV1PredictResponse( o.deployedModelId!, unittest.equals('foo'), ); - var casted61 = (o.metadata!) as core.Map; - unittest.expect(casted61, unittest.hasLength(3)); + var casted64 = (o.metadata!) 
as core.Map; + unittest.expect(casted64, unittest.hasLength(3)); unittest.expect( - casted61['list'], + casted64['list'], unittest.equals([1, 2, 3]), ); unittest.expect( - casted61['bool'], + casted64['bool'], unittest.equals(true), ); unittest.expect( - casted61['string'], + casted64['string'], unittest.equals('foo'), ); unittest.expect( @@ -19179,7 +20805,7 @@ void checkGoogleCloudAiplatformV1PredictResponse( o.modelVersionId!, unittest.equals('foo'), ); - checkUnnamed272(o.predictions!); + checkUnnamed291(o.predictions!); } buildCounterGoogleCloudAiplatformV1PredictResponse--; } @@ -19284,12 +20910,12 @@ void checkGoogleCloudAiplatformV1PrivateEndpoints( buildCounterGoogleCloudAiplatformV1PrivateEndpoints--; } -core.List buildUnnamed273() => [ +core.List buildUnnamed292() => [ 'foo', 'foo', ]; -void checkUnnamed273(core.List o) { +void checkUnnamed292(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -19308,7 +20934,7 @@ api.GoogleCloudAiplatformV1PrivateServiceConnectConfig buildCounterGoogleCloudAiplatformV1PrivateServiceConnectConfig++; if (buildCounterGoogleCloudAiplatformV1PrivateServiceConnectConfig < 3) { o.enablePrivateServiceConnect = true; - o.projectAllowlist = buildUnnamed273(); + o.projectAllowlist = buildUnnamed292(); o.serviceAttachment = 'foo'; } buildCounterGoogleCloudAiplatformV1PrivateServiceConnectConfig--; @@ -19320,7 +20946,7 @@ void checkGoogleCloudAiplatformV1PrivateServiceConnectConfig( buildCounterGoogleCloudAiplatformV1PrivateServiceConnectConfig++; if (buildCounterGoogleCloudAiplatformV1PrivateServiceConnectConfig < 3) { unittest.expect(o.enablePrivateServiceConnect!, unittest.isTrue); - checkUnnamed273(o.projectAllowlist!); + checkUnnamed292(o.projectAllowlist!); unittest.expect( o.serviceAttachment!, unittest.equals('foo'), @@ -19358,12 +20984,12 @@ void checkGoogleCloudAiplatformV1Probe(api.GoogleCloudAiplatformV1Probe o) { buildCounterGoogleCloudAiplatformV1Probe--; } -core.List buildUnnamed274() => [ +core.List buildUnnamed293() => [ 'foo', 'foo', ]; -void checkUnnamed274(core.List o) { +void checkUnnamed293(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -19381,7 +21007,7 @@ api.GoogleCloudAiplatformV1ProbeExecAction final o = api.GoogleCloudAiplatformV1ProbeExecAction(); buildCounterGoogleCloudAiplatformV1ProbeExecAction++; if (buildCounterGoogleCloudAiplatformV1ProbeExecAction < 3) { - o.command = buildUnnamed274(); + o.command = buildUnnamed293(); } buildCounterGoogleCloudAiplatformV1ProbeExecAction--; return o; @@ -19391,7 +21017,7 @@ void checkGoogleCloudAiplatformV1ProbeExecAction( api.GoogleCloudAiplatformV1ProbeExecAction o) { buildCounterGoogleCloudAiplatformV1ProbeExecAction++; if (buildCounterGoogleCloudAiplatformV1ProbeExecAction < 3) { - checkUnnamed274(o.command!); + checkUnnamed293(o.command!); } buildCounterGoogleCloudAiplatformV1ProbeExecAction--; } @@ -19430,29 +21056,12 @@ void checkGoogleCloudAiplatformV1PscAutomatedEndpoints( buildCounterGoogleCloudAiplatformV1PscAutomatedEndpoints--; } -core.int buildCounterGoogleCloudAiplatformV1PscInterfaceConfig = 0; -api.GoogleCloudAiplatformV1PscInterfaceConfig - buildGoogleCloudAiplatformV1PscInterfaceConfig() { - final o = api.GoogleCloudAiplatformV1PscInterfaceConfig(); - buildCounterGoogleCloudAiplatformV1PscInterfaceConfig++; - if (buildCounterGoogleCloudAiplatformV1PscInterfaceConfig < 3) {} - buildCounterGoogleCloudAiplatformV1PscInterfaceConfig--; - return o; -} - -void checkGoogleCloudAiplatformV1PscInterfaceConfig( 
- api.GoogleCloudAiplatformV1PscInterfaceConfig o) { - buildCounterGoogleCloudAiplatformV1PscInterfaceConfig++; - if (buildCounterGoogleCloudAiplatformV1PscInterfaceConfig < 3) {} - buildCounterGoogleCloudAiplatformV1PscInterfaceConfig--; -} - -core.List buildUnnamed275() => [ +core.List buildUnnamed294() => [ 'foo', 'foo', ]; -void checkUnnamed275(core.List o) { +void checkUnnamed294(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -19470,7 +21079,7 @@ api.GoogleCloudAiplatformV1PublisherModel final o = api.GoogleCloudAiplatformV1PublisherModel(); buildCounterGoogleCloudAiplatformV1PublisherModel++; if (buildCounterGoogleCloudAiplatformV1PublisherModel < 3) { - o.frameworks = buildUnnamed275(); + o.frameworks = buildUnnamed294(); o.launchStage = 'foo'; o.name = 'foo'; o.openSourceCategory = 'foo'; @@ -19489,7 +21098,7 @@ void checkGoogleCloudAiplatformV1PublisherModel( api.GoogleCloudAiplatformV1PublisherModel o) { buildCounterGoogleCloudAiplatformV1PublisherModel++; if (buildCounterGoogleCloudAiplatformV1PublisherModel < 3) { - checkUnnamed275(o.frameworks!); + checkUnnamed294(o.frameworks!); unittest.expect( o.launchStage!, unittest.equals('foo'), @@ -19655,12 +21264,12 @@ void checkGoogleCloudAiplatformV1PublisherModelCallToActionDeploy( buildCounterGoogleCloudAiplatformV1PublisherModelCallToActionDeploy--; } -core.Map buildUnnamed276() => { +core.Map buildUnnamed295() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed276(core.Map o) { +void checkUnnamed295(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -19682,7 +21291,7 @@ api.GoogleCloudAiplatformV1PublisherModelCallToActionDeployDeployMetadata buildCounterGoogleCloudAiplatformV1PublisherModelCallToActionDeployDeployMetadata++; if (buildCounterGoogleCloudAiplatformV1PublisherModelCallToActionDeployDeployMetadata < 3) { - o.labels = buildUnnamed276(); + o.labels = buildUnnamed295(); o.sampleRequest = 'foo'; } buildCounterGoogleCloudAiplatformV1PublisherModelCallToActionDeployDeployMetadata--; @@ -19695,7 +21304,7 @@ void checkGoogleCloudAiplatformV1PublisherModelCallToActionDeployDeployMetadata( buildCounterGoogleCloudAiplatformV1PublisherModelCallToActionDeployDeployMetadata++; if (buildCounterGoogleCloudAiplatformV1PublisherModelCallToActionDeployDeployMetadata < 3) { - checkUnnamed276(o.labels!); + checkUnnamed295(o.labels!); unittest.expect( o.sampleRequest!, unittest.equals('foo'), @@ -19704,12 +21313,12 @@ void checkGoogleCloudAiplatformV1PublisherModelCallToActionDeployDeployMetadata( buildCounterGoogleCloudAiplatformV1PublisherModelCallToActionDeployDeployMetadata--; } -core.List buildUnnamed277() => [ +core.List buildUnnamed296() => [ 'foo', 'foo', ]; -void checkUnnamed277(core.List o) { +void checkUnnamed296(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -19729,7 +21338,7 @@ api.GoogleCloudAiplatformV1PublisherModelCallToActionDeployGke buildCounterGoogleCloudAiplatformV1PublisherModelCallToActionDeployGke++; if (buildCounterGoogleCloudAiplatformV1PublisherModelCallToActionDeployGke < 3) { - o.gkeYamlConfigs = buildUnnamed277(); + o.gkeYamlConfigs = buildUnnamed296(); } buildCounterGoogleCloudAiplatformV1PublisherModelCallToActionDeployGke--; return o; @@ -19740,18 +21349,18 @@ void checkGoogleCloudAiplatformV1PublisherModelCallToActionDeployGke( buildCounterGoogleCloudAiplatformV1PublisherModelCallToActionDeployGke++; if (buildCounterGoogleCloudAiplatformV1PublisherModelCallToActionDeployGke < 3) { - 
checkUnnamed277(o.gkeYamlConfigs!); + checkUnnamed296(o.gkeYamlConfigs!); } buildCounterGoogleCloudAiplatformV1PublisherModelCallToActionDeployGke--; } core.List - buildUnnamed278() => [ + buildUnnamed297() => [ buildGoogleCloudAiplatformV1PublisherModelCallToActionDeploy(), buildGoogleCloudAiplatformV1PublisherModelCallToActionDeploy(), ]; -void checkUnnamed278( +void checkUnnamed297( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1PublisherModelCallToActionDeploy(o[0]); @@ -19767,7 +21376,7 @@ api.GoogleCloudAiplatformV1PublisherModelCallToActionDeployVertex buildCounterGoogleCloudAiplatformV1PublisherModelCallToActionDeployVertex++; if (buildCounterGoogleCloudAiplatformV1PublisherModelCallToActionDeployVertex < 3) { - o.multiDeployVertex = buildUnnamed278(); + o.multiDeployVertex = buildUnnamed297(); } buildCounterGoogleCloudAiplatformV1PublisherModelCallToActionDeployVertex--; return o; @@ -19778,7 +21387,7 @@ void checkGoogleCloudAiplatformV1PublisherModelCallToActionDeployVertex( buildCounterGoogleCloudAiplatformV1PublisherModelCallToActionDeployVertex++; if (buildCounterGoogleCloudAiplatformV1PublisherModelCallToActionDeployVertex < 3) { - checkUnnamed278(o.multiDeployVertex!); + checkUnnamed297(o.multiDeployVertex!); } buildCounterGoogleCloudAiplatformV1PublisherModelCallToActionDeployVertex--; } @@ -19786,12 +21395,12 @@ void checkGoogleCloudAiplatformV1PublisherModelCallToActionDeployVertex( core.List< api .GoogleCloudAiplatformV1PublisherModelCallToActionRegionalResourceReferences> - buildUnnamed279() => [ + buildUnnamed298() => [ buildGoogleCloudAiplatformV1PublisherModelCallToActionRegionalResourceReferences(), buildGoogleCloudAiplatformV1PublisherModelCallToActionRegionalResourceReferences(), ]; -void checkUnnamed279( +void checkUnnamed298( core.List< api .GoogleCloudAiplatformV1PublisherModelCallToActionRegionalResourceReferences> @@ -19813,7 +21422,7 @@ api.GoogleCloudAiplatformV1PublisherModelCallToActionOpenFineTuningPipelines buildCounterGoogleCloudAiplatformV1PublisherModelCallToActionOpenFineTuningPipelines++; if (buildCounterGoogleCloudAiplatformV1PublisherModelCallToActionOpenFineTuningPipelines < 3) { - o.fineTuningPipelines = buildUnnamed279(); + o.fineTuningPipelines = buildUnnamed298(); } buildCounterGoogleCloudAiplatformV1PublisherModelCallToActionOpenFineTuningPipelines--; return o; @@ -19825,7 +21434,7 @@ void checkGoogleCloudAiplatformV1PublisherModelCallToActionOpenFineTuningPipelin buildCounterGoogleCloudAiplatformV1PublisherModelCallToActionOpenFineTuningPipelines++; if (buildCounterGoogleCloudAiplatformV1PublisherModelCallToActionOpenFineTuningPipelines < 3) { - checkUnnamed279(o.fineTuningPipelines!); + checkUnnamed298(o.fineTuningPipelines!); } buildCounterGoogleCloudAiplatformV1PublisherModelCallToActionOpenFineTuningPipelines--; } @@ -19833,12 +21442,12 @@ void checkGoogleCloudAiplatformV1PublisherModelCallToActionOpenFineTuningPipelin core.List< api .GoogleCloudAiplatformV1PublisherModelCallToActionRegionalResourceReferences> - buildUnnamed280() => [ + buildUnnamed299() => [ buildGoogleCloudAiplatformV1PublisherModelCallToActionRegionalResourceReferences(), buildGoogleCloudAiplatformV1PublisherModelCallToActionRegionalResourceReferences(), ]; -void checkUnnamed280( +void checkUnnamed299( core.List< api .GoogleCloudAiplatformV1PublisherModelCallToActionRegionalResourceReferences> @@ -19860,7 +21469,7 @@ api.GoogleCloudAiplatformV1PublisherModelCallToActionOpenNotebooks 
buildCounterGoogleCloudAiplatformV1PublisherModelCallToActionOpenNotebooks++; if (buildCounterGoogleCloudAiplatformV1PublisherModelCallToActionOpenNotebooks < 3) { - o.notebooks = buildUnnamed280(); + o.notebooks = buildUnnamed299(); } buildCounterGoogleCloudAiplatformV1PublisherModelCallToActionOpenNotebooks--; return o; @@ -19871,19 +21480,19 @@ void checkGoogleCloudAiplatformV1PublisherModelCallToActionOpenNotebooks( buildCounterGoogleCloudAiplatformV1PublisherModelCallToActionOpenNotebooks++; if (buildCounterGoogleCloudAiplatformV1PublisherModelCallToActionOpenNotebooks < 3) { - checkUnnamed280(o.notebooks!); + checkUnnamed299(o.notebooks!); } buildCounterGoogleCloudAiplatformV1PublisherModelCallToActionOpenNotebooks--; } core.Map - buildUnnamed281() => { + buildUnnamed300() => { 'x': buildGoogleCloudAiplatformV1PublisherModelResourceReference(), 'y': buildGoogleCloudAiplatformV1PublisherModelResourceReference(), }; -void checkUnnamed281( +void checkUnnamed300( core.Map o) { @@ -19902,7 +21511,7 @@ api.GoogleCloudAiplatformV1PublisherModelCallToActionRegionalResourceReferences buildCounterGoogleCloudAiplatformV1PublisherModelCallToActionRegionalResourceReferences++; if (buildCounterGoogleCloudAiplatformV1PublisherModelCallToActionRegionalResourceReferences < 3) { - o.references = buildUnnamed281(); + o.references = buildUnnamed300(); o.resourceDescription = 'foo'; o.resourceTitle = 'foo'; o.resourceUseCase = 'foo'; @@ -19918,7 +21527,7 @@ void checkGoogleCloudAiplatformV1PublisherModelCallToActionRegionalResourceRefer buildCounterGoogleCloudAiplatformV1PublisherModelCallToActionRegionalResourceReferences++; if (buildCounterGoogleCloudAiplatformV1PublisherModelCallToActionRegionalResourceReferences < 3) { - checkUnnamed281(o.references!); + checkUnnamed300(o.references!); unittest.expect( o.resourceDescription!, unittest.equals('foo'), @@ -19940,12 +21549,12 @@ void checkGoogleCloudAiplatformV1PublisherModelCallToActionRegionalResourceRefer } core.List - buildUnnamed282() => [ + buildUnnamed301() => [ buildGoogleCloudAiplatformV1PublisherModelDocumentation(), buildGoogleCloudAiplatformV1PublisherModelDocumentation(), ]; -void checkUnnamed282( +void checkUnnamed301( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1PublisherModelDocumentation(o[0]); @@ -19961,7 +21570,7 @@ api.GoogleCloudAiplatformV1PublisherModelCallToActionViewRestApi buildCounterGoogleCloudAiplatformV1PublisherModelCallToActionViewRestApi++; if (buildCounterGoogleCloudAiplatformV1PublisherModelCallToActionViewRestApi < 3) { - o.documentations = buildUnnamed282(); + o.documentations = buildUnnamed301(); o.title = 'foo'; } buildCounterGoogleCloudAiplatformV1PublisherModelCallToActionViewRestApi--; @@ -19973,7 +21582,7 @@ void checkGoogleCloudAiplatformV1PublisherModelCallToActionViewRestApi( buildCounterGoogleCloudAiplatformV1PublisherModelCallToActionViewRestApi++; if (buildCounterGoogleCloudAiplatformV1PublisherModelCallToActionViewRestApi < 3) { - checkUnnamed282(o.documentations!); + checkUnnamed301(o.documentations!); unittest.expect( o.title!, unittest.equals('foo'), @@ -20128,12 +21737,12 @@ void checkGoogleCloudAiplatformV1PurgeExecutionsRequest( buildCounterGoogleCloudAiplatformV1PurgeExecutionsRequest--; } -core.List buildUnnamed283() => [ +core.List buildUnnamed302() => [ 'foo', 'foo', ]; -void checkUnnamed283(core.List o) { +void checkUnnamed302(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -20145,23 +21754,23 @@ void 
checkUnnamed283(core.List o) { ); } -core.List buildUnnamed284() => [ +core.List buildUnnamed303() => [ buildGoogleCloudAiplatformV1EnvVar(), buildGoogleCloudAiplatformV1EnvVar(), ]; -void checkUnnamed284(core.List o) { +void checkUnnamed303(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1EnvVar(o[0]); checkGoogleCloudAiplatformV1EnvVar(o[1]); } -core.List buildUnnamed285() => [ +core.List buildUnnamed304() => [ 'foo', 'foo', ]; -void checkUnnamed285(core.List o) { +void checkUnnamed304(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -20179,10 +21788,10 @@ api.GoogleCloudAiplatformV1PythonPackageSpec final o = api.GoogleCloudAiplatformV1PythonPackageSpec(); buildCounterGoogleCloudAiplatformV1PythonPackageSpec++; if (buildCounterGoogleCloudAiplatformV1PythonPackageSpec < 3) { - o.args = buildUnnamed283(); - o.env = buildUnnamed284(); + o.args = buildUnnamed302(); + o.env = buildUnnamed303(); o.executorImageUri = 'foo'; - o.packageUris = buildUnnamed285(); + o.packageUris = buildUnnamed304(); o.pythonModule = 'foo'; } buildCounterGoogleCloudAiplatformV1PythonPackageSpec--; @@ -20193,13 +21802,13 @@ void checkGoogleCloudAiplatformV1PythonPackageSpec( api.GoogleCloudAiplatformV1PythonPackageSpec o) { buildCounterGoogleCloudAiplatformV1PythonPackageSpec++; if (buildCounterGoogleCloudAiplatformV1PythonPackageSpec < 3) { - checkUnnamed283(o.args!); - checkUnnamed284(o.env!); + checkUnnamed302(o.args!); + checkUnnamed303(o.env!); unittest.expect( o.executorImageUri!, unittest.equals('foo'), ); - checkUnnamed285(o.packageUris!); + checkUnnamed304(o.packageUris!); unittest.expect( o.pythonModule!, unittest.equals('foo'), @@ -20208,23 +21817,23 @@ void checkGoogleCloudAiplatformV1PythonPackageSpec( buildCounterGoogleCloudAiplatformV1PythonPackageSpec--; } -core.List buildUnnamed286() => [ +core.List buildUnnamed305() => [ buildGoogleCloudAiplatformV1DeployedModelRef(), buildGoogleCloudAiplatformV1DeployedModelRef(), ]; -void checkUnnamed286(core.List o) { +void checkUnnamed305(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1DeployedModelRef(o[0]); checkGoogleCloudAiplatformV1DeployedModelRef(o[1]); } -core.List buildUnnamed287() => [ +core.List buildUnnamed306() => [ buildGoogleCloudAiplatformV1DeployedModel(), buildGoogleCloudAiplatformV1DeployedModel(), ]; -void checkUnnamed287(core.List o) { +void checkUnnamed306(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1DeployedModel(o[0]); checkGoogleCloudAiplatformV1DeployedModel(o[1]); @@ -20236,8 +21845,8 @@ api.GoogleCloudAiplatformV1QueryDeployedModelsResponse final o = api.GoogleCloudAiplatformV1QueryDeployedModelsResponse(); buildCounterGoogleCloudAiplatformV1QueryDeployedModelsResponse++; if (buildCounterGoogleCloudAiplatformV1QueryDeployedModelsResponse < 3) { - o.deployedModelRefs = buildUnnamed286(); - o.deployedModels = buildUnnamed287(); + o.deployedModelRefs = buildUnnamed305(); + o.deployedModels = buildUnnamed306(); o.nextPageToken = 'foo'; o.totalDeployedModelCount = 42; o.totalEndpointCount = 42; @@ -20250,8 +21859,8 @@ void checkGoogleCloudAiplatformV1QueryDeployedModelsResponse( api.GoogleCloudAiplatformV1QueryDeployedModelsResponse o) { buildCounterGoogleCloudAiplatformV1QueryDeployedModelsResponse++; if (buildCounterGoogleCloudAiplatformV1QueryDeployedModelsResponse < 3) { - checkUnnamed286(o.deployedModelRefs!); - checkUnnamed287(o.deployedModels!); + 
checkUnnamed305(o.deployedModelRefs!); + checkUnnamed306(o.deployedModels!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -20268,6 +21877,115 @@ void checkGoogleCloudAiplatformV1QueryDeployedModelsResponse( buildCounterGoogleCloudAiplatformV1QueryDeployedModelsResponse--; } +core.Map buildUnnamed307() => { + 'x': { + 'list': [1, 2, 3], + 'bool': true, + 'string': 'foo' + }, + 'y': { + 'list': [1, 2, 3], + 'bool': true, + 'string': 'foo' + }, + }; + +void checkUnnamed307(core.Map o) { + unittest.expect(o, unittest.hasLength(2)); + var casted65 = (o['x']!) as core.Map; + unittest.expect(casted65, unittest.hasLength(3)); + unittest.expect( + casted65['list'], + unittest.equals([1, 2, 3]), + ); + unittest.expect( + casted65['bool'], + unittest.equals(true), + ); + unittest.expect( + casted65['string'], + unittest.equals('foo'), + ); + var casted66 = (o['y']!) as core.Map; + unittest.expect(casted66, unittest.hasLength(3)); + unittest.expect( + casted66['list'], + unittest.equals([1, 2, 3]), + ); + unittest.expect( + casted66['bool'], + unittest.equals(true), + ); + unittest.expect( + casted66['string'], + unittest.equals('foo'), + ); +} + +core.int buildCounterGoogleCloudAiplatformV1QueryReasoningEngineRequest = 0; +api.GoogleCloudAiplatformV1QueryReasoningEngineRequest + buildGoogleCloudAiplatformV1QueryReasoningEngineRequest() { + final o = api.GoogleCloudAiplatformV1QueryReasoningEngineRequest(); + buildCounterGoogleCloudAiplatformV1QueryReasoningEngineRequest++; + if (buildCounterGoogleCloudAiplatformV1QueryReasoningEngineRequest < 3) { + o.classMethod = 'foo'; + o.input = buildUnnamed307(); + } + buildCounterGoogleCloudAiplatformV1QueryReasoningEngineRequest--; + return o; +} + +void checkGoogleCloudAiplatformV1QueryReasoningEngineRequest( + api.GoogleCloudAiplatformV1QueryReasoningEngineRequest o) { + buildCounterGoogleCloudAiplatformV1QueryReasoningEngineRequest++; + if (buildCounterGoogleCloudAiplatformV1QueryReasoningEngineRequest < 3) { + unittest.expect( + o.classMethod!, + unittest.equals('foo'), + ); + checkUnnamed307(o.input!); + } + buildCounterGoogleCloudAiplatformV1QueryReasoningEngineRequest--; +} + +core.int buildCounterGoogleCloudAiplatformV1QueryReasoningEngineResponse = 0; +api.GoogleCloudAiplatformV1QueryReasoningEngineResponse + buildGoogleCloudAiplatformV1QueryReasoningEngineResponse() { + final o = api.GoogleCloudAiplatformV1QueryReasoningEngineResponse(); + buildCounterGoogleCloudAiplatformV1QueryReasoningEngineResponse++; + if (buildCounterGoogleCloudAiplatformV1QueryReasoningEngineResponse < 3) { + o.output = { + 'list': [1, 2, 3], + 'bool': true, + 'string': 'foo' + }; + } + buildCounterGoogleCloudAiplatformV1QueryReasoningEngineResponse--; + return o; +} + +void checkGoogleCloudAiplatformV1QueryReasoningEngineResponse( + api.GoogleCloudAiplatformV1QueryReasoningEngineResponse o) { + buildCounterGoogleCloudAiplatformV1QueryReasoningEngineResponse++; + if (buildCounterGoogleCloudAiplatformV1QueryReasoningEngineResponse < 3) { + var casted67 = (o.output!) 
as core.Map; + unittest.expect(casted67, unittest.hasLength(3)); + unittest.expect( + casted67['list'], + unittest.equals([1, 2, 3]), + ); + unittest.expect( + casted67['bool'], + unittest.equals(true), + ); + unittest.expect( + casted67['string'], + unittest.equals('foo'), + ); + } + buildCounterGoogleCloudAiplatformV1QueryReasoningEngineResponse--; +} + core.int buildCounterGoogleCloudAiplatformV1QuestionAnsweringCorrectnessInput = 0; api.GoogleCloudAiplatformV1QuestionAnsweringCorrectnessInput @@ -20788,6 +22506,515 @@ void checkGoogleCloudAiplatformV1QuestionAnsweringRelevanceSpec( buildCounterGoogleCloudAiplatformV1QuestionAnsweringRelevanceSpec--; } +core.List buildUnnamed308() => [ + buildGoogleCloudAiplatformV1RagContextsContext(), + buildGoogleCloudAiplatformV1RagContextsContext(), + ]; + +void checkUnnamed308( + core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkGoogleCloudAiplatformV1RagContextsContext(o[0]); + checkGoogleCloudAiplatformV1RagContextsContext(o[1]); +} + +core.int buildCounterGoogleCloudAiplatformV1RagContexts = 0; +api.GoogleCloudAiplatformV1RagContexts + buildGoogleCloudAiplatformV1RagContexts() { + final o = api.GoogleCloudAiplatformV1RagContexts(); + buildCounterGoogleCloudAiplatformV1RagContexts++; + if (buildCounterGoogleCloudAiplatformV1RagContexts < 3) { + o.contexts = buildUnnamed308(); + } + buildCounterGoogleCloudAiplatformV1RagContexts--; + return o; +} + +void checkGoogleCloudAiplatformV1RagContexts( + api.GoogleCloudAiplatformV1RagContexts o) { + buildCounterGoogleCloudAiplatformV1RagContexts++; + if (buildCounterGoogleCloudAiplatformV1RagContexts < 3) { + checkUnnamed308(o.contexts!); + } + buildCounterGoogleCloudAiplatformV1RagContexts--; +} + +core.int buildCounterGoogleCloudAiplatformV1RagContextsContext = 0; +api.GoogleCloudAiplatformV1RagContextsContext + buildGoogleCloudAiplatformV1RagContextsContext() { + final o = api.GoogleCloudAiplatformV1RagContextsContext(); + buildCounterGoogleCloudAiplatformV1RagContextsContext++; + if (buildCounterGoogleCloudAiplatformV1RagContextsContext < 3) { + o.score = 42.0; + o.sourceDisplayName = 'foo'; + o.sourceUri = 'foo'; + o.text = 'foo'; + } + buildCounterGoogleCloudAiplatformV1RagContextsContext--; + return o; +} + +void checkGoogleCloudAiplatformV1RagContextsContext( + api.GoogleCloudAiplatformV1RagContextsContext o) { + buildCounterGoogleCloudAiplatformV1RagContextsContext++; + if (buildCounterGoogleCloudAiplatformV1RagContextsContext < 3) { + unittest.expect( + o.score!, + unittest.equals(42.0), + ); + unittest.expect( + o.sourceDisplayName!, + unittest.equals('foo'), + ); + unittest.expect( + o.sourceUri!, + unittest.equals('foo'), + ); + unittest.expect( + o.text!, + unittest.equals('foo'), + ); + } + buildCounterGoogleCloudAiplatformV1RagContextsContext--; +} + +core.int buildCounterGoogleCloudAiplatformV1RagCorpus = 0; +api.GoogleCloudAiplatformV1RagCorpus buildGoogleCloudAiplatformV1RagCorpus() { + final o = api.GoogleCloudAiplatformV1RagCorpus(); + buildCounterGoogleCloudAiplatformV1RagCorpus++; + if (buildCounterGoogleCloudAiplatformV1RagCorpus < 3) { + o.corpusStatus = buildGoogleCloudAiplatformV1CorpusStatus(); + o.createTime = 'foo'; + o.description = 'foo'; + o.displayName = 'foo'; + o.name = 'foo'; + o.updateTime = 'foo'; + o.vectorDbConfig = buildGoogleCloudAiplatformV1RagVectorDbConfig(); + } + buildCounterGoogleCloudAiplatformV1RagCorpus--; + return o; +} + +void checkGoogleCloudAiplatformV1RagCorpus( + api.GoogleCloudAiplatformV1RagCorpus o) { + 
buildCounterGoogleCloudAiplatformV1RagCorpus++; + if (buildCounterGoogleCloudAiplatformV1RagCorpus < 3) { + checkGoogleCloudAiplatformV1CorpusStatus(o.corpusStatus!); + unittest.expect( + o.createTime!, + unittest.equals('foo'), + ); + unittest.expect( + o.description!, + unittest.equals('foo'), + ); + unittest.expect( + o.displayName!, + unittest.equals('foo'), + ); + unittest.expect( + o.name!, + unittest.equals('foo'), + ); + unittest.expect( + o.updateTime!, + unittest.equals('foo'), + ); + checkGoogleCloudAiplatformV1RagVectorDbConfig(o.vectorDbConfig!); + } + buildCounterGoogleCloudAiplatformV1RagCorpus--; +} + +core.int buildCounterGoogleCloudAiplatformV1RagEmbeddingModelConfig = 0; +api.GoogleCloudAiplatformV1RagEmbeddingModelConfig + buildGoogleCloudAiplatformV1RagEmbeddingModelConfig() { + final o = api.GoogleCloudAiplatformV1RagEmbeddingModelConfig(); + buildCounterGoogleCloudAiplatformV1RagEmbeddingModelConfig++; + if (buildCounterGoogleCloudAiplatformV1RagEmbeddingModelConfig < 3) { + o.vertexPredictionEndpoint = + buildGoogleCloudAiplatformV1RagEmbeddingModelConfigVertexPredictionEndpoint(); + } + buildCounterGoogleCloudAiplatformV1RagEmbeddingModelConfig--; + return o; +} + +void checkGoogleCloudAiplatformV1RagEmbeddingModelConfig( + api.GoogleCloudAiplatformV1RagEmbeddingModelConfig o) { + buildCounterGoogleCloudAiplatformV1RagEmbeddingModelConfig++; + if (buildCounterGoogleCloudAiplatformV1RagEmbeddingModelConfig < 3) { + checkGoogleCloudAiplatformV1RagEmbeddingModelConfigVertexPredictionEndpoint( + o.vertexPredictionEndpoint!); + } + buildCounterGoogleCloudAiplatformV1RagEmbeddingModelConfig--; +} + +core.int + buildCounterGoogleCloudAiplatformV1RagEmbeddingModelConfigVertexPredictionEndpoint = + 0; +api.GoogleCloudAiplatformV1RagEmbeddingModelConfigVertexPredictionEndpoint + buildGoogleCloudAiplatformV1RagEmbeddingModelConfigVertexPredictionEndpoint() { + final o = api + .GoogleCloudAiplatformV1RagEmbeddingModelConfigVertexPredictionEndpoint(); + buildCounterGoogleCloudAiplatformV1RagEmbeddingModelConfigVertexPredictionEndpoint++; + if (buildCounterGoogleCloudAiplatformV1RagEmbeddingModelConfigVertexPredictionEndpoint < + 3) { + o.endpoint = 'foo'; + o.model = 'foo'; + o.modelVersionId = 'foo'; + } + buildCounterGoogleCloudAiplatformV1RagEmbeddingModelConfigVertexPredictionEndpoint--; + return o; +} + +void checkGoogleCloudAiplatformV1RagEmbeddingModelConfigVertexPredictionEndpoint( + api.GoogleCloudAiplatformV1RagEmbeddingModelConfigVertexPredictionEndpoint + o) { + buildCounterGoogleCloudAiplatformV1RagEmbeddingModelConfigVertexPredictionEndpoint++; + if (buildCounterGoogleCloudAiplatformV1RagEmbeddingModelConfigVertexPredictionEndpoint < + 3) { + unittest.expect( + o.endpoint!, + unittest.equals('foo'), + ); + unittest.expect( + o.model!, + unittest.equals('foo'), + ); + unittest.expect( + o.modelVersionId!, + unittest.equals('foo'), + ); + } + buildCounterGoogleCloudAiplatformV1RagEmbeddingModelConfigVertexPredictionEndpoint--; +} + +core.int buildCounterGoogleCloudAiplatformV1RagFile = 0; +api.GoogleCloudAiplatformV1RagFile buildGoogleCloudAiplatformV1RagFile() { + final o = api.GoogleCloudAiplatformV1RagFile(); + buildCounterGoogleCloudAiplatformV1RagFile++; + if (buildCounterGoogleCloudAiplatformV1RagFile < 3) { + o.createTime = 'foo'; + o.description = 'foo'; + o.directUploadSource = buildGoogleCloudAiplatformV1DirectUploadSource(); + o.displayName = 'foo'; + o.fileStatus = buildGoogleCloudAiplatformV1FileStatus(); + o.gcsSource = 
buildGoogleCloudAiplatformV1GcsSource(); + o.googleDriveSource = buildGoogleCloudAiplatformV1GoogleDriveSource(); + o.jiraSource = buildGoogleCloudAiplatformV1JiraSource(); + o.name = 'foo'; + o.sharePointSources = buildGoogleCloudAiplatformV1SharePointSources(); + o.slackSource = buildGoogleCloudAiplatformV1SlackSource(); + o.updateTime = 'foo'; + } + buildCounterGoogleCloudAiplatformV1RagFile--; + return o; +} + +void checkGoogleCloudAiplatformV1RagFile(api.GoogleCloudAiplatformV1RagFile o) { + buildCounterGoogleCloudAiplatformV1RagFile++; + if (buildCounterGoogleCloudAiplatformV1RagFile < 3) { + unittest.expect( + o.createTime!, + unittest.equals('foo'), + ); + unittest.expect( + o.description!, + unittest.equals('foo'), + ); + checkGoogleCloudAiplatformV1DirectUploadSource(o.directUploadSource!); + unittest.expect( + o.displayName!, + unittest.equals('foo'), + ); + checkGoogleCloudAiplatformV1FileStatus(o.fileStatus!); + checkGoogleCloudAiplatformV1GcsSource(o.gcsSource!); + checkGoogleCloudAiplatformV1GoogleDriveSource(o.googleDriveSource!); + checkGoogleCloudAiplatformV1JiraSource(o.jiraSource!); + unittest.expect( + o.name!, + unittest.equals('foo'), + ); + checkGoogleCloudAiplatformV1SharePointSources(o.sharePointSources!); + checkGoogleCloudAiplatformV1SlackSource(o.slackSource!); + unittest.expect( + o.updateTime!, + unittest.equals('foo'), + ); + } + buildCounterGoogleCloudAiplatformV1RagFile--; +} + +core.int buildCounterGoogleCloudAiplatformV1RagFileChunkingConfig = 0; +api.GoogleCloudAiplatformV1RagFileChunkingConfig + buildGoogleCloudAiplatformV1RagFileChunkingConfig() { + final o = api.GoogleCloudAiplatformV1RagFileChunkingConfig(); + buildCounterGoogleCloudAiplatformV1RagFileChunkingConfig++; + if (buildCounterGoogleCloudAiplatformV1RagFileChunkingConfig < 3) { + o.fixedLengthChunking = + buildGoogleCloudAiplatformV1RagFileChunkingConfigFixedLengthChunking(); + } + buildCounterGoogleCloudAiplatformV1RagFileChunkingConfig--; + return o; +} + +void checkGoogleCloudAiplatformV1RagFileChunkingConfig( + api.GoogleCloudAiplatformV1RagFileChunkingConfig o) { + buildCounterGoogleCloudAiplatformV1RagFileChunkingConfig++; + if (buildCounterGoogleCloudAiplatformV1RagFileChunkingConfig < 3) { + checkGoogleCloudAiplatformV1RagFileChunkingConfigFixedLengthChunking( + o.fixedLengthChunking!); + } + buildCounterGoogleCloudAiplatformV1RagFileChunkingConfig--; +} + +core.int + buildCounterGoogleCloudAiplatformV1RagFileChunkingConfigFixedLengthChunking = + 0; +api.GoogleCloudAiplatformV1RagFileChunkingConfigFixedLengthChunking + buildGoogleCloudAiplatformV1RagFileChunkingConfigFixedLengthChunking() { + final o = + api.GoogleCloudAiplatformV1RagFileChunkingConfigFixedLengthChunking(); + buildCounterGoogleCloudAiplatformV1RagFileChunkingConfigFixedLengthChunking++; + if (buildCounterGoogleCloudAiplatformV1RagFileChunkingConfigFixedLengthChunking < + 3) { + o.chunkOverlap = 42; + o.chunkSize = 42; + } + buildCounterGoogleCloudAiplatformV1RagFileChunkingConfigFixedLengthChunking--; + return o; +} + +void checkGoogleCloudAiplatformV1RagFileChunkingConfigFixedLengthChunking( + api.GoogleCloudAiplatformV1RagFileChunkingConfigFixedLengthChunking o) { + buildCounterGoogleCloudAiplatformV1RagFileChunkingConfigFixedLengthChunking++; + if (buildCounterGoogleCloudAiplatformV1RagFileChunkingConfigFixedLengthChunking < + 3) { + unittest.expect( + o.chunkOverlap!, + unittest.equals(42), + ); + unittest.expect( + o.chunkSize!, + unittest.equals(42), + ); + } + 
buildCounterGoogleCloudAiplatformV1RagFileChunkingConfigFixedLengthChunking--; +} + +core.int buildCounterGoogleCloudAiplatformV1RagFileTransformationConfig = 0; +api.GoogleCloudAiplatformV1RagFileTransformationConfig + buildGoogleCloudAiplatformV1RagFileTransformationConfig() { + final o = api.GoogleCloudAiplatformV1RagFileTransformationConfig(); + buildCounterGoogleCloudAiplatformV1RagFileTransformationConfig++; + if (buildCounterGoogleCloudAiplatformV1RagFileTransformationConfig < 3) { + o.ragFileChunkingConfig = + buildGoogleCloudAiplatformV1RagFileChunkingConfig(); + } + buildCounterGoogleCloudAiplatformV1RagFileTransformationConfig--; + return o; +} + +void checkGoogleCloudAiplatformV1RagFileTransformationConfig( + api.GoogleCloudAiplatformV1RagFileTransformationConfig o) { + buildCounterGoogleCloudAiplatformV1RagFileTransformationConfig++; + if (buildCounterGoogleCloudAiplatformV1RagFileTransformationConfig < 3) { + checkGoogleCloudAiplatformV1RagFileChunkingConfig(o.ragFileChunkingConfig!); + } + buildCounterGoogleCloudAiplatformV1RagFileTransformationConfig--; +} + +core.int buildCounterGoogleCloudAiplatformV1RagQuery = 0; +api.GoogleCloudAiplatformV1RagQuery buildGoogleCloudAiplatformV1RagQuery() { + final o = api.GoogleCloudAiplatformV1RagQuery(); + buildCounterGoogleCloudAiplatformV1RagQuery++; + if (buildCounterGoogleCloudAiplatformV1RagQuery < 3) { + o.ragRetrievalConfig = buildGoogleCloudAiplatformV1RagRetrievalConfig(); + o.text = 'foo'; + } + buildCounterGoogleCloudAiplatformV1RagQuery--; + return o; +} + +void checkGoogleCloudAiplatformV1RagQuery( + api.GoogleCloudAiplatformV1RagQuery o) { + buildCounterGoogleCloudAiplatformV1RagQuery++; + if (buildCounterGoogleCloudAiplatformV1RagQuery < 3) { + checkGoogleCloudAiplatformV1RagRetrievalConfig(o.ragRetrievalConfig!); + unittest.expect( + o.text!, + unittest.equals('foo'), + ); + } + buildCounterGoogleCloudAiplatformV1RagQuery--; +} + +core.int buildCounterGoogleCloudAiplatformV1RagRetrievalConfig = 0; +api.GoogleCloudAiplatformV1RagRetrievalConfig + buildGoogleCloudAiplatformV1RagRetrievalConfig() { + final o = api.GoogleCloudAiplatformV1RagRetrievalConfig(); + buildCounterGoogleCloudAiplatformV1RagRetrievalConfig++; + if (buildCounterGoogleCloudAiplatformV1RagRetrievalConfig < 3) { + o.filter = buildGoogleCloudAiplatformV1RagRetrievalConfigFilter(); + o.topK = 42; + } + buildCounterGoogleCloudAiplatformV1RagRetrievalConfig--; + return o; +} + +void checkGoogleCloudAiplatformV1RagRetrievalConfig( + api.GoogleCloudAiplatformV1RagRetrievalConfig o) { + buildCounterGoogleCloudAiplatformV1RagRetrievalConfig++; + if (buildCounterGoogleCloudAiplatformV1RagRetrievalConfig < 3) { + checkGoogleCloudAiplatformV1RagRetrievalConfigFilter(o.filter!); + unittest.expect( + o.topK!, + unittest.equals(42), + ); + } + buildCounterGoogleCloudAiplatformV1RagRetrievalConfig--; +} + +core.int buildCounterGoogleCloudAiplatformV1RagRetrievalConfigFilter = 0; +api.GoogleCloudAiplatformV1RagRetrievalConfigFilter + buildGoogleCloudAiplatformV1RagRetrievalConfigFilter() { + final o = api.GoogleCloudAiplatformV1RagRetrievalConfigFilter(); + buildCounterGoogleCloudAiplatformV1RagRetrievalConfigFilter++; + if (buildCounterGoogleCloudAiplatformV1RagRetrievalConfigFilter < 3) { + o.metadataFilter = 'foo'; + o.vectorDistanceThreshold = 42.0; + o.vectorSimilarityThreshold = 42.0; + } + buildCounterGoogleCloudAiplatformV1RagRetrievalConfigFilter--; + return o; +} + +void checkGoogleCloudAiplatformV1RagRetrievalConfigFilter( + 
api.GoogleCloudAiplatformV1RagRetrievalConfigFilter o) { + buildCounterGoogleCloudAiplatformV1RagRetrievalConfigFilter++; + if (buildCounterGoogleCloudAiplatformV1RagRetrievalConfigFilter < 3) { + unittest.expect( + o.metadataFilter!, + unittest.equals('foo'), + ); + unittest.expect( + o.vectorDistanceThreshold!, + unittest.equals(42.0), + ); + unittest.expect( + o.vectorSimilarityThreshold!, + unittest.equals(42.0), + ); + } + buildCounterGoogleCloudAiplatformV1RagRetrievalConfigFilter--; +} + +core.int buildCounterGoogleCloudAiplatformV1RagVectorDbConfig = 0; +api.GoogleCloudAiplatformV1RagVectorDbConfig + buildGoogleCloudAiplatformV1RagVectorDbConfig() { + final o = api.GoogleCloudAiplatformV1RagVectorDbConfig(); + buildCounterGoogleCloudAiplatformV1RagVectorDbConfig++; + if (buildCounterGoogleCloudAiplatformV1RagVectorDbConfig < 3) { + o.apiAuth = buildGoogleCloudAiplatformV1ApiAuth(); + o.pinecone = buildGoogleCloudAiplatformV1RagVectorDbConfigPinecone(); + o.ragEmbeddingModelConfig = + buildGoogleCloudAiplatformV1RagEmbeddingModelConfig(); + o.ragManagedDb = + buildGoogleCloudAiplatformV1RagVectorDbConfigRagManagedDb(); + o.vertexVectorSearch = + buildGoogleCloudAiplatformV1RagVectorDbConfigVertexVectorSearch(); + } + buildCounterGoogleCloudAiplatformV1RagVectorDbConfig--; + return o; +} + +void checkGoogleCloudAiplatformV1RagVectorDbConfig( + api.GoogleCloudAiplatformV1RagVectorDbConfig o) { + buildCounterGoogleCloudAiplatformV1RagVectorDbConfig++; + if (buildCounterGoogleCloudAiplatformV1RagVectorDbConfig < 3) { + checkGoogleCloudAiplatformV1ApiAuth(o.apiAuth!); + checkGoogleCloudAiplatformV1RagVectorDbConfigPinecone(o.pinecone!); + checkGoogleCloudAiplatformV1RagEmbeddingModelConfig( + o.ragEmbeddingModelConfig!); + checkGoogleCloudAiplatformV1RagVectorDbConfigRagManagedDb(o.ragManagedDb!); + checkGoogleCloudAiplatformV1RagVectorDbConfigVertexVectorSearch( + o.vertexVectorSearch!); + } + buildCounterGoogleCloudAiplatformV1RagVectorDbConfig--; +} + +core.int buildCounterGoogleCloudAiplatformV1RagVectorDbConfigPinecone = 0; +api.GoogleCloudAiplatformV1RagVectorDbConfigPinecone + buildGoogleCloudAiplatformV1RagVectorDbConfigPinecone() { + final o = api.GoogleCloudAiplatformV1RagVectorDbConfigPinecone(); + buildCounterGoogleCloudAiplatformV1RagVectorDbConfigPinecone++; + if (buildCounterGoogleCloudAiplatformV1RagVectorDbConfigPinecone < 3) { + o.indexName = 'foo'; + } + buildCounterGoogleCloudAiplatformV1RagVectorDbConfigPinecone--; + return o; +} + +void checkGoogleCloudAiplatformV1RagVectorDbConfigPinecone( + api.GoogleCloudAiplatformV1RagVectorDbConfigPinecone o) { + buildCounterGoogleCloudAiplatformV1RagVectorDbConfigPinecone++; + if (buildCounterGoogleCloudAiplatformV1RagVectorDbConfigPinecone < 3) { + unittest.expect( + o.indexName!, + unittest.equals('foo'), + ); + } + buildCounterGoogleCloudAiplatformV1RagVectorDbConfigPinecone--; +} + +core.int buildCounterGoogleCloudAiplatformV1RagVectorDbConfigRagManagedDb = 0; +api.GoogleCloudAiplatformV1RagVectorDbConfigRagManagedDb + buildGoogleCloudAiplatformV1RagVectorDbConfigRagManagedDb() { + final o = api.GoogleCloudAiplatformV1RagVectorDbConfigRagManagedDb(); + buildCounterGoogleCloudAiplatformV1RagVectorDbConfigRagManagedDb++; + if (buildCounterGoogleCloudAiplatformV1RagVectorDbConfigRagManagedDb < 3) {} + buildCounterGoogleCloudAiplatformV1RagVectorDbConfigRagManagedDb--; + return o; +} + +void checkGoogleCloudAiplatformV1RagVectorDbConfigRagManagedDb( + api.GoogleCloudAiplatformV1RagVectorDbConfigRagManagedDb o) { + 
buildCounterGoogleCloudAiplatformV1RagVectorDbConfigRagManagedDb++; + if (buildCounterGoogleCloudAiplatformV1RagVectorDbConfigRagManagedDb < 3) {} + buildCounterGoogleCloudAiplatformV1RagVectorDbConfigRagManagedDb--; +} + +core.int + buildCounterGoogleCloudAiplatformV1RagVectorDbConfigVertexVectorSearch = 0; +api.GoogleCloudAiplatformV1RagVectorDbConfigVertexVectorSearch + buildGoogleCloudAiplatformV1RagVectorDbConfigVertexVectorSearch() { + final o = api.GoogleCloudAiplatformV1RagVectorDbConfigVertexVectorSearch(); + buildCounterGoogleCloudAiplatformV1RagVectorDbConfigVertexVectorSearch++; + if (buildCounterGoogleCloudAiplatformV1RagVectorDbConfigVertexVectorSearch < + 3) { + o.index = 'foo'; + o.indexEndpoint = 'foo'; + } + buildCounterGoogleCloudAiplatformV1RagVectorDbConfigVertexVectorSearch--; + return o; +} + +void checkGoogleCloudAiplatformV1RagVectorDbConfigVertexVectorSearch( + api.GoogleCloudAiplatformV1RagVectorDbConfigVertexVectorSearch o) { + buildCounterGoogleCloudAiplatformV1RagVectorDbConfigVertexVectorSearch++; + if (buildCounterGoogleCloudAiplatformV1RagVectorDbConfigVertexVectorSearch < + 3) { + unittest.expect( + o.index!, + unittest.equals('foo'), + ); + unittest.expect( + o.indexEndpoint!, + unittest.equals('foo'), + ); + } + buildCounterGoogleCloudAiplatformV1RagVectorDbConfigVertexVectorSearch--; +} + core.int buildCounterGoogleCloudAiplatformV1RawPredictRequest = 0; api.GoogleCloudAiplatformV1RawPredictRequest buildGoogleCloudAiplatformV1RawPredictRequest() { @@ -20851,12 +23078,12 @@ void checkGoogleCloudAiplatformV1RayMetricSpec( buildCounterGoogleCloudAiplatformV1RayMetricSpec--; } -core.Map buildUnnamed288() => { +core.Map buildUnnamed309() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed288(core.Map o) { +void checkUnnamed309(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -20877,7 +23104,7 @@ api.GoogleCloudAiplatformV1RaySpec buildGoogleCloudAiplatformV1RaySpec() { o.imageUri = 'foo'; o.rayLogsSpec = buildGoogleCloudAiplatformV1RayLogsSpec(); o.rayMetricSpec = buildGoogleCloudAiplatformV1RayMetricSpec(); - o.resourcePoolImages = buildUnnamed288(); + o.resourcePoolImages = buildUnnamed309(); } buildCounterGoogleCloudAiplatformV1RaySpec--; return o; @@ -20896,7 +23123,7 @@ void checkGoogleCloudAiplatformV1RaySpec(api.GoogleCloudAiplatformV1RaySpec o) { ); checkGoogleCloudAiplatformV1RayLogsSpec(o.rayLogsSpec!); checkGoogleCloudAiplatformV1RayMetricSpec(o.rayMetricSpec!); - checkUnnamed288(o.resourcePoolImages!); + checkUnnamed309(o.resourcePoolImages!); } buildCounterGoogleCloudAiplatformV1RaySpec--; } @@ -20953,12 +23180,12 @@ void checkGoogleCloudAiplatformV1ReadFeatureValuesResponse( } core.List - buildUnnamed289() => [ + buildUnnamed310() => [ buildGoogleCloudAiplatformV1ReadFeatureValuesResponseEntityViewData(), buildGoogleCloudAiplatformV1ReadFeatureValuesResponseEntityViewData(), ]; -void checkUnnamed289( +void checkUnnamed310( core.List< api.GoogleCloudAiplatformV1ReadFeatureValuesResponseEntityViewData> o) { @@ -20975,7 +23202,7 @@ api.GoogleCloudAiplatformV1ReadFeatureValuesResponseEntityView buildCounterGoogleCloudAiplatformV1ReadFeatureValuesResponseEntityView++; if (buildCounterGoogleCloudAiplatformV1ReadFeatureValuesResponseEntityView < 3) { - o.data = buildUnnamed289(); + o.data = buildUnnamed310(); o.entityId = 'foo'; } buildCounterGoogleCloudAiplatformV1ReadFeatureValuesResponseEntityView--; @@ -20987,7 +23214,7 @@ void checkGoogleCloudAiplatformV1ReadFeatureValuesResponseEntityView( 
buildCounterGoogleCloudAiplatformV1ReadFeatureValuesResponseEntityView++; if (buildCounterGoogleCloudAiplatformV1ReadFeatureValuesResponseEntityView < 3) { - checkUnnamed289(o.data!); + checkUnnamed310(o.data!); unittest.expect( o.entityId!, unittest.equals('foo'), @@ -21054,12 +23281,12 @@ void checkGoogleCloudAiplatformV1ReadFeatureValuesResponseFeatureDescriptor( } core.List - buildUnnamed290() => [ + buildUnnamed311() => [ buildGoogleCloudAiplatformV1ReadFeatureValuesResponseFeatureDescriptor(), buildGoogleCloudAiplatformV1ReadFeatureValuesResponseFeatureDescriptor(), ]; -void checkUnnamed290( +void checkUnnamed311( core.List< api .GoogleCloudAiplatformV1ReadFeatureValuesResponseFeatureDescriptor> @@ -21076,7 +23303,7 @@ api.GoogleCloudAiplatformV1ReadFeatureValuesResponseHeader buildCounterGoogleCloudAiplatformV1ReadFeatureValuesResponseHeader++; if (buildCounterGoogleCloudAiplatformV1ReadFeatureValuesResponseHeader < 3) { o.entityType = 'foo'; - o.featureDescriptors = buildUnnamed290(); + o.featureDescriptors = buildUnnamed311(); } buildCounterGoogleCloudAiplatformV1ReadFeatureValuesResponseHeader--; return o; @@ -21090,17 +23317,17 @@ void checkGoogleCloudAiplatformV1ReadFeatureValuesResponseHeader( o.entityType!, unittest.equals('foo'), ); - checkUnnamed290(o.featureDescriptors!); + checkUnnamed311(o.featureDescriptors!); } buildCounterGoogleCloudAiplatformV1ReadFeatureValuesResponseHeader--; } -core.List buildUnnamed291() => [ +core.List buildUnnamed312() => [ 'foo', 'foo', ]; -void checkUnnamed291(core.List o) { +void checkUnnamed312(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -21119,7 +23346,7 @@ api.GoogleCloudAiplatformV1ReadIndexDatapointsRequest buildCounterGoogleCloudAiplatformV1ReadIndexDatapointsRequest++; if (buildCounterGoogleCloudAiplatformV1ReadIndexDatapointsRequest < 3) { o.deployedIndexId = 'foo'; - o.ids = buildUnnamed291(); + o.ids = buildUnnamed312(); } buildCounterGoogleCloudAiplatformV1ReadIndexDatapointsRequest--; return o; @@ -21133,17 +23360,17 @@ void checkGoogleCloudAiplatformV1ReadIndexDatapointsRequest( o.deployedIndexId!, unittest.equals('foo'), ); - checkUnnamed291(o.ids!); + checkUnnamed312(o.ids!); } buildCounterGoogleCloudAiplatformV1ReadIndexDatapointsRequest--; } -core.List buildUnnamed292() => [ +core.List buildUnnamed313() => [ buildGoogleCloudAiplatformV1IndexDatapoint(), buildGoogleCloudAiplatformV1IndexDatapoint(), ]; -void checkUnnamed292(core.List o) { +void checkUnnamed313(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1IndexDatapoint(o[0]); checkGoogleCloudAiplatformV1IndexDatapoint(o[1]); @@ -21155,7 +23382,7 @@ api.GoogleCloudAiplatformV1ReadIndexDatapointsResponse final o = api.GoogleCloudAiplatformV1ReadIndexDatapointsResponse(); buildCounterGoogleCloudAiplatformV1ReadIndexDatapointsResponse++; if (buildCounterGoogleCloudAiplatformV1ReadIndexDatapointsResponse < 3) { - o.datapoints = buildUnnamed292(); + o.datapoints = buildUnnamed313(); } buildCounterGoogleCloudAiplatformV1ReadIndexDatapointsResponse--; return o; @@ -21165,17 +23392,17 @@ void checkGoogleCloudAiplatformV1ReadIndexDatapointsResponse( api.GoogleCloudAiplatformV1ReadIndexDatapointsResponse o) { buildCounterGoogleCloudAiplatformV1ReadIndexDatapointsResponse++; if (buildCounterGoogleCloudAiplatformV1ReadIndexDatapointsResponse < 3) { - checkUnnamed292(o.datapoints!); + checkUnnamed313(o.datapoints!); } buildCounterGoogleCloudAiplatformV1ReadIndexDatapointsResponse--; } -core.List 
buildUnnamed293() => [ +core.List buildUnnamed314() => [ buildGoogleCloudAiplatformV1TensorboardBlob(), buildGoogleCloudAiplatformV1TensorboardBlob(), ]; -void checkUnnamed293(core.List o) { +void checkUnnamed314(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1TensorboardBlob(o[0]); checkGoogleCloudAiplatformV1TensorboardBlob(o[1]); @@ -21187,7 +23414,7 @@ api.GoogleCloudAiplatformV1ReadTensorboardBlobDataResponse final o = api.GoogleCloudAiplatformV1ReadTensorboardBlobDataResponse(); buildCounterGoogleCloudAiplatformV1ReadTensorboardBlobDataResponse++; if (buildCounterGoogleCloudAiplatformV1ReadTensorboardBlobDataResponse < 3) { - o.blobs = buildUnnamed293(); + o.blobs = buildUnnamed314(); } buildCounterGoogleCloudAiplatformV1ReadTensorboardBlobDataResponse--; return o; @@ -21197,7 +23424,7 @@ void checkGoogleCloudAiplatformV1ReadTensorboardBlobDataResponse( api.GoogleCloudAiplatformV1ReadTensorboardBlobDataResponse o) { buildCounterGoogleCloudAiplatformV1ReadTensorboardBlobDataResponse++; if (buildCounterGoogleCloudAiplatformV1ReadTensorboardBlobDataResponse < 3) { - checkUnnamed293(o.blobs!); + checkUnnamed314(o.blobs!); } buildCounterGoogleCloudAiplatformV1ReadTensorboardBlobDataResponse--; } @@ -21255,14 +23482,14 @@ core.Map< core.String, api .GoogleCloudAiplatformV1ReadTensorboardUsageResponsePerMonthUsageData> - buildUnnamed294() => { + buildUnnamed315() => { 'x': buildGoogleCloudAiplatformV1ReadTensorboardUsageResponsePerMonthUsageData(), 'y': buildGoogleCloudAiplatformV1ReadTensorboardUsageResponsePerMonthUsageData(), }; -void checkUnnamed294( +void checkUnnamed315( core.Map< core.String, api @@ -21281,7 +23508,7 @@ api.GoogleCloudAiplatformV1ReadTensorboardUsageResponse final o = api.GoogleCloudAiplatformV1ReadTensorboardUsageResponse(); buildCounterGoogleCloudAiplatformV1ReadTensorboardUsageResponse++; if (buildCounterGoogleCloudAiplatformV1ReadTensorboardUsageResponse < 3) { - o.monthlyUsageData = buildUnnamed294(); + o.monthlyUsageData = buildUnnamed315(); } buildCounterGoogleCloudAiplatformV1ReadTensorboardUsageResponse--; return o; @@ -21291,19 +23518,19 @@ void checkGoogleCloudAiplatformV1ReadTensorboardUsageResponse( api.GoogleCloudAiplatformV1ReadTensorboardUsageResponse o) { buildCounterGoogleCloudAiplatformV1ReadTensorboardUsageResponse++; if (buildCounterGoogleCloudAiplatformV1ReadTensorboardUsageResponse < 3) { - checkUnnamed294(o.monthlyUsageData!); + checkUnnamed315(o.monthlyUsageData!); } buildCounterGoogleCloudAiplatformV1ReadTensorboardUsageResponse--; } core.List< api.GoogleCloudAiplatformV1ReadTensorboardUsageResponsePerUserUsageData> - buildUnnamed295() => [ + buildUnnamed316() => [ buildGoogleCloudAiplatformV1ReadTensorboardUsageResponsePerUserUsageData(), buildGoogleCloudAiplatformV1ReadTensorboardUsageResponsePerUserUsageData(), ]; -void checkUnnamed295( +void checkUnnamed316( core.List< api .GoogleCloudAiplatformV1ReadTensorboardUsageResponsePerUserUsageData> @@ -21325,7 +23552,7 @@ api.GoogleCloudAiplatformV1ReadTensorboardUsageResponsePerMonthUsageData buildCounterGoogleCloudAiplatformV1ReadTensorboardUsageResponsePerMonthUsageData++; if (buildCounterGoogleCloudAiplatformV1ReadTensorboardUsageResponsePerMonthUsageData < 3) { - o.userUsageData = buildUnnamed295(); + o.userUsageData = buildUnnamed316(); } buildCounterGoogleCloudAiplatformV1ReadTensorboardUsageResponsePerMonthUsageData--; return o; @@ -21337,7 +23564,7 @@ void checkGoogleCloudAiplatformV1ReadTensorboardUsageResponsePerMonthUsageData( 
buildCounterGoogleCloudAiplatformV1ReadTensorboardUsageResponsePerMonthUsageData++; if (buildCounterGoogleCloudAiplatformV1ReadTensorboardUsageResponsePerMonthUsageData < 3) { - checkUnnamed295(o.userUsageData!); + checkUnnamed316(o.userUsageData!); } buildCounterGoogleCloudAiplatformV1ReadTensorboardUsageResponsePerMonthUsageData--; } @@ -21376,6 +23603,176 @@ void checkGoogleCloudAiplatformV1ReadTensorboardUsageResponsePerUserUsageData( buildCounterGoogleCloudAiplatformV1ReadTensorboardUsageResponsePerUserUsageData--; } +core.int buildCounterGoogleCloudAiplatformV1ReasoningEngine = 0; +api.GoogleCloudAiplatformV1ReasoningEngine + buildGoogleCloudAiplatformV1ReasoningEngine() { + final o = api.GoogleCloudAiplatformV1ReasoningEngine(); + buildCounterGoogleCloudAiplatformV1ReasoningEngine++; + if (buildCounterGoogleCloudAiplatformV1ReasoningEngine < 3) { + o.createTime = 'foo'; + o.description = 'foo'; + o.displayName = 'foo'; + o.etag = 'foo'; + o.name = 'foo'; + o.spec = buildGoogleCloudAiplatformV1ReasoningEngineSpec(); + o.updateTime = 'foo'; + } + buildCounterGoogleCloudAiplatformV1ReasoningEngine--; + return o; +} + +void checkGoogleCloudAiplatformV1ReasoningEngine( + api.GoogleCloudAiplatformV1ReasoningEngine o) { + buildCounterGoogleCloudAiplatformV1ReasoningEngine++; + if (buildCounterGoogleCloudAiplatformV1ReasoningEngine < 3) { + unittest.expect( + o.createTime!, + unittest.equals('foo'), + ); + unittest.expect( + o.description!, + unittest.equals('foo'), + ); + unittest.expect( + o.displayName!, + unittest.equals('foo'), + ); + unittest.expect( + o.etag!, + unittest.equals('foo'), + ); + unittest.expect( + o.name!, + unittest.equals('foo'), + ); + checkGoogleCloudAiplatformV1ReasoningEngineSpec(o.spec!); + unittest.expect( + o.updateTime!, + unittest.equals('foo'), + ); + } + buildCounterGoogleCloudAiplatformV1ReasoningEngine--; +} + +core.Map buildUnnamed317() => { + 'x': { + 'list': [1, 2, 3], + 'bool': true, + 'string': 'foo' + }, + 'y': { + 'list': [1, 2, 3], + 'bool': true, + 'string': 'foo' + }, + }; + +void checkUnnamed317(core.Map o) { + unittest.expect(o, unittest.hasLength(2)); + var casted68 = (o['x']!) as core.Map; + unittest.expect(casted68, unittest.hasLength(3)); + unittest.expect( + casted68['list'], + unittest.equals([1, 2, 3]), + ); + unittest.expect( + casted68['bool'], + unittest.equals(true), + ); + unittest.expect( + casted68['string'], + unittest.equals('foo'), + ); + var casted69 = (o['y']!) 
as core.Map; + unittest.expect(casted69, unittest.hasLength(3)); + unittest.expect( + casted69['list'], + unittest.equals([1, 2, 3]), + ); + unittest.expect( + casted69['bool'], + unittest.equals(true), + ); + unittest.expect( + casted69['string'], + unittest.equals('foo'), + ); +} + +core.List> buildUnnamed318() => [ + buildUnnamed317(), + buildUnnamed317(), + ]; + +void checkUnnamed318(core.List> o) { + unittest.expect(o, unittest.hasLength(2)); + checkUnnamed317(o[0]); + checkUnnamed317(o[1]); +} + +core.int buildCounterGoogleCloudAiplatformV1ReasoningEngineSpec = 0; +api.GoogleCloudAiplatformV1ReasoningEngineSpec + buildGoogleCloudAiplatformV1ReasoningEngineSpec() { + final o = api.GoogleCloudAiplatformV1ReasoningEngineSpec(); + buildCounterGoogleCloudAiplatformV1ReasoningEngineSpec++; + if (buildCounterGoogleCloudAiplatformV1ReasoningEngineSpec < 3) { + o.classMethods = buildUnnamed318(); + o.packageSpec = + buildGoogleCloudAiplatformV1ReasoningEngineSpecPackageSpec(); + } + buildCounterGoogleCloudAiplatformV1ReasoningEngineSpec--; + return o; +} + +void checkGoogleCloudAiplatformV1ReasoningEngineSpec( + api.GoogleCloudAiplatformV1ReasoningEngineSpec o) { + buildCounterGoogleCloudAiplatformV1ReasoningEngineSpec++; + if (buildCounterGoogleCloudAiplatformV1ReasoningEngineSpec < 3) { + checkUnnamed318(o.classMethods!); + checkGoogleCloudAiplatformV1ReasoningEngineSpecPackageSpec(o.packageSpec!); + } + buildCounterGoogleCloudAiplatformV1ReasoningEngineSpec--; +} + +core.int buildCounterGoogleCloudAiplatformV1ReasoningEngineSpecPackageSpec = 0; +api.GoogleCloudAiplatformV1ReasoningEngineSpecPackageSpec + buildGoogleCloudAiplatformV1ReasoningEngineSpecPackageSpec() { + final o = api.GoogleCloudAiplatformV1ReasoningEngineSpecPackageSpec(); + buildCounterGoogleCloudAiplatformV1ReasoningEngineSpecPackageSpec++; + if (buildCounterGoogleCloudAiplatformV1ReasoningEngineSpecPackageSpec < 3) { + o.dependencyFilesGcsUri = 'foo'; + o.pickleObjectGcsUri = 'foo'; + o.pythonVersion = 'foo'; + o.requirementsGcsUri = 'foo'; + } + buildCounterGoogleCloudAiplatformV1ReasoningEngineSpecPackageSpec--; + return o; +} + +void checkGoogleCloudAiplatformV1ReasoningEngineSpecPackageSpec( + api.GoogleCloudAiplatformV1ReasoningEngineSpecPackageSpec o) { + buildCounterGoogleCloudAiplatformV1ReasoningEngineSpecPackageSpec++; + if (buildCounterGoogleCloudAiplatformV1ReasoningEngineSpecPackageSpec < 3) { + unittest.expect( + o.dependencyFilesGcsUri!, + unittest.equals('foo'), + ); + unittest.expect( + o.pickleObjectGcsUri!, + unittest.equals('foo'), + ); + unittest.expect( + o.pythonVersion!, + unittest.equals('foo'), + ); + unittest.expect( + o.requirementsGcsUri!, + unittest.equals('foo'), + ); + } + buildCounterGoogleCloudAiplatformV1ReasoningEngineSpecPackageSpec--; +} + core.int buildCounterGoogleCloudAiplatformV1RebaseTunedModelRequest = 0; api.GoogleCloudAiplatformV1RebaseTunedModelRequest buildGoogleCloudAiplatformV1RebaseTunedModelRequest() { @@ -21420,12 +23817,12 @@ void checkGoogleCloudAiplatformV1RebootPersistentResourceRequest( buildCounterGoogleCloudAiplatformV1RebootPersistentResourceRequest--; } -core.List buildUnnamed296() => [ +core.List buildUnnamed319() => [ 'foo', 'foo', ]; -void checkUnnamed296(core.List o) { +void checkUnnamed319(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -21443,7 +23840,7 @@ api.GoogleCloudAiplatformV1RemoveContextChildrenRequest final o = api.GoogleCloudAiplatformV1RemoveContextChildrenRequest(); 
buildCounterGoogleCloudAiplatformV1RemoveContextChildrenRequest++; if (buildCounterGoogleCloudAiplatformV1RemoveContextChildrenRequest < 3) { - o.childContexts = buildUnnamed296(); + o.childContexts = buildUnnamed319(); } buildCounterGoogleCloudAiplatformV1RemoveContextChildrenRequest--; return o; @@ -21453,7 +23850,7 @@ void checkGoogleCloudAiplatformV1RemoveContextChildrenRequest( api.GoogleCloudAiplatformV1RemoveContextChildrenRequest o) { buildCounterGoogleCloudAiplatformV1RemoveContextChildrenRequest++; if (buildCounterGoogleCloudAiplatformV1RemoveContextChildrenRequest < 3) { - checkUnnamed296(o.childContexts!); + checkUnnamed319(o.childContexts!); } buildCounterGoogleCloudAiplatformV1RemoveContextChildrenRequest--; } @@ -21475,12 +23872,12 @@ void checkGoogleCloudAiplatformV1RemoveContextChildrenResponse( buildCounterGoogleCloudAiplatformV1RemoveContextChildrenResponse--; } -core.List buildUnnamed297() => [ +core.List buildUnnamed320() => [ 'foo', 'foo', ]; -void checkUnnamed297(core.List o) { +void checkUnnamed320(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -21498,7 +23895,7 @@ api.GoogleCloudAiplatformV1RemoveDatapointsRequest final o = api.GoogleCloudAiplatformV1RemoveDatapointsRequest(); buildCounterGoogleCloudAiplatformV1RemoveDatapointsRequest++; if (buildCounterGoogleCloudAiplatformV1RemoveDatapointsRequest < 3) { - o.datapointIds = buildUnnamed297(); + o.datapointIds = buildUnnamed320(); } buildCounterGoogleCloudAiplatformV1RemoveDatapointsRequest--; return o; @@ -21508,7 +23905,7 @@ void checkGoogleCloudAiplatformV1RemoveDatapointsRequest( api.GoogleCloudAiplatformV1RemoveDatapointsRequest o) { buildCounterGoogleCloudAiplatformV1RemoveDatapointsRequest++; if (buildCounterGoogleCloudAiplatformV1RemoveDatapointsRequest < 3) { - checkUnnamed297(o.datapointIds!); + checkUnnamed320(o.datapointIds!); } buildCounterGoogleCloudAiplatformV1RemoveDatapointsRequest--; } @@ -21530,12 +23927,12 @@ void checkGoogleCloudAiplatformV1RemoveDatapointsResponse( buildCounterGoogleCloudAiplatformV1RemoveDatapointsResponse--; } -core.List buildUnnamed298() => [ +core.List buildUnnamed321() => [ 'foo', 'foo', ]; -void checkUnnamed298(core.List o) { +void checkUnnamed321(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -21555,7 +23952,7 @@ api.GoogleCloudAiplatformV1ReservationAffinity if (buildCounterGoogleCloudAiplatformV1ReservationAffinity < 3) { o.key = 'foo'; o.reservationAffinityType = 'foo'; - o.values = buildUnnamed298(); + o.values = buildUnnamed321(); } buildCounterGoogleCloudAiplatformV1ReservationAffinity--; return o; @@ -21573,7 +23970,7 @@ void checkGoogleCloudAiplatformV1ReservationAffinity( o.reservationAffinityType!, unittest.equals('foo'), ); - checkUnnamed298(o.values!); + checkUnnamed321(o.values!); } buildCounterGoogleCloudAiplatformV1ReservationAffinity--; } @@ -21648,12 +24045,12 @@ void checkGoogleCloudAiplatformV1ResourcePoolAutoscalingSpec( buildCounterGoogleCloudAiplatformV1ResourcePoolAutoscalingSpec--; } -core.Map buildUnnamed299() => { +core.Map buildUnnamed322() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed299(core.Map o) { +void checkUnnamed322(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -21671,7 +24068,7 @@ api.GoogleCloudAiplatformV1ResourceRuntime final o = api.GoogleCloudAiplatformV1ResourceRuntime(); buildCounterGoogleCloudAiplatformV1ResourceRuntime++; if (buildCounterGoogleCloudAiplatformV1ResourceRuntime < 3) { - o.accessUris = 
buildUnnamed299(); + o.accessUris = buildUnnamed322(); } buildCounterGoogleCloudAiplatformV1ResourceRuntime--; return o; @@ -21681,7 +24078,7 @@ void checkGoogleCloudAiplatformV1ResourceRuntime( api.GoogleCloudAiplatformV1ResourceRuntime o) { buildCounterGoogleCloudAiplatformV1ResourceRuntime++; if (buildCounterGoogleCloudAiplatformV1ResourceRuntime < 3) { - checkUnnamed299(o.accessUris!); + checkUnnamed322(o.accessUris!); } buildCounterGoogleCloudAiplatformV1ResourceRuntime--; } @@ -21824,12 +24221,157 @@ void checkGoogleCloudAiplatformV1RetrievalMetadata( buildCounterGoogleCloudAiplatformV1RetrievalMetadata--; } -core.List buildUnnamed300() => [ +core.int buildCounterGoogleCloudAiplatformV1RetrieveContextsRequest = 0; +api.GoogleCloudAiplatformV1RetrieveContextsRequest + buildGoogleCloudAiplatformV1RetrieveContextsRequest() { + final o = api.GoogleCloudAiplatformV1RetrieveContextsRequest(); + buildCounterGoogleCloudAiplatformV1RetrieveContextsRequest++; + if (buildCounterGoogleCloudAiplatformV1RetrieveContextsRequest < 3) { + o.query = buildGoogleCloudAiplatformV1RagQuery(); + o.vertexRagStore = + buildGoogleCloudAiplatformV1RetrieveContextsRequestVertexRagStore(); + } + buildCounterGoogleCloudAiplatformV1RetrieveContextsRequest--; + return o; +} + +void checkGoogleCloudAiplatformV1RetrieveContextsRequest( + api.GoogleCloudAiplatformV1RetrieveContextsRequest o) { + buildCounterGoogleCloudAiplatformV1RetrieveContextsRequest++; + if (buildCounterGoogleCloudAiplatformV1RetrieveContextsRequest < 3) { + checkGoogleCloudAiplatformV1RagQuery(o.query!); + checkGoogleCloudAiplatformV1RetrieveContextsRequestVertexRagStore( + o.vertexRagStore!); + } + buildCounterGoogleCloudAiplatformV1RetrieveContextsRequest--; +} + +core.List< + api + .GoogleCloudAiplatformV1RetrieveContextsRequestVertexRagStoreRagResource> + buildUnnamed323() => [ + buildGoogleCloudAiplatformV1RetrieveContextsRequestVertexRagStoreRagResource(), + buildGoogleCloudAiplatformV1RetrieveContextsRequestVertexRagStoreRagResource(), + ]; + +void checkUnnamed323( + core.List< + api + .GoogleCloudAiplatformV1RetrieveContextsRequestVertexRagStoreRagResource> + o) { + unittest.expect(o, unittest.hasLength(2)); + checkGoogleCloudAiplatformV1RetrieveContextsRequestVertexRagStoreRagResource( + o[0]); + checkGoogleCloudAiplatformV1RetrieveContextsRequestVertexRagStoreRagResource( + o[1]); +} + +core.int + buildCounterGoogleCloudAiplatformV1RetrieveContextsRequestVertexRagStore = + 0; +api.GoogleCloudAiplatformV1RetrieveContextsRequestVertexRagStore + buildGoogleCloudAiplatformV1RetrieveContextsRequestVertexRagStore() { + final o = api.GoogleCloudAiplatformV1RetrieveContextsRequestVertexRagStore(); + buildCounterGoogleCloudAiplatformV1RetrieveContextsRequestVertexRagStore++; + if (buildCounterGoogleCloudAiplatformV1RetrieveContextsRequestVertexRagStore < + 3) { + o.ragResources = buildUnnamed323(); + o.vectorDistanceThreshold = 42.0; + } + buildCounterGoogleCloudAiplatformV1RetrieveContextsRequestVertexRagStore--; + return o; +} + +void checkGoogleCloudAiplatformV1RetrieveContextsRequestVertexRagStore( + api.GoogleCloudAiplatformV1RetrieveContextsRequestVertexRagStore o) { + buildCounterGoogleCloudAiplatformV1RetrieveContextsRequestVertexRagStore++; + if (buildCounterGoogleCloudAiplatformV1RetrieveContextsRequestVertexRagStore < + 3) { + checkUnnamed323(o.ragResources!); + unittest.expect( + o.vectorDistanceThreshold!, + unittest.equals(42.0), + ); + } + buildCounterGoogleCloudAiplatformV1RetrieveContextsRequestVertexRagStore--; +} + +core.List 
buildUnnamed324() => [ + 'foo', + 'foo', + ]; + +void checkUnnamed324(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o[0], + unittest.equals('foo'), + ); + unittest.expect( + o[1], + unittest.equals('foo'), + ); +} + +core.int + buildCounterGoogleCloudAiplatformV1RetrieveContextsRequestVertexRagStoreRagResource = + 0; +api.GoogleCloudAiplatformV1RetrieveContextsRequestVertexRagStoreRagResource + buildGoogleCloudAiplatformV1RetrieveContextsRequestVertexRagStoreRagResource() { + final o = api + .GoogleCloudAiplatformV1RetrieveContextsRequestVertexRagStoreRagResource(); + buildCounterGoogleCloudAiplatformV1RetrieveContextsRequestVertexRagStoreRagResource++; + if (buildCounterGoogleCloudAiplatformV1RetrieveContextsRequestVertexRagStoreRagResource < + 3) { + o.ragCorpus = 'foo'; + o.ragFileIds = buildUnnamed324(); + } + buildCounterGoogleCloudAiplatformV1RetrieveContextsRequestVertexRagStoreRagResource--; + return o; +} + +void checkGoogleCloudAiplatformV1RetrieveContextsRequestVertexRagStoreRagResource( + api.GoogleCloudAiplatformV1RetrieveContextsRequestVertexRagStoreRagResource + o) { + buildCounterGoogleCloudAiplatformV1RetrieveContextsRequestVertexRagStoreRagResource++; + if (buildCounterGoogleCloudAiplatformV1RetrieveContextsRequestVertexRagStoreRagResource < + 3) { + unittest.expect( + o.ragCorpus!, + unittest.equals('foo'), + ); + checkUnnamed324(o.ragFileIds!); + } + buildCounterGoogleCloudAiplatformV1RetrieveContextsRequestVertexRagStoreRagResource--; +} + +core.int buildCounterGoogleCloudAiplatformV1RetrieveContextsResponse = 0; +api.GoogleCloudAiplatformV1RetrieveContextsResponse + buildGoogleCloudAiplatformV1RetrieveContextsResponse() { + final o = api.GoogleCloudAiplatformV1RetrieveContextsResponse(); + buildCounterGoogleCloudAiplatformV1RetrieveContextsResponse++; + if (buildCounterGoogleCloudAiplatformV1RetrieveContextsResponse < 3) { + o.contexts = buildGoogleCloudAiplatformV1RagContexts(); + } + buildCounterGoogleCloudAiplatformV1RetrieveContextsResponse--; + return o; +} + +void checkGoogleCloudAiplatformV1RetrieveContextsResponse( + api.GoogleCloudAiplatformV1RetrieveContextsResponse o) { + buildCounterGoogleCloudAiplatformV1RetrieveContextsResponse++; + if (buildCounterGoogleCloudAiplatformV1RetrieveContextsResponse < 3) { + checkGoogleCloudAiplatformV1RagContexts(o.contexts!); + } + buildCounterGoogleCloudAiplatformV1RetrieveContextsResponse--; +} + +core.List buildUnnamed325() => [ buildGoogleCloudAiplatformV1RougeInstance(), buildGoogleCloudAiplatformV1RougeInstance(), ]; -void checkUnnamed300(core.List o) { +void checkUnnamed325(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1RougeInstance(o[0]); checkGoogleCloudAiplatformV1RougeInstance(o[1]); @@ -21840,7 +24382,7 @@ api.GoogleCloudAiplatformV1RougeInput buildGoogleCloudAiplatformV1RougeInput() { final o = api.GoogleCloudAiplatformV1RougeInput(); buildCounterGoogleCloudAiplatformV1RougeInput++; if (buildCounterGoogleCloudAiplatformV1RougeInput < 3) { - o.instances = buildUnnamed300(); + o.instances = buildUnnamed325(); o.metricSpec = buildGoogleCloudAiplatformV1RougeSpec(); } buildCounterGoogleCloudAiplatformV1RougeInput--; @@ -21851,7 +24393,7 @@ void checkGoogleCloudAiplatformV1RougeInput( api.GoogleCloudAiplatformV1RougeInput o) { buildCounterGoogleCloudAiplatformV1RougeInput++; if (buildCounterGoogleCloudAiplatformV1RougeInput < 3) { - checkUnnamed300(o.instances!); + checkUnnamed325(o.instances!); 
checkGoogleCloudAiplatformV1RougeSpec(o.metricSpec!); } buildCounterGoogleCloudAiplatformV1RougeInput--; @@ -21910,12 +24452,12 @@ void checkGoogleCloudAiplatformV1RougeMetricValue( buildCounterGoogleCloudAiplatformV1RougeMetricValue--; } -core.List buildUnnamed301() => [ +core.List buildUnnamed326() => [ buildGoogleCloudAiplatformV1RougeMetricValue(), buildGoogleCloudAiplatformV1RougeMetricValue(), ]; -void checkUnnamed301(core.List o) { +void checkUnnamed326(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1RougeMetricValue(o[0]); checkGoogleCloudAiplatformV1RougeMetricValue(o[1]); @@ -21927,7 +24469,7 @@ api.GoogleCloudAiplatformV1RougeResults final o = api.GoogleCloudAiplatformV1RougeResults(); buildCounterGoogleCloudAiplatformV1RougeResults++; if (buildCounterGoogleCloudAiplatformV1RougeResults < 3) { - o.rougeMetricValues = buildUnnamed301(); + o.rougeMetricValues = buildUnnamed326(); } buildCounterGoogleCloudAiplatformV1RougeResults--; return o; @@ -21937,7 +24479,7 @@ void checkGoogleCloudAiplatformV1RougeResults( api.GoogleCloudAiplatformV1RougeResults o) { buildCounterGoogleCloudAiplatformV1RougeResults++; if (buildCounterGoogleCloudAiplatformV1RougeResults < 3) { - checkUnnamed301(o.rougeMetricValues!); + checkUnnamed326(o.rougeMetricValues!); } buildCounterGoogleCloudAiplatformV1RougeResults--; } @@ -22309,18 +24851,18 @@ void checkGoogleCloudAiplatformV1SavedQuery( o.etag!, unittest.equals('foo'), ); - var casted62 = (o.metadata!) as core.Map; - unittest.expect(casted62, unittest.hasLength(3)); + var casted70 = (o.metadata!) as core.Map; + unittest.expect(casted70, unittest.hasLength(3)); unittest.expect( - casted62['list'], + casted70['list'], unittest.equals([1, 2, 3]), ); unittest.expect( - casted62['bool'], + casted70['bool'], unittest.equals(true), ); unittest.expect( - casted62['string'], + casted70['string'], unittest.equals('foo'), ); unittest.expect( @@ -22532,23 +25074,23 @@ void checkGoogleCloudAiplatformV1Scheduling( buildCounterGoogleCloudAiplatformV1Scheduling--; } -core.List buildUnnamed302() => [ +core.List buildUnnamed327() => [ buildGoogleCloudAiplatformV1Schema(), buildGoogleCloudAiplatformV1Schema(), ]; -void checkUnnamed302(core.List o) { +void checkUnnamed327(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1Schema(o[0]); checkGoogleCloudAiplatformV1Schema(o[1]); } -core.List buildUnnamed303() => [ +core.List buildUnnamed328() => [ 'foo', 'foo', ]; -void checkUnnamed303(core.List o) { +void checkUnnamed328(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -22560,24 +25102,24 @@ void checkUnnamed303(core.List o) { ); } -core.Map buildUnnamed304() => { +core.Map buildUnnamed329() => { 'x': buildGoogleCloudAiplatformV1Schema(), 'y': buildGoogleCloudAiplatformV1Schema(), }; -void checkUnnamed304( +void checkUnnamed329( core.Map o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1Schema(o['x']!); checkGoogleCloudAiplatformV1Schema(o['y']!); } -core.List buildUnnamed305() => [ +core.List buildUnnamed330() => [ 'foo', 'foo', ]; -void checkUnnamed305(core.List o) { +void checkUnnamed330(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -22589,12 +25131,12 @@ void checkUnnamed305(core.List o) { ); } -core.List buildUnnamed306() => [ +core.List buildUnnamed331() => [ 'foo', 'foo', ]; -void checkUnnamed306(core.List o) { +void checkUnnamed331(core.List o) { unittest.expect(o, unittest.hasLength(2)); 
unittest.expect( o[0], @@ -22611,14 +25153,14 @@ api.GoogleCloudAiplatformV1Schema buildGoogleCloudAiplatformV1Schema() { final o = api.GoogleCloudAiplatformV1Schema(); buildCounterGoogleCloudAiplatformV1Schema++; if (buildCounterGoogleCloudAiplatformV1Schema < 3) { - o.anyOf = buildUnnamed302(); + o.anyOf = buildUnnamed327(); o.default_ = { 'list': [1, 2, 3], 'bool': true, 'string': 'foo' }; o.description = 'foo'; - o.enum_ = buildUnnamed303(); + o.enum_ = buildUnnamed328(); o.example = { 'list': [1, 2, 3], 'bool': true, @@ -22636,9 +25178,9 @@ api.GoogleCloudAiplatformV1Schema buildGoogleCloudAiplatformV1Schema() { o.minimum = 42.0; o.nullable = true; o.pattern = 'foo'; - o.properties = buildUnnamed304(); - o.propertyOrdering = buildUnnamed305(); - o.required = buildUnnamed306(); + o.properties = buildUnnamed329(); + o.propertyOrdering = buildUnnamed330(); + o.required = buildUnnamed331(); o.title = 'foo'; o.type = 'foo'; } @@ -22649,38 +25191,38 @@ api.GoogleCloudAiplatformV1Schema buildGoogleCloudAiplatformV1Schema() { void checkGoogleCloudAiplatformV1Schema(api.GoogleCloudAiplatformV1Schema o) { buildCounterGoogleCloudAiplatformV1Schema++; if (buildCounterGoogleCloudAiplatformV1Schema < 3) { - checkUnnamed302(o.anyOf!); - var casted63 = (o.default_!) as core.Map; - unittest.expect(casted63, unittest.hasLength(3)); + checkUnnamed327(o.anyOf!); + var casted71 = (o.default_!) as core.Map; + unittest.expect(casted71, unittest.hasLength(3)); unittest.expect( - casted63['list'], + casted71['list'], unittest.equals([1, 2, 3]), ); unittest.expect( - casted63['bool'], + casted71['bool'], unittest.equals(true), ); unittest.expect( - casted63['string'], + casted71['string'], unittest.equals('foo'), ); unittest.expect( o.description!, unittest.equals('foo'), ); - checkUnnamed303(o.enum_!); - var casted64 = (o.example!) as core.Map; - unittest.expect(casted64, unittest.hasLength(3)); + checkUnnamed328(o.enum_!); + var casted72 = (o.example!) 
as core.Map; + unittest.expect(casted72, unittest.hasLength(3)); unittest.expect( - casted64['list'], + casted72['list'], unittest.equals([1, 2, 3]), ); unittest.expect( - casted64['bool'], + casted72['bool'], unittest.equals(true), ); unittest.expect( - casted64['string'], + casted72['string'], unittest.equals('foo'), ); unittest.expect( @@ -22725,9 +25267,9 @@ void checkGoogleCloudAiplatformV1Schema(api.GoogleCloudAiplatformV1Schema o) { o.pattern!, unittest.equals('foo'), ); - checkUnnamed304(o.properties!); - checkUnnamed305(o.propertyOrdering!); - checkUnnamed306(o.required!); + checkUnnamed329(o.properties!); + checkUnnamed330(o.propertyOrdering!); + checkUnnamed331(o.required!); unittest.expect( o.title!, unittest.equals('foo'), @@ -22740,12 +25282,12 @@ void checkGoogleCloudAiplatformV1Schema(api.GoogleCloudAiplatformV1Schema o) { buildCounterGoogleCloudAiplatformV1Schema--; } -core.List buildUnnamed307() => [ +core.List buildUnnamed332() => [ buildGoogleCloudAiplatformV1DataItemView(), buildGoogleCloudAiplatformV1DataItemView(), ]; -void checkUnnamed307(core.List o) { +void checkUnnamed332(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1DataItemView(o[0]); checkGoogleCloudAiplatformV1DataItemView(o[1]); @@ -22757,7 +25299,7 @@ api.GoogleCloudAiplatformV1SearchDataItemsResponse final o = api.GoogleCloudAiplatformV1SearchDataItemsResponse(); buildCounterGoogleCloudAiplatformV1SearchDataItemsResponse++; if (buildCounterGoogleCloudAiplatformV1SearchDataItemsResponse < 3) { - o.dataItemViews = buildUnnamed307(); + o.dataItemViews = buildUnnamed332(); o.nextPageToken = 'foo'; } buildCounterGoogleCloudAiplatformV1SearchDataItemsResponse--; @@ -22768,7 +25310,7 @@ void checkGoogleCloudAiplatformV1SearchDataItemsResponse( api.GoogleCloudAiplatformV1SearchDataItemsResponse o) { buildCounterGoogleCloudAiplatformV1SearchDataItemsResponse++; if (buildCounterGoogleCloudAiplatformV1SearchDataItemsResponse < 3) { - checkUnnamed307(o.dataItemViews!); + checkUnnamed332(o.dataItemViews!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -22806,12 +25348,12 @@ void checkGoogleCloudAiplatformV1SearchEntryPoint( buildCounterGoogleCloudAiplatformV1SearchEntryPoint--; } -core.List buildUnnamed308() => [ +core.List buildUnnamed333() => [ buildGoogleCloudAiplatformV1Feature(), buildGoogleCloudAiplatformV1Feature(), ]; -void checkUnnamed308(core.List o) { +void checkUnnamed333(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1Feature(o[0]); checkGoogleCloudAiplatformV1Feature(o[1]); @@ -22823,7 +25365,7 @@ api.GoogleCloudAiplatformV1SearchFeaturesResponse final o = api.GoogleCloudAiplatformV1SearchFeaturesResponse(); buildCounterGoogleCloudAiplatformV1SearchFeaturesResponse++; if (buildCounterGoogleCloudAiplatformV1SearchFeaturesResponse < 3) { - o.features = buildUnnamed308(); + o.features = buildUnnamed333(); o.nextPageToken = 'foo'; } buildCounterGoogleCloudAiplatformV1SearchFeaturesResponse--; @@ -22834,7 +25376,7 @@ void checkGoogleCloudAiplatformV1SearchFeaturesResponse( api.GoogleCloudAiplatformV1SearchFeaturesResponse o) { buildCounterGoogleCloudAiplatformV1SearchFeaturesResponse++; if (buildCounterGoogleCloudAiplatformV1SearchFeaturesResponse < 3) { - checkUnnamed308(o.features!); + checkUnnamed333(o.features!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -22878,12 +25420,12 @@ void checkGoogleCloudAiplatformV1SearchMigratableResourcesRequest( 
buildCounterGoogleCloudAiplatformV1SearchMigratableResourcesRequest--; } -core.List buildUnnamed309() => [ +core.List buildUnnamed334() => [ buildGoogleCloudAiplatformV1MigratableResource(), buildGoogleCloudAiplatformV1MigratableResource(), ]; -void checkUnnamed309( +void checkUnnamed334( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1MigratableResource(o[0]); @@ -22898,7 +25440,7 @@ api.GoogleCloudAiplatformV1SearchMigratableResourcesResponse buildCounterGoogleCloudAiplatformV1SearchMigratableResourcesResponse++; if (buildCounterGoogleCloudAiplatformV1SearchMigratableResourcesResponse < 3) { - o.migratableResources = buildUnnamed309(); + o.migratableResources = buildUnnamed334(); o.nextPageToken = 'foo'; } buildCounterGoogleCloudAiplatformV1SearchMigratableResourcesResponse--; @@ -22910,7 +25452,7 @@ void checkGoogleCloudAiplatformV1SearchMigratableResourcesResponse( buildCounterGoogleCloudAiplatformV1SearchMigratableResourcesResponse++; if (buildCounterGoogleCloudAiplatformV1SearchMigratableResourcesResponse < 3) { - checkUnnamed309(o.migratableResources!); + checkUnnamed334(o.migratableResources!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -22922,12 +25464,12 @@ void checkGoogleCloudAiplatformV1SearchMigratableResourcesResponse( core.List< api .GoogleCloudAiplatformV1SearchModelDeploymentMonitoringStatsAnomaliesRequestStatsAnomaliesObjective> - buildUnnamed310() => [ + buildUnnamed335() => [ buildGoogleCloudAiplatformV1SearchModelDeploymentMonitoringStatsAnomaliesRequestStatsAnomaliesObjective(), buildGoogleCloudAiplatformV1SearchModelDeploymentMonitoringStatsAnomaliesRequestStatsAnomaliesObjective(), ]; -void checkUnnamed310( +void checkUnnamed335( core.List< api .GoogleCloudAiplatformV1SearchModelDeploymentMonitoringStatsAnomaliesRequestStatsAnomaliesObjective> @@ -22952,7 +25494,7 @@ api.GoogleCloudAiplatformV1SearchModelDeploymentMonitoringStatsAnomaliesRequest o.deployedModelId = 'foo'; o.endTime = 'foo'; o.featureDisplayName = 'foo'; - o.objectives = buildUnnamed310(); + o.objectives = buildUnnamed335(); o.pageSize = 42; o.pageToken = 'foo'; o.startTime = 'foo'; @@ -22979,7 +25521,7 @@ void checkGoogleCloudAiplatformV1SearchModelDeploymentMonitoringStatsAnomaliesRe o.featureDisplayName!, unittest.equals('foo'), ); - checkUnnamed310(o.objectives!); + checkUnnamed335(o.objectives!); unittest.expect( o.pageSize!, unittest.equals(42), @@ -23032,12 +25574,12 @@ void checkGoogleCloudAiplatformV1SearchModelDeploymentMonitoringStatsAnomaliesRe } core.List - buildUnnamed311() => [ + buildUnnamed336() => [ buildGoogleCloudAiplatformV1ModelMonitoringStatsAnomalies(), buildGoogleCloudAiplatformV1ModelMonitoringStatsAnomalies(), ]; -void checkUnnamed311( +void checkUnnamed336( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1ModelMonitoringStatsAnomalies(o[0]); @@ -23054,7 +25596,7 @@ api.GoogleCloudAiplatformV1SearchModelDeploymentMonitoringStatsAnomaliesResponse buildCounterGoogleCloudAiplatformV1SearchModelDeploymentMonitoringStatsAnomaliesResponse++; if (buildCounterGoogleCloudAiplatformV1SearchModelDeploymentMonitoringStatsAnomaliesResponse < 3) { - o.monitoringStats = buildUnnamed311(); + o.monitoringStats = buildUnnamed336(); o.nextPageToken = 'foo'; } buildCounterGoogleCloudAiplatformV1SearchModelDeploymentMonitoringStatsAnomaliesResponse--; @@ -23067,7 +25609,7 @@ void checkGoogleCloudAiplatformV1SearchModelDeploymentMonitoringStatsAnomaliesRe 
buildCounterGoogleCloudAiplatformV1SearchModelDeploymentMonitoringStatsAnomaliesResponse++; if (buildCounterGoogleCloudAiplatformV1SearchModelDeploymentMonitoringStatsAnomaliesResponse < 3) { - checkUnnamed311(o.monitoringStats!); + checkUnnamed336(o.monitoringStats!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -23183,6 +25725,104 @@ void checkGoogleCloudAiplatformV1ServiceAccountSpec( buildCounterGoogleCloudAiplatformV1ServiceAccountSpec--; } +core.List + buildUnnamed337() => [ + buildGoogleCloudAiplatformV1SharePointSourcesSharePointSource(), + buildGoogleCloudAiplatformV1SharePointSourcesSharePointSource(), + ]; + +void checkUnnamed337( + core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkGoogleCloudAiplatformV1SharePointSourcesSharePointSource(o[0]); + checkGoogleCloudAiplatformV1SharePointSourcesSharePointSource(o[1]); +} + +core.int buildCounterGoogleCloudAiplatformV1SharePointSources = 0; +api.GoogleCloudAiplatformV1SharePointSources + buildGoogleCloudAiplatformV1SharePointSources() { + final o = api.GoogleCloudAiplatformV1SharePointSources(); + buildCounterGoogleCloudAiplatformV1SharePointSources++; + if (buildCounterGoogleCloudAiplatformV1SharePointSources < 3) { + o.sharePointSources = buildUnnamed337(); + } + buildCounterGoogleCloudAiplatformV1SharePointSources--; + return o; +} + +void checkGoogleCloudAiplatformV1SharePointSources( + api.GoogleCloudAiplatformV1SharePointSources o) { + buildCounterGoogleCloudAiplatformV1SharePointSources++; + if (buildCounterGoogleCloudAiplatformV1SharePointSources < 3) { + checkUnnamed337(o.sharePointSources!); + } + buildCounterGoogleCloudAiplatformV1SharePointSources--; +} + +core.int buildCounterGoogleCloudAiplatformV1SharePointSourcesSharePointSource = + 0; +api.GoogleCloudAiplatformV1SharePointSourcesSharePointSource + buildGoogleCloudAiplatformV1SharePointSourcesSharePointSource() { + final o = api.GoogleCloudAiplatformV1SharePointSourcesSharePointSource(); + buildCounterGoogleCloudAiplatformV1SharePointSourcesSharePointSource++; + if (buildCounterGoogleCloudAiplatformV1SharePointSourcesSharePointSource < + 3) { + o.clientId = 'foo'; + o.clientSecret = buildGoogleCloudAiplatformV1ApiAuthApiKeyConfig(); + o.driveId = 'foo'; + o.driveName = 'foo'; + o.fileId = 'foo'; + o.sharepointFolderId = 'foo'; + o.sharepointFolderPath = 'foo'; + o.sharepointSiteName = 'foo'; + o.tenantId = 'foo'; + } + buildCounterGoogleCloudAiplatformV1SharePointSourcesSharePointSource--; + return o; +} + +void checkGoogleCloudAiplatformV1SharePointSourcesSharePointSource( + api.GoogleCloudAiplatformV1SharePointSourcesSharePointSource o) { + buildCounterGoogleCloudAiplatformV1SharePointSourcesSharePointSource++; + if (buildCounterGoogleCloudAiplatformV1SharePointSourcesSharePointSource < + 3) { + unittest.expect( + o.clientId!, + unittest.equals('foo'), + ); + checkGoogleCloudAiplatformV1ApiAuthApiKeyConfig(o.clientSecret!); + unittest.expect( + o.driveId!, + unittest.equals('foo'), + ); + unittest.expect( + o.driveName!, + unittest.equals('foo'), + ); + unittest.expect( + o.fileId!, + unittest.equals('foo'), + ); + unittest.expect( + o.sharepointFolderId!, + unittest.equals('foo'), + ); + unittest.expect( + o.sharepointFolderPath!, + unittest.equals('foo'), + ); + unittest.expect( + o.sharepointSiteName!, + unittest.equals('foo'), + ); + unittest.expect( + o.tenantId!, + unittest.equals('foo'), + ); + } + buildCounterGoogleCloudAiplatformV1SharePointSourcesSharePointSource--; +} + core.int 
buildCounterGoogleCloudAiplatformV1ShieldedVmConfig = 0; api.GoogleCloudAiplatformV1ShieldedVmConfig buildGoogleCloudAiplatformV1ShieldedVmConfig() { @@ -23204,6 +25844,114 @@ void checkGoogleCloudAiplatformV1ShieldedVmConfig( buildCounterGoogleCloudAiplatformV1ShieldedVmConfig--; } +core.List + buildUnnamed338() => [ + buildGoogleCloudAiplatformV1SlackSourceSlackChannels(), + buildGoogleCloudAiplatformV1SlackSourceSlackChannels(), + ]; + +void checkUnnamed338( + core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkGoogleCloudAiplatformV1SlackSourceSlackChannels(o[0]); + checkGoogleCloudAiplatformV1SlackSourceSlackChannels(o[1]); +} + +core.int buildCounterGoogleCloudAiplatformV1SlackSource = 0; +api.GoogleCloudAiplatformV1SlackSource + buildGoogleCloudAiplatformV1SlackSource() { + final o = api.GoogleCloudAiplatformV1SlackSource(); + buildCounterGoogleCloudAiplatformV1SlackSource++; + if (buildCounterGoogleCloudAiplatformV1SlackSource < 3) { + o.channels = buildUnnamed338(); + } + buildCounterGoogleCloudAiplatformV1SlackSource--; + return o; +} + +void checkGoogleCloudAiplatformV1SlackSource( + api.GoogleCloudAiplatformV1SlackSource o) { + buildCounterGoogleCloudAiplatformV1SlackSource++; + if (buildCounterGoogleCloudAiplatformV1SlackSource < 3) { + checkUnnamed338(o.channels!); + } + buildCounterGoogleCloudAiplatformV1SlackSource--; +} + +core.List + buildUnnamed339() => [ + buildGoogleCloudAiplatformV1SlackSourceSlackChannelsSlackChannel(), + buildGoogleCloudAiplatformV1SlackSourceSlackChannelsSlackChannel(), + ]; + +void checkUnnamed339( + core.List + o) { + unittest.expect(o, unittest.hasLength(2)); + checkGoogleCloudAiplatformV1SlackSourceSlackChannelsSlackChannel(o[0]); + checkGoogleCloudAiplatformV1SlackSourceSlackChannelsSlackChannel(o[1]); +} + +core.int buildCounterGoogleCloudAiplatformV1SlackSourceSlackChannels = 0; +api.GoogleCloudAiplatformV1SlackSourceSlackChannels + buildGoogleCloudAiplatformV1SlackSourceSlackChannels() { + final o = api.GoogleCloudAiplatformV1SlackSourceSlackChannels(); + buildCounterGoogleCloudAiplatformV1SlackSourceSlackChannels++; + if (buildCounterGoogleCloudAiplatformV1SlackSourceSlackChannels < 3) { + o.apiKeyConfig = buildGoogleCloudAiplatformV1ApiAuthApiKeyConfig(); + o.channels = buildUnnamed339(); + } + buildCounterGoogleCloudAiplatformV1SlackSourceSlackChannels--; + return o; +} + +void checkGoogleCloudAiplatformV1SlackSourceSlackChannels( + api.GoogleCloudAiplatformV1SlackSourceSlackChannels o) { + buildCounterGoogleCloudAiplatformV1SlackSourceSlackChannels++; + if (buildCounterGoogleCloudAiplatformV1SlackSourceSlackChannels < 3) { + checkGoogleCloudAiplatformV1ApiAuthApiKeyConfig(o.apiKeyConfig!); + checkUnnamed339(o.channels!); + } + buildCounterGoogleCloudAiplatformV1SlackSourceSlackChannels--; +} + +core.int + buildCounterGoogleCloudAiplatformV1SlackSourceSlackChannelsSlackChannel = 0; +api.GoogleCloudAiplatformV1SlackSourceSlackChannelsSlackChannel + buildGoogleCloudAiplatformV1SlackSourceSlackChannelsSlackChannel() { + final o = api.GoogleCloudAiplatformV1SlackSourceSlackChannelsSlackChannel(); + buildCounterGoogleCloudAiplatformV1SlackSourceSlackChannelsSlackChannel++; + if (buildCounterGoogleCloudAiplatformV1SlackSourceSlackChannelsSlackChannel < + 3) { + o.channelId = 'foo'; + o.endTime = 'foo'; + o.startTime = 'foo'; + } + buildCounterGoogleCloudAiplatformV1SlackSourceSlackChannelsSlackChannel--; + return o; +} + +void checkGoogleCloudAiplatformV1SlackSourceSlackChannelsSlackChannel( + 
api.GoogleCloudAiplatformV1SlackSourceSlackChannelsSlackChannel o) { + buildCounterGoogleCloudAiplatformV1SlackSourceSlackChannelsSlackChannel++; + if (buildCounterGoogleCloudAiplatformV1SlackSourceSlackChannelsSlackChannel < + 3) { + unittest.expect( + o.channelId!, + unittest.equals('foo'), + ); + unittest.expect( + o.endTime!, + unittest.equals('foo'), + ); + unittest.expect( + o.startTime!, + unittest.equals('foo'), + ); + } + buildCounterGoogleCloudAiplatformV1SlackSourceSlackChannelsSlackChannel--; +} + core.int buildCounterGoogleCloudAiplatformV1SmoothGradConfig = 0; api.GoogleCloudAiplatformV1SmoothGradConfig buildGoogleCloudAiplatformV1SmoothGradConfig() { @@ -23235,12 +25983,12 @@ void checkGoogleCloudAiplatformV1SmoothGradConfig( buildCounterGoogleCloudAiplatformV1SmoothGradConfig--; } -core.List buildUnnamed312() => [ +core.List buildUnnamed340() => [ 'foo', 'foo', ]; -void checkUnnamed312(core.List o) { +void checkUnnamed340(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -23252,12 +26000,12 @@ void checkUnnamed312(core.List o) { ); } -core.List buildUnnamed313() => [ +core.List buildUnnamed341() => [ 'foo', 'foo', ]; -void checkUnnamed313(core.List o) { +void checkUnnamed341(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -23269,12 +26017,12 @@ void checkUnnamed313(core.List o) { ); } -core.List buildUnnamed314() => [ +core.List buildUnnamed342() => [ 'foo', 'foo', ]; -void checkUnnamed314(core.List o) { +void checkUnnamed342(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -23294,10 +26042,10 @@ api.GoogleCloudAiplatformV1SpecialistPool if (buildCounterGoogleCloudAiplatformV1SpecialistPool < 3) { o.displayName = 'foo'; o.name = 'foo'; - o.pendingDataLabelingJobs = buildUnnamed312(); - o.specialistManagerEmails = buildUnnamed313(); + o.pendingDataLabelingJobs = buildUnnamed340(); + o.specialistManagerEmails = buildUnnamed341(); o.specialistManagersCount = 42; - o.specialistWorkerEmails = buildUnnamed314(); + o.specialistWorkerEmails = buildUnnamed342(); } buildCounterGoogleCloudAiplatformV1SpecialistPool--; return o; @@ -23315,17 +26063,38 @@ void checkGoogleCloudAiplatformV1SpecialistPool( o.name!, unittest.equals('foo'), ); - checkUnnamed312(o.pendingDataLabelingJobs!); - checkUnnamed313(o.specialistManagerEmails!); + checkUnnamed340(o.pendingDataLabelingJobs!); + checkUnnamed341(o.specialistManagerEmails!); unittest.expect( o.specialistManagersCount!, unittest.equals(42), ); - checkUnnamed314(o.specialistWorkerEmails!); + checkUnnamed342(o.specialistWorkerEmails!); } buildCounterGoogleCloudAiplatformV1SpecialistPool--; } +core.int buildCounterGoogleCloudAiplatformV1SpeechConfig = 0; +api.GoogleCloudAiplatformV1SpeechConfig + buildGoogleCloudAiplatformV1SpeechConfig() { + final o = api.GoogleCloudAiplatformV1SpeechConfig(); + buildCounterGoogleCloudAiplatformV1SpeechConfig++; + if (buildCounterGoogleCloudAiplatformV1SpeechConfig < 3) { + o.voiceConfig = buildGoogleCloudAiplatformV1VoiceConfig(); + } + buildCounterGoogleCloudAiplatformV1SpeechConfig--; + return o; +} + +void checkGoogleCloudAiplatformV1SpeechConfig( + api.GoogleCloudAiplatformV1SpeechConfig o) { + buildCounterGoogleCloudAiplatformV1SpeechConfig++; + if (buildCounterGoogleCloudAiplatformV1SpeechConfig < 3) { + checkGoogleCloudAiplatformV1VoiceConfig(o.voiceConfig!); + } + buildCounterGoogleCloudAiplatformV1SpeechConfig--; +} + core.int buildCounterGoogleCloudAiplatformV1StartNotebookRuntimeRequest = 0; 
api.GoogleCloudAiplatformV1StartNotebookRuntimeRequest buildGoogleCloudAiplatformV1StartNotebookRuntimeRequest() { @@ -23343,6 +26112,23 @@ void checkGoogleCloudAiplatformV1StartNotebookRuntimeRequest( buildCounterGoogleCloudAiplatformV1StartNotebookRuntimeRequest--; } +core.int buildCounterGoogleCloudAiplatformV1StopNotebookRuntimeRequest = 0; +api.GoogleCloudAiplatformV1StopNotebookRuntimeRequest + buildGoogleCloudAiplatformV1StopNotebookRuntimeRequest() { + final o = api.GoogleCloudAiplatformV1StopNotebookRuntimeRequest(); + buildCounterGoogleCloudAiplatformV1StopNotebookRuntimeRequest++; + if (buildCounterGoogleCloudAiplatformV1StopNotebookRuntimeRequest < 3) {} + buildCounterGoogleCloudAiplatformV1StopNotebookRuntimeRequest--; + return o; +} + +void checkGoogleCloudAiplatformV1StopNotebookRuntimeRequest( + api.GoogleCloudAiplatformV1StopNotebookRuntimeRequest o) { + buildCounterGoogleCloudAiplatformV1StopNotebookRuntimeRequest++; + if (buildCounterGoogleCloudAiplatformV1StopNotebookRuntimeRequest < 3) {} + buildCounterGoogleCloudAiplatformV1StopNotebookRuntimeRequest--; +} + core.int buildCounterGoogleCloudAiplatformV1StopTrialRequest = 0; api.GoogleCloudAiplatformV1StopTrialRequest buildGoogleCloudAiplatformV1StopTrialRequest() { @@ -23420,12 +26206,12 @@ void checkGoogleCloudAiplatformV1StreamRawPredictRequest( buildCounterGoogleCloudAiplatformV1StreamRawPredictRequest--; } -core.List buildUnnamed315() => [ +core.List buildUnnamed343() => [ buildGoogleCloudAiplatformV1Tensor(), buildGoogleCloudAiplatformV1Tensor(), ]; -void checkUnnamed315(core.List o) { +void checkUnnamed343(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1Tensor(o[0]); checkGoogleCloudAiplatformV1Tensor(o[1]); @@ -23437,7 +26223,7 @@ api.GoogleCloudAiplatformV1StreamingPredictRequest final o = api.GoogleCloudAiplatformV1StreamingPredictRequest(); buildCounterGoogleCloudAiplatformV1StreamingPredictRequest++; if (buildCounterGoogleCloudAiplatformV1StreamingPredictRequest < 3) { - o.inputs = buildUnnamed315(); + o.inputs = buildUnnamed343(); o.parameters = buildGoogleCloudAiplatformV1Tensor(); } buildCounterGoogleCloudAiplatformV1StreamingPredictRequest--; @@ -23448,18 +26234,18 @@ void checkGoogleCloudAiplatformV1StreamingPredictRequest( api.GoogleCloudAiplatformV1StreamingPredictRequest o) { buildCounterGoogleCloudAiplatformV1StreamingPredictRequest++; if (buildCounterGoogleCloudAiplatformV1StreamingPredictRequest < 3) { - checkUnnamed315(o.inputs!); + checkUnnamed343(o.inputs!); checkGoogleCloudAiplatformV1Tensor(o.parameters!); } buildCounterGoogleCloudAiplatformV1StreamingPredictRequest--; } -core.List buildUnnamed316() => [ +core.List buildUnnamed344() => [ buildGoogleCloudAiplatformV1Tensor(), buildGoogleCloudAiplatformV1Tensor(), ]; -void checkUnnamed316(core.List o) { +void checkUnnamed344(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1Tensor(o[0]); checkGoogleCloudAiplatformV1Tensor(o[1]); @@ -23471,7 +26257,7 @@ api.GoogleCloudAiplatformV1StreamingPredictResponse final o = api.GoogleCloudAiplatformV1StreamingPredictResponse(); buildCounterGoogleCloudAiplatformV1StreamingPredictResponse++; if (buildCounterGoogleCloudAiplatformV1StreamingPredictResponse < 3) { - o.outputs = buildUnnamed316(); + o.outputs = buildUnnamed344(); o.parameters = buildGoogleCloudAiplatformV1Tensor(); } buildCounterGoogleCloudAiplatformV1StreamingPredictResponse--; @@ -23482,18 +26268,18 @@ void checkGoogleCloudAiplatformV1StreamingPredictResponse( 
api.GoogleCloudAiplatformV1StreamingPredictResponse o) { buildCounterGoogleCloudAiplatformV1StreamingPredictResponse++; if (buildCounterGoogleCloudAiplatformV1StreamingPredictResponse < 3) { - checkUnnamed316(o.outputs!); + checkUnnamed344(o.outputs!); checkGoogleCloudAiplatformV1Tensor(o.parameters!); } buildCounterGoogleCloudAiplatformV1StreamingPredictResponse--; } -core.List buildUnnamed317() => [ +core.List buildUnnamed345() => [ 'foo', 'foo', ]; -void checkUnnamed317(core.List o) { +void checkUnnamed345(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -23513,7 +26299,7 @@ api.GoogleCloudAiplatformV1StreamingReadFeatureValuesRequest buildCounterGoogleCloudAiplatformV1StreamingReadFeatureValuesRequest++; if (buildCounterGoogleCloudAiplatformV1StreamingReadFeatureValuesRequest < 3) { - o.entityIds = buildUnnamed317(); + o.entityIds = buildUnnamed345(); o.featureSelector = buildGoogleCloudAiplatformV1FeatureSelector(); } buildCounterGoogleCloudAiplatformV1StreamingReadFeatureValuesRequest--; @@ -23525,18 +26311,18 @@ void checkGoogleCloudAiplatformV1StreamingReadFeatureValuesRequest( buildCounterGoogleCloudAiplatformV1StreamingReadFeatureValuesRequest++; if (buildCounterGoogleCloudAiplatformV1StreamingReadFeatureValuesRequest < 3) { - checkUnnamed317(o.entityIds!); + checkUnnamed345(o.entityIds!); checkGoogleCloudAiplatformV1FeatureSelector(o.featureSelector!); } buildCounterGoogleCloudAiplatformV1StreamingReadFeatureValuesRequest--; } -core.List buildUnnamed318() => [ +core.List buildUnnamed346() => [ 'foo', 'foo', ]; -void checkUnnamed318(core.List o) { +void checkUnnamed346(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -23554,7 +26340,7 @@ api.GoogleCloudAiplatformV1StringArray final o = api.GoogleCloudAiplatformV1StringArray(); buildCounterGoogleCloudAiplatformV1StringArray++; if (buildCounterGoogleCloudAiplatformV1StringArray < 3) { - o.values = buildUnnamed318(); + o.values = buildUnnamed346(); } buildCounterGoogleCloudAiplatformV1StringArray--; return o; @@ -23564,7 +26350,7 @@ void checkGoogleCloudAiplatformV1StringArray( api.GoogleCloudAiplatformV1StringArray o) { buildCounterGoogleCloudAiplatformV1StringArray++; if (buildCounterGoogleCloudAiplatformV1StringArray < 3) { - checkUnnamed318(o.values!); + checkUnnamed346(o.values!); } buildCounterGoogleCloudAiplatformV1StringArray--; } @@ -23595,12 +26381,12 @@ void checkGoogleCloudAiplatformV1StructFieldValue( buildCounterGoogleCloudAiplatformV1StructFieldValue--; } -core.List buildUnnamed319() => [ +core.List buildUnnamed347() => [ buildGoogleCloudAiplatformV1StructFieldValue(), buildGoogleCloudAiplatformV1StructFieldValue(), ]; -void checkUnnamed319(core.List o) { +void checkUnnamed347(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1StructFieldValue(o[0]); checkGoogleCloudAiplatformV1StructFieldValue(o[1]); @@ -23612,7 +26398,7 @@ api.GoogleCloudAiplatformV1StructValue final o = api.GoogleCloudAiplatformV1StructValue(); buildCounterGoogleCloudAiplatformV1StructValue++; if (buildCounterGoogleCloudAiplatformV1StructValue < 3) { - o.values = buildUnnamed319(); + o.values = buildUnnamed347(); } buildCounterGoogleCloudAiplatformV1StructValue--; return o; @@ -23622,7 +26408,7 @@ void checkGoogleCloudAiplatformV1StructValue( api.GoogleCloudAiplatformV1StructValue o) { buildCounterGoogleCloudAiplatformV1StructValue++; if (buildCounterGoogleCloudAiplatformV1StructValue < 3) { - checkUnnamed319(o.values!); + 
checkUnnamed347(o.values!); } buildCounterGoogleCloudAiplatformV1StructValue--; } @@ -23671,12 +26457,12 @@ void checkGoogleCloudAiplatformV1Study(api.GoogleCloudAiplatformV1Study o) { buildCounterGoogleCloudAiplatformV1Study--; } -core.List buildUnnamed320() => [ +core.List buildUnnamed348() => [ buildGoogleCloudAiplatformV1StudySpecMetricSpec(), buildGoogleCloudAiplatformV1StudySpecMetricSpec(), ]; -void checkUnnamed320( +void checkUnnamed348( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1StudySpecMetricSpec(o[0]); @@ -23684,12 +26470,12 @@ void checkUnnamed320( } core.List - buildUnnamed321() => [ + buildUnnamed349() => [ buildGoogleCloudAiplatformV1StudySpecParameterSpec(), buildGoogleCloudAiplatformV1StudySpecParameterSpec(), ]; -void checkUnnamed321( +void checkUnnamed349( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1StudySpecParameterSpec(o[0]); @@ -23709,9 +26495,9 @@ api.GoogleCloudAiplatformV1StudySpec buildGoogleCloudAiplatformV1StudySpec() { o.measurementSelectionType = 'foo'; o.medianAutomatedStoppingSpec = buildGoogleCloudAiplatformV1StudySpecMedianAutomatedStoppingSpec(); - o.metrics = buildUnnamed320(); + o.metrics = buildUnnamed348(); o.observationNoise = 'foo'; - o.parameters = buildUnnamed321(); + o.parameters = buildUnnamed349(); o.studyStoppingConfig = buildGoogleCloudAiplatformV1StudySpecStudyStoppingConfig(); } @@ -23737,12 +26523,12 @@ void checkGoogleCloudAiplatformV1StudySpec( ); checkGoogleCloudAiplatformV1StudySpecMedianAutomatedStoppingSpec( o.medianAutomatedStoppingSpec!); - checkUnnamed320(o.metrics!); + checkUnnamed348(o.metrics!); unittest.expect( o.observationNoise!, unittest.equals('foo'), ); - checkUnnamed321(o.parameters!); + checkUnnamed349(o.parameters!); checkGoogleCloudAiplatformV1StudySpecStudyStoppingConfig( o.studyStoppingConfig!); } @@ -23914,12 +26700,12 @@ void checkGoogleCloudAiplatformV1StudySpecMetricSpecSafetyMetricConfig( core.List< api .GoogleCloudAiplatformV1StudySpecParameterSpecConditionalParameterSpec> - buildUnnamed322() => [ + buildUnnamed350() => [ buildGoogleCloudAiplatformV1StudySpecParameterSpecConditionalParameterSpec(), buildGoogleCloudAiplatformV1StudySpecParameterSpecConditionalParameterSpec(), ]; -void checkUnnamed322( +void checkUnnamed350( core.List< api .GoogleCloudAiplatformV1StudySpecParameterSpecConditionalParameterSpec> @@ -23939,7 +26725,7 @@ api.GoogleCloudAiplatformV1StudySpecParameterSpec if (buildCounterGoogleCloudAiplatformV1StudySpecParameterSpec < 3) { o.categoricalValueSpec = buildGoogleCloudAiplatformV1StudySpecParameterSpecCategoricalValueSpec(); - o.conditionalParameterSpecs = buildUnnamed322(); + o.conditionalParameterSpecs = buildUnnamed350(); o.discreteValueSpec = buildGoogleCloudAiplatformV1StudySpecParameterSpecDiscreteValueSpec(); o.doubleValueSpec = @@ -23959,7 +26745,7 @@ void checkGoogleCloudAiplatformV1StudySpecParameterSpec( if (buildCounterGoogleCloudAiplatformV1StudySpecParameterSpec < 3) { checkGoogleCloudAiplatformV1StudySpecParameterSpecCategoricalValueSpec( o.categoricalValueSpec!); - checkUnnamed322(o.conditionalParameterSpecs!); + checkUnnamed350(o.conditionalParameterSpecs!); checkGoogleCloudAiplatformV1StudySpecParameterSpecDiscreteValueSpec( o.discreteValueSpec!); checkGoogleCloudAiplatformV1StudySpecParameterSpecDoubleValueSpec( @@ -23978,12 +26764,12 @@ void checkGoogleCloudAiplatformV1StudySpecParameterSpec( buildCounterGoogleCloudAiplatformV1StudySpecParameterSpec--; } -core.List buildUnnamed323() 
=> [ +core.List buildUnnamed351() => [ 'foo', 'foo', ]; -void checkUnnamed323(core.List o) { +void checkUnnamed351(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -24006,7 +26792,7 @@ api.GoogleCloudAiplatformV1StudySpecParameterSpecCategoricalValueSpec if (buildCounterGoogleCloudAiplatformV1StudySpecParameterSpecCategoricalValueSpec < 3) { o.defaultValue = 'foo'; - o.values = buildUnnamed323(); + o.values = buildUnnamed351(); } buildCounterGoogleCloudAiplatformV1StudySpecParameterSpecCategoricalValueSpec--; return o; @@ -24021,7 +26807,7 @@ void checkGoogleCloudAiplatformV1StudySpecParameterSpecCategoricalValueSpec( o.defaultValue!, unittest.equals('foo'), ); - checkUnnamed323(o.values!); + checkUnnamed351(o.values!); } buildCounterGoogleCloudAiplatformV1StudySpecParameterSpecCategoricalValueSpec--; } @@ -24065,12 +26851,12 @@ void checkGoogleCloudAiplatformV1StudySpecParameterSpecConditionalParameterSpec( buildCounterGoogleCloudAiplatformV1StudySpecParameterSpecConditionalParameterSpec--; } -core.List buildUnnamed324() => [ +core.List buildUnnamed352() => [ 'foo', 'foo', ]; -void checkUnnamed324(core.List o) { +void checkUnnamed352(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -24092,7 +26878,7 @@ api.GoogleCloudAiplatformV1StudySpecParameterSpecConditionalParameterSpecCategor buildCounterGoogleCloudAiplatformV1StudySpecParameterSpecConditionalParameterSpecCategoricalValueCondition++; if (buildCounterGoogleCloudAiplatformV1StudySpecParameterSpecConditionalParameterSpecCategoricalValueCondition < 3) { - o.values = buildUnnamed324(); + o.values = buildUnnamed352(); } buildCounterGoogleCloudAiplatformV1StudySpecParameterSpecConditionalParameterSpecCategoricalValueCondition--; return o; @@ -24104,17 +26890,17 @@ void checkGoogleCloudAiplatformV1StudySpecParameterSpecConditionalParameterSpecC buildCounterGoogleCloudAiplatformV1StudySpecParameterSpecConditionalParameterSpecCategoricalValueCondition++; if (buildCounterGoogleCloudAiplatformV1StudySpecParameterSpecConditionalParameterSpecCategoricalValueCondition < 3) { - checkUnnamed324(o.values!); + checkUnnamed352(o.values!); } buildCounterGoogleCloudAiplatformV1StudySpecParameterSpecConditionalParameterSpecCategoricalValueCondition--; } -core.List buildUnnamed325() => [ +core.List buildUnnamed353() => [ 42.0, 42.0, ]; -void checkUnnamed325(core.List o) { +void checkUnnamed353(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -24136,7 +26922,7 @@ api.GoogleCloudAiplatformV1StudySpecParameterSpecConditionalParameterSpecDiscret buildCounterGoogleCloudAiplatformV1StudySpecParameterSpecConditionalParameterSpecDiscreteValueCondition++; if (buildCounterGoogleCloudAiplatformV1StudySpecParameterSpecConditionalParameterSpecDiscreteValueCondition < 3) { - o.values = buildUnnamed325(); + o.values = buildUnnamed353(); } buildCounterGoogleCloudAiplatformV1StudySpecParameterSpecConditionalParameterSpecDiscreteValueCondition--; return o; @@ -24148,17 +26934,17 @@ void checkGoogleCloudAiplatformV1StudySpecParameterSpecConditionalParameterSpecD buildCounterGoogleCloudAiplatformV1StudySpecParameterSpecConditionalParameterSpecDiscreteValueCondition++; if (buildCounterGoogleCloudAiplatformV1StudySpecParameterSpecConditionalParameterSpecDiscreteValueCondition < 3) { - checkUnnamed325(o.values!); + checkUnnamed353(o.values!); } buildCounterGoogleCloudAiplatformV1StudySpecParameterSpecConditionalParameterSpecDiscreteValueCondition--; } -core.List 
buildUnnamed326() => [ +core.List buildUnnamed354() => [ 'foo', 'foo', ]; -void checkUnnamed326(core.List o) { +void checkUnnamed354(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -24180,7 +26966,7 @@ api.GoogleCloudAiplatformV1StudySpecParameterSpecConditionalParameterSpecIntValu buildCounterGoogleCloudAiplatformV1StudySpecParameterSpecConditionalParameterSpecIntValueCondition++; if (buildCounterGoogleCloudAiplatformV1StudySpecParameterSpecConditionalParameterSpecIntValueCondition < 3) { - o.values = buildUnnamed326(); + o.values = buildUnnamed354(); } buildCounterGoogleCloudAiplatformV1StudySpecParameterSpecConditionalParameterSpecIntValueCondition--; return o; @@ -24192,17 +26978,17 @@ void checkGoogleCloudAiplatformV1StudySpecParameterSpecConditionalParameterSpecI buildCounterGoogleCloudAiplatformV1StudySpecParameterSpecConditionalParameterSpecIntValueCondition++; if (buildCounterGoogleCloudAiplatformV1StudySpecParameterSpecConditionalParameterSpecIntValueCondition < 3) { - checkUnnamed326(o.values!); + checkUnnamed354(o.values!); } buildCounterGoogleCloudAiplatformV1StudySpecParameterSpecConditionalParameterSpecIntValueCondition--; } -core.List buildUnnamed327() => [ +core.List buildUnnamed355() => [ 42.0, 42.0, ]; -void checkUnnamed327(core.List o) { +void checkUnnamed355(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -24225,7 +27011,7 @@ api.GoogleCloudAiplatformV1StudySpecParameterSpecDiscreteValueSpec if (buildCounterGoogleCloudAiplatformV1StudySpecParameterSpecDiscreteValueSpec < 3) { o.defaultValue = 42.0; - o.values = buildUnnamed327(); + o.values = buildUnnamed355(); } buildCounterGoogleCloudAiplatformV1StudySpecParameterSpecDiscreteValueSpec--; return o; @@ -24240,7 +27026,7 @@ void checkGoogleCloudAiplatformV1StudySpecParameterSpecDiscreteValueSpec( o.defaultValue!, unittest.equals(42.0), ); - checkUnnamed327(o.values!); + checkUnnamed355(o.values!); } buildCounterGoogleCloudAiplatformV1StudySpecParameterSpecDiscreteValueSpec--; } @@ -24399,12 +27185,12 @@ void checkGoogleCloudAiplatformV1StudyTimeConstraint( buildCounterGoogleCloudAiplatformV1StudyTimeConstraint--; } -core.List buildUnnamed328() => [ +core.List buildUnnamed356() => [ buildGoogleCloudAiplatformV1TrialContext(), buildGoogleCloudAiplatformV1TrialContext(), ]; -void checkUnnamed328(core.List o) { +void checkUnnamed356(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1TrialContext(o[0]); checkGoogleCloudAiplatformV1TrialContext(o[1]); @@ -24417,7 +27203,7 @@ api.GoogleCloudAiplatformV1SuggestTrialsRequest buildCounterGoogleCloudAiplatformV1SuggestTrialsRequest++; if (buildCounterGoogleCloudAiplatformV1SuggestTrialsRequest < 3) { o.clientId = 'foo'; - o.contexts = buildUnnamed328(); + o.contexts = buildUnnamed356(); o.suggestionCount = 42; } buildCounterGoogleCloudAiplatformV1SuggestTrialsRequest--; @@ -24432,7 +27218,7 @@ void checkGoogleCloudAiplatformV1SuggestTrialsRequest( o.clientId!, unittest.equals('foo'), ); - checkUnnamed328(o.contexts!); + checkUnnamed356(o.contexts!); unittest.expect( o.suggestionCount!, unittest.equals(42), @@ -24842,12 +27628,12 @@ void checkGoogleCloudAiplatformV1SupervisedHyperParameters( buildCounterGoogleCloudAiplatformV1SupervisedHyperParameters--; } -core.List buildUnnamed329() => [ +core.List buildUnnamed357() => [ 'foo', 'foo', ]; -void checkUnnamed329(core.List o) { +void checkUnnamed357(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], 
@@ -24859,12 +27645,12 @@ void checkUnnamed329(core.List o) { ); } -core.List buildUnnamed330() => [ +core.List buildUnnamed358() => [ buildGoogleCloudAiplatformV1Content(), buildGoogleCloudAiplatformV1Content(), ]; -void checkUnnamed330(core.List o) { +void checkUnnamed358(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1Content(o[0]); checkGoogleCloudAiplatformV1Content(o[1]); @@ -24880,10 +27666,10 @@ api.GoogleCloudAiplatformV1SupervisedTuningDataStats o.totalBillableTokenCount = 'foo'; o.totalTruncatedExampleCount = 'foo'; o.totalTuningCharacterCount = 'foo'; - o.truncatedExampleIndices = buildUnnamed329(); + o.truncatedExampleIndices = buildUnnamed357(); o.tuningDatasetExampleCount = 'foo'; o.tuningStepCount = 'foo'; - o.userDatasetExamples = buildUnnamed330(); + o.userDatasetExamples = buildUnnamed358(); o.userInputTokenDistribution = buildGoogleCloudAiplatformV1SupervisedTuningDatasetDistribution(); o.userMessagePerExampleDistribution = @@ -24915,7 +27701,7 @@ void checkGoogleCloudAiplatformV1SupervisedTuningDataStats( o.totalTuningCharacterCount!, unittest.equals('foo'), ); - checkUnnamed329(o.truncatedExampleIndices!); + checkUnnamed357(o.truncatedExampleIndices!); unittest.expect( o.tuningDatasetExampleCount!, unittest.equals('foo'), @@ -24924,7 +27710,7 @@ void checkGoogleCloudAiplatformV1SupervisedTuningDataStats( o.tuningStepCount!, unittest.equals('foo'), ); - checkUnnamed330(o.userDatasetExamples!); + checkUnnamed358(o.userDatasetExamples!); checkGoogleCloudAiplatformV1SupervisedTuningDatasetDistribution( o.userInputTokenDistribution!); checkGoogleCloudAiplatformV1SupervisedTuningDatasetDistribution( @@ -24938,12 +27724,12 @@ void checkGoogleCloudAiplatformV1SupervisedTuningDataStats( core.List< api .GoogleCloudAiplatformV1SupervisedTuningDatasetDistributionDatasetBucket> - buildUnnamed331() => [ + buildUnnamed359() => [ buildGoogleCloudAiplatformV1SupervisedTuningDatasetDistributionDatasetBucket(), buildGoogleCloudAiplatformV1SupervisedTuningDatasetDistributionDatasetBucket(), ]; -void checkUnnamed331( +void checkUnnamed359( core.List< api .GoogleCloudAiplatformV1SupervisedTuningDatasetDistributionDatasetBucket> @@ -24964,7 +27750,7 @@ api.GoogleCloudAiplatformV1SupervisedTuningDatasetDistribution if (buildCounterGoogleCloudAiplatformV1SupervisedTuningDatasetDistribution < 3) { o.billableSum = 'foo'; - o.buckets = buildUnnamed331(); + o.buckets = buildUnnamed359(); o.max = 42.0; o.mean = 42.0; o.median = 42.0; @@ -24986,7 +27772,7 @@ void checkGoogleCloudAiplatformV1SupervisedTuningDatasetDistribution( o.billableSum!, unittest.equals('foo'), ); - checkUnnamed331(o.buckets!); + checkUnnamed359(o.buckets!); unittest.expect( o.max!, unittest.equals(42.0), @@ -25152,23 +27938,23 @@ void checkGoogleCloudAiplatformV1TFRecordDestination( buildCounterGoogleCloudAiplatformV1TFRecordDestination--; } -core.List buildUnnamed332() => [ +core.List buildUnnamed360() => [ true, true, ]; -void checkUnnamed332(core.List o) { +void checkUnnamed360(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect(o[0], unittest.isTrue); unittest.expect(o[1], unittest.isTrue); } -core.List buildUnnamed333() => [ +core.List buildUnnamed361() => [ 'foo', 'foo', ]; -void checkUnnamed333(core.List o) { +void checkUnnamed361(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -25180,12 +27966,12 @@ void checkUnnamed333(core.List o) { ); } -core.List buildUnnamed334() => [ +core.List buildUnnamed362() => [ 42.0, 42.0, ]; 
-void checkUnnamed334(core.List o) { +void checkUnnamed362(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -25197,12 +27983,12 @@ void checkUnnamed334(core.List o) { ); } -core.List buildUnnamed335() => [ +core.List buildUnnamed363() => [ 42.0, 42.0, ]; -void checkUnnamed335(core.List o) { +void checkUnnamed363(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -25214,12 +28000,12 @@ void checkUnnamed335(core.List o) { ); } -core.List buildUnnamed336() => [ +core.List buildUnnamed364() => [ 'foo', 'foo', ]; -void checkUnnamed336(core.List o) { +void checkUnnamed364(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -25231,12 +28017,12 @@ void checkUnnamed336(core.List o) { ); } -core.List buildUnnamed337() => [ +core.List buildUnnamed365() => [ 42, 42, ]; -void checkUnnamed337(core.List o) { +void checkUnnamed365(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -25248,23 +28034,23 @@ void checkUnnamed337(core.List o) { ); } -core.List buildUnnamed338() => [ +core.List buildUnnamed366() => [ buildGoogleCloudAiplatformV1Tensor(), buildGoogleCloudAiplatformV1Tensor(), ]; -void checkUnnamed338(core.List o) { +void checkUnnamed366(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1Tensor(o[0]); checkGoogleCloudAiplatformV1Tensor(o[1]); } -core.List buildUnnamed339() => [ +core.List buildUnnamed367() => [ 'foo', 'foo', ]; -void checkUnnamed339(core.List o) { +void checkUnnamed367(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -25276,12 +28062,12 @@ void checkUnnamed339(core.List o) { ); } -core.List buildUnnamed340() => [ +core.List buildUnnamed368() => [ 'foo', 'foo', ]; -void checkUnnamed340(core.List o) { +void checkUnnamed368(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -25293,24 +28079,24 @@ void checkUnnamed340(core.List o) { ); } -core.Map buildUnnamed341() => { +core.Map buildUnnamed369() => { 'x': buildGoogleCloudAiplatformV1Tensor(), 'y': buildGoogleCloudAiplatformV1Tensor(), }; -void checkUnnamed341( +void checkUnnamed369( core.Map o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1Tensor(o['x']!); checkGoogleCloudAiplatformV1Tensor(o['y']!); } -core.List buildUnnamed342() => [ +core.List buildUnnamed370() => [ 'foo', 'foo', ]; -void checkUnnamed342(core.List o) { +void checkUnnamed370(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -25322,12 +28108,12 @@ void checkUnnamed342(core.List o) { ); } -core.List buildUnnamed343() => [ +core.List buildUnnamed371() => [ 42, 42, ]; -void checkUnnamed343(core.List o) { +void checkUnnamed371(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -25344,20 +28130,20 @@ api.GoogleCloudAiplatformV1Tensor buildGoogleCloudAiplatformV1Tensor() { final o = api.GoogleCloudAiplatformV1Tensor(); buildCounterGoogleCloudAiplatformV1Tensor++; if (buildCounterGoogleCloudAiplatformV1Tensor < 3) { - o.boolVal = buildUnnamed332(); - o.bytesVal = buildUnnamed333(); - o.doubleVal = buildUnnamed334(); + o.boolVal = buildUnnamed360(); + o.bytesVal = buildUnnamed361(); + o.doubleVal = buildUnnamed362(); o.dtype = 'foo'; - o.floatVal = buildUnnamed335(); - o.int64Val = buildUnnamed336(); - o.intVal = buildUnnamed337(); - o.listVal = buildUnnamed338(); - o.shape = buildUnnamed339(); - o.stringVal = buildUnnamed340(); - o.structVal = 
buildUnnamed341(); + o.floatVal = buildUnnamed363(); + o.int64Val = buildUnnamed364(); + o.intVal = buildUnnamed365(); + o.listVal = buildUnnamed366(); + o.shape = buildUnnamed367(); + o.stringVal = buildUnnamed368(); + o.structVal = buildUnnamed369(); o.tensorVal = 'foo'; - o.uint64Val = buildUnnamed342(); - o.uintVal = buildUnnamed343(); + o.uint64Val = buildUnnamed370(); + o.uintVal = buildUnnamed371(); } buildCounterGoogleCloudAiplatformV1Tensor--; return o; @@ -25366,36 +28152,36 @@ api.GoogleCloudAiplatformV1Tensor buildGoogleCloudAiplatformV1Tensor() { void checkGoogleCloudAiplatformV1Tensor(api.GoogleCloudAiplatformV1Tensor o) { buildCounterGoogleCloudAiplatformV1Tensor++; if (buildCounterGoogleCloudAiplatformV1Tensor < 3) { - checkUnnamed332(o.boolVal!); - checkUnnamed333(o.bytesVal!); - checkUnnamed334(o.doubleVal!); + checkUnnamed360(o.boolVal!); + checkUnnamed361(o.bytesVal!); + checkUnnamed362(o.doubleVal!); unittest.expect( o.dtype!, unittest.equals('foo'), ); - checkUnnamed335(o.floatVal!); - checkUnnamed336(o.int64Val!); - checkUnnamed337(o.intVal!); - checkUnnamed338(o.listVal!); - checkUnnamed339(o.shape!); - checkUnnamed340(o.stringVal!); - checkUnnamed341(o.structVal!); + checkUnnamed363(o.floatVal!); + checkUnnamed364(o.int64Val!); + checkUnnamed365(o.intVal!); + checkUnnamed366(o.listVal!); + checkUnnamed367(o.shape!); + checkUnnamed368(o.stringVal!); + checkUnnamed369(o.structVal!); unittest.expect( o.tensorVal!, unittest.equals('foo'), ); - checkUnnamed342(o.uint64Val!); - checkUnnamed343(o.uintVal!); + checkUnnamed370(o.uint64Val!); + checkUnnamed371(o.uintVal!); } buildCounterGoogleCloudAiplatformV1Tensor--; } -core.Map buildUnnamed344() => { +core.Map buildUnnamed372() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed344(core.Map o) { +void checkUnnamed372(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -25420,7 +28206,7 @@ api.GoogleCloudAiplatformV1Tensorboard o.encryptionSpec = buildGoogleCloudAiplatformV1EncryptionSpec(); o.etag = 'foo'; o.isDefault = true; - o.labels = buildUnnamed344(); + o.labels = buildUnnamed372(); o.name = 'foo'; o.runCount = 42; o.satisfiesPzi = true; @@ -25457,7 +28243,7 @@ void checkGoogleCloudAiplatformV1Tensorboard( unittest.equals('foo'), ); unittest.expect(o.isDefault!, unittest.isTrue); - checkUnnamed344(o.labels!); + checkUnnamed372(o.labels!); unittest.expect( o.name!, unittest.equals('foo'), @@ -25505,12 +28291,12 @@ void checkGoogleCloudAiplatformV1TensorboardBlob( buildCounterGoogleCloudAiplatformV1TensorboardBlob--; } -core.List buildUnnamed345() => [ +core.List buildUnnamed373() => [ buildGoogleCloudAiplatformV1TensorboardBlob(), buildGoogleCloudAiplatformV1TensorboardBlob(), ]; -void checkUnnamed345(core.List o) { +void checkUnnamed373(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1TensorboardBlob(o[0]); checkGoogleCloudAiplatformV1TensorboardBlob(o[1]); @@ -25522,7 +28308,7 @@ api.GoogleCloudAiplatformV1TensorboardBlobSequence final o = api.GoogleCloudAiplatformV1TensorboardBlobSequence(); buildCounterGoogleCloudAiplatformV1TensorboardBlobSequence++; if (buildCounterGoogleCloudAiplatformV1TensorboardBlobSequence < 3) { - o.values = buildUnnamed345(); + o.values = buildUnnamed373(); } buildCounterGoogleCloudAiplatformV1TensorboardBlobSequence--; return o; @@ -25532,17 +28318,17 @@ void checkGoogleCloudAiplatformV1TensorboardBlobSequence( api.GoogleCloudAiplatformV1TensorboardBlobSequence o) { 
buildCounterGoogleCloudAiplatformV1TensorboardBlobSequence++; if (buildCounterGoogleCloudAiplatformV1TensorboardBlobSequence < 3) { - checkUnnamed345(o.values!); + checkUnnamed373(o.values!); } buildCounterGoogleCloudAiplatformV1TensorboardBlobSequence--; } -core.Map buildUnnamed346() => { +core.Map buildUnnamed374() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed346(core.Map o) { +void checkUnnamed374(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -25564,7 +28350,7 @@ api.GoogleCloudAiplatformV1TensorboardExperiment o.description = 'foo'; o.displayName = 'foo'; o.etag = 'foo'; - o.labels = buildUnnamed346(); + o.labels = buildUnnamed374(); o.name = 'foo'; o.source = 'foo'; o.updateTime = 'foo'; @@ -25593,7 +28379,7 @@ void checkGoogleCloudAiplatformV1TensorboardExperiment( o.etag!, unittest.equals('foo'), ); - checkUnnamed346(o.labels!); + checkUnnamed374(o.labels!); unittest.expect( o.name!, unittest.equals('foo'), @@ -25610,12 +28396,12 @@ void checkGoogleCloudAiplatformV1TensorboardExperiment( buildCounterGoogleCloudAiplatformV1TensorboardExperiment--; } -core.Map buildUnnamed347() => { +core.Map buildUnnamed375() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed347(core.Map o) { +void checkUnnamed375(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -25637,7 +28423,7 @@ api.GoogleCloudAiplatformV1TensorboardRun o.description = 'foo'; o.displayName = 'foo'; o.etag = 'foo'; - o.labels = buildUnnamed347(); + o.labels = buildUnnamed375(); o.name = 'foo'; o.updateTime = 'foo'; } @@ -25665,7 +28451,7 @@ void checkGoogleCloudAiplatformV1TensorboardRun( o.etag!, unittest.equals('foo'), ); - checkUnnamed347(o.labels!); + checkUnnamed375(o.labels!); unittest.expect( o.name!, unittest.equals('foo'), @@ -25831,12 +28617,12 @@ void checkGoogleCloudAiplatformV1ThresholdConfig( buildCounterGoogleCloudAiplatformV1ThresholdConfig--; } -core.List buildUnnamed348() => [ +core.List buildUnnamed376() => [ buildGoogleCloudAiplatformV1TimeSeriesDataPoint(), buildGoogleCloudAiplatformV1TimeSeriesDataPoint(), ]; -void checkUnnamed348( +void checkUnnamed376( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1TimeSeriesDataPoint(o[0]); @@ -25851,7 +28637,7 @@ api.GoogleCloudAiplatformV1TimeSeriesData if (buildCounterGoogleCloudAiplatformV1TimeSeriesData < 3) { o.tensorboardTimeSeriesId = 'foo'; o.valueType = 'foo'; - o.values = buildUnnamed348(); + o.values = buildUnnamed376(); } buildCounterGoogleCloudAiplatformV1TimeSeriesData--; return o; @@ -25869,7 +28655,7 @@ void checkGoogleCloudAiplatformV1TimeSeriesData( o.valueType!, unittest.equals('foo'), ); - checkUnnamed348(o.values!); + checkUnnamed376(o.values!); } buildCounterGoogleCloudAiplatformV1TimeSeriesData--; } @@ -25948,12 +28734,12 @@ void checkGoogleCloudAiplatformV1TimestampSplit( buildCounterGoogleCloudAiplatformV1TimestampSplit--; } -core.List buildUnnamed349() => [ +core.List buildUnnamed377() => [ 'foo', 'foo', ]; -void checkUnnamed349(core.List o) { +void checkUnnamed377(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -25965,12 +28751,12 @@ void checkUnnamed349(core.List o) { ); } -core.List buildUnnamed350() => [ +core.List buildUnnamed378() => [ 'foo', 'foo', ]; -void checkUnnamed350(core.List o) { +void checkUnnamed378(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -25988,8 +28774,8 @@ api.GoogleCloudAiplatformV1TokensInfo 
buildGoogleCloudAiplatformV1TokensInfo() { buildCounterGoogleCloudAiplatformV1TokensInfo++; if (buildCounterGoogleCloudAiplatformV1TokensInfo < 3) { o.role = 'foo'; - o.tokenIds = buildUnnamed349(); - o.tokens = buildUnnamed350(); + o.tokenIds = buildUnnamed377(); + o.tokens = buildUnnamed378(); } buildCounterGoogleCloudAiplatformV1TokensInfo--; return o; @@ -26003,18 +28789,18 @@ void checkGoogleCloudAiplatformV1TokensInfo( o.role!, unittest.equals('foo'), ); - checkUnnamed349(o.tokenIds!); - checkUnnamed350(o.tokens!); + checkUnnamed377(o.tokenIds!); + checkUnnamed378(o.tokens!); } buildCounterGoogleCloudAiplatformV1TokensInfo--; } -core.List buildUnnamed351() => [ +core.List buildUnnamed379() => [ buildGoogleCloudAiplatformV1FunctionDeclaration(), buildGoogleCloudAiplatformV1FunctionDeclaration(), ]; -void checkUnnamed351( +void checkUnnamed379( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1FunctionDeclaration(o[0]); @@ -26026,7 +28812,8 @@ api.GoogleCloudAiplatformV1Tool buildGoogleCloudAiplatformV1Tool() { final o = api.GoogleCloudAiplatformV1Tool(); buildCounterGoogleCloudAiplatformV1Tool++; if (buildCounterGoogleCloudAiplatformV1Tool < 3) { - o.functionDeclarations = buildUnnamed351(); + o.functionDeclarations = buildUnnamed379(); + o.googleSearch = buildGoogleCloudAiplatformV1ToolGoogleSearch(); o.googleSearchRetrieval = buildGoogleCloudAiplatformV1GoogleSearchRetrieval(); o.retrieval = buildGoogleCloudAiplatformV1Retrieval(); @@ -26038,20 +28825,49 @@ api.GoogleCloudAiplatformV1Tool buildGoogleCloudAiplatformV1Tool() { void checkGoogleCloudAiplatformV1Tool(api.GoogleCloudAiplatformV1Tool o) { buildCounterGoogleCloudAiplatformV1Tool++; if (buildCounterGoogleCloudAiplatformV1Tool < 3) { - checkUnnamed351(o.functionDeclarations!); + checkUnnamed379(o.functionDeclarations!); + checkGoogleCloudAiplatformV1ToolGoogleSearch(o.googleSearch!); checkGoogleCloudAiplatformV1GoogleSearchRetrieval(o.googleSearchRetrieval!); checkGoogleCloudAiplatformV1Retrieval(o.retrieval!); } buildCounterGoogleCloudAiplatformV1Tool--; } -core.List buildUnnamed352() => +core.int buildCounterGoogleCloudAiplatformV1ToolCall = 0; +api.GoogleCloudAiplatformV1ToolCall buildGoogleCloudAiplatformV1ToolCall() { + final o = api.GoogleCloudAiplatformV1ToolCall(); + buildCounterGoogleCloudAiplatformV1ToolCall++; + if (buildCounterGoogleCloudAiplatformV1ToolCall < 3) { + o.toolInput = 'foo'; + o.toolName = 'foo'; + } + buildCounterGoogleCloudAiplatformV1ToolCall--; + return o; +} + +void checkGoogleCloudAiplatformV1ToolCall( + api.GoogleCloudAiplatformV1ToolCall o) { + buildCounterGoogleCloudAiplatformV1ToolCall++; + if (buildCounterGoogleCloudAiplatformV1ToolCall < 3) { + unittest.expect( + o.toolInput!, + unittest.equals('foo'), + ); + unittest.expect( + o.toolName!, + unittest.equals('foo'), + ); + } + buildCounterGoogleCloudAiplatformV1ToolCall--; +} + +core.List buildUnnamed380() => [ buildGoogleCloudAiplatformV1ToolCallValidInstance(), buildGoogleCloudAiplatformV1ToolCallValidInstance(), ]; -void checkUnnamed352( +void checkUnnamed380( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1ToolCallValidInstance(o[0]); @@ -26064,7 +28880,7 @@ api.GoogleCloudAiplatformV1ToolCallValidInput final o = api.GoogleCloudAiplatformV1ToolCallValidInput(); buildCounterGoogleCloudAiplatformV1ToolCallValidInput++; if (buildCounterGoogleCloudAiplatformV1ToolCallValidInput < 3) { - o.instances = buildUnnamed352(); + o.instances = buildUnnamed380(); o.metricSpec 
= buildGoogleCloudAiplatformV1ToolCallValidSpec(); } buildCounterGoogleCloudAiplatformV1ToolCallValidInput--; @@ -26075,7 +28891,7 @@ void checkGoogleCloudAiplatformV1ToolCallValidInput( api.GoogleCloudAiplatformV1ToolCallValidInput o) { buildCounterGoogleCloudAiplatformV1ToolCallValidInput++; if (buildCounterGoogleCloudAiplatformV1ToolCallValidInput < 3) { - checkUnnamed352(o.instances!); + checkUnnamed380(o.instances!); checkGoogleCloudAiplatformV1ToolCallValidSpec(o.metricSpec!); } buildCounterGoogleCloudAiplatformV1ToolCallValidInput--; @@ -26135,12 +28951,12 @@ void checkGoogleCloudAiplatformV1ToolCallValidMetricValue( } core.List - buildUnnamed353() => [ + buildUnnamed381() => [ buildGoogleCloudAiplatformV1ToolCallValidMetricValue(), buildGoogleCloudAiplatformV1ToolCallValidMetricValue(), ]; -void checkUnnamed353( +void checkUnnamed381( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1ToolCallValidMetricValue(o[0]); @@ -26153,7 +28969,7 @@ api.GoogleCloudAiplatformV1ToolCallValidResults final o = api.GoogleCloudAiplatformV1ToolCallValidResults(); buildCounterGoogleCloudAiplatformV1ToolCallValidResults++; if (buildCounterGoogleCloudAiplatformV1ToolCallValidResults < 3) { - o.toolCallValidMetricValues = buildUnnamed353(); + o.toolCallValidMetricValues = buildUnnamed381(); } buildCounterGoogleCloudAiplatformV1ToolCallValidResults--; return o; @@ -26163,7 +28979,7 @@ void checkGoogleCloudAiplatformV1ToolCallValidResults( api.GoogleCloudAiplatformV1ToolCallValidResults o) { buildCounterGoogleCloudAiplatformV1ToolCallValidResults++; if (buildCounterGoogleCloudAiplatformV1ToolCallValidResults < 3) { - checkUnnamed353(o.toolCallValidMetricValues!); + checkUnnamed381(o.toolCallValidMetricValues!); } buildCounterGoogleCloudAiplatformV1ToolCallValidResults--; } @@ -26206,13 +29022,30 @@ void checkGoogleCloudAiplatformV1ToolConfig( buildCounterGoogleCloudAiplatformV1ToolConfig--; } -core.List buildUnnamed354() => +core.int buildCounterGoogleCloudAiplatformV1ToolGoogleSearch = 0; +api.GoogleCloudAiplatformV1ToolGoogleSearch + buildGoogleCloudAiplatformV1ToolGoogleSearch() { + final o = api.GoogleCloudAiplatformV1ToolGoogleSearch(); + buildCounterGoogleCloudAiplatformV1ToolGoogleSearch++; + if (buildCounterGoogleCloudAiplatformV1ToolGoogleSearch < 3) {} + buildCounterGoogleCloudAiplatformV1ToolGoogleSearch--; + return o; +} + +void checkGoogleCloudAiplatformV1ToolGoogleSearch( + api.GoogleCloudAiplatformV1ToolGoogleSearch o) { + buildCounterGoogleCloudAiplatformV1ToolGoogleSearch++; + if (buildCounterGoogleCloudAiplatformV1ToolGoogleSearch < 3) {} + buildCounterGoogleCloudAiplatformV1ToolGoogleSearch--; +} + +core.List buildUnnamed382() => [ buildGoogleCloudAiplatformV1ToolNameMatchInstance(), buildGoogleCloudAiplatformV1ToolNameMatchInstance(), ]; -void checkUnnamed354( +void checkUnnamed382( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1ToolNameMatchInstance(o[0]); @@ -26225,7 +29058,7 @@ api.GoogleCloudAiplatformV1ToolNameMatchInput final o = api.GoogleCloudAiplatformV1ToolNameMatchInput(); buildCounterGoogleCloudAiplatformV1ToolNameMatchInput++; if (buildCounterGoogleCloudAiplatformV1ToolNameMatchInput < 3) { - o.instances = buildUnnamed354(); + o.instances = buildUnnamed382(); o.metricSpec = buildGoogleCloudAiplatformV1ToolNameMatchSpec(); } buildCounterGoogleCloudAiplatformV1ToolNameMatchInput--; @@ -26236,7 +29069,7 @@ void checkGoogleCloudAiplatformV1ToolNameMatchInput( 
api.GoogleCloudAiplatformV1ToolNameMatchInput o) { buildCounterGoogleCloudAiplatformV1ToolNameMatchInput++; if (buildCounterGoogleCloudAiplatformV1ToolNameMatchInput < 3) { - checkUnnamed354(o.instances!); + checkUnnamed382(o.instances!); checkGoogleCloudAiplatformV1ToolNameMatchSpec(o.metricSpec!); } buildCounterGoogleCloudAiplatformV1ToolNameMatchInput--; @@ -26296,12 +29129,12 @@ void checkGoogleCloudAiplatformV1ToolNameMatchMetricValue( } core.List - buildUnnamed355() => [ + buildUnnamed383() => [ buildGoogleCloudAiplatformV1ToolNameMatchMetricValue(), buildGoogleCloudAiplatformV1ToolNameMatchMetricValue(), ]; -void checkUnnamed355( +void checkUnnamed383( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1ToolNameMatchMetricValue(o[0]); @@ -26314,7 +29147,7 @@ api.GoogleCloudAiplatformV1ToolNameMatchResults final o = api.GoogleCloudAiplatformV1ToolNameMatchResults(); buildCounterGoogleCloudAiplatformV1ToolNameMatchResults++; if (buildCounterGoogleCloudAiplatformV1ToolNameMatchResults < 3) { - o.toolNameMatchMetricValues = buildUnnamed355(); + o.toolNameMatchMetricValues = buildUnnamed383(); } buildCounterGoogleCloudAiplatformV1ToolNameMatchResults--; return o; @@ -26324,7 +29157,7 @@ void checkGoogleCloudAiplatformV1ToolNameMatchResults( api.GoogleCloudAiplatformV1ToolNameMatchResults o) { buildCounterGoogleCloudAiplatformV1ToolNameMatchResults++; if (buildCounterGoogleCloudAiplatformV1ToolNameMatchResults < 3) { - checkUnnamed355(o.toolNameMatchMetricValues!); + checkUnnamed383(o.toolNameMatchMetricValues!); } buildCounterGoogleCloudAiplatformV1ToolNameMatchResults--; } @@ -26347,12 +29180,12 @@ void checkGoogleCloudAiplatformV1ToolNameMatchSpec( } core.List - buildUnnamed356() => [ + buildUnnamed384() => [ buildGoogleCloudAiplatformV1ToolParameterKVMatchInstance(), buildGoogleCloudAiplatformV1ToolParameterKVMatchInstance(), ]; -void checkUnnamed356( +void checkUnnamed384( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1ToolParameterKVMatchInstance(o[0]); @@ -26365,7 +29198,7 @@ api.GoogleCloudAiplatformV1ToolParameterKVMatchInput final o = api.GoogleCloudAiplatformV1ToolParameterKVMatchInput(); buildCounterGoogleCloudAiplatformV1ToolParameterKVMatchInput++; if (buildCounterGoogleCloudAiplatformV1ToolParameterKVMatchInput < 3) { - o.instances = buildUnnamed356(); + o.instances = buildUnnamed384(); o.metricSpec = buildGoogleCloudAiplatformV1ToolParameterKVMatchSpec(); } buildCounterGoogleCloudAiplatformV1ToolParameterKVMatchInput--; @@ -26376,7 +29209,7 @@ void checkGoogleCloudAiplatformV1ToolParameterKVMatchInput( api.GoogleCloudAiplatformV1ToolParameterKVMatchInput o) { buildCounterGoogleCloudAiplatformV1ToolParameterKVMatchInput++; if (buildCounterGoogleCloudAiplatformV1ToolParameterKVMatchInput < 3) { - checkUnnamed356(o.instances!); + checkUnnamed384(o.instances!); checkGoogleCloudAiplatformV1ToolParameterKVMatchSpec(o.metricSpec!); } buildCounterGoogleCloudAiplatformV1ToolParameterKVMatchInput--; @@ -26436,12 +29269,12 @@ void checkGoogleCloudAiplatformV1ToolParameterKVMatchMetricValue( } core.List - buildUnnamed357() => [ + buildUnnamed385() => [ buildGoogleCloudAiplatformV1ToolParameterKVMatchMetricValue(), buildGoogleCloudAiplatformV1ToolParameterKVMatchMetricValue(), ]; -void checkUnnamed357( +void checkUnnamed385( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1ToolParameterKVMatchMetricValue(o[0]); @@ -26454,7 +29287,7 @@ 
api.GoogleCloudAiplatformV1ToolParameterKVMatchResults final o = api.GoogleCloudAiplatformV1ToolParameterKVMatchResults(); buildCounterGoogleCloudAiplatformV1ToolParameterKVMatchResults++; if (buildCounterGoogleCloudAiplatformV1ToolParameterKVMatchResults < 3) { - o.toolParameterKvMatchMetricValues = buildUnnamed357(); + o.toolParameterKvMatchMetricValues = buildUnnamed385(); } buildCounterGoogleCloudAiplatformV1ToolParameterKVMatchResults--; return o; @@ -26464,7 +29297,7 @@ void checkGoogleCloudAiplatformV1ToolParameterKVMatchResults( api.GoogleCloudAiplatformV1ToolParameterKVMatchResults o) { buildCounterGoogleCloudAiplatformV1ToolParameterKVMatchResults++; if (buildCounterGoogleCloudAiplatformV1ToolParameterKVMatchResults < 3) { - checkUnnamed357(o.toolParameterKvMatchMetricValues!); + checkUnnamed385(o.toolParameterKvMatchMetricValues!); } buildCounterGoogleCloudAiplatformV1ToolParameterKVMatchResults--; } @@ -26491,12 +29324,12 @@ void checkGoogleCloudAiplatformV1ToolParameterKVMatchSpec( } core.List - buildUnnamed358() => [ + buildUnnamed386() => [ buildGoogleCloudAiplatformV1ToolParameterKeyMatchInstance(), buildGoogleCloudAiplatformV1ToolParameterKeyMatchInstance(), ]; -void checkUnnamed358( +void checkUnnamed386( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1ToolParameterKeyMatchInstance(o[0]); @@ -26509,7 +29342,7 @@ api.GoogleCloudAiplatformV1ToolParameterKeyMatchInput final o = api.GoogleCloudAiplatformV1ToolParameterKeyMatchInput(); buildCounterGoogleCloudAiplatformV1ToolParameterKeyMatchInput++; if (buildCounterGoogleCloudAiplatformV1ToolParameterKeyMatchInput < 3) { - o.instances = buildUnnamed358(); + o.instances = buildUnnamed386(); o.metricSpec = buildGoogleCloudAiplatformV1ToolParameterKeyMatchSpec(); } buildCounterGoogleCloudAiplatformV1ToolParameterKeyMatchInput--; @@ -26520,7 +29353,7 @@ void checkGoogleCloudAiplatformV1ToolParameterKeyMatchInput( api.GoogleCloudAiplatformV1ToolParameterKeyMatchInput o) { buildCounterGoogleCloudAiplatformV1ToolParameterKeyMatchInput++; if (buildCounterGoogleCloudAiplatformV1ToolParameterKeyMatchInput < 3) { - checkUnnamed358(o.instances!); + checkUnnamed386(o.instances!); checkGoogleCloudAiplatformV1ToolParameterKeyMatchSpec(o.metricSpec!); } buildCounterGoogleCloudAiplatformV1ToolParameterKeyMatchInput--; @@ -26581,12 +29414,12 @@ void checkGoogleCloudAiplatformV1ToolParameterKeyMatchMetricValue( } core.List - buildUnnamed359() => [ + buildUnnamed387() => [ buildGoogleCloudAiplatformV1ToolParameterKeyMatchMetricValue(), buildGoogleCloudAiplatformV1ToolParameterKeyMatchMetricValue(), ]; -void checkUnnamed359( +void checkUnnamed387( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1ToolParameterKeyMatchMetricValue(o[0]); @@ -26599,7 +29432,7 @@ api.GoogleCloudAiplatformV1ToolParameterKeyMatchResults final o = api.GoogleCloudAiplatformV1ToolParameterKeyMatchResults(); buildCounterGoogleCloudAiplatformV1ToolParameterKeyMatchResults++; if (buildCounterGoogleCloudAiplatformV1ToolParameterKeyMatchResults < 3) { - o.toolParameterKeyMatchMetricValues = buildUnnamed359(); + o.toolParameterKeyMatchMetricValues = buildUnnamed387(); } buildCounterGoogleCloudAiplatformV1ToolParameterKeyMatchResults--; return o; @@ -26609,7 +29442,7 @@ void checkGoogleCloudAiplatformV1ToolParameterKeyMatchResults( api.GoogleCloudAiplatformV1ToolParameterKeyMatchResults o) { buildCounterGoogleCloudAiplatformV1ToolParameterKeyMatchResults++; if 
(buildCounterGoogleCloudAiplatformV1ToolParameterKeyMatchResults < 3) { - checkUnnamed359(o.toolParameterKeyMatchMetricValues!); + checkUnnamed387(o.toolParameterKeyMatchMetricValues!); } buildCounterGoogleCloudAiplatformV1ToolParameterKeyMatchResults--; } @@ -26655,12 +29488,12 @@ void checkGoogleCloudAiplatformV1TrainingConfig( buildCounterGoogleCloudAiplatformV1TrainingConfig--; } -core.Map buildUnnamed360() => { +core.Map buildUnnamed388() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed360(core.Map o) { +void checkUnnamed388(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -26684,7 +29517,7 @@ api.GoogleCloudAiplatformV1TrainingPipeline o.endTime = 'foo'; o.error = buildGoogleRpcStatus(); o.inputDataConfig = buildGoogleCloudAiplatformV1InputDataConfig(); - o.labels = buildUnnamed360(); + o.labels = buildUnnamed388(); o.modelId = 'foo'; o.modelToUpload = buildGoogleCloudAiplatformV1Model(); o.name = 'foo'; @@ -26727,7 +29560,7 @@ void checkGoogleCloudAiplatformV1TrainingPipeline( ); checkGoogleRpcStatus(o.error!); checkGoogleCloudAiplatformV1InputDataConfig(o.inputDataConfig!); - checkUnnamed360(o.labels!); + checkUnnamed388(o.labels!); unittest.expect( o.modelId!, unittest.equals('foo'), @@ -26753,32 +29586,32 @@ void checkGoogleCloudAiplatformV1TrainingPipeline( o.trainingTaskDefinition!, unittest.equals('foo'), ); - var casted65 = (o.trainingTaskInputs!) as core.Map; - unittest.expect(casted65, unittest.hasLength(3)); + var casted73 = (o.trainingTaskInputs!) as core.Map; + unittest.expect(casted73, unittest.hasLength(3)); unittest.expect( - casted65['list'], + casted73['list'], unittest.equals([1, 2, 3]), ); unittest.expect( - casted65['bool'], + casted73['bool'], unittest.equals(true), ); unittest.expect( - casted65['string'], + casted73['string'], unittest.equals('foo'), ); - var casted66 = (o.trainingTaskMetadata!) as core.Map; - unittest.expect(casted66, unittest.hasLength(3)); + var casted74 = (o.trainingTaskMetadata!) 
as core.Map;
+    unittest.expect(casted74, unittest.hasLength(3));
     unittest.expect(
-      casted66['list'],
+      casted74['list'],
       unittest.equals([1, 2, 3]),
     );
     unittest.expect(
-      casted66['bool'],
+      casted74['bool'],
       unittest.equals(true),
     );
     unittest.expect(
-      casted66['string'],
+      casted74['string'],
       unittest.equals('foo'),
     );
     unittest.expect(
@@ -26789,34 +29622,885 @@ void checkGoogleCloudAiplatformV1TrainingPipeline(
   buildCounterGoogleCloudAiplatformV1TrainingPipeline--;
 }
 
-core.List buildUnnamed361() => [
+core.List buildUnnamed389() => [
+      buildGoogleCloudAiplatformV1ToolCall(),
+      buildGoogleCloudAiplatformV1ToolCall(),
+    ];
+
+void checkUnnamed389(core.List o) {
+  unittest.expect(o, unittest.hasLength(2));
+  checkGoogleCloudAiplatformV1ToolCall(o[0]);
+  checkGoogleCloudAiplatformV1ToolCall(o[1]);
+}
+
+core.int buildCounterGoogleCloudAiplatformV1Trajectory = 0;
+api.GoogleCloudAiplatformV1Trajectory buildGoogleCloudAiplatformV1Trajectory() {
+  final o = api.GoogleCloudAiplatformV1Trajectory();
+  buildCounterGoogleCloudAiplatformV1Trajectory++;
+  if (buildCounterGoogleCloudAiplatformV1Trajectory < 3) {
+    o.toolCalls = buildUnnamed389();
+  }
+  buildCounterGoogleCloudAiplatformV1Trajectory--;
+  return o;
+}
+
+void checkGoogleCloudAiplatformV1Trajectory(
+    api.GoogleCloudAiplatformV1Trajectory o) {
+  buildCounterGoogleCloudAiplatformV1Trajectory++;
+  if (buildCounterGoogleCloudAiplatformV1Trajectory < 3) {
+    checkUnnamed389(o.toolCalls!);
+  }
+  buildCounterGoogleCloudAiplatformV1Trajectory--;
+}
+
+core.List
+    buildUnnamed390() => [
+      buildGoogleCloudAiplatformV1TrajectoryAnyOrderMatchInstance(),
+      buildGoogleCloudAiplatformV1TrajectoryAnyOrderMatchInstance(),
+    ];
+
+void checkUnnamed390(
+    core.List o) {
+  unittest.expect(o, unittest.hasLength(2));
+  checkGoogleCloudAiplatformV1TrajectoryAnyOrderMatchInstance(o[0]);
+  checkGoogleCloudAiplatformV1TrajectoryAnyOrderMatchInstance(o[1]);
+}
+
+core.int buildCounterGoogleCloudAiplatformV1TrajectoryAnyOrderMatchInput = 0;
+api.GoogleCloudAiplatformV1TrajectoryAnyOrderMatchInput
+    buildGoogleCloudAiplatformV1TrajectoryAnyOrderMatchInput() {
+  final o = api.GoogleCloudAiplatformV1TrajectoryAnyOrderMatchInput();
+  buildCounterGoogleCloudAiplatformV1TrajectoryAnyOrderMatchInput++;
+  if (buildCounterGoogleCloudAiplatformV1TrajectoryAnyOrderMatchInput < 3) {
+    o.instances = buildUnnamed390();
+    o.metricSpec = buildGoogleCloudAiplatformV1TrajectoryAnyOrderMatchSpec();
+  }
+  buildCounterGoogleCloudAiplatformV1TrajectoryAnyOrderMatchInput--;
+  return o;
+}
+
+void checkGoogleCloudAiplatformV1TrajectoryAnyOrderMatchInput(
+    api.GoogleCloudAiplatformV1TrajectoryAnyOrderMatchInput o) {
+  buildCounterGoogleCloudAiplatformV1TrajectoryAnyOrderMatchInput++;
+  if (buildCounterGoogleCloudAiplatformV1TrajectoryAnyOrderMatchInput < 3) {
+    checkUnnamed390(o.instances!);
+    checkGoogleCloudAiplatformV1TrajectoryAnyOrderMatchSpec(o.metricSpec!);
+  }
+  buildCounterGoogleCloudAiplatformV1TrajectoryAnyOrderMatchInput--;
+}
+
+core.int buildCounterGoogleCloudAiplatformV1TrajectoryAnyOrderMatchInstance = 0;
+api.GoogleCloudAiplatformV1TrajectoryAnyOrderMatchInstance
+    buildGoogleCloudAiplatformV1TrajectoryAnyOrderMatchInstance() {
+  final o = api.GoogleCloudAiplatformV1TrajectoryAnyOrderMatchInstance();
+  buildCounterGoogleCloudAiplatformV1TrajectoryAnyOrderMatchInstance++;
+  if (buildCounterGoogleCloudAiplatformV1TrajectoryAnyOrderMatchInstance < 3) {
+    o.predictedTrajectory = buildGoogleCloudAiplatformV1Trajectory();
+    o.referenceTrajectory = buildGoogleCloudAiplatformV1Trajectory();
+  }
+  buildCounterGoogleCloudAiplatformV1TrajectoryAnyOrderMatchInstance--;
+  return o;
+}
+
+void checkGoogleCloudAiplatformV1TrajectoryAnyOrderMatchInstance(
+    api.GoogleCloudAiplatformV1TrajectoryAnyOrderMatchInstance o) {
+  buildCounterGoogleCloudAiplatformV1TrajectoryAnyOrderMatchInstance++;
+  if (buildCounterGoogleCloudAiplatformV1TrajectoryAnyOrderMatchInstance < 3) {
+    checkGoogleCloudAiplatformV1Trajectory(o.predictedTrajectory!);
+    checkGoogleCloudAiplatformV1Trajectory(o.referenceTrajectory!);
+  }
+  buildCounterGoogleCloudAiplatformV1TrajectoryAnyOrderMatchInstance--;
+}
+
+core.int buildCounterGoogleCloudAiplatformV1TrajectoryAnyOrderMatchMetricValue =
+    0;
+api.GoogleCloudAiplatformV1TrajectoryAnyOrderMatchMetricValue
+    buildGoogleCloudAiplatformV1TrajectoryAnyOrderMatchMetricValue() {
+  final o = api.GoogleCloudAiplatformV1TrajectoryAnyOrderMatchMetricValue();
+  buildCounterGoogleCloudAiplatformV1TrajectoryAnyOrderMatchMetricValue++;
+  if (buildCounterGoogleCloudAiplatformV1TrajectoryAnyOrderMatchMetricValue <
+      3) {
+    o.score = 42.0;
+  }
+  buildCounterGoogleCloudAiplatformV1TrajectoryAnyOrderMatchMetricValue--;
+  return o;
+}
+
+void checkGoogleCloudAiplatformV1TrajectoryAnyOrderMatchMetricValue(
+    api.GoogleCloudAiplatformV1TrajectoryAnyOrderMatchMetricValue o) {
+  buildCounterGoogleCloudAiplatformV1TrajectoryAnyOrderMatchMetricValue++;
+  if (buildCounterGoogleCloudAiplatformV1TrajectoryAnyOrderMatchMetricValue <
+      3) {
+    unittest.expect(
+      o.score!,
+      unittest.equals(42.0),
+    );
+  }
+  buildCounterGoogleCloudAiplatformV1TrajectoryAnyOrderMatchMetricValue--;
+}
+
+core.List
+    buildUnnamed391() => [
+      buildGoogleCloudAiplatformV1TrajectoryAnyOrderMatchMetricValue(),
+      buildGoogleCloudAiplatformV1TrajectoryAnyOrderMatchMetricValue(),
+    ];
+
+void checkUnnamed391(
+    core.List
+        o) {
+  unittest.expect(o, unittest.hasLength(2));
+  checkGoogleCloudAiplatformV1TrajectoryAnyOrderMatchMetricValue(o[0]);
+  checkGoogleCloudAiplatformV1TrajectoryAnyOrderMatchMetricValue(o[1]);
+}
+
+core.int buildCounterGoogleCloudAiplatformV1TrajectoryAnyOrderMatchResults = 0;
+api.GoogleCloudAiplatformV1TrajectoryAnyOrderMatchResults
+    buildGoogleCloudAiplatformV1TrajectoryAnyOrderMatchResults() {
+  final o = api.GoogleCloudAiplatformV1TrajectoryAnyOrderMatchResults();
+  buildCounterGoogleCloudAiplatformV1TrajectoryAnyOrderMatchResults++;
+  if (buildCounterGoogleCloudAiplatformV1TrajectoryAnyOrderMatchResults < 3) {
+    o.trajectoryAnyOrderMatchMetricValues = buildUnnamed391();
+  }
+  buildCounterGoogleCloudAiplatformV1TrajectoryAnyOrderMatchResults--;
+  return o;
+}
+
+void checkGoogleCloudAiplatformV1TrajectoryAnyOrderMatchResults(
+    api.GoogleCloudAiplatformV1TrajectoryAnyOrderMatchResults o) {
+  buildCounterGoogleCloudAiplatformV1TrajectoryAnyOrderMatchResults++;
+  if (buildCounterGoogleCloudAiplatformV1TrajectoryAnyOrderMatchResults < 3) {
+    checkUnnamed391(o.trajectoryAnyOrderMatchMetricValues!);
+  }
+  buildCounterGoogleCloudAiplatformV1TrajectoryAnyOrderMatchResults--;
+}
+
+core.int buildCounterGoogleCloudAiplatformV1TrajectoryAnyOrderMatchSpec = 0;
+api.GoogleCloudAiplatformV1TrajectoryAnyOrderMatchSpec
+    buildGoogleCloudAiplatformV1TrajectoryAnyOrderMatchSpec() {
+  final o = api.GoogleCloudAiplatformV1TrajectoryAnyOrderMatchSpec();
+  buildCounterGoogleCloudAiplatformV1TrajectoryAnyOrderMatchSpec++;
+  if (buildCounterGoogleCloudAiplatformV1TrajectoryAnyOrderMatchSpec < 3) {}
+  buildCounterGoogleCloudAiplatformV1TrajectoryAnyOrderMatchSpec--;
+  return o;
+} + +void checkGoogleCloudAiplatformV1TrajectoryAnyOrderMatchSpec( + api.GoogleCloudAiplatformV1TrajectoryAnyOrderMatchSpec o) { + buildCounterGoogleCloudAiplatformV1TrajectoryAnyOrderMatchSpec++; + if (buildCounterGoogleCloudAiplatformV1TrajectoryAnyOrderMatchSpec < 3) {} + buildCounterGoogleCloudAiplatformV1TrajectoryAnyOrderMatchSpec--; +} + +core.List + buildUnnamed392() => [ + buildGoogleCloudAiplatformV1TrajectoryExactMatchInstance(), + buildGoogleCloudAiplatformV1TrajectoryExactMatchInstance(), + ]; + +void checkUnnamed392( + core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkGoogleCloudAiplatformV1TrajectoryExactMatchInstance(o[0]); + checkGoogleCloudAiplatformV1TrajectoryExactMatchInstance(o[1]); +} + +core.int buildCounterGoogleCloudAiplatformV1TrajectoryExactMatchInput = 0; +api.GoogleCloudAiplatformV1TrajectoryExactMatchInput + buildGoogleCloudAiplatformV1TrajectoryExactMatchInput() { + final o = api.GoogleCloudAiplatformV1TrajectoryExactMatchInput(); + buildCounterGoogleCloudAiplatformV1TrajectoryExactMatchInput++; + if (buildCounterGoogleCloudAiplatformV1TrajectoryExactMatchInput < 3) { + o.instances = buildUnnamed392(); + o.metricSpec = buildGoogleCloudAiplatformV1TrajectoryExactMatchSpec(); + } + buildCounterGoogleCloudAiplatformV1TrajectoryExactMatchInput--; + return o; +} + +void checkGoogleCloudAiplatformV1TrajectoryExactMatchInput( + api.GoogleCloudAiplatformV1TrajectoryExactMatchInput o) { + buildCounterGoogleCloudAiplatformV1TrajectoryExactMatchInput++; + if (buildCounterGoogleCloudAiplatformV1TrajectoryExactMatchInput < 3) { + checkUnnamed392(o.instances!); + checkGoogleCloudAiplatformV1TrajectoryExactMatchSpec(o.metricSpec!); + } + buildCounterGoogleCloudAiplatformV1TrajectoryExactMatchInput--; +} + +core.int buildCounterGoogleCloudAiplatformV1TrajectoryExactMatchInstance = 0; +api.GoogleCloudAiplatformV1TrajectoryExactMatchInstance + buildGoogleCloudAiplatformV1TrajectoryExactMatchInstance() { + final o = api.GoogleCloudAiplatformV1TrajectoryExactMatchInstance(); + buildCounterGoogleCloudAiplatformV1TrajectoryExactMatchInstance++; + if (buildCounterGoogleCloudAiplatformV1TrajectoryExactMatchInstance < 3) { + o.predictedTrajectory = buildGoogleCloudAiplatformV1Trajectory(); + o.referenceTrajectory = buildGoogleCloudAiplatformV1Trajectory(); + } + buildCounterGoogleCloudAiplatformV1TrajectoryExactMatchInstance--; + return o; +} + +void checkGoogleCloudAiplatformV1TrajectoryExactMatchInstance( + api.GoogleCloudAiplatformV1TrajectoryExactMatchInstance o) { + buildCounterGoogleCloudAiplatformV1TrajectoryExactMatchInstance++; + if (buildCounterGoogleCloudAiplatformV1TrajectoryExactMatchInstance < 3) { + checkGoogleCloudAiplatformV1Trajectory(o.predictedTrajectory!); + checkGoogleCloudAiplatformV1Trajectory(o.referenceTrajectory!); + } + buildCounterGoogleCloudAiplatformV1TrajectoryExactMatchInstance--; +} + +core.int buildCounterGoogleCloudAiplatformV1TrajectoryExactMatchMetricValue = 0; +api.GoogleCloudAiplatformV1TrajectoryExactMatchMetricValue + buildGoogleCloudAiplatformV1TrajectoryExactMatchMetricValue() { + final o = api.GoogleCloudAiplatformV1TrajectoryExactMatchMetricValue(); + buildCounterGoogleCloudAiplatformV1TrajectoryExactMatchMetricValue++; + if (buildCounterGoogleCloudAiplatformV1TrajectoryExactMatchMetricValue < 3) { + o.score = 42.0; + } + buildCounterGoogleCloudAiplatformV1TrajectoryExactMatchMetricValue--; + return o; +} + +void checkGoogleCloudAiplatformV1TrajectoryExactMatchMetricValue( + 
api.GoogleCloudAiplatformV1TrajectoryExactMatchMetricValue o) { + buildCounterGoogleCloudAiplatformV1TrajectoryExactMatchMetricValue++; + if (buildCounterGoogleCloudAiplatformV1TrajectoryExactMatchMetricValue < 3) { + unittest.expect( + o.score!, + unittest.equals(42.0), + ); + } + buildCounterGoogleCloudAiplatformV1TrajectoryExactMatchMetricValue--; +} + +core.List + buildUnnamed393() => [ + buildGoogleCloudAiplatformV1TrajectoryExactMatchMetricValue(), + buildGoogleCloudAiplatformV1TrajectoryExactMatchMetricValue(), + ]; + +void checkUnnamed393( + core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkGoogleCloudAiplatformV1TrajectoryExactMatchMetricValue(o[0]); + checkGoogleCloudAiplatformV1TrajectoryExactMatchMetricValue(o[1]); +} + +core.int buildCounterGoogleCloudAiplatformV1TrajectoryExactMatchResults = 0; +api.GoogleCloudAiplatformV1TrajectoryExactMatchResults + buildGoogleCloudAiplatformV1TrajectoryExactMatchResults() { + final o = api.GoogleCloudAiplatformV1TrajectoryExactMatchResults(); + buildCounterGoogleCloudAiplatformV1TrajectoryExactMatchResults++; + if (buildCounterGoogleCloudAiplatformV1TrajectoryExactMatchResults < 3) { + o.trajectoryExactMatchMetricValues = buildUnnamed393(); + } + buildCounterGoogleCloudAiplatformV1TrajectoryExactMatchResults--; + return o; +} + +void checkGoogleCloudAiplatformV1TrajectoryExactMatchResults( + api.GoogleCloudAiplatformV1TrajectoryExactMatchResults o) { + buildCounterGoogleCloudAiplatformV1TrajectoryExactMatchResults++; + if (buildCounterGoogleCloudAiplatformV1TrajectoryExactMatchResults < 3) { + checkUnnamed393(o.trajectoryExactMatchMetricValues!); + } + buildCounterGoogleCloudAiplatformV1TrajectoryExactMatchResults--; +} + +core.int buildCounterGoogleCloudAiplatformV1TrajectoryExactMatchSpec = 0; +api.GoogleCloudAiplatformV1TrajectoryExactMatchSpec + buildGoogleCloudAiplatformV1TrajectoryExactMatchSpec() { + final o = api.GoogleCloudAiplatformV1TrajectoryExactMatchSpec(); + buildCounterGoogleCloudAiplatformV1TrajectoryExactMatchSpec++; + if (buildCounterGoogleCloudAiplatformV1TrajectoryExactMatchSpec < 3) {} + buildCounterGoogleCloudAiplatformV1TrajectoryExactMatchSpec--; + return o; +} + +void checkGoogleCloudAiplatformV1TrajectoryExactMatchSpec( + api.GoogleCloudAiplatformV1TrajectoryExactMatchSpec o) { + buildCounterGoogleCloudAiplatformV1TrajectoryExactMatchSpec++; + if (buildCounterGoogleCloudAiplatformV1TrajectoryExactMatchSpec < 3) {} + buildCounterGoogleCloudAiplatformV1TrajectoryExactMatchSpec--; +} + +core.List + buildUnnamed394() => [ + buildGoogleCloudAiplatformV1TrajectoryInOrderMatchInstance(), + buildGoogleCloudAiplatformV1TrajectoryInOrderMatchInstance(), + ]; + +void checkUnnamed394( + core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkGoogleCloudAiplatformV1TrajectoryInOrderMatchInstance(o[0]); + checkGoogleCloudAiplatformV1TrajectoryInOrderMatchInstance(o[1]); +} + +core.int buildCounterGoogleCloudAiplatformV1TrajectoryInOrderMatchInput = 0; +api.GoogleCloudAiplatformV1TrajectoryInOrderMatchInput + buildGoogleCloudAiplatformV1TrajectoryInOrderMatchInput() { + final o = api.GoogleCloudAiplatformV1TrajectoryInOrderMatchInput(); + buildCounterGoogleCloudAiplatformV1TrajectoryInOrderMatchInput++; + if (buildCounterGoogleCloudAiplatformV1TrajectoryInOrderMatchInput < 3) { + o.instances = buildUnnamed394(); + o.metricSpec = buildGoogleCloudAiplatformV1TrajectoryInOrderMatchSpec(); + } + buildCounterGoogleCloudAiplatformV1TrajectoryInOrderMatchInput--; + return o; +} + +void 
checkGoogleCloudAiplatformV1TrajectoryInOrderMatchInput( + api.GoogleCloudAiplatformV1TrajectoryInOrderMatchInput o) { + buildCounterGoogleCloudAiplatformV1TrajectoryInOrderMatchInput++; + if (buildCounterGoogleCloudAiplatformV1TrajectoryInOrderMatchInput < 3) { + checkUnnamed394(o.instances!); + checkGoogleCloudAiplatformV1TrajectoryInOrderMatchSpec(o.metricSpec!); + } + buildCounterGoogleCloudAiplatformV1TrajectoryInOrderMatchInput--; +} + +core.int buildCounterGoogleCloudAiplatformV1TrajectoryInOrderMatchInstance = 0; +api.GoogleCloudAiplatformV1TrajectoryInOrderMatchInstance + buildGoogleCloudAiplatformV1TrajectoryInOrderMatchInstance() { + final o = api.GoogleCloudAiplatformV1TrajectoryInOrderMatchInstance(); + buildCounterGoogleCloudAiplatformV1TrajectoryInOrderMatchInstance++; + if (buildCounterGoogleCloudAiplatformV1TrajectoryInOrderMatchInstance < 3) { + o.predictedTrajectory = buildGoogleCloudAiplatformV1Trajectory(); + o.referenceTrajectory = buildGoogleCloudAiplatformV1Trajectory(); + } + buildCounterGoogleCloudAiplatformV1TrajectoryInOrderMatchInstance--; + return o; +} + +void checkGoogleCloudAiplatformV1TrajectoryInOrderMatchInstance( + api.GoogleCloudAiplatformV1TrajectoryInOrderMatchInstance o) { + buildCounterGoogleCloudAiplatformV1TrajectoryInOrderMatchInstance++; + if (buildCounterGoogleCloudAiplatformV1TrajectoryInOrderMatchInstance < 3) { + checkGoogleCloudAiplatformV1Trajectory(o.predictedTrajectory!); + checkGoogleCloudAiplatformV1Trajectory(o.referenceTrajectory!); + } + buildCounterGoogleCloudAiplatformV1TrajectoryInOrderMatchInstance--; +} + +core.int buildCounterGoogleCloudAiplatformV1TrajectoryInOrderMatchMetricValue = + 0; +api.GoogleCloudAiplatformV1TrajectoryInOrderMatchMetricValue + buildGoogleCloudAiplatformV1TrajectoryInOrderMatchMetricValue() { + final o = api.GoogleCloudAiplatformV1TrajectoryInOrderMatchMetricValue(); + buildCounterGoogleCloudAiplatformV1TrajectoryInOrderMatchMetricValue++; + if (buildCounterGoogleCloudAiplatformV1TrajectoryInOrderMatchMetricValue < + 3) { + o.score = 42.0; + } + buildCounterGoogleCloudAiplatformV1TrajectoryInOrderMatchMetricValue--; + return o; +} + +void checkGoogleCloudAiplatformV1TrajectoryInOrderMatchMetricValue( + api.GoogleCloudAiplatformV1TrajectoryInOrderMatchMetricValue o) { + buildCounterGoogleCloudAiplatformV1TrajectoryInOrderMatchMetricValue++; + if (buildCounterGoogleCloudAiplatformV1TrajectoryInOrderMatchMetricValue < + 3) { + unittest.expect( + o.score!, + unittest.equals(42.0), + ); + } + buildCounterGoogleCloudAiplatformV1TrajectoryInOrderMatchMetricValue--; +} + +core.List + buildUnnamed395() => [ + buildGoogleCloudAiplatformV1TrajectoryInOrderMatchMetricValue(), + buildGoogleCloudAiplatformV1TrajectoryInOrderMatchMetricValue(), + ]; + +void checkUnnamed395( + core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkGoogleCloudAiplatformV1TrajectoryInOrderMatchMetricValue(o[0]); + checkGoogleCloudAiplatformV1TrajectoryInOrderMatchMetricValue(o[1]); +} + +core.int buildCounterGoogleCloudAiplatformV1TrajectoryInOrderMatchResults = 0; +api.GoogleCloudAiplatformV1TrajectoryInOrderMatchResults + buildGoogleCloudAiplatformV1TrajectoryInOrderMatchResults() { + final o = api.GoogleCloudAiplatformV1TrajectoryInOrderMatchResults(); + buildCounterGoogleCloudAiplatformV1TrajectoryInOrderMatchResults++; + if (buildCounterGoogleCloudAiplatformV1TrajectoryInOrderMatchResults < 3) { + o.trajectoryInOrderMatchMetricValues = buildUnnamed395(); + } + 
buildCounterGoogleCloudAiplatformV1TrajectoryInOrderMatchResults--; + return o; +} + +void checkGoogleCloudAiplatformV1TrajectoryInOrderMatchResults( + api.GoogleCloudAiplatformV1TrajectoryInOrderMatchResults o) { + buildCounterGoogleCloudAiplatformV1TrajectoryInOrderMatchResults++; + if (buildCounterGoogleCloudAiplatformV1TrajectoryInOrderMatchResults < 3) { + checkUnnamed395(o.trajectoryInOrderMatchMetricValues!); + } + buildCounterGoogleCloudAiplatformV1TrajectoryInOrderMatchResults--; +} + +core.int buildCounterGoogleCloudAiplatformV1TrajectoryInOrderMatchSpec = 0; +api.GoogleCloudAiplatformV1TrajectoryInOrderMatchSpec + buildGoogleCloudAiplatformV1TrajectoryInOrderMatchSpec() { + final o = api.GoogleCloudAiplatformV1TrajectoryInOrderMatchSpec(); + buildCounterGoogleCloudAiplatformV1TrajectoryInOrderMatchSpec++; + if (buildCounterGoogleCloudAiplatformV1TrajectoryInOrderMatchSpec < 3) {} + buildCounterGoogleCloudAiplatformV1TrajectoryInOrderMatchSpec--; + return o; +} + +void checkGoogleCloudAiplatformV1TrajectoryInOrderMatchSpec( + api.GoogleCloudAiplatformV1TrajectoryInOrderMatchSpec o) { + buildCounterGoogleCloudAiplatformV1TrajectoryInOrderMatchSpec++; + if (buildCounterGoogleCloudAiplatformV1TrajectoryInOrderMatchSpec < 3) {} + buildCounterGoogleCloudAiplatformV1TrajectoryInOrderMatchSpec--; +} + +core.List + buildUnnamed396() => [ + buildGoogleCloudAiplatformV1TrajectoryPrecisionInstance(), + buildGoogleCloudAiplatformV1TrajectoryPrecisionInstance(), + ]; + +void checkUnnamed396( + core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkGoogleCloudAiplatformV1TrajectoryPrecisionInstance(o[0]); + checkGoogleCloudAiplatformV1TrajectoryPrecisionInstance(o[1]); +} + +core.int buildCounterGoogleCloudAiplatformV1TrajectoryPrecisionInput = 0; +api.GoogleCloudAiplatformV1TrajectoryPrecisionInput + buildGoogleCloudAiplatformV1TrajectoryPrecisionInput() { + final o = api.GoogleCloudAiplatformV1TrajectoryPrecisionInput(); + buildCounterGoogleCloudAiplatformV1TrajectoryPrecisionInput++; + if (buildCounterGoogleCloudAiplatformV1TrajectoryPrecisionInput < 3) { + o.instances = buildUnnamed396(); + o.metricSpec = buildGoogleCloudAiplatformV1TrajectoryPrecisionSpec(); + } + buildCounterGoogleCloudAiplatformV1TrajectoryPrecisionInput--; + return o; +} + +void checkGoogleCloudAiplatformV1TrajectoryPrecisionInput( + api.GoogleCloudAiplatformV1TrajectoryPrecisionInput o) { + buildCounterGoogleCloudAiplatformV1TrajectoryPrecisionInput++; + if (buildCounterGoogleCloudAiplatformV1TrajectoryPrecisionInput < 3) { + checkUnnamed396(o.instances!); + checkGoogleCloudAiplatformV1TrajectoryPrecisionSpec(o.metricSpec!); + } + buildCounterGoogleCloudAiplatformV1TrajectoryPrecisionInput--; +} + +core.int buildCounterGoogleCloudAiplatformV1TrajectoryPrecisionInstance = 0; +api.GoogleCloudAiplatformV1TrajectoryPrecisionInstance + buildGoogleCloudAiplatformV1TrajectoryPrecisionInstance() { + final o = api.GoogleCloudAiplatformV1TrajectoryPrecisionInstance(); + buildCounterGoogleCloudAiplatformV1TrajectoryPrecisionInstance++; + if (buildCounterGoogleCloudAiplatformV1TrajectoryPrecisionInstance < 3) { + o.predictedTrajectory = buildGoogleCloudAiplatformV1Trajectory(); + o.referenceTrajectory = buildGoogleCloudAiplatformV1Trajectory(); + } + buildCounterGoogleCloudAiplatformV1TrajectoryPrecisionInstance--; + return o; +} + +void checkGoogleCloudAiplatformV1TrajectoryPrecisionInstance( + api.GoogleCloudAiplatformV1TrajectoryPrecisionInstance o) { + 
buildCounterGoogleCloudAiplatformV1TrajectoryPrecisionInstance++; + if (buildCounterGoogleCloudAiplatformV1TrajectoryPrecisionInstance < 3) { + checkGoogleCloudAiplatformV1Trajectory(o.predictedTrajectory!); + checkGoogleCloudAiplatformV1Trajectory(o.referenceTrajectory!); + } + buildCounterGoogleCloudAiplatformV1TrajectoryPrecisionInstance--; +} + +core.int buildCounterGoogleCloudAiplatformV1TrajectoryPrecisionMetricValue = 0; +api.GoogleCloudAiplatformV1TrajectoryPrecisionMetricValue + buildGoogleCloudAiplatformV1TrajectoryPrecisionMetricValue() { + final o = api.GoogleCloudAiplatformV1TrajectoryPrecisionMetricValue(); + buildCounterGoogleCloudAiplatformV1TrajectoryPrecisionMetricValue++; + if (buildCounterGoogleCloudAiplatformV1TrajectoryPrecisionMetricValue < 3) { + o.score = 42.0; + } + buildCounterGoogleCloudAiplatformV1TrajectoryPrecisionMetricValue--; + return o; +} + +void checkGoogleCloudAiplatformV1TrajectoryPrecisionMetricValue( + api.GoogleCloudAiplatformV1TrajectoryPrecisionMetricValue o) { + buildCounterGoogleCloudAiplatformV1TrajectoryPrecisionMetricValue++; + if (buildCounterGoogleCloudAiplatformV1TrajectoryPrecisionMetricValue < 3) { + unittest.expect( + o.score!, + unittest.equals(42.0), + ); + } + buildCounterGoogleCloudAiplatformV1TrajectoryPrecisionMetricValue--; +} + +core.List + buildUnnamed397() => [ + buildGoogleCloudAiplatformV1TrajectoryPrecisionMetricValue(), + buildGoogleCloudAiplatformV1TrajectoryPrecisionMetricValue(), + ]; + +void checkUnnamed397( + core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkGoogleCloudAiplatformV1TrajectoryPrecisionMetricValue(o[0]); + checkGoogleCloudAiplatformV1TrajectoryPrecisionMetricValue(o[1]); +} + +core.int buildCounterGoogleCloudAiplatformV1TrajectoryPrecisionResults = 0; +api.GoogleCloudAiplatformV1TrajectoryPrecisionResults + buildGoogleCloudAiplatformV1TrajectoryPrecisionResults() { + final o = api.GoogleCloudAiplatformV1TrajectoryPrecisionResults(); + buildCounterGoogleCloudAiplatformV1TrajectoryPrecisionResults++; + if (buildCounterGoogleCloudAiplatformV1TrajectoryPrecisionResults < 3) { + o.trajectoryPrecisionMetricValues = buildUnnamed397(); + } + buildCounterGoogleCloudAiplatformV1TrajectoryPrecisionResults--; + return o; +} + +void checkGoogleCloudAiplatformV1TrajectoryPrecisionResults( + api.GoogleCloudAiplatformV1TrajectoryPrecisionResults o) { + buildCounterGoogleCloudAiplatformV1TrajectoryPrecisionResults++; + if (buildCounterGoogleCloudAiplatformV1TrajectoryPrecisionResults < 3) { + checkUnnamed397(o.trajectoryPrecisionMetricValues!); + } + buildCounterGoogleCloudAiplatformV1TrajectoryPrecisionResults--; +} + +core.int buildCounterGoogleCloudAiplatformV1TrajectoryPrecisionSpec = 0; +api.GoogleCloudAiplatformV1TrajectoryPrecisionSpec + buildGoogleCloudAiplatformV1TrajectoryPrecisionSpec() { + final o = api.GoogleCloudAiplatformV1TrajectoryPrecisionSpec(); + buildCounterGoogleCloudAiplatformV1TrajectoryPrecisionSpec++; + if (buildCounterGoogleCloudAiplatformV1TrajectoryPrecisionSpec < 3) {} + buildCounterGoogleCloudAiplatformV1TrajectoryPrecisionSpec--; + return o; +} + +void checkGoogleCloudAiplatformV1TrajectoryPrecisionSpec( + api.GoogleCloudAiplatformV1TrajectoryPrecisionSpec o) { + buildCounterGoogleCloudAiplatformV1TrajectoryPrecisionSpec++; + if (buildCounterGoogleCloudAiplatformV1TrajectoryPrecisionSpec < 3) {} + buildCounterGoogleCloudAiplatformV1TrajectoryPrecisionSpec--; +} + +core.List + buildUnnamed398() => [ + buildGoogleCloudAiplatformV1TrajectoryRecallInstance(), + 
buildGoogleCloudAiplatformV1TrajectoryRecallInstance(), + ]; + +void checkUnnamed398( + core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkGoogleCloudAiplatformV1TrajectoryRecallInstance(o[0]); + checkGoogleCloudAiplatformV1TrajectoryRecallInstance(o[1]); +} + +core.int buildCounterGoogleCloudAiplatformV1TrajectoryRecallInput = 0; +api.GoogleCloudAiplatformV1TrajectoryRecallInput + buildGoogleCloudAiplatformV1TrajectoryRecallInput() { + final o = api.GoogleCloudAiplatformV1TrajectoryRecallInput(); + buildCounterGoogleCloudAiplatformV1TrajectoryRecallInput++; + if (buildCounterGoogleCloudAiplatformV1TrajectoryRecallInput < 3) { + o.instances = buildUnnamed398(); + o.metricSpec = buildGoogleCloudAiplatformV1TrajectoryRecallSpec(); + } + buildCounterGoogleCloudAiplatformV1TrajectoryRecallInput--; + return o; +} + +void checkGoogleCloudAiplatformV1TrajectoryRecallInput( + api.GoogleCloudAiplatformV1TrajectoryRecallInput o) { + buildCounterGoogleCloudAiplatformV1TrajectoryRecallInput++; + if (buildCounterGoogleCloudAiplatformV1TrajectoryRecallInput < 3) { + checkUnnamed398(o.instances!); + checkGoogleCloudAiplatformV1TrajectoryRecallSpec(o.metricSpec!); + } + buildCounterGoogleCloudAiplatformV1TrajectoryRecallInput--; +} + +core.int buildCounterGoogleCloudAiplatformV1TrajectoryRecallInstance = 0; +api.GoogleCloudAiplatformV1TrajectoryRecallInstance + buildGoogleCloudAiplatformV1TrajectoryRecallInstance() { + final o = api.GoogleCloudAiplatformV1TrajectoryRecallInstance(); + buildCounterGoogleCloudAiplatformV1TrajectoryRecallInstance++; + if (buildCounterGoogleCloudAiplatformV1TrajectoryRecallInstance < 3) { + o.predictedTrajectory = buildGoogleCloudAiplatformV1Trajectory(); + o.referenceTrajectory = buildGoogleCloudAiplatformV1Trajectory(); + } + buildCounterGoogleCloudAiplatformV1TrajectoryRecallInstance--; + return o; +} + +void checkGoogleCloudAiplatformV1TrajectoryRecallInstance( + api.GoogleCloudAiplatformV1TrajectoryRecallInstance o) { + buildCounterGoogleCloudAiplatformV1TrajectoryRecallInstance++; + if (buildCounterGoogleCloudAiplatformV1TrajectoryRecallInstance < 3) { + checkGoogleCloudAiplatformV1Trajectory(o.predictedTrajectory!); + checkGoogleCloudAiplatformV1Trajectory(o.referenceTrajectory!); + } + buildCounterGoogleCloudAiplatformV1TrajectoryRecallInstance--; +} + +core.int buildCounterGoogleCloudAiplatformV1TrajectoryRecallMetricValue = 0; +api.GoogleCloudAiplatformV1TrajectoryRecallMetricValue + buildGoogleCloudAiplatformV1TrajectoryRecallMetricValue() { + final o = api.GoogleCloudAiplatformV1TrajectoryRecallMetricValue(); + buildCounterGoogleCloudAiplatformV1TrajectoryRecallMetricValue++; + if (buildCounterGoogleCloudAiplatformV1TrajectoryRecallMetricValue < 3) { + o.score = 42.0; + } + buildCounterGoogleCloudAiplatformV1TrajectoryRecallMetricValue--; + return o; +} + +void checkGoogleCloudAiplatformV1TrajectoryRecallMetricValue( + api.GoogleCloudAiplatformV1TrajectoryRecallMetricValue o) { + buildCounterGoogleCloudAiplatformV1TrajectoryRecallMetricValue++; + if (buildCounterGoogleCloudAiplatformV1TrajectoryRecallMetricValue < 3) { + unittest.expect( + o.score!, + unittest.equals(42.0), + ); + } + buildCounterGoogleCloudAiplatformV1TrajectoryRecallMetricValue--; +} + +core.List + buildUnnamed399() => [ + buildGoogleCloudAiplatformV1TrajectoryRecallMetricValue(), + buildGoogleCloudAiplatformV1TrajectoryRecallMetricValue(), + ]; + +void checkUnnamed399( + core.List o) { + unittest.expect(o, unittest.hasLength(2)); + 
checkGoogleCloudAiplatformV1TrajectoryRecallMetricValue(o[0]); + checkGoogleCloudAiplatformV1TrajectoryRecallMetricValue(o[1]); +} + +core.int buildCounterGoogleCloudAiplatformV1TrajectoryRecallResults = 0; +api.GoogleCloudAiplatformV1TrajectoryRecallResults + buildGoogleCloudAiplatformV1TrajectoryRecallResults() { + final o = api.GoogleCloudAiplatformV1TrajectoryRecallResults(); + buildCounterGoogleCloudAiplatformV1TrajectoryRecallResults++; + if (buildCounterGoogleCloudAiplatformV1TrajectoryRecallResults < 3) { + o.trajectoryRecallMetricValues = buildUnnamed399(); + } + buildCounterGoogleCloudAiplatformV1TrajectoryRecallResults--; + return o; +} + +void checkGoogleCloudAiplatformV1TrajectoryRecallResults( + api.GoogleCloudAiplatformV1TrajectoryRecallResults o) { + buildCounterGoogleCloudAiplatformV1TrajectoryRecallResults++; + if (buildCounterGoogleCloudAiplatformV1TrajectoryRecallResults < 3) { + checkUnnamed399(o.trajectoryRecallMetricValues!); + } + buildCounterGoogleCloudAiplatformV1TrajectoryRecallResults--; +} + +core.int buildCounterGoogleCloudAiplatformV1TrajectoryRecallSpec = 0; +api.GoogleCloudAiplatformV1TrajectoryRecallSpec + buildGoogleCloudAiplatformV1TrajectoryRecallSpec() { + final o = api.GoogleCloudAiplatformV1TrajectoryRecallSpec(); + buildCounterGoogleCloudAiplatformV1TrajectoryRecallSpec++; + if (buildCounterGoogleCloudAiplatformV1TrajectoryRecallSpec < 3) {} + buildCounterGoogleCloudAiplatformV1TrajectoryRecallSpec--; + return o; +} + +void checkGoogleCloudAiplatformV1TrajectoryRecallSpec( + api.GoogleCloudAiplatformV1TrajectoryRecallSpec o) { + buildCounterGoogleCloudAiplatformV1TrajectoryRecallSpec++; + if (buildCounterGoogleCloudAiplatformV1TrajectoryRecallSpec < 3) {} + buildCounterGoogleCloudAiplatformV1TrajectoryRecallSpec--; +} + +core.List + buildUnnamed400() => [ + buildGoogleCloudAiplatformV1TrajectorySingleToolUseInstance(), + buildGoogleCloudAiplatformV1TrajectorySingleToolUseInstance(), + ]; + +void checkUnnamed400( + core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkGoogleCloudAiplatformV1TrajectorySingleToolUseInstance(o[0]); + checkGoogleCloudAiplatformV1TrajectorySingleToolUseInstance(o[1]); +} + +core.int buildCounterGoogleCloudAiplatformV1TrajectorySingleToolUseInput = 0; +api.GoogleCloudAiplatformV1TrajectorySingleToolUseInput + buildGoogleCloudAiplatformV1TrajectorySingleToolUseInput() { + final o = api.GoogleCloudAiplatformV1TrajectorySingleToolUseInput(); + buildCounterGoogleCloudAiplatformV1TrajectorySingleToolUseInput++; + if (buildCounterGoogleCloudAiplatformV1TrajectorySingleToolUseInput < 3) { + o.instances = buildUnnamed400(); + o.metricSpec = buildGoogleCloudAiplatformV1TrajectorySingleToolUseSpec(); + } + buildCounterGoogleCloudAiplatformV1TrajectorySingleToolUseInput--; + return o; +} + +void checkGoogleCloudAiplatformV1TrajectorySingleToolUseInput( + api.GoogleCloudAiplatformV1TrajectorySingleToolUseInput o) { + buildCounterGoogleCloudAiplatformV1TrajectorySingleToolUseInput++; + if (buildCounterGoogleCloudAiplatformV1TrajectorySingleToolUseInput < 3) { + checkUnnamed400(o.instances!); + checkGoogleCloudAiplatformV1TrajectorySingleToolUseSpec(o.metricSpec!); + } + buildCounterGoogleCloudAiplatformV1TrajectorySingleToolUseInput--; +} + +core.int buildCounterGoogleCloudAiplatformV1TrajectorySingleToolUseInstance = 0; +api.GoogleCloudAiplatformV1TrajectorySingleToolUseInstance + buildGoogleCloudAiplatformV1TrajectorySingleToolUseInstance() { + final o = api.GoogleCloudAiplatformV1TrajectorySingleToolUseInstance(); + 
buildCounterGoogleCloudAiplatformV1TrajectorySingleToolUseInstance++; + if (buildCounterGoogleCloudAiplatformV1TrajectorySingleToolUseInstance < 3) { + o.predictedTrajectory = buildGoogleCloudAiplatformV1Trajectory(); + } + buildCounterGoogleCloudAiplatformV1TrajectorySingleToolUseInstance--; + return o; +} + +void checkGoogleCloudAiplatformV1TrajectorySingleToolUseInstance( + api.GoogleCloudAiplatformV1TrajectorySingleToolUseInstance o) { + buildCounterGoogleCloudAiplatformV1TrajectorySingleToolUseInstance++; + if (buildCounterGoogleCloudAiplatformV1TrajectorySingleToolUseInstance < 3) { + checkGoogleCloudAiplatformV1Trajectory(o.predictedTrajectory!); + } + buildCounterGoogleCloudAiplatformV1TrajectorySingleToolUseInstance--; +} + +core.int buildCounterGoogleCloudAiplatformV1TrajectorySingleToolUseMetricValue = + 0; +api.GoogleCloudAiplatformV1TrajectorySingleToolUseMetricValue + buildGoogleCloudAiplatformV1TrajectorySingleToolUseMetricValue() { + final o = api.GoogleCloudAiplatformV1TrajectorySingleToolUseMetricValue(); + buildCounterGoogleCloudAiplatformV1TrajectorySingleToolUseMetricValue++; + if (buildCounterGoogleCloudAiplatformV1TrajectorySingleToolUseMetricValue < + 3) { + o.score = 42.0; + } + buildCounterGoogleCloudAiplatformV1TrajectorySingleToolUseMetricValue--; + return o; +} + +void checkGoogleCloudAiplatformV1TrajectorySingleToolUseMetricValue( + api.GoogleCloudAiplatformV1TrajectorySingleToolUseMetricValue o) { + buildCounterGoogleCloudAiplatformV1TrajectorySingleToolUseMetricValue++; + if (buildCounterGoogleCloudAiplatformV1TrajectorySingleToolUseMetricValue < + 3) { + unittest.expect( + o.score!, + unittest.equals(42.0), + ); + } + buildCounterGoogleCloudAiplatformV1TrajectorySingleToolUseMetricValue--; +} + +core.List + buildUnnamed401() => [ + buildGoogleCloudAiplatformV1TrajectorySingleToolUseMetricValue(), + buildGoogleCloudAiplatformV1TrajectorySingleToolUseMetricValue(), + ]; + +void checkUnnamed401( + core.List + o) { + unittest.expect(o, unittest.hasLength(2)); + checkGoogleCloudAiplatformV1TrajectorySingleToolUseMetricValue(o[0]); + checkGoogleCloudAiplatformV1TrajectorySingleToolUseMetricValue(o[1]); +} + +core.int buildCounterGoogleCloudAiplatformV1TrajectorySingleToolUseResults = 0; +api.GoogleCloudAiplatformV1TrajectorySingleToolUseResults + buildGoogleCloudAiplatformV1TrajectorySingleToolUseResults() { + final o = api.GoogleCloudAiplatformV1TrajectorySingleToolUseResults(); + buildCounterGoogleCloudAiplatformV1TrajectorySingleToolUseResults++; + if (buildCounterGoogleCloudAiplatformV1TrajectorySingleToolUseResults < 3) { + o.trajectorySingleToolUseMetricValues = buildUnnamed401(); + } + buildCounterGoogleCloudAiplatformV1TrajectorySingleToolUseResults--; + return o; +} + +void checkGoogleCloudAiplatformV1TrajectorySingleToolUseResults( + api.GoogleCloudAiplatformV1TrajectorySingleToolUseResults o) { + buildCounterGoogleCloudAiplatformV1TrajectorySingleToolUseResults++; + if (buildCounterGoogleCloudAiplatformV1TrajectorySingleToolUseResults < 3) { + checkUnnamed401(o.trajectorySingleToolUseMetricValues!); + } + buildCounterGoogleCloudAiplatformV1TrajectorySingleToolUseResults--; +} + +core.int buildCounterGoogleCloudAiplatformV1TrajectorySingleToolUseSpec = 0; +api.GoogleCloudAiplatformV1TrajectorySingleToolUseSpec + buildGoogleCloudAiplatformV1TrajectorySingleToolUseSpec() { + final o = api.GoogleCloudAiplatformV1TrajectorySingleToolUseSpec(); + buildCounterGoogleCloudAiplatformV1TrajectorySingleToolUseSpec++; + if 
(buildCounterGoogleCloudAiplatformV1TrajectorySingleToolUseSpec < 3) { + o.toolName = 'foo'; + } + buildCounterGoogleCloudAiplatformV1TrajectorySingleToolUseSpec--; + return o; +} + +void checkGoogleCloudAiplatformV1TrajectorySingleToolUseSpec( + api.GoogleCloudAiplatformV1TrajectorySingleToolUseSpec o) { + buildCounterGoogleCloudAiplatformV1TrajectorySingleToolUseSpec++; + if (buildCounterGoogleCloudAiplatformV1TrajectorySingleToolUseSpec < 3) { + unittest.expect( + o.toolName!, + unittest.equals('foo'), + ); + } + buildCounterGoogleCloudAiplatformV1TrajectorySingleToolUseSpec--; +} + +core.List buildUnnamed402() => [ buildGoogleCloudAiplatformV1Measurement(), buildGoogleCloudAiplatformV1Measurement(), ]; -void checkUnnamed361(core.List o) { +void checkUnnamed402(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1Measurement(o[0]); checkGoogleCloudAiplatformV1Measurement(o[1]); } -core.List buildUnnamed362() => [ +core.List buildUnnamed403() => [ buildGoogleCloudAiplatformV1TrialParameter(), buildGoogleCloudAiplatformV1TrialParameter(), ]; -void checkUnnamed362(core.List o) { +void checkUnnamed403(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1TrialParameter(o[0]); checkGoogleCloudAiplatformV1TrialParameter(o[1]); } -core.Map buildUnnamed363() => { +core.Map buildUnnamed404() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed363(core.Map o) { +void checkUnnamed404(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -26839,12 +30523,12 @@ api.GoogleCloudAiplatformV1Trial buildGoogleCloudAiplatformV1Trial() { o.finalMeasurement = buildGoogleCloudAiplatformV1Measurement(); o.id = 'foo'; o.infeasibleReason = 'foo'; - o.measurements = buildUnnamed361(); + o.measurements = buildUnnamed402(); o.name = 'foo'; - o.parameters = buildUnnamed362(); + o.parameters = buildUnnamed403(); o.startTime = 'foo'; o.state = 'foo'; - o.webAccessUris = buildUnnamed363(); + o.webAccessUris = buildUnnamed404(); } buildCounterGoogleCloudAiplatformV1Trial--; return o; @@ -26874,12 +30558,12 @@ void checkGoogleCloudAiplatformV1Trial(api.GoogleCloudAiplatformV1Trial o) { o.infeasibleReason!, unittest.equals('foo'), ); - checkUnnamed361(o.measurements!); + checkUnnamed402(o.measurements!); unittest.expect( o.name!, unittest.equals('foo'), ); - checkUnnamed362(o.parameters!); + checkUnnamed403(o.parameters!); unittest.expect( o.startTime!, unittest.equals('foo'), @@ -26888,17 +30572,17 @@ void checkGoogleCloudAiplatformV1Trial(api.GoogleCloudAiplatformV1Trial o) { o.state!, unittest.equals('foo'), ); - checkUnnamed363(o.webAccessUris!); + checkUnnamed404(o.webAccessUris!); } buildCounterGoogleCloudAiplatformV1Trial--; } -core.List buildUnnamed364() => [ +core.List buildUnnamed405() => [ buildGoogleCloudAiplatformV1TrialParameter(), buildGoogleCloudAiplatformV1TrialParameter(), ]; -void checkUnnamed364(core.List o) { +void checkUnnamed405(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1TrialParameter(o[0]); checkGoogleCloudAiplatformV1TrialParameter(o[1]); @@ -26911,7 +30595,7 @@ api.GoogleCloudAiplatformV1TrialContext buildCounterGoogleCloudAiplatformV1TrialContext++; if (buildCounterGoogleCloudAiplatformV1TrialContext < 3) { o.description = 'foo'; - o.parameters = buildUnnamed364(); + o.parameters = buildUnnamed405(); } buildCounterGoogleCloudAiplatformV1TrialContext--; return o; @@ -26925,7 +30609,7 @@ void checkGoogleCloudAiplatformV1TrialContext( o.description!, 
unittest.equals('foo'), ); - checkUnnamed364(o.parameters!); + checkUnnamed405(o.parameters!); } buildCounterGoogleCloudAiplatformV1TrialContext--; } @@ -26955,18 +30639,18 @@ void checkGoogleCloudAiplatformV1TrialParameter( o.parameterId!, unittest.equals('foo'), ); - var casted67 = (o.value!) as core.Map; - unittest.expect(casted67, unittest.hasLength(3)); + var casted75 = (o.value!) as core.Map; + unittest.expect(casted75, unittest.hasLength(3)); unittest.expect( - casted67['list'], + casted75['list'], unittest.equals([1, 2, 3]), ); unittest.expect( - casted67['bool'], + casted75['bool'], unittest.equals(true), ); unittest.expect( - casted67['string'], + casted75['string'], unittest.equals('foo'), ); } @@ -27058,12 +30742,12 @@ void checkGoogleCloudAiplatformV1TuningDataStats( buildCounterGoogleCloudAiplatformV1TuningDataStats--; } -core.Map buildUnnamed365() => { +core.Map buildUnnamed406() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed365(core.Map o) { +void checkUnnamed406(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -27087,8 +30771,9 @@ api.GoogleCloudAiplatformV1TuningJob buildGoogleCloudAiplatformV1TuningJob() { o.endTime = 'foo'; o.error = buildGoogleRpcStatus(); o.experiment = 'foo'; - o.labels = buildUnnamed365(); + o.labels = buildUnnamed406(); o.name = 'foo'; + o.serviceAccount = 'foo'; o.startTime = 'foo'; o.state = 'foo'; o.supervisedTuningSpec = buildGoogleCloudAiplatformV1SupervisedTuningSpec(); @@ -27127,11 +30812,15 @@ void checkGoogleCloudAiplatformV1TuningJob( o.experiment!, unittest.equals('foo'), ); - checkUnnamed365(o.labels!); + checkUnnamed406(o.labels!); unittest.expect( o.name!, unittest.equals('foo'), ); + unittest.expect( + o.serviceAccount!, + unittest.equals('foo'), + ); unittest.expect( o.startTime!, unittest.equals('foo'), @@ -27179,12 +30868,12 @@ void checkGoogleCloudAiplatformV1UndeployIndexRequest( buildCounterGoogleCloudAiplatformV1UndeployIndexRequest--; } -core.Map buildUnnamed366() => { +core.Map buildUnnamed407() => { 'x': 42, 'y': 42, }; -void checkUnnamed366(core.Map o) { +void checkUnnamed407(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -27203,7 +30892,7 @@ api.GoogleCloudAiplatformV1UndeployModelRequest buildCounterGoogleCloudAiplatformV1UndeployModelRequest++; if (buildCounterGoogleCloudAiplatformV1UndeployModelRequest < 3) { o.deployedModelId = 'foo'; - o.trafficSplit = buildUnnamed366(); + o.trafficSplit = buildUnnamed407(); } buildCounterGoogleCloudAiplatformV1UndeployModelRequest--; return o; @@ -27217,7 +30906,7 @@ void checkGoogleCloudAiplatformV1UndeployModelRequest( o.deployedModelId!, unittest.equals('foo'), ); - checkUnnamed366(o.trafficSplit!); + checkUnnamed407(o.trafficSplit!); } buildCounterGoogleCloudAiplatformV1UndeployModelRequest--; } @@ -27250,6 +30939,28 @@ void checkGoogleCloudAiplatformV1UnmanagedContainerModel( buildCounterGoogleCloudAiplatformV1UnmanagedContainerModel--; } +core.int buildCounterGoogleCloudAiplatformV1UpdateEndpointLongRunningRequest = + 0; +api.GoogleCloudAiplatformV1UpdateEndpointLongRunningRequest + buildGoogleCloudAiplatformV1UpdateEndpointLongRunningRequest() { + final o = api.GoogleCloudAiplatformV1UpdateEndpointLongRunningRequest(); + buildCounterGoogleCloudAiplatformV1UpdateEndpointLongRunningRequest++; + if (buildCounterGoogleCloudAiplatformV1UpdateEndpointLongRunningRequest < 3) { + o.endpoint = buildGoogleCloudAiplatformV1Endpoint(); + } + 
buildCounterGoogleCloudAiplatformV1UpdateEndpointLongRunningRequest--; + return o; +} + +void checkGoogleCloudAiplatformV1UpdateEndpointLongRunningRequest( + api.GoogleCloudAiplatformV1UpdateEndpointLongRunningRequest o) { + buildCounterGoogleCloudAiplatformV1UpdateEndpointLongRunningRequest++; + if (buildCounterGoogleCloudAiplatformV1UpdateEndpointLongRunningRequest < 3) { + checkGoogleCloudAiplatformV1Endpoint(o.endpoint!); + } + buildCounterGoogleCloudAiplatformV1UpdateEndpointLongRunningRequest--; +} + core.int buildCounterGoogleCloudAiplatformV1UpdateExplanationDatasetRequest = 0; api.GoogleCloudAiplatformV1UpdateExplanationDatasetRequest buildGoogleCloudAiplatformV1UpdateExplanationDatasetRequest() { @@ -27324,12 +31035,81 @@ void checkGoogleCloudAiplatformV1UploadModelRequest( buildCounterGoogleCloudAiplatformV1UploadModelRequest--; } -core.List buildUnnamed367() => [ +core.int buildCounterGoogleCloudAiplatformV1UploadRagFileConfig = 0; +api.GoogleCloudAiplatformV1UploadRagFileConfig + buildGoogleCloudAiplatformV1UploadRagFileConfig() { + final o = api.GoogleCloudAiplatformV1UploadRagFileConfig(); + buildCounterGoogleCloudAiplatformV1UploadRagFileConfig++; + if (buildCounterGoogleCloudAiplatformV1UploadRagFileConfig < 3) { + o.ragFileTransformationConfig = + buildGoogleCloudAiplatformV1RagFileTransformationConfig(); + } + buildCounterGoogleCloudAiplatformV1UploadRagFileConfig--; + return o; +} + +void checkGoogleCloudAiplatformV1UploadRagFileConfig( + api.GoogleCloudAiplatformV1UploadRagFileConfig o) { + buildCounterGoogleCloudAiplatformV1UploadRagFileConfig++; + if (buildCounterGoogleCloudAiplatformV1UploadRagFileConfig < 3) { + checkGoogleCloudAiplatformV1RagFileTransformationConfig( + o.ragFileTransformationConfig!); + } + buildCounterGoogleCloudAiplatformV1UploadRagFileConfig--; +} + +core.int buildCounterGoogleCloudAiplatformV1UploadRagFileRequest = 0; +api.GoogleCloudAiplatformV1UploadRagFileRequest + buildGoogleCloudAiplatformV1UploadRagFileRequest() { + final o = api.GoogleCloudAiplatformV1UploadRagFileRequest(); + buildCounterGoogleCloudAiplatformV1UploadRagFileRequest++; + if (buildCounterGoogleCloudAiplatformV1UploadRagFileRequest < 3) { + o.ragFile = buildGoogleCloudAiplatformV1RagFile(); + o.uploadRagFileConfig = buildGoogleCloudAiplatformV1UploadRagFileConfig(); + } + buildCounterGoogleCloudAiplatformV1UploadRagFileRequest--; + return o; +} + +void checkGoogleCloudAiplatformV1UploadRagFileRequest( + api.GoogleCloudAiplatformV1UploadRagFileRequest o) { + buildCounterGoogleCloudAiplatformV1UploadRagFileRequest++; + if (buildCounterGoogleCloudAiplatformV1UploadRagFileRequest < 3) { + checkGoogleCloudAiplatformV1RagFile(o.ragFile!); + checkGoogleCloudAiplatformV1UploadRagFileConfig(o.uploadRagFileConfig!); + } + buildCounterGoogleCloudAiplatformV1UploadRagFileRequest--; +} + +core.int buildCounterGoogleCloudAiplatformV1UploadRagFileResponse = 0; +api.GoogleCloudAiplatformV1UploadRagFileResponse + buildGoogleCloudAiplatformV1UploadRagFileResponse() { + final o = api.GoogleCloudAiplatformV1UploadRagFileResponse(); + buildCounterGoogleCloudAiplatformV1UploadRagFileResponse++; + if (buildCounterGoogleCloudAiplatformV1UploadRagFileResponse < 3) { + o.error = buildGoogleRpcStatus(); + o.ragFile = buildGoogleCloudAiplatformV1RagFile(); + } + buildCounterGoogleCloudAiplatformV1UploadRagFileResponse--; + return o; +} + +void checkGoogleCloudAiplatformV1UploadRagFileResponse( + api.GoogleCloudAiplatformV1UploadRagFileResponse o) { + 
buildCounterGoogleCloudAiplatformV1UploadRagFileResponse++; + if (buildCounterGoogleCloudAiplatformV1UploadRagFileResponse < 3) { + checkGoogleRpcStatus(o.error!); + checkGoogleCloudAiplatformV1RagFile(o.ragFile!); + } + buildCounterGoogleCloudAiplatformV1UploadRagFileResponse--; +} + +core.List buildUnnamed408() => [ buildGoogleCloudAiplatformV1IndexDatapoint(), buildGoogleCloudAiplatformV1IndexDatapoint(), ]; -void checkUnnamed367(core.List o) { +void checkUnnamed408(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1IndexDatapoint(o[0]); checkGoogleCloudAiplatformV1IndexDatapoint(o[1]); @@ -27341,7 +31121,7 @@ api.GoogleCloudAiplatformV1UpsertDatapointsRequest final o = api.GoogleCloudAiplatformV1UpsertDatapointsRequest(); buildCounterGoogleCloudAiplatformV1UpsertDatapointsRequest++; if (buildCounterGoogleCloudAiplatformV1UpsertDatapointsRequest < 3) { - o.datapoints = buildUnnamed367(); + o.datapoints = buildUnnamed408(); o.updateMask = 'foo'; } buildCounterGoogleCloudAiplatformV1UpsertDatapointsRequest--; @@ -27352,7 +31132,7 @@ void checkGoogleCloudAiplatformV1UpsertDatapointsRequest( api.GoogleCloudAiplatformV1UpsertDatapointsRequest o) { buildCounterGoogleCloudAiplatformV1UpsertDatapointsRequest++; if (buildCounterGoogleCloudAiplatformV1UpsertDatapointsRequest < 3) { - checkUnnamed367(o.datapoints!); + checkUnnamed408(o.datapoints!); unittest.expect( o.updateMask!, unittest.equals('foo'), @@ -27468,30 +31248,13 @@ void checkGoogleCloudAiplatformV1VertexAISearch( buildCounterGoogleCloudAiplatformV1VertexAISearch--; } -core.List buildUnnamed368() => [ - 'foo', - 'foo', - ]; - -void checkUnnamed368(core.List o) { - unittest.expect(o, unittest.hasLength(2)); - unittest.expect( - o[0], - unittest.equals('foo'), - ); - unittest.expect( - o[1], - unittest.equals('foo'), - ); -} - core.List - buildUnnamed369() => [ + buildUnnamed409() => [ buildGoogleCloudAiplatformV1VertexRagStoreRagResource(), buildGoogleCloudAiplatformV1VertexRagStoreRagResource(), ]; -void checkUnnamed369( +void checkUnnamed409( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1VertexRagStoreRagResource(o[0]); @@ -27504,8 +31267,8 @@ api.GoogleCloudAiplatformV1VertexRagStore final o = api.GoogleCloudAiplatformV1VertexRagStore(); buildCounterGoogleCloudAiplatformV1VertexRagStore++; if (buildCounterGoogleCloudAiplatformV1VertexRagStore < 3) { - o.ragCorpora = buildUnnamed368(); - o.ragResources = buildUnnamed369(); + o.ragResources = buildUnnamed409(); + o.ragRetrievalConfig = buildGoogleCloudAiplatformV1RagRetrievalConfig(); o.similarityTopK = 42; o.vectorDistanceThreshold = 42.0; } @@ -27517,8 +31280,8 @@ void checkGoogleCloudAiplatformV1VertexRagStore( api.GoogleCloudAiplatformV1VertexRagStore o) { buildCounterGoogleCloudAiplatformV1VertexRagStore++; if (buildCounterGoogleCloudAiplatformV1VertexRagStore < 3) { - checkUnnamed368(o.ragCorpora!); - checkUnnamed369(o.ragResources!); + checkUnnamed409(o.ragResources!); + checkGoogleCloudAiplatformV1RagRetrievalConfig(o.ragRetrievalConfig!); unittest.expect( o.similarityTopK!, unittest.equals(42), @@ -27531,12 +31294,12 @@ void checkGoogleCloudAiplatformV1VertexRagStore( buildCounterGoogleCloudAiplatformV1VertexRagStore--; } -core.List buildUnnamed370() => [ +core.List buildUnnamed410() => [ 'foo', 'foo', ]; -void checkUnnamed370(core.List o) { +void checkUnnamed410(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -27555,7 +31318,7 @@ 
api.GoogleCloudAiplatformV1VertexRagStoreRagResource buildCounterGoogleCloudAiplatformV1VertexRagStoreRagResource++; if (buildCounterGoogleCloudAiplatformV1VertexRagStoreRagResource < 3) { o.ragCorpus = 'foo'; - o.ragFileIds = buildUnnamed370(); + o.ragFileIds = buildUnnamed410(); } buildCounterGoogleCloudAiplatformV1VertexRagStoreRagResource--; return o; @@ -27569,7 +31332,7 @@ void checkGoogleCloudAiplatformV1VertexRagStoreRagResource( o.ragCorpus!, unittest.equals('foo'), ); - checkUnnamed370(o.ragFileIds!); + checkUnnamed410(o.ragFileIds!); } buildCounterGoogleCloudAiplatformV1VertexRagStoreRagResource--; } @@ -27603,12 +31366,33 @@ void checkGoogleCloudAiplatformV1VideoMetadata( buildCounterGoogleCloudAiplatformV1VideoMetadata--; } -core.List buildUnnamed371() => [ +core.int buildCounterGoogleCloudAiplatformV1VoiceConfig = 0; +api.GoogleCloudAiplatformV1VoiceConfig + buildGoogleCloudAiplatformV1VoiceConfig() { + final o = api.GoogleCloudAiplatformV1VoiceConfig(); + buildCounterGoogleCloudAiplatformV1VoiceConfig++; + if (buildCounterGoogleCloudAiplatformV1VoiceConfig < 3) { + o.prebuiltVoiceConfig = buildGoogleCloudAiplatformV1PrebuiltVoiceConfig(); + } + buildCounterGoogleCloudAiplatformV1VoiceConfig--; + return o; +} + +void checkGoogleCloudAiplatformV1VoiceConfig( + api.GoogleCloudAiplatformV1VoiceConfig o) { + buildCounterGoogleCloudAiplatformV1VoiceConfig++; + if (buildCounterGoogleCloudAiplatformV1VoiceConfig < 3) { + checkGoogleCloudAiplatformV1PrebuiltVoiceConfig(o.prebuiltVoiceConfig!); + } + buildCounterGoogleCloudAiplatformV1VoiceConfig--; +} + +core.List buildUnnamed411() => [ buildGoogleCloudAiplatformV1NfsMount(), buildGoogleCloudAiplatformV1NfsMount(), ]; -void checkUnnamed371(core.List o) { +void checkUnnamed411(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1NfsMount(o[0]); checkGoogleCloudAiplatformV1NfsMount(o[1]); @@ -27623,7 +31407,7 @@ api.GoogleCloudAiplatformV1WorkerPoolSpec o.containerSpec = buildGoogleCloudAiplatformV1ContainerSpec(); o.diskSpec = buildGoogleCloudAiplatformV1DiskSpec(); o.machineSpec = buildGoogleCloudAiplatformV1MachineSpec(); - o.nfsMounts = buildUnnamed371(); + o.nfsMounts = buildUnnamed411(); o.pythonPackageSpec = buildGoogleCloudAiplatformV1PythonPackageSpec(); o.replicaCount = 'foo'; } @@ -27638,7 +31422,7 @@ void checkGoogleCloudAiplatformV1WorkerPoolSpec( checkGoogleCloudAiplatformV1ContainerSpec(o.containerSpec!); checkGoogleCloudAiplatformV1DiskSpec(o.diskSpec!); checkGoogleCloudAiplatformV1MachineSpec(o.machineSpec!); - checkUnnamed371(o.nfsMounts!); + checkUnnamed411(o.nfsMounts!); checkGoogleCloudAiplatformV1PythonPackageSpec(o.pythonPackageSpec!); unittest.expect( o.replicaCount!, @@ -27649,12 +31433,12 @@ void checkGoogleCloudAiplatformV1WorkerPoolSpec( } core.Map - buildUnnamed372() => { + buildUnnamed412() => { 'x': buildGoogleCloudAiplatformV1FeatureValue(), 'y': buildGoogleCloudAiplatformV1FeatureValue(), }; -void checkUnnamed372( +void checkUnnamed412( core.Map o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1FeatureValue(o['x']!); @@ -27668,7 +31452,7 @@ api.GoogleCloudAiplatformV1WriteFeatureValuesPayload buildCounterGoogleCloudAiplatformV1WriteFeatureValuesPayload++; if (buildCounterGoogleCloudAiplatformV1WriteFeatureValuesPayload < 3) { o.entityId = 'foo'; - o.featureValues = buildUnnamed372(); + o.featureValues = buildUnnamed412(); } buildCounterGoogleCloudAiplatformV1WriteFeatureValuesPayload--; return o; @@ -27682,18 +31466,18 @@ void 
checkGoogleCloudAiplatformV1WriteFeatureValuesPayload( o.entityId!, unittest.equals('foo'), ); - checkUnnamed372(o.featureValues!); + checkUnnamed412(o.featureValues!); } buildCounterGoogleCloudAiplatformV1WriteFeatureValuesPayload--; } core.List - buildUnnamed373() => [ + buildUnnamed413() => [ buildGoogleCloudAiplatformV1WriteFeatureValuesPayload(), buildGoogleCloudAiplatformV1WriteFeatureValuesPayload(), ]; -void checkUnnamed373( +void checkUnnamed413( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1WriteFeatureValuesPayload(o[0]); @@ -27706,7 +31490,7 @@ api.GoogleCloudAiplatformV1WriteFeatureValuesRequest final o = api.GoogleCloudAiplatformV1WriteFeatureValuesRequest(); buildCounterGoogleCloudAiplatformV1WriteFeatureValuesRequest++; if (buildCounterGoogleCloudAiplatformV1WriteFeatureValuesRequest < 3) { - o.payloads = buildUnnamed373(); + o.payloads = buildUnnamed413(); } buildCounterGoogleCloudAiplatformV1WriteFeatureValuesRequest--; return o; @@ -27716,7 +31500,7 @@ void checkGoogleCloudAiplatformV1WriteFeatureValuesRequest( api.GoogleCloudAiplatformV1WriteFeatureValuesRequest o) { buildCounterGoogleCloudAiplatformV1WriteFeatureValuesRequest++; if (buildCounterGoogleCloudAiplatformV1WriteFeatureValuesRequest < 3) { - checkUnnamed373(o.payloads!); + checkUnnamed413(o.payloads!); } buildCounterGoogleCloudAiplatformV1WriteFeatureValuesRequest--; } @@ -27739,12 +31523,12 @@ void checkGoogleCloudAiplatformV1WriteFeatureValuesResponse( } core.List - buildUnnamed374() => [ + buildUnnamed414() => [ buildGoogleCloudAiplatformV1WriteTensorboardRunDataRequest(), buildGoogleCloudAiplatformV1WriteTensorboardRunDataRequest(), ]; -void checkUnnamed374( +void checkUnnamed414( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1WriteTensorboardRunDataRequest(o[0]); @@ -27760,7 +31544,7 @@ api.GoogleCloudAiplatformV1WriteTensorboardExperimentDataRequest buildCounterGoogleCloudAiplatformV1WriteTensorboardExperimentDataRequest++; if (buildCounterGoogleCloudAiplatformV1WriteTensorboardExperimentDataRequest < 3) { - o.writeRunDataRequests = buildUnnamed374(); + o.writeRunDataRequests = buildUnnamed414(); } buildCounterGoogleCloudAiplatformV1WriteTensorboardExperimentDataRequest--; return o; @@ -27771,7 +31555,7 @@ void checkGoogleCloudAiplatformV1WriteTensorboardExperimentDataRequest( buildCounterGoogleCloudAiplatformV1WriteTensorboardExperimentDataRequest++; if (buildCounterGoogleCloudAiplatformV1WriteTensorboardExperimentDataRequest < 3) { - checkUnnamed374(o.writeRunDataRequests!); + checkUnnamed414(o.writeRunDataRequests!); } buildCounterGoogleCloudAiplatformV1WriteTensorboardExperimentDataRequest--; } @@ -27797,12 +31581,12 @@ void checkGoogleCloudAiplatformV1WriteTensorboardExperimentDataResponse( buildCounterGoogleCloudAiplatformV1WriteTensorboardExperimentDataResponse--; } -core.List buildUnnamed375() => [ +core.List buildUnnamed415() => [ buildGoogleCloudAiplatformV1TimeSeriesData(), buildGoogleCloudAiplatformV1TimeSeriesData(), ]; -void checkUnnamed375(core.List o) { +void checkUnnamed415(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudAiplatformV1TimeSeriesData(o[0]); checkGoogleCloudAiplatformV1TimeSeriesData(o[1]); @@ -27815,7 +31599,7 @@ api.GoogleCloudAiplatformV1WriteTensorboardRunDataRequest buildCounterGoogleCloudAiplatformV1WriteTensorboardRunDataRequest++; if (buildCounterGoogleCloudAiplatformV1WriteTensorboardRunDataRequest < 3) { o.tensorboardRun = 'foo'; - o.timeSeriesData = 
buildUnnamed375(); + o.timeSeriesData = buildUnnamed415(); } buildCounterGoogleCloudAiplatformV1WriteTensorboardRunDataRequest--; return o; @@ -27829,7 +31613,7 @@ void checkGoogleCloudAiplatformV1WriteTensorboardRunDataRequest( o.tensorboardRun!, unittest.equals('foo'), ); - checkUnnamed375(o.timeSeriesData!); + checkUnnamed415(o.timeSeriesData!); } buildCounterGoogleCloudAiplatformV1WriteTensorboardRunDataRequest--; } @@ -27879,12 +31663,12 @@ void checkGoogleCloudAiplatformV1XraiAttribution( buildCounterGoogleCloudAiplatformV1XraiAttribution--; } -core.List buildUnnamed376() => [ +core.List buildUnnamed416() => [ buildGoogleCloudLocationLocation(), buildGoogleCloudLocationLocation(), ]; -void checkUnnamed376(core.List o) { +void checkUnnamed416(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudLocationLocation(o[0]); checkGoogleCloudLocationLocation(o[1]); @@ -27896,7 +31680,7 @@ api.GoogleCloudLocationListLocationsResponse final o = api.GoogleCloudLocationListLocationsResponse(); buildCounterGoogleCloudLocationListLocationsResponse++; if (buildCounterGoogleCloudLocationListLocationsResponse < 3) { - o.locations = buildUnnamed376(); + o.locations = buildUnnamed416(); o.nextPageToken = 'foo'; } buildCounterGoogleCloudLocationListLocationsResponse--; @@ -27907,7 +31691,7 @@ void checkGoogleCloudLocationListLocationsResponse( api.GoogleCloudLocationListLocationsResponse o) { buildCounterGoogleCloudLocationListLocationsResponse++; if (buildCounterGoogleCloudLocationListLocationsResponse < 3) { - checkUnnamed376(o.locations!); + checkUnnamed416(o.locations!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -27916,12 +31700,12 @@ void checkGoogleCloudLocationListLocationsResponse( buildCounterGoogleCloudLocationListLocationsResponse--; } -core.Map buildUnnamed377() => { +core.Map buildUnnamed417() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed377(core.Map o) { +void checkUnnamed417(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -27933,7 +31717,7 @@ void checkUnnamed377(core.Map o) { ); } -core.Map buildUnnamed378() => { +core.Map buildUnnamed418() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -27946,34 +31730,34 @@ core.Map buildUnnamed378() => { }, }; -void checkUnnamed378(core.Map o) { +void checkUnnamed418(core.Map o) { unittest.expect(o, unittest.hasLength(2)); - var casted68 = (o['x']!) as core.Map; - unittest.expect(casted68, unittest.hasLength(3)); + var casted76 = (o['x']!) as core.Map; + unittest.expect(casted76, unittest.hasLength(3)); unittest.expect( - casted68['list'], + casted76['list'], unittest.equals([1, 2, 3]), ); unittest.expect( - casted68['bool'], + casted76['bool'], unittest.equals(true), ); unittest.expect( - casted68['string'], + casted76['string'], unittest.equals('foo'), ); - var casted69 = (o['y']!) as core.Map; - unittest.expect(casted69, unittest.hasLength(3)); + var casted77 = (o['y']!) 
as core.Map; + unittest.expect(casted77, unittest.hasLength(3)); unittest.expect( - casted69['list'], + casted77['list'], unittest.equals([1, 2, 3]), ); unittest.expect( - casted69['bool'], + casted77['bool'], unittest.equals(true), ); unittest.expect( - casted69['string'], + casted77['string'], unittest.equals('foo'), ); } @@ -27984,9 +31768,9 @@ api.GoogleCloudLocationLocation buildGoogleCloudLocationLocation() { buildCounterGoogleCloudLocationLocation++; if (buildCounterGoogleCloudLocationLocation < 3) { o.displayName = 'foo'; - o.labels = buildUnnamed377(); + o.labels = buildUnnamed417(); o.locationId = 'foo'; - o.metadata = buildUnnamed378(); + o.metadata = buildUnnamed418(); o.name = 'foo'; } buildCounterGoogleCloudLocationLocation--; @@ -28000,12 +31784,12 @@ void checkGoogleCloudLocationLocation(api.GoogleCloudLocationLocation o) { o.displayName!, unittest.equals('foo'), ); - checkUnnamed377(o.labels!); + checkUnnamed417(o.labels!); unittest.expect( o.locationId!, unittest.equals('foo'), ); - checkUnnamed378(o.metadata!); + checkUnnamed418(o.metadata!); unittest.expect( o.name!, unittest.equals('foo'), @@ -28014,12 +31798,12 @@ void checkGoogleCloudLocationLocation(api.GoogleCloudLocationLocation o) { buildCounterGoogleCloudLocationLocation--; } -core.List buildUnnamed379() => [ +core.List buildUnnamed419() => [ 'foo', 'foo', ]; -void checkUnnamed379(core.List o) { +void checkUnnamed419(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -28037,7 +31821,7 @@ api.GoogleIamV1Binding buildGoogleIamV1Binding() { buildCounterGoogleIamV1Binding++; if (buildCounterGoogleIamV1Binding < 3) { o.condition = buildGoogleTypeExpr(); - o.members = buildUnnamed379(); + o.members = buildUnnamed419(); o.role = 'foo'; } buildCounterGoogleIamV1Binding--; @@ -28048,7 +31832,7 @@ void checkGoogleIamV1Binding(api.GoogleIamV1Binding o) { buildCounterGoogleIamV1Binding++; if (buildCounterGoogleIamV1Binding < 3) { checkGoogleTypeExpr(o.condition!); - checkUnnamed379(o.members!); + checkUnnamed419(o.members!); unittest.expect( o.role!, unittest.equals('foo'), @@ -28057,12 +31841,12 @@ void checkGoogleIamV1Binding(api.GoogleIamV1Binding o) { buildCounterGoogleIamV1Binding--; } -core.List buildUnnamed380() => [ +core.List buildUnnamed420() => [ buildGoogleIamV1Binding(), buildGoogleIamV1Binding(), ]; -void checkUnnamed380(core.List o) { +void checkUnnamed420(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleIamV1Binding(o[0]); checkGoogleIamV1Binding(o[1]); @@ -28073,7 +31857,7 @@ api.GoogleIamV1Policy buildGoogleIamV1Policy() { final o = api.GoogleIamV1Policy(); buildCounterGoogleIamV1Policy++; if (buildCounterGoogleIamV1Policy < 3) { - o.bindings = buildUnnamed380(); + o.bindings = buildUnnamed420(); o.etag = 'foo'; o.version = 42; } @@ -28084,7 +31868,7 @@ api.GoogleIamV1Policy buildGoogleIamV1Policy() { void checkGoogleIamV1Policy(api.GoogleIamV1Policy o) { buildCounterGoogleIamV1Policy++; if (buildCounterGoogleIamV1Policy < 3) { - checkUnnamed380(o.bindings!); + checkUnnamed420(o.bindings!); unittest.expect( o.etag!, unittest.equals('foo'), @@ -28116,12 +31900,12 @@ void checkGoogleIamV1SetIamPolicyRequest(api.GoogleIamV1SetIamPolicyRequest o) { buildCounterGoogleIamV1SetIamPolicyRequest--; } -core.List buildUnnamed381() => [ +core.List buildUnnamed421() => [ 'foo', 'foo', ]; -void checkUnnamed381(core.List o) { +void checkUnnamed421(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -28139,7 +31923,7 @@ 
api.GoogleIamV1TestIamPermissionsResponse final o = api.GoogleIamV1TestIamPermissionsResponse(); buildCounterGoogleIamV1TestIamPermissionsResponse++; if (buildCounterGoogleIamV1TestIamPermissionsResponse < 3) { - o.permissions = buildUnnamed381(); + o.permissions = buildUnnamed421(); } buildCounterGoogleIamV1TestIamPermissionsResponse--; return o; @@ -28149,17 +31933,17 @@ void checkGoogleIamV1TestIamPermissionsResponse( api.GoogleIamV1TestIamPermissionsResponse o) { buildCounterGoogleIamV1TestIamPermissionsResponse++; if (buildCounterGoogleIamV1TestIamPermissionsResponse < 3) { - checkUnnamed381(o.permissions!); + checkUnnamed421(o.permissions!); } buildCounterGoogleIamV1TestIamPermissionsResponse--; } -core.List buildUnnamed382() => [ +core.List buildUnnamed422() => [ buildGoogleLongrunningOperation(), buildGoogleLongrunningOperation(), ]; -void checkUnnamed382(core.List o) { +void checkUnnamed422(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleLongrunningOperation(o[0]); checkGoogleLongrunningOperation(o[1]); @@ -28172,7 +31956,7 @@ api.GoogleLongrunningListOperationsResponse buildCounterGoogleLongrunningListOperationsResponse++; if (buildCounterGoogleLongrunningListOperationsResponse < 3) { o.nextPageToken = 'foo'; - o.operations = buildUnnamed382(); + o.operations = buildUnnamed422(); } buildCounterGoogleLongrunningListOperationsResponse--; return o; @@ -28186,12 +31970,12 @@ void checkGoogleLongrunningListOperationsResponse( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed382(o.operations!); + checkUnnamed422(o.operations!); } buildCounterGoogleLongrunningListOperationsResponse--; } -core.Map buildUnnamed383() => { +core.Map buildUnnamed423() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -28204,39 +31988,39 @@ core.Map buildUnnamed383() => { }, }; -void checkUnnamed383(core.Map o) { +void checkUnnamed423(core.Map o) { unittest.expect(o, unittest.hasLength(2)); - var casted70 = (o['x']!) as core.Map; - unittest.expect(casted70, unittest.hasLength(3)); + var casted78 = (o['x']!) as core.Map; + unittest.expect(casted78, unittest.hasLength(3)); unittest.expect( - casted70['list'], + casted78['list'], unittest.equals([1, 2, 3]), ); unittest.expect( - casted70['bool'], + casted78['bool'], unittest.equals(true), ); unittest.expect( - casted70['string'], + casted78['string'], unittest.equals('foo'), ); - var casted71 = (o['y']!) as core.Map; - unittest.expect(casted71, unittest.hasLength(3)); + var casted79 = (o['y']!) as core.Map; + unittest.expect(casted79, unittest.hasLength(3)); unittest.expect( - casted71['list'], + casted79['list'], unittest.equals([1, 2, 3]), ); unittest.expect( - casted71['bool'], + casted79['bool'], unittest.equals(true), ); unittest.expect( - casted71['string'], + casted79['string'], unittest.equals('foo'), ); } -core.Map buildUnnamed384() => { +core.Map buildUnnamed424() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -28249,34 +32033,34 @@ core.Map buildUnnamed384() => { }, }; -void checkUnnamed384(core.Map o) { +void checkUnnamed424(core.Map o) { unittest.expect(o, unittest.hasLength(2)); - var casted72 = (o['x']!) as core.Map; - unittest.expect(casted72, unittest.hasLength(3)); + var casted80 = (o['x']!) 
as core.Map; + unittest.expect(casted80, unittest.hasLength(3)); unittest.expect( - casted72['list'], + casted80['list'], unittest.equals([1, 2, 3]), ); unittest.expect( - casted72['bool'], + casted80['bool'], unittest.equals(true), ); unittest.expect( - casted72['string'], + casted80['string'], unittest.equals('foo'), ); - var casted73 = (o['y']!) as core.Map; - unittest.expect(casted73, unittest.hasLength(3)); + var casted81 = (o['y']!) as core.Map; + unittest.expect(casted81, unittest.hasLength(3)); unittest.expect( - casted73['list'], + casted81['list'], unittest.equals([1, 2, 3]), ); unittest.expect( - casted73['bool'], + casted81['bool'], unittest.equals(true), ); unittest.expect( - casted73['string'], + casted81['string'], unittest.equals('foo'), ); } @@ -28288,9 +32072,9 @@ api.GoogleLongrunningOperation buildGoogleLongrunningOperation() { if (buildCounterGoogleLongrunningOperation < 3) { o.done = true; o.error = buildGoogleRpcStatus(); - o.metadata = buildUnnamed383(); + o.metadata = buildUnnamed423(); o.name = 'foo'; - o.response = buildUnnamed384(); + o.response = buildUnnamed424(); } buildCounterGoogleLongrunningOperation--; return o; @@ -28301,12 +32085,12 @@ void checkGoogleLongrunningOperation(api.GoogleLongrunningOperation o) { if (buildCounterGoogleLongrunningOperation < 3) { unittest.expect(o.done!, unittest.isTrue); checkGoogleRpcStatus(o.error!); - checkUnnamed383(o.metadata!); + checkUnnamed423(o.metadata!); unittest.expect( o.name!, unittest.equals('foo'), ); - checkUnnamed384(o.response!); + checkUnnamed424(o.response!); } buildCounterGoogleLongrunningOperation--; } @@ -28326,7 +32110,7 @@ void checkGoogleProtobufEmpty(api.GoogleProtobufEmpty o) { buildCounterGoogleProtobufEmpty--; } -core.Map buildUnnamed385() => { +core.Map buildUnnamed425() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -28339,47 +32123,47 @@ core.Map buildUnnamed385() => { }, }; -void checkUnnamed385(core.Map o) { +void checkUnnamed425(core.Map o) { unittest.expect(o, unittest.hasLength(2)); - var casted74 = (o['x']!) as core.Map; - unittest.expect(casted74, unittest.hasLength(3)); + var casted82 = (o['x']!) as core.Map; + unittest.expect(casted82, unittest.hasLength(3)); unittest.expect( - casted74['list'], + casted82['list'], unittest.equals([1, 2, 3]), ); unittest.expect( - casted74['bool'], + casted82['bool'], unittest.equals(true), ); unittest.expect( - casted74['string'], + casted82['string'], unittest.equals('foo'), ); - var casted75 = (o['y']!) as core.Map; - unittest.expect(casted75, unittest.hasLength(3)); + var casted83 = (o['y']!) 
as core.Map; + unittest.expect(casted83, unittest.hasLength(3)); unittest.expect( - casted75['list'], + casted83['list'], unittest.equals([1, 2, 3]), ); unittest.expect( - casted75['bool'], + casted83['bool'], unittest.equals(true), ); unittest.expect( - casted75['string'], + casted83['string'], unittest.equals('foo'), ); } -core.List> buildUnnamed386() => [ - buildUnnamed385(), - buildUnnamed385(), +core.List> buildUnnamed426() => [ + buildUnnamed425(), + buildUnnamed425(), ]; -void checkUnnamed386(core.List> o) { +void checkUnnamed426(core.List> o) { unittest.expect(o, unittest.hasLength(2)); - checkUnnamed385(o[0]); - checkUnnamed385(o[1]); + checkUnnamed425(o[0]); + checkUnnamed425(o[1]); } core.int buildCounterGoogleRpcStatus = 0; @@ -28388,7 +32172,7 @@ api.GoogleRpcStatus buildGoogleRpcStatus() { buildCounterGoogleRpcStatus++; if (buildCounterGoogleRpcStatus < 3) { o.code = 42; - o.details = buildUnnamed386(); + o.details = buildUnnamed426(); o.message = 'foo'; } buildCounterGoogleRpcStatus--; @@ -28402,7 +32186,7 @@ void checkGoogleRpcStatus(api.GoogleRpcStatus o) { o.code!, unittest.equals(42), ); - checkUnnamed386(o.details!); + checkUnnamed426(o.details!); unittest.expect( o.message!, unittest.equals('foo'), @@ -28539,12 +32323,12 @@ void checkGoogleTypeMoney(api.GoogleTypeMoney o) { buildCounterGoogleTypeMoney--; } -core.List buildUnnamed387() => [ +core.List buildUnnamed427() => [ 'foo', 'foo', ]; -void checkUnnamed387(core.List o) { +void checkUnnamed427(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -28556,12 +32340,12 @@ void checkUnnamed387(core.List o) { ); } -core.List buildUnnamed388() => [ +core.List buildUnnamed428() => [ 'foo', 'foo', ]; -void checkUnnamed388(core.List o) { +void checkUnnamed428(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -28573,12 +32357,12 @@ void checkUnnamed388(core.List o) { ); } -core.List buildUnnamed389() => [ +core.List buildUnnamed429() => [ 'foo', 'foo', ]; -void checkUnnamed389(core.List o) { +void checkUnnamed429(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -28590,12 +32374,12 @@ void checkUnnamed389(core.List o) { ); } -core.List buildUnnamed390() => [ +core.List buildUnnamed430() => [ 'foo', 'foo', ]; -void checkUnnamed390(core.List o) { +void checkUnnamed430(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -28607,12 +32391,12 @@ void checkUnnamed390(core.List o) { ); } -core.List buildUnnamed391() => [ +core.List buildUnnamed431() => [ 'foo', 'foo', ]; -void checkUnnamed391(core.List o) { +void checkUnnamed431(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -28624,12 +32408,12 @@ void checkUnnamed391(core.List o) { ); } -core.List buildUnnamed392() => [ +core.List buildUnnamed432() => [ 'foo', 'foo', ]; -void checkUnnamed392(core.List o) { +void checkUnnamed432(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -28641,12 +32425,12 @@ void checkUnnamed392(core.List o) { ); } -core.List buildUnnamed393() => [ +core.List buildUnnamed433() => [ 'foo', 'foo', ]; -void checkUnnamed393(core.List o) { +void checkUnnamed433(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -28658,12 +32442,12 @@ void checkUnnamed393(core.List o) { ); } -core.List buildUnnamed394() => [ +core.List buildUnnamed434() => [ 'foo', 'foo', ]; -void checkUnnamed394(core.List o) { +void checkUnnamed434(core.List o) { 
unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -28675,12 +32459,12 @@ void checkUnnamed394(core.List o) { ); } -core.List buildUnnamed395() => [ +core.List buildUnnamed435() => [ 'foo', 'foo', ]; -void checkUnnamed395(core.List o) { +void checkUnnamed435(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -28816,6 +32600,26 @@ void main() { }); }); + unittest.group('obj-schema-GoogleCloudAiplatformV1ApiAuth', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1ApiAuth(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudAiplatformV1ApiAuth.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1ApiAuth(od); + }); + }); + + unittest.group('obj-schema-GoogleCloudAiplatformV1ApiAuthApiKeyConfig', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1ApiAuthApiKeyConfig(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudAiplatformV1ApiAuthApiKeyConfig.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1ApiAuthApiKeyConfig(od); + }); + }); + unittest.group('obj-schema-GoogleCloudAiplatformV1Artifact', () { unittest.test('to-json--from-json', () async { final o = buildGoogleCloudAiplatformV1Artifact(); @@ -28848,6 +32652,37 @@ void main() { }); }); + unittest.group('obj-schema-GoogleCloudAiplatformV1AugmentPromptRequest', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1AugmentPromptRequest(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudAiplatformV1AugmentPromptRequest.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1AugmentPromptRequest(od); + }); + }); + + unittest.group('obj-schema-GoogleCloudAiplatformV1AugmentPromptRequestModel', + () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1AugmentPromptRequestModel(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudAiplatformV1AugmentPromptRequestModel.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1AugmentPromptRequestModel(od); + }); + }); + + unittest.group('obj-schema-GoogleCloudAiplatformV1AugmentPromptResponse', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1AugmentPromptResponse(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudAiplatformV1AugmentPromptResponse.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1AugmentPromptResponse(od); + }); + }); + unittest.group('obj-schema-GoogleCloudAiplatformV1AutomaticResources', () { unittest.test('to-json--from-json', () async { final o = buildGoogleCloudAiplatformV1AutomaticResources(); @@ -29233,6 +33068,37 @@ void main() { }); }); + unittest.group('obj-schema-GoogleCloudAiplatformV1CacheConfig', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1CacheConfig(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudAiplatformV1CacheConfig.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1CacheConfig(od); + }); + }); + + unittest.group('obj-schema-GoogleCloudAiplatformV1CachedContent', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1CachedContent(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = 
api.GoogleCloudAiplatformV1CachedContent.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1CachedContent(od); + }); + }); + + unittest.group('obj-schema-GoogleCloudAiplatformV1CachedContentUsageMetadata', + () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1CachedContentUsageMetadata(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudAiplatformV1CachedContentUsageMetadata.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1CachedContentUsageMetadata(od); + }); + }); + unittest.group( 'obj-schema-GoogleCloudAiplatformV1CancelBatchPredictionJobRequest', () { unittest.test('to-json--from-json', () async { @@ -29368,6 +33234,27 @@ void main() { }); }); + unittest.group('obj-schema-GoogleCloudAiplatformV1Claim', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1Claim(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudAiplatformV1Claim.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1Claim(od); + }); + }); + + unittest.group('obj-schema-GoogleCloudAiplatformV1ClientConnectionConfig', + () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1ClientConnectionConfig(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudAiplatformV1ClientConnectionConfig.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1ClientConnectionConfig(od); + }); + }); + unittest.group('obj-schema-GoogleCloudAiplatformV1CoherenceInput', () { unittest.test('to-json--from-json', () async { final o = buildGoogleCloudAiplatformV1CoherenceInput(); @@ -29408,6 +33295,46 @@ void main() { }); }); + unittest.group('obj-schema-GoogleCloudAiplatformV1CometInput', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1CometInput(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudAiplatformV1CometInput.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1CometInput(od); + }); + }); + + unittest.group('obj-schema-GoogleCloudAiplatformV1CometInstance', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1CometInstance(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudAiplatformV1CometInstance.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1CometInstance(od); + }); + }); + + unittest.group('obj-schema-GoogleCloudAiplatformV1CometResult', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1CometResult(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudAiplatformV1CometResult.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1CometResult(od); + }); + }); + + unittest.group('obj-schema-GoogleCloudAiplatformV1CometSpec', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1CometSpec(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudAiplatformV1CometSpec.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1CometSpec(od); + }); + }); + unittest.group('obj-schema-GoogleCloudAiplatformV1CompleteTrialRequest', () { unittest.test('to-json--from-json', () async { final o = buildGoogleCloudAiplatformV1CompleteTrialRequest(); @@ -29500,6 +33427,51 @@ void main() { }); }); + 
unittest.group('obj-schema-GoogleCloudAiplatformV1CorpusStatus', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1CorpusStatus(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudAiplatformV1CorpusStatus.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1CorpusStatus(od); + }); + }); + + unittest.group('obj-schema-GoogleCloudAiplatformV1CorroborateContentRequest', + () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1CorroborateContentRequest(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudAiplatformV1CorroborateContentRequest.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1CorroborateContentRequest(od); + }); + }); + + unittest.group( + 'obj-schema-GoogleCloudAiplatformV1CorroborateContentRequestParameters', + () { + unittest.test('to-json--from-json', () async { + final o = + buildGoogleCloudAiplatformV1CorroborateContentRequestParameters(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudAiplatformV1CorroborateContentRequestParameters + .fromJson(oJson as core.Map); + checkGoogleCloudAiplatformV1CorroborateContentRequestParameters(od); + }); + }); + + unittest.group('obj-schema-GoogleCloudAiplatformV1CorroborateContentResponse', + () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1CorroborateContentResponse(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudAiplatformV1CorroborateContentResponse.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1CorroborateContentResponse(od); + }); + }); + unittest.group('obj-schema-GoogleCloudAiplatformV1CountTokensRequest', () { unittest.test('to-json--from-json', () async { final o = buildGoogleCloudAiplatformV1CountTokensRequest(); @@ -29816,6 +33788,16 @@ void main() { }); }); + unittest.group('obj-schema-GoogleCloudAiplatformV1DeployedModelStatus', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1DeployedModelStatus(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudAiplatformV1DeployedModelStatus.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1DeployedModelStatus(od); + }); + }); + unittest.group('obj-schema-GoogleCloudAiplatformV1DeploymentResourcePool', () { unittest.test('to-json--from-json', () async { @@ -29880,6 +33862,16 @@ void main() { }); }); + unittest.group('obj-schema-GoogleCloudAiplatformV1DirectUploadSource', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1DirectUploadSource(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudAiplatformV1DirectUploadSource.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1DirectUploadSource(od); + }); + }); + unittest.group('obj-schema-GoogleCloudAiplatformV1DiskSpec', () { unittest.test('to-json--from-json', () async { final o = buildGoogleCloudAiplatformV1DiskSpec(); @@ -30424,6 +34416,27 @@ void main() { }); }); + unittest.group('obj-schema-GoogleCloudAiplatformV1Fact', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1Fact(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudAiplatformV1Fact.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1Fact(od); + }); + }); + + 
unittest.group('obj-schema-GoogleCloudAiplatformV1FasterDeploymentConfig', + () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1FasterDeploymentConfig(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudAiplatformV1FasterDeploymentConfig.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1FasterDeploymentConfig(od); + }); + }); + unittest.group('obj-schema-GoogleCloudAiplatformV1Feature', () { unittest.test('to-json--from-json', () async { final o = buildGoogleCloudAiplatformV1Feature(); @@ -30733,6 +34746,17 @@ void main() { }); }); + unittest.group('obj-schema-GoogleCloudAiplatformV1FeatureViewOptimizedConfig', + () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1FeatureViewOptimizedConfig(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudAiplatformV1FeatureViewOptimizedConfig.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1FeatureViewOptimizedConfig(od); + }); + }); + unittest.group('obj-schema-GoogleCloudAiplatformV1FeatureViewSync', () { unittest.test('to-json--from-json', () async { final o = buildGoogleCloudAiplatformV1FeatureViewSync(); @@ -30920,6 +34944,18 @@ void main() { }); }); + unittest.group( + 'obj-schema-GoogleCloudAiplatformV1FetchPredictOperationRequest', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1FetchPredictOperationRequest(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.GoogleCloudAiplatformV1FetchPredictOperationRequest.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1FetchPredictOperationRequest(od); + }); + }); + unittest.group('obj-schema-GoogleCloudAiplatformV1FileData', () { unittest.test('to-json--from-json', () async { final o = buildGoogleCloudAiplatformV1FileData(); @@ -30930,6 +34966,16 @@ void main() { }); }); + unittest.group('obj-schema-GoogleCloudAiplatformV1FileStatus', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1FileStatus(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudAiplatformV1FileStatus.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1FileStatus(od); + }); + }); + unittest.group('obj-schema-GoogleCloudAiplatformV1FilterSplit', () { unittest.test('to-json--from-json', () async { final o = buildGoogleCloudAiplatformV1FilterSplit(); @@ -31270,6 +35316,28 @@ void main() { }); }); + unittest.group('obj-schema-GoogleCloudAiplatformV1GoogleDriveSource', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1GoogleDriveSource(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudAiplatformV1GoogleDriveSource.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1GoogleDriveSource(od); + }); + }); + + unittest.group( + 'obj-schema-GoogleCloudAiplatformV1GoogleDriveSourceResourceId', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1GoogleDriveSourceResourceId(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.GoogleCloudAiplatformV1GoogleDriveSourceResourceId.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1GoogleDriveSourceResourceId(od); + }); + }); + unittest.group('obj-schema-GoogleCloudAiplatformV1GoogleSearchRetrieval', () { unittest.test('to-json--from-json', () async { final o = 
buildGoogleCloudAiplatformV1GoogleSearchRetrieval(); @@ -31450,6 +35518,26 @@ void main() { }); }); + unittest.group('obj-schema-GoogleCloudAiplatformV1ImportRagFilesConfig', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1ImportRagFilesConfig(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudAiplatformV1ImportRagFilesConfig.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1ImportRagFilesConfig(od); + }); + }); + + unittest.group('obj-schema-GoogleCloudAiplatformV1ImportRagFilesRequest', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1ImportRagFilesRequest(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudAiplatformV1ImportRagFilesRequest.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1ImportRagFilesRequest(od); + }); + }); + unittest.group('obj-schema-GoogleCloudAiplatformV1Index', () { unittest.test('to-json--from-json', () async { final o = buildGoogleCloudAiplatformV1Index(); @@ -31578,6 +35666,26 @@ void main() { }); }); + unittest.group('obj-schema-GoogleCloudAiplatformV1JiraSource', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1JiraSource(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudAiplatformV1JiraSource.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1JiraSource(od); + }); + }); + + unittest.group('obj-schema-GoogleCloudAiplatformV1JiraSourceJiraQueries', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1JiraSourceJiraQueries(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudAiplatformV1JiraSourceJiraQueries.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1JiraSourceJiraQueries(od); + }); + }); + unittest.group('obj-schema-GoogleCloudAiplatformV1LargeModelReference', () { unittest.test('to-json--from-json', () async { final o = buildGoogleCloudAiplatformV1LargeModelReference(); @@ -31631,6 +35739,17 @@ void main() { }); }); + unittest.group('obj-schema-GoogleCloudAiplatformV1ListCachedContentsResponse', + () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1ListCachedContentsResponse(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudAiplatformV1ListCachedContentsResponse.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1ListCachedContentsResponse(od); + }); + }); + unittest.group('obj-schema-GoogleCloudAiplatformV1ListContextsResponse', () { unittest.test('to-json--from-json', () async { final o = buildGoogleCloudAiplatformV1ListContextsResponse(); @@ -32030,6 +36149,39 @@ void main() { }); }); + unittest.group('obj-schema-GoogleCloudAiplatformV1ListRagCorporaResponse', + () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1ListRagCorporaResponse(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudAiplatformV1ListRagCorporaResponse.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1ListRagCorporaResponse(od); + }); + }); + + unittest.group('obj-schema-GoogleCloudAiplatformV1ListRagFilesResponse', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1ListRagFilesResponse(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = 
api.GoogleCloudAiplatformV1ListRagFilesResponse.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1ListRagFilesResponse(od); + }); + }); + + unittest.group( + 'obj-schema-GoogleCloudAiplatformV1ListReasoningEnginesResponse', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1ListReasoningEnginesResponse(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.GoogleCloudAiplatformV1ListReasoningEnginesResponse.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1ListReasoningEnginesResponse(od); + }); + }); + unittest.group('obj-schema-GoogleCloudAiplatformV1ListSavedQueriesResponse', () { unittest.test('to-json--from-json', () async { @@ -32295,6 +36447,46 @@ void main() { }); }); + unittest.group('obj-schema-GoogleCloudAiplatformV1MetricxInput', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1MetricxInput(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudAiplatformV1MetricxInput.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1MetricxInput(od); + }); + }); + + unittest.group('obj-schema-GoogleCloudAiplatformV1MetricxInstance', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1MetricxInstance(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudAiplatformV1MetricxInstance.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1MetricxInstance(od); + }); + }); + + unittest.group('obj-schema-GoogleCloudAiplatformV1MetricxResult', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1MetricxResult(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudAiplatformV1MetricxResult.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1MetricxResult(od); + }); + }); + + unittest.group('obj-schema-GoogleCloudAiplatformV1MetricxSpec', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1MetricxSpec(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudAiplatformV1MetricxSpec.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1MetricxSpec(od); + }); + }); + unittest.group('obj-schema-GoogleCloudAiplatformV1MigratableResource', () { unittest.test('to-json--from-json', () async { final o = buildGoogleCloudAiplatformV1MigratableResource(); @@ -33121,6 +37313,20 @@ void main() { }); }); + unittest.group( + 'obj-schema-GoogleCloudAiplatformV1NotebookExecutionJobCustomEnvironmentSpec', + () { + unittest.test('to-json--from-json', () async { + final o = + buildGoogleCloudAiplatformV1NotebookExecutionJobCustomEnvironmentSpec(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.GoogleCloudAiplatformV1NotebookExecutionJobCustomEnvironmentSpec + .fromJson(oJson as core.Map); + checkGoogleCloudAiplatformV1NotebookExecutionJobCustomEnvironmentSpec(od); + }); + }); + unittest.group( 'obj-schema-GoogleCloudAiplatformV1NotebookExecutionJobDataformRepositorySource', () { @@ -33164,6 +37370,19 @@ void main() { }); }); + unittest.group( + 'obj-schema-GoogleCloudAiplatformV1NotebookExecutionJobWorkbenchRuntime', + () { + unittest.test('to-json--from-json', () async { + final o = + buildGoogleCloudAiplatformV1NotebookExecutionJobWorkbenchRuntime(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = 
api.GoogleCloudAiplatformV1NotebookExecutionJobWorkbenchRuntime + .fromJson(oJson as core.Map); + checkGoogleCloudAiplatformV1NotebookExecutionJobWorkbenchRuntime(od); + }); + }); + unittest.group('obj-schema-GoogleCloudAiplatformV1NotebookIdleShutdownConfig', () { unittest.test('to-json--from-json', () async { @@ -33599,6 +37818,16 @@ void main() { }); }); + unittest.group('obj-schema-GoogleCloudAiplatformV1PrebuiltVoiceConfig', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1PrebuiltVoiceConfig(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudAiplatformV1PrebuiltVoiceConfig.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1PrebuiltVoiceConfig(od); + }); + }); + unittest.group('obj-schema-GoogleCloudAiplatformV1PredefinedSplit', () { unittest.test('to-json--from-json', () async { final o = buildGoogleCloudAiplatformV1PredefinedSplit(); @@ -33609,6 +37838,17 @@ void main() { }); }); + unittest.group('obj-schema-GoogleCloudAiplatformV1PredictLongRunningRequest', + () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1PredictLongRunningRequest(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudAiplatformV1PredictLongRunningRequest.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1PredictLongRunningRequest(od); + }); + }); + unittest.group('obj-schema-GoogleCloudAiplatformV1PredictRequest', () { unittest.test('to-json--from-json', () async { final o = buildGoogleCloudAiplatformV1PredictRequest(); @@ -33714,16 +37954,6 @@ void main() { }); }); - unittest.group('obj-schema-GoogleCloudAiplatformV1PscInterfaceConfig', () { - unittest.test('to-json--from-json', () async { - final o = buildGoogleCloudAiplatformV1PscInterfaceConfig(); - final oJson = convert.jsonDecode(convert.jsonEncode(o)); - final od = api.GoogleCloudAiplatformV1PscInterfaceConfig.fromJson( - oJson as core.Map); - checkGoogleCloudAiplatformV1PscInterfaceConfig(od); - }); - }); - unittest.group('obj-schema-GoogleCloudAiplatformV1PublisherModel', () { unittest.test('to-json--from-json', () async { final o = buildGoogleCloudAiplatformV1PublisherModel(); @@ -33934,6 +38164,30 @@ void main() { }); }); + unittest.group( + 'obj-schema-GoogleCloudAiplatformV1QueryReasoningEngineRequest', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1QueryReasoningEngineRequest(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.GoogleCloudAiplatformV1QueryReasoningEngineRequest.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1QueryReasoningEngineRequest(od); + }); + }); + + unittest.group( + 'obj-schema-GoogleCloudAiplatformV1QueryReasoningEngineResponse', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1QueryReasoningEngineResponse(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.GoogleCloudAiplatformV1QueryReasoningEngineResponse.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1QueryReasoningEngineResponse(od); + }); + }); + unittest.group( 'obj-schema-GoogleCloudAiplatformV1QuestionAnsweringCorrectnessInput', () { @@ -34133,6 +38387,185 @@ void main() { }); }); + unittest.group('obj-schema-GoogleCloudAiplatformV1RagContexts', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1RagContexts(); + final oJson = 
convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudAiplatformV1RagContexts.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1RagContexts(od); + }); + }); + + unittest.group('obj-schema-GoogleCloudAiplatformV1RagContextsContext', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1RagContextsContext(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudAiplatformV1RagContextsContext.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1RagContextsContext(od); + }); + }); + + unittest.group('obj-schema-GoogleCloudAiplatformV1RagCorpus', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1RagCorpus(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudAiplatformV1RagCorpus.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1RagCorpus(od); + }); + }); + + unittest.group('obj-schema-GoogleCloudAiplatformV1RagEmbeddingModelConfig', + () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1RagEmbeddingModelConfig(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudAiplatformV1RagEmbeddingModelConfig.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1RagEmbeddingModelConfig(od); + }); + }); + + unittest.group( + 'obj-schema-GoogleCloudAiplatformV1RagEmbeddingModelConfigVertexPredictionEndpoint', + () { + unittest.test('to-json--from-json', () async { + final o = + buildGoogleCloudAiplatformV1RagEmbeddingModelConfigVertexPredictionEndpoint(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.GoogleCloudAiplatformV1RagEmbeddingModelConfigVertexPredictionEndpoint + .fromJson(oJson as core.Map); + checkGoogleCloudAiplatformV1RagEmbeddingModelConfigVertexPredictionEndpoint( + od); + }); + }); + + unittest.group('obj-schema-GoogleCloudAiplatformV1RagFile', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1RagFile(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudAiplatformV1RagFile.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1RagFile(od); + }); + }); + + unittest.group('obj-schema-GoogleCloudAiplatformV1RagFileChunkingConfig', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1RagFileChunkingConfig(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudAiplatformV1RagFileChunkingConfig.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1RagFileChunkingConfig(od); + }); + }); + + unittest.group( + 'obj-schema-GoogleCloudAiplatformV1RagFileChunkingConfigFixedLengthChunking', + () { + unittest.test('to-json--from-json', () async { + final o = + buildGoogleCloudAiplatformV1RagFileChunkingConfigFixedLengthChunking(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.GoogleCloudAiplatformV1RagFileChunkingConfigFixedLengthChunking + .fromJson(oJson as core.Map); + checkGoogleCloudAiplatformV1RagFileChunkingConfigFixedLengthChunking(od); + }); + }); + + unittest.group( + 'obj-schema-GoogleCloudAiplatformV1RagFileTransformationConfig', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1RagFileTransformationConfig(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + 
api.GoogleCloudAiplatformV1RagFileTransformationConfig.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1RagFileTransformationConfig(od); + }); + }); + + unittest.group('obj-schema-GoogleCloudAiplatformV1RagQuery', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1RagQuery(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudAiplatformV1RagQuery.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1RagQuery(od); + }); + }); + + unittest.group('obj-schema-GoogleCloudAiplatformV1RagRetrievalConfig', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1RagRetrievalConfig(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudAiplatformV1RagRetrievalConfig.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1RagRetrievalConfig(od); + }); + }); + + unittest.group('obj-schema-GoogleCloudAiplatformV1RagRetrievalConfigFilter', + () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1RagRetrievalConfigFilter(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudAiplatformV1RagRetrievalConfigFilter.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1RagRetrievalConfigFilter(od); + }); + }); + + unittest.group('obj-schema-GoogleCloudAiplatformV1RagVectorDbConfig', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1RagVectorDbConfig(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudAiplatformV1RagVectorDbConfig.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1RagVectorDbConfig(od); + }); + }); + + unittest.group('obj-schema-GoogleCloudAiplatformV1RagVectorDbConfigPinecone', + () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1RagVectorDbConfigPinecone(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudAiplatformV1RagVectorDbConfigPinecone.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1RagVectorDbConfigPinecone(od); + }); + }); + + unittest.group( + 'obj-schema-GoogleCloudAiplatformV1RagVectorDbConfigRagManagedDb', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1RagVectorDbConfigRagManagedDb(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.GoogleCloudAiplatformV1RagVectorDbConfigRagManagedDb.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1RagVectorDbConfigRagManagedDb(od); + }); + }); + + unittest.group( + 'obj-schema-GoogleCloudAiplatformV1RagVectorDbConfigVertexVectorSearch', + () { + unittest.test('to-json--from-json', () async { + final o = + buildGoogleCloudAiplatformV1RagVectorDbConfigVertexVectorSearch(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudAiplatformV1RagVectorDbConfigVertexVectorSearch + .fromJson(oJson as core.Map); + checkGoogleCloudAiplatformV1RagVectorDbConfigVertexVectorSearch(od); + }); + }); + unittest.group('obj-schema-GoogleCloudAiplatformV1RawPredictRequest', () { unittest.test('to-json--from-json', () async { final o = buildGoogleCloudAiplatformV1RawPredictRequest(); @@ -34352,6 +38785,38 @@ void main() { }); }); + unittest.group('obj-schema-GoogleCloudAiplatformV1ReasoningEngine', () { + unittest.test('to-json--from-json', () async { + final o = 
buildGoogleCloudAiplatformV1ReasoningEngine(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudAiplatformV1ReasoningEngine.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1ReasoningEngine(od); + }); + }); + + unittest.group('obj-schema-GoogleCloudAiplatformV1ReasoningEngineSpec', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1ReasoningEngineSpec(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudAiplatformV1ReasoningEngineSpec.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1ReasoningEngineSpec(od); + }); + }); + + unittest.group( + 'obj-schema-GoogleCloudAiplatformV1ReasoningEngineSpecPackageSpec', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1ReasoningEngineSpecPackageSpec(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.GoogleCloudAiplatformV1ReasoningEngineSpecPackageSpec.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1ReasoningEngineSpecPackageSpec(od); + }); + }); + unittest.group('obj-schema-GoogleCloudAiplatformV1RebaseTunedModelRequest', () { unittest.test('to-json--from-json', () async { @@ -34527,6 +38992,57 @@ void main() { }); }); + unittest.group('obj-schema-GoogleCloudAiplatformV1RetrieveContextsRequest', + () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1RetrieveContextsRequest(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudAiplatformV1RetrieveContextsRequest.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1RetrieveContextsRequest(od); + }); + }); + + unittest.group( + 'obj-schema-GoogleCloudAiplatformV1RetrieveContextsRequestVertexRagStore', + () { + unittest.test('to-json--from-json', () async { + final o = + buildGoogleCloudAiplatformV1RetrieveContextsRequestVertexRagStore(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.GoogleCloudAiplatformV1RetrieveContextsRequestVertexRagStore + .fromJson(oJson as core.Map); + checkGoogleCloudAiplatformV1RetrieveContextsRequestVertexRagStore(od); + }); + }); + + unittest.group( + 'obj-schema-GoogleCloudAiplatformV1RetrieveContextsRequestVertexRagStoreRagResource', + () { + unittest.test('to-json--from-json', () async { + final o = + buildGoogleCloudAiplatformV1RetrieveContextsRequestVertexRagStoreRagResource(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.GoogleCloudAiplatformV1RetrieveContextsRequestVertexRagStoreRagResource + .fromJson(oJson as core.Map); + checkGoogleCloudAiplatformV1RetrieveContextsRequestVertexRagStoreRagResource( + od); + }); + }); + + unittest.group('obj-schema-GoogleCloudAiplatformV1RetrieveContextsResponse', + () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1RetrieveContextsResponse(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudAiplatformV1RetrieveContextsResponse.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1RetrieveContextsResponse(od); + }); + }); + unittest.group('obj-schema-GoogleCloudAiplatformV1RougeInput', () { unittest.test('to-json--from-json', () async { final o = buildGoogleCloudAiplatformV1RougeInput(); @@ -34887,6 +39403,29 @@ void main() { }); }); + unittest.group('obj-schema-GoogleCloudAiplatformV1SharePointSources', () { + unittest.test('to-json--from-json', () async { + final o = 
buildGoogleCloudAiplatformV1SharePointSources(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudAiplatformV1SharePointSources.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1SharePointSources(od); + }); + }); + + unittest.group( + 'obj-schema-GoogleCloudAiplatformV1SharePointSourcesSharePointSource', + () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1SharePointSourcesSharePointSource(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.GoogleCloudAiplatformV1SharePointSourcesSharePointSource.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1SharePointSourcesSharePointSource(od); + }); + }); + unittest.group('obj-schema-GoogleCloudAiplatformV1ShieldedVmConfig', () { unittest.test('to-json--from-json', () async { final o = buildGoogleCloudAiplatformV1ShieldedVmConfig(); @@ -34897,6 +39436,40 @@ void main() { }); }); + unittest.group('obj-schema-GoogleCloudAiplatformV1SlackSource', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1SlackSource(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudAiplatformV1SlackSource.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1SlackSource(od); + }); + }); + + unittest.group('obj-schema-GoogleCloudAiplatformV1SlackSourceSlackChannels', + () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1SlackSourceSlackChannels(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudAiplatformV1SlackSourceSlackChannels.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1SlackSourceSlackChannels(od); + }); + }); + + unittest.group( + 'obj-schema-GoogleCloudAiplatformV1SlackSourceSlackChannelsSlackChannel', + () { + unittest.test('to-json--from-json', () async { + final o = + buildGoogleCloudAiplatformV1SlackSourceSlackChannelsSlackChannel(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudAiplatformV1SlackSourceSlackChannelsSlackChannel + .fromJson(oJson as core.Map); + checkGoogleCloudAiplatformV1SlackSourceSlackChannelsSlackChannel(od); + }); + }); + unittest.group('obj-schema-GoogleCloudAiplatformV1SmoothGradConfig', () { unittest.test('to-json--from-json', () async { final o = buildGoogleCloudAiplatformV1SmoothGradConfig(); @@ -34917,6 +39490,16 @@ void main() { }); }); + unittest.group('obj-schema-GoogleCloudAiplatformV1SpeechConfig', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1SpeechConfig(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudAiplatformV1SpeechConfig.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1SpeechConfig(od); + }); + }); + unittest.group( 'obj-schema-GoogleCloudAiplatformV1StartNotebookRuntimeRequest', () { unittest.test('to-json--from-json', () async { @@ -34929,6 +39512,17 @@ void main() { }); }); + unittest.group('obj-schema-GoogleCloudAiplatformV1StopNotebookRuntimeRequest', + () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1StopNotebookRuntimeRequest(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudAiplatformV1StopNotebookRuntimeRequest.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1StopNotebookRuntimeRequest(od); + }); + }); + 
unittest.group('obj-schema-GoogleCloudAiplatformV1StopTrialRequest', () { unittest.test('to-json--from-json', () async { final o = buildGoogleCloudAiplatformV1StopTrialRequest(); @@ -35654,6 +40248,16 @@ void main() { }); }); + unittest.group('obj-schema-GoogleCloudAiplatformV1ToolCall', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1ToolCall(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudAiplatformV1ToolCall.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1ToolCall(od); + }); + }); + unittest.group('obj-schema-GoogleCloudAiplatformV1ToolCallValidInput', () { unittest.test('to-json--from-json', () async { final o = buildGoogleCloudAiplatformV1ToolCallValidInput(); @@ -35715,6 +40319,16 @@ void main() { }); }); + unittest.group('obj-schema-GoogleCloudAiplatformV1ToolGoogleSearch', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1ToolGoogleSearch(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudAiplatformV1ToolGoogleSearch.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1ToolGoogleSearch(od); + }); + }); + unittest.group('obj-schema-GoogleCloudAiplatformV1ToolNameMatchInput', () { unittest.test('to-json--from-json', () async { final o = buildGoogleCloudAiplatformV1ToolNameMatchInput(); @@ -35902,6 +40516,367 @@ void main() { }); }); + unittest.group('obj-schema-GoogleCloudAiplatformV1Trajectory', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1Trajectory(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudAiplatformV1Trajectory.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1Trajectory(od); + }); + }); + + unittest.group( + 'obj-schema-GoogleCloudAiplatformV1TrajectoryAnyOrderMatchInput', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1TrajectoryAnyOrderMatchInput(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.GoogleCloudAiplatformV1TrajectoryAnyOrderMatchInput.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1TrajectoryAnyOrderMatchInput(od); + }); + }); + + unittest.group( + 'obj-schema-GoogleCloudAiplatformV1TrajectoryAnyOrderMatchInstance', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1TrajectoryAnyOrderMatchInstance(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.GoogleCloudAiplatformV1TrajectoryAnyOrderMatchInstance.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1TrajectoryAnyOrderMatchInstance(od); + }); + }); + + unittest.group( + 'obj-schema-GoogleCloudAiplatformV1TrajectoryAnyOrderMatchMetricValue', + () { + unittest.test('to-json--from-json', () async { + final o = + buildGoogleCloudAiplatformV1TrajectoryAnyOrderMatchMetricValue(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudAiplatformV1TrajectoryAnyOrderMatchMetricValue + .fromJson(oJson as core.Map); + checkGoogleCloudAiplatformV1TrajectoryAnyOrderMatchMetricValue(od); + }); + }); + + unittest.group( + 'obj-schema-GoogleCloudAiplatformV1TrajectoryAnyOrderMatchResults', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1TrajectoryAnyOrderMatchResults(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + 
api.GoogleCloudAiplatformV1TrajectoryAnyOrderMatchResults.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1TrajectoryAnyOrderMatchResults(od); + }); + }); + + unittest.group( + 'obj-schema-GoogleCloudAiplatformV1TrajectoryAnyOrderMatchSpec', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1TrajectoryAnyOrderMatchSpec(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.GoogleCloudAiplatformV1TrajectoryAnyOrderMatchSpec.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1TrajectoryAnyOrderMatchSpec(od); + }); + }); + + unittest.group('obj-schema-GoogleCloudAiplatformV1TrajectoryExactMatchInput', + () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1TrajectoryExactMatchInput(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudAiplatformV1TrajectoryExactMatchInput.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1TrajectoryExactMatchInput(od); + }); + }); + + unittest.group( + 'obj-schema-GoogleCloudAiplatformV1TrajectoryExactMatchInstance', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1TrajectoryExactMatchInstance(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.GoogleCloudAiplatformV1TrajectoryExactMatchInstance.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1TrajectoryExactMatchInstance(od); + }); + }); + + unittest.group( + 'obj-schema-GoogleCloudAiplatformV1TrajectoryExactMatchMetricValue', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1TrajectoryExactMatchMetricValue(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.GoogleCloudAiplatformV1TrajectoryExactMatchMetricValue.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1TrajectoryExactMatchMetricValue(od); + }); + }); + + unittest.group( + 'obj-schema-GoogleCloudAiplatformV1TrajectoryExactMatchResults', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1TrajectoryExactMatchResults(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.GoogleCloudAiplatformV1TrajectoryExactMatchResults.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1TrajectoryExactMatchResults(od); + }); + }); + + unittest.group('obj-schema-GoogleCloudAiplatformV1TrajectoryExactMatchSpec', + () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1TrajectoryExactMatchSpec(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudAiplatformV1TrajectoryExactMatchSpec.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1TrajectoryExactMatchSpec(od); + }); + }); + + unittest.group( + 'obj-schema-GoogleCloudAiplatformV1TrajectoryInOrderMatchInput', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1TrajectoryInOrderMatchInput(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.GoogleCloudAiplatformV1TrajectoryInOrderMatchInput.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1TrajectoryInOrderMatchInput(od); + }); + }); + + unittest.group( + 'obj-schema-GoogleCloudAiplatformV1TrajectoryInOrderMatchInstance', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1TrajectoryInOrderMatchInstance(); + final oJson = 
convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.GoogleCloudAiplatformV1TrajectoryInOrderMatchInstance.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1TrajectoryInOrderMatchInstance(od); + }); + }); + + unittest.group( + 'obj-schema-GoogleCloudAiplatformV1TrajectoryInOrderMatchMetricValue', + () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1TrajectoryInOrderMatchMetricValue(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.GoogleCloudAiplatformV1TrajectoryInOrderMatchMetricValue.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1TrajectoryInOrderMatchMetricValue(od); + }); + }); + + unittest.group( + 'obj-schema-GoogleCloudAiplatformV1TrajectoryInOrderMatchResults', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1TrajectoryInOrderMatchResults(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.GoogleCloudAiplatformV1TrajectoryInOrderMatchResults.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1TrajectoryInOrderMatchResults(od); + }); + }); + + unittest.group('obj-schema-GoogleCloudAiplatformV1TrajectoryInOrderMatchSpec', + () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1TrajectoryInOrderMatchSpec(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudAiplatformV1TrajectoryInOrderMatchSpec.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1TrajectoryInOrderMatchSpec(od); + }); + }); + + unittest.group('obj-schema-GoogleCloudAiplatformV1TrajectoryPrecisionInput', + () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1TrajectoryPrecisionInput(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudAiplatformV1TrajectoryPrecisionInput.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1TrajectoryPrecisionInput(od); + }); + }); + + unittest.group( + 'obj-schema-GoogleCloudAiplatformV1TrajectoryPrecisionInstance', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1TrajectoryPrecisionInstance(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.GoogleCloudAiplatformV1TrajectoryPrecisionInstance.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1TrajectoryPrecisionInstance(od); + }); + }); + + unittest.group( + 'obj-schema-GoogleCloudAiplatformV1TrajectoryPrecisionMetricValue', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1TrajectoryPrecisionMetricValue(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.GoogleCloudAiplatformV1TrajectoryPrecisionMetricValue.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1TrajectoryPrecisionMetricValue(od); + }); + }); + + unittest.group('obj-schema-GoogleCloudAiplatformV1TrajectoryPrecisionResults', + () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1TrajectoryPrecisionResults(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudAiplatformV1TrajectoryPrecisionResults.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1TrajectoryPrecisionResults(od); + }); + }); + + unittest.group('obj-schema-GoogleCloudAiplatformV1TrajectoryPrecisionSpec', + () { + unittest.test('to-json--from-json', () async { + final o = 
buildGoogleCloudAiplatformV1TrajectoryPrecisionSpec(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudAiplatformV1TrajectoryPrecisionSpec.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1TrajectoryPrecisionSpec(od); + }); + }); + + unittest.group('obj-schema-GoogleCloudAiplatformV1TrajectoryRecallInput', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1TrajectoryRecallInput(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudAiplatformV1TrajectoryRecallInput.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1TrajectoryRecallInput(od); + }); + }); + + unittest.group('obj-schema-GoogleCloudAiplatformV1TrajectoryRecallInstance', + () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1TrajectoryRecallInstance(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudAiplatformV1TrajectoryRecallInstance.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1TrajectoryRecallInstance(od); + }); + }); + + unittest.group( + 'obj-schema-GoogleCloudAiplatformV1TrajectoryRecallMetricValue', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1TrajectoryRecallMetricValue(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.GoogleCloudAiplatformV1TrajectoryRecallMetricValue.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1TrajectoryRecallMetricValue(od); + }); + }); + + unittest.group('obj-schema-GoogleCloudAiplatformV1TrajectoryRecallResults', + () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1TrajectoryRecallResults(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudAiplatformV1TrajectoryRecallResults.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1TrajectoryRecallResults(od); + }); + }); + + unittest.group('obj-schema-GoogleCloudAiplatformV1TrajectoryRecallSpec', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1TrajectoryRecallSpec(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudAiplatformV1TrajectoryRecallSpec.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1TrajectoryRecallSpec(od); + }); + }); + + unittest.group( + 'obj-schema-GoogleCloudAiplatformV1TrajectorySingleToolUseInput', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1TrajectorySingleToolUseInput(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.GoogleCloudAiplatformV1TrajectorySingleToolUseInput.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1TrajectorySingleToolUseInput(od); + }); + }); + + unittest.group( + 'obj-schema-GoogleCloudAiplatformV1TrajectorySingleToolUseInstance', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1TrajectorySingleToolUseInstance(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.GoogleCloudAiplatformV1TrajectorySingleToolUseInstance.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1TrajectorySingleToolUseInstance(od); + }); + }); + + unittest.group( + 'obj-schema-GoogleCloudAiplatformV1TrajectorySingleToolUseMetricValue', + () { + unittest.test('to-json--from-json', () async { + final o = + 
buildGoogleCloudAiplatformV1TrajectorySingleToolUseMetricValue(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudAiplatformV1TrajectorySingleToolUseMetricValue + .fromJson(oJson as core.Map); + checkGoogleCloudAiplatformV1TrajectorySingleToolUseMetricValue(od); + }); + }); + + unittest.group( + 'obj-schema-GoogleCloudAiplatformV1TrajectorySingleToolUseResults', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1TrajectorySingleToolUseResults(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.GoogleCloudAiplatformV1TrajectorySingleToolUseResults.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1TrajectorySingleToolUseResults(od); + }); + }); + + unittest.group( + 'obj-schema-GoogleCloudAiplatformV1TrajectorySingleToolUseSpec', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1TrajectorySingleToolUseSpec(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.GoogleCloudAiplatformV1TrajectorySingleToolUseSpec.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1TrajectorySingleToolUseSpec(od); + }); + }); + unittest.group('obj-schema-GoogleCloudAiplatformV1Trial', () { unittest.test('to-json--from-json', () async { final o = buildGoogleCloudAiplatformV1Trial(); @@ -36003,6 +40978,18 @@ void main() { }); }); + unittest.group( + 'obj-schema-GoogleCloudAiplatformV1UpdateEndpointLongRunningRequest', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1UpdateEndpointLongRunningRequest(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.GoogleCloudAiplatformV1UpdateEndpointLongRunningRequest.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1UpdateEndpointLongRunningRequest(od); + }); + }); + unittest.group( 'obj-schema-GoogleCloudAiplatformV1UpdateExplanationDatasetRequest', () { unittest.test('to-json--from-json', () async { @@ -36037,6 +41024,36 @@ void main() { }); }); + unittest.group('obj-schema-GoogleCloudAiplatformV1UploadRagFileConfig', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1UploadRagFileConfig(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudAiplatformV1UploadRagFileConfig.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1UploadRagFileConfig(od); + }); + }); + + unittest.group('obj-schema-GoogleCloudAiplatformV1UploadRagFileRequest', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1UploadRagFileRequest(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudAiplatformV1UploadRagFileRequest.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1UploadRagFileRequest(od); + }); + }); + + unittest.group('obj-schema-GoogleCloudAiplatformV1UploadRagFileResponse', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1UploadRagFileResponse(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudAiplatformV1UploadRagFileResponse.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1UploadRagFileResponse(od); + }); + }); + unittest.group('obj-schema-GoogleCloudAiplatformV1UpsertDatapointsRequest', () { unittest.test('to-json--from-json', () async { @@ -36120,6 +41137,16 @@ void main() { }); }); + 
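[Editorial note, not part of the generated patch] The dozens of new obj-schema-* groups added above (TrajectoryAnyOrderMatch*, TrajectoryExactMatch*, TrajectoryInOrderMatch*, TrajectoryPrecision*, TrajectoryRecall*, TrajectorySingleToolUse*, UpdateEndpointLongRunningRequest, UploadRagFile*, and so on) all follow the generator's JSON round-trip pattern: build a populated object, encode it to JSON, decode it, rehydrate via fromJson, and check every field survived. A minimal sketch of that pattern follows; Foo, buildFoo, and checkFoo are hypothetical stand-ins for the generated schema class and its build/check helpers, and the single "name" field is an assumption for illustration only.

import 'dart:convert' as convert;
import 'package:test/test.dart' as unittest;

/// Hypothetical stand-in for a generated schema class such as
/// GoogleCloudAiplatformV1VoiceConfig (class name and field are assumptions).
class Foo {
  Foo({this.name});
  Foo.fromJson(Map<String, dynamic> json) : name = json['name'] as String?;
  String? name;
  Map<String, dynamic> toJson() => {if (name != null) 'name': name};
}

Foo buildFoo() => Foo(name: 'example');
void checkFoo(Foo o) => unittest.expect(o.name, unittest.equals('example'));

void main() {
  unittest.group('obj-schema-Foo', () {
    unittest.test('to-json--from-json', () async {
      final o = buildFoo();
      // Encode to a JSON string and parse it back, as the generated tests do;
      // jsonEncode picks up Foo.toJson() automatically.
      final oJson = convert.jsonDecode(convert.jsonEncode(o));
      // Rehydrate from the decoded map and verify the round trip kept every field.
      final od = Foo.fromJson(oJson as Map<String, dynamic>);
      checkFoo(od);
    });
  });
}

The point of the round trip is to catch fields that toJson() serializes but fromJson() drops (or vice versa); the resource-method tests further down exercise the request/response side of the same surface with HttpServerMock instead.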
unittest.group('obj-schema-GoogleCloudAiplatformV1VoiceConfig', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudAiplatformV1VoiceConfig(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudAiplatformV1VoiceConfig.fromJson( + oJson as core.Map); + checkGoogleCloudAiplatformV1VoiceConfig(od); + }); + }); + unittest.group('obj-schema-GoogleCloudAiplatformV1WorkerPoolSpec', () { unittest.test('to-json--from-json', () async { final o = buildGoogleCloudAiplatformV1WorkerPoolSpec(); @@ -37313,19 +42340,20 @@ void main() { }); }); - unittest.group('resource-ProjectsLocationsResource', () { - unittest.test('method--evaluateInstances', () async { + unittest.group('resource-MediaResource', () { + unittest.test('method--upload', () async { + // TODO: Implement tests for media upload; + // TODO: Implement tests for media download; + final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations; - final arg_request = - buildGoogleCloudAiplatformV1EvaluateInstancesRequest(); - final arg_location = 'foo'; + final res = api.AiplatformApi(mock).media; + final arg_request = buildGoogleCloudAiplatformV1UploadRagFileRequest(); + final arg_parent = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = - api.GoogleCloudAiplatformV1EvaluateInstancesRequest.fromJson( - json as core.Map); - checkGoogleCloudAiplatformV1EvaluateInstancesRequest(obj); + final obj = api.GoogleCloudAiplatformV1UploadRagFileRequest.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1UploadRagFileRequest(obj); final path = req.url.path; var pathOffset = 0; @@ -37367,18 +42395,20 @@ void main() { 'content-type': 'application/json; charset=utf-8', }; final resp = convert.json - .encode(buildGoogleCloudAiplatformV1EvaluateInstancesResponse()); + .encode(buildGoogleCloudAiplatformV1UploadRagFileResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.evaluateInstances(arg_request, arg_location, - $fields: arg_$fields); - checkGoogleCloudAiplatformV1EvaluateInstancesResponse( - response as api.GoogleCloudAiplatformV1EvaluateInstancesResponse); + final response = + await res.upload(arg_request, arg_parent, $fields: arg_$fields); + checkGoogleCloudAiplatformV1UploadRagFileResponse( + response as api.GoogleCloudAiplatformV1UploadRagFileResponse); }); + }); - unittest.test('method--get', () async { + unittest.group('resource-ProjectsResource', () { + unittest.test('method--getCacheConfig', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations; + final res = api.AiplatformApi(mock).projects; final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -37421,23 +42451,26 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleCloudLocationLocation()); + final resp = + convert.json.encode(buildGoogleCloudAiplatformV1CacheConfig()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.get(arg_name, $fields: arg_$fields); - checkGoogleCloudLocationLocation( - response as api.GoogleCloudLocationLocation); + final response = await res.getCacheConfig(arg_name, $fields: arg_$fields); + checkGoogleCloudAiplatformV1CacheConfig( + response as api.GoogleCloudAiplatformV1CacheConfig); }); - unittest.test('method--list', () 
async { + unittest.test('method--updateCacheConfig', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations; + final res = api.AiplatformApi(mock).projects; + final arg_request = buildGoogleCloudAiplatformV1CacheConfig(); final arg_name = 'foo'; - final arg_filter = 'foo'; - final arg_pageSize = 42; - final arg_pageToken = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.GoogleCloudAiplatformV1CacheConfig.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1CacheConfig(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -37469,18 +42502,6 @@ void main() { ); } } - unittest.expect( - queryMap['filter']!.first, - unittest.equals(arg_filter), - ); - unittest.expect( - core.int.parse(queryMap['pageSize']!.first), - unittest.equals(arg_pageSize), - ); - unittest.expect( - queryMap['pageToken']!.first, - unittest.equals(arg_pageToken), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -37489,34 +42510,27 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json - .encode(buildGoogleCloudLocationListLocationsResponse()); + final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.list(arg_name, - filter: arg_filter, - pageSize: arg_pageSize, - pageToken: arg_pageToken, + final response = await res.updateCacheConfig(arg_request, arg_name, $fields: arg_$fields); - checkGoogleCloudLocationListLocationsResponse( - response as api.GoogleCloudLocationListLocationsResponse); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); }); }); - unittest.group('resource-ProjectsLocationsBatchPredictionJobsResource', () { - unittest.test('method--cancel', () async { + unittest.group('resource-ProjectsLocationsResource', () { + unittest.test('method--augmentPrompt', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.batchPredictionJobs; - final arg_request = - buildGoogleCloudAiplatformV1CancelBatchPredictionJobRequest(); - final arg_name = 'foo'; + final res = api.AiplatformApi(mock).projects.locations; + final arg_request = buildGoogleCloudAiplatformV1AugmentPromptRequest(); + final arg_parent = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = - api.GoogleCloudAiplatformV1CancelBatchPredictionJobRequest.fromJson( - json as core.Map); - checkGoogleCloudAiplatformV1CancelBatchPredictionJobRequest(obj); + final obj = api.GoogleCloudAiplatformV1AugmentPromptRequest.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1AugmentPromptRequest(obj); final path = req.url.path; var pathOffset = 0; @@ -37557,25 +42571,28 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleProtobufEmpty()); + final resp = convert.json + .encode(buildGoogleCloudAiplatformV1AugmentPromptResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = - await res.cancel(arg_request, arg_name, $fields: arg_$fields); - checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); + final response = await res.augmentPrompt(arg_request, arg_parent, + $fields: arg_$fields); + checkGoogleCloudAiplatformV1AugmentPromptResponse( + response as 
api.GoogleCloudAiplatformV1AugmentPromptResponse); }); - unittest.test('method--create', () async { + unittest.test('method--corroborateContent', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.batchPredictionJobs; - final arg_request = buildGoogleCloudAiplatformV1BatchPredictionJob(); + final res = api.AiplatformApi(mock).projects.locations; + final arg_request = + buildGoogleCloudAiplatformV1CorroborateContentRequest(); final arg_parent = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleCloudAiplatformV1BatchPredictionJob.fromJson( - json as core.Map); - checkGoogleCloudAiplatformV1BatchPredictionJob(obj); + final obj = + api.GoogleCloudAiplatformV1CorroborateContentRequest.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1CorroborateContentRequest(obj); final path = req.url.path; var pathOffset = 0; @@ -37617,22 +42634,28 @@ void main() { 'content-type': 'application/json; charset=utf-8', }; final resp = convert.json - .encode(buildGoogleCloudAiplatformV1BatchPredictionJob()); + .encode(buildGoogleCloudAiplatformV1CorroborateContentResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = - await res.create(arg_request, arg_parent, $fields: arg_$fields); - checkGoogleCloudAiplatformV1BatchPredictionJob( - response as api.GoogleCloudAiplatformV1BatchPredictionJob); + final response = await res.corroborateContent(arg_request, arg_parent, + $fields: arg_$fields); + checkGoogleCloudAiplatformV1CorroborateContentResponse( + response as api.GoogleCloudAiplatformV1CorroborateContentResponse); }); - unittest.test('method--delete', () async { + unittest.test('method--evaluateInstances', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.batchPredictionJobs; - final arg_name = 'foo'; + final res = api.AiplatformApi(mock).projects.locations; + final arg_request = + buildGoogleCloudAiplatformV1EvaluateInstancesRequest(); + final arg_location = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = + api.GoogleCloudAiplatformV1EvaluateInstancesRequest.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1EvaluateInstancesRequest(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -37672,18 +42695,19 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleLongrunningOperation()); + final resp = convert.json + .encode(buildGoogleCloudAiplatformV1EvaluateInstancesResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.delete(arg_name, $fields: arg_$fields); - checkGoogleLongrunningOperation( - response as api.GoogleLongrunningOperation); + final response = await res.evaluateInstances(arg_request, arg_location, + $fields: arg_$fields); + checkGoogleCloudAiplatformV1EvaluateInstancesResponse( + response as api.GoogleCloudAiplatformV1EvaluateInstancesResponse); }); unittest.test('method--get', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.batchPredictionJobs; + final res = api.AiplatformApi(mock).projects.locations; final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -37726,24 +42750,21 @@ void main() { final h = { 'content-type': 
'application/json; charset=utf-8', }; - final resp = convert.json - .encode(buildGoogleCloudAiplatformV1BatchPredictionJob()); + final resp = convert.json.encode(buildGoogleCloudLocationLocation()); return async.Future.value(stringResponse(200, h, resp)); }), true); final response = await res.get(arg_name, $fields: arg_$fields); - checkGoogleCloudAiplatformV1BatchPredictionJob( - response as api.GoogleCloudAiplatformV1BatchPredictionJob); + checkGoogleCloudLocationLocation( + response as api.GoogleCloudLocationLocation); }); unittest.test('method--list', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.batchPredictionJobs; - final arg_parent = 'foo'; + final res = api.AiplatformApi(mock).projects.locations; + final arg_name = 'foo'; final arg_filter = 'foo'; final arg_pageSize = 42; final arg_pageToken = 'foo'; - final arg_readMask = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -37789,10 +42810,6 @@ void main() { queryMap['pageToken']!.first, unittest.equals(arg_pageToken), ); - unittest.expect( - queryMap['readMask']!.first, - unittest.equals(arg_readMask), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -37801,32 +42818,94 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode( - buildGoogleCloudAiplatformV1ListBatchPredictionJobsResponse()); + final resp = convert.json + .encode(buildGoogleCloudLocationListLocationsResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.list(arg_parent, + final response = await res.list(arg_name, filter: arg_filter, pageSize: arg_pageSize, pageToken: arg_pageToken, - readMask: arg_readMask, $fields: arg_$fields); - checkGoogleCloudAiplatformV1ListBatchPredictionJobsResponse(response - as api.GoogleCloudAiplatformV1ListBatchPredictionJobsResponse); + checkGoogleCloudLocationListLocationsResponse( + response as api.GoogleCloudLocationListLocationsResponse); + }); + + unittest.test('method--retrieveContexts', () async { + final mock = HttpServerMock(); + final res = api.AiplatformApi(mock).projects.locations; + final arg_request = buildGoogleCloudAiplatformV1RetrieveContextsRequest(); + final arg_parent = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.GoogleCloudAiplatformV1RetrieveContextsRequest.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1RetrieveContextsRequest(obj); + + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 
'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json + .encode(buildGoogleCloudAiplatformV1RetrieveContextsResponse()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.retrieveContexts(arg_request, arg_parent, + $fields: arg_$fields); + checkGoogleCloudAiplatformV1RetrieveContextsResponse( + response as api.GoogleCloudAiplatformV1RetrieveContextsResponse); }); }); - unittest.group('resource-ProjectsLocationsCustomJobsResource', () { + unittest.group('resource-ProjectsLocationsBatchPredictionJobsResource', () { unittest.test('method--cancel', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.customJobs; - final arg_request = buildGoogleCloudAiplatformV1CancelCustomJobRequest(); + final res = + api.AiplatformApi(mock).projects.locations.batchPredictionJobs; + final arg_request = + buildGoogleCloudAiplatformV1CancelBatchPredictionJobRequest(); final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleCloudAiplatformV1CancelCustomJobRequest.fromJson( - json as core.Map); - checkGoogleCloudAiplatformV1CancelCustomJobRequest(obj); + final obj = + api.GoogleCloudAiplatformV1CancelBatchPredictionJobRequest.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1CancelBatchPredictionJobRequest(obj); final path = req.url.path; var pathOffset = 0; @@ -37877,14 +42956,15 @@ void main() { unittest.test('method--create', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.customJobs; - final arg_request = buildGoogleCloudAiplatformV1CustomJob(); + final res = + api.AiplatformApi(mock).projects.locations.batchPredictionJobs; + final arg_request = buildGoogleCloudAiplatformV1BatchPredictionJob(); final arg_parent = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleCloudAiplatformV1CustomJob.fromJson( + final obj = api.GoogleCloudAiplatformV1BatchPredictionJob.fromJson( json as core.Map); - checkGoogleCloudAiplatformV1CustomJob(obj); + checkGoogleCloudAiplatformV1BatchPredictionJob(obj); final path = req.url.path; var pathOffset = 0; @@ -37925,19 +43005,20 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = - convert.json.encode(buildGoogleCloudAiplatformV1CustomJob()); + final resp = convert.json + .encode(buildGoogleCloudAiplatformV1BatchPredictionJob()); return async.Future.value(stringResponse(200, h, resp)); }), true); final response = await res.create(arg_request, arg_parent, $fields: arg_$fields); - checkGoogleCloudAiplatformV1CustomJob( - response as api.GoogleCloudAiplatformV1CustomJob); + checkGoogleCloudAiplatformV1BatchPredictionJob( + response as api.GoogleCloudAiplatformV1BatchPredictionJob); }); unittest.test('method--delete', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.customJobs; + final res = + api.AiplatformApi(mock).projects.locations.batchPredictionJobs; final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -37990,7 +43071,8 @@ void main() { unittest.test('method--get', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.customJobs; + final res = + api.AiplatformApi(mock).projects.locations.batchPredictionJobs; final arg_name = 'foo'; 
final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -38033,18 +43115,19 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = - convert.json.encode(buildGoogleCloudAiplatformV1CustomJob()); + final resp = convert.json + .encode(buildGoogleCloudAiplatformV1BatchPredictionJob()); return async.Future.value(stringResponse(200, h, resp)); }), true); final response = await res.get(arg_name, $fields: arg_$fields); - checkGoogleCloudAiplatformV1CustomJob( - response as api.GoogleCloudAiplatformV1CustomJob); + checkGoogleCloudAiplatformV1BatchPredictionJob( + response as api.GoogleCloudAiplatformV1BatchPredictionJob); }); unittest.test('method--list', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.customJobs; + final res = + api.AiplatformApi(mock).projects.locations.batchPredictionJobs; final arg_parent = 'foo'; final arg_filter = 'foo'; final arg_pageSize = 42; @@ -38107,8 +43190,8 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json - .encode(buildGoogleCloudAiplatformV1ListCustomJobsResponse()); + final resp = convert.json.encode( + buildGoogleCloudAiplatformV1ListBatchPredictionJobsResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); final response = await res.list(arg_parent, @@ -38117,19 +43200,23 @@ void main() { pageToken: arg_pageToken, readMask: arg_readMask, $fields: arg_$fields); - checkGoogleCloudAiplatformV1ListCustomJobsResponse( - response as api.GoogleCloudAiplatformV1ListCustomJobsResponse); + checkGoogleCloudAiplatformV1ListBatchPredictionJobsResponse(response + as api.GoogleCloudAiplatformV1ListBatchPredictionJobsResponse); }); }); - unittest.group('resource-ProjectsLocationsCustomJobsOperationsResource', () { - unittest.test('method--cancel', () async { + unittest.group('resource-ProjectsLocationsCachedContentsResource', () { + unittest.test('method--create', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.customJobs.operations; - final arg_name = 'foo'; + final res = api.AiplatformApi(mock).projects.locations.cachedContents; + final arg_request = buildGoogleCloudAiplatformV1CachedContent(); + final arg_parent = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.GoogleCloudAiplatformV1CachedContent.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1CachedContent(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -38169,17 +43256,19 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleProtobufEmpty()); + final resp = + convert.json.encode(buildGoogleCloudAiplatformV1CachedContent()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.cancel(arg_name, $fields: arg_$fields); - checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); + final response = + await res.create(arg_request, arg_parent, $fields: arg_$fields); + checkGoogleCloudAiplatformV1CachedContent( + response as api.GoogleCloudAiplatformV1CachedContent); }); unittest.test('method--delete', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.customJobs.operations; + final res = api.AiplatformApi(mock).projects.locations.cachedContents; final arg_name = 'foo'; final 
arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -38231,8 +43320,7 @@ void main() { unittest.test('method--get', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.customJobs.operations; + final res = api.AiplatformApi(mock).projects.locations.cachedContents; final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -38275,20 +43363,19 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleLongrunningOperation()); + final resp = + convert.json.encode(buildGoogleCloudAiplatformV1CachedContent()); return async.Future.value(stringResponse(200, h, resp)); }), true); final response = await res.get(arg_name, $fields: arg_$fields); - checkGoogleLongrunningOperation( - response as api.GoogleLongrunningOperation); + checkGoogleCloudAiplatformV1CachedContent( + response as api.GoogleCloudAiplatformV1CachedContent); }); unittest.test('method--list', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.customJobs.operations; - final arg_name = 'foo'; - final arg_filter = 'foo'; + final res = api.AiplatformApi(mock).projects.locations.cachedContents; + final arg_parent = 'foo'; final arg_pageSize = 42; final arg_pageToken = 'foo'; final arg_$fields = 'foo'; @@ -38324,10 +43411,6 @@ void main() { ); } } - unittest.expect( - queryMap['filter']!.first, - unittest.equals(arg_filter), - ); unittest.expect( core.int.parse(queryMap['pageSize']!.first), unittest.equals(arg_pageSize), @@ -38344,27 +43427,30 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = - convert.json.encode(buildGoogleLongrunningListOperationsResponse()); + final resp = convert.json + .encode(buildGoogleCloudAiplatformV1ListCachedContentsResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.list(arg_name, - filter: arg_filter, + final response = await res.list(arg_parent, pageSize: arg_pageSize, pageToken: arg_pageToken, $fields: arg_$fields); - checkGoogleLongrunningListOperationsResponse( - response as api.GoogleLongrunningListOperationsResponse); + checkGoogleCloudAiplatformV1ListCachedContentsResponse( + response as api.GoogleCloudAiplatformV1ListCachedContentsResponse); }); - unittest.test('method--wait', () async { + unittest.test('method--patch', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.customJobs.operations; + final res = api.AiplatformApi(mock).projects.locations.cachedContents; + final arg_request = buildGoogleCloudAiplatformV1CachedContent(); final arg_name = 'foo'; - final arg_timeout = 'foo'; + final arg_updateMask = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.GoogleCloudAiplatformV1CachedContent.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1CachedContent(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -38397,8 +43483,8 @@ void main() { } } unittest.expect( - queryMap['timeout']!.first, - unittest.equals(arg_timeout), + queryMap['updateMask']!.first, + unittest.equals(arg_updateMask), ); unittest.expect( queryMap['fields']!.first, @@ -38408,29 +43494,28 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = 
convert.json.encode(buildGoogleLongrunningOperation()); + final resp = + convert.json.encode(buildGoogleCloudAiplatformV1CachedContent()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = - await res.wait(arg_name, timeout: arg_timeout, $fields: arg_$fields); - checkGoogleLongrunningOperation( - response as api.GoogleLongrunningOperation); + final response = await res.patch(arg_request, arg_name, + updateMask: arg_updateMask, $fields: arg_$fields); + checkGoogleCloudAiplatformV1CachedContent( + response as api.GoogleCloudAiplatformV1CachedContent); }); }); - unittest.group('resource-ProjectsLocationsDataLabelingJobsResource', () { + unittest.group('resource-ProjectsLocationsCustomJobsResource', () { unittest.test('method--cancel', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.dataLabelingJobs; - final arg_request = - buildGoogleCloudAiplatformV1CancelDataLabelingJobRequest(); + final res = api.AiplatformApi(mock).projects.locations.customJobs; + final arg_request = buildGoogleCloudAiplatformV1CancelCustomJobRequest(); final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = - api.GoogleCloudAiplatformV1CancelDataLabelingJobRequest.fromJson( - json as core.Map); - checkGoogleCloudAiplatformV1CancelDataLabelingJobRequest(obj); + final obj = api.GoogleCloudAiplatformV1CancelCustomJobRequest.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1CancelCustomJobRequest(obj); final path = req.url.path; var pathOffset = 0; @@ -38481,14 +43566,14 @@ void main() { unittest.test('method--create', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.dataLabelingJobs; - final arg_request = buildGoogleCloudAiplatformV1DataLabelingJob(); + final res = api.AiplatformApi(mock).projects.locations.customJobs; + final arg_request = buildGoogleCloudAiplatformV1CustomJob(); final arg_parent = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleCloudAiplatformV1DataLabelingJob.fromJson( + final obj = api.GoogleCloudAiplatformV1CustomJob.fromJson( json as core.Map); - checkGoogleCloudAiplatformV1DataLabelingJob(obj); + checkGoogleCloudAiplatformV1CustomJob(obj); final path = req.url.path; var pathOffset = 0; @@ -38530,18 +43615,18 @@ void main() { 'content-type': 'application/json; charset=utf-8', }; final resp = - convert.json.encode(buildGoogleCloudAiplatformV1DataLabelingJob()); + convert.json.encode(buildGoogleCloudAiplatformV1CustomJob()); return async.Future.value(stringResponse(200, h, resp)); }), true); final response = await res.create(arg_request, arg_parent, $fields: arg_$fields); - checkGoogleCloudAiplatformV1DataLabelingJob( - response as api.GoogleCloudAiplatformV1DataLabelingJob); + checkGoogleCloudAiplatformV1CustomJob( + response as api.GoogleCloudAiplatformV1CustomJob); }); unittest.test('method--delete', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.dataLabelingJobs; + final res = api.AiplatformApi(mock).projects.locations.customJobs; final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -38594,7 +43679,7 @@ void main() { unittest.test('method--get', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.dataLabelingJobs; + final res = 
api.AiplatformApi(mock).projects.locations.customJobs; final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -38638,20 +43723,19 @@ void main() { 'content-type': 'application/json; charset=utf-8', }; final resp = - convert.json.encode(buildGoogleCloudAiplatformV1DataLabelingJob()); + convert.json.encode(buildGoogleCloudAiplatformV1CustomJob()); return async.Future.value(stringResponse(200, h, resp)); }), true); final response = await res.get(arg_name, $fields: arg_$fields); - checkGoogleCloudAiplatformV1DataLabelingJob( - response as api.GoogleCloudAiplatformV1DataLabelingJob); + checkGoogleCloudAiplatformV1CustomJob( + response as api.GoogleCloudAiplatformV1CustomJob); }); unittest.test('method--list', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.dataLabelingJobs; + final res = api.AiplatformApi(mock).projects.locations.customJobs; final arg_parent = 'foo'; final arg_filter = 'foo'; - final arg_orderBy = 'foo'; final arg_pageSize = 42; final arg_pageToken = 'foo'; final arg_readMask = 'foo'; @@ -38692,10 +43776,6 @@ void main() { queryMap['filter']!.first, unittest.equals(arg_filter), ); - unittest.expect( - queryMap['orderBy']!.first, - unittest.equals(arg_orderBy), - ); unittest.expect( core.int.parse(queryMap['pageSize']!.first), unittest.equals(arg_pageSize), @@ -38717,30 +43797,25 @@ void main() { 'content-type': 'application/json; charset=utf-8', }; final resp = convert.json - .encode(buildGoogleCloudAiplatformV1ListDataLabelingJobsResponse()); + .encode(buildGoogleCloudAiplatformV1ListCustomJobsResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); final response = await res.list(arg_parent, filter: arg_filter, - orderBy: arg_orderBy, pageSize: arg_pageSize, pageToken: arg_pageToken, readMask: arg_readMask, $fields: arg_$fields); - checkGoogleCloudAiplatformV1ListDataLabelingJobsResponse( - response as api.GoogleCloudAiplatformV1ListDataLabelingJobsResponse); + checkGoogleCloudAiplatformV1ListCustomJobsResponse( + response as api.GoogleCloudAiplatformV1ListCustomJobsResponse); }); }); - unittest.group('resource-ProjectsLocationsDataLabelingJobsOperationsResource', - () { + unittest.group('resource-ProjectsLocationsCustomJobsOperationsResource', () { unittest.test('method--cancel', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .dataLabelingJobs - .operations; + final res = + api.AiplatformApi(mock).projects.locations.customJobs.operations; final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -38792,11 +43867,8 @@ void main() { unittest.test('method--delete', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .dataLabelingJobs - .operations; + final res = + api.AiplatformApi(mock).projects.locations.customJobs.operations; final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -38848,11 +43920,8 @@ void main() { unittest.test('method--get', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .dataLabelingJobs - .operations; + final res = + api.AiplatformApi(mock).projects.locations.customJobs.operations; final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -38905,11 
+43974,8 @@ void main() { unittest.test('method--list', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .dataLabelingJobs - .operations; + final res = + api.AiplatformApi(mock).projects.locations.customJobs.operations; final arg_name = 'foo'; final arg_filter = 'foo'; final arg_pageSize = 42; @@ -38982,11 +44048,8 @@ void main() { unittest.test('method--wait', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .dataLabelingJobs - .operations; + final res = + api.AiplatformApi(mock).projects.locations.customJobs.operations; final arg_name = 'foo'; final arg_timeout = 'foo'; final arg_$fields = 'foo'; @@ -39044,17 +44107,77 @@ void main() { }); }); - unittest.group('resource-ProjectsLocationsDatasetsResource', () { + unittest.group('resource-ProjectsLocationsDataLabelingJobsResource', () { + unittest.test('method--cancel', () async { + final mock = HttpServerMock(); + final res = api.AiplatformApi(mock).projects.locations.dataLabelingJobs; + final arg_request = + buildGoogleCloudAiplatformV1CancelDataLabelingJobRequest(); + final arg_name = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = + api.GoogleCloudAiplatformV1CancelDataLabelingJobRequest.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1CancelDataLabelingJobRequest(obj); + + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildGoogleProtobufEmpty()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = + await res.cancel(arg_request, arg_name, $fields: arg_$fields); + checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); + }); + unittest.test('method--create', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.datasets; - final arg_request = buildGoogleCloudAiplatformV1Dataset(); + final res = api.AiplatformApi(mock).projects.locations.dataLabelingJobs; + final arg_request = buildGoogleCloudAiplatformV1DataLabelingJob(); final arg_parent = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleCloudAiplatformV1Dataset.fromJson( + final obj = api.GoogleCloudAiplatformV1DataLabelingJob.fromJson( json as core.Map); - checkGoogleCloudAiplatformV1Dataset(obj); + checkGoogleCloudAiplatformV1DataLabelingJob(obj); final path = req.url.path; var pathOffset = 0; @@ -39095,18 +44218,19 @@ void main() { final h = { 'content-type': 'application/json; 
charset=utf-8', }; - final resp = convert.json.encode(buildGoogleLongrunningOperation()); + final resp = + convert.json.encode(buildGoogleCloudAiplatformV1DataLabelingJob()); return async.Future.value(stringResponse(200, h, resp)); }), true); final response = await res.create(arg_request, arg_parent, $fields: arg_$fields); - checkGoogleLongrunningOperation( - response as api.GoogleLongrunningOperation); + checkGoogleCloudAiplatformV1DataLabelingJob( + response as api.GoogleCloudAiplatformV1DataLabelingJob); }); unittest.test('method--delete', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.datasets; + final res = api.AiplatformApi(mock).projects.locations.dataLabelingJobs; final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -39157,17 +44281,12 @@ void main() { response as api.GoogleLongrunningOperation); }); - unittest.test('method--export', () async { + unittest.test('method--get', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.datasets; - final arg_request = buildGoogleCloudAiplatformV1ExportDataRequest(); + final res = api.AiplatformApi(mock).projects.locations.dataLabelingJobs; final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleCloudAiplatformV1ExportDataRequest.fromJson( - json as core.Map); - checkGoogleCloudAiplatformV1ExportDataRequest(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -39207,19 +44326,23 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleLongrunningOperation()); + final resp = + convert.json.encode(buildGoogleCloudAiplatformV1DataLabelingJob()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = - await res.export(arg_request, arg_name, $fields: arg_$fields); - checkGoogleLongrunningOperation( - response as api.GoogleLongrunningOperation); + final response = await res.get(arg_name, $fields: arg_$fields); + checkGoogleCloudAiplatformV1DataLabelingJob( + response as api.GoogleCloudAiplatformV1DataLabelingJob); }); - unittest.test('method--get', () async { + unittest.test('method--list', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.datasets; - final arg_name = 'foo'; + final res = api.AiplatformApi(mock).projects.locations.dataLabelingJobs; + final arg_parent = 'foo'; + final arg_filter = 'foo'; + final arg_orderBy = 'foo'; + final arg_pageSize = 42; + final arg_pageToken = 'foo'; final arg_readMask = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -39254,6 +44377,22 @@ void main() { ); } } + unittest.expect( + queryMap['filter']!.first, + unittest.equals(arg_filter), + ); + unittest.expect( + queryMap['orderBy']!.first, + unittest.equals(arg_orderBy), + ); + unittest.expect( + core.int.parse(queryMap['pageSize']!.first), + unittest.equals(arg_pageSize), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), + ); unittest.expect( queryMap['readMask']!.first, unittest.equals(arg_readMask), @@ -39266,26 +44405,34 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleCloudAiplatformV1Dataset()); + final resp = convert.json + 
.encode(buildGoogleCloudAiplatformV1ListDataLabelingJobsResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = - await res.get(arg_name, readMask: arg_readMask, $fields: arg_$fields); - checkGoogleCloudAiplatformV1Dataset( - response as api.GoogleCloudAiplatformV1Dataset); + final response = await res.list(arg_parent, + filter: arg_filter, + orderBy: arg_orderBy, + pageSize: arg_pageSize, + pageToken: arg_pageToken, + readMask: arg_readMask, + $fields: arg_$fields); + checkGoogleCloudAiplatformV1ListDataLabelingJobsResponse( + response as api.GoogleCloudAiplatformV1ListDataLabelingJobsResponse); }); + }); - unittest.test('method--import', () async { + unittest.group('resource-ProjectsLocationsDataLabelingJobsOperationsResource', + () { + unittest.test('method--cancel', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.datasets; - final arg_request = buildGoogleCloudAiplatformV1ImportDataRequest(); + final res = api.AiplatformApi(mock) + .projects + .locations + .dataLabelingJobs + .operations; final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleCloudAiplatformV1ImportDataRequest.fromJson( - json as core.Map); - checkGoogleCloudAiplatformV1ImportDataRequest(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -39325,24 +44472,21 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleLongrunningOperation()); + final resp = convert.json.encode(buildGoogleProtobufEmpty()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = - await res.import(arg_request, arg_name, $fields: arg_$fields); - checkGoogleLongrunningOperation( - response as api.GoogleLongrunningOperation); + final response = await res.cancel(arg_name, $fields: arg_$fields); + checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); }); - unittest.test('method--list', () async { + unittest.test('method--delete', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.datasets; - final arg_parent = 'foo'; - final arg_filter = 'foo'; - final arg_orderBy = 'foo'; - final arg_pageSize = 42; - final arg_pageToken = 'foo'; - final arg_readMask = 'foo'; + final res = api.AiplatformApi(mock) + .projects + .locations + .dataLabelingJobs + .operations; + final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -39376,26 +44520,6 @@ void main() { ); } } - unittest.expect( - queryMap['filter']!.first, - unittest.equals(arg_filter), - ); - unittest.expect( - queryMap['orderBy']!.first, - unittest.equals(arg_orderBy), - ); - unittest.expect( - core.int.parse(queryMap['pageSize']!.first), - unittest.equals(arg_pageSize), - ); - unittest.expect( - queryMap['pageToken']!.first, - unittest.equals(arg_pageToken), - ); - unittest.expect( - queryMap['readMask']!.first, - unittest.equals(arg_readMask), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -39404,33 +44528,23 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json - .encode(buildGoogleCloudAiplatformV1ListDatasetsResponse()); + final resp = convert.json.encode(buildGoogleProtobufEmpty()); return async.Future.value(stringResponse(200, h, resp)); }), true); - 
final response = await res.list(arg_parent, - filter: arg_filter, - orderBy: arg_orderBy, - pageSize: arg_pageSize, - pageToken: arg_pageToken, - readMask: arg_readMask, - $fields: arg_$fields); - checkGoogleCloudAiplatformV1ListDatasetsResponse( - response as api.GoogleCloudAiplatformV1ListDatasetsResponse); + final response = await res.delete(arg_name, $fields: arg_$fields); + checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); }); - unittest.test('method--patch', () async { + unittest.test('method--get', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.datasets; - final arg_request = buildGoogleCloudAiplatformV1Dataset(); + final res = api.AiplatformApi(mock) + .projects + .locations + .dataLabelingJobs + .operations; final arg_name = 'foo'; - final arg_updateMask = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleCloudAiplatformV1Dataset.fromJson( - json as core.Map); - checkGoogleCloudAiplatformV1Dataset(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -39462,10 +44576,6 @@ void main() { ); } } - unittest.expect( - queryMap['updateMask']!.first, - unittest.equals(arg_updateMask), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -39474,32 +44584,25 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleCloudAiplatformV1Dataset()); + final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.patch(arg_request, arg_name, - updateMask: arg_updateMask, $fields: arg_$fields); - checkGoogleCloudAiplatformV1Dataset( - response as api.GoogleCloudAiplatformV1Dataset); + final response = await res.get(arg_name, $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); }); - unittest.test('method--searchDataItems', () async { + unittest.test('method--list', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.datasets; - final arg_dataset = 'foo'; - final arg_annotationFilters = buildUnnamed387(); - final arg_annotationsFilter = 'foo'; - final arg_annotationsLimit = 42; - final arg_dataItemFilter = 'foo'; - final arg_dataLabelingJob = 'foo'; - final arg_fieldMask = 'foo'; - final arg_orderBy = 'foo'; - final arg_orderByAnnotation_orderBy = 'foo'; - final arg_orderByAnnotation_savedQuery = 'foo'; - final arg_orderByDataItem = 'foo'; + final res = api.AiplatformApi(mock) + .projects + .locations + .dataLabelingJobs + .operations; + final arg_name = 'foo'; + final arg_filter = 'foo'; final arg_pageSize = 42; final arg_pageToken = 'foo'; - final arg_savedQuery = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -39534,44 +44637,8 @@ void main() { } } unittest.expect( - queryMap['annotationFilters']!, - unittest.equals(arg_annotationFilters), - ); - unittest.expect( - queryMap['annotationsFilter']!.first, - unittest.equals(arg_annotationsFilter), - ); - unittest.expect( - core.int.parse(queryMap['annotationsLimit']!.first), - unittest.equals(arg_annotationsLimit), - ); - unittest.expect( - queryMap['dataItemFilter']!.first, - unittest.equals(arg_dataItemFilter), - ); - unittest.expect( - queryMap['dataLabelingJob']!.first, - unittest.equals(arg_dataLabelingJob), 
- ); - unittest.expect( - queryMap['fieldMask']!.first, - unittest.equals(arg_fieldMask), - ); - unittest.expect( - queryMap['orderBy']!.first, - unittest.equals(arg_orderBy), - ); - unittest.expect( - queryMap['orderByAnnotation.orderBy']!.first, - unittest.equals(arg_orderByAnnotation_orderBy), - ); - unittest.expect( - queryMap['orderByAnnotation.savedQuery']!.first, - unittest.equals(arg_orderByAnnotation_savedQuery), - ); - unittest.expect( - queryMap['orderByDataItem']!.first, - unittest.equals(arg_orderByDataItem), + queryMap['filter']!.first, + unittest.equals(arg_filter), ); unittest.expect( core.int.parse(queryMap['pageSize']!.first), @@ -39581,10 +44648,6 @@ void main() { queryMap['pageToken']!.first, unittest.equals(arg_pageToken), ); - unittest.expect( - queryMap['savedQuery']!.first, - unittest.equals(arg_savedQuery), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -39593,38 +44656,28 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json - .encode(buildGoogleCloudAiplatformV1SearchDataItemsResponse()); + final resp = + convert.json.encode(buildGoogleLongrunningListOperationsResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.searchDataItems(arg_dataset, - annotationFilters: arg_annotationFilters, - annotationsFilter: arg_annotationsFilter, - annotationsLimit: arg_annotationsLimit, - dataItemFilter: arg_dataItemFilter, - dataLabelingJob: arg_dataLabelingJob, - fieldMask: arg_fieldMask, - orderBy: arg_orderBy, - orderByAnnotation_orderBy: arg_orderByAnnotation_orderBy, - orderByAnnotation_savedQuery: arg_orderByAnnotation_savedQuery, - orderByDataItem: arg_orderByDataItem, + final response = await res.list(arg_name, + filter: arg_filter, pageSize: arg_pageSize, pageToken: arg_pageToken, - savedQuery: arg_savedQuery, $fields: arg_$fields); - checkGoogleCloudAiplatformV1SearchDataItemsResponse( - response as api.GoogleCloudAiplatformV1SearchDataItemsResponse); + checkGoogleLongrunningListOperationsResponse( + response as api.GoogleLongrunningListOperationsResponse); }); - }); - unittest.group('resource-ProjectsLocationsDatasetsAnnotationSpecsResource', - () { - unittest.test('method--get', () async { + unittest.test('method--wait', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.datasets.annotationSpecs; + final res = api.AiplatformApi(mock) + .projects + .locations + .dataLabelingJobs + .operations; final arg_name = 'foo'; - final arg_readMask = 'foo'; + final arg_timeout = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -39659,8 +44712,8 @@ void main() { } } unittest.expect( - queryMap['readMask']!.first, - unittest.equals(arg_readMask), + queryMap['timeout']!.first, + unittest.equals(arg_timeout), ); unittest.expect( queryMap['fields']!.first, @@ -39670,31 +44723,28 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = - convert.json.encode(buildGoogleCloudAiplatformV1AnnotationSpec()); + final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); final response = - await res.get(arg_name, readMask: arg_readMask, $fields: arg_$fields); - checkGoogleCloudAiplatformV1AnnotationSpec( - response as api.GoogleCloudAiplatformV1AnnotationSpec); + await res.wait(arg_name, timeout: 
arg_timeout, $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); }); }); - unittest.group( - 'resource-ProjectsLocationsDatasetsAnnotationSpecsOperationsResource', - () { - unittest.test('method--cancel', () async { + unittest.group('resource-ProjectsLocationsDatasetsResource', () { + unittest.test('method--create', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .datasets - .annotationSpecs - .operations; - final arg_name = 'foo'; + final res = api.AiplatformApi(mock).projects.locations.datasets; + final arg_request = buildGoogleCloudAiplatformV1Dataset(); + final arg_parent = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.GoogleCloudAiplatformV1Dataset.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1Dataset(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -39734,21 +44784,18 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleProtobufEmpty()); + final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.cancel(arg_name, $fields: arg_$fields); - checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); + final response = + await res.create(arg_request, arg_parent, $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); }); unittest.test('method--delete', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .datasets - .annotationSpecs - .operations; + final res = api.AiplatformApi(mock).projects.locations.datasets; final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -39791,24 +44838,25 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleProtobufEmpty()); + final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); final response = await res.delete(arg_name, $fields: arg_$fields); - checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); }); - unittest.test('method--get', () async { + unittest.test('method--export', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .datasets - .annotationSpecs - .operations; + final res = api.AiplatformApi(mock).projects.locations.datasets; + final arg_request = buildGoogleCloudAiplatformV1ExportDataRequest(); final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.GoogleCloudAiplatformV1ExportDataRequest.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1ExportDataRequest(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -39851,23 +44899,17 @@ void main() { final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.get(arg_name, $fields: arg_$fields); + final response = + await res.export(arg_request, arg_name, $fields: arg_$fields); checkGoogleLongrunningOperation( 
response as api.GoogleLongrunningOperation); }); - unittest.test('method--list', () async { + unittest.test('method--get', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .datasets - .annotationSpecs - .operations; + final res = api.AiplatformApi(mock).projects.locations.datasets; final arg_name = 'foo'; - final arg_filter = 'foo'; - final arg_pageSize = 42; - final arg_pageToken = 'foo'; + final arg_readMask = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -39902,16 +44944,8 @@ void main() { } } unittest.expect( - queryMap['filter']!.first, - unittest.equals(arg_filter), - ); - unittest.expect( - core.int.parse(queryMap['pageSize']!.first), - unittest.equals(arg_pageSize), - ); - unittest.expect( - queryMap['pageToken']!.first, - unittest.equals(arg_pageToken), + queryMap['readMask']!.first, + unittest.equals(arg_readMask), ); unittest.expect( queryMap['fields']!.first, @@ -39921,31 +44955,26 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = - convert.json.encode(buildGoogleLongrunningListOperationsResponse()); + final resp = convert.json.encode(buildGoogleCloudAiplatformV1Dataset()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.list(arg_name, - filter: arg_filter, - pageSize: arg_pageSize, - pageToken: arg_pageToken, - $fields: arg_$fields); - checkGoogleLongrunningListOperationsResponse( - response as api.GoogleLongrunningListOperationsResponse); + final response = + await res.get(arg_name, readMask: arg_readMask, $fields: arg_$fields); + checkGoogleCloudAiplatformV1Dataset( + response as api.GoogleCloudAiplatformV1Dataset); }); - unittest.test('method--wait', () async { + unittest.test('method--import', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .datasets - .annotationSpecs - .operations; + final res = api.AiplatformApi(mock).projects.locations.datasets; + final arg_request = buildGoogleCloudAiplatformV1ImportDataRequest(); final arg_name = 'foo'; - final arg_timeout = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.GoogleCloudAiplatformV1ImportDataRequest.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1ImportDataRequest(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -39977,10 +45006,6 @@ void main() { ); } } - unittest.expect( - queryMap['timeout']!.first, - unittest.equals(arg_timeout), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -39993,16 +45018,14 @@ void main() { return async.Future.value(stringResponse(200, h, resp)); }), true); final response = - await res.wait(arg_name, timeout: arg_timeout, $fields: arg_$fields); + await res.import(arg_request, arg_name, $fields: arg_$fields); checkGoogleLongrunningOperation( response as api.GoogleLongrunningOperation); }); - }); - unittest.group('resource-ProjectsLocationsDatasetsDataItemsResource', () { unittest.test('method--list', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.datasets.dataItems; + final res = api.AiplatformApi(mock).projects.locations.datasets; final arg_parent = 'foo'; final arg_filter = 'foo'; final arg_orderBy = 'foo'; @@ -40071,7 +45094,7 @@ void main() { 'content-type': 'application/json; charset=utf-8', }; final resp 
= convert.json - .encode(buildGoogleCloudAiplatformV1ListDataItemsResponse()); + .encode(buildGoogleCloudAiplatformV1ListDatasetsResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); final response = await res.list(arg_parent, @@ -40081,27 +45104,91 @@ void main() { pageToken: arg_pageToken, readMask: arg_readMask, $fields: arg_$fields); - checkGoogleCloudAiplatformV1ListDataItemsResponse( - response as api.GoogleCloudAiplatformV1ListDataItemsResponse); + checkGoogleCloudAiplatformV1ListDatasetsResponse( + response as api.GoogleCloudAiplatformV1ListDatasetsResponse); }); - }); - unittest.group( - 'resource-ProjectsLocationsDatasetsDataItemsAnnotationsResource', () { - unittest.test('method--list', () async { + unittest.test('method--patch', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .datasets - .dataItems - .annotations; - final arg_parent = 'foo'; - final arg_filter = 'foo'; + final res = api.AiplatformApi(mock).projects.locations.datasets; + final arg_request = buildGoogleCloudAiplatformV1Dataset(); + final arg_name = 'foo'; + final arg_updateMask = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.GoogleCloudAiplatformV1Dataset.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1Dataset(obj); + + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['updateMask']!.first, + unittest.equals(arg_updateMask), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildGoogleCloudAiplatformV1Dataset()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.patch(arg_request, arg_name, + updateMask: arg_updateMask, $fields: arg_$fields); + checkGoogleCloudAiplatformV1Dataset( + response as api.GoogleCloudAiplatformV1Dataset); + }); + + unittest.test('method--searchDataItems', () async { + final mock = HttpServerMock(); + final res = api.AiplatformApi(mock).projects.locations.datasets; + final arg_dataset = 'foo'; + final arg_annotationFilters = buildUnnamed427(); + final arg_annotationsFilter = 'foo'; + final arg_annotationsLimit = 42; + final arg_dataItemFilter = 'foo'; + final arg_dataLabelingJob = 'foo'; + final arg_fieldMask = 'foo'; final arg_orderBy = 'foo'; + final arg_orderByAnnotation_orderBy = 'foo'; + final arg_orderByAnnotation_savedQuery = 'foo'; + final arg_orderByDataItem = 'foo'; final arg_pageSize = 42; final arg_pageToken = 'foo'; - final arg_readMask = 'foo'; + final arg_savedQuery = 'foo'; final arg_$fields = 'foo'; 
mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -40136,13 +45223,45 @@ void main() { } } unittest.expect( - queryMap['filter']!.first, - unittest.equals(arg_filter), + queryMap['annotationFilters']!, + unittest.equals(arg_annotationFilters), + ); + unittest.expect( + queryMap['annotationsFilter']!.first, + unittest.equals(arg_annotationsFilter), + ); + unittest.expect( + core.int.parse(queryMap['annotationsLimit']!.first), + unittest.equals(arg_annotationsLimit), + ); + unittest.expect( + queryMap['dataItemFilter']!.first, + unittest.equals(arg_dataItemFilter), + ); + unittest.expect( + queryMap['dataLabelingJob']!.first, + unittest.equals(arg_dataLabelingJob), + ); + unittest.expect( + queryMap['fieldMask']!.first, + unittest.equals(arg_fieldMask), ); unittest.expect( queryMap['orderBy']!.first, unittest.equals(arg_orderBy), ); + unittest.expect( + queryMap['orderByAnnotation.orderBy']!.first, + unittest.equals(arg_orderByAnnotation_orderBy), + ); + unittest.expect( + queryMap['orderByAnnotation.savedQuery']!.first, + unittest.equals(arg_orderByAnnotation_savedQuery), + ); + unittest.expect( + queryMap['orderByDataItem']!.first, + unittest.equals(arg_orderByDataItem), + ); unittest.expect( core.int.parse(queryMap['pageSize']!.first), unittest.equals(arg_pageSize), @@ -40152,8 +45271,8 @@ void main() { unittest.equals(arg_pageToken), ); unittest.expect( - queryMap['readMask']!.first, - unittest.equals(arg_readMask), + queryMap['savedQuery']!.first, + unittest.equals(arg_savedQuery), ); unittest.expect( queryMap['fields']!.first, @@ -40164,23 +45283,95 @@ void main() { 'content-type': 'application/json; charset=utf-8', }; final resp = convert.json - .encode(buildGoogleCloudAiplatformV1ListAnnotationsResponse()); + .encode(buildGoogleCloudAiplatformV1SearchDataItemsResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.list(arg_parent, - filter: arg_filter, + final response = await res.searchDataItems(arg_dataset, + annotationFilters: arg_annotationFilters, + annotationsFilter: arg_annotationsFilter, + annotationsLimit: arg_annotationsLimit, + dataItemFilter: arg_dataItemFilter, + dataLabelingJob: arg_dataLabelingJob, + fieldMask: arg_fieldMask, orderBy: arg_orderBy, + orderByAnnotation_orderBy: arg_orderByAnnotation_orderBy, + orderByAnnotation_savedQuery: arg_orderByAnnotation_savedQuery, + orderByDataItem: arg_orderByDataItem, pageSize: arg_pageSize, pageToken: arg_pageToken, - readMask: arg_readMask, + savedQuery: arg_savedQuery, $fields: arg_$fields); - checkGoogleCloudAiplatformV1ListAnnotationsResponse( - response as api.GoogleCloudAiplatformV1ListAnnotationsResponse); + checkGoogleCloudAiplatformV1SearchDataItemsResponse( + response as api.GoogleCloudAiplatformV1SearchDataItemsResponse); + }); + }); + + unittest.group('resource-ProjectsLocationsDatasetsAnnotationSpecsResource', + () { + unittest.test('method--get', () async { + final mock = HttpServerMock(); + final res = + api.AiplatformApi(mock).projects.locations.datasets.annotationSpecs; + final arg_name = 'foo'; + final arg_readMask = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + 
unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['readMask']!.first, + unittest.equals(arg_readMask), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = + convert.json.encode(buildGoogleCloudAiplatformV1AnnotationSpec()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = + await res.get(arg_name, readMask: arg_readMask, $fields: arg_$fields); + checkGoogleCloudAiplatformV1AnnotationSpec( + response as api.GoogleCloudAiplatformV1AnnotationSpec); }); }); unittest.group( - 'resource-ProjectsLocationsDatasetsDataItemsAnnotationsOperationsResource', + 'resource-ProjectsLocationsDatasetsAnnotationSpecsOperationsResource', () { unittest.test('method--cancel', () async { final mock = HttpServerMock(); @@ -40188,8 +45379,7 @@ void main() { .projects .locations .datasets - .dataItems - .annotations + .annotationSpecs .operations; final arg_name = 'foo'; final arg_$fields = 'foo'; @@ -40246,8 +45436,7 @@ void main() { .projects .locations .datasets - .dataItems - .annotations + .annotationSpecs .operations; final arg_name = 'foo'; final arg_$fields = 'foo'; @@ -40304,8 +45493,7 @@ void main() { .projects .locations .datasets - .dataItems - .annotations + .annotationSpecs .operations; final arg_name = 'foo'; final arg_$fields = 'foo'; @@ -40363,8 +45551,7 @@ void main() { .projects .locations .datasets - .dataItems - .annotations + .annotationSpecs .operations; final arg_name = 'foo'; final arg_filter = 'foo'; @@ -40442,8 +45629,7 @@ void main() { .projects .locations .datasets - .dataItems - .annotations + .annotationSpecs .operations; final arg_name = 'foo'; final arg_timeout = 'foo'; @@ -40502,8 +45688,189 @@ void main() { }); }); + unittest.group('resource-ProjectsLocationsDatasetsDataItemsResource', () { + unittest.test('method--list', () async { + final mock = HttpServerMock(); + final res = api.AiplatformApi(mock).projects.locations.datasets.dataItems; + final arg_parent = 'foo'; + final arg_filter = 'foo'; + final arg_orderBy = 'foo'; + final arg_pageSize = 42; + final arg_pageToken = 'foo'; + final arg_readMask = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + 
addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['filter']!.first, + unittest.equals(arg_filter), + ); + unittest.expect( + queryMap['orderBy']!.first, + unittest.equals(arg_orderBy), + ); + unittest.expect( + core.int.parse(queryMap['pageSize']!.first), + unittest.equals(arg_pageSize), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), + ); + unittest.expect( + queryMap['readMask']!.first, + unittest.equals(arg_readMask), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json + .encode(buildGoogleCloudAiplatformV1ListDataItemsResponse()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.list(arg_parent, + filter: arg_filter, + orderBy: arg_orderBy, + pageSize: arg_pageSize, + pageToken: arg_pageToken, + readMask: arg_readMask, + $fields: arg_$fields); + checkGoogleCloudAiplatformV1ListDataItemsResponse( + response as api.GoogleCloudAiplatformV1ListDataItemsResponse); + }); + }); + unittest.group( - 'resource-ProjectsLocationsDatasetsDataItemsOperationsResource', () { + 'resource-ProjectsLocationsDatasetsDataItemsAnnotationsResource', () { + unittest.test('method--list', () async { + final mock = HttpServerMock(); + final res = api.AiplatformApi(mock) + .projects + .locations + .datasets + .dataItems + .annotations; + final arg_parent = 'foo'; + final arg_filter = 'foo'; + final arg_orderBy = 'foo'; + final arg_pageSize = 42; + final arg_pageToken = 'foo'; + final arg_readMask = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['filter']!.first, + unittest.equals(arg_filter), + ); + unittest.expect( + queryMap['orderBy']!.first, + unittest.equals(arg_orderBy), + ); + unittest.expect( + core.int.parse(queryMap['pageSize']!.first), + unittest.equals(arg_pageSize), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), + ); + unittest.expect( + queryMap['readMask']!.first, + unittest.equals(arg_readMask), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json + .encode(buildGoogleCloudAiplatformV1ListAnnotationsResponse()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.list(arg_parent, + filter: arg_filter, + orderBy: arg_orderBy, + pageSize: arg_pageSize, + 
pageToken: arg_pageToken, + readMask: arg_readMask, + $fields: arg_$fields); + checkGoogleCloudAiplatformV1ListAnnotationsResponse( + response as api.GoogleCloudAiplatformV1ListAnnotationsResponse); + }); + }); + + unittest.group( + 'resource-ProjectsLocationsDatasetsDataItemsAnnotationsOperationsResource', + () { unittest.test('method--cancel', () async { final mock = HttpServerMock(); final res = api.AiplatformApi(mock) @@ -40511,6 +45878,7 @@ void main() { .locations .datasets .dataItems + .annotations .operations; final arg_name = 'foo'; final arg_$fields = 'foo'; @@ -40568,6 +45936,7 @@ void main() { .locations .datasets .dataItems + .annotations .operations; final arg_name = 'foo'; final arg_$fields = 'foo'; @@ -40625,6 +45994,7 @@ void main() { .locations .datasets .dataItems + .annotations .operations; final arg_name = 'foo'; final arg_$fields = 'foo'; @@ -40683,6 +46053,7 @@ void main() { .locations .datasets .dataItems + .annotations .operations; final arg_name = 'foo'; final arg_filter = 'foo'; @@ -40761,6 +46132,7 @@ void main() { .locations .datasets .dataItems + .annotations .operations; final arg_name = 'foo'; final arg_timeout = 'foo'; @@ -40819,20 +46191,19 @@ void main() { }); }); - unittest.group('resource-ProjectsLocationsDatasetsDatasetVersionsResource', - () { - unittest.test('method--create', () async { + unittest.group( + 'resource-ProjectsLocationsDatasetsDataItemsOperationsResource', () { + unittest.test('method--cancel', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.datasets.datasetVersions; - final arg_request = buildGoogleCloudAiplatformV1DatasetVersion(); - final arg_parent = 'foo'; + final res = api.AiplatformApi(mock) + .projects + .locations + .datasets + .dataItems + .operations; + final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleCloudAiplatformV1DatasetVersion.fromJson( - json as core.Map); - checkGoogleCloudAiplatformV1DatasetVersion(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -40872,19 +46243,21 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleLongrunningOperation()); + final resp = convert.json.encode(buildGoogleProtobufEmpty()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = - await res.create(arg_request, arg_parent, $fields: arg_$fields); - checkGoogleLongrunningOperation( - response as api.GoogleLongrunningOperation); + final response = await res.cancel(arg_name, $fields: arg_$fields); + checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); }); unittest.test('method--delete', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.datasets.datasetVersions; + final res = api.AiplatformApi(mock) + .projects + .locations + .datasets + .dataItems + .operations; final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -40927,20 +46300,22 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleLongrunningOperation()); + final resp = convert.json.encode(buildGoogleProtobufEmpty()); return async.Future.value(stringResponse(200, h, resp)); }), true); final response = await res.delete(arg_name, $fields: arg_$fields); - checkGoogleLongrunningOperation( - response as 
api.GoogleLongrunningOperation); + checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); }); unittest.test('method--get', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.datasets.datasetVersions; + final res = api.AiplatformApi(mock) + .projects + .locations + .datasets + .dataItems + .operations; final arg_name = 'foo'; - final arg_readMask = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -40974,10 +46349,6 @@ void main() { ); } } - unittest.expect( - queryMap['readMask']!.first, - unittest.equals(arg_readMask), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -40986,26 +46357,26 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = - convert.json.encode(buildGoogleCloudAiplatformV1DatasetVersion()); + final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = - await res.get(arg_name, readMask: arg_readMask, $fields: arg_$fields); - checkGoogleCloudAiplatformV1DatasetVersion( - response as api.GoogleCloudAiplatformV1DatasetVersion); + final response = await res.get(arg_name, $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); }); unittest.test('method--list', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.datasets.datasetVersions; - final arg_parent = 'foo'; + final res = api.AiplatformApi(mock) + .projects + .locations + .datasets + .dataItems + .operations; + final arg_name = 'foo'; final arg_filter = 'foo'; - final arg_orderBy = 'foo'; final arg_pageSize = 42; final arg_pageToken = 'foo'; - final arg_readMask = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -41043,10 +46414,6 @@ void main() { queryMap['filter']!.first, unittest.equals(arg_filter), ); - unittest.expect( - queryMap['orderBy']!.first, - unittest.equals(arg_orderBy), - ); unittest.expect( core.int.parse(queryMap['pageSize']!.first), unittest.equals(arg_pageSize), @@ -41055,10 +46422,6 @@ void main() { queryMap['pageToken']!.first, unittest.equals(arg_pageToken), ); - unittest.expect( - queryMap['readMask']!.first, - unittest.equals(arg_readMask), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -41067,34 +46430,31 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json - .encode(buildGoogleCloudAiplatformV1ListDatasetVersionsResponse()); - return async.Future.value(stringResponse(200, h, resp)); + final resp = + convert.json.encode(buildGoogleLongrunningListOperationsResponse()); + return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.list(arg_parent, + final response = await res.list(arg_name, filter: arg_filter, - orderBy: arg_orderBy, pageSize: arg_pageSize, pageToken: arg_pageToken, - readMask: arg_readMask, $fields: arg_$fields); - checkGoogleCloudAiplatformV1ListDatasetVersionsResponse( - response as api.GoogleCloudAiplatformV1ListDatasetVersionsResponse); + checkGoogleLongrunningListOperationsResponse( + response as api.GoogleLongrunningListOperationsResponse); }); - unittest.test('method--patch', () async { + unittest.test('method--wait', () async { final mock = HttpServerMock(); - 
final res = - api.AiplatformApi(mock).projects.locations.datasets.datasetVersions; - final arg_request = buildGoogleCloudAiplatformV1DatasetVersion(); + final res = api.AiplatformApi(mock) + .projects + .locations + .datasets + .dataItems + .operations; final arg_name = 'foo'; - final arg_updateMask = 'foo'; + final arg_timeout = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleCloudAiplatformV1DatasetVersion.fromJson( - json as core.Map); - checkGoogleCloudAiplatformV1DatasetVersion(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -41127,65 +46487,9 @@ void main() { } } unittest.expect( - queryMap['updateMask']!.first, - unittest.equals(arg_updateMask), - ); - unittest.expect( - queryMap['fields']!.first, - unittest.equals(arg_$fields), - ); - - final h = { - 'content-type': 'application/json; charset=utf-8', - }; - final resp = - convert.json.encode(buildGoogleCloudAiplatformV1DatasetVersion()); - return async.Future.value(stringResponse(200, h, resp)); - }), true); - final response = await res.patch(arg_request, arg_name, - updateMask: arg_updateMask, $fields: arg_$fields); - checkGoogleCloudAiplatformV1DatasetVersion( - response as api.GoogleCloudAiplatformV1DatasetVersion); - }); - - unittest.test('method--restore', () async { - final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.datasets.datasetVersions; - final arg_name = 'foo'; - final arg_$fields = 'foo'; - mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final path = req.url.path; - var pathOffset = 0; - core.int index; - core.String subPart; - unittest.expect( - path.substring(pathOffset, pathOffset + 1), - unittest.equals('/'), - ); - pathOffset += 1; - unittest.expect( - path.substring(pathOffset, pathOffset + 3), - unittest.equals('v1/'), + queryMap['timeout']!.first, + unittest.equals(arg_timeout), ); - pathOffset += 3; - // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; - - final query = req.url.query; - var queryOffset = 0; - final queryMap = >{}; - void addQueryParam(core.String n, core.String v) => - queryMap.putIfAbsent(n, () => []).add(v); - - if (query.isNotEmpty) { - for (var part in query.split('&')) { - final keyValue = part.split('='); - addQueryParam( - core.Uri.decodeQueryComponent(keyValue[0]), - core.Uri.decodeQueryComponent(keyValue[1]), - ); - } - } unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -41197,20 +46501,27 @@ void main() { final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.restore(arg_name, $fields: arg_$fields); + final response = + await res.wait(arg_name, timeout: arg_timeout, $fields: arg_$fields); checkGoogleLongrunningOperation( response as api.GoogleLongrunningOperation); }); }); - unittest.group('resource-ProjectsLocationsDatasetsOperationsResource', () { - unittest.test('method--cancel', () async { + unittest.group('resource-ProjectsLocationsDatasetsDatasetVersionsResource', + () { + unittest.test('method--create', () async { final mock = HttpServerMock(); final res = - api.AiplatformApi(mock).projects.locations.datasets.operations; - final arg_name = 'foo'; + api.AiplatformApi(mock).projects.locations.datasets.datasetVersions; + final arg_request = buildGoogleCloudAiplatformV1DatasetVersion(); + final arg_parent = 'foo'; final arg_$fields = 'foo'; 
mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.GoogleCloudAiplatformV1DatasetVersion.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1DatasetVersion(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -41250,17 +46561,19 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleProtobufEmpty()); + final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.cancel(arg_name, $fields: arg_$fields); - checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); + final response = + await res.create(arg_request, arg_parent, $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); }); unittest.test('method--delete', () async { final mock = HttpServerMock(); final res = - api.AiplatformApi(mock).projects.locations.datasets.operations; + api.AiplatformApi(mock).projects.locations.datasets.datasetVersions; final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -41303,18 +46616,20 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleProtobufEmpty()); + final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); final response = await res.delete(arg_name, $fields: arg_$fields); - checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); }); unittest.test('method--get', () async { final mock = HttpServerMock(); final res = - api.AiplatformApi(mock).projects.locations.datasets.operations; + api.AiplatformApi(mock).projects.locations.datasets.datasetVersions; final arg_name = 'foo'; + final arg_readMask = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -41348,6 +46663,10 @@ void main() { ); } } + unittest.expect( + queryMap['readMask']!.first, + unittest.equals(arg_readMask), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -41356,22 +46675,26 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleLongrunningOperation()); + final resp = + convert.json.encode(buildGoogleCloudAiplatformV1DatasetVersion()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.get(arg_name, $fields: arg_$fields); - checkGoogleLongrunningOperation( - response as api.GoogleLongrunningOperation); + final response = + await res.get(arg_name, readMask: arg_readMask, $fields: arg_$fields); + checkGoogleCloudAiplatformV1DatasetVersion( + response as api.GoogleCloudAiplatformV1DatasetVersion); }); unittest.test('method--list', () async { final mock = HttpServerMock(); final res = - api.AiplatformApi(mock).projects.locations.datasets.operations; - final arg_name = 'foo'; + api.AiplatformApi(mock).projects.locations.datasets.datasetVersions; + final arg_parent = 'foo'; final arg_filter = 'foo'; + final arg_orderBy = 'foo'; final arg_pageSize = 42; final arg_pageToken = 'foo'; + final arg_readMask = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = 
req.url.path; @@ -41409,6 +46732,10 @@ void main() { queryMap['filter']!.first, unittest.equals(arg_filter), ); + unittest.expect( + queryMap['orderBy']!.first, + unittest.equals(arg_orderBy), + ); unittest.expect( core.int.parse(queryMap['pageSize']!.first), unittest.equals(arg_pageSize), @@ -41417,6 +46744,10 @@ void main() { queryMap['pageToken']!.first, unittest.equals(arg_pageToken), ); + unittest.expect( + queryMap['readMask']!.first, + unittest.equals(arg_readMask), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -41425,27 +46756,34 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = - convert.json.encode(buildGoogleLongrunningListOperationsResponse()); + final resp = convert.json + .encode(buildGoogleCloudAiplatformV1ListDatasetVersionsResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.list(arg_name, + final response = await res.list(arg_parent, filter: arg_filter, + orderBy: arg_orderBy, pageSize: arg_pageSize, pageToken: arg_pageToken, + readMask: arg_readMask, $fields: arg_$fields); - checkGoogleLongrunningListOperationsResponse( - response as api.GoogleLongrunningListOperationsResponse); + checkGoogleCloudAiplatformV1ListDatasetVersionsResponse( + response as api.GoogleCloudAiplatformV1ListDatasetVersionsResponse); }); - unittest.test('method--wait', () async { + unittest.test('method--patch', () async { final mock = HttpServerMock(); final res = - api.AiplatformApi(mock).projects.locations.datasets.operations; + api.AiplatformApi(mock).projects.locations.datasets.datasetVersions; + final arg_request = buildGoogleCloudAiplatformV1DatasetVersion(); final arg_name = 'foo'; - final arg_timeout = 'foo'; + final arg_updateMask = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.GoogleCloudAiplatformV1DatasetVersion.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1DatasetVersion(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -41478,8 +46816,8 @@ void main() { } } unittest.expect( - queryMap['timeout']!.first, - unittest.equals(arg_timeout), + queryMap['updateMask']!.first, + unittest.equals(arg_updateMask), ); unittest.expect( queryMap['fields']!.first, @@ -41489,21 +46827,20 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleLongrunningOperation()); + final resp = + convert.json.encode(buildGoogleCloudAiplatformV1DatasetVersion()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = - await res.wait(arg_name, timeout: arg_timeout, $fields: arg_$fields); - checkGoogleLongrunningOperation( - response as api.GoogleLongrunningOperation); + final response = await res.patch(arg_request, arg_name, + updateMask: arg_updateMask, $fields: arg_$fields); + checkGoogleCloudAiplatformV1DatasetVersion( + response as api.GoogleCloudAiplatformV1DatasetVersion); }); - }); - unittest.group('resource-ProjectsLocationsDatasetsSavedQueriesResource', () { - unittest.test('method--delete', () async { + unittest.test('method--restore', () async { final mock = HttpServerMock(); final res = - api.AiplatformApi(mock).projects.locations.datasets.savedQueries; + api.AiplatformApi(mock).projects.locations.datasets.datasetVersions; final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -41549,21 
+46886,18 @@ void main() { final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.delete(arg_name, $fields: arg_$fields); + final response = await res.restore(arg_name, $fields: arg_$fields); checkGoogleLongrunningOperation( response as api.GoogleLongrunningOperation); }); + }); - unittest.test('method--list', () async { + unittest.group('resource-ProjectsLocationsDatasetsOperationsResource', () { + unittest.test('method--cancel', () async { final mock = HttpServerMock(); final res = - api.AiplatformApi(mock).projects.locations.datasets.savedQueries; - final arg_parent = 'foo'; - final arg_filter = 'foo'; - final arg_orderBy = 'foo'; - final arg_pageSize = 42; - final arg_pageToken = 'foo'; - final arg_readMask = 'foo'; + api.AiplatformApi(mock).projects.locations.datasets.operations; + final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -41597,26 +46931,6 @@ void main() { ); } } - unittest.expect( - queryMap['filter']!.first, - unittest.equals(arg_filter), - ); - unittest.expect( - queryMap['orderBy']!.first, - unittest.equals(arg_orderBy), - ); - unittest.expect( - core.int.parse(queryMap['pageSize']!.first), - unittest.equals(arg_pageSize), - ); - unittest.expect( - queryMap['pageToken']!.first, - unittest.equals(arg_pageToken), - ); - unittest.expect( - queryMap['readMask']!.first, - unittest.equals(arg_readMask), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -41625,89 +46939,17 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json - .encode(buildGoogleCloudAiplatformV1ListSavedQueriesResponse()); + final resp = convert.json.encode(buildGoogleProtobufEmpty()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.list(arg_parent, - filter: arg_filter, - orderBy: arg_orderBy, - pageSize: arg_pageSize, - pageToken: arg_pageToken, - readMask: arg_readMask, - $fields: arg_$fields); - checkGoogleCloudAiplatformV1ListSavedQueriesResponse( - response as api.GoogleCloudAiplatformV1ListSavedQueriesResponse); + final response = await res.cancel(arg_name, $fields: arg_$fields); + checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); }); - }); - unittest.group( - 'resource-ProjectsLocationsDatasetsSavedQueriesOperationsResource', () { - unittest.test('method--cancel', () async { + unittest.test('method--delete', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .datasets - .savedQueries - .operations; - final arg_name = 'foo'; - final arg_$fields = 'foo'; - mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final path = req.url.path; - var pathOffset = 0; - core.int index; - core.String subPart; - unittest.expect( - path.substring(pathOffset, pathOffset + 1), - unittest.equals('/'), - ); - pathOffset += 1; - unittest.expect( - path.substring(pathOffset, pathOffset + 3), - unittest.equals('v1/'), - ); - pathOffset += 3; - // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; - - final query = req.url.query; - var queryOffset = 0; - final queryMap = >{}; - void addQueryParam(core.String n, core.String v) => - queryMap.putIfAbsent(n, () => []).add(v); - - if (query.isNotEmpty) { - for (var part in query.split('&')) { - final keyValue = 
part.split('='); - addQueryParam( - core.Uri.decodeQueryComponent(keyValue[0]), - core.Uri.decodeQueryComponent(keyValue[1]), - ); - } - } - unittest.expect( - queryMap['fields']!.first, - unittest.equals(arg_$fields), - ); - - final h = { - 'content-type': 'application/json; charset=utf-8', - }; - final resp = convert.json.encode(buildGoogleProtobufEmpty()); - return async.Future.value(stringResponse(200, h, resp)); - }), true); - final response = await res.cancel(arg_name, $fields: arg_$fields); - checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); - }); - - unittest.test('method--delete', () async { - final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .datasets - .savedQueries - .operations; + final res = + api.AiplatformApi(mock).projects.locations.datasets.operations; final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -41759,12 +47001,8 @@ void main() { unittest.test('method--get', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .datasets - .savedQueries - .operations; + final res = + api.AiplatformApi(mock).projects.locations.datasets.operations; final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -41817,12 +47055,8 @@ void main() { unittest.test('method--list', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .datasets - .savedQueries - .operations; + final res = + api.AiplatformApi(mock).projects.locations.datasets.operations; final arg_name = 'foo'; final arg_filter = 'foo'; final arg_pageSize = 42; @@ -41895,12 +47129,8 @@ void main() { unittest.test('method--wait', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .datasets - .savedQueries - .operations; + final res = + api.AiplatformApi(mock).projects.locations.datasets.operations; final arg_name = 'foo'; final arg_timeout = 'foo'; final arg_$fields = 'foo'; @@ -41958,22 +47188,14 @@ void main() { }); }); - unittest.group('resource-ProjectsLocationsDeploymentResourcePoolsResource', - () { - unittest.test('method--create', () async { + unittest.group('resource-ProjectsLocationsDatasetsSavedQueriesResource', () { + unittest.test('method--delete', () async { final mock = HttpServerMock(); final res = - api.AiplatformApi(mock).projects.locations.deploymentResourcePools; - final arg_request = - buildGoogleCloudAiplatformV1CreateDeploymentResourcePoolRequest(); - final arg_parent = 'foo'; + api.AiplatformApi(mock).projects.locations.datasets.savedQueries; + final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = - api.GoogleCloudAiplatformV1CreateDeploymentResourcePoolRequest - .fromJson(json as core.Map); - checkGoogleCloudAiplatformV1CreateDeploymentResourcePoolRequest(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -42016,17 +47238,21 @@ void main() { final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = - await res.create(arg_request, arg_parent, $fields: arg_$fields); + final response = await res.delete(arg_name, $fields: arg_$fields); checkGoogleLongrunningOperation( response as api.GoogleLongrunningOperation); }); - unittest.test('method--delete', () async { + 
unittest.test('method--list', () async { final mock = HttpServerMock(); final res = - api.AiplatformApi(mock).projects.locations.deploymentResourcePools; - final arg_name = 'foo'; + api.AiplatformApi(mock).projects.locations.datasets.savedQueries; + final arg_parent = 'foo'; + final arg_filter = 'foo'; + final arg_orderBy = 'foo'; + final arg_pageSize = 42; + final arg_pageToken = 'foo'; + final arg_readMask = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -42060,6 +47286,26 @@ void main() { ); } } + unittest.expect( + queryMap['filter']!.first, + unittest.equals(arg_filter), + ); + unittest.expect( + queryMap['orderBy']!.first, + unittest.equals(arg_orderBy), + ); + unittest.expect( + core.int.parse(queryMap['pageSize']!.first), + unittest.equals(arg_pageSize), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), + ); + unittest.expect( + queryMap['readMask']!.first, + unittest.equals(arg_readMask), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -42068,18 +47314,32 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleLongrunningOperation()); + final resp = convert.json + .encode(buildGoogleCloudAiplatformV1ListSavedQueriesResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.delete(arg_name, $fields: arg_$fields); - checkGoogleLongrunningOperation( - response as api.GoogleLongrunningOperation); + final response = await res.list(arg_parent, + filter: arg_filter, + orderBy: arg_orderBy, + pageSize: arg_pageSize, + pageToken: arg_pageToken, + readMask: arg_readMask, + $fields: arg_$fields); + checkGoogleCloudAiplatformV1ListSavedQueriesResponse( + response as api.GoogleCloudAiplatformV1ListSavedQueriesResponse); }); + }); - unittest.test('method--get', () async { + unittest.group( + 'resource-ProjectsLocationsDatasetsSavedQueriesOperationsResource', () { + unittest.test('method--cancel', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.deploymentResourcePools; + final res = api.AiplatformApi(mock) + .projects + .locations + .datasets + .savedQueries + .operations; final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -42122,22 +47382,22 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json - .encode(buildGoogleCloudAiplatformV1DeploymentResourcePool()); + final resp = convert.json.encode(buildGoogleProtobufEmpty()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.get(arg_name, $fields: arg_$fields); - checkGoogleCloudAiplatformV1DeploymentResourcePool( - response as api.GoogleCloudAiplatformV1DeploymentResourcePool); + final response = await res.cancel(arg_name, $fields: arg_$fields); + checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); }); - unittest.test('method--list', () async { + unittest.test('method--delete', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.deploymentResourcePools; - final arg_parent = 'foo'; - final arg_pageSize = 42; - final arg_pageToken = 'foo'; + final res = api.AiplatformApi(mock) + .projects + .locations + .datasets + .savedQueries + .operations; + final arg_name = 'foo'; final arg_$fields = 
'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -42171,14 +47431,6 @@ void main() { ); } } - unittest.expect( - core.int.parse(queryMap['pageSize']!.first), - unittest.equals(arg_pageSize), - ); - unittest.expect( - queryMap['pageToken']!.first, - unittest.equals(arg_pageToken), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -42187,31 +47439,24 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode( - buildGoogleCloudAiplatformV1ListDeploymentResourcePoolsResponse()); + final resp = convert.json.encode(buildGoogleProtobufEmpty()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.list(arg_parent, - pageSize: arg_pageSize, - pageToken: arg_pageToken, - $fields: arg_$fields); - checkGoogleCloudAiplatformV1ListDeploymentResourcePoolsResponse(response - as api.GoogleCloudAiplatformV1ListDeploymentResourcePoolsResponse); + final response = await res.delete(arg_name, $fields: arg_$fields); + checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); }); - unittest.test('method--patch', () async { + unittest.test('method--get', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.deploymentResourcePools; - final arg_request = buildGoogleCloudAiplatformV1DeploymentResourcePool(); + final res = api.AiplatformApi(mock) + .projects + .locations + .datasets + .savedQueries + .operations; final arg_name = 'foo'; - final arg_updateMask = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleCloudAiplatformV1DeploymentResourcePool.fromJson( - json as core.Map); - checkGoogleCloudAiplatformV1DeploymentResourcePool(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -42243,10 +47488,6 @@ void main() { ); } } - unittest.expect( - queryMap['updateMask']!.first, - unittest.equals(arg_updateMask), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -42258,17 +47499,21 @@ void main() { final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.patch(arg_request, arg_name, - updateMask: arg_updateMask, $fields: arg_$fields); + final response = await res.get(arg_name, $fields: arg_$fields); checkGoogleLongrunningOperation( response as api.GoogleLongrunningOperation); }); - unittest.test('method--queryDeployedModels', () async { + unittest.test('method--list', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.deploymentResourcePools; - final arg_deploymentResourcePool = 'foo'; + final res = api.AiplatformApi(mock) + .projects + .locations + .datasets + .savedQueries + .operations; + final arg_name = 'foo'; + final arg_filter = 'foo'; final arg_pageSize = 42; final arg_pageToken = 'foo'; final arg_$fields = 'foo'; @@ -42304,6 +47549,10 @@ void main() { ); } } + unittest.expect( + queryMap['filter']!.first, + unittest.equals(arg_filter), + ); unittest.expect( core.int.parse(queryMap['pageSize']!.first), unittest.equals(arg_pageSize), @@ -42320,30 +47569,29 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json - .encode(buildGoogleCloudAiplatformV1QueryDeployedModelsResponse()); + final resp = + 
convert.json.encode(buildGoogleLongrunningListOperationsResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.queryDeployedModels(arg_deploymentResourcePool, + final response = await res.list(arg_name, + filter: arg_filter, pageSize: arg_pageSize, pageToken: arg_pageToken, $fields: arg_$fields); - checkGoogleCloudAiplatformV1QueryDeployedModelsResponse( - response as api.GoogleCloudAiplatformV1QueryDeployedModelsResponse); + checkGoogleLongrunningListOperationsResponse( + response as api.GoogleLongrunningListOperationsResponse); }); - }); - unittest.group( - 'resource-ProjectsLocationsDeploymentResourcePoolsOperationsResource', - () { - unittest.test('method--cancel', () async { + unittest.test('method--wait', () async { final mock = HttpServerMock(); final res = api.AiplatformApi(mock) .projects .locations - .deploymentResourcePools + .datasets + .savedQueries .operations; final arg_name = 'foo'; + final arg_timeout = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -42377,6 +47625,10 @@ void main() { ); } } + unittest.expect( + queryMap['timeout']!.first, + unittest.equals(arg_timeout), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -42385,23 +47637,32 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleProtobufEmpty()); + final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.cancel(arg_name, $fields: arg_$fields); - checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); + final response = + await res.wait(arg_name, timeout: arg_timeout, $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); }); + }); - unittest.test('method--delete', () async { + unittest.group('resource-ProjectsLocationsDeploymentResourcePoolsResource', + () { + unittest.test('method--create', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .deploymentResourcePools - .operations; - final arg_name = 'foo'; + final res = + api.AiplatformApi(mock).projects.locations.deploymentResourcePools; + final arg_request = + buildGoogleCloudAiplatformV1CreateDeploymentResourcePoolRequest(); + final arg_parent = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = + api.GoogleCloudAiplatformV1CreateDeploymentResourcePoolRequest + .fromJson(json as core.Map); + checkGoogleCloudAiplatformV1CreateDeploymentResourcePoolRequest(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -42441,20 +47702,19 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleProtobufEmpty()); + final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.delete(arg_name, $fields: arg_$fields); - checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); + final response = + await res.create(arg_request, arg_parent, $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); }); - unittest.test('method--get', () async { + unittest.test('method--delete', () async { final mock = 
HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .deploymentResourcePools - .operations; + final res = + api.AiplatformApi(mock).projects.locations.deploymentResourcePools; final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -42500,22 +47760,16 @@ void main() { final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.get(arg_name, $fields: arg_$fields); + final response = await res.delete(arg_name, $fields: arg_$fields); checkGoogleLongrunningOperation( response as api.GoogleLongrunningOperation); }); - unittest.test('method--list', () async { + unittest.test('method--get', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .deploymentResourcePools - .operations; + final res = + api.AiplatformApi(mock).projects.locations.deploymentResourcePools; final arg_name = 'foo'; - final arg_filter = 'foo'; - final arg_pageSize = 42; - final arg_pageToken = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -42549,18 +47803,6 @@ void main() { ); } } - unittest.expect( - queryMap['filter']!.first, - unittest.equals(arg_filter), - ); - unittest.expect( - core.int.parse(queryMap['pageSize']!.first), - unittest.equals(arg_pageSize), - ); - unittest.expect( - queryMap['pageToken']!.first, - unittest.equals(arg_pageToken), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -42569,28 +47811,22 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = - convert.json.encode(buildGoogleLongrunningListOperationsResponse()); + final resp = convert.json + .encode(buildGoogleCloudAiplatformV1DeploymentResourcePool()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.list(arg_name, - filter: arg_filter, - pageSize: arg_pageSize, - pageToken: arg_pageToken, - $fields: arg_$fields); - checkGoogleLongrunningListOperationsResponse( - response as api.GoogleLongrunningListOperationsResponse); + final response = await res.get(arg_name, $fields: arg_$fields); + checkGoogleCloudAiplatformV1DeploymentResourcePool( + response as api.GoogleCloudAiplatformV1DeploymentResourcePool); }); - unittest.test('method--wait', () async { + unittest.test('method--list', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .deploymentResourcePools - .operations; - final arg_name = 'foo'; - final arg_timeout = 'foo'; + final res = + api.AiplatformApi(mock).projects.locations.deploymentResourcePools; + final arg_parent = 'foo'; + final arg_pageSize = 42; + final arg_pageToken = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -42625,8 +47861,12 @@ void main() { } } unittest.expect( - queryMap['timeout']!.first, - unittest.equals(arg_timeout), + core.int.parse(queryMap['pageSize']!.first), + unittest.equals(arg_pageSize), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), ); unittest.expect( queryMap['fields']!.first, @@ -42636,27 +47876,30 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleLongrunningOperation()); + final resp = 
convert.json.encode( + buildGoogleCloudAiplatformV1ListDeploymentResourcePoolsResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = - await res.wait(arg_name, timeout: arg_timeout, $fields: arg_$fields); - checkGoogleLongrunningOperation( - response as api.GoogleLongrunningOperation); + final response = await res.list(arg_parent, + pageSize: arg_pageSize, + pageToken: arg_pageToken, + $fields: arg_$fields); + checkGoogleCloudAiplatformV1ListDeploymentResourcePoolsResponse(response + as api.GoogleCloudAiplatformV1ListDeploymentResourcePoolsResponse); }); - }); - unittest.group('resource-ProjectsLocationsEndpointsResource', () { - unittest.test('method--computeTokens', () async { + unittest.test('method--patch', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.endpoints; - final arg_request = buildGoogleCloudAiplatformV1ComputeTokensRequest(); - final arg_endpoint = 'foo'; + final res = + api.AiplatformApi(mock).projects.locations.deploymentResourcePools; + final arg_request = buildGoogleCloudAiplatformV1DeploymentResourcePool(); + final arg_name = 'foo'; + final arg_updateMask = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleCloudAiplatformV1ComputeTokensRequest.fromJson( + final obj = api.GoogleCloudAiplatformV1DeploymentResourcePool.fromJson( json as core.Map); - checkGoogleCloudAiplatformV1ComputeTokensRequest(obj); + checkGoogleCloudAiplatformV1DeploymentResourcePool(obj); final path = req.url.path; var pathOffset = 0; @@ -42689,6 +47932,10 @@ void main() { ); } } + unittest.expect( + queryMap['updateMask']!.first, + unittest.equals(arg_updateMask), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -42697,27 +47944,24 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json - .encode(buildGoogleCloudAiplatformV1ComputeTokensResponse()); + final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.computeTokens(arg_request, arg_endpoint, - $fields: arg_$fields); - checkGoogleCloudAiplatformV1ComputeTokensResponse( - response as api.GoogleCloudAiplatformV1ComputeTokensResponse); + final response = await res.patch(arg_request, arg_name, + updateMask: arg_updateMask, $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); }); - unittest.test('method--countTokens', () async { + unittest.test('method--queryDeployedModels', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.endpoints; - final arg_request = buildGoogleCloudAiplatformV1CountTokensRequest(); - final arg_endpoint = 'foo'; + final res = + api.AiplatformApi(mock).projects.locations.deploymentResourcePools; + final arg_deploymentResourcePool = 'foo'; + final arg_pageSize = 42; + final arg_pageToken = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleCloudAiplatformV1CountTokensRequest.fromJson( - json as core.Map); - checkGoogleCloudAiplatformV1CountTokensRequest(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -42749,6 +47993,14 @@ void main() { ); } } + unittest.expect( + core.int.parse(queryMap['pageSize']!.first), + unittest.equals(arg_pageSize), + ); + unittest.expect( + 
queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -42758,27 +48010,31 @@ void main() { 'content-type': 'application/json; charset=utf-8', }; final resp = convert.json - .encode(buildGoogleCloudAiplatformV1CountTokensResponse()); + .encode(buildGoogleCloudAiplatformV1QueryDeployedModelsResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.countTokens(arg_request, arg_endpoint, + final response = await res.queryDeployedModels(arg_deploymentResourcePool, + pageSize: arg_pageSize, + pageToken: arg_pageToken, $fields: arg_$fields); - checkGoogleCloudAiplatformV1CountTokensResponse( - response as api.GoogleCloudAiplatformV1CountTokensResponse); + checkGoogleCloudAiplatformV1QueryDeployedModelsResponse( + response as api.GoogleCloudAiplatformV1QueryDeployedModelsResponse); }); + }); - unittest.test('method--create', () async { + unittest.group( + 'resource-ProjectsLocationsDeploymentResourcePoolsOperationsResource', + () { + unittest.test('method--cancel', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.endpoints; - final arg_request = buildGoogleCloudAiplatformV1Endpoint(); - final arg_parent = 'foo'; - final arg_endpointId = 'foo'; + final res = api.AiplatformApi(mock) + .projects + .locations + .deploymentResourcePools + .operations; + final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleCloudAiplatformV1Endpoint.fromJson( - json as core.Map); - checkGoogleCloudAiplatformV1Endpoint(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -42810,10 +48066,6 @@ void main() { ); } } - unittest.expect( - queryMap['endpointId']!.first, - unittest.equals(arg_endpointId), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -42822,18 +48074,20 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleLongrunningOperation()); + final resp = convert.json.encode(buildGoogleProtobufEmpty()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.create(arg_request, arg_parent, - endpointId: arg_endpointId, $fields: arg_$fields); - checkGoogleLongrunningOperation( - response as api.GoogleLongrunningOperation); + final response = await res.cancel(arg_name, $fields: arg_$fields); + checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); }); unittest.test('method--delete', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.endpoints; + final res = api.AiplatformApi(mock) + .projects + .locations + .deploymentResourcePools + .operations; final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -42876,25 +48130,23 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleLongrunningOperation()); + final resp = convert.json.encode(buildGoogleProtobufEmpty()); return async.Future.value(stringResponse(200, h, resp)); }), true); final response = await res.delete(arg_name, $fields: arg_$fields); - checkGoogleLongrunningOperation( - response as api.GoogleLongrunningOperation); + checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); }); - unittest.test('method--deployModel', () 
async { + unittest.test('method--get', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.endpoints; - final arg_request = buildGoogleCloudAiplatformV1DeployModelRequest(); - final arg_endpoint = 'foo'; + final res = api.AiplatformApi(mock) + .projects + .locations + .deploymentResourcePools + .operations; + final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleCloudAiplatformV1DeployModelRequest.fromJson( - json as core.Map); - checkGoogleCloudAiplatformV1DeployModelRequest(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -42937,23 +48189,24 @@ void main() { final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.deployModel(arg_request, arg_endpoint, - $fields: arg_$fields); + final response = await res.get(arg_name, $fields: arg_$fields); checkGoogleLongrunningOperation( response as api.GoogleLongrunningOperation); }); - unittest.test('method--directPredict', () async { + unittest.test('method--list', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.endpoints; - final arg_request = buildGoogleCloudAiplatformV1DirectPredictRequest(); - final arg_endpoint = 'foo'; + final res = api.AiplatformApi(mock) + .projects + .locations + .deploymentResourcePools + .operations; + final arg_name = 'foo'; + final arg_filter = 'foo'; + final arg_pageSize = 42; + final arg_pageToken = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleCloudAiplatformV1DirectPredictRequest.fromJson( - json as core.Map); - checkGoogleCloudAiplatformV1DirectPredictRequest(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -42985,6 +48238,18 @@ void main() { ); } } + unittest.expect( + queryMap['filter']!.first, + unittest.equals(arg_filter), + ); + unittest.expect( + core.int.parse(queryMap['pageSize']!.first), + unittest.equals(arg_pageSize), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -42993,27 +48258,30 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json - .encode(buildGoogleCloudAiplatformV1DirectPredictResponse()); + final resp = + convert.json.encode(buildGoogleLongrunningListOperationsResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.directPredict(arg_request, arg_endpoint, + final response = await res.list(arg_name, + filter: arg_filter, + pageSize: arg_pageSize, + pageToken: arg_pageToken, $fields: arg_$fields); - checkGoogleCloudAiplatformV1DirectPredictResponse( - response as api.GoogleCloudAiplatformV1DirectPredictResponse); + checkGoogleLongrunningListOperationsResponse( + response as api.GoogleLongrunningListOperationsResponse); }); - unittest.test('method--directRawPredict', () async { + unittest.test('method--wait', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.endpoints; - final arg_request = buildGoogleCloudAiplatformV1DirectRawPredictRequest(); - final arg_endpoint = 'foo'; + final res = api.AiplatformApi(mock) + .projects + .locations + .deploymentResourcePools + .operations; + final arg_name = 
'foo'; + final arg_timeout = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleCloudAiplatformV1DirectRawPredictRequest.fromJson( - json as core.Map); - checkGoogleCloudAiplatformV1DirectRawPredictRequest(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -43045,6 +48313,10 @@ void main() { ); } } + unittest.expect( + queryMap['timeout']!.first, + unittest.equals(arg_timeout), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -43053,26 +48325,27 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json - .encode(buildGoogleCloudAiplatformV1DirectRawPredictResponse()); + final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.directRawPredict(arg_request, arg_endpoint, - $fields: arg_$fields); - checkGoogleCloudAiplatformV1DirectRawPredictResponse( - response as api.GoogleCloudAiplatformV1DirectRawPredictResponse); + final response = + await res.wait(arg_name, timeout: arg_timeout, $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); }); + }); - unittest.test('method--explain', () async { + unittest.group('resource-ProjectsLocationsEndpointsResource', () { + unittest.test('method--computeTokens', () async { final mock = HttpServerMock(); final res = api.AiplatformApi(mock).projects.locations.endpoints; - final arg_request = buildGoogleCloudAiplatformV1ExplainRequest(); + final arg_request = buildGoogleCloudAiplatformV1ComputeTokensRequest(); final arg_endpoint = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleCloudAiplatformV1ExplainRequest.fromJson( + final obj = api.GoogleCloudAiplatformV1ComputeTokensRequest.fromJson( json as core.Map); - checkGoogleCloudAiplatformV1ExplainRequest(obj); + checkGoogleCloudAiplatformV1ComputeTokensRequest(obj); final path = req.url.path; var pathOffset = 0; @@ -43113,26 +48386,26 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = - convert.json.encode(buildGoogleCloudAiplatformV1ExplainResponse()); + final resp = convert.json + .encode(buildGoogleCloudAiplatformV1ComputeTokensResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = - await res.explain(arg_request, arg_endpoint, $fields: arg_$fields); - checkGoogleCloudAiplatformV1ExplainResponse( - response as api.GoogleCloudAiplatformV1ExplainResponse); + final response = await res.computeTokens(arg_request, arg_endpoint, + $fields: arg_$fields); + checkGoogleCloudAiplatformV1ComputeTokensResponse( + response as api.GoogleCloudAiplatformV1ComputeTokensResponse); }); - unittest.test('method--generateContent', () async { + unittest.test('method--countTokens', () async { final mock = HttpServerMock(); final res = api.AiplatformApi(mock).projects.locations.endpoints; - final arg_request = buildGoogleCloudAiplatformV1GenerateContentRequest(); - final arg_model = 'foo'; + final arg_request = buildGoogleCloudAiplatformV1CountTokensRequest(); + final arg_endpoint = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleCloudAiplatformV1GenerateContentRequest.fromJson( + final obj = api.GoogleCloudAiplatformV1CountTokensRequest.fromJson( json 
as core.Map); - checkGoogleCloudAiplatformV1GenerateContentRequest(obj); + checkGoogleCloudAiplatformV1CountTokensRequest(obj); final path = req.url.path; var pathOffset = 0; @@ -43174,21 +48447,27 @@ void main() { 'content-type': 'application/json; charset=utf-8', }; final resp = convert.json - .encode(buildGoogleCloudAiplatformV1GenerateContentResponse()); + .encode(buildGoogleCloudAiplatformV1CountTokensResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.generateContent(arg_request, arg_model, + final response = await res.countTokens(arg_request, arg_endpoint, $fields: arg_$fields); - checkGoogleCloudAiplatformV1GenerateContentResponse( - response as api.GoogleCloudAiplatformV1GenerateContentResponse); + checkGoogleCloudAiplatformV1CountTokensResponse( + response as api.GoogleCloudAiplatformV1CountTokensResponse); }); - unittest.test('method--get', () async { + unittest.test('method--create', () async { final mock = HttpServerMock(); final res = api.AiplatformApi(mock).projects.locations.endpoints; - final arg_name = 'foo'; + final arg_request = buildGoogleCloudAiplatformV1Endpoint(); + final arg_parent = 'foo'; + final arg_endpointId = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.GoogleCloudAiplatformV1Endpoint.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1Endpoint(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -43220,6 +48499,10 @@ void main() { ); } } + unittest.expect( + queryMap['endpointId']!.first, + unittest.equals(arg_endpointId), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -43228,24 +48511,19 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = - convert.json.encode(buildGoogleCloudAiplatformV1Endpoint()); + final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.get(arg_name, $fields: arg_$fields); - checkGoogleCloudAiplatformV1Endpoint( - response as api.GoogleCloudAiplatformV1Endpoint); + final response = await res.create(arg_request, arg_parent, + endpointId: arg_endpointId, $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); }); - unittest.test('method--list', () async { + unittest.test('method--delete', () async { final mock = HttpServerMock(); final res = api.AiplatformApi(mock).projects.locations.endpoints; - final arg_parent = 'foo'; - final arg_filter = 'foo'; - final arg_orderBy = 'foo'; - final arg_pageSize = 42; - final arg_pageToken = 'foo'; - final arg_readMask = 'foo'; + final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -43279,26 +48557,6 @@ void main() { ); } } - unittest.expect( - queryMap['filter']!.first, - unittest.equals(arg_filter), - ); - unittest.expect( - queryMap['orderBy']!.first, - unittest.equals(arg_orderBy), - ); - unittest.expect( - core.int.parse(queryMap['pageSize']!.first), - unittest.equals(arg_pageSize), - ); - unittest.expect( - queryMap['pageToken']!.first, - unittest.equals(arg_pageToken), - ); - unittest.expect( - queryMap['readMask']!.first, - unittest.equals(arg_readMask), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -43307,33 +48565,24 @@ void main() { final h = { 'content-type': 
'application/json; charset=utf-8', }; - final resp = convert.json - .encode(buildGoogleCloudAiplatformV1ListEndpointsResponse()); + final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.list(arg_parent, - filter: arg_filter, - orderBy: arg_orderBy, - pageSize: arg_pageSize, - pageToken: arg_pageToken, - readMask: arg_readMask, - $fields: arg_$fields); - checkGoogleCloudAiplatformV1ListEndpointsResponse( - response as api.GoogleCloudAiplatformV1ListEndpointsResponse); + final response = await res.delete(arg_name, $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); }); - unittest.test('method--mutateDeployedModel', () async { + unittest.test('method--deployModel', () async { final mock = HttpServerMock(); final res = api.AiplatformApi(mock).projects.locations.endpoints; - final arg_request = - buildGoogleCloudAiplatformV1MutateDeployedModelRequest(); + final arg_request = buildGoogleCloudAiplatformV1DeployModelRequest(); final arg_endpoint = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = - api.GoogleCloudAiplatformV1MutateDeployedModelRequest.fromJson( - json as core.Map); - checkGoogleCloudAiplatformV1MutateDeployedModelRequest(obj); + final obj = api.GoogleCloudAiplatformV1DeployModelRequest.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1DeployModelRequest(obj); final path = req.url.path; var pathOffset = 0; @@ -43377,23 +48626,22 @@ void main() { final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.mutateDeployedModel(arg_request, arg_endpoint, + final response = await res.deployModel(arg_request, arg_endpoint, $fields: arg_$fields); checkGoogleLongrunningOperation( response as api.GoogleLongrunningOperation); }); - unittest.test('method--patch', () async { + unittest.test('method--directPredict', () async { final mock = HttpServerMock(); final res = api.AiplatformApi(mock).projects.locations.endpoints; - final arg_request = buildGoogleCloudAiplatformV1Endpoint(); - final arg_name = 'foo'; - final arg_updateMask = 'foo'; + final arg_request = buildGoogleCloudAiplatformV1DirectPredictRequest(); + final arg_endpoint = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleCloudAiplatformV1Endpoint.fromJson( + final obj = api.GoogleCloudAiplatformV1DirectPredictRequest.fromJson( json as core.Map); - checkGoogleCloudAiplatformV1Endpoint(obj); + checkGoogleCloudAiplatformV1DirectPredictRequest(obj); final path = req.url.path; var pathOffset = 0; @@ -43426,10 +48674,6 @@ void main() { ); } } - unittest.expect( - queryMap['updateMask']!.first, - unittest.equals(arg_updateMask), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -43438,26 +48682,26 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = - convert.json.encode(buildGoogleCloudAiplatformV1Endpoint()); + final resp = convert.json + .encode(buildGoogleCloudAiplatformV1DirectPredictResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.patch(arg_request, arg_name, - updateMask: arg_updateMask, $fields: arg_$fields); - checkGoogleCloudAiplatformV1Endpoint( - response as 
api.GoogleCloudAiplatformV1Endpoint); + final response = await res.directPredict(arg_request, arg_endpoint, + $fields: arg_$fields); + checkGoogleCloudAiplatformV1DirectPredictResponse( + response as api.GoogleCloudAiplatformV1DirectPredictResponse); }); - unittest.test('method--predict', () async { + unittest.test('method--directRawPredict', () async { final mock = HttpServerMock(); final res = api.AiplatformApi(mock).projects.locations.endpoints; - final arg_request = buildGoogleCloudAiplatformV1PredictRequest(); + final arg_request = buildGoogleCloudAiplatformV1DirectRawPredictRequest(); final arg_endpoint = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleCloudAiplatformV1PredictRequest.fromJson( + final obj = api.GoogleCloudAiplatformV1DirectRawPredictRequest.fromJson( json as core.Map); - checkGoogleCloudAiplatformV1PredictRequest(obj); + checkGoogleCloudAiplatformV1DirectRawPredictRequest(obj); final path = req.url.path; var pathOffset = 0; @@ -43498,26 +48742,26 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = - convert.json.encode(buildGoogleCloudAiplatformV1PredictResponse()); + final resp = convert.json + .encode(buildGoogleCloudAiplatformV1DirectRawPredictResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = - await res.predict(arg_request, arg_endpoint, $fields: arg_$fields); - checkGoogleCloudAiplatformV1PredictResponse( - response as api.GoogleCloudAiplatformV1PredictResponse); + final response = await res.directRawPredict(arg_request, arg_endpoint, + $fields: arg_$fields); + checkGoogleCloudAiplatformV1DirectRawPredictResponse( + response as api.GoogleCloudAiplatformV1DirectRawPredictResponse); }); - unittest.test('method--rawPredict', () async { + unittest.test('method--explain', () async { final mock = HttpServerMock(); final res = api.AiplatformApi(mock).projects.locations.endpoints; - final arg_request = buildGoogleCloudAiplatformV1RawPredictRequest(); + final arg_request = buildGoogleCloudAiplatformV1ExplainRequest(); final arg_endpoint = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleCloudAiplatformV1RawPredictRequest.fromJson( + final obj = api.GoogleCloudAiplatformV1ExplainRequest.fromJson( json as core.Map); - checkGoogleCloudAiplatformV1RawPredictRequest(obj); + checkGoogleCloudAiplatformV1ExplainRequest(obj); final path = req.url.path; var pathOffset = 0; @@ -43558,24 +48802,28 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleApiHttpBody()); + final resp = + convert.json.encode(buildGoogleCloudAiplatformV1ExplainResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); final response = - await res.rawPredict(arg_request, arg_endpoint, $fields: arg_$fields); - checkGoogleApiHttpBody(response as api.GoogleApiHttpBody); + await res.explain(arg_request, arg_endpoint, $fields: arg_$fields); + checkGoogleCloudAiplatformV1ExplainResponse( + response as api.GoogleCloudAiplatformV1ExplainResponse); }); - unittest.test('method--serverStreamingPredict', () async { + unittest.test('method--fetchPredictOperation', () async { final mock = HttpServerMock(); final res = api.AiplatformApi(mock).projects.locations.endpoints; - final arg_request = buildGoogleCloudAiplatformV1StreamingPredictRequest(); + final arg_request = + 
buildGoogleCloudAiplatformV1FetchPredictOperationRequest(); final arg_endpoint = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleCloudAiplatformV1StreamingPredictRequest.fromJson( - json as core.Map); - checkGoogleCloudAiplatformV1StreamingPredictRequest(obj); + final obj = + api.GoogleCloudAiplatformV1FetchPredictOperationRequest.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1FetchPredictOperationRequest(obj); final path = req.url.path; var pathOffset = 0; @@ -43616,18 +48864,17 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json - .encode(buildGoogleCloudAiplatformV1StreamingPredictResponse()); + final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.serverStreamingPredict( + final response = await res.fetchPredictOperation( arg_request, arg_endpoint, $fields: arg_$fields); - checkGoogleCloudAiplatformV1StreamingPredictResponse( - response as api.GoogleCloudAiplatformV1StreamingPredictResponse); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); }); - unittest.test('method--streamGenerateContent', () async { + unittest.test('method--generateContent', () async { final mock = HttpServerMock(); final res = api.AiplatformApi(mock).projects.locations.endpoints; final arg_request = buildGoogleCloudAiplatformV1GenerateContentRequest(); @@ -43681,23 +48928,18 @@ void main() { .encode(buildGoogleCloudAiplatformV1GenerateContentResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.streamGenerateContent(arg_request, arg_model, + final response = await res.generateContent(arg_request, arg_model, $fields: arg_$fields); checkGoogleCloudAiplatformV1GenerateContentResponse( response as api.GoogleCloudAiplatformV1GenerateContentResponse); }); - unittest.test('method--streamRawPredict', () async { + unittest.test('method--get', () async { final mock = HttpServerMock(); final res = api.AiplatformApi(mock).projects.locations.endpoints; - final arg_request = buildGoogleCloudAiplatformV1StreamRawPredictRequest(); - final arg_endpoint = 'foo'; + final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleCloudAiplatformV1StreamRawPredictRequest.fromJson( - json as core.Map); - checkGoogleCloudAiplatformV1StreamRawPredictRequest(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -43737,25 +48979,26 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleApiHttpBody()); + final resp = + convert.json.encode(buildGoogleCloudAiplatformV1Endpoint()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.streamRawPredict(arg_request, arg_endpoint, - $fields: arg_$fields); - checkGoogleApiHttpBody(response as api.GoogleApiHttpBody); + final response = await res.get(arg_name, $fields: arg_$fields); + checkGoogleCloudAiplatformV1Endpoint( + response as api.GoogleCloudAiplatformV1Endpoint); }); - unittest.test('method--undeployModel', () async { + unittest.test('method--list', () async { final mock = HttpServerMock(); final res = api.AiplatformApi(mock).projects.locations.endpoints; - final arg_request = buildGoogleCloudAiplatformV1UndeployModelRequest(); - final 
arg_endpoint = 'foo'; + final arg_parent = 'foo'; + final arg_filter = 'foo'; + final arg_orderBy = 'foo'; + final arg_pageSize = 42; + final arg_pageToken = 'foo'; + final arg_readMask = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleCloudAiplatformV1UndeployModelRequest.fromJson( - json as core.Map); - checkGoogleCloudAiplatformV1UndeployModelRequest(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -43787,6 +49030,26 @@ void main() { ); } } + unittest.expect( + queryMap['filter']!.first, + unittest.equals(arg_filter), + ); + unittest.expect( + queryMap['orderBy']!.first, + unittest.equals(arg_orderBy), + ); + unittest.expect( + core.int.parse(queryMap['pageSize']!.first), + unittest.equals(arg_pageSize), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), + ); + unittest.expect( + queryMap['readMask']!.first, + unittest.equals(arg_readMask), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -43795,27 +49058,33 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleLongrunningOperation()); + final resp = convert.json + .encode(buildGoogleCloudAiplatformV1ListEndpointsResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.undeployModel(arg_request, arg_endpoint, + final response = await res.list(arg_parent, + filter: arg_filter, + orderBy: arg_orderBy, + pageSize: arg_pageSize, + pageToken: arg_pageToken, + readMask: arg_readMask, $fields: arg_$fields); - checkGoogleLongrunningOperation( - response as api.GoogleLongrunningOperation); + checkGoogleCloudAiplatformV1ListEndpointsResponse( + response as api.GoogleCloudAiplatformV1ListEndpointsResponse); }); - }); - unittest.group('resource-ProjectsLocationsEndpointsChatResource', () { - unittest.test('method--completions', () async { + unittest.test('method--mutateDeployedModel', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.endpoints.chat; - final arg_request = buildGoogleApiHttpBody(); + final res = api.AiplatformApi(mock).projects.locations.endpoints; + final arg_request = + buildGoogleCloudAiplatformV1MutateDeployedModelRequest(); final arg_endpoint = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleApiHttpBody.fromJson( - json as core.Map); - checkGoogleApiHttpBody(obj); + final obj = + api.GoogleCloudAiplatformV1MutateDeployedModelRequest.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1MutateDeployedModelRequest(obj); final path = req.url.path; var pathOffset = 0; @@ -43856,23 +49125,27 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleApiHttpBody()); + final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.completions(arg_request, arg_endpoint, + final response = await res.mutateDeployedModel(arg_request, arg_endpoint, $fields: arg_$fields); - checkGoogleApiHttpBody(response as api.GoogleApiHttpBody); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); }); - }); - unittest.group('resource-ProjectsLocationsEndpointsOperationsResource', () { - unittest.test('method--cancel', () async { + 
unittest.test('method--patch', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.endpoints.operations; + final res = api.AiplatformApi(mock).projects.locations.endpoints; + final arg_request = buildGoogleCloudAiplatformV1Endpoint(); final arg_name = 'foo'; + final arg_updateMask = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.GoogleCloudAiplatformV1Endpoint.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1Endpoint(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -43904,6 +49177,10 @@ void main() { ); } } + unittest.expect( + queryMap['updateMask']!.first, + unittest.equals(arg_updateMask), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -43912,20 +49189,27 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleProtobufEmpty()); + final resp = + convert.json.encode(buildGoogleCloudAiplatformV1Endpoint()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.cancel(arg_name, $fields: arg_$fields); - checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); + final response = await res.patch(arg_request, arg_name, + updateMask: arg_updateMask, $fields: arg_$fields); + checkGoogleCloudAiplatformV1Endpoint( + response as api.GoogleCloudAiplatformV1Endpoint); }); - unittest.test('method--delete', () async { + unittest.test('method--predict', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.endpoints.operations; - final arg_name = 'foo'; + final res = api.AiplatformApi(mock).projects.locations.endpoints; + final arg_request = buildGoogleCloudAiplatformV1PredictRequest(); + final arg_endpoint = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.GoogleCloudAiplatformV1PredictRequest.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1PredictRequest(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -43965,20 +49249,29 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleProtobufEmpty()); + final resp = + convert.json.encode(buildGoogleCloudAiplatformV1PredictResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.delete(arg_name, $fields: arg_$fields); - checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); + final response = + await res.predict(arg_request, arg_endpoint, $fields: arg_$fields); + checkGoogleCloudAiplatformV1PredictResponse( + response as api.GoogleCloudAiplatformV1PredictResponse); }); - unittest.test('method--get', () async { + unittest.test('method--predictLongRunning', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.endpoints.operations; - final arg_name = 'foo'; + final res = api.AiplatformApi(mock).projects.locations.endpoints; + final arg_request = + buildGoogleCloudAiplatformV1PredictLongRunningRequest(); + final arg_endpoint = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = + api.GoogleCloudAiplatformV1PredictLongRunningRequest.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1PredictLongRunningRequest(obj); + final path = req.url.path; var 
pathOffset = 0; core.int index; @@ -44021,21 +49314,23 @@ void main() { final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.get(arg_name, $fields: arg_$fields); + final response = await res.predictLongRunning(arg_request, arg_endpoint, + $fields: arg_$fields); checkGoogleLongrunningOperation( response as api.GoogleLongrunningOperation); }); - unittest.test('method--list', () async { + unittest.test('method--rawPredict', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.endpoints.operations; - final arg_name = 'foo'; - final arg_filter = 'foo'; - final arg_pageSize = 42; - final arg_pageToken = 'foo'; + final res = api.AiplatformApi(mock).projects.locations.endpoints; + final arg_request = buildGoogleCloudAiplatformV1RawPredictRequest(); + final arg_endpoint = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.GoogleCloudAiplatformV1RawPredictRequest.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1RawPredictRequest(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -44067,18 +49362,6 @@ void main() { ); } } - unittest.expect( - queryMap['filter']!.first, - unittest.equals(arg_filter), - ); - unittest.expect( - core.int.parse(queryMap['pageSize']!.first), - unittest.equals(arg_pageSize), - ); - unittest.expect( - queryMap['pageToken']!.first, - unittest.equals(arg_pageToken), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -44087,27 +49370,25 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = - convert.json.encode(buildGoogleLongrunningListOperationsResponse()); + final resp = convert.json.encode(buildGoogleApiHttpBody()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.list(arg_name, - filter: arg_filter, - pageSize: arg_pageSize, - pageToken: arg_pageToken, - $fields: arg_$fields); - checkGoogleLongrunningListOperationsResponse( - response as api.GoogleLongrunningListOperationsResponse); + final response = + await res.rawPredict(arg_request, arg_endpoint, $fields: arg_$fields); + checkGoogleApiHttpBody(response as api.GoogleApiHttpBody); }); - unittest.test('method--wait', () async { + unittest.test('method--serverStreamingPredict', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.endpoints.operations; - final arg_name = 'foo'; - final arg_timeout = 'foo'; + final res = api.AiplatformApi(mock).projects.locations.endpoints; + final arg_request = buildGoogleCloudAiplatformV1StreamingPredictRequest(); + final arg_endpoint = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.GoogleCloudAiplatformV1StreamingPredictRequest.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1StreamingPredictRequest(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -44139,10 +49420,6 @@ void main() { ); } } - unittest.expect( - queryMap['timeout']!.first, - unittest.equals(arg_timeout), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -44151,28 +49428,27 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleLongrunningOperation()); + final resp = convert.json + 
.encode(buildGoogleCloudAiplatformV1StreamingPredictResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = - await res.wait(arg_name, timeout: arg_timeout, $fields: arg_$fields); - checkGoogleLongrunningOperation( - response as api.GoogleLongrunningOperation); + final response = await res.serverStreamingPredict( + arg_request, arg_endpoint, + $fields: arg_$fields); + checkGoogleCloudAiplatformV1StreamingPredictResponse( + response as api.GoogleCloudAiplatformV1StreamingPredictResponse); }); - }); - unittest.group('resource-ProjectsLocationsFeatureGroupsResource', () { - unittest.test('method--create', () async { + unittest.test('method--streamGenerateContent', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.featureGroups; - final arg_request = buildGoogleCloudAiplatformV1FeatureGroup(); - final arg_parent = 'foo'; - final arg_featureGroupId = 'foo'; + final res = api.AiplatformApi(mock).projects.locations.endpoints; + final arg_request = buildGoogleCloudAiplatformV1GenerateContentRequest(); + final arg_model = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleCloudAiplatformV1FeatureGroup.fromJson( + final obj = api.GoogleCloudAiplatformV1GenerateContentRequest.fromJson( json as core.Map); - checkGoogleCloudAiplatformV1FeatureGroup(obj); + checkGoogleCloudAiplatformV1GenerateContentRequest(obj); final path = req.url.path; var pathOffset = 0; @@ -44205,10 +49481,6 @@ void main() { ); } } - unittest.expect( - queryMap['featureGroupId']!.first, - unittest.equals(arg_featureGroupId), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -44217,22 +49489,27 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleLongrunningOperation()); + final resp = convert.json + .encode(buildGoogleCloudAiplatformV1GenerateContentResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.create(arg_request, arg_parent, - featureGroupId: arg_featureGroupId, $fields: arg_$fields); - checkGoogleLongrunningOperation( - response as api.GoogleLongrunningOperation); + final response = await res.streamGenerateContent(arg_request, arg_model, + $fields: arg_$fields); + checkGoogleCloudAiplatformV1GenerateContentResponse( + response as api.GoogleCloudAiplatformV1GenerateContentResponse); }); - unittest.test('method--delete', () async { + unittest.test('method--streamRawPredict', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.featureGroups; - final arg_name = 'foo'; - final arg_force = true; + final res = api.AiplatformApi(mock).projects.locations.endpoints; + final arg_request = buildGoogleCloudAiplatformV1StreamRawPredictRequest(); + final arg_endpoint = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.GoogleCloudAiplatformV1StreamRawPredictRequest.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1StreamRawPredictRequest(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -44264,10 +49541,6 @@ void main() { ); } } - unittest.expect( - queryMap['force']!.first, - unittest.equals('$arg_force'), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -44276,21 +49549,25 @@ void main() { final h = { 'content-type': 
'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleLongrunningOperation()); + final resp = convert.json.encode(buildGoogleApiHttpBody()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = - await res.delete(arg_name, force: arg_force, $fields: arg_$fields); - checkGoogleLongrunningOperation( - response as api.GoogleLongrunningOperation); + final response = await res.streamRawPredict(arg_request, arg_endpoint, + $fields: arg_$fields); + checkGoogleApiHttpBody(response as api.GoogleApiHttpBody); }); - unittest.test('method--get', () async { + unittest.test('method--undeployModel', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.featureGroups; - final arg_name = 'foo'; + final res = api.AiplatformApi(mock).projects.locations.endpoints; + final arg_request = buildGoogleCloudAiplatformV1UndeployModelRequest(); + final arg_endpoint = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.GoogleCloudAiplatformV1UndeployModelRequest.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1UndeployModelRequest(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -44330,25 +49607,27 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = - convert.json.encode(buildGoogleCloudAiplatformV1FeatureGroup()); + final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.get(arg_name, $fields: arg_$fields); - checkGoogleCloudAiplatformV1FeatureGroup( - response as api.GoogleCloudAiplatformV1FeatureGroup); + final response = await res.undeployModel(arg_request, arg_endpoint, + $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); }); - unittest.test('method--list', () async { + unittest.test('method--update', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.featureGroups; - final arg_parent = 'foo'; - final arg_filter = 'foo'; - final arg_orderBy = 'foo'; - final arg_pageSize = 42; - final arg_pageToken = 'foo'; + final res = api.AiplatformApi(mock).projects.locations.endpoints; + final arg_request = + buildGoogleCloudAiplatformV1UpdateEndpointLongRunningRequest(); + final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.GoogleCloudAiplatformV1UpdateEndpointLongRunningRequest + .fromJson(json as core.Map); + checkGoogleCloudAiplatformV1UpdateEndpointLongRunningRequest(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -44380,22 +49659,6 @@ void main() { ); } } - unittest.expect( - queryMap['filter']!.first, - unittest.equals(arg_filter), - ); - unittest.expect( - queryMap['orderBy']!.first, - unittest.equals(arg_orderBy), - ); - unittest.expect( - core.int.parse(queryMap['pageSize']!.first), - unittest.equals(arg_pageSize), - ); - unittest.expect( - queryMap['pageToken']!.first, - unittest.equals(arg_pageToken), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -44404,31 +49667,27 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json - .encode(buildGoogleCloudAiplatformV1ListFeatureGroupsResponse()); + final resp = 
convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.list(arg_parent, - filter: arg_filter, - orderBy: arg_orderBy, - pageSize: arg_pageSize, - pageToken: arg_pageToken, - $fields: arg_$fields); - checkGoogleCloudAiplatformV1ListFeatureGroupsResponse( - response as api.GoogleCloudAiplatformV1ListFeatureGroupsResponse); + final response = + await res.update(arg_request, arg_name, $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); }); + }); - unittest.test('method--patch', () async { + unittest.group('resource-ProjectsLocationsEndpointsChatResource', () { + unittest.test('method--completions', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.featureGroups; - final arg_request = buildGoogleCloudAiplatformV1FeatureGroup(); - final arg_name = 'foo'; - final arg_updateMask = 'foo'; + final res = api.AiplatformApi(mock).projects.locations.endpoints.chat; + final arg_request = buildGoogleApiHttpBody(); + final arg_endpoint = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleCloudAiplatformV1FeatureGroup.fromJson( + final obj = api.GoogleApiHttpBody.fromJson( json as core.Map); - checkGoogleCloudAiplatformV1FeatureGroup(obj); + checkGoogleApiHttpBody(obj); final path = req.url.path; var pathOffset = 0; @@ -44461,10 +49720,6 @@ void main() { ); } } - unittest.expect( - queryMap['updateMask']!.first, - unittest.equals(arg_updateMask), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -44473,31 +49728,23 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleLongrunningOperation()); + final resp = convert.json.encode(buildGoogleApiHttpBody()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.patch(arg_request, arg_name, - updateMask: arg_updateMask, $fields: arg_$fields); - checkGoogleLongrunningOperation( - response as api.GoogleLongrunningOperation); + final response = await res.completions(arg_request, arg_endpoint, + $fields: arg_$fields); + checkGoogleApiHttpBody(response as api.GoogleApiHttpBody); }); }); - unittest.group('resource-ProjectsLocationsFeatureGroupsFeaturesResource', () { - unittest.test('method--batchCreate', () async { + unittest.group('resource-ProjectsLocationsEndpointsOperationsResource', () { + unittest.test('method--cancel', () async { final mock = HttpServerMock(); final res = - api.AiplatformApi(mock).projects.locations.featureGroups.features; - final arg_request = - buildGoogleCloudAiplatformV1BatchCreateFeaturesRequest(); - final arg_parent = 'foo'; + api.AiplatformApi(mock).projects.locations.endpoints.operations; + final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = - api.GoogleCloudAiplatformV1BatchCreateFeaturesRequest.fromJson( - json as core.Map); - checkGoogleCloudAiplatformV1BatchCreateFeaturesRequest(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -44537,28 +49784,20 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleLongrunningOperation()); + final resp = convert.json.encode(buildGoogleProtobufEmpty()); return async.Future.value(stringResponse(200, h, 
resp)); }), true); - final response = - await res.batchCreate(arg_request, arg_parent, $fields: arg_$fields); - checkGoogleLongrunningOperation( - response as api.GoogleLongrunningOperation); + final response = await res.cancel(arg_name, $fields: arg_$fields); + checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); }); - unittest.test('method--create', () async { + unittest.test('method--delete', () async { final mock = HttpServerMock(); final res = - api.AiplatformApi(mock).projects.locations.featureGroups.features; - final arg_request = buildGoogleCloudAiplatformV1Feature(); - final arg_parent = 'foo'; - final arg_featureId = 'foo'; + api.AiplatformApi(mock).projects.locations.endpoints.operations; + final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleCloudAiplatformV1Feature.fromJson( - json as core.Map); - checkGoogleCloudAiplatformV1Feature(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -44590,10 +49829,6 @@ void main() { ); } } - unittest.expect( - queryMap['featureId']!.first, - unittest.equals(arg_featureId), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -44602,19 +49837,17 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleLongrunningOperation()); + final resp = convert.json.encode(buildGoogleProtobufEmpty()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.create(arg_request, arg_parent, - featureId: arg_featureId, $fields: arg_$fields); - checkGoogleLongrunningOperation( - response as api.GoogleLongrunningOperation); + final response = await res.delete(arg_name, $fields: arg_$fields); + checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); }); - unittest.test('method--delete', () async { + unittest.test('method--get', () async { final mock = HttpServerMock(); final res = - api.AiplatformApi(mock).projects.locations.featureGroups.features; + api.AiplatformApi(mock).projects.locations.endpoints.operations; final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -44660,16 +49893,19 @@ void main() { final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.delete(arg_name, $fields: arg_$fields); + final response = await res.get(arg_name, $fields: arg_$fields); checkGoogleLongrunningOperation( response as api.GoogleLongrunningOperation); }); - unittest.test('method--get', () async { + unittest.test('method--list', () async { final mock = HttpServerMock(); final res = - api.AiplatformApi(mock).projects.locations.featureGroups.features; + api.AiplatformApi(mock).projects.locations.endpoints.operations; final arg_name = 'foo'; + final arg_filter = 'foo'; + final arg_pageSize = 42; + final arg_pageToken = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -44703,6 +49939,18 @@ void main() { ); } } + unittest.expect( + queryMap['filter']!.first, + unittest.equals(arg_filter), + ); + unittest.expect( + core.int.parse(queryMap['pageSize']!.first), + unittest.equals(arg_pageSize), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ 
-44711,25 +49959,25 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleCloudAiplatformV1Feature()); + final resp = + convert.json.encode(buildGoogleLongrunningListOperationsResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.get(arg_name, $fields: arg_$fields); - checkGoogleCloudAiplatformV1Feature( - response as api.GoogleCloudAiplatformV1Feature); + final response = await res.list(arg_name, + filter: arg_filter, + pageSize: arg_pageSize, + pageToken: arg_pageToken, + $fields: arg_$fields); + checkGoogleLongrunningListOperationsResponse( + response as api.GoogleLongrunningListOperationsResponse); }); - unittest.test('method--list', () async { + unittest.test('method--wait', () async { final mock = HttpServerMock(); final res = - api.AiplatformApi(mock).projects.locations.featureGroups.features; - final arg_parent = 'foo'; - final arg_filter = 'foo'; - final arg_latestStatsCount = 42; - final arg_orderBy = 'foo'; - final arg_pageSize = 42; - final arg_pageToken = 'foo'; - final arg_readMask = 'foo'; + api.AiplatformApi(mock).projects.locations.endpoints.operations; + final arg_name = 'foo'; + final arg_timeout = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -44764,28 +50012,8 @@ void main() { } } unittest.expect( - queryMap['filter']!.first, - unittest.equals(arg_filter), - ); - unittest.expect( - core.int.parse(queryMap['latestStatsCount']!.first), - unittest.equals(arg_latestStatsCount), - ); - unittest.expect( - queryMap['orderBy']!.first, - unittest.equals(arg_orderBy), - ); - unittest.expect( - core.int.parse(queryMap['pageSize']!.first), - unittest.equals(arg_pageSize), - ); - unittest.expect( - queryMap['pageToken']!.first, - unittest.equals(arg_pageToken), - ); - unittest.expect( - queryMap['readMask']!.first, - unittest.equals(arg_readMask), + queryMap['timeout']!.first, + unittest.equals(arg_timeout), ); unittest.expect( queryMap['fields']!.first, @@ -44795,34 +50023,28 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json - .encode(buildGoogleCloudAiplatformV1ListFeaturesResponse()); + final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.list(arg_parent, - filter: arg_filter, - latestStatsCount: arg_latestStatsCount, - orderBy: arg_orderBy, - pageSize: arg_pageSize, - pageToken: arg_pageToken, - readMask: arg_readMask, - $fields: arg_$fields); - checkGoogleCloudAiplatformV1ListFeaturesResponse( - response as api.GoogleCloudAiplatformV1ListFeaturesResponse); + final response = + await res.wait(arg_name, timeout: arg_timeout, $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); }); + }); - unittest.test('method--patch', () async { + unittest.group('resource-ProjectsLocationsFeatureGroupsResource', () { + unittest.test('method--create', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.featureGroups.features; - final arg_request = buildGoogleCloudAiplatformV1Feature(); - final arg_name = 'foo'; - final arg_updateMask = 'foo'; + final res = api.AiplatformApi(mock).projects.locations.featureGroups; + final arg_request = buildGoogleCloudAiplatformV1FeatureGroup(); + final arg_parent = 'foo'; + final 
arg_featureGroupId = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleCloudAiplatformV1Feature.fromJson( + final obj = api.GoogleCloudAiplatformV1FeatureGroup.fromJson( json as core.Map); - checkGoogleCloudAiplatformV1Feature(obj); + checkGoogleCloudAiplatformV1FeatureGroup(obj); final path = req.url.path; var pathOffset = 0; @@ -44856,8 +50078,8 @@ void main() { } } unittest.expect( - queryMap['updateMask']!.first, - unittest.equals(arg_updateMask), + queryMap['featureGroupId']!.first, + unittest.equals(arg_featureGroupId), ); unittest.expect( queryMap['fields']!.first, @@ -44870,24 +50092,17 @@ void main() { final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.patch(arg_request, arg_name, - updateMask: arg_updateMask, $fields: arg_$fields); + final response = await res.create(arg_request, arg_parent, + featureGroupId: arg_featureGroupId, $fields: arg_$fields); checkGoogleLongrunningOperation( response as api.GoogleLongrunningOperation); }); - }); - unittest.group( - 'resource-ProjectsLocationsFeatureGroupsFeaturesOperationsResource', () { unittest.test('method--delete', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .featureGroups - .features - .operations; + final res = api.AiplatformApi(mock).projects.locations.featureGroups; final arg_name = 'foo'; + final arg_force = true; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -44921,6 +50136,10 @@ void main() { ); } } + unittest.expect( + queryMap['force']!.first, + unittest.equals('$arg_force'), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -44929,21 +50148,18 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleProtobufEmpty()); + final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.delete(arg_name, $fields: arg_$fields); - checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); + final response = + await res.delete(arg_name, force: arg_force, $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); }); unittest.test('method--get', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .featureGroups - .features - .operations; + final res = api.AiplatformApi(mock).projects.locations.featureGroups; final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -44986,24 +50202,21 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleLongrunningOperation()); + final resp = + convert.json.encode(buildGoogleCloudAiplatformV1FeatureGroup()); return async.Future.value(stringResponse(200, h, resp)); }), true); final response = await res.get(arg_name, $fields: arg_$fields); - checkGoogleLongrunningOperation( - response as api.GoogleLongrunningOperation); + checkGoogleCloudAiplatformV1FeatureGroup( + response as api.GoogleCloudAiplatformV1FeatureGroup); }); - unittest.test('method--listWait', () async { + unittest.test('method--list', () async { final mock = 
HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .featureGroups - .features - .operations; - final arg_name = 'foo'; + final res = api.AiplatformApi(mock).projects.locations.featureGroups; + final arg_parent = 'foo'; final arg_filter = 'foo'; + final arg_orderBy = 'foo'; final arg_pageSize = 42; final arg_pageToken = 'foo'; final arg_$fields = 'foo'; @@ -45043,6 +50256,10 @@ void main() { queryMap['filter']!.first, unittest.equals(arg_filter), ); + unittest.expect( + queryMap['orderBy']!.first, + unittest.equals(arg_orderBy), + ); unittest.expect( core.int.parse(queryMap['pageSize']!.first), unittest.equals(arg_pageSize), @@ -45059,31 +50276,32 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = - convert.json.encode(buildGoogleLongrunningListOperationsResponse()); + final resp = convert.json + .encode(buildGoogleCloudAiplatformV1ListFeatureGroupsResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.listWait(arg_name, + final response = await res.list(arg_parent, filter: arg_filter, + orderBy: arg_orderBy, pageSize: arg_pageSize, pageToken: arg_pageToken, $fields: arg_$fields); - checkGoogleLongrunningListOperationsResponse( - response as api.GoogleLongrunningListOperationsResponse); + checkGoogleCloudAiplatformV1ListFeatureGroupsResponse( + response as api.GoogleCloudAiplatformV1ListFeatureGroupsResponse); }); - unittest.test('method--wait', () async { + unittest.test('method--patch', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .featureGroups - .features - .operations; + final res = api.AiplatformApi(mock).projects.locations.featureGroups; + final arg_request = buildGoogleCloudAiplatformV1FeatureGroup(); final arg_name = 'foo'; - final arg_timeout = 'foo'; + final arg_updateMask = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.GoogleCloudAiplatformV1FeatureGroup.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1FeatureGroup(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -45116,8 +50334,8 @@ void main() { } } unittest.expect( - queryMap['timeout']!.first, - unittest.equals(arg_timeout), + queryMap['updateMask']!.first, + unittest.equals(arg_updateMask), ); unittest.expect( queryMap['fields']!.first, @@ -45130,22 +50348,28 @@ void main() { final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = - await res.wait(arg_name, timeout: arg_timeout, $fields: arg_$fields); + final response = await res.patch(arg_request, arg_name, + updateMask: arg_updateMask, $fields: arg_$fields); checkGoogleLongrunningOperation( response as api.GoogleLongrunningOperation); }); }); - unittest.group('resource-ProjectsLocationsFeatureGroupsOperationsResource', - () { - unittest.test('method--delete', () async { + unittest.group('resource-ProjectsLocationsFeatureGroupsFeaturesResource', () { + unittest.test('method--batchCreate', () async { final mock = HttpServerMock(); final res = - api.AiplatformApi(mock).projects.locations.featureGroups.operations; - final arg_name = 'foo'; + api.AiplatformApi(mock).projects.locations.featureGroups.features; + final arg_request = + buildGoogleCloudAiplatformV1BatchCreateFeaturesRequest(); + final arg_parent = 'foo'; final arg_$fields = 'foo'; 
mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = + api.GoogleCloudAiplatformV1BatchCreateFeaturesRequest.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1BatchCreateFeaturesRequest(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -45185,20 +50409,28 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleProtobufEmpty()); + final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.delete(arg_name, $fields: arg_$fields); - checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); + final response = + await res.batchCreate(arg_request, arg_parent, $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); }); - unittest.test('method--get', () async { + unittest.test('method--create', () async { final mock = HttpServerMock(); final res = - api.AiplatformApi(mock).projects.locations.featureGroups.operations; - final arg_name = 'foo'; + api.AiplatformApi(mock).projects.locations.featureGroups.features; + final arg_request = buildGoogleCloudAiplatformV1Feature(); + final arg_parent = 'foo'; + final arg_featureId = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.GoogleCloudAiplatformV1Feature.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1Feature(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -45230,6 +50462,10 @@ void main() { ); } } + unittest.expect( + queryMap['featureId']!.first, + unittest.equals(arg_featureId), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -45241,19 +50477,17 @@ void main() { final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.get(arg_name, $fields: arg_$fields); + final response = await res.create(arg_request, arg_parent, + featureId: arg_featureId, $fields: arg_$fields); checkGoogleLongrunningOperation( response as api.GoogleLongrunningOperation); }); - unittest.test('method--listWait', () async { + unittest.test('method--delete', () async { final mock = HttpServerMock(); final res = - api.AiplatformApi(mock).projects.locations.featureGroups.operations; + api.AiplatformApi(mock).projects.locations.featureGroups.features; final arg_name = 'foo'; - final arg_filter = 'foo'; - final arg_pageSize = 42; - final arg_pageToken = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -45287,18 +50521,6 @@ void main() { ); } } - unittest.expect( - queryMap['filter']!.first, - unittest.equals(arg_filter), - ); - unittest.expect( - core.int.parse(queryMap['pageSize']!.first), - unittest.equals(arg_pageSize), - ); - unittest.expect( - queryMap['pageToken']!.first, - unittest.equals(arg_pageToken), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -45307,25 +50529,19 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = - convert.json.encode(buildGoogleLongrunningListOperationsResponse()); + final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.listWait(arg_name, - filter: 
arg_filter, - pageSize: arg_pageSize, - pageToken: arg_pageToken, - $fields: arg_$fields); - checkGoogleLongrunningListOperationsResponse( - response as api.GoogleLongrunningListOperationsResponse); + final response = await res.delete(arg_name, $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); }); - unittest.test('method--wait', () async { + unittest.test('method--get', () async { final mock = HttpServerMock(); final res = - api.AiplatformApi(mock).projects.locations.featureGroups.operations; + api.AiplatformApi(mock).projects.locations.featureGroups.features; final arg_name = 'foo'; - final arg_timeout = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -45359,10 +50575,6 @@ void main() { ); } } - unittest.expect( - queryMap['timeout']!.first, - unittest.equals(arg_timeout), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -45371,30 +50583,27 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleLongrunningOperation()); + final resp = convert.json.encode(buildGoogleCloudAiplatformV1Feature()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = - await res.wait(arg_name, timeout: arg_timeout, $fields: arg_$fields); - checkGoogleLongrunningOperation( - response as api.GoogleLongrunningOperation); + final response = await res.get(arg_name, $fields: arg_$fields); + checkGoogleCloudAiplatformV1Feature( + response as api.GoogleCloudAiplatformV1Feature); }); - }); - unittest.group('resource-ProjectsLocationsFeatureOnlineStoresResource', () { - unittest.test('method--create', () async { + unittest.test('method--list', () async { final mock = HttpServerMock(); final res = - api.AiplatformApi(mock).projects.locations.featureOnlineStores; - final arg_request = buildGoogleCloudAiplatformV1FeatureOnlineStore(); + api.AiplatformApi(mock).projects.locations.featureGroups.features; final arg_parent = 'foo'; - final arg_featureOnlineStoreId = 'foo'; + final arg_filter = 'foo'; + final arg_latestStatsCount = 42; + final arg_orderBy = 'foo'; + final arg_pageSize = 42; + final arg_pageToken = 'foo'; + final arg_readMask = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleCloudAiplatformV1FeatureOnlineStore.fromJson( - json as core.Map); - checkGoogleCloudAiplatformV1FeatureOnlineStore(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -45427,8 +50636,28 @@ void main() { } } unittest.expect( - queryMap['featureOnlineStoreId']!.first, - unittest.equals(arg_featureOnlineStoreId), + queryMap['filter']!.first, + unittest.equals(arg_filter), + ); + unittest.expect( + core.int.parse(queryMap['latestStatsCount']!.first), + unittest.equals(arg_latestStatsCount), + ); + unittest.expect( + queryMap['orderBy']!.first, + unittest.equals(arg_orderBy), + ); + unittest.expect( + core.int.parse(queryMap['pageSize']!.first), + unittest.equals(arg_pageSize), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), + ); + unittest.expect( + queryMap['readMask']!.first, + unittest.equals(arg_readMask), ); unittest.expect( queryMap['fields']!.first, @@ -45438,23 +50667,35 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleLongrunningOperation()); + 
final resp = convert.json + .encode(buildGoogleCloudAiplatformV1ListFeaturesResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.create(arg_request, arg_parent, - featureOnlineStoreId: arg_featureOnlineStoreId, $fields: arg_$fields); - checkGoogleLongrunningOperation( - response as api.GoogleLongrunningOperation); + final response = await res.list(arg_parent, + filter: arg_filter, + latestStatsCount: arg_latestStatsCount, + orderBy: arg_orderBy, + pageSize: arg_pageSize, + pageToken: arg_pageToken, + readMask: arg_readMask, + $fields: arg_$fields); + checkGoogleCloudAiplatformV1ListFeaturesResponse( + response as api.GoogleCloudAiplatformV1ListFeaturesResponse); }); - unittest.test('method--delete', () async { + unittest.test('method--patch', () async { final mock = HttpServerMock(); final res = - api.AiplatformApi(mock).projects.locations.featureOnlineStores; + api.AiplatformApi(mock).projects.locations.featureGroups.features; + final arg_request = buildGoogleCloudAiplatformV1Feature(); final arg_name = 'foo'; - final arg_force = true; + final arg_updateMask = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.GoogleCloudAiplatformV1Feature.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1Feature(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -45487,8 +50728,8 @@ void main() { } } unittest.expect( - queryMap['force']!.first, - unittest.equals('$arg_force'), + queryMap['updateMask']!.first, + unittest.equals(arg_updateMask), ); unittest.expect( queryMap['fields']!.first, @@ -45501,16 +50742,23 @@ void main() { final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = - await res.delete(arg_name, force: arg_force, $fields: arg_$fields); + final response = await res.patch(arg_request, arg_name, + updateMask: arg_updateMask, $fields: arg_$fields); checkGoogleLongrunningOperation( response as api.GoogleLongrunningOperation); }); + }); - unittest.test('method--get', () async { + unittest.group( + 'resource-ProjectsLocationsFeatureGroupsFeaturesOperationsResource', () { + unittest.test('method--delete', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.featureOnlineStores; + final res = api.AiplatformApi(mock) + .projects + .locations + .featureGroups + .features + .operations; final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -45553,21 +50801,22 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json - .encode(buildGoogleCloudAiplatformV1FeatureOnlineStore()); + final resp = convert.json.encode(buildGoogleProtobufEmpty()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.get(arg_name, $fields: arg_$fields); - checkGoogleCloudAiplatformV1FeatureOnlineStore( - response as api.GoogleCloudAiplatformV1FeatureOnlineStore); + final response = await res.delete(arg_name, $fields: arg_$fields); + checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); }); - unittest.test('method--getIamPolicy', () async { + unittest.test('method--get', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.featureOnlineStores; - final arg_resource = 'foo'; - final 
arg_options_requestedPolicyVersion = 42; + final res = api.AiplatformApi(mock) + .projects + .locations + .featureGroups + .features + .operations; + final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -45601,10 +50850,6 @@ void main() { ); } } - unittest.expect( - core.int.parse(queryMap['options.requestedPolicyVersion']!.first), - unittest.equals(arg_options_requestedPolicyVersion), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -45613,22 +50858,24 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleIamV1Policy()); + final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.getIamPolicy(arg_resource, - options_requestedPolicyVersion: arg_options_requestedPolicyVersion, - $fields: arg_$fields); - checkGoogleIamV1Policy(response as api.GoogleIamV1Policy); + final response = await res.get(arg_name, $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); }); - unittest.test('method--list', () async { + unittest.test('method--listWait', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.featureOnlineStores; - final arg_parent = 'foo'; + final res = api.AiplatformApi(mock) + .projects + .locations + .featureGroups + .features + .operations; + final arg_name = 'foo'; final arg_filter = 'foo'; - final arg_orderBy = 'foo'; final arg_pageSize = 42; final arg_pageToken = 'foo'; final arg_$fields = 'foo'; @@ -45668,10 +50915,6 @@ void main() { queryMap['filter']!.first, unittest.equals(arg_filter), ); - unittest.expect( - queryMap['orderBy']!.first, - unittest.equals(arg_orderBy), - ); unittest.expect( core.int.parse(queryMap['pageSize']!.first), unittest.equals(arg_pageSize), @@ -45688,33 +50931,31 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode( - buildGoogleCloudAiplatformV1ListFeatureOnlineStoresResponse()); + final resp = + convert.json.encode(buildGoogleLongrunningListOperationsResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.list(arg_parent, + final response = await res.listWait(arg_name, filter: arg_filter, - orderBy: arg_orderBy, pageSize: arg_pageSize, pageToken: arg_pageToken, $fields: arg_$fields); - checkGoogleCloudAiplatformV1ListFeatureOnlineStoresResponse(response - as api.GoogleCloudAiplatformV1ListFeatureOnlineStoresResponse); + checkGoogleLongrunningListOperationsResponse( + response as api.GoogleLongrunningListOperationsResponse); }); - unittest.test('method--patch', () async { + unittest.test('method--wait', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.featureOnlineStores; - final arg_request = buildGoogleCloudAiplatformV1FeatureOnlineStore(); + final res = api.AiplatformApi(mock) + .projects + .locations + .featureGroups + .features + .operations; final arg_name = 'foo'; - final arg_updateMask = 'foo'; + final arg_timeout = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleCloudAiplatformV1FeatureOnlineStore.fromJson( - json as core.Map); - checkGoogleCloudAiplatformV1FeatureOnlineStore(obj); - final path = 
req.url.path; var pathOffset = 0; core.int index; @@ -45747,8 +50988,8 @@ void main() { } } unittest.expect( - queryMap['updateMask']!.first, - unittest.equals(arg_updateMask), + queryMap['timeout']!.first, + unittest.equals(arg_timeout), ); unittest.expect( queryMap['fields']!.first, @@ -45761,24 +51002,22 @@ void main() { final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.patch(arg_request, arg_name, - updateMask: arg_updateMask, $fields: arg_$fields); + final response = + await res.wait(arg_name, timeout: arg_timeout, $fields: arg_$fields); checkGoogleLongrunningOperation( response as api.GoogleLongrunningOperation); }); + }); - unittest.test('method--setIamPolicy', () async { + unittest.group('resource-ProjectsLocationsFeatureGroupsOperationsResource', + () { + unittest.test('method--delete', () async { final mock = HttpServerMock(); final res = - api.AiplatformApi(mock).projects.locations.featureOnlineStores; - final arg_request = buildGoogleIamV1SetIamPolicyRequest(); - final arg_resource = 'foo'; + api.AiplatformApi(mock).projects.locations.featureGroups.operations; + final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleIamV1SetIamPolicyRequest.fromJson( - json as core.Map); - checkGoogleIamV1SetIamPolicyRequest(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -45818,20 +51057,18 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleIamV1Policy()); + final resp = convert.json.encode(buildGoogleProtobufEmpty()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.setIamPolicy(arg_request, arg_resource, - $fields: arg_$fields); - checkGoogleIamV1Policy(response as api.GoogleIamV1Policy); + final response = await res.delete(arg_name, $fields: arg_$fields); + checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); }); - unittest.test('method--testIamPermissions', () async { + unittest.test('method--get', () async { final mock = HttpServerMock(); final res = - api.AiplatformApi(mock).projects.locations.featureOnlineStores; - final arg_resource = 'foo'; - final arg_permissions = buildUnnamed388(); + api.AiplatformApi(mock).projects.locations.featureGroups.operations; + final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -45865,10 +51102,6 @@ void main() { ); } } - unittest.expect( - queryMap['permissions']!, - unittest.equals(arg_permissions), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -45877,36 +51110,24 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = - convert.json.encode(buildGoogleIamV1TestIamPermissionsResponse()); + final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.testIamPermissions(arg_resource, - permissions: arg_permissions, $fields: arg_$fields); - checkGoogleIamV1TestIamPermissionsResponse( - response as api.GoogleIamV1TestIamPermissionsResponse); + final response = await res.get(arg_name, $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); }); - }); - unittest.group( - 
'resource-ProjectsLocationsFeatureOnlineStoresFeatureViewsResource', () { - unittest.test('method--create', () async { + unittest.test('method--listWait', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .featureOnlineStores - .featureViews; - final arg_request = buildGoogleCloudAiplatformV1FeatureView(); - final arg_parent = 'foo'; - final arg_featureViewId = 'foo'; - final arg_runSyncImmediately = true; + final res = + api.AiplatformApi(mock).projects.locations.featureGroups.operations; + final arg_name = 'foo'; + final arg_filter = 'foo'; + final arg_pageSize = 42; + final arg_pageToken = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleCloudAiplatformV1FeatureView.fromJson( - json as core.Map); - checkGoogleCloudAiplatformV1FeatureView(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -45939,12 +51160,16 @@ void main() { } } unittest.expect( - queryMap['featureViewId']!.first, - unittest.equals(arg_featureViewId), + queryMap['filter']!.first, + unittest.equals(arg_filter), ); unittest.expect( - queryMap['runSyncImmediately']!.first, - unittest.equals('$arg_runSyncImmediately'), + core.int.parse(queryMap['pageSize']!.first), + unittest.equals(arg_pageSize), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), ); unittest.expect( queryMap['fields']!.first, @@ -45954,25 +51179,25 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleLongrunningOperation()); + final resp = + convert.json.encode(buildGoogleLongrunningListOperationsResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.create(arg_request, arg_parent, - featureViewId: arg_featureViewId, - runSyncImmediately: arg_runSyncImmediately, + final response = await res.listWait(arg_name, + filter: arg_filter, + pageSize: arg_pageSize, + pageToken: arg_pageToken, $fields: arg_$fields); - checkGoogleLongrunningOperation( - response as api.GoogleLongrunningOperation); + checkGoogleLongrunningListOperationsResponse( + response as api.GoogleLongrunningListOperationsResponse); }); - unittest.test('method--delete', () async { + unittest.test('method--wait', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .featureOnlineStores - .featureViews; + final res = + api.AiplatformApi(mock).projects.locations.featureGroups.operations; final arg_name = 'foo'; + final arg_timeout = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -46006,6 +51231,10 @@ void main() { ); } } + unittest.expect( + queryMap['timeout']!.first, + unittest.equals(arg_timeout), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -46017,27 +51246,26 @@ void main() { final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.delete(arg_name, $fields: arg_$fields); + final response = + await res.wait(arg_name, timeout: arg_timeout, $fields: arg_$fields); checkGoogleLongrunningOperation( response as api.GoogleLongrunningOperation); }); + }); - unittest.test('method--fetchFeatureValues', () async { + unittest.group('resource-ProjectsLocationsFeatureOnlineStoresResource', () { + 
unittest.test('method--create', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .featureOnlineStores - .featureViews; - final arg_request = - buildGoogleCloudAiplatformV1FetchFeatureValuesRequest(); - final arg_featureView = 'foo'; + final res = + api.AiplatformApi(mock).projects.locations.featureOnlineStores; + final arg_request = buildGoogleCloudAiplatformV1FeatureOnlineStore(); + final arg_parent = 'foo'; + final arg_featureOnlineStoreId = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = - api.GoogleCloudAiplatformV1FetchFeatureValuesRequest.fromJson( - json as core.Map); - checkGoogleCloudAiplatformV1FetchFeatureValuesRequest(obj); + final obj = api.GoogleCloudAiplatformV1FeatureOnlineStore.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1FeatureOnlineStore(obj); final path = req.url.path; var pathOffset = 0; @@ -46070,6 +51298,10 @@ void main() { ); } } + unittest.expect( + queryMap['featureOnlineStoreId']!.first, + unittest.equals(arg_featureOnlineStoreId), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -46078,25 +51310,21 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json - .encode(buildGoogleCloudAiplatformV1FetchFeatureValuesResponse()); + final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.fetchFeatureValues( - arg_request, arg_featureView, - $fields: arg_$fields); - checkGoogleCloudAiplatformV1FetchFeatureValuesResponse( - response as api.GoogleCloudAiplatformV1FetchFeatureValuesResponse); + final response = await res.create(arg_request, arg_parent, + featureOnlineStoreId: arg_featureOnlineStoreId, $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); }); - unittest.test('method--get', () async { + unittest.test('method--delete', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .featureOnlineStores - .featureViews; + final res = + api.AiplatformApi(mock).projects.locations.featureOnlineStores; final arg_name = 'foo'; + final arg_force = true; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -46130,6 +51358,10 @@ void main() { ); } } + unittest.expect( + queryMap['force']!.first, + unittest.equals('$arg_force'), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -46138,22 +51370,74 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = - convert.json.encode(buildGoogleCloudAiplatformV1FeatureView()); + final resp = convert.json.encode(buildGoogleLongrunningOperation()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = + await res.delete(arg_name, force: arg_force, $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); + }); + + unittest.test('method--get', () async { + final mock = HttpServerMock(); + final res = + api.AiplatformApi(mock).projects.locations.featureOnlineStores; + final arg_name = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + 
unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json + .encode(buildGoogleCloudAiplatformV1FeatureOnlineStore()); return async.Future.value(stringResponse(200, h, resp)); }), true); final response = await res.get(arg_name, $fields: arg_$fields); - checkGoogleCloudAiplatformV1FeatureView( - response as api.GoogleCloudAiplatformV1FeatureView); + checkGoogleCloudAiplatformV1FeatureOnlineStore( + response as api.GoogleCloudAiplatformV1FeatureOnlineStore); }); unittest.test('method--getIamPolicy', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .featureOnlineStores - .featureViews; + final res = + api.AiplatformApi(mock).projects.locations.featureOnlineStores; final arg_resource = 'foo'; final arg_options_requestedPolicyVersion = 42; final arg_$fields = 'foo'; @@ -46212,11 +51496,8 @@ void main() { unittest.test('method--list', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .featureOnlineStores - .featureViews; + final res = + api.AiplatformApi(mock).projects.locations.featureOnlineStores; final arg_parent = 'foo'; final arg_filter = 'foo'; final arg_orderBy = 'foo'; @@ -46279,8 +51560,8 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json - .encode(buildGoogleCloudAiplatformV1ListFeatureViewsResponse()); + final resp = convert.json.encode( + buildGoogleCloudAiplatformV1ListFeatureOnlineStoresResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); final response = await res.list(arg_parent, @@ -46289,25 +51570,22 @@ void main() { pageSize: arg_pageSize, pageToken: arg_pageToken, $fields: arg_$fields); - checkGoogleCloudAiplatformV1ListFeatureViewsResponse( - response as api.GoogleCloudAiplatformV1ListFeatureViewsResponse); + checkGoogleCloudAiplatformV1ListFeatureOnlineStoresResponse(response + as api.GoogleCloudAiplatformV1ListFeatureOnlineStoresResponse); }); unittest.test('method--patch', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .featureOnlineStores - .featureViews; - final arg_request = buildGoogleCloudAiplatformV1FeatureView(); + final res = + api.AiplatformApi(mock).projects.locations.featureOnlineStores; + final arg_request = buildGoogleCloudAiplatformV1FeatureOnlineStore(); final arg_name = 'foo'; final arg_updateMask = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleCloudAiplatformV1FeatureView.fromJson( + final obj = api.GoogleCloudAiplatformV1FeatureOnlineStore.fromJson( json as core.Map); - 
checkGoogleCloudAiplatformV1FeatureView(obj); + checkGoogleCloudAiplatformV1FeatureOnlineStore(obj); final path = req.url.path; var pathOffset = 0; @@ -46361,80 +51639,10 @@ void main() { response as api.GoogleLongrunningOperation); }); - unittest.test('method--searchNearestEntities', () async { - final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .featureOnlineStores - .featureViews; - final arg_request = - buildGoogleCloudAiplatformV1SearchNearestEntitiesRequest(); - final arg_featureView = 'foo'; - final arg_$fields = 'foo'; - mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = - api.GoogleCloudAiplatformV1SearchNearestEntitiesRequest.fromJson( - json as core.Map); - checkGoogleCloudAiplatformV1SearchNearestEntitiesRequest(obj); - - final path = req.url.path; - var pathOffset = 0; - core.int index; - core.String subPart; - unittest.expect( - path.substring(pathOffset, pathOffset + 1), - unittest.equals('/'), - ); - pathOffset += 1; - unittest.expect( - path.substring(pathOffset, pathOffset + 3), - unittest.equals('v1/'), - ); - pathOffset += 3; - // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; - - final query = req.url.query; - var queryOffset = 0; - final queryMap = >{}; - void addQueryParam(core.String n, core.String v) => - queryMap.putIfAbsent(n, () => []).add(v); - - if (query.isNotEmpty) { - for (var part in query.split('&')) { - final keyValue = part.split('='); - addQueryParam( - core.Uri.decodeQueryComponent(keyValue[0]), - core.Uri.decodeQueryComponent(keyValue[1]), - ); - } - } - unittest.expect( - queryMap['fields']!.first, - unittest.equals(arg_$fields), - ); - - final h = { - 'content-type': 'application/json; charset=utf-8', - }; - final resp = convert.json.encode( - buildGoogleCloudAiplatformV1SearchNearestEntitiesResponse()); - return async.Future.value(stringResponse(200, h, resp)); - }), true); - final response = await res.searchNearestEntities( - arg_request, arg_featureView, - $fields: arg_$fields); - checkGoogleCloudAiplatformV1SearchNearestEntitiesResponse( - response as api.GoogleCloudAiplatformV1SearchNearestEntitiesResponse); - }); - unittest.test('method--setIamPolicy', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .featureOnlineStores - .featureViews; + final res = + api.AiplatformApi(mock).projects.locations.featureOnlineStores; final arg_request = buildGoogleIamV1SetIamPolicyRequest(); final arg_resource = 'foo'; final arg_$fields = 'foo'; @@ -46490,21 +51698,14 @@ void main() { checkGoogleIamV1Policy(response as api.GoogleIamV1Policy); }); - unittest.test('method--sync', () async { + unittest.test('method--testIamPermissions', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .featureOnlineStores - .featureViews; - final arg_request = buildGoogleCloudAiplatformV1SyncFeatureViewRequest(); - final arg_featureView = 'foo'; + final res = + api.AiplatformApi(mock).projects.locations.featureOnlineStores; + final arg_resource = 'foo'; + final arg_permissions = buildUnnamed428(); final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleCloudAiplatformV1SyncFeatureViewRequest.fromJson( - json as core.Map); - checkGoogleCloudAiplatformV1SyncFeatureViewRequest(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -46536,6 +51737,10 @@ void main() { ); } } + 
unittest.expect( + queryMap['permissions']!, + unittest.equals(arg_permissions), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -46544,27 +51749,36 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json - .encode(buildGoogleCloudAiplatformV1SyncFeatureViewResponse()); + final resp = + convert.json.encode(buildGoogleIamV1TestIamPermissionsResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = - await res.sync(arg_request, arg_featureView, $fields: arg_$fields); - checkGoogleCloudAiplatformV1SyncFeatureViewResponse( - response as api.GoogleCloudAiplatformV1SyncFeatureViewResponse); + final response = await res.testIamPermissions(arg_resource, + permissions: arg_permissions, $fields: arg_$fields); + checkGoogleIamV1TestIamPermissionsResponse( + response as api.GoogleIamV1TestIamPermissionsResponse); }); + }); - unittest.test('method--testIamPermissions', () async { + unittest.group( + 'resource-ProjectsLocationsFeatureOnlineStoresFeatureViewsResource', () { + unittest.test('method--create', () async { final mock = HttpServerMock(); final res = api.AiplatformApi(mock) .projects .locations .featureOnlineStores .featureViews; - final arg_resource = 'foo'; - final arg_permissions = buildUnnamed389(); + final arg_request = buildGoogleCloudAiplatformV1FeatureView(); + final arg_parent = 'foo'; + final arg_featureViewId = 'foo'; + final arg_runSyncImmediately = true; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.GoogleCloudAiplatformV1FeatureView.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1FeatureView(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -46597,8 +51811,12 @@ void main() { } } unittest.expect( - queryMap['permissions']!, - unittest.equals(arg_permissions), + queryMap['featureViewId']!.first, + unittest.equals(arg_featureViewId), + ); + unittest.expect( + queryMap['runSyncImmediately']!.first, + unittest.equals('$arg_runSyncImmediately'), ); unittest.expect( queryMap['fields']!.first, @@ -46608,28 +51826,24 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = - convert.json.encode(buildGoogleIamV1TestIamPermissionsResponse()); + final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.testIamPermissions(arg_resource, - permissions: arg_permissions, $fields: arg_$fields); - checkGoogleIamV1TestIamPermissionsResponse( - response as api.GoogleIamV1TestIamPermissionsResponse); + final response = await res.create(arg_request, arg_parent, + featureViewId: arg_featureViewId, + runSyncImmediately: arg_runSyncImmediately, + $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); }); - }); - unittest.group( - 'resource-ProjectsLocationsFeatureOnlineStoresFeatureViewsFeatureViewSyncsResource', - () { - unittest.test('method--get', () async { + unittest.test('method--delete', () async { final mock = HttpServerMock(); final res = api.AiplatformApi(mock) .projects .locations .featureOnlineStores - .featureViews - .featureViewSyncs; + .featureViews; final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -46672,30 +51886,31 @@ void main() { final h = { 'content-type': 'application/json; 
charset=utf-8', }; - final resp = - convert.json.encode(buildGoogleCloudAiplatformV1FeatureViewSync()); + final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.get(arg_name, $fields: arg_$fields); - checkGoogleCloudAiplatformV1FeatureViewSync( - response as api.GoogleCloudAiplatformV1FeatureViewSync); + final response = await res.delete(arg_name, $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); }); - unittest.test('method--list', () async { + unittest.test('method--fetchFeatureValues', () async { final mock = HttpServerMock(); final res = api.AiplatformApi(mock) .projects .locations .featureOnlineStores - .featureViews - .featureViewSyncs; - final arg_parent = 'foo'; - final arg_filter = 'foo'; - final arg_orderBy = 'foo'; - final arg_pageSize = 42; - final arg_pageToken = 'foo'; + .featureViews; + final arg_request = + buildGoogleCloudAiplatformV1FetchFeatureValuesRequest(); + final arg_featureView = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = + api.GoogleCloudAiplatformV1FetchFeatureValuesRequest.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1FetchFeatureValuesRequest(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -46727,22 +51942,6 @@ void main() { ); } } - unittest.expect( - queryMap['filter']!.first, - unittest.equals(arg_filter), - ); - unittest.expect( - queryMap['orderBy']!.first, - unittest.equals(arg_orderBy), - ); - unittest.expect( - core.int.parse(queryMap['pageSize']!.first), - unittest.equals(arg_pageSize), - ); - unittest.expect( - queryMap['pageToken']!.first, - unittest.equals(arg_pageToken), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -46752,31 +51951,23 @@ void main() { 'content-type': 'application/json; charset=utf-8', }; final resp = convert.json - .encode(buildGoogleCloudAiplatformV1ListFeatureViewSyncsResponse()); + .encode(buildGoogleCloudAiplatformV1FetchFeatureValuesResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.list(arg_parent, - filter: arg_filter, - orderBy: arg_orderBy, - pageSize: arg_pageSize, - pageToken: arg_pageToken, + final response = await res.fetchFeatureValues( + arg_request, arg_featureView, $fields: arg_$fields); - checkGoogleCloudAiplatformV1ListFeatureViewSyncsResponse( - response as api.GoogleCloudAiplatformV1ListFeatureViewSyncsResponse); + checkGoogleCloudAiplatformV1FetchFeatureValuesResponse( + response as api.GoogleCloudAiplatformV1FetchFeatureValuesResponse); }); - }); - unittest.group( - 'resource-ProjectsLocationsFeatureOnlineStoresFeatureViewsOperationsResource', - () { - unittest.test('method--delete', () async { + unittest.test('method--get', () async { final mock = HttpServerMock(); final res = api.AiplatformApi(mock) .projects .locations .featureOnlineStores - .featureViews - .operations; + .featureViews; final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -46819,22 +52010,24 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleProtobufEmpty()); + final resp = + convert.json.encode(buildGoogleCloudAiplatformV1FeatureView()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await 
res.delete(arg_name, $fields: arg_$fields); - checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); + final response = await res.get(arg_name, $fields: arg_$fields); + checkGoogleCloudAiplatformV1FeatureView( + response as api.GoogleCloudAiplatformV1FeatureView); }); - unittest.test('method--get', () async { + unittest.test('method--getIamPolicy', () async { final mock = HttpServerMock(); final res = api.AiplatformApi(mock) .projects .locations .featureOnlineStores - .featureViews - .operations; - final arg_name = 'foo'; + .featureViews; + final arg_resource = 'foo'; + final arg_options_requestedPolicyVersion = 42; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -46868,6 +52061,10 @@ void main() { ); } } + unittest.expect( + core.int.parse(queryMap['options.requestedPolicyVersion']!.first), + unittest.equals(arg_options_requestedPolicyVersion), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -46876,24 +52073,25 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleLongrunningOperation()); + final resp = convert.json.encode(buildGoogleIamV1Policy()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.get(arg_name, $fields: arg_$fields); - checkGoogleLongrunningOperation( - response as api.GoogleLongrunningOperation); + final response = await res.getIamPolicy(arg_resource, + options_requestedPolicyVersion: arg_options_requestedPolicyVersion, + $fields: arg_$fields); + checkGoogleIamV1Policy(response as api.GoogleIamV1Policy); }); - unittest.test('method--listWait', () async { + unittest.test('method--list', () async { final mock = HttpServerMock(); final res = api.AiplatformApi(mock) .projects .locations .featureOnlineStores - .featureViews - .operations; - final arg_name = 'foo'; + .featureViews; + final arg_parent = 'foo'; final arg_filter = 'foo'; + final arg_orderBy = 'foo'; final arg_pageSize = 42; final arg_pageToken = 'foo'; final arg_$fields = 'foo'; @@ -46933,6 +52131,10 @@ void main() { queryMap['filter']!.first, unittest.equals(arg_filter), ); + unittest.expect( + queryMap['orderBy']!.first, + unittest.equals(arg_orderBy), + ); unittest.expect( core.int.parse(queryMap['pageSize']!.first), unittest.equals(arg_pageSize), @@ -46949,31 +52151,36 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = - convert.json.encode(buildGoogleLongrunningListOperationsResponse()); + final resp = convert.json + .encode(buildGoogleCloudAiplatformV1ListFeatureViewsResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.listWait(arg_name, + final response = await res.list(arg_parent, filter: arg_filter, + orderBy: arg_orderBy, pageSize: arg_pageSize, pageToken: arg_pageToken, $fields: arg_$fields); - checkGoogleLongrunningListOperationsResponse( - response as api.GoogleLongrunningListOperationsResponse); + checkGoogleCloudAiplatformV1ListFeatureViewsResponse( + response as api.GoogleCloudAiplatformV1ListFeatureViewsResponse); }); - unittest.test('method--wait', () async { + unittest.test('method--patch', () async { final mock = HttpServerMock(); final res = api.AiplatformApi(mock) .projects .locations .featureOnlineStores - .featureViews - .operations; + .featureViews; + final arg_request = buildGoogleCloudAiplatformV1FeatureView(); final arg_name = 'foo'; - final 
arg_timeout = 'foo'; + final arg_updateMask = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.GoogleCloudAiplatformV1FeatureView.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1FeatureView(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -47006,8 +52213,8 @@ void main() { } } unittest.expect( - queryMap['timeout']!.first, - unittest.equals(arg_timeout), + queryMap['updateMask']!.first, + unittest.equals(arg_updateMask), ); unittest.expect( queryMap['fields']!.first, @@ -47020,25 +52227,29 @@ void main() { final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = - await res.wait(arg_name, timeout: arg_timeout, $fields: arg_$fields); + final response = await res.patch(arg_request, arg_name, + updateMask: arg_updateMask, $fields: arg_$fields); checkGoogleLongrunningOperation( response as api.GoogleLongrunningOperation); }); - }); - unittest.group( - 'resource-ProjectsLocationsFeatureOnlineStoresOperationsResource', () { - unittest.test('method--delete', () async { + unittest.test('method--searchNearestEntities', () async { final mock = HttpServerMock(); final res = api.AiplatformApi(mock) .projects .locations .featureOnlineStores - .operations; - final arg_name = 'foo'; + .featureViews; + final arg_request = + buildGoogleCloudAiplatformV1SearchNearestEntitiesRequest(); + final arg_featureView = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = + api.GoogleCloudAiplatformV1SearchNearestEntitiesRequest.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1SearchNearestEntitiesRequest(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -47078,23 +52289,32 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleProtobufEmpty()); + final resp = convert.json.encode( + buildGoogleCloudAiplatformV1SearchNearestEntitiesResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.delete(arg_name, $fields: arg_$fields); - checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); + final response = await res.searchNearestEntities( + arg_request, arg_featureView, + $fields: arg_$fields); + checkGoogleCloudAiplatformV1SearchNearestEntitiesResponse( + response as api.GoogleCloudAiplatformV1SearchNearestEntitiesResponse); }); - unittest.test('method--get', () async { + unittest.test('method--setIamPolicy', () async { final mock = HttpServerMock(); final res = api.AiplatformApi(mock) .projects .locations .featureOnlineStores - .operations; - final arg_name = 'foo'; + .featureViews; + final arg_request = buildGoogleIamV1SetIamPolicyRequest(); + final arg_resource = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.GoogleIamV1SetIamPolicyRequest.fromJson( + json as core.Map); + checkGoogleIamV1SetIamPolicyRequest(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -47134,27 +52354,29 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleLongrunningOperation()); + final resp = convert.json.encode(buildGoogleIamV1Policy()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await 
res.get(arg_name, $fields: arg_$fields); - checkGoogleLongrunningOperation( - response as api.GoogleLongrunningOperation); + final response = await res.setIamPolicy(arg_request, arg_resource, + $fields: arg_$fields); + checkGoogleIamV1Policy(response as api.GoogleIamV1Policy); }); - unittest.test('method--listWait', () async { + unittest.test('method--sync', () async { final mock = HttpServerMock(); final res = api.AiplatformApi(mock) .projects .locations .featureOnlineStores - .operations; - final arg_name = 'foo'; - final arg_filter = 'foo'; - final arg_pageSize = 42; - final arg_pageToken = 'foo'; + .featureViews; + final arg_request = buildGoogleCloudAiplatformV1SyncFeatureViewRequest(); + final arg_featureView = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.GoogleCloudAiplatformV1SyncFeatureViewRequest.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1SyncFeatureViewRequest(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -47186,18 +52408,6 @@ void main() { ); } } - unittest.expect( - queryMap['filter']!.first, - unittest.equals(arg_filter), - ); - unittest.expect( - core.int.parse(queryMap['pageSize']!.first), - unittest.equals(arg_pageSize), - ); - unittest.expect( - queryMap['pageToken']!.first, - unittest.equals(arg_pageToken), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -47206,28 +52416,25 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = - convert.json.encode(buildGoogleLongrunningListOperationsResponse()); + final resp = convert.json + .encode(buildGoogleCloudAiplatformV1SyncFeatureViewResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.listWait(arg_name, - filter: arg_filter, - pageSize: arg_pageSize, - pageToken: arg_pageToken, - $fields: arg_$fields); - checkGoogleLongrunningListOperationsResponse( - response as api.GoogleLongrunningListOperationsResponse); + final response = + await res.sync(arg_request, arg_featureView, $fields: arg_$fields); + checkGoogleCloudAiplatformV1SyncFeatureViewResponse( + response as api.GoogleCloudAiplatformV1SyncFeatureViewResponse); }); - unittest.test('method--wait', () async { + unittest.test('method--testIamPermissions', () async { final mock = HttpServerMock(); final res = api.AiplatformApi(mock) .projects .locations .featureOnlineStores - .operations; - final arg_name = 'foo'; - final arg_timeout = 'foo'; + .featureViews; + final arg_resource = 'foo'; + final arg_permissions = buildUnnamed429(); final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -47262,8 +52469,8 @@ void main() { } } unittest.expect( - queryMap['timeout']!.first, - unittest.equals(arg_timeout), + queryMap['permissions']!, + unittest.equals(arg_permissions), ); unittest.expect( queryMap['fields']!.first, @@ -47273,30 +52480,31 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleLongrunningOperation()); + final resp = + convert.json.encode(buildGoogleIamV1TestIamPermissionsResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = - await res.wait(arg_name, timeout: arg_timeout, $fields: arg_$fields); - checkGoogleLongrunningOperation( - response as api.GoogleLongrunningOperation); + final response = await 
res.testIamPermissions(arg_resource, + permissions: arg_permissions, $fields: arg_$fields); + checkGoogleIamV1TestIamPermissionsResponse( + response as api.GoogleIamV1TestIamPermissionsResponse); }); }); - unittest.group('resource-ProjectsLocationsFeaturestoresResource', () { - unittest.test('method--batchReadFeatureValues', () async { + unittest.group( + 'resource-ProjectsLocationsFeatureOnlineStoresFeatureViewsFeatureViewSyncsResource', + () { + unittest.test('method--get', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.featurestores; - final arg_request = - buildGoogleCloudAiplatformV1BatchReadFeatureValuesRequest(); - final arg_featurestore = 'foo'; + final res = api.AiplatformApi(mock) + .projects + .locations + .featureOnlineStores + .featureViews + .featureViewSyncs; + final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = - api.GoogleCloudAiplatformV1BatchReadFeatureValuesRequest.fromJson( - json as core.Map); - checkGoogleCloudAiplatformV1BatchReadFeatureValuesRequest(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -47336,28 +52544,30 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleLongrunningOperation()); + final resp = + convert.json.encode(buildGoogleCloudAiplatformV1FeatureViewSync()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.batchReadFeatureValues( - arg_request, arg_featurestore, - $fields: arg_$fields); - checkGoogleLongrunningOperation( - response as api.GoogleLongrunningOperation); + final response = await res.get(arg_name, $fields: arg_$fields); + checkGoogleCloudAiplatformV1FeatureViewSync( + response as api.GoogleCloudAiplatformV1FeatureViewSync); }); - unittest.test('method--create', () async { + unittest.test('method--list', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.featurestores; - final arg_request = buildGoogleCloudAiplatformV1Featurestore(); + final res = api.AiplatformApi(mock) + .projects + .locations + .featureOnlineStores + .featureViews + .featureViewSyncs; final arg_parent = 'foo'; - final arg_featurestoreId = 'foo'; + final arg_filter = 'foo'; + final arg_orderBy = 'foo'; + final arg_pageSize = 42; + final arg_pageToken = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleCloudAiplatformV1Featurestore.fromJson( - json as core.Map); - checkGoogleCloudAiplatformV1Featurestore(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -47390,8 +52600,20 @@ void main() { } } unittest.expect( - queryMap['featurestoreId']!.first, - unittest.equals(arg_featurestoreId), + queryMap['filter']!.first, + unittest.equals(arg_filter), + ); + unittest.expect( + queryMap['orderBy']!.first, + unittest.equals(arg_orderBy), + ); + unittest.expect( + core.int.parse(queryMap['pageSize']!.first), + unittest.equals(arg_pageSize), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), ); unittest.expect( queryMap['fields']!.first, @@ -47401,20 +52623,33 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleLongrunningOperation()); + final resp = convert.json + .encode(buildGoogleCloudAiplatformV1ListFeatureViewSyncsResponse()); return 
async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.create(arg_request, arg_parent, - featurestoreId: arg_featurestoreId, $fields: arg_$fields); - checkGoogleLongrunningOperation( - response as api.GoogleLongrunningOperation); + final response = await res.list(arg_parent, + filter: arg_filter, + orderBy: arg_orderBy, + pageSize: arg_pageSize, + pageToken: arg_pageToken, + $fields: arg_$fields); + checkGoogleCloudAiplatformV1ListFeatureViewSyncsResponse( + response as api.GoogleCloudAiplatformV1ListFeatureViewSyncsResponse); }); + }); + unittest.group( + 'resource-ProjectsLocationsFeatureOnlineStoresFeatureViewsOperationsResource', + () { unittest.test('method--delete', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.featurestores; + final res = api.AiplatformApi(mock) + .projects + .locations + .featureOnlineStores + .featureViews + .operations; final arg_name = 'foo'; - final arg_force = true; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -47448,10 +52683,6 @@ void main() { ); } } - unittest.expect( - queryMap['force']!.first, - unittest.equals('$arg_force'), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -47460,18 +52691,21 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleLongrunningOperation()); + final resp = convert.json.encode(buildGoogleProtobufEmpty()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = - await res.delete(arg_name, force: arg_force, $fields: arg_$fields); - checkGoogleLongrunningOperation( - response as api.GoogleLongrunningOperation); + final response = await res.delete(arg_name, $fields: arg_$fields); + checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); }); unittest.test('method--get', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.featurestores; + final res = api.AiplatformApi(mock) + .projects + .locations + .featureOnlineStores + .featureViews + .operations; final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -47514,20 +52748,26 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = - convert.json.encode(buildGoogleCloudAiplatformV1Featurestore()); + final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); final response = await res.get(arg_name, $fields: arg_$fields); - checkGoogleCloudAiplatformV1Featurestore( - response as api.GoogleCloudAiplatformV1Featurestore); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); }); - unittest.test('method--getIamPolicy', () async { + unittest.test('method--listWait', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.featurestores; - final arg_resource = 'foo'; - final arg_options_requestedPolicyVersion = 42; + final res = api.AiplatformApi(mock) + .projects + .locations + .featureOnlineStores + .featureViews + .operations; + final arg_name = 'foo'; + final arg_filter = 'foo'; + final arg_pageSize = 42; + final arg_pageToken = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -47562,8 +52802,16 @@ 
void main() { } } unittest.expect( - core.int.parse(queryMap['options.requestedPolicyVersion']!.first), - unittest.equals(arg_options_requestedPolicyVersion), + queryMap['filter']!.first, + unittest.equals(arg_filter), + ); + unittest.expect( + core.int.parse(queryMap['pageSize']!.first), + unittest.equals(arg_pageSize), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), ); unittest.expect( queryMap['fields']!.first, @@ -47573,24 +52821,29 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleIamV1Policy()); + final resp = + convert.json.encode(buildGoogleLongrunningListOperationsResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.getIamPolicy(arg_resource, - options_requestedPolicyVersion: arg_options_requestedPolicyVersion, + final response = await res.listWait(arg_name, + filter: arg_filter, + pageSize: arg_pageSize, + pageToken: arg_pageToken, $fields: arg_$fields); - checkGoogleIamV1Policy(response as api.GoogleIamV1Policy); + checkGoogleLongrunningListOperationsResponse( + response as api.GoogleLongrunningListOperationsResponse); }); - unittest.test('method--list', () async { + unittest.test('method--wait', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.featurestores; - final arg_parent = 'foo'; - final arg_filter = 'foo'; - final arg_orderBy = 'foo'; - final arg_pageSize = 42; - final arg_pageToken = 'foo'; - final arg_readMask = 'foo'; + final res = api.AiplatformApi(mock) + .projects + .locations + .featureOnlineStores + .featureViews + .operations; + final arg_name = 'foo'; + final arg_timeout = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -47625,25 +52878,70 @@ void main() { } } unittest.expect( - queryMap['filter']!.first, - unittest.equals(arg_filter), - ); - unittest.expect( - queryMap['orderBy']!.first, - unittest.equals(arg_orderBy), + queryMap['timeout']!.first, + unittest.equals(arg_timeout), ); unittest.expect( - core.int.parse(queryMap['pageSize']!.first), - unittest.equals(arg_pageSize), + queryMap['fields']!.first, + unittest.equals(arg_$fields), ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildGoogleLongrunningOperation()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = + await res.wait(arg_name, timeout: arg_timeout, $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); + }); + }); + + unittest.group( + 'resource-ProjectsLocationsFeatureOnlineStoresOperationsResource', () { + unittest.test('method--delete', () async { + final mock = HttpServerMock(); + final res = api.AiplatformApi(mock) + .projects + .locations + .featureOnlineStores + .operations; + final arg_name = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; unittest.expect( - queryMap['pageToken']!.first, - unittest.equals(arg_pageToken), + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), ); + pathOffset += 1; unittest.expect( - queryMap['readMask']!.first, - unittest.equals(arg_readMask), + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), ); + pathOffset += 
3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -47652,33 +52950,23 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json - .encode(buildGoogleCloudAiplatformV1ListFeaturestoresResponse()); + final resp = convert.json.encode(buildGoogleProtobufEmpty()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.list(arg_parent, - filter: arg_filter, - orderBy: arg_orderBy, - pageSize: arg_pageSize, - pageToken: arg_pageToken, - readMask: arg_readMask, - $fields: arg_$fields); - checkGoogleCloudAiplatformV1ListFeaturestoresResponse( - response as api.GoogleCloudAiplatformV1ListFeaturestoresResponse); + final response = await res.delete(arg_name, $fields: arg_$fields); + checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); }); - unittest.test('method--patch', () async { + unittest.test('method--get', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.featurestores; - final arg_request = buildGoogleCloudAiplatformV1Featurestore(); + final res = api.AiplatformApi(mock) + .projects + .locations + .featureOnlineStores + .operations; final arg_name = 'foo'; - final arg_updateMask = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleCloudAiplatformV1Featurestore.fromJson( - json as core.Map); - checkGoogleCloudAiplatformV1Featurestore(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -47710,10 +52998,6 @@ void main() { ); } } - unittest.expect( - queryMap['updateMask']!.first, - unittest.equals(arg_updateMask), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -47725,19 +53009,22 @@ void main() { final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.patch(arg_request, arg_name, - updateMask: arg_updateMask, $fields: arg_$fields); + final response = await res.get(arg_name, $fields: arg_$fields); checkGoogleLongrunningOperation( response as api.GoogleLongrunningOperation); }); - unittest.test('method--searchFeatures', () async { + unittest.test('method--listWait', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.featurestores; - final arg_location = 'foo'; + final res = api.AiplatformApi(mock) + .projects + .locations + .featureOnlineStores + .operations; + final arg_name = 'foo'; + final arg_filter = 'foo'; final arg_pageSize = 42; final arg_pageToken = 'foo'; - final arg_query = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -47771,6 +53058,10 @@ void main() { ); } } + unittest.expect( + queryMap['filter']!.first, + unittest.equals(arg_filter), + ); unittest.expect( core.int.parse(queryMap['pageSize']!.first), unittest.equals(arg_pageSize), @@ -47779,10 
+53070,6 @@ void main() { queryMap['pageToken']!.first, unittest.equals(arg_pageToken), ); - unittest.expect( - queryMap['query']!.first, - unittest.equals(arg_query), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -47791,30 +53078,30 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json - .encode(buildGoogleCloudAiplatformV1SearchFeaturesResponse()); + final resp = + convert.json.encode(buildGoogleLongrunningListOperationsResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.searchFeatures(arg_location, + final response = await res.listWait(arg_name, + filter: arg_filter, pageSize: arg_pageSize, pageToken: arg_pageToken, - query: arg_query, $fields: arg_$fields); - checkGoogleCloudAiplatformV1SearchFeaturesResponse( - response as api.GoogleCloudAiplatformV1SearchFeaturesResponse); + checkGoogleLongrunningListOperationsResponse( + response as api.GoogleLongrunningListOperationsResponse); }); - unittest.test('method--setIamPolicy', () async { + unittest.test('method--wait', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.featurestores; - final arg_request = buildGoogleIamV1SetIamPolicyRequest(); - final arg_resource = 'foo'; + final res = api.AiplatformApi(mock) + .projects + .locations + .featureOnlineStores + .operations; + final arg_name = 'foo'; + final arg_timeout = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleIamV1SetIamPolicyRequest.fromJson( - json as core.Map); - checkGoogleIamV1SetIamPolicyRequest(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -47846,6 +53133,10 @@ void main() { ); } } + unittest.expect( + queryMap['timeout']!.first, + unittest.equals(arg_timeout), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -47854,21 +53145,30 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleIamV1Policy()); + final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.setIamPolicy(arg_request, arg_resource, - $fields: arg_$fields); - checkGoogleIamV1Policy(response as api.GoogleIamV1Policy); + final response = + await res.wait(arg_name, timeout: arg_timeout, $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); }); + }); - unittest.test('method--testIamPermissions', () async { + unittest.group('resource-ProjectsLocationsFeaturestoresResource', () { + unittest.test('method--batchReadFeatureValues', () async { final mock = HttpServerMock(); final res = api.AiplatformApi(mock).projects.locations.featurestores; - final arg_resource = 'foo'; - final arg_permissions = buildUnnamed390(); + final arg_request = + buildGoogleCloudAiplatformV1BatchReadFeatureValuesRequest(); + final arg_featurestore = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = + api.GoogleCloudAiplatformV1BatchReadFeatureValuesRequest.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1BatchReadFeatureValuesRequest(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -47900,10 +53200,6 @@ void main() { ); } } - unittest.expect( - queryMap['permissions']!, - 
unittest.equals(arg_permissions), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -47912,31 +53208,27 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = - convert.json.encode(buildGoogleIamV1TestIamPermissionsResponse()); + final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.testIamPermissions(arg_resource, - permissions: arg_permissions, $fields: arg_$fields); - checkGoogleIamV1TestIamPermissionsResponse( - response as api.GoogleIamV1TestIamPermissionsResponse); + final response = await res.batchReadFeatureValues( + arg_request, arg_featurestore, + $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); }); - }); - unittest.group('resource-ProjectsLocationsFeaturestoresEntityTypesResource', - () { unittest.test('method--create', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.featurestores.entityTypes; - final arg_request = buildGoogleCloudAiplatformV1EntityType(); + final res = api.AiplatformApi(mock).projects.locations.featurestores; + final arg_request = buildGoogleCloudAiplatformV1Featurestore(); final arg_parent = 'foo'; - final arg_entityTypeId = 'foo'; + final arg_featurestoreId = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleCloudAiplatformV1EntityType.fromJson( + final obj = api.GoogleCloudAiplatformV1Featurestore.fromJson( json as core.Map); - checkGoogleCloudAiplatformV1EntityType(obj); + checkGoogleCloudAiplatformV1Featurestore(obj); final path = req.url.path; var pathOffset = 0; @@ -47970,8 +53262,8 @@ void main() { } } unittest.expect( - queryMap['entityTypeId']!.first, - unittest.equals(arg_entityTypeId), + queryMap['featurestoreId']!.first, + unittest.equals(arg_featurestoreId), ); unittest.expect( queryMap['fields']!.first, @@ -47985,15 +53277,14 @@ void main() { return async.Future.value(stringResponse(200, h, resp)); }), true); final response = await res.create(arg_request, arg_parent, - entityTypeId: arg_entityTypeId, $fields: arg_$fields); + featurestoreId: arg_featurestoreId, $fields: arg_$fields); checkGoogleLongrunningOperation( response as api.GoogleLongrunningOperation); }); unittest.test('method--delete', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.featurestores.entityTypes; + final res = api.AiplatformApi(mock).projects.locations.featurestores; final arg_name = 'foo'; final arg_force = true; final arg_$fields = 'foo'; @@ -48050,20 +53341,12 @@ void main() { response as api.GoogleLongrunningOperation); }); - unittest.test('method--deleteFeatureValues', () async { + unittest.test('method--get', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.featurestores.entityTypes; - final arg_request = - buildGoogleCloudAiplatformV1DeleteFeatureValuesRequest(); - final arg_entityType = 'foo'; + final res = api.AiplatformApi(mock).projects.locations.featurestores; + final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = - api.GoogleCloudAiplatformV1DeleteFeatureValuesRequest.fromJson( - json as core.Map); - checkGoogleCloudAiplatformV1DeleteFeatureValuesRequest(obj); - final path = req.url.path; var pathOffset = 0; core.int 
index; @@ -48103,30 +53386,22 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleLongrunningOperation()); + final resp = + convert.json.encode(buildGoogleCloudAiplatformV1Featurestore()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.deleteFeatureValues( - arg_request, arg_entityType, - $fields: arg_$fields); - checkGoogleLongrunningOperation( - response as api.GoogleLongrunningOperation); + final response = await res.get(arg_name, $fields: arg_$fields); + checkGoogleCloudAiplatformV1Featurestore( + response as api.GoogleCloudAiplatformV1Featurestore); }); - unittest.test('method--exportFeatureValues', () async { + unittest.test('method--getIamPolicy', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.featurestores.entityTypes; - final arg_request = - buildGoogleCloudAiplatformV1ExportFeatureValuesRequest(); - final arg_entityType = 'foo'; + final res = api.AiplatformApi(mock).projects.locations.featurestores; + final arg_resource = 'foo'; + final arg_options_requestedPolicyVersion = 42; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = - api.GoogleCloudAiplatformV1ExportFeatureValuesRequest.fromJson( - json as core.Map); - checkGoogleCloudAiplatformV1ExportFeatureValuesRequest(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -48158,6 +53433,10 @@ void main() { ); } } + unittest.expect( + core.int.parse(queryMap['options.requestedPolicyVersion']!.first), + unittest.equals(arg_options_requestedPolicyVersion), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -48166,21 +53445,24 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleLongrunningOperation()); + final resp = convert.json.encode(buildGoogleIamV1Policy()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.exportFeatureValues( - arg_request, arg_entityType, + final response = await res.getIamPolicy(arg_resource, + options_requestedPolicyVersion: arg_options_requestedPolicyVersion, $fields: arg_$fields); - checkGoogleLongrunningOperation( - response as api.GoogleLongrunningOperation); + checkGoogleIamV1Policy(response as api.GoogleIamV1Policy); }); - unittest.test('method--get', () async { + unittest.test('method--list', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.featurestores.entityTypes; - final arg_name = 'foo'; + final res = api.AiplatformApi(mock).projects.locations.featurestores; + final arg_parent = 'foo'; + final arg_filter = 'foo'; + final arg_orderBy = 'foo'; + final arg_pageSize = 42; + final arg_pageToken = 'foo'; + final arg_readMask = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -48214,6 +53496,26 @@ void main() { ); } } + unittest.expect( + queryMap['filter']!.first, + unittest.equals(arg_filter), + ); + unittest.expect( + queryMap['orderBy']!.first, + unittest.equals(arg_orderBy), + ); + unittest.expect( + core.int.parse(queryMap['pageSize']!.first), + unittest.equals(arg_pageSize), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), + ); + unittest.expect( + queryMap['readMask']!.first, + unittest.equals(arg_readMask), + ); 
unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -48222,23 +53524,33 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = - convert.json.encode(buildGoogleCloudAiplatformV1EntityType()); + final resp = convert.json + .encode(buildGoogleCloudAiplatformV1ListFeaturestoresResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.get(arg_name, $fields: arg_$fields); - checkGoogleCloudAiplatformV1EntityType( - response as api.GoogleCloudAiplatformV1EntityType); + final response = await res.list(arg_parent, + filter: arg_filter, + orderBy: arg_orderBy, + pageSize: arg_pageSize, + pageToken: arg_pageToken, + readMask: arg_readMask, + $fields: arg_$fields); + checkGoogleCloudAiplatformV1ListFeaturestoresResponse( + response as api.GoogleCloudAiplatformV1ListFeaturestoresResponse); }); - unittest.test('method--getIamPolicy', () async { + unittest.test('method--patch', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.featurestores.entityTypes; - final arg_resource = 'foo'; - final arg_options_requestedPolicyVersion = 42; + final res = api.AiplatformApi(mock).projects.locations.featurestores; + final arg_request = buildGoogleCloudAiplatformV1Featurestore(); + final arg_name = 'foo'; + final arg_updateMask = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.GoogleCloudAiplatformV1Featurestore.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1Featurestore(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -48271,8 +53583,8 @@ void main() { } } unittest.expect( - core.int.parse(queryMap['options.requestedPolicyVersion']!.first), - unittest.equals(arg_options_requestedPolicyVersion), + queryMap['updateMask']!.first, + unittest.equals(arg_updateMask), ); unittest.expect( queryMap['fields']!.first, @@ -48282,29 +53594,24 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleIamV1Policy()); + final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.getIamPolicy(arg_resource, - options_requestedPolicyVersion: arg_options_requestedPolicyVersion, - $fields: arg_$fields); - checkGoogleIamV1Policy(response as api.GoogleIamV1Policy); + final response = await res.patch(arg_request, arg_name, + updateMask: arg_updateMask, $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); }); - unittest.test('method--importFeatureValues', () async { + unittest.test('method--searchFeatures', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.featurestores.entityTypes; - final arg_request = - buildGoogleCloudAiplatformV1ImportFeatureValuesRequest(); - final arg_entityType = 'foo'; + final res = api.AiplatformApi(mock).projects.locations.featurestores; + final arg_location = 'foo'; + final arg_pageSize = 42; + final arg_pageToken = 'foo'; + final arg_query = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = - api.GoogleCloudAiplatformV1ImportFeatureValuesRequest.fromJson( - json as core.Map); - checkGoogleCloudAiplatformV1ImportFeatureValuesRequest(obj); - final path = req.url.path; var pathOffset = 0; core.int 
index; @@ -48336,6 +53643,18 @@ void main() { ); } } + unittest.expect( + core.int.parse(queryMap['pageSize']!.first), + unittest.equals(arg_pageSize), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), + ); + unittest.expect( + queryMap['query']!.first, + unittest.equals(arg_query), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -48344,28 +53663,30 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleLongrunningOperation()); + final resp = convert.json + .encode(buildGoogleCloudAiplatformV1SearchFeaturesResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.importFeatureValues( - arg_request, arg_entityType, + final response = await res.searchFeatures(arg_location, + pageSize: arg_pageSize, + pageToken: arg_pageToken, + query: arg_query, $fields: arg_$fields); - checkGoogleLongrunningOperation( - response as api.GoogleLongrunningOperation); + checkGoogleCloudAiplatformV1SearchFeaturesResponse( + response as api.GoogleCloudAiplatformV1SearchFeaturesResponse); }); - unittest.test('method--list', () async { + unittest.test('method--setIamPolicy', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.featurestores.entityTypes; - final arg_parent = 'foo'; - final arg_filter = 'foo'; - final arg_orderBy = 'foo'; - final arg_pageSize = 42; - final arg_pageToken = 'foo'; - final arg_readMask = 'foo'; + final res = api.AiplatformApi(mock).projects.locations.featurestores; + final arg_request = buildGoogleIamV1SetIamPolicyRequest(); + final arg_resource = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.GoogleIamV1SetIamPolicyRequest.fromJson( + json as core.Map); + checkGoogleIamV1SetIamPolicyRequest(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -48397,26 +53718,6 @@ void main() { ); } } - unittest.expect( - queryMap['filter']!.first, - unittest.equals(arg_filter), - ); - unittest.expect( - queryMap['orderBy']!.first, - unittest.equals(arg_orderBy), - ); - unittest.expect( - core.int.parse(queryMap['pageSize']!.first), - unittest.equals(arg_pageSize), - ); - unittest.expect( - queryMap['pageToken']!.first, - unittest.equals(arg_pageToken), - ); - unittest.expect( - queryMap['readMask']!.first, - unittest.equals(arg_readMask), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -48425,34 +53726,21 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json - .encode(buildGoogleCloudAiplatformV1ListEntityTypesResponse()); + final resp = convert.json.encode(buildGoogleIamV1Policy()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.list(arg_parent, - filter: arg_filter, - orderBy: arg_orderBy, - pageSize: arg_pageSize, - pageToken: arg_pageToken, - readMask: arg_readMask, + final response = await res.setIamPolicy(arg_request, arg_resource, $fields: arg_$fields); - checkGoogleCloudAiplatformV1ListEntityTypesResponse( - response as api.GoogleCloudAiplatformV1ListEntityTypesResponse); + checkGoogleIamV1Policy(response as api.GoogleIamV1Policy); }); - unittest.test('method--patch', () async { + unittest.test('method--testIamPermissions', () async { final mock = HttpServerMock(); - final res = - 
api.AiplatformApi(mock).projects.locations.featurestores.entityTypes; - final arg_request = buildGoogleCloudAiplatformV1EntityType(); - final arg_name = 'foo'; - final arg_updateMask = 'foo'; + final res = api.AiplatformApi(mock).projects.locations.featurestores; + final arg_resource = 'foo'; + final arg_permissions = buildUnnamed430(); final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleCloudAiplatformV1EntityType.fromJson( - json as core.Map); - checkGoogleCloudAiplatformV1EntityType(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -48485,8 +53773,8 @@ void main() { } } unittest.expect( - queryMap['updateMask']!.first, - unittest.equals(arg_updateMask), + queryMap['permissions']!, + unittest.equals(arg_permissions), ); unittest.expect( queryMap['fields']!.first, @@ -48497,28 +53785,30 @@ void main() { 'content-type': 'application/json; charset=utf-8', }; final resp = - convert.json.encode(buildGoogleCloudAiplatformV1EntityType()); + convert.json.encode(buildGoogleIamV1TestIamPermissionsResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.patch(arg_request, arg_name, - updateMask: arg_updateMask, $fields: arg_$fields); - checkGoogleCloudAiplatformV1EntityType( - response as api.GoogleCloudAiplatformV1EntityType); + final response = await res.testIamPermissions(arg_resource, + permissions: arg_permissions, $fields: arg_$fields); + checkGoogleIamV1TestIamPermissionsResponse( + response as api.GoogleIamV1TestIamPermissionsResponse); }); + }); - unittest.test('method--readFeatureValues', () async { + unittest.group('resource-ProjectsLocationsFeaturestoresEntityTypesResource', + () { + unittest.test('method--create', () async { final mock = HttpServerMock(); final res = api.AiplatformApi(mock).projects.locations.featurestores.entityTypes; - final arg_request = - buildGoogleCloudAiplatformV1ReadFeatureValuesRequest(); - final arg_entityType = 'foo'; + final arg_request = buildGoogleCloudAiplatformV1EntityType(); + final arg_parent = 'foo'; + final arg_entityTypeId = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = - api.GoogleCloudAiplatformV1ReadFeatureValuesRequest.fromJson( - json as core.Map); - checkGoogleCloudAiplatformV1ReadFeatureValuesRequest(obj); + final obj = api.GoogleCloudAiplatformV1EntityType.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1EntityType(obj); final path = req.url.path; var pathOffset = 0; @@ -48551,6 +53841,10 @@ void main() { ); } } + unittest.expect( + queryMap['entityTypeId']!.first, + unittest.equals(arg_entityTypeId), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -48559,28 +53853,23 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json - .encode(buildGoogleCloudAiplatformV1ReadFeatureValuesResponse()); + final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.readFeatureValues(arg_request, arg_entityType, - $fields: arg_$fields); - checkGoogleCloudAiplatformV1ReadFeatureValuesResponse( - response as api.GoogleCloudAiplatformV1ReadFeatureValuesResponse); + final response = await res.create(arg_request, arg_parent, + entityTypeId: arg_entityTypeId, $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as 
api.GoogleLongrunningOperation); }); - unittest.test('method--setIamPolicy', () async { + unittest.test('method--delete', () async { final mock = HttpServerMock(); final res = api.AiplatformApi(mock).projects.locations.featurestores.entityTypes; - final arg_request = buildGoogleIamV1SetIamPolicyRequest(); - final arg_resource = 'foo'; + final arg_name = 'foo'; + final arg_force = true; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleIamV1SetIamPolicyRequest.fromJson( - json as core.Map); - checkGoogleIamV1SetIamPolicyRequest(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -48612,6 +53901,10 @@ void main() { ); } } + unittest.expect( + queryMap['force']!.first, + unittest.equals('$arg_force'), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -48620,26 +53913,28 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleIamV1Policy()); + final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.setIamPolicy(arg_request, arg_resource, - $fields: arg_$fields); - checkGoogleIamV1Policy(response as api.GoogleIamV1Policy); + final response = + await res.delete(arg_name, force: arg_force, $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); }); - unittest.test('method--streamingReadFeatureValues', () async { + unittest.test('method--deleteFeatureValues', () async { final mock = HttpServerMock(); final res = api.AiplatformApi(mock).projects.locations.featurestores.entityTypes; final arg_request = - buildGoogleCloudAiplatformV1StreamingReadFeatureValuesRequest(); + buildGoogleCloudAiplatformV1DeleteFeatureValuesRequest(); final arg_entityType = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleCloudAiplatformV1StreamingReadFeatureValuesRequest - .fromJson(json as core.Map); - checkGoogleCloudAiplatformV1StreamingReadFeatureValuesRequest(obj); + final obj = + api.GoogleCloudAiplatformV1DeleteFeatureValuesRequest.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1DeleteFeatureValuesRequest(obj); final path = req.url.path; var pathOffset = 0; @@ -48680,25 +53975,30 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json - .encode(buildGoogleCloudAiplatformV1ReadFeatureValuesResponse()); + final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.streamingReadFeatureValues( + final response = await res.deleteFeatureValues( arg_request, arg_entityType, $fields: arg_$fields); - checkGoogleCloudAiplatformV1ReadFeatureValuesResponse( - response as api.GoogleCloudAiplatformV1ReadFeatureValuesResponse); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); }); - unittest.test('method--testIamPermissions', () async { + unittest.test('method--exportFeatureValues', () async { final mock = HttpServerMock(); final res = api.AiplatformApi(mock).projects.locations.featurestores.entityTypes; - final arg_resource = 'foo'; - final arg_permissions = buildUnnamed391(); + final arg_request = + buildGoogleCloudAiplatformV1ExportFeatureValuesRequest(); + final arg_entityType = 'foo'; final arg_$fields 
= 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = + api.GoogleCloudAiplatformV1ExportFeatureValuesRequest.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1ExportFeatureValuesRequest(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -48730,10 +54030,6 @@ void main() { ); } } - unittest.expect( - queryMap['permissions']!, - unittest.equals(arg_permissions), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -48742,30 +54038,2088 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = - convert.json.encode(buildGoogleIamV1TestIamPermissionsResponse()); + final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.testIamPermissions(arg_resource, - permissions: arg_permissions, $fields: arg_$fields); - checkGoogleIamV1TestIamPermissionsResponse( - response as api.GoogleIamV1TestIamPermissionsResponse); + final response = await res.exportFeatureValues( + arg_request, arg_entityType, + $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); }); - unittest.test('method--writeFeatureValues', () async { + unittest.test('method--get', () async { final mock = HttpServerMock(); final res = api.AiplatformApi(mock).projects.locations.featurestores.entityTypes; - final arg_request = - buildGoogleCloudAiplatformV1WriteFeatureValuesRequest(); - final arg_entityType = 'foo'; + final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = - api.GoogleCloudAiplatformV1WriteFeatureValuesRequest.fromJson( - json as core.Map); - checkGoogleCloudAiplatformV1WriteFeatureValuesRequest(obj); - + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = + convert.json.encode(buildGoogleCloudAiplatformV1EntityType()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.get(arg_name, $fields: arg_$fields); + checkGoogleCloudAiplatformV1EntityType( + response as api.GoogleCloudAiplatformV1EntityType); + }); + + unittest.test('method--getIamPolicy', () async { + final mock = HttpServerMock(); + final res = + api.AiplatformApi(mock).projects.locations.featurestores.entityTypes; + final arg_resource = 'foo'; + final arg_options_requestedPolicyVersion = 42; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var 
pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + core.int.parse(queryMap['options.requestedPolicyVersion']!.first), + unittest.equals(arg_options_requestedPolicyVersion), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildGoogleIamV1Policy()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.getIamPolicy(arg_resource, + options_requestedPolicyVersion: arg_options_requestedPolicyVersion, + $fields: arg_$fields); + checkGoogleIamV1Policy(response as api.GoogleIamV1Policy); + }); + + unittest.test('method--importFeatureValues', () async { + final mock = HttpServerMock(); + final res = + api.AiplatformApi(mock).projects.locations.featurestores.entityTypes; + final arg_request = + buildGoogleCloudAiplatformV1ImportFeatureValuesRequest(); + final arg_entityType = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = + api.GoogleCloudAiplatformV1ImportFeatureValuesRequest.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1ImportFeatureValuesRequest(obj); + + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildGoogleLongrunningOperation()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.importFeatureValues( + arg_request, arg_entityType, + $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); + }); + + unittest.test('method--list', () async { + final mock = HttpServerMock(); + final res = + api.AiplatformApi(mock).projects.locations.featurestores.entityTypes; + final arg_parent = 'foo'; + final arg_filter = 'foo'; + final arg_orderBy = 
'foo'; + final arg_pageSize = 42; + final arg_pageToken = 'foo'; + final arg_readMask = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['filter']!.first, + unittest.equals(arg_filter), + ); + unittest.expect( + queryMap['orderBy']!.first, + unittest.equals(arg_orderBy), + ); + unittest.expect( + core.int.parse(queryMap['pageSize']!.first), + unittest.equals(arg_pageSize), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), + ); + unittest.expect( + queryMap['readMask']!.first, + unittest.equals(arg_readMask), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json + .encode(buildGoogleCloudAiplatformV1ListEntityTypesResponse()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.list(arg_parent, + filter: arg_filter, + orderBy: arg_orderBy, + pageSize: arg_pageSize, + pageToken: arg_pageToken, + readMask: arg_readMask, + $fields: arg_$fields); + checkGoogleCloudAiplatformV1ListEntityTypesResponse( + response as api.GoogleCloudAiplatformV1ListEntityTypesResponse); + }); + + unittest.test('method--patch', () async { + final mock = HttpServerMock(); + final res = + api.AiplatformApi(mock).projects.locations.featurestores.entityTypes; + final arg_request = buildGoogleCloudAiplatformV1EntityType(); + final arg_name = 'foo'; + final arg_updateMask = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.GoogleCloudAiplatformV1EntityType.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1EntityType(obj); + + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['updateMask']!.first, + unittest.equals(arg_updateMask), + ); + unittest.expect( 
+ queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = + convert.json.encode(buildGoogleCloudAiplatformV1EntityType()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.patch(arg_request, arg_name, + updateMask: arg_updateMask, $fields: arg_$fields); + checkGoogleCloudAiplatformV1EntityType( + response as api.GoogleCloudAiplatformV1EntityType); + }); + + unittest.test('method--readFeatureValues', () async { + final mock = HttpServerMock(); + final res = + api.AiplatformApi(mock).projects.locations.featurestores.entityTypes; + final arg_request = + buildGoogleCloudAiplatformV1ReadFeatureValuesRequest(); + final arg_entityType = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = + api.GoogleCloudAiplatformV1ReadFeatureValuesRequest.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1ReadFeatureValuesRequest(obj); + + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json + .encode(buildGoogleCloudAiplatformV1ReadFeatureValuesResponse()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.readFeatureValues(arg_request, arg_entityType, + $fields: arg_$fields); + checkGoogleCloudAiplatformV1ReadFeatureValuesResponse( + response as api.GoogleCloudAiplatformV1ReadFeatureValuesResponse); + }); + + unittest.test('method--setIamPolicy', () async { + final mock = HttpServerMock(); + final res = + api.AiplatformApi(mock).projects.locations.featurestores.entityTypes; + final arg_request = buildGoogleIamV1SetIamPolicyRequest(); + final arg_resource = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.GoogleIamV1SetIamPolicyRequest.fromJson( + json as core.Map); + checkGoogleIamV1SetIamPolicyRequest(obj); + + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in 
query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildGoogleIamV1Policy()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.setIamPolicy(arg_request, arg_resource, + $fields: arg_$fields); + checkGoogleIamV1Policy(response as api.GoogleIamV1Policy); + }); + + unittest.test('method--streamingReadFeatureValues', () async { + final mock = HttpServerMock(); + final res = + api.AiplatformApi(mock).projects.locations.featurestores.entityTypes; + final arg_request = + buildGoogleCloudAiplatformV1StreamingReadFeatureValuesRequest(); + final arg_entityType = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.GoogleCloudAiplatformV1StreamingReadFeatureValuesRequest + .fromJson(json as core.Map); + checkGoogleCloudAiplatformV1StreamingReadFeatureValuesRequest(obj); + + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json + .encode(buildGoogleCloudAiplatformV1ReadFeatureValuesResponse()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.streamingReadFeatureValues( + arg_request, arg_entityType, + $fields: arg_$fields); + checkGoogleCloudAiplatformV1ReadFeatureValuesResponse( + response as api.GoogleCloudAiplatformV1ReadFeatureValuesResponse); + }); + + unittest.test('method--testIamPermissions', () async { + final mock = HttpServerMock(); + final res = + api.AiplatformApi(mock).projects.locations.featurestores.entityTypes; + final arg_resource = 'foo'; + final arg_permissions = buildUnnamed431(); + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if 
(query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['permissions']!, + unittest.equals(arg_permissions), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = + convert.json.encode(buildGoogleIamV1TestIamPermissionsResponse()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.testIamPermissions(arg_resource, + permissions: arg_permissions, $fields: arg_$fields); + checkGoogleIamV1TestIamPermissionsResponse( + response as api.GoogleIamV1TestIamPermissionsResponse); + }); + + unittest.test('method--writeFeatureValues', () async { + final mock = HttpServerMock(); + final res = + api.AiplatformApi(mock).projects.locations.featurestores.entityTypes; + final arg_request = + buildGoogleCloudAiplatformV1WriteFeatureValuesRequest(); + final arg_entityType = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = + api.GoogleCloudAiplatformV1WriteFeatureValuesRequest.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1WriteFeatureValuesRequest(obj); + + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json + .encode(buildGoogleCloudAiplatformV1WriteFeatureValuesResponse()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.writeFeatureValues(arg_request, arg_entityType, + $fields: arg_$fields); + checkGoogleCloudAiplatformV1WriteFeatureValuesResponse( + response as api.GoogleCloudAiplatformV1WriteFeatureValuesResponse); + }); + }); + + unittest.group( + 'resource-ProjectsLocationsFeaturestoresEntityTypesFeaturesResource', () { + unittest.test('method--batchCreate', () async { + final mock = HttpServerMock(); + final res = api.AiplatformApi(mock) + .projects + .locations + .featurestores + .entityTypes + .features; + final arg_request = + buildGoogleCloudAiplatformV1BatchCreateFeaturesRequest(); + final arg_parent = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = + api.GoogleCloudAiplatformV1BatchCreateFeaturesRequest.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1BatchCreateFeaturesRequest(obj); + + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + 
unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildGoogleLongrunningOperation()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = + await res.batchCreate(arg_request, arg_parent, $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); + }); + + unittest.test('method--create', () async { + final mock = HttpServerMock(); + final res = api.AiplatformApi(mock) + .projects + .locations + .featurestores + .entityTypes + .features; + final arg_request = buildGoogleCloudAiplatformV1Feature(); + final arg_parent = 'foo'; + final arg_featureId = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.GoogleCloudAiplatformV1Feature.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1Feature(obj); + + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['featureId']!.first, + unittest.equals(arg_featureId), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildGoogleLongrunningOperation()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.create(arg_request, arg_parent, + featureId: arg_featureId, $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); + }); + + unittest.test('method--delete', () async { + final mock = HttpServerMock(); + final res = api.AiplatformApi(mock) + .projects + .locations + .featurestores + .entityTypes + .features; + final arg_name = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + 
unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildGoogleLongrunningOperation()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.delete(arg_name, $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); + }); + + unittest.test('method--get', () async { + final mock = HttpServerMock(); + final res = api.AiplatformApi(mock) + .projects + .locations + .featurestores + .entityTypes + .features; + final arg_name = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildGoogleCloudAiplatformV1Feature()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.get(arg_name, $fields: arg_$fields); + checkGoogleCloudAiplatformV1Feature( + response as api.GoogleCloudAiplatformV1Feature); + }); + + unittest.test('method--list', () async { + final mock = HttpServerMock(); + final res = api.AiplatformApi(mock) + .projects + .locations + .featurestores + .entityTypes + .features; + final arg_parent = 'foo'; + final arg_filter = 'foo'; + final arg_latestStatsCount = 42; + final arg_orderBy = 'foo'; + final arg_pageSize = 42; + final arg_pageToken = 'foo'; + final arg_readMask = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + 
unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['filter']!.first, + unittest.equals(arg_filter), + ); + unittest.expect( + core.int.parse(queryMap['latestStatsCount']!.first), + unittest.equals(arg_latestStatsCount), + ); + unittest.expect( + queryMap['orderBy']!.first, + unittest.equals(arg_orderBy), + ); + unittest.expect( + core.int.parse(queryMap['pageSize']!.first), + unittest.equals(arg_pageSize), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), + ); + unittest.expect( + queryMap['readMask']!.first, + unittest.equals(arg_readMask), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json + .encode(buildGoogleCloudAiplatformV1ListFeaturesResponse()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.list(arg_parent, + filter: arg_filter, + latestStatsCount: arg_latestStatsCount, + orderBy: arg_orderBy, + pageSize: arg_pageSize, + pageToken: arg_pageToken, + readMask: arg_readMask, + $fields: arg_$fields); + checkGoogleCloudAiplatformV1ListFeaturesResponse( + response as api.GoogleCloudAiplatformV1ListFeaturesResponse); + }); + + unittest.test('method--patch', () async { + final mock = HttpServerMock(); + final res = api.AiplatformApi(mock) + .projects + .locations + .featurestores + .entityTypes + .features; + final arg_request = buildGoogleCloudAiplatformV1Feature(); + final arg_name = 'foo'; + final arg_updateMask = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.GoogleCloudAiplatformV1Feature.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1Feature(obj); + + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['updateMask']!.first, + unittest.equals(arg_updateMask), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildGoogleCloudAiplatformV1Feature()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final 
response = await res.patch(arg_request, arg_name, + updateMask: arg_updateMask, $fields: arg_$fields); + checkGoogleCloudAiplatformV1Feature( + response as api.GoogleCloudAiplatformV1Feature); + }); + }); + + unittest.group( + 'resource-ProjectsLocationsFeaturestoresEntityTypesFeaturesOperationsResource', + () { + unittest.test('method--cancel', () async { + final mock = HttpServerMock(); + final res = api.AiplatformApi(mock) + .projects + .locations + .featurestores + .entityTypes + .features + .operations; + final arg_name = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildGoogleProtobufEmpty()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.cancel(arg_name, $fields: arg_$fields); + checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); + }); + + unittest.test('method--delete', () async { + final mock = HttpServerMock(); + final res = api.AiplatformApi(mock) + .projects + .locations + .featurestores + .entityTypes + .features + .operations; + final arg_name = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildGoogleProtobufEmpty()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.delete(arg_name, $fields: arg_$fields); + checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); + }); + + unittest.test('method--get', () async { + final mock = HttpServerMock(); + final res 
= api.AiplatformApi(mock) + .projects + .locations + .featurestores + .entityTypes + .features + .operations; + final arg_name = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildGoogleLongrunningOperation()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.get(arg_name, $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); + }); + + unittest.test('method--list', () async { + final mock = HttpServerMock(); + final res = api.AiplatformApi(mock) + .projects + .locations + .featurestores + .entityTypes + .features + .operations; + final arg_name = 'foo'; + final arg_filter = 'foo'; + final arg_pageSize = 42; + final arg_pageToken = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['filter']!.first, + unittest.equals(arg_filter), + ); + unittest.expect( + core.int.parse(queryMap['pageSize']!.first), + unittest.equals(arg_pageSize), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = + convert.json.encode(buildGoogleLongrunningListOperationsResponse()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.list(arg_name, + filter: arg_filter, + pageSize: arg_pageSize, + pageToken: arg_pageToken, + $fields: arg_$fields); + checkGoogleLongrunningListOperationsResponse( + response as 
api.GoogleLongrunningListOperationsResponse); + }); + + unittest.test('method--wait', () async { + final mock = HttpServerMock(); + final res = api.AiplatformApi(mock) + .projects + .locations + .featurestores + .entityTypes + .features + .operations; + final arg_name = 'foo'; + final arg_timeout = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['timeout']!.first, + unittest.equals(arg_timeout), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildGoogleLongrunningOperation()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = + await res.wait(arg_name, timeout: arg_timeout, $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); + }); + }); + + unittest.group( + 'resource-ProjectsLocationsFeaturestoresEntityTypesOperationsResource', + () { + unittest.test('method--cancel', () async { + final mock = HttpServerMock(); + final res = api.AiplatformApi(mock) + .projects + .locations + .featurestores + .entityTypes + .operations; + final arg_name = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildGoogleProtobufEmpty()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.cancel(arg_name, $fields: arg_$fields); + checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); + }); + + unittest.test('method--delete', () async { + final mock = HttpServerMock(); + final res = 
api.AiplatformApi(mock) + .projects + .locations + .featurestores + .entityTypes + .operations; + final arg_name = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildGoogleProtobufEmpty()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.delete(arg_name, $fields: arg_$fields); + checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); + }); + + unittest.test('method--get', () async { + final mock = HttpServerMock(); + final res = api.AiplatformApi(mock) + .projects + .locations + .featurestores + .entityTypes + .operations; + final arg_name = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildGoogleLongrunningOperation()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.get(arg_name, $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); + }); + + unittest.test('method--list', () async { + final mock = HttpServerMock(); + final res = api.AiplatformApi(mock) + .projects + .locations + .featurestores + .entityTypes + .operations; + final arg_name = 'foo'; + final arg_filter = 'foo'; + final arg_pageSize = 42; + final arg_pageToken = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + 
unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['filter']!.first, + unittest.equals(arg_filter), + ); + unittest.expect( + core.int.parse(queryMap['pageSize']!.first), + unittest.equals(arg_pageSize), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = + convert.json.encode(buildGoogleLongrunningListOperationsResponse()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.list(arg_name, + filter: arg_filter, + pageSize: arg_pageSize, + pageToken: arg_pageToken, + $fields: arg_$fields); + checkGoogleLongrunningListOperationsResponse( + response as api.GoogleLongrunningListOperationsResponse); + }); + + unittest.test('method--wait', () async { + final mock = HttpServerMock(); + final res = api.AiplatformApi(mock) + .projects + .locations + .featurestores + .entityTypes + .operations; + final arg_name = 'foo'; + final arg_timeout = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['timeout']!.first, + unittest.equals(arg_timeout), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildGoogleLongrunningOperation()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = + await res.wait(arg_name, timeout: arg_timeout, $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); + }); + }); + + unittest.group('resource-ProjectsLocationsFeaturestoresOperationsResource', + () { + unittest.test('method--cancel', () async { + final mock = HttpServerMock(); + final res = + 
api.AiplatformApi(mock).projects.locations.featurestores.operations; + final arg_name = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildGoogleProtobufEmpty()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.cancel(arg_name, $fields: arg_$fields); + checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); + }); + + unittest.test('method--delete', () async { + final mock = HttpServerMock(); + final res = + api.AiplatformApi(mock).projects.locations.featurestores.operations; + final arg_name = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildGoogleProtobufEmpty()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.delete(arg_name, $fields: arg_$fields); + checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); + }); + + unittest.test('method--get', () async { + final mock = HttpServerMock(); + final res = + api.AiplatformApi(mock).projects.locations.featurestores.operations; + final arg_name = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + 
unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildGoogleLongrunningOperation()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.get(arg_name, $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); + }); + + unittest.test('method--list', () async { + final mock = HttpServerMock(); + final res = + api.AiplatformApi(mock).projects.locations.featurestores.operations; + final arg_name = 'foo'; + final arg_filter = 'foo'; + final arg_pageSize = 42; + final arg_pageToken = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['filter']!.first, + unittest.equals(arg_filter), + ); + unittest.expect( + core.int.parse(queryMap['pageSize']!.first), + unittest.equals(arg_pageSize), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = + convert.json.encode(buildGoogleLongrunningListOperationsResponse()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.list(arg_name, + filter: arg_filter, + pageSize: arg_pageSize, + pageToken: arg_pageToken, + $fields: arg_$fields); + checkGoogleLongrunningListOperationsResponse( + response as api.GoogleLongrunningListOperationsResponse); + }); + + unittest.test('method--wait', () async { + final mock = HttpServerMock(); + final res = + api.AiplatformApi(mock).projects.locations.featurestores.operations; + final arg_name = 'foo'; + final arg_timeout = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + 
pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['timeout']!.first, + unittest.equals(arg_timeout), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildGoogleLongrunningOperation()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = + await res.wait(arg_name, timeout: arg_timeout, $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); + }); + }); + + unittest.group('resource-ProjectsLocationsHyperparameterTuningJobsResource', + () { + unittest.test('method--cancel', () async { + final mock = HttpServerMock(); + final res = + api.AiplatformApi(mock).projects.locations.hyperparameterTuningJobs; + final arg_request = + buildGoogleCloudAiplatformV1CancelHyperparameterTuningJobRequest(); + final arg_name = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = + api.GoogleCloudAiplatformV1CancelHyperparameterTuningJobRequest + .fromJson(json as core.Map); + checkGoogleCloudAiplatformV1CancelHyperparameterTuningJobRequest(obj); + + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildGoogleProtobufEmpty()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = + await res.cancel(arg_request, arg_name, $fields: arg_$fields); + checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); + }); + + unittest.test('method--create', () async { + final mock = HttpServerMock(); + final res = + api.AiplatformApi(mock).projects.locations.hyperparameterTuningJobs; + final arg_request = buildGoogleCloudAiplatformV1HyperparameterTuningJob(); + final arg_parent = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = 
api.GoogleCloudAiplatformV1HyperparameterTuningJob.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1HyperparameterTuningJob(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -48806,36 +56160,330 @@ void main() { 'content-type': 'application/json; charset=utf-8', }; final resp = convert.json - .encode(buildGoogleCloudAiplatformV1WriteFeatureValuesResponse()); + .encode(buildGoogleCloudAiplatformV1HyperparameterTuningJob()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = + await res.create(arg_request, arg_parent, $fields: arg_$fields); + checkGoogleCloudAiplatformV1HyperparameterTuningJob( + response as api.GoogleCloudAiplatformV1HyperparameterTuningJob); + }); + + unittest.test('method--delete', () async { + final mock = HttpServerMock(); + final res = + api.AiplatformApi(mock).projects.locations.hyperparameterTuningJobs; + final arg_name = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildGoogleLongrunningOperation()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.delete(arg_name, $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); + }); + + unittest.test('method--get', () async { + final mock = HttpServerMock(); + final res = + api.AiplatformApi(mock).projects.locations.hyperparameterTuningJobs; + final arg_name = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; 
charset=utf-8', + }; + final resp = convert.json + .encode(buildGoogleCloudAiplatformV1HyperparameterTuningJob()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.get(arg_name, $fields: arg_$fields); + checkGoogleCloudAiplatformV1HyperparameterTuningJob( + response as api.GoogleCloudAiplatformV1HyperparameterTuningJob); + }); + + unittest.test('method--list', () async { + final mock = HttpServerMock(); + final res = + api.AiplatformApi(mock).projects.locations.hyperparameterTuningJobs; + final arg_parent = 'foo'; + final arg_filter = 'foo'; + final arg_pageSize = 42; + final arg_pageToken = 'foo'; + final arg_readMask = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['filter']!.first, + unittest.equals(arg_filter), + ); + unittest.expect( + core.int.parse(queryMap['pageSize']!.first), + unittest.equals(arg_pageSize), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), + ); + unittest.expect( + queryMap['readMask']!.first, + unittest.equals(arg_readMask), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode( + buildGoogleCloudAiplatformV1ListHyperparameterTuningJobsResponse()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.list(arg_parent, + filter: arg_filter, + pageSize: arg_pageSize, + pageToken: arg_pageToken, + readMask: arg_readMask, + $fields: arg_$fields); + checkGoogleCloudAiplatformV1ListHyperparameterTuningJobsResponse(response + as api.GoogleCloudAiplatformV1ListHyperparameterTuningJobsResponse); + }); + }); + + unittest.group( + 'resource-ProjectsLocationsHyperparameterTuningJobsOperationsResource', + () { + unittest.test('method--cancel', () async { + final mock = HttpServerMock(); + final res = api.AiplatformApi(mock) + .projects + .locations + .hyperparameterTuningJobs + .operations; + final arg_name = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void 
addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildGoogleProtobufEmpty()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.cancel(arg_name, $fields: arg_$fields); + checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); + }); + + unittest.test('method--delete', () async { + final mock = HttpServerMock(); + final res = api.AiplatformApi(mock) + .projects + .locations + .hyperparameterTuningJobs + .operations; + final arg_name = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildGoogleProtobufEmpty()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.writeFeatureValues(arg_request, arg_entityType, - $fields: arg_$fields); - checkGoogleCloudAiplatformV1WriteFeatureValuesResponse( - response as api.GoogleCloudAiplatformV1WriteFeatureValuesResponse); + final response = await res.delete(arg_name, $fields: arg_$fields); + checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); }); - }); - unittest.group( - 'resource-ProjectsLocationsFeaturestoresEntityTypesFeaturesResource', () { - unittest.test('method--batchCreate', () async { + unittest.test('method--get', () async { final mock = HttpServerMock(); final res = api.AiplatformApi(mock) .projects .locations - .featurestores - .entityTypes - .features; - final arg_request = - buildGoogleCloudAiplatformV1BatchCreateFeaturesRequest(); - final arg_parent = 'foo'; + .hyperparameterTuningJobs + .operations; + final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = - api.GoogleCloudAiplatformV1BatchCreateFeaturesRequest.fromJson( - json as core.Map); - checkGoogleCloudAiplatformV1BatchCreateFeaturesRequest(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -48878,29 +56526,99 @@ void main() { final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final 
response = - await res.batchCreate(arg_request, arg_parent, $fields: arg_$fields); + final response = await res.get(arg_name, $fields: arg_$fields); checkGoogleLongrunningOperation( response as api.GoogleLongrunningOperation); }); - unittest.test('method--create', () async { + unittest.test('method--list', () async { final mock = HttpServerMock(); final res = api.AiplatformApi(mock) .projects .locations - .featurestores - .entityTypes - .features; - final arg_request = buildGoogleCloudAiplatformV1Feature(); - final arg_parent = 'foo'; - final arg_featureId = 'foo'; + .hyperparameterTuningJobs + .operations; + final arg_name = 'foo'; + final arg_filter = 'foo'; + final arg_pageSize = 42; + final arg_pageToken = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleCloudAiplatformV1Feature.fromJson( - json as core.Map); - checkGoogleCloudAiplatformV1Feature(obj); + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['filter']!.first, + unittest.equals(arg_filter), + ); + unittest.expect( + core.int.parse(queryMap['pageSize']!.first), + unittest.equals(arg_pageSize), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = + convert.json.encode(buildGoogleLongrunningListOperationsResponse()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.list(arg_name, + filter: arg_filter, + pageSize: arg_pageSize, + pageToken: arg_pageToken, + $fields: arg_$fields); + checkGoogleLongrunningListOperationsResponse( + response as api.GoogleLongrunningListOperationsResponse); + }); + unittest.test('method--wait', () async { + final mock = HttpServerMock(); + final res = api.AiplatformApi(mock) + .projects + .locations + .hyperparameterTuningJobs + .operations; + final arg_name = 'foo'; + final arg_timeout = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; var pathOffset = 0; core.int index; @@ -48933,8 +56651,8 @@ void main() { } } unittest.expect( - queryMap['featureId']!.first, - unittest.equals(arg_featureId), + queryMap['timeout']!.first, + unittest.equals(arg_timeout), ); unittest.expect( queryMap['fields']!.first, @@ -48947,20 +56665,76 @@ void main() { final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.create(arg_request, arg_parent, - featureId: arg_featureId, $fields: arg_$fields); + final response = 
+ await res.wait(arg_name, timeout: arg_timeout, $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); + }); + }); + + unittest.group('resource-ProjectsLocationsIndexEndpointsResource', () { + unittest.test('method--create', () async { + final mock = HttpServerMock(); + final res = api.AiplatformApi(mock).projects.locations.indexEndpoints; + final arg_request = buildGoogleCloudAiplatformV1IndexEndpoint(); + final arg_parent = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.GoogleCloudAiplatformV1IndexEndpoint.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1IndexEndpoint(obj); + + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildGoogleLongrunningOperation()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = + await res.create(arg_request, arg_parent, $fields: arg_$fields); checkGoogleLongrunningOperation( response as api.GoogleLongrunningOperation); }); unittest.test('method--delete', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .featurestores - .entityTypes - .features; + final res = api.AiplatformApi(mock).projects.locations.indexEndpoints; final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -49011,14 +56785,128 @@ void main() { response as api.GoogleLongrunningOperation); }); + unittest.test('method--deployIndex', () async { + final mock = HttpServerMock(); + final res = api.AiplatformApi(mock).projects.locations.indexEndpoints; + final arg_request = buildGoogleCloudAiplatformV1DeployIndexRequest(); + final arg_indexEndpoint = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.GoogleCloudAiplatformV1DeployIndexRequest.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1DeployIndexRequest(obj); + + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, 
() => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildGoogleLongrunningOperation()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.deployIndex(arg_request, arg_indexEndpoint, + $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); + }); + + unittest.test('method--findNeighbors', () async { + final mock = HttpServerMock(); + final res = api.AiplatformApi(mock).projects.locations.indexEndpoints; + final arg_request = buildGoogleCloudAiplatformV1FindNeighborsRequest(); + final arg_indexEndpoint = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.GoogleCloudAiplatformV1FindNeighborsRequest.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1FindNeighborsRequest(obj); + + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json + .encode(buildGoogleCloudAiplatformV1FindNeighborsResponse()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.findNeighbors(arg_request, arg_indexEndpoint, + $fields: arg_$fields); + checkGoogleCloudAiplatformV1FindNeighborsResponse( + response as api.GoogleCloudAiplatformV1FindNeighborsResponse); + }); + unittest.test('method--get', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .featurestores - .entityTypes - .features; + final res = api.AiplatformApi(mock).projects.locations.indexEndpoints; final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -49061,26 +56949,20 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleCloudAiplatformV1Feature()); + final resp = + convert.json.encode(buildGoogleCloudAiplatformV1IndexEndpoint()); return async.Future.value(stringResponse(200, h, resp)); }), true); final response = await res.get(arg_name, $fields: arg_$fields); - checkGoogleCloudAiplatformV1Feature( - response as api.GoogleCloudAiplatformV1Feature); + checkGoogleCloudAiplatformV1IndexEndpoint( + response as 
api.GoogleCloudAiplatformV1IndexEndpoint); }); unittest.test('method--list', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .featurestores - .entityTypes - .features; + final res = api.AiplatformApi(mock).projects.locations.indexEndpoints; final arg_parent = 'foo'; final arg_filter = 'foo'; - final arg_latestStatsCount = 42; - final arg_orderBy = 'foo'; final arg_pageSize = 42; final arg_pageToken = 'foo'; final arg_readMask = 'foo'; @@ -49118,29 +57000,212 @@ void main() { } } unittest.expect( - queryMap['filter']!.first, - unittest.equals(arg_filter), - ); - unittest.expect( - core.int.parse(queryMap['latestStatsCount']!.first), - unittest.equals(arg_latestStatsCount), - ); - unittest.expect( - queryMap['orderBy']!.first, - unittest.equals(arg_orderBy), + queryMap['filter']!.first, + unittest.equals(arg_filter), + ); + unittest.expect( + core.int.parse(queryMap['pageSize']!.first), + unittest.equals(arg_pageSize), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), + ); + unittest.expect( + queryMap['readMask']!.first, + unittest.equals(arg_readMask), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json + .encode(buildGoogleCloudAiplatformV1ListIndexEndpointsResponse()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.list(arg_parent, + filter: arg_filter, + pageSize: arg_pageSize, + pageToken: arg_pageToken, + readMask: arg_readMask, + $fields: arg_$fields); + checkGoogleCloudAiplatformV1ListIndexEndpointsResponse( + response as api.GoogleCloudAiplatformV1ListIndexEndpointsResponse); + }); + + unittest.test('method--mutateDeployedIndex', () async { + final mock = HttpServerMock(); + final res = api.AiplatformApi(mock).projects.locations.indexEndpoints; + final arg_request = buildGoogleCloudAiplatformV1DeployedIndex(); + final arg_indexEndpoint = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.GoogleCloudAiplatformV1DeployedIndex.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1DeployedIndex(obj); + + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildGoogleLongrunningOperation()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.mutateDeployedIndex( + arg_request, arg_indexEndpoint, + $fields: arg_$fields); + 
checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); + }); + + unittest.test('method--patch', () async { + final mock = HttpServerMock(); + final res = api.AiplatformApi(mock).projects.locations.indexEndpoints; + final arg_request = buildGoogleCloudAiplatformV1IndexEndpoint(); + final arg_name = 'foo'; + final arg_updateMask = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.GoogleCloudAiplatformV1IndexEndpoint.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1IndexEndpoint(obj); + + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['updateMask']!.first, + unittest.equals(arg_updateMask), ); unittest.expect( - core.int.parse(queryMap['pageSize']!.first), - unittest.equals(arg_pageSize), + queryMap['fields']!.first, + unittest.equals(arg_$fields), ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = + convert.json.encode(buildGoogleCloudAiplatformV1IndexEndpoint()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.patch(arg_request, arg_name, + updateMask: arg_updateMask, $fields: arg_$fields); + checkGoogleCloudAiplatformV1IndexEndpoint( + response as api.GoogleCloudAiplatformV1IndexEndpoint); + }); + + unittest.test('method--readIndexDatapoints', () async { + final mock = HttpServerMock(); + final res = api.AiplatformApi(mock).projects.locations.indexEndpoints; + final arg_request = + buildGoogleCloudAiplatformV1ReadIndexDatapointsRequest(); + final arg_indexEndpoint = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = + api.GoogleCloudAiplatformV1ReadIndexDatapointsRequest.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1ReadIndexDatapointsRequest(obj); + + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; unittest.expect( - queryMap['pageToken']!.first, - unittest.equals(arg_pageToken), + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), ); + pathOffset += 1; unittest.expect( - queryMap['readMask']!.first, - unittest.equals(arg_readMask), + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + 
core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -49150,37 +57215,26 @@ void main() { 'content-type': 'application/json; charset=utf-8', }; final resp = convert.json - .encode(buildGoogleCloudAiplatformV1ListFeaturesResponse()); + .encode(buildGoogleCloudAiplatformV1ReadIndexDatapointsResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.list(arg_parent, - filter: arg_filter, - latestStatsCount: arg_latestStatsCount, - orderBy: arg_orderBy, - pageSize: arg_pageSize, - pageToken: arg_pageToken, - readMask: arg_readMask, + final response = await res.readIndexDatapoints( + arg_request, arg_indexEndpoint, $fields: arg_$fields); - checkGoogleCloudAiplatformV1ListFeaturesResponse( - response as api.GoogleCloudAiplatformV1ListFeaturesResponse); + checkGoogleCloudAiplatformV1ReadIndexDatapointsResponse( + response as api.GoogleCloudAiplatformV1ReadIndexDatapointsResponse); }); - unittest.test('method--patch', () async { + unittest.test('method--undeployIndex', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .featurestores - .entityTypes - .features; - final arg_request = buildGoogleCloudAiplatformV1Feature(); - final arg_name = 'foo'; - final arg_updateMask = 'foo'; + final res = api.AiplatformApi(mock).projects.locations.indexEndpoints; + final arg_request = buildGoogleCloudAiplatformV1UndeployIndexRequest(); + final arg_indexEndpoint = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleCloudAiplatformV1Feature.fromJson( + final obj = api.GoogleCloudAiplatformV1UndeployIndexRequest.fromJson( json as core.Map); - checkGoogleCloudAiplatformV1Feature(obj); + checkGoogleCloudAiplatformV1UndeployIndexRequest(obj); final path = req.url.path; var pathOffset = 0; @@ -49213,10 +57267,6 @@ void main() { ); } } - unittest.expect( - queryMap['updateMask']!.first, - unittest.equals(arg_updateMask), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -49225,28 +57275,22 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleCloudAiplatformV1Feature()); + final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.patch(arg_request, arg_name, - updateMask: arg_updateMask, $fields: arg_$fields); - checkGoogleCloudAiplatformV1Feature( - response as api.GoogleCloudAiplatformV1Feature); + final response = await res.undeployIndex(arg_request, arg_indexEndpoint, + $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); }); }); - unittest.group( - 'resource-ProjectsLocationsFeaturestoresEntityTypesFeaturesOperationsResource', + unittest.group('resource-ProjectsLocationsIndexEndpointsOperationsResource', () { unittest.test('method--cancel', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .featurestores - .entityTypes - .features - .operations; + final res = + api.AiplatformApi(mock).projects.locations.indexEndpoints.operations; final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -49298,13 +57342,8 @@ void main() { unittest.test('method--delete', () async { final mock = 
HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .featurestores - .entityTypes - .features - .operations; + final res = + api.AiplatformApi(mock).projects.locations.indexEndpoints.operations; final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -49356,13 +57395,8 @@ void main() { unittest.test('method--get', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .featurestores - .entityTypes - .features - .operations; + final res = + api.AiplatformApi(mock).projects.locations.indexEndpoints.operations; final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -49415,13 +57449,8 @@ void main() { unittest.test('method--list', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .featurestores - .entityTypes - .features - .operations; + final res = + api.AiplatformApi(mock).projects.locations.indexEndpoints.operations; final arg_name = 'foo'; final arg_filter = 'foo'; final arg_pageSize = 42; @@ -49494,13 +57523,8 @@ void main() { unittest.test('method--wait', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .featurestores - .entityTypes - .features - .operations; + final res = + api.AiplatformApi(mock).projects.locations.indexEndpoints.operations; final arg_name = 'foo'; final arg_timeout = 'foo'; final arg_$fields = 'foo'; @@ -49558,20 +57582,18 @@ void main() { }); }); - unittest.group( - 'resource-ProjectsLocationsFeaturestoresEntityTypesOperationsResource', - () { - unittest.test('method--cancel', () async { + unittest.group('resource-ProjectsLocationsIndexesResource', () { + unittest.test('method--create', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .featurestores - .entityTypes - .operations; - final arg_name = 'foo'; + final res = api.AiplatformApi(mock).projects.locations.indexes; + final arg_request = buildGoogleCloudAiplatformV1Index(); + final arg_parent = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.GoogleCloudAiplatformV1Index.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1Index(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -49611,21 +57633,71 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleProtobufEmpty()); + final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.cancel(arg_name, $fields: arg_$fields); - checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); + final response = + await res.create(arg_request, arg_parent, $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); }); unittest.test('method--delete', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .featurestores - .entityTypes - .operations; + final res = api.AiplatformApi(mock).projects.locations.indexes; + final arg_name = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; 
+ unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildGoogleLongrunningOperation()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.delete(arg_name, $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); + }); + + unittest.test('method--get', () async { + final mock = HttpServerMock(); + final res = api.AiplatformApi(mock).projects.locations.indexes; final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -49668,22 +57740,22 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleProtobufEmpty()); + final resp = convert.json.encode(buildGoogleCloudAiplatformV1Index()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.delete(arg_name, $fields: arg_$fields); - checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); + final response = await res.get(arg_name, $fields: arg_$fields); + checkGoogleCloudAiplatformV1Index( + response as api.GoogleCloudAiplatformV1Index); }); - unittest.test('method--get', () async { + unittest.test('method--list', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .featurestores - .entityTypes - .operations; - final arg_name = 'foo'; + final res = api.AiplatformApi(mock).projects.locations.indexes; + final arg_parent = 'foo'; + final arg_filter = 'foo'; + final arg_pageSize = 42; + final arg_pageToken = 'foo'; + final arg_readMask = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -49717,6 +57789,22 @@ void main() { ); } } + unittest.expect( + queryMap['filter']!.first, + unittest.equals(arg_filter), + ); + unittest.expect( + core.int.parse(queryMap['pageSize']!.first), + unittest.equals(arg_pageSize), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), + ); + unittest.expect( + queryMap['readMask']!.first, + unittest.equals(arg_readMask), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -49725,28 +57813,32 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleLongrunningOperation()); + final resp = convert.json + .encode(buildGoogleCloudAiplatformV1ListIndexesResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.get(arg_name, $fields: arg_$fields); - checkGoogleLongrunningOperation( - 
response as api.GoogleLongrunningOperation); + final response = await res.list(arg_parent, + filter: arg_filter, + pageSize: arg_pageSize, + pageToken: arg_pageToken, + readMask: arg_readMask, + $fields: arg_$fields); + checkGoogleCloudAiplatformV1ListIndexesResponse( + response as api.GoogleCloudAiplatformV1ListIndexesResponse); }); - unittest.test('method--list', () async { + unittest.test('method--patch', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .featurestores - .entityTypes - .operations; + final res = api.AiplatformApi(mock).projects.locations.indexes; + final arg_request = buildGoogleCloudAiplatformV1Index(); final arg_name = 'foo'; - final arg_filter = 'foo'; - final arg_pageSize = 42; - final arg_pageToken = 'foo'; + final arg_updateMask = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.GoogleCloudAiplatformV1Index.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1Index(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -49779,16 +57871,8 @@ void main() { } } unittest.expect( - queryMap['filter']!.first, - unittest.equals(arg_filter), - ); - unittest.expect( - core.int.parse(queryMap['pageSize']!.first), - unittest.equals(arg_pageSize), - ); - unittest.expect( - queryMap['pageToken']!.first, - unittest.equals(arg_pageToken), + queryMap['updateMask']!.first, + unittest.equals(arg_updateMask), ); unittest.expect( queryMap['fields']!.first, @@ -49798,31 +57882,26 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = - convert.json.encode(buildGoogleLongrunningListOperationsResponse()); + final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.list(arg_name, - filter: arg_filter, - pageSize: arg_pageSize, - pageToken: arg_pageToken, - $fields: arg_$fields); - checkGoogleLongrunningListOperationsResponse( - response as api.GoogleLongrunningListOperationsResponse); + final response = await res.patch(arg_request, arg_name, + updateMask: arg_updateMask, $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); }); - unittest.test('method--wait', () async { + unittest.test('method--removeDatapoints', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .featurestores - .entityTypes - .operations; - final arg_name = 'foo'; - final arg_timeout = 'foo'; + final res = api.AiplatformApi(mock).projects.locations.indexes; + final arg_request = buildGoogleCloudAiplatformV1RemoveDatapointsRequest(); + final arg_index = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.GoogleCloudAiplatformV1RemoveDatapointsRequest.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1RemoveDatapointsRequest(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -49854,10 +57933,6 @@ void main() { ); } } - unittest.expect( - queryMap['timeout']!.first, - unittest.equals(arg_timeout), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -49866,25 +57941,27 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleLongrunningOperation()); + final resp = convert.json + 
.encode(buildGoogleCloudAiplatformV1RemoveDatapointsResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = - await res.wait(arg_name, timeout: arg_timeout, $fields: arg_$fields); - checkGoogleLongrunningOperation( - response as api.GoogleLongrunningOperation); + final response = await res.removeDatapoints(arg_request, arg_index, + $fields: arg_$fields); + checkGoogleCloudAiplatformV1RemoveDatapointsResponse( + response as api.GoogleCloudAiplatformV1RemoveDatapointsResponse); }); - }); - unittest.group('resource-ProjectsLocationsFeaturestoresOperationsResource', - () { - unittest.test('method--cancel', () async { + unittest.test('method--upsertDatapoints', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.featurestores.operations; - final arg_name = 'foo'; + final res = api.AiplatformApi(mock).projects.locations.indexes; + final arg_request = buildGoogleCloudAiplatformV1UpsertDatapointsRequest(); + final arg_index = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.GoogleCloudAiplatformV1UpsertDatapointsRequest.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1UpsertDatapointsRequest(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -49924,17 +58001,21 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleProtobufEmpty()); + final resp = convert.json + .encode(buildGoogleCloudAiplatformV1UpsertDatapointsResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.cancel(arg_name, $fields: arg_$fields); - checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); + final response = await res.upsertDatapoints(arg_request, arg_index, + $fields: arg_$fields); + checkGoogleCloudAiplatformV1UpsertDatapointsResponse( + response as api.GoogleCloudAiplatformV1UpsertDatapointsResponse); }); + }); - unittest.test('method--delete', () async { + unittest.group('resource-ProjectsLocationsIndexesOperationsResource', () { + unittest.test('method--cancel', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.featurestores.operations; + final res = api.AiplatformApi(mock).projects.locations.indexes.operations; final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -49980,14 +58061,13 @@ void main() { final resp = convert.json.encode(buildGoogleProtobufEmpty()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.delete(arg_name, $fields: arg_$fields); + final response = await res.cancel(arg_name, $fields: arg_$fields); checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); }); - unittest.test('method--get', () async { + unittest.test('method--delete', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.featurestores.operations; + final res = api.AiplatformApi(mock).projects.locations.indexes.operations; final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -50030,22 +58110,17 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleLongrunningOperation()); + final resp = convert.json.encode(buildGoogleProtobufEmpty()); return 
async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.get(arg_name, $fields: arg_$fields); - checkGoogleLongrunningOperation( - response as api.GoogleLongrunningOperation); + final response = await res.delete(arg_name, $fields: arg_$fields); + checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); }); - unittest.test('method--list', () async { + unittest.test('method--get', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.featurestores.operations; + final res = api.AiplatformApi(mock).projects.locations.indexes.operations; final arg_name = 'foo'; - final arg_filter = 'foo'; - final arg_pageSize = 42; - final arg_pageToken = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -50079,18 +58154,6 @@ void main() { ); } } - unittest.expect( - queryMap['filter']!.first, - unittest.equals(arg_filter), - ); - unittest.expect( - core.int.parse(queryMap['pageSize']!.first), - unittest.equals(arg_pageSize), - ); - unittest.expect( - queryMap['pageToken']!.first, - unittest.equals(arg_pageToken), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -50099,25 +58162,21 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = - convert.json.encode(buildGoogleLongrunningListOperationsResponse()); + final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.list(arg_name, - filter: arg_filter, - pageSize: arg_pageSize, - pageToken: arg_pageToken, - $fields: arg_$fields); - checkGoogleLongrunningListOperationsResponse( - response as api.GoogleLongrunningListOperationsResponse); + final response = await res.get(arg_name, $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); }); - unittest.test('method--wait', () async { + unittest.test('method--list', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.featurestores.operations; + final res = api.AiplatformApi(mock).projects.locations.indexes.operations; final arg_name = 'foo'; - final arg_timeout = 'foo'; + final arg_filter = 'foo'; + final arg_pageSize = 42; + final arg_pageToken = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -50152,8 +58211,16 @@ void main() { } } unittest.expect( - queryMap['timeout']!.first, - unittest.equals(arg_timeout), + queryMap['filter']!.first, + unittest.equals(arg_filter), + ); + unittest.expect( + core.int.parse(queryMap['pageSize']!.first), + unittest.equals(arg_pageSize), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), ); unittest.expect( queryMap['fields']!.first, @@ -50163,32 +58230,26 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleLongrunningOperation()); + final resp = + convert.json.encode(buildGoogleLongrunningListOperationsResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = - await res.wait(arg_name, timeout: arg_timeout, $fields: arg_$fields); - checkGoogleLongrunningOperation( - response as api.GoogleLongrunningOperation); + final response = await res.list(arg_name, + filter: arg_filter, + pageSize: arg_pageSize, + 
pageToken: arg_pageToken, + $fields: arg_$fields); + checkGoogleLongrunningListOperationsResponse( + response as api.GoogleLongrunningListOperationsResponse); }); - }); - unittest.group('resource-ProjectsLocationsHyperparameterTuningJobsResource', - () { - unittest.test('method--cancel', () async { + unittest.test('method--wait', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.hyperparameterTuningJobs; - final arg_request = - buildGoogleCloudAiplatformV1CancelHyperparameterTuningJobRequest(); + final res = api.AiplatformApi(mock).projects.locations.indexes.operations; final arg_name = 'foo'; + final arg_timeout = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = - api.GoogleCloudAiplatformV1CancelHyperparameterTuningJobRequest - .fromJson(json as core.Map); - checkGoogleCloudAiplatformV1CancelHyperparameterTuningJobRequest(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -50220,6 +58281,10 @@ void main() { ); } } + unittest.expect( + queryMap['timeout']!.first, + unittest.equals(arg_timeout), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -50228,25 +58293,28 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleProtobufEmpty()); + final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); final response = - await res.cancel(arg_request, arg_name, $fields: arg_$fields); - checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); + await res.wait(arg_name, timeout: arg_timeout, $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); }); + }); + unittest.group('resource-ProjectsLocationsMetadataStoresResource', () { unittest.test('method--create', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.hyperparameterTuningJobs; - final arg_request = buildGoogleCloudAiplatformV1HyperparameterTuningJob(); + final res = api.AiplatformApi(mock).projects.locations.metadataStores; + final arg_request = buildGoogleCloudAiplatformV1MetadataStore(); final arg_parent = 'foo'; + final arg_metadataStoreId = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleCloudAiplatformV1HyperparameterTuningJob.fromJson( + final obj = api.GoogleCloudAiplatformV1MetadataStore.fromJson( json as core.Map); - checkGoogleCloudAiplatformV1HyperparameterTuningJob(obj); + checkGoogleCloudAiplatformV1MetadataStore(obj); final path = req.url.path; var pathOffset = 0; @@ -50279,6 +58347,10 @@ void main() { ); } } + unittest.expect( + queryMap['metadataStoreId']!.first, + unittest.equals(arg_metadataStoreId), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -50287,21 +58359,20 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json - .encode(buildGoogleCloudAiplatformV1HyperparameterTuningJob()); + final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = - await res.create(arg_request, arg_parent, $fields: arg_$fields); - checkGoogleCloudAiplatformV1HyperparameterTuningJob( - response as api.GoogleCloudAiplatformV1HyperparameterTuningJob); + final 
response = await res.create(arg_request, arg_parent, + metadataStoreId: arg_metadataStoreId, $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); }); unittest.test('method--delete', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.hyperparameterTuningJobs; + final res = api.AiplatformApi(mock).projects.locations.metadataStores; final arg_name = 'foo'; + final arg_force = true; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -50335,6 +58406,10 @@ void main() { ); } } + unittest.expect( + queryMap['force']!.first, + unittest.equals('$arg_force'), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -50346,15 +58421,15 @@ void main() { final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.delete(arg_name, $fields: arg_$fields); + final response = + await res.delete(arg_name, force: arg_force, $fields: arg_$fields); checkGoogleLongrunningOperation( response as api.GoogleLongrunningOperation); }); unittest.test('method--get', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.hyperparameterTuningJobs; + final res = api.AiplatformApi(mock).projects.locations.metadataStores; final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -50397,24 +58472,21 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json - .encode(buildGoogleCloudAiplatformV1HyperparameterTuningJob()); + final resp = + convert.json.encode(buildGoogleCloudAiplatformV1MetadataStore()); return async.Future.value(stringResponse(200, h, resp)); }), true); final response = await res.get(arg_name, $fields: arg_$fields); - checkGoogleCloudAiplatformV1HyperparameterTuningJob( - response as api.GoogleCloudAiplatformV1HyperparameterTuningJob); + checkGoogleCloudAiplatformV1MetadataStore( + response as api.GoogleCloudAiplatformV1MetadataStore); }); unittest.test('method--list', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.hyperparameterTuningJobs; + final res = api.AiplatformApi(mock).projects.locations.metadataStores; final arg_parent = 'foo'; - final arg_filter = 'foo'; final arg_pageSize = 42; final arg_pageToken = 'foo'; - final arg_readMask = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -50448,10 +58520,6 @@ void main() { ); } } - unittest.expect( - queryMap['filter']!.first, - unittest.equals(arg_filter), - ); unittest.expect( core.int.parse(queryMap['pageSize']!.first), unittest.equals(arg_pageSize), @@ -50460,10 +58528,6 @@ void main() { queryMap['pageToken']!.first, unittest.equals(arg_pageToken), ); - unittest.expect( - queryMap['readMask']!.first, - unittest.equals(arg_readMask), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -50472,34 +58536,34 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode( - buildGoogleCloudAiplatformV1ListHyperparameterTuningJobsResponse()); + final resp = convert.json + .encode(buildGoogleCloudAiplatformV1ListMetadataStoresResponse()); return async.Future.value(stringResponse(200, h, 
resp)); }), true); final response = await res.list(arg_parent, - filter: arg_filter, pageSize: arg_pageSize, pageToken: arg_pageToken, - readMask: arg_readMask, $fields: arg_$fields); - checkGoogleCloudAiplatformV1ListHyperparameterTuningJobsResponse(response - as api.GoogleCloudAiplatformV1ListHyperparameterTuningJobsResponse); + checkGoogleCloudAiplatformV1ListMetadataStoresResponse( + response as api.GoogleCloudAiplatformV1ListMetadataStoresResponse); }); }); - unittest.group( - 'resource-ProjectsLocationsHyperparameterTuningJobsOperationsResource', + unittest.group('resource-ProjectsLocationsMetadataStoresArtifactsResource', () { - unittest.test('method--cancel', () async { + unittest.test('method--create', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .hyperparameterTuningJobs - .operations; - final arg_name = 'foo'; + final res = + api.AiplatformApi(mock).projects.locations.metadataStores.artifacts; + final arg_request = buildGoogleCloudAiplatformV1Artifact(); + final arg_parent = 'foo'; + final arg_artifactId = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.GoogleCloudAiplatformV1Artifact.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1Artifact(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -50531,6 +58595,10 @@ void main() { ); } } + unittest.expect( + queryMap['artifactId']!.first, + unittest.equals(arg_artifactId), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -50539,21 +58607,22 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleProtobufEmpty()); + final resp = + convert.json.encode(buildGoogleCloudAiplatformV1Artifact()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.cancel(arg_name, $fields: arg_$fields); - checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); + final response = await res.create(arg_request, arg_parent, + artifactId: arg_artifactId, $fields: arg_$fields); + checkGoogleCloudAiplatformV1Artifact( + response as api.GoogleCloudAiplatformV1Artifact); }); unittest.test('method--delete', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .hyperparameterTuningJobs - .operations; + final res = + api.AiplatformApi(mock).projects.locations.metadataStores.artifacts; final arg_name = 'foo'; + final arg_etag = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -50587,6 +58656,10 @@ void main() { ); } } + unittest.expect( + queryMap['etag']!.first, + unittest.equals(arg_etag), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -50595,20 +58668,19 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleProtobufEmpty()); + final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.delete(arg_name, $fields: arg_$fields); - checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); + final response = + await res.delete(arg_name, etag: arg_etag, $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); }); unittest.test('method--get', () async { 
final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .hyperparameterTuningJobs - .operations; + final res = + api.AiplatformApi(mock).projects.locations.metadataStores.artifacts; final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -50651,23 +58723,22 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleLongrunningOperation()); + final resp = + convert.json.encode(buildGoogleCloudAiplatformV1Artifact()); return async.Future.value(stringResponse(200, h, resp)); }), true); final response = await res.get(arg_name, $fields: arg_$fields); - checkGoogleLongrunningOperation( - response as api.GoogleLongrunningOperation); + checkGoogleCloudAiplatformV1Artifact( + response as api.GoogleCloudAiplatformV1Artifact); }); unittest.test('method--list', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .hyperparameterTuningJobs - .operations; - final arg_name = 'foo'; + final res = + api.AiplatformApi(mock).projects.locations.metadataStores.artifacts; + final arg_parent = 'foo'; final arg_filter = 'foo'; + final arg_orderBy = 'foo'; final arg_pageSize = 42; final arg_pageToken = 'foo'; final arg_$fields = 'foo'; @@ -50707,6 +58778,10 @@ void main() { queryMap['filter']!.first, unittest.equals(arg_filter), ); + unittest.expect( + queryMap['orderBy']!.first, + unittest.equals(arg_orderBy), + ); unittest.expect( core.int.parse(queryMap['pageSize']!.first), unittest.equals(arg_pageSize), @@ -50723,30 +58798,34 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = - convert.json.encode(buildGoogleLongrunningListOperationsResponse()); + final resp = convert.json + .encode(buildGoogleCloudAiplatformV1ListArtifactsResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.list(arg_name, + final response = await res.list(arg_parent, filter: arg_filter, + orderBy: arg_orderBy, pageSize: arg_pageSize, pageToken: arg_pageToken, $fields: arg_$fields); - checkGoogleLongrunningListOperationsResponse( - response as api.GoogleLongrunningListOperationsResponse); + checkGoogleCloudAiplatformV1ListArtifactsResponse( + response as api.GoogleCloudAiplatformV1ListArtifactsResponse); }); - unittest.test('method--wait', () async { + unittest.test('method--patch', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .hyperparameterTuningJobs - .operations; + final res = + api.AiplatformApi(mock).projects.locations.metadataStores.artifacts; + final arg_request = buildGoogleCloudAiplatformV1Artifact(); final arg_name = 'foo'; - final arg_timeout = 'foo'; + final arg_allowMissing = true; + final arg_updateMask = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.GoogleCloudAiplatformV1Artifact.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1Artifact(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -50779,8 +58858,12 @@ void main() { } } unittest.expect( - queryMap['timeout']!.first, - unittest.equals(arg_timeout), + queryMap['allowMissing']!.first, + unittest.equals('$arg_allowMissing'), + ); + unittest.expect( + queryMap['updateMask']!.first, + unittest.equals(arg_updateMask), ); unittest.expect( queryMap['fields']!.first, @@ -50790,27 
+58873,29 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleLongrunningOperation()); + final resp = + convert.json.encode(buildGoogleCloudAiplatformV1Artifact()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = - await res.wait(arg_name, timeout: arg_timeout, $fields: arg_$fields); - checkGoogleLongrunningOperation( - response as api.GoogleLongrunningOperation); + final response = await res.patch(arg_request, arg_name, + allowMissing: arg_allowMissing, + updateMask: arg_updateMask, + $fields: arg_$fields); + checkGoogleCloudAiplatformV1Artifact( + response as api.GoogleCloudAiplatformV1Artifact); }); - }); - unittest.group('resource-ProjectsLocationsIndexEndpointsResource', () { - unittest.test('method--create', () async { + unittest.test('method--purge', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.indexEndpoints; - final arg_request = buildGoogleCloudAiplatformV1IndexEndpoint(); + final res = + api.AiplatformApi(mock).projects.locations.metadataStores.artifacts; + final arg_request = buildGoogleCloudAiplatformV1PurgeArtifactsRequest(); final arg_parent = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleCloudAiplatformV1IndexEndpoint.fromJson( + final obj = api.GoogleCloudAiplatformV1PurgeArtifactsRequest.fromJson( json as core.Map); - checkGoogleCloudAiplatformV1IndexEndpoint(obj); + checkGoogleCloudAiplatformV1PurgeArtifactsRequest(obj); final path = req.url.path; var pathOffset = 0; @@ -50855,15 +58940,18 @@ void main() { return async.Future.value(stringResponse(200, h, resp)); }), true); final response = - await res.create(arg_request, arg_parent, $fields: arg_$fields); + await res.purge(arg_request, arg_parent, $fields: arg_$fields); checkGoogleLongrunningOperation( response as api.GoogleLongrunningOperation); }); - unittest.test('method--delete', () async { + unittest.test('method--queryArtifactLineageSubgraph', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.indexEndpoints; - final arg_name = 'foo'; + final res = + api.AiplatformApi(mock).projects.locations.metadataStores.artifacts; + final arg_artifact = 'foo'; + final arg_filter = 'foo'; + final arg_maxHops = 42; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -50897,6 +58985,14 @@ void main() { ); } } + unittest.expect( + queryMap['filter']!.first, + unittest.equals(arg_filter), + ); + unittest.expect( + core.int.parse(queryMap['maxHops']!.first), + unittest.equals(arg_maxHops), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -50905,25 +59001,31 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleLongrunningOperation()); + final resp = + convert.json.encode(buildGoogleCloudAiplatformV1LineageSubgraph()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.delete(arg_name, $fields: arg_$fields); - checkGoogleLongrunningOperation( - response as api.GoogleLongrunningOperation); + final response = await res.queryArtifactLineageSubgraph(arg_artifact, + filter: arg_filter, maxHops: arg_maxHops, $fields: arg_$fields); + checkGoogleCloudAiplatformV1LineageSubgraph( + response as 
api.GoogleCloudAiplatformV1LineageSubgraph); }); + }); - unittest.test('method--deployIndex', () async { + unittest.group( + 'resource-ProjectsLocationsMetadataStoresArtifactsOperationsResource', + () { + unittest.test('method--cancel', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.indexEndpoints; - final arg_request = buildGoogleCloudAiplatformV1DeployIndexRequest(); - final arg_indexEndpoint = 'foo'; + final res = api.AiplatformApi(mock) + .projects + .locations + .metadataStores + .artifacts + .operations; + final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleCloudAiplatformV1DeployIndexRequest.fromJson( - json as core.Map); - checkGoogleCloudAiplatformV1DeployIndexRequest(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -50963,26 +59065,24 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleLongrunningOperation()); + final resp = convert.json.encode(buildGoogleProtobufEmpty()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.deployIndex(arg_request, arg_indexEndpoint, - $fields: arg_$fields); - checkGoogleLongrunningOperation( - response as api.GoogleLongrunningOperation); + final response = await res.cancel(arg_name, $fields: arg_$fields); + checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); }); - unittest.test('method--findNeighbors', () async { + unittest.test('method--delete', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.indexEndpoints; - final arg_request = buildGoogleCloudAiplatformV1FindNeighborsRequest(); - final arg_indexEndpoint = 'foo'; + final res = api.AiplatformApi(mock) + .projects + .locations + .metadataStores + .artifacts + .operations; + final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleCloudAiplatformV1FindNeighborsRequest.fromJson( - json as core.Map); - checkGoogleCloudAiplatformV1FindNeighborsRequest(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -51022,19 +59122,21 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json - .encode(buildGoogleCloudAiplatformV1FindNeighborsResponse()); + final resp = convert.json.encode(buildGoogleProtobufEmpty()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.findNeighbors(arg_request, arg_indexEndpoint, - $fields: arg_$fields); - checkGoogleCloudAiplatformV1FindNeighborsResponse( - response as api.GoogleCloudAiplatformV1FindNeighborsResponse); + final response = await res.delete(arg_name, $fields: arg_$fields); + checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); }); unittest.test('method--get', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.indexEndpoints; + final res = api.AiplatformApi(mock) + .projects + .locations + .metadataStores + .artifacts + .operations; final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -51077,23 +59179,26 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = - convert.json.encode(buildGoogleCloudAiplatformV1IndexEndpoint()); + final resp = 
convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); final response = await res.get(arg_name, $fields: arg_$fields); - checkGoogleCloudAiplatformV1IndexEndpoint( - response as api.GoogleCloudAiplatformV1IndexEndpoint); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); }); unittest.test('method--list', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.indexEndpoints; - final arg_parent = 'foo'; + final res = api.AiplatformApi(mock) + .projects + .locations + .metadataStores + .artifacts + .operations; + final arg_name = 'foo'; final arg_filter = 'foo'; final arg_pageSize = 42; final arg_pageToken = 'foo'; - final arg_readMask = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -51139,10 +59244,6 @@ void main() { queryMap['pageToken']!.first, unittest.equals(arg_pageToken), ); - unittest.expect( - queryMap['readMask']!.first, - unittest.equals(arg_readMask), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -51151,31 +59252,31 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json - .encode(buildGoogleCloudAiplatformV1ListIndexEndpointsResponse()); + final resp = + convert.json.encode(buildGoogleLongrunningListOperationsResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.list(arg_parent, + final response = await res.list(arg_name, filter: arg_filter, pageSize: arg_pageSize, pageToken: arg_pageToken, - readMask: arg_readMask, $fields: arg_$fields); - checkGoogleCloudAiplatformV1ListIndexEndpointsResponse( - response as api.GoogleCloudAiplatformV1ListIndexEndpointsResponse); + checkGoogleLongrunningListOperationsResponse( + response as api.GoogleLongrunningListOperationsResponse); }); - unittest.test('method--mutateDeployedIndex', () async { + unittest.test('method--wait', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.indexEndpoints; - final arg_request = buildGoogleCloudAiplatformV1DeployedIndex(); - final arg_indexEndpoint = 'foo'; + final res = api.AiplatformApi(mock) + .projects + .locations + .metadataStores + .artifacts + .operations; + final arg_name = 'foo'; + final arg_timeout = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleCloudAiplatformV1DeployedIndex.fromJson( - json as core.Map); - checkGoogleCloudAiplatformV1DeployedIndex(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -51207,6 +59308,10 @@ void main() { ); } } + unittest.expect( + queryMap['timeout']!.first, + unittest.equals(arg_timeout), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -51218,24 +59323,29 @@ void main() { final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.mutateDeployedIndex( - arg_request, arg_indexEndpoint, - $fields: arg_$fields); + final response = + await res.wait(arg_name, timeout: arg_timeout, $fields: arg_$fields); checkGoogleLongrunningOperation( response as api.GoogleLongrunningOperation); }); + }); - unittest.test('method--patch', () async { + unittest.group('resource-ProjectsLocationsMetadataStoresContextsResource', + () { + 
unittest.test('method--addContextArtifactsAndExecutions', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.indexEndpoints; - final arg_request = buildGoogleCloudAiplatformV1IndexEndpoint(); - final arg_name = 'foo'; - final arg_updateMask = 'foo'; + final res = + api.AiplatformApi(mock).projects.locations.metadataStores.contexts; + final arg_request = + buildGoogleCloudAiplatformV1AddContextArtifactsAndExecutionsRequest(); + final arg_context = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleCloudAiplatformV1IndexEndpoint.fromJson( - json as core.Map); - checkGoogleCloudAiplatformV1IndexEndpoint(obj); + final obj = + api.GoogleCloudAiplatformV1AddContextArtifactsAndExecutionsRequest + .fromJson(json as core.Map); + checkGoogleCloudAiplatformV1AddContextArtifactsAndExecutionsRequest( + obj); final path = req.url.path; var pathOffset = 0; @@ -51268,10 +59378,6 @@ void main() { ); } } - unittest.expect( - queryMap['updateMask']!.first, - unittest.equals(arg_updateMask), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -51280,28 +59386,31 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = - convert.json.encode(buildGoogleCloudAiplatformV1IndexEndpoint()); + final resp = convert.json.encode( + buildGoogleCloudAiplatformV1AddContextArtifactsAndExecutionsResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.patch(arg_request, arg_name, - updateMask: arg_updateMask, $fields: arg_$fields); - checkGoogleCloudAiplatformV1IndexEndpoint( - response as api.GoogleCloudAiplatformV1IndexEndpoint); + final response = await res.addContextArtifactsAndExecutions( + arg_request, arg_context, + $fields: arg_$fields); + checkGoogleCloudAiplatformV1AddContextArtifactsAndExecutionsResponse( + response as api + .GoogleCloudAiplatformV1AddContextArtifactsAndExecutionsResponse); }); - unittest.test('method--readIndexDatapoints', () async { + unittest.test('method--addContextChildren', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.indexEndpoints; + final res = + api.AiplatformApi(mock).projects.locations.metadataStores.contexts; final arg_request = - buildGoogleCloudAiplatformV1ReadIndexDatapointsRequest(); - final arg_indexEndpoint = 'foo'; + buildGoogleCloudAiplatformV1AddContextChildrenRequest(); + final arg_context = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final obj = - api.GoogleCloudAiplatformV1ReadIndexDatapointsRequest.fromJson( + api.GoogleCloudAiplatformV1AddContextChildrenRequest.fromJson( json as core.Map); - checkGoogleCloudAiplatformV1ReadIndexDatapointsRequest(obj); + checkGoogleCloudAiplatformV1AddContextChildrenRequest(obj); final path = req.url.path; var pathOffset = 0; @@ -51343,26 +59452,27 @@ void main() { 'content-type': 'application/json; charset=utf-8', }; final resp = convert.json - .encode(buildGoogleCloudAiplatformV1ReadIndexDatapointsResponse()); + .encode(buildGoogleCloudAiplatformV1AddContextChildrenResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.readIndexDatapoints( - arg_request, arg_indexEndpoint, + final response = await res.addContextChildren(arg_request, arg_context, $fields: arg_$fields); - checkGoogleCloudAiplatformV1ReadIndexDatapointsResponse( - 
response as api.GoogleCloudAiplatformV1ReadIndexDatapointsResponse); + checkGoogleCloudAiplatformV1AddContextChildrenResponse( + response as api.GoogleCloudAiplatformV1AddContextChildrenResponse); }); - unittest.test('method--undeployIndex', () async { + unittest.test('method--create', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.indexEndpoints; - final arg_request = buildGoogleCloudAiplatformV1UndeployIndexRequest(); - final arg_indexEndpoint = 'foo'; + final res = + api.AiplatformApi(mock).projects.locations.metadataStores.contexts; + final arg_request = buildGoogleCloudAiplatformV1Context(); + final arg_parent = 'foo'; + final arg_contextId = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleCloudAiplatformV1UndeployIndexRequest.fromJson( + final obj = api.GoogleCloudAiplatformV1Context.fromJson( json as core.Map); - checkGoogleCloudAiplatformV1UndeployIndexRequest(obj); + checkGoogleCloudAiplatformV1Context(obj); final path = req.url.path; var pathOffset = 0; @@ -51395,6 +59505,10 @@ void main() { ); } } + unittest.expect( + queryMap['contextId']!.first, + unittest.equals(arg_contextId), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -51403,23 +59517,22 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleLongrunningOperation()); + final resp = convert.json.encode(buildGoogleCloudAiplatformV1Context()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.undeployIndex(arg_request, arg_indexEndpoint, - $fields: arg_$fields); - checkGoogleLongrunningOperation( - response as api.GoogleLongrunningOperation); + final response = await res.create(arg_request, arg_parent, + contextId: arg_contextId, $fields: arg_$fields); + checkGoogleCloudAiplatformV1Context( + response as api.GoogleCloudAiplatformV1Context); }); - }); - unittest.group('resource-ProjectsLocationsIndexEndpointsOperationsResource', - () { - unittest.test('method--cancel', () async { + unittest.test('method--delete', () async { final mock = HttpServerMock(); final res = - api.AiplatformApi(mock).projects.locations.indexEndpoints.operations; + api.AiplatformApi(mock).projects.locations.metadataStores.contexts; final arg_name = 'foo'; + final arg_etag = 'foo'; + final arg_force = true; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -51453,6 +59566,14 @@ void main() { ); } } + unittest.expect( + queryMap['etag']!.first, + unittest.equals(arg_etag), + ); + unittest.expect( + queryMap['force']!.first, + unittest.equals('$arg_force'), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -51461,17 +59582,19 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleProtobufEmpty()); + final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.cancel(arg_name, $fields: arg_$fields); - checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); + final response = await res.delete(arg_name, + etag: arg_etag, force: arg_force, $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); }); - unittest.test('method--delete', () async { + 
unittest.test('method--get', () async { final mock = HttpServerMock(); final res = - api.AiplatformApi(mock).projects.locations.indexEndpoints.operations; + api.AiplatformApi(mock).projects.locations.metadataStores.contexts; final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -51514,18 +59637,23 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleProtobufEmpty()); + final resp = convert.json.encode(buildGoogleCloudAiplatformV1Context()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.delete(arg_name, $fields: arg_$fields); - checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); + final response = await res.get(arg_name, $fields: arg_$fields); + checkGoogleCloudAiplatformV1Context( + response as api.GoogleCloudAiplatformV1Context); }); - unittest.test('method--get', () async { + unittest.test('method--list', () async { final mock = HttpServerMock(); final res = - api.AiplatformApi(mock).projects.locations.indexEndpoints.operations; - final arg_name = 'foo'; + api.AiplatformApi(mock).projects.locations.metadataStores.contexts; + final arg_parent = 'foo'; + final arg_filter = 'foo'; + final arg_orderBy = 'foo'; + final arg_pageSize = 42; + final arg_pageToken = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -51559,6 +59687,22 @@ void main() { ); } } + unittest.expect( + queryMap['filter']!.first, + unittest.equals(arg_filter), + ); + unittest.expect( + queryMap['orderBy']!.first, + unittest.equals(arg_orderBy), + ); + unittest.expect( + core.int.parse(queryMap['pageSize']!.first), + unittest.equals(arg_pageSize), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -51567,24 +59711,34 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleLongrunningOperation()); + final resp = convert.json + .encode(buildGoogleCloudAiplatformV1ListContextsResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.get(arg_name, $fields: arg_$fields); - checkGoogleLongrunningOperation( - response as api.GoogleLongrunningOperation); + final response = await res.list(arg_parent, + filter: arg_filter, + orderBy: arg_orderBy, + pageSize: arg_pageSize, + pageToken: arg_pageToken, + $fields: arg_$fields); + checkGoogleCloudAiplatformV1ListContextsResponse( + response as api.GoogleCloudAiplatformV1ListContextsResponse); }); - unittest.test('method--list', () async { + unittest.test('method--patch', () async { final mock = HttpServerMock(); final res = - api.AiplatformApi(mock).projects.locations.indexEndpoints.operations; + api.AiplatformApi(mock).projects.locations.metadataStores.contexts; + final arg_request = buildGoogleCloudAiplatformV1Context(); final arg_name = 'foo'; - final arg_filter = 'foo'; - final arg_pageSize = 42; - final arg_pageToken = 'foo'; + final arg_allowMissing = true; + final arg_updateMask = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.GoogleCloudAiplatformV1Context.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1Context(obj); + final path = req.url.path; var pathOffset = 0; core.int 
index; @@ -51617,16 +59771,12 @@ void main() { } } unittest.expect( - queryMap['filter']!.first, - unittest.equals(arg_filter), - ); - unittest.expect( - core.int.parse(queryMap['pageSize']!.first), - unittest.equals(arg_pageSize), + queryMap['allowMissing']!.first, + unittest.equals('$arg_allowMissing'), ); unittest.expect( - queryMap['pageToken']!.first, - unittest.equals(arg_pageToken), + queryMap['updateMask']!.first, + unittest.equals(arg_updateMask), ); unittest.expect( queryMap['fields']!.first, @@ -51636,27 +59786,29 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = - convert.json.encode(buildGoogleLongrunningListOperationsResponse()); + final resp = convert.json.encode(buildGoogleCloudAiplatformV1Context()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.list(arg_name, - filter: arg_filter, - pageSize: arg_pageSize, - pageToken: arg_pageToken, + final response = await res.patch(arg_request, arg_name, + allowMissing: arg_allowMissing, + updateMask: arg_updateMask, $fields: arg_$fields); - checkGoogleLongrunningListOperationsResponse( - response as api.GoogleLongrunningListOperationsResponse); + checkGoogleCloudAiplatformV1Context( + response as api.GoogleCloudAiplatformV1Context); }); - unittest.test('method--wait', () async { + unittest.test('method--purge', () async { final mock = HttpServerMock(); final res = - api.AiplatformApi(mock).projects.locations.indexEndpoints.operations; - final arg_name = 'foo'; - final arg_timeout = 'foo'; + api.AiplatformApi(mock).projects.locations.metadataStores.contexts; + final arg_request = buildGoogleCloudAiplatformV1PurgeContextsRequest(); + final arg_parent = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.GoogleCloudAiplatformV1PurgeContextsRequest.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1PurgeContextsRequest(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -51688,10 +59840,6 @@ void main() { ); } } - unittest.expect( - queryMap['timeout']!.first, - unittest.equals(arg_timeout), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -51704,24 +59852,18 @@ void main() { return async.Future.value(stringResponse(200, h, resp)); }), true); final response = - await res.wait(arg_name, timeout: arg_timeout, $fields: arg_$fields); + await res.purge(arg_request, arg_parent, $fields: arg_$fields); checkGoogleLongrunningOperation( response as api.GoogleLongrunningOperation); }); - }); - unittest.group('resource-ProjectsLocationsIndexesResource', () { - unittest.test('method--create', () async { + unittest.test('method--queryContextLineageSubgraph', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.indexes; - final arg_request = buildGoogleCloudAiplatformV1Index(); - final arg_parent = 'foo'; + final res = + api.AiplatformApi(mock).projects.locations.metadataStores.contexts; + final arg_context = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleCloudAiplatformV1Index.fromJson( - json as core.Map); - checkGoogleCloudAiplatformV1Index(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -51761,21 +59903,30 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleLongrunningOperation()); + final resp = + 
convert.json.encode(buildGoogleCloudAiplatformV1LineageSubgraph()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = - await res.create(arg_request, arg_parent, $fields: arg_$fields); - checkGoogleLongrunningOperation( - response as api.GoogleLongrunningOperation); + final response = await res.queryContextLineageSubgraph(arg_context, + $fields: arg_$fields); + checkGoogleCloudAiplatformV1LineageSubgraph( + response as api.GoogleCloudAiplatformV1LineageSubgraph); }); - unittest.test('method--delete', () async { + unittest.test('method--removeContextChildren', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.indexes; - final arg_name = 'foo'; + final res = + api.AiplatformApi(mock).projects.locations.metadataStores.contexts; + final arg_request = + buildGoogleCloudAiplatformV1RemoveContextChildrenRequest(); + final arg_context = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = + api.GoogleCloudAiplatformV1RemoveContextChildrenRequest.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1RemoveContextChildrenRequest(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -51815,17 +59966,27 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleLongrunningOperation()); + final resp = convert.json.encode( + buildGoogleCloudAiplatformV1RemoveContextChildrenResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.delete(arg_name, $fields: arg_$fields); - checkGoogleLongrunningOperation( - response as api.GoogleLongrunningOperation); + final response = await res.removeContextChildren(arg_request, arg_context, + $fields: arg_$fields); + checkGoogleCloudAiplatformV1RemoveContextChildrenResponse( + response as api.GoogleCloudAiplatformV1RemoveContextChildrenResponse); }); + }); - unittest.test('method--get', () async { + unittest.group( + 'resource-ProjectsLocationsMetadataStoresContextsOperationsResource', () { + unittest.test('method--cancel', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.indexes; + final res = api.AiplatformApi(mock) + .projects + .locations + .metadataStores + .contexts + .operations; final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -51868,22 +60029,22 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleCloudAiplatformV1Index()); + final resp = convert.json.encode(buildGoogleProtobufEmpty()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.get(arg_name, $fields: arg_$fields); - checkGoogleCloudAiplatformV1Index( - response as api.GoogleCloudAiplatformV1Index); + final response = await res.cancel(arg_name, $fields: arg_$fields); + checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); }); - unittest.test('method--list', () async { + unittest.test('method--delete', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.indexes; - final arg_parent = 'foo'; - final arg_filter = 'foo'; - final arg_pageSize = 42; - final arg_pageToken = 'foo'; - final arg_readMask = 'foo'; + final res = api.AiplatformApi(mock) + .projects + .locations + .metadataStores + .contexts + .operations; + final 
arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -51917,22 +60078,6 @@ void main() { ); } } - unittest.expect( - queryMap['filter']!.first, - unittest.equals(arg_filter), - ); - unittest.expect( - core.int.parse(queryMap['pageSize']!.first), - unittest.equals(arg_pageSize), - ); - unittest.expect( - queryMap['pageToken']!.first, - unittest.equals(arg_pageToken), - ); - unittest.expect( - queryMap['readMask']!.first, - unittest.equals(arg_readMask), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -51941,32 +60086,24 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json - .encode(buildGoogleCloudAiplatformV1ListIndexesResponse()); + final resp = convert.json.encode(buildGoogleProtobufEmpty()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.list(arg_parent, - filter: arg_filter, - pageSize: arg_pageSize, - pageToken: arg_pageToken, - readMask: arg_readMask, - $fields: arg_$fields); - checkGoogleCloudAiplatformV1ListIndexesResponse( - response as api.GoogleCloudAiplatformV1ListIndexesResponse); + final response = await res.delete(arg_name, $fields: arg_$fields); + checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); }); - unittest.test('method--patch', () async { + unittest.test('method--get', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.indexes; - final arg_request = buildGoogleCloudAiplatformV1Index(); + final res = api.AiplatformApi(mock) + .projects + .locations + .metadataStores + .contexts + .operations; final arg_name = 'foo'; - final arg_updateMask = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleCloudAiplatformV1Index.fromJson( - json as core.Map); - checkGoogleCloudAiplatformV1Index(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -51998,10 +60135,6 @@ void main() { ); } } - unittest.expect( - queryMap['updateMask']!.first, - unittest.equals(arg_updateMask), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -52013,23 +60146,25 @@ void main() { final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.patch(arg_request, arg_name, - updateMask: arg_updateMask, $fields: arg_$fields); + final response = await res.get(arg_name, $fields: arg_$fields); checkGoogleLongrunningOperation( response as api.GoogleLongrunningOperation); }); - unittest.test('method--removeDatapoints', () async { + unittest.test('method--list', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.indexes; - final arg_request = buildGoogleCloudAiplatformV1RemoveDatapointsRequest(); - final arg_index = 'foo'; + final res = api.AiplatformApi(mock) + .projects + .locations + .metadataStores + .contexts + .operations; + final arg_name = 'foo'; + final arg_filter = 'foo'; + final arg_pageSize = 42; + final arg_pageToken = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleCloudAiplatformV1RemoveDatapointsRequest.fromJson( - json as core.Map); - checkGoogleCloudAiplatformV1RemoveDatapointsRequest(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ 
-52061,6 +60196,18 @@ void main() { ); } } + unittest.expect( + queryMap['filter']!.first, + unittest.equals(arg_filter), + ); + unittest.expect( + core.int.parse(queryMap['pageSize']!.first), + unittest.equals(arg_pageSize), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -52069,27 +60216,31 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json - .encode(buildGoogleCloudAiplatformV1RemoveDatapointsResponse()); + final resp = + convert.json.encode(buildGoogleLongrunningListOperationsResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.removeDatapoints(arg_request, arg_index, + final response = await res.list(arg_name, + filter: arg_filter, + pageSize: arg_pageSize, + pageToken: arg_pageToken, $fields: arg_$fields); - checkGoogleCloudAiplatformV1RemoveDatapointsResponse( - response as api.GoogleCloudAiplatformV1RemoveDatapointsResponse); + checkGoogleLongrunningListOperationsResponse( + response as api.GoogleLongrunningListOperationsResponse); }); - unittest.test('method--upsertDatapoints', () async { + unittest.test('method--wait', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.indexes; - final arg_request = buildGoogleCloudAiplatformV1UpsertDatapointsRequest(); - final arg_index = 'foo'; + final res = api.AiplatformApi(mock) + .projects + .locations + .metadataStores + .contexts + .operations; + final arg_name = 'foo'; + final arg_timeout = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleCloudAiplatformV1UpsertDatapointsRequest.fromJson( - json as core.Map); - checkGoogleCloudAiplatformV1UpsertDatapointsRequest(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -52121,6 +60272,10 @@ void main() { ); } } + unittest.expect( + queryMap['timeout']!.first, + unittest.equals(arg_timeout), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -52129,24 +60284,32 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json - .encode(buildGoogleCloudAiplatformV1UpsertDatapointsResponse()); + final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.upsertDatapoints(arg_request, arg_index, - $fields: arg_$fields); - checkGoogleCloudAiplatformV1UpsertDatapointsResponse( - response as api.GoogleCloudAiplatformV1UpsertDatapointsResponse); + final response = + await res.wait(arg_name, timeout: arg_timeout, $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); }); }); - unittest.group('resource-ProjectsLocationsIndexesOperationsResource', () { - unittest.test('method--cancel', () async { + unittest.group('resource-ProjectsLocationsMetadataStoresExecutionsResource', + () { + unittest.test('method--addExecutionEvents', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.indexes.operations; - final arg_name = 'foo'; + final res = + api.AiplatformApi(mock).projects.locations.metadataStores.executions; + final arg_request = + buildGoogleCloudAiplatformV1AddExecutionEventsRequest(); + final arg_execution = 'foo'; final arg_$fields = 'foo'; 
mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = + api.GoogleCloudAiplatformV1AddExecutionEventsRequest.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1AddExecutionEventsRequest(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -52186,19 +60349,29 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleProtobufEmpty()); + final resp = convert.json + .encode(buildGoogleCloudAiplatformV1AddExecutionEventsResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.cancel(arg_name, $fields: arg_$fields); - checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); + final response = await res.addExecutionEvents(arg_request, arg_execution, + $fields: arg_$fields); + checkGoogleCloudAiplatformV1AddExecutionEventsResponse( + response as api.GoogleCloudAiplatformV1AddExecutionEventsResponse); }); - unittest.test('method--delete', () async { + unittest.test('method--create', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.indexes.operations; - final arg_name = 'foo'; + final res = + api.AiplatformApi(mock).projects.locations.metadataStores.executions; + final arg_request = buildGoogleCloudAiplatformV1Execution(); + final arg_parent = 'foo'; + final arg_executionId = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.GoogleCloudAiplatformV1Execution.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1Execution(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -52230,6 +60403,10 @@ void main() { ); } } + unittest.expect( + queryMap['executionId']!.first, + unittest.equals(arg_executionId), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -52238,17 +60415,22 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleProtobufEmpty()); + final resp = + convert.json.encode(buildGoogleCloudAiplatformV1Execution()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.delete(arg_name, $fields: arg_$fields); - checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); + final response = await res.create(arg_request, arg_parent, + executionId: arg_executionId, $fields: arg_$fields); + checkGoogleCloudAiplatformV1Execution( + response as api.GoogleCloudAiplatformV1Execution); }); - unittest.test('method--get', () async { + unittest.test('method--delete', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.indexes.operations; + final res = + api.AiplatformApi(mock).projects.locations.metadataStores.executions; final arg_name = 'foo'; + final arg_etag = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -52282,6 +60464,10 @@ void main() { ); } } + unittest.expect( + queryMap['etag']!.first, + unittest.equals(arg_etag), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -52293,18 +60479,17 @@ void main() { final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.get(arg_name, $fields: arg_$fields); + final response = + await res.delete(arg_name, etag: arg_etag, 
$fields: arg_$fields); checkGoogleLongrunningOperation( response as api.GoogleLongrunningOperation); }); - unittest.test('method--list', () async { + unittest.test('method--get', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.indexes.operations; + final res = + api.AiplatformApi(mock).projects.locations.metadataStores.executions; final arg_name = 'foo'; - final arg_filter = 'foo'; - final arg_pageSize = 42; - final arg_pageToken = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -52338,18 +60523,6 @@ void main() { ); } } - unittest.expect( - queryMap['filter']!.first, - unittest.equals(arg_filter), - ); - unittest.expect( - core.int.parse(queryMap['pageSize']!.first), - unittest.equals(arg_pageSize), - ); - unittest.expect( - queryMap['pageToken']!.first, - unittest.equals(arg_pageToken), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -52359,23 +60532,23 @@ void main() { 'content-type': 'application/json; charset=utf-8', }; final resp = - convert.json.encode(buildGoogleLongrunningListOperationsResponse()); + convert.json.encode(buildGoogleCloudAiplatformV1Execution()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.list(arg_name, - filter: arg_filter, - pageSize: arg_pageSize, - pageToken: arg_pageToken, - $fields: arg_$fields); - checkGoogleLongrunningListOperationsResponse( - response as api.GoogleLongrunningListOperationsResponse); + final response = await res.get(arg_name, $fields: arg_$fields); + checkGoogleCloudAiplatformV1Execution( + response as api.GoogleCloudAiplatformV1Execution); }); - unittest.test('method--wait', () async { + unittest.test('method--list', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.indexes.operations; - final arg_name = 'foo'; - final arg_timeout = 'foo'; + final res = + api.AiplatformApi(mock).projects.locations.metadataStores.executions; + final arg_parent = 'foo'; + final arg_filter = 'foo'; + final arg_orderBy = 'foo'; + final arg_pageSize = 42; + final arg_pageToken = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -52410,8 +60583,20 @@ void main() { } } unittest.expect( - queryMap['timeout']!.first, - unittest.equals(arg_timeout), + queryMap['filter']!.first, + unittest.equals(arg_filter), + ); + unittest.expect( + queryMap['orderBy']!.first, + unittest.equals(arg_orderBy), + ); + unittest.expect( + core.int.parse(queryMap['pageSize']!.first), + unittest.equals(arg_pageSize), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), ); unittest.expect( queryMap['fields']!.first, @@ -52421,28 +60606,33 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleLongrunningOperation()); + final resp = convert.json + .encode(buildGoogleCloudAiplatformV1ListExecutionsResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = - await res.wait(arg_name, timeout: arg_timeout, $fields: arg_$fields); - checkGoogleLongrunningOperation( - response as api.GoogleLongrunningOperation); + final response = await res.list(arg_parent, + filter: arg_filter, + orderBy: arg_orderBy, + pageSize: arg_pageSize, + pageToken: arg_pageToken, + $fields: arg_$fields); + 
checkGoogleCloudAiplatformV1ListExecutionsResponse( + response as api.GoogleCloudAiplatformV1ListExecutionsResponse); }); - }); - unittest.group('resource-ProjectsLocationsMetadataStoresResource', () { - unittest.test('method--create', () async { + unittest.test('method--patch', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.metadataStores; - final arg_request = buildGoogleCloudAiplatformV1MetadataStore(); - final arg_parent = 'foo'; - final arg_metadataStoreId = 'foo'; + final res = + api.AiplatformApi(mock).projects.locations.metadataStores.executions; + final arg_request = buildGoogleCloudAiplatformV1Execution(); + final arg_name = 'foo'; + final arg_allowMissing = true; + final arg_updateMask = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleCloudAiplatformV1MetadataStore.fromJson( + final obj = api.GoogleCloudAiplatformV1Execution.fromJson( json as core.Map); - checkGoogleCloudAiplatformV1MetadataStore(obj); + checkGoogleCloudAiplatformV1Execution(obj); final path = req.url.path; var pathOffset = 0; @@ -52476,8 +60666,12 @@ void main() { } } unittest.expect( - queryMap['metadataStoreId']!.first, - unittest.equals(arg_metadataStoreId), + queryMap['allowMissing']!.first, + unittest.equals('$arg_allowMissing'), + ); + unittest.expect( + queryMap['updateMask']!.first, + unittest.equals(arg_updateMask), ); unittest.expect( queryMap['fields']!.first, @@ -52487,22 +60681,30 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleLongrunningOperation()); + final resp = + convert.json.encode(buildGoogleCloudAiplatformV1Execution()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.create(arg_request, arg_parent, - metadataStoreId: arg_metadataStoreId, $fields: arg_$fields); - checkGoogleLongrunningOperation( - response as api.GoogleLongrunningOperation); + final response = await res.patch(arg_request, arg_name, + allowMissing: arg_allowMissing, + updateMask: arg_updateMask, + $fields: arg_$fields); + checkGoogleCloudAiplatformV1Execution( + response as api.GoogleCloudAiplatformV1Execution); }); - unittest.test('method--delete', () async { + unittest.test('method--purge', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.metadataStores; - final arg_name = 'foo'; - final arg_force = true; + final res = + api.AiplatformApi(mock).projects.locations.metadataStores.executions; + final arg_request = buildGoogleCloudAiplatformV1PurgeExecutionsRequest(); + final arg_parent = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.GoogleCloudAiplatformV1PurgeExecutionsRequest.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1PurgeExecutionsRequest(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -52534,10 +60736,6 @@ void main() { ); } } - unittest.expect( - queryMap['force']!.first, - unittest.equals('$arg_force'), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -52550,15 +60748,16 @@ void main() { return async.Future.value(stringResponse(200, h, resp)); }), true); final response = - await res.delete(arg_name, force: arg_force, $fields: arg_$fields); + await res.purge(arg_request, arg_parent, $fields: arg_$fields); checkGoogleLongrunningOperation( response as 
api.GoogleLongrunningOperation); }); - unittest.test('method--get', () async { + unittest.test('method--queryExecutionInputsAndOutputs', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.metadataStores; - final arg_name = 'foo'; + final res = + api.AiplatformApi(mock).projects.locations.metadataStores.executions; + final arg_execution = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -52601,20 +60800,28 @@ void main() { 'content-type': 'application/json; charset=utf-8', }; final resp = - convert.json.encode(buildGoogleCloudAiplatformV1MetadataStore()); + convert.json.encode(buildGoogleCloudAiplatformV1LineageSubgraph()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.get(arg_name, $fields: arg_$fields); - checkGoogleCloudAiplatformV1MetadataStore( - response as api.GoogleCloudAiplatformV1MetadataStore); + final response = await res.queryExecutionInputsAndOutputs(arg_execution, + $fields: arg_$fields); + checkGoogleCloudAiplatformV1LineageSubgraph( + response as api.GoogleCloudAiplatformV1LineageSubgraph); }); + }); - unittest.test('method--list', () async { + unittest.group( + 'resource-ProjectsLocationsMetadataStoresExecutionsOperationsResource', + () { + unittest.test('method--cancel', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.metadataStores; - final arg_parent = 'foo'; - final arg_pageSize = 42; - final arg_pageToken = 'foo'; + final res = api.AiplatformApi(mock) + .projects + .locations + .metadataStores + .executions + .operations; + final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -52648,14 +60855,6 @@ void main() { ); } } - unittest.expect( - core.int.parse(queryMap['pageSize']!.first), - unittest.equals(arg_pageSize), - ); - unittest.expect( - queryMap['pageToken']!.first, - unittest.equals(arg_pageToken), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -52664,34 +60863,24 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json - .encode(buildGoogleCloudAiplatformV1ListMetadataStoresResponse()); + final resp = convert.json.encode(buildGoogleProtobufEmpty()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.list(arg_parent, - pageSize: arg_pageSize, - pageToken: arg_pageToken, - $fields: arg_$fields); - checkGoogleCloudAiplatformV1ListMetadataStoresResponse( - response as api.GoogleCloudAiplatformV1ListMetadataStoresResponse); + final response = await res.cancel(arg_name, $fields: arg_$fields); + checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); }); - }); - unittest.group('resource-ProjectsLocationsMetadataStoresArtifactsResource', - () { - unittest.test('method--create', () async { + unittest.test('method--delete', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.metadataStores.artifacts; - final arg_request = buildGoogleCloudAiplatformV1Artifact(); - final arg_parent = 'foo'; - final arg_artifactId = 'foo'; + final res = api.AiplatformApi(mock) + .projects + .locations + .metadataStores + .executions + .operations; + final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = 
api.GoogleCloudAiplatformV1Artifact.fromJson( - json as core.Map); - checkGoogleCloudAiplatformV1Artifact(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -52723,10 +60912,6 @@ void main() { ); } } - unittest.expect( - queryMap['artifactId']!.first, - unittest.equals(arg_artifactId), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -52735,22 +60920,22 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = - convert.json.encode(buildGoogleCloudAiplatformV1Artifact()); + final resp = convert.json.encode(buildGoogleProtobufEmpty()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.create(arg_request, arg_parent, - artifactId: arg_artifactId, $fields: arg_$fields); - checkGoogleCloudAiplatformV1Artifact( - response as api.GoogleCloudAiplatformV1Artifact); + final response = await res.delete(arg_name, $fields: arg_$fields); + checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); }); - unittest.test('method--delete', () async { + unittest.test('method--get', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.metadataStores.artifacts; + final res = api.AiplatformApi(mock) + .projects + .locations + .metadataStores + .executions + .operations; final arg_name = 'foo'; - final arg_etag = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -52784,10 +60969,6 @@ void main() { ); } } - unittest.expect( - queryMap['etag']!.first, - unittest.equals(arg_etag), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -52799,17 +60980,23 @@ void main() { final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = - await res.delete(arg_name, etag: arg_etag, $fields: arg_$fields); + final response = await res.get(arg_name, $fields: arg_$fields); checkGoogleLongrunningOperation( response as api.GoogleLongrunningOperation); }); - unittest.test('method--get', () async { + unittest.test('method--list', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.metadataStores.artifacts; + final res = api.AiplatformApi(mock) + .projects + .locations + .metadataStores + .executions + .operations; final arg_name = 'foo'; + final arg_filter = 'foo'; + final arg_pageSize = 42; + final arg_pageToken = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -52843,6 +61030,18 @@ void main() { ); } } + unittest.expect( + queryMap['filter']!.first, + unittest.equals(arg_filter), + ); + unittest.expect( + core.int.parse(queryMap['pageSize']!.first), + unittest.equals(arg_pageSize), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -52852,23 +61051,28 @@ void main() { 'content-type': 'application/json; charset=utf-8', }; final resp = - convert.json.encode(buildGoogleCloudAiplatformV1Artifact()); + convert.json.encode(buildGoogleLongrunningListOperationsResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.get(arg_name, $fields: arg_$fields); - checkGoogleCloudAiplatformV1Artifact( - response as api.GoogleCloudAiplatformV1Artifact); + 
final response = await res.list(arg_name, + filter: arg_filter, + pageSize: arg_pageSize, + pageToken: arg_pageToken, + $fields: arg_$fields); + checkGoogleLongrunningListOperationsResponse( + response as api.GoogleLongrunningListOperationsResponse); }); - unittest.test('method--list', () async { + unittest.test('method--wait', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.metadataStores.artifacts; - final arg_parent = 'foo'; - final arg_filter = 'foo'; - final arg_orderBy = 'foo'; - final arg_pageSize = 42; - final arg_pageToken = 'foo'; + final res = api.AiplatformApi(mock) + .projects + .locations + .metadataStores + .executions + .operations; + final arg_name = 'foo'; + final arg_timeout = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -52903,20 +61107,8 @@ void main() { } } unittest.expect( - queryMap['filter']!.first, - unittest.equals(arg_filter), - ); - unittest.expect( - queryMap['orderBy']!.first, - unittest.equals(arg_orderBy), - ); - unittest.expect( - core.int.parse(queryMap['pageSize']!.first), - unittest.equals(arg_pageSize), - ); - unittest.expect( - queryMap['pageToken']!.first, - unittest.equals(arg_pageToken), + queryMap['timeout']!.first, + unittest.equals(arg_timeout), ); unittest.expect( queryMap['fields']!.first, @@ -52926,33 +61118,33 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json - .encode(buildGoogleCloudAiplatformV1ListArtifactsResponse()); + final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.list(arg_parent, - filter: arg_filter, - orderBy: arg_orderBy, - pageSize: arg_pageSize, - pageToken: arg_pageToken, - $fields: arg_$fields); - checkGoogleCloudAiplatformV1ListArtifactsResponse( - response as api.GoogleCloudAiplatformV1ListArtifactsResponse); + final response = + await res.wait(arg_name, timeout: arg_timeout, $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); }); + }); - unittest.test('method--patch', () async { + unittest.group( + 'resource-ProjectsLocationsMetadataStoresMetadataSchemasResource', () { + unittest.test('method--create', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.metadataStores.artifacts; - final arg_request = buildGoogleCloudAiplatformV1Artifact(); - final arg_name = 'foo'; - final arg_allowMissing = true; - final arg_updateMask = 'foo'; + final res = api.AiplatformApi(mock) + .projects + .locations + .metadataStores + .metadataSchemas; + final arg_request = buildGoogleCloudAiplatformV1MetadataSchema(); + final arg_parent = 'foo'; + final arg_metadataSchemaId = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleCloudAiplatformV1Artifact.fromJson( + final obj = api.GoogleCloudAiplatformV1MetadataSchema.fromJson( json as core.Map); - checkGoogleCloudAiplatformV1Artifact(obj); + checkGoogleCloudAiplatformV1MetadataSchema(obj); final path = req.url.path; var pathOffset = 0; @@ -52986,12 +61178,8 @@ void main() { } } unittest.expect( - queryMap['allowMissing']!.first, - unittest.equals('$arg_allowMissing'), - ); - unittest.expect( - queryMap['updateMask']!.first, - unittest.equals(arg_updateMask), + queryMap['metadataSchemaId']!.first, + 
unittest.equals(arg_metadataSchemaId), ); unittest.expect( queryMap['fields']!.first, @@ -53002,29 +61190,25 @@ void main() { 'content-type': 'application/json; charset=utf-8', }; final resp = - convert.json.encode(buildGoogleCloudAiplatformV1Artifact()); + convert.json.encode(buildGoogleCloudAiplatformV1MetadataSchema()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.patch(arg_request, arg_name, - allowMissing: arg_allowMissing, - updateMask: arg_updateMask, - $fields: arg_$fields); - checkGoogleCloudAiplatformV1Artifact( - response as api.GoogleCloudAiplatformV1Artifact); + final response = await res.create(arg_request, arg_parent, + metadataSchemaId: arg_metadataSchemaId, $fields: arg_$fields); + checkGoogleCloudAiplatformV1MetadataSchema( + response as api.GoogleCloudAiplatformV1MetadataSchema); }); - unittest.test('method--purge', () async { + unittest.test('method--get', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.metadataStores.artifacts; - final arg_request = buildGoogleCloudAiplatformV1PurgeArtifactsRequest(); - final arg_parent = 'foo'; + final res = api.AiplatformApi(mock) + .projects + .locations + .metadataStores + .metadataSchemas; + final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleCloudAiplatformV1PurgeArtifactsRequest.fromJson( - json as core.Map); - checkGoogleCloudAiplatformV1PurgeArtifactsRequest(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -53064,22 +61248,26 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleLongrunningOperation()); + final resp = + convert.json.encode(buildGoogleCloudAiplatformV1MetadataSchema()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = - await res.purge(arg_request, arg_parent, $fields: arg_$fields); - checkGoogleLongrunningOperation( - response as api.GoogleLongrunningOperation); + final response = await res.get(arg_name, $fields: arg_$fields); + checkGoogleCloudAiplatformV1MetadataSchema( + response as api.GoogleCloudAiplatformV1MetadataSchema); }); - unittest.test('method--queryArtifactLineageSubgraph', () async { + unittest.test('method--list', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.metadataStores.artifacts; - final arg_artifact = 'foo'; + final res = api.AiplatformApi(mock) + .projects + .locations + .metadataStores + .metadataSchemas; + final arg_parent = 'foo'; final arg_filter = 'foo'; - final arg_maxHops = 42; + final arg_pageSize = 42; + final arg_pageToken = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -53118,8 +61306,12 @@ void main() { unittest.equals(arg_filter), ); unittest.expect( - core.int.parse(queryMap['maxHops']!.first), - unittest.equals(arg_maxHops), + core.int.parse(queryMap['pageSize']!.first), + unittest.equals(arg_pageSize), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), ); unittest.expect( queryMap['fields']!.first, @@ -53129,28 +61321,26 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = - convert.json.encode(buildGoogleCloudAiplatformV1LineageSubgraph()); + final resp = convert.json + 
.encode(buildGoogleCloudAiplatformV1ListMetadataSchemasResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.queryArtifactLineageSubgraph(arg_artifact, - filter: arg_filter, maxHops: arg_maxHops, $fields: arg_$fields); - checkGoogleCloudAiplatformV1LineageSubgraph( - response as api.GoogleCloudAiplatformV1LineageSubgraph); + final response = await res.list(arg_parent, + filter: arg_filter, + pageSize: arg_pageSize, + pageToken: arg_pageToken, + $fields: arg_$fields); + checkGoogleCloudAiplatformV1ListMetadataSchemasResponse( + response as api.GoogleCloudAiplatformV1ListMetadataSchemasResponse); }); }); - unittest.group( - 'resource-ProjectsLocationsMetadataStoresArtifactsOperationsResource', + unittest.group('resource-ProjectsLocationsMetadataStoresOperationsResource', () { unittest.test('method--cancel', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .metadataStores - .artifacts - .operations; + final res = + api.AiplatformApi(mock).projects.locations.metadataStores.operations; final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -53202,12 +61392,8 @@ void main() { unittest.test('method--delete', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .metadataStores - .artifacts - .operations; + final res = + api.AiplatformApi(mock).projects.locations.metadataStores.operations; final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -53259,12 +61445,8 @@ void main() { unittest.test('method--get', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .metadataStores - .artifacts - .operations; + final res = + api.AiplatformApi(mock).projects.locations.metadataStores.operations; final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -53317,12 +61499,8 @@ void main() { unittest.test('method--list', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .metadataStores - .artifacts - .operations; + final res = + api.AiplatformApi(mock).projects.locations.metadataStores.operations; final arg_name = 'foo'; final arg_filter = 'foo'; final arg_pageSize = 42; @@ -53395,12 +61573,8 @@ void main() { unittest.test('method--wait', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .metadataStores - .artifacts - .operations; + final res = + api.AiplatformApi(mock).projects.locations.metadataStores.operations; final arg_name = 'foo'; final arg_timeout = 'foo'; final arg_$fields = 'foo'; @@ -53458,87 +61632,20 @@ void main() { }); }); - unittest.group('resource-ProjectsLocationsMetadataStoresContextsResource', - () { - unittest.test('method--addContextArtifactsAndExecutions', () async { - final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.metadataStores.contexts; - final arg_request = - buildGoogleCloudAiplatformV1AddContextArtifactsAndExecutionsRequest(); - final arg_context = 'foo'; - final arg_$fields = 'foo'; - mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = - api.GoogleCloudAiplatformV1AddContextArtifactsAndExecutionsRequest - .fromJson(json as core.Map); - 
checkGoogleCloudAiplatformV1AddContextArtifactsAndExecutionsRequest( - obj); - - final path = req.url.path; - var pathOffset = 0; - core.int index; - core.String subPart; - unittest.expect( - path.substring(pathOffset, pathOffset + 1), - unittest.equals('/'), - ); - pathOffset += 1; - unittest.expect( - path.substring(pathOffset, pathOffset + 3), - unittest.equals('v1/'), - ); - pathOffset += 3; - // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; - - final query = req.url.query; - var queryOffset = 0; - final queryMap = >{}; - void addQueryParam(core.String n, core.String v) => - queryMap.putIfAbsent(n, () => []).add(v); - - if (query.isNotEmpty) { - for (var part in query.split('&')) { - final keyValue = part.split('='); - addQueryParam( - core.Uri.decodeQueryComponent(keyValue[0]), - core.Uri.decodeQueryComponent(keyValue[1]), - ); - } - } - unittest.expect( - queryMap['fields']!.first, - unittest.equals(arg_$fields), - ); - - final h = { - 'content-type': 'application/json; charset=utf-8', - }; - final resp = convert.json.encode( - buildGoogleCloudAiplatformV1AddContextArtifactsAndExecutionsResponse()); - return async.Future.value(stringResponse(200, h, resp)); - }), true); - final response = await res.addContextArtifactsAndExecutions( - arg_request, arg_context, - $fields: arg_$fields); - checkGoogleCloudAiplatformV1AddContextArtifactsAndExecutionsResponse( - response as api - .GoogleCloudAiplatformV1AddContextArtifactsAndExecutionsResponse); - }); - - unittest.test('method--addContextChildren', () async { + unittest.group('resource-ProjectsLocationsMigratableResourcesResource', () { + unittest.test('method--batchMigrate', () async { final mock = HttpServerMock(); final res = - api.AiplatformApi(mock).projects.locations.metadataStores.contexts; + api.AiplatformApi(mock).projects.locations.migratableResources; final arg_request = - buildGoogleCloudAiplatformV1AddContextChildrenRequest(); - final arg_context = 'foo'; + buildGoogleCloudAiplatformV1BatchMigrateResourcesRequest(); + final arg_parent = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final obj = - api.GoogleCloudAiplatformV1AddContextChildrenRequest.fromJson( + api.GoogleCloudAiplatformV1BatchMigrateResourcesRequest.fromJson( json as core.Map); - checkGoogleCloudAiplatformV1AddContextChildrenRequest(obj); + checkGoogleCloudAiplatformV1BatchMigrateResourcesRequest(obj); final path = req.url.path; var pathOffset = 0; @@ -53579,28 +61686,27 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json - .encode(buildGoogleCloudAiplatformV1AddContextChildrenResponse()); + final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.addContextChildren(arg_request, arg_context, - $fields: arg_$fields); - checkGoogleCloudAiplatformV1AddContextChildrenResponse( - response as api.GoogleCloudAiplatformV1AddContextChildrenResponse); + final response = + await res.batchMigrate(arg_request, arg_parent, $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); }); - unittest.test('method--create', () async { + unittest.test('method--search', () async { final mock = HttpServerMock(); final res = - api.AiplatformApi(mock).projects.locations.metadataStores.contexts; - final arg_request = buildGoogleCloudAiplatformV1Context(); + 
api.AiplatformApi(mock).projects.locations.migratableResources; + final arg_request = + buildGoogleCloudAiplatformV1SearchMigratableResourcesRequest(); final arg_parent = 'foo'; - final arg_contextId = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleCloudAiplatformV1Context.fromJson( - json as core.Map); - checkGoogleCloudAiplatformV1Context(obj); + final obj = api.GoogleCloudAiplatformV1SearchMigratableResourcesRequest + .fromJson(json as core.Map); + checkGoogleCloudAiplatformV1SearchMigratableResourcesRequest(obj); final path = req.url.path; var pathOffset = 0; @@ -53633,10 +61739,6 @@ void main() { ); } } - unittest.expect( - queryMap['contextId']!.first, - unittest.equals(arg_contextId), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -53645,22 +61747,27 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleCloudAiplatformV1Context()); + final resp = convert.json.encode( + buildGoogleCloudAiplatformV1SearchMigratableResourcesResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.create(arg_request, arg_parent, - contextId: arg_contextId, $fields: arg_$fields); - checkGoogleCloudAiplatformV1Context( - response as api.GoogleCloudAiplatformV1Context); + final response = + await res.search(arg_request, arg_parent, $fields: arg_$fields); + checkGoogleCloudAiplatformV1SearchMigratableResourcesResponse(response + as api.GoogleCloudAiplatformV1SearchMigratableResourcesResponse); }); + }); - unittest.test('method--delete', () async { + unittest.group( + 'resource-ProjectsLocationsMigratableResourcesOperationsResource', () { + unittest.test('method--cancel', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.metadataStores.contexts; + final res = api.AiplatformApi(mock) + .projects + .locations + .migratableResources + .operations; final arg_name = 'foo'; - final arg_etag = 'foo'; - final arg_force = true; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -53694,14 +61801,6 @@ void main() { ); } } - unittest.expect( - queryMap['etag']!.first, - unittest.equals(arg_etag), - ); - unittest.expect( - queryMap['force']!.first, - unittest.equals('$arg_force'), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -53710,19 +61809,20 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleLongrunningOperation()); + final resp = convert.json.encode(buildGoogleProtobufEmpty()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.delete(arg_name, - etag: arg_etag, force: arg_force, $fields: arg_$fields); - checkGoogleLongrunningOperation( - response as api.GoogleLongrunningOperation); + final response = await res.cancel(arg_name, $fields: arg_$fields); + checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); }); - unittest.test('method--get', () async { + unittest.test('method--delete', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.metadataStores.contexts; + final res = api.AiplatformApi(mock) + .projects + .locations + .migratableResources + .operations; final arg_name = 'foo'; final arg_$fields = 'foo'; 
mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -53765,23 +61865,21 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleCloudAiplatformV1Context()); + final resp = convert.json.encode(buildGoogleProtobufEmpty()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.get(arg_name, $fields: arg_$fields); - checkGoogleCloudAiplatformV1Context( - response as api.GoogleCloudAiplatformV1Context); + final response = await res.delete(arg_name, $fields: arg_$fields); + checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); }); - unittest.test('method--list', () async { + unittest.test('method--get', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.metadataStores.contexts; - final arg_parent = 'foo'; - final arg_filter = 'foo'; - final arg_orderBy = 'foo'; - final arg_pageSize = 42; - final arg_pageToken = 'foo'; + final res = api.AiplatformApi(mock) + .projects + .locations + .migratableResources + .operations; + final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -53815,22 +61913,6 @@ void main() { ); } } - unittest.expect( - queryMap['filter']!.first, - unittest.equals(arg_filter), - ); - unittest.expect( - queryMap['orderBy']!.first, - unittest.equals(arg_orderBy), - ); - unittest.expect( - core.int.parse(queryMap['pageSize']!.first), - unittest.equals(arg_pageSize), - ); - unittest.expect( - queryMap['pageToken']!.first, - unittest.equals(arg_pageToken), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -53839,34 +61921,27 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json - .encode(buildGoogleCloudAiplatformV1ListContextsResponse()); + final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.list(arg_parent, - filter: arg_filter, - orderBy: arg_orderBy, - pageSize: arg_pageSize, - pageToken: arg_pageToken, - $fields: arg_$fields); - checkGoogleCloudAiplatformV1ListContextsResponse( - response as api.GoogleCloudAiplatformV1ListContextsResponse); + final response = await res.get(arg_name, $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); }); - unittest.test('method--patch', () async { + unittest.test('method--list', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.metadataStores.contexts; - final arg_request = buildGoogleCloudAiplatformV1Context(); + final res = api.AiplatformApi(mock) + .projects + .locations + .migratableResources + .operations; final arg_name = 'foo'; - final arg_allowMissing = true; - final arg_updateMask = 'foo'; + final arg_filter = 'foo'; + final arg_pageSize = 42; + final arg_pageToken = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleCloudAiplatformV1Context.fromJson( - json as core.Map); - checkGoogleCloudAiplatformV1Context(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -53899,12 +61974,16 @@ void main() { } } unittest.expect( - queryMap['allowMissing']!.first, - unittest.equals('$arg_allowMissing'), + queryMap['filter']!.first, + unittest.equals(arg_filter), ); 
unittest.expect( - queryMap['updateMask']!.first, - unittest.equals(arg_updateMask), + core.int.parse(queryMap['pageSize']!.first), + unittest.equals(arg_pageSize), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), ); unittest.expect( queryMap['fields']!.first, @@ -53914,29 +61993,30 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleCloudAiplatformV1Context()); + final resp = + convert.json.encode(buildGoogleLongrunningListOperationsResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.patch(arg_request, arg_name, - allowMissing: arg_allowMissing, - updateMask: arg_updateMask, + final response = await res.list(arg_name, + filter: arg_filter, + pageSize: arg_pageSize, + pageToken: arg_pageToken, $fields: arg_$fields); - checkGoogleCloudAiplatformV1Context( - response as api.GoogleCloudAiplatformV1Context); + checkGoogleLongrunningListOperationsResponse( + response as api.GoogleLongrunningListOperationsResponse); }); - unittest.test('method--purge', () async { + unittest.test('method--wait', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.metadataStores.contexts; - final arg_request = buildGoogleCloudAiplatformV1PurgeContextsRequest(); - final arg_parent = 'foo'; + final res = api.AiplatformApi(mock) + .projects + .locations + .migratableResources + .operations; + final arg_name = 'foo'; + final arg_timeout = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleCloudAiplatformV1PurgeContextsRequest.fromJson( - json as core.Map); - checkGoogleCloudAiplatformV1PurgeContextsRequest(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -53968,6 +62048,10 @@ void main() { ); } } + unittest.expect( + queryMap['timeout']!.first, + unittest.equals(arg_timeout), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -53980,18 +62064,30 @@ void main() { return async.Future.value(stringResponse(200, h, resp)); }), true); final response = - await res.purge(arg_request, arg_parent, $fields: arg_$fields); + await res.wait(arg_name, timeout: arg_timeout, $fields: arg_$fields); checkGoogleLongrunningOperation( response as api.GoogleLongrunningOperation); }); + }); - unittest.test('method--queryContextLineageSubgraph', () async { + unittest.group( + 'resource-ProjectsLocationsModelDeploymentMonitoringJobsResource', () { + unittest.test('method--create', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.metadataStores.contexts; - final arg_context = 'foo'; + final res = api.AiplatformApi(mock) + .projects + .locations + .modelDeploymentMonitoringJobs; + final arg_request = + buildGoogleCloudAiplatformV1ModelDeploymentMonitoringJob(); + final arg_parent = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = + api.GoogleCloudAiplatformV1ModelDeploymentMonitoringJob.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1ModelDeploymentMonitoringJob(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -54031,30 +62127,25 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = - convert.json.encode(buildGoogleCloudAiplatformV1LineageSubgraph()); + final resp = convert.json + 
.encode(buildGoogleCloudAiplatformV1ModelDeploymentMonitoringJob()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.queryContextLineageSubgraph(arg_context, - $fields: arg_$fields); - checkGoogleCloudAiplatformV1LineageSubgraph( - response as api.GoogleCloudAiplatformV1LineageSubgraph); + final response = + await res.create(arg_request, arg_parent, $fields: arg_$fields); + checkGoogleCloudAiplatformV1ModelDeploymentMonitoringJob( + response as api.GoogleCloudAiplatformV1ModelDeploymentMonitoringJob); }); - unittest.test('method--removeContextChildren', () async { + unittest.test('method--delete', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.metadataStores.contexts; - final arg_request = - buildGoogleCloudAiplatformV1RemoveContextChildrenRequest(); - final arg_context = 'foo'; + final res = api.AiplatformApi(mock) + .projects + .locations + .modelDeploymentMonitoringJobs; + final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = - api.GoogleCloudAiplatformV1RemoveContextChildrenRequest.fromJson( - json as core.Map); - checkGoogleCloudAiplatformV1RemoveContextChildrenRequest(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -54094,27 +62185,20 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode( - buildGoogleCloudAiplatformV1RemoveContextChildrenResponse()); + final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.removeContextChildren(arg_request, arg_context, - $fields: arg_$fields); - checkGoogleCloudAiplatformV1RemoveContextChildrenResponse( - response as api.GoogleCloudAiplatformV1RemoveContextChildrenResponse); + final response = await res.delete(arg_name, $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); }); - }); - unittest.group( - 'resource-ProjectsLocationsMetadataStoresContextsOperationsResource', () { - unittest.test('method--cancel', () async { + unittest.test('method--get', () async { final mock = HttpServerMock(); final res = api.AiplatformApi(mock) .projects .locations - .metadataStores - .contexts - .operations; + .modelDeploymentMonitoringJobs; final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -54157,22 +62241,26 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleProtobufEmpty()); + final resp = convert.json + .encode(buildGoogleCloudAiplatformV1ModelDeploymentMonitoringJob()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.cancel(arg_name, $fields: arg_$fields); - checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); + final response = await res.get(arg_name, $fields: arg_$fields); + checkGoogleCloudAiplatformV1ModelDeploymentMonitoringJob( + response as api.GoogleCloudAiplatformV1ModelDeploymentMonitoringJob); }); - unittest.test('method--delete', () async { + unittest.test('method--list', () async { final mock = HttpServerMock(); final res = api.AiplatformApi(mock) .projects .locations - .metadataStores - .contexts - .operations; - final arg_name = 'foo'; + .modelDeploymentMonitoringJobs; + final arg_parent = 'foo'; + final arg_filter = 
'foo'; + final arg_pageSize = 42; + final arg_pageToken = 'foo'; + final arg_readMask = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -54206,6 +62294,22 @@ void main() { ); } } + unittest.expect( + queryMap['filter']!.first, + unittest.equals(arg_filter), + ); + unittest.expect( + core.int.parse(queryMap['pageSize']!.first), + unittest.equals(arg_pageSize), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), + ); + unittest.expect( + queryMap['readMask']!.first, + unittest.equals(arg_readMask), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -54214,24 +62318,38 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleProtobufEmpty()); + final resp = convert.json.encode( + buildGoogleCloudAiplatformV1ListModelDeploymentMonitoringJobsResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.delete(arg_name, $fields: arg_$fields); - checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); + final response = await res.list(arg_parent, + filter: arg_filter, + pageSize: arg_pageSize, + pageToken: arg_pageToken, + readMask: arg_readMask, + $fields: arg_$fields); + checkGoogleCloudAiplatformV1ListModelDeploymentMonitoringJobsResponse( + response as api + .GoogleCloudAiplatformV1ListModelDeploymentMonitoringJobsResponse); }); - unittest.test('method--get', () async { + unittest.test('method--patch', () async { final mock = HttpServerMock(); final res = api.AiplatformApi(mock) .projects .locations - .metadataStores - .contexts - .operations; + .modelDeploymentMonitoringJobs; + final arg_request = + buildGoogleCloudAiplatformV1ModelDeploymentMonitoringJob(); final arg_name = 'foo'; + final arg_updateMask = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = + api.GoogleCloudAiplatformV1ModelDeploymentMonitoringJob.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1ModelDeploymentMonitoringJob(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -54263,6 +62381,10 @@ void main() { ); } } + unittest.expect( + queryMap['updateMask']!.first, + unittest.equals(arg_updateMask), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -54274,25 +62396,29 @@ void main() { final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.get(arg_name, $fields: arg_$fields); + final response = await res.patch(arg_request, arg_name, + updateMask: arg_updateMask, $fields: arg_$fields); checkGoogleLongrunningOperation( response as api.GoogleLongrunningOperation); }); - unittest.test('method--list', () async { + unittest.test('method--pause', () async { final mock = HttpServerMock(); final res = api.AiplatformApi(mock) .projects .locations - .metadataStores - .contexts - .operations; + .modelDeploymentMonitoringJobs; + final arg_request = + buildGoogleCloudAiplatformV1PauseModelDeploymentMonitoringJobRequest(); final arg_name = 'foo'; - final arg_filter = 'foo'; - final arg_pageSize = 42; - final arg_pageToken = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = + api.GoogleCloudAiplatformV1PauseModelDeploymentMonitoringJobRequest + .fromJson(json as 
core.Map); + checkGoogleCloudAiplatformV1PauseModelDeploymentMonitoringJobRequest( + obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -54324,18 +62450,6 @@ void main() { ); } } - unittest.expect( - queryMap['filter']!.first, - unittest.equals(arg_filter), - ); - unittest.expect( - core.int.parse(queryMap['pageSize']!.first), - unittest.equals(arg_pageSize), - ); - unittest.expect( - queryMap['pageToken']!.first, - unittest.equals(arg_pageToken), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -54344,31 +62458,31 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = - convert.json.encode(buildGoogleLongrunningListOperationsResponse()); + final resp = convert.json.encode(buildGoogleProtobufEmpty()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.list(arg_name, - filter: arg_filter, - pageSize: arg_pageSize, - pageToken: arg_pageToken, - $fields: arg_$fields); - checkGoogleLongrunningListOperationsResponse( - response as api.GoogleLongrunningListOperationsResponse); + final response = + await res.pause(arg_request, arg_name, $fields: arg_$fields); + checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); }); - unittest.test('method--wait', () async { + unittest.test('method--resume', () async { final mock = HttpServerMock(); final res = api.AiplatformApi(mock) .projects .locations - .metadataStores - .contexts - .operations; + .modelDeploymentMonitoringJobs; + final arg_request = + buildGoogleCloudAiplatformV1ResumeModelDeploymentMonitoringJobRequest(); final arg_name = 'foo'; - final arg_timeout = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = + api.GoogleCloudAiplatformV1ResumeModelDeploymentMonitoringJobRequest + .fromJson(json as core.Map); + checkGoogleCloudAiplatformV1ResumeModelDeploymentMonitoringJobRequest( + obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -54400,10 +62514,6 @@ void main() { ); } } - unittest.expect( - queryMap['timeout']!.first, - unittest.equals(arg_timeout), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -54412,31 +62522,31 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleLongrunningOperation()); + final resp = convert.json.encode(buildGoogleProtobufEmpty()); return async.Future.value(stringResponse(200, h, resp)); }), true); final response = - await res.wait(arg_name, timeout: arg_timeout, $fields: arg_$fields); - checkGoogleLongrunningOperation( - response as api.GoogleLongrunningOperation); + await res.resume(arg_request, arg_name, $fields: arg_$fields); + checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); }); - }); - unittest.group('resource-ProjectsLocationsMetadataStoresExecutionsResource', - () { - unittest.test('method--addExecutionEvents', () async { + unittest.test('method--searchModelDeploymentMonitoringStatsAnomalies', + () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.metadataStores.executions; + final res = api.AiplatformApi(mock) + .projects + .locations + .modelDeploymentMonitoringJobs; final arg_request = - buildGoogleCloudAiplatformV1AddExecutionEventsRequest(); - final arg_execution = 'foo'; + buildGoogleCloudAiplatformV1SearchModelDeploymentMonitoringStatsAnomaliesRequest(); + final arg_modelDeploymentMonitoringJob = 
'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final obj = - api.GoogleCloudAiplatformV1AddExecutionEventsRequest.fromJson( - json as core.Map); - checkGoogleCloudAiplatformV1AddExecutionEventsRequest(obj); + api.GoogleCloudAiplatformV1SearchModelDeploymentMonitoringStatsAnomaliesRequest + .fromJson(json as core.Map); + checkGoogleCloudAiplatformV1SearchModelDeploymentMonitoringStatsAnomaliesRequest( + obj); final path = req.url.path; var pathOffset = 0; @@ -54477,29 +62587,32 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json - .encode(buildGoogleCloudAiplatformV1AddExecutionEventsResponse()); + final resp = convert.json.encode( + buildGoogleCloudAiplatformV1SearchModelDeploymentMonitoringStatsAnomaliesResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.addExecutionEvents(arg_request, arg_execution, + final response = await res.searchModelDeploymentMonitoringStatsAnomalies( + arg_request, arg_modelDeploymentMonitoringJob, $fields: arg_$fields); - checkGoogleCloudAiplatformV1AddExecutionEventsResponse( - response as api.GoogleCloudAiplatformV1AddExecutionEventsResponse); + checkGoogleCloudAiplatformV1SearchModelDeploymentMonitoringStatsAnomaliesResponse( + response as api + .GoogleCloudAiplatformV1SearchModelDeploymentMonitoringStatsAnomaliesResponse); }); + }); - unittest.test('method--create', () async { + unittest.group( + 'resource-ProjectsLocationsModelDeploymentMonitoringJobsOperationsResource', + () { + unittest.test('method--cancel', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.metadataStores.executions; - final arg_request = buildGoogleCloudAiplatformV1Execution(); - final arg_parent = 'foo'; - final arg_executionId = 'foo'; + final res = api.AiplatformApi(mock) + .projects + .locations + .modelDeploymentMonitoringJobs + .operations; + final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleCloudAiplatformV1Execution.fromJson( - json as core.Map); - checkGoogleCloudAiplatformV1Execution(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -54531,10 +62644,6 @@ void main() { ); } } - unittest.expect( - queryMap['executionId']!.first, - unittest.equals(arg_executionId), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -54543,22 +62652,21 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = - convert.json.encode(buildGoogleCloudAiplatformV1Execution()); + final resp = convert.json.encode(buildGoogleProtobufEmpty()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.create(arg_request, arg_parent, - executionId: arg_executionId, $fields: arg_$fields); - checkGoogleCloudAiplatformV1Execution( - response as api.GoogleCloudAiplatformV1Execution); + final response = await res.cancel(arg_name, $fields: arg_$fields); + checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); }); unittest.test('method--delete', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.metadataStores.executions; + final res = api.AiplatformApi(mock) + .projects + .locations + .modelDeploymentMonitoringJobs + .operations; final arg_name = 'foo'; - final arg_etag = 'foo'; final arg_$fields = 'foo'; 
mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -54592,10 +62700,6 @@ void main() { ); } } - unittest.expect( - queryMap['etag']!.first, - unittest.equals(arg_etag), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -54604,19 +62708,20 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleLongrunningOperation()); + final resp = convert.json.encode(buildGoogleProtobufEmpty()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = - await res.delete(arg_name, etag: arg_etag, $fields: arg_$fields); - checkGoogleLongrunningOperation( - response as api.GoogleLongrunningOperation); + final response = await res.delete(arg_name, $fields: arg_$fields); + checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); }); unittest.test('method--get', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.metadataStores.executions; + final res = api.AiplatformApi(mock) + .projects + .locations + .modelDeploymentMonitoringJobs + .operations; final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -54659,22 +62764,23 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = - convert.json.encode(buildGoogleCloudAiplatformV1Execution()); + final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); final response = await res.get(arg_name, $fields: arg_$fields); - checkGoogleCloudAiplatformV1Execution( - response as api.GoogleCloudAiplatformV1Execution); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); }); unittest.test('method--list', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.metadataStores.executions; - final arg_parent = 'foo'; + final res = api.AiplatformApi(mock) + .projects + .locations + .modelDeploymentMonitoringJobs + .operations; + final arg_name = 'foo'; final arg_filter = 'foo'; - final arg_orderBy = 'foo'; final arg_pageSize = 42; final arg_pageToken = 'foo'; final arg_$fields = 'foo'; @@ -54714,10 +62820,6 @@ void main() { queryMap['filter']!.first, unittest.equals(arg_filter), ); - unittest.expect( - queryMap['orderBy']!.first, - unittest.equals(arg_orderBy), - ); unittest.expect( core.int.parse(queryMap['pageSize']!.first), unittest.equals(arg_pageSize), @@ -54734,34 +62836,30 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json - .encode(buildGoogleCloudAiplatformV1ListExecutionsResponse()); + final resp = + convert.json.encode(buildGoogleLongrunningListOperationsResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.list(arg_parent, + final response = await res.list(arg_name, filter: arg_filter, - orderBy: arg_orderBy, pageSize: arg_pageSize, pageToken: arg_pageToken, $fields: arg_$fields); - checkGoogleCloudAiplatformV1ListExecutionsResponse( - response as api.GoogleCloudAiplatformV1ListExecutionsResponse); + checkGoogleLongrunningListOperationsResponse( + response as api.GoogleLongrunningListOperationsResponse); }); - unittest.test('method--patch', () async { + unittest.test('method--wait', () async { final mock = HttpServerMock(); - final res = - 
api.AiplatformApi(mock).projects.locations.metadataStores.executions; - final arg_request = buildGoogleCloudAiplatformV1Execution(); + final res = api.AiplatformApi(mock) + .projects + .locations + .modelDeploymentMonitoringJobs + .operations; final arg_name = 'foo'; - final arg_allowMissing = true; - final arg_updateMask = 'foo'; + final arg_timeout = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleCloudAiplatformV1Execution.fromJson( - json as core.Map); - checkGoogleCloudAiplatformV1Execution(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -54794,12 +62892,8 @@ void main() { } } unittest.expect( - queryMap['allowMissing']!.first, - unittest.equals('$arg_allowMissing'), - ); - unittest.expect( - queryMap['updateMask']!.first, - unittest.equals(arg_updateMask), + queryMap['timeout']!.first, + unittest.equals(arg_timeout), ); unittest.expect( queryMap['fields']!.first, @@ -54809,29 +62903,27 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = - convert.json.encode(buildGoogleCloudAiplatformV1Execution()); + final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.patch(arg_request, arg_name, - allowMissing: arg_allowMissing, - updateMask: arg_updateMask, - $fields: arg_$fields); - checkGoogleCloudAiplatformV1Execution( - response as api.GoogleCloudAiplatformV1Execution); + final response = + await res.wait(arg_name, timeout: arg_timeout, $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); }); + }); - unittest.test('method--purge', () async { + unittest.group('resource-ProjectsLocationsModelsResource', () { + unittest.test('method--copy', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.metadataStores.executions; - final arg_request = buildGoogleCloudAiplatformV1PurgeExecutionsRequest(); + final res = api.AiplatformApi(mock).projects.locations.models; + final arg_request = buildGoogleCloudAiplatformV1CopyModelRequest(); final arg_parent = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleCloudAiplatformV1PurgeExecutionsRequest.fromJson( + final obj = api.GoogleCloudAiplatformV1CopyModelRequest.fromJson( json as core.Map); - checkGoogleCloudAiplatformV1PurgeExecutionsRequest(obj); + checkGoogleCloudAiplatformV1CopyModelRequest(obj); final path = req.url.path; var pathOffset = 0; @@ -54876,16 +62968,15 @@ void main() { return async.Future.value(stringResponse(200, h, resp)); }), true); final response = - await res.purge(arg_request, arg_parent, $fields: arg_$fields); + await res.copy(arg_request, arg_parent, $fields: arg_$fields); checkGoogleLongrunningOperation( response as api.GoogleLongrunningOperation); }); - unittest.test('method--queryExecutionInputsAndOutputs', () async { + unittest.test('method--delete', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.metadataStores.executions; - final arg_execution = 'foo'; + final res = api.AiplatformApi(mock).projects.locations.models; + final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -54927,28 +63018,17 @@ void main() { final h = { 'content-type': 'application/json; 
charset=utf-8', }; - final resp = - convert.json.encode(buildGoogleCloudAiplatformV1LineageSubgraph()); + final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.queryExecutionInputsAndOutputs(arg_execution, - $fields: arg_$fields); - checkGoogleCloudAiplatformV1LineageSubgraph( - response as api.GoogleCloudAiplatformV1LineageSubgraph); + final response = await res.delete(arg_name, $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); }); - }); - unittest.group( - 'resource-ProjectsLocationsMetadataStoresExecutionsOperationsResource', - () { - unittest.test('method--cancel', () async { + unittest.test('method--deleteVersion', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .metadataStores - .executions - .operations; + final res = api.AiplatformApi(mock).projects.locations.models; final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -54991,24 +63071,25 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleProtobufEmpty()); + final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.cancel(arg_name, $fields: arg_$fields); - checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); + final response = await res.deleteVersion(arg_name, $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); }); - unittest.test('method--delete', () async { + unittest.test('method--export', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .metadataStores - .executions - .operations; + final res = api.AiplatformApi(mock).projects.locations.models; + final arg_request = buildGoogleCloudAiplatformV1ExportModelRequest(); final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.GoogleCloudAiplatformV1ExportModelRequest.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1ExportModelRequest(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -55048,21 +63129,18 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleProtobufEmpty()); + final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.delete(arg_name, $fields: arg_$fields); - checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); + final response = + await res.export(arg_request, arg_name, $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); }); unittest.test('method--get', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .metadataStores - .executions - .operations; + final res = api.AiplatformApi(mock).projects.locations.models; final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -55105,26 +63183,19 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = 
convert.json.encode(buildGoogleLongrunningOperation()); + final resp = convert.json.encode(buildGoogleCloudAiplatformV1Model()); return async.Future.value(stringResponse(200, h, resp)); }), true); final response = await res.get(arg_name, $fields: arg_$fields); - checkGoogleLongrunningOperation( - response as api.GoogleLongrunningOperation); + checkGoogleCloudAiplatformV1Model( + response as api.GoogleCloudAiplatformV1Model); }); - unittest.test('method--list', () async { + unittest.test('method--getIamPolicy', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .metadataStores - .executions - .operations; - final arg_name = 'foo'; - final arg_filter = 'foo'; - final arg_pageSize = 42; - final arg_pageToken = 'foo'; + final res = api.AiplatformApi(mock).projects.locations.models; + final arg_resource = 'foo'; + final arg_options_requestedPolicyVersion = 42; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -55159,16 +63230,8 @@ void main() { } } unittest.expect( - queryMap['filter']!.first, - unittest.equals(arg_filter), - ); - unittest.expect( - core.int.parse(queryMap['pageSize']!.first), - unittest.equals(arg_pageSize), - ); - unittest.expect( - queryMap['pageToken']!.first, - unittest.equals(arg_pageToken), + core.int.parse(queryMap['options.requestedPolicyVersion']!.first), + unittest.equals(arg_options_requestedPolicyVersion), ); unittest.expect( queryMap['fields']!.first, @@ -55178,29 +63241,24 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = - convert.json.encode(buildGoogleLongrunningListOperationsResponse()); + final resp = convert.json.encode(buildGoogleIamV1Policy()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.list(arg_name, - filter: arg_filter, - pageSize: arg_pageSize, - pageToken: arg_pageToken, + final response = await res.getIamPolicy(arg_resource, + options_requestedPolicyVersion: arg_options_requestedPolicyVersion, $fields: arg_$fields); - checkGoogleLongrunningListOperationsResponse( - response as api.GoogleLongrunningListOperationsResponse); + checkGoogleIamV1Policy(response as api.GoogleIamV1Policy); }); - unittest.test('method--wait', () async { + unittest.test('method--list', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .metadataStores - .executions - .operations; - final arg_name = 'foo'; - final arg_timeout = 'foo'; + final res = api.AiplatformApi(mock).projects.locations.models; + final arg_parent = 'foo'; + final arg_filter = 'foo'; + final arg_orderBy = 'foo'; + final arg_pageSize = 42; + final arg_pageToken = 'foo'; + final arg_readMask = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -55235,8 +63293,24 @@ void main() { } } unittest.expect( - queryMap['timeout']!.first, - unittest.equals(arg_timeout), + queryMap['filter']!.first, + unittest.equals(arg_filter), + ); + unittest.expect( + queryMap['orderBy']!.first, + unittest.equals(arg_orderBy), + ); + unittest.expect( + core.int.parse(queryMap['pageSize']!.first), + unittest.equals(arg_pageSize), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), + ); + unittest.expect( + queryMap['readMask']!.first, + unittest.equals(arg_readMask), ); unittest.expect( queryMap['fields']!.first, @@ -55246,34 +63320,32 
@@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleLongrunningOperation()); + final resp = convert.json + .encode(buildGoogleCloudAiplatformV1ListModelsResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = - await res.wait(arg_name, timeout: arg_timeout, $fields: arg_$fields); - checkGoogleLongrunningOperation( - response as api.GoogleLongrunningOperation); + final response = await res.list(arg_parent, + filter: arg_filter, + orderBy: arg_orderBy, + pageSize: arg_pageSize, + pageToken: arg_pageToken, + readMask: arg_readMask, + $fields: arg_$fields); + checkGoogleCloudAiplatformV1ListModelsResponse( + response as api.GoogleCloudAiplatformV1ListModelsResponse); }); - }); - unittest.group( - 'resource-ProjectsLocationsMetadataStoresMetadataSchemasResource', () { - unittest.test('method--create', () async { + unittest.test('method--listVersions', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .metadataStores - .metadataSchemas; - final arg_request = buildGoogleCloudAiplatformV1MetadataSchema(); - final arg_parent = 'foo'; - final arg_metadataSchemaId = 'foo'; + final res = api.AiplatformApi(mock).projects.locations.models; + final arg_name = 'foo'; + final arg_filter = 'foo'; + final arg_orderBy = 'foo'; + final arg_pageSize = 42; + final arg_pageToken = 'foo'; + final arg_readMask = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleCloudAiplatformV1MetadataSchema.fromJson( - json as core.Map); - checkGoogleCloudAiplatformV1MetadataSchema(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -55306,8 +63378,24 @@ void main() { } } unittest.expect( - queryMap['metadataSchemaId']!.first, - unittest.equals(arg_metadataSchemaId), + queryMap['filter']!.first, + unittest.equals(arg_filter), + ); + unittest.expect( + queryMap['orderBy']!.first, + unittest.equals(arg_orderBy), + ); + unittest.expect( + core.int.parse(queryMap['pageSize']!.first), + unittest.equals(arg_pageSize), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), + ); + unittest.expect( + queryMap['readMask']!.first, + unittest.equals(arg_readMask), ); unittest.expect( queryMap['fields']!.first, @@ -55317,26 +63405,34 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = - convert.json.encode(buildGoogleCloudAiplatformV1MetadataSchema()); + final resp = convert.json + .encode(buildGoogleCloudAiplatformV1ListModelVersionsResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.create(arg_request, arg_parent, - metadataSchemaId: arg_metadataSchemaId, $fields: arg_$fields); - checkGoogleCloudAiplatformV1MetadataSchema( - response as api.GoogleCloudAiplatformV1MetadataSchema); + final response = await res.listVersions(arg_name, + filter: arg_filter, + orderBy: arg_orderBy, + pageSize: arg_pageSize, + pageToken: arg_pageToken, + readMask: arg_readMask, + $fields: arg_$fields); + checkGoogleCloudAiplatformV1ListModelVersionsResponse( + response as api.GoogleCloudAiplatformV1ListModelVersionsResponse); }); - unittest.test('method--get', () async { + unittest.test('method--mergeVersionAliases', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .metadataStores - .metadataSchemas; 
+ final res = api.AiplatformApi(mock).projects.locations.models; + final arg_request = + buildGoogleCloudAiplatformV1MergeVersionAliasesRequest(); final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = + api.GoogleCloudAiplatformV1MergeVersionAliasesRequest.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1MergeVersionAliasesRequest(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -55376,28 +63472,27 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = - convert.json.encode(buildGoogleCloudAiplatformV1MetadataSchema()); + final resp = convert.json.encode(buildGoogleCloudAiplatformV1Model()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.get(arg_name, $fields: arg_$fields); - checkGoogleCloudAiplatformV1MetadataSchema( - response as api.GoogleCloudAiplatformV1MetadataSchema); + final response = await res.mergeVersionAliases(arg_request, arg_name, + $fields: arg_$fields); + checkGoogleCloudAiplatformV1Model( + response as api.GoogleCloudAiplatformV1Model); }); - unittest.test('method--list', () async { + unittest.test('method--patch', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .metadataStores - .metadataSchemas; - final arg_parent = 'foo'; - final arg_filter = 'foo'; - final arg_pageSize = 42; - final arg_pageToken = 'foo'; + final res = api.AiplatformApi(mock).projects.locations.models; + final arg_request = buildGoogleCloudAiplatformV1Model(); + final arg_name = 'foo'; + final arg_updateMask = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.GoogleCloudAiplatformV1Model.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1Model(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -55430,16 +63525,8 @@ void main() { } } unittest.expect( - queryMap['filter']!.first, - unittest.equals(arg_filter), - ); - unittest.expect( - core.int.parse(queryMap['pageSize']!.first), - unittest.equals(arg_pageSize), - ); - unittest.expect( - queryMap['pageToken']!.first, - unittest.equals(arg_pageToken), + queryMap['updateMask']!.first, + unittest.equals(arg_updateMask), ); unittest.expect( queryMap['fields']!.first, @@ -55449,29 +63536,26 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json - .encode(buildGoogleCloudAiplatformV1ListMetadataSchemasResponse()); + final resp = convert.json.encode(buildGoogleCloudAiplatformV1Model()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.list(arg_parent, - filter: arg_filter, - pageSize: arg_pageSize, - pageToken: arg_pageToken, - $fields: arg_$fields); - checkGoogleCloudAiplatformV1ListMetadataSchemasResponse( - response as api.GoogleCloudAiplatformV1ListMetadataSchemasResponse); + final response = await res.patch(arg_request, arg_name, + updateMask: arg_updateMask, $fields: arg_$fields); + checkGoogleCloudAiplatformV1Model( + response as api.GoogleCloudAiplatformV1Model); }); - }); - unittest.group('resource-ProjectsLocationsMetadataStoresOperationsResource', - () { - unittest.test('method--cancel', () async { + unittest.test('method--setIamPolicy', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.metadataStores.operations; - final arg_name = 
'foo'; + final res = api.AiplatformApi(mock).projects.locations.models; + final arg_request = buildGoogleIamV1SetIamPolicyRequest(); + final arg_resource = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.GoogleIamV1SetIamPolicyRequest.fromJson( + json as core.Map); + checkGoogleIamV1SetIamPolicyRequest(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -55511,18 +63595,19 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleProtobufEmpty()); + final resp = convert.json.encode(buildGoogleIamV1Policy()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.cancel(arg_name, $fields: arg_$fields); - checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); + final response = await res.setIamPolicy(arg_request, arg_resource, + $fields: arg_$fields); + checkGoogleIamV1Policy(response as api.GoogleIamV1Policy); }); - unittest.test('method--delete', () async { + unittest.test('method--testIamPermissions', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.metadataStores.operations; - final arg_name = 'foo'; + final res = api.AiplatformApi(mock).projects.locations.models; + final arg_resource = 'foo'; + final arg_permissions = buildUnnamed432(); final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -55556,6 +63641,10 @@ void main() { ); } } + unittest.expect( + queryMap['permissions']!, + unittest.equals(arg_permissions), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -55564,20 +63653,29 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleProtobufEmpty()); + final resp = + convert.json.encode(buildGoogleIamV1TestIamPermissionsResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.delete(arg_name, $fields: arg_$fields); - checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); + final response = await res.testIamPermissions(arg_resource, + permissions: arg_permissions, $fields: arg_$fields); + checkGoogleIamV1TestIamPermissionsResponse( + response as api.GoogleIamV1TestIamPermissionsResponse); }); - unittest.test('method--get', () async { + unittest.test('method--updateExplanationDataset', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.metadataStores.operations; - final arg_name = 'foo'; + final res = api.AiplatformApi(mock).projects.locations.models; + final arg_request = + buildGoogleCloudAiplatformV1UpdateExplanationDatasetRequest(); + final arg_model = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = + api.GoogleCloudAiplatformV1UpdateExplanationDatasetRequest.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1UpdateExplanationDatasetRequest(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -55620,21 +63718,24 @@ void main() { final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.get(arg_name, $fields: arg_$fields); + final response = await res.updateExplanationDataset( + arg_request, arg_model, + $fields: arg_$fields); 
checkGoogleLongrunningOperation( response as api.GoogleLongrunningOperation); }); - unittest.test('method--list', () async { + unittest.test('method--upload', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.metadataStores.operations; - final arg_name = 'foo'; - final arg_filter = 'foo'; - final arg_pageSize = 42; - final arg_pageToken = 'foo'; + final res = api.AiplatformApi(mock).projects.locations.models; + final arg_request = buildGoogleCloudAiplatformV1UploadModelRequest(); + final arg_parent = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.GoogleCloudAiplatformV1UploadModelRequest.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1UploadModelRequest(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -55666,18 +63767,6 @@ void main() { ); } } - unittest.expect( - queryMap['filter']!.first, - unittest.equals(arg_filter), - ); - unittest.expect( - core.int.parse(queryMap['pageSize']!.first), - unittest.equals(arg_pageSize), - ); - unittest.expect( - queryMap['pageToken']!.first, - unittest.equals(arg_pageToken), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -55686,25 +63775,21 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = - convert.json.encode(buildGoogleLongrunningListOperationsResponse()); + final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.list(arg_name, - filter: arg_filter, - pageSize: arg_pageSize, - pageToken: arg_pageToken, - $fields: arg_$fields); - checkGoogleLongrunningListOperationsResponse( - response as api.GoogleLongrunningListOperationsResponse); + final response = + await res.upload(arg_request, arg_parent, $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); }); + }); - unittest.test('method--wait', () async { + unittest.group('resource-ProjectsLocationsModelsEvaluationsResource', () { + unittest.test('method--get', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.metadataStores.operations; + final res = api.AiplatformApi(mock).projects.locations.models.evaluations; final arg_name = 'foo'; - final arg_timeout = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -55738,10 +63823,6 @@ void main() { ); } } - unittest.expect( - queryMap['timeout']!.first, - unittest.equals(arg_timeout), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -55750,30 +63831,27 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleLongrunningOperation()); + final resp = + convert.json.encode(buildGoogleCloudAiplatformV1ModelEvaluation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = - await res.wait(arg_name, timeout: arg_timeout, $fields: arg_$fields); - checkGoogleLongrunningOperation( - response as api.GoogleLongrunningOperation); + final response = await res.get(arg_name, $fields: arg_$fields); + checkGoogleCloudAiplatformV1ModelEvaluation( + response as api.GoogleCloudAiplatformV1ModelEvaluation); }); - }); - unittest.group('resource-ProjectsLocationsMigratableResourcesResource', () { - 
unittest.test('method--batchMigrate', () async { + unittest.test('method--import', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.migratableResources; + final res = api.AiplatformApi(mock).projects.locations.models.evaluations; final arg_request = - buildGoogleCloudAiplatformV1BatchMigrateResourcesRequest(); + buildGoogleCloudAiplatformV1ImportModelEvaluationRequest(); final arg_parent = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final obj = - api.GoogleCloudAiplatformV1BatchMigrateResourcesRequest.fromJson( + api.GoogleCloudAiplatformV1ImportModelEvaluationRequest.fromJson( json as core.Map); - checkGoogleCloudAiplatformV1BatchMigrateResourcesRequest(obj); + checkGoogleCloudAiplatformV1ImportModelEvaluationRequest(obj); final path = req.url.path; var pathOffset = 0; @@ -55814,28 +63892,26 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleLongrunningOperation()); + final resp = + convert.json.encode(buildGoogleCloudAiplatformV1ModelEvaluation()); return async.Future.value(stringResponse(200, h, resp)); }), true); final response = - await res.batchMigrate(arg_request, arg_parent, $fields: arg_$fields); - checkGoogleLongrunningOperation( - response as api.GoogleLongrunningOperation); + await res.import(arg_request, arg_parent, $fields: arg_$fields); + checkGoogleCloudAiplatformV1ModelEvaluation( + response as api.GoogleCloudAiplatformV1ModelEvaluation); }); - unittest.test('method--search', () async { + unittest.test('method--list', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.migratableResources; - final arg_request = - buildGoogleCloudAiplatformV1SearchMigratableResourcesRequest(); + final res = api.AiplatformApi(mock).projects.locations.models.evaluations; final arg_parent = 'foo'; + final arg_filter = 'foo'; + final arg_pageSize = 42; + final arg_pageToken = 'foo'; + final arg_readMask = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleCloudAiplatformV1SearchMigratableResourcesRequest - .fromJson(json as core.Map); - checkGoogleCloudAiplatformV1SearchMigratableResourcesRequest(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -55867,6 +63943,22 @@ void main() { ); } } + unittest.expect( + queryMap['filter']!.first, + unittest.equals(arg_filter), + ); + unittest.expect( + core.int.parse(queryMap['pageSize']!.first), + unittest.equals(arg_pageSize), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), + ); + unittest.expect( + queryMap['readMask']!.first, + unittest.equals(arg_readMask), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -55875,25 +63967,30 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode( - buildGoogleCloudAiplatformV1SearchMigratableResourcesResponse()); + final resp = convert.json + .encode(buildGoogleCloudAiplatformV1ListModelEvaluationsResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = - await res.search(arg_request, arg_parent, $fields: arg_$fields); - checkGoogleCloudAiplatformV1SearchMigratableResourcesResponse(response - as api.GoogleCloudAiplatformV1SearchMigratableResourcesResponse); + final response = await res.list(arg_parent, + filter: 
arg_filter, + pageSize: arg_pageSize, + pageToken: arg_pageToken, + readMask: arg_readMask, + $fields: arg_$fields); + checkGoogleCloudAiplatformV1ListModelEvaluationsResponse( + response as api.GoogleCloudAiplatformV1ListModelEvaluationsResponse); }); }); unittest.group( - 'resource-ProjectsLocationsMigratableResourcesOperationsResource', () { + 'resource-ProjectsLocationsModelsEvaluationsOperationsResource', () { unittest.test('method--cancel', () async { final mock = HttpServerMock(); final res = api.AiplatformApi(mock) .projects .locations - .migratableResources + .models + .evaluations .operations; final arg_name = 'foo'; final arg_$fields = 'foo'; @@ -55949,7 +64046,8 @@ void main() { final res = api.AiplatformApi(mock) .projects .locations - .migratableResources + .models + .evaluations .operations; final arg_name = 'foo'; final arg_$fields = 'foo'; @@ -56005,7 +64103,8 @@ void main() { final res = api.AiplatformApi(mock) .projects .locations - .migratableResources + .models + .evaluations .operations; final arg_name = 'foo'; final arg_$fields = 'foo'; @@ -56062,7 +64161,8 @@ void main() { final res = api.AiplatformApi(mock) .projects .locations - .migratableResources + .models + .evaluations .operations; final arg_name = 'foo'; final arg_filter = 'foo'; @@ -56139,7 +64239,8 @@ void main() { final res = api.AiplatformApi(mock) .projects .locations - .migratableResources + .models + .evaluations .operations; final arg_name = 'foo'; final arg_timeout = 'foo'; @@ -56198,23 +64299,21 @@ void main() { }); }); - unittest.group( - 'resource-ProjectsLocationsModelDeploymentMonitoringJobsResource', () { - unittest.test('method--create', () async { + unittest.group('resource-ProjectsLocationsModelsEvaluationsSlicesResource', + () { + unittest.test('method--batchImport', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .modelDeploymentMonitoringJobs; + final res = + api.AiplatformApi(mock).projects.locations.models.evaluations.slices; final arg_request = - buildGoogleCloudAiplatformV1ModelDeploymentMonitoringJob(); + buildGoogleCloudAiplatformV1BatchImportEvaluatedAnnotationsRequest(); final arg_parent = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final obj = - api.GoogleCloudAiplatformV1ModelDeploymentMonitoringJob.fromJson( - json as core.Map); - checkGoogleCloudAiplatformV1ModelDeploymentMonitoringJob(obj); + api.GoogleCloudAiplatformV1BatchImportEvaluatedAnnotationsRequest + .fromJson(json as core.Map); + checkGoogleCloudAiplatformV1BatchImportEvaluatedAnnotationsRequest(obj); final path = req.url.path; var pathOffset = 0; @@ -56255,78 +64354,21 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json - .encode(buildGoogleCloudAiplatformV1ModelDeploymentMonitoringJob()); + final resp = convert.json.encode( + buildGoogleCloudAiplatformV1BatchImportEvaluatedAnnotationsResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); final response = - await res.create(arg_request, arg_parent, $fields: arg_$fields); - checkGoogleCloudAiplatformV1ModelDeploymentMonitoringJob( - response as api.GoogleCloudAiplatformV1ModelDeploymentMonitoringJob); - }); - - unittest.test('method--delete', () async { - final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .modelDeploymentMonitoringJobs; - final arg_name = 'foo'; - final arg_$fields = 'foo'; - 
mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final path = req.url.path; - var pathOffset = 0; - core.int index; - core.String subPart; - unittest.expect( - path.substring(pathOffset, pathOffset + 1), - unittest.equals('/'), - ); - pathOffset += 1; - unittest.expect( - path.substring(pathOffset, pathOffset + 3), - unittest.equals('v1/'), - ); - pathOffset += 3; - // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; - - final query = req.url.query; - var queryOffset = 0; - final queryMap = >{}; - void addQueryParam(core.String n, core.String v) => - queryMap.putIfAbsent(n, () => []).add(v); - - if (query.isNotEmpty) { - for (var part in query.split('&')) { - final keyValue = part.split('='); - addQueryParam( - core.Uri.decodeQueryComponent(keyValue[0]), - core.Uri.decodeQueryComponent(keyValue[1]), - ); - } - } - unittest.expect( - queryMap['fields']!.first, - unittest.equals(arg_$fields), - ); - - final h = { - 'content-type': 'application/json; charset=utf-8', - }; - final resp = convert.json.encode(buildGoogleLongrunningOperation()); - return async.Future.value(stringResponse(200, h, resp)); - }), true); - final response = await res.delete(arg_name, $fields: arg_$fields); - checkGoogleLongrunningOperation( - response as api.GoogleLongrunningOperation); + await res.batchImport(arg_request, arg_parent, $fields: arg_$fields); + checkGoogleCloudAiplatformV1BatchImportEvaluatedAnnotationsResponse( + response as api + .GoogleCloudAiplatformV1BatchImportEvaluatedAnnotationsResponse); }); unittest.test('method--get', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .modelDeploymentMonitoringJobs; + final res = + api.AiplatformApi(mock).projects.locations.models.evaluations.slices; final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -56370,20 +64412,18 @@ void main() { 'content-type': 'application/json; charset=utf-8', }; final resp = convert.json - .encode(buildGoogleCloudAiplatformV1ModelDeploymentMonitoringJob()); + .encode(buildGoogleCloudAiplatformV1ModelEvaluationSlice()); return async.Future.value(stringResponse(200, h, resp)); }), true); final response = await res.get(arg_name, $fields: arg_$fields); - checkGoogleCloudAiplatformV1ModelDeploymentMonitoringJob( - response as api.GoogleCloudAiplatformV1ModelDeploymentMonitoringJob); + checkGoogleCloudAiplatformV1ModelEvaluationSlice( + response as api.GoogleCloudAiplatformV1ModelEvaluationSlice); }); unittest.test('method--list', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .modelDeploymentMonitoringJobs; + final res = + api.AiplatformApi(mock).projects.locations.models.evaluations.slices; final arg_parent = 'foo'; final arg_filter = 'foo'; final arg_pageSize = 42; @@ -56447,7 +64487,7 @@ void main() { 'content-type': 'application/json; charset=utf-8', }; final resp = convert.json.encode( - buildGoogleCloudAiplatformV1ListModelDeploymentMonitoringJobsResponse()); + buildGoogleCloudAiplatformV1ListModelEvaluationSlicesResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); final response = await res.list(arg_parent, @@ -56456,28 +64496,18 @@ void main() { pageToken: arg_pageToken, readMask: arg_readMask, $fields: arg_$fields); - checkGoogleCloudAiplatformV1ListModelDeploymentMonitoringJobsResponse( - response as api - 
.GoogleCloudAiplatformV1ListModelDeploymentMonitoringJobsResponse); + checkGoogleCloudAiplatformV1ListModelEvaluationSlicesResponse(response + as api.GoogleCloudAiplatformV1ListModelEvaluationSlicesResponse); }); + }); - unittest.test('method--patch', () async { + unittest.group('resource-ProjectsLocationsModelsOperationsResource', () { + unittest.test('method--cancel', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .modelDeploymentMonitoringJobs; - final arg_request = - buildGoogleCloudAiplatformV1ModelDeploymentMonitoringJob(); + final res = api.AiplatformApi(mock).projects.locations.models.operations; final arg_name = 'foo'; - final arg_updateMask = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = - api.GoogleCloudAiplatformV1ModelDeploymentMonitoringJob.fromJson( - json as core.Map); - checkGoogleCloudAiplatformV1ModelDeploymentMonitoringJob(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -56509,10 +64539,6 @@ void main() { ); } } - unittest.expect( - queryMap['updateMask']!.first, - unittest.equals(arg_updateMask), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -56521,32 +64547,19 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleLongrunningOperation()); + final resp = convert.json.encode(buildGoogleProtobufEmpty()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.patch(arg_request, arg_name, - updateMask: arg_updateMask, $fields: arg_$fields); - checkGoogleLongrunningOperation( - response as api.GoogleLongrunningOperation); + final response = await res.cancel(arg_name, $fields: arg_$fields); + checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); }); - unittest.test('method--pause', () async { + unittest.test('method--delete', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .modelDeploymentMonitoringJobs; - final arg_request = - buildGoogleCloudAiplatformV1PauseModelDeploymentMonitoringJobRequest(); + final res = api.AiplatformApi(mock).projects.locations.models.operations; final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = - api.GoogleCloudAiplatformV1PauseModelDeploymentMonitoringJobRequest - .fromJson(json as core.Map); - checkGoogleCloudAiplatformV1PauseModelDeploymentMonitoringJobRequest( - obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -56589,28 +64602,16 @@ void main() { final resp = convert.json.encode(buildGoogleProtobufEmpty()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = - await res.pause(arg_request, arg_name, $fields: arg_$fields); + final response = await res.delete(arg_name, $fields: arg_$fields); checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); }); - unittest.test('method--resume', () async { + unittest.test('method--get', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .modelDeploymentMonitoringJobs; - final arg_request = - buildGoogleCloudAiplatformV1ResumeModelDeploymentMonitoringJobRequest(); + final res = api.AiplatformApi(mock).projects.locations.models.operations; final arg_name = 'foo'; final arg_$fields = 'foo'; 
mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = - api.GoogleCloudAiplatformV1ResumeModelDeploymentMonitoringJobRequest - .fromJson(json as core.Map); - checkGoogleCloudAiplatformV1ResumeModelDeploymentMonitoringJobRequest( - obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -56650,32 +64651,23 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleProtobufEmpty()); + final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = - await res.resume(arg_request, arg_name, $fields: arg_$fields); - checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); + final response = await res.get(arg_name, $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); }); - unittest.test('method--searchModelDeploymentMonitoringStatsAnomalies', - () async { + unittest.test('method--list', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .modelDeploymentMonitoringJobs; - final arg_request = - buildGoogleCloudAiplatformV1SearchModelDeploymentMonitoringStatsAnomaliesRequest(); - final arg_modelDeploymentMonitoringJob = 'foo'; + final res = api.AiplatformApi(mock).projects.locations.models.operations; + final arg_name = 'foo'; + final arg_filter = 'foo'; + final arg_pageSize = 42; + final arg_pageToken = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = - api.GoogleCloudAiplatformV1SearchModelDeploymentMonitoringStatsAnomaliesRequest - .fromJson(json as core.Map); - checkGoogleCloudAiplatformV1SearchModelDeploymentMonitoringStatsAnomaliesRequest( - obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -56707,6 +64699,18 @@ void main() { ); } } + unittest.expect( + queryMap['filter']!.first, + unittest.equals(arg_filter), + ); + unittest.expect( + core.int.parse(queryMap['pageSize']!.first), + unittest.equals(arg_pageSize), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -56715,30 +64719,24 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode( - buildGoogleCloudAiplatformV1SearchModelDeploymentMonitoringStatsAnomaliesResponse()); + final resp = + convert.json.encode(buildGoogleLongrunningListOperationsResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.searchModelDeploymentMonitoringStatsAnomalies( - arg_request, arg_modelDeploymentMonitoringJob, + final response = await res.list(arg_name, + filter: arg_filter, + pageSize: arg_pageSize, + pageToken: arg_pageToken, $fields: arg_$fields); - checkGoogleCloudAiplatformV1SearchModelDeploymentMonitoringStatsAnomaliesResponse( - response as api - .GoogleCloudAiplatformV1SearchModelDeploymentMonitoringStatsAnomaliesResponse); + checkGoogleLongrunningListOperationsResponse( + response as api.GoogleLongrunningListOperationsResponse); }); - }); - unittest.group( - 'resource-ProjectsLocationsModelDeploymentMonitoringJobsOperationsResource', - () { - unittest.test('method--cancel', () async { + unittest.test('method--wait', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - 
.locations - .modelDeploymentMonitoringJobs - .operations; + final res = api.AiplatformApi(mock).projects.locations.models.operations; final arg_name = 'foo'; + final arg_timeout = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -56772,6 +64770,10 @@ void main() { ); } } + unittest.expect( + queryMap['timeout']!.first, + unittest.equals(arg_timeout), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -56780,23 +64782,28 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleProtobufEmpty()); + final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.cancel(arg_name, $fields: arg_$fields); - checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); + final response = + await res.wait(arg_name, timeout: arg_timeout, $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); }); + }); - unittest.test('method--delete', () async { + unittest.group('resource-ProjectsLocationsNasJobsResource', () { + unittest.test('method--cancel', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .modelDeploymentMonitoringJobs - .operations; + final res = api.AiplatformApi(mock).projects.locations.nasJobs; + final arg_request = buildGoogleCloudAiplatformV1CancelNasJobRequest(); final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.GoogleCloudAiplatformV1CancelNasJobRequest.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1CancelNasJobRequest(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -56839,20 +64846,22 @@ void main() { final resp = convert.json.encode(buildGoogleProtobufEmpty()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.delete(arg_name, $fields: arg_$fields); + final response = + await res.cancel(arg_request, arg_name, $fields: arg_$fields); checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); }); - unittest.test('method--get', () async { + unittest.test('method--create', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .modelDeploymentMonitoringJobs - .operations; - final arg_name = 'foo'; + final res = api.AiplatformApi(mock).projects.locations.nasJobs; + final arg_request = buildGoogleCloudAiplatformV1NasJob(); + final arg_parent = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.GoogleCloudAiplatformV1NasJob.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1NasJob(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -56892,25 +64901,19 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleLongrunningOperation()); + final resp = convert.json.encode(buildGoogleCloudAiplatformV1NasJob()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.get(arg_name, $fields: arg_$fields); - checkGoogleLongrunningOperation( - response as api.GoogleLongrunningOperation); + final response = + await res.create(arg_request, arg_parent, $fields: arg_$fields); + 
checkGoogleCloudAiplatformV1NasJob( + response as api.GoogleCloudAiplatformV1NasJob); }); - unittest.test('method--list', () async { + unittest.test('method--delete', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .modelDeploymentMonitoringJobs - .operations; + final res = api.AiplatformApi(mock).projects.locations.nasJobs; final arg_name = 'foo'; - final arg_filter = 'foo'; - final arg_pageSize = 42; - final arg_pageToken = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -56944,18 +64947,6 @@ void main() { ); } } - unittest.expect( - queryMap['filter']!.first, - unittest.equals(arg_filter), - ); - unittest.expect( - core.int.parse(queryMap['pageSize']!.first), - unittest.equals(arg_pageSize), - ); - unittest.expect( - queryMap['pageToken']!.first, - unittest.equals(arg_pageToken), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -56964,28 +64955,18 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = - convert.json.encode(buildGoogleLongrunningListOperationsResponse()); + final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.list(arg_name, - filter: arg_filter, - pageSize: arg_pageSize, - pageToken: arg_pageToken, - $fields: arg_$fields); - checkGoogleLongrunningListOperationsResponse( - response as api.GoogleLongrunningListOperationsResponse); + final response = await res.delete(arg_name, $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); }); - unittest.test('method--wait', () async { + unittest.test('method--get', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .modelDeploymentMonitoringJobs - .operations; + final res = api.AiplatformApi(mock).projects.locations.nasJobs; final arg_name = 'foo'; - final arg_timeout = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -57019,10 +65000,6 @@ void main() { ); } } - unittest.expect( - queryMap['timeout']!.first, - unittest.equals(arg_timeout), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -57031,28 +65008,24 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleLongrunningOperation()); + final resp = convert.json.encode(buildGoogleCloudAiplatformV1NasJob()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = - await res.wait(arg_name, timeout: arg_timeout, $fields: arg_$fields); - checkGoogleLongrunningOperation( - response as api.GoogleLongrunningOperation); + final response = await res.get(arg_name, $fields: arg_$fields); + checkGoogleCloudAiplatformV1NasJob( + response as api.GoogleCloudAiplatformV1NasJob); }); - }); - unittest.group('resource-ProjectsLocationsModelsResource', () { - unittest.test('method--copy', () async { + unittest.test('method--list', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.models; - final arg_request = buildGoogleCloudAiplatformV1CopyModelRequest(); + final res = api.AiplatformApi(mock).projects.locations.nasJobs; final arg_parent = 'foo'; + final arg_filter = 'foo'; + final arg_pageSize = 42; + 
final arg_pageToken = 'foo'; + final arg_readMask = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleCloudAiplatformV1CopyModelRequest.fromJson( - json as core.Map); - checkGoogleCloudAiplatformV1CopyModelRequest(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -57084,6 +65057,22 @@ void main() { ); } } + unittest.expect( + queryMap['filter']!.first, + unittest.equals(arg_filter), + ); + unittest.expect( + core.int.parse(queryMap['pageSize']!.first), + unittest.equals(arg_pageSize), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), + ); + unittest.expect( + queryMap['readMask']!.first, + unittest.equals(arg_readMask), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -57092,18 +65081,27 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleLongrunningOperation()); + final resp = convert.json + .encode(buildGoogleCloudAiplatformV1ListNasJobsResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = - await res.copy(arg_request, arg_parent, $fields: arg_$fields); - checkGoogleLongrunningOperation( - response as api.GoogleLongrunningOperation); + final response = await res.list(arg_parent, + filter: arg_filter, + pageSize: arg_pageSize, + pageToken: arg_pageToken, + readMask: arg_readMask, + $fields: arg_$fields); + checkGoogleCloudAiplatformV1ListNasJobsResponse( + response as api.GoogleCloudAiplatformV1ListNasJobsResponse); }); + }); - unittest.test('method--delete', () async { + unittest.group('resource-ProjectsLocationsNasJobsNasTrialDetailsResource', + () { + unittest.test('method--get', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.models; + final res = + api.AiplatformApi(mock).projects.locations.nasJobs.nasTrialDetails; final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -57146,18 +65144,22 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleLongrunningOperation()); + final resp = + convert.json.encode(buildGoogleCloudAiplatformV1NasTrialDetail()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.delete(arg_name, $fields: arg_$fields); - checkGoogleLongrunningOperation( - response as api.GoogleLongrunningOperation); + final response = await res.get(arg_name, $fields: arg_$fields); + checkGoogleCloudAiplatformV1NasTrialDetail( + response as api.GoogleCloudAiplatformV1NasTrialDetail); }); - unittest.test('method--deleteVersion', () async { + unittest.test('method--list', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.models; - final arg_name = 'foo'; + final res = + api.AiplatformApi(mock).projects.locations.nasJobs.nasTrialDetails; + final arg_parent = 'foo'; + final arg_pageSize = 42; + final arg_pageToken = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -57191,6 +65193,14 @@ void main() { ); } } + unittest.expect( + core.int.parse(queryMap['pageSize']!.first), + unittest.equals(arg_pageSize), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), + ); unittest.expect( 
queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -57199,24 +65209,32 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleLongrunningOperation()); + final resp = convert.json + .encode(buildGoogleCloudAiplatformV1ListNasTrialDetailsResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.deleteVersion(arg_name, $fields: arg_$fields); - checkGoogleLongrunningOperation( - response as api.GoogleLongrunningOperation); + final response = await res.list(arg_parent, + pageSize: arg_pageSize, + pageToken: arg_pageToken, + $fields: arg_$fields); + checkGoogleCloudAiplatformV1ListNasTrialDetailsResponse( + response as api.GoogleCloudAiplatformV1ListNasTrialDetailsResponse); }); + }); - unittest.test('method--export', () async { + unittest.group('resource-ProjectsLocationsNotebookExecutionJobsResource', () { + unittest.test('method--create', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.models; - final arg_request = buildGoogleCloudAiplatformV1ExportModelRequest(); - final arg_name = 'foo'; + final res = + api.AiplatformApi(mock).projects.locations.notebookExecutionJobs; + final arg_request = buildGoogleCloudAiplatformV1NotebookExecutionJob(); + final arg_parent = 'foo'; + final arg_notebookExecutionJobId = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleCloudAiplatformV1ExportModelRequest.fromJson( + final obj = api.GoogleCloudAiplatformV1NotebookExecutionJob.fromJson( json as core.Map); - checkGoogleCloudAiplatformV1ExportModelRequest(obj); + checkGoogleCloudAiplatformV1NotebookExecutionJob(obj); final path = req.url.path; var pathOffset = 0; @@ -57249,6 +65267,10 @@ void main() { ); } } + unittest.expect( + queryMap['notebookExecutionJobId']!.first, + unittest.equals(arg_notebookExecutionJobId), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -57260,15 +65282,17 @@ void main() { final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = - await res.export(arg_request, arg_name, $fields: arg_$fields); + final response = await res.create(arg_request, arg_parent, + notebookExecutionJobId: arg_notebookExecutionJobId, + $fields: arg_$fields); checkGoogleLongrunningOperation( response as api.GoogleLongrunningOperation); }); - unittest.test('method--get', () async { + unittest.test('method--delete', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.models; + final res = + api.AiplatformApi(mock).projects.locations.notebookExecutionJobs; final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -57311,19 +65335,20 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleCloudAiplatformV1Model()); + final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.get(arg_name, $fields: arg_$fields); - checkGoogleCloudAiplatformV1Model( - response as api.GoogleCloudAiplatformV1Model); + final response = await res.delete(arg_name, $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); 
}); - unittest.test('method--getIamPolicy', () async { + unittest.test('method--get', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.models; - final arg_resource = 'foo'; - final arg_options_requestedPolicyVersion = 42; + final res = + api.AiplatformApi(mock).projects.locations.notebookExecutionJobs; + final arg_name = 'foo'; + final arg_view = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -57358,8 +65383,8 @@ void main() { } } unittest.expect( - core.int.parse(queryMap['options.requestedPolicyVersion']!.first), - unittest.equals(arg_options_requestedPolicyVersion), + queryMap['view']!.first, + unittest.equals(arg_view), ); unittest.expect( queryMap['fields']!.first, @@ -57369,24 +65394,26 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleIamV1Policy()); + final resp = convert.json + .encode(buildGoogleCloudAiplatformV1NotebookExecutionJob()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.getIamPolicy(arg_resource, - options_requestedPolicyVersion: arg_options_requestedPolicyVersion, - $fields: arg_$fields); - checkGoogleIamV1Policy(response as api.GoogleIamV1Policy); + final response = + await res.get(arg_name, view: arg_view, $fields: arg_$fields); + checkGoogleCloudAiplatformV1NotebookExecutionJob( + response as api.GoogleCloudAiplatformV1NotebookExecutionJob); }); unittest.test('method--list', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.models; + final res = + api.AiplatformApi(mock).projects.locations.notebookExecutionJobs; final arg_parent = 'foo'; final arg_filter = 'foo'; final arg_orderBy = 'foo'; final arg_pageSize = 42; final arg_pageToken = 'foo'; - final arg_readMask = 'foo'; + final arg_view = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -57437,8 +65464,8 @@ void main() { unittest.equals(arg_pageToken), ); unittest.expect( - queryMap['readMask']!.first, - unittest.equals(arg_readMask), + queryMap['view']!.first, + unittest.equals(arg_view), ); unittest.expect( queryMap['fields']!.first, @@ -57448,8 +65475,8 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json - .encode(buildGoogleCloudAiplatformV1ListModelsResponse()); + final resp = convert.json.encode( + buildGoogleCloudAiplatformV1ListNotebookExecutionJobsResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); final response = await res.list(arg_parent, @@ -57457,21 +65484,23 @@ void main() { orderBy: arg_orderBy, pageSize: arg_pageSize, pageToken: arg_pageToken, - readMask: arg_readMask, + view: arg_view, $fields: arg_$fields); - checkGoogleCloudAiplatformV1ListModelsResponse( - response as api.GoogleCloudAiplatformV1ListModelsResponse); + checkGoogleCloudAiplatformV1ListNotebookExecutionJobsResponse(response + as api.GoogleCloudAiplatformV1ListNotebookExecutionJobsResponse); }); + }); - unittest.test('method--listVersions', () async { + unittest.group( + 'resource-ProjectsLocationsNotebookExecutionJobsOperationsResource', () { + unittest.test('method--cancel', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.models; + final res = api.AiplatformApi(mock) + .projects + .locations + 
.notebookExecutionJobs + .operations; final arg_name = 'foo'; - final arg_filter = 'foo'; - final arg_orderBy = 'foo'; - final arg_pageSize = 42; - final arg_pageToken = 'foo'; - final arg_readMask = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -57505,26 +65534,6 @@ void main() { ); } } - unittest.expect( - queryMap['filter']!.first, - unittest.equals(arg_filter), - ); - unittest.expect( - queryMap['orderBy']!.first, - unittest.equals(arg_orderBy), - ); - unittest.expect( - core.int.parse(queryMap['pageSize']!.first), - unittest.equals(arg_pageSize), - ); - unittest.expect( - queryMap['pageToken']!.first, - unittest.equals(arg_pageToken), - ); - unittest.expect( - queryMap['readMask']!.first, - unittest.equals(arg_readMask), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -57533,34 +65542,23 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json - .encode(buildGoogleCloudAiplatformV1ListModelVersionsResponse()); + final resp = convert.json.encode(buildGoogleProtobufEmpty()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.listVersions(arg_name, - filter: arg_filter, - orderBy: arg_orderBy, - pageSize: arg_pageSize, - pageToken: arg_pageToken, - readMask: arg_readMask, - $fields: arg_$fields); - checkGoogleCloudAiplatformV1ListModelVersionsResponse( - response as api.GoogleCloudAiplatformV1ListModelVersionsResponse); + final response = await res.cancel(arg_name, $fields: arg_$fields); + checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); }); - unittest.test('method--mergeVersionAliases', () async { + unittest.test('method--delete', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.models; - final arg_request = - buildGoogleCloudAiplatformV1MergeVersionAliasesRequest(); + final res = api.AiplatformApi(mock) + .projects + .locations + .notebookExecutionJobs + .operations; final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = - api.GoogleCloudAiplatformV1MergeVersionAliasesRequest.fromJson( - json as core.Map); - checkGoogleCloudAiplatformV1MergeVersionAliasesRequest(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -57600,27 +65598,23 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleCloudAiplatformV1Model()); + final resp = convert.json.encode(buildGoogleProtobufEmpty()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.mergeVersionAliases(arg_request, arg_name, - $fields: arg_$fields); - checkGoogleCloudAiplatformV1Model( - response as api.GoogleCloudAiplatformV1Model); + final response = await res.delete(arg_name, $fields: arg_$fields); + checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); }); - unittest.test('method--patch', () async { + unittest.test('method--get', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.models; - final arg_request = buildGoogleCloudAiplatformV1Model(); + final res = api.AiplatformApi(mock) + .projects + .locations + .notebookExecutionJobs + .operations; final arg_name = 'foo'; - final arg_updateMask = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, 
json) { - final obj = api.GoogleCloudAiplatformV1Model.fromJson( - json as core.Map); - checkGoogleCloudAiplatformV1Model(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -57652,10 +65646,6 @@ void main() { ); } } - unittest.expect( - queryMap['updateMask']!.first, - unittest.equals(arg_updateMask), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -57664,26 +65654,27 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleCloudAiplatformV1Model()); + final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.patch(arg_request, arg_name, - updateMask: arg_updateMask, $fields: arg_$fields); - checkGoogleCloudAiplatformV1Model( - response as api.GoogleCloudAiplatformV1Model); + final response = await res.get(arg_name, $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); }); - unittest.test('method--setIamPolicy', () async { + unittest.test('method--list', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.models; - final arg_request = buildGoogleIamV1SetIamPolicyRequest(); - final arg_resource = 'foo'; + final res = api.AiplatformApi(mock) + .projects + .locations + .notebookExecutionJobs + .operations; + final arg_name = 'foo'; + final arg_filter = 'foo'; + final arg_pageSize = 42; + final arg_pageToken = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleIamV1SetIamPolicyRequest.fromJson( - json as core.Map); - checkGoogleIamV1SetIamPolicyRequest(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -57715,6 +65706,18 @@ void main() { ); } } + unittest.expect( + queryMap['filter']!.first, + unittest.equals(arg_filter), + ); + unittest.expect( + core.int.parse(queryMap['pageSize']!.first), + unittest.equals(arg_pageSize), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -57723,19 +65726,28 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleIamV1Policy()); + final resp = + convert.json.encode(buildGoogleLongrunningListOperationsResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.setIamPolicy(arg_request, arg_resource, + final response = await res.list(arg_name, + filter: arg_filter, + pageSize: arg_pageSize, + pageToken: arg_pageToken, $fields: arg_$fields); - checkGoogleIamV1Policy(response as api.GoogleIamV1Policy); + checkGoogleLongrunningListOperationsResponse( + response as api.GoogleLongrunningListOperationsResponse); }); - unittest.test('method--testIamPermissions', () async { + unittest.test('method--wait', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.models; - final arg_resource = 'foo'; - final arg_permissions = buildUnnamed392(); + final res = api.AiplatformApi(mock) + .projects + .locations + .notebookExecutionJobs + .operations; + final arg_name = 'foo'; + final arg_timeout = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -57770,8 +65782,8 @@ void main() { } } 
unittest.expect( - queryMap['permissions']!, - unittest.equals(arg_permissions), + queryMap['timeout']!.first, + unittest.equals(arg_timeout), ); unittest.expect( queryMap['fields']!.first, @@ -57781,28 +65793,30 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = - convert.json.encode(buildGoogleIamV1TestIamPermissionsResponse()); + final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.testIamPermissions(arg_resource, - permissions: arg_permissions, $fields: arg_$fields); - checkGoogleIamV1TestIamPermissionsResponse( - response as api.GoogleIamV1TestIamPermissionsResponse); + final response = + await res.wait(arg_name, timeout: arg_timeout, $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); }); + }); - unittest.test('method--updateExplanationDataset', () async { + unittest.group('resource-ProjectsLocationsNotebookRuntimeTemplatesResource', + () { + unittest.test('method--create', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.models; - final arg_request = - buildGoogleCloudAiplatformV1UpdateExplanationDatasetRequest(); - final arg_model = 'foo'; + final res = + api.AiplatformApi(mock).projects.locations.notebookRuntimeTemplates; + final arg_request = buildGoogleCloudAiplatformV1NotebookRuntimeTemplate(); + final arg_parent = 'foo'; + final arg_notebookRuntimeTemplateId = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = - api.GoogleCloudAiplatformV1UpdateExplanationDatasetRequest.fromJson( - json as core.Map); - checkGoogleCloudAiplatformV1UpdateExplanationDatasetRequest(obj); + final obj = api.GoogleCloudAiplatformV1NotebookRuntimeTemplate.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1NotebookRuntimeTemplate(obj); final path = req.url.path; var pathOffset = 0; @@ -57835,6 +65849,10 @@ void main() { ); } } + unittest.expect( + queryMap['notebookRuntimeTemplateId']!.first, + unittest.equals(arg_notebookRuntimeTemplateId), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -57846,24 +65864,20 @@ void main() { final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.updateExplanationDataset( - arg_request, arg_model, + final response = await res.create(arg_request, arg_parent, + notebookRuntimeTemplateId: arg_notebookRuntimeTemplateId, $fields: arg_$fields); checkGoogleLongrunningOperation( response as api.GoogleLongrunningOperation); }); - unittest.test('method--upload', () async { + unittest.test('method--delete', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.models; - final arg_request = buildGoogleCloudAiplatformV1UploadModelRequest(); - final arg_parent = 'foo'; + final res = + api.AiplatformApi(mock).projects.locations.notebookRuntimeTemplates; + final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleCloudAiplatformV1UploadModelRequest.fromJson( - json as core.Map); - checkGoogleCloudAiplatformV1UploadModelRequest(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -57906,17 +65920,15 @@ void main() { final resp = 
convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = - await res.upload(arg_request, arg_parent, $fields: arg_$fields); + final response = await res.delete(arg_name, $fields: arg_$fields); checkGoogleLongrunningOperation( response as api.GoogleLongrunningOperation); }); - }); - unittest.group('resource-ProjectsLocationsModelsEvaluationsResource', () { unittest.test('method--get', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.models.evaluations; + final res = + api.AiplatformApi(mock).projects.locations.notebookRuntimeTemplates; final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -57959,28 +65971,23 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = - convert.json.encode(buildGoogleCloudAiplatformV1ModelEvaluation()); + final resp = convert.json + .encode(buildGoogleCloudAiplatformV1NotebookRuntimeTemplate()); return async.Future.value(stringResponse(200, h, resp)); }), true); final response = await res.get(arg_name, $fields: arg_$fields); - checkGoogleCloudAiplatformV1ModelEvaluation( - response as api.GoogleCloudAiplatformV1ModelEvaluation); + checkGoogleCloudAiplatformV1NotebookRuntimeTemplate( + response as api.GoogleCloudAiplatformV1NotebookRuntimeTemplate); }); - unittest.test('method--import', () async { + unittest.test('method--getIamPolicy', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.models.evaluations; - final arg_request = - buildGoogleCloudAiplatformV1ImportModelEvaluationRequest(); - final arg_parent = 'foo'; + final res = + api.AiplatformApi(mock).projects.locations.notebookRuntimeTemplates; + final arg_resource = 'foo'; + final arg_options_requestedPolicyVersion = 42; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = - api.GoogleCloudAiplatformV1ImportModelEvaluationRequest.fromJson( - json as core.Map); - checkGoogleCloudAiplatformV1ImportModelEvaluationRequest(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -58012,6 +66019,10 @@ void main() { ); } } + unittest.expect( + core.int.parse(queryMap['options.requestedPolicyVersion']!.first), + unittest.equals(arg_options_requestedPolicyVersion), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -58020,21 +66031,22 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = - convert.json.encode(buildGoogleCloudAiplatformV1ModelEvaluation()); + final resp = convert.json.encode(buildGoogleIamV1Policy()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = - await res.import(arg_request, arg_parent, $fields: arg_$fields); - checkGoogleCloudAiplatformV1ModelEvaluation( - response as api.GoogleCloudAiplatformV1ModelEvaluation); + final response = await res.getIamPolicy(arg_resource, + options_requestedPolicyVersion: arg_options_requestedPolicyVersion, + $fields: arg_$fields); + checkGoogleIamV1Policy(response as api.GoogleIamV1Policy); }); unittest.test('method--list', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.models.evaluations; + final res = + api.AiplatformApi(mock).projects.locations.notebookRuntimeTemplates; final arg_parent = 'foo'; final arg_filter = 'foo'; + final arg_orderBy 
= 'foo'; final arg_pageSize = 42; final arg_pageToken = 'foo'; final arg_readMask = 'foo'; @@ -58075,6 +66087,10 @@ void main() { queryMap['filter']!.first, unittest.equals(arg_filter), ); + unittest.expect( + queryMap['orderBy']!.first, + unittest.equals(arg_orderBy), + ); unittest.expect( core.int.parse(queryMap['pageSize']!.first), unittest.equals(arg_pageSize), @@ -58095,34 +66111,34 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json - .encode(buildGoogleCloudAiplatformV1ListModelEvaluationsResponse()); + final resp = convert.json.encode( + buildGoogleCloudAiplatformV1ListNotebookRuntimeTemplatesResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); final response = await res.list(arg_parent, filter: arg_filter, + orderBy: arg_orderBy, pageSize: arg_pageSize, pageToken: arg_pageToken, readMask: arg_readMask, $fields: arg_$fields); - checkGoogleCloudAiplatformV1ListModelEvaluationsResponse( - response as api.GoogleCloudAiplatformV1ListModelEvaluationsResponse); + checkGoogleCloudAiplatformV1ListNotebookRuntimeTemplatesResponse(response + as api.GoogleCloudAiplatformV1ListNotebookRuntimeTemplatesResponse); }); - }); - unittest.group( - 'resource-ProjectsLocationsModelsEvaluationsOperationsResource', () { - unittest.test('method--cancel', () async { + unittest.test('method--patch', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .models - .evaluations - .operations; + final res = + api.AiplatformApi(mock).projects.locations.notebookRuntimeTemplates; + final arg_request = buildGoogleCloudAiplatformV1NotebookRuntimeTemplate(); final arg_name = 'foo'; + final arg_updateMask = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.GoogleCloudAiplatformV1NotebookRuntimeTemplate.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1NotebookRuntimeTemplate(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -58155,62 +66171,9 @@ void main() { } } unittest.expect( - queryMap['fields']!.first, - unittest.equals(arg_$fields), - ); - - final h = { - 'content-type': 'application/json; charset=utf-8', - }; - final resp = convert.json.encode(buildGoogleProtobufEmpty()); - return async.Future.value(stringResponse(200, h, resp)); - }), true); - final response = await res.cancel(arg_name, $fields: arg_$fields); - checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); - }); - - unittest.test('method--delete', () async { - final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .models - .evaluations - .operations; - final arg_name = 'foo'; - final arg_$fields = 'foo'; - mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final path = req.url.path; - var pathOffset = 0; - core.int index; - core.String subPart; - unittest.expect( - path.substring(pathOffset, pathOffset + 1), - unittest.equals('/'), - ); - pathOffset += 1; - unittest.expect( - path.substring(pathOffset, pathOffset + 3), - unittest.equals('v1/'), + queryMap['updateMask']!.first, + unittest.equals(arg_updateMask), ); - pathOffset += 3; - // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; - - final query = req.url.query; - var queryOffset = 0; - final queryMap = >{}; - void addQueryParam(core.String n, core.String v) => - queryMap.putIfAbsent(n, () => []).add(v); - - if (query.isNotEmpty) { - for (var part in 
query.split('&')) { - final keyValue = part.split('='); - addQueryParam( - core.Uri.decodeQueryComponent(keyValue[0]), - core.Uri.decodeQueryComponent(keyValue[1]), - ); - } - } unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -58219,24 +66182,28 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleProtobufEmpty()); + final resp = convert.json + .encode(buildGoogleCloudAiplatformV1NotebookRuntimeTemplate()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.delete(arg_name, $fields: arg_$fields); - checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); + final response = await res.patch(arg_request, arg_name, + updateMask: arg_updateMask, $fields: arg_$fields); + checkGoogleCloudAiplatformV1NotebookRuntimeTemplate( + response as api.GoogleCloudAiplatformV1NotebookRuntimeTemplate); }); - unittest.test('method--get', () async { + unittest.test('method--setIamPolicy', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .models - .evaluations - .operations; - final arg_name = 'foo'; + final res = + api.AiplatformApi(mock).projects.locations.notebookRuntimeTemplates; + final arg_request = buildGoogleIamV1SetIamPolicyRequest(); + final arg_resource = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.GoogleIamV1SetIamPolicyRequest.fromJson( + json as core.Map); + checkGoogleIamV1SetIamPolicyRequest(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -58276,26 +66243,20 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleLongrunningOperation()); + final resp = convert.json.encode(buildGoogleIamV1Policy()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.get(arg_name, $fields: arg_$fields); - checkGoogleLongrunningOperation( - response as api.GoogleLongrunningOperation); + final response = await res.setIamPolicy(arg_request, arg_resource, + $fields: arg_$fields); + checkGoogleIamV1Policy(response as api.GoogleIamV1Policy); }); - unittest.test('method--list', () async { + unittest.test('method--testIamPermissions', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .models - .evaluations - .operations; - final arg_name = 'foo'; - final arg_filter = 'foo'; - final arg_pageSize = 42; - final arg_pageToken = 'foo'; + final res = + api.AiplatformApi(mock).projects.locations.notebookRuntimeTemplates; + final arg_resource = 'foo'; + final arg_permissions = buildUnnamed433(); final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -58330,16 +66291,8 @@ void main() { } } unittest.expect( - queryMap['filter']!.first, - unittest.equals(arg_filter), - ); - unittest.expect( - core.int.parse(queryMap['pageSize']!.first), - unittest.equals(arg_pageSize), - ); - unittest.expect( - queryMap['pageToken']!.first, - unittest.equals(arg_pageToken), + queryMap['permissions']!, + unittest.equals(arg_permissions), ); unittest.expect( queryMap['fields']!.first, @@ -58350,28 +66303,27 @@ void main() { 'content-type': 'application/json; charset=utf-8', }; final resp = - convert.json.encode(buildGoogleLongrunningListOperationsResponse()); + 
convert.json.encode(buildGoogleIamV1TestIamPermissionsResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.list(arg_name, - filter: arg_filter, - pageSize: arg_pageSize, - pageToken: arg_pageToken, - $fields: arg_$fields); - checkGoogleLongrunningListOperationsResponse( - response as api.GoogleLongrunningListOperationsResponse); + final response = await res.testIamPermissions(arg_resource, + permissions: arg_permissions, $fields: arg_$fields); + checkGoogleIamV1TestIamPermissionsResponse( + response as api.GoogleIamV1TestIamPermissionsResponse); }); + }); - unittest.test('method--wait', () async { + unittest.group( + 'resource-ProjectsLocationsNotebookRuntimeTemplatesOperationsResource', + () { + unittest.test('method--cancel', () async { final mock = HttpServerMock(); final res = api.AiplatformApi(mock) .projects .locations - .models - .evaluations + .notebookRuntimeTemplates .operations; final arg_name = 'foo'; - final arg_timeout = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -58405,10 +66357,6 @@ void main() { ); } } - unittest.expect( - queryMap['timeout']!.first, - unittest.equals(arg_timeout), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -58417,32 +66365,23 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleLongrunningOperation()); + final resp = convert.json.encode(buildGoogleProtobufEmpty()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = - await res.wait(arg_name, timeout: arg_timeout, $fields: arg_$fields); - checkGoogleLongrunningOperation( - response as api.GoogleLongrunningOperation); + final response = await res.cancel(arg_name, $fields: arg_$fields); + checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); }); - }); - unittest.group('resource-ProjectsLocationsModelsEvaluationsSlicesResource', - () { - unittest.test('method--batchImport', () async { + unittest.test('method--delete', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.models.evaluations.slices; - final arg_request = - buildGoogleCloudAiplatformV1BatchImportEvaluatedAnnotationsRequest(); - final arg_parent = 'foo'; + final res = api.AiplatformApi(mock) + .projects + .locations + .notebookRuntimeTemplates + .operations; + final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = - api.GoogleCloudAiplatformV1BatchImportEvaluatedAnnotationsRequest - .fromJson(json as core.Map); - checkGoogleCloudAiplatformV1BatchImportEvaluatedAnnotationsRequest(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -58482,21 +66421,20 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode( - buildGoogleCloudAiplatformV1BatchImportEvaluatedAnnotationsResponse()); + final resp = convert.json.encode(buildGoogleProtobufEmpty()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = - await res.batchImport(arg_request, arg_parent, $fields: arg_$fields); - checkGoogleCloudAiplatformV1BatchImportEvaluatedAnnotationsResponse( - response as api - .GoogleCloudAiplatformV1BatchImportEvaluatedAnnotationsResponse); + final response = await res.delete(arg_name, $fields: arg_$fields); + 
checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); }); unittest.test('method--get', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.models.evaluations.slices; + final res = api.AiplatformApi(mock) + .projects + .locations + .notebookRuntimeTemplates + .operations; final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -58539,24 +66477,25 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json - .encode(buildGoogleCloudAiplatformV1ModelEvaluationSlice()); + final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); final response = await res.get(arg_name, $fields: arg_$fields); - checkGoogleCloudAiplatformV1ModelEvaluationSlice( - response as api.GoogleCloudAiplatformV1ModelEvaluationSlice); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); }); unittest.test('method--list', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.models.evaluations.slices; - final arg_parent = 'foo'; + final res = api.AiplatformApi(mock) + .projects + .locations + .notebookRuntimeTemplates + .operations; + final arg_name = 'foo'; final arg_filter = 'foo'; final arg_pageSize = 42; final arg_pageToken = 'foo'; - final arg_readMask = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -58602,10 +66541,6 @@ void main() { queryMap['pageToken']!.first, unittest.equals(arg_pageToken), ); - unittest.expect( - queryMap['readMask']!.first, - unittest.equals(arg_readMask), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -58614,26 +66549,28 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode( - buildGoogleCloudAiplatformV1ListModelEvaluationSlicesResponse()); + final resp = + convert.json.encode(buildGoogleLongrunningListOperationsResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.list(arg_parent, + final response = await res.list(arg_name, filter: arg_filter, pageSize: arg_pageSize, pageToken: arg_pageToken, - readMask: arg_readMask, $fields: arg_$fields); - checkGoogleCloudAiplatformV1ListModelEvaluationSlicesResponse(response - as api.GoogleCloudAiplatformV1ListModelEvaluationSlicesResponse); + checkGoogleLongrunningListOperationsResponse( + response as api.GoogleLongrunningListOperationsResponse); }); - }); - unittest.group('resource-ProjectsLocationsModelsOperationsResource', () { - unittest.test('method--cancel', () async { + unittest.test('method--wait', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.models.operations; + final res = api.AiplatformApi(mock) + .projects + .locations + .notebookRuntimeTemplates + .operations; final arg_name = 'foo'; + final arg_timeout = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -58667,6 +66604,10 @@ void main() { ); } } + unittest.expect( + queryMap['timeout']!.first, + unittest.equals(arg_timeout), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -58675,16 +66616,81 @@ void main() { final h = { 'content-type': 'application/json; 
charset=utf-8', }; - final resp = convert.json.encode(buildGoogleProtobufEmpty()); + final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.cancel(arg_name, $fields: arg_$fields); - checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); + final response = + await res.wait(arg_name, timeout: arg_timeout, $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); + }); + }); + + unittest.group('resource-ProjectsLocationsNotebookRuntimesResource', () { + unittest.test('method--assign', () async { + final mock = HttpServerMock(); + final res = api.AiplatformApi(mock).projects.locations.notebookRuntimes; + final arg_request = + buildGoogleCloudAiplatformV1AssignNotebookRuntimeRequest(); + final arg_parent = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = + api.GoogleCloudAiplatformV1AssignNotebookRuntimeRequest.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1AssignNotebookRuntimeRequest(obj); + + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildGoogleLongrunningOperation()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = + await res.assign(arg_request, arg_parent, $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); }); unittest.test('method--delete', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.models.operations; + final res = api.AiplatformApi(mock).projects.locations.notebookRuntimes; final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -58727,16 +66733,17 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleProtobufEmpty()); + final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); final response = await res.delete(arg_name, $fields: arg_$fields); - checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); }); unittest.test('method--get', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.models.operations; + final res = api.AiplatformApi(mock).projects.locations.notebookRuntimes; final arg_name = 
'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -58779,21 +66786,24 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleLongrunningOperation()); + final resp = + convert.json.encode(buildGoogleCloudAiplatformV1NotebookRuntime()); return async.Future.value(stringResponse(200, h, resp)); }), true); final response = await res.get(arg_name, $fields: arg_$fields); - checkGoogleLongrunningOperation( - response as api.GoogleLongrunningOperation); + checkGoogleCloudAiplatformV1NotebookRuntime( + response as api.GoogleCloudAiplatformV1NotebookRuntime); }); unittest.test('method--list', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.models.operations; - final arg_name = 'foo'; + final res = api.AiplatformApi(mock).projects.locations.notebookRuntimes; + final arg_parent = 'foo'; final arg_filter = 'foo'; + final arg_orderBy = 'foo'; final arg_pageSize = 42; final arg_pageToken = 'foo'; + final arg_readMask = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -58831,6 +66841,10 @@ void main() { queryMap['filter']!.first, unittest.equals(arg_filter), ); + unittest.expect( + queryMap['orderBy']!.first, + unittest.equals(arg_orderBy), + ); unittest.expect( core.int.parse(queryMap['pageSize']!.first), unittest.equals(arg_pageSize), @@ -58839,6 +66853,10 @@ void main() { queryMap['pageToken']!.first, unittest.equals(arg_pageToken), ); + unittest.expect( + queryMap['readMask']!.first, + unittest.equals(arg_readMask), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -58847,26 +66865,34 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = - convert.json.encode(buildGoogleLongrunningListOperationsResponse()); + final resp = convert.json + .encode(buildGoogleCloudAiplatformV1ListNotebookRuntimesResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.list(arg_name, + final response = await res.list(arg_parent, filter: arg_filter, + orderBy: arg_orderBy, pageSize: arg_pageSize, pageToken: arg_pageToken, + readMask: arg_readMask, $fields: arg_$fields); - checkGoogleLongrunningListOperationsResponse( - response as api.GoogleLongrunningListOperationsResponse); + checkGoogleCloudAiplatformV1ListNotebookRuntimesResponse( + response as api.GoogleCloudAiplatformV1ListNotebookRuntimesResponse); }); - unittest.test('method--wait', () async { + unittest.test('method--start', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.models.operations; + final res = api.AiplatformApi(mock).projects.locations.notebookRuntimes; + final arg_request = + buildGoogleCloudAiplatformV1StartNotebookRuntimeRequest(); final arg_name = 'foo'; - final arg_timeout = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = + api.GoogleCloudAiplatformV1StartNotebookRuntimeRequest.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1StartNotebookRuntimeRequest(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -58898,10 +66924,6 @@ void main() { ); } } - unittest.expect( - queryMap['timeout']!.first, - unittest.equals(arg_timeout), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -58914,23 
+66936,23 @@ void main() { return async.Future.value(stringResponse(200, h, resp)); }), true); final response = - await res.wait(arg_name, timeout: arg_timeout, $fields: arg_$fields); + await res.start(arg_request, arg_name, $fields: arg_$fields); checkGoogleLongrunningOperation( response as api.GoogleLongrunningOperation); }); - }); - unittest.group('resource-ProjectsLocationsNasJobsResource', () { - unittest.test('method--cancel', () async { + unittest.test('method--stop', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.nasJobs; - final arg_request = buildGoogleCloudAiplatformV1CancelNasJobRequest(); + final res = api.AiplatformApi(mock).projects.locations.notebookRuntimes; + final arg_request = + buildGoogleCloudAiplatformV1StopNotebookRuntimeRequest(); final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleCloudAiplatformV1CancelNasJobRequest.fromJson( - json as core.Map); - checkGoogleCloudAiplatformV1CancelNasJobRequest(obj); + final obj = + api.GoogleCloudAiplatformV1StopNotebookRuntimeRequest.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1StopNotebookRuntimeRequest(obj); final path = req.url.path; var pathOffset = 0; @@ -58971,24 +66993,27 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleProtobufEmpty()); + final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); final response = - await res.cancel(arg_request, arg_name, $fields: arg_$fields); - checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); + await res.stop(arg_request, arg_name, $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); }); - unittest.test('method--create', () async { + unittest.test('method--upgrade', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.nasJobs; - final arg_request = buildGoogleCloudAiplatformV1NasJob(); - final arg_parent = 'foo'; + final res = api.AiplatformApi(mock).projects.locations.notebookRuntimes; + final arg_request = + buildGoogleCloudAiplatformV1UpgradeNotebookRuntimeRequest(); + final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleCloudAiplatformV1NasJob.fromJson( - json as core.Map); - checkGoogleCloudAiplatformV1NasJob(obj); + final obj = + api.GoogleCloudAiplatformV1UpgradeNotebookRuntimeRequest.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1UpgradeNotebookRuntimeRequest(obj); final path = req.url.path; var pathOffset = 0; @@ -59029,18 +67054,81 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleCloudAiplatformV1NasJob()); + final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); final response = - await res.create(arg_request, arg_parent, $fields: arg_$fields); - checkGoogleCloudAiplatformV1NasJob( - response as api.GoogleCloudAiplatformV1NasJob); + await res.upgrade(arg_request, arg_name, $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); + }); + }); + + unittest.group('resource-ProjectsLocationsNotebookRuntimesOperationsResource', + () { + 
unittest.test('method--cancel', () async { + final mock = HttpServerMock(); + final res = api.AiplatformApi(mock) + .projects + .locations + .notebookRuntimes + .operations; + final arg_name = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildGoogleProtobufEmpty()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.cancel(arg_name, $fields: arg_$fields); + checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); }); unittest.test('method--delete', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.nasJobs; + final res = api.AiplatformApi(mock) + .projects + .locations + .notebookRuntimes + .operations; final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -59083,17 +67171,20 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleLongrunningOperation()); + final resp = convert.json.encode(buildGoogleProtobufEmpty()); return async.Future.value(stringResponse(200, h, resp)); }), true); final response = await res.delete(arg_name, $fields: arg_$fields); - checkGoogleLongrunningOperation( - response as api.GoogleLongrunningOperation); + checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); }); unittest.test('method--get', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.nasJobs; + final res = api.AiplatformApi(mock) + .projects + .locations + .notebookRuntimes + .operations; final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -59136,22 +67227,25 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleCloudAiplatformV1NasJob()); + final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); final response = await res.get(arg_name, $fields: arg_$fields); - checkGoogleCloudAiplatformV1NasJob( - response as api.GoogleCloudAiplatformV1NasJob); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); }); unittest.test('method--list', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.nasJobs; - final arg_parent = 'foo'; + final res = api.AiplatformApi(mock) + 
.projects + .locations + .notebookRuntimes + .operations; + final arg_name = 'foo'; final arg_filter = 'foo'; final arg_pageSize = 42; final arg_pageToken = 'foo'; - final arg_readMask = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -59197,10 +67291,6 @@ void main() { queryMap['pageToken']!.first, unittest.equals(arg_pageToken), ); - unittest.expect( - queryMap['readMask']!.first, - unittest.equals(arg_readMask), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -59209,28 +67299,28 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json - .encode(buildGoogleCloudAiplatformV1ListNasJobsResponse()); + final resp = + convert.json.encode(buildGoogleLongrunningListOperationsResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.list(arg_parent, + final response = await res.list(arg_name, filter: arg_filter, pageSize: arg_pageSize, pageToken: arg_pageToken, - readMask: arg_readMask, $fields: arg_$fields); - checkGoogleCloudAiplatformV1ListNasJobsResponse( - response as api.GoogleCloudAiplatformV1ListNasJobsResponse); + checkGoogleLongrunningListOperationsResponse( + response as api.GoogleLongrunningListOperationsResponse); }); - }); - unittest.group('resource-ProjectsLocationsNasJobsNasTrialDetailsResource', - () { - unittest.test('method--get', () async { + unittest.test('method--wait', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.nasJobs.nasTrialDetails; + final res = api.AiplatformApi(mock) + .projects + .locations + .notebookRuntimes + .operations; final arg_name = 'foo'; + final arg_timeout = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -59264,6 +67354,10 @@ void main() { ); } } + unittest.expect( + queryMap['timeout']!.first, + unittest.equals(arg_timeout), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -59272,22 +67366,21 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = - convert.json.encode(buildGoogleCloudAiplatformV1NasTrialDetail()); + final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.get(arg_name, $fields: arg_$fields); - checkGoogleCloudAiplatformV1NasTrialDetail( - response as api.GoogleCloudAiplatformV1NasTrialDetail); + final response = + await res.wait(arg_name, timeout: arg_timeout, $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); }); + }); - unittest.test('method--list', () async { + unittest.group('resource-ProjectsLocationsOperationsResource', () { + unittest.test('method--cancel', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.nasJobs.nasTrialDetails; - final arg_parent = 'foo'; - final arg_pageSize = 42; - final arg_pageToken = 'foo'; + final res = api.AiplatformApi(mock).projects.locations.operations; + final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -59321,14 +67414,6 @@ void main() { ); } } - unittest.expect( - core.int.parse(queryMap['pageSize']!.first), - unittest.equals(arg_pageSize), - ); - 
unittest.expect( - queryMap['pageToken']!.first, - unittest.equals(arg_pageToken), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -59337,33 +67422,19 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json - .encode(buildGoogleCloudAiplatformV1ListNasTrialDetailsResponse()); + final resp = convert.json.encode(buildGoogleProtobufEmpty()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.list(arg_parent, - pageSize: arg_pageSize, - pageToken: arg_pageToken, - $fields: arg_$fields); - checkGoogleCloudAiplatformV1ListNasTrialDetailsResponse( - response as api.GoogleCloudAiplatformV1ListNasTrialDetailsResponse); + final response = await res.cancel(arg_name, $fields: arg_$fields); + checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); }); - }); - unittest.group('resource-ProjectsLocationsNotebookExecutionJobsResource', () { - unittest.test('method--create', () async { + unittest.test('method--delete', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.notebookExecutionJobs; - final arg_request = buildGoogleCloudAiplatformV1NotebookExecutionJob(); - final arg_parent = 'foo'; - final arg_notebookExecutionJobId = 'foo'; + final res = api.AiplatformApi(mock).projects.locations.operations; + final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleCloudAiplatformV1NotebookExecutionJob.fromJson( - json as core.Map); - checkGoogleCloudAiplatformV1NotebookExecutionJob(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -59395,10 +67466,6 @@ void main() { ); } } - unittest.expect( - queryMap['notebookExecutionJobId']!.first, - unittest.equals(arg_notebookExecutionJobId), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -59407,20 +67474,16 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleLongrunningOperation()); + final resp = convert.json.encode(buildGoogleProtobufEmpty()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.create(arg_request, arg_parent, - notebookExecutionJobId: arg_notebookExecutionJobId, - $fields: arg_$fields); - checkGoogleLongrunningOperation( - response as api.GoogleLongrunningOperation); + final response = await res.delete(arg_name, $fields: arg_$fields); + checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); }); - unittest.test('method--delete', () async { + unittest.test('method--get', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.notebookExecutionJobs; + final res = api.AiplatformApi(mock).projects.locations.operations; final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -59466,17 +67529,18 @@ void main() { final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.delete(arg_name, $fields: arg_$fields); + final response = await res.get(arg_name, $fields: arg_$fields); checkGoogleLongrunningOperation( response as api.GoogleLongrunningOperation); }); - unittest.test('method--get', () async { + unittest.test('method--list', () async { final mock = HttpServerMock(); - final res = - 
api.AiplatformApi(mock).projects.locations.notebookExecutionJobs; + final res = api.AiplatformApi(mock).projects.locations.operations; final arg_name = 'foo'; - final arg_view = 'foo'; + final arg_filter = 'foo'; + final arg_pageSize = 42; + final arg_pageToken = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -59511,8 +67575,16 @@ void main() { } } unittest.expect( - queryMap['view']!.first, - unittest.equals(arg_view), + queryMap['filter']!.first, + unittest.equals(arg_filter), + ); + unittest.expect( + core.int.parse(queryMap['pageSize']!.first), + unittest.equals(arg_pageSize), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), ); unittest.expect( queryMap['fields']!.first, @@ -59522,26 +67594,24 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json - .encode(buildGoogleCloudAiplatformV1NotebookExecutionJob()); + final resp = + convert.json.encode(buildGoogleLongrunningListOperationsResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = - await res.get(arg_name, view: arg_view, $fields: arg_$fields); - checkGoogleCloudAiplatformV1NotebookExecutionJob( - response as api.GoogleCloudAiplatformV1NotebookExecutionJob); + final response = await res.list(arg_name, + filter: arg_filter, + pageSize: arg_pageSize, + pageToken: arg_pageToken, + $fields: arg_$fields); + checkGoogleLongrunningListOperationsResponse( + response as api.GoogleLongrunningListOperationsResponse); }); - unittest.test('method--list', () async { + unittest.test('method--wait', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.notebookExecutionJobs; - final arg_parent = 'foo'; - final arg_filter = 'foo'; - final arg_orderBy = 'foo'; - final arg_pageSize = 42; - final arg_pageToken = 'foo'; - final arg_view = 'foo'; + final res = api.AiplatformApi(mock).projects.locations.operations; + final arg_name = 'foo'; + final arg_timeout = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -59576,24 +67646,8 @@ void main() { } } unittest.expect( - queryMap['filter']!.first, - unittest.equals(arg_filter), - ); - unittest.expect( - queryMap['orderBy']!.first, - unittest.equals(arg_orderBy), - ); - unittest.expect( - core.int.parse(queryMap['pageSize']!.first), - unittest.equals(arg_pageSize), - ); - unittest.expect( - queryMap['pageToken']!.first, - unittest.equals(arg_pageToken), - ); - unittest.expect( - queryMap['view']!.first, - unittest.equals(arg_view), + queryMap['timeout']!.first, + unittest.equals(arg_timeout), ); unittest.expect( queryMap['fields']!.first, @@ -59603,34 +67657,30 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode( - buildGoogleCloudAiplatformV1ListNotebookExecutionJobsResponse()); + final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.list(arg_parent, - filter: arg_filter, - orderBy: arg_orderBy, - pageSize: arg_pageSize, - pageToken: arg_pageToken, - view: arg_view, - $fields: arg_$fields); - checkGoogleCloudAiplatformV1ListNotebookExecutionJobsResponse(response - as api.GoogleCloudAiplatformV1ListNotebookExecutionJobsResponse); + final response = + await res.wait(arg_name, timeout: 
arg_timeout, $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); }); }); - unittest.group( - 'resource-ProjectsLocationsNotebookExecutionJobsOperationsResource', () { - unittest.test('method--cancel', () async { + unittest.group('resource-ProjectsLocationsPersistentResourcesResource', () { + unittest.test('method--create', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .notebookExecutionJobs - .operations; - final arg_name = 'foo'; + final res = + api.AiplatformApi(mock).projects.locations.persistentResources; + final arg_request = buildGoogleCloudAiplatformV1PersistentResource(); + final arg_parent = 'foo'; + final arg_persistentResourceId = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.GoogleCloudAiplatformV1PersistentResource.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1PersistentResource(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -59662,6 +67712,10 @@ void main() { ); } } + unittest.expect( + queryMap['persistentResourceId']!.first, + unittest.equals(arg_persistentResourceId), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -59670,20 +67724,19 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleProtobufEmpty()); + final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.cancel(arg_name, $fields: arg_$fields); - checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); + final response = await res.create(arg_request, arg_parent, + persistentResourceId: arg_persistentResourceId, $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); }); unittest.test('method--delete', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .notebookExecutionJobs - .operations; + final res = + api.AiplatformApi(mock).projects.locations.persistentResources; final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -59726,20 +67779,18 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleProtobufEmpty()); + final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); final response = await res.delete(arg_name, $fields: arg_$fields); - checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); }); unittest.test('method--get', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .notebookExecutionJobs - .operations; + final res = + api.AiplatformApi(mock).projects.locations.persistentResources; final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -59782,23 +67833,20 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleLongrunningOperation()); + final resp = convert.json + .encode(buildGoogleCloudAiplatformV1PersistentResource()); return 
async.Future.value(stringResponse(200, h, resp)); }), true); final response = await res.get(arg_name, $fields: arg_$fields); - checkGoogleLongrunningOperation( - response as api.GoogleLongrunningOperation); + checkGoogleCloudAiplatformV1PersistentResource( + response as api.GoogleCloudAiplatformV1PersistentResource); }); unittest.test('method--list', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .notebookExecutionJobs - .operations; - final arg_name = 'foo'; - final arg_filter = 'foo'; + final res = + api.AiplatformApi(mock).projects.locations.persistentResources; + final arg_parent = 'foo'; final arg_pageSize = 42; final arg_pageToken = 'foo'; final arg_$fields = 'foo'; @@ -59834,10 +67882,6 @@ void main() { ); } } - unittest.expect( - queryMap['filter']!.first, - unittest.equals(arg_filter), - ); unittest.expect( core.int.parse(queryMap['pageSize']!.first), unittest.equals(arg_pageSize), @@ -59854,30 +67898,31 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = - convert.json.encode(buildGoogleLongrunningListOperationsResponse()); + final resp = convert.json.encode( + buildGoogleCloudAiplatformV1ListPersistentResourcesResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.list(arg_name, - filter: arg_filter, + final response = await res.list(arg_parent, pageSize: arg_pageSize, pageToken: arg_pageToken, $fields: arg_$fields); - checkGoogleLongrunningListOperationsResponse( - response as api.GoogleLongrunningListOperationsResponse); + checkGoogleCloudAiplatformV1ListPersistentResourcesResponse(response + as api.GoogleCloudAiplatformV1ListPersistentResourcesResponse); }); - unittest.test('method--wait', () async { + unittest.test('method--patch', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .notebookExecutionJobs - .operations; + final res = + api.AiplatformApi(mock).projects.locations.persistentResources; + final arg_request = buildGoogleCloudAiplatformV1PersistentResource(); final arg_name = 'foo'; - final arg_timeout = 'foo'; + final arg_updateMask = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.GoogleCloudAiplatformV1PersistentResource.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1PersistentResource(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -59910,8 +67955,8 @@ void main() { } } unittest.expect( - queryMap['timeout']!.first, - unittest.equals(arg_timeout), + queryMap['updateMask']!.first, + unittest.equals(arg_updateMask), ); unittest.expect( queryMap['fields']!.first, @@ -59924,27 +67969,25 @@ void main() { final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = - await res.wait(arg_name, timeout: arg_timeout, $fields: arg_$fields); + final response = await res.patch(arg_request, arg_name, + updateMask: arg_updateMask, $fields: arg_$fields); checkGoogleLongrunningOperation( response as api.GoogleLongrunningOperation); }); - }); - unittest.group('resource-ProjectsLocationsNotebookRuntimeTemplatesResource', - () { - unittest.test('method--create', () async { + unittest.test('method--reboot', () async { final mock = HttpServerMock(); final res = - api.AiplatformApi(mock).projects.locations.notebookRuntimeTemplates; - final arg_request = 
buildGoogleCloudAiplatformV1NotebookRuntimeTemplate(); - final arg_parent = 'foo'; - final arg_notebookRuntimeTemplateId = 'foo'; + api.AiplatformApi(mock).projects.locations.persistentResources; + final arg_request = + buildGoogleCloudAiplatformV1RebootPersistentResourceRequest(); + final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleCloudAiplatformV1NotebookRuntimeTemplate.fromJson( - json as core.Map); - checkGoogleCloudAiplatformV1NotebookRuntimeTemplate(obj); + final obj = + api.GoogleCloudAiplatformV1RebootPersistentResourceRequest.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1RebootPersistentResourceRequest(obj); final path = req.url.path; var pathOffset = 0; @@ -59977,10 +68020,6 @@ void main() { ); } } - unittest.expect( - queryMap['notebookRuntimeTemplateId']!.first, - unittest.equals(arg_notebookRuntimeTemplateId), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -59992,17 +68031,22 @@ void main() { final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.create(arg_request, arg_parent, - notebookRuntimeTemplateId: arg_notebookRuntimeTemplateId, - $fields: arg_$fields); + final response = + await res.reboot(arg_request, arg_name, $fields: arg_$fields); checkGoogleLongrunningOperation( response as api.GoogleLongrunningOperation); }); + }); - unittest.test('method--delete', () async { + unittest.group( + 'resource-ProjectsLocationsPersistentResourcesOperationsResource', () { + unittest.test('method--cancel', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.notebookRuntimeTemplates; + final res = api.AiplatformApi(mock) + .projects + .locations + .persistentResources + .operations; final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -60045,18 +68089,20 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleLongrunningOperation()); + final resp = convert.json.encode(buildGoogleProtobufEmpty()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.delete(arg_name, $fields: arg_$fields); - checkGoogleLongrunningOperation( - response as api.GoogleLongrunningOperation); + final response = await res.cancel(arg_name, $fields: arg_$fields); + checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); }); - unittest.test('method--get', () async { + unittest.test('method--delete', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.notebookRuntimeTemplates; + final res = api.AiplatformApi(mock) + .projects + .locations + .persistentResources + .operations; final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -60099,21 +68145,21 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json - .encode(buildGoogleCloudAiplatformV1NotebookRuntimeTemplate()); + final resp = convert.json.encode(buildGoogleProtobufEmpty()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.get(arg_name, $fields: arg_$fields); - checkGoogleCloudAiplatformV1NotebookRuntimeTemplate( - response as 
api.GoogleCloudAiplatformV1NotebookRuntimeTemplate); + final response = await res.delete(arg_name, $fields: arg_$fields); + checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); }); - unittest.test('method--getIamPolicy', () async { + unittest.test('method--get', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.notebookRuntimeTemplates; - final arg_resource = 'foo'; - final arg_options_requestedPolicyVersion = 42; + final res = api.AiplatformApi(mock) + .projects + .locations + .persistentResources + .operations; + final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -60147,10 +68193,6 @@ void main() { ); } } - unittest.expect( - core.int.parse(queryMap['options.requestedPolicyVersion']!.first), - unittest.equals(arg_options_requestedPolicyVersion), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -60159,25 +68201,25 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleIamV1Policy()); + final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.getIamPolicy(arg_resource, - options_requestedPolicyVersion: arg_options_requestedPolicyVersion, - $fields: arg_$fields); - checkGoogleIamV1Policy(response as api.GoogleIamV1Policy); + final response = await res.get(arg_name, $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); }); unittest.test('method--list', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.notebookRuntimeTemplates; - final arg_parent = 'foo'; + final res = api.AiplatformApi(mock) + .projects + .locations + .persistentResources + .operations; + final arg_name = 'foo'; final arg_filter = 'foo'; - final arg_orderBy = 'foo'; final arg_pageSize = 42; final arg_pageToken = 'foo'; - final arg_readMask = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -60215,10 +68257,6 @@ void main() { queryMap['filter']!.first, unittest.equals(arg_filter), ); - unittest.expect( - queryMap['orderBy']!.first, - unittest.equals(arg_orderBy), - ); unittest.expect( core.int.parse(queryMap['pageSize']!.first), unittest.equals(arg_pageSize), @@ -60227,10 +68265,6 @@ void main() { queryMap['pageToken']!.first, unittest.equals(arg_pageToken), ); - unittest.expect( - queryMap['readMask']!.first, - unittest.equals(arg_readMask), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -60239,34 +68273,30 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode( - buildGoogleCloudAiplatformV1ListNotebookRuntimeTemplatesResponse()); + final resp = + convert.json.encode(buildGoogleLongrunningListOperationsResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.list(arg_parent, + final response = await res.list(arg_name, filter: arg_filter, - orderBy: arg_orderBy, pageSize: arg_pageSize, pageToken: arg_pageToken, - readMask: arg_readMask, $fields: arg_$fields); - checkGoogleCloudAiplatformV1ListNotebookRuntimeTemplatesResponse(response - as api.GoogleCloudAiplatformV1ListNotebookRuntimeTemplatesResponse); + 
checkGoogleLongrunningListOperationsResponse( + response as api.GoogleLongrunningListOperationsResponse); }); - unittest.test('method--patch', () async { + unittest.test('method--wait', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.notebookRuntimeTemplates; - final arg_request = buildGoogleCloudAiplatformV1NotebookRuntimeTemplate(); + final res = api.AiplatformApi(mock) + .projects + .locations + .persistentResources + .operations; final arg_name = 'foo'; - final arg_updateMask = 'foo'; + final arg_timeout = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleCloudAiplatformV1NotebookRuntimeTemplate.fromJson( - json as core.Map); - checkGoogleCloudAiplatformV1NotebookRuntimeTemplate(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -60299,8 +68329,8 @@ void main() { } } unittest.expect( - queryMap['updateMask']!.first, - unittest.equals(arg_updateMask), + queryMap['timeout']!.first, + unittest.equals(arg_timeout), ); unittest.expect( queryMap['fields']!.first, @@ -60310,27 +68340,29 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json - .encode(buildGoogleCloudAiplatformV1NotebookRuntimeTemplate()); + final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.patch(arg_request, arg_name, - updateMask: arg_updateMask, $fields: arg_$fields); - checkGoogleCloudAiplatformV1NotebookRuntimeTemplate( - response as api.GoogleCloudAiplatformV1NotebookRuntimeTemplate); + final response = + await res.wait(arg_name, timeout: arg_timeout, $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); }); + }); - unittest.test('method--setIamPolicy', () async { + unittest.group('resource-ProjectsLocationsPipelineJobsResource', () { + unittest.test('method--batchCancel', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.notebookRuntimeTemplates; - final arg_request = buildGoogleIamV1SetIamPolicyRequest(); - final arg_resource = 'foo'; + final res = api.AiplatformApi(mock).projects.locations.pipelineJobs; + final arg_request = + buildGoogleCloudAiplatformV1BatchCancelPipelineJobsRequest(); + final arg_parent = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleIamV1SetIamPolicyRequest.fromJson( - json as core.Map); - checkGoogleIamV1SetIamPolicyRequest(obj); + final obj = + api.GoogleCloudAiplatformV1BatchCancelPipelineJobsRequest.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1BatchCancelPipelineJobsRequest(obj); final path = req.url.path; var pathOffset = 0; @@ -60371,22 +68403,28 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleIamV1Policy()); + final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.setIamPolicy(arg_request, arg_resource, - $fields: arg_$fields); - checkGoogleIamV1Policy(response as api.GoogleIamV1Policy); + final response = + await res.batchCancel(arg_request, arg_parent, $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); }); - 
unittest.test('method--testIamPermissions', () async { + unittest.test('method--batchDelete', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.notebookRuntimeTemplates; - final arg_resource = 'foo'; - final arg_permissions = buildUnnamed393(); + final res = api.AiplatformApi(mock).projects.locations.pipelineJobs; + final arg_request = + buildGoogleCloudAiplatformV1BatchDeletePipelineJobsRequest(); + final arg_parent = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = + api.GoogleCloudAiplatformV1BatchDeletePipelineJobsRequest.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1BatchDeletePipelineJobsRequest(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -60418,10 +68456,6 @@ void main() { ); } } - unittest.expect( - queryMap['permissions']!, - unittest.equals(arg_permissions), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -60430,30 +68464,28 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = - convert.json.encode(buildGoogleIamV1TestIamPermissionsResponse()); + final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.testIamPermissions(arg_resource, - permissions: arg_permissions, $fields: arg_$fields); - checkGoogleIamV1TestIamPermissionsResponse( - response as api.GoogleIamV1TestIamPermissionsResponse); + final response = + await res.batchDelete(arg_request, arg_parent, $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); }); - }); - unittest.group( - 'resource-ProjectsLocationsNotebookRuntimeTemplatesOperationsResource', - () { unittest.test('method--cancel', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .notebookRuntimeTemplates - .operations; + final res = api.AiplatformApi(mock).projects.locations.pipelineJobs; + final arg_request = + buildGoogleCloudAiplatformV1CancelPipelineJobRequest(); final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = + api.GoogleCloudAiplatformV1CancelPipelineJobRequest.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1CancelPipelineJobRequest(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -60496,20 +68528,23 @@ void main() { final resp = convert.json.encode(buildGoogleProtobufEmpty()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.cancel(arg_name, $fields: arg_$fields); + final response = + await res.cancel(arg_request, arg_name, $fields: arg_$fields); checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); }); - unittest.test('method--delete', () async { + unittest.test('method--create', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .notebookRuntimeTemplates - .operations; - final arg_name = 'foo'; + final res = api.AiplatformApi(mock).projects.locations.pipelineJobs; + final arg_request = buildGoogleCloudAiplatformV1PipelineJob(); + final arg_parent = 'foo'; + final arg_pipelineJobId = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.GoogleCloudAiplatformV1PipelineJob.fromJson( + json as core.Map); + 
checkGoogleCloudAiplatformV1PipelineJob(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -60541,6 +68576,10 @@ void main() { ); } } + unittest.expect( + queryMap['pipelineJobId']!.first, + unittest.equals(arg_pipelineJobId), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -60549,20 +68588,19 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleProtobufEmpty()); + final resp = + convert.json.encode(buildGoogleCloudAiplatformV1PipelineJob()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.delete(arg_name, $fields: arg_$fields); - checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); + final response = await res.create(arg_request, arg_parent, + pipelineJobId: arg_pipelineJobId, $fields: arg_$fields); + checkGoogleCloudAiplatformV1PipelineJob( + response as api.GoogleCloudAiplatformV1PipelineJob); }); - unittest.test('method--get', () async { + unittest.test('method--delete', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .notebookRuntimeTemplates - .operations; + final res = api.AiplatformApi(mock).projects.locations.pipelineJobs; final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -60608,22 +68646,15 @@ void main() { final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.get(arg_name, $fields: arg_$fields); + final response = await res.delete(arg_name, $fields: arg_$fields); checkGoogleLongrunningOperation( response as api.GoogleLongrunningOperation); }); - unittest.test('method--list', () async { + unittest.test('method--get', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .notebookRuntimeTemplates - .operations; + final res = api.AiplatformApi(mock).projects.locations.pipelineJobs; final arg_name = 'foo'; - final arg_filter = 'foo'; - final arg_pageSize = 42; - final arg_pageToken = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -60657,18 +68688,6 @@ void main() { ); } } - unittest.expect( - queryMap['filter']!.first, - unittest.equals(arg_filter), - ); - unittest.expect( - core.int.parse(queryMap['pageSize']!.first), - unittest.equals(arg_pageSize), - ); - unittest.expect( - queryMap['pageToken']!.first, - unittest.equals(arg_pageToken), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -60678,27 +68697,23 @@ void main() { 'content-type': 'application/json; charset=utf-8', }; final resp = - convert.json.encode(buildGoogleLongrunningListOperationsResponse()); + convert.json.encode(buildGoogleCloudAiplatformV1PipelineJob()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.list(arg_name, - filter: arg_filter, - pageSize: arg_pageSize, - pageToken: arg_pageToken, - $fields: arg_$fields); - checkGoogleLongrunningListOperationsResponse( - response as api.GoogleLongrunningListOperationsResponse); + final response = await res.get(arg_name, $fields: arg_$fields); + checkGoogleCloudAiplatformV1PipelineJob( + response as api.GoogleCloudAiplatformV1PipelineJob); }); - unittest.test('method--wait', () async { + unittest.test('method--list', () async { 
final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .notebookRuntimeTemplates - .operations; - final arg_name = 'foo'; - final arg_timeout = 'foo'; + final res = api.AiplatformApi(mock).projects.locations.pipelineJobs; + final arg_parent = 'foo'; + final arg_filter = 'foo'; + final arg_orderBy = 'foo'; + final arg_pageSize = 42; + final arg_pageToken = 'foo'; + final arg_readMask = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -60733,8 +68748,24 @@ void main() { } } unittest.expect( - queryMap['timeout']!.first, - unittest.equals(arg_timeout), + queryMap['filter']!.first, + unittest.equals(arg_filter), + ); + unittest.expect( + queryMap['orderBy']!.first, + unittest.equals(arg_orderBy), + ); + unittest.expect( + core.int.parse(queryMap['pageSize']!.first), + unittest.equals(arg_pageSize), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), + ); + unittest.expect( + queryMap['readMask']!.first, + unittest.equals(arg_readMask), ); unittest.expect( queryMap['fields']!.first, @@ -60744,30 +68775,31 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleLongrunningOperation()); + final resp = convert.json + .encode(buildGoogleCloudAiplatformV1ListPipelineJobsResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = - await res.wait(arg_name, timeout: arg_timeout, $fields: arg_$fields); - checkGoogleLongrunningOperation( - response as api.GoogleLongrunningOperation); + final response = await res.list(arg_parent, + filter: arg_filter, + orderBy: arg_orderBy, + pageSize: arg_pageSize, + pageToken: arg_pageToken, + readMask: arg_readMask, + $fields: arg_$fields); + checkGoogleCloudAiplatformV1ListPipelineJobsResponse( + response as api.GoogleCloudAiplatformV1ListPipelineJobsResponse); }); }); - unittest.group('resource-ProjectsLocationsNotebookRuntimesResource', () { - unittest.test('method--assign', () async { + unittest.group('resource-ProjectsLocationsPipelineJobsOperationsResource', + () { + unittest.test('method--cancel', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.notebookRuntimes; - final arg_request = - buildGoogleCloudAiplatformV1AssignNotebookRuntimeRequest(); - final arg_parent = 'foo'; + final res = + api.AiplatformApi(mock).projects.locations.pipelineJobs.operations; + final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = - api.GoogleCloudAiplatformV1AssignNotebookRuntimeRequest.fromJson( - json as core.Map); - checkGoogleCloudAiplatformV1AssignNotebookRuntimeRequest(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -60807,18 +68839,17 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleLongrunningOperation()); + final resp = convert.json.encode(buildGoogleProtobufEmpty()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = - await res.assign(arg_request, arg_parent, $fields: arg_$fields); - checkGoogleLongrunningOperation( - response as api.GoogleLongrunningOperation); + final response = await res.cancel(arg_name, $fields: arg_$fields); + checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); }); unittest.test('method--delete', () 
async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.notebookRuntimes; + final res = + api.AiplatformApi(mock).projects.locations.pipelineJobs.operations; final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -60861,17 +68892,17 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleLongrunningOperation()); + final resp = convert.json.encode(buildGoogleProtobufEmpty()); return async.Future.value(stringResponse(200, h, resp)); }), true); final response = await res.delete(arg_name, $fields: arg_$fields); - checkGoogleLongrunningOperation( - response as api.GoogleLongrunningOperation); + checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); }); unittest.test('method--get', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.notebookRuntimes; + final res = + api.AiplatformApi(mock).projects.locations.pipelineJobs.operations; final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -60914,24 +68945,22 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = - convert.json.encode(buildGoogleCloudAiplatformV1NotebookRuntime()); + final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); final response = await res.get(arg_name, $fields: arg_$fields); - checkGoogleCloudAiplatformV1NotebookRuntime( - response as api.GoogleCloudAiplatformV1NotebookRuntime); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); }); unittest.test('method--list', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.notebookRuntimes; - final arg_parent = 'foo'; + final res = + api.AiplatformApi(mock).projects.locations.pipelineJobs.operations; + final arg_name = 'foo'; final arg_filter = 'foo'; - final arg_orderBy = 'foo'; final arg_pageSize = 42; final arg_pageToken = 'foo'; - final arg_readMask = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -60969,10 +68998,6 @@ void main() { queryMap['filter']!.first, unittest.equals(arg_filter), ); - unittest.expect( - queryMap['orderBy']!.first, - unittest.equals(arg_orderBy), - ); unittest.expect( core.int.parse(queryMap['pageSize']!.first), unittest.equals(arg_pageSize), @@ -60981,10 +69006,6 @@ void main() { queryMap['pageToken']!.first, unittest.equals(arg_pageToken), ); - unittest.expect( - queryMap['readMask']!.first, - unittest.equals(arg_readMask), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -60993,34 +69014,27 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json - .encode(buildGoogleCloudAiplatformV1ListNotebookRuntimesResponse()); + final resp = + convert.json.encode(buildGoogleLongrunningListOperationsResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.list(arg_parent, + final response = await res.list(arg_name, filter: arg_filter, - orderBy: arg_orderBy, pageSize: arg_pageSize, pageToken: arg_pageToken, - readMask: arg_readMask, $fields: arg_$fields); - checkGoogleCloudAiplatformV1ListNotebookRuntimesResponse( - response as 
api.GoogleCloudAiplatformV1ListNotebookRuntimesResponse); + checkGoogleLongrunningListOperationsResponse( + response as api.GoogleLongrunningListOperationsResponse); }); - unittest.test('method--start', () async { + unittest.test('method--wait', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.notebookRuntimes; - final arg_request = - buildGoogleCloudAiplatformV1StartNotebookRuntimeRequest(); + final res = + api.AiplatformApi(mock).projects.locations.pipelineJobs.operations; final arg_name = 'foo'; + final arg_timeout = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = - api.GoogleCloudAiplatformV1StartNotebookRuntimeRequest.fromJson( - json as core.Map); - checkGoogleCloudAiplatformV1StartNotebookRuntimeRequest(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -61052,6 +69066,10 @@ void main() { ); } } + unittest.expect( + queryMap['timeout']!.first, + unittest.equals(arg_timeout), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -61064,23 +69082,23 @@ void main() { return async.Future.value(stringResponse(200, h, resp)); }), true); final response = - await res.start(arg_request, arg_name, $fields: arg_$fields); + await res.wait(arg_name, timeout: arg_timeout, $fields: arg_$fields); checkGoogleLongrunningOperation( response as api.GoogleLongrunningOperation); }); + }); - unittest.test('method--upgrade', () async { + unittest.group('resource-ProjectsLocationsPublishersModelsResource', () { + unittest.test('method--computeTokens', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.notebookRuntimes; - final arg_request = - buildGoogleCloudAiplatformV1UpgradeNotebookRuntimeRequest(); - final arg_name = 'foo'; + final res = api.AiplatformApi(mock).projects.locations.publishers.models; + final arg_request = buildGoogleCloudAiplatformV1ComputeTokensRequest(); + final arg_endpoint = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = - api.GoogleCloudAiplatformV1UpgradeNotebookRuntimeRequest.fromJson( - json as core.Map); - checkGoogleCloudAiplatformV1UpgradeNotebookRuntimeRequest(obj); + final obj = api.GoogleCloudAiplatformV1ComputeTokensRequest.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1ComputeTokensRequest(obj); final path = req.url.path; var pathOffset = 0; @@ -61121,28 +69139,27 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleLongrunningOperation()); + final resp = convert.json + .encode(buildGoogleCloudAiplatformV1ComputeTokensResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = - await res.upgrade(arg_request, arg_name, $fields: arg_$fields); - checkGoogleLongrunningOperation( - response as api.GoogleLongrunningOperation); + final response = await res.computeTokens(arg_request, arg_endpoint, + $fields: arg_$fields); + checkGoogleCloudAiplatformV1ComputeTokensResponse( + response as api.GoogleCloudAiplatformV1ComputeTokensResponse); }); - }); - unittest.group('resource-ProjectsLocationsNotebookRuntimesOperationsResource', - () { - unittest.test('method--cancel', () async { + unittest.test('method--countTokens', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .notebookRuntimes - .operations; - final arg_name = 
'foo'; + final res = api.AiplatformApi(mock).projects.locations.publishers.models; + final arg_request = buildGoogleCloudAiplatformV1CountTokensRequest(); + final arg_endpoint = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.GoogleCloudAiplatformV1CountTokensRequest.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1CountTokensRequest(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -61182,23 +69199,29 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleProtobufEmpty()); + final resp = convert.json + .encode(buildGoogleCloudAiplatformV1CountTokensResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.cancel(arg_name, $fields: arg_$fields); - checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); + final response = await res.countTokens(arg_request, arg_endpoint, + $fields: arg_$fields); + checkGoogleCloudAiplatformV1CountTokensResponse( + response as api.GoogleCloudAiplatformV1CountTokensResponse); }); - unittest.test('method--delete', () async { + unittest.test('method--fetchPredictOperation', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .notebookRuntimes - .operations; - final arg_name = 'foo'; + final res = api.AiplatformApi(mock).projects.locations.publishers.models; + final arg_request = + buildGoogleCloudAiplatformV1FetchPredictOperationRequest(); + final arg_endpoint = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = + api.GoogleCloudAiplatformV1FetchPredictOperationRequest.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1FetchPredictOperationRequest(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -61238,23 +69261,27 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleProtobufEmpty()); + final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.delete(arg_name, $fields: arg_$fields); - checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); + final response = await res.fetchPredictOperation( + arg_request, arg_endpoint, + $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); }); - unittest.test('method--get', () async { + unittest.test('method--generateContent', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .notebookRuntimes - .operations; - final arg_name = 'foo'; + final res = api.AiplatformApi(mock).projects.locations.publishers.models; + final arg_request = buildGoogleCloudAiplatformV1GenerateContentRequest(); + final arg_model = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.GoogleCloudAiplatformV1GenerateContentRequest.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1GenerateContentRequest(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -61294,27 +69321,27 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleLongrunningOperation()); + final resp = convert.json + 
.encode(buildGoogleCloudAiplatformV1GenerateContentResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.get(arg_name, $fields: arg_$fields); - checkGoogleLongrunningOperation( - response as api.GoogleLongrunningOperation); + final response = await res.generateContent(arg_request, arg_model, + $fields: arg_$fields); + checkGoogleCloudAiplatformV1GenerateContentResponse( + response as api.GoogleCloudAiplatformV1GenerateContentResponse); }); - unittest.test('method--list', () async { + unittest.test('method--predict', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .notebookRuntimes - .operations; - final arg_name = 'foo'; - final arg_filter = 'foo'; - final arg_pageSize = 42; - final arg_pageToken = 'foo'; + final res = api.AiplatformApi(mock).projects.locations.publishers.models; + final arg_request = buildGoogleCloudAiplatformV1PredictRequest(); + final arg_endpoint = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.GoogleCloudAiplatformV1PredictRequest.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1PredictRequest(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -61346,18 +69373,6 @@ void main() { ); } } - unittest.expect( - queryMap['filter']!.first, - unittest.equals(arg_filter), - ); - unittest.expect( - core.int.parse(queryMap['pageSize']!.first), - unittest.equals(arg_pageSize), - ); - unittest.expect( - queryMap['pageToken']!.first, - unittest.equals(arg_pageToken), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -61367,29 +69382,28 @@ void main() { 'content-type': 'application/json; charset=utf-8', }; final resp = - convert.json.encode(buildGoogleLongrunningListOperationsResponse()); + convert.json.encode(buildGoogleCloudAiplatformV1PredictResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.list(arg_name, - filter: arg_filter, - pageSize: arg_pageSize, - pageToken: arg_pageToken, - $fields: arg_$fields); - checkGoogleLongrunningListOperationsResponse( - response as api.GoogleLongrunningListOperationsResponse); + final response = + await res.predict(arg_request, arg_endpoint, $fields: arg_$fields); + checkGoogleCloudAiplatformV1PredictResponse( + response as api.GoogleCloudAiplatformV1PredictResponse); }); - unittest.test('method--wait', () async { + unittest.test('method--predictLongRunning', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .notebookRuntimes - .operations; - final arg_name = 'foo'; - final arg_timeout = 'foo'; + final res = api.AiplatformApi(mock).projects.locations.publishers.models; + final arg_request = + buildGoogleCloudAiplatformV1PredictLongRunningRequest(); + final arg_endpoint = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = + api.GoogleCloudAiplatformV1PredictLongRunningRequest.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1PredictLongRunningRequest(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -61421,10 +69435,6 @@ void main() { ); } } - unittest.expect( - queryMap['timeout']!.first, - unittest.equals(arg_timeout), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -61436,20 +69446,23 @@ void main() { final resp = 
convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = - await res.wait(arg_name, timeout: arg_timeout, $fields: arg_$fields); + final response = await res.predictLongRunning(arg_request, arg_endpoint, + $fields: arg_$fields); checkGoogleLongrunningOperation( response as api.GoogleLongrunningOperation); }); - }); - unittest.group('resource-ProjectsLocationsOperationsResource', () { - unittest.test('method--cancel', () async { + unittest.test('method--rawPredict', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.operations; - final arg_name = 'foo'; + final res = api.AiplatformApi(mock).projects.locations.publishers.models; + final arg_request = buildGoogleCloudAiplatformV1RawPredictRequest(); + final arg_endpoint = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.GoogleCloudAiplatformV1RawPredictRequest.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1RawPredictRequest(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -61489,19 +69502,25 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleProtobufEmpty()); + final resp = convert.json.encode(buildGoogleApiHttpBody()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.cancel(arg_name, $fields: arg_$fields); - checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); + final response = + await res.rawPredict(arg_request, arg_endpoint, $fields: arg_$fields); + checkGoogleApiHttpBody(response as api.GoogleApiHttpBody); }); - unittest.test('method--delete', () async { + unittest.test('method--serverStreamingPredict', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.operations; - final arg_name = 'foo'; + final res = api.AiplatformApi(mock).projects.locations.publishers.models; + final arg_request = buildGoogleCloudAiplatformV1StreamingPredictRequest(); + final arg_endpoint = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.GoogleCloudAiplatformV1StreamingPredictRequest.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1StreamingPredictRequest(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -61541,75 +69560,28 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleProtobufEmpty()); + final resp = convert.json + .encode(buildGoogleCloudAiplatformV1StreamingPredictResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.delete(arg_name, $fields: arg_$fields); - checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); + final response = await res.serverStreamingPredict( + arg_request, arg_endpoint, + $fields: arg_$fields); + checkGoogleCloudAiplatformV1StreamingPredictResponse( + response as api.GoogleCloudAiplatformV1StreamingPredictResponse); }); - unittest.test('method--get', () async { + unittest.test('method--streamGenerateContent', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.operations; - final arg_name = 'foo'; + final res = api.AiplatformApi(mock).projects.locations.publishers.models; + final arg_request = 
buildGoogleCloudAiplatformV1GenerateContentRequest(); + final arg_model = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final path = req.url.path; - var pathOffset = 0; - core.int index; - core.String subPart; - unittest.expect( - path.substring(pathOffset, pathOffset + 1), - unittest.equals('/'), - ); - pathOffset += 1; - unittest.expect( - path.substring(pathOffset, pathOffset + 3), - unittest.equals('v1/'), - ); - pathOffset += 3; - // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; - - final query = req.url.query; - var queryOffset = 0; - final queryMap = >{}; - void addQueryParam(core.String n, core.String v) => - queryMap.putIfAbsent(n, () => []).add(v); - - if (query.isNotEmpty) { - for (var part in query.split('&')) { - final keyValue = part.split('='); - addQueryParam( - core.Uri.decodeQueryComponent(keyValue[0]), - core.Uri.decodeQueryComponent(keyValue[1]), - ); - } - } - unittest.expect( - queryMap['fields']!.first, - unittest.equals(arg_$fields), - ); - - final h = { - 'content-type': 'application/json; charset=utf-8', - }; - final resp = convert.json.encode(buildGoogleLongrunningOperation()); - return async.Future.value(stringResponse(200, h, resp)); - }), true); - final response = await res.get(arg_name, $fields: arg_$fields); - checkGoogleLongrunningOperation( - response as api.GoogleLongrunningOperation); - }); + final obj = api.GoogleCloudAiplatformV1GenerateContentRequest.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1GenerateContentRequest(obj); - unittest.test('method--list', () async { - final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.operations; - final arg_name = 'foo'; - final arg_filter = 'foo'; - final arg_pageSize = 42; - final arg_pageToken = 'foo'; - final arg_$fields = 'foo'; - mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; var pathOffset = 0; core.int index; @@ -61641,18 +69613,6 @@ void main() { ); } } - unittest.expect( - queryMap['filter']!.first, - unittest.equals(arg_filter), - ); - unittest.expect( - core.int.parse(queryMap['pageSize']!.first), - unittest.equals(arg_pageSize), - ); - unittest.expect( - queryMap['pageToken']!.first, - unittest.equals(arg_pageToken), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -61661,26 +69621,27 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = - convert.json.encode(buildGoogleLongrunningListOperationsResponse()); + final resp = convert.json + .encode(buildGoogleCloudAiplatformV1GenerateContentResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.list(arg_name, - filter: arg_filter, - pageSize: arg_pageSize, - pageToken: arg_pageToken, + final response = await res.streamGenerateContent(arg_request, arg_model, $fields: arg_$fields); - checkGoogleLongrunningListOperationsResponse( - response as api.GoogleLongrunningListOperationsResponse); + checkGoogleCloudAiplatformV1GenerateContentResponse( + response as api.GoogleCloudAiplatformV1GenerateContentResponse); }); - unittest.test('method--wait', () async { + unittest.test('method--streamRawPredict', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.operations; - final arg_name = 'foo'; - final arg_timeout = 'foo'; + final res = api.AiplatformApi(mock).projects.locations.publishers.models; + 
final arg_request = buildGoogleCloudAiplatformV1StreamRawPredictRequest(); + final arg_endpoint = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.GoogleCloudAiplatformV1StreamRawPredictRequest.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1StreamRawPredictRequest(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -61712,10 +69673,6 @@ void main() { ); } } - unittest.expect( - queryMap['timeout']!.first, - unittest.equals(arg_timeout), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -61724,29 +69681,26 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleLongrunningOperation()); + final resp = convert.json.encode(buildGoogleApiHttpBody()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = - await res.wait(arg_name, timeout: arg_timeout, $fields: arg_$fields); - checkGoogleLongrunningOperation( - response as api.GoogleLongrunningOperation); + final response = await res.streamRawPredict(arg_request, arg_endpoint, + $fields: arg_$fields); + checkGoogleApiHttpBody(response as api.GoogleApiHttpBody); }); }); - unittest.group('resource-ProjectsLocationsPersistentResourcesResource', () { + unittest.group('resource-ProjectsLocationsRagCorporaResource', () { unittest.test('method--create', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.persistentResources; - final arg_request = buildGoogleCloudAiplatformV1PersistentResource(); + final res = api.AiplatformApi(mock).projects.locations.ragCorpora; + final arg_request = buildGoogleCloudAiplatformV1RagCorpus(); final arg_parent = 'foo'; - final arg_persistentResourceId = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleCloudAiplatformV1PersistentResource.fromJson( + final obj = api.GoogleCloudAiplatformV1RagCorpus.fromJson( json as core.Map); - checkGoogleCloudAiplatformV1PersistentResource(obj); + checkGoogleCloudAiplatformV1RagCorpus(obj); final path = req.url.path; var pathOffset = 0; @@ -61779,10 +69733,6 @@ void main() { ); } } - unittest.expect( - queryMap['persistentResourceId']!.first, - unittest.equals(arg_persistentResourceId), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -61794,17 +69744,17 @@ void main() { final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.create(arg_request, arg_parent, - persistentResourceId: arg_persistentResourceId, $fields: arg_$fields); + final response = + await res.create(arg_request, arg_parent, $fields: arg_$fields); checkGoogleLongrunningOperation( response as api.GoogleLongrunningOperation); }); unittest.test('method--delete', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.persistentResources; + final res = api.AiplatformApi(mock).projects.locations.ragCorpora; final arg_name = 'foo'; + final arg_force = true; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -61838,6 +69788,10 @@ void main() { ); } } + unittest.expect( + queryMap['force']!.first, + unittest.equals('$arg_force'), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ 
-61849,15 +69803,15 @@ void main() { final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.delete(arg_name, $fields: arg_$fields); + final response = + await res.delete(arg_name, force: arg_force, $fields: arg_$fields); checkGoogleLongrunningOperation( response as api.GoogleLongrunningOperation); }); unittest.test('method--get', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.persistentResources; + final res = api.AiplatformApi(mock).projects.locations.ragCorpora; final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -61900,19 +69854,18 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json - .encode(buildGoogleCloudAiplatformV1PersistentResource()); + final resp = + convert.json.encode(buildGoogleCloudAiplatformV1RagCorpus()); return async.Future.value(stringResponse(200, h, resp)); }), true); final response = await res.get(arg_name, $fields: arg_$fields); - checkGoogleCloudAiplatformV1PersistentResource( - response as api.GoogleCloudAiplatformV1PersistentResource); + checkGoogleCloudAiplatformV1RagCorpus( + response as api.GoogleCloudAiplatformV1RagCorpus); }); unittest.test('method--list', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.persistentResources; + final res = api.AiplatformApi(mock).projects.locations.ragCorpora; final arg_parent = 'foo'; final arg_pageSize = 42; final arg_pageToken = 'foo'; @@ -61965,96 +69918,28 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode( - buildGoogleCloudAiplatformV1ListPersistentResourcesResponse()); + final resp = convert.json + .encode(buildGoogleCloudAiplatformV1ListRagCorporaResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); final response = await res.list(arg_parent, pageSize: arg_pageSize, pageToken: arg_pageToken, $fields: arg_$fields); - checkGoogleCloudAiplatformV1ListPersistentResourcesResponse(response - as api.GoogleCloudAiplatformV1ListPersistentResourcesResponse); + checkGoogleCloudAiplatformV1ListRagCorporaResponse( + response as api.GoogleCloudAiplatformV1ListRagCorporaResponse); }); unittest.test('method--patch', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.persistentResources; - final arg_request = buildGoogleCloudAiplatformV1PersistentResource(); + final res = api.AiplatformApi(mock).projects.locations.ragCorpora; + final arg_request = buildGoogleCloudAiplatformV1RagCorpus(); final arg_name = 'foo'; - final arg_updateMask = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleCloudAiplatformV1PersistentResource.fromJson( + final obj = api.GoogleCloudAiplatformV1RagCorpus.fromJson( json as core.Map); - checkGoogleCloudAiplatformV1PersistentResource(obj); - - final path = req.url.path; - var pathOffset = 0; - core.int index; - core.String subPart; - unittest.expect( - path.substring(pathOffset, pathOffset + 1), - unittest.equals('/'), - ); - pathOffset += 1; - unittest.expect( - path.substring(pathOffset, pathOffset + 3), - unittest.equals('v1/'), - ); - pathOffset += 3; - // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; - - final 
query = req.url.query; - var queryOffset = 0; - final queryMap = >{}; - void addQueryParam(core.String n, core.String v) => - queryMap.putIfAbsent(n, () => []).add(v); - - if (query.isNotEmpty) { - for (var part in query.split('&')) { - final keyValue = part.split('='); - addQueryParam( - core.Uri.decodeQueryComponent(keyValue[0]), - core.Uri.decodeQueryComponent(keyValue[1]), - ); - } - } - unittest.expect( - queryMap['updateMask']!.first, - unittest.equals(arg_updateMask), - ); - unittest.expect( - queryMap['fields']!.first, - unittest.equals(arg_$fields), - ); - - final h = { - 'content-type': 'application/json; charset=utf-8', - }; - final resp = convert.json.encode(buildGoogleLongrunningOperation()); - return async.Future.value(stringResponse(200, h, resp)); - }), true); - final response = await res.patch(arg_request, arg_name, - updateMask: arg_updateMask, $fields: arg_$fields); - checkGoogleLongrunningOperation( - response as api.GoogleLongrunningOperation); - }); - - unittest.test('method--reboot', () async { - final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.persistentResources; - final arg_request = - buildGoogleCloudAiplatformV1RebootPersistentResourceRequest(); - final arg_name = 'foo'; - final arg_$fields = 'foo'; - mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = - api.GoogleCloudAiplatformV1RebootPersistentResourceRequest.fromJson( - json as core.Map); - checkGoogleCloudAiplatformV1RebootPersistentResourceRequest(obj); + checkGoogleCloudAiplatformV1RagCorpus(obj); final path = req.url.path; var pathOffset = 0; @@ -62099,21 +69984,17 @@ void main() { return async.Future.value(stringResponse(200, h, resp)); }), true); final response = - await res.reboot(arg_request, arg_name, $fields: arg_$fields); + await res.patch(arg_request, arg_name, $fields: arg_$fields); checkGoogleLongrunningOperation( response as api.GoogleLongrunningOperation); }); }); - unittest.group( - 'resource-ProjectsLocationsPersistentResourcesOperationsResource', () { + unittest.group('resource-ProjectsLocationsRagCorporaOperationsResource', () { unittest.test('method--cancel', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .persistentResources - .operations; + final res = + api.AiplatformApi(mock).projects.locations.ragCorpora.operations; final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -62165,11 +70046,8 @@ void main() { unittest.test('method--delete', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .persistentResources - .operations; + final res = + api.AiplatformApi(mock).projects.locations.ragCorpora.operations; final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -62221,11 +70099,8 @@ void main() { unittest.test('method--get', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .persistentResources - .operations; + final res = + api.AiplatformApi(mock).projects.locations.ragCorpora.operations; final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -62278,11 +70153,8 @@ void main() { unittest.test('method--list', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .persistentResources - .operations; + 
final res = + api.AiplatformApi(mock).projects.locations.ragCorpora.operations; final arg_name = 'foo'; final arg_filter = 'foo'; final arg_pageSize = 42; @@ -62355,11 +70227,8 @@ void main() { unittest.test('method--wait', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock) - .projects - .locations - .persistentResources - .operations; + final res = + api.AiplatformApi(mock).projects.locations.ragCorpora.operations; final arg_name = 'foo'; final arg_timeout = 'foo'; final arg_$fields = 'foo'; @@ -62417,20 +70286,14 @@ void main() { }); }); - unittest.group('resource-ProjectsLocationsPipelineJobsResource', () { - unittest.test('method--batchCancel', () async { + unittest.group('resource-ProjectsLocationsRagCorporaRagFilesResource', () { + unittest.test('method--delete', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.pipelineJobs; - final arg_request = - buildGoogleCloudAiplatformV1BatchCancelPipelineJobsRequest(); - final arg_parent = 'foo'; + final res = + api.AiplatformApi(mock).projects.locations.ragCorpora.ragFiles; + final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = - api.GoogleCloudAiplatformV1BatchCancelPipelineJobsRequest.fromJson( - json as core.Map); - checkGoogleCloudAiplatformV1BatchCancelPipelineJobsRequest(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -62473,25 +70336,18 @@ void main() { final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = - await res.batchCancel(arg_request, arg_parent, $fields: arg_$fields); + final response = await res.delete(arg_name, $fields: arg_$fields); checkGoogleLongrunningOperation( response as api.GoogleLongrunningOperation); }); - unittest.test('method--batchDelete', () async { + unittest.test('method--get', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.pipelineJobs; - final arg_request = - buildGoogleCloudAiplatformV1BatchDeletePipelineJobsRequest(); - final arg_parent = 'foo'; + final res = + api.AiplatformApi(mock).projects.locations.ragCorpora.ragFiles; + final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = - api.GoogleCloudAiplatformV1BatchDeletePipelineJobsRequest.fromJson( - json as core.Map); - checkGoogleCloudAiplatformV1BatchDeletePipelineJobsRequest(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -62531,27 +70387,25 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleLongrunningOperation()); + final resp = convert.json.encode(buildGoogleCloudAiplatformV1RagFile()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = - await res.batchDelete(arg_request, arg_parent, $fields: arg_$fields); - checkGoogleLongrunningOperation( - response as api.GoogleLongrunningOperation); + final response = await res.get(arg_name, $fields: arg_$fields); + checkGoogleCloudAiplatformV1RagFile( + response as api.GoogleCloudAiplatformV1RagFile); }); - unittest.test('method--cancel', () async { + unittest.test('method--import', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.pipelineJobs; - final arg_request = - 
buildGoogleCloudAiplatformV1CancelPipelineJobRequest(); - final arg_name = 'foo'; + final res = + api.AiplatformApi(mock).projects.locations.ragCorpora.ragFiles; + final arg_request = buildGoogleCloudAiplatformV1ImportRagFilesRequest(); + final arg_parent = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = - api.GoogleCloudAiplatformV1CancelPipelineJobRequest.fromJson( - json as core.Map); - checkGoogleCloudAiplatformV1CancelPipelineJobRequest(obj); + final obj = api.GoogleCloudAiplatformV1ImportRagFilesRequest.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1ImportRagFilesRequest(obj); final path = req.url.path; var pathOffset = 0; @@ -62592,26 +70446,24 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleProtobufEmpty()); + final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); final response = - await res.cancel(arg_request, arg_name, $fields: arg_$fields); - checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); + await res.import(arg_request, arg_parent, $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); }); - unittest.test('method--create', () async { + unittest.test('method--list', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.pipelineJobs; - final arg_request = buildGoogleCloudAiplatformV1PipelineJob(); + final res = + api.AiplatformApi(mock).projects.locations.ragCorpora.ragFiles; final arg_parent = 'foo'; - final arg_pipelineJobId = 'foo'; + final arg_pageSize = 42; + final arg_pageToken = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleCloudAiplatformV1PipelineJob.fromJson( - json as core.Map); - checkGoogleCloudAiplatformV1PipelineJob(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -62644,8 +70496,12 @@ void main() { } } unittest.expect( - queryMap['pipelineJobId']!.first, - unittest.equals(arg_pipelineJobId), + core.int.parse(queryMap['pageSize']!.first), + unittest.equals(arg_pageSize), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), ); unittest.expect( queryMap['fields']!.first, @@ -62655,19 +70511,29 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = - convert.json.encode(buildGoogleCloudAiplatformV1PipelineJob()); + final resp = convert.json + .encode(buildGoogleCloudAiplatformV1ListRagFilesResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.create(arg_request, arg_parent, - pipelineJobId: arg_pipelineJobId, $fields: arg_$fields); - checkGoogleCloudAiplatformV1PipelineJob( - response as api.GoogleCloudAiplatformV1PipelineJob); + final response = await res.list(arg_parent, + pageSize: arg_pageSize, + pageToken: arg_pageToken, + $fields: arg_$fields); + checkGoogleCloudAiplatformV1ListRagFilesResponse( + response as api.GoogleCloudAiplatformV1ListRagFilesResponse); }); + }); - unittest.test('method--delete', () async { + unittest.group( + 'resource-ProjectsLocationsRagCorporaRagFilesOperationsResource', () { + unittest.test('method--cancel', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.pipelineJobs; + final res = 
api.AiplatformApi(mock) + .projects + .locations + .ragCorpora + .ragFiles + .operations; final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -62710,17 +70576,21 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleLongrunningOperation()); + final resp = convert.json.encode(buildGoogleProtobufEmpty()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.delete(arg_name, $fields: arg_$fields); - checkGoogleLongrunningOperation( - response as api.GoogleLongrunningOperation); + final response = await res.cancel(arg_name, $fields: arg_$fields); + checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); }); - unittest.test('method--get', () async { + unittest.test('method--delete', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.pipelineJobs; + final res = api.AiplatformApi(mock) + .projects + .locations + .ragCorpora + .ragFiles + .operations; final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -62763,24 +70633,22 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = - convert.json.encode(buildGoogleCloudAiplatformV1PipelineJob()); + final resp = convert.json.encode(buildGoogleProtobufEmpty()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.get(arg_name, $fields: arg_$fields); - checkGoogleCloudAiplatformV1PipelineJob( - response as api.GoogleCloudAiplatformV1PipelineJob); + final response = await res.delete(arg_name, $fields: arg_$fields); + checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); }); - unittest.test('method--list', () async { + unittest.test('method--get', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.pipelineJobs; - final arg_parent = 'foo'; - final arg_filter = 'foo'; - final arg_orderBy = 'foo'; - final arg_pageSize = 42; - final arg_pageToken = 'foo'; - final arg_readMask = 'foo'; + final res = api.AiplatformApi(mock) + .projects + .locations + .ragCorpora + .ragFiles + .operations; + final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -62814,26 +70682,6 @@ void main() { ); } } - unittest.expect( - queryMap['filter']!.first, - unittest.equals(arg_filter), - ); - unittest.expect( - queryMap['orderBy']!.first, - unittest.equals(arg_orderBy), - ); - unittest.expect( - core.int.parse(queryMap['pageSize']!.first), - unittest.equals(arg_pageSize), - ); - unittest.expect( - queryMap['pageToken']!.first, - unittest.equals(arg_pageToken), - ); - unittest.expect( - queryMap['readMask']!.first, - unittest.equals(arg_readMask), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -62842,29 +70690,26 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json - .encode(buildGoogleCloudAiplatformV1ListPipelineJobsResponse()); + final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.list(arg_parent, - filter: arg_filter, - orderBy: arg_orderBy, - pageSize: arg_pageSize, - pageToken: arg_pageToken, - readMask: arg_readMask, - $fields: 
arg_$fields); - checkGoogleCloudAiplatformV1ListPipelineJobsResponse( - response as api.GoogleCloudAiplatformV1ListPipelineJobsResponse); + final response = await res.get(arg_name, $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); }); - }); - unittest.group('resource-ProjectsLocationsPipelineJobsOperationsResource', - () { - unittest.test('method--cancel', () async { + unittest.test('method--list', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.pipelineJobs.operations; + final res = api.AiplatformApi(mock) + .projects + .locations + .ragCorpora + .ragFiles + .operations; final arg_name = 'foo'; + final arg_filter = 'foo'; + final arg_pageSize = 42; + final arg_pageToken = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -62898,6 +70743,18 @@ void main() { ); } } + unittest.expect( + queryMap['filter']!.first, + unittest.equals(arg_filter), + ); + unittest.expect( + core.int.parse(queryMap['pageSize']!.first), + unittest.equals(arg_pageSize), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -62906,18 +70763,29 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleProtobufEmpty()); + final resp = + convert.json.encode(buildGoogleLongrunningListOperationsResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.cancel(arg_name, $fields: arg_$fields); - checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); + final response = await res.list(arg_name, + filter: arg_filter, + pageSize: arg_pageSize, + pageToken: arg_pageToken, + $fields: arg_$fields); + checkGoogleLongrunningListOperationsResponse( + response as api.GoogleLongrunningListOperationsResponse); }); - unittest.test('method--delete', () async { + unittest.test('method--wait', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.pipelineJobs.operations; + final res = api.AiplatformApi(mock) + .projects + .locations + .ragCorpora + .ragFiles + .operations; final arg_name = 'foo'; + final arg_timeout = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -62951,6 +70819,10 @@ void main() { ); } } + unittest.expect( + queryMap['timeout']!.first, + unittest.equals(arg_timeout), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -62959,20 +70831,28 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleProtobufEmpty()); + final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.delete(arg_name, $fields: arg_$fields); - checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); + final response = + await res.wait(arg_name, timeout: arg_timeout, $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); }); + }); - unittest.test('method--get', () async { + unittest.group('resource-ProjectsLocationsReasoningEnginesResource', () { + unittest.test('method--create', () async { final mock = HttpServerMock(); - final res = - 
api.AiplatformApi(mock).projects.locations.pipelineJobs.operations; - final arg_name = 'foo'; + final res = api.AiplatformApi(mock).projects.locations.reasoningEngines; + final arg_request = buildGoogleCloudAiplatformV1ReasoningEngine(); + final arg_parent = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.GoogleCloudAiplatformV1ReasoningEngine.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1ReasoningEngine(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -63015,19 +70895,16 @@ void main() { final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.get(arg_name, $fields: arg_$fields); + final response = + await res.create(arg_request, arg_parent, $fields: arg_$fields); checkGoogleLongrunningOperation( response as api.GoogleLongrunningOperation); }); - unittest.test('method--list', () async { + unittest.test('method--delete', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.pipelineJobs.operations; + final res = api.AiplatformApi(mock).projects.locations.reasoningEngines; final arg_name = 'foo'; - final arg_filter = 'foo'; - final arg_pageSize = 42; - final arg_pageToken = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -63061,18 +70938,6 @@ void main() { ); } } - unittest.expect( - queryMap['filter']!.first, - unittest.equals(arg_filter), - ); - unittest.expect( - core.int.parse(queryMap['pageSize']!.first), - unittest.equals(arg_pageSize), - ); - unittest.expect( - queryMap['pageToken']!.first, - unittest.equals(arg_pageToken), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -63081,25 +70946,18 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = - convert.json.encode(buildGoogleLongrunningListOperationsResponse()); + final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.list(arg_name, - filter: arg_filter, - pageSize: arg_pageSize, - pageToken: arg_pageToken, - $fields: arg_$fields); - checkGoogleLongrunningListOperationsResponse( - response as api.GoogleLongrunningListOperationsResponse); + final response = await res.delete(arg_name, $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); }); - unittest.test('method--wait', () async { + unittest.test('method--get', () async { final mock = HttpServerMock(); - final res = - api.AiplatformApi(mock).projects.locations.pipelineJobs.operations; + final res = api.AiplatformApi(mock).projects.locations.reasoningEngines; final arg_name = 'foo'; - final arg_timeout = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -63133,10 +70991,6 @@ void main() { ); } } - unittest.expect( - queryMap['timeout']!.first, - unittest.equals(arg_timeout), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -63145,28 +70999,24 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleLongrunningOperation()); + final resp = + convert.json.encode(buildGoogleCloudAiplatformV1ReasoningEngine()); return 
async.Future.value(stringResponse(200, h, resp)); }), true); - final response = - await res.wait(arg_name, timeout: arg_timeout, $fields: arg_$fields); - checkGoogleLongrunningOperation( - response as api.GoogleLongrunningOperation); + final response = await res.get(arg_name, $fields: arg_$fields); + checkGoogleCloudAiplatformV1ReasoningEngine( + response as api.GoogleCloudAiplatformV1ReasoningEngine); }); - }); - unittest.group('resource-ProjectsLocationsPublishersModelsResource', () { - unittest.test('method--computeTokens', () async { + unittest.test('method--list', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.publishers.models; - final arg_request = buildGoogleCloudAiplatformV1ComputeTokensRequest(); - final arg_endpoint = 'foo'; + final res = api.AiplatformApi(mock).projects.locations.reasoningEngines; + final arg_parent = 'foo'; + final arg_filter = 'foo'; + final arg_pageSize = 42; + final arg_pageToken = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleCloudAiplatformV1ComputeTokensRequest.fromJson( - json as core.Map); - checkGoogleCloudAiplatformV1ComputeTokensRequest(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -63198,6 +71048,18 @@ void main() { ); } } + unittest.expect( + queryMap['filter']!.first, + unittest.equals(arg_filter), + ); + unittest.expect( + core.int.parse(queryMap['pageSize']!.first), + unittest.equals(arg_pageSize), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -63207,25 +71069,29 @@ void main() { 'content-type': 'application/json; charset=utf-8', }; final resp = convert.json - .encode(buildGoogleCloudAiplatformV1ComputeTokensResponse()); + .encode(buildGoogleCloudAiplatformV1ListReasoningEnginesResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.computeTokens(arg_request, arg_endpoint, + final response = await res.list(arg_parent, + filter: arg_filter, + pageSize: arg_pageSize, + pageToken: arg_pageToken, $fields: arg_$fields); - checkGoogleCloudAiplatformV1ComputeTokensResponse( - response as api.GoogleCloudAiplatformV1ComputeTokensResponse); + checkGoogleCloudAiplatformV1ListReasoningEnginesResponse( + response as api.GoogleCloudAiplatformV1ListReasoningEnginesResponse); }); - unittest.test('method--countTokens', () async { + unittest.test('method--patch', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.publishers.models; - final arg_request = buildGoogleCloudAiplatformV1CountTokensRequest(); - final arg_endpoint = 'foo'; + final res = api.AiplatformApi(mock).projects.locations.reasoningEngines; + final arg_request = buildGoogleCloudAiplatformV1ReasoningEngine(); + final arg_name = 'foo'; + final arg_updateMask = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleCloudAiplatformV1CountTokensRequest.fromJson( + final obj = api.GoogleCloudAiplatformV1ReasoningEngine.fromJson( json as core.Map); - checkGoogleCloudAiplatformV1CountTokensRequest(obj); + checkGoogleCloudAiplatformV1ReasoningEngine(obj); final path = req.url.path; var pathOffset = 0; @@ -63258,6 +71124,10 @@ void main() { ); } } + unittest.expect( + queryMap['updateMask']!.first, + unittest.equals(arg_updateMask), + ); unittest.expect( 
queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -63266,26 +71136,27 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json - .encode(buildGoogleCloudAiplatformV1CountTokensResponse()); + final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.countTokens(arg_request, arg_endpoint, - $fields: arg_$fields); - checkGoogleCloudAiplatformV1CountTokensResponse( - response as api.GoogleCloudAiplatformV1CountTokensResponse); + final response = await res.patch(arg_request, arg_name, + updateMask: arg_updateMask, $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); }); - unittest.test('method--generateContent', () async { + unittest.test('method--query', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.publishers.models; - final arg_request = buildGoogleCloudAiplatformV1GenerateContentRequest(); - final arg_model = 'foo'; + final res = api.AiplatformApi(mock).projects.locations.reasoningEngines; + final arg_request = + buildGoogleCloudAiplatformV1QueryReasoningEngineRequest(); + final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleCloudAiplatformV1GenerateContentRequest.fromJson( - json as core.Map); - checkGoogleCloudAiplatformV1GenerateContentRequest(obj); + final obj = + api.GoogleCloudAiplatformV1QueryReasoningEngineRequest.fromJson( + json as core.Map); + checkGoogleCloudAiplatformV1QueryReasoningEngineRequest(obj); final path = req.url.path; var pathOffset = 0; @@ -63327,26 +71198,28 @@ void main() { 'content-type': 'application/json; charset=utf-8', }; final resp = convert.json - .encode(buildGoogleCloudAiplatformV1GenerateContentResponse()); + .encode(buildGoogleCloudAiplatformV1QueryReasoningEngineResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.generateContent(arg_request, arg_model, - $fields: arg_$fields); - checkGoogleCloudAiplatformV1GenerateContentResponse( - response as api.GoogleCloudAiplatformV1GenerateContentResponse); + final response = + await res.query(arg_request, arg_name, $fields: arg_$fields); + checkGoogleCloudAiplatformV1QueryReasoningEngineResponse( + response as api.GoogleCloudAiplatformV1QueryReasoningEngineResponse); }); + }); - unittest.test('method--predict', () async { + unittest.group('resource-ProjectsLocationsReasoningEnginesOperationsResource', + () { + unittest.test('method--cancel', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.publishers.models; - final arg_request = buildGoogleCloudAiplatformV1PredictRequest(); - final arg_endpoint = 'foo'; + final res = api.AiplatformApi(mock) + .projects + .locations + .reasoningEngines + .operations; + final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleCloudAiplatformV1PredictRequest.fromJson( - json as core.Map); - checkGoogleCloudAiplatformV1PredictRequest(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -63386,27 +71259,23 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = - convert.json.encode(buildGoogleCloudAiplatformV1PredictResponse()); + final resp = 
convert.json.encode(buildGoogleProtobufEmpty()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = - await res.predict(arg_request, arg_endpoint, $fields: arg_$fields); - checkGoogleCloudAiplatformV1PredictResponse( - response as api.GoogleCloudAiplatformV1PredictResponse); + final response = await res.cancel(arg_name, $fields: arg_$fields); + checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); }); - unittest.test('method--rawPredict', () async { + unittest.test('method--delete', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.publishers.models; - final arg_request = buildGoogleCloudAiplatformV1RawPredictRequest(); - final arg_endpoint = 'foo'; + final res = api.AiplatformApi(mock) + .projects + .locations + .reasoningEngines + .operations; + final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleCloudAiplatformV1RawPredictRequest.fromJson( - json as core.Map); - checkGoogleCloudAiplatformV1RawPredictRequest(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -63446,25 +71315,23 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleApiHttpBody()); + final resp = convert.json.encode(buildGoogleProtobufEmpty()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = - await res.rawPredict(arg_request, arg_endpoint, $fields: arg_$fields); - checkGoogleApiHttpBody(response as api.GoogleApiHttpBody); + final response = await res.delete(arg_name, $fields: arg_$fields); + checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); }); - unittest.test('method--serverStreamingPredict', () async { + unittest.test('method--get', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.publishers.models; - final arg_request = buildGoogleCloudAiplatformV1StreamingPredictRequest(); - final arg_endpoint = 'foo'; + final res = api.AiplatformApi(mock) + .projects + .locations + .reasoningEngines + .operations; + final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleCloudAiplatformV1StreamingPredictRequest.fromJson( - json as core.Map); - checkGoogleCloudAiplatformV1StreamingPredictRequest(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -63504,28 +71371,27 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json - .encode(buildGoogleCloudAiplatformV1StreamingPredictResponse()); + final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.serverStreamingPredict( - arg_request, arg_endpoint, - $fields: arg_$fields); - checkGoogleCloudAiplatformV1StreamingPredictResponse( - response as api.GoogleCloudAiplatformV1StreamingPredictResponse); + final response = await res.get(arg_name, $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); }); - unittest.test('method--streamGenerateContent', () async { + unittest.test('method--list', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.publishers.models; - final arg_request = buildGoogleCloudAiplatformV1GenerateContentRequest(); - final arg_model = 
'foo'; + final res = api.AiplatformApi(mock) + .projects + .locations + .reasoningEngines + .operations; + final arg_name = 'foo'; + final arg_filter = 'foo'; + final arg_pageSize = 42; + final arg_pageToken = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleCloudAiplatformV1GenerateContentRequest.fromJson( - json as core.Map); - checkGoogleCloudAiplatformV1GenerateContentRequest(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -63557,6 +71423,18 @@ void main() { ); } } + unittest.expect( + queryMap['filter']!.first, + unittest.equals(arg_filter), + ); + unittest.expect( + core.int.parse(queryMap['pageSize']!.first), + unittest.equals(arg_pageSize), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -63565,27 +71443,30 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json - .encode(buildGoogleCloudAiplatformV1GenerateContentResponse()); + final resp = + convert.json.encode(buildGoogleLongrunningListOperationsResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.streamGenerateContent(arg_request, arg_model, + final response = await res.list(arg_name, + filter: arg_filter, + pageSize: arg_pageSize, + pageToken: arg_pageToken, $fields: arg_$fields); - checkGoogleCloudAiplatformV1GenerateContentResponse( - response as api.GoogleCloudAiplatformV1GenerateContentResponse); + checkGoogleLongrunningListOperationsResponse( + response as api.GoogleLongrunningListOperationsResponse); }); - unittest.test('method--streamRawPredict', () async { + unittest.test('method--wait', () async { final mock = HttpServerMock(); - final res = api.AiplatformApi(mock).projects.locations.publishers.models; - final arg_request = buildGoogleCloudAiplatformV1StreamRawPredictRequest(); - final arg_endpoint = 'foo'; + final res = api.AiplatformApi(mock) + .projects + .locations + .reasoningEngines + .operations; + final arg_name = 'foo'; + final arg_timeout = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleCloudAiplatformV1StreamRawPredictRequest.fromJson( - json as core.Map); - checkGoogleCloudAiplatformV1StreamRawPredictRequest(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -63617,6 +71498,10 @@ void main() { ); } } + unittest.expect( + queryMap['timeout']!.first, + unittest.equals(arg_timeout), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -63625,12 +71510,13 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleApiHttpBody()); + final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.streamRawPredict(arg_request, arg_endpoint, - $fields: arg_$fields); - checkGoogleApiHttpBody(response as api.GoogleApiHttpBody); + final response = + await res.wait(arg_name, timeout: arg_timeout, $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); }); }); @@ -66447,7 +74333,7 @@ void main() { final mock = HttpServerMock(); final res = api.AiplatformApi(mock).projects.locations.tensorboards; final arg_tensorboard = 'foo'; - final 
arg_timeSeries = buildUnnamed394(); + final arg_timeSeries = buildUnnamed434(); final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -69023,7 +76909,7 @@ void main() { .runs .timeSeries; final arg_timeSeries = 'foo'; - final arg_blobIds = buildUnnamed395(); + final arg_blobIds = buildUnnamed435(); final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; diff --git a/generated/googleapis/test/alloydb/v1_test.dart b/generated/googleapis/test/alloydb/v1_test.dart index 3b40f9930..a4a91d4ac 100644 --- a/generated/googleapis/test/alloydb/v1_test.dart +++ b/generated/googleapis/test/alloydb/v1_test.dart @@ -634,6 +634,43 @@ void checkContinuousBackupSource(api.ContinuousBackupSource o) { buildCounterContinuousBackupSource--; } +core.int buildCounterCsvExportOptions = 0; +api.CsvExportOptions buildCsvExportOptions() { + final o = api.CsvExportOptions(); + buildCounterCsvExportOptions++; + if (buildCounterCsvExportOptions < 3) { + o.escapeCharacter = 'foo'; + o.fieldDelimiter = 'foo'; + o.quoteCharacter = 'foo'; + o.selectQuery = 'foo'; + } + buildCounterCsvExportOptions--; + return o; +} + +void checkCsvExportOptions(api.CsvExportOptions o) { + buildCounterCsvExportOptions++; + if (buildCounterCsvExportOptions < 3) { + unittest.expect( + o.escapeCharacter!, + unittest.equals('foo'), + ); + unittest.expect( + o.fieldDelimiter!, + unittest.equals('foo'), + ); + unittest.expect( + o.quoteCharacter!, + unittest.equals('foo'), + ); + unittest.expect( + o.selectQuery!, + unittest.equals('foo'), + ); + } + buildCounterCsvExportOptions--; +} + core.int buildCounterEmpty = 0; api.Empty buildEmpty() { final o = api.Empty(); @@ -712,6 +749,34 @@ void checkEncryptionInfo(api.EncryptionInfo o) { buildCounterEncryptionInfo--; } +core.int buildCounterExportClusterRequest = 0; +api.ExportClusterRequest buildExportClusterRequest() { + final o = api.ExportClusterRequest(); + buildCounterExportClusterRequest++; + if (buildCounterExportClusterRequest < 3) { + o.csvExportOptions = buildCsvExportOptions(); + o.database = 'foo'; + o.gcsDestination = buildGcsDestination(); + o.sqlExportOptions = buildSqlExportOptions(); + } + buildCounterExportClusterRequest--; + return o; +} + +void checkExportClusterRequest(api.ExportClusterRequest o) { + buildCounterExportClusterRequest++; + if (buildCounterExportClusterRequest < 3) { + checkCsvExportOptions(o.csvExportOptions!); + unittest.expect( + o.database!, + unittest.equals('foo'), + ); + checkGcsDestination(o.gcsDestination!); + checkSqlExportOptions(o.sqlExportOptions!); + } + buildCounterExportClusterRequest--; +} + core.int buildCounterFailoverInstanceRequest = 0; api.FailoverInstanceRequest buildFailoverInstanceRequest() { final o = api.FailoverInstanceRequest(); @@ -736,6 +801,28 @@ void checkFailoverInstanceRequest(api.FailoverInstanceRequest o) { buildCounterFailoverInstanceRequest--; } +core.int buildCounterGcsDestination = 0; +api.GcsDestination buildGcsDestination() { + final o = api.GcsDestination(); + buildCounterGcsDestination++; + if (buildCounterGcsDestination < 3) { + o.uri = 'foo'; + } + buildCounterGcsDestination--; + return o; +} + +void checkGcsDestination(api.GcsDestination o) { + buildCounterGcsDestination++; + if (buildCounterGcsDestination < 3) { + unittest.expect( + o.uri!, + unittest.equals('foo'), + ); + } + buildCounterGcsDestination--; +} + core.List buildUnnamed9() => [ buildGoogleCloudLocationLocation(), 
buildGoogleCloudLocationLocation(), @@ -2121,6 +2208,48 @@ void checkSecondaryConfig(api.SecondaryConfig o) { buildCounterSecondaryConfig--; } +core.List buildUnnamed34() => [ + 'foo', + 'foo', + ]; + +void checkUnnamed34(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o[0], + unittest.equals('foo'), + ); + unittest.expect( + o[1], + unittest.equals('foo'), + ); +} + +core.int buildCounterSqlExportOptions = 0; +api.SqlExportOptions buildSqlExportOptions() { + final o = api.SqlExportOptions(); + buildCounterSqlExportOptions++; + if (buildCounterSqlExportOptions < 3) { + o.cleanTargetObjects = true; + o.ifExistTargetObjects = true; + o.schemaOnly = true; + o.tables = buildUnnamed34(); + } + buildCounterSqlExportOptions--; + return o; +} + +void checkSqlExportOptions(api.SqlExportOptions o) { + buildCounterSqlExportOptions++; + if (buildCounterSqlExportOptions < 3) { + unittest.expect(o.cleanTargetObjects!, unittest.isTrue); + unittest.expect(o.ifExistTargetObjects!, unittest.isTrue); + unittest.expect(o.schemaOnly!, unittest.isTrue); + checkUnnamed34(o.tables!); + } + buildCounterSqlExportOptions--; +} + core.int buildCounterSslConfig = 0; api.SslConfig buildSslConfig() { final o = api.SslConfig(); @@ -2148,7 +2277,7 @@ void checkSslConfig(api.SslConfig o) { buildCounterSslConfig--; } -core.Map buildUnnamed34() => { +core.Map buildUnnamed35() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -2161,7 +2290,7 @@ core.Map buildUnnamed34() => { }, }; -void checkUnnamed34(core.Map o) { +void checkUnnamed35(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted7 = (o['x']!) as core.Map; unittest.expect(casted7, unittest.hasLength(3)); @@ -2193,15 +2322,15 @@ void checkUnnamed34(core.Map o) { ); } -core.List> buildUnnamed35() => [ - buildUnnamed34(), - buildUnnamed34(), +core.List> buildUnnamed36() => [ + buildUnnamed35(), + buildUnnamed35(), ]; -void checkUnnamed35(core.List> o) { +void checkUnnamed36(core.List> o) { unittest.expect(o, unittest.hasLength(2)); - checkUnnamed34(o[0]); - checkUnnamed34(o[1]); + checkUnnamed35(o[0]); + checkUnnamed35(o[1]); } core.int buildCounterStatus = 0; @@ -2210,7 +2339,7 @@ api.Status buildStatus() { buildCounterStatus++; if (buildCounterStatus < 3) { o.code = 42; - o.details = buildUnnamed35(); + o.details = buildUnnamed36(); o.message = 'foo'; } buildCounterStatus--; @@ -2224,7 +2353,7 @@ void checkStatus(api.Status o) { o.code!, unittest.equals(42), ); - checkUnnamed35(o.details!); + checkUnnamed36(o.details!); unittest.expect( o.message!, unittest.equals('foo'), @@ -2233,12 +2362,12 @@ void checkStatus(api.Status o) { buildCounterStatus--; } -core.List buildUnnamed36() => [ +core.List buildUnnamed37() => [ 'foo', 'foo', ]; -void checkUnnamed36(core.List o) { +void checkUnnamed37(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -2255,7 +2384,7 @@ api.StringRestrictions buildStringRestrictions() { final o = api.StringRestrictions(); buildCounterStringRestrictions++; if (buildCounterStringRestrictions < 3) { - o.allowedValues = buildUnnamed36(); + o.allowedValues = buildUnnamed37(); } buildCounterStringRestrictions--; return o; @@ -2264,17 +2393,17 @@ api.StringRestrictions buildStringRestrictions() { void checkStringRestrictions(api.StringRestrictions o) { buildCounterStringRestrictions++; if (buildCounterStringRestrictions < 3) { - checkUnnamed36(o.allowedValues!); + checkUnnamed37(o.allowedValues!); } buildCounterStringRestrictions--; } -core.List buildUnnamed37() => [ +core.List 
buildUnnamed38() => [ 'foo', 'foo', ]; -void checkUnnamed37(core.List o) { +void checkUnnamed38(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -2297,7 +2426,7 @@ api.SupportedDatabaseFlag buildSupportedDatabaseFlag() { o.name = 'foo'; o.requiresDbRestart = true; o.stringRestrictions = buildStringRestrictions(); - o.supportedDbVersions = buildUnnamed37(); + o.supportedDbVersions = buildUnnamed38(); o.valueType = 'foo'; } buildCounterSupportedDatabaseFlag--; @@ -2319,7 +2448,7 @@ void checkSupportedDatabaseFlag(api.SupportedDatabaseFlag o) { ); unittest.expect(o.requiresDbRestart!, unittest.isTrue); checkStringRestrictions(o.stringRestrictions!); - checkUnnamed37(o.supportedDbVersions!); + checkUnnamed38(o.supportedDbVersions!); unittest.expect( o.valueType!, unittest.equals('foo'), @@ -2445,12 +2574,12 @@ void checkUpgradeClusterRequest(api.UpgradeClusterRequest o) { buildCounterUpgradeClusterRequest--; } -core.List buildUnnamed38() => [ +core.List buildUnnamed39() => [ 'foo', 'foo', ]; -void checkUnnamed38(core.List o) { +void checkUnnamed39(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -2467,7 +2596,7 @@ api.User buildUser() { final o = api.User(); buildCounterUser++; if (buildCounterUser < 3) { - o.databaseRoles = buildUnnamed38(); + o.databaseRoles = buildUnnamed39(); o.keepExtraRoles = true; o.name = 'foo'; o.password = 'foo'; @@ -2480,7 +2609,7 @@ api.User buildUser() { void checkUser(api.User o) { buildCounterUser++; if (buildCounterUser < 3) { - checkUnnamed38(o.databaseRoles!); + checkUnnamed39(o.databaseRoles!); unittest.expect(o.keepExtraRoles!, unittest.isTrue); unittest.expect( o.name!, @@ -2525,12 +2654,12 @@ void checkUserPassword(api.UserPassword o) { buildCounterUserPassword--; } -core.List buildUnnamed39() => [ +core.List buildUnnamed40() => [ 'foo', 'foo', ]; -void checkUnnamed39(core.List o) { +void checkUnnamed40(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -2542,12 +2671,12 @@ void checkUnnamed39(core.List o) { ); } -core.List buildUnnamed40() => [ +core.List buildUnnamed41() => [ buildGoogleTypeTimeOfDay(), buildGoogleTypeTimeOfDay(), ]; -void checkUnnamed40(core.List o) { +void checkUnnamed41(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleTypeTimeOfDay(o[0]); checkGoogleTypeTimeOfDay(o[1]); @@ -2558,8 +2687,8 @@ api.WeeklySchedule buildWeeklySchedule() { final o = api.WeeklySchedule(); buildCounterWeeklySchedule++; if (buildCounterWeeklySchedule < 3) { - o.daysOfWeek = buildUnnamed39(); - o.startTimes = buildUnnamed40(); + o.daysOfWeek = buildUnnamed40(); + o.startTimes = buildUnnamed41(); } buildCounterWeeklySchedule--; return o; @@ -2568,8 +2697,8 @@ api.WeeklySchedule buildWeeklySchedule() { void checkWeeklySchedule(api.WeeklySchedule o) { buildCounterWeeklySchedule++; if (buildCounterWeeklySchedule < 3) { - checkUnnamed39(o.daysOfWeek!); - checkUnnamed40(o.startTimes!); + checkUnnamed40(o.daysOfWeek!); + checkUnnamed41(o.startTimes!); } buildCounterWeeklySchedule--; } @@ -2685,6 +2814,16 @@ void main() { }); }); + unittest.group('obj-schema-CsvExportOptions', () { + unittest.test('to-json--from-json', () async { + final o = buildCsvExportOptions(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.CsvExportOptions.fromJson( + oJson as core.Map); + checkCsvExportOptions(od); + }); + }); + unittest.group('obj-schema-Empty', () { unittest.test('to-json--from-json', () async { final o = buildEmpty(); @@ 
-2715,6 +2854,16 @@ void main() { }); }); + unittest.group('obj-schema-ExportClusterRequest', () { + unittest.test('to-json--from-json', () async { + final o = buildExportClusterRequest(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.ExportClusterRequest.fromJson( + oJson as core.Map); + checkExportClusterRequest(od); + }); + }); + unittest.group('obj-schema-FailoverInstanceRequest', () { unittest.test('to-json--from-json', () async { final o = buildFailoverInstanceRequest(); @@ -2725,6 +2874,16 @@ void main() { }); }); + unittest.group('obj-schema-GcsDestination', () { + unittest.test('to-json--from-json', () async { + final o = buildGcsDestination(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GcsDestination.fromJson( + oJson as core.Map); + checkGcsDestination(od); + }); + }); + unittest.group('obj-schema-GoogleCloudLocationListLocationsResponse', () { unittest.test('to-json--from-json', () async { final o = buildGoogleCloudLocationListLocationsResponse(); @@ -3045,6 +3204,16 @@ void main() { }); }); + unittest.group('obj-schema-SqlExportOptions', () { + unittest.test('to-json--from-json', () async { + final o = buildSqlExportOptions(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.SqlExportOptions.fromJson( + oJson as core.Map); + checkSqlExportOptions(od); + }); + }); + unittest.group('obj-schema-SslConfig', () { unittest.test('to-json--from-json', () async { final o = buildSslConfig(); @@ -3873,6 +4042,64 @@ void main() { checkOperation(response as api.Operation); }); + unittest.test('method--export', () async { + final mock = HttpServerMock(); + final res = api.CloudAlloyDBAdminApi(mock).projects.locations.clusters; + final arg_request = buildExportClusterRequest(); + final arg_name = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.ExportClusterRequest.fromJson( + json as core.Map); + checkExportClusterRequest(obj); + + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildOperation()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = + await res.export(arg_request, arg_name, $fields: arg_$fields); + checkOperation(response as api.Operation); + }); + unittest.test('method--get', () async { final mock = HttpServerMock(); final res = api.CloudAlloyDBAdminApi(mock).projects.locations.clusters; diff --git a/generated/googleapis/test/analyticshub/v1_test.dart b/generated/googleapis/test/analyticshub/v1_test.dart index 76c86d7d0..e53e438fc 
100644 --- a/generated/googleapis/test/analyticshub/v1_test.dart +++ b/generated/googleapis/test/analyticshub/v1_test.dart @@ -1718,6 +1718,7 @@ api.SubscribeDataExchangeRequest buildSubscribeDataExchangeRequest() { buildCounterSubscribeDataExchangeRequest++; if (buildCounterSubscribeDataExchangeRequest < 3) { o.destination = 'foo'; + o.destinationDataset = buildDestinationDataset(); o.subscriberContact = 'foo'; o.subscription = 'foo'; } @@ -1732,6 +1733,7 @@ void checkSubscribeDataExchangeRequest(api.SubscribeDataExchangeRequest o) { o.destination!, unittest.equals('foo'), ); + checkDestinationDataset(o.destinationDataset!); unittest.expect( o.subscriberContact!, unittest.equals('foo'), diff --git a/generated/googleapis/test/androidenterprise/v1_test.dart b/generated/googleapis/test/androidenterprise/v1_test.dart index 641dd6883..43dd6cafa 100644 --- a/generated/googleapis/test/androidenterprise/v1_test.dart +++ b/generated/googleapis/test/androidenterprise/v1_test.dart @@ -733,6 +733,7 @@ api.CreateEnrollmentTokenResponse buildCreateEnrollmentTokenResponse() { buildCounterCreateEnrollmentTokenResponse++; if (buildCounterCreateEnrollmentTokenResponse < 3) { o.enrollmentToken = 'foo'; + o.token = buildEnrollmentToken(); } buildCounterCreateEnrollmentTokenResponse--; return o; @@ -745,6 +746,7 @@ void checkCreateEnrollmentTokenResponse(api.CreateEnrollmentTokenResponse o) { o.enrollmentToken!, unittest.equals('foo'), ); + checkEnrollmentToken(o.token!); } buildCounterCreateEnrollmentTokenResponse--; } @@ -931,6 +933,38 @@ void checkDevicesListResponse(api.DevicesListResponse o) { buildCounterDevicesListResponse--; } +core.int buildCounterEnrollmentToken = 0; +api.EnrollmentToken buildEnrollmentToken() { + final o = api.EnrollmentToken(); + buildCounterEnrollmentToken++; + if (buildCounterEnrollmentToken < 3) { + o.duration = 'foo'; + o.enrollmentTokenType = 'foo'; + o.token = 'foo'; + } + buildCounterEnrollmentToken--; + return o; +} + +void checkEnrollmentToken(api.EnrollmentToken o) { + buildCounterEnrollmentToken++; + if (buildCounterEnrollmentToken < 3) { + unittest.expect( + o.duration!, + unittest.equals('foo'), + ); + unittest.expect( + o.enrollmentTokenType!, + unittest.equals('foo'), + ); + unittest.expect( + o.token!, + unittest.equals('foo'), + ); + } + buildCounterEnrollmentToken--; +} + core.List buildUnnamed12() => [ buildAdministrator(), buildAdministrator(), @@ -3542,6 +3576,16 @@ void main() { }); }); + unittest.group('obj-schema-EnrollmentToken', () { + unittest.test('to-json--from-json', () async { + final o = buildEnrollmentToken(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.EnrollmentToken.fromJson( + oJson as core.Map); + checkEnrollmentToken(od); + }); + }); + unittest.group('obj-schema-Enterprise', () { unittest.test('to-json--from-json', () async { final o = buildEnterprise(); @@ -4831,6 +4875,9 @@ void main() { final res = api.AndroidEnterpriseApi(mock).enterprises; final arg_enterpriseId = 'foo'; final arg_deviceType = 'foo'; + final arg_enrollmentToken_duration = 'foo'; + final arg_enrollmentToken_enrollmentTokenType = 'foo'; + final arg_enrollmentToken_token = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -4881,6 +4928,18 @@ void main() { queryMap['deviceType']!.first, unittest.equals(arg_deviceType), ); + unittest.expect( + queryMap['enrollmentToken.duration']!.first, + unittest.equals(arg_enrollmentToken_duration), + ); + unittest.expect( + 
queryMap['enrollmentToken.enrollmentTokenType']!.first, + unittest.equals(arg_enrollmentToken_enrollmentTokenType), + ); + unittest.expect( + queryMap['enrollmentToken.token']!.first, + unittest.equals(arg_enrollmentToken_token), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -4893,7 +4952,12 @@ void main() { return async.Future.value(stringResponse(200, h, resp)); }), true); final response = await res.createEnrollmentToken(arg_enterpriseId, - deviceType: arg_deviceType, $fields: arg_$fields); + deviceType: arg_deviceType, + enrollmentToken_duration: arg_enrollmentToken_duration, + enrollmentToken_enrollmentTokenType: + arg_enrollmentToken_enrollmentTokenType, + enrollmentToken_token: arg_enrollmentToken_token, + $fields: arg_$fields); checkCreateEnrollmentTokenResponse( response as api.CreateEnrollmentTokenResponse); }); diff --git a/generated/googleapis/test/androidmanagement/v1_test.dart b/generated/googleapis/test/androidmanagement/v1_test.dart index 4256b9e08..c54c3a4a1 100644 --- a/generated/googleapis/test/androidmanagement/v1_test.dart +++ b/generated/googleapis/test/androidmanagement/v1_test.dart @@ -3645,6 +3645,7 @@ api.PersonalUsagePolicies buildPersonalUsagePolicies() { o.maxDaysWithWorkOff = 42; o.personalApplications = buildUnnamed68(); o.personalPlayStoreMode = 'foo'; + o.privateSpacePolicy = 'foo'; o.screenCaptureDisabled = true; } buildCounterPersonalUsagePolicies--; @@ -3665,6 +3666,10 @@ void checkPersonalUsagePolicies(api.PersonalUsagePolicies o) { o.personalPlayStoreMode!, unittest.equals('foo'), ); + unittest.expect( + o.privateSpacePolicy!, + unittest.equals('foo'), + ); unittest.expect(o.screenCaptureDisabled!, unittest.isTrue); } buildCounterPersonalUsagePolicies--; diff --git a/generated/googleapis/test/androidpublisher/v3_test.dart b/generated/googleapis/test/androidpublisher/v3_test.dart index ecd183cdb..22566c1f3 100644 --- a/generated/googleapis/test/androidpublisher/v3_test.dart +++ b/generated/googleapis/test/androidpublisher/v3_test.dart @@ -4356,6 +4356,21 @@ void checkOfferTag(api.OfferTag o) { buildCounterOfferTag--; } +core.int buildCounterOneTimeCode = 0; +api.OneTimeCode buildOneTimeCode() { + final o = api.OneTimeCode(); + buildCounterOneTimeCode++; + if (buildCounterOneTimeCode < 3) {} + buildCounterOneTimeCode--; + return o; +} + +void checkOneTimeCode(api.OneTimeCode o) { + buildCounterOneTimeCode++; + if (buildCounterOneTimeCode < 3) {} + buildCounterOneTimeCode--; +} + core.int buildCounterOneTimeExternalTransaction = 0; api.OneTimeExternalTransaction buildOneTimeExternalTransaction() { final o = api.OneTimeExternalTransaction(); @@ -5609,6 +5624,27 @@ void checkSdkVersionTargeting(api.SdkVersionTargeting o) { buildCounterSdkVersionTargeting--; } +core.int buildCounterSignupPromotion = 0; +api.SignupPromotion buildSignupPromotion() { + final o = api.SignupPromotion(); + buildCounterSignupPromotion++; + if (buildCounterSignupPromotion < 3) { + o.oneTimeCode = buildOneTimeCode(); + o.vanityCode = buildVanityCode(); + } + buildCounterSignupPromotion--; + return o; +} + +void checkSignupPromotion(api.SignupPromotion o) { + buildCounterSignupPromotion++; + if (buildCounterSignupPromotion < 3) { + checkOneTimeCode(o.oneTimeCode!); + checkVanityCode(o.vanityCode!); + } + buildCounterSignupPromotion--; +} + core.int buildCounterSplitApkMetadata = 0; api.SplitApkMetadata buildSplitApkMetadata() { final o = api.SplitApkMetadata(); @@ -6281,6 +6317,7 @@ api.SubscriptionPurchaseLineItem buildSubscriptionPurchaseLineItem() 
{ o.offerDetails = buildOfferDetails(); o.prepaidPlan = buildPrepaidPlan(); o.productId = 'foo'; + o.signupPromotion = buildSignupPromotion(); } buildCounterSubscriptionPurchaseLineItem--; return o; @@ -6301,6 +6338,7 @@ void checkSubscriptionPurchaseLineItem(api.SubscriptionPurchaseLineItem o) { o.productId!, unittest.equals('foo'), ); + checkSignupPromotion(o.signupPromotion!); } buildCounterSubscriptionPurchaseLineItem--; } @@ -7573,6 +7611,28 @@ void checkUsesPermission(api.UsesPermission o) { buildCounterUsesPermission--; } +core.int buildCounterVanityCode = 0; +api.VanityCode buildVanityCode() { + final o = api.VanityCode(); + buildCounterVanityCode++; + if (buildCounterVanityCode < 3) { + o.promotionCode = 'foo'; + } + buildCounterVanityCode--; + return o; +} + +void checkVanityCode(api.VanityCode o) { + buildCounterVanityCode++; + if (buildCounterVanityCode < 3) { + unittest.expect( + o.promotionCode!, + unittest.equals('foo'), + ); + } + buildCounterVanityCode--; +} + core.int buildCounterVariant = 0; api.Variant buildVariant() { final o = api.Variant(); @@ -9032,6 +9092,16 @@ void main() { }); }); + unittest.group('obj-schema-OneTimeCode', () { + unittest.test('to-json--from-json', () async { + final o = buildOneTimeCode(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.OneTimeCode.fromJson( + oJson as core.Map); + checkOneTimeCode(od); + }); + }); + unittest.group('obj-schema-OneTimeExternalTransaction', () { unittest.test('to-json--from-json', () async { final o = buildOneTimeExternalTransaction(); @@ -9505,6 +9575,16 @@ void main() { }); }); + unittest.group('obj-schema-SignupPromotion', () { + unittest.test('to-json--from-json', () async { + final o = buildSignupPromotion(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.SignupPromotion.fromJson( + oJson as core.Map); + checkSignupPromotion(od); + }); + }); + unittest.group('obj-schema-SplitApkMetadata', () { unittest.test('to-json--from-json', () async { final o = buildSplitApkMetadata(); @@ -10045,6 +10125,16 @@ void main() { }); }); + unittest.group('obj-schema-VanityCode', () { + unittest.test('to-json--from-json', () async { + final o = buildVanityCode(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.VanityCode.fromJson(oJson as core.Map); + checkVanityCode(od); + }); + }); + unittest.group('obj-schema-Variant', () { unittest.test('to-json--from-json', () async { final o = buildVariant(); diff --git a/generated/googleapis/test/apigee/v1_test.dart b/generated/googleapis/test/apigee/v1_test.dart index 8ab10ed7e..41bdf64a3 100644 --- a/generated/googleapis/test/apigee/v1_test.dart +++ b/generated/googleapis/test/apigee/v1_test.dart @@ -5081,6 +5081,8 @@ api.GoogleCloudApigeeV1EnvironmentConfig if (buildCounterGoogleCloudApigeeV1EnvironmentConfig < 3) { o.addonsConfig = buildGoogleCloudApigeeV1RuntimeAddonsConfig(); o.arcConfigLocation = 'foo'; + o.clientIpResolutionConfig = + buildGoogleCloudApigeeV1EnvironmentConfigClientIPResolutionConfig(); o.createTime = 'foo'; o.dataCollectors = buildUnnamed89(); o.debugMask = buildGoogleCloudApigeeV1DebugMask(); @@ -5116,6 +5118,8 @@ void checkGoogleCloudApigeeV1EnvironmentConfig( o.arcConfigLocation!, unittest.equals('foo'), ); + checkGoogleCloudApigeeV1EnvironmentConfigClientIPResolutionConfig( + o.clientIpResolutionConfig!); unittest.expect( o.createTime!, unittest.equals('foo'), @@ -5171,6 +5175,68 @@ void checkGoogleCloudApigeeV1EnvironmentConfig( 
buildCounterGoogleCloudApigeeV1EnvironmentConfig--; } +core.int + buildCounterGoogleCloudApigeeV1EnvironmentConfigClientIPResolutionConfig = + 0; +api.GoogleCloudApigeeV1EnvironmentConfigClientIPResolutionConfig + buildGoogleCloudApigeeV1EnvironmentConfigClientIPResolutionConfig() { + final o = api.GoogleCloudApigeeV1EnvironmentConfigClientIPResolutionConfig(); + buildCounterGoogleCloudApigeeV1EnvironmentConfigClientIPResolutionConfig++; + if (buildCounterGoogleCloudApigeeV1EnvironmentConfigClientIPResolutionConfig < + 3) { + o.headerIndexAlgorithm = + buildGoogleCloudApigeeV1EnvironmentConfigClientIPResolutionConfigHeaderIndexAlgorithm(); + } + buildCounterGoogleCloudApigeeV1EnvironmentConfigClientIPResolutionConfig--; + return o; +} + +void checkGoogleCloudApigeeV1EnvironmentConfigClientIPResolutionConfig( + api.GoogleCloudApigeeV1EnvironmentConfigClientIPResolutionConfig o) { + buildCounterGoogleCloudApigeeV1EnvironmentConfigClientIPResolutionConfig++; + if (buildCounterGoogleCloudApigeeV1EnvironmentConfigClientIPResolutionConfig < + 3) { + checkGoogleCloudApigeeV1EnvironmentConfigClientIPResolutionConfigHeaderIndexAlgorithm( + o.headerIndexAlgorithm!); + } + buildCounterGoogleCloudApigeeV1EnvironmentConfigClientIPResolutionConfig--; +} + +core.int + buildCounterGoogleCloudApigeeV1EnvironmentConfigClientIPResolutionConfigHeaderIndexAlgorithm = + 0; +api.GoogleCloudApigeeV1EnvironmentConfigClientIPResolutionConfigHeaderIndexAlgorithm + buildGoogleCloudApigeeV1EnvironmentConfigClientIPResolutionConfigHeaderIndexAlgorithm() { + final o = api + .GoogleCloudApigeeV1EnvironmentConfigClientIPResolutionConfigHeaderIndexAlgorithm(); + buildCounterGoogleCloudApigeeV1EnvironmentConfigClientIPResolutionConfigHeaderIndexAlgorithm++; + if (buildCounterGoogleCloudApigeeV1EnvironmentConfigClientIPResolutionConfigHeaderIndexAlgorithm < + 3) { + o.ipHeaderIndex = 42; + o.ipHeaderName = 'foo'; + } + buildCounterGoogleCloudApigeeV1EnvironmentConfigClientIPResolutionConfigHeaderIndexAlgorithm--; + return o; +} + +void checkGoogleCloudApigeeV1EnvironmentConfigClientIPResolutionConfigHeaderIndexAlgorithm( + api.GoogleCloudApigeeV1EnvironmentConfigClientIPResolutionConfigHeaderIndexAlgorithm + o) { + buildCounterGoogleCloudApigeeV1EnvironmentConfigClientIPResolutionConfigHeaderIndexAlgorithm++; + if (buildCounterGoogleCloudApigeeV1EnvironmentConfigClientIPResolutionConfigHeaderIndexAlgorithm < + 3) { + unittest.expect( + o.ipHeaderIndex!, + unittest.equals(42), + ); + unittest.expect( + o.ipHeaderName!, + unittest.equals('foo'), + ); + } + buildCounterGoogleCloudApigeeV1EnvironmentConfigClientIPResolutionConfigHeaderIndexAlgorithm--; +} + core.List buildUnnamed98() => [ 'foo', 'foo', @@ -15055,6 +15121,35 @@ void main() { }); }); + unittest.group( + 'obj-schema-GoogleCloudApigeeV1EnvironmentConfigClientIPResolutionConfig', + () { + unittest.test('to-json--from-json', () async { + final o = + buildGoogleCloudApigeeV1EnvironmentConfigClientIPResolutionConfig(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.GoogleCloudApigeeV1EnvironmentConfigClientIPResolutionConfig + .fromJson(oJson as core.Map); + checkGoogleCloudApigeeV1EnvironmentConfigClientIPResolutionConfig(od); + }); + }); + + unittest.group( + 'obj-schema-GoogleCloudApigeeV1EnvironmentConfigClientIPResolutionConfigHeaderIndexAlgorithm', + () { + unittest.test('to-json--from-json', () async { + final o = + buildGoogleCloudApigeeV1EnvironmentConfigClientIPResolutionConfigHeaderIndexAlgorithm(); + final oJson = 
convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.GoogleCloudApigeeV1EnvironmentConfigClientIPResolutionConfigHeaderIndexAlgorithm + .fromJson(oJson as core.Map); + checkGoogleCloudApigeeV1EnvironmentConfigClientIPResolutionConfigHeaderIndexAlgorithm( + od); + }); + }); + unittest.group('obj-schema-GoogleCloudApigeeV1EnvironmentGroup', () { unittest.test('to-json--from-json', () async { final o = buildGoogleCloudApigeeV1EnvironmentGroup(); diff --git a/generated/googleapis/test/appengine/v1_test.dart b/generated/googleapis/test/appengine/v1_test.dart index fbf176da1..499fc7d9c 100644 --- a/generated/googleapis/test/appengine/v1_test.dart +++ b/generated/googleapis/test/appengine/v1_test.dart @@ -7074,6 +7074,113 @@ void main() { }); }); + unittest.group('resource-ProjectsLocationsApplicationsServicesResource', () { + unittest.test('method--delete', () async { + final mock = HttpServerMock(); + final res = + api.AppengineApi(mock).projects.locations.applications.services; + final arg_projectsId = 'foo'; + final arg_locationsId = 'foo'; + final arg_applicationsId = 'foo'; + final arg_servicesId = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 12), + unittest.equals('v1/projects/'), + ); + pathOffset += 12; + index = path.indexOf('/locations/', pathOffset); + unittest.expect(index >= 0, unittest.isTrue); + subPart = + core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); + pathOffset = index; + unittest.expect( + subPart, + unittest.equals('$arg_projectsId'), + ); + unittest.expect( + path.substring(pathOffset, pathOffset + 11), + unittest.equals('/locations/'), + ); + pathOffset += 11; + index = path.indexOf('/applications/', pathOffset); + unittest.expect(index >= 0, unittest.isTrue); + subPart = + core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); + pathOffset = index; + unittest.expect( + subPart, + unittest.equals('$arg_locationsId'), + ); + unittest.expect( + path.substring(pathOffset, pathOffset + 14), + unittest.equals('/applications/'), + ); + pathOffset += 14; + index = path.indexOf('/services/', pathOffset); + unittest.expect(index >= 0, unittest.isTrue); + subPart = + core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); + pathOffset = index; + unittest.expect( + subPart, + unittest.equals('$arg_applicationsId'), + ); + unittest.expect( + path.substring(pathOffset, pathOffset + 10), + unittest.equals('/services/'), + ); + pathOffset += 10; + subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset)); + pathOffset = path.length; + unittest.expect( + subPart, + unittest.equals('$arg_servicesId'), + ); + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildOperation()); 
+ return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.delete( + arg_projectsId, arg_locationsId, arg_applicationsId, arg_servicesId, + $fields: arg_$fields); + checkOperation(response as api.Operation); + }); + }); + unittest.group( 'resource-ProjectsLocationsApplicationsServicesVersionsResource', () { unittest.test('method--delete', () async { diff --git a/generated/googleapis/test/artifactregistry/v1_test.dart b/generated/googleapis/test/artifactregistry/v1_test.dart index 1fd6114ae..d63215127 100644 --- a/generated/googleapis/test/artifactregistry/v1_test.dart +++ b/generated/googleapis/test/artifactregistry/v1_test.dart @@ -2272,6 +2272,7 @@ api.Repository buildRepository() { o.mavenConfig = buildMavenRepositoryConfig(); o.mode = 'foo'; o.name = 'foo'; + o.registryUri = 'foo'; o.remoteRepositoryConfig = buildRemoteRepositoryConfig(); o.satisfiesPzi = true; o.satisfiesPzs = true; @@ -2317,6 +2318,10 @@ void checkRepository(api.Repository o) { o.name!, unittest.equals('foo'), ); + unittest.expect( + o.registryUri!, + unittest.equals('foo'), + ); checkRemoteRepositoryConfig(o.remoteRepositoryConfig!); unittest.expect(o.satisfiesPzi!, unittest.isTrue); unittest.expect(o.satisfiesPzs!, unittest.isTrue); diff --git a/generated/googleapis/test/backupdr/v1_test.dart b/generated/googleapis/test/backupdr/v1_test.dart index 363768bff..f906e8c55 100644 --- a/generated/googleapis/test/backupdr/v1_test.dart +++ b/generated/googleapis/test/backupdr/v1_test.dart @@ -479,6 +479,8 @@ api.Backup buildBackup() { o.labels = buildUnnamed6(); o.name = 'foo'; o.resourceSizeBytes = 'foo'; + o.satisfiesPzi = true; + o.satisfiesPzs = true; o.serviceLocks = buildUnnamed7(); o.state = 'foo'; o.updateTime = 'foo'; @@ -531,6 +533,8 @@ void checkBackup(api.Backup o) { o.resourceSizeBytes!, unittest.equals('foo'), ); + unittest.expect(o.satisfiesPzi!, unittest.isTrue); + unittest.expect(o.satisfiesPzs!, unittest.isTrue); checkUnnamed7(o.serviceLocks!); unittest.expect( o.state!, @@ -2136,6 +2140,33 @@ void checkInitializeParams(api.InitializeParams o) { buildCounterInitializeParams--; } +core.int buildCounterInitializeServiceRequest = 0; +api.InitializeServiceRequest buildInitializeServiceRequest() { + final o = api.InitializeServiceRequest(); + buildCounterInitializeServiceRequest++; + if (buildCounterInitializeServiceRequest < 3) { + o.requestId = 'foo'; + o.resourceType = 'foo'; + } + buildCounterInitializeServiceRequest--; + return o; +} + +void checkInitializeServiceRequest(api.InitializeServiceRequest o) { + buildCounterInitializeServiceRequest++; + if (buildCounterInitializeServiceRequest < 3) { + unittest.expect( + o.requestId!, + unittest.equals('foo'), + ); + unittest.expect( + o.resourceType!, + unittest.equals('foo'), + ); + } + buildCounterInitializeServiceRequest--; +} + core.int buildCounterInitiateBackupRequest = 0; api.InitiateBackupRequest buildInitiateBackupRequest() { final o = api.InitiateBackupRequest(); @@ -4398,6 +4429,16 @@ void main() { }); }); + unittest.group('obj-schema-InitializeServiceRequest', () { + unittest.test('to-json--from-json', () async { + final o = buildInitializeServiceRequest(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.InitializeServiceRequest.fromJson( + oJson as core.Map); + checkInitializeServiceRequest(od); + }); + }); + unittest.group('obj-schema-InitiateBackupRequest', () { unittest.test('to-json--from-json', () async { final o = buildInitiateBackupRequest(); @@ -7609,4 +7650,64 @@ void 
main() { checkListOperationsResponse(response as api.ListOperationsResponse); }); }); + + unittest.group('resource-ProjectsLocationsServiceConfigResource', () { + unittest.test('method--initialize', () async { + final mock = HttpServerMock(); + final res = api.BackupdrApi(mock).projects.locations.serviceConfig; + final arg_request = buildInitializeServiceRequest(); + final arg_name = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.InitializeServiceRequest.fromJson( + json as core.Map); + checkInitializeServiceRequest(obj); + + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildOperation()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = + await res.initialize(arg_request, arg_name, $fields: arg_$fields); + checkOperation(response as api.Operation); + }); + }); } diff --git a/generated/googleapis/test/beyondcorp/v1_test.dart b/generated/googleapis/test/beyondcorp/v1_test.dart index b177590b2..dc277d2a0 100644 --- a/generated/googleapis/test/beyondcorp/v1_test.dart +++ b/generated/googleapis/test/beyondcorp/v1_test.dart @@ -1341,56 +1341,13 @@ void checkUnnamed22(core.List o) { ); } -core.int buildCounterGoogleCloudBeyondcorpSecuritygatewaysV1Peering = 0; -api.GoogleCloudBeyondcorpSecuritygatewaysV1Peering - buildGoogleCloudBeyondcorpSecuritygatewaysV1Peering() { - final o = api.GoogleCloudBeyondcorpSecuritygatewaysV1Peering(); - buildCounterGoogleCloudBeyondcorpSecuritygatewaysV1Peering++; - if (buildCounterGoogleCloudBeyondcorpSecuritygatewaysV1Peering < 3) { - o.dnsZones = buildUnnamed22(); - o.targetNetwork = 'foo'; - } - buildCounterGoogleCloudBeyondcorpSecuritygatewaysV1Peering--; - return o; -} - -void checkGoogleCloudBeyondcorpSecuritygatewaysV1Peering( - api.GoogleCloudBeyondcorpSecuritygatewaysV1Peering o) { - buildCounterGoogleCloudBeyondcorpSecuritygatewaysV1Peering++; - if (buildCounterGoogleCloudBeyondcorpSecuritygatewaysV1Peering < 3) { - checkUnnamed22(o.dnsZones!); - unittest.expect( - o.targetNetwork!, - unittest.equals('foo'), - ); - } - buildCounterGoogleCloudBeyondcorpSecuritygatewaysV1Peering--; -} - -core.List buildUnnamed23() => [ - 'foo', - 'foo', - ]; - -void checkUnnamed23(core.List o) { - unittest.expect(o, unittest.hasLength(2)); - unittest.expect( - o[0], - unittest.equals('foo'), - ); - unittest.expect( - o[1], - unittest.equals('foo'), - ); -} - core.Map - buildUnnamed24() => { + buildUnnamed23() => { 'x': buildGoogleCloudBeyondcorpSecuritygatewaysV1Hub(), 'y': 
buildGoogleCloudBeyondcorpSecuritygatewaysV1Hub(), }; -void checkUnnamed24( +void checkUnnamed23( core.Map o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudBeyondcorpSecuritygatewaysV1Hub(o['x']!); @@ -1405,8 +1362,8 @@ api.GoogleCloudBeyondcorpSecuritygatewaysV1SecurityGateway if (buildCounterGoogleCloudBeyondcorpSecuritygatewaysV1SecurityGateway < 3) { o.createTime = 'foo'; o.displayName = 'foo'; - o.externalIps = buildUnnamed23(); - o.hubs = buildUnnamed24(); + o.externalIps = buildUnnamed22(); + o.hubs = buildUnnamed23(); o.name = 'foo'; o.state = 'foo'; o.updateTime = 'foo'; @@ -1427,8 +1384,8 @@ void checkGoogleCloudBeyondcorpSecuritygatewaysV1SecurityGateway( o.displayName!, unittest.equals('foo'), ); - checkUnnamed23(o.externalIps!); - checkUnnamed24(o.hubs!); + checkUnnamed22(o.externalIps!); + checkUnnamed23(o.hubs!); unittest.expect( o.name!, unittest.equals('foo'), @@ -1445,56 +1402,12 @@ void checkGoogleCloudBeyondcorpSecuritygatewaysV1SecurityGateway( buildCounterGoogleCloudBeyondcorpSecuritygatewaysV1SecurityGateway--; } -core.List - buildUnnamed25() => [ - buildGoogleCloudBeyondcorpSecuritygatewaysV1Peering(), - buildGoogleCloudBeyondcorpSecuritygatewaysV1Peering(), - ]; - -void checkUnnamed25( - core.List o) { - unittest.expect(o, unittest.hasLength(2)); - checkGoogleCloudBeyondcorpSecuritygatewaysV1Peering(o[0]); - checkGoogleCloudBeyondcorpSecuritygatewaysV1Peering(o[1]); -} - -core.int buildCounterGoogleCloudBeyondcorpSecuritygatewaysV1SetPeeringRequest = - 0; -api.GoogleCloudBeyondcorpSecuritygatewaysV1SetPeeringRequest - buildGoogleCloudBeyondcorpSecuritygatewaysV1SetPeeringRequest() { - final o = api.GoogleCloudBeyondcorpSecuritygatewaysV1SetPeeringRequest(); - buildCounterGoogleCloudBeyondcorpSecuritygatewaysV1SetPeeringRequest++; - if (buildCounterGoogleCloudBeyondcorpSecuritygatewaysV1SetPeeringRequest < - 3) { - o.peerings = buildUnnamed25(); - o.requestId = 'foo'; - o.validateOnly = true; - } - buildCounterGoogleCloudBeyondcorpSecuritygatewaysV1SetPeeringRequest--; - return o; -} - -void checkGoogleCloudBeyondcorpSecuritygatewaysV1SetPeeringRequest( - api.GoogleCloudBeyondcorpSecuritygatewaysV1SetPeeringRequest o) { - buildCounterGoogleCloudBeyondcorpSecuritygatewaysV1SetPeeringRequest++; - if (buildCounterGoogleCloudBeyondcorpSecuritygatewaysV1SetPeeringRequest < - 3) { - checkUnnamed25(o.peerings!); - unittest.expect( - o.requestId!, - unittest.equals('foo'), - ); - unittest.expect(o.validateOnly!, unittest.isTrue); - } - buildCounterGoogleCloudBeyondcorpSecuritygatewaysV1SetPeeringRequest--; -} - -core.List buildUnnamed26() => [ +core.List buildUnnamed24() => [ buildGoogleCloudLocationLocation(), buildGoogleCloudLocationLocation(), ]; -void checkUnnamed26(core.List o) { +void checkUnnamed24(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudLocationLocation(o[0]); checkGoogleCloudLocationLocation(o[1]); @@ -1506,7 +1419,7 @@ api.GoogleCloudLocationListLocationsResponse final o = api.GoogleCloudLocationListLocationsResponse(); buildCounterGoogleCloudLocationListLocationsResponse++; if (buildCounterGoogleCloudLocationListLocationsResponse < 3) { - o.locations = buildUnnamed26(); + o.locations = buildUnnamed24(); o.nextPageToken = 'foo'; } buildCounterGoogleCloudLocationListLocationsResponse--; @@ -1517,7 +1430,7 @@ void checkGoogleCloudLocationListLocationsResponse( api.GoogleCloudLocationListLocationsResponse o) { buildCounterGoogleCloudLocationListLocationsResponse++; if 
(buildCounterGoogleCloudLocationListLocationsResponse < 3) { - checkUnnamed26(o.locations!); + checkUnnamed24(o.locations!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -1526,12 +1439,12 @@ void checkGoogleCloudLocationListLocationsResponse( buildCounterGoogleCloudLocationListLocationsResponse--; } -core.Map buildUnnamed27() => { +core.Map buildUnnamed25() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed27(core.Map o) { +void checkUnnamed25(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -1543,7 +1456,7 @@ void checkUnnamed27(core.Map o) { ); } -core.Map buildUnnamed28() => { +core.Map buildUnnamed26() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -1556,7 +1469,7 @@ core.Map buildUnnamed28() => { }, }; -void checkUnnamed28(core.Map o) { +void checkUnnamed26(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted5 = (o['x']!) as core.Map; unittest.expect(casted5, unittest.hasLength(3)); @@ -1594,9 +1507,9 @@ api.GoogleCloudLocationLocation buildGoogleCloudLocationLocation() { buildCounterGoogleCloudLocationLocation++; if (buildCounterGoogleCloudLocationLocation < 3) { o.displayName = 'foo'; - o.labels = buildUnnamed27(); + o.labels = buildUnnamed25(); o.locationId = 'foo'; - o.metadata = buildUnnamed28(); + o.metadata = buildUnnamed26(); o.name = 'foo'; } buildCounterGoogleCloudLocationLocation--; @@ -1610,12 +1523,12 @@ void checkGoogleCloudLocationLocation(api.GoogleCloudLocationLocation o) { o.displayName!, unittest.equals('foo'), ); - checkUnnamed27(o.labels!); + checkUnnamed25(o.labels!); unittest.expect( o.locationId!, unittest.equals('foo'), ); - checkUnnamed28(o.metadata!); + checkUnnamed26(o.metadata!); unittest.expect( o.name!, unittest.equals('foo'), @@ -1624,12 +1537,12 @@ void checkGoogleCloudLocationLocation(api.GoogleCloudLocationLocation o) { buildCounterGoogleCloudLocationLocation--; } -core.List buildUnnamed29() => [ +core.List buildUnnamed27() => [ buildGoogleIamV1AuditLogConfig(), buildGoogleIamV1AuditLogConfig(), ]; -void checkUnnamed29(core.List o) { +void checkUnnamed27(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleIamV1AuditLogConfig(o[0]); checkGoogleIamV1AuditLogConfig(o[1]); @@ -1640,7 +1553,7 @@ api.GoogleIamV1AuditConfig buildGoogleIamV1AuditConfig() { final o = api.GoogleIamV1AuditConfig(); buildCounterGoogleIamV1AuditConfig++; if (buildCounterGoogleIamV1AuditConfig < 3) { - o.auditLogConfigs = buildUnnamed29(); + o.auditLogConfigs = buildUnnamed27(); o.service = 'foo'; } buildCounterGoogleIamV1AuditConfig--; @@ -1650,7 +1563,7 @@ api.GoogleIamV1AuditConfig buildGoogleIamV1AuditConfig() { void checkGoogleIamV1AuditConfig(api.GoogleIamV1AuditConfig o) { buildCounterGoogleIamV1AuditConfig++; if (buildCounterGoogleIamV1AuditConfig < 3) { - checkUnnamed29(o.auditLogConfigs!); + checkUnnamed27(o.auditLogConfigs!); unittest.expect( o.service!, unittest.equals('foo'), @@ -1659,12 +1572,12 @@ void checkGoogleIamV1AuditConfig(api.GoogleIamV1AuditConfig o) { buildCounterGoogleIamV1AuditConfig--; } -core.List buildUnnamed30() => [ +core.List buildUnnamed28() => [ 'foo', 'foo', ]; -void checkUnnamed30(core.List o) { +void checkUnnamed28(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -1681,7 +1594,7 @@ api.GoogleIamV1AuditLogConfig buildGoogleIamV1AuditLogConfig() { final o = api.GoogleIamV1AuditLogConfig(); buildCounterGoogleIamV1AuditLogConfig++; if (buildCounterGoogleIamV1AuditLogConfig < 3) { - o.exemptedMembers = buildUnnamed30(); + 
o.exemptedMembers = buildUnnamed28(); o.logType = 'foo'; } buildCounterGoogleIamV1AuditLogConfig--; @@ -1691,7 +1604,7 @@ api.GoogleIamV1AuditLogConfig buildGoogleIamV1AuditLogConfig() { void checkGoogleIamV1AuditLogConfig(api.GoogleIamV1AuditLogConfig o) { buildCounterGoogleIamV1AuditLogConfig++; if (buildCounterGoogleIamV1AuditLogConfig < 3) { - checkUnnamed30(o.exemptedMembers!); + checkUnnamed28(o.exemptedMembers!); unittest.expect( o.logType!, unittest.equals('foo'), @@ -1700,12 +1613,12 @@ void checkGoogleIamV1AuditLogConfig(api.GoogleIamV1AuditLogConfig o) { buildCounterGoogleIamV1AuditLogConfig--; } -core.List buildUnnamed31() => [ +core.List buildUnnamed29() => [ 'foo', 'foo', ]; -void checkUnnamed31(core.List o) { +void checkUnnamed29(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -1723,7 +1636,7 @@ api.GoogleIamV1Binding buildGoogleIamV1Binding() { buildCounterGoogleIamV1Binding++; if (buildCounterGoogleIamV1Binding < 3) { o.condition = buildGoogleTypeExpr(); - o.members = buildUnnamed31(); + o.members = buildUnnamed29(); o.role = 'foo'; } buildCounterGoogleIamV1Binding--; @@ -1734,7 +1647,7 @@ void checkGoogleIamV1Binding(api.GoogleIamV1Binding o) { buildCounterGoogleIamV1Binding++; if (buildCounterGoogleIamV1Binding < 3) { checkGoogleTypeExpr(o.condition!); - checkUnnamed31(o.members!); + checkUnnamed29(o.members!); unittest.expect( o.role!, unittest.equals('foo'), @@ -1743,23 +1656,23 @@ void checkGoogleIamV1Binding(api.GoogleIamV1Binding o) { buildCounterGoogleIamV1Binding--; } -core.List buildUnnamed32() => [ +core.List buildUnnamed30() => [ buildGoogleIamV1AuditConfig(), buildGoogleIamV1AuditConfig(), ]; -void checkUnnamed32(core.List o) { +void checkUnnamed30(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleIamV1AuditConfig(o[0]); checkGoogleIamV1AuditConfig(o[1]); } -core.List buildUnnamed33() => [ +core.List buildUnnamed31() => [ buildGoogleIamV1Binding(), buildGoogleIamV1Binding(), ]; -void checkUnnamed33(core.List o) { +void checkUnnamed31(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleIamV1Binding(o[0]); checkGoogleIamV1Binding(o[1]); @@ -1770,8 +1683,8 @@ api.GoogleIamV1Policy buildGoogleIamV1Policy() { final o = api.GoogleIamV1Policy(); buildCounterGoogleIamV1Policy++; if (buildCounterGoogleIamV1Policy < 3) { - o.auditConfigs = buildUnnamed32(); - o.bindings = buildUnnamed33(); + o.auditConfigs = buildUnnamed30(); + o.bindings = buildUnnamed31(); o.etag = 'foo'; o.version = 42; } @@ -1782,8 +1695,8 @@ api.GoogleIamV1Policy buildGoogleIamV1Policy() { void checkGoogleIamV1Policy(api.GoogleIamV1Policy o) { buildCounterGoogleIamV1Policy++; if (buildCounterGoogleIamV1Policy < 3) { - checkUnnamed32(o.auditConfigs!); - checkUnnamed33(o.bindings!); + checkUnnamed30(o.auditConfigs!); + checkUnnamed31(o.bindings!); unittest.expect( o.etag!, unittest.equals('foo'), @@ -1820,12 +1733,12 @@ void checkGoogleIamV1SetIamPolicyRequest(api.GoogleIamV1SetIamPolicyRequest o) { buildCounterGoogleIamV1SetIamPolicyRequest--; } -core.List buildUnnamed34() => [ +core.List buildUnnamed32() => [ 'foo', 'foo', ]; -void checkUnnamed34(core.List o) { +void checkUnnamed32(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -1843,7 +1756,7 @@ api.GoogleIamV1TestIamPermissionsRequest final o = api.GoogleIamV1TestIamPermissionsRequest(); buildCounterGoogleIamV1TestIamPermissionsRequest++; if (buildCounterGoogleIamV1TestIamPermissionsRequest < 3) { - o.permissions = buildUnnamed34(); + 
o.permissions = buildUnnamed32(); } buildCounterGoogleIamV1TestIamPermissionsRequest--; return o; @@ -1853,17 +1766,17 @@ void checkGoogleIamV1TestIamPermissionsRequest( api.GoogleIamV1TestIamPermissionsRequest o) { buildCounterGoogleIamV1TestIamPermissionsRequest++; if (buildCounterGoogleIamV1TestIamPermissionsRequest < 3) { - checkUnnamed34(o.permissions!); + checkUnnamed32(o.permissions!); } buildCounterGoogleIamV1TestIamPermissionsRequest--; } -core.List buildUnnamed35() => [ +core.List buildUnnamed33() => [ 'foo', 'foo', ]; -void checkUnnamed35(core.List o) { +void checkUnnamed33(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -1881,7 +1794,7 @@ api.GoogleIamV1TestIamPermissionsResponse final o = api.GoogleIamV1TestIamPermissionsResponse(); buildCounterGoogleIamV1TestIamPermissionsResponse++; if (buildCounterGoogleIamV1TestIamPermissionsResponse < 3) { - o.permissions = buildUnnamed35(); + o.permissions = buildUnnamed33(); } buildCounterGoogleIamV1TestIamPermissionsResponse--; return o; @@ -1891,7 +1804,7 @@ void checkGoogleIamV1TestIamPermissionsResponse( api.GoogleIamV1TestIamPermissionsResponse o) { buildCounterGoogleIamV1TestIamPermissionsResponse++; if (buildCounterGoogleIamV1TestIamPermissionsResponse < 3) { - checkUnnamed35(o.permissions!); + checkUnnamed33(o.permissions!); } buildCounterGoogleIamV1TestIamPermissionsResponse--; } @@ -1913,12 +1826,12 @@ void checkGoogleLongrunningCancelOperationRequest( buildCounterGoogleLongrunningCancelOperationRequest--; } -core.List buildUnnamed36() => [ +core.List buildUnnamed34() => [ buildGoogleLongrunningOperation(), buildGoogleLongrunningOperation(), ]; -void checkUnnamed36(core.List o) { +void checkUnnamed34(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleLongrunningOperation(o[0]); checkGoogleLongrunningOperation(o[1]); @@ -1931,7 +1844,7 @@ api.GoogleLongrunningListOperationsResponse buildCounterGoogleLongrunningListOperationsResponse++; if (buildCounterGoogleLongrunningListOperationsResponse < 3) { o.nextPageToken = 'foo'; - o.operations = buildUnnamed36(); + o.operations = buildUnnamed34(); } buildCounterGoogleLongrunningListOperationsResponse--; return o; @@ -1945,12 +1858,12 @@ void checkGoogleLongrunningListOperationsResponse( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed36(o.operations!); + checkUnnamed34(o.operations!); } buildCounterGoogleLongrunningListOperationsResponse--; } -core.Map buildUnnamed37() => { +core.Map buildUnnamed35() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -1963,7 +1876,7 @@ core.Map buildUnnamed37() => { }, }; -void checkUnnamed37(core.Map o) { +void checkUnnamed35(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted7 = (o['x']!) as core.Map; unittest.expect(casted7, unittest.hasLength(3)); @@ -1995,7 +1908,7 @@ void checkUnnamed37(core.Map o) { ); } -core.Map buildUnnamed38() => { +core.Map buildUnnamed36() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -2008,7 +1921,7 @@ core.Map buildUnnamed38() => { }, }; -void checkUnnamed38(core.Map o) { +void checkUnnamed36(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted9 = (o['x']!) 
as core.Map; unittest.expect(casted9, unittest.hasLength(3)); @@ -2047,9 +1960,9 @@ api.GoogleLongrunningOperation buildGoogleLongrunningOperation() { if (buildCounterGoogleLongrunningOperation < 3) { o.done = true; o.error = buildGoogleRpcStatus(); - o.metadata = buildUnnamed37(); + o.metadata = buildUnnamed35(); o.name = 'foo'; - o.response = buildUnnamed38(); + o.response = buildUnnamed36(); } buildCounterGoogleLongrunningOperation--; return o; @@ -2060,17 +1973,17 @@ void checkGoogleLongrunningOperation(api.GoogleLongrunningOperation o) { if (buildCounterGoogleLongrunningOperation < 3) { unittest.expect(o.done!, unittest.isTrue); checkGoogleRpcStatus(o.error!); - checkUnnamed37(o.metadata!); + checkUnnamed35(o.metadata!); unittest.expect( o.name!, unittest.equals('foo'), ); - checkUnnamed38(o.response!); + checkUnnamed36(o.response!); } buildCounterGoogleLongrunningOperation--; } -core.Map buildUnnamed39() => { +core.Map buildUnnamed37() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -2083,7 +1996,7 @@ core.Map buildUnnamed39() => { }, }; -void checkUnnamed39(core.Map o) { +void checkUnnamed37(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted11 = (o['x']!) as core.Map; unittest.expect(casted11, unittest.hasLength(3)); @@ -2115,15 +2028,15 @@ void checkUnnamed39(core.Map o) { ); } -core.List> buildUnnamed40() => [ - buildUnnamed39(), - buildUnnamed39(), +core.List> buildUnnamed38() => [ + buildUnnamed37(), + buildUnnamed37(), ]; -void checkUnnamed40(core.List> o) { +void checkUnnamed38(core.List> o) { unittest.expect(o, unittest.hasLength(2)); - checkUnnamed39(o[0]); - checkUnnamed39(o[1]); + checkUnnamed37(o[0]); + checkUnnamed37(o[1]); } core.int buildCounterGoogleRpcStatus = 0; @@ -2132,7 +2045,7 @@ api.GoogleRpcStatus buildGoogleRpcStatus() { buildCounterGoogleRpcStatus++; if (buildCounterGoogleRpcStatus < 3) { o.code = 42; - o.details = buildUnnamed40(); + o.details = buildUnnamed38(); o.message = 'foo'; } buildCounterGoogleRpcStatus--; @@ -2146,7 +2059,7 @@ void checkGoogleRpcStatus(api.GoogleRpcStatus o) { o.code!, unittest.equals(42), ); - checkUnnamed40(o.details!); + checkUnnamed38(o.details!); unittest.expect( o.message!, unittest.equals('foo'), @@ -2192,23 +2105,23 @@ void checkGoogleTypeExpr(api.GoogleTypeExpr o) { buildCounterGoogleTypeExpr--; } -core.List buildUnnamed41() => [ +core.List buildUnnamed39() => [ buildAppGateway(), buildAppGateway(), ]; -void checkUnnamed41(core.List o) { +void checkUnnamed39(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkAppGateway(o[0]); checkAppGateway(o[1]); } -core.List buildUnnamed42() => [ +core.List buildUnnamed40() => [ 'foo', 'foo', ]; -void checkUnnamed42(core.List o) { +void checkUnnamed40(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -2225,9 +2138,9 @@ api.ListAppGatewaysResponse buildListAppGatewaysResponse() { final o = api.ListAppGatewaysResponse(); buildCounterListAppGatewaysResponse++; if (buildCounterListAppGatewaysResponse < 3) { - o.appGateways = buildUnnamed41(); + o.appGateways = buildUnnamed39(); o.nextPageToken = 'foo'; - o.unreachable = buildUnnamed42(); + o.unreachable = buildUnnamed40(); } buildCounterListAppGatewaysResponse--; return o; @@ -2236,12 +2149,12 @@ api.ListAppGatewaysResponse buildListAppGatewaysResponse() { void checkListAppGatewaysResponse(api.ListAppGatewaysResponse o) { buildCounterListAppGatewaysResponse++; if (buildCounterListAppGatewaysResponse < 3) { - checkUnnamed41(o.appGateways!); + checkUnnamed39(o.appGateways!); 
unittest.expect( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed42(o.unreachable!); + checkUnnamed40(o.unreachable!); } buildCounterListAppGatewaysResponse--; } @@ -2599,17 +2512,6 @@ void main() { }); }); - unittest.group('obj-schema-GoogleCloudBeyondcorpSecuritygatewaysV1Peering', - () { - unittest.test('to-json--from-json', () async { - final o = buildGoogleCloudBeyondcorpSecuritygatewaysV1Peering(); - final oJson = convert.jsonDecode(convert.jsonEncode(o)); - final od = api.GoogleCloudBeyondcorpSecuritygatewaysV1Peering.fromJson( - oJson as core.Map); - checkGoogleCloudBeyondcorpSecuritygatewaysV1Peering(od); - }); - }); - unittest.group( 'obj-schema-GoogleCloudBeyondcorpSecuritygatewaysV1SecurityGateway', () { unittest.test('to-json--from-json', () async { @@ -2622,19 +2524,6 @@ void main() { }); }); - unittest.group( - 'obj-schema-GoogleCloudBeyondcorpSecuritygatewaysV1SetPeeringRequest', - () { - unittest.test('to-json--from-json', () async { - final o = buildGoogleCloudBeyondcorpSecuritygatewaysV1SetPeeringRequest(); - final oJson = convert.jsonDecode(convert.jsonEncode(o)); - final od = - api.GoogleCloudBeyondcorpSecuritygatewaysV1SetPeeringRequest.fromJson( - oJson as core.Map); - checkGoogleCloudBeyondcorpSecuritygatewaysV1SetPeeringRequest(od); - }); - }); - unittest.group('obj-schema-GoogleCloudLocationListLocationsResponse', () { unittest.test('to-json--from-json', () async { final o = buildGoogleCloudLocationListLocationsResponse(); @@ -6753,17 +6642,16 @@ void main() { response as api.GoogleLongrunningOperation); }); - unittest.test('method--setPeering', () async { + unittest.test('method--setIamPolicy', () async { final mock = HttpServerMock(); final res = api.BeyondCorpApi(mock).projects.locations.securityGateways; - final arg_request = - buildGoogleCloudBeyondcorpSecuritygatewaysV1SetPeeringRequest(); - final arg_securityGateway = 'foo'; + final arg_request = buildGoogleIamV1SetIamPolicyRequest(); + final arg_resource = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleCloudBeyondcorpSecuritygatewaysV1SetPeeringRequest - .fromJson(json as core.Map); - checkGoogleCloudBeyondcorpSecuritygatewaysV1SetPeeringRequest(obj); + final obj = api.GoogleIamV1SetIamPolicyRequest.fromJson( + json as core.Map); + checkGoogleIamV1SetIamPolicyRequest(obj); final path = req.url.path; var pathOffset = 0; @@ -6804,13 +6692,12 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleLongrunningOperation()); + final resp = convert.json.encode(buildGoogleIamV1Policy()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.setPeering(arg_request, arg_securityGateway, + final response = await res.setIamPolicy(arg_request, arg_resource, $fields: arg_$fields); - checkGoogleLongrunningOperation( - response as api.GoogleLongrunningOperation); + checkGoogleIamV1Policy(response as api.GoogleIamV1Policy); }); unittest.test('method--testIamPermissions', () async { @@ -7213,64 +7100,4 @@ void main() { checkGoogleIamV1Policy(response as api.GoogleIamV1Policy); }); }); - - unittest.group('resource-VProjectsLocationsSecurityGatewaysResource', () { - unittest.test('method--setIamPolicy', () async { - final mock = HttpServerMock(); - final res = api.BeyondCorpApi(mock).v.projects.locations.securityGateways; - final arg_request = buildGoogleIamV1SetIamPolicyRequest(); - final arg_resource = 'foo'; - final 
arg_$fields = 'foo'; - mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleIamV1SetIamPolicyRequest.fromJson( - json as core.Map); - checkGoogleIamV1SetIamPolicyRequest(obj); - - final path = req.url.path; - var pathOffset = 0; - core.int index; - core.String subPart; - unittest.expect( - path.substring(pathOffset, pathOffset + 1), - unittest.equals('/'), - ); - pathOffset += 1; - unittest.expect( - path.substring(pathOffset, pathOffset + 2), - unittest.equals('v/'), - ); - pathOffset += 2; - // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; - - final query = req.url.query; - var queryOffset = 0; - final queryMap = >{}; - void addQueryParam(core.String n, core.String v) => - queryMap.putIfAbsent(n, () => []).add(v); - - if (query.isNotEmpty) { - for (var part in query.split('&')) { - final keyValue = part.split('='); - addQueryParam( - core.Uri.decodeQueryComponent(keyValue[0]), - core.Uri.decodeQueryComponent(keyValue[1]), - ); - } - } - unittest.expect( - queryMap['fields']!.first, - unittest.equals(arg_$fields), - ); - - final h = { - 'content-type': 'application/json; charset=utf-8', - }; - final resp = convert.json.encode(buildGoogleIamV1Policy()); - return async.Future.value(stringResponse(200, h, resp)); - }), true); - final response = await res.setIamPolicy(arg_request, arg_resource, - $fields: arg_$fields); - checkGoogleIamV1Policy(response as api.GoogleIamV1Policy); - }); - }); } diff --git a/generated/googleapis/test/bigquery/v2_test.dart b/generated/googleapis/test/bigquery/v2_test.dart index 2c2fd7e38..6c4d59d18 100644 --- a/generated/googleapis/test/bigquery/v2_test.dart +++ b/generated/googleapis/test/bigquery/v2_test.dart @@ -8397,6 +8397,7 @@ api.Table buildTable() { o.labels = buildUnnamed136(); o.lastModifiedTime = 'foo'; o.location = 'foo'; + o.managedTableType = 'foo'; o.materializedView = buildMaterializedViewDefinition(); o.materializedViewStatus = buildMaterializedViewStatus(); o.maxStaleness = 'foo'; @@ -8489,6 +8490,10 @@ void checkTable(api.Table o) { o.location!, unittest.equals('foo'), ); + unittest.expect( + o.managedTableType!, + unittest.equals('foo'), + ); checkMaterializedViewDefinition(o.materializedView!); checkMaterializedViewStatus(o.materializedViewStatus!); unittest.expect( diff --git a/generated/googleapis/test/bigqueryreservation/v1_test.dart b/generated/googleapis/test/bigqueryreservation/v1_test.dart index 2a46dde30..932dc656e 100644 --- a/generated/googleapis/test/bigqueryreservation/v1_test.dart +++ b/generated/googleapis/test/bigqueryreservation/v1_test.dart @@ -32,6 +32,7 @@ api.Assignment buildAssignment() { buildCounterAssignment++; if (buildCounterAssignment < 3) { o.assignee = 'foo'; + o.enableGeminiInBigquery = true; o.jobType = 'foo'; o.name = 'foo'; o.state = 'foo'; @@ -47,6 +48,7 @@ void checkAssignment(api.Assignment o) { o.assignee!, unittest.equals('foo'), ); + unittest.expect(o.enableGeminiInBigquery!, unittest.isTrue); unittest.expect( o.jobType!, unittest.equals('foo'), @@ -398,6 +400,23 @@ void checkMoveAssignmentRequest(api.MoveAssignmentRequest o) { buildCounterMoveAssignmentRequest--; } +core.Map buildUnnamed5() => { + 'x': 'foo', + 'y': 'foo', + }; + +void checkUnnamed5(core.Map o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o['x']!, + unittest.equals('foo'), + ); + unittest.expect( + o['y']!, + unittest.equals('foo'), + ); +} + core.int buildCounterReservation = 0; api.Reservation buildReservation() { final o = 
api.Reservation(); @@ -408,6 +427,7 @@ api.Reservation buildReservation() { o.creationTime = 'foo'; o.edition = 'foo'; o.ignoreIdleSlots = true; + o.labels = buildUnnamed5(); o.multiRegionAuxiliary = true; o.name = 'foo'; o.originalPrimaryLocation = 'foo'; @@ -437,6 +457,7 @@ void checkReservation(api.Reservation o) { unittest.equals('foo'), ); unittest.expect(o.ignoreIdleSlots!, unittest.isTrue); + checkUnnamed5(o.labels!); unittest.expect(o.multiRegionAuxiliary!, unittest.isTrue); unittest.expect( o.name!, @@ -466,12 +487,12 @@ void checkReservation(api.Reservation o) { buildCounterReservation--; } -core.List buildUnnamed5() => [ +core.List buildUnnamed6() => [ buildAssignment(), buildAssignment(), ]; -void checkUnnamed5(core.List o) { +void checkUnnamed6(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkAssignment(o[0]); checkAssignment(o[1]); @@ -482,7 +503,7 @@ api.SearchAllAssignmentsResponse buildSearchAllAssignmentsResponse() { final o = api.SearchAllAssignmentsResponse(); buildCounterSearchAllAssignmentsResponse++; if (buildCounterSearchAllAssignmentsResponse < 3) { - o.assignments = buildUnnamed5(); + o.assignments = buildUnnamed6(); o.nextPageToken = 'foo'; } buildCounterSearchAllAssignmentsResponse--; @@ -492,7 +513,7 @@ api.SearchAllAssignmentsResponse buildSearchAllAssignmentsResponse() { void checkSearchAllAssignmentsResponse(api.SearchAllAssignmentsResponse o) { buildCounterSearchAllAssignmentsResponse++; if (buildCounterSearchAllAssignmentsResponse < 3) { - checkUnnamed5(o.assignments!); + checkUnnamed6(o.assignments!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -501,12 +522,12 @@ void checkSearchAllAssignmentsResponse(api.SearchAllAssignmentsResponse o) { buildCounterSearchAllAssignmentsResponse--; } -core.List buildUnnamed6() => [ +core.List buildUnnamed7() => [ buildAssignment(), buildAssignment(), ]; -void checkUnnamed6(core.List o) { +void checkUnnamed7(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkAssignment(o[0]); checkAssignment(o[1]); @@ -517,7 +538,7 @@ api.SearchAssignmentsResponse buildSearchAssignmentsResponse() { final o = api.SearchAssignmentsResponse(); buildCounterSearchAssignmentsResponse++; if (buildCounterSearchAssignmentsResponse < 3) { - o.assignments = buildUnnamed6(); + o.assignments = buildUnnamed7(); o.nextPageToken = 'foo'; } buildCounterSearchAssignmentsResponse--; @@ -527,7 +548,7 @@ api.SearchAssignmentsResponse buildSearchAssignmentsResponse() { void checkSearchAssignmentsResponse(api.SearchAssignmentsResponse o) { buildCounterSearchAssignmentsResponse++; if (buildCounterSearchAssignmentsResponse < 3) { - checkUnnamed6(o.assignments!); + checkUnnamed7(o.assignments!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -580,7 +601,7 @@ void checkSplitCapacityCommitmentResponse( buildCounterSplitCapacityCommitmentResponse--; } -core.Map buildUnnamed7() => { +core.Map buildUnnamed8() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -593,7 +614,7 @@ core.Map buildUnnamed7() => { }, }; -void checkUnnamed7(core.Map o) { +void checkUnnamed8(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted1 = (o['x']!) 
as core.Map; unittest.expect(casted1, unittest.hasLength(3)); @@ -625,15 +646,15 @@ void checkUnnamed7(core.Map o) { ); } -core.List> buildUnnamed8() => [ - buildUnnamed7(), - buildUnnamed7(), +core.List> buildUnnamed9() => [ + buildUnnamed8(), + buildUnnamed8(), ]; -void checkUnnamed8(core.List> o) { +void checkUnnamed9(core.List> o) { unittest.expect(o, unittest.hasLength(2)); - checkUnnamed7(o[0]); - checkUnnamed7(o[1]); + checkUnnamed8(o[0]); + checkUnnamed8(o[1]); } core.int buildCounterStatus = 0; @@ -642,7 +663,7 @@ api.Status buildStatus() { buildCounterStatus++; if (buildCounterStatus < 3) { o.code = 42; - o.details = buildUnnamed8(); + o.details = buildUnnamed9(); o.message = 'foo'; } buildCounterStatus--; @@ -656,7 +677,7 @@ void checkStatus(api.Status o) { o.code!, unittest.equals(42), ); - checkUnnamed8(o.details!); + checkUnnamed9(o.details!); unittest.expect( o.message!, unittest.equals('foo'), diff --git a/generated/googleapis/test/calendar/v3_test.dart b/generated/googleapis/test/calendar/v3_test.dart index b9b6b938e..d6c15fa2e 100644 --- a/generated/googleapis/test/calendar/v3_test.dart +++ b/generated/googleapis/test/calendar/v3_test.dart @@ -1217,6 +1217,7 @@ api.Event buildEvent() { o.attachments = buildUnnamed11(); o.attendees = buildUnnamed12(); o.attendeesOmitted = true; + o.birthdayProperties = buildEventBirthdayProperties(); o.colorId = 'foo'; o.conferenceData = buildConferenceData(); o.created = core.DateTime.parse('2002-02-27T14:01:02Z'); @@ -1267,6 +1268,7 @@ void checkEvent(api.Event o) { checkUnnamed11(o.attachments!); checkUnnamed12(o.attendees!); unittest.expect(o.attendeesOmitted!, unittest.isTrue); + checkEventBirthdayProperties(o.birthdayProperties!); unittest.expect( o.colorId!, unittest.equals('foo'), @@ -1460,6 +1462,38 @@ void checkEventAttendee(api.EventAttendee o) { buildCounterEventAttendee--; } +core.int buildCounterEventBirthdayProperties = 0; +api.EventBirthdayProperties buildEventBirthdayProperties() { + final o = api.EventBirthdayProperties(); + buildCounterEventBirthdayProperties++; + if (buildCounterEventBirthdayProperties < 3) { + o.contact = 'foo'; + o.customTypeName = 'foo'; + o.type = 'foo'; + } + buildCounterEventBirthdayProperties--; + return o; +} + +void checkEventBirthdayProperties(api.EventBirthdayProperties o) { + buildCounterEventBirthdayProperties++; + if (buildCounterEventBirthdayProperties < 3) { + unittest.expect( + o.contact!, + unittest.equals('foo'), + ); + unittest.expect( + o.customTypeName!, + unittest.equals('foo'), + ); + unittest.expect( + o.type!, + unittest.equals('foo'), + ); + } + buildCounterEventBirthdayProperties--; +} + core.int buildCounterEventDateTime = 0; api.EventDateTime buildEventDateTime() { final o = api.EventDateTime(); @@ -2522,6 +2556,16 @@ void main() { }); }); + unittest.group('obj-schema-EventBirthdayProperties', () { + unittest.test('to-json--from-json', () async { + final o = buildEventBirthdayProperties(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.EventBirthdayProperties.fromJson( + oJson as core.Map); + checkEventBirthdayProperties(od); + }); + }); + unittest.group('obj-schema-EventDateTime', () { unittest.test('to-json--from-json', () async { final o = buildEventDateTime(); diff --git a/generated/googleapis/test/chat/v1_test.dart b/generated/googleapis/test/chat/v1_test.dart index f9ef61e91..5a2be6e74 100644 --- a/generated/googleapis/test/chat/v1_test.dart +++ b/generated/googleapis/test/chat/v1_test.dart @@ -916,6 +916,7 @@ api.GoogleAppsCardV1Button 
buildGoogleAppsCardV1Button() { o.icon = buildGoogleAppsCardV1Icon(); o.onClick = buildGoogleAppsCardV1OnClick(); o.text = 'foo'; + o.type = 'foo'; } buildCounterGoogleAppsCardV1Button--; return o; @@ -936,6 +937,10 @@ void checkGoogleAppsCardV1Button(api.GoogleAppsCardV1Button o) { o.text!, unittest.equals('foo'), ); + unittest.expect( + o.type!, + unittest.equals('foo'), + ); } buildCounterGoogleAppsCardV1Button--; } @@ -1122,12 +1127,182 @@ void checkGoogleAppsCardV1CardHeader(api.GoogleAppsCardV1CardHeader o) { buildCounterGoogleAppsCardV1CardHeader--; } -core.List buildUnnamed7() => [ +core.List buildUnnamed7() => [ + buildGoogleAppsCardV1CarouselCard(), + buildGoogleAppsCardV1CarouselCard(), + ]; + +void checkUnnamed7(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkGoogleAppsCardV1CarouselCard(o[0]); + checkGoogleAppsCardV1CarouselCard(o[1]); +} + +core.int buildCounterGoogleAppsCardV1Carousel = 0; +api.GoogleAppsCardV1Carousel buildGoogleAppsCardV1Carousel() { + final o = api.GoogleAppsCardV1Carousel(); + buildCounterGoogleAppsCardV1Carousel++; + if (buildCounterGoogleAppsCardV1Carousel < 3) { + o.carouselCards = buildUnnamed7(); + } + buildCounterGoogleAppsCardV1Carousel--; + return o; +} + +void checkGoogleAppsCardV1Carousel(api.GoogleAppsCardV1Carousel o) { + buildCounterGoogleAppsCardV1Carousel++; + if (buildCounterGoogleAppsCardV1Carousel < 3) { + checkUnnamed7(o.carouselCards!); + } + buildCounterGoogleAppsCardV1Carousel--; +} + +core.List buildUnnamed8() => [ + buildGoogleAppsCardV1NestedWidget(), + buildGoogleAppsCardV1NestedWidget(), + ]; + +void checkUnnamed8(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkGoogleAppsCardV1NestedWidget(o[0]); + checkGoogleAppsCardV1NestedWidget(o[1]); +} + +core.List buildUnnamed9() => [ + buildGoogleAppsCardV1NestedWidget(), + buildGoogleAppsCardV1NestedWidget(), + ]; + +void checkUnnamed9(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkGoogleAppsCardV1NestedWidget(o[0]); + checkGoogleAppsCardV1NestedWidget(o[1]); +} + +core.int buildCounterGoogleAppsCardV1CarouselCard = 0; +api.GoogleAppsCardV1CarouselCard buildGoogleAppsCardV1CarouselCard() { + final o = api.GoogleAppsCardV1CarouselCard(); + buildCounterGoogleAppsCardV1CarouselCard++; + if (buildCounterGoogleAppsCardV1CarouselCard < 3) { + o.footerWidgets = buildUnnamed8(); + o.widgets = buildUnnamed9(); + } + buildCounterGoogleAppsCardV1CarouselCard--; + return o; +} + +void checkGoogleAppsCardV1CarouselCard(api.GoogleAppsCardV1CarouselCard o) { + buildCounterGoogleAppsCardV1CarouselCard++; + if (buildCounterGoogleAppsCardV1CarouselCard < 3) { + checkUnnamed8(o.footerWidgets!); + checkUnnamed9(o.widgets!); + } + buildCounterGoogleAppsCardV1CarouselCard--; +} + +core.int buildCounterGoogleAppsCardV1Chip = 0; +api.GoogleAppsCardV1Chip buildGoogleAppsCardV1Chip() { + final o = api.GoogleAppsCardV1Chip(); + buildCounterGoogleAppsCardV1Chip++; + if (buildCounterGoogleAppsCardV1Chip < 3) { + o.altText = 'foo'; + o.disabled = true; + o.enabled = true; + o.icon = buildGoogleAppsCardV1Icon(); + o.label = 'foo'; + o.onClick = buildGoogleAppsCardV1OnClick(); + } + buildCounterGoogleAppsCardV1Chip--; + return o; +} + +void checkGoogleAppsCardV1Chip(api.GoogleAppsCardV1Chip o) { + buildCounterGoogleAppsCardV1Chip++; + if (buildCounterGoogleAppsCardV1Chip < 3) { + unittest.expect( + o.altText!, + unittest.equals('foo'), + ); + unittest.expect(o.disabled!, unittest.isTrue); + unittest.expect(o.enabled!, unittest.isTrue); + 
checkGoogleAppsCardV1Icon(o.icon!); + unittest.expect( + o.label!, + unittest.equals('foo'), + ); + checkGoogleAppsCardV1OnClick(o.onClick!); + } + buildCounterGoogleAppsCardV1Chip--; +} + +core.List buildUnnamed10() => [ + buildGoogleAppsCardV1Chip(), + buildGoogleAppsCardV1Chip(), + ]; + +void checkUnnamed10(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkGoogleAppsCardV1Chip(o[0]); + checkGoogleAppsCardV1Chip(o[1]); +} + +core.int buildCounterGoogleAppsCardV1ChipList = 0; +api.GoogleAppsCardV1ChipList buildGoogleAppsCardV1ChipList() { + final o = api.GoogleAppsCardV1ChipList(); + buildCounterGoogleAppsCardV1ChipList++; + if (buildCounterGoogleAppsCardV1ChipList < 3) { + o.chips = buildUnnamed10(); + o.layout = 'foo'; + } + buildCounterGoogleAppsCardV1ChipList--; + return o; +} + +void checkGoogleAppsCardV1ChipList(api.GoogleAppsCardV1ChipList o) { + buildCounterGoogleAppsCardV1ChipList++; + if (buildCounterGoogleAppsCardV1ChipList < 3) { + checkUnnamed10(o.chips!); + unittest.expect( + o.layout!, + unittest.equals('foo'), + ); + } + buildCounterGoogleAppsCardV1ChipList--; +} + +core.int buildCounterGoogleAppsCardV1CollapseControl = 0; +api.GoogleAppsCardV1CollapseControl buildGoogleAppsCardV1CollapseControl() { + final o = api.GoogleAppsCardV1CollapseControl(); + buildCounterGoogleAppsCardV1CollapseControl++; + if (buildCounterGoogleAppsCardV1CollapseControl < 3) { + o.collapseButton = buildGoogleAppsCardV1Button(); + o.expandButton = buildGoogleAppsCardV1Button(); + o.horizontalAlignment = 'foo'; + } + buildCounterGoogleAppsCardV1CollapseControl--; + return o; +} + +void checkGoogleAppsCardV1CollapseControl( + api.GoogleAppsCardV1CollapseControl o) { + buildCounterGoogleAppsCardV1CollapseControl++; + if (buildCounterGoogleAppsCardV1CollapseControl < 3) { + checkGoogleAppsCardV1Button(o.collapseButton!); + checkGoogleAppsCardV1Button(o.expandButton!); + unittest.expect( + o.horizontalAlignment!, + unittest.equals('foo'), + ); + } + buildCounterGoogleAppsCardV1CollapseControl--; +} + +core.List buildUnnamed11() => [ buildGoogleAppsCardV1Widgets(), buildGoogleAppsCardV1Widgets(), ]; -void checkUnnamed7(core.List o) { +void checkUnnamed11(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleAppsCardV1Widgets(o[0]); checkGoogleAppsCardV1Widgets(o[1]); @@ -1141,7 +1316,7 @@ api.GoogleAppsCardV1Column buildGoogleAppsCardV1Column() { o.horizontalAlignment = 'foo'; o.horizontalSizeStyle = 'foo'; o.verticalAlignment = 'foo'; - o.widgets = buildUnnamed7(); + o.widgets = buildUnnamed11(); } buildCounterGoogleAppsCardV1Column--; return o; @@ -1162,17 +1337,17 @@ void checkGoogleAppsCardV1Column(api.GoogleAppsCardV1Column o) { o.verticalAlignment!, unittest.equals('foo'), ); - checkUnnamed7(o.widgets!); + checkUnnamed11(o.widgets!); } buildCounterGoogleAppsCardV1Column--; } -core.List buildUnnamed8() => [ +core.List buildUnnamed12() => [ buildGoogleAppsCardV1Column(), buildGoogleAppsCardV1Column(), ]; -void checkUnnamed8(core.List o) { +void checkUnnamed12(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleAppsCardV1Column(o[0]); checkGoogleAppsCardV1Column(o[1]); @@ -1183,7 +1358,7 @@ api.GoogleAppsCardV1Columns buildGoogleAppsCardV1Columns() { final o = api.GoogleAppsCardV1Columns(); buildCounterGoogleAppsCardV1Columns++; if (buildCounterGoogleAppsCardV1Columns < 3) { - o.columnItems = buildUnnamed8(); + o.columnItems = buildUnnamed12(); } buildCounterGoogleAppsCardV1Columns--; return o; @@ -1192,7 +1367,7 @@ api.GoogleAppsCardV1Columns 
buildGoogleAppsCardV1Columns() { void checkGoogleAppsCardV1Columns(api.GoogleAppsCardV1Columns o) { buildCounterGoogleAppsCardV1Columns++; if (buildCounterGoogleAppsCardV1Columns < 3) { - checkUnnamed8(o.columnItems!); + checkUnnamed12(o.columnItems!); } buildCounterGoogleAppsCardV1Columns--; } @@ -1302,12 +1477,12 @@ void checkGoogleAppsCardV1Divider(api.GoogleAppsCardV1Divider o) { buildCounterGoogleAppsCardV1Divider--; } -core.List buildUnnamed9() => [ +core.List buildUnnamed13() => [ buildGoogleAppsCardV1GridItem(), buildGoogleAppsCardV1GridItem(), ]; -void checkUnnamed9(core.List o) { +void checkUnnamed13(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleAppsCardV1GridItem(o[0]); checkGoogleAppsCardV1GridItem(o[1]); @@ -1320,7 +1495,7 @@ api.GoogleAppsCardV1Grid buildGoogleAppsCardV1Grid() { if (buildCounterGoogleAppsCardV1Grid < 3) { o.borderStyle = buildGoogleAppsCardV1BorderStyle(); o.columnCount = 42; - o.items = buildUnnamed9(); + o.items = buildUnnamed13(); o.onClick = buildGoogleAppsCardV1OnClick(); o.title = 'foo'; } @@ -1336,7 +1511,7 @@ void checkGoogleAppsCardV1Grid(api.GoogleAppsCardV1Grid o) { o.columnCount!, unittest.equals(42), ); - checkUnnamed9(o.items!); + checkUnnamed13(o.items!); checkGoogleAppsCardV1OnClick(o.onClick!); unittest.expect( o.title!, @@ -1545,6 +1720,29 @@ void checkGoogleAppsCardV1MaterialIcon(api.GoogleAppsCardV1MaterialIcon o) { buildCounterGoogleAppsCardV1MaterialIcon--; } +core.int buildCounterGoogleAppsCardV1NestedWidget = 0; +api.GoogleAppsCardV1NestedWidget buildGoogleAppsCardV1NestedWidget() { + final o = api.GoogleAppsCardV1NestedWidget(); + buildCounterGoogleAppsCardV1NestedWidget++; + if (buildCounterGoogleAppsCardV1NestedWidget < 3) { + o.buttonList = buildGoogleAppsCardV1ButtonList(); + o.image = buildGoogleAppsCardV1Image(); + o.textParagraph = buildGoogleAppsCardV1TextParagraph(); + } + buildCounterGoogleAppsCardV1NestedWidget--; + return o; +} + +void checkGoogleAppsCardV1NestedWidget(api.GoogleAppsCardV1NestedWidget o) { + buildCounterGoogleAppsCardV1NestedWidget++; + if (buildCounterGoogleAppsCardV1NestedWidget < 3) { + checkGoogleAppsCardV1ButtonList(o.buttonList!); + checkGoogleAppsCardV1Image(o.image!); + checkGoogleAppsCardV1TextParagraph(o.textParagraph!); + } + buildCounterGoogleAppsCardV1NestedWidget--; +} + core.int buildCounterGoogleAppsCardV1OnClick = 0; api.GoogleAppsCardV1OnClick buildGoogleAppsCardV1OnClick() { final o = api.GoogleAppsCardV1OnClick(); @@ -1554,6 +1752,7 @@ api.GoogleAppsCardV1OnClick buildGoogleAppsCardV1OnClick() { o.card = buildGoogleAppsCardV1Card(); o.openDynamicLinkAction = buildGoogleAppsCardV1Action(); o.openLink = buildGoogleAppsCardV1OpenLink(); + o.overflowMenu = buildGoogleAppsCardV1OverflowMenu(); } buildCounterGoogleAppsCardV1OnClick--; return o; @@ -1566,6 +1765,7 @@ void checkGoogleAppsCardV1OnClick(api.GoogleAppsCardV1OnClick o) { checkGoogleAppsCardV1Card(o.card!); checkGoogleAppsCardV1Action(o.openDynamicLinkAction!); checkGoogleAppsCardV1OpenLink(o.openLink!); + checkGoogleAppsCardV1OverflowMenu(o.overflowMenu!); } buildCounterGoogleAppsCardV1OnClick--; } @@ -1602,6 +1802,65 @@ void checkGoogleAppsCardV1OpenLink(api.GoogleAppsCardV1OpenLink o) { buildCounterGoogleAppsCardV1OpenLink--; } +core.List buildUnnamed14() => [ + buildGoogleAppsCardV1OverflowMenuItem(), + buildGoogleAppsCardV1OverflowMenuItem(), + ]; + +void checkUnnamed14(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkGoogleAppsCardV1OverflowMenuItem(o[0]); + 
checkGoogleAppsCardV1OverflowMenuItem(o[1]); +} + +core.int buildCounterGoogleAppsCardV1OverflowMenu = 0; +api.GoogleAppsCardV1OverflowMenu buildGoogleAppsCardV1OverflowMenu() { + final o = api.GoogleAppsCardV1OverflowMenu(); + buildCounterGoogleAppsCardV1OverflowMenu++; + if (buildCounterGoogleAppsCardV1OverflowMenu < 3) { + o.items = buildUnnamed14(); + } + buildCounterGoogleAppsCardV1OverflowMenu--; + return o; +} + +void checkGoogleAppsCardV1OverflowMenu(api.GoogleAppsCardV1OverflowMenu o) { + buildCounterGoogleAppsCardV1OverflowMenu++; + if (buildCounterGoogleAppsCardV1OverflowMenu < 3) { + checkUnnamed14(o.items!); + } + buildCounterGoogleAppsCardV1OverflowMenu--; +} + +core.int buildCounterGoogleAppsCardV1OverflowMenuItem = 0; +api.GoogleAppsCardV1OverflowMenuItem buildGoogleAppsCardV1OverflowMenuItem() { + final o = api.GoogleAppsCardV1OverflowMenuItem(); + buildCounterGoogleAppsCardV1OverflowMenuItem++; + if (buildCounterGoogleAppsCardV1OverflowMenuItem < 3) { + o.disabled = true; + o.onClick = buildGoogleAppsCardV1OnClick(); + o.startIcon = buildGoogleAppsCardV1Icon(); + o.text = 'foo'; + } + buildCounterGoogleAppsCardV1OverflowMenuItem--; + return o; +} + +void checkGoogleAppsCardV1OverflowMenuItem( + api.GoogleAppsCardV1OverflowMenuItem o) { + buildCounterGoogleAppsCardV1OverflowMenuItem++; + if (buildCounterGoogleAppsCardV1OverflowMenuItem < 3) { + unittest.expect(o.disabled!, unittest.isTrue); + checkGoogleAppsCardV1OnClick(o.onClick!); + checkGoogleAppsCardV1Icon(o.startIcon!); + unittest.expect( + o.text!, + unittest.equals('foo'), + ); + } + buildCounterGoogleAppsCardV1OverflowMenuItem--; +} + core.int buildCounterGoogleAppsCardV1PlatformDataSource = 0; api.GoogleAppsCardV1PlatformDataSource buildGoogleAppsCardV1PlatformDataSource() { @@ -1628,12 +1887,12 @@ void checkGoogleAppsCardV1PlatformDataSource( buildCounterGoogleAppsCardV1PlatformDataSource--; } -core.List buildUnnamed10() => [ +core.List buildUnnamed15() => [ buildGoogleAppsCardV1Widget(), buildGoogleAppsCardV1Widget(), ]; -void checkUnnamed10(core.List o) { +void checkUnnamed15(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleAppsCardV1Widget(o[0]); checkGoogleAppsCardV1Widget(o[1]); @@ -1644,10 +1903,11 @@ api.GoogleAppsCardV1Section buildGoogleAppsCardV1Section() { final o = api.GoogleAppsCardV1Section(); buildCounterGoogleAppsCardV1Section++; if (buildCounterGoogleAppsCardV1Section < 3) { + o.collapseControl = buildGoogleAppsCardV1CollapseControl(); o.collapsible = true; o.header = 'foo'; o.uncollapsibleWidgetsCount = 42; - o.widgets = buildUnnamed10(); + o.widgets = buildUnnamed15(); } buildCounterGoogleAppsCardV1Section--; return o; @@ -1656,6 +1916,7 @@ api.GoogleAppsCardV1Section buildGoogleAppsCardV1Section() { void checkGoogleAppsCardV1Section(api.GoogleAppsCardV1Section o) { buildCounterGoogleAppsCardV1Section++; if (buildCounterGoogleAppsCardV1Section < 3) { + checkGoogleAppsCardV1CollapseControl(o.collapseControl!); unittest.expect(o.collapsible!, unittest.isTrue); unittest.expect( o.header!, @@ -1665,17 +1926,17 @@ void checkGoogleAppsCardV1Section(api.GoogleAppsCardV1Section o) { o.uncollapsibleWidgetsCount!, unittest.equals(42), ); - checkUnnamed10(o.widgets!); + checkUnnamed15(o.widgets!); } buildCounterGoogleAppsCardV1Section--; } -core.List buildUnnamed11() => [ +core.List buildUnnamed16() => [ buildGoogleAppsCardV1SelectionItem(), buildGoogleAppsCardV1SelectionItem(), ]; -void checkUnnamed11(core.List o) { +void checkUnnamed16(core.List o) { unittest.expect(o, 
unittest.hasLength(2)); checkGoogleAppsCardV1SelectionItem(o[0]); checkGoogleAppsCardV1SelectionItem(o[1]); @@ -1687,7 +1948,7 @@ api.GoogleAppsCardV1SelectionInput buildGoogleAppsCardV1SelectionInput() { buildCounterGoogleAppsCardV1SelectionInput++; if (buildCounterGoogleAppsCardV1SelectionInput < 3) { o.externalDataSource = buildGoogleAppsCardV1Action(); - o.items = buildUnnamed11(); + o.items = buildUnnamed16(); o.label = 'foo'; o.multiSelectMaxSelectedItems = 42; o.multiSelectMinQueryLength = 42; @@ -1704,7 +1965,7 @@ void checkGoogleAppsCardV1SelectionInput(api.GoogleAppsCardV1SelectionInput o) { buildCounterGoogleAppsCardV1SelectionInput++; if (buildCounterGoogleAppsCardV1SelectionInput < 3) { checkGoogleAppsCardV1Action(o.externalDataSource!); - checkUnnamed11(o.items!); + checkUnnamed16(o.items!); unittest.expect( o.label!, unittest.equals('foo'), @@ -1792,12 +2053,12 @@ void checkGoogleAppsCardV1SuggestionItem(api.GoogleAppsCardV1SuggestionItem o) { buildCounterGoogleAppsCardV1SuggestionItem--; } -core.List buildUnnamed12() => [ +core.List buildUnnamed17() => [ buildGoogleAppsCardV1SuggestionItem(), buildGoogleAppsCardV1SuggestionItem(), ]; -void checkUnnamed12(core.List o) { +void checkUnnamed17(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleAppsCardV1SuggestionItem(o[0]); checkGoogleAppsCardV1SuggestionItem(o[1]); @@ -1808,7 +2069,7 @@ api.GoogleAppsCardV1Suggestions buildGoogleAppsCardV1Suggestions() { final o = api.GoogleAppsCardV1Suggestions(); buildCounterGoogleAppsCardV1Suggestions++; if (buildCounterGoogleAppsCardV1Suggestions < 3) { - o.items = buildUnnamed12(); + o.items = buildUnnamed17(); } buildCounterGoogleAppsCardV1Suggestions--; return o; @@ -1817,7 +2078,7 @@ api.GoogleAppsCardV1Suggestions buildGoogleAppsCardV1Suggestions() { void checkGoogleAppsCardV1Suggestions(api.GoogleAppsCardV1Suggestions o) { buildCounterGoogleAppsCardV1Suggestions++; if (buildCounterGoogleAppsCardV1Suggestions < 3) { - checkUnnamed12(o.items!); + checkUnnamed17(o.items!); } buildCounterGoogleAppsCardV1Suggestions--; } @@ -1916,6 +2177,7 @@ api.GoogleAppsCardV1TextParagraph buildGoogleAppsCardV1TextParagraph() { final o = api.GoogleAppsCardV1TextParagraph(); buildCounterGoogleAppsCardV1TextParagraph++; if (buildCounterGoogleAppsCardV1TextParagraph < 3) { + o.maxLines = 42; o.text = 'foo'; } buildCounterGoogleAppsCardV1TextParagraph--; @@ -1925,6 +2187,10 @@ api.GoogleAppsCardV1TextParagraph buildGoogleAppsCardV1TextParagraph() { void checkGoogleAppsCardV1TextParagraph(api.GoogleAppsCardV1TextParagraph o) { buildCounterGoogleAppsCardV1TextParagraph++; if (buildCounterGoogleAppsCardV1TextParagraph < 3) { + unittest.expect( + o.maxLines!, + unittest.equals(42), + ); unittest.expect( o.text!, unittest.equals('foo'), @@ -1939,6 +2205,8 @@ api.GoogleAppsCardV1Widget buildGoogleAppsCardV1Widget() { buildCounterGoogleAppsCardV1Widget++; if (buildCounterGoogleAppsCardV1Widget < 3) { o.buttonList = buildGoogleAppsCardV1ButtonList(); + o.carousel = buildGoogleAppsCardV1Carousel(); + o.chipList = buildGoogleAppsCardV1ChipList(); o.columns = buildGoogleAppsCardV1Columns(); o.dateTimePicker = buildGoogleAppsCardV1DateTimePicker(); o.decoratedText = buildGoogleAppsCardV1DecoratedText(); @@ -1958,6 +2226,8 @@ void checkGoogleAppsCardV1Widget(api.GoogleAppsCardV1Widget o) { buildCounterGoogleAppsCardV1Widget++; if (buildCounterGoogleAppsCardV1Widget < 3) { checkGoogleAppsCardV1ButtonList(o.buttonList!); + checkGoogleAppsCardV1Carousel(o.carousel!); + 
checkGoogleAppsCardV1ChipList(o.chipList!); checkGoogleAppsCardV1Columns(o.columns!); checkGoogleAppsCardV1DateTimePicker(o.dateTimePicker!); checkGoogleAppsCardV1DecoratedText(o.decoratedText!); @@ -1981,6 +2251,7 @@ api.GoogleAppsCardV1Widgets buildGoogleAppsCardV1Widgets() { buildCounterGoogleAppsCardV1Widgets++; if (buildCounterGoogleAppsCardV1Widgets < 3) { o.buttonList = buildGoogleAppsCardV1ButtonList(); + o.chipList = buildGoogleAppsCardV1ChipList(); o.dateTimePicker = buildGoogleAppsCardV1DateTimePicker(); o.decoratedText = buildGoogleAppsCardV1DecoratedText(); o.image = buildGoogleAppsCardV1Image(); @@ -1996,6 +2267,7 @@ void checkGoogleAppsCardV1Widgets(api.GoogleAppsCardV1Widgets o) { buildCounterGoogleAppsCardV1Widgets++; if (buildCounterGoogleAppsCardV1Widgets < 3) { checkGoogleAppsCardV1ButtonList(o.buttonList!); + checkGoogleAppsCardV1ChipList(o.chipList!); checkGoogleAppsCardV1DateTimePicker(o.dateTimePicker!); checkGoogleAppsCardV1DecoratedText(o.decoratedText!); checkGoogleAppsCardV1Image(o.image!); @@ -2158,12 +2430,12 @@ void checkKeyValue(api.KeyValue o) { buildCounterKeyValue--; } -core.List buildUnnamed13() => [ +core.List buildUnnamed18() => [ buildMembership(), buildMembership(), ]; -void checkUnnamed13(core.List o) { +void checkUnnamed18(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkMembership(o[0]); checkMembership(o[1]); @@ -2174,7 +2446,7 @@ api.ListMembershipsResponse buildListMembershipsResponse() { final o = api.ListMembershipsResponse(); buildCounterListMembershipsResponse++; if (buildCounterListMembershipsResponse < 3) { - o.memberships = buildUnnamed13(); + o.memberships = buildUnnamed18(); o.nextPageToken = 'foo'; } buildCounterListMembershipsResponse--; @@ -2184,7 +2456,7 @@ api.ListMembershipsResponse buildListMembershipsResponse() { void checkListMembershipsResponse(api.ListMembershipsResponse o) { buildCounterListMembershipsResponse++; if (buildCounterListMembershipsResponse < 3) { - checkUnnamed13(o.memberships!); + checkUnnamed18(o.memberships!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -2193,12 +2465,12 @@ void checkListMembershipsResponse(api.ListMembershipsResponse o) { buildCounterListMembershipsResponse--; } -core.List buildUnnamed14() => [ +core.List buildUnnamed19() => [ buildMessage(), buildMessage(), ]; -void checkUnnamed14(core.List o) { +void checkUnnamed19(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkMessage(o[0]); checkMessage(o[1]); @@ -2209,7 +2481,7 @@ api.ListMessagesResponse buildListMessagesResponse() { final o = api.ListMessagesResponse(); buildCounterListMessagesResponse++; if (buildCounterListMessagesResponse < 3) { - o.messages = buildUnnamed14(); + o.messages = buildUnnamed19(); o.nextPageToken = 'foo'; } buildCounterListMessagesResponse--; @@ -2219,7 +2491,7 @@ api.ListMessagesResponse buildListMessagesResponse() { void checkListMessagesResponse(api.ListMessagesResponse o) { buildCounterListMessagesResponse++; if (buildCounterListMessagesResponse < 3) { - checkUnnamed14(o.messages!); + checkUnnamed19(o.messages!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -2228,12 +2500,12 @@ void checkListMessagesResponse(api.ListMessagesResponse o) { buildCounterListMessagesResponse--; } -core.List buildUnnamed15() => [ +core.List buildUnnamed20() => [ buildReaction(), buildReaction(), ]; -void checkUnnamed15(core.List o) { +void checkUnnamed20(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkReaction(o[0]); checkReaction(o[1]); @@ -2245,7 +2517,7 
@@ api.ListReactionsResponse buildListReactionsResponse() { buildCounterListReactionsResponse++; if (buildCounterListReactionsResponse < 3) { o.nextPageToken = 'foo'; - o.reactions = buildUnnamed15(); + o.reactions = buildUnnamed20(); } buildCounterListReactionsResponse--; return o; @@ -2258,17 +2530,17 @@ void checkListReactionsResponse(api.ListReactionsResponse o) { o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed15(o.reactions!); + checkUnnamed20(o.reactions!); } buildCounterListReactionsResponse--; } -core.List buildUnnamed16() => [ +core.List buildUnnamed21() => [ buildSpaceEvent(), buildSpaceEvent(), ]; -void checkUnnamed16(core.List o) { +void checkUnnamed21(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkSpaceEvent(o[0]); checkSpaceEvent(o[1]); @@ -2280,7 +2552,7 @@ api.ListSpaceEventsResponse buildListSpaceEventsResponse() { buildCounterListSpaceEventsResponse++; if (buildCounterListSpaceEventsResponse < 3) { o.nextPageToken = 'foo'; - o.spaceEvents = buildUnnamed16(); + o.spaceEvents = buildUnnamed21(); } buildCounterListSpaceEventsResponse--; return o; @@ -2293,17 +2565,17 @@ void checkListSpaceEventsResponse(api.ListSpaceEventsResponse o) { o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed16(o.spaceEvents!); + checkUnnamed21(o.spaceEvents!); } buildCounterListSpaceEventsResponse--; } -core.List buildUnnamed17() => [ +core.List buildUnnamed22() => [ buildSpace(), buildSpace(), ]; -void checkUnnamed17(core.List o) { +void checkUnnamed22(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkSpace(o[0]); checkSpace(o[1]); @@ -2315,7 +2587,7 @@ api.ListSpacesResponse buildListSpacesResponse() { buildCounterListSpacesResponse++; if (buildCounterListSpacesResponse < 3) { o.nextPageToken = 'foo'; - o.spaces = buildUnnamed17(); + o.spaces = buildUnnamed22(); } buildCounterListSpacesResponse--; return o; @@ -2328,7 +2600,7 @@ void checkListSpacesResponse(api.ListSpacesResponse o) { o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed17(o.spaces!); + checkUnnamed22(o.spaces!); } buildCounterListSpacesResponse--; } @@ -2423,12 +2695,12 @@ void checkMembership(api.Membership o) { buildCounterMembership--; } -core.List buildUnnamed18() => [ +core.List buildUnnamed23() => [ buildMembershipCreatedEventData(), buildMembershipCreatedEventData(), ]; -void checkUnnamed18(core.List o) { +void checkUnnamed23(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkMembershipCreatedEventData(o[0]); checkMembershipCreatedEventData(o[1]); @@ -2439,7 +2711,7 @@ api.MembershipBatchCreatedEventData buildMembershipBatchCreatedEventData() { final o = api.MembershipBatchCreatedEventData(); buildCounterMembershipBatchCreatedEventData++; if (buildCounterMembershipBatchCreatedEventData < 3) { - o.memberships = buildUnnamed18(); + o.memberships = buildUnnamed23(); } buildCounterMembershipBatchCreatedEventData--; return o; @@ -2449,17 +2721,17 @@ void checkMembershipBatchCreatedEventData( api.MembershipBatchCreatedEventData o) { buildCounterMembershipBatchCreatedEventData++; if (buildCounterMembershipBatchCreatedEventData < 3) { - checkUnnamed18(o.memberships!); + checkUnnamed23(o.memberships!); } buildCounterMembershipBatchCreatedEventData--; } -core.List buildUnnamed19() => [ +core.List buildUnnamed24() => [ buildMembershipDeletedEventData(), buildMembershipDeletedEventData(), ]; -void checkUnnamed19(core.List o) { +void checkUnnamed24(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkMembershipDeletedEventData(o[0]); 
checkMembershipDeletedEventData(o[1]); @@ -2470,7 +2742,7 @@ api.MembershipBatchDeletedEventData buildMembershipBatchDeletedEventData() { final o = api.MembershipBatchDeletedEventData(); buildCounterMembershipBatchDeletedEventData++; if (buildCounterMembershipBatchDeletedEventData < 3) { - o.memberships = buildUnnamed19(); + o.memberships = buildUnnamed24(); } buildCounterMembershipBatchDeletedEventData--; return o; @@ -2480,17 +2752,17 @@ void checkMembershipBatchDeletedEventData( api.MembershipBatchDeletedEventData o) { buildCounterMembershipBatchDeletedEventData++; if (buildCounterMembershipBatchDeletedEventData < 3) { - checkUnnamed19(o.memberships!); + checkUnnamed24(o.memberships!); } buildCounterMembershipBatchDeletedEventData--; } -core.List buildUnnamed20() => [ +core.List buildUnnamed25() => [ buildMembershipUpdatedEventData(), buildMembershipUpdatedEventData(), ]; -void checkUnnamed20(core.List o) { +void checkUnnamed25(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkMembershipUpdatedEventData(o[0]); checkMembershipUpdatedEventData(o[1]); @@ -2501,7 +2773,7 @@ api.MembershipBatchUpdatedEventData buildMembershipBatchUpdatedEventData() { final o = api.MembershipBatchUpdatedEventData(); buildCounterMembershipBatchUpdatedEventData++; if (buildCounterMembershipBatchUpdatedEventData < 3) { - o.memberships = buildUnnamed20(); + o.memberships = buildUnnamed25(); } buildCounterMembershipBatchUpdatedEventData--; return o; @@ -2511,7 +2783,7 @@ void checkMembershipBatchUpdatedEventData( api.MembershipBatchUpdatedEventData o) { buildCounterMembershipBatchUpdatedEventData++; if (buildCounterMembershipBatchUpdatedEventData < 3) { - checkUnnamed20(o.memberships!); + checkUnnamed25(o.memberships!); } buildCounterMembershipBatchUpdatedEventData--; } @@ -2600,78 +2872,78 @@ void checkMembershipUpdatedEventData(api.MembershipUpdatedEventData o) { buildCounterMembershipUpdatedEventData--; } -core.List buildUnnamed21() => [ +core.List buildUnnamed26() => [ buildAccessoryWidget(), buildAccessoryWidget(), ]; -void checkUnnamed21(core.List o) { +void checkUnnamed26(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkAccessoryWidget(o[0]); checkAccessoryWidget(o[1]); } -core.List buildUnnamed22() => [ +core.List buildUnnamed27() => [ buildAnnotation(), buildAnnotation(), ]; -void checkUnnamed22(core.List o) { +void checkUnnamed27(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkAnnotation(o[0]); checkAnnotation(o[1]); } -core.List buildUnnamed23() => [ +core.List buildUnnamed28() => [ buildAttachedGif(), buildAttachedGif(), ]; -void checkUnnamed23(core.List o) { +void checkUnnamed28(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkAttachedGif(o[0]); checkAttachedGif(o[1]); } -core.List buildUnnamed24() => [ +core.List buildUnnamed29() => [ buildAttachment(), buildAttachment(), ]; -void checkUnnamed24(core.List o) { +void checkUnnamed29(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkAttachment(o[0]); checkAttachment(o[1]); } -core.List buildUnnamed25() => [ +core.List buildUnnamed30() => [ buildCard(), buildCard(), ]; -void checkUnnamed25(core.List o) { +void checkUnnamed30(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkCard(o[0]); checkCard(o[1]); } -core.List buildUnnamed26() => [ +core.List buildUnnamed31() => [ buildCardWithId(), buildCardWithId(), ]; -void checkUnnamed26(core.List o) { +void checkUnnamed31(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkCardWithId(o[0]); checkCardWithId(o[1]); } 
-core.List buildUnnamed27() => [ +core.List buildUnnamed32() => [ buildEmojiReactionSummary(), buildEmojiReactionSummary(), ]; -void checkUnnamed27(core.List o) { +void checkUnnamed32(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkEmojiReactionSummary(o[0]); checkEmojiReactionSummary(o[1]); @@ -2682,19 +2954,19 @@ api.Message buildMessage() { final o = api.Message(); buildCounterMessage++; if (buildCounterMessage < 3) { - o.accessoryWidgets = buildUnnamed21(); + o.accessoryWidgets = buildUnnamed26(); o.actionResponse = buildActionResponse(); - o.annotations = buildUnnamed22(); + o.annotations = buildUnnamed27(); o.argumentText = 'foo'; - o.attachedGifs = buildUnnamed23(); - o.attachment = buildUnnamed24(); - o.cards = buildUnnamed25(); - o.cardsV2 = buildUnnamed26(); + o.attachedGifs = buildUnnamed28(); + o.attachment = buildUnnamed29(); + o.cards = buildUnnamed30(); + o.cardsV2 = buildUnnamed31(); o.clientAssignedMessageId = 'foo'; o.createTime = 'foo'; o.deleteTime = 'foo'; o.deletionMetadata = buildDeletionMetadata(); - o.emojiReactionSummaries = buildUnnamed27(); + o.emojiReactionSummaries = buildUnnamed32(); o.fallbackText = 'foo'; o.formattedText = 'foo'; o.lastUpdateTime = 'foo'; @@ -2716,17 +2988,17 @@ api.Message buildMessage() { void checkMessage(api.Message o) { buildCounterMessage++; if (buildCounterMessage < 3) { - checkUnnamed21(o.accessoryWidgets!); + checkUnnamed26(o.accessoryWidgets!); checkActionResponse(o.actionResponse!); - checkUnnamed22(o.annotations!); + checkUnnamed27(o.annotations!); unittest.expect( o.argumentText!, unittest.equals('foo'), ); - checkUnnamed23(o.attachedGifs!); - checkUnnamed24(o.attachment!); - checkUnnamed25(o.cards!); - checkUnnamed26(o.cardsV2!); + checkUnnamed28(o.attachedGifs!); + checkUnnamed29(o.attachment!); + checkUnnamed30(o.cards!); + checkUnnamed31(o.cardsV2!); unittest.expect( o.clientAssignedMessageId!, unittest.equals('foo'), @@ -2740,7 +3012,7 @@ void checkMessage(api.Message o) { unittest.equals('foo'), ); checkDeletionMetadata(o.deletionMetadata!); - checkUnnamed27(o.emojiReactionSummaries!); + checkUnnamed32(o.emojiReactionSummaries!); unittest.expect( o.fallbackText!, unittest.equals('foo'), @@ -2773,12 +3045,12 @@ void checkMessage(api.Message o) { buildCounterMessage--; } -core.List buildUnnamed28() => [ +core.List buildUnnamed33() => [ buildMessageCreatedEventData(), buildMessageCreatedEventData(), ]; -void checkUnnamed28(core.List o) { +void checkUnnamed33(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkMessageCreatedEventData(o[0]); checkMessageCreatedEventData(o[1]); @@ -2789,7 +3061,7 @@ api.MessageBatchCreatedEventData buildMessageBatchCreatedEventData() { final o = api.MessageBatchCreatedEventData(); buildCounterMessageBatchCreatedEventData++; if (buildCounterMessageBatchCreatedEventData < 3) { - o.messages = buildUnnamed28(); + o.messages = buildUnnamed33(); } buildCounterMessageBatchCreatedEventData--; return o; @@ -2798,17 +3070,17 @@ api.MessageBatchCreatedEventData buildMessageBatchCreatedEventData() { void checkMessageBatchCreatedEventData(api.MessageBatchCreatedEventData o) { buildCounterMessageBatchCreatedEventData++; if (buildCounterMessageBatchCreatedEventData < 3) { - checkUnnamed28(o.messages!); + checkUnnamed33(o.messages!); } buildCounterMessageBatchCreatedEventData--; } -core.List buildUnnamed29() => [ +core.List buildUnnamed34() => [ buildMessageDeletedEventData(), buildMessageDeletedEventData(), ]; -void checkUnnamed29(core.List o) { +void checkUnnamed34(core.List o) { 
unittest.expect(o, unittest.hasLength(2)); checkMessageDeletedEventData(o[0]); checkMessageDeletedEventData(o[1]); @@ -2819,7 +3091,7 @@ api.MessageBatchDeletedEventData buildMessageBatchDeletedEventData() { final o = api.MessageBatchDeletedEventData(); buildCounterMessageBatchDeletedEventData++; if (buildCounterMessageBatchDeletedEventData < 3) { - o.messages = buildUnnamed29(); + o.messages = buildUnnamed34(); } buildCounterMessageBatchDeletedEventData--; return o; @@ -2828,17 +3100,17 @@ api.MessageBatchDeletedEventData buildMessageBatchDeletedEventData() { void checkMessageBatchDeletedEventData(api.MessageBatchDeletedEventData o) { buildCounterMessageBatchDeletedEventData++; if (buildCounterMessageBatchDeletedEventData < 3) { - checkUnnamed29(o.messages!); + checkUnnamed34(o.messages!); } buildCounterMessageBatchDeletedEventData--; } -core.List buildUnnamed30() => [ +core.List buildUnnamed35() => [ buildMessageUpdatedEventData(), buildMessageUpdatedEventData(), ]; -void checkUnnamed30(core.List o) { +void checkUnnamed35(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkMessageUpdatedEventData(o[0]); checkMessageUpdatedEventData(o[1]); @@ -2849,7 +3121,7 @@ api.MessageBatchUpdatedEventData buildMessageBatchUpdatedEventData() { final o = api.MessageBatchUpdatedEventData(); buildCounterMessageBatchUpdatedEventData++; if (buildCounterMessageBatchUpdatedEventData < 3) { - o.messages = buildUnnamed30(); + o.messages = buildUnnamed35(); } buildCounterMessageBatchUpdatedEventData--; return o; @@ -2858,7 +3130,7 @@ api.MessageBatchUpdatedEventData buildMessageBatchUpdatedEventData() { void checkMessageBatchUpdatedEventData(api.MessageBatchUpdatedEventData o) { buildCounterMessageBatchUpdatedEventData++; if (buildCounterMessageBatchUpdatedEventData < 3) { - checkUnnamed30(o.messages!); + checkUnnamed35(o.messages!); } buildCounterMessageBatchUpdatedEventData--; } @@ -3070,12 +3342,12 @@ void checkReaction(api.Reaction o) { buildCounterReaction--; } -core.List buildUnnamed31() => [ +core.List buildUnnamed36() => [ buildReactionCreatedEventData(), buildReactionCreatedEventData(), ]; -void checkUnnamed31(core.List o) { +void checkUnnamed36(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkReactionCreatedEventData(o[0]); checkReactionCreatedEventData(o[1]); @@ -3086,7 +3358,7 @@ api.ReactionBatchCreatedEventData buildReactionBatchCreatedEventData() { final o = api.ReactionBatchCreatedEventData(); buildCounterReactionBatchCreatedEventData++; if (buildCounterReactionBatchCreatedEventData < 3) { - o.reactions = buildUnnamed31(); + o.reactions = buildUnnamed36(); } buildCounterReactionBatchCreatedEventData--; return o; @@ -3095,17 +3367,17 @@ api.ReactionBatchCreatedEventData buildReactionBatchCreatedEventData() { void checkReactionBatchCreatedEventData(api.ReactionBatchCreatedEventData o) { buildCounterReactionBatchCreatedEventData++; if (buildCounterReactionBatchCreatedEventData < 3) { - checkUnnamed31(o.reactions!); + checkUnnamed36(o.reactions!); } buildCounterReactionBatchCreatedEventData--; } -core.List buildUnnamed32() => [ +core.List buildUnnamed37() => [ buildReactionDeletedEventData(), buildReactionDeletedEventData(), ]; -void checkUnnamed32(core.List o) { +void checkUnnamed37(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkReactionDeletedEventData(o[0]); checkReactionDeletedEventData(o[1]); @@ -3116,7 +3388,7 @@ api.ReactionBatchDeletedEventData buildReactionBatchDeletedEventData() { final o = api.ReactionBatchDeletedEventData(); 
buildCounterReactionBatchDeletedEventData++; if (buildCounterReactionBatchDeletedEventData < 3) { - o.reactions = buildUnnamed32(); + o.reactions = buildUnnamed37(); } buildCounterReactionBatchDeletedEventData--; return o; @@ -3125,7 +3397,7 @@ api.ReactionBatchDeletedEventData buildReactionBatchDeletedEventData() { void checkReactionBatchDeletedEventData(api.ReactionBatchDeletedEventData o) { buildCounterReactionBatchDeletedEventData++; if (buildCounterReactionBatchDeletedEventData < 3) { - checkUnnamed32(o.reactions!); + checkUnnamed37(o.reactions!); } buildCounterReactionBatchDeletedEventData--; } @@ -3199,12 +3471,12 @@ void checkRichLinkMetadata(api.RichLinkMetadata o) { buildCounterRichLinkMetadata--; } -core.List buildUnnamed33() => [ +core.List buildUnnamed38() => [ buildSpace(), buildSpace(), ]; -void checkUnnamed33(core.List o) { +void checkUnnamed38(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkSpace(o[0]); checkSpace(o[1]); @@ -3216,7 +3488,7 @@ api.SearchSpacesResponse buildSearchSpacesResponse() { buildCounterSearchSpacesResponse++; if (buildCounterSearchSpacesResponse < 3) { o.nextPageToken = 'foo'; - o.spaces = buildUnnamed33(); + o.spaces = buildUnnamed38(); o.totalSize = 42; } buildCounterSearchSpacesResponse--; @@ -3230,7 +3502,7 @@ void checkSearchSpacesResponse(api.SearchSpacesResponse o) { o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed33(o.spaces!); + checkUnnamed38(o.spaces!); unittest.expect( o.totalSize!, unittest.equals(42), @@ -3239,12 +3511,12 @@ void checkSearchSpacesResponse(api.SearchSpacesResponse o) { buildCounterSearchSpacesResponse--; } -core.List buildUnnamed34() => [ +core.List buildUnnamed39() => [ buildWidgetMarkup(), buildWidgetMarkup(), ]; -void checkUnnamed34(core.List o) { +void checkUnnamed39(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkWidgetMarkup(o[0]); checkWidgetMarkup(o[1]); @@ -3256,7 +3528,7 @@ api.Section buildSection() { buildCounterSection++; if (buildCounterSection < 3) { o.header = 'foo'; - o.widgets = buildUnnamed34(); + o.widgets = buildUnnamed39(); } buildCounterSection--; return o; @@ -3269,17 +3541,17 @@ void checkSection(api.Section o) { o.header!, unittest.equals('foo'), ); - checkUnnamed34(o.widgets!); + checkUnnamed39(o.widgets!); } buildCounterSection--; } -core.List buildUnnamed35() => [ +core.List buildUnnamed40() => [ buildGoogleAppsCardV1SelectionItem(), buildGoogleAppsCardV1SelectionItem(), ]; -void checkUnnamed35(core.List o) { +void checkUnnamed40(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleAppsCardV1SelectionItem(o[0]); checkGoogleAppsCardV1SelectionItem(o[1]); @@ -3290,7 +3562,7 @@ api.SelectionItems buildSelectionItems() { final o = api.SelectionItems(); buildCounterSelectionItems++; if (buildCounterSelectionItems < 3) { - o.items = buildUnnamed35(); + o.items = buildUnnamed40(); } buildCounterSelectionItems--; return o; @@ -3299,17 +3571,17 @@ api.SelectionItems buildSelectionItems() { void checkSelectionItems(api.SelectionItems o) { buildCounterSelectionItems++; if (buildCounterSelectionItems < 3) { - checkUnnamed35(o.items!); + checkUnnamed40(o.items!); } buildCounterSelectionItems--; } -core.List buildUnnamed36() => [ +core.List buildUnnamed41() => [ buildMembership(), buildMembership(), ]; -void checkUnnamed36(core.List o) { +void checkUnnamed41(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkMembership(o[0]); checkMembership(o[1]); @@ -3320,7 +3592,7 @@ api.SetUpSpaceRequest buildSetUpSpaceRequest() { final o = 
api.SetUpSpaceRequest(); buildCounterSetUpSpaceRequest++; if (buildCounterSetUpSpaceRequest < 3) { - o.memberships = buildUnnamed36(); + o.memberships = buildUnnamed41(); o.requestId = 'foo'; o.space = buildSpace(); } @@ -3331,7 +3603,7 @@ api.SetUpSpaceRequest buildSetUpSpaceRequest() { void checkSetUpSpaceRequest(api.SetUpSpaceRequest o) { buildCounterSetUpSpaceRequest++; if (buildCounterSetUpSpaceRequest < 3) { - checkUnnamed36(o.memberships!); + checkUnnamed41(o.memberships!); unittest.expect( o.requestId!, unittest.equals('foo'), @@ -3410,6 +3682,7 @@ api.Space buildSpace() { o.displayName = 'foo'; o.externalUserAllowed = true; o.importMode = true; + o.importModeExpireTime = 'foo'; o.lastActiveTime = 'foo'; o.membershipCount = buildMembershipCount(); o.name = 'foo'; @@ -3443,6 +3716,10 @@ void checkSpace(api.Space o) { ); unittest.expect(o.externalUserAllowed!, unittest.isTrue); unittest.expect(o.importMode!, unittest.isTrue); + unittest.expect( + o.importModeExpireTime!, + unittest.equals('foo'), + ); unittest.expect( o.lastActiveTime!, unittest.equals('foo'), @@ -3484,12 +3761,12 @@ void checkSpace(api.Space o) { buildCounterSpace--; } -core.List buildUnnamed37() => [ +core.List buildUnnamed42() => [ buildSpaceUpdatedEventData(), buildSpaceUpdatedEventData(), ]; -void checkUnnamed37(core.List o) { +void checkUnnamed42(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkSpaceUpdatedEventData(o[0]); checkSpaceUpdatedEventData(o[1]); @@ -3500,7 +3777,7 @@ api.SpaceBatchUpdatedEventData buildSpaceBatchUpdatedEventData() { final o = api.SpaceBatchUpdatedEventData(); buildCounterSpaceBatchUpdatedEventData++; if (buildCounterSpaceBatchUpdatedEventData < 3) { - o.spaces = buildUnnamed37(); + o.spaces = buildUnnamed42(); } buildCounterSpaceBatchUpdatedEventData--; return o; @@ -3509,7 +3786,7 @@ api.SpaceBatchUpdatedEventData buildSpaceBatchUpdatedEventData() { void checkSpaceBatchUpdatedEventData(api.SpaceBatchUpdatedEventData o) { buildCounterSpaceBatchUpdatedEventData++; if (buildCounterSpaceBatchUpdatedEventData < 3) { - checkUnnamed37(o.spaces!); + checkUnnamed42(o.spaces!); } buildCounterSpaceBatchUpdatedEventData--; } @@ -3902,12 +4179,12 @@ void checkUserMentionMetadata(api.UserMentionMetadata o) { buildCounterUserMentionMetadata--; } -core.List buildUnnamed38() => [ +core.List buildUnnamed43() => [ buildButton(), buildButton(), ]; -void checkUnnamed38(core.List o) { +void checkUnnamed43(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkButton(o[0]); checkButton(o[1]); @@ -3918,7 +4195,7 @@ api.WidgetMarkup buildWidgetMarkup() { final o = api.WidgetMarkup(); buildCounterWidgetMarkup++; if (buildCounterWidgetMarkup < 3) { - o.buttons = buildUnnamed38(); + o.buttons = buildUnnamed43(); o.image = buildImage(); o.keyValue = buildKeyValue(); o.textParagraph = buildTextParagraph(); @@ -3930,7 +4207,7 @@ api.WidgetMarkup buildWidgetMarkup() { void checkWidgetMarkup(api.WidgetMarkup o) { buildCounterWidgetMarkup++; if (buildCounterWidgetMarkup < 3) { - checkUnnamed38(o.buttons!); + checkUnnamed43(o.buttons!); checkImage(o.image!); checkKeyValue(o.keyValue!); checkTextParagraph(o.textParagraph!); @@ -4319,6 +4596,56 @@ void main() { }); }); + unittest.group('obj-schema-GoogleAppsCardV1Carousel', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleAppsCardV1Carousel(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleAppsCardV1Carousel.fromJson( + oJson as core.Map); + checkGoogleAppsCardV1Carousel(od); + }); + 
}); + + unittest.group('obj-schema-GoogleAppsCardV1CarouselCard', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleAppsCardV1CarouselCard(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleAppsCardV1CarouselCard.fromJson( + oJson as core.Map); + checkGoogleAppsCardV1CarouselCard(od); + }); + }); + + unittest.group('obj-schema-GoogleAppsCardV1Chip', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleAppsCardV1Chip(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleAppsCardV1Chip.fromJson( + oJson as core.Map); + checkGoogleAppsCardV1Chip(od); + }); + }); + + unittest.group('obj-schema-GoogleAppsCardV1ChipList', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleAppsCardV1ChipList(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleAppsCardV1ChipList.fromJson( + oJson as core.Map); + checkGoogleAppsCardV1ChipList(od); + }); + }); + + unittest.group('obj-schema-GoogleAppsCardV1CollapseControl', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleAppsCardV1CollapseControl(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleAppsCardV1CollapseControl.fromJson( + oJson as core.Map); + checkGoogleAppsCardV1CollapseControl(od); + }); + }); + unittest.group('obj-schema-GoogleAppsCardV1Column', () { unittest.test('to-json--from-json', () async { final o = buildGoogleAppsCardV1Column(); @@ -4439,6 +4766,16 @@ void main() { }); }); + unittest.group('obj-schema-GoogleAppsCardV1NestedWidget', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleAppsCardV1NestedWidget(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleAppsCardV1NestedWidget.fromJson( + oJson as core.Map); + checkGoogleAppsCardV1NestedWidget(od); + }); + }); + unittest.group('obj-schema-GoogleAppsCardV1OnClick', () { unittest.test('to-json--from-json', () async { final o = buildGoogleAppsCardV1OnClick(); @@ -4459,6 +4796,26 @@ void main() { }); }); + unittest.group('obj-schema-GoogleAppsCardV1OverflowMenu', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleAppsCardV1OverflowMenu(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleAppsCardV1OverflowMenu.fromJson( + oJson as core.Map); + checkGoogleAppsCardV1OverflowMenu(od); + }); + }); + + unittest.group('obj-schema-GoogleAppsCardV1OverflowMenuItem', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleAppsCardV1OverflowMenuItem(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleAppsCardV1OverflowMenuItem.fromJson( + oJson as core.Map); + checkGoogleAppsCardV1OverflowMenuItem(od); + }); + }); + unittest.group('obj-schema-GoogleAppsCardV1PlatformDataSource', () { unittest.test('to-json--from-json', () async { final o = buildGoogleAppsCardV1PlatformDataSource(); diff --git a/generated/googleapis/test/chromemanagement/v1_test.dart b/generated/googleapis/test/chromemanagement/v1_test.dart index 23b9b2dfc..d4c1a9867 100644 --- a/generated/googleapis/test/chromemanagement/v1_test.dart +++ b/generated/googleapis/test/chromemanagement/v1_test.dart @@ -2093,6 +2093,7 @@ api.GoogleChromeManagementV1InstalledApp o.homepageUri = 'foo'; o.osUserCount = 'foo'; o.permissions = buildUnnamed24(); + o.riskAssessment = buildGoogleChromeManagementV1RiskAssessmentData(); } 
buildCounterGoogleChromeManagementV1InstalledApp--; return o; @@ -2140,6 +2141,7 @@ void checkGoogleChromeManagementV1InstalledApp( unittest.equals('foo'), ); checkUnnamed24(o.permissions!); + checkGoogleChromeManagementV1RiskAssessmentData(o.riskAssessment!); } buildCounterGoogleChromeManagementV1InstalledApp--; } @@ -2843,6 +2845,109 @@ void checkGoogleChromeManagementV1PrinterReport( buildCounterGoogleChromeManagementV1PrinterReport--; } +core.int buildCounterGoogleChromeManagementV1RiskAssessment = 0; +api.GoogleChromeManagementV1RiskAssessment + buildGoogleChromeManagementV1RiskAssessment() { + final o = api.GoogleChromeManagementV1RiskAssessment(); + buildCounterGoogleChromeManagementV1RiskAssessment++; + if (buildCounterGoogleChromeManagementV1RiskAssessment < 3) { + o.assessment = 'foo'; + o.detailsUrl = 'foo'; + o.version = 'foo'; + } + buildCounterGoogleChromeManagementV1RiskAssessment--; + return o; +} + +void checkGoogleChromeManagementV1RiskAssessment( + api.GoogleChromeManagementV1RiskAssessment o) { + buildCounterGoogleChromeManagementV1RiskAssessment++; + if (buildCounterGoogleChromeManagementV1RiskAssessment < 3) { + unittest.expect( + o.assessment!, + unittest.equals('foo'), + ); + unittest.expect( + o.detailsUrl!, + unittest.equals('foo'), + ); + unittest.expect( + o.version!, + unittest.equals('foo'), + ); + } + buildCounterGoogleChromeManagementV1RiskAssessment--; +} + +core.List buildUnnamed31() => [ + buildGoogleChromeManagementV1RiskAssessmentEntry(), + buildGoogleChromeManagementV1RiskAssessmentEntry(), + ]; + +void checkUnnamed31( + core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkGoogleChromeManagementV1RiskAssessmentEntry(o[0]); + checkGoogleChromeManagementV1RiskAssessmentEntry(o[1]); +} + +core.int buildCounterGoogleChromeManagementV1RiskAssessmentData = 0; +api.GoogleChromeManagementV1RiskAssessmentData + buildGoogleChromeManagementV1RiskAssessmentData() { + final o = api.GoogleChromeManagementV1RiskAssessmentData(); + buildCounterGoogleChromeManagementV1RiskAssessmentData++; + if (buildCounterGoogleChromeManagementV1RiskAssessmentData < 3) { + o.entries = buildUnnamed31(); + o.overallRiskLevel = 'foo'; + } + buildCounterGoogleChromeManagementV1RiskAssessmentData--; + return o; +} + +void checkGoogleChromeManagementV1RiskAssessmentData( + api.GoogleChromeManagementV1RiskAssessmentData o) { + buildCounterGoogleChromeManagementV1RiskAssessmentData++; + if (buildCounterGoogleChromeManagementV1RiskAssessmentData < 3) { + checkUnnamed31(o.entries!); + unittest.expect( + o.overallRiskLevel!, + unittest.equals('foo'), + ); + } + buildCounterGoogleChromeManagementV1RiskAssessmentData--; +} + +core.int buildCounterGoogleChromeManagementV1RiskAssessmentEntry = 0; +api.GoogleChromeManagementV1RiskAssessmentEntry + buildGoogleChromeManagementV1RiskAssessmentEntry() { + final o = api.GoogleChromeManagementV1RiskAssessmentEntry(); + buildCounterGoogleChromeManagementV1RiskAssessmentEntry++; + if (buildCounterGoogleChromeManagementV1RiskAssessmentEntry < 3) { + o.provider = 'foo'; + o.riskAssessment = buildGoogleChromeManagementV1RiskAssessment(); + o.riskLevel = 'foo'; + } + buildCounterGoogleChromeManagementV1RiskAssessmentEntry--; + return o; +} + +void checkGoogleChromeManagementV1RiskAssessmentEntry( + api.GoogleChromeManagementV1RiskAssessmentEntry o) { + buildCounterGoogleChromeManagementV1RiskAssessmentEntry++; + if (buildCounterGoogleChromeManagementV1RiskAssessmentEntry < 3) { + unittest.expect( + o.provider!, + unittest.equals('foo'), + ); + 
checkGoogleChromeManagementV1RiskAssessment(o.riskAssessment!); + unittest.expect( + o.riskLevel!, + unittest.equals('foo'), + ); + } + buildCounterGoogleChromeManagementV1RiskAssessmentEntry--; +} + core.int buildCounterGoogleChromeManagementV1RuntimeCountersReport = 0; api.GoogleChromeManagementV1RuntimeCountersReport buildGoogleChromeManagementV1RuntimeCountersReport() { @@ -2887,13 +2992,13 @@ void checkGoogleChromeManagementV1RuntimeCountersReport( buildCounterGoogleChromeManagementV1RuntimeCountersReport--; } -core.List buildUnnamed31() => +core.List buildUnnamed32() => [ buildGoogleChromeManagementV1StorageInfoDiskVolume(), buildGoogleChromeManagementV1StorageInfoDiskVolume(), ]; -void checkUnnamed31( +void checkUnnamed32( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleChromeManagementV1StorageInfoDiskVolume(o[0]); @@ -2908,7 +3013,7 @@ api.GoogleChromeManagementV1StorageInfo if (buildCounterGoogleChromeManagementV1StorageInfo < 3) { o.availableDiskBytes = 'foo'; o.totalDiskBytes = 'foo'; - o.volume = buildUnnamed31(); + o.volume = buildUnnamed32(); } buildCounterGoogleChromeManagementV1StorageInfo--; return o; @@ -2926,7 +3031,7 @@ void checkGoogleChromeManagementV1StorageInfo( o.totalDiskBytes!, unittest.equals('foo'), ); - checkUnnamed31(o.volume!); + checkUnnamed32(o.volume!); } buildCounterGoogleChromeManagementV1StorageInfo--; } @@ -2965,12 +3070,12 @@ void checkGoogleChromeManagementV1StorageInfoDiskVolume( buildCounterGoogleChromeManagementV1StorageInfoDiskVolume--; } -core.List buildUnnamed32() => [ +core.List buildUnnamed33() => [ buildGoogleChromeManagementV1DiskInfo(), buildGoogleChromeManagementV1DiskInfo(), ]; -void checkUnnamed32(core.List o) { +void checkUnnamed33(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleChromeManagementV1DiskInfo(o[0]); checkGoogleChromeManagementV1DiskInfo(o[1]); @@ -2982,7 +3087,7 @@ api.GoogleChromeManagementV1StorageStatusReport final o = api.GoogleChromeManagementV1StorageStatusReport(); buildCounterGoogleChromeManagementV1StorageStatusReport++; if (buildCounterGoogleChromeManagementV1StorageStatusReport < 3) { - o.disk = buildUnnamed32(); + o.disk = buildUnnamed33(); o.reportTime = 'foo'; } buildCounterGoogleChromeManagementV1StorageStatusReport--; @@ -2993,7 +3098,7 @@ void checkGoogleChromeManagementV1StorageStatusReport( api.GoogleChromeManagementV1StorageStatusReport o) { buildCounterGoogleChromeManagementV1StorageStatusReport++; if (buildCounterGoogleChromeManagementV1StorageStatusReport < 3) { - checkUnnamed32(o.disk!); + checkUnnamed33(o.disk!); unittest.expect( o.reportTime!, unittest.equals('foo'), @@ -3134,132 +3239,132 @@ void checkGoogleChromeManagementV1TelemetryAudioSevereUnderrunEvent( buildCounterGoogleChromeManagementV1TelemetryAudioSevereUnderrunEvent--; } -core.List buildUnnamed33() => [ +core.List buildUnnamed34() => [ buildGoogleChromeManagementV1AppReport(), buildGoogleChromeManagementV1AppReport(), ]; -void checkUnnamed33(core.List o) { +void checkUnnamed34(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleChromeManagementV1AppReport(o[0]); checkGoogleChromeManagementV1AppReport(o[1]); } -core.List buildUnnamed34() => [ +core.List buildUnnamed35() => [ buildGoogleChromeManagementV1AudioStatusReport(), buildGoogleChromeManagementV1AudioStatusReport(), ]; -void checkUnnamed34( +void checkUnnamed35( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleChromeManagementV1AudioStatusReport(o[0]); 
checkGoogleChromeManagementV1AudioStatusReport(o[1]); } -core.List buildUnnamed35() => [ +core.List buildUnnamed36() => [ buildGoogleChromeManagementV1BatteryInfo(), buildGoogleChromeManagementV1BatteryInfo(), ]; -void checkUnnamed35(core.List o) { +void checkUnnamed36(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleChromeManagementV1BatteryInfo(o[0]); checkGoogleChromeManagementV1BatteryInfo(o[1]); } -core.List buildUnnamed36() => [ +core.List buildUnnamed37() => [ buildGoogleChromeManagementV1BatteryStatusReport(), buildGoogleChromeManagementV1BatteryStatusReport(), ]; -void checkUnnamed36( +void checkUnnamed37( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleChromeManagementV1BatteryStatusReport(o[0]); checkGoogleChromeManagementV1BatteryStatusReport(o[1]); } -core.List buildUnnamed37() => +core.List buildUnnamed38() => [ buildGoogleChromeManagementV1BootPerformanceReport(), buildGoogleChromeManagementV1BootPerformanceReport(), ]; -void checkUnnamed37( +void checkUnnamed38( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleChromeManagementV1BootPerformanceReport(o[0]); checkGoogleChromeManagementV1BootPerformanceReport(o[1]); } -core.List buildUnnamed38() => [ +core.List buildUnnamed39() => [ buildGoogleChromeManagementV1CpuInfo(), buildGoogleChromeManagementV1CpuInfo(), ]; -void checkUnnamed38(core.List o) { +void checkUnnamed39(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleChromeManagementV1CpuInfo(o[0]); checkGoogleChromeManagementV1CpuInfo(o[1]); } -core.List buildUnnamed39() => [ +core.List buildUnnamed40() => [ buildGoogleChromeManagementV1CpuStatusReport(), buildGoogleChromeManagementV1CpuStatusReport(), ]; -void checkUnnamed39(core.List o) { +void checkUnnamed40(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleChromeManagementV1CpuStatusReport(o[0]); checkGoogleChromeManagementV1CpuStatusReport(o[1]); } -core.List buildUnnamed40() => +core.List buildUnnamed41() => [ buildGoogleChromeManagementV1GraphicsStatusReport(), buildGoogleChromeManagementV1GraphicsStatusReport(), ]; -void checkUnnamed40( +void checkUnnamed41( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleChromeManagementV1GraphicsStatusReport(o[0]); checkGoogleChromeManagementV1GraphicsStatusReport(o[1]); } -core.List buildUnnamed41() => +core.List buildUnnamed42() => [ buildGoogleChromeManagementV1HeartbeatStatusReport(), buildGoogleChromeManagementV1HeartbeatStatusReport(), ]; -void checkUnnamed41( +void checkUnnamed42( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleChromeManagementV1HeartbeatStatusReport(o[0]); checkGoogleChromeManagementV1HeartbeatStatusReport(o[1]); } -core.List buildUnnamed42() => +core.List buildUnnamed43() => [ buildGoogleChromeManagementV1KioskAppStatusReport(), buildGoogleChromeManagementV1KioskAppStatusReport(), ]; -void checkUnnamed42( +void checkUnnamed43( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleChromeManagementV1KioskAppStatusReport(o[0]); checkGoogleChromeManagementV1KioskAppStatusReport(o[1]); } -core.List buildUnnamed43() => [ +core.List buildUnnamed44() => [ buildGoogleChromeManagementV1MemoryStatusReport(), buildGoogleChromeManagementV1MemoryStatusReport(), ]; -void checkUnnamed43( +void checkUnnamed44( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleChromeManagementV1MemoryStatusReport(o[0]); @@ -3267,12 +3372,12 @@ void checkUnnamed43( } core.List - buildUnnamed44() => [ + buildUnnamed45() 
=> [ buildGoogleChromeManagementV1NetworkBandwidthReport(), buildGoogleChromeManagementV1NetworkBandwidthReport(), ]; -void checkUnnamed44( +void checkUnnamed45( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleChromeManagementV1NetworkBandwidthReport(o[0]); @@ -3280,84 +3385,84 @@ void checkUnnamed44( } core.List - buildUnnamed45() => [ + buildUnnamed46() => [ buildGoogleChromeManagementV1NetworkDiagnosticsReport(), buildGoogleChromeManagementV1NetworkDiagnosticsReport(), ]; -void checkUnnamed45( +void checkUnnamed46( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleChromeManagementV1NetworkDiagnosticsReport(o[0]); checkGoogleChromeManagementV1NetworkDiagnosticsReport(o[1]); } -core.List buildUnnamed46() => [ +core.List buildUnnamed47() => [ buildGoogleChromeManagementV1NetworkStatusReport(), buildGoogleChromeManagementV1NetworkStatusReport(), ]; -void checkUnnamed46( +void checkUnnamed47( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleChromeManagementV1NetworkStatusReport(o[0]); checkGoogleChromeManagementV1NetworkStatusReport(o[1]); } -core.List buildUnnamed47() => [ +core.List buildUnnamed48() => [ buildGoogleChromeManagementV1OsUpdateStatus(), buildGoogleChromeManagementV1OsUpdateStatus(), ]; -void checkUnnamed47(core.List o) { +void checkUnnamed48(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleChromeManagementV1OsUpdateStatus(o[0]); checkGoogleChromeManagementV1OsUpdateStatus(o[1]); } -core.List buildUnnamed48() => [ +core.List buildUnnamed49() => [ buildGoogleChromeManagementV1PeripheralsReport(), buildGoogleChromeManagementV1PeripheralsReport(), ]; -void checkUnnamed48( +void checkUnnamed49( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleChromeManagementV1PeripheralsReport(o[0]); checkGoogleChromeManagementV1PeripheralsReport(o[1]); } -core.List buildUnnamed49() => +core.List buildUnnamed50() => [ buildGoogleChromeManagementV1RuntimeCountersReport(), buildGoogleChromeManagementV1RuntimeCountersReport(), ]; -void checkUnnamed49( +void checkUnnamed50( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleChromeManagementV1RuntimeCountersReport(o[0]); checkGoogleChromeManagementV1RuntimeCountersReport(o[1]); } -core.List buildUnnamed50() => [ +core.List buildUnnamed51() => [ buildGoogleChromeManagementV1StorageStatusReport(), buildGoogleChromeManagementV1StorageStatusReport(), ]; -void checkUnnamed50( +void checkUnnamed51( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleChromeManagementV1StorageStatusReport(o[0]); checkGoogleChromeManagementV1StorageStatusReport(o[1]); } -core.List buildUnnamed51() => [ +core.List buildUnnamed52() => [ buildGoogleChromeManagementV1ThunderboltInfo(), buildGoogleChromeManagementV1ThunderboltInfo(), ]; -void checkUnnamed51(core.List o) { +void checkUnnamed52(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleChromeManagementV1ThunderboltInfo(o[0]); checkGoogleChromeManagementV1ThunderboltInfo(o[1]); @@ -3369,34 +3474,34 @@ api.GoogleChromeManagementV1TelemetryDevice final o = api.GoogleChromeManagementV1TelemetryDevice(); buildCounterGoogleChromeManagementV1TelemetryDevice++; if (buildCounterGoogleChromeManagementV1TelemetryDevice < 3) { - o.appReport = buildUnnamed33(); - o.audioStatusReport = buildUnnamed34(); - o.batteryInfo = buildUnnamed35(); - o.batteryStatusReport = buildUnnamed36(); - o.bootPerformanceReport = buildUnnamed37(); - o.cpuInfo = buildUnnamed38(); - o.cpuStatusReport = 
buildUnnamed39(); + o.appReport = buildUnnamed34(); + o.audioStatusReport = buildUnnamed35(); + o.batteryInfo = buildUnnamed36(); + o.batteryStatusReport = buildUnnamed37(); + o.bootPerformanceReport = buildUnnamed38(); + o.cpuInfo = buildUnnamed39(); + o.cpuStatusReport = buildUnnamed40(); o.customer = 'foo'; o.deviceId = 'foo'; o.graphicsInfo = buildGoogleChromeManagementV1GraphicsInfo(); - o.graphicsStatusReport = buildUnnamed40(); - o.heartbeatStatusReport = buildUnnamed41(); - o.kioskAppStatusReport = buildUnnamed42(); + o.graphicsStatusReport = buildUnnamed41(); + o.heartbeatStatusReport = buildUnnamed42(); + o.kioskAppStatusReport = buildUnnamed43(); o.memoryInfo = buildGoogleChromeManagementV1MemoryInfo(); - o.memoryStatusReport = buildUnnamed43(); + o.memoryStatusReport = buildUnnamed44(); o.name = 'foo'; - o.networkBandwidthReport = buildUnnamed44(); - o.networkDiagnosticsReport = buildUnnamed45(); + o.networkBandwidthReport = buildUnnamed45(); + o.networkDiagnosticsReport = buildUnnamed46(); o.networkInfo = buildGoogleChromeManagementV1NetworkInfo(); - o.networkStatusReport = buildUnnamed46(); + o.networkStatusReport = buildUnnamed47(); o.orgUnitId = 'foo'; - o.osUpdateStatus = buildUnnamed47(); - o.peripheralsReport = buildUnnamed48(); - o.runtimeCountersReport = buildUnnamed49(); + o.osUpdateStatus = buildUnnamed48(); + o.peripheralsReport = buildUnnamed49(); + o.runtimeCountersReport = buildUnnamed50(); o.serialNumber = 'foo'; o.storageInfo = buildGoogleChromeManagementV1StorageInfo(); - o.storageStatusReport = buildUnnamed50(); - o.thunderboltInfo = buildUnnamed51(); + o.storageStatusReport = buildUnnamed51(); + o.thunderboltInfo = buildUnnamed52(); } buildCounterGoogleChromeManagementV1TelemetryDevice--; return o; @@ -3406,13 +3511,13 @@ void checkGoogleChromeManagementV1TelemetryDevice( api.GoogleChromeManagementV1TelemetryDevice o) { buildCounterGoogleChromeManagementV1TelemetryDevice++; if (buildCounterGoogleChromeManagementV1TelemetryDevice < 3) { - checkUnnamed33(o.appReport!); - checkUnnamed34(o.audioStatusReport!); - checkUnnamed35(o.batteryInfo!); - checkUnnamed36(o.batteryStatusReport!); - checkUnnamed37(o.bootPerformanceReport!); - checkUnnamed38(o.cpuInfo!); - checkUnnamed39(o.cpuStatusReport!); + checkUnnamed34(o.appReport!); + checkUnnamed35(o.audioStatusReport!); + checkUnnamed36(o.batteryInfo!); + checkUnnamed37(o.batteryStatusReport!); + checkUnnamed38(o.bootPerformanceReport!); + checkUnnamed39(o.cpuInfo!); + checkUnnamed40(o.cpuStatusReport!); unittest.expect( o.customer!, unittest.equals('foo'), @@ -3422,33 +3527,33 @@ void checkGoogleChromeManagementV1TelemetryDevice( unittest.equals('foo'), ); checkGoogleChromeManagementV1GraphicsInfo(o.graphicsInfo!); - checkUnnamed40(o.graphicsStatusReport!); - checkUnnamed41(o.heartbeatStatusReport!); - checkUnnamed42(o.kioskAppStatusReport!); + checkUnnamed41(o.graphicsStatusReport!); + checkUnnamed42(o.heartbeatStatusReport!); + checkUnnamed43(o.kioskAppStatusReport!); checkGoogleChromeManagementV1MemoryInfo(o.memoryInfo!); - checkUnnamed43(o.memoryStatusReport!); + checkUnnamed44(o.memoryStatusReport!); unittest.expect( o.name!, unittest.equals('foo'), ); - checkUnnamed44(o.networkBandwidthReport!); - checkUnnamed45(o.networkDiagnosticsReport!); + checkUnnamed45(o.networkBandwidthReport!); + checkUnnamed46(o.networkDiagnosticsReport!); checkGoogleChromeManagementV1NetworkInfo(o.networkInfo!); - checkUnnamed46(o.networkStatusReport!); + checkUnnamed47(o.networkStatusReport!); unittest.expect( o.orgUnitId!, 
unittest.equals('foo'), ); - checkUnnamed47(o.osUpdateStatus!); - checkUnnamed48(o.peripheralsReport!); - checkUnnamed49(o.runtimeCountersReport!); + checkUnnamed48(o.osUpdateStatus!); + checkUnnamed49(o.peripheralsReport!); + checkUnnamed50(o.runtimeCountersReport!); unittest.expect( o.serialNumber!, unittest.equals('foo'), ); checkGoogleChromeManagementV1StorageInfo(o.storageInfo!); - checkUnnamed50(o.storageStatusReport!); - checkUnnamed51(o.thunderboltInfo!); + checkUnnamed51(o.storageStatusReport!); + checkUnnamed52(o.thunderboltInfo!); } buildCounterGoogleChromeManagementV1TelemetryDevice--; } @@ -3552,12 +3657,12 @@ void checkGoogleChromeManagementV1TelemetryEvent( buildCounterGoogleChromeManagementV1TelemetryEvent--; } -core.List buildUnnamed52() => [ +core.List buildUnnamed53() => [ 'foo', 'foo', ]; -void checkUnnamed52(core.List o) { +void checkUnnamed53(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -3577,7 +3682,7 @@ api.GoogleChromeManagementV1TelemetryEventNotificationFilter buildCounterGoogleChromeManagementV1TelemetryEventNotificationFilter++; if (buildCounterGoogleChromeManagementV1TelemetryEventNotificationFilter < 3) { - o.eventTypes = buildUnnamed52(); + o.eventTypes = buildUnnamed53(); } buildCounterGoogleChromeManagementV1TelemetryEventNotificationFilter--; return o; @@ -3588,7 +3693,7 @@ void checkGoogleChromeManagementV1TelemetryEventNotificationFilter( buildCounterGoogleChromeManagementV1TelemetryEventNotificationFilter++; if (buildCounterGoogleChromeManagementV1TelemetryEventNotificationFilter < 3) { - checkUnnamed52(o.eventTypes!); + checkUnnamed53(o.eventTypes!); } buildCounterGoogleChromeManagementV1TelemetryEventNotificationFilter--; } @@ -3769,12 +3874,12 @@ void checkGoogleChromeManagementV1TelemetryNotificationFilter( buildCounterGoogleChromeManagementV1TelemetryNotificationFilter--; } -core.List buildUnnamed53() => [ +core.List buildUnnamed54() => [ buildGoogleChromeManagementV1UsbPeripheralReport(), buildGoogleChromeManagementV1UsbPeripheralReport(), ]; -void checkUnnamed53( +void checkUnnamed54( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleChromeManagementV1UsbPeripheralReport(o[0]); @@ -3787,7 +3892,7 @@ api.GoogleChromeManagementV1TelemetryUsbPeripheralsEvent final o = api.GoogleChromeManagementV1TelemetryUsbPeripheralsEvent(); buildCounterGoogleChromeManagementV1TelemetryUsbPeripheralsEvent++; if (buildCounterGoogleChromeManagementV1TelemetryUsbPeripheralsEvent < 3) { - o.usbPeripheralReport = buildUnnamed53(); + o.usbPeripheralReport = buildUnnamed54(); } buildCounterGoogleChromeManagementV1TelemetryUsbPeripheralsEvent--; return o; @@ -3797,17 +3902,17 @@ void checkGoogleChromeManagementV1TelemetryUsbPeripheralsEvent( api.GoogleChromeManagementV1TelemetryUsbPeripheralsEvent o) { buildCounterGoogleChromeManagementV1TelemetryUsbPeripheralsEvent++; if (buildCounterGoogleChromeManagementV1TelemetryUsbPeripheralsEvent < 3) { - checkUnnamed53(o.usbPeripheralReport!); + checkUnnamed54(o.usbPeripheralReport!); } buildCounterGoogleChromeManagementV1TelemetryUsbPeripheralsEvent--; } -core.List buildUnnamed54() => [ +core.List buildUnnamed55() => [ buildGoogleChromeManagementV1TelemetryUserDevice(), buildGoogleChromeManagementV1TelemetryUserDevice(), ]; -void checkUnnamed54( +void checkUnnamed55( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleChromeManagementV1TelemetryUserDevice(o[0]); @@ -3823,7 +3928,7 @@ api.GoogleChromeManagementV1TelemetryUser o.customer = 'foo'; o.name = 
'foo'; o.orgUnitId = 'foo'; - o.userDevice = buildUnnamed54(); + o.userDevice = buildUnnamed55(); o.userEmail = 'foo'; o.userId = 'foo'; } @@ -3847,7 +3952,7 @@ void checkGoogleChromeManagementV1TelemetryUser( o.orgUnitId!, unittest.equals('foo'), ); - checkUnnamed54(o.userDevice!); + checkUnnamed55(o.userDevice!); unittest.expect( o.userEmail!, unittest.equals('foo'), @@ -3860,36 +3965,36 @@ void checkGoogleChromeManagementV1TelemetryUser( buildCounterGoogleChromeManagementV1TelemetryUser--; } -core.List buildUnnamed55() => [ +core.List buildUnnamed56() => [ buildGoogleChromeManagementV1AppReport(), buildGoogleChromeManagementV1AppReport(), ]; -void checkUnnamed55(core.List o) { +void checkUnnamed56(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleChromeManagementV1AppReport(o[0]); checkGoogleChromeManagementV1AppReport(o[1]); } -core.List buildUnnamed56() => [ +core.List buildUnnamed57() => [ buildGoogleChromeManagementV1AudioStatusReport(), buildGoogleChromeManagementV1AudioStatusReport(), ]; -void checkUnnamed56( +void checkUnnamed57( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleChromeManagementV1AudioStatusReport(o[0]); checkGoogleChromeManagementV1AudioStatusReport(o[1]); } -core.List buildUnnamed57() => +core.List buildUnnamed58() => [ buildGoogleChromeManagementV1DeviceActivityReport(), buildGoogleChromeManagementV1DeviceActivityReport(), ]; -void checkUnnamed57( +void checkUnnamed58( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleChromeManagementV1DeviceActivityReport(o[0]); @@ -3897,24 +4002,24 @@ void checkUnnamed57( } core.List - buildUnnamed58() => [ + buildUnnamed59() => [ buildGoogleChromeManagementV1NetworkBandwidthReport(), buildGoogleChromeManagementV1NetworkBandwidthReport(), ]; -void checkUnnamed58( +void checkUnnamed59( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleChromeManagementV1NetworkBandwidthReport(o[0]); checkGoogleChromeManagementV1NetworkBandwidthReport(o[1]); } -core.List buildUnnamed59() => [ +core.List buildUnnamed60() => [ buildGoogleChromeManagementV1PeripheralsReport(), buildGoogleChromeManagementV1PeripheralsReport(), ]; -void checkUnnamed59( +void checkUnnamed60( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleChromeManagementV1PeripheralsReport(o[0]); @@ -3927,12 +4032,12 @@ api.GoogleChromeManagementV1TelemetryUserDevice final o = api.GoogleChromeManagementV1TelemetryUserDevice(); buildCounterGoogleChromeManagementV1TelemetryUserDevice++; if (buildCounterGoogleChromeManagementV1TelemetryUserDevice < 3) { - o.appReport = buildUnnamed55(); - o.audioStatusReport = buildUnnamed56(); - o.deviceActivityReport = buildUnnamed57(); + o.appReport = buildUnnamed56(); + o.audioStatusReport = buildUnnamed57(); + o.deviceActivityReport = buildUnnamed58(); o.deviceId = 'foo'; - o.networkBandwidthReport = buildUnnamed58(); - o.peripheralsReport = buildUnnamed59(); + o.networkBandwidthReport = buildUnnamed59(); + o.peripheralsReport = buildUnnamed60(); } buildCounterGoogleChromeManagementV1TelemetryUserDevice--; return o; @@ -3942,15 +4047,15 @@ void checkGoogleChromeManagementV1TelemetryUserDevice( api.GoogleChromeManagementV1TelemetryUserDevice o) { buildCounterGoogleChromeManagementV1TelemetryUserDevice++; if (buildCounterGoogleChromeManagementV1TelemetryUserDevice < 3) { - checkUnnamed55(o.appReport!); - checkUnnamed56(o.audioStatusReport!); - checkUnnamed57(o.deviceActivityReport!); + checkUnnamed56(o.appReport!); + checkUnnamed57(o.audioStatusReport!); 
+ checkUnnamed58(o.deviceActivityReport!); unittest.expect( o.deviceId!, unittest.equals('foo'), ); - checkUnnamed58(o.networkBandwidthReport!); - checkUnnamed59(o.peripheralsReport!); + checkUnnamed59(o.networkBandwidthReport!); + checkUnnamed60(o.peripheralsReport!); } buildCounterGoogleChromeManagementV1TelemetryUserDevice--; } @@ -4078,12 +4183,12 @@ void checkGoogleChromeManagementV1TouchScreenDevice( buildCounterGoogleChromeManagementV1TouchScreenDevice--; } -core.List buildUnnamed60() => [ +core.List buildUnnamed61() => [ buildGoogleChromeManagementV1TouchScreenDevice(), buildGoogleChromeManagementV1TouchScreenDevice(), ]; -void checkUnnamed60( +void checkUnnamed61( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleChromeManagementV1TouchScreenDevice(o[0]); @@ -4096,7 +4201,7 @@ api.GoogleChromeManagementV1TouchScreenInfo final o = api.GoogleChromeManagementV1TouchScreenInfo(); buildCounterGoogleChromeManagementV1TouchScreenInfo++; if (buildCounterGoogleChromeManagementV1TouchScreenInfo < 3) { - o.devices = buildUnnamed60(); + o.devices = buildUnnamed61(); o.touchpadLibrary = 'foo'; } buildCounterGoogleChromeManagementV1TouchScreenInfo--; @@ -4107,7 +4212,7 @@ void checkGoogleChromeManagementV1TouchScreenInfo( api.GoogleChromeManagementV1TouchScreenInfo o) { buildCounterGoogleChromeManagementV1TouchScreenInfo++; if (buildCounterGoogleChromeManagementV1TouchScreenInfo < 3) { - checkUnnamed60(o.devices!); + checkUnnamed61(o.devices!); unittest.expect( o.touchpadLibrary!, unittest.equals('foo'), @@ -4116,12 +4221,12 @@ void checkGoogleChromeManagementV1TouchScreenInfo( buildCounterGoogleChromeManagementV1TouchScreenInfo--; } -core.List buildUnnamed61() => [ +core.List buildUnnamed62() => [ 'foo', 'foo', ]; -void checkUnnamed61(core.List o) { +void checkUnnamed62(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -4139,7 +4244,7 @@ api.GoogleChromeManagementV1UsbPeripheralReport final o = api.GoogleChromeManagementV1UsbPeripheralReport(); buildCounterGoogleChromeManagementV1UsbPeripheralReport++; if (buildCounterGoogleChromeManagementV1UsbPeripheralReport < 3) { - o.categories = buildUnnamed61(); + o.categories = buildUnnamed62(); o.classId = 42; o.firmwareVersion = 'foo'; o.name = 'foo'; @@ -4156,7 +4261,7 @@ void checkGoogleChromeManagementV1UsbPeripheralReport( api.GoogleChromeManagementV1UsbPeripheralReport o) { buildCounterGoogleChromeManagementV1UsbPeripheralReport++; if (buildCounterGoogleChromeManagementV1UsbPeripheralReport < 3) { - checkUnnamed61(o.categories!); + checkUnnamed62(o.categories!); unittest.expect( o.classId!, unittest.equals(42), @@ -4262,139 +4367,713 @@ void checkGoogleChromeManagementV1UserRequestingExtensionDetails( buildCounterGoogleChromeManagementV1UserRequestingExtensionDetails--; } -core.int buildCounterGoogleProtobufEmpty = 0; -api.GoogleProtobufEmpty buildGoogleProtobufEmpty() { - final o = api.GoogleProtobufEmpty(); - buildCounterGoogleProtobufEmpty++; - if (buildCounterGoogleProtobufEmpty < 3) {} - buildCounterGoogleProtobufEmpty--; - return o; -} - -void checkGoogleProtobufEmpty(api.GoogleProtobufEmpty o) { - buildCounterGoogleProtobufEmpty++; - if (buildCounterGoogleProtobufEmpty < 3) {} - buildCounterGoogleProtobufEmpty--; -} - -core.Map buildUnnamed62() => { - 'x': { - 'list': [1, 2, 3], - 'bool': true, - 'string': 'foo' - }, - 'y': { - 'list': [1, 2, 3], - 'bool': true, - 'string': 'foo' - }, - }; - -void checkUnnamed62(core.Map o) { - unittest.expect(o, unittest.hasLength(2)); - var casted1 = 
(o['x']!) as core.Map; - unittest.expect(casted1, unittest.hasLength(3)); - unittest.expect( - casted1['list'], - unittest.equals([1, 2, 3]), - ); - unittest.expect( - casted1['bool'], - unittest.equals(true), - ); - unittest.expect( - casted1['string'], - unittest.equals('foo'), - ); - var casted2 = (o['y']!) as core.Map; - unittest.expect(casted2, unittest.hasLength(3)); - unittest.expect( - casted2['list'], - unittest.equals([1, 2, 3]), - ); - unittest.expect( - casted2['bool'], - unittest.equals(true), - ); - unittest.expect( - casted2['string'], - unittest.equals('foo'), - ); -} - -core.List> buildUnnamed63() => [ - buildUnnamed62(), - buildUnnamed62(), - ]; - -void checkUnnamed63(core.List> o) { - unittest.expect(o, unittest.hasLength(2)); - checkUnnamed62(o[0]); - checkUnnamed62(o[1]); -} - -core.int buildCounterGoogleRpcStatus = 0; -api.GoogleRpcStatus buildGoogleRpcStatus() { - final o = api.GoogleRpcStatus(); - buildCounterGoogleRpcStatus++; - if (buildCounterGoogleRpcStatus < 3) { - o.code = 42; - o.details = buildUnnamed63(); - o.message = 'foo'; +core.int buildCounterGoogleChromeManagementVersionsV1AttestationCredential = 0; +api.GoogleChromeManagementVersionsV1AttestationCredential + buildGoogleChromeManagementVersionsV1AttestationCredential() { + final o = api.GoogleChromeManagementVersionsV1AttestationCredential(); + buildCounterGoogleChromeManagementVersionsV1AttestationCredential++; + if (buildCounterGoogleChromeManagementVersionsV1AttestationCredential < 3) { + o.keyRotationTime = 'foo'; + o.keyTrustLevel = 'foo'; + o.keyType = 'foo'; + o.publicKey = 'foo'; } - buildCounterGoogleRpcStatus--; + buildCounterGoogleChromeManagementVersionsV1AttestationCredential--; return o; } -void checkGoogleRpcStatus(api.GoogleRpcStatus o) { - buildCounterGoogleRpcStatus++; - if (buildCounterGoogleRpcStatus < 3) { +void checkGoogleChromeManagementVersionsV1AttestationCredential( + api.GoogleChromeManagementVersionsV1AttestationCredential o) { + buildCounterGoogleChromeManagementVersionsV1AttestationCredential++; + if (buildCounterGoogleChromeManagementVersionsV1AttestationCredential < 3) { unittest.expect( - o.code!, - unittest.equals(42), + o.keyRotationTime!, + unittest.equals('foo'), ); - checkUnnamed63(o.details!); unittest.expect( - o.message!, + o.keyTrustLevel!, + unittest.equals('foo'), + ); + unittest.expect( + o.keyType!, + unittest.equals('foo'), + ); + unittest.expect( + o.publicKey!, unittest.equals('foo'), ); } - buildCounterGoogleRpcStatus--; + buildCounterGoogleChromeManagementVersionsV1AttestationCredential--; } -core.int buildCounterGoogleTypeDate = 0; -api.GoogleTypeDate buildGoogleTypeDate() { - final o = api.GoogleTypeDate(); - buildCounterGoogleTypeDate++; - if (buildCounterGoogleTypeDate < 3) { - o.day = 42; - o.month = 42; - o.year = 42; +core.int buildCounterGoogleChromeManagementVersionsV1ChromeBrowserProfile = 0; +api.GoogleChromeManagementVersionsV1ChromeBrowserProfile + buildGoogleChromeManagementVersionsV1ChromeBrowserProfile() { + final o = api.GoogleChromeManagementVersionsV1ChromeBrowserProfile(); + buildCounterGoogleChromeManagementVersionsV1ChromeBrowserProfile++; + if (buildCounterGoogleChromeManagementVersionsV1ChromeBrowserProfile < 3) { + o.affiliationState = 'foo'; + o.annotatedLocation = 'foo'; + o.annotatedUser = 'foo'; + o.attestationCredential = + buildGoogleChromeManagementVersionsV1AttestationCredential(); + o.browserChannel = 'foo'; + o.browserVersion = 'foo'; + o.deviceInfo = buildGoogleChromeManagementVersionsV1DeviceInfo(); + o.displayName 
= 'foo'; + o.etag = 'foo'; + o.extensionCount = 'foo'; + o.firstEnrollmentTime = 'foo'; + o.identityProvider = 'foo'; + o.lastActivityTime = 'foo'; + o.lastPolicyFetchTime = 'foo'; + o.lastPolicySyncTime = 'foo'; + o.lastStatusReportTime = 'foo'; + o.name = 'foo'; + o.osPlatformType = 'foo'; + o.osPlatformVersion = 'foo'; + o.osVersion = 'foo'; + o.policyCount = 'foo'; + o.profileId = 'foo'; + o.profilePermanentId = 'foo'; + o.reportingData = buildGoogleChromeManagementVersionsV1ReportingData(); + o.userEmail = 'foo'; + o.userId = 'foo'; } - buildCounterGoogleTypeDate--; + buildCounterGoogleChromeManagementVersionsV1ChromeBrowserProfile--; return o; } -void checkGoogleTypeDate(api.GoogleTypeDate o) { - buildCounterGoogleTypeDate++; - if (buildCounterGoogleTypeDate < 3) { +void checkGoogleChromeManagementVersionsV1ChromeBrowserProfile( + api.GoogleChromeManagementVersionsV1ChromeBrowserProfile o) { + buildCounterGoogleChromeManagementVersionsV1ChromeBrowserProfile++; + if (buildCounterGoogleChromeManagementVersionsV1ChromeBrowserProfile < 3) { unittest.expect( - o.day!, - unittest.equals(42), + o.affiliationState!, + unittest.equals('foo'), ); unittest.expect( - o.month!, - unittest.equals(42), + o.annotatedLocation!, + unittest.equals('foo'), ); unittest.expect( - o.year!, - unittest.equals(42), + o.annotatedUser!, + unittest.equals('foo'), + ); + checkGoogleChromeManagementVersionsV1AttestationCredential( + o.attestationCredential!); + unittest.expect( + o.browserChannel!, + unittest.equals('foo'), + ); + unittest.expect( + o.browserVersion!, + unittest.equals('foo'), + ); + checkGoogleChromeManagementVersionsV1DeviceInfo(o.deviceInfo!); + unittest.expect( + o.displayName!, + unittest.equals('foo'), + ); + unittest.expect( + o.etag!, + unittest.equals('foo'), + ); + unittest.expect( + o.extensionCount!, + unittest.equals('foo'), + ); + unittest.expect( + o.firstEnrollmentTime!, + unittest.equals('foo'), + ); + unittest.expect( + o.identityProvider!, + unittest.equals('foo'), + ); + unittest.expect( + o.lastActivityTime!, + unittest.equals('foo'), + ); + unittest.expect( + o.lastPolicyFetchTime!, + unittest.equals('foo'), + ); + unittest.expect( + o.lastPolicySyncTime!, + unittest.equals('foo'), + ); + unittest.expect( + o.lastStatusReportTime!, + unittest.equals('foo'), + ); + unittest.expect( + o.name!, + unittest.equals('foo'), + ); + unittest.expect( + o.osPlatformType!, + unittest.equals('foo'), + ); + unittest.expect( + o.osPlatformVersion!, + unittest.equals('foo'), + ); + unittest.expect( + o.osVersion!, + unittest.equals('foo'), + ); + unittest.expect( + o.policyCount!, + unittest.equals('foo'), + ); + unittest.expect( + o.profileId!, + unittest.equals('foo'), + ); + unittest.expect( + o.profilePermanentId!, + unittest.equals('foo'), + ); + checkGoogleChromeManagementVersionsV1ReportingData(o.reportingData!); + unittest.expect( + o.userEmail!, + unittest.equals('foo'), + ); + unittest.expect( + o.userId!, + unittest.equals('foo'), ); } - buildCounterGoogleTypeDate--; + buildCounterGoogleChromeManagementVersionsV1ChromeBrowserProfile--; } -void main() { +core.int buildCounterGoogleChromeManagementVersionsV1DeviceInfo = 0; +api.GoogleChromeManagementVersionsV1DeviceInfo + buildGoogleChromeManagementVersionsV1DeviceInfo() { + final o = api.GoogleChromeManagementVersionsV1DeviceInfo(); + buildCounterGoogleChromeManagementVersionsV1DeviceInfo++; + if (buildCounterGoogleChromeManagementVersionsV1DeviceInfo < 3) { + o.affiliatedDeviceId = 'foo'; + o.deviceType = 'foo'; + o.hostname = 
'foo'; + o.machine = 'foo'; + } + buildCounterGoogleChromeManagementVersionsV1DeviceInfo--; + return o; +} + +void checkGoogleChromeManagementVersionsV1DeviceInfo( + api.GoogleChromeManagementVersionsV1DeviceInfo o) { + buildCounterGoogleChromeManagementVersionsV1DeviceInfo++; + if (buildCounterGoogleChromeManagementVersionsV1DeviceInfo < 3) { + unittest.expect( + o.affiliatedDeviceId!, + unittest.equals('foo'), + ); + unittest.expect( + o.deviceType!, + unittest.equals('foo'), + ); + unittest.expect( + o.hostname!, + unittest.equals('foo'), + ); + unittest.expect( + o.machine!, + unittest.equals('foo'), + ); + } + buildCounterGoogleChromeManagementVersionsV1DeviceInfo--; +} + +core.List + buildUnnamed63() => [ + buildGoogleChromeManagementVersionsV1ChromeBrowserProfile(), + buildGoogleChromeManagementVersionsV1ChromeBrowserProfile(), + ]; + +void checkUnnamed63( + core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkGoogleChromeManagementVersionsV1ChromeBrowserProfile(o[0]); + checkGoogleChromeManagementVersionsV1ChromeBrowserProfile(o[1]); +} + +core.int + buildCounterGoogleChromeManagementVersionsV1ListChromeBrowserProfilesResponse = + 0; +api.GoogleChromeManagementVersionsV1ListChromeBrowserProfilesResponse + buildGoogleChromeManagementVersionsV1ListChromeBrowserProfilesResponse() { + final o = + api.GoogleChromeManagementVersionsV1ListChromeBrowserProfilesResponse(); + buildCounterGoogleChromeManagementVersionsV1ListChromeBrowserProfilesResponse++; + if (buildCounterGoogleChromeManagementVersionsV1ListChromeBrowserProfilesResponse < + 3) { + o.chromeBrowserProfiles = buildUnnamed63(); + o.nextPageToken = 'foo'; + o.totalSize = 'foo'; + } + buildCounterGoogleChromeManagementVersionsV1ListChromeBrowserProfilesResponse--; + return o; +} + +void checkGoogleChromeManagementVersionsV1ListChromeBrowserProfilesResponse( + api.GoogleChromeManagementVersionsV1ListChromeBrowserProfilesResponse o) { + buildCounterGoogleChromeManagementVersionsV1ListChromeBrowserProfilesResponse++; + if (buildCounterGoogleChromeManagementVersionsV1ListChromeBrowserProfilesResponse < + 3) { + checkUnnamed63(o.chromeBrowserProfiles!); + unittest.expect( + o.nextPageToken!, + unittest.equals('foo'), + ); + unittest.expect( + o.totalSize!, + unittest.equals('foo'), + ); + } + buildCounterGoogleChromeManagementVersionsV1ListChromeBrowserProfilesResponse--; +} + +core.List + buildUnnamed64() => [ + buildGoogleChromeManagementVersionsV1ReportingDataExtensionData(), + buildGoogleChromeManagementVersionsV1ReportingDataExtensionData(), + ]; + +void checkUnnamed64( + core.List + o) { + unittest.expect(o, unittest.hasLength(2)); + checkGoogleChromeManagementVersionsV1ReportingDataExtensionData(o[0]); + checkGoogleChromeManagementVersionsV1ReportingDataExtensionData(o[1]); +} + +core.List + buildUnnamed65() => [ + buildGoogleChromeManagementVersionsV1ReportingDataExtensionPolicyData(), + buildGoogleChromeManagementVersionsV1ReportingDataExtensionPolicyData(), + ]; + +void checkUnnamed65( + core.List< + api + .GoogleChromeManagementVersionsV1ReportingDataExtensionPolicyData> + o) { + unittest.expect(o, unittest.hasLength(2)); + checkGoogleChromeManagementVersionsV1ReportingDataExtensionPolicyData(o[0]); + checkGoogleChromeManagementVersionsV1ReportingDataExtensionPolicyData(o[1]); +} + +core.List + buildUnnamed66() => [ + buildGoogleChromeManagementVersionsV1ReportingDataPolicyData(), + buildGoogleChromeManagementVersionsV1ReportingDataPolicyData(), + ]; + +void checkUnnamed66( + core.List o) { + 
unittest.expect(o, unittest.hasLength(2)); + checkGoogleChromeManagementVersionsV1ReportingDataPolicyData(o[0]); + checkGoogleChromeManagementVersionsV1ReportingDataPolicyData(o[1]); +} + +core.int buildCounterGoogleChromeManagementVersionsV1ReportingData = 0; +api.GoogleChromeManagementVersionsV1ReportingData + buildGoogleChromeManagementVersionsV1ReportingData() { + final o = api.GoogleChromeManagementVersionsV1ReportingData(); + buildCounterGoogleChromeManagementVersionsV1ReportingData++; + if (buildCounterGoogleChromeManagementVersionsV1ReportingData < 3) { + o.browserExecutablePath = 'foo'; + o.extensionData = buildUnnamed64(); + o.extensionPolicyData = buildUnnamed65(); + o.installedBrowserVersion = 'foo'; + o.policyData = buildUnnamed66(); + o.profilePath = 'foo'; + } + buildCounterGoogleChromeManagementVersionsV1ReportingData--; + return o; +} + +void checkGoogleChromeManagementVersionsV1ReportingData( + api.GoogleChromeManagementVersionsV1ReportingData o) { + buildCounterGoogleChromeManagementVersionsV1ReportingData++; + if (buildCounterGoogleChromeManagementVersionsV1ReportingData < 3) { + unittest.expect( + o.browserExecutablePath!, + unittest.equals('foo'), + ); + checkUnnamed64(o.extensionData!); + checkUnnamed65(o.extensionPolicyData!); + unittest.expect( + o.installedBrowserVersion!, + unittest.equals('foo'), + ); + checkUnnamed66(o.policyData!); + unittest.expect( + o.profilePath!, + unittest.equals('foo'), + ); + } + buildCounterGoogleChromeManagementVersionsV1ReportingData--; +} + +core.int + buildCounterGoogleChromeManagementVersionsV1ReportingDataConflictingPolicyData = + 0; +api.GoogleChromeManagementVersionsV1ReportingDataConflictingPolicyData + buildGoogleChromeManagementVersionsV1ReportingDataConflictingPolicyData() { + final o = + api.GoogleChromeManagementVersionsV1ReportingDataConflictingPolicyData(); + buildCounterGoogleChromeManagementVersionsV1ReportingDataConflictingPolicyData++; + if (buildCounterGoogleChromeManagementVersionsV1ReportingDataConflictingPolicyData < + 3) { + o.source = 'foo'; + } + buildCounterGoogleChromeManagementVersionsV1ReportingDataConflictingPolicyData--; + return o; +} + +void checkGoogleChromeManagementVersionsV1ReportingDataConflictingPolicyData( + api.GoogleChromeManagementVersionsV1ReportingDataConflictingPolicyData o) { + buildCounterGoogleChromeManagementVersionsV1ReportingDataConflictingPolicyData++; + if (buildCounterGoogleChromeManagementVersionsV1ReportingDataConflictingPolicyData < + 3) { + unittest.expect( + o.source!, + unittest.equals('foo'), + ); + } + buildCounterGoogleChromeManagementVersionsV1ReportingDataConflictingPolicyData--; +} + +core.List buildUnnamed67() => [ + 'foo', + 'foo', + ]; + +void checkUnnamed67(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o[0], + unittest.equals('foo'), + ); + unittest.expect( + o[1], + unittest.equals('foo'), + ); +} + +core.int + buildCounterGoogleChromeManagementVersionsV1ReportingDataExtensionData = 0; +api.GoogleChromeManagementVersionsV1ReportingDataExtensionData + buildGoogleChromeManagementVersionsV1ReportingDataExtensionData() { + final o = api.GoogleChromeManagementVersionsV1ReportingDataExtensionData(); + buildCounterGoogleChromeManagementVersionsV1ReportingDataExtensionData++; + if (buildCounterGoogleChromeManagementVersionsV1ReportingDataExtensionData < + 3) { + o.description = 'foo'; + o.extensionId = 'foo'; + o.extensionType = 'foo'; + o.homepageUri = 'foo'; + o.installationType = 'foo'; + o.isDisabled = true; + o.isWebstoreExtension 
= true; + o.manifestVersion = 42; + o.name = 'foo'; + o.permissions = buildUnnamed67(); + o.version = 'foo'; + } + buildCounterGoogleChromeManagementVersionsV1ReportingDataExtensionData--; + return o; +} + +void checkGoogleChromeManagementVersionsV1ReportingDataExtensionData( + api.GoogleChromeManagementVersionsV1ReportingDataExtensionData o) { + buildCounterGoogleChromeManagementVersionsV1ReportingDataExtensionData++; + if (buildCounterGoogleChromeManagementVersionsV1ReportingDataExtensionData < + 3) { + unittest.expect( + o.description!, + unittest.equals('foo'), + ); + unittest.expect( + o.extensionId!, + unittest.equals('foo'), + ); + unittest.expect( + o.extensionType!, + unittest.equals('foo'), + ); + unittest.expect( + o.homepageUri!, + unittest.equals('foo'), + ); + unittest.expect( + o.installationType!, + unittest.equals('foo'), + ); + unittest.expect(o.isDisabled!, unittest.isTrue); + unittest.expect(o.isWebstoreExtension!, unittest.isTrue); + unittest.expect( + o.manifestVersion!, + unittest.equals(42), + ); + unittest.expect( + o.name!, + unittest.equals('foo'), + ); + checkUnnamed67(o.permissions!); + unittest.expect( + o.version!, + unittest.equals('foo'), + ); + } + buildCounterGoogleChromeManagementVersionsV1ReportingDataExtensionData--; +} + +core.List + buildUnnamed68() => [ + buildGoogleChromeManagementVersionsV1ReportingDataPolicyData(), + buildGoogleChromeManagementVersionsV1ReportingDataPolicyData(), + ]; + +void checkUnnamed68( + core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkGoogleChromeManagementVersionsV1ReportingDataPolicyData(o[0]); + checkGoogleChromeManagementVersionsV1ReportingDataPolicyData(o[1]); +} + +core.int + buildCounterGoogleChromeManagementVersionsV1ReportingDataExtensionPolicyData = + 0; +api.GoogleChromeManagementVersionsV1ReportingDataExtensionPolicyData + buildGoogleChromeManagementVersionsV1ReportingDataExtensionPolicyData() { + final o = + api.GoogleChromeManagementVersionsV1ReportingDataExtensionPolicyData(); + buildCounterGoogleChromeManagementVersionsV1ReportingDataExtensionPolicyData++; + if (buildCounterGoogleChromeManagementVersionsV1ReportingDataExtensionPolicyData < + 3) { + o.extensionId = 'foo'; + o.extensionName = 'foo'; + o.policyData = buildUnnamed68(); + } + buildCounterGoogleChromeManagementVersionsV1ReportingDataExtensionPolicyData--; + return o; +} + +void checkGoogleChromeManagementVersionsV1ReportingDataExtensionPolicyData( + api.GoogleChromeManagementVersionsV1ReportingDataExtensionPolicyData o) { + buildCounterGoogleChromeManagementVersionsV1ReportingDataExtensionPolicyData++; + if (buildCounterGoogleChromeManagementVersionsV1ReportingDataExtensionPolicyData < + 3) { + unittest.expect( + o.extensionId!, + unittest.equals('foo'), + ); + unittest.expect( + o.extensionName!, + unittest.equals('foo'), + ); + checkUnnamed68(o.policyData!); + } + buildCounterGoogleChromeManagementVersionsV1ReportingDataExtensionPolicyData--; +} + +core.List< + api.GoogleChromeManagementVersionsV1ReportingDataConflictingPolicyData> + buildUnnamed69() => [ + buildGoogleChromeManagementVersionsV1ReportingDataConflictingPolicyData(), + buildGoogleChromeManagementVersionsV1ReportingDataConflictingPolicyData(), + ]; + +void checkUnnamed69( + core.List< + api + .GoogleChromeManagementVersionsV1ReportingDataConflictingPolicyData> + o) { + unittest.expect(o, unittest.hasLength(2)); + checkGoogleChromeManagementVersionsV1ReportingDataConflictingPolicyData(o[0]); + 
checkGoogleChromeManagementVersionsV1ReportingDataConflictingPolicyData(o[1]); +} + +core.int buildCounterGoogleChromeManagementVersionsV1ReportingDataPolicyData = + 0; +api.GoogleChromeManagementVersionsV1ReportingDataPolicyData + buildGoogleChromeManagementVersionsV1ReportingDataPolicyData() { + final o = api.GoogleChromeManagementVersionsV1ReportingDataPolicyData(); + buildCounterGoogleChromeManagementVersionsV1ReportingDataPolicyData++; + if (buildCounterGoogleChromeManagementVersionsV1ReportingDataPolicyData < 3) { + o.conflicts = buildUnnamed69(); + o.error = 'foo'; + o.name = 'foo'; + o.source = 'foo'; + o.value = 'foo'; + } + buildCounterGoogleChromeManagementVersionsV1ReportingDataPolicyData--; + return o; +} + +void checkGoogleChromeManagementVersionsV1ReportingDataPolicyData( + api.GoogleChromeManagementVersionsV1ReportingDataPolicyData o) { + buildCounterGoogleChromeManagementVersionsV1ReportingDataPolicyData++; + if (buildCounterGoogleChromeManagementVersionsV1ReportingDataPolicyData < 3) { + checkUnnamed69(o.conflicts!); + unittest.expect( + o.error!, + unittest.equals('foo'), + ); + unittest.expect( + o.name!, + unittest.equals('foo'), + ); + unittest.expect( + o.source!, + unittest.equals('foo'), + ); + unittest.expect( + o.value!, + unittest.equals('foo'), + ); + } + buildCounterGoogleChromeManagementVersionsV1ReportingDataPolicyData--; +} + +core.int buildCounterGoogleProtobufEmpty = 0; +api.GoogleProtobufEmpty buildGoogleProtobufEmpty() { + final o = api.GoogleProtobufEmpty(); + buildCounterGoogleProtobufEmpty++; + if (buildCounterGoogleProtobufEmpty < 3) {} + buildCounterGoogleProtobufEmpty--; + return o; +} + +void checkGoogleProtobufEmpty(api.GoogleProtobufEmpty o) { + buildCounterGoogleProtobufEmpty++; + if (buildCounterGoogleProtobufEmpty < 3) {} + buildCounterGoogleProtobufEmpty--; +} + +core.Map buildUnnamed70() => { + 'x': { + 'list': [1, 2, 3], + 'bool': true, + 'string': 'foo' + }, + 'y': { + 'list': [1, 2, 3], + 'bool': true, + 'string': 'foo' + }, + }; + +void checkUnnamed70(core.Map o) { + unittest.expect(o, unittest.hasLength(2)); + var casted1 = (o['x']!) as core.Map; + unittest.expect(casted1, unittest.hasLength(3)); + unittest.expect( + casted1['list'], + unittest.equals([1, 2, 3]), + ); + unittest.expect( + casted1['bool'], + unittest.equals(true), + ); + unittest.expect( + casted1['string'], + unittest.equals('foo'), + ); + var casted2 = (o['y']!) 
as core.Map; + unittest.expect(casted2, unittest.hasLength(3)); + unittest.expect( + casted2['list'], + unittest.equals([1, 2, 3]), + ); + unittest.expect( + casted2['bool'], + unittest.equals(true), + ); + unittest.expect( + casted2['string'], + unittest.equals('foo'), + ); +} + +core.List> buildUnnamed71() => [ + buildUnnamed70(), + buildUnnamed70(), + ]; + +void checkUnnamed71(core.List> o) { + unittest.expect(o, unittest.hasLength(2)); + checkUnnamed70(o[0]); + checkUnnamed70(o[1]); +} + +core.int buildCounterGoogleRpcStatus = 0; +api.GoogleRpcStatus buildGoogleRpcStatus() { + final o = api.GoogleRpcStatus(); + buildCounterGoogleRpcStatus++; + if (buildCounterGoogleRpcStatus < 3) { + o.code = 42; + o.details = buildUnnamed71(); + o.message = 'foo'; + } + buildCounterGoogleRpcStatus--; + return o; +} + +void checkGoogleRpcStatus(api.GoogleRpcStatus o) { + buildCounterGoogleRpcStatus++; + if (buildCounterGoogleRpcStatus < 3) { + unittest.expect( + o.code!, + unittest.equals(42), + ); + checkUnnamed71(o.details!); + unittest.expect( + o.message!, + unittest.equals('foo'), + ); + } + buildCounterGoogleRpcStatus--; +} + +core.int buildCounterGoogleTypeDate = 0; +api.GoogleTypeDate buildGoogleTypeDate() { + final o = api.GoogleTypeDate(); + buildCounterGoogleTypeDate++; + if (buildCounterGoogleTypeDate < 3) { + o.day = 42; + o.month = 42; + o.year = 42; + } + buildCounterGoogleTypeDate--; + return o; +} + +void checkGoogleTypeDate(api.GoogleTypeDate o) { + buildCounterGoogleTypeDate++; + if (buildCounterGoogleTypeDate < 3) { + unittest.expect( + o.day!, + unittest.equals(42), + ); + unittest.expect( + o.month!, + unittest.equals(42), + ); + unittest.expect( + o.year!, + unittest.equals(42), + ); + } + buildCounterGoogleTypeDate--; +} + +void main() { unittest.group('obj-schema-GoogleChromeManagementV1AndroidAppInfo', () { unittest.test('to-json--from-json', () async { final o = buildGoogleChromeManagementV1AndroidAppInfo(); @@ -5092,6 +5771,36 @@ void main() { }); }); + unittest.group('obj-schema-GoogleChromeManagementV1RiskAssessment', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleChromeManagementV1RiskAssessment(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleChromeManagementV1RiskAssessment.fromJson( + oJson as core.Map); + checkGoogleChromeManagementV1RiskAssessment(od); + }); + }); + + unittest.group('obj-schema-GoogleChromeManagementV1RiskAssessmentData', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleChromeManagementV1RiskAssessmentData(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleChromeManagementV1RiskAssessmentData.fromJson( + oJson as core.Map); + checkGoogleChromeManagementV1RiskAssessmentData(od); + }); + }); + + unittest.group('obj-schema-GoogleChromeManagementV1RiskAssessmentEntry', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleChromeManagementV1RiskAssessmentEntry(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleChromeManagementV1RiskAssessmentEntry.fromJson( + oJson as core.Map); + checkGoogleChromeManagementV1RiskAssessmentEntry(od); + }); + }); + unittest.group('obj-schema-GoogleChromeManagementV1RuntimeCountersReport', () { unittest.test('to-json--from-json', () async { @@ -5404,6 +6113,120 @@ void main() { }); }); + unittest.group( + 'obj-schema-GoogleChromeManagementVersionsV1AttestationCredential', () { + unittest.test('to-json--from-json', () async { + final o = 
buildGoogleChromeManagementVersionsV1AttestationCredential(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.GoogleChromeManagementVersionsV1AttestationCredential.fromJson( + oJson as core.Map); + checkGoogleChromeManagementVersionsV1AttestationCredential(od); + }); + }); + + unittest.group( + 'obj-schema-GoogleChromeManagementVersionsV1ChromeBrowserProfile', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleChromeManagementVersionsV1ChromeBrowserProfile(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.GoogleChromeManagementVersionsV1ChromeBrowserProfile.fromJson( + oJson as core.Map); + checkGoogleChromeManagementVersionsV1ChromeBrowserProfile(od); + }); + }); + + unittest.group('obj-schema-GoogleChromeManagementVersionsV1DeviceInfo', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleChromeManagementVersionsV1DeviceInfo(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleChromeManagementVersionsV1DeviceInfo.fromJson( + oJson as core.Map); + checkGoogleChromeManagementVersionsV1DeviceInfo(od); + }); + }); + + unittest.group( + 'obj-schema-GoogleChromeManagementVersionsV1ListChromeBrowserProfilesResponse', + () { + unittest.test('to-json--from-json', () async { + final o = + buildGoogleChromeManagementVersionsV1ListChromeBrowserProfilesResponse(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.GoogleChromeManagementVersionsV1ListChromeBrowserProfilesResponse + .fromJson(oJson as core.Map); + checkGoogleChromeManagementVersionsV1ListChromeBrowserProfilesResponse( + od); + }); + }); + + unittest.group('obj-schema-GoogleChromeManagementVersionsV1ReportingData', + () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleChromeManagementVersionsV1ReportingData(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleChromeManagementVersionsV1ReportingData.fromJson( + oJson as core.Map); + checkGoogleChromeManagementVersionsV1ReportingData(od); + }); + }); + + unittest.group( + 'obj-schema-GoogleChromeManagementVersionsV1ReportingDataConflictingPolicyData', + () { + unittest.test('to-json--from-json', () async { + final o = + buildGoogleChromeManagementVersionsV1ReportingDataConflictingPolicyData(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.GoogleChromeManagementVersionsV1ReportingDataConflictingPolicyData + .fromJson(oJson as core.Map); + checkGoogleChromeManagementVersionsV1ReportingDataConflictingPolicyData( + od); + }); + }); + + unittest.group( + 'obj-schema-GoogleChromeManagementVersionsV1ReportingDataExtensionData', + () { + unittest.test('to-json--from-json', () async { + final o = + buildGoogleChromeManagementVersionsV1ReportingDataExtensionData(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleChromeManagementVersionsV1ReportingDataExtensionData + .fromJson(oJson as core.Map); + checkGoogleChromeManagementVersionsV1ReportingDataExtensionData(od); + }); + }); + + unittest.group( + 'obj-schema-GoogleChromeManagementVersionsV1ReportingDataExtensionPolicyData', + () { + unittest.test('to-json--from-json', () async { + final o = + buildGoogleChromeManagementVersionsV1ReportingDataExtensionPolicyData(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.GoogleChromeManagementVersionsV1ReportingDataExtensionPolicyData + .fromJson(oJson as core.Map); + 
checkGoogleChromeManagementVersionsV1ReportingDataExtensionPolicyData(od); + }); + }); + + unittest.group( + 'obj-schema-GoogleChromeManagementVersionsV1ReportingDataPolicyData', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleChromeManagementVersionsV1ReportingDataPolicyData(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.GoogleChromeManagementVersionsV1ReportingDataPolicyData.fromJson( + oJson as core.Map); + checkGoogleChromeManagementVersionsV1ReportingDataPolicyData(od); + }); + }); + unittest.group('obj-schema-GoogleProtobufEmpty', () { unittest.test('to-json--from-json', () async { final o = buildGoogleProtobufEmpty(); @@ -5843,6 +6666,194 @@ void main() { }); }); + unittest.group('resource-CustomersProfilesResource', () { + unittest.test('method--delete', () async { + final mock = HttpServerMock(); + final res = api.ChromeManagementApi(mock).customers.profiles; + final arg_name = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildGoogleProtobufEmpty()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.delete(arg_name, $fields: arg_$fields); + checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); + }); + + unittest.test('method--get', () async { + final mock = HttpServerMock(); + final res = api.ChromeManagementApi(mock).customers.profiles; + final arg_name = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', 
+ }; + final resp = convert.json.encode( + buildGoogleChromeManagementVersionsV1ChromeBrowserProfile()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.get(arg_name, $fields: arg_$fields); + checkGoogleChromeManagementVersionsV1ChromeBrowserProfile( + response as api.GoogleChromeManagementVersionsV1ChromeBrowserProfile); + }); + + unittest.test('method--list', () async { + final mock = HttpServerMock(); + final res = api.ChromeManagementApi(mock).customers.profiles; + final arg_parent = 'foo'; + final arg_filter = 'foo'; + final arg_orderBy = 'foo'; + final arg_pageSize = 42; + final arg_pageToken = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['filter']!.first, + unittest.equals(arg_filter), + ); + unittest.expect( + queryMap['orderBy']!.first, + unittest.equals(arg_orderBy), + ); + unittest.expect( + core.int.parse(queryMap['pageSize']!.first), + unittest.equals(arg_pageSize), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode( + buildGoogleChromeManagementVersionsV1ListChromeBrowserProfilesResponse()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.list(arg_parent, + filter: arg_filter, + orderBy: arg_orderBy, + pageSize: arg_pageSize, + pageToken: arg_pageToken, + $fields: arg_$fields); + checkGoogleChromeManagementVersionsV1ListChromeBrowserProfilesResponse( + response as api + .GoogleChromeManagementVersionsV1ListChromeBrowserProfilesResponse); + }); + }); + unittest.group('resource-CustomersReportsResource', () { unittest.test('method--countChromeBrowsersNeedingAttention', () async { final mock = HttpServerMock(); diff --git a/generated/googleapis/test/classroom/v1_test.dart b/generated/googleapis/test/classroom/v1_test.dart index 93a39e084..5dabcbb7f 100644 --- a/generated/googleapis/test/classroom/v1_test.dart +++ b/generated/googleapis/test/classroom/v1_test.dart @@ -825,6 +825,51 @@ void checkCourseWorkMaterial(api.CourseWorkMaterial o) { buildCounterCourseWorkMaterial--; } +core.List buildUnnamed7() => [ + buildLevel(), + buildLevel(), + ]; + +void checkUnnamed7(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkLevel(o[0]); + checkLevel(o[1]); +} + +core.int buildCounterCriterion = 0; +api.Criterion buildCriterion() { + final o = api.Criterion(); + buildCounterCriterion++; + if (buildCounterCriterion < 3) { + o.description = 'foo'; + 
o.id = 'foo'; + o.levels = buildUnnamed7(); + o.title = 'foo'; + } + buildCounterCriterion--; + return o; +} + +void checkCriterion(api.Criterion o) { + buildCounterCriterion++; + if (buildCounterCriterion < 3) { + unittest.expect( + o.description!, + unittest.equals('foo'), + ); + unittest.expect( + o.id!, + unittest.equals('foo'), + ); + checkUnnamed7(o.levels!); + unittest.expect( + o.title!, + unittest.equals('foo'), + ); + } + buildCounterCriterion--; +} + core.int buildCounterDate = 0; api.Date buildDate() { final o = api.Date(); @@ -1127,12 +1172,12 @@ void checkGradeHistory(api.GradeHistory o) { buildCounterGradeHistory--; } -core.List buildUnnamed7() => [ +core.List buildUnnamed8() => [ buildGradeCategory(), buildGradeCategory(), ]; -void checkUnnamed7(core.List o) { +void checkUnnamed8(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGradeCategory(o[0]); checkGradeCategory(o[1]); @@ -1145,7 +1190,7 @@ api.GradebookSettings buildGradebookSettings() { if (buildCounterGradebookSettings < 3) { o.calculationType = 'foo'; o.displaySetting = 'foo'; - o.gradeCategories = buildUnnamed7(); + o.gradeCategories = buildUnnamed8(); } buildCounterGradebookSettings--; return o; @@ -1162,7 +1207,7 @@ void checkGradebookSettings(api.GradebookSettings o) { o.displaySetting!, unittest.equals('foo'), ); - checkUnnamed7(o.gradeCategories!); + checkUnnamed8(o.gradeCategories!); } buildCounterGradebookSettings--; } @@ -1243,12 +1288,12 @@ void checkGuardianInvitation(api.GuardianInvitation o) { buildCounterGuardianInvitation--; } -core.List buildUnnamed8() => [ +core.List buildUnnamed9() => [ 'foo', 'foo', ]; -void checkUnnamed8(core.List o) { +void checkUnnamed9(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -1265,7 +1310,7 @@ api.IndividualStudentsOptions buildIndividualStudentsOptions() { final o = api.IndividualStudentsOptions(); buildCounterIndividualStudentsOptions++; if (buildCounterIndividualStudentsOptions < 3) { - o.studentIds = buildUnnamed8(); + o.studentIds = buildUnnamed9(); } buildCounterIndividualStudentsOptions--; return o; @@ -1274,7 +1319,7 @@ api.IndividualStudentsOptions buildIndividualStudentsOptions() { void checkIndividualStudentsOptions(api.IndividualStudentsOptions o) { buildCounterIndividualStudentsOptions++; if (buildCounterIndividualStudentsOptions < 3) { - checkUnnamed8(o.studentIds!); + checkUnnamed9(o.studentIds!); } buildCounterIndividualStudentsOptions--; } @@ -1316,6 +1361,43 @@ void checkInvitation(api.Invitation o) { buildCounterInvitation--; } +core.int buildCounterLevel = 0; +api.Level buildLevel() { + final o = api.Level(); + buildCounterLevel++; + if (buildCounterLevel < 3) { + o.description = 'foo'; + o.id = 'foo'; + o.points = 42.0; + o.title = 'foo'; + } + buildCounterLevel--; + return o; +} + +void checkLevel(api.Level o) { + buildCounterLevel++; + if (buildCounterLevel < 3) { + unittest.expect( + o.description!, + unittest.equals('foo'), + ); + unittest.expect( + o.id!, + unittest.equals('foo'), + ); + unittest.expect( + o.points!, + unittest.equals(42.0), + ); + unittest.expect( + o.title!, + unittest.equals('foo'), + ); + } + buildCounterLevel--; +} + core.int buildCounterLink = 0; api.Link buildLink() { final o = api.Link(); @@ -1348,12 +1430,12 @@ void checkLink(api.Link o) { buildCounterLink--; } -core.List buildUnnamed9() => [ +core.List buildUnnamed10() => [ buildAddOnAttachment(), buildAddOnAttachment(), ]; -void checkUnnamed9(core.List o) { +void checkUnnamed10(core.List o) { unittest.expect(o, 
unittest.hasLength(2)); checkAddOnAttachment(o[0]); checkAddOnAttachment(o[1]); @@ -1364,7 +1446,7 @@ api.ListAddOnAttachmentsResponse buildListAddOnAttachmentsResponse() { final o = api.ListAddOnAttachmentsResponse(); buildCounterListAddOnAttachmentsResponse++; if (buildCounterListAddOnAttachmentsResponse < 3) { - o.addOnAttachments = buildUnnamed9(); + o.addOnAttachments = buildUnnamed10(); o.nextPageToken = 'foo'; } buildCounterListAddOnAttachmentsResponse--; @@ -1374,7 +1456,7 @@ api.ListAddOnAttachmentsResponse buildListAddOnAttachmentsResponse() { void checkListAddOnAttachmentsResponse(api.ListAddOnAttachmentsResponse o) { buildCounterListAddOnAttachmentsResponse++; if (buildCounterListAddOnAttachmentsResponse < 3) { - checkUnnamed9(o.addOnAttachments!); + checkUnnamed10(o.addOnAttachments!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -1383,12 +1465,12 @@ void checkListAddOnAttachmentsResponse(api.ListAddOnAttachmentsResponse o) { buildCounterListAddOnAttachmentsResponse--; } -core.List buildUnnamed10() => [ +core.List buildUnnamed11() => [ buildAnnouncement(), buildAnnouncement(), ]; -void checkUnnamed10(core.List o) { +void checkUnnamed11(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkAnnouncement(o[0]); checkAnnouncement(o[1]); @@ -1399,7 +1481,7 @@ api.ListAnnouncementsResponse buildListAnnouncementsResponse() { final o = api.ListAnnouncementsResponse(); buildCounterListAnnouncementsResponse++; if (buildCounterListAnnouncementsResponse < 3) { - o.announcements = buildUnnamed10(); + o.announcements = buildUnnamed11(); o.nextPageToken = 'foo'; } buildCounterListAnnouncementsResponse--; @@ -1409,7 +1491,7 @@ api.ListAnnouncementsResponse buildListAnnouncementsResponse() { void checkListAnnouncementsResponse(api.ListAnnouncementsResponse o) { buildCounterListAnnouncementsResponse++; if (buildCounterListAnnouncementsResponse < 3) { - checkUnnamed10(o.announcements!); + checkUnnamed11(o.announcements!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -1418,12 +1500,12 @@ void checkListAnnouncementsResponse(api.ListAnnouncementsResponse o) { buildCounterListAnnouncementsResponse--; } -core.List buildUnnamed11() => [ +core.List buildUnnamed12() => [ buildCourseAlias(), buildCourseAlias(), ]; -void checkUnnamed11(core.List o) { +void checkUnnamed12(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkCourseAlias(o[0]); checkCourseAlias(o[1]); @@ -1434,7 +1516,7 @@ api.ListCourseAliasesResponse buildListCourseAliasesResponse() { final o = api.ListCourseAliasesResponse(); buildCounterListCourseAliasesResponse++; if (buildCounterListCourseAliasesResponse < 3) { - o.aliases = buildUnnamed11(); + o.aliases = buildUnnamed12(); o.nextPageToken = 'foo'; } buildCounterListCourseAliasesResponse--; @@ -1444,7 +1526,7 @@ api.ListCourseAliasesResponse buildListCourseAliasesResponse() { void checkListCourseAliasesResponse(api.ListCourseAliasesResponse o) { buildCounterListCourseAliasesResponse++; if (buildCounterListCourseAliasesResponse < 3) { - checkUnnamed11(o.aliases!); + checkUnnamed12(o.aliases!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -1453,12 +1535,12 @@ void checkListCourseAliasesResponse(api.ListCourseAliasesResponse o) { buildCounterListCourseAliasesResponse--; } -core.List buildUnnamed12() => [ +core.List buildUnnamed13() => [ buildCourseWorkMaterial(), buildCourseWorkMaterial(), ]; -void checkUnnamed12(core.List o) { +void checkUnnamed13(core.List o) { unittest.expect(o, unittest.hasLength(2)); 
checkCourseWorkMaterial(o[0]); checkCourseWorkMaterial(o[1]); @@ -1469,7 +1551,7 @@ api.ListCourseWorkMaterialResponse buildListCourseWorkMaterialResponse() { final o = api.ListCourseWorkMaterialResponse(); buildCounterListCourseWorkMaterialResponse++; if (buildCounterListCourseWorkMaterialResponse < 3) { - o.courseWorkMaterial = buildUnnamed12(); + o.courseWorkMaterial = buildUnnamed13(); o.nextPageToken = 'foo'; } buildCounterListCourseWorkMaterialResponse--; @@ -1479,7 +1561,7 @@ api.ListCourseWorkMaterialResponse buildListCourseWorkMaterialResponse() { void checkListCourseWorkMaterialResponse(api.ListCourseWorkMaterialResponse o) { buildCounterListCourseWorkMaterialResponse++; if (buildCounterListCourseWorkMaterialResponse < 3) { - checkUnnamed12(o.courseWorkMaterial!); + checkUnnamed13(o.courseWorkMaterial!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -1488,12 +1570,12 @@ void checkListCourseWorkMaterialResponse(api.ListCourseWorkMaterialResponse o) { buildCounterListCourseWorkMaterialResponse--; } -core.List buildUnnamed13() => [ +core.List buildUnnamed14() => [ buildCourseWork(), buildCourseWork(), ]; -void checkUnnamed13(core.List o) { +void checkUnnamed14(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkCourseWork(o[0]); checkCourseWork(o[1]); @@ -1504,7 +1586,7 @@ api.ListCourseWorkResponse buildListCourseWorkResponse() { final o = api.ListCourseWorkResponse(); buildCounterListCourseWorkResponse++; if (buildCounterListCourseWorkResponse < 3) { - o.courseWork = buildUnnamed13(); + o.courseWork = buildUnnamed14(); o.nextPageToken = 'foo'; } buildCounterListCourseWorkResponse--; @@ -1514,7 +1596,7 @@ api.ListCourseWorkResponse buildListCourseWorkResponse() { void checkListCourseWorkResponse(api.ListCourseWorkResponse o) { buildCounterListCourseWorkResponse++; if (buildCounterListCourseWorkResponse < 3) { - checkUnnamed13(o.courseWork!); + checkUnnamed14(o.courseWork!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -1523,12 +1605,12 @@ void checkListCourseWorkResponse(api.ListCourseWorkResponse o) { buildCounterListCourseWorkResponse--; } -core.List buildUnnamed14() => [ +core.List buildUnnamed15() => [ buildCourse(), buildCourse(), ]; -void checkUnnamed14(core.List o) { +void checkUnnamed15(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkCourse(o[0]); checkCourse(o[1]); @@ -1539,7 +1621,7 @@ api.ListCoursesResponse buildListCoursesResponse() { final o = api.ListCoursesResponse(); buildCounterListCoursesResponse++; if (buildCounterListCoursesResponse < 3) { - o.courses = buildUnnamed14(); + o.courses = buildUnnamed15(); o.nextPageToken = 'foo'; } buildCounterListCoursesResponse--; @@ -1549,7 +1631,7 @@ api.ListCoursesResponse buildListCoursesResponse() { void checkListCoursesResponse(api.ListCoursesResponse o) { buildCounterListCoursesResponse++; if (buildCounterListCoursesResponse < 3) { - checkUnnamed14(o.courses!); + checkUnnamed15(o.courses!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -1558,12 +1640,12 @@ void checkListCoursesResponse(api.ListCoursesResponse o) { buildCounterListCoursesResponse--; } -core.List buildUnnamed15() => [ +core.List buildUnnamed16() => [ buildGuardianInvitation(), buildGuardianInvitation(), ]; -void checkUnnamed15(core.List o) { +void checkUnnamed16(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGuardianInvitation(o[0]); checkGuardianInvitation(o[1]); @@ -1574,7 +1656,7 @@ api.ListGuardianInvitationsResponse buildListGuardianInvitationsResponse() { final 
o = api.ListGuardianInvitationsResponse(); buildCounterListGuardianInvitationsResponse++; if (buildCounterListGuardianInvitationsResponse < 3) { - o.guardianInvitations = buildUnnamed15(); + o.guardianInvitations = buildUnnamed16(); o.nextPageToken = 'foo'; } buildCounterListGuardianInvitationsResponse--; @@ -1585,7 +1667,7 @@ void checkListGuardianInvitationsResponse( api.ListGuardianInvitationsResponse o) { buildCounterListGuardianInvitationsResponse++; if (buildCounterListGuardianInvitationsResponse < 3) { - checkUnnamed15(o.guardianInvitations!); + checkUnnamed16(o.guardianInvitations!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -1594,12 +1676,12 @@ void checkListGuardianInvitationsResponse( buildCounterListGuardianInvitationsResponse--; } -core.List buildUnnamed16() => [ +core.List buildUnnamed17() => [ buildGuardian(), buildGuardian(), ]; -void checkUnnamed16(core.List o) { +void checkUnnamed17(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGuardian(o[0]); checkGuardian(o[1]); @@ -1610,7 +1692,7 @@ api.ListGuardiansResponse buildListGuardiansResponse() { final o = api.ListGuardiansResponse(); buildCounterListGuardiansResponse++; if (buildCounterListGuardiansResponse < 3) { - o.guardians = buildUnnamed16(); + o.guardians = buildUnnamed17(); o.nextPageToken = 'foo'; } buildCounterListGuardiansResponse--; @@ -1620,7 +1702,7 @@ api.ListGuardiansResponse buildListGuardiansResponse() { void checkListGuardiansResponse(api.ListGuardiansResponse o) { buildCounterListGuardiansResponse++; if (buildCounterListGuardiansResponse < 3) { - checkUnnamed16(o.guardians!); + checkUnnamed17(o.guardians!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -1629,12 +1711,12 @@ void checkListGuardiansResponse(api.ListGuardiansResponse o) { buildCounterListGuardiansResponse--; } -core.List buildUnnamed17() => [ +core.List buildUnnamed18() => [ buildInvitation(), buildInvitation(), ]; -void checkUnnamed17(core.List o) { +void checkUnnamed18(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkInvitation(o[0]); checkInvitation(o[1]); @@ -1645,7 +1727,7 @@ api.ListInvitationsResponse buildListInvitationsResponse() { final o = api.ListInvitationsResponse(); buildCounterListInvitationsResponse++; if (buildCounterListInvitationsResponse < 3) { - o.invitations = buildUnnamed17(); + o.invitations = buildUnnamed18(); o.nextPageToken = 'foo'; } buildCounterListInvitationsResponse--; @@ -1655,7 +1737,7 @@ api.ListInvitationsResponse buildListInvitationsResponse() { void checkListInvitationsResponse(api.ListInvitationsResponse o) { buildCounterListInvitationsResponse++; if (buildCounterListInvitationsResponse < 3) { - checkUnnamed17(o.invitations!); + checkUnnamed18(o.invitations!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -1664,12 +1746,47 @@ void checkListInvitationsResponse(api.ListInvitationsResponse o) { buildCounterListInvitationsResponse--; } -core.List buildUnnamed18() => [ +core.List buildUnnamed19() => [ + buildRubric(), + buildRubric(), + ]; + +void checkUnnamed19(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkRubric(o[0]); + checkRubric(o[1]); +} + +core.int buildCounterListRubricsResponse = 0; +api.ListRubricsResponse buildListRubricsResponse() { + final o = api.ListRubricsResponse(); + buildCounterListRubricsResponse++; + if (buildCounterListRubricsResponse < 3) { + o.nextPageToken = 'foo'; + o.rubrics = buildUnnamed19(); + } + buildCounterListRubricsResponse--; + return o; +} + +void 
checkListRubricsResponse(api.ListRubricsResponse o) { + buildCounterListRubricsResponse++; + if (buildCounterListRubricsResponse < 3) { + unittest.expect( + o.nextPageToken!, + unittest.equals('foo'), + ); + checkUnnamed19(o.rubrics!); + } + buildCounterListRubricsResponse--; +} + +core.List buildUnnamed20() => [ buildStudentSubmission(), buildStudentSubmission(), ]; -void checkUnnamed18(core.List o) { +void checkUnnamed20(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkStudentSubmission(o[0]); checkStudentSubmission(o[1]); @@ -1681,7 +1798,7 @@ api.ListStudentSubmissionsResponse buildListStudentSubmissionsResponse() { buildCounterListStudentSubmissionsResponse++; if (buildCounterListStudentSubmissionsResponse < 3) { o.nextPageToken = 'foo'; - o.studentSubmissions = buildUnnamed18(); + o.studentSubmissions = buildUnnamed20(); } buildCounterListStudentSubmissionsResponse--; return o; @@ -1694,17 +1811,17 @@ void checkListStudentSubmissionsResponse(api.ListStudentSubmissionsResponse o) { o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed18(o.studentSubmissions!); + checkUnnamed20(o.studentSubmissions!); } buildCounterListStudentSubmissionsResponse--; } -core.List buildUnnamed19() => [ +core.List buildUnnamed21() => [ buildStudent(), buildStudent(), ]; -void checkUnnamed19(core.List o) { +void checkUnnamed21(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkStudent(o[0]); checkStudent(o[1]); @@ -1716,7 +1833,7 @@ api.ListStudentsResponse buildListStudentsResponse() { buildCounterListStudentsResponse++; if (buildCounterListStudentsResponse < 3) { o.nextPageToken = 'foo'; - o.students = buildUnnamed19(); + o.students = buildUnnamed21(); } buildCounterListStudentsResponse--; return o; @@ -1729,17 +1846,17 @@ void checkListStudentsResponse(api.ListStudentsResponse o) { o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed19(o.students!); + checkUnnamed21(o.students!); } buildCounterListStudentsResponse--; } -core.List buildUnnamed20() => [ +core.List buildUnnamed22() => [ buildTeacher(), buildTeacher(), ]; -void checkUnnamed20(core.List o) { +void checkUnnamed22(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkTeacher(o[0]); checkTeacher(o[1]); @@ -1751,7 +1868,7 @@ api.ListTeachersResponse buildListTeachersResponse() { buildCounterListTeachersResponse++; if (buildCounterListTeachersResponse < 3) { o.nextPageToken = 'foo'; - o.teachers = buildUnnamed20(); + o.teachers = buildUnnamed22(); } buildCounterListTeachersResponse--; return o; @@ -1764,17 +1881,17 @@ void checkListTeachersResponse(api.ListTeachersResponse o) { o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed20(o.teachers!); + checkUnnamed22(o.teachers!); } buildCounterListTeachersResponse--; } -core.List buildUnnamed21() => [ +core.List buildUnnamed23() => [ buildTopic(), buildTopic(), ]; -void checkUnnamed21(core.List o) { +void checkUnnamed23(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkTopic(o[0]); checkTopic(o[1]); @@ -1786,7 +1903,7 @@ api.ListTopicResponse buildListTopicResponse() { buildCounterListTopicResponse++; if (buildCounterListTopicResponse < 3) { o.nextPageToken = 'foo'; - o.topic = buildUnnamed21(); + o.topic = buildUnnamed23(); } buildCounterListTopicResponse--; return o; @@ -1799,7 +1916,7 @@ void checkListTopicResponse(api.ListTopicResponse o) { o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed21(o.topic!); + checkUnnamed23(o.topic!); } buildCounterListTopicResponse--; } @@ -1855,12 +1972,12 @@ void 
checkModifyAnnouncementAssigneesRequest( buildCounterModifyAnnouncementAssigneesRequest--; } -core.List buildUnnamed22() => [ +core.List buildUnnamed24() => [ buildAttachment(), buildAttachment(), ]; -void checkUnnamed22(core.List o) { +void checkUnnamed24(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkAttachment(o[0]); checkAttachment(o[1]); @@ -1871,7 +1988,7 @@ api.ModifyAttachmentsRequest buildModifyAttachmentsRequest() { final o = api.ModifyAttachmentsRequest(); buildCounterModifyAttachmentsRequest++; if (buildCounterModifyAttachmentsRequest < 3) { - o.addAttachments = buildUnnamed22(); + o.addAttachments = buildUnnamed24(); } buildCounterModifyAttachmentsRequest--; return o; @@ -1880,7 +1997,7 @@ api.ModifyAttachmentsRequest buildModifyAttachmentsRequest() { void checkModifyAttachmentsRequest(api.ModifyAttachmentsRequest o) { buildCounterModifyAttachmentsRequest++; if (buildCounterModifyAttachmentsRequest < 3) { - checkUnnamed22(o.addAttachments!); + checkUnnamed24(o.addAttachments!); } buildCounterModifyAttachmentsRequest--; } @@ -1910,12 +2027,12 @@ void checkModifyCourseWorkAssigneesRequest( buildCounterModifyCourseWorkAssigneesRequest--; } -core.List buildUnnamed23() => [ +core.List buildUnnamed25() => [ 'foo', 'foo', ]; -void checkUnnamed23(core.List o) { +void checkUnnamed25(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -1927,12 +2044,12 @@ void checkUnnamed23(core.List o) { ); } -core.List buildUnnamed24() => [ +core.List buildUnnamed26() => [ 'foo', 'foo', ]; -void checkUnnamed24(core.List o) { +void checkUnnamed26(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -1949,8 +2066,8 @@ api.ModifyIndividualStudentsOptions buildModifyIndividualStudentsOptions() { final o = api.ModifyIndividualStudentsOptions(); buildCounterModifyIndividualStudentsOptions++; if (buildCounterModifyIndividualStudentsOptions < 3) { - o.addStudentIds = buildUnnamed23(); - o.removeStudentIds = buildUnnamed24(); + o.addStudentIds = buildUnnamed25(); + o.removeStudentIds = buildUnnamed26(); } buildCounterModifyIndividualStudentsOptions--; return o; @@ -1960,18 +2077,18 @@ void checkModifyIndividualStudentsOptions( api.ModifyIndividualStudentsOptions o) { buildCounterModifyIndividualStudentsOptions++; if (buildCounterModifyIndividualStudentsOptions < 3) { - checkUnnamed23(o.addStudentIds!); - checkUnnamed24(o.removeStudentIds!); + checkUnnamed25(o.addStudentIds!); + checkUnnamed26(o.removeStudentIds!); } buildCounterModifyIndividualStudentsOptions--; } -core.List buildUnnamed25() => [ +core.List buildUnnamed27() => [ 'foo', 'foo', ]; -void checkUnnamed25(core.List o) { +void checkUnnamed27(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -1988,7 +2105,7 @@ api.MultipleChoiceQuestion buildMultipleChoiceQuestion() { final o = api.MultipleChoiceQuestion(); buildCounterMultipleChoiceQuestion++; if (buildCounterMultipleChoiceQuestion < 3) { - o.choices = buildUnnamed25(); + o.choices = buildUnnamed27(); } buildCounterMultipleChoiceQuestion--; return o; @@ -1997,7 +2114,7 @@ api.MultipleChoiceQuestion buildMultipleChoiceQuestion() { void checkMultipleChoiceQuestion(api.MultipleChoiceQuestion o) { buildCounterMultipleChoiceQuestion++; if (buildCounterMultipleChoiceQuestion < 3) { - checkUnnamed25(o.choices!); + checkUnnamed27(o.choices!); } buildCounterMultipleChoiceQuestion--; } @@ -2118,6 +2235,98 @@ void checkReturnStudentSubmissionRequest(api.ReturnStudentSubmissionRequest o) { 
buildCounterReturnStudentSubmissionRequest--; } +core.List buildUnnamed28() => [ + buildCriterion(), + buildCriterion(), + ]; + +void checkUnnamed28(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkCriterion(o[0]); + checkCriterion(o[1]); +} + +core.int buildCounterRubric = 0; +api.Rubric buildRubric() { + final o = api.Rubric(); + buildCounterRubric++; + if (buildCounterRubric < 3) { + o.courseId = 'foo'; + o.courseWorkId = 'foo'; + o.creationTime = 'foo'; + o.criteria = buildUnnamed28(); + o.id = 'foo'; + o.sourceSpreadsheetId = 'foo'; + o.updateTime = 'foo'; + } + buildCounterRubric--; + return o; +} + +void checkRubric(api.Rubric o) { + buildCounterRubric++; + if (buildCounterRubric < 3) { + unittest.expect( + o.courseId!, + unittest.equals('foo'), + ); + unittest.expect( + o.courseWorkId!, + unittest.equals('foo'), + ); + unittest.expect( + o.creationTime!, + unittest.equals('foo'), + ); + checkUnnamed28(o.criteria!); + unittest.expect( + o.id!, + unittest.equals('foo'), + ); + unittest.expect( + o.sourceSpreadsheetId!, + unittest.equals('foo'), + ); + unittest.expect( + o.updateTime!, + unittest.equals('foo'), + ); + } + buildCounterRubric--; +} + +core.int buildCounterRubricGrade = 0; +api.RubricGrade buildRubricGrade() { + final o = api.RubricGrade(); + buildCounterRubricGrade++; + if (buildCounterRubricGrade < 3) { + o.criterionId = 'foo'; + o.levelId = 'foo'; + o.points = 42.0; + } + buildCounterRubricGrade--; + return o; +} + +void checkRubricGrade(api.RubricGrade o) { + buildCounterRubricGrade++; + if (buildCounterRubricGrade < 3) { + unittest.expect( + o.criterionId!, + unittest.equals('foo'), + ); + unittest.expect( + o.levelId!, + unittest.equals('foo'), + ); + unittest.expect( + o.points!, + unittest.equals(42.0), + ); + } + buildCounterRubricGrade--; +} + core.int buildCounterSharedDriveFile = 0; api.SharedDriveFile buildSharedDriveFile() { final o = api.SharedDriveFile(); @@ -2249,12 +2458,34 @@ void checkStudentContext(api.StudentContext o) { buildCounterStudentContext--; } -core.List buildUnnamed26() => [ +core.Map buildUnnamed29() => { + 'x': buildRubricGrade(), + 'y': buildRubricGrade(), + }; + +void checkUnnamed29(core.Map o) { + unittest.expect(o, unittest.hasLength(2)); + checkRubricGrade(o['x']!); + checkRubricGrade(o['y']!); +} + +core.Map buildUnnamed30() => { + 'x': buildRubricGrade(), + 'y': buildRubricGrade(), + }; + +void checkUnnamed30(core.Map o) { + unittest.expect(o, unittest.hasLength(2)); + checkRubricGrade(o['x']!); + checkRubricGrade(o['y']!); +} + +core.List buildUnnamed31() => [ buildSubmissionHistory(), buildSubmissionHistory(), ]; -void checkUnnamed26(core.List o) { +void checkUnnamed31(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkSubmissionHistory(o[0]); checkSubmissionHistory(o[1]); @@ -2267,6 +2498,7 @@ api.StudentSubmission buildStudentSubmission() { if (buildCounterStudentSubmission < 3) { o.alternateLink = 'foo'; o.assignedGrade = 42.0; + o.assignedRubricGrades = buildUnnamed29(); o.assignmentSubmission = buildAssignmentSubmission(); o.associatedWithDeveloper = true; o.courseId = 'foo'; @@ -2274,12 +2506,13 @@ api.StudentSubmission buildStudentSubmission() { o.courseWorkType = 'foo'; o.creationTime = 'foo'; o.draftGrade = 42.0; + o.draftRubricGrades = buildUnnamed30(); o.id = 'foo'; o.late = true; o.multipleChoiceSubmission = buildMultipleChoiceSubmission(); o.shortAnswerSubmission = buildShortAnswerSubmission(); o.state = 'foo'; - o.submissionHistory = buildUnnamed26(); + o.submissionHistory = 
buildUnnamed31(); o.updateTime = 'foo'; o.userId = 'foo'; } @@ -2298,6 +2531,7 @@ void checkStudentSubmission(api.StudentSubmission o) { o.assignedGrade!, unittest.equals(42.0), ); + checkUnnamed29(o.assignedRubricGrades!); checkAssignmentSubmission(o.assignmentSubmission!); unittest.expect(o.associatedWithDeveloper!, unittest.isTrue); unittest.expect( @@ -2320,6 +2554,7 @@ void checkStudentSubmission(api.StudentSubmission o) { o.draftGrade!, unittest.equals(42.0), ); + checkUnnamed30(o.draftRubricGrades!); unittest.expect( o.id!, unittest.equals('foo'), @@ -2331,7 +2566,7 @@ void checkStudentSubmission(api.StudentSubmission o) { o.state!, unittest.equals('foo'), ); - checkUnnamed26(o.submissionHistory!); + checkUnnamed31(o.submissionHistory!); unittest.expect( o.updateTime!, unittest.equals('foo'), @@ -2498,12 +2733,12 @@ void checkTurnInStudentSubmissionRequest(api.TurnInStudentSubmissionRequest o) { buildCounterTurnInStudentSubmissionRequest--; } -core.List buildUnnamed27() => [ +core.List buildUnnamed32() => [ buildGlobalPermission(), buildGlobalPermission(), ]; -void checkUnnamed27(core.List o) { +void checkUnnamed32(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGlobalPermission(o[0]); checkGlobalPermission(o[1]); @@ -2517,7 +2752,7 @@ api.UserProfile buildUserProfile() { o.emailAddress = 'foo'; o.id = 'foo'; o.name = buildName(); - o.permissions = buildUnnamed27(); + o.permissions = buildUnnamed32(); o.photoUrl = 'foo'; o.verifiedTeacher = true; } @@ -2537,7 +2772,7 @@ void checkUserProfile(api.UserProfile o) { unittest.equals('foo'), ); checkName(o.name!); - checkUnnamed27(o.permissions!); + checkUnnamed32(o.permissions!); unittest.expect( o.photoUrl!, unittest.equals('foo'), @@ -2584,12 +2819,12 @@ void checkYouTubeVideo(api.YouTubeVideo o) { buildCounterYouTubeVideo--; } -core.List buildUnnamed28() => [ +core.List buildUnnamed33() => [ 'foo', 'foo', ]; -void checkUnnamed28(core.List o) { +void checkUnnamed33(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -2601,12 +2836,12 @@ void checkUnnamed28(core.List o) { ); } -core.List buildUnnamed29() => [ +core.List buildUnnamed34() => [ 'foo', 'foo', ]; -void checkUnnamed29(core.List o) { +void checkUnnamed34(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -2618,12 +2853,12 @@ void checkUnnamed29(core.List o) { ); } -core.List buildUnnamed30() => [ +core.List buildUnnamed35() => [ 'foo', 'foo', ]; -void checkUnnamed30(core.List o) { +void checkUnnamed35(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -2635,12 +2870,12 @@ void checkUnnamed30(core.List o) { ); } -core.List buildUnnamed31() => [ +core.List buildUnnamed36() => [ 'foo', 'foo', ]; -void checkUnnamed31(core.List o) { +void checkUnnamed36(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -2652,12 +2887,12 @@ void checkUnnamed31(core.List o) { ); } -core.List buildUnnamed32() => [ +core.List buildUnnamed37() => [ 'foo', 'foo', ]; -void checkUnnamed32(core.List o) { +void checkUnnamed37(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -2669,12 +2904,12 @@ void checkUnnamed32(core.List o) { ); } -core.List buildUnnamed33() => [ +core.List buildUnnamed38() => [ 'foo', 'foo', ]; -void checkUnnamed33(core.List o) { +void checkUnnamed38(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -2857,6 +3092,16 @@ void main() { }); }); + 
unittest.group('obj-schema-Criterion', () { + unittest.test('to-json--from-json', () async { + final o = buildCriterion(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.Criterion.fromJson(oJson as core.Map); + checkCriterion(od); + }); + }); + unittest.group('obj-schema-Date', () { unittest.test('to-json--from-json', () async { final o = buildDate(); @@ -3007,6 +3252,16 @@ void main() { }); }); + unittest.group('obj-schema-Level', () { + unittest.test('to-json--from-json', () async { + final o = buildLevel(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.Level.fromJson(oJson as core.Map); + checkLevel(od); + }); + }); + unittest.group('obj-schema-Link', () { unittest.test('to-json--from-json', () async { final o = buildLink(); @@ -3107,6 +3362,16 @@ void main() { }); }); + unittest.group('obj-schema-ListRubricsResponse', () { + unittest.test('to-json--from-json', () async { + final o = buildListRubricsResponse(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.ListRubricsResponse.fromJson( + oJson as core.Map); + checkListRubricsResponse(od); + }); + }); + unittest.group('obj-schema-ListStudentSubmissionsResponse', () { unittest.test('to-json--from-json', () async { final o = buildListStudentSubmissionsResponse(); @@ -3257,6 +3522,26 @@ void main() { }); }); + unittest.group('obj-schema-Rubric', () { + unittest.test('to-json--from-json', () async { + final o = buildRubric(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.Rubric.fromJson(oJson as core.Map); + checkRubric(od); + }); + }); + + unittest.group('obj-schema-RubricGrade', () { + unittest.test('to-json--from-json', () async { + final o = buildRubricGrade(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.RubricGrade.fromJson( + oJson as core.Map); + checkRubricGrade(od); + }); + }); + unittest.group('obj-schema-SharedDriveFile', () { unittest.test('to-json--from-json', () async { final o = buildSharedDriveFile(); @@ -3570,7 +3855,7 @@ void main() { unittest.test('method--list', () async { final mock = HttpServerMock(); final res = api.ClassroomApi(mock).courses; - final arg_courseStates = buildUnnamed28(); + final arg_courseStates = buildUnnamed33(); final arg_pageSize = 42; final arg_pageToken = 'foo'; final arg_studentId = 'foo'; @@ -4325,7 +4610,7 @@ void main() { final mock = HttpServerMock(); final res = api.ClassroomApi(mock).courses.announcements; final arg_courseId = 'foo'; - final arg_announcementStates = buildUnnamed29(); + final arg_announcementStates = buildUnnamed34(); final arg_orderBy = 'foo'; final arg_pageSize = 42; final arg_pageToken = 'foo'; @@ -5392,7 +5677,7 @@ void main() { final mock = HttpServerMock(); final res = api.ClassroomApi(mock).courses.courseWork; final arg_courseId = 'foo'; - final arg_courseWorkStates = buildUnnamed30(); + final arg_courseWorkStates = buildUnnamed35(); final arg_orderBy = 'foo'; final arg_pageSize = 42; final arg_pageToken = 'foo'; @@ -5647,6 +5932,103 @@ void main() { updateMask: arg_updateMask, $fields: arg_$fields); checkCourseWork(response as api.CourseWork); }); + + unittest.test('method--updateRubric', () async { + final mock = HttpServerMock(); + final res = api.ClassroomApi(mock).courses.courseWork; + final arg_request = buildRubric(); + final arg_courseId = 'foo'; + final arg_courseWorkId = 'foo'; + final arg_id = 'foo'; + final arg_updateMask = 'foo'; + final arg_$fields = 'foo'; + 
mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = + api.Rubric.fromJson(json as core.Map); + checkRubric(obj); + + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 11), + unittest.equals('v1/courses/'), + ); + pathOffset += 11; + index = path.indexOf('/courseWork/', pathOffset); + unittest.expect(index >= 0, unittest.isTrue); + subPart = + core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); + pathOffset = index; + unittest.expect( + subPart, + unittest.equals('$arg_courseId'), + ); + unittest.expect( + path.substring(pathOffset, pathOffset + 12), + unittest.equals('/courseWork/'), + ); + pathOffset += 12; + index = path.indexOf('/rubric', pathOffset); + unittest.expect(index >= 0, unittest.isTrue); + subPart = + core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); + pathOffset = index; + unittest.expect( + subPart, + unittest.equals('$arg_courseWorkId'), + ); + unittest.expect( + path.substring(pathOffset, pathOffset + 7), + unittest.equals('/rubric'), + ); + pathOffset += 7; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['id']!.first, + unittest.equals(arg_id), + ); + unittest.expect( + queryMap['updateMask']!.first, + unittest.equals(arg_updateMask), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildRubric()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.updateRubric( + arg_request, arg_courseId, arg_courseWorkId, + id: arg_id, updateMask: arg_updateMask, $fields: arg_$fields); + checkRubric(response as api.Rubric); + }); }); unittest.group('resource-CoursesCourseWorkAddOnAttachmentsResource', () { @@ -6382,6 +6764,463 @@ void main() { }); }); + unittest.group('resource-CoursesCourseWorkRubricsResource', () { + unittest.test('method--create', () async { + final mock = HttpServerMock(); + final res = api.ClassroomApi(mock).courses.courseWork.rubrics; + final arg_request = buildRubric(); + final arg_courseId = 'foo'; + final arg_courseWorkId = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = + api.Rubric.fromJson(json as core.Map); + checkRubric(obj); + + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 11), + unittest.equals('v1/courses/'), + ); + pathOffset += 11; + index = path.indexOf('/courseWork/', pathOffset); + unittest.expect(index >= 0, unittest.isTrue); + subPart = + core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); + pathOffset = index; + unittest.expect( + subPart, + unittest.equals('$arg_courseId'), + ); + unittest.expect( + 
path.substring(pathOffset, pathOffset + 12), + unittest.equals('/courseWork/'), + ); + pathOffset += 12; + index = path.indexOf('/rubrics', pathOffset); + unittest.expect(index >= 0, unittest.isTrue); + subPart = + core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); + pathOffset = index; + unittest.expect( + subPart, + unittest.equals('$arg_courseWorkId'), + ); + unittest.expect( + path.substring(pathOffset, pathOffset + 8), + unittest.equals('/rubrics'), + ); + pathOffset += 8; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildRubric()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.create( + arg_request, arg_courseId, arg_courseWorkId, + $fields: arg_$fields); + checkRubric(response as api.Rubric); + }); + + unittest.test('method--delete', () async { + final mock = HttpServerMock(); + final res = api.ClassroomApi(mock).courses.courseWork.rubrics; + final arg_courseId = 'foo'; + final arg_courseWorkId = 'foo'; + final arg_id = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 11), + unittest.equals('v1/courses/'), + ); + pathOffset += 11; + index = path.indexOf('/courseWork/', pathOffset); + unittest.expect(index >= 0, unittest.isTrue); + subPart = + core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); + pathOffset = index; + unittest.expect( + subPart, + unittest.equals('$arg_courseId'), + ); + unittest.expect( + path.substring(pathOffset, pathOffset + 12), + unittest.equals('/courseWork/'), + ); + pathOffset += 12; + index = path.indexOf('/rubrics/', pathOffset); + unittest.expect(index >= 0, unittest.isTrue); + subPart = + core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); + pathOffset = index; + unittest.expect( + subPart, + unittest.equals('$arg_courseWorkId'), + ); + unittest.expect( + path.substring(pathOffset, pathOffset + 9), + unittest.equals('/rubrics/'), + ); + pathOffset += 9; + subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset)); + pathOffset = path.length; + unittest.expect( + subPart, + unittest.equals('$arg_id'), + ); + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = 
convert.json.encode(buildEmpty()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.delete(arg_courseId, arg_courseWorkId, arg_id, + $fields: arg_$fields); + checkEmpty(response as api.Empty); + }); + + unittest.test('method--get', () async { + final mock = HttpServerMock(); + final res = api.ClassroomApi(mock).courses.courseWork.rubrics; + final arg_courseId = 'foo'; + final arg_courseWorkId = 'foo'; + final arg_id = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 11), + unittest.equals('v1/courses/'), + ); + pathOffset += 11; + index = path.indexOf('/courseWork/', pathOffset); + unittest.expect(index >= 0, unittest.isTrue); + subPart = + core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); + pathOffset = index; + unittest.expect( + subPart, + unittest.equals('$arg_courseId'), + ); + unittest.expect( + path.substring(pathOffset, pathOffset + 12), + unittest.equals('/courseWork/'), + ); + pathOffset += 12; + index = path.indexOf('/rubrics/', pathOffset); + unittest.expect(index >= 0, unittest.isTrue); + subPart = + core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); + pathOffset = index; + unittest.expect( + subPart, + unittest.equals('$arg_courseWorkId'), + ); + unittest.expect( + path.substring(pathOffset, pathOffset + 9), + unittest.equals('/rubrics/'), + ); + pathOffset += 9; + subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset)); + pathOffset = path.length; + unittest.expect( + subPart, + unittest.equals('$arg_id'), + ); + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildRubric()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.get(arg_courseId, arg_courseWorkId, arg_id, + $fields: arg_$fields); + checkRubric(response as api.Rubric); + }); + + unittest.test('method--list', () async { + final mock = HttpServerMock(); + final res = api.ClassroomApi(mock).courses.courseWork.rubrics; + final arg_courseId = 'foo'; + final arg_courseWorkId = 'foo'; + final arg_pageSize = 42; + final arg_pageToken = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 11), + unittest.equals('v1/courses/'), + ); + pathOffset += 11; + index = path.indexOf('/courseWork/', pathOffset); + unittest.expect(index >= 0, unittest.isTrue); + subPart = + 
core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); + pathOffset = index; + unittest.expect( + subPart, + unittest.equals('$arg_courseId'), + ); + unittest.expect( + path.substring(pathOffset, pathOffset + 12), + unittest.equals('/courseWork/'), + ); + pathOffset += 12; + index = path.indexOf('/rubrics', pathOffset); + unittest.expect(index >= 0, unittest.isTrue); + subPart = + core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); + pathOffset = index; + unittest.expect( + subPart, + unittest.equals('$arg_courseWorkId'), + ); + unittest.expect( + path.substring(pathOffset, pathOffset + 8), + unittest.equals('/rubrics'), + ); + pathOffset += 8; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + core.int.parse(queryMap['pageSize']!.first), + unittest.equals(arg_pageSize), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildListRubricsResponse()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.list(arg_courseId, arg_courseWorkId, + pageSize: arg_pageSize, + pageToken: arg_pageToken, + $fields: arg_$fields); + checkListRubricsResponse(response as api.ListRubricsResponse); + }); + + unittest.test('method--patch', () async { + final mock = HttpServerMock(); + final res = api.ClassroomApi(mock).courses.courseWork.rubrics; + final arg_request = buildRubric(); + final arg_courseId = 'foo'; + final arg_courseWorkId = 'foo'; + final arg_id = 'foo'; + final arg_updateMask = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = + api.Rubric.fromJson(json as core.Map); + checkRubric(obj); + + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 11), + unittest.equals('v1/courses/'), + ); + pathOffset += 11; + index = path.indexOf('/courseWork/', pathOffset); + unittest.expect(index >= 0, unittest.isTrue); + subPart = + core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); + pathOffset = index; + unittest.expect( + subPart, + unittest.equals('$arg_courseId'), + ); + unittest.expect( + path.substring(pathOffset, pathOffset + 12), + unittest.equals('/courseWork/'), + ); + pathOffset += 12; + index = path.indexOf('/rubrics/', pathOffset); + unittest.expect(index >= 0, unittest.isTrue); + subPart = + core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); + pathOffset = index; + unittest.expect( + subPart, + unittest.equals('$arg_courseWorkId'), + ); + unittest.expect( + path.substring(pathOffset, pathOffset + 9), + unittest.equals('/rubrics/'), + ); + pathOffset += 9; + subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset)); + pathOffset = path.length; + unittest.expect( + subPart, + unittest.equals('$arg_id'), + ); + + 
final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['updateMask']!.first, + unittest.equals(arg_updateMask), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildRubric()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.patch( + arg_request, arg_courseId, arg_courseWorkId, arg_id, + updateMask: arg_updateMask, $fields: arg_$fields); + checkRubric(response as api.Rubric); + }); + }); + unittest.group('resource-CoursesCourseWorkStudentSubmissionsResource', () { unittest.test('method--get', () async { final mock = HttpServerMock(); @@ -6479,7 +7318,7 @@ void main() { final arg_late = 'foo'; final arg_pageSize = 42; final arg_pageToken = 'foo'; - final arg_states = buildUnnamed31(); + final arg_states = buildUnnamed36(); final arg_userId = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -7412,7 +8251,7 @@ void main() { final mock = HttpServerMock(); final res = api.ClassroomApi(mock).courses.courseWorkMaterials; final arg_courseId = 'foo'; - final arg_courseWorkMaterialStates = buildUnnamed32(); + final arg_courseWorkMaterialStates = buildUnnamed37(); final arg_materialDriveId = 'foo'; final arg_materialLink = 'foo'; final arg_orderBy = 'foo'; @@ -10539,7 +11378,7 @@ void main() { final arg_invitedEmailAddress = 'foo'; final arg_pageSize = 42; final arg_pageToken = 'foo'; - final arg_states = buildUnnamed33(); + final arg_states = buildUnnamed38(); final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; diff --git a/generated/googleapis/test/cloudasset/v1_test.dart b/generated/googleapis/test/cloudasset/v1_test.dart index 71153a00f..31e214a84 100644 --- a/generated/googleapis/test/cloudasset/v1_test.dart +++ b/generated/googleapis/test/cloudasset/v1_test.dart @@ -2607,6 +2607,7 @@ api.GoogleIdentityAccesscontextmanagerV1EgressPolicy if (buildCounterGoogleIdentityAccesscontextmanagerV1EgressPolicy < 3) { o.egressFrom = buildGoogleIdentityAccesscontextmanagerV1EgressFrom(); o.egressTo = buildGoogleIdentityAccesscontextmanagerV1EgressTo(); + o.title = 'foo'; } buildCounterGoogleIdentityAccesscontextmanagerV1EgressPolicy--; return o; @@ -2618,6 +2619,10 @@ void checkGoogleIdentityAccesscontextmanagerV1EgressPolicy( if (buildCounterGoogleIdentityAccesscontextmanagerV1EgressPolicy < 3) { checkGoogleIdentityAccesscontextmanagerV1EgressFrom(o.egressFrom!); checkGoogleIdentityAccesscontextmanagerV1EgressTo(o.egressTo!); + unittest.expect( + o.title!, + unittest.equals('foo'), + ); } buildCounterGoogleIdentityAccesscontextmanagerV1EgressPolicy--; } @@ -2784,6 +2789,7 @@ api.GoogleIdentityAccesscontextmanagerV1IngressPolicy if (buildCounterGoogleIdentityAccesscontextmanagerV1IngressPolicy < 3) { o.ingressFrom = buildGoogleIdentityAccesscontextmanagerV1IngressFrom(); o.ingressTo = buildGoogleIdentityAccesscontextmanagerV1IngressTo(); + o.title = 'foo'; } 
buildCounterGoogleIdentityAccesscontextmanagerV1IngressPolicy--; return o; @@ -2795,6 +2801,10 @@ void checkGoogleIdentityAccesscontextmanagerV1IngressPolicy( if (buildCounterGoogleIdentityAccesscontextmanagerV1IngressPolicy < 3) { checkGoogleIdentityAccesscontextmanagerV1IngressFrom(o.ingressFrom!); checkGoogleIdentityAccesscontextmanagerV1IngressTo(o.ingressTo!); + unittest.expect( + o.title!, + unittest.equals('foo'), + ); } buildCounterGoogleIdentityAccesscontextmanagerV1IngressPolicy--; } @@ -2948,6 +2958,7 @@ api.GoogleIdentityAccesscontextmanagerV1ServicePerimeter buildCounterGoogleIdentityAccesscontextmanagerV1ServicePerimeter++; if (buildCounterGoogleIdentityAccesscontextmanagerV1ServicePerimeter < 3) { o.description = 'foo'; + o.etag = 'foo'; o.name = 'foo'; o.perimeterType = 'foo'; o.spec = buildGoogleIdentityAccesscontextmanagerV1ServicePerimeterConfig(); @@ -2968,6 +2979,10 @@ void checkGoogleIdentityAccesscontextmanagerV1ServicePerimeter( o.description!, unittest.equals('foo'), ); + unittest.expect( + o.etag!, + unittest.equals('foo'), + ); unittest.expect( o.name!, unittest.equals('foo'), diff --git a/generated/googleapis/test/cloudbuild/v1_test.dart b/generated/googleapis/test/cloudbuild/v1_test.dart index 253b632b0..f5e7a8b1a 100644 --- a/generated/googleapis/test/cloudbuild/v1_test.dart +++ b/generated/googleapis/test/cloudbuild/v1_test.dart @@ -2803,6 +2803,7 @@ api.PrivatePoolV1Config buildPrivatePoolV1Config() { buildCounterPrivatePoolV1Config++; if (buildCounterPrivatePoolV1Config < 3) { o.networkConfig = buildNetworkConfig(); + o.privateServiceConnect = buildPrivateServiceConnect(); o.workerConfig = buildWorkerConfig(); } buildCounterPrivatePoolV1Config--; @@ -2813,11 +2814,38 @@ void checkPrivatePoolV1Config(api.PrivatePoolV1Config o) { buildCounterPrivatePoolV1Config++; if (buildCounterPrivatePoolV1Config < 3) { checkNetworkConfig(o.networkConfig!); + checkPrivateServiceConnect(o.privateServiceConnect!); checkWorkerConfig(o.workerConfig!); } buildCounterPrivatePoolV1Config--; } +core.int buildCounterPrivateServiceConnect = 0; +api.PrivateServiceConnect buildPrivateServiceConnect() { + final o = api.PrivateServiceConnect(); + buildCounterPrivateServiceConnect++; + if (buildCounterPrivateServiceConnect < 3) { + o.networkAttachment = 'foo'; + o.publicIpAddressDisabled = true; + o.routeAllTraffic = true; + } + buildCounterPrivateServiceConnect--; + return o; +} + +void checkPrivateServiceConnect(api.PrivateServiceConnect o) { + buildCounterPrivateServiceConnect++; + if (buildCounterPrivateServiceConnect < 3) { + unittest.expect( + o.networkAttachment!, + unittest.equals('foo'), + ); + unittest.expect(o.publicIpAddressDisabled!, unittest.isTrue); + unittest.expect(o.routeAllTraffic!, unittest.isTrue); + } + buildCounterPrivateServiceConnect--; +} + core.int buildCounterPubsubConfig = 0; api.PubsubConfig buildPubsubConfig() { final o = api.PubsubConfig(); @@ -4552,6 +4580,16 @@ void main() { }); }); + unittest.group('obj-schema-PrivateServiceConnect', () { + unittest.test('to-json--from-json', () async { + final o = buildPrivateServiceConnect(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.PrivateServiceConnect.fromJson( + oJson as core.Map); + checkPrivateServiceConnect(od); + }); + }); + unittest.group('obj-schema-PubsubConfig', () { unittest.test('to-json--from-json', () async { final o = buildPubsubConfig(); diff --git a/generated/googleapis/test/cloudchannel/v1_test.dart b/generated/googleapis/test/cloudchannel/v1_test.dart index 
2b0c5e66c..672430467 100644 --- a/generated/googleapis/test/cloudchannel/v1_test.dart +++ b/generated/googleapis/test/cloudchannel/v1_test.dart @@ -253,6 +253,7 @@ api.GoogleCloudChannelV1ChangeOfferRequest o.billingAccount = 'foo'; o.offer = 'foo'; o.parameters = buildUnnamed0(); + o.priceReferenceId = 'foo'; o.purchaseOrderId = 'foo'; o.requestId = 'foo'; } @@ -273,6 +274,10 @@ void checkGoogleCloudChannelV1ChangeOfferRequest( unittest.equals('foo'), ); checkUnnamed0(o.parameters!); + unittest.expect( + o.priceReferenceId!, + unittest.equals('foo'), + ); unittest.expect( o.purchaseOrderId!, unittest.equals('foo'), @@ -1069,6 +1074,7 @@ api.GoogleCloudChannelV1Entitlement buildGoogleCloudChannelV1Entitlement() { o.name = 'foo'; o.offer = 'foo'; o.parameters = buildUnnamed6(); + o.priceReferenceId = 'foo'; o.provisionedService = buildGoogleCloudChannelV1ProvisionedService(); o.provisioningState = 'foo'; o.purchaseOrderId = 'foo'; @@ -1103,6 +1109,10 @@ void checkGoogleCloudChannelV1Entitlement( unittest.equals('foo'), ); checkUnnamed6(o.parameters!); + unittest.expect( + o.priceReferenceId!, + unittest.equals('foo'), + ); checkGoogleCloudChannelV1ProvisionedService(o.provisionedService!); unittest.expect( o.provisioningState!, @@ -2625,6 +2635,7 @@ api.GoogleCloudChannelV1PurchasableOffer buildCounterGoogleCloudChannelV1PurchasableOffer++; if (buildCounterGoogleCloudChannelV1PurchasableOffer < 3) { o.offer = buildGoogleCloudChannelV1Offer(); + o.priceReferenceId = 'foo'; } buildCounterGoogleCloudChannelV1PurchasableOffer--; return o; @@ -2635,6 +2646,10 @@ void checkGoogleCloudChannelV1PurchasableOffer( buildCounterGoogleCloudChannelV1PurchasableOffer++; if (buildCounterGoogleCloudChannelV1PurchasableOffer < 3) { checkGoogleCloudChannelV1Offer(o.offer!); + unittest.expect( + o.priceReferenceId!, + unittest.equals('foo'), + ); } buildCounterGoogleCloudChannelV1PurchasableOffer--; } @@ -2701,6 +2716,8 @@ api.GoogleCloudChannelV1RegisterSubscriberRequest final o = api.GoogleCloudChannelV1RegisterSubscriberRequest(); buildCounterGoogleCloudChannelV1RegisterSubscriberRequest++; if (buildCounterGoogleCloudChannelV1RegisterSubscriberRequest < 3) { + o.account = 'foo'; + o.integrator = 'foo'; o.serviceAccount = 'foo'; } buildCounterGoogleCloudChannelV1RegisterSubscriberRequest--; @@ -2711,6 +2728,14 @@ void checkGoogleCloudChannelV1RegisterSubscriberRequest( api.GoogleCloudChannelV1RegisterSubscriberRequest o) { buildCounterGoogleCloudChannelV1RegisterSubscriberRequest++; if (buildCounterGoogleCloudChannelV1RegisterSubscriberRequest < 3) { + unittest.expect( + o.account!, + unittest.equals('foo'), + ); + unittest.expect( + o.integrator!, + unittest.equals('foo'), + ); unittest.expect( o.serviceAccount!, unittest.equals('foo'), @@ -3385,6 +3410,7 @@ api.GoogleCloudChannelV1TransferableOffer buildCounterGoogleCloudChannelV1TransferableOffer++; if (buildCounterGoogleCloudChannelV1TransferableOffer < 3) { o.offer = buildGoogleCloudChannelV1Offer(); + o.priceReferenceId = 'foo'; } buildCounterGoogleCloudChannelV1TransferableOffer--; return o; @@ -3395,6 +3421,10 @@ void checkGoogleCloudChannelV1TransferableOffer( buildCounterGoogleCloudChannelV1TransferableOffer++; if (buildCounterGoogleCloudChannelV1TransferableOffer < 3) { checkGoogleCloudChannelV1Offer(o.offer!); + unittest.expect( + o.priceReferenceId!, + unittest.equals('foo'), + ); } buildCounterGoogleCloudChannelV1TransferableOffer--; } @@ -3455,6 +3485,8 @@ api.GoogleCloudChannelV1UnregisterSubscriberRequest final o = 
api.GoogleCloudChannelV1UnregisterSubscriberRequest(); buildCounterGoogleCloudChannelV1UnregisterSubscriberRequest++; if (buildCounterGoogleCloudChannelV1UnregisterSubscriberRequest < 3) { + o.account = 'foo'; + o.integrator = 'foo'; o.serviceAccount = 'foo'; } buildCounterGoogleCloudChannelV1UnregisterSubscriberRequest--; @@ -3465,6 +3497,14 @@ void checkGoogleCloudChannelV1UnregisterSubscriberRequest( api.GoogleCloudChannelV1UnregisterSubscriberRequest o) { buildCounterGoogleCloudChannelV1UnregisterSubscriberRequest++; if (buildCounterGoogleCloudChannelV1UnregisterSubscriberRequest < 3) { + unittest.expect( + o.account!, + unittest.equals('foo'), + ); + unittest.expect( + o.integrator!, + unittest.equals('foo'), + ); unittest.expect( o.serviceAccount!, unittest.equals('foo'), @@ -5380,6 +5420,7 @@ void main() { final mock = HttpServerMock(); final res = api.CloudchannelApi(mock).accounts; final arg_account = 'foo'; + final arg_integrator = 'foo'; final arg_pageSize = 42; final arg_pageToken = 'foo'; final arg_$fields = 'foo'; @@ -5415,6 +5456,10 @@ void main() { ); } } + unittest.expect( + queryMap['integrator']!.first, + unittest.equals(arg_integrator), + ); unittest.expect( core.int.parse(queryMap['pageSize']!.first), unittest.equals(arg_pageSize), @@ -5436,6 +5481,7 @@ void main() { return async.Future.value(stringResponse(200, h, resp)); }), true); final response = await res.listSubscribers(arg_account, + integrator: arg_integrator, pageSize: arg_pageSize, pageToken: arg_pageToken, $fields: arg_$fields); @@ -8892,6 +8938,204 @@ void main() { }); }); + unittest.group('resource-IntegratorsResource', () { + unittest.test('method--listSubscribers', () async { + final mock = HttpServerMock(); + final res = api.CloudchannelApi(mock).integrators; + final arg_integrator = 'foo'; + final arg_account = 'foo'; + final arg_pageSize = 42; + final arg_pageToken = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['account']!.first, + unittest.equals(arg_account), + ); + unittest.expect( + core.int.parse(queryMap['pageSize']!.first), + unittest.equals(arg_pageSize), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json + .encode(buildGoogleCloudChannelV1ListSubscribersResponse()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.listSubscribers(arg_integrator, + account: arg_account, + pageSize: arg_pageSize, + pageToken: arg_pageToken, + 
$fields: arg_$fields); + checkGoogleCloudChannelV1ListSubscribersResponse( + response as api.GoogleCloudChannelV1ListSubscribersResponse); + }); + + unittest.test('method--registerSubscriber', () async { + final mock = HttpServerMock(); + final res = api.CloudchannelApi(mock).integrators; + final arg_request = buildGoogleCloudChannelV1RegisterSubscriberRequest(); + final arg_integrator = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.GoogleCloudChannelV1RegisterSubscriberRequest.fromJson( + json as core.Map); + checkGoogleCloudChannelV1RegisterSubscriberRequest(obj); + + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json + .encode(buildGoogleCloudChannelV1RegisterSubscriberResponse()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.registerSubscriber(arg_request, arg_integrator, + $fields: arg_$fields); + checkGoogleCloudChannelV1RegisterSubscriberResponse( + response as api.GoogleCloudChannelV1RegisterSubscriberResponse); + }); + + unittest.test('method--unregisterSubscriber', () async { + final mock = HttpServerMock(); + final res = api.CloudchannelApi(mock).integrators; + final arg_request = + buildGoogleCloudChannelV1UnregisterSubscriberRequest(); + final arg_integrator = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = + api.GoogleCloudChannelV1UnregisterSubscriberRequest.fromJson( + json as core.Map); + checkGoogleCloudChannelV1UnregisterSubscriberRequest(obj); + + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json + 
.encode(buildGoogleCloudChannelV1UnregisterSubscriberResponse()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.unregisterSubscriber( + arg_request, arg_integrator, + $fields: arg_$fields); + checkGoogleCloudChannelV1UnregisterSubscriberResponse( + response as api.GoogleCloudChannelV1UnregisterSubscriberResponse); + }); + }); + unittest.group('resource-OperationsResource', () { unittest.test('method--cancel', () async { final mock = HttpServerMock(); diff --git a/generated/googleapis/test/cloudcontrolspartner/v1_test.dart b/generated/googleapis/test/cloudcontrolspartner/v1_test.dart index 4c94aba37..0b3365881 100644 --- a/generated/googleapis/test/cloudcontrolspartner/v1_test.dart +++ b/generated/googleapis/test/cloudcontrolspartner/v1_test.dart @@ -197,6 +197,7 @@ api.Customer buildCustomer() { o.displayName = 'foo'; o.isOnboarded = true; o.name = 'foo'; + o.organizationDomain = 'foo'; } buildCounterCustomer--; return o; @@ -215,6 +216,10 @@ void checkCustomer(api.Customer o) { o.name!, unittest.equals('foo'), ); + unittest.expect( + o.organizationDomain!, + unittest.equals('foo'), + ); } buildCounterCustomer--; } @@ -377,6 +382,21 @@ void checkEkmMetadata(api.EkmMetadata o) { buildCounterEkmMetadata--; } +core.int buildCounterEmpty = 0; +api.Empty buildEmpty() { + final o = api.Empty(); + buildCounterEmpty++; + if (buildCounterEmpty < 3) {} + buildCounterEmpty--; + return o; +} + +void checkEmpty(api.Empty o) { + buildCounterEmpty++; + if (buildCounterEmpty < 3) {} + buildCounterEmpty--; +} + core.List buildUnnamed5() => [ 'foo', 'foo', @@ -1171,6 +1191,16 @@ void main() { }); }); + unittest.group('obj-schema-Empty', () { + unittest.test('to-json--from-json', () async { + final o = buildEmpty(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.Empty.fromJson(oJson as core.Map); + checkEmpty(od); + }); + }); + unittest.group('obj-schema-Gcloud', () { unittest.test('to-json--from-json', () async { final o = buildGcloud(); @@ -1366,6 +1396,127 @@ void main() { }); unittest.group('resource-OrganizationsLocationsCustomersResource', () { + unittest.test('method--create', () async { + final mock = HttpServerMock(); + final res = api.CloudControlsPartnerServiceApi(mock) + .organizations + .locations + .customers; + final arg_request = buildCustomer(); + final arg_parent = 'foo'; + final arg_customerId = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = + api.Customer.fromJson(json as core.Map); + checkCustomer(obj); + + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['customerId']!.first, + unittest.equals(arg_customerId), + ); + unittest.expect( + 
queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildCustomer()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.create(arg_request, arg_parent, + customerId: arg_customerId, $fields: arg_$fields); + checkCustomer(response as api.Customer); + }); + + unittest.test('method--delete', () async { + final mock = HttpServerMock(); + final res = api.CloudControlsPartnerServiceApi(mock) + .organizations + .locations + .customers; + final arg_name = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildEmpty()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.delete(arg_name, $fields: arg_$fields); + checkEmpty(response as api.Empty); + }); + unittest.test('method--get', () async { final mock = HttpServerMock(); final res = api.CloudControlsPartnerServiceApi(mock) @@ -1500,6 +1651,72 @@ void main() { $fields: arg_$fields); checkListCustomersResponse(response as api.ListCustomersResponse); }); + + unittest.test('method--patch', () async { + final mock = HttpServerMock(); + final res = api.CloudControlsPartnerServiceApi(mock) + .organizations + .locations + .customers; + final arg_request = buildCustomer(); + final arg_name = 'foo'; + final arg_updateMask = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = + api.Customer.fromJson(json as core.Map); + checkCustomer(obj); + + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['updateMask']!.first, + unittest.equals(arg_updateMask), + 
); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildCustomer()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.patch(arg_request, arg_name, + updateMask: arg_updateMask, $fields: arg_$fields); + checkCustomer(response as api.Customer); + }); }); unittest.group('resource-OrganizationsLocationsCustomersWorkloadsResource', diff --git a/generated/googleapis/test/clouddeploy/v1_test.dart b/generated/googleapis/test/clouddeploy/v1_test.dart index 78b401cd0..1c6e3c866 100644 --- a/generated/googleapis/test/clouddeploy/v1_test.dart +++ b/generated/googleapis/test/clouddeploy/v1_test.dart @@ -638,6 +638,7 @@ api.AutomationRule buildAutomationRule() { o.advanceRolloutRule = buildAdvanceRolloutRule(); o.promoteReleaseRule = buildPromoteReleaseRule(); o.repairRolloutRule = buildRepairRolloutRule(); + o.timedPromoteReleaseRule = buildTimedPromoteReleaseRule(); } buildCounterAutomationRule--; return o; @@ -649,6 +650,7 @@ void checkAutomationRule(api.AutomationRule o) { checkAdvanceRolloutRule(o.advanceRolloutRule!); checkPromoteReleaseRule(o.promoteReleaseRule!); checkRepairRolloutRule(o.repairRolloutRule!); + checkTimedPromoteReleaseRule(o.timedPromoteReleaseRule!); } buildCounterAutomationRule--; } @@ -659,6 +661,7 @@ api.AutomationRuleCondition buildAutomationRuleCondition() { buildCounterAutomationRuleCondition++; if (buildCounterAutomationRuleCondition < 3) { o.targetsPresentCondition = buildTargetsPresentCondition(); + o.timedPromoteReleaseCondition = buildTimedPromoteReleaseCondition(); } buildCounterAutomationRuleCondition--; return o; @@ -668,6 +671,7 @@ void checkAutomationRuleCondition(api.AutomationRuleCondition o) { buildCounterAutomationRuleCondition++; if (buildCounterAutomationRuleCondition < 3) { checkTargetsPresentCondition(o.targetsPresentCondition!); + checkTimedPromoteReleaseCondition(o.timedPromoteReleaseCondition!); } buildCounterAutomationRuleCondition--; } @@ -692,6 +696,7 @@ api.AutomationRun buildAutomationRun() { o.state = 'foo'; o.stateDescription = 'foo'; o.targetId = 'foo'; + o.timedPromoteReleaseOperation = buildTimedPromoteReleaseOperation(); o.updateTime = 'foo'; o.waitUntilTime = 'foo'; } @@ -747,6 +752,7 @@ void checkAutomationRun(api.AutomationRun o) { o.targetId!, unittest.equals('foo'), ); + checkTimedPromoteReleaseOperation(o.timedPromoteReleaseOperation!); unittest.expect( o.updateTime!, unittest.equals('foo'), @@ -5730,6 +5736,33 @@ void checkTargetRender(api.TargetRender o) { buildCounterTargetRender--; } +core.int buildCounterTargets = 0; +api.Targets buildTargets() { + final o = api.Targets(); + buildCounterTargets++; + if (buildCounterTargets < 3) { + o.destinationTargetId = 'foo'; + o.sourceTargetId = 'foo'; + } + buildCounterTargets--; + return o; +} + +void checkTargets(api.Targets o) { + buildCounterTargets++; + if (buildCounterTargets < 3) { + unittest.expect( + o.destinationTargetId!, + unittest.equals('foo'), + ); + unittest.expect( + o.sourceTargetId!, + unittest.equals('foo'), + ); + } + buildCounterTargets--; +} + core.List buildUnnamed108() => [ 'foo', 'foo', @@ -6005,6 +6038,117 @@ void checkTimeWindows(api.TimeWindows o) { buildCounterTimeWindows--; } +core.List buildUnnamed114() => [ + buildTargets(), + buildTargets(), + ]; + +void checkUnnamed114(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkTargets(o[0]); + 
checkTargets(o[1]); +} + +core.int buildCounterTimedPromoteReleaseCondition = 0; +api.TimedPromoteReleaseCondition buildTimedPromoteReleaseCondition() { + final o = api.TimedPromoteReleaseCondition(); + buildCounterTimedPromoteReleaseCondition++; + if (buildCounterTimedPromoteReleaseCondition < 3) { + o.nextPromotionTime = 'foo'; + o.targetsList = buildUnnamed114(); + } + buildCounterTimedPromoteReleaseCondition--; + return o; +} + +void checkTimedPromoteReleaseCondition(api.TimedPromoteReleaseCondition o) { + buildCounterTimedPromoteReleaseCondition++; + if (buildCounterTimedPromoteReleaseCondition < 3) { + unittest.expect( + o.nextPromotionTime!, + unittest.equals('foo'), + ); + checkUnnamed114(o.targetsList!); + } + buildCounterTimedPromoteReleaseCondition--; +} + +core.int buildCounterTimedPromoteReleaseOperation = 0; +api.TimedPromoteReleaseOperation buildTimedPromoteReleaseOperation() { + final o = api.TimedPromoteReleaseOperation(); + buildCounterTimedPromoteReleaseOperation++; + if (buildCounterTimedPromoteReleaseOperation < 3) { + o.phase = 'foo'; + o.release = 'foo'; + o.targetId = 'foo'; + } + buildCounterTimedPromoteReleaseOperation--; + return o; +} + +void checkTimedPromoteReleaseOperation(api.TimedPromoteReleaseOperation o) { + buildCounterTimedPromoteReleaseOperation++; + if (buildCounterTimedPromoteReleaseOperation < 3) { + unittest.expect( + o.phase!, + unittest.equals('foo'), + ); + unittest.expect( + o.release!, + unittest.equals('foo'), + ); + unittest.expect( + o.targetId!, + unittest.equals('foo'), + ); + } + buildCounterTimedPromoteReleaseOperation--; +} + +core.int buildCounterTimedPromoteReleaseRule = 0; +api.TimedPromoteReleaseRule buildTimedPromoteReleaseRule() { + final o = api.TimedPromoteReleaseRule(); + buildCounterTimedPromoteReleaseRule++; + if (buildCounterTimedPromoteReleaseRule < 3) { + o.condition = buildAutomationRuleCondition(); + o.destinationPhase = 'foo'; + o.destinationTargetId = 'foo'; + o.id = 'foo'; + o.schedule = 'foo'; + o.timeZone = 'foo'; + } + buildCounterTimedPromoteReleaseRule--; + return o; +} + +void checkTimedPromoteReleaseRule(api.TimedPromoteReleaseRule o) { + buildCounterTimedPromoteReleaseRule++; + if (buildCounterTimedPromoteReleaseRule < 3) { + checkAutomationRuleCondition(o.condition!); + unittest.expect( + o.destinationPhase!, + unittest.equals('foo'), + ); + unittest.expect( + o.destinationTargetId!, + unittest.equals('foo'), + ); + unittest.expect( + o.id!, + unittest.equals('foo'), + ); + unittest.expect( + o.schedule!, + unittest.equals('foo'), + ); + unittest.expect( + o.timeZone!, + unittest.equals('foo'), + ); + } + buildCounterTimedPromoteReleaseRule--; +} + core.int buildCounterVerifyJob = 0; api.VerifyJob buildVerifyJob() { final o = api.VerifyJob(); @@ -6062,12 +6206,12 @@ void checkVerifyJobRun(api.VerifyJobRun o) { buildCounterVerifyJobRun--; } -core.List buildUnnamed114() => [ +core.List buildUnnamed115() => [ 'foo', 'foo', ]; -void checkUnnamed114(core.List o) { +void checkUnnamed115(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -6084,7 +6228,7 @@ api.WeeklyWindow buildWeeklyWindow() { final o = api.WeeklyWindow(); buildCounterWeeklyWindow++; if (buildCounterWeeklyWindow < 3) { - o.daysOfWeek = buildUnnamed114(); + o.daysOfWeek = buildUnnamed115(); o.endTime = buildTimeOfDay(); o.startTime = buildTimeOfDay(); } @@ -6095,19 +6239,19 @@ api.WeeklyWindow buildWeeklyWindow() { void checkWeeklyWindow(api.WeeklyWindow o) { buildCounterWeeklyWindow++; if (buildCounterWeeklyWindow 
< 3) { - checkUnnamed114(o.daysOfWeek!); + checkUnnamed115(o.daysOfWeek!); checkTimeOfDay(o.endTime!); checkTimeOfDay(o.startTime!); } buildCounterWeeklyWindow--; } -core.List buildUnnamed115() => [ +core.List buildUnnamed116() => [ 'foo', 'foo', ]; -void checkUnnamed115(core.List o) { +void checkUnnamed116(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -6119,12 +6263,12 @@ void checkUnnamed115(core.List o) { ); } -core.List buildUnnamed116() => [ +core.List buildUnnamed117() => [ 'foo', 'foo', ]; -void checkUnnamed116(core.List o) { +void checkUnnamed117(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -7516,6 +7660,16 @@ void main() { }); }); + unittest.group('obj-schema-Targets', () { + unittest.test('to-json--from-json', () async { + final o = buildTargets(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.Targets.fromJson(oJson as core.Map); + checkTargets(od); + }); + }); + unittest.group('obj-schema-TargetsPresentCondition', () { unittest.test('to-json--from-json', () async { final o = buildTargetsPresentCondition(); @@ -7596,6 +7750,36 @@ void main() { }); }); + unittest.group('obj-schema-TimedPromoteReleaseCondition', () { + unittest.test('to-json--from-json', () async { + final o = buildTimedPromoteReleaseCondition(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.TimedPromoteReleaseCondition.fromJson( + oJson as core.Map); + checkTimedPromoteReleaseCondition(od); + }); + }); + + unittest.group('obj-schema-TimedPromoteReleaseOperation', () { + unittest.test('to-json--from-json', () async { + final o = buildTimedPromoteReleaseOperation(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.TimedPromoteReleaseOperation.fromJson( + oJson as core.Map); + checkTimedPromoteReleaseOperation(od); + }); + }); + + unittest.group('obj-schema-TimedPromoteReleaseRule', () { + unittest.test('to-json--from-json', () async { + final o = buildTimedPromoteReleaseRule(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.TimedPromoteReleaseRule.fromJson( + oJson as core.Map); + checkTimedPromoteReleaseRule(od); + }); + }); + unittest.group('obj-schema-VerifyJob', () { unittest.test('to-json--from-json', () async { final o = buildVerifyJob(); @@ -9558,7 +9742,7 @@ void main() { .releases; final arg_request = buildRelease(); final arg_parent = 'foo'; - final arg_overrideDeployPolicy = buildUnnamed115(); + final arg_overrideDeployPolicy = buildUnnamed116(); final arg_releaseId = 'foo'; final arg_requestId = 'foo'; final arg_validateOnly = true; @@ -9975,7 +10159,7 @@ void main() { .rollouts; final arg_request = buildRollout(); final arg_parent = 'foo'; - final arg_overrideDeployPolicy = buildUnnamed116(); + final arg_overrideDeployPolicy = buildUnnamed117(); final arg_requestId = 'foo'; final arg_rolloutId = 'foo'; final arg_startingPhaseId = 'foo'; diff --git a/generated/googleapis/test/cloudidentity/v1_test.dart b/generated/googleapis/test/cloudidentity/v1_test.dart index 4d11d72a5..d23c7452f 100644 --- a/generated/googleapis/test/cloudidentity/v1_test.dart +++ b/generated/googleapis/test/cloudidentity/v1_test.dart @@ -1757,12 +1757,47 @@ void checkListMembershipsResponse(api.ListMembershipsResponse o) { buildCounterListMembershipsResponse--; } -core.List buildUnnamed21() => [ +core.List buildUnnamed21() => [ + buildPolicy(), + buildPolicy(), + ]; + +void checkUnnamed21(core.List o) { + unittest.expect(o, 
unittest.hasLength(2)); + checkPolicy(o[0]); + checkPolicy(o[1]); +} + +core.int buildCounterListPoliciesResponse = 0; +api.ListPoliciesResponse buildListPoliciesResponse() { + final o = api.ListPoliciesResponse(); + buildCounterListPoliciesResponse++; + if (buildCounterListPoliciesResponse < 3) { + o.nextPageToken = 'foo'; + o.policies = buildUnnamed21(); + } + buildCounterListPoliciesResponse--; + return o; +} + +void checkListPoliciesResponse(api.ListPoliciesResponse o) { + buildCounterListPoliciesResponse++; + if (buildCounterListPoliciesResponse < 3) { + unittest.expect( + o.nextPageToken!, + unittest.equals('foo'), + ); + checkUnnamed21(o.policies!); + } + buildCounterListPoliciesResponse--; +} + +core.List buildUnnamed22() => [ buildUserInvitation(), buildUserInvitation(), ]; -void checkUnnamed21(core.List o) { +void checkUnnamed22(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkUserInvitation(o[0]); checkUserInvitation(o[1]); @@ -1774,7 +1809,7 @@ api.ListUserInvitationsResponse buildListUserInvitationsResponse() { buildCounterListUserInvitationsResponse++; if (buildCounterListUserInvitationsResponse < 3) { o.nextPageToken = 'foo'; - o.userInvitations = buildUnnamed21(); + o.userInvitations = buildUnnamed22(); } buildCounterListUserInvitationsResponse--; return o; @@ -1787,7 +1822,7 @@ void checkListUserInvitationsResponse(api.ListUserInvitationsResponse o) { o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed21(o.userInvitations!); + checkUnnamed22(o.userInvitations!); } buildCounterListUserInvitationsResponse--; } @@ -1836,23 +1871,23 @@ void checkLookupMembershipNameResponse(api.LookupMembershipNameResponse o) { buildCounterLookupMembershipNameResponse--; } -core.List buildUnnamed22() => [ +core.List buildUnnamed23() => [ buildEntityKey(), buildEntityKey(), ]; -void checkUnnamed22(core.List o) { +void checkUnnamed23(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkEntityKey(o[0]); checkEntityKey(o[1]); } -core.List buildUnnamed23() => [ +core.List buildUnnamed24() => [ buildTransitiveMembershipRole(), buildTransitiveMembershipRole(), ]; -void checkUnnamed23(core.List o) { +void checkUnnamed24(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkTransitiveMembershipRole(o[0]); checkTransitiveMembershipRole(o[1]); @@ -1864,9 +1899,9 @@ api.MemberRelation buildMemberRelation() { buildCounterMemberRelation++; if (buildCounterMemberRelation < 3) { o.member = 'foo'; - o.preferredMemberKey = buildUnnamed22(); + o.preferredMemberKey = buildUnnamed23(); o.relationType = 'foo'; - o.roles = buildUnnamed23(); + o.roles = buildUnnamed24(); } buildCounterMemberRelation--; return o; @@ -1879,12 +1914,12 @@ void checkMemberRelation(api.MemberRelation o) { o.member!, unittest.equals('foo'), ); - checkUnnamed22(o.preferredMemberKey!); + checkUnnamed23(o.preferredMemberKey!); unittest.expect( o.relationType!, unittest.equals('foo'), ); - checkUnnamed23(o.roles!); + checkUnnamed24(o.roles!); } buildCounterMemberRelation--; } @@ -1913,12 +1948,12 @@ void checkMemberRestriction(api.MemberRestriction o) { buildCounterMemberRestriction--; } -core.List buildUnnamed24() => [ +core.List buildUnnamed25() => [ buildMembershipRole(), buildMembershipRole(), ]; -void checkUnnamed24(core.List o) { +void checkUnnamed25(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkMembershipRole(o[0]); checkMembershipRole(o[1]); @@ -1933,7 +1968,7 @@ api.Membership buildMembership() { o.deliverySetting = 'foo'; o.name = 'foo'; o.preferredMemberKey = 
buildEntityKey(); - o.roles = buildUnnamed24(); + o.roles = buildUnnamed25(); o.type = 'foo'; o.updateTime = 'foo'; } @@ -1957,7 +1992,7 @@ void checkMembership(api.Membership o) { unittest.equals('foo'), ); checkEntityKey(o.preferredMemberKey!); - checkUnnamed24(o.roles!); + checkUnnamed25(o.roles!); unittest.expect( o.type!, unittest.equals('foo'), @@ -1970,12 +2005,12 @@ void checkMembership(api.Membership o) { buildCounterMembership--; } -core.Map buildUnnamed25() => { +core.Map buildUnnamed26() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed25(core.Map o) { +void checkUnnamed26(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -1987,12 +2022,12 @@ void checkUnnamed25(core.Map o) { ); } -core.List buildUnnamed26() => [ +core.List buildUnnamed27() => [ buildMembershipRole(), buildMembershipRole(), ]; -void checkUnnamed26(core.List o) { +void checkUnnamed27(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkMembershipRole(o[0]); checkMembershipRole(o[1]); @@ -2007,9 +2042,9 @@ api.MembershipRelation buildMembershipRelation() { o.displayName = 'foo'; o.group = 'foo'; o.groupKey = buildEntityKey(); - o.labels = buildUnnamed25(); + o.labels = buildUnnamed26(); o.membership = 'foo'; - o.roles = buildUnnamed26(); + o.roles = buildUnnamed27(); } buildCounterMembershipRelation--; return o; @@ -2031,12 +2066,12 @@ void checkMembershipRelation(api.MembershipRelation o) { unittest.equals('foo'), ); checkEntityKey(o.groupKey!); - checkUnnamed25(o.labels!); + checkUnnamed26(o.labels!); unittest.expect( o.membership!, unittest.equals('foo'), ); - checkUnnamed26(o.roles!); + checkUnnamed27(o.roles!); } buildCounterMembershipRelation--; } @@ -2091,23 +2126,23 @@ void checkMembershipRoleRestrictionEvaluation( buildCounterMembershipRoleRestrictionEvaluation--; } -core.List buildUnnamed27() => [ +core.List buildUnnamed28() => [ buildMembershipRole(), buildMembershipRole(), ]; -void checkUnnamed27(core.List o) { +void checkUnnamed28(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkMembershipRole(o[0]); checkMembershipRole(o[1]); } -core.List buildUnnamed28() => [ +core.List buildUnnamed29() => [ 'foo', 'foo', ]; -void checkUnnamed28(core.List o) { +void checkUnnamed29(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -2119,12 +2154,12 @@ void checkUnnamed28(core.List o) { ); } -core.List buildUnnamed29() => [ +core.List buildUnnamed30() => [ buildUpdateMembershipRolesParams(), buildUpdateMembershipRolesParams(), ]; -void checkUnnamed29(core.List o) { +void checkUnnamed30(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkUpdateMembershipRolesParams(o[0]); checkUpdateMembershipRolesParams(o[1]); @@ -2135,9 +2170,9 @@ api.ModifyMembershipRolesRequest buildModifyMembershipRolesRequest() { final o = api.ModifyMembershipRolesRequest(); buildCounterModifyMembershipRolesRequest++; if (buildCounterModifyMembershipRolesRequest < 3) { - o.addRoles = buildUnnamed27(); - o.removeRoles = buildUnnamed28(); - o.updateRolesParams = buildUnnamed29(); + o.addRoles = buildUnnamed28(); + o.removeRoles = buildUnnamed29(); + o.updateRolesParams = buildUnnamed30(); } buildCounterModifyMembershipRolesRequest--; return o; @@ -2146,9 +2181,9 @@ api.ModifyMembershipRolesRequest buildModifyMembershipRolesRequest() { void checkModifyMembershipRolesRequest(api.ModifyMembershipRolesRequest o) { buildCounterModifyMembershipRolesRequest++; if (buildCounterModifyMembershipRolesRequest < 3) { - checkUnnamed27(o.addRoles!); - 
checkUnnamed28(o.removeRoles!); - checkUnnamed29(o.updateRolesParams!); + checkUnnamed28(o.addRoles!); + checkUnnamed29(o.removeRoles!); + checkUnnamed30(o.updateRolesParams!); } buildCounterModifyMembershipRolesRequest--; } @@ -2172,7 +2207,7 @@ void checkModifyMembershipRolesResponse(api.ModifyMembershipRolesResponse o) { buildCounterModifyMembershipRolesResponse--; } -core.Map buildUnnamed30() => { +core.Map buildUnnamed31() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -2185,7 +2220,7 @@ core.Map buildUnnamed30() => { }, }; -void checkUnnamed30(core.Map o) { +void checkUnnamed31(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted3 = (o['x']!) as core.Map; unittest.expect(casted3, unittest.hasLength(3)); @@ -2217,7 +2252,7 @@ void checkUnnamed30(core.Map o) { ); } -core.Map buildUnnamed31() => { +core.Map buildUnnamed32() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -2230,7 +2265,7 @@ core.Map buildUnnamed31() => { }, }; -void checkUnnamed31(core.Map o) { +void checkUnnamed32(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted5 = (o['x']!) as core.Map; unittest.expect(casted5, unittest.hasLength(3)); @@ -2269,9 +2304,9 @@ api.Operation buildOperation() { if (buildCounterOperation < 3) { o.done = true; o.error = buildStatus(); - o.metadata = buildUnnamed30(); + o.metadata = buildUnnamed31(); o.name = 'foo'; - o.response = buildUnnamed31(); + o.response = buildUnnamed32(); } buildCounterOperation--; return o; @@ -2282,16 +2317,89 @@ void checkOperation(api.Operation o) { if (buildCounterOperation < 3) { unittest.expect(o.done!, unittest.isTrue); checkStatus(o.error!); - checkUnnamed30(o.metadata!); + checkUnnamed31(o.metadata!); unittest.expect( o.name!, unittest.equals('foo'), ); - checkUnnamed31(o.response!); + checkUnnamed32(o.response!); } buildCounterOperation--; } +core.int buildCounterPolicy = 0; +api.Policy buildPolicy() { + final o = api.Policy(); + buildCounterPolicy++; + if (buildCounterPolicy < 3) { + o.customer = 'foo'; + o.name = 'foo'; + o.policyQuery = buildPolicyQuery(); + o.setting = buildSetting(); + o.type = 'foo'; + } + buildCounterPolicy--; + return o; +} + +void checkPolicy(api.Policy o) { + buildCounterPolicy++; + if (buildCounterPolicy < 3) { + unittest.expect( + o.customer!, + unittest.equals('foo'), + ); + unittest.expect( + o.name!, + unittest.equals('foo'), + ); + checkPolicyQuery(o.policyQuery!); + checkSetting(o.setting!); + unittest.expect( + o.type!, + unittest.equals('foo'), + ); + } + buildCounterPolicy--; +} + +core.int buildCounterPolicyQuery = 0; +api.PolicyQuery buildPolicyQuery() { + final o = api.PolicyQuery(); + buildCounterPolicyQuery++; + if (buildCounterPolicyQuery < 3) { + o.group = 'foo'; + o.orgUnit = 'foo'; + o.query = 'foo'; + o.sortOrder = 42.0; + } + buildCounterPolicyQuery--; + return o; +} + +void checkPolicyQuery(api.PolicyQuery o) { + buildCounterPolicyQuery++; + if (buildCounterPolicyQuery < 3) { + unittest.expect( + o.group!, + unittest.equals('foo'), + ); + unittest.expect( + o.orgUnit!, + unittest.equals('foo'), + ); + unittest.expect( + o.query!, + unittest.equals('foo'), + ); + unittest.expect( + o.sortOrder!, + unittest.equals(42.0), + ); + } + buildCounterPolicyQuery--; +} + core.int buildCounterRestrictionEvaluation = 0; api.RestrictionEvaluation buildRestrictionEvaluation() { final o = api.RestrictionEvaluation(); @@ -2441,12 +2549,12 @@ void checkSamlSsoInfo(api.SamlSsoInfo o) { buildCounterSamlSsoInfo--; } -core.List buildUnnamed32() => [ +core.List buildUnnamed33() => [ 
buildMembershipRelation(), buildMembershipRelation(), ]; -void checkUnnamed32(core.List o) { +void checkUnnamed33(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkMembershipRelation(o[0]); checkMembershipRelation(o[1]); @@ -2457,7 +2565,7 @@ api.SearchDirectGroupsResponse buildSearchDirectGroupsResponse() { final o = api.SearchDirectGroupsResponse(); buildCounterSearchDirectGroupsResponse++; if (buildCounterSearchDirectGroupsResponse < 3) { - o.memberships = buildUnnamed32(); + o.memberships = buildUnnamed33(); o.nextPageToken = 'foo'; } buildCounterSearchDirectGroupsResponse--; @@ -2467,7 +2575,7 @@ api.SearchDirectGroupsResponse buildSearchDirectGroupsResponse() { void checkSearchDirectGroupsResponse(api.SearchDirectGroupsResponse o) { buildCounterSearchDirectGroupsResponse++; if (buildCounterSearchDirectGroupsResponse < 3) { - checkUnnamed32(o.memberships!); + checkUnnamed33(o.memberships!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -2476,12 +2584,12 @@ void checkSearchDirectGroupsResponse(api.SearchDirectGroupsResponse o) { buildCounterSearchDirectGroupsResponse--; } -core.List buildUnnamed33() => [ +core.List buildUnnamed34() => [ buildGroup(), buildGroup(), ]; -void checkUnnamed33(core.List o) { +void checkUnnamed34(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGroup(o[0]); checkGroup(o[1]); @@ -2492,7 +2600,7 @@ api.SearchGroupsResponse buildSearchGroupsResponse() { final o = api.SearchGroupsResponse(); buildCounterSearchGroupsResponse++; if (buildCounterSearchGroupsResponse < 3) { - o.groups = buildUnnamed33(); + o.groups = buildUnnamed34(); o.nextPageToken = 'foo'; } buildCounterSearchGroupsResponse--; @@ -2502,7 +2610,7 @@ api.SearchGroupsResponse buildSearchGroupsResponse() { void checkSearchGroupsResponse(api.SearchGroupsResponse o) { buildCounterSearchGroupsResponse++; if (buildCounterSearchGroupsResponse < 3) { - checkUnnamed33(o.groups!); + checkUnnamed34(o.groups!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -2511,12 +2619,12 @@ void checkSearchGroupsResponse(api.SearchGroupsResponse o) { buildCounterSearchGroupsResponse--; } -core.List buildUnnamed34() => [ +core.List buildUnnamed35() => [ buildGroupRelation(), buildGroupRelation(), ]; -void checkUnnamed34(core.List o) { +void checkUnnamed35(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGroupRelation(o[0]); checkGroupRelation(o[1]); @@ -2527,7 +2635,7 @@ api.SearchTransitiveGroupsResponse buildSearchTransitiveGroupsResponse() { final o = api.SearchTransitiveGroupsResponse(); buildCounterSearchTransitiveGroupsResponse++; if (buildCounterSearchTransitiveGroupsResponse < 3) { - o.memberships = buildUnnamed34(); + o.memberships = buildUnnamed35(); o.nextPageToken = 'foo'; } buildCounterSearchTransitiveGroupsResponse--; @@ -2537,7 +2645,7 @@ api.SearchTransitiveGroupsResponse buildSearchTransitiveGroupsResponse() { void checkSearchTransitiveGroupsResponse(api.SearchTransitiveGroupsResponse o) { buildCounterSearchTransitiveGroupsResponse++; if (buildCounterSearchTransitiveGroupsResponse < 3) { - checkUnnamed34(o.memberships!); + checkUnnamed35(o.memberships!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -2546,12 +2654,12 @@ void checkSearchTransitiveGroupsResponse(api.SearchTransitiveGroupsResponse o) { buildCounterSearchTransitiveGroupsResponse--; } -core.List buildUnnamed35() => [ +core.List buildUnnamed36() => [ buildMemberRelation(), buildMemberRelation(), ]; -void checkUnnamed35(core.List o) { +void 
checkUnnamed36(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkMemberRelation(o[0]); checkMemberRelation(o[1]); @@ -2563,7 +2671,7 @@ api.SearchTransitiveMembershipsResponse final o = api.SearchTransitiveMembershipsResponse(); buildCounterSearchTransitiveMembershipsResponse++; if (buildCounterSearchTransitiveMembershipsResponse < 3) { - o.memberships = buildUnnamed35(); + o.memberships = buildUnnamed36(); o.nextPageToken = 'foo'; } buildCounterSearchTransitiveMembershipsResponse--; @@ -2574,7 +2682,7 @@ void checkSearchTransitiveMembershipsResponse( api.SearchTransitiveMembershipsResponse o) { buildCounterSearchTransitiveMembershipsResponse++; if (buildCounterSearchTransitiveMembershipsResponse < 3) { - checkUnnamed35(o.memberships!); + checkUnnamed36(o.memberships!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -2622,6 +2730,75 @@ void checkSendUserInvitationRequest(api.SendUserInvitationRequest o) { buildCounterSendUserInvitationRequest--; } +core.Map buildUnnamed37() => { + 'x': { + 'list': [1, 2, 3], + 'bool': true, + 'string': 'foo' + }, + 'y': { + 'list': [1, 2, 3], + 'bool': true, + 'string': 'foo' + }, + }; + +void checkUnnamed37(core.Map o) { + unittest.expect(o, unittest.hasLength(2)); + var casted7 = (o['x']!) as core.Map; + unittest.expect(casted7, unittest.hasLength(3)); + unittest.expect( + casted7['list'], + unittest.equals([1, 2, 3]), + ); + unittest.expect( + casted7['bool'], + unittest.equals(true), + ); + unittest.expect( + casted7['string'], + unittest.equals('foo'), + ); + var casted8 = (o['y']!) as core.Map; + unittest.expect(casted8, unittest.hasLength(3)); + unittest.expect( + casted8['list'], + unittest.equals([1, 2, 3]), + ); + unittest.expect( + casted8['bool'], + unittest.equals(true), + ); + unittest.expect( + casted8['string'], + unittest.equals('foo'), + ); +} + +core.int buildCounterSetting = 0; +api.Setting buildSetting() { + final o = api.Setting(); + buildCounterSetting++; + if (buildCounterSetting < 3) { + o.type = 'foo'; + o.value = buildUnnamed37(); + } + buildCounterSetting--; + return o; +} + +void checkSetting(api.Setting o) { + buildCounterSetting++; + if (buildCounterSetting < 3) { + unittest.expect( + o.type!, + unittest.equals('foo'), + ); + checkUnnamed37(o.value!); + } + buildCounterSetting--; +} + core.int buildCounterSignInBehavior = 0; api.SignInBehavior buildSignInBehavior() { final o = api.SignInBehavior(); @@ -2644,7 +2821,7 @@ void checkSignInBehavior(api.SignInBehavior o) { buildCounterSignInBehavior--; } -core.Map buildUnnamed36() => { +core.Map buildUnnamed38() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -2657,47 +2834,47 @@ core.Map buildUnnamed36() => { }, }; -void checkUnnamed36(core.Map o) { +void checkUnnamed38(core.Map o) { unittest.expect(o, unittest.hasLength(2)); - var casted7 = (o['x']!) as core.Map; - unittest.expect(casted7, unittest.hasLength(3)); + var casted9 = (o['x']!) as core.Map; + unittest.expect(casted9, unittest.hasLength(3)); unittest.expect( - casted7['list'], + casted9['list'], unittest.equals([1, 2, 3]), ); unittest.expect( - casted7['bool'], + casted9['bool'], unittest.equals(true), ); unittest.expect( - casted7['string'], + casted9['string'], unittest.equals('foo'), ); - var casted8 = (o['y']!) as core.Map; - unittest.expect(casted8, unittest.hasLength(3)); + var casted10 = (o['y']!) 
as core.Map; + unittest.expect(casted10, unittest.hasLength(3)); unittest.expect( - casted8['list'], + casted10['list'], unittest.equals([1, 2, 3]), ); unittest.expect( - casted8['bool'], + casted10['bool'], unittest.equals(true), ); unittest.expect( - casted8['string'], + casted10['string'], unittest.equals('foo'), ); } -core.List> buildUnnamed37() => [ - buildUnnamed36(), - buildUnnamed36(), +core.List> buildUnnamed39() => [ + buildUnnamed38(), + buildUnnamed38(), ]; -void checkUnnamed37(core.List> o) { +void checkUnnamed39(core.List> o) { unittest.expect(o, unittest.hasLength(2)); - checkUnnamed36(o[0]); - checkUnnamed36(o[1]); + checkUnnamed38(o[0]); + checkUnnamed38(o[1]); } core.int buildCounterStatus = 0; @@ -2706,7 +2883,7 @@ api.Status buildStatus() { buildCounterStatus++; if (buildCounterStatus < 3) { o.code = 42; - o.details = buildUnnamed37(); + o.details = buildUnnamed39(); o.message = 'foo'; } buildCounterStatus--; @@ -2720,7 +2897,7 @@ void checkStatus(api.Status o) { o.code!, unittest.equals(42), ); - checkUnnamed37(o.details!); + checkUnnamed39(o.details!); unittest.expect( o.message!, unittest.equals('foo'), @@ -3250,6 +3427,16 @@ void main() { }); }); + unittest.group('obj-schema-ListPoliciesResponse', () { + unittest.test('to-json--from-json', () async { + final o = buildListPoliciesResponse(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.ListPoliciesResponse.fromJson( + oJson as core.Map); + checkListPoliciesResponse(od); + }); + }); + unittest.group('obj-schema-ListUserInvitationsResponse', () { unittest.test('to-json--from-json', () async { final o = buildListUserInvitationsResponse(); @@ -3370,6 +3557,26 @@ void main() { }); }); + unittest.group('obj-schema-Policy', () { + unittest.test('to-json--from-json', () async { + final o = buildPolicy(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.Policy.fromJson(oJson as core.Map); + checkPolicy(od); + }); + }); + + unittest.group('obj-schema-PolicyQuery', () { + unittest.test('to-json--from-json', () async { + final o = buildPolicyQuery(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.PolicyQuery.fromJson( + oJson as core.Map); + checkPolicyQuery(od); + }); + }); + unittest.group('obj-schema-RestrictionEvaluation', () { unittest.test('to-json--from-json', () async { final o = buildRestrictionEvaluation(); @@ -3490,6 +3697,16 @@ void main() { }); }); + unittest.group('obj-schema-Setting', () { + unittest.test('to-json--from-json', () async { + final o = buildSetting(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.Setting.fromJson(oJson as core.Map); + checkSetting(od); + }); + }); + unittest.group('obj-schema-SignInBehavior', () { unittest.test('to-json--from-json', () async { final o = buildSignInBehavior(); @@ -7053,4 +7270,127 @@ void main() { checkOperation(response as api.Operation); }); }); + + unittest.group('resource-PoliciesResource', () { + unittest.test('method--get', () async { + final mock = HttpServerMock(); + final res = api.CloudIdentityApi(mock).policies; + final arg_name = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + 
pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildPolicy()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.get(arg_name, $fields: arg_$fields); + checkPolicy(response as api.Policy); + }); + + unittest.test('method--list', () async { + final mock = HttpServerMock(); + final res = api.CloudIdentityApi(mock).policies; + final arg_filter = 'foo'; + final arg_pageSize = 42; + final arg_pageToken = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 11), + unittest.equals('v1/policies'), + ); + pathOffset += 11; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['filter']!.first, + unittest.equals(arg_filter), + ); + unittest.expect( + core.int.parse(queryMap['pageSize']!.first), + unittest.equals(arg_pageSize), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildListPoliciesResponse()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.list( + filter: arg_filter, + pageSize: arg_pageSize, + pageToken: arg_pageToken, + $fields: arg_$fields); + checkListPoliciesResponse(response as api.ListPoliciesResponse); + }); + }); } diff --git a/generated/googleapis/test/compute/v1_test.dart b/generated/googleapis/test/compute/v1_test.dart index d5c4f1d2a..01fedc9b7 100644 --- a/generated/googleapis/test/compute/v1_test.dart +++ b/generated/googleapis/test/compute/v1_test.dart @@ -4554,7 +4554,6 @@ api.Binding buildBinding() { final o = api.Binding(); buildCounterBinding++; if (buildCounterBinding < 3) { - o.bindingId = 'foo'; o.condition = buildExpr(); o.members = buildUnnamed73(); o.role = 'foo'; @@ -4566,10 +4565,6 @@ api.Binding buildBinding() { void checkBinding(api.Binding o) { buildCounterBinding++; if (buildCounterBinding < 3) { - unittest.expect( - o.bindingId!, - unittest.equals('foo'), - ); checkExpr(o.condition!); checkUnnamed73(o.members!); unittest.expect( @@ -5447,62 +5442,6 @@ void 
checkCommitmentsScopedList(api.CommitmentsScopedList o) { buildCounterCommitmentsScopedList--; } -core.List buildUnnamed90() => [ - 'foo', - 'foo', - ]; - -void checkUnnamed90(core.List o) { - unittest.expect(o, unittest.hasLength(2)); - unittest.expect( - o[0], - unittest.equals('foo'), - ); - unittest.expect( - o[1], - unittest.equals('foo'), - ); -} - -core.int buildCounterCondition = 0; -api.Condition buildCondition() { - final o = api.Condition(); - buildCounterCondition++; - if (buildCounterCondition < 3) { - o.iam = 'foo'; - o.op = 'foo'; - o.svc = 'foo'; - o.sys = 'foo'; - o.values = buildUnnamed90(); - } - buildCounterCondition--; - return o; -} - -void checkCondition(api.Condition o) { - buildCounterCondition++; - if (buildCounterCondition < 3) { - unittest.expect( - o.iam!, - unittest.equals('foo'), - ); - unittest.expect( - o.op!, - unittest.equals('foo'), - ); - unittest.expect( - o.svc!, - unittest.equals('foo'), - ); - unittest.expect( - o.sys!, - unittest.equals('foo'), - ); - checkUnnamed90(o.values!); - } - buildCounterCondition--; -} - core.int buildCounterConfidentialInstanceConfig = 0; api.ConfidentialInstanceConfig buildConfidentialInstanceConfig() { final o = api.ConfidentialInstanceConfig(); @@ -5611,12 +5550,12 @@ void checkConsistentHashLoadBalancerSettingsHttpCookie( buildCounterConsistentHashLoadBalancerSettingsHttpCookie--; } -core.List buildUnnamed91() => [ +core.List buildUnnamed90() => [ 'foo', 'foo', ]; -void checkUnnamed91(core.List o) { +void checkUnnamed90(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -5628,12 +5567,12 @@ void checkUnnamed91(core.List o) { ); } -core.List buildUnnamed92() => [ +core.List buildUnnamed91() => [ 'foo', 'foo', ]; -void checkUnnamed92(core.List o) { +void checkUnnamed91(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -5645,12 +5584,12 @@ void checkUnnamed92(core.List o) { ); } -core.List buildUnnamed93() => [ +core.List buildUnnamed92() => [ 'foo', 'foo', ]; -void checkUnnamed93(core.List o) { +void checkUnnamed92(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -5662,12 +5601,12 @@ void checkUnnamed93(core.List o) { ); } -core.List buildUnnamed94() => [ +core.List buildUnnamed93() => [ 'foo', 'foo', ]; -void checkUnnamed94(core.List o) { +void checkUnnamed93(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -5679,12 +5618,12 @@ void checkUnnamed94(core.List o) { ); } -core.List buildUnnamed95() => [ +core.List buildUnnamed94() => [ 'foo', 'foo', ]; -void checkUnnamed95(core.List o) { +void checkUnnamed94(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -5702,12 +5641,12 @@ api.CorsPolicy buildCorsPolicy() { buildCounterCorsPolicy++; if (buildCounterCorsPolicy < 3) { o.allowCredentials = true; - o.allowHeaders = buildUnnamed91(); - o.allowMethods = buildUnnamed92(); - o.allowOriginRegexes = buildUnnamed93(); - o.allowOrigins = buildUnnamed94(); + o.allowHeaders = buildUnnamed90(); + o.allowMethods = buildUnnamed91(); + o.allowOriginRegexes = buildUnnamed92(); + o.allowOrigins = buildUnnamed93(); o.disabled = true; - o.exposeHeaders = buildUnnamed95(); + o.exposeHeaders = buildUnnamed94(); o.maxAge = 42; } buildCounterCorsPolicy--; @@ -5718,12 +5657,12 @@ void checkCorsPolicy(api.CorsPolicy o) { buildCounterCorsPolicy++; if (buildCounterCorsPolicy < 3) { unittest.expect(o.allowCredentials!, unittest.isTrue); - checkUnnamed91(o.allowHeaders!); - 
checkUnnamed92(o.allowMethods!); - checkUnnamed93(o.allowOriginRegexes!); - checkUnnamed94(o.allowOrigins!); + checkUnnamed90(o.allowHeaders!); + checkUnnamed91(o.allowMethods!); + checkUnnamed92(o.allowOriginRegexes!); + checkUnnamed93(o.allowOrigins!); unittest.expect(o.disabled!, unittest.isTrue); - checkUnnamed95(o.exposeHeaders!); + checkUnnamed94(o.exposeHeaders!); unittest.expect( o.maxAge!, unittest.equals(42), @@ -5733,12 +5672,12 @@ void checkCorsPolicy(api.CorsPolicy o) { } core.List - buildUnnamed96() => [ + buildUnnamed95() => [ buildCustomErrorResponsePolicyCustomErrorResponseRule(), buildCustomErrorResponsePolicyCustomErrorResponseRule(), ]; -void checkUnnamed96( +void checkUnnamed95( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkCustomErrorResponsePolicyCustomErrorResponseRule(o[0]); @@ -5750,7 +5689,7 @@ api.CustomErrorResponsePolicy buildCustomErrorResponsePolicy() { final o = api.CustomErrorResponsePolicy(); buildCounterCustomErrorResponsePolicy++; if (buildCounterCustomErrorResponsePolicy < 3) { - o.errorResponseRules = buildUnnamed96(); + o.errorResponseRules = buildUnnamed95(); o.errorService = 'foo'; } buildCounterCustomErrorResponsePolicy--; @@ -5760,7 +5699,7 @@ api.CustomErrorResponsePolicy buildCustomErrorResponsePolicy() { void checkCustomErrorResponsePolicy(api.CustomErrorResponsePolicy o) { buildCounterCustomErrorResponsePolicy++; if (buildCounterCustomErrorResponsePolicy < 3) { - checkUnnamed96(o.errorResponseRules!); + checkUnnamed95(o.errorResponseRules!); unittest.expect( o.errorService!, unittest.equals('foo'), @@ -5769,12 +5708,12 @@ void checkCustomErrorResponsePolicy(api.CustomErrorResponsePolicy o) { buildCounterCustomErrorResponsePolicy--; } -core.List buildUnnamed97() => [ +core.List buildUnnamed96() => [ 'foo', 'foo', ]; -void checkUnnamed97(core.List o) { +void checkUnnamed96(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -5792,7 +5731,7 @@ api.CustomErrorResponsePolicyCustomErrorResponseRule final o = api.CustomErrorResponsePolicyCustomErrorResponseRule(); buildCounterCustomErrorResponsePolicyCustomErrorResponseRule++; if (buildCounterCustomErrorResponsePolicyCustomErrorResponseRule < 3) { - o.matchResponseCodes = buildUnnamed97(); + o.matchResponseCodes = buildUnnamed96(); o.overrideResponseCode = 42; o.path = 'foo'; } @@ -5804,7 +5743,7 @@ void checkCustomErrorResponsePolicyCustomErrorResponseRule( api.CustomErrorResponsePolicyCustomErrorResponseRule o) { buildCounterCustomErrorResponsePolicyCustomErrorResponseRule++; if (buildCounterCustomErrorResponsePolicyCustomErrorResponseRule < 3) { - checkUnnamed97(o.matchResponseCodes!); + checkUnnamed96(o.matchResponseCodes!); unittest.expect( o.overrideResponseCode!, unittest.equals(42), @@ -5927,34 +5866,34 @@ void checkDeprecationStatus(api.DeprecationStatus o) { buildCounterDeprecationStatus--; } -core.Map buildUnnamed98() => { +core.Map buildUnnamed97() => { 'x': buildDiskAsyncReplicationList(), 'y': buildDiskAsyncReplicationList(), }; -void checkUnnamed98(core.Map o) { +void checkUnnamed97(core.Map o) { unittest.expect(o, unittest.hasLength(2)); checkDiskAsyncReplicationList(o['x']!); checkDiskAsyncReplicationList(o['y']!); } -core.List buildUnnamed99() => [ +core.List buildUnnamed98() => [ buildGuestOsFeature(), buildGuestOsFeature(), ]; -void checkUnnamed99(core.List o) { +void checkUnnamed98(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGuestOsFeature(o[0]); checkGuestOsFeature(o[1]); } -core.Map buildUnnamed100() => { 
+core.Map buildUnnamed99() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed100(core.Map o) { +void checkUnnamed99(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -5966,12 +5905,12 @@ void checkUnnamed100(core.Map o) { ); } -core.List buildUnnamed101() => [ +core.List buildUnnamed100() => [ 'foo', 'foo', ]; -void checkUnnamed101(core.List o) { +void checkUnnamed100(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -5983,12 +5922,12 @@ void checkUnnamed101(core.List o) { ); } -core.List buildUnnamed102() => [ +core.List buildUnnamed101() => [ 'foo', 'foo', ]; -void checkUnnamed102(core.List o) { +void checkUnnamed101(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -6000,12 +5939,12 @@ void checkUnnamed102(core.List o) { ); } -core.List buildUnnamed103() => [ +core.List buildUnnamed102() => [ 'foo', 'foo', ]; -void checkUnnamed103(core.List o) { +void checkUnnamed102(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -6017,12 +5956,12 @@ void checkUnnamed103(core.List o) { ); } -core.List buildUnnamed104() => [ +core.List buildUnnamed103() => [ 'foo', 'foo', ]; -void checkUnnamed104(core.List o) { +void checkUnnamed103(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -6034,12 +5973,12 @@ void checkUnnamed104(core.List o) { ); } -core.List buildUnnamed105() => [ +core.List buildUnnamed104() => [ 'foo', 'foo', ]; -void checkUnnamed105(core.List o) { +void checkUnnamed104(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -6059,20 +5998,20 @@ api.Disk buildDisk() { o.accessMode = 'foo'; o.architecture = 'foo'; o.asyncPrimaryDisk = buildDiskAsyncReplication(); - o.asyncSecondaryDisks = buildUnnamed98(); + o.asyncSecondaryDisks = buildUnnamed97(); o.creationTimestamp = 'foo'; o.description = 'foo'; o.diskEncryptionKey = buildCustomerEncryptionKey(); o.enableConfidentialCompute = true; - o.guestOsFeatures = buildUnnamed99(); + o.guestOsFeatures = buildUnnamed98(); o.id = 'foo'; o.kind = 'foo'; o.labelFingerprint = 'foo'; - o.labels = buildUnnamed100(); + o.labels = buildUnnamed99(); o.lastAttachTimestamp = 'foo'; o.lastDetachTimestamp = 'foo'; - o.licenseCodes = buildUnnamed101(); - o.licenses = buildUnnamed102(); + o.licenseCodes = buildUnnamed100(); + o.licenses = buildUnnamed101(); o.locationHint = 'foo'; o.name = 'foo'; o.options = 'foo'; @@ -6081,8 +6020,8 @@ api.Disk buildDisk() { o.provisionedIops = 'foo'; o.provisionedThroughput = 'foo'; o.region = 'foo'; - o.replicaZones = buildUnnamed103(); - o.resourcePolicies = buildUnnamed104(); + o.replicaZones = buildUnnamed102(); + o.resourcePolicies = buildUnnamed103(); o.resourceStatus = buildDiskResourceStatus(); o.satisfiesPzi = true; o.satisfiesPzs = true; @@ -6104,7 +6043,7 @@ api.Disk buildDisk() { o.status = 'foo'; o.storagePool = 'foo'; o.type = 'foo'; - o.users = buildUnnamed105(); + o.users = buildUnnamed104(); o.zone = 'foo'; } buildCounterDisk--; @@ -6123,7 +6062,7 @@ void checkDisk(api.Disk o) { unittest.equals('foo'), ); checkDiskAsyncReplication(o.asyncPrimaryDisk!); - checkUnnamed98(o.asyncSecondaryDisks!); + checkUnnamed97(o.asyncSecondaryDisks!); unittest.expect( o.creationTimestamp!, unittest.equals('foo'), @@ -6134,7 +6073,7 @@ void checkDisk(api.Disk o) { ); checkCustomerEncryptionKey(o.diskEncryptionKey!); unittest.expect(o.enableConfidentialCompute!, unittest.isTrue); - checkUnnamed99(o.guestOsFeatures!); + 
checkUnnamed98(o.guestOsFeatures!); unittest.expect( o.id!, unittest.equals('foo'), @@ -6147,7 +6086,7 @@ void checkDisk(api.Disk o) { o.labelFingerprint!, unittest.equals('foo'), ); - checkUnnamed100(o.labels!); + checkUnnamed99(o.labels!); unittest.expect( o.lastAttachTimestamp!, unittest.equals('foo'), @@ -6156,8 +6095,8 @@ void checkDisk(api.Disk o) { o.lastDetachTimestamp!, unittest.equals('foo'), ); - checkUnnamed101(o.licenseCodes!); - checkUnnamed102(o.licenses!); + checkUnnamed100(o.licenseCodes!); + checkUnnamed101(o.licenses!); unittest.expect( o.locationHint!, unittest.equals('foo'), @@ -6187,8 +6126,8 @@ void checkDisk(api.Disk o) { o.region!, unittest.equals('foo'), ); - checkUnnamed103(o.replicaZones!); - checkUnnamed104(o.resourcePolicies!); + checkUnnamed102(o.replicaZones!); + checkUnnamed103(o.resourcePolicies!); checkDiskResourceStatus(o.resourceStatus!); unittest.expect(o.satisfiesPzi!, unittest.isTrue); unittest.expect(o.satisfiesPzs!, unittest.isTrue); @@ -6258,7 +6197,7 @@ void checkDisk(api.Disk o) { o.type!, unittest.equals('foo'), ); - checkUnnamed105(o.users!); + checkUnnamed104(o.users!); unittest.expect( o.zone!, unittest.equals('foo'), @@ -6267,23 +6206,23 @@ void checkDisk(api.Disk o) { buildCounterDisk--; } -core.Map buildUnnamed106() => { +core.Map buildUnnamed105() => { 'x': buildDisksScopedList(), 'y': buildDisksScopedList(), }; -void checkUnnamed106(core.Map o) { +void checkUnnamed105(core.Map o) { unittest.expect(o, unittest.hasLength(2)); checkDisksScopedList(o['x']!); checkDisksScopedList(o['y']!); } -core.List buildUnnamed107() => [ +core.List buildUnnamed106() => [ 'foo', 'foo', ]; -void checkUnnamed107(core.List o) { +void checkUnnamed106(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -6322,12 +6261,12 @@ void checkDiskAggregatedListWarningData(api.DiskAggregatedListWarningData o) { buildCounterDiskAggregatedListWarningData--; } -core.List buildUnnamed108() => [ +core.List buildUnnamed107() => [ buildDiskAggregatedListWarningData(), buildDiskAggregatedListWarningData(), ]; -void checkUnnamed108(core.List o) { +void checkUnnamed107(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkDiskAggregatedListWarningData(o[0]); checkDiskAggregatedListWarningData(o[1]); @@ -6339,7 +6278,7 @@ api.DiskAggregatedListWarning buildDiskAggregatedListWarning() { buildCounterDiskAggregatedListWarning++; if (buildCounterDiskAggregatedListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed108(); + o.data = buildUnnamed107(); o.message = 'foo'; } buildCounterDiskAggregatedListWarning--; @@ -6353,7 +6292,7 @@ void checkDiskAggregatedListWarning(api.DiskAggregatedListWarning o) { o.code!, unittest.equals('foo'), ); - checkUnnamed108(o.data!); + checkUnnamed107(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -6368,11 +6307,11 @@ api.DiskAggregatedList buildDiskAggregatedList() { buildCounterDiskAggregatedList++; if (buildCounterDiskAggregatedList < 3) { o.id = 'foo'; - o.items = buildUnnamed106(); + o.items = buildUnnamed105(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; - o.unreachables = buildUnnamed107(); + o.unreachables = buildUnnamed106(); o.warning = buildDiskAggregatedListWarning(); } buildCounterDiskAggregatedList--; @@ -6386,7 +6325,7 @@ void checkDiskAggregatedList(api.DiskAggregatedList o) { o.id!, unittest.equals('foo'), ); - checkUnnamed106(o.items!); + checkUnnamed105(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -6399,7 +6338,7 @@ void 
checkDiskAggregatedList(api.DiskAggregatedList o) { o.selfLink!, unittest.equals('foo'), ); - checkUnnamed107(o.unreachables!); + checkUnnamed106(o.unreachables!); checkDiskAggregatedListWarning(o.warning!); } buildCounterDiskAggregatedList--; @@ -6495,12 +6434,12 @@ void checkDiskInstantiationConfig(api.DiskInstantiationConfig o) { buildCounterDiskInstantiationConfig--; } -core.List buildUnnamed109() => [ +core.List buildUnnamed108() => [ buildDisk(), buildDisk(), ]; -void checkUnnamed109(core.List o) { +void checkUnnamed108(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkDisk(o[0]); checkDisk(o[1]); @@ -6533,12 +6472,12 @@ void checkDiskListWarningData(api.DiskListWarningData o) { buildCounterDiskListWarningData--; } -core.List buildUnnamed110() => [ +core.List buildUnnamed109() => [ buildDiskListWarningData(), buildDiskListWarningData(), ]; -void checkUnnamed110(core.List o) { +void checkUnnamed109(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkDiskListWarningData(o[0]); checkDiskListWarningData(o[1]); @@ -6550,7 +6489,7 @@ api.DiskListWarning buildDiskListWarning() { buildCounterDiskListWarning++; if (buildCounterDiskListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed110(); + o.data = buildUnnamed109(); o.message = 'foo'; } buildCounterDiskListWarning--; @@ -6564,7 +6503,7 @@ void checkDiskListWarning(api.DiskListWarning o) { o.code!, unittest.equals('foo'), ); - checkUnnamed110(o.data!); + checkUnnamed109(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -6579,7 +6518,7 @@ api.DiskList buildDiskList() { buildCounterDiskList++; if (buildCounterDiskList < 3) { o.id = 'foo'; - o.items = buildUnnamed109(); + o.items = buildUnnamed108(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; @@ -6596,7 +6535,7 @@ void checkDiskList(api.DiskList o) { o.id!, unittest.equals('foo'), ); - checkUnnamed109(o.items!); + checkUnnamed108(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -6641,12 +6580,12 @@ void checkDiskMoveRequest(api.DiskMoveRequest o) { buildCounterDiskMoveRequest--; } -core.Map buildUnnamed111() => { +core.Map buildUnnamed110() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed111(core.Map o) { +void checkUnnamed110(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -6663,7 +6602,7 @@ api.DiskParams buildDiskParams() { final o = api.DiskParams(); buildCounterDiskParams++; if (buildCounterDiskParams < 3) { - o.resourceManagerTags = buildUnnamed111(); + o.resourceManagerTags = buildUnnamed110(); } buildCounterDiskParams--; return o; @@ -6672,18 +6611,18 @@ api.DiskParams buildDiskParams() { void checkDiskParams(api.DiskParams o) { buildCounterDiskParams++; if (buildCounterDiskParams < 3) { - checkUnnamed111(o.resourceManagerTags!); + checkUnnamed110(o.resourceManagerTags!); } buildCounterDiskParams--; } core.Map - buildUnnamed112() => { + buildUnnamed111() => { 'x': buildDiskResourceStatusAsyncReplicationStatus(), 'y': buildDiskResourceStatusAsyncReplicationStatus(), }; -void checkUnnamed112( +void checkUnnamed111( core.Map o) { unittest.expect(o, unittest.hasLength(2)); checkDiskResourceStatusAsyncReplicationStatus(o['x']!); @@ -6696,7 +6635,7 @@ api.DiskResourceStatus buildDiskResourceStatus() { buildCounterDiskResourceStatus++; if (buildCounterDiskResourceStatus < 3) { o.asyncPrimaryDisk = buildDiskResourceStatusAsyncReplicationStatus(); - o.asyncSecondaryDisks = buildUnnamed112(); + o.asyncSecondaryDisks = buildUnnamed111(); } buildCounterDiskResourceStatus--; return 
o; @@ -6706,7 +6645,7 @@ void checkDiskResourceStatus(api.DiskResourceStatus o) { buildCounterDiskResourceStatus++; if (buildCounterDiskResourceStatus < 3) { checkDiskResourceStatusAsyncReplicationStatus(o.asyncPrimaryDisk!); - checkUnnamed112(o.asyncSecondaryDisks!); + checkUnnamed111(o.asyncSecondaryDisks!); } buildCounterDiskResourceStatus--; } @@ -6804,23 +6743,23 @@ void checkDiskType(api.DiskType o) { buildCounterDiskType--; } -core.Map buildUnnamed113() => { +core.Map buildUnnamed112() => { 'x': buildDiskTypesScopedList(), 'y': buildDiskTypesScopedList(), }; -void checkUnnamed113(core.Map o) { +void checkUnnamed112(core.Map o) { unittest.expect(o, unittest.hasLength(2)); checkDiskTypesScopedList(o['x']!); checkDiskTypesScopedList(o['y']!); } -core.List buildUnnamed114() => [ +core.List buildUnnamed113() => [ 'foo', 'foo', ]; -void checkUnnamed114(core.List o) { +void checkUnnamed113(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -6860,12 +6799,12 @@ void checkDiskTypeAggregatedListWarningData( buildCounterDiskTypeAggregatedListWarningData--; } -core.List buildUnnamed115() => [ +core.List buildUnnamed114() => [ buildDiskTypeAggregatedListWarningData(), buildDiskTypeAggregatedListWarningData(), ]; -void checkUnnamed115(core.List o) { +void checkUnnamed114(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkDiskTypeAggregatedListWarningData(o[0]); checkDiskTypeAggregatedListWarningData(o[1]); @@ -6877,7 +6816,7 @@ api.DiskTypeAggregatedListWarning buildDiskTypeAggregatedListWarning() { buildCounterDiskTypeAggregatedListWarning++; if (buildCounterDiskTypeAggregatedListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed115(); + o.data = buildUnnamed114(); o.message = 'foo'; } buildCounterDiskTypeAggregatedListWarning--; @@ -6891,7 +6830,7 @@ void checkDiskTypeAggregatedListWarning(api.DiskTypeAggregatedListWarning o) { o.code!, unittest.equals('foo'), ); - checkUnnamed115(o.data!); + checkUnnamed114(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -6906,11 +6845,11 @@ api.DiskTypeAggregatedList buildDiskTypeAggregatedList() { buildCounterDiskTypeAggregatedList++; if (buildCounterDiskTypeAggregatedList < 3) { o.id = 'foo'; - o.items = buildUnnamed113(); + o.items = buildUnnamed112(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; - o.unreachables = buildUnnamed114(); + o.unreachables = buildUnnamed113(); o.warning = buildDiskTypeAggregatedListWarning(); } buildCounterDiskTypeAggregatedList--; @@ -6924,7 +6863,7 @@ void checkDiskTypeAggregatedList(api.DiskTypeAggregatedList o) { o.id!, unittest.equals('foo'), ); - checkUnnamed113(o.items!); + checkUnnamed112(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -6937,18 +6876,18 @@ void checkDiskTypeAggregatedList(api.DiskTypeAggregatedList o) { o.selfLink!, unittest.equals('foo'), ); - checkUnnamed114(o.unreachables!); + checkUnnamed113(o.unreachables!); checkDiskTypeAggregatedListWarning(o.warning!); } buildCounterDiskTypeAggregatedList--; } -core.List buildUnnamed116() => [ +core.List buildUnnamed115() => [ buildDiskType(), buildDiskType(), ]; -void checkUnnamed116(core.List o) { +void checkUnnamed115(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkDiskType(o[0]); checkDiskType(o[1]); @@ -6981,12 +6920,12 @@ void checkDiskTypeListWarningData(api.DiskTypeListWarningData o) { buildCounterDiskTypeListWarningData--; } -core.List buildUnnamed117() => [ +core.List buildUnnamed116() => [ buildDiskTypeListWarningData(), 
buildDiskTypeListWarningData(), ]; -void checkUnnamed117(core.List o) { +void checkUnnamed116(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkDiskTypeListWarningData(o[0]); checkDiskTypeListWarningData(o[1]); @@ -6998,7 +6937,7 @@ api.DiskTypeListWarning buildDiskTypeListWarning() { buildCounterDiskTypeListWarning++; if (buildCounterDiskTypeListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed117(); + o.data = buildUnnamed116(); o.message = 'foo'; } buildCounterDiskTypeListWarning--; @@ -7012,7 +6951,7 @@ void checkDiskTypeListWarning(api.DiskTypeListWarning o) { o.code!, unittest.equals('foo'), ); - checkUnnamed117(o.data!); + checkUnnamed116(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -7027,7 +6966,7 @@ api.DiskTypeList buildDiskTypeList() { buildCounterDiskTypeList++; if (buildCounterDiskTypeList < 3) { o.id = 'foo'; - o.items = buildUnnamed116(); + o.items = buildUnnamed115(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; @@ -7044,7 +6983,7 @@ void checkDiskTypeList(api.DiskTypeList o) { o.id!, unittest.equals('foo'), ); - checkUnnamed116(o.items!); + checkUnnamed115(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -7062,12 +7001,12 @@ void checkDiskTypeList(api.DiskTypeList o) { buildCounterDiskTypeList--; } -core.List buildUnnamed118() => [ +core.List buildUnnamed117() => [ buildDiskType(), buildDiskType(), ]; -void checkUnnamed118(core.List o) { +void checkUnnamed117(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkDiskType(o[0]); checkDiskType(o[1]); @@ -7100,12 +7039,12 @@ void checkDiskTypesScopedListWarningData(api.DiskTypesScopedListWarningData o) { buildCounterDiskTypesScopedListWarningData--; } -core.List buildUnnamed119() => [ +core.List buildUnnamed118() => [ buildDiskTypesScopedListWarningData(), buildDiskTypesScopedListWarningData(), ]; -void checkUnnamed119(core.List o) { +void checkUnnamed118(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkDiskTypesScopedListWarningData(o[0]); checkDiskTypesScopedListWarningData(o[1]); @@ -7117,7 +7056,7 @@ api.DiskTypesScopedListWarning buildDiskTypesScopedListWarning() { buildCounterDiskTypesScopedListWarning++; if (buildCounterDiskTypesScopedListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed119(); + o.data = buildUnnamed118(); o.message = 'foo'; } buildCounterDiskTypesScopedListWarning--; @@ -7131,7 +7070,7 @@ void checkDiskTypesScopedListWarning(api.DiskTypesScopedListWarning o) { o.code!, unittest.equals('foo'), ); - checkUnnamed119(o.data!); + checkUnnamed118(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -7145,7 +7084,7 @@ api.DiskTypesScopedList buildDiskTypesScopedList() { final o = api.DiskTypesScopedList(); buildCounterDiskTypesScopedList++; if (buildCounterDiskTypesScopedList < 3) { - o.diskTypes = buildUnnamed118(); + o.diskTypes = buildUnnamed117(); o.warning = buildDiskTypesScopedListWarning(); } buildCounterDiskTypesScopedList--; @@ -7155,18 +7094,18 @@ api.DiskTypesScopedList buildDiskTypesScopedList() { void checkDiskTypesScopedList(api.DiskTypesScopedList o) { buildCounterDiskTypesScopedList++; if (buildCounterDiskTypesScopedList < 3) { - checkUnnamed118(o.diskTypes!); + checkUnnamed117(o.diskTypes!); checkDiskTypesScopedListWarning(o.warning!); } buildCounterDiskTypesScopedList--; } -core.List buildUnnamed120() => [ +core.List buildUnnamed119() => [ 'foo', 'foo', ]; -void checkUnnamed120(core.List o) { +void checkUnnamed119(core.List o) { unittest.expect(o, unittest.hasLength(2)); 
unittest.expect( o[0], @@ -7183,7 +7122,7 @@ api.DisksAddResourcePoliciesRequest buildDisksAddResourcePoliciesRequest() { final o = api.DisksAddResourcePoliciesRequest(); buildCounterDisksAddResourcePoliciesRequest++; if (buildCounterDisksAddResourcePoliciesRequest < 3) { - o.resourcePolicies = buildUnnamed120(); + o.resourcePolicies = buildUnnamed119(); } buildCounterDisksAddResourcePoliciesRequest--; return o; @@ -7193,17 +7132,17 @@ void checkDisksAddResourcePoliciesRequest( api.DisksAddResourcePoliciesRequest o) { buildCounterDisksAddResourcePoliciesRequest++; if (buildCounterDisksAddResourcePoliciesRequest < 3) { - checkUnnamed120(o.resourcePolicies!); + checkUnnamed119(o.resourcePolicies!); } buildCounterDisksAddResourcePoliciesRequest--; } -core.List buildUnnamed121() => [ +core.List buildUnnamed120() => [ 'foo', 'foo', ]; -void checkUnnamed121(core.List o) { +void checkUnnamed120(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -7221,7 +7160,7 @@ api.DisksRemoveResourcePoliciesRequest final o = api.DisksRemoveResourcePoliciesRequest(); buildCounterDisksRemoveResourcePoliciesRequest++; if (buildCounterDisksRemoveResourcePoliciesRequest < 3) { - o.resourcePolicies = buildUnnamed121(); + o.resourcePolicies = buildUnnamed120(); } buildCounterDisksRemoveResourcePoliciesRequest--; return o; @@ -7231,7 +7170,7 @@ void checkDisksRemoveResourcePoliciesRequest( api.DisksRemoveResourcePoliciesRequest o) { buildCounterDisksRemoveResourcePoliciesRequest++; if (buildCounterDisksRemoveResourcePoliciesRequest < 3) { - checkUnnamed121(o.resourcePolicies!); + checkUnnamed120(o.resourcePolicies!); } buildCounterDisksRemoveResourcePoliciesRequest--; } @@ -7258,12 +7197,12 @@ void checkDisksResizeRequest(api.DisksResizeRequest o) { buildCounterDisksResizeRequest--; } -core.List buildUnnamed122() => [ +core.List buildUnnamed121() => [ buildDisk(), buildDisk(), ]; -void checkUnnamed122(core.List o) { +void checkUnnamed121(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkDisk(o[0]); checkDisk(o[1]); @@ -7296,12 +7235,12 @@ void checkDisksScopedListWarningData(api.DisksScopedListWarningData o) { buildCounterDisksScopedListWarningData--; } -core.List buildUnnamed123() => [ +core.List buildUnnamed122() => [ buildDisksScopedListWarningData(), buildDisksScopedListWarningData(), ]; -void checkUnnamed123(core.List o) { +void checkUnnamed122(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkDisksScopedListWarningData(o[0]); checkDisksScopedListWarningData(o[1]); @@ -7313,7 +7252,7 @@ api.DisksScopedListWarning buildDisksScopedListWarning() { buildCounterDisksScopedListWarning++; if (buildCounterDisksScopedListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed123(); + o.data = buildUnnamed122(); o.message = 'foo'; } buildCounterDisksScopedListWarning--; @@ -7327,7 +7266,7 @@ void checkDisksScopedListWarning(api.DisksScopedListWarning o) { o.code!, unittest.equals('foo'), ); - checkUnnamed123(o.data!); + checkUnnamed122(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -7341,7 +7280,7 @@ api.DisksScopedList buildDisksScopedList() { final o = api.DisksScopedList(); buildCounterDisksScopedList++; if (buildCounterDisksScopedList < 3) { - o.disks = buildUnnamed122(); + o.disks = buildUnnamed121(); o.warning = buildDisksScopedListWarning(); } buildCounterDisksScopedList--; @@ -7351,7 +7290,7 @@ api.DisksScopedList buildDisksScopedList() { void checkDisksScopedList(api.DisksScopedList o) { buildCounterDisksScopedList++; if 
(buildCounterDisksScopedList < 3) { - checkUnnamed122(o.disks!); + checkUnnamed121(o.disks!); checkDisksScopedListWarning(o.warning!); } buildCounterDisksScopedList--; @@ -7423,12 +7362,12 @@ void checkDisplayDevice(api.DisplayDevice o) { buildCounterDisplayDevice--; } -core.List buildUnnamed124() => [ +core.List buildUnnamed123() => [ buildDistributionPolicyZoneConfiguration(), buildDistributionPolicyZoneConfiguration(), ]; -void checkUnnamed124(core.List o) { +void checkUnnamed123(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkDistributionPolicyZoneConfiguration(o[0]); checkDistributionPolicyZoneConfiguration(o[1]); @@ -7440,7 +7379,7 @@ api.DistributionPolicy buildDistributionPolicy() { buildCounterDistributionPolicy++; if (buildCounterDistributionPolicy < 3) { o.targetShape = 'foo'; - o.zones = buildUnnamed124(); + o.zones = buildUnnamed123(); } buildCounterDistributionPolicy--; return o; @@ -7453,7 +7392,7 @@ void checkDistributionPolicy(api.DistributionPolicy o) { o.targetShape!, unittest.equals('foo'), ); - checkUnnamed124(o.zones!); + checkUnnamed123(o.zones!); } buildCounterDistributionPolicy--; } @@ -7509,12 +7448,12 @@ void checkDuration(api.Duration o) { buildCounterDuration--; } -core.Map buildUnnamed125() => { +core.Map buildUnnamed124() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed125(core.Map o) { +void checkUnnamed124(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -7532,7 +7471,7 @@ api.ErrorInfo buildErrorInfo() { buildCounterErrorInfo++; if (buildCounterErrorInfo < 3) { o.domain = 'foo'; - o.metadatas = buildUnnamed125(); + o.metadatas = buildUnnamed124(); o.reason = 'foo'; } buildCounterErrorInfo--; @@ -7546,7 +7485,7 @@ void checkErrorInfo(api.ErrorInfo o) { o.domain!, unittest.equals('foo'), ); - checkUnnamed125(o.metadatas!); + checkUnnamed124(o.metadatas!); unittest.expect( o.reason!, unittest.equals('foo'), @@ -7594,12 +7533,12 @@ void checkExchangedPeeringRoute(api.ExchangedPeeringRoute o) { buildCounterExchangedPeeringRoute--; } -core.List buildUnnamed126() => [ +core.List buildUnnamed125() => [ buildExchangedPeeringRoute(), buildExchangedPeeringRoute(), ]; -void checkUnnamed126(core.List o) { +void checkUnnamed125(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkExchangedPeeringRoute(o[0]); checkExchangedPeeringRoute(o[1]); @@ -7634,12 +7573,12 @@ void checkExchangedPeeringRoutesListWarningData( buildCounterExchangedPeeringRoutesListWarningData--; } -core.List buildUnnamed127() => [ +core.List buildUnnamed126() => [ buildExchangedPeeringRoutesListWarningData(), buildExchangedPeeringRoutesListWarningData(), ]; -void checkUnnamed127(core.List o) { +void checkUnnamed126(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkExchangedPeeringRoutesListWarningData(o[0]); checkExchangedPeeringRoutesListWarningData(o[1]); @@ -7651,7 +7590,7 @@ api.ExchangedPeeringRoutesListWarning buildExchangedPeeringRoutesListWarning() { buildCounterExchangedPeeringRoutesListWarning++; if (buildCounterExchangedPeeringRoutesListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed127(); + o.data = buildUnnamed126(); o.message = 'foo'; } buildCounterExchangedPeeringRoutesListWarning--; @@ -7666,7 +7605,7 @@ void checkExchangedPeeringRoutesListWarning( o.code!, unittest.equals('foo'), ); - checkUnnamed127(o.data!); + checkUnnamed126(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -7681,7 +7620,7 @@ api.ExchangedPeeringRoutesList buildExchangedPeeringRoutesList() { 
buildCounterExchangedPeeringRoutesList++; if (buildCounterExchangedPeeringRoutesList < 3) { o.id = 'foo'; - o.items = buildUnnamed126(); + o.items = buildUnnamed125(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; @@ -7698,7 +7637,7 @@ void checkExchangedPeeringRoutesList(api.ExchangedPeeringRoutesList o) { o.id!, unittest.equals('foo'), ); - checkUnnamed126(o.items!); + checkUnnamed125(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -7753,23 +7692,23 @@ void checkExpr(api.Expr o) { buildCounterExpr--; } -core.List buildUnnamed128() => [ +core.List buildUnnamed127() => [ buildExternalVpnGatewayInterface(), buildExternalVpnGatewayInterface(), ]; -void checkUnnamed128(core.List o) { +void checkUnnamed127(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkExternalVpnGatewayInterface(o[0]); checkExternalVpnGatewayInterface(o[1]); } -core.Map buildUnnamed129() => { +core.Map buildUnnamed128() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed129(core.Map o) { +void checkUnnamed128(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -7789,10 +7728,10 @@ api.ExternalVpnGateway buildExternalVpnGateway() { o.creationTimestamp = 'foo'; o.description = 'foo'; o.id = 'foo'; - o.interfaces = buildUnnamed128(); + o.interfaces = buildUnnamed127(); o.kind = 'foo'; o.labelFingerprint = 'foo'; - o.labels = buildUnnamed129(); + o.labels = buildUnnamed128(); o.name = 'foo'; o.redundancyType = 'foo'; o.selfLink = 'foo'; @@ -7816,7 +7755,7 @@ void checkExternalVpnGateway(api.ExternalVpnGateway o) { o.id!, unittest.equals('foo'), ); - checkUnnamed128(o.interfaces!); + checkUnnamed127(o.interfaces!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -7825,7 +7764,7 @@ void checkExternalVpnGateway(api.ExternalVpnGateway o) { o.labelFingerprint!, unittest.equals('foo'), ); - checkUnnamed129(o.labels!); + checkUnnamed128(o.labels!); unittest.expect( o.name!, unittest.equals('foo'), @@ -7874,12 +7813,12 @@ void checkExternalVpnGatewayInterface(api.ExternalVpnGatewayInterface o) { buildCounterExternalVpnGatewayInterface--; } -core.List buildUnnamed130() => [ +core.List buildUnnamed129() => [ buildExternalVpnGateway(), buildExternalVpnGateway(), ]; -void checkUnnamed130(core.List o) { +void checkUnnamed129(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkExternalVpnGateway(o[0]); checkExternalVpnGateway(o[1]); @@ -7913,12 +7852,12 @@ void checkExternalVpnGatewayListWarningData( buildCounterExternalVpnGatewayListWarningData--; } -core.List buildUnnamed131() => [ +core.List buildUnnamed130() => [ buildExternalVpnGatewayListWarningData(), buildExternalVpnGatewayListWarningData(), ]; -void checkUnnamed131(core.List o) { +void checkUnnamed130(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkExternalVpnGatewayListWarningData(o[0]); checkExternalVpnGatewayListWarningData(o[1]); @@ -7930,7 +7869,7 @@ api.ExternalVpnGatewayListWarning buildExternalVpnGatewayListWarning() { buildCounterExternalVpnGatewayListWarning++; if (buildCounterExternalVpnGatewayListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed131(); + o.data = buildUnnamed130(); o.message = 'foo'; } buildCounterExternalVpnGatewayListWarning--; @@ -7944,7 +7883,7 @@ void checkExternalVpnGatewayListWarning(api.ExternalVpnGatewayListWarning o) { o.code!, unittest.equals('foo'), ); - checkUnnamed131(o.data!); + checkUnnamed130(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -7960,7 +7899,7 @@ api.ExternalVpnGatewayList 
buildExternalVpnGatewayList() { if (buildCounterExternalVpnGatewayList < 3) { o.etag = 'foo'; o.id = 'foo'; - o.items = buildUnnamed130(); + o.items = buildUnnamed129(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; @@ -7981,7 +7920,7 @@ void checkExternalVpnGatewayList(api.ExternalVpnGatewayList o) { o.id!, unittest.equals('foo'), ); - checkUnnamed130(o.items!); + checkUnnamed129(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -8026,12 +7965,12 @@ void checkFileContentBuffer(api.FileContentBuffer o) { buildCounterFileContentBuffer--; } -core.List buildUnnamed132() => [ +core.List buildUnnamed131() => [ 'foo', 'foo', ]; -void checkUnnamed132(core.List o) { +void checkUnnamed131(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -8049,7 +7988,7 @@ api.FirewallAllowed buildFirewallAllowed() { buildCounterFirewallAllowed++; if (buildCounterFirewallAllowed < 3) { o.IPProtocol = 'foo'; - o.ports = buildUnnamed132(); + o.ports = buildUnnamed131(); } buildCounterFirewallAllowed--; return o; @@ -8062,28 +8001,28 @@ void checkFirewallAllowed(api.FirewallAllowed o) { o.IPProtocol!, unittest.equals('foo'), ); - checkUnnamed132(o.ports!); + checkUnnamed131(o.ports!); } buildCounterFirewallAllowed--; } -core.List buildUnnamed133() => [ +core.List buildUnnamed132() => [ buildFirewallAllowed(), buildFirewallAllowed(), ]; -void checkUnnamed133(core.List o) { +void checkUnnamed132(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkFirewallAllowed(o[0]); checkFirewallAllowed(o[1]); } -core.List buildUnnamed134() => [ +core.List buildUnnamed133() => [ 'foo', 'foo', ]; -void checkUnnamed134(core.List o) { +void checkUnnamed133(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -8101,7 +8040,7 @@ api.FirewallDenied buildFirewallDenied() { buildCounterFirewallDenied++; if (buildCounterFirewallDenied < 3) { o.IPProtocol = 'foo'; - o.ports = buildUnnamed134(); + o.ports = buildUnnamed133(); } buildCounterFirewallDenied--; return o; @@ -8114,28 +8053,28 @@ void checkFirewallDenied(api.FirewallDenied o) { o.IPProtocol!, unittest.equals('foo'), ); - checkUnnamed134(o.ports!); + checkUnnamed133(o.ports!); } buildCounterFirewallDenied--; } -core.List buildUnnamed135() => [ +core.List buildUnnamed134() => [ buildFirewallDenied(), buildFirewallDenied(), ]; -void checkUnnamed135(core.List o) { +void checkUnnamed134(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkFirewallDenied(o[0]); checkFirewallDenied(o[1]); } -core.List buildUnnamed136() => [ +core.List buildUnnamed135() => [ 'foo', 'foo', ]; -void checkUnnamed136(core.List o) { +void checkUnnamed135(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -8147,12 +8086,12 @@ void checkUnnamed136(core.List o) { ); } -core.List buildUnnamed137() => [ +core.List buildUnnamed136() => [ 'foo', 'foo', ]; -void checkUnnamed137(core.List o) { +void checkUnnamed136(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -8164,12 +8103,12 @@ void checkUnnamed137(core.List o) { ); } -core.List buildUnnamed138() => [ +core.List buildUnnamed137() => [ 'foo', 'foo', ]; -void checkUnnamed138(core.List o) { +void checkUnnamed137(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -8181,12 +8120,12 @@ void checkUnnamed138(core.List o) { ); } -core.List buildUnnamed139() => [ +core.List buildUnnamed138() => [ 'foo', 'foo', ]; -void checkUnnamed139(core.List o) { +void 
checkUnnamed138(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -8198,12 +8137,12 @@ void checkUnnamed139(core.List o) { ); } -core.List buildUnnamed140() => [ +core.List buildUnnamed139() => [ 'foo', 'foo', ]; -void checkUnnamed140(core.List o) { +void checkUnnamed139(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -8215,12 +8154,12 @@ void checkUnnamed140(core.List o) { ); } -core.List buildUnnamed141() => [ +core.List buildUnnamed140() => [ 'foo', 'foo', ]; -void checkUnnamed141(core.List o) { +void checkUnnamed140(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -8237,11 +8176,11 @@ api.Firewall buildFirewall() { final o = api.Firewall(); buildCounterFirewall++; if (buildCounterFirewall < 3) { - o.allowed = buildUnnamed133(); + o.allowed = buildUnnamed132(); o.creationTimestamp = 'foo'; - o.denied = buildUnnamed135(); + o.denied = buildUnnamed134(); o.description = 'foo'; - o.destinationRanges = buildUnnamed136(); + o.destinationRanges = buildUnnamed135(); o.direction = 'foo'; o.disabled = true; o.id = 'foo'; @@ -8251,11 +8190,11 @@ api.Firewall buildFirewall() { o.network = 'foo'; o.priority = 42; o.selfLink = 'foo'; - o.sourceRanges = buildUnnamed137(); - o.sourceServiceAccounts = buildUnnamed138(); - o.sourceTags = buildUnnamed139(); - o.targetServiceAccounts = buildUnnamed140(); - o.targetTags = buildUnnamed141(); + o.sourceRanges = buildUnnamed136(); + o.sourceServiceAccounts = buildUnnamed137(); + o.sourceTags = buildUnnamed138(); + o.targetServiceAccounts = buildUnnamed139(); + o.targetTags = buildUnnamed140(); } buildCounterFirewall--; return o; @@ -8264,17 +8203,17 @@ api.Firewall buildFirewall() { void checkFirewall(api.Firewall o) { buildCounterFirewall++; if (buildCounterFirewall < 3) { - checkUnnamed133(o.allowed!); + checkUnnamed132(o.allowed!); unittest.expect( o.creationTimestamp!, unittest.equals('foo'), ); - checkUnnamed135(o.denied!); + checkUnnamed134(o.denied!); unittest.expect( o.description!, unittest.equals('foo'), ); - checkUnnamed136(o.destinationRanges!); + checkUnnamed135(o.destinationRanges!); unittest.expect( o.direction!, unittest.equals('foo'), @@ -8305,21 +8244,21 @@ void checkFirewall(api.Firewall o) { o.selfLink!, unittest.equals('foo'), ); - checkUnnamed137(o.sourceRanges!); - checkUnnamed138(o.sourceServiceAccounts!); - checkUnnamed139(o.sourceTags!); - checkUnnamed140(o.targetServiceAccounts!); - checkUnnamed141(o.targetTags!); + checkUnnamed136(o.sourceRanges!); + checkUnnamed137(o.sourceServiceAccounts!); + checkUnnamed138(o.sourceTags!); + checkUnnamed139(o.targetServiceAccounts!); + checkUnnamed140(o.targetTags!); } buildCounterFirewall--; } -core.List buildUnnamed142() => [ +core.List buildUnnamed141() => [ buildFirewall(), buildFirewall(), ]; -void checkUnnamed142(core.List o) { +void checkUnnamed141(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkFirewall(o[0]); checkFirewall(o[1]); @@ -8352,12 +8291,12 @@ void checkFirewallListWarningData(api.FirewallListWarningData o) { buildCounterFirewallListWarningData--; } -core.List buildUnnamed143() => [ +core.List buildUnnamed142() => [ buildFirewallListWarningData(), buildFirewallListWarningData(), ]; -void checkUnnamed143(core.List o) { +void checkUnnamed142(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkFirewallListWarningData(o[0]); checkFirewallListWarningData(o[1]); @@ -8369,7 +8308,7 @@ api.FirewallListWarning buildFirewallListWarning() { 
buildCounterFirewallListWarning++; if (buildCounterFirewallListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed143(); + o.data = buildUnnamed142(); o.message = 'foo'; } buildCounterFirewallListWarning--; @@ -8383,7 +8322,7 @@ void checkFirewallListWarning(api.FirewallListWarning o) { o.code!, unittest.equals('foo'), ); - checkUnnamed143(o.data!); + checkUnnamed142(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -8398,7 +8337,7 @@ api.FirewallList buildFirewallList() { buildCounterFirewallList++; if (buildCounterFirewallList < 3) { o.id = 'foo'; - o.items = buildUnnamed142(); + o.items = buildUnnamed141(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; @@ -8415,7 +8354,7 @@ void checkFirewallList(api.FirewallList o) { o.id!, unittest.equals('foo'), ); - checkUnnamed142(o.items!); + checkUnnamed141(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -8457,12 +8396,12 @@ void checkFirewallLogConfig(api.FirewallLogConfig o) { buildCounterFirewallLogConfig--; } -core.List buildUnnamed144() => [ +core.List buildUnnamed143() => [ buildFirewallPolicyAssociation(), buildFirewallPolicyAssociation(), ]; -void checkUnnamed144(core.List o) { +void checkUnnamed143(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkFirewallPolicyAssociation(o[0]); checkFirewallPolicyAssociation(o[1]); @@ -8474,7 +8413,7 @@ api.FirewallPoliciesListAssociationsResponse final o = api.FirewallPoliciesListAssociationsResponse(); buildCounterFirewallPoliciesListAssociationsResponse++; if (buildCounterFirewallPoliciesListAssociationsResponse < 3) { - o.associations = buildUnnamed144(); + o.associations = buildUnnamed143(); o.kind = 'foo'; } buildCounterFirewallPoliciesListAssociationsResponse--; @@ -8485,7 +8424,7 @@ void checkFirewallPoliciesListAssociationsResponse( api.FirewallPoliciesListAssociationsResponse o) { buildCounterFirewallPoliciesListAssociationsResponse++; if (buildCounterFirewallPoliciesListAssociationsResponse < 3) { - checkUnnamed144(o.associations!); + checkUnnamed143(o.associations!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -8494,23 +8433,23 @@ void checkFirewallPoliciesListAssociationsResponse( buildCounterFirewallPoliciesListAssociationsResponse--; } -core.List buildUnnamed145() => [ +core.List buildUnnamed144() => [ buildFirewallPolicyAssociation(), buildFirewallPolicyAssociation(), ]; -void checkUnnamed145(core.List o) { +void checkUnnamed144(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkFirewallPolicyAssociation(o[0]); checkFirewallPolicyAssociation(o[1]); } -core.List buildUnnamed146() => [ +core.List buildUnnamed145() => [ buildFirewallPolicyRule(), buildFirewallPolicyRule(), ]; -void checkUnnamed146(core.List o) { +void checkUnnamed145(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkFirewallPolicyRule(o[0]); checkFirewallPolicyRule(o[1]); @@ -8521,7 +8460,7 @@ api.FirewallPolicy buildFirewallPolicy() { final o = api.FirewallPolicy(); buildCounterFirewallPolicy++; if (buildCounterFirewallPolicy < 3) { - o.associations = buildUnnamed145(); + o.associations = buildUnnamed144(); o.creationTimestamp = 'foo'; o.description = 'foo'; o.displayName = 'foo'; @@ -8532,7 +8471,7 @@ api.FirewallPolicy buildFirewallPolicy() { o.parent = 'foo'; o.region = 'foo'; o.ruleTupleCount = 42; - o.rules = buildUnnamed146(); + o.rules = buildUnnamed145(); o.selfLink = 'foo'; o.selfLinkWithId = 'foo'; o.shortName = 'foo'; @@ -8544,7 +8483,7 @@ api.FirewallPolicy buildFirewallPolicy() { void 
checkFirewallPolicy(api.FirewallPolicy o) { buildCounterFirewallPolicy++; if (buildCounterFirewallPolicy < 3) { - checkUnnamed145(o.associations!); + checkUnnamed144(o.associations!); unittest.expect( o.creationTimestamp!, unittest.equals('foo'), @@ -8585,7 +8524,7 @@ void checkFirewallPolicy(api.FirewallPolicy o) { o.ruleTupleCount!, unittest.equals(42), ); - checkUnnamed146(o.rules!); + checkUnnamed145(o.rules!); unittest.expect( o.selfLink!, unittest.equals('foo'), @@ -8644,12 +8583,12 @@ void checkFirewallPolicyAssociation(api.FirewallPolicyAssociation o) { buildCounterFirewallPolicyAssociation--; } -core.List buildUnnamed147() => [ +core.List buildUnnamed146() => [ buildFirewallPolicy(), buildFirewallPolicy(), ]; -void checkUnnamed147(core.List o) { +void checkUnnamed146(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkFirewallPolicy(o[0]); checkFirewallPolicy(o[1]); @@ -8682,12 +8621,12 @@ void checkFirewallPolicyListWarningData(api.FirewallPolicyListWarningData o) { buildCounterFirewallPolicyListWarningData--; } -core.List buildUnnamed148() => [ +core.List buildUnnamed147() => [ buildFirewallPolicyListWarningData(), buildFirewallPolicyListWarningData(), ]; -void checkUnnamed148(core.List o) { +void checkUnnamed147(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkFirewallPolicyListWarningData(o[0]); checkFirewallPolicyListWarningData(o[1]); @@ -8699,7 +8638,7 @@ api.FirewallPolicyListWarning buildFirewallPolicyListWarning() { buildCounterFirewallPolicyListWarning++; if (buildCounterFirewallPolicyListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed148(); + o.data = buildUnnamed147(); o.message = 'foo'; } buildCounterFirewallPolicyListWarning--; @@ -8713,7 +8652,7 @@ void checkFirewallPolicyListWarning(api.FirewallPolicyListWarning o) { o.code!, unittest.equals('foo'), ); - checkUnnamed148(o.data!); + checkUnnamed147(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -8728,7 +8667,7 @@ api.FirewallPolicyList buildFirewallPolicyList() { buildCounterFirewallPolicyList++; if (buildCounterFirewallPolicyList < 3) { o.id = 'foo'; - o.items = buildUnnamed147(); + o.items = buildUnnamed146(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.warning = buildFirewallPolicyListWarning(); @@ -8744,7 +8683,7 @@ void checkFirewallPolicyList(api.FirewallPolicyList o) { o.id!, unittest.equals('foo'), ); - checkUnnamed147(o.items!); + checkUnnamed146(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -8758,12 +8697,12 @@ void checkFirewallPolicyList(api.FirewallPolicyList o) { buildCounterFirewallPolicyList--; } -core.List buildUnnamed149() => [ +core.List buildUnnamed148() => [ 'foo', 'foo', ]; -void checkUnnamed149(core.List o) { +void checkUnnamed148(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -8775,23 +8714,23 @@ void checkUnnamed149(core.List o) { ); } -core.List buildUnnamed150() => [ +core.List buildUnnamed149() => [ buildFirewallPolicyRuleSecureTag(), buildFirewallPolicyRuleSecureTag(), ]; -void checkUnnamed150(core.List o) { +void checkUnnamed149(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkFirewallPolicyRuleSecureTag(o[0]); checkFirewallPolicyRuleSecureTag(o[1]); } -core.List buildUnnamed151() => [ +core.List buildUnnamed150() => [ 'foo', 'foo', ]; -void checkUnnamed151(core.List o) { +void checkUnnamed150(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -8819,9 +8758,9 @@ api.FirewallPolicyRule buildFirewallPolicyRule() { o.ruleName = 'foo'; 
o.ruleTupleCount = 42; o.securityProfileGroup = 'foo'; - o.targetResources = buildUnnamed149(); - o.targetSecureTags = buildUnnamed150(); - o.targetServiceAccounts = buildUnnamed151(); + o.targetResources = buildUnnamed148(); + o.targetSecureTags = buildUnnamed149(); + o.targetServiceAccounts = buildUnnamed150(); o.tlsInspect = true; } buildCounterFirewallPolicyRule--; @@ -8866,20 +8805,20 @@ void checkFirewallPolicyRule(api.FirewallPolicyRule o) { o.securityProfileGroup!, unittest.equals('foo'), ); - checkUnnamed149(o.targetResources!); - checkUnnamed150(o.targetSecureTags!); - checkUnnamed151(o.targetServiceAccounts!); + checkUnnamed148(o.targetResources!); + checkUnnamed149(o.targetSecureTags!); + checkUnnamed150(o.targetServiceAccounts!); unittest.expect(o.tlsInspect!, unittest.isTrue); } buildCounterFirewallPolicyRule--; } -core.List buildUnnamed152() => [ +core.List buildUnnamed151() => [ 'foo', 'foo', ]; -void checkUnnamed152(core.List o) { +void checkUnnamed151(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -8891,12 +8830,12 @@ void checkUnnamed152(core.List o) { ); } -core.List buildUnnamed153() => [ +core.List buildUnnamed152() => [ 'foo', 'foo', ]; -void checkUnnamed153(core.List o) { +void checkUnnamed152(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -8908,12 +8847,12 @@ void checkUnnamed153(core.List o) { ); } -core.List buildUnnamed154() => [ +core.List buildUnnamed153() => [ 'foo', 'foo', ]; -void checkUnnamed154(core.List o) { +void checkUnnamed153(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -8925,12 +8864,12 @@ void checkUnnamed154(core.List o) { ); } -core.List buildUnnamed155() => [ +core.List buildUnnamed154() => [ 'foo', 'foo', ]; -void checkUnnamed155(core.List o) { +void checkUnnamed154(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -8942,12 +8881,12 @@ void checkUnnamed155(core.List o) { ); } -core.List buildUnnamed156() => [ +core.List buildUnnamed155() => [ 'foo', 'foo', ]; -void checkUnnamed156(core.List o) { +void checkUnnamed155(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -8959,23 +8898,23 @@ void checkUnnamed156(core.List o) { ); } -core.List buildUnnamed157() => [ +core.List buildUnnamed156() => [ buildFirewallPolicyRuleMatcherLayer4Config(), buildFirewallPolicyRuleMatcherLayer4Config(), ]; -void checkUnnamed157(core.List o) { +void checkUnnamed156(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkFirewallPolicyRuleMatcherLayer4Config(o[0]); checkFirewallPolicyRuleMatcherLayer4Config(o[1]); } -core.List buildUnnamed158() => [ +core.List buildUnnamed157() => [ 'foo', 'foo', ]; -void checkUnnamed158(core.List o) { +void checkUnnamed157(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -8987,12 +8926,12 @@ void checkUnnamed158(core.List o) { ); } -core.List buildUnnamed159() => [ +core.List buildUnnamed158() => [ 'foo', 'foo', ]; -void checkUnnamed159(core.List o) { +void checkUnnamed158(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -9004,12 +8943,12 @@ void checkUnnamed159(core.List o) { ); } -core.List buildUnnamed160() => [ +core.List buildUnnamed159() => [ 'foo', 'foo', ]; -void checkUnnamed160(core.List o) { +void checkUnnamed159(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -9021,12 +8960,12 @@ void checkUnnamed160(core.List o) { ); } -core.List 
buildUnnamed161() => [ +core.List buildUnnamed160() => [ 'foo', 'foo', ]; -void checkUnnamed161(core.List o) { +void checkUnnamed160(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -9038,23 +8977,23 @@ void checkUnnamed161(core.List o) { ); } -core.List buildUnnamed162() => [ +core.List buildUnnamed161() => [ buildFirewallPolicyRuleSecureTag(), buildFirewallPolicyRuleSecureTag(), ]; -void checkUnnamed162(core.List o) { +void checkUnnamed161(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkFirewallPolicyRuleSecureTag(o[0]); checkFirewallPolicyRuleSecureTag(o[1]); } -core.List buildUnnamed163() => [ +core.List buildUnnamed162() => [ 'foo', 'foo', ]; -void checkUnnamed163(core.List o) { +void checkUnnamed162(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -9071,18 +9010,18 @@ api.FirewallPolicyRuleMatcher buildFirewallPolicyRuleMatcher() { final o = api.FirewallPolicyRuleMatcher(); buildCounterFirewallPolicyRuleMatcher++; if (buildCounterFirewallPolicyRuleMatcher < 3) { - o.destAddressGroups = buildUnnamed152(); - o.destFqdns = buildUnnamed153(); - o.destIpRanges = buildUnnamed154(); - o.destRegionCodes = buildUnnamed155(); - o.destThreatIntelligences = buildUnnamed156(); - o.layer4Configs = buildUnnamed157(); - o.srcAddressGroups = buildUnnamed158(); - o.srcFqdns = buildUnnamed159(); - o.srcIpRanges = buildUnnamed160(); - o.srcRegionCodes = buildUnnamed161(); - o.srcSecureTags = buildUnnamed162(); - o.srcThreatIntelligences = buildUnnamed163(); + o.destAddressGroups = buildUnnamed151(); + o.destFqdns = buildUnnamed152(); + o.destIpRanges = buildUnnamed153(); + o.destRegionCodes = buildUnnamed154(); + o.destThreatIntelligences = buildUnnamed155(); + o.layer4Configs = buildUnnamed156(); + o.srcAddressGroups = buildUnnamed157(); + o.srcFqdns = buildUnnamed158(); + o.srcIpRanges = buildUnnamed159(); + o.srcRegionCodes = buildUnnamed160(); + o.srcSecureTags = buildUnnamed161(); + o.srcThreatIntelligences = buildUnnamed162(); } buildCounterFirewallPolicyRuleMatcher--; return o; @@ -9091,28 +9030,28 @@ api.FirewallPolicyRuleMatcher buildFirewallPolicyRuleMatcher() { void checkFirewallPolicyRuleMatcher(api.FirewallPolicyRuleMatcher o) { buildCounterFirewallPolicyRuleMatcher++; if (buildCounterFirewallPolicyRuleMatcher < 3) { - checkUnnamed152(o.destAddressGroups!); - checkUnnamed153(o.destFqdns!); - checkUnnamed154(o.destIpRanges!); - checkUnnamed155(o.destRegionCodes!); - checkUnnamed156(o.destThreatIntelligences!); - checkUnnamed157(o.layer4Configs!); - checkUnnamed158(o.srcAddressGroups!); - checkUnnamed159(o.srcFqdns!); - checkUnnamed160(o.srcIpRanges!); - checkUnnamed161(o.srcRegionCodes!); - checkUnnamed162(o.srcSecureTags!); - checkUnnamed163(o.srcThreatIntelligences!); + checkUnnamed151(o.destAddressGroups!); + checkUnnamed152(o.destFqdns!); + checkUnnamed153(o.destIpRanges!); + checkUnnamed154(o.destRegionCodes!); + checkUnnamed155(o.destThreatIntelligences!); + checkUnnamed156(o.layer4Configs!); + checkUnnamed157(o.srcAddressGroups!); + checkUnnamed158(o.srcFqdns!); + checkUnnamed159(o.srcIpRanges!); + checkUnnamed160(o.srcRegionCodes!); + checkUnnamed161(o.srcSecureTags!); + checkUnnamed162(o.srcThreatIntelligences!); } buildCounterFirewallPolicyRuleMatcher--; } -core.List buildUnnamed164() => [ +core.List buildUnnamed163() => [ 'foo', 'foo', ]; -void checkUnnamed164(core.List o) { +void checkUnnamed163(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -9131,7 +9070,7 @@ 
api.FirewallPolicyRuleMatcherLayer4Config buildCounterFirewallPolicyRuleMatcherLayer4Config++; if (buildCounterFirewallPolicyRuleMatcherLayer4Config < 3) { o.ipProtocol = 'foo'; - o.ports = buildUnnamed164(); + o.ports = buildUnnamed163(); } buildCounterFirewallPolicyRuleMatcherLayer4Config--; return o; @@ -9145,7 +9084,7 @@ void checkFirewallPolicyRuleMatcherLayer4Config( o.ipProtocol!, unittest.equals('foo'), ); - checkUnnamed164(o.ports!); + checkUnnamed163(o.ports!); } buildCounterFirewallPolicyRuleMatcherLayer4Config--; } @@ -9209,12 +9148,12 @@ void checkFixedOrPercent(api.FixedOrPercent o) { buildCounterFixedOrPercent--; } -core.Map buildUnnamed165() => { +core.Map buildUnnamed164() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed165(core.Map o) { +void checkUnnamed164(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -9226,23 +9165,23 @@ void checkUnnamed165(core.Map o) { ); } -core.List buildUnnamed166() => [ +core.List buildUnnamed165() => [ buildMetadataFilter(), buildMetadataFilter(), ]; -void checkUnnamed166(core.List o) { +void checkUnnamed165(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkMetadataFilter(o[0]); checkMetadataFilter(o[1]); } -core.List buildUnnamed167() => [ +core.List buildUnnamed166() => [ 'foo', 'foo', ]; -void checkUnnamed167(core.List o) { +void checkUnnamed166(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -9254,24 +9193,24 @@ void checkUnnamed167(core.List o) { ); } -core.List buildUnnamed168() => [ +core.List buildUnnamed167() => [ buildForwardingRuleServiceDirectoryRegistration(), buildForwardingRuleServiceDirectoryRegistration(), ]; -void checkUnnamed168( +void checkUnnamed167( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkForwardingRuleServiceDirectoryRegistration(o[0]); checkForwardingRuleServiceDirectoryRegistration(o[1]); } -core.List buildUnnamed169() => [ +core.List buildUnnamed168() => [ 'foo', 'foo', ]; -void checkUnnamed169(core.List o) { +void checkUnnamed168(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -9304,23 +9243,23 @@ api.ForwardingRule buildForwardingRule() { o.isMirroringCollector = true; o.kind = 'foo'; o.labelFingerprint = 'foo'; - o.labels = buildUnnamed165(); + o.labels = buildUnnamed164(); o.loadBalancingScheme = 'foo'; - o.metadataFilters = buildUnnamed166(); + o.metadataFilters = buildUnnamed165(); o.name = 'foo'; o.network = 'foo'; o.networkTier = 'foo'; o.noAutomateDnsZone = true; o.portRange = 'foo'; - o.ports = buildUnnamed167(); + o.ports = buildUnnamed166(); o.pscConnectionId = 'foo'; o.pscConnectionStatus = 'foo'; o.region = 'foo'; o.selfLink = 'foo'; - o.serviceDirectoryRegistrations = buildUnnamed168(); + o.serviceDirectoryRegistrations = buildUnnamed167(); o.serviceLabel = 'foo'; o.serviceName = 'foo'; - o.sourceIpRanges = buildUnnamed169(); + o.sourceIpRanges = buildUnnamed168(); o.subnetwork = 'foo'; o.target = 'foo'; } @@ -9383,12 +9322,12 @@ void checkForwardingRule(api.ForwardingRule o) { o.labelFingerprint!, unittest.equals('foo'), ); - checkUnnamed165(o.labels!); + checkUnnamed164(o.labels!); unittest.expect( o.loadBalancingScheme!, unittest.equals('foo'), ); - checkUnnamed166(o.metadataFilters!); + checkUnnamed165(o.metadataFilters!); unittest.expect( o.name!, unittest.equals('foo'), @@ -9406,7 +9345,7 @@ void checkForwardingRule(api.ForwardingRule o) { o.portRange!, unittest.equals('foo'), ); - checkUnnamed167(o.ports!); + checkUnnamed166(o.ports!); 
unittest.expect( o.pscConnectionId!, unittest.equals('foo'), @@ -9423,7 +9362,7 @@ void checkForwardingRule(api.ForwardingRule o) { o.selfLink!, unittest.equals('foo'), ); - checkUnnamed168(o.serviceDirectoryRegistrations!); + checkUnnamed167(o.serviceDirectoryRegistrations!); unittest.expect( o.serviceLabel!, unittest.equals('foo'), @@ -9432,7 +9371,7 @@ void checkForwardingRule(api.ForwardingRule o) { o.serviceName!, unittest.equals('foo'), ); - checkUnnamed169(o.sourceIpRanges!); + checkUnnamed168(o.sourceIpRanges!); unittest.expect( o.subnetwork!, unittest.equals('foo'), @@ -9445,23 +9384,23 @@ void checkForwardingRule(api.ForwardingRule o) { buildCounterForwardingRule--; } -core.Map buildUnnamed170() => { +core.Map buildUnnamed169() => { 'x': buildForwardingRulesScopedList(), 'y': buildForwardingRulesScopedList(), }; -void checkUnnamed170(core.Map o) { +void checkUnnamed169(core.Map o) { unittest.expect(o, unittest.hasLength(2)); checkForwardingRulesScopedList(o['x']!); checkForwardingRulesScopedList(o['y']!); } -core.List buildUnnamed171() => [ +core.List buildUnnamed170() => [ 'foo', 'foo', ]; -void checkUnnamed171(core.List o) { +void checkUnnamed170(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -9502,12 +9441,12 @@ void checkForwardingRuleAggregatedListWarningData( buildCounterForwardingRuleAggregatedListWarningData--; } -core.List buildUnnamed172() => [ +core.List buildUnnamed171() => [ buildForwardingRuleAggregatedListWarningData(), buildForwardingRuleAggregatedListWarningData(), ]; -void checkUnnamed172(core.List o) { +void checkUnnamed171(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkForwardingRuleAggregatedListWarningData(o[0]); checkForwardingRuleAggregatedListWarningData(o[1]); @@ -9520,7 +9459,7 @@ api.ForwardingRuleAggregatedListWarning buildCounterForwardingRuleAggregatedListWarning++; if (buildCounterForwardingRuleAggregatedListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed172(); + o.data = buildUnnamed171(); o.message = 'foo'; } buildCounterForwardingRuleAggregatedListWarning--; @@ -9535,7 +9474,7 @@ void checkForwardingRuleAggregatedListWarning( o.code!, unittest.equals('foo'), ); - checkUnnamed172(o.data!); + checkUnnamed171(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -9550,11 +9489,11 @@ api.ForwardingRuleAggregatedList buildForwardingRuleAggregatedList() { buildCounterForwardingRuleAggregatedList++; if (buildCounterForwardingRuleAggregatedList < 3) { o.id = 'foo'; - o.items = buildUnnamed170(); + o.items = buildUnnamed169(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; - o.unreachables = buildUnnamed171(); + o.unreachables = buildUnnamed170(); o.warning = buildForwardingRuleAggregatedListWarning(); } buildCounterForwardingRuleAggregatedList--; @@ -9568,7 +9507,7 @@ void checkForwardingRuleAggregatedList(api.ForwardingRuleAggregatedList o) { o.id!, unittest.equals('foo'), ); - checkUnnamed170(o.items!); + checkUnnamed169(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -9581,18 +9520,18 @@ void checkForwardingRuleAggregatedList(api.ForwardingRuleAggregatedList o) { o.selfLink!, unittest.equals('foo'), ); - checkUnnamed171(o.unreachables!); + checkUnnamed170(o.unreachables!); checkForwardingRuleAggregatedListWarning(o.warning!); } buildCounterForwardingRuleAggregatedList--; } -core.List buildUnnamed173() => [ +core.List buildUnnamed172() => [ buildForwardingRule(), buildForwardingRule(), ]; -void checkUnnamed173(core.List o) { +void 
checkUnnamed172(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkForwardingRule(o[0]); checkForwardingRule(o[1]); @@ -9625,12 +9564,12 @@ void checkForwardingRuleListWarningData(api.ForwardingRuleListWarningData o) { buildCounterForwardingRuleListWarningData--; } -core.List buildUnnamed174() => [ +core.List buildUnnamed173() => [ buildForwardingRuleListWarningData(), buildForwardingRuleListWarningData(), ]; -void checkUnnamed174(core.List o) { +void checkUnnamed173(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkForwardingRuleListWarningData(o[0]); checkForwardingRuleListWarningData(o[1]); @@ -9642,7 +9581,7 @@ api.ForwardingRuleListWarning buildForwardingRuleListWarning() { buildCounterForwardingRuleListWarning++; if (buildCounterForwardingRuleListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed174(); + o.data = buildUnnamed173(); o.message = 'foo'; } buildCounterForwardingRuleListWarning--; @@ -9656,7 +9595,7 @@ void checkForwardingRuleListWarning(api.ForwardingRuleListWarning o) { o.code!, unittest.equals('foo'), ); - checkUnnamed174(o.data!); + checkUnnamed173(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -9671,7 +9610,7 @@ api.ForwardingRuleList buildForwardingRuleList() { buildCounterForwardingRuleList++; if (buildCounterForwardingRuleList < 3) { o.id = 'foo'; - o.items = buildUnnamed173(); + o.items = buildUnnamed172(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; @@ -9688,7 +9627,7 @@ void checkForwardingRuleList(api.ForwardingRuleList o) { o.id!, unittest.equals('foo'), ); - checkUnnamed173(o.items!); + checkUnnamed172(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -9762,12 +9701,12 @@ void checkForwardingRuleServiceDirectoryRegistration( buildCounterForwardingRuleServiceDirectoryRegistration--; } -core.List buildUnnamed175() => [ +core.List buildUnnamed174() => [ buildForwardingRule(), buildForwardingRule(), ]; -void checkUnnamed175(core.List o) { +void checkUnnamed174(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkForwardingRule(o[0]); checkForwardingRule(o[1]); @@ -9802,12 +9741,12 @@ void checkForwardingRulesScopedListWarningData( buildCounterForwardingRulesScopedListWarningData--; } -core.List buildUnnamed176() => [ +core.List buildUnnamed175() => [ buildForwardingRulesScopedListWarningData(), buildForwardingRulesScopedListWarningData(), ]; -void checkUnnamed176(core.List o) { +void checkUnnamed175(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkForwardingRulesScopedListWarningData(o[0]); checkForwardingRulesScopedListWarningData(o[1]); @@ -9819,7 +9758,7 @@ api.ForwardingRulesScopedListWarning buildForwardingRulesScopedListWarning() { buildCounterForwardingRulesScopedListWarning++; if (buildCounterForwardingRulesScopedListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed176(); + o.data = buildUnnamed175(); o.message = 'foo'; } buildCounterForwardingRulesScopedListWarning--; @@ -9834,7 +9773,7 @@ void checkForwardingRulesScopedListWarning( o.code!, unittest.equals('foo'), ); - checkUnnamed176(o.data!); + checkUnnamed175(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -9848,7 +9787,7 @@ api.ForwardingRulesScopedList buildForwardingRulesScopedList() { final o = api.ForwardingRulesScopedList(); buildCounterForwardingRulesScopedList++; if (buildCounterForwardingRulesScopedList < 3) { - o.forwardingRules = buildUnnamed175(); + o.forwardingRules = buildUnnamed174(); o.warning = buildForwardingRulesScopedListWarning(); } 
buildCounterForwardingRulesScopedList--; @@ -9858,7 +9797,7 @@ api.ForwardingRulesScopedList buildForwardingRulesScopedList() { void checkForwardingRulesScopedList(api.ForwardingRulesScopedList o) { buildCounterForwardingRulesScopedList++; if (buildCounterForwardingRulesScopedList < 3) { - checkUnnamed175(o.forwardingRules!); + checkUnnamed174(o.forwardingRules!); checkForwardingRulesScopedListWarning(o.warning!); } buildCounterForwardingRulesScopedList--; @@ -9928,12 +9867,12 @@ void checkGlobalAddressesMoveRequest(api.GlobalAddressesMoveRequest o) { buildCounterGlobalAddressesMoveRequest--; } -core.List buildUnnamed177() => [ +core.List buildUnnamed176() => [ buildNetworkEndpoint(), buildNetworkEndpoint(), ]; -void checkUnnamed177(core.List o) { +void checkUnnamed176(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkNetworkEndpoint(o[0]); checkNetworkEndpoint(o[1]); @@ -9945,7 +9884,7 @@ api.GlobalNetworkEndpointGroupsAttachEndpointsRequest final o = api.GlobalNetworkEndpointGroupsAttachEndpointsRequest(); buildCounterGlobalNetworkEndpointGroupsAttachEndpointsRequest++; if (buildCounterGlobalNetworkEndpointGroupsAttachEndpointsRequest < 3) { - o.networkEndpoints = buildUnnamed177(); + o.networkEndpoints = buildUnnamed176(); } buildCounterGlobalNetworkEndpointGroupsAttachEndpointsRequest--; return o; @@ -9955,17 +9894,17 @@ void checkGlobalNetworkEndpointGroupsAttachEndpointsRequest( api.GlobalNetworkEndpointGroupsAttachEndpointsRequest o) { buildCounterGlobalNetworkEndpointGroupsAttachEndpointsRequest++; if (buildCounterGlobalNetworkEndpointGroupsAttachEndpointsRequest < 3) { - checkUnnamed177(o.networkEndpoints!); + checkUnnamed176(o.networkEndpoints!); } buildCounterGlobalNetworkEndpointGroupsAttachEndpointsRequest--; } -core.List buildUnnamed178() => [ +core.List buildUnnamed177() => [ buildNetworkEndpoint(), buildNetworkEndpoint(), ]; -void checkUnnamed178(core.List o) { +void checkUnnamed177(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkNetworkEndpoint(o[0]); checkNetworkEndpoint(o[1]); @@ -9977,7 +9916,7 @@ api.GlobalNetworkEndpointGroupsDetachEndpointsRequest final o = api.GlobalNetworkEndpointGroupsDetachEndpointsRequest(); buildCounterGlobalNetworkEndpointGroupsDetachEndpointsRequest++; if (buildCounterGlobalNetworkEndpointGroupsDetachEndpointsRequest < 3) { - o.networkEndpoints = buildUnnamed178(); + o.networkEndpoints = buildUnnamed177(); } buildCounterGlobalNetworkEndpointGroupsDetachEndpointsRequest--; return o; @@ -9987,17 +9926,17 @@ void checkGlobalNetworkEndpointGroupsDetachEndpointsRequest( api.GlobalNetworkEndpointGroupsDetachEndpointsRequest o) { buildCounterGlobalNetworkEndpointGroupsDetachEndpointsRequest++; if (buildCounterGlobalNetworkEndpointGroupsDetachEndpointsRequest < 3) { - checkUnnamed178(o.networkEndpoints!); + checkUnnamed177(o.networkEndpoints!); } buildCounterGlobalNetworkEndpointGroupsDetachEndpointsRequest--; } -core.List buildUnnamed179() => [ +core.List buildUnnamed178() => [ buildBinding(), buildBinding(), ]; -void checkUnnamed179(core.List o) { +void checkUnnamed178(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkBinding(o[0]); checkBinding(o[1]); @@ -10009,7 +9948,7 @@ api.GlobalOrganizationSetPolicyRequest final o = api.GlobalOrganizationSetPolicyRequest(); buildCounterGlobalOrganizationSetPolicyRequest++; if (buildCounterGlobalOrganizationSetPolicyRequest < 3) { - o.bindings = buildUnnamed179(); + o.bindings = buildUnnamed178(); o.etag = 'foo'; o.policy = buildPolicy(); } @@ -10021,7 +9960,7 @@ void 
checkGlobalOrganizationSetPolicyRequest( api.GlobalOrganizationSetPolicyRequest o) { buildCounterGlobalOrganizationSetPolicyRequest++; if (buildCounterGlobalOrganizationSetPolicyRequest < 3) { - checkUnnamed179(o.bindings!); + checkUnnamed178(o.bindings!); unittest.expect( o.etag!, unittest.equals('foo'), @@ -10031,12 +9970,12 @@ void checkGlobalOrganizationSetPolicyRequest( buildCounterGlobalOrganizationSetPolicyRequest--; } -core.Map buildUnnamed180() => { +core.Map buildUnnamed179() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed180(core.Map o) { +void checkUnnamed179(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -10054,7 +9993,7 @@ api.GlobalSetLabelsRequest buildGlobalSetLabelsRequest() { buildCounterGlobalSetLabelsRequest++; if (buildCounterGlobalSetLabelsRequest < 3) { o.labelFingerprint = 'foo'; - o.labels = buildUnnamed180(); + o.labels = buildUnnamed179(); } buildCounterGlobalSetLabelsRequest--; return o; @@ -10067,17 +10006,17 @@ void checkGlobalSetLabelsRequest(api.GlobalSetLabelsRequest o) { o.labelFingerprint!, unittest.equals('foo'), ); - checkUnnamed180(o.labels!); + checkUnnamed179(o.labels!); } buildCounterGlobalSetLabelsRequest--; } -core.List buildUnnamed181() => [ +core.List buildUnnamed180() => [ buildBinding(), buildBinding(), ]; -void checkUnnamed181(core.List o) { +void checkUnnamed180(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkBinding(o[0]); checkBinding(o[1]); @@ -10088,7 +10027,7 @@ api.GlobalSetPolicyRequest buildGlobalSetPolicyRequest() { final o = api.GlobalSetPolicyRequest(); buildCounterGlobalSetPolicyRequest++; if (buildCounterGlobalSetPolicyRequest < 3) { - o.bindings = buildUnnamed181(); + o.bindings = buildUnnamed180(); o.etag = 'foo'; o.policy = buildPolicy(); } @@ -10099,7 +10038,7 @@ api.GlobalSetPolicyRequest buildGlobalSetPolicyRequest() { void checkGlobalSetPolicyRequest(api.GlobalSetPolicyRequest o) { buildCounterGlobalSetPolicyRequest++; if (buildCounterGlobalSetPolicyRequest < 3) { - checkUnnamed181(o.bindings!); + checkUnnamed180(o.bindings!); unittest.expect( o.etag!, unittest.equals('foo'), @@ -10185,12 +10124,12 @@ void checkGuestAttributesEntry(api.GuestAttributesEntry o) { buildCounterGuestAttributesEntry--; } -core.List buildUnnamed182() => [ +core.List buildUnnamed181() => [ buildGuestAttributesEntry(), buildGuestAttributesEntry(), ]; -void checkUnnamed182(core.List o) { +void checkUnnamed181(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGuestAttributesEntry(o[0]); checkGuestAttributesEntry(o[1]); @@ -10201,7 +10140,7 @@ api.GuestAttributesValue buildGuestAttributesValue() { final o = api.GuestAttributesValue(); buildCounterGuestAttributesValue++; if (buildCounterGuestAttributesValue < 3) { - o.items = buildUnnamed182(); + o.items = buildUnnamed181(); } buildCounterGuestAttributesValue--; return o; @@ -10210,7 +10149,7 @@ api.GuestAttributesValue buildGuestAttributesValue() { void checkGuestAttributesValue(api.GuestAttributesValue o) { buildCounterGuestAttributesValue++; if (buildCounterGuestAttributesValue < 3) { - checkUnnamed182(o.items!); + checkUnnamed181(o.items!); } buildCounterGuestAttributesValue--; } @@ -10393,12 +10332,12 @@ void checkHTTPSHealthCheck(api.HTTPSHealthCheck o) { buildCounterHTTPSHealthCheck--; } -core.List buildUnnamed183() => [ +core.List buildUnnamed182() => [ 'foo', 'foo', ]; -void checkUnnamed183(core.List o) { +void checkUnnamed182(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -10429,7 
+10368,7 @@ api.HealthCheck buildHealthCheck() { o.name = 'foo'; o.region = 'foo'; o.selfLink = 'foo'; - o.sourceRegions = buildUnnamed183(); + o.sourceRegions = buildUnnamed182(); o.sslHealthCheck = buildSSLHealthCheck(); o.tcpHealthCheck = buildTCPHealthCheck(); o.timeoutSec = 42; @@ -10484,7 +10423,7 @@ void checkHealthCheck(api.HealthCheck o) { o.selfLink!, unittest.equals('foo'), ); - checkUnnamed183(o.sourceRegions!); + checkUnnamed182(o.sourceRegions!); checkSSLHealthCheck(o.sslHealthCheck!); checkTCPHealthCheck(o.tcpHealthCheck!); unittest.expect( @@ -10503,12 +10442,12 @@ void checkHealthCheck(api.HealthCheck o) { buildCounterHealthCheck--; } -core.List buildUnnamed184() => [ +core.List buildUnnamed183() => [ buildHealthCheck(), buildHealthCheck(), ]; -void checkUnnamed184(core.List o) { +void checkUnnamed183(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkHealthCheck(o[0]); checkHealthCheck(o[1]); @@ -10541,12 +10480,12 @@ void checkHealthCheckListWarningData(api.HealthCheckListWarningData o) { buildCounterHealthCheckListWarningData--; } -core.List buildUnnamed185() => [ +core.List buildUnnamed184() => [ buildHealthCheckListWarningData(), buildHealthCheckListWarningData(), ]; -void checkUnnamed185(core.List o) { +void checkUnnamed184(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkHealthCheckListWarningData(o[0]); checkHealthCheckListWarningData(o[1]); @@ -10558,7 +10497,7 @@ api.HealthCheckListWarning buildHealthCheckListWarning() { buildCounterHealthCheckListWarning++; if (buildCounterHealthCheckListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed185(); + o.data = buildUnnamed184(); o.message = 'foo'; } buildCounterHealthCheckListWarning--; @@ -10572,7 +10511,7 @@ void checkHealthCheckListWarning(api.HealthCheckListWarning o) { o.code!, unittest.equals('foo'), ); - checkUnnamed185(o.data!); + checkUnnamed184(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -10587,7 +10526,7 @@ api.HealthCheckList buildHealthCheckList() { buildCounterHealthCheckList++; if (buildCounterHealthCheckList < 3) { o.id = 'foo'; - o.items = buildUnnamed184(); + o.items = buildUnnamed183(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; @@ -10604,7 +10543,7 @@ void checkHealthCheckList(api.HealthCheckList o) { o.id!, unittest.equals('foo'), ); - checkUnnamed184(o.items!); + checkUnnamed183(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -10663,12 +10602,12 @@ void checkHealthCheckReference(api.HealthCheckReference o) { buildCounterHealthCheckReference--; } -core.List buildUnnamed186() => [ +core.List buildUnnamed185() => [ 'foo', 'foo', ]; -void checkUnnamed186(core.List o) { +void checkUnnamed185(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -10680,12 +10619,12 @@ void checkUnnamed186(core.List o) { ); } -core.List buildUnnamed187() => [ +core.List buildUnnamed186() => [ 'foo', 'foo', ]; -void checkUnnamed187(core.List o) { +void checkUnnamed186(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -10697,12 +10636,12 @@ void checkUnnamed187(core.List o) { ); } -core.List buildUnnamed188() => [ +core.List buildUnnamed187() => [ 'foo', 'foo', ]; -void checkUnnamed188(core.List o) { +void checkUnnamed187(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -10722,13 +10661,13 @@ api.HealthCheckService buildHealthCheckService() { o.creationTimestamp = 'foo'; o.description = 'foo'; o.fingerprint = 'foo'; - o.healthChecks = 
buildUnnamed186(); + o.healthChecks = buildUnnamed185(); o.healthStatusAggregationPolicy = 'foo'; o.id = 'foo'; o.kind = 'foo'; o.name = 'foo'; - o.networkEndpointGroups = buildUnnamed187(); - o.notificationEndpoints = buildUnnamed188(); + o.networkEndpointGroups = buildUnnamed186(); + o.notificationEndpoints = buildUnnamed187(); o.region = 'foo'; o.selfLink = 'foo'; } @@ -10751,7 +10690,7 @@ void checkHealthCheckService(api.HealthCheckService o) { o.fingerprint!, unittest.equals('foo'), ); - checkUnnamed186(o.healthChecks!); + checkUnnamed185(o.healthChecks!); unittest.expect( o.healthStatusAggregationPolicy!, unittest.equals('foo'), @@ -10768,8 +10707,8 @@ void checkHealthCheckService(api.HealthCheckService o) { o.name!, unittest.equals('foo'), ); - checkUnnamed187(o.networkEndpointGroups!); - checkUnnamed188(o.notificationEndpoints!); + checkUnnamed186(o.networkEndpointGroups!); + checkUnnamed187(o.notificationEndpoints!); unittest.expect( o.region!, unittest.equals('foo'), @@ -10804,12 +10743,12 @@ void checkHealthCheckServiceReference(api.HealthCheckServiceReference o) { buildCounterHealthCheckServiceReference--; } -core.List buildUnnamed189() => [ +core.List buildUnnamed188() => [ buildHealthCheckService(), buildHealthCheckService(), ]; -void checkUnnamed189(core.List o) { +void checkUnnamed188(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkHealthCheckService(o[0]); checkHealthCheckService(o[1]); @@ -10844,12 +10783,12 @@ void checkHealthCheckServicesListWarningData( buildCounterHealthCheckServicesListWarningData--; } -core.List buildUnnamed190() => [ +core.List buildUnnamed189() => [ buildHealthCheckServicesListWarningData(), buildHealthCheckServicesListWarningData(), ]; -void checkUnnamed190(core.List o) { +void checkUnnamed189(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkHealthCheckServicesListWarningData(o[0]); checkHealthCheckServicesListWarningData(o[1]); @@ -10861,7 +10800,7 @@ api.HealthCheckServicesListWarning buildHealthCheckServicesListWarning() { buildCounterHealthCheckServicesListWarning++; if (buildCounterHealthCheckServicesListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed190(); + o.data = buildUnnamed189(); o.message = 'foo'; } buildCounterHealthCheckServicesListWarning--; @@ -10875,7 +10814,7 @@ void checkHealthCheckServicesListWarning(api.HealthCheckServicesListWarning o) { o.code!, unittest.equals('foo'), ); - checkUnnamed190(o.data!); + checkUnnamed189(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -10890,7 +10829,7 @@ api.HealthCheckServicesList buildHealthCheckServicesList() { buildCounterHealthCheckServicesList++; if (buildCounterHealthCheckServicesList < 3) { o.id = 'foo'; - o.items = buildUnnamed189(); + o.items = buildUnnamed188(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; @@ -10907,7 +10846,7 @@ void checkHealthCheckServicesList(api.HealthCheckServicesList o) { o.id!, unittest.equals('foo'), ); - checkUnnamed189(o.items!); + checkUnnamed188(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -10925,23 +10864,23 @@ void checkHealthCheckServicesList(api.HealthCheckServicesList o) { buildCounterHealthCheckServicesList--; } -core.Map buildUnnamed191() => { +core.Map buildUnnamed190() => { 'x': buildHealthChecksScopedList(), 'y': buildHealthChecksScopedList(), }; -void checkUnnamed191(core.Map o) { +void checkUnnamed190(core.Map o) { unittest.expect(o, unittest.hasLength(2)); checkHealthChecksScopedList(o['x']!); checkHealthChecksScopedList(o['y']!); } -core.List 
buildUnnamed192() => [ +core.List buildUnnamed191() => [ 'foo', 'foo', ]; -void checkUnnamed192(core.List o) { +void checkUnnamed191(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -10982,12 +10921,12 @@ void checkHealthChecksAggregatedListWarningData( buildCounterHealthChecksAggregatedListWarningData--; } -core.List buildUnnamed193() => [ +core.List buildUnnamed192() => [ buildHealthChecksAggregatedListWarningData(), buildHealthChecksAggregatedListWarningData(), ]; -void checkUnnamed193(core.List o) { +void checkUnnamed192(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkHealthChecksAggregatedListWarningData(o[0]); checkHealthChecksAggregatedListWarningData(o[1]); @@ -10999,7 +10938,7 @@ api.HealthChecksAggregatedListWarning buildHealthChecksAggregatedListWarning() { buildCounterHealthChecksAggregatedListWarning++; if (buildCounterHealthChecksAggregatedListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed193(); + o.data = buildUnnamed192(); o.message = 'foo'; } buildCounterHealthChecksAggregatedListWarning--; @@ -11014,7 +10953,7 @@ void checkHealthChecksAggregatedListWarning( o.code!, unittest.equals('foo'), ); - checkUnnamed193(o.data!); + checkUnnamed192(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -11029,11 +10968,11 @@ api.HealthChecksAggregatedList buildHealthChecksAggregatedList() { buildCounterHealthChecksAggregatedList++; if (buildCounterHealthChecksAggregatedList < 3) { o.id = 'foo'; - o.items = buildUnnamed191(); + o.items = buildUnnamed190(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; - o.unreachables = buildUnnamed192(); + o.unreachables = buildUnnamed191(); o.warning = buildHealthChecksAggregatedListWarning(); } buildCounterHealthChecksAggregatedList--; @@ -11047,7 +10986,7 @@ void checkHealthChecksAggregatedList(api.HealthChecksAggregatedList o) { o.id!, unittest.equals('foo'), ); - checkUnnamed191(o.items!); + checkUnnamed190(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -11060,18 +10999,18 @@ void checkHealthChecksAggregatedList(api.HealthChecksAggregatedList o) { o.selfLink!, unittest.equals('foo'), ); - checkUnnamed192(o.unreachables!); + checkUnnamed191(o.unreachables!); checkHealthChecksAggregatedListWarning(o.warning!); } buildCounterHealthChecksAggregatedList--; } -core.List buildUnnamed194() => [ +core.List buildUnnamed193() => [ buildHealthCheck(), buildHealthCheck(), ]; -void checkUnnamed194(core.List o) { +void checkUnnamed193(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkHealthCheck(o[0]); checkHealthCheck(o[1]); @@ -11105,12 +11044,12 @@ void checkHealthChecksScopedListWarningData( buildCounterHealthChecksScopedListWarningData--; } -core.List buildUnnamed195() => [ +core.List buildUnnamed194() => [ buildHealthChecksScopedListWarningData(), buildHealthChecksScopedListWarningData(), ]; -void checkUnnamed195(core.List o) { +void checkUnnamed194(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkHealthChecksScopedListWarningData(o[0]); checkHealthChecksScopedListWarningData(o[1]); @@ -11122,7 +11061,7 @@ api.HealthChecksScopedListWarning buildHealthChecksScopedListWarning() { buildCounterHealthChecksScopedListWarning++; if (buildCounterHealthChecksScopedListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed195(); + o.data = buildUnnamed194(); o.message = 'foo'; } buildCounterHealthChecksScopedListWarning--; @@ -11136,7 +11075,7 @@ void checkHealthChecksScopedListWarning(api.HealthChecksScopedListWarning o) { o.code!, 
unittest.equals('foo'), ); - checkUnnamed195(o.data!); + checkUnnamed194(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -11150,7 +11089,7 @@ api.HealthChecksScopedList buildHealthChecksScopedList() { final o = api.HealthChecksScopedList(); buildCounterHealthChecksScopedList++; if (buildCounterHealthChecksScopedList < 3) { - o.healthChecks = buildUnnamed194(); + o.healthChecks = buildUnnamed193(); o.warning = buildHealthChecksScopedListWarning(); } buildCounterHealthChecksScopedList--; @@ -11160,18 +11099,18 @@ api.HealthChecksScopedList buildHealthChecksScopedList() { void checkHealthChecksScopedList(api.HealthChecksScopedList o) { buildCounterHealthChecksScopedList++; if (buildCounterHealthChecksScopedList < 3) { - checkUnnamed194(o.healthChecks!); + checkUnnamed193(o.healthChecks!); checkHealthChecksScopedListWarning(o.warning!); } buildCounterHealthChecksScopedList--; } -core.Map buildUnnamed196() => { +core.Map buildUnnamed195() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed196(core.Map o) { +void checkUnnamed195(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -11188,7 +11127,7 @@ api.HealthStatus buildHealthStatus() { final o = api.HealthStatus(); buildCounterHealthStatus++; if (buildCounterHealthStatus < 3) { - o.annotations = buildUnnamed196(); + o.annotations = buildUnnamed195(); o.forwardingRule = 'foo'; o.forwardingRuleIp = 'foo'; o.healthState = 'foo'; @@ -11207,7 +11146,7 @@ api.HealthStatus buildHealthStatus() { void checkHealthStatus(api.HealthStatus o) { buildCounterHealthStatus++; if (buildCounterHealthStatus < 3) { - checkUnnamed196(o.annotations!); + checkUnnamed195(o.annotations!); unittest.expect( o.forwardingRule!, unittest.equals('foo'), @@ -11287,12 +11226,12 @@ void checkHealthStatusForNetworkEndpoint(api.HealthStatusForNetworkEndpoint o) { buildCounterHealthStatusForNetworkEndpoint--; } -core.List buildUnnamed197() => [ +core.List buildUnnamed196() => [ buildHelpLink(), buildHelpLink(), ]; -void checkUnnamed197(core.List o) { +void checkUnnamed196(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkHelpLink(o[0]); checkHelpLink(o[1]); @@ -11303,7 +11242,7 @@ api.Help buildHelp() { final o = api.Help(); buildCounterHelp++; if (buildCounterHelp < 3) { - o.links = buildUnnamed197(); + o.links = buildUnnamed196(); } buildCounterHelp--; return o; @@ -11312,7 +11251,7 @@ api.Help buildHelp() { void checkHelp(api.Help o) { buildCounterHelp++; if (buildCounterHelp < 3) { - checkUnnamed197(o.links!); + checkUnnamed196(o.links!); } buildCounterHelp--; } @@ -11344,12 +11283,12 @@ void checkHelpLink(api.HelpLink o) { buildCounterHelpLink--; } -core.List buildUnnamed198() => [ +core.List buildUnnamed197() => [ 'foo', 'foo', ]; -void checkUnnamed198(core.List o) { +void checkUnnamed197(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -11367,7 +11306,7 @@ api.HostRule buildHostRule() { buildCounterHostRule++; if (buildCounterHostRule < 3) { o.description = 'foo'; - o.hosts = buildUnnamed198(); + o.hosts = buildUnnamed197(); o.pathMatcher = 'foo'; } buildCounterHostRule--; @@ -11381,7 +11320,7 @@ void checkHostRule(api.HostRule o) { o.description!, unittest.equals('foo'), ); - checkUnnamed198(o.hosts!); + checkUnnamed197(o.hosts!); unittest.expect( o.pathMatcher!, unittest.equals('foo'), @@ -11462,23 +11401,23 @@ void checkHttpFaultInjection(api.HttpFaultInjection o) { buildCounterHttpFaultInjection--; } -core.List buildUnnamed199() => [ +core.List buildUnnamed198() => [ 
buildHttpHeaderOption(), buildHttpHeaderOption(), ]; -void checkUnnamed199(core.List o) { +void checkUnnamed198(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkHttpHeaderOption(o[0]); checkHttpHeaderOption(o[1]); } -core.List buildUnnamed200() => [ +core.List buildUnnamed199() => [ 'foo', 'foo', ]; -void checkUnnamed200(core.List o) { +void checkUnnamed199(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -11490,23 +11429,23 @@ void checkUnnamed200(core.List o) { ); } -core.List buildUnnamed201() => [ +core.List buildUnnamed200() => [ buildHttpHeaderOption(), buildHttpHeaderOption(), ]; -void checkUnnamed201(core.List o) { +void checkUnnamed200(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkHttpHeaderOption(o[0]); checkHttpHeaderOption(o[1]); } -core.List buildUnnamed202() => [ +core.List buildUnnamed201() => [ 'foo', 'foo', ]; -void checkUnnamed202(core.List o) { +void checkUnnamed201(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -11523,10 +11462,10 @@ api.HttpHeaderAction buildHttpHeaderAction() { final o = api.HttpHeaderAction(); buildCounterHttpHeaderAction++; if (buildCounterHttpHeaderAction < 3) { - o.requestHeadersToAdd = buildUnnamed199(); - o.requestHeadersToRemove = buildUnnamed200(); - o.responseHeadersToAdd = buildUnnamed201(); - o.responseHeadersToRemove = buildUnnamed202(); + o.requestHeadersToAdd = buildUnnamed198(); + o.requestHeadersToRemove = buildUnnamed199(); + o.responseHeadersToAdd = buildUnnamed200(); + o.responseHeadersToRemove = buildUnnamed201(); } buildCounterHttpHeaderAction--; return o; @@ -11535,10 +11474,10 @@ api.HttpHeaderAction buildHttpHeaderAction() { void checkHttpHeaderAction(api.HttpHeaderAction o) { buildCounterHttpHeaderAction++; if (buildCounterHttpHeaderAction < 3) { - checkUnnamed199(o.requestHeadersToAdd!); - checkUnnamed200(o.requestHeadersToRemove!); - checkUnnamed201(o.responseHeadersToAdd!); - checkUnnamed202(o.responseHeadersToRemove!); + checkUnnamed198(o.requestHeadersToAdd!); + checkUnnamed199(o.requestHeadersToRemove!); + checkUnnamed200(o.responseHeadersToAdd!); + checkUnnamed201(o.responseHeadersToRemove!); } buildCounterHttpHeaderAction--; } @@ -11702,12 +11641,12 @@ void checkHttpHealthCheck(api.HttpHealthCheck o) { buildCounterHttpHealthCheck--; } -core.List buildUnnamed203() => [ +core.List buildUnnamed202() => [ buildHttpHealthCheck(), buildHttpHealthCheck(), ]; -void checkUnnamed203(core.List o) { +void checkUnnamed202(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkHttpHealthCheck(o[0]); checkHttpHealthCheck(o[1]); @@ -11740,12 +11679,12 @@ void checkHttpHealthCheckListWarningData(api.HttpHealthCheckListWarningData o) { buildCounterHttpHealthCheckListWarningData--; } -core.List buildUnnamed204() => [ +core.List buildUnnamed203() => [ buildHttpHealthCheckListWarningData(), buildHttpHealthCheckListWarningData(), ]; -void checkUnnamed204(core.List o) { +void checkUnnamed203(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkHttpHealthCheckListWarningData(o[0]); checkHttpHealthCheckListWarningData(o[1]); @@ -11757,7 +11696,7 @@ api.HttpHealthCheckListWarning buildHttpHealthCheckListWarning() { buildCounterHttpHealthCheckListWarning++; if (buildCounterHttpHealthCheckListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed204(); + o.data = buildUnnamed203(); o.message = 'foo'; } buildCounterHttpHealthCheckListWarning--; @@ -11771,7 +11710,7 @@ void 
checkHttpHealthCheckListWarning(api.HttpHealthCheckListWarning o) { o.code!, unittest.equals('foo'), ); - checkUnnamed204(o.data!); + checkUnnamed203(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -11786,7 +11725,7 @@ api.HttpHealthCheckList buildHttpHealthCheckList() { buildCounterHttpHealthCheckList++; if (buildCounterHttpHealthCheckList < 3) { o.id = 'foo'; - o.items = buildUnnamed203(); + o.items = buildUnnamed202(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; @@ -11803,7 +11742,7 @@ void checkHttpHealthCheckList(api.HttpHealthCheckList o) { o.id!, unittest.equals('foo'), ); - checkUnnamed203(o.items!); + checkUnnamed202(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -11896,12 +11835,12 @@ void checkHttpRedirectAction(api.HttpRedirectAction o) { buildCounterHttpRedirectAction--; } -core.List buildUnnamed205() => [ +core.List buildUnnamed204() => [ 'foo', 'foo', ]; -void checkUnnamed205(core.List o) { +void checkUnnamed204(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -11920,7 +11859,7 @@ api.HttpRetryPolicy buildHttpRetryPolicy() { if (buildCounterHttpRetryPolicy < 3) { o.numRetries = 42; o.perTryTimeout = buildDuration(); - o.retryConditions = buildUnnamed205(); + o.retryConditions = buildUnnamed204(); } buildCounterHttpRetryPolicy--; return o; @@ -11934,17 +11873,17 @@ void checkHttpRetryPolicy(api.HttpRetryPolicy o) { unittest.equals(42), ); checkDuration(o.perTryTimeout!); - checkUnnamed205(o.retryConditions!); + checkUnnamed204(o.retryConditions!); } buildCounterHttpRetryPolicy--; } -core.List buildUnnamed206() => [ +core.List buildUnnamed205() => [ buildWeightedBackendService(), buildWeightedBackendService(), ]; -void checkUnnamed206(core.List o) { +void checkUnnamed205(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkWeightedBackendService(o[0]); checkWeightedBackendService(o[1]); @@ -11962,7 +11901,7 @@ api.HttpRouteAction buildHttpRouteAction() { o.retryPolicy = buildHttpRetryPolicy(); o.timeout = buildDuration(); o.urlRewrite = buildUrlRewrite(); - o.weightedBackendServices = buildUnnamed206(); + o.weightedBackendServices = buildUnnamed205(); } buildCounterHttpRouteAction--; return o; @@ -11978,17 +11917,17 @@ void checkHttpRouteAction(api.HttpRouteAction o) { checkHttpRetryPolicy(o.retryPolicy!); checkDuration(o.timeout!); checkUrlRewrite(o.urlRewrite!); - checkUnnamed206(o.weightedBackendServices!); + checkUnnamed205(o.weightedBackendServices!); } buildCounterHttpRouteAction--; } -core.List buildUnnamed207() => [ +core.List buildUnnamed206() => [ buildHttpRouteRuleMatch(), buildHttpRouteRuleMatch(), ]; -void checkUnnamed207(core.List o) { +void checkUnnamed206(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkHttpRouteRuleMatch(o[0]); checkHttpRouteRuleMatch(o[1]); @@ -12002,7 +11941,7 @@ api.HttpRouteRule buildHttpRouteRule() { o.customErrorResponsePolicy = buildCustomErrorResponsePolicy(); o.description = 'foo'; o.headerAction = buildHttpHeaderAction(); - o.matchRules = buildUnnamed207(); + o.matchRules = buildUnnamed206(); o.priority = 42; o.routeAction = buildHttpRouteAction(); o.service = 'foo'; @@ -12021,7 +11960,7 @@ void checkHttpRouteRule(api.HttpRouteRule o) { unittest.equals('foo'), ); checkHttpHeaderAction(o.headerAction!); - checkUnnamed207(o.matchRules!); + checkUnnamed206(o.matchRules!); unittest.expect( o.priority!, unittest.equals(42), @@ -12036,34 +11975,34 @@ void checkHttpRouteRule(api.HttpRouteRule o) { buildCounterHttpRouteRule--; } 
-core.List buildUnnamed208() => [ +core.List buildUnnamed207() => [ buildHttpHeaderMatch(), buildHttpHeaderMatch(), ]; -void checkUnnamed208(core.List o) { +void checkUnnamed207(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkHttpHeaderMatch(o[0]); checkHttpHeaderMatch(o[1]); } -core.List buildUnnamed209() => [ +core.List buildUnnamed208() => [ buildMetadataFilter(), buildMetadataFilter(), ]; -void checkUnnamed209(core.List o) { +void checkUnnamed208(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkMetadataFilter(o[0]); checkMetadataFilter(o[1]); } -core.List buildUnnamed210() => [ +core.List buildUnnamed209() => [ buildHttpQueryParameterMatch(), buildHttpQueryParameterMatch(), ]; -void checkUnnamed210(core.List o) { +void checkUnnamed209(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkHttpQueryParameterMatch(o[0]); checkHttpQueryParameterMatch(o[1]); @@ -12075,12 +12014,12 @@ api.HttpRouteRuleMatch buildHttpRouteRuleMatch() { buildCounterHttpRouteRuleMatch++; if (buildCounterHttpRouteRuleMatch < 3) { o.fullPathMatch = 'foo'; - o.headerMatches = buildUnnamed208(); + o.headerMatches = buildUnnamed207(); o.ignoreCase = true; - o.metadataFilters = buildUnnamed209(); + o.metadataFilters = buildUnnamed208(); o.pathTemplateMatch = 'foo'; o.prefixMatch = 'foo'; - o.queryParameterMatches = buildUnnamed210(); + o.queryParameterMatches = buildUnnamed209(); o.regexMatch = 'foo'; } buildCounterHttpRouteRuleMatch--; @@ -12094,9 +12033,9 @@ void checkHttpRouteRuleMatch(api.HttpRouteRuleMatch o) { o.fullPathMatch!, unittest.equals('foo'), ); - checkUnnamed208(o.headerMatches!); + checkUnnamed207(o.headerMatches!); unittest.expect(o.ignoreCase!, unittest.isTrue); - checkUnnamed209(o.metadataFilters!); + checkUnnamed208(o.metadataFilters!); unittest.expect( o.pathTemplateMatch!, unittest.equals('foo'), @@ -12105,7 +12044,7 @@ void checkHttpRouteRuleMatch(api.HttpRouteRuleMatch o) { o.prefixMatch!, unittest.equals('foo'), ); - checkUnnamed210(o.queryParameterMatches!); + checkUnnamed209(o.queryParameterMatches!); unittest.expect( o.regexMatch!, unittest.equals('foo'), @@ -12196,12 +12135,12 @@ void checkHttpsHealthCheck(api.HttpsHealthCheck o) { buildCounterHttpsHealthCheck--; } -core.List buildUnnamed211() => [ +core.List buildUnnamed210() => [ buildHttpsHealthCheck(), buildHttpsHealthCheck(), ]; -void checkUnnamed211(core.List o) { +void checkUnnamed210(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkHttpsHealthCheck(o[0]); checkHttpsHealthCheck(o[1]); @@ -12235,12 +12174,12 @@ void checkHttpsHealthCheckListWarningData( buildCounterHttpsHealthCheckListWarningData--; } -core.List buildUnnamed212() => [ +core.List buildUnnamed211() => [ buildHttpsHealthCheckListWarningData(), buildHttpsHealthCheckListWarningData(), ]; -void checkUnnamed212(core.List o) { +void checkUnnamed211(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkHttpsHealthCheckListWarningData(o[0]); checkHttpsHealthCheckListWarningData(o[1]); @@ -12252,7 +12191,7 @@ api.HttpsHealthCheckListWarning buildHttpsHealthCheckListWarning() { buildCounterHttpsHealthCheckListWarning++; if (buildCounterHttpsHealthCheckListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed212(); + o.data = buildUnnamed211(); o.message = 'foo'; } buildCounterHttpsHealthCheckListWarning--; @@ -12266,7 +12205,7 @@ void checkHttpsHealthCheckListWarning(api.HttpsHealthCheckListWarning o) { o.code!, unittest.equals('foo'), ); - checkUnnamed212(o.data!); + checkUnnamed211(o.data!); unittest.expect( 
o.message!, unittest.equals('foo'), @@ -12281,7 +12220,7 @@ api.HttpsHealthCheckList buildHttpsHealthCheckList() { buildCounterHttpsHealthCheckList++; if (buildCounterHttpsHealthCheckList < 3) { o.id = 'foo'; - o.items = buildUnnamed211(); + o.items = buildUnnamed210(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; @@ -12298,7 +12237,7 @@ void checkHttpsHealthCheckList(api.HttpsHealthCheckList o) { o.id!, unittest.equals('foo'), ); - checkUnnamed211(o.items!); + checkUnnamed210(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -12316,23 +12255,23 @@ void checkHttpsHealthCheckList(api.HttpsHealthCheckList o) { buildCounterHttpsHealthCheckList--; } -core.List buildUnnamed213() => [ +core.List buildUnnamed212() => [ buildGuestOsFeature(), buildGuestOsFeature(), ]; -void checkUnnamed213(core.List o) { +void checkUnnamed212(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGuestOsFeature(o[0]); checkGuestOsFeature(o[1]); } -core.Map buildUnnamed214() => { +core.Map buildUnnamed213() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed214(core.Map o) { +void checkUnnamed213(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -12344,12 +12283,12 @@ void checkUnnamed214(core.Map o) { ); } -core.List buildUnnamed215() => [ +core.List buildUnnamed214() => [ 'foo', 'foo', ]; -void checkUnnamed215(core.List o) { +void checkUnnamed214(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -12361,12 +12300,12 @@ void checkUnnamed215(core.List o) { ); } -core.List buildUnnamed216() => [ +core.List buildUnnamed215() => [ 'foo', 'foo', ]; -void checkUnnamed216(core.List o) { +void checkUnnamed215(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -12410,12 +12349,12 @@ void checkImageRawDisk(api.ImageRawDisk o) { buildCounterImageRawDisk--; } -core.List buildUnnamed217() => [ +core.List buildUnnamed216() => [ 'foo', 'foo', ]; -void checkUnnamed217(core.List o) { +void checkUnnamed216(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -12440,14 +12379,14 @@ api.Image buildImage() { o.diskSizeGb = 'foo'; o.enableConfidentialCompute = true; o.family = 'foo'; - o.guestOsFeatures = buildUnnamed213(); + o.guestOsFeatures = buildUnnamed212(); o.id = 'foo'; o.imageEncryptionKey = buildCustomerEncryptionKey(); o.kind = 'foo'; o.labelFingerprint = 'foo'; - o.labels = buildUnnamed214(); - o.licenseCodes = buildUnnamed215(); - o.licenses = buildUnnamed216(); + o.labels = buildUnnamed213(); + o.licenseCodes = buildUnnamed214(); + o.licenses = buildUnnamed215(); o.name = 'foo'; o.rawDisk = buildImageRawDisk(); o.satisfiesPzi = true; @@ -12465,7 +12404,7 @@ api.Image buildImage() { o.sourceSnapshotId = 'foo'; o.sourceType = 'foo'; o.status = 'foo'; - o.storageLocations = buildUnnamed217(); + o.storageLocations = buildUnnamed216(); } buildCounterImage--; return o; @@ -12500,7 +12439,7 @@ void checkImage(api.Image o) { o.family!, unittest.equals('foo'), ); - checkUnnamed213(o.guestOsFeatures!); + checkUnnamed212(o.guestOsFeatures!); unittest.expect( o.id!, unittest.equals('foo'), @@ -12514,9 +12453,9 @@ void checkImage(api.Image o) { o.labelFingerprint!, unittest.equals('foo'), ); - checkUnnamed214(o.labels!); - checkUnnamed215(o.licenseCodes!); - checkUnnamed216(o.licenses!); + checkUnnamed213(o.labels!); + checkUnnamed214(o.licenseCodes!); + checkUnnamed215(o.licenses!); unittest.expect( o.name!, unittest.equals('foo'), @@ -12564,7 +12503,7 @@ void 
checkImage(api.Image o) { o.status!, unittest.equals('foo'), ); - checkUnnamed217(o.storageLocations!); + checkUnnamed216(o.storageLocations!); } buildCounterImage--; } @@ -12588,12 +12527,12 @@ void checkImageFamilyView(api.ImageFamilyView o) { buildCounterImageFamilyView--; } -core.List buildUnnamed218() => [ +core.List buildUnnamed217() => [ buildImage(), buildImage(), ]; -void checkUnnamed218(core.List o) { +void checkUnnamed217(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkImage(o[0]); checkImage(o[1]); @@ -12626,12 +12565,12 @@ void checkImageListWarningData(api.ImageListWarningData o) { buildCounterImageListWarningData--; } -core.List buildUnnamed219() => [ +core.List buildUnnamed218() => [ buildImageListWarningData(), buildImageListWarningData(), ]; -void checkUnnamed219(core.List o) { +void checkUnnamed218(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkImageListWarningData(o[0]); checkImageListWarningData(o[1]); @@ -12643,7 +12582,7 @@ api.ImageListWarning buildImageListWarning() { buildCounterImageListWarning++; if (buildCounterImageListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed219(); + o.data = buildUnnamed218(); o.message = 'foo'; } buildCounterImageListWarning--; @@ -12657,7 +12596,7 @@ void checkImageListWarning(api.ImageListWarning o) { o.code!, unittest.equals('foo'), ); - checkUnnamed219(o.data!); + checkUnnamed218(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -12672,7 +12611,7 @@ api.ImageList buildImageList() { buildCounterImageList++; if (buildCounterImageList < 3) { o.id = 'foo'; - o.items = buildUnnamed218(); + o.items = buildUnnamed217(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; @@ -12689,7 +12628,7 @@ void checkImageList(api.ImageList o) { o.id!, unittest.equals('foo'), ); - checkUnnamed218(o.items!); + checkUnnamed217(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -12707,34 +12646,34 @@ void checkImageList(api.ImageList o) { buildCounterImageList--; } -core.List buildUnnamed220() => [ +core.List buildUnnamed219() => [ buildFileContentBuffer(), buildFileContentBuffer(), ]; -void checkUnnamed220(core.List o) { +void checkUnnamed219(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkFileContentBuffer(o[0]); checkFileContentBuffer(o[1]); } -core.List buildUnnamed221() => [ +core.List buildUnnamed220() => [ buildFileContentBuffer(), buildFileContentBuffer(), ]; -void checkUnnamed221(core.List o) { +void checkUnnamed220(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkFileContentBuffer(o[0]); checkFileContentBuffer(o[1]); } -core.List buildUnnamed222() => [ +core.List buildUnnamed221() => [ buildFileContentBuffer(), buildFileContentBuffer(), ]; -void checkUnnamed222(core.List o) { +void checkUnnamed221(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkFileContentBuffer(o[0]); checkFileContentBuffer(o[1]); @@ -12745,9 +12684,9 @@ api.InitialStateConfig buildInitialStateConfig() { final o = api.InitialStateConfig(); buildCounterInitialStateConfig++; if (buildCounterInitialStateConfig < 3) { - o.dbs = buildUnnamed220(); - o.dbxs = buildUnnamed221(); - o.keks = buildUnnamed222(); + o.dbs = buildUnnamed219(); + o.dbxs = buildUnnamed220(); + o.keks = buildUnnamed221(); o.pk = buildFileContentBuffer(); } buildCounterInitialStateConfig--; @@ -12757,42 +12696,42 @@ api.InitialStateConfig buildInitialStateConfig() { void checkInitialStateConfig(api.InitialStateConfig o) { buildCounterInitialStateConfig++; if (buildCounterInitialStateConfig < 
3) { - checkUnnamed220(o.dbs!); - checkUnnamed221(o.dbxs!); - checkUnnamed222(o.keks!); + checkUnnamed219(o.dbs!); + checkUnnamed220(o.dbxs!); + checkUnnamed221(o.keks!); checkFileContentBuffer(o.pk!); } buildCounterInitialStateConfig--; } -core.List buildUnnamed223() => [ +core.List buildUnnamed222() => [ buildAttachedDisk(), buildAttachedDisk(), ]; -void checkUnnamed223(core.List o) { +void checkUnnamed222(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkAttachedDisk(o[0]); checkAttachedDisk(o[1]); } -core.List buildUnnamed224() => [ +core.List buildUnnamed223() => [ buildAcceleratorConfig(), buildAcceleratorConfig(), ]; -void checkUnnamed224(core.List o) { +void checkUnnamed223(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkAcceleratorConfig(o[0]); checkAcceleratorConfig(o[1]); } -core.Map buildUnnamed225() => { +core.Map buildUnnamed224() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed225(core.Map o) { +void checkUnnamed224(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -12804,23 +12743,23 @@ void checkUnnamed225(core.Map o) { ); } -core.List buildUnnamed226() => [ +core.List buildUnnamed225() => [ buildNetworkInterface(), buildNetworkInterface(), ]; -void checkUnnamed226(core.List o) { +void checkUnnamed225(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkNetworkInterface(o[0]); checkNetworkInterface(o[1]); } -core.List buildUnnamed227() => [ +core.List buildUnnamed226() => [ 'foo', 'foo', ]; -void checkUnnamed227(core.List o) { +void checkUnnamed226(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -12832,12 +12771,12 @@ void checkUnnamed227(core.List o) { ); } -core.List buildUnnamed228() => [ +core.List buildUnnamed227() => [ buildServiceAccount(), buildServiceAccount(), ]; -void checkUnnamed228(core.List o) { +void checkUnnamed227(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkServiceAccount(o[0]); checkServiceAccount(o[1]); @@ -12855,17 +12794,17 @@ api.Instance buildInstance() { o.creationTimestamp = 'foo'; o.deletionProtection = true; o.description = 'foo'; - o.disks = buildUnnamed223(); + o.disks = buildUnnamed222(); o.displayDevice = buildDisplayDevice(); o.fingerprint = 'foo'; - o.guestAccelerators = buildUnnamed224(); + o.guestAccelerators = buildUnnamed223(); o.hostname = 'foo'; o.id = 'foo'; o.instanceEncryptionKey = buildCustomerEncryptionKey(); o.keyRevocationActionType = 'foo'; o.kind = 'foo'; o.labelFingerprint = 'foo'; - o.labels = buildUnnamed225(); + o.labels = buildUnnamed224(); o.lastStartTimestamp = 'foo'; o.lastStopTimestamp = 'foo'; o.lastSuspendedTimestamp = 'foo'; @@ -12873,18 +12812,18 @@ api.Instance buildInstance() { o.metadata = buildMetadata(); o.minCpuPlatform = 'foo'; o.name = 'foo'; - o.networkInterfaces = buildUnnamed226(); + o.networkInterfaces = buildUnnamed225(); o.networkPerformanceConfig = buildNetworkPerformanceConfig(); o.params = buildInstanceParams(); o.privateIpv6GoogleAccess = 'foo'; o.reservationAffinity = buildReservationAffinity(); - o.resourcePolicies = buildUnnamed227(); + o.resourcePolicies = buildUnnamed226(); o.resourceStatus = buildResourceStatus(); o.satisfiesPzi = true; o.satisfiesPzs = true; o.scheduling = buildScheduling(); o.selfLink = 'foo'; - o.serviceAccounts = buildUnnamed228(); + o.serviceAccounts = buildUnnamed227(); o.shieldedInstanceConfig = buildShieldedInstanceConfig(); o.shieldedInstanceIntegrityPolicy = buildShieldedInstanceIntegrityPolicy(); o.sourceMachineImage = 'foo'; @@ 
-12918,13 +12857,13 @@ void checkInstance(api.Instance o) { o.description!, unittest.equals('foo'), ); - checkUnnamed223(o.disks!); + checkUnnamed222(o.disks!); checkDisplayDevice(o.displayDevice!); unittest.expect( o.fingerprint!, unittest.equals('foo'), ); - checkUnnamed224(o.guestAccelerators!); + checkUnnamed223(o.guestAccelerators!); unittest.expect( o.hostname!, unittest.equals('foo'), @@ -12946,7 +12885,7 @@ void checkInstance(api.Instance o) { o.labelFingerprint!, unittest.equals('foo'), ); - checkUnnamed225(o.labels!); + checkUnnamed224(o.labels!); unittest.expect( o.lastStartTimestamp!, unittest.equals('foo'), @@ -12972,7 +12911,7 @@ void checkInstance(api.Instance o) { o.name!, unittest.equals('foo'), ); - checkUnnamed226(o.networkInterfaces!); + checkUnnamed225(o.networkInterfaces!); checkNetworkPerformanceConfig(o.networkPerformanceConfig!); checkInstanceParams(o.params!); unittest.expect( @@ -12980,7 +12919,7 @@ void checkInstance(api.Instance o) { unittest.equals('foo'), ); checkReservationAffinity(o.reservationAffinity!); - checkUnnamed227(o.resourcePolicies!); + checkUnnamed226(o.resourcePolicies!); checkResourceStatus(o.resourceStatus!); unittest.expect(o.satisfiesPzi!, unittest.isTrue); unittest.expect(o.satisfiesPzs!, unittest.isTrue); @@ -12989,7 +12928,7 @@ void checkInstance(api.Instance o) { o.selfLink!, unittest.equals('foo'), ); - checkUnnamed228(o.serviceAccounts!); + checkUnnamed227(o.serviceAccounts!); checkShieldedInstanceConfig(o.shieldedInstanceConfig!); checkShieldedInstanceIntegrityPolicy(o.shieldedInstanceIntegrityPolicy!); unittest.expect( @@ -13015,23 +12954,23 @@ void checkInstance(api.Instance o) { buildCounterInstance--; } -core.Map buildUnnamed229() => { +core.Map buildUnnamed228() => { 'x': buildInstancesScopedList(), 'y': buildInstancesScopedList(), }; -void checkUnnamed229(core.Map o) { +void checkUnnamed228(core.Map o) { unittest.expect(o, unittest.hasLength(2)); checkInstancesScopedList(o['x']!); checkInstancesScopedList(o['y']!); } -core.List buildUnnamed230() => [ +core.List buildUnnamed229() => [ 'foo', 'foo', ]; -void checkUnnamed230(core.List o) { +void checkUnnamed229(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -13071,12 +13010,12 @@ void checkInstanceAggregatedListWarningData( buildCounterInstanceAggregatedListWarningData--; } -core.List buildUnnamed231() => [ +core.List buildUnnamed230() => [ buildInstanceAggregatedListWarningData(), buildInstanceAggregatedListWarningData(), ]; -void checkUnnamed231(core.List o) { +void checkUnnamed230(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkInstanceAggregatedListWarningData(o[0]); checkInstanceAggregatedListWarningData(o[1]); @@ -13088,7 +13027,7 @@ api.InstanceAggregatedListWarning buildInstanceAggregatedListWarning() { buildCounterInstanceAggregatedListWarning++; if (buildCounterInstanceAggregatedListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed231(); + o.data = buildUnnamed230(); o.message = 'foo'; } buildCounterInstanceAggregatedListWarning--; @@ -13102,7 +13041,7 @@ void checkInstanceAggregatedListWarning(api.InstanceAggregatedListWarning o) { o.code!, unittest.equals('foo'), ); - checkUnnamed231(o.data!); + checkUnnamed230(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -13117,11 +13056,11 @@ api.InstanceAggregatedList buildInstanceAggregatedList() { buildCounterInstanceAggregatedList++; if (buildCounterInstanceAggregatedList < 3) { o.id = 'foo'; - o.items = buildUnnamed229(); + o.items = buildUnnamed228(); 
o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; - o.unreachables = buildUnnamed230(); + o.unreachables = buildUnnamed229(); o.warning = buildInstanceAggregatedListWarning(); } buildCounterInstanceAggregatedList--; @@ -13135,7 +13074,7 @@ void checkInstanceAggregatedList(api.InstanceAggregatedList o) { o.id!, unittest.equals('foo'), ); - checkUnnamed229(o.items!); + checkUnnamed228(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -13148,7 +13087,7 @@ void checkInstanceAggregatedList(api.InstanceAggregatedList o) { o.selfLink!, unittest.equals('foo'), ); - checkUnnamed230(o.unreachables!); + checkUnnamed229(o.unreachables!); checkInstanceAggregatedListWarning(o.warning!); } buildCounterInstanceAggregatedList--; @@ -13215,12 +13154,12 @@ void checkInstanceConsumptionInfo(api.InstanceConsumptionInfo o) { buildCounterInstanceConsumptionInfo--; } -core.List buildUnnamed232() => [ +core.List buildUnnamed231() => [ buildNamedPort(), buildNamedPort(), ]; -void checkUnnamed232(core.List o) { +void checkUnnamed231(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkNamedPort(o[0]); checkNamedPort(o[1]); @@ -13237,7 +13176,7 @@ api.InstanceGroup buildInstanceGroup() { o.id = 'foo'; o.kind = 'foo'; o.name = 'foo'; - o.namedPorts = buildUnnamed232(); + o.namedPorts = buildUnnamed231(); o.network = 'foo'; o.region = 'foo'; o.selfLink = 'foo'; @@ -13276,7 +13215,7 @@ void checkInstanceGroup(api.InstanceGroup o) { o.name!, unittest.equals('foo'), ); - checkUnnamed232(o.namedPorts!); + checkUnnamed231(o.namedPorts!); unittest.expect( o.network!, unittest.equals('foo'), @@ -13305,23 +13244,23 @@ void checkInstanceGroup(api.InstanceGroup o) { buildCounterInstanceGroup--; } -core.Map buildUnnamed233() => { +core.Map buildUnnamed232() => { 'x': buildInstanceGroupsScopedList(), 'y': buildInstanceGroupsScopedList(), }; -void checkUnnamed233(core.Map o) { +void checkUnnamed232(core.Map o) { unittest.expect(o, unittest.hasLength(2)); checkInstanceGroupsScopedList(o['x']!); checkInstanceGroupsScopedList(o['y']!); } -core.List buildUnnamed234() => [ +core.List buildUnnamed233() => [ 'foo', 'foo', ]; -void checkUnnamed234(core.List o) { +void checkUnnamed233(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -13362,12 +13301,12 @@ void checkInstanceGroupAggregatedListWarningData( buildCounterInstanceGroupAggregatedListWarningData--; } -core.List buildUnnamed235() => [ +core.List buildUnnamed234() => [ buildInstanceGroupAggregatedListWarningData(), buildInstanceGroupAggregatedListWarningData(), ]; -void checkUnnamed235(core.List o) { +void checkUnnamed234(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkInstanceGroupAggregatedListWarningData(o[0]); checkInstanceGroupAggregatedListWarningData(o[1]); @@ -13380,7 +13319,7 @@ api.InstanceGroupAggregatedListWarning buildCounterInstanceGroupAggregatedListWarning++; if (buildCounterInstanceGroupAggregatedListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed235(); + o.data = buildUnnamed234(); o.message = 'foo'; } buildCounterInstanceGroupAggregatedListWarning--; @@ -13395,7 +13334,7 @@ void checkInstanceGroupAggregatedListWarning( o.code!, unittest.equals('foo'), ); - checkUnnamed235(o.data!); + checkUnnamed234(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -13410,11 +13349,11 @@ api.InstanceGroupAggregatedList buildInstanceGroupAggregatedList() { buildCounterInstanceGroupAggregatedList++; if (buildCounterInstanceGroupAggregatedList < 3) { o.id = 'foo'; - o.items = 
buildUnnamed233(); + o.items = buildUnnamed232(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; - o.unreachables = buildUnnamed234(); + o.unreachables = buildUnnamed233(); o.warning = buildInstanceGroupAggregatedListWarning(); } buildCounterInstanceGroupAggregatedList--; @@ -13428,7 +13367,7 @@ void checkInstanceGroupAggregatedList(api.InstanceGroupAggregatedList o) { o.id!, unittest.equals('foo'), ); - checkUnnamed233(o.items!); + checkUnnamed232(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -13441,18 +13380,18 @@ void checkInstanceGroupAggregatedList(api.InstanceGroupAggregatedList o) { o.selfLink!, unittest.equals('foo'), ); - checkUnnamed234(o.unreachables!); + checkUnnamed233(o.unreachables!); checkInstanceGroupAggregatedListWarning(o.warning!); } buildCounterInstanceGroupAggregatedList--; } -core.List buildUnnamed236() => [ +core.List buildUnnamed235() => [ buildInstanceGroup(), buildInstanceGroup(), ]; -void checkUnnamed236(core.List o) { +void checkUnnamed235(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkInstanceGroup(o[0]); checkInstanceGroup(o[1]); @@ -13485,12 +13424,12 @@ void checkInstanceGroupListWarningData(api.InstanceGroupListWarningData o) { buildCounterInstanceGroupListWarningData--; } -core.List buildUnnamed237() => [ +core.List buildUnnamed236() => [ buildInstanceGroupListWarningData(), buildInstanceGroupListWarningData(), ]; -void checkUnnamed237(core.List o) { +void checkUnnamed236(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkInstanceGroupListWarningData(o[0]); checkInstanceGroupListWarningData(o[1]); @@ -13502,7 +13441,7 @@ api.InstanceGroupListWarning buildInstanceGroupListWarning() { buildCounterInstanceGroupListWarning++; if (buildCounterInstanceGroupListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed237(); + o.data = buildUnnamed236(); o.message = 'foo'; } buildCounterInstanceGroupListWarning--; @@ -13516,7 +13455,7 @@ void checkInstanceGroupListWarning(api.InstanceGroupListWarning o) { o.code!, unittest.equals('foo'), ); - checkUnnamed237(o.data!); + checkUnnamed236(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -13531,7 +13470,7 @@ api.InstanceGroupList buildInstanceGroupList() { buildCounterInstanceGroupList++; if (buildCounterInstanceGroupList < 3) { o.id = 'foo'; - o.items = buildUnnamed236(); + o.items = buildUnnamed235(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; @@ -13548,7 +13487,7 @@ void checkInstanceGroupList(api.InstanceGroupList o) { o.id!, unittest.equals('foo'), ); - checkUnnamed236(o.items!); + checkUnnamed235(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -13566,34 +13505,34 @@ void checkInstanceGroupList(api.InstanceGroupList o) { buildCounterInstanceGroupList--; } -core.List buildUnnamed238() => [ +core.List buildUnnamed237() => [ buildInstanceGroupManagerAutoHealingPolicy(), buildInstanceGroupManagerAutoHealingPolicy(), ]; -void checkUnnamed238(core.List o) { +void checkUnnamed237(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkInstanceGroupManagerAutoHealingPolicy(o[0]); checkInstanceGroupManagerAutoHealingPolicy(o[1]); } -core.List buildUnnamed239() => [ +core.List buildUnnamed238() => [ buildNamedPort(), buildNamedPort(), ]; -void checkUnnamed239(core.List o) { +void checkUnnamed238(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkNamedPort(o[0]); checkNamedPort(o[1]); } -core.List buildUnnamed240() => [ +core.List buildUnnamed239() => [ 'foo', 'foo', ]; -void 
checkUnnamed240(core.List o) { +void checkUnnamed239(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -13605,12 +13544,12 @@ void checkUnnamed240(core.List o) { ); } -core.List buildUnnamed241() => [ +core.List buildUnnamed240() => [ buildInstanceGroupManagerVersion(), buildInstanceGroupManagerVersion(), ]; -void checkUnnamed241(core.List o) { +void checkUnnamed240(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkInstanceGroupManagerVersion(o[0]); checkInstanceGroupManagerVersion(o[1]); @@ -13622,7 +13561,7 @@ api.InstanceGroupManager buildInstanceGroupManager() { buildCounterInstanceGroupManager++; if (buildCounterInstanceGroupManager < 3) { o.allInstancesConfig = buildInstanceGroupManagerAllInstancesConfig(); - o.autoHealingPolicies = buildUnnamed238(); + o.autoHealingPolicies = buildUnnamed237(); o.baseInstanceName = 'foo'; o.creationTimestamp = 'foo'; o.currentActions = buildInstanceGroupManagerActionsSummary(); @@ -13630,6 +13569,8 @@ api.InstanceGroupManager buildInstanceGroupManager() { o.distributionPolicy = buildDistributionPolicy(); o.fingerprint = 'foo'; o.id = 'foo'; + o.instanceFlexibilityPolicy = + buildInstanceGroupManagerInstanceFlexibilityPolicy(); o.instanceGroup = 'foo'; o.instanceLifecyclePolicy = buildInstanceGroupManagerInstanceLifecyclePolicy(); @@ -13637,17 +13578,20 @@ api.InstanceGroupManager buildInstanceGroupManager() { o.kind = 'foo'; o.listManagedInstancesResults = 'foo'; o.name = 'foo'; - o.namedPorts = buildUnnamed239(); + o.namedPorts = buildUnnamed238(); o.region = 'foo'; o.satisfiesPzi = true; o.satisfiesPzs = true; o.selfLink = 'foo'; + o.standbyPolicy = buildInstanceGroupManagerStandbyPolicy(); o.statefulPolicy = buildStatefulPolicy(); o.status = buildInstanceGroupManagerStatus(); - o.targetPools = buildUnnamed240(); + o.targetPools = buildUnnamed239(); o.targetSize = 42; + o.targetStoppedSize = 42; + o.targetSuspendedSize = 42; o.updatePolicy = buildInstanceGroupManagerUpdatePolicy(); - o.versions = buildUnnamed241(); + o.versions = buildUnnamed240(); o.zone = 'foo'; } buildCounterInstanceGroupManager--; @@ -13658,7 +13602,7 @@ void checkInstanceGroupManager(api.InstanceGroupManager o) { buildCounterInstanceGroupManager++; if (buildCounterInstanceGroupManager < 3) { checkInstanceGroupManagerAllInstancesConfig(o.allInstancesConfig!); - checkUnnamed238(o.autoHealingPolicies!); + checkUnnamed237(o.autoHealingPolicies!); unittest.expect( o.baseInstanceName!, unittest.equals('foo'), @@ -13681,6 +13625,8 @@ void checkInstanceGroupManager(api.InstanceGroupManager o) { o.id!, unittest.equals('foo'), ); + checkInstanceGroupManagerInstanceFlexibilityPolicy( + o.instanceFlexibilityPolicy!); unittest.expect( o.instanceGroup!, unittest.equals('foo'), @@ -13703,7 +13649,7 @@ void checkInstanceGroupManager(api.InstanceGroupManager o) { o.name!, unittest.equals('foo'), ); - checkUnnamed239(o.namedPorts!); + checkUnnamed238(o.namedPorts!); unittest.expect( o.region!, unittest.equals('foo'), @@ -13714,15 +13660,24 @@ void checkInstanceGroupManager(api.InstanceGroupManager o) { o.selfLink!, unittest.equals('foo'), ); + checkInstanceGroupManagerStandbyPolicy(o.standbyPolicy!); checkStatefulPolicy(o.statefulPolicy!); checkInstanceGroupManagerStatus(o.status!); - checkUnnamed240(o.targetPools!); + checkUnnamed239(o.targetPools!); unittest.expect( o.targetSize!, unittest.equals(42), ); + unittest.expect( + o.targetStoppedSize!, + unittest.equals(42), + ); + unittest.expect( + o.targetSuspendedSize!, + unittest.equals(42), + ); 
checkInstanceGroupManagerUpdatePolicy(o.updatePolicy!); - checkUnnamed241(o.versions!); + checkUnnamed240(o.versions!); unittest.expect( o.zone!, unittest.equals('foo'), @@ -13815,25 +13770,25 @@ void checkInstanceGroupManagerActionsSummary( buildCounterInstanceGroupManagerActionsSummary--; } -core.Map buildUnnamed242() => +core.Map buildUnnamed241() => { 'x': buildInstanceGroupManagersScopedList(), 'y': buildInstanceGroupManagersScopedList(), }; -void checkUnnamed242( +void checkUnnamed241( core.Map o) { unittest.expect(o, unittest.hasLength(2)); checkInstanceGroupManagersScopedList(o['x']!); checkInstanceGroupManagersScopedList(o['y']!); } -core.List buildUnnamed243() => [ +core.List buildUnnamed242() => [ 'foo', 'foo', ]; -void checkUnnamed243(core.List o) { +void checkUnnamed242(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -13875,12 +13830,12 @@ void checkInstanceGroupManagerAggregatedListWarningData( } core.List - buildUnnamed244() => [ + buildUnnamed243() => [ buildInstanceGroupManagerAggregatedListWarningData(), buildInstanceGroupManagerAggregatedListWarningData(), ]; -void checkUnnamed244( +void checkUnnamed243( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkInstanceGroupManagerAggregatedListWarningData(o[0]); @@ -13894,7 +13849,7 @@ api.InstanceGroupManagerAggregatedListWarning buildCounterInstanceGroupManagerAggregatedListWarning++; if (buildCounterInstanceGroupManagerAggregatedListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed244(); + o.data = buildUnnamed243(); o.message = 'foo'; } buildCounterInstanceGroupManagerAggregatedListWarning--; @@ -13909,7 +13864,7 @@ void checkInstanceGroupManagerAggregatedListWarning( o.code!, unittest.equals('foo'), ); - checkUnnamed244(o.data!); + checkUnnamed243(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -13925,11 +13880,11 @@ api.InstanceGroupManagerAggregatedList buildCounterInstanceGroupManagerAggregatedList++; if (buildCounterInstanceGroupManagerAggregatedList < 3) { o.id = 'foo'; - o.items = buildUnnamed242(); + o.items = buildUnnamed241(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; - o.unreachables = buildUnnamed243(); + o.unreachables = buildUnnamed242(); o.warning = buildInstanceGroupManagerAggregatedListWarning(); } buildCounterInstanceGroupManagerAggregatedList--; @@ -13944,7 +13899,7 @@ void checkInstanceGroupManagerAggregatedList( o.id!, unittest.equals('foo'), ); - checkUnnamed242(o.items!); + checkUnnamed241(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -13957,7 +13912,7 @@ void checkInstanceGroupManagerAggregatedList( o.selfLink!, unittest.equals('foo'), ); - checkUnnamed243(o.unreachables!); + checkUnnamed242(o.unreachables!); checkInstanceGroupManagerAggregatedListWarning(o.warning!); } buildCounterInstanceGroupManagerAggregatedList--; @@ -14013,6 +13968,93 @@ void checkInstanceGroupManagerAutoHealingPolicy( buildCounterInstanceGroupManagerAutoHealingPolicy--; } +core.Map + buildUnnamed244() => { + 'x': + buildInstanceGroupManagerInstanceFlexibilityPolicyInstanceSelection(), + 'y': + buildInstanceGroupManagerInstanceFlexibilityPolicyInstanceSelection(), + }; + +void checkUnnamed244( + core.Map + o) { + unittest.expect(o, unittest.hasLength(2)); + checkInstanceGroupManagerInstanceFlexibilityPolicyInstanceSelection(o['x']!); + checkInstanceGroupManagerInstanceFlexibilityPolicyInstanceSelection(o['y']!); +} + +core.int buildCounterInstanceGroupManagerInstanceFlexibilityPolicy = 0; 
+api.InstanceGroupManagerInstanceFlexibilityPolicy
+    buildInstanceGroupManagerInstanceFlexibilityPolicy() {
+  final o = api.InstanceGroupManagerInstanceFlexibilityPolicy();
+  buildCounterInstanceGroupManagerInstanceFlexibilityPolicy++;
+  if (buildCounterInstanceGroupManagerInstanceFlexibilityPolicy < 3) {
+    o.instanceSelections = buildUnnamed244();
+  }
+  buildCounterInstanceGroupManagerInstanceFlexibilityPolicy--;
+  return o;
+}
+
+void checkInstanceGroupManagerInstanceFlexibilityPolicy(
+    api.InstanceGroupManagerInstanceFlexibilityPolicy o) {
+  buildCounterInstanceGroupManagerInstanceFlexibilityPolicy++;
+  if (buildCounterInstanceGroupManagerInstanceFlexibilityPolicy < 3) {
+    checkUnnamed244(o.instanceSelections!);
+  }
+  buildCounterInstanceGroupManagerInstanceFlexibilityPolicy--;
+}
+
+core.List<core.String> buildUnnamed245() => [
+      'foo',
+      'foo',
+    ];
+
+void checkUnnamed245(core.List<core.String> o) {
+  unittest.expect(o, unittest.hasLength(2));
+  unittest.expect(
+    o[0],
+    unittest.equals('foo'),
+  );
+  unittest.expect(
+    o[1],
+    unittest.equals('foo'),
+  );
+}
+
+core.int
+    buildCounterInstanceGroupManagerInstanceFlexibilityPolicyInstanceSelection =
+    0;
+api.InstanceGroupManagerInstanceFlexibilityPolicyInstanceSelection
+    buildInstanceGroupManagerInstanceFlexibilityPolicyInstanceSelection() {
+  final o =
+      api.InstanceGroupManagerInstanceFlexibilityPolicyInstanceSelection();
+  buildCounterInstanceGroupManagerInstanceFlexibilityPolicyInstanceSelection++;
+  if (buildCounterInstanceGroupManagerInstanceFlexibilityPolicyInstanceSelection <
+      3) {
+    o.machineTypes = buildUnnamed245();
+    o.rank = 42;
+  }
+  buildCounterInstanceGroupManagerInstanceFlexibilityPolicyInstanceSelection--;
+  return o;
+}
+
+void checkInstanceGroupManagerInstanceFlexibilityPolicyInstanceSelection(
+    api.InstanceGroupManagerInstanceFlexibilityPolicyInstanceSelection o) {
+  buildCounterInstanceGroupManagerInstanceFlexibilityPolicyInstanceSelection++;
+  if (buildCounterInstanceGroupManagerInstanceFlexibilityPolicyInstanceSelection <
+      3) {
+    checkUnnamed245(o.machineTypes!);
+    unittest.expect(
+      o.rank!,
+      unittest.equals(42),
+    );
+  }
+  buildCounterInstanceGroupManagerInstanceFlexibilityPolicyInstanceSelection--;
+}
+
 core.int buildCounterInstanceGroupManagerInstanceLifecyclePolicy = 0;
 api.InstanceGroupManagerInstanceLifecyclePolicy
     buildInstanceGroupManagerInstanceLifecyclePolicy() {
@@ -14042,12 +14084,12 @@ void checkInstanceGroupManagerInstanceLifecyclePolicy(
   buildCounterInstanceGroupManagerInstanceLifecyclePolicy--;
 }
 
-core.List<api.InstanceGroupManager> buildUnnamed245() => [
+core.List<api.InstanceGroupManager> buildUnnamed246() => [
       buildInstanceGroupManager(),
       buildInstanceGroupManager(),
     ];
 
-void checkUnnamed245(core.List<api.InstanceGroupManager> o) {
+void checkUnnamed246(core.List<api.InstanceGroupManager> o) {
   unittest.expect(o, unittest.hasLength(2));
   checkInstanceGroupManager(o[0]);
   checkInstanceGroupManager(o[1]);
@@ -14082,12 +14124,12 @@ void checkInstanceGroupManagerListWarningData(
   buildCounterInstanceGroupManagerListWarningData--;
 }
 
-core.List<api.InstanceGroupManagerListWarningData> buildUnnamed246() => [
+core.List<api.InstanceGroupManagerListWarningData> buildUnnamed247() => [
       buildInstanceGroupManagerListWarningData(),
       buildInstanceGroupManagerListWarningData(),
     ];
 
-void checkUnnamed246(core.List<api.InstanceGroupManagerListWarningData> o) {
+void checkUnnamed247(core.List<api.InstanceGroupManagerListWarningData> o) {
   unittest.expect(o, unittest.hasLength(2));
   checkInstanceGroupManagerListWarningData(o[0]);
   checkInstanceGroupManagerListWarningData(o[1]);
@@ -14099,7 +14141,7 @@ api.InstanceGroupManagerListWarning buildInstanceGroupManagerListWarning() {
   buildCounterInstanceGroupManagerListWarning++;
   if (buildCounterInstanceGroupManagerListWarning < 3) {
     o.code = 'foo';
-
o.data = buildUnnamed246(); + o.data = buildUnnamed247(); o.message = 'foo'; } buildCounterInstanceGroupManagerListWarning--; @@ -14114,7 +14156,7 @@ void checkInstanceGroupManagerListWarning( o.code!, unittest.equals('foo'), ); - checkUnnamed246(o.data!); + checkUnnamed247(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -14129,7 +14171,7 @@ api.InstanceGroupManagerList buildInstanceGroupManagerList() { buildCounterInstanceGroupManagerList++; if (buildCounterInstanceGroupManagerList < 3) { o.id = 'foo'; - o.items = buildUnnamed245(); + o.items = buildUnnamed246(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; @@ -14146,7 +14188,7 @@ void checkInstanceGroupManagerList(api.InstanceGroupManagerList o) { o.id!, unittest.equals('foo'), ); - checkUnnamed245(o.items!); + checkUnnamed246(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -14269,12 +14311,12 @@ void checkInstanceGroupManagerResizeRequestStatusErrorErrorsErrorDetails( } core.List - buildUnnamed247() => [ + buildUnnamed248() => [ buildInstanceGroupManagerResizeRequestStatusErrorErrorsErrorDetails(), buildInstanceGroupManagerResizeRequestStatusErrorErrorsErrorDetails(), ]; -void checkUnnamed247( +void checkUnnamed248( core.List< api.InstanceGroupManagerResizeRequestStatusErrorErrorsErrorDetails> o) { @@ -14290,7 +14332,7 @@ api.InstanceGroupManagerResizeRequestStatusErrorErrors buildCounterInstanceGroupManagerResizeRequestStatusErrorErrors++; if (buildCounterInstanceGroupManagerResizeRequestStatusErrorErrors < 3) { o.code = 'foo'; - o.errorDetails = buildUnnamed247(); + o.errorDetails = buildUnnamed248(); o.location = 'foo'; o.message = 'foo'; } @@ -14306,7 +14348,7 @@ void checkInstanceGroupManagerResizeRequestStatusErrorErrors( o.code!, unittest.equals('foo'), ); - checkUnnamed247(o.errorDetails!); + checkUnnamed248(o.errorDetails!); unittest.expect( o.location!, unittest.equals('foo'), @@ -14320,12 +14362,12 @@ void checkInstanceGroupManagerResizeRequestStatusErrorErrors( } core.List - buildUnnamed248() => [ + buildUnnamed249() => [ buildInstanceGroupManagerResizeRequestStatusErrorErrors(), buildInstanceGroupManagerResizeRequestStatusErrorErrors(), ]; -void checkUnnamed248( +void checkUnnamed249( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkInstanceGroupManagerResizeRequestStatusErrorErrors(o[0]); @@ -14338,7 +14380,7 @@ api.InstanceGroupManagerResizeRequestStatusError final o = api.InstanceGroupManagerResizeRequestStatusError(); buildCounterInstanceGroupManagerResizeRequestStatusError++; if (buildCounterInstanceGroupManagerResizeRequestStatusError < 3) { - o.errors = buildUnnamed248(); + o.errors = buildUnnamed249(); } buildCounterInstanceGroupManagerResizeRequestStatusError--; return o; @@ -14348,7 +14390,7 @@ void checkInstanceGroupManagerResizeRequestStatusError( api.InstanceGroupManagerResizeRequestStatusError o) { buildCounterInstanceGroupManagerResizeRequestStatusError++; if (buildCounterInstanceGroupManagerResizeRequestStatusError < 3) { - checkUnnamed248(o.errors!); + checkUnnamed249(o.errors!); } buildCounterInstanceGroupManagerResizeRequestStatusError--; } @@ -14412,12 +14454,12 @@ void checkInstanceGroupManagerResizeRequestStatusLastAttemptErrorErrorsErrorDeta core.List< api .InstanceGroupManagerResizeRequestStatusLastAttemptErrorErrorsErrorDetails> - buildUnnamed249() => [ + buildUnnamed250() => [ buildInstanceGroupManagerResizeRequestStatusLastAttemptErrorErrorsErrorDetails(), 
buildInstanceGroupManagerResizeRequestStatusLastAttemptErrorErrorsErrorDetails(), ]; -void checkUnnamed249( +void checkUnnamed250( core.List< api .InstanceGroupManagerResizeRequestStatusLastAttemptErrorErrorsErrorDetails> @@ -14439,7 +14481,7 @@ api.InstanceGroupManagerResizeRequestStatusLastAttemptErrorErrors if (buildCounterInstanceGroupManagerResizeRequestStatusLastAttemptErrorErrors < 3) { o.code = 'foo'; - o.errorDetails = buildUnnamed249(); + o.errorDetails = buildUnnamed250(); o.location = 'foo'; o.message = 'foo'; } @@ -14456,7 +14498,7 @@ void checkInstanceGroupManagerResizeRequestStatusLastAttemptErrorErrors( o.code!, unittest.equals('foo'), ); - checkUnnamed249(o.errorDetails!); + checkUnnamed250(o.errorDetails!); unittest.expect( o.location!, unittest.equals('foo'), @@ -14470,12 +14512,12 @@ void checkInstanceGroupManagerResizeRequestStatusLastAttemptErrorErrors( } core.List - buildUnnamed250() => [ + buildUnnamed251() => [ buildInstanceGroupManagerResizeRequestStatusLastAttemptErrorErrors(), buildInstanceGroupManagerResizeRequestStatusLastAttemptErrorErrors(), ]; -void checkUnnamed250( +void checkUnnamed251( core.List o) { unittest.expect(o, unittest.hasLength(2)); @@ -14490,7 +14532,7 @@ api.InstanceGroupManagerResizeRequestStatusLastAttemptError final o = api.InstanceGroupManagerResizeRequestStatusLastAttemptError(); buildCounterInstanceGroupManagerResizeRequestStatusLastAttemptError++; if (buildCounterInstanceGroupManagerResizeRequestStatusLastAttemptError < 3) { - o.errors = buildUnnamed250(); + o.errors = buildUnnamed251(); } buildCounterInstanceGroupManagerResizeRequestStatusLastAttemptError--; return o; @@ -14500,7 +14542,7 @@ void checkInstanceGroupManagerResizeRequestStatusLastAttemptError( api.InstanceGroupManagerResizeRequestStatusLastAttemptError o) { buildCounterInstanceGroupManagerResizeRequestStatusLastAttemptError++; if (buildCounterInstanceGroupManagerResizeRequestStatusLastAttemptError < 3) { - checkUnnamed250(o.errors!); + checkUnnamed251(o.errors!); } buildCounterInstanceGroupManagerResizeRequestStatusLastAttemptError--; } @@ -14526,12 +14568,12 @@ void checkInstanceGroupManagerResizeRequestStatusLastAttempt( buildCounterInstanceGroupManagerResizeRequestStatusLastAttempt--; } -core.List buildUnnamed251() => [ +core.List buildUnnamed252() => [ buildInstanceGroupManagerResizeRequest(), buildInstanceGroupManagerResizeRequest(), ]; -void checkUnnamed251(core.List o) { +void checkUnnamed252(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkInstanceGroupManagerResizeRequest(o[0]); checkInstanceGroupManagerResizeRequest(o[1]); @@ -14570,12 +14612,12 @@ void checkInstanceGroupManagerResizeRequestsListResponseWarningData( } core.List - buildUnnamed252() => [ + buildUnnamed253() => [ buildInstanceGroupManagerResizeRequestsListResponseWarningData(), buildInstanceGroupManagerResizeRequestsListResponseWarningData(), ]; -void checkUnnamed252( +void checkUnnamed253( core.List o) { unittest.expect(o, unittest.hasLength(2)); @@ -14590,7 +14632,7 @@ api.InstanceGroupManagerResizeRequestsListResponseWarning buildCounterInstanceGroupManagerResizeRequestsListResponseWarning++; if (buildCounterInstanceGroupManagerResizeRequestsListResponseWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed252(); + o.data = buildUnnamed253(); o.message = 'foo'; } buildCounterInstanceGroupManagerResizeRequestsListResponseWarning--; @@ -14605,7 +14647,7 @@ void checkInstanceGroupManagerResizeRequestsListResponseWarning( o.code!, unittest.equals('foo'), ); - checkUnnamed252(o.data!); 
+    checkUnnamed253(o.data!);
     unittest.expect(
       o.message!,
       unittest.equals('foo'),
@@ -14621,7 +14663,7 @@ api.InstanceGroupManagerResizeRequestsListResponse
   buildCounterInstanceGroupManagerResizeRequestsListResponse++;
   if (buildCounterInstanceGroupManagerResizeRequestsListResponse < 3) {
     o.id = 'foo';
-    o.items = buildUnnamed251();
+    o.items = buildUnnamed252();
     o.kind = 'foo';
     o.nextPageToken = 'foo';
     o.selfLink = 'foo';
@@ -14639,7 +14681,7 @@ void checkInstanceGroupManagerResizeRequestsListResponse(
       o.id!,
       unittest.equals('foo'),
     );
-    checkUnnamed251(o.items!);
+    checkUnnamed252(o.items!);
     unittest.expect(
       o.kind!,
       unittest.equals('foo'),
@@ -14657,6 +14699,34 @@ void checkInstanceGroupManagerResizeRequestsListResponse(
   buildCounterInstanceGroupManagerResizeRequestsListResponse--;
 }
 
+core.int buildCounterInstanceGroupManagerStandbyPolicy = 0;
+api.InstanceGroupManagerStandbyPolicy buildInstanceGroupManagerStandbyPolicy() {
+  final o = api.InstanceGroupManagerStandbyPolicy();
+  buildCounterInstanceGroupManagerStandbyPolicy++;
+  if (buildCounterInstanceGroupManagerStandbyPolicy < 3) {
+    o.initialDelaySec = 42;
+    o.mode = 'foo';
+  }
+  buildCounterInstanceGroupManagerStandbyPolicy--;
+  return o;
+}
+
+void checkInstanceGroupManagerStandbyPolicy(
+    api.InstanceGroupManagerStandbyPolicy o) {
+  buildCounterInstanceGroupManagerStandbyPolicy++;
+  if (buildCounterInstanceGroupManagerStandbyPolicy < 3) {
+    unittest.expect(
+      o.initialDelaySec!,
+      unittest.equals(42),
+    );
+    unittest.expect(
+      o.mode!,
+      unittest.equals('foo'),
+    );
+  }
+  buildCounterInstanceGroupManagerStandbyPolicy--;
+}
+
 core.int buildCounterInstanceGroupManagerStatus = 0;
 api.InstanceGroupManagerStatus buildInstanceGroupManagerStatus() {
   final o = api.InstanceGroupManagerStatus();
@@ -14856,12 +14926,12 @@ void checkInstanceGroupManagerVersion(api.InstanceGroupManagerVersion o) {
   buildCounterInstanceGroupManagerVersion--;
 }
 
-core.List<core.String> buildUnnamed253() => [
+core.List<core.String> buildUnnamed254() => [
       'foo',
       'foo',
     ];
 
-void checkUnnamed253(core.List<core.String> o) {
+void checkUnnamed254(core.List<core.String> o) {
   unittest.expect(o, unittest.hasLength(2));
   unittest.expect(
     o[0],
@@ -14879,7 +14949,7 @@ api.InstanceGroupManagersAbandonInstancesRequest
   final o = api.InstanceGroupManagersAbandonInstancesRequest();
   buildCounterInstanceGroupManagersAbandonInstancesRequest++;
   if (buildCounterInstanceGroupManagersAbandonInstancesRequest < 3) {
-    o.instances = buildUnnamed253();
+    o.instances = buildUnnamed254();
   }
   buildCounterInstanceGroupManagersAbandonInstancesRequest--;
   return o;
@@ -14889,17 +14959,17 @@ void checkInstanceGroupManagersAbandonInstancesRequest(
     api.InstanceGroupManagersAbandonInstancesRequest o) {
   buildCounterInstanceGroupManagersAbandonInstancesRequest++;
   if (buildCounterInstanceGroupManagersAbandonInstancesRequest < 3) {
-    checkUnnamed253(o.instances!);
+    checkUnnamed254(o.instances!);
   }
   buildCounterInstanceGroupManagersAbandonInstancesRequest--;
 }
 
-core.List<core.String> buildUnnamed254() => [
+core.List<core.String> buildUnnamed255() => [
       'foo',
       'foo',
     ];
 
-void checkUnnamed254(core.List<core.String> o) {
+void checkUnnamed255(core.List<core.String> o) {
   unittest.expect(o, unittest.hasLength(2));
   unittest.expect(
     o[0],
@@ -14918,7 +14988,7 @@ api.InstanceGroupManagersApplyUpdatesRequest
   buildCounterInstanceGroupManagersApplyUpdatesRequest++;
   if (buildCounterInstanceGroupManagersApplyUpdatesRequest < 3) {
     o.allInstances = true;
-    o.instances = buildUnnamed254();
+    o.instances = buildUnnamed255();
     o.minimalAction = 'foo';
     o.mostDisruptiveAllowedAction = 'foo';
   }
@@ -14931,7 +15001,7 @@ void
checkInstanceGroupManagersApplyUpdatesRequest( buildCounterInstanceGroupManagersApplyUpdatesRequest++; if (buildCounterInstanceGroupManagersApplyUpdatesRequest < 3) { unittest.expect(o.allInstances!, unittest.isTrue); - checkUnnamed254(o.instances!); + checkUnnamed255(o.instances!); unittest.expect( o.minimalAction!, unittest.equals('foo'), @@ -14944,12 +15014,12 @@ void checkInstanceGroupManagersApplyUpdatesRequest( buildCounterInstanceGroupManagersApplyUpdatesRequest--; } -core.List buildUnnamed255() => [ +core.List buildUnnamed256() => [ buildPerInstanceConfig(), buildPerInstanceConfig(), ]; -void checkUnnamed255(core.List o) { +void checkUnnamed256(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkPerInstanceConfig(o[0]); checkPerInstanceConfig(o[1]); @@ -14961,7 +15031,7 @@ api.InstanceGroupManagersCreateInstancesRequest final o = api.InstanceGroupManagersCreateInstancesRequest(); buildCounterInstanceGroupManagersCreateInstancesRequest++; if (buildCounterInstanceGroupManagersCreateInstancesRequest < 3) { - o.instances = buildUnnamed255(); + o.instances = buildUnnamed256(); } buildCounterInstanceGroupManagersCreateInstancesRequest--; return o; @@ -14971,17 +15041,17 @@ void checkInstanceGroupManagersCreateInstancesRequest( api.InstanceGroupManagersCreateInstancesRequest o) { buildCounterInstanceGroupManagersCreateInstancesRequest++; if (buildCounterInstanceGroupManagersCreateInstancesRequest < 3) { - checkUnnamed255(o.instances!); + checkUnnamed256(o.instances!); } buildCounterInstanceGroupManagersCreateInstancesRequest--; } -core.List buildUnnamed256() => [ +core.List buildUnnamed257() => [ 'foo', 'foo', ]; -void checkUnnamed256(core.List o) { +void checkUnnamed257(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -14999,7 +15069,7 @@ api.InstanceGroupManagersDeleteInstancesRequest final o = api.InstanceGroupManagersDeleteInstancesRequest(); buildCounterInstanceGroupManagersDeleteInstancesRequest++; if (buildCounterInstanceGroupManagersDeleteInstancesRequest < 3) { - o.instances = buildUnnamed256(); + o.instances = buildUnnamed257(); o.skipInstancesOnValidationError = true; } buildCounterInstanceGroupManagersDeleteInstancesRequest--; @@ -15010,18 +15080,18 @@ void checkInstanceGroupManagersDeleteInstancesRequest( api.InstanceGroupManagersDeleteInstancesRequest o) { buildCounterInstanceGroupManagersDeleteInstancesRequest++; if (buildCounterInstanceGroupManagersDeleteInstancesRequest < 3) { - checkUnnamed256(o.instances!); + checkUnnamed257(o.instances!); unittest.expect(o.skipInstancesOnValidationError!, unittest.isTrue); } buildCounterInstanceGroupManagersDeleteInstancesRequest--; } -core.List buildUnnamed257() => [ +core.List buildUnnamed258() => [ 'foo', 'foo', ]; -void checkUnnamed257(core.List o) { +void checkUnnamed258(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -15039,7 +15109,7 @@ api.InstanceGroupManagersDeletePerInstanceConfigsReq final o = api.InstanceGroupManagersDeletePerInstanceConfigsReq(); buildCounterInstanceGroupManagersDeletePerInstanceConfigsReq++; if (buildCounterInstanceGroupManagersDeletePerInstanceConfigsReq < 3) { - o.names = buildUnnamed257(); + o.names = buildUnnamed258(); } buildCounterInstanceGroupManagersDeletePerInstanceConfigsReq--; return o; @@ -15049,17 +15119,17 @@ void checkInstanceGroupManagersDeletePerInstanceConfigsReq( api.InstanceGroupManagersDeletePerInstanceConfigsReq o) { buildCounterInstanceGroupManagersDeletePerInstanceConfigsReq++; if 
(buildCounterInstanceGroupManagersDeletePerInstanceConfigsReq < 3) { - checkUnnamed257(o.names!); + checkUnnamed258(o.names!); } buildCounterInstanceGroupManagersDeletePerInstanceConfigsReq--; } -core.List buildUnnamed258() => [ +core.List buildUnnamed259() => [ buildInstanceManagedByIgmError(), buildInstanceManagedByIgmError(), ]; -void checkUnnamed258(core.List o) { +void checkUnnamed259(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkInstanceManagedByIgmError(o[0]); checkInstanceManagedByIgmError(o[1]); @@ -15071,7 +15141,7 @@ api.InstanceGroupManagersListErrorsResponse final o = api.InstanceGroupManagersListErrorsResponse(); buildCounterInstanceGroupManagersListErrorsResponse++; if (buildCounterInstanceGroupManagersListErrorsResponse < 3) { - o.items = buildUnnamed258(); + o.items = buildUnnamed259(); o.nextPageToken = 'foo'; } buildCounterInstanceGroupManagersListErrorsResponse--; @@ -15082,7 +15152,7 @@ void checkInstanceGroupManagersListErrorsResponse( api.InstanceGroupManagersListErrorsResponse o) { buildCounterInstanceGroupManagersListErrorsResponse++; if (buildCounterInstanceGroupManagersListErrorsResponse < 3) { - checkUnnamed258(o.items!); + checkUnnamed259(o.items!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -15091,12 +15161,12 @@ void checkInstanceGroupManagersListErrorsResponse( buildCounterInstanceGroupManagersListErrorsResponse--; } -core.List buildUnnamed259() => [ +core.List buildUnnamed260() => [ buildManagedInstance(), buildManagedInstance(), ]; -void checkUnnamed259(core.List o) { +void checkUnnamed260(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkManagedInstance(o[0]); checkManagedInstance(o[1]); @@ -15108,7 +15178,7 @@ api.InstanceGroupManagersListManagedInstancesResponse final o = api.InstanceGroupManagersListManagedInstancesResponse(); buildCounterInstanceGroupManagersListManagedInstancesResponse++; if (buildCounterInstanceGroupManagersListManagedInstancesResponse < 3) { - o.managedInstances = buildUnnamed259(); + o.managedInstances = buildUnnamed260(); o.nextPageToken = 'foo'; } buildCounterInstanceGroupManagersListManagedInstancesResponse--; @@ -15119,7 +15189,7 @@ void checkInstanceGroupManagersListManagedInstancesResponse( api.InstanceGroupManagersListManagedInstancesResponse o) { buildCounterInstanceGroupManagersListManagedInstancesResponse++; if (buildCounterInstanceGroupManagersListManagedInstancesResponse < 3) { - checkUnnamed259(o.managedInstances!); + checkUnnamed260(o.managedInstances!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -15128,12 +15198,12 @@ void checkInstanceGroupManagersListManagedInstancesResponse( buildCounterInstanceGroupManagersListManagedInstancesResponse--; } -core.List buildUnnamed260() => [ +core.List buildUnnamed261() => [ buildPerInstanceConfig(), buildPerInstanceConfig(), ]; -void checkUnnamed260(core.List o) { +void checkUnnamed261(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkPerInstanceConfig(o[0]); checkPerInstanceConfig(o[1]); @@ -15172,12 +15242,12 @@ void checkInstanceGroupManagersListPerInstanceConfigsRespWarningData( } core.List - buildUnnamed261() => [ + buildUnnamed262() => [ buildInstanceGroupManagersListPerInstanceConfigsRespWarningData(), buildInstanceGroupManagersListPerInstanceConfigsRespWarningData(), ]; -void checkUnnamed261( +void checkUnnamed262( core.List o) { unittest.expect(o, unittest.hasLength(2)); @@ -15192,7 +15262,7 @@ api.InstanceGroupManagersListPerInstanceConfigsRespWarning 
buildCounterInstanceGroupManagersListPerInstanceConfigsRespWarning++; if (buildCounterInstanceGroupManagersListPerInstanceConfigsRespWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed261(); + o.data = buildUnnamed262(); o.message = 'foo'; } buildCounterInstanceGroupManagersListPerInstanceConfigsRespWarning--; @@ -15207,7 +15277,7 @@ void checkInstanceGroupManagersListPerInstanceConfigsRespWarning( o.code!, unittest.equals('foo'), ); - checkUnnamed261(o.data!); + checkUnnamed262(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -15222,7 +15292,7 @@ api.InstanceGroupManagersListPerInstanceConfigsResp final o = api.InstanceGroupManagersListPerInstanceConfigsResp(); buildCounterInstanceGroupManagersListPerInstanceConfigsResp++; if (buildCounterInstanceGroupManagersListPerInstanceConfigsResp < 3) { - o.items = buildUnnamed260(); + o.items = buildUnnamed261(); o.nextPageToken = 'foo'; o.warning = buildInstanceGroupManagersListPerInstanceConfigsRespWarning(); } @@ -15234,7 +15304,7 @@ void checkInstanceGroupManagersListPerInstanceConfigsResp( api.InstanceGroupManagersListPerInstanceConfigsResp o) { buildCounterInstanceGroupManagersListPerInstanceConfigsResp++; if (buildCounterInstanceGroupManagersListPerInstanceConfigsResp < 3) { - checkUnnamed260(o.items!); + checkUnnamed261(o.items!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -15244,12 +15314,12 @@ void checkInstanceGroupManagersListPerInstanceConfigsResp( buildCounterInstanceGroupManagersListPerInstanceConfigsResp--; } -core.List buildUnnamed262() => [ +core.List buildUnnamed263() => [ buildPerInstanceConfig(), buildPerInstanceConfig(), ]; -void checkUnnamed262(core.List o) { +void checkUnnamed263(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkPerInstanceConfig(o[0]); checkPerInstanceConfig(o[1]); @@ -15261,7 +15331,7 @@ api.InstanceGroupManagersPatchPerInstanceConfigsReq final o = api.InstanceGroupManagersPatchPerInstanceConfigsReq(); buildCounterInstanceGroupManagersPatchPerInstanceConfigsReq++; if (buildCounterInstanceGroupManagersPatchPerInstanceConfigsReq < 3) { - o.perInstanceConfigs = buildUnnamed262(); + o.perInstanceConfigs = buildUnnamed263(); } buildCounterInstanceGroupManagersPatchPerInstanceConfigsReq--; return o; @@ -15271,17 +15341,17 @@ void checkInstanceGroupManagersPatchPerInstanceConfigsReq( api.InstanceGroupManagersPatchPerInstanceConfigsReq o) { buildCounterInstanceGroupManagersPatchPerInstanceConfigsReq++; if (buildCounterInstanceGroupManagersPatchPerInstanceConfigsReq < 3) { - checkUnnamed262(o.perInstanceConfigs!); + checkUnnamed263(o.perInstanceConfigs!); } buildCounterInstanceGroupManagersPatchPerInstanceConfigsReq--; } -core.List buildUnnamed263() => [ +core.List buildUnnamed264() => [ 'foo', 'foo', ]; -void checkUnnamed263(core.List o) { +void checkUnnamed264(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -15299,7 +15369,7 @@ api.InstanceGroupManagersRecreateInstancesRequest final o = api.InstanceGroupManagersRecreateInstancesRequest(); buildCounterInstanceGroupManagersRecreateInstancesRequest++; if (buildCounterInstanceGroupManagersRecreateInstancesRequest < 3) { - o.instances = buildUnnamed263(); + o.instances = buildUnnamed264(); } buildCounterInstanceGroupManagersRecreateInstancesRequest--; return o; @@ -15309,17 +15379,55 @@ void checkInstanceGroupManagersRecreateInstancesRequest( api.InstanceGroupManagersRecreateInstancesRequest o) { buildCounterInstanceGroupManagersRecreateInstancesRequest++; if 
(buildCounterInstanceGroupManagersRecreateInstancesRequest < 3) { - checkUnnamed263(o.instances!); + checkUnnamed264(o.instances!); } buildCounterInstanceGroupManagersRecreateInstancesRequest--; } -core.List buildUnnamed264() => [ +core.List buildUnnamed265() => [ + 'foo', + 'foo', + ]; + +void checkUnnamed265(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o[0], + unittest.equals('foo'), + ); + unittest.expect( + o[1], + unittest.equals('foo'), + ); +} + +core.int buildCounterInstanceGroupManagersResumeInstancesRequest = 0; +api.InstanceGroupManagersResumeInstancesRequest + buildInstanceGroupManagersResumeInstancesRequest() { + final o = api.InstanceGroupManagersResumeInstancesRequest(); + buildCounterInstanceGroupManagersResumeInstancesRequest++; + if (buildCounterInstanceGroupManagersResumeInstancesRequest < 3) { + o.instances = buildUnnamed265(); + } + buildCounterInstanceGroupManagersResumeInstancesRequest--; + return o; +} + +void checkInstanceGroupManagersResumeInstancesRequest( + api.InstanceGroupManagersResumeInstancesRequest o) { + buildCounterInstanceGroupManagersResumeInstancesRequest++; + if (buildCounterInstanceGroupManagersResumeInstancesRequest < 3) { + checkUnnamed265(o.instances!); + } + buildCounterInstanceGroupManagersResumeInstancesRequest--; +} + +core.List buildUnnamed266() => [ buildInstanceGroupManager(), buildInstanceGroupManager(), ]; -void checkUnnamed264(core.List o) { +void checkUnnamed266(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkInstanceGroupManager(o[0]); checkInstanceGroupManager(o[1]); @@ -15354,12 +15462,12 @@ void checkInstanceGroupManagersScopedListWarningData( buildCounterInstanceGroupManagersScopedListWarningData--; } -core.List buildUnnamed265() => [ +core.List buildUnnamed267() => [ buildInstanceGroupManagersScopedListWarningData(), buildInstanceGroupManagersScopedListWarningData(), ]; -void checkUnnamed265( +void checkUnnamed267( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkInstanceGroupManagersScopedListWarningData(o[0]); @@ -15373,7 +15481,7 @@ api.InstanceGroupManagersScopedListWarning buildCounterInstanceGroupManagersScopedListWarning++; if (buildCounterInstanceGroupManagersScopedListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed265(); + o.data = buildUnnamed267(); o.message = 'foo'; } buildCounterInstanceGroupManagersScopedListWarning--; @@ -15388,7 +15496,7 @@ void checkInstanceGroupManagersScopedListWarning( o.code!, unittest.equals('foo'), ); - checkUnnamed265(o.data!); + checkUnnamed267(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -15402,7 +15510,7 @@ api.InstanceGroupManagersScopedList buildInstanceGroupManagersScopedList() { final o = api.InstanceGroupManagersScopedList(); buildCounterInstanceGroupManagersScopedList++; if (buildCounterInstanceGroupManagersScopedList < 3) { - o.instanceGroupManagers = buildUnnamed264(); + o.instanceGroupManagers = buildUnnamed266(); o.warning = buildInstanceGroupManagersScopedListWarning(); } buildCounterInstanceGroupManagersScopedList--; @@ -15413,7 +15521,7 @@ void checkInstanceGroupManagersScopedList( api.InstanceGroupManagersScopedList o) { buildCounterInstanceGroupManagersScopedList++; if (buildCounterInstanceGroupManagersScopedList < 3) { - checkUnnamed264(o.instanceGroupManagers!); + checkUnnamed266(o.instanceGroupManagers!); checkInstanceGroupManagersScopedListWarning(o.warning!); } buildCounterInstanceGroupManagersScopedList--; @@ -15443,12 +15551,12 @@ void 
checkInstanceGroupManagersSetInstanceTemplateRequest(
   buildCounterInstanceGroupManagersSetInstanceTemplateRequest--;
 }
 
-core.List<core.String> buildUnnamed266() => [
+core.List<core.String> buildUnnamed268() => [
       'foo',
       'foo',
     ];
 
-void checkUnnamed266(core.List<core.String> o) {
+void checkUnnamed268(core.List<core.String> o) {
   unittest.expect(o, unittest.hasLength(2));
   unittest.expect(
     o[0],
@@ -15467,7 +15575,7 @@ api.InstanceGroupManagersSetTargetPoolsRequest
   buildCounterInstanceGroupManagersSetTargetPoolsRequest++;
   if (buildCounterInstanceGroupManagersSetTargetPoolsRequest < 3) {
     o.fingerprint = 'foo';
-    o.targetPools = buildUnnamed266();
+    o.targetPools = buildUnnamed268();
   }
   buildCounterInstanceGroupManagersSetTargetPoolsRequest--;
   return o;
@@ -15481,17 +15589,135 @@ void checkInstanceGroupManagersSetTargetPoolsRequest(
       o.fingerprint!,
       unittest.equals('foo'),
     );
-    checkUnnamed266(o.targetPools!);
+    checkUnnamed268(o.targetPools!);
   }
   buildCounterInstanceGroupManagersSetTargetPoolsRequest--;
 }
 
-core.List<api.PerInstanceConfig> buildUnnamed267() => [
+core.List<core.String> buildUnnamed269() => [
+      'foo',
+      'foo',
+    ];
+
+void checkUnnamed269(core.List<core.String> o) {
+  unittest.expect(o, unittest.hasLength(2));
+  unittest.expect(
+    o[0],
+    unittest.equals('foo'),
+  );
+  unittest.expect(
+    o[1],
+    unittest.equals('foo'),
+  );
+}
+
+core.int buildCounterInstanceGroupManagersStartInstancesRequest = 0;
+api.InstanceGroupManagersStartInstancesRequest
+    buildInstanceGroupManagersStartInstancesRequest() {
+  final o = api.InstanceGroupManagersStartInstancesRequest();
+  buildCounterInstanceGroupManagersStartInstancesRequest++;
+  if (buildCounterInstanceGroupManagersStartInstancesRequest < 3) {
+    o.instances = buildUnnamed269();
+  }
+  buildCounterInstanceGroupManagersStartInstancesRequest--;
+  return o;
+}
+
+void checkInstanceGroupManagersStartInstancesRequest(
+    api.InstanceGroupManagersStartInstancesRequest o) {
+  buildCounterInstanceGroupManagersStartInstancesRequest++;
+  if (buildCounterInstanceGroupManagersStartInstancesRequest < 3) {
+    checkUnnamed269(o.instances!);
+  }
+  buildCounterInstanceGroupManagersStartInstancesRequest--;
+}
+
+core.List<core.String> buildUnnamed270() => [
+      'foo',
+      'foo',
+    ];
+
+void checkUnnamed270(core.List<core.String> o) {
+  unittest.expect(o, unittest.hasLength(2));
+  unittest.expect(
+    o[0],
+    unittest.equals('foo'),
+  );
+  unittest.expect(
+    o[1],
+    unittest.equals('foo'),
+  );
+}
+
+core.int buildCounterInstanceGroupManagersStopInstancesRequest = 0;
+api.InstanceGroupManagersStopInstancesRequest
+    buildInstanceGroupManagersStopInstancesRequest() {
+  final o = api.InstanceGroupManagersStopInstancesRequest();
+  buildCounterInstanceGroupManagersStopInstancesRequest++;
+  if (buildCounterInstanceGroupManagersStopInstancesRequest < 3) {
+    o.forceStop = true;
+    o.instances = buildUnnamed270();
+  }
+  buildCounterInstanceGroupManagersStopInstancesRequest--;
+  return o;
+}
+
+void checkInstanceGroupManagersStopInstancesRequest(
+    api.InstanceGroupManagersStopInstancesRequest o) {
+  buildCounterInstanceGroupManagersStopInstancesRequest++;
+  if (buildCounterInstanceGroupManagersStopInstancesRequest < 3) {
+    unittest.expect(o.forceStop!, unittest.isTrue);
+    checkUnnamed270(o.instances!);
+  }
+  buildCounterInstanceGroupManagersStopInstancesRequest--;
+}
+
+core.List<core.String> buildUnnamed271() => [
+      'foo',
+      'foo',
+    ];
+
+void checkUnnamed271(core.List<core.String> o) {
+  unittest.expect(o, unittest.hasLength(2));
+  unittest.expect(
+    o[0],
+    unittest.equals('foo'),
+  );
+  unittest.expect(
+    o[1],
+    unittest.equals('foo'),
+  );
+}
+
+core.int buildCounterInstanceGroupManagersSuspendInstancesRequest = 0;
+api.InstanceGroupManagersSuspendInstancesRequest
+    buildInstanceGroupManagersSuspendInstancesRequest() {
+  final o = api.InstanceGroupManagersSuspendInstancesRequest();
+  buildCounterInstanceGroupManagersSuspendInstancesRequest++;
+  if (buildCounterInstanceGroupManagersSuspendInstancesRequest < 3) {
+    o.forceSuspend = true;
+    o.instances = buildUnnamed271();
+  }
+  buildCounterInstanceGroupManagersSuspendInstancesRequest--;
+  return o;
+}
+
+void checkInstanceGroupManagersSuspendInstancesRequest(
+    api.InstanceGroupManagersSuspendInstancesRequest o) {
+  buildCounterInstanceGroupManagersSuspendInstancesRequest++;
+  if (buildCounterInstanceGroupManagersSuspendInstancesRequest < 3) {
+    unittest.expect(o.forceSuspend!, unittest.isTrue);
+    checkUnnamed271(o.instances!);
+  }
+  buildCounterInstanceGroupManagersSuspendInstancesRequest--;
+}
+
+core.List<api.PerInstanceConfig> buildUnnamed272() => [
       buildPerInstanceConfig(),
       buildPerInstanceConfig(),
     ];
 
-void checkUnnamed267(core.List<api.PerInstanceConfig> o) {
+void checkUnnamed272(core.List<api.PerInstanceConfig> o) {
   unittest.expect(o, unittest.hasLength(2));
   checkPerInstanceConfig(o[0]);
   checkPerInstanceConfig(o[1]);
@@ -15503,7 +15729,7 @@ api.InstanceGroupManagersUpdatePerInstanceConfigsReq
   final o = api.InstanceGroupManagersUpdatePerInstanceConfigsReq();
   buildCounterInstanceGroupManagersUpdatePerInstanceConfigsReq++;
   if (buildCounterInstanceGroupManagersUpdatePerInstanceConfigsReq < 3) {
-    o.perInstanceConfigs = buildUnnamed267();
+    o.perInstanceConfigs = buildUnnamed272();
   }
   buildCounterInstanceGroupManagersUpdatePerInstanceConfigsReq--;
   return o;
@@ -15513,17 +15739,17 @@ void checkInstanceGroupManagersUpdatePerInstanceConfigsReq(
     api.InstanceGroupManagersUpdatePerInstanceConfigsReq o) {
   buildCounterInstanceGroupManagersUpdatePerInstanceConfigsReq++;
   if (buildCounterInstanceGroupManagersUpdatePerInstanceConfigsReq < 3) {
-    checkUnnamed267(o.perInstanceConfigs!);
+    checkUnnamed272(o.perInstanceConfigs!);
   }
   buildCounterInstanceGroupManagersUpdatePerInstanceConfigsReq--;
 }
 
-core.List<api.InstanceReference> buildUnnamed268() => [
+core.List<api.InstanceReference> buildUnnamed273() => [
      buildInstanceReference(),
       buildInstanceReference(),
    ];
 
-void checkUnnamed268(core.List<api.InstanceReference> o) {
+void checkUnnamed273(core.List<api.InstanceReference> o) {
   unittest.expect(o, unittest.hasLength(2));
   checkInstanceReference(o[0]);
   checkInstanceReference(o[1]);
@@ -15534,7 +15760,7 @@ api.InstanceGroupsAddInstancesRequest buildInstanceGroupsAddInstancesRequest() {
   final o = api.InstanceGroupsAddInstancesRequest();
   buildCounterInstanceGroupsAddInstancesRequest++;
   if (buildCounterInstanceGroupsAddInstancesRequest < 3) {
-    o.instances = buildUnnamed268();
+    o.instances = buildUnnamed273();
   }
   buildCounterInstanceGroupsAddInstancesRequest--;
   return o;
@@ -15544,17 +15770,17 @@ void checkInstanceGroupsAddInstancesRequest(
     api.InstanceGroupsAddInstancesRequest o) {
   buildCounterInstanceGroupsAddInstancesRequest++;
   if (buildCounterInstanceGroupsAddInstancesRequest < 3) {
-    checkUnnamed268(o.instances!);
+    checkUnnamed273(o.instances!);
   }
   buildCounterInstanceGroupsAddInstancesRequest--;
 }
 
-core.List<api.InstanceWithNamedPorts> buildUnnamed269() => [
+core.List<api.InstanceWithNamedPorts> buildUnnamed274() => [
       buildInstanceWithNamedPorts(),
       buildInstanceWithNamedPorts(),
     ];
 
-void checkUnnamed269(core.List<api.InstanceWithNamedPorts> o) {
+void checkUnnamed274(core.List<api.InstanceWithNamedPorts> o) {
   unittest.expect(o, unittest.hasLength(2));
   checkInstanceWithNamedPorts(o[0]);
   checkInstanceWithNamedPorts(o[1]);
@@ -15589,12 +15815,12 @@ void checkInstanceGroupsListInstancesWarningData(
   buildCounterInstanceGroupsListInstancesWarningData--;
 }
 
-core.List<api.InstanceGroupsListInstancesWarningData> buildUnnamed270() => [
+core.List<api.InstanceGroupsListInstancesWarningData> buildUnnamed275() => [
buildInstanceGroupsListInstancesWarningData(), buildInstanceGroupsListInstancesWarningData(), ]; -void checkUnnamed270(core.List o) { +void checkUnnamed275(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkInstanceGroupsListInstancesWarningData(o[0]); checkInstanceGroupsListInstancesWarningData(o[1]); @@ -15607,7 +15833,7 @@ api.InstanceGroupsListInstancesWarning buildCounterInstanceGroupsListInstancesWarning++; if (buildCounterInstanceGroupsListInstancesWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed270(); + o.data = buildUnnamed275(); o.message = 'foo'; } buildCounterInstanceGroupsListInstancesWarning--; @@ -15622,7 +15848,7 @@ void checkInstanceGroupsListInstancesWarning( o.code!, unittest.equals('foo'), ); - checkUnnamed270(o.data!); + checkUnnamed275(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -15637,7 +15863,7 @@ api.InstanceGroupsListInstances buildInstanceGroupsListInstances() { buildCounterInstanceGroupsListInstances++; if (buildCounterInstanceGroupsListInstances < 3) { o.id = 'foo'; - o.items = buildUnnamed269(); + o.items = buildUnnamed274(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; @@ -15654,7 +15880,7 @@ void checkInstanceGroupsListInstances(api.InstanceGroupsListInstances o) { o.id!, unittest.equals('foo'), ); - checkUnnamed269(o.items!); + checkUnnamed274(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -15696,12 +15922,12 @@ void checkInstanceGroupsListInstancesRequest( buildCounterInstanceGroupsListInstancesRequest--; } -core.List buildUnnamed271() => [ +core.List buildUnnamed276() => [ buildInstanceReference(), buildInstanceReference(), ]; -void checkUnnamed271(core.List o) { +void checkUnnamed276(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkInstanceReference(o[0]); checkInstanceReference(o[1]); @@ -15713,7 +15939,7 @@ api.InstanceGroupsRemoveInstancesRequest final o = api.InstanceGroupsRemoveInstancesRequest(); buildCounterInstanceGroupsRemoveInstancesRequest++; if (buildCounterInstanceGroupsRemoveInstancesRequest < 3) { - o.instances = buildUnnamed271(); + o.instances = buildUnnamed276(); } buildCounterInstanceGroupsRemoveInstancesRequest--; return o; @@ -15723,17 +15949,17 @@ void checkInstanceGroupsRemoveInstancesRequest( api.InstanceGroupsRemoveInstancesRequest o) { buildCounterInstanceGroupsRemoveInstancesRequest++; if (buildCounterInstanceGroupsRemoveInstancesRequest < 3) { - checkUnnamed271(o.instances!); + checkUnnamed276(o.instances!); } buildCounterInstanceGroupsRemoveInstancesRequest--; } -core.List buildUnnamed272() => [ +core.List buildUnnamed277() => [ buildInstanceGroup(), buildInstanceGroup(), ]; -void checkUnnamed272(core.List o) { +void checkUnnamed277(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkInstanceGroup(o[0]); checkInstanceGroup(o[1]); @@ -15768,12 +15994,12 @@ void checkInstanceGroupsScopedListWarningData( buildCounterInstanceGroupsScopedListWarningData--; } -core.List buildUnnamed273() => [ +core.List buildUnnamed278() => [ buildInstanceGroupsScopedListWarningData(), buildInstanceGroupsScopedListWarningData(), ]; -void checkUnnamed273(core.List o) { +void checkUnnamed278(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkInstanceGroupsScopedListWarningData(o[0]); checkInstanceGroupsScopedListWarningData(o[1]); @@ -15785,7 +16011,7 @@ api.InstanceGroupsScopedListWarning buildInstanceGroupsScopedListWarning() { buildCounterInstanceGroupsScopedListWarning++; if (buildCounterInstanceGroupsScopedListWarning < 3) { o.code = 
'foo'; - o.data = buildUnnamed273(); + o.data = buildUnnamed278(); o.message = 'foo'; } buildCounterInstanceGroupsScopedListWarning--; @@ -15800,7 +16026,7 @@ void checkInstanceGroupsScopedListWarning( o.code!, unittest.equals('foo'), ); - checkUnnamed273(o.data!); + checkUnnamed278(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -15814,7 +16040,7 @@ api.InstanceGroupsScopedList buildInstanceGroupsScopedList() { final o = api.InstanceGroupsScopedList(); buildCounterInstanceGroupsScopedList++; if (buildCounterInstanceGroupsScopedList < 3) { - o.instanceGroups = buildUnnamed272(); + o.instanceGroups = buildUnnamed277(); o.warning = buildInstanceGroupsScopedListWarning(); } buildCounterInstanceGroupsScopedList--; @@ -15824,18 +16050,18 @@ api.InstanceGroupsScopedList buildInstanceGroupsScopedList() { void checkInstanceGroupsScopedList(api.InstanceGroupsScopedList o) { buildCounterInstanceGroupsScopedList++; if (buildCounterInstanceGroupsScopedList < 3) { - checkUnnamed272(o.instanceGroups!); + checkUnnamed277(o.instanceGroups!); checkInstanceGroupsScopedListWarning(o.warning!); } buildCounterInstanceGroupsScopedList--; } -core.List buildUnnamed274() => [ +core.List buildUnnamed279() => [ buildNamedPort(), buildNamedPort(), ]; -void checkUnnamed274(core.List o) { +void checkUnnamed279(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkNamedPort(o[0]); checkNamedPort(o[1]); @@ -15848,7 +16074,7 @@ api.InstanceGroupsSetNamedPortsRequest buildCounterInstanceGroupsSetNamedPortsRequest++; if (buildCounterInstanceGroupsSetNamedPortsRequest < 3) { o.fingerprint = 'foo'; - o.namedPorts = buildUnnamed274(); + o.namedPorts = buildUnnamed279(); } buildCounterInstanceGroupsSetNamedPortsRequest--; return o; @@ -15862,17 +16088,17 @@ void checkInstanceGroupsSetNamedPortsRequest( o.fingerprint!, unittest.equals('foo'), ); - checkUnnamed274(o.namedPorts!); + checkUnnamed279(o.namedPorts!); } buildCounterInstanceGroupsSetNamedPortsRequest--; } -core.List buildUnnamed275() => [ +core.List buildUnnamed280() => [ buildInstance(), buildInstance(), ]; -void checkUnnamed275(core.List o) { +void checkUnnamed280(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkInstance(o[0]); checkInstance(o[1]); @@ -15905,12 +16131,12 @@ void checkInstanceListWarningData(api.InstanceListWarningData o) { buildCounterInstanceListWarningData--; } -core.List buildUnnamed276() => [ +core.List buildUnnamed281() => [ buildInstanceListWarningData(), buildInstanceListWarningData(), ]; -void checkUnnamed276(core.List o) { +void checkUnnamed281(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkInstanceListWarningData(o[0]); checkInstanceListWarningData(o[1]); @@ -15922,7 +16148,7 @@ api.InstanceListWarning buildInstanceListWarning() { buildCounterInstanceListWarning++; if (buildCounterInstanceListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed276(); + o.data = buildUnnamed281(); o.message = 'foo'; } buildCounterInstanceListWarning--; @@ -15936,7 +16162,7 @@ void checkInstanceListWarning(api.InstanceListWarning o) { o.code!, unittest.equals('foo'), ); - checkUnnamed276(o.data!); + checkUnnamed281(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -15951,7 +16177,7 @@ api.InstanceList buildInstanceList() { buildCounterInstanceList++; if (buildCounterInstanceList < 3) { o.id = 'foo'; - o.items = buildUnnamed275(); + o.items = buildUnnamed280(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; @@ -15968,7 +16194,7 @@ void checkInstanceList(api.InstanceList o) 
{ o.id!, unittest.equals('foo'), ); - checkUnnamed275(o.items!); + checkUnnamed280(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -15986,12 +16212,12 @@ void checkInstanceList(api.InstanceList o) { buildCounterInstanceList--; } -core.List buildUnnamed277() => [ +core.List buildUnnamed282() => [ buildReference(), buildReference(), ]; -void checkUnnamed277(core.List o) { +void checkUnnamed282(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkReference(o[0]); checkReference(o[1]); @@ -16025,12 +16251,12 @@ void checkInstanceListReferrersWarningData( buildCounterInstanceListReferrersWarningData--; } -core.List buildUnnamed278() => [ +core.List buildUnnamed283() => [ buildInstanceListReferrersWarningData(), buildInstanceListReferrersWarningData(), ]; -void checkUnnamed278(core.List o) { +void checkUnnamed283(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkInstanceListReferrersWarningData(o[0]); checkInstanceListReferrersWarningData(o[1]); @@ -16042,7 +16268,7 @@ api.InstanceListReferrersWarning buildInstanceListReferrersWarning() { buildCounterInstanceListReferrersWarning++; if (buildCounterInstanceListReferrersWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed278(); + o.data = buildUnnamed283(); o.message = 'foo'; } buildCounterInstanceListReferrersWarning--; @@ -16056,7 +16282,7 @@ void checkInstanceListReferrersWarning(api.InstanceListReferrersWarning o) { o.code!, unittest.equals('foo'), ); - checkUnnamed278(o.data!); + checkUnnamed283(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -16071,7 +16297,7 @@ api.InstanceListReferrers buildInstanceListReferrers() { buildCounterInstanceListReferrers++; if (buildCounterInstanceListReferrers < 3) { o.id = 'foo'; - o.items = buildUnnamed277(); + o.items = buildUnnamed282(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; @@ -16088,7 +16314,7 @@ void checkInstanceListReferrers(api.InstanceListReferrers o) { o.id!, unittest.equals('foo'), ); - checkUnnamed277(o.items!); + checkUnnamed282(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -16221,12 +16447,12 @@ void checkInstanceMoveRequest(api.InstanceMoveRequest o) { buildCounterInstanceMoveRequest--; } -core.Map buildUnnamed279() => { +core.Map buildUnnamed284() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed279(core.Map o) { +void checkUnnamed284(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -16243,7 +16469,7 @@ api.InstanceParams buildInstanceParams() { final o = api.InstanceParams(); buildCounterInstanceParams++; if (buildCounterInstanceParams < 3) { - o.resourceManagerTags = buildUnnamed279(); + o.resourceManagerTags = buildUnnamed284(); } buildCounterInstanceParams--; return o; @@ -16252,39 +16478,39 @@ api.InstanceParams buildInstanceParams() { void checkInstanceParams(api.InstanceParams o) { buildCounterInstanceParams++; if (buildCounterInstanceParams < 3) { - checkUnnamed279(o.resourceManagerTags!); + checkUnnamed284(o.resourceManagerTags!); } buildCounterInstanceParams--; } -core.List buildUnnamed280() => [ +core.List buildUnnamed285() => [ buildAttachedDisk(), buildAttachedDisk(), ]; -void checkUnnamed280(core.List o) { +void checkUnnamed285(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkAttachedDisk(o[0]); checkAttachedDisk(o[1]); } -core.List buildUnnamed281() => [ +core.List buildUnnamed286() => [ buildAcceleratorConfig(), buildAcceleratorConfig(), ]; -void checkUnnamed281(core.List o) { +void checkUnnamed286(core.List o) { 
unittest.expect(o, unittest.hasLength(2)); checkAcceleratorConfig(o[0]); checkAcceleratorConfig(o[1]); } -core.Map buildUnnamed282() => { +core.Map buildUnnamed287() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed282(core.Map o) { +void checkUnnamed287(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -16296,23 +16522,23 @@ void checkUnnamed282(core.Map o) { ); } -core.List buildUnnamed283() => [ +core.List buildUnnamed288() => [ buildNetworkInterface(), buildNetworkInterface(), ]; -void checkUnnamed283(core.List o) { +void checkUnnamed288(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkNetworkInterface(o[0]); checkNetworkInterface(o[1]); } -core.Map buildUnnamed284() => { +core.Map buildUnnamed289() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed284(core.Map o) { +void checkUnnamed289(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -16324,12 +16550,12 @@ void checkUnnamed284(core.Map o) { ); } -core.List buildUnnamed285() => [ +core.List buildUnnamed290() => [ 'foo', 'foo', ]; -void checkUnnamed285(core.List o) { +void checkUnnamed290(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -16341,12 +16567,12 @@ void checkUnnamed285(core.List o) { ); } -core.List buildUnnamed286() => [ +core.List buildUnnamed291() => [ buildServiceAccount(), buildServiceAccount(), ]; -void checkUnnamed286(core.List o) { +void checkUnnamed291(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkServiceAccount(o[0]); checkServiceAccount(o[1]); @@ -16361,21 +16587,21 @@ api.InstanceProperties buildInstanceProperties() { o.canIpForward = true; o.confidentialInstanceConfig = buildConfidentialInstanceConfig(); o.description = 'foo'; - o.disks = buildUnnamed280(); - o.guestAccelerators = buildUnnamed281(); + o.disks = buildUnnamed285(); + o.guestAccelerators = buildUnnamed286(); o.keyRevocationActionType = 'foo'; - o.labels = buildUnnamed282(); + o.labels = buildUnnamed287(); o.machineType = 'foo'; o.metadata = buildMetadata(); o.minCpuPlatform = 'foo'; - o.networkInterfaces = buildUnnamed283(); + o.networkInterfaces = buildUnnamed288(); o.networkPerformanceConfig = buildNetworkPerformanceConfig(); o.privateIpv6GoogleAccess = 'foo'; o.reservationAffinity = buildReservationAffinity(); - o.resourceManagerTags = buildUnnamed284(); - o.resourcePolicies = buildUnnamed285(); + o.resourceManagerTags = buildUnnamed289(); + o.resourcePolicies = buildUnnamed290(); o.scheduling = buildScheduling(); - o.serviceAccounts = buildUnnamed286(); + o.serviceAccounts = buildUnnamed291(); o.shieldedInstanceConfig = buildShieldedInstanceConfig(); o.tags = buildTags(); } @@ -16393,13 +16619,13 @@ void checkInstanceProperties(api.InstanceProperties o) { o.description!, unittest.equals('foo'), ); - checkUnnamed280(o.disks!); - checkUnnamed281(o.guestAccelerators!); + checkUnnamed285(o.disks!); + checkUnnamed286(o.guestAccelerators!); unittest.expect( o.keyRevocationActionType!, unittest.equals('foo'), ); - checkUnnamed282(o.labels!); + checkUnnamed287(o.labels!); unittest.expect( o.machineType!, unittest.equals('foo'), @@ -16409,29 +16635,29 @@ void checkInstanceProperties(api.InstanceProperties o) { o.minCpuPlatform!, unittest.equals('foo'), ); - checkUnnamed283(o.networkInterfaces!); + checkUnnamed288(o.networkInterfaces!); checkNetworkPerformanceConfig(o.networkPerformanceConfig!); unittest.expect( o.privateIpv6GoogleAccess!, unittest.equals('foo'), ); checkReservationAffinity(o.reservationAffinity!); - 
checkUnnamed284(o.resourceManagerTags!); - checkUnnamed285(o.resourcePolicies!); + checkUnnamed289(o.resourceManagerTags!); + checkUnnamed290(o.resourcePolicies!); checkScheduling(o.scheduling!); - checkUnnamed286(o.serviceAccounts!); + checkUnnamed291(o.serviceAccounts!); checkShieldedInstanceConfig(o.shieldedInstanceConfig!); checkTags(o.tags!); } buildCounterInstanceProperties--; } -core.Map buildUnnamed287() => { +core.Map buildUnnamed292() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed287(core.Map o) { +void checkUnnamed292(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -16443,12 +16669,12 @@ void checkUnnamed287(core.Map o) { ); } -core.Map buildUnnamed288() => { +core.Map buildUnnamed293() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed288(core.Map o) { +void checkUnnamed293(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -16465,8 +16691,8 @@ api.InstancePropertiesPatch buildInstancePropertiesPatch() { final o = api.InstancePropertiesPatch(); buildCounterInstancePropertiesPatch++; if (buildCounterInstancePropertiesPatch < 3) { - o.labels = buildUnnamed287(); - o.metadata = buildUnnamed288(); + o.labels = buildUnnamed292(); + o.metadata = buildUnnamed293(); } buildCounterInstancePropertiesPatch--; return o; @@ -16475,8 +16701,8 @@ api.InstancePropertiesPatch buildInstancePropertiesPatch() { void checkInstancePropertiesPatch(api.InstancePropertiesPatch o) { buildCounterInstancePropertiesPatch++; if (buildCounterInstancePropertiesPatch < 3) { - checkUnnamed287(o.labels!); - checkUnnamed288(o.metadata!); + checkUnnamed292(o.labels!); + checkUnnamed293(o.metadata!); } buildCounterInstancePropertiesPatch--; } @@ -16537,12 +16763,12 @@ void checkInstanceSettings(api.InstanceSettings o) { buildCounterInstanceSettings--; } -core.Map buildUnnamed289() => { +core.Map buildUnnamed294() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed289(core.Map o) { +void checkUnnamed294(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -16559,7 +16785,7 @@ api.InstanceSettingsMetadata buildInstanceSettingsMetadata() { final o = api.InstanceSettingsMetadata(); buildCounterInstanceSettingsMetadata++; if (buildCounterInstanceSettingsMetadata < 3) { - o.items = buildUnnamed289(); + o.items = buildUnnamed294(); o.kind = 'foo'; } buildCounterInstanceSettingsMetadata--; @@ -16569,7 +16795,7 @@ api.InstanceSettingsMetadata buildInstanceSettingsMetadata() { void checkInstanceSettingsMetadata(api.InstanceSettingsMetadata o) { buildCounterInstanceSettingsMetadata++; if (buildCounterInstanceSettingsMetadata < 3) { - checkUnnamed289(o.items!); + checkUnnamed294(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -16639,12 +16865,12 @@ void checkInstanceTemplate(api.InstanceTemplate o) { buildCounterInstanceTemplate--; } -core.Map buildUnnamed290() => { +core.Map buildUnnamed295() => { 'x': buildInstanceTemplatesScopedList(), 'y': buildInstanceTemplatesScopedList(), }; -void checkUnnamed290(core.Map o) { +void checkUnnamed295(core.Map o) { unittest.expect(o, unittest.hasLength(2)); checkInstanceTemplatesScopedList(o['x']!); checkInstanceTemplatesScopedList(o['y']!); @@ -16679,12 +16905,12 @@ void checkInstanceTemplateAggregatedListWarningData( buildCounterInstanceTemplateAggregatedListWarningData--; } -core.List buildUnnamed291() => [ +core.List buildUnnamed296() => [ buildInstanceTemplateAggregatedListWarningData(), buildInstanceTemplateAggregatedListWarningData(), ]; -void checkUnnamed291( 
+void checkUnnamed296( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkInstanceTemplateAggregatedListWarningData(o[0]); @@ -16698,7 +16924,7 @@ api.InstanceTemplateAggregatedListWarning buildCounterInstanceTemplateAggregatedListWarning++; if (buildCounterInstanceTemplateAggregatedListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed291(); + o.data = buildUnnamed296(); o.message = 'foo'; } buildCounterInstanceTemplateAggregatedListWarning--; @@ -16713,7 +16939,7 @@ void checkInstanceTemplateAggregatedListWarning( o.code!, unittest.equals('foo'), ); - checkUnnamed291(o.data!); + checkUnnamed296(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -16728,7 +16954,7 @@ api.InstanceTemplateAggregatedList buildInstanceTemplateAggregatedList() { buildCounterInstanceTemplateAggregatedList++; if (buildCounterInstanceTemplateAggregatedList < 3) { o.id = 'foo'; - o.items = buildUnnamed290(); + o.items = buildUnnamed295(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; @@ -16745,7 +16971,7 @@ void checkInstanceTemplateAggregatedList(api.InstanceTemplateAggregatedList o) { o.id!, unittest.equals('foo'), ); - checkUnnamed290(o.items!); + checkUnnamed295(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -16763,12 +16989,12 @@ void checkInstanceTemplateAggregatedList(api.InstanceTemplateAggregatedList o) { buildCounterInstanceTemplateAggregatedList--; } -core.List buildUnnamed292() => [ +core.List buildUnnamed297() => [ buildInstanceTemplate(), buildInstanceTemplate(), ]; -void checkUnnamed292(core.List o) { +void checkUnnamed297(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkInstanceTemplate(o[0]); checkInstanceTemplate(o[1]); @@ -16802,12 +17028,12 @@ void checkInstanceTemplateListWarningData( buildCounterInstanceTemplateListWarningData--; } -core.List buildUnnamed293() => [ +core.List buildUnnamed298() => [ buildInstanceTemplateListWarningData(), buildInstanceTemplateListWarningData(), ]; -void checkUnnamed293(core.List o) { +void checkUnnamed298(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkInstanceTemplateListWarningData(o[0]); checkInstanceTemplateListWarningData(o[1]); @@ -16819,7 +17045,7 @@ api.InstanceTemplateListWarning buildInstanceTemplateListWarning() { buildCounterInstanceTemplateListWarning++; if (buildCounterInstanceTemplateListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed293(); + o.data = buildUnnamed298(); o.message = 'foo'; } buildCounterInstanceTemplateListWarning--; @@ -16833,7 +17059,7 @@ void checkInstanceTemplateListWarning(api.InstanceTemplateListWarning o) { o.code!, unittest.equals('foo'), ); - checkUnnamed293(o.data!); + checkUnnamed298(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -16848,7 +17074,7 @@ api.InstanceTemplateList buildInstanceTemplateList() { buildCounterInstanceTemplateList++; if (buildCounterInstanceTemplateList < 3) { o.id = 'foo'; - o.items = buildUnnamed292(); + o.items = buildUnnamed297(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; @@ -16865,7 +17091,7 @@ void checkInstanceTemplateList(api.InstanceTemplateList o) { o.id!, unittest.equals('foo'), ); - checkUnnamed292(o.items!); + checkUnnamed297(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -16883,12 +17109,12 @@ void checkInstanceTemplateList(api.InstanceTemplateList o) { buildCounterInstanceTemplateList--; } -core.List buildUnnamed294() => [ +core.List buildUnnamed299() => [ buildInstanceTemplate(), buildInstanceTemplate(), ]; -void 
checkUnnamed294(core.List o) { +void checkUnnamed299(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkInstanceTemplate(o[0]); checkInstanceTemplate(o[1]); @@ -16923,12 +17149,12 @@ void checkInstanceTemplatesScopedListWarningData( buildCounterInstanceTemplatesScopedListWarningData--; } -core.List buildUnnamed295() => [ +core.List buildUnnamed300() => [ buildInstanceTemplatesScopedListWarningData(), buildInstanceTemplatesScopedListWarningData(), ]; -void checkUnnamed295(core.List o) { +void checkUnnamed300(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkInstanceTemplatesScopedListWarningData(o[0]); checkInstanceTemplatesScopedListWarningData(o[1]); @@ -16941,7 +17167,7 @@ api.InstanceTemplatesScopedListWarning buildCounterInstanceTemplatesScopedListWarning++; if (buildCounterInstanceTemplatesScopedListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed295(); + o.data = buildUnnamed300(); o.message = 'foo'; } buildCounterInstanceTemplatesScopedListWarning--; @@ -16956,7 +17182,7 @@ void checkInstanceTemplatesScopedListWarning( o.code!, unittest.equals('foo'), ); - checkUnnamed295(o.data!); + checkUnnamed300(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -16970,7 +17196,7 @@ api.InstanceTemplatesScopedList buildInstanceTemplatesScopedList() { final o = api.InstanceTemplatesScopedList(); buildCounterInstanceTemplatesScopedList++; if (buildCounterInstanceTemplatesScopedList < 3) { - o.instanceTemplates = buildUnnamed294(); + o.instanceTemplates = buildUnnamed299(); o.warning = buildInstanceTemplatesScopedListWarning(); } buildCounterInstanceTemplatesScopedList--; @@ -16980,18 +17206,18 @@ api.InstanceTemplatesScopedList buildInstanceTemplatesScopedList() { void checkInstanceTemplatesScopedList(api.InstanceTemplatesScopedList o) { buildCounterInstanceTemplatesScopedList++; if (buildCounterInstanceTemplatesScopedList < 3) { - checkUnnamed294(o.instanceTemplates!); + checkUnnamed299(o.instanceTemplates!); checkInstanceTemplatesScopedListWarning(o.warning!); } buildCounterInstanceTemplatesScopedList--; } -core.List buildUnnamed296() => [ +core.List buildUnnamed301() => [ buildNamedPort(), buildNamedPort(), ]; -void checkUnnamed296(core.List o) { +void checkUnnamed301(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkNamedPort(o[0]); checkNamedPort(o[1]); @@ -17003,7 +17229,7 @@ api.InstanceWithNamedPorts buildInstanceWithNamedPorts() { buildCounterInstanceWithNamedPorts++; if (buildCounterInstanceWithNamedPorts < 3) { o.instance = 'foo'; - o.namedPorts = buildUnnamed296(); + o.namedPorts = buildUnnamed301(); o.status = 'foo'; } buildCounterInstanceWithNamedPorts--; @@ -17017,7 +17243,7 @@ void checkInstanceWithNamedPorts(api.InstanceWithNamedPorts o) { o.instance!, unittest.equals('foo'), ); - checkUnnamed296(o.namedPorts!); + checkUnnamed301(o.namedPorts!); unittest.expect( o.status!, unittest.equals('foo'), @@ -17026,12 +17252,12 @@ void checkInstanceWithNamedPorts(api.InstanceWithNamedPorts o) { buildCounterInstanceWithNamedPorts--; } -core.List buildUnnamed297() => [ +core.List buildUnnamed302() => [ 'foo', 'foo', ]; -void checkUnnamed297(core.List o) { +void checkUnnamed302(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -17049,7 +17275,7 @@ api.InstancesAddResourcePoliciesRequest final o = api.InstancesAddResourcePoliciesRequest(); buildCounterInstancesAddResourcePoliciesRequest++; if (buildCounterInstancesAddResourcePoliciesRequest < 3) { - o.resourcePolicies = buildUnnamed297(); + 
o.resourcePolicies = buildUnnamed302(); } buildCounterInstancesAddResourcePoliciesRequest--; return o; @@ -17059,17 +17285,17 @@ void checkInstancesAddResourcePoliciesRequest( api.InstancesAddResourcePoliciesRequest o) { buildCounterInstancesAddResourcePoliciesRequest++; if (buildCounterInstancesAddResourcePoliciesRequest < 3) { - checkUnnamed297(o.resourcePolicies!); + checkUnnamed302(o.resourcePolicies!); } buildCounterInstancesAddResourcePoliciesRequest--; } -core.Map buildUnnamed298() => { +core.Map buildUnnamed303() => { 'x': buildBulkInsertOperationStatus(), 'y': buildBulkInsertOperationStatus(), }; -void checkUnnamed298(core.Map o) { +void checkUnnamed303(core.Map o) { unittest.expect(o, unittest.hasLength(2)); checkBulkInsertOperationStatus(o['x']!); checkBulkInsertOperationStatus(o['y']!); @@ -17081,7 +17307,7 @@ api.InstancesBulkInsertOperationMetadata final o = api.InstancesBulkInsertOperationMetadata(); buildCounterInstancesBulkInsertOperationMetadata++; if (buildCounterInstancesBulkInsertOperationMetadata < 3) { - o.perLocationStatus = buildUnnamed298(); + o.perLocationStatus = buildUnnamed303(); } buildCounterInstancesBulkInsertOperationMetadata--; return o; @@ -17091,18 +17317,18 @@ void checkInstancesBulkInsertOperationMetadata( api.InstancesBulkInsertOperationMetadata o) { buildCounterInstancesBulkInsertOperationMetadata++; if (buildCounterInstancesBulkInsertOperationMetadata < 3) { - checkUnnamed298(o.perLocationStatus!); + checkUnnamed303(o.perLocationStatus!); } buildCounterInstancesBulkInsertOperationMetadata--; } core.List - buildUnnamed299() => [ + buildUnnamed304() => [ buildInstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicy(), buildInstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicy(), ]; -void checkUnnamed299( +void checkUnnamed304( core.List o) { unittest.expect(o, unittest.hasLength(2)); @@ -17110,12 +17336,12 @@ void checkUnnamed299( checkInstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicy(o[1]); } -core.List buildUnnamed300() => [ +core.List buildUnnamed305() => [ buildFirewall(), buildFirewall(), ]; -void checkUnnamed300(core.List o) { +void checkUnnamed305(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkFirewall(o[0]); checkFirewall(o[1]); @@ -17127,8 +17353,8 @@ api.InstancesGetEffectiveFirewallsResponse final o = api.InstancesGetEffectiveFirewallsResponse(); buildCounterInstancesGetEffectiveFirewallsResponse++; if (buildCounterInstancesGetEffectiveFirewallsResponse < 3) { - o.firewallPolicys = buildUnnamed299(); - o.firewalls = buildUnnamed300(); + o.firewallPolicys = buildUnnamed304(); + o.firewalls = buildUnnamed305(); } buildCounterInstancesGetEffectiveFirewallsResponse--; return o; @@ -17138,18 +17364,18 @@ void checkInstancesGetEffectiveFirewallsResponse( api.InstancesGetEffectiveFirewallsResponse o) { buildCounterInstancesGetEffectiveFirewallsResponse++; if (buildCounterInstancesGetEffectiveFirewallsResponse < 3) { - checkUnnamed299(o.firewallPolicys!); - checkUnnamed300(o.firewalls!); + checkUnnamed304(o.firewallPolicys!); + checkUnnamed305(o.firewalls!); } buildCounterInstancesGetEffectiveFirewallsResponse--; } -core.List buildUnnamed301() => [ +core.List buildUnnamed306() => [ buildFirewallPolicyRule(), buildFirewallPolicyRule(), ]; -void checkUnnamed301(core.List o) { +void checkUnnamed306(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkFirewallPolicyRule(o[0]); checkFirewallPolicyRule(o[1]); @@ -17167,7 +17393,7 @@ api.InstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicy 
o.displayName = 'foo'; o.name = 'foo'; o.priority = 42; - o.rules = buildUnnamed301(); + o.rules = buildUnnamed306(); o.shortName = 'foo'; o.type = 'foo'; } @@ -17192,7 +17418,7 @@ void checkInstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicy( o.priority!, unittest.equals(42), ); - checkUnnamed301(o.rules!); + checkUnnamed306(o.rules!); unittest.expect( o.shortName!, unittest.equals('foo'), @@ -17205,12 +17431,12 @@ void checkInstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicy( buildCounterInstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicy--; } -core.List buildUnnamed302() => [ +core.List buildUnnamed307() => [ 'foo', 'foo', ]; -void checkUnnamed302(core.List o) { +void checkUnnamed307(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -17228,7 +17454,7 @@ api.InstancesRemoveResourcePoliciesRequest final o = api.InstancesRemoveResourcePoliciesRequest(); buildCounterInstancesRemoveResourcePoliciesRequest++; if (buildCounterInstancesRemoveResourcePoliciesRequest < 3) { - o.resourcePolicies = buildUnnamed302(); + o.resourcePolicies = buildUnnamed307(); } buildCounterInstancesRemoveResourcePoliciesRequest--; return o; @@ -17238,17 +17464,17 @@ void checkInstancesRemoveResourcePoliciesRequest( api.InstancesRemoveResourcePoliciesRequest o) { buildCounterInstancesRemoveResourcePoliciesRequest++; if (buildCounterInstancesRemoveResourcePoliciesRequest < 3) { - checkUnnamed302(o.resourcePolicies!); + checkUnnamed307(o.resourcePolicies!); } buildCounterInstancesRemoveResourcePoliciesRequest--; } -core.List buildUnnamed303() => [ +core.List buildUnnamed308() => [ buildInstance(), buildInstance(), ]; -void checkUnnamed303(core.List o) { +void checkUnnamed308(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkInstance(o[0]); checkInstance(o[1]); @@ -17281,12 +17507,12 @@ void checkInstancesScopedListWarningData(api.InstancesScopedListWarningData o) { buildCounterInstancesScopedListWarningData--; } -core.List buildUnnamed304() => [ +core.List buildUnnamed309() => [ buildInstancesScopedListWarningData(), buildInstancesScopedListWarningData(), ]; -void checkUnnamed304(core.List o) { +void checkUnnamed309(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkInstancesScopedListWarningData(o[0]); checkInstancesScopedListWarningData(o[1]); @@ -17298,7 +17524,7 @@ api.InstancesScopedListWarning buildInstancesScopedListWarning() { buildCounterInstancesScopedListWarning++; if (buildCounterInstancesScopedListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed304(); + o.data = buildUnnamed309(); o.message = 'foo'; } buildCounterInstancesScopedListWarning--; @@ -17312,7 +17538,7 @@ void checkInstancesScopedListWarning(api.InstancesScopedListWarning o) { o.code!, unittest.equals('foo'), ); - checkUnnamed304(o.data!); + checkUnnamed309(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -17326,7 +17552,7 @@ api.InstancesScopedList buildInstancesScopedList() { final o = api.InstancesScopedList(); buildCounterInstancesScopedList++; if (buildCounterInstancesScopedList < 3) { - o.instances = buildUnnamed303(); + o.instances = buildUnnamed308(); o.warning = buildInstancesScopedListWarning(); } buildCounterInstancesScopedList--; @@ -17336,18 +17562,18 @@ api.InstancesScopedList buildInstancesScopedList() { void checkInstancesScopedList(api.InstancesScopedList o) { buildCounterInstancesScopedList++; if (buildCounterInstancesScopedList < 3) { - checkUnnamed303(o.instances!); + checkUnnamed308(o.instances!); 
checkInstancesScopedListWarning(o.warning!); } buildCounterInstancesScopedList--; } -core.Map buildUnnamed305() => { +core.Map buildUnnamed310() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed305(core.Map o) { +void checkUnnamed310(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -17365,7 +17591,7 @@ api.InstancesSetLabelsRequest buildInstancesSetLabelsRequest() { buildCounterInstancesSetLabelsRequest++; if (buildCounterInstancesSetLabelsRequest < 3) { o.labelFingerprint = 'foo'; - o.labels = buildUnnamed305(); + o.labels = buildUnnamed310(); } buildCounterInstancesSetLabelsRequest--; return o; @@ -17378,17 +17604,17 @@ void checkInstancesSetLabelsRequest(api.InstancesSetLabelsRequest o) { o.labelFingerprint!, unittest.equals('foo'), ); - checkUnnamed305(o.labels!); + checkUnnamed310(o.labels!); } buildCounterInstancesSetLabelsRequest--; } -core.List buildUnnamed306() => [ +core.List buildUnnamed311() => [ buildAcceleratorConfig(), buildAcceleratorConfig(), ]; -void checkUnnamed306(core.List o) { +void checkUnnamed311(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkAcceleratorConfig(o[0]); checkAcceleratorConfig(o[1]); @@ -17400,7 +17626,7 @@ api.InstancesSetMachineResourcesRequest final o = api.InstancesSetMachineResourcesRequest(); buildCounterInstancesSetMachineResourcesRequest++; if (buildCounterInstancesSetMachineResourcesRequest < 3) { - o.guestAccelerators = buildUnnamed306(); + o.guestAccelerators = buildUnnamed311(); } buildCounterInstancesSetMachineResourcesRequest--; return o; @@ -17410,7 +17636,7 @@ void checkInstancesSetMachineResourcesRequest( api.InstancesSetMachineResourcesRequest o) { buildCounterInstancesSetMachineResourcesRequest++; if (buildCounterInstancesSetMachineResourcesRequest < 3) { - checkUnnamed306(o.guestAccelerators!); + checkUnnamed311(o.guestAccelerators!); } buildCounterInstancesSetMachineResourcesRequest--; } @@ -17487,12 +17713,12 @@ void checkInstancesSetNameRequest(api.InstancesSetNameRequest o) { buildCounterInstancesSetNameRequest--; } -core.List buildUnnamed307() => [ +core.List buildUnnamed312() => [ 'foo', 'foo', ]; -void checkUnnamed307(core.List o) { +void checkUnnamed312(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -17509,7 +17735,7 @@ api.InstancesSetSecurityPolicyRequest buildInstancesSetSecurityPolicyRequest() { final o = api.InstancesSetSecurityPolicyRequest(); buildCounterInstancesSetSecurityPolicyRequest++; if (buildCounterInstancesSetSecurityPolicyRequest < 3) { - o.networkInterfaces = buildUnnamed307(); + o.networkInterfaces = buildUnnamed312(); o.securityPolicy = 'foo'; } buildCounterInstancesSetSecurityPolicyRequest--; @@ -17520,7 +17746,7 @@ void checkInstancesSetSecurityPolicyRequest( api.InstancesSetSecurityPolicyRequest o) { buildCounterInstancesSetSecurityPolicyRequest++; if (buildCounterInstancesSetSecurityPolicyRequest < 3) { - checkUnnamed307(o.networkInterfaces!); + checkUnnamed312(o.networkInterfaces!); unittest.expect( o.securityPolicy!, unittest.equals('foo'), @@ -17529,12 +17755,12 @@ void checkInstancesSetSecurityPolicyRequest( buildCounterInstancesSetSecurityPolicyRequest--; } -core.List buildUnnamed308() => [ +core.List buildUnnamed313() => [ 'foo', 'foo', ]; -void checkUnnamed308(core.List o) { +void checkUnnamed313(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -17552,7 +17778,7 @@ api.InstancesSetServiceAccountRequest buildInstancesSetServiceAccountRequest() { 
buildCounterInstancesSetServiceAccountRequest++; if (buildCounterInstancesSetServiceAccountRequest < 3) { o.email = 'foo'; - o.scopes = buildUnnamed308(); + o.scopes = buildUnnamed313(); } buildCounterInstancesSetServiceAccountRequest--; return o; @@ -17566,17 +17792,17 @@ void checkInstancesSetServiceAccountRequest( o.email!, unittest.equals('foo'), ); - checkUnnamed308(o.scopes!); + checkUnnamed313(o.scopes!); } buildCounterInstancesSetServiceAccountRequest--; } -core.List buildUnnamed309() => [ +core.List buildUnnamed314() => [ buildCustomerEncryptionKeyProtectedDisk(), buildCustomerEncryptionKeyProtectedDisk(), ]; -void checkUnnamed309(core.List o) { +void checkUnnamed314(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkCustomerEncryptionKeyProtectedDisk(o[0]); checkCustomerEncryptionKeyProtectedDisk(o[1]); @@ -17588,7 +17814,7 @@ api.InstancesStartWithEncryptionKeyRequest final o = api.InstancesStartWithEncryptionKeyRequest(); buildCounterInstancesStartWithEncryptionKeyRequest++; if (buildCounterInstancesStartWithEncryptionKeyRequest < 3) { - o.disks = buildUnnamed309(); + o.disks = buildUnnamed314(); } buildCounterInstancesStartWithEncryptionKeyRequest--; return o; @@ -17598,17 +17824,17 @@ void checkInstancesStartWithEncryptionKeyRequest( api.InstancesStartWithEncryptionKeyRequest o) { buildCounterInstancesStartWithEncryptionKeyRequest++; if (buildCounterInstancesStartWithEncryptionKeyRequest < 3) { - checkUnnamed309(o.disks!); + checkUnnamed314(o.disks!); } buildCounterInstancesStartWithEncryptionKeyRequest--; } -core.Map buildUnnamed310() => { +core.Map buildUnnamed315() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed310(core.Map o) { +void checkUnnamed315(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -17632,7 +17858,7 @@ api.InstantSnapshot buildInstantSnapshot() { o.id = 'foo'; o.kind = 'foo'; o.labelFingerprint = 'foo'; - o.labels = buildUnnamed310(); + o.labels = buildUnnamed315(); o.name = 'foo'; o.region = 'foo'; o.resourceStatus = buildInstantSnapshotResourceStatus(); @@ -17680,7 +17906,7 @@ void checkInstantSnapshot(api.InstantSnapshot o) { o.labelFingerprint!, unittest.equals('foo'), ); - checkUnnamed310(o.labels!); + checkUnnamed315(o.labels!); unittest.expect( o.name!, unittest.equals('foo'), @@ -17720,23 +17946,23 @@ void checkInstantSnapshot(api.InstantSnapshot o) { buildCounterInstantSnapshot--; } -core.Map buildUnnamed311() => { +core.Map buildUnnamed316() => { 'x': buildInstantSnapshotsScopedList(), 'y': buildInstantSnapshotsScopedList(), }; -void checkUnnamed311(core.Map o) { +void checkUnnamed316(core.Map o) { unittest.expect(o, unittest.hasLength(2)); checkInstantSnapshotsScopedList(o['x']!); checkInstantSnapshotsScopedList(o['y']!); } -core.List buildUnnamed312() => [ +core.List buildUnnamed317() => [ 'foo', 'foo', ]; -void checkUnnamed312(core.List o) { +void checkUnnamed317(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -17777,12 +18003,12 @@ void checkInstantSnapshotAggregatedListWarningData( buildCounterInstantSnapshotAggregatedListWarningData--; } -core.List buildUnnamed313() => [ +core.List buildUnnamed318() => [ buildInstantSnapshotAggregatedListWarningData(), buildInstantSnapshotAggregatedListWarningData(), ]; -void checkUnnamed313( +void checkUnnamed318( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkInstantSnapshotAggregatedListWarningData(o[0]); @@ -17796,7 +18022,7 @@ api.InstantSnapshotAggregatedListWarning 
buildCounterInstantSnapshotAggregatedListWarning++; if (buildCounterInstantSnapshotAggregatedListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed313(); + o.data = buildUnnamed318(); o.message = 'foo'; } buildCounterInstantSnapshotAggregatedListWarning--; @@ -17811,7 +18037,7 @@ void checkInstantSnapshotAggregatedListWarning( o.code!, unittest.equals('foo'), ); - checkUnnamed313(o.data!); + checkUnnamed318(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -17826,11 +18052,11 @@ api.InstantSnapshotAggregatedList buildInstantSnapshotAggregatedList() { buildCounterInstantSnapshotAggregatedList++; if (buildCounterInstantSnapshotAggregatedList < 3) { o.id = 'foo'; - o.items = buildUnnamed311(); + o.items = buildUnnamed316(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; - o.unreachables = buildUnnamed312(); + o.unreachables = buildUnnamed317(); o.warning = buildInstantSnapshotAggregatedListWarning(); } buildCounterInstantSnapshotAggregatedList--; @@ -17844,7 +18070,7 @@ void checkInstantSnapshotAggregatedList(api.InstantSnapshotAggregatedList o) { o.id!, unittest.equals('foo'), ); - checkUnnamed311(o.items!); + checkUnnamed316(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -17857,18 +18083,18 @@ void checkInstantSnapshotAggregatedList(api.InstantSnapshotAggregatedList o) { o.selfLink!, unittest.equals('foo'), ); - checkUnnamed312(o.unreachables!); + checkUnnamed317(o.unreachables!); checkInstantSnapshotAggregatedListWarning(o.warning!); } buildCounterInstantSnapshotAggregatedList--; } -core.List buildUnnamed314() => [ +core.List buildUnnamed319() => [ buildInstantSnapshot(), buildInstantSnapshot(), ]; -void checkUnnamed314(core.List o) { +void checkUnnamed319(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkInstantSnapshot(o[0]); checkInstantSnapshot(o[1]); @@ -17901,12 +18127,12 @@ void checkInstantSnapshotListWarningData(api.InstantSnapshotListWarningData o) { buildCounterInstantSnapshotListWarningData--; } -core.List buildUnnamed315() => [ +core.List buildUnnamed320() => [ buildInstantSnapshotListWarningData(), buildInstantSnapshotListWarningData(), ]; -void checkUnnamed315(core.List o) { +void checkUnnamed320(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkInstantSnapshotListWarningData(o[0]); checkInstantSnapshotListWarningData(o[1]); @@ -17918,7 +18144,7 @@ api.InstantSnapshotListWarning buildInstantSnapshotListWarning() { buildCounterInstantSnapshotListWarning++; if (buildCounterInstantSnapshotListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed315(); + o.data = buildUnnamed320(); o.message = 'foo'; } buildCounterInstantSnapshotListWarning--; @@ -17932,7 +18158,7 @@ void checkInstantSnapshotListWarning(api.InstantSnapshotListWarning o) { o.code!, unittest.equals('foo'), ); - checkUnnamed315(o.data!); + checkUnnamed320(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -17947,7 +18173,7 @@ api.InstantSnapshotList buildInstantSnapshotList() { buildCounterInstantSnapshotList++; if (buildCounterInstantSnapshotList < 3) { o.id = 'foo'; - o.items = buildUnnamed314(); + o.items = buildUnnamed319(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; @@ -17964,7 +18190,7 @@ void checkInstantSnapshotList(api.InstantSnapshotList o) { o.id!, unittest.equals('foo'), ); - checkUnnamed314(o.items!); + checkUnnamed319(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -18004,12 +18230,12 @@ void checkInstantSnapshotResourceStatus(api.InstantSnapshotResourceStatus o) { 
buildCounterInstantSnapshotResourceStatus--; } -core.List buildUnnamed316() => [ +core.List buildUnnamed321() => [ buildInstantSnapshot(), buildInstantSnapshot(), ]; -void checkUnnamed316(core.List o) { +void checkUnnamed321(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkInstantSnapshot(o[0]); checkInstantSnapshot(o[1]); @@ -18044,12 +18270,12 @@ void checkInstantSnapshotsScopedListWarningData( buildCounterInstantSnapshotsScopedListWarningData--; } -core.List buildUnnamed317() => [ +core.List buildUnnamed322() => [ buildInstantSnapshotsScopedListWarningData(), buildInstantSnapshotsScopedListWarningData(), ]; -void checkUnnamed317(core.List o) { +void checkUnnamed322(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkInstantSnapshotsScopedListWarningData(o[0]); checkInstantSnapshotsScopedListWarningData(o[1]); @@ -18061,7 +18287,7 @@ api.InstantSnapshotsScopedListWarning buildInstantSnapshotsScopedListWarning() { buildCounterInstantSnapshotsScopedListWarning++; if (buildCounterInstantSnapshotsScopedListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed317(); + o.data = buildUnnamed322(); o.message = 'foo'; } buildCounterInstantSnapshotsScopedListWarning--; @@ -18076,7 +18302,7 @@ void checkInstantSnapshotsScopedListWarning( o.code!, unittest.equals('foo'), ); - checkUnnamed317(o.data!); + checkUnnamed322(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -18090,7 +18316,7 @@ api.InstantSnapshotsScopedList buildInstantSnapshotsScopedList() { final o = api.InstantSnapshotsScopedList(); buildCounterInstantSnapshotsScopedList++; if (buildCounterInstantSnapshotsScopedList < 3) { - o.instantSnapshots = buildUnnamed316(); + o.instantSnapshots = buildUnnamed321(); o.warning = buildInstantSnapshotsScopedListWarning(); } buildCounterInstantSnapshotsScopedList--; @@ -18100,7 +18326,7 @@ api.InstantSnapshotsScopedList buildInstantSnapshotsScopedList() { void checkInstantSnapshotsScopedList(api.InstantSnapshotsScopedList o) { buildCounterInstantSnapshotsScopedList++; if (buildCounterInstantSnapshotsScopedList < 3) { - checkUnnamed316(o.instantSnapshots!); + checkUnnamed321(o.instantSnapshots!); checkInstantSnapshotsScopedListWarning(o.warning!); } buildCounterInstantSnapshotsScopedList--; @@ -18133,12 +18359,12 @@ void checkInt64RangeMatch(api.Int64RangeMatch o) { buildCounterInt64RangeMatch--; } -core.List buildUnnamed318() => [ +core.List buildUnnamed323() => [ 'foo', 'foo', ]; -void checkUnnamed318(core.List o) { +void checkUnnamed323(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -18150,34 +18376,34 @@ void checkUnnamed318(core.List o) { ); } -core.List buildUnnamed319() => [ +core.List buildUnnamed324() => [ buildInterconnectCircuitInfo(), buildInterconnectCircuitInfo(), ]; -void checkUnnamed319(core.List o) { +void checkUnnamed324(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkInterconnectCircuitInfo(o[0]); checkInterconnectCircuitInfo(o[1]); } -core.List buildUnnamed320() => [ +core.List buildUnnamed325() => [ buildInterconnectOutageNotification(), buildInterconnectOutageNotification(), ]; -void checkUnnamed320(core.List o) { +void checkUnnamed325(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkInterconnectOutageNotification(o[0]); checkInterconnectOutageNotification(o[1]); } -core.List buildUnnamed321() => [ +core.List buildUnnamed326() => [ 'foo', 'foo', ]; -void checkUnnamed321(core.List o) { +void checkUnnamed326(core.List o) { unittest.expect(o, unittest.hasLength(2)); 
unittest.expect( o[0], @@ -18189,12 +18415,12 @@ void checkUnnamed321(core.List o) { ); } -core.Map buildUnnamed322() => { +core.Map buildUnnamed327() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed322(core.Map o) { +void checkUnnamed327(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -18206,12 +18432,12 @@ void checkUnnamed322(core.Map o) { ); } -core.List buildUnnamed323() => [ +core.List buildUnnamed328() => [ 'foo', 'foo', ]; -void checkUnnamed323(core.List o) { +void checkUnnamed328(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -18229,20 +18455,20 @@ api.Interconnect buildInterconnect() { buildCounterInterconnect++; if (buildCounterInterconnect < 3) { o.adminEnabled = true; - o.availableFeatures = buildUnnamed318(); - o.circuitInfos = buildUnnamed319(); + o.availableFeatures = buildUnnamed323(); + o.circuitInfos = buildUnnamed324(); o.creationTimestamp = 'foo'; o.customerName = 'foo'; o.description = 'foo'; - o.expectedOutages = buildUnnamed320(); + o.expectedOutages = buildUnnamed325(); o.googleIpAddress = 'foo'; o.googleReferenceId = 'foo'; o.id = 'foo'; - o.interconnectAttachments = buildUnnamed321(); + o.interconnectAttachments = buildUnnamed326(); o.interconnectType = 'foo'; o.kind = 'foo'; o.labelFingerprint = 'foo'; - o.labels = buildUnnamed322(); + o.labels = buildUnnamed327(); o.linkType = 'foo'; o.location = 'foo'; o.macsec = buildInterconnectMacsec(); @@ -18253,7 +18479,7 @@ api.Interconnect buildInterconnect() { o.peerIpAddress = 'foo'; o.provisionedLinkCount = 42; o.remoteLocation = 'foo'; - o.requestedFeatures = buildUnnamed323(); + o.requestedFeatures = buildUnnamed328(); o.requestedLinkCount = 42; o.satisfiesPzs = true; o.selfLink = 'foo'; @@ -18267,8 +18493,8 @@ void checkInterconnect(api.Interconnect o) { buildCounterInterconnect++; if (buildCounterInterconnect < 3) { unittest.expect(o.adminEnabled!, unittest.isTrue); - checkUnnamed318(o.availableFeatures!); - checkUnnamed319(o.circuitInfos!); + checkUnnamed323(o.availableFeatures!); + checkUnnamed324(o.circuitInfos!); unittest.expect( o.creationTimestamp!, unittest.equals('foo'), @@ -18281,7 +18507,7 @@ void checkInterconnect(api.Interconnect o) { o.description!, unittest.equals('foo'), ); - checkUnnamed320(o.expectedOutages!); + checkUnnamed325(o.expectedOutages!); unittest.expect( o.googleIpAddress!, unittest.equals('foo'), @@ -18294,7 +18520,7 @@ void checkInterconnect(api.Interconnect o) { o.id!, unittest.equals('foo'), ); - checkUnnamed321(o.interconnectAttachments!); + checkUnnamed326(o.interconnectAttachments!); unittest.expect( o.interconnectType!, unittest.equals('foo'), @@ -18307,7 +18533,7 @@ void checkInterconnect(api.Interconnect o) { o.labelFingerprint!, unittest.equals('foo'), ); - checkUnnamed322(o.labels!); + checkUnnamed327(o.labels!); unittest.expect( o.linkType!, unittest.equals('foo'), @@ -18342,7 +18568,7 @@ void checkInterconnect(api.Interconnect o) { o.remoteLocation!, unittest.equals('foo'), ); - checkUnnamed323(o.requestedFeatures!); + checkUnnamed328(o.requestedFeatures!); unittest.expect( o.requestedLinkCount!, unittest.equals(42), @@ -18360,12 +18586,12 @@ void checkInterconnect(api.Interconnect o) { buildCounterInterconnect--; } -core.List buildUnnamed324() => [ +core.List buildUnnamed329() => [ 'foo', 'foo', ]; -void checkUnnamed324(core.List o) { +void checkUnnamed329(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -18377,12 +18603,12 @@ void checkUnnamed324(core.List o) 
{ ); } -core.List buildUnnamed325() => [ +core.List buildUnnamed330() => [ 'foo', 'foo', ]; -void checkUnnamed325(core.List o) { +void checkUnnamed330(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -18394,12 +18620,12 @@ void checkUnnamed325(core.List o) { ); } -core.List buildUnnamed326() => [ +core.List buildUnnamed331() => [ 'foo', 'foo', ]; -void checkUnnamed326(core.List o) { +void checkUnnamed331(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -18411,12 +18637,12 @@ void checkUnnamed326(core.List o) { ); } -core.Map buildUnnamed327() => { +core.Map buildUnnamed332() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed327(core.Map o) { +void checkUnnamed332(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -18435,8 +18661,8 @@ api.InterconnectAttachment buildInterconnectAttachment() { if (buildCounterInterconnectAttachment < 3) { o.adminEnabled = true; o.bandwidth = 'foo'; - o.candidateIpv6Subnets = buildUnnamed324(); - o.candidateSubnets = buildUnnamed325(); + o.candidateIpv6Subnets = buildUnnamed329(); + o.candidateSubnets = buildUnnamed330(); o.cloudRouterIpAddress = 'foo'; o.cloudRouterIpv6Address = 'foo'; o.cloudRouterIpv6InterfaceId = 'foo'; @@ -18453,10 +18679,10 @@ api.InterconnectAttachment buildInterconnectAttachment() { o.googleReferenceId = 'foo'; o.id = 'foo'; o.interconnect = 'foo'; - o.ipsecInternalAddresses = buildUnnamed326(); + o.ipsecInternalAddresses = buildUnnamed331(); o.kind = 'foo'; o.labelFingerprint = 'foo'; - o.labels = buildUnnamed327(); + o.labels = buildUnnamed332(); o.mtu = 42; o.name = 'foo'; o.operationalStatus = 'foo'; @@ -18487,8 +18713,8 @@ void checkInterconnectAttachment(api.InterconnectAttachment o) { o.bandwidth!, unittest.equals('foo'), ); - checkUnnamed324(o.candidateIpv6Subnets!); - checkUnnamed325(o.candidateSubnets!); + checkUnnamed329(o.candidateIpv6Subnets!); + checkUnnamed330(o.candidateSubnets!); unittest.expect( o.cloudRouterIpAddress!, unittest.equals('foo'), @@ -18547,7 +18773,7 @@ void checkInterconnectAttachment(api.InterconnectAttachment o) { o.interconnect!, unittest.equals('foo'), ); - checkUnnamed326(o.ipsecInternalAddresses!); + checkUnnamed331(o.ipsecInternalAddresses!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -18556,7 +18782,7 @@ void checkInterconnectAttachment(api.InterconnectAttachment o) { o.labelFingerprint!, unittest.equals('foo'), ); - checkUnnamed327(o.labels!); + checkUnnamed332(o.labels!); unittest.expect( o.mtu!, unittest.equals(42), @@ -18621,24 +18847,24 @@ void checkInterconnectAttachment(api.InterconnectAttachment o) { } core.Map - buildUnnamed328() => { + buildUnnamed333() => { 'x': buildInterconnectAttachmentsScopedList(), 'y': buildInterconnectAttachmentsScopedList(), }; -void checkUnnamed328( +void checkUnnamed333( core.Map o) { unittest.expect(o, unittest.hasLength(2)); checkInterconnectAttachmentsScopedList(o['x']!); checkInterconnectAttachmentsScopedList(o['y']!); } -core.List buildUnnamed329() => [ +core.List buildUnnamed334() => [ 'foo', 'foo', ]; -void checkUnnamed329(core.List o) { +void checkUnnamed334(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -18680,12 +18906,12 @@ void checkInterconnectAttachmentAggregatedListWarningData( } core.List - buildUnnamed330() => [ + buildUnnamed335() => [ buildInterconnectAttachmentAggregatedListWarningData(), buildInterconnectAttachmentAggregatedListWarningData(), ]; -void checkUnnamed330( +void 
checkUnnamed335( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkInterconnectAttachmentAggregatedListWarningData(o[0]); @@ -18699,7 +18925,7 @@ api.InterconnectAttachmentAggregatedListWarning buildCounterInterconnectAttachmentAggregatedListWarning++; if (buildCounterInterconnectAttachmentAggregatedListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed330(); + o.data = buildUnnamed335(); o.message = 'foo'; } buildCounterInterconnectAttachmentAggregatedListWarning--; @@ -18714,7 +18940,7 @@ void checkInterconnectAttachmentAggregatedListWarning( o.code!, unittest.equals('foo'), ); - checkUnnamed330(o.data!); + checkUnnamed335(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -18730,11 +18956,11 @@ api.InterconnectAttachmentAggregatedList buildCounterInterconnectAttachmentAggregatedList++; if (buildCounterInterconnectAttachmentAggregatedList < 3) { o.id = 'foo'; - o.items = buildUnnamed328(); + o.items = buildUnnamed333(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; - o.unreachables = buildUnnamed329(); + o.unreachables = buildUnnamed334(); o.warning = buildInterconnectAttachmentAggregatedListWarning(); } buildCounterInterconnectAttachmentAggregatedList--; @@ -18749,7 +18975,7 @@ void checkInterconnectAttachmentAggregatedList( o.id!, unittest.equals('foo'), ); - checkUnnamed328(o.items!); + checkUnnamed333(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -18762,19 +18988,19 @@ void checkInterconnectAttachmentAggregatedList( o.selfLink!, unittest.equals('foo'), ); - checkUnnamed329(o.unreachables!); + checkUnnamed334(o.unreachables!); checkInterconnectAttachmentAggregatedListWarning(o.warning!); } buildCounterInterconnectAttachmentAggregatedList--; } core.List - buildUnnamed331() => [ + buildUnnamed336() => [ buildInterconnectAttachmentConfigurationConstraintsBgpPeerASNRange(), buildInterconnectAttachmentConfigurationConstraintsBgpPeerASNRange(), ]; -void checkUnnamed331( +void checkUnnamed336( core.List o) { unittest.expect(o, unittest.hasLength(2)); @@ -18789,7 +19015,7 @@ api.InterconnectAttachmentConfigurationConstraints buildCounterInterconnectAttachmentConfigurationConstraints++; if (buildCounterInterconnectAttachmentConfigurationConstraints < 3) { o.bgpMd5 = 'foo'; - o.bgpPeerAsnRanges = buildUnnamed331(); + o.bgpPeerAsnRanges = buildUnnamed336(); } buildCounterInterconnectAttachmentConfigurationConstraints--; return o; @@ -18803,7 +19029,7 @@ void checkInterconnectAttachmentConfigurationConstraints( o.bgpMd5!, unittest.equals('foo'), ); - checkUnnamed331(o.bgpPeerAsnRanges!); + checkUnnamed336(o.bgpPeerAsnRanges!); } buildCounterInterconnectAttachmentConfigurationConstraints--; } @@ -18841,12 +19067,12 @@ void checkInterconnectAttachmentConfigurationConstraintsBgpPeerASNRange( buildCounterInterconnectAttachmentConfigurationConstraintsBgpPeerASNRange--; } -core.List buildUnnamed332() => [ +core.List buildUnnamed337() => [ buildInterconnectAttachment(), buildInterconnectAttachment(), ]; -void checkUnnamed332(core.List o) { +void checkUnnamed337(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkInterconnectAttachment(o[0]); checkInterconnectAttachment(o[1]); @@ -18881,12 +19107,12 @@ void checkInterconnectAttachmentListWarningData( buildCounterInterconnectAttachmentListWarningData--; } -core.List buildUnnamed333() => [ +core.List buildUnnamed338() => [ buildInterconnectAttachmentListWarningData(), buildInterconnectAttachmentListWarningData(), ]; -void checkUnnamed333(core.List o) { +void 
checkUnnamed338(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkInterconnectAttachmentListWarningData(o[0]); checkInterconnectAttachmentListWarningData(o[1]); @@ -18898,7 +19124,7 @@ api.InterconnectAttachmentListWarning buildInterconnectAttachmentListWarning() { buildCounterInterconnectAttachmentListWarning++; if (buildCounterInterconnectAttachmentListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed333(); + o.data = buildUnnamed338(); o.message = 'foo'; } buildCounterInterconnectAttachmentListWarning--; @@ -18913,7 +19139,7 @@ void checkInterconnectAttachmentListWarning( o.code!, unittest.equals('foo'), ); - checkUnnamed333(o.data!); + checkUnnamed338(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -18928,7 +19154,7 @@ api.InterconnectAttachmentList buildInterconnectAttachmentList() { buildCounterInterconnectAttachmentList++; if (buildCounterInterconnectAttachmentList < 3) { o.id = 'foo'; - o.items = buildUnnamed332(); + o.items = buildUnnamed337(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; @@ -18945,7 +19171,7 @@ void checkInterconnectAttachmentList(api.InterconnectAttachmentList o) { o.id!, unittest.equals('foo'), ); - checkUnnamed332(o.items!); + checkUnnamed337(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -19020,12 +19246,12 @@ void checkInterconnectAttachmentPrivateInfo( buildCounterInterconnectAttachmentPrivateInfo--; } -core.List buildUnnamed334() => [ +core.List buildUnnamed339() => [ buildInterconnectAttachment(), buildInterconnectAttachment(), ]; -void checkUnnamed334(core.List o) { +void checkUnnamed339(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkInterconnectAttachment(o[0]); checkInterconnectAttachment(o[1]); @@ -19060,13 +19286,13 @@ void checkInterconnectAttachmentsScopedListWarningData( buildCounterInterconnectAttachmentsScopedListWarningData--; } -core.List buildUnnamed335() => +core.List buildUnnamed340() => [ buildInterconnectAttachmentsScopedListWarningData(), buildInterconnectAttachmentsScopedListWarningData(), ]; -void checkUnnamed335( +void checkUnnamed340( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkInterconnectAttachmentsScopedListWarningData(o[0]); @@ -19080,7 +19306,7 @@ api.InterconnectAttachmentsScopedListWarning buildCounterInterconnectAttachmentsScopedListWarning++; if (buildCounterInterconnectAttachmentsScopedListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed335(); + o.data = buildUnnamed340(); o.message = 'foo'; } buildCounterInterconnectAttachmentsScopedListWarning--; @@ -19095,7 +19321,7 @@ void checkInterconnectAttachmentsScopedListWarning( o.code!, unittest.equals('foo'), ); - checkUnnamed335(o.data!); + checkUnnamed340(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -19109,7 +19335,7 @@ api.InterconnectAttachmentsScopedList buildInterconnectAttachmentsScopedList() { final o = api.InterconnectAttachmentsScopedList(); buildCounterInterconnectAttachmentsScopedList++; if (buildCounterInterconnectAttachmentsScopedList < 3) { - o.interconnectAttachments = buildUnnamed334(); + o.interconnectAttachments = buildUnnamed339(); o.warning = buildInterconnectAttachmentsScopedListWarning(); } buildCounterInterconnectAttachmentsScopedList--; @@ -19120,7 +19346,7 @@ void checkInterconnectAttachmentsScopedList( api.InterconnectAttachmentsScopedList o) { buildCounterInterconnectAttachmentsScopedList++; if (buildCounterInterconnectAttachmentsScopedList < 3) { - checkUnnamed334(o.interconnectAttachments!); + 
checkUnnamed339(o.interconnectAttachments!); checkInterconnectAttachmentsScopedListWarning(o.warning!); } buildCounterInterconnectAttachmentsScopedList--; @@ -19158,23 +19384,23 @@ void checkInterconnectCircuitInfo(api.InterconnectCircuitInfo o) { buildCounterInterconnectCircuitInfo--; } -core.List buildUnnamed336() => [ +core.List buildUnnamed341() => [ buildInterconnectDiagnosticsARPEntry(), buildInterconnectDiagnosticsARPEntry(), ]; -void checkUnnamed336(core.List o) { +void checkUnnamed341(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkInterconnectDiagnosticsARPEntry(o[0]); checkInterconnectDiagnosticsARPEntry(o[1]); } -core.List buildUnnamed337() => [ +core.List buildUnnamed342() => [ buildInterconnectDiagnosticsLinkStatus(), buildInterconnectDiagnosticsLinkStatus(), ]; -void checkUnnamed337(core.List o) { +void checkUnnamed342(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkInterconnectDiagnosticsLinkStatus(o[0]); checkInterconnectDiagnosticsLinkStatus(o[1]); @@ -19185,10 +19411,10 @@ api.InterconnectDiagnostics buildInterconnectDiagnostics() { final o = api.InterconnectDiagnostics(); buildCounterInterconnectDiagnostics++; if (buildCounterInterconnectDiagnostics < 3) { - o.arpCaches = buildUnnamed336(); + o.arpCaches = buildUnnamed341(); o.bundleAggregationType = 'foo'; o.bundleOperationalStatus = 'foo'; - o.links = buildUnnamed337(); + o.links = buildUnnamed342(); o.macAddress = 'foo'; } buildCounterInterconnectDiagnostics--; @@ -19198,7 +19424,7 @@ api.InterconnectDiagnostics buildInterconnectDiagnostics() { void checkInterconnectDiagnostics(api.InterconnectDiagnostics o) { buildCounterInterconnectDiagnostics++; if (buildCounterInterconnectDiagnostics < 3) { - checkUnnamed336(o.arpCaches!); + checkUnnamed341(o.arpCaches!); unittest.expect( o.bundleAggregationType!, unittest.equals('foo'), @@ -19207,7 +19433,7 @@ void checkInterconnectDiagnostics(api.InterconnectDiagnostics o) { o.bundleOperationalStatus!, unittest.equals('foo'), ); - checkUnnamed337(o.links!); + checkUnnamed342(o.links!); unittest.expect( o.macAddress!, unittest.equals('foo'), @@ -19307,12 +19533,12 @@ void checkInterconnectDiagnosticsLinkOpticalPower( buildCounterInterconnectDiagnosticsLinkOpticalPower--; } -core.List buildUnnamed338() => [ +core.List buildUnnamed343() => [ buildInterconnectDiagnosticsARPEntry(), buildInterconnectDiagnosticsARPEntry(), ]; -void checkUnnamed338(core.List o) { +void checkUnnamed343(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkInterconnectDiagnosticsARPEntry(o[0]); checkInterconnectDiagnosticsARPEntry(o[1]); @@ -19323,7 +19549,7 @@ api.InterconnectDiagnosticsLinkStatus buildInterconnectDiagnosticsLinkStatus() { final o = api.InterconnectDiagnosticsLinkStatus(); buildCounterInterconnectDiagnosticsLinkStatus++; if (buildCounterInterconnectDiagnosticsLinkStatus < 3) { - o.arpCaches = buildUnnamed338(); + o.arpCaches = buildUnnamed343(); o.circuitId = 'foo'; o.googleDemarc = 'foo'; o.lacpStatus = buildInterconnectDiagnosticsLinkLACPStatus(); @@ -19340,7 +19566,7 @@ void checkInterconnectDiagnosticsLinkStatus( api.InterconnectDiagnosticsLinkStatus o) { buildCounterInterconnectDiagnosticsLinkStatus++; if (buildCounterInterconnectDiagnosticsLinkStatus < 3) { - checkUnnamed338(o.arpCaches!); + checkUnnamed343(o.arpCaches!); unittest.expect( o.circuitId!, unittest.equals('foo'), @@ -19387,12 +19613,12 @@ void checkInterconnectDiagnosticsMacsecStatus( buildCounterInterconnectDiagnosticsMacsecStatus--; } -core.List buildUnnamed339() => [ 
+core.List buildUnnamed344() => [ buildInterconnect(), buildInterconnect(), ]; -void checkUnnamed339(core.List o) { +void checkUnnamed344(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkInterconnect(o[0]); checkInterconnect(o[1]); @@ -19425,12 +19651,12 @@ void checkInterconnectListWarningData(api.InterconnectListWarningData o) { buildCounterInterconnectListWarningData--; } -core.List buildUnnamed340() => [ +core.List buildUnnamed345() => [ buildInterconnectListWarningData(), buildInterconnectListWarningData(), ]; -void checkUnnamed340(core.List o) { +void checkUnnamed345(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkInterconnectListWarningData(o[0]); checkInterconnectListWarningData(o[1]); @@ -19442,7 +19668,7 @@ api.InterconnectListWarning buildInterconnectListWarning() { buildCounterInterconnectListWarning++; if (buildCounterInterconnectListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed340(); + o.data = buildUnnamed345(); o.message = 'foo'; } buildCounterInterconnectListWarning--; @@ -19456,7 +19682,7 @@ void checkInterconnectListWarning(api.InterconnectListWarning o) { o.code!, unittest.equals('foo'), ); - checkUnnamed340(o.data!); + checkUnnamed345(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -19471,7 +19697,7 @@ api.InterconnectList buildInterconnectList() { buildCounterInterconnectList++; if (buildCounterInterconnectList < 3) { o.id = 'foo'; - o.items = buildUnnamed339(); + o.items = buildUnnamed344(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; @@ -19488,7 +19714,7 @@ void checkInterconnectList(api.InterconnectList o) { o.id!, unittest.equals('foo'), ); - checkUnnamed339(o.items!); + checkUnnamed344(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -19506,12 +19732,12 @@ void checkInterconnectList(api.InterconnectList o) { buildCounterInterconnectList--; } -core.List buildUnnamed341() => [ +core.List buildUnnamed346() => [ 'foo', 'foo', ]; -void checkUnnamed341(core.List o) { +void checkUnnamed346(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -19523,12 +19749,12 @@ void checkUnnamed341(core.List o) { ); } -core.List buildUnnamed342() => [ +core.List buildUnnamed347() => [ 'foo', 'foo', ]; -void checkUnnamed342(core.List o) { +void checkUnnamed347(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -19540,12 +19766,12 @@ void checkUnnamed342(core.List o) { ); } -core.List buildUnnamed343() => [ +core.List buildUnnamed348() => [ buildInterconnectLocationRegionInfo(), buildInterconnectLocationRegionInfo(), ]; -void checkUnnamed343(core.List o) { +void checkUnnamed348(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkInterconnectLocationRegionInfo(o[0]); checkInterconnectLocationRegionInfo(o[1]); @@ -19558,8 +19784,8 @@ api.InterconnectLocation buildInterconnectLocation() { if (buildCounterInterconnectLocation < 3) { o.address = 'foo'; o.availabilityZone = 'foo'; - o.availableFeatures = buildUnnamed341(); - o.availableLinkTypes = buildUnnamed342(); + o.availableFeatures = buildUnnamed346(); + o.availableLinkTypes = buildUnnamed347(); o.city = 'foo'; o.continent = 'foo'; o.creationTimestamp = 'foo'; @@ -19570,7 +19796,7 @@ api.InterconnectLocation buildInterconnectLocation() { o.kind = 'foo'; o.name = 'foo'; o.peeringdbFacilityId = 'foo'; - o.regionInfos = buildUnnamed343(); + o.regionInfos = buildUnnamed348(); o.selfLink = 'foo'; o.status = 'foo'; o.supportsPzs = true; @@ -19590,8 +19816,8 @@ void 
checkInterconnectLocation(api.InterconnectLocation o) { o.availabilityZone!, unittest.equals('foo'), ); - checkUnnamed341(o.availableFeatures!); - checkUnnamed342(o.availableLinkTypes!); + checkUnnamed346(o.availableFeatures!); + checkUnnamed347(o.availableLinkTypes!); unittest.expect( o.city!, unittest.equals('foo'), @@ -19632,7 +19858,7 @@ void checkInterconnectLocation(api.InterconnectLocation o) { o.peeringdbFacilityId!, unittest.equals('foo'), ); - checkUnnamed343(o.regionInfos!); + checkUnnamed348(o.regionInfos!); unittest.expect( o.selfLink!, unittest.equals('foo'), @@ -19646,12 +19872,12 @@ void checkInterconnectLocation(api.InterconnectLocation o) { buildCounterInterconnectLocation--; } -core.List buildUnnamed344() => [ +core.List buildUnnamed349() => [ buildInterconnectLocation(), buildInterconnectLocation(), ]; -void checkUnnamed344(core.List o) { +void checkUnnamed349(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkInterconnectLocation(o[0]); checkInterconnectLocation(o[1]); @@ -19686,12 +19912,12 @@ void checkInterconnectLocationListWarningData( buildCounterInterconnectLocationListWarningData--; } -core.List buildUnnamed345() => [ +core.List buildUnnamed350() => [ buildInterconnectLocationListWarningData(), buildInterconnectLocationListWarningData(), ]; -void checkUnnamed345(core.List o) { +void checkUnnamed350(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkInterconnectLocationListWarningData(o[0]); checkInterconnectLocationListWarningData(o[1]); @@ -19703,7 +19929,7 @@ api.InterconnectLocationListWarning buildInterconnectLocationListWarning() { buildCounterInterconnectLocationListWarning++; if (buildCounterInterconnectLocationListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed345(); + o.data = buildUnnamed350(); o.message = 'foo'; } buildCounterInterconnectLocationListWarning--; @@ -19718,7 +19944,7 @@ void checkInterconnectLocationListWarning( o.code!, unittest.equals('foo'), ); - checkUnnamed345(o.data!); + checkUnnamed350(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -19733,7 +19959,7 @@ api.InterconnectLocationList buildInterconnectLocationList() { buildCounterInterconnectLocationList++; if (buildCounterInterconnectLocationList < 3) { o.id = 'foo'; - o.items = buildUnnamed344(); + o.items = buildUnnamed349(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; @@ -19750,7 +19976,7 @@ void checkInterconnectLocationList(api.InterconnectLocationList o) { o.id!, unittest.equals('foo'), ); - checkUnnamed344(o.items!); + checkUnnamed349(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -19800,12 +20026,12 @@ void checkInterconnectLocationRegionInfo(api.InterconnectLocationRegionInfo o) { buildCounterInterconnectLocationRegionInfo--; } -core.List buildUnnamed346() => [ +core.List buildUnnamed351() => [ buildInterconnectMacsecPreSharedKey(), buildInterconnectMacsecPreSharedKey(), ]; -void checkUnnamed346(core.List o) { +void checkUnnamed351(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkInterconnectMacsecPreSharedKey(o[0]); checkInterconnectMacsecPreSharedKey(o[1]); @@ -19817,7 +20043,7 @@ api.InterconnectMacsec buildInterconnectMacsec() { buildCounterInterconnectMacsec++; if (buildCounterInterconnectMacsec < 3) { o.failOpen = true; - o.preSharedKeys = buildUnnamed346(); + o.preSharedKeys = buildUnnamed351(); } buildCounterInterconnectMacsec--; return o; @@ -19827,17 +20053,17 @@ void checkInterconnectMacsec(api.InterconnectMacsec o) { buildCounterInterconnectMacsec++; if 
(buildCounterInterconnectMacsec < 3) { unittest.expect(o.failOpen!, unittest.isTrue); - checkUnnamed346(o.preSharedKeys!); + checkUnnamed351(o.preSharedKeys!); } buildCounterInterconnectMacsec--; } -core.List buildUnnamed347() => [ +core.List buildUnnamed352() => [ buildInterconnectMacsecConfigPreSharedKey(), buildInterconnectMacsecConfigPreSharedKey(), ]; -void checkUnnamed347(core.List o) { +void checkUnnamed352(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkInterconnectMacsecConfigPreSharedKey(o[0]); checkInterconnectMacsecConfigPreSharedKey(o[1]); @@ -19848,7 +20074,7 @@ api.InterconnectMacsecConfig buildInterconnectMacsecConfig() { final o = api.InterconnectMacsecConfig(); buildCounterInterconnectMacsecConfig++; if (buildCounterInterconnectMacsecConfig < 3) { - o.preSharedKeys = buildUnnamed347(); + o.preSharedKeys = buildUnnamed352(); } buildCounterInterconnectMacsecConfig--; return o; @@ -19857,7 +20083,7 @@ api.InterconnectMacsecConfig buildInterconnectMacsecConfig() { void checkInterconnectMacsecConfig(api.InterconnectMacsecConfig o) { buildCounterInterconnectMacsecConfig++; if (buildCounterInterconnectMacsecConfig < 3) { - checkUnnamed347(o.preSharedKeys!); + checkUnnamed352(o.preSharedKeys!); } buildCounterInterconnectMacsecConfig--; } @@ -19928,12 +20154,12 @@ void checkInterconnectMacsecPreSharedKey(api.InterconnectMacsecPreSharedKey o) { buildCounterInterconnectMacsecPreSharedKey--; } -core.List buildUnnamed348() => [ +core.List buildUnnamed353() => [ 'foo', 'foo', ]; -void checkUnnamed348(core.List o) { +void checkUnnamed353(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -19950,7 +20176,7 @@ api.InterconnectOutageNotification buildInterconnectOutageNotification() { final o = api.InterconnectOutageNotification(); buildCounterInterconnectOutageNotification++; if (buildCounterInterconnectOutageNotification < 3) { - o.affectedCircuits = buildUnnamed348(); + o.affectedCircuits = buildUnnamed353(); o.description = 'foo'; o.endTime = 'foo'; o.issueType = 'foo'; @@ -19966,7 +20192,7 @@ api.InterconnectOutageNotification buildInterconnectOutageNotification() { void checkInterconnectOutageNotification(api.InterconnectOutageNotification o) { buildCounterInterconnectOutageNotification++; if (buildCounterInterconnectOutageNotification < 3) { - checkUnnamed348(o.affectedCircuits!); + checkUnnamed353(o.affectedCircuits!); unittest.expect( o.description!, unittest.equals('foo'), @@ -20000,12 +20226,12 @@ void checkInterconnectOutageNotification(api.InterconnectOutageNotification o) { } core.List - buildUnnamed349() => [ + buildUnnamed354() => [ buildInterconnectRemoteLocationPermittedConnections(), buildInterconnectRemoteLocationPermittedConnections(), ]; -void checkUnnamed349( +void checkUnnamed354( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkInterconnectRemoteLocationPermittedConnections(o[0]); @@ -20034,7 +20260,7 @@ api.InterconnectRemoteLocation buildInterconnectRemoteLocation() { o.maxLagSize10Gbps = 42; o.name = 'foo'; o.peeringdbFacilityId = 'foo'; - o.permittedConnections = buildUnnamed349(); + o.permittedConnections = buildUnnamed354(); o.remoteService = 'foo'; o.selfLink = 'foo'; o.status = 'foo'; @@ -20105,7 +20331,7 @@ void checkInterconnectRemoteLocation(api.InterconnectRemoteLocation o) { o.peeringdbFacilityId!, unittest.equals('foo'), ); - checkUnnamed349(o.permittedConnections!); + checkUnnamed354(o.permittedConnections!); unittest.expect( o.remoteService!, unittest.equals('foo'), @@ -20184,12 +20410,12 
@@ void checkInterconnectRemoteLocationConstraintsSubnetLengthRange( buildCounterInterconnectRemoteLocationConstraintsSubnetLengthRange--; } -core.List buildUnnamed350() => [ +core.List buildUnnamed355() => [ buildInterconnectRemoteLocation(), buildInterconnectRemoteLocation(), ]; -void checkUnnamed350(core.List o) { +void checkUnnamed355(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkInterconnectRemoteLocation(o[0]); checkInterconnectRemoteLocation(o[1]); @@ -20224,12 +20450,12 @@ void checkInterconnectRemoteLocationListWarningData( buildCounterInterconnectRemoteLocationListWarningData--; } -core.List buildUnnamed351() => [ +core.List buildUnnamed356() => [ buildInterconnectRemoteLocationListWarningData(), buildInterconnectRemoteLocationListWarningData(), ]; -void checkUnnamed351( +void checkUnnamed356( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkInterconnectRemoteLocationListWarningData(o[0]); @@ -20243,7 +20469,7 @@ api.InterconnectRemoteLocationListWarning buildCounterInterconnectRemoteLocationListWarning++; if (buildCounterInterconnectRemoteLocationListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed351(); + o.data = buildUnnamed356(); o.message = 'foo'; } buildCounterInterconnectRemoteLocationListWarning--; @@ -20258,7 +20484,7 @@ void checkInterconnectRemoteLocationListWarning( o.code!, unittest.equals('foo'), ); - checkUnnamed351(o.data!); + checkUnnamed356(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -20273,7 +20499,7 @@ api.InterconnectRemoteLocationList buildInterconnectRemoteLocationList() { buildCounterInterconnectRemoteLocationList++; if (buildCounterInterconnectRemoteLocationList < 3) { o.id = 'foo'; - o.items = buildUnnamed350(); + o.items = buildUnnamed355(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; @@ -20290,7 +20516,7 @@ void checkInterconnectRemoteLocationList(api.InterconnectRemoteLocationList o) { o.id!, unittest.equals('foo'), ); - checkUnnamed350(o.items!); + checkUnnamed355(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -20437,12 +20663,12 @@ void checkLicense(api.License o) { buildCounterLicense--; } -core.List buildUnnamed352() => [ +core.List buildUnnamed357() => [ buildLicenseCodeLicenseAlias(), buildLicenseCodeLicenseAlias(), ]; -void checkUnnamed352(core.List o) { +void checkUnnamed357(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkLicenseCodeLicenseAlias(o[0]); checkLicenseCodeLicenseAlias(o[1]); @@ -20457,7 +20683,7 @@ api.LicenseCode buildLicenseCode() { o.description = 'foo'; o.id = 'foo'; o.kind = 'foo'; - o.licenseAlias = buildUnnamed352(); + o.licenseAlias = buildUnnamed357(); o.name = 'foo'; o.selfLink = 'foo'; o.state = 'foo'; @@ -20486,7 +20712,7 @@ void checkLicenseCode(api.LicenseCode o) { o.kind!, unittest.equals('foo'), ); - checkUnnamed352(o.licenseAlias!); + checkUnnamed357(o.licenseAlias!); unittest.expect( o.name!, unittest.equals('foo'), @@ -20590,12 +20816,12 @@ void checkLicenseResourceRequirements(api.LicenseResourceRequirements o) { buildCounterLicenseResourceRequirements--; } -core.List buildUnnamed353() => [ +core.List buildUnnamed358() => [ buildLicense(), buildLicense(), ]; -void checkUnnamed353(core.List o) { +void checkUnnamed358(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkLicense(o[0]); checkLicense(o[1]); @@ -20629,12 +20855,12 @@ void checkLicensesListResponseWarningData( buildCounterLicensesListResponseWarningData--; } -core.List buildUnnamed354() => [ +core.List buildUnnamed359() => [ 
buildLicensesListResponseWarningData(), buildLicensesListResponseWarningData(), ]; -void checkUnnamed354(core.List o) { +void checkUnnamed359(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkLicensesListResponseWarningData(o[0]); checkLicensesListResponseWarningData(o[1]); @@ -20646,7 +20872,7 @@ api.LicensesListResponseWarning buildLicensesListResponseWarning() { buildCounterLicensesListResponseWarning++; if (buildCounterLicensesListResponseWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed354(); + o.data = buildUnnamed359(); o.message = 'foo'; } buildCounterLicensesListResponseWarning--; @@ -20660,7 +20886,7 @@ void checkLicensesListResponseWarning(api.LicensesListResponseWarning o) { o.code!, unittest.equals('foo'), ); - checkUnnamed354(o.data!); + checkUnnamed359(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -20675,7 +20901,7 @@ api.LicensesListResponse buildLicensesListResponse() { buildCounterLicensesListResponse++; if (buildCounterLicensesListResponse < 3) { o.id = 'foo'; - o.items = buildUnnamed353(); + o.items = buildUnnamed358(); o.nextPageToken = 'foo'; o.selfLink = 'foo'; o.warning = buildLicensesListResponseWarning(); @@ -20691,7 +20917,7 @@ void checkLicensesListResponse(api.LicensesListResponse o) { o.id!, unittest.equals('foo'), ); - checkUnnamed353(o.items!); + checkUnnamed358(o.items!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -20764,12 +20990,12 @@ void checkLocalizedMessage(api.LocalizedMessage o) { buildCounterLocalizedMessage--; } -core.Map buildUnnamed355() => { +core.Map buildUnnamed360() => { 'x': buildLocationPolicyLocation(), 'y': buildLocationPolicyLocation(), }; -void checkUnnamed355(core.Map o) { +void checkUnnamed360(core.Map o) { unittest.expect(o, unittest.hasLength(2)); checkLocationPolicyLocation(o['x']!); checkLocationPolicyLocation(o['y']!); @@ -20780,7 +21006,7 @@ api.LocationPolicy buildLocationPolicy() { final o = api.LocationPolicy(); buildCounterLocationPolicy++; if (buildCounterLocationPolicy < 3) { - o.locations = buildUnnamed355(); + o.locations = buildUnnamed360(); o.targetShape = 'foo'; } buildCounterLocationPolicy--; @@ -20790,7 +21016,7 @@ api.LocationPolicy buildLocationPolicy() { void checkLocationPolicy(api.LocationPolicy o) { buildCounterLocationPolicy++; if (buildCounterLocationPolicy < 3) { - checkUnnamed355(o.locations!); + checkUnnamed360(o.locations!); unittest.expect( o.targetShape!, unittest.equals('foo'), @@ -20846,170 +21072,34 @@ void checkLocationPolicyLocationConstraints( buildCounterLocationPolicyLocationConstraints--; } -core.int buildCounterLogConfig = 0; -api.LogConfig buildLogConfig() { - final o = api.LogConfig(); - buildCounterLogConfig++; - if (buildCounterLogConfig < 3) { - o.cloudAudit = buildLogConfigCloudAuditOptions(); - o.counter = buildLogConfigCounterOptions(); - o.dataAccess = buildLogConfigDataAccessOptions(); - } - buildCounterLogConfig--; - return o; -} - -void checkLogConfig(api.LogConfig o) { - buildCounterLogConfig++; - if (buildCounterLogConfig < 3) { - checkLogConfigCloudAuditOptions(o.cloudAudit!); - checkLogConfigCounterOptions(o.counter!); - checkLogConfigDataAccessOptions(o.dataAccess!); - } - buildCounterLogConfig--; -} - -core.int buildCounterLogConfigCloudAuditOptions = 0; -api.LogConfigCloudAuditOptions buildLogConfigCloudAuditOptions() { - final o = api.LogConfigCloudAuditOptions(); - buildCounterLogConfigCloudAuditOptions++; - if (buildCounterLogConfigCloudAuditOptions < 3) { - o.logName = 'foo'; - } - 
buildCounterLogConfigCloudAuditOptions--; - return o; -} - -void checkLogConfigCloudAuditOptions(api.LogConfigCloudAuditOptions o) { - buildCounterLogConfigCloudAuditOptions++; - if (buildCounterLogConfigCloudAuditOptions < 3) { - unittest.expect( - o.logName!, - unittest.equals('foo'), - ); - } - buildCounterLogConfigCloudAuditOptions--; -} - -core.List buildUnnamed356() => [ - buildLogConfigCounterOptionsCustomField(), - buildLogConfigCounterOptionsCustomField(), - ]; - -void checkUnnamed356(core.List o) { - unittest.expect(o, unittest.hasLength(2)); - checkLogConfigCounterOptionsCustomField(o[0]); - checkLogConfigCounterOptionsCustomField(o[1]); -} - -core.int buildCounterLogConfigCounterOptions = 0; -api.LogConfigCounterOptions buildLogConfigCounterOptions() { - final o = api.LogConfigCounterOptions(); - buildCounterLogConfigCounterOptions++; - if (buildCounterLogConfigCounterOptions < 3) { - o.customFields = buildUnnamed356(); - o.field = 'foo'; - o.metric = 'foo'; - } - buildCounterLogConfigCounterOptions--; - return o; -} - -void checkLogConfigCounterOptions(api.LogConfigCounterOptions o) { - buildCounterLogConfigCounterOptions++; - if (buildCounterLogConfigCounterOptions < 3) { - checkUnnamed356(o.customFields!); - unittest.expect( - o.field!, - unittest.equals('foo'), - ); - unittest.expect( - o.metric!, - unittest.equals('foo'), - ); - } - buildCounterLogConfigCounterOptions--; -} - -core.int buildCounterLogConfigCounterOptionsCustomField = 0; -api.LogConfigCounterOptionsCustomField - buildLogConfigCounterOptionsCustomField() { - final o = api.LogConfigCounterOptionsCustomField(); - buildCounterLogConfigCounterOptionsCustomField++; - if (buildCounterLogConfigCounterOptionsCustomField < 3) { - o.name = 'foo'; - o.value = 'foo'; - } - buildCounterLogConfigCounterOptionsCustomField--; - return o; -} - -void checkLogConfigCounterOptionsCustomField( - api.LogConfigCounterOptionsCustomField o) { - buildCounterLogConfigCounterOptionsCustomField++; - if (buildCounterLogConfigCounterOptionsCustomField < 3) { - unittest.expect( - o.name!, - unittest.equals('foo'), - ); - unittest.expect( - o.value!, - unittest.equals('foo'), - ); - } - buildCounterLogConfigCounterOptionsCustomField--; -} - -core.int buildCounterLogConfigDataAccessOptions = 0; -api.LogConfigDataAccessOptions buildLogConfigDataAccessOptions() { - final o = api.LogConfigDataAccessOptions(); - buildCounterLogConfigDataAccessOptions++; - if (buildCounterLogConfigDataAccessOptions < 3) { - o.logMode = 'foo'; - } - buildCounterLogConfigDataAccessOptions--; - return o; -} - -void checkLogConfigDataAccessOptions(api.LogConfigDataAccessOptions o) { - buildCounterLogConfigDataAccessOptions++; - if (buildCounterLogConfigDataAccessOptions < 3) { - unittest.expect( - o.logMode!, - unittest.equals('foo'), - ); - } - buildCounterLogConfigDataAccessOptions--; -} - -core.List buildUnnamed357() => [ +core.List buildUnnamed361() => [ buildSavedDisk(), buildSavedDisk(), ]; -void checkUnnamed357(core.List o) { +void checkUnnamed361(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkSavedDisk(o[0]); checkSavedDisk(o[1]); } -core.List buildUnnamed358() => [ +core.List buildUnnamed362() => [ buildSourceDiskEncryptionKey(), buildSourceDiskEncryptionKey(), ]; -void checkUnnamed358(core.List o) { +void checkUnnamed362(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkSourceDiskEncryptionKey(o[0]); checkSourceDiskEncryptionKey(o[1]); } -core.List buildUnnamed359() => [ +core.List buildUnnamed363() => [ 'foo', 'foo', ]; -void 
checkUnnamed359(core.List o) { +void checkUnnamed363(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -21036,13 +21126,13 @@ api.MachineImage buildMachineImage() { o.name = 'foo'; o.satisfiesPzi = true; o.satisfiesPzs = true; - o.savedDisks = buildUnnamed357(); + o.savedDisks = buildUnnamed361(); o.selfLink = 'foo'; - o.sourceDiskEncryptionKeys = buildUnnamed358(); + o.sourceDiskEncryptionKeys = buildUnnamed362(); o.sourceInstance = 'foo'; o.sourceInstanceProperties = buildSourceInstanceProperties(); o.status = 'foo'; - o.storageLocations = buildUnnamed359(); + o.storageLocations = buildUnnamed363(); o.totalStorageBytes = 'foo'; } buildCounterMachineImage--; @@ -21077,12 +21167,12 @@ void checkMachineImage(api.MachineImage o) { ); unittest.expect(o.satisfiesPzi!, unittest.isTrue); unittest.expect(o.satisfiesPzs!, unittest.isTrue); - checkUnnamed357(o.savedDisks!); + checkUnnamed361(o.savedDisks!); unittest.expect( o.selfLink!, unittest.equals('foo'), ); - checkUnnamed358(o.sourceDiskEncryptionKeys!); + checkUnnamed362(o.sourceDiskEncryptionKeys!); unittest.expect( o.sourceInstance!, unittest.equals('foo'), @@ -21092,7 +21182,7 @@ void checkMachineImage(api.MachineImage o) { o.status!, unittest.equals('foo'), ); - checkUnnamed359(o.storageLocations!); + checkUnnamed363(o.storageLocations!); unittest.expect( o.totalStorageBytes!, unittest.equals('foo'), @@ -21101,12 +21191,12 @@ void checkMachineImage(api.MachineImage o) { buildCounterMachineImage--; } -core.List buildUnnamed360() => [ +core.List buildUnnamed364() => [ buildMachineImage(), buildMachineImage(), ]; -void checkUnnamed360(core.List o) { +void checkUnnamed364(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkMachineImage(o[0]); checkMachineImage(o[1]); @@ -21139,12 +21229,12 @@ void checkMachineImageListWarningData(api.MachineImageListWarningData o) { buildCounterMachineImageListWarningData--; } -core.List buildUnnamed361() => [ +core.List buildUnnamed365() => [ buildMachineImageListWarningData(), buildMachineImageListWarningData(), ]; -void checkUnnamed361(core.List o) { +void checkUnnamed365(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkMachineImageListWarningData(o[0]); checkMachineImageListWarningData(o[1]); @@ -21156,7 +21246,7 @@ api.MachineImageListWarning buildMachineImageListWarning() { buildCounterMachineImageListWarning++; if (buildCounterMachineImageListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed361(); + o.data = buildUnnamed365(); o.message = 'foo'; } buildCounterMachineImageListWarning--; @@ -21170,7 +21260,7 @@ void checkMachineImageListWarning(api.MachineImageListWarning o) { o.code!, unittest.equals('foo'), ); - checkUnnamed361(o.data!); + checkUnnamed365(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -21185,7 +21275,7 @@ api.MachineImageList buildMachineImageList() { buildCounterMachineImageList++; if (buildCounterMachineImageList < 3) { o.id = 'foo'; - o.items = buildUnnamed360(); + o.items = buildUnnamed364(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; @@ -21202,7 +21292,7 @@ void checkMachineImageList(api.MachineImageList o) { o.id!, unittest.equals('foo'), ); - checkUnnamed360(o.items!); + checkUnnamed364(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -21247,12 +21337,12 @@ void checkMachineTypeAccelerators(api.MachineTypeAccelerators o) { buildCounterMachineTypeAccelerators--; } -core.List buildUnnamed362() => [ +core.List buildUnnamed366() => [ buildMachineTypeAccelerators(), 
buildMachineTypeAccelerators(), ]; -void checkUnnamed362(core.List o) { +void checkUnnamed366(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkMachineTypeAccelerators(o[0]); checkMachineTypeAccelerators(o[1]); @@ -21280,12 +21370,12 @@ void checkMachineTypeScratchDisks(api.MachineTypeScratchDisks o) { buildCounterMachineTypeScratchDisks--; } -core.List buildUnnamed363() => [ +core.List buildUnnamed367() => [ buildMachineTypeScratchDisks(), buildMachineTypeScratchDisks(), ]; -void checkUnnamed363(core.List o) { +void checkUnnamed367(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkMachineTypeScratchDisks(o[0]); checkMachineTypeScratchDisks(o[1]); @@ -21296,7 +21386,7 @@ api.MachineType buildMachineType() { final o = api.MachineType(); buildCounterMachineType++; if (buildCounterMachineType < 3) { - o.accelerators = buildUnnamed362(); + o.accelerators = buildUnnamed366(); o.architecture = 'foo'; o.creationTimestamp = 'foo'; o.deprecated = buildDeprecationStatus(); @@ -21310,7 +21400,7 @@ api.MachineType buildMachineType() { o.maximumPersistentDisksSizeGb = 'foo'; o.memoryMb = 42; o.name = 'foo'; - o.scratchDisks = buildUnnamed363(); + o.scratchDisks = buildUnnamed367(); o.selfLink = 'foo'; o.zone = 'foo'; } @@ -21321,7 +21411,7 @@ api.MachineType buildMachineType() { void checkMachineType(api.MachineType o) { buildCounterMachineType++; if (buildCounterMachineType < 3) { - checkUnnamed362(o.accelerators!); + checkUnnamed366(o.accelerators!); unittest.expect( o.architecture!, unittest.equals('foo'), @@ -21368,7 +21458,7 @@ void checkMachineType(api.MachineType o) { o.name!, unittest.equals('foo'), ); - checkUnnamed363(o.scratchDisks!); + checkUnnamed367(o.scratchDisks!); unittest.expect( o.selfLink!, unittest.equals('foo'), @@ -21381,23 +21471,23 @@ void checkMachineType(api.MachineType o) { buildCounterMachineType--; } -core.Map buildUnnamed364() => { +core.Map buildUnnamed368() => { 'x': buildMachineTypesScopedList(), 'y': buildMachineTypesScopedList(), }; -void checkUnnamed364(core.Map o) { +void checkUnnamed368(core.Map o) { unittest.expect(o, unittest.hasLength(2)); checkMachineTypesScopedList(o['x']!); checkMachineTypesScopedList(o['y']!); } -core.List buildUnnamed365() => [ +core.List buildUnnamed369() => [ 'foo', 'foo', ]; -void checkUnnamed365(core.List o) { +void checkUnnamed369(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -21438,12 +21528,12 @@ void checkMachineTypeAggregatedListWarningData( buildCounterMachineTypeAggregatedListWarningData--; } -core.List buildUnnamed366() => [ +core.List buildUnnamed370() => [ buildMachineTypeAggregatedListWarningData(), buildMachineTypeAggregatedListWarningData(), ]; -void checkUnnamed366(core.List o) { +void checkUnnamed370(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkMachineTypeAggregatedListWarningData(o[0]); checkMachineTypeAggregatedListWarningData(o[1]); @@ -21455,7 +21545,7 @@ api.MachineTypeAggregatedListWarning buildMachineTypeAggregatedListWarning() { buildCounterMachineTypeAggregatedListWarning++; if (buildCounterMachineTypeAggregatedListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed366(); + o.data = buildUnnamed370(); o.message = 'foo'; } buildCounterMachineTypeAggregatedListWarning--; @@ -21470,7 +21560,7 @@ void checkMachineTypeAggregatedListWarning( o.code!, unittest.equals('foo'), ); - checkUnnamed366(o.data!); + checkUnnamed370(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -21485,11 +21575,11 @@ 
api.MachineTypeAggregatedList buildMachineTypeAggregatedList() { buildCounterMachineTypeAggregatedList++; if (buildCounterMachineTypeAggregatedList < 3) { o.id = 'foo'; - o.items = buildUnnamed364(); + o.items = buildUnnamed368(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; - o.unreachables = buildUnnamed365(); + o.unreachables = buildUnnamed369(); o.warning = buildMachineTypeAggregatedListWarning(); } buildCounterMachineTypeAggregatedList--; @@ -21503,7 +21593,7 @@ void checkMachineTypeAggregatedList(api.MachineTypeAggregatedList o) { o.id!, unittest.equals('foo'), ); - checkUnnamed364(o.items!); + checkUnnamed368(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -21516,18 +21606,18 @@ void checkMachineTypeAggregatedList(api.MachineTypeAggregatedList o) { o.selfLink!, unittest.equals('foo'), ); - checkUnnamed365(o.unreachables!); + checkUnnamed369(o.unreachables!); checkMachineTypeAggregatedListWarning(o.warning!); } buildCounterMachineTypeAggregatedList--; } -core.List buildUnnamed367() => [ +core.List buildUnnamed371() => [ buildMachineType(), buildMachineType(), ]; -void checkUnnamed367(core.List o) { +void checkUnnamed371(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkMachineType(o[0]); checkMachineType(o[1]); @@ -21560,12 +21650,12 @@ void checkMachineTypeListWarningData(api.MachineTypeListWarningData o) { buildCounterMachineTypeListWarningData--; } -core.List buildUnnamed368() => [ +core.List buildUnnamed372() => [ buildMachineTypeListWarningData(), buildMachineTypeListWarningData(), ]; -void checkUnnamed368(core.List o) { +void checkUnnamed372(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkMachineTypeListWarningData(o[0]); checkMachineTypeListWarningData(o[1]); @@ -21577,7 +21667,7 @@ api.MachineTypeListWarning buildMachineTypeListWarning() { buildCounterMachineTypeListWarning++; if (buildCounterMachineTypeListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed368(); + o.data = buildUnnamed372(); o.message = 'foo'; } buildCounterMachineTypeListWarning--; @@ -21591,7 +21681,7 @@ void checkMachineTypeListWarning(api.MachineTypeListWarning o) { o.code!, unittest.equals('foo'), ); - checkUnnamed368(o.data!); + checkUnnamed372(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -21606,7 +21696,7 @@ api.MachineTypeList buildMachineTypeList() { buildCounterMachineTypeList++; if (buildCounterMachineTypeList < 3) { o.id = 'foo'; - o.items = buildUnnamed367(); + o.items = buildUnnamed371(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; @@ -21623,7 +21713,7 @@ void checkMachineTypeList(api.MachineTypeList o) { o.id!, unittest.equals('foo'), ); - checkUnnamed367(o.items!); + checkUnnamed371(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -21641,12 +21731,12 @@ void checkMachineTypeList(api.MachineTypeList o) { buildCounterMachineTypeList--; } -core.List buildUnnamed369() => [ +core.List buildUnnamed373() => [ buildMachineType(), buildMachineType(), ]; -void checkUnnamed369(core.List o) { +void checkUnnamed373(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkMachineType(o[0]); checkMachineType(o[1]); @@ -21680,12 +21770,12 @@ void checkMachineTypesScopedListWarningData( buildCounterMachineTypesScopedListWarningData--; } -core.List buildUnnamed370() => [ +core.List buildUnnamed374() => [ buildMachineTypesScopedListWarningData(), buildMachineTypesScopedListWarningData(), ]; -void checkUnnamed370(core.List o) { +void checkUnnamed374(core.List o) { unittest.expect(o, 
unittest.hasLength(2)); checkMachineTypesScopedListWarningData(o[0]); checkMachineTypesScopedListWarningData(o[1]); @@ -21697,7 +21787,7 @@ api.MachineTypesScopedListWarning buildMachineTypesScopedListWarning() { buildCounterMachineTypesScopedListWarning++; if (buildCounterMachineTypesScopedListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed370(); + o.data = buildUnnamed374(); o.message = 'foo'; } buildCounterMachineTypesScopedListWarning--; @@ -21711,7 +21801,7 @@ void checkMachineTypesScopedListWarning(api.MachineTypesScopedListWarning o) { o.code!, unittest.equals('foo'), ); - checkUnnamed370(o.data!); + checkUnnamed374(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -21725,7 +21815,7 @@ api.MachineTypesScopedList buildMachineTypesScopedList() { final o = api.MachineTypesScopedList(); buildCounterMachineTypesScopedList++; if (buildCounterMachineTypesScopedList < 3) { - o.machineTypes = buildUnnamed369(); + o.machineTypes = buildUnnamed373(); o.warning = buildMachineTypesScopedListWarning(); } buildCounterMachineTypesScopedList--; @@ -21735,18 +21825,18 @@ api.MachineTypesScopedList buildMachineTypesScopedList() { void checkMachineTypesScopedList(api.MachineTypesScopedList o) { buildCounterMachineTypesScopedList++; if (buildCounterMachineTypesScopedList < 3) { - checkUnnamed369(o.machineTypes!); + checkUnnamed373(o.machineTypes!); checkMachineTypesScopedListWarning(o.warning!); } buildCounterMachineTypesScopedList--; } -core.List buildUnnamed371() => [ +core.List buildUnnamed375() => [ buildManagedInstanceInstanceHealth(), buildManagedInstanceInstanceHealth(), ]; -void checkUnnamed371(core.List o) { +void checkUnnamed375(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkManagedInstanceInstanceHealth(o[0]); checkManagedInstanceInstanceHealth(o[1]); @@ -21760,12 +21850,14 @@ api.ManagedInstance buildManagedInstance() { o.currentAction = 'foo'; o.id = 'foo'; o.instance = 'foo'; - o.instanceHealth = buildUnnamed371(); + o.instanceHealth = buildUnnamed375(); o.instanceStatus = 'foo'; o.lastAttempt = buildManagedInstanceLastAttempt(); o.name = 'foo'; o.preservedStateFromConfig = buildPreservedState(); o.preservedStateFromPolicy = buildPreservedState(); + o.propertiesFromFlexibilityPolicy = + buildManagedInstancePropertiesFromFlexibilityPolicy(); o.version = buildManagedInstanceVersion(); } buildCounterManagedInstance--; @@ -21787,7 +21879,7 @@ void checkManagedInstance(api.ManagedInstance o) { o.instance!, unittest.equals('foo'), ); - checkUnnamed371(o.instanceHealth!); + checkUnnamed375(o.instanceHealth!); unittest.expect( o.instanceStatus!, unittest.equals('foo'), @@ -21799,6 +21891,8 @@ void checkManagedInstance(api.ManagedInstance o) { ); checkPreservedState(o.preservedStateFromConfig!); checkPreservedState(o.preservedStateFromPolicy!); + checkManagedInstancePropertiesFromFlexibilityPolicy( + o.propertiesFromFlexibilityPolicy!); checkManagedInstanceVersion(o.version!); } buildCounterManagedInstance--; @@ -21859,12 +21953,12 @@ void checkManagedInstanceLastAttemptErrorsErrorsErrorDetails( } core.List - buildUnnamed372() => [ + buildUnnamed376() => [ buildManagedInstanceLastAttemptErrorsErrorsErrorDetails(), buildManagedInstanceLastAttemptErrorsErrorsErrorDetails(), ]; -void checkUnnamed372( +void checkUnnamed376( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkManagedInstanceLastAttemptErrorsErrorsErrorDetails(o[0]); @@ -21878,7 +21972,7 @@ api.ManagedInstanceLastAttemptErrorsErrors buildCounterManagedInstanceLastAttemptErrorsErrors++; if 
(buildCounterManagedInstanceLastAttemptErrorsErrors < 3) { o.code = 'foo'; - o.errorDetails = buildUnnamed372(); + o.errorDetails = buildUnnamed376(); o.location = 'foo'; o.message = 'foo'; } @@ -21894,7 +21988,7 @@ void checkManagedInstanceLastAttemptErrorsErrors( o.code!, unittest.equals('foo'), ); - checkUnnamed372(o.errorDetails!); + checkUnnamed376(o.errorDetails!); unittest.expect( o.location!, unittest.equals('foo'), @@ -21907,12 +22001,12 @@ void checkManagedInstanceLastAttemptErrorsErrors( buildCounterManagedInstanceLastAttemptErrorsErrors--; } -core.List buildUnnamed373() => [ +core.List buildUnnamed377() => [ buildManagedInstanceLastAttemptErrorsErrors(), buildManagedInstanceLastAttemptErrorsErrors(), ]; -void checkUnnamed373(core.List o) { +void checkUnnamed377(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkManagedInstanceLastAttemptErrorsErrors(o[0]); checkManagedInstanceLastAttemptErrorsErrors(o[1]); @@ -21923,7 +22017,7 @@ api.ManagedInstanceLastAttemptErrors buildManagedInstanceLastAttemptErrors() { final o = api.ManagedInstanceLastAttemptErrors(); buildCounterManagedInstanceLastAttemptErrors++; if (buildCounterManagedInstanceLastAttemptErrors < 3) { - o.errors = buildUnnamed373(); + o.errors = buildUnnamed377(); } buildCounterManagedInstanceLastAttemptErrors--; return o; @@ -21933,7 +22027,7 @@ void checkManagedInstanceLastAttemptErrors( api.ManagedInstanceLastAttemptErrors o) { buildCounterManagedInstanceLastAttemptErrors++; if (buildCounterManagedInstanceLastAttemptErrors < 3) { - checkUnnamed373(o.errors!); + checkUnnamed377(o.errors!); } buildCounterManagedInstanceLastAttemptErrors--; } @@ -21957,6 +22051,30 @@ void checkManagedInstanceLastAttempt(api.ManagedInstanceLastAttempt o) { buildCounterManagedInstanceLastAttempt--; } +core.int buildCounterManagedInstancePropertiesFromFlexibilityPolicy = 0; +api.ManagedInstancePropertiesFromFlexibilityPolicy + buildManagedInstancePropertiesFromFlexibilityPolicy() { + final o = api.ManagedInstancePropertiesFromFlexibilityPolicy(); + buildCounterManagedInstancePropertiesFromFlexibilityPolicy++; + if (buildCounterManagedInstancePropertiesFromFlexibilityPolicy < 3) { + o.machineType = 'foo'; + } + buildCounterManagedInstancePropertiesFromFlexibilityPolicy--; + return o; +} + +void checkManagedInstancePropertiesFromFlexibilityPolicy( + api.ManagedInstancePropertiesFromFlexibilityPolicy o) { + buildCounterManagedInstancePropertiesFromFlexibilityPolicy++; + if (buildCounterManagedInstancePropertiesFromFlexibilityPolicy < 3) { + unittest.expect( + o.machineType!, + unittest.equals('foo'), + ); + } + buildCounterManagedInstancePropertiesFromFlexibilityPolicy--; +} + core.int buildCounterManagedInstanceVersion = 0; api.ManagedInstanceVersion buildManagedInstanceVersion() { final o = api.ManagedInstanceVersion(); @@ -22011,12 +22129,12 @@ void checkMetadataItems(api.MetadataItems o) { buildCounterMetadataItems--; } -core.List buildUnnamed374() => [ +core.List buildUnnamed378() => [ buildMetadataItems(), buildMetadataItems(), ]; -void checkUnnamed374(core.List o) { +void checkUnnamed378(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkMetadataItems(o[0]); checkMetadataItems(o[1]); @@ -22028,7 +22146,7 @@ api.Metadata buildMetadata() { buildCounterMetadata++; if (buildCounterMetadata < 3) { o.fingerprint = 'foo'; - o.items = buildUnnamed374(); + o.items = buildUnnamed378(); o.kind = 'foo'; } buildCounterMetadata--; @@ -22042,7 +22160,7 @@ void checkMetadata(api.Metadata o) { o.fingerprint!, 
unittest.equals('foo'), ); - checkUnnamed374(o.items!); + checkUnnamed378(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -22051,12 +22169,12 @@ void checkMetadata(api.Metadata o) { buildCounterMetadata--; } -core.List buildUnnamed375() => [ +core.List buildUnnamed379() => [ buildMetadataFilterLabelMatch(), buildMetadataFilterLabelMatch(), ]; -void checkUnnamed375(core.List o) { +void checkUnnamed379(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkMetadataFilterLabelMatch(o[0]); checkMetadataFilterLabelMatch(o[1]); @@ -22067,7 +22185,7 @@ api.MetadataFilter buildMetadataFilter() { final o = api.MetadataFilter(); buildCounterMetadataFilter++; if (buildCounterMetadataFilter < 3) { - o.filterLabels = buildUnnamed375(); + o.filterLabels = buildUnnamed379(); o.filterMatchCriteria = 'foo'; } buildCounterMetadataFilter--; @@ -22077,7 +22195,7 @@ api.MetadataFilter buildMetadataFilter() { void checkMetadataFilter(api.MetadataFilter o) { buildCounterMetadataFilter++; if (buildCounterMetadataFilter < 3) { - checkUnnamed375(o.filterLabels!); + checkUnnamed379(o.filterLabels!); unittest.expect( o.filterMatchCriteria!, unittest.equals('foo'), @@ -22140,12 +22258,12 @@ void checkNamedPort(api.NamedPort o) { buildCounterNamedPort--; } -core.List buildUnnamed376() => [ +core.List buildUnnamed380() => [ buildNatIpInfoNatIpInfoMapping(), buildNatIpInfoNatIpInfoMapping(), ]; -void checkUnnamed376(core.List o) { +void checkUnnamed380(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkNatIpInfoNatIpInfoMapping(o[0]); checkNatIpInfoNatIpInfoMapping(o[1]); @@ -22156,7 +22274,7 @@ api.NatIpInfo buildNatIpInfo() { final o = api.NatIpInfo(); buildCounterNatIpInfo++; if (buildCounterNatIpInfo < 3) { - o.natIpInfoMappings = buildUnnamed376(); + o.natIpInfoMappings = buildUnnamed380(); o.natName = 'foo'; } buildCounterNatIpInfo--; @@ -22166,7 +22284,7 @@ api.NatIpInfo buildNatIpInfo() { void checkNatIpInfo(api.NatIpInfo o) { buildCounterNatIpInfo++; if (buildCounterNatIpInfo < 3) { - checkUnnamed376(o.natIpInfoMappings!); + checkUnnamed380(o.natIpInfoMappings!); unittest.expect( o.natName!, unittest.equals('foo'), @@ -22207,12 +22325,12 @@ void checkNatIpInfoNatIpInfoMapping(api.NatIpInfoNatIpInfoMapping o) { buildCounterNatIpInfoNatIpInfoMapping--; } -core.List buildUnnamed377() => [ +core.List buildUnnamed381() => [ buildNatIpInfo(), buildNatIpInfo(), ]; -void checkUnnamed377(core.List o) { +void checkUnnamed381(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkNatIpInfo(o[0]); checkNatIpInfo(o[1]); @@ -22223,7 +22341,7 @@ api.NatIpInfoResponse buildNatIpInfoResponse() { final o = api.NatIpInfoResponse(); buildCounterNatIpInfoResponse++; if (buildCounterNatIpInfoResponse < 3) { - o.result = buildUnnamed377(); + o.result = buildUnnamed381(); } buildCounterNatIpInfoResponse--; return o; @@ -22232,28 +22350,28 @@ api.NatIpInfoResponse buildNatIpInfoResponse() { void checkNatIpInfoResponse(api.NatIpInfoResponse o) { buildCounterNatIpInfoResponse++; if (buildCounterNatIpInfoResponse < 3) { - checkUnnamed377(o.result!); + checkUnnamed381(o.result!); } buildCounterNatIpInfoResponse--; } -core.List buildUnnamed378() => [ +core.List buildUnnamed382() => [ buildNetworkPeering(), buildNetworkPeering(), ]; -void checkUnnamed378(core.List o) { +void checkUnnamed382(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkNetworkPeering(o[0]); checkNetworkPeering(o[1]); } -core.List buildUnnamed379() => [ +core.List buildUnnamed383() => [ 'foo', 'foo', ]; -void 
checkUnnamed379(core.List o) { +void checkUnnamed383(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -22283,11 +22401,12 @@ api.Network buildNetwork() { o.mtu = 42; o.name = 'foo'; o.networkFirewallPolicyEnforcementOrder = 'foo'; - o.peerings = buildUnnamed378(); + o.networkProfile = 'foo'; + o.peerings = buildUnnamed382(); o.routingConfig = buildNetworkRoutingConfig(); o.selfLink = 'foo'; o.selfLinkWithId = 'foo'; - o.subnetworks = buildUnnamed379(); + o.subnetworks = buildUnnamed383(); } buildCounterNetwork--; return o; @@ -22342,7 +22461,11 @@ void checkNetwork(api.Network o) { o.networkFirewallPolicyEnforcementOrder!, unittest.equals('foo'), ); - checkUnnamed378(o.peerings!); + unittest.expect( + o.networkProfile!, + unittest.equals('foo'), + ); + checkUnnamed382(o.peerings!); checkNetworkRoutingConfig(o.routingConfig!); unittest.expect( o.selfLink!, @@ -22352,28 +22475,28 @@ void checkNetwork(api.Network o) { o.selfLinkWithId!, unittest.equals('foo'), ); - checkUnnamed379(o.subnetworks!); + checkUnnamed383(o.subnetworks!); } buildCounterNetwork--; } -core.List buildUnnamed380() => [ +core.List buildUnnamed384() => [ buildNetworkAttachmentConnectedEndpoint(), buildNetworkAttachmentConnectedEndpoint(), ]; -void checkUnnamed380(core.List o) { +void checkUnnamed384(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkNetworkAttachmentConnectedEndpoint(o[0]); checkNetworkAttachmentConnectedEndpoint(o[1]); } -core.List buildUnnamed381() => [ +core.List buildUnnamed385() => [ 'foo', 'foo', ]; -void checkUnnamed381(core.List o) { +void checkUnnamed385(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -22385,12 +22508,12 @@ void checkUnnamed381(core.List o) { ); } -core.List buildUnnamed382() => [ +core.List buildUnnamed386() => [ 'foo', 'foo', ]; -void checkUnnamed382(core.List o) { +void checkUnnamed386(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -22402,12 +22525,12 @@ void checkUnnamed382(core.List o) { ); } -core.List buildUnnamed383() => [ +core.List buildUnnamed387() => [ 'foo', 'foo', ]; -void checkUnnamed383(core.List o) { +void checkUnnamed387(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -22424,7 +22547,7 @@ api.NetworkAttachment buildNetworkAttachment() { final o = api.NetworkAttachment(); buildCounterNetworkAttachment++; if (buildCounterNetworkAttachment < 3) { - o.connectionEndpoints = buildUnnamed380(); + o.connectionEndpoints = buildUnnamed384(); o.connectionPreference = 'foo'; o.creationTimestamp = 'foo'; o.description = 'foo'; @@ -22433,12 +22556,12 @@ api.NetworkAttachment buildNetworkAttachment() { o.kind = 'foo'; o.name = 'foo'; o.network = 'foo'; - o.producerAcceptLists = buildUnnamed381(); - o.producerRejectLists = buildUnnamed382(); + o.producerAcceptLists = buildUnnamed385(); + o.producerRejectLists = buildUnnamed386(); o.region = 'foo'; o.selfLink = 'foo'; o.selfLinkWithId = 'foo'; - o.subnetworks = buildUnnamed383(); + o.subnetworks = buildUnnamed387(); } buildCounterNetworkAttachment--; return o; @@ -22447,7 +22570,7 @@ api.NetworkAttachment buildNetworkAttachment() { void checkNetworkAttachment(api.NetworkAttachment o) { buildCounterNetworkAttachment++; if (buildCounterNetworkAttachment < 3) { - checkUnnamed380(o.connectionEndpoints!); + checkUnnamed384(o.connectionEndpoints!); unittest.expect( o.connectionPreference!, unittest.equals('foo'), @@ -22480,8 +22603,8 @@ void 
checkNetworkAttachment(api.NetworkAttachment o) { o.network!, unittest.equals('foo'), ); - checkUnnamed381(o.producerAcceptLists!); - checkUnnamed382(o.producerRejectLists!); + checkUnnamed385(o.producerAcceptLists!); + checkUnnamed386(o.producerRejectLists!); unittest.expect( o.region!, unittest.equals('foo'), @@ -22494,17 +22617,17 @@ void checkNetworkAttachment(api.NetworkAttachment o) { o.selfLinkWithId!, unittest.equals('foo'), ); - checkUnnamed383(o.subnetworks!); + checkUnnamed387(o.subnetworks!); } buildCounterNetworkAttachment--; } -core.Map buildUnnamed384() => { +core.Map buildUnnamed388() => { 'x': buildNetworkAttachmentsScopedList(), 'y': buildNetworkAttachmentsScopedList(), }; -void checkUnnamed384( +void checkUnnamed388( core.Map o) { unittest.expect(o, unittest.hasLength(2)); checkNetworkAttachmentsScopedList(o['x']!); @@ -22540,12 +22663,12 @@ void checkNetworkAttachmentAggregatedListWarningData( buildCounterNetworkAttachmentAggregatedListWarningData--; } -core.List buildUnnamed385() => [ +core.List buildUnnamed389() => [ buildNetworkAttachmentAggregatedListWarningData(), buildNetworkAttachmentAggregatedListWarningData(), ]; -void checkUnnamed385( +void checkUnnamed389( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkNetworkAttachmentAggregatedListWarningData(o[0]); @@ -22559,7 +22682,7 @@ api.NetworkAttachmentAggregatedListWarning buildCounterNetworkAttachmentAggregatedListWarning++; if (buildCounterNetworkAttachmentAggregatedListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed385(); + o.data = buildUnnamed389(); o.message = 'foo'; } buildCounterNetworkAttachmentAggregatedListWarning--; @@ -22574,7 +22697,7 @@ void checkNetworkAttachmentAggregatedListWarning( o.code!, unittest.equals('foo'), ); - checkUnnamed385(o.data!); + checkUnnamed389(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -22589,7 +22712,7 @@ api.NetworkAttachmentAggregatedList buildNetworkAttachmentAggregatedList() { buildCounterNetworkAttachmentAggregatedList++; if (buildCounterNetworkAttachmentAggregatedList < 3) { o.id = 'foo'; - o.items = buildUnnamed384(); + o.items = buildUnnamed388(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; @@ -22607,7 +22730,7 @@ void checkNetworkAttachmentAggregatedList( o.id!, unittest.equals('foo'), ); - checkUnnamed384(o.items!); + checkUnnamed388(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -22625,12 +22748,12 @@ void checkNetworkAttachmentAggregatedList( buildCounterNetworkAttachmentAggregatedList--; } -core.List buildUnnamed386() => [ +core.List buildUnnamed390() => [ 'foo', 'foo', ]; -void checkUnnamed386(core.List o) { +void checkUnnamed390(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -22651,7 +22774,7 @@ api.NetworkAttachmentConnectedEndpoint o.ipAddress = 'foo'; o.ipv6Address = 'foo'; o.projectIdOrNum = 'foo'; - o.secondaryIpCidrRanges = buildUnnamed386(); + o.secondaryIpCidrRanges = buildUnnamed390(); o.status = 'foo'; o.subnetwork = 'foo'; o.subnetworkCidrRange = 'foo'; @@ -22676,7 +22799,7 @@ void checkNetworkAttachmentConnectedEndpoint( o.projectIdOrNum!, unittest.equals('foo'), ); - checkUnnamed386(o.secondaryIpCidrRanges!); + checkUnnamed390(o.secondaryIpCidrRanges!); unittest.expect( o.status!, unittest.equals('foo'), @@ -22693,12 +22816,12 @@ void checkNetworkAttachmentConnectedEndpoint( buildCounterNetworkAttachmentConnectedEndpoint--; } -core.List buildUnnamed387() => [ +core.List buildUnnamed391() => [ buildNetworkAttachment(), 
buildNetworkAttachment(), ]; -void checkUnnamed387(core.List o) { +void checkUnnamed391(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkNetworkAttachment(o[0]); checkNetworkAttachment(o[1]); @@ -22732,12 +22855,12 @@ void checkNetworkAttachmentListWarningData( buildCounterNetworkAttachmentListWarningData--; } -core.List buildUnnamed388() => [ +core.List buildUnnamed392() => [ buildNetworkAttachmentListWarningData(), buildNetworkAttachmentListWarningData(), ]; -void checkUnnamed388(core.List o) { +void checkUnnamed392(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkNetworkAttachmentListWarningData(o[0]); checkNetworkAttachmentListWarningData(o[1]); @@ -22749,7 +22872,7 @@ api.NetworkAttachmentListWarning buildNetworkAttachmentListWarning() { buildCounterNetworkAttachmentListWarning++; if (buildCounterNetworkAttachmentListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed388(); + o.data = buildUnnamed392(); o.message = 'foo'; } buildCounterNetworkAttachmentListWarning--; @@ -22763,7 +22886,7 @@ void checkNetworkAttachmentListWarning(api.NetworkAttachmentListWarning o) { o.code!, unittest.equals('foo'), ); - checkUnnamed388(o.data!); + checkUnnamed392(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -22778,7 +22901,7 @@ api.NetworkAttachmentList buildNetworkAttachmentList() { buildCounterNetworkAttachmentList++; if (buildCounterNetworkAttachmentList < 3) { o.id = 'foo'; - o.items = buildUnnamed387(); + o.items = buildUnnamed391(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; @@ -22795,7 +22918,7 @@ void checkNetworkAttachmentList(api.NetworkAttachmentList o) { o.id!, unittest.equals('foo'), ); - checkUnnamed387(o.items!); + checkUnnamed391(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -22813,12 +22936,12 @@ void checkNetworkAttachmentList(api.NetworkAttachmentList o) { buildCounterNetworkAttachmentList--; } -core.List buildUnnamed389() => [ +core.List buildUnnamed393() => [ buildNetworkAttachment(), buildNetworkAttachment(), ]; -void checkUnnamed389(core.List o) { +void checkUnnamed393(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkNetworkAttachment(o[0]); checkNetworkAttachment(o[1]); @@ -22853,12 +22976,12 @@ void checkNetworkAttachmentsScopedListWarningData( buildCounterNetworkAttachmentsScopedListWarningData--; } -core.List buildUnnamed390() => [ +core.List buildUnnamed394() => [ buildNetworkAttachmentsScopedListWarningData(), buildNetworkAttachmentsScopedListWarningData(), ]; -void checkUnnamed390(core.List o) { +void checkUnnamed394(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkNetworkAttachmentsScopedListWarningData(o[0]); checkNetworkAttachmentsScopedListWarningData(o[1]); @@ -22871,7 +22994,7 @@ api.NetworkAttachmentsScopedListWarning buildCounterNetworkAttachmentsScopedListWarning++; if (buildCounterNetworkAttachmentsScopedListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed390(); + o.data = buildUnnamed394(); o.message = 'foo'; } buildCounterNetworkAttachmentsScopedListWarning--; @@ -22886,7 +23009,7 @@ void checkNetworkAttachmentsScopedListWarning( o.code!, unittest.equals('foo'), ); - checkUnnamed390(o.data!); + checkUnnamed394(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -22900,7 +23023,7 @@ api.NetworkAttachmentsScopedList buildNetworkAttachmentsScopedList() { final o = api.NetworkAttachmentsScopedList(); buildCounterNetworkAttachmentsScopedList++; if (buildCounterNetworkAttachmentsScopedList < 3) { - o.networkAttachments = 
buildUnnamed389(); + o.networkAttachments = buildUnnamed393(); o.warning = buildNetworkAttachmentsScopedListWarning(); } buildCounterNetworkAttachmentsScopedList--; @@ -22910,7 +23033,7 @@ api.NetworkAttachmentsScopedList buildNetworkAttachmentsScopedList() { void checkNetworkAttachmentsScopedList(api.NetworkAttachmentsScopedList o) { buildCounterNetworkAttachmentsScopedList++; if (buildCounterNetworkAttachmentsScopedList < 3) { - checkUnnamed389(o.networkAttachments!); + checkUnnamed393(o.networkAttachments!); checkNetworkAttachmentsScopedListWarning(o.warning!); } buildCounterNetworkAttachmentsScopedList--; @@ -22984,24 +23107,24 @@ void checkNetworkEdgeSecurityService(api.NetworkEdgeSecurityService o) { } core.Map - buildUnnamed391() => { + buildUnnamed395() => { 'x': buildNetworkEdgeSecurityServicesScopedList(), 'y': buildNetworkEdgeSecurityServicesScopedList(), }; -void checkUnnamed391( +void checkUnnamed395( core.Map o) { unittest.expect(o, unittest.hasLength(2)); checkNetworkEdgeSecurityServicesScopedList(o['x']!); checkNetworkEdgeSecurityServicesScopedList(o['y']!); } -core.List buildUnnamed392() => [ +core.List buildUnnamed396() => [ 'foo', 'foo', ]; -void checkUnnamed392(core.List o) { +void checkUnnamed396(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -23043,12 +23166,12 @@ void checkNetworkEdgeSecurityServiceAggregatedListWarningData( } core.List - buildUnnamed393() => [ + buildUnnamed397() => [ buildNetworkEdgeSecurityServiceAggregatedListWarningData(), buildNetworkEdgeSecurityServiceAggregatedListWarningData(), ]; -void checkUnnamed393( +void checkUnnamed397( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkNetworkEdgeSecurityServiceAggregatedListWarningData(o[0]); @@ -23062,7 +23185,7 @@ api.NetworkEdgeSecurityServiceAggregatedListWarning buildCounterNetworkEdgeSecurityServiceAggregatedListWarning++; if (buildCounterNetworkEdgeSecurityServiceAggregatedListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed393(); + o.data = buildUnnamed397(); o.message = 'foo'; } buildCounterNetworkEdgeSecurityServiceAggregatedListWarning--; @@ -23077,7 +23200,7 @@ void checkNetworkEdgeSecurityServiceAggregatedListWarning( o.code!, unittest.equals('foo'), ); - checkUnnamed393(o.data!); + checkUnnamed397(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -23094,11 +23217,11 @@ api.NetworkEdgeSecurityServiceAggregatedList if (buildCounterNetworkEdgeSecurityServiceAggregatedList < 3) { o.etag = 'foo'; o.id = 'foo'; - o.items = buildUnnamed391(); + o.items = buildUnnamed395(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; - o.unreachables = buildUnnamed392(); + o.unreachables = buildUnnamed396(); o.warning = buildNetworkEdgeSecurityServiceAggregatedListWarning(); } buildCounterNetworkEdgeSecurityServiceAggregatedList--; @@ -23117,7 +23240,7 @@ void checkNetworkEdgeSecurityServiceAggregatedList( o.id!, unittest.equals('foo'), ); - checkUnnamed391(o.items!); + checkUnnamed395(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -23130,18 +23253,18 @@ void checkNetworkEdgeSecurityServiceAggregatedList( o.selfLink!, unittest.equals('foo'), ); - checkUnnamed392(o.unreachables!); + checkUnnamed396(o.unreachables!); checkNetworkEdgeSecurityServiceAggregatedListWarning(o.warning!); } buildCounterNetworkEdgeSecurityServiceAggregatedList--; } -core.List buildUnnamed394() => [ +core.List buildUnnamed398() => [ buildNetworkEdgeSecurityService(), buildNetworkEdgeSecurityService(), ]; -void checkUnnamed394(core.List 
o) { +void checkUnnamed398(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkNetworkEdgeSecurityService(o[0]); checkNetworkEdgeSecurityService(o[1]); @@ -23177,12 +23300,12 @@ void checkNetworkEdgeSecurityServicesScopedListWarningData( } core.List - buildUnnamed395() => [ + buildUnnamed399() => [ buildNetworkEdgeSecurityServicesScopedListWarningData(), buildNetworkEdgeSecurityServicesScopedListWarningData(), ]; -void checkUnnamed395( +void checkUnnamed399( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkNetworkEdgeSecurityServicesScopedListWarningData(o[0]); @@ -23196,7 +23319,7 @@ api.NetworkEdgeSecurityServicesScopedListWarning buildCounterNetworkEdgeSecurityServicesScopedListWarning++; if (buildCounterNetworkEdgeSecurityServicesScopedListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed395(); + o.data = buildUnnamed399(); o.message = 'foo'; } buildCounterNetworkEdgeSecurityServicesScopedListWarning--; @@ -23211,7 +23334,7 @@ void checkNetworkEdgeSecurityServicesScopedListWarning( o.code!, unittest.equals('foo'), ); - checkUnnamed395(o.data!); + checkUnnamed399(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -23226,7 +23349,7 @@ api.NetworkEdgeSecurityServicesScopedList final o = api.NetworkEdgeSecurityServicesScopedList(); buildCounterNetworkEdgeSecurityServicesScopedList++; if (buildCounterNetworkEdgeSecurityServicesScopedList < 3) { - o.networkEdgeSecurityServices = buildUnnamed394(); + o.networkEdgeSecurityServices = buildUnnamed398(); o.warning = buildNetworkEdgeSecurityServicesScopedListWarning(); } buildCounterNetworkEdgeSecurityServicesScopedList--; @@ -23237,18 +23360,18 @@ void checkNetworkEdgeSecurityServicesScopedList( api.NetworkEdgeSecurityServicesScopedList o) { buildCounterNetworkEdgeSecurityServicesScopedList++; if (buildCounterNetworkEdgeSecurityServicesScopedList < 3) { - checkUnnamed394(o.networkEdgeSecurityServices!); + checkUnnamed398(o.networkEdgeSecurityServices!); checkNetworkEdgeSecurityServicesScopedListWarning(o.warning!); } buildCounterNetworkEdgeSecurityServicesScopedList--; } -core.Map buildUnnamed396() => { +core.Map buildUnnamed400() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed396(core.Map o) { +void checkUnnamed400(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -23265,7 +23388,7 @@ api.NetworkEndpoint buildNetworkEndpoint() { final o = api.NetworkEndpoint(); buildCounterNetworkEndpoint++; if (buildCounterNetworkEndpoint < 3) { - o.annotations = buildUnnamed396(); + o.annotations = buildUnnamed400(); o.clientDestinationPort = 42; o.fqdn = 'foo'; o.instance = 'foo'; @@ -23280,7 +23403,7 @@ api.NetworkEndpoint buildNetworkEndpoint() { void checkNetworkEndpoint(api.NetworkEndpoint o) { buildCounterNetworkEndpoint++; if (buildCounterNetworkEndpoint < 3) { - checkUnnamed396(o.annotations!); + checkUnnamed400(o.annotations!); unittest.expect( o.clientDestinationPort!, unittest.equals(42), @@ -23309,12 +23432,12 @@ void checkNetworkEndpoint(api.NetworkEndpoint o) { buildCounterNetworkEndpoint--; } -core.Map buildUnnamed397() => { +core.Map buildUnnamed401() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed397(core.Map o) { +void checkUnnamed401(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -23331,7 +23454,7 @@ api.NetworkEndpointGroup buildNetworkEndpointGroup() { final o = api.NetworkEndpointGroup(); buildCounterNetworkEndpointGroup++; if (buildCounterNetworkEndpointGroup < 3) { - o.annotations = buildUnnamed397(); + 
o.annotations = buildUnnamed401(); o.appEngine = buildNetworkEndpointGroupAppEngine(); o.cloudFunction = buildNetworkEndpointGroupCloudFunction(); o.cloudRun = buildNetworkEndpointGroupCloudRun(); @@ -23358,7 +23481,7 @@ api.NetworkEndpointGroup buildNetworkEndpointGroup() { void checkNetworkEndpointGroup(api.NetworkEndpointGroup o) { buildCounterNetworkEndpointGroup++; if (buildCounterNetworkEndpointGroup < 3) { - checkUnnamed397(o.annotations!); + checkUnnamed401(o.annotations!); checkNetworkEndpointGroupAppEngine(o.appEngine!); checkNetworkEndpointGroupCloudFunction(o.cloudFunction!); checkNetworkEndpointGroupCloudRun(o.cloudRun!); @@ -23423,25 +23546,25 @@ void checkNetworkEndpointGroup(api.NetworkEndpointGroup o) { buildCounterNetworkEndpointGroup--; } -core.Map buildUnnamed398() => +core.Map buildUnnamed402() => { 'x': buildNetworkEndpointGroupsScopedList(), 'y': buildNetworkEndpointGroupsScopedList(), }; -void checkUnnamed398( +void checkUnnamed402( core.Map o) { unittest.expect(o, unittest.hasLength(2)); checkNetworkEndpointGroupsScopedList(o['x']!); checkNetworkEndpointGroupsScopedList(o['y']!); } -core.List buildUnnamed399() => [ +core.List buildUnnamed403() => [ 'foo', 'foo', ]; -void checkUnnamed399(core.List o) { +void checkUnnamed403(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -23483,12 +23606,12 @@ void checkNetworkEndpointGroupAggregatedListWarningData( } core.List - buildUnnamed400() => [ + buildUnnamed404() => [ buildNetworkEndpointGroupAggregatedListWarningData(), buildNetworkEndpointGroupAggregatedListWarningData(), ]; -void checkUnnamed400( +void checkUnnamed404( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkNetworkEndpointGroupAggregatedListWarningData(o[0]); @@ -23502,7 +23625,7 @@ api.NetworkEndpointGroupAggregatedListWarning buildCounterNetworkEndpointGroupAggregatedListWarning++; if (buildCounterNetworkEndpointGroupAggregatedListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed400(); + o.data = buildUnnamed404(); o.message = 'foo'; } buildCounterNetworkEndpointGroupAggregatedListWarning--; @@ -23517,7 +23640,7 @@ void checkNetworkEndpointGroupAggregatedListWarning( o.code!, unittest.equals('foo'), ); - checkUnnamed400(o.data!); + checkUnnamed404(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -23533,11 +23656,11 @@ api.NetworkEndpointGroupAggregatedList buildCounterNetworkEndpointGroupAggregatedList++; if (buildCounterNetworkEndpointGroupAggregatedList < 3) { o.id = 'foo'; - o.items = buildUnnamed398(); + o.items = buildUnnamed402(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; - o.unreachables = buildUnnamed399(); + o.unreachables = buildUnnamed403(); o.warning = buildNetworkEndpointGroupAggregatedListWarning(); } buildCounterNetworkEndpointGroupAggregatedList--; @@ -23552,7 +23675,7 @@ void checkNetworkEndpointGroupAggregatedList( o.id!, unittest.equals('foo'), ); - checkUnnamed398(o.items!); + checkUnnamed402(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -23565,7 +23688,7 @@ void checkNetworkEndpointGroupAggregatedList( o.selfLink!, unittest.equals('foo'), ); - checkUnnamed399(o.unreachables!); + checkUnnamed403(o.unreachables!); checkNetworkEndpointGroupAggregatedListWarning(o.warning!); } buildCounterNetworkEndpointGroupAggregatedList--; @@ -23663,12 +23786,12 @@ void checkNetworkEndpointGroupCloudRun(api.NetworkEndpointGroupCloudRun o) { buildCounterNetworkEndpointGroupCloudRun--; } -core.List buildUnnamed401() => [ +core.List buildUnnamed405() 
=> [ buildNetworkEndpointGroup(), buildNetworkEndpointGroup(), ]; -void checkUnnamed401(core.List o) { +void checkUnnamed405(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkNetworkEndpointGroup(o[0]); checkNetworkEndpointGroup(o[1]); @@ -23703,12 +23826,12 @@ void checkNetworkEndpointGroupListWarningData( buildCounterNetworkEndpointGroupListWarningData--; } -core.List buildUnnamed402() => [ +core.List buildUnnamed406() => [ buildNetworkEndpointGroupListWarningData(), buildNetworkEndpointGroupListWarningData(), ]; -void checkUnnamed402(core.List o) { +void checkUnnamed406(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkNetworkEndpointGroupListWarningData(o[0]); checkNetworkEndpointGroupListWarningData(o[1]); @@ -23720,7 +23843,7 @@ api.NetworkEndpointGroupListWarning buildNetworkEndpointGroupListWarning() { buildCounterNetworkEndpointGroupListWarning++; if (buildCounterNetworkEndpointGroupListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed402(); + o.data = buildUnnamed406(); o.message = 'foo'; } buildCounterNetworkEndpointGroupListWarning--; @@ -23735,7 +23858,7 @@ void checkNetworkEndpointGroupListWarning( o.code!, unittest.equals('foo'), ); - checkUnnamed402(o.data!); + checkUnnamed406(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -23750,7 +23873,7 @@ api.NetworkEndpointGroupList buildNetworkEndpointGroupList() { buildCounterNetworkEndpointGroupList++; if (buildCounterNetworkEndpointGroupList < 3) { o.id = 'foo'; - o.items = buildUnnamed401(); + o.items = buildUnnamed405(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; @@ -23767,7 +23890,7 @@ void checkNetworkEndpointGroupList(api.NetworkEndpointGroupList o) { o.id!, unittest.equals('foo'), ); - checkUnnamed401(o.items!); + checkUnnamed405(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -23822,12 +23945,12 @@ void checkNetworkEndpointGroupPscData(api.NetworkEndpointGroupPscData o) { buildCounterNetworkEndpointGroupPscData--; } -core.List buildUnnamed403() => [ +core.List buildUnnamed407() => [ buildNetworkEndpoint(), buildNetworkEndpoint(), ]; -void checkUnnamed403(core.List o) { +void checkUnnamed407(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkNetworkEndpoint(o[0]); checkNetworkEndpoint(o[1]); @@ -23839,7 +23962,7 @@ api.NetworkEndpointGroupsAttachEndpointsRequest final o = api.NetworkEndpointGroupsAttachEndpointsRequest(); buildCounterNetworkEndpointGroupsAttachEndpointsRequest++; if (buildCounterNetworkEndpointGroupsAttachEndpointsRequest < 3) { - o.networkEndpoints = buildUnnamed403(); + o.networkEndpoints = buildUnnamed407(); } buildCounterNetworkEndpointGroupsAttachEndpointsRequest--; return o; @@ -23849,17 +23972,17 @@ void checkNetworkEndpointGroupsAttachEndpointsRequest( api.NetworkEndpointGroupsAttachEndpointsRequest o) { buildCounterNetworkEndpointGroupsAttachEndpointsRequest++; if (buildCounterNetworkEndpointGroupsAttachEndpointsRequest < 3) { - checkUnnamed403(o.networkEndpoints!); + checkUnnamed407(o.networkEndpoints!); } buildCounterNetworkEndpointGroupsAttachEndpointsRequest--; } -core.List buildUnnamed404() => [ +core.List buildUnnamed408() => [ buildNetworkEndpoint(), buildNetworkEndpoint(), ]; -void checkUnnamed404(core.List o) { +void checkUnnamed408(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkNetworkEndpoint(o[0]); checkNetworkEndpoint(o[1]); @@ -23871,7 +23994,7 @@ api.NetworkEndpointGroupsDetachEndpointsRequest final o = api.NetworkEndpointGroupsDetachEndpointsRequest(); 
buildCounterNetworkEndpointGroupsDetachEndpointsRequest++; if (buildCounterNetworkEndpointGroupsDetachEndpointsRequest < 3) { - o.networkEndpoints = buildUnnamed404(); + o.networkEndpoints = buildUnnamed408(); } buildCounterNetworkEndpointGroupsDetachEndpointsRequest--; return o; @@ -23881,7 +24004,7 @@ void checkNetworkEndpointGroupsDetachEndpointsRequest( api.NetworkEndpointGroupsDetachEndpointsRequest o) { buildCounterNetworkEndpointGroupsDetachEndpointsRequest++; if (buildCounterNetworkEndpointGroupsDetachEndpointsRequest < 3) { - checkUnnamed404(o.networkEndpoints!); + checkUnnamed408(o.networkEndpoints!); } buildCounterNetworkEndpointGroupsDetachEndpointsRequest--; } @@ -23910,12 +24033,12 @@ void checkNetworkEndpointGroupsListEndpointsRequest( buildCounterNetworkEndpointGroupsListEndpointsRequest--; } -core.List buildUnnamed405() => [ +core.List buildUnnamed409() => [ buildNetworkEndpointWithHealthStatus(), buildNetworkEndpointWithHealthStatus(), ]; -void checkUnnamed405(core.List o) { +void checkUnnamed409(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkNetworkEndpointWithHealthStatus(o[0]); checkNetworkEndpointWithHealthStatus(o[1]); @@ -23951,12 +24074,12 @@ void checkNetworkEndpointGroupsListNetworkEndpointsWarningData( } core.List - buildUnnamed406() => [ + buildUnnamed410() => [ buildNetworkEndpointGroupsListNetworkEndpointsWarningData(), buildNetworkEndpointGroupsListNetworkEndpointsWarningData(), ]; -void checkUnnamed406( +void checkUnnamed410( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkNetworkEndpointGroupsListNetworkEndpointsWarningData(o[0]); @@ -23970,7 +24093,7 @@ api.NetworkEndpointGroupsListNetworkEndpointsWarning buildCounterNetworkEndpointGroupsListNetworkEndpointsWarning++; if (buildCounterNetworkEndpointGroupsListNetworkEndpointsWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed406(); + o.data = buildUnnamed410(); o.message = 'foo'; } buildCounterNetworkEndpointGroupsListNetworkEndpointsWarning--; @@ -23985,7 +24108,7 @@ void checkNetworkEndpointGroupsListNetworkEndpointsWarning( o.code!, unittest.equals('foo'), ); - checkUnnamed406(o.data!); + checkUnnamed410(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -24001,7 +24124,7 @@ api.NetworkEndpointGroupsListNetworkEndpoints buildCounterNetworkEndpointGroupsListNetworkEndpoints++; if (buildCounterNetworkEndpointGroupsListNetworkEndpoints < 3) { o.id = 'foo'; - o.items = buildUnnamed405(); + o.items = buildUnnamed409(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.warning = buildNetworkEndpointGroupsListNetworkEndpointsWarning(); @@ -24018,7 +24141,7 @@ void checkNetworkEndpointGroupsListNetworkEndpoints( o.id!, unittest.equals('foo'), ); - checkUnnamed405(o.items!); + checkUnnamed409(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -24032,12 +24155,12 @@ void checkNetworkEndpointGroupsListNetworkEndpoints( buildCounterNetworkEndpointGroupsListNetworkEndpoints--; } -core.List buildUnnamed407() => [ +core.List buildUnnamed411() => [ buildNetworkEndpointGroup(), buildNetworkEndpointGroup(), ]; -void checkUnnamed407(core.List o) { +void checkUnnamed411(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkNetworkEndpointGroup(o[0]); checkNetworkEndpointGroup(o[1]); @@ -24072,12 +24195,12 @@ void checkNetworkEndpointGroupsScopedListWarningData( buildCounterNetworkEndpointGroupsScopedListWarningData--; } -core.List buildUnnamed408() => [ +core.List buildUnnamed412() => [ buildNetworkEndpointGroupsScopedListWarningData(), 
buildNetworkEndpointGroupsScopedListWarningData(), ]; -void checkUnnamed408( +void checkUnnamed412( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkNetworkEndpointGroupsScopedListWarningData(o[0]); @@ -24091,7 +24214,7 @@ api.NetworkEndpointGroupsScopedListWarning buildCounterNetworkEndpointGroupsScopedListWarning++; if (buildCounterNetworkEndpointGroupsScopedListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed408(); + o.data = buildUnnamed412(); o.message = 'foo'; } buildCounterNetworkEndpointGroupsScopedListWarning--; @@ -24106,7 +24229,7 @@ void checkNetworkEndpointGroupsScopedListWarning( o.code!, unittest.equals('foo'), ); - checkUnnamed408(o.data!); + checkUnnamed412(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -24120,7 +24243,7 @@ api.NetworkEndpointGroupsScopedList buildNetworkEndpointGroupsScopedList() { final o = api.NetworkEndpointGroupsScopedList(); buildCounterNetworkEndpointGroupsScopedList++; if (buildCounterNetworkEndpointGroupsScopedList < 3) { - o.networkEndpointGroups = buildUnnamed407(); + o.networkEndpointGroups = buildUnnamed411(); o.warning = buildNetworkEndpointGroupsScopedListWarning(); } buildCounterNetworkEndpointGroupsScopedList--; @@ -24131,18 +24254,18 @@ void checkNetworkEndpointGroupsScopedList( api.NetworkEndpointGroupsScopedList o) { buildCounterNetworkEndpointGroupsScopedList++; if (buildCounterNetworkEndpointGroupsScopedList < 3) { - checkUnnamed407(o.networkEndpointGroups!); + checkUnnamed411(o.networkEndpointGroups!); checkNetworkEndpointGroupsScopedListWarning(o.warning!); } buildCounterNetworkEndpointGroupsScopedList--; } -core.List buildUnnamed409() => [ +core.List buildUnnamed413() => [ buildHealthStatusForNetworkEndpoint(), buildHealthStatusForNetworkEndpoint(), ]; -void checkUnnamed409(core.List o) { +void checkUnnamed413(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkHealthStatusForNetworkEndpoint(o[0]); checkHealthStatusForNetworkEndpoint(o[1]); @@ -24153,7 +24276,7 @@ api.NetworkEndpointWithHealthStatus buildNetworkEndpointWithHealthStatus() { final o = api.NetworkEndpointWithHealthStatus(); buildCounterNetworkEndpointWithHealthStatus++; if (buildCounterNetworkEndpointWithHealthStatus < 3) { - o.healths = buildUnnamed409(); + o.healths = buildUnnamed413(); o.networkEndpoint = buildNetworkEndpoint(); } buildCounterNetworkEndpointWithHealthStatus--; @@ -24164,40 +24287,40 @@ void checkNetworkEndpointWithHealthStatus( api.NetworkEndpointWithHealthStatus o) { buildCounterNetworkEndpointWithHealthStatus++; if (buildCounterNetworkEndpointWithHealthStatus < 3) { - checkUnnamed409(o.healths!); + checkUnnamed413(o.healths!); checkNetworkEndpoint(o.networkEndpoint!); } buildCounterNetworkEndpointWithHealthStatus--; } -core.List buildUnnamed410() => [ +core.List buildUnnamed414() => [ buildAccessConfig(), buildAccessConfig(), ]; -void checkUnnamed410(core.List o) { +void checkUnnamed414(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkAccessConfig(o[0]); checkAccessConfig(o[1]); } -core.List buildUnnamed411() => [ +core.List buildUnnamed415() => [ buildAliasIpRange(), buildAliasIpRange(), ]; -void checkUnnamed411(core.List o) { +void checkUnnamed415(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkAliasIpRange(o[0]); checkAliasIpRange(o[1]); } -core.List buildUnnamed412() => [ +core.List buildUnnamed416() => [ buildAccessConfig(), buildAccessConfig(), ]; -void checkUnnamed412(core.List o) { +void checkUnnamed416(core.List o) { unittest.expect(o, unittest.hasLength(2)); 
checkAccessConfig(o[0]); checkAccessConfig(o[1]); @@ -24208,11 +24331,11 @@ api.NetworkInterface buildNetworkInterface() { final o = api.NetworkInterface(); buildCounterNetworkInterface++; if (buildCounterNetworkInterface < 3) { - o.accessConfigs = buildUnnamed410(); - o.aliasIpRanges = buildUnnamed411(); + o.accessConfigs = buildUnnamed414(); + o.aliasIpRanges = buildUnnamed415(); o.fingerprint = 'foo'; o.internalIpv6PrefixLength = 42; - o.ipv6AccessConfigs = buildUnnamed412(); + o.ipv6AccessConfigs = buildUnnamed416(); o.ipv6AccessType = 'foo'; o.ipv6Address = 'foo'; o.kind = 'foo'; @@ -24232,8 +24355,8 @@ api.NetworkInterface buildNetworkInterface() { void checkNetworkInterface(api.NetworkInterface o) { buildCounterNetworkInterface++; if (buildCounterNetworkInterface < 3) { - checkUnnamed410(o.accessConfigs!); - checkUnnamed411(o.aliasIpRanges!); + checkUnnamed414(o.accessConfigs!); + checkUnnamed415(o.aliasIpRanges!); unittest.expect( o.fingerprint!, unittest.equals('foo'), @@ -24242,7 +24365,7 @@ void checkNetworkInterface(api.NetworkInterface o) { o.internalIpv6PrefixLength!, unittest.equals(42), ); - checkUnnamed412(o.ipv6AccessConfigs!); + checkUnnamed416(o.ipv6AccessConfigs!); unittest.expect( o.ipv6AccessType!, unittest.equals('foo'), @@ -24291,12 +24414,12 @@ void checkNetworkInterface(api.NetworkInterface o) { buildCounterNetworkInterface--; } -core.List buildUnnamed413() => [ +core.List buildUnnamed417() => [ buildNetwork(), buildNetwork(), ]; -void checkUnnamed413(core.List o) { +void checkUnnamed417(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkNetwork(o[0]); checkNetwork(o[1]); @@ -24329,12 +24452,12 @@ void checkNetworkListWarningData(api.NetworkListWarningData o) { buildCounterNetworkListWarningData--; } -core.List buildUnnamed414() => [ +core.List buildUnnamed418() => [ buildNetworkListWarningData(), buildNetworkListWarningData(), ]; -void checkUnnamed414(core.List o) { +void checkUnnamed418(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkNetworkListWarningData(o[0]); checkNetworkListWarningData(o[1]); @@ -24346,7 +24469,7 @@ api.NetworkListWarning buildNetworkListWarning() { buildCounterNetworkListWarning++; if (buildCounterNetworkListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed414(); + o.data = buildUnnamed418(); o.message = 'foo'; } buildCounterNetworkListWarning--; @@ -24360,7 +24483,7 @@ void checkNetworkListWarning(api.NetworkListWarning o) { o.code!, unittest.equals('foo'), ); - checkUnnamed414(o.data!); + checkUnnamed418(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -24375,7 +24498,7 @@ api.NetworkList buildNetworkList() { buildCounterNetworkList++; if (buildCounterNetworkList < 3) { o.id = 'foo'; - o.items = buildUnnamed413(); + o.items = buildUnnamed417(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; @@ -24392,7 +24515,7 @@ void checkNetworkList(api.NetworkList o) { o.id!, unittest.equals('foo'), ); - checkUnnamed413(o.items!); + checkUnnamed417(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -24491,11 +24614,432 @@ void checkNetworkPerformanceConfig(api.NetworkPerformanceConfig o) { buildCounterNetworkPerformanceConfig--; } +core.int buildCounterNetworkProfile = 0; +api.NetworkProfile buildNetworkProfile() { + final o = api.NetworkProfile(); + buildCounterNetworkProfile++; + if (buildCounterNetworkProfile < 3) { + o.creationTimestamp = 'foo'; + o.description = 'foo'; + o.features = buildNetworkProfileNetworkFeatures(); + o.id = 'foo'; + o.kind = 'foo'; + o.location = 
buildNetworkProfileLocation(); + o.name = 'foo'; + o.selfLink = 'foo'; + o.selfLinkWithId = 'foo'; + o.zone = 'foo'; + } + buildCounterNetworkProfile--; + return o; +} + +void checkNetworkProfile(api.NetworkProfile o) { + buildCounterNetworkProfile++; + if (buildCounterNetworkProfile < 3) { + unittest.expect( + o.creationTimestamp!, + unittest.equals('foo'), + ); + unittest.expect( + o.description!, + unittest.equals('foo'), + ); + checkNetworkProfileNetworkFeatures(o.features!); + unittest.expect( + o.id!, + unittest.equals('foo'), + ); + unittest.expect( + o.kind!, + unittest.equals('foo'), + ); + checkNetworkProfileLocation(o.location!); + unittest.expect( + o.name!, + unittest.equals('foo'), + ); + unittest.expect( + o.selfLink!, + unittest.equals('foo'), + ); + unittest.expect( + o.selfLinkWithId!, + unittest.equals('foo'), + ); + unittest.expect( + o.zone!, + unittest.equals('foo'), + ); + } + buildCounterNetworkProfile--; +} + +core.int buildCounterNetworkProfileLocation = 0; +api.NetworkProfileLocation buildNetworkProfileLocation() { + final o = api.NetworkProfileLocation(); + buildCounterNetworkProfileLocation++; + if (buildCounterNetworkProfileLocation < 3) { + o.name = 'foo'; + o.scope = 'foo'; + } + buildCounterNetworkProfileLocation--; + return o; +} + +void checkNetworkProfileLocation(api.NetworkProfileLocation o) { + buildCounterNetworkProfileLocation++; + if (buildCounterNetworkProfileLocation < 3) { + unittest.expect( + o.name!, + unittest.equals('foo'), + ); + unittest.expect( + o.scope!, + unittest.equals('foo'), + ); + } + buildCounterNetworkProfileLocation--; +} + +core.List buildUnnamed419() => [ + 'foo', + 'foo', + ]; + +void checkUnnamed419(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o[0], + unittest.equals('foo'), + ); + unittest.expect( + o[1], + unittest.equals('foo'), + ); +} + +core.List buildUnnamed420() => [ + 'foo', + 'foo', + ]; + +void checkUnnamed420(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o[0], + unittest.equals('foo'), + ); + unittest.expect( + o[1], + unittest.equals('foo'), + ); +} + +core.List buildUnnamed421() => [ + 'foo', + 'foo', + ]; + +void checkUnnamed421(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o[0], + unittest.equals('foo'), + ); + unittest.expect( + o[1], + unittest.equals('foo'), + ); +} + +core.List buildUnnamed422() => [ + 'foo', + 'foo', + ]; + +void checkUnnamed422(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o[0], + unittest.equals('foo'), + ); + unittest.expect( + o[1], + unittest.equals('foo'), + ); +} + +core.int buildCounterNetworkProfileNetworkFeatures = 0; +api.NetworkProfileNetworkFeatures buildNetworkProfileNetworkFeatures() { + final o = api.NetworkProfileNetworkFeatures(); + buildCounterNetworkProfileNetworkFeatures++; + if (buildCounterNetworkProfileNetworkFeatures < 3) { + o.addressPurposes = buildUnnamed419(); + o.allowAliasIpRanges = 'foo'; + o.allowAutoModeSubnet = 'foo'; + o.allowClassDFirewalls = 'foo'; + o.allowCloudNat = 'foo'; + o.allowCloudRouter = 'foo'; + o.allowExternalIpAccess = 'foo'; + o.allowInterconnect = 'foo'; + o.allowLoadBalancing = 'foo'; + o.allowMultiNicInSameNetwork = 'foo'; + o.allowPacketMirroring = 'foo'; + o.allowPrivateGoogleAccess = 'foo'; + o.allowPsc = 'foo'; + o.allowSameNetworkUnicast = 'foo'; + o.allowStaticRoutes = 'foo'; + o.allowSubInterfaces = 'foo'; + o.allowVpcPeering = 'foo'; + o.allowVpn = 'foo'; + o.interfaceTypes = 
buildUnnamed420(); + o.subnetPurposes = buildUnnamed421(); + o.subnetStackTypes = buildUnnamed422(); + o.unicast = 'foo'; + } + buildCounterNetworkProfileNetworkFeatures--; + return o; +} + +void checkNetworkProfileNetworkFeatures(api.NetworkProfileNetworkFeatures o) { + buildCounterNetworkProfileNetworkFeatures++; + if (buildCounterNetworkProfileNetworkFeatures < 3) { + checkUnnamed419(o.addressPurposes!); + unittest.expect( + o.allowAliasIpRanges!, + unittest.equals('foo'), + ); + unittest.expect( + o.allowAutoModeSubnet!, + unittest.equals('foo'), + ); + unittest.expect( + o.allowClassDFirewalls!, + unittest.equals('foo'), + ); + unittest.expect( + o.allowCloudNat!, + unittest.equals('foo'), + ); + unittest.expect( + o.allowCloudRouter!, + unittest.equals('foo'), + ); + unittest.expect( + o.allowExternalIpAccess!, + unittest.equals('foo'), + ); + unittest.expect( + o.allowInterconnect!, + unittest.equals('foo'), + ); + unittest.expect( + o.allowLoadBalancing!, + unittest.equals('foo'), + ); + unittest.expect( + o.allowMultiNicInSameNetwork!, + unittest.equals('foo'), + ); + unittest.expect( + o.allowPacketMirroring!, + unittest.equals('foo'), + ); + unittest.expect( + o.allowPrivateGoogleAccess!, + unittest.equals('foo'), + ); + unittest.expect( + o.allowPsc!, + unittest.equals('foo'), + ); + unittest.expect( + o.allowSameNetworkUnicast!, + unittest.equals('foo'), + ); + unittest.expect( + o.allowStaticRoutes!, + unittest.equals('foo'), + ); + unittest.expect( + o.allowSubInterfaces!, + unittest.equals('foo'), + ); + unittest.expect( + o.allowVpcPeering!, + unittest.equals('foo'), + ); + unittest.expect( + o.allowVpn!, + unittest.equals('foo'), + ); + checkUnnamed420(o.interfaceTypes!); + checkUnnamed421(o.subnetPurposes!); + checkUnnamed422(o.subnetStackTypes!); + unittest.expect( + o.unicast!, + unittest.equals('foo'), + ); + } + buildCounterNetworkProfileNetworkFeatures--; +} + +core.List buildUnnamed423() => [ + buildNetworkProfile(), + buildNetworkProfile(), + ]; + +void checkUnnamed423(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkNetworkProfile(o[0]); + checkNetworkProfile(o[1]); +} + +core.List buildUnnamed424() => [ + 'foo', + 'foo', + ]; + +void checkUnnamed424(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o[0], + unittest.equals('foo'), + ); + unittest.expect( + o[1], + unittest.equals('foo'), + ); +} + +core.int buildCounterNetworkProfilesListResponseWarningData = 0; +api.NetworkProfilesListResponseWarningData + buildNetworkProfilesListResponseWarningData() { + final o = api.NetworkProfilesListResponseWarningData(); + buildCounterNetworkProfilesListResponseWarningData++; + if (buildCounterNetworkProfilesListResponseWarningData < 3) { + o.key = 'foo'; + o.value = 'foo'; + } + buildCounterNetworkProfilesListResponseWarningData--; + return o; +} + +void checkNetworkProfilesListResponseWarningData( + api.NetworkProfilesListResponseWarningData o) { + buildCounterNetworkProfilesListResponseWarningData++; + if (buildCounterNetworkProfilesListResponseWarningData < 3) { + unittest.expect( + o.key!, + unittest.equals('foo'), + ); + unittest.expect( + o.value!, + unittest.equals('foo'), + ); + } + buildCounterNetworkProfilesListResponseWarningData--; +} + +core.List buildUnnamed425() => [ + buildNetworkProfilesListResponseWarningData(), + buildNetworkProfilesListResponseWarningData(), + ]; + +void checkUnnamed425(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkNetworkProfilesListResponseWarningData(o[0]); + 
checkNetworkProfilesListResponseWarningData(o[1]); +} + +core.int buildCounterNetworkProfilesListResponseWarning = 0; +api.NetworkProfilesListResponseWarning + buildNetworkProfilesListResponseWarning() { + final o = api.NetworkProfilesListResponseWarning(); + buildCounterNetworkProfilesListResponseWarning++; + if (buildCounterNetworkProfilesListResponseWarning < 3) { + o.code = 'foo'; + o.data = buildUnnamed425(); + o.message = 'foo'; + } + buildCounterNetworkProfilesListResponseWarning--; + return o; +} + +void checkNetworkProfilesListResponseWarning( + api.NetworkProfilesListResponseWarning o) { + buildCounterNetworkProfilesListResponseWarning++; + if (buildCounterNetworkProfilesListResponseWarning < 3) { + unittest.expect( + o.code!, + unittest.equals('foo'), + ); + checkUnnamed425(o.data!); + unittest.expect( + o.message!, + unittest.equals('foo'), + ); + } + buildCounterNetworkProfilesListResponseWarning--; +} + +core.int buildCounterNetworkProfilesListResponse = 0; +api.NetworkProfilesListResponse buildNetworkProfilesListResponse() { + final o = api.NetworkProfilesListResponse(); + buildCounterNetworkProfilesListResponse++; + if (buildCounterNetworkProfilesListResponse < 3) { + o.etag = 'foo'; + o.id = 'foo'; + o.items = buildUnnamed423(); + o.kind = 'foo'; + o.nextPageToken = 'foo'; + o.selfLink = 'foo'; + o.unreachables = buildUnnamed424(); + o.warning = buildNetworkProfilesListResponseWarning(); + } + buildCounterNetworkProfilesListResponse--; + return o; +} + +void checkNetworkProfilesListResponse(api.NetworkProfilesListResponse o) { + buildCounterNetworkProfilesListResponse++; + if (buildCounterNetworkProfilesListResponse < 3) { + unittest.expect( + o.etag!, + unittest.equals('foo'), + ); + unittest.expect( + o.id!, + unittest.equals('foo'), + ); + checkUnnamed423(o.items!); + unittest.expect( + o.kind!, + unittest.equals('foo'), + ); + unittest.expect( + o.nextPageToken!, + unittest.equals('foo'), + ); + unittest.expect( + o.selfLink!, + unittest.equals('foo'), + ); + checkUnnamed424(o.unreachables!); + checkNetworkProfilesListResponseWarning(o.warning!); + } + buildCounterNetworkProfilesListResponse--; +} + core.int buildCounterNetworkRoutingConfig = 0; api.NetworkRoutingConfig buildNetworkRoutingConfig() { final o = api.NetworkRoutingConfig(); buildCounterNetworkRoutingConfig++; if (buildCounterNetworkRoutingConfig < 3) { + o.bgpAlwaysCompareMed = true; + o.bgpBestPathSelectionMode = 'foo'; + o.bgpInterRegionCost = 'foo'; o.routingMode = 'foo'; } buildCounterNetworkRoutingConfig--; @@ -24505,6 +25049,15 @@ api.NetworkRoutingConfig buildNetworkRoutingConfig() { void checkNetworkRoutingConfig(api.NetworkRoutingConfig o) { buildCounterNetworkRoutingConfig++; if (buildCounterNetworkRoutingConfig < 3) { + unittest.expect(o.bgpAlwaysCompareMed!, unittest.isTrue); + unittest.expect( + o.bgpBestPathSelectionMode!, + unittest.equals('foo'), + ); + unittest.expect( + o.bgpInterRegionCost!, + unittest.equals('foo'), + ); unittest.expect( o.routingMode!, unittest.equals('foo'), @@ -24545,12 +25098,12 @@ void checkNetworksAddPeeringRequest(api.NetworksAddPeeringRequest o) { } core.List - buildUnnamed415() => [ + buildUnnamed426() => [ buildNetworksGetEffectiveFirewallsResponseEffectiveFirewallPolicy(), buildNetworksGetEffectiveFirewallsResponseEffectiveFirewallPolicy(), ]; -void checkUnnamed415( +void checkUnnamed426( core.List o) { unittest.expect(o, unittest.hasLength(2)); @@ -24558,12 +25111,12 @@ void checkUnnamed415( 
checkNetworksGetEffectiveFirewallsResponseEffectiveFirewallPolicy(o[1]); } -core.List buildUnnamed416() => [ +core.List buildUnnamed427() => [ buildFirewall(), buildFirewall(), ]; -void checkUnnamed416(core.List o) { +void checkUnnamed427(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkFirewall(o[0]); checkFirewall(o[1]); @@ -24575,8 +25128,8 @@ api.NetworksGetEffectiveFirewallsResponse final o = api.NetworksGetEffectiveFirewallsResponse(); buildCounterNetworksGetEffectiveFirewallsResponse++; if (buildCounterNetworksGetEffectiveFirewallsResponse < 3) { - o.firewallPolicys = buildUnnamed415(); - o.firewalls = buildUnnamed416(); + o.firewallPolicys = buildUnnamed426(); + o.firewalls = buildUnnamed427(); } buildCounterNetworksGetEffectiveFirewallsResponse--; return o; @@ -24586,18 +25139,18 @@ void checkNetworksGetEffectiveFirewallsResponse( api.NetworksGetEffectiveFirewallsResponse o) { buildCounterNetworksGetEffectiveFirewallsResponse++; if (buildCounterNetworksGetEffectiveFirewallsResponse < 3) { - checkUnnamed415(o.firewallPolicys!); - checkUnnamed416(o.firewalls!); + checkUnnamed426(o.firewallPolicys!); + checkUnnamed427(o.firewalls!); } buildCounterNetworksGetEffectiveFirewallsResponse--; } -core.List buildUnnamed417() => [ +core.List buildUnnamed428() => [ buildFirewallPolicyRule(), buildFirewallPolicyRule(), ]; -void checkUnnamed417(core.List o) { +void checkUnnamed428(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkFirewallPolicyRule(o[0]); checkFirewallPolicyRule(o[1]); @@ -24615,7 +25168,7 @@ api.NetworksGetEffectiveFirewallsResponseEffectiveFirewallPolicy o.displayName = 'foo'; o.name = 'foo'; o.priority = 42; - o.rules = buildUnnamed417(); + o.rules = buildUnnamed428(); o.shortName = 'foo'; o.type = 'foo'; } @@ -24640,7 +25193,7 @@ void checkNetworksGetEffectiveFirewallsResponseEffectiveFirewallPolicy( o.priority!, unittest.equals(42), ); - checkUnnamed417(o.rules!); + checkUnnamed428(o.rules!); unittest.expect( o.shortName!, unittest.equals('foo'), @@ -24787,23 +25340,23 @@ void checkNodeGroup(api.NodeGroup o) { buildCounterNodeGroup--; } -core.Map buildUnnamed418() => { +core.Map buildUnnamed429() => { 'x': buildNodeGroupsScopedList(), 'y': buildNodeGroupsScopedList(), }; -void checkUnnamed418(core.Map o) { +void checkUnnamed429(core.Map o) { unittest.expect(o, unittest.hasLength(2)); checkNodeGroupsScopedList(o['x']!); checkNodeGroupsScopedList(o['y']!); } -core.List buildUnnamed419() => [ +core.List buildUnnamed430() => [ 'foo', 'foo', ]; -void checkUnnamed419(core.List o) { +void checkUnnamed430(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -24844,12 +25397,12 @@ void checkNodeGroupAggregatedListWarningData( buildCounterNodeGroupAggregatedListWarningData--; } -core.List buildUnnamed420() => [ +core.List buildUnnamed431() => [ buildNodeGroupAggregatedListWarningData(), buildNodeGroupAggregatedListWarningData(), ]; -void checkUnnamed420(core.List o) { +void checkUnnamed431(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkNodeGroupAggregatedListWarningData(o[0]); checkNodeGroupAggregatedListWarningData(o[1]); @@ -24861,7 +25414,7 @@ api.NodeGroupAggregatedListWarning buildNodeGroupAggregatedListWarning() { buildCounterNodeGroupAggregatedListWarning++; if (buildCounterNodeGroupAggregatedListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed420(); + o.data = buildUnnamed431(); o.message = 'foo'; } buildCounterNodeGroupAggregatedListWarning--; @@ -24875,7 +25428,7 @@ void 
checkNodeGroupAggregatedListWarning(api.NodeGroupAggregatedListWarning o) { o.code!, unittest.equals('foo'), ); - checkUnnamed420(o.data!); + checkUnnamed431(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -24890,11 +25443,11 @@ api.NodeGroupAggregatedList buildNodeGroupAggregatedList() { buildCounterNodeGroupAggregatedList++; if (buildCounterNodeGroupAggregatedList < 3) { o.id = 'foo'; - o.items = buildUnnamed418(); + o.items = buildUnnamed429(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; - o.unreachables = buildUnnamed419(); + o.unreachables = buildUnnamed430(); o.warning = buildNodeGroupAggregatedListWarning(); } buildCounterNodeGroupAggregatedList--; @@ -24908,7 +25461,7 @@ void checkNodeGroupAggregatedList(api.NodeGroupAggregatedList o) { o.id!, unittest.equals('foo'), ); - checkUnnamed418(o.items!); + checkUnnamed429(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -24921,7 +25474,7 @@ void checkNodeGroupAggregatedList(api.NodeGroupAggregatedList o) { o.selfLink!, unittest.equals('foo'), ); - checkUnnamed419(o.unreachables!); + checkUnnamed430(o.unreachables!); checkNodeGroupAggregatedListWarning(o.warning!); } buildCounterNodeGroupAggregatedList--; @@ -24959,12 +25512,12 @@ void checkNodeGroupAutoscalingPolicy(api.NodeGroupAutoscalingPolicy o) { buildCounterNodeGroupAutoscalingPolicy--; } -core.List buildUnnamed421() => [ +core.List buildUnnamed432() => [ buildNodeGroup(), buildNodeGroup(), ]; -void checkUnnamed421(core.List o) { +void checkUnnamed432(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkNodeGroup(o[0]); checkNodeGroup(o[1]); @@ -24997,12 +25550,12 @@ void checkNodeGroupListWarningData(api.NodeGroupListWarningData o) { buildCounterNodeGroupListWarningData--; } -core.List buildUnnamed422() => [ +core.List buildUnnamed433() => [ buildNodeGroupListWarningData(), buildNodeGroupListWarningData(), ]; -void checkUnnamed422(core.List o) { +void checkUnnamed433(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkNodeGroupListWarningData(o[0]); checkNodeGroupListWarningData(o[1]); @@ -25014,7 +25567,7 @@ api.NodeGroupListWarning buildNodeGroupListWarning() { buildCounterNodeGroupListWarning++; if (buildCounterNodeGroupListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed422(); + o.data = buildUnnamed433(); o.message = 'foo'; } buildCounterNodeGroupListWarning--; @@ -25028,7 +25581,7 @@ void checkNodeGroupListWarning(api.NodeGroupListWarning o) { o.code!, unittest.equals('foo'), ); - checkUnnamed422(o.data!); + checkUnnamed433(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -25043,7 +25596,7 @@ api.NodeGroupList buildNodeGroupList() { buildCounterNodeGroupList++; if (buildCounterNodeGroupList < 3) { o.id = 'foo'; - o.items = buildUnnamed421(); + o.items = buildUnnamed432(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; @@ -25060,7 +25613,7 @@ void checkNodeGroupList(api.NodeGroupList o) { o.id!, unittest.equals('foo'), ); - checkUnnamed421(o.items!); + checkUnnamed432(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -25102,45 +25655,45 @@ void checkNodeGroupMaintenanceWindow(api.NodeGroupMaintenanceWindow o) { buildCounterNodeGroupMaintenanceWindow--; } -core.List buildUnnamed423() => [ +core.List buildUnnamed434() => [ buildAcceleratorConfig(), buildAcceleratorConfig(), ]; -void checkUnnamed423(core.List o) { +void checkUnnamed434(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkAcceleratorConfig(o[0]); checkAcceleratorConfig(o[1]); } 
-core.List buildUnnamed424() => [ +core.List buildUnnamed435() => [ buildLocalDisk(), buildLocalDisk(), ]; -void checkUnnamed424(core.List o) { +void checkUnnamed435(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkLocalDisk(o[0]); checkLocalDisk(o[1]); } -core.List buildUnnamed425() => [ +core.List buildUnnamed436() => [ buildInstanceConsumptionData(), buildInstanceConsumptionData(), ]; -void checkUnnamed425(core.List o) { +void checkUnnamed436(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkInstanceConsumptionData(o[0]); checkInstanceConsumptionData(o[1]); } -core.List buildUnnamed426() => [ +core.List buildUnnamed437() => [ 'foo', 'foo', ]; -void checkUnnamed426(core.List o) { +void checkUnnamed437(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -25157,12 +25710,12 @@ api.NodeGroupNode buildNodeGroupNode() { final o = api.NodeGroupNode(); buildCounterNodeGroupNode++; if (buildCounterNodeGroupNode < 3) { - o.accelerators = buildUnnamed423(); + o.accelerators = buildUnnamed434(); o.consumedResources = buildInstanceConsumptionInfo(); o.cpuOvercommitType = 'foo'; - o.disks = buildUnnamed424(); - o.instanceConsumptionData = buildUnnamed425(); - o.instances = buildUnnamed426(); + o.disks = buildUnnamed435(); + o.instanceConsumptionData = buildUnnamed436(); + o.instances = buildUnnamed437(); o.name = 'foo'; o.nodeType = 'foo'; o.satisfiesPzs = true; @@ -25179,15 +25732,15 @@ api.NodeGroupNode buildNodeGroupNode() { void checkNodeGroupNode(api.NodeGroupNode o) { buildCounterNodeGroupNode++; if (buildCounterNodeGroupNode < 3) { - checkUnnamed423(o.accelerators!); + checkUnnamed434(o.accelerators!); checkInstanceConsumptionInfo(o.consumedResources!); unittest.expect( o.cpuOvercommitType!, unittest.equals('foo'), ); - checkUnnamed424(o.disks!); - checkUnnamed425(o.instanceConsumptionData!); - checkUnnamed426(o.instances!); + checkUnnamed435(o.disks!); + checkUnnamed436(o.instanceConsumptionData!); + checkUnnamed437(o.instances!); unittest.expect( o.name!, unittest.equals('foo'), @@ -25234,12 +25787,12 @@ void checkNodeGroupsAddNodesRequest(api.NodeGroupsAddNodesRequest o) { buildCounterNodeGroupsAddNodesRequest--; } -core.List buildUnnamed427() => [ +core.List buildUnnamed438() => [ 'foo', 'foo', ]; -void checkUnnamed427(core.List o) { +void checkUnnamed438(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -25256,7 +25809,7 @@ api.NodeGroupsDeleteNodesRequest buildNodeGroupsDeleteNodesRequest() { final o = api.NodeGroupsDeleteNodesRequest(); buildCounterNodeGroupsDeleteNodesRequest++; if (buildCounterNodeGroupsDeleteNodesRequest < 3) { - o.nodes = buildUnnamed427(); + o.nodes = buildUnnamed438(); } buildCounterNodeGroupsDeleteNodesRequest--; return o; @@ -25265,17 +25818,17 @@ api.NodeGroupsDeleteNodesRequest buildNodeGroupsDeleteNodesRequest() { void checkNodeGroupsDeleteNodesRequest(api.NodeGroupsDeleteNodesRequest o) { buildCounterNodeGroupsDeleteNodesRequest++; if (buildCounterNodeGroupsDeleteNodesRequest < 3) { - checkUnnamed427(o.nodes!); + checkUnnamed438(o.nodes!); } buildCounterNodeGroupsDeleteNodesRequest--; } -core.List buildUnnamed428() => [ +core.List buildUnnamed439() => [ buildNodeGroupNode(), buildNodeGroupNode(), ]; -void checkUnnamed428(core.List o) { +void checkUnnamed439(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkNodeGroupNode(o[0]); checkNodeGroupNode(o[1]); @@ -25308,12 +25861,12 @@ void checkNodeGroupsListNodesWarningData(api.NodeGroupsListNodesWarningData 
o) { buildCounterNodeGroupsListNodesWarningData--; } -core.List buildUnnamed429() => [ +core.List buildUnnamed440() => [ buildNodeGroupsListNodesWarningData(), buildNodeGroupsListNodesWarningData(), ]; -void checkUnnamed429(core.List o) { +void checkUnnamed440(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkNodeGroupsListNodesWarningData(o[0]); checkNodeGroupsListNodesWarningData(o[1]); @@ -25325,7 +25878,7 @@ api.NodeGroupsListNodesWarning buildNodeGroupsListNodesWarning() { buildCounterNodeGroupsListNodesWarning++; if (buildCounterNodeGroupsListNodesWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed429(); + o.data = buildUnnamed440(); o.message = 'foo'; } buildCounterNodeGroupsListNodesWarning--; @@ -25339,7 +25892,7 @@ void checkNodeGroupsListNodesWarning(api.NodeGroupsListNodesWarning o) { o.code!, unittest.equals('foo'), ); - checkUnnamed429(o.data!); + checkUnnamed440(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -25354,7 +25907,7 @@ api.NodeGroupsListNodes buildNodeGroupsListNodes() { buildCounterNodeGroupsListNodes++; if (buildCounterNodeGroupsListNodes < 3) { o.id = 'foo'; - o.items = buildUnnamed428(); + o.items = buildUnnamed439(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; @@ -25371,7 +25924,7 @@ void checkNodeGroupsListNodes(api.NodeGroupsListNodes o) { o.id!, unittest.equals('foo'), ); - checkUnnamed428(o.items!); + checkUnnamed439(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -25389,12 +25942,12 @@ void checkNodeGroupsListNodes(api.NodeGroupsListNodes o) { buildCounterNodeGroupsListNodes--; } -core.List buildUnnamed430() => [ +core.List buildUnnamed441() => [ 'foo', 'foo', ]; -void checkUnnamed430(core.List o) { +void checkUnnamed441(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -25412,7 +25965,7 @@ api.NodeGroupsPerformMaintenanceRequest final o = api.NodeGroupsPerformMaintenanceRequest(); buildCounterNodeGroupsPerformMaintenanceRequest++; if (buildCounterNodeGroupsPerformMaintenanceRequest < 3) { - o.nodes = buildUnnamed430(); + o.nodes = buildUnnamed441(); o.startTime = 'foo'; } buildCounterNodeGroupsPerformMaintenanceRequest--; @@ -25423,7 +25976,7 @@ void checkNodeGroupsPerformMaintenanceRequest( api.NodeGroupsPerformMaintenanceRequest o) { buildCounterNodeGroupsPerformMaintenanceRequest++; if (buildCounterNodeGroupsPerformMaintenanceRequest < 3) { - checkUnnamed430(o.nodes!); + checkUnnamed441(o.nodes!); unittest.expect( o.startTime!, unittest.equals('foo'), @@ -25432,12 +25985,12 @@ void checkNodeGroupsPerformMaintenanceRequest( buildCounterNodeGroupsPerformMaintenanceRequest--; } -core.List buildUnnamed431() => [ +core.List buildUnnamed442() => [ buildNodeGroup(), buildNodeGroup(), ]; -void checkUnnamed431(core.List o) { +void checkUnnamed442(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkNodeGroup(o[0]); checkNodeGroup(o[1]); @@ -25471,12 +26024,12 @@ void checkNodeGroupsScopedListWarningData( buildCounterNodeGroupsScopedListWarningData--; } -core.List buildUnnamed432() => [ +core.List buildUnnamed443() => [ buildNodeGroupsScopedListWarningData(), buildNodeGroupsScopedListWarningData(), ]; -void checkUnnamed432(core.List o) { +void checkUnnamed443(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkNodeGroupsScopedListWarningData(o[0]); checkNodeGroupsScopedListWarningData(o[1]); @@ -25488,7 +26041,7 @@ api.NodeGroupsScopedListWarning buildNodeGroupsScopedListWarning() { buildCounterNodeGroupsScopedListWarning++; if 
(buildCounterNodeGroupsScopedListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed432(); + o.data = buildUnnamed443(); o.message = 'foo'; } buildCounterNodeGroupsScopedListWarning--; @@ -25502,7 +26055,7 @@ void checkNodeGroupsScopedListWarning(api.NodeGroupsScopedListWarning o) { o.code!, unittest.equals('foo'), ); - checkUnnamed432(o.data!); + checkUnnamed443(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -25516,7 +26069,7 @@ api.NodeGroupsScopedList buildNodeGroupsScopedList() { final o = api.NodeGroupsScopedList(); buildCounterNodeGroupsScopedList++; if (buildCounterNodeGroupsScopedList < 3) { - o.nodeGroups = buildUnnamed431(); + o.nodeGroups = buildUnnamed442(); o.warning = buildNodeGroupsScopedListWarning(); } buildCounterNodeGroupsScopedList--; @@ -25526,7 +26079,7 @@ api.NodeGroupsScopedList buildNodeGroupsScopedList() { void checkNodeGroupsScopedList(api.NodeGroupsScopedList o) { buildCounterNodeGroupsScopedList++; if (buildCounterNodeGroupsScopedList < 3) { - checkUnnamed431(o.nodeGroups!); + checkUnnamed442(o.nodeGroups!); checkNodeGroupsScopedListWarning(o.warning!); } buildCounterNodeGroupsScopedList--; @@ -25555,12 +26108,12 @@ void checkNodeGroupsSetNodeTemplateRequest( buildCounterNodeGroupsSetNodeTemplateRequest--; } -core.List buildUnnamed433() => [ +core.List buildUnnamed444() => [ 'foo', 'foo', ]; -void checkUnnamed433(core.List o) { +void checkUnnamed444(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -25578,7 +26131,7 @@ api.NodeGroupsSimulateMaintenanceEventRequest final o = api.NodeGroupsSimulateMaintenanceEventRequest(); buildCounterNodeGroupsSimulateMaintenanceEventRequest++; if (buildCounterNodeGroupsSimulateMaintenanceEventRequest < 3) { - o.nodes = buildUnnamed433(); + o.nodes = buildUnnamed444(); } buildCounterNodeGroupsSimulateMaintenanceEventRequest--; return o; @@ -25588,39 +26141,39 @@ void checkNodeGroupsSimulateMaintenanceEventRequest( api.NodeGroupsSimulateMaintenanceEventRequest o) { buildCounterNodeGroupsSimulateMaintenanceEventRequest++; if (buildCounterNodeGroupsSimulateMaintenanceEventRequest < 3) { - checkUnnamed433(o.nodes!); + checkUnnamed444(o.nodes!); } buildCounterNodeGroupsSimulateMaintenanceEventRequest--; } -core.List buildUnnamed434() => [ +core.List buildUnnamed445() => [ buildAcceleratorConfig(), buildAcceleratorConfig(), ]; -void checkUnnamed434(core.List o) { +void checkUnnamed445(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkAcceleratorConfig(o[0]); checkAcceleratorConfig(o[1]); } -core.List buildUnnamed435() => [ +core.List buildUnnamed446() => [ buildLocalDisk(), buildLocalDisk(), ]; -void checkUnnamed435(core.List o) { +void checkUnnamed446(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkLocalDisk(o[0]); checkLocalDisk(o[1]); } -core.Map buildUnnamed436() => { +core.Map buildUnnamed447() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed436(core.Map o) { +void checkUnnamed447(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -25637,15 +26190,15 @@ api.NodeTemplate buildNodeTemplate() { final o = api.NodeTemplate(); buildCounterNodeTemplate++; if (buildCounterNodeTemplate < 3) { - o.accelerators = buildUnnamed434(); + o.accelerators = buildUnnamed445(); o.cpuOvercommitType = 'foo'; o.creationTimestamp = 'foo'; o.description = 'foo'; - o.disks = buildUnnamed435(); + o.disks = buildUnnamed446(); o.id = 'foo'; o.kind = 'foo'; o.name = 'foo'; - o.nodeAffinityLabels = buildUnnamed436(); + o.nodeAffinityLabels 
= buildUnnamed447(); o.nodeType = 'foo'; o.nodeTypeFlexibility = buildNodeTemplateNodeTypeFlexibility(); o.region = 'foo'; @@ -25661,7 +26214,7 @@ api.NodeTemplate buildNodeTemplate() { void checkNodeTemplate(api.NodeTemplate o) { buildCounterNodeTemplate++; if (buildCounterNodeTemplate < 3) { - checkUnnamed434(o.accelerators!); + checkUnnamed445(o.accelerators!); unittest.expect( o.cpuOvercommitType!, unittest.equals('foo'), @@ -25674,7 +26227,7 @@ void checkNodeTemplate(api.NodeTemplate o) { o.description!, unittest.equals('foo'), ); - checkUnnamed435(o.disks!); + checkUnnamed446(o.disks!); unittest.expect( o.id!, unittest.equals('foo'), @@ -25687,7 +26240,7 @@ void checkNodeTemplate(api.NodeTemplate o) { o.name!, unittest.equals('foo'), ); - checkUnnamed436(o.nodeAffinityLabels!); + checkUnnamed447(o.nodeAffinityLabels!); unittest.expect( o.nodeType!, unittest.equals('foo'), @@ -25714,23 +26267,23 @@ void checkNodeTemplate(api.NodeTemplate o) { buildCounterNodeTemplate--; } -core.Map buildUnnamed437() => { +core.Map buildUnnamed448() => { 'x': buildNodeTemplatesScopedList(), 'y': buildNodeTemplatesScopedList(), }; -void checkUnnamed437(core.Map o) { +void checkUnnamed448(core.Map o) { unittest.expect(o, unittest.hasLength(2)); checkNodeTemplatesScopedList(o['x']!); checkNodeTemplatesScopedList(o['y']!); } -core.List buildUnnamed438() => [ +core.List buildUnnamed449() => [ 'foo', 'foo', ]; -void checkUnnamed438(core.List o) { +void checkUnnamed449(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -25771,12 +26324,12 @@ void checkNodeTemplateAggregatedListWarningData( buildCounterNodeTemplateAggregatedListWarningData--; } -core.List buildUnnamed439() => [ +core.List buildUnnamed450() => [ buildNodeTemplateAggregatedListWarningData(), buildNodeTemplateAggregatedListWarningData(), ]; -void checkUnnamed439(core.List o) { +void checkUnnamed450(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkNodeTemplateAggregatedListWarningData(o[0]); checkNodeTemplateAggregatedListWarningData(o[1]); @@ -25788,7 +26341,7 @@ api.NodeTemplateAggregatedListWarning buildNodeTemplateAggregatedListWarning() { buildCounterNodeTemplateAggregatedListWarning++; if (buildCounterNodeTemplateAggregatedListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed439(); + o.data = buildUnnamed450(); o.message = 'foo'; } buildCounterNodeTemplateAggregatedListWarning--; @@ -25803,7 +26356,7 @@ void checkNodeTemplateAggregatedListWarning( o.code!, unittest.equals('foo'), ); - checkUnnamed439(o.data!); + checkUnnamed450(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -25818,11 +26371,11 @@ api.NodeTemplateAggregatedList buildNodeTemplateAggregatedList() { buildCounterNodeTemplateAggregatedList++; if (buildCounterNodeTemplateAggregatedList < 3) { o.id = 'foo'; - o.items = buildUnnamed437(); + o.items = buildUnnamed448(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; - o.unreachables = buildUnnamed438(); + o.unreachables = buildUnnamed449(); o.warning = buildNodeTemplateAggregatedListWarning(); } buildCounterNodeTemplateAggregatedList--; @@ -25836,7 +26389,7 @@ void checkNodeTemplateAggregatedList(api.NodeTemplateAggregatedList o) { o.id!, unittest.equals('foo'), ); - checkUnnamed437(o.items!); + checkUnnamed448(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -25849,18 +26402,18 @@ void checkNodeTemplateAggregatedList(api.NodeTemplateAggregatedList o) { o.selfLink!, unittest.equals('foo'), ); - checkUnnamed438(o.unreachables!); + 
checkUnnamed449(o.unreachables!); checkNodeTemplateAggregatedListWarning(o.warning!); } buildCounterNodeTemplateAggregatedList--; } -core.List buildUnnamed440() => [ +core.List buildUnnamed451() => [ buildNodeTemplate(), buildNodeTemplate(), ]; -void checkUnnamed440(core.List o) { +void checkUnnamed451(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkNodeTemplate(o[0]); checkNodeTemplate(o[1]); @@ -25893,12 +26446,12 @@ void checkNodeTemplateListWarningData(api.NodeTemplateListWarningData o) { buildCounterNodeTemplateListWarningData--; } -core.List buildUnnamed441() => [ +core.List buildUnnamed452() => [ buildNodeTemplateListWarningData(), buildNodeTemplateListWarningData(), ]; -void checkUnnamed441(core.List o) { +void checkUnnamed452(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkNodeTemplateListWarningData(o[0]); checkNodeTemplateListWarningData(o[1]); @@ -25910,7 +26463,7 @@ api.NodeTemplateListWarning buildNodeTemplateListWarning() { buildCounterNodeTemplateListWarning++; if (buildCounterNodeTemplateListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed441(); + o.data = buildUnnamed452(); o.message = 'foo'; } buildCounterNodeTemplateListWarning--; @@ -25924,7 +26477,7 @@ void checkNodeTemplateListWarning(api.NodeTemplateListWarning o) { o.code!, unittest.equals('foo'), ); - checkUnnamed441(o.data!); + checkUnnamed452(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -25939,7 +26492,7 @@ api.NodeTemplateList buildNodeTemplateList() { buildCounterNodeTemplateList++; if (buildCounterNodeTemplateList < 3) { o.id = 'foo'; - o.items = buildUnnamed440(); + o.items = buildUnnamed451(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; @@ -25956,7 +26509,7 @@ void checkNodeTemplateList(api.NodeTemplateList o) { o.id!, unittest.equals('foo'), ); - checkUnnamed440(o.items!); + checkUnnamed451(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -26007,12 +26560,12 @@ void checkNodeTemplateNodeTypeFlexibility( buildCounterNodeTemplateNodeTypeFlexibility--; } -core.List buildUnnamed442() => [ +core.List buildUnnamed453() => [ buildNodeTemplate(), buildNodeTemplate(), ]; -void checkUnnamed442(core.List o) { +void checkUnnamed453(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkNodeTemplate(o[0]); checkNodeTemplate(o[1]); @@ -26047,12 +26600,12 @@ void checkNodeTemplatesScopedListWarningData( buildCounterNodeTemplatesScopedListWarningData--; } -core.List buildUnnamed443() => [ +core.List buildUnnamed454() => [ buildNodeTemplatesScopedListWarningData(), buildNodeTemplatesScopedListWarningData(), ]; -void checkUnnamed443(core.List o) { +void checkUnnamed454(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkNodeTemplatesScopedListWarningData(o[0]); checkNodeTemplatesScopedListWarningData(o[1]); @@ -26064,7 +26617,7 @@ api.NodeTemplatesScopedListWarning buildNodeTemplatesScopedListWarning() { buildCounterNodeTemplatesScopedListWarning++; if (buildCounterNodeTemplatesScopedListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed443(); + o.data = buildUnnamed454(); o.message = 'foo'; } buildCounterNodeTemplatesScopedListWarning--; @@ -26078,7 +26631,7 @@ void checkNodeTemplatesScopedListWarning(api.NodeTemplatesScopedListWarning o) { o.code!, unittest.equals('foo'), ); - checkUnnamed443(o.data!); + checkUnnamed454(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -26092,7 +26645,7 @@ api.NodeTemplatesScopedList buildNodeTemplatesScopedList() { final o = api.NodeTemplatesScopedList(); 
buildCounterNodeTemplatesScopedList++; if (buildCounterNodeTemplatesScopedList < 3) { - o.nodeTemplates = buildUnnamed442(); + o.nodeTemplates = buildUnnamed453(); o.warning = buildNodeTemplatesScopedListWarning(); } buildCounterNodeTemplatesScopedList--; @@ -26102,7 +26655,7 @@ api.NodeTemplatesScopedList buildNodeTemplatesScopedList() { void checkNodeTemplatesScopedList(api.NodeTemplatesScopedList o) { buildCounterNodeTemplatesScopedList++; if (buildCounterNodeTemplatesScopedList < 3) { - checkUnnamed442(o.nodeTemplates!); + checkUnnamed453(o.nodeTemplates!); checkNodeTemplatesScopedListWarning(o.warning!); } buildCounterNodeTemplatesScopedList--; @@ -26182,23 +26735,23 @@ void checkNodeType(api.NodeType o) { buildCounterNodeType--; } -core.Map buildUnnamed444() => { +core.Map buildUnnamed455() => { 'x': buildNodeTypesScopedList(), 'y': buildNodeTypesScopedList(), }; -void checkUnnamed444(core.Map o) { +void checkUnnamed455(core.Map o) { unittest.expect(o, unittest.hasLength(2)); checkNodeTypesScopedList(o['x']!); checkNodeTypesScopedList(o['y']!); } -core.List buildUnnamed445() => [ +core.List buildUnnamed456() => [ 'foo', 'foo', ]; -void checkUnnamed445(core.List o) { +void checkUnnamed456(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -26238,12 +26791,12 @@ void checkNodeTypeAggregatedListWarningData( buildCounterNodeTypeAggregatedListWarningData--; } -core.List buildUnnamed446() => [ +core.List buildUnnamed457() => [ buildNodeTypeAggregatedListWarningData(), buildNodeTypeAggregatedListWarningData(), ]; -void checkUnnamed446(core.List o) { +void checkUnnamed457(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkNodeTypeAggregatedListWarningData(o[0]); checkNodeTypeAggregatedListWarningData(o[1]); @@ -26255,7 +26808,7 @@ api.NodeTypeAggregatedListWarning buildNodeTypeAggregatedListWarning() { buildCounterNodeTypeAggregatedListWarning++; if (buildCounterNodeTypeAggregatedListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed446(); + o.data = buildUnnamed457(); o.message = 'foo'; } buildCounterNodeTypeAggregatedListWarning--; @@ -26269,7 +26822,7 @@ void checkNodeTypeAggregatedListWarning(api.NodeTypeAggregatedListWarning o) { o.code!, unittest.equals('foo'), ); - checkUnnamed446(o.data!); + checkUnnamed457(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -26284,11 +26837,11 @@ api.NodeTypeAggregatedList buildNodeTypeAggregatedList() { buildCounterNodeTypeAggregatedList++; if (buildCounterNodeTypeAggregatedList < 3) { o.id = 'foo'; - o.items = buildUnnamed444(); + o.items = buildUnnamed455(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; - o.unreachables = buildUnnamed445(); + o.unreachables = buildUnnamed456(); o.warning = buildNodeTypeAggregatedListWarning(); } buildCounterNodeTypeAggregatedList--; @@ -26302,7 +26855,7 @@ void checkNodeTypeAggregatedList(api.NodeTypeAggregatedList o) { o.id!, unittest.equals('foo'), ); - checkUnnamed444(o.items!); + checkUnnamed455(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -26315,18 +26868,18 @@ void checkNodeTypeAggregatedList(api.NodeTypeAggregatedList o) { o.selfLink!, unittest.equals('foo'), ); - checkUnnamed445(o.unreachables!); + checkUnnamed456(o.unreachables!); checkNodeTypeAggregatedListWarning(o.warning!); } buildCounterNodeTypeAggregatedList--; } -core.List buildUnnamed447() => [ +core.List buildUnnamed458() => [ buildNodeType(), buildNodeType(), ]; -void checkUnnamed447(core.List o) { +void checkUnnamed458(core.List o) { 
unittest.expect(o, unittest.hasLength(2)); checkNodeType(o[0]); checkNodeType(o[1]); @@ -26359,12 +26912,12 @@ void checkNodeTypeListWarningData(api.NodeTypeListWarningData o) { buildCounterNodeTypeListWarningData--; } -core.List buildUnnamed448() => [ +core.List buildUnnamed459() => [ buildNodeTypeListWarningData(), buildNodeTypeListWarningData(), ]; -void checkUnnamed448(core.List o) { +void checkUnnamed459(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkNodeTypeListWarningData(o[0]); checkNodeTypeListWarningData(o[1]); @@ -26376,7 +26929,7 @@ api.NodeTypeListWarning buildNodeTypeListWarning() { buildCounterNodeTypeListWarning++; if (buildCounterNodeTypeListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed448(); + o.data = buildUnnamed459(); o.message = 'foo'; } buildCounterNodeTypeListWarning--; @@ -26390,7 +26943,7 @@ void checkNodeTypeListWarning(api.NodeTypeListWarning o) { o.code!, unittest.equals('foo'), ); - checkUnnamed448(o.data!); + checkUnnamed459(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -26405,7 +26958,7 @@ api.NodeTypeList buildNodeTypeList() { buildCounterNodeTypeList++; if (buildCounterNodeTypeList < 3) { o.id = 'foo'; - o.items = buildUnnamed447(); + o.items = buildUnnamed458(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; @@ -26422,7 +26975,7 @@ void checkNodeTypeList(api.NodeTypeList o) { o.id!, unittest.equals('foo'), ); - checkUnnamed447(o.items!); + checkUnnamed458(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -26440,12 +26993,12 @@ void checkNodeTypeList(api.NodeTypeList o) { buildCounterNodeTypeList--; } -core.List buildUnnamed449() => [ +core.List buildUnnamed460() => [ buildNodeType(), buildNodeType(), ]; -void checkUnnamed449(core.List o) { +void checkUnnamed460(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkNodeType(o[0]); checkNodeType(o[1]); @@ -26478,12 +27031,12 @@ void checkNodeTypesScopedListWarningData(api.NodeTypesScopedListWarningData o) { buildCounterNodeTypesScopedListWarningData--; } -core.List buildUnnamed450() => [ +core.List buildUnnamed461() => [ buildNodeTypesScopedListWarningData(), buildNodeTypesScopedListWarningData(), ]; -void checkUnnamed450(core.List o) { +void checkUnnamed461(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkNodeTypesScopedListWarningData(o[0]); checkNodeTypesScopedListWarningData(o[1]); @@ -26495,7 +27048,7 @@ api.NodeTypesScopedListWarning buildNodeTypesScopedListWarning() { buildCounterNodeTypesScopedListWarning++; if (buildCounterNodeTypesScopedListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed450(); + o.data = buildUnnamed461(); o.message = 'foo'; } buildCounterNodeTypesScopedListWarning--; @@ -26509,7 +27062,7 @@ void checkNodeTypesScopedListWarning(api.NodeTypesScopedListWarning o) { o.code!, unittest.equals('foo'), ); - checkUnnamed450(o.data!); + checkUnnamed461(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -26523,7 +27076,7 @@ api.NodeTypesScopedList buildNodeTypesScopedList() { final o = api.NodeTypesScopedList(); buildCounterNodeTypesScopedList++; if (buildCounterNodeTypesScopedList < 3) { - o.nodeTypes = buildUnnamed449(); + o.nodeTypes = buildUnnamed460(); o.warning = buildNodeTypesScopedListWarning(); } buildCounterNodeTypesScopedList--; @@ -26533,7 +27086,7 @@ api.NodeTypesScopedList buildNodeTypesScopedList() { void checkNodeTypesScopedList(api.NodeTypesScopedList o) { buildCounterNodeTypesScopedList++; if (buildCounterNodeTypesScopedList < 3) { - 
checkUnnamed449(o.nodeTypes!); + checkUnnamed460(o.nodeTypes!); checkNodeTypesScopedListWarning(o.warning!); } buildCounterNodeTypesScopedList--; @@ -26633,12 +27186,12 @@ void checkNotificationEndpointGrpcSettings( buildCounterNotificationEndpointGrpcSettings--; } -core.List buildUnnamed451() => [ +core.List buildUnnamed462() => [ buildNotificationEndpoint(), buildNotificationEndpoint(), ]; -void checkUnnamed451(core.List o) { +void checkUnnamed462(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkNotificationEndpoint(o[0]); checkNotificationEndpoint(o[1]); @@ -26673,12 +27226,12 @@ void checkNotificationEndpointListWarningData( buildCounterNotificationEndpointListWarningData--; } -core.List buildUnnamed452() => [ +core.List buildUnnamed463() => [ buildNotificationEndpointListWarningData(), buildNotificationEndpointListWarningData(), ]; -void checkUnnamed452(core.List o) { +void checkUnnamed463(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkNotificationEndpointListWarningData(o[0]); checkNotificationEndpointListWarningData(o[1]); @@ -26690,7 +27243,7 @@ api.NotificationEndpointListWarning buildNotificationEndpointListWarning() { buildCounterNotificationEndpointListWarning++; if (buildCounterNotificationEndpointListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed452(); + o.data = buildUnnamed463(); o.message = 'foo'; } buildCounterNotificationEndpointListWarning--; @@ -26705,7 +27258,7 @@ void checkNotificationEndpointListWarning( o.code!, unittest.equals('foo'), ); - checkUnnamed452(o.data!); + checkUnnamed463(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -26720,7 +27273,7 @@ api.NotificationEndpointList buildNotificationEndpointList() { buildCounterNotificationEndpointList++; if (buildCounterNotificationEndpointList < 3) { o.id = 'foo'; - o.items = buildUnnamed451(); + o.items = buildUnnamed462(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; @@ -26737,7 +27290,7 @@ void checkNotificationEndpointList(api.NotificationEndpointList o) { o.id!, unittest.equals('foo'), ); - checkUnnamed451(o.items!); + checkUnnamed462(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -26781,12 +27334,12 @@ void checkOperationErrorErrorsErrorDetails( buildCounterOperationErrorErrorsErrorDetails--; } -core.List buildUnnamed453() => [ +core.List buildUnnamed464() => [ buildOperationErrorErrorsErrorDetails(), buildOperationErrorErrorsErrorDetails(), ]; -void checkUnnamed453(core.List o) { +void checkUnnamed464(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkOperationErrorErrorsErrorDetails(o[0]); checkOperationErrorErrorsErrorDetails(o[1]); @@ -26798,7 +27351,7 @@ api.OperationErrorErrors buildOperationErrorErrors() { buildCounterOperationErrorErrors++; if (buildCounterOperationErrorErrors < 3) { o.code = 'foo'; - o.errorDetails = buildUnnamed453(); + o.errorDetails = buildUnnamed464(); o.location = 'foo'; o.message = 'foo'; } @@ -26813,7 +27366,7 @@ void checkOperationErrorErrors(api.OperationErrorErrors o) { o.code!, unittest.equals('foo'), ); - checkUnnamed453(o.errorDetails!); + checkUnnamed464(o.errorDetails!); unittest.expect( o.location!, unittest.equals('foo'), @@ -26826,12 +27379,12 @@ void checkOperationErrorErrors(api.OperationErrorErrors o) { buildCounterOperationErrorErrors--; } -core.List buildUnnamed454() => [ +core.List buildUnnamed465() => [ buildOperationErrorErrors(), buildOperationErrorErrors(), ]; -void checkUnnamed454(core.List o) { +void checkUnnamed465(core.List o) { unittest.expect(o, 
unittest.hasLength(2)); checkOperationErrorErrors(o[0]); checkOperationErrorErrors(o[1]); @@ -26842,7 +27395,7 @@ api.OperationError buildOperationError() { final o = api.OperationError(); buildCounterOperationError++; if (buildCounterOperationError < 3) { - o.errors = buildUnnamed454(); + o.errors = buildUnnamed465(); } buildCounterOperationError--; return o; @@ -26851,7 +27404,7 @@ api.OperationError buildOperationError() { void checkOperationError(api.OperationError o) { buildCounterOperationError++; if (buildCounterOperationError < 3) { - checkUnnamed454(o.errors!); + checkUnnamed465(o.errors!); } buildCounterOperationError--; } @@ -26883,12 +27436,12 @@ void checkOperationWarningsData(api.OperationWarningsData o) { buildCounterOperationWarningsData--; } -core.List buildUnnamed455() => [ +core.List buildUnnamed466() => [ buildOperationWarningsData(), buildOperationWarningsData(), ]; -void checkUnnamed455(core.List o) { +void checkUnnamed466(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkOperationWarningsData(o[0]); checkOperationWarningsData(o[1]); @@ -26900,7 +27453,7 @@ api.OperationWarnings buildOperationWarnings() { buildCounterOperationWarnings++; if (buildCounterOperationWarnings < 3) { o.code = 'foo'; - o.data = buildUnnamed455(); + o.data = buildUnnamed466(); o.message = 'foo'; } buildCounterOperationWarnings--; @@ -26914,7 +27467,7 @@ void checkOperationWarnings(api.OperationWarnings o) { o.code!, unittest.equals('foo'), ); - checkUnnamed455(o.data!); + checkUnnamed466(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -26923,12 +27476,12 @@ void checkOperationWarnings(api.OperationWarnings o) { buildCounterOperationWarnings--; } -core.List buildUnnamed456() => [ +core.List buildUnnamed467() => [ buildOperationWarnings(), buildOperationWarnings(), ]; -void checkUnnamed456(core.List o) { +void checkUnnamed467(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkOperationWarnings(o[0]); checkOperationWarnings(o[1]); @@ -26965,7 +27518,7 @@ api.Operation buildOperation() { o.targetId = 'foo'; o.targetLink = 'foo'; o.user = 'foo'; - o.warnings = buildUnnamed456(); + o.warnings = buildUnnamed467(); o.zone = 'foo'; } buildCounterOperation--; @@ -27064,7 +27617,7 @@ void checkOperation(api.Operation o) { o.user!, unittest.equals('foo'), ); - checkUnnamed456(o.warnings!); + checkUnnamed467(o.warnings!); unittest.expect( o.zone!, unittest.equals('foo'), @@ -27073,23 +27626,23 @@ void checkOperation(api.Operation o) { buildCounterOperation--; } -core.Map buildUnnamed457() => { +core.Map buildUnnamed468() => { 'x': buildOperationsScopedList(), 'y': buildOperationsScopedList(), }; -void checkUnnamed457(core.Map o) { +void checkUnnamed468(core.Map o) { unittest.expect(o, unittest.hasLength(2)); checkOperationsScopedList(o['x']!); checkOperationsScopedList(o['y']!); } -core.List buildUnnamed458() => [ +core.List buildUnnamed469() => [ 'foo', 'foo', ]; -void checkUnnamed458(core.List o) { +void checkUnnamed469(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -27130,12 +27683,12 @@ void checkOperationAggregatedListWarningData( buildCounterOperationAggregatedListWarningData--; } -core.List buildUnnamed459() => [ +core.List buildUnnamed470() => [ buildOperationAggregatedListWarningData(), buildOperationAggregatedListWarningData(), ]; -void checkUnnamed459(core.List o) { +void checkUnnamed470(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkOperationAggregatedListWarningData(o[0]); 
checkOperationAggregatedListWarningData(o[1]); @@ -27147,7 +27700,7 @@ api.OperationAggregatedListWarning buildOperationAggregatedListWarning() { buildCounterOperationAggregatedListWarning++; if (buildCounterOperationAggregatedListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed459(); + o.data = buildUnnamed470(); o.message = 'foo'; } buildCounterOperationAggregatedListWarning--; @@ -27161,7 +27714,7 @@ void checkOperationAggregatedListWarning(api.OperationAggregatedListWarning o) { o.code!, unittest.equals('foo'), ); - checkUnnamed459(o.data!); + checkUnnamed470(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -27176,11 +27729,11 @@ api.OperationAggregatedList buildOperationAggregatedList() { buildCounterOperationAggregatedList++; if (buildCounterOperationAggregatedList < 3) { o.id = 'foo'; - o.items = buildUnnamed457(); + o.items = buildUnnamed468(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; - o.unreachables = buildUnnamed458(); + o.unreachables = buildUnnamed469(); o.warning = buildOperationAggregatedListWarning(); } buildCounterOperationAggregatedList--; @@ -27194,7 +27747,7 @@ void checkOperationAggregatedList(api.OperationAggregatedList o) { o.id!, unittest.equals('foo'), ); - checkUnnamed457(o.items!); + checkUnnamed468(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -27207,18 +27760,18 @@ void checkOperationAggregatedList(api.OperationAggregatedList o) { o.selfLink!, unittest.equals('foo'), ); - checkUnnamed458(o.unreachables!); + checkUnnamed469(o.unreachables!); checkOperationAggregatedListWarning(o.warning!); } buildCounterOperationAggregatedList--; } -core.List buildUnnamed460() => [ +core.List buildUnnamed471() => [ buildOperation(), buildOperation(), ]; -void checkUnnamed460(core.List o) { +void checkUnnamed471(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkOperation(o[0]); checkOperation(o[1]); @@ -27251,12 +27804,12 @@ void checkOperationListWarningData(api.OperationListWarningData o) { buildCounterOperationListWarningData--; } -core.List buildUnnamed461() => [ +core.List buildUnnamed472() => [ buildOperationListWarningData(), buildOperationListWarningData(), ]; -void checkUnnamed461(core.List o) { +void checkUnnamed472(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkOperationListWarningData(o[0]); checkOperationListWarningData(o[1]); @@ -27268,7 +27821,7 @@ api.OperationListWarning buildOperationListWarning() { buildCounterOperationListWarning++; if (buildCounterOperationListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed461(); + o.data = buildUnnamed472(); o.message = 'foo'; } buildCounterOperationListWarning--; @@ -27282,7 +27835,7 @@ void checkOperationListWarning(api.OperationListWarning o) { o.code!, unittest.equals('foo'), ); - checkUnnamed461(o.data!); + checkUnnamed472(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -27297,7 +27850,7 @@ api.OperationList buildOperationList() { buildCounterOperationList++; if (buildCounterOperationList < 3) { o.id = 'foo'; - o.items = buildUnnamed460(); + o.items = buildUnnamed471(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; @@ -27314,7 +27867,7 @@ void checkOperationList(api.OperationList o) { o.id!, unittest.equals('foo'), ); - checkUnnamed460(o.items!); + checkUnnamed471(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -27332,12 +27885,12 @@ void checkOperationList(api.OperationList o) { buildCounterOperationList--; } -core.List buildUnnamed462() => [ +core.List buildUnnamed473() => [ 
buildOperation(), buildOperation(), ]; -void checkUnnamed462(core.List o) { +void checkUnnamed473(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkOperation(o[0]); checkOperation(o[1]); @@ -27371,12 +27924,12 @@ void checkOperationsScopedListWarningData( buildCounterOperationsScopedListWarningData--; } -core.List buildUnnamed463() => [ +core.List buildUnnamed474() => [ buildOperationsScopedListWarningData(), buildOperationsScopedListWarningData(), ]; -void checkUnnamed463(core.List o) { +void checkUnnamed474(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkOperationsScopedListWarningData(o[0]); checkOperationsScopedListWarningData(o[1]); @@ -27388,7 +27941,7 @@ api.OperationsScopedListWarning buildOperationsScopedListWarning() { buildCounterOperationsScopedListWarning++; if (buildCounterOperationsScopedListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed463(); + o.data = buildUnnamed474(); o.message = 'foo'; } buildCounterOperationsScopedListWarning--; @@ -27402,7 +27955,7 @@ void checkOperationsScopedListWarning(api.OperationsScopedListWarning o) { o.code!, unittest.equals('foo'), ); - checkUnnamed463(o.data!); + checkUnnamed474(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -27416,7 +27969,7 @@ api.OperationsScopedList buildOperationsScopedList() { final o = api.OperationsScopedList(); buildCounterOperationsScopedList++; if (buildCounterOperationsScopedList < 3) { - o.operations = buildUnnamed462(); + o.operations = buildUnnamed473(); o.warning = buildOperationsScopedListWarning(); } buildCounterOperationsScopedList--; @@ -27426,7 +27979,7 @@ api.OperationsScopedList buildOperationsScopedList() { void checkOperationsScopedList(api.OperationsScopedList o) { buildCounterOperationsScopedList++; if (buildCounterOperationsScopedList < 3) { - checkUnnamed462(o.operations!); + checkUnnamed473(o.operations!); checkOperationsScopedListWarning(o.warning!); } buildCounterOperationsScopedList--; @@ -27615,23 +28168,23 @@ void checkPacketMirroring(api.PacketMirroring o) { buildCounterPacketMirroring--; } -core.Map buildUnnamed464() => { +core.Map buildUnnamed475() => { 'x': buildPacketMirroringsScopedList(), 'y': buildPacketMirroringsScopedList(), }; -void checkUnnamed464(core.Map o) { +void checkUnnamed475(core.Map o) { unittest.expect(o, unittest.hasLength(2)); checkPacketMirroringsScopedList(o['x']!); checkPacketMirroringsScopedList(o['y']!); } -core.List buildUnnamed465() => [ +core.List buildUnnamed476() => [ 'foo', 'foo', ]; -void checkUnnamed465(core.List o) { +void checkUnnamed476(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -27672,12 +28225,12 @@ void checkPacketMirroringAggregatedListWarningData( buildCounterPacketMirroringAggregatedListWarningData--; } -core.List buildUnnamed466() => [ +core.List buildUnnamed477() => [ buildPacketMirroringAggregatedListWarningData(), buildPacketMirroringAggregatedListWarningData(), ]; -void checkUnnamed466( +void checkUnnamed477( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkPacketMirroringAggregatedListWarningData(o[0]); @@ -27691,7 +28244,7 @@ api.PacketMirroringAggregatedListWarning buildCounterPacketMirroringAggregatedListWarning++; if (buildCounterPacketMirroringAggregatedListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed466(); + o.data = buildUnnamed477(); o.message = 'foo'; } buildCounterPacketMirroringAggregatedListWarning--; @@ -27706,7 +28259,7 @@ void checkPacketMirroringAggregatedListWarning( o.code!, unittest.equals('foo'), ); - 
checkUnnamed466(o.data!); + checkUnnamed477(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -27721,11 +28274,11 @@ api.PacketMirroringAggregatedList buildPacketMirroringAggregatedList() { buildCounterPacketMirroringAggregatedList++; if (buildCounterPacketMirroringAggregatedList < 3) { o.id = 'foo'; - o.items = buildUnnamed464(); + o.items = buildUnnamed475(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; - o.unreachables = buildUnnamed465(); + o.unreachables = buildUnnamed476(); o.warning = buildPacketMirroringAggregatedListWarning(); } buildCounterPacketMirroringAggregatedList--; @@ -27739,7 +28292,7 @@ void checkPacketMirroringAggregatedList(api.PacketMirroringAggregatedList o) { o.id!, unittest.equals('foo'), ); - checkUnnamed464(o.items!); + checkUnnamed475(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -27752,18 +28305,18 @@ void checkPacketMirroringAggregatedList(api.PacketMirroringAggregatedList o) { o.selfLink!, unittest.equals('foo'), ); - checkUnnamed465(o.unreachables!); + checkUnnamed476(o.unreachables!); checkPacketMirroringAggregatedListWarning(o.warning!); } buildCounterPacketMirroringAggregatedList--; } -core.List buildUnnamed467() => [ +core.List buildUnnamed478() => [ 'foo', 'foo', ]; -void checkUnnamed467(core.List o) { +void checkUnnamed478(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -27775,12 +28328,12 @@ void checkUnnamed467(core.List o) { ); } -core.List buildUnnamed468() => [ +core.List buildUnnamed479() => [ 'foo', 'foo', ]; -void checkUnnamed468(core.List o) { +void checkUnnamed479(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -27797,8 +28350,8 @@ api.PacketMirroringFilter buildPacketMirroringFilter() { final o = api.PacketMirroringFilter(); buildCounterPacketMirroringFilter++; if (buildCounterPacketMirroringFilter < 3) { - o.IPProtocols = buildUnnamed467(); - o.cidrRanges = buildUnnamed468(); + o.IPProtocols = buildUnnamed478(); + o.cidrRanges = buildUnnamed479(); o.direction = 'foo'; } buildCounterPacketMirroringFilter--; @@ -27808,8 +28361,8 @@ api.PacketMirroringFilter buildPacketMirroringFilter() { void checkPacketMirroringFilter(api.PacketMirroringFilter o) { buildCounterPacketMirroringFilter++; if (buildCounterPacketMirroringFilter < 3) { - checkUnnamed467(o.IPProtocols!); - checkUnnamed468(o.cidrRanges!); + checkUnnamed478(o.IPProtocols!); + checkUnnamed479(o.cidrRanges!); unittest.expect( o.direction!, unittest.equals('foo'), @@ -27846,12 +28399,12 @@ void checkPacketMirroringForwardingRuleInfo( buildCounterPacketMirroringForwardingRuleInfo--; } -core.List buildUnnamed469() => [ +core.List buildUnnamed480() => [ buildPacketMirroring(), buildPacketMirroring(), ]; -void checkUnnamed469(core.List o) { +void checkUnnamed480(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkPacketMirroring(o[0]); checkPacketMirroring(o[1]); @@ -27884,12 +28437,12 @@ void checkPacketMirroringListWarningData(api.PacketMirroringListWarningData o) { buildCounterPacketMirroringListWarningData--; } -core.List buildUnnamed470() => [ +core.List buildUnnamed481() => [ buildPacketMirroringListWarningData(), buildPacketMirroringListWarningData(), ]; -void checkUnnamed470(core.List o) { +void checkUnnamed481(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkPacketMirroringListWarningData(o[0]); checkPacketMirroringListWarningData(o[1]); @@ -27901,7 +28454,7 @@ api.PacketMirroringListWarning buildPacketMirroringListWarning() { 
buildCounterPacketMirroringListWarning++; if (buildCounterPacketMirroringListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed470(); + o.data = buildUnnamed481(); o.message = 'foo'; } buildCounterPacketMirroringListWarning--; @@ -27915,7 +28468,7 @@ void checkPacketMirroringListWarning(api.PacketMirroringListWarning o) { o.code!, unittest.equals('foo'), ); - checkUnnamed470(o.data!); + checkUnnamed481(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -27930,7 +28483,7 @@ api.PacketMirroringList buildPacketMirroringList() { buildCounterPacketMirroringList++; if (buildCounterPacketMirroringList < 3) { o.id = 'foo'; - o.items = buildUnnamed469(); + o.items = buildUnnamed480(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; @@ -27947,7 +28500,7 @@ void checkPacketMirroringList(api.PacketMirroringList o) { o.id!, unittest.equals('foo'), ); - checkUnnamed469(o.items!); + checkUnnamed480(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -27966,12 +28519,12 @@ void checkPacketMirroringList(api.PacketMirroringList o) { } core.List - buildUnnamed471() => [ + buildUnnamed482() => [ buildPacketMirroringMirroredResourceInfoInstanceInfo(), buildPacketMirroringMirroredResourceInfoInstanceInfo(), ]; -void checkUnnamed471( +void checkUnnamed482( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkPacketMirroringMirroredResourceInfoInstanceInfo(o[0]); @@ -27979,24 +28532,24 @@ void checkUnnamed471( } core.List - buildUnnamed472() => [ + buildUnnamed483() => [ buildPacketMirroringMirroredResourceInfoSubnetInfo(), buildPacketMirroringMirroredResourceInfoSubnetInfo(), ]; -void checkUnnamed472( +void checkUnnamed483( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkPacketMirroringMirroredResourceInfoSubnetInfo(o[0]); checkPacketMirroringMirroredResourceInfoSubnetInfo(o[1]); } -core.List buildUnnamed473() => [ +core.List buildUnnamed484() => [ 'foo', 'foo', ]; -void checkUnnamed473(core.List o) { +void checkUnnamed484(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -28014,9 +28567,9 @@ api.PacketMirroringMirroredResourceInfo final o = api.PacketMirroringMirroredResourceInfo(); buildCounterPacketMirroringMirroredResourceInfo++; if (buildCounterPacketMirroringMirroredResourceInfo < 3) { - o.instances = buildUnnamed471(); - o.subnetworks = buildUnnamed472(); - o.tags = buildUnnamed473(); + o.instances = buildUnnamed482(); + o.subnetworks = buildUnnamed483(); + o.tags = buildUnnamed484(); } buildCounterPacketMirroringMirroredResourceInfo--; return o; @@ -28026,9 +28579,9 @@ void checkPacketMirroringMirroredResourceInfo( api.PacketMirroringMirroredResourceInfo o) { buildCounterPacketMirroringMirroredResourceInfo++; if (buildCounterPacketMirroringMirroredResourceInfo < 3) { - checkUnnamed471(o.instances!); - checkUnnamed472(o.subnetworks!); - checkUnnamed473(o.tags!); + checkUnnamed482(o.instances!); + checkUnnamed483(o.subnetworks!); + checkUnnamed484(o.tags!); } buildCounterPacketMirroringMirroredResourceInfo--; } @@ -28118,12 +28671,12 @@ void checkPacketMirroringNetworkInfo(api.PacketMirroringNetworkInfo o) { buildCounterPacketMirroringNetworkInfo--; } -core.List buildUnnamed474() => [ +core.List buildUnnamed485() => [ buildPacketMirroring(), buildPacketMirroring(), ]; -void checkUnnamed474(core.List o) { +void checkUnnamed485(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkPacketMirroring(o[0]); checkPacketMirroring(o[1]); @@ -28158,12 +28711,12 @@ void 
checkPacketMirroringsScopedListWarningData( buildCounterPacketMirroringsScopedListWarningData--; } -core.List buildUnnamed475() => [ +core.List buildUnnamed486() => [ buildPacketMirroringsScopedListWarningData(), buildPacketMirroringsScopedListWarningData(), ]; -void checkUnnamed475(core.List o) { +void checkUnnamed486(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkPacketMirroringsScopedListWarningData(o[0]); checkPacketMirroringsScopedListWarningData(o[1]); @@ -28175,7 +28728,7 @@ api.PacketMirroringsScopedListWarning buildPacketMirroringsScopedListWarning() { buildCounterPacketMirroringsScopedListWarning++; if (buildCounterPacketMirroringsScopedListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed475(); + o.data = buildUnnamed486(); o.message = 'foo'; } buildCounterPacketMirroringsScopedListWarning--; @@ -28190,7 +28743,7 @@ void checkPacketMirroringsScopedListWarning( o.code!, unittest.equals('foo'), ); - checkUnnamed475(o.data!); + checkUnnamed486(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -28204,7 +28757,7 @@ api.PacketMirroringsScopedList buildPacketMirroringsScopedList() { final o = api.PacketMirroringsScopedList(); buildCounterPacketMirroringsScopedList++; if (buildCounterPacketMirroringsScopedList < 3) { - o.packetMirrorings = buildUnnamed474(); + o.packetMirrorings = buildUnnamed485(); o.warning = buildPacketMirroringsScopedListWarning(); } buildCounterPacketMirroringsScopedList--; @@ -28214,29 +28767,29 @@ api.PacketMirroringsScopedList buildPacketMirroringsScopedList() { void checkPacketMirroringsScopedList(api.PacketMirroringsScopedList o) { buildCounterPacketMirroringsScopedList++; if (buildCounterPacketMirroringsScopedList < 3) { - checkUnnamed474(o.packetMirrorings!); + checkUnnamed485(o.packetMirrorings!); checkPacketMirroringsScopedListWarning(o.warning!); } buildCounterPacketMirroringsScopedList--; } -core.List buildUnnamed476() => [ +core.List buildUnnamed487() => [ buildPathRule(), buildPathRule(), ]; -void checkUnnamed476(core.List o) { +void checkUnnamed487(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkPathRule(o[0]); checkPathRule(o[1]); } -core.List buildUnnamed477() => [ +core.List buildUnnamed488() => [ buildHttpRouteRule(), buildHttpRouteRule(), ]; -void checkUnnamed477(core.List o) { +void checkUnnamed488(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkHttpRouteRule(o[0]); checkHttpRouteRule(o[1]); @@ -28254,8 +28807,8 @@ api.PathMatcher buildPathMatcher() { o.description = 'foo'; o.headerAction = buildHttpHeaderAction(); o.name = 'foo'; - o.pathRules = buildUnnamed476(); - o.routeRules = buildUnnamed477(); + o.pathRules = buildUnnamed487(); + o.routeRules = buildUnnamed488(); } buildCounterPathMatcher--; return o; @@ -28280,18 +28833,18 @@ void checkPathMatcher(api.PathMatcher o) { o.name!, unittest.equals('foo'), ); - checkUnnamed476(o.pathRules!); - checkUnnamed477(o.routeRules!); + checkUnnamed487(o.pathRules!); + checkUnnamed488(o.routeRules!); } buildCounterPathMatcher--; } -core.List buildUnnamed478() => [ +core.List buildUnnamed489() => [ 'foo', 'foo', ]; -void checkUnnamed478(core.List o) { +void checkUnnamed489(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -28309,7 +28862,7 @@ api.PathRule buildPathRule() { buildCounterPathRule++; if (buildCounterPathRule < 3) { o.customErrorResponsePolicy = buildCustomErrorResponsePolicy(); - o.paths = buildUnnamed478(); + o.paths = buildUnnamed489(); o.routeAction = buildHttpRouteAction(); o.service = 
'foo'; o.urlRedirect = buildHttpRedirectAction(); @@ -28322,7 +28875,7 @@ void checkPathRule(api.PathRule o) { buildCounterPathRule++; if (buildCounterPathRule < 3) { checkCustomErrorResponsePolicy(o.customErrorResponsePolicy!); - checkUnnamed478(o.paths!); + checkUnnamed489(o.paths!); checkHttpRouteAction(o.routeAction!); unittest.expect( o.service!, @@ -28367,48 +28920,36 @@ void checkPerInstanceConfig(api.PerInstanceConfig o) { buildCounterPerInstanceConfig--; } -core.List buildUnnamed479() => [ +core.List buildUnnamed490() => [ buildAuditConfig(), buildAuditConfig(), ]; -void checkUnnamed479(core.List o) { +void checkUnnamed490(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkAuditConfig(o[0]); checkAuditConfig(o[1]); } -core.List buildUnnamed480() => [ +core.List buildUnnamed491() => [ buildBinding(), buildBinding(), ]; -void checkUnnamed480(core.List o) { +void checkUnnamed491(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkBinding(o[0]); checkBinding(o[1]); } -core.List buildUnnamed481() => [ - buildRule(), - buildRule(), - ]; - -void checkUnnamed481(core.List o) { - unittest.expect(o, unittest.hasLength(2)); - checkRule(o[0]); - checkRule(o[1]); -} - core.int buildCounterPolicy = 0; api.Policy buildPolicy() { final o = api.Policy(); buildCounterPolicy++; if (buildCounterPolicy < 3) { - o.auditConfigs = buildUnnamed479(); - o.bindings = buildUnnamed480(); + o.auditConfigs = buildUnnamed490(); + o.bindings = buildUnnamed491(); o.etag = 'foo'; - o.rules = buildUnnamed481(); o.version = 42; } buildCounterPolicy--; @@ -28418,13 +28959,12 @@ api.Policy buildPolicy() { void checkPolicy(api.Policy o) { buildCounterPolicy++; if (buildCounterPolicy < 3) { - checkUnnamed479(o.auditConfigs!); - checkUnnamed480(o.bindings!); + checkUnnamed490(o.auditConfigs!); + checkUnnamed491(o.bindings!); unittest.expect( o.etag!, unittest.equals('foo'), ); - checkUnnamed481(o.rules!); unittest.expect( o.version!, unittest.equals(42), @@ -28433,12 +28973,12 @@ void checkPolicy(api.Policy o) { buildCounterPolicy--; } -core.List buildUnnamed482() => [ +core.List buildUnnamed492() => [ buildWafExpressionSet(), buildWafExpressionSet(), ]; -void checkUnnamed482(core.List o) { +void checkUnnamed492(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkWafExpressionSet(o[0]); checkWafExpressionSet(o[1]); @@ -28449,7 +28989,7 @@ api.PreconfiguredWafSet buildPreconfiguredWafSet() { final o = api.PreconfiguredWafSet(); buildCounterPreconfiguredWafSet++; if (buildCounterPreconfiguredWafSet < 3) { - o.expressionSets = buildUnnamed482(); + o.expressionSets = buildUnnamed492(); } buildCounterPreconfiguredWafSet--; return o; @@ -28458,54 +28998,54 @@ api.PreconfiguredWafSet buildPreconfiguredWafSet() { void checkPreconfiguredWafSet(api.PreconfiguredWafSet o) { buildCounterPreconfiguredWafSet++; if (buildCounterPreconfiguredWafSet < 3) { - checkUnnamed482(o.expressionSets!); + checkUnnamed492(o.expressionSets!); } buildCounterPreconfiguredWafSet--; } -core.Map buildUnnamed483() => { +core.Map buildUnnamed493() => { 'x': buildPreservedStatePreservedDisk(), 'y': buildPreservedStatePreservedDisk(), }; -void checkUnnamed483(core.Map o) { +void checkUnnamed493(core.Map o) { unittest.expect(o, unittest.hasLength(2)); checkPreservedStatePreservedDisk(o['x']!); checkPreservedStatePreservedDisk(o['y']!); } -core.Map buildUnnamed484() => +core.Map buildUnnamed494() => { 'x': buildPreservedStatePreservedNetworkIp(), 'y': buildPreservedStatePreservedNetworkIp(), }; -void checkUnnamed484( +void 
checkUnnamed494( core.Map o) { unittest.expect(o, unittest.hasLength(2)); checkPreservedStatePreservedNetworkIp(o['x']!); checkPreservedStatePreservedNetworkIp(o['y']!); } -core.Map buildUnnamed485() => +core.Map buildUnnamed495() => { 'x': buildPreservedStatePreservedNetworkIp(), 'y': buildPreservedStatePreservedNetworkIp(), }; -void checkUnnamed485( +void checkUnnamed495( core.Map o) { unittest.expect(o, unittest.hasLength(2)); checkPreservedStatePreservedNetworkIp(o['x']!); checkPreservedStatePreservedNetworkIp(o['y']!); } -core.Map buildUnnamed486() => { +core.Map buildUnnamed496() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed486(core.Map o) { +void checkUnnamed496(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -28522,10 +29062,10 @@ api.PreservedState buildPreservedState() { final o = api.PreservedState(); buildCounterPreservedState++; if (buildCounterPreservedState < 3) { - o.disks = buildUnnamed483(); - o.externalIPs = buildUnnamed484(); - o.internalIPs = buildUnnamed485(); - o.metadata = buildUnnamed486(); + o.disks = buildUnnamed493(); + o.externalIPs = buildUnnamed494(); + o.internalIPs = buildUnnamed495(); + o.metadata = buildUnnamed496(); } buildCounterPreservedState--; return o; @@ -28534,10 +29074,10 @@ api.PreservedState buildPreservedState() { void checkPreservedState(api.PreservedState o) { buildCounterPreservedState++; if (buildCounterPreservedState < 3) { - checkUnnamed483(o.disks!); - checkUnnamed484(o.externalIPs!); - checkUnnamed485(o.internalIPs!); - checkUnnamed486(o.metadata!); + checkUnnamed493(o.disks!); + checkUnnamed494(o.externalIPs!); + checkUnnamed495(o.internalIPs!); + checkUnnamed496(o.metadata!); } buildCounterPreservedState--; } @@ -28628,12 +29168,12 @@ void checkPreservedStatePreservedNetworkIpIpAddress( buildCounterPreservedStatePreservedNetworkIpIpAddress--; } -core.List buildUnnamed487() => [ +core.List buildUnnamed497() => [ 'foo', 'foo', ]; -void checkUnnamed487(core.List o) { +void checkUnnamed497(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -28645,12 +29185,12 @@ void checkUnnamed487(core.List o) { ); } -core.List buildUnnamed488() => [ +core.List buildUnnamed498() => [ buildQuota(), buildQuota(), ]; -void checkUnnamed488(core.List o) { +void checkUnnamed498(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkQuota(o[0]); checkQuota(o[1]); @@ -28667,11 +29207,11 @@ api.Project buildProject() { o.defaultNetworkTier = 'foo'; o.defaultServiceAccount = 'foo'; o.description = 'foo'; - o.enabledFeatures = buildUnnamed487(); + o.enabledFeatures = buildUnnamed497(); o.id = 'foo'; o.kind = 'foo'; o.name = 'foo'; - o.quotas = buildUnnamed488(); + o.quotas = buildUnnamed498(); o.selfLink = 'foo'; o.usageExportLocation = buildUsageExportLocation(); o.vmDnsSetting = 'foo'; @@ -28705,7 +29245,7 @@ void checkProject(api.Project o) { o.description!, unittest.equals('foo'), ); - checkUnnamed487(o.enabledFeatures!); + checkUnnamed497(o.enabledFeatures!); unittest.expect( o.id!, unittest.equals('foo'), @@ -28718,7 +29258,7 @@ void checkProject(api.Project o) { o.name!, unittest.equals('foo'), ); - checkUnnamed488(o.quotas!); + checkUnnamed498(o.quotas!); unittest.expect( o.selfLink!, unittest.equals('foo'), @@ -28776,12 +29316,12 @@ void checkProjectsEnableXpnResourceRequest( buildCounterProjectsEnableXpnResourceRequest--; } -core.List buildUnnamed489() => [ +core.List buildUnnamed499() => [ buildXpnResourceId(), buildXpnResourceId(), ]; -void checkUnnamed489(core.List o) 
{ +void checkUnnamed499(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkXpnResourceId(o[0]); checkXpnResourceId(o[1]); @@ -28794,7 +29334,7 @@ api.ProjectsGetXpnResources buildProjectsGetXpnResources() { if (buildCounterProjectsGetXpnResources < 3) { o.kind = 'foo'; o.nextPageToken = 'foo'; - o.resources = buildUnnamed489(); + o.resources = buildUnnamed499(); } buildCounterProjectsGetXpnResources--; return o; @@ -28811,7 +29351,7 @@ void checkProjectsGetXpnResources(api.ProjectsGetXpnResources o) { o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed489(o.resources!); + checkUnnamed499(o.resources!); } buildCounterProjectsGetXpnResources--; } @@ -28885,13 +29425,13 @@ void checkProjectsSetDefaultNetworkTierRequest( buildCounterProjectsSetDefaultNetworkTierRequest--; } -core.List buildUnnamed490() => +core.List buildUnnamed500() => [ buildPublicAdvertisedPrefixPublicDelegatedPrefix(), buildPublicAdvertisedPrefixPublicDelegatedPrefix(), ]; -void checkUnnamed490( +void checkUnnamed500( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkPublicAdvertisedPrefixPublicDelegatedPrefix(o[0]); @@ -28913,7 +29453,7 @@ api.PublicAdvertisedPrefix buildPublicAdvertisedPrefix() { o.kind = 'foo'; o.name = 'foo'; o.pdpScope = 'foo'; - o.publicDelegatedPrefixs = buildUnnamed490(); + o.publicDelegatedPrefixs = buildUnnamed500(); o.selfLink = 'foo'; o.sharedSecret = 'foo'; o.status = 'foo'; @@ -28965,7 +29505,7 @@ void checkPublicAdvertisedPrefix(api.PublicAdvertisedPrefix o) { o.pdpScope!, unittest.equals('foo'), ); - checkUnnamed490(o.publicDelegatedPrefixs!); + checkUnnamed500(o.publicDelegatedPrefixs!); unittest.expect( o.selfLink!, unittest.equals('foo'), @@ -28982,12 +29522,12 @@ void checkPublicAdvertisedPrefix(api.PublicAdvertisedPrefix o) { buildCounterPublicAdvertisedPrefix--; } -core.List buildUnnamed491() => [ +core.List buildUnnamed501() => [ buildPublicAdvertisedPrefix(), buildPublicAdvertisedPrefix(), ]; -void checkUnnamed491(core.List o) { +void checkUnnamed501(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkPublicAdvertisedPrefix(o[0]); checkPublicAdvertisedPrefix(o[1]); @@ -29022,12 +29562,12 @@ void checkPublicAdvertisedPrefixListWarningData( buildCounterPublicAdvertisedPrefixListWarningData--; } -core.List buildUnnamed492() => [ +core.List buildUnnamed502() => [ buildPublicAdvertisedPrefixListWarningData(), buildPublicAdvertisedPrefixListWarningData(), ]; -void checkUnnamed492(core.List o) { +void checkUnnamed502(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkPublicAdvertisedPrefixListWarningData(o[0]); checkPublicAdvertisedPrefixListWarningData(o[1]); @@ -29039,7 +29579,7 @@ api.PublicAdvertisedPrefixListWarning buildPublicAdvertisedPrefixListWarning() { buildCounterPublicAdvertisedPrefixListWarning++; if (buildCounterPublicAdvertisedPrefixListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed492(); + o.data = buildUnnamed502(); o.message = 'foo'; } buildCounterPublicAdvertisedPrefixListWarning--; @@ -29054,7 +29594,7 @@ void checkPublicAdvertisedPrefixListWarning( o.code!, unittest.equals('foo'), ); - checkUnnamed492(o.data!); + checkUnnamed502(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -29069,7 +29609,7 @@ api.PublicAdvertisedPrefixList buildPublicAdvertisedPrefixList() { buildCounterPublicAdvertisedPrefixList++; if (buildCounterPublicAdvertisedPrefixList < 3) { o.id = 'foo'; - o.items = buildUnnamed491(); + o.items = buildUnnamed501(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; 
@@ -29086,7 +29626,7 @@ void checkPublicAdvertisedPrefixList(api.PublicAdvertisedPrefixList o) { o.id!, unittest.equals('foo'), ); - checkUnnamed491(o.items!); + checkUnnamed501(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -29149,12 +29689,12 @@ void checkPublicAdvertisedPrefixPublicDelegatedPrefix( } core.List - buildUnnamed493() => [ + buildUnnamed503() => [ buildPublicDelegatedPrefixPublicDelegatedSubPrefix(), buildPublicDelegatedPrefixPublicDelegatedSubPrefix(), ]; -void checkUnnamed493( +void checkUnnamed503( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkPublicDelegatedPrefixPublicDelegatedSubPrefix(o[0]); @@ -29178,7 +29718,7 @@ api.PublicDelegatedPrefix buildPublicDelegatedPrefix() { o.mode = 'foo'; o.name = 'foo'; o.parentPrefix = 'foo'; - o.publicDelegatedSubPrefixs = buildUnnamed493(); + o.publicDelegatedSubPrefixs = buildUnnamed503(); o.region = 'foo'; o.selfLink = 'foo'; o.status = 'foo'; @@ -29235,7 +29775,7 @@ void checkPublicDelegatedPrefix(api.PublicDelegatedPrefix o) { o.parentPrefix!, unittest.equals('foo'), ); - checkUnnamed493(o.publicDelegatedSubPrefixs!); + checkUnnamed503(o.publicDelegatedSubPrefixs!); unittest.expect( o.region!, unittest.equals('foo'), @@ -29253,24 +29793,24 @@ void checkPublicDelegatedPrefix(api.PublicDelegatedPrefix o) { } core.Map - buildUnnamed494() => { + buildUnnamed504() => { 'x': buildPublicDelegatedPrefixesScopedList(), 'y': buildPublicDelegatedPrefixesScopedList(), }; -void checkUnnamed494( +void checkUnnamed504( core.Map o) { unittest.expect(o, unittest.hasLength(2)); checkPublicDelegatedPrefixesScopedList(o['x']!); checkPublicDelegatedPrefixesScopedList(o['y']!); } -core.List buildUnnamed495() => [ +core.List buildUnnamed505() => [ 'foo', 'foo', ]; -void checkUnnamed495(core.List o) { +void checkUnnamed505(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -29312,12 +29852,12 @@ void checkPublicDelegatedPrefixAggregatedListWarningData( } core.List - buildUnnamed496() => [ + buildUnnamed506() => [ buildPublicDelegatedPrefixAggregatedListWarningData(), buildPublicDelegatedPrefixAggregatedListWarningData(), ]; -void checkUnnamed496( +void checkUnnamed506( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkPublicDelegatedPrefixAggregatedListWarningData(o[0]); @@ -29331,7 +29871,7 @@ api.PublicDelegatedPrefixAggregatedListWarning buildCounterPublicDelegatedPrefixAggregatedListWarning++; if (buildCounterPublicDelegatedPrefixAggregatedListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed496(); + o.data = buildUnnamed506(); o.message = 'foo'; } buildCounterPublicDelegatedPrefixAggregatedListWarning--; @@ -29346,7 +29886,7 @@ void checkPublicDelegatedPrefixAggregatedListWarning( o.code!, unittest.equals('foo'), ); - checkUnnamed496(o.data!); + checkUnnamed506(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -29362,11 +29902,11 @@ api.PublicDelegatedPrefixAggregatedList buildCounterPublicDelegatedPrefixAggregatedList++; if (buildCounterPublicDelegatedPrefixAggregatedList < 3) { o.id = 'foo'; - o.items = buildUnnamed494(); + o.items = buildUnnamed504(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; - o.unreachables = buildUnnamed495(); + o.unreachables = buildUnnamed505(); o.warning = buildPublicDelegatedPrefixAggregatedListWarning(); } buildCounterPublicDelegatedPrefixAggregatedList--; @@ -29381,7 +29921,7 @@ void checkPublicDelegatedPrefixAggregatedList( o.id!, unittest.equals('foo'), ); - checkUnnamed494(o.items!); + 
checkUnnamed504(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -29394,18 +29934,18 @@ void checkPublicDelegatedPrefixAggregatedList( o.selfLink!, unittest.equals('foo'), ); - checkUnnamed495(o.unreachables!); + checkUnnamed505(o.unreachables!); checkPublicDelegatedPrefixAggregatedListWarning(o.warning!); } buildCounterPublicDelegatedPrefixAggregatedList--; } -core.List buildUnnamed497() => [ +core.List buildUnnamed507() => [ buildPublicDelegatedPrefix(), buildPublicDelegatedPrefix(), ]; -void checkUnnamed497(core.List o) { +void checkUnnamed507(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkPublicDelegatedPrefix(o[0]); checkPublicDelegatedPrefix(o[1]); @@ -29440,12 +29980,12 @@ void checkPublicDelegatedPrefixListWarningData( buildCounterPublicDelegatedPrefixListWarningData--; } -core.List buildUnnamed498() => [ +core.List buildUnnamed508() => [ buildPublicDelegatedPrefixListWarningData(), buildPublicDelegatedPrefixListWarningData(), ]; -void checkUnnamed498(core.List o) { +void checkUnnamed508(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkPublicDelegatedPrefixListWarningData(o[0]); checkPublicDelegatedPrefixListWarningData(o[1]); @@ -29457,7 +29997,7 @@ api.PublicDelegatedPrefixListWarning buildPublicDelegatedPrefixListWarning() { buildCounterPublicDelegatedPrefixListWarning++; if (buildCounterPublicDelegatedPrefixListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed498(); + o.data = buildUnnamed508(); o.message = 'foo'; } buildCounterPublicDelegatedPrefixListWarning--; @@ -29472,7 +30012,7 @@ void checkPublicDelegatedPrefixListWarning( o.code!, unittest.equals('foo'), ); - checkUnnamed498(o.data!); + checkUnnamed508(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -29487,7 +30027,7 @@ api.PublicDelegatedPrefixList buildPublicDelegatedPrefixList() { buildCounterPublicDelegatedPrefixList++; if (buildCounterPublicDelegatedPrefixList < 3) { o.id = 'foo'; - o.items = buildUnnamed497(); + o.items = buildUnnamed507(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; @@ -29504,7 +30044,7 @@ void checkPublicDelegatedPrefixList(api.PublicDelegatedPrefixList o) { o.id!, unittest.equals('foo'), ); - checkUnnamed497(o.items!); + checkUnnamed507(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -29583,12 +30123,12 @@ void checkPublicDelegatedPrefixPublicDelegatedSubPrefix( buildCounterPublicDelegatedPrefixPublicDelegatedSubPrefix--; } -core.List buildUnnamed499() => [ +core.List buildUnnamed509() => [ buildPublicDelegatedPrefix(), buildPublicDelegatedPrefix(), ]; -void checkUnnamed499(core.List o) { +void checkUnnamed509(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkPublicDelegatedPrefix(o[0]); checkPublicDelegatedPrefix(o[1]); @@ -29623,13 +30163,13 @@ void checkPublicDelegatedPrefixesScopedListWarningData( buildCounterPublicDelegatedPrefixesScopedListWarningData--; } -core.List buildUnnamed500() => +core.List buildUnnamed510() => [ buildPublicDelegatedPrefixesScopedListWarningData(), buildPublicDelegatedPrefixesScopedListWarningData(), ]; -void checkUnnamed500( +void checkUnnamed510( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkPublicDelegatedPrefixesScopedListWarningData(o[0]); @@ -29643,7 +30183,7 @@ api.PublicDelegatedPrefixesScopedListWarning buildCounterPublicDelegatedPrefixesScopedListWarning++; if (buildCounterPublicDelegatedPrefixesScopedListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed500(); + o.data = buildUnnamed510(); o.message = 'foo'; } 
buildCounterPublicDelegatedPrefixesScopedListWarning--; @@ -29658,7 +30198,7 @@ void checkPublicDelegatedPrefixesScopedListWarning( o.code!, unittest.equals('foo'), ); - checkUnnamed500(o.data!); + checkUnnamed510(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -29672,7 +30212,7 @@ api.PublicDelegatedPrefixesScopedList buildPublicDelegatedPrefixesScopedList() { final o = api.PublicDelegatedPrefixesScopedList(); buildCounterPublicDelegatedPrefixesScopedList++; if (buildCounterPublicDelegatedPrefixesScopedList < 3) { - o.publicDelegatedPrefixes = buildUnnamed499(); + o.publicDelegatedPrefixes = buildUnnamed509(); o.warning = buildPublicDelegatedPrefixesScopedListWarning(); } buildCounterPublicDelegatedPrefixesScopedList--; @@ -29683,7 +30223,7 @@ void checkPublicDelegatedPrefixesScopedList( api.PublicDelegatedPrefixesScopedList o) { buildCounterPublicDelegatedPrefixesScopedList++; if (buildCounterPublicDelegatedPrefixesScopedList < 3) { - checkUnnamed499(o.publicDelegatedPrefixes!); + checkUnnamed509(o.publicDelegatedPrefixes!); checkPublicDelegatedPrefixesScopedListWarning(o.warning!); } buildCounterPublicDelegatedPrefixesScopedList--; @@ -29726,12 +30266,12 @@ void checkQuota(api.Quota o) { buildCounterQuota--; } -core.Map buildUnnamed501() => { +core.Map buildUnnamed511() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed501(core.Map o) { +void checkUnnamed511(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -29748,7 +30288,7 @@ api.QuotaExceededInfo buildQuotaExceededInfo() { final o = api.QuotaExceededInfo(); buildCounterQuotaExceededInfo++; if (buildCounterQuotaExceededInfo < 3) { - o.dimensions = buildUnnamed501(); + o.dimensions = buildUnnamed511(); o.futureLimit = 42.0; o.limit = 42.0; o.limitName = 'foo'; @@ -29762,7 +30302,7 @@ api.QuotaExceededInfo buildQuotaExceededInfo() { void checkQuotaExceededInfo(api.QuotaExceededInfo o) { buildCounterQuotaExceededInfo++; if (buildCounterQuotaExceededInfo < 3) { - checkUnnamed501(o.dimensions!); + checkUnnamed511(o.dimensions!); unittest.expect( o.futureLimit!, unittest.equals(42.0), @@ -29851,12 +30391,12 @@ void checkRegionQuotaStatusWarningData(api.RegionQuotaStatusWarningData o) { buildCounterRegionQuotaStatusWarningData--; } -core.List buildUnnamed502() => [ +core.List buildUnnamed512() => [ buildRegionQuotaStatusWarningData(), buildRegionQuotaStatusWarningData(), ]; -void checkUnnamed502(core.List o) { +void checkUnnamed512(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkRegionQuotaStatusWarningData(o[0]); checkRegionQuotaStatusWarningData(o[1]); @@ -29868,7 +30408,7 @@ api.RegionQuotaStatusWarning buildRegionQuotaStatusWarning() { buildCounterRegionQuotaStatusWarning++; if (buildCounterRegionQuotaStatusWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed502(); + o.data = buildUnnamed512(); o.message = 'foo'; } buildCounterRegionQuotaStatusWarning--; @@ -29882,7 +30422,7 @@ void checkRegionQuotaStatusWarning(api.RegionQuotaStatusWarning o) { o.code!, unittest.equals('foo'), ); - checkUnnamed502(o.data!); + checkUnnamed512(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -29891,23 +30431,23 @@ void checkRegionQuotaStatusWarning(api.RegionQuotaStatusWarning o) { buildCounterRegionQuotaStatusWarning--; } -core.List buildUnnamed503() => [ +core.List buildUnnamed513() => [ buildQuota(), buildQuota(), ]; -void checkUnnamed503(core.List o) { +void checkUnnamed513(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkQuota(o[0]); 
checkQuota(o[1]); } -core.List buildUnnamed504() => [ +core.List buildUnnamed514() => [ 'foo', 'foo', ]; -void checkUnnamed504(core.List o) { +void checkUnnamed514(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -29931,11 +30471,11 @@ api.Region buildRegion() { o.kind = 'foo'; o.name = 'foo'; o.quotaStatusWarning = buildRegionQuotaStatusWarning(); - o.quotas = buildUnnamed503(); + o.quotas = buildUnnamed513(); o.selfLink = 'foo'; o.status = 'foo'; o.supportsPzs = true; - o.zones = buildUnnamed504(); + o.zones = buildUnnamed514(); } buildCounterRegion--; return o; @@ -29966,7 +30506,7 @@ void checkRegion(api.Region o) { unittest.equals('foo'), ); checkRegionQuotaStatusWarning(o.quotaStatusWarning!); - checkUnnamed503(o.quotas!); + checkUnnamed513(o.quotas!); unittest.expect( o.selfLink!, unittest.equals('foo'), @@ -29976,7 +30516,7 @@ void checkRegion(api.Region o) { unittest.equals('foo'), ); unittest.expect(o.supportsPzs!, unittest.isTrue); - checkUnnamed504(o.zones!); + checkUnnamed514(o.zones!); } buildCounterRegion--; } @@ -30008,12 +30548,12 @@ void checkRegionAddressesMoveRequest(api.RegionAddressesMoveRequest o) { buildCounterRegionAddressesMoveRequest--; } -core.List buildUnnamed505() => [ +core.List buildUnnamed515() => [ buildAutoscaler(), buildAutoscaler(), ]; -void checkUnnamed505(core.List o) { +void checkUnnamed515(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkAutoscaler(o[0]); checkAutoscaler(o[1]); @@ -30047,12 +30587,12 @@ void checkRegionAutoscalerListWarningData( buildCounterRegionAutoscalerListWarningData--; } -core.List buildUnnamed506() => [ +core.List buildUnnamed516() => [ buildRegionAutoscalerListWarningData(), buildRegionAutoscalerListWarningData(), ]; -void checkUnnamed506(core.List o) { +void checkUnnamed516(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkRegionAutoscalerListWarningData(o[0]); checkRegionAutoscalerListWarningData(o[1]); @@ -30064,7 +30604,7 @@ api.RegionAutoscalerListWarning buildRegionAutoscalerListWarning() { buildCounterRegionAutoscalerListWarning++; if (buildCounterRegionAutoscalerListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed506(); + o.data = buildUnnamed516(); o.message = 'foo'; } buildCounterRegionAutoscalerListWarning--; @@ -30078,7 +30618,7 @@ void checkRegionAutoscalerListWarning(api.RegionAutoscalerListWarning o) { o.code!, unittest.equals('foo'), ); - checkUnnamed506(o.data!); + checkUnnamed516(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -30093,7 +30633,7 @@ api.RegionAutoscalerList buildRegionAutoscalerList() { buildCounterRegionAutoscalerList++; if (buildCounterRegionAutoscalerList < 3) { o.id = 'foo'; - o.items = buildUnnamed505(); + o.items = buildUnnamed515(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; @@ -30110,7 +30650,7 @@ void checkRegionAutoscalerList(api.RegionAutoscalerList o) { o.id!, unittest.equals('foo'), ); - checkUnnamed505(o.items!); + checkUnnamed515(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -30128,12 +30668,12 @@ void checkRegionAutoscalerList(api.RegionAutoscalerList o) { buildCounterRegionAutoscalerList--; } -core.List buildUnnamed507() => [ +core.List buildUnnamed517() => [ buildDiskType(), buildDiskType(), ]; -void checkUnnamed507(core.List o) { +void checkUnnamed517(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkDiskType(o[0]); checkDiskType(o[1]); @@ -30166,12 +30706,12 @@ void checkRegionDiskTypeListWarningData(api.RegionDiskTypeListWarningData o) { 
buildCounterRegionDiskTypeListWarningData--; } -core.List buildUnnamed508() => [ +core.List buildUnnamed518() => [ buildRegionDiskTypeListWarningData(), buildRegionDiskTypeListWarningData(), ]; -void checkUnnamed508(core.List o) { +void checkUnnamed518(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkRegionDiskTypeListWarningData(o[0]); checkRegionDiskTypeListWarningData(o[1]); @@ -30183,7 +30723,7 @@ api.RegionDiskTypeListWarning buildRegionDiskTypeListWarning() { buildCounterRegionDiskTypeListWarning++; if (buildCounterRegionDiskTypeListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed508(); + o.data = buildUnnamed518(); o.message = 'foo'; } buildCounterRegionDiskTypeListWarning--; @@ -30197,7 +30737,7 @@ void checkRegionDiskTypeListWarning(api.RegionDiskTypeListWarning o) { o.code!, unittest.equals('foo'), ); - checkUnnamed508(o.data!); + checkUnnamed518(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -30212,7 +30752,7 @@ api.RegionDiskTypeList buildRegionDiskTypeList() { buildCounterRegionDiskTypeList++; if (buildCounterRegionDiskTypeList < 3) { o.id = 'foo'; - o.items = buildUnnamed507(); + o.items = buildUnnamed517(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; @@ -30229,7 +30769,7 @@ void checkRegionDiskTypeList(api.RegionDiskTypeList o) { o.id!, unittest.equals('foo'), ); - checkUnnamed507(o.items!); + checkUnnamed517(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -30247,12 +30787,12 @@ void checkRegionDiskTypeList(api.RegionDiskTypeList o) { buildCounterRegionDiskTypeList--; } -core.List buildUnnamed509() => [ +core.List buildUnnamed519() => [ 'foo', 'foo', ]; -void checkUnnamed509(core.List o) { +void checkUnnamed519(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -30270,7 +30810,7 @@ api.RegionDisksAddResourcePoliciesRequest final o = api.RegionDisksAddResourcePoliciesRequest(); buildCounterRegionDisksAddResourcePoliciesRequest++; if (buildCounterRegionDisksAddResourcePoliciesRequest < 3) { - o.resourcePolicies = buildUnnamed509(); + o.resourcePolicies = buildUnnamed519(); } buildCounterRegionDisksAddResourcePoliciesRequest--; return o; @@ -30280,17 +30820,17 @@ void checkRegionDisksAddResourcePoliciesRequest( api.RegionDisksAddResourcePoliciesRequest o) { buildCounterRegionDisksAddResourcePoliciesRequest++; if (buildCounterRegionDisksAddResourcePoliciesRequest < 3) { - checkUnnamed509(o.resourcePolicies!); + checkUnnamed519(o.resourcePolicies!); } buildCounterRegionDisksAddResourcePoliciesRequest--; } -core.List buildUnnamed510() => [ +core.List buildUnnamed520() => [ 'foo', 'foo', ]; -void checkUnnamed510(core.List o) { +void checkUnnamed520(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -30308,7 +30848,7 @@ api.RegionDisksRemoveResourcePoliciesRequest final o = api.RegionDisksRemoveResourcePoliciesRequest(); buildCounterRegionDisksRemoveResourcePoliciesRequest++; if (buildCounterRegionDisksRemoveResourcePoliciesRequest < 3) { - o.resourcePolicies = buildUnnamed510(); + o.resourcePolicies = buildUnnamed520(); } buildCounterRegionDisksRemoveResourcePoliciesRequest--; return o; @@ -30318,7 +30858,7 @@ void checkRegionDisksRemoveResourcePoliciesRequest( api.RegionDisksRemoveResourcePoliciesRequest o) { buildCounterRegionDisksRemoveResourcePoliciesRequest++; if (buildCounterRegionDisksRemoveResourcePoliciesRequest < 3) { - checkUnnamed510(o.resourcePolicies!); + checkUnnamed520(o.resourcePolicies!); } 
buildCounterRegionDisksRemoveResourcePoliciesRequest--; } @@ -30369,12 +30909,12 @@ void checkRegionDisksStartAsyncReplicationRequest( buildCounterRegionDisksStartAsyncReplicationRequest--; } -core.List buildUnnamed511() => [ +core.List buildUnnamed521() => [ buildInstanceGroup(), buildInstanceGroup(), ]; -void checkUnnamed511(core.List o) { +void checkUnnamed521(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkInstanceGroup(o[0]); checkInstanceGroup(o[1]); @@ -30409,12 +30949,12 @@ void checkRegionInstanceGroupListWarningData( buildCounterRegionInstanceGroupListWarningData--; } -core.List buildUnnamed512() => [ +core.List buildUnnamed522() => [ buildRegionInstanceGroupListWarningData(), buildRegionInstanceGroupListWarningData(), ]; -void checkUnnamed512(core.List o) { +void checkUnnamed522(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkRegionInstanceGroupListWarningData(o[0]); checkRegionInstanceGroupListWarningData(o[1]); @@ -30426,7 +30966,7 @@ api.RegionInstanceGroupListWarning buildRegionInstanceGroupListWarning() { buildCounterRegionInstanceGroupListWarning++; if (buildCounterRegionInstanceGroupListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed512(); + o.data = buildUnnamed522(); o.message = 'foo'; } buildCounterRegionInstanceGroupListWarning--; @@ -30440,7 +30980,7 @@ void checkRegionInstanceGroupListWarning(api.RegionInstanceGroupListWarning o) { o.code!, unittest.equals('foo'), ); - checkUnnamed512(o.data!); + checkUnnamed522(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -30455,7 +30995,7 @@ api.RegionInstanceGroupList buildRegionInstanceGroupList() { buildCounterRegionInstanceGroupList++; if (buildCounterRegionInstanceGroupList < 3) { o.id = 'foo'; - o.items = buildUnnamed511(); + o.items = buildUnnamed521(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; @@ -30472,7 +31012,7 @@ void checkRegionInstanceGroupList(api.RegionInstanceGroupList o) { o.id!, unittest.equals('foo'), ); - checkUnnamed511(o.items!); + checkUnnamed521(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -30490,12 +31030,12 @@ void checkRegionInstanceGroupList(api.RegionInstanceGroupList o) { buildCounterRegionInstanceGroupList--; } -core.List buildUnnamed513() => [ +core.List buildUnnamed523() => [ 'foo', 'foo', ]; -void checkUnnamed513(core.List o) { +void checkUnnamed523(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -30513,7 +31053,7 @@ api.RegionInstanceGroupManagerDeleteInstanceConfigReq final o = api.RegionInstanceGroupManagerDeleteInstanceConfigReq(); buildCounterRegionInstanceGroupManagerDeleteInstanceConfigReq++; if (buildCounterRegionInstanceGroupManagerDeleteInstanceConfigReq < 3) { - o.names = buildUnnamed513(); + o.names = buildUnnamed523(); } buildCounterRegionInstanceGroupManagerDeleteInstanceConfigReq--; return o; @@ -30523,17 +31063,17 @@ void checkRegionInstanceGroupManagerDeleteInstanceConfigReq( api.RegionInstanceGroupManagerDeleteInstanceConfigReq o) { buildCounterRegionInstanceGroupManagerDeleteInstanceConfigReq++; if (buildCounterRegionInstanceGroupManagerDeleteInstanceConfigReq < 3) { - checkUnnamed513(o.names!); + checkUnnamed523(o.names!); } buildCounterRegionInstanceGroupManagerDeleteInstanceConfigReq--; } -core.List buildUnnamed514() => [ +core.List buildUnnamed524() => [ buildInstanceGroupManager(), buildInstanceGroupManager(), ]; -void checkUnnamed514(core.List o) { +void checkUnnamed524(core.List o) { unittest.expect(o, unittest.hasLength(2)); 
checkInstanceGroupManager(o[0]); checkInstanceGroupManager(o[1]); @@ -30568,12 +31108,12 @@ void checkRegionInstanceGroupManagerListWarningData( buildCounterRegionInstanceGroupManagerListWarningData--; } -core.List buildUnnamed515() => [ +core.List buildUnnamed525() => [ buildRegionInstanceGroupManagerListWarningData(), buildRegionInstanceGroupManagerListWarningData(), ]; -void checkUnnamed515( +void checkUnnamed525( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkRegionInstanceGroupManagerListWarningData(o[0]); @@ -30587,7 +31127,7 @@ api.RegionInstanceGroupManagerListWarning buildCounterRegionInstanceGroupManagerListWarning++; if (buildCounterRegionInstanceGroupManagerListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed515(); + o.data = buildUnnamed525(); o.message = 'foo'; } buildCounterRegionInstanceGroupManagerListWarning--; @@ -30602,7 +31142,7 @@ void checkRegionInstanceGroupManagerListWarning( o.code!, unittest.equals('foo'), ); - checkUnnamed515(o.data!); + checkUnnamed525(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -30617,7 +31157,7 @@ api.RegionInstanceGroupManagerList buildRegionInstanceGroupManagerList() { buildCounterRegionInstanceGroupManagerList++; if (buildCounterRegionInstanceGroupManagerList < 3) { o.id = 'foo'; - o.items = buildUnnamed514(); + o.items = buildUnnamed524(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; @@ -30634,7 +31174,7 @@ void checkRegionInstanceGroupManagerList(api.RegionInstanceGroupManagerList o) { o.id!, unittest.equals('foo'), ); - checkUnnamed514(o.items!); + checkUnnamed524(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -30652,12 +31192,12 @@ void checkRegionInstanceGroupManagerList(api.RegionInstanceGroupManagerList o) { buildCounterRegionInstanceGroupManagerList--; } -core.List buildUnnamed516() => [ +core.List buildUnnamed526() => [ buildPerInstanceConfig(), buildPerInstanceConfig(), ]; -void checkUnnamed516(core.List o) { +void checkUnnamed526(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkPerInstanceConfig(o[0]); checkPerInstanceConfig(o[1]); @@ -30669,7 +31209,7 @@ api.RegionInstanceGroupManagerPatchInstanceConfigReq final o = api.RegionInstanceGroupManagerPatchInstanceConfigReq(); buildCounterRegionInstanceGroupManagerPatchInstanceConfigReq++; if (buildCounterRegionInstanceGroupManagerPatchInstanceConfigReq < 3) { - o.perInstanceConfigs = buildUnnamed516(); + o.perInstanceConfigs = buildUnnamed526(); } buildCounterRegionInstanceGroupManagerPatchInstanceConfigReq--; return o; @@ -30679,17 +31219,17 @@ void checkRegionInstanceGroupManagerPatchInstanceConfigReq( api.RegionInstanceGroupManagerPatchInstanceConfigReq o) { buildCounterRegionInstanceGroupManagerPatchInstanceConfigReq++; if (buildCounterRegionInstanceGroupManagerPatchInstanceConfigReq < 3) { - checkUnnamed516(o.perInstanceConfigs!); + checkUnnamed526(o.perInstanceConfigs!); } buildCounterRegionInstanceGroupManagerPatchInstanceConfigReq--; } -core.List buildUnnamed517() => [ +core.List buildUnnamed527() => [ buildPerInstanceConfig(), buildPerInstanceConfig(), ]; -void checkUnnamed517(core.List o) { +void checkUnnamed527(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkPerInstanceConfig(o[0]); checkPerInstanceConfig(o[1]); @@ -30701,7 +31241,7 @@ api.RegionInstanceGroupManagerUpdateInstanceConfigReq final o = api.RegionInstanceGroupManagerUpdateInstanceConfigReq(); buildCounterRegionInstanceGroupManagerUpdateInstanceConfigReq++; if 
(buildCounterRegionInstanceGroupManagerUpdateInstanceConfigReq < 3) { - o.perInstanceConfigs = buildUnnamed517(); + o.perInstanceConfigs = buildUnnamed527(); } buildCounterRegionInstanceGroupManagerUpdateInstanceConfigReq--; return o; @@ -30711,17 +31251,17 @@ void checkRegionInstanceGroupManagerUpdateInstanceConfigReq( api.RegionInstanceGroupManagerUpdateInstanceConfigReq o) { buildCounterRegionInstanceGroupManagerUpdateInstanceConfigReq++; if (buildCounterRegionInstanceGroupManagerUpdateInstanceConfigReq < 3) { - checkUnnamed517(o.perInstanceConfigs!); + checkUnnamed527(o.perInstanceConfigs!); } buildCounterRegionInstanceGroupManagerUpdateInstanceConfigReq--; } -core.List buildUnnamed518() => [ +core.List buildUnnamed528() => [ 'foo', 'foo', ]; -void checkUnnamed518(core.List o) { +void checkUnnamed528(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -30739,7 +31279,7 @@ api.RegionInstanceGroupManagersAbandonInstancesRequest final o = api.RegionInstanceGroupManagersAbandonInstancesRequest(); buildCounterRegionInstanceGroupManagersAbandonInstancesRequest++; if (buildCounterRegionInstanceGroupManagersAbandonInstancesRequest < 3) { - o.instances = buildUnnamed518(); + o.instances = buildUnnamed528(); } buildCounterRegionInstanceGroupManagersAbandonInstancesRequest--; return o; @@ -30749,17 +31289,17 @@ void checkRegionInstanceGroupManagersAbandonInstancesRequest( api.RegionInstanceGroupManagersAbandonInstancesRequest o) { buildCounterRegionInstanceGroupManagersAbandonInstancesRequest++; if (buildCounterRegionInstanceGroupManagersAbandonInstancesRequest < 3) { - checkUnnamed518(o.instances!); + checkUnnamed528(o.instances!); } buildCounterRegionInstanceGroupManagersAbandonInstancesRequest--; } -core.List buildUnnamed519() => [ +core.List buildUnnamed529() => [ 'foo', 'foo', ]; -void checkUnnamed519(core.List o) { +void checkUnnamed529(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -30778,7 +31318,7 @@ api.RegionInstanceGroupManagersApplyUpdatesRequest buildCounterRegionInstanceGroupManagersApplyUpdatesRequest++; if (buildCounterRegionInstanceGroupManagersApplyUpdatesRequest < 3) { o.allInstances = true; - o.instances = buildUnnamed519(); + o.instances = buildUnnamed529(); o.minimalAction = 'foo'; o.mostDisruptiveAllowedAction = 'foo'; } @@ -30791,7 +31331,7 @@ void checkRegionInstanceGroupManagersApplyUpdatesRequest( buildCounterRegionInstanceGroupManagersApplyUpdatesRequest++; if (buildCounterRegionInstanceGroupManagersApplyUpdatesRequest < 3) { unittest.expect(o.allInstances!, unittest.isTrue); - checkUnnamed519(o.instances!); + checkUnnamed529(o.instances!); unittest.expect( o.minimalAction!, unittest.equals('foo'), @@ -30804,12 +31344,12 @@ void checkRegionInstanceGroupManagersApplyUpdatesRequest( buildCounterRegionInstanceGroupManagersApplyUpdatesRequest--; } -core.List buildUnnamed520() => [ +core.List buildUnnamed530() => [ buildPerInstanceConfig(), buildPerInstanceConfig(), ]; -void checkUnnamed520(core.List o) { +void checkUnnamed530(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkPerInstanceConfig(o[0]); checkPerInstanceConfig(o[1]); @@ -30821,7 +31361,7 @@ api.RegionInstanceGroupManagersCreateInstancesRequest final o = api.RegionInstanceGroupManagersCreateInstancesRequest(); buildCounterRegionInstanceGroupManagersCreateInstancesRequest++; if (buildCounterRegionInstanceGroupManagersCreateInstancesRequest < 3) { - o.instances = buildUnnamed520(); + o.instances = buildUnnamed530(); } 
buildCounterRegionInstanceGroupManagersCreateInstancesRequest--; return o; @@ -30831,17 +31371,17 @@ void checkRegionInstanceGroupManagersCreateInstancesRequest( api.RegionInstanceGroupManagersCreateInstancesRequest o) { buildCounterRegionInstanceGroupManagersCreateInstancesRequest++; if (buildCounterRegionInstanceGroupManagersCreateInstancesRequest < 3) { - checkUnnamed520(o.instances!); + checkUnnamed530(o.instances!); } buildCounterRegionInstanceGroupManagersCreateInstancesRequest--; } -core.List buildUnnamed521() => [ +core.List buildUnnamed531() => [ 'foo', 'foo', ]; -void checkUnnamed521(core.List o) { +void checkUnnamed531(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -30859,7 +31399,7 @@ api.RegionInstanceGroupManagersDeleteInstancesRequest final o = api.RegionInstanceGroupManagersDeleteInstancesRequest(); buildCounterRegionInstanceGroupManagersDeleteInstancesRequest++; if (buildCounterRegionInstanceGroupManagersDeleteInstancesRequest < 3) { - o.instances = buildUnnamed521(); + o.instances = buildUnnamed531(); o.skipInstancesOnValidationError = true; } buildCounterRegionInstanceGroupManagersDeleteInstancesRequest--; @@ -30870,18 +31410,18 @@ void checkRegionInstanceGroupManagersDeleteInstancesRequest( api.RegionInstanceGroupManagersDeleteInstancesRequest o) { buildCounterRegionInstanceGroupManagersDeleteInstancesRequest++; if (buildCounterRegionInstanceGroupManagersDeleteInstancesRequest < 3) { - checkUnnamed521(o.instances!); + checkUnnamed531(o.instances!); unittest.expect(o.skipInstancesOnValidationError!, unittest.isTrue); } buildCounterRegionInstanceGroupManagersDeleteInstancesRequest--; } -core.List buildUnnamed522() => [ +core.List buildUnnamed532() => [ buildInstanceManagedByIgmError(), buildInstanceManagedByIgmError(), ]; -void checkUnnamed522(core.List o) { +void checkUnnamed532(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkInstanceManagedByIgmError(o[0]); checkInstanceManagedByIgmError(o[1]); @@ -30893,7 +31433,7 @@ api.RegionInstanceGroupManagersListErrorsResponse final o = api.RegionInstanceGroupManagersListErrorsResponse(); buildCounterRegionInstanceGroupManagersListErrorsResponse++; if (buildCounterRegionInstanceGroupManagersListErrorsResponse < 3) { - o.items = buildUnnamed522(); + o.items = buildUnnamed532(); o.nextPageToken = 'foo'; } buildCounterRegionInstanceGroupManagersListErrorsResponse--; @@ -30904,7 +31444,7 @@ void checkRegionInstanceGroupManagersListErrorsResponse( api.RegionInstanceGroupManagersListErrorsResponse o) { buildCounterRegionInstanceGroupManagersListErrorsResponse++; if (buildCounterRegionInstanceGroupManagersListErrorsResponse < 3) { - checkUnnamed522(o.items!); + checkUnnamed532(o.items!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -30913,12 +31453,12 @@ void checkRegionInstanceGroupManagersListErrorsResponse( buildCounterRegionInstanceGroupManagersListErrorsResponse--; } -core.List buildUnnamed523() => [ +core.List buildUnnamed533() => [ buildPerInstanceConfig(), buildPerInstanceConfig(), ]; -void checkUnnamed523(core.List o) { +void checkUnnamed533(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkPerInstanceConfig(o[0]); checkPerInstanceConfig(o[1]); @@ -30958,12 +31498,12 @@ void checkRegionInstanceGroupManagersListInstanceConfigsRespWarningData( } core.List - buildUnnamed524() => [ + buildUnnamed534() => [ buildRegionInstanceGroupManagersListInstanceConfigsRespWarningData(), buildRegionInstanceGroupManagersListInstanceConfigsRespWarningData(), ]; 
-void checkUnnamed524( +void checkUnnamed534( core.List o) { unittest.expect(o, unittest.hasLength(2)); @@ -30980,7 +31520,7 @@ api.RegionInstanceGroupManagersListInstanceConfigsRespWarning if (buildCounterRegionInstanceGroupManagersListInstanceConfigsRespWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed524(); + o.data = buildUnnamed534(); o.message = 'foo'; } buildCounterRegionInstanceGroupManagersListInstanceConfigsRespWarning--; @@ -30996,7 +31536,7 @@ void checkRegionInstanceGroupManagersListInstanceConfigsRespWarning( o.code!, unittest.equals('foo'), ); - checkUnnamed524(o.data!); + checkUnnamed534(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -31011,7 +31551,7 @@ api.RegionInstanceGroupManagersListInstanceConfigsResp final o = api.RegionInstanceGroupManagersListInstanceConfigsResp(); buildCounterRegionInstanceGroupManagersListInstanceConfigsResp++; if (buildCounterRegionInstanceGroupManagersListInstanceConfigsResp < 3) { - o.items = buildUnnamed523(); + o.items = buildUnnamed533(); o.nextPageToken = 'foo'; o.warning = buildRegionInstanceGroupManagersListInstanceConfigsRespWarning(); @@ -31024,7 +31564,7 @@ void checkRegionInstanceGroupManagersListInstanceConfigsResp( api.RegionInstanceGroupManagersListInstanceConfigsResp o) { buildCounterRegionInstanceGroupManagersListInstanceConfigsResp++; if (buildCounterRegionInstanceGroupManagersListInstanceConfigsResp < 3) { - checkUnnamed523(o.items!); + checkUnnamed533(o.items!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -31034,12 +31574,12 @@ void checkRegionInstanceGroupManagersListInstanceConfigsResp( buildCounterRegionInstanceGroupManagersListInstanceConfigsResp--; } -core.List buildUnnamed525() => [ +core.List buildUnnamed535() => [ buildManagedInstance(), buildManagedInstance(), ]; -void checkUnnamed525(core.List o) { +void checkUnnamed535(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkManagedInstance(o[0]); checkManagedInstance(o[1]); @@ -31051,7 +31591,7 @@ api.RegionInstanceGroupManagersListInstancesResponse final o = api.RegionInstanceGroupManagersListInstancesResponse(); buildCounterRegionInstanceGroupManagersListInstancesResponse++; if (buildCounterRegionInstanceGroupManagersListInstancesResponse < 3) { - o.managedInstances = buildUnnamed525(); + o.managedInstances = buildUnnamed535(); o.nextPageToken = 'foo'; } buildCounterRegionInstanceGroupManagersListInstancesResponse--; @@ -31062,7 +31602,7 @@ void checkRegionInstanceGroupManagersListInstancesResponse( api.RegionInstanceGroupManagersListInstancesResponse o) { buildCounterRegionInstanceGroupManagersListInstancesResponse++; if (buildCounterRegionInstanceGroupManagersListInstancesResponse < 3) { - checkUnnamed525(o.managedInstances!); + checkUnnamed535(o.managedInstances!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -31071,12 +31611,12 @@ void checkRegionInstanceGroupManagersListInstancesResponse( buildCounterRegionInstanceGroupManagersListInstancesResponse--; } -core.List buildUnnamed526() => [ +core.List buildUnnamed536() => [ 'foo', 'foo', ]; -void checkUnnamed526(core.List o) { +void checkUnnamed536(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -31094,7 +31634,7 @@ api.RegionInstanceGroupManagersRecreateRequest final o = api.RegionInstanceGroupManagersRecreateRequest(); buildCounterRegionInstanceGroupManagersRecreateRequest++; if (buildCounterRegionInstanceGroupManagersRecreateRequest < 3) { - o.instances = buildUnnamed526(); + o.instances = buildUnnamed536(); 
} buildCounterRegionInstanceGroupManagersRecreateRequest--; return o; @@ -31104,17 +31644,55 @@ void checkRegionInstanceGroupManagersRecreateRequest( api.RegionInstanceGroupManagersRecreateRequest o) { buildCounterRegionInstanceGroupManagersRecreateRequest++; if (buildCounterRegionInstanceGroupManagersRecreateRequest < 3) { - checkUnnamed526(o.instances!); + checkUnnamed536(o.instances!); } buildCounterRegionInstanceGroupManagersRecreateRequest--; } -core.List buildUnnamed527() => [ +core.List buildUnnamed537() => [ 'foo', 'foo', ]; -void checkUnnamed527(core.List o) { +void checkUnnamed537(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o[0], + unittest.equals('foo'), + ); + unittest.expect( + o[1], + unittest.equals('foo'), + ); +} + +core.int buildCounterRegionInstanceGroupManagersResumeInstancesRequest = 0; +api.RegionInstanceGroupManagersResumeInstancesRequest + buildRegionInstanceGroupManagersResumeInstancesRequest() { + final o = api.RegionInstanceGroupManagersResumeInstancesRequest(); + buildCounterRegionInstanceGroupManagersResumeInstancesRequest++; + if (buildCounterRegionInstanceGroupManagersResumeInstancesRequest < 3) { + o.instances = buildUnnamed537(); + } + buildCounterRegionInstanceGroupManagersResumeInstancesRequest--; + return o; +} + +void checkRegionInstanceGroupManagersResumeInstancesRequest( + api.RegionInstanceGroupManagersResumeInstancesRequest o) { + buildCounterRegionInstanceGroupManagersResumeInstancesRequest++; + if (buildCounterRegionInstanceGroupManagersResumeInstancesRequest < 3) { + checkUnnamed537(o.instances!); + } + buildCounterRegionInstanceGroupManagersResumeInstancesRequest--; +} + +core.List buildUnnamed538() => [ + 'foo', + 'foo', + ]; + +void checkUnnamed538(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -31133,7 +31711,7 @@ api.RegionInstanceGroupManagersSetTargetPoolsRequest buildCounterRegionInstanceGroupManagersSetTargetPoolsRequest++; if (buildCounterRegionInstanceGroupManagersSetTargetPoolsRequest < 3) { o.fingerprint = 'foo'; - o.targetPools = buildUnnamed527(); + o.targetPools = buildUnnamed538(); } buildCounterRegionInstanceGroupManagersSetTargetPoolsRequest--; return o; @@ -31147,7 +31725,7 @@ void checkRegionInstanceGroupManagersSetTargetPoolsRequest( o.fingerprint!, unittest.equals('foo'), ); - checkUnnamed527(o.targetPools!); + checkUnnamed538(o.targetPools!); } buildCounterRegionInstanceGroupManagersSetTargetPoolsRequest--; } @@ -31176,12 +31754,130 @@ void checkRegionInstanceGroupManagersSetTemplateRequest( buildCounterRegionInstanceGroupManagersSetTemplateRequest--; } -core.List buildUnnamed528() => [ +core.List buildUnnamed539() => [ + 'foo', + 'foo', + ]; + +void checkUnnamed539(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o[0], + unittest.equals('foo'), + ); + unittest.expect( + o[1], + unittest.equals('foo'), + ); +} + +core.int buildCounterRegionInstanceGroupManagersStartInstancesRequest = 0; +api.RegionInstanceGroupManagersStartInstancesRequest + buildRegionInstanceGroupManagersStartInstancesRequest() { + final o = api.RegionInstanceGroupManagersStartInstancesRequest(); + buildCounterRegionInstanceGroupManagersStartInstancesRequest++; + if (buildCounterRegionInstanceGroupManagersStartInstancesRequest < 3) { + o.instances = buildUnnamed539(); + } + buildCounterRegionInstanceGroupManagersStartInstancesRequest--; + return o; +} + +void checkRegionInstanceGroupManagersStartInstancesRequest( + 
api.RegionInstanceGroupManagersStartInstancesRequest o) { + buildCounterRegionInstanceGroupManagersStartInstancesRequest++; + if (buildCounterRegionInstanceGroupManagersStartInstancesRequest < 3) { + checkUnnamed539(o.instances!); + } + buildCounterRegionInstanceGroupManagersStartInstancesRequest--; +} + +core.List buildUnnamed540() => [ + 'foo', + 'foo', + ]; + +void checkUnnamed540(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o[0], + unittest.equals('foo'), + ); + unittest.expect( + o[1], + unittest.equals('foo'), + ); +} + +core.int buildCounterRegionInstanceGroupManagersStopInstancesRequest = 0; +api.RegionInstanceGroupManagersStopInstancesRequest + buildRegionInstanceGroupManagersStopInstancesRequest() { + final o = api.RegionInstanceGroupManagersStopInstancesRequest(); + buildCounterRegionInstanceGroupManagersStopInstancesRequest++; + if (buildCounterRegionInstanceGroupManagersStopInstancesRequest < 3) { + o.forceStop = true; + o.instances = buildUnnamed540(); + } + buildCounterRegionInstanceGroupManagersStopInstancesRequest--; + return o; +} + +void checkRegionInstanceGroupManagersStopInstancesRequest( + api.RegionInstanceGroupManagersStopInstancesRequest o) { + buildCounterRegionInstanceGroupManagersStopInstancesRequest++; + if (buildCounterRegionInstanceGroupManagersStopInstancesRequest < 3) { + unittest.expect(o.forceStop!, unittest.isTrue); + checkUnnamed540(o.instances!); + } + buildCounterRegionInstanceGroupManagersStopInstancesRequest--; +} + +core.List buildUnnamed541() => [ + 'foo', + 'foo', + ]; + +void checkUnnamed541(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o[0], + unittest.equals('foo'), + ); + unittest.expect( + o[1], + unittest.equals('foo'), + ); +} + +core.int buildCounterRegionInstanceGroupManagersSuspendInstancesRequest = 0; +api.RegionInstanceGroupManagersSuspendInstancesRequest + buildRegionInstanceGroupManagersSuspendInstancesRequest() { + final o = api.RegionInstanceGroupManagersSuspendInstancesRequest(); + buildCounterRegionInstanceGroupManagersSuspendInstancesRequest++; + if (buildCounterRegionInstanceGroupManagersSuspendInstancesRequest < 3) { + o.forceSuspend = true; + o.instances = buildUnnamed541(); + } + buildCounterRegionInstanceGroupManagersSuspendInstancesRequest--; + return o; +} + +void checkRegionInstanceGroupManagersSuspendInstancesRequest( + api.RegionInstanceGroupManagersSuspendInstancesRequest o) { + buildCounterRegionInstanceGroupManagersSuspendInstancesRequest++; + if (buildCounterRegionInstanceGroupManagersSuspendInstancesRequest < 3) { + unittest.expect(o.forceSuspend!, unittest.isTrue); + checkUnnamed541(o.instances!); + } + buildCounterRegionInstanceGroupManagersSuspendInstancesRequest--; +} + +core.List buildUnnamed542() => [ buildInstanceWithNamedPorts(), buildInstanceWithNamedPorts(), ]; -void checkUnnamed528(core.List o) { +void checkUnnamed542(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkInstanceWithNamedPorts(o[0]); checkInstanceWithNamedPorts(o[1]); @@ -31216,13 +31912,13 @@ void checkRegionInstanceGroupsListInstancesWarningData( buildCounterRegionInstanceGroupsListInstancesWarningData--; } -core.List buildUnnamed529() => +core.List buildUnnamed543() => [ buildRegionInstanceGroupsListInstancesWarningData(), buildRegionInstanceGroupsListInstancesWarningData(), ]; -void checkUnnamed529( +void checkUnnamed543( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkRegionInstanceGroupsListInstancesWarningData(o[0]); @@ -31236,7 
+31932,7 @@ api.RegionInstanceGroupsListInstancesWarning buildCounterRegionInstanceGroupsListInstancesWarning++; if (buildCounterRegionInstanceGroupsListInstancesWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed529(); + o.data = buildUnnamed543(); o.message = 'foo'; } buildCounterRegionInstanceGroupsListInstancesWarning--; @@ -31251,7 +31947,7 @@ void checkRegionInstanceGroupsListInstancesWarning( o.code!, unittest.equals('foo'), ); - checkUnnamed529(o.data!); + checkUnnamed543(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -31266,7 +31962,7 @@ api.RegionInstanceGroupsListInstances buildRegionInstanceGroupsListInstances() { buildCounterRegionInstanceGroupsListInstances++; if (buildCounterRegionInstanceGroupsListInstances < 3) { o.id = 'foo'; - o.items = buildUnnamed528(); + o.items = buildUnnamed542(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; @@ -31284,7 +31980,7 @@ void checkRegionInstanceGroupsListInstances( o.id!, unittest.equals('foo'), ); - checkUnnamed528(o.items!); + checkUnnamed542(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -31331,12 +32027,12 @@ void checkRegionInstanceGroupsListInstancesRequest( buildCounterRegionInstanceGroupsListInstancesRequest--; } -core.List buildUnnamed530() => [ +core.List buildUnnamed544() => [ buildNamedPort(), buildNamedPort(), ]; -void checkUnnamed530(core.List o) { +void checkUnnamed544(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkNamedPort(o[0]); checkNamedPort(o[1]); @@ -31349,7 +32045,7 @@ api.RegionInstanceGroupsSetNamedPortsRequest buildCounterRegionInstanceGroupsSetNamedPortsRequest++; if (buildCounterRegionInstanceGroupsSetNamedPortsRequest < 3) { o.fingerprint = 'foo'; - o.namedPorts = buildUnnamed530(); + o.namedPorts = buildUnnamed544(); } buildCounterRegionInstanceGroupsSetNamedPortsRequest--; return o; @@ -31363,17 +32059,17 @@ void checkRegionInstanceGroupsSetNamedPortsRequest( o.fingerprint!, unittest.equals('foo'), ); - checkUnnamed530(o.namedPorts!); + checkUnnamed544(o.namedPorts!); } buildCounterRegionInstanceGroupsSetNamedPortsRequest--; } -core.List buildUnnamed531() => [ +core.List buildUnnamed545() => [ buildRegion(), buildRegion(), ]; -void checkUnnamed531(core.List o) { +void checkUnnamed545(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkRegion(o[0]); checkRegion(o[1]); @@ -31406,12 +32102,12 @@ void checkRegionListWarningData(api.RegionListWarningData o) { buildCounterRegionListWarningData--; } -core.List buildUnnamed532() => [ +core.List buildUnnamed546() => [ buildRegionListWarningData(), buildRegionListWarningData(), ]; -void checkUnnamed532(core.List o) { +void checkUnnamed546(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkRegionListWarningData(o[0]); checkRegionListWarningData(o[1]); @@ -31423,7 +32119,7 @@ api.RegionListWarning buildRegionListWarning() { buildCounterRegionListWarning++; if (buildCounterRegionListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed532(); + o.data = buildUnnamed546(); o.message = 'foo'; } buildCounterRegionListWarning--; @@ -31437,7 +32133,7 @@ void checkRegionListWarning(api.RegionListWarning o) { o.code!, unittest.equals('foo'), ); - checkUnnamed532(o.data!); + checkUnnamed546(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -31452,7 +32148,7 @@ api.RegionList buildRegionList() { buildCounterRegionList++; if (buildCounterRegionList < 3) { o.id = 'foo'; - o.items = buildUnnamed531(); + o.items = buildUnnamed545(); o.kind = 'foo'; o.nextPageToken = 'foo'; 
o.selfLink = 'foo'; @@ -31469,7 +32165,7 @@ void checkRegionList(api.RegionList o) { o.id!, unittest.equals('foo'), ); - checkUnnamed531(o.items!); + checkUnnamed545(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -31487,12 +32183,12 @@ void checkRegionList(api.RegionList o) { buildCounterRegionList--; } -core.List buildUnnamed533() => [ +core.List buildUnnamed547() => [ buildNetworkEndpoint(), buildNetworkEndpoint(), ]; -void checkUnnamed533(core.List o) { +void checkUnnamed547(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkNetworkEndpoint(o[0]); checkNetworkEndpoint(o[1]); @@ -31504,7 +32200,7 @@ api.RegionNetworkEndpointGroupsAttachEndpointsRequest final o = api.RegionNetworkEndpointGroupsAttachEndpointsRequest(); buildCounterRegionNetworkEndpointGroupsAttachEndpointsRequest++; if (buildCounterRegionNetworkEndpointGroupsAttachEndpointsRequest < 3) { - o.networkEndpoints = buildUnnamed533(); + o.networkEndpoints = buildUnnamed547(); } buildCounterRegionNetworkEndpointGroupsAttachEndpointsRequest--; return o; @@ -31514,17 +32210,17 @@ void checkRegionNetworkEndpointGroupsAttachEndpointsRequest( api.RegionNetworkEndpointGroupsAttachEndpointsRequest o) { buildCounterRegionNetworkEndpointGroupsAttachEndpointsRequest++; if (buildCounterRegionNetworkEndpointGroupsAttachEndpointsRequest < 3) { - checkUnnamed533(o.networkEndpoints!); + checkUnnamed547(o.networkEndpoints!); } buildCounterRegionNetworkEndpointGroupsAttachEndpointsRequest--; } -core.List buildUnnamed534() => [ +core.List buildUnnamed548() => [ buildNetworkEndpoint(), buildNetworkEndpoint(), ]; -void checkUnnamed534(core.List o) { +void checkUnnamed548(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkNetworkEndpoint(o[0]); checkNetworkEndpoint(o[1]); @@ -31536,7 +32232,7 @@ api.RegionNetworkEndpointGroupsDetachEndpointsRequest final o = api.RegionNetworkEndpointGroupsDetachEndpointsRequest(); buildCounterRegionNetworkEndpointGroupsDetachEndpointsRequest++; if (buildCounterRegionNetworkEndpointGroupsDetachEndpointsRequest < 3) { - o.networkEndpoints = buildUnnamed534(); + o.networkEndpoints = buildUnnamed548(); } buildCounterRegionNetworkEndpointGroupsDetachEndpointsRequest--; return o; @@ -31546,7 +32242,7 @@ void checkRegionNetworkEndpointGroupsDetachEndpointsRequest( api.RegionNetworkEndpointGroupsDetachEndpointsRequest o) { buildCounterRegionNetworkEndpointGroupsDetachEndpointsRequest++; if (buildCounterRegionNetworkEndpointGroupsDetachEndpointsRequest < 3) { - checkUnnamed534(o.networkEndpoints!); + checkUnnamed548(o.networkEndpoints!); } buildCounterRegionNetworkEndpointGroupsDetachEndpointsRequest--; } @@ -31554,12 +32250,12 @@ void checkRegionNetworkEndpointGroupsDetachEndpointsRequest( core.List< api .RegionNetworkFirewallPoliciesGetEffectiveFirewallsResponseEffectiveFirewallPolicy> - buildUnnamed535() => [ + buildUnnamed549() => [ buildRegionNetworkFirewallPoliciesGetEffectiveFirewallsResponseEffectiveFirewallPolicy(), buildRegionNetworkFirewallPoliciesGetEffectiveFirewallsResponseEffectiveFirewallPolicy(), ]; -void checkUnnamed535( +void checkUnnamed549( core.List< api .RegionNetworkFirewallPoliciesGetEffectiveFirewallsResponseEffectiveFirewallPolicy> @@ -31571,12 +32267,12 @@ void checkUnnamed535( o[1]); } -core.List buildUnnamed536() => [ +core.List buildUnnamed550() => [ buildFirewall(), buildFirewall(), ]; -void checkUnnamed536(core.List o) { +void checkUnnamed550(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkFirewall(o[0]); checkFirewall(o[1]); @@ -31590,8 
+32286,8 @@ api.RegionNetworkFirewallPoliciesGetEffectiveFirewallsResponse buildCounterRegionNetworkFirewallPoliciesGetEffectiveFirewallsResponse++; if (buildCounterRegionNetworkFirewallPoliciesGetEffectiveFirewallsResponse < 3) { - o.firewallPolicys = buildUnnamed535(); - o.firewalls = buildUnnamed536(); + o.firewallPolicys = buildUnnamed549(); + o.firewalls = buildUnnamed550(); } buildCounterRegionNetworkFirewallPoliciesGetEffectiveFirewallsResponse--; return o; @@ -31602,18 +32298,18 @@ void checkRegionNetworkFirewallPoliciesGetEffectiveFirewallsResponse( buildCounterRegionNetworkFirewallPoliciesGetEffectiveFirewallsResponse++; if (buildCounterRegionNetworkFirewallPoliciesGetEffectiveFirewallsResponse < 3) { - checkUnnamed535(o.firewallPolicys!); - checkUnnamed536(o.firewalls!); + checkUnnamed549(o.firewallPolicys!); + checkUnnamed550(o.firewalls!); } buildCounterRegionNetworkFirewallPoliciesGetEffectiveFirewallsResponse--; } -core.List buildUnnamed537() => [ +core.List buildUnnamed551() => [ buildFirewallPolicyRule(), buildFirewallPolicyRule(), ]; -void checkUnnamed537(core.List o) { +void checkUnnamed551(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkFirewallPolicyRule(o[0]); checkFirewallPolicyRule(o[1]); @@ -31631,7 +32327,7 @@ api.RegionNetworkFirewallPoliciesGetEffectiveFirewallsResponseEffectiveFirewallP 3) { o.displayName = 'foo'; o.name = 'foo'; - o.rules = buildUnnamed537(); + o.rules = buildUnnamed551(); o.type = 'foo'; } buildCounterRegionNetworkFirewallPoliciesGetEffectiveFirewallsResponseEffectiveFirewallPolicy--; @@ -31652,7 +32348,7 @@ void checkRegionNetworkFirewallPoliciesGetEffectiveFirewallsResponseEffectiveFir o.name!, unittest.equals('foo'), ); - checkUnnamed537(o.rules!); + checkUnnamed551(o.rules!); unittest.expect( o.type!, unittest.equals('foo'), @@ -31661,12 +32357,12 @@ void checkRegionNetworkFirewallPoliciesGetEffectiveFirewallsResponseEffectiveFir buildCounterRegionNetworkFirewallPoliciesGetEffectiveFirewallsResponseEffectiveFirewallPolicy--; } -core.Map buildUnnamed538() => { +core.Map buildUnnamed552() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed538(core.Map o) { +void checkUnnamed552(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -31684,7 +32380,7 @@ api.RegionSetLabelsRequest buildRegionSetLabelsRequest() { buildCounterRegionSetLabelsRequest++; if (buildCounterRegionSetLabelsRequest < 3) { o.labelFingerprint = 'foo'; - o.labels = buildUnnamed538(); + o.labels = buildUnnamed552(); } buildCounterRegionSetLabelsRequest--; return o; @@ -31697,17 +32393,17 @@ void checkRegionSetLabelsRequest(api.RegionSetLabelsRequest o) { o.labelFingerprint!, unittest.equals('foo'), ); - checkUnnamed538(o.labels!); + checkUnnamed552(o.labels!); } buildCounterRegionSetLabelsRequest--; } -core.List buildUnnamed539() => [ +core.List buildUnnamed553() => [ buildBinding(), buildBinding(), ]; -void checkUnnamed539(core.List o) { +void checkUnnamed553(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkBinding(o[0]); checkBinding(o[1]); @@ -31718,7 +32414,7 @@ api.RegionSetPolicyRequest buildRegionSetPolicyRequest() { final o = api.RegionSetPolicyRequest(); buildCounterRegionSetPolicyRequest++; if (buildCounterRegionSetPolicyRequest < 3) { - o.bindings = buildUnnamed539(); + o.bindings = buildUnnamed553(); o.etag = 'foo'; o.policy = buildPolicy(); } @@ -31729,7 +32425,7 @@ api.RegionSetPolicyRequest buildRegionSetPolicyRequest() { void checkRegionSetPolicyRequest(api.RegionSetPolicyRequest o) { 
buildCounterRegionSetPolicyRequest++; if (buildCounterRegionSetPolicyRequest < 3) { - checkUnnamed539(o.bindings!); + checkUnnamed553(o.bindings!); unittest.expect( o.etag!, unittest.equals('foo'), @@ -31739,12 +32435,12 @@ void checkRegionSetPolicyRequest(api.RegionSetPolicyRequest o) { buildCounterRegionSetPolicyRequest--; } -core.List buildUnnamed540() => [ +core.List buildUnnamed554() => [ 'foo', 'foo', ]; -void checkUnnamed540(core.List o) { +void checkUnnamed554(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -31762,7 +32458,7 @@ api.RegionTargetHttpsProxiesSetSslCertificatesRequest final o = api.RegionTargetHttpsProxiesSetSslCertificatesRequest(); buildCounterRegionTargetHttpsProxiesSetSslCertificatesRequest++; if (buildCounterRegionTargetHttpsProxiesSetSslCertificatesRequest < 3) { - o.sslCertificates = buildUnnamed540(); + o.sslCertificates = buildUnnamed554(); } buildCounterRegionTargetHttpsProxiesSetSslCertificatesRequest--; return o; @@ -31772,7 +32468,7 @@ void checkRegionTargetHttpsProxiesSetSslCertificatesRequest( api.RegionTargetHttpsProxiesSetSslCertificatesRequest o) { buildCounterRegionTargetHttpsProxiesSetSslCertificatesRequest++; if (buildCounterRegionTargetHttpsProxiesSetSslCertificatesRequest < 3) { - checkUnnamed540(o.sslCertificates!); + checkUnnamed554(o.sslCertificates!); } buildCounterRegionTargetHttpsProxiesSetSslCertificatesRequest--; } @@ -31818,12 +32514,12 @@ void checkRequestMirrorPolicy(api.RequestMirrorPolicy o) { buildCounterRequestMirrorPolicy--; } -core.Map buildUnnamed541() => { +core.Map buildUnnamed555() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed541(core.Map o) { +void checkUnnamed555(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -31847,7 +32543,7 @@ api.Reservation buildReservation() { o.id = 'foo'; o.kind = 'foo'; o.name = 'foo'; - o.resourcePolicies = buildUnnamed541(); + o.resourcePolicies = buildUnnamed555(); o.resourceStatus = buildAllocationResourceStatus(); o.satisfiesPzs = true; o.selfLink = 'foo'; @@ -31889,7 +32585,7 @@ void checkReservation(api.Reservation o) { o.name!, unittest.equals('foo'), ); - checkUnnamed541(o.resourcePolicies!); + checkUnnamed555(o.resourcePolicies!); checkAllocationResourceStatus(o.resourceStatus!); unittest.expect(o.satisfiesPzs!, unittest.isTrue); unittest.expect( @@ -31911,12 +32607,12 @@ void checkReservation(api.Reservation o) { buildCounterReservation--; } -core.List buildUnnamed542() => [ +core.List buildUnnamed556() => [ 'foo', 'foo', ]; -void checkUnnamed542(core.List o) { +void checkUnnamed556(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -31935,7 +32631,7 @@ api.ReservationAffinity buildReservationAffinity() { if (buildCounterReservationAffinity < 3) { o.consumeReservationType = 'foo'; o.key = 'foo'; - o.values = buildUnnamed542(); + o.values = buildUnnamed556(); } buildCounterReservationAffinity--; return o; @@ -31952,28 +32648,28 @@ void checkReservationAffinity(api.ReservationAffinity o) { o.key!, unittest.equals('foo'), ); - checkUnnamed542(o.values!); + checkUnnamed556(o.values!); } buildCounterReservationAffinity--; } -core.Map buildUnnamed543() => { +core.Map buildUnnamed557() => { 'x': buildReservationsScopedList(), 'y': buildReservationsScopedList(), }; -void checkUnnamed543(core.Map o) { +void checkUnnamed557(core.Map o) { unittest.expect(o, unittest.hasLength(2)); checkReservationsScopedList(o['x']!); checkReservationsScopedList(o['y']!); } -core.List buildUnnamed544() 
=> [ +core.List buildUnnamed558() => [ 'foo', 'foo', ]; -void checkUnnamed544(core.List o) { +void checkUnnamed558(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -32014,12 +32710,12 @@ void checkReservationAggregatedListWarningData( buildCounterReservationAggregatedListWarningData--; } -core.List buildUnnamed545() => [ +core.List buildUnnamed559() => [ buildReservationAggregatedListWarningData(), buildReservationAggregatedListWarningData(), ]; -void checkUnnamed545(core.List o) { +void checkUnnamed559(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkReservationAggregatedListWarningData(o[0]); checkReservationAggregatedListWarningData(o[1]); @@ -32031,7 +32727,7 @@ api.ReservationAggregatedListWarning buildReservationAggregatedListWarning() { buildCounterReservationAggregatedListWarning++; if (buildCounterReservationAggregatedListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed545(); + o.data = buildUnnamed559(); o.message = 'foo'; } buildCounterReservationAggregatedListWarning--; @@ -32046,7 +32742,7 @@ void checkReservationAggregatedListWarning( o.code!, unittest.equals('foo'), ); - checkUnnamed545(o.data!); + checkUnnamed559(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -32061,11 +32757,11 @@ api.ReservationAggregatedList buildReservationAggregatedList() { buildCounterReservationAggregatedList++; if (buildCounterReservationAggregatedList < 3) { o.id = 'foo'; - o.items = buildUnnamed543(); + o.items = buildUnnamed557(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; - o.unreachables = buildUnnamed544(); + o.unreachables = buildUnnamed558(); o.warning = buildReservationAggregatedListWarning(); } buildCounterReservationAggregatedList--; @@ -32079,7 +32775,7 @@ void checkReservationAggregatedList(api.ReservationAggregatedList o) { o.id!, unittest.equals('foo'), ); - checkUnnamed543(o.items!); + checkUnnamed557(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -32092,18 +32788,18 @@ void checkReservationAggregatedList(api.ReservationAggregatedList o) { o.selfLink!, unittest.equals('foo'), ); - checkUnnamed544(o.unreachables!); + checkUnnamed558(o.unreachables!); checkReservationAggregatedListWarning(o.warning!); } buildCounterReservationAggregatedList--; } -core.List buildUnnamed546() => [ +core.List buildUnnamed560() => [ buildReservation(), buildReservation(), ]; -void checkUnnamed546(core.List o) { +void checkUnnamed560(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkReservation(o[0]); checkReservation(o[1]); @@ -32136,12 +32832,12 @@ void checkReservationListWarningData(api.ReservationListWarningData o) { buildCounterReservationListWarningData--; } -core.List buildUnnamed547() => [ +core.List buildUnnamed561() => [ buildReservationListWarningData(), buildReservationListWarningData(), ]; -void checkUnnamed547(core.List o) { +void checkUnnamed561(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkReservationListWarningData(o[0]); checkReservationListWarningData(o[1]); @@ -32153,7 +32849,7 @@ api.ReservationListWarning buildReservationListWarning() { buildCounterReservationListWarning++; if (buildCounterReservationListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed547(); + o.data = buildUnnamed561(); o.message = 'foo'; } buildCounterReservationListWarning--; @@ -32167,7 +32863,7 @@ void checkReservationListWarning(api.ReservationListWarning o) { o.code!, unittest.equals('foo'), ); - checkUnnamed547(o.data!); + checkUnnamed561(o.data!); unittest.expect( 
o.message!, unittest.equals('foo'), @@ -32182,7 +32878,7 @@ api.ReservationList buildReservationList() { buildCounterReservationList++; if (buildCounterReservationList < 3) { o.id = 'foo'; - o.items = buildUnnamed546(); + o.items = buildUnnamed560(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; @@ -32199,7 +32895,7 @@ void checkReservationList(api.ReservationList o) { o.id!, unittest.equals('foo'), ); - checkUnnamed546(o.items!); + checkUnnamed560(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -32239,12 +32935,12 @@ void checkReservationsResizeRequest(api.ReservationsResizeRequest o) { buildCounterReservationsResizeRequest--; } -core.List buildUnnamed548() => [ +core.List buildUnnamed562() => [ buildReservation(), buildReservation(), ]; -void checkUnnamed548(core.List o) { +void checkUnnamed562(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkReservation(o[0]); checkReservation(o[1]); @@ -32278,12 +32974,12 @@ void checkReservationsScopedListWarningData( buildCounterReservationsScopedListWarningData--; } -core.List buildUnnamed549() => [ +core.List buildUnnamed563() => [ buildReservationsScopedListWarningData(), buildReservationsScopedListWarningData(), ]; -void checkUnnamed549(core.List o) { +void checkUnnamed563(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkReservationsScopedListWarningData(o[0]); checkReservationsScopedListWarningData(o[1]); @@ -32295,7 +32991,7 @@ api.ReservationsScopedListWarning buildReservationsScopedListWarning() { buildCounterReservationsScopedListWarning++; if (buildCounterReservationsScopedListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed549(); + o.data = buildUnnamed563(); o.message = 'foo'; } buildCounterReservationsScopedListWarning--; @@ -32309,7 +33005,7 @@ void checkReservationsScopedListWarning(api.ReservationsScopedListWarning o) { o.code!, unittest.equals('foo'), ); - checkUnnamed549(o.data!); + checkUnnamed563(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -32323,7 +33019,7 @@ api.ReservationsScopedList buildReservationsScopedList() { final o = api.ReservationsScopedList(); buildCounterReservationsScopedList++; if (buildCounterReservationsScopedList < 3) { - o.reservations = buildUnnamed548(); + o.reservations = buildUnnamed562(); o.warning = buildReservationsScopedListWarning(); } buildCounterReservationsScopedList--; @@ -32333,7 +33029,7 @@ api.ReservationsScopedList buildReservationsScopedList() { void checkReservationsScopedList(api.ReservationsScopedList o) { buildCounterReservationsScopedList++; if (buildCounterReservationsScopedList < 3) { - checkUnnamed548(o.reservations!); + checkUnnamed562(o.reservations!); checkReservationsScopedListWarning(o.warning!); } buildCounterReservationsScopedList--; @@ -32393,12 +33089,12 @@ void checkResourceGroupReference(api.ResourceGroupReference o) { buildCounterResourceGroupReference--; } -core.List buildUnnamed550() => [ +core.List buildUnnamed564() => [ buildResourcePolicy(), buildResourcePolicy(), ]; -void checkUnnamed550(core.List o) { +void checkUnnamed564(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkResourcePolicy(o[0]); checkResourcePolicy(o[1]); @@ -32433,12 +33129,12 @@ void checkResourcePoliciesScopedListWarningData( buildCounterResourcePoliciesScopedListWarningData--; } -core.List buildUnnamed551() => [ +core.List buildUnnamed565() => [ buildResourcePoliciesScopedListWarningData(), buildResourcePoliciesScopedListWarningData(), ]; -void checkUnnamed551(core.List o) { +void 
checkUnnamed565(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkResourcePoliciesScopedListWarningData(o[0]); checkResourcePoliciesScopedListWarningData(o[1]); @@ -32450,7 +33146,7 @@ api.ResourcePoliciesScopedListWarning buildResourcePoliciesScopedListWarning() { buildCounterResourcePoliciesScopedListWarning++; if (buildCounterResourcePoliciesScopedListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed551(); + o.data = buildUnnamed565(); o.message = 'foo'; } buildCounterResourcePoliciesScopedListWarning--; @@ -32465,7 +33161,7 @@ void checkResourcePoliciesScopedListWarning( o.code!, unittest.equals('foo'), ); - checkUnnamed551(o.data!); + checkUnnamed565(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -32479,7 +33175,7 @@ api.ResourcePoliciesScopedList buildResourcePoliciesScopedList() { final o = api.ResourcePoliciesScopedList(); buildCounterResourcePoliciesScopedList++; if (buildCounterResourcePoliciesScopedList < 3) { - o.resourcePolicies = buildUnnamed550(); + o.resourcePolicies = buildUnnamed564(); o.warning = buildResourcePoliciesScopedListWarning(); } buildCounterResourcePoliciesScopedList--; @@ -32489,7 +33185,7 @@ api.ResourcePoliciesScopedList buildResourcePoliciesScopedList() { void checkResourcePoliciesScopedList(api.ResourcePoliciesScopedList o) { buildCounterResourcePoliciesScopedList++; if (buildCounterResourcePoliciesScopedList < 3) { - checkUnnamed550(o.resourcePolicies!); + checkUnnamed564(o.resourcePolicies!); checkResourcePoliciesScopedListWarning(o.warning!); } buildCounterResourcePoliciesScopedList--; @@ -32564,23 +33260,23 @@ void checkResourcePolicy(api.ResourcePolicy o) { buildCounterResourcePolicy--; } -core.Map buildUnnamed552() => { +core.Map buildUnnamed566() => { 'x': buildResourcePoliciesScopedList(), 'y': buildResourcePoliciesScopedList(), }; -void checkUnnamed552(core.Map o) { +void checkUnnamed566(core.Map o) { unittest.expect(o, unittest.hasLength(2)); checkResourcePoliciesScopedList(o['x']!); checkResourcePoliciesScopedList(o['y']!); } -core.List buildUnnamed553() => [ +core.List buildUnnamed567() => [ 'foo', 'foo', ]; -void checkUnnamed553(core.List o) { +void checkUnnamed567(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -32621,12 +33317,12 @@ void checkResourcePolicyAggregatedListWarningData( buildCounterResourcePolicyAggregatedListWarningData--; } -core.List buildUnnamed554() => [ +core.List buildUnnamed568() => [ buildResourcePolicyAggregatedListWarningData(), buildResourcePolicyAggregatedListWarningData(), ]; -void checkUnnamed554(core.List o) { +void checkUnnamed568(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkResourcePolicyAggregatedListWarningData(o[0]); checkResourcePolicyAggregatedListWarningData(o[1]); @@ -32639,7 +33335,7 @@ api.ResourcePolicyAggregatedListWarning buildCounterResourcePolicyAggregatedListWarning++; if (buildCounterResourcePolicyAggregatedListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed554(); + o.data = buildUnnamed568(); o.message = 'foo'; } buildCounterResourcePolicyAggregatedListWarning--; @@ -32654,7 +33350,7 @@ void checkResourcePolicyAggregatedListWarning( o.code!, unittest.equals('foo'), ); - checkUnnamed554(o.data!); + checkUnnamed568(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -32670,11 +33366,11 @@ api.ResourcePolicyAggregatedList buildResourcePolicyAggregatedList() { if (buildCounterResourcePolicyAggregatedList < 3) { o.etag = 'foo'; o.id = 'foo'; - o.items = buildUnnamed552(); + o.items = 
buildUnnamed566(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; - o.unreachables = buildUnnamed553(); + o.unreachables = buildUnnamed567(); o.warning = buildResourcePolicyAggregatedListWarning(); } buildCounterResourcePolicyAggregatedList--; @@ -32692,7 +33388,7 @@ void checkResourcePolicyAggregatedList(api.ResourcePolicyAggregatedList o) { o.id!, unittest.equals('foo'), ); - checkUnnamed552(o.items!); + checkUnnamed566(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -32705,7 +33401,7 @@ void checkResourcePolicyAggregatedList(api.ResourcePolicyAggregatedList o) { o.selfLink!, unittest.equals('foo'), ); - checkUnnamed553(o.unreachables!); + checkUnnamed567(o.unreachables!); checkResourcePolicyAggregatedListWarning(o.warning!); } buildCounterResourcePolicyAggregatedList--; @@ -32888,12 +33584,12 @@ void checkResourcePolicyInstanceSchedulePolicySchedule( buildCounterResourcePolicyInstanceSchedulePolicySchedule--; } -core.List buildUnnamed555() => [ +core.List buildUnnamed569() => [ buildResourcePolicy(), buildResourcePolicy(), ]; -void checkUnnamed555(core.List o) { +void checkUnnamed569(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkResourcePolicy(o[0]); checkResourcePolicy(o[1]); @@ -32926,12 +33622,12 @@ void checkResourcePolicyListWarningData(api.ResourcePolicyListWarningData o) { buildCounterResourcePolicyListWarningData--; } -core.List buildUnnamed556() => [ +core.List buildUnnamed570() => [ buildResourcePolicyListWarningData(), buildResourcePolicyListWarningData(), ]; -void checkUnnamed556(core.List o) { +void checkUnnamed570(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkResourcePolicyListWarningData(o[0]); checkResourcePolicyListWarningData(o[1]); @@ -32943,7 +33639,7 @@ api.ResourcePolicyListWarning buildResourcePolicyListWarning() { buildCounterResourcePolicyListWarning++; if (buildCounterResourcePolicyListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed556(); + o.data = buildUnnamed570(); o.message = 'foo'; } buildCounterResourcePolicyListWarning--; @@ -32957,7 +33653,7 @@ void checkResourcePolicyListWarning(api.ResourcePolicyListWarning o) { o.code!, unittest.equals('foo'), ); - checkUnnamed556(o.data!); + checkUnnamed570(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -32973,7 +33669,7 @@ api.ResourcePolicyList buildResourcePolicyList() { if (buildCounterResourcePolicyList < 3) { o.etag = 'foo'; o.id = 'foo'; - o.items = buildUnnamed555(); + o.items = buildUnnamed569(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; @@ -32994,7 +33690,7 @@ void checkResourcePolicyList(api.ResourcePolicyList o) { o.id!, unittest.equals('foo'), ); - checkUnnamed555(o.items!); + checkUnnamed569(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -33148,12 +33844,12 @@ void checkResourcePolicySnapshotSchedulePolicySchedule( buildCounterResourcePolicySnapshotSchedulePolicySchedule--; } -core.Map buildUnnamed557() => { +core.Map buildUnnamed571() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed557(core.Map o) { +void checkUnnamed571(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -33165,12 +33861,12 @@ void checkUnnamed557(core.Map o) { ); } -core.List buildUnnamed558() => [ +core.List buildUnnamed572() => [ 'foo', 'foo', ]; -void checkUnnamed558(core.List o) { +void checkUnnamed572(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -33190,8 +33886,8 @@ api.ResourcePolicySnapshotSchedulePolicySnapshotProperties if 
(buildCounterResourcePolicySnapshotSchedulePolicySnapshotProperties < 3) { o.chainName = 'foo'; o.guestFlush = true; - o.labels = buildUnnamed557(); - o.storageLocations = buildUnnamed558(); + o.labels = buildUnnamed571(); + o.storageLocations = buildUnnamed572(); } buildCounterResourcePolicySnapshotSchedulePolicySnapshotProperties--; return o; @@ -33206,18 +33902,18 @@ void checkResourcePolicySnapshotSchedulePolicySnapshotProperties( unittest.equals('foo'), ); unittest.expect(o.guestFlush!, unittest.isTrue); - checkUnnamed557(o.labels!); - checkUnnamed558(o.storageLocations!); + checkUnnamed571(o.labels!); + checkUnnamed572(o.storageLocations!); } buildCounterResourcePolicySnapshotSchedulePolicySnapshotProperties--; } -core.List buildUnnamed559() => [ +core.List buildUnnamed573() => [ buildResourcePolicyWeeklyCycleDayOfWeek(), buildResourcePolicyWeeklyCycleDayOfWeek(), ]; -void checkUnnamed559(core.List o) { +void checkUnnamed573(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkResourcePolicyWeeklyCycleDayOfWeek(o[0]); checkResourcePolicyWeeklyCycleDayOfWeek(o[1]); @@ -33228,7 +33924,7 @@ api.ResourcePolicyWeeklyCycle buildResourcePolicyWeeklyCycle() { final o = api.ResourcePolicyWeeklyCycle(); buildCounterResourcePolicyWeeklyCycle++; if (buildCounterResourcePolicyWeeklyCycle < 3) { - o.dayOfWeeks = buildUnnamed559(); + o.dayOfWeeks = buildUnnamed573(); } buildCounterResourcePolicyWeeklyCycle--; return o; @@ -33237,7 +33933,7 @@ api.ResourcePolicyWeeklyCycle buildResourcePolicyWeeklyCycle() { void checkResourcePolicyWeeklyCycle(api.ResourcePolicyWeeklyCycle o) { buildCounterResourcePolicyWeeklyCycle++; if (buildCounterResourcePolicyWeeklyCycle < 3) { - checkUnnamed559(o.dayOfWeeks!); + checkUnnamed573(o.dayOfWeeks!); } buildCounterResourcePolicyWeeklyCycle--; } @@ -33324,23 +34020,23 @@ void checkResourceStatusScheduling(api.ResourceStatusScheduling o) { buildCounterResourceStatusScheduling--; } -core.List buildUnnamed560() => [ +core.List buildUnnamed574() => [ buildRouteAsPath(), buildRouteAsPath(), ]; -void checkUnnamed560(core.List o) { +void checkUnnamed574(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkRouteAsPath(o[0]); checkRouteAsPath(o[1]); } -core.List buildUnnamed561() => [ +core.List buildUnnamed575() => [ 'foo', 'foo', ]; -void checkUnnamed561(core.List o) { +void checkUnnamed575(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -33379,12 +34075,12 @@ void checkRouteWarningsData(api.RouteWarningsData o) { buildCounterRouteWarningsData--; } -core.List buildUnnamed562() => [ +core.List buildUnnamed576() => [ buildRouteWarningsData(), buildRouteWarningsData(), ]; -void checkUnnamed562(core.List o) { +void checkUnnamed576(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkRouteWarningsData(o[0]); checkRouteWarningsData(o[1]); @@ -33396,7 +34092,7 @@ api.RouteWarnings buildRouteWarnings() { buildCounterRouteWarnings++; if (buildCounterRouteWarnings < 3) { o.code = 'foo'; - o.data = buildUnnamed562(); + o.data = buildUnnamed576(); o.message = 'foo'; } buildCounterRouteWarnings--; @@ -33410,7 +34106,7 @@ void checkRouteWarnings(api.RouteWarnings o) { o.code!, unittest.equals('foo'), ); - checkUnnamed562(o.data!); + checkUnnamed576(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -33419,12 +34115,12 @@ void checkRouteWarnings(api.RouteWarnings o) { buildCounterRouteWarnings--; } -core.List buildUnnamed563() => [ +core.List buildUnnamed577() => [ buildRouteWarnings(), buildRouteWarnings(), ]; 
-void checkUnnamed563(core.List o) { +void checkUnnamed577(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkRouteWarnings(o[0]); checkRouteWarnings(o[1]); @@ -33435,7 +34131,7 @@ api.Route buildRoute() { final o = api.Route(); buildCounterRoute++; if (buildCounterRoute < 3) { - o.asPaths = buildUnnamed560(); + o.asPaths = buildUnnamed574(); o.creationTimestamp = 'foo'; o.description = 'foo'; o.destRange = 'foo'; @@ -33447,16 +34143,19 @@ api.Route buildRoute() { o.nextHopHub = 'foo'; o.nextHopIlb = 'foo'; o.nextHopInstance = 'foo'; + o.nextHopInterRegionCost = 42; o.nextHopIp = 'foo'; + o.nextHopMed = 42; o.nextHopNetwork = 'foo'; + o.nextHopOrigin = 'foo'; o.nextHopPeering = 'foo'; o.nextHopVpnTunnel = 'foo'; o.priority = 42; o.routeStatus = 'foo'; o.routeType = 'foo'; o.selfLink = 'foo'; - o.tags = buildUnnamed561(); - o.warnings = buildUnnamed563(); + o.tags = buildUnnamed575(); + o.warnings = buildUnnamed577(); } buildCounterRoute--; return o; @@ -33465,7 +34164,7 @@ api.Route buildRoute() { void checkRoute(api.Route o) { buildCounterRoute++; if (buildCounterRoute < 3) { - checkUnnamed560(o.asPaths!); + checkUnnamed574(o.asPaths!); unittest.expect( o.creationTimestamp!, unittest.equals('foo'), @@ -33510,14 +34209,26 @@ void checkRoute(api.Route o) { o.nextHopInstance!, unittest.equals('foo'), ); + unittest.expect( + o.nextHopInterRegionCost!, + unittest.equals(42), + ); unittest.expect( o.nextHopIp!, unittest.equals('foo'), ); + unittest.expect( + o.nextHopMed!, + unittest.equals(42), + ); unittest.expect( o.nextHopNetwork!, unittest.equals('foo'), ); + unittest.expect( + o.nextHopOrigin!, + unittest.equals('foo'), + ); unittest.expect( o.nextHopPeering!, unittest.equals('foo'), @@ -33542,18 +34253,18 @@ void checkRoute(api.Route o) { o.selfLink!, unittest.equals('foo'), ); - checkUnnamed561(o.tags!); - checkUnnamed563(o.warnings!); + checkUnnamed575(o.tags!); + checkUnnamed577(o.warnings!); } buildCounterRoute--; } -core.List buildUnnamed564() => [ +core.List buildUnnamed578() => [ 42, 42, ]; -void checkUnnamed564(core.List o) { +void checkUnnamed578(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -33570,7 +34281,7 @@ api.RouteAsPath buildRouteAsPath() { final o = api.RouteAsPath(); buildCounterRouteAsPath++; if (buildCounterRouteAsPath < 3) { - o.asLists = buildUnnamed564(); + o.asLists = buildUnnamed578(); o.pathSegmentType = 'foo'; } buildCounterRouteAsPath--; @@ -33580,7 +34291,7 @@ api.RouteAsPath buildRouteAsPath() { void checkRouteAsPath(api.RouteAsPath o) { buildCounterRouteAsPath++; if (buildCounterRouteAsPath < 3) { - checkUnnamed564(o.asLists!); + checkUnnamed578(o.asLists!); unittest.expect( o.pathSegmentType!, unittest.equals('foo'), @@ -33589,12 +34300,12 @@ void checkRouteAsPath(api.RouteAsPath o) { buildCounterRouteAsPath--; } -core.List buildUnnamed565() => [ +core.List buildUnnamed579() => [ buildRoute(), buildRoute(), ]; -void checkUnnamed565(core.List o) { +void checkUnnamed579(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkRoute(o[0]); checkRoute(o[1]); @@ -33627,12 +34338,12 @@ void checkRouteListWarningData(api.RouteListWarningData o) { buildCounterRouteListWarningData--; } -core.List buildUnnamed566() => [ +core.List buildUnnamed580() => [ buildRouteListWarningData(), buildRouteListWarningData(), ]; -void checkUnnamed566(core.List o) { +void checkUnnamed580(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkRouteListWarningData(o[0]); checkRouteListWarningData(o[1]); @@ -33644,7 
+34355,7 @@ api.RouteListWarning buildRouteListWarning() { buildCounterRouteListWarning++; if (buildCounterRouteListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed566(); + o.data = buildUnnamed580(); o.message = 'foo'; } buildCounterRouteListWarning--; @@ -33658,7 +34369,7 @@ void checkRouteListWarning(api.RouteListWarning o) { o.code!, unittest.equals('foo'), ); - checkUnnamed566(o.data!); + checkUnnamed580(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -33673,7 +34384,7 @@ api.RouteList buildRouteList() { buildCounterRouteList++; if (buildCounterRouteList < 3) { o.id = 'foo'; - o.items = buildUnnamed565(); + o.items = buildUnnamed579(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; @@ -33690,7 +34401,7 @@ void checkRouteList(api.RouteList o) { o.id!, unittest.equals('foo'), ); - checkUnnamed565(o.items!); + checkUnnamed579(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -33708,45 +34419,45 @@ void checkRouteList(api.RouteList o) { buildCounterRouteList--; } -core.List buildUnnamed567() => [ +core.List buildUnnamed581() => [ buildRouterBgpPeer(), buildRouterBgpPeer(), ]; -void checkUnnamed567(core.List o) { +void checkUnnamed581(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkRouterBgpPeer(o[0]); checkRouterBgpPeer(o[1]); } -core.List buildUnnamed568() => [ +core.List buildUnnamed582() => [ buildRouterInterface(), buildRouterInterface(), ]; -void checkUnnamed568(core.List o) { +void checkUnnamed582(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkRouterInterface(o[0]); checkRouterInterface(o[1]); } -core.List buildUnnamed569() => [ +core.List buildUnnamed583() => [ buildRouterMd5AuthenticationKey(), buildRouterMd5AuthenticationKey(), ]; -void checkUnnamed569(core.List o) { +void checkUnnamed583(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkRouterMd5AuthenticationKey(o[0]); checkRouterMd5AuthenticationKey(o[1]); } -core.List buildUnnamed570() => [ +core.List buildUnnamed584() => [ buildRouterNat(), buildRouterNat(), ]; -void checkUnnamed570(core.List o) { +void checkUnnamed584(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkRouterNat(o[0]); checkRouterNat(o[1]); @@ -33758,16 +34469,16 @@ api.Router buildRouter() { buildCounterRouter++; if (buildCounterRouter < 3) { o.bgp = buildRouterBgp(); - o.bgpPeers = buildUnnamed567(); + o.bgpPeers = buildUnnamed581(); o.creationTimestamp = 'foo'; o.description = 'foo'; o.encryptedInterconnectRouter = true; o.id = 'foo'; - o.interfaces = buildUnnamed568(); + o.interfaces = buildUnnamed582(); o.kind = 'foo'; - o.md5AuthenticationKeys = buildUnnamed569(); + o.md5AuthenticationKeys = buildUnnamed583(); o.name = 'foo'; - o.nats = buildUnnamed570(); + o.nats = buildUnnamed584(); o.network = 'foo'; o.region = 'foo'; o.selfLink = 'foo'; @@ -33780,7 +34491,7 @@ void checkRouter(api.Router o) { buildCounterRouter++; if (buildCounterRouter < 3) { checkRouterBgp(o.bgp!); - checkUnnamed567(o.bgpPeers!); + checkUnnamed581(o.bgpPeers!); unittest.expect( o.creationTimestamp!, unittest.equals('foo'), @@ -33794,17 +34505,17 @@ void checkRouter(api.Router o) { o.id!, unittest.equals('foo'), ); - checkUnnamed568(o.interfaces!); + checkUnnamed582(o.interfaces!); unittest.expect( o.kind!, unittest.equals('foo'), ); - checkUnnamed569(o.md5AuthenticationKeys!); + checkUnnamed583(o.md5AuthenticationKeys!); unittest.expect( o.name!, unittest.equals('foo'), ); - checkUnnamed570(o.nats!); + checkUnnamed584(o.nats!); unittest.expect( o.network!, 
unittest.equals('foo'), @@ -33848,23 +34559,23 @@ void checkRouterAdvertisedIpRange(api.RouterAdvertisedIpRange o) { buildCounterRouterAdvertisedIpRange--; } -core.Map buildUnnamed571() => { +core.Map buildUnnamed585() => { 'x': buildRoutersScopedList(), 'y': buildRoutersScopedList(), }; -void checkUnnamed571(core.Map o) { +void checkUnnamed585(core.Map o) { unittest.expect(o, unittest.hasLength(2)); checkRoutersScopedList(o['x']!); checkRoutersScopedList(o['y']!); } -core.List buildUnnamed572() => [ +core.List buildUnnamed586() => [ 'foo', 'foo', ]; -void checkUnnamed572(core.List o) { +void checkUnnamed586(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -33904,12 +34615,12 @@ void checkRouterAggregatedListWarningData( buildCounterRouterAggregatedListWarningData--; } -core.List buildUnnamed573() => [ +core.List buildUnnamed587() => [ buildRouterAggregatedListWarningData(), buildRouterAggregatedListWarningData(), ]; -void checkUnnamed573(core.List o) { +void checkUnnamed587(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkRouterAggregatedListWarningData(o[0]); checkRouterAggregatedListWarningData(o[1]); @@ -33921,7 +34632,7 @@ api.RouterAggregatedListWarning buildRouterAggregatedListWarning() { buildCounterRouterAggregatedListWarning++; if (buildCounterRouterAggregatedListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed573(); + o.data = buildUnnamed587(); o.message = 'foo'; } buildCounterRouterAggregatedListWarning--; @@ -33935,7 +34646,7 @@ void checkRouterAggregatedListWarning(api.RouterAggregatedListWarning o) { o.code!, unittest.equals('foo'), ); - checkUnnamed573(o.data!); + checkUnnamed587(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -33950,11 +34661,11 @@ api.RouterAggregatedList buildRouterAggregatedList() { buildCounterRouterAggregatedList++; if (buildCounterRouterAggregatedList < 3) { o.id = 'foo'; - o.items = buildUnnamed571(); + o.items = buildUnnamed585(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; - o.unreachables = buildUnnamed572(); + o.unreachables = buildUnnamed586(); o.warning = buildRouterAggregatedListWarning(); } buildCounterRouterAggregatedList--; @@ -33968,7 +34679,7 @@ void checkRouterAggregatedList(api.RouterAggregatedList o) { o.id!, unittest.equals('foo'), ); - checkUnnamed571(o.items!); + checkUnnamed585(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -33981,18 +34692,18 @@ void checkRouterAggregatedList(api.RouterAggregatedList o) { o.selfLink!, unittest.equals('foo'), ); - checkUnnamed572(o.unreachables!); + checkUnnamed586(o.unreachables!); checkRouterAggregatedListWarning(o.warning!); } buildCounterRouterAggregatedList--; } -core.List buildUnnamed574() => [ +core.List buildUnnamed588() => [ 'foo', 'foo', ]; -void checkUnnamed574(core.List o) { +void checkUnnamed588(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -34004,12 +34715,12 @@ void checkUnnamed574(core.List o) { ); } -core.List buildUnnamed575() => [ +core.List buildUnnamed589() => [ buildRouterAdvertisedIpRange(), buildRouterAdvertisedIpRange(), ]; -void checkUnnamed575(core.List o) { +void checkUnnamed589(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkRouterAdvertisedIpRange(o[0]); checkRouterAdvertisedIpRange(o[1]); @@ -34021,8 +34732,8 @@ api.RouterBgp buildRouterBgp() { buildCounterRouterBgp++; if (buildCounterRouterBgp < 3) { o.advertiseMode = 'foo'; - o.advertisedGroups = buildUnnamed574(); - o.advertisedIpRanges = 
buildUnnamed575(); + o.advertisedGroups = buildUnnamed588(); + o.advertisedIpRanges = buildUnnamed589(); o.asn = 42; o.identifierRange = 'foo'; o.keepaliveInterval = 42; @@ -34038,8 +34749,8 @@ void checkRouterBgp(api.RouterBgp o) { o.advertiseMode!, unittest.equals('foo'), ); - checkUnnamed574(o.advertisedGroups!); - checkUnnamed575(o.advertisedIpRanges!); + checkUnnamed588(o.advertisedGroups!); + checkUnnamed589(o.advertisedIpRanges!); unittest.expect( o.asn!, unittest.equals(42), @@ -34056,12 +34767,12 @@ void checkRouterBgp(api.RouterBgp o) { buildCounterRouterBgp--; } -core.List buildUnnamed576() => [ +core.List buildUnnamed590() => [ 'foo', 'foo', ]; -void checkUnnamed576(core.List o) { +void checkUnnamed590(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -34073,34 +34784,34 @@ void checkUnnamed576(core.List o) { ); } -core.List buildUnnamed577() => [ +core.List buildUnnamed591() => [ buildRouterAdvertisedIpRange(), buildRouterAdvertisedIpRange(), ]; -void checkUnnamed577(core.List o) { +void checkUnnamed591(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkRouterAdvertisedIpRange(o[0]); checkRouterAdvertisedIpRange(o[1]); } -core.List buildUnnamed578() => [ +core.List buildUnnamed592() => [ buildRouterBgpPeerCustomLearnedIpRange(), buildRouterBgpPeerCustomLearnedIpRange(), ]; -void checkUnnamed578(core.List o) { +void checkUnnamed592(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkRouterBgpPeerCustomLearnedIpRange(o[0]); checkRouterBgpPeerCustomLearnedIpRange(o[1]); } -core.List buildUnnamed579() => [ +core.List buildUnnamed593() => [ 'foo', 'foo', ]; -void checkUnnamed579(core.List o) { +void checkUnnamed593(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -34112,12 +34823,12 @@ void checkUnnamed579(core.List o) { ); } -core.List buildUnnamed580() => [ +core.List buildUnnamed594() => [ 'foo', 'foo', ]; -void checkUnnamed580(core.List o) { +void checkUnnamed594(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -34135,17 +34846,17 @@ api.RouterBgpPeer buildRouterBgpPeer() { buildCounterRouterBgpPeer++; if (buildCounterRouterBgpPeer < 3) { o.advertiseMode = 'foo'; - o.advertisedGroups = buildUnnamed576(); - o.advertisedIpRanges = buildUnnamed577(); + o.advertisedGroups = buildUnnamed590(); + o.advertisedIpRanges = buildUnnamed591(); o.advertisedRoutePriority = 42; o.bfd = buildRouterBgpPeerBfd(); - o.customLearnedIpRanges = buildUnnamed578(); + o.customLearnedIpRanges = buildUnnamed592(); o.customLearnedRoutePriority = 42; o.enable = 'foo'; o.enableIpv4 = true; o.enableIpv6 = true; - o.exportPolicies = buildUnnamed579(); - o.importPolicies = buildUnnamed580(); + o.exportPolicies = buildUnnamed593(); + o.importPolicies = buildUnnamed594(); o.interfaceName = 'foo'; o.ipAddress = 'foo'; o.ipv4NexthopAddress = 'foo'; @@ -34170,14 +34881,14 @@ void checkRouterBgpPeer(api.RouterBgpPeer o) { o.advertiseMode!, unittest.equals('foo'), ); - checkUnnamed576(o.advertisedGroups!); - checkUnnamed577(o.advertisedIpRanges!); + checkUnnamed590(o.advertisedGroups!); + checkUnnamed591(o.advertisedIpRanges!); unittest.expect( o.advertisedRoutePriority!, unittest.equals(42), ); checkRouterBgpPeerBfd(o.bfd!); - checkUnnamed578(o.customLearnedIpRanges!); + checkUnnamed592(o.customLearnedIpRanges!); unittest.expect( o.customLearnedRoutePriority!, unittest.equals(42), @@ -34188,8 +34899,8 @@ void checkRouterBgpPeer(api.RouterBgpPeer o) { ); unittest.expect(o.enableIpv4!, 
unittest.isTrue); unittest.expect(o.enableIpv6!, unittest.isTrue); - checkUnnamed579(o.exportPolicies!); - checkUnnamed580(o.importPolicies!); + checkUnnamed593(o.exportPolicies!); + checkUnnamed594(o.importPolicies!); unittest.expect( o.interfaceName!, unittest.equals('foo'), @@ -34364,12 +35075,12 @@ void checkRouterInterface(api.RouterInterface o) { buildCounterRouterInterface--; } -core.List buildUnnamed581() => [ +core.List buildUnnamed595() => [ buildRouter(), buildRouter(), ]; -void checkUnnamed581(core.List o) { +void checkUnnamed595(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkRouter(o[0]); checkRouter(o[1]); @@ -34402,12 +35113,12 @@ void checkRouterListWarningData(api.RouterListWarningData o) { buildCounterRouterListWarningData--; } -core.List buildUnnamed582() => [ +core.List buildUnnamed596() => [ buildRouterListWarningData(), buildRouterListWarningData(), ]; -void checkUnnamed582(core.List o) { +void checkUnnamed596(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkRouterListWarningData(o[0]); checkRouterListWarningData(o[1]); @@ -34419,7 +35130,7 @@ api.RouterListWarning buildRouterListWarning() { buildCounterRouterListWarning++; if (buildCounterRouterListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed582(); + o.data = buildUnnamed596(); o.message = 'foo'; } buildCounterRouterListWarning--; @@ -34433,7 +35144,7 @@ void checkRouterListWarning(api.RouterListWarning o) { o.code!, unittest.equals('foo'), ); - checkUnnamed582(o.data!); + checkUnnamed596(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -34448,7 +35159,7 @@ api.RouterList buildRouterList() { buildCounterRouterList++; if (buildCounterRouterList < 3) { o.id = 'foo'; - o.items = buildUnnamed581(); + o.items = buildUnnamed595(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; @@ -34465,7 +35176,7 @@ void checkRouterList(api.RouterList o) { o.id!, unittest.equals('foo'), ); - checkUnnamed581(o.items!); + checkUnnamed595(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -34510,12 +35221,12 @@ void checkRouterMd5AuthenticationKey(api.RouterMd5AuthenticationKey o) { buildCounterRouterMd5AuthenticationKey--; } -core.List buildUnnamed583() => [ +core.List buildUnnamed597() => [ 'foo', 'foo', ]; -void checkUnnamed583(core.List o) { +void checkUnnamed597(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -34527,12 +35238,12 @@ void checkUnnamed583(core.List o) { ); } -core.List buildUnnamed584() => [ +core.List buildUnnamed598() => [ 'foo', 'foo', ]; -void checkUnnamed584(core.List o) { +void checkUnnamed598(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -34544,12 +35255,12 @@ void checkUnnamed584(core.List o) { ); } -core.List buildUnnamed585() => [ +core.List buildUnnamed599() => [ 'foo', 'foo', ]; -void checkUnnamed585(core.List o) { +void checkUnnamed599(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -34561,23 +35272,23 @@ void checkUnnamed585(core.List o) { ); } -core.List buildUnnamed586() => [ +core.List buildUnnamed600() => [ buildRouterNatRule(), buildRouterNatRule(), ]; -void checkUnnamed586(core.List o) { +void checkUnnamed600(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkRouterNatRule(o[0]); checkRouterNatRule(o[1]); } -core.List buildUnnamed587() => [ +core.List buildUnnamed601() => [ buildRouterNatSubnetworkToNat(), buildRouterNatSubnetworkToNat(), ]; -void checkUnnamed587(core.List o) { +void 
checkUnnamed601(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkRouterNatSubnetworkToNat(o[0]); checkRouterNatSubnetworkToNat(o[1]); @@ -34589,20 +35300,20 @@ api.RouterNat buildRouterNat() { buildCounterRouterNat++; if (buildCounterRouterNat < 3) { o.autoNetworkTier = 'foo'; - o.drainNatIps = buildUnnamed583(); + o.drainNatIps = buildUnnamed597(); o.enableDynamicPortAllocation = true; o.enableEndpointIndependentMapping = true; - o.endpointTypes = buildUnnamed584(); + o.endpointTypes = buildUnnamed598(); o.icmpIdleTimeoutSec = 42; o.logConfig = buildRouterNatLogConfig(); o.maxPortsPerVm = 42; o.minPortsPerVm = 42; o.name = 'foo'; o.natIpAllocateOption = 'foo'; - o.natIps = buildUnnamed585(); - o.rules = buildUnnamed586(); + o.natIps = buildUnnamed599(); + o.rules = buildUnnamed600(); o.sourceSubnetworkIpRangesToNat = 'foo'; - o.subnetworks = buildUnnamed587(); + o.subnetworks = buildUnnamed601(); o.tcpEstablishedIdleTimeoutSec = 42; o.tcpTimeWaitTimeoutSec = 42; o.tcpTransitoryIdleTimeoutSec = 42; @@ -34620,10 +35331,10 @@ void checkRouterNat(api.RouterNat o) { o.autoNetworkTier!, unittest.equals('foo'), ); - checkUnnamed583(o.drainNatIps!); + checkUnnamed597(o.drainNatIps!); unittest.expect(o.enableDynamicPortAllocation!, unittest.isTrue); unittest.expect(o.enableEndpointIndependentMapping!, unittest.isTrue); - checkUnnamed584(o.endpointTypes!); + checkUnnamed598(o.endpointTypes!); unittest.expect( o.icmpIdleTimeoutSec!, unittest.equals(42), @@ -34645,13 +35356,13 @@ void checkRouterNat(api.RouterNat o) { o.natIpAllocateOption!, unittest.equals('foo'), ); - checkUnnamed585(o.natIps!); - checkUnnamed586(o.rules!); + checkUnnamed599(o.natIps!); + checkUnnamed600(o.rules!); unittest.expect( o.sourceSubnetworkIpRangesToNat!, unittest.equals('foo'), ); - checkUnnamed587(o.subnetworks!); + checkUnnamed601(o.subnetworks!); unittest.expect( o.tcpEstablishedIdleTimeoutSec!, unittest.equals(42), @@ -34734,12 +35445,12 @@ void checkRouterNatRule(api.RouterNatRule o) { buildCounterRouterNatRule--; } -core.List buildUnnamed588() => [ +core.List buildUnnamed602() => [ 'foo', 'foo', ]; -void checkUnnamed588(core.List o) { +void checkUnnamed602(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -34751,12 +35462,12 @@ void checkUnnamed588(core.List o) { ); } -core.List buildUnnamed589() => [ +core.List buildUnnamed603() => [ 'foo', 'foo', ]; -void checkUnnamed589(core.List o) { +void checkUnnamed603(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -34768,12 +35479,12 @@ void checkUnnamed589(core.List o) { ); } -core.List buildUnnamed590() => [ +core.List buildUnnamed604() => [ 'foo', 'foo', ]; -void checkUnnamed590(core.List o) { +void checkUnnamed604(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -34785,12 +35496,12 @@ void checkUnnamed590(core.List o) { ); } -core.List buildUnnamed591() => [ +core.List buildUnnamed605() => [ 'foo', 'foo', ]; -void checkUnnamed591(core.List o) { +void checkUnnamed605(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -34807,10 +35518,10 @@ api.RouterNatRuleAction buildRouterNatRuleAction() { final o = api.RouterNatRuleAction(); buildCounterRouterNatRuleAction++; if (buildCounterRouterNatRuleAction < 3) { - o.sourceNatActiveIps = buildUnnamed588(); - o.sourceNatActiveRanges = buildUnnamed589(); - o.sourceNatDrainIps = buildUnnamed590(); - o.sourceNatDrainRanges = buildUnnamed591(); + o.sourceNatActiveIps = 
buildUnnamed602(); + o.sourceNatActiveRanges = buildUnnamed603(); + o.sourceNatDrainIps = buildUnnamed604(); + o.sourceNatDrainRanges = buildUnnamed605(); } buildCounterRouterNatRuleAction--; return o; @@ -34819,20 +35530,20 @@ api.RouterNatRuleAction buildRouterNatRuleAction() { void checkRouterNatRuleAction(api.RouterNatRuleAction o) { buildCounterRouterNatRuleAction++; if (buildCounterRouterNatRuleAction < 3) { - checkUnnamed588(o.sourceNatActiveIps!); - checkUnnamed589(o.sourceNatActiveRanges!); - checkUnnamed590(o.sourceNatDrainIps!); - checkUnnamed591(o.sourceNatDrainRanges!); + checkUnnamed602(o.sourceNatActiveIps!); + checkUnnamed603(o.sourceNatActiveRanges!); + checkUnnamed604(o.sourceNatDrainIps!); + checkUnnamed605(o.sourceNatDrainRanges!); } buildCounterRouterNatRuleAction--; } -core.List buildUnnamed592() => [ +core.List buildUnnamed606() => [ 'foo', 'foo', ]; -void checkUnnamed592(core.List o) { +void checkUnnamed606(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -34844,12 +35555,12 @@ void checkUnnamed592(core.List o) { ); } -core.List buildUnnamed593() => [ +core.List buildUnnamed607() => [ 'foo', 'foo', ]; -void checkUnnamed593(core.List o) { +void checkUnnamed607(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -34867,8 +35578,8 @@ api.RouterNatSubnetworkToNat buildRouterNatSubnetworkToNat() { buildCounterRouterNatSubnetworkToNat++; if (buildCounterRouterNatSubnetworkToNat < 3) { o.name = 'foo'; - o.secondaryIpRangeNames = buildUnnamed592(); - o.sourceIpRangesToNat = buildUnnamed593(); + o.secondaryIpRangeNames = buildUnnamed606(); + o.sourceIpRangesToNat = buildUnnamed607(); } buildCounterRouterNatSubnetworkToNat--; return o; @@ -34881,51 +35592,51 @@ void checkRouterNatSubnetworkToNat(api.RouterNatSubnetworkToNat o) { o.name!, unittest.equals('foo'), ); - checkUnnamed592(o.secondaryIpRangeNames!); - checkUnnamed593(o.sourceIpRangesToNat!); + checkUnnamed606(o.secondaryIpRangeNames!); + checkUnnamed607(o.sourceIpRangesToNat!); } buildCounterRouterNatSubnetworkToNat--; } -core.List buildUnnamed594() => [ +core.List buildUnnamed608() => [ buildRoute(), buildRoute(), ]; -void checkUnnamed594(core.List o) { +void checkUnnamed608(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkRoute(o[0]); checkRoute(o[1]); } -core.List buildUnnamed595() => [ +core.List buildUnnamed609() => [ buildRoute(), buildRoute(), ]; -void checkUnnamed595(core.List o) { +void checkUnnamed609(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkRoute(o[0]); checkRoute(o[1]); } -core.List buildUnnamed596() => [ +core.List buildUnnamed610() => [ buildRouterStatusBgpPeerStatus(), buildRouterStatusBgpPeerStatus(), ]; -void checkUnnamed596(core.List o) { +void checkUnnamed610(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkRouterStatusBgpPeerStatus(o[0]); checkRouterStatusBgpPeerStatus(o[1]); } -core.List buildUnnamed597() => [ +core.List buildUnnamed611() => [ buildRouterStatusNatStatus(), buildRouterStatusNatStatus(), ]; -void checkUnnamed597(core.List o) { +void checkUnnamed611(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkRouterStatusNatStatus(o[0]); checkRouterStatusNatStatus(o[1]); @@ -34936,10 +35647,10 @@ api.RouterStatus buildRouterStatus() { final o = api.RouterStatus(); buildCounterRouterStatus++; if (buildCounterRouterStatus < 3) { - o.bestRoutes = buildUnnamed594(); - o.bestRoutesForRouter = buildUnnamed595(); - o.bgpPeerStatus = buildUnnamed596(); - o.natStatus = 
buildUnnamed597(); + o.bestRoutes = buildUnnamed608(); + o.bestRoutesForRouter = buildUnnamed609(); + o.bgpPeerStatus = buildUnnamed610(); + o.natStatus = buildUnnamed611(); o.network = 'foo'; } buildCounterRouterStatus--; @@ -34949,10 +35660,10 @@ api.RouterStatus buildRouterStatus() { void checkRouterStatus(api.RouterStatus o) { buildCounterRouterStatus++; if (buildCounterRouterStatus < 3) { - checkUnnamed594(o.bestRoutes!); - checkUnnamed595(o.bestRoutesForRouter!); - checkUnnamed596(o.bgpPeerStatus!); - checkUnnamed597(o.natStatus!); + checkUnnamed608(o.bestRoutes!); + checkUnnamed609(o.bestRoutesForRouter!); + checkUnnamed610(o.bgpPeerStatus!); + checkUnnamed611(o.natStatus!); unittest.expect( o.network!, unittest.equals('foo'), @@ -34961,12 +35672,12 @@ void checkRouterStatus(api.RouterStatus o) { buildCounterRouterStatus--; } -core.List buildUnnamed598() => [ +core.List buildUnnamed612() => [ buildRoute(), buildRoute(), ]; -void checkUnnamed598(core.List o) { +void checkUnnamed612(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkRoute(o[0]); checkRoute(o[1]); @@ -34977,7 +35688,7 @@ api.RouterStatusBgpPeerStatus buildRouterStatusBgpPeerStatus() { final o = api.RouterStatusBgpPeerStatus(); buildCounterRouterStatusBgpPeerStatus++; if (buildCounterRouterStatusBgpPeerStatus < 3) { - o.advertisedRoutes = buildUnnamed598(); + o.advertisedRoutes = buildUnnamed612(); o.bfdStatus = buildBfdStatus(); o.enableIpv4 = true; o.enableIpv6 = true; @@ -35005,7 +35716,7 @@ api.RouterStatusBgpPeerStatus buildRouterStatusBgpPeerStatus() { void checkRouterStatusBgpPeerStatus(api.RouterStatusBgpPeerStatus o) { buildCounterRouterStatusBgpPeerStatus++; if (buildCounterRouterStatusBgpPeerStatus < 3) { - checkUnnamed598(o.advertisedRoutes!); + checkUnnamed612(o.advertisedRoutes!); checkBfdStatus(o.bfdStatus!); unittest.expect(o.enableIpv4!, unittest.isTrue); unittest.expect(o.enableIpv6!, unittest.isTrue); @@ -35074,12 +35785,12 @@ void checkRouterStatusBgpPeerStatus(api.RouterStatusBgpPeerStatus o) { buildCounterRouterStatusBgpPeerStatus--; } -core.List buildUnnamed599() => [ +core.List buildUnnamed613() => [ 'foo', 'foo', ]; -void checkUnnamed599(core.List o) { +void checkUnnamed613(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -35091,12 +35802,12 @@ void checkUnnamed599(core.List o) { ); } -core.List buildUnnamed600() => [ +core.List buildUnnamed614() => [ 'foo', 'foo', ]; -void checkUnnamed600(core.List o) { +void checkUnnamed614(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -35108,12 +35819,12 @@ void checkUnnamed600(core.List o) { ); } -core.List buildUnnamed601() => [ +core.List buildUnnamed615() => [ 'foo', 'foo', ]; -void checkUnnamed601(core.List o) { +void checkUnnamed615(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -35125,23 +35836,23 @@ void checkUnnamed601(core.List o) { ); } -core.List buildUnnamed602() => [ +core.List buildUnnamed616() => [ buildRouterStatusNatStatusNatRuleStatus(), buildRouterStatusNatStatusNatRuleStatus(), ]; -void checkUnnamed602(core.List o) { +void checkUnnamed616(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkRouterStatusNatStatusNatRuleStatus(o[0]); checkRouterStatusNatStatusNatRuleStatus(o[1]); } -core.List buildUnnamed603() => [ +core.List buildUnnamed617() => [ 'foo', 'foo', ]; -void checkUnnamed603(core.List o) { +void checkUnnamed617(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ 
-35153,12 +35864,12 @@ void checkUnnamed603(core.List o) { ); } -core.List buildUnnamed604() => [ +core.List buildUnnamed618() => [ 'foo', 'foo', ]; -void checkUnnamed604(core.List o) { +void checkUnnamed618(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -35175,15 +35886,15 @@ api.RouterStatusNatStatus buildRouterStatusNatStatus() { final o = api.RouterStatusNatStatus(); buildCounterRouterStatusNatStatus++; if (buildCounterRouterStatusNatStatus < 3) { - o.autoAllocatedNatIps = buildUnnamed599(); - o.drainAutoAllocatedNatIps = buildUnnamed600(); - o.drainUserAllocatedNatIps = buildUnnamed601(); + o.autoAllocatedNatIps = buildUnnamed613(); + o.drainAutoAllocatedNatIps = buildUnnamed614(); + o.drainUserAllocatedNatIps = buildUnnamed615(); o.minExtraNatIpsNeeded = 42; o.name = 'foo'; o.numVmEndpointsWithNatMappings = 42; - o.ruleStatus = buildUnnamed602(); - o.userAllocatedNatIpResources = buildUnnamed603(); - o.userAllocatedNatIps = buildUnnamed604(); + o.ruleStatus = buildUnnamed616(); + o.userAllocatedNatIpResources = buildUnnamed617(); + o.userAllocatedNatIps = buildUnnamed618(); } buildCounterRouterStatusNatStatus--; return o; @@ -35192,9 +35903,9 @@ api.RouterStatusNatStatus buildRouterStatusNatStatus() { void checkRouterStatusNatStatus(api.RouterStatusNatStatus o) { buildCounterRouterStatusNatStatus++; if (buildCounterRouterStatusNatStatus < 3) { - checkUnnamed599(o.autoAllocatedNatIps!); - checkUnnamed600(o.drainAutoAllocatedNatIps!); - checkUnnamed601(o.drainUserAllocatedNatIps!); + checkUnnamed613(o.autoAllocatedNatIps!); + checkUnnamed614(o.drainAutoAllocatedNatIps!); + checkUnnamed615(o.drainUserAllocatedNatIps!); unittest.expect( o.minExtraNatIpsNeeded!, unittest.equals(42), @@ -35207,19 +35918,19 @@ void checkRouterStatusNatStatus(api.RouterStatusNatStatus o) { o.numVmEndpointsWithNatMappings!, unittest.equals(42), ); - checkUnnamed602(o.ruleStatus!); - checkUnnamed603(o.userAllocatedNatIpResources!); - checkUnnamed604(o.userAllocatedNatIps!); + checkUnnamed616(o.ruleStatus!); + checkUnnamed617(o.userAllocatedNatIpResources!); + checkUnnamed618(o.userAllocatedNatIps!); } buildCounterRouterStatusNatStatus--; } -core.List buildUnnamed605() => [ +core.List buildUnnamed619() => [ 'foo', 'foo', ]; -void checkUnnamed605(core.List o) { +void checkUnnamed619(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -35231,12 +35942,12 @@ void checkUnnamed605(core.List o) { ); } -core.List buildUnnamed606() => [ +core.List buildUnnamed620() => [ 'foo', 'foo', ]; -void checkUnnamed606(core.List o) { +void checkUnnamed620(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -35254,8 +35965,8 @@ api.RouterStatusNatStatusNatRuleStatus final o = api.RouterStatusNatStatusNatRuleStatus(); buildCounterRouterStatusNatStatusNatRuleStatus++; if (buildCounterRouterStatusNatStatusNatRuleStatus < 3) { - o.activeNatIps = buildUnnamed605(); - o.drainNatIps = buildUnnamed606(); + o.activeNatIps = buildUnnamed619(); + o.drainNatIps = buildUnnamed620(); o.minExtraIpsNeeded = 42; o.numVmEndpointsWithNatMappings = 42; o.ruleNumber = 42; @@ -35268,8 +35979,8 @@ void checkRouterStatusNatStatusNatRuleStatus( api.RouterStatusNatStatusNatRuleStatus o) { buildCounterRouterStatusNatStatusNatRuleStatus++; if (buildCounterRouterStatusNatStatusNatRuleStatus < 3) { - checkUnnamed605(o.activeNatIps!); - checkUnnamed606(o.drainNatIps!); + checkUnnamed619(o.activeNatIps!); + checkUnnamed620(o.drainNatIps!); unittest.expect( 
o.minExtraIpsNeeded!, unittest.equals(42), @@ -35329,12 +36040,12 @@ void checkRoutersPreviewResponse(api.RoutersPreviewResponse o) { buildCounterRoutersPreviewResponse--; } -core.List buildUnnamed607() => [ +core.List buildUnnamed621() => [ buildRouter(), buildRouter(), ]; -void checkUnnamed607(core.List o) { +void checkUnnamed621(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkRouter(o[0]); checkRouter(o[1]); @@ -35367,12 +36078,12 @@ void checkRoutersScopedListWarningData(api.RoutersScopedListWarningData o) { buildCounterRoutersScopedListWarningData--; } -core.List buildUnnamed608() => [ +core.List buildUnnamed622() => [ buildRoutersScopedListWarningData(), buildRoutersScopedListWarningData(), ]; -void checkUnnamed608(core.List o) { +void checkUnnamed622(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkRoutersScopedListWarningData(o[0]); checkRoutersScopedListWarningData(o[1]); @@ -35384,7 +36095,7 @@ api.RoutersScopedListWarning buildRoutersScopedListWarning() { buildCounterRoutersScopedListWarning++; if (buildCounterRoutersScopedListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed608(); + o.data = buildUnnamed622(); o.message = 'foo'; } buildCounterRoutersScopedListWarning--; @@ -35398,7 +36109,7 @@ void checkRoutersScopedListWarning(api.RoutersScopedListWarning o) { o.code!, unittest.equals('foo'), ); - checkUnnamed608(o.data!); + checkUnnamed622(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -35412,7 +36123,7 @@ api.RoutersScopedList buildRoutersScopedList() { final o = api.RoutersScopedList(); buildCounterRoutersScopedList++; if (buildCounterRoutersScopedList < 3) { - o.routers = buildUnnamed607(); + o.routers = buildUnnamed621(); o.warning = buildRoutersScopedListWarning(); } buildCounterRoutersScopedList--; @@ -35422,122 +36133,12 @@ api.RoutersScopedList buildRoutersScopedList() { void checkRoutersScopedList(api.RoutersScopedList o) { buildCounterRoutersScopedList++; if (buildCounterRoutersScopedList < 3) { - checkUnnamed607(o.routers!); + checkUnnamed621(o.routers!); checkRoutersScopedListWarning(o.warning!); } buildCounterRoutersScopedList--; } -core.List buildUnnamed609() => [ - buildCondition(), - buildCondition(), - ]; - -void checkUnnamed609(core.List o) { - unittest.expect(o, unittest.hasLength(2)); - checkCondition(o[0]); - checkCondition(o[1]); -} - -core.List buildUnnamed610() => [ - 'foo', - 'foo', - ]; - -void checkUnnamed610(core.List o) { - unittest.expect(o, unittest.hasLength(2)); - unittest.expect( - o[0], - unittest.equals('foo'), - ); - unittest.expect( - o[1], - unittest.equals('foo'), - ); -} - -core.List buildUnnamed611() => [ - buildLogConfig(), - buildLogConfig(), - ]; - -void checkUnnamed611(core.List o) { - unittest.expect(o, unittest.hasLength(2)); - checkLogConfig(o[0]); - checkLogConfig(o[1]); -} - -core.List buildUnnamed612() => [ - 'foo', - 'foo', - ]; - -void checkUnnamed612(core.List o) { - unittest.expect(o, unittest.hasLength(2)); - unittest.expect( - o[0], - unittest.equals('foo'), - ); - unittest.expect( - o[1], - unittest.equals('foo'), - ); -} - -core.List buildUnnamed613() => [ - 'foo', - 'foo', - ]; - -void checkUnnamed613(core.List o) { - unittest.expect(o, unittest.hasLength(2)); - unittest.expect( - o[0], - unittest.equals('foo'), - ); - unittest.expect( - o[1], - unittest.equals('foo'), - ); -} - -core.int buildCounterRule = 0; -api.Rule buildRule() { - final o = api.Rule(); - buildCounterRule++; - if (buildCounterRule < 3) { - o.action = 'foo'; - o.conditions = buildUnnamed609(); 
- o.description = 'foo'; - o.ins = buildUnnamed610(); - o.logConfigs = buildUnnamed611(); - o.notIns = buildUnnamed612(); - o.permissions = buildUnnamed613(); - } - buildCounterRule--; - return o; -} - -void checkRule(api.Rule o) { - buildCounterRule++; - if (buildCounterRule < 3) { - unittest.expect( - o.action!, - unittest.equals('foo'), - ); - checkUnnamed609(o.conditions!); - unittest.expect( - o.description!, - unittest.equals('foo'), - ); - checkUnnamed610(o.ins!); - checkUnnamed611(o.logConfigs!); - checkUnnamed612(o.notIns!); - checkUnnamed613(o.permissions!); - } - buildCounterRule--; -} - core.int buildCounterSSLHealthCheck = 0; api.SSLHealthCheck buildSSLHealthCheck() { final o = api.SSLHealthCheck(); @@ -35585,23 +36186,23 @@ void checkSSLHealthCheck(api.SSLHealthCheck o) { buildCounterSSLHealthCheck--; } -core.List buildUnnamed614() => [ +core.List buildUnnamed623() => [ buildGuestOsFeature(), buildGuestOsFeature(), ]; -void checkUnnamed614(core.List o) { +void checkUnnamed623(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGuestOsFeature(o[0]); checkGuestOsFeature(o[1]); } -core.List buildUnnamed615() => [ +core.List buildUnnamed624() => [ 'foo', 'foo', ]; -void checkUnnamed615(core.List o) { +void checkUnnamed624(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -35624,11 +36225,11 @@ api.SavedAttachedDisk buildSavedAttachedDisk() { o.diskEncryptionKey = buildCustomerEncryptionKey(); o.diskSizeGb = 'foo'; o.diskType = 'foo'; - o.guestOsFeatures = buildUnnamed614(); + o.guestOsFeatures = buildUnnamed623(); o.index = 42; o.interface = 'foo'; o.kind = 'foo'; - o.licenses = buildUnnamed615(); + o.licenses = buildUnnamed624(); o.mode = 'foo'; o.source = 'foo'; o.storageBytes = 'foo'; @@ -35657,7 +36258,7 @@ void checkSavedAttachedDisk(api.SavedAttachedDisk o) { o.diskType!, unittest.equals('foo'), ); - checkUnnamed614(o.guestOsFeatures!); + checkUnnamed623(o.guestOsFeatures!); unittest.expect( o.index!, unittest.equals(42), @@ -35670,7 +36271,7 @@ void checkSavedAttachedDisk(api.SavedAttachedDisk o) { o.kind!, unittest.equals('foo'), ); - checkUnnamed615(o.licenses!); + checkUnnamed624(o.licenses!); unittest.expect( o.mode!, unittest.equals('foo'), @@ -35769,12 +36370,12 @@ void checkScalingScheduleStatus(api.ScalingScheduleStatus o) { buildCounterScalingScheduleStatus--; } -core.List buildUnnamed616() => [ +core.List buildUnnamed625() => [ buildSchedulingNodeAffinity(), buildSchedulingNodeAffinity(), ]; -void checkUnnamed616(core.List o) { +void checkUnnamed625(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkSchedulingNodeAffinity(o[0]); checkSchedulingNodeAffinity(o[1]); @@ -35792,7 +36393,7 @@ api.Scheduling buildScheduling() { o.locationHint = 'foo'; o.maxRunDuration = buildDuration(); o.minNodeCpus = 42; - o.nodeAffinities = buildUnnamed616(); + o.nodeAffinities = buildUnnamed625(); o.onHostMaintenance = 'foo'; o.onInstanceStopAction = buildSchedulingOnInstanceStopAction(); o.preemptible = true; @@ -35825,7 +36426,7 @@ void checkScheduling(api.Scheduling o) { o.minNodeCpus!, unittest.equals(42), ); - checkUnnamed616(o.nodeAffinities!); + checkUnnamed625(o.nodeAffinities!); unittest.expect( o.onHostMaintenance!, unittest.equals('foo'), @@ -35844,12 +36445,12 @@ void checkScheduling(api.Scheduling o) { buildCounterScheduling--; } -core.List buildUnnamed617() => [ +core.List buildUnnamed626() => [ 'foo', 'foo', ]; -void checkUnnamed617(core.List o) { +void checkUnnamed626(core.List o) { unittest.expect(o, 
unittest.hasLength(2)); unittest.expect( o[0], @@ -35868,7 +36469,7 @@ api.SchedulingNodeAffinity buildSchedulingNodeAffinity() { if (buildCounterSchedulingNodeAffinity < 3) { o.key = 'foo'; o.operator = 'foo'; - o.values = buildUnnamed617(); + o.values = buildUnnamed626(); } buildCounterSchedulingNodeAffinity--; return o; @@ -35885,7 +36486,7 @@ void checkSchedulingNodeAffinity(api.SchedulingNodeAffinity o) { o.operator!, unittest.equals('foo'), ); - checkUnnamed617(o.values!); + checkUnnamed626(o.values!); } buildCounterSchedulingNodeAffinity--; } @@ -35936,23 +36537,23 @@ void checkScreenshot(api.Screenshot o) { buildCounterScreenshot--; } -core.Map buildUnnamed618() => { +core.Map buildUnnamed627() => { 'x': buildSecurityPoliciesScopedList(), 'y': buildSecurityPoliciesScopedList(), }; -void checkUnnamed618(core.Map o) { +void checkUnnamed627(core.Map o) { unittest.expect(o, unittest.hasLength(2)); checkSecurityPoliciesScopedList(o['x']!); checkSecurityPoliciesScopedList(o['y']!); } -core.List buildUnnamed619() => [ +core.List buildUnnamed628() => [ 'foo', 'foo', ]; -void checkUnnamed619(core.List o) { +void checkUnnamed628(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -35993,12 +36594,12 @@ void checkSecurityPoliciesAggregatedListWarningData( buildCounterSecurityPoliciesAggregatedListWarningData--; } -core.List buildUnnamed620() => [ +core.List buildUnnamed629() => [ buildSecurityPoliciesAggregatedListWarningData(), buildSecurityPoliciesAggregatedListWarningData(), ]; -void checkUnnamed620( +void checkUnnamed629( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkSecurityPoliciesAggregatedListWarningData(o[0]); @@ -36012,7 +36613,7 @@ api.SecurityPoliciesAggregatedListWarning buildCounterSecurityPoliciesAggregatedListWarning++; if (buildCounterSecurityPoliciesAggregatedListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed620(); + o.data = buildUnnamed629(); o.message = 'foo'; } buildCounterSecurityPoliciesAggregatedListWarning--; @@ -36027,7 +36628,7 @@ void checkSecurityPoliciesAggregatedListWarning( o.code!, unittest.equals('foo'), ); - checkUnnamed620(o.data!); + checkUnnamed629(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -36043,11 +36644,11 @@ api.SecurityPoliciesAggregatedList buildSecurityPoliciesAggregatedList() { if (buildCounterSecurityPoliciesAggregatedList < 3) { o.etag = 'foo'; o.id = 'foo'; - o.items = buildUnnamed618(); + o.items = buildUnnamed627(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; - o.unreachables = buildUnnamed619(); + o.unreachables = buildUnnamed628(); o.warning = buildSecurityPoliciesAggregatedListWarning(); } buildCounterSecurityPoliciesAggregatedList--; @@ -36065,7 +36666,7 @@ void checkSecurityPoliciesAggregatedList(api.SecurityPoliciesAggregatedList o) { o.id!, unittest.equals('foo'), ); - checkUnnamed618(o.items!); + checkUnnamed627(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -36078,7 +36679,7 @@ void checkSecurityPoliciesAggregatedList(api.SecurityPoliciesAggregatedList o) { o.selfLink!, unittest.equals('foo'), ); - checkUnnamed619(o.unreachables!); + checkUnnamed628(o.unreachables!); checkSecurityPoliciesAggregatedListWarning(o.warning!); } buildCounterSecurityPoliciesAggregatedList--; @@ -36106,12 +36707,12 @@ void checkSecurityPoliciesListPreconfiguredExpressionSetsResponse( buildCounterSecurityPoliciesListPreconfiguredExpressionSetsResponse--; } -core.List buildUnnamed621() => [ +core.List buildUnnamed630() => [ 
buildSecurityPolicy(), buildSecurityPolicy(), ]; -void checkUnnamed621(core.List o) { +void checkUnnamed630(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkSecurityPolicy(o[0]); checkSecurityPolicy(o[1]); @@ -36146,12 +36747,12 @@ void checkSecurityPoliciesScopedListWarningData( buildCounterSecurityPoliciesScopedListWarningData--; } -core.List buildUnnamed622() => [ +core.List buildUnnamed631() => [ buildSecurityPoliciesScopedListWarningData(), buildSecurityPoliciesScopedListWarningData(), ]; -void checkUnnamed622(core.List o) { +void checkUnnamed631(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkSecurityPoliciesScopedListWarningData(o[0]); checkSecurityPoliciesScopedListWarningData(o[1]); @@ -36163,7 +36764,7 @@ api.SecurityPoliciesScopedListWarning buildSecurityPoliciesScopedListWarning() { buildCounterSecurityPoliciesScopedListWarning++; if (buildCounterSecurityPoliciesScopedListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed622(); + o.data = buildUnnamed631(); o.message = 'foo'; } buildCounterSecurityPoliciesScopedListWarning--; @@ -36178,7 +36779,7 @@ void checkSecurityPoliciesScopedListWarning( o.code!, unittest.equals('foo'), ); - checkUnnamed622(o.data!); + checkUnnamed631(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -36192,7 +36793,7 @@ api.SecurityPoliciesScopedList buildSecurityPoliciesScopedList() { final o = api.SecurityPoliciesScopedList(); buildCounterSecurityPoliciesScopedList++; if (buildCounterSecurityPoliciesScopedList < 3) { - o.securityPolicies = buildUnnamed621(); + o.securityPolicies = buildUnnamed630(); o.warning = buildSecurityPoliciesScopedListWarning(); } buildCounterSecurityPoliciesScopedList--; @@ -36202,7 +36803,7 @@ api.SecurityPoliciesScopedList buildSecurityPoliciesScopedList() { void checkSecurityPoliciesScopedList(api.SecurityPoliciesScopedList o) { buildCounterSecurityPoliciesScopedList++; if (buildCounterSecurityPoliciesScopedList < 3) { - checkUnnamed621(o.securityPolicies!); + checkUnnamed630(o.securityPolicies!); checkSecurityPoliciesScopedListWarning(o.warning!); } buildCounterSecurityPoliciesScopedList--; @@ -36227,12 +36828,12 @@ void checkSecurityPoliciesWafConfig(api.SecurityPoliciesWafConfig o) { buildCounterSecurityPoliciesWafConfig--; } -core.Map buildUnnamed623() => { +core.Map buildUnnamed632() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed623(core.Map o) { +void checkUnnamed632(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -36244,23 +36845,23 @@ void checkUnnamed623(core.Map o) { ); } -core.List buildUnnamed624() => [ +core.List buildUnnamed633() => [ buildSecurityPolicyRule(), buildSecurityPolicyRule(), ]; -void checkUnnamed624(core.List o) { +void checkUnnamed633(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkSecurityPolicyRule(o[0]); checkSecurityPolicyRule(o[1]); } -core.List buildUnnamed625() => [ +core.List buildUnnamed634() => [ buildSecurityPolicyUserDefinedField(), buildSecurityPolicyUserDefinedField(), ]; -void checkUnnamed625(core.List o) { +void checkUnnamed634(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkSecurityPolicyUserDefinedField(o[0]); checkSecurityPolicyUserDefinedField(o[1]); @@ -36280,14 +36881,14 @@ api.SecurityPolicy buildSecurityPolicy() { o.id = 'foo'; o.kind = 'foo'; o.labelFingerprint = 'foo'; - o.labels = buildUnnamed623(); + o.labels = buildUnnamed632(); o.name = 'foo'; o.recaptchaOptionsConfig = buildSecurityPolicyRecaptchaOptionsConfig(); o.region = 'foo'; - o.rules = 
buildUnnamed624(); + o.rules = buildUnnamed633(); o.selfLink = 'foo'; o.type = 'foo'; - o.userDefinedFields = buildUnnamed625(); + o.userDefinedFields = buildUnnamed634(); } buildCounterSecurityPolicy--; return o; @@ -36323,7 +36924,7 @@ void checkSecurityPolicy(api.SecurityPolicy o) { o.labelFingerprint!, unittest.equals('foo'), ); - checkUnnamed623(o.labels!); + checkUnnamed632(o.labels!); unittest.expect( o.name!, unittest.equals('foo'), @@ -36333,7 +36934,7 @@ void checkSecurityPolicy(api.SecurityPolicy o) { o.region!, unittest.equals('foo'), ); - checkUnnamed624(o.rules!); + checkUnnamed633(o.rules!); unittest.expect( o.selfLink!, unittest.equals('foo'), @@ -36342,7 +36943,7 @@ void checkSecurityPolicy(api.SecurityPolicy o) { o.type!, unittest.equals('foo'), ); - checkUnnamed625(o.userDefinedFields!); + checkUnnamed634(o.userDefinedFields!); } buildCounterSecurityPolicy--; } @@ -36373,12 +36974,12 @@ void checkSecurityPolicyAdaptiveProtectionConfig( core.List< api .SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfigThresholdConfig> - buildUnnamed626() => [ + buildUnnamed635() => [ buildSecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfigThresholdConfig(), buildSecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfigThresholdConfig(), ]; -void checkUnnamed626( +void checkUnnamed635( core.List< api .SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfigThresholdConfig> @@ -36401,7 +37002,7 @@ api.SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfig 3) { o.enable = true; o.ruleVisibility = 'foo'; - o.thresholdConfigs = buildUnnamed626(); + o.thresholdConfigs = buildUnnamed635(); } buildCounterSecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfig--; return o; @@ -36417,7 +37018,7 @@ void checkSecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfig( o.ruleVisibility!, unittest.equals('foo'), ); - checkUnnamed626(o.thresholdConfigs!); + checkUnnamed635(o.thresholdConfigs!); } buildCounterSecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfig--; } @@ -36425,12 +37026,12 @@ void checkSecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfig( core.List< api .SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfigThresholdConfigTrafficGranularityConfig> - buildUnnamed627() => [ + buildUnnamed636() => [ buildSecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfigThresholdConfigTrafficGranularityConfig(), buildSecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfigThresholdConfigTrafficGranularityConfig(), ]; -void checkUnnamed627( +void checkUnnamed636( core.List< api .SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfigThresholdConfigTrafficGranularityConfig> @@ -36460,7 +37061,7 @@ api.SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfigThresholdConfig o.detectionLoadThreshold = 42.0; o.detectionRelativeToBaselineQps = 42.0; o.name = 'foo'; - o.trafficGranularityConfigs = buildUnnamed627(); + o.trafficGranularityConfigs = buildUnnamed636(); } buildCounterSecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfigThresholdConfig--; return o; @@ -36504,7 +37105,7 @@ void checkSecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfigThreshold o.name!, unittest.equals('foo'), ); - checkUnnamed627(o.trafficGranularityConfigs!); + checkUnnamed636(o.trafficGranularityConfigs!); } buildCounterSecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfigThresholdConfig--; } @@ -36546,12 +37147,12 @@ void checkSecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfigThreshold 
buildCounterSecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfigThresholdConfigTrafficGranularityConfig--; } -core.List buildUnnamed628() => [ +core.List buildUnnamed637() => [ 'foo', 'foo', ]; -void checkUnnamed628(core.List o) { +void checkUnnamed637(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -36573,7 +37174,7 @@ api.SecurityPolicyAdvancedOptionsConfig buildSecurityPolicyAdvancedOptionsConfigJsonCustomConfig(); o.jsonParsing = 'foo'; o.logLevel = 'foo'; - o.userIpRequestHeaders = buildUnnamed628(); + o.userIpRequestHeaders = buildUnnamed637(); } buildCounterSecurityPolicyAdvancedOptionsConfig--; return o; @@ -36593,17 +37194,17 @@ void checkSecurityPolicyAdvancedOptionsConfig( o.logLevel!, unittest.equals('foo'), ); - checkUnnamed628(o.userIpRequestHeaders!); + checkUnnamed637(o.userIpRequestHeaders!); } buildCounterSecurityPolicyAdvancedOptionsConfig--; } -core.List buildUnnamed629() => [ +core.List buildUnnamed638() => [ 'foo', 'foo', ]; -void checkUnnamed629(core.List o) { +void checkUnnamed638(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -36621,7 +37222,7 @@ api.SecurityPolicyAdvancedOptionsConfigJsonCustomConfig final o = api.SecurityPolicyAdvancedOptionsConfigJsonCustomConfig(); buildCounterSecurityPolicyAdvancedOptionsConfigJsonCustomConfig++; if (buildCounterSecurityPolicyAdvancedOptionsConfigJsonCustomConfig < 3) { - o.contentTypes = buildUnnamed629(); + o.contentTypes = buildUnnamed638(); } buildCounterSecurityPolicyAdvancedOptionsConfigJsonCustomConfig--; return o; @@ -36631,7 +37232,7 @@ void checkSecurityPolicyAdvancedOptionsConfigJsonCustomConfig( api.SecurityPolicyAdvancedOptionsConfigJsonCustomConfig o) { buildCounterSecurityPolicyAdvancedOptionsConfigJsonCustomConfig++; if (buildCounterSecurityPolicyAdvancedOptionsConfigJsonCustomConfig < 3) { - checkUnnamed629(o.contentTypes!); + checkUnnamed638(o.contentTypes!); } buildCounterSecurityPolicyAdvancedOptionsConfigJsonCustomConfig--; } @@ -36660,12 +37261,12 @@ void checkSecurityPolicyDdosProtectionConfig( buildCounterSecurityPolicyDdosProtectionConfig--; } -core.List buildUnnamed630() => [ +core.List buildUnnamed639() => [ buildSecurityPolicy(), buildSecurityPolicy(), ]; -void checkUnnamed630(core.List o) { +void checkUnnamed639(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkSecurityPolicy(o[0]); checkSecurityPolicy(o[1]); @@ -36698,12 +37299,12 @@ void checkSecurityPolicyListWarningData(api.SecurityPolicyListWarningData o) { buildCounterSecurityPolicyListWarningData--; } -core.List buildUnnamed631() => [ +core.List buildUnnamed640() => [ buildSecurityPolicyListWarningData(), buildSecurityPolicyListWarningData(), ]; -void checkUnnamed631(core.List o) { +void checkUnnamed640(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkSecurityPolicyListWarningData(o[0]); checkSecurityPolicyListWarningData(o[1]); @@ -36715,7 +37316,7 @@ api.SecurityPolicyListWarning buildSecurityPolicyListWarning() { buildCounterSecurityPolicyListWarning++; if (buildCounterSecurityPolicyListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed631(); + o.data = buildUnnamed640(); o.message = 'foo'; } buildCounterSecurityPolicyListWarning--; @@ -36729,7 +37330,7 @@ void checkSecurityPolicyListWarning(api.SecurityPolicyListWarning o) { o.code!, unittest.equals('foo'), ); - checkUnnamed631(o.data!); + checkUnnamed640(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -36744,7 +37345,7 @@ api.SecurityPolicyList 
buildSecurityPolicyList() { buildCounterSecurityPolicyList++; if (buildCounterSecurityPolicyList < 3) { o.id = 'foo'; - o.items = buildUnnamed630(); + o.items = buildUnnamed639(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.warning = buildSecurityPolicyListWarning(); @@ -36760,7 +37361,7 @@ void checkSecurityPolicyList(api.SecurityPolicyList o) { o.id!, unittest.equals('foo'), ); - checkUnnamed630(o.items!); + checkUnnamed639(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -36872,12 +37473,12 @@ void checkSecurityPolicyRule(api.SecurityPolicyRule o) { } core.List - buildUnnamed632() => [ + buildUnnamed641() => [ buildSecurityPolicyRuleHttpHeaderActionHttpHeaderOption(), buildSecurityPolicyRuleHttpHeaderActionHttpHeaderOption(), ]; -void checkUnnamed632( +void checkUnnamed641( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkSecurityPolicyRuleHttpHeaderActionHttpHeaderOption(o[0]); @@ -36890,7 +37491,7 @@ api.SecurityPolicyRuleHttpHeaderAction final o = api.SecurityPolicyRuleHttpHeaderAction(); buildCounterSecurityPolicyRuleHttpHeaderAction++; if (buildCounterSecurityPolicyRuleHttpHeaderAction < 3) { - o.requestHeadersToAdds = buildUnnamed632(); + o.requestHeadersToAdds = buildUnnamed641(); } buildCounterSecurityPolicyRuleHttpHeaderAction--; return o; @@ -36900,7 +37501,7 @@ void checkSecurityPolicyRuleHttpHeaderAction( api.SecurityPolicyRuleHttpHeaderAction o) { buildCounterSecurityPolicyRuleHttpHeaderAction++; if (buildCounterSecurityPolicyRuleHttpHeaderAction < 3) { - checkUnnamed632(o.requestHeadersToAdds!); + checkUnnamed641(o.requestHeadersToAdds!); } buildCounterSecurityPolicyRuleHttpHeaderAction--; } @@ -36962,12 +37563,12 @@ void checkSecurityPolicyRuleMatcher(api.SecurityPolicyRuleMatcher o) { buildCounterSecurityPolicyRuleMatcher--; } -core.List buildUnnamed633() => [ +core.List buildUnnamed642() => [ 'foo', 'foo', ]; -void checkUnnamed633(core.List o) { +void checkUnnamed642(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -36984,7 +37585,7 @@ api.SecurityPolicyRuleMatcherConfig buildSecurityPolicyRuleMatcherConfig() { final o = api.SecurityPolicyRuleMatcherConfig(); buildCounterSecurityPolicyRuleMatcherConfig++; if (buildCounterSecurityPolicyRuleMatcherConfig < 3) { - o.srcIpRanges = buildUnnamed633(); + o.srcIpRanges = buildUnnamed642(); } buildCounterSecurityPolicyRuleMatcherConfig--; return o; @@ -36994,7 +37595,7 @@ void checkSecurityPolicyRuleMatcherConfig( api.SecurityPolicyRuleMatcherConfig o) { buildCounterSecurityPolicyRuleMatcherConfig++; if (buildCounterSecurityPolicyRuleMatcherConfig < 3) { - checkUnnamed633(o.srcIpRanges!); + checkUnnamed642(o.srcIpRanges!); } buildCounterSecurityPolicyRuleMatcherConfig--; } @@ -37022,12 +37623,12 @@ void checkSecurityPolicyRuleMatcherExprOptions( buildCounterSecurityPolicyRuleMatcherExprOptions--; } -core.List buildUnnamed634() => [ +core.List buildUnnamed643() => [ 'foo', 'foo', ]; -void checkUnnamed634(core.List o) { +void checkUnnamed643(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -37039,12 +37640,12 @@ void checkUnnamed634(core.List o) { ); } -core.List buildUnnamed635() => [ +core.List buildUnnamed644() => [ 'foo', 'foo', ]; -void checkUnnamed635(core.List o) { +void checkUnnamed644(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -37062,8 +37663,8 @@ api.SecurityPolicyRuleMatcherExprOptionsRecaptchaOptions final o = api.SecurityPolicyRuleMatcherExprOptionsRecaptchaOptions(); 
buildCounterSecurityPolicyRuleMatcherExprOptionsRecaptchaOptions++; if (buildCounterSecurityPolicyRuleMatcherExprOptionsRecaptchaOptions < 3) { - o.actionTokenSiteKeys = buildUnnamed634(); - o.sessionTokenSiteKeys = buildUnnamed635(); + o.actionTokenSiteKeys = buildUnnamed643(); + o.sessionTokenSiteKeys = buildUnnamed644(); } buildCounterSecurityPolicyRuleMatcherExprOptionsRecaptchaOptions--; return o; @@ -37073,18 +37674,18 @@ void checkSecurityPolicyRuleMatcherExprOptionsRecaptchaOptions( api.SecurityPolicyRuleMatcherExprOptionsRecaptchaOptions o) { buildCounterSecurityPolicyRuleMatcherExprOptionsRecaptchaOptions++; if (buildCounterSecurityPolicyRuleMatcherExprOptionsRecaptchaOptions < 3) { - checkUnnamed634(o.actionTokenSiteKeys!); - checkUnnamed635(o.sessionTokenSiteKeys!); + checkUnnamed643(o.actionTokenSiteKeys!); + checkUnnamed644(o.sessionTokenSiteKeys!); } buildCounterSecurityPolicyRuleMatcherExprOptionsRecaptchaOptions--; } -core.List buildUnnamed636() => [ +core.List buildUnnamed645() => [ 'foo', 'foo', ]; -void checkUnnamed636(core.List o) { +void checkUnnamed645(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -37096,12 +37697,12 @@ void checkUnnamed636(core.List o) { ); } -core.List buildUnnamed637() => [ +core.List buildUnnamed646() => [ 'foo', 'foo', ]; -void checkUnnamed637(core.List o) { +void checkUnnamed646(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -37113,12 +37714,12 @@ void checkUnnamed637(core.List o) { ); } -core.List buildUnnamed638() => [ +core.List buildUnnamed647() => [ 'foo', 'foo', ]; -void checkUnnamed638(core.List o) { +void checkUnnamed647(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -37130,12 +37731,12 @@ void checkUnnamed638(core.List o) { ); } -core.List buildUnnamed639() => [ +core.List buildUnnamed648() => [ 42, 42, ]; -void checkUnnamed639(core.List o) { +void checkUnnamed648(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -37147,12 +37748,12 @@ void checkUnnamed639(core.List o) { ); } -core.List buildUnnamed640() => [ +core.List buildUnnamed649() => [ 'foo', 'foo', ]; -void checkUnnamed640(core.List o) { +void checkUnnamed649(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -37164,12 +37765,12 @@ void checkUnnamed640(core.List o) { ); } -core.List buildUnnamed641() => [ +core.List buildUnnamed650() => [ 'foo', 'foo', ]; -void checkUnnamed641(core.List o) { +void checkUnnamed650(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -37181,12 +37782,12 @@ void checkUnnamed641(core.List o) { ); } -core.List buildUnnamed642() => [ +core.List buildUnnamed651() => [ 'foo', 'foo', ]; -void checkUnnamed642(core.List o) { +void checkUnnamed651(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -37199,12 +37800,12 @@ void checkUnnamed642(core.List o) { } core.List - buildUnnamed643() => [ + buildUnnamed652() => [ buildSecurityPolicyRuleNetworkMatcherUserDefinedFieldMatch(), buildSecurityPolicyRuleNetworkMatcherUserDefinedFieldMatch(), ]; -void checkUnnamed643( +void checkUnnamed652( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkSecurityPolicyRuleNetworkMatcherUserDefinedFieldMatch(o[0]); @@ -37216,14 +37817,14 @@ api.SecurityPolicyRuleNetworkMatcher buildSecurityPolicyRuleNetworkMatcher() { final o = api.SecurityPolicyRuleNetworkMatcher(); buildCounterSecurityPolicyRuleNetworkMatcher++; if 
(buildCounterSecurityPolicyRuleNetworkMatcher < 3) { - o.destIpRanges = buildUnnamed636(); - o.destPorts = buildUnnamed637(); - o.ipProtocols = buildUnnamed638(); - o.srcAsns = buildUnnamed639(); - o.srcIpRanges = buildUnnamed640(); - o.srcPorts = buildUnnamed641(); - o.srcRegionCodes = buildUnnamed642(); - o.userDefinedFields = buildUnnamed643(); + o.destIpRanges = buildUnnamed645(); + o.destPorts = buildUnnamed646(); + o.ipProtocols = buildUnnamed647(); + o.srcAsns = buildUnnamed648(); + o.srcIpRanges = buildUnnamed649(); + o.srcPorts = buildUnnamed650(); + o.srcRegionCodes = buildUnnamed651(); + o.userDefinedFields = buildUnnamed652(); } buildCounterSecurityPolicyRuleNetworkMatcher--; return o; @@ -37233,24 +37834,24 @@ void checkSecurityPolicyRuleNetworkMatcher( api.SecurityPolicyRuleNetworkMatcher o) { buildCounterSecurityPolicyRuleNetworkMatcher++; if (buildCounterSecurityPolicyRuleNetworkMatcher < 3) { - checkUnnamed636(o.destIpRanges!); - checkUnnamed637(o.destPorts!); - checkUnnamed638(o.ipProtocols!); - checkUnnamed639(o.srcAsns!); - checkUnnamed640(o.srcIpRanges!); - checkUnnamed641(o.srcPorts!); - checkUnnamed642(o.srcRegionCodes!); - checkUnnamed643(o.userDefinedFields!); + checkUnnamed645(o.destIpRanges!); + checkUnnamed646(o.destPorts!); + checkUnnamed647(o.ipProtocols!); + checkUnnamed648(o.srcAsns!); + checkUnnamed649(o.srcIpRanges!); + checkUnnamed650(o.srcPorts!); + checkUnnamed651(o.srcRegionCodes!); + checkUnnamed652(o.userDefinedFields!); } buildCounterSecurityPolicyRuleNetworkMatcher--; } -core.List buildUnnamed644() => [ +core.List buildUnnamed653() => [ 'foo', 'foo', ]; -void checkUnnamed644(core.List o) { +void checkUnnamed653(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -37269,7 +37870,7 @@ api.SecurityPolicyRuleNetworkMatcherUserDefinedFieldMatch buildCounterSecurityPolicyRuleNetworkMatcherUserDefinedFieldMatch++; if (buildCounterSecurityPolicyRuleNetworkMatcherUserDefinedFieldMatch < 3) { o.name = 'foo'; - o.values = buildUnnamed644(); + o.values = buildUnnamed653(); } buildCounterSecurityPolicyRuleNetworkMatcherUserDefinedFieldMatch--; return o; @@ -37283,18 +37884,18 @@ void checkSecurityPolicyRuleNetworkMatcherUserDefinedFieldMatch( o.name!, unittest.equals('foo'), ); - checkUnnamed644(o.values!); + checkUnnamed653(o.values!); } buildCounterSecurityPolicyRuleNetworkMatcherUserDefinedFieldMatch--; } core.List - buildUnnamed645() => [ + buildUnnamed654() => [ buildSecurityPolicyRulePreconfiguredWafConfigExclusion(), buildSecurityPolicyRulePreconfiguredWafConfigExclusion(), ]; -void checkUnnamed645( +void checkUnnamed654( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkSecurityPolicyRulePreconfiguredWafConfigExclusion(o[0]); @@ -37307,7 +37908,7 @@ api.SecurityPolicyRulePreconfiguredWafConfig final o = api.SecurityPolicyRulePreconfiguredWafConfig(); buildCounterSecurityPolicyRulePreconfiguredWafConfig++; if (buildCounterSecurityPolicyRulePreconfiguredWafConfig < 3) { - o.exclusions = buildUnnamed645(); + o.exclusions = buildUnnamed654(); } buildCounterSecurityPolicyRulePreconfiguredWafConfig--; return o; @@ -37317,18 +37918,18 @@ void checkSecurityPolicyRulePreconfiguredWafConfig( api.SecurityPolicyRulePreconfiguredWafConfig o) { buildCounterSecurityPolicyRulePreconfiguredWafConfig++; if (buildCounterSecurityPolicyRulePreconfiguredWafConfig < 3) { - checkUnnamed645(o.exclusions!); + checkUnnamed654(o.exclusions!); } buildCounterSecurityPolicyRulePreconfiguredWafConfig--; } core.List - buildUnnamed646() => [ 
+ buildUnnamed655() => [ buildSecurityPolicyRulePreconfiguredWafConfigExclusionFieldParams(), buildSecurityPolicyRulePreconfiguredWafConfigExclusionFieldParams(), ]; -void checkUnnamed646( +void checkUnnamed655( core.List o) { unittest.expect(o, unittest.hasLength(2)); @@ -37337,12 +37938,12 @@ void checkUnnamed646( } core.List - buildUnnamed647() => [ + buildUnnamed656() => [ buildSecurityPolicyRulePreconfiguredWafConfigExclusionFieldParams(), buildSecurityPolicyRulePreconfiguredWafConfigExclusionFieldParams(), ]; -void checkUnnamed647( +void checkUnnamed656( core.List o) { unittest.expect(o, unittest.hasLength(2)); @@ -37351,12 +37952,12 @@ void checkUnnamed647( } core.List - buildUnnamed648() => [ + buildUnnamed657() => [ buildSecurityPolicyRulePreconfiguredWafConfigExclusionFieldParams(), buildSecurityPolicyRulePreconfiguredWafConfigExclusionFieldParams(), ]; -void checkUnnamed648( +void checkUnnamed657( core.List o) { unittest.expect(o, unittest.hasLength(2)); @@ -37365,12 +37966,12 @@ void checkUnnamed648( } core.List - buildUnnamed649() => [ + buildUnnamed658() => [ buildSecurityPolicyRulePreconfiguredWafConfigExclusionFieldParams(), buildSecurityPolicyRulePreconfiguredWafConfigExclusionFieldParams(), ]; -void checkUnnamed649( +void checkUnnamed658( core.List o) { unittest.expect(o, unittest.hasLength(2)); @@ -37378,12 +37979,12 @@ void checkUnnamed649( checkSecurityPolicyRulePreconfiguredWafConfigExclusionFieldParams(o[1]); } -core.List buildUnnamed650() => [ +core.List buildUnnamed659() => [ 'foo', 'foo', ]; -void checkUnnamed650(core.List o) { +void checkUnnamed659(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -37401,11 +38002,11 @@ api.SecurityPolicyRulePreconfiguredWafConfigExclusion final o = api.SecurityPolicyRulePreconfiguredWafConfigExclusion(); buildCounterSecurityPolicyRulePreconfiguredWafConfigExclusion++; if (buildCounterSecurityPolicyRulePreconfiguredWafConfigExclusion < 3) { - o.requestCookiesToExclude = buildUnnamed646(); - o.requestHeadersToExclude = buildUnnamed647(); - o.requestQueryParamsToExclude = buildUnnamed648(); - o.requestUrisToExclude = buildUnnamed649(); - o.targetRuleIds = buildUnnamed650(); + o.requestCookiesToExclude = buildUnnamed655(); + o.requestHeadersToExclude = buildUnnamed656(); + o.requestQueryParamsToExclude = buildUnnamed657(); + o.requestUrisToExclude = buildUnnamed658(); + o.targetRuleIds = buildUnnamed659(); o.targetRuleSet = 'foo'; } buildCounterSecurityPolicyRulePreconfiguredWafConfigExclusion--; @@ -37416,11 +38017,11 @@ void checkSecurityPolicyRulePreconfiguredWafConfigExclusion( api.SecurityPolicyRulePreconfiguredWafConfigExclusion o) { buildCounterSecurityPolicyRulePreconfiguredWafConfigExclusion++; if (buildCounterSecurityPolicyRulePreconfiguredWafConfigExclusion < 3) { - checkUnnamed646(o.requestCookiesToExclude!); - checkUnnamed647(o.requestHeadersToExclude!); - checkUnnamed648(o.requestQueryParamsToExclude!); - checkUnnamed649(o.requestUrisToExclude!); - checkUnnamed650(o.targetRuleIds!); + checkUnnamed655(o.requestCookiesToExclude!); + checkUnnamed656(o.requestHeadersToExclude!); + checkUnnamed657(o.requestQueryParamsToExclude!); + checkUnnamed658(o.requestUrisToExclude!); + checkUnnamed659(o.targetRuleIds!); unittest.expect( o.targetRuleSet!, unittest.equals('foo'), @@ -37463,12 +38064,12 @@ void checkSecurityPolicyRulePreconfiguredWafConfigExclusionFieldParams( } core.List - buildUnnamed651() => [ + buildUnnamed660() => [ buildSecurityPolicyRuleRateLimitOptionsEnforceOnKeyConfig(), 
buildSecurityPolicyRuleRateLimitOptionsEnforceOnKeyConfig(), ]; -void checkUnnamed651( +void checkUnnamed660( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkSecurityPolicyRuleRateLimitOptionsEnforceOnKeyConfig(o[0]); @@ -37485,7 +38086,7 @@ api.SecurityPolicyRuleRateLimitOptions o.banThreshold = buildSecurityPolicyRuleRateLimitOptionsThreshold(); o.conformAction = 'foo'; o.enforceOnKey = 'foo'; - o.enforceOnKeyConfigs = buildUnnamed651(); + o.enforceOnKeyConfigs = buildUnnamed660(); o.enforceOnKeyName = 'foo'; o.exceedAction = 'foo'; o.exceedRedirectOptions = buildSecurityPolicyRuleRedirectOptions(); @@ -37512,7 +38113,7 @@ void checkSecurityPolicyRuleRateLimitOptions( o.enforceOnKey!, unittest.equals('foo'), ); - checkUnnamed651(o.enforceOnKeyConfigs!); + checkUnnamed660(o.enforceOnKeyConfigs!); unittest.expect( o.enforceOnKeyName!, unittest.equals('foo'), @@ -37655,12 +38256,12 @@ void checkSecurityPolicyUserDefinedField(api.SecurityPolicyUserDefinedField o) { buildCounterSecurityPolicyUserDefinedField--; } -core.List buildUnnamed652() => [ +core.List buildUnnamed661() => [ 'foo', 'foo', ]; -void checkUnnamed652(core.List o) { +void checkUnnamed661(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -37679,7 +38280,7 @@ api.SecuritySettings buildSecuritySettings() { if (buildCounterSecuritySettings < 3) { o.awsV4Authentication = buildAWSV4Signature(); o.clientTlsPolicy = 'foo'; - o.subjectAltNames = buildUnnamed652(); + o.subjectAltNames = buildUnnamed661(); } buildCounterSecuritySettings--; return o; @@ -37693,7 +38294,7 @@ void checkSecuritySettings(api.SecuritySettings o) { o.clientTlsPolicy!, unittest.equals('foo'), ); - checkUnnamed652(o.subjectAltNames!); + checkUnnamed661(o.subjectAltNames!); } buildCounterSecuritySettings--; } @@ -37762,12 +38363,12 @@ void checkServerBinding(api.ServerBinding o) { buildCounterServerBinding--; } -core.List buildUnnamed653() => [ +core.List buildUnnamed662() => [ 'foo', 'foo', ]; -void checkUnnamed653(core.List o) { +void checkUnnamed662(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -37785,7 +38386,7 @@ api.ServiceAccount buildServiceAccount() { buildCounterServiceAccount++; if (buildCounterServiceAccount < 3) { o.email = 'foo'; - o.scopes = buildUnnamed653(); + o.scopes = buildUnnamed662(); } buildCounterServiceAccount--; return o; @@ -37798,39 +38399,39 @@ void checkServiceAccount(api.ServiceAccount o) { o.email!, unittest.equals('foo'), ); - checkUnnamed653(o.scopes!); + checkUnnamed662(o.scopes!); } buildCounterServiceAccount--; } -core.List buildUnnamed654() => [ +core.List buildUnnamed663() => [ buildServiceAttachmentConnectedEndpoint(), buildServiceAttachmentConnectedEndpoint(), ]; -void checkUnnamed654(core.List o) { +void checkUnnamed663(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkServiceAttachmentConnectedEndpoint(o[0]); checkServiceAttachmentConnectedEndpoint(o[1]); } -core.List buildUnnamed655() => [ +core.List buildUnnamed664() => [ buildServiceAttachmentConsumerProjectLimit(), buildServiceAttachmentConsumerProjectLimit(), ]; -void checkUnnamed655(core.List o) { +void checkUnnamed664(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkServiceAttachmentConsumerProjectLimit(o[0]); checkServiceAttachmentConsumerProjectLimit(o[1]); } -core.List buildUnnamed656() => [ +core.List buildUnnamed665() => [ 'foo', 'foo', ]; -void checkUnnamed656(core.List o) { +void checkUnnamed665(core.List o) { unittest.expect(o, 
unittest.hasLength(2)); unittest.expect( o[0], @@ -37842,12 +38443,12 @@ void checkUnnamed656(core.List o) { ); } -core.List buildUnnamed657() => [ +core.List buildUnnamed666() => [ 'foo', 'foo', ]; -void checkUnnamed657(core.List o) { +void checkUnnamed666(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -37859,12 +38460,12 @@ void checkUnnamed657(core.List o) { ); } -core.List buildUnnamed658() => [ +core.List buildUnnamed667() => [ 'foo', 'foo', ]; -void checkUnnamed658(core.List o) { +void checkUnnamed667(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -37881,19 +38482,19 @@ api.ServiceAttachment buildServiceAttachment() { final o = api.ServiceAttachment(); buildCounterServiceAttachment++; if (buildCounterServiceAttachment < 3) { - o.connectedEndpoints = buildUnnamed654(); + o.connectedEndpoints = buildUnnamed663(); o.connectionPreference = 'foo'; - o.consumerAcceptLists = buildUnnamed655(); - o.consumerRejectLists = buildUnnamed656(); + o.consumerAcceptLists = buildUnnamed664(); + o.consumerRejectLists = buildUnnamed665(); o.creationTimestamp = 'foo'; o.description = 'foo'; - o.domainNames = buildUnnamed657(); + o.domainNames = buildUnnamed666(); o.enableProxyProtocol = true; o.fingerprint = 'foo'; o.id = 'foo'; o.kind = 'foo'; o.name = 'foo'; - o.natSubnets = buildUnnamed658(); + o.natSubnets = buildUnnamed667(); o.producerForwardingRule = 'foo'; o.propagatedConnectionLimit = 42; o.pscServiceAttachmentId = buildUint128(); @@ -37909,13 +38510,13 @@ api.ServiceAttachment buildServiceAttachment() { void checkServiceAttachment(api.ServiceAttachment o) { buildCounterServiceAttachment++; if (buildCounterServiceAttachment < 3) { - checkUnnamed654(o.connectedEndpoints!); + checkUnnamed663(o.connectedEndpoints!); unittest.expect( o.connectionPreference!, unittest.equals('foo'), ); - checkUnnamed655(o.consumerAcceptLists!); - checkUnnamed656(o.consumerRejectLists!); + checkUnnamed664(o.consumerAcceptLists!); + checkUnnamed665(o.consumerRejectLists!); unittest.expect( o.creationTimestamp!, unittest.equals('foo'), @@ -37924,7 +38525,7 @@ void checkServiceAttachment(api.ServiceAttachment o) { o.description!, unittest.equals('foo'), ); - checkUnnamed657(o.domainNames!); + checkUnnamed666(o.domainNames!); unittest.expect(o.enableProxyProtocol!, unittest.isTrue); unittest.expect( o.fingerprint!, @@ -37942,7 +38543,7 @@ void checkServiceAttachment(api.ServiceAttachment o) { o.name!, unittest.equals('foo'), ); - checkUnnamed658(o.natSubnets!); + checkUnnamed667(o.natSubnets!); unittest.expect( o.producerForwardingRule!, unittest.equals('foo'), @@ -37969,24 +38570,24 @@ void checkServiceAttachment(api.ServiceAttachment o) { buildCounterServiceAttachment--; } -core.Map buildUnnamed659() => { +core.Map buildUnnamed668() => { 'x': buildServiceAttachmentsScopedList(), 'y': buildServiceAttachmentsScopedList(), }; -void checkUnnamed659( +void checkUnnamed668( core.Map o) { unittest.expect(o, unittest.hasLength(2)); checkServiceAttachmentsScopedList(o['x']!); checkServiceAttachmentsScopedList(o['y']!); } -core.List buildUnnamed660() => [ +core.List buildUnnamed669() => [ 'foo', 'foo', ]; -void checkUnnamed660(core.List o) { +void checkUnnamed669(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -38027,12 +38628,12 @@ void checkServiceAttachmentAggregatedListWarningData( buildCounterServiceAttachmentAggregatedListWarningData--; } -core.List buildUnnamed661() => [ +core.List buildUnnamed670() => [ 
buildServiceAttachmentAggregatedListWarningData(), buildServiceAttachmentAggregatedListWarningData(), ]; -void checkUnnamed661( +void checkUnnamed670( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkServiceAttachmentAggregatedListWarningData(o[0]); @@ -38046,7 +38647,7 @@ api.ServiceAttachmentAggregatedListWarning buildCounterServiceAttachmentAggregatedListWarning++; if (buildCounterServiceAttachmentAggregatedListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed661(); + o.data = buildUnnamed670(); o.message = 'foo'; } buildCounterServiceAttachmentAggregatedListWarning--; @@ -38061,7 +38662,7 @@ void checkServiceAttachmentAggregatedListWarning( o.code!, unittest.equals('foo'), ); - checkUnnamed661(o.data!); + checkUnnamed670(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -38076,11 +38677,11 @@ api.ServiceAttachmentAggregatedList buildServiceAttachmentAggregatedList() { buildCounterServiceAttachmentAggregatedList++; if (buildCounterServiceAttachmentAggregatedList < 3) { o.id = 'foo'; - o.items = buildUnnamed659(); + o.items = buildUnnamed668(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; - o.unreachables = buildUnnamed660(); + o.unreachables = buildUnnamed669(); o.warning = buildServiceAttachmentAggregatedListWarning(); } buildCounterServiceAttachmentAggregatedList--; @@ -38095,7 +38696,7 @@ void checkServiceAttachmentAggregatedList( o.id!, unittest.equals('foo'), ); - checkUnnamed659(o.items!); + checkUnnamed668(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -38108,7 +38709,7 @@ void checkServiceAttachmentAggregatedList( o.selfLink!, unittest.equals('foo'), ); - checkUnnamed660(o.unreachables!); + checkUnnamed669(o.unreachables!); checkServiceAttachmentAggregatedListWarning(o.warning!); } buildCounterServiceAttachmentAggregatedList--; @@ -38192,12 +38793,12 @@ void checkServiceAttachmentConsumerProjectLimit( buildCounterServiceAttachmentConsumerProjectLimit--; } -core.List buildUnnamed662() => [ +core.List buildUnnamed671() => [ buildServiceAttachment(), buildServiceAttachment(), ]; -void checkUnnamed662(core.List o) { +void checkUnnamed671(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkServiceAttachment(o[0]); checkServiceAttachment(o[1]); @@ -38231,12 +38832,12 @@ void checkServiceAttachmentListWarningData( buildCounterServiceAttachmentListWarningData--; } -core.List buildUnnamed663() => [ +core.List buildUnnamed672() => [ buildServiceAttachmentListWarningData(), buildServiceAttachmentListWarningData(), ]; -void checkUnnamed663(core.List o) { +void checkUnnamed672(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkServiceAttachmentListWarningData(o[0]); checkServiceAttachmentListWarningData(o[1]); @@ -38248,7 +38849,7 @@ api.ServiceAttachmentListWarning buildServiceAttachmentListWarning() { buildCounterServiceAttachmentListWarning++; if (buildCounterServiceAttachmentListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed663(); + o.data = buildUnnamed672(); o.message = 'foo'; } buildCounterServiceAttachmentListWarning--; @@ -38262,7 +38863,7 @@ void checkServiceAttachmentListWarning(api.ServiceAttachmentListWarning o) { o.code!, unittest.equals('foo'), ); - checkUnnamed663(o.data!); + checkUnnamed672(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -38277,7 +38878,7 @@ api.ServiceAttachmentList buildServiceAttachmentList() { buildCounterServiceAttachmentList++; if (buildCounterServiceAttachmentList < 3) { o.id = 'foo'; - o.items = buildUnnamed662(); + o.items = 
buildUnnamed671(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; @@ -38294,7 +38895,7 @@ void checkServiceAttachmentList(api.ServiceAttachmentList o) { o.id!, unittest.equals('foo'), ); - checkUnnamed662(o.items!); + checkUnnamed671(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -38312,12 +38913,12 @@ void checkServiceAttachmentList(api.ServiceAttachmentList o) { buildCounterServiceAttachmentList--; } -core.List buildUnnamed664() => [ +core.List buildUnnamed673() => [ buildServiceAttachment(), buildServiceAttachment(), ]; -void checkUnnamed664(core.List o) { +void checkUnnamed673(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkServiceAttachment(o[0]); checkServiceAttachment(o[1]); @@ -38352,12 +38953,12 @@ void checkServiceAttachmentsScopedListWarningData( buildCounterServiceAttachmentsScopedListWarningData--; } -core.List buildUnnamed665() => [ +core.List buildUnnamed674() => [ buildServiceAttachmentsScopedListWarningData(), buildServiceAttachmentsScopedListWarningData(), ]; -void checkUnnamed665(core.List o) { +void checkUnnamed674(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkServiceAttachmentsScopedListWarningData(o[0]); checkServiceAttachmentsScopedListWarningData(o[1]); @@ -38370,7 +38971,7 @@ api.ServiceAttachmentsScopedListWarning buildCounterServiceAttachmentsScopedListWarning++; if (buildCounterServiceAttachmentsScopedListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed665(); + o.data = buildUnnamed674(); o.message = 'foo'; } buildCounterServiceAttachmentsScopedListWarning--; @@ -38385,7 +38986,7 @@ void checkServiceAttachmentsScopedListWarning( o.code!, unittest.equals('foo'), ); - checkUnnamed665(o.data!); + checkUnnamed674(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -38399,7 +39000,7 @@ api.ServiceAttachmentsScopedList buildServiceAttachmentsScopedList() { final o = api.ServiceAttachmentsScopedList(); buildCounterServiceAttachmentsScopedList++; if (buildCounterServiceAttachmentsScopedList < 3) { - o.serviceAttachments = buildUnnamed664(); + o.serviceAttachments = buildUnnamed673(); o.warning = buildServiceAttachmentsScopedListWarning(); } buildCounterServiceAttachmentsScopedList--; @@ -38409,7 +39010,7 @@ api.ServiceAttachmentsScopedList buildServiceAttachmentsScopedList() { void checkServiceAttachmentsScopedList(api.ServiceAttachmentsScopedList o) { buildCounterServiceAttachmentsScopedList++; if (buildCounterServiceAttachmentsScopedList < 3) { - checkUnnamed664(o.serviceAttachments!); + checkUnnamed673(o.serviceAttachments!); checkServiceAttachmentsScopedListWarning(o.warning!); } buildCounterServiceAttachmentsScopedList--; @@ -38417,14 +39018,14 @@ void checkServiceAttachmentsScopedList(api.ServiceAttachmentsScopedList o) { core.Map - buildUnnamed666() => { + buildUnnamed675() => { 'x': buildSetCommonInstanceMetadataOperationMetadataPerLocationOperationInfo(), 'y': buildSetCommonInstanceMetadataOperationMetadataPerLocationOperationInfo(), }; -void checkUnnamed666( +void checkUnnamed675( core.Map< core.String, api @@ -38444,7 +39045,7 @@ api.SetCommonInstanceMetadataOperationMetadata buildCounterSetCommonInstanceMetadataOperationMetadata++; if (buildCounterSetCommonInstanceMetadataOperationMetadata < 3) { o.clientOperationId = 'foo'; - o.perLocationOperations = buildUnnamed666(); + o.perLocationOperations = buildUnnamed675(); } buildCounterSetCommonInstanceMetadataOperationMetadata--; return o; @@ -38458,7 +39059,7 @@ void checkSetCommonInstanceMetadataOperationMetadata( 
o.clientOperationId!, unittest.equals('foo'), ); - checkUnnamed666(o.perLocationOperations!); + checkUnnamed675(o.perLocationOperations!); } buildCounterSetCommonInstanceMetadataOperationMetadata--; } @@ -38494,12 +39095,12 @@ void checkSetCommonInstanceMetadataOperationMetadataPerLocationOperationInfo( buildCounterSetCommonInstanceMetadataOperationMetadataPerLocationOperationInfo--; } -core.Map buildUnnamed667() => { +core.Map buildUnnamed676() => { 'x': buildShareSettingsProjectConfig(), 'y': buildShareSettingsProjectConfig(), }; -void checkUnnamed667(core.Map o) { +void checkUnnamed676(core.Map o) { unittest.expect(o, unittest.hasLength(2)); checkShareSettingsProjectConfig(o['x']!); checkShareSettingsProjectConfig(o['y']!); @@ -38510,7 +39111,7 @@ api.ShareSettings buildShareSettings() { final o = api.ShareSettings(); buildCounterShareSettings++; if (buildCounterShareSettings < 3) { - o.projectMap = buildUnnamed667(); + o.projectMap = buildUnnamed676(); o.shareType = 'foo'; } buildCounterShareSettings--; @@ -38520,7 +39121,7 @@ api.ShareSettings buildShareSettings() { void checkShareSettings(api.ShareSettings o) { buildCounterShareSettings++; if (buildCounterShareSettings < 3) { - checkUnnamed667(o.projectMap!); + checkUnnamed676(o.projectMap!); unittest.expect( o.shareType!, unittest.equals('foo'), @@ -38674,23 +39275,23 @@ void checkSignedUrlKey(api.SignedUrlKey o) { buildCounterSignedUrlKey--; } -core.List buildUnnamed668() => [ +core.List buildUnnamed677() => [ buildGuestOsFeature(), buildGuestOsFeature(), ]; -void checkUnnamed668(core.List o) { +void checkUnnamed677(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGuestOsFeature(o[0]); checkGuestOsFeature(o[1]); } -core.Map buildUnnamed669() => { +core.Map buildUnnamed678() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed669(core.Map o) { +void checkUnnamed678(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -38702,12 +39303,12 @@ void checkUnnamed669(core.Map o) { ); } -core.List buildUnnamed670() => [ +core.List buildUnnamed679() => [ 'foo', 'foo', ]; -void checkUnnamed670(core.List o) { +void checkUnnamed679(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -38719,12 +39320,12 @@ void checkUnnamed670(core.List o) { ); } -core.List buildUnnamed671() => [ +core.List buildUnnamed680() => [ 'foo', 'foo', ]; -void checkUnnamed671(core.List o) { +void checkUnnamed680(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -38736,12 +39337,12 @@ void checkUnnamed671(core.List o) { ); } -core.List buildUnnamed672() => [ +core.List buildUnnamed681() => [ 'foo', 'foo', ]; -void checkUnnamed672(core.List o) { +void checkUnnamed681(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -38767,13 +39368,13 @@ api.Snapshot buildSnapshot() { o.diskSizeGb = 'foo'; o.downloadBytes = 'foo'; o.enableConfidentialCompute = true; - o.guestOsFeatures = buildUnnamed668(); + o.guestOsFeatures = buildUnnamed677(); o.id = 'foo'; o.kind = 'foo'; o.labelFingerprint = 'foo'; - o.labels = buildUnnamed669(); - o.licenseCodes = buildUnnamed670(); - o.licenses = buildUnnamed671(); + o.labels = buildUnnamed678(); + o.licenseCodes = buildUnnamed679(); + o.licenses = buildUnnamed680(); o.locationHint = 'foo'; o.name = 'foo'; o.satisfiesPzi = true; @@ -38793,7 +39394,7 @@ api.Snapshot buildSnapshot() { o.status = 'foo'; o.storageBytes = 'foo'; o.storageBytesStatus = 'foo'; - o.storageLocations = buildUnnamed672(); + 
o.storageLocations = buildUnnamed681(); } buildCounterSnapshot--; return o; @@ -38832,7 +39433,7 @@ void checkSnapshot(api.Snapshot o) { unittest.equals('foo'), ); unittest.expect(o.enableConfidentialCompute!, unittest.isTrue); - checkUnnamed668(o.guestOsFeatures!); + checkUnnamed677(o.guestOsFeatures!); unittest.expect( o.id!, unittest.equals('foo'), @@ -38845,9 +39446,9 @@ void checkSnapshot(api.Snapshot o) { o.labelFingerprint!, unittest.equals('foo'), ); - checkUnnamed669(o.labels!); - checkUnnamed670(o.licenseCodes!); - checkUnnamed671(o.licenses!); + checkUnnamed678(o.labels!); + checkUnnamed679(o.licenseCodes!); + checkUnnamed680(o.licenses!); unittest.expect( o.locationHint!, unittest.equals('foo'), @@ -38909,17 +39510,17 @@ void checkSnapshot(api.Snapshot o) { o.storageBytesStatus!, unittest.equals('foo'), ); - checkUnnamed672(o.storageLocations!); + checkUnnamed681(o.storageLocations!); } buildCounterSnapshot--; } -core.List buildUnnamed673() => [ +core.List buildUnnamed682() => [ buildSnapshot(), buildSnapshot(), ]; -void checkUnnamed673(core.List o) { +void checkUnnamed682(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkSnapshot(o[0]); checkSnapshot(o[1]); @@ -38952,12 +39553,12 @@ void checkSnapshotListWarningData(api.SnapshotListWarningData o) { buildCounterSnapshotListWarningData--; } -core.List buildUnnamed674() => [ +core.List buildUnnamed683() => [ buildSnapshotListWarningData(), buildSnapshotListWarningData(), ]; -void checkUnnamed674(core.List o) { +void checkUnnamed683(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkSnapshotListWarningData(o[0]); checkSnapshotListWarningData(o[1]); @@ -38969,7 +39570,7 @@ api.SnapshotListWarning buildSnapshotListWarning() { buildCounterSnapshotListWarning++; if (buildCounterSnapshotListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed674(); + o.data = buildUnnamed683(); o.message = 'foo'; } buildCounterSnapshotListWarning--; @@ -38983,7 +39584,7 @@ void checkSnapshotListWarning(api.SnapshotListWarning o) { o.code!, unittest.equals('foo'), ); - checkUnnamed674(o.data!); + checkUnnamed683(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -38998,7 +39599,7 @@ api.SnapshotList buildSnapshotList() { buildCounterSnapshotList++; if (buildCounterSnapshotList < 3) { o.id = 'foo'; - o.items = buildUnnamed673(); + o.items = buildUnnamed682(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; @@ -39015,7 +39616,7 @@ void checkSnapshotList(api.SnapshotList o) { o.id!, unittest.equals('foo'), ); - checkUnnamed673(o.items!); + checkUnnamed682(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -39054,14 +39655,14 @@ void checkSnapshotSettings(api.SnapshotSettings o) { core.Map - buildUnnamed675() => { + buildUnnamed684() => { 'x': buildSnapshotSettingsStorageLocationSettingsStorageLocationPreference(), 'y': buildSnapshotSettingsStorageLocationSettingsStorageLocationPreference(), }; -void checkUnnamed675( +void checkUnnamed684( core.Map< core.String, api @@ -39080,7 +39681,7 @@ api.SnapshotSettingsStorageLocationSettings final o = api.SnapshotSettingsStorageLocationSettings(); buildCounterSnapshotSettingsStorageLocationSettings++; if (buildCounterSnapshotSettingsStorageLocationSettings < 3) { - o.locations = buildUnnamed675(); + o.locations = buildUnnamed684(); o.policy = 'foo'; } buildCounterSnapshotSettingsStorageLocationSettings--; @@ -39091,7 +39692,7 @@ void checkSnapshotSettingsStorageLocationSettings( api.SnapshotSettingsStorageLocationSettings o) { 
buildCounterSnapshotSettingsStorageLocationSettings++; if (buildCounterSnapshotSettingsStorageLocationSettings < 3) { - checkUnnamed675(o.locations!); + checkUnnamed684(o.locations!); unittest.expect( o.policy!, unittest.equals('foo'), @@ -39153,12 +39754,12 @@ void checkSourceDiskEncryptionKey(api.SourceDiskEncryptionKey o) { buildCounterSourceDiskEncryptionKey--; } -core.List buildUnnamed676() => [ +core.List buildUnnamed685() => [ buildDiskInstantiationConfig(), buildDiskInstantiationConfig(), ]; -void checkUnnamed676(core.List o) { +void checkUnnamed685(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkDiskInstantiationConfig(o[0]); checkDiskInstantiationConfig(o[1]); @@ -39169,7 +39770,7 @@ api.SourceInstanceParams buildSourceInstanceParams() { final o = api.SourceInstanceParams(); buildCounterSourceInstanceParams++; if (buildCounterSourceInstanceParams < 3) { - o.diskConfigs = buildUnnamed676(); + o.diskConfigs = buildUnnamed685(); } buildCounterSourceInstanceParams--; return o; @@ -39178,39 +39779,39 @@ api.SourceInstanceParams buildSourceInstanceParams() { void checkSourceInstanceParams(api.SourceInstanceParams o) { buildCounterSourceInstanceParams++; if (buildCounterSourceInstanceParams < 3) { - checkUnnamed676(o.diskConfigs!); + checkUnnamed685(o.diskConfigs!); } buildCounterSourceInstanceParams--; } -core.List buildUnnamed677() => [ +core.List buildUnnamed686() => [ buildSavedAttachedDisk(), buildSavedAttachedDisk(), ]; -void checkUnnamed677(core.List o) { +void checkUnnamed686(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkSavedAttachedDisk(o[0]); checkSavedAttachedDisk(o[1]); } -core.List buildUnnamed678() => [ +core.List buildUnnamed687() => [ buildAcceleratorConfig(), buildAcceleratorConfig(), ]; -void checkUnnamed678(core.List o) { +void checkUnnamed687(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkAcceleratorConfig(o[0]); checkAcceleratorConfig(o[1]); } -core.Map buildUnnamed679() => { +core.Map buildUnnamed688() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed679(core.Map o) { +void checkUnnamed688(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -39222,23 +39823,23 @@ void checkUnnamed679(core.Map o) { ); } -core.List buildUnnamed680() => [ +core.List buildUnnamed689() => [ buildNetworkInterface(), buildNetworkInterface(), ]; -void checkUnnamed680(core.List o) { +void checkUnnamed689(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkNetworkInterface(o[0]); checkNetworkInterface(o[1]); } -core.List buildUnnamed681() => [ +core.List buildUnnamed690() => [ buildServiceAccount(), buildServiceAccount(), ]; -void checkUnnamed681(core.List o) { +void checkUnnamed690(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkServiceAccount(o[0]); checkServiceAccount(o[1]); @@ -39252,16 +39853,16 @@ api.SourceInstanceProperties buildSourceInstanceProperties() { o.canIpForward = true; o.deletionProtection = true; o.description = 'foo'; - o.disks = buildUnnamed677(); - o.guestAccelerators = buildUnnamed678(); + o.disks = buildUnnamed686(); + o.guestAccelerators = buildUnnamed687(); o.keyRevocationActionType = 'foo'; - o.labels = buildUnnamed679(); + o.labels = buildUnnamed688(); o.machineType = 'foo'; o.metadata = buildMetadata(); o.minCpuPlatform = 'foo'; - o.networkInterfaces = buildUnnamed680(); + o.networkInterfaces = buildUnnamed689(); o.scheduling = buildScheduling(); - o.serviceAccounts = buildUnnamed681(); + o.serviceAccounts = buildUnnamed690(); o.tags = buildTags(); } 
buildCounterSourceInstanceProperties--; @@ -39277,13 +39878,13 @@ void checkSourceInstanceProperties(api.SourceInstanceProperties o) { o.description!, unittest.equals('foo'), ); - checkUnnamed677(o.disks!); - checkUnnamed678(o.guestAccelerators!); + checkUnnamed686(o.disks!); + checkUnnamed687(o.guestAccelerators!); unittest.expect( o.keyRevocationActionType!, unittest.equals('foo'), ); - checkUnnamed679(o.labels!); + checkUnnamed688(o.labels!); unittest.expect( o.machineType!, unittest.equals('foo'), @@ -39293,20 +39894,20 @@ void checkSourceInstanceProperties(api.SourceInstanceProperties o) { o.minCpuPlatform!, unittest.equals('foo'), ); - checkUnnamed680(o.networkInterfaces!); + checkUnnamed689(o.networkInterfaces!); checkScheduling(o.scheduling!); - checkUnnamed681(o.serviceAccounts!); + checkUnnamed690(o.serviceAccounts!); checkTags(o.tags!); } buildCounterSourceInstanceProperties--; } -core.List buildUnnamed682() => [ +core.List buildUnnamed691() => [ 'foo', 'foo', ]; -void checkUnnamed682(core.List o) { +void checkUnnamed691(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -39335,7 +39936,7 @@ api.SslCertificate buildSslCertificate() { o.region = 'foo'; o.selfLink = 'foo'; o.selfManaged = buildSslCertificateSelfManagedSslCertificate(); - o.subjectAlternativeNames = buildUnnamed682(); + o.subjectAlternativeNames = buildUnnamed691(); o.type = 'foo'; } buildCounterSslCertificate--; @@ -39387,7 +39988,7 @@ void checkSslCertificate(api.SslCertificate o) { unittest.equals('foo'), ); checkSslCertificateSelfManagedSslCertificate(o.selfManaged!); - checkUnnamed682(o.subjectAlternativeNames!); + checkUnnamed691(o.subjectAlternativeNames!); unittest.expect( o.type!, unittest.equals('foo'), @@ -39396,23 +39997,23 @@ void checkSslCertificate(api.SslCertificate o) { buildCounterSslCertificate--; } -core.Map buildUnnamed683() => { +core.Map buildUnnamed692() => { 'x': buildSslCertificatesScopedList(), 'y': buildSslCertificatesScopedList(), }; -void checkUnnamed683(core.Map o) { +void checkUnnamed692(core.Map o) { unittest.expect(o, unittest.hasLength(2)); checkSslCertificatesScopedList(o['x']!); checkSslCertificatesScopedList(o['y']!); } -core.List buildUnnamed684() => [ +core.List buildUnnamed693() => [ 'foo', 'foo', ]; -void checkUnnamed684(core.List o) { +void checkUnnamed693(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -39453,12 +40054,12 @@ void checkSslCertificateAggregatedListWarningData( buildCounterSslCertificateAggregatedListWarningData--; } -core.List buildUnnamed685() => [ +core.List buildUnnamed694() => [ buildSslCertificateAggregatedListWarningData(), buildSslCertificateAggregatedListWarningData(), ]; -void checkUnnamed685(core.List o) { +void checkUnnamed694(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkSslCertificateAggregatedListWarningData(o[0]); checkSslCertificateAggregatedListWarningData(o[1]); @@ -39471,7 +40072,7 @@ api.SslCertificateAggregatedListWarning buildCounterSslCertificateAggregatedListWarning++; if (buildCounterSslCertificateAggregatedListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed685(); + o.data = buildUnnamed694(); o.message = 'foo'; } buildCounterSslCertificateAggregatedListWarning--; @@ -39486,7 +40087,7 @@ void checkSslCertificateAggregatedListWarning( o.code!, unittest.equals('foo'), ); - checkUnnamed685(o.data!); + checkUnnamed694(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -39501,11 +40102,11 @@ api.SslCertificateAggregatedList 
buildSslCertificateAggregatedList() { buildCounterSslCertificateAggregatedList++; if (buildCounterSslCertificateAggregatedList < 3) { o.id = 'foo'; - o.items = buildUnnamed683(); + o.items = buildUnnamed692(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; - o.unreachables = buildUnnamed684(); + o.unreachables = buildUnnamed693(); o.warning = buildSslCertificateAggregatedListWarning(); } buildCounterSslCertificateAggregatedList--; @@ -39519,7 +40120,7 @@ void checkSslCertificateAggregatedList(api.SslCertificateAggregatedList o) { o.id!, unittest.equals('foo'), ); - checkUnnamed683(o.items!); + checkUnnamed692(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -39532,18 +40133,18 @@ void checkSslCertificateAggregatedList(api.SslCertificateAggregatedList o) { o.selfLink!, unittest.equals('foo'), ); - checkUnnamed684(o.unreachables!); + checkUnnamed693(o.unreachables!); checkSslCertificateAggregatedListWarning(o.warning!); } buildCounterSslCertificateAggregatedList--; } -core.List buildUnnamed686() => [ +core.List buildUnnamed695() => [ buildSslCertificate(), buildSslCertificate(), ]; -void checkUnnamed686(core.List o) { +void checkUnnamed695(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkSslCertificate(o[0]); checkSslCertificate(o[1]); @@ -39576,12 +40177,12 @@ void checkSslCertificateListWarningData(api.SslCertificateListWarningData o) { buildCounterSslCertificateListWarningData--; } -core.List buildUnnamed687() => [ +core.List buildUnnamed696() => [ buildSslCertificateListWarningData(), buildSslCertificateListWarningData(), ]; -void checkUnnamed687(core.List o) { +void checkUnnamed696(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkSslCertificateListWarningData(o[0]); checkSslCertificateListWarningData(o[1]); @@ -39593,7 +40194,7 @@ api.SslCertificateListWarning buildSslCertificateListWarning() { buildCounterSslCertificateListWarning++; if (buildCounterSslCertificateListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed687(); + o.data = buildUnnamed696(); o.message = 'foo'; } buildCounterSslCertificateListWarning--; @@ -39607,7 +40208,7 @@ void checkSslCertificateListWarning(api.SslCertificateListWarning o) { o.code!, unittest.equals('foo'), ); - checkUnnamed687(o.data!); + checkUnnamed696(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -39622,7 +40223,7 @@ api.SslCertificateList buildSslCertificateList() { buildCounterSslCertificateList++; if (buildCounterSslCertificateList < 3) { o.id = 'foo'; - o.items = buildUnnamed686(); + o.items = buildUnnamed695(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; @@ -39639,7 +40240,7 @@ void checkSslCertificateList(api.SslCertificateList o) { o.id!, unittest.equals('foo'), ); - checkUnnamed686(o.items!); + checkUnnamed695(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -39657,12 +40258,12 @@ void checkSslCertificateList(api.SslCertificateList o) { buildCounterSslCertificateList--; } -core.Map buildUnnamed688() => { +core.Map buildUnnamed697() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed688(core.Map o) { +void checkUnnamed697(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -39674,12 +40275,12 @@ void checkUnnamed688(core.Map o) { ); } -core.List buildUnnamed689() => [ +core.List buildUnnamed698() => [ 'foo', 'foo', ]; -void checkUnnamed689(core.List o) { +void checkUnnamed698(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -39697,8 +40298,8 @@ 
api.SslCertificateManagedSslCertificate final o = api.SslCertificateManagedSslCertificate(); buildCounterSslCertificateManagedSslCertificate++; if (buildCounterSslCertificateManagedSslCertificate < 3) { - o.domainStatus = buildUnnamed688(); - o.domains = buildUnnamed689(); + o.domainStatus = buildUnnamed697(); + o.domains = buildUnnamed698(); o.status = 'foo'; } buildCounterSslCertificateManagedSslCertificate--; @@ -39709,8 +40310,8 @@ void checkSslCertificateManagedSslCertificate( api.SslCertificateManagedSslCertificate o) { buildCounterSslCertificateManagedSslCertificate++; if (buildCounterSslCertificateManagedSslCertificate < 3) { - checkUnnamed688(o.domainStatus!); - checkUnnamed689(o.domains!); + checkUnnamed697(o.domainStatus!); + checkUnnamed698(o.domains!); unittest.expect( o.status!, unittest.equals('foo'), @@ -39748,12 +40349,12 @@ void checkSslCertificateSelfManagedSslCertificate( buildCounterSslCertificateSelfManagedSslCertificate--; } -core.List buildUnnamed690() => [ +core.List buildUnnamed699() => [ buildSslCertificate(), buildSslCertificate(), ]; -void checkUnnamed690(core.List o) { +void checkUnnamed699(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkSslCertificate(o[0]); checkSslCertificate(o[1]); @@ -39788,12 +40389,12 @@ void checkSslCertificatesScopedListWarningData( buildCounterSslCertificatesScopedListWarningData--; } -core.List buildUnnamed691() => [ +core.List buildUnnamed700() => [ buildSslCertificatesScopedListWarningData(), buildSslCertificatesScopedListWarningData(), ]; -void checkUnnamed691(core.List o) { +void checkUnnamed700(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkSslCertificatesScopedListWarningData(o[0]); checkSslCertificatesScopedListWarningData(o[1]); @@ -39805,7 +40406,7 @@ api.SslCertificatesScopedListWarning buildSslCertificatesScopedListWarning() { buildCounterSslCertificatesScopedListWarning++; if (buildCounterSslCertificatesScopedListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed691(); + o.data = buildUnnamed700(); o.message = 'foo'; } buildCounterSslCertificatesScopedListWarning--; @@ -39820,7 +40421,7 @@ void checkSslCertificatesScopedListWarning( o.code!, unittest.equals('foo'), ); - checkUnnamed691(o.data!); + checkUnnamed700(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -39834,7 +40435,7 @@ api.SslCertificatesScopedList buildSslCertificatesScopedList() { final o = api.SslCertificatesScopedList(); buildCounterSslCertificatesScopedList++; if (buildCounterSslCertificatesScopedList < 3) { - o.sslCertificates = buildUnnamed690(); + o.sslCertificates = buildUnnamed699(); o.warning = buildSslCertificatesScopedListWarning(); } buildCounterSslCertificatesScopedList--; @@ -39844,29 +40445,29 @@ api.SslCertificatesScopedList buildSslCertificatesScopedList() { void checkSslCertificatesScopedList(api.SslCertificatesScopedList o) { buildCounterSslCertificatesScopedList++; if (buildCounterSslCertificatesScopedList < 3) { - checkUnnamed690(o.sslCertificates!); + checkUnnamed699(o.sslCertificates!); checkSslCertificatesScopedListWarning(o.warning!); } buildCounterSslCertificatesScopedList--; } -core.Map buildUnnamed692() => { +core.Map buildUnnamed701() => { 'x': buildSslPoliciesScopedList(), 'y': buildSslPoliciesScopedList(), }; -void checkUnnamed692(core.Map o) { +void checkUnnamed701(core.Map o) { unittest.expect(o, unittest.hasLength(2)); checkSslPoliciesScopedList(o['x']!); checkSslPoliciesScopedList(o['y']!); } -core.List buildUnnamed693() => [ +core.List buildUnnamed702() => [ 'foo', 
'foo', ]; -void checkUnnamed693(core.List o) { +void checkUnnamed702(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -39907,12 +40508,12 @@ void checkSslPoliciesAggregatedListWarningData( buildCounterSslPoliciesAggregatedListWarningData--; } -core.List buildUnnamed694() => [ +core.List buildUnnamed703() => [ buildSslPoliciesAggregatedListWarningData(), buildSslPoliciesAggregatedListWarningData(), ]; -void checkUnnamed694(core.List o) { +void checkUnnamed703(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkSslPoliciesAggregatedListWarningData(o[0]); checkSslPoliciesAggregatedListWarningData(o[1]); @@ -39924,7 +40525,7 @@ api.SslPoliciesAggregatedListWarning buildSslPoliciesAggregatedListWarning() { buildCounterSslPoliciesAggregatedListWarning++; if (buildCounterSslPoliciesAggregatedListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed694(); + o.data = buildUnnamed703(); o.message = 'foo'; } buildCounterSslPoliciesAggregatedListWarning--; @@ -39939,7 +40540,7 @@ void checkSslPoliciesAggregatedListWarning( o.code!, unittest.equals('foo'), ); - checkUnnamed694(o.data!); + checkUnnamed703(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -39955,11 +40556,11 @@ api.SslPoliciesAggregatedList buildSslPoliciesAggregatedList() { if (buildCounterSslPoliciesAggregatedList < 3) { o.etag = 'foo'; o.id = 'foo'; - o.items = buildUnnamed692(); + o.items = buildUnnamed701(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; - o.unreachables = buildUnnamed693(); + o.unreachables = buildUnnamed702(); o.warning = buildSslPoliciesAggregatedListWarning(); } buildCounterSslPoliciesAggregatedList--; @@ -39977,7 +40578,7 @@ void checkSslPoliciesAggregatedList(api.SslPoliciesAggregatedList o) { o.id!, unittest.equals('foo'), ); - checkUnnamed692(o.items!); + checkUnnamed701(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -39990,18 +40591,18 @@ void checkSslPoliciesAggregatedList(api.SslPoliciesAggregatedList o) { o.selfLink!, unittest.equals('foo'), ); - checkUnnamed693(o.unreachables!); + checkUnnamed702(o.unreachables!); checkSslPoliciesAggregatedListWarning(o.warning!); } buildCounterSslPoliciesAggregatedList--; } -core.List buildUnnamed695() => [ +core.List buildUnnamed704() => [ buildSslPolicy(), buildSslPolicy(), ]; -void checkUnnamed695(core.List o) { +void checkUnnamed704(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkSslPolicy(o[0]); checkSslPolicy(o[1]); @@ -40034,12 +40635,12 @@ void checkSslPoliciesListWarningData(api.SslPoliciesListWarningData o) { buildCounterSslPoliciesListWarningData--; } -core.List buildUnnamed696() => [ +core.List buildUnnamed705() => [ buildSslPoliciesListWarningData(), buildSslPoliciesListWarningData(), ]; -void checkUnnamed696(core.List o) { +void checkUnnamed705(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkSslPoliciesListWarningData(o[0]); checkSslPoliciesListWarningData(o[1]); @@ -40051,7 +40652,7 @@ api.SslPoliciesListWarning buildSslPoliciesListWarning() { buildCounterSslPoliciesListWarning++; if (buildCounterSslPoliciesListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed696(); + o.data = buildUnnamed705(); o.message = 'foo'; } buildCounterSslPoliciesListWarning--; @@ -40065,7 +40666,7 @@ void checkSslPoliciesListWarning(api.SslPoliciesListWarning o) { o.code!, unittest.equals('foo'), ); - checkUnnamed696(o.data!); + checkUnnamed705(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -40080,7 +40681,7 @@ api.SslPoliciesList 
buildSslPoliciesList() { buildCounterSslPoliciesList++; if (buildCounterSslPoliciesList < 3) { o.id = 'foo'; - o.items = buildUnnamed695(); + o.items = buildUnnamed704(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; @@ -40097,7 +40698,7 @@ void checkSslPoliciesList(api.SslPoliciesList o) { o.id!, unittest.equals('foo'), ); - checkUnnamed695(o.items!); + checkUnnamed704(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -40115,12 +40716,12 @@ void checkSslPoliciesList(api.SslPoliciesList o) { buildCounterSslPoliciesList--; } -core.List buildUnnamed697() => [ +core.List buildUnnamed706() => [ 'foo', 'foo', ]; -void checkUnnamed697(core.List o) { +void checkUnnamed706(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -40138,7 +40739,7 @@ api.SslPoliciesListAvailableFeaturesResponse final o = api.SslPoliciesListAvailableFeaturesResponse(); buildCounterSslPoliciesListAvailableFeaturesResponse++; if (buildCounterSslPoliciesListAvailableFeaturesResponse < 3) { - o.features = buildUnnamed697(); + o.features = buildUnnamed706(); } buildCounterSslPoliciesListAvailableFeaturesResponse--; return o; @@ -40148,17 +40749,17 @@ void checkSslPoliciesListAvailableFeaturesResponse( api.SslPoliciesListAvailableFeaturesResponse o) { buildCounterSslPoliciesListAvailableFeaturesResponse++; if (buildCounterSslPoliciesListAvailableFeaturesResponse < 3) { - checkUnnamed697(o.features!); + checkUnnamed706(o.features!); } buildCounterSslPoliciesListAvailableFeaturesResponse--; } -core.List buildUnnamed698() => [ +core.List buildUnnamed707() => [ buildSslPolicy(), buildSslPolicy(), ]; -void checkUnnamed698(core.List o) { +void checkUnnamed707(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkSslPolicy(o[0]); checkSslPolicy(o[1]); @@ -40192,12 +40793,12 @@ void checkSslPoliciesScopedListWarningData( buildCounterSslPoliciesScopedListWarningData--; } -core.List buildUnnamed699() => [ +core.List buildUnnamed708() => [ buildSslPoliciesScopedListWarningData(), buildSslPoliciesScopedListWarningData(), ]; -void checkUnnamed699(core.List o) { +void checkUnnamed708(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkSslPoliciesScopedListWarningData(o[0]); checkSslPoliciesScopedListWarningData(o[1]); @@ -40209,7 +40810,7 @@ api.SslPoliciesScopedListWarning buildSslPoliciesScopedListWarning() { buildCounterSslPoliciesScopedListWarning++; if (buildCounterSslPoliciesScopedListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed699(); + o.data = buildUnnamed708(); o.message = 'foo'; } buildCounterSslPoliciesScopedListWarning--; @@ -40223,7 +40824,7 @@ void checkSslPoliciesScopedListWarning(api.SslPoliciesScopedListWarning o) { o.code!, unittest.equals('foo'), ); - checkUnnamed699(o.data!); + checkUnnamed708(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -40237,7 +40838,7 @@ api.SslPoliciesScopedList buildSslPoliciesScopedList() { final o = api.SslPoliciesScopedList(); buildCounterSslPoliciesScopedList++; if (buildCounterSslPoliciesScopedList < 3) { - o.sslPolicies = buildUnnamed698(); + o.sslPolicies = buildUnnamed707(); o.warning = buildSslPoliciesScopedListWarning(); } buildCounterSslPoliciesScopedList--; @@ -40247,18 +40848,18 @@ api.SslPoliciesScopedList buildSslPoliciesScopedList() { void checkSslPoliciesScopedList(api.SslPoliciesScopedList o) { buildCounterSslPoliciesScopedList++; if (buildCounterSslPoliciesScopedList < 3) { - checkUnnamed698(o.sslPolicies!); + checkUnnamed707(o.sslPolicies!); 
checkSslPoliciesScopedListWarning(o.warning!); } buildCounterSslPoliciesScopedList--; } -core.List buildUnnamed700() => [ +core.List buildUnnamed709() => [ 'foo', 'foo', ]; -void checkUnnamed700(core.List o) { +void checkUnnamed709(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -40270,12 +40871,12 @@ void checkUnnamed700(core.List o) { ); } -core.List buildUnnamed701() => [ +core.List buildUnnamed710() => [ 'foo', 'foo', ]; -void checkUnnamed701(core.List o) { +void checkUnnamed710(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -40314,12 +40915,12 @@ void checkSslPolicyWarningsData(api.SslPolicyWarningsData o) { buildCounterSslPolicyWarningsData--; } -core.List buildUnnamed702() => [ +core.List buildUnnamed711() => [ buildSslPolicyWarningsData(), buildSslPolicyWarningsData(), ]; -void checkUnnamed702(core.List o) { +void checkUnnamed711(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkSslPolicyWarningsData(o[0]); checkSslPolicyWarningsData(o[1]); @@ -40331,7 +40932,7 @@ api.SslPolicyWarnings buildSslPolicyWarnings() { buildCounterSslPolicyWarnings++; if (buildCounterSslPolicyWarnings < 3) { o.code = 'foo'; - o.data = buildUnnamed702(); + o.data = buildUnnamed711(); o.message = 'foo'; } buildCounterSslPolicyWarnings--; @@ -40345,7 +40946,7 @@ void checkSslPolicyWarnings(api.SslPolicyWarnings o) { o.code!, unittest.equals('foo'), ); - checkUnnamed702(o.data!); + checkUnnamed711(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -40354,12 +40955,12 @@ void checkSslPolicyWarnings(api.SslPolicyWarnings o) { buildCounterSslPolicyWarnings--; } -core.List buildUnnamed703() => [ +core.List buildUnnamed712() => [ buildSslPolicyWarnings(), buildSslPolicyWarnings(), ]; -void checkUnnamed703(core.List o) { +void checkUnnamed712(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkSslPolicyWarnings(o[0]); checkSslPolicyWarnings(o[1]); @@ -40371,9 +40972,9 @@ api.SslPolicy buildSslPolicy() { buildCounterSslPolicy++; if (buildCounterSslPolicy < 3) { o.creationTimestamp = 'foo'; - o.customFeatures = buildUnnamed700(); + o.customFeatures = buildUnnamed709(); o.description = 'foo'; - o.enabledFeatures = buildUnnamed701(); + o.enabledFeatures = buildUnnamed710(); o.fingerprint = 'foo'; o.id = 'foo'; o.kind = 'foo'; @@ -40382,7 +40983,7 @@ api.SslPolicy buildSslPolicy() { o.profile = 'foo'; o.region = 'foo'; o.selfLink = 'foo'; - o.warnings = buildUnnamed703(); + o.warnings = buildUnnamed712(); } buildCounterSslPolicy--; return o; @@ -40395,12 +40996,12 @@ void checkSslPolicy(api.SslPolicy o) { o.creationTimestamp!, unittest.equals('foo'), ); - checkUnnamed700(o.customFeatures!); + checkUnnamed709(o.customFeatures!); unittest.expect( o.description!, unittest.equals('foo'), ); - checkUnnamed701(o.enabledFeatures!); + checkUnnamed710(o.enabledFeatures!); unittest.expect( o.fingerprint!, unittest.equals('foo'), @@ -40433,7 +41034,7 @@ void checkSslPolicy(api.SslPolicy o) { o.selfLink!, unittest.equals('foo'), ); - checkUnnamed703(o.warnings!); + checkUnnamed712(o.warnings!); } buildCounterSslPolicy--; } @@ -40480,12 +41081,12 @@ void checkStatefulPolicy(api.StatefulPolicy o) { } core.Map - buildUnnamed704() => { + buildUnnamed713() => { 'x': buildStatefulPolicyPreservedStateDiskDevice(), 'y': buildStatefulPolicyPreservedStateDiskDevice(), }; -void checkUnnamed704( +void checkUnnamed713( core.Map o) { unittest.expect(o, unittest.hasLength(2)); checkStatefulPolicyPreservedStateDiskDevice(o['x']!); @@ 
-40493,12 +41094,12 @@ void checkUnnamed704( } core.Map - buildUnnamed705() => { + buildUnnamed714() => { 'x': buildStatefulPolicyPreservedStateNetworkIp(), 'y': buildStatefulPolicyPreservedStateNetworkIp(), }; -void checkUnnamed705( +void checkUnnamed714( core.Map o) { unittest.expect(o, unittest.hasLength(2)); checkStatefulPolicyPreservedStateNetworkIp(o['x']!); @@ -40506,12 +41107,12 @@ void checkUnnamed705( } core.Map - buildUnnamed706() => { + buildUnnamed715() => { 'x': buildStatefulPolicyPreservedStateNetworkIp(), 'y': buildStatefulPolicyPreservedStateNetworkIp(), }; -void checkUnnamed706( +void checkUnnamed715( core.Map o) { unittest.expect(o, unittest.hasLength(2)); checkStatefulPolicyPreservedStateNetworkIp(o['x']!); @@ -40523,9 +41124,9 @@ api.StatefulPolicyPreservedState buildStatefulPolicyPreservedState() { final o = api.StatefulPolicyPreservedState(); buildCounterStatefulPolicyPreservedState++; if (buildCounterStatefulPolicyPreservedState < 3) { - o.disks = buildUnnamed704(); - o.externalIPs = buildUnnamed705(); - o.internalIPs = buildUnnamed706(); + o.disks = buildUnnamed713(); + o.externalIPs = buildUnnamed714(); + o.internalIPs = buildUnnamed715(); } buildCounterStatefulPolicyPreservedState--; return o; @@ -40534,9 +41135,9 @@ api.StatefulPolicyPreservedState buildStatefulPolicyPreservedState() { void checkStatefulPolicyPreservedState(api.StatefulPolicyPreservedState o) { buildCounterStatefulPolicyPreservedState++; if (buildCounterStatefulPolicyPreservedState < 3) { - checkUnnamed704(o.disks!); - checkUnnamed705(o.externalIPs!); - checkUnnamed706(o.internalIPs!); + checkUnnamed713(o.disks!); + checkUnnamed714(o.externalIPs!); + checkUnnamed715(o.internalIPs!); } buildCounterStatefulPolicyPreservedState--; } @@ -40589,7 +41190,7 @@ void checkStatefulPolicyPreservedStateNetworkIp( buildCounterStatefulPolicyPreservedStateNetworkIp--; } -core.Map buildUnnamed707() => { +core.Map buildUnnamed716() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -40602,7 +41203,7 @@ core.Map buildUnnamed707() => { }, }; -void checkUnnamed707(core.Map o) { +void checkUnnamed716(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted1 = (o['x']!) 
as core.Map; unittest.expect(casted1, unittest.hasLength(3)); @@ -40634,15 +41235,15 @@ void checkUnnamed707(core.Map o) { ); } -core.List> buildUnnamed708() => [ - buildUnnamed707(), - buildUnnamed707(), +core.List> buildUnnamed717() => [ + buildUnnamed716(), + buildUnnamed716(), ]; -void checkUnnamed708(core.List> o) { +void checkUnnamed717(core.List> o) { unittest.expect(o, unittest.hasLength(2)); - checkUnnamed707(o[0]); - checkUnnamed707(o[1]); + checkUnnamed716(o[0]); + checkUnnamed716(o[1]); } core.int buildCounterStatus = 0; @@ -40651,7 +41252,7 @@ api.Status buildStatus() { buildCounterStatus++; if (buildCounterStatus < 3) { o.code = 42; - o.details = buildUnnamed708(); + o.details = buildUnnamed717(); o.message = 'foo'; } buildCounterStatus--; @@ -40665,7 +41266,7 @@ void checkStatus(api.Status o) { o.code!, unittest.equals(42), ); - checkUnnamed708(o.details!); + checkUnnamed717(o.details!); unittest.expect( o.message!, unittest.equals('foo'), @@ -40674,12 +41275,12 @@ void checkStatus(api.Status o) { buildCounterStatus--; } -core.Map buildUnnamed709() => { +core.Map buildUnnamed718() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed709(core.Map o) { +void checkUnnamed718(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -40702,7 +41303,7 @@ api.StoragePool buildStoragePool() { o.id = 'foo'; o.kind = 'foo'; o.labelFingerprint = 'foo'; - o.labels = buildUnnamed709(); + o.labels = buildUnnamed718(); o.name = 'foo'; o.performanceProvisioningType = 'foo'; o.poolProvisionedCapacityGb = 'foo'; @@ -40747,7 +41348,7 @@ void checkStoragePool(api.StoragePool o) { o.labelFingerprint!, unittest.equals('foo'), ); - checkUnnamed709(o.labels!); + checkUnnamed718(o.labels!); unittest.expect( o.name!, unittest.equals('foo'), @@ -40794,23 +41395,23 @@ void checkStoragePool(api.StoragePool o) { buildCounterStoragePool--; } -core.Map buildUnnamed710() => { +core.Map buildUnnamed719() => { 'x': buildStoragePoolsScopedList(), 'y': buildStoragePoolsScopedList(), }; -void checkUnnamed710(core.Map o) { +void checkUnnamed719(core.Map o) { unittest.expect(o, unittest.hasLength(2)); checkStoragePoolsScopedList(o['x']!); checkStoragePoolsScopedList(o['y']!); } -core.List buildUnnamed711() => [ +core.List buildUnnamed720() => [ 'foo', 'foo', ]; -void checkUnnamed711(core.List o) { +void checkUnnamed720(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -40851,12 +41452,12 @@ void checkStoragePoolAggregatedListWarningData( buildCounterStoragePoolAggregatedListWarningData--; } -core.List buildUnnamed712() => [ +core.List buildUnnamed721() => [ buildStoragePoolAggregatedListWarningData(), buildStoragePoolAggregatedListWarningData(), ]; -void checkUnnamed712(core.List o) { +void checkUnnamed721(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkStoragePoolAggregatedListWarningData(o[0]); checkStoragePoolAggregatedListWarningData(o[1]); @@ -40868,7 +41469,7 @@ api.StoragePoolAggregatedListWarning buildStoragePoolAggregatedListWarning() { buildCounterStoragePoolAggregatedListWarning++; if (buildCounterStoragePoolAggregatedListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed712(); + o.data = buildUnnamed721(); o.message = 'foo'; } buildCounterStoragePoolAggregatedListWarning--; @@ -40883,7 +41484,7 @@ void checkStoragePoolAggregatedListWarning( o.code!, unittest.equals('foo'), ); - checkUnnamed712(o.data!); + checkUnnamed721(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -40899,11 +41500,11 @@ 
api.StoragePoolAggregatedList buildStoragePoolAggregatedList() { if (buildCounterStoragePoolAggregatedList < 3) { o.etag = 'foo'; o.id = 'foo'; - o.items = buildUnnamed710(); + o.items = buildUnnamed719(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; - o.unreachables = buildUnnamed711(); + o.unreachables = buildUnnamed720(); o.warning = buildStoragePoolAggregatedListWarning(); } buildCounterStoragePoolAggregatedList--; @@ -40921,7 +41522,7 @@ void checkStoragePoolAggregatedList(api.StoragePoolAggregatedList o) { o.id!, unittest.equals('foo'), ); - checkUnnamed710(o.items!); + checkUnnamed719(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -40934,18 +41535,18 @@ void checkStoragePoolAggregatedList(api.StoragePoolAggregatedList o) { o.selfLink!, unittest.equals('foo'), ); - checkUnnamed711(o.unreachables!); + checkUnnamed720(o.unreachables!); checkStoragePoolAggregatedListWarning(o.warning!); } buildCounterStoragePoolAggregatedList--; } -core.List buildUnnamed713() => [ +core.List buildUnnamed722() => [ 'foo', 'foo', ]; -void checkUnnamed713(core.List o) { +void checkUnnamed722(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -40957,12 +41558,12 @@ void checkUnnamed713(core.List o) { ); } -core.List buildUnnamed714() => [ +core.List buildUnnamed723() => [ 'foo', 'foo', ]; -void checkUnnamed714(core.List o) { +void checkUnnamed723(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -40979,13 +41580,13 @@ api.StoragePoolDisk buildStoragePoolDisk() { final o = api.StoragePoolDisk(); buildCounterStoragePoolDisk++; if (buildCounterStoragePoolDisk < 3) { - o.attachedInstances = buildUnnamed713(); + o.attachedInstances = buildUnnamed722(); o.creationTimestamp = 'foo'; o.disk = 'foo'; o.name = 'foo'; o.provisionedIops = 'foo'; o.provisionedThroughput = 'foo'; - o.resourcePolicies = buildUnnamed714(); + o.resourcePolicies = buildUnnamed723(); o.sizeGb = 'foo'; o.status = 'foo'; o.type = 'foo'; @@ -40998,7 +41599,7 @@ api.StoragePoolDisk buildStoragePoolDisk() { void checkStoragePoolDisk(api.StoragePoolDisk o) { buildCounterStoragePoolDisk++; if (buildCounterStoragePoolDisk < 3) { - checkUnnamed713(o.attachedInstances!); + checkUnnamed722(o.attachedInstances!); unittest.expect( o.creationTimestamp!, unittest.equals('foo'), @@ -41019,7 +41620,7 @@ void checkStoragePoolDisk(api.StoragePoolDisk o) { o.provisionedThroughput!, unittest.equals('foo'), ); - checkUnnamed714(o.resourcePolicies!); + checkUnnamed723(o.resourcePolicies!); unittest.expect( o.sizeGb!, unittest.equals('foo'), @@ -41040,23 +41641,23 @@ void checkStoragePoolDisk(api.StoragePoolDisk o) { buildCounterStoragePoolDisk--; } -core.List buildUnnamed715() => [ +core.List buildUnnamed724() => [ buildStoragePool(), buildStoragePool(), ]; -void checkUnnamed715(core.List o) { +void checkUnnamed724(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkStoragePool(o[0]); checkStoragePool(o[1]); } -core.List buildUnnamed716() => [ +core.List buildUnnamed725() => [ 'foo', 'foo', ]; -void checkUnnamed716(core.List o) { +void checkUnnamed725(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -41095,12 +41696,12 @@ void checkStoragePoolListWarningData(api.StoragePoolListWarningData o) { buildCounterStoragePoolListWarningData--; } -core.List buildUnnamed717() => [ +core.List buildUnnamed726() => [ buildStoragePoolListWarningData(), buildStoragePoolListWarningData(), ]; -void checkUnnamed717(core.List o) { +void 
checkUnnamed726(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkStoragePoolListWarningData(o[0]); checkStoragePoolListWarningData(o[1]); @@ -41112,7 +41713,7 @@ api.StoragePoolListWarning buildStoragePoolListWarning() { buildCounterStoragePoolListWarning++; if (buildCounterStoragePoolListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed717(); + o.data = buildUnnamed726(); o.message = 'foo'; } buildCounterStoragePoolListWarning--; @@ -41126,7 +41727,7 @@ void checkStoragePoolListWarning(api.StoragePoolListWarning o) { o.code!, unittest.equals('foo'), ); - checkUnnamed717(o.data!); + checkUnnamed726(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -41142,11 +41743,11 @@ api.StoragePoolList buildStoragePoolList() { if (buildCounterStoragePoolList < 3) { o.etag = 'foo'; o.id = 'foo'; - o.items = buildUnnamed715(); + o.items = buildUnnamed724(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; - o.unreachables = buildUnnamed716(); + o.unreachables = buildUnnamed725(); o.warning = buildStoragePoolListWarning(); } buildCounterStoragePoolList--; @@ -41164,7 +41765,7 @@ void checkStoragePoolList(api.StoragePoolList o) { o.id!, unittest.equals('foo'), ); - checkUnnamed715(o.items!); + checkUnnamed724(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -41177,29 +41778,29 @@ void checkStoragePoolList(api.StoragePoolList o) { o.selfLink!, unittest.equals('foo'), ); - checkUnnamed716(o.unreachables!); + checkUnnamed725(o.unreachables!); checkStoragePoolListWarning(o.warning!); } buildCounterStoragePoolList--; } -core.List buildUnnamed718() => [ +core.List buildUnnamed727() => [ buildStoragePoolDisk(), buildStoragePoolDisk(), ]; -void checkUnnamed718(core.List o) { +void checkUnnamed727(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkStoragePoolDisk(o[0]); checkStoragePoolDisk(o[1]); } -core.List buildUnnamed719() => [ +core.List buildUnnamed728() => [ 'foo', 'foo', ]; -void checkUnnamed719(core.List o) { +void checkUnnamed728(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -41239,12 +41840,12 @@ void checkStoragePoolListDisksWarningData( buildCounterStoragePoolListDisksWarningData--; } -core.List buildUnnamed720() => [ +core.List buildUnnamed729() => [ buildStoragePoolListDisksWarningData(), buildStoragePoolListDisksWarningData(), ]; -void checkUnnamed720(core.List o) { +void checkUnnamed729(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkStoragePoolListDisksWarningData(o[0]); checkStoragePoolListDisksWarningData(o[1]); @@ -41256,7 +41857,7 @@ api.StoragePoolListDisksWarning buildStoragePoolListDisksWarning() { buildCounterStoragePoolListDisksWarning++; if (buildCounterStoragePoolListDisksWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed720(); + o.data = buildUnnamed729(); o.message = 'foo'; } buildCounterStoragePoolListDisksWarning--; @@ -41270,7 +41871,7 @@ void checkStoragePoolListDisksWarning(api.StoragePoolListDisksWarning o) { o.code!, unittest.equals('foo'), ); - checkUnnamed720(o.data!); + checkUnnamed729(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -41286,11 +41887,11 @@ api.StoragePoolListDisks buildStoragePoolListDisks() { if (buildCounterStoragePoolListDisks < 3) { o.etag = 'foo'; o.id = 'foo'; - o.items = buildUnnamed718(); + o.items = buildUnnamed727(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; - o.unreachables = buildUnnamed719(); + o.unreachables = buildUnnamed728(); o.warning = buildStoragePoolListDisksWarning(); } 
buildCounterStoragePoolListDisks--; @@ -41308,7 +41909,7 @@ void checkStoragePoolListDisks(api.StoragePoolListDisks o) { o.id!, unittest.equals('foo'), ); - checkUnnamed718(o.items!); + checkUnnamed727(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -41321,7 +41922,7 @@ void checkStoragePoolListDisks(api.StoragePoolListDisks o) { o.selfLink!, unittest.equals('foo'), ); - checkUnnamed719(o.unreachables!); + checkUnnamed728(o.unreachables!); checkStoragePoolListDisksWarning(o.warning!); } buildCounterStoragePoolListDisks--; @@ -41394,12 +41995,12 @@ void checkStoragePoolResourceStatus(api.StoragePoolResourceStatus o) { buildCounterStoragePoolResourceStatus--; } -core.List buildUnnamed721() => [ +core.List buildUnnamed730() => [ 'foo', 'foo', ]; -void checkUnnamed721(core.List o) { +void checkUnnamed730(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -41431,7 +42032,7 @@ api.StoragePoolType buildStoragePoolType() { o.name = 'foo'; o.selfLink = 'foo'; o.selfLinkWithId = 'foo'; - o.supportedDiskTypes = buildUnnamed721(); + o.supportedDiskTypes = buildUnnamed730(); o.zone = 'foo'; } buildCounterStoragePoolType--; @@ -41498,7 +42099,7 @@ void checkStoragePoolType(api.StoragePoolType o) { o.selfLinkWithId!, unittest.equals('foo'), ); - checkUnnamed721(o.supportedDiskTypes!); + checkUnnamed730(o.supportedDiskTypes!); unittest.expect( o.zone!, unittest.equals('foo'), @@ -41507,12 +42108,12 @@ void checkStoragePoolType(api.StoragePoolType o) { buildCounterStoragePoolType--; } -core.Map buildUnnamed722() => { +core.Map buildUnnamed731() => { 'x': buildStoragePoolTypesScopedList(), 'y': buildStoragePoolTypesScopedList(), }; -void checkUnnamed722(core.Map o) { +void checkUnnamed731(core.Map o) { unittest.expect(o, unittest.hasLength(2)); checkStoragePoolTypesScopedList(o['x']!); checkStoragePoolTypesScopedList(o['y']!); @@ -41547,12 +42148,12 @@ void checkStoragePoolTypeAggregatedListWarningData( buildCounterStoragePoolTypeAggregatedListWarningData--; } -core.List buildUnnamed723() => [ +core.List buildUnnamed732() => [ buildStoragePoolTypeAggregatedListWarningData(), buildStoragePoolTypeAggregatedListWarningData(), ]; -void checkUnnamed723( +void checkUnnamed732( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkStoragePoolTypeAggregatedListWarningData(o[0]); @@ -41566,7 +42167,7 @@ api.StoragePoolTypeAggregatedListWarning buildCounterStoragePoolTypeAggregatedListWarning++; if (buildCounterStoragePoolTypeAggregatedListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed723(); + o.data = buildUnnamed732(); o.message = 'foo'; } buildCounterStoragePoolTypeAggregatedListWarning--; @@ -41581,7 +42182,7 @@ void checkStoragePoolTypeAggregatedListWarning( o.code!, unittest.equals('foo'), ); - checkUnnamed723(o.data!); + checkUnnamed732(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -41596,7 +42197,7 @@ api.StoragePoolTypeAggregatedList buildStoragePoolTypeAggregatedList() { buildCounterStoragePoolTypeAggregatedList++; if (buildCounterStoragePoolTypeAggregatedList < 3) { o.id = 'foo'; - o.items = buildUnnamed722(); + o.items = buildUnnamed731(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; @@ -41613,7 +42214,7 @@ void checkStoragePoolTypeAggregatedList(api.StoragePoolTypeAggregatedList o) { o.id!, unittest.equals('foo'), ); - checkUnnamed722(o.items!); + checkUnnamed731(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -41631,12 +42232,12 @@ void 
checkStoragePoolTypeAggregatedList(api.StoragePoolTypeAggregatedList o) { buildCounterStoragePoolTypeAggregatedList--; } -core.List buildUnnamed724() => [ +core.List buildUnnamed733() => [ buildStoragePoolType(), buildStoragePoolType(), ]; -void checkUnnamed724(core.List o) { +void checkUnnamed733(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkStoragePoolType(o[0]); checkStoragePoolType(o[1]); @@ -41669,12 +42270,12 @@ void checkStoragePoolTypeListWarningData(api.StoragePoolTypeListWarningData o) { buildCounterStoragePoolTypeListWarningData--; } -core.List buildUnnamed725() => [ +core.List buildUnnamed734() => [ buildStoragePoolTypeListWarningData(), buildStoragePoolTypeListWarningData(), ]; -void checkUnnamed725(core.List o) { +void checkUnnamed734(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkStoragePoolTypeListWarningData(o[0]); checkStoragePoolTypeListWarningData(o[1]); @@ -41686,7 +42287,7 @@ api.StoragePoolTypeListWarning buildStoragePoolTypeListWarning() { buildCounterStoragePoolTypeListWarning++; if (buildCounterStoragePoolTypeListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed725(); + o.data = buildUnnamed734(); o.message = 'foo'; } buildCounterStoragePoolTypeListWarning--; @@ -41700,7 +42301,7 @@ void checkStoragePoolTypeListWarning(api.StoragePoolTypeListWarning o) { o.code!, unittest.equals('foo'), ); - checkUnnamed725(o.data!); + checkUnnamed734(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -41715,7 +42316,7 @@ api.StoragePoolTypeList buildStoragePoolTypeList() { buildCounterStoragePoolTypeList++; if (buildCounterStoragePoolTypeList < 3) { o.id = 'foo'; - o.items = buildUnnamed724(); + o.items = buildUnnamed733(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; @@ -41732,7 +42333,7 @@ void checkStoragePoolTypeList(api.StoragePoolTypeList o) { o.id!, unittest.equals('foo'), ); - checkUnnamed724(o.items!); + checkUnnamed733(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -41750,12 +42351,12 @@ void checkStoragePoolTypeList(api.StoragePoolTypeList o) { buildCounterStoragePoolTypeList--; } -core.List buildUnnamed726() => [ +core.List buildUnnamed735() => [ buildStoragePoolType(), buildStoragePoolType(), ]; -void checkUnnamed726(core.List o) { +void checkUnnamed735(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkStoragePoolType(o[0]); checkStoragePoolType(o[1]); @@ -41790,12 +42391,12 @@ void checkStoragePoolTypesScopedListWarningData( buildCounterStoragePoolTypesScopedListWarningData--; } -core.List buildUnnamed727() => [ +core.List buildUnnamed736() => [ buildStoragePoolTypesScopedListWarningData(), buildStoragePoolTypesScopedListWarningData(), ]; -void checkUnnamed727(core.List o) { +void checkUnnamed736(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkStoragePoolTypesScopedListWarningData(o[0]); checkStoragePoolTypesScopedListWarningData(o[1]); @@ -41807,7 +42408,7 @@ api.StoragePoolTypesScopedListWarning buildStoragePoolTypesScopedListWarning() { buildCounterStoragePoolTypesScopedListWarning++; if (buildCounterStoragePoolTypesScopedListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed727(); + o.data = buildUnnamed736(); o.message = 'foo'; } buildCounterStoragePoolTypesScopedListWarning--; @@ -41822,7 +42423,7 @@ void checkStoragePoolTypesScopedListWarning( o.code!, unittest.equals('foo'), ); - checkUnnamed727(o.data!); + checkUnnamed736(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -41836,7 +42437,7 @@ api.StoragePoolTypesScopedList 
buildStoragePoolTypesScopedList() { final o = api.StoragePoolTypesScopedList(); buildCounterStoragePoolTypesScopedList++; if (buildCounterStoragePoolTypesScopedList < 3) { - o.storagePoolTypes = buildUnnamed726(); + o.storagePoolTypes = buildUnnamed735(); o.warning = buildStoragePoolTypesScopedListWarning(); } buildCounterStoragePoolTypesScopedList--; @@ -41846,18 +42447,18 @@ api.StoragePoolTypesScopedList buildStoragePoolTypesScopedList() { void checkStoragePoolTypesScopedList(api.StoragePoolTypesScopedList o) { buildCounterStoragePoolTypesScopedList++; if (buildCounterStoragePoolTypesScopedList < 3) { - checkUnnamed726(o.storagePoolTypes!); + checkUnnamed735(o.storagePoolTypes!); checkStoragePoolTypesScopedListWarning(o.warning!); } buildCounterStoragePoolTypesScopedList--; } -core.List buildUnnamed728() => [ +core.List buildUnnamed737() => [ buildStoragePool(), buildStoragePool(), ]; -void checkUnnamed728(core.List o) { +void checkUnnamed737(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkStoragePool(o[0]); checkStoragePool(o[1]); @@ -41891,12 +42492,12 @@ void checkStoragePoolsScopedListWarningData( buildCounterStoragePoolsScopedListWarningData--; } -core.List buildUnnamed729() => [ +core.List buildUnnamed738() => [ buildStoragePoolsScopedListWarningData(), buildStoragePoolsScopedListWarningData(), ]; -void checkUnnamed729(core.List o) { +void checkUnnamed738(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkStoragePoolsScopedListWarningData(o[0]); checkStoragePoolsScopedListWarningData(o[1]); @@ -41908,7 +42509,7 @@ api.StoragePoolsScopedListWarning buildStoragePoolsScopedListWarning() { buildCounterStoragePoolsScopedListWarning++; if (buildCounterStoragePoolsScopedListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed729(); + o.data = buildUnnamed738(); o.message = 'foo'; } buildCounterStoragePoolsScopedListWarning--; @@ -41922,7 +42523,7 @@ void checkStoragePoolsScopedListWarning(api.StoragePoolsScopedListWarning o) { o.code!, unittest.equals('foo'), ); - checkUnnamed729(o.data!); + checkUnnamed738(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -41936,7 +42537,7 @@ api.StoragePoolsScopedList buildStoragePoolsScopedList() { final o = api.StoragePoolsScopedList(); buildCounterStoragePoolsScopedList++; if (buildCounterStoragePoolsScopedList < 3) { - o.storagePools = buildUnnamed728(); + o.storagePools = buildUnnamed737(); o.warning = buildStoragePoolsScopedListWarning(); } buildCounterStoragePoolsScopedList--; @@ -41946,18 +42547,18 @@ api.StoragePoolsScopedList buildStoragePoolsScopedList() { void checkStoragePoolsScopedList(api.StoragePoolsScopedList o) { buildCounterStoragePoolsScopedList++; if (buildCounterStoragePoolsScopedList < 3) { - checkUnnamed728(o.storagePools!); + checkUnnamed737(o.storagePools!); checkStoragePoolsScopedListWarning(o.warning!); } buildCounterStoragePoolsScopedList--; } -core.List buildUnnamed730() => [ +core.List buildUnnamed739() => [ buildSubnetworkSecondaryRange(), buildSubnetworkSecondaryRange(), ]; -void checkUnnamed730(core.List o) { +void checkUnnamed739(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkSubnetworkSecondaryRange(o[0]); checkSubnetworkSecondaryRange(o[1]); @@ -41989,7 +42590,7 @@ api.Subnetwork buildSubnetwork() { o.region = 'foo'; o.reservedInternalRange = 'foo'; o.role = 'foo'; - o.secondaryIpRanges = buildUnnamed730(); + o.secondaryIpRanges = buildUnnamed739(); o.selfLink = 'foo'; o.stackType = 'foo'; o.state = 'foo'; @@ -42076,7 +42677,7 @@ void 
checkSubnetwork(api.Subnetwork o) { o.role!, unittest.equals('foo'), ); - checkUnnamed730(o.secondaryIpRanges!); + checkUnnamed739(o.secondaryIpRanges!); unittest.expect( o.selfLink!, unittest.equals('foo'), @@ -42093,23 +42694,23 @@ void checkSubnetwork(api.Subnetwork o) { buildCounterSubnetwork--; } -core.Map buildUnnamed731() => { +core.Map buildUnnamed740() => { 'x': buildSubnetworksScopedList(), 'y': buildSubnetworksScopedList(), }; -void checkUnnamed731(core.Map o) { +void checkUnnamed740(core.Map o) { unittest.expect(o, unittest.hasLength(2)); checkSubnetworksScopedList(o['x']!); checkSubnetworksScopedList(o['y']!); } -core.List buildUnnamed732() => [ +core.List buildUnnamed741() => [ 'foo', 'foo', ]; -void checkUnnamed732(core.List o) { +void checkUnnamed741(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -42150,12 +42751,12 @@ void checkSubnetworkAggregatedListWarningData( buildCounterSubnetworkAggregatedListWarningData--; } -core.List buildUnnamed733() => [ +core.List buildUnnamed742() => [ buildSubnetworkAggregatedListWarningData(), buildSubnetworkAggregatedListWarningData(), ]; -void checkUnnamed733(core.List o) { +void checkUnnamed742(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkSubnetworkAggregatedListWarningData(o[0]); checkSubnetworkAggregatedListWarningData(o[1]); @@ -42167,7 +42768,7 @@ api.SubnetworkAggregatedListWarning buildSubnetworkAggregatedListWarning() { buildCounterSubnetworkAggregatedListWarning++; if (buildCounterSubnetworkAggregatedListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed733(); + o.data = buildUnnamed742(); o.message = 'foo'; } buildCounterSubnetworkAggregatedListWarning--; @@ -42182,7 +42783,7 @@ void checkSubnetworkAggregatedListWarning( o.code!, unittest.equals('foo'), ); - checkUnnamed733(o.data!); + checkUnnamed742(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -42197,11 +42798,11 @@ api.SubnetworkAggregatedList buildSubnetworkAggregatedList() { buildCounterSubnetworkAggregatedList++; if (buildCounterSubnetworkAggregatedList < 3) { o.id = 'foo'; - o.items = buildUnnamed731(); + o.items = buildUnnamed740(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; - o.unreachables = buildUnnamed732(); + o.unreachables = buildUnnamed741(); o.warning = buildSubnetworkAggregatedListWarning(); } buildCounterSubnetworkAggregatedList--; @@ -42215,7 +42816,7 @@ void checkSubnetworkAggregatedList(api.SubnetworkAggregatedList o) { o.id!, unittest.equals('foo'), ); - checkUnnamed731(o.items!); + checkUnnamed740(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -42228,18 +42829,18 @@ void checkSubnetworkAggregatedList(api.SubnetworkAggregatedList o) { o.selfLink!, unittest.equals('foo'), ); - checkUnnamed732(o.unreachables!); + checkUnnamed741(o.unreachables!); checkSubnetworkAggregatedListWarning(o.warning!); } buildCounterSubnetworkAggregatedList--; } -core.List buildUnnamed734() => [ +core.List buildUnnamed743() => [ buildSubnetwork(), buildSubnetwork(), ]; -void checkUnnamed734(core.List o) { +void checkUnnamed743(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkSubnetwork(o[0]); checkSubnetwork(o[1]); @@ -42272,12 +42873,12 @@ void checkSubnetworkListWarningData(api.SubnetworkListWarningData o) { buildCounterSubnetworkListWarningData--; } -core.List buildUnnamed735() => [ +core.List buildUnnamed744() => [ buildSubnetworkListWarningData(), buildSubnetworkListWarningData(), ]; -void checkUnnamed735(core.List o) { +void checkUnnamed744(core.List 
o) { unittest.expect(o, unittest.hasLength(2)); checkSubnetworkListWarningData(o[0]); checkSubnetworkListWarningData(o[1]); @@ -42289,7 +42890,7 @@ api.SubnetworkListWarning buildSubnetworkListWarning() { buildCounterSubnetworkListWarning++; if (buildCounterSubnetworkListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed735(); + o.data = buildUnnamed744(); o.message = 'foo'; } buildCounterSubnetworkListWarning--; @@ -42303,7 +42904,7 @@ void checkSubnetworkListWarning(api.SubnetworkListWarning o) { o.code!, unittest.equals('foo'), ); - checkUnnamed735(o.data!); + checkUnnamed744(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -42318,7 +42919,7 @@ api.SubnetworkList buildSubnetworkList() { buildCounterSubnetworkList++; if (buildCounterSubnetworkList < 3) { o.id = 'foo'; - o.items = buildUnnamed734(); + o.items = buildUnnamed743(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; @@ -42335,7 +42936,7 @@ void checkSubnetworkList(api.SubnetworkList o) { o.id!, unittest.equals('foo'), ); - checkUnnamed734(o.items!); + checkUnnamed743(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -42353,12 +42954,12 @@ void checkSubnetworkList(api.SubnetworkList o) { buildCounterSubnetworkList--; } -core.List buildUnnamed736() => [ +core.List buildUnnamed745() => [ 'foo', 'foo', ]; -void checkUnnamed736(core.List o) { +void checkUnnamed745(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -42380,7 +42981,7 @@ api.SubnetworkLogConfig buildSubnetworkLogConfig() { o.filterExpr = 'foo'; o.flowSampling = 42.0; o.metadata = 'foo'; - o.metadataFields = buildUnnamed736(); + o.metadataFields = buildUnnamed745(); } buildCounterSubnetworkLogConfig--; return o; @@ -42406,7 +43007,7 @@ void checkSubnetworkLogConfig(api.SubnetworkLogConfig o) { o.metadata!, unittest.equals('foo'), ); - checkUnnamed736(o.metadataFields!); + checkUnnamed745(o.metadataFields!); } buildCounterSubnetworkLogConfig--; } @@ -42467,12 +43068,12 @@ void checkSubnetworksExpandIpCidrRangeRequest( buildCounterSubnetworksExpandIpCidrRangeRequest--; } -core.List buildUnnamed737() => [ +core.List buildUnnamed746() => [ buildSubnetwork(), buildSubnetwork(), ]; -void checkUnnamed737(core.List o) { +void checkUnnamed746(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkSubnetwork(o[0]); checkSubnetwork(o[1]); @@ -42506,12 +43107,12 @@ void checkSubnetworksScopedListWarningData( buildCounterSubnetworksScopedListWarningData--; } -core.List buildUnnamed738() => [ +core.List buildUnnamed747() => [ buildSubnetworksScopedListWarningData(), buildSubnetworksScopedListWarningData(), ]; -void checkUnnamed738(core.List o) { +void checkUnnamed747(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkSubnetworksScopedListWarningData(o[0]); checkSubnetworksScopedListWarningData(o[1]); @@ -42523,7 +43124,7 @@ api.SubnetworksScopedListWarning buildSubnetworksScopedListWarning() { buildCounterSubnetworksScopedListWarning++; if (buildCounterSubnetworksScopedListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed738(); + o.data = buildUnnamed747(); o.message = 'foo'; } buildCounterSubnetworksScopedListWarning--; @@ -42537,7 +43138,7 @@ void checkSubnetworksScopedListWarning(api.SubnetworksScopedListWarning o) { o.code!, unittest.equals('foo'), ); - checkUnnamed738(o.data!); + checkUnnamed747(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -42551,7 +43152,7 @@ api.SubnetworksScopedList buildSubnetworksScopedList() { final o = api.SubnetworksScopedList(); 
buildCounterSubnetworksScopedList++; if (buildCounterSubnetworksScopedList < 3) { - o.subnetworks = buildUnnamed737(); + o.subnetworks = buildUnnamed746(); o.warning = buildSubnetworksScopedListWarning(); } buildCounterSubnetworksScopedList--; @@ -42561,7 +43162,7 @@ api.SubnetworksScopedList buildSubnetworksScopedList() { void checkSubnetworksScopedList(api.SubnetworksScopedList o) { buildCounterSubnetworksScopedList++; if (buildCounterSubnetworksScopedList < 3) { - checkUnnamed737(o.subnetworks!); + checkUnnamed746(o.subnetworks!); checkSubnetworksScopedListWarning(o.warning!); } buildCounterSubnetworksScopedList--; @@ -42657,12 +43258,12 @@ void checkTCPHealthCheck(api.TCPHealthCheck o) { buildCounterTCPHealthCheck--; } -core.List buildUnnamed739() => [ +core.List buildUnnamed748() => [ 'foo', 'foo', ]; -void checkUnnamed739(core.List o) { +void checkUnnamed748(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -42680,7 +43281,7 @@ api.Tags buildTags() { buildCounterTags++; if (buildCounterTags < 3) { o.fingerprint = 'foo'; - o.items = buildUnnamed739(); + o.items = buildUnnamed748(); } buildCounterTags--; return o; @@ -42693,7 +43294,7 @@ void checkTags(api.Tags o) { o.fingerprint!, unittest.equals('foo'), ); - checkUnnamed739(o.items!); + checkUnnamed748(o.items!); } buildCounterTags--; } @@ -42762,12 +43363,12 @@ void checkTargetGrpcProxy(api.TargetGrpcProxy o) { buildCounterTargetGrpcProxy--; } -core.List buildUnnamed740() => [ +core.List buildUnnamed749() => [ buildTargetGrpcProxy(), buildTargetGrpcProxy(), ]; -void checkUnnamed740(core.List o) { +void checkUnnamed749(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkTargetGrpcProxy(o[0]); checkTargetGrpcProxy(o[1]); @@ -42800,12 +43401,12 @@ void checkTargetGrpcProxyListWarningData(api.TargetGrpcProxyListWarningData o) { buildCounterTargetGrpcProxyListWarningData--; } -core.List buildUnnamed741() => [ +core.List buildUnnamed750() => [ buildTargetGrpcProxyListWarningData(), buildTargetGrpcProxyListWarningData(), ]; -void checkUnnamed741(core.List o) { +void checkUnnamed750(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkTargetGrpcProxyListWarningData(o[0]); checkTargetGrpcProxyListWarningData(o[1]); @@ -42817,7 +43418,7 @@ api.TargetGrpcProxyListWarning buildTargetGrpcProxyListWarning() { buildCounterTargetGrpcProxyListWarning++; if (buildCounterTargetGrpcProxyListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed741(); + o.data = buildUnnamed750(); o.message = 'foo'; } buildCounterTargetGrpcProxyListWarning--; @@ -42831,7 +43432,7 @@ void checkTargetGrpcProxyListWarning(api.TargetGrpcProxyListWarning o) { o.code!, unittest.equals('foo'), ); - checkUnnamed741(o.data!); + checkUnnamed750(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -42846,7 +43447,7 @@ api.TargetGrpcProxyList buildTargetGrpcProxyList() { buildCounterTargetGrpcProxyList++; if (buildCounterTargetGrpcProxyList < 3) { o.id = 'foo'; - o.items = buildUnnamed740(); + o.items = buildUnnamed749(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; @@ -42863,7 +43464,7 @@ void checkTargetGrpcProxyList(api.TargetGrpcProxyList o) { o.id!, unittest.equals('foo'), ); - checkUnnamed740(o.items!); + checkUnnamed749(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -42881,12 +43482,12 @@ void checkTargetGrpcProxyList(api.TargetGrpcProxyList o) { buildCounterTargetGrpcProxyList--; } -core.List buildUnnamed742() => [ +core.List buildUnnamed751() => [ buildTargetHttpProxy(), 
buildTargetHttpProxy(), ]; -void checkUnnamed742(core.List o) { +void checkUnnamed751(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkTargetHttpProxy(o[0]); checkTargetHttpProxy(o[1]); @@ -42921,12 +43522,12 @@ void checkTargetHttpProxiesScopedListWarningData( buildCounterTargetHttpProxiesScopedListWarningData--; } -core.List buildUnnamed743() => [ +core.List buildUnnamed752() => [ buildTargetHttpProxiesScopedListWarningData(), buildTargetHttpProxiesScopedListWarningData(), ]; -void checkUnnamed743(core.List o) { +void checkUnnamed752(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkTargetHttpProxiesScopedListWarningData(o[0]); checkTargetHttpProxiesScopedListWarningData(o[1]); @@ -42939,7 +43540,7 @@ api.TargetHttpProxiesScopedListWarning buildCounterTargetHttpProxiesScopedListWarning++; if (buildCounterTargetHttpProxiesScopedListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed743(); + o.data = buildUnnamed752(); o.message = 'foo'; } buildCounterTargetHttpProxiesScopedListWarning--; @@ -42954,7 +43555,7 @@ void checkTargetHttpProxiesScopedListWarning( o.code!, unittest.equals('foo'), ); - checkUnnamed743(o.data!); + checkUnnamed752(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -42968,7 +43569,7 @@ api.TargetHttpProxiesScopedList buildTargetHttpProxiesScopedList() { final o = api.TargetHttpProxiesScopedList(); buildCounterTargetHttpProxiesScopedList++; if (buildCounterTargetHttpProxiesScopedList < 3) { - o.targetHttpProxies = buildUnnamed742(); + o.targetHttpProxies = buildUnnamed751(); o.warning = buildTargetHttpProxiesScopedListWarning(); } buildCounterTargetHttpProxiesScopedList--; @@ -42978,7 +43579,7 @@ api.TargetHttpProxiesScopedList buildTargetHttpProxiesScopedList() { void checkTargetHttpProxiesScopedList(api.TargetHttpProxiesScopedList o) { buildCounterTargetHttpProxiesScopedList++; if (buildCounterTargetHttpProxiesScopedList < 3) { - checkUnnamed742(o.targetHttpProxies!); + checkUnnamed751(o.targetHttpProxies!); checkTargetHttpProxiesScopedListWarning(o.warning!); } buildCounterTargetHttpProxiesScopedList--; @@ -43053,23 +43654,23 @@ void checkTargetHttpProxy(api.TargetHttpProxy o) { buildCounterTargetHttpProxy--; } -core.Map buildUnnamed744() => { +core.Map buildUnnamed753() => { 'x': buildTargetHttpProxiesScopedList(), 'y': buildTargetHttpProxiesScopedList(), }; -void checkUnnamed744(core.Map o) { +void checkUnnamed753(core.Map o) { unittest.expect(o, unittest.hasLength(2)); checkTargetHttpProxiesScopedList(o['x']!); checkTargetHttpProxiesScopedList(o['y']!); } -core.List buildUnnamed745() => [ +core.List buildUnnamed754() => [ 'foo', 'foo', ]; -void checkUnnamed745(core.List o) { +void checkUnnamed754(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -43087,11 +43688,11 @@ api.TargetHttpProxyAggregatedList buildTargetHttpProxyAggregatedList() { buildCounterTargetHttpProxyAggregatedList++; if (buildCounterTargetHttpProxyAggregatedList < 3) { o.id = 'foo'; - o.items = buildUnnamed744(); + o.items = buildUnnamed753(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; - o.unreachables = buildUnnamed745(); + o.unreachables = buildUnnamed754(); } buildCounterTargetHttpProxyAggregatedList--; return o; @@ -43104,7 +43705,7 @@ void checkTargetHttpProxyAggregatedList(api.TargetHttpProxyAggregatedList o) { o.id!, unittest.equals('foo'), ); - checkUnnamed744(o.items!); + checkUnnamed753(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -43117,17 +43718,17 @@ void 
checkTargetHttpProxyAggregatedList(api.TargetHttpProxyAggregatedList o) { o.selfLink!, unittest.equals('foo'), ); - checkUnnamed745(o.unreachables!); + checkUnnamed754(o.unreachables!); } buildCounterTargetHttpProxyAggregatedList--; } -core.List buildUnnamed746() => [ +core.List buildUnnamed755() => [ buildTargetHttpProxy(), buildTargetHttpProxy(), ]; -void checkUnnamed746(core.List o) { +void checkUnnamed755(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkTargetHttpProxy(o[0]); checkTargetHttpProxy(o[1]); @@ -43160,12 +43761,12 @@ void checkTargetHttpProxyListWarningData(api.TargetHttpProxyListWarningData o) { buildCounterTargetHttpProxyListWarningData--; } -core.List buildUnnamed747() => [ +core.List buildUnnamed756() => [ buildTargetHttpProxyListWarningData(), buildTargetHttpProxyListWarningData(), ]; -void checkUnnamed747(core.List o) { +void checkUnnamed756(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkTargetHttpProxyListWarningData(o[0]); checkTargetHttpProxyListWarningData(o[1]); @@ -43177,7 +43778,7 @@ api.TargetHttpProxyListWarning buildTargetHttpProxyListWarning() { buildCounterTargetHttpProxyListWarning++; if (buildCounterTargetHttpProxyListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed747(); + o.data = buildUnnamed756(); o.message = 'foo'; } buildCounterTargetHttpProxyListWarning--; @@ -43191,7 +43792,7 @@ void checkTargetHttpProxyListWarning(api.TargetHttpProxyListWarning o) { o.code!, unittest.equals('foo'), ); - checkUnnamed747(o.data!); + checkUnnamed756(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -43206,7 +43807,7 @@ api.TargetHttpProxyList buildTargetHttpProxyList() { buildCounterTargetHttpProxyList++; if (buildCounterTargetHttpProxyList < 3) { o.id = 'foo'; - o.items = buildUnnamed746(); + o.items = buildUnnamed755(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; @@ -43223,7 +43824,7 @@ void checkTargetHttpProxyList(api.TargetHttpProxyList o) { o.id!, unittest.equals('foo'), ); - checkUnnamed746(o.items!); + checkUnnamed755(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -43241,12 +43842,12 @@ void checkTargetHttpProxyList(api.TargetHttpProxyList o) { buildCounterTargetHttpProxyList--; } -core.List buildUnnamed748() => [ +core.List buildUnnamed757() => [ buildTargetHttpsProxy(), buildTargetHttpsProxy(), ]; -void checkUnnamed748(core.List o) { +void checkUnnamed757(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkTargetHttpsProxy(o[0]); checkTargetHttpsProxy(o[1]); @@ -43281,12 +43882,12 @@ void checkTargetHttpsProxiesScopedListWarningData( buildCounterTargetHttpsProxiesScopedListWarningData--; } -core.List buildUnnamed749() => [ +core.List buildUnnamed758() => [ buildTargetHttpsProxiesScopedListWarningData(), buildTargetHttpsProxiesScopedListWarningData(), ]; -void checkUnnamed749(core.List o) { +void checkUnnamed758(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkTargetHttpsProxiesScopedListWarningData(o[0]); checkTargetHttpsProxiesScopedListWarningData(o[1]); @@ -43299,7 +43900,7 @@ api.TargetHttpsProxiesScopedListWarning buildCounterTargetHttpsProxiesScopedListWarning++; if (buildCounterTargetHttpsProxiesScopedListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed749(); + o.data = buildUnnamed758(); o.message = 'foo'; } buildCounterTargetHttpsProxiesScopedListWarning--; @@ -43314,7 +43915,7 @@ void checkTargetHttpsProxiesScopedListWarning( o.code!, unittest.equals('foo'), ); - checkUnnamed749(o.data!); + checkUnnamed758(o.data!); 
unittest.expect( o.message!, unittest.equals('foo'), @@ -43328,7 +43929,7 @@ api.TargetHttpsProxiesScopedList buildTargetHttpsProxiesScopedList() { final o = api.TargetHttpsProxiesScopedList(); buildCounterTargetHttpsProxiesScopedList++; if (buildCounterTargetHttpsProxiesScopedList < 3) { - o.targetHttpsProxies = buildUnnamed748(); + o.targetHttpsProxies = buildUnnamed757(); o.warning = buildTargetHttpsProxiesScopedListWarning(); } buildCounterTargetHttpsProxiesScopedList--; @@ -43338,7 +43939,7 @@ api.TargetHttpsProxiesScopedList buildTargetHttpsProxiesScopedList() { void checkTargetHttpsProxiesScopedList(api.TargetHttpsProxiesScopedList o) { buildCounterTargetHttpsProxiesScopedList++; if (buildCounterTargetHttpsProxiesScopedList < 3) { - checkUnnamed748(o.targetHttpsProxies!); + checkUnnamed757(o.targetHttpsProxies!); checkTargetHttpsProxiesScopedListWarning(o.warning!); } buildCounterTargetHttpsProxiesScopedList--; @@ -43392,12 +43993,12 @@ void checkTargetHttpsProxiesSetQuicOverrideRequest( buildCounterTargetHttpsProxiesSetQuicOverrideRequest--; } -core.List buildUnnamed750() => [ +core.List buildUnnamed759() => [ 'foo', 'foo', ]; -void checkUnnamed750(core.List o) { +void checkUnnamed759(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -43415,7 +44016,7 @@ api.TargetHttpsProxiesSetSslCertificatesRequest final o = api.TargetHttpsProxiesSetSslCertificatesRequest(); buildCounterTargetHttpsProxiesSetSslCertificatesRequest++; if (buildCounterTargetHttpsProxiesSetSslCertificatesRequest < 3) { - o.sslCertificates = buildUnnamed750(); + o.sslCertificates = buildUnnamed759(); } buildCounterTargetHttpsProxiesSetSslCertificatesRequest--; return o; @@ -43425,17 +44026,17 @@ void checkTargetHttpsProxiesSetSslCertificatesRequest( api.TargetHttpsProxiesSetSslCertificatesRequest o) { buildCounterTargetHttpsProxiesSetSslCertificatesRequest++; if (buildCounterTargetHttpsProxiesSetSslCertificatesRequest < 3) { - checkUnnamed750(o.sslCertificates!); + checkUnnamed759(o.sslCertificates!); } buildCounterTargetHttpsProxiesSetSslCertificatesRequest--; } -core.List buildUnnamed751() => [ +core.List buildUnnamed760() => [ 'foo', 'foo', ]; -void checkUnnamed751(core.List o) { +void checkUnnamed760(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -43466,7 +44067,7 @@ api.TargetHttpsProxy buildTargetHttpsProxy() { o.region = 'foo'; o.selfLink = 'foo'; o.serverTlsPolicy = 'foo'; - o.sslCertificates = buildUnnamed751(); + o.sslCertificates = buildUnnamed760(); o.sslPolicy = 'foo'; o.tlsEarlyData = 'foo'; o.urlMap = 'foo'; @@ -43531,7 +44132,7 @@ void checkTargetHttpsProxy(api.TargetHttpsProxy o) { o.serverTlsPolicy!, unittest.equals('foo'), ); - checkUnnamed751(o.sslCertificates!); + checkUnnamed760(o.sslCertificates!); unittest.expect( o.sslPolicy!, unittest.equals('foo'), @@ -43548,24 +44149,24 @@ void checkTargetHttpsProxy(api.TargetHttpsProxy o) { buildCounterTargetHttpsProxy--; } -core.Map buildUnnamed752() => { +core.Map buildUnnamed761() => { 'x': buildTargetHttpsProxiesScopedList(), 'y': buildTargetHttpsProxiesScopedList(), }; -void checkUnnamed752( +void checkUnnamed761( core.Map o) { unittest.expect(o, unittest.hasLength(2)); checkTargetHttpsProxiesScopedList(o['x']!); checkTargetHttpsProxiesScopedList(o['y']!); } -core.List buildUnnamed753() => [ +core.List buildUnnamed762() => [ 'foo', 'foo', ]; -void checkUnnamed753(core.List o) { +void checkUnnamed762(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], 
@@ -43606,12 +44207,12 @@ void checkTargetHttpsProxyAggregatedListWarningData( buildCounterTargetHttpsProxyAggregatedListWarningData--; } -core.List buildUnnamed754() => [ +core.List buildUnnamed763() => [ buildTargetHttpsProxyAggregatedListWarningData(), buildTargetHttpsProxyAggregatedListWarningData(), ]; -void checkUnnamed754( +void checkUnnamed763( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkTargetHttpsProxyAggregatedListWarningData(o[0]); @@ -43625,7 +44226,7 @@ api.TargetHttpsProxyAggregatedListWarning buildCounterTargetHttpsProxyAggregatedListWarning++; if (buildCounterTargetHttpsProxyAggregatedListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed754(); + o.data = buildUnnamed763(); o.message = 'foo'; } buildCounterTargetHttpsProxyAggregatedListWarning--; @@ -43640,7 +44241,7 @@ void checkTargetHttpsProxyAggregatedListWarning( o.code!, unittest.equals('foo'), ); - checkUnnamed754(o.data!); + checkUnnamed763(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -43655,11 +44256,11 @@ api.TargetHttpsProxyAggregatedList buildTargetHttpsProxyAggregatedList() { buildCounterTargetHttpsProxyAggregatedList++; if (buildCounterTargetHttpsProxyAggregatedList < 3) { o.id = 'foo'; - o.items = buildUnnamed752(); + o.items = buildUnnamed761(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; - o.unreachables = buildUnnamed753(); + o.unreachables = buildUnnamed762(); o.warning = buildTargetHttpsProxyAggregatedListWarning(); } buildCounterTargetHttpsProxyAggregatedList--; @@ -43673,7 +44274,7 @@ void checkTargetHttpsProxyAggregatedList(api.TargetHttpsProxyAggregatedList o) { o.id!, unittest.equals('foo'), ); - checkUnnamed752(o.items!); + checkUnnamed761(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -43686,18 +44287,18 @@ void checkTargetHttpsProxyAggregatedList(api.TargetHttpsProxyAggregatedList o) { o.selfLink!, unittest.equals('foo'), ); - checkUnnamed753(o.unreachables!); + checkUnnamed762(o.unreachables!); checkTargetHttpsProxyAggregatedListWarning(o.warning!); } buildCounterTargetHttpsProxyAggregatedList--; } -core.List buildUnnamed755() => [ +core.List buildUnnamed764() => [ buildTargetHttpsProxy(), buildTargetHttpsProxy(), ]; -void checkUnnamed755(core.List o) { +void checkUnnamed764(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkTargetHttpsProxy(o[0]); checkTargetHttpsProxy(o[1]); @@ -43731,12 +44332,12 @@ void checkTargetHttpsProxyListWarningData( buildCounterTargetHttpsProxyListWarningData--; } -core.List buildUnnamed756() => [ +core.List buildUnnamed765() => [ buildTargetHttpsProxyListWarningData(), buildTargetHttpsProxyListWarningData(), ]; -void checkUnnamed756(core.List o) { +void checkUnnamed765(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkTargetHttpsProxyListWarningData(o[0]); checkTargetHttpsProxyListWarningData(o[1]); @@ -43748,7 +44349,7 @@ api.TargetHttpsProxyListWarning buildTargetHttpsProxyListWarning() { buildCounterTargetHttpsProxyListWarning++; if (buildCounterTargetHttpsProxyListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed756(); + o.data = buildUnnamed765(); o.message = 'foo'; } buildCounterTargetHttpsProxyListWarning--; @@ -43762,7 +44363,7 @@ void checkTargetHttpsProxyListWarning(api.TargetHttpsProxyListWarning o) { o.code!, unittest.equals('foo'), ); - checkUnnamed756(o.data!); + checkUnnamed765(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -43777,7 +44378,7 @@ api.TargetHttpsProxyList buildTargetHttpsProxyList() { 
buildCounterTargetHttpsProxyList++; if (buildCounterTargetHttpsProxyList < 3) { o.id = 'foo'; - o.items = buildUnnamed755(); + o.items = buildUnnamed764(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; @@ -43794,7 +44395,7 @@ void checkTargetHttpsProxyList(api.TargetHttpsProxyList o) { o.id!, unittest.equals('foo'), ); - checkUnnamed755(o.items!); + checkUnnamed764(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -43884,23 +44485,23 @@ void checkTargetInstance(api.TargetInstance o) { buildCounterTargetInstance--; } -core.Map buildUnnamed757() => { +core.Map buildUnnamed766() => { 'x': buildTargetInstancesScopedList(), 'y': buildTargetInstancesScopedList(), }; -void checkUnnamed757(core.Map o) { +void checkUnnamed766(core.Map o) { unittest.expect(o, unittest.hasLength(2)); checkTargetInstancesScopedList(o['x']!); checkTargetInstancesScopedList(o['y']!); } -core.List buildUnnamed758() => [ +core.List buildUnnamed767() => [ 'foo', 'foo', ]; -void checkUnnamed758(core.List o) { +void checkUnnamed767(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -43941,12 +44542,12 @@ void checkTargetInstanceAggregatedListWarningData( buildCounterTargetInstanceAggregatedListWarningData--; } -core.List buildUnnamed759() => [ +core.List buildUnnamed768() => [ buildTargetInstanceAggregatedListWarningData(), buildTargetInstanceAggregatedListWarningData(), ]; -void checkUnnamed759(core.List o) { +void checkUnnamed768(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkTargetInstanceAggregatedListWarningData(o[0]); checkTargetInstanceAggregatedListWarningData(o[1]); @@ -43959,7 +44560,7 @@ api.TargetInstanceAggregatedListWarning buildCounterTargetInstanceAggregatedListWarning++; if (buildCounterTargetInstanceAggregatedListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed759(); + o.data = buildUnnamed768(); o.message = 'foo'; } buildCounterTargetInstanceAggregatedListWarning--; @@ -43974,7 +44575,7 @@ void checkTargetInstanceAggregatedListWarning( o.code!, unittest.equals('foo'), ); - checkUnnamed759(o.data!); + checkUnnamed768(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -43989,11 +44590,11 @@ api.TargetInstanceAggregatedList buildTargetInstanceAggregatedList() { buildCounterTargetInstanceAggregatedList++; if (buildCounterTargetInstanceAggregatedList < 3) { o.id = 'foo'; - o.items = buildUnnamed757(); + o.items = buildUnnamed766(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; - o.unreachables = buildUnnamed758(); + o.unreachables = buildUnnamed767(); o.warning = buildTargetInstanceAggregatedListWarning(); } buildCounterTargetInstanceAggregatedList--; @@ -44007,7 +44608,7 @@ void checkTargetInstanceAggregatedList(api.TargetInstanceAggregatedList o) { o.id!, unittest.equals('foo'), ); - checkUnnamed757(o.items!); + checkUnnamed766(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -44020,18 +44621,18 @@ void checkTargetInstanceAggregatedList(api.TargetInstanceAggregatedList o) { o.selfLink!, unittest.equals('foo'), ); - checkUnnamed758(o.unreachables!); + checkUnnamed767(o.unreachables!); checkTargetInstanceAggregatedListWarning(o.warning!); } buildCounterTargetInstanceAggregatedList--; } -core.List buildUnnamed760() => [ +core.List buildUnnamed769() => [ buildTargetInstance(), buildTargetInstance(), ]; -void checkUnnamed760(core.List o) { +void checkUnnamed769(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkTargetInstance(o[0]); checkTargetInstance(o[1]); @@ -44064,12 
+44665,12 @@ void checkTargetInstanceListWarningData(api.TargetInstanceListWarningData o) { buildCounterTargetInstanceListWarningData--; } -core.List buildUnnamed761() => [ +core.List buildUnnamed770() => [ buildTargetInstanceListWarningData(), buildTargetInstanceListWarningData(), ]; -void checkUnnamed761(core.List o) { +void checkUnnamed770(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkTargetInstanceListWarningData(o[0]); checkTargetInstanceListWarningData(o[1]); @@ -44081,7 +44682,7 @@ api.TargetInstanceListWarning buildTargetInstanceListWarning() { buildCounterTargetInstanceListWarning++; if (buildCounterTargetInstanceListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed761(); + o.data = buildUnnamed770(); o.message = 'foo'; } buildCounterTargetInstanceListWarning--; @@ -44095,7 +44696,7 @@ void checkTargetInstanceListWarning(api.TargetInstanceListWarning o) { o.code!, unittest.equals('foo'), ); - checkUnnamed761(o.data!); + checkUnnamed770(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -44110,7 +44711,7 @@ api.TargetInstanceList buildTargetInstanceList() { buildCounterTargetInstanceList++; if (buildCounterTargetInstanceList < 3) { o.id = 'foo'; - o.items = buildUnnamed760(); + o.items = buildUnnamed769(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; @@ -44127,7 +44728,7 @@ void checkTargetInstanceList(api.TargetInstanceList o) { o.id!, unittest.equals('foo'), ); - checkUnnamed760(o.items!); + checkUnnamed769(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -44145,12 +44746,12 @@ void checkTargetInstanceList(api.TargetInstanceList o) { buildCounterTargetInstanceList--; } -core.List buildUnnamed762() => [ +core.List buildUnnamed771() => [ buildTargetInstance(), buildTargetInstance(), ]; -void checkUnnamed762(core.List o) { +void checkUnnamed771(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkTargetInstance(o[0]); checkTargetInstance(o[1]); @@ -44185,12 +44786,12 @@ void checkTargetInstancesScopedListWarningData( buildCounterTargetInstancesScopedListWarningData--; } -core.List buildUnnamed763() => [ +core.List buildUnnamed772() => [ buildTargetInstancesScopedListWarningData(), buildTargetInstancesScopedListWarningData(), ]; -void checkUnnamed763(core.List o) { +void checkUnnamed772(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkTargetInstancesScopedListWarningData(o[0]); checkTargetInstancesScopedListWarningData(o[1]); @@ -44202,7 +44803,7 @@ api.TargetInstancesScopedListWarning buildTargetInstancesScopedListWarning() { buildCounterTargetInstancesScopedListWarning++; if (buildCounterTargetInstancesScopedListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed763(); + o.data = buildUnnamed772(); o.message = 'foo'; } buildCounterTargetInstancesScopedListWarning--; @@ -44217,7 +44818,7 @@ void checkTargetInstancesScopedListWarning( o.code!, unittest.equals('foo'), ); - checkUnnamed763(o.data!); + checkUnnamed772(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -44231,7 +44832,7 @@ api.TargetInstancesScopedList buildTargetInstancesScopedList() { final o = api.TargetInstancesScopedList(); buildCounterTargetInstancesScopedList++; if (buildCounterTargetInstancesScopedList < 3) { - o.targetInstances = buildUnnamed762(); + o.targetInstances = buildUnnamed771(); o.warning = buildTargetInstancesScopedListWarning(); } buildCounterTargetInstancesScopedList--; @@ -44241,18 +44842,18 @@ api.TargetInstancesScopedList buildTargetInstancesScopedList() { void 
checkTargetInstancesScopedList(api.TargetInstancesScopedList o) { buildCounterTargetInstancesScopedList++; if (buildCounterTargetInstancesScopedList < 3) { - checkUnnamed762(o.targetInstances!); + checkUnnamed771(o.targetInstances!); checkTargetInstancesScopedListWarning(o.warning!); } buildCounterTargetInstancesScopedList--; } -core.List buildUnnamed764() => [ +core.List buildUnnamed773() => [ 'foo', 'foo', ]; -void checkUnnamed764(core.List o) { +void checkUnnamed773(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -44264,12 +44865,12 @@ void checkUnnamed764(core.List o) { ); } -core.List buildUnnamed765() => [ +core.List buildUnnamed774() => [ 'foo', 'foo', ]; -void checkUnnamed765(core.List o) { +void checkUnnamed774(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -44290,9 +44891,9 @@ api.TargetPool buildTargetPool() { o.creationTimestamp = 'foo'; o.description = 'foo'; o.failoverRatio = 42.0; - o.healthChecks = buildUnnamed764(); + o.healthChecks = buildUnnamed773(); o.id = 'foo'; - o.instances = buildUnnamed765(); + o.instances = buildUnnamed774(); o.kind = 'foo'; o.name = 'foo'; o.region = 'foo'; @@ -44323,12 +44924,12 @@ void checkTargetPool(api.TargetPool o) { o.failoverRatio!, unittest.equals(42.0), ); - checkUnnamed764(o.healthChecks!); + checkUnnamed773(o.healthChecks!); unittest.expect( o.id!, unittest.equals('foo'), ); - checkUnnamed765(o.instances!); + checkUnnamed774(o.instances!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -44357,23 +44958,23 @@ void checkTargetPool(api.TargetPool o) { buildCounterTargetPool--; } -core.Map buildUnnamed766() => { +core.Map buildUnnamed775() => { 'x': buildTargetPoolsScopedList(), 'y': buildTargetPoolsScopedList(), }; -void checkUnnamed766(core.Map o) { +void checkUnnamed775(core.Map o) { unittest.expect(o, unittest.hasLength(2)); checkTargetPoolsScopedList(o['x']!); checkTargetPoolsScopedList(o['y']!); } -core.List buildUnnamed767() => [ +core.List buildUnnamed776() => [ 'foo', 'foo', ]; -void checkUnnamed767(core.List o) { +void checkUnnamed776(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -44414,12 +45015,12 @@ void checkTargetPoolAggregatedListWarningData( buildCounterTargetPoolAggregatedListWarningData--; } -core.List buildUnnamed768() => [ +core.List buildUnnamed777() => [ buildTargetPoolAggregatedListWarningData(), buildTargetPoolAggregatedListWarningData(), ]; -void checkUnnamed768(core.List o) { +void checkUnnamed777(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkTargetPoolAggregatedListWarningData(o[0]); checkTargetPoolAggregatedListWarningData(o[1]); @@ -44431,7 +45032,7 @@ api.TargetPoolAggregatedListWarning buildTargetPoolAggregatedListWarning() { buildCounterTargetPoolAggregatedListWarning++; if (buildCounterTargetPoolAggregatedListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed768(); + o.data = buildUnnamed777(); o.message = 'foo'; } buildCounterTargetPoolAggregatedListWarning--; @@ -44446,7 +45047,7 @@ void checkTargetPoolAggregatedListWarning( o.code!, unittest.equals('foo'), ); - checkUnnamed768(o.data!); + checkUnnamed777(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -44461,11 +45062,11 @@ api.TargetPoolAggregatedList buildTargetPoolAggregatedList() { buildCounterTargetPoolAggregatedList++; if (buildCounterTargetPoolAggregatedList < 3) { o.id = 'foo'; - o.items = buildUnnamed766(); + o.items = buildUnnamed775(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink 
= 'foo'; - o.unreachables = buildUnnamed767(); + o.unreachables = buildUnnamed776(); o.warning = buildTargetPoolAggregatedListWarning(); } buildCounterTargetPoolAggregatedList--; @@ -44479,7 +45080,7 @@ void checkTargetPoolAggregatedList(api.TargetPoolAggregatedList o) { o.id!, unittest.equals('foo'), ); - checkUnnamed766(o.items!); + checkUnnamed775(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -44492,18 +45093,18 @@ void checkTargetPoolAggregatedList(api.TargetPoolAggregatedList o) { o.selfLink!, unittest.equals('foo'), ); - checkUnnamed767(o.unreachables!); + checkUnnamed776(o.unreachables!); checkTargetPoolAggregatedListWarning(o.warning!); } buildCounterTargetPoolAggregatedList--; } -core.List buildUnnamed769() => [ +core.List buildUnnamed778() => [ buildHealthStatus(), buildHealthStatus(), ]; -void checkUnnamed769(core.List o) { +void checkUnnamed778(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkHealthStatus(o[0]); checkHealthStatus(o[1]); @@ -44514,7 +45115,7 @@ api.TargetPoolInstanceHealth buildTargetPoolInstanceHealth() { final o = api.TargetPoolInstanceHealth(); buildCounterTargetPoolInstanceHealth++; if (buildCounterTargetPoolInstanceHealth < 3) { - o.healthStatus = buildUnnamed769(); + o.healthStatus = buildUnnamed778(); o.kind = 'foo'; } buildCounterTargetPoolInstanceHealth--; @@ -44524,7 +45125,7 @@ api.TargetPoolInstanceHealth buildTargetPoolInstanceHealth() { void checkTargetPoolInstanceHealth(api.TargetPoolInstanceHealth o) { buildCounterTargetPoolInstanceHealth++; if (buildCounterTargetPoolInstanceHealth < 3) { - checkUnnamed769(o.healthStatus!); + checkUnnamed778(o.healthStatus!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -44533,12 +45134,12 @@ void checkTargetPoolInstanceHealth(api.TargetPoolInstanceHealth o) { buildCounterTargetPoolInstanceHealth--; } -core.List buildUnnamed770() => [ +core.List buildUnnamed779() => [ buildTargetPool(), buildTargetPool(), ]; -void checkUnnamed770(core.List o) { +void checkUnnamed779(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkTargetPool(o[0]); checkTargetPool(o[1]); @@ -44571,12 +45172,12 @@ void checkTargetPoolListWarningData(api.TargetPoolListWarningData o) { buildCounterTargetPoolListWarningData--; } -core.List buildUnnamed771() => [ +core.List buildUnnamed780() => [ buildTargetPoolListWarningData(), buildTargetPoolListWarningData(), ]; -void checkUnnamed771(core.List o) { +void checkUnnamed780(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkTargetPoolListWarningData(o[0]); checkTargetPoolListWarningData(o[1]); @@ -44588,7 +45189,7 @@ api.TargetPoolListWarning buildTargetPoolListWarning() { buildCounterTargetPoolListWarning++; if (buildCounterTargetPoolListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed771(); + o.data = buildUnnamed780(); o.message = 'foo'; } buildCounterTargetPoolListWarning--; @@ -44602,7 +45203,7 @@ void checkTargetPoolListWarning(api.TargetPoolListWarning o) { o.code!, unittest.equals('foo'), ); - checkUnnamed771(o.data!); + checkUnnamed780(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -44617,7 +45218,7 @@ api.TargetPoolList buildTargetPoolList() { buildCounterTargetPoolList++; if (buildCounterTargetPoolList < 3) { o.id = 'foo'; - o.items = buildUnnamed770(); + o.items = buildUnnamed779(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; @@ -44634,7 +45235,7 @@ void checkTargetPoolList(api.TargetPoolList o) { o.id!, unittest.equals('foo'), ); - checkUnnamed770(o.items!); + 
checkUnnamed779(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -44652,12 +45253,12 @@ void checkTargetPoolList(api.TargetPoolList o) { buildCounterTargetPoolList--; } -core.List buildUnnamed772() => [ +core.List buildUnnamed781() => [ buildHealthCheckReference(), buildHealthCheckReference(), ]; -void checkUnnamed772(core.List o) { +void checkUnnamed781(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkHealthCheckReference(o[0]); checkHealthCheckReference(o[1]); @@ -44668,7 +45269,7 @@ api.TargetPoolsAddHealthCheckRequest buildTargetPoolsAddHealthCheckRequest() { final o = api.TargetPoolsAddHealthCheckRequest(); buildCounterTargetPoolsAddHealthCheckRequest++; if (buildCounterTargetPoolsAddHealthCheckRequest < 3) { - o.healthChecks = buildUnnamed772(); + o.healthChecks = buildUnnamed781(); } buildCounterTargetPoolsAddHealthCheckRequest--; return o; @@ -44678,17 +45279,17 @@ void checkTargetPoolsAddHealthCheckRequest( api.TargetPoolsAddHealthCheckRequest o) { buildCounterTargetPoolsAddHealthCheckRequest++; if (buildCounterTargetPoolsAddHealthCheckRequest < 3) { - checkUnnamed772(o.healthChecks!); + checkUnnamed781(o.healthChecks!); } buildCounterTargetPoolsAddHealthCheckRequest--; } -core.List buildUnnamed773() => [ +core.List buildUnnamed782() => [ buildInstanceReference(), buildInstanceReference(), ]; -void checkUnnamed773(core.List o) { +void checkUnnamed782(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkInstanceReference(o[0]); checkInstanceReference(o[1]); @@ -44699,7 +45300,7 @@ api.TargetPoolsAddInstanceRequest buildTargetPoolsAddInstanceRequest() { final o = api.TargetPoolsAddInstanceRequest(); buildCounterTargetPoolsAddInstanceRequest++; if (buildCounterTargetPoolsAddInstanceRequest < 3) { - o.instances = buildUnnamed773(); + o.instances = buildUnnamed782(); } buildCounterTargetPoolsAddInstanceRequest--; return o; @@ -44708,17 +45309,17 @@ api.TargetPoolsAddInstanceRequest buildTargetPoolsAddInstanceRequest() { void checkTargetPoolsAddInstanceRequest(api.TargetPoolsAddInstanceRequest o) { buildCounterTargetPoolsAddInstanceRequest++; if (buildCounterTargetPoolsAddInstanceRequest < 3) { - checkUnnamed773(o.instances!); + checkUnnamed782(o.instances!); } buildCounterTargetPoolsAddInstanceRequest--; } -core.List buildUnnamed774() => [ +core.List buildUnnamed783() => [ buildHealthCheckReference(), buildHealthCheckReference(), ]; -void checkUnnamed774(core.List o) { +void checkUnnamed783(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkHealthCheckReference(o[0]); checkHealthCheckReference(o[1]); @@ -44730,7 +45331,7 @@ api.TargetPoolsRemoveHealthCheckRequest final o = api.TargetPoolsRemoveHealthCheckRequest(); buildCounterTargetPoolsRemoveHealthCheckRequest++; if (buildCounterTargetPoolsRemoveHealthCheckRequest < 3) { - o.healthChecks = buildUnnamed774(); + o.healthChecks = buildUnnamed783(); } buildCounterTargetPoolsRemoveHealthCheckRequest--; return o; @@ -44740,17 +45341,17 @@ void checkTargetPoolsRemoveHealthCheckRequest( api.TargetPoolsRemoveHealthCheckRequest o) { buildCounterTargetPoolsRemoveHealthCheckRequest++; if (buildCounterTargetPoolsRemoveHealthCheckRequest < 3) { - checkUnnamed774(o.healthChecks!); + checkUnnamed783(o.healthChecks!); } buildCounterTargetPoolsRemoveHealthCheckRequest--; } -core.List buildUnnamed775() => [ +core.List buildUnnamed784() => [ buildInstanceReference(), buildInstanceReference(), ]; -void checkUnnamed775(core.List o) { +void checkUnnamed784(core.List o) { unittest.expect(o, 
unittest.hasLength(2)); checkInstanceReference(o[0]); checkInstanceReference(o[1]); @@ -44761,7 +45362,7 @@ api.TargetPoolsRemoveInstanceRequest buildTargetPoolsRemoveInstanceRequest() { final o = api.TargetPoolsRemoveInstanceRequest(); buildCounterTargetPoolsRemoveInstanceRequest++; if (buildCounterTargetPoolsRemoveInstanceRequest < 3) { - o.instances = buildUnnamed775(); + o.instances = buildUnnamed784(); } buildCounterTargetPoolsRemoveInstanceRequest--; return o; @@ -44771,17 +45372,17 @@ void checkTargetPoolsRemoveInstanceRequest( api.TargetPoolsRemoveInstanceRequest o) { buildCounterTargetPoolsRemoveInstanceRequest++; if (buildCounterTargetPoolsRemoveInstanceRequest < 3) { - checkUnnamed775(o.instances!); + checkUnnamed784(o.instances!); } buildCounterTargetPoolsRemoveInstanceRequest--; } -core.List buildUnnamed776() => [ +core.List buildUnnamed785() => [ buildTargetPool(), buildTargetPool(), ]; -void checkUnnamed776(core.List o) { +void checkUnnamed785(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkTargetPool(o[0]); checkTargetPool(o[1]); @@ -44815,12 +45416,12 @@ void checkTargetPoolsScopedListWarningData( buildCounterTargetPoolsScopedListWarningData--; } -core.List buildUnnamed777() => [ +core.List buildUnnamed786() => [ buildTargetPoolsScopedListWarningData(), buildTargetPoolsScopedListWarningData(), ]; -void checkUnnamed777(core.List o) { +void checkUnnamed786(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkTargetPoolsScopedListWarningData(o[0]); checkTargetPoolsScopedListWarningData(o[1]); @@ -44832,7 +45433,7 @@ api.TargetPoolsScopedListWarning buildTargetPoolsScopedListWarning() { buildCounterTargetPoolsScopedListWarning++; if (buildCounterTargetPoolsScopedListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed777(); + o.data = buildUnnamed786(); o.message = 'foo'; } buildCounterTargetPoolsScopedListWarning--; @@ -44846,7 +45447,7 @@ void checkTargetPoolsScopedListWarning(api.TargetPoolsScopedListWarning o) { o.code!, unittest.equals('foo'), ); - checkUnnamed777(o.data!); + checkUnnamed786(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -44860,7 +45461,7 @@ api.TargetPoolsScopedList buildTargetPoolsScopedList() { final o = api.TargetPoolsScopedList(); buildCounterTargetPoolsScopedList++; if (buildCounterTargetPoolsScopedList < 3) { - o.targetPools = buildUnnamed776(); + o.targetPools = buildUnnamed785(); o.warning = buildTargetPoolsScopedListWarning(); } buildCounterTargetPoolsScopedList--; @@ -44870,7 +45471,7 @@ api.TargetPoolsScopedList buildTargetPoolsScopedList() { void checkTargetPoolsScopedList(api.TargetPoolsScopedList o) { buildCounterTargetPoolsScopedList++; if (buildCounterTargetPoolsScopedList < 3) { - checkUnnamed776(o.targetPools!); + checkUnnamed785(o.targetPools!); checkTargetPoolsScopedListWarning(o.warning!); } buildCounterTargetPoolsScopedList--; @@ -44970,12 +45571,12 @@ void checkTargetSslProxiesSetProxyHeaderRequest( buildCounterTargetSslProxiesSetProxyHeaderRequest--; } -core.List buildUnnamed778() => [ +core.List buildUnnamed787() => [ 'foo', 'foo', ]; -void checkUnnamed778(core.List o) { +void checkUnnamed787(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -44993,7 +45594,7 @@ api.TargetSslProxiesSetSslCertificatesRequest final o = api.TargetSslProxiesSetSslCertificatesRequest(); buildCounterTargetSslProxiesSetSslCertificatesRequest++; if (buildCounterTargetSslProxiesSetSslCertificatesRequest < 3) { - o.sslCertificates = buildUnnamed778(); + o.sslCertificates = 
buildUnnamed787(); } buildCounterTargetSslProxiesSetSslCertificatesRequest--; return o; @@ -45003,17 +45604,17 @@ void checkTargetSslProxiesSetSslCertificatesRequest( api.TargetSslProxiesSetSslCertificatesRequest o) { buildCounterTargetSslProxiesSetSslCertificatesRequest++; if (buildCounterTargetSslProxiesSetSslCertificatesRequest < 3) { - checkUnnamed778(o.sslCertificates!); + checkUnnamed787(o.sslCertificates!); } buildCounterTargetSslProxiesSetSslCertificatesRequest--; } -core.List buildUnnamed779() => [ +core.List buildUnnamed788() => [ 'foo', 'foo', ]; -void checkUnnamed779(core.List o) { +void checkUnnamed788(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -45039,7 +45640,7 @@ api.TargetSslProxy buildTargetSslProxy() { o.proxyHeader = 'foo'; o.selfLink = 'foo'; o.service = 'foo'; - o.sslCertificates = buildUnnamed779(); + o.sslCertificates = buildUnnamed788(); o.sslPolicy = 'foo'; } buildCounterTargetSslProxy--; @@ -45085,7 +45686,7 @@ void checkTargetSslProxy(api.TargetSslProxy o) { o.service!, unittest.equals('foo'), ); - checkUnnamed779(o.sslCertificates!); + checkUnnamed788(o.sslCertificates!); unittest.expect( o.sslPolicy!, unittest.equals('foo'), @@ -45094,12 +45695,12 @@ void checkTargetSslProxy(api.TargetSslProxy o) { buildCounterTargetSslProxy--; } -core.List buildUnnamed780() => [ +core.List buildUnnamed789() => [ buildTargetSslProxy(), buildTargetSslProxy(), ]; -void checkUnnamed780(core.List o) { +void checkUnnamed789(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkTargetSslProxy(o[0]); checkTargetSslProxy(o[1]); @@ -45132,12 +45733,12 @@ void checkTargetSslProxyListWarningData(api.TargetSslProxyListWarningData o) { buildCounterTargetSslProxyListWarningData--; } -core.List buildUnnamed781() => [ +core.List buildUnnamed790() => [ buildTargetSslProxyListWarningData(), buildTargetSslProxyListWarningData(), ]; -void checkUnnamed781(core.List o) { +void checkUnnamed790(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkTargetSslProxyListWarningData(o[0]); checkTargetSslProxyListWarningData(o[1]); @@ -45149,7 +45750,7 @@ api.TargetSslProxyListWarning buildTargetSslProxyListWarning() { buildCounterTargetSslProxyListWarning++; if (buildCounterTargetSslProxyListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed781(); + o.data = buildUnnamed790(); o.message = 'foo'; } buildCounterTargetSslProxyListWarning--; @@ -45163,7 +45764,7 @@ void checkTargetSslProxyListWarning(api.TargetSslProxyListWarning o) { o.code!, unittest.equals('foo'), ); - checkUnnamed781(o.data!); + checkUnnamed790(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -45178,7 +45779,7 @@ api.TargetSslProxyList buildTargetSslProxyList() { buildCounterTargetSslProxyList++; if (buildCounterTargetSslProxyList < 3) { o.id = 'foo'; - o.items = buildUnnamed780(); + o.items = buildUnnamed789(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; @@ -45195,7 +45796,7 @@ void checkTargetSslProxyList(api.TargetSslProxyList o) { o.id!, unittest.equals('foo'), ); - checkUnnamed780(o.items!); + checkUnnamed789(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -45213,12 +45814,12 @@ void checkTargetSslProxyList(api.TargetSslProxyList o) { buildCounterTargetSslProxyList--; } -core.List buildUnnamed782() => [ +core.List buildUnnamed791() => [ buildTargetTcpProxy(), buildTargetTcpProxy(), ]; -void checkUnnamed782(core.List o) { +void checkUnnamed791(core.List o) { unittest.expect(o, unittest.hasLength(2)); 
checkTargetTcpProxy(o[0]); checkTargetTcpProxy(o[1]); @@ -45253,12 +45854,12 @@ void checkTargetTcpProxiesScopedListWarningData( buildCounterTargetTcpProxiesScopedListWarningData--; } -core.List buildUnnamed783() => [ +core.List buildUnnamed792() => [ buildTargetTcpProxiesScopedListWarningData(), buildTargetTcpProxiesScopedListWarningData(), ]; -void checkUnnamed783(core.List o) { +void checkUnnamed792(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkTargetTcpProxiesScopedListWarningData(o[0]); checkTargetTcpProxiesScopedListWarningData(o[1]); @@ -45270,7 +45871,7 @@ api.TargetTcpProxiesScopedListWarning buildTargetTcpProxiesScopedListWarning() { buildCounterTargetTcpProxiesScopedListWarning++; if (buildCounterTargetTcpProxiesScopedListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed783(); + o.data = buildUnnamed792(); o.message = 'foo'; } buildCounterTargetTcpProxiesScopedListWarning--; @@ -45285,7 +45886,7 @@ void checkTargetTcpProxiesScopedListWarning( o.code!, unittest.equals('foo'), ); - checkUnnamed783(o.data!); + checkUnnamed792(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -45299,7 +45900,7 @@ api.TargetTcpProxiesScopedList buildTargetTcpProxiesScopedList() { final o = api.TargetTcpProxiesScopedList(); buildCounterTargetTcpProxiesScopedList++; if (buildCounterTargetTcpProxiesScopedList < 3) { - o.targetTcpProxies = buildUnnamed782(); + o.targetTcpProxies = buildUnnamed791(); o.warning = buildTargetTcpProxiesScopedListWarning(); } buildCounterTargetTcpProxiesScopedList--; @@ -45309,7 +45910,7 @@ api.TargetTcpProxiesScopedList buildTargetTcpProxiesScopedList() { void checkTargetTcpProxiesScopedList(api.TargetTcpProxiesScopedList o) { buildCounterTargetTcpProxiesScopedList++; if (buildCounterTargetTcpProxiesScopedList < 3) { - checkUnnamed782(o.targetTcpProxies!); + checkUnnamed791(o.targetTcpProxies!); checkTargetTcpProxiesScopedListWarning(o.warning!); } buildCounterTargetTcpProxiesScopedList--; @@ -45427,23 +46028,23 @@ void checkTargetTcpProxy(api.TargetTcpProxy o) { buildCounterTargetTcpProxy--; } -core.Map buildUnnamed784() => { +core.Map buildUnnamed793() => { 'x': buildTargetTcpProxiesScopedList(), 'y': buildTargetTcpProxiesScopedList(), }; -void checkUnnamed784(core.Map o) { +void checkUnnamed793(core.Map o) { unittest.expect(o, unittest.hasLength(2)); checkTargetTcpProxiesScopedList(o['x']!); checkTargetTcpProxiesScopedList(o['y']!); } -core.List buildUnnamed785() => [ +core.List buildUnnamed794() => [ 'foo', 'foo', ]; -void checkUnnamed785(core.List o) { +void checkUnnamed794(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -45484,12 +46085,12 @@ void checkTargetTcpProxyAggregatedListWarningData( buildCounterTargetTcpProxyAggregatedListWarningData--; } -core.List buildUnnamed786() => [ +core.List buildUnnamed795() => [ buildTargetTcpProxyAggregatedListWarningData(), buildTargetTcpProxyAggregatedListWarningData(), ]; -void checkUnnamed786(core.List o) { +void checkUnnamed795(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkTargetTcpProxyAggregatedListWarningData(o[0]); checkTargetTcpProxyAggregatedListWarningData(o[1]); @@ -45502,7 +46103,7 @@ api.TargetTcpProxyAggregatedListWarning buildCounterTargetTcpProxyAggregatedListWarning++; if (buildCounterTargetTcpProxyAggregatedListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed786(); + o.data = buildUnnamed795(); o.message = 'foo'; } buildCounterTargetTcpProxyAggregatedListWarning--; @@ -45517,7 +46118,7 @@ void 
checkTargetTcpProxyAggregatedListWarning( o.code!, unittest.equals('foo'), ); - checkUnnamed786(o.data!); + checkUnnamed795(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -45532,11 +46133,11 @@ api.TargetTcpProxyAggregatedList buildTargetTcpProxyAggregatedList() { buildCounterTargetTcpProxyAggregatedList++; if (buildCounterTargetTcpProxyAggregatedList < 3) { o.id = 'foo'; - o.items = buildUnnamed784(); + o.items = buildUnnamed793(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; - o.unreachables = buildUnnamed785(); + o.unreachables = buildUnnamed794(); o.warning = buildTargetTcpProxyAggregatedListWarning(); } buildCounterTargetTcpProxyAggregatedList--; @@ -45550,7 +46151,7 @@ void checkTargetTcpProxyAggregatedList(api.TargetTcpProxyAggregatedList o) { o.id!, unittest.equals('foo'), ); - checkUnnamed784(o.items!); + checkUnnamed793(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -45563,18 +46164,18 @@ void checkTargetTcpProxyAggregatedList(api.TargetTcpProxyAggregatedList o) { o.selfLink!, unittest.equals('foo'), ); - checkUnnamed785(o.unreachables!); + checkUnnamed794(o.unreachables!); checkTargetTcpProxyAggregatedListWarning(o.warning!); } buildCounterTargetTcpProxyAggregatedList--; } -core.List buildUnnamed787() => [ +core.List buildUnnamed796() => [ buildTargetTcpProxy(), buildTargetTcpProxy(), ]; -void checkUnnamed787(core.List o) { +void checkUnnamed796(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkTargetTcpProxy(o[0]); checkTargetTcpProxy(o[1]); @@ -45607,12 +46208,12 @@ void checkTargetTcpProxyListWarningData(api.TargetTcpProxyListWarningData o) { buildCounterTargetTcpProxyListWarningData--; } -core.List buildUnnamed788() => [ +core.List buildUnnamed797() => [ buildTargetTcpProxyListWarningData(), buildTargetTcpProxyListWarningData(), ]; -void checkUnnamed788(core.List o) { +void checkUnnamed797(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkTargetTcpProxyListWarningData(o[0]); checkTargetTcpProxyListWarningData(o[1]); @@ -45624,7 +46225,7 @@ api.TargetTcpProxyListWarning buildTargetTcpProxyListWarning() { buildCounterTargetTcpProxyListWarning++; if (buildCounterTargetTcpProxyListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed788(); + o.data = buildUnnamed797(); o.message = 'foo'; } buildCounterTargetTcpProxyListWarning--; @@ -45638,7 +46239,7 @@ void checkTargetTcpProxyListWarning(api.TargetTcpProxyListWarning o) { o.code!, unittest.equals('foo'), ); - checkUnnamed788(o.data!); + checkUnnamed797(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -45653,7 +46254,7 @@ api.TargetTcpProxyList buildTargetTcpProxyList() { buildCounterTargetTcpProxyList++; if (buildCounterTargetTcpProxyList < 3) { o.id = 'foo'; - o.items = buildUnnamed787(); + o.items = buildUnnamed796(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; @@ -45670,7 +46271,7 @@ void checkTargetTcpProxyList(api.TargetTcpProxyList o) { o.id!, unittest.equals('foo'), ); - checkUnnamed787(o.items!); + checkUnnamed796(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -45688,12 +46289,12 @@ void checkTargetTcpProxyList(api.TargetTcpProxyList o) { buildCounterTargetTcpProxyList--; } -core.List buildUnnamed789() => [ +core.List buildUnnamed798() => [ 'foo', 'foo', ]; -void checkUnnamed789(core.List o) { +void checkUnnamed798(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -45705,12 +46306,12 @@ void checkUnnamed789(core.List o) { ); } -core.Map buildUnnamed790() => { 
+core.Map buildUnnamed799() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed790(core.Map o) { +void checkUnnamed799(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -45722,12 +46323,12 @@ void checkUnnamed790(core.Map o) { ); } -core.List buildUnnamed791() => [ +core.List buildUnnamed800() => [ 'foo', 'foo', ]; -void checkUnnamed791(core.List o) { +void checkUnnamed800(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -45746,17 +46347,17 @@ api.TargetVpnGateway buildTargetVpnGateway() { if (buildCounterTargetVpnGateway < 3) { o.creationTimestamp = 'foo'; o.description = 'foo'; - o.forwardingRules = buildUnnamed789(); + o.forwardingRules = buildUnnamed798(); o.id = 'foo'; o.kind = 'foo'; o.labelFingerprint = 'foo'; - o.labels = buildUnnamed790(); + o.labels = buildUnnamed799(); o.name = 'foo'; o.network = 'foo'; o.region = 'foo'; o.selfLink = 'foo'; o.status = 'foo'; - o.tunnels = buildUnnamed791(); + o.tunnels = buildUnnamed800(); } buildCounterTargetVpnGateway--; return o; @@ -45773,7 +46374,7 @@ void checkTargetVpnGateway(api.TargetVpnGateway o) { o.description!, unittest.equals('foo'), ); - checkUnnamed789(o.forwardingRules!); + checkUnnamed798(o.forwardingRules!); unittest.expect( o.id!, unittest.equals('foo'), @@ -45786,7 +46387,7 @@ void checkTargetVpnGateway(api.TargetVpnGateway o) { o.labelFingerprint!, unittest.equals('foo'), ); - checkUnnamed790(o.labels!); + checkUnnamed799(o.labels!); unittest.expect( o.name!, unittest.equals('foo'), @@ -45807,28 +46408,28 @@ void checkTargetVpnGateway(api.TargetVpnGateway o) { o.status!, unittest.equals('foo'), ); - checkUnnamed791(o.tunnels!); + checkUnnamed800(o.tunnels!); } buildCounterTargetVpnGateway--; } -core.Map buildUnnamed792() => { +core.Map buildUnnamed801() => { 'x': buildTargetVpnGatewaysScopedList(), 'y': buildTargetVpnGatewaysScopedList(), }; -void checkUnnamed792(core.Map o) { +void checkUnnamed801(core.Map o) { unittest.expect(o, unittest.hasLength(2)); checkTargetVpnGatewaysScopedList(o['x']!); checkTargetVpnGatewaysScopedList(o['y']!); } -core.List buildUnnamed793() => [ +core.List buildUnnamed802() => [ 'foo', 'foo', ]; -void checkUnnamed793(core.List o) { +void checkUnnamed802(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -45869,12 +46470,12 @@ void checkTargetVpnGatewayAggregatedListWarningData( buildCounterTargetVpnGatewayAggregatedListWarningData--; } -core.List buildUnnamed794() => [ +core.List buildUnnamed803() => [ buildTargetVpnGatewayAggregatedListWarningData(), buildTargetVpnGatewayAggregatedListWarningData(), ]; -void checkUnnamed794( +void checkUnnamed803( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkTargetVpnGatewayAggregatedListWarningData(o[0]); @@ -45888,7 +46489,7 @@ api.TargetVpnGatewayAggregatedListWarning buildCounterTargetVpnGatewayAggregatedListWarning++; if (buildCounterTargetVpnGatewayAggregatedListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed794(); + o.data = buildUnnamed803(); o.message = 'foo'; } buildCounterTargetVpnGatewayAggregatedListWarning--; @@ -45903,7 +46504,7 @@ void checkTargetVpnGatewayAggregatedListWarning( o.code!, unittest.equals('foo'), ); - checkUnnamed794(o.data!); + checkUnnamed803(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -45918,11 +46519,11 @@ api.TargetVpnGatewayAggregatedList buildTargetVpnGatewayAggregatedList() { buildCounterTargetVpnGatewayAggregatedList++; if (buildCounterTargetVpnGatewayAggregatedList < 3) { 
o.id = 'foo'; - o.items = buildUnnamed792(); + o.items = buildUnnamed801(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; - o.unreachables = buildUnnamed793(); + o.unreachables = buildUnnamed802(); o.warning = buildTargetVpnGatewayAggregatedListWarning(); } buildCounterTargetVpnGatewayAggregatedList--; @@ -45936,7 +46537,7 @@ void checkTargetVpnGatewayAggregatedList(api.TargetVpnGatewayAggregatedList o) { o.id!, unittest.equals('foo'), ); - checkUnnamed792(o.items!); + checkUnnamed801(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -45949,18 +46550,18 @@ void checkTargetVpnGatewayAggregatedList(api.TargetVpnGatewayAggregatedList o) { o.selfLink!, unittest.equals('foo'), ); - checkUnnamed793(o.unreachables!); + checkUnnamed802(o.unreachables!); checkTargetVpnGatewayAggregatedListWarning(o.warning!); } buildCounterTargetVpnGatewayAggregatedList--; } -core.List buildUnnamed795() => [ +core.List buildUnnamed804() => [ buildTargetVpnGateway(), buildTargetVpnGateway(), ]; -void checkUnnamed795(core.List o) { +void checkUnnamed804(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkTargetVpnGateway(o[0]); checkTargetVpnGateway(o[1]); @@ -45994,12 +46595,12 @@ void checkTargetVpnGatewayListWarningData( buildCounterTargetVpnGatewayListWarningData--; } -core.List buildUnnamed796() => [ +core.List buildUnnamed805() => [ buildTargetVpnGatewayListWarningData(), buildTargetVpnGatewayListWarningData(), ]; -void checkUnnamed796(core.List o) { +void checkUnnamed805(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkTargetVpnGatewayListWarningData(o[0]); checkTargetVpnGatewayListWarningData(o[1]); @@ -46011,7 +46612,7 @@ api.TargetVpnGatewayListWarning buildTargetVpnGatewayListWarning() { buildCounterTargetVpnGatewayListWarning++; if (buildCounterTargetVpnGatewayListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed796(); + o.data = buildUnnamed805(); o.message = 'foo'; } buildCounterTargetVpnGatewayListWarning--; @@ -46025,7 +46626,7 @@ void checkTargetVpnGatewayListWarning(api.TargetVpnGatewayListWarning o) { o.code!, unittest.equals('foo'), ); - checkUnnamed796(o.data!); + checkUnnamed805(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -46040,7 +46641,7 @@ api.TargetVpnGatewayList buildTargetVpnGatewayList() { buildCounterTargetVpnGatewayList++; if (buildCounterTargetVpnGatewayList < 3) { o.id = 'foo'; - o.items = buildUnnamed795(); + o.items = buildUnnamed804(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; @@ -46057,7 +46658,7 @@ void checkTargetVpnGatewayList(api.TargetVpnGatewayList o) { o.id!, unittest.equals('foo'), ); - checkUnnamed795(o.items!); + checkUnnamed804(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -46075,12 +46676,12 @@ void checkTargetVpnGatewayList(api.TargetVpnGatewayList o) { buildCounterTargetVpnGatewayList--; } -core.List buildUnnamed797() => [ +core.List buildUnnamed806() => [ buildTargetVpnGateway(), buildTargetVpnGateway(), ]; -void checkUnnamed797(core.List o) { +void checkUnnamed806(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkTargetVpnGateway(o[0]); checkTargetVpnGateway(o[1]); @@ -46115,12 +46716,12 @@ void checkTargetVpnGatewaysScopedListWarningData( buildCounterTargetVpnGatewaysScopedListWarningData--; } -core.List buildUnnamed798() => [ +core.List buildUnnamed807() => [ buildTargetVpnGatewaysScopedListWarningData(), buildTargetVpnGatewaysScopedListWarningData(), ]; -void checkUnnamed798(core.List o) { +void checkUnnamed807(core.List o) { 
unittest.expect(o, unittest.hasLength(2)); checkTargetVpnGatewaysScopedListWarningData(o[0]); checkTargetVpnGatewaysScopedListWarningData(o[1]); @@ -46133,7 +46734,7 @@ api.TargetVpnGatewaysScopedListWarning buildCounterTargetVpnGatewaysScopedListWarning++; if (buildCounterTargetVpnGatewaysScopedListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed798(); + o.data = buildUnnamed807(); o.message = 'foo'; } buildCounterTargetVpnGatewaysScopedListWarning--; @@ -46148,7 +46749,7 @@ void checkTargetVpnGatewaysScopedListWarning( o.code!, unittest.equals('foo'), ); - checkUnnamed798(o.data!); + checkUnnamed807(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -46162,7 +46763,7 @@ api.TargetVpnGatewaysScopedList buildTargetVpnGatewaysScopedList() { final o = api.TargetVpnGatewaysScopedList(); buildCounterTargetVpnGatewaysScopedList++; if (buildCounterTargetVpnGatewaysScopedList < 3) { - o.targetVpnGateways = buildUnnamed797(); + o.targetVpnGateways = buildUnnamed806(); o.warning = buildTargetVpnGatewaysScopedListWarning(); } buildCounterTargetVpnGatewaysScopedList--; @@ -46172,18 +46773,18 @@ api.TargetVpnGatewaysScopedList buildTargetVpnGatewaysScopedList() { void checkTargetVpnGatewaysScopedList(api.TargetVpnGatewaysScopedList o) { buildCounterTargetVpnGatewaysScopedList++; if (buildCounterTargetVpnGatewaysScopedList < 3) { - checkUnnamed797(o.targetVpnGateways!); + checkUnnamed806(o.targetVpnGateways!); checkTargetVpnGatewaysScopedListWarning(o.warning!); } buildCounterTargetVpnGatewaysScopedList--; } -core.List buildUnnamed799() => [ +core.List buildUnnamed808() => [ buildUrlMapTestHeader(), buildUrlMapTestHeader(), ]; -void checkUnnamed799(core.List o) { +void checkUnnamed808(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkUrlMapTestHeader(o[0]); checkUrlMapTestHeader(o[1]); @@ -46200,7 +46801,7 @@ api.TestFailure buildTestFailure() { o.expectedOutputUrl = 'foo'; o.expectedRedirectResponseCode = 42; o.expectedService = 'foo'; - o.headers = buildUnnamed799(); + o.headers = buildUnnamed808(); o.host = 'foo'; o.path = 'foo'; } @@ -46235,7 +46836,7 @@ void checkTestFailure(api.TestFailure o) { o.expectedService!, unittest.equals('foo'), ); - checkUnnamed799(o.headers!); + checkUnnamed808(o.headers!); unittest.expect( o.host!, unittest.equals('foo'), @@ -46248,12 +46849,12 @@ void checkTestFailure(api.TestFailure o) { buildCounterTestFailure--; } -core.List buildUnnamed800() => [ +core.List buildUnnamed809() => [ 'foo', 'foo', ]; -void checkUnnamed800(core.List o) { +void checkUnnamed809(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -46270,7 +46871,7 @@ api.TestPermissionsRequest buildTestPermissionsRequest() { final o = api.TestPermissionsRequest(); buildCounterTestPermissionsRequest++; if (buildCounterTestPermissionsRequest < 3) { - o.permissions = buildUnnamed800(); + o.permissions = buildUnnamed809(); } buildCounterTestPermissionsRequest--; return o; @@ -46279,17 +46880,17 @@ api.TestPermissionsRequest buildTestPermissionsRequest() { void checkTestPermissionsRequest(api.TestPermissionsRequest o) { buildCounterTestPermissionsRequest++; if (buildCounterTestPermissionsRequest < 3) { - checkUnnamed800(o.permissions!); + checkUnnamed809(o.permissions!); } buildCounterTestPermissionsRequest--; } -core.List buildUnnamed801() => [ +core.List buildUnnamed810() => [ 'foo', 'foo', ]; -void checkUnnamed801(core.List o) { +void checkUnnamed810(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -46306,7 
+46907,7 @@ api.TestPermissionsResponse buildTestPermissionsResponse() { final o = api.TestPermissionsResponse(); buildCounterTestPermissionsResponse++; if (buildCounterTestPermissionsResponse < 3) { - o.permissions = buildUnnamed801(); + o.permissions = buildUnnamed810(); } buildCounterTestPermissionsResponse--; return o; @@ -46315,7 +46916,7 @@ api.TestPermissionsResponse buildTestPermissionsResponse() { void checkTestPermissionsResponse(api.TestPermissionsResponse o) { buildCounterTestPermissionsResponse++; if (buildCounterTestPermissionsResponse < 3) { - checkUnnamed801(o.permissions!); + checkUnnamed810(o.permissions!); } buildCounterTestPermissionsResponse--; } @@ -46391,34 +46992,34 @@ void checkUpcomingMaintenance(api.UpcomingMaintenance o) { buildCounterUpcomingMaintenance--; } -core.List buildUnnamed802() => [ +core.List buildUnnamed811() => [ buildHostRule(), buildHostRule(), ]; -void checkUnnamed802(core.List o) { +void checkUnnamed811(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkHostRule(o[0]); checkHostRule(o[1]); } -core.List buildUnnamed803() => [ +core.List buildUnnamed812() => [ buildPathMatcher(), buildPathMatcher(), ]; -void checkUnnamed803(core.List o) { +void checkUnnamed812(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkPathMatcher(o[0]); checkPathMatcher(o[1]); } -core.List buildUnnamed804() => [ +core.List buildUnnamed813() => [ buildUrlMapTest(), buildUrlMapTest(), ]; -void checkUnnamed804(core.List o) { +void checkUnnamed813(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkUrlMapTest(o[0]); checkUrlMapTest(o[1]); @@ -46437,14 +47038,14 @@ api.UrlMap buildUrlMap() { o.description = 'foo'; o.fingerprint = 'foo'; o.headerAction = buildHttpHeaderAction(); - o.hostRules = buildUnnamed802(); + o.hostRules = buildUnnamed811(); o.id = 'foo'; o.kind = 'foo'; o.name = 'foo'; - o.pathMatchers = buildUnnamed803(); + o.pathMatchers = buildUnnamed812(); o.region = 'foo'; o.selfLink = 'foo'; - o.tests = buildUnnamed804(); + o.tests = buildUnnamed813(); } buildCounterUrlMap--; return o; @@ -46473,7 +47074,7 @@ void checkUrlMap(api.UrlMap o) { unittest.equals('foo'), ); checkHttpHeaderAction(o.headerAction!); - checkUnnamed802(o.hostRules!); + checkUnnamed811(o.hostRules!); unittest.expect( o.id!, unittest.equals('foo'), @@ -46486,7 +47087,7 @@ void checkUrlMap(api.UrlMap o) { o.name!, unittest.equals('foo'), ); - checkUnnamed803(o.pathMatchers!); + checkUnnamed812(o.pathMatchers!); unittest.expect( o.region!, unittest.equals('foo'), @@ -46495,17 +47096,17 @@ void checkUrlMap(api.UrlMap o) { o.selfLink!, unittest.equals('foo'), ); - checkUnnamed804(o.tests!); + checkUnnamed813(o.tests!); } buildCounterUrlMap--; } -core.List buildUnnamed805() => [ +core.List buildUnnamed814() => [ buildUrlMap(), buildUrlMap(), ]; -void checkUnnamed805(core.List o) { +void checkUnnamed814(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkUrlMap(o[0]); checkUrlMap(o[1]); @@ -46538,12 +47139,12 @@ void checkUrlMapListWarningData(api.UrlMapListWarningData o) { buildCounterUrlMapListWarningData--; } -core.List buildUnnamed806() => [ +core.List buildUnnamed815() => [ buildUrlMapListWarningData(), buildUrlMapListWarningData(), ]; -void checkUnnamed806(core.List o) { +void checkUnnamed815(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkUrlMapListWarningData(o[0]); checkUrlMapListWarningData(o[1]); @@ -46555,7 +47156,7 @@ api.UrlMapListWarning buildUrlMapListWarning() { buildCounterUrlMapListWarning++; if 
(buildCounterUrlMapListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed806(); + o.data = buildUnnamed815(); o.message = 'foo'; } buildCounterUrlMapListWarning--; @@ -46569,7 +47170,7 @@ void checkUrlMapListWarning(api.UrlMapListWarning o) { o.code!, unittest.equals('foo'), ); - checkUnnamed806(o.data!); + checkUnnamed815(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -46584,7 +47185,7 @@ api.UrlMapList buildUrlMapList() { buildCounterUrlMapList++; if (buildCounterUrlMapList < 3) { o.id = 'foo'; - o.items = buildUnnamed805(); + o.items = buildUnnamed814(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; @@ -46601,7 +47202,7 @@ void checkUrlMapList(api.UrlMapList o) { o.id!, unittest.equals('foo'), ); - checkUnnamed805(o.items!); + checkUnnamed814(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -46641,12 +47242,12 @@ void checkUrlMapReference(api.UrlMapReference o) { buildCounterUrlMapReference--; } -core.List buildUnnamed807() => [ +core.List buildUnnamed816() => [ buildUrlMapTestHeader(), buildUrlMapTestHeader(), ]; -void checkUnnamed807(core.List o) { +void checkUnnamed816(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkUrlMapTestHeader(o[0]); checkUrlMapTestHeader(o[1]); @@ -46660,7 +47261,7 @@ api.UrlMapTest buildUrlMapTest() { o.description = 'foo'; o.expectedOutputUrl = 'foo'; o.expectedRedirectResponseCode = 42; - o.headers = buildUnnamed807(); + o.headers = buildUnnamed816(); o.host = 'foo'; o.path = 'foo'; o.service = 'foo'; @@ -46684,7 +47285,7 @@ void checkUrlMapTest(api.UrlMapTest o) { o.expectedRedirectResponseCode!, unittest.equals(42), ); - checkUnnamed807(o.headers!); + checkUnnamed816(o.headers!); unittest.expect( o.host!, unittest.equals('foo'), @@ -46728,12 +47329,12 @@ void checkUrlMapTestHeader(api.UrlMapTestHeader o) { buildCounterUrlMapTestHeader--; } -core.List buildUnnamed808() => [ +core.List buildUnnamed817() => [ 'foo', 'foo', ]; -void checkUnnamed808(core.List o) { +void checkUnnamed817(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -46745,12 +47346,12 @@ void checkUnnamed808(core.List o) { ); } -core.List buildUnnamed809() => [ +core.List buildUnnamed818() => [ buildTestFailure(), buildTestFailure(), ]; -void checkUnnamed809(core.List o) { +void checkUnnamed818(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkTestFailure(o[0]); checkTestFailure(o[1]); @@ -46761,9 +47362,9 @@ api.UrlMapValidationResult buildUrlMapValidationResult() { final o = api.UrlMapValidationResult(); buildCounterUrlMapValidationResult++; if (buildCounterUrlMapValidationResult < 3) { - o.loadErrors = buildUnnamed808(); + o.loadErrors = buildUnnamed817(); o.loadSucceeded = true; - o.testFailures = buildUnnamed809(); + o.testFailures = buildUnnamed818(); o.testPassed = true; } buildCounterUrlMapValidationResult--; @@ -46773,31 +47374,31 @@ api.UrlMapValidationResult buildUrlMapValidationResult() { void checkUrlMapValidationResult(api.UrlMapValidationResult o) { buildCounterUrlMapValidationResult++; if (buildCounterUrlMapValidationResult < 3) { - checkUnnamed808(o.loadErrors!); + checkUnnamed817(o.loadErrors!); unittest.expect(o.loadSucceeded!, unittest.isTrue); - checkUnnamed809(o.testFailures!); + checkUnnamed818(o.testFailures!); unittest.expect(o.testPassed!, unittest.isTrue); } buildCounterUrlMapValidationResult--; } -core.Map buildUnnamed810() => { +core.Map buildUnnamed819() => { 'x': buildUrlMapsScopedList(), 'y': buildUrlMapsScopedList(), }; -void 
checkUnnamed810(core.Map o) { +void checkUnnamed819(core.Map o) { unittest.expect(o, unittest.hasLength(2)); checkUrlMapsScopedList(o['x']!); checkUrlMapsScopedList(o['y']!); } -core.List buildUnnamed811() => [ +core.List buildUnnamed820() => [ 'foo', 'foo', ]; -void checkUnnamed811(core.List o) { +void checkUnnamed820(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -46837,12 +47438,12 @@ void checkUrlMapsAggregatedListWarningData( buildCounterUrlMapsAggregatedListWarningData--; } -core.List buildUnnamed812() => [ +core.List buildUnnamed821() => [ buildUrlMapsAggregatedListWarningData(), buildUrlMapsAggregatedListWarningData(), ]; -void checkUnnamed812(core.List o) { +void checkUnnamed821(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkUrlMapsAggregatedListWarningData(o[0]); checkUrlMapsAggregatedListWarningData(o[1]); @@ -46854,7 +47455,7 @@ api.UrlMapsAggregatedListWarning buildUrlMapsAggregatedListWarning() { buildCounterUrlMapsAggregatedListWarning++; if (buildCounterUrlMapsAggregatedListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed812(); + o.data = buildUnnamed821(); o.message = 'foo'; } buildCounterUrlMapsAggregatedListWarning--; @@ -46868,7 +47469,7 @@ void checkUrlMapsAggregatedListWarning(api.UrlMapsAggregatedListWarning o) { o.code!, unittest.equals('foo'), ); - checkUnnamed812(o.data!); + checkUnnamed821(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -46883,11 +47484,11 @@ api.UrlMapsAggregatedList buildUrlMapsAggregatedList() { buildCounterUrlMapsAggregatedList++; if (buildCounterUrlMapsAggregatedList < 3) { o.id = 'foo'; - o.items = buildUnnamed810(); + o.items = buildUnnamed819(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; - o.unreachables = buildUnnamed811(); + o.unreachables = buildUnnamed820(); o.warning = buildUrlMapsAggregatedListWarning(); } buildCounterUrlMapsAggregatedList--; @@ -46901,7 +47502,7 @@ void checkUrlMapsAggregatedList(api.UrlMapsAggregatedList o) { o.id!, unittest.equals('foo'), ); - checkUnnamed810(o.items!); + checkUnnamed819(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -46914,18 +47515,18 @@ void checkUrlMapsAggregatedList(api.UrlMapsAggregatedList o) { o.selfLink!, unittest.equals('foo'), ); - checkUnnamed811(o.unreachables!); + checkUnnamed820(o.unreachables!); checkUrlMapsAggregatedListWarning(o.warning!); } buildCounterUrlMapsAggregatedList--; } -core.List buildUnnamed813() => [ +core.List buildUnnamed822() => [ buildUrlMap(), buildUrlMap(), ]; -void checkUnnamed813(core.List o) { +void checkUnnamed822(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkUrlMap(o[0]); checkUrlMap(o[1]); @@ -46958,12 +47559,12 @@ void checkUrlMapsScopedListWarningData(api.UrlMapsScopedListWarningData o) { buildCounterUrlMapsScopedListWarningData--; } -core.List buildUnnamed814() => [ +core.List buildUnnamed823() => [ buildUrlMapsScopedListWarningData(), buildUrlMapsScopedListWarningData(), ]; -void checkUnnamed814(core.List o) { +void checkUnnamed823(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkUrlMapsScopedListWarningData(o[0]); checkUrlMapsScopedListWarningData(o[1]); @@ -46975,7 +47576,7 @@ api.UrlMapsScopedListWarning buildUrlMapsScopedListWarning() { buildCounterUrlMapsScopedListWarning++; if (buildCounterUrlMapsScopedListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed814(); + o.data = buildUnnamed823(); o.message = 'foo'; } buildCounterUrlMapsScopedListWarning--; @@ -46989,7 +47590,7 @@ void 
checkUrlMapsScopedListWarning(api.UrlMapsScopedListWarning o) { o.code!, unittest.equals('foo'), ); - checkUnnamed814(o.data!); + checkUnnamed823(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -47003,7 +47604,7 @@ api.UrlMapsScopedList buildUrlMapsScopedList() { final o = api.UrlMapsScopedList(); buildCounterUrlMapsScopedList++; if (buildCounterUrlMapsScopedList < 3) { - o.urlMaps = buildUnnamed813(); + o.urlMaps = buildUnnamed822(); o.warning = buildUrlMapsScopedListWarning(); } buildCounterUrlMapsScopedList--; @@ -47013,18 +47614,18 @@ api.UrlMapsScopedList buildUrlMapsScopedList() { void checkUrlMapsScopedList(api.UrlMapsScopedList o) { buildCounterUrlMapsScopedList++; if (buildCounterUrlMapsScopedList < 3) { - checkUnnamed813(o.urlMaps!); + checkUnnamed822(o.urlMaps!); checkUrlMapsScopedListWarning(o.warning!); } buildCounterUrlMapsScopedList--; } -core.List buildUnnamed815() => [ +core.List buildUnnamed824() => [ 'foo', 'foo', ]; -void checkUnnamed815(core.List o) { +void checkUnnamed824(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -47041,7 +47642,7 @@ api.UrlMapsValidateRequest buildUrlMapsValidateRequest() { final o = api.UrlMapsValidateRequest(); buildCounterUrlMapsValidateRequest++; if (buildCounterUrlMapsValidateRequest < 3) { - o.loadBalancingSchemes = buildUnnamed815(); + o.loadBalancingSchemes = buildUnnamed824(); o.resource = buildUrlMap(); } buildCounterUrlMapsValidateRequest--; @@ -47051,7 +47652,7 @@ api.UrlMapsValidateRequest buildUrlMapsValidateRequest() { void checkUrlMapsValidateRequest(api.UrlMapsValidateRequest o) { buildCounterUrlMapsValidateRequest++; if (buildCounterUrlMapsValidateRequest < 3) { - checkUnnamed815(o.loadBalancingSchemes!); + checkUnnamed824(o.loadBalancingSchemes!); checkUrlMap(o.resource!); } buildCounterUrlMapsValidateRequest--; @@ -47108,12 +47709,12 @@ void checkUrlRewrite(api.UrlRewrite o) { buildCounterUrlRewrite--; } -core.List buildUnnamed816() => [ +core.List buildUnnamed825() => [ buildUsableSubnetworkSecondaryRange(), buildUsableSubnetworkSecondaryRange(), ]; -void checkUnnamed816(core.List o) { +void checkUnnamed825(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkUsableSubnetworkSecondaryRange(o[0]); checkUsableSubnetworkSecondaryRange(o[1]); @@ -47131,7 +47732,7 @@ api.UsableSubnetwork buildUsableSubnetwork() { o.network = 'foo'; o.purpose = 'foo'; o.role = 'foo'; - o.secondaryIpRanges = buildUnnamed816(); + o.secondaryIpRanges = buildUnnamed825(); o.stackType = 'foo'; o.subnetwork = 'foo'; } @@ -47170,7 +47771,7 @@ void checkUsableSubnetwork(api.UsableSubnetwork o) { o.role!, unittest.equals('foo'), ); - checkUnnamed816(o.secondaryIpRanges!); + checkUnnamed825(o.secondaryIpRanges!); unittest.expect( o.stackType!, unittest.equals('foo'), @@ -47210,12 +47811,12 @@ void checkUsableSubnetworkSecondaryRange(api.UsableSubnetworkSecondaryRange o) { buildCounterUsableSubnetworkSecondaryRange--; } -core.List buildUnnamed817() => [ +core.List buildUnnamed826() => [ buildUsableSubnetwork(), buildUsableSubnetwork(), ]; -void checkUnnamed817(core.List o) { +void checkUnnamed826(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkUsableSubnetwork(o[0]); checkUsableSubnetwork(o[1]); @@ -47250,12 +47851,12 @@ void checkUsableSubnetworksAggregatedListWarningData( buildCounterUsableSubnetworksAggregatedListWarningData--; } -core.List buildUnnamed818() => [ +core.List buildUnnamed827() => [ buildUsableSubnetworksAggregatedListWarningData(), 
buildUsableSubnetworksAggregatedListWarningData(), ]; -void checkUnnamed818( +void checkUnnamed827( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkUsableSubnetworksAggregatedListWarningData(o[0]); @@ -47269,7 +47870,7 @@ api.UsableSubnetworksAggregatedListWarning buildCounterUsableSubnetworksAggregatedListWarning++; if (buildCounterUsableSubnetworksAggregatedListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed818(); + o.data = buildUnnamed827(); o.message = 'foo'; } buildCounterUsableSubnetworksAggregatedListWarning--; @@ -47284,7 +47885,7 @@ void checkUsableSubnetworksAggregatedListWarning( o.code!, unittest.equals('foo'), ); - checkUnnamed818(o.data!); + checkUnnamed827(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -47299,7 +47900,7 @@ api.UsableSubnetworksAggregatedList buildUsableSubnetworksAggregatedList() { buildCounterUsableSubnetworksAggregatedList++; if (buildCounterUsableSubnetworksAggregatedList < 3) { o.id = 'foo'; - o.items = buildUnnamed817(); + o.items = buildUnnamed826(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; @@ -47317,7 +47918,7 @@ void checkUsableSubnetworksAggregatedList( o.id!, unittest.equals('foo'), ); - checkUnnamed817(o.items!); + checkUnnamed826(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -47362,12 +47963,12 @@ void checkUsageExportLocation(api.UsageExportLocation o) { buildCounterUsageExportLocation--; } -core.List buildUnnamed819() => [ +core.List buildUnnamed828() => [ buildVmEndpointNatMappingsInterfaceNatMappings(), buildVmEndpointNatMappingsInterfaceNatMappings(), ]; -void checkUnnamed819( +void checkUnnamed828( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkVmEndpointNatMappingsInterfaceNatMappings(o[0]); @@ -47380,7 +47981,7 @@ api.VmEndpointNatMappings buildVmEndpointNatMappings() { buildCounterVmEndpointNatMappings++; if (buildCounterVmEndpointNatMappings < 3) { o.instanceName = 'foo'; - o.interfaceNatMappings = buildUnnamed819(); + o.interfaceNatMappings = buildUnnamed828(); } buildCounterVmEndpointNatMappings--; return o; @@ -47393,17 +47994,17 @@ void checkVmEndpointNatMappings(api.VmEndpointNatMappings o) { o.instanceName!, unittest.equals('foo'), ); - checkUnnamed819(o.interfaceNatMappings!); + checkUnnamed828(o.interfaceNatMappings!); } buildCounterVmEndpointNatMappings--; } -core.List buildUnnamed820() => [ +core.List buildUnnamed829() => [ 'foo', 'foo', ]; -void checkUnnamed820(core.List o) { +void checkUnnamed829(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -47415,12 +48016,12 @@ void checkUnnamed820(core.List o) { ); } -core.List buildUnnamed821() => [ +core.List buildUnnamed830() => [ 'foo', 'foo', ]; -void checkUnnamed821(core.List o) { +void checkUnnamed830(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -47433,12 +48034,12 @@ void checkUnnamed821(core.List o) { } core.List - buildUnnamed822() => [ + buildUnnamed831() => [ buildVmEndpointNatMappingsInterfaceNatMappingsNatRuleMappings(), buildVmEndpointNatMappingsInterfaceNatMappingsNatRuleMappings(), ]; -void checkUnnamed822( +void checkUnnamed831( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkVmEndpointNatMappingsInterfaceNatMappingsNatRuleMappings(o[0]); @@ -47451,11 +48052,11 @@ api.VmEndpointNatMappingsInterfaceNatMappings final o = api.VmEndpointNatMappingsInterfaceNatMappings(); buildCounterVmEndpointNatMappingsInterfaceNatMappings++; if (buildCounterVmEndpointNatMappingsInterfaceNatMappings < 3) 
{ - o.drainNatIpPortRanges = buildUnnamed820(); - o.natIpPortRanges = buildUnnamed821(); + o.drainNatIpPortRanges = buildUnnamed829(); + o.natIpPortRanges = buildUnnamed830(); o.numTotalDrainNatPorts = 42; o.numTotalNatPorts = 42; - o.ruleMappings = buildUnnamed822(); + o.ruleMappings = buildUnnamed831(); o.sourceAliasIpRange = 'foo'; o.sourceVirtualIp = 'foo'; } @@ -47467,8 +48068,8 @@ void checkVmEndpointNatMappingsInterfaceNatMappings( api.VmEndpointNatMappingsInterfaceNatMappings o) { buildCounterVmEndpointNatMappingsInterfaceNatMappings++; if (buildCounterVmEndpointNatMappingsInterfaceNatMappings < 3) { - checkUnnamed820(o.drainNatIpPortRanges!); - checkUnnamed821(o.natIpPortRanges!); + checkUnnamed829(o.drainNatIpPortRanges!); + checkUnnamed830(o.natIpPortRanges!); unittest.expect( o.numTotalDrainNatPorts!, unittest.equals(42), @@ -47477,7 +48078,7 @@ void checkVmEndpointNatMappingsInterfaceNatMappings( o.numTotalNatPorts!, unittest.equals(42), ); - checkUnnamed822(o.ruleMappings!); + checkUnnamed831(o.ruleMappings!); unittest.expect( o.sourceAliasIpRange!, unittest.equals('foo'), @@ -47490,12 +48091,12 @@ void checkVmEndpointNatMappingsInterfaceNatMappings( buildCounterVmEndpointNatMappingsInterfaceNatMappings--; } -core.List buildUnnamed823() => [ +core.List buildUnnamed832() => [ 'foo', 'foo', ]; -void checkUnnamed823(core.List o) { +void checkUnnamed832(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -47507,12 +48108,12 @@ void checkUnnamed823(core.List o) { ); } -core.List buildUnnamed824() => [ +core.List buildUnnamed833() => [ 'foo', 'foo', ]; -void checkUnnamed824(core.List o) { +void checkUnnamed833(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -47532,8 +48133,8 @@ api.VmEndpointNatMappingsInterfaceNatMappingsNatRuleMappings buildCounterVmEndpointNatMappingsInterfaceNatMappingsNatRuleMappings++; if (buildCounterVmEndpointNatMappingsInterfaceNatMappingsNatRuleMappings < 3) { - o.drainNatIpPortRanges = buildUnnamed823(); - o.natIpPortRanges = buildUnnamed824(); + o.drainNatIpPortRanges = buildUnnamed832(); + o.natIpPortRanges = buildUnnamed833(); o.numTotalDrainNatPorts = 42; o.numTotalNatPorts = 42; o.ruleNumber = 42; @@ -47547,8 +48148,8 @@ void checkVmEndpointNatMappingsInterfaceNatMappingsNatRuleMappings( buildCounterVmEndpointNatMappingsInterfaceNatMappingsNatRuleMappings++; if (buildCounterVmEndpointNatMappingsInterfaceNatMappingsNatRuleMappings < 3) { - checkUnnamed823(o.drainNatIpPortRanges!); - checkUnnamed824(o.natIpPortRanges!); + checkUnnamed832(o.drainNatIpPortRanges!); + checkUnnamed833(o.natIpPortRanges!); unittest.expect( o.numTotalDrainNatPorts!, unittest.equals(42), @@ -47565,12 +48166,12 @@ void checkVmEndpointNatMappingsInterfaceNatMappingsNatRuleMappings( buildCounterVmEndpointNatMappingsInterfaceNatMappingsNatRuleMappings--; } -core.List buildUnnamed825() => [ +core.List buildUnnamed834() => [ buildVmEndpointNatMappings(), buildVmEndpointNatMappings(), ]; -void checkUnnamed825(core.List o) { +void checkUnnamed834(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkVmEndpointNatMappings(o[0]); checkVmEndpointNatMappings(o[1]); @@ -47605,12 +48206,12 @@ void checkVmEndpointNatMappingsListWarningData( buildCounterVmEndpointNatMappingsListWarningData--; } -core.List buildUnnamed826() => [ +core.List buildUnnamed835() => [ buildVmEndpointNatMappingsListWarningData(), buildVmEndpointNatMappingsListWarningData(), ]; -void checkUnnamed826(core.List o) { +void 
checkUnnamed835(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkVmEndpointNatMappingsListWarningData(o[0]); checkVmEndpointNatMappingsListWarningData(o[1]); @@ -47622,7 +48223,7 @@ api.VmEndpointNatMappingsListWarning buildVmEndpointNatMappingsListWarning() { buildCounterVmEndpointNatMappingsListWarning++; if (buildCounterVmEndpointNatMappingsListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed826(); + o.data = buildUnnamed835(); o.message = 'foo'; } buildCounterVmEndpointNatMappingsListWarning--; @@ -47637,7 +48238,7 @@ void checkVmEndpointNatMappingsListWarning( o.code!, unittest.equals('foo'), ); - checkUnnamed826(o.data!); + checkUnnamed835(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -47654,7 +48255,7 @@ api.VmEndpointNatMappingsList buildVmEndpointNatMappingsList() { o.id = 'foo'; o.kind = 'foo'; o.nextPageToken = 'foo'; - o.result = buildUnnamed825(); + o.result = buildUnnamed834(); o.selfLink = 'foo'; o.warning = buildVmEndpointNatMappingsListWarning(); } @@ -47677,7 +48278,7 @@ void checkVmEndpointNatMappingsList(api.VmEndpointNatMappingsList o) { o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed825(o.result!); + checkUnnamed834(o.result!); unittest.expect( o.selfLink!, unittest.equals('foo'), @@ -47687,12 +48288,12 @@ void checkVmEndpointNatMappingsList(api.VmEndpointNatMappingsList o) { buildCounterVmEndpointNatMappingsList--; } -core.Map buildUnnamed827() => { +core.Map buildUnnamed836() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed827(core.Map o) { +void checkUnnamed836(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -47704,12 +48305,12 @@ void checkUnnamed827(core.Map o) { ); } -core.List buildUnnamed828() => [ +core.List buildUnnamed837() => [ buildVpnGatewayVpnGatewayInterface(), buildVpnGatewayVpnGatewayInterface(), ]; -void checkUnnamed828(core.List o) { +void checkUnnamed837(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkVpnGatewayVpnGatewayInterface(o[0]); checkVpnGatewayVpnGatewayInterface(o[1]); @@ -47726,13 +48327,13 @@ api.VpnGateway buildVpnGateway() { o.id = 'foo'; o.kind = 'foo'; o.labelFingerprint = 'foo'; - o.labels = buildUnnamed827(); + o.labels = buildUnnamed836(); o.name = 'foo'; o.network = 'foo'; o.region = 'foo'; o.selfLink = 'foo'; o.stackType = 'foo'; - o.vpnInterfaces = buildUnnamed828(); + o.vpnInterfaces = buildUnnamed837(); } buildCounterVpnGateway--; return o; @@ -47765,7 +48366,7 @@ void checkVpnGateway(api.VpnGateway o) { o.labelFingerprint!, unittest.equals('foo'), ); - checkUnnamed827(o.labels!); + checkUnnamed836(o.labels!); unittest.expect( o.name!, unittest.equals('foo'), @@ -47786,28 +48387,28 @@ void checkVpnGateway(api.VpnGateway o) { o.stackType!, unittest.equals('foo'), ); - checkUnnamed828(o.vpnInterfaces!); + checkUnnamed837(o.vpnInterfaces!); } buildCounterVpnGateway--; } -core.Map buildUnnamed829() => { +core.Map buildUnnamed838() => { 'x': buildVpnGatewaysScopedList(), 'y': buildVpnGatewaysScopedList(), }; -void checkUnnamed829(core.Map o) { +void checkUnnamed838(core.Map o) { unittest.expect(o, unittest.hasLength(2)); checkVpnGatewaysScopedList(o['x']!); checkVpnGatewaysScopedList(o['y']!); } -core.List buildUnnamed830() => [ +core.List buildUnnamed839() => [ 'foo', 'foo', ]; -void checkUnnamed830(core.List o) { +void checkUnnamed839(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -47848,12 +48449,12 @@ void checkVpnGatewayAggregatedListWarningData( 
buildCounterVpnGatewayAggregatedListWarningData--; } -core.List buildUnnamed831() => [ +core.List buildUnnamed840() => [ buildVpnGatewayAggregatedListWarningData(), buildVpnGatewayAggregatedListWarningData(), ]; -void checkUnnamed831(core.List o) { +void checkUnnamed840(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkVpnGatewayAggregatedListWarningData(o[0]); checkVpnGatewayAggregatedListWarningData(o[1]); @@ -47865,7 +48466,7 @@ api.VpnGatewayAggregatedListWarning buildVpnGatewayAggregatedListWarning() { buildCounterVpnGatewayAggregatedListWarning++; if (buildCounterVpnGatewayAggregatedListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed831(); + o.data = buildUnnamed840(); o.message = 'foo'; } buildCounterVpnGatewayAggregatedListWarning--; @@ -47880,7 +48481,7 @@ void checkVpnGatewayAggregatedListWarning( o.code!, unittest.equals('foo'), ); - checkUnnamed831(o.data!); + checkUnnamed840(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -47895,11 +48496,11 @@ api.VpnGatewayAggregatedList buildVpnGatewayAggregatedList() { buildCounterVpnGatewayAggregatedList++; if (buildCounterVpnGatewayAggregatedList < 3) { o.id = 'foo'; - o.items = buildUnnamed829(); + o.items = buildUnnamed838(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; - o.unreachables = buildUnnamed830(); + o.unreachables = buildUnnamed839(); o.warning = buildVpnGatewayAggregatedListWarning(); } buildCounterVpnGatewayAggregatedList--; @@ -47913,7 +48514,7 @@ void checkVpnGatewayAggregatedList(api.VpnGatewayAggregatedList o) { o.id!, unittest.equals('foo'), ); - checkUnnamed829(o.items!); + checkUnnamed838(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -47926,18 +48527,18 @@ void checkVpnGatewayAggregatedList(api.VpnGatewayAggregatedList o) { o.selfLink!, unittest.equals('foo'), ); - checkUnnamed830(o.unreachables!); + checkUnnamed839(o.unreachables!); checkVpnGatewayAggregatedListWarning(o.warning!); } buildCounterVpnGatewayAggregatedList--; } -core.List buildUnnamed832() => [ +core.List buildUnnamed841() => [ buildVpnGateway(), buildVpnGateway(), ]; -void checkUnnamed832(core.List o) { +void checkUnnamed841(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkVpnGateway(o[0]); checkVpnGateway(o[1]); @@ -47970,12 +48571,12 @@ void checkVpnGatewayListWarningData(api.VpnGatewayListWarningData o) { buildCounterVpnGatewayListWarningData--; } -core.List buildUnnamed833() => [ +core.List buildUnnamed842() => [ buildVpnGatewayListWarningData(), buildVpnGatewayListWarningData(), ]; -void checkUnnamed833(core.List o) { +void checkUnnamed842(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkVpnGatewayListWarningData(o[0]); checkVpnGatewayListWarningData(o[1]); @@ -47987,7 +48588,7 @@ api.VpnGatewayListWarning buildVpnGatewayListWarning() { buildCounterVpnGatewayListWarning++; if (buildCounterVpnGatewayListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed833(); + o.data = buildUnnamed842(); o.message = 'foo'; } buildCounterVpnGatewayListWarning--; @@ -48001,7 +48602,7 @@ void checkVpnGatewayListWarning(api.VpnGatewayListWarning o) { o.code!, unittest.equals('foo'), ); - checkUnnamed833(o.data!); + checkUnnamed842(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -48016,7 +48617,7 @@ api.VpnGatewayList buildVpnGatewayList() { buildCounterVpnGatewayList++; if (buildCounterVpnGatewayList < 3) { o.id = 'foo'; - o.items = buildUnnamed832(); + o.items = buildUnnamed841(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; @@ 
-48033,7 +48634,7 @@ void checkVpnGatewayList(api.VpnGatewayList o) { o.id!, unittest.equals('foo'), ); - checkUnnamed832(o.items!); + checkUnnamed841(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -48051,12 +48652,12 @@ void checkVpnGatewayList(api.VpnGatewayList o) { buildCounterVpnGatewayList--; } -core.List buildUnnamed834() => [ +core.List buildUnnamed843() => [ buildVpnGatewayStatusVpnConnection(), buildVpnGatewayStatusVpnConnection(), ]; -void checkUnnamed834(core.List o) { +void checkUnnamed843(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkVpnGatewayStatusVpnConnection(o[0]); checkVpnGatewayStatusVpnConnection(o[1]); @@ -48067,7 +48668,7 @@ api.VpnGatewayStatus buildVpnGatewayStatus() { final o = api.VpnGatewayStatus(); buildCounterVpnGatewayStatus++; if (buildCounterVpnGatewayStatus < 3) { - o.vpnConnections = buildUnnamed834(); + o.vpnConnections = buildUnnamed843(); } buildCounterVpnGatewayStatus--; return o; @@ -48076,7 +48677,7 @@ api.VpnGatewayStatus buildVpnGatewayStatus() { void checkVpnGatewayStatus(api.VpnGatewayStatus o) { buildCounterVpnGatewayStatus++; if (buildCounterVpnGatewayStatus < 3) { - checkUnnamed834(o.vpnConnections!); + checkUnnamed843(o.vpnConnections!); } buildCounterVpnGatewayStatus--; } @@ -48142,12 +48743,12 @@ void checkVpnGatewayStatusTunnel(api.VpnGatewayStatusTunnel o) { buildCounterVpnGatewayStatusTunnel--; } -core.List buildUnnamed835() => [ +core.List buildUnnamed844() => [ buildVpnGatewayStatusTunnel(), buildVpnGatewayStatusTunnel(), ]; -void checkUnnamed835(core.List o) { +void checkUnnamed844(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkVpnGatewayStatusTunnel(o[0]); checkVpnGatewayStatusTunnel(o[1]); @@ -48161,7 +48762,7 @@ api.VpnGatewayStatusVpnConnection buildVpnGatewayStatusVpnConnection() { o.peerExternalGateway = 'foo'; o.peerGcpGateway = 'foo'; o.state = buildVpnGatewayStatusHighAvailabilityRequirementState(); - o.tunnels = buildUnnamed835(); + o.tunnels = buildUnnamed844(); } buildCounterVpnGatewayStatusVpnConnection--; return o; @@ -48179,7 +48780,7 @@ void checkVpnGatewayStatusVpnConnection(api.VpnGatewayStatusVpnConnection o) { unittest.equals('foo'), ); checkVpnGatewayStatusHighAvailabilityRequirementState(o.state!); - checkUnnamed835(o.tunnels!); + checkUnnamed844(o.tunnels!); } buildCounterVpnGatewayStatusVpnConnection--; } @@ -48240,12 +48841,12 @@ void checkVpnGatewaysGetStatusResponse(api.VpnGatewaysGetStatusResponse o) { buildCounterVpnGatewaysGetStatusResponse--; } -core.List buildUnnamed836() => [ +core.List buildUnnamed845() => [ buildVpnGateway(), buildVpnGateway(), ]; -void checkUnnamed836(core.List o) { +void checkUnnamed845(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkVpnGateway(o[0]); checkVpnGateway(o[1]); @@ -48279,12 +48880,12 @@ void checkVpnGatewaysScopedListWarningData( buildCounterVpnGatewaysScopedListWarningData--; } -core.List buildUnnamed837() => [ +core.List buildUnnamed846() => [ buildVpnGatewaysScopedListWarningData(), buildVpnGatewaysScopedListWarningData(), ]; -void checkUnnamed837(core.List o) { +void checkUnnamed846(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkVpnGatewaysScopedListWarningData(o[0]); checkVpnGatewaysScopedListWarningData(o[1]); @@ -48296,7 +48897,7 @@ api.VpnGatewaysScopedListWarning buildVpnGatewaysScopedListWarning() { buildCounterVpnGatewaysScopedListWarning++; if (buildCounterVpnGatewaysScopedListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed837(); + o.data = buildUnnamed846(); o.message 
= 'foo'; } buildCounterVpnGatewaysScopedListWarning--; @@ -48310,7 +48911,7 @@ void checkVpnGatewaysScopedListWarning(api.VpnGatewaysScopedListWarning o) { o.code!, unittest.equals('foo'), ); - checkUnnamed837(o.data!); + checkUnnamed846(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -48324,7 +48925,7 @@ api.VpnGatewaysScopedList buildVpnGatewaysScopedList() { final o = api.VpnGatewaysScopedList(); buildCounterVpnGatewaysScopedList++; if (buildCounterVpnGatewaysScopedList < 3) { - o.vpnGateways = buildUnnamed836(); + o.vpnGateways = buildUnnamed845(); o.warning = buildVpnGatewaysScopedListWarning(); } buildCounterVpnGatewaysScopedList--; @@ -48334,18 +48935,18 @@ api.VpnGatewaysScopedList buildVpnGatewaysScopedList() { void checkVpnGatewaysScopedList(api.VpnGatewaysScopedList o) { buildCounterVpnGatewaysScopedList++; if (buildCounterVpnGatewaysScopedList < 3) { - checkUnnamed836(o.vpnGateways!); + checkUnnamed845(o.vpnGateways!); checkVpnGatewaysScopedListWarning(o.warning!); } buildCounterVpnGatewaysScopedList--; } -core.Map buildUnnamed838() => { +core.Map buildUnnamed847() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed838(core.Map o) { +void checkUnnamed847(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -48357,12 +48958,12 @@ void checkUnnamed838(core.Map o) { ); } -core.List buildUnnamed839() => [ +core.List buildUnnamed848() => [ 'foo', 'foo', ]; -void checkUnnamed839(core.List o) { +void checkUnnamed848(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -48374,12 +48975,12 @@ void checkUnnamed839(core.List o) { ); } -core.List buildUnnamed840() => [ +core.List buildUnnamed849() => [ 'foo', 'foo', ]; -void checkUnnamed840(core.List o) { +void checkUnnamed849(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -48403,15 +49004,15 @@ api.VpnTunnel buildVpnTunnel() { o.ikeVersion = 42; o.kind = 'foo'; o.labelFingerprint = 'foo'; - o.labels = buildUnnamed838(); - o.localTrafficSelector = buildUnnamed839(); + o.labels = buildUnnamed847(); + o.localTrafficSelector = buildUnnamed848(); o.name = 'foo'; o.peerExternalGateway = 'foo'; o.peerExternalGatewayInterface = 42; o.peerGcpGateway = 'foo'; o.peerIp = 'foo'; o.region = 'foo'; - o.remoteTrafficSelector = buildUnnamed840(); + o.remoteTrafficSelector = buildUnnamed849(); o.router = 'foo'; o.selfLink = 'foo'; o.sharedSecret = 'foo'; @@ -48456,8 +49057,8 @@ void checkVpnTunnel(api.VpnTunnel o) { o.labelFingerprint!, unittest.equals('foo'), ); - checkUnnamed838(o.labels!); - checkUnnamed839(o.localTrafficSelector!); + checkUnnamed847(o.labels!); + checkUnnamed848(o.localTrafficSelector!); unittest.expect( o.name!, unittest.equals('foo'), @@ -48482,7 +49083,7 @@ void checkVpnTunnel(api.VpnTunnel o) { o.region!, unittest.equals('foo'), ); - checkUnnamed840(o.remoteTrafficSelector!); + checkUnnamed849(o.remoteTrafficSelector!); unittest.expect( o.router!, unittest.equals('foo'), @@ -48519,23 +49120,23 @@ void checkVpnTunnel(api.VpnTunnel o) { buildCounterVpnTunnel--; } -core.Map buildUnnamed841() => { +core.Map buildUnnamed850() => { 'x': buildVpnTunnelsScopedList(), 'y': buildVpnTunnelsScopedList(), }; -void checkUnnamed841(core.Map o) { +void checkUnnamed850(core.Map o) { unittest.expect(o, unittest.hasLength(2)); checkVpnTunnelsScopedList(o['x']!); checkVpnTunnelsScopedList(o['y']!); } -core.List buildUnnamed842() => [ +core.List buildUnnamed851() => [ 'foo', 'foo', ]; -void checkUnnamed842(core.List o) { +void 
checkUnnamed851(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -48576,12 +49177,12 @@ void checkVpnTunnelAggregatedListWarningData( buildCounterVpnTunnelAggregatedListWarningData--; } -core.List buildUnnamed843() => [ +core.List buildUnnamed852() => [ buildVpnTunnelAggregatedListWarningData(), buildVpnTunnelAggregatedListWarningData(), ]; -void checkUnnamed843(core.List o) { +void checkUnnamed852(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkVpnTunnelAggregatedListWarningData(o[0]); checkVpnTunnelAggregatedListWarningData(o[1]); @@ -48593,7 +49194,7 @@ api.VpnTunnelAggregatedListWarning buildVpnTunnelAggregatedListWarning() { buildCounterVpnTunnelAggregatedListWarning++; if (buildCounterVpnTunnelAggregatedListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed843(); + o.data = buildUnnamed852(); o.message = 'foo'; } buildCounterVpnTunnelAggregatedListWarning--; @@ -48607,7 +49208,7 @@ void checkVpnTunnelAggregatedListWarning(api.VpnTunnelAggregatedListWarning o) { o.code!, unittest.equals('foo'), ); - checkUnnamed843(o.data!); + checkUnnamed852(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -48622,11 +49223,11 @@ api.VpnTunnelAggregatedList buildVpnTunnelAggregatedList() { buildCounterVpnTunnelAggregatedList++; if (buildCounterVpnTunnelAggregatedList < 3) { o.id = 'foo'; - o.items = buildUnnamed841(); + o.items = buildUnnamed850(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; - o.unreachables = buildUnnamed842(); + o.unreachables = buildUnnamed851(); o.warning = buildVpnTunnelAggregatedListWarning(); } buildCounterVpnTunnelAggregatedList--; @@ -48640,7 +49241,7 @@ void checkVpnTunnelAggregatedList(api.VpnTunnelAggregatedList o) { o.id!, unittest.equals('foo'), ); - checkUnnamed841(o.items!); + checkUnnamed850(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -48653,18 +49254,18 @@ void checkVpnTunnelAggregatedList(api.VpnTunnelAggregatedList o) { o.selfLink!, unittest.equals('foo'), ); - checkUnnamed842(o.unreachables!); + checkUnnamed851(o.unreachables!); checkVpnTunnelAggregatedListWarning(o.warning!); } buildCounterVpnTunnelAggregatedList--; } -core.List buildUnnamed844() => [ +core.List buildUnnamed853() => [ buildVpnTunnel(), buildVpnTunnel(), ]; -void checkUnnamed844(core.List o) { +void checkUnnamed853(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkVpnTunnel(o[0]); checkVpnTunnel(o[1]); @@ -48697,12 +49298,12 @@ void checkVpnTunnelListWarningData(api.VpnTunnelListWarningData o) { buildCounterVpnTunnelListWarningData--; } -core.List buildUnnamed845() => [ +core.List buildUnnamed854() => [ buildVpnTunnelListWarningData(), buildVpnTunnelListWarningData(), ]; -void checkUnnamed845(core.List o) { +void checkUnnamed854(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkVpnTunnelListWarningData(o[0]); checkVpnTunnelListWarningData(o[1]); @@ -48714,7 +49315,7 @@ api.VpnTunnelListWarning buildVpnTunnelListWarning() { buildCounterVpnTunnelListWarning++; if (buildCounterVpnTunnelListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed845(); + o.data = buildUnnamed854(); o.message = 'foo'; } buildCounterVpnTunnelListWarning--; @@ -48728,7 +49329,7 @@ void checkVpnTunnelListWarning(api.VpnTunnelListWarning o) { o.code!, unittest.equals('foo'), ); - checkUnnamed845(o.data!); + checkUnnamed854(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -48743,7 +49344,7 @@ api.VpnTunnelList buildVpnTunnelList() { buildCounterVpnTunnelList++; if 
(buildCounterVpnTunnelList < 3) { o.id = 'foo'; - o.items = buildUnnamed844(); + o.items = buildUnnamed853(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; @@ -48760,7 +49361,7 @@ void checkVpnTunnelList(api.VpnTunnelList o) { o.id!, unittest.equals('foo'), ); - checkUnnamed844(o.items!); + checkUnnamed853(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -48778,12 +49379,12 @@ void checkVpnTunnelList(api.VpnTunnelList o) { buildCounterVpnTunnelList--; } -core.List buildUnnamed846() => [ +core.List buildUnnamed855() => [ buildVpnTunnel(), buildVpnTunnel(), ]; -void checkUnnamed846(core.List o) { +void checkUnnamed855(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkVpnTunnel(o[0]); checkVpnTunnel(o[1]); @@ -48817,12 +49418,12 @@ void checkVpnTunnelsScopedListWarningData( buildCounterVpnTunnelsScopedListWarningData--; } -core.List buildUnnamed847() => [ +core.List buildUnnamed856() => [ buildVpnTunnelsScopedListWarningData(), buildVpnTunnelsScopedListWarningData(), ]; -void checkUnnamed847(core.List o) { +void checkUnnamed856(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkVpnTunnelsScopedListWarningData(o[0]); checkVpnTunnelsScopedListWarningData(o[1]); @@ -48834,7 +49435,7 @@ api.VpnTunnelsScopedListWarning buildVpnTunnelsScopedListWarning() { buildCounterVpnTunnelsScopedListWarning++; if (buildCounterVpnTunnelsScopedListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed847(); + o.data = buildUnnamed856(); o.message = 'foo'; } buildCounterVpnTunnelsScopedListWarning--; @@ -48848,7 +49449,7 @@ void checkVpnTunnelsScopedListWarning(api.VpnTunnelsScopedListWarning o) { o.code!, unittest.equals('foo'), ); - checkUnnamed847(o.data!); + checkUnnamed856(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -48862,7 +49463,7 @@ api.VpnTunnelsScopedList buildVpnTunnelsScopedList() { final o = api.VpnTunnelsScopedList(); buildCounterVpnTunnelsScopedList++; if (buildCounterVpnTunnelsScopedList < 3) { - o.vpnTunnels = buildUnnamed846(); + o.vpnTunnels = buildUnnamed855(); o.warning = buildVpnTunnelsScopedListWarning(); } buildCounterVpnTunnelsScopedList--; @@ -48872,18 +49473,18 @@ api.VpnTunnelsScopedList buildVpnTunnelsScopedList() { void checkVpnTunnelsScopedList(api.VpnTunnelsScopedList o) { buildCounterVpnTunnelsScopedList++; if (buildCounterVpnTunnelsScopedList < 3) { - checkUnnamed846(o.vpnTunnels!); + checkUnnamed855(o.vpnTunnels!); checkVpnTunnelsScopedListWarning(o.warning!); } buildCounterVpnTunnelsScopedList--; } -core.List buildUnnamed848() => [ +core.List buildUnnamed857() => [ 'foo', 'foo', ]; -void checkUnnamed848(core.List o) { +void checkUnnamed857(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -48895,12 +49496,12 @@ void checkUnnamed848(core.List o) { ); } -core.List buildUnnamed849() => [ +core.List buildUnnamed858() => [ buildWafExpressionSetExpression(), buildWafExpressionSetExpression(), ]; -void checkUnnamed849(core.List o) { +void checkUnnamed858(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkWafExpressionSetExpression(o[0]); checkWafExpressionSetExpression(o[1]); @@ -48911,8 +49512,8 @@ api.WafExpressionSet buildWafExpressionSet() { final o = api.WafExpressionSet(); buildCounterWafExpressionSet++; if (buildCounterWafExpressionSet < 3) { - o.aliases = buildUnnamed848(); - o.expressions = buildUnnamed849(); + o.aliases = buildUnnamed857(); + o.expressions = buildUnnamed858(); o.id = 'foo'; } buildCounterWafExpressionSet--; @@ -48922,8 +49523,8 @@ 
api.WafExpressionSet buildWafExpressionSet() { void checkWafExpressionSet(api.WafExpressionSet o) { buildCounterWafExpressionSet++; if (buildCounterWafExpressionSet < 3) { - checkUnnamed848(o.aliases!); - checkUnnamed849(o.expressions!); + checkUnnamed857(o.aliases!); + checkUnnamed858(o.expressions!); unittest.expect( o.id!, unittest.equals('foo'), @@ -48988,12 +49589,12 @@ void checkWeightedBackendService(api.WeightedBackendService o) { buildCounterWeightedBackendService--; } -core.List buildUnnamed850() => [ +core.List buildUnnamed859() => [ buildProject(), buildProject(), ]; -void checkUnnamed850(core.List o) { +void checkUnnamed859(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkProject(o[0]); checkProject(o[1]); @@ -49026,12 +49627,12 @@ void checkXpnHostListWarningData(api.XpnHostListWarningData o) { buildCounterXpnHostListWarningData--; } -core.List buildUnnamed851() => [ +core.List buildUnnamed860() => [ buildXpnHostListWarningData(), buildXpnHostListWarningData(), ]; -void checkUnnamed851(core.List o) { +void checkUnnamed860(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkXpnHostListWarningData(o[0]); checkXpnHostListWarningData(o[1]); @@ -49043,7 +49644,7 @@ api.XpnHostListWarning buildXpnHostListWarning() { buildCounterXpnHostListWarning++; if (buildCounterXpnHostListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed851(); + o.data = buildUnnamed860(); o.message = 'foo'; } buildCounterXpnHostListWarning--; @@ -49057,7 +49658,7 @@ void checkXpnHostListWarning(api.XpnHostListWarning o) { o.code!, unittest.equals('foo'), ); - checkUnnamed851(o.data!); + checkUnnamed860(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -49072,7 +49673,7 @@ api.XpnHostList buildXpnHostList() { buildCounterXpnHostList++; if (buildCounterXpnHostList < 3) { o.id = 'foo'; - o.items = buildUnnamed850(); + o.items = buildUnnamed859(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; @@ -49089,7 +49690,7 @@ void checkXpnHostList(api.XpnHostList o) { o.id!, unittest.equals('foo'), ); - checkUnnamed850(o.items!); + checkUnnamed859(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -49134,12 +49735,12 @@ void checkXpnResourceId(api.XpnResourceId o) { buildCounterXpnResourceId--; } -core.List buildUnnamed852() => [ +core.List buildUnnamed861() => [ 'foo', 'foo', ]; -void checkUnnamed852(core.List o) { +void checkUnnamed861(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -49156,7 +49757,7 @@ api.Zone buildZone() { final o = api.Zone(); buildCounterZone++; if (buildCounterZone < 3) { - o.availableCpuPlatforms = buildUnnamed852(); + o.availableCpuPlatforms = buildUnnamed861(); o.creationTimestamp = 'foo'; o.deprecated = buildDeprecationStatus(); o.description = 'foo'; @@ -49175,7 +49776,7 @@ api.Zone buildZone() { void checkZone(api.Zone o) { buildCounterZone++; if (buildCounterZone < 3) { - checkUnnamed852(o.availableCpuPlatforms!); + checkUnnamed861(o.availableCpuPlatforms!); unittest.expect( o.creationTimestamp!, unittest.equals('foo'), @@ -49214,12 +49815,12 @@ void checkZone(api.Zone o) { buildCounterZone--; } -core.List buildUnnamed853() => [ +core.List buildUnnamed862() => [ buildZone(), buildZone(), ]; -void checkUnnamed853(core.List o) { +void checkUnnamed862(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkZone(o[0]); checkZone(o[1]); @@ -49252,12 +49853,12 @@ void checkZoneListWarningData(api.ZoneListWarningData o) { buildCounterZoneListWarningData--; } -core.List 
buildUnnamed854() => [ +core.List buildUnnamed863() => [ buildZoneListWarningData(), buildZoneListWarningData(), ]; -void checkUnnamed854(core.List o) { +void checkUnnamed863(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkZoneListWarningData(o[0]); checkZoneListWarningData(o[1]); @@ -49269,7 +49870,7 @@ api.ZoneListWarning buildZoneListWarning() { buildCounterZoneListWarning++; if (buildCounterZoneListWarning < 3) { o.code = 'foo'; - o.data = buildUnnamed854(); + o.data = buildUnnamed863(); o.message = 'foo'; } buildCounterZoneListWarning--; @@ -49283,7 +49884,7 @@ void checkZoneListWarning(api.ZoneListWarning o) { o.code!, unittest.equals('foo'), ); - checkUnnamed854(o.data!); + checkUnnamed863(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -49298,7 +49899,7 @@ api.ZoneList buildZoneList() { buildCounterZoneList++; if (buildCounterZoneList < 3) { o.id = 'foo'; - o.items = buildUnnamed853(); + o.items = buildUnnamed862(); o.kind = 'foo'; o.nextPageToken = 'foo'; o.selfLink = 'foo'; @@ -49315,7 +49916,7 @@ void checkZoneList(api.ZoneList o) { o.id!, unittest.equals('foo'), ); - checkUnnamed853(o.items!); + checkUnnamed862(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -49333,12 +49934,12 @@ void checkZoneList(api.ZoneList o) { buildCounterZoneList--; } -core.Map buildUnnamed855() => { +core.Map buildUnnamed864() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed855(core.Map o) { +void checkUnnamed864(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -49356,7 +49957,7 @@ api.ZoneSetLabelsRequest buildZoneSetLabelsRequest() { buildCounterZoneSetLabelsRequest++; if (buildCounterZoneSetLabelsRequest < 3) { o.labelFingerprint = 'foo'; - o.labels = buildUnnamed855(); + o.labels = buildUnnamed864(); } buildCounterZoneSetLabelsRequest--; return o; @@ -49369,17 +49970,17 @@ void checkZoneSetLabelsRequest(api.ZoneSetLabelsRequest o) { o.labelFingerprint!, unittest.equals('foo'), ); - checkUnnamed855(o.labels!); + checkUnnamed864(o.labels!); } buildCounterZoneSetLabelsRequest--; } -core.List buildUnnamed856() => [ +core.List buildUnnamed865() => [ buildBinding(), buildBinding(), ]; -void checkUnnamed856(core.List o) { +void checkUnnamed865(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkBinding(o[0]); checkBinding(o[1]); @@ -49390,7 +49991,7 @@ api.ZoneSetPolicyRequest buildZoneSetPolicyRequest() { final o = api.ZoneSetPolicyRequest(); buildCounterZoneSetPolicyRequest++; if (buildCounterZoneSetPolicyRequest < 3) { - o.bindings = buildUnnamed856(); + o.bindings = buildUnnamed865(); o.etag = 'foo'; o.policy = buildPolicy(); } @@ -49401,7 +50002,7 @@ api.ZoneSetPolicyRequest buildZoneSetPolicyRequest() { void checkZoneSetPolicyRequest(api.ZoneSetPolicyRequest o) { buildCounterZoneSetPolicyRequest++; if (buildCounterZoneSetPolicyRequest < 3) { - checkUnnamed856(o.bindings!); + checkUnnamed865(o.bindings!); unittest.expect( o.etag!, unittest.equals('foo'), @@ -49411,12 +50012,12 @@ void checkZoneSetPolicyRequest(api.ZoneSetPolicyRequest o) { buildCounterZoneSetPolicyRequest--; } -core.List buildUnnamed857() => [ +core.List buildUnnamed866() => [ 'foo', 'foo', ]; -void checkUnnamed857(core.List o) { +void checkUnnamed866(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -49428,12 +50029,12 @@ void checkUnnamed857(core.List o) { ); } -core.List buildUnnamed858() => [ +core.List buildUnnamed867() => [ 'foo', 'foo', ]; -void checkUnnamed858(core.List o) { +void 
checkUnnamed867(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -49445,12 +50046,12 @@ void checkUnnamed858(core.List o) { ); } -core.List buildUnnamed859() => [ +core.List buildUnnamed868() => [ 'foo', 'foo', ]; -void checkUnnamed859(core.List o) { +void checkUnnamed868(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -49462,12 +50063,12 @@ void checkUnnamed859(core.List o) { ); } -core.List buildUnnamed860() => [ +core.List buildUnnamed869() => [ 'foo', 'foo', ]; -void checkUnnamed860(core.List o) { +void checkUnnamed869(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -49479,12 +50080,12 @@ void checkUnnamed860(core.List o) { ); } -core.List buildUnnamed861() => [ +core.List buildUnnamed870() => [ 'foo', 'foo', ]; -void checkUnnamed861(core.List o) { +void checkUnnamed870(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -50651,16 +51252,6 @@ void main() { }); }); - unittest.group('obj-schema-Condition', () { - unittest.test('to-json--from-json', () async { - final o = buildCondition(); - final oJson = convert.jsonDecode(convert.jsonEncode(o)); - final od = - api.Condition.fromJson(oJson as core.Map); - checkCondition(od); - }); - }); - unittest.group('obj-schema-ConfidentialInstanceConfig', () { unittest.test('to-json--from-json', () async { final o = buildConfidentialInstanceConfig(); @@ -52375,6 +52966,31 @@ void main() { }); }); + unittest.group('obj-schema-InstanceGroupManagerInstanceFlexibilityPolicy', + () { + unittest.test('to-json--from-json', () async { + final o = buildInstanceGroupManagerInstanceFlexibilityPolicy(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.InstanceGroupManagerInstanceFlexibilityPolicy.fromJson( + oJson as core.Map); + checkInstanceGroupManagerInstanceFlexibilityPolicy(od); + }); + }); + + unittest.group( + 'obj-schema-InstanceGroupManagerInstanceFlexibilityPolicyInstanceSelection', + () { + unittest.test('to-json--from-json', () async { + final o = + buildInstanceGroupManagerInstanceFlexibilityPolicyInstanceSelection(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.InstanceGroupManagerInstanceFlexibilityPolicyInstanceSelection + .fromJson(oJson as core.Map); + checkInstanceGroupManagerInstanceFlexibilityPolicyInstanceSelection(od); + }); + }); + unittest.group('obj-schema-InstanceGroupManagerInstanceLifecyclePolicy', () { unittest.test('to-json--from-json', () async { final o = buildInstanceGroupManagerInstanceLifecyclePolicy(); @@ -52560,6 +53176,16 @@ void main() { }); }); + unittest.group('obj-schema-InstanceGroupManagerStandbyPolicy', () { + unittest.test('to-json--from-json', () async { + final o = buildInstanceGroupManagerStandbyPolicy(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.InstanceGroupManagerStandbyPolicy.fromJson( + oJson as core.Map); + checkInstanceGroupManagerStandbyPolicy(od); + }); + }); + unittest.group('obj-schema-InstanceGroupManagerStatus', () { unittest.test('to-json--from-json', () async { final o = buildInstanceGroupManagerStatus(); @@ -52762,6 +53388,16 @@ void main() { }); }); + unittest.group('obj-schema-InstanceGroupManagersResumeInstancesRequest', () { + unittest.test('to-json--from-json', () async { + final o = buildInstanceGroupManagersResumeInstancesRequest(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.InstanceGroupManagersResumeInstancesRequest.fromJson( 
+ oJson as core.Map); + checkInstanceGroupManagersResumeInstancesRequest(od); + }); + }); + unittest.group('obj-schema-InstanceGroupManagersScopedListWarningData', () { unittest.test('to-json--from-json', () async { final o = buildInstanceGroupManagersScopedListWarningData(); @@ -52813,6 +53449,36 @@ void main() { }); }); + unittest.group('obj-schema-InstanceGroupManagersStartInstancesRequest', () { + unittest.test('to-json--from-json', () async { + final o = buildInstanceGroupManagersStartInstancesRequest(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.InstanceGroupManagersStartInstancesRequest.fromJson( + oJson as core.Map); + checkInstanceGroupManagersStartInstancesRequest(od); + }); + }); + + unittest.group('obj-schema-InstanceGroupManagersStopInstancesRequest', () { + unittest.test('to-json--from-json', () async { + final o = buildInstanceGroupManagersStopInstancesRequest(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.InstanceGroupManagersStopInstancesRequest.fromJson( + oJson as core.Map); + checkInstanceGroupManagersStopInstancesRequest(od); + }); + }); + + unittest.group('obj-schema-InstanceGroupManagersSuspendInstancesRequest', () { + unittest.test('to-json--from-json', () async { + final o = buildInstanceGroupManagersSuspendInstancesRequest(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.InstanceGroupManagersSuspendInstancesRequest.fromJson( + oJson as core.Map); + checkInstanceGroupManagersSuspendInstancesRequest(od); + }); + }); + unittest.group('obj-schema-InstanceGroupManagersUpdatePerInstanceConfigsReq', () { unittest.test('to-json--from-json', () async { @@ -54059,56 +54725,6 @@ void main() { }); }); - unittest.group('obj-schema-LogConfig', () { - unittest.test('to-json--from-json', () async { - final o = buildLogConfig(); - final oJson = convert.jsonDecode(convert.jsonEncode(o)); - final od = - api.LogConfig.fromJson(oJson as core.Map); - checkLogConfig(od); - }); - }); - - unittest.group('obj-schema-LogConfigCloudAuditOptions', () { - unittest.test('to-json--from-json', () async { - final o = buildLogConfigCloudAuditOptions(); - final oJson = convert.jsonDecode(convert.jsonEncode(o)); - final od = api.LogConfigCloudAuditOptions.fromJson( - oJson as core.Map); - checkLogConfigCloudAuditOptions(od); - }); - }); - - unittest.group('obj-schema-LogConfigCounterOptions', () { - unittest.test('to-json--from-json', () async { - final o = buildLogConfigCounterOptions(); - final oJson = convert.jsonDecode(convert.jsonEncode(o)); - final od = api.LogConfigCounterOptions.fromJson( - oJson as core.Map); - checkLogConfigCounterOptions(od); - }); - }); - - unittest.group('obj-schema-LogConfigCounterOptionsCustomField', () { - unittest.test('to-json--from-json', () async { - final o = buildLogConfigCounterOptionsCustomField(); - final oJson = convert.jsonDecode(convert.jsonEncode(o)); - final od = api.LogConfigCounterOptionsCustomField.fromJson( - oJson as core.Map); - checkLogConfigCounterOptionsCustomField(od); - }); - }); - - unittest.group('obj-schema-LogConfigDataAccessOptions', () { - unittest.test('to-json--from-json', () async { - final o = buildLogConfigDataAccessOptions(); - final oJson = convert.jsonDecode(convert.jsonEncode(o)); - final od = api.LogConfigDataAccessOptions.fromJson( - oJson as core.Map); - checkLogConfigDataAccessOptions(od); - }); - }); - unittest.group('obj-schema-MachineImage', () { unittest.test('to-json--from-json', () async { final o = buildMachineImage(); @@ 
-54331,6 +54947,17 @@ void main() { }); }); + unittest.group('obj-schema-ManagedInstancePropertiesFromFlexibilityPolicy', + () { + unittest.test('to-json--from-json', () async { + final o = buildManagedInstancePropertiesFromFlexibilityPolicy(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.ManagedInstancePropertiesFromFlexibilityPolicy.fromJson( + oJson as core.Map); + checkManagedInstancePropertiesFromFlexibilityPolicy(od); + }); + }); + unittest.group('obj-schema-ManagedInstanceVersion', () { unittest.test('to-json--from-json', () async { final o = buildManagedInstanceVersion(); @@ -54899,6 +55526,66 @@ void main() { }); }); + unittest.group('obj-schema-NetworkProfile', () { + unittest.test('to-json--from-json', () async { + final o = buildNetworkProfile(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.NetworkProfile.fromJson( + oJson as core.Map); + checkNetworkProfile(od); + }); + }); + + unittest.group('obj-schema-NetworkProfileLocation', () { + unittest.test('to-json--from-json', () async { + final o = buildNetworkProfileLocation(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.NetworkProfileLocation.fromJson( + oJson as core.Map); + checkNetworkProfileLocation(od); + }); + }); + + unittest.group('obj-schema-NetworkProfileNetworkFeatures', () { + unittest.test('to-json--from-json', () async { + final o = buildNetworkProfileNetworkFeatures(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.NetworkProfileNetworkFeatures.fromJson( + oJson as core.Map); + checkNetworkProfileNetworkFeatures(od); + }); + }); + + unittest.group('obj-schema-NetworkProfilesListResponseWarningData', () { + unittest.test('to-json--from-json', () async { + final o = buildNetworkProfilesListResponseWarningData(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.NetworkProfilesListResponseWarningData.fromJson( + oJson as core.Map); + checkNetworkProfilesListResponseWarningData(od); + }); + }); + + unittest.group('obj-schema-NetworkProfilesListResponseWarning', () { + unittest.test('to-json--from-json', () async { + final o = buildNetworkProfilesListResponseWarning(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.NetworkProfilesListResponseWarning.fromJson( + oJson as core.Map); + checkNetworkProfilesListResponseWarning(od); + }); + }); + + unittest.group('obj-schema-NetworkProfilesListResponse', () { + unittest.test('to-json--from-json', () async { + final o = buildNetworkProfilesListResponse(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.NetworkProfilesListResponse.fromJson( + oJson as core.Map); + checkNetworkProfilesListResponse(od); + }); + }); + unittest.group('obj-schema-NetworkRoutingConfig', () { unittest.test('to-json--from-json', () async { final o = buildNetworkRoutingConfig(); @@ -56466,6 +57153,17 @@ void main() { }); }); + unittest.group('obj-schema-RegionInstanceGroupManagersResumeInstancesRequest', + () { + unittest.test('to-json--from-json', () async { + final o = buildRegionInstanceGroupManagersResumeInstancesRequest(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.RegionInstanceGroupManagersResumeInstancesRequest.fromJson( + oJson as core.Map); + checkRegionInstanceGroupManagersResumeInstancesRequest(od); + }); + }); + unittest.group('obj-schema-RegionInstanceGroupManagersSetTargetPoolsRequest', () { unittest.test('to-json--from-json', () async { @@ -56488,6 
+57186,40 @@ void main() { }); }); + unittest.group('obj-schema-RegionInstanceGroupManagersStartInstancesRequest', + () { + unittest.test('to-json--from-json', () async { + final o = buildRegionInstanceGroupManagersStartInstancesRequest(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.RegionInstanceGroupManagersStartInstancesRequest.fromJson( + oJson as core.Map); + checkRegionInstanceGroupManagersStartInstancesRequest(od); + }); + }); + + unittest.group('obj-schema-RegionInstanceGroupManagersStopInstancesRequest', + () { + unittest.test('to-json--from-json', () async { + final o = buildRegionInstanceGroupManagersStopInstancesRequest(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.RegionInstanceGroupManagersStopInstancesRequest.fromJson( + oJson as core.Map); + checkRegionInstanceGroupManagersStopInstancesRequest(od); + }); + }); + + unittest.group( + 'obj-schema-RegionInstanceGroupManagersSuspendInstancesRequest', () { + unittest.test('to-json--from-json', () async { + final o = buildRegionInstanceGroupManagersSuspendInstancesRequest(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.RegionInstanceGroupManagersSuspendInstancesRequest.fromJson( + oJson as core.Map); + checkRegionInstanceGroupManagersSuspendInstancesRequest(od); + }); + }); + unittest.group('obj-schema-RegionInstanceGroupsListInstancesWarningData', () { unittest.test('to-json--from-json', () async { final o = buildRegionInstanceGroupsListInstancesWarningData(); @@ -57426,16 +58158,6 @@ void main() { }); }); - unittest.group('obj-schema-Rule', () { - unittest.test('to-json--from-json', () async { - final o = buildRule(); - final oJson = convert.jsonDecode(convert.jsonEncode(o)); - final od = - api.Rule.fromJson(oJson as core.Map); - checkRule(od); - }); - }); - unittest.group('obj-schema-SSLHealthCheck', () { unittest.test('to-json--from-json', () async { final o = buildSSLHealthCheck(); @@ -67091,7 +67813,7 @@ void main() { final arg_project = 'foo'; final arg_zone = 'foo'; final arg_disk = 'foo'; - final arg_paths = buildUnnamed857(); + final arg_paths = buildUnnamed866(); final arg_requestId = 'foo'; final arg_updateMask = 'foo'; final arg_$fields = 'foo'; @@ -78711,21 +79433,19 @@ void main() { checkOperation(response as api.Operation); }); - unittest.test('method--setInstanceTemplate', () async { + unittest.test('method--resumeInstances', () async { final mock = HttpServerMock(); final res = api.ComputeApi(mock).instanceGroupManagers; - final arg_request = - buildInstanceGroupManagersSetInstanceTemplateRequest(); + final arg_request = buildInstanceGroupManagersResumeInstancesRequest(); final arg_project = 'foo'; final arg_zone = 'foo'; final arg_instanceGroupManager = 'foo'; final arg_requestId = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = - api.InstanceGroupManagersSetInstanceTemplateRequest.fromJson( - json as core.Map); - checkInstanceGroupManagersSetInstanceTemplateRequest(obj); + final obj = api.InstanceGroupManagersResumeInstancesRequest.fromJson( + json as core.Map); + checkInstanceGroupManagersResumeInstancesRequest(obj); final path = req.url.path; var pathOffset = 0; @@ -78774,7 +79494,7 @@ void main() { unittest.equals('/instanceGroupManagers/'), ); pathOffset += 23; - index = path.indexOf('/setInstanceTemplate', pathOffset); + index = path.indexOf('/resumeInstances', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = 
core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); @@ -78784,10 +79504,10 @@ void main() { unittest.equals('$arg_instanceGroupManager'), ); unittest.expect( - path.substring(pathOffset, pathOffset + 20), - unittest.equals('/setInstanceTemplate'), + path.substring(pathOffset, pathOffset + 16), + unittest.equals('/resumeInstances'), ); - pathOffset += 20; + pathOffset += 16; final query = req.url.query; var queryOffset = 0; @@ -78819,25 +79539,27 @@ void main() { final resp = convert.json.encode(buildOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.setInstanceTemplate( + final response = await res.resumeInstances( arg_request, arg_project, arg_zone, arg_instanceGroupManager, requestId: arg_requestId, $fields: arg_$fields); checkOperation(response as api.Operation); }); - unittest.test('method--setTargetPools', () async { + unittest.test('method--setInstanceTemplate', () async { final mock = HttpServerMock(); final res = api.ComputeApi(mock).instanceGroupManagers; - final arg_request = buildInstanceGroupManagersSetTargetPoolsRequest(); + final arg_request = + buildInstanceGroupManagersSetInstanceTemplateRequest(); final arg_project = 'foo'; final arg_zone = 'foo'; final arg_instanceGroupManager = 'foo'; final arg_requestId = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.InstanceGroupManagersSetTargetPoolsRequest.fromJson( - json as core.Map); - checkInstanceGroupManagersSetTargetPoolsRequest(obj); + final obj = + api.InstanceGroupManagersSetInstanceTemplateRequest.fromJson( + json as core.Map); + checkInstanceGroupManagersSetInstanceTemplateRequest(obj); final path = req.url.path; var pathOffset = 0; @@ -78886,7 +79608,7 @@ void main() { unittest.equals('/instanceGroupManagers/'), ); pathOffset += 23; - index = path.indexOf('/setTargetPools', pathOffset); + index = path.indexOf('/setInstanceTemplate', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); @@ -78896,10 +79618,10 @@ void main() { unittest.equals('$arg_instanceGroupManager'), ); unittest.expect( - path.substring(pathOffset, pathOffset + 15), - unittest.equals('/setTargetPools'), + path.substring(pathOffset, pathOffset + 20), + unittest.equals('/setInstanceTemplate'), ); - pathOffset += 15; + pathOffset += 20; final query = req.url.query; var queryOffset = 0; @@ -78931,27 +79653,25 @@ void main() { final resp = convert.json.encode(buildOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.setTargetPools( + final response = await res.setInstanceTemplate( arg_request, arg_project, arg_zone, arg_instanceGroupManager, requestId: arg_requestId, $fields: arg_$fields); checkOperation(response as api.Operation); }); - unittest.test('method--updatePerInstanceConfigs', () async { + unittest.test('method--setTargetPools', () async { final mock = HttpServerMock(); final res = api.ComputeApi(mock).instanceGroupManagers; - final arg_request = - buildInstanceGroupManagersUpdatePerInstanceConfigsReq(); + final arg_request = buildInstanceGroupManagersSetTargetPoolsRequest(); final arg_project = 'foo'; final arg_zone = 'foo'; final arg_instanceGroupManager = 'foo'; final arg_requestId = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = - api.InstanceGroupManagersUpdatePerInstanceConfigsReq.fromJson( - 
json as core.Map); - checkInstanceGroupManagersUpdatePerInstanceConfigsReq(obj); + final obj = api.InstanceGroupManagersSetTargetPoolsRequest.fromJson( + json as core.Map); + checkInstanceGroupManagersSetTargetPoolsRequest(obj); final path = req.url.path; var pathOffset = 0; @@ -79000,7 +79720,7 @@ void main() { unittest.equals('/instanceGroupManagers/'), ); pathOffset += 23; - index = path.indexOf('/updatePerInstanceConfigs', pathOffset); + index = path.indexOf('/setTargetPools', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); @@ -79010,10 +79730,10 @@ void main() { unittest.equals('$arg_instanceGroupManager'), ); unittest.expect( - path.substring(pathOffset, pathOffset + 25), - unittest.equals('/updatePerInstanceConfigs'), + path.substring(pathOffset, pathOffset + 15), + unittest.equals('/setTargetPools'), ); - pathOffset += 25; + pathOffset += 15; final query = req.url.query; var queryOffset = 0; @@ -79045,27 +79765,25 @@ void main() { final resp = convert.json.encode(buildOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.updatePerInstanceConfigs( + final response = await res.setTargetPools( arg_request, arg_project, arg_zone, arg_instanceGroupManager, requestId: arg_requestId, $fields: arg_$fields); checkOperation(response as api.Operation); }); - }); - unittest.group('resource-InstanceGroupsResource', () { - unittest.test('method--addInstances', () async { + unittest.test('method--startInstances', () async { final mock = HttpServerMock(); - final res = api.ComputeApi(mock).instanceGroups; - final arg_request = buildInstanceGroupsAddInstancesRequest(); + final res = api.ComputeApi(mock).instanceGroupManagers; + final arg_request = buildInstanceGroupManagersStartInstancesRequest(); final arg_project = 'foo'; final arg_zone = 'foo'; - final arg_instanceGroup = 'foo'; + final arg_instanceGroupManager = 'foo'; final arg_requestId = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.InstanceGroupsAddInstancesRequest.fromJson( + final obj = api.InstanceGroupManagersStartInstancesRequest.fromJson( json as core.Map); - checkInstanceGroupsAddInstancesRequest(obj); + checkInstanceGroupManagersStartInstancesRequest(obj); final path = req.url.path; var pathOffset = 0; @@ -79100,7 +79818,7 @@ void main() { unittest.equals('/zones/'), ); pathOffset += 7; - index = path.indexOf('/instanceGroups/', pathOffset); + index = path.indexOf('/instanceGroupManagers/', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); @@ -79110,24 +79828,24 @@ void main() { unittest.equals('$arg_zone'), ); unittest.expect( - path.substring(pathOffset, pathOffset + 16), - unittest.equals('/instanceGroups/'), + path.substring(pathOffset, pathOffset + 23), + unittest.equals('/instanceGroupManagers/'), ); - pathOffset += 16; - index = path.indexOf('/addInstances', pathOffset); + pathOffset += 23; + index = path.indexOf('/startInstances', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); pathOffset = index; unittest.expect( subPart, - unittest.equals('$arg_instanceGroup'), + unittest.equals('$arg_instanceGroupManager'), ); unittest.expect( - path.substring(pathOffset, pathOffset + 13), - unittest.equals('/addInstances'), + path.substring(pathOffset, 
pathOffset + 15), + unittest.equals('/startInstances'), ); - pathOffset += 13; + pathOffset += 15; final query = req.url.query; var queryOffset = 0; @@ -79159,25 +79877,26 @@ void main() { final resp = convert.json.encode(buildOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.addInstances( - arg_request, arg_project, arg_zone, arg_instanceGroup, + final response = await res.startInstances( + arg_request, arg_project, arg_zone, arg_instanceGroupManager, requestId: arg_requestId, $fields: arg_$fields); checkOperation(response as api.Operation); }); - unittest.test('method--aggregatedList', () async { + unittest.test('method--stopInstances', () async { final mock = HttpServerMock(); - final res = api.ComputeApi(mock).instanceGroups; + final res = api.ComputeApi(mock).instanceGroupManagers; + final arg_request = buildInstanceGroupManagersStopInstancesRequest(); final arg_project = 'foo'; - final arg_filter = 'foo'; - final arg_includeAllScopes = true; - final arg_maxResults = 42; - final arg_orderBy = 'foo'; - final arg_pageToken = 'foo'; - final arg_returnPartialSuccess = true; - final arg_serviceProjectNumber = 'foo'; + final arg_zone = 'foo'; + final arg_instanceGroupManager = 'foo'; + final arg_requestId = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.InstanceGroupManagersStopInstancesRequest.fromJson( + json as core.Map); + checkInstanceGroupManagersStopInstancesRequest(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -79197,7 +79916,7 @@ void main() { unittest.equals('projects/'), ); pathOffset += 9; - index = path.indexOf('/aggregated/instanceGroups', pathOffset); + index = path.indexOf('/zones/', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); @@ -79207,10 +79926,38 @@ void main() { unittest.equals('$arg_project'), ); unittest.expect( - path.substring(pathOffset, pathOffset + 26), - unittest.equals('/aggregated/instanceGroups'), + path.substring(pathOffset, pathOffset + 7), + unittest.equals('/zones/'), ); - pathOffset += 26; + pathOffset += 7; + index = path.indexOf('/instanceGroupManagers/', pathOffset); + unittest.expect(index >= 0, unittest.isTrue); + subPart = + core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); + pathOffset = index; + unittest.expect( + subPart, + unittest.equals('$arg_zone'), + ); + unittest.expect( + path.substring(pathOffset, pathOffset + 23), + unittest.equals('/instanceGroupManagers/'), + ); + pathOffset += 23; + index = path.indexOf('/stopInstances', pathOffset); + unittest.expect(index >= 0, unittest.isTrue); + subPart = + core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); + pathOffset = index; + unittest.expect( + subPart, + unittest.equals('$arg_instanceGroupManager'), + ); + unittest.expect( + path.substring(pathOffset, pathOffset + 14), + unittest.equals('/stopInstances'), + ); + pathOffset += 14; final query = req.url.query; var queryOffset = 0; @@ -79228,32 +79975,8 @@ void main() { } } unittest.expect( - queryMap['filter']!.first, - unittest.equals(arg_filter), - ); - unittest.expect( - queryMap['includeAllScopes']!.first, - unittest.equals('$arg_includeAllScopes'), - ); - unittest.expect( - core.int.parse(queryMap['maxResults']!.first), - unittest.equals(arg_maxResults), - ); - unittest.expect( - queryMap['orderBy']!.first, - unittest.equals(arg_orderBy), - ); - 
unittest.expect( - queryMap['pageToken']!.first, - unittest.equals(arg_pageToken), - ); - unittest.expect( - queryMap['returnPartialSuccess']!.first, - unittest.equals('$arg_returnPartialSuccess'), - ); - unittest.expect( - queryMap['serviceProjectNumber']!.first, - unittest.equals(arg_serviceProjectNumber), + queryMap['requestId']!.first, + unittest.equals(arg_requestId), ); unittest.expect( queryMap['fields']!.first, @@ -79263,31 +79986,29 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildInstanceGroupAggregatedList()); + final resp = convert.json.encode(buildOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.aggregatedList(arg_project, - filter: arg_filter, - includeAllScopes: arg_includeAllScopes, - maxResults: arg_maxResults, - orderBy: arg_orderBy, - pageToken: arg_pageToken, - returnPartialSuccess: arg_returnPartialSuccess, - serviceProjectNumber: arg_serviceProjectNumber, - $fields: arg_$fields); - checkInstanceGroupAggregatedList( - response as api.InstanceGroupAggregatedList); + final response = await res.stopInstances( + arg_request, arg_project, arg_zone, arg_instanceGroupManager, + requestId: arg_requestId, $fields: arg_$fields); + checkOperation(response as api.Operation); }); - unittest.test('method--delete', () async { + unittest.test('method--suspendInstances', () async { final mock = HttpServerMock(); - final res = api.ComputeApi(mock).instanceGroups; + final res = api.ComputeApi(mock).instanceGroupManagers; + final arg_request = buildInstanceGroupManagersSuspendInstancesRequest(); final arg_project = 'foo'; final arg_zone = 'foo'; - final arg_instanceGroup = 'foo'; + final arg_instanceGroupManager = 'foo'; final arg_requestId = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.InstanceGroupManagersSuspendInstancesRequest.fromJson( + json as core.Map); + checkInstanceGroupManagersSuspendInstancesRequest(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -79321,7 +80042,7 @@ void main() { unittest.equals('/zones/'), ); pathOffset += 7; - index = path.indexOf('/instanceGroups/', pathOffset); + index = path.indexOf('/instanceGroupManagers/', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); @@ -79331,16 +80052,24 @@ void main() { unittest.equals('$arg_zone'), ); unittest.expect( - path.substring(pathOffset, pathOffset + 16), - unittest.equals('/instanceGroups/'), + path.substring(pathOffset, pathOffset + 23), + unittest.equals('/instanceGroupManagers/'), ); - pathOffset += 16; - subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset)); - pathOffset = path.length; + pathOffset += 23; + index = path.indexOf('/suspendInstances', pathOffset); + unittest.expect(index >= 0, unittest.isTrue); + subPart = + core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); + pathOffset = index; unittest.expect( subPart, - unittest.equals('$arg_instanceGroup'), + unittest.equals('$arg_instanceGroupManager'), ); + unittest.expect( + path.substring(pathOffset, pathOffset + 17), + unittest.equals('/suspendInstances'), + ); + pathOffset += 17; final query = req.url.query; var queryOffset = 0; @@ -79372,20 +80101,28 @@ void main() { final resp = convert.json.encode(buildOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await 
res.delete( - arg_project, arg_zone, arg_instanceGroup, + final response = await res.suspendInstances( + arg_request, arg_project, arg_zone, arg_instanceGroupManager, requestId: arg_requestId, $fields: arg_$fields); checkOperation(response as api.Operation); }); - unittest.test('method--get', () async { + unittest.test('method--updatePerInstanceConfigs', () async { final mock = HttpServerMock(); - final res = api.ComputeApi(mock).instanceGroups; + final res = api.ComputeApi(mock).instanceGroupManagers; + final arg_request = + buildInstanceGroupManagersUpdatePerInstanceConfigsReq(); final arg_project = 'foo'; final arg_zone = 'foo'; - final arg_instanceGroup = 'foo'; + final arg_instanceGroupManager = 'foo'; + final arg_requestId = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = + api.InstanceGroupManagersUpdatePerInstanceConfigsReq.fromJson( + json as core.Map); + checkInstanceGroupManagersUpdatePerInstanceConfigsReq(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -79419,7 +80156,7 @@ void main() { unittest.equals('/zones/'), ); pathOffset += 7; - index = path.indexOf('/instanceGroups/', pathOffset); + index = path.indexOf('/instanceGroupManagers/', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); @@ -79429,16 +80166,24 @@ void main() { unittest.equals('$arg_zone'), ); unittest.expect( - path.substring(pathOffset, pathOffset + 16), - unittest.equals('/instanceGroups/'), + path.substring(pathOffset, pathOffset + 23), + unittest.equals('/instanceGroupManagers/'), ); - pathOffset += 16; - subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset)); - pathOffset = path.length; + pathOffset += 23; + index = path.indexOf('/updatePerInstanceConfigs', pathOffset); + unittest.expect(index >= 0, unittest.isTrue); + subPart = + core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); + pathOffset = index; unittest.expect( subPart, - unittest.equals('$arg_instanceGroup'), + unittest.equals('$arg_instanceGroupManager'), + ); + unittest.expect( + path.substring(pathOffset, pathOffset + 25), + unittest.equals('/updatePerInstanceConfigs'), ); + pathOffset += 25; final query = req.url.query; var queryOffset = 0; @@ -79455,6 +80200,10 @@ void main() { ); } } + unittest.expect( + queryMap['requestId']!.first, + unittest.equals(arg_requestId), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -79463,26 +80212,30 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildInstanceGroup()); + final resp = convert.json.encode(buildOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.get(arg_project, arg_zone, arg_instanceGroup, - $fields: arg_$fields); - checkInstanceGroup(response as api.InstanceGroup); + final response = await res.updatePerInstanceConfigs( + arg_request, arg_project, arg_zone, arg_instanceGroupManager, + requestId: arg_requestId, $fields: arg_$fields); + checkOperation(response as api.Operation); }); + }); - unittest.test('method--insert', () async { + unittest.group('resource-InstanceGroupsResource', () { + unittest.test('method--addInstances', () async { final mock = HttpServerMock(); final res = api.ComputeApi(mock).instanceGroups; - final arg_request = buildInstanceGroup(); + final arg_request = buildInstanceGroupsAddInstancesRequest(); final 
arg_project = 'foo'; final arg_zone = 'foo'; + final arg_instanceGroup = 'foo'; final arg_requestId = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.InstanceGroup.fromJson( + final obj = api.InstanceGroupsAddInstancesRequest.fromJson( json as core.Map); - checkInstanceGroup(obj); + checkInstanceGroupsAddInstancesRequest(obj); final path = req.url.path; var pathOffset = 0; @@ -79517,7 +80270,7 @@ void main() { unittest.equals('/zones/'), ); pathOffset += 7; - index = path.indexOf('/instanceGroups', pathOffset); + index = path.indexOf('/instanceGroups/', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); @@ -79527,10 +80280,24 @@ void main() { unittest.equals('$arg_zone'), ); unittest.expect( - path.substring(pathOffset, pathOffset + 15), - unittest.equals('/instanceGroups'), + path.substring(pathOffset, pathOffset + 16), + unittest.equals('/instanceGroups/'), ); - pathOffset += 15; + pathOffset += 16; + index = path.indexOf('/addInstances', pathOffset); + unittest.expect(index >= 0, unittest.isTrue); + subPart = + core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); + pathOffset = index; + unittest.expect( + subPart, + unittest.equals('$arg_instanceGroup'), + ); + unittest.expect( + path.substring(pathOffset, pathOffset + 13), + unittest.equals('/addInstances'), + ); + pathOffset += 13; final query = req.url.query; var queryOffset = 0; @@ -79562,21 +80329,23 @@ void main() { final resp = convert.json.encode(buildOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.insert(arg_request, arg_project, arg_zone, + final response = await res.addInstances( + arg_request, arg_project, arg_zone, arg_instanceGroup, requestId: arg_requestId, $fields: arg_$fields); checkOperation(response as api.Operation); }); - unittest.test('method--list', () async { + unittest.test('method--aggregatedList', () async { final mock = HttpServerMock(); final res = api.ComputeApi(mock).instanceGroups; final arg_project = 'foo'; - final arg_zone = 'foo'; final arg_filter = 'foo'; + final arg_includeAllScopes = true; final arg_maxResults = 42; final arg_orderBy = 'foo'; final arg_pageToken = 'foo'; final arg_returnPartialSuccess = true; + final arg_serviceProjectNumber = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -79598,7 +80367,7 @@ void main() { unittest.equals('projects/'), ); pathOffset += 9; - index = path.indexOf('/zones/', pathOffset); + index = path.indexOf('/aggregated/instanceGroups', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); @@ -79608,24 +80377,10 @@ void main() { unittest.equals('$arg_project'), ); unittest.expect( - path.substring(pathOffset, pathOffset + 7), - unittest.equals('/zones/'), - ); - pathOffset += 7; - index = path.indexOf('/instanceGroups', pathOffset); - unittest.expect(index >= 0, unittest.isTrue); - subPart = - core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); - pathOffset = index; - unittest.expect( - subPart, - unittest.equals('$arg_zone'), - ); - unittest.expect( - path.substring(pathOffset, pathOffset + 15), - unittest.equals('/instanceGroups'), + path.substring(pathOffset, pathOffset + 26), + unittest.equals('/aggregated/instanceGroups'), ); - pathOffset += 15; + pathOffset 
+= 26; final query = req.url.query; var queryOffset = 0; @@ -79646,6 +80401,10 @@ void main() { queryMap['filter']!.first, unittest.equals(arg_filter), ); + unittest.expect( + queryMap['includeAllScopes']!.first, + unittest.equals('$arg_includeAllScopes'), + ); unittest.expect( core.int.parse(queryMap['maxResults']!.first), unittest.equals(arg_maxResults), @@ -79662,6 +80421,10 @@ void main() { queryMap['returnPartialSuccess']!.first, unittest.equals('$arg_returnPartialSuccess'), ); + unittest.expect( + queryMap['serviceProjectNumber']!.first, + unittest.equals(arg_serviceProjectNumber), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -79670,37 +80433,31 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildInstanceGroupList()); + final resp = convert.json.encode(buildInstanceGroupAggregatedList()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.list(arg_project, arg_zone, + final response = await res.aggregatedList(arg_project, filter: arg_filter, + includeAllScopes: arg_includeAllScopes, maxResults: arg_maxResults, orderBy: arg_orderBy, pageToken: arg_pageToken, returnPartialSuccess: arg_returnPartialSuccess, + serviceProjectNumber: arg_serviceProjectNumber, $fields: arg_$fields); - checkInstanceGroupList(response as api.InstanceGroupList); + checkInstanceGroupAggregatedList( + response as api.InstanceGroupAggregatedList); }); - unittest.test('method--listInstances', () async { + unittest.test('method--delete', () async { final mock = HttpServerMock(); final res = api.ComputeApi(mock).instanceGroups; - final arg_request = buildInstanceGroupsListInstancesRequest(); final arg_project = 'foo'; final arg_zone = 'foo'; final arg_instanceGroup = 'foo'; - final arg_filter = 'foo'; - final arg_maxResults = 42; - final arg_orderBy = 'foo'; - final arg_pageToken = 'foo'; - final arg_returnPartialSuccess = true; + final arg_requestId = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.InstanceGroupsListInstancesRequest.fromJson( - json as core.Map); - checkInstanceGroupsListInstancesRequest(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -79748,20 +80505,12 @@ void main() { unittest.equals('/instanceGroups/'), ); pathOffset += 16; - index = path.indexOf('/listInstances', pathOffset); - unittest.expect(index >= 0, unittest.isTrue); - subPart = - core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); - pathOffset = index; + subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset)); + pathOffset = path.length; unittest.expect( subPart, unittest.equals('$arg_instanceGroup'), ); - unittest.expect( - path.substring(pathOffset, pathOffset + 14), - unittest.equals('/listInstances'), - ); - pathOffset += 14; final query = req.url.query; var queryOffset = 0; @@ -79779,24 +80528,8 @@ void main() { } } unittest.expect( - queryMap['filter']!.first, - unittest.equals(arg_filter), - ); - unittest.expect( - core.int.parse(queryMap['maxResults']!.first), - unittest.equals(arg_maxResults), - ); - unittest.expect( - queryMap['orderBy']!.first, - unittest.equals(arg_orderBy), - ); - unittest.expect( - queryMap['pageToken']!.first, - unittest.equals(arg_pageToken), - ); - unittest.expect( - queryMap['returnPartialSuccess']!.first, - unittest.equals('$arg_returnPartialSuccess'), + queryMap['requestId']!.first, + unittest.equals(arg_requestId), ); 
unittest.expect( queryMap['fields']!.first, @@ -79806,35 +80539,23 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildInstanceGroupsListInstances()); + final resp = convert.json.encode(buildOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.listInstances( - arg_request, arg_project, arg_zone, arg_instanceGroup, - filter: arg_filter, - maxResults: arg_maxResults, - orderBy: arg_orderBy, - pageToken: arg_pageToken, - returnPartialSuccess: arg_returnPartialSuccess, - $fields: arg_$fields); - checkInstanceGroupsListInstances( - response as api.InstanceGroupsListInstances); + final response = await res.delete( + arg_project, arg_zone, arg_instanceGroup, + requestId: arg_requestId, $fields: arg_$fields); + checkOperation(response as api.Operation); }); - unittest.test('method--removeInstances', () async { + unittest.test('method--get', () async { final mock = HttpServerMock(); final res = api.ComputeApi(mock).instanceGroups; - final arg_request = buildInstanceGroupsRemoveInstancesRequest(); final arg_project = 'foo'; final arg_zone = 'foo'; final arg_instanceGroup = 'foo'; - final arg_requestId = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.InstanceGroupsRemoveInstancesRequest.fromJson( - json as core.Map); - checkInstanceGroupsRemoveInstancesRequest(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -79882,20 +80603,12 @@ void main() { unittest.equals('/instanceGroups/'), ); pathOffset += 16; - index = path.indexOf('/removeInstances', pathOffset); - unittest.expect(index >= 0, unittest.isTrue); - subPart = - core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); - pathOffset = index; + subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset)); + pathOffset = path.length; unittest.expect( subPart, unittest.equals('$arg_instanceGroup'), ); - unittest.expect( - path.substring(pathOffset, pathOffset + 16), - unittest.equals('/removeInstances'), - ); - pathOffset += 16; final query = req.url.query; var queryOffset = 0; @@ -79912,10 +80625,6 @@ void main() { ); } } - unittest.expect( - queryMap['requestId']!.first, - unittest.equals(arg_requestId), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -79924,28 +80633,26 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildOperation()); + final resp = convert.json.encode(buildInstanceGroup()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.removeInstances( - arg_request, arg_project, arg_zone, arg_instanceGroup, - requestId: arg_requestId, $fields: arg_$fields); - checkOperation(response as api.Operation); + final response = await res.get(arg_project, arg_zone, arg_instanceGroup, + $fields: arg_$fields); + checkInstanceGroup(response as api.InstanceGroup); }); - unittest.test('method--setNamedPorts', () async { + unittest.test('method--insert', () async { final mock = HttpServerMock(); final res = api.ComputeApi(mock).instanceGroups; - final arg_request = buildInstanceGroupsSetNamedPortsRequest(); + final arg_request = buildInstanceGroup(); final arg_project = 'foo'; final arg_zone = 'foo'; - final arg_instanceGroup = 'foo'; final arg_requestId = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = 
api.InstanceGroupsSetNamedPortsRequest.fromJson( + final obj = api.InstanceGroup.fromJson( json as core.Map); - checkInstanceGroupsSetNamedPortsRequest(obj); + checkInstanceGroup(obj); final path = req.url.path; var pathOffset = 0; @@ -79980,7 +80687,7 @@ void main() { unittest.equals('/zones/'), ); pathOffset += 7; - index = path.indexOf('/instanceGroups/', pathOffset); + index = path.indexOf('/instanceGroups', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); @@ -79990,24 +80697,487 @@ void main() { unittest.equals('$arg_zone'), ); unittest.expect( - path.substring(pathOffset, pathOffset + 16), - unittest.equals('/instanceGroups/'), - ); - pathOffset += 16; - index = path.indexOf('/setNamedPorts', pathOffset); - unittest.expect(index >= 0, unittest.isTrue); - subPart = - core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); - pathOffset = index; - unittest.expect( - subPart, - unittest.equals('$arg_instanceGroup'), - ); - unittest.expect( - path.substring(pathOffset, pathOffset + 14), - unittest.equals('/setNamedPorts'), + path.substring(pathOffset, pathOffset + 15), + unittest.equals('/instanceGroups'), ); - pathOffset += 14; + pathOffset += 15; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['requestId']!.first, + unittest.equals(arg_requestId), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildOperation()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.insert(arg_request, arg_project, arg_zone, + requestId: arg_requestId, $fields: arg_$fields); + checkOperation(response as api.Operation); + }); + + unittest.test('method--list', () async { + final mock = HttpServerMock(); + final res = api.ComputeApi(mock).instanceGroups; + final arg_project = 'foo'; + final arg_zone = 'foo'; + final arg_filter = 'foo'; + final arg_maxResults = 42; + final arg_orderBy = 'foo'; + final arg_pageToken = 'foo'; + final arg_returnPartialSuccess = true; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 11), + unittest.equals('compute/v1/'), + ); + pathOffset += 11; + unittest.expect( + path.substring(pathOffset, pathOffset + 9), + unittest.equals('projects/'), + ); + pathOffset += 9; + index = path.indexOf('/zones/', pathOffset); + unittest.expect(index >= 0, unittest.isTrue); + subPart = + core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); + pathOffset = index; + unittest.expect( + subPart, + unittest.equals('$arg_project'), + ); + unittest.expect( + path.substring(pathOffset, pathOffset + 7), + unittest.equals('/zones/'), + ); + pathOffset += 7; + index = path.indexOf('/instanceGroups', pathOffset); + 
unittest.expect(index >= 0, unittest.isTrue); + subPart = + core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); + pathOffset = index; + unittest.expect( + subPart, + unittest.equals('$arg_zone'), + ); + unittest.expect( + path.substring(pathOffset, pathOffset + 15), + unittest.equals('/instanceGroups'), + ); + pathOffset += 15; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['filter']!.first, + unittest.equals(arg_filter), + ); + unittest.expect( + core.int.parse(queryMap['maxResults']!.first), + unittest.equals(arg_maxResults), + ); + unittest.expect( + queryMap['orderBy']!.first, + unittest.equals(arg_orderBy), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), + ); + unittest.expect( + queryMap['returnPartialSuccess']!.first, + unittest.equals('$arg_returnPartialSuccess'), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildInstanceGroupList()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.list(arg_project, arg_zone, + filter: arg_filter, + maxResults: arg_maxResults, + orderBy: arg_orderBy, + pageToken: arg_pageToken, + returnPartialSuccess: arg_returnPartialSuccess, + $fields: arg_$fields); + checkInstanceGroupList(response as api.InstanceGroupList); + }); + + unittest.test('method--listInstances', () async { + final mock = HttpServerMock(); + final res = api.ComputeApi(mock).instanceGroups; + final arg_request = buildInstanceGroupsListInstancesRequest(); + final arg_project = 'foo'; + final arg_zone = 'foo'; + final arg_instanceGroup = 'foo'; + final arg_filter = 'foo'; + final arg_maxResults = 42; + final arg_orderBy = 'foo'; + final arg_pageToken = 'foo'; + final arg_returnPartialSuccess = true; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.InstanceGroupsListInstancesRequest.fromJson( + json as core.Map); + checkInstanceGroupsListInstancesRequest(obj); + + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 11), + unittest.equals('compute/v1/'), + ); + pathOffset += 11; + unittest.expect( + path.substring(pathOffset, pathOffset + 9), + unittest.equals('projects/'), + ); + pathOffset += 9; + index = path.indexOf('/zones/', pathOffset); + unittest.expect(index >= 0, unittest.isTrue); + subPart = + core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); + pathOffset = index; + unittest.expect( + subPart, + unittest.equals('$arg_project'), + ); + unittest.expect( + path.substring(pathOffset, pathOffset + 7), + unittest.equals('/zones/'), + ); + pathOffset += 7; + index = path.indexOf('/instanceGroups/', pathOffset); + unittest.expect(index >= 0, unittest.isTrue); + subPart = + core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); + pathOffset = index; + 
unittest.expect( + subPart, + unittest.equals('$arg_zone'), + ); + unittest.expect( + path.substring(pathOffset, pathOffset + 16), + unittest.equals('/instanceGroups/'), + ); + pathOffset += 16; + index = path.indexOf('/listInstances', pathOffset); + unittest.expect(index >= 0, unittest.isTrue); + subPart = + core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); + pathOffset = index; + unittest.expect( + subPart, + unittest.equals('$arg_instanceGroup'), + ); + unittest.expect( + path.substring(pathOffset, pathOffset + 14), + unittest.equals('/listInstances'), + ); + pathOffset += 14; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['filter']!.first, + unittest.equals(arg_filter), + ); + unittest.expect( + core.int.parse(queryMap['maxResults']!.first), + unittest.equals(arg_maxResults), + ); + unittest.expect( + queryMap['orderBy']!.first, + unittest.equals(arg_orderBy), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), + ); + unittest.expect( + queryMap['returnPartialSuccess']!.first, + unittest.equals('$arg_returnPartialSuccess'), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildInstanceGroupsListInstances()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.listInstances( + arg_request, arg_project, arg_zone, arg_instanceGroup, + filter: arg_filter, + maxResults: arg_maxResults, + orderBy: arg_orderBy, + pageToken: arg_pageToken, + returnPartialSuccess: arg_returnPartialSuccess, + $fields: arg_$fields); + checkInstanceGroupsListInstances( + response as api.InstanceGroupsListInstances); + }); + + unittest.test('method--removeInstances', () async { + final mock = HttpServerMock(); + final res = api.ComputeApi(mock).instanceGroups; + final arg_request = buildInstanceGroupsRemoveInstancesRequest(); + final arg_project = 'foo'; + final arg_zone = 'foo'; + final arg_instanceGroup = 'foo'; + final arg_requestId = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.InstanceGroupsRemoveInstancesRequest.fromJson( + json as core.Map); + checkInstanceGroupsRemoveInstancesRequest(obj); + + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 11), + unittest.equals('compute/v1/'), + ); + pathOffset += 11; + unittest.expect( + path.substring(pathOffset, pathOffset + 9), + unittest.equals('projects/'), + ); + pathOffset += 9; + index = path.indexOf('/zones/', pathOffset); + unittest.expect(index >= 0, unittest.isTrue); + subPart = + core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); + pathOffset = index; + unittest.expect( + subPart, + unittest.equals('$arg_project'), + ); + unittest.expect( + path.substring(pathOffset, pathOffset + 7), + unittest.equals('/zones/'), + ); + 
pathOffset += 7; + index = path.indexOf('/instanceGroups/', pathOffset); + unittest.expect(index >= 0, unittest.isTrue); + subPart = + core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); + pathOffset = index; + unittest.expect( + subPart, + unittest.equals('$arg_zone'), + ); + unittest.expect( + path.substring(pathOffset, pathOffset + 16), + unittest.equals('/instanceGroups/'), + ); + pathOffset += 16; + index = path.indexOf('/removeInstances', pathOffset); + unittest.expect(index >= 0, unittest.isTrue); + subPart = + core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); + pathOffset = index; + unittest.expect( + subPart, + unittest.equals('$arg_instanceGroup'), + ); + unittest.expect( + path.substring(pathOffset, pathOffset + 16), + unittest.equals('/removeInstances'), + ); + pathOffset += 16; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['requestId']!.first, + unittest.equals(arg_requestId), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildOperation()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.removeInstances( + arg_request, arg_project, arg_zone, arg_instanceGroup, + requestId: arg_requestId, $fields: arg_$fields); + checkOperation(response as api.Operation); + }); + + unittest.test('method--setNamedPorts', () async { + final mock = HttpServerMock(); + final res = api.ComputeApi(mock).instanceGroups; + final arg_request = buildInstanceGroupsSetNamedPortsRequest(); + final arg_project = 'foo'; + final arg_zone = 'foo'; + final arg_instanceGroup = 'foo'; + final arg_requestId = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.InstanceGroupsSetNamedPortsRequest.fromJson( + json as core.Map); + checkInstanceGroupsSetNamedPortsRequest(obj); + + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 11), + unittest.equals('compute/v1/'), + ); + pathOffset += 11; + unittest.expect( + path.substring(pathOffset, pathOffset + 9), + unittest.equals('projects/'), + ); + pathOffset += 9; + index = path.indexOf('/zones/', pathOffset); + unittest.expect(index >= 0, unittest.isTrue); + subPart = + core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); + pathOffset = index; + unittest.expect( + subPart, + unittest.equals('$arg_project'), + ); + unittest.expect( + path.substring(pathOffset, pathOffset + 7), + unittest.equals('/zones/'), + ); + pathOffset += 7; + index = path.indexOf('/instanceGroups/', pathOffset); + unittest.expect(index >= 0, unittest.isTrue); + subPart = + core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); + pathOffset = index; + unittest.expect( + subPart, + unittest.equals('$arg_zone'), + ); + unittest.expect( + path.substring(pathOffset, pathOffset + 16), + 
unittest.equals('/instanceGroups/'), + ); + pathOffset += 16; + index = path.indexOf('/setNamedPorts', pathOffset); + unittest.expect(index >= 0, unittest.isTrue); + subPart = + core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); + pathOffset = index; + unittest.expect( + subPart, + unittest.equals('$arg_instanceGroup'), + ); + unittest.expect( + path.substring(pathOffset, pathOffset + 14), + unittest.equals('/setNamedPorts'), + ); + pathOffset += 14; final query = req.url.query; var queryOffset = 0; @@ -92178,7 +93348,7 @@ void main() { final arg_project = 'foo'; final arg_region = 'foo'; final arg_networkEdgeSecurityService = 'foo'; - final arg_paths = buildUnnamed858(); + final arg_paths = buildUnnamed867(); final arg_requestId = 'foo'; final arg_updateMask = 'foo'; final arg_$fields = 'foo'; @@ -94784,6 +95954,188 @@ void main() { }); }); + unittest.group('resource-NetworkProfilesResource', () { + unittest.test('method--get', () async { + final mock = HttpServerMock(); + final res = api.ComputeApi(mock).networkProfiles; + final arg_project = 'foo'; + final arg_networkProfile = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 11), + unittest.equals('compute/v1/'), + ); + pathOffset += 11; + unittest.expect( + path.substring(pathOffset, pathOffset + 9), + unittest.equals('projects/'), + ); + pathOffset += 9; + index = path.indexOf('/global/networkProfiles/', pathOffset); + unittest.expect(index >= 0, unittest.isTrue); + subPart = + core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); + pathOffset = index; + unittest.expect( + subPart, + unittest.equals('$arg_project'), + ); + unittest.expect( + path.substring(pathOffset, pathOffset + 24), + unittest.equals('/global/networkProfiles/'), + ); + pathOffset += 24; + subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset)); + pathOffset = path.length; + unittest.expect( + subPart, + unittest.equals('$arg_networkProfile'), + ); + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildNetworkProfile()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = + await res.get(arg_project, arg_networkProfile, $fields: arg_$fields); + checkNetworkProfile(response as api.NetworkProfile); + }); + + unittest.test('method--list', () async { + final mock = HttpServerMock(); + final res = api.ComputeApi(mock).networkProfiles; + final arg_project = 'foo'; + final arg_filter = 'foo'; + final arg_maxResults = 42; + final arg_orderBy = 'foo'; + final arg_pageToken = 'foo'; + final arg_returnPartialSuccess = true; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; 
+ var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 11), + unittest.equals('compute/v1/'), + ); + pathOffset += 11; + unittest.expect( + path.substring(pathOffset, pathOffset + 9), + unittest.equals('projects/'), + ); + pathOffset += 9; + index = path.indexOf('/global/networkProfiles', pathOffset); + unittest.expect(index >= 0, unittest.isTrue); + subPart = + core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); + pathOffset = index; + unittest.expect( + subPart, + unittest.equals('$arg_project'), + ); + unittest.expect( + path.substring(pathOffset, pathOffset + 23), + unittest.equals('/global/networkProfiles'), + ); + pathOffset += 23; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['filter']!.first, + unittest.equals(arg_filter), + ); + unittest.expect( + core.int.parse(queryMap['maxResults']!.first), + unittest.equals(arg_maxResults), + ); + unittest.expect( + queryMap['orderBy']!.first, + unittest.equals(arg_orderBy), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), + ); + unittest.expect( + queryMap['returnPartialSuccess']!.first, + unittest.equals('$arg_returnPartialSuccess'), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildNetworkProfilesListResponse()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.list(arg_project, + filter: arg_filter, + maxResults: arg_maxResults, + orderBy: arg_orderBy, + pageToken: arg_pageToken, + returnPartialSuccess: arg_returnPartialSuccess, + $fields: arg_$fields); + checkNetworkProfilesListResponse( + response as api.NetworkProfilesListResponse); + }); + }); + unittest.group('resource-NetworksResource', () { unittest.test('method--addPeering', () async { final mock = HttpServerMock(); @@ -99514,18 +100866,600 @@ void main() { final resp = convert.json.encode(buildOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.disableXpnResource(arg_request, arg_project, + final response = await res.disableXpnResource(arg_request, arg_project, + requestId: arg_requestId, $fields: arg_$fields); + checkOperation(response as api.Operation); + }); + + unittest.test('method--enableXpnHost', () async { + final mock = HttpServerMock(); + final res = api.ComputeApi(mock).projects; + final arg_project = 'foo'; + final arg_requestId = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 11), + unittest.equals('compute/v1/'), + ); + pathOffset += 11; + unittest.expect( + 
path.substring(pathOffset, pathOffset + 9), + unittest.equals('projects/'), + ); + pathOffset += 9; + index = path.indexOf('/enableXpnHost', pathOffset); + unittest.expect(index >= 0, unittest.isTrue); + subPart = + core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); + pathOffset = index; + unittest.expect( + subPart, + unittest.equals('$arg_project'), + ); + unittest.expect( + path.substring(pathOffset, pathOffset + 14), + unittest.equals('/enableXpnHost'), + ); + pathOffset += 14; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['requestId']!.first, + unittest.equals(arg_requestId), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildOperation()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.enableXpnHost(arg_project, + requestId: arg_requestId, $fields: arg_$fields); + checkOperation(response as api.Operation); + }); + + unittest.test('method--enableXpnResource', () async { + final mock = HttpServerMock(); + final res = api.ComputeApi(mock).projects; + final arg_request = buildProjectsEnableXpnResourceRequest(); + final arg_project = 'foo'; + final arg_requestId = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.ProjectsEnableXpnResourceRequest.fromJson( + json as core.Map); + checkProjectsEnableXpnResourceRequest(obj); + + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 11), + unittest.equals('compute/v1/'), + ); + pathOffset += 11; + unittest.expect( + path.substring(pathOffset, pathOffset + 9), + unittest.equals('projects/'), + ); + pathOffset += 9; + index = path.indexOf('/enableXpnResource', pathOffset); + unittest.expect(index >= 0, unittest.isTrue); + subPart = + core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); + pathOffset = index; + unittest.expect( + subPart, + unittest.equals('$arg_project'), + ); + unittest.expect( + path.substring(pathOffset, pathOffset + 18), + unittest.equals('/enableXpnResource'), + ); + pathOffset += 18; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['requestId']!.first, + unittest.equals(arg_requestId), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildOperation()); + return async.Future.value(stringResponse(200, h, resp)); + 
}), true); + final response = await res.enableXpnResource(arg_request, arg_project, + requestId: arg_requestId, $fields: arg_$fields); + checkOperation(response as api.Operation); + }); + + unittest.test('method--get', () async { + final mock = HttpServerMock(); + final res = api.ComputeApi(mock).projects; + final arg_project = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 11), + unittest.equals('compute/v1/'), + ); + pathOffset += 11; + unittest.expect( + path.substring(pathOffset, pathOffset + 9), + unittest.equals('projects/'), + ); + pathOffset += 9; + subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset)); + pathOffset = path.length; + unittest.expect( + subPart, + unittest.equals('$arg_project'), + ); + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildProject()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.get(arg_project, $fields: arg_$fields); + checkProject(response as api.Project); + }); + + unittest.test('method--getXpnHost', () async { + final mock = HttpServerMock(); + final res = api.ComputeApi(mock).projects; + final arg_project = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 11), + unittest.equals('compute/v1/'), + ); + pathOffset += 11; + unittest.expect( + path.substring(pathOffset, pathOffset + 9), + unittest.equals('projects/'), + ); + pathOffset += 9; + index = path.indexOf('/getXpnHost', pathOffset); + unittest.expect(index >= 0, unittest.isTrue); + subPart = + core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); + pathOffset = index; + unittest.expect( + subPart, + unittest.equals('$arg_project'), + ); + unittest.expect( + path.substring(pathOffset, pathOffset + 11), + unittest.equals('/getXpnHost'), + ); + pathOffset += 11; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = 
convert.json.encode(buildProject()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.getXpnHost(arg_project, $fields: arg_$fields); + checkProject(response as api.Project); + }); + + unittest.test('method--getXpnResources', () async { + final mock = HttpServerMock(); + final res = api.ComputeApi(mock).projects; + final arg_project = 'foo'; + final arg_filter = 'foo'; + final arg_maxResults = 42; + final arg_orderBy = 'foo'; + final arg_pageToken = 'foo'; + final arg_returnPartialSuccess = true; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 11), + unittest.equals('compute/v1/'), + ); + pathOffset += 11; + unittest.expect( + path.substring(pathOffset, pathOffset + 9), + unittest.equals('projects/'), + ); + pathOffset += 9; + index = path.indexOf('/getXpnResources', pathOffset); + unittest.expect(index >= 0, unittest.isTrue); + subPart = + core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); + pathOffset = index; + unittest.expect( + subPart, + unittest.equals('$arg_project'), + ); + unittest.expect( + path.substring(pathOffset, pathOffset + 16), + unittest.equals('/getXpnResources'), + ); + pathOffset += 16; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['filter']!.first, + unittest.equals(arg_filter), + ); + unittest.expect( + core.int.parse(queryMap['maxResults']!.first), + unittest.equals(arg_maxResults), + ); + unittest.expect( + queryMap['orderBy']!.first, + unittest.equals(arg_orderBy), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), + ); + unittest.expect( + queryMap['returnPartialSuccess']!.first, + unittest.equals('$arg_returnPartialSuccess'), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildProjectsGetXpnResources()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.getXpnResources(arg_project, + filter: arg_filter, + maxResults: arg_maxResults, + orderBy: arg_orderBy, + pageToken: arg_pageToken, + returnPartialSuccess: arg_returnPartialSuccess, + $fields: arg_$fields); + checkProjectsGetXpnResources(response as api.ProjectsGetXpnResources); + }); + + unittest.test('method--listXpnHosts', () async { + final mock = HttpServerMock(); + final res = api.ComputeApi(mock).projects; + final arg_request = buildProjectsListXpnHostsRequest(); + final arg_project = 'foo'; + final arg_filter = 'foo'; + final arg_maxResults = 42; + final arg_orderBy = 'foo'; + final arg_pageToken = 'foo'; + final arg_returnPartialSuccess = true; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.ProjectsListXpnHostsRequest.fromJson( + json as 
core.Map); + checkProjectsListXpnHostsRequest(obj); + + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 11), + unittest.equals('compute/v1/'), + ); + pathOffset += 11; + unittest.expect( + path.substring(pathOffset, pathOffset + 9), + unittest.equals('projects/'), + ); + pathOffset += 9; + index = path.indexOf('/listXpnHosts', pathOffset); + unittest.expect(index >= 0, unittest.isTrue); + subPart = + core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); + pathOffset = index; + unittest.expect( + subPart, + unittest.equals('$arg_project'), + ); + unittest.expect( + path.substring(pathOffset, pathOffset + 13), + unittest.equals('/listXpnHosts'), + ); + pathOffset += 13; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['filter']!.first, + unittest.equals(arg_filter), + ); + unittest.expect( + core.int.parse(queryMap['maxResults']!.first), + unittest.equals(arg_maxResults), + ); + unittest.expect( + queryMap['orderBy']!.first, + unittest.equals(arg_orderBy), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), + ); + unittest.expect( + queryMap['returnPartialSuccess']!.first, + unittest.equals('$arg_returnPartialSuccess'), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildXpnHostList()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.listXpnHosts(arg_request, arg_project, + filter: arg_filter, + maxResults: arg_maxResults, + orderBy: arg_orderBy, + pageToken: arg_pageToken, + returnPartialSuccess: arg_returnPartialSuccess, + $fields: arg_$fields); + checkXpnHostList(response as api.XpnHostList); + }); + + unittest.test('method--moveDisk', () async { + final mock = HttpServerMock(); + final res = api.ComputeApi(mock).projects; + final arg_request = buildDiskMoveRequest(); + final arg_project = 'foo'; + final arg_requestId = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.DiskMoveRequest.fromJson( + json as core.Map); + checkDiskMoveRequest(obj); + + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 11), + unittest.equals('compute/v1/'), + ); + pathOffset += 11; + unittest.expect( + path.substring(pathOffset, pathOffset + 9), + unittest.equals('projects/'), + ); + pathOffset += 9; + index = path.indexOf('/moveDisk', pathOffset); + unittest.expect(index >= 0, unittest.isTrue); + subPart = + core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); + pathOffset = index; + unittest.expect( + subPart, + unittest.equals('$arg_project'), + ); + unittest.expect( + 
path.substring(pathOffset, pathOffset + 9), + unittest.equals('/moveDisk'), + ); + pathOffset += 9; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['requestId']!.first, + unittest.equals(arg_requestId), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildOperation()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.moveDisk(arg_request, arg_project, requestId: arg_requestId, $fields: arg_$fields); checkOperation(response as api.Operation); }); - unittest.test('method--enableXpnHost', () async { + unittest.test('method--moveInstance', () async { final mock = HttpServerMock(); final res = api.ComputeApi(mock).projects; + final arg_request = buildInstanceMoveRequest(); final arg_project = 'foo'; final arg_requestId = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.InstanceMoveRequest.fromJson( + json as core.Map); + checkInstanceMoveRequest(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -99545,7 +101479,7 @@ void main() { unittest.equals('projects/'), ); pathOffset += 9; - index = path.indexOf('/enableXpnHost', pathOffset); + index = path.indexOf('/moveInstance', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); @@ -99555,10 +101489,10 @@ void main() { unittest.equals('$arg_project'), ); unittest.expect( - path.substring(pathOffset, pathOffset + 14), - unittest.equals('/enableXpnHost'), + path.substring(pathOffset, pathOffset + 13), + unittest.equals('/moveInstance'), ); - pathOffset += 14; + pathOffset += 13; final query = req.url.query; var queryOffset = 0; @@ -99590,22 +101524,22 @@ void main() { final resp = convert.json.encode(buildOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.enableXpnHost(arg_project, + final response = await res.moveInstance(arg_request, arg_project, requestId: arg_requestId, $fields: arg_$fields); checkOperation(response as api.Operation); }); - unittest.test('method--enableXpnResource', () async { + unittest.test('method--setCloudArmorTier', () async { final mock = HttpServerMock(); final res = api.ComputeApi(mock).projects; - final arg_request = buildProjectsEnableXpnResourceRequest(); + final arg_request = buildProjectsSetCloudArmorTierRequest(); final arg_project = 'foo'; final arg_requestId = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.ProjectsEnableXpnResourceRequest.fromJson( + final obj = api.ProjectsSetCloudArmorTierRequest.fromJson( json as core.Map); - checkProjectsEnableXpnResourceRequest(obj); + checkProjectsSetCloudArmorTierRequest(obj); final path = req.url.path; var pathOffset = 0; @@ -99626,7 +101560,7 @@ void main() { unittest.equals('projects/'), ); pathOffset += 9; - index = path.indexOf('/enableXpnResource', pathOffset); + index = 
path.indexOf('/setCloudArmorTier', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); @@ -99637,7 +101571,7 @@ void main() { ); unittest.expect( path.substring(pathOffset, pathOffset + 18), - unittest.equals('/enableXpnResource'), + unittest.equals('/setCloudArmorTier'), ); pathOffset += 18; @@ -99671,79 +101605,23 @@ void main() { final resp = convert.json.encode(buildOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.enableXpnResource(arg_request, arg_project, + final response = await res.setCloudArmorTier(arg_request, arg_project, requestId: arg_requestId, $fields: arg_$fields); checkOperation(response as api.Operation); }); - unittest.test('method--get', () async { + unittest.test('method--setCommonInstanceMetadata', () async { final mock = HttpServerMock(); final res = api.ComputeApi(mock).projects; + final arg_request = buildMetadata(); final arg_project = 'foo'; + final arg_requestId = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final path = req.url.path; - var pathOffset = 0; - core.int index; - core.String subPart; - unittest.expect( - path.substring(pathOffset, pathOffset + 1), - unittest.equals('/'), - ); - pathOffset += 1; - unittest.expect( - path.substring(pathOffset, pathOffset + 11), - unittest.equals('compute/v1/'), - ); - pathOffset += 11; - unittest.expect( - path.substring(pathOffset, pathOffset + 9), - unittest.equals('projects/'), - ); - pathOffset += 9; - subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset)); - pathOffset = path.length; - unittest.expect( - subPart, - unittest.equals('$arg_project'), - ); - - final query = req.url.query; - var queryOffset = 0; - final queryMap = >{}; - void addQueryParam(core.String n, core.String v) => - queryMap.putIfAbsent(n, () => []).add(v); - - if (query.isNotEmpty) { - for (var part in query.split('&')) { - final keyValue = part.split('='); - addQueryParam( - core.Uri.decodeQueryComponent(keyValue[0]), - core.Uri.decodeQueryComponent(keyValue[1]), - ); - } - } - unittest.expect( - queryMap['fields']!.first, - unittest.equals(arg_$fields), - ); - - final h = { - 'content-type': 'application/json; charset=utf-8', - }; - final resp = convert.json.encode(buildProject()); - return async.Future.value(stringResponse(200, h, resp)); - }), true); - final response = await res.get(arg_project, $fields: arg_$fields); - checkProject(response as api.Project); - }); + final obj = + api.Metadata.fromJson(json as core.Map); + checkMetadata(obj); - unittest.test('method--getXpnHost', () async { - final mock = HttpServerMock(); - final res = api.ComputeApi(mock).projects; - final arg_project = 'foo'; - final arg_$fields = 'foo'; - mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; var pathOffset = 0; core.int index; @@ -99763,7 +101641,7 @@ void main() { unittest.equals('projects/'), ); pathOffset += 9; - index = path.indexOf('/getXpnHost', pathOffset); + index = path.indexOf('/setCommonInstanceMetadata', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); @@ -99773,10 +101651,10 @@ void main() { unittest.equals('$arg_project'), ); unittest.expect( - path.substring(pathOffset, pathOffset + 11), - unittest.equals('/getXpnHost'), + path.substring(pathOffset, pathOffset + 26), + 
unittest.equals('/setCommonInstanceMetadata'), ); - pathOffset += 11; + pathOffset += 26; final query = req.url.query; var queryOffset = 0; @@ -99793,6 +101671,10 @@ void main() { ); } } + unittest.expect( + queryMap['requestId']!.first, + unittest.equals(arg_requestId), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -99801,24 +101683,27 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildProject()); + final resp = convert.json.encode(buildOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.getXpnHost(arg_project, $fields: arg_$fields); - checkProject(response as api.Project); + final response = await res.setCommonInstanceMetadata( + arg_request, arg_project, + requestId: arg_requestId, $fields: arg_$fields); + checkOperation(response as api.Operation); }); - unittest.test('method--getXpnResources', () async { + unittest.test('method--setDefaultNetworkTier', () async { final mock = HttpServerMock(); final res = api.ComputeApi(mock).projects; + final arg_request = buildProjectsSetDefaultNetworkTierRequest(); final arg_project = 'foo'; - final arg_filter = 'foo'; - final arg_maxResults = 42; - final arg_orderBy = 'foo'; - final arg_pageToken = 'foo'; - final arg_returnPartialSuccess = true; + final arg_requestId = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.ProjectsSetDefaultNetworkTierRequest.fromJson( + json as core.Map); + checkProjectsSetDefaultNetworkTierRequest(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -99838,7 +101723,7 @@ void main() { unittest.equals('projects/'), ); pathOffset += 9; - index = path.indexOf('/getXpnResources', pathOffset); + index = path.indexOf('/setDefaultNetworkTier', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); @@ -99848,10 +101733,10 @@ void main() { unittest.equals('$arg_project'), ); unittest.expect( - path.substring(pathOffset, pathOffset + 16), - unittest.equals('/getXpnResources'), + path.substring(pathOffset, pathOffset + 22), + unittest.equals('/setDefaultNetworkTier'), ); - pathOffset += 16; + pathOffset += 22; final query = req.url.query; var queryOffset = 0; @@ -99869,24 +101754,8 @@ void main() { } } unittest.expect( - queryMap['filter']!.first, - unittest.equals(arg_filter), - ); - unittest.expect( - core.int.parse(queryMap['maxResults']!.first), - unittest.equals(arg_maxResults), - ); - unittest.expect( - queryMap['orderBy']!.first, - unittest.equals(arg_orderBy), - ); - unittest.expect( - queryMap['pageToken']!.first, - unittest.equals(arg_pageToken), - ); - unittest.expect( - queryMap['returnPartialSuccess']!.first, - unittest.equals('$arg_returnPartialSuccess'), + queryMap['requestId']!.first, + unittest.equals(arg_requestId), ); unittest.expect( queryMap['fields']!.first, @@ -99896,34 +101765,25 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildProjectsGetXpnResources()); + final resp = convert.json.encode(buildOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.getXpnResources(arg_project, - filter: arg_filter, - maxResults: arg_maxResults, - orderBy: arg_orderBy, - pageToken: arg_pageToken, - returnPartialSuccess: arg_returnPartialSuccess, - $fields: 
arg_$fields); - checkProjectsGetXpnResources(response as api.ProjectsGetXpnResources); + final response = await res.setDefaultNetworkTier(arg_request, arg_project, + requestId: arg_requestId, $fields: arg_$fields); + checkOperation(response as api.Operation); }); - unittest.test('method--listXpnHosts', () async { + unittest.test('method--setUsageExportBucket', () async { final mock = HttpServerMock(); final res = api.ComputeApi(mock).projects; - final arg_request = buildProjectsListXpnHostsRequest(); + final arg_request = buildUsageExportLocation(); final arg_project = 'foo'; - final arg_filter = 'foo'; - final arg_maxResults = 42; - final arg_orderBy = 'foo'; - final arg_pageToken = 'foo'; - final arg_returnPartialSuccess = true; + final arg_requestId = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.ProjectsListXpnHostsRequest.fromJson( + final obj = api.UsageExportLocation.fromJson( json as core.Map); - checkProjectsListXpnHostsRequest(obj); + checkUsageExportLocation(obj); final path = req.url.path; var pathOffset = 0; @@ -99944,7 +101804,7 @@ void main() { unittest.equals('projects/'), ); pathOffset += 9; - index = path.indexOf('/listXpnHosts', pathOffset); + index = path.indexOf('/setUsageExportBucket', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); @@ -99954,10 +101814,10 @@ void main() { unittest.equals('$arg_project'), ); unittest.expect( - path.substring(pathOffset, pathOffset + 13), - unittest.equals('/listXpnHosts'), + path.substring(pathOffset, pathOffset + 21), + unittest.equals('/setUsageExportBucket'), ); - pathOffset += 13; + pathOffset += 21; final query = req.url.query; var queryOffset = 0; @@ -99975,24 +101835,8 @@ void main() { } } unittest.expect( - queryMap['filter']!.first, - unittest.equals(arg_filter), - ); - unittest.expect( - core.int.parse(queryMap['maxResults']!.first), - unittest.equals(arg_maxResults), - ); - unittest.expect( - queryMap['orderBy']!.first, - unittest.equals(arg_orderBy), - ); - unittest.expect( - queryMap['pageToken']!.first, - unittest.equals(arg_pageToken), - ); - unittest.expect( - queryMap['returnPartialSuccess']!.first, - unittest.equals('$arg_returnPartialSuccess'), + queryMap['requestId']!.first, + unittest.equals(arg_requestId), ); unittest.expect( queryMap['fields']!.first, @@ -100002,31 +101846,24 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildXpnHostList()); + final resp = convert.json.encode(buildOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.listXpnHosts(arg_request, arg_project, - filter: arg_filter, - maxResults: arg_maxResults, - orderBy: arg_orderBy, - pageToken: arg_pageToken, - returnPartialSuccess: arg_returnPartialSuccess, - $fields: arg_$fields); - checkXpnHostList(response as api.XpnHostList); + final response = await res.setUsageExportBucket(arg_request, arg_project, + requestId: arg_requestId, $fields: arg_$fields); + checkOperation(response as api.Operation); }); + }); - unittest.test('method--moveDisk', () async { + unittest.group('resource-PublicAdvertisedPrefixesResource', () { + unittest.test('method--announce', () async { final mock = HttpServerMock(); - final res = api.ComputeApi(mock).projects; - final arg_request = buildDiskMoveRequest(); + final res = api.ComputeApi(mock).publicAdvertisedPrefixes; final arg_project 
= 'foo'; + final arg_publicAdvertisedPrefix = 'foo'; final arg_requestId = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.DiskMoveRequest.fromJson( - json as core.Map); - checkDiskMoveRequest(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -100046,7 +101883,7 @@ void main() { unittest.equals('projects/'), ); pathOffset += 9; - index = path.indexOf('/moveDisk', pathOffset); + index = path.indexOf('/global/publicAdvertisedPrefixes/', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); @@ -100055,9 +101892,23 @@ void main() { subPart, unittest.equals('$arg_project'), ); + unittest.expect( + path.substring(pathOffset, pathOffset + 33), + unittest.equals('/global/publicAdvertisedPrefixes/'), + ); + pathOffset += 33; + index = path.indexOf('/announce', pathOffset); + unittest.expect(index >= 0, unittest.isTrue); + subPart = + core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); + pathOffset = index; + unittest.expect( + subPart, + unittest.equals('$arg_publicAdvertisedPrefix'), + ); unittest.expect( path.substring(pathOffset, pathOffset + 9), - unittest.equals('/moveDisk'), + unittest.equals('/announce'), ); pathOffset += 9; @@ -100091,23 +101942,20 @@ void main() { final resp = convert.json.encode(buildOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.moveDisk(arg_request, arg_project, + final response = await res.announce( + arg_project, arg_publicAdvertisedPrefix, requestId: arg_requestId, $fields: arg_$fields); checkOperation(response as api.Operation); }); - unittest.test('method--moveInstance', () async { + unittest.test('method--delete', () async { final mock = HttpServerMock(); - final res = api.ComputeApi(mock).projects; - final arg_request = buildInstanceMoveRequest(); + final res = api.ComputeApi(mock).publicAdvertisedPrefixes; final arg_project = 'foo'; + final arg_publicAdvertisedPrefix = 'foo'; final arg_requestId = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.InstanceMoveRequest.fromJson( - json as core.Map); - checkInstanceMoveRequest(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -100127,7 +101975,7 @@ void main() { unittest.equals('projects/'), ); pathOffset += 9; - index = path.indexOf('/moveInstance', pathOffset); + index = path.indexOf('/global/publicAdvertisedPrefixes/', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); @@ -100137,10 +101985,16 @@ void main() { unittest.equals('$arg_project'), ); unittest.expect( - path.substring(pathOffset, pathOffset + 13), - unittest.equals('/moveInstance'), + path.substring(pathOffset, pathOffset + 33), + unittest.equals('/global/publicAdvertisedPrefixes/'), + ); + pathOffset += 33; + subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset)); + pathOffset = path.length; + unittest.expect( + subPart, + unittest.equals('$arg_publicAdvertisedPrefix'), ); - pathOffset += 13; final query = req.url.query; var queryOffset = 0; @@ -100172,23 +102026,18 @@ void main() { final resp = convert.json.encode(buildOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.moveInstance(arg_request, arg_project, + final response = await res.delete(arg_project, 
arg_publicAdvertisedPrefix, requestId: arg_requestId, $fields: arg_$fields); checkOperation(response as api.Operation); }); - unittest.test('method--setCloudArmorTier', () async { + unittest.test('method--get', () async { final mock = HttpServerMock(); - final res = api.ComputeApi(mock).projects; - final arg_request = buildProjectsSetCloudArmorTierRequest(); + final res = api.ComputeApi(mock).publicAdvertisedPrefixes; final arg_project = 'foo'; - final arg_requestId = 'foo'; + final arg_publicAdvertisedPrefix = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.ProjectsSetCloudArmorTierRequest.fromJson( - json as core.Map); - checkProjectsSetCloudArmorTierRequest(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -100208,7 +102057,7 @@ void main() { unittest.equals('projects/'), ); pathOffset += 9; - index = path.indexOf('/setCloudArmorTier', pathOffset); + index = path.indexOf('/global/publicAdvertisedPrefixes/', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); @@ -100218,10 +102067,16 @@ void main() { unittest.equals('$arg_project'), ); unittest.expect( - path.substring(pathOffset, pathOffset + 18), - unittest.equals('/setCloudArmorTier'), + path.substring(pathOffset, pathOffset + 33), + unittest.equals('/global/publicAdvertisedPrefixes/'), + ); + pathOffset += 33; + subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset)); + pathOffset = path.length; + unittest.expect( + subPart, + unittest.equals('$arg_publicAdvertisedPrefix'), ); - pathOffset += 18; final query = req.url.query; var queryOffset = 0; @@ -100238,10 +102093,6 @@ void main() { ); } } - unittest.expect( - queryMap['requestId']!.first, - unittest.equals(arg_requestId), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -100250,25 +102101,25 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildOperation()); + final resp = convert.json.encode(buildPublicAdvertisedPrefix()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.setCloudArmorTier(arg_request, arg_project, - requestId: arg_requestId, $fields: arg_$fields); - checkOperation(response as api.Operation); + final response = await res.get(arg_project, arg_publicAdvertisedPrefix, + $fields: arg_$fields); + checkPublicAdvertisedPrefix(response as api.PublicAdvertisedPrefix); }); - unittest.test('method--setCommonInstanceMetadata', () async { + unittest.test('method--insert', () async { final mock = HttpServerMock(); - final res = api.ComputeApi(mock).projects; - final arg_request = buildMetadata(); + final res = api.ComputeApi(mock).publicAdvertisedPrefixes; + final arg_request = buildPublicAdvertisedPrefix(); final arg_project = 'foo'; final arg_requestId = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = - api.Metadata.fromJson(json as core.Map); - checkMetadata(obj); + final obj = api.PublicAdvertisedPrefix.fromJson( + json as core.Map); + checkPublicAdvertisedPrefix(obj); final path = req.url.path; var pathOffset = 0; @@ -100289,7 +102140,7 @@ void main() { unittest.equals('projects/'), ); pathOffset += 9; - index = path.indexOf('/setCommonInstanceMetadata', pathOffset); + index = path.indexOf('/global/publicAdvertisedPrefixes', pathOffset); unittest.expect(index >= 0, 
unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); @@ -100299,10 +102150,10 @@ void main() { unittest.equals('$arg_project'), ); unittest.expect( - path.substring(pathOffset, pathOffset + 26), - unittest.equals('/setCommonInstanceMetadata'), + path.substring(pathOffset, pathOffset + 32), + unittest.equals('/global/publicAdvertisedPrefixes'), ); - pathOffset += 26; + pathOffset += 32; final query = req.url.query; var queryOffset = 0; @@ -100334,24 +102185,22 @@ void main() { final resp = convert.json.encode(buildOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.setCommonInstanceMetadata( - arg_request, arg_project, + final response = await res.insert(arg_request, arg_project, requestId: arg_requestId, $fields: arg_$fields); checkOperation(response as api.Operation); }); - unittest.test('method--setDefaultNetworkTier', () async { + unittest.test('method--list', () async { final mock = HttpServerMock(); - final res = api.ComputeApi(mock).projects; - final arg_request = buildProjectsSetDefaultNetworkTierRequest(); + final res = api.ComputeApi(mock).publicAdvertisedPrefixes; final arg_project = 'foo'; - final arg_requestId = 'foo'; + final arg_filter = 'foo'; + final arg_maxResults = 42; + final arg_orderBy = 'foo'; + final arg_pageToken = 'foo'; + final arg_returnPartialSuccess = true; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.ProjectsSetDefaultNetworkTierRequest.fromJson( - json as core.Map); - checkProjectsSetDefaultNetworkTierRequest(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -100371,7 +102220,7 @@ void main() { unittest.equals('projects/'), ); pathOffset += 9; - index = path.indexOf('/setDefaultNetworkTier', pathOffset); + index = path.indexOf('/global/publicAdvertisedPrefixes', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); @@ -100381,10 +102230,10 @@ void main() { unittest.equals('$arg_project'), ); unittest.expect( - path.substring(pathOffset, pathOffset + 22), - unittest.equals('/setDefaultNetworkTier'), + path.substring(pathOffset, pathOffset + 32), + unittest.equals('/global/publicAdvertisedPrefixes'), ); - pathOffset += 22; + pathOffset += 32; final query = req.url.query; var queryOffset = 0; @@ -100402,8 +102251,24 @@ void main() { } } unittest.expect( - queryMap['requestId']!.first, - unittest.equals(arg_requestId), + queryMap['filter']!.first, + unittest.equals(arg_filter), + ); + unittest.expect( + core.int.parse(queryMap['maxResults']!.first), + unittest.equals(arg_maxResults), + ); + unittest.expect( + queryMap['orderBy']!.first, + unittest.equals(arg_orderBy), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), + ); + unittest.expect( + queryMap['returnPartialSuccess']!.first, + unittest.equals('$arg_returnPartialSuccess'), ); unittest.expect( queryMap['fields']!.first, @@ -100413,25 +102278,32 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildOperation()); + final resp = convert.json.encode(buildPublicAdvertisedPrefixList()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.setDefaultNetworkTier(arg_request, arg_project, - requestId: arg_requestId, $fields: arg_$fields); - checkOperation(response as api.Operation); + final response 
= await res.list(arg_project, + filter: arg_filter, + maxResults: arg_maxResults, + orderBy: arg_orderBy, + pageToken: arg_pageToken, + returnPartialSuccess: arg_returnPartialSuccess, + $fields: arg_$fields); + checkPublicAdvertisedPrefixList( + response as api.PublicAdvertisedPrefixList); }); - unittest.test('method--setUsageExportBucket', () async { + unittest.test('method--patch', () async { final mock = HttpServerMock(); - final res = api.ComputeApi(mock).projects; - final arg_request = buildUsageExportLocation(); + final res = api.ComputeApi(mock).publicAdvertisedPrefixes; + final arg_request = buildPublicAdvertisedPrefix(); final arg_project = 'foo'; + final arg_publicAdvertisedPrefix = 'foo'; final arg_requestId = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.UsageExportLocation.fromJson( + final obj = api.PublicAdvertisedPrefix.fromJson( json as core.Map); - checkUsageExportLocation(obj); + checkPublicAdvertisedPrefix(obj); final path = req.url.path; var pathOffset = 0; @@ -100452,7 +102324,7 @@ void main() { unittest.equals('projects/'), ); pathOffset += 9; - index = path.indexOf('/setUsageExportBucket', pathOffset); + index = path.indexOf('/global/publicAdvertisedPrefixes/', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); @@ -100462,10 +102334,16 @@ void main() { unittest.equals('$arg_project'), ); unittest.expect( - path.substring(pathOffset, pathOffset + 21), - unittest.equals('/setUsageExportBucket'), + path.substring(pathOffset, pathOffset + 33), + unittest.equals('/global/publicAdvertisedPrefixes/'), + ); + pathOffset += 33; + subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset)); + pathOffset = path.length; + unittest.expect( + subPart, + unittest.equals('$arg_publicAdvertisedPrefix'), ); - pathOffset += 21; final query = req.url.query; var queryOffset = 0; @@ -100497,14 +102375,13 @@ void main() { final resp = convert.json.encode(buildOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.setUsageExportBucket(arg_request, arg_project, + final response = await res.patch( + arg_request, arg_project, arg_publicAdvertisedPrefix, requestId: arg_requestId, $fields: arg_$fields); checkOperation(response as api.Operation); }); - }); - unittest.group('resource-PublicAdvertisedPrefixesResource', () { - unittest.test('method--announce', () async { + unittest.test('method--withdraw', () async { final mock = HttpServerMock(); final res = api.ComputeApi(mock).publicAdvertisedPrefixes; final arg_project = 'foo'; @@ -100545,7 +102422,7 @@ void main() { unittest.equals('/global/publicAdvertisedPrefixes/'), ); pathOffset += 33; - index = path.indexOf('/announce', pathOffset); + index = path.indexOf('/withdraw', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); @@ -100556,7 +102433,7 @@ void main() { ); unittest.expect( path.substring(pathOffset, pathOffset + 9), - unittest.equals('/announce'), + unittest.equals('/withdraw'), ); pathOffset += 9; @@ -100590,18 +102467,25 @@ void main() { final resp = convert.json.encode(buildOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.announce( + final response = await res.withdraw( arg_project, arg_publicAdvertisedPrefix, requestId: arg_requestId, $fields: arg_$fields); 
checkOperation(response as api.Operation); }); + }); - unittest.test('method--delete', () async { + unittest.group('resource-PublicDelegatedPrefixesResource', () { + unittest.test('method--aggregatedList', () async { final mock = HttpServerMock(); - final res = api.ComputeApi(mock).publicAdvertisedPrefixes; + final res = api.ComputeApi(mock).publicDelegatedPrefixes; final arg_project = 'foo'; - final arg_publicAdvertisedPrefix = 'foo'; - final arg_requestId = 'foo'; + final arg_filter = 'foo'; + final arg_includeAllScopes = true; + final arg_maxResults = 42; + final arg_orderBy = 'foo'; + final arg_pageToken = 'foo'; + final arg_returnPartialSuccess = true; + final arg_serviceProjectNumber = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -100623,7 +102507,7 @@ void main() { unittest.equals('projects/'), ); pathOffset += 9; - index = path.indexOf('/global/publicAdvertisedPrefixes/', pathOffset); + index = path.indexOf('/aggregated/publicDelegatedPrefixes', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); @@ -100633,16 +102517,10 @@ void main() { unittest.equals('$arg_project'), ); unittest.expect( - path.substring(pathOffset, pathOffset + 33), - unittest.equals('/global/publicAdvertisedPrefixes/'), - ); - pathOffset += 33; - subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset)); - pathOffset = path.length; - unittest.expect( - subPart, - unittest.equals('$arg_publicAdvertisedPrefix'), + path.substring(pathOffset, pathOffset + 35), + unittest.equals('/aggregated/publicDelegatedPrefixes'), ); + pathOffset += 35; final query = req.url.query; var queryOffset = 0; @@ -100660,8 +102538,32 @@ void main() { } } unittest.expect( - queryMap['requestId']!.first, - unittest.equals(arg_requestId), + queryMap['filter']!.first, + unittest.equals(arg_filter), + ); + unittest.expect( + queryMap['includeAllScopes']!.first, + unittest.equals('$arg_includeAllScopes'), + ); + unittest.expect( + core.int.parse(queryMap['maxResults']!.first), + unittest.equals(arg_maxResults), + ); + unittest.expect( + queryMap['orderBy']!.first, + unittest.equals(arg_orderBy), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), + ); + unittest.expect( + queryMap['returnPartialSuccess']!.first, + unittest.equals('$arg_returnPartialSuccess'), + ); + unittest.expect( + queryMap['serviceProjectNumber']!.first, + unittest.equals(arg_serviceProjectNumber), ); unittest.expect( queryMap['fields']!.first, @@ -100671,19 +102573,30 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildOperation()); + final resp = + convert.json.encode(buildPublicDelegatedPrefixAggregatedList()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.delete(arg_project, arg_publicAdvertisedPrefix, - requestId: arg_requestId, $fields: arg_$fields); - checkOperation(response as api.Operation); + final response = await res.aggregatedList(arg_project, + filter: arg_filter, + includeAllScopes: arg_includeAllScopes, + maxResults: arg_maxResults, + orderBy: arg_orderBy, + pageToken: arg_pageToken, + returnPartialSuccess: arg_returnPartialSuccess, + serviceProjectNumber: arg_serviceProjectNumber, + $fields: arg_$fields); + checkPublicDelegatedPrefixAggregatedList( + response as api.PublicDelegatedPrefixAggregatedList); }); - 
unittest.test('method--get', () async { + unittest.test('method--announce', () async { final mock = HttpServerMock(); - final res = api.ComputeApi(mock).publicAdvertisedPrefixes; + final res = api.ComputeApi(mock).publicDelegatedPrefixes; final arg_project = 'foo'; - final arg_publicAdvertisedPrefix = 'foo'; + final arg_region = 'foo'; + final arg_publicDelegatedPrefix = 'foo'; + final arg_requestId = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -100705,7 +102618,7 @@ void main() { unittest.equals('projects/'), ); pathOffset += 9; - index = path.indexOf('/global/publicAdvertisedPrefixes/', pathOffset); + index = path.indexOf('/regions/', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); @@ -100715,16 +102628,38 @@ void main() { unittest.equals('$arg_project'), ); unittest.expect( - path.substring(pathOffset, pathOffset + 33), - unittest.equals('/global/publicAdvertisedPrefixes/'), + path.substring(pathOffset, pathOffset + 9), + unittest.equals('/regions/'), ); - pathOffset += 33; - subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset)); - pathOffset = path.length; + pathOffset += 9; + index = path.indexOf('/publicDelegatedPrefixes/', pathOffset); + unittest.expect(index >= 0, unittest.isTrue); + subPart = + core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); + pathOffset = index; unittest.expect( subPart, - unittest.equals('$arg_publicAdvertisedPrefix'), + unittest.equals('$arg_region'), ); + unittest.expect( + path.substring(pathOffset, pathOffset + 25), + unittest.equals('/publicDelegatedPrefixes/'), + ); + pathOffset += 25; + index = path.indexOf('/announce', pathOffset); + unittest.expect(index >= 0, unittest.isTrue); + subPart = + core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); + pathOffset = index; + unittest.expect( + subPart, + unittest.equals('$arg_publicDelegatedPrefix'), + ); + unittest.expect( + path.substring(pathOffset, pathOffset + 9), + unittest.equals('/announce'), + ); + pathOffset += 9; final query = req.url.query; var queryOffset = 0; @@ -100741,6 +102676,10 @@ void main() { ); } } + unittest.expect( + queryMap['requestId']!.first, + unittest.equals(arg_requestId), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -100749,26 +102688,24 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildPublicAdvertisedPrefix()); + final resp = convert.json.encode(buildOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.get(arg_project, arg_publicAdvertisedPrefix, - $fields: arg_$fields); - checkPublicAdvertisedPrefix(response as api.PublicAdvertisedPrefix); + final response = await res.announce( + arg_project, arg_region, arg_publicDelegatedPrefix, + requestId: arg_requestId, $fields: arg_$fields); + checkOperation(response as api.Operation); }); - unittest.test('method--insert', () async { + unittest.test('method--delete', () async { final mock = HttpServerMock(); - final res = api.ComputeApi(mock).publicAdvertisedPrefixes; - final arg_request = buildPublicAdvertisedPrefix(); + final res = api.ComputeApi(mock).publicDelegatedPrefixes; final arg_project = 'foo'; + final arg_region = 'foo'; + final arg_publicDelegatedPrefix = 'foo'; final arg_requestId = 'foo'; final arg_$fields = 'foo'; 
mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.PublicAdvertisedPrefix.fromJson( - json as core.Map); - checkPublicAdvertisedPrefix(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -100788,7 +102725,7 @@ void main() { unittest.equals('projects/'), ); pathOffset += 9; - index = path.indexOf('/global/publicAdvertisedPrefixes', pathOffset); + index = path.indexOf('/regions/', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); @@ -100798,10 +102735,30 @@ void main() { unittest.equals('$arg_project'), ); unittest.expect( - path.substring(pathOffset, pathOffset + 32), - unittest.equals('/global/publicAdvertisedPrefixes'), + path.substring(pathOffset, pathOffset + 9), + unittest.equals('/regions/'), + ); + pathOffset += 9; + index = path.indexOf('/publicDelegatedPrefixes/', pathOffset); + unittest.expect(index >= 0, unittest.isTrue); + subPart = + core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); + pathOffset = index; + unittest.expect( + subPart, + unittest.equals('$arg_region'), + ); + unittest.expect( + path.substring(pathOffset, pathOffset + 25), + unittest.equals('/publicDelegatedPrefixes/'), + ); + pathOffset += 25; + subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset)); + pathOffset = path.length; + unittest.expect( + subPart, + unittest.equals('$arg_publicDelegatedPrefix'), ); - pathOffset += 32; final query = req.url.query; var queryOffset = 0; @@ -100833,20 +102790,18 @@ void main() { final resp = convert.json.encode(buildOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.insert(arg_request, arg_project, + final response = await res.delete( + arg_project, arg_region, arg_publicDelegatedPrefix, requestId: arg_requestId, $fields: arg_$fields); checkOperation(response as api.Operation); }); - unittest.test('method--list', () async { + unittest.test('method--get', () async { final mock = HttpServerMock(); - final res = api.ComputeApi(mock).publicAdvertisedPrefixes; + final res = api.ComputeApi(mock).publicDelegatedPrefixes; final arg_project = 'foo'; - final arg_filter = 'foo'; - final arg_maxResults = 42; - final arg_orderBy = 'foo'; - final arg_pageToken = 'foo'; - final arg_returnPartialSuccess = true; + final arg_region = 'foo'; + final arg_publicDelegatedPrefix = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -100868,7 +102823,7 @@ void main() { unittest.equals('projects/'), ); pathOffset += 9; - index = path.indexOf('/global/publicAdvertisedPrefixes', pathOffset); + index = path.indexOf('/regions/', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); @@ -100878,10 +102833,30 @@ void main() { unittest.equals('$arg_project'), ); unittest.expect( - path.substring(pathOffset, pathOffset + 32), - unittest.equals('/global/publicAdvertisedPrefixes'), + path.substring(pathOffset, pathOffset + 9), + unittest.equals('/regions/'), + ); + pathOffset += 9; + index = path.indexOf('/publicDelegatedPrefixes/', pathOffset); + unittest.expect(index >= 0, unittest.isTrue); + subPart = + core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); + pathOffset = index; + unittest.expect( + subPart, + unittest.equals('$arg_region'), + ); + unittest.expect( + path.substring(pathOffset, pathOffset + 25), + 
unittest.equals('/publicDelegatedPrefixes/'), + ); + pathOffset += 25; + subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset)); + pathOffset = path.length; + unittest.expect( + subPart, + unittest.equals('$arg_publicDelegatedPrefix'), ); - pathOffset += 32; final query = req.url.query; var queryOffset = 0; @@ -100898,26 +102873,6 @@ void main() { ); } } - unittest.expect( - queryMap['filter']!.first, - unittest.equals(arg_filter), - ); - unittest.expect( - core.int.parse(queryMap['maxResults']!.first), - unittest.equals(arg_maxResults), - ); - unittest.expect( - queryMap['orderBy']!.first, - unittest.equals(arg_orderBy), - ); - unittest.expect( - queryMap['pageToken']!.first, - unittest.equals(arg_pageToken), - ); - unittest.expect( - queryMap['returnPartialSuccess']!.first, - unittest.equals('$arg_returnPartialSuccess'), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -100926,32 +102881,27 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildPublicAdvertisedPrefixList()); + final resp = convert.json.encode(buildPublicDelegatedPrefix()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.list(arg_project, - filter: arg_filter, - maxResults: arg_maxResults, - orderBy: arg_orderBy, - pageToken: arg_pageToken, - returnPartialSuccess: arg_returnPartialSuccess, + final response = await res.get( + arg_project, arg_region, arg_publicDelegatedPrefix, $fields: arg_$fields); - checkPublicAdvertisedPrefixList( - response as api.PublicAdvertisedPrefixList); + checkPublicDelegatedPrefix(response as api.PublicDelegatedPrefix); }); - unittest.test('method--patch', () async { + unittest.test('method--insert', () async { final mock = HttpServerMock(); - final res = api.ComputeApi(mock).publicAdvertisedPrefixes; - final arg_request = buildPublicAdvertisedPrefix(); + final res = api.ComputeApi(mock).publicDelegatedPrefixes; + final arg_request = buildPublicDelegatedPrefix(); final arg_project = 'foo'; - final arg_publicAdvertisedPrefix = 'foo'; + final arg_region = 'foo'; final arg_requestId = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.PublicAdvertisedPrefix.fromJson( + final obj = api.PublicDelegatedPrefix.fromJson( json as core.Map); - checkPublicAdvertisedPrefix(obj); + checkPublicDelegatedPrefix(obj); final path = req.url.path; var pathOffset = 0; @@ -100972,7 +102922,7 @@ void main() { unittest.equals('projects/'), ); pathOffset += 9; - index = path.indexOf('/global/publicAdvertisedPrefixes/', pathOffset); + index = path.indexOf('/regions/', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); @@ -100982,16 +102932,24 @@ void main() { unittest.equals('$arg_project'), ); unittest.expect( - path.substring(pathOffset, pathOffset + 33), - unittest.equals('/global/publicAdvertisedPrefixes/'), + path.substring(pathOffset, pathOffset + 9), + unittest.equals('/regions/'), ); - pathOffset += 33; - subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset)); - pathOffset = path.length; + pathOffset += 9; + index = path.indexOf('/publicDelegatedPrefixes', pathOffset); + unittest.expect(index >= 0, unittest.isTrue); + subPart = + core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); + pathOffset = index; unittest.expect( subPart, - unittest.equals('$arg_publicAdvertisedPrefix'), 
+ unittest.equals('$arg_region'), ); + unittest.expect( + path.substring(pathOffset, pathOffset + 24), + unittest.equals('/publicDelegatedPrefixes'), + ); + pathOffset += 24; final query = req.url.query; var queryOffset = 0; @@ -101023,18 +102981,21 @@ void main() { final resp = convert.json.encode(buildOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.patch( - arg_request, arg_project, arg_publicAdvertisedPrefix, + final response = await res.insert(arg_request, arg_project, arg_region, requestId: arg_requestId, $fields: arg_$fields); checkOperation(response as api.Operation); }); - unittest.test('method--withdraw', () async { + unittest.test('method--list', () async { final mock = HttpServerMock(); - final res = api.ComputeApi(mock).publicAdvertisedPrefixes; + final res = api.ComputeApi(mock).publicDelegatedPrefixes; final arg_project = 'foo'; - final arg_publicAdvertisedPrefix = 'foo'; - final arg_requestId = 'foo'; + final arg_region = 'foo'; + final arg_filter = 'foo'; + final arg_maxResults = 42; + final arg_orderBy = 'foo'; + final arg_pageToken = 'foo'; + final arg_returnPartialSuccess = true; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -101056,7 +103017,7 @@ void main() { unittest.equals('projects/'), ); pathOffset += 9; - index = path.indexOf('/global/publicAdvertisedPrefixes/', pathOffset); + index = path.indexOf('/regions/', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); @@ -101066,24 +103027,24 @@ void main() { unittest.equals('$arg_project'), ); unittest.expect( - path.substring(pathOffset, pathOffset + 33), - unittest.equals('/global/publicAdvertisedPrefixes/'), + path.substring(pathOffset, pathOffset + 9), + unittest.equals('/regions/'), ); - pathOffset += 33; - index = path.indexOf('/withdraw', pathOffset); + pathOffset += 9; + index = path.indexOf('/publicDelegatedPrefixes', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); pathOffset = index; unittest.expect( subPart, - unittest.equals('$arg_publicAdvertisedPrefix'), + unittest.equals('$arg_region'), ); unittest.expect( - path.substring(pathOffset, pathOffset + 9), - unittest.equals('/withdraw'), + path.substring(pathOffset, pathOffset + 24), + unittest.equals('/publicDelegatedPrefixes'), ); - pathOffset += 9; + pathOffset += 24; final query = req.url.query; var queryOffset = 0; @@ -101101,8 +103062,24 @@ void main() { } } unittest.expect( - queryMap['requestId']!.first, - unittest.equals(arg_requestId), + queryMap['filter']!.first, + unittest.equals(arg_filter), + ); + unittest.expect( + core.int.parse(queryMap['maxResults']!.first), + unittest.equals(arg_maxResults), + ); + unittest.expect( + queryMap['orderBy']!.first, + unittest.equals(arg_orderBy), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), + ); + unittest.expect( + queryMap['returnPartialSuccess']!.first, + unittest.equals('$arg_returnPartialSuccess'), ); unittest.expect( queryMap['fields']!.first, @@ -101112,30 +103089,33 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildOperation()); + final resp = convert.json.encode(buildPublicDelegatedPrefixList()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = 
await res.withdraw( - arg_project, arg_publicAdvertisedPrefix, - requestId: arg_requestId, $fields: arg_$fields); - checkOperation(response as api.Operation); + final response = await res.list(arg_project, arg_region, + filter: arg_filter, + maxResults: arg_maxResults, + orderBy: arg_orderBy, + pageToken: arg_pageToken, + returnPartialSuccess: arg_returnPartialSuccess, + $fields: arg_$fields); + checkPublicDelegatedPrefixList(response as api.PublicDelegatedPrefixList); }); - }); - unittest.group('resource-PublicDelegatedPrefixesResource', () { - unittest.test('method--aggregatedList', () async { + unittest.test('method--patch', () async { final mock = HttpServerMock(); final res = api.ComputeApi(mock).publicDelegatedPrefixes; + final arg_request = buildPublicDelegatedPrefix(); final arg_project = 'foo'; - final arg_filter = 'foo'; - final arg_includeAllScopes = true; - final arg_maxResults = 42; - final arg_orderBy = 'foo'; - final arg_pageToken = 'foo'; - final arg_returnPartialSuccess = true; - final arg_serviceProjectNumber = 'foo'; + final arg_region = 'foo'; + final arg_publicDelegatedPrefix = 'foo'; + final arg_requestId = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.PublicDelegatedPrefix.fromJson( + json as core.Map); + checkPublicDelegatedPrefix(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -101155,7 +103135,7 @@ void main() { unittest.equals('projects/'), ); pathOffset += 9; - index = path.indexOf('/aggregated/publicDelegatedPrefixes', pathOffset); + index = path.indexOf('/regions/', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); @@ -101165,10 +103145,30 @@ void main() { unittest.equals('$arg_project'), ); unittest.expect( - path.substring(pathOffset, pathOffset + 35), - unittest.equals('/aggregated/publicDelegatedPrefixes'), + path.substring(pathOffset, pathOffset + 9), + unittest.equals('/regions/'), + ); + pathOffset += 9; + index = path.indexOf('/publicDelegatedPrefixes/', pathOffset); + unittest.expect(index >= 0, unittest.isTrue); + subPart = + core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); + pathOffset = index; + unittest.expect( + subPart, + unittest.equals('$arg_region'), + ); + unittest.expect( + path.substring(pathOffset, pathOffset + 25), + unittest.equals('/publicDelegatedPrefixes/'), + ); + pathOffset += 25; + subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset)); + pathOffset = path.length; + unittest.expect( + subPart, + unittest.equals('$arg_publicDelegatedPrefix'), ); - pathOffset += 35; final query = req.url.query; var queryOffset = 0; @@ -101186,32 +103186,8 @@ void main() { } } unittest.expect( - queryMap['filter']!.first, - unittest.equals(arg_filter), - ); - unittest.expect( - queryMap['includeAllScopes']!.first, - unittest.equals('$arg_includeAllScopes'), - ); - unittest.expect( - core.int.parse(queryMap['maxResults']!.first), - unittest.equals(arg_maxResults), - ); - unittest.expect( - queryMap['orderBy']!.first, - unittest.equals(arg_orderBy), - ); - unittest.expect( - queryMap['pageToken']!.first, - unittest.equals(arg_pageToken), - ); - unittest.expect( - queryMap['returnPartialSuccess']!.first, - unittest.equals('$arg_returnPartialSuccess'), - ); - unittest.expect( - queryMap['serviceProjectNumber']!.first, - unittest.equals(arg_serviceProjectNumber), + queryMap['requestId']!.first, + unittest.equals(arg_requestId), ); 
unittest.expect( queryMap['fields']!.first, @@ -101221,24 +103197,16 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = - convert.json.encode(buildPublicDelegatedPrefixAggregatedList()); + final resp = convert.json.encode(buildOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.aggregatedList(arg_project, - filter: arg_filter, - includeAllScopes: arg_includeAllScopes, - maxResults: arg_maxResults, - orderBy: arg_orderBy, - pageToken: arg_pageToken, - returnPartialSuccess: arg_returnPartialSuccess, - serviceProjectNumber: arg_serviceProjectNumber, - $fields: arg_$fields); - checkPublicDelegatedPrefixAggregatedList( - response as api.PublicDelegatedPrefixAggregatedList); + final response = await res.patch( + arg_request, arg_project, arg_region, arg_publicDelegatedPrefix, + requestId: arg_requestId, $fields: arg_$fields); + checkOperation(response as api.Operation); }); - unittest.test('method--announce', () async { + unittest.test('method--withdraw', () async { final mock = HttpServerMock(); final res = api.ComputeApi(mock).publicDelegatedPrefixes; final arg_project = 'foo'; @@ -101294,7 +103262,7 @@ void main() { unittest.equals('/publicDelegatedPrefixes/'), ); pathOffset += 25; - index = path.indexOf('/announce', pathOffset); + index = path.indexOf('/withdraw', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); @@ -101305,7 +103273,7 @@ void main() { ); unittest.expect( path.substring(pathOffset, pathOffset + 9), - unittest.equals('/announce'), + unittest.equals('/withdraw'), ); pathOffset += 9; @@ -101339,18 +103307,20 @@ void main() { final resp = convert.json.encode(buildOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.announce( + final response = await res.withdraw( arg_project, arg_region, arg_publicDelegatedPrefix, requestId: arg_requestId, $fields: arg_$fields); checkOperation(response as api.Operation); }); + }); + unittest.group('resource-RegionAutoscalersResource', () { unittest.test('method--delete', () async { final mock = HttpServerMock(); - final res = api.ComputeApi(mock).publicDelegatedPrefixes; + final res = api.ComputeApi(mock).regionAutoscalers; final arg_project = 'foo'; final arg_region = 'foo'; - final arg_publicDelegatedPrefix = 'foo'; + final arg_autoscaler = 'foo'; final arg_requestId = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -101387,7 +103357,7 @@ void main() { unittest.equals('/regions/'), ); pathOffset += 9; - index = path.indexOf('/publicDelegatedPrefixes/', pathOffset); + index = path.indexOf('/autoscalers/', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); @@ -101397,15 +103367,15 @@ void main() { unittest.equals('$arg_region'), ); unittest.expect( - path.substring(pathOffset, pathOffset + 25), - unittest.equals('/publicDelegatedPrefixes/'), + path.substring(pathOffset, pathOffset + 13), + unittest.equals('/autoscalers/'), ); - pathOffset += 25; + pathOffset += 13; subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset)); pathOffset = path.length; unittest.expect( subPart, - unittest.equals('$arg_publicDelegatedPrefix'), + unittest.equals('$arg_autoscaler'), ); final query = req.url.query; @@ -101438,18 +103408,17 @@ void main() { final resp = 
convert.json.encode(buildOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.delete( - arg_project, arg_region, arg_publicDelegatedPrefix, + final response = await res.delete(arg_project, arg_region, arg_autoscaler, requestId: arg_requestId, $fields: arg_$fields); checkOperation(response as api.Operation); }); unittest.test('method--get', () async { final mock = HttpServerMock(); - final res = api.ComputeApi(mock).publicDelegatedPrefixes; + final res = api.ComputeApi(mock).regionAutoscalers; final arg_project = 'foo'; final arg_region = 'foo'; - final arg_publicDelegatedPrefix = 'foo'; + final arg_autoscaler = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -101485,7 +103454,7 @@ void main() { unittest.equals('/regions/'), ); pathOffset += 9; - index = path.indexOf('/publicDelegatedPrefixes/', pathOffset); + index = path.indexOf('/autoscalers/', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); @@ -101495,15 +103464,15 @@ void main() { unittest.equals('$arg_region'), ); unittest.expect( - path.substring(pathOffset, pathOffset + 25), - unittest.equals('/publicDelegatedPrefixes/'), + path.substring(pathOffset, pathOffset + 13), + unittest.equals('/autoscalers/'), ); - pathOffset += 25; + pathOffset += 13; subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset)); pathOffset = path.length; unittest.expect( subPart, - unittest.equals('$arg_publicDelegatedPrefix'), + unittest.equals('$arg_autoscaler'), ); final query = req.url.query; @@ -101529,27 +103498,26 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildPublicDelegatedPrefix()); + final resp = convert.json.encode(buildAutoscaler()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.get( - arg_project, arg_region, arg_publicDelegatedPrefix, + final response = await res.get(arg_project, arg_region, arg_autoscaler, $fields: arg_$fields); - checkPublicDelegatedPrefix(response as api.PublicDelegatedPrefix); + checkAutoscaler(response as api.Autoscaler); }); unittest.test('method--insert', () async { final mock = HttpServerMock(); - final res = api.ComputeApi(mock).publicDelegatedPrefixes; - final arg_request = buildPublicDelegatedPrefix(); + final res = api.ComputeApi(mock).regionAutoscalers; + final arg_request = buildAutoscaler(); final arg_project = 'foo'; final arg_region = 'foo'; final arg_requestId = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.PublicDelegatedPrefix.fromJson( + final obj = api.Autoscaler.fromJson( json as core.Map); - checkPublicDelegatedPrefix(obj); + checkAutoscaler(obj); final path = req.url.path; var pathOffset = 0; @@ -101584,7 +103552,7 @@ void main() { unittest.equals('/regions/'), ); pathOffset += 9; - index = path.indexOf('/publicDelegatedPrefixes', pathOffset); + index = path.indexOf('/autoscalers', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); @@ -101594,10 +103562,10 @@ void main() { unittest.equals('$arg_region'), ); unittest.expect( - path.substring(pathOffset, pathOffset + 24), - unittest.equals('/publicDelegatedPrefixes'), + path.substring(pathOffset, pathOffset + 12), + 
unittest.equals('/autoscalers'), ); - pathOffset += 24; + pathOffset += 12; final query = req.url.query; var queryOffset = 0; @@ -101636,7 +103604,7 @@ void main() { unittest.test('method--list', () async { final mock = HttpServerMock(); - final res = api.ComputeApi(mock).publicDelegatedPrefixes; + final res = api.ComputeApi(mock).regionAutoscalers; final arg_project = 'foo'; final arg_region = 'foo'; final arg_filter = 'foo'; @@ -101679,7 +103647,7 @@ void main() { unittest.equals('/regions/'), ); pathOffset += 9; - index = path.indexOf('/publicDelegatedPrefixes', pathOffset); + index = path.indexOf('/autoscalers', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); @@ -101689,10 +103657,10 @@ void main() { unittest.equals('$arg_region'), ); unittest.expect( - path.substring(pathOffset, pathOffset + 24), - unittest.equals('/publicDelegatedPrefixes'), + path.substring(pathOffset, pathOffset + 12), + unittest.equals('/autoscalers'), ); - pathOffset += 24; + pathOffset += 12; final query = req.url.query; var queryOffset = 0; @@ -101737,7 +103705,7 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildPublicDelegatedPrefixList()); + final resp = convert.json.encode(buildRegionAutoscalerList()); return async.Future.value(stringResponse(200, h, resp)); }), true); final response = await res.list(arg_project, arg_region, @@ -101747,22 +103715,22 @@ void main() { pageToken: arg_pageToken, returnPartialSuccess: arg_returnPartialSuccess, $fields: arg_$fields); - checkPublicDelegatedPrefixList(response as api.PublicDelegatedPrefixList); + checkRegionAutoscalerList(response as api.RegionAutoscalerList); }); unittest.test('method--patch', () async { final mock = HttpServerMock(); - final res = api.ComputeApi(mock).publicDelegatedPrefixes; - final arg_request = buildPublicDelegatedPrefix(); + final res = api.ComputeApi(mock).regionAutoscalers; + final arg_request = buildAutoscaler(); final arg_project = 'foo'; final arg_region = 'foo'; - final arg_publicDelegatedPrefix = 'foo'; + final arg_autoscaler = 'foo'; final arg_requestId = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.PublicDelegatedPrefix.fromJson( + final obj = api.Autoscaler.fromJson( json as core.Map); - checkPublicDelegatedPrefix(obj); + checkAutoscaler(obj); final path = req.url.path; var pathOffset = 0; @@ -101797,7 +103765,7 @@ void main() { unittest.equals('/regions/'), ); pathOffset += 9; - index = path.indexOf('/publicDelegatedPrefixes/', pathOffset); + index = path.indexOf('/autoscalers', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); @@ -101807,16 +103775,10 @@ void main() { unittest.equals('$arg_region'), ); unittest.expect( - path.substring(pathOffset, pathOffset + 25), - unittest.equals('/publicDelegatedPrefixes/'), - ); - pathOffset += 25; - subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset)); - pathOffset = path.length; - unittest.expect( - subPart, - unittest.equals('$arg_publicDelegatedPrefix'), + path.substring(pathOffset, pathOffset + 12), + unittest.equals('/autoscalers'), ); + pathOffset += 12; final query = req.url.query; var queryOffset = 0; @@ -101833,6 +103795,10 @@ void main() { ); } } + unittest.expect( + queryMap['autoscaler']!.first, + unittest.equals(arg_autoscaler), + ); unittest.expect( 
queryMap['requestId']!.first, unittest.equals(arg_requestId), @@ -101848,21 +103814,27 @@ void main() { final resp = convert.json.encode(buildOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.patch( - arg_request, arg_project, arg_region, arg_publicDelegatedPrefix, - requestId: arg_requestId, $fields: arg_$fields); + final response = await res.patch(arg_request, arg_project, arg_region, + autoscaler: arg_autoscaler, + requestId: arg_requestId, + $fields: arg_$fields); checkOperation(response as api.Operation); }); - unittest.test('method--withdraw', () async { + unittest.test('method--update', () async { final mock = HttpServerMock(); - final res = api.ComputeApi(mock).publicDelegatedPrefixes; + final res = api.ComputeApi(mock).regionAutoscalers; + final arg_request = buildAutoscaler(); final arg_project = 'foo'; final arg_region = 'foo'; - final arg_publicDelegatedPrefix = 'foo'; + final arg_autoscaler = 'foo'; final arg_requestId = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.Autoscaler.fromJson( + json as core.Map); + checkAutoscaler(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -101896,7 +103868,7 @@ void main() { unittest.equals('/regions/'), ); pathOffset += 9; - index = path.indexOf('/publicDelegatedPrefixes/', pathOffset); + index = path.indexOf('/autoscalers', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); @@ -101906,24 +103878,10 @@ void main() { unittest.equals('$arg_region'), ); unittest.expect( - path.substring(pathOffset, pathOffset + 25), - unittest.equals('/publicDelegatedPrefixes/'), - ); - pathOffset += 25; - index = path.indexOf('/withdraw', pathOffset); - unittest.expect(index >= 0, unittest.isTrue); - subPart = - core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); - pathOffset = index; - unittest.expect( - subPart, - unittest.equals('$arg_publicDelegatedPrefix'), - ); - unittest.expect( - path.substring(pathOffset, pathOffset + 9), - unittest.equals('/withdraw'), + path.substring(pathOffset, pathOffset + 12), + unittest.equals('/autoscalers'), ); - pathOffset += 9; + pathOffset += 12; final query = req.url.query; var queryOffset = 0; @@ -101940,6 +103898,10 @@ void main() { ); } } + unittest.expect( + queryMap['autoscaler']!.first, + unittest.equals(arg_autoscaler), + ); unittest.expect( queryMap['requestId']!.first, unittest.equals(arg_requestId), @@ -101955,20 +103917,21 @@ void main() { final resp = convert.json.encode(buildOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.withdraw( - arg_project, arg_region, arg_publicDelegatedPrefix, - requestId: arg_requestId, $fields: arg_$fields); + final response = await res.update(arg_request, arg_project, arg_region, + autoscaler: arg_autoscaler, + requestId: arg_requestId, + $fields: arg_$fields); checkOperation(response as api.Operation); }); }); - unittest.group('resource-RegionAutoscalersResource', () { + unittest.group('resource-RegionBackendServicesResource', () { unittest.test('method--delete', () async { final mock = HttpServerMock(); - final res = api.ComputeApi(mock).regionAutoscalers; + final res = api.ComputeApi(mock).regionBackendServices; final arg_project = 'foo'; final arg_region = 'foo'; - final arg_autoscaler = 'foo'; + final arg_backendService = 'foo'; final arg_requestId = 'foo'; final 
arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -102005,7 +103968,7 @@ void main() { unittest.equals('/regions/'), ); pathOffset += 9; - index = path.indexOf('/autoscalers/', pathOffset); + index = path.indexOf('/backendServices/', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); @@ -102015,15 +103978,15 @@ void main() { unittest.equals('$arg_region'), ); unittest.expect( - path.substring(pathOffset, pathOffset + 13), - unittest.equals('/autoscalers/'), + path.substring(pathOffset, pathOffset + 17), + unittest.equals('/backendServices/'), ); - pathOffset += 13; + pathOffset += 17; subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset)); pathOffset = path.length; unittest.expect( subPart, - unittest.equals('$arg_autoscaler'), + unittest.equals('$arg_backendService'), ); final query = req.url.query; @@ -102056,17 +104019,18 @@ void main() { final resp = convert.json.encode(buildOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.delete(arg_project, arg_region, arg_autoscaler, + final response = await res.delete( + arg_project, arg_region, arg_backendService, requestId: arg_requestId, $fields: arg_$fields); checkOperation(response as api.Operation); }); unittest.test('method--get', () async { final mock = HttpServerMock(); - final res = api.ComputeApi(mock).regionAutoscalers; + final res = api.ComputeApi(mock).regionBackendServices; final arg_project = 'foo'; final arg_region = 'foo'; - final arg_autoscaler = 'foo'; + final arg_backendService = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -102102,7 +104066,7 @@ void main() { unittest.equals('/regions/'), ); pathOffset += 9; - index = path.indexOf('/autoscalers/', pathOffset); + index = path.indexOf('/backendServices/', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); @@ -102112,15 +104076,15 @@ void main() { unittest.equals('$arg_region'), ); unittest.expect( - path.substring(pathOffset, pathOffset + 13), - unittest.equals('/autoscalers/'), + path.substring(pathOffset, pathOffset + 17), + unittest.equals('/backendServices/'), ); - pathOffset += 13; + pathOffset += 17; subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset)); pathOffset = path.length; unittest.expect( subPart, - unittest.equals('$arg_autoscaler'), + unittest.equals('$arg_backendService'), ); final query = req.url.query; @@ -102146,122 +104110,28 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildAutoscaler()); + final resp = convert.json.encode(buildBackendService()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.get(arg_project, arg_region, arg_autoscaler, + final response = await res.get( + arg_project, arg_region, arg_backendService, $fields: arg_$fields); - checkAutoscaler(response as api.Autoscaler); + checkBackendService(response as api.BackendService); }); - unittest.test('method--insert', () async { + unittest.test('method--getHealth', () async { final mock = HttpServerMock(); - final res = api.ComputeApi(mock).regionAutoscalers; - final arg_request = buildAutoscaler(); + final res = api.ComputeApi(mock).regionBackendServices; + final arg_request = 
buildResourceGroupReference(); final arg_project = 'foo'; final arg_region = 'foo'; - final arg_requestId = 'foo'; + final arg_backendService = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.Autoscaler.fromJson( + final obj = api.ResourceGroupReference.fromJson( json as core.Map); - checkAutoscaler(obj); - - final path = req.url.path; - var pathOffset = 0; - core.int index; - core.String subPart; - unittest.expect( - path.substring(pathOffset, pathOffset + 1), - unittest.equals('/'), - ); - pathOffset += 1; - unittest.expect( - path.substring(pathOffset, pathOffset + 11), - unittest.equals('compute/v1/'), - ); - pathOffset += 11; - unittest.expect( - path.substring(pathOffset, pathOffset + 9), - unittest.equals('projects/'), - ); - pathOffset += 9; - index = path.indexOf('/regions/', pathOffset); - unittest.expect(index >= 0, unittest.isTrue); - subPart = - core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); - pathOffset = index; - unittest.expect( - subPart, - unittest.equals('$arg_project'), - ); - unittest.expect( - path.substring(pathOffset, pathOffset + 9), - unittest.equals('/regions/'), - ); - pathOffset += 9; - index = path.indexOf('/autoscalers', pathOffset); - unittest.expect(index >= 0, unittest.isTrue); - subPart = - core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); - pathOffset = index; - unittest.expect( - subPart, - unittest.equals('$arg_region'), - ); - unittest.expect( - path.substring(pathOffset, pathOffset + 12), - unittest.equals('/autoscalers'), - ); - pathOffset += 12; - - final query = req.url.query; - var queryOffset = 0; - final queryMap = >{}; - void addQueryParam(core.String n, core.String v) => - queryMap.putIfAbsent(n, () => []).add(v); - - if (query.isNotEmpty) { - for (var part in query.split('&')) { - final keyValue = part.split('='); - addQueryParam( - core.Uri.decodeQueryComponent(keyValue[0]), - core.Uri.decodeQueryComponent(keyValue[1]), - ); - } - } - unittest.expect( - queryMap['requestId']!.first, - unittest.equals(arg_requestId), - ); - unittest.expect( - queryMap['fields']!.first, - unittest.equals(arg_$fields), - ); - - final h = { - 'content-type': 'application/json; charset=utf-8', - }; - final resp = convert.json.encode(buildOperation()); - return async.Future.value(stringResponse(200, h, resp)); - }), true); - final response = await res.insert(arg_request, arg_project, arg_region, - requestId: arg_requestId, $fields: arg_$fields); - checkOperation(response as api.Operation); - }); + checkResourceGroupReference(obj); - unittest.test('method--list', () async { - final mock = HttpServerMock(); - final res = api.ComputeApi(mock).regionAutoscalers; - final arg_project = 'foo'; - final arg_region = 'foo'; - final arg_filter = 'foo'; - final arg_maxResults = 42; - final arg_orderBy = 'foo'; - final arg_pageToken = 'foo'; - final arg_returnPartialSuccess = true; - final arg_$fields = 'foo'; - mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; var pathOffset = 0; core.int index; @@ -102295,7 +104165,7 @@ void main() { unittest.equals('/regions/'), ); pathOffset += 9; - index = path.indexOf('/autoscalers', pathOffset); + index = path.indexOf('/backendServices/', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); @@ -102305,10 +104175,24 @@ void main() { unittest.equals('$arg_region'), ); unittest.expect( - 
path.substring(pathOffset, pathOffset + 12), - unittest.equals('/autoscalers'), + path.substring(pathOffset, pathOffset + 17), + unittest.equals('/backendServices/'), ); - pathOffset += 12; + pathOffset += 17; + index = path.indexOf('/getHealth', pathOffset); + unittest.expect(index >= 0, unittest.isTrue); + subPart = + core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); + pathOffset = index; + unittest.expect( + subPart, + unittest.equals('$arg_backendService'), + ); + unittest.expect( + path.substring(pathOffset, pathOffset + 10), + unittest.equals('/getHealth'), + ); + pathOffset += 10; final query = req.url.query; var queryOffset = 0; @@ -102325,26 +104209,6 @@ void main() { ); } } - unittest.expect( - queryMap['filter']!.first, - unittest.equals(arg_filter), - ); - unittest.expect( - core.int.parse(queryMap['maxResults']!.first), - unittest.equals(arg_maxResults), - ); - unittest.expect( - queryMap['orderBy']!.first, - unittest.equals(arg_orderBy), - ); - unittest.expect( - queryMap['pageToken']!.first, - unittest.equals(arg_pageToken), - ); - unittest.expect( - queryMap['returnPartialSuccess']!.first, - unittest.equals('$arg_returnPartialSuccess'), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -102353,33 +104217,24 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildRegionAutoscalerList()); + final resp = convert.json.encode(buildBackendServiceGroupHealth()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.list(arg_project, arg_region, - filter: arg_filter, - maxResults: arg_maxResults, - orderBy: arg_orderBy, - pageToken: arg_pageToken, - returnPartialSuccess: arg_returnPartialSuccess, + final response = await res.getHealth( + arg_request, arg_project, arg_region, arg_backendService, $fields: arg_$fields); - checkRegionAutoscalerList(response as api.RegionAutoscalerList); + checkBackendServiceGroupHealth(response as api.BackendServiceGroupHealth); }); - unittest.test('method--patch', () async { + unittest.test('method--getIamPolicy', () async { final mock = HttpServerMock(); - final res = api.ComputeApi(mock).regionAutoscalers; - final arg_request = buildAutoscaler(); + final res = api.ComputeApi(mock).regionBackendServices; final arg_project = 'foo'; final arg_region = 'foo'; - final arg_autoscaler = 'foo'; - final arg_requestId = 'foo'; + final arg_resource = 'foo'; + final arg_optionsRequestedPolicyVersion = 42; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.Autoscaler.fromJson( - json as core.Map); - checkAutoscaler(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -102413,7 +104268,7 @@ void main() { unittest.equals('/regions/'), ); pathOffset += 9; - index = path.indexOf('/autoscalers', pathOffset); + index = path.indexOf('/backendServices/', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); @@ -102423,10 +104278,24 @@ void main() { unittest.equals('$arg_region'), ); unittest.expect( - path.substring(pathOffset, pathOffset + 12), - unittest.equals('/autoscalers'), + path.substring(pathOffset, pathOffset + 17), + unittest.equals('/backendServices/'), ); - pathOffset += 12; + pathOffset += 17; + index = path.indexOf('/getIamPolicy', pathOffset); + unittest.expect(index >= 0, unittest.isTrue); + subPart = + 
core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); + pathOffset = index; + unittest.expect( + subPart, + unittest.equals('$arg_resource'), + ); + unittest.expect( + path.substring(pathOffset, pathOffset + 13), + unittest.equals('/getIamPolicy'), + ); + pathOffset += 13; final query = req.url.query; var queryOffset = 0; @@ -102444,12 +104313,8 @@ void main() { } } unittest.expect( - queryMap['autoscaler']!.first, - unittest.equals(arg_autoscaler), - ); - unittest.expect( - queryMap['requestId']!.first, - unittest.equals(arg_requestId), + core.int.parse(queryMap['optionsRequestedPolicyVersion']!.first), + unittest.equals(arg_optionsRequestedPolicyVersion), ); unittest.expect( queryMap['fields']!.first, @@ -102459,29 +104324,28 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildOperation()); + final resp = convert.json.encode(buildPolicy()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.patch(arg_request, arg_project, arg_region, - autoscaler: arg_autoscaler, - requestId: arg_requestId, + final response = await res.getIamPolicy( + arg_project, arg_region, arg_resource, + optionsRequestedPolicyVersion: arg_optionsRequestedPolicyVersion, $fields: arg_$fields); - checkOperation(response as api.Operation); + checkPolicy(response as api.Policy); }); - unittest.test('method--update', () async { + unittest.test('method--insert', () async { final mock = HttpServerMock(); - final res = api.ComputeApi(mock).regionAutoscalers; - final arg_request = buildAutoscaler(); + final res = api.ComputeApi(mock).regionBackendServices; + final arg_request = buildBackendService(); final arg_project = 'foo'; final arg_region = 'foo'; - final arg_autoscaler = 'foo'; final arg_requestId = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.Autoscaler.fromJson( + final obj = api.BackendService.fromJson( json as core.Map); - checkAutoscaler(obj); + checkBackendService(obj); final path = req.url.path; var pathOffset = 0; @@ -102516,7 +104380,7 @@ void main() { unittest.equals('/regions/'), ); pathOffset += 9; - index = path.indexOf('/autoscalers', pathOffset); + index = path.indexOf('/backendServices', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); @@ -102526,10 +104390,10 @@ void main() { unittest.equals('$arg_region'), ); unittest.expect( - path.substring(pathOffset, pathOffset + 12), - unittest.equals('/autoscalers'), + path.substring(pathOffset, pathOffset + 16), + unittest.equals('/backendServices'), ); - pathOffset += 12; + pathOffset += 16; final query = req.url.query; var queryOffset = 0; @@ -102546,10 +104410,6 @@ void main() { ); } } - unittest.expect( - queryMap['autoscaler']!.first, - unittest.equals(arg_autoscaler), - ); unittest.expect( queryMap['requestId']!.first, unittest.equals(arg_requestId), @@ -102565,22 +104425,21 @@ void main() { final resp = convert.json.encode(buildOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.update(arg_request, arg_project, arg_region, - autoscaler: arg_autoscaler, - requestId: arg_requestId, - $fields: arg_$fields); + final response = await res.insert(arg_request, arg_project, arg_region, + requestId: arg_requestId, $fields: arg_$fields); checkOperation(response as api.Operation); }); - }); - 
unittest.group('resource-RegionBackendServicesResource', () { - unittest.test('method--delete', () async { + unittest.test('method--list', () async { final mock = HttpServerMock(); final res = api.ComputeApi(mock).regionBackendServices; final arg_project = 'foo'; final arg_region = 'foo'; - final arg_backendService = 'foo'; - final arg_requestId = 'foo'; + final arg_filter = 'foo'; + final arg_maxResults = 42; + final arg_orderBy = 'foo'; + final arg_pageToken = 'foo'; + final arg_returnPartialSuccess = true; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -102616,7 +104475,7 @@ void main() { unittest.equals('/regions/'), ); pathOffset += 9; - index = path.indexOf('/backendServices/', pathOffset); + index = path.indexOf('/backendServices', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); @@ -102626,16 +104485,10 @@ void main() { unittest.equals('$arg_region'), ); unittest.expect( - path.substring(pathOffset, pathOffset + 17), - unittest.equals('/backendServices/'), - ); - pathOffset += 17; - subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset)); - pathOffset = path.length; - unittest.expect( - subPart, - unittest.equals('$arg_backendService'), + path.substring(pathOffset, pathOffset + 16), + unittest.equals('/backendServices'), ); + pathOffset += 16; final query = req.url.query; var queryOffset = 0; @@ -102653,8 +104506,24 @@ void main() { } } unittest.expect( - queryMap['requestId']!.first, - unittest.equals(arg_requestId), + queryMap['filter']!.first, + unittest.equals(arg_filter), + ); + unittest.expect( + core.int.parse(queryMap['maxResults']!.first), + unittest.equals(arg_maxResults), + ); + unittest.expect( + queryMap['orderBy']!.first, + unittest.equals(arg_orderBy), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), + ); + unittest.expect( + queryMap['returnPartialSuccess']!.first, + unittest.equals('$arg_returnPartialSuccess'), ); unittest.expect( queryMap['fields']!.first, @@ -102664,21 +104533,29 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildOperation()); + final resp = convert.json.encode(buildBackendServiceList()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.delete( - arg_project, arg_region, arg_backendService, - requestId: arg_requestId, $fields: arg_$fields); - checkOperation(response as api.Operation); + final response = await res.list(arg_project, arg_region, + filter: arg_filter, + maxResults: arg_maxResults, + orderBy: arg_orderBy, + pageToken: arg_pageToken, + returnPartialSuccess: arg_returnPartialSuccess, + $fields: arg_$fields); + checkBackendServiceList(response as api.BackendServiceList); }); - unittest.test('method--get', () async { + unittest.test('method--listUsable', () async { final mock = HttpServerMock(); final res = api.ComputeApi(mock).regionBackendServices; final arg_project = 'foo'; final arg_region = 'foo'; - final arg_backendService = 'foo'; + final arg_filter = 'foo'; + final arg_maxResults = 42; + final arg_orderBy = 'foo'; + final arg_pageToken = 'foo'; + final arg_returnPartialSuccess = true; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -102714,7 +104591,7 @@ void main() { unittest.equals('/regions/'), ); pathOffset += 9; - index = 
path.indexOf('/backendServices/', pathOffset); + index = path.indexOf('/backendServices/listUsable', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); @@ -102724,16 +104601,10 @@ void main() { unittest.equals('$arg_region'), ); unittest.expect( - path.substring(pathOffset, pathOffset + 17), - unittest.equals('/backendServices/'), - ); - pathOffset += 17; - subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset)); - pathOffset = path.length; - unittest.expect( - subPart, - unittest.equals('$arg_backendService'), + path.substring(pathOffset, pathOffset + 27), + unittest.equals('/backendServices/listUsable'), ); + pathOffset += 27; final query = req.url.query; var queryOffset = 0; @@ -102750,6 +104621,26 @@ void main() { ); } } + unittest.expect( + queryMap['filter']!.first, + unittest.equals(arg_filter), + ); + unittest.expect( + core.int.parse(queryMap['maxResults']!.first), + unittest.equals(arg_maxResults), + ); + unittest.expect( + queryMap['orderBy']!.first, + unittest.equals(arg_orderBy), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), + ); + unittest.expect( + queryMap['returnPartialSuccess']!.first, + unittest.equals('$arg_returnPartialSuccess'), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -102758,27 +104649,32 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildBackendService()); + final resp = convert.json.encode(buildBackendServiceListUsable()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.get( - arg_project, arg_region, arg_backendService, + final response = await res.listUsable(arg_project, arg_region, + filter: arg_filter, + maxResults: arg_maxResults, + orderBy: arg_orderBy, + pageToken: arg_pageToken, + returnPartialSuccess: arg_returnPartialSuccess, $fields: arg_$fields); - checkBackendService(response as api.BackendService); + checkBackendServiceListUsable(response as api.BackendServiceListUsable); }); - unittest.test('method--getHealth', () async { + unittest.test('method--patch', () async { final mock = HttpServerMock(); final res = api.ComputeApi(mock).regionBackendServices; - final arg_request = buildResourceGroupReference(); + final arg_request = buildBackendService(); final arg_project = 'foo'; final arg_region = 'foo'; final arg_backendService = 'foo'; + final arg_requestId = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.ResourceGroupReference.fromJson( + final obj = api.BackendService.fromJson( json as core.Map); - checkResourceGroupReference(obj); + checkBackendService(obj); final path = req.url.path; var pathOffset = 0; @@ -102827,20 +104723,12 @@ void main() { unittest.equals('/backendServices/'), ); pathOffset += 17; - index = path.indexOf('/getHealth', pathOffset); - unittest.expect(index >= 0, unittest.isTrue); - subPart = - core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); - pathOffset = index; + subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset)); + pathOffset = path.length; unittest.expect( subPart, unittest.equals('$arg_backendService'), ); - unittest.expect( - path.substring(pathOffset, pathOffset + 10), - unittest.equals('/getHealth'), - ); - pathOffset += 10; final query = req.url.query; var queryOffset = 0; @@ -102857,6 +104745,10 @@ void main() { ); } } + 
unittest.expect( + queryMap['requestId']!.first, + unittest.equals(arg_requestId), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -102865,24 +104757,28 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildBackendServiceGroupHealth()); + final resp = convert.json.encode(buildOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.getHealth( + final response = await res.patch( arg_request, arg_project, arg_region, arg_backendService, - $fields: arg_$fields); - checkBackendServiceGroupHealth(response as api.BackendServiceGroupHealth); + requestId: arg_requestId, $fields: arg_$fields); + checkOperation(response as api.Operation); }); - unittest.test('method--getIamPolicy', () async { + unittest.test('method--setIamPolicy', () async { final mock = HttpServerMock(); final res = api.ComputeApi(mock).regionBackendServices; + final arg_request = buildRegionSetPolicyRequest(); final arg_project = 'foo'; final arg_region = 'foo'; final arg_resource = 'foo'; - final arg_optionsRequestedPolicyVersion = 42; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.RegionSetPolicyRequest.fromJson( + json as core.Map); + checkRegionSetPolicyRequest(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -102930,7 +104826,7 @@ void main() { unittest.equals('/backendServices/'), ); pathOffset += 17; - index = path.indexOf('/getIamPolicy', pathOffset); + index = path.indexOf('/setIamPolicy', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); @@ -102941,7 +104837,7 @@ void main() { ); unittest.expect( path.substring(pathOffset, pathOffset + 13), - unittest.equals('/getIamPolicy'), + unittest.equals('/setIamPolicy'), ); pathOffset += 13; @@ -102960,10 +104856,6 @@ void main() { ); } } - unittest.expect( - core.int.parse(queryMap['optionsRequestedPolicyVersion']!.first), - unittest.equals(arg_optionsRequestedPolicyVersion), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -102975,25 +104867,25 @@ void main() { final resp = convert.json.encode(buildPolicy()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.getIamPolicy( - arg_project, arg_region, arg_resource, - optionsRequestedPolicyVersion: arg_optionsRequestedPolicyVersion, + final response = await res.setIamPolicy( + arg_request, arg_project, arg_region, arg_resource, $fields: arg_$fields); checkPolicy(response as api.Policy); }); - unittest.test('method--insert', () async { + unittest.test('method--setSecurityPolicy', () async { final mock = HttpServerMock(); final res = api.ComputeApi(mock).regionBackendServices; - final arg_request = buildBackendService(); + final arg_request = buildSecurityPolicyReference(); final arg_project = 'foo'; final arg_region = 'foo'; + final arg_backendService = 'foo'; final arg_requestId = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.BackendService.fromJson( + final obj = api.SecurityPolicyReference.fromJson( json as core.Map); - checkBackendService(obj); + checkSecurityPolicyReference(obj); final path = req.url.path; var pathOffset = 0; @@ -103028,7 +104920,7 @@ void main() { unittest.equals('/regions/'), ); pathOffset += 9; - index = 
path.indexOf('/backendServices', pathOffset); + index = path.indexOf('/backendServices/', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); @@ -103038,10 +104930,24 @@ void main() { unittest.equals('$arg_region'), ); unittest.expect( - path.substring(pathOffset, pathOffset + 16), - unittest.equals('/backendServices'), + path.substring(pathOffset, pathOffset + 17), + unittest.equals('/backendServices/'), ); - pathOffset += 16; + pathOffset += 17; + index = path.indexOf('/setSecurityPolicy', pathOffset); + unittest.expect(index >= 0, unittest.isTrue); + subPart = + core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); + pathOffset = index; + unittest.expect( + subPart, + unittest.equals('$arg_backendService'), + ); + unittest.expect( + path.substring(pathOffset, pathOffset + 18), + unittest.equals('/setSecurityPolicy'), + ); + pathOffset += 18; final query = req.url.query; var queryOffset = 0; @@ -103073,23 +104979,25 @@ void main() { final resp = convert.json.encode(buildOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.insert(arg_request, arg_project, arg_region, + final response = await res.setSecurityPolicy( + arg_request, arg_project, arg_region, arg_backendService, requestId: arg_requestId, $fields: arg_$fields); checkOperation(response as api.Operation); }); - unittest.test('method--list', () async { + unittest.test('method--testIamPermissions', () async { final mock = HttpServerMock(); final res = api.ComputeApi(mock).regionBackendServices; + final arg_request = buildTestPermissionsRequest(); final arg_project = 'foo'; final arg_region = 'foo'; - final arg_filter = 'foo'; - final arg_maxResults = 42; - final arg_orderBy = 'foo'; - final arg_pageToken = 'foo'; - final arg_returnPartialSuccess = true; + final arg_resource = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.TestPermissionsRequest.fromJson( + json as core.Map); + checkTestPermissionsRequest(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -103123,7 +105031,7 @@ void main() { unittest.equals('/regions/'), ); pathOffset += 9; - index = path.indexOf('/backendServices', pathOffset); + index = path.indexOf('/backendServices/', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); @@ -103133,10 +105041,24 @@ void main() { unittest.equals('$arg_region'), ); unittest.expect( - path.substring(pathOffset, pathOffset + 16), - unittest.equals('/backendServices'), + path.substring(pathOffset, pathOffset + 17), + unittest.equals('/backendServices/'), ); - pathOffset += 16; + pathOffset += 17; + index = path.indexOf('/testIamPermissions', pathOffset); + unittest.expect(index >= 0, unittest.isTrue); + subPart = + core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); + pathOffset = index; + unittest.expect( + subPart, + unittest.equals('$arg_resource'), + ); + unittest.expect( + path.substring(pathOffset, pathOffset + 19), + unittest.equals('/testIamPermissions'), + ); + pathOffset += 19; final query = req.url.query; var queryOffset = 0; @@ -103153,26 +105075,6 @@ void main() { ); } } - unittest.expect( - queryMap['filter']!.first, - unittest.equals(arg_filter), - ); - unittest.expect( - core.int.parse(queryMap['maxResults']!.first), - unittest.equals(arg_maxResults), - ); - unittest.expect( - 
queryMap['orderBy']!.first, - unittest.equals(arg_orderBy), - ); - unittest.expect( - queryMap['pageToken']!.first, - unittest.equals(arg_pageToken), - ); - unittest.expect( - queryMap['returnPartialSuccess']!.first, - unittest.equals('$arg_returnPartialSuccess'), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -103181,31 +105083,29 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildBackendServiceList()); + final resp = convert.json.encode(buildTestPermissionsResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.list(arg_project, arg_region, - filter: arg_filter, - maxResults: arg_maxResults, - orderBy: arg_orderBy, - pageToken: arg_pageToken, - returnPartialSuccess: arg_returnPartialSuccess, + final response = await res.testIamPermissions( + arg_request, arg_project, arg_region, arg_resource, $fields: arg_$fields); - checkBackendServiceList(response as api.BackendServiceList); + checkTestPermissionsResponse(response as api.TestPermissionsResponse); }); - unittest.test('method--listUsable', () async { + unittest.test('method--update', () async { final mock = HttpServerMock(); final res = api.ComputeApi(mock).regionBackendServices; + final arg_request = buildBackendService(); final arg_project = 'foo'; final arg_region = 'foo'; - final arg_filter = 'foo'; - final arg_maxResults = 42; - final arg_orderBy = 'foo'; - final arg_pageToken = 'foo'; - final arg_returnPartialSuccess = true; + final arg_backendService = 'foo'; + final arg_requestId = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.BackendService.fromJson( + json as core.Map); + checkBackendService(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -103239,7 +105139,7 @@ void main() { unittest.equals('/regions/'), ); pathOffset += 9; - index = path.indexOf('/backendServices/listUsable', pathOffset); + index = path.indexOf('/backendServices/', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); @@ -103249,10 +105149,16 @@ void main() { unittest.equals('$arg_region'), ); unittest.expect( - path.substring(pathOffset, pathOffset + 27), - unittest.equals('/backendServices/listUsable'), + path.substring(pathOffset, pathOffset + 17), + unittest.equals('/backendServices/'), + ); + pathOffset += 17; + subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset)); + pathOffset = path.length; + unittest.expect( + subPart, + unittest.equals('$arg_backendService'), ); - pathOffset += 27; final query = req.url.query; var queryOffset = 0; @@ -103270,24 +105176,8 @@ void main() { } } unittest.expect( - queryMap['filter']!.first, - unittest.equals(arg_filter), - ); - unittest.expect( - core.int.parse(queryMap['maxResults']!.first), - unittest.equals(arg_maxResults), - ); - unittest.expect( - queryMap['orderBy']!.first, - unittest.equals(arg_orderBy), - ); - unittest.expect( - queryMap['pageToken']!.first, - unittest.equals(arg_pageToken), - ); - unittest.expect( - queryMap['returnPartialSuccess']!.first, - unittest.equals('$arg_returnPartialSuccess'), + queryMap['requestId']!.first, + unittest.equals(arg_requestId), ); unittest.expect( queryMap['fields']!.first, @@ -103297,33 +105187,30 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = 
convert.json.encode(buildBackendServiceListUsable()); + final resp = convert.json.encode(buildOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.listUsable(arg_project, arg_region, - filter: arg_filter, - maxResults: arg_maxResults, - orderBy: arg_orderBy, - pageToken: arg_pageToken, - returnPartialSuccess: arg_returnPartialSuccess, - $fields: arg_$fields); - checkBackendServiceListUsable(response as api.BackendServiceListUsable); + final response = await res.update( + arg_request, arg_project, arg_region, arg_backendService, + requestId: arg_requestId, $fields: arg_$fields); + checkOperation(response as api.Operation); }); + }); - unittest.test('method--patch', () async { + unittest.group('resource-RegionCommitmentsResource', () { + unittest.test('method--aggregatedList', () async { final mock = HttpServerMock(); - final res = api.ComputeApi(mock).regionBackendServices; - final arg_request = buildBackendService(); + final res = api.ComputeApi(mock).regionCommitments; final arg_project = 'foo'; - final arg_region = 'foo'; - final arg_backendService = 'foo'; - final arg_requestId = 'foo'; + final arg_filter = 'foo'; + final arg_includeAllScopes = true; + final arg_maxResults = 42; + final arg_orderBy = 'foo'; + final arg_pageToken = 'foo'; + final arg_returnPartialSuccess = true; + final arg_serviceProjectNumber = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.BackendService.fromJson( - json as core.Map); - checkBackendService(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -103343,7 +105230,7 @@ void main() { unittest.equals('projects/'), ); pathOffset += 9; - index = path.indexOf('/regions/', pathOffset); + index = path.indexOf('/aggregated/commitments', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); @@ -103353,30 +105240,10 @@ void main() { unittest.equals('$arg_project'), ); unittest.expect( - path.substring(pathOffset, pathOffset + 9), - unittest.equals('/regions/'), - ); - pathOffset += 9; - index = path.indexOf('/backendServices/', pathOffset); - unittest.expect(index >= 0, unittest.isTrue); - subPart = - core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); - pathOffset = index; - unittest.expect( - subPart, - unittest.equals('$arg_region'), - ); - unittest.expect( - path.substring(pathOffset, pathOffset + 17), - unittest.equals('/backendServices/'), - ); - pathOffset += 17; - subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset)); - pathOffset = path.length; - unittest.expect( - subPart, - unittest.equals('$arg_backendService'), + path.substring(pathOffset, pathOffset + 23), + unittest.equals('/aggregated/commitments'), ); + pathOffset += 23; final query = req.url.query; var queryOffset = 0; @@ -103394,8 +105261,32 @@ void main() { } } unittest.expect( - queryMap['requestId']!.first, - unittest.equals(arg_requestId), + queryMap['filter']!.first, + unittest.equals(arg_filter), + ); + unittest.expect( + queryMap['includeAllScopes']!.first, + unittest.equals('$arg_includeAllScopes'), + ); + unittest.expect( + core.int.parse(queryMap['maxResults']!.first), + unittest.equals(arg_maxResults), + ); + unittest.expect( + queryMap['orderBy']!.first, + unittest.equals(arg_orderBy), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), + ); + unittest.expect( + 
queryMap['returnPartialSuccess']!.first, + unittest.equals('$arg_returnPartialSuccess'), + ); + unittest.expect( + queryMap['serviceProjectNumber']!.first, + unittest.equals(arg_serviceProjectNumber), ); unittest.expect( queryMap['fields']!.first, @@ -103405,28 +105296,29 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildOperation()); + final resp = convert.json.encode(buildCommitmentAggregatedList()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.patch( - arg_request, arg_project, arg_region, arg_backendService, - requestId: arg_requestId, $fields: arg_$fields); - checkOperation(response as api.Operation); + final response = await res.aggregatedList(arg_project, + filter: arg_filter, + includeAllScopes: arg_includeAllScopes, + maxResults: arg_maxResults, + orderBy: arg_orderBy, + pageToken: arg_pageToken, + returnPartialSuccess: arg_returnPartialSuccess, + serviceProjectNumber: arg_serviceProjectNumber, + $fields: arg_$fields); + checkCommitmentAggregatedList(response as api.CommitmentAggregatedList); }); - unittest.test('method--setIamPolicy', () async { + unittest.test('method--get', () async { final mock = HttpServerMock(); - final res = api.ComputeApi(mock).regionBackendServices; - final arg_request = buildRegionSetPolicyRequest(); + final res = api.ComputeApi(mock).regionCommitments; final arg_project = 'foo'; final arg_region = 'foo'; - final arg_resource = 'foo'; + final arg_commitment = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.RegionSetPolicyRequest.fromJson( - json as core.Map); - checkRegionSetPolicyRequest(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -103460,7 +105352,7 @@ void main() { unittest.equals('/regions/'), ); pathOffset += 9; - index = path.indexOf('/backendServices/', pathOffset); + index = path.indexOf('/commitments/', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); @@ -103470,24 +105362,16 @@ void main() { unittest.equals('$arg_region'), ); unittest.expect( - path.substring(pathOffset, pathOffset + 17), - unittest.equals('/backendServices/'), + path.substring(pathOffset, pathOffset + 13), + unittest.equals('/commitments/'), ); - pathOffset += 17; - index = path.indexOf('/setIamPolicy', pathOffset); - unittest.expect(index >= 0, unittest.isTrue); - subPart = - core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); - pathOffset = index; + pathOffset += 13; + subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset)); + pathOffset = path.length; unittest.expect( subPart, - unittest.equals('$arg_resource'), - ); - unittest.expect( - path.substring(pathOffset, pathOffset + 13), - unittest.equals('/setIamPolicy'), + unittest.equals('$arg_commitment'), ); - pathOffset += 13; final query = req.url.query; var queryOffset = 0; @@ -103512,28 +105396,26 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildPolicy()); + final resp = convert.json.encode(buildCommitment()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.setIamPolicy( - arg_request, arg_project, arg_region, arg_resource, + final response = await res.get(arg_project, arg_region, arg_commitment, $fields: arg_$fields); - checkPolicy(response as api.Policy); + 
checkCommitment(response as api.Commitment); }); - unittest.test('method--setSecurityPolicy', () async { + unittest.test('method--insert', () async { final mock = HttpServerMock(); - final res = api.ComputeApi(mock).regionBackendServices; - final arg_request = buildSecurityPolicyReference(); + final res = api.ComputeApi(mock).regionCommitments; + final arg_request = buildCommitment(); final arg_project = 'foo'; final arg_region = 'foo'; - final arg_backendService = 'foo'; final arg_requestId = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.SecurityPolicyReference.fromJson( + final obj = api.Commitment.fromJson( json as core.Map); - checkSecurityPolicyReference(obj); + checkCommitment(obj); final path = req.url.path; var pathOffset = 0; @@ -103568,7 +105450,7 @@ void main() { unittest.equals('/regions/'), ); pathOffset += 9; - index = path.indexOf('/backendServices/', pathOffset); + index = path.indexOf('/commitments', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); @@ -103578,24 +105460,10 @@ void main() { unittest.equals('$arg_region'), ); unittest.expect( - path.substring(pathOffset, pathOffset + 17), - unittest.equals('/backendServices/'), - ); - pathOffset += 17; - index = path.indexOf('/setSecurityPolicy', pathOffset); - unittest.expect(index >= 0, unittest.isTrue); - subPart = - core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); - pathOffset = index; - unittest.expect( - subPart, - unittest.equals('$arg_backendService'), - ); - unittest.expect( - path.substring(pathOffset, pathOffset + 18), - unittest.equals('/setSecurityPolicy'), + path.substring(pathOffset, pathOffset + 12), + unittest.equals('/commitments'), ); - pathOffset += 18; + pathOffset += 12; final query = req.url.query; var queryOffset = 0; @@ -103627,25 +105495,23 @@ void main() { final resp = convert.json.encode(buildOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.setSecurityPolicy( - arg_request, arg_project, arg_region, arg_backendService, + final response = await res.insert(arg_request, arg_project, arg_region, requestId: arg_requestId, $fields: arg_$fields); checkOperation(response as api.Operation); }); - unittest.test('method--testIamPermissions', () async { + unittest.test('method--list', () async { final mock = HttpServerMock(); - final res = api.ComputeApi(mock).regionBackendServices; - final arg_request = buildTestPermissionsRequest(); + final res = api.ComputeApi(mock).regionCommitments; final arg_project = 'foo'; final arg_region = 'foo'; - final arg_resource = 'foo'; + final arg_filter = 'foo'; + final arg_maxResults = 42; + final arg_orderBy = 'foo'; + final arg_pageToken = 'foo'; + final arg_returnPartialSuccess = true; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.TestPermissionsRequest.fromJson( - json as core.Map); - checkTestPermissionsRequest(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -103679,7 +105545,7 @@ void main() { unittest.equals('/regions/'), ); pathOffset += 9; - index = path.indexOf('/backendServices/', pathOffset); + index = path.indexOf('/commitments', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); @@ -103689,24 +105555,10 @@ void main() { unittest.equals('$arg_region'), ); 
unittest.expect( - path.substring(pathOffset, pathOffset + 17), - unittest.equals('/backendServices/'), - ); - pathOffset += 17; - index = path.indexOf('/testIamPermissions', pathOffset); - unittest.expect(index >= 0, unittest.isTrue); - subPart = - core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); - pathOffset = index; - unittest.expect( - subPart, - unittest.equals('$arg_resource'), - ); - unittest.expect( - path.substring(pathOffset, pathOffset + 19), - unittest.equals('/testIamPermissions'), + path.substring(pathOffset, pathOffset + 12), + unittest.equals('/commitments'), ); - pathOffset += 19; + pathOffset += 12; final query = req.url.query; var queryOffset = 0; @@ -103723,6 +105575,26 @@ void main() { ); } } + unittest.expect( + queryMap['filter']!.first, + unittest.equals(arg_filter), + ); + unittest.expect( + core.int.parse(queryMap['maxResults']!.first), + unittest.equals(arg_maxResults), + ); + unittest.expect( + queryMap['orderBy']!.first, + unittest.equals(arg_orderBy), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), + ); + unittest.expect( + queryMap['returnPartialSuccess']!.first, + unittest.equals('$arg_returnPartialSuccess'), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -103731,28 +105603,34 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildTestPermissionsResponse()); + final resp = convert.json.encode(buildCommitmentList()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.testIamPermissions( - arg_request, arg_project, arg_region, arg_resource, + final response = await res.list(arg_project, arg_region, + filter: arg_filter, + maxResults: arg_maxResults, + orderBy: arg_orderBy, + pageToken: arg_pageToken, + returnPartialSuccess: arg_returnPartialSuccess, $fields: arg_$fields); - checkTestPermissionsResponse(response as api.TestPermissionsResponse); + checkCommitmentList(response as api.CommitmentList); }); unittest.test('method--update', () async { final mock = HttpServerMock(); - final res = api.ComputeApi(mock).regionBackendServices; - final arg_request = buildBackendService(); + final res = api.ComputeApi(mock).regionCommitments; + final arg_request = buildCommitment(); final arg_project = 'foo'; final arg_region = 'foo'; - final arg_backendService = 'foo'; + final arg_commitment = 'foo'; + final arg_paths = buildUnnamed868(); final arg_requestId = 'foo'; + final arg_updateMask = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.BackendService.fromJson( + final obj = api.Commitment.fromJson( json as core.Map); - checkBackendService(obj); + checkCommitment(obj); final path = req.url.path; var pathOffset = 0; @@ -103787,7 +105665,7 @@ void main() { unittest.equals('/regions/'), ); pathOffset += 9; - index = path.indexOf('/backendServices/', pathOffset); + index = path.indexOf('/commitments/', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); @@ -103797,15 +105675,15 @@ void main() { unittest.equals('$arg_region'), ); unittest.expect( - path.substring(pathOffset, pathOffset + 17), - unittest.equals('/backendServices/'), + path.substring(pathOffset, pathOffset + 13), + unittest.equals('/commitments/'), ); - pathOffset += 17; + pathOffset += 13; subPart = 
core.Uri.decodeQueryComponent(path.substring(pathOffset)); pathOffset = path.length; unittest.expect( subPart, - unittest.equals('$arg_backendService'), + unittest.equals('$arg_commitment'), ); final query = req.url.query; @@ -103823,10 +105701,18 @@ void main() { ); } } + unittest.expect( + queryMap['paths']!, + unittest.equals(arg_paths), + ); unittest.expect( queryMap['requestId']!.first, unittest.equals(arg_requestId), ); + unittest.expect( + queryMap['updateMask']!.first, + unittest.equals(arg_updateMask), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -103839,24 +105725,22 @@ void main() { return async.Future.value(stringResponse(200, h, resp)); }), true); final response = await res.update( - arg_request, arg_project, arg_region, arg_backendService, - requestId: arg_requestId, $fields: arg_$fields); + arg_request, arg_project, arg_region, arg_commitment, + paths: arg_paths, + requestId: arg_requestId, + updateMask: arg_updateMask, + $fields: arg_$fields); checkOperation(response as api.Operation); }); }); - unittest.group('resource-RegionCommitmentsResource', () { - unittest.test('method--aggregatedList', () async { + unittest.group('resource-RegionDiskTypesResource', () { + unittest.test('method--get', () async { final mock = HttpServerMock(); - final res = api.ComputeApi(mock).regionCommitments; + final res = api.ComputeApi(mock).regionDiskTypes; final arg_project = 'foo'; - final arg_filter = 'foo'; - final arg_includeAllScopes = true; - final arg_maxResults = 42; - final arg_orderBy = 'foo'; - final arg_pageToken = 'foo'; - final arg_returnPartialSuccess = true; - final arg_serviceProjectNumber = 'foo'; + final arg_region = 'foo'; + final arg_diskType = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -103878,7 +105762,7 @@ void main() { unittest.equals('projects/'), ); pathOffset += 9; - index = path.indexOf('/aggregated/commitments', pathOffset); + index = path.indexOf('/regions/', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); @@ -103888,10 +105772,30 @@ void main() { unittest.equals('$arg_project'), ); unittest.expect( - path.substring(pathOffset, pathOffset + 23), - unittest.equals('/aggregated/commitments'), + path.substring(pathOffset, pathOffset + 9), + unittest.equals('/regions/'), + ); + pathOffset += 9; + index = path.indexOf('/diskTypes/', pathOffset); + unittest.expect(index >= 0, unittest.isTrue); + subPart = + core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); + pathOffset = index; + unittest.expect( + subPart, + unittest.equals('$arg_region'), + ); + unittest.expect( + path.substring(pathOffset, pathOffset + 11), + unittest.equals('/diskTypes/'), + ); + pathOffset += 11; + subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset)); + pathOffset = path.length; + unittest.expect( + subPart, + unittest.equals('$arg_diskType'), ); - pathOffset += 23; final query = req.url.query; var queryOffset = 0; @@ -103908,34 +105812,6 @@ void main() { ); } } - unittest.expect( - queryMap['filter']!.first, - unittest.equals(arg_filter), - ); - unittest.expect( - queryMap['includeAllScopes']!.first, - unittest.equals('$arg_includeAllScopes'), - ); - unittest.expect( - core.int.parse(queryMap['maxResults']!.first), - unittest.equals(arg_maxResults), - ); - unittest.expect( - queryMap['orderBy']!.first, - unittest.equals(arg_orderBy), - ); - unittest.expect( 
- queryMap['pageToken']!.first, - unittest.equals(arg_pageToken), - ); - unittest.expect( - queryMap['returnPartialSuccess']!.first, - unittest.equals('$arg_returnPartialSuccess'), - ); - unittest.expect( - queryMap['serviceProjectNumber']!.first, - unittest.equals(arg_serviceProjectNumber), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -103944,27 +105820,24 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildCommitmentAggregatedList()); + final resp = convert.json.encode(buildDiskType()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.aggregatedList(arg_project, - filter: arg_filter, - includeAllScopes: arg_includeAllScopes, - maxResults: arg_maxResults, - orderBy: arg_orderBy, - pageToken: arg_pageToken, - returnPartialSuccess: arg_returnPartialSuccess, - serviceProjectNumber: arg_serviceProjectNumber, + final response = await res.get(arg_project, arg_region, arg_diskType, $fields: arg_$fields); - checkCommitmentAggregatedList(response as api.CommitmentAggregatedList); + checkDiskType(response as api.DiskType); }); - unittest.test('method--get', () async { + unittest.test('method--list', () async { final mock = HttpServerMock(); - final res = api.ComputeApi(mock).regionCommitments; + final res = api.ComputeApi(mock).regionDiskTypes; final arg_project = 'foo'; final arg_region = 'foo'; - final arg_commitment = 'foo'; + final arg_filter = 'foo'; + final arg_maxResults = 42; + final arg_orderBy = 'foo'; + final arg_pageToken = 'foo'; + final arg_returnPartialSuccess = true; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -104000,7 +105873,7 @@ void main() { unittest.equals('/regions/'), ); pathOffset += 9; - index = path.indexOf('/commitments/', pathOffset); + index = path.indexOf('/diskTypes', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); @@ -104010,16 +105883,10 @@ void main() { unittest.equals('$arg_region'), ); unittest.expect( - path.substring(pathOffset, pathOffset + 13), - unittest.equals('/commitments/'), - ); - pathOffset += 13; - subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset)); - pathOffset = path.length; - unittest.expect( - subPart, - unittest.equals('$arg_commitment'), + path.substring(pathOffset, pathOffset + 10), + unittest.equals('/diskTypes'), ); + pathOffset += 10; final query = req.url.query; var queryOffset = 0; @@ -104036,6 +105903,26 @@ void main() { ); } } + unittest.expect( + queryMap['filter']!.first, + unittest.equals(arg_filter), + ); + unittest.expect( + core.int.parse(queryMap['maxResults']!.first), + unittest.equals(arg_maxResults), + ); + unittest.expect( + queryMap['orderBy']!.first, + unittest.equals(arg_orderBy), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), + ); + unittest.expect( + queryMap['returnPartialSuccess']!.first, + unittest.equals('$arg_returnPartialSuccess'), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -104044,26 +105931,34 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildCommitment()); + final resp = convert.json.encode(buildRegionDiskTypeList()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await 
res.get(arg_project, arg_region, arg_commitment, + final response = await res.list(arg_project, arg_region, + filter: arg_filter, + maxResults: arg_maxResults, + orderBy: arg_orderBy, + pageToken: arg_pageToken, + returnPartialSuccess: arg_returnPartialSuccess, $fields: arg_$fields); - checkCommitment(response as api.Commitment); + checkRegionDiskTypeList(response as api.RegionDiskTypeList); }); + }); - unittest.test('method--insert', () async { + unittest.group('resource-RegionDisksResource', () { + unittest.test('method--addResourcePolicies', () async { final mock = HttpServerMock(); - final res = api.ComputeApi(mock).regionCommitments; - final arg_request = buildCommitment(); + final res = api.ComputeApi(mock).regionDisks; + final arg_request = buildRegionDisksAddResourcePoliciesRequest(); final arg_project = 'foo'; final arg_region = 'foo'; + final arg_disk = 'foo'; final arg_requestId = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.Commitment.fromJson( + final obj = api.RegionDisksAddResourcePoliciesRequest.fromJson( json as core.Map); - checkCommitment(obj); + checkRegionDisksAddResourcePoliciesRequest(obj); final path = req.url.path; var pathOffset = 0; @@ -104098,7 +105993,7 @@ void main() { unittest.equals('/regions/'), ); pathOffset += 9; - index = path.indexOf('/commitments', pathOffset); + index = path.indexOf('/disks/', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); @@ -104108,10 +106003,24 @@ void main() { unittest.equals('$arg_region'), ); unittest.expect( - path.substring(pathOffset, pathOffset + 12), - unittest.equals('/commitments'), + path.substring(pathOffset, pathOffset + 7), + unittest.equals('/disks/'), ); - pathOffset += 12; + pathOffset += 7; + index = path.indexOf('/addResourcePolicies', pathOffset); + unittest.expect(index >= 0, unittest.isTrue); + subPart = + core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); + pathOffset = index; + unittest.expect( + subPart, + unittest.equals('$arg_disk'), + ); + unittest.expect( + path.substring(pathOffset, pathOffset + 20), + unittest.equals('/addResourcePolicies'), + ); + pathOffset += 20; final query = req.url.query; var queryOffset = 0; @@ -104143,23 +106052,25 @@ void main() { final resp = convert.json.encode(buildOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.insert(arg_request, arg_project, arg_region, + final response = await res.addResourcePolicies( + arg_request, arg_project, arg_region, arg_disk, requestId: arg_requestId, $fields: arg_$fields); checkOperation(response as api.Operation); }); - unittest.test('method--list', () async { + unittest.test('method--bulkInsert', () async { final mock = HttpServerMock(); - final res = api.ComputeApi(mock).regionCommitments; + final res = api.ComputeApi(mock).regionDisks; + final arg_request = buildBulkInsertDiskResource(); final arg_project = 'foo'; final arg_region = 'foo'; - final arg_filter = 'foo'; - final arg_maxResults = 42; - final arg_orderBy = 'foo'; - final arg_pageToken = 'foo'; - final arg_returnPartialSuccess = true; + final arg_requestId = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.BulkInsertDiskResource.fromJson( + json as core.Map); + checkBulkInsertDiskResource(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -104193,7 
+106104,7 @@ void main() { unittest.equals('/regions/'), ); pathOffset += 9; - index = path.indexOf('/commitments', pathOffset); + index = path.indexOf('/disks/bulkInsert', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); @@ -104203,10 +106114,10 @@ void main() { unittest.equals('$arg_region'), ); unittest.expect( - path.substring(pathOffset, pathOffset + 12), - unittest.equals('/commitments'), + path.substring(pathOffset, pathOffset + 17), + unittest.equals('/disks/bulkInsert'), ); - pathOffset += 12; + pathOffset += 17; final query = req.url.query; var queryOffset = 0; @@ -104224,24 +106135,8 @@ void main() { } } unittest.expect( - queryMap['filter']!.first, - unittest.equals(arg_filter), - ); - unittest.expect( - core.int.parse(queryMap['maxResults']!.first), - unittest.equals(arg_maxResults), - ); - unittest.expect( - queryMap['orderBy']!.first, - unittest.equals(arg_orderBy), - ); - unittest.expect( - queryMap['pageToken']!.first, - unittest.equals(arg_pageToken), - ); - unittest.expect( - queryMap['returnPartialSuccess']!.first, - unittest.equals('$arg_returnPartialSuccess'), + queryMap['requestId']!.first, + unittest.equals(arg_requestId), ); unittest.expect( queryMap['fields']!.first, @@ -104251,34 +106146,28 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildCommitmentList()); + final resp = convert.json.encode(buildOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.list(arg_project, arg_region, - filter: arg_filter, - maxResults: arg_maxResults, - orderBy: arg_orderBy, - pageToken: arg_pageToken, - returnPartialSuccess: arg_returnPartialSuccess, - $fields: arg_$fields); - checkCommitmentList(response as api.CommitmentList); + final response = await res.bulkInsert( + arg_request, arg_project, arg_region, + requestId: arg_requestId, $fields: arg_$fields); + checkOperation(response as api.Operation); }); - unittest.test('method--update', () async { + unittest.test('method--createSnapshot', () async { final mock = HttpServerMock(); - final res = api.ComputeApi(mock).regionCommitments; - final arg_request = buildCommitment(); + final res = api.ComputeApi(mock).regionDisks; + final arg_request = buildSnapshot(); final arg_project = 'foo'; final arg_region = 'foo'; - final arg_commitment = 'foo'; - final arg_paths = buildUnnamed859(); + final arg_disk = 'foo'; final arg_requestId = 'foo'; - final arg_updateMask = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.Commitment.fromJson( - json as core.Map); - checkCommitment(obj); + final obj = + api.Snapshot.fromJson(json as core.Map); + checkSnapshot(obj); final path = req.url.path; var pathOffset = 0; @@ -104313,7 +106202,7 @@ void main() { unittest.equals('/regions/'), ); pathOffset += 9; - index = path.indexOf('/commitments/', pathOffset); + index = path.indexOf('/disks/', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); @@ -104323,16 +106212,24 @@ void main() { unittest.equals('$arg_region'), ); unittest.expect( - path.substring(pathOffset, pathOffset + 13), - unittest.equals('/commitments/'), + path.substring(pathOffset, pathOffset + 7), + unittest.equals('/disks/'), ); - pathOffset += 13; - subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset)); - 
pathOffset = path.length; + pathOffset += 7; + index = path.indexOf('/createSnapshot', pathOffset); + unittest.expect(index >= 0, unittest.isTrue); + subPart = + core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); + pathOffset = index; unittest.expect( subPart, - unittest.equals('$arg_commitment'), + unittest.equals('$arg_disk'), ); + unittest.expect( + path.substring(pathOffset, pathOffset + 15), + unittest.equals('/createSnapshot'), + ); + pathOffset += 15; final query = req.url.query; var queryOffset = 0; @@ -104349,18 +106246,10 @@ void main() { ); } } - unittest.expect( - queryMap['paths']!, - unittest.equals(arg_paths), - ); unittest.expect( queryMap['requestId']!.first, unittest.equals(arg_requestId), ); - unittest.expect( - queryMap['updateMask']!.first, - unittest.equals(arg_updateMask), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -104372,23 +106261,19 @@ void main() { final resp = convert.json.encode(buildOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.update( - arg_request, arg_project, arg_region, arg_commitment, - paths: arg_paths, - requestId: arg_requestId, - updateMask: arg_updateMask, - $fields: arg_$fields); + final response = await res.createSnapshot( + arg_request, arg_project, arg_region, arg_disk, + requestId: arg_requestId, $fields: arg_$fields); checkOperation(response as api.Operation); }); - }); - unittest.group('resource-RegionDiskTypesResource', () { - unittest.test('method--get', () async { + unittest.test('method--delete', () async { final mock = HttpServerMock(); - final res = api.ComputeApi(mock).regionDiskTypes; + final res = api.ComputeApi(mock).regionDisks; final arg_project = 'foo'; final arg_region = 'foo'; - final arg_diskType = 'foo'; + final arg_disk = 'foo'; + final arg_requestId = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -104424,7 +106309,7 @@ void main() { unittest.equals('/regions/'), ); pathOffset += 9; - index = path.indexOf('/diskTypes/', pathOffset); + index = path.indexOf('/disks/', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); @@ -104434,15 +106319,15 @@ void main() { unittest.equals('$arg_region'), ); unittest.expect( - path.substring(pathOffset, pathOffset + 11), - unittest.equals('/diskTypes/'), + path.substring(pathOffset, pathOffset + 7), + unittest.equals('/disks/'), ); - pathOffset += 11; + pathOffset += 7; subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset)); pathOffset = path.length; unittest.expect( subPart, - unittest.equals('$arg_diskType'), + unittest.equals('$arg_disk'), ); final query = req.url.query; @@ -104460,6 +106345,10 @@ void main() { ); } } + unittest.expect( + queryMap['requestId']!.first, + unittest.equals(arg_requestId), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -104468,24 +106357,20 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildDiskType()); + final resp = convert.json.encode(buildOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.get(arg_project, arg_region, arg_diskType, - $fields: arg_$fields); - checkDiskType(response as api.DiskType); + final response = await res.delete(arg_project, arg_region, arg_disk, + requestId: arg_requestId, 
$fields: arg_$fields); + checkOperation(response as api.Operation); }); - unittest.test('method--list', () async { + unittest.test('method--get', () async { final mock = HttpServerMock(); - final res = api.ComputeApi(mock).regionDiskTypes; + final res = api.ComputeApi(mock).regionDisks; final arg_project = 'foo'; final arg_region = 'foo'; - final arg_filter = 'foo'; - final arg_maxResults = 42; - final arg_orderBy = 'foo'; - final arg_pageToken = 'foo'; - final arg_returnPartialSuccess = true; + final arg_disk = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -104521,7 +106406,7 @@ void main() { unittest.equals('/regions/'), ); pathOffset += 9; - index = path.indexOf('/diskTypes', pathOffset); + index = path.indexOf('/disks/', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); @@ -104531,10 +106416,16 @@ void main() { unittest.equals('$arg_region'), ); unittest.expect( - path.substring(pathOffset, pathOffset + 10), - unittest.equals('/diskTypes'), + path.substring(pathOffset, pathOffset + 7), + unittest.equals('/disks/'), + ); + pathOffset += 7; + subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset)); + pathOffset = path.length; + unittest.expect( + subPart, + unittest.equals('$arg_disk'), ); - pathOffset += 10; final query = req.url.query; var queryOffset = 0; @@ -104551,26 +106442,6 @@ void main() { ); } } - unittest.expect( - queryMap['filter']!.first, - unittest.equals(arg_filter), - ); - unittest.expect( - core.int.parse(queryMap['maxResults']!.first), - unittest.equals(arg_maxResults), - ); - unittest.expect( - queryMap['orderBy']!.first, - unittest.equals(arg_orderBy), - ); - unittest.expect( - queryMap['pageToken']!.first, - unittest.equals(arg_pageToken), - ); - unittest.expect( - queryMap['returnPartialSuccess']!.first, - unittest.equals('$arg_returnPartialSuccess'), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -104579,35 +106450,23 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildRegionDiskTypeList()); + final resp = convert.json.encode(buildDisk()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.list(arg_project, arg_region, - filter: arg_filter, - maxResults: arg_maxResults, - orderBy: arg_orderBy, - pageToken: arg_pageToken, - returnPartialSuccess: arg_returnPartialSuccess, + final response = await res.get(arg_project, arg_region, arg_disk, $fields: arg_$fields); - checkRegionDiskTypeList(response as api.RegionDiskTypeList); + checkDisk(response as api.Disk); }); - }); - unittest.group('resource-RegionDisksResource', () { - unittest.test('method--addResourcePolicies', () async { + unittest.test('method--getIamPolicy', () async { final mock = HttpServerMock(); final res = api.ComputeApi(mock).regionDisks; - final arg_request = buildRegionDisksAddResourcePoliciesRequest(); final arg_project = 'foo'; final arg_region = 'foo'; - final arg_disk = 'foo'; - final arg_requestId = 'foo'; + final arg_resource = 'foo'; + final arg_optionsRequestedPolicyVersion = 42; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.RegionDisksAddResourcePoliciesRequest.fromJson( - json as core.Map); - checkRegionDisksAddResourcePoliciesRequest(obj); - final path = req.url.path; var pathOffset = 0; core.int 
index; @@ -104655,20 +106514,20 @@ void main() { unittest.equals('/disks/'), ); pathOffset += 7; - index = path.indexOf('/addResourcePolicies', pathOffset); + index = path.indexOf('/getIamPolicy', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); pathOffset = index; unittest.expect( subPart, - unittest.equals('$arg_disk'), + unittest.equals('$arg_resource'), ); unittest.expect( - path.substring(pathOffset, pathOffset + 20), - unittest.equals('/addResourcePolicies'), + path.substring(pathOffset, pathOffset + 13), + unittest.equals('/getIamPolicy'), ); - pathOffset += 20; + pathOffset += 13; final query = req.url.query; var queryOffset = 0; @@ -104686,8 +106545,8 @@ void main() { } } unittest.expect( - queryMap['requestId']!.first, - unittest.equals(arg_requestId), + core.int.parse(queryMap['optionsRequestedPolicyVersion']!.first), + unittest.equals(arg_optionsRequestedPolicyVersion), ); unittest.expect( queryMap['fields']!.first, @@ -104697,27 +106556,29 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildOperation()); + final resp = convert.json.encode(buildPolicy()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.addResourcePolicies( - arg_request, arg_project, arg_region, arg_disk, - requestId: arg_requestId, $fields: arg_$fields); - checkOperation(response as api.Operation); + final response = await res.getIamPolicy( + arg_project, arg_region, arg_resource, + optionsRequestedPolicyVersion: arg_optionsRequestedPolicyVersion, + $fields: arg_$fields); + checkPolicy(response as api.Policy); }); - unittest.test('method--bulkInsert', () async { + unittest.test('method--insert', () async { final mock = HttpServerMock(); final res = api.ComputeApi(mock).regionDisks; - final arg_request = buildBulkInsertDiskResource(); + final arg_request = buildDisk(); final arg_project = 'foo'; final arg_region = 'foo'; final arg_requestId = 'foo'; + final arg_sourceImage = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.BulkInsertDiskResource.fromJson( - json as core.Map); - checkBulkInsertDiskResource(obj); + final obj = + api.Disk.fromJson(json as core.Map); + checkDisk(obj); final path = req.url.path; var pathOffset = 0; @@ -104752,7 +106613,7 @@ void main() { unittest.equals('/regions/'), ); pathOffset += 9; - index = path.indexOf('/disks/bulkInsert', pathOffset); + index = path.indexOf('/disks', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); @@ -104762,10 +106623,10 @@ void main() { unittest.equals('$arg_region'), ); unittest.expect( - path.substring(pathOffset, pathOffset + 17), - unittest.equals('/disks/bulkInsert'), + path.substring(pathOffset, pathOffset + 6), + unittest.equals('/disks'), ); - pathOffset += 17; + pathOffset += 6; final query = req.url.query; var queryOffset = 0; @@ -104786,6 +106647,10 @@ void main() { queryMap['requestId']!.first, unittest.equals(arg_requestId), ); + unittest.expect( + queryMap['sourceImage']!.first, + unittest.equals(arg_sourceImage), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -104797,26 +106662,25 @@ void main() { final resp = convert.json.encode(buildOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await 
res.bulkInsert( - arg_request, arg_project, arg_region, - requestId: arg_requestId, $fields: arg_$fields); + final response = await res.insert(arg_request, arg_project, arg_region, + requestId: arg_requestId, + sourceImage: arg_sourceImage, + $fields: arg_$fields); checkOperation(response as api.Operation); }); - unittest.test('method--createSnapshot', () async { + unittest.test('method--list', () async { final mock = HttpServerMock(); final res = api.ComputeApi(mock).regionDisks; - final arg_request = buildSnapshot(); final arg_project = 'foo'; final arg_region = 'foo'; - final arg_disk = 'foo'; - final arg_requestId = 'foo'; + final arg_filter = 'foo'; + final arg_maxResults = 42; + final arg_orderBy = 'foo'; + final arg_pageToken = 'foo'; + final arg_returnPartialSuccess = true; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = - api.Snapshot.fromJson(json as core.Map); - checkSnapshot(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -104850,7 +106714,7 @@ void main() { unittest.equals('/regions/'), ); pathOffset += 9; - index = path.indexOf('/disks/', pathOffset); + index = path.indexOf('/disks', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); @@ -104860,24 +106724,10 @@ void main() { unittest.equals('$arg_region'), ); unittest.expect( - path.substring(pathOffset, pathOffset + 7), - unittest.equals('/disks/'), - ); - pathOffset += 7; - index = path.indexOf('/createSnapshot', pathOffset); - unittest.expect(index >= 0, unittest.isTrue); - subPart = - core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); - pathOffset = index; - unittest.expect( - subPart, - unittest.equals('$arg_disk'), - ); - unittest.expect( - path.substring(pathOffset, pathOffset + 15), - unittest.equals('/createSnapshot'), + path.substring(pathOffset, pathOffset + 6), + unittest.equals('/disks'), ); - pathOffset += 15; + pathOffset += 6; final query = req.url.query; var queryOffset = 0; @@ -104895,8 +106745,24 @@ void main() { } } unittest.expect( - queryMap['requestId']!.first, - unittest.equals(arg_requestId), + queryMap['filter']!.first, + unittest.equals(arg_filter), + ); + unittest.expect( + core.int.parse(queryMap['maxResults']!.first), + unittest.equals(arg_maxResults), + ); + unittest.expect( + queryMap['orderBy']!.first, + unittest.equals(arg_orderBy), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), + ); + unittest.expect( + queryMap['returnPartialSuccess']!.first, + unittest.equals('$arg_returnPartialSuccess'), ); unittest.expect( queryMap['fields']!.first, @@ -104906,24 +106772,33 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildOperation()); + final resp = convert.json.encode(buildDiskList()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.createSnapshot( - arg_request, arg_project, arg_region, arg_disk, - requestId: arg_requestId, $fields: arg_$fields); - checkOperation(response as api.Operation); + final response = await res.list(arg_project, arg_region, + filter: arg_filter, + maxResults: arg_maxResults, + orderBy: arg_orderBy, + pageToken: arg_pageToken, + returnPartialSuccess: arg_returnPartialSuccess, + $fields: arg_$fields); + checkDiskList(response as api.DiskList); }); - unittest.test('method--delete', () async { + 
unittest.test('method--removeResourcePolicies', () async { final mock = HttpServerMock(); final res = api.ComputeApi(mock).regionDisks; + final arg_request = buildRegionDisksRemoveResourcePoliciesRequest(); final arg_project = 'foo'; final arg_region = 'foo'; final arg_disk = 'foo'; final arg_requestId = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.RegionDisksRemoveResourcePoliciesRequest.fromJson( + json as core.Map); + checkRegionDisksRemoveResourcePoliciesRequest(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -104971,12 +106846,20 @@ void main() { unittest.equals('/disks/'), ); pathOffset += 7; - subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset)); - pathOffset = path.length; + index = path.indexOf('/removeResourcePolicies', pathOffset); + unittest.expect(index >= 0, unittest.isTrue); + subPart = + core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); + pathOffset = index; unittest.expect( subPart, unittest.equals('$arg_disk'), ); + unittest.expect( + path.substring(pathOffset, pathOffset + 23), + unittest.equals('/removeResourcePolicies'), + ); + pathOffset += 23; final query = req.url.query; var queryOffset = 0; @@ -105008,19 +106891,26 @@ void main() { final resp = convert.json.encode(buildOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.delete(arg_project, arg_region, arg_disk, + final response = await res.removeResourcePolicies( + arg_request, arg_project, arg_region, arg_disk, requestId: arg_requestId, $fields: arg_$fields); checkOperation(response as api.Operation); }); - unittest.test('method--get', () async { + unittest.test('method--resize', () async { final mock = HttpServerMock(); final res = api.ComputeApi(mock).regionDisks; + final arg_request = buildRegionDisksResizeRequest(); final arg_project = 'foo'; final arg_region = 'foo'; final arg_disk = 'foo'; + final arg_requestId = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.RegionDisksResizeRequest.fromJson( + json as core.Map); + checkRegionDisksResizeRequest(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -105068,12 +106958,20 @@ void main() { unittest.equals('/disks/'), ); pathOffset += 7; - subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset)); - pathOffset = path.length; + index = path.indexOf('/resize', pathOffset); + unittest.expect(index >= 0, unittest.isTrue); + subPart = + core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); + pathOffset = index; unittest.expect( subPart, unittest.equals('$arg_disk'), ); + unittest.expect( + path.substring(pathOffset, pathOffset + 7), + unittest.equals('/resize'), + ); + pathOffset += 7; final query = req.url.query; var queryOffset = 0; @@ -105090,6 +106988,10 @@ void main() { ); } } + unittest.expect( + queryMap['requestId']!.first, + unittest.equals(arg_requestId), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -105098,23 +107000,28 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildDisk()); + final resp = convert.json.encode(buildOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.get(arg_project, arg_region, arg_disk, - $fields: arg_$fields); - checkDisk(response as api.Disk); + final response = await 
res.resize( + arg_request, arg_project, arg_region, arg_disk, + requestId: arg_requestId, $fields: arg_$fields); + checkOperation(response as api.Operation); }); - unittest.test('method--getIamPolicy', () async { + unittest.test('method--setIamPolicy', () async { final mock = HttpServerMock(); final res = api.ComputeApi(mock).regionDisks; + final arg_request = buildRegionSetPolicyRequest(); final arg_project = 'foo'; final arg_region = 'foo'; final arg_resource = 'foo'; - final arg_optionsRequestedPolicyVersion = 42; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.RegionSetPolicyRequest.fromJson( + json as core.Map); + checkRegionSetPolicyRequest(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -105162,7 +107069,7 @@ void main() { unittest.equals('/disks/'), ); pathOffset += 7; - index = path.indexOf('/getIamPolicy', pathOffset); + index = path.indexOf('/setIamPolicy', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); @@ -105173,7 +107080,7 @@ void main() { ); unittest.expect( path.substring(pathOffset, pathOffset + 13), - unittest.equals('/getIamPolicy'), + unittest.equals('/setIamPolicy'), ); pathOffset += 13; @@ -105192,10 +107099,6 @@ void main() { ); } } - unittest.expect( - core.int.parse(queryMap['optionsRequestedPolicyVersion']!.first), - unittest.equals(arg_optionsRequestedPolicyVersion), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -105207,26 +107110,25 @@ void main() { final resp = convert.json.encode(buildPolicy()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.getIamPolicy( - arg_project, arg_region, arg_resource, - optionsRequestedPolicyVersion: arg_optionsRequestedPolicyVersion, + final response = await res.setIamPolicy( + arg_request, arg_project, arg_region, arg_resource, $fields: arg_$fields); checkPolicy(response as api.Policy); }); - unittest.test('method--insert', () async { + unittest.test('method--setLabels', () async { final mock = HttpServerMock(); final res = api.ComputeApi(mock).regionDisks; - final arg_request = buildDisk(); + final arg_request = buildRegionSetLabelsRequest(); final arg_project = 'foo'; final arg_region = 'foo'; + final arg_resource = 'foo'; final arg_requestId = 'foo'; - final arg_sourceImage = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = - api.Disk.fromJson(json as core.Map); - checkDisk(obj); + final obj = api.RegionSetLabelsRequest.fromJson( + json as core.Map); + checkRegionSetLabelsRequest(obj); final path = req.url.path; var pathOffset = 0; @@ -105261,7 +107163,7 @@ void main() { unittest.equals('/regions/'), ); pathOffset += 9; - index = path.indexOf('/disks', pathOffset); + index = path.indexOf('/disks/', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); @@ -105271,111 +107173,24 @@ void main() { unittest.equals('$arg_region'), ); unittest.expect( - path.substring(pathOffset, pathOffset + 6), - unittest.equals('/disks'), - ); - pathOffset += 6; - - final query = req.url.query; - var queryOffset = 0; - final queryMap = >{}; - void addQueryParam(core.String n, core.String v) => - queryMap.putIfAbsent(n, () => []).add(v); - - if (query.isNotEmpty) { - for (var part in query.split('&')) { - final keyValue = part.split('='); - 
addQueryParam( - core.Uri.decodeQueryComponent(keyValue[0]), - core.Uri.decodeQueryComponent(keyValue[1]), - ); - } - } - unittest.expect( - queryMap['requestId']!.first, - unittest.equals(arg_requestId), - ); - unittest.expect( - queryMap['sourceImage']!.first, - unittest.equals(arg_sourceImage), - ); - unittest.expect( - queryMap['fields']!.first, - unittest.equals(arg_$fields), - ); - - final h = { - 'content-type': 'application/json; charset=utf-8', - }; - final resp = convert.json.encode(buildOperation()); - return async.Future.value(stringResponse(200, h, resp)); - }), true); - final response = await res.insert(arg_request, arg_project, arg_region, - requestId: arg_requestId, - sourceImage: arg_sourceImage, - $fields: arg_$fields); - checkOperation(response as api.Operation); - }); - - unittest.test('method--list', () async { - final mock = HttpServerMock(); - final res = api.ComputeApi(mock).regionDisks; - final arg_project = 'foo'; - final arg_region = 'foo'; - final arg_filter = 'foo'; - final arg_maxResults = 42; - final arg_orderBy = 'foo'; - final arg_pageToken = 'foo'; - final arg_returnPartialSuccess = true; - final arg_$fields = 'foo'; - mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final path = req.url.path; - var pathOffset = 0; - core.int index; - core.String subPart; - unittest.expect( - path.substring(pathOffset, pathOffset + 1), - unittest.equals('/'), - ); - pathOffset += 1; - unittest.expect( - path.substring(pathOffset, pathOffset + 11), - unittest.equals('compute/v1/'), - ); - pathOffset += 11; - unittest.expect( - path.substring(pathOffset, pathOffset + 9), - unittest.equals('projects/'), - ); - pathOffset += 9; - index = path.indexOf('/regions/', pathOffset); - unittest.expect(index >= 0, unittest.isTrue); - subPart = - core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); - pathOffset = index; - unittest.expect( - subPart, - unittest.equals('$arg_project'), - ); - unittest.expect( - path.substring(pathOffset, pathOffset + 9), - unittest.equals('/regions/'), + path.substring(pathOffset, pathOffset + 7), + unittest.equals('/disks/'), ); - pathOffset += 9; - index = path.indexOf('/disks', pathOffset); + pathOffset += 7; + index = path.indexOf('/setLabels', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); pathOffset = index; unittest.expect( subPart, - unittest.equals('$arg_region'), + unittest.equals('$arg_resource'), ); unittest.expect( - path.substring(pathOffset, pathOffset + 6), - unittest.equals('/disks'), + path.substring(pathOffset, pathOffset + 10), + unittest.equals('/setLabels'), ); - pathOffset += 6; + pathOffset += 10; final query = req.url.query; var queryOffset = 0; @@ -105393,24 +107208,8 @@ void main() { } } unittest.expect( - queryMap['filter']!.first, - unittest.equals(arg_filter), - ); - unittest.expect( - core.int.parse(queryMap['maxResults']!.first), - unittest.equals(arg_maxResults), - ); - unittest.expect( - queryMap['orderBy']!.first, - unittest.equals(arg_orderBy), - ); - unittest.expect( - queryMap['pageToken']!.first, - unittest.equals(arg_pageToken), - ); - unittest.expect( - queryMap['returnPartialSuccess']!.first, - unittest.equals('$arg_returnPartialSuccess'), + queryMap['requestId']!.first, + unittest.equals(arg_requestId), ); unittest.expect( queryMap['fields']!.first, @@ -105420,32 +107219,28 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = 
convert.json.encode(buildDiskList()); + final resp = convert.json.encode(buildOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.list(arg_project, arg_region, - filter: arg_filter, - maxResults: arg_maxResults, - orderBy: arg_orderBy, - pageToken: arg_pageToken, - returnPartialSuccess: arg_returnPartialSuccess, - $fields: arg_$fields); - checkDiskList(response as api.DiskList); + final response = await res.setLabels( + arg_request, arg_project, arg_region, arg_resource, + requestId: arg_requestId, $fields: arg_$fields); + checkOperation(response as api.Operation); }); - unittest.test('method--removeResourcePolicies', () async { + unittest.test('method--startAsyncReplication', () async { final mock = HttpServerMock(); final res = api.ComputeApi(mock).regionDisks; - final arg_request = buildRegionDisksRemoveResourcePoliciesRequest(); + final arg_request = buildRegionDisksStartAsyncReplicationRequest(); final arg_project = 'foo'; final arg_region = 'foo'; final arg_disk = 'foo'; final arg_requestId = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.RegionDisksRemoveResourcePoliciesRequest.fromJson( + final obj = api.RegionDisksStartAsyncReplicationRequest.fromJson( json as core.Map); - checkRegionDisksRemoveResourcePoliciesRequest(obj); + checkRegionDisksStartAsyncReplicationRequest(obj); final path = req.url.path; var pathOffset = 0; @@ -105494,7 +107289,7 @@ void main() { unittest.equals('/disks/'), ); pathOffset += 7; - index = path.indexOf('/removeResourcePolicies', pathOffset); + index = path.indexOf('/startAsyncReplication', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); @@ -105504,10 +107299,10 @@ void main() { unittest.equals('$arg_disk'), ); unittest.expect( - path.substring(pathOffset, pathOffset + 23), - unittest.equals('/removeResourcePolicies'), + path.substring(pathOffset, pathOffset + 22), + unittest.equals('/startAsyncReplication'), ); - pathOffset += 23; + pathOffset += 22; final query = req.url.query; var queryOffset = 0; @@ -105539,26 +107334,21 @@ void main() { final resp = convert.json.encode(buildOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.removeResourcePolicies( + final response = await res.startAsyncReplication( arg_request, arg_project, arg_region, arg_disk, requestId: arg_requestId, $fields: arg_$fields); checkOperation(response as api.Operation); }); - unittest.test('method--resize', () async { + unittest.test('method--stopAsyncReplication', () async { final mock = HttpServerMock(); final res = api.ComputeApi(mock).regionDisks; - final arg_request = buildRegionDisksResizeRequest(); final arg_project = 'foo'; final arg_region = 'foo'; final arg_disk = 'foo'; final arg_requestId = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.RegionDisksResizeRequest.fromJson( - json as core.Map); - checkRegionDisksResizeRequest(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -105606,7 +107396,7 @@ void main() { unittest.equals('/disks/'), ); pathOffset += 7; - index = path.indexOf('/resize', pathOffset); + index = path.indexOf('/stopAsyncReplication', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); @@ -105616,10 
+107406,10 @@ void main() { unittest.equals('$arg_disk'), ); unittest.expect( - path.substring(pathOffset, pathOffset + 7), - unittest.equals('/resize'), + path.substring(pathOffset, pathOffset + 21), + unittest.equals('/stopAsyncReplication'), ); - pathOffset += 7; + pathOffset += 21; final query = req.url.query; var queryOffset = 0; @@ -105651,24 +107441,24 @@ void main() { final resp = convert.json.encode(buildOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.resize( - arg_request, arg_project, arg_region, arg_disk, + final response = await res.stopAsyncReplication( + arg_project, arg_region, arg_disk, requestId: arg_requestId, $fields: arg_$fields); checkOperation(response as api.Operation); }); - unittest.test('method--setIamPolicy', () async { + unittest.test('method--stopGroupAsyncReplication', () async { final mock = HttpServerMock(); final res = api.ComputeApi(mock).regionDisks; - final arg_request = buildRegionSetPolicyRequest(); + final arg_request = buildDisksStopGroupAsyncReplicationResource(); final arg_project = 'foo'; final arg_region = 'foo'; - final arg_resource = 'foo'; + final arg_requestId = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.RegionSetPolicyRequest.fromJson( + final obj = api.DisksStopGroupAsyncReplicationResource.fromJson( json as core.Map); - checkRegionSetPolicyRequest(obj); + checkDisksStopGroupAsyncReplicationResource(obj); final path = req.url.path; var pathOffset = 0; @@ -105703,7 +107493,7 @@ void main() { unittest.equals('/regions/'), ); pathOffset += 9; - index = path.indexOf('/disks/', pathOffset); + index = path.indexOf('/disks/stopGroupAsyncReplication', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); @@ -105713,24 +107503,10 @@ void main() { unittest.equals('$arg_region'), ); unittest.expect( - path.substring(pathOffset, pathOffset + 7), - unittest.equals('/disks/'), - ); - pathOffset += 7; - index = path.indexOf('/setIamPolicy', pathOffset); - unittest.expect(index >= 0, unittest.isTrue); - subPart = - core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); - pathOffset = index; - unittest.expect( - subPart, - unittest.equals('$arg_resource'), - ); - unittest.expect( - path.substring(pathOffset, pathOffset + 13), - unittest.equals('/setIamPolicy'), + path.substring(pathOffset, pathOffset + 32), + unittest.equals('/disks/stopGroupAsyncReplication'), ); - pathOffset += 13; + pathOffset += 32; final query = req.url.query; var queryOffset = 0; @@ -105747,6 +107523,10 @@ void main() { ); } } + unittest.expect( + queryMap['requestId']!.first, + unittest.equals(arg_requestId), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -105755,28 +107535,27 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildPolicy()); + final resp = convert.json.encode(buildOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.setIamPolicy( - arg_request, arg_project, arg_region, arg_resource, - $fields: arg_$fields); - checkPolicy(response as api.Policy); + final response = await res.stopGroupAsyncReplication( + arg_request, arg_project, arg_region, + requestId: arg_requestId, $fields: arg_$fields); + checkOperation(response as api.Operation); }); - unittest.test('method--setLabels', () 
async { + unittest.test('method--testIamPermissions', () async { final mock = HttpServerMock(); final res = api.ComputeApi(mock).regionDisks; - final arg_request = buildRegionSetLabelsRequest(); + final arg_request = buildTestPermissionsRequest(); final arg_project = 'foo'; final arg_region = 'foo'; final arg_resource = 'foo'; - final arg_requestId = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.RegionSetLabelsRequest.fromJson( + final obj = api.TestPermissionsRequest.fromJson( json as core.Map); - checkRegionSetLabelsRequest(obj); + checkTestPermissionsRequest(obj); final path = req.url.path; var pathOffset = 0; @@ -105825,7 +107604,7 @@ void main() { unittest.equals('/disks/'), ); pathOffset += 7; - index = path.indexOf('/setLabels', pathOffset); + index = path.indexOf('/testIamPermissions', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); @@ -105835,10 +107614,10 @@ void main() { unittest.equals('$arg_resource'), ); unittest.expect( - path.substring(pathOffset, pathOffset + 10), - unittest.equals('/setLabels'), + path.substring(pathOffset, pathOffset + 19), + unittest.equals('/testIamPermissions'), ); - pathOffset += 10; + pathOffset += 19; final query = req.url.query; var queryOffset = 0; @@ -105855,10 +107634,6 @@ void main() { ); } } - unittest.expect( - queryMap['requestId']!.first, - unittest.equals(arg_requestId), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -105867,28 +107642,30 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildOperation()); + final resp = convert.json.encode(buildTestPermissionsResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.setLabels( + final response = await res.testIamPermissions( arg_request, arg_project, arg_region, arg_resource, - requestId: arg_requestId, $fields: arg_$fields); - checkOperation(response as api.Operation); + $fields: arg_$fields); + checkTestPermissionsResponse(response as api.TestPermissionsResponse); }); - unittest.test('method--startAsyncReplication', () async { + unittest.test('method--update', () async { final mock = HttpServerMock(); final res = api.ComputeApi(mock).regionDisks; - final arg_request = buildRegionDisksStartAsyncReplicationRequest(); + final arg_request = buildDisk(); final arg_project = 'foo'; final arg_region = 'foo'; final arg_disk = 'foo'; + final arg_paths = buildUnnamed869(); final arg_requestId = 'foo'; + final arg_updateMask = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.RegionDisksStartAsyncReplicationRequest.fromJson( - json as core.Map); - checkRegionDisksStartAsyncReplicationRequest(obj); + final obj = + api.Disk.fromJson(json as core.Map); + checkDisk(obj); final path = req.url.path; var pathOffset = 0; @@ -105937,20 +107714,12 @@ void main() { unittest.equals('/disks/'), ); pathOffset += 7; - index = path.indexOf('/startAsyncReplication', pathOffset); - unittest.expect(index >= 0, unittest.isTrue); - subPart = - core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); - pathOffset = index; + subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset)); + pathOffset = path.length; unittest.expect( subPart, unittest.equals('$arg_disk'), ); - unittest.expect( - path.substring(pathOffset, 
pathOffset + 22), - unittest.equals('/startAsyncReplication'), - ); - pathOffset += 22; final query = req.url.query; var queryOffset = 0; @@ -105967,10 +107736,18 @@ void main() { ); } } + unittest.expect( + queryMap['paths']!, + unittest.equals(arg_paths), + ); unittest.expect( queryMap['requestId']!.first, unittest.equals(arg_requestId), ); + unittest.expect( + queryMap['updateMask']!.first, + unittest.equals(arg_updateMask), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -105982,18 +107759,23 @@ void main() { final resp = convert.json.encode(buildOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.startAsyncReplication( + final response = await res.update( arg_request, arg_project, arg_region, arg_disk, - requestId: arg_requestId, $fields: arg_$fields); + paths: arg_paths, + requestId: arg_requestId, + updateMask: arg_updateMask, + $fields: arg_$fields); checkOperation(response as api.Operation); }); + }); - unittest.test('method--stopAsyncReplication', () async { + unittest.group('resource-RegionHealthCheckServicesResource', () { + unittest.test('method--delete', () async { final mock = HttpServerMock(); - final res = api.ComputeApi(mock).regionDisks; + final res = api.ComputeApi(mock).regionHealthCheckServices; final arg_project = 'foo'; final arg_region = 'foo'; - final arg_disk = 'foo'; + final arg_healthCheckService = 'foo'; final arg_requestId = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -106030,7 +107812,7 @@ void main() { unittest.equals('/regions/'), ); pathOffset += 9; - index = path.indexOf('/disks/', pathOffset); + index = path.indexOf('/healthCheckServices/', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); @@ -106040,24 +107822,114 @@ void main() { unittest.equals('$arg_region'), ); unittest.expect( - path.substring(pathOffset, pathOffset + 7), - unittest.equals('/disks/'), + path.substring(pathOffset, pathOffset + 21), + unittest.equals('/healthCheckServices/'), ); - pathOffset += 7; - index = path.indexOf('/stopAsyncReplication', pathOffset); + pathOffset += 21; + subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset)); + pathOffset = path.length; + unittest.expect( + subPart, + unittest.equals('$arg_healthCheckService'), + ); + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['requestId']!.first, + unittest.equals(arg_requestId), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildOperation()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.delete( + arg_project, arg_region, arg_healthCheckService, + requestId: arg_requestId, $fields: arg_$fields); + checkOperation(response as api.Operation); + }); + + unittest.test('method--get', () async { + final mock = HttpServerMock(); + final res = api.ComputeApi(mock).regionHealthCheckServices; + final 
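// A hedged usage sketch for the regionDisks.update surface exercised above.
// It assumes package:googleapis and package:googleapis_auth with application
// default credentials; the project, region, disk name, and provisionedIops
// value are placeholders, and which Disk fields may actually be updated
// depends on the disk type: this only illustrates the call shape.
import 'package:googleapis/compute/v1.dart' as compute;
import 'package:googleapis_auth/auth_io.dart' as auth;

Future<void> main() async {
  final client = await auth.clientViaApplicationDefaultCredentials(
      scopes: [compute.ComputeApi.computeScope]);
  try {
    final api = compute.ComputeApi(client);
    // Only the fields named in updateMask are considered by the server.
    final patch = compute.Disk()..provisionedIops = '3000';
    final op = await api.regionDisks.update(
      patch, 'my-project', 'us-central1', 'my-disk',
      updateMask: 'provisionedIops',
    );
    // update() returns a long-running regional Operation.
    print('operation ${op.name}: ${op.status}');
  } finally {
    client.close();
  }
}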
arg_project = 'foo'; + final arg_region = 'foo'; + final arg_healthCheckService = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 11), + unittest.equals('compute/v1/'), + ); + pathOffset += 11; + unittest.expect( + path.substring(pathOffset, pathOffset + 9), + unittest.equals('projects/'), + ); + pathOffset += 9; + index = path.indexOf('/regions/', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); pathOffset = index; unittest.expect( subPart, - unittest.equals('$arg_disk'), + unittest.equals('$arg_project'), + ); + unittest.expect( + path.substring(pathOffset, pathOffset + 9), + unittest.equals('/regions/'), + ); + pathOffset += 9; + index = path.indexOf('/healthCheckServices/', pathOffset); + unittest.expect(index >= 0, unittest.isTrue); + subPart = + core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); + pathOffset = index; + unittest.expect( + subPart, + unittest.equals('$arg_region'), ); unittest.expect( path.substring(pathOffset, pathOffset + 21), - unittest.equals('/stopAsyncReplication'), + unittest.equals('/healthCheckServices/'), ); pathOffset += 21; + subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset)); + pathOffset = path.length; + unittest.expect( + subPart, + unittest.equals('$arg_healthCheckService'), + ); final query = req.url.query; var queryOffset = 0; @@ -106074,10 +107946,6 @@ void main() { ); } } - unittest.expect( - queryMap['requestId']!.first, - unittest.equals(arg_requestId), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -106086,27 +107954,27 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildOperation()); + final resp = convert.json.encode(buildHealthCheckService()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.stopAsyncReplication( - arg_project, arg_region, arg_disk, - requestId: arg_requestId, $fields: arg_$fields); - checkOperation(response as api.Operation); + final response = await res.get( + arg_project, arg_region, arg_healthCheckService, + $fields: arg_$fields); + checkHealthCheckService(response as api.HealthCheckService); }); - unittest.test('method--stopGroupAsyncReplication', () async { + unittest.test('method--insert', () async { final mock = HttpServerMock(); - final res = api.ComputeApi(mock).regionDisks; - final arg_request = buildDisksStopGroupAsyncReplicationResource(); + final res = api.ComputeApi(mock).regionHealthCheckServices; + final arg_request = buildHealthCheckService(); final arg_project = 'foo'; final arg_region = 'foo'; final arg_requestId = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.DisksStopGroupAsyncReplicationResource.fromJson( + final obj = api.HealthCheckService.fromJson( json as core.Map); - checkDisksStopGroupAsyncReplicationResource(obj); + checkHealthCheckService(obj); final path = req.url.path; var pathOffset = 0; @@ -106141,7 +108009,7 @@ void main() { unittest.equals('/regions/'), ); pathOffset += 9; - index = path.indexOf('/disks/stopGroupAsyncReplication', 
pathOffset); + index = path.indexOf('/healthCheckServices', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); @@ -106151,10 +108019,10 @@ void main() { unittest.equals('$arg_region'), ); unittest.expect( - path.substring(pathOffset, pathOffset + 32), - unittest.equals('/disks/stopGroupAsyncReplication'), + path.substring(pathOffset, pathOffset + 20), + unittest.equals('/healthCheckServices'), ); - pathOffset += 32; + pathOffset += 20; final query = req.url.query; var queryOffset = 0; @@ -106186,25 +108054,23 @@ void main() { final resp = convert.json.encode(buildOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.stopGroupAsyncReplication( - arg_request, arg_project, arg_region, + final response = await res.insert(arg_request, arg_project, arg_region, requestId: arg_requestId, $fields: arg_$fields); checkOperation(response as api.Operation); }); - unittest.test('method--testIamPermissions', () async { + unittest.test('method--list', () async { final mock = HttpServerMock(); - final res = api.ComputeApi(mock).regionDisks; - final arg_request = buildTestPermissionsRequest(); + final res = api.ComputeApi(mock).regionHealthCheckServices; final arg_project = 'foo'; final arg_region = 'foo'; - final arg_resource = 'foo'; + final arg_filter = 'foo'; + final arg_maxResults = 42; + final arg_orderBy = 'foo'; + final arg_pageToken = 'foo'; + final arg_returnPartialSuccess = true; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.TestPermissionsRequest.fromJson( - json as core.Map); - checkTestPermissionsRequest(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -106238,7 +108104,7 @@ void main() { unittest.equals('/regions/'), ); pathOffset += 9; - index = path.indexOf('/disks/', pathOffset); + index = path.indexOf('/healthCheckServices', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); @@ -106248,24 +108114,10 @@ void main() { unittest.equals('$arg_region'), ); unittest.expect( - path.substring(pathOffset, pathOffset + 7), - unittest.equals('/disks/'), - ); - pathOffset += 7; - index = path.indexOf('/testIamPermissions', pathOffset); - unittest.expect(index >= 0, unittest.isTrue); - subPart = - core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); - pathOffset = index; - unittest.expect( - subPart, - unittest.equals('$arg_resource'), - ); - unittest.expect( - path.substring(pathOffset, pathOffset + 19), - unittest.equals('/testIamPermissions'), + path.substring(pathOffset, pathOffset + 20), + unittest.equals('/healthCheckServices'), ); - pathOffset += 19; + pathOffset += 20; final query = req.url.query; var queryOffset = 0; @@ -106282,6 +108134,26 @@ void main() { ); } } + unittest.expect( + queryMap['filter']!.first, + unittest.equals(arg_filter), + ); + unittest.expect( + core.int.parse(queryMap['maxResults']!.first), + unittest.equals(arg_maxResults), + ); + unittest.expect( + queryMap['orderBy']!.first, + unittest.equals(arg_orderBy), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), + ); + unittest.expect( + queryMap['returnPartialSuccess']!.first, + unittest.equals('$arg_returnPartialSuccess'), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -106290,30 +108162,32 @@ void main() { final h = { 
'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildTestPermissionsResponse()); + final resp = convert.json.encode(buildHealthCheckServicesList()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.testIamPermissions( - arg_request, arg_project, arg_region, arg_resource, + final response = await res.list(arg_project, arg_region, + filter: arg_filter, + maxResults: arg_maxResults, + orderBy: arg_orderBy, + pageToken: arg_pageToken, + returnPartialSuccess: arg_returnPartialSuccess, $fields: arg_$fields); - checkTestPermissionsResponse(response as api.TestPermissionsResponse); + checkHealthCheckServicesList(response as api.HealthCheckServicesList); }); - unittest.test('method--update', () async { + unittest.test('method--patch', () async { final mock = HttpServerMock(); - final res = api.ComputeApi(mock).regionDisks; - final arg_request = buildDisk(); + final res = api.ComputeApi(mock).regionHealthCheckServices; + final arg_request = buildHealthCheckService(); final arg_project = 'foo'; final arg_region = 'foo'; - final arg_disk = 'foo'; - final arg_paths = buildUnnamed860(); + final arg_healthCheckService = 'foo'; final arg_requestId = 'foo'; - final arg_updateMask = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = - api.Disk.fromJson(json as core.Map); - checkDisk(obj); + final obj = api.HealthCheckService.fromJson( + json as core.Map); + checkHealthCheckService(obj); final path = req.url.path; var pathOffset = 0; @@ -106348,7 +108222,7 @@ void main() { unittest.equals('/regions/'), ); pathOffset += 9; - index = path.indexOf('/disks/', pathOffset); + index = path.indexOf('/healthCheckServices/', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); @@ -106358,15 +108232,15 @@ void main() { unittest.equals('$arg_region'), ); unittest.expect( - path.substring(pathOffset, pathOffset + 7), - unittest.equals('/disks/'), + path.substring(pathOffset, pathOffset + 21), + unittest.equals('/healthCheckServices/'), ); - pathOffset += 7; + pathOffset += 21; subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset)); pathOffset = path.length; unittest.expect( subPart, - unittest.equals('$arg_disk'), + unittest.equals('$arg_healthCheckService'), ); final query = req.url.query; @@ -106384,18 +108258,10 @@ void main() { ); } } - unittest.expect( - queryMap['paths']!, - unittest.equals(arg_paths), - ); unittest.expect( queryMap['requestId']!.first, unittest.equals(arg_requestId), ); - unittest.expect( - queryMap['updateMask']!.first, - unittest.equals(arg_updateMask), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -106407,23 +108273,20 @@ void main() { final resp = convert.json.encode(buildOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.update( - arg_request, arg_project, arg_region, arg_disk, - paths: arg_paths, - requestId: arg_requestId, - updateMask: arg_updateMask, - $fields: arg_$fields); + final response = await res.patch( + arg_request, arg_project, arg_region, arg_healthCheckService, + requestId: arg_requestId, $fields: arg_$fields); checkOperation(response as api.Operation); }); }); - unittest.group('resource-RegionHealthCheckServicesResource', () { + unittest.group('resource-RegionHealthChecksResource', () { unittest.test('method--delete', () async { final 
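// A sketch of paging through regionHealthCheckServices.list with the same
// maxResults / pageToken / returnPartialSuccess parameters asserted above.
// The package names, the credentials helper, and the project/region values
// are assumptions for illustration only.
import 'package:googleapis/compute/v1.dart' as compute;
import 'package:googleapis_auth/auth_io.dart' as auth;

Future<void> main() async {
  final client = await auth.clientViaApplicationDefaultCredentials(
      scopes: [compute.ComputeApi.computeReadonlyScope]);
  try {
    final api = compute.ComputeApi(client);
    String? pageToken;
    do {
      final page = await api.regionHealthCheckServices.list(
        'my-project', 'us-central1',
        maxResults: 50,
        pageToken: pageToken,
        returnPartialSuccess: true,
      );
      for (final svc in page.items ?? const <compute.HealthCheckService>[]) {
        print(svc.name);
      }
      pageToken = page.nextPageToken;
    } while (pageToken != null);
  } finally {
    client.close();
  }
}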
mock = HttpServerMock(); - final res = api.ComputeApi(mock).regionHealthCheckServices; + final res = api.ComputeApi(mock).regionHealthChecks; final arg_project = 'foo'; final arg_region = 'foo'; - final arg_healthCheckService = 'foo'; + final arg_healthCheck = 'foo'; final arg_requestId = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -106460,7 +108323,7 @@ void main() { unittest.equals('/regions/'), ); pathOffset += 9; - index = path.indexOf('/healthCheckServices/', pathOffset); + index = path.indexOf('/healthChecks/', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); @@ -106470,15 +108333,15 @@ void main() { unittest.equals('$arg_region'), ); unittest.expect( - path.substring(pathOffset, pathOffset + 21), - unittest.equals('/healthCheckServices/'), + path.substring(pathOffset, pathOffset + 14), + unittest.equals('/healthChecks/'), ); - pathOffset += 21; + pathOffset += 14; subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset)); pathOffset = path.length; unittest.expect( subPart, - unittest.equals('$arg_healthCheckService'), + unittest.equals('$arg_healthCheck'), ); final query = req.url.query; @@ -106512,17 +108375,17 @@ void main() { return async.Future.value(stringResponse(200, h, resp)); }), true); final response = await res.delete( - arg_project, arg_region, arg_healthCheckService, + arg_project, arg_region, arg_healthCheck, requestId: arg_requestId, $fields: arg_$fields); checkOperation(response as api.Operation); }); unittest.test('method--get', () async { final mock = HttpServerMock(); - final res = api.ComputeApi(mock).regionHealthCheckServices; + final res = api.ComputeApi(mock).regionHealthChecks; final arg_project = 'foo'; final arg_region = 'foo'; - final arg_healthCheckService = 'foo'; + final arg_healthCheck = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -106558,7 +108421,7 @@ void main() { unittest.equals('/regions/'), ); pathOffset += 9; - index = path.indexOf('/healthCheckServices/', pathOffset); + index = path.indexOf('/healthChecks/', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); @@ -106568,15 +108431,15 @@ void main() { unittest.equals('$arg_region'), ); unittest.expect( - path.substring(pathOffset, pathOffset + 21), - unittest.equals('/healthCheckServices/'), + path.substring(pathOffset, pathOffset + 14), + unittest.equals('/healthChecks/'), ); - pathOffset += 21; + pathOffset += 14; subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset)); pathOffset = path.length; unittest.expect( subPart, - unittest.equals('$arg_healthCheckService'), + unittest.equals('$arg_healthCheck'), ); final query = req.url.query; @@ -106602,27 +108465,26 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildHealthCheckService()); + final resp = convert.json.encode(buildHealthCheck()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.get( - arg_project, arg_region, arg_healthCheckService, + final response = await res.get(arg_project, arg_region, arg_healthCheck, $fields: arg_$fields); - checkHealthCheckService(response as api.HealthCheckService); + checkHealthCheck(response as api.HealthCheck); }); unittest.test('method--insert', () async { 
final mock = HttpServerMock(); - final res = api.ComputeApi(mock).regionHealthCheckServices; - final arg_request = buildHealthCheckService(); + final res = api.ComputeApi(mock).regionHealthChecks; + final arg_request = buildHealthCheck(); final arg_project = 'foo'; final arg_region = 'foo'; final arg_requestId = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.HealthCheckService.fromJson( + final obj = api.HealthCheck.fromJson( json as core.Map); - checkHealthCheckService(obj); + checkHealthCheck(obj); final path = req.url.path; var pathOffset = 0; @@ -106657,7 +108519,7 @@ void main() { unittest.equals('/regions/'), ); pathOffset += 9; - index = path.indexOf('/healthCheckServices', pathOffset); + index = path.indexOf('/healthChecks', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); @@ -106667,10 +108529,10 @@ void main() { unittest.equals('$arg_region'), ); unittest.expect( - path.substring(pathOffset, pathOffset + 20), - unittest.equals('/healthCheckServices'), + path.substring(pathOffset, pathOffset + 13), + unittest.equals('/healthChecks'), ); - pathOffset += 20; + pathOffset += 13; final query = req.url.query; var queryOffset = 0; @@ -106709,7 +108571,7 @@ void main() { unittest.test('method--list', () async { final mock = HttpServerMock(); - final res = api.ComputeApi(mock).regionHealthCheckServices; + final res = api.ComputeApi(mock).regionHealthChecks; final arg_project = 'foo'; final arg_region = 'foo'; final arg_filter = 'foo'; @@ -106752,7 +108614,7 @@ void main() { unittest.equals('/regions/'), ); pathOffset += 9; - index = path.indexOf('/healthCheckServices', pathOffset); + index = path.indexOf('/healthChecks', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); @@ -106762,10 +108624,10 @@ void main() { unittest.equals('$arg_region'), ); unittest.expect( - path.substring(pathOffset, pathOffset + 20), - unittest.equals('/healthCheckServices'), + path.substring(pathOffset, pathOffset + 13), + unittest.equals('/healthChecks'), ); - pathOffset += 20; + pathOffset += 13; final query = req.url.query; var queryOffset = 0; @@ -106810,7 +108672,7 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildHealthCheckServicesList()); + final resp = convert.json.encode(buildHealthCheckList()); return async.Future.value(stringResponse(200, h, resp)); }), true); final response = await res.list(arg_project, arg_region, @@ -106820,22 +108682,22 @@ void main() { pageToken: arg_pageToken, returnPartialSuccess: arg_returnPartialSuccess, $fields: arg_$fields); - checkHealthCheckServicesList(response as api.HealthCheckServicesList); + checkHealthCheckList(response as api.HealthCheckList); }); unittest.test('method--patch', () async { final mock = HttpServerMock(); - final res = api.ComputeApi(mock).regionHealthCheckServices; - final arg_request = buildHealthCheckService(); + final res = api.ComputeApi(mock).regionHealthChecks; + final arg_request = buildHealthCheck(); final arg_project = 'foo'; final arg_region = 'foo'; - final arg_healthCheckService = 'foo'; + final arg_healthCheck = 'foo'; final arg_requestId = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.HealthCheckService.fromJson( + final obj = api.HealthCheck.fromJson( 
json as core.Map); - checkHealthCheckService(obj); + checkHealthCheck(obj); final path = req.url.path; var pathOffset = 0; @@ -106870,7 +108732,7 @@ void main() { unittest.equals('/regions/'), ); pathOffset += 9; - index = path.indexOf('/healthCheckServices/', pathOffset); + index = path.indexOf('/healthChecks/', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); @@ -106880,15 +108742,15 @@ void main() { unittest.equals('$arg_region'), ); unittest.expect( - path.substring(pathOffset, pathOffset + 21), - unittest.equals('/healthCheckServices/'), + path.substring(pathOffset, pathOffset + 14), + unittest.equals('/healthChecks/'), ); - pathOffset += 21; + pathOffset += 14; subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset)); pathOffset = path.length; unittest.expect( subPart, - unittest.equals('$arg_healthCheckService'), + unittest.equals('$arg_healthCheck'), ); final query = req.url.query; @@ -106922,22 +108784,25 @@ void main() { return async.Future.value(stringResponse(200, h, resp)); }), true); final response = await res.patch( - arg_request, arg_project, arg_region, arg_healthCheckService, + arg_request, arg_project, arg_region, arg_healthCheck, requestId: arg_requestId, $fields: arg_$fields); checkOperation(response as api.Operation); }); - }); - unittest.group('resource-RegionHealthChecksResource', () { - unittest.test('method--delete', () async { + unittest.test('method--update', () async { final mock = HttpServerMock(); final res = api.ComputeApi(mock).regionHealthChecks; + final arg_request = buildHealthCheck(); final arg_project = 'foo'; final arg_region = 'foo'; final arg_healthCheck = 'foo'; final arg_requestId = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.HealthCheck.fromJson( + json as core.Map); + checkHealthCheck(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -107022,20 +108887,30 @@ void main() { final resp = convert.json.encode(buildOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.delete( - arg_project, arg_region, arg_healthCheck, + final response = await res.update( + arg_request, arg_project, arg_region, arg_healthCheck, requestId: arg_requestId, $fields: arg_$fields); checkOperation(response as api.Operation); }); + }); - unittest.test('method--get', () async { + unittest.group('resource-RegionInstanceGroupManagersResource', () { + unittest.test('method--abandonInstances', () async { final mock = HttpServerMock(); - final res = api.ComputeApi(mock).regionHealthChecks; + final res = api.ComputeApi(mock).regionInstanceGroupManagers; + final arg_request = + buildRegionInstanceGroupManagersAbandonInstancesRequest(); final arg_project = 'foo'; final arg_region = 'foo'; - final arg_healthCheck = 'foo'; + final arg_instanceGroupManager = 'foo'; + final arg_requestId = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = + api.RegionInstanceGroupManagersAbandonInstancesRequest.fromJson( + json as core.Map); + checkRegionInstanceGroupManagersAbandonInstancesRequest(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -107069,7 +108944,7 @@ void main() { unittest.equals('/regions/'), ); pathOffset += 9; - index = path.indexOf('/healthChecks/', pathOffset); + index = path.indexOf('/instanceGroupManagers/', pathOffset); unittest.expect(index 
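// Under the same assumptions as the sketches above, this shows creating a
// regional TCP health check and then replacing it with the update method
// (a full PUT, as opposed to patch); the names, port, and interval are
// placeholders and the returned Operations are printed rather than polled.
import 'package:googleapis/compute/v1.dart' as compute;
import 'package:googleapis_auth/auth_io.dart' as auth;

Future<void> main() async {
  final client = await auth.clientViaApplicationDefaultCredentials(
      scopes: [compute.ComputeApi.computeScope]);
  try {
    final api = compute.ComputeApi(client);
    final hc = compute.HealthCheck()
      ..name = 'example-tcp-hc'
      ..type = 'TCP'
      ..checkIntervalSec = 10
      ..tcpHealthCheck = (compute.TCPHealthCheck()..port = 8080);
    final created =
        await api.regionHealthChecks.insert(hc, 'my-project', 'us-central1');
    print('insert operation: ${created.name}');
    // Re-send the full resource with a changed interval.
    hc.checkIntervalSec = 30;
    final updated = await api.regionHealthChecks
        .update(hc, 'my-project', 'us-central1', 'example-tcp-hc');
    print('update operation: ${updated.name}');
  } finally {
    client.close();
  }
}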
>= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); @@ -107079,16 +108954,24 @@ void main() { unittest.equals('$arg_region'), ); unittest.expect( - path.substring(pathOffset, pathOffset + 14), - unittest.equals('/healthChecks/'), + path.substring(pathOffset, pathOffset + 23), + unittest.equals('/instanceGroupManagers/'), ); - pathOffset += 14; - subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset)); - pathOffset = path.length; + pathOffset += 23; + index = path.indexOf('/abandonInstances', pathOffset); + unittest.expect(index >= 0, unittest.isTrue); + subPart = + core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); + pathOffset = index; unittest.expect( subPart, - unittest.equals('$arg_healthCheck'), + unittest.equals('$arg_instanceGroupManager'), + ); + unittest.expect( + path.substring(pathOffset, pathOffset + 17), + unittest.equals('/abandonInstances'), ); + pathOffset += 17; final query = req.url.query; var queryOffset = 0; @@ -107105,6 +108988,10 @@ void main() { ); } } + unittest.expect( + queryMap['requestId']!.first, + unittest.equals(arg_requestId), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -107113,26 +109000,27 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildHealthCheck()); + final resp = convert.json.encode(buildOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.get(arg_project, arg_region, arg_healthCheck, - $fields: arg_$fields); - checkHealthCheck(response as api.HealthCheck); + final response = await res.abandonInstances( + arg_request, arg_project, arg_region, arg_instanceGroupManager, + requestId: arg_requestId, $fields: arg_$fields); + checkOperation(response as api.Operation); }); - unittest.test('method--insert', () async { + unittest.test('method--applyUpdatesToInstances', () async { final mock = HttpServerMock(); - final res = api.ComputeApi(mock).regionHealthChecks; - final arg_request = buildHealthCheck(); + final res = api.ComputeApi(mock).regionInstanceGroupManagers; + final arg_request = buildRegionInstanceGroupManagersApplyUpdatesRequest(); final arg_project = 'foo'; final arg_region = 'foo'; - final arg_requestId = 'foo'; + final arg_instanceGroupManager = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.HealthCheck.fromJson( + final obj = api.RegionInstanceGroupManagersApplyUpdatesRequest.fromJson( json as core.Map); - checkHealthCheck(obj); + checkRegionInstanceGroupManagersApplyUpdatesRequest(obj); final path = req.url.path; var pathOffset = 0; @@ -107167,7 +109055,7 @@ void main() { unittest.equals('/regions/'), ); pathOffset += 9; - index = path.indexOf('/healthChecks', pathOffset); + index = path.indexOf('/instanceGroupManagers/', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); @@ -107177,10 +109065,24 @@ void main() { unittest.equals('$arg_region'), ); unittest.expect( - path.substring(pathOffset, pathOffset + 13), - unittest.equals('/healthChecks'), + path.substring(pathOffset, pathOffset + 23), + unittest.equals('/instanceGroupManagers/'), ); - pathOffset += 13; + pathOffset += 23; + index = path.indexOf('/applyUpdatesToInstances', pathOffset); + unittest.expect(index >= 0, unittest.isTrue); + subPart = + 
core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); + pathOffset = index; + unittest.expect( + subPart, + unittest.equals('$arg_instanceGroupManager'), + ); + unittest.expect( + path.substring(pathOffset, pathOffset + 24), + unittest.equals('/applyUpdatesToInstances'), + ); + pathOffset += 24; final query = req.url.query; var queryOffset = 0; @@ -107197,10 +109099,6 @@ void main() { ); } } - unittest.expect( - queryMap['requestId']!.first, - unittest.equals(arg_requestId), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -107212,23 +109110,28 @@ void main() { final resp = convert.json.encode(buildOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.insert(arg_request, arg_project, arg_region, - requestId: arg_requestId, $fields: arg_$fields); + final response = await res.applyUpdatesToInstances( + arg_request, arg_project, arg_region, arg_instanceGroupManager, + $fields: arg_$fields); checkOperation(response as api.Operation); }); - unittest.test('method--list', () async { + unittest.test('method--createInstances', () async { final mock = HttpServerMock(); - final res = api.ComputeApi(mock).regionHealthChecks; + final res = api.ComputeApi(mock).regionInstanceGroupManagers; + final arg_request = + buildRegionInstanceGroupManagersCreateInstancesRequest(); final arg_project = 'foo'; final arg_region = 'foo'; - final arg_filter = 'foo'; - final arg_maxResults = 42; - final arg_orderBy = 'foo'; - final arg_pageToken = 'foo'; - final arg_returnPartialSuccess = true; + final arg_instanceGroupManager = 'foo'; + final arg_requestId = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = + api.RegionInstanceGroupManagersCreateInstancesRequest.fromJson( + json as core.Map); + checkRegionInstanceGroupManagersCreateInstancesRequest(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -107262,7 +109165,7 @@ void main() { unittest.equals('/regions/'), ); pathOffset += 9; - index = path.indexOf('/healthChecks', pathOffset); + index = path.indexOf('/instanceGroupManagers/', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); @@ -107272,10 +109175,24 @@ void main() { unittest.equals('$arg_region'), ); unittest.expect( - path.substring(pathOffset, pathOffset + 13), - unittest.equals('/healthChecks'), + path.substring(pathOffset, pathOffset + 23), + unittest.equals('/instanceGroupManagers/'), ); - pathOffset += 13; + pathOffset += 23; + index = path.indexOf('/createInstances', pathOffset); + unittest.expect(index >= 0, unittest.isTrue); + subPart = + core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); + pathOffset = index; + unittest.expect( + subPart, + unittest.equals('$arg_instanceGroupManager'), + ); + unittest.expect( + path.substring(pathOffset, pathOffset + 16), + unittest.equals('/createInstances'), + ); + pathOffset += 16; final query = req.url.query; var queryOffset = 0; @@ -107293,24 +109210,8 @@ void main() { } } unittest.expect( - queryMap['filter']!.first, - unittest.equals(arg_filter), - ); - unittest.expect( - core.int.parse(queryMap['maxResults']!.first), - unittest.equals(arg_maxResults), - ); - unittest.expect( - queryMap['orderBy']!.first, - unittest.equals(arg_orderBy), - ); - unittest.expect( - queryMap['pageToken']!.first, - unittest.equals(arg_pageToken), - ); - unittest.expect( - 
queryMap['returnPartialSuccess']!.first, - unittest.equals('$arg_returnPartialSuccess'), + queryMap['requestId']!.first, + unittest.equals(arg_requestId), ); unittest.expect( queryMap['fields']!.first, @@ -107320,33 +109221,24 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildHealthCheckList()); + final resp = convert.json.encode(buildOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.list(arg_project, arg_region, - filter: arg_filter, - maxResults: arg_maxResults, - orderBy: arg_orderBy, - pageToken: arg_pageToken, - returnPartialSuccess: arg_returnPartialSuccess, - $fields: arg_$fields); - checkHealthCheckList(response as api.HealthCheckList); + final response = await res.createInstances( + arg_request, arg_project, arg_region, arg_instanceGroupManager, + requestId: arg_requestId, $fields: arg_$fields); + checkOperation(response as api.Operation); }); - unittest.test('method--patch', () async { + unittest.test('method--delete', () async { final mock = HttpServerMock(); - final res = api.ComputeApi(mock).regionHealthChecks; - final arg_request = buildHealthCheck(); + final res = api.ComputeApi(mock).regionInstanceGroupManagers; final arg_project = 'foo'; final arg_region = 'foo'; - final arg_healthCheck = 'foo'; + final arg_instanceGroupManager = 'foo'; final arg_requestId = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.HealthCheck.fromJson( - json as core.Map); - checkHealthCheck(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -107380,7 +109272,7 @@ void main() { unittest.equals('/regions/'), ); pathOffset += 9; - index = path.indexOf('/healthChecks/', pathOffset); + index = path.indexOf('/instanceGroupManagers/', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); @@ -107390,15 +109282,15 @@ void main() { unittest.equals('$arg_region'), ); unittest.expect( - path.substring(pathOffset, pathOffset + 14), - unittest.equals('/healthChecks/'), + path.substring(pathOffset, pathOffset + 23), + unittest.equals('/instanceGroupManagers/'), ); - pathOffset += 14; + pathOffset += 23; subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset)); pathOffset = path.length; unittest.expect( subPart, - unittest.equals('$arg_healthCheck'), + unittest.equals('$arg_instanceGroupManager'), ); final query = req.url.query; @@ -107431,25 +109323,27 @@ void main() { final resp = convert.json.encode(buildOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.patch( - arg_request, arg_project, arg_region, arg_healthCheck, + final response = await res.delete( + arg_project, arg_region, arg_instanceGroupManager, requestId: arg_requestId, $fields: arg_$fields); checkOperation(response as api.Operation); }); - unittest.test('method--update', () async { + unittest.test('method--deleteInstances', () async { final mock = HttpServerMock(); - final res = api.ComputeApi(mock).regionHealthChecks; - final arg_request = buildHealthCheck(); + final res = api.ComputeApi(mock).regionInstanceGroupManagers; + final arg_request = + buildRegionInstanceGroupManagersDeleteInstancesRequest(); final arg_project = 'foo'; final arg_region = 'foo'; - final arg_healthCheck = 'foo'; + final arg_instanceGroupManager = 'foo'; final arg_requestId = 'foo'; final arg_$fields 
= 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.HealthCheck.fromJson( - json as core.Map); - checkHealthCheck(obj); + final obj = + api.RegionInstanceGroupManagersDeleteInstancesRequest.fromJson( + json as core.Map); + checkRegionInstanceGroupManagersDeleteInstancesRequest(obj); final path = req.url.path; var pathOffset = 0; @@ -107484,7 +109378,7 @@ void main() { unittest.equals('/regions/'), ); pathOffset += 9; - index = path.indexOf('/healthChecks/', pathOffset); + index = path.indexOf('/instanceGroupManagers/', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); @@ -107494,16 +109388,24 @@ void main() { unittest.equals('$arg_region'), ); unittest.expect( - path.substring(pathOffset, pathOffset + 14), - unittest.equals('/healthChecks/'), + path.substring(pathOffset, pathOffset + 23), + unittest.equals('/instanceGroupManagers/'), ); - pathOffset += 14; - subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset)); - pathOffset = path.length; + pathOffset += 23; + index = path.indexOf('/deleteInstances', pathOffset); + unittest.expect(index >= 0, unittest.isTrue); + subPart = + core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); + pathOffset = index; unittest.expect( subPart, - unittest.equals('$arg_healthCheck'), + unittest.equals('$arg_instanceGroupManager'), ); + unittest.expect( + path.substring(pathOffset, pathOffset + 16), + unittest.equals('/deleteInstances'), + ); + pathOffset += 16; final query = req.url.query; var queryOffset = 0; @@ -107535,29 +109437,26 @@ void main() { final resp = convert.json.encode(buildOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.update( - arg_request, arg_project, arg_region, arg_healthCheck, + final response = await res.deleteInstances( + arg_request, arg_project, arg_region, arg_instanceGroupManager, requestId: arg_requestId, $fields: arg_$fields); checkOperation(response as api.Operation); }); - }); - unittest.group('resource-RegionInstanceGroupManagersResource', () { - unittest.test('method--abandonInstances', () async { + unittest.test('method--deletePerInstanceConfigs', () async { final mock = HttpServerMock(); final res = api.ComputeApi(mock).regionInstanceGroupManagers; final arg_request = - buildRegionInstanceGroupManagersAbandonInstancesRequest(); + buildRegionInstanceGroupManagerDeleteInstanceConfigReq(); final arg_project = 'foo'; final arg_region = 'foo'; final arg_instanceGroupManager = 'foo'; - final arg_requestId = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final obj = - api.RegionInstanceGroupManagersAbandonInstancesRequest.fromJson( + api.RegionInstanceGroupManagerDeleteInstanceConfigReq.fromJson( json as core.Map); - checkRegionInstanceGroupManagersAbandonInstancesRequest(obj); + checkRegionInstanceGroupManagerDeleteInstanceConfigReq(obj); final path = req.url.path; var pathOffset = 0; @@ -107606,7 +109505,7 @@ void main() { unittest.equals('/instanceGroupManagers/'), ); pathOffset += 23; - index = path.indexOf('/abandonInstances', pathOffset); + index = path.indexOf('/deletePerInstanceConfigs', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); @@ -107616,10 +109515,10 @@ void main() { unittest.equals('$arg_instanceGroupManager'), ); unittest.expect( - path.substring(pathOffset, 
pathOffset + 17), - unittest.equals('/abandonInstances'), + path.substring(pathOffset, pathOffset + 25), + unittest.equals('/deletePerInstanceConfigs'), ); - pathOffset += 17; + pathOffset += 25; final query = req.url.query; var queryOffset = 0; @@ -107636,10 +109535,6 @@ void main() { ); } } - unittest.expect( - queryMap['requestId']!.first, - unittest.equals(arg_requestId), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -107651,25 +109546,20 @@ void main() { final resp = convert.json.encode(buildOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.abandonInstances( + final response = await res.deletePerInstanceConfigs( arg_request, arg_project, arg_region, arg_instanceGroupManager, - requestId: arg_requestId, $fields: arg_$fields); + $fields: arg_$fields); checkOperation(response as api.Operation); }); - unittest.test('method--applyUpdatesToInstances', () async { + unittest.test('method--get', () async { final mock = HttpServerMock(); final res = api.ComputeApi(mock).regionInstanceGroupManagers; - final arg_request = buildRegionInstanceGroupManagersApplyUpdatesRequest(); final arg_project = 'foo'; final arg_region = 'foo'; final arg_instanceGroupManager = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.RegionInstanceGroupManagersApplyUpdatesRequest.fromJson( - json as core.Map); - checkRegionInstanceGroupManagersApplyUpdatesRequest(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -107717,20 +109607,12 @@ void main() { unittest.equals('/instanceGroupManagers/'), ); pathOffset += 23; - index = path.indexOf('/applyUpdatesToInstances', pathOffset); - unittest.expect(index >= 0, unittest.isTrue); - subPart = - core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); - pathOffset = index; + subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset)); + pathOffset = path.length; unittest.expect( subPart, unittest.equals('$arg_instanceGroupManager'), ); - unittest.expect( - path.substring(pathOffset, pathOffset + 24), - unittest.equals('/applyUpdatesToInstances'), - ); - pathOffset += 24; final query = req.url.query; var queryOffset = 0; @@ -107755,30 +109637,27 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildOperation()); + final resp = convert.json.encode(buildInstanceGroupManager()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.applyUpdatesToInstances( - arg_request, arg_project, arg_region, arg_instanceGroupManager, + final response = await res.get( + arg_project, arg_region, arg_instanceGroupManager, $fields: arg_$fields); - checkOperation(response as api.Operation); + checkInstanceGroupManager(response as api.InstanceGroupManager); }); - unittest.test('method--createInstances', () async { + unittest.test('method--insert', () async { final mock = HttpServerMock(); final res = api.ComputeApi(mock).regionInstanceGroupManagers; - final arg_request = - buildRegionInstanceGroupManagersCreateInstancesRequest(); + final arg_request = buildInstanceGroupManager(); final arg_project = 'foo'; final arg_region = 'foo'; - final arg_instanceGroupManager = 'foo'; final arg_requestId = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = - api.RegionInstanceGroupManagersCreateInstancesRequest.fromJson( - json as 
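// A sketch of the regionInstanceGroupManagers.deleteInstances call covered by
// the tests above; the project, region, MIG name, and instance URL are
// placeholders, and the returned Operation is only printed rather than polled.
import 'package:googleapis/compute/v1.dart' as compute;
import 'package:googleapis_auth/auth_io.dart' as auth;

Future<void> main() async {
  final client = await auth.clientViaApplicationDefaultCredentials(
      scopes: [compute.ComputeApi.computeScope]);
  try {
    final api = compute.ComputeApi(client);
    final request = compute.RegionInstanceGroupManagersDeleteInstancesRequest()
      ..instances = [
        // Instances are referenced by (partial) URL, not by bare name.
        'zones/us-central1-a/instances/example-instance-0',
      ];
    final op = await api.regionInstanceGroupManagers.deleteInstances(
        request, 'my-project', 'us-central1', 'example-mig');
    print('deleteInstances operation ${op.name}: ${op.status}');
  } finally {
    client.close();
  }
}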
core.Map); - checkRegionInstanceGroupManagersCreateInstancesRequest(obj); + final obj = api.InstanceGroupManager.fromJson( + json as core.Map); + checkInstanceGroupManager(obj); final path = req.url.path; var pathOffset = 0; @@ -107813,7 +109692,7 @@ void main() { unittest.equals('/regions/'), ); pathOffset += 9; - index = path.indexOf('/instanceGroupManagers/', pathOffset); + index = path.indexOf('/instanceGroupManagers', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); @@ -107823,24 +109702,10 @@ void main() { unittest.equals('$arg_region'), ); unittest.expect( - path.substring(pathOffset, pathOffset + 23), - unittest.equals('/instanceGroupManagers/'), - ); - pathOffset += 23; - index = path.indexOf('/createInstances', pathOffset); - unittest.expect(index >= 0, unittest.isTrue); - subPart = - core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); - pathOffset = index; - unittest.expect( - subPart, - unittest.equals('$arg_instanceGroupManager'), - ); - unittest.expect( - path.substring(pathOffset, pathOffset + 16), - unittest.equals('/createInstances'), + path.substring(pathOffset, pathOffset + 22), + unittest.equals('/instanceGroupManagers'), ); - pathOffset += 16; + pathOffset += 22; final query = req.url.query; var queryOffset = 0; @@ -107872,19 +109737,21 @@ void main() { final resp = convert.json.encode(buildOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.createInstances( - arg_request, arg_project, arg_region, arg_instanceGroupManager, + final response = await res.insert(arg_request, arg_project, arg_region, requestId: arg_requestId, $fields: arg_$fields); checkOperation(response as api.Operation); }); - unittest.test('method--delete', () async { + unittest.test('method--list', () async { final mock = HttpServerMock(); final res = api.ComputeApi(mock).regionInstanceGroupManagers; final arg_project = 'foo'; final arg_region = 'foo'; - final arg_instanceGroupManager = 'foo'; - final arg_requestId = 'foo'; + final arg_filter = 'foo'; + final arg_maxResults = 42; + final arg_orderBy = 'foo'; + final arg_pageToken = 'foo'; + final arg_returnPartialSuccess = true; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -107920,7 +109787,7 @@ void main() { unittest.equals('/regions/'), ); pathOffset += 9; - index = path.indexOf('/instanceGroupManagers/', pathOffset); + index = path.indexOf('/instanceGroupManagers', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); @@ -107930,16 +109797,10 @@ void main() { unittest.equals('$arg_region'), ); unittest.expect( - path.substring(pathOffset, pathOffset + 23), - unittest.equals('/instanceGroupManagers/'), - ); - pathOffset += 23; - subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset)); - pathOffset = path.length; - unittest.expect( - subPart, - unittest.equals('$arg_instanceGroupManager'), + path.substring(pathOffset, pathOffset + 22), + unittest.equals('/instanceGroupManagers'), ); + pathOffset += 22; final query = req.url.query; var queryOffset = 0; @@ -107957,8 +109818,24 @@ void main() { } } unittest.expect( - queryMap['requestId']!.first, - unittest.equals(arg_requestId), + queryMap['filter']!.first, + unittest.equals(arg_filter), + ); + unittest.expect( + core.int.parse(queryMap['maxResults']!.first), + 
unittest.equals(arg_maxResults), + ); + unittest.expect( + queryMap['orderBy']!.first, + unittest.equals(arg_orderBy), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), + ); + unittest.expect( + queryMap['returnPartialSuccess']!.first, + unittest.equals('$arg_returnPartialSuccess'), ); unittest.expect( queryMap['fields']!.first, @@ -107968,31 +109845,33 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildOperation()); + final resp = convert.json.encode(buildRegionInstanceGroupManagerList()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.delete( - arg_project, arg_region, arg_instanceGroupManager, - requestId: arg_requestId, $fields: arg_$fields); - checkOperation(response as api.Operation); + final response = await res.list(arg_project, arg_region, + filter: arg_filter, + maxResults: arg_maxResults, + orderBy: arg_orderBy, + pageToken: arg_pageToken, + returnPartialSuccess: arg_returnPartialSuccess, + $fields: arg_$fields); + checkRegionInstanceGroupManagerList( + response as api.RegionInstanceGroupManagerList); }); - unittest.test('method--deleteInstances', () async { + unittest.test('method--listErrors', () async { final mock = HttpServerMock(); final res = api.ComputeApi(mock).regionInstanceGroupManagers; - final arg_request = - buildRegionInstanceGroupManagersDeleteInstancesRequest(); final arg_project = 'foo'; final arg_region = 'foo'; final arg_instanceGroupManager = 'foo'; - final arg_requestId = 'foo'; + final arg_filter = 'foo'; + final arg_maxResults = 42; + final arg_orderBy = 'foo'; + final arg_pageToken = 'foo'; + final arg_returnPartialSuccess = true; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = - api.RegionInstanceGroupManagersDeleteInstancesRequest.fromJson( - json as core.Map); - checkRegionInstanceGroupManagersDeleteInstancesRequest(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -108040,7 +109919,7 @@ void main() { unittest.equals('/instanceGroupManagers/'), ); pathOffset += 23; - index = path.indexOf('/deleteInstances', pathOffset); + index = path.indexOf('/listErrors', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); @@ -108050,10 +109929,10 @@ void main() { unittest.equals('$arg_instanceGroupManager'), ); unittest.expect( - path.substring(pathOffset, pathOffset + 16), - unittest.equals('/deleteInstances'), + path.substring(pathOffset, pathOffset + 11), + unittest.equals('/listErrors'), ); - pathOffset += 16; + pathOffset += 11; final query = req.url.query; var queryOffset = 0; @@ -108071,8 +109950,24 @@ void main() { } } unittest.expect( - queryMap['requestId']!.first, - unittest.equals(arg_requestId), + queryMap['filter']!.first, + unittest.equals(arg_filter), + ); + unittest.expect( + core.int.parse(queryMap['maxResults']!.first), + unittest.equals(arg_maxResults), + ); + unittest.expect( + queryMap['orderBy']!.first, + unittest.equals(arg_orderBy), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), + ); + unittest.expect( + queryMap['returnPartialSuccess']!.first, + unittest.equals('$arg_returnPartialSuccess'), ); unittest.expect( queryMap['fields']!.first, @@ -108082,30 +109977,35 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = 
convert.json.encode(buildOperation()); + final resp = convert.json + .encode(buildRegionInstanceGroupManagersListErrorsResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.deleteInstances( - arg_request, arg_project, arg_region, arg_instanceGroupManager, - requestId: arg_requestId, $fields: arg_$fields); - checkOperation(response as api.Operation); + final response = await res.listErrors( + arg_project, arg_region, arg_instanceGroupManager, + filter: arg_filter, + maxResults: arg_maxResults, + orderBy: arg_orderBy, + pageToken: arg_pageToken, + returnPartialSuccess: arg_returnPartialSuccess, + $fields: arg_$fields); + checkRegionInstanceGroupManagersListErrorsResponse( + response as api.RegionInstanceGroupManagersListErrorsResponse); }); - unittest.test('method--deletePerInstanceConfigs', () async { + unittest.test('method--listManagedInstances', () async { final mock = HttpServerMock(); final res = api.ComputeApi(mock).regionInstanceGroupManagers; - final arg_request = - buildRegionInstanceGroupManagerDeleteInstanceConfigReq(); final arg_project = 'foo'; final arg_region = 'foo'; final arg_instanceGroupManager = 'foo'; + final arg_filter = 'foo'; + final arg_maxResults = 42; + final arg_orderBy = 'foo'; + final arg_pageToken = 'foo'; + final arg_returnPartialSuccess = true; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = - api.RegionInstanceGroupManagerDeleteInstanceConfigReq.fromJson( - json as core.Map); - checkRegionInstanceGroupManagerDeleteInstanceConfigReq(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -108153,7 +110053,7 @@ void main() { unittest.equals('/instanceGroupManagers/'), ); pathOffset += 23; - index = path.indexOf('/deletePerInstanceConfigs', pathOffset); + index = path.indexOf('/listManagedInstances', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); @@ -108163,10 +110063,10 @@ void main() { unittest.equals('$arg_instanceGroupManager'), ); unittest.expect( - path.substring(pathOffset, pathOffset + 25), - unittest.equals('/deletePerInstanceConfigs'), + path.substring(pathOffset, pathOffset + 21), + unittest.equals('/listManagedInstances'), ); - pathOffset += 25; + pathOffset += 21; final query = req.url.query; var queryOffset = 0; @@ -108183,6 +110083,26 @@ void main() { ); } } + unittest.expect( + queryMap['filter']!.first, + unittest.equals(arg_filter), + ); + unittest.expect( + core.int.parse(queryMap['maxResults']!.first), + unittest.equals(arg_maxResults), + ); + unittest.expect( + queryMap['orderBy']!.first, + unittest.equals(arg_orderBy), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), + ); + unittest.expect( + queryMap['returnPartialSuccess']!.first, + unittest.equals('$arg_returnPartialSuccess'), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -108191,21 +110111,33 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildOperation()); + final resp = convert.json + .encode(buildRegionInstanceGroupManagersListInstancesResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.deletePerInstanceConfigs( - arg_request, arg_project, arg_region, arg_instanceGroupManager, + final response = await res.listManagedInstances( + arg_project, arg_region, 
arg_instanceGroupManager, + filter: arg_filter, + maxResults: arg_maxResults, + orderBy: arg_orderBy, + pageToken: arg_pageToken, + returnPartialSuccess: arg_returnPartialSuccess, $fields: arg_$fields); - checkOperation(response as api.Operation); + checkRegionInstanceGroupManagersListInstancesResponse( + response as api.RegionInstanceGroupManagersListInstancesResponse); }); - unittest.test('method--get', () async { + unittest.test('method--listPerInstanceConfigs', () async { final mock = HttpServerMock(); final res = api.ComputeApi(mock).regionInstanceGroupManagers; final arg_project = 'foo'; final arg_region = 'foo'; final arg_instanceGroupManager = 'foo'; + final arg_filter = 'foo'; + final arg_maxResults = 42; + final arg_orderBy = 'foo'; + final arg_pageToken = 'foo'; + final arg_returnPartialSuccess = true; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -108255,12 +110187,20 @@ void main() { unittest.equals('/instanceGroupManagers/'), ); pathOffset += 23; - subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset)); - pathOffset = path.length; + index = path.indexOf('/listPerInstanceConfigs', pathOffset); + unittest.expect(index >= 0, unittest.isTrue); + subPart = + core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); + pathOffset = index; unittest.expect( subPart, unittest.equals('$arg_instanceGroupManager'), ); + unittest.expect( + path.substring(pathOffset, pathOffset + 23), + unittest.equals('/listPerInstanceConfigs'), + ); + pathOffset += 23; final query = req.url.query; var queryOffset = 0; @@ -108277,6 +110217,26 @@ void main() { ); } } + unittest.expect( + queryMap['filter']!.first, + unittest.equals(arg_filter), + ); + unittest.expect( + core.int.parse(queryMap['maxResults']!.first), + unittest.equals(arg_maxResults), + ); + unittest.expect( + queryMap['orderBy']!.first, + unittest.equals(arg_orderBy), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), + ); + unittest.expect( + queryMap['returnPartialSuccess']!.first, + unittest.equals('$arg_returnPartialSuccess'), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -108285,21 +110245,29 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildInstanceGroupManager()); + final resp = convert.json + .encode(buildRegionInstanceGroupManagersListInstanceConfigsResp()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.get( + final response = await res.listPerInstanceConfigs( arg_project, arg_region, arg_instanceGroupManager, + filter: arg_filter, + maxResults: arg_maxResults, + orderBy: arg_orderBy, + pageToken: arg_pageToken, + returnPartialSuccess: arg_returnPartialSuccess, $fields: arg_$fields); - checkInstanceGroupManager(response as api.InstanceGroupManager); + checkRegionInstanceGroupManagersListInstanceConfigsResp( + response as api.RegionInstanceGroupManagersListInstanceConfigsResp); }); - unittest.test('method--insert', () async { + unittest.test('method--patch', () async { final mock = HttpServerMock(); final res = api.ComputeApi(mock).regionInstanceGroupManagers; final arg_request = buildInstanceGroupManager(); final arg_project = 'foo'; final arg_region = 'foo'; + final arg_instanceGroupManager = 'foo'; final arg_requestId = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -108340,7 
+110308,7 @@ void main() { unittest.equals('/regions/'), ); pathOffset += 9; - index = path.indexOf('/instanceGroupManagers', pathOffset); + index = path.indexOf('/instanceGroupManagers/', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); @@ -108350,10 +110318,16 @@ void main() { unittest.equals('$arg_region'), ); unittest.expect( - path.substring(pathOffset, pathOffset + 22), - unittest.equals('/instanceGroupManagers'), + path.substring(pathOffset, pathOffset + 23), + unittest.equals('/instanceGroupManagers/'), + ); + pathOffset += 23; + subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset)); + pathOffset = path.length; + unittest.expect( + subPart, + unittest.equals('$arg_instanceGroupManager'), ); - pathOffset += 22; final query = req.url.query; var queryOffset = 0; @@ -108385,23 +110359,28 @@ void main() { final resp = convert.json.encode(buildOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.insert(arg_request, arg_project, arg_region, + final response = await res.patch( + arg_request, arg_project, arg_region, arg_instanceGroupManager, requestId: arg_requestId, $fields: arg_$fields); checkOperation(response as api.Operation); }); - unittest.test('method--list', () async { + unittest.test('method--patchPerInstanceConfigs', () async { final mock = HttpServerMock(); final res = api.ComputeApi(mock).regionInstanceGroupManagers; + final arg_request = + buildRegionInstanceGroupManagerPatchInstanceConfigReq(); final arg_project = 'foo'; final arg_region = 'foo'; - final arg_filter = 'foo'; - final arg_maxResults = 42; - final arg_orderBy = 'foo'; - final arg_pageToken = 'foo'; - final arg_returnPartialSuccess = true; + final arg_instanceGroupManager = 'foo'; + final arg_requestId = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = + api.RegionInstanceGroupManagerPatchInstanceConfigReq.fromJson( + json as core.Map); + checkRegionInstanceGroupManagerPatchInstanceConfigReq(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -108435,7 +110414,7 @@ void main() { unittest.equals('/regions/'), ); pathOffset += 9; - index = path.indexOf('/instanceGroupManagers', pathOffset); + index = path.indexOf('/instanceGroupManagers/', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); @@ -108445,10 +110424,24 @@ void main() { unittest.equals('$arg_region'), ); unittest.expect( - path.substring(pathOffset, pathOffset + 22), - unittest.equals('/instanceGroupManagers'), + path.substring(pathOffset, pathOffset + 23), + unittest.equals('/instanceGroupManagers/'), ); - pathOffset += 22; + pathOffset += 23; + index = path.indexOf('/patchPerInstanceConfigs', pathOffset); + unittest.expect(index >= 0, unittest.isTrue); + subPart = + core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); + pathOffset = index; + unittest.expect( + subPart, + unittest.equals('$arg_instanceGroupManager'), + ); + unittest.expect( + path.substring(pathOffset, pathOffset + 24), + unittest.equals('/patchPerInstanceConfigs'), + ); + pathOffset += 24; final query = req.url.query; var queryOffset = 0; @@ -108466,24 +110459,8 @@ void main() { } } unittest.expect( - queryMap['filter']!.first, - unittest.equals(arg_filter), - ); - unittest.expect( - core.int.parse(queryMap['maxResults']!.first), - 
unittest.equals(arg_maxResults), - ); - unittest.expect( - queryMap['orderBy']!.first, - unittest.equals(arg_orderBy), - ); - unittest.expect( - queryMap['pageToken']!.first, - unittest.equals(arg_pageToken), - ); - unittest.expect( - queryMap['returnPartialSuccess']!.first, - unittest.equals('$arg_returnPartialSuccess'), + queryMap['requestId']!.first, + unittest.equals(arg_requestId), ); unittest.expect( queryMap['fields']!.first, @@ -108493,33 +110470,29 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildRegionInstanceGroupManagerList()); + final resp = convert.json.encode(buildOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.list(arg_project, arg_region, - filter: arg_filter, - maxResults: arg_maxResults, - orderBy: arg_orderBy, - pageToken: arg_pageToken, - returnPartialSuccess: arg_returnPartialSuccess, - $fields: arg_$fields); - checkRegionInstanceGroupManagerList( - response as api.RegionInstanceGroupManagerList); + final response = await res.patchPerInstanceConfigs( + arg_request, arg_project, arg_region, arg_instanceGroupManager, + requestId: arg_requestId, $fields: arg_$fields); + checkOperation(response as api.Operation); }); - unittest.test('method--listErrors', () async { + unittest.test('method--recreateInstances', () async { final mock = HttpServerMock(); final res = api.ComputeApi(mock).regionInstanceGroupManagers; + final arg_request = buildRegionInstanceGroupManagersRecreateRequest(); final arg_project = 'foo'; final arg_region = 'foo'; final arg_instanceGroupManager = 'foo'; - final arg_filter = 'foo'; - final arg_maxResults = 42; - final arg_orderBy = 'foo'; - final arg_pageToken = 'foo'; - final arg_returnPartialSuccess = true; + final arg_requestId = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.RegionInstanceGroupManagersRecreateRequest.fromJson( + json as core.Map); + checkRegionInstanceGroupManagersRecreateRequest(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -108567,7 +110540,7 @@ void main() { unittest.equals('/instanceGroupManagers/'), ); pathOffset += 23; - index = path.indexOf('/listErrors', pathOffset); + index = path.indexOf('/recreateInstances', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); @@ -108577,10 +110550,10 @@ void main() { unittest.equals('$arg_instanceGroupManager'), ); unittest.expect( - path.substring(pathOffset, pathOffset + 11), - unittest.equals('/listErrors'), + path.substring(pathOffset, pathOffset + 18), + unittest.equals('/recreateInstances'), ); - pathOffset += 11; + pathOffset += 18; final query = req.url.query; var queryOffset = 0; @@ -108598,24 +110571,8 @@ void main() { } } unittest.expect( - queryMap['filter']!.first, - unittest.equals(arg_filter), - ); - unittest.expect( - core.int.parse(queryMap['maxResults']!.first), - unittest.equals(arg_maxResults), - ); - unittest.expect( - queryMap['orderBy']!.first, - unittest.equals(arg_orderBy), - ); - unittest.expect( - queryMap['pageToken']!.first, - unittest.equals(arg_pageToken), - ); - unittest.expect( - queryMap['returnPartialSuccess']!.first, - unittest.equals('$arg_returnPartialSuccess'), + queryMap['requestId']!.first, + unittest.equals(arg_requestId), ); unittest.expect( queryMap['fields']!.first, @@ -108625,33 +110582,23 @@ void main() { final h = { 
'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json - .encode(buildRegionInstanceGroupManagersListErrorsResponse()); + final resp = convert.json.encode(buildOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.listErrors( - arg_project, arg_region, arg_instanceGroupManager, - filter: arg_filter, - maxResults: arg_maxResults, - orderBy: arg_orderBy, - pageToken: arg_pageToken, - returnPartialSuccess: arg_returnPartialSuccess, - $fields: arg_$fields); - checkRegionInstanceGroupManagersListErrorsResponse( - response as api.RegionInstanceGroupManagersListErrorsResponse); + final response = await res.recreateInstances( + arg_request, arg_project, arg_region, arg_instanceGroupManager, + requestId: arg_requestId, $fields: arg_$fields); + checkOperation(response as api.Operation); }); - unittest.test('method--listManagedInstances', () async { + unittest.test('method--resize', () async { final mock = HttpServerMock(); final res = api.ComputeApi(mock).regionInstanceGroupManagers; final arg_project = 'foo'; final arg_region = 'foo'; final arg_instanceGroupManager = 'foo'; - final arg_filter = 'foo'; - final arg_maxResults = 42; - final arg_orderBy = 'foo'; - final arg_pageToken = 'foo'; - final arg_returnPartialSuccess = true; + final arg_size = 42; + final arg_requestId = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -108701,7 +110648,7 @@ void main() { unittest.equals('/instanceGroupManagers/'), ); pathOffset += 23; - index = path.indexOf('/listManagedInstances', pathOffset); + index = path.indexOf('/resize', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); @@ -108711,10 +110658,10 @@ void main() { unittest.equals('$arg_instanceGroupManager'), ); unittest.expect( - path.substring(pathOffset, pathOffset + 21), - unittest.equals('/listManagedInstances'), + path.substring(pathOffset, pathOffset + 7), + unittest.equals('/resize'), ); - pathOffset += 21; + pathOffset += 7; final query = req.url.query; var queryOffset = 0; @@ -108732,24 +110679,12 @@ void main() { } } unittest.expect( - queryMap['filter']!.first, - unittest.equals(arg_filter), - ); - unittest.expect( - core.int.parse(queryMap['maxResults']!.first), - unittest.equals(arg_maxResults), - ); - unittest.expect( - queryMap['orderBy']!.first, - unittest.equals(arg_orderBy), - ); - unittest.expect( - queryMap['pageToken']!.first, - unittest.equals(arg_pageToken), + core.int.parse(queryMap['size']!.first), + unittest.equals(arg_size), ); unittest.expect( - queryMap['returnPartialSuccess']!.first, - unittest.equals('$arg_returnPartialSuccess'), + queryMap['requestId']!.first, + unittest.equals(arg_requestId), ); unittest.expect( queryMap['fields']!.first, @@ -108759,35 +110694,31 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json - .encode(buildRegionInstanceGroupManagersListInstancesResponse()); + final resp = convert.json.encode(buildOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.listManagedInstances( - arg_project, arg_region, arg_instanceGroupManager, - filter: arg_filter, - maxResults: arg_maxResults, - orderBy: arg_orderBy, - pageToken: arg_pageToken, - returnPartialSuccess: arg_returnPartialSuccess, - $fields: arg_$fields); - 
checkRegionInstanceGroupManagersListInstancesResponse( - response as api.RegionInstanceGroupManagersListInstancesResponse); + final response = await res.resize( + arg_project, arg_region, arg_instanceGroupManager, arg_size, + requestId: arg_requestId, $fields: arg_$fields); + checkOperation(response as api.Operation); }); - unittest.test('method--listPerInstanceConfigs', () async { + unittest.test('method--resumeInstances', () async { final mock = HttpServerMock(); final res = api.ComputeApi(mock).regionInstanceGroupManagers; + final arg_request = + buildRegionInstanceGroupManagersResumeInstancesRequest(); final arg_project = 'foo'; final arg_region = 'foo'; final arg_instanceGroupManager = 'foo'; - final arg_filter = 'foo'; - final arg_maxResults = 42; - final arg_orderBy = 'foo'; - final arg_pageToken = 'foo'; - final arg_returnPartialSuccess = true; + final arg_requestId = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = + api.RegionInstanceGroupManagersResumeInstancesRequest.fromJson( + json as core.Map); + checkRegionInstanceGroupManagersResumeInstancesRequest(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -108835,7 +110766,7 @@ void main() { unittest.equals('/instanceGroupManagers/'), ); pathOffset += 23; - index = path.indexOf('/listPerInstanceConfigs', pathOffset); + index = path.indexOf('/resumeInstances', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); @@ -108845,10 +110776,10 @@ void main() { unittest.equals('$arg_instanceGroupManager'), ); unittest.expect( - path.substring(pathOffset, pathOffset + 23), - unittest.equals('/listPerInstanceConfigs'), + path.substring(pathOffset, pathOffset + 16), + unittest.equals('/resumeInstances'), ); - pathOffset += 23; + pathOffset += 16; final query = req.url.query; var queryOffset = 0; @@ -108866,24 +110797,8 @@ void main() { } } unittest.expect( - queryMap['filter']!.first, - unittest.equals(arg_filter), - ); - unittest.expect( - core.int.parse(queryMap['maxResults']!.first), - unittest.equals(arg_maxResults), - ); - unittest.expect( - queryMap['orderBy']!.first, - unittest.equals(arg_orderBy), - ); - unittest.expect( - queryMap['pageToken']!.first, - unittest.equals(arg_pageToken), - ); - unittest.expect( - queryMap['returnPartialSuccess']!.first, - unittest.equals('$arg_returnPartialSuccess'), + queryMap['requestId']!.first, + unittest.equals(arg_requestId), ); unittest.expect( queryMap['fields']!.first, @@ -108893,35 +110808,28 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json - .encode(buildRegionInstanceGroupManagersListInstanceConfigsResp()); + final resp = convert.json.encode(buildOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.listPerInstanceConfigs( - arg_project, arg_region, arg_instanceGroupManager, - filter: arg_filter, - maxResults: arg_maxResults, - orderBy: arg_orderBy, - pageToken: arg_pageToken, - returnPartialSuccess: arg_returnPartialSuccess, - $fields: arg_$fields); - checkRegionInstanceGroupManagersListInstanceConfigsResp( - response as api.RegionInstanceGroupManagersListInstanceConfigsResp); + final response = await res.resumeInstances( + arg_request, arg_project, arg_region, arg_instanceGroupManager, + requestId: arg_requestId, $fields: arg_$fields); + checkOperation(response as api.Operation); }); - 
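// --- Reviewer sketch (not part of the generated diff) ----------------------
// The regenerated tests above cover the new RegionInstanceGroupManagers
// methods (resumeInstances, startInstances, stopInstances, suspendInstances,
// setInstanceTemplate, setTargetPools) through this file's HttpServerMock
// harness. For orientation only, the same surface can be exercised outside
// the harness with package:http/testing's MockClient. This is a minimal,
// hedged sketch: the project/region/MIG names and the canned Operation JSON
// are placeholders, and the `instances` field follows the Compute API's
// documented request shape rather than anything asserted in this diff.
import 'dart:convert';

import 'package:googleapis/compute/v1.dart' as compute;
import 'package:http/http.dart' as http;
import 'package:http/testing.dart';

Future<void> main() async {
  // Every mutating call on regionInstanceGroupManagers resolves to an
  // Operation, so one canned response is enough for a smoke test.
  final client = MockClient((http.Request request) async => http.Response(
      jsonEncode({'name': 'operation-1', 'status': 'DONE'}), 200,
      headers: {'content-type': 'application/json; charset=utf-8'}));

  final api = compute.ComputeApi(client);
  final op = await api.regionInstanceGroupManagers.resumeInstances(
    compute.RegionInstanceGroupManagersResumeInstancesRequest(
        instances: ['zones/us-central1-a/instances/vm-1']),
    'my-project',
    'us-central1',
    'my-mig',
  );
  print(op.status); // DONE
}
// ----------------------------------------------------------------------------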
unittest.test('method--patch', () async { + unittest.test('method--setInstanceTemplate', () async { final mock = HttpServerMock(); final res = api.ComputeApi(mock).regionInstanceGroupManagers; - final arg_request = buildInstanceGroupManager(); + final arg_request = buildRegionInstanceGroupManagersSetTemplateRequest(); final arg_project = 'foo'; final arg_region = 'foo'; final arg_instanceGroupManager = 'foo'; final arg_requestId = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.InstanceGroupManager.fromJson( + final obj = api.RegionInstanceGroupManagersSetTemplateRequest.fromJson( json as core.Map); - checkInstanceGroupManager(obj); + checkRegionInstanceGroupManagersSetTemplateRequest(obj); final path = req.url.path; var pathOffset = 0; @@ -108970,12 +110878,20 @@ void main() { unittest.equals('/instanceGroupManagers/'), ); pathOffset += 23; - subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset)); - pathOffset = path.length; + index = path.indexOf('/setInstanceTemplate', pathOffset); + unittest.expect(index >= 0, unittest.isTrue); + subPart = + core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); + pathOffset = index; unittest.expect( subPart, unittest.equals('$arg_instanceGroupManager'), ); + unittest.expect( + path.substring(pathOffset, pathOffset + 20), + unittest.equals('/setInstanceTemplate'), + ); + pathOffset += 20; final query = req.url.query; var queryOffset = 0; @@ -109007,17 +110923,17 @@ void main() { final resp = convert.json.encode(buildOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.patch( + final response = await res.setInstanceTemplate( arg_request, arg_project, arg_region, arg_instanceGroupManager, requestId: arg_requestId, $fields: arg_$fields); checkOperation(response as api.Operation); }); - unittest.test('method--patchPerInstanceConfigs', () async { + unittest.test('method--setTargetPools', () async { final mock = HttpServerMock(); final res = api.ComputeApi(mock).regionInstanceGroupManagers; final arg_request = - buildRegionInstanceGroupManagerPatchInstanceConfigReq(); + buildRegionInstanceGroupManagersSetTargetPoolsRequest(); final arg_project = 'foo'; final arg_region = 'foo'; final arg_instanceGroupManager = 'foo'; @@ -109025,9 +110941,9 @@ void main() { final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final obj = - api.RegionInstanceGroupManagerPatchInstanceConfigReq.fromJson( + api.RegionInstanceGroupManagersSetTargetPoolsRequest.fromJson( json as core.Map); - checkRegionInstanceGroupManagerPatchInstanceConfigReq(obj); + checkRegionInstanceGroupManagersSetTargetPoolsRequest(obj); final path = req.url.path; var pathOffset = 0; @@ -109076,7 +110992,7 @@ void main() { unittest.equals('/instanceGroupManagers/'), ); pathOffset += 23; - index = path.indexOf('/patchPerInstanceConfigs', pathOffset); + index = path.indexOf('/setTargetPools', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); @@ -109086,10 +111002,10 @@ void main() { unittest.equals('$arg_instanceGroupManager'), ); unittest.expect( - path.substring(pathOffset, pathOffset + 24), - unittest.equals('/patchPerInstanceConfigs'), + path.substring(pathOffset, pathOffset + 15), + unittest.equals('/setTargetPools'), ); - pathOffset += 24; + pathOffset += 15; final query = req.url.query; var queryOffset = 0; @@ -109121,25 
+111037,27 @@ void main() { final resp = convert.json.encode(buildOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.patchPerInstanceConfigs( + final response = await res.setTargetPools( arg_request, arg_project, arg_region, arg_instanceGroupManager, requestId: arg_requestId, $fields: arg_$fields); checkOperation(response as api.Operation); }); - unittest.test('method--recreateInstances', () async { + unittest.test('method--startInstances', () async { final mock = HttpServerMock(); final res = api.ComputeApi(mock).regionInstanceGroupManagers; - final arg_request = buildRegionInstanceGroupManagersRecreateRequest(); + final arg_request = + buildRegionInstanceGroupManagersStartInstancesRequest(); final arg_project = 'foo'; final arg_region = 'foo'; final arg_instanceGroupManager = 'foo'; final arg_requestId = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.RegionInstanceGroupManagersRecreateRequest.fromJson( - json as core.Map); - checkRegionInstanceGroupManagersRecreateRequest(obj); + final obj = + api.RegionInstanceGroupManagersStartInstancesRequest.fromJson( + json as core.Map); + checkRegionInstanceGroupManagersStartInstancesRequest(obj); final path = req.url.path; var pathOffset = 0; @@ -109188,7 +111106,7 @@ void main() { unittest.equals('/instanceGroupManagers/'), ); pathOffset += 23; - index = path.indexOf('/recreateInstances', pathOffset); + index = path.indexOf('/startInstances', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); @@ -109198,10 +111116,10 @@ void main() { unittest.equals('$arg_instanceGroupManager'), ); unittest.expect( - path.substring(pathOffset, pathOffset + 18), - unittest.equals('/recreateInstances'), + path.substring(pathOffset, pathOffset + 15), + unittest.equals('/startInstances'), ); - pathOffset += 18; + pathOffset += 15; final query = req.url.query; var queryOffset = 0; @@ -109233,137 +111151,27 @@ void main() { final resp = convert.json.encode(buildOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.recreateInstances( + final response = await res.startInstances( arg_request, arg_project, arg_region, arg_instanceGroupManager, requestId: arg_requestId, $fields: arg_$fields); checkOperation(response as api.Operation); }); - unittest.test('method--resize', () async { + unittest.test('method--stopInstances', () async { final mock = HttpServerMock(); final res = api.ComputeApi(mock).regionInstanceGroupManagers; - final arg_project = 'foo'; - final arg_region = 'foo'; - final arg_instanceGroupManager = 'foo'; - final arg_size = 42; - final arg_requestId = 'foo'; - final arg_$fields = 'foo'; - mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final path = req.url.path; - var pathOffset = 0; - core.int index; - core.String subPart; - unittest.expect( - path.substring(pathOffset, pathOffset + 1), - unittest.equals('/'), - ); - pathOffset += 1; - unittest.expect( - path.substring(pathOffset, pathOffset + 11), - unittest.equals('compute/v1/'), - ); - pathOffset += 11; - unittest.expect( - path.substring(pathOffset, pathOffset + 9), - unittest.equals('projects/'), - ); - pathOffset += 9; - index = path.indexOf('/regions/', pathOffset); - unittest.expect(index >= 0, unittest.isTrue); - subPart = - core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); - pathOffset = 
index; - unittest.expect( - subPart, - unittest.equals('$arg_project'), - ); - unittest.expect( - path.substring(pathOffset, pathOffset + 9), - unittest.equals('/regions/'), - ); - pathOffset += 9; - index = path.indexOf('/instanceGroupManagers/', pathOffset); - unittest.expect(index >= 0, unittest.isTrue); - subPart = - core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); - pathOffset = index; - unittest.expect( - subPart, - unittest.equals('$arg_region'), - ); - unittest.expect( - path.substring(pathOffset, pathOffset + 23), - unittest.equals('/instanceGroupManagers/'), - ); - pathOffset += 23; - index = path.indexOf('/resize', pathOffset); - unittest.expect(index >= 0, unittest.isTrue); - subPart = - core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); - pathOffset = index; - unittest.expect( - subPart, - unittest.equals('$arg_instanceGroupManager'), - ); - unittest.expect( - path.substring(pathOffset, pathOffset + 7), - unittest.equals('/resize'), - ); - pathOffset += 7; - - final query = req.url.query; - var queryOffset = 0; - final queryMap = >{}; - void addQueryParam(core.String n, core.String v) => - queryMap.putIfAbsent(n, () => []).add(v); - - if (query.isNotEmpty) { - for (var part in query.split('&')) { - final keyValue = part.split('='); - addQueryParam( - core.Uri.decodeQueryComponent(keyValue[0]), - core.Uri.decodeQueryComponent(keyValue[1]), - ); - } - } - unittest.expect( - core.int.parse(queryMap['size']!.first), - unittest.equals(arg_size), - ); - unittest.expect( - queryMap['requestId']!.first, - unittest.equals(arg_requestId), - ); - unittest.expect( - queryMap['fields']!.first, - unittest.equals(arg_$fields), - ); - - final h = { - 'content-type': 'application/json; charset=utf-8', - }; - final resp = convert.json.encode(buildOperation()); - return async.Future.value(stringResponse(200, h, resp)); - }), true); - final response = await res.resize( - arg_project, arg_region, arg_instanceGroupManager, arg_size, - requestId: arg_requestId, $fields: arg_$fields); - checkOperation(response as api.Operation); - }); - - unittest.test('method--setInstanceTemplate', () async { - final mock = HttpServerMock(); - final res = api.ComputeApi(mock).regionInstanceGroupManagers; - final arg_request = buildRegionInstanceGroupManagersSetTemplateRequest(); + final arg_request = + buildRegionInstanceGroupManagersStopInstancesRequest(); final arg_project = 'foo'; final arg_region = 'foo'; final arg_instanceGroupManager = 'foo'; final arg_requestId = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.RegionInstanceGroupManagersSetTemplateRequest.fromJson( - json as core.Map); - checkRegionInstanceGroupManagersSetTemplateRequest(obj); + final obj = + api.RegionInstanceGroupManagersStopInstancesRequest.fromJson( + json as core.Map); + checkRegionInstanceGroupManagersStopInstancesRequest(obj); final path = req.url.path; var pathOffset = 0; @@ -109412,7 +111220,7 @@ void main() { unittest.equals('/instanceGroupManagers/'), ); pathOffset += 23; - index = path.indexOf('/setInstanceTemplate', pathOffset); + index = path.indexOf('/stopInstances', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); @@ -109422,10 +111230,10 @@ void main() { unittest.equals('$arg_instanceGroupManager'), ); unittest.expect( - path.substring(pathOffset, pathOffset + 20), - unittest.equals('/setInstanceTemplate'), + path.substring(pathOffset, 
pathOffset + 14), + unittest.equals('/stopInstances'), ); - pathOffset += 20; + pathOffset += 14; final query = req.url.query; var queryOffset = 0; @@ -109457,17 +111265,17 @@ void main() { final resp = convert.json.encode(buildOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.setInstanceTemplate( + final response = await res.stopInstances( arg_request, arg_project, arg_region, arg_instanceGroupManager, requestId: arg_requestId, $fields: arg_$fields); checkOperation(response as api.Operation); }); - unittest.test('method--setTargetPools', () async { + unittest.test('method--suspendInstances', () async { final mock = HttpServerMock(); final res = api.ComputeApi(mock).regionInstanceGroupManagers; final arg_request = - buildRegionInstanceGroupManagersSetTargetPoolsRequest(); + buildRegionInstanceGroupManagersSuspendInstancesRequest(); final arg_project = 'foo'; final arg_region = 'foo'; final arg_instanceGroupManager = 'foo'; @@ -109475,9 +111283,9 @@ void main() { final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final obj = - api.RegionInstanceGroupManagersSetTargetPoolsRequest.fromJson( + api.RegionInstanceGroupManagersSuspendInstancesRequest.fromJson( json as core.Map); - checkRegionInstanceGroupManagersSetTargetPoolsRequest(obj); + checkRegionInstanceGroupManagersSuspendInstancesRequest(obj); final path = req.url.path; var pathOffset = 0; @@ -109526,7 +111334,7 @@ void main() { unittest.equals('/instanceGroupManagers/'), ); pathOffset += 23; - index = path.indexOf('/setTargetPools', pathOffset); + index = path.indexOf('/suspendInstances', pathOffset); unittest.expect(index >= 0, unittest.isTrue); subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); @@ -109536,10 +111344,10 @@ void main() { unittest.equals('$arg_instanceGroupManager'), ); unittest.expect( - path.substring(pathOffset, pathOffset + 15), - unittest.equals('/setTargetPools'), + path.substring(pathOffset, pathOffset + 17), + unittest.equals('/suspendInstances'), ); - pathOffset += 15; + pathOffset += 17; final query = req.url.query; var queryOffset = 0; @@ -109571,7 +111379,7 @@ void main() { final resp = convert.json.encode(buildOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.setTargetPools( + final response = await res.suspendInstances( arg_request, arg_project, arg_region, arg_instanceGroupManager, requestId: arg_requestId, $fields: arg_$fields); checkOperation(response as api.Operation); @@ -115900,6 +117708,118 @@ void main() { priority: arg_priority, $fields: arg_$fields); checkOperation(response as api.Operation); }); + + unittest.test('method--setLabels', () async { + final mock = HttpServerMock(); + final res = api.ComputeApi(mock).regionSecurityPolicies; + final arg_request = buildRegionSetLabelsRequest(); + final arg_project = 'foo'; + final arg_region = 'foo'; + final arg_resource = 'foo'; + final arg_requestId = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.RegionSetLabelsRequest.fromJson( + json as core.Map); + checkRegionSetLabelsRequest(obj); + + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 11), + unittest.equals('compute/v1/'), + ); 
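// Reviewer note (not generated code): each generated handler walks the
// request path with a running `pathOffset`. Fixed segments such as
// 'compute/v1/', 'projects/', '/regions/' and '/securityPolicies/' are
// checked with path.substring(pathOffset, pathOffset + n) and then skipped
// by advancing pathOffset by n, while variable segments (project, region,
// resource) are located with path.indexOf(<next literal>, pathOffset) and
// decoded via core.Uri.decodeQueryComponent before being compared against
// the arg_* values passed into the call under test.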
+ pathOffset += 11; + unittest.expect( + path.substring(pathOffset, pathOffset + 9), + unittest.equals('projects/'), + ); + pathOffset += 9; + index = path.indexOf('/regions/', pathOffset); + unittest.expect(index >= 0, unittest.isTrue); + subPart = + core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); + pathOffset = index; + unittest.expect( + subPart, + unittest.equals('$arg_project'), + ); + unittest.expect( + path.substring(pathOffset, pathOffset + 9), + unittest.equals('/regions/'), + ); + pathOffset += 9; + index = path.indexOf('/securityPolicies/', pathOffset); + unittest.expect(index >= 0, unittest.isTrue); + subPart = + core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); + pathOffset = index; + unittest.expect( + subPart, + unittest.equals('$arg_region'), + ); + unittest.expect( + path.substring(pathOffset, pathOffset + 18), + unittest.equals('/securityPolicies/'), + ); + pathOffset += 18; + index = path.indexOf('/setLabels', pathOffset); + unittest.expect(index >= 0, unittest.isTrue); + subPart = + core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); + pathOffset = index; + unittest.expect( + subPart, + unittest.equals('$arg_resource'), + ); + unittest.expect( + path.substring(pathOffset, pathOffset + 10), + unittest.equals('/setLabels'), + ); + pathOffset += 10; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['requestId']!.first, + unittest.equals(arg_requestId), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildOperation()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.setLabels( + arg_request, arg_project, arg_region, arg_resource, + requestId: arg_requestId, $fields: arg_$fields); + checkOperation(response as api.Operation); + }); }); unittest.group('resource-RegionSslCertificatesResource', () { @@ -120576,7 +122496,7 @@ void main() { final arg_project = 'foo'; final arg_zone = 'foo'; final arg_reservation = 'foo'; - final arg_paths = buildUnnamed861(); + final arg_paths = buildUnnamed870(); final arg_requestId = 'foo'; final arg_updateMask = 'foo'; final arg_$fields = 'foo'; diff --git a/generated/googleapis/test/connectors/v1_test.dart b/generated/googleapis/test/connectors/v1_test.dart index 9294ab289..d206af679 100644 --- a/generated/googleapis/test/connectors/v1_test.dart +++ b/generated/googleapis/test/connectors/v1_test.dart @@ -949,6 +949,7 @@ api.Connector buildConnector() { o.externalUri = 'foo'; o.labels = buildUnnamed14(); o.launchStage = 'foo'; + o.marketplaceConnectorDetails = buildMarketplaceConnectorDetails(); o.name = 'foo'; o.tags = buildUnnamed15(); o.updateTime = 'foo'; @@ -995,6 +996,7 @@ void checkConnector(api.Connector o) { o.launchStage!, unittest.equals('foo'), ); + checkMarketplaceConnectorDetails(o.marketplaceConnectorDetails!); unittest.expect( o.name!, unittest.equals('foo'), @@ -1381,12 +1383,29 @@ void checkUnnamed25(core.List o) { ); } -core.Map buildUnnamed26() => { +core.List buildUnnamed26() => [ + 'foo', + 'foo', + 
]; + +void checkUnnamed26(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o[0], + unittest.equals('foo'), + ); + unittest.expect( + o[1], + unittest.equals('foo'), + ); +} + +core.Map buildUnnamed27() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed26(core.Map o) { +void checkUnnamed27(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -1398,6 +1417,23 @@ void checkUnnamed26(core.Map o) { ); } +core.List buildUnnamed28() => [ + 'foo', + 'foo', + ]; + +void checkUnnamed28(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o[0], + unittest.equals('foo'), + ); + unittest.expect( + o[1], + unittest.equals('foo'), + ); +} + core.int buildCounterCustomConnector = 0; api.CustomConnector buildCustomConnector() { final o = api.CustomConnector(); @@ -1405,13 +1441,15 @@ api.CustomConnector buildCustomConnector() { if (buildCounterCustomConnector < 3) { o.activeConnectorVersions = buildUnnamed24(); o.allConnectorVersions = buildUnnamed25(); + o.allMarketplaceVersions = buildUnnamed26(); o.createTime = 'foo'; o.customConnectorType = 'foo'; o.description = 'foo'; o.displayName = 'foo'; - o.labels = buildUnnamed26(); + o.labels = buildUnnamed27(); o.logo = 'foo'; o.name = 'foo'; + o.publishedMarketplaceVersions = buildUnnamed28(); o.updateTime = 'foo'; } buildCounterCustomConnector--; @@ -1423,6 +1461,7 @@ void checkCustomConnector(api.CustomConnector o) { if (buildCounterCustomConnector < 3) { checkUnnamed24(o.activeConnectorVersions!); checkUnnamed25(o.allConnectorVersions!); + checkUnnamed26(o.allMarketplaceVersions!); unittest.expect( o.createTime!, unittest.equals('foo'), @@ -1439,7 +1478,7 @@ void checkCustomConnector(api.CustomConnector o) { o.displayName!, unittest.equals('foo'), ); - checkUnnamed26(o.labels!); + checkUnnamed27(o.labels!); unittest.expect( o.logo!, unittest.equals('foo'), @@ -1448,6 +1487,7 @@ void checkCustomConnector(api.CustomConnector o) { o.name!, unittest.equals('foo'), ); + checkUnnamed28(o.publishedMarketplaceVersions!); unittest.expect( o.updateTime!, unittest.equals('foo'), @@ -1456,34 +1496,34 @@ void checkCustomConnector(api.CustomConnector o) { buildCounterCustomConnector--; } -core.List buildUnnamed27() => [ +core.List buildUnnamed29() => [ buildConfigVariableTemplate(), buildConfigVariableTemplate(), ]; -void checkUnnamed27(core.List o) { +void checkUnnamed29(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkConfigVariableTemplate(o[0]); checkConfigVariableTemplate(o[1]); } -core.List buildUnnamed28() => [ +core.List buildUnnamed30() => [ buildDestinationConfig(), buildDestinationConfig(), ]; -void checkUnnamed28(core.List o) { +void checkUnnamed30(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkDestinationConfig(o[0]); checkDestinationConfig(o[1]); } -core.Map buildUnnamed29() => { +core.Map buildUnnamed31() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed29(core.Map o) { +void checkUnnamed31(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -1495,12 +1535,12 @@ void checkUnnamed29(core.Map o) { ); } -core.List buildUnnamed30() => [ +core.List buildUnnamed32() => [ 'foo', 'foo', ]; -void checkUnnamed30(core.List o) { +void checkUnnamed32(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -1518,15 +1558,17 @@ api.CustomConnectorVersion buildCustomConnectorVersion() { buildCounterCustomConnectorVersion++; if (buildCounterCustomConnectorVersion < 3) { o.authConfig = 
buildAuthConfig(); - o.backendVariableTemplates = buildUnnamed27(); + o.backendVariableTemplates = buildUnnamed29(); o.createTime = 'foo'; - o.destinationConfigs = buildUnnamed28(); + o.destinationConfigs = buildUnnamed30(); o.enableBackendDestinationConfig = true; - o.labels = buildUnnamed29(); + o.labels = buildUnnamed31(); o.name = 'foo'; + o.partnerMetadata = buildPartnerMetadata(); + o.publishStatus = buildPublishStatus(); o.serviceAccount = 'foo'; o.specLocation = 'foo'; - o.specServerUrls = buildUnnamed30(); + o.specServerUrls = buildUnnamed32(); o.state = 'foo'; o.updateTime = 'foo'; } @@ -1538,18 +1580,20 @@ void checkCustomConnectorVersion(api.CustomConnectorVersion o) { buildCounterCustomConnectorVersion++; if (buildCounterCustomConnectorVersion < 3) { checkAuthConfig(o.authConfig!); - checkUnnamed27(o.backendVariableTemplates!); + checkUnnamed29(o.backendVariableTemplates!); unittest.expect( o.createTime!, unittest.equals('foo'), ); - checkUnnamed28(o.destinationConfigs!); + checkUnnamed30(o.destinationConfigs!); unittest.expect(o.enableBackendDestinationConfig!, unittest.isTrue); - checkUnnamed29(o.labels!); + checkUnnamed31(o.labels!); unittest.expect( o.name!, unittest.equals('foo'), ); + checkPartnerMetadata(o.partnerMetadata!); + checkPublishStatus(o.publishStatus!); unittest.expect( o.serviceAccount!, unittest.equals('foo'), @@ -1558,7 +1602,7 @@ void checkCustomConnectorVersion(api.CustomConnectorVersion o) { o.specLocation!, unittest.equals('foo'), ); - checkUnnamed30(o.specServerUrls!); + checkUnnamed32(o.specServerUrls!); unittest.expect( o.state!, unittest.equals('foo'), @@ -1647,12 +1691,12 @@ void checkDestination(api.Destination o) { buildCounterDestination--; } -core.List buildUnnamed31() => [ +core.List buildUnnamed33() => [ buildDestination(), buildDestination(), ]; -void checkUnnamed31(core.List o) { +void checkUnnamed33(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkDestination(o[0]); checkDestination(o[1]); @@ -1663,7 +1707,7 @@ api.DestinationConfig buildDestinationConfig() { final o = api.DestinationConfig(); buildCounterDestinationConfig++; if (buildCounterDestinationConfig < 3) { - o.destinations = buildUnnamed31(); + o.destinations = buildUnnamed33(); o.key = 'foo'; } buildCounterDestinationConfig--; @@ -1673,7 +1717,7 @@ api.DestinationConfig buildDestinationConfig() { void checkDestinationConfig(api.DestinationConfig o) { buildCounterDestinationConfig++; if (buildCounterDestinationConfig < 3) { - checkUnnamed31(o.destinations!); + checkUnnamed33(o.destinations!); unittest.expect( o.key!, unittest.equals('foo'), @@ -1682,12 +1726,12 @@ void checkDestinationConfig(api.DestinationConfig o) { buildCounterDestinationConfig--; } -core.List buildUnnamed32() => [ +core.List buildUnnamed34() => [ 'foo', 'foo', ]; -void checkUnnamed32(core.List o) { +void checkUnnamed34(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -1704,7 +1748,7 @@ api.DestinationConfigTemplate buildDestinationConfigTemplate() { final o = api.DestinationConfigTemplate(); buildCounterDestinationConfigTemplate++; if (buildCounterDestinationConfigTemplate < 3) { - o.autocompleteSuggestions = buildUnnamed32(); + o.autocompleteSuggestions = buildUnnamed34(); o.defaultPort = 42; o.description = 'foo'; o.displayName = 'foo'; @@ -1722,7 +1766,7 @@ api.DestinationConfigTemplate buildDestinationConfigTemplate() { void checkDestinationConfigTemplate(api.DestinationConfigTemplate o) { buildCounterDestinationConfigTemplate++; if 
(buildCounterDestinationConfigTemplate < 3) { - checkUnnamed32(o.autocompleteSuggestions!); + checkUnnamed34(o.autocompleteSuggestions!); unittest.expect( o.defaultPort!, unittest.equals(42), @@ -1853,12 +1897,12 @@ void checkEncryptionKey(api.EncryptionKey o) { buildCounterEncryptionKey--; } -core.List buildUnnamed33() => [ +core.List buildUnnamed35() => [ buildHeader(), buildHeader(), ]; -void checkUnnamed33(core.List o) { +void checkUnnamed35(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkHeader(o[0]); checkHeader(o[1]); @@ -1870,7 +1914,7 @@ api.EndPoint buildEndPoint() { buildCounterEndPoint++; if (buildCounterEndPoint < 3) { o.endpointUri = 'foo'; - o.headers = buildUnnamed33(); + o.headers = buildUnnamed35(); } buildCounterEndPoint--; return o; @@ -1883,17 +1927,17 @@ void checkEndPoint(api.EndPoint o) { o.endpointUri!, unittest.equals('foo'), ); - checkUnnamed33(o.headers!); + checkUnnamed35(o.headers!); } buildCounterEndPoint--; } -core.Map buildUnnamed34() => { +core.Map buildUnnamed36() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed34(core.Map o) { +void checkUnnamed36(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -1914,7 +1958,7 @@ api.EndpointAttachment buildEndpointAttachment() { o.description = 'foo'; o.endpointGlobalAccess = true; o.endpointIp = 'foo'; - o.labels = buildUnnamed34(); + o.labels = buildUnnamed36(); o.name = 'foo'; o.serviceAttachment = 'foo'; o.state = 'foo'; @@ -1940,7 +1984,7 @@ void checkEndpointAttachment(api.EndpointAttachment o) { o.endpointIp!, unittest.equals('foo'), ); - checkUnnamed34(o.labels!); + checkUnnamed36(o.labels!); unittest.expect( o.name!, unittest.equals('foo'), @@ -1988,12 +2032,12 @@ void checkEnumOption(api.EnumOption o) { buildCounterEnumOption--; } -core.List buildUnnamed35() => [ +core.List buildUnnamed37() => [ buildConfigVariable(), buildConfigVariable(), ]; -void checkUnnamed35(core.List o) { +void checkUnnamed37(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkConfigVariable(o[0]); checkConfigVariable(o[1]); @@ -2012,7 +2056,7 @@ api.EventSubscription buildEventSubscription() { o.status = buildEventSubscriptionStatus(); o.subscriber = 'foo'; o.subscriberLink = 'foo'; - o.triggerConfigVariables = buildUnnamed35(); + o.triggerConfigVariables = buildUnnamed37(); o.updateTime = 'foo'; } buildCounterEventSubscription--; @@ -2045,7 +2089,7 @@ void checkEventSubscription(api.EventSubscription o) { o.subscriberLink!, unittest.equals('foo'), ); - checkUnnamed35(o.triggerConfigVariables!); + checkUnnamed37(o.triggerConfigVariables!); unittest.expect( o.updateTime!, unittest.equals('foo'), @@ -2060,6 +2104,7 @@ api.EventSubscriptionDestination buildEventSubscriptionDestination() { buildCounterEventSubscriptionDestination++; if (buildCounterEventSubscriptionDestination < 3) { o.endpoint = buildEndPoint(); + o.gsutil = buildGSUtil(); o.serviceAccount = 'foo'; o.type = 'foo'; } @@ -2071,6 +2116,7 @@ void checkEventSubscriptionDestination(api.EventSubscriptionDestination o) { buildCounterEventSubscriptionDestination++; if (buildCounterEventSubscriptionDestination < 3) { checkEndPoint(o.endpoint!); + checkGSUtil(o.gsutil!); unittest.expect( o.serviceAccount!, unittest.equals('foo'), @@ -2167,12 +2213,12 @@ void checkEventType(api.EventType o) { buildCounterEventType--; } -core.List buildUnnamed36() => [ +core.List buildUnnamed38() => [ buildConfigVariable(), buildConfigVariable(), ]; -void checkUnnamed36(core.List o) { +void checkUnnamed38(core.List o) { 
unittest.expect(o, unittest.hasLength(2)); checkConfigVariable(o[0]); checkConfigVariable(o[1]); @@ -2183,7 +2229,7 @@ api.EventingConfig buildEventingConfig() { final o = api.EventingConfig(); buildCounterEventingConfig++; if (buildCounterEventingConfig < 3) { - o.additionalVariables = buildUnnamed36(); + o.additionalVariables = buildUnnamed38(); o.authConfig = buildAuthConfig(); o.deadLetterConfig = buildDeadLetterConfig(); o.enrichmentEnabled = true; @@ -2200,7 +2246,7 @@ api.EventingConfig buildEventingConfig() { void checkEventingConfig(api.EventingConfig o) { buildCounterEventingConfig++; if (buildCounterEventingConfig < 3) { - checkUnnamed36(o.additionalVariables!); + checkUnnamed38(o.additionalVariables!); checkAuthConfig(o.authConfig!); checkDeadLetterConfig(o.deadLetterConfig!); unittest.expect(o.enrichmentEnabled!, unittest.isTrue); @@ -2216,45 +2262,45 @@ void checkEventingConfig(api.EventingConfig o) { buildCounterEventingConfig--; } -core.List buildUnnamed37() => [ +core.List buildUnnamed39() => [ buildConfigVariableTemplate(), buildConfigVariableTemplate(), ]; -void checkUnnamed37(core.List o) { +void checkUnnamed39(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkConfigVariableTemplate(o[0]); checkConfigVariableTemplate(o[1]); } -core.List buildUnnamed38() => [ +core.List buildUnnamed40() => [ buildAuthConfigTemplate(), buildAuthConfigTemplate(), ]; -void checkUnnamed38(core.List o) { +void checkUnnamed40(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkAuthConfigTemplate(o[0]); checkAuthConfigTemplate(o[1]); } -core.List buildUnnamed39() => [ +core.List buildUnnamed41() => [ buildAuthConfigTemplate(), buildAuthConfigTemplate(), ]; -void checkUnnamed39(core.List o) { +void checkUnnamed41(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkAuthConfigTemplate(o[0]); checkAuthConfigTemplate(o[1]); } -core.List buildUnnamed40() => [ +core.List buildUnnamed42() => [ buildConfigVariableTemplate(), buildConfigVariableTemplate(), ]; -void checkUnnamed40(core.List o) { +void checkUnnamed42(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkConfigVariableTemplate(o[0]); checkConfigVariableTemplate(o[1]); @@ -2265,18 +2311,18 @@ api.EventingConfigTemplate buildEventingConfigTemplate() { final o = api.EventingConfigTemplate(); buildCounterEventingConfigTemplate++; if (buildCounterEventingConfigTemplate < 3) { - o.additionalVariables = buildUnnamed37(); - o.authConfigTemplates = buildUnnamed38(); + o.additionalVariables = buildUnnamed39(); + o.authConfigTemplates = buildUnnamed40(); o.autoRefresh = true; o.autoRegistrationSupported = true; o.encryptionKeyTemplate = buildConfigVariableTemplate(); o.enrichmentSupported = true; o.eventListenerType = 'foo'; o.isEventingSupported = true; - o.listenerAuthConfigTemplates = buildUnnamed39(); + o.listenerAuthConfigTemplates = buildUnnamed41(); o.proxyDestinationConfig = buildDestinationConfigTemplate(); o.registrationDestinationConfig = buildDestinationConfigTemplate(); - o.triggerConfigVariables = buildUnnamed40(); + o.triggerConfigVariables = buildUnnamed42(); } buildCounterEventingConfigTemplate--; return o; @@ -2285,8 +2331,8 @@ api.EventingConfigTemplate buildEventingConfigTemplate() { void checkEventingConfigTemplate(api.EventingConfigTemplate o) { buildCounterEventingConfigTemplate++; if (buildCounterEventingConfigTemplate < 3) { - checkUnnamed37(o.additionalVariables!); - checkUnnamed38(o.authConfigTemplates!); + checkUnnamed39(o.additionalVariables!); + 
checkUnnamed40(o.authConfigTemplates!); unittest.expect(o.autoRefresh!, unittest.isTrue); unittest.expect(o.autoRegistrationSupported!, unittest.isTrue); checkConfigVariableTemplate(o.encryptionKeyTemplate!); @@ -2296,20 +2342,20 @@ void checkEventingConfigTemplate(api.EventingConfigTemplate o) { unittest.equals('foo'), ); unittest.expect(o.isEventingSupported!, unittest.isTrue); - checkUnnamed39(o.listenerAuthConfigTemplates!); + checkUnnamed41(o.listenerAuthConfigTemplates!); checkDestinationConfigTemplate(o.proxyDestinationConfig!); checkDestinationConfigTemplate(o.registrationDestinationConfig!); - checkUnnamed40(o.triggerConfigVariables!); + checkUnnamed42(o.triggerConfigVariables!); } buildCounterEventingConfigTemplate--; } -core.List buildUnnamed41() => [ +core.List buildUnnamed43() => [ 'foo', 'foo', ]; -void checkUnnamed41(core.List o) { +void checkUnnamed43(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -2332,7 +2378,7 @@ api.EventingDetails buildEventingDetails() { o.iconLocation = 'foo'; o.launchStage = 'foo'; o.name = 'foo'; - o.searchTags = buildUnnamed41(); + o.searchTags = buildUnnamed43(); o.type = 'foo'; } buildCounterEventingDetails--; @@ -2363,7 +2409,7 @@ void checkEventingDetails(api.EventingDetails o) { o.name!, unittest.equals('foo'), ); - checkUnnamed41(o.searchTags!); + checkUnnamed43(o.searchTags!); unittest.expect( o.type!, unittest.equals('foo'), @@ -2381,6 +2427,7 @@ api.EventingRuntimeData buildEventingRuntimeData() { o.eventsListenerPscSa = 'foo'; o.status = buildEventingStatus(); o.webhookData = buildWebhookData(); + o.webhookSubscriptions = buildWebhookSubscriptions(); } buildCounterEventingRuntimeData--; return o; @@ -2399,6 +2446,7 @@ void checkEventingRuntimeData(api.EventingRuntimeData o) { ); checkEventingStatus(o.status!); checkWebhookData(o.webhookData!); + checkWebhookSubscriptions(o.webhookSubscriptions!); } buildCounterEventingRuntimeData--; } @@ -2491,12 +2539,12 @@ void checkExtractionRule(api.ExtractionRule o) { buildCounterExtractionRule--; } -core.List buildUnnamed42() => [ +core.List buildUnnamed44() => [ buildExtractionRule(), buildExtractionRule(), ]; -void checkUnnamed42(core.List o) { +void checkUnnamed44(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkExtractionRule(o[0]); checkExtractionRule(o[1]); @@ -2507,7 +2555,7 @@ api.ExtractionRules buildExtractionRules() { final o = api.ExtractionRules(); buildCounterExtractionRules++; if (buildCounterExtractionRules < 3) { - o.extractionRule = buildUnnamed42(); + o.extractionRule = buildUnnamed44(); } buildCounterExtractionRules--; return o; @@ -2516,17 +2564,17 @@ api.ExtractionRules buildExtractionRules() { void checkExtractionRules(api.ExtractionRules o) { buildCounterExtractionRules++; if (buildCounterExtractionRules < 3) { - checkUnnamed42(o.extractionRule!); + checkUnnamed44(o.extractionRule!); } buildCounterExtractionRules--; } -core.List buildUnnamed43() => [ +core.List buildUnnamed45() => [ buildAuthSchema(), buildAuthSchema(), ]; -void checkUnnamed43(core.List o) { +void checkUnnamed45(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkAuthSchema(o[0]); checkAuthSchema(o[1]); @@ -2537,7 +2585,7 @@ api.FetchAuthSchemaResponse buildFetchAuthSchemaResponse() { final o = api.FetchAuthSchemaResponse(); buildCounterFetchAuthSchemaResponse++; if (buildCounterFetchAuthSchemaResponse < 3) { - o.authSchemas = buildUnnamed43(); + o.authSchemas = buildUnnamed45(); o.jsonSchema = buildJsonAuthSchema(); } 
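// Reviewer note (not generated code): the buildCounterX++ / if (buildCounterX < 3)
// / buildCounterX-- guard around every build*/check* pair bounds recursion
// depth when schemas reference themselves (e.g. JsonSchema.items is itself a
// JsonSchema), so the fixtures stay finite. The wholesale renumbering of the
// buildUnnamedNN/checkUnnamedNN helpers in this hunk (26 -> 27, 27 -> 29, ...)
// is a side effect of the generator's sequential naming of anonymous list/map
// schemas: the new repeated fields on CustomConnector (allMarketplaceVersions,
// publishedMarketplaceVersions) introduce two helpers ahead of the existing
// ones, shifting every later index.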
buildCounterFetchAuthSchemaResponse--; @@ -2547,13 +2595,13 @@ api.FetchAuthSchemaResponse buildFetchAuthSchemaResponse() { void checkFetchAuthSchemaResponse(api.FetchAuthSchemaResponse o) { buildCounterFetchAuthSchemaResponse++; if (buildCounterFetchAuthSchemaResponse < 3) { - checkUnnamed43(o.authSchemas!); + checkUnnamed45(o.authSchemas!); checkJsonAuthSchema(o.jsonSchema!); } buildCounterFetchAuthSchemaResponse--; } -core.Map buildUnnamed44() => { +core.Map buildUnnamed46() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -2566,7 +2614,7 @@ core.Map buildUnnamed44() => { }, }; -void checkUnnamed44(core.Map o) { +void checkUnnamed46(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted1 = (o['x']!) as core.Map; unittest.expect(casted1, unittest.hasLength(3)); @@ -2603,7 +2651,7 @@ api.Field buildField() { final o = api.Field(); buildCounterField++; if (buildCounterField < 3) { - o.additionalDetails = buildUnnamed44(); + o.additionalDetails = buildUnnamed46(); o.dataType = 'foo'; o.defaultValue = { 'list': [1, 2, 3], @@ -2624,7 +2672,7 @@ api.Field buildField() { void checkField(api.Field o) { buildCounterField++; if (buildCounterField < 3) { - checkUnnamed44(o.additionalDetails!); + checkUnnamed46(o.additionalDetails!); unittest.expect( o.dataType!, unittest.equals('foo'), @@ -2698,6 +2746,28 @@ void checkFieldComparison(api.FieldComparison o) { buildCounterFieldComparison--; } +core.int buildCounterGSUtil = 0; +api.GSUtil buildGSUtil() { + final o = api.GSUtil(); + buildCounterGSUtil++; + if (buildCounterGSUtil < 3) { + o.gsutilUri = 'foo'; + } + buildCounterGSUtil--; + return o; +} + +void checkGSUtil(api.GSUtil o) { + buildCounterGSUtil++; + if (buildCounterGSUtil < 3) { + unittest.expect( + o.gsutilUri!, + unittest.equals('foo'), + ); + } + buildCounterGSUtil--; +} + core.int buildCounterHPAConfig = 0; api.HPAConfig buildHPAConfig() { final o = api.HPAConfig(); @@ -2834,12 +2904,12 @@ void checkJMS(api.JMS o) { buildCounterJMS--; } -core.List buildUnnamed45() => [ +core.List buildUnnamed47() => [ buildAuthObject(), buildAuthObject(), ]; -void checkUnnamed45(core.List o) { +void checkUnnamed47(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkAuthObject(o[0]); checkAuthObject(o[1]); @@ -2851,7 +2921,7 @@ api.JsonAuthSchema buildJsonAuthSchema() { buildCounterJsonAuthSchema++; if (buildCounterJsonAuthSchema < 3) { o.P_schema = 'foo'; - o.oneOf = buildUnnamed45(); + o.oneOf = buildUnnamed47(); } buildCounterJsonAuthSchema--; return o; @@ -2864,12 +2934,12 @@ void checkJsonAuthSchema(api.JsonAuthSchema o) { o.P_schema!, unittest.equals('foo'), ); - checkUnnamed45(o.oneOf!); + checkUnnamed47(o.oneOf!); } buildCounterJsonAuthSchema--; } -core.List buildUnnamed46() => [ +core.List buildUnnamed48() => [ { 'list': [1, 2, 3], 'bool': true, @@ -2882,7 +2952,7 @@ core.List buildUnnamed46() => [ }, ]; -void checkUnnamed46(core.List o) { +void checkUnnamed48(core.List o) { unittest.expect(o, unittest.hasLength(2)); var casted5 = (o[0]) as core.Map; unittest.expect(casted5, unittest.hasLength(3)); @@ -2914,23 +2984,23 @@ void checkUnnamed46(core.List o) { ); } -core.Map buildUnnamed47() => { +core.Map buildUnnamed49() => { 'x': buildJsonSchema(), 'y': buildJsonSchema(), }; -void checkUnnamed47(core.Map o) { +void checkUnnamed49(core.Map o) { unittest.expect(o, unittest.hasLength(2)); checkJsonSchema(o['x']!); checkJsonSchema(o['y']!); } -core.List buildUnnamed48() => [ +core.List buildUnnamed50() => [ 'foo', 'foo', ]; -void checkUnnamed48(core.List o) { +void 
checkUnnamed50(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -2942,12 +3012,12 @@ void checkUnnamed48(core.List o) { ); } -core.List buildUnnamed49() => [ +core.List buildUnnamed51() => [ 'foo', 'foo', ]; -void checkUnnamed49(core.List o) { +void checkUnnamed51(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -2970,13 +3040,13 @@ api.JsonSchema buildJsonSchema() { 'string': 'foo' }; o.description = 'foo'; - o.enum_ = buildUnnamed46(); + o.enum_ = buildUnnamed48(); o.format = 'foo'; o.items = buildJsonSchema(); o.jdbcType = 'foo'; - o.properties = buildUnnamed47(); - o.required = buildUnnamed48(); - o.type = buildUnnamed49(); + o.properties = buildUnnamed49(); + o.required = buildUnnamed50(); + o.type = buildUnnamed51(); } buildCounterJsonSchema--; return o; @@ -3003,7 +3073,7 @@ void checkJsonSchema(api.JsonSchema o) { o.description!, unittest.equals('foo'), ); - checkUnnamed46(o.enum_!); + checkUnnamed48(o.enum_!); unittest.expect( o.format!, unittest.equals('foo'), @@ -3013,9 +3083,9 @@ void checkJsonSchema(api.JsonSchema o) { o.jdbcType!, unittest.equals('foo'), ); - checkUnnamed47(o.properties!); - checkUnnamed48(o.required!); - checkUnnamed49(o.type!); + checkUnnamed49(o.properties!); + checkUnnamed50(o.required!); + checkUnnamed51(o.type!); } buildCounterJsonSchema--; } @@ -3052,12 +3122,12 @@ void checkJwtClaims(api.JwtClaims o) { buildCounterJwtClaims--; } -core.List buildUnnamed50() => [ +core.List buildUnnamed52() => [ buildRuntimeActionSchema(), buildRuntimeActionSchema(), ]; -void checkUnnamed50(core.List o) { +void checkUnnamed52(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkRuntimeActionSchema(o[0]); checkRuntimeActionSchema(o[1]); @@ -3068,7 +3138,7 @@ api.ListActionsResponse buildListActionsResponse() { final o = api.ListActionsResponse(); buildCounterListActionsResponse++; if (buildCounterListActionsResponse < 3) { - o.actions = buildUnnamed50(); + o.actions = buildUnnamed52(); o.nextPageToken = 'foo'; } buildCounterListActionsResponse--; @@ -3078,7 +3148,7 @@ api.ListActionsResponse buildListActionsResponse() { void checkListActionsResponse(api.ListActionsResponse o) { buildCounterListActionsResponse++; if (buildCounterListActionsResponse < 3) { - checkUnnamed50(o.actions!); + checkUnnamed52(o.actions!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -3087,23 +3157,23 @@ void checkListActionsResponse(api.ListActionsResponse o) { buildCounterListActionsResponse--; } -core.List buildUnnamed51() => [ +core.List buildUnnamed53() => [ buildConnection(), buildConnection(), ]; -void checkUnnamed51(core.List o) { +void checkUnnamed53(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkConnection(o[0]); checkConnection(o[1]); } -core.List buildUnnamed52() => [ +core.List buildUnnamed54() => [ 'foo', 'foo', ]; -void checkUnnamed52(core.List o) { +void checkUnnamed54(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -3120,9 +3190,9 @@ api.ListConnectionsResponse buildListConnectionsResponse() { final o = api.ListConnectionsResponse(); buildCounterListConnectionsResponse++; if (buildCounterListConnectionsResponse < 3) { - o.connections = buildUnnamed51(); + o.connections = buildUnnamed53(); o.nextPageToken = 'foo'; - o.unreachable = buildUnnamed52(); + o.unreachable = buildUnnamed54(); } buildCounterListConnectionsResponse--; return o; @@ -3131,33 +3201,33 @@ api.ListConnectionsResponse buildListConnectionsResponse() { void 
checkListConnectionsResponse(api.ListConnectionsResponse o) { buildCounterListConnectionsResponse++; if (buildCounterListConnectionsResponse < 3) { - checkUnnamed51(o.connections!); + checkUnnamed53(o.connections!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed52(o.unreachable!); + checkUnnamed54(o.unreachable!); } buildCounterListConnectionsResponse--; } -core.List buildUnnamed53() => [ +core.List buildUnnamed55() => [ buildConnectorVersion(), buildConnectorVersion(), ]; -void checkUnnamed53(core.List o) { +void checkUnnamed55(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkConnectorVersion(o[0]); checkConnectorVersion(o[1]); } -core.List buildUnnamed54() => [ +core.List buildUnnamed56() => [ 'foo', 'foo', ]; -void checkUnnamed54(core.List o) { +void checkUnnamed56(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -3174,9 +3244,9 @@ api.ListConnectorVersionsResponse buildListConnectorVersionsResponse() { final o = api.ListConnectorVersionsResponse(); buildCounterListConnectorVersionsResponse++; if (buildCounterListConnectorVersionsResponse < 3) { - o.connectorVersions = buildUnnamed53(); + o.connectorVersions = buildUnnamed55(); o.nextPageToken = 'foo'; - o.unreachable = buildUnnamed54(); + o.unreachable = buildUnnamed56(); } buildCounterListConnectorVersionsResponse--; return o; @@ -3185,33 +3255,33 @@ api.ListConnectorVersionsResponse buildListConnectorVersionsResponse() { void checkListConnectorVersionsResponse(api.ListConnectorVersionsResponse o) { buildCounterListConnectorVersionsResponse++; if (buildCounterListConnectorVersionsResponse < 3) { - checkUnnamed53(o.connectorVersions!); + checkUnnamed55(o.connectorVersions!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed54(o.unreachable!); + checkUnnamed56(o.unreachable!); } buildCounterListConnectorVersionsResponse--; } -core.List buildUnnamed55() => [ +core.List buildUnnamed57() => [ buildConnector(), buildConnector(), ]; -void checkUnnamed55(core.List o) { +void checkUnnamed57(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkConnector(o[0]); checkConnector(o[1]); } -core.List buildUnnamed56() => [ +core.List buildUnnamed58() => [ 'foo', 'foo', ]; -void checkUnnamed56(core.List o) { +void checkUnnamed58(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -3228,9 +3298,9 @@ api.ListConnectorsResponse buildListConnectorsResponse() { final o = api.ListConnectorsResponse(); buildCounterListConnectorsResponse++; if (buildCounterListConnectorsResponse < 3) { - o.connectors = buildUnnamed55(); + o.connectors = buildUnnamed57(); o.nextPageToken = 'foo'; - o.unreachable = buildUnnamed56(); + o.unreachable = buildUnnamed58(); } buildCounterListConnectorsResponse--; return o; @@ -3239,33 +3309,33 @@ api.ListConnectorsResponse buildListConnectorsResponse() { void checkListConnectorsResponse(api.ListConnectorsResponse o) { buildCounterListConnectorsResponse++; if (buildCounterListConnectorsResponse < 3) { - checkUnnamed55(o.connectors!); + checkUnnamed57(o.connectors!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed56(o.unreachable!); + checkUnnamed58(o.unreachable!); } buildCounterListConnectorsResponse--; } -core.List buildUnnamed57() => [ +core.List buildUnnamed59() => [ buildCustomConnectorVersion(), buildCustomConnectorVersion(), ]; -void checkUnnamed57(core.List o) { +void checkUnnamed59(core.List o) { unittest.expect(o, unittest.hasLength(2)); 
checkCustomConnectorVersion(o[0]); checkCustomConnectorVersion(o[1]); } -core.List buildUnnamed58() => [ +core.List buildUnnamed60() => [ 'foo', 'foo', ]; -void checkUnnamed58(core.List o) { +void checkUnnamed60(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -3283,9 +3353,9 @@ api.ListCustomConnectorVersionsResponse final o = api.ListCustomConnectorVersionsResponse(); buildCounterListCustomConnectorVersionsResponse++; if (buildCounterListCustomConnectorVersionsResponse < 3) { - o.customConnectorVersions = buildUnnamed57(); + o.customConnectorVersions = buildUnnamed59(); o.nextPageToken = 'foo'; - o.unreachable = buildUnnamed58(); + o.unreachable = buildUnnamed60(); } buildCounterListCustomConnectorVersionsResponse--; return o; @@ -3295,33 +3365,33 @@ void checkListCustomConnectorVersionsResponse( api.ListCustomConnectorVersionsResponse o) { buildCounterListCustomConnectorVersionsResponse++; if (buildCounterListCustomConnectorVersionsResponse < 3) { - checkUnnamed57(o.customConnectorVersions!); + checkUnnamed59(o.customConnectorVersions!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed58(o.unreachable!); + checkUnnamed60(o.unreachable!); } buildCounterListCustomConnectorVersionsResponse--; } -core.List buildUnnamed59() => [ +core.List buildUnnamed61() => [ buildCustomConnector(), buildCustomConnector(), ]; -void checkUnnamed59(core.List o) { +void checkUnnamed61(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkCustomConnector(o[0]); checkCustomConnector(o[1]); } -core.List buildUnnamed60() => [ +core.List buildUnnamed62() => [ 'foo', 'foo', ]; -void checkUnnamed60(core.List o) { +void checkUnnamed62(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -3338,9 +3408,9 @@ api.ListCustomConnectorsResponse buildListCustomConnectorsResponse() { final o = api.ListCustomConnectorsResponse(); buildCounterListCustomConnectorsResponse++; if (buildCounterListCustomConnectorsResponse < 3) { - o.customConnectors = buildUnnamed59(); + o.customConnectors = buildUnnamed61(); o.nextPageToken = 'foo'; - o.unreachable = buildUnnamed60(); + o.unreachable = buildUnnamed62(); } buildCounterListCustomConnectorsResponse--; return o; @@ -3349,33 +3419,33 @@ api.ListCustomConnectorsResponse buildListCustomConnectorsResponse() { void checkListCustomConnectorsResponse(api.ListCustomConnectorsResponse o) { buildCounterListCustomConnectorsResponse++; if (buildCounterListCustomConnectorsResponse < 3) { - checkUnnamed59(o.customConnectors!); + checkUnnamed61(o.customConnectors!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed60(o.unreachable!); + checkUnnamed62(o.unreachable!); } buildCounterListCustomConnectorsResponse--; } -core.List buildUnnamed61() => [ +core.List buildUnnamed63() => [ buildEndpointAttachment(), buildEndpointAttachment(), ]; -void checkUnnamed61(core.List o) { +void checkUnnamed63(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkEndpointAttachment(o[0]); checkEndpointAttachment(o[1]); } -core.List buildUnnamed62() => [ +core.List buildUnnamed64() => [ 'foo', 'foo', ]; -void checkUnnamed62(core.List o) { +void checkUnnamed64(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -3392,9 +3462,9 @@ api.ListEndpointAttachmentsResponse buildListEndpointAttachmentsResponse() { final o = api.ListEndpointAttachmentsResponse(); buildCounterListEndpointAttachmentsResponse++; if (buildCounterListEndpointAttachmentsResponse < 3) { 
- o.endpointAttachments = buildUnnamed61(); + o.endpointAttachments = buildUnnamed63(); o.nextPageToken = 'foo'; - o.unreachable = buildUnnamed62(); + o.unreachable = buildUnnamed64(); } buildCounterListEndpointAttachmentsResponse--; return o; @@ -3404,22 +3474,22 @@ void checkListEndpointAttachmentsResponse( api.ListEndpointAttachmentsResponse o) { buildCounterListEndpointAttachmentsResponse++; if (buildCounterListEndpointAttachmentsResponse < 3) { - checkUnnamed61(o.endpointAttachments!); + checkUnnamed63(o.endpointAttachments!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed62(o.unreachable!); + checkUnnamed64(o.unreachable!); } buildCounterListEndpointAttachmentsResponse--; } -core.List buildUnnamed63() => [ +core.List buildUnnamed65() => [ buildRuntimeEntitySchema(), buildRuntimeEntitySchema(), ]; -void checkUnnamed63(core.List o) { +void checkUnnamed65(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkRuntimeEntitySchema(o[0]); checkRuntimeEntitySchema(o[1]); @@ -3430,7 +3500,7 @@ api.ListEntityTypesResponse buildListEntityTypesResponse() { final o = api.ListEntityTypesResponse(); buildCounterListEntityTypesResponse++; if (buildCounterListEntityTypesResponse < 3) { - o.entityTypes = buildUnnamed63(); + o.entityTypes = buildUnnamed65(); o.nextPageToken = 'foo'; } buildCounterListEntityTypesResponse--; @@ -3440,7 +3510,7 @@ api.ListEntityTypesResponse buildListEntityTypesResponse() { void checkListEntityTypesResponse(api.ListEntityTypesResponse o) { buildCounterListEntityTypesResponse++; if (buildCounterListEntityTypesResponse < 3) { - checkUnnamed63(o.entityTypes!); + checkUnnamed65(o.entityTypes!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -3449,23 +3519,23 @@ void checkListEntityTypesResponse(api.ListEntityTypesResponse o) { buildCounterListEntityTypesResponse--; } -core.List buildUnnamed64() => [ +core.List buildUnnamed66() => [ buildEventSubscription(), buildEventSubscription(), ]; -void checkUnnamed64(core.List o) { +void checkUnnamed66(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkEventSubscription(o[0]); checkEventSubscription(o[1]); } -core.List buildUnnamed65() => [ +core.List buildUnnamed67() => [ 'foo', 'foo', ]; -void checkUnnamed65(core.List o) { +void checkUnnamed67(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -3482,9 +3552,9 @@ api.ListEventSubscriptionsResponse buildListEventSubscriptionsResponse() { final o = api.ListEventSubscriptionsResponse(); buildCounterListEventSubscriptionsResponse++; if (buildCounterListEventSubscriptionsResponse < 3) { - o.eventSubscriptions = buildUnnamed64(); + o.eventSubscriptions = buildUnnamed66(); o.nextPageToken = 'foo'; - o.unreachable = buildUnnamed65(); + o.unreachable = buildUnnamed67(); } buildCounterListEventSubscriptionsResponse--; return o; @@ -3493,22 +3563,22 @@ api.ListEventSubscriptionsResponse buildListEventSubscriptionsResponse() { void checkListEventSubscriptionsResponse(api.ListEventSubscriptionsResponse o) { buildCounterListEventSubscriptionsResponse++; if (buildCounterListEventSubscriptionsResponse < 3) { - checkUnnamed64(o.eventSubscriptions!); + checkUnnamed66(o.eventSubscriptions!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed65(o.unreachable!); + checkUnnamed67(o.unreachable!); } buildCounterListEventSubscriptionsResponse--; } -core.List buildUnnamed66() => [ +core.List buildUnnamed68() => [ buildEventType(), buildEventType(), ]; -void checkUnnamed66(core.List o) { 
+void checkUnnamed68(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkEventType(o[0]); checkEventType(o[1]); @@ -3519,7 +3589,7 @@ api.ListEventTypesResponse buildListEventTypesResponse() { final o = api.ListEventTypesResponse(); buildCounterListEventTypesResponse++; if (buildCounterListEventTypesResponse < 3) { - o.eventTypes = buildUnnamed66(); + o.eventTypes = buildUnnamed68(); o.nextPageToken = 'foo'; } buildCounterListEventTypesResponse--; @@ -3529,7 +3599,7 @@ api.ListEventTypesResponse buildListEventTypesResponse() { void checkListEventTypesResponse(api.ListEventTypesResponse o) { buildCounterListEventTypesResponse++; if (buildCounterListEventTypesResponse < 3) { - checkUnnamed66(o.eventTypes!); + checkUnnamed68(o.eventTypes!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -3538,12 +3608,12 @@ void checkListEventTypesResponse(api.ListEventTypesResponse o) { buildCounterListEventTypesResponse--; } -core.List buildUnnamed67() => [ +core.List buildUnnamed69() => [ buildLocation(), buildLocation(), ]; -void checkUnnamed67(core.List o) { +void checkUnnamed69(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkLocation(o[0]); checkLocation(o[1]); @@ -3554,7 +3624,7 @@ api.ListLocationsResponse buildListLocationsResponse() { final o = api.ListLocationsResponse(); buildCounterListLocationsResponse++; if (buildCounterListLocationsResponse < 3) { - o.locations = buildUnnamed67(); + o.locations = buildUnnamed69(); o.nextPageToken = 'foo'; } buildCounterListLocationsResponse--; @@ -3564,7 +3634,7 @@ api.ListLocationsResponse buildListLocationsResponse() { void checkListLocationsResponse(api.ListLocationsResponse o) { buildCounterListLocationsResponse++; if (buildCounterListLocationsResponse < 3) { - checkUnnamed67(o.locations!); + checkUnnamed69(o.locations!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -3573,12 +3643,12 @@ void checkListLocationsResponse(api.ListLocationsResponse o) { buildCounterListLocationsResponse--; } -core.List buildUnnamed68() => [ +core.List buildUnnamed70() => [ buildManagedZone(), buildManagedZone(), ]; -void checkUnnamed68(core.List o) { +void checkUnnamed70(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkManagedZone(o[0]); checkManagedZone(o[1]); @@ -3589,7 +3659,7 @@ api.ListManagedZonesResponse buildListManagedZonesResponse() { final o = api.ListManagedZonesResponse(); buildCounterListManagedZonesResponse++; if (buildCounterListManagedZonesResponse < 3) { - o.managedZones = buildUnnamed68(); + o.managedZones = buildUnnamed70(); o.nextPageToken = 'foo'; } buildCounterListManagedZonesResponse--; @@ -3599,7 +3669,7 @@ api.ListManagedZonesResponse buildListManagedZonesResponse() { void checkListManagedZonesResponse(api.ListManagedZonesResponse o) { buildCounterListManagedZonesResponse++; if (buildCounterListManagedZonesResponse < 3) { - checkUnnamed68(o.managedZones!); + checkUnnamed70(o.managedZones!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -3608,12 +3678,12 @@ void checkListManagedZonesResponse(api.ListManagedZonesResponse o) { buildCounterListManagedZonesResponse--; } -core.List buildUnnamed69() => [ +core.List buildUnnamed71() => [ buildOperation(), buildOperation(), ]; -void checkUnnamed69(core.List o) { +void checkUnnamed71(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkOperation(o[0]); checkOperation(o[1]); @@ -3625,7 +3695,7 @@ api.ListOperationsResponse buildListOperationsResponse() { buildCounterListOperationsResponse++; if 
(buildCounterListOperationsResponse < 3) { o.nextPageToken = 'foo'; - o.operations = buildUnnamed69(); + o.operations = buildUnnamed71(); } buildCounterListOperationsResponse--; return o; @@ -3638,28 +3708,28 @@ void checkListOperationsResponse(api.ListOperationsResponse o) { o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed69(o.operations!); + checkUnnamed71(o.operations!); } buildCounterListOperationsResponse--; } -core.List buildUnnamed70() => [ +core.List buildUnnamed72() => [ buildProvider(), buildProvider(), ]; -void checkUnnamed70(core.List o) { +void checkUnnamed72(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkProvider(o[0]); checkProvider(o[1]); } -core.List buildUnnamed71() => [ +core.List buildUnnamed73() => [ 'foo', 'foo', ]; -void checkUnnamed71(core.List o) { +void checkUnnamed73(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -3677,8 +3747,8 @@ api.ListProvidersResponse buildListProvidersResponse() { buildCounterListProvidersResponse++; if (buildCounterListProvidersResponse < 3) { o.nextPageToken = 'foo'; - o.providers = buildUnnamed70(); - o.unreachable = buildUnnamed71(); + o.providers = buildUnnamed72(); + o.unreachable = buildUnnamed73(); } buildCounterListProvidersResponse--; return o; @@ -3691,18 +3761,18 @@ void checkListProvidersResponse(api.ListProvidersResponse o) { o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed70(o.providers!); - checkUnnamed71(o.unreachable!); + checkUnnamed72(o.providers!); + checkUnnamed73(o.unreachable!); } buildCounterListProvidersResponse--; } -core.List buildUnnamed72() => [ +core.List buildUnnamed74() => [ buildRuntimeActionSchema(), buildRuntimeActionSchema(), ]; -void checkUnnamed72(core.List o) { +void checkUnnamed74(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkRuntimeActionSchema(o[0]); checkRuntimeActionSchema(o[1]); @@ -3714,7 +3784,7 @@ api.ListRuntimeActionSchemasResponse buildListRuntimeActionSchemasResponse() { buildCounterListRuntimeActionSchemasResponse++; if (buildCounterListRuntimeActionSchemasResponse < 3) { o.nextPageToken = 'foo'; - o.runtimeActionSchemas = buildUnnamed72(); + o.runtimeActionSchemas = buildUnnamed74(); } buildCounterListRuntimeActionSchemasResponse--; return o; @@ -3728,17 +3798,17 @@ void checkListRuntimeActionSchemasResponse( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed72(o.runtimeActionSchemas!); + checkUnnamed74(o.runtimeActionSchemas!); } buildCounterListRuntimeActionSchemasResponse--; } -core.List buildUnnamed73() => [ +core.List buildUnnamed75() => [ buildRuntimeEntitySchema(), buildRuntimeEntitySchema(), ]; -void checkUnnamed73(core.List o) { +void checkUnnamed75(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkRuntimeEntitySchema(o[0]); checkRuntimeEntitySchema(o[1]); @@ -3750,7 +3820,7 @@ api.ListRuntimeEntitySchemasResponse buildListRuntimeEntitySchemasResponse() { buildCounterListRuntimeEntitySchemasResponse++; if (buildCounterListRuntimeEntitySchemasResponse < 3) { o.nextPageToken = 'foo'; - o.runtimeEntitySchemas = buildUnnamed73(); + o.runtimeEntitySchemas = buildUnnamed75(); } buildCounterListRuntimeEntitySchemasResponse--; return o; @@ -3764,12 +3834,12 @@ void checkListRuntimeEntitySchemasResponse( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed73(o.runtimeEntitySchemas!); + checkUnnamed75(o.runtimeEntitySchemas!); } buildCounterListRuntimeEntitySchemasResponse--; } -core.Map buildUnnamed74() => { +core.Map buildUnnamed76() => { 'x': { 'list': [1, 2, 3], 
'bool': true, @@ -3782,7 +3852,7 @@ core.Map buildUnnamed74() => { }, }; -void checkUnnamed74(core.Map o) { +void checkUnnamed76(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted8 = (o['x']!) as core.Map; unittest.expect(casted8, unittest.hasLength(3)); @@ -3819,7 +3889,7 @@ api.ListenEventRequest buildListenEventRequest() { final o = api.ListenEventRequest(); buildCounterListenEventRequest++; if (buildCounterListenEventRequest < 3) { - o.payload = buildUnnamed74(); + o.payload = buildUnnamed76(); } buildCounterListenEventRequest--; return o; @@ -3828,7 +3898,7 @@ api.ListenEventRequest buildListenEventRequest() { void checkListenEventRequest(api.ListenEventRequest o) { buildCounterListenEventRequest++; if (buildCounterListenEventRequest < 3) { - checkUnnamed74(o.payload!); + checkUnnamed76(o.payload!); } buildCounterListenEventRequest--; } @@ -3848,12 +3918,12 @@ void checkListenEventResponse(api.ListenEventResponse o) { buildCounterListenEventResponse--; } -core.Map buildUnnamed75() => { +core.Map buildUnnamed77() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed75(core.Map o) { +void checkUnnamed77(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -3865,7 +3935,7 @@ void checkUnnamed75(core.Map o) { ); } -core.Map buildUnnamed76() => { +core.Map buildUnnamed78() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -3878,7 +3948,7 @@ core.Map buildUnnamed76() => { }, }; -void checkUnnamed76(core.Map o) { +void checkUnnamed78(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted10 = (o['x']!) as core.Map; unittest.expect(casted10, unittest.hasLength(3)); @@ -3916,9 +3986,9 @@ api.Location buildLocation() { buildCounterLocation++; if (buildCounterLocation < 3) { o.displayName = 'foo'; - o.labels = buildUnnamed75(); + o.labels = buildUnnamed77(); o.locationId = 'foo'; - o.metadata = buildUnnamed76(); + o.metadata = buildUnnamed78(); o.name = 'foo'; } buildCounterLocation--; @@ -3932,12 +4002,12 @@ void checkLocation(api.Location o) { o.displayName!, unittest.equals('foo'), ); - checkUnnamed75(o.labels!); + checkUnnamed77(o.labels!); unittest.expect( o.locationId!, unittest.equals('foo'), ); - checkUnnamed76(o.metadata!); + checkUnnamed78(o.metadata!); unittest.expect( o.name!, unittest.equals('foo'), @@ -3970,23 +4040,23 @@ void checkLockConfig(api.LockConfig o) { buildCounterLockConfig--; } -core.List buildUnnamed77() => [ +core.List buildUnnamed79() => [ buildFieldComparison(), buildFieldComparison(), ]; -void checkUnnamed77(core.List o) { +void checkUnnamed79(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkFieldComparison(o[0]); checkFieldComparison(o[1]); } -core.List buildUnnamed78() => [ +core.List buildUnnamed80() => [ buildLogicalExpression(), buildLogicalExpression(), ]; -void checkUnnamed78(core.List o) { +void checkUnnamed80(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkLogicalExpression(o[0]); checkLogicalExpression(o[1]); @@ -3997,8 +4067,8 @@ api.LogicalExpression buildLogicalExpression() { final o = api.LogicalExpression(); buildCounterLogicalExpression++; if (buildCounterLogicalExpression < 3) { - o.fieldComparisons = buildUnnamed77(); - o.logicalExpressions = buildUnnamed78(); + o.fieldComparisons = buildUnnamed79(); + o.logicalExpressions = buildUnnamed80(); o.logicalOperator = 'foo'; } buildCounterLogicalExpression--; @@ -4008,8 +4078,8 @@ api.LogicalExpression buildLogicalExpression() { void checkLogicalExpression(api.LogicalExpression o) { buildCounterLogicalExpression++; if 
(buildCounterLogicalExpression < 3) { - checkUnnamed77(o.fieldComparisons!); - checkUnnamed78(o.logicalExpressions!); + checkUnnamed79(o.fieldComparisons!); + checkUnnamed80(o.logicalExpressions!); unittest.expect( o.logicalOperator!, unittest.equals('foo'), @@ -4018,12 +4088,12 @@ void checkLogicalExpression(api.LogicalExpression o) { buildCounterLogicalExpression--; } -core.Map buildUnnamed79() => { +core.Map buildUnnamed81() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed79(core.Map o) { +void checkUnnamed81(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -4043,7 +4113,7 @@ api.ManagedZone buildManagedZone() { o.createTime = 'foo'; o.description = 'foo'; o.dns = 'foo'; - o.labels = buildUnnamed79(); + o.labels = buildUnnamed81(); o.name = 'foo'; o.targetProject = 'foo'; o.targetVpc = 'foo'; @@ -4068,7 +4138,7 @@ void checkManagedZone(api.ManagedZone o) { o.dns!, unittest.equals('foo'), ); - checkUnnamed79(o.labels!); + checkUnnamed81(o.labels!); unittest.expect( o.name!, unittest.equals('foo'), @@ -4089,12 +4159,49 @@ void checkManagedZone(api.ManagedZone o) { buildCounterManagedZone--; } -core.List buildUnnamed80() => [ +core.int buildCounterMarketplaceConnectorDetails = 0; +api.MarketplaceConnectorDetails buildMarketplaceConnectorDetails() { + final o = api.MarketplaceConnectorDetails(); + buildCounterMarketplaceConnectorDetails++; + if (buildCounterMarketplaceConnectorDetails < 3) { + o.marketplaceProduct = 'foo'; + o.marketplaceProductId = 'foo'; + o.marketplaceProductUri = 'foo'; + o.partner = 'foo'; + } + buildCounterMarketplaceConnectorDetails--; + return o; +} + +void checkMarketplaceConnectorDetails(api.MarketplaceConnectorDetails o) { + buildCounterMarketplaceConnectorDetails++; + if (buildCounterMarketplaceConnectorDetails < 3) { + unittest.expect( + o.marketplaceProduct!, + unittest.equals('foo'), + ); + unittest.expect( + o.marketplaceProductId!, + unittest.equals('foo'), + ); + unittest.expect( + o.marketplaceProductUri!, + unittest.equals('foo'), + ); + unittest.expect( + o.partner!, + unittest.equals('foo'), + ); + } + buildCounterMarketplaceConnectorDetails--; +} + +core.List buildUnnamed82() => [ buildMultipleSelectOption(), buildMultipleSelectOption(), ]; -void checkUnnamed80(core.List o) { +void checkUnnamed82(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkMultipleSelectOption(o[0]); checkMultipleSelectOption(o[1]); @@ -4106,7 +4213,7 @@ api.MultipleSelectConfig buildMultipleSelectConfig() { buildCounterMultipleSelectConfig++; if (buildCounterMultipleSelectConfig < 3) { o.allowCustomValues = true; - o.multipleSelectOptions = buildUnnamed80(); + o.multipleSelectOptions = buildUnnamed82(); o.valueSeparator = 'foo'; } buildCounterMultipleSelectConfig--; @@ -4117,7 +4224,7 @@ void checkMultipleSelectConfig(api.MultipleSelectConfig o) { buildCounterMultipleSelectConfig++; if (buildCounterMultipleSelectConfig < 3) { unittest.expect(o.allowCustomValues!, unittest.isTrue); - checkUnnamed80(o.multipleSelectOptions!); + checkUnnamed82(o.multipleSelectOptions!); unittest.expect( o.valueSeparator!, unittest.equals('foo'), @@ -4160,12 +4267,12 @@ void checkMultipleSelectOption(api.MultipleSelectOption o) { buildCounterMultipleSelectOption--; } -core.List buildUnnamed81() => [ +core.List buildUnnamed83() => [ 'foo', 'foo', ]; -void checkUnnamed81(core.List o) { +void checkUnnamed83(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -4182,7 +4289,7 @@ api.NetworkConfig buildNetworkConfig() { 
final o = api.NetworkConfig(); buildCounterNetworkConfig++; if (buildCounterNetworkConfig < 3) { - o.egressIps = buildUnnamed81(); + o.egressIps = buildUnnamed83(); o.egressMode = 'foo'; } buildCounterNetworkConfig--; @@ -4192,7 +4299,7 @@ api.NetworkConfig buildNetworkConfig() { void checkNetworkConfig(api.NetworkConfig o) { buildCounterNetworkConfig++; if (buildCounterNetworkConfig < 3) { - checkUnnamed81(o.egressIps!); + checkUnnamed83(o.egressIps!); unittest.expect( o.egressMode!, unittest.equals('foo'), @@ -4228,12 +4335,12 @@ void checkNodeConfig(api.NodeConfig o) { buildCounterNodeConfig--; } -core.List buildUnnamed82() => [ +core.List buildUnnamed84() => [ 'foo', 'foo', ]; -void checkUnnamed82(core.List o) { +void checkUnnamed84(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -4257,7 +4364,7 @@ api.Oauth2AuthCodeFlow buildOauth2AuthCodeFlow() { o.enablePkce = true; o.pkceVerifier = 'foo'; o.redirectUri = 'foo'; - o.scopes = buildUnnamed82(); + o.scopes = buildUnnamed84(); } buildCounterOauth2AuthCodeFlow--; return o; @@ -4288,17 +4395,17 @@ void checkOauth2AuthCodeFlow(api.Oauth2AuthCodeFlow o) { o.redirectUri!, unittest.equals('foo'), ); - checkUnnamed82(o.scopes!); + checkUnnamed84(o.scopes!); } buildCounterOauth2AuthCodeFlow--; } -core.List buildUnnamed83() => [ +core.List buildUnnamed85() => [ 'foo', 'foo', ]; -void checkUnnamed83(core.List o) { +void checkUnnamed85(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -4317,7 +4424,7 @@ api.Oauth2AuthCodeFlowGoogleManaged buildOauth2AuthCodeFlowGoogleManaged() { if (buildCounterOauth2AuthCodeFlowGoogleManaged < 3) { o.authCode = 'foo'; o.redirectUri = 'foo'; - o.scopes = buildUnnamed83(); + o.scopes = buildUnnamed85(); } buildCounterOauth2AuthCodeFlowGoogleManaged--; return o; @@ -4335,7 +4442,7 @@ void checkOauth2AuthCodeFlowGoogleManaged( o.redirectUri!, unittest.equals('foo'), ); - checkUnnamed83(o.scopes!); + checkUnnamed85(o.scopes!); } buildCounterOauth2AuthCodeFlowGoogleManaged--; } @@ -4385,7 +4492,7 @@ void checkOauth2JwtBearer(api.Oauth2JwtBearer o) { buildCounterOauth2JwtBearer--; } -core.Map buildUnnamed84() => { +core.Map buildUnnamed86() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -4398,7 +4505,7 @@ core.Map buildUnnamed84() => { }, }; -void checkUnnamed84(core.Map o) { +void checkUnnamed86(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted12 = (o['x']!) as core.Map; unittest.expect(casted12, unittest.hasLength(3)); @@ -4430,7 +4537,7 @@ void checkUnnamed84(core.Map o) { ); } -core.Map buildUnnamed85() => { +core.Map buildUnnamed87() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -4443,7 +4550,7 @@ core.Map buildUnnamed85() => { }, }; -void checkUnnamed85(core.Map o) { +void checkUnnamed87(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted14 = (o['x']!) 
as core.Map; unittest.expect(casted14, unittest.hasLength(3)); @@ -4482,9 +4589,9 @@ api.Operation buildOperation() { if (buildCounterOperation < 3) { o.done = true; o.error = buildStatus(); - o.metadata = buildUnnamed84(); + o.metadata = buildUnnamed86(); o.name = 'foo'; - o.response = buildUnnamed85(); + o.response = buildUnnamed87(); } buildCounterOperation--; return o; @@ -4495,33 +4602,119 @@ void checkOperation(api.Operation o) { if (buildCounterOperation < 3) { unittest.expect(o.done!, unittest.isTrue); checkStatus(o.error!); - checkUnnamed84(o.metadata!); + checkUnnamed86(o.metadata!); unittest.expect( o.name!, unittest.equals('foo'), ); - checkUnnamed85(o.response!); + checkUnnamed87(o.response!); } buildCounterOperation--; } -core.List buildUnnamed86() => [ +core.int buildCounterPartnerMetadata = 0; +api.PartnerMetadata buildPartnerMetadata() { + final o = api.PartnerMetadata(); + buildCounterPartnerMetadata++; + if (buildCounterPartnerMetadata < 3) { + o.acceptGcpTos = true; + o.additionalComments = 'foo'; + o.confirmPartnerRequirements = true; + o.demoUri = 'foo'; + o.integrationTemplates = 'foo'; + o.marketplaceProduct = 'foo'; + o.marketplaceProductId = 'foo'; + o.marketplaceProductProjectId = 'foo'; + o.marketplaceProductUri = 'foo'; + o.partner = 'foo'; + o.partnerConnectorDisplayName = 'foo'; + o.publishRequestTime = 'foo'; + o.targetApplication = 'foo'; + o.targetCustomerSegment = 'foo'; + o.useCases = 'foo'; + } + buildCounterPartnerMetadata--; + return o; +} + +void checkPartnerMetadata(api.PartnerMetadata o) { + buildCounterPartnerMetadata++; + if (buildCounterPartnerMetadata < 3) { + unittest.expect(o.acceptGcpTos!, unittest.isTrue); + unittest.expect( + o.additionalComments!, + unittest.equals('foo'), + ); + unittest.expect(o.confirmPartnerRequirements!, unittest.isTrue); + unittest.expect( + o.demoUri!, + unittest.equals('foo'), + ); + unittest.expect( + o.integrationTemplates!, + unittest.equals('foo'), + ); + unittest.expect( + o.marketplaceProduct!, + unittest.equals('foo'), + ); + unittest.expect( + o.marketplaceProductId!, + unittest.equals('foo'), + ); + unittest.expect( + o.marketplaceProductProjectId!, + unittest.equals('foo'), + ); + unittest.expect( + o.marketplaceProductUri!, + unittest.equals('foo'), + ); + unittest.expect( + o.partner!, + unittest.equals('foo'), + ); + unittest.expect( + o.partnerConnectorDisplayName!, + unittest.equals('foo'), + ); + unittest.expect( + o.publishRequestTime!, + unittest.equals('foo'), + ); + unittest.expect( + o.targetApplication!, + unittest.equals('foo'), + ); + unittest.expect( + o.targetCustomerSegment!, + unittest.equals('foo'), + ); + unittest.expect( + o.useCases!, + unittest.equals('foo'), + ); + } + buildCounterPartnerMetadata--; +} + +core.List buildUnnamed88() => [ buildAuditConfig(), buildAuditConfig(), ]; -void checkUnnamed86(core.List o) { +void checkUnnamed88(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkAuditConfig(o[0]); checkAuditConfig(o[1]); } -core.List buildUnnamed87() => [ +core.List buildUnnamed89() => [ buildBinding(), buildBinding(), ]; -void checkUnnamed87(core.List o) { +void checkUnnamed89(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkBinding(o[0]); checkBinding(o[1]); @@ -4532,8 +4725,8 @@ api.Policy buildPolicy() { final o = api.Policy(); buildCounterPolicy++; if (buildCounterPolicy < 3) { - o.auditConfigs = buildUnnamed86(); - o.bindings = buildUnnamed87(); + o.auditConfigs = buildUnnamed88(); + o.bindings = buildUnnamed89(); o.etag = 'foo'; o.version = 
42; } @@ -4544,8 +4737,8 @@ api.Policy buildPolicy() { void checkPolicy(api.Policy o) { buildCounterPolicy++; if (buildCounterPolicy < 3) { - checkUnnamed86(o.auditConfigs!); - checkUnnamed87(o.bindings!); + checkUnnamed88(o.auditConfigs!); + checkUnnamed89(o.bindings!); unittest.expect( o.etag!, unittest.equals('foo'), @@ -4558,12 +4751,12 @@ void checkPolicy(api.Policy o) { buildCounterPolicy--; } -core.Map buildUnnamed88() => { +core.Map buildUnnamed90() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed88(core.Map o) { +void checkUnnamed90(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -4585,7 +4778,7 @@ api.Provider buildProvider() { o.displayName = 'foo'; o.documentationUri = 'foo'; o.externalUri = 'foo'; - o.labels = buildUnnamed88(); + o.labels = buildUnnamed90(); o.launchStage = 'foo'; o.name = 'foo'; o.updateTime = 'foo'; @@ -4618,7 +4811,7 @@ void checkProvider(api.Provider o) { o.externalUri!, unittest.equals('foo'), ); - checkUnnamed88(o.labels!); + checkUnnamed90(o.labels!); unittest.expect( o.launchStage!, unittest.equals('foo'), @@ -4639,6 +4832,64 @@ void checkProvider(api.Provider o) { buildCounterProvider--; } +core.int buildCounterPublishCustomConnectorVersionRequest = 0; +api.PublishCustomConnectorVersionRequest + buildPublishCustomConnectorVersionRequest() { + final o = api.PublishCustomConnectorVersionRequest(); + buildCounterPublishCustomConnectorVersionRequest++; + if (buildCounterPublishCustomConnectorVersionRequest < 3) { + o.partnerMetadata = buildPartnerMetadata(); + } + buildCounterPublishCustomConnectorVersionRequest--; + return o; +} + +void checkPublishCustomConnectorVersionRequest( + api.PublishCustomConnectorVersionRequest o) { + buildCounterPublishCustomConnectorVersionRequest++; + if (buildCounterPublishCustomConnectorVersionRequest < 3) { + checkPartnerMetadata(o.partnerMetadata!); + } + buildCounterPublishCustomConnectorVersionRequest--; +} + +core.int buildCounterPublishStatus = 0; +api.PublishStatus buildPublishStatus() { + final o = api.PublishStatus(); + buildCounterPublishStatus++; + if (buildCounterPublishStatus < 3) { + o.publishState = 'foo'; + o.publishTime = 'foo'; + o.publishedAs = 'foo'; + o.publishedSource = 'foo'; + } + buildCounterPublishStatus--; + return o; +} + +void checkPublishStatus(api.PublishStatus o) { + buildCounterPublishStatus++; + if (buildCounterPublishStatus < 3) { + unittest.expect( + o.publishState!, + unittest.equals('foo'), + ); + unittest.expect( + o.publishTime!, + unittest.equals('foo'), + ); + unittest.expect( + o.publishedAs!, + unittest.equals('foo'), + ); + unittest.expect( + o.publishedSource!, + unittest.equals('foo'), + ); + } + buildCounterPublishStatus--; +} + core.int buildCounterRefreshConnectionSchemaMetadataRequest = 0; api.RefreshConnectionSchemaMetadataRequest buildRefreshConnectionSchemaMetadataRequest() { @@ -4850,12 +5101,12 @@ void checkRetryEventSubscriptionRequest(api.RetryEventSubscriptionRequest o) { buildCounterRetryEventSubscriptionRequest--; } -core.List buildUnnamed89() => [ +core.List buildUnnamed91() => [ 'foo', 'foo', ]; -void checkUnnamed89(core.List o) { +void checkUnnamed91(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -4875,7 +5126,7 @@ api.RoleGrant buildRoleGrant() { o.helperTextTemplate = 'foo'; o.principal = 'foo'; o.resource = buildResource(); - o.roles = buildUnnamed89(); + o.roles = buildUnnamed91(); } buildCounterRoleGrant--; return o; @@ -4893,28 +5144,28 @@ void checkRoleGrant(api.RoleGrant o) { 
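As a hedged illustration of the new publish surface exercised by this regenerated test file (not part of the generated diff itself), application code might drive the flow roughly as below. The import path, auth helper, scope string, and example field values are assumptions; ConnectorsApi, PublishCustomConnectorVersionRequest, PartnerMetadata, and the customConnectorVersions.publish method are the types and call shape shown in the diff above.

import 'package:googleapis/connectors/v1.dart' as connectors;
import 'package:googleapis_auth/auth_io.dart' as auth;

Future<void> publishCustomConnectorVersion(String versionName) async {
  // Assumed auth setup; any authenticated http.Client would work here.
  final client = await auth.clientViaApplicationDefaultCredentials(
      scopes: ['https://www.googleapis.com/auth/cloud-platform']);
  try {
    final api = connectors.ConnectorsApi(client);
    // Fields mirror those set by buildPartnerMetadata() in the test above;
    // the concrete values are hypothetical placeholders.
    final request = connectors.PublishCustomConnectorVersionRequest()
      ..partnerMetadata = (connectors.PartnerMetadata()
        ..acceptGcpTos = true
        ..confirmPartnerRequirements = true
        ..partner = 'example-partner'
        ..partnerConnectorDisplayName = 'Example connector');
    // versionName is the fully qualified custom connector version resource name.
    final operation = await api
        .projects.locations.customConnectors.customConnectorVersions
        .publish(request, versionName);
    print('publish long-running operation: ${operation.name}');
  } finally {
    client.close();
  }
}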
unittest.equals('foo'), ); checkResource(o.resource!); - checkUnnamed89(o.roles!); + checkUnnamed91(o.roles!); } buildCounterRoleGrant--; } -core.List buildUnnamed90() => [ +core.List buildUnnamed92() => [ buildInputParameter(), buildInputParameter(), ]; -void checkUnnamed90(core.List o) { +void checkUnnamed92(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkInputParameter(o[0]); checkInputParameter(o[1]); } -core.List buildUnnamed91() => [ +core.List buildUnnamed93() => [ buildResultMetadata(), buildResultMetadata(), ]; -void checkUnnamed91(core.List o) { +void checkUnnamed93(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkResultMetadata(o[0]); checkResultMetadata(o[1]); @@ -4929,10 +5180,10 @@ api.RuntimeActionSchema buildRuntimeActionSchema() { o.description = 'foo'; o.displayName = 'foo'; o.inputJsonSchema = buildJsonSchema(); - o.inputParameters = buildUnnamed90(); + o.inputParameters = buildUnnamed92(); o.inputSchemaAsString = 'foo'; o.resultJsonSchema = buildJsonSchema(); - o.resultMetadata = buildUnnamed91(); + o.resultMetadata = buildUnnamed93(); o.resultSchemaAsString = 'foo'; } buildCounterRuntimeActionSchema--; @@ -4955,13 +5206,13 @@ void checkRuntimeActionSchema(api.RuntimeActionSchema o) { unittest.equals('foo'), ); checkJsonSchema(o.inputJsonSchema!); - checkUnnamed90(o.inputParameters!); + checkUnnamed92(o.inputParameters!); unittest.expect( o.inputSchemaAsString!, unittest.equals('foo'), ); checkJsonSchema(o.resultJsonSchema!); - checkUnnamed91(o.resultMetadata!); + checkUnnamed93(o.resultMetadata!); unittest.expect( o.resultSchemaAsString!, unittest.equals('foo'), @@ -5037,23 +5288,23 @@ void checkRuntimeConfig(api.RuntimeConfig o) { buildCounterRuntimeConfig--; } -core.List buildUnnamed92() => [ +core.List buildUnnamed94() => [ buildField(), buildField(), ]; -void checkUnnamed92(core.List o) { +void checkUnnamed94(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkField(o[0]); checkField(o[1]); } -core.List buildUnnamed93() => [ +core.List buildUnnamed95() => [ 'foo', 'foo', ]; -void checkUnnamed93(core.List o) { +void checkUnnamed95(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -5071,9 +5322,9 @@ api.RuntimeEntitySchema buildRuntimeEntitySchema() { buildCounterRuntimeEntitySchema++; if (buildCounterRuntimeEntitySchema < 3) { o.entity = 'foo'; - o.fields = buildUnnamed92(); + o.fields = buildUnnamed94(); o.jsonSchema = buildJsonSchema(); - o.operations = buildUnnamed93(); + o.operations = buildUnnamed95(); } buildCounterRuntimeEntitySchema--; return o; @@ -5086,9 +5337,9 @@ void checkRuntimeEntitySchema(api.RuntimeEntitySchema o) { o.entity!, unittest.equals('foo'), ); - checkUnnamed92(o.fields!); + checkUnnamed94(o.fields!); checkJsonSchema(o.jsonSchema!); - checkUnnamed93(o.operations!); + checkUnnamed95(o.operations!); } buildCounterRuntimeEntitySchema--; } @@ -5137,23 +5388,23 @@ void checkSearchConnectionInstance(api.SearchConnectionInstance o) { buildCounterSearchConnectionInstance--; } -core.List buildUnnamed94() => [ +core.List buildUnnamed96() => [ buildSearchConnectionInstance(), buildSearchConnectionInstance(), ]; -void checkUnnamed94(core.List o) { +void checkUnnamed96(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkSearchConnectionInstance(o[0]); checkSearchConnectionInstance(o[1]); } -core.List buildUnnamed95() => [ +core.List buildUnnamed97() => [ 'foo', 'foo', ]; -void checkUnnamed95(core.List o) { +void checkUnnamed97(core.List o) { unittest.expect(o, 
unittest.hasLength(2)); unittest.expect( o[0], @@ -5170,9 +5421,9 @@ api.SearchConnectionsResponse buildSearchConnectionsResponse() { final o = api.SearchConnectionsResponse(); buildCounterSearchConnectionsResponse++; if (buildCounterSearchConnectionsResponse < 3) { - o.connections = buildUnnamed94(); + o.connections = buildUnnamed96(); o.nextPageToken = 'foo'; - o.unreachable = buildUnnamed95(); + o.unreachable = buildUnnamed97(); } buildCounterSearchConnectionsResponse--; return o; @@ -5181,12 +5432,12 @@ api.SearchConnectionsResponse buildSearchConnectionsResponse() { void checkSearchConnectionsResponse(api.SearchConnectionsResponse o) { buildCounterSearchConnectionsResponse++; if (buildCounterSearchConnectionsResponse < 3) { - checkUnnamed94(o.connections!); + checkUnnamed96(o.connections!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed95(o.unreachable!); + checkUnnamed97(o.unreachable!); } buildCounterSearchConnectionsResponse--; } @@ -5326,12 +5577,12 @@ void checkSshPublicKey(api.SshPublicKey o) { buildCounterSshPublicKey--; } -core.List buildUnnamed96() => [ +core.List buildUnnamed98() => [ buildConfigVariable(), buildConfigVariable(), ]; -void checkUnnamed96(core.List o) { +void checkUnnamed98(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkConfigVariable(o[0]); checkConfigVariable(o[1]); @@ -5342,7 +5593,7 @@ api.SslConfig buildSslConfig() { final o = api.SslConfig(); buildCounterSslConfig++; if (buildCounterSslConfig < 3) { - o.additionalVariables = buildUnnamed96(); + o.additionalVariables = buildUnnamed98(); o.clientCertType = 'foo'; o.clientCertificate = buildSecret(); o.clientPrivateKey = buildSecret(); @@ -5360,7 +5611,7 @@ api.SslConfig buildSslConfig() { void checkSslConfig(api.SslConfig o) { buildCounterSslConfig++; if (buildCounterSslConfig < 3) { - checkUnnamed96(o.additionalVariables!); + checkUnnamed98(o.additionalVariables!); unittest.expect( o.clientCertType!, unittest.equals('foo'), @@ -5386,23 +5637,23 @@ void checkSslConfig(api.SslConfig o) { buildCounterSslConfig--; } -core.List buildUnnamed97() => [ +core.List buildUnnamed99() => [ buildConfigVariableTemplate(), buildConfigVariableTemplate(), ]; -void checkUnnamed97(core.List o) { +void checkUnnamed99(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkConfigVariableTemplate(o[0]); checkConfigVariableTemplate(o[1]); } -core.List buildUnnamed98() => [ +core.List buildUnnamed100() => [ 'foo', 'foo', ]; -void checkUnnamed98(core.List o) { +void checkUnnamed100(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -5414,12 +5665,12 @@ void checkUnnamed98(core.List o) { ); } -core.List buildUnnamed99() => [ +core.List buildUnnamed101() => [ 'foo', 'foo', ]; -void checkUnnamed99(core.List o) { +void checkUnnamed101(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -5436,10 +5687,10 @@ api.SslConfigTemplate buildSslConfigTemplate() { final o = api.SslConfigTemplate(); buildCounterSslConfigTemplate++; if (buildCounterSslConfigTemplate < 3) { - o.additionalVariables = buildUnnamed97(); - o.clientCertType = buildUnnamed98(); + o.additionalVariables = buildUnnamed99(); + o.clientCertType = buildUnnamed100(); o.isTlsMandatory = true; - o.serverCertType = buildUnnamed99(); + o.serverCertType = buildUnnamed101(); o.sslType = 'foo'; } buildCounterSslConfigTemplate--; @@ -5449,10 +5700,10 @@ api.SslConfigTemplate buildSslConfigTemplate() { void checkSslConfigTemplate(api.SslConfigTemplate o) { 
buildCounterSslConfigTemplate++; if (buildCounterSslConfigTemplate < 3) { - checkUnnamed97(o.additionalVariables!); - checkUnnamed98(o.clientCertType!); + checkUnnamed99(o.additionalVariables!); + checkUnnamed100(o.clientCertType!); unittest.expect(o.isTlsMandatory!, unittest.isTrue); - checkUnnamed99(o.serverCertType!); + checkUnnamed101(o.serverCertType!); unittest.expect( o.sslType!, unittest.equals('foo'), @@ -5505,7 +5756,7 @@ void checkStandardEntity(api.StandardEntity o) { buildCounterStandardEntity--; } -core.Map buildUnnamed100() => { +core.Map buildUnnamed102() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -5518,7 +5769,7 @@ core.Map buildUnnamed100() => { }, }; -void checkUnnamed100(core.Map o) { +void checkUnnamed102(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted17 = (o['x']!) as core.Map; unittest.expect(casted17, unittest.hasLength(3)); @@ -5550,15 +5801,15 @@ void checkUnnamed100(core.Map o) { ); } -core.List> buildUnnamed101() => [ - buildUnnamed100(), - buildUnnamed100(), +core.List> buildUnnamed103() => [ + buildUnnamed102(), + buildUnnamed102(), ]; -void checkUnnamed101(core.List> o) { +void checkUnnamed103(core.List> o) { unittest.expect(o, unittest.hasLength(2)); - checkUnnamed100(o[0]); - checkUnnamed100(o[1]); + checkUnnamed102(o[0]); + checkUnnamed102(o[1]); } core.int buildCounterStatus = 0; @@ -5567,7 +5818,7 @@ api.Status buildStatus() { buildCounterStatus++; if (buildCounterStatus < 3) { o.code = 42; - o.details = buildUnnamed101(); + o.details = buildUnnamed103(); o.message = 'foo'; } buildCounterStatus--; @@ -5581,7 +5832,7 @@ void checkStatus(api.Status o) { o.code!, unittest.equals(42), ); - checkUnnamed101(o.details!); + checkUnnamed103(o.details!); unittest.expect( o.message!, unittest.equals('foo'), @@ -5613,12 +5864,12 @@ void checkSupportedRuntimeFeatures(api.SupportedRuntimeFeatures o) { buildCounterSupportedRuntimeFeatures--; } -core.List buildUnnamed102() => [ +core.List buildUnnamed104() => [ 'foo', 'foo', ]; -void checkUnnamed102(core.List o) { +void checkUnnamed104(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -5635,7 +5886,7 @@ api.TestIamPermissionsRequest buildTestIamPermissionsRequest() { final o = api.TestIamPermissionsRequest(); buildCounterTestIamPermissionsRequest++; if (buildCounterTestIamPermissionsRequest < 3) { - o.permissions = buildUnnamed102(); + o.permissions = buildUnnamed104(); } buildCounterTestIamPermissionsRequest--; return o; @@ -5644,17 +5895,17 @@ api.TestIamPermissionsRequest buildTestIamPermissionsRequest() { void checkTestIamPermissionsRequest(api.TestIamPermissionsRequest o) { buildCounterTestIamPermissionsRequest++; if (buildCounterTestIamPermissionsRequest < 3) { - checkUnnamed102(o.permissions!); + checkUnnamed104(o.permissions!); } buildCounterTestIamPermissionsRequest--; } -core.List buildUnnamed103() => [ +core.List buildUnnamed105() => [ 'foo', 'foo', ]; -void checkUnnamed103(core.List o) { +void checkUnnamed105(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -5671,7 +5922,7 @@ api.TestIamPermissionsResponse buildTestIamPermissionsResponse() { final o = api.TestIamPermissionsResponse(); buildCounterTestIamPermissionsResponse++; if (buildCounterTestIamPermissionsResponse < 3) { - o.permissions = buildUnnamed103(); + o.permissions = buildUnnamed105(); } buildCounterTestIamPermissionsResponse--; return o; @@ -5680,7 +5931,7 @@ api.TestIamPermissionsResponse buildTestIamPermissionsResponse() { void 
checkTestIamPermissionsResponse(api.TestIamPermissionsResponse o) { buildCounterTestIamPermissionsResponse++; if (buildCounterTestIamPermissionsResponse < 3) { - checkUnnamed103(o.permissions!); + checkUnnamed105(o.permissions!); } buildCounterTestIamPermissionsResponse--; } @@ -5767,12 +6018,12 @@ void checkValidateCustomConnectorSpecResponse( buildCounterValidateCustomConnectorSpecResponse--; } -core.List buildUnnamed104() => [ +core.List buildUnnamed106() => [ buildConfigVariable(), buildConfigVariable(), ]; -void checkUnnamed104(core.List o) { +void checkUnnamed106(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkConfigVariable(o[0]); checkConfigVariable(o[1]); @@ -5783,7 +6034,7 @@ api.WebhookData buildWebhookData() { final o = api.WebhookData(); buildCounterWebhookData++; if (buildCounterWebhookData < 3) { - o.additionalVariables = buildUnnamed104(); + o.additionalVariables = buildUnnamed106(); o.createTime = 'foo'; o.id = 'foo'; o.name = 'foo'; @@ -5797,7 +6048,7 @@ api.WebhookData buildWebhookData() { void checkWebhookData(api.WebhookData o) { buildCounterWebhookData++; if (buildCounterWebhookData < 3) { - checkUnnamed104(o.additionalVariables!); + checkUnnamed106(o.additionalVariables!); unittest.expect( o.createTime!, unittest.equals('foo'), @@ -5822,6 +6073,53 @@ void checkWebhookData(api.WebhookData o) { buildCounterWebhookData--; } +core.List buildUnnamed107() => [ + buildWebhookData(), + buildWebhookData(), + ]; + +void checkUnnamed107(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkWebhookData(o[0]); + checkWebhookData(o[1]); +} + +core.int buildCounterWebhookSubscriptions = 0; +api.WebhookSubscriptions buildWebhookSubscriptions() { + final o = api.WebhookSubscriptions(); + buildCounterWebhookSubscriptions++; + if (buildCounterWebhookSubscriptions < 3) { + o.webhookData = buildUnnamed107(); + } + buildCounterWebhookSubscriptions--; + return o; +} + +void checkWebhookSubscriptions(api.WebhookSubscriptions o) { + buildCounterWebhookSubscriptions++; + if (buildCounterWebhookSubscriptions < 3) { + checkUnnamed107(o.webhookData!); + } + buildCounterWebhookSubscriptions--; +} + +core.int buildCounterWithdrawCustomConnectorVersionRequest = 0; +api.WithdrawCustomConnectorVersionRequest + buildWithdrawCustomConnectorVersionRequest() { + final o = api.WithdrawCustomConnectorVersionRequest(); + buildCounterWithdrawCustomConnectorVersionRequest++; + if (buildCounterWithdrawCustomConnectorVersionRequest < 3) {} + buildCounterWithdrawCustomConnectorVersionRequest--; + return o; +} + +void checkWithdrawCustomConnectorVersionRequest( + api.WithdrawCustomConnectorVersionRequest o) { + buildCounterWithdrawCustomConnectorVersionRequest++; + if (buildCounterWithdrawCustomConnectorVersionRequest < 3) {} + buildCounterWithdrawCustomConnectorVersionRequest--; +} + void main() { unittest.group('obj-schema-AuditConfig', () { unittest.test('to-json--from-json', () async { @@ -6333,6 +6631,16 @@ void main() { }); }); + unittest.group('obj-schema-GSUtil', () { + unittest.test('to-json--from-json', () async { + final o = buildGSUtil(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.GSUtil.fromJson(oJson as core.Map); + checkGSUtil(od); + }); + }); + unittest.group('obj-schema-HPAConfig', () { unittest.test('to-json--from-json', () async { final o = buildHPAConfig(); @@ -6622,6 +6930,16 @@ void main() { }); }); + unittest.group('obj-schema-MarketplaceConnectorDetails', () { + unittest.test('to-json--from-json', () async { + final o = 
buildMarketplaceConnectorDetails(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.MarketplaceConnectorDetails.fromJson( + oJson as core.Map); + checkMarketplaceConnectorDetails(od); + }); + }); + unittest.group('obj-schema-MultipleSelectConfig', () { unittest.test('to-json--from-json', () async { final o = buildMultipleSelectConfig(); @@ -6712,6 +7030,16 @@ void main() { }); }); + unittest.group('obj-schema-PartnerMetadata', () { + unittest.test('to-json--from-json', () async { + final o = buildPartnerMetadata(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.PartnerMetadata.fromJson( + oJson as core.Map); + checkPartnerMetadata(od); + }); + }); + unittest.group('obj-schema-Policy', () { unittest.test('to-json--from-json', () async { final o = buildPolicy(); @@ -6732,6 +7060,26 @@ void main() { }); }); + unittest.group('obj-schema-PublishCustomConnectorVersionRequest', () { + unittest.test('to-json--from-json', () async { + final o = buildPublishCustomConnectorVersionRequest(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.PublishCustomConnectorVersionRequest.fromJson( + oJson as core.Map); + checkPublishCustomConnectorVersionRequest(od); + }); + }); + + unittest.group('obj-schema-PublishStatus', () { + unittest.test('to-json--from-json', () async { + final o = buildPublishStatus(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.PublishStatus.fromJson( + oJson as core.Map); + checkPublishStatus(od); + }); + }); + unittest.group('obj-schema-RefreshConnectionSchemaMetadataRequest', () { unittest.test('to-json--from-json', () async { final o = buildRefreshConnectionSchemaMetadataRequest(); @@ -7052,6 +7400,26 @@ void main() { }); }); + unittest.group('obj-schema-WebhookSubscriptions', () { + unittest.test('to-json--from-json', () async { + final o = buildWebhookSubscriptions(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.WebhookSubscriptions.fromJson( + oJson as core.Map); + checkWebhookSubscriptions(od); + }); + }); + + unittest.group('obj-schema-WithdrawCustomConnectorVersionRequest', () { + unittest.test('to-json--from-json', () async { + final o = buildWithdrawCustomConnectorVersionRequest(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.WithdrawCustomConnectorVersionRequest.fromJson( + oJson as core.Map); + checkWithdrawCustomConnectorVersionRequest(od); + }); + }); + unittest.group('resource-ProjectsLocationsResource', () { unittest.test('method--get', () async { final mock = HttpServerMock(); @@ -9177,6 +9545,130 @@ void main() { await res.deprecate(arg_request, arg_name, $fields: arg_$fields); checkOperation(response as api.Operation); }); + + unittest.test('method--publish', () async { + final mock = HttpServerMock(); + final res = api.ConnectorsApi(mock) + .projects + .locations + .customConnectors + .customConnectorVersions; + final arg_request = buildPublishCustomConnectorVersionRequest(); + final arg_name = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.PublishCustomConnectorVersionRequest.fromJson( + json as core.Map); + checkPublishCustomConnectorVersionRequest(obj); + + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, 
pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildOperation()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = + await res.publish(arg_request, arg_name, $fields: arg_$fields); + checkOperation(response as api.Operation); + }); + + unittest.test('method--withdraw', () async { + final mock = HttpServerMock(); + final res = api.ConnectorsApi(mock) + .projects + .locations + .customConnectors + .customConnectorVersions; + final arg_request = buildWithdrawCustomConnectorVersionRequest(); + final arg_name = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.WithdrawCustomConnectorVersionRequest.fromJson( + json as core.Map); + checkWithdrawCustomConnectorVersionRequest(obj); + + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildOperation()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = + await res.withdraw(arg_request, arg_name, $fields: arg_$fields); + checkOperation(response as api.Operation); + }); }); unittest.group('resource-ProjectsLocationsEndpointAttachmentsResource', () { diff --git a/generated/googleapis/test/contactcenterinsights/v1_test.dart b/generated/googleapis/test/contactcenterinsights/v1_test.dart index 521efaa4d..44d1b65f0 100644 --- a/generated/googleapis/test/contactcenterinsights/v1_test.dart +++ b/generated/googleapis/test/contactcenterinsights/v1_test.dart @@ -1523,6 +1523,23 @@ void checkGoogleCloudContactcenterinsightsV1ConversationQualityMetadata( buildCounterGoogleCloudContactcenterinsightsV1ConversationQualityMetadata--; } +core.List buildUnnamed20() => [ + 'foo', + 'foo', + ]; + +void checkUnnamed20(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o[0], + unittest.equals('foo'), + ); + unittest.expect( + o[1], + 
unittest.equals('foo'), + ); +} + core.int buildCounterGoogleCloudContactcenterinsightsV1ConversationQualityMetadataAgentInfo = 0; @@ -1534,9 +1551,12 @@ api.GoogleCloudContactcenterinsightsV1ConversationQualityMetadataAgentInfo if (buildCounterGoogleCloudContactcenterinsightsV1ConversationQualityMetadataAgentInfo < 3) { o.agentId = 'foo'; + o.agentType = 'foo'; o.displayName = 'foo'; o.dispositionCode = 'foo'; + o.location = 'foo'; o.team = 'foo'; + o.teams = buildUnnamed20(); } buildCounterGoogleCloudContactcenterinsightsV1ConversationQualityMetadataAgentInfo--; return o; @@ -1552,6 +1572,10 @@ void checkGoogleCloudContactcenterinsightsV1ConversationQualityMetadataAgentInfo o.agentId!, unittest.equals('foo'), ); + unittest.expect( + o.agentType!, + unittest.equals('foo'), + ); unittest.expect( o.displayName!, unittest.equals('foo'), @@ -1560,20 +1584,25 @@ void checkGoogleCloudContactcenterinsightsV1ConversationQualityMetadataAgentInfo o.dispositionCode!, unittest.equals('foo'), ); + unittest.expect( + o.location!, + unittest.equals('foo'), + ); unittest.expect( o.team!, unittest.equals('foo'), ); + checkUnnamed20(o.teams!); } buildCounterGoogleCloudContactcenterinsightsV1ConversationQualityMetadataAgentInfo--; } -core.Map buildUnnamed20() => { +core.Map buildUnnamed21() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed20(core.Map o) { +void checkUnnamed21(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -1585,12 +1614,12 @@ void checkUnnamed20(core.Map o) { ); } -core.Map buildUnnamed21() => { +core.Map buildUnnamed22() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed21(core.Map o) { +void checkUnnamed22(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -1615,9 +1644,9 @@ api.GoogleCloudContactcenterinsightsV1ConversationSummarizationSuggestionData o.answerRecord = 'foo'; o.confidence = 42.0; o.conversationModel = 'foo'; - o.metadata = buildUnnamed20(); + o.metadata = buildUnnamed21(); o.text = 'foo'; - o.textSections = buildUnnamed21(); + o.textSections = buildUnnamed22(); } buildCounterGoogleCloudContactcenterinsightsV1ConversationSummarizationSuggestionData--; return o; @@ -1641,12 +1670,12 @@ void checkGoogleCloudContactcenterinsightsV1ConversationSummarizationSuggestionD o.conversationModel!, unittest.equals('foo'), ); - checkUnnamed20(o.metadata!); + checkUnnamed21(o.metadata!); unittest.expect( o.text!, unittest.equals('foo'), ); - checkUnnamed21(o.textSections!); + checkUnnamed22(o.textSections!); } buildCounterGoogleCloudContactcenterinsightsV1ConversationSummarizationSuggestionData--; } @@ -1654,12 +1683,12 @@ void checkGoogleCloudContactcenterinsightsV1ConversationSummarizationSuggestionD core.List< api .GoogleCloudContactcenterinsightsV1ConversationTranscriptTranscriptSegment> - buildUnnamed22() => [ + buildUnnamed23() => [ buildGoogleCloudContactcenterinsightsV1ConversationTranscriptTranscriptSegment(), buildGoogleCloudContactcenterinsightsV1ConversationTranscriptTranscriptSegment(), ]; -void checkUnnamed22( +void checkUnnamed23( core.List< api .GoogleCloudContactcenterinsightsV1ConversationTranscriptTranscriptSegment> @@ -1679,7 +1708,7 @@ api.GoogleCloudContactcenterinsightsV1ConversationTranscript buildCounterGoogleCloudContactcenterinsightsV1ConversationTranscript++; if (buildCounterGoogleCloudContactcenterinsightsV1ConversationTranscript < 3) { - o.transcriptSegments = buildUnnamed22(); + o.transcriptSegments = buildUnnamed23(); } 
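A minimal sketch (an assumption, not part of the diff) of how the AgentInfo fields covered by the regenerated contactcenterinsights test above could be populated; the import path and field values are placeholders, while agentType, location, and teams are the fields added in this revision.

import 'package:googleapis/contactcenterinsights/v1.dart' as insights;

insights.GoogleCloudContactcenterinsightsV1ConversationQualityMetadataAgentInfo
    buildExampleAgentInfo() => insights
            .GoogleCloudContactcenterinsightsV1ConversationQualityMetadataAgentInfo()
          ..agentId = 'agent-123'
          // Placeholder string; real values follow the API's agent type enum.
          ..agentType = 'HUMAN_AGENT'
          ..displayName = 'Alex Example'
          ..location = 'us-central1'
          ..team = 'billing'
          ..teams = ['billing', 'retention'];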
buildCounterGoogleCloudContactcenterinsightsV1ConversationTranscript--; return o; @@ -1690,7 +1719,7 @@ void checkGoogleCloudContactcenterinsightsV1ConversationTranscript( buildCounterGoogleCloudContactcenterinsightsV1ConversationTranscript++; if (buildCounterGoogleCloudContactcenterinsightsV1ConversationTranscript < 3) { - checkUnnamed22(o.transcriptSegments!); + checkUnnamed23(o.transcriptSegments!); } buildCounterGoogleCloudContactcenterinsightsV1ConversationTranscript--; } @@ -1698,12 +1727,12 @@ void checkGoogleCloudContactcenterinsightsV1ConversationTranscript( core.List< api .GoogleCloudContactcenterinsightsV1ConversationTranscriptTranscriptSegmentWordInfo> - buildUnnamed23() => [ + buildUnnamed24() => [ buildGoogleCloudContactcenterinsightsV1ConversationTranscriptTranscriptSegmentWordInfo(), buildGoogleCloudContactcenterinsightsV1ConversationTranscriptTranscriptSegmentWordInfo(), ]; -void checkUnnamed23( +void checkUnnamed24( core.List< api .GoogleCloudContactcenterinsightsV1ConversationTranscriptTranscriptSegmentWordInfo> @@ -1735,7 +1764,7 @@ api.GoogleCloudContactcenterinsightsV1ConversationTranscriptTranscriptSegment buildGoogleCloudContactcenterinsightsV1ConversationParticipant(); o.sentiment = buildGoogleCloudContactcenterinsightsV1SentimentData(); o.text = 'foo'; - o.words = buildUnnamed23(); + o.words = buildUnnamed24(); } buildCounterGoogleCloudContactcenterinsightsV1ConversationTranscriptTranscriptSegment--; return o; @@ -1772,7 +1801,7 @@ void checkGoogleCloudContactcenterinsightsV1ConversationTranscriptTranscriptSegm o.text!, unittest.equals('foo'), ); - checkUnnamed23(o.words!); + checkUnnamed24(o.words!); } buildCounterGoogleCloudContactcenterinsightsV1ConversationTranscriptTranscriptSegment--; } @@ -2215,12 +2244,12 @@ void checkGoogleCloudContactcenterinsightsV1EncryptionSpec( buildCounterGoogleCloudContactcenterinsightsV1EncryptionSpec--; } -core.Map buildUnnamed24() => { +core.Map buildUnnamed25() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed24(core.Map o) { +void checkUnnamed25(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -2239,7 +2268,7 @@ api.GoogleCloudContactcenterinsightsV1Entity buildCounterGoogleCloudContactcenterinsightsV1Entity++; if (buildCounterGoogleCloudContactcenterinsightsV1Entity < 3) { o.displayName = 'foo'; - o.metadata = buildUnnamed24(); + o.metadata = buildUnnamed25(); o.salience = 42.0; o.sentiment = buildGoogleCloudContactcenterinsightsV1SentimentData(); o.type = 'foo'; @@ -2256,7 +2285,7 @@ void checkGoogleCloudContactcenterinsightsV1Entity( o.displayName!, unittest.equals('foo'), ); - checkUnnamed24(o.metadata!); + checkUnnamed25(o.metadata!); unittest.expect( o.salience!, unittest.equals(42.0), @@ -2469,12 +2498,12 @@ void checkGoogleCloudContactcenterinsightsV1ExportIssueModelRequestGcsDestinatio buildCounterGoogleCloudContactcenterinsightsV1ExportIssueModelRequestGcsDestination--; } -core.Map buildUnnamed25() => { +core.Map buildUnnamed26() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed25(core.Map o) { +void checkUnnamed26(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -2494,7 +2523,7 @@ api.GoogleCloudContactcenterinsightsV1FaqAnswerData if (buildCounterGoogleCloudContactcenterinsightsV1FaqAnswerData < 3) { o.answer = 'foo'; o.confidenceScore = 42.0; - o.metadata = buildUnnamed25(); + o.metadata = buildUnnamed26(); o.queryRecord = 'foo'; o.question = 'foo'; o.source = 'foo'; @@ -2515,7 +2544,7 @@ void 
checkGoogleCloudContactcenterinsightsV1FaqAnswerData( o.confidenceScore!, unittest.equals(42.0), ); - checkUnnamed25(o.metadata!); + checkUnnamed26(o.metadata!); unittest.expect( o.queryRecord!, unittest.equals('foo'), @@ -2778,12 +2807,12 @@ void checkGoogleCloudContactcenterinsightsV1IngestConversationsRequestConversati buildCounterGoogleCloudContactcenterinsightsV1IngestConversationsRequestConversationConfig--; } -core.List buildUnnamed26() => [ +core.List buildUnnamed27() => [ 'foo', 'foo', ]; -void checkUnnamed26(core.List o) { +void checkUnnamed27(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -2807,7 +2836,7 @@ api.GoogleCloudContactcenterinsightsV1IngestConversationsRequestGcsSource 3) { o.bucketObjectType = 'foo'; o.bucketUri = 'foo'; - o.customMetadataKeys = buildUnnamed26(); + o.customMetadataKeys = buildUnnamed27(); o.metadataBucketUri = 'foo'; } buildCounterGoogleCloudContactcenterinsightsV1IngestConversationsRequestGcsSource--; @@ -2828,7 +2857,7 @@ void checkGoogleCloudContactcenterinsightsV1IngestConversationsRequestGcsSource( o.bucketUri!, unittest.equals('foo'), ); - checkUnnamed26(o.customMetadataKeys!); + checkUnnamed27(o.customMetadataKeys!); unittest.expect( o.metadataBucketUri!, unittest.equals('foo'), @@ -2963,12 +2992,12 @@ void checkGoogleCloudContactcenterinsightsV1InterruptionData( buildCounterGoogleCloudContactcenterinsightsV1InterruptionData--; } -core.List buildUnnamed27() => [ +core.List buildUnnamed28() => [ 'foo', 'foo', ]; -void checkUnnamed27(core.List o) { +void checkUnnamed28(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -2990,7 +3019,7 @@ api.GoogleCloudContactcenterinsightsV1Issue o.displayDescription = 'foo'; o.displayName = 'foo'; o.name = 'foo'; - o.sampleUtterances = buildUnnamed27(); + o.sampleUtterances = buildUnnamed28(); o.updateTime = 'foo'; } buildCounterGoogleCloudContactcenterinsightsV1Issue--; @@ -3017,7 +3046,7 @@ void checkGoogleCloudContactcenterinsightsV1Issue( o.name!, unittest.equals('foo'), ); - checkUnnamed27(o.sampleUtterances!); + checkUnnamed28(o.sampleUtterances!); unittest.expect( o.updateTime!, unittest.equals('foo'), @@ -3188,14 +3217,14 @@ void checkGoogleCloudContactcenterinsightsV1IssueModelInputDataConfig( core.Map - buildUnnamed28() => { + buildUnnamed29() => { 'x': buildGoogleCloudContactcenterinsightsV1IssueModelLabelStatsIssueStats(), 'y': buildGoogleCloudContactcenterinsightsV1IssueModelLabelStatsIssueStats(), }; -void checkUnnamed28( +void checkUnnamed29( core.Map< core.String, api @@ -3215,7 +3244,7 @@ api.GoogleCloudContactcenterinsightsV1IssueModelLabelStats buildCounterGoogleCloudContactcenterinsightsV1IssueModelLabelStats++; if (buildCounterGoogleCloudContactcenterinsightsV1IssueModelLabelStats < 3) { o.analyzedConversationsCount = 'foo'; - o.issueStats = buildUnnamed28(); + o.issueStats = buildUnnamed29(); o.unclassifiedConversationsCount = 'foo'; } buildCounterGoogleCloudContactcenterinsightsV1IssueModelLabelStats--; @@ -3230,7 +3259,7 @@ void checkGoogleCloudContactcenterinsightsV1IssueModelLabelStats( o.analyzedConversationsCount!, unittest.equals('foo'), ); - checkUnnamed28(o.issueStats!); + checkUnnamed29(o.issueStats!); unittest.expect( o.unclassifiedConversationsCount!, unittest.equals('foo'), @@ -3279,12 +3308,12 @@ void checkGoogleCloudContactcenterinsightsV1IssueModelLabelStatsIssueStats( } core.List - buildUnnamed29() => [ + buildUnnamed30() => [ buildGoogleCloudContactcenterinsightsV1IssueAssignment(), 
buildGoogleCloudContactcenterinsightsV1IssueAssignment(), ]; -void checkUnnamed29( +void checkUnnamed30( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudContactcenterinsightsV1IssueAssignment(o[0]); @@ -3298,7 +3327,7 @@ api.GoogleCloudContactcenterinsightsV1IssueModelResult buildCounterGoogleCloudContactcenterinsightsV1IssueModelResult++; if (buildCounterGoogleCloudContactcenterinsightsV1IssueModelResult < 3) { o.issueModel = 'foo'; - o.issues = buildUnnamed29(); + o.issues = buildUnnamed30(); } buildCounterGoogleCloudContactcenterinsightsV1IssueModelResult--; return o; @@ -3312,18 +3341,18 @@ void checkGoogleCloudContactcenterinsightsV1IssueModelResult( o.issueModel!, unittest.equals('foo'), ); - checkUnnamed29(o.issues!); + checkUnnamed30(o.issues!); } buildCounterGoogleCloudContactcenterinsightsV1IssueModelResult--; } core.List - buildUnnamed30() => [ + buildUnnamed31() => [ buildGoogleCloudContactcenterinsightsV1FeedbackLabel(), buildGoogleCloudContactcenterinsightsV1FeedbackLabel(), ]; -void checkUnnamed30( +void checkUnnamed31( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudContactcenterinsightsV1FeedbackLabel(o[0]); @@ -3340,7 +3369,7 @@ api.GoogleCloudContactcenterinsightsV1ListAllFeedbackLabelsResponse buildCounterGoogleCloudContactcenterinsightsV1ListAllFeedbackLabelsResponse++; if (buildCounterGoogleCloudContactcenterinsightsV1ListAllFeedbackLabelsResponse < 3) { - o.feedbackLabels = buildUnnamed30(); + o.feedbackLabels = buildUnnamed31(); o.nextPageToken = 'foo'; } buildCounterGoogleCloudContactcenterinsightsV1ListAllFeedbackLabelsResponse--; @@ -3352,7 +3381,7 @@ void checkGoogleCloudContactcenterinsightsV1ListAllFeedbackLabelsResponse( buildCounterGoogleCloudContactcenterinsightsV1ListAllFeedbackLabelsResponse++; if (buildCounterGoogleCloudContactcenterinsightsV1ListAllFeedbackLabelsResponse < 3) { - checkUnnamed30(o.feedbackLabels!); + checkUnnamed31(o.feedbackLabels!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -3361,12 +3390,12 @@ void checkGoogleCloudContactcenterinsightsV1ListAllFeedbackLabelsResponse( buildCounterGoogleCloudContactcenterinsightsV1ListAllFeedbackLabelsResponse--; } -core.List buildUnnamed31() => [ +core.List buildUnnamed32() => [ buildGoogleCloudContactcenterinsightsV1Analysis(), buildGoogleCloudContactcenterinsightsV1Analysis(), ]; -void checkUnnamed31( +void checkUnnamed32( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudContactcenterinsightsV1Analysis(o[0]); @@ -3379,7 +3408,7 @@ api.GoogleCloudContactcenterinsightsV1ListAnalysesResponse final o = api.GoogleCloudContactcenterinsightsV1ListAnalysesResponse(); buildCounterGoogleCloudContactcenterinsightsV1ListAnalysesResponse++; if (buildCounterGoogleCloudContactcenterinsightsV1ListAnalysesResponse < 3) { - o.analyses = buildUnnamed31(); + o.analyses = buildUnnamed32(); o.nextPageToken = 'foo'; } buildCounterGoogleCloudContactcenterinsightsV1ListAnalysesResponse--; @@ -3390,7 +3419,7 @@ void checkGoogleCloudContactcenterinsightsV1ListAnalysesResponse( api.GoogleCloudContactcenterinsightsV1ListAnalysesResponse o) { buildCounterGoogleCloudContactcenterinsightsV1ListAnalysesResponse++; if (buildCounterGoogleCloudContactcenterinsightsV1ListAnalysesResponse < 3) { - checkUnnamed31(o.analyses!); + checkUnnamed32(o.analyses!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -3400,12 +3429,12 @@ void checkGoogleCloudContactcenterinsightsV1ListAnalysesResponse( } core.List - buildUnnamed32() => [ + 
buildUnnamed33() => [ buildGoogleCloudContactcenterinsightsV1AnalysisRule(), buildGoogleCloudContactcenterinsightsV1AnalysisRule(), ]; -void checkUnnamed32( +void checkUnnamed33( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudContactcenterinsightsV1AnalysisRule(o[0]); @@ -3420,7 +3449,7 @@ api.GoogleCloudContactcenterinsightsV1ListAnalysisRulesResponse buildCounterGoogleCloudContactcenterinsightsV1ListAnalysisRulesResponse++; if (buildCounterGoogleCloudContactcenterinsightsV1ListAnalysisRulesResponse < 3) { - o.analysisRules = buildUnnamed32(); + o.analysisRules = buildUnnamed33(); o.nextPageToken = 'foo'; } buildCounterGoogleCloudContactcenterinsightsV1ListAnalysisRulesResponse--; @@ -3432,7 +3461,7 @@ void checkGoogleCloudContactcenterinsightsV1ListAnalysisRulesResponse( buildCounterGoogleCloudContactcenterinsightsV1ListAnalysisRulesResponse++; if (buildCounterGoogleCloudContactcenterinsightsV1ListAnalysisRulesResponse < 3) { - checkUnnamed32(o.analysisRules!); + checkUnnamed33(o.analysisRules!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -3442,12 +3471,12 @@ void checkGoogleCloudContactcenterinsightsV1ListAnalysisRulesResponse( } core.List - buildUnnamed33() => [ + buildUnnamed34() => [ buildGoogleCloudContactcenterinsightsV1Conversation(), buildGoogleCloudContactcenterinsightsV1Conversation(), ]; -void checkUnnamed33( +void checkUnnamed34( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudContactcenterinsightsV1Conversation(o[0]); @@ -3462,7 +3491,7 @@ api.GoogleCloudContactcenterinsightsV1ListConversationsResponse buildCounterGoogleCloudContactcenterinsightsV1ListConversationsResponse++; if (buildCounterGoogleCloudContactcenterinsightsV1ListConversationsResponse < 3) { - o.conversations = buildUnnamed33(); + o.conversations = buildUnnamed34(); o.nextPageToken = 'foo'; } buildCounterGoogleCloudContactcenterinsightsV1ListConversationsResponse--; @@ -3474,7 +3503,7 @@ void checkGoogleCloudContactcenterinsightsV1ListConversationsResponse( buildCounterGoogleCloudContactcenterinsightsV1ListConversationsResponse++; if (buildCounterGoogleCloudContactcenterinsightsV1ListConversationsResponse < 3) { - checkUnnamed33(o.conversations!); + checkUnnamed34(o.conversations!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -3484,12 +3513,12 @@ void checkGoogleCloudContactcenterinsightsV1ListConversationsResponse( } core.List - buildUnnamed34() => [ + buildUnnamed35() => [ buildGoogleCloudContactcenterinsightsV1FeedbackLabel(), buildGoogleCloudContactcenterinsightsV1FeedbackLabel(), ]; -void checkUnnamed34( +void checkUnnamed35( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudContactcenterinsightsV1FeedbackLabel(o[0]); @@ -3505,7 +3534,7 @@ api.GoogleCloudContactcenterinsightsV1ListFeedbackLabelsResponse buildCounterGoogleCloudContactcenterinsightsV1ListFeedbackLabelsResponse++; if (buildCounterGoogleCloudContactcenterinsightsV1ListFeedbackLabelsResponse < 3) { - o.feedbackLabels = buildUnnamed34(); + o.feedbackLabels = buildUnnamed35(); o.nextPageToken = 'foo'; } buildCounterGoogleCloudContactcenterinsightsV1ListFeedbackLabelsResponse--; @@ -3517,7 +3546,7 @@ void checkGoogleCloudContactcenterinsightsV1ListFeedbackLabelsResponse( buildCounterGoogleCloudContactcenterinsightsV1ListFeedbackLabelsResponse++; if (buildCounterGoogleCloudContactcenterinsightsV1ListFeedbackLabelsResponse < 3) { - checkUnnamed34(o.feedbackLabels!); + checkUnnamed35(o.feedbackLabels!); unittest.expect( 
o.nextPageToken!, unittest.equals('foo'), @@ -3526,13 +3555,13 @@ void checkGoogleCloudContactcenterinsightsV1ListFeedbackLabelsResponse( buildCounterGoogleCloudContactcenterinsightsV1ListFeedbackLabelsResponse--; } -core.List buildUnnamed35() => +core.List buildUnnamed36() => [ buildGoogleCloudContactcenterinsightsV1IssueModel(), buildGoogleCloudContactcenterinsightsV1IssueModel(), ]; -void checkUnnamed35( +void checkUnnamed36( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudContactcenterinsightsV1IssueModel(o[0]); @@ -3547,7 +3576,7 @@ api.GoogleCloudContactcenterinsightsV1ListIssueModelsResponse buildCounterGoogleCloudContactcenterinsightsV1ListIssueModelsResponse++; if (buildCounterGoogleCloudContactcenterinsightsV1ListIssueModelsResponse < 3) { - o.issueModels = buildUnnamed35(); + o.issueModels = buildUnnamed36(); } buildCounterGoogleCloudContactcenterinsightsV1ListIssueModelsResponse--; return o; @@ -3558,17 +3587,17 @@ void checkGoogleCloudContactcenterinsightsV1ListIssueModelsResponse( buildCounterGoogleCloudContactcenterinsightsV1ListIssueModelsResponse++; if (buildCounterGoogleCloudContactcenterinsightsV1ListIssueModelsResponse < 3) { - checkUnnamed35(o.issueModels!); + checkUnnamed36(o.issueModels!); } buildCounterGoogleCloudContactcenterinsightsV1ListIssueModelsResponse--; } -core.List buildUnnamed36() => [ +core.List buildUnnamed37() => [ buildGoogleCloudContactcenterinsightsV1Issue(), buildGoogleCloudContactcenterinsightsV1Issue(), ]; -void checkUnnamed36(core.List o) { +void checkUnnamed37(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudContactcenterinsightsV1Issue(o[0]); checkGoogleCloudContactcenterinsightsV1Issue(o[1]); @@ -3580,7 +3609,7 @@ api.GoogleCloudContactcenterinsightsV1ListIssuesResponse final o = api.GoogleCloudContactcenterinsightsV1ListIssuesResponse(); buildCounterGoogleCloudContactcenterinsightsV1ListIssuesResponse++; if (buildCounterGoogleCloudContactcenterinsightsV1ListIssuesResponse < 3) { - o.issues = buildUnnamed36(); + o.issues = buildUnnamed37(); } buildCounterGoogleCloudContactcenterinsightsV1ListIssuesResponse--; return o; @@ -3590,18 +3619,18 @@ void checkGoogleCloudContactcenterinsightsV1ListIssuesResponse( api.GoogleCloudContactcenterinsightsV1ListIssuesResponse o) { buildCounterGoogleCloudContactcenterinsightsV1ListIssuesResponse++; if (buildCounterGoogleCloudContactcenterinsightsV1ListIssuesResponse < 3) { - checkUnnamed36(o.issues!); + checkUnnamed37(o.issues!); } buildCounterGoogleCloudContactcenterinsightsV1ListIssuesResponse--; } core.List - buildUnnamed37() => [ + buildUnnamed38() => [ buildGoogleCloudContactcenterinsightsV1PhraseMatcher(), buildGoogleCloudContactcenterinsightsV1PhraseMatcher(), ]; -void checkUnnamed37( +void checkUnnamed38( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudContactcenterinsightsV1PhraseMatcher(o[0]); @@ -3618,7 +3647,7 @@ api.GoogleCloudContactcenterinsightsV1ListPhraseMatchersResponse if (buildCounterGoogleCloudContactcenterinsightsV1ListPhraseMatchersResponse < 3) { o.nextPageToken = 'foo'; - o.phraseMatchers = buildUnnamed37(); + o.phraseMatchers = buildUnnamed38(); } buildCounterGoogleCloudContactcenterinsightsV1ListPhraseMatchersResponse--; return o; @@ -3633,18 +3662,18 @@ void checkGoogleCloudContactcenterinsightsV1ListPhraseMatchersResponse( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed37(o.phraseMatchers!); + checkUnnamed38(o.phraseMatchers!); } 
buildCounterGoogleCloudContactcenterinsightsV1ListPhraseMatchersResponse--; } -core.List buildUnnamed38() => +core.List buildUnnamed39() => [ buildGoogleCloudContactcenterinsightsV1QaQuestion(), buildGoogleCloudContactcenterinsightsV1QaQuestion(), ]; -void checkUnnamed38( +void checkUnnamed39( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudContactcenterinsightsV1QaQuestion(o[0]); @@ -3660,7 +3689,7 @@ api.GoogleCloudContactcenterinsightsV1ListQaQuestionsResponse if (buildCounterGoogleCloudContactcenterinsightsV1ListQaQuestionsResponse < 3) { o.nextPageToken = 'foo'; - o.qaQuestions = buildUnnamed38(); + o.qaQuestions = buildUnnamed39(); } buildCounterGoogleCloudContactcenterinsightsV1ListQaQuestionsResponse--; return o; @@ -3675,18 +3704,18 @@ void checkGoogleCloudContactcenterinsightsV1ListQaQuestionsResponse( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed38(o.qaQuestions!); + checkUnnamed39(o.qaQuestions!); } buildCounterGoogleCloudContactcenterinsightsV1ListQaQuestionsResponse--; } core.List - buildUnnamed39() => [ + buildUnnamed40() => [ buildGoogleCloudContactcenterinsightsV1QaScorecardRevision(), buildGoogleCloudContactcenterinsightsV1QaScorecardRevision(), ]; -void checkUnnamed39( +void checkUnnamed40( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudContactcenterinsightsV1QaScorecardRevision(o[0]); @@ -3704,7 +3733,7 @@ api.GoogleCloudContactcenterinsightsV1ListQaScorecardRevisionsResponse if (buildCounterGoogleCloudContactcenterinsightsV1ListQaScorecardRevisionsResponse < 3) { o.nextPageToken = 'foo'; - o.qaScorecardRevisions = buildUnnamed39(); + o.qaScorecardRevisions = buildUnnamed40(); } buildCounterGoogleCloudContactcenterinsightsV1ListQaScorecardRevisionsResponse--; return o; @@ -3719,18 +3748,18 @@ void checkGoogleCloudContactcenterinsightsV1ListQaScorecardRevisionsResponse( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed39(o.qaScorecardRevisions!); + checkUnnamed40(o.qaScorecardRevisions!); } buildCounterGoogleCloudContactcenterinsightsV1ListQaScorecardRevisionsResponse--; } -core.List buildUnnamed40() => +core.List buildUnnamed41() => [ buildGoogleCloudContactcenterinsightsV1QaScorecard(), buildGoogleCloudContactcenterinsightsV1QaScorecard(), ]; -void checkUnnamed40( +void checkUnnamed41( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudContactcenterinsightsV1QaScorecard(o[0]); @@ -3746,7 +3775,7 @@ api.GoogleCloudContactcenterinsightsV1ListQaScorecardsResponse if (buildCounterGoogleCloudContactcenterinsightsV1ListQaScorecardsResponse < 3) { o.nextPageToken = 'foo'; - o.qaScorecards = buildUnnamed40(); + o.qaScorecards = buildUnnamed41(); } buildCounterGoogleCloudContactcenterinsightsV1ListQaScorecardsResponse--; return o; @@ -3761,17 +3790,17 @@ void checkGoogleCloudContactcenterinsightsV1ListQaScorecardsResponse( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed40(o.qaScorecards!); + checkUnnamed41(o.qaScorecards!); } buildCounterGoogleCloudContactcenterinsightsV1ListQaScorecardsResponse--; } -core.List buildUnnamed41() => [ +core.List buildUnnamed42() => [ buildGoogleCloudContactcenterinsightsV1View(), buildGoogleCloudContactcenterinsightsV1View(), ]; -void checkUnnamed41(core.List o) { +void checkUnnamed42(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudContactcenterinsightsV1View(o[0]); checkGoogleCloudContactcenterinsightsV1View(o[1]); @@ -3784,7 +3813,7 @@ api.GoogleCloudContactcenterinsightsV1ListViewsResponse 
buildCounterGoogleCloudContactcenterinsightsV1ListViewsResponse++; if (buildCounterGoogleCloudContactcenterinsightsV1ListViewsResponse < 3) { o.nextPageToken = 'foo'; - o.views = buildUnnamed41(); + o.views = buildUnnamed42(); } buildCounterGoogleCloudContactcenterinsightsV1ListViewsResponse--; return o; @@ -3798,7 +3827,7 @@ void checkGoogleCloudContactcenterinsightsV1ListViewsResponse( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed41(o.views!); + checkUnnamed42(o.views!); } buildCounterGoogleCloudContactcenterinsightsV1ListViewsResponse--; } @@ -3885,12 +3914,12 @@ void checkGoogleCloudContactcenterinsightsV1PhraseMatchRuleConfig( } core.List - buildUnnamed42() => [ + buildUnnamed43() => [ buildGoogleCloudContactcenterinsightsV1PhraseMatchRule(), buildGoogleCloudContactcenterinsightsV1PhraseMatchRule(), ]; -void checkUnnamed42( +void checkUnnamed43( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudContactcenterinsightsV1PhraseMatchRule(o[0]); @@ -3903,7 +3932,7 @@ api.GoogleCloudContactcenterinsightsV1PhraseMatchRuleGroup final o = api.GoogleCloudContactcenterinsightsV1PhraseMatchRuleGroup(); buildCounterGoogleCloudContactcenterinsightsV1PhraseMatchRuleGroup++; if (buildCounterGoogleCloudContactcenterinsightsV1PhraseMatchRuleGroup < 3) { - o.phraseMatchRules = buildUnnamed42(); + o.phraseMatchRules = buildUnnamed43(); o.type = 'foo'; } buildCounterGoogleCloudContactcenterinsightsV1PhraseMatchRuleGroup--; @@ -3914,7 +3943,7 @@ void checkGoogleCloudContactcenterinsightsV1PhraseMatchRuleGroup( api.GoogleCloudContactcenterinsightsV1PhraseMatchRuleGroup o) { buildCounterGoogleCloudContactcenterinsightsV1PhraseMatchRuleGroup++; if (buildCounterGoogleCloudContactcenterinsightsV1PhraseMatchRuleGroup < 3) { - checkUnnamed42(o.phraseMatchRules!); + checkUnnamed43(o.phraseMatchRules!); unittest.expect( o.type!, unittest.equals('foo'), @@ -3924,12 +3953,12 @@ void checkGoogleCloudContactcenterinsightsV1PhraseMatchRuleGroup( } core.List - buildUnnamed43() => [ + buildUnnamed44() => [ buildGoogleCloudContactcenterinsightsV1PhraseMatchRuleGroup(), buildGoogleCloudContactcenterinsightsV1PhraseMatchRuleGroup(), ]; -void checkUnnamed43( +void checkUnnamed44( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudContactcenterinsightsV1PhraseMatchRuleGroup(o[0]); @@ -3946,7 +3975,7 @@ api.GoogleCloudContactcenterinsightsV1PhraseMatcher o.active = true; o.displayName = 'foo'; o.name = 'foo'; - o.phraseMatchRuleGroups = buildUnnamed43(); + o.phraseMatchRuleGroups = buildUnnamed44(); o.revisionCreateTime = 'foo'; o.revisionId = 'foo'; o.roleMatch = 'foo'; @@ -3975,7 +4004,7 @@ void checkGoogleCloudContactcenterinsightsV1PhraseMatcher( o.name!, unittest.equals('foo'), ); - checkUnnamed43(o.phraseMatchRuleGroups!); + checkUnnamed44(o.phraseMatchRuleGroups!); unittest.expect( o.revisionCreateTime!, unittest.equals('foo'), @@ -4005,24 +4034,24 @@ void checkGoogleCloudContactcenterinsightsV1PhraseMatcher( } core.List - buildUnnamed44() => [ + buildUnnamed45() => [ buildGoogleCloudContactcenterinsightsV1QaAnswerAnswerSource(), buildGoogleCloudContactcenterinsightsV1QaAnswerAnswerSource(), ]; -void checkUnnamed44( +void checkUnnamed45( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudContactcenterinsightsV1QaAnswerAnswerSource(o[0]); checkGoogleCloudContactcenterinsightsV1QaAnswerAnswerSource(o[1]); } -core.List buildUnnamed45() => [ +core.List buildUnnamed46() => [ 'foo', 'foo', ]; -void checkUnnamed45(core.List o) { +void 
checkUnnamed46(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -4040,14 +4069,13 @@ api.GoogleCloudContactcenterinsightsV1QaAnswer final o = api.GoogleCloudContactcenterinsightsV1QaAnswer(); buildCounterGoogleCloudContactcenterinsightsV1QaAnswer++; if (buildCounterGoogleCloudContactcenterinsightsV1QaAnswer < 3) { - o.answerSources = buildUnnamed44(); + o.answerSources = buildUnnamed45(); o.answerValue = buildGoogleCloudContactcenterinsightsV1QaAnswerAnswerValue(); o.conversation = 'foo'; - o.potentialScore = 42.0; o.qaQuestion = 'foo'; o.questionBody = 'foo'; - o.tags = buildUnnamed45(); + o.tags = buildUnnamed46(); } buildCounterGoogleCloudContactcenterinsightsV1QaAnswer--; return o; @@ -4057,16 +4085,12 @@ void checkGoogleCloudContactcenterinsightsV1QaAnswer( api.GoogleCloudContactcenterinsightsV1QaAnswer o) { buildCounterGoogleCloudContactcenterinsightsV1QaAnswer++; if (buildCounterGoogleCloudContactcenterinsightsV1QaAnswer < 3) { - checkUnnamed44(o.answerSources!); + checkUnnamed45(o.answerSources!); checkGoogleCloudContactcenterinsightsV1QaAnswerAnswerValue(o.answerValue!); unittest.expect( o.conversation!, unittest.equals('foo'), ); - unittest.expect( - o.potentialScore!, - unittest.equals(42.0), - ); unittest.expect( o.qaQuestion!, unittest.equals('foo'), @@ -4075,7 +4099,7 @@ void checkGoogleCloudContactcenterinsightsV1QaAnswer( o.questionBody!, unittest.equals('foo'), ); - checkUnnamed45(o.tags!); + checkUnnamed46(o.tags!); } buildCounterGoogleCloudContactcenterinsightsV1QaAnswer--; } @@ -4116,7 +4140,9 @@ api.GoogleCloudContactcenterinsightsV1QaAnswerAnswerValue o.boolValue = true; o.key = 'foo'; o.naValue = true; + o.normalizedScore = 42.0; o.numValue = 42.0; + o.potentialScore = 42.0; o.score = 42.0; o.strValue = 'foo'; } @@ -4134,10 +4160,18 @@ void checkGoogleCloudContactcenterinsightsV1QaAnswerAnswerValue( unittest.equals('foo'), ); unittest.expect(o.naValue!, unittest.isTrue); + unittest.expect( + o.normalizedScore!, + unittest.equals(42.0), + ); unittest.expect( o.numValue!, unittest.equals(42.0), ); + unittest.expect( + o.potentialScore!, + unittest.equals(42.0), + ); unittest.expect( o.score!, unittest.equals(42.0), @@ -4151,24 +4185,24 @@ void checkGoogleCloudContactcenterinsightsV1QaAnswerAnswerValue( } core.List - buildUnnamed46() => [ + buildUnnamed47() => [ buildGoogleCloudContactcenterinsightsV1QaQuestionAnswerChoice(), buildGoogleCloudContactcenterinsightsV1QaQuestionAnswerChoice(), ]; -void checkUnnamed46( +void checkUnnamed47( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudContactcenterinsightsV1QaQuestionAnswerChoice(o[0]); checkGoogleCloudContactcenterinsightsV1QaQuestionAnswerChoice(o[1]); } -core.List buildUnnamed47() => [ +core.List buildUnnamed48() => [ 'foo', 'foo', ]; -void checkUnnamed47(core.List o) { +void checkUnnamed48(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -4187,14 +4221,14 @@ api.GoogleCloudContactcenterinsightsV1QaQuestion buildCounterGoogleCloudContactcenterinsightsV1QaQuestion++; if (buildCounterGoogleCloudContactcenterinsightsV1QaQuestion < 3) { o.abbreviation = 'foo'; - o.answerChoices = buildUnnamed46(); + o.answerChoices = buildUnnamed47(); o.answerInstructions = 'foo'; o.createTime = 'foo'; o.metrics = buildGoogleCloudContactcenterinsightsV1QaQuestionMetrics(); o.name = 'foo'; o.order = 42; o.questionBody = 'foo'; - o.tags = buildUnnamed47(); + o.tags = buildUnnamed48(); o.tuningMetadata = 
buildGoogleCloudContactcenterinsightsV1QaQuestionTuningMetadata(); o.updateTime = 'foo'; @@ -4211,7 +4245,7 @@ void checkGoogleCloudContactcenterinsightsV1QaQuestion( o.abbreviation!, unittest.equals('foo'), ); - checkUnnamed46(o.answerChoices!); + checkUnnamed47(o.answerChoices!); unittest.expect( o.answerInstructions!, unittest.equals('foo'), @@ -4233,7 +4267,7 @@ void checkGoogleCloudContactcenterinsightsV1QaQuestion( o.questionBody!, unittest.equals('foo'), ); - checkUnnamed47(o.tags!); + checkUnnamed48(o.tags!); checkGoogleCloudContactcenterinsightsV1QaQuestionTuningMetadata( o.tuningMetadata!); unittest.expect( @@ -4314,12 +4348,12 @@ void checkGoogleCloudContactcenterinsightsV1QaQuestionMetrics( buildCounterGoogleCloudContactcenterinsightsV1QaQuestionMetrics--; } -core.List buildUnnamed48() => [ +core.List buildUnnamed49() => [ 'foo', 'foo', ]; -void checkUnnamed48(core.List o) { +void checkUnnamed49(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -4339,7 +4373,7 @@ api.GoogleCloudContactcenterinsightsV1QaQuestionTuningMetadata buildCounterGoogleCloudContactcenterinsightsV1QaQuestionTuningMetadata++; if (buildCounterGoogleCloudContactcenterinsightsV1QaQuestionTuningMetadata < 3) { - o.datasetValidationWarnings = buildUnnamed48(); + o.datasetValidationWarnings = buildUnnamed49(); o.totalValidLabelCount = 'foo'; o.tuningError = 'foo'; } @@ -4352,7 +4386,7 @@ void checkGoogleCloudContactcenterinsightsV1QaQuestionTuningMetadata( buildCounterGoogleCloudContactcenterinsightsV1QaQuestionTuningMetadata++; if (buildCounterGoogleCloudContactcenterinsightsV1QaQuestionTuningMetadata < 3) { - checkUnnamed48(o.datasetValidationWarnings!); + checkUnnamed49(o.datasetValidationWarnings!); unittest.expect( o.totalValidLabelCount!, unittest.equals('foo'), @@ -4409,12 +4443,12 @@ void checkGoogleCloudContactcenterinsightsV1QaScorecard( buildCounterGoogleCloudContactcenterinsightsV1QaScorecard--; } -core.List buildUnnamed49() => [ +core.List buildUnnamed50() => [ buildGoogleCloudContactcenterinsightsV1QaAnswer(), buildGoogleCloudContactcenterinsightsV1QaAnswer(), ]; -void checkUnnamed49( +void checkUnnamed50( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudContactcenterinsightsV1QaAnswer(o[0]); @@ -4422,12 +4456,12 @@ void checkUnnamed49( } core.List - buildUnnamed50() => [ + buildUnnamed51() => [ buildGoogleCloudContactcenterinsightsV1QaScorecardResultQaTagResult(), buildGoogleCloudContactcenterinsightsV1QaScorecardResultQaTagResult(), ]; -void checkUnnamed50( +void checkUnnamed51( core.List< api.GoogleCloudContactcenterinsightsV1QaScorecardResultQaTagResult> o) { @@ -4437,12 +4471,12 @@ void checkUnnamed50( } core.List - buildUnnamed51() => [ + buildUnnamed52() => [ buildGoogleCloudContactcenterinsightsV1QaScorecardResultScoreSource(), buildGoogleCloudContactcenterinsightsV1QaScorecardResultScoreSource(), ]; -void checkUnnamed51( +void checkUnnamed52( core.List< api.GoogleCloudContactcenterinsightsV1QaScorecardResultScoreSource> o) { @@ -4463,11 +4497,11 @@ api.GoogleCloudContactcenterinsightsV1QaScorecardResult o.name = 'foo'; o.normalizedScore = 42.0; o.potentialScore = 42.0; - o.qaAnswers = buildUnnamed49(); + o.qaAnswers = buildUnnamed50(); o.qaScorecardRevision = 'foo'; - o.qaTagResults = buildUnnamed50(); + o.qaTagResults = buildUnnamed51(); o.score = 42.0; - o.scoreSources = buildUnnamed51(); + o.scoreSources = buildUnnamed52(); } buildCounterGoogleCloudContactcenterinsightsV1QaScorecardResult--; return o; @@ -4501,17 +4535,17 @@ 
void checkGoogleCloudContactcenterinsightsV1QaScorecardResult( o.potentialScore!, unittest.equals(42.0), ); - checkUnnamed49(o.qaAnswers!); + checkUnnamed50(o.qaAnswers!); unittest.expect( o.qaScorecardRevision!, unittest.equals('foo'), ); - checkUnnamed50(o.qaTagResults!); + checkUnnamed51(o.qaTagResults!); unittest.expect( o.score!, unittest.equals(42.0), ); - checkUnnamed51(o.scoreSources!); + checkUnnamed52(o.scoreSources!); } buildCounterGoogleCloudContactcenterinsightsV1QaScorecardResult--; } @@ -4561,12 +4595,12 @@ void checkGoogleCloudContactcenterinsightsV1QaScorecardResultQaTagResult( } core.List - buildUnnamed52() => [ + buildUnnamed53() => [ buildGoogleCloudContactcenterinsightsV1QaScorecardResultQaTagResult(), buildGoogleCloudContactcenterinsightsV1QaScorecardResultQaTagResult(), ]; -void checkUnnamed52( +void checkUnnamed53( core.List< api.GoogleCloudContactcenterinsightsV1QaScorecardResultQaTagResult> o) { @@ -4587,7 +4621,7 @@ api.GoogleCloudContactcenterinsightsV1QaScorecardResultScoreSource 3) { o.normalizedScore = 42.0; o.potentialScore = 42.0; - o.qaTagResults = buildUnnamed52(); + o.qaTagResults = buildUnnamed53(); o.score = 42.0; o.sourceType = 'foo'; } @@ -4608,7 +4642,7 @@ void checkGoogleCloudContactcenterinsightsV1QaScorecardResultScoreSource( o.potentialScore!, unittest.equals(42.0), ); - checkUnnamed52(o.qaTagResults!); + checkUnnamed53(o.qaTagResults!); unittest.expect( o.score!, unittest.equals(42.0), @@ -4621,12 +4655,12 @@ void checkGoogleCloudContactcenterinsightsV1QaScorecardResultScoreSource( buildCounterGoogleCloudContactcenterinsightsV1QaScorecardResultScoreSource--; } -core.List buildUnnamed53() => [ +core.List buildUnnamed54() => [ 'foo', 'foo', ]; -void checkUnnamed53(core.List o) { +void checkUnnamed54(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -4644,7 +4678,7 @@ api.GoogleCloudContactcenterinsightsV1QaScorecardRevision final o = api.GoogleCloudContactcenterinsightsV1QaScorecardRevision(); buildCounterGoogleCloudContactcenterinsightsV1QaScorecardRevision++; if (buildCounterGoogleCloudContactcenterinsightsV1QaScorecardRevision < 3) { - o.alternateIds = buildUnnamed53(); + o.alternateIds = buildUnnamed54(); o.createTime = 'foo'; o.name = 'foo'; o.snapshot = buildGoogleCloudContactcenterinsightsV1QaScorecard(); @@ -4658,7 +4692,7 @@ void checkGoogleCloudContactcenterinsightsV1QaScorecardRevision( api.GoogleCloudContactcenterinsightsV1QaScorecardRevision o) { buildCounterGoogleCloudContactcenterinsightsV1QaScorecardRevision++; if (buildCounterGoogleCloudContactcenterinsightsV1QaScorecardRevision < 3) { - checkUnnamed53(o.alternateIds!); + checkUnnamed54(o.alternateIds!); unittest.expect( o.createTime!, unittest.equals('foo'), @@ -4676,12 +4710,12 @@ void checkGoogleCloudContactcenterinsightsV1QaScorecardRevision( buildCounterGoogleCloudContactcenterinsightsV1QaScorecardRevision--; } -core.List buildUnnamed54() => [ +core.List buildUnnamed55() => [ buildGoogleCloudContactcenterinsightsV1Dimension(), buildGoogleCloudContactcenterinsightsV1Dimension(), ]; -void checkUnnamed54( +void checkUnnamed55( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudContactcenterinsightsV1Dimension(o[0]); @@ -4694,7 +4728,7 @@ api.GoogleCloudContactcenterinsightsV1QueryMetricsRequest final o = api.GoogleCloudContactcenterinsightsV1QueryMetricsRequest(); buildCounterGoogleCloudContactcenterinsightsV1QueryMetricsRequest++; if (buildCounterGoogleCloudContactcenterinsightsV1QueryMetricsRequest < 3) { - o.dimensions 
= buildUnnamed54(); + o.dimensions = buildUnnamed55(); o.filter = 'foo'; o.measureMask = 'foo'; o.timeGranularity = 'foo'; @@ -4707,7 +4741,7 @@ void checkGoogleCloudContactcenterinsightsV1QueryMetricsRequest( api.GoogleCloudContactcenterinsightsV1QueryMetricsRequest o) { buildCounterGoogleCloudContactcenterinsightsV1QueryMetricsRequest++; if (buildCounterGoogleCloudContactcenterinsightsV1QueryMetricsRequest < 3) { - checkUnnamed54(o.dimensions!); + checkUnnamed55(o.dimensions!); unittest.expect( o.filter!, unittest.equals('foo'), @@ -4880,12 +4914,12 @@ void checkGoogleCloudContactcenterinsightsV1SentimentData( buildCounterGoogleCloudContactcenterinsightsV1SentimentData--; } -core.Map buildUnnamed55() => { +core.Map buildUnnamed56() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed55(core.Map o) { +void checkUnnamed56(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -4909,7 +4943,7 @@ api.GoogleCloudContactcenterinsightsV1Settings o.createTime = 'foo'; o.languageCode = 'foo'; o.name = 'foo'; - o.pubsubNotificationSettings = buildUnnamed55(); + o.pubsubNotificationSettings = buildUnnamed56(); o.redactionConfig = buildGoogleCloudContactcenterinsightsV1RedactionConfig(); o.speechConfig = buildGoogleCloudContactcenterinsightsV1SpeechConfig(); @@ -4941,7 +4975,7 @@ void checkGoogleCloudContactcenterinsightsV1Settings( o.name!, unittest.equals('foo'), ); - checkUnnamed55(o.pubsubNotificationSettings!); + checkUnnamed56(o.pubsubNotificationSettings!); checkGoogleCloudContactcenterinsightsV1RedactionConfig(o.redactionConfig!); checkGoogleCloudContactcenterinsightsV1SpeechConfig(o.speechConfig!); unittest.expect( @@ -5005,12 +5039,12 @@ void checkGoogleCloudContactcenterinsightsV1SilenceData( buildCounterGoogleCloudContactcenterinsightsV1SilenceData--; } -core.Map buildUnnamed56() => { +core.Map buildUnnamed57() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed56(core.Map o) { +void checkUnnamed57(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -5032,7 +5066,7 @@ api.GoogleCloudContactcenterinsightsV1SmartComposeSuggestionData if (buildCounterGoogleCloudContactcenterinsightsV1SmartComposeSuggestionData < 3) { o.confidenceScore = 42.0; - o.metadata = buildUnnamed56(); + o.metadata = buildUnnamed57(); o.queryRecord = 'foo'; o.suggestion = 'foo'; } @@ -5049,7 +5083,7 @@ void checkGoogleCloudContactcenterinsightsV1SmartComposeSuggestionData( o.confidenceScore!, unittest.equals(42.0), ); - checkUnnamed56(o.metadata!); + checkUnnamed57(o.metadata!); unittest.expect( o.queryRecord!, unittest.equals('foo'), @@ -5062,12 +5096,12 @@ void checkGoogleCloudContactcenterinsightsV1SmartComposeSuggestionData( buildCounterGoogleCloudContactcenterinsightsV1SmartComposeSuggestionData--; } -core.Map buildUnnamed57() => { +core.Map buildUnnamed58() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed57(core.Map o) { +void checkUnnamed58(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -5086,7 +5120,7 @@ api.GoogleCloudContactcenterinsightsV1SmartReplyData buildCounterGoogleCloudContactcenterinsightsV1SmartReplyData++; if (buildCounterGoogleCloudContactcenterinsightsV1SmartReplyData < 3) { o.confidenceScore = 42.0; - o.metadata = buildUnnamed57(); + o.metadata = buildUnnamed58(); o.queryRecord = 'foo'; o.reply = 'foo'; } @@ -5102,7 +5136,7 @@ void checkGoogleCloudContactcenterinsightsV1SmartReplyData( o.confidenceScore!, unittest.equals(42.0), ); - checkUnnamed57(o.metadata!); + checkUnnamed58(o.metadata!); 
unittest.expect( o.queryRecord!, unittest.equals('foo'), @@ -5303,451 +5337,179 @@ void checkGoogleCloudContactcenterinsightsV1View( buildCounterGoogleCloudContactcenterinsightsV1View--; } -core.List buildUnnamed58() => [ - buildGoogleIamV1AuditLogConfig(), - buildGoogleIamV1AuditLogConfig(), +core.List buildUnnamed59() => [ + buildGoogleLongrunningOperation(), + buildGoogleLongrunningOperation(), ]; -void checkUnnamed58(core.List o) { +void checkUnnamed59(core.List o) { unittest.expect(o, unittest.hasLength(2)); - checkGoogleIamV1AuditLogConfig(o[0]); - checkGoogleIamV1AuditLogConfig(o[1]); + checkGoogleLongrunningOperation(o[0]); + checkGoogleLongrunningOperation(o[1]); } -core.int buildCounterGoogleIamV1AuditConfig = 0; -api.GoogleIamV1AuditConfig buildGoogleIamV1AuditConfig() { - final o = api.GoogleIamV1AuditConfig(); - buildCounterGoogleIamV1AuditConfig++; - if (buildCounterGoogleIamV1AuditConfig < 3) { - o.auditLogConfigs = buildUnnamed58(); - o.service = 'foo'; +core.int buildCounterGoogleLongrunningListOperationsResponse = 0; +api.GoogleLongrunningListOperationsResponse + buildGoogleLongrunningListOperationsResponse() { + final o = api.GoogleLongrunningListOperationsResponse(); + buildCounterGoogleLongrunningListOperationsResponse++; + if (buildCounterGoogleLongrunningListOperationsResponse < 3) { + o.nextPageToken = 'foo'; + o.operations = buildUnnamed59(); } - buildCounterGoogleIamV1AuditConfig--; + buildCounterGoogleLongrunningListOperationsResponse--; return o; } -void checkGoogleIamV1AuditConfig(api.GoogleIamV1AuditConfig o) { - buildCounterGoogleIamV1AuditConfig++; - if (buildCounterGoogleIamV1AuditConfig < 3) { - checkUnnamed58(o.auditLogConfigs!); +void checkGoogleLongrunningListOperationsResponse( + api.GoogleLongrunningListOperationsResponse o) { + buildCounterGoogleLongrunningListOperationsResponse++; + if (buildCounterGoogleLongrunningListOperationsResponse < 3) { unittest.expect( - o.service!, + o.nextPageToken!, unittest.equals('foo'), ); + checkUnnamed59(o.operations!); } - buildCounterGoogleIamV1AuditConfig--; + buildCounterGoogleLongrunningListOperationsResponse--; } -core.List buildUnnamed59() => [ - 'foo', - 'foo', - ]; +core.Map buildUnnamed60() => { + 'x': { + 'list': [1, 2, 3], + 'bool': true, + 'string': 'foo' + }, + 'y': { + 'list': [1, 2, 3], + 'bool': true, + 'string': 'foo' + }, + }; -void checkUnnamed59(core.List o) { +void checkUnnamed60(core.Map o) { unittest.expect(o, unittest.hasLength(2)); + var casted1 = (o['x']!) as core.Map; + unittest.expect(casted1, unittest.hasLength(3)); unittest.expect( - o[0], + casted1['list'], + unittest.equals([1, 2, 3]), + ); + unittest.expect( + casted1['bool'], + unittest.equals(true), + ); + unittest.expect( + casted1['string'], unittest.equals('foo'), ); + var casted2 = (o['y']!) 
as core.Map; + unittest.expect(casted2, unittest.hasLength(3)); unittest.expect( - o[1], + casted2['list'], + unittest.equals([1, 2, 3]), + ); + unittest.expect( + casted2['bool'], + unittest.equals(true), + ); + unittest.expect( + casted2['string'], unittest.equals('foo'), ); } -core.int buildCounterGoogleIamV1AuditLogConfig = 0; -api.GoogleIamV1AuditLogConfig buildGoogleIamV1AuditLogConfig() { - final o = api.GoogleIamV1AuditLogConfig(); - buildCounterGoogleIamV1AuditLogConfig++; - if (buildCounterGoogleIamV1AuditLogConfig < 3) { - o.exemptedMembers = buildUnnamed59(); - o.logType = 'foo'; - } - buildCounterGoogleIamV1AuditLogConfig--; - return o; -} - -void checkGoogleIamV1AuditLogConfig(api.GoogleIamV1AuditLogConfig o) { - buildCounterGoogleIamV1AuditLogConfig++; - if (buildCounterGoogleIamV1AuditLogConfig < 3) { - checkUnnamed59(o.exemptedMembers!); - unittest.expect( - o.logType!, - unittest.equals('foo'), - ); - } - buildCounterGoogleIamV1AuditLogConfig--; -} - -core.List buildUnnamed60() => [ - 'foo', - 'foo', - ]; +core.Map buildUnnamed61() => { + 'x': { + 'list': [1, 2, 3], + 'bool': true, + 'string': 'foo' + }, + 'y': { + 'list': [1, 2, 3], + 'bool': true, + 'string': 'foo' + }, + }; -void checkUnnamed60(core.List o) { +void checkUnnamed61(core.Map o) { unittest.expect(o, unittest.hasLength(2)); + var casted3 = (o['x']!) as core.Map; + unittest.expect(casted3, unittest.hasLength(3)); unittest.expect( - o[0], + casted3['list'], + unittest.equals([1, 2, 3]), + ); + unittest.expect( + casted3['bool'], + unittest.equals(true), + ); + unittest.expect( + casted3['string'], unittest.equals('foo'), ); + var casted4 = (o['y']!) as core.Map; + unittest.expect(casted4, unittest.hasLength(3)); unittest.expect( - o[1], + casted4['list'], + unittest.equals([1, 2, 3]), + ); + unittest.expect( + casted4['bool'], + unittest.equals(true), + ); + unittest.expect( + casted4['string'], unittest.equals('foo'), ); } -core.int buildCounterGoogleIamV1Binding = 0; -api.GoogleIamV1Binding buildGoogleIamV1Binding() { - final o = api.GoogleIamV1Binding(); - buildCounterGoogleIamV1Binding++; - if (buildCounterGoogleIamV1Binding < 3) { - o.condition = buildGoogleTypeExpr(); - o.members = buildUnnamed60(); - o.role = 'foo'; +core.int buildCounterGoogleLongrunningOperation = 0; +api.GoogleLongrunningOperation buildGoogleLongrunningOperation() { + final o = api.GoogleLongrunningOperation(); + buildCounterGoogleLongrunningOperation++; + if (buildCounterGoogleLongrunningOperation < 3) { + o.done = true; + o.error = buildGoogleRpcStatus(); + o.metadata = buildUnnamed60(); + o.name = 'foo'; + o.response = buildUnnamed61(); } - buildCounterGoogleIamV1Binding--; + buildCounterGoogleLongrunningOperation--; return o; } -void checkGoogleIamV1Binding(api.GoogleIamV1Binding o) { - buildCounterGoogleIamV1Binding++; - if (buildCounterGoogleIamV1Binding < 3) { - checkGoogleTypeExpr(o.condition!); - checkUnnamed60(o.members!); +void checkGoogleLongrunningOperation(api.GoogleLongrunningOperation o) { + buildCounterGoogleLongrunningOperation++; + if (buildCounterGoogleLongrunningOperation < 3) { + unittest.expect(o.done!, unittest.isTrue); + checkGoogleRpcStatus(o.error!); + checkUnnamed60(o.metadata!); unittest.expect( - o.role!, + o.name!, unittest.equals('foo'), ); + checkUnnamed61(o.response!); } - buildCounterGoogleIamV1Binding--; + buildCounterGoogleLongrunningOperation--; } -core.List buildUnnamed61() => [ - buildGoogleIamV1AuditConfig(), - buildGoogleIamV1AuditConfig(), - ]; - -void checkUnnamed61(core.List o) { - 
unittest.expect(o, unittest.hasLength(2)); - checkGoogleIamV1AuditConfig(o[0]); - checkGoogleIamV1AuditConfig(o[1]); +core.int buildCounterGoogleProtobufEmpty = 0; +api.GoogleProtobufEmpty buildGoogleProtobufEmpty() { + final o = api.GoogleProtobufEmpty(); + buildCounterGoogleProtobufEmpty++; + if (buildCounterGoogleProtobufEmpty < 3) {} + buildCounterGoogleProtobufEmpty--; + return o; } -core.List buildUnnamed62() => [ - buildGoogleIamV1Binding(), - buildGoogleIamV1Binding(), - ]; +void checkGoogleProtobufEmpty(api.GoogleProtobufEmpty o) { + buildCounterGoogleProtobufEmpty++; + if (buildCounterGoogleProtobufEmpty < 3) {} + buildCounterGoogleProtobufEmpty--; +} -void checkUnnamed62(core.List o) { - unittest.expect(o, unittest.hasLength(2)); - checkGoogleIamV1Binding(o[0]); - checkGoogleIamV1Binding(o[1]); -} - -core.int buildCounterGoogleIamV1Policy = 0; -api.GoogleIamV1Policy buildGoogleIamV1Policy() { - final o = api.GoogleIamV1Policy(); - buildCounterGoogleIamV1Policy++; - if (buildCounterGoogleIamV1Policy < 3) { - o.auditConfigs = buildUnnamed61(); - o.bindings = buildUnnamed62(); - o.etag = 'foo'; - o.version = 42; - } - buildCounterGoogleIamV1Policy--; - return o; -} - -void checkGoogleIamV1Policy(api.GoogleIamV1Policy o) { - buildCounterGoogleIamV1Policy++; - if (buildCounterGoogleIamV1Policy < 3) { - checkUnnamed61(o.auditConfigs!); - checkUnnamed62(o.bindings!); - unittest.expect( - o.etag!, - unittest.equals('foo'), - ); - unittest.expect( - o.version!, - unittest.equals(42), - ); - } - buildCounterGoogleIamV1Policy--; -} - -core.int buildCounterGoogleIamV1SetIamPolicyRequest = 0; -api.GoogleIamV1SetIamPolicyRequest buildGoogleIamV1SetIamPolicyRequest() { - final o = api.GoogleIamV1SetIamPolicyRequest(); - buildCounterGoogleIamV1SetIamPolicyRequest++; - if (buildCounterGoogleIamV1SetIamPolicyRequest < 3) { - o.policy = buildGoogleIamV1Policy(); - o.updateMask = 'foo'; - } - buildCounterGoogleIamV1SetIamPolicyRequest--; - return o; -} - -void checkGoogleIamV1SetIamPolicyRequest(api.GoogleIamV1SetIamPolicyRequest o) { - buildCounterGoogleIamV1SetIamPolicyRequest++; - if (buildCounterGoogleIamV1SetIamPolicyRequest < 3) { - checkGoogleIamV1Policy(o.policy!); - unittest.expect( - o.updateMask!, - unittest.equals('foo'), - ); - } - buildCounterGoogleIamV1SetIamPolicyRequest--; -} - -core.List buildUnnamed63() => [ - 'foo', - 'foo', - ]; - -void checkUnnamed63(core.List o) { - unittest.expect(o, unittest.hasLength(2)); - unittest.expect( - o[0], - unittest.equals('foo'), - ); - unittest.expect( - o[1], - unittest.equals('foo'), - ); -} - -core.int buildCounterGoogleIamV1TestIamPermissionsRequest = 0; -api.GoogleIamV1TestIamPermissionsRequest - buildGoogleIamV1TestIamPermissionsRequest() { - final o = api.GoogleIamV1TestIamPermissionsRequest(); - buildCounterGoogleIamV1TestIamPermissionsRequest++; - if (buildCounterGoogleIamV1TestIamPermissionsRequest < 3) { - o.permissions = buildUnnamed63(); - } - buildCounterGoogleIamV1TestIamPermissionsRequest--; - return o; -} - -void checkGoogleIamV1TestIamPermissionsRequest( - api.GoogleIamV1TestIamPermissionsRequest o) { - buildCounterGoogleIamV1TestIamPermissionsRequest++; - if (buildCounterGoogleIamV1TestIamPermissionsRequest < 3) { - checkUnnamed63(o.permissions!); - } - buildCounterGoogleIamV1TestIamPermissionsRequest--; -} - -core.List buildUnnamed64() => [ - 'foo', - 'foo', - ]; - -void checkUnnamed64(core.List o) { - unittest.expect(o, unittest.hasLength(2)); - unittest.expect( - o[0], - unittest.equals('foo'), - ); - unittest.expect( - 
o[1], - unittest.equals('foo'), - ); -} - -core.int buildCounterGoogleIamV1TestIamPermissionsResponse = 0; -api.GoogleIamV1TestIamPermissionsResponse - buildGoogleIamV1TestIamPermissionsResponse() { - final o = api.GoogleIamV1TestIamPermissionsResponse(); - buildCounterGoogleIamV1TestIamPermissionsResponse++; - if (buildCounterGoogleIamV1TestIamPermissionsResponse < 3) { - o.permissions = buildUnnamed64(); - } - buildCounterGoogleIamV1TestIamPermissionsResponse--; - return o; -} - -void checkGoogleIamV1TestIamPermissionsResponse( - api.GoogleIamV1TestIamPermissionsResponse o) { - buildCounterGoogleIamV1TestIamPermissionsResponse++; - if (buildCounterGoogleIamV1TestIamPermissionsResponse < 3) { - checkUnnamed64(o.permissions!); - } - buildCounterGoogleIamV1TestIamPermissionsResponse--; -} - -core.List buildUnnamed65() => [ - buildGoogleLongrunningOperation(), - buildGoogleLongrunningOperation(), - ]; - -void checkUnnamed65(core.List o) { - unittest.expect(o, unittest.hasLength(2)); - checkGoogleLongrunningOperation(o[0]); - checkGoogleLongrunningOperation(o[1]); -} - -core.int buildCounterGoogleLongrunningListOperationsResponse = 0; -api.GoogleLongrunningListOperationsResponse - buildGoogleLongrunningListOperationsResponse() { - final o = api.GoogleLongrunningListOperationsResponse(); - buildCounterGoogleLongrunningListOperationsResponse++; - if (buildCounterGoogleLongrunningListOperationsResponse < 3) { - o.nextPageToken = 'foo'; - o.operations = buildUnnamed65(); - } - buildCounterGoogleLongrunningListOperationsResponse--; - return o; -} - -void checkGoogleLongrunningListOperationsResponse( - api.GoogleLongrunningListOperationsResponse o) { - buildCounterGoogleLongrunningListOperationsResponse++; - if (buildCounterGoogleLongrunningListOperationsResponse < 3) { - unittest.expect( - o.nextPageToken!, - unittest.equals('foo'), - ); - checkUnnamed65(o.operations!); - } - buildCounterGoogleLongrunningListOperationsResponse--; -} - -core.Map buildUnnamed66() => { - 'x': { - 'list': [1, 2, 3], - 'bool': true, - 'string': 'foo' - }, - 'y': { - 'list': [1, 2, 3], - 'bool': true, - 'string': 'foo' - }, - }; - -void checkUnnamed66(core.Map o) { - unittest.expect(o, unittest.hasLength(2)); - var casted1 = (o['x']!) as core.Map; - unittest.expect(casted1, unittest.hasLength(3)); - unittest.expect( - casted1['list'], - unittest.equals([1, 2, 3]), - ); - unittest.expect( - casted1['bool'], - unittest.equals(true), - ); - unittest.expect( - casted1['string'], - unittest.equals('foo'), - ); - var casted2 = (o['y']!) as core.Map; - unittest.expect(casted2, unittest.hasLength(3)); - unittest.expect( - casted2['list'], - unittest.equals([1, 2, 3]), - ); - unittest.expect( - casted2['bool'], - unittest.equals(true), - ); - unittest.expect( - casted2['string'], - unittest.equals('foo'), - ); -} - -core.Map buildUnnamed67() => { - 'x': { - 'list': [1, 2, 3], - 'bool': true, - 'string': 'foo' - }, - 'y': { - 'list': [1, 2, 3], - 'bool': true, - 'string': 'foo' - }, - }; - -void checkUnnamed67(core.Map o) { - unittest.expect(o, unittest.hasLength(2)); - var casted3 = (o['x']!) as core.Map; - unittest.expect(casted3, unittest.hasLength(3)); - unittest.expect( - casted3['list'], - unittest.equals([1, 2, 3]), - ); - unittest.expect( - casted3['bool'], - unittest.equals(true), - ); - unittest.expect( - casted3['string'], - unittest.equals('foo'), - ); - var casted4 = (o['y']!) 
as core.Map; - unittest.expect(casted4, unittest.hasLength(3)); - unittest.expect( - casted4['list'], - unittest.equals([1, 2, 3]), - ); - unittest.expect( - casted4['bool'], - unittest.equals(true), - ); - unittest.expect( - casted4['string'], - unittest.equals('foo'), - ); -} - -core.int buildCounterGoogleLongrunningOperation = 0; -api.GoogleLongrunningOperation buildGoogleLongrunningOperation() { - final o = api.GoogleLongrunningOperation(); - buildCounterGoogleLongrunningOperation++; - if (buildCounterGoogleLongrunningOperation < 3) { - o.done = true; - o.error = buildGoogleRpcStatus(); - o.metadata = buildUnnamed66(); - o.name = 'foo'; - o.response = buildUnnamed67(); - } - buildCounterGoogleLongrunningOperation--; - return o; -} - -void checkGoogleLongrunningOperation(api.GoogleLongrunningOperation o) { - buildCounterGoogleLongrunningOperation++; - if (buildCounterGoogleLongrunningOperation < 3) { - unittest.expect(o.done!, unittest.isTrue); - checkGoogleRpcStatus(o.error!); - checkUnnamed66(o.metadata!); - unittest.expect( - o.name!, - unittest.equals('foo'), - ); - checkUnnamed67(o.response!); - } - buildCounterGoogleLongrunningOperation--; -} - -core.int buildCounterGoogleProtobufEmpty = 0; -api.GoogleProtobufEmpty buildGoogleProtobufEmpty() { - final o = api.GoogleProtobufEmpty(); - buildCounterGoogleProtobufEmpty++; - if (buildCounterGoogleProtobufEmpty < 3) {} - buildCounterGoogleProtobufEmpty--; - return o; -} - -void checkGoogleProtobufEmpty(api.GoogleProtobufEmpty o) { - buildCounterGoogleProtobufEmpty++; - if (buildCounterGoogleProtobufEmpty < 3) {} - buildCounterGoogleProtobufEmpty--; -} - -core.Map buildUnnamed68() => { +core.Map buildUnnamed62() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -5760,7 +5522,7 @@ core.Map buildUnnamed68() => { }, }; -void checkUnnamed68(core.Map o) { +void checkUnnamed62(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted5 = (o['x']!) 
as core.Map; unittest.expect(casted5, unittest.hasLength(3)); @@ -5792,15 +5554,15 @@ void checkUnnamed68(core.Map o) { ); } -core.List> buildUnnamed69() => [ - buildUnnamed68(), - buildUnnamed68(), +core.List> buildUnnamed63() => [ + buildUnnamed62(), + buildUnnamed62(), ]; -void checkUnnamed69(core.List> o) { +void checkUnnamed63(core.List> o) { unittest.expect(o, unittest.hasLength(2)); - checkUnnamed68(o[0]); - checkUnnamed68(o[1]); + checkUnnamed62(o[0]); + checkUnnamed62(o[1]); } core.int buildCounterGoogleRpcStatus = 0; @@ -5809,7 +5571,7 @@ api.GoogleRpcStatus buildGoogleRpcStatus() { buildCounterGoogleRpcStatus++; if (buildCounterGoogleRpcStatus < 3) { o.code = 42; - o.details = buildUnnamed69(); + o.details = buildUnnamed63(); o.message = 'foo'; } buildCounterGoogleRpcStatus--; @@ -5823,7 +5585,7 @@ void checkGoogleRpcStatus(api.GoogleRpcStatus o) { o.code!, unittest.equals(42), ); - checkUnnamed69(o.details!); + checkUnnamed63(o.details!); unittest.expect( o.message!, unittest.equals('foo'), @@ -5832,43 +5594,6 @@ void checkGoogleRpcStatus(api.GoogleRpcStatus o) { buildCounterGoogleRpcStatus--; } -core.int buildCounterGoogleTypeExpr = 0; -api.GoogleTypeExpr buildGoogleTypeExpr() { - final o = api.GoogleTypeExpr(); - buildCounterGoogleTypeExpr++; - if (buildCounterGoogleTypeExpr < 3) { - o.description = 'foo'; - o.expression = 'foo'; - o.location = 'foo'; - o.title = 'foo'; - } - buildCounterGoogleTypeExpr--; - return o; -} - -void checkGoogleTypeExpr(api.GoogleTypeExpr o) { - buildCounterGoogleTypeExpr++; - if (buildCounterGoogleTypeExpr < 3) { - unittest.expect( - o.description!, - unittest.equals('foo'), - ); - unittest.expect( - o.expression!, - unittest.equals('foo'), - ); - unittest.expect( - o.location!, - unittest.equals('foo'), - ); - unittest.expect( - o.title!, - unittest.equals('foo'), - ); - } - buildCounterGoogleTypeExpr--; -} - void main() { unittest.group('obj-schema-GoogleCloudContactcenterinsightsV1Analysis', () { unittest.test('to-json--from-json', () async { @@ -7405,77 +7130,7 @@ void main() { }); }); - unittest.group('obj-schema-GoogleIamV1AuditConfig', () { - unittest.test('to-json--from-json', () async { - final o = buildGoogleIamV1AuditConfig(); - final oJson = convert.jsonDecode(convert.jsonEncode(o)); - final od = api.GoogleIamV1AuditConfig.fromJson( - oJson as core.Map); - checkGoogleIamV1AuditConfig(od); - }); - }); - - unittest.group('obj-schema-GoogleIamV1AuditLogConfig', () { - unittest.test('to-json--from-json', () async { - final o = buildGoogleIamV1AuditLogConfig(); - final oJson = convert.jsonDecode(convert.jsonEncode(o)); - final od = api.GoogleIamV1AuditLogConfig.fromJson( - oJson as core.Map); - checkGoogleIamV1AuditLogConfig(od); - }); - }); - - unittest.group('obj-schema-GoogleIamV1Binding', () { - unittest.test('to-json--from-json', () async { - final o = buildGoogleIamV1Binding(); - final oJson = convert.jsonDecode(convert.jsonEncode(o)); - final od = api.GoogleIamV1Binding.fromJson( - oJson as core.Map); - checkGoogleIamV1Binding(od); - }); - }); - - unittest.group('obj-schema-GoogleIamV1Policy', () { - unittest.test('to-json--from-json', () async { - final o = buildGoogleIamV1Policy(); - final oJson = convert.jsonDecode(convert.jsonEncode(o)); - final od = api.GoogleIamV1Policy.fromJson( - oJson as core.Map); - checkGoogleIamV1Policy(od); - }); - }); - - unittest.group('obj-schema-GoogleIamV1SetIamPolicyRequest', () { - unittest.test('to-json--from-json', () async { - final o = buildGoogleIamV1SetIamPolicyRequest(); - final oJson = 
convert.jsonDecode(convert.jsonEncode(o)); - final od = api.GoogleIamV1SetIamPolicyRequest.fromJson( - oJson as core.Map); - checkGoogleIamV1SetIamPolicyRequest(od); - }); - }); - - unittest.group('obj-schema-GoogleIamV1TestIamPermissionsRequest', () { - unittest.test('to-json--from-json', () async { - final o = buildGoogleIamV1TestIamPermissionsRequest(); - final oJson = convert.jsonDecode(convert.jsonEncode(o)); - final od = api.GoogleIamV1TestIamPermissionsRequest.fromJson( - oJson as core.Map); - checkGoogleIamV1TestIamPermissionsRequest(od); - }); - }); - - unittest.group('obj-schema-GoogleIamV1TestIamPermissionsResponse', () { - unittest.test('to-json--from-json', () async { - final o = buildGoogleIamV1TestIamPermissionsResponse(); - final oJson = convert.jsonDecode(convert.jsonEncode(o)); - final od = api.GoogleIamV1TestIamPermissionsResponse.fromJson( - oJson as core.Map); - checkGoogleIamV1TestIamPermissionsResponse(od); - }); - }); - - unittest.group('obj-schema-GoogleLongrunningListOperationsResponse', () { + unittest.group('obj-schema-GoogleLongrunningListOperationsResponse', () { unittest.test('to-json--from-json', () async { final o = buildGoogleLongrunningListOperationsResponse(); final oJson = convert.jsonDecode(convert.jsonEncode(o)); @@ -7515,16 +7170,6 @@ void main() { }); }); - unittest.group('obj-schema-GoogleTypeExpr', () { - unittest.test('to-json--from-json', () async { - final o = buildGoogleTypeExpr(); - final oJson = convert.jsonDecode(convert.jsonEncode(o)); - final od = api.GoogleTypeExpr.fromJson( - oJson as core.Map); - checkGoogleTypeExpr(od); - }); - }); - unittest.group('resource-ProjectsLocationsResource', () { unittest.test('method--bulkDownloadFeedbackLabels', () async { final mock = HttpServerMock(); @@ -8268,78 +7913,15 @@ void main() { }); unittest.group( - 'resource-ProjectsLocationsAuthorizedViewSetAuthorizedViewResource', () { - unittest.test('method--calculateStats', () async { - final mock = HttpServerMock(); - final res = api.ContactcenterinsightsApi(mock) - .projects - .locations - .authorizedViewSet - .authorizedView; - final arg_location = 'foo'; - final arg_filter = 'foo'; - final arg_$fields = 'foo'; - mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final path = req.url.path; - var pathOffset = 0; - core.int index; - core.String subPart; - unittest.expect( - path.substring(pathOffset, pathOffset + 1), - unittest.equals('/'), - ); - pathOffset += 1; - unittest.expect( - path.substring(pathOffset, pathOffset + 3), - unittest.equals('v1/'), - ); - pathOffset += 3; - // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; - - final query = req.url.query; - var queryOffset = 0; - final queryMap = >{}; - void addQueryParam(core.String n, core.String v) => - queryMap.putIfAbsent(n, () => []).add(v); - - if (query.isNotEmpty) { - for (var part in query.split('&')) { - final keyValue = part.split('='); - addQueryParam( - core.Uri.decodeQueryComponent(keyValue[0]), - core.Uri.decodeQueryComponent(keyValue[1]), - ); - } - } - unittest.expect( - queryMap['filter']!.first, - unittest.equals(arg_filter), - ); - unittest.expect( - queryMap['fields']!.first, - unittest.equals(arg_$fields), - ); - - final h = { - 'content-type': 'application/json; charset=utf-8', - }; - final resp = convert.json.encode( - buildGoogleCloudContactcenterinsightsV1CalculateStatsResponse()); - return async.Future.value(stringResponse(200, h, resp)); - }), true); - final response = await res.calculateStats(arg_location, - 
filter: arg_filter, $fields: arg_$fields); - checkGoogleCloudContactcenterinsightsV1CalculateStatsResponse(response - as api.GoogleCloudContactcenterinsightsV1CalculateStatsResponse); - }); - + 'resource-ProjectsLocationsAuthorizedViewSetsAuthorizedViewsResource', + () { unittest.test('method--queryMetrics', () async { final mock = HttpServerMock(); final res = api.ContactcenterinsightsApi(mock) .projects .locations - .authorizedViewSet - .authorizedView; + .authorizedViewSets + .authorizedViews; final arg_request = buildGoogleCloudContactcenterinsightsV1QueryMetricsRequest(); final arg_location = 'foo'; @@ -8400,28 +7982,20 @@ void main() { }); unittest.group( - 'resource-ProjectsLocationsAuthorizedViewSetAuthorizedViewsConversationsFeedbackLabelsResource', + 'resource-ProjectsLocationsAuthorizedViewSetsAuthorizedViewsConversationsResource', () { - unittest.test('method--create', () async { + unittest.test('method--calculateStats', () async { final mock = HttpServerMock(); final res = api.ContactcenterinsightsApi(mock) .projects .locations - .authorizedViewSet + .authorizedViewSets .authorizedViews - .conversations - .feedbackLabels; - final arg_request = - buildGoogleCloudContactcenterinsightsV1FeedbackLabel(); - final arg_parent = 'foo'; - final arg_feedbackLabelId = 'foo'; + .conversations; + final arg_location = 'foo'; + final arg_filter = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = - api.GoogleCloudContactcenterinsightsV1FeedbackLabel.fromJson( - json as core.Map); - checkGoogleCloudContactcenterinsightsV1FeedbackLabel(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -8454,70 +8028,9 @@ void main() { } } unittest.expect( - queryMap['feedbackLabelId']!.first, - unittest.equals(arg_feedbackLabelId), - ); - unittest.expect( - queryMap['fields']!.first, - unittest.equals(arg_$fields), - ); - - final h = { - 'content-type': 'application/json; charset=utf-8', - }; - final resp = convert.json - .encode(buildGoogleCloudContactcenterinsightsV1FeedbackLabel()); - return async.Future.value(stringResponse(200, h, resp)); - }), true); - final response = await res.create(arg_request, arg_parent, - feedbackLabelId: arg_feedbackLabelId, $fields: arg_$fields); - checkGoogleCloudContactcenterinsightsV1FeedbackLabel( - response as api.GoogleCloudContactcenterinsightsV1FeedbackLabel); - }); - - unittest.test('method--delete', () async { - final mock = HttpServerMock(); - final res = api.ContactcenterinsightsApi(mock) - .projects - .locations - .authorizedViewSet - .authorizedViews - .conversations - .feedbackLabels; - final arg_name = 'foo'; - final arg_$fields = 'foo'; - mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final path = req.url.path; - var pathOffset = 0; - core.int index; - core.String subPart; - unittest.expect( - path.substring(pathOffset, pathOffset + 1), - unittest.equals('/'), - ); - pathOffset += 1; - unittest.expect( - path.substring(pathOffset, pathOffset + 3), - unittest.equals('v1/'), + queryMap['filter']!.first, + unittest.equals(arg_filter), ); - pathOffset += 3; - // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; - - final query = req.url.query; - var queryOffset = 0; - final queryMap = >{}; - void addQueryParam(core.String n, core.String v) => - queryMap.putIfAbsent(n, () => []).add(v); - - if (query.isNotEmpty) { - for (var part in query.split('&')) { - final keyValue = part.split('='); - addQueryParam( - 
core.Uri.decodeQueryComponent(keyValue[0]), - core.Uri.decodeQueryComponent(keyValue[1]), - ); - } - } unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -8526,1330 +8039,14 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleProtobufEmpty()); + final resp = convert.json.encode( + buildGoogleCloudContactcenterinsightsV1CalculateStatsResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.delete(arg_name, $fields: arg_$fields); - checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); - }); - - unittest.test('method--get', () async { - final mock = HttpServerMock(); - final res = api.ContactcenterinsightsApi(mock) - .projects - .locations - .authorizedViewSet - .authorizedViews - .conversations - .feedbackLabels; - final arg_name = 'foo'; - final arg_$fields = 'foo'; - mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final path = req.url.path; - var pathOffset = 0; - core.int index; - core.String subPart; - unittest.expect( - path.substring(pathOffset, pathOffset + 1), - unittest.equals('/'), - ); - pathOffset += 1; - unittest.expect( - path.substring(pathOffset, pathOffset + 3), - unittest.equals('v1/'), - ); - pathOffset += 3; - // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; - - final query = req.url.query; - var queryOffset = 0; - final queryMap = >{}; - void addQueryParam(core.String n, core.String v) => - queryMap.putIfAbsent(n, () => []).add(v); - - if (query.isNotEmpty) { - for (var part in query.split('&')) { - final keyValue = part.split('='); - addQueryParam( - core.Uri.decodeQueryComponent(keyValue[0]), - core.Uri.decodeQueryComponent(keyValue[1]), - ); - } - } - unittest.expect( - queryMap['fields']!.first, - unittest.equals(arg_$fields), - ); - - final h = { - 'content-type': 'application/json; charset=utf-8', - }; - final resp = convert.json - .encode(buildGoogleCloudContactcenterinsightsV1FeedbackLabel()); - return async.Future.value(stringResponse(200, h, resp)); - }), true); - final response = await res.get(arg_name, $fields: arg_$fields); - checkGoogleCloudContactcenterinsightsV1FeedbackLabel( - response as api.GoogleCloudContactcenterinsightsV1FeedbackLabel); - }); - - unittest.test('method--list', () async { - final mock = HttpServerMock(); - final res = api.ContactcenterinsightsApi(mock) - .projects - .locations - .authorizedViewSet - .authorizedViews - .conversations - .feedbackLabels; - final arg_parent = 'foo'; - final arg_filter = 'foo'; - final arg_pageSize = 42; - final arg_pageToken = 'foo'; - final arg_$fields = 'foo'; - mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final path = req.url.path; - var pathOffset = 0; - core.int index; - core.String subPart; - unittest.expect( - path.substring(pathOffset, pathOffset + 1), - unittest.equals('/'), - ); - pathOffset += 1; - unittest.expect( - path.substring(pathOffset, pathOffset + 3), - unittest.equals('v1/'), - ); - pathOffset += 3; - // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; - - final query = req.url.query; - var queryOffset = 0; - final queryMap = >{}; - void addQueryParam(core.String n, core.String v) => - queryMap.putIfAbsent(n, () => []).add(v); - - if (query.isNotEmpty) { - for (var part in query.split('&')) { - final keyValue = part.split('='); - addQueryParam( - core.Uri.decodeQueryComponent(keyValue[0]), - 
core.Uri.decodeQueryComponent(keyValue[1]), - ); - } - } - unittest.expect( - queryMap['filter']!.first, - unittest.equals(arg_filter), - ); - unittest.expect( - core.int.parse(queryMap['pageSize']!.first), - unittest.equals(arg_pageSize), - ); - unittest.expect( - queryMap['pageToken']!.first, - unittest.equals(arg_pageToken), - ); - unittest.expect( - queryMap['fields']!.first, - unittest.equals(arg_$fields), - ); - - final h = { - 'content-type': 'application/json; charset=utf-8', - }; - final resp = convert.json.encode( - buildGoogleCloudContactcenterinsightsV1ListFeedbackLabelsResponse()); - return async.Future.value(stringResponse(200, h, resp)); - }), true); - final response = await res.list(arg_parent, - filter: arg_filter, - pageSize: arg_pageSize, - pageToken: arg_pageToken, - $fields: arg_$fields); - checkGoogleCloudContactcenterinsightsV1ListFeedbackLabelsResponse(response - as api.GoogleCloudContactcenterinsightsV1ListFeedbackLabelsResponse); - }); - - unittest.test('method--patch', () async { - final mock = HttpServerMock(); - final res = api.ContactcenterinsightsApi(mock) - .projects - .locations - .authorizedViewSet - .authorizedViews - .conversations - .feedbackLabels; - final arg_request = - buildGoogleCloudContactcenterinsightsV1FeedbackLabel(); - final arg_name = 'foo'; - final arg_updateMask = 'foo'; - final arg_$fields = 'foo'; - mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = - api.GoogleCloudContactcenterinsightsV1FeedbackLabel.fromJson( - json as core.Map); - checkGoogleCloudContactcenterinsightsV1FeedbackLabel(obj); - - final path = req.url.path; - var pathOffset = 0; - core.int index; - core.String subPart; - unittest.expect( - path.substring(pathOffset, pathOffset + 1), - unittest.equals('/'), - ); - pathOffset += 1; - unittest.expect( - path.substring(pathOffset, pathOffset + 3), - unittest.equals('v1/'), - ); - pathOffset += 3; - // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; - - final query = req.url.query; - var queryOffset = 0; - final queryMap = >{}; - void addQueryParam(core.String n, core.String v) => - queryMap.putIfAbsent(n, () => []).add(v); - - if (query.isNotEmpty) { - for (var part in query.split('&')) { - final keyValue = part.split('='); - addQueryParam( - core.Uri.decodeQueryComponent(keyValue[0]), - core.Uri.decodeQueryComponent(keyValue[1]), - ); - } - } - unittest.expect( - queryMap['updateMask']!.first, - unittest.equals(arg_updateMask), - ); - unittest.expect( - queryMap['fields']!.first, - unittest.equals(arg_$fields), - ); - - final h = { - 'content-type': 'application/json; charset=utf-8', - }; - final resp = convert.json - .encode(buildGoogleCloudContactcenterinsightsV1FeedbackLabel()); - return async.Future.value(stringResponse(200, h, resp)); - }), true); - final response = await res.patch(arg_request, arg_name, - updateMask: arg_updateMask, $fields: arg_$fields); - checkGoogleCloudContactcenterinsightsV1FeedbackLabel( - response as api.GoogleCloudContactcenterinsightsV1FeedbackLabel); - }); - }); - - unittest.group( - 'resource-ProjectsLocationsAuthorizedViewSetsAuthorizedViewsResource', - () { - unittest.test('method--getIamPolicy', () async { - final mock = HttpServerMock(); - final res = api.ContactcenterinsightsApi(mock) - .projects - .locations - .authorizedViewSets - .authorizedViews; - final arg_resource = 'foo'; - final arg_options_requestedPolicyVersion = 42; - final arg_$fields = 'foo'; - mock.register(unittest.expectAsync2((http.BaseRequest req, 
json) { - final path = req.url.path; - var pathOffset = 0; - core.int index; - core.String subPart; - unittest.expect( - path.substring(pathOffset, pathOffset + 1), - unittest.equals('/'), - ); - pathOffset += 1; - unittest.expect( - path.substring(pathOffset, pathOffset + 3), - unittest.equals('v1/'), - ); - pathOffset += 3; - // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; - - final query = req.url.query; - var queryOffset = 0; - final queryMap = >{}; - void addQueryParam(core.String n, core.String v) => - queryMap.putIfAbsent(n, () => []).add(v); - - if (query.isNotEmpty) { - for (var part in query.split('&')) { - final keyValue = part.split('='); - addQueryParam( - core.Uri.decodeQueryComponent(keyValue[0]), - core.Uri.decodeQueryComponent(keyValue[1]), - ); - } - } - unittest.expect( - core.int.parse(queryMap['options.requestedPolicyVersion']!.first), - unittest.equals(arg_options_requestedPolicyVersion), - ); - unittest.expect( - queryMap['fields']!.first, - unittest.equals(arg_$fields), - ); - - final h = { - 'content-type': 'application/json; charset=utf-8', - }; - final resp = convert.json.encode(buildGoogleIamV1Policy()); - return async.Future.value(stringResponse(200, h, resp)); - }), true); - final response = await res.getIamPolicy(arg_resource, - options_requestedPolicyVersion: arg_options_requestedPolicyVersion, - $fields: arg_$fields); - checkGoogleIamV1Policy(response as api.GoogleIamV1Policy); - }); - - unittest.test('method--setIamPolicy', () async { - final mock = HttpServerMock(); - final res = api.ContactcenterinsightsApi(mock) - .projects - .locations - .authorizedViewSets - .authorizedViews; - final arg_request = buildGoogleIamV1SetIamPolicyRequest(); - final arg_resource = 'foo'; - final arg_$fields = 'foo'; - mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleIamV1SetIamPolicyRequest.fromJson( - json as core.Map); - checkGoogleIamV1SetIamPolicyRequest(obj); - - final path = req.url.path; - var pathOffset = 0; - core.int index; - core.String subPart; - unittest.expect( - path.substring(pathOffset, pathOffset + 1), - unittest.equals('/'), - ); - pathOffset += 1; - unittest.expect( - path.substring(pathOffset, pathOffset + 3), - unittest.equals('v1/'), - ); - pathOffset += 3; - // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; - - final query = req.url.query; - var queryOffset = 0; - final queryMap = >{}; - void addQueryParam(core.String n, core.String v) => - queryMap.putIfAbsent(n, () => []).add(v); - - if (query.isNotEmpty) { - for (var part in query.split('&')) { - final keyValue = part.split('='); - addQueryParam( - core.Uri.decodeQueryComponent(keyValue[0]), - core.Uri.decodeQueryComponent(keyValue[1]), - ); - } - } - unittest.expect( - queryMap['fields']!.first, - unittest.equals(arg_$fields), - ); - - final h = { - 'content-type': 'application/json; charset=utf-8', - }; - final resp = convert.json.encode(buildGoogleIamV1Policy()); - return async.Future.value(stringResponse(200, h, resp)); - }), true); - final response = await res.setIamPolicy(arg_request, arg_resource, - $fields: arg_$fields); - checkGoogleIamV1Policy(response as api.GoogleIamV1Policy); - }); - - unittest.test('method--testIamPermissions', () async { - final mock = HttpServerMock(); - final res = api.ContactcenterinsightsApi(mock) - .projects - .locations - .authorizedViewSets - .authorizedViews; - final arg_request = buildGoogleIamV1TestIamPermissionsRequest(); - final 
arg_resource = 'foo'; - final arg_$fields = 'foo'; - mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleIamV1TestIamPermissionsRequest.fromJson( - json as core.Map); - checkGoogleIamV1TestIamPermissionsRequest(obj); - - final path = req.url.path; - var pathOffset = 0; - core.int index; - core.String subPart; - unittest.expect( - path.substring(pathOffset, pathOffset + 1), - unittest.equals('/'), - ); - pathOffset += 1; - unittest.expect( - path.substring(pathOffset, pathOffset + 3), - unittest.equals('v1/'), - ); - pathOffset += 3; - // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; - - final query = req.url.query; - var queryOffset = 0; - final queryMap = >{}; - void addQueryParam(core.String n, core.String v) => - queryMap.putIfAbsent(n, () => []).add(v); - - if (query.isNotEmpty) { - for (var part in query.split('&')) { - final keyValue = part.split('='); - addQueryParam( - core.Uri.decodeQueryComponent(keyValue[0]), - core.Uri.decodeQueryComponent(keyValue[1]), - ); - } - } - unittest.expect( - queryMap['fields']!.first, - unittest.equals(arg_$fields), - ); - - final h = { - 'content-type': 'application/json; charset=utf-8', - }; - final resp = - convert.json.encode(buildGoogleIamV1TestIamPermissionsResponse()); - return async.Future.value(stringResponse(200, h, resp)); - }), true); - final response = await res.testIamPermissions(arg_request, arg_resource, - $fields: arg_$fields); - checkGoogleIamV1TestIamPermissionsResponse( - response as api.GoogleIamV1TestIamPermissionsResponse); - }); - }); - - unittest.group( - 'resource-ProjectsLocationsAuthorizedViewSetsAuthorizedViewsConversationsResource', - () { - unittest.test('method--create', () async { - final mock = HttpServerMock(); - final res = api.ContactcenterinsightsApi(mock) - .projects - .locations - .authorizedViewSets - .authorizedViews - .conversations; - final arg_request = buildGoogleCloudContactcenterinsightsV1Conversation(); - final arg_parent = 'foo'; - final arg_conversationId = 'foo'; - final arg_$fields = 'foo'; - mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleCloudContactcenterinsightsV1Conversation.fromJson( - json as core.Map); - checkGoogleCloudContactcenterinsightsV1Conversation(obj); - - final path = req.url.path; - var pathOffset = 0; - core.int index; - core.String subPart; - unittest.expect( - path.substring(pathOffset, pathOffset + 1), - unittest.equals('/'), - ); - pathOffset += 1; - unittest.expect( - path.substring(pathOffset, pathOffset + 3), - unittest.equals('v1/'), - ); - pathOffset += 3; - // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; - - final query = req.url.query; - var queryOffset = 0; - final queryMap = >{}; - void addQueryParam(core.String n, core.String v) => - queryMap.putIfAbsent(n, () => []).add(v); - - if (query.isNotEmpty) { - for (var part in query.split('&')) { - final keyValue = part.split('='); - addQueryParam( - core.Uri.decodeQueryComponent(keyValue[0]), - core.Uri.decodeQueryComponent(keyValue[1]), - ); - } - } - unittest.expect( - queryMap['conversationId']!.first, - unittest.equals(arg_conversationId), - ); - unittest.expect( - queryMap['fields']!.first, - unittest.equals(arg_$fields), - ); - - final h = { - 'content-type': 'application/json; charset=utf-8', - }; - final resp = convert.json - .encode(buildGoogleCloudContactcenterinsightsV1Conversation()); - return async.Future.value(stringResponse(200, h, resp)); 
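// Note: the mock handlers in these generated tests all repeat the same
// query-string parsing idiom before asserting on individual parameters.
// The sketch below isolates that idiom; `parseQuery` is a hypothetical
// helper written for illustration only and does not exist in the
// generated library.
Map<String, List<String>> parseQuery(String query) {
  // Collect each decoded key into a list of decoded values, exactly as the
  // inline `addQueryParam` closures in the handlers above do.
  final queryMap = <String, List<String>>{};
  if (query.isEmpty) return queryMap;
  for (final part in query.split('&')) {
    final keyValue = part.split('=');
    queryMap
        .putIfAbsent(Uri.decodeQueryComponent(keyValue[0]), () => [])
        .add(Uri.decodeQueryComponent(keyValue[1]));
  }
  return queryMap;
}
// Example: parseQuery('conversationId=foo&fields=bar')
//   => {'conversationId': ['foo'], 'fields': ['bar']}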
- }), true); - final response = await res.create(arg_request, arg_parent, - conversationId: arg_conversationId, $fields: arg_$fields); - checkGoogleCloudContactcenterinsightsV1Conversation( - response as api.GoogleCloudContactcenterinsightsV1Conversation); - }); - - unittest.test('method--delete', () async { - final mock = HttpServerMock(); - final res = api.ContactcenterinsightsApi(mock) - .projects - .locations - .authorizedViewSets - .authorizedViews - .conversations; - final arg_name = 'foo'; - final arg_force = true; - final arg_$fields = 'foo'; - mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final path = req.url.path; - var pathOffset = 0; - core.int index; - core.String subPart; - unittest.expect( - path.substring(pathOffset, pathOffset + 1), - unittest.equals('/'), - ); - pathOffset += 1; - unittest.expect( - path.substring(pathOffset, pathOffset + 3), - unittest.equals('v1/'), - ); - pathOffset += 3; - // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; - - final query = req.url.query; - var queryOffset = 0; - final queryMap = >{}; - void addQueryParam(core.String n, core.String v) => - queryMap.putIfAbsent(n, () => []).add(v); - - if (query.isNotEmpty) { - for (var part in query.split('&')) { - final keyValue = part.split('='); - addQueryParam( - core.Uri.decodeQueryComponent(keyValue[0]), - core.Uri.decodeQueryComponent(keyValue[1]), - ); - } - } - unittest.expect( - queryMap['force']!.first, - unittest.equals('$arg_force'), - ); - unittest.expect( - queryMap['fields']!.first, - unittest.equals(arg_$fields), - ); - - final h = { - 'content-type': 'application/json; charset=utf-8', - }; - final resp = convert.json.encode(buildGoogleProtobufEmpty()); - return async.Future.value(stringResponse(200, h, resp)); - }), true); - final response = - await res.delete(arg_name, force: arg_force, $fields: arg_$fields); - checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); - }); - - unittest.test('method--get', () async { - final mock = HttpServerMock(); - final res = api.ContactcenterinsightsApi(mock) - .projects - .locations - .authorizedViewSets - .authorizedViews - .conversations; - final arg_name = 'foo'; - final arg_view = 'foo'; - final arg_$fields = 'foo'; - mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final path = req.url.path; - var pathOffset = 0; - core.int index; - core.String subPart; - unittest.expect( - path.substring(pathOffset, pathOffset + 1), - unittest.equals('/'), - ); - pathOffset += 1; - unittest.expect( - path.substring(pathOffset, pathOffset + 3), - unittest.equals('v1/'), - ); - pathOffset += 3; - // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; - - final query = req.url.query; - var queryOffset = 0; - final queryMap = >{}; - void addQueryParam(core.String n, core.String v) => - queryMap.putIfAbsent(n, () => []).add(v); - - if (query.isNotEmpty) { - for (var part in query.split('&')) { - final keyValue = part.split('='); - addQueryParam( - core.Uri.decodeQueryComponent(keyValue[0]), - core.Uri.decodeQueryComponent(keyValue[1]), - ); - } - } - unittest.expect( - queryMap['view']!.first, - unittest.equals(arg_view), - ); - unittest.expect( - queryMap['fields']!.first, - unittest.equals(arg_$fields), - ); - - final h = { - 'content-type': 'application/json; charset=utf-8', - }; - final resp = convert.json - .encode(buildGoogleCloudContactcenterinsightsV1Conversation()); - return async.Future.value(stringResponse(200, h, resp)); - 
}), true); - final response = - await res.get(arg_name, view: arg_view, $fields: arg_$fields); - checkGoogleCloudContactcenterinsightsV1Conversation( - response as api.GoogleCloudContactcenterinsightsV1Conversation); - }); - - unittest.test('method--list', () async { - final mock = HttpServerMock(); - final res = api.ContactcenterinsightsApi(mock) - .projects - .locations - .authorizedViewSets - .authorizedViews - .conversations; - final arg_parent = 'foo'; - final arg_filter = 'foo'; - final arg_orderBy = 'foo'; - final arg_pageSize = 42; - final arg_pageToken = 'foo'; - final arg_view = 'foo'; - final arg_$fields = 'foo'; - mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final path = req.url.path; - var pathOffset = 0; - core.int index; - core.String subPart; - unittest.expect( - path.substring(pathOffset, pathOffset + 1), - unittest.equals('/'), - ); - pathOffset += 1; - unittest.expect( - path.substring(pathOffset, pathOffset + 3), - unittest.equals('v1/'), - ); - pathOffset += 3; - // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; - - final query = req.url.query; - var queryOffset = 0; - final queryMap = >{}; - void addQueryParam(core.String n, core.String v) => - queryMap.putIfAbsent(n, () => []).add(v); - - if (query.isNotEmpty) { - for (var part in query.split('&')) { - final keyValue = part.split('='); - addQueryParam( - core.Uri.decodeQueryComponent(keyValue[0]), - core.Uri.decodeQueryComponent(keyValue[1]), - ); - } - } - unittest.expect( - queryMap['filter']!.first, - unittest.equals(arg_filter), - ); - unittest.expect( - queryMap['orderBy']!.first, - unittest.equals(arg_orderBy), - ); - unittest.expect( - core.int.parse(queryMap['pageSize']!.first), - unittest.equals(arg_pageSize), - ); - unittest.expect( - queryMap['pageToken']!.first, - unittest.equals(arg_pageToken), - ); - unittest.expect( - queryMap['view']!.first, - unittest.equals(arg_view), - ); - unittest.expect( - queryMap['fields']!.first, - unittest.equals(arg_$fields), - ); - - final h = { - 'content-type': 'application/json; charset=utf-8', - }; - final resp = convert.json.encode( - buildGoogleCloudContactcenterinsightsV1ListConversationsResponse()); - return async.Future.value(stringResponse(200, h, resp)); - }), true); - final response = await res.list(arg_parent, - filter: arg_filter, - orderBy: arg_orderBy, - pageSize: arg_pageSize, - pageToken: arg_pageToken, - view: arg_view, - $fields: arg_$fields); - checkGoogleCloudContactcenterinsightsV1ListConversationsResponse(response - as api.GoogleCloudContactcenterinsightsV1ListConversationsResponse); - }); - - unittest.test('method--patch', () async { - final mock = HttpServerMock(); - final res = api.ContactcenterinsightsApi(mock) - .projects - .locations - .authorizedViewSets - .authorizedViews - .conversations; - final arg_request = buildGoogleCloudContactcenterinsightsV1Conversation(); - final arg_name = 'foo'; - final arg_updateMask = 'foo'; - final arg_$fields = 'foo'; - mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleCloudContactcenterinsightsV1Conversation.fromJson( - json as core.Map); - checkGoogleCloudContactcenterinsightsV1Conversation(obj); - - final path = req.url.path; - var pathOffset = 0; - core.int index; - core.String subPart; - unittest.expect( - path.substring(pathOffset, pathOffset + 1), - unittest.equals('/'), - ); - pathOffset += 1; - unittest.expect( - path.substring(pathOffset, pathOffset + 3), - unittest.equals('v1/'), - ); - 
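// Note: every `obj-schema-*` group in this file exercises the same
// to-json / from-json round trip that the handlers above rely on when they
// decode request bodies. A minimal, self-contained sketch of that pattern
// follows; `ExampleSchema` is a hypothetical stand-in for any generated
// schema class, not a type from the library itself.
import 'dart:convert' as convert;

class ExampleSchema {
  String? name;
  ExampleSchema({this.name});
  ExampleSchema.fromJson(Map<String, dynamic> json)
      : name = json['name'] as String?;
  Map<String, dynamic> toJson() => {if (name != null) 'name': name};
}

void roundTripExample() {
  final o = ExampleSchema(name: 'foo');
  // Encode to a JSON string and decode it back, mirroring the generated
  // `to-json--from-json` tests, then rebuild the object and check its fields.
  final oJson = convert.jsonDecode(convert.jsonEncode(o));
  final od = ExampleSchema.fromJson(oJson as Map<String, dynamic>);
  assert(od.name == 'foo');
}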
pathOffset += 3; - // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; - - final query = req.url.query; - var queryOffset = 0; - final queryMap = >{}; - void addQueryParam(core.String n, core.String v) => - queryMap.putIfAbsent(n, () => []).add(v); - - if (query.isNotEmpty) { - for (var part in query.split('&')) { - final keyValue = part.split('='); - addQueryParam( - core.Uri.decodeQueryComponent(keyValue[0]), - core.Uri.decodeQueryComponent(keyValue[1]), - ); - } - } - unittest.expect( - queryMap['updateMask']!.first, - unittest.equals(arg_updateMask), - ); - unittest.expect( - queryMap['fields']!.first, - unittest.equals(arg_$fields), - ); - - final h = { - 'content-type': 'application/json; charset=utf-8', - }; - final resp = convert.json - .encode(buildGoogleCloudContactcenterinsightsV1Conversation()); - return async.Future.value(stringResponse(200, h, resp)); - }), true); - final response = await res.patch(arg_request, arg_name, - updateMask: arg_updateMask, $fields: arg_$fields); - checkGoogleCloudContactcenterinsightsV1Conversation( - response as api.GoogleCloudContactcenterinsightsV1Conversation); - }); - - unittest.test('method--upload', () async { - final mock = HttpServerMock(); - final res = api.ContactcenterinsightsApi(mock) - .projects - .locations - .authorizedViewSets - .authorizedViews - .conversations; - final arg_request = buildGoogleCloudContactcenterinsightsV1Conversation(); - final arg_parent = 'foo'; - final arg_conversationId = 'foo'; - final arg_redactionConfig_deidentifyTemplate = 'foo'; - final arg_redactionConfig_inspectTemplate = 'foo'; - final arg_speechConfig_speechRecognizer = 'foo'; - final arg_$fields = 'foo'; - mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleCloudContactcenterinsightsV1Conversation.fromJson( - json as core.Map); - checkGoogleCloudContactcenterinsightsV1Conversation(obj); - - final path = req.url.path; - var pathOffset = 0; - core.int index; - core.String subPart; - unittest.expect( - path.substring(pathOffset, pathOffset + 1), - unittest.equals('/'), - ); - pathOffset += 1; - unittest.expect( - path.substring(pathOffset, pathOffset + 3), - unittest.equals('v1/'), - ); - pathOffset += 3; - // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; - - final query = req.url.query; - var queryOffset = 0; - final queryMap = >{}; - void addQueryParam(core.String n, core.String v) => - queryMap.putIfAbsent(n, () => []).add(v); - - if (query.isNotEmpty) { - for (var part in query.split('&')) { - final keyValue = part.split('='); - addQueryParam( - core.Uri.decodeQueryComponent(keyValue[0]), - core.Uri.decodeQueryComponent(keyValue[1]), - ); - } - } - unittest.expect( - queryMap['conversationId']!.first, - unittest.equals(arg_conversationId), - ); - unittest.expect( - queryMap['redactionConfig.deidentifyTemplate']!.first, - unittest.equals(arg_redactionConfig_deidentifyTemplate), - ); - unittest.expect( - queryMap['redactionConfig.inspectTemplate']!.first, - unittest.equals(arg_redactionConfig_inspectTemplate), - ); - unittest.expect( - queryMap['speechConfig.speechRecognizer']!.first, - unittest.equals(arg_speechConfig_speechRecognizer), - ); - unittest.expect( - queryMap['fields']!.first, - unittest.equals(arg_$fields), - ); - - final h = { - 'content-type': 'application/json; charset=utf-8', - }; - final resp = convert.json.encode(buildGoogleLongrunningOperation()); - return async.Future.value(stringResponse(200, h, resp)); - }), 
true); - final response = await res.upload(arg_request, arg_parent, - conversationId: arg_conversationId, - redactionConfig_deidentifyTemplate: - arg_redactionConfig_deidentifyTemplate, - redactionConfig_inspectTemplate: arg_redactionConfig_inspectTemplate, - speechConfig_speechRecognizer: arg_speechConfig_speechRecognizer, - $fields: arg_$fields); - checkGoogleLongrunningOperation( - response as api.GoogleLongrunningOperation); - }); - }); - - unittest.group( - 'resource-ProjectsLocationsAuthorizedViewSetsAuthorizedViewsConversationsAnalysesResource', - () { - unittest.test('method--create', () async { - final mock = HttpServerMock(); - final res = api.ContactcenterinsightsApi(mock) - .projects - .locations - .authorizedViewSets - .authorizedViews - .conversations - .analyses; - final arg_request = buildGoogleCloudContactcenterinsightsV1Analysis(); - final arg_parent = 'foo'; - final arg_$fields = 'foo'; - mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleCloudContactcenterinsightsV1Analysis.fromJson( - json as core.Map); - checkGoogleCloudContactcenterinsightsV1Analysis(obj); - - final path = req.url.path; - var pathOffset = 0; - core.int index; - core.String subPart; - unittest.expect( - path.substring(pathOffset, pathOffset + 1), - unittest.equals('/'), - ); - pathOffset += 1; - unittest.expect( - path.substring(pathOffset, pathOffset + 3), - unittest.equals('v1/'), - ); - pathOffset += 3; - // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; - - final query = req.url.query; - var queryOffset = 0; - final queryMap = >{}; - void addQueryParam(core.String n, core.String v) => - queryMap.putIfAbsent(n, () => []).add(v); - - if (query.isNotEmpty) { - for (var part in query.split('&')) { - final keyValue = part.split('='); - addQueryParam( - core.Uri.decodeQueryComponent(keyValue[0]), - core.Uri.decodeQueryComponent(keyValue[1]), - ); - } - } - unittest.expect( - queryMap['fields']!.first, - unittest.equals(arg_$fields), - ); - - final h = { - 'content-type': 'application/json; charset=utf-8', - }; - final resp = convert.json.encode(buildGoogleLongrunningOperation()); - return async.Future.value(stringResponse(200, h, resp)); - }), true); - final response = - await res.create(arg_request, arg_parent, $fields: arg_$fields); - checkGoogleLongrunningOperation( - response as api.GoogleLongrunningOperation); - }); - - unittest.test('method--delete', () async { - final mock = HttpServerMock(); - final res = api.ContactcenterinsightsApi(mock) - .projects - .locations - .authorizedViewSets - .authorizedViews - .conversations - .analyses; - final arg_name = 'foo'; - final arg_$fields = 'foo'; - mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final path = req.url.path; - var pathOffset = 0; - core.int index; - core.String subPart; - unittest.expect( - path.substring(pathOffset, pathOffset + 1), - unittest.equals('/'), - ); - pathOffset += 1; - unittest.expect( - path.substring(pathOffset, pathOffset + 3), - unittest.equals('v1/'), - ); - pathOffset += 3; - // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; - - final query = req.url.query; - var queryOffset = 0; - final queryMap = >{}; - void addQueryParam(core.String n, core.String v) => - queryMap.putIfAbsent(n, () => []).add(v); - - if (query.isNotEmpty) { - for (var part in query.split('&')) { - final keyValue = part.split('='); - addQueryParam( - core.Uri.decodeQueryComponent(keyValue[0]), - 
core.Uri.decodeQueryComponent(keyValue[1]), - ); - } - } - unittest.expect( - queryMap['fields']!.first, - unittest.equals(arg_$fields), - ); - - final h = { - 'content-type': 'application/json; charset=utf-8', - }; - final resp = convert.json.encode(buildGoogleProtobufEmpty()); - return async.Future.value(stringResponse(200, h, resp)); - }), true); - final response = await res.delete(arg_name, $fields: arg_$fields); - checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); - }); - - unittest.test('method--get', () async { - final mock = HttpServerMock(); - final res = api.ContactcenterinsightsApi(mock) - .projects - .locations - .authorizedViewSets - .authorizedViews - .conversations - .analyses; - final arg_name = 'foo'; - final arg_$fields = 'foo'; - mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final path = req.url.path; - var pathOffset = 0; - core.int index; - core.String subPart; - unittest.expect( - path.substring(pathOffset, pathOffset + 1), - unittest.equals('/'), - ); - pathOffset += 1; - unittest.expect( - path.substring(pathOffset, pathOffset + 3), - unittest.equals('v1/'), - ); - pathOffset += 3; - // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; - - final query = req.url.query; - var queryOffset = 0; - final queryMap = >{}; - void addQueryParam(core.String n, core.String v) => - queryMap.putIfAbsent(n, () => []).add(v); - - if (query.isNotEmpty) { - for (var part in query.split('&')) { - final keyValue = part.split('='); - addQueryParam( - core.Uri.decodeQueryComponent(keyValue[0]), - core.Uri.decodeQueryComponent(keyValue[1]), - ); - } - } - unittest.expect( - queryMap['fields']!.first, - unittest.equals(arg_$fields), - ); - - final h = { - 'content-type': 'application/json; charset=utf-8', - }; - final resp = convert.json - .encode(buildGoogleCloudContactcenterinsightsV1Analysis()); - return async.Future.value(stringResponse(200, h, resp)); - }), true); - final response = await res.get(arg_name, $fields: arg_$fields); - checkGoogleCloudContactcenterinsightsV1Analysis( - response as api.GoogleCloudContactcenterinsightsV1Analysis); - }); - - unittest.test('method--list', () async { - final mock = HttpServerMock(); - final res = api.ContactcenterinsightsApi(mock) - .projects - .locations - .authorizedViewSets - .authorizedViews - .conversations - .analyses; - final arg_parent = 'foo'; - final arg_filter = 'foo'; - final arg_pageSize = 42; - final arg_pageToken = 'foo'; - final arg_$fields = 'foo'; - mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final path = req.url.path; - var pathOffset = 0; - core.int index; - core.String subPart; - unittest.expect( - path.substring(pathOffset, pathOffset + 1), - unittest.equals('/'), - ); - pathOffset += 1; - unittest.expect( - path.substring(pathOffset, pathOffset + 3), - unittest.equals('v1/'), - ); - pathOffset += 3; - // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; - - final query = req.url.query; - var queryOffset = 0; - final queryMap = >{}; - void addQueryParam(core.String n, core.String v) => - queryMap.putIfAbsent(n, () => []).add(v); - - if (query.isNotEmpty) { - for (var part in query.split('&')) { - final keyValue = part.split('='); - addQueryParam( - core.Uri.decodeQueryComponent(keyValue[0]), - core.Uri.decodeQueryComponent(keyValue[1]), - ); - } - } - unittest.expect( - queryMap['filter']!.first, - unittest.equals(arg_filter), - ); - unittest.expect( - 
core.int.parse(queryMap['pageSize']!.first), - unittest.equals(arg_pageSize), - ); - unittest.expect( - queryMap['pageToken']!.first, - unittest.equals(arg_pageToken), - ); - unittest.expect( - queryMap['fields']!.first, - unittest.equals(arg_$fields), - ); - - final h = { - 'content-type': 'application/json; charset=utf-8', - }; - final resp = convert.json.encode( - buildGoogleCloudContactcenterinsightsV1ListAnalysesResponse()); - return async.Future.value(stringResponse(200, h, resp)); - }), true); - final response = await res.list(arg_parent, - filter: arg_filter, - pageSize: arg_pageSize, - pageToken: arg_pageToken, - $fields: arg_$fields); - checkGoogleCloudContactcenterinsightsV1ListAnalysesResponse(response - as api.GoogleCloudContactcenterinsightsV1ListAnalysesResponse); - }); - }); - - unittest.group( - 'resource-ProjectsLocationsAuthorizedViewSetsAuthorizedViewsOperationsResource', - () { - unittest.test('method--cancel', () async { - final mock = HttpServerMock(); - final res = api.ContactcenterinsightsApi(mock) - .projects - .locations - .authorizedViewSets - .authorizedViews - .operations; - final arg_name = 'foo'; - final arg_$fields = 'foo'; - mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final path = req.url.path; - var pathOffset = 0; - core.int index; - core.String subPart; - unittest.expect( - path.substring(pathOffset, pathOffset + 1), - unittest.equals('/'), - ); - pathOffset += 1; - unittest.expect( - path.substring(pathOffset, pathOffset + 3), - unittest.equals('v1/'), - ); - pathOffset += 3; - // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; - - final query = req.url.query; - var queryOffset = 0; - final queryMap = >{}; - void addQueryParam(core.String n, core.String v) => - queryMap.putIfAbsent(n, () => []).add(v); - - if (query.isNotEmpty) { - for (var part in query.split('&')) { - final keyValue = part.split('='); - addQueryParam( - core.Uri.decodeQueryComponent(keyValue[0]), - core.Uri.decodeQueryComponent(keyValue[1]), - ); - } - } - unittest.expect( - queryMap['fields']!.first, - unittest.equals(arg_$fields), - ); - - final h = { - 'content-type': 'application/json; charset=utf-8', - }; - final resp = convert.json.encode(buildGoogleProtobufEmpty()); - return async.Future.value(stringResponse(200, h, resp)); - }), true); - final response = await res.cancel(arg_name, $fields: arg_$fields); - checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); - }); - - unittest.test('method--get', () async { - final mock = HttpServerMock(); - final res = api.ContactcenterinsightsApi(mock) - .projects - .locations - .authorizedViewSets - .authorizedViews - .operations; - final arg_name = 'foo'; - final arg_$fields = 'foo'; - mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final path = req.url.path; - var pathOffset = 0; - core.int index; - core.String subPart; - unittest.expect( - path.substring(pathOffset, pathOffset + 1), - unittest.equals('/'), - ); - pathOffset += 1; - unittest.expect( - path.substring(pathOffset, pathOffset + 3), - unittest.equals('v1/'), - ); - pathOffset += 3; - // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; - - final query = req.url.query; - var queryOffset = 0; - final queryMap = >{}; - void addQueryParam(core.String n, core.String v) => - queryMap.putIfAbsent(n, () => []).add(v); - - if (query.isNotEmpty) { - for (var part in query.split('&')) { - final keyValue = part.split('='); - addQueryParam( - 
core.Uri.decodeQueryComponent(keyValue[0]), - core.Uri.decodeQueryComponent(keyValue[1]), - ); - } - } - unittest.expect( - queryMap['fields']!.first, - unittest.equals(arg_$fields), - ); - - final h = { - 'content-type': 'application/json; charset=utf-8', - }; - final resp = convert.json.encode(buildGoogleLongrunningOperation()); - return async.Future.value(stringResponse(200, h, resp)); - }), true); - final response = await res.get(arg_name, $fields: arg_$fields); - checkGoogleLongrunningOperation( - response as api.GoogleLongrunningOperation); - }); - - unittest.test('method--list', () async { - final mock = HttpServerMock(); - final res = api.ContactcenterinsightsApi(mock) - .projects - .locations - .authorizedViewSets - .authorizedViews - .operations; - final arg_name = 'foo'; - final arg_filter = 'foo'; - final arg_pageSize = 42; - final arg_pageToken = 'foo'; - final arg_$fields = 'foo'; - mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final path = req.url.path; - var pathOffset = 0; - core.int index; - core.String subPart; - unittest.expect( - path.substring(pathOffset, pathOffset + 1), - unittest.equals('/'), - ); - pathOffset += 1; - unittest.expect( - path.substring(pathOffset, pathOffset + 3), - unittest.equals('v1/'), - ); - pathOffset += 3; - // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; - - final query = req.url.query; - var queryOffset = 0; - final queryMap = >{}; - void addQueryParam(core.String n, core.String v) => - queryMap.putIfAbsent(n, () => []).add(v); - - if (query.isNotEmpty) { - for (var part in query.split('&')) { - final keyValue = part.split('='); - addQueryParam( - core.Uri.decodeQueryComponent(keyValue[0]), - core.Uri.decodeQueryComponent(keyValue[1]), - ); - } - } - unittest.expect( - queryMap['filter']!.first, - unittest.equals(arg_filter), - ); - unittest.expect( - core.int.parse(queryMap['pageSize']!.first), - unittest.equals(arg_pageSize), - ); - unittest.expect( - queryMap['pageToken']!.first, - unittest.equals(arg_pageToken), - ); - unittest.expect( - queryMap['fields']!.first, - unittest.equals(arg_$fields), - ); - - final h = { - 'content-type': 'application/json; charset=utf-8', - }; - final resp = - convert.json.encode(buildGoogleLongrunningListOperationsResponse()); - return async.Future.value(stringResponse(200, h, resp)); - }), true); - final response = await res.list(arg_name, - filter: arg_filter, - pageSize: arg_pageSize, - pageToken: arg_pageToken, - $fields: arg_$fields); - checkGoogleLongrunningListOperationsResponse( - response as api.GoogleLongrunningListOperationsResponse); + final response = await res.calculateStats(arg_location, + filter: arg_filter, $fields: arg_$fields); + checkGoogleCloudContactcenterinsightsV1CalculateStatsResponse(response + as api.GoogleCloudContactcenterinsightsV1CalculateStatsResponse); }); }); diff --git a/generated/googleapis/test/container/v1_test.dart b/generated/googleapis/test/container/v1_test.dart index 563c60110..104125085 100644 --- a/generated/googleapis/test/container/v1_test.dart +++ b/generated/googleapis/test/container/v1_test.dart @@ -1268,6 +1268,7 @@ api.ClusterUpdate buildClusterUpdate() { o.desiredEnableFqdnNetworkPolicy = true; o.desiredEnableMultiNetworking = true; o.desiredEnablePrivateEndpoint = true; + o.desiredEnterpriseConfig = buildDesiredEnterpriseConfig(); o.desiredFleet = buildFleet(); o.desiredGatewayApiConfig = buildGatewayAPIConfig(); o.desiredGcfsConfig = buildGcfsConfig(); @@ -1289,6 +1290,7 @@ 
api.ClusterUpdate buildClusterUpdate() { o.desiredNetworkPerformanceConfig = buildClusterNetworkPerformanceConfig(); o.desiredNodeKubeletConfig = buildNodeKubeletConfig(); o.desiredNodePoolAutoConfigKubeletConfig = buildNodeKubeletConfig(); + o.desiredNodePoolAutoConfigLinuxNodeConfig = buildLinuxNodeConfig(); o.desiredNodePoolAutoConfigNetworkTags = buildNetworkTags(); o.desiredNodePoolAutoConfigResourceManagerTags = buildResourceManagerTags(); o.desiredNodePoolAutoscaling = buildNodePoolAutoscaling(); @@ -1344,6 +1346,7 @@ void checkClusterUpdate(api.ClusterUpdate o) { unittest.expect(o.desiredEnableFqdnNetworkPolicy!, unittest.isTrue); unittest.expect(o.desiredEnableMultiNetworking!, unittest.isTrue); unittest.expect(o.desiredEnablePrivateEndpoint!, unittest.isTrue); + checkDesiredEnterpriseConfig(o.desiredEnterpriseConfig!); checkFleet(o.desiredFleet!); checkGatewayAPIConfig(o.desiredGatewayApiConfig!); checkGcfsConfig(o.desiredGcfsConfig!); @@ -1380,6 +1383,7 @@ void checkClusterUpdate(api.ClusterUpdate o) { checkClusterNetworkPerformanceConfig(o.desiredNetworkPerformanceConfig!); checkNodeKubeletConfig(o.desiredNodeKubeletConfig!); checkNodeKubeletConfig(o.desiredNodePoolAutoConfigKubeletConfig!); + checkLinuxNodeConfig(o.desiredNodePoolAutoConfigLinuxNodeConfig!); checkNetworkTags(o.desiredNodePoolAutoConfigNetworkTags!); checkResourceManagerTags(o.desiredNodePoolAutoConfigResourceManagerTags!); checkNodePoolAutoscaling(o.desiredNodePoolAutoscaling!); @@ -1892,6 +1896,28 @@ void checkDefaultSnatStatus(api.DefaultSnatStatus o) { buildCounterDefaultSnatStatus--; } +core.int buildCounterDesiredEnterpriseConfig = 0; +api.DesiredEnterpriseConfig buildDesiredEnterpriseConfig() { + final o = api.DesiredEnterpriseConfig(); + buildCounterDesiredEnterpriseConfig++; + if (buildCounterDesiredEnterpriseConfig < 3) { + o.desiredTier = 'foo'; + } + buildCounterDesiredEnterpriseConfig--; + return o; +} + +void checkDesiredEnterpriseConfig(api.DesiredEnterpriseConfig o) { + buildCounterDesiredEnterpriseConfig++; + if (buildCounterDesiredEnterpriseConfig < 3) { + unittest.expect( + o.desiredTier!, + unittest.equals('foo'), + ); + } + buildCounterDesiredEnterpriseConfig--; +} + core.int buildCounterDnsCacheConfig = 0; api.DnsCacheConfig buildDnsCacheConfig() { final o = api.DnsCacheConfig(); @@ -1932,6 +1958,7 @@ api.EnterpriseConfig buildEnterpriseConfig() { buildCounterEnterpriseConfig++; if (buildCounterEnterpriseConfig < 3) { o.clusterTier = 'foo'; + o.desiredTier = 'foo'; } buildCounterEnterpriseConfig--; return o; @@ -1944,6 +1971,10 @@ void checkEnterpriseConfig(api.EnterpriseConfig o) { o.clusterTier!, unittest.equals('foo'), ); + unittest.expect( + o.desiredTier!, + unittest.equals('foo'), + ); } buildCounterEnterpriseConfig--; } @@ -3825,8 +3856,10 @@ api.NodeConfig buildNodeConfig() { o.linuxNodeConfig = buildLinuxNodeConfig(); o.localNvmeSsdBlockConfig = buildLocalNvmeSsdBlockConfig(); o.localSsdCount = 42; + o.localSsdEncryptionMode = 'foo'; o.loggingConfig = buildNodePoolLoggingConfig(); o.machineType = 'foo'; + o.maxRunDuration = 'foo'; o.metadata = buildUnnamed42(); o.minCpuPlatform = 'foo'; o.nodeGroup = 'foo'; @@ -3892,11 +3925,19 @@ void checkNodeConfig(api.NodeConfig o) { o.localSsdCount!, unittest.equals(42), ); + unittest.expect( + o.localSsdEncryptionMode!, + unittest.equals('foo'), + ); checkNodePoolLoggingConfig(o.loggingConfig!); unittest.expect( o.machineType!, unittest.equals('foo'), ); + unittest.expect( + o.maxRunDuration!, + unittest.equals('foo'), + ); 
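// Note: the new build*/check* helpers added in this hunk (for example
// buildDesiredEnterpriseConfig) follow the same counter idiom used throughout
// these generated tests: a top-level `buildCounterX` limits how deeply nested
// objects are populated, so mutually referencing schemas cannot recurse
// forever. A self-contained sketch of the idiom, using a hypothetical `Node`
// type rather than a real generated class:
class Node {
  String? tier;
  Node? child;
}

int buildCounterNode = 0;

Node buildNode() {
  final o = Node();
  buildCounterNode++;
  if (buildCounterNode < 3) {
    // Fields are only filled while the counter is below the depth limit;
    // the third level of nesting is left empty.
    o.tier = 'foo';
    o.child = buildNode();
  }
  buildCounterNode--;
  return o;
}

void checkNode(Node o) {
  buildCounterNode++;
  if (buildCounterNode < 3) {
    // The check helper mirrors the builder, so it only descends as far as
    // the builder populated.
    assert(o.tier == 'foo');
    checkNode(o.child!);
  }
  buildCounterNode--;
}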
checkUnnamed42(o.metadata!); unittest.expect( o.minCpuPlatform!, @@ -4249,6 +4290,7 @@ api.NodePoolAutoConfig buildNodePoolAutoConfig() { final o = api.NodePoolAutoConfig(); buildCounterNodePoolAutoConfig++; if (buildCounterNodePoolAutoConfig < 3) { + o.linuxNodeConfig = buildLinuxNodeConfig(); o.networkTags = buildNetworkTags(); o.nodeKubeletConfig = buildNodeKubeletConfig(); o.resourceManagerTags = buildResourceManagerTags(); @@ -4260,6 +4302,7 @@ api.NodePoolAutoConfig buildNodePoolAutoConfig() { void checkNodePoolAutoConfig(api.NodePoolAutoConfig o) { buildCounterNodePoolAutoConfig++; if (buildCounterNodePoolAutoConfig < 3) { + checkLinuxNodeConfig(o.linuxNodeConfig!); checkNetworkTags(o.networkTags!); checkNodeKubeletConfig(o.nodeKubeletConfig!); checkResourceManagerTags(o.resourceManagerTags!); @@ -6537,6 +6580,7 @@ api.UpdateNodePoolRequest buildUpdateNodePoolRequest() { o.locations = buildUnnamed76(); o.loggingConfig = buildNodePoolLoggingConfig(); o.machineType = 'foo'; + o.maxRunDuration = 'foo'; o.name = 'foo'; o.nodeNetworkConfig = buildNodeNetworkConfig(); o.nodePoolId = 'foo'; @@ -6595,6 +6639,10 @@ void checkUpdateNodePoolRequest(api.UpdateNodePoolRequest o) { o.machineType!, unittest.equals('foo'), ); + unittest.expect( + o.maxRunDuration!, + unittest.equals('foo'), + ); unittest.expect( o.name!, unittest.equals('foo'), @@ -7394,6 +7442,16 @@ void main() { }); }); + unittest.group('obj-schema-DesiredEnterpriseConfig', () { + unittest.test('to-json--from-json', () async { + final o = buildDesiredEnterpriseConfig(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.DesiredEnterpriseConfig.fromJson( + oJson as core.Map); + checkDesiredEnterpriseConfig(od); + }); + }); + unittest.group('obj-schema-DnsCacheConfig', () { unittest.test('to-json--from-json', () async { final o = buildDnsCacheConfig(); diff --git a/generated/googleapis/test/content/v2_1_test.dart b/generated/googleapis/test/content/v2_1_test.dart index 14627125d..8bf267140 100644 --- a/generated/googleapis/test/content/v2_1_test.dart +++ b/generated/googleapis/test/content/v2_1_test.dart @@ -6790,6 +6790,7 @@ api.LoyaltyProgram buildLoyaltyProgram() { o.memberPriceEffectiveDate = 'foo'; o.price = buildPrice(); o.programLabel = 'foo'; + o.shippingLabel = 'foo'; o.tierLabel = 'foo'; } buildCounterLoyaltyProgram--; @@ -6813,6 +6814,10 @@ void checkLoyaltyProgram(api.LoyaltyProgram o) { o.programLabel!, unittest.equals('foo'), ); + unittest.expect( + o.shippingLabel!, + unittest.equals('foo'), + ); unittest.expect( o.tierLabel!, unittest.equals('foo'), @@ -13355,359 +13360,34 @@ void checkServiceStoreConfigCutoffConfigLocalCutoffTime( buildCounterServiceStoreConfigCutoffConfigLocalCutoffTime--; } -core.List buildUnnamed202() => [ - 'foo', - 'foo', - ]; - -void checkUnnamed202(core.List o) { - unittest.expect(o, unittest.hasLength(2)); - unittest.expect( - o[0], - unittest.equals('foo'), - ); - unittest.expect( - o[1], - unittest.equals('foo'), - ); -} - -core.int buildCounterSettlementReport = 0; -api.SettlementReport buildSettlementReport() { - final o = api.SettlementReport(); - buildCounterSettlementReport++; - if (buildCounterSettlementReport < 3) { - o.endDate = 'foo'; - o.kind = 'foo'; - o.previousBalance = buildPrice(); - o.settlementId = 'foo'; - o.startDate = 'foo'; - o.transferAmount = buildPrice(); - o.transferDate = 'foo'; - o.transferIds = buildUnnamed202(); - } - buildCounterSettlementReport--; - return o; -} - -void checkSettlementReport(api.SettlementReport o) { - 
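// Note: removing the Settlement* schemas in this file also appears to
// renumber the shared list helpers further down (buildUnnamed206 becomes
// buildUnnamed202, and so on), since the generator numbers these helpers
// sequentially per file. The helpers themselves are just canned two-element
// builders; a sketch with a hypothetical element type:
List<String> buildUnnamedExample() => [
      'foo',
      'foo',
    ];

void checkUnnamedExample(List<String> o) {
  // The generated checks always assert the fixed length, then each element.
  assert(o.length == 2);
  assert(o[0] == 'foo');
  assert(o[1] == 'foo');
}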
buildCounterSettlementReport++; - if (buildCounterSettlementReport < 3) { - unittest.expect( - o.endDate!, - unittest.equals('foo'), - ); - unittest.expect( - o.kind!, - unittest.equals('foo'), - ); - checkPrice(o.previousBalance!); - unittest.expect( - o.settlementId!, - unittest.equals('foo'), - ); - unittest.expect( - o.startDate!, - unittest.equals('foo'), - ); - checkPrice(o.transferAmount!); - unittest.expect( - o.transferDate!, - unittest.equals('foo'), - ); - checkUnnamed202(o.transferIds!); - } - buildCounterSettlementReport--; -} - -core.int buildCounterSettlementTransaction = 0; -api.SettlementTransaction buildSettlementTransaction() { - final o = api.SettlementTransaction(); - buildCounterSettlementTransaction++; - if (buildCounterSettlementTransaction < 3) { - o.amount = buildSettlementTransactionAmount(); - o.identifiers = buildSettlementTransactionIdentifiers(); - o.kind = 'foo'; - o.transaction = buildSettlementTransactionTransaction(); - } - buildCounterSettlementTransaction--; - return o; -} - -void checkSettlementTransaction(api.SettlementTransaction o) { - buildCounterSettlementTransaction++; - if (buildCounterSettlementTransaction < 3) { - checkSettlementTransactionAmount(o.amount!); - checkSettlementTransactionIdentifiers(o.identifiers!); - unittest.expect( - o.kind!, - unittest.equals('foo'), - ); - checkSettlementTransactionTransaction(o.transaction!); - } - buildCounterSettlementTransaction--; -} - -core.int buildCounterSettlementTransactionAmount = 0; -api.SettlementTransactionAmount buildSettlementTransactionAmount() { - final o = api.SettlementTransactionAmount(); - buildCounterSettlementTransactionAmount++; - if (buildCounterSettlementTransactionAmount < 3) { - o.commission = buildSettlementTransactionAmountCommission(); - o.description = 'foo'; - o.transactionAmount = buildPrice(); - o.type = 'foo'; - } - buildCounterSettlementTransactionAmount--; - return o; -} - -void checkSettlementTransactionAmount(api.SettlementTransactionAmount o) { - buildCounterSettlementTransactionAmount++; - if (buildCounterSettlementTransactionAmount < 3) { - checkSettlementTransactionAmountCommission(o.commission!); - unittest.expect( - o.description!, - unittest.equals('foo'), - ); - checkPrice(o.transactionAmount!); - unittest.expect( - o.type!, - unittest.equals('foo'), - ); - } - buildCounterSettlementTransactionAmount--; -} - -core.int buildCounterSettlementTransactionAmountCommission = 0; -api.SettlementTransactionAmountCommission - buildSettlementTransactionAmountCommission() { - final o = api.SettlementTransactionAmountCommission(); - buildCounterSettlementTransactionAmountCommission++; - if (buildCounterSettlementTransactionAmountCommission < 3) { - o.category = 'foo'; - o.rate = 'foo'; - } - buildCounterSettlementTransactionAmountCommission--; - return o; -} - -void checkSettlementTransactionAmountCommission( - api.SettlementTransactionAmountCommission o) { - buildCounterSettlementTransactionAmountCommission++; - if (buildCounterSettlementTransactionAmountCommission < 3) { - unittest.expect( - o.category!, - unittest.equals('foo'), - ); - unittest.expect( - o.rate!, - unittest.equals('foo'), - ); - } - buildCounterSettlementTransactionAmountCommission--; -} - -core.List buildUnnamed203() => [ - 'foo', - 'foo', - ]; - -void checkUnnamed203(core.List o) { - unittest.expect(o, unittest.hasLength(2)); - unittest.expect( - o[0], - unittest.equals('foo'), - ); - unittest.expect( - o[1], - unittest.equals('foo'), - ); -} - -core.int buildCounterSettlementTransactionIdentifiers = 
0; -api.SettlementTransactionIdentifiers buildSettlementTransactionIdentifiers() { - final o = api.SettlementTransactionIdentifiers(); - buildCounterSettlementTransactionIdentifiers++; - if (buildCounterSettlementTransactionIdentifiers < 3) { - o.adjustmentId = 'foo'; - o.merchantOrderId = 'foo'; - o.orderItemId = 'foo'; - o.settlementEntryId = 'foo'; - o.shipmentIds = buildUnnamed203(); - o.transactionId = 'foo'; - } - buildCounterSettlementTransactionIdentifiers--; - return o; -} - -void checkSettlementTransactionIdentifiers( - api.SettlementTransactionIdentifiers o) { - buildCounterSettlementTransactionIdentifiers++; - if (buildCounterSettlementTransactionIdentifiers < 3) { - unittest.expect( - o.adjustmentId!, - unittest.equals('foo'), - ); - unittest.expect( - o.merchantOrderId!, - unittest.equals('foo'), - ); - unittest.expect( - o.orderItemId!, - unittest.equals('foo'), - ); - unittest.expect( - o.settlementEntryId!, - unittest.equals('foo'), - ); - checkUnnamed203(o.shipmentIds!); - unittest.expect( - o.transactionId!, - unittest.equals('foo'), - ); - } - buildCounterSettlementTransactionIdentifiers--; -} - -core.int buildCounterSettlementTransactionTransaction = 0; -api.SettlementTransactionTransaction buildSettlementTransactionTransaction() { - final o = api.SettlementTransactionTransaction(); - buildCounterSettlementTransactionTransaction++; - if (buildCounterSettlementTransactionTransaction < 3) { - o.postDate = 'foo'; - o.type = 'foo'; - } - buildCounterSettlementTransactionTransaction--; - return o; -} - -void checkSettlementTransactionTransaction( - api.SettlementTransactionTransaction o) { - buildCounterSettlementTransactionTransaction++; - if (buildCounterSettlementTransactionTransaction < 3) { - unittest.expect( - o.postDate!, - unittest.equals('foo'), - ); - unittest.expect( - o.type!, - unittest.equals('foo'), - ); - } - buildCounterSettlementTransactionTransaction--; -} - -core.List buildUnnamed204() => [ - buildSettlementReport(), - buildSettlementReport(), - ]; - -void checkUnnamed204(core.List o) { - unittest.expect(o, unittest.hasLength(2)); - checkSettlementReport(o[0]); - checkSettlementReport(o[1]); -} - -core.int buildCounterSettlementreportsListResponse = 0; -api.SettlementreportsListResponse buildSettlementreportsListResponse() { - final o = api.SettlementreportsListResponse(); - buildCounterSettlementreportsListResponse++; - if (buildCounterSettlementreportsListResponse < 3) { - o.kind = 'foo'; - o.nextPageToken = 'foo'; - o.resources = buildUnnamed204(); - } - buildCounterSettlementreportsListResponse--; - return o; -} - -void checkSettlementreportsListResponse(api.SettlementreportsListResponse o) { - buildCounterSettlementreportsListResponse++; - if (buildCounterSettlementreportsListResponse < 3) { - unittest.expect( - o.kind!, - unittest.equals('foo'), - ); - unittest.expect( - o.nextPageToken!, - unittest.equals('foo'), - ); - checkUnnamed204(o.resources!); - } - buildCounterSettlementreportsListResponse--; -} - -core.List buildUnnamed205() => [ - buildSettlementTransaction(), - buildSettlementTransaction(), - ]; - -void checkUnnamed205(core.List o) { - unittest.expect(o, unittest.hasLength(2)); - checkSettlementTransaction(o[0]); - checkSettlementTransaction(o[1]); -} - -core.int buildCounterSettlementtransactionsListResponse = 0; -api.SettlementtransactionsListResponse - buildSettlementtransactionsListResponse() { - final o = api.SettlementtransactionsListResponse(); - buildCounterSettlementtransactionsListResponse++; - if 
(buildCounterSettlementtransactionsListResponse < 3) { - o.kind = 'foo'; - o.nextPageToken = 'foo'; - o.resources = buildUnnamed205(); - } - buildCounterSettlementtransactionsListResponse--; - return o; -} - -void checkSettlementtransactionsListResponse( - api.SettlementtransactionsListResponse o) { - buildCounterSettlementtransactionsListResponse++; - if (buildCounterSettlementtransactionsListResponse < 3) { - unittest.expect( - o.kind!, - unittest.equals('foo'), - ); - unittest.expect( - o.nextPageToken!, - unittest.equals('foo'), - ); - checkUnnamed205(o.resources!); - } - buildCounterSettlementtransactionsListResponse--; -} - -core.List buildUnnamed206() => [ +core.List buildUnnamed202() => [ buildPostalCodeGroup(), buildPostalCodeGroup(), ]; -void checkUnnamed206(core.List o) { +void checkUnnamed202(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkPostalCodeGroup(o[0]); checkPostalCodeGroup(o[1]); } -core.List buildUnnamed207() => [ +core.List buildUnnamed203() => [ buildService(), buildService(), ]; -void checkUnnamed207(core.List o) { +void checkUnnamed203(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkService(o[0]); checkService(o[1]); } -core.List buildUnnamed208() => [ +core.List buildUnnamed204() => [ buildWarehouse(), buildWarehouse(), ]; -void checkUnnamed208(core.List o) { +void checkUnnamed204(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkWarehouse(o[0]); checkWarehouse(o[1]); @@ -13719,9 +13399,9 @@ api.ShippingSettings buildShippingSettings() { buildCounterShippingSettings++; if (buildCounterShippingSettings < 3) { o.accountId = 'foo'; - o.postalCodeGroups = buildUnnamed206(); - o.services = buildUnnamed207(); - o.warehouses = buildUnnamed208(); + o.postalCodeGroups = buildUnnamed202(); + o.services = buildUnnamed203(); + o.warehouses = buildUnnamed204(); } buildCounterShippingSettings--; return o; @@ -13734,19 +13414,19 @@ void checkShippingSettings(api.ShippingSettings o) { o.accountId!, unittest.equals('foo'), ); - checkUnnamed206(o.postalCodeGroups!); - checkUnnamed207(o.services!); - checkUnnamed208(o.warehouses!); + checkUnnamed202(o.postalCodeGroups!); + checkUnnamed203(o.services!); + checkUnnamed204(o.warehouses!); } buildCounterShippingSettings--; } -core.List buildUnnamed209() => [ +core.List buildUnnamed205() => [ buildShippingsettingsCustomBatchRequestEntry(), buildShippingsettingsCustomBatchRequestEntry(), ]; -void checkUnnamed209(core.List o) { +void checkUnnamed205(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkShippingsettingsCustomBatchRequestEntry(o[0]); checkShippingsettingsCustomBatchRequestEntry(o[1]); @@ -13758,7 +13438,7 @@ api.ShippingsettingsCustomBatchRequest final o = api.ShippingsettingsCustomBatchRequest(); buildCounterShippingsettingsCustomBatchRequest++; if (buildCounterShippingsettingsCustomBatchRequest < 3) { - o.entries = buildUnnamed209(); + o.entries = buildUnnamed205(); } buildCounterShippingsettingsCustomBatchRequest--; return o; @@ -13768,7 +13448,7 @@ void checkShippingsettingsCustomBatchRequest( api.ShippingsettingsCustomBatchRequest o) { buildCounterShippingsettingsCustomBatchRequest++; if (buildCounterShippingsettingsCustomBatchRequest < 3) { - checkUnnamed209(o.entries!); + checkUnnamed205(o.entries!); } buildCounterShippingsettingsCustomBatchRequest--; } @@ -13814,12 +13494,12 @@ void checkShippingsettingsCustomBatchRequestEntry( buildCounterShippingsettingsCustomBatchRequestEntry--; } -core.List buildUnnamed210() => [ +core.List buildUnnamed206() => [ 
buildShippingsettingsCustomBatchResponseEntry(), buildShippingsettingsCustomBatchResponseEntry(), ]; -void checkUnnamed210( +void checkUnnamed206( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkShippingsettingsCustomBatchResponseEntry(o[0]); @@ -13832,7 +13512,7 @@ api.ShippingsettingsCustomBatchResponse final o = api.ShippingsettingsCustomBatchResponse(); buildCounterShippingsettingsCustomBatchResponse++; if (buildCounterShippingsettingsCustomBatchResponse < 3) { - o.entries = buildUnnamed210(); + o.entries = buildUnnamed206(); o.kind = 'foo'; } buildCounterShippingsettingsCustomBatchResponse--; @@ -13843,7 +13523,7 @@ void checkShippingsettingsCustomBatchResponse( api.ShippingsettingsCustomBatchResponse o) { buildCounterShippingsettingsCustomBatchResponse++; if (buildCounterShippingsettingsCustomBatchResponse < 3) { - checkUnnamed210(o.entries!); + checkUnnamed206(o.entries!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -13885,12 +13565,12 @@ void checkShippingsettingsCustomBatchResponseEntry( buildCounterShippingsettingsCustomBatchResponseEntry--; } -core.List buildUnnamed211() => [ +core.List buildUnnamed207() => [ buildCarriersCarrier(), buildCarriersCarrier(), ]; -void checkUnnamed211(core.List o) { +void checkUnnamed207(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkCarriersCarrier(o[0]); checkCarriersCarrier(o[1]); @@ -13902,7 +13582,7 @@ api.ShippingsettingsGetSupportedCarriersResponse final o = api.ShippingsettingsGetSupportedCarriersResponse(); buildCounterShippingsettingsGetSupportedCarriersResponse++; if (buildCounterShippingsettingsGetSupportedCarriersResponse < 3) { - o.carriers = buildUnnamed211(); + o.carriers = buildUnnamed207(); o.kind = 'foo'; } buildCounterShippingsettingsGetSupportedCarriersResponse--; @@ -13913,7 +13593,7 @@ void checkShippingsettingsGetSupportedCarriersResponse( api.ShippingsettingsGetSupportedCarriersResponse o) { buildCounterShippingsettingsGetSupportedCarriersResponse++; if (buildCounterShippingsettingsGetSupportedCarriersResponse < 3) { - checkUnnamed211(o.carriers!); + checkUnnamed207(o.carriers!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -13922,12 +13602,12 @@ void checkShippingsettingsGetSupportedCarriersResponse( buildCounterShippingsettingsGetSupportedCarriersResponse--; } -core.List buildUnnamed212() => [ +core.List buildUnnamed208() => [ buildHolidaysHoliday(), buildHolidaysHoliday(), ]; -void checkUnnamed212(core.List o) { +void checkUnnamed208(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkHolidaysHoliday(o[0]); checkHolidaysHoliday(o[1]); @@ -13939,7 +13619,7 @@ api.ShippingsettingsGetSupportedHolidaysResponse final o = api.ShippingsettingsGetSupportedHolidaysResponse(); buildCounterShippingsettingsGetSupportedHolidaysResponse++; if (buildCounterShippingsettingsGetSupportedHolidaysResponse < 3) { - o.holidays = buildUnnamed212(); + o.holidays = buildUnnamed208(); o.kind = 'foo'; } buildCounterShippingsettingsGetSupportedHolidaysResponse--; @@ -13950,7 +13630,7 @@ void checkShippingsettingsGetSupportedHolidaysResponse( api.ShippingsettingsGetSupportedHolidaysResponse o) { buildCounterShippingsettingsGetSupportedHolidaysResponse++; if (buildCounterShippingsettingsGetSupportedHolidaysResponse < 3) { - checkUnnamed212(o.holidays!); + checkUnnamed208(o.holidays!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -13959,12 +13639,12 @@ void checkShippingsettingsGetSupportedHolidaysResponse( buildCounterShippingsettingsGetSupportedHolidaysResponse--; } -core.List 
buildUnnamed213() => [ +core.List buildUnnamed209() => [ buildPickupServicesPickupService(), buildPickupServicesPickupService(), ]; -void checkUnnamed213(core.List o) { +void checkUnnamed209(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkPickupServicesPickupService(o[0]); checkPickupServicesPickupService(o[1]); @@ -13977,7 +13657,7 @@ api.ShippingsettingsGetSupportedPickupServicesResponse buildCounterShippingsettingsGetSupportedPickupServicesResponse++; if (buildCounterShippingsettingsGetSupportedPickupServicesResponse < 3) { o.kind = 'foo'; - o.pickupServices = buildUnnamed213(); + o.pickupServices = buildUnnamed209(); } buildCounterShippingsettingsGetSupportedPickupServicesResponse--; return o; @@ -13991,17 +13671,17 @@ void checkShippingsettingsGetSupportedPickupServicesResponse( o.kind!, unittest.equals('foo'), ); - checkUnnamed213(o.pickupServices!); + checkUnnamed209(o.pickupServices!); } buildCounterShippingsettingsGetSupportedPickupServicesResponse--; } -core.List buildUnnamed214() => [ +core.List buildUnnamed210() => [ buildShippingSettings(), buildShippingSettings(), ]; -void checkUnnamed214(core.List o) { +void checkUnnamed210(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkShippingSettings(o[0]); checkShippingSettings(o[1]); @@ -14014,7 +13694,7 @@ api.ShippingsettingsListResponse buildShippingsettingsListResponse() { if (buildCounterShippingsettingsListResponse < 3) { o.kind = 'foo'; o.nextPageToken = 'foo'; - o.resources = buildUnnamed214(); + o.resources = buildUnnamed210(); } buildCounterShippingsettingsListResponse--; return o; @@ -14031,17 +13711,17 @@ void checkShippingsettingsListResponse(api.ShippingsettingsListResponse o) { o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed214(o.resources!); + checkUnnamed210(o.resources!); } buildCounterShippingsettingsListResponse--; } -core.List buildUnnamed215() => [ +core.List buildUnnamed211() => [ buildShoppingAdsProgramStatusRegionStatus(), buildShoppingAdsProgramStatusRegionStatus(), ]; -void checkUnnamed215(core.List o) { +void checkUnnamed211(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkShoppingAdsProgramStatusRegionStatus(o[0]); checkShoppingAdsProgramStatusRegionStatus(o[1]); @@ -14053,7 +13733,7 @@ api.ShoppingAdsProgramStatus buildShoppingAdsProgramStatus() { buildCounterShoppingAdsProgramStatus++; if (buildCounterShoppingAdsProgramStatus < 3) { o.globalState = 'foo'; - o.regionStatuses = buildUnnamed215(); + o.regionStatuses = buildUnnamed211(); } buildCounterShoppingAdsProgramStatus--; return o; @@ -14066,17 +13746,17 @@ void checkShoppingAdsProgramStatus(api.ShoppingAdsProgramStatus o) { o.globalState!, unittest.equals('foo'), ); - checkUnnamed215(o.regionStatuses!); + checkUnnamed211(o.regionStatuses!); } buildCounterShoppingAdsProgramStatus--; } -core.List buildUnnamed216() => [ +core.List buildUnnamed212() => [ 'foo', 'foo', ]; -void checkUnnamed216(core.List o) { +void checkUnnamed212(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -14088,12 +13768,12 @@ void checkUnnamed216(core.List o) { ); } -core.List buildUnnamed217() => [ +core.List buildUnnamed213() => [ 'foo', 'foo', ]; -void checkUnnamed217(core.List o) { +void checkUnnamed213(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -14105,12 +13785,12 @@ void checkUnnamed217(core.List o) { ); } -core.List buildUnnamed218() => [ +core.List buildUnnamed214() => [ 'foo', 'foo', ]; -void checkUnnamed218(core.List o) { +void 
checkUnnamed214(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -14130,14 +13810,14 @@ api.ShoppingAdsProgramStatusRegionStatus if (buildCounterShoppingAdsProgramStatusRegionStatus < 3) { o.disapprovalDate = 'foo'; o.eligibilityStatus = 'foo'; - o.onboardingIssues = buildUnnamed216(); - o.regionCodes = buildUnnamed217(); + o.onboardingIssues = buildUnnamed212(); + o.regionCodes = buildUnnamed213(); o.reviewEligibilityStatus = 'foo'; o.reviewIneligibilityReason = 'foo'; o.reviewIneligibilityReasonDescription = 'foo'; o.reviewIneligibilityReasonDetails = buildShoppingAdsProgramStatusReviewIneligibilityReasonDetails(); - o.reviewIssues = buildUnnamed218(); + o.reviewIssues = buildUnnamed214(); } buildCounterShoppingAdsProgramStatusRegionStatus--; return o; @@ -14155,8 +13835,8 @@ void checkShoppingAdsProgramStatusRegionStatus( o.eligibilityStatus!, unittest.equals('foo'), ); - checkUnnamed216(o.onboardingIssues!); - checkUnnamed217(o.regionCodes!); + checkUnnamed212(o.onboardingIssues!); + checkUnnamed213(o.regionCodes!); unittest.expect( o.reviewEligibilityStatus!, unittest.equals('foo'), @@ -14171,7 +13851,7 @@ void checkShoppingAdsProgramStatusRegionStatus( ); checkShoppingAdsProgramStatusReviewIneligibilityReasonDetails( o.reviewIneligibilityReasonDetails!); - checkUnnamed218(o.reviewIssues!); + checkUnnamed214(o.reviewIssues!); } buildCounterShoppingAdsProgramStatusRegionStatus--; } @@ -14203,12 +13883,12 @@ void checkShoppingAdsProgramStatusReviewIneligibilityReasonDetails( buildCounterShoppingAdsProgramStatusReviewIneligibilityReasonDetails--; } -core.List buildUnnamed219() => [ +core.List buildUnnamed215() => [ buildRow(), buildRow(), ]; -void checkUnnamed219(core.List o) { +void checkUnnamed215(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkRow(o[0]); checkRow(o[1]); @@ -14222,7 +13902,7 @@ api.Table buildTable() { o.columnHeaders = buildHeaders(); o.name = 'foo'; o.rowHeaders = buildHeaders(); - o.rows = buildUnnamed219(); + o.rows = buildUnnamed215(); } buildCounterTable--; return o; @@ -14237,7 +13917,7 @@ void checkTable(api.Table o) { unittest.equals('foo'), ); checkHeaders(o.rowHeaders!); - checkUnnamed219(o.rows!); + checkUnnamed215(o.rows!); } buildCounterTable--; } @@ -14387,12 +14067,12 @@ void checkTopicTrends(api.TopicTrends o) { buildCounterTopicTrends--; } -core.List buildUnnamed220() => [ +core.List buildUnnamed216() => [ 'foo', 'foo', ]; -void checkUnnamed220(core.List o) { +void checkUnnamed216(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -14404,23 +14084,23 @@ void checkUnnamed220(core.List o) { ); } -core.List buildUnnamed221() => [ +core.List buildUnnamed217() => [ buildTransitTableTransitTimeRow(), buildTransitTableTransitTimeRow(), ]; -void checkUnnamed221(core.List o) { +void checkUnnamed217(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkTransitTableTransitTimeRow(o[0]); checkTransitTableTransitTimeRow(o[1]); } -core.List buildUnnamed222() => [ +core.List buildUnnamed218() => [ 'foo', 'foo', ]; -void checkUnnamed222(core.List o) { +void checkUnnamed218(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -14437,9 +14117,9 @@ api.TransitTable buildTransitTable() { final o = api.TransitTable(); buildCounterTransitTable++; if (buildCounterTransitTable < 3) { - o.postalCodeGroupNames = buildUnnamed220(); - o.rows = buildUnnamed221(); - o.transitTimeLabels = buildUnnamed222(); + o.postalCodeGroupNames = buildUnnamed216(); + o.rows 
= buildUnnamed217(); + o.transitTimeLabels = buildUnnamed218(); } buildCounterTransitTable--; return o; @@ -14448,19 +14128,19 @@ api.TransitTable buildTransitTable() { void checkTransitTable(api.TransitTable o) { buildCounterTransitTable++; if (buildCounterTransitTable < 3) { - checkUnnamed220(o.postalCodeGroupNames!); - checkUnnamed221(o.rows!); - checkUnnamed222(o.transitTimeLabels!); + checkUnnamed216(o.postalCodeGroupNames!); + checkUnnamed217(o.rows!); + checkUnnamed218(o.transitTimeLabels!); } buildCounterTransitTable--; } -core.List buildUnnamed223() => [ +core.List buildUnnamed219() => [ buildTransitTableTransitTimeRowTransitTimeValue(), buildTransitTableTransitTimeRowTransitTimeValue(), ]; -void checkUnnamed223( +void checkUnnamed219( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkTransitTableTransitTimeRowTransitTimeValue(o[0]); @@ -14472,7 +14152,7 @@ api.TransitTableTransitTimeRow buildTransitTableTransitTimeRow() { final o = api.TransitTableTransitTimeRow(); buildCounterTransitTableTransitTimeRow++; if (buildCounterTransitTableTransitTimeRow < 3) { - o.values = buildUnnamed223(); + o.values = buildUnnamed219(); } buildCounterTransitTableTransitTimeRow--; return o; @@ -14481,7 +14161,7 @@ api.TransitTableTransitTimeRow buildTransitTableTransitTimeRow() { void checkTransitTableTransitTimeRow(api.TransitTableTransitTimeRow o) { buildCounterTransitTableTransitTimeRow++; if (buildCounterTransitTableTransitTimeRow < 3) { - checkUnnamed223(o.values!); + checkUnnamed219(o.values!); } buildCounterTransitTableTransitTimeRow--; } @@ -14838,29 +14518,12 @@ void checkWeight(api.Weight o) { buildCounterWeight--; } -core.List buildUnnamed224() => [ - 'foo', - 'foo', - ]; - -void checkUnnamed224(core.List o) { - unittest.expect(o, unittest.hasLength(2)); - unittest.expect( - o[0], - unittest.equals('foo'), - ); - unittest.expect( - o[1], - unittest.equals('foo'), - ); -} - -core.List buildUnnamed225() => [ +core.List buildUnnamed220() => [ 'foo', 'foo', ]; -void checkUnnamed225(core.List o) { +void checkUnnamed220(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -14872,12 +14535,12 @@ void checkUnnamed225(core.List o) { ); } -core.List buildUnnamed226() => [ +core.List buildUnnamed221() => [ 'foo', 'foo', ]; -void checkUnnamed226(core.List o) { +void checkUnnamed221(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -14889,12 +14552,12 @@ void checkUnnamed226(core.List o) { ); } -core.List buildUnnamed227() => [ +core.List buildUnnamed222() => [ 'foo', 'foo', ]; -void checkUnnamed227(core.List o) { +void checkUnnamed222(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -14906,12 +14569,12 @@ void checkUnnamed227(core.List o) { ); } -core.List buildUnnamed228() => [ +core.List buildUnnamed223() => [ 'foo', 'foo', ]; -void checkUnnamed228(core.List o) { +void checkUnnamed223(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -14923,12 +14586,12 @@ void checkUnnamed228(core.List o) { ); } -core.List buildUnnamed229() => [ +core.List buildUnnamed224() => [ 'foo', 'foo', ]; -void checkUnnamed229(core.List o) { +void checkUnnamed224(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -14940,12 +14603,12 @@ void checkUnnamed229(core.List o) { ); } -core.List buildUnnamed230() => [ +core.List buildUnnamed225() => [ 'foo', 'foo', ]; -void checkUnnamed230(core.List o) { +void checkUnnamed225(core.List o) { 
unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -17965,86 +17628,6 @@ void main() { }); }); - unittest.group('obj-schema-SettlementReport', () { - unittest.test('to-json--from-json', () async { - final o = buildSettlementReport(); - final oJson = convert.jsonDecode(convert.jsonEncode(o)); - final od = api.SettlementReport.fromJson( - oJson as core.Map); - checkSettlementReport(od); - }); - }); - - unittest.group('obj-schema-SettlementTransaction', () { - unittest.test('to-json--from-json', () async { - final o = buildSettlementTransaction(); - final oJson = convert.jsonDecode(convert.jsonEncode(o)); - final od = api.SettlementTransaction.fromJson( - oJson as core.Map); - checkSettlementTransaction(od); - }); - }); - - unittest.group('obj-schema-SettlementTransactionAmount', () { - unittest.test('to-json--from-json', () async { - final o = buildSettlementTransactionAmount(); - final oJson = convert.jsonDecode(convert.jsonEncode(o)); - final od = api.SettlementTransactionAmount.fromJson( - oJson as core.Map); - checkSettlementTransactionAmount(od); - }); - }); - - unittest.group('obj-schema-SettlementTransactionAmountCommission', () { - unittest.test('to-json--from-json', () async { - final o = buildSettlementTransactionAmountCommission(); - final oJson = convert.jsonDecode(convert.jsonEncode(o)); - final od = api.SettlementTransactionAmountCommission.fromJson( - oJson as core.Map); - checkSettlementTransactionAmountCommission(od); - }); - }); - - unittest.group('obj-schema-SettlementTransactionIdentifiers', () { - unittest.test('to-json--from-json', () async { - final o = buildSettlementTransactionIdentifiers(); - final oJson = convert.jsonDecode(convert.jsonEncode(o)); - final od = api.SettlementTransactionIdentifiers.fromJson( - oJson as core.Map); - checkSettlementTransactionIdentifiers(od); - }); - }); - - unittest.group('obj-schema-SettlementTransactionTransaction', () { - unittest.test('to-json--from-json', () async { - final o = buildSettlementTransactionTransaction(); - final oJson = convert.jsonDecode(convert.jsonEncode(o)); - final od = api.SettlementTransactionTransaction.fromJson( - oJson as core.Map); - checkSettlementTransactionTransaction(od); - }); - }); - - unittest.group('obj-schema-SettlementreportsListResponse', () { - unittest.test('to-json--from-json', () async { - final o = buildSettlementreportsListResponse(); - final oJson = convert.jsonDecode(convert.jsonEncode(o)); - final od = api.SettlementreportsListResponse.fromJson( - oJson as core.Map); - checkSettlementreportsListResponse(od); - }); - }); - - unittest.group('obj-schema-SettlementtransactionsListResponse', () { - unittest.test('to-json--from-json', () async { - final o = buildSettlementtransactionsListResponse(); - final oJson = convert.jsonDecode(convert.jsonEncode(o)); - final od = api.SettlementtransactionsListResponse.fromJson( - oJson as core.Map); - checkSettlementtransactionsListResponse(od); - }); - }); - unittest.group('obj-schema-ShippingSettings', () { unittest.test('to-json--from-json', () async { final o = buildShippingSettings(); @@ -19876,7 +19459,7 @@ void main() { final res = api.ShoppingContentApi(mock).accountstatuses; final arg_merchantId = 'foo'; final arg_accountId = 'foo'; - final arg_destinations = buildUnnamed224(); + final arg_destinations = buildUnnamed220(); final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -19928,7 +19511,7 @@ void main() { final mock = HttpServerMock(); final res = 
api.ShoppingContentApi(mock).accountstatuses; final arg_merchantId = 'foo'; - final arg_destinations = buildUnnamed225(); + final arg_destinations = buildUnnamed221(); final arg_maxResults = 42; final arg_name = 'foo'; final arg_pageToken = 'foo'; @@ -22302,7 +21885,7 @@ void main() { final arg_accountId = 'foo'; final arg_country = 'foo'; final arg_lsfType = 'foo'; - final arg_pickupTypes = buildUnnamed226(); + final arg_pickupTypes = buildUnnamed222(); final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -23773,7 +23356,7 @@ void main() { final res = api.ShoppingContentApi(mock).productstatuses; final arg_merchantId = 'foo'; final arg_productId = 'foo'; - final arg_destinations = buildUnnamed227(); + final arg_destinations = buildUnnamed223(); final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -23825,7 +23408,7 @@ void main() { final mock = HttpServerMock(); final res = api.ShoppingContentApi(mock).productstatuses; final arg_merchantId = 'foo'; - final arg_destinations = buildUnnamed228(); + final arg_destinations = buildUnnamed224(); final arg_maxResults = 42; final arg_pageToken = 'foo'; final arg_$fields = 'foo'; @@ -24229,7 +23812,7 @@ void main() { final mock = HttpServerMock(); final res = api.ShoppingContentApi(mock).recommendations; final arg_merchantId = 'foo'; - final arg_allowedTag = buildUnnamed229(); + final arg_allowedTag = buildUnnamed225(); final arg_languageCode = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -25564,198 +25147,6 @@ void main() { }); }); - unittest.group('resource-SettlementreportsResource', () { - unittest.test('method--get', () async { - final mock = HttpServerMock(); - final res = api.ShoppingContentApi(mock).settlementreports; - final arg_merchantId = 'foo'; - final arg_settlementId = 'foo'; - final arg_$fields = 'foo'; - mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final path = req.url.path; - var pathOffset = 0; - core.int index; - core.String subPart; - unittest.expect( - path.substring(pathOffset, pathOffset + 1), - unittest.equals('/'), - ); - pathOffset += 1; - - final query = req.url.query; - var queryOffset = 0; - final queryMap = >{}; - void addQueryParam(core.String n, core.String v) => - queryMap.putIfAbsent(n, () => []).add(v); - - if (query.isNotEmpty) { - for (var part in query.split('&')) { - final keyValue = part.split('='); - addQueryParam( - core.Uri.decodeQueryComponent(keyValue[0]), - core.Uri.decodeQueryComponent(keyValue[1]), - ); - } - } - unittest.expect( - queryMap['fields']!.first, - unittest.equals(arg_$fields), - ); - - final h = { - 'content-type': 'application/json; charset=utf-8', - }; - final resp = convert.json.encode(buildSettlementReport()); - return async.Future.value(stringResponse(200, h, resp)); - }), true); - final response = - await res.get(arg_merchantId, arg_settlementId, $fields: arg_$fields); - checkSettlementReport(response as api.SettlementReport); - }); - - unittest.test('method--list', () async { - final mock = HttpServerMock(); - final res = api.ShoppingContentApi(mock).settlementreports; - final arg_merchantId = 'foo'; - final arg_maxResults = 42; - final arg_pageToken = 'foo'; - final arg_transferEndDate = 'foo'; - final arg_transferStartDate = 'foo'; - final arg_$fields = 'foo'; - mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final path = req.url.path; - 
var pathOffset = 0; - core.int index; - core.String subPart; - unittest.expect( - path.substring(pathOffset, pathOffset + 1), - unittest.equals('/'), - ); - pathOffset += 1; - - final query = req.url.query; - var queryOffset = 0; - final queryMap = >{}; - void addQueryParam(core.String n, core.String v) => - queryMap.putIfAbsent(n, () => []).add(v); - - if (query.isNotEmpty) { - for (var part in query.split('&')) { - final keyValue = part.split('='); - addQueryParam( - core.Uri.decodeQueryComponent(keyValue[0]), - core.Uri.decodeQueryComponent(keyValue[1]), - ); - } - } - unittest.expect( - core.int.parse(queryMap['maxResults']!.first), - unittest.equals(arg_maxResults), - ); - unittest.expect( - queryMap['pageToken']!.first, - unittest.equals(arg_pageToken), - ); - unittest.expect( - queryMap['transferEndDate']!.first, - unittest.equals(arg_transferEndDate), - ); - unittest.expect( - queryMap['transferStartDate']!.first, - unittest.equals(arg_transferStartDate), - ); - unittest.expect( - queryMap['fields']!.first, - unittest.equals(arg_$fields), - ); - - final h = { - 'content-type': 'application/json; charset=utf-8', - }; - final resp = convert.json.encode(buildSettlementreportsListResponse()); - return async.Future.value(stringResponse(200, h, resp)); - }), true); - final response = await res.list(arg_merchantId, - maxResults: arg_maxResults, - pageToken: arg_pageToken, - transferEndDate: arg_transferEndDate, - transferStartDate: arg_transferStartDate, - $fields: arg_$fields); - checkSettlementreportsListResponse( - response as api.SettlementreportsListResponse); - }); - }); - - unittest.group('resource-SettlementtransactionsResource', () { - unittest.test('method--list', () async { - final mock = HttpServerMock(); - final res = api.ShoppingContentApi(mock).settlementtransactions; - final arg_merchantId = 'foo'; - final arg_settlementId = 'foo'; - final arg_maxResults = 42; - final arg_pageToken = 'foo'; - final arg_transactionIds = buildUnnamed230(); - final arg_$fields = 'foo'; - mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final path = req.url.path; - var pathOffset = 0; - core.int index; - core.String subPart; - unittest.expect( - path.substring(pathOffset, pathOffset + 1), - unittest.equals('/'), - ); - pathOffset += 1; - - final query = req.url.query; - var queryOffset = 0; - final queryMap = >{}; - void addQueryParam(core.String n, core.String v) => - queryMap.putIfAbsent(n, () => []).add(v); - - if (query.isNotEmpty) { - for (var part in query.split('&')) { - final keyValue = part.split('='); - addQueryParam( - core.Uri.decodeQueryComponent(keyValue[0]), - core.Uri.decodeQueryComponent(keyValue[1]), - ); - } - } - unittest.expect( - core.int.parse(queryMap['maxResults']!.first), - unittest.equals(arg_maxResults), - ); - unittest.expect( - queryMap['pageToken']!.first, - unittest.equals(arg_pageToken), - ); - unittest.expect( - queryMap['transactionIds']!, - unittest.equals(arg_transactionIds), - ); - unittest.expect( - queryMap['fields']!.first, - unittest.equals(arg_$fields), - ); - - final h = { - 'content-type': 'application/json; charset=utf-8', - }; - final resp = - convert.json.encode(buildSettlementtransactionsListResponse()); - return async.Future.value(stringResponse(200, h, resp)); - }), true); - final response = await res.list(arg_merchantId, arg_settlementId, - maxResults: arg_maxResults, - pageToken: arg_pageToken, - transactionIds: arg_transactionIds, - $fields: arg_$fields); - checkSettlementtransactionsListResponse( - response as 
api.SettlementtransactionsListResponse);
-    });
-  });
-
   unittest.group('resource-ShippingsettingsResource', () {
     unittest.test('method--custombatch', () async {
       final mock = HttpServerMock();
diff --git a/generated/googleapis/test/datacatalog/v1_test.dart b/generated/googleapis/test/datacatalog/v1_test.dart
index 96f9635b4..6107cc5bb 100644
--- a/generated/googleapis/test/datacatalog/v1_test.dart
+++ b/generated/googleapis/test/datacatalog/v1_test.dart
@@ -1120,6 +1120,7 @@ api.GoogleCloudDatacatalogV1EntryGroup
     o.description = 'foo';
     o.displayName = 'foo';
     o.name = 'foo';
+    o.transferredToDataplex = true;
   }
   buildCounterGoogleCloudDatacatalogV1EntryGroup--;
   return o;
@@ -1142,6 +1143,7 @@ void checkGoogleCloudDatacatalogV1EntryGroup(
       o.name!,
       unittest.equals('foo'),
     );
+    unittest.expect(o.transferredToDataplex!, unittest.isTrue);
   }
   buildCounterGoogleCloudDatacatalogV1EntryGroup--;
 }
@@ -1764,6 +1766,35 @@ void checkGoogleCloudDatacatalogV1LookerSystemSpec(
   buildCounterGoogleCloudDatacatalogV1LookerSystemSpec--;
 }
 
+core.int buildCounterGoogleCloudDatacatalogV1MigrationConfig = 0;
+api.GoogleCloudDatacatalogV1MigrationConfig
+    buildGoogleCloudDatacatalogV1MigrationConfig() {
+  final o = api.GoogleCloudDatacatalogV1MigrationConfig();
+  buildCounterGoogleCloudDatacatalogV1MigrationConfig++;
+  if (buildCounterGoogleCloudDatacatalogV1MigrationConfig < 3) {
+    o.catalogUiExperience = 'foo';
+    o.tagTemplateMigration = 'foo';
+  }
+  buildCounterGoogleCloudDatacatalogV1MigrationConfig--;
+  return o;
+}
+
+void checkGoogleCloudDatacatalogV1MigrationConfig(
+    api.GoogleCloudDatacatalogV1MigrationConfig o) {
+  buildCounterGoogleCloudDatacatalogV1MigrationConfig++;
+  if (buildCounterGoogleCloudDatacatalogV1MigrationConfig < 3) {
+    unittest.expect(
+      o.catalogUiExperience!,
+      unittest.equals('foo'),
+    );
+    unittest.expect(
+      o.tagTemplateMigration!,
+      unittest.equals('foo'),
+    );
+  }
+  buildCounterGoogleCloudDatacatalogV1MigrationConfig--;
+}
+
 core.int buildCounterGoogleCloudDatacatalogV1ModelSpec = 0;
 api.GoogleCloudDatacatalogV1ModelSpec buildGoogleCloudDatacatalogV1ModelSpec() {
   final o = api.GoogleCloudDatacatalogV1ModelSpec();
@@ -1826,6 +1857,40 @@ void checkGoogleCloudDatacatalogV1ModifyEntryOverviewRequest(
   buildCounterGoogleCloudDatacatalogV1ModifyEntryOverviewRequest--;
 }
 
+core.Map<core.String, api.GoogleCloudDatacatalogV1MigrationConfig>
+    buildUnnamed18() => {
+      'x': buildGoogleCloudDatacatalogV1MigrationConfig(),
+      'y': buildGoogleCloudDatacatalogV1MigrationConfig(),
+    };
+
+void checkUnnamed18(
+    core.Map<core.String, api.GoogleCloudDatacatalogV1MigrationConfig> o) {
+  unittest.expect(o, unittest.hasLength(2));
+  checkGoogleCloudDatacatalogV1MigrationConfig(o['x']!);
+  checkGoogleCloudDatacatalogV1MigrationConfig(o['y']!);
+}
+
+core.int buildCounterGoogleCloudDatacatalogV1OrganizationConfig = 0;
+api.GoogleCloudDatacatalogV1OrganizationConfig
+    buildGoogleCloudDatacatalogV1OrganizationConfig() {
+  final o = api.GoogleCloudDatacatalogV1OrganizationConfig();
+  buildCounterGoogleCloudDatacatalogV1OrganizationConfig++;
+  if (buildCounterGoogleCloudDatacatalogV1OrganizationConfig < 3) {
+    o.config = buildUnnamed18();
+  }
+  buildCounterGoogleCloudDatacatalogV1OrganizationConfig--;
+  return o;
+}
+
+void checkGoogleCloudDatacatalogV1OrganizationConfig(
+    api.GoogleCloudDatacatalogV1OrganizationConfig o) {
+  buildCounterGoogleCloudDatacatalogV1OrganizationConfig++;
+  if (buildCounterGoogleCloudDatacatalogV1OrganizationConfig < 3) {
+    checkUnnamed18(o.config!);
+  }
+  buildCounterGoogleCloudDatacatalogV1OrganizationConfig--;
+}
+
 core.int buildCounterGoogleCloudDatacatalogV1PersonalDetails = 0;
api.GoogleCloudDatacatalogV1PersonalDetails buildGoogleCloudDatacatalogV1PersonalDetails() { @@ -2006,12 +2071,12 @@ void checkGoogleCloudDatacatalogV1PhysicalSchemaThriftSchema( buildCounterGoogleCloudDatacatalogV1PhysicalSchemaThriftSchema--; } -core.List buildUnnamed18() => [ +core.List buildUnnamed19() => [ 'foo', 'foo', ]; -void checkUnnamed18(core.List o) { +void checkUnnamed19(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -2028,7 +2093,7 @@ api.GoogleCloudDatacatalogV1PolicyTag buildGoogleCloudDatacatalogV1PolicyTag() { final o = api.GoogleCloudDatacatalogV1PolicyTag(); buildCounterGoogleCloudDatacatalogV1PolicyTag++; if (buildCounterGoogleCloudDatacatalogV1PolicyTag < 3) { - o.childPolicyTags = buildUnnamed18(); + o.childPolicyTags = buildUnnamed19(); o.description = 'foo'; o.displayName = 'foo'; o.name = 'foo'; @@ -2042,7 +2107,7 @@ void checkGoogleCloudDatacatalogV1PolicyTag( api.GoogleCloudDatacatalogV1PolicyTag o) { buildCounterGoogleCloudDatacatalogV1PolicyTag++; if (buildCounterGoogleCloudDatacatalogV1PolicyTag < 3) { - checkUnnamed18(o.childPolicyTags!); + checkUnnamed19(o.childPolicyTags!); unittest.expect( o.description!, unittest.equals('foo'), @@ -2063,12 +2128,12 @@ void checkGoogleCloudDatacatalogV1PolicyTag( buildCounterGoogleCloudDatacatalogV1PolicyTag--; } -core.List buildUnnamed19() => [ +core.List buildUnnamed20() => [ buildGoogleCloudDatacatalogV1Tag(), buildGoogleCloudDatacatalogV1Tag(), ]; -void checkUnnamed19(core.List o) { +void checkUnnamed20(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDatacatalogV1Tag(o[0]); checkGoogleCloudDatacatalogV1Tag(o[1]); @@ -2082,7 +2147,7 @@ api.GoogleCloudDatacatalogV1ReconcileTagsRequest if (buildCounterGoogleCloudDatacatalogV1ReconcileTagsRequest < 3) { o.forceDeleteMissing = true; o.tagTemplate = 'foo'; - o.tags = buildUnnamed19(); + o.tags = buildUnnamed20(); } buildCounterGoogleCloudDatacatalogV1ReconcileTagsRequest--; return o; @@ -2097,7 +2162,7 @@ void checkGoogleCloudDatacatalogV1ReconcileTagsRequest( o.tagTemplate!, unittest.equals('foo'), ); - checkUnnamed19(o.tags!); + checkUnnamed20(o.tags!); } buildCounterGoogleCloudDatacatalogV1ReconcileTagsRequest--; } @@ -2176,12 +2241,12 @@ void checkGoogleCloudDatacatalogV1ReplaceTaxonomyRequest( buildCounterGoogleCloudDatacatalogV1ReplaceTaxonomyRequest--; } -core.List buildUnnamed20() => [ +core.List buildUnnamed21() => [ buildGoogleCloudDatacatalogV1RoutineSpecArgument(), buildGoogleCloudDatacatalogV1RoutineSpecArgument(), ]; -void checkUnnamed20( +void checkUnnamed21( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDatacatalogV1RoutineSpecArgument(o[0]); @@ -2198,7 +2263,7 @@ api.GoogleCloudDatacatalogV1RoutineSpec o.definitionBody = 'foo'; o.language = 'foo'; o.returnType = 'foo'; - o.routineArguments = buildUnnamed20(); + o.routineArguments = buildUnnamed21(); o.routineType = 'foo'; } buildCounterGoogleCloudDatacatalogV1RoutineSpec--; @@ -2222,7 +2287,7 @@ void checkGoogleCloudDatacatalogV1RoutineSpec( o.returnType!, unittest.equals('foo'), ); - checkUnnamed20(o.routineArguments!); + checkUnnamed21(o.routineArguments!); unittest.expect( o.routineType!, unittest.equals('foo'), @@ -2265,12 +2330,12 @@ void checkGoogleCloudDatacatalogV1RoutineSpecArgument( buildCounterGoogleCloudDatacatalogV1RoutineSpecArgument--; } -core.List buildUnnamed21() => [ +core.List buildUnnamed22() => [ buildGoogleCloudDatacatalogV1ColumnSchema(), buildGoogleCloudDatacatalogV1ColumnSchema(), ]; -void 
checkUnnamed21(core.List o) { +void checkUnnamed22(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDatacatalogV1ColumnSchema(o[0]); checkGoogleCloudDatacatalogV1ColumnSchema(o[1]); @@ -2281,7 +2346,7 @@ api.GoogleCloudDatacatalogV1Schema buildGoogleCloudDatacatalogV1Schema() { final o = api.GoogleCloudDatacatalogV1Schema(); buildCounterGoogleCloudDatacatalogV1Schema++; if (buildCounterGoogleCloudDatacatalogV1Schema < 3) { - o.columns = buildUnnamed21(); + o.columns = buildUnnamed22(); } buildCounterGoogleCloudDatacatalogV1Schema--; return o; @@ -2290,7 +2355,7 @@ api.GoogleCloudDatacatalogV1Schema buildGoogleCloudDatacatalogV1Schema() { void checkGoogleCloudDatacatalogV1Schema(api.GoogleCloudDatacatalogV1Schema o) { buildCounterGoogleCloudDatacatalogV1Schema++; if (buildCounterGoogleCloudDatacatalogV1Schema < 3) { - checkUnnamed21(o.columns!); + checkUnnamed22(o.columns!); } buildCounterGoogleCloudDatacatalogV1Schema--; } @@ -2338,12 +2403,12 @@ void checkGoogleCloudDatacatalogV1SearchCatalogRequest( buildCounterGoogleCloudDatacatalogV1SearchCatalogRequest--; } -core.List buildUnnamed22() => [ +core.List buildUnnamed23() => [ 'foo', 'foo', ]; -void checkUnnamed22(core.List o) { +void checkUnnamed23(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -2355,12 +2420,12 @@ void checkUnnamed22(core.List o) { ); } -core.List buildUnnamed23() => [ +core.List buildUnnamed24() => [ 'foo', 'foo', ]; -void checkUnnamed23(core.List o) { +void checkUnnamed24(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -2372,12 +2437,12 @@ void checkUnnamed23(core.List o) { ); } -core.List buildUnnamed24() => [ +core.List buildUnnamed25() => [ 'foo', 'foo', ]; -void checkUnnamed24(core.List o) { +void checkUnnamed25(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -2396,10 +2461,10 @@ api.GoogleCloudDatacatalogV1SearchCatalogRequestScope buildCounterGoogleCloudDatacatalogV1SearchCatalogRequestScope++; if (buildCounterGoogleCloudDatacatalogV1SearchCatalogRequestScope < 3) { o.includeGcpPublicDatasets = true; - o.includeOrgIds = buildUnnamed22(); - o.includeProjectIds = buildUnnamed23(); + o.includeOrgIds = buildUnnamed23(); + o.includeProjectIds = buildUnnamed24(); o.includePublicTagTemplates = true; - o.restrictedLocations = buildUnnamed24(); + o.restrictedLocations = buildUnnamed25(); o.starredOnly = true; } buildCounterGoogleCloudDatacatalogV1SearchCatalogRequestScope--; @@ -2411,33 +2476,33 @@ void checkGoogleCloudDatacatalogV1SearchCatalogRequestScope( buildCounterGoogleCloudDatacatalogV1SearchCatalogRequestScope++; if (buildCounterGoogleCloudDatacatalogV1SearchCatalogRequestScope < 3) { unittest.expect(o.includeGcpPublicDatasets!, unittest.isTrue); - checkUnnamed22(o.includeOrgIds!); - checkUnnamed23(o.includeProjectIds!); + checkUnnamed23(o.includeOrgIds!); + checkUnnamed24(o.includeProjectIds!); unittest.expect(o.includePublicTagTemplates!, unittest.isTrue); - checkUnnamed24(o.restrictedLocations!); + checkUnnamed25(o.restrictedLocations!); unittest.expect(o.starredOnly!, unittest.isTrue); } buildCounterGoogleCloudDatacatalogV1SearchCatalogRequestScope--; } -core.List buildUnnamed25() => [ +core.List buildUnnamed26() => [ buildGoogleCloudDatacatalogV1SearchCatalogResult(), buildGoogleCloudDatacatalogV1SearchCatalogResult(), ]; -void checkUnnamed25( +void checkUnnamed26( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDatacatalogV1SearchCatalogResult(o[0]); 
checkGoogleCloudDatacatalogV1SearchCatalogResult(o[1]); } -core.List buildUnnamed26() => [ +core.List buildUnnamed27() => [ 'foo', 'foo', ]; -void checkUnnamed26(core.List o) { +void checkUnnamed27(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -2456,9 +2521,9 @@ api.GoogleCloudDatacatalogV1SearchCatalogResponse buildCounterGoogleCloudDatacatalogV1SearchCatalogResponse++; if (buildCounterGoogleCloudDatacatalogV1SearchCatalogResponse < 3) { o.nextPageToken = 'foo'; - o.results = buildUnnamed25(); + o.results = buildUnnamed26(); o.totalSize = 42; - o.unreachable = buildUnnamed26(); + o.unreachable = buildUnnamed27(); } buildCounterGoogleCloudDatacatalogV1SearchCatalogResponse--; return o; @@ -2472,12 +2537,12 @@ void checkGoogleCloudDatacatalogV1SearchCatalogResponse( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed25(o.results!); + checkUnnamed26(o.results!); unittest.expect( o.totalSize!, unittest.equals(42), ); - checkUnnamed26(o.unreachable!); + checkUnnamed27(o.unreachable!); } buildCounterGoogleCloudDatacatalogV1SearchCatalogResponse--; } @@ -2551,12 +2616,12 @@ void checkGoogleCloudDatacatalogV1SearchCatalogResult( buildCounterGoogleCloudDatacatalogV1SearchCatalogResult--; } -core.List buildUnnamed27() => [ +core.List buildUnnamed28() => [ buildGoogleCloudDatacatalogV1SerializedPolicyTag(), buildGoogleCloudDatacatalogV1SerializedPolicyTag(), ]; -void checkUnnamed27( +void checkUnnamed28( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDatacatalogV1SerializedPolicyTag(o[0]); @@ -2569,7 +2634,7 @@ api.GoogleCloudDatacatalogV1SerializedPolicyTag final o = api.GoogleCloudDatacatalogV1SerializedPolicyTag(); buildCounterGoogleCloudDatacatalogV1SerializedPolicyTag++; if (buildCounterGoogleCloudDatacatalogV1SerializedPolicyTag < 3) { - o.childPolicyTags = buildUnnamed27(); + o.childPolicyTags = buildUnnamed28(); o.description = 'foo'; o.displayName = 'foo'; o.policyTag = 'foo'; @@ -2582,7 +2647,7 @@ void checkGoogleCloudDatacatalogV1SerializedPolicyTag( api.GoogleCloudDatacatalogV1SerializedPolicyTag o) { buildCounterGoogleCloudDatacatalogV1SerializedPolicyTag++; if (buildCounterGoogleCloudDatacatalogV1SerializedPolicyTag < 3) { - checkUnnamed27(o.childPolicyTags!); + checkUnnamed28(o.childPolicyTags!); unittest.expect( o.description!, unittest.equals('foo'), @@ -2599,12 +2664,12 @@ void checkGoogleCloudDatacatalogV1SerializedPolicyTag( buildCounterGoogleCloudDatacatalogV1SerializedPolicyTag--; } -core.List buildUnnamed28() => [ +core.List buildUnnamed29() => [ 'foo', 'foo', ]; -void checkUnnamed28(core.List o) { +void checkUnnamed29(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -2616,12 +2681,12 @@ void checkUnnamed28(core.List o) { ); } -core.List buildUnnamed29() => [ +core.List buildUnnamed30() => [ buildGoogleCloudDatacatalogV1SerializedPolicyTag(), buildGoogleCloudDatacatalogV1SerializedPolicyTag(), ]; -void checkUnnamed29( +void checkUnnamed30( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDatacatalogV1SerializedPolicyTag(o[0]); @@ -2634,10 +2699,10 @@ api.GoogleCloudDatacatalogV1SerializedTaxonomy final o = api.GoogleCloudDatacatalogV1SerializedTaxonomy(); buildCounterGoogleCloudDatacatalogV1SerializedTaxonomy++; if (buildCounterGoogleCloudDatacatalogV1SerializedTaxonomy < 3) { - o.activatedPolicyTypes = buildUnnamed28(); + o.activatedPolicyTypes = buildUnnamed29(); o.description = 'foo'; o.displayName = 'foo'; - o.policyTags = buildUnnamed29(); + 
o.policyTags = buildUnnamed30(); } buildCounterGoogleCloudDatacatalogV1SerializedTaxonomy--; return o; @@ -2647,7 +2712,7 @@ void checkGoogleCloudDatacatalogV1SerializedTaxonomy( api.GoogleCloudDatacatalogV1SerializedTaxonomy o) { buildCounterGoogleCloudDatacatalogV1SerializedTaxonomy++; if (buildCounterGoogleCloudDatacatalogV1SerializedTaxonomy < 3) { - checkUnnamed28(o.activatedPolicyTypes!); + checkUnnamed29(o.activatedPolicyTypes!); unittest.expect( o.description!, unittest.equals('foo'), @@ -2656,7 +2721,7 @@ void checkGoogleCloudDatacatalogV1SerializedTaxonomy( o.displayName!, unittest.equals('foo'), ); - checkUnnamed29(o.policyTags!); + checkUnnamed30(o.policyTags!); } buildCounterGoogleCloudDatacatalogV1SerializedTaxonomy--; } @@ -2684,6 +2749,35 @@ void checkGoogleCloudDatacatalogV1ServiceSpec( buildCounterGoogleCloudDatacatalogV1ServiceSpec--; } +core.int buildCounterGoogleCloudDatacatalogV1SetConfigRequest = 0; +api.GoogleCloudDatacatalogV1SetConfigRequest + buildGoogleCloudDatacatalogV1SetConfigRequest() { + final o = api.GoogleCloudDatacatalogV1SetConfigRequest(); + buildCounterGoogleCloudDatacatalogV1SetConfigRequest++; + if (buildCounterGoogleCloudDatacatalogV1SetConfigRequest < 3) { + o.catalogUiExperience = 'foo'; + o.tagTemplateMigration = 'foo'; + } + buildCounterGoogleCloudDatacatalogV1SetConfigRequest--; + return o; +} + +void checkGoogleCloudDatacatalogV1SetConfigRequest( + api.GoogleCloudDatacatalogV1SetConfigRequest o) { + buildCounterGoogleCloudDatacatalogV1SetConfigRequest++; + if (buildCounterGoogleCloudDatacatalogV1SetConfigRequest < 3) { + unittest.expect( + o.catalogUiExperience!, + unittest.equals('foo'), + ); + unittest.expect( + o.tagTemplateMigration!, + unittest.equals('foo'), + ); + } + buildCounterGoogleCloudDatacatalogV1SetConfigRequest--; +} + core.int buildCounterGoogleCloudDatacatalogV1SqlDatabaseSystemSpec = 0; api.GoogleCloudDatacatalogV1SqlDatabaseSystemSpec buildGoogleCloudDatacatalogV1SqlDatabaseSystemSpec() { @@ -2752,12 +2846,12 @@ void checkGoogleCloudDatacatalogV1StarEntryResponse( buildCounterGoogleCloudDatacatalogV1StarEntryResponse--; } -core.List buildUnnamed30() => [ +core.List buildUnnamed31() => [ 'foo', 'foo', ]; -void checkUnnamed30(core.List o) { +void checkUnnamed31(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -2775,7 +2869,7 @@ api.GoogleCloudDatacatalogV1StorageProperties final o = api.GoogleCloudDatacatalogV1StorageProperties(); buildCounterGoogleCloudDatacatalogV1StorageProperties++; if (buildCounterGoogleCloudDatacatalogV1StorageProperties < 3) { - o.filePattern = buildUnnamed30(); + o.filePattern = buildUnnamed31(); o.fileType = 'foo'; } buildCounterGoogleCloudDatacatalogV1StorageProperties--; @@ -2786,7 +2880,7 @@ void checkGoogleCloudDatacatalogV1StorageProperties( api.GoogleCloudDatacatalogV1StorageProperties o) { buildCounterGoogleCloudDatacatalogV1StorageProperties++; if (buildCounterGoogleCloudDatacatalogV1StorageProperties < 3) { - checkUnnamed30(o.filePattern!); + checkUnnamed31(o.filePattern!); unittest.expect( o.fileType!, unittest.equals('foo'), @@ -2852,13 +2946,13 @@ void checkGoogleCloudDatacatalogV1TableSpec( buildCounterGoogleCloudDatacatalogV1TableSpec--; } -core.Map buildUnnamed31() => +core.Map buildUnnamed32() => { 'x': buildGoogleCloudDatacatalogV1TagField(), 'y': buildGoogleCloudDatacatalogV1TagField(), }; -void checkUnnamed31( +void checkUnnamed32( core.Map o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDatacatalogV1TagField(o['x']!); @@ -2871,7 
+2965,8 @@ api.GoogleCloudDatacatalogV1Tag buildGoogleCloudDatacatalogV1Tag() { buildCounterGoogleCloudDatacatalogV1Tag++; if (buildCounterGoogleCloudDatacatalogV1Tag < 3) { o.column = 'foo'; - o.fields = buildUnnamed31(); + o.dataplexTransferStatus = 'foo'; + o.fields = buildUnnamed32(); o.name = 'foo'; o.template = 'foo'; o.templateDisplayName = 'foo'; @@ -2887,7 +2982,11 @@ void checkGoogleCloudDatacatalogV1Tag(api.GoogleCloudDatacatalogV1Tag o) { o.column!, unittest.equals('foo'), ); - checkUnnamed31(o.fields!); + unittest.expect( + o.dataplexTransferStatus!, + unittest.equals('foo'), + ); + checkUnnamed32(o.fields!); unittest.expect( o.name!, unittest.equals('foo'), @@ -2981,12 +3080,12 @@ void checkGoogleCloudDatacatalogV1TagFieldEnumValue( } core.Map - buildUnnamed32() => { + buildUnnamed33() => { 'x': buildGoogleCloudDatacatalogV1TagTemplateField(), 'y': buildGoogleCloudDatacatalogV1TagTemplateField(), }; -void checkUnnamed32( +void checkUnnamed33( core.Map o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDatacatalogV1TagTemplateField(o['x']!); @@ -3001,7 +3100,7 @@ api.GoogleCloudDatacatalogV1TagTemplate if (buildCounterGoogleCloudDatacatalogV1TagTemplate < 3) { o.dataplexTransferStatus = 'foo'; o.displayName = 'foo'; - o.fields = buildUnnamed32(); + o.fields = buildUnnamed33(); o.isPubliclyReadable = true; o.name = 'foo'; } @@ -3021,7 +3120,7 @@ void checkGoogleCloudDatacatalogV1TagTemplate( o.displayName!, unittest.equals('foo'), ); - checkUnnamed32(o.fields!); + checkUnnamed33(o.fields!); unittest.expect(o.isPubliclyReadable!, unittest.isTrue); unittest.expect( o.name!, @@ -3074,12 +3173,12 @@ void checkGoogleCloudDatacatalogV1TagTemplateField( buildCounterGoogleCloudDatacatalogV1TagTemplateField--; } -core.List buildUnnamed33() => [ +core.List buildUnnamed34() => [ 'foo', 'foo', ]; -void checkUnnamed33(core.List o) { +void checkUnnamed34(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -3096,7 +3195,7 @@ api.GoogleCloudDatacatalogV1Taxonomy buildGoogleCloudDatacatalogV1Taxonomy() { final o = api.GoogleCloudDatacatalogV1Taxonomy(); buildCounterGoogleCloudDatacatalogV1Taxonomy++; if (buildCounterGoogleCloudDatacatalogV1Taxonomy < 3) { - o.activatedPolicyTypes = buildUnnamed33(); + o.activatedPolicyTypes = buildUnnamed34(); o.description = 'foo'; o.displayName = 'foo'; o.name = 'foo'; @@ -3112,7 +3211,7 @@ void checkGoogleCloudDatacatalogV1Taxonomy( api.GoogleCloudDatacatalogV1Taxonomy o) { buildCounterGoogleCloudDatacatalogV1Taxonomy++; if (buildCounterGoogleCloudDatacatalogV1Taxonomy < 3) { - checkUnnamed33(o.activatedPolicyTypes!); + checkUnnamed34(o.activatedPolicyTypes!); unittest.expect( o.description!, unittest.equals('foo'), @@ -3199,12 +3298,12 @@ void checkGoogleCloudDatacatalogV1UnstarEntryResponse( } core.Map - buildUnnamed34() => { + buildUnnamed35() => { 'x': buildGoogleCloudDatacatalogV1CommonUsageStats(), 'y': buildGoogleCloudDatacatalogV1CommonUsageStats(), }; -void checkUnnamed34( +void checkUnnamed35( core.Map o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDatacatalogV1CommonUsageStats(o['x']!); @@ -3212,12 +3311,12 @@ void checkUnnamed34( } core.Map - buildUnnamed35() => { + buildUnnamed36() => { 'x': buildGoogleCloudDatacatalogV1UsageStats(), 'y': buildGoogleCloudDatacatalogV1UsageStats(), }; -void checkUnnamed35( +void checkUnnamed36( core.Map o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDatacatalogV1UsageStats(o['x']!); @@ -3230,10 +3329,10 @@ 
api.GoogleCloudDatacatalogV1UsageSignal final o = api.GoogleCloudDatacatalogV1UsageSignal(); buildCounterGoogleCloudDatacatalogV1UsageSignal++; if (buildCounterGoogleCloudDatacatalogV1UsageSignal < 3) { - o.commonUsageWithinTimeRange = buildUnnamed34(); + o.commonUsageWithinTimeRange = buildUnnamed35(); o.favoriteCount = 'foo'; o.updateTime = 'foo'; - o.usageWithinTimeRange = buildUnnamed35(); + o.usageWithinTimeRange = buildUnnamed36(); } buildCounterGoogleCloudDatacatalogV1UsageSignal--; return o; @@ -3243,7 +3342,7 @@ void checkGoogleCloudDatacatalogV1UsageSignal( api.GoogleCloudDatacatalogV1UsageSignal o) { buildCounterGoogleCloudDatacatalogV1UsageSignal++; if (buildCounterGoogleCloudDatacatalogV1UsageSignal < 3) { - checkUnnamed34(o.commonUsageWithinTimeRange!); + checkUnnamed35(o.commonUsageWithinTimeRange!); unittest.expect( o.favoriteCount!, unittest.equals('foo'), @@ -3252,7 +3351,7 @@ void checkGoogleCloudDatacatalogV1UsageSignal( o.updateTime!, unittest.equals('foo'), ); - checkUnnamed35(o.usageWithinTimeRange!); + checkUnnamed36(o.usageWithinTimeRange!); } buildCounterGoogleCloudDatacatalogV1UsageSignal--; } @@ -3351,12 +3450,12 @@ void checkGoogleCloudDatacatalogV1VertexModelSourceInfo( buildCounterGoogleCloudDatacatalogV1VertexModelSourceInfo--; } -core.List buildUnnamed36() => [ +core.List buildUnnamed37() => [ 'foo', 'foo', ]; -void checkUnnamed36(core.List o) { +void checkUnnamed37(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -3375,7 +3474,7 @@ api.GoogleCloudDatacatalogV1VertexModelSpec buildCounterGoogleCloudDatacatalogV1VertexModelSpec++; if (buildCounterGoogleCloudDatacatalogV1VertexModelSpec < 3) { o.containerImageUri = 'foo'; - o.versionAliases = buildUnnamed36(); + o.versionAliases = buildUnnamed37(); o.versionDescription = 'foo'; o.versionId = 'foo'; o.vertexModelSourceInfo = @@ -3393,7 +3492,7 @@ void checkGoogleCloudDatacatalogV1VertexModelSpec( o.containerImageUri!, unittest.equals('foo'), ); - checkUnnamed36(o.versionAliases!); + checkUnnamed37(o.versionAliases!); unittest.expect( o.versionDescription!, unittest.equals('foo'), @@ -3431,12 +3530,12 @@ void checkGoogleCloudDatacatalogV1ViewSpec( buildCounterGoogleCloudDatacatalogV1ViewSpec--; } -core.List buildUnnamed37() => [ +core.List buildUnnamed38() => [ buildOperation(), buildOperation(), ]; -void checkUnnamed37(core.List o) { +void checkUnnamed38(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkOperation(o[0]); checkOperation(o[1]); @@ -3448,7 +3547,7 @@ api.ListOperationsResponse buildListOperationsResponse() { buildCounterListOperationsResponse++; if (buildCounterListOperationsResponse < 3) { o.nextPageToken = 'foo'; - o.operations = buildUnnamed37(); + o.operations = buildUnnamed38(); } buildCounterListOperationsResponse--; return o; @@ -3461,12 +3560,12 @@ void checkListOperationsResponse(api.ListOperationsResponse o) { o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed37(o.operations!); + checkUnnamed38(o.operations!); } buildCounterListOperationsResponse--; } -core.Map buildUnnamed38() => { +core.Map buildUnnamed39() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -3479,7 +3578,7 @@ core.Map buildUnnamed38() => { }, }; -void checkUnnamed38(core.Map o) { +void checkUnnamed39(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted1 = (o['x']!) 
as core.Map; unittest.expect(casted1, unittest.hasLength(3)); @@ -3511,7 +3610,7 @@ void checkUnnamed38(core.Map o) { ); } -core.Map buildUnnamed39() => { +core.Map buildUnnamed40() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -3524,7 +3623,7 @@ core.Map buildUnnamed39() => { }, }; -void checkUnnamed39(core.Map o) { +void checkUnnamed40(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted3 = (o['x']!) as core.Map; unittest.expect(casted3, unittest.hasLength(3)); @@ -3563,9 +3662,9 @@ api.Operation buildOperation() { if (buildCounterOperation < 3) { o.done = true; o.error = buildStatus(); - o.metadata = buildUnnamed38(); + o.metadata = buildUnnamed39(); o.name = 'foo'; - o.response = buildUnnamed39(); + o.response = buildUnnamed40(); } buildCounterOperation--; return o; @@ -3576,22 +3675,22 @@ void checkOperation(api.Operation o) { if (buildCounterOperation < 3) { unittest.expect(o.done!, unittest.isTrue); checkStatus(o.error!); - checkUnnamed38(o.metadata!); + checkUnnamed39(o.metadata!); unittest.expect( o.name!, unittest.equals('foo'), ); - checkUnnamed39(o.response!); + checkUnnamed40(o.response!); } buildCounterOperation--; } -core.List buildUnnamed40() => [ +core.List buildUnnamed41() => [ buildBinding(), buildBinding(), ]; -void checkUnnamed40(core.List o) { +void checkUnnamed41(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkBinding(o[0]); checkBinding(o[1]); @@ -3602,7 +3701,7 @@ api.Policy buildPolicy() { final o = api.Policy(); buildCounterPolicy++; if (buildCounterPolicy < 3) { - o.bindings = buildUnnamed40(); + o.bindings = buildUnnamed41(); o.etag = 'foo'; o.version = 42; } @@ -3613,7 +3712,7 @@ api.Policy buildPolicy() { void checkPolicy(api.Policy o) { buildCounterPolicy++; if (buildCounterPolicy < 3) { - checkUnnamed40(o.bindings!); + checkUnnamed41(o.bindings!); unittest.expect( o.etag!, unittest.equals('foo'), @@ -3645,7 +3744,7 @@ void checkSetIamPolicyRequest(api.SetIamPolicyRequest o) { buildCounterSetIamPolicyRequest--; } -core.Map buildUnnamed41() => { +core.Map buildUnnamed42() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -3658,7 +3757,7 @@ core.Map buildUnnamed41() => { }, }; -void checkUnnamed41(core.Map o) { +void checkUnnamed42(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted5 = (o['x']!) 
as core.Map; unittest.expect(casted5, unittest.hasLength(3)); @@ -3690,15 +3789,15 @@ void checkUnnamed41(core.Map o) { ); } -core.List> buildUnnamed42() => [ - buildUnnamed41(), - buildUnnamed41(), +core.List> buildUnnamed43() => [ + buildUnnamed42(), + buildUnnamed42(), ]; -void checkUnnamed42(core.List> o) { +void checkUnnamed43(core.List> o) { unittest.expect(o, unittest.hasLength(2)); - checkUnnamed41(o[0]); - checkUnnamed41(o[1]); + checkUnnamed42(o[0]); + checkUnnamed42(o[1]); } core.int buildCounterStatus = 0; @@ -3707,7 +3806,7 @@ api.Status buildStatus() { buildCounterStatus++; if (buildCounterStatus < 3) { o.code = 42; - o.details = buildUnnamed42(); + o.details = buildUnnamed43(); o.message = 'foo'; } buildCounterStatus--; @@ -3721,7 +3820,7 @@ void checkStatus(api.Status o) { o.code!, unittest.equals(42), ); - checkUnnamed42(o.details!); + checkUnnamed43(o.details!); unittest.expect( o.message!, unittest.equals('foo'), @@ -3730,12 +3829,12 @@ void checkStatus(api.Status o) { buildCounterStatus--; } -core.List buildUnnamed43() => [ +core.List buildUnnamed44() => [ 'foo', 'foo', ]; -void checkUnnamed43(core.List o) { +void checkUnnamed44(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -3752,7 +3851,7 @@ api.TestIamPermissionsRequest buildTestIamPermissionsRequest() { final o = api.TestIamPermissionsRequest(); buildCounterTestIamPermissionsRequest++; if (buildCounterTestIamPermissionsRequest < 3) { - o.permissions = buildUnnamed43(); + o.permissions = buildUnnamed44(); } buildCounterTestIamPermissionsRequest--; return o; @@ -3761,17 +3860,17 @@ api.TestIamPermissionsRequest buildTestIamPermissionsRequest() { void checkTestIamPermissionsRequest(api.TestIamPermissionsRequest o) { buildCounterTestIamPermissionsRequest++; if (buildCounterTestIamPermissionsRequest < 3) { - checkUnnamed43(o.permissions!); + checkUnnamed44(o.permissions!); } buildCounterTestIamPermissionsRequest--; } -core.List buildUnnamed44() => [ +core.List buildUnnamed45() => [ 'foo', 'foo', ]; -void checkUnnamed44(core.List o) { +void checkUnnamed45(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -3788,7 +3887,7 @@ api.TestIamPermissionsResponse buildTestIamPermissionsResponse() { final o = api.TestIamPermissionsResponse(); buildCounterTestIamPermissionsResponse++; if (buildCounterTestIamPermissionsResponse < 3) { - o.permissions = buildUnnamed44(); + o.permissions = buildUnnamed45(); } buildCounterTestIamPermissionsResponse--; return o; @@ -3797,17 +3896,17 @@ api.TestIamPermissionsResponse buildTestIamPermissionsResponse() { void checkTestIamPermissionsResponse(api.TestIamPermissionsResponse o) { buildCounterTestIamPermissionsResponse++; if (buildCounterTestIamPermissionsResponse < 3) { - checkUnnamed44(o.permissions!); + checkUnnamed45(o.permissions!); } buildCounterTestIamPermissionsResponse--; } -core.List buildUnnamed45() => [ +core.List buildUnnamed46() => [ 'foo', 'foo', ]; -void checkUnnamed45(core.List o) { +void checkUnnamed46(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -4359,6 +4458,16 @@ void main() { }); }); + unittest.group('obj-schema-GoogleCloudDatacatalogV1MigrationConfig', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudDatacatalogV1MigrationConfig(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudDatacatalogV1MigrationConfig.fromJson( + oJson as core.Map); + checkGoogleCloudDatacatalogV1MigrationConfig(od); + }); 
+ }); + unittest.group('obj-schema-GoogleCloudDatacatalogV1ModelSpec', () { unittest.test('to-json--from-json', () async { final o = buildGoogleCloudDatacatalogV1ModelSpec(); @@ -4393,6 +4502,16 @@ void main() { }); }); + unittest.group('obj-schema-GoogleCloudDatacatalogV1OrganizationConfig', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudDatacatalogV1OrganizationConfig(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudDatacatalogV1OrganizationConfig.fromJson( + oJson as core.Map); + checkGoogleCloudDatacatalogV1OrganizationConfig(od); + }); + }); + unittest.group('obj-schema-GoogleCloudDatacatalogV1PersonalDetails', () { unittest.test('to-json--from-json', () async { final o = buildGoogleCloudDatacatalogV1PersonalDetails(); @@ -4641,6 +4760,16 @@ void main() { }); }); + unittest.group('obj-schema-GoogleCloudDatacatalogV1SetConfigRequest', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudDatacatalogV1SetConfigRequest(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudDatacatalogV1SetConfigRequest.fromJson( + oJson as core.Map); + checkGoogleCloudDatacatalogV1SetConfigRequest(od); + }); + }); + unittest.group('obj-schema-GoogleCloudDatacatalogV1SqlDatabaseSystemSpec', () { unittest.test('to-json--from-json', () async { @@ -5066,6 +5195,294 @@ void main() { }); }); + unittest.group('resource-OrganizationsLocationsResource', () { + unittest.test('method--retrieveConfig', () async { + final mock = HttpServerMock(); + final res = api.DataCatalogApi(mock).organizations.locations; + final arg_name = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json + .encode(buildGoogleCloudDatacatalogV1OrganizationConfig()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.retrieveConfig(arg_name, $fields: arg_$fields); + checkGoogleCloudDatacatalogV1OrganizationConfig( + response as api.GoogleCloudDatacatalogV1OrganizationConfig); + }); + + unittest.test('method--retrieveEffectiveConfig', () async { + final mock = HttpServerMock(); + final res = api.DataCatalogApi(mock).organizations.locations; + final arg_name = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + 
unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = + convert.json.encode(buildGoogleCloudDatacatalogV1MigrationConfig()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = + await res.retrieveEffectiveConfig(arg_name, $fields: arg_$fields); + checkGoogleCloudDatacatalogV1MigrationConfig( + response as api.GoogleCloudDatacatalogV1MigrationConfig); + }); + + unittest.test('method--setConfig', () async { + final mock = HttpServerMock(); + final res = api.DataCatalogApi(mock).organizations.locations; + final arg_request = buildGoogleCloudDatacatalogV1SetConfigRequest(); + final arg_name = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.GoogleCloudDatacatalogV1SetConfigRequest.fromJson( + json as core.Map); + checkGoogleCloudDatacatalogV1SetConfigRequest(obj); + + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = + convert.json.encode(buildGoogleCloudDatacatalogV1MigrationConfig()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = + await res.setConfig(arg_request, arg_name, $fields: arg_$fields); + checkGoogleCloudDatacatalogV1MigrationConfig( + response as api.GoogleCloudDatacatalogV1MigrationConfig); + }); + }); + + unittest.group('resource-ProjectsLocationsResource', () { + unittest.test('method--retrieveEffectiveConfig', () async { + final mock = HttpServerMock(); + final res = api.DataCatalogApi(mock).projects.locations; + final arg_name = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + 
unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = + convert.json.encode(buildGoogleCloudDatacatalogV1MigrationConfig()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = + await res.retrieveEffectiveConfig(arg_name, $fields: arg_$fields); + checkGoogleCloudDatacatalogV1MigrationConfig( + response as api.GoogleCloudDatacatalogV1MigrationConfig); + }); + + unittest.test('method--setConfig', () async { + final mock = HttpServerMock(); + final res = api.DataCatalogApi(mock).projects.locations; + final arg_request = buildGoogleCloudDatacatalogV1SetConfigRequest(); + final arg_name = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.GoogleCloudDatacatalogV1SetConfigRequest.fromJson( + json as core.Map); + checkGoogleCloudDatacatalogV1SetConfigRequest(obj); + + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = + convert.json.encode(buildGoogleCloudDatacatalogV1MigrationConfig()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = + await res.setConfig(arg_request, arg_name, $fields: arg_$fields); + checkGoogleCloudDatacatalogV1MigrationConfig( + response as api.GoogleCloudDatacatalogV1MigrationConfig); + }); + }); + unittest.group('resource-ProjectsLocationsEntryGroupsResource', () { unittest.test('method--create', () async { final mock = HttpServerMock(); @@ -7942,7 +8359,7 @@ void main() { final res = api.DataCatalogApi(mock).projects.locations.taxonomies; final arg_parent = 'foo'; final arg_serializedTaxonomies = true; - final arg_taxonomies = buildUnnamed45(); + final arg_taxonomies = buildUnnamed46(); final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; diff --git a/generated/googleapis/test/datafusion/v1_test.dart 
b/generated/googleapis/test/datafusion/v1_test.dart index 965248fd4..8cd9113af 100644 --- a/generated/googleapis/test/datafusion/v1_test.dart +++ b/generated/googleapis/test/datafusion/v1_test.dart @@ -555,6 +555,17 @@ void checkUnnamed8(core.List o) { checkVersion(o[1]); } +core.List buildUnnamed9() => [ + buildVersion(), + buildVersion(), + ]; + +void checkUnnamed9(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkVersion(o[0]); + checkVersion(o[1]); +} + core.int buildCounterListAvailableVersionsResponse = 0; api.ListAvailableVersionsResponse buildListAvailableVersionsResponse() { final o = api.ListAvailableVersionsResponse(); @@ -562,6 +573,7 @@ api.ListAvailableVersionsResponse buildListAvailableVersionsResponse() { if (buildCounterListAvailableVersionsResponse < 3) { o.availableVersions = buildUnnamed8(); o.nextPageToken = 'foo'; + o.versions = buildUnnamed9(); } buildCounterListAvailableVersionsResponse--; return o; @@ -575,16 +587,17 @@ void checkListAvailableVersionsResponse(api.ListAvailableVersionsResponse o) { o.nextPageToken!, unittest.equals('foo'), ); + checkUnnamed9(o.versions!); } buildCounterListAvailableVersionsResponse--; } -core.List buildUnnamed9() => [ +core.List buildUnnamed10() => [ buildDnsPeering(), buildDnsPeering(), ]; -void checkUnnamed9(core.List o) { +void checkUnnamed10(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkDnsPeering(o[0]); checkDnsPeering(o[1]); @@ -595,7 +608,7 @@ api.ListDnsPeeringsResponse buildListDnsPeeringsResponse() { final o = api.ListDnsPeeringsResponse(); buildCounterListDnsPeeringsResponse++; if (buildCounterListDnsPeeringsResponse < 3) { - o.dnsPeerings = buildUnnamed9(); + o.dnsPeerings = buildUnnamed10(); o.nextPageToken = 'foo'; } buildCounterListDnsPeeringsResponse--; @@ -605,7 +618,7 @@ api.ListDnsPeeringsResponse buildListDnsPeeringsResponse() { void checkListDnsPeeringsResponse(api.ListDnsPeeringsResponse o) { buildCounterListDnsPeeringsResponse++; if (buildCounterListDnsPeeringsResponse < 3) { - checkUnnamed9(o.dnsPeerings!); + checkUnnamed10(o.dnsPeerings!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -614,23 +627,23 @@ void checkListDnsPeeringsResponse(api.ListDnsPeeringsResponse o) { buildCounterListDnsPeeringsResponse--; } -core.List buildUnnamed10() => [ +core.List buildUnnamed11() => [ buildInstance(), buildInstance(), ]; -void checkUnnamed10(core.List o) { +void checkUnnamed11(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkInstance(o[0]); checkInstance(o[1]); } -core.List buildUnnamed11() => [ +core.List buildUnnamed12() => [ 'foo', 'foo', ]; -void checkUnnamed11(core.List o) { +void checkUnnamed12(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -647,9 +660,9 @@ api.ListInstancesResponse buildListInstancesResponse() { final o = api.ListInstancesResponse(); buildCounterListInstancesResponse++; if (buildCounterListInstancesResponse < 3) { - o.instances = buildUnnamed10(); + o.instances = buildUnnamed11(); o.nextPageToken = 'foo'; - o.unreachable = buildUnnamed11(); + o.unreachable = buildUnnamed12(); } buildCounterListInstancesResponse--; return o; @@ -658,22 +671,22 @@ api.ListInstancesResponse buildListInstancesResponse() { void checkListInstancesResponse(api.ListInstancesResponse o) { buildCounterListInstancesResponse++; if (buildCounterListInstancesResponse < 3) { - checkUnnamed10(o.instances!); + checkUnnamed11(o.instances!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), ); - 
checkUnnamed11(o.unreachable!); + checkUnnamed12(o.unreachable!); } buildCounterListInstancesResponse--; } -core.List buildUnnamed12() => [ +core.List buildUnnamed13() => [ buildLocation(), buildLocation(), ]; -void checkUnnamed12(core.List o) { +void checkUnnamed13(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkLocation(o[0]); checkLocation(o[1]); @@ -684,7 +697,7 @@ api.ListLocationsResponse buildListLocationsResponse() { final o = api.ListLocationsResponse(); buildCounterListLocationsResponse++; if (buildCounterListLocationsResponse < 3) { - o.locations = buildUnnamed12(); + o.locations = buildUnnamed13(); o.nextPageToken = 'foo'; } buildCounterListLocationsResponse--; @@ -694,7 +707,7 @@ api.ListLocationsResponse buildListLocationsResponse() { void checkListLocationsResponse(api.ListLocationsResponse o) { buildCounterListLocationsResponse++; if (buildCounterListLocationsResponse < 3) { - checkUnnamed12(o.locations!); + checkUnnamed13(o.locations!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -703,12 +716,12 @@ void checkListLocationsResponse(api.ListLocationsResponse o) { buildCounterListLocationsResponse--; } -core.List buildUnnamed13() => [ +core.List buildUnnamed14() => [ buildOperation(), buildOperation(), ]; -void checkUnnamed13(core.List o) { +void checkUnnamed14(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkOperation(o[0]); checkOperation(o[1]); @@ -720,7 +733,7 @@ api.ListOperationsResponse buildListOperationsResponse() { buildCounterListOperationsResponse++; if (buildCounterListOperationsResponse < 3) { o.nextPageToken = 'foo'; - o.operations = buildUnnamed13(); + o.operations = buildUnnamed14(); } buildCounterListOperationsResponse--; return o; @@ -733,17 +746,17 @@ void checkListOperationsResponse(api.ListOperationsResponse o) { o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed13(o.operations!); + checkUnnamed14(o.operations!); } buildCounterListOperationsResponse--; } -core.Map buildUnnamed14() => { +core.Map buildUnnamed15() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed14(core.Map o) { +void checkUnnamed15(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -755,7 +768,7 @@ void checkUnnamed14(core.Map o) { ); } -core.Map buildUnnamed15() => { +core.Map buildUnnamed16() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -768,7 +781,7 @@ core.Map buildUnnamed15() => { }, }; -void checkUnnamed15(core.Map o) { +void checkUnnamed16(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted1 = (o['x']!) 
as core.Map; unittest.expect(casted1, unittest.hasLength(3)); @@ -806,9 +819,9 @@ api.Location buildLocation() { buildCounterLocation++; if (buildCounterLocation < 3) { o.displayName = 'foo'; - o.labels = buildUnnamed14(); + o.labels = buildUnnamed15(); o.locationId = 'foo'; - o.metadata = buildUnnamed15(); + o.metadata = buildUnnamed16(); o.name = 'foo'; } buildCounterLocation--; @@ -822,12 +835,12 @@ void checkLocation(api.Location o) { o.displayName!, unittest.equals('foo'), ); - checkUnnamed14(o.labels!); + checkUnnamed15(o.labels!); unittest.expect( o.locationId!, unittest.equals('foo'), ); - checkUnnamed15(o.metadata!); + checkUnnamed16(o.metadata!); unittest.expect( o.name!, unittest.equals('foo'), @@ -910,7 +923,7 @@ void checkNetworkConfig(api.NetworkConfig o) { buildCounterNetworkConfig--; } -core.Map buildUnnamed16() => { +core.Map buildUnnamed17() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -923,7 +936,7 @@ core.Map buildUnnamed16() => { }, }; -void checkUnnamed16(core.Map o) { +void checkUnnamed17(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted3 = (o['x']!) as core.Map; unittest.expect(casted3, unittest.hasLength(3)); @@ -955,7 +968,7 @@ void checkUnnamed16(core.Map o) { ); } -core.Map buildUnnamed17() => { +core.Map buildUnnamed18() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -968,7 +981,7 @@ core.Map buildUnnamed17() => { }, }; -void checkUnnamed17(core.Map o) { +void checkUnnamed18(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted5 = (o['x']!) as core.Map; unittest.expect(casted5, unittest.hasLength(3)); @@ -1007,9 +1020,9 @@ api.Operation buildOperation() { if (buildCounterOperation < 3) { o.done = true; o.error = buildStatus(); - o.metadata = buildUnnamed16(); + o.metadata = buildUnnamed17(); o.name = 'foo'; - o.response = buildUnnamed17(); + o.response = buildUnnamed18(); } buildCounterOperation--; return o; @@ -1020,33 +1033,33 @@ void checkOperation(api.Operation o) { if (buildCounterOperation < 3) { unittest.expect(o.done!, unittest.isTrue); checkStatus(o.error!); - checkUnnamed16(o.metadata!); + checkUnnamed17(o.metadata!); unittest.expect( o.name!, unittest.equals('foo'), ); - checkUnnamed17(o.response!); + checkUnnamed18(o.response!); } buildCounterOperation--; } -core.List buildUnnamed18() => [ +core.List buildUnnamed19() => [ buildAuditConfig(), buildAuditConfig(), ]; -void checkUnnamed18(core.List o) { +void checkUnnamed19(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkAuditConfig(o[0]); checkAuditConfig(o[1]); } -core.List buildUnnamed19() => [ +core.List buildUnnamed20() => [ buildBinding(), buildBinding(), ]; -void checkUnnamed19(core.List o) { +void checkUnnamed20(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkBinding(o[0]); checkBinding(o[1]); @@ -1057,8 +1070,8 @@ api.Policy buildPolicy() { final o = api.Policy(); buildCounterPolicy++; if (buildCounterPolicy < 3) { - o.auditConfigs = buildUnnamed18(); - o.bindings = buildUnnamed19(); + o.auditConfigs = buildUnnamed19(); + o.bindings = buildUnnamed20(); o.etag = 'foo'; o.version = 42; } @@ -1069,8 +1082,8 @@ api.Policy buildPolicy() { void checkPolicy(api.Policy o) { buildCounterPolicy++; if (buildCounterPolicy < 3) { - checkUnnamed18(o.auditConfigs!); - checkUnnamed19(o.bindings!); + checkUnnamed19(o.auditConfigs!); + checkUnnamed20(o.bindings!); unittest.expect( o.etag!, unittest.equals('foo'), @@ -1178,7 +1191,7 @@ void checkSetIamPolicyRequest(api.SetIamPolicyRequest o) { buildCounterSetIamPolicyRequest--; } -core.Map 
buildUnnamed20() => { +core.Map buildUnnamed21() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -1191,7 +1204,7 @@ core.Map buildUnnamed20() => { }, }; -void checkUnnamed20(core.Map o) { +void checkUnnamed21(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted7 = (o['x']!) as core.Map; unittest.expect(casted7, unittest.hasLength(3)); @@ -1223,15 +1236,15 @@ void checkUnnamed20(core.Map o) { ); } -core.List> buildUnnamed21() => [ - buildUnnamed20(), - buildUnnamed20(), +core.List> buildUnnamed22() => [ + buildUnnamed21(), + buildUnnamed21(), ]; -void checkUnnamed21(core.List> o) { +void checkUnnamed22(core.List> o) { unittest.expect(o, unittest.hasLength(2)); - checkUnnamed20(o[0]); - checkUnnamed20(o[1]); + checkUnnamed21(o[0]); + checkUnnamed21(o[1]); } core.int buildCounterStatus = 0; @@ -1240,7 +1253,7 @@ api.Status buildStatus() { buildCounterStatus++; if (buildCounterStatus < 3) { o.code = 42; - o.details = buildUnnamed21(); + o.details = buildUnnamed22(); o.message = 'foo'; } buildCounterStatus--; @@ -1254,7 +1267,7 @@ void checkStatus(api.Status o) { o.code!, unittest.equals(42), ); - checkUnnamed21(o.details!); + checkUnnamed22(o.details!); unittest.expect( o.message!, unittest.equals('foo'), @@ -1263,12 +1276,12 @@ void checkStatus(api.Status o) { buildCounterStatus--; } -core.List buildUnnamed22() => [ +core.List buildUnnamed23() => [ 'foo', 'foo', ]; -void checkUnnamed22(core.List o) { +void checkUnnamed23(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -1285,7 +1298,7 @@ api.TestIamPermissionsRequest buildTestIamPermissionsRequest() { final o = api.TestIamPermissionsRequest(); buildCounterTestIamPermissionsRequest++; if (buildCounterTestIamPermissionsRequest < 3) { - o.permissions = buildUnnamed22(); + o.permissions = buildUnnamed23(); } buildCounterTestIamPermissionsRequest--; return o; @@ -1294,17 +1307,17 @@ api.TestIamPermissionsRequest buildTestIamPermissionsRequest() { void checkTestIamPermissionsRequest(api.TestIamPermissionsRequest o) { buildCounterTestIamPermissionsRequest++; if (buildCounterTestIamPermissionsRequest < 3) { - checkUnnamed22(o.permissions!); + checkUnnamed23(o.permissions!); } buildCounterTestIamPermissionsRequest--; } -core.List buildUnnamed23() => [ +core.List buildUnnamed24() => [ 'foo', 'foo', ]; -void checkUnnamed23(core.List o) { +void checkUnnamed24(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -1321,7 +1334,7 @@ api.TestIamPermissionsResponse buildTestIamPermissionsResponse() { final o = api.TestIamPermissionsResponse(); buildCounterTestIamPermissionsResponse++; if (buildCounterTestIamPermissionsResponse < 3) { - o.permissions = buildUnnamed23(); + o.permissions = buildUnnamed24(); } buildCounterTestIamPermissionsResponse--; return o; @@ -1330,7 +1343,7 @@ api.TestIamPermissionsResponse buildTestIamPermissionsResponse() { void checkTestIamPermissionsResponse(api.TestIamPermissionsResponse o) { buildCounterTestIamPermissionsResponse++; if (buildCounterTestIamPermissionsResponse < 3) { - checkUnnamed23(o.permissions!); + checkUnnamed24(o.permissions!); } buildCounterTestIamPermissionsResponse--; } @@ -1362,12 +1375,12 @@ void checkTimeWindow(api.TimeWindow o) { buildCounterTimeWindow--; } -core.List buildUnnamed24() => [ +core.List buildUnnamed25() => [ 'foo', 'foo', ]; -void checkUnnamed24(core.List o) { +void checkUnnamed25(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -1384,7 +1397,7 @@ api.Version buildVersion() { 
final o = api.Version(); buildCounterVersion++; if (buildCounterVersion < 3) { - o.availableFeatures = buildUnnamed24(); + o.availableFeatures = buildUnnamed25(); o.defaultVersion = true; o.type = 'foo'; o.versionNumber = 'foo'; @@ -1396,7 +1409,7 @@ api.Version buildVersion() { void checkVersion(api.Version o) { buildCounterVersion++; if (buildCounterVersion < 3) { - checkUnnamed24(o.availableFeatures!); + checkUnnamed25(o.availableFeatures!); unittest.expect(o.defaultVersion!, unittest.isTrue); unittest.expect( o.type!, diff --git a/generated/googleapis/test/datamigration/v1_test.dart b/generated/googleapis/test/datamigration/v1_test.dart index ba6893d5f..448a1d1b9 100644 --- a/generated/googleapis/test/datamigration/v1_test.dart +++ b/generated/googleapis/test/datamigration/v1_test.dart @@ -2622,23 +2622,59 @@ void checkListMappingRulesResponse(api.ListMappingRulesResponse o) { buildCounterListMappingRulesResponse--; } -core.List buildUnnamed39() => [ +core.List buildUnnamed39() => [ + buildMigrationJobObject(), + buildMigrationJobObject(), + ]; + +void checkUnnamed39(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkMigrationJobObject(o[0]); + checkMigrationJobObject(o[1]); +} + +core.int buildCounterListMigrationJobObjectsResponse = 0; +api.ListMigrationJobObjectsResponse buildListMigrationJobObjectsResponse() { + final o = api.ListMigrationJobObjectsResponse(); + buildCounterListMigrationJobObjectsResponse++; + if (buildCounterListMigrationJobObjectsResponse < 3) { + o.migrationJobObjects = buildUnnamed39(); + o.nextPageToken = 'foo'; + } + buildCounterListMigrationJobObjectsResponse--; + return o; +} + +void checkListMigrationJobObjectsResponse( + api.ListMigrationJobObjectsResponse o) { + buildCounterListMigrationJobObjectsResponse++; + if (buildCounterListMigrationJobObjectsResponse < 3) { + checkUnnamed39(o.migrationJobObjects!); + unittest.expect( + o.nextPageToken!, + unittest.equals('foo'), + ); + } + buildCounterListMigrationJobObjectsResponse--; +} + +core.List buildUnnamed40() => [ buildMigrationJob(), buildMigrationJob(), ]; -void checkUnnamed39(core.List o) { +void checkUnnamed40(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkMigrationJob(o[0]); checkMigrationJob(o[1]); } -core.List buildUnnamed40() => [ +core.List buildUnnamed41() => [ 'foo', 'foo', ]; -void checkUnnamed40(core.List o) { +void checkUnnamed41(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -2655,9 +2691,9 @@ api.ListMigrationJobsResponse buildListMigrationJobsResponse() { final o = api.ListMigrationJobsResponse(); buildCounterListMigrationJobsResponse++; if (buildCounterListMigrationJobsResponse < 3) { - o.migrationJobs = buildUnnamed39(); + o.migrationJobs = buildUnnamed40(); o.nextPageToken = 'foo'; - o.unreachable = buildUnnamed40(); + o.unreachable = buildUnnamed41(); } buildCounterListMigrationJobsResponse--; return o; @@ -2666,22 +2702,22 @@ api.ListMigrationJobsResponse buildListMigrationJobsResponse() { void checkListMigrationJobsResponse(api.ListMigrationJobsResponse o) { buildCounterListMigrationJobsResponse++; if (buildCounterListMigrationJobsResponse < 3) { - checkUnnamed39(o.migrationJobs!); + checkUnnamed40(o.migrationJobs!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed40(o.unreachable!); + checkUnnamed41(o.unreachable!); } buildCounterListMigrationJobsResponse--; } -core.List buildUnnamed41() => [ +core.List buildUnnamed42() => [ buildOperation(), buildOperation(), ]; -void 
checkUnnamed41(core.List o) { +void checkUnnamed42(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkOperation(o[0]); checkOperation(o[1]); @@ -2693,7 +2729,7 @@ api.ListOperationsResponse buildListOperationsResponse() { buildCounterListOperationsResponse++; if (buildCounterListOperationsResponse < 3) { o.nextPageToken = 'foo'; - o.operations = buildUnnamed41(); + o.operations = buildUnnamed42(); } buildCounterListOperationsResponse--; return o; @@ -2706,28 +2742,28 @@ void checkListOperationsResponse(api.ListOperationsResponse o) { o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed41(o.operations!); + checkUnnamed42(o.operations!); } buildCounterListOperationsResponse--; } -core.List buildUnnamed42() => [ +core.List buildUnnamed43() => [ buildPrivateConnection(), buildPrivateConnection(), ]; -void checkUnnamed42(core.List o) { +void checkUnnamed43(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkPrivateConnection(o[0]); checkPrivateConnection(o[1]); } -core.List buildUnnamed43() => [ +core.List buildUnnamed44() => [ 'foo', 'foo', ]; -void checkUnnamed43(core.List o) { +void checkUnnamed44(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -2745,8 +2781,8 @@ api.ListPrivateConnectionsResponse buildListPrivateConnectionsResponse() { buildCounterListPrivateConnectionsResponse++; if (buildCounterListPrivateConnectionsResponse < 3) { o.nextPageToken = 'foo'; - o.privateConnections = buildUnnamed42(); - o.unreachable = buildUnnamed43(); + o.privateConnections = buildUnnamed43(); + o.unreachable = buildUnnamed44(); } buildCounterListPrivateConnectionsResponse--; return o; @@ -2759,18 +2795,18 @@ void checkListPrivateConnectionsResponse(api.ListPrivateConnectionsResponse o) { o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed42(o.privateConnections!); - checkUnnamed43(o.unreachable!); + checkUnnamed43(o.privateConnections!); + checkUnnamed44(o.unreachable!); } buildCounterListPrivateConnectionsResponse--; } -core.Map buildUnnamed44() => { +core.Map buildUnnamed45() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed44(core.Map o) { +void checkUnnamed45(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -2782,7 +2818,7 @@ void checkUnnamed44(core.Map o) { ); } -core.Map buildUnnamed45() => { +core.Map buildUnnamed46() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -2795,7 +2831,7 @@ core.Map buildUnnamed45() => { }, }; -void checkUnnamed45(core.Map o) { +void checkUnnamed46(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted13 = (o['x']!) 
as core.Map; unittest.expect(casted13, unittest.hasLength(3)); @@ -2833,9 +2869,9 @@ api.Location buildLocation() { buildCounterLocation++; if (buildCounterLocation < 3) { o.displayName = 'foo'; - o.labels = buildUnnamed44(); + o.labels = buildUnnamed45(); o.locationId = 'foo'; - o.metadata = buildUnnamed45(); + o.metadata = buildUnnamed46(); o.name = 'foo'; } buildCounterLocation--; @@ -2849,12 +2885,12 @@ void checkLocation(api.Location o) { o.displayName!, unittest.equals('foo'), ); - checkUnnamed44(o.labels!); + checkUnnamed45(o.labels!); unittest.expect( o.locationId!, unittest.equals('foo'), ); - checkUnnamed45(o.metadata!); + checkUnnamed46(o.metadata!); unittest.expect( o.name!, unittest.equals('foo'), @@ -2905,6 +2941,26 @@ void checkLogMiner(api.LogMiner o) { buildCounterLogMiner--; } +core.int buildCounterLookupMigrationJobObjectRequest = 0; +api.LookupMigrationJobObjectRequest buildLookupMigrationJobObjectRequest() { + final o = api.LookupMigrationJobObjectRequest(); + buildCounterLookupMigrationJobObjectRequest++; + if (buildCounterLookupMigrationJobObjectRequest < 3) { + o.sourceObjectIdentifier = buildSourceObjectIdentifier(); + } + buildCounterLookupMigrationJobObjectRequest--; + return o; +} + +void checkLookupMigrationJobObjectRequest( + api.LookupMigrationJobObjectRequest o) { + buildCounterLookupMigrationJobObjectRequest++; + if (buildCounterLookupMigrationJobObjectRequest < 3) { + checkSourceObjectIdentifier(o.sourceObjectIdentifier!); + } + buildCounterLookupMigrationJobObjectRequest--; +} + core.int buildCounterMachineConfig = 0; api.MachineConfig buildMachineConfig() { final o = api.MachineConfig(); @@ -3003,12 +3059,12 @@ void checkMappingRule(api.MappingRule o) { buildCounterMappingRule--; } -core.List buildUnnamed46() => [ +core.List buildUnnamed47() => [ 'foo', 'foo', ]; -void checkUnnamed46(core.List o) { +void checkUnnamed47(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -3025,7 +3081,7 @@ api.MappingRuleFilter buildMappingRuleFilter() { final o = api.MappingRuleFilter(); buildCounterMappingRuleFilter++; if (buildCounterMappingRuleFilter < 3) { - o.entities = buildUnnamed46(); + o.entities = buildUnnamed47(); o.entityNameContains = 'foo'; o.entityNamePrefix = 'foo'; o.entityNameSuffix = 'foo'; @@ -3038,7 +3094,7 @@ api.MappingRuleFilter buildMappingRuleFilter() { void checkMappingRuleFilter(api.MappingRuleFilter o) { buildCounterMappingRuleFilter++; if (buildCounterMappingRuleFilter < 3) { - checkUnnamed46(o.entities!); + checkUnnamed47(o.entities!); unittest.expect( o.entityNameContains!, unittest.equals('foo'), @@ -3059,7 +3115,7 @@ void checkMappingRuleFilter(api.MappingRuleFilter o) { buildCounterMappingRuleFilter--; } -core.Map buildUnnamed47() => { +core.Map buildUnnamed48() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -3072,7 +3128,7 @@ core.Map buildUnnamed47() => { }, }; -void checkUnnamed47(core.Map o) { +void checkUnnamed48(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted15 = (o['x']!) 
as core.Map;
   unittest.expect(casted15, unittest.hasLength(3));
@@ -3104,12 +3160,24 @@ void checkUnnamed47(core.Map<core.String, core.Object?> o) {
   );
 }
 
+core.List<api.IndexEntity> buildUnnamed49() => [
+      buildIndexEntity(),
+      buildIndexEntity(),
+    ];
+
+void checkUnnamed49(core.List<api.IndexEntity> o) {
+  unittest.expect(o, unittest.hasLength(2));
+  checkIndexEntity(o[0]);
+  checkIndexEntity(o[1]);
+}
+
 core.int buildCounterMaterializedViewEntity = 0;
 api.MaterializedViewEntity buildMaterializedViewEntity() {
   final o = api.MaterializedViewEntity();
   buildCounterMaterializedViewEntity++;
   if (buildCounterMaterializedViewEntity < 3) {
-    o.customFeatures = buildUnnamed47();
+    o.customFeatures = buildUnnamed48();
+    o.indices = buildUnnamed49();
     o.sqlCode = 'foo';
   }
   buildCounterMaterializedViewEntity--;
@@ -3119,7 +3187,8 @@ api.MaterializedViewEntity buildMaterializedViewEntity() {
 void checkMaterializedViewEntity(api.MaterializedViewEntity o) {
   buildCounterMaterializedViewEntity++;
   if (buildCounterMaterializedViewEntity < 3) {
-    checkUnnamed47(o.customFeatures!);
+    checkUnnamed48(o.customFeatures!);
+    checkUnnamed49(o.indices!);
     unittest.expect(
       o.sqlCode!,
       unittest.equals('foo'),
@@ -3128,12 +3197,12 @@ void checkMaterializedViewEntity(api.MaterializedViewEntity o) {
   buildCounterMaterializedViewEntity--;
 }
 
-core.Map<core.String, core.String> buildUnnamed48() => {
+core.Map<core.String, core.String> buildUnnamed50() => {
       'x': 'foo',
       'y': 'foo',
     };
 
-void checkUnnamed48(core.Map<core.String, core.String> o) {
+void checkUnnamed50(core.Map<core.String, core.String> o) {
   unittest.expect(o, unittest.hasLength(2));
   unittest.expect(
     o['x']!,
@@ -3163,8 +3232,9 @@ api.MigrationJob buildMigrationJob() {
     o.endTime = 'foo';
     o.error = buildStatus();
     o.filter = 'foo';
-    o.labels = buildUnnamed48();
+    o.labels = buildUnnamed50();
     o.name = 'foo';
+    o.objectsConfig = buildMigrationJobObjectsConfig();
     o.oracleToPostgresConfig = buildOracleToPostgresConfig();
     o.performanceConfig = buildPerformanceConfig();
     o.phase = 'foo';
@@ -3226,11 +3296,12 @@ void checkMigrationJob(api.MigrationJob o) {
       o.filter!,
       unittest.equals('foo'),
     );
-    checkUnnamed48(o.labels!);
+    checkUnnamed50(o.labels!);
     unittest.expect(
       o.name!,
       unittest.equals('foo'),
     );
+    checkMigrationJobObjectsConfig(o.objectsConfig!);
     checkOracleToPostgresConfig(o.oracleToPostgresConfig!);
     checkPerformanceConfig(o.performanceConfig!);
     unittest.expect(
@@ -3263,7 +3334,72 @@ void checkMigrationJob(api.MigrationJob o) {
   buildCounterMigrationJob--;
 }
 
-core.Map<core.String, core.Object?> buildUnnamed49() => {
+core.int buildCounterMigrationJobObject = 0;
+api.MigrationJobObject buildMigrationJobObject() {
+  final o = api.MigrationJobObject();
+  buildCounterMigrationJobObject++;
+  if (buildCounterMigrationJobObject < 3) {
+    o.createTime = 'foo';
+    o.error = buildStatus();
+    o.name = 'foo';
+    o.phase = 'foo';
+    o.sourceObject = buildSourceObjectIdentifier();
+    o.state = 'foo';
+    o.updateTime = 'foo';
+  }
+  buildCounterMigrationJobObject--;
+  return o;
+}
+
+void checkMigrationJobObject(api.MigrationJobObject o) {
+  buildCounterMigrationJobObject++;
+  if (buildCounterMigrationJobObject < 3) {
+    unittest.expect(
+      o.createTime!,
+      unittest.equals('foo'),
+    );
+    checkStatus(o.error!);
+    unittest.expect(
+      o.name!,
+      unittest.equals('foo'),
+    );
+    unittest.expect(
+      o.phase!,
+      unittest.equals('foo'),
+    );
+    checkSourceObjectIdentifier(o.sourceObject!);
+    unittest.expect(
+      o.state!,
+      unittest.equals('foo'),
+    );
+    unittest.expect(
+      o.updateTime!,
+      unittest.equals('foo'),
+    );
+  }
+  buildCounterMigrationJobObject--;
+}
+
+core.int buildCounterMigrationJobObjectsConfig = 0;
+api.MigrationJobObjectsConfig buildMigrationJobObjectsConfig() {
+  final o = api.MigrationJobObjectsConfig();
+  buildCounterMigrationJobObjectsConfig++;
+  if (buildCounterMigrationJobObjectsConfig < 3) {
+    o.sourceObjectsConfig = buildSourceObjectsConfig();
+  }
+  buildCounterMigrationJobObjectsConfig--;
+  return o;
+}
+
+void checkMigrationJobObjectsConfig(api.MigrationJobObjectsConfig o) {
+  buildCounterMigrationJobObjectsConfig++;
+  if (buildCounterMigrationJobObjectsConfig < 3) {
+    checkSourceObjectsConfig(o.sourceObjectsConfig!);
+  }
+  buildCounterMigrationJobObjectsConfig--;
+}
+
+core.Map<core.String, core.Object?> buildUnnamed51() => {
       'x': {
         'list': [1, 2, 3],
         'bool': true,
@@ -3276,7 +3412,7 @@ core.Map<core.String, core.Object?> buildUnnamed49() => {
       },
     };
 
-void checkUnnamed49(core.Map<core.String, core.Object?> o) {
+void checkUnnamed51(core.Map<core.String, core.Object?> o) {
   unittest.expect(o, unittest.hasLength(2));
   var casted17 = (o['x']!) as core.Map;
   unittest.expect(casted17, unittest.hasLength(3));
@@ -3313,7 +3449,7 @@ api.MultiColumnDatatypeChange buildMultiColumnDatatypeChange() {
   final o = api.MultiColumnDatatypeChange();
   buildCounterMultiColumnDatatypeChange++;
   if (buildCounterMultiColumnDatatypeChange < 3) {
-    o.customFeatures = buildUnnamed49();
+    o.customFeatures = buildUnnamed51();
     o.newDataType = 'foo';
     o.overrideFractionalSecondsPrecision = 42;
     o.overrideLength = 'foo';
@@ -3330,7 +3466,7 @@ api.MultiColumnDatatypeChange buildMultiColumnDatatypeChange() {
 void checkMultiColumnDatatypeChange(api.MultiColumnDatatypeChange o) {
   buildCounterMultiColumnDatatypeChange++;
   if (buildCounterMultiColumnDatatypeChange < 3) {
-    checkUnnamed49(o.customFeatures!);
+    checkUnnamed51(o.customFeatures!);
     unittest.expect(
       o.newDataType!,
       unittest.equals('foo'),
@@ -3434,7 +3570,7 @@ void checkMySqlConnectionProfile(api.MySqlConnectionProfile o) {
   buildCounterMySqlConnectionProfile--;
 }
 
-core.Map<core.String, core.Object?> buildUnnamed50() => {
+core.Map<core.String, core.Object?> buildUnnamed52() => {
       'x': {
         'list': [1, 2, 3],
         'bool': true,
@@ -3447,7 +3583,7 @@ core.Map<core.String, core.Object?> buildUnnamed50() => {
       },
     };
 
-void checkUnnamed50(core.Map<core.String, core.Object?> o) {
+void checkUnnamed52(core.Map<core.String, core.Object?> o) {
   unittest.expect(o, unittest.hasLength(2));
   var casted19 = (o['x']!) as core.Map;
   unittest.expect(casted19, unittest.hasLength(3));
@@ -3479,7 +3615,7 @@ void checkUnnamed50(core.Map<core.String, core.Object?> o) {
   );
 }
 
-core.Map<core.String, core.Object?> buildUnnamed51() => {
+core.Map<core.String, core.Object?> buildUnnamed53() => {
       'x': {
         'list': [1, 2, 3],
         'bool': true,
@@ -3492,7 +3628,7 @@ core.Map<core.String, core.Object?> buildUnnamed51() => {
       },
     };
 
-void checkUnnamed51(core.Map<core.String, core.Object?> o) {
+void checkUnnamed53(core.Map<core.String, core.Object?> o) {
   unittest.expect(o, unittest.hasLength(2));
   var casted21 = (o['x']!)
as core.Map; unittest.expect(casted21, unittest.hasLength(3)); @@ -3531,9 +3667,9 @@ api.Operation buildOperation() { if (buildCounterOperation < 3) { o.done = true; o.error = buildStatus(); - o.metadata = buildUnnamed50(); + o.metadata = buildUnnamed52(); o.name = 'foo'; - o.response = buildUnnamed51(); + o.response = buildUnnamed53(); } buildCounterOperation--; return o; @@ -3544,12 +3680,12 @@ void checkOperation(api.Operation o) { if (buildCounterOperation < 3) { unittest.expect(o.done!, unittest.isTrue); checkStatus(o.error!); - checkUnnamed50(o.metadata!); + checkUnnamed52(o.metadata!); unittest.expect( o.name!, unittest.equals('foo'), ); - checkUnnamed51(o.response!); + checkUnnamed53(o.response!); } buildCounterOperation--; } @@ -3728,7 +3864,7 @@ void checkOracleToPostgresConfig(api.OracleToPostgresConfig o) { buildCounterOracleToPostgresConfig--; } -core.Map buildUnnamed52() => { +core.Map buildUnnamed54() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -3741,7 +3877,7 @@ core.Map buildUnnamed52() => { }, }; -void checkUnnamed52(core.Map o) { +void checkUnnamed54(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted23 = (o['x']!) as core.Map; unittest.expect(casted23, unittest.hasLength(3)); @@ -3778,7 +3914,7 @@ api.PackageEntity buildPackageEntity() { final o = api.PackageEntity(); buildCounterPackageEntity++; if (buildCounterPackageEntity < 3) { - o.customFeatures = buildUnnamed52(); + o.customFeatures = buildUnnamed54(); o.packageBody = 'foo'; o.packageSqlCode = 'foo'; } @@ -3789,7 +3925,7 @@ api.PackageEntity buildPackageEntity() { void checkPackageEntity(api.PackageEntity o) { buildCounterPackageEntity++; if (buildCounterPackageEntity < 3) { - checkUnnamed52(o.customFeatures!); + checkUnnamed54(o.customFeatures!); unittest.expect( o.packageBody!, unittest.equals('foo'), @@ -3824,23 +3960,23 @@ void checkPerformanceConfig(api.PerformanceConfig o) { buildCounterPerformanceConfig--; } -core.List buildUnnamed53() => [ +core.List buildUnnamed55() => [ buildAuditConfig(), buildAuditConfig(), ]; -void checkUnnamed53(core.List o) { +void checkUnnamed55(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkAuditConfig(o[0]); checkAuditConfig(o[1]); } -core.List buildUnnamed54() => [ +core.List buildUnnamed56() => [ buildBinding(), buildBinding(), ]; -void checkUnnamed54(core.List o) { +void checkUnnamed56(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkBinding(o[0]); checkBinding(o[1]); @@ -3851,8 +3987,8 @@ api.Policy buildPolicy() { final o = api.Policy(); buildCounterPolicy++; if (buildCounterPolicy < 3) { - o.auditConfigs = buildUnnamed53(); - o.bindings = buildUnnamed54(); + o.auditConfigs = buildUnnamed55(); + o.bindings = buildUnnamed56(); o.etag = 'foo'; o.version = 42; } @@ -3863,8 +3999,8 @@ api.Policy buildPolicy() { void checkPolicy(api.Policy o) { buildCounterPolicy++; if (buildCounterPolicy < 3) { - checkUnnamed53(o.auditConfigs!); - checkUnnamed54(o.bindings!); + checkUnnamed55(o.auditConfigs!); + checkUnnamed56(o.bindings!); unittest.expect( o.etag!, unittest.equals('foo'), @@ -3921,6 +4057,7 @@ api.PostgreSqlConnectionProfile buildPostgreSqlConnectionProfile() { if (buildCounterPostgreSqlConnectionProfile < 3) { o.alloydbClusterId = 'foo'; o.cloudSqlId = 'foo'; + o.database = 'foo'; o.host = 'foo'; o.networkArchitecture = 'foo'; o.password = 'foo'; @@ -3947,6 +4084,10 @@ void checkPostgreSqlConnectionProfile(api.PostgreSqlConnectionProfile o) { o.cloudSqlId!, unittest.equals('foo'), ); + unittest.expect( + o.database!, + 
unittest.equals('foo'), + ); unittest.expect( o.host!, unittest.equals('foo'), @@ -4003,12 +4144,12 @@ void checkPostgresDestinationConfig(api.PostgresDestinationConfig o) { buildCounterPostgresDestinationConfig--; } -core.Map buildUnnamed55() => { +core.Map buildUnnamed57() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed55(core.Map o) { +void checkUnnamed57(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -4020,12 +4161,12 @@ void checkUnnamed55(core.Map o) { ); } -core.Map buildUnnamed56() => { +core.Map buildUnnamed58() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed56(core.Map o) { +void checkUnnamed58(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -4037,12 +4178,12 @@ void checkUnnamed56(core.Map o) { ); } -core.List buildUnnamed57() => [ +core.List buildUnnamed59() => [ 'foo', 'foo', ]; -void checkUnnamed57(core.List o) { +void checkUnnamed59(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -4059,12 +4200,12 @@ api.PrimaryInstanceSettings buildPrimaryInstanceSettings() { final o = api.PrimaryInstanceSettings(); buildCounterPrimaryInstanceSettings++; if (buildCounterPrimaryInstanceSettings < 3) { - o.databaseFlags = buildUnnamed55(); + o.databaseFlags = buildUnnamed57(); o.id = 'foo'; o.instanceNetworkConfig = buildInstanceNetworkConfig(); - o.labels = buildUnnamed56(); + o.labels = buildUnnamed58(); o.machineConfig = buildMachineConfig(); - o.outboundPublicIpAddresses = buildUnnamed57(); + o.outboundPublicIpAddresses = buildUnnamed59(); o.privateIp = 'foo'; } buildCounterPrimaryInstanceSettings--; @@ -4074,15 +4215,15 @@ api.PrimaryInstanceSettings buildPrimaryInstanceSettings() { void checkPrimaryInstanceSettings(api.PrimaryInstanceSettings o) { buildCounterPrimaryInstanceSettings++; if (buildCounterPrimaryInstanceSettings < 3) { - checkUnnamed55(o.databaseFlags!); + checkUnnamed57(o.databaseFlags!); unittest.expect( o.id!, unittest.equals('foo'), ); checkInstanceNetworkConfig(o.instanceNetworkConfig!); - checkUnnamed56(o.labels!); + checkUnnamed58(o.labels!); checkMachineConfig(o.machineConfig!); - checkUnnamed57(o.outboundPublicIpAddresses!); + checkUnnamed59(o.outboundPublicIpAddresses!); unittest.expect( o.privateIp!, unittest.equals('foo'), @@ -4091,12 +4232,12 @@ void checkPrimaryInstanceSettings(api.PrimaryInstanceSettings o) { buildCounterPrimaryInstanceSettings--; } -core.Map buildUnnamed58() => { +core.Map buildUnnamed60() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed58(core.Map o) { +void checkUnnamed60(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -4116,7 +4257,7 @@ api.PrivateConnection buildPrivateConnection() { o.createTime = 'foo'; o.displayName = 'foo'; o.error = buildStatus(); - o.labels = buildUnnamed58(); + o.labels = buildUnnamed60(); o.name = 'foo'; o.state = 'foo'; o.updateTime = 'foo'; @@ -4138,7 +4279,7 @@ void checkPrivateConnection(api.PrivateConnection o) { unittest.equals('foo'), ); checkStatus(o.error!); - checkUnnamed58(o.labels!); + checkUnnamed60(o.labels!); unittest.expect( o.name!, unittest.equals('foo'), @@ -4205,14 +4346,18 @@ core.int buildCounterPromoteMigrationJobRequest = 0; api.PromoteMigrationJobRequest buildPromoteMigrationJobRequest() { final o = api.PromoteMigrationJobRequest(); buildCounterPromoteMigrationJobRequest++; - if (buildCounterPromoteMigrationJobRequest < 3) {} + if (buildCounterPromoteMigrationJobRequest < 3) { + o.objectsFilter = buildMigrationJobObjectsConfig(); + 
} buildCounterPromoteMigrationJobRequest--; return o; } void checkPromoteMigrationJobRequest(api.PromoteMigrationJobRequest o) { buildCounterPromoteMigrationJobRequest++; - if (buildCounterPromoteMigrationJobRequest < 3) {} + if (buildCounterPromoteMigrationJobRequest < 3) { + checkMigrationJobObjectsConfig(o.objectsFilter!); + } buildCounterPromoteMigrationJobRequest--; } @@ -4221,6 +4366,7 @@ api.RestartMigrationJobRequest buildRestartMigrationJobRequest() { final o = api.RestartMigrationJobRequest(); buildCounterRestartMigrationJobRequest++; if (buildCounterRestartMigrationJobRequest < 3) { + o.objectsFilter = buildMigrationJobObjectsConfig(); o.skipValidation = true; } buildCounterRestartMigrationJobRequest--; @@ -4230,6 +4376,7 @@ api.RestartMigrationJobRequest buildRestartMigrationJobRequest() { void checkRestartMigrationJobRequest(api.RestartMigrationJobRequest o) { buildCounterRestartMigrationJobRequest++; if (buildCounterRestartMigrationJobRequest < 3) { + checkMigrationJobObjectsConfig(o.objectsFilter!); unittest.expect(o.skipValidation!, unittest.isTrue); } buildCounterRestartMigrationJobRequest--; @@ -4357,7 +4504,7 @@ void checkRulesFile(api.RulesFile o) { buildCounterRulesFile--; } -core.Map buildUnnamed59() => { +core.Map buildUnnamed61() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -4370,7 +4517,7 @@ core.Map buildUnnamed59() => { }, }; -void checkUnnamed59(core.Map o) { +void checkUnnamed61(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted25 = (o['x']!) as core.Map; unittest.expect(casted25, unittest.hasLength(3)); @@ -4407,7 +4554,7 @@ api.SchemaEntity buildSchemaEntity() { final o = api.SchemaEntity(); buildCounterSchemaEntity++; if (buildCounterSchemaEntity < 3) { - o.customFeatures = buildUnnamed59(); + o.customFeatures = buildUnnamed61(); } buildCounterSchemaEntity--; return o; @@ -4416,17 +4563,17 @@ api.SchemaEntity buildSchemaEntity() { void checkSchemaEntity(api.SchemaEntity o) { buildCounterSchemaEntity++; if (buildCounterSchemaEntity < 3) { - checkUnnamed59(o.customFeatures!); + checkUnnamed61(o.customFeatures!); } buildCounterSchemaEntity--; } -core.List buildUnnamed60() => [ +core.List buildUnnamed62() => [ buildBackgroundJobLogEntry(), buildBackgroundJobLogEntry(), ]; -void checkUnnamed60(core.List o) { +void checkUnnamed62(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkBackgroundJobLogEntry(o[0]); checkBackgroundJobLogEntry(o[1]); @@ -4437,7 +4584,7 @@ api.SearchBackgroundJobsResponse buildSearchBackgroundJobsResponse() { final o = api.SearchBackgroundJobsResponse(); buildCounterSearchBackgroundJobsResponse++; if (buildCounterSearchBackgroundJobsResponse < 3) { - o.jobs = buildUnnamed60(); + o.jobs = buildUnnamed62(); } buildCounterSearchBackgroundJobsResponse--; return o; @@ -4446,7 +4593,7 @@ api.SearchBackgroundJobsResponse buildSearchBackgroundJobsResponse() { void checkSearchBackgroundJobsResponse(api.SearchBackgroundJobsResponse o) { buildCounterSearchBackgroundJobsResponse++; if (buildCounterSearchBackgroundJobsResponse < 3) { - checkUnnamed60(o.jobs!); + checkUnnamed62(o.jobs!); } buildCounterSearchBackgroundJobsResponse--; } @@ -4502,7 +4649,7 @@ void checkSeedJobDetails(api.SeedJobDetails o) { buildCounterSeedJobDetails--; } -core.Map buildUnnamed61() => { +core.Map buildUnnamed63() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -4515,7 +4662,7 @@ core.Map buildUnnamed61() => { }, }; -void checkUnnamed61(core.Map o) { +void checkUnnamed63(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted27 = 
(o['x']!) as core.Map; unittest.expect(casted27, unittest.hasLength(3)); @@ -4553,7 +4700,7 @@ api.SequenceEntity buildSequenceEntity() { buildCounterSequenceEntity++; if (buildCounterSequenceEntity < 3) { o.cache = 'foo'; - o.customFeatures = buildUnnamed61(); + o.customFeatures = buildUnnamed63(); o.cycle = true; o.increment = 'foo'; o.maxValue = 'foo'; @@ -4571,7 +4718,7 @@ void checkSequenceEntity(api.SequenceEntity o) { o.cache!, unittest.equals('foo'), ); - checkUnnamed61(o.customFeatures!); + checkUnnamed63(o.customFeatures!); unittest.expect(o.cycle!, unittest.isTrue); unittest.expect( o.increment!, @@ -4617,12 +4764,12 @@ void checkSetIamPolicyRequest(api.SetIamPolicyRequest o) { buildCounterSetIamPolicyRequest--; } -core.List buildUnnamed62() => [ +core.List buildUnnamed64() => [ 'foo', 'foo', ]; -void checkUnnamed62(core.List o) { +void checkUnnamed64(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -4640,7 +4787,7 @@ api.SetTablePrimaryKey buildSetTablePrimaryKey() { buildCounterSetTablePrimaryKey++; if (buildCounterSetTablePrimaryKey < 3) { o.primaryKey = 'foo'; - o.primaryKeyColumns = buildUnnamed62(); + o.primaryKeyColumns = buildUnnamed64(); } buildCounterSetTablePrimaryKey--; return o; @@ -4653,12 +4800,12 @@ void checkSetTablePrimaryKey(api.SetTablePrimaryKey o) { o.primaryKey!, unittest.equals('foo'), ); - checkUnnamed62(o.primaryKeyColumns!); + checkUnnamed64(o.primaryKeyColumns!); } buildCounterSetTablePrimaryKey--; } -core.Map buildUnnamed63() => { +core.Map buildUnnamed65() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -4671,7 +4818,7 @@ core.Map buildUnnamed63() => { }, }; -void checkUnnamed63(core.Map o) { +void checkUnnamed65(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted29 = (o['x']!) 
as core.Map; unittest.expect(casted29, unittest.hasLength(3)); @@ -4703,12 +4850,12 @@ void checkUnnamed63(core.Map o) { ); } -core.List buildUnnamed64() => [ +core.List buildUnnamed66() => [ 'foo', 'foo', ]; -void checkUnnamed64(core.List o) { +void checkUnnamed66(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -4731,14 +4878,14 @@ api.SingleColumnChange buildSingleColumnChange() { o.charset = 'foo'; o.collation = 'foo'; o.comment = 'foo'; - o.customFeatures = buildUnnamed63(); + o.customFeatures = buildUnnamed65(); o.dataType = 'foo'; o.fractionalSecondsPrecision = 42; o.length = 'foo'; o.nullable = true; o.precision = 42; o.scale = 42; - o.setValues = buildUnnamed64(); + o.setValues = buildUnnamed66(); o.udt = true; } buildCounterSingleColumnChange--; @@ -4766,7 +4913,7 @@ void checkSingleColumnChange(api.SingleColumnChange o) { o.comment!, unittest.equals('foo'), ); - checkUnnamed63(o.customFeatures!); + checkUnnamed65(o.customFeatures!); unittest.expect( o.dataType!, unittest.equals('foo'), @@ -4788,7 +4935,7 @@ void checkSingleColumnChange(api.SingleColumnChange o) { o.scale!, unittest.equals(42), ); - checkUnnamed64(o.setValues!); + checkUnnamed66(o.setValues!); unittest.expect(o.udt!, unittest.isTrue); } buildCounterSingleColumnChange--; @@ -4885,6 +5032,87 @@ void checkSourceNumericFilter(api.SourceNumericFilter o) { buildCounterSourceNumericFilter--; } +core.int buildCounterSourceObjectConfig = 0; +api.SourceObjectConfig buildSourceObjectConfig() { + final o = api.SourceObjectConfig(); + buildCounterSourceObjectConfig++; + if (buildCounterSourceObjectConfig < 3) { + o.objectIdentifier = buildSourceObjectIdentifier(); + } + buildCounterSourceObjectConfig--; + return o; +} + +void checkSourceObjectConfig(api.SourceObjectConfig o) { + buildCounterSourceObjectConfig++; + if (buildCounterSourceObjectConfig < 3) { + checkSourceObjectIdentifier(o.objectIdentifier!); + } + buildCounterSourceObjectConfig--; +} + +core.int buildCounterSourceObjectIdentifier = 0; +api.SourceObjectIdentifier buildSourceObjectIdentifier() { + final o = api.SourceObjectIdentifier(); + buildCounterSourceObjectIdentifier++; + if (buildCounterSourceObjectIdentifier < 3) { + o.database = 'foo'; + o.type = 'foo'; + } + buildCounterSourceObjectIdentifier--; + return o; +} + +void checkSourceObjectIdentifier(api.SourceObjectIdentifier o) { + buildCounterSourceObjectIdentifier++; + if (buildCounterSourceObjectIdentifier < 3) { + unittest.expect( + o.database!, + unittest.equals('foo'), + ); + unittest.expect( + o.type!, + unittest.equals('foo'), + ); + } + buildCounterSourceObjectIdentifier--; +} + +core.List buildUnnamed67() => [ + buildSourceObjectConfig(), + buildSourceObjectConfig(), + ]; + +void checkUnnamed67(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkSourceObjectConfig(o[0]); + checkSourceObjectConfig(o[1]); +} + +core.int buildCounterSourceObjectsConfig = 0; +api.SourceObjectsConfig buildSourceObjectsConfig() { + final o = api.SourceObjectsConfig(); + buildCounterSourceObjectsConfig++; + if (buildCounterSourceObjectsConfig < 3) { + o.objectConfigs = buildUnnamed67(); + o.objectsSelectionType = 'foo'; + } + buildCounterSourceObjectsConfig--; + return o; +} + +void checkSourceObjectsConfig(api.SourceObjectsConfig o) { + buildCounterSourceObjectsConfig++; + if (buildCounterSourceObjectsConfig < 3) { + checkUnnamed67(o.objectConfigs!); + unittest.expect( + o.objectsSelectionType!, + unittest.equals('foo'), + ); + } + buildCounterSourceObjectsConfig--; +} 
+ core.int buildCounterSourceSqlChange = 0; api.SourceSqlChange buildSourceSqlChange() { final o = api.SourceSqlChange(); @@ -4971,12 +5199,12 @@ void checkSqlAclEntry(api.SqlAclEntry o) { buildCounterSqlAclEntry--; } -core.List buildUnnamed65() => [ +core.List buildUnnamed68() => [ buildSqlAclEntry(), buildSqlAclEntry(), ]; -void checkUnnamed65(core.List o) { +void checkUnnamed68(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkSqlAclEntry(o[0]); checkSqlAclEntry(o[1]); @@ -4988,7 +5216,7 @@ api.SqlIpConfig buildSqlIpConfig() { buildCounterSqlIpConfig++; if (buildCounterSqlIpConfig < 3) { o.allocatedIpRange = 'foo'; - o.authorizedNetworks = buildUnnamed65(); + o.authorizedNetworks = buildUnnamed68(); o.enableIpv4 = true; o.privateNetwork = 'foo'; o.requireSsl = true; @@ -5004,7 +5232,7 @@ void checkSqlIpConfig(api.SqlIpConfig o) { o.allocatedIpRange!, unittest.equals('foo'), ); - checkUnnamed65(o.authorizedNetworks!); + checkUnnamed68(o.authorizedNetworks!); unittest.expect(o.enableIpv4!, unittest.isTrue); unittest.expect( o.privateNetwork!, @@ -5156,12 +5384,12 @@ void checkSqlServerEncryptionOptions(api.SqlServerEncryptionOptions o) { buildCounterSqlServerEncryptionOptions--; } -core.List buildUnnamed66() => [ +core.List buildUnnamed69() => [ buildSqlServerDatabaseBackup(), buildSqlServerDatabaseBackup(), ]; -void checkUnnamed66(core.List o) { +void checkUnnamed69(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkSqlServerDatabaseBackup(o[0]); checkSqlServerDatabaseBackup(o[1]); @@ -5174,7 +5402,7 @@ api.SqlServerHomogeneousMigrationJobConfig buildCounterSqlServerHomogeneousMigrationJobConfig++; if (buildCounterSqlServerHomogeneousMigrationJobConfig < 3) { o.backupFilePattern = 'foo'; - o.databaseBackups = buildUnnamed66(); + o.databaseBackups = buildUnnamed69(); o.promoteWhenReady = true; o.useDiffBackup = true; } @@ -5190,7 +5418,7 @@ void checkSqlServerHomogeneousMigrationJobConfig( o.backupFilePattern!, unittest.equals('foo'), ); - checkUnnamed66(o.databaseBackups!); + checkUnnamed69(o.databaseBackups!); unittest.expect(o.promoteWhenReady!, unittest.isTrue); unittest.expect(o.useDiffBackup!, unittest.isTrue); } @@ -5305,7 +5533,7 @@ void checkStaticServiceIpConnectivity(api.StaticServiceIpConnectivity o) { buildCounterStaticServiceIpConnectivity--; } -core.Map buildUnnamed67() => { +core.Map buildUnnamed70() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -5318,7 +5546,7 @@ core.Map buildUnnamed67() => { }, }; -void checkUnnamed67(core.Map o) { +void checkUnnamed70(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted31 = (o['x']!) 
as core.Map; unittest.expect(casted31, unittest.hasLength(3)); @@ -5350,15 +5578,15 @@ void checkUnnamed67(core.Map o) { ); } -core.List> buildUnnamed68() => [ - buildUnnamed67(), - buildUnnamed67(), +core.List> buildUnnamed71() => [ + buildUnnamed70(), + buildUnnamed70(), ]; -void checkUnnamed68(core.List> o) { +void checkUnnamed71(core.List> o) { unittest.expect(o, unittest.hasLength(2)); - checkUnnamed67(o[0]); - checkUnnamed67(o[1]); + checkUnnamed70(o[0]); + checkUnnamed70(o[1]); } core.int buildCounterStatus = 0; @@ -5367,7 +5595,7 @@ api.Status buildStatus() { buildCounterStatus++; if (buildCounterStatus < 3) { o.code = 42; - o.details = buildUnnamed68(); + o.details = buildUnnamed71(); o.message = 'foo'; } buildCounterStatus--; @@ -5381,7 +5609,7 @@ void checkStatus(api.Status o) { o.code!, unittest.equals(42), ); - checkUnnamed68(o.details!); + checkUnnamed71(o.details!); unittest.expect( o.message!, unittest.equals('foo'), @@ -5405,7 +5633,7 @@ void checkStopMigrationJobRequest(api.StopMigrationJobRequest o) { buildCounterStopMigrationJobRequest--; } -core.Map buildUnnamed69() => { +core.Map buildUnnamed72() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -5418,7 +5646,7 @@ core.Map buildUnnamed69() => { }, }; -void checkUnnamed69(core.Map o) { +void checkUnnamed72(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted33 = (o['x']!) as core.Map; unittest.expect(casted33, unittest.hasLength(3)); @@ -5455,7 +5683,7 @@ api.StoredProcedureEntity buildStoredProcedureEntity() { final o = api.StoredProcedureEntity(); buildCounterStoredProcedureEntity++; if (buildCounterStoredProcedureEntity < 3) { - o.customFeatures = buildUnnamed69(); + o.customFeatures = buildUnnamed72(); o.sqlCode = 'foo'; } buildCounterStoredProcedureEntity--; @@ -5465,7 +5693,7 @@ api.StoredProcedureEntity buildStoredProcedureEntity() { void checkStoredProcedureEntity(api.StoredProcedureEntity o) { buildCounterStoredProcedureEntity++; if (buildCounterStoredProcedureEntity < 3) { - checkUnnamed69(o.customFeatures!); + checkUnnamed72(o.customFeatures!); unittest.expect( o.sqlCode!, unittest.equals('foo'), @@ -5474,7 +5702,7 @@ void checkStoredProcedureEntity(api.StoredProcedureEntity o) { buildCounterStoredProcedureEntity--; } -core.Map buildUnnamed70() => { +core.Map buildUnnamed73() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -5487,7 +5715,7 @@ core.Map buildUnnamed70() => { }, }; -void checkUnnamed70(core.Map o) { +void checkUnnamed73(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted35 = (o['x']!) 
as core.Map; unittest.expect(casted35, unittest.hasLength(3)); @@ -5524,7 +5752,7 @@ api.SynonymEntity buildSynonymEntity() { final o = api.SynonymEntity(); buildCounterSynonymEntity++; if (buildCounterSynonymEntity < 3) { - o.customFeatures = buildUnnamed70(); + o.customFeatures = buildUnnamed73(); o.sourceEntity = 'foo'; o.sourceType = 'foo'; } @@ -5535,7 +5763,7 @@ api.SynonymEntity buildSynonymEntity() { void checkSynonymEntity(api.SynonymEntity o) { buildCounterSynonymEntity++; if (buildCounterSynonymEntity < 3) { - checkUnnamed70(o.customFeatures!); + checkUnnamed73(o.customFeatures!); unittest.expect( o.sourceEntity!, unittest.equals('foo'), @@ -5548,29 +5776,29 @@ void checkSynonymEntity(api.SynonymEntity o) { buildCounterSynonymEntity--; } -core.List buildUnnamed71() => [ +core.List buildUnnamed74() => [ buildColumnEntity(), buildColumnEntity(), ]; -void checkUnnamed71(core.List o) { +void checkUnnamed74(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkColumnEntity(o[0]); checkColumnEntity(o[1]); } -core.List buildUnnamed72() => [ +core.List buildUnnamed75() => [ buildConstraintEntity(), buildConstraintEntity(), ]; -void checkUnnamed72(core.List o) { +void checkUnnamed75(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkConstraintEntity(o[0]); checkConstraintEntity(o[1]); } -core.Map buildUnnamed73() => { +core.Map buildUnnamed76() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -5583,7 +5811,7 @@ core.Map buildUnnamed73() => { }, }; -void checkUnnamed73(core.Map o) { +void checkUnnamed76(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted37 = (o['x']!) as core.Map; unittest.expect(casted37, unittest.hasLength(3)); @@ -5615,23 +5843,23 @@ void checkUnnamed73(core.Map o) { ); } -core.List buildUnnamed74() => [ +core.List buildUnnamed77() => [ buildIndexEntity(), buildIndexEntity(), ]; -void checkUnnamed74(core.List o) { +void checkUnnamed77(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkIndexEntity(o[0]); checkIndexEntity(o[1]); } -core.List buildUnnamed75() => [ +core.List buildUnnamed78() => [ buildTriggerEntity(), buildTriggerEntity(), ]; -void checkUnnamed75(core.List o) { +void checkUnnamed78(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkTriggerEntity(o[0]); checkTriggerEntity(o[1]); @@ -5642,12 +5870,12 @@ api.TableEntity buildTableEntity() { final o = api.TableEntity(); buildCounterTableEntity++; if (buildCounterTableEntity < 3) { - o.columns = buildUnnamed71(); + o.columns = buildUnnamed74(); o.comment = 'foo'; - o.constraints = buildUnnamed72(); - o.customFeatures = buildUnnamed73(); - o.indices = buildUnnamed74(); - o.triggers = buildUnnamed75(); + o.constraints = buildUnnamed75(); + o.customFeatures = buildUnnamed76(); + o.indices = buildUnnamed77(); + o.triggers = buildUnnamed78(); } buildCounterTableEntity--; return o; @@ -5656,15 +5884,15 @@ api.TableEntity buildTableEntity() { void checkTableEntity(api.TableEntity o) { buildCounterTableEntity++; if (buildCounterTableEntity < 3) { - checkUnnamed71(o.columns!); + checkUnnamed74(o.columns!); unittest.expect( o.comment!, unittest.equals('foo'), ); - checkUnnamed72(o.constraints!); - checkUnnamed73(o.customFeatures!); - checkUnnamed74(o.indices!); - checkUnnamed75(o.triggers!); + checkUnnamed75(o.constraints!); + checkUnnamed76(o.customFeatures!); + checkUnnamed77(o.indices!); + checkUnnamed78(o.triggers!); } buildCounterTableEntity--; } @@ -5691,12 +5919,12 @@ void checkTcpProxyScript(api.TcpProxyScript o) { buildCounterTcpProxyScript--; } 
-core.List buildUnnamed76() => [ +core.List buildUnnamed79() => [ 'foo', 'foo', ]; -void checkUnnamed76(core.List o) { +void checkUnnamed79(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -5713,7 +5941,7 @@ api.TestIamPermissionsRequest buildTestIamPermissionsRequest() { final o = api.TestIamPermissionsRequest(); buildCounterTestIamPermissionsRequest++; if (buildCounterTestIamPermissionsRequest < 3) { - o.permissions = buildUnnamed76(); + o.permissions = buildUnnamed79(); } buildCounterTestIamPermissionsRequest--; return o; @@ -5722,17 +5950,17 @@ api.TestIamPermissionsRequest buildTestIamPermissionsRequest() { void checkTestIamPermissionsRequest(api.TestIamPermissionsRequest o) { buildCounterTestIamPermissionsRequest++; if (buildCounterTestIamPermissionsRequest < 3) { - checkUnnamed76(o.permissions!); + checkUnnamed79(o.permissions!); } buildCounterTestIamPermissionsRequest--; } -core.List buildUnnamed77() => [ +core.List buildUnnamed80() => [ 'foo', 'foo', ]; -void checkUnnamed77(core.List o) { +void checkUnnamed80(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -5749,7 +5977,7 @@ api.TestIamPermissionsResponse buildTestIamPermissionsResponse() { final o = api.TestIamPermissionsResponse(); buildCounterTestIamPermissionsResponse++; if (buildCounterTestIamPermissionsResponse < 3) { - o.permissions = buildUnnamed77(); + o.permissions = buildUnnamed80(); } buildCounterTestIamPermissionsResponse--; return o; @@ -5758,12 +5986,12 @@ api.TestIamPermissionsResponse buildTestIamPermissionsResponse() { void checkTestIamPermissionsResponse(api.TestIamPermissionsResponse o) { buildCounterTestIamPermissionsResponse++; if (buildCounterTestIamPermissionsResponse < 3) { - checkUnnamed77(o.permissions!); + checkUnnamed80(o.permissions!); } buildCounterTestIamPermissionsResponse--; } -core.Map buildUnnamed78() => { +core.Map buildUnnamed81() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -5776,7 +6004,7 @@ core.Map buildUnnamed78() => { }, }; -void checkUnnamed78(core.Map o) { +void checkUnnamed81(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted39 = (o['x']!) 
as core.Map; unittest.expect(casted39, unittest.hasLength(3)); @@ -5808,12 +6036,12 @@ void checkUnnamed78(core.Map o) { ); } -core.List buildUnnamed79() => [ +core.List buildUnnamed82() => [ 'foo', 'foo', ]; -void checkUnnamed79(core.List o) { +void checkUnnamed82(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -5830,11 +6058,11 @@ api.TriggerEntity buildTriggerEntity() { final o = api.TriggerEntity(); buildCounterTriggerEntity++; if (buildCounterTriggerEntity < 3) { - o.customFeatures = buildUnnamed78(); + o.customFeatures = buildUnnamed81(); o.name = 'foo'; o.sqlCode = 'foo'; o.triggerType = 'foo'; - o.triggeringEvents = buildUnnamed79(); + o.triggeringEvents = buildUnnamed82(); } buildCounterTriggerEntity--; return o; @@ -5843,7 +6071,7 @@ api.TriggerEntity buildTriggerEntity() { void checkTriggerEntity(api.TriggerEntity o) { buildCounterTriggerEntity++; if (buildCounterTriggerEntity < 3) { - checkUnnamed78(o.customFeatures!); + checkUnnamed81(o.customFeatures!); unittest.expect( o.name!, unittest.equals('foo'), @@ -5856,12 +6084,12 @@ void checkTriggerEntity(api.TriggerEntity o) { o.triggerType!, unittest.equals('foo'), ); - checkUnnamed79(o.triggeringEvents!); + checkUnnamed82(o.triggeringEvents!); } buildCounterTriggerEntity--; } -core.Map buildUnnamed80() => { +core.Map buildUnnamed83() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -5874,7 +6102,7 @@ core.Map buildUnnamed80() => { }, }; -void checkUnnamed80(core.Map o) { +void checkUnnamed83(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted41 = (o['x']!) as core.Map; unittest.expect(casted41, unittest.hasLength(3)); @@ -5911,7 +6139,7 @@ api.UDTEntity buildUDTEntity() { final o = api.UDTEntity(); buildCounterUDTEntity++; if (buildCounterUDTEntity < 3) { - o.customFeatures = buildUnnamed80(); + o.customFeatures = buildUnnamed83(); o.udtBody = 'foo'; o.udtSqlCode = 'foo'; } @@ -5922,7 +6150,7 @@ api.UDTEntity buildUDTEntity() { void checkUDTEntity(api.UDTEntity o) { buildCounterUDTEntity++; if (buildCounterUDTEntity < 3) { - checkUnnamed80(o.customFeatures!); + checkUnnamed83(o.customFeatures!); unittest.expect( o.udtBody!, unittest.equals('foo'), @@ -5964,12 +6192,12 @@ void checkUserPassword(api.UserPassword o) { buildCounterUserPassword--; } -core.List buildUnnamed81() => [ +core.List buildUnnamed84() => [ 'foo', 'foo', ]; -void checkUnnamed81(core.List o) { +void checkUnnamed84(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -5988,7 +6216,7 @@ api.ValueListFilter buildValueListFilter() { if (buildCounterValueListFilter < 3) { o.ignoreCase = true; o.valuePresentList = 'foo'; - o.values = buildUnnamed81(); + o.values = buildUnnamed84(); } buildCounterValueListFilter--; return o; @@ -6002,7 +6230,7 @@ void checkValueListFilter(api.ValueListFilter o) { o.valuePresentList!, unittest.equals('foo'), ); - checkUnnamed81(o.values!); + checkUnnamed84(o.values!); } buildCounterValueListFilter--; } @@ -6068,18 +6296,18 @@ void checkVerifyMigrationJobRequest(api.VerifyMigrationJobRequest o) { buildCounterVerifyMigrationJobRequest--; } -core.List buildUnnamed82() => [ +core.List buildUnnamed85() => [ buildConstraintEntity(), buildConstraintEntity(), ]; -void checkUnnamed82(core.List o) { +void checkUnnamed85(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkConstraintEntity(o[0]); checkConstraintEntity(o[1]); } -core.Map buildUnnamed83() => { +core.Map buildUnnamed86() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -6092,7 +6320,7 @@ 
core.Map buildUnnamed83() => { }, }; -void checkUnnamed83(core.Map o) { +void checkUnnamed86(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted43 = (o['x']!) as core.Map; unittest.expect(casted43, unittest.hasLength(3)); @@ -6129,8 +6357,8 @@ api.ViewEntity buildViewEntity() { final o = api.ViewEntity(); buildCounterViewEntity++; if (buildCounterViewEntity < 3) { - o.constraints = buildUnnamed82(); - o.customFeatures = buildUnnamed83(); + o.constraints = buildUnnamed85(); + o.customFeatures = buildUnnamed86(); o.sqlCode = 'foo'; } buildCounterViewEntity--; @@ -6140,8 +6368,8 @@ api.ViewEntity buildViewEntity() { void checkViewEntity(api.ViewEntity o) { buildCounterViewEntity++; if (buildCounterViewEntity < 3) { - checkUnnamed82(o.constraints!); - checkUnnamed83(o.customFeatures!); + checkUnnamed85(o.constraints!); + checkUnnamed86(o.customFeatures!); unittest.expect( o.sqlCode!, unittest.equals('foo'), @@ -6844,6 +7072,16 @@ void main() { }); }); + unittest.group('obj-schema-ListMigrationJobObjectsResponse', () { + unittest.test('to-json--from-json', () async { + final o = buildListMigrationJobObjectsResponse(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.ListMigrationJobObjectsResponse.fromJson( + oJson as core.Map); + checkListMigrationJobObjectsResponse(od); + }); + }); + unittest.group('obj-schema-ListMigrationJobsResponse', () { unittest.test('to-json--from-json', () async { final o = buildListMigrationJobsResponse(); @@ -6904,6 +7142,16 @@ void main() { }); }); + unittest.group('obj-schema-LookupMigrationJobObjectRequest', () { + unittest.test('to-json--from-json', () async { + final o = buildLookupMigrationJobObjectRequest(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.LookupMigrationJobObjectRequest.fromJson( + oJson as core.Map); + checkLookupMigrationJobObjectRequest(od); + }); + }); + unittest.group('obj-schema-MachineConfig', () { unittest.test('to-json--from-json', () async { final o = buildMachineConfig(); @@ -6954,6 +7202,26 @@ void main() { }); }); + unittest.group('obj-schema-MigrationJobObject', () { + unittest.test('to-json--from-json', () async { + final o = buildMigrationJobObject(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.MigrationJobObject.fromJson( + oJson as core.Map); + checkMigrationJobObject(od); + }); + }); + + unittest.group('obj-schema-MigrationJobObjectsConfig', () { + unittest.test('to-json--from-json', () async { + final o = buildMigrationJobObjectsConfig(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.MigrationJobObjectsConfig.fromJson( + oJson as core.Map); + checkMigrationJobObjectsConfig(od); + }); + }); + unittest.group('obj-schema-MultiColumnDatatypeChange', () { unittest.test('to-json--from-json', () async { final o = buildMultiColumnDatatypeChange(); @@ -7324,6 +7592,36 @@ void main() { }); }); + unittest.group('obj-schema-SourceObjectConfig', () { + unittest.test('to-json--from-json', () async { + final o = buildSourceObjectConfig(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.SourceObjectConfig.fromJson( + oJson as core.Map); + checkSourceObjectConfig(od); + }); + }); + + unittest.group('obj-schema-SourceObjectIdentifier', () { + unittest.test('to-json--from-json', () async { + final o = buildSourceObjectIdentifier(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.SourceObjectIdentifier.fromJson( + oJson as core.Map); + 
checkSourceObjectIdentifier(od); + }); + }); + + unittest.group('obj-schema-SourceObjectsConfig', () { + unittest.test('to-json--from-json', () async { + final o = buildSourceObjectsConfig(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.SourceObjectsConfig.fromJson( + oJson as core.Map); + checkSourceObjectsConfig(od); + }); + }); + unittest.group('obj-schema-SourceSqlChange', () { unittest.test('to-json--from-json', () async { final o = buildSourceSqlChange(); @@ -10006,6 +10304,62 @@ void main() { checkOperation(response as api.Operation); }); + unittest.test('method--fetchSourceObjects', () async { + final mock = HttpServerMock(); + final res = api.DatabaseMigrationServiceApi(mock) + .projects + .locations + .migrationJobs; + final arg_name = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildOperation()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = + await res.fetchSourceObjects(arg_name, $fields: arg_$fields); + checkOperation(response as api.Operation); + }); + unittest.test('method--generateSshScript', () async { final mock = HttpServerMock(); final res = api.DatabaseMigrationServiceApi(mock) @@ -10891,6 +11245,62 @@ void main() { }); unittest.group('resource-ProjectsLocationsMigrationJobsObjectsResource', () { + unittest.test('method--get', () async { + final mock = HttpServerMock(); + final res = api.DatabaseMigrationServiceApi(mock) + .projects + .locations + .migrationJobs + .objects; + final arg_name = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, 
+ unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildMigrationJobObject()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.get(arg_name, $fields: arg_$fields); + checkMigrationJobObject(response as api.MigrationJobObject); + }); + unittest.test('method--getIamPolicy', () async { final mock = HttpServerMock(); final res = api.DatabaseMigrationServiceApi(mock) @@ -10954,6 +11364,139 @@ void main() { checkPolicy(response as api.Policy); }); + unittest.test('method--list', () async { + final mock = HttpServerMock(); + final res = api.DatabaseMigrationServiceApi(mock) + .projects + .locations + .migrationJobs + .objects; + final arg_parent = 'foo'; + final arg_pageSize = 42; + final arg_pageToken = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + core.int.parse(queryMap['pageSize']!.first), + unittest.equals(arg_pageSize), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = + convert.json.encode(buildListMigrationJobObjectsResponse()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.list(arg_parent, + pageSize: arg_pageSize, + pageToken: arg_pageToken, + $fields: arg_$fields); + checkListMigrationJobObjectsResponse( + response as api.ListMigrationJobObjectsResponse); + }); + + unittest.test('method--lookup', () async { + final mock = HttpServerMock(); + final res = api.DatabaseMigrationServiceApi(mock) + .projects + .locations + .migrationJobs + .objects; + final arg_request = buildLookupMigrationJobObjectRequest(); + final arg_parent = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.LookupMigrationJobObjectRequest.fromJson( + json as core.Map); + checkLookupMigrationJobObjectRequest(obj); + + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + 
queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildMigrationJobObject()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = + await res.lookup(arg_request, arg_parent, $fields: arg_$fields); + checkMigrationJobObject(response as api.MigrationJobObject); + }); + unittest.test('method--setIamPolicy', () async { final mock = HttpServerMock(); final res = api.DatabaseMigrationServiceApi(mock) diff --git a/generated/googleapis/test/dataplex/v1_test.dart b/generated/googleapis/test/dataplex/v1_test.dart index 76b20c66c..1767072af 100644 --- a/generated/googleapis/test/dataplex/v1_test.dart +++ b/generated/googleapis/test/dataplex/v1_test.dart @@ -1670,6 +1670,257 @@ void checkGoogleCloudDataplexV1DataAttributeBindingPath( buildCounterGoogleCloudDataplexV1DataAttributeBindingPath--; } +core.int buildCounterGoogleCloudDataplexV1DataDiscoveryResult = 0; +api.GoogleCloudDataplexV1DataDiscoveryResult + buildGoogleCloudDataplexV1DataDiscoveryResult() { + final o = api.GoogleCloudDataplexV1DataDiscoveryResult(); + buildCounterGoogleCloudDataplexV1DataDiscoveryResult++; + if (buildCounterGoogleCloudDataplexV1DataDiscoveryResult < 3) { + o.bigqueryPublishing = + buildGoogleCloudDataplexV1DataDiscoveryResultBigQueryPublishing(); + } + buildCounterGoogleCloudDataplexV1DataDiscoveryResult--; + return o; +} + +void checkGoogleCloudDataplexV1DataDiscoveryResult( + api.GoogleCloudDataplexV1DataDiscoveryResult o) { + buildCounterGoogleCloudDataplexV1DataDiscoveryResult++; + if (buildCounterGoogleCloudDataplexV1DataDiscoveryResult < 3) { + checkGoogleCloudDataplexV1DataDiscoveryResultBigQueryPublishing( + o.bigqueryPublishing!); + } + buildCounterGoogleCloudDataplexV1DataDiscoveryResult--; +} + +core.int + buildCounterGoogleCloudDataplexV1DataDiscoveryResultBigQueryPublishing = 0; +api.GoogleCloudDataplexV1DataDiscoveryResultBigQueryPublishing + buildGoogleCloudDataplexV1DataDiscoveryResultBigQueryPublishing() { + final o = api.GoogleCloudDataplexV1DataDiscoveryResultBigQueryPublishing(); + buildCounterGoogleCloudDataplexV1DataDiscoveryResultBigQueryPublishing++; + if (buildCounterGoogleCloudDataplexV1DataDiscoveryResultBigQueryPublishing < + 3) { + o.dataset = 'foo'; + } + buildCounterGoogleCloudDataplexV1DataDiscoveryResultBigQueryPublishing--; + return o; +} + +void checkGoogleCloudDataplexV1DataDiscoveryResultBigQueryPublishing( + api.GoogleCloudDataplexV1DataDiscoveryResultBigQueryPublishing o) { + buildCounterGoogleCloudDataplexV1DataDiscoveryResultBigQueryPublishing++; + if (buildCounterGoogleCloudDataplexV1DataDiscoveryResultBigQueryPublishing < + 3) { + unittest.expect( + o.dataset!, + unittest.equals('foo'), + ); + } + buildCounterGoogleCloudDataplexV1DataDiscoveryResultBigQueryPublishing--; +} + +core.int buildCounterGoogleCloudDataplexV1DataDiscoverySpec = 0; +api.GoogleCloudDataplexV1DataDiscoverySpec + buildGoogleCloudDataplexV1DataDiscoverySpec() { + final o = api.GoogleCloudDataplexV1DataDiscoverySpec(); + buildCounterGoogleCloudDataplexV1DataDiscoverySpec++; + if (buildCounterGoogleCloudDataplexV1DataDiscoverySpec < 3) { + 
o.bigqueryPublishingConfig = + buildGoogleCloudDataplexV1DataDiscoverySpecBigQueryPublishingConfig(); + o.storageConfig = + buildGoogleCloudDataplexV1DataDiscoverySpecStorageConfig(); + } + buildCounterGoogleCloudDataplexV1DataDiscoverySpec--; + return o; +} + +void checkGoogleCloudDataplexV1DataDiscoverySpec( + api.GoogleCloudDataplexV1DataDiscoverySpec o) { + buildCounterGoogleCloudDataplexV1DataDiscoverySpec++; + if (buildCounterGoogleCloudDataplexV1DataDiscoverySpec < 3) { + checkGoogleCloudDataplexV1DataDiscoverySpecBigQueryPublishingConfig( + o.bigqueryPublishingConfig!); + checkGoogleCloudDataplexV1DataDiscoverySpecStorageConfig(o.storageConfig!); + } + buildCounterGoogleCloudDataplexV1DataDiscoverySpec--; +} + +core.int + buildCounterGoogleCloudDataplexV1DataDiscoverySpecBigQueryPublishingConfig = + 0; +api.GoogleCloudDataplexV1DataDiscoverySpecBigQueryPublishingConfig + buildGoogleCloudDataplexV1DataDiscoverySpecBigQueryPublishingConfig() { + final o = + api.GoogleCloudDataplexV1DataDiscoverySpecBigQueryPublishingConfig(); + buildCounterGoogleCloudDataplexV1DataDiscoverySpecBigQueryPublishingConfig++; + if (buildCounterGoogleCloudDataplexV1DataDiscoverySpecBigQueryPublishingConfig < + 3) { + o.connection = 'foo'; + o.tableType = 'foo'; + } + buildCounterGoogleCloudDataplexV1DataDiscoverySpecBigQueryPublishingConfig--; + return o; +} + +void checkGoogleCloudDataplexV1DataDiscoverySpecBigQueryPublishingConfig( + api.GoogleCloudDataplexV1DataDiscoverySpecBigQueryPublishingConfig o) { + buildCounterGoogleCloudDataplexV1DataDiscoverySpecBigQueryPublishingConfig++; + if (buildCounterGoogleCloudDataplexV1DataDiscoverySpecBigQueryPublishingConfig < + 3) { + unittest.expect( + o.connection!, + unittest.equals('foo'), + ); + unittest.expect( + o.tableType!, + unittest.equals('foo'), + ); + } + buildCounterGoogleCloudDataplexV1DataDiscoverySpecBigQueryPublishingConfig--; +} + +core.List buildUnnamed18() => [ + 'foo', + 'foo', + ]; + +void checkUnnamed18(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o[0], + unittest.equals('foo'), + ); + unittest.expect( + o[1], + unittest.equals('foo'), + ); +} + +core.List buildUnnamed19() => [ + 'foo', + 'foo', + ]; + +void checkUnnamed19(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o[0], + unittest.equals('foo'), + ); + unittest.expect( + o[1], + unittest.equals('foo'), + ); +} + +core.int buildCounterGoogleCloudDataplexV1DataDiscoverySpecStorageConfig = 0; +api.GoogleCloudDataplexV1DataDiscoverySpecStorageConfig + buildGoogleCloudDataplexV1DataDiscoverySpecStorageConfig() { + final o = api.GoogleCloudDataplexV1DataDiscoverySpecStorageConfig(); + buildCounterGoogleCloudDataplexV1DataDiscoverySpecStorageConfig++; + if (buildCounterGoogleCloudDataplexV1DataDiscoverySpecStorageConfig < 3) { + o.csvOptions = + buildGoogleCloudDataplexV1DataDiscoverySpecStorageConfigCsvOptions(); + o.excludePatterns = buildUnnamed18(); + o.includePatterns = buildUnnamed19(); + o.jsonOptions = + buildGoogleCloudDataplexV1DataDiscoverySpecStorageConfigJsonOptions(); + } + buildCounterGoogleCloudDataplexV1DataDiscoverySpecStorageConfig--; + return o; +} + +void checkGoogleCloudDataplexV1DataDiscoverySpecStorageConfig( + api.GoogleCloudDataplexV1DataDiscoverySpecStorageConfig o) { + buildCounterGoogleCloudDataplexV1DataDiscoverySpecStorageConfig++; + if (buildCounterGoogleCloudDataplexV1DataDiscoverySpecStorageConfig < 3) { + checkGoogleCloudDataplexV1DataDiscoverySpecStorageConfigCsvOptions( + o.csvOptions!); 
+ checkUnnamed18(o.excludePatterns!); + checkUnnamed19(o.includePatterns!); + checkGoogleCloudDataplexV1DataDiscoverySpecStorageConfigJsonOptions( + o.jsonOptions!); + } + buildCounterGoogleCloudDataplexV1DataDiscoverySpecStorageConfig--; +} + +core.int + buildCounterGoogleCloudDataplexV1DataDiscoverySpecStorageConfigCsvOptions = + 0; +api.GoogleCloudDataplexV1DataDiscoverySpecStorageConfigCsvOptions + buildGoogleCloudDataplexV1DataDiscoverySpecStorageConfigCsvOptions() { + final o = api.GoogleCloudDataplexV1DataDiscoverySpecStorageConfigCsvOptions(); + buildCounterGoogleCloudDataplexV1DataDiscoverySpecStorageConfigCsvOptions++; + if (buildCounterGoogleCloudDataplexV1DataDiscoverySpecStorageConfigCsvOptions < + 3) { + o.delimiter = 'foo'; + o.encoding = 'foo'; + o.headerRows = 42; + o.quote = 'foo'; + o.typeInferenceDisabled = true; + } + buildCounterGoogleCloudDataplexV1DataDiscoverySpecStorageConfigCsvOptions--; + return o; +} + +void checkGoogleCloudDataplexV1DataDiscoverySpecStorageConfigCsvOptions( + api.GoogleCloudDataplexV1DataDiscoverySpecStorageConfigCsvOptions o) { + buildCounterGoogleCloudDataplexV1DataDiscoverySpecStorageConfigCsvOptions++; + if (buildCounterGoogleCloudDataplexV1DataDiscoverySpecStorageConfigCsvOptions < + 3) { + unittest.expect( + o.delimiter!, + unittest.equals('foo'), + ); + unittest.expect( + o.encoding!, + unittest.equals('foo'), + ); + unittest.expect( + o.headerRows!, + unittest.equals(42), + ); + unittest.expect( + o.quote!, + unittest.equals('foo'), + ); + unittest.expect(o.typeInferenceDisabled!, unittest.isTrue); + } + buildCounterGoogleCloudDataplexV1DataDiscoverySpecStorageConfigCsvOptions--; +} + +core.int + buildCounterGoogleCloudDataplexV1DataDiscoverySpecStorageConfigJsonOptions = + 0; +api.GoogleCloudDataplexV1DataDiscoverySpecStorageConfigJsonOptions + buildGoogleCloudDataplexV1DataDiscoverySpecStorageConfigJsonOptions() { + final o = + api.GoogleCloudDataplexV1DataDiscoverySpecStorageConfigJsonOptions(); + buildCounterGoogleCloudDataplexV1DataDiscoverySpecStorageConfigJsonOptions++; + if (buildCounterGoogleCloudDataplexV1DataDiscoverySpecStorageConfigJsonOptions < + 3) { + o.encoding = 'foo'; + o.typeInferenceDisabled = true; + } + buildCounterGoogleCloudDataplexV1DataDiscoverySpecStorageConfigJsonOptions--; + return o; +} + +void checkGoogleCloudDataplexV1DataDiscoverySpecStorageConfigJsonOptions( + api.GoogleCloudDataplexV1DataDiscoverySpecStorageConfigJsonOptions o) { + buildCounterGoogleCloudDataplexV1DataDiscoverySpecStorageConfigJsonOptions++; + if (buildCounterGoogleCloudDataplexV1DataDiscoverySpecStorageConfigJsonOptions < + 3) { + unittest.expect( + o.encoding!, + unittest.equals('foo'), + ); + unittest.expect(o.typeInferenceDisabled!, unittest.isTrue); + } + buildCounterGoogleCloudDataplexV1DataDiscoverySpecStorageConfigJsonOptions--; +} + core.int buildCounterGoogleCloudDataplexV1DataProfileResult = 0; api.GoogleCloudDataplexV1DataProfileResult buildGoogleCloudDataplexV1DataProfileResult() { @@ -1764,12 +2015,12 @@ void checkGoogleCloudDataplexV1DataProfileResultPostScanActionsResultBigQueryExp } core.List - buildUnnamed18() => [ + buildUnnamed20() => [ buildGoogleCloudDataplexV1DataProfileResultProfileField(), buildGoogleCloudDataplexV1DataProfileResultProfileField(), ]; -void checkUnnamed18( +void checkUnnamed20( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDataplexV1DataProfileResultProfileField(o[0]); @@ -1782,7 +2033,7 @@ api.GoogleCloudDataplexV1DataProfileResultProfile final o = 
api.GoogleCloudDataplexV1DataProfileResultProfile(); buildCounterGoogleCloudDataplexV1DataProfileResultProfile++; if (buildCounterGoogleCloudDataplexV1DataProfileResultProfile < 3) { - o.fields = buildUnnamed18(); + o.fields = buildUnnamed20(); } buildCounterGoogleCloudDataplexV1DataProfileResultProfile--; return o; @@ -1792,7 +2043,7 @@ void checkGoogleCloudDataplexV1DataProfileResultProfile( api.GoogleCloudDataplexV1DataProfileResultProfile o) { buildCounterGoogleCloudDataplexV1DataProfileResultProfile++; if (buildCounterGoogleCloudDataplexV1DataProfileResultProfile < 3) { - checkUnnamed18(o.fields!); + checkUnnamed20(o.fields!); } buildCounterGoogleCloudDataplexV1DataProfileResultProfile--; } @@ -1838,12 +2089,12 @@ void checkGoogleCloudDataplexV1DataProfileResultProfileField( core.List< api .GoogleCloudDataplexV1DataProfileResultProfileFieldProfileInfoTopNValue> - buildUnnamed19() => [ + buildUnnamed21() => [ buildGoogleCloudDataplexV1DataProfileResultProfileFieldProfileInfoTopNValue(), buildGoogleCloudDataplexV1DataProfileResultProfileFieldProfileInfoTopNValue(), ]; -void checkUnnamed19( +void checkUnnamed21( core.List< api .GoogleCloudDataplexV1DataProfileResultProfileFieldProfileInfoTopNValue> @@ -1872,7 +2123,7 @@ api.GoogleCloudDataplexV1DataProfileResultProfileFieldProfileInfo o.nullRatio = 42.0; o.stringProfile = buildGoogleCloudDataplexV1DataProfileResultProfileFieldProfileInfoStringFieldInfo(); - o.topNValues = buildUnnamed19(); + o.topNValues = buildUnnamed21(); } buildCounterGoogleCloudDataplexV1DataProfileResultProfileFieldProfileInfo--; return o; @@ -1897,17 +2148,17 @@ void checkGoogleCloudDataplexV1DataProfileResultProfileFieldProfileInfo( ); checkGoogleCloudDataplexV1DataProfileResultProfileFieldProfileInfoStringFieldInfo( o.stringProfile!); - checkUnnamed19(o.topNValues!); + checkUnnamed21(o.topNValues!); } buildCounterGoogleCloudDataplexV1DataProfileResultProfileFieldProfileInfo--; } -core.List buildUnnamed20() => [ +core.List buildUnnamed22() => [ 42.0, 42.0, ]; -void checkUnnamed20(core.List o) { +void checkUnnamed22(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -1932,7 +2183,7 @@ api.GoogleCloudDataplexV1DataProfileResultProfileFieldProfileInfoDoubleFieldInfo o.average = 42.0; o.max = 42.0; o.min = 42.0; - o.quartiles = buildUnnamed20(); + o.quartiles = buildUnnamed22(); o.standardDeviation = 42.0; } buildCounterGoogleCloudDataplexV1DataProfileResultProfileFieldProfileInfoDoubleFieldInfo--; @@ -1957,7 +2208,7 @@ void checkGoogleCloudDataplexV1DataProfileResultProfileFieldProfileInfoDoubleFie o.min!, unittest.equals(42.0), ); - checkUnnamed20(o.quartiles!); + checkUnnamed22(o.quartiles!); unittest.expect( o.standardDeviation!, unittest.equals(42.0), @@ -1966,12 +2217,12 @@ void checkGoogleCloudDataplexV1DataProfileResultProfileFieldProfileInfoDoubleFie buildCounterGoogleCloudDataplexV1DataProfileResultProfileFieldProfileInfoDoubleFieldInfo--; } -core.List buildUnnamed21() => [ +core.List buildUnnamed23() => [ 'foo', 'foo', ]; -void checkUnnamed21(core.List o) { +void checkUnnamed23(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -1996,7 +2247,7 @@ api.GoogleCloudDataplexV1DataProfileResultProfileFieldProfileInfoIntegerFieldInf o.average = 42.0; o.max = 'foo'; o.min = 'foo'; - o.quartiles = buildUnnamed21(); + o.quartiles = buildUnnamed23(); o.standardDeviation = 42.0; } buildCounterGoogleCloudDataplexV1DataProfileResultProfileFieldProfileInfoIntegerFieldInfo--; @@ -2021,7 +2272,7 @@ void 
checkGoogleCloudDataplexV1DataProfileResultProfileFieldProfileInfoIntegerFi o.min!, unittest.equals('foo'), ); - checkUnnamed21(o.quartiles!); + checkUnnamed23(o.quartiles!); unittest.expect( o.standardDeviation!, unittest.equals(42.0), @@ -2199,12 +2450,12 @@ void checkGoogleCloudDataplexV1DataProfileSpecPostScanActionsBigQueryExport( buildCounterGoogleCloudDataplexV1DataProfileSpecPostScanActionsBigQueryExport--; } -core.List buildUnnamed22() => [ +core.List buildUnnamed24() => [ 'foo', 'foo', ]; -void checkUnnamed22(core.List o) { +void checkUnnamed24(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -2222,7 +2473,7 @@ api.GoogleCloudDataplexV1DataProfileSpecSelectedFields final o = api.GoogleCloudDataplexV1DataProfileSpecSelectedFields(); buildCounterGoogleCloudDataplexV1DataProfileSpecSelectedFields++; if (buildCounterGoogleCloudDataplexV1DataProfileSpecSelectedFields < 3) { - o.fieldNames = buildUnnamed22(); + o.fieldNames = buildUnnamed24(); } buildCounterGoogleCloudDataplexV1DataProfileSpecSelectedFields--; return o; @@ -2232,7 +2483,7 @@ void checkGoogleCloudDataplexV1DataProfileSpecSelectedFields( api.GoogleCloudDataplexV1DataProfileSpecSelectedFields o) { buildCounterGoogleCloudDataplexV1DataProfileSpecSelectedFields++; if (buildCounterGoogleCloudDataplexV1DataProfileSpecSelectedFields < 3) { - checkUnnamed22(o.fieldNames!); + checkUnnamed24(o.fieldNames!); } buildCounterGoogleCloudDataplexV1DataProfileSpecSelectedFields--; } @@ -2318,13 +2569,13 @@ void checkGoogleCloudDataplexV1DataQualityDimensionResult( buildCounterGoogleCloudDataplexV1DataQualityDimensionResult--; } -core.List buildUnnamed23() => +core.List buildUnnamed25() => [ buildGoogleCloudDataplexV1DataQualityColumnResult(), buildGoogleCloudDataplexV1DataQualityColumnResult(), ]; -void checkUnnamed23( +void checkUnnamed25( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDataplexV1DataQualityColumnResult(o[0]); @@ -2332,24 +2583,24 @@ void checkUnnamed23( } core.List - buildUnnamed24() => [ + buildUnnamed26() => [ buildGoogleCloudDataplexV1DataQualityDimensionResult(), buildGoogleCloudDataplexV1DataQualityDimensionResult(), ]; -void checkUnnamed24( +void checkUnnamed26( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDataplexV1DataQualityDimensionResult(o[0]); checkGoogleCloudDataplexV1DataQualityDimensionResult(o[1]); } -core.List buildUnnamed25() => [ +core.List buildUnnamed27() => [ buildGoogleCloudDataplexV1DataQualityRuleResult(), buildGoogleCloudDataplexV1DataQualityRuleResult(), ]; -void checkUnnamed25( +void checkUnnamed27( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDataplexV1DataQualityRuleResult(o[0]); @@ -2362,13 +2613,13 @@ api.GoogleCloudDataplexV1DataQualityResult final o = api.GoogleCloudDataplexV1DataQualityResult(); buildCounterGoogleCloudDataplexV1DataQualityResult++; if (buildCounterGoogleCloudDataplexV1DataQualityResult < 3) { - o.columns = buildUnnamed23(); - o.dimensions = buildUnnamed24(); + o.columns = buildUnnamed25(); + o.dimensions = buildUnnamed26(); o.passed = true; o.postScanActionsResult = buildGoogleCloudDataplexV1DataQualityResultPostScanActionsResult(); o.rowCount = 'foo'; - o.rules = buildUnnamed25(); + o.rules = buildUnnamed27(); o.scannedData = buildGoogleCloudDataplexV1ScannedData(); o.score = 42.0; } @@ -2380,8 +2631,8 @@ void checkGoogleCloudDataplexV1DataQualityResult( api.GoogleCloudDataplexV1DataQualityResult o) { 
buildCounterGoogleCloudDataplexV1DataQualityResult++; if (buildCounterGoogleCloudDataplexV1DataQualityResult < 3) { - checkUnnamed23(o.columns!); - checkUnnamed24(o.dimensions!); + checkUnnamed25(o.columns!); + checkUnnamed26(o.dimensions!); unittest.expect(o.passed!, unittest.isTrue); checkGoogleCloudDataplexV1DataQualityResultPostScanActionsResult( o.postScanActionsResult!); @@ -2389,7 +2640,7 @@ void checkGoogleCloudDataplexV1DataQualityResult( o.rowCount!, unittest.equals('foo'), ); - checkUnnamed25(o.rules!); + checkUnnamed27(o.rules!); checkGoogleCloudDataplexV1ScannedData(o.scannedData!); unittest.expect( o.score!, @@ -2695,12 +2946,12 @@ void checkGoogleCloudDataplexV1DataQualityRuleRowConditionExpectation( buildCounterGoogleCloudDataplexV1DataQualityRuleRowConditionExpectation--; } -core.List buildUnnamed26() => [ +core.List buildUnnamed28() => [ 'foo', 'foo', ]; -void checkUnnamed26(core.List o) { +void checkUnnamed28(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -2718,7 +2969,7 @@ api.GoogleCloudDataplexV1DataQualityRuleSetExpectation final o = api.GoogleCloudDataplexV1DataQualityRuleSetExpectation(); buildCounterGoogleCloudDataplexV1DataQualityRuleSetExpectation++; if (buildCounterGoogleCloudDataplexV1DataQualityRuleSetExpectation < 3) { - o.values = buildUnnamed26(); + o.values = buildUnnamed28(); } buildCounterGoogleCloudDataplexV1DataQualityRuleSetExpectation--; return o; @@ -2728,7 +2979,7 @@ void checkGoogleCloudDataplexV1DataQualityRuleSetExpectation( api.GoogleCloudDataplexV1DataQualityRuleSetExpectation o) { buildCounterGoogleCloudDataplexV1DataQualityRuleSetExpectation++; if (buildCounterGoogleCloudDataplexV1DataQualityRuleSetExpectation < 3) { - checkUnnamed26(o.values!); + checkUnnamed28(o.values!); } buildCounterGoogleCloudDataplexV1DataQualityRuleSetExpectation--; } @@ -2847,12 +3098,12 @@ void checkGoogleCloudDataplexV1DataQualityRuleUniquenessExpectation( buildCounterGoogleCloudDataplexV1DataQualityRuleUniquenessExpectation--; } -core.List buildUnnamed27() => [ +core.List buildUnnamed29() => [ buildGoogleCloudDataplexV1DataQualityRule(), buildGoogleCloudDataplexV1DataQualityRule(), ]; -void checkUnnamed27(core.List o) { +void checkUnnamed29(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDataplexV1DataQualityRule(o[0]); checkGoogleCloudDataplexV1DataQualityRule(o[1]); @@ -2867,7 +3118,7 @@ api.GoogleCloudDataplexV1DataQualitySpec o.postScanActions = buildGoogleCloudDataplexV1DataQualitySpecPostScanActions(); o.rowFilter = 'foo'; - o.rules = buildUnnamed27(); + o.rules = buildUnnamed29(); o.samplingPercent = 42.0; } buildCounterGoogleCloudDataplexV1DataQualitySpec--; @@ -2884,7 +3135,7 @@ void checkGoogleCloudDataplexV1DataQualitySpec( o.rowFilter!, unittest.equals('foo'), ); - checkUnnamed27(o.rules!); + checkUnnamed29(o.rules!); unittest.expect( o.samplingPercent!, unittest.equals(42.0), @@ -3035,12 +3286,12 @@ void checkGoogleCloudDataplexV1DataQualitySpecPostScanActionsNotificationReport( buildCounterGoogleCloudDataplexV1DataQualitySpecPostScanActionsNotificationReport--; } -core.List buildUnnamed28() => [ +core.List buildUnnamed30() => [ 'foo', 'foo', ]; -void checkUnnamed28(core.List o) { +void checkUnnamed30(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -3061,7 +3312,7 @@ api.GoogleCloudDataplexV1DataQualitySpecPostScanActionsRecipients buildCounterGoogleCloudDataplexV1DataQualitySpecPostScanActionsRecipients++; if 
(buildCounterGoogleCloudDataplexV1DataQualitySpecPostScanActionsRecipients < 3) { - o.emails = buildUnnamed28(); + o.emails = buildUnnamed30(); } buildCounterGoogleCloudDataplexV1DataQualitySpecPostScanActionsRecipients--; return o; @@ -3072,7 +3323,7 @@ void checkGoogleCloudDataplexV1DataQualitySpecPostScanActionsRecipients( buildCounterGoogleCloudDataplexV1DataQualitySpecPostScanActionsRecipients++; if (buildCounterGoogleCloudDataplexV1DataQualitySpecPostScanActionsRecipients < 3) { - checkUnnamed28(o.emails!); + checkUnnamed30(o.emails!); } buildCounterGoogleCloudDataplexV1DataQualitySpecPostScanActionsRecipients--; } @@ -3107,12 +3358,12 @@ void checkGoogleCloudDataplexV1DataQualitySpecPostScanActionsScoreThresholdTrigg buildCounterGoogleCloudDataplexV1DataQualitySpecPostScanActionsScoreThresholdTrigger--; } -core.Map buildUnnamed29() => { +core.Map buildUnnamed31() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed29(core.Map o) { +void checkUnnamed31(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -3131,6 +3382,8 @@ api.GoogleCloudDataplexV1DataScan buildGoogleCloudDataplexV1DataScan() { if (buildCounterGoogleCloudDataplexV1DataScan < 3) { o.createTime = 'foo'; o.data = buildGoogleCloudDataplexV1DataSource(); + o.dataDiscoveryResult = buildGoogleCloudDataplexV1DataDiscoveryResult(); + o.dataDiscoverySpec = buildGoogleCloudDataplexV1DataDiscoverySpec(); o.dataProfileResult = buildGoogleCloudDataplexV1DataProfileResult(); o.dataProfileSpec = buildGoogleCloudDataplexV1DataProfileSpec(); o.dataQualityResult = buildGoogleCloudDataplexV1DataQualityResult(); @@ -3139,7 +3392,7 @@ api.GoogleCloudDataplexV1DataScan buildGoogleCloudDataplexV1DataScan() { o.displayName = 'foo'; o.executionSpec = buildGoogleCloudDataplexV1DataScanExecutionSpec(); o.executionStatus = buildGoogleCloudDataplexV1DataScanExecutionStatus(); - o.labels = buildUnnamed29(); + o.labels = buildUnnamed31(); o.name = 'foo'; o.state = 'foo'; o.type = 'foo'; @@ -3158,6 +3411,8 @@ void checkGoogleCloudDataplexV1DataScan(api.GoogleCloudDataplexV1DataScan o) { unittest.equals('foo'), ); checkGoogleCloudDataplexV1DataSource(o.data!); + checkGoogleCloudDataplexV1DataDiscoveryResult(o.dataDiscoveryResult!); + checkGoogleCloudDataplexV1DataDiscoverySpec(o.dataDiscoverySpec!); checkGoogleCloudDataplexV1DataProfileResult(o.dataProfileResult!); checkGoogleCloudDataplexV1DataProfileSpec(o.dataProfileSpec!); checkGoogleCloudDataplexV1DataQualityResult(o.dataQualityResult!); @@ -3172,7 +3427,7 @@ void checkGoogleCloudDataplexV1DataScan(api.GoogleCloudDataplexV1DataScan o) { ); checkGoogleCloudDataplexV1DataScanExecutionSpec(o.executionSpec!); checkGoogleCloudDataplexV1DataScanExecutionStatus(o.executionStatus!); - checkUnnamed29(o.labels!); + checkUnnamed31(o.labels!); unittest.expect( o.name!, unittest.equals('foo'), @@ -3263,6 +3518,8 @@ api.GoogleCloudDataplexV1DataScanJob buildGoogleCloudDataplexV1DataScanJob() { buildCounterGoogleCloudDataplexV1DataScanJob++; if (buildCounterGoogleCloudDataplexV1DataScanJob < 3) { o.createTime = 'foo'; + o.dataDiscoveryResult = buildGoogleCloudDataplexV1DataDiscoveryResult(); + o.dataDiscoverySpec = buildGoogleCloudDataplexV1DataDiscoverySpec(); o.dataProfileResult = buildGoogleCloudDataplexV1DataProfileResult(); o.dataProfileSpec = buildGoogleCloudDataplexV1DataProfileSpec(); o.dataQualityResult = buildGoogleCloudDataplexV1DataQualityResult(); @@ -3287,6 +3544,8 @@ void checkGoogleCloudDataplexV1DataScanJob( o.createTime!, unittest.equals('foo'), ); + 
checkGoogleCloudDataplexV1DataDiscoveryResult(o.dataDiscoveryResult!); + checkGoogleCloudDataplexV1DataDiscoverySpec(o.dataDiscoverySpec!); checkGoogleCloudDataplexV1DataProfileResult(o.dataProfileResult!); checkGoogleCloudDataplexV1DataProfileSpec(o.dataProfileSpec!); checkGoogleCloudDataplexV1DataQualityResult(o.dataQualityResult!); @@ -3351,12 +3610,12 @@ void checkGoogleCloudDataplexV1DataSource( buildCounterGoogleCloudDataplexV1DataSource--; } -core.Map buildUnnamed30() => { +core.Map buildUnnamed32() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed30(core.Map o) { +void checkUnnamed32(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -3379,7 +3638,7 @@ api.GoogleCloudDataplexV1DataTaxonomy buildGoogleCloudDataplexV1DataTaxonomy() { o.description = 'foo'; o.displayName = 'foo'; o.etag = 'foo'; - o.labels = buildUnnamed30(); + o.labels = buildUnnamed32(); o.name = 'foo'; o.uid = 'foo'; o.updateTime = 'foo'; @@ -3416,7 +3675,7 @@ void checkGoogleCloudDataplexV1DataTaxonomy( o.etag!, unittest.equals('foo'), ); - checkUnnamed30(o.labels!); + checkUnnamed32(o.labels!); unittest.expect( o.name!, unittest.equals('foo'), @@ -3584,12 +3843,12 @@ void checkGoogleCloudDataplexV1EntityCompatibilityStatusCompatibility( buildCounterGoogleCloudDataplexV1EntityCompatibilityStatusCompatibility--; } -core.Map buildUnnamed31() => { +core.Map buildUnnamed33() => { 'x': buildGoogleCloudDataplexV1Aspect(), 'y': buildGoogleCloudDataplexV1Aspect(), }; -void checkUnnamed31(core.Map o) { +void checkUnnamed33(core.Map o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDataplexV1Aspect(o['x']!); checkGoogleCloudDataplexV1Aspect(o['y']!); @@ -3600,7 +3859,7 @@ api.GoogleCloudDataplexV1Entry buildGoogleCloudDataplexV1Entry() { final o = api.GoogleCloudDataplexV1Entry(); buildCounterGoogleCloudDataplexV1Entry++; if (buildCounterGoogleCloudDataplexV1Entry < 3) { - o.aspects = buildUnnamed31(); + o.aspects = buildUnnamed33(); o.createTime = 'foo'; o.entrySource = buildGoogleCloudDataplexV1EntrySource(); o.entryType = 'foo'; @@ -3616,7 +3875,7 @@ api.GoogleCloudDataplexV1Entry buildGoogleCloudDataplexV1Entry() { void checkGoogleCloudDataplexV1Entry(api.GoogleCloudDataplexV1Entry o) { buildCounterGoogleCloudDataplexV1Entry++; if (buildCounterGoogleCloudDataplexV1Entry < 3) { - checkUnnamed31(o.aspects!); + checkUnnamed33(o.aspects!); unittest.expect( o.createTime!, unittest.equals('foo'), @@ -3646,12 +3905,12 @@ void checkGoogleCloudDataplexV1Entry(api.GoogleCloudDataplexV1Entry o) { buildCounterGoogleCloudDataplexV1Entry--; } -core.Map buildUnnamed32() => { +core.Map buildUnnamed34() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed32(core.Map o) { +void checkUnnamed34(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -3672,7 +3931,7 @@ api.GoogleCloudDataplexV1EntryGroup buildGoogleCloudDataplexV1EntryGroup() { o.description = 'foo'; o.displayName = 'foo'; o.etag = 'foo'; - o.labels = buildUnnamed32(); + o.labels = buildUnnamed34(); o.name = 'foo'; o.transferStatus = 'foo'; o.uid = 'foo'; @@ -3702,7 +3961,7 @@ void checkGoogleCloudDataplexV1EntryGroup( o.etag!, unittest.equals('foo'), ); - checkUnnamed32(o.labels!); + checkUnnamed34(o.labels!); unittest.expect( o.name!, unittest.equals('foo'), @@ -3723,23 +3982,23 @@ void checkGoogleCloudDataplexV1EntryGroup( buildCounterGoogleCloudDataplexV1EntryGroup--; } -core.List buildUnnamed33() => [ +core.List buildUnnamed35() => [ buildGoogleCloudDataplexV1EntrySourceAncestor(), 
buildGoogleCloudDataplexV1EntrySourceAncestor(), ]; -void checkUnnamed33(core.List o) { +void checkUnnamed35(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDataplexV1EntrySourceAncestor(o[0]); checkGoogleCloudDataplexV1EntrySourceAncestor(o[1]); } -core.Map buildUnnamed34() => { +core.Map buildUnnamed36() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed34(core.Map o) { +void checkUnnamed36(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -3756,11 +4015,11 @@ api.GoogleCloudDataplexV1EntrySource buildGoogleCloudDataplexV1EntrySource() { final o = api.GoogleCloudDataplexV1EntrySource(); buildCounterGoogleCloudDataplexV1EntrySource++; if (buildCounterGoogleCloudDataplexV1EntrySource < 3) { - o.ancestors = buildUnnamed33(); + o.ancestors = buildUnnamed35(); o.createTime = 'foo'; o.description = 'foo'; o.displayName = 'foo'; - o.labels = buildUnnamed34(); + o.labels = buildUnnamed36(); o.location = 'foo'; o.platform = 'foo'; o.resource = 'foo'; @@ -3775,7 +4034,7 @@ void checkGoogleCloudDataplexV1EntrySource( api.GoogleCloudDataplexV1EntrySource o) { buildCounterGoogleCloudDataplexV1EntrySource++; if (buildCounterGoogleCloudDataplexV1EntrySource < 3) { - checkUnnamed33(o.ancestors!); + checkUnnamed35(o.ancestors!); unittest.expect( o.createTime!, unittest.equals('foo'), @@ -3788,7 +4047,7 @@ void checkGoogleCloudDataplexV1EntrySource( o.displayName!, unittest.equals('foo'), ); - checkUnnamed34(o.labels!); + checkUnnamed36(o.labels!); unittest.expect( o.location!, unittest.equals('foo'), @@ -3842,12 +4101,12 @@ void checkGoogleCloudDataplexV1EntrySourceAncestor( buildCounterGoogleCloudDataplexV1EntrySourceAncestor--; } -core.Map buildUnnamed35() => { +core.Map buildUnnamed37() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed35(core.Map o) { +void checkUnnamed37(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -3859,23 +4118,23 @@ void checkUnnamed35(core.Map o) { ); } -core.List buildUnnamed36() => [ +core.List buildUnnamed38() => [ buildGoogleCloudDataplexV1EntryTypeAspectInfo(), buildGoogleCloudDataplexV1EntryTypeAspectInfo(), ]; -void checkUnnamed36(core.List o) { +void checkUnnamed38(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDataplexV1EntryTypeAspectInfo(o[0]); checkGoogleCloudDataplexV1EntryTypeAspectInfo(o[1]); } -core.List buildUnnamed37() => [ +core.List buildUnnamed39() => [ 'foo', 'foo', ]; -void checkUnnamed37(core.List o) { +void checkUnnamed39(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -3897,12 +4156,12 @@ api.GoogleCloudDataplexV1EntryType buildGoogleCloudDataplexV1EntryType() { o.description = 'foo'; o.displayName = 'foo'; o.etag = 'foo'; - o.labels = buildUnnamed35(); + o.labels = buildUnnamed37(); o.name = 'foo'; o.platform = 'foo'; - o.requiredAspects = buildUnnamed36(); + o.requiredAspects = buildUnnamed38(); o.system = 'foo'; - o.typeAliases = buildUnnamed37(); + o.typeAliases = buildUnnamed39(); o.uid = 'foo'; o.updateTime = 'foo'; } @@ -3930,7 +4189,7 @@ void checkGoogleCloudDataplexV1EntryType(api.GoogleCloudDataplexV1EntryType o) { o.etag!, unittest.equals('foo'), ); - checkUnnamed35(o.labels!); + checkUnnamed37(o.labels!); unittest.expect( o.name!, unittest.equals('foo'), @@ -3939,12 +4198,12 @@ void checkGoogleCloudDataplexV1EntryType(api.GoogleCloudDataplexV1EntryType o) { o.platform!, unittest.equals('foo'), ); - checkUnnamed36(o.requiredAspects!); + checkUnnamed38(o.requiredAspects!); 
unittest.expect( o.system!, unittest.equals('foo'), ); - checkUnnamed37(o.typeAliases!); + checkUnnamed39(o.typeAliases!); unittest.expect( o.uid!, unittest.equals('foo'), @@ -4005,12 +4264,12 @@ void checkGoogleCloudDataplexV1EntryTypeAuthorization( buildCounterGoogleCloudDataplexV1EntryTypeAuthorization--; } -core.Map buildUnnamed38() => { +core.Map buildUnnamed40() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed38(core.Map o) { +void checkUnnamed40(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -4033,7 +4292,7 @@ api.GoogleCloudDataplexV1Environment buildGoogleCloudDataplexV1Environment() { o.endpoints = buildGoogleCloudDataplexV1EnvironmentEndpoints(); o.infrastructureSpec = buildGoogleCloudDataplexV1EnvironmentInfrastructureSpec(); - o.labels = buildUnnamed38(); + o.labels = buildUnnamed40(); o.name = 'foo'; o.sessionSpec = buildGoogleCloudDataplexV1EnvironmentSessionSpec(); o.sessionStatus = buildGoogleCloudDataplexV1EnvironmentSessionStatus(); @@ -4064,7 +4323,7 @@ void checkGoogleCloudDataplexV1Environment( checkGoogleCloudDataplexV1EnvironmentEndpoints(o.endpoints!); checkGoogleCloudDataplexV1EnvironmentInfrastructureSpec( o.infrastructureSpec!); - checkUnnamed38(o.labels!); + checkUnnamed40(o.labels!); unittest.expect( o.name!, unittest.equals('foo'), @@ -4182,12 +4441,12 @@ void checkGoogleCloudDataplexV1EnvironmentInfrastructureSpecComputeResources( buildCounterGoogleCloudDataplexV1EnvironmentInfrastructureSpecComputeResources--; } -core.List buildUnnamed39() => [ +core.List buildUnnamed41() => [ 'foo', 'foo', ]; -void checkUnnamed39(core.List o) { +void checkUnnamed41(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -4199,12 +4458,12 @@ void checkUnnamed39(core.List o) { ); } -core.Map buildUnnamed40() => { +core.Map buildUnnamed42() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed40(core.Map o) { +void checkUnnamed42(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -4216,12 +4475,12 @@ void checkUnnamed40(core.Map o) { ); } -core.List buildUnnamed41() => [ +core.List buildUnnamed43() => [ 'foo', 'foo', ]; -void checkUnnamed41(core.List o) { +void checkUnnamed43(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -4244,9 +4503,9 @@ api.GoogleCloudDataplexV1EnvironmentInfrastructureSpecOsImageRuntime if (buildCounterGoogleCloudDataplexV1EnvironmentInfrastructureSpecOsImageRuntime < 3) { o.imageVersion = 'foo'; - o.javaLibraries = buildUnnamed39(); - o.properties = buildUnnamed40(); - o.pythonPackages = buildUnnamed41(); + o.javaLibraries = buildUnnamed41(); + o.properties = buildUnnamed42(); + o.pythonPackages = buildUnnamed43(); } buildCounterGoogleCloudDataplexV1EnvironmentInfrastructureSpecOsImageRuntime--; return o; @@ -4261,9 +4520,9 @@ void checkGoogleCloudDataplexV1EnvironmentInfrastructureSpecOsImageRuntime( o.imageVersion!, unittest.equals('foo'), ); - checkUnnamed39(o.javaLibraries!); - checkUnnamed40(o.properties!); - checkUnnamed41(o.pythonPackages!); + checkUnnamed41(o.javaLibraries!); + checkUnnamed42(o.properties!); + checkUnnamed43(o.pythonPackages!); } buildCounterGoogleCloudDataplexV1EnvironmentInfrastructureSpecOsImageRuntime--; } @@ -4332,12 +4591,12 @@ void checkGoogleCloudDataplexV1GenerateDataQualityRulesRequest( buildCounterGoogleCloudDataplexV1GenerateDataQualityRulesRequest--; } -core.List buildUnnamed42() => [ +core.List buildUnnamed44() => [ buildGoogleCloudDataplexV1DataQualityRule(), 
buildGoogleCloudDataplexV1DataQualityRule(), ]; -void checkUnnamed42(core.List o) { +void checkUnnamed44(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDataplexV1DataQualityRule(o[0]); checkGoogleCloudDataplexV1DataQualityRule(o[1]); @@ -4349,7 +4608,7 @@ api.GoogleCloudDataplexV1GenerateDataQualityRulesResponse final o = api.GoogleCloudDataplexV1GenerateDataQualityRulesResponse(); buildCounterGoogleCloudDataplexV1GenerateDataQualityRulesResponse++; if (buildCounterGoogleCloudDataplexV1GenerateDataQualityRulesResponse < 3) { - o.rule = buildUnnamed42(); + o.rule = buildUnnamed44(); } buildCounterGoogleCloudDataplexV1GenerateDataQualityRulesResponse--; return o; @@ -4359,17 +4618,17 @@ void checkGoogleCloudDataplexV1GenerateDataQualityRulesResponse( api.GoogleCloudDataplexV1GenerateDataQualityRulesResponse o) { buildCounterGoogleCloudDataplexV1GenerateDataQualityRulesResponse++; if (buildCounterGoogleCloudDataplexV1GenerateDataQualityRulesResponse < 3) { - checkUnnamed42(o.rule!); + checkUnnamed44(o.rule!); } buildCounterGoogleCloudDataplexV1GenerateDataQualityRulesResponse--; } -core.Map buildUnnamed43() => { +core.Map buildUnnamed45() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed43(core.Map o) { +void checkUnnamed45(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -4388,7 +4647,7 @@ api.GoogleCloudDataplexV1Job buildGoogleCloudDataplexV1Job() { if (buildCounterGoogleCloudDataplexV1Job < 3) { o.endTime = 'foo'; o.executionSpec = buildGoogleCloudDataplexV1TaskExecutionSpec(); - o.labels = buildUnnamed43(); + o.labels = buildUnnamed45(); o.message = 'foo'; o.name = 'foo'; o.retryCount = 42; @@ -4411,7 +4670,7 @@ void checkGoogleCloudDataplexV1Job(api.GoogleCloudDataplexV1Job o) { unittest.equals('foo'), ); checkGoogleCloudDataplexV1TaskExecutionSpec(o.executionSpec!); - checkUnnamed43(o.labels!); + checkUnnamed45(o.labels!); unittest.expect( o.message!, unittest.equals('foo'), @@ -4452,12 +4711,12 @@ void checkGoogleCloudDataplexV1Job(api.GoogleCloudDataplexV1Job o) { buildCounterGoogleCloudDataplexV1Job--; } -core.Map buildUnnamed44() => { +core.Map buildUnnamed46() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed44(core.Map o) { +void checkUnnamed46(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -4478,7 +4737,7 @@ api.GoogleCloudDataplexV1Lake buildGoogleCloudDataplexV1Lake() { o.createTime = 'foo'; o.description = 'foo'; o.displayName = 'foo'; - o.labels = buildUnnamed44(); + o.labels = buildUnnamed46(); o.metastore = buildGoogleCloudDataplexV1LakeMetastore(); o.metastoreStatus = buildGoogleCloudDataplexV1LakeMetastoreStatus(); o.name = 'foo'; @@ -4507,7 +4766,7 @@ void checkGoogleCloudDataplexV1Lake(api.GoogleCloudDataplexV1Lake o) { o.displayName!, unittest.equals('foo'), ); - checkUnnamed44(o.labels!); + checkUnnamed46(o.labels!); checkGoogleCloudDataplexV1LakeMetastore(o.metastore!); checkGoogleCloudDataplexV1LakeMetastoreStatus(o.metastoreStatus!); unittest.expect( @@ -4597,12 +4856,12 @@ void checkGoogleCloudDataplexV1LakeMetastoreStatus( buildCounterGoogleCloudDataplexV1LakeMetastoreStatus--; } -core.List buildUnnamed45() => [ +core.List buildUnnamed47() => [ buildGoogleCloudDataplexV1Action(), buildGoogleCloudDataplexV1Action(), ]; -void checkUnnamed45(core.List o) { +void checkUnnamed47(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDataplexV1Action(o[0]); checkGoogleCloudDataplexV1Action(o[1]); @@ -4614,7 +4873,7 @@ 
api.GoogleCloudDataplexV1ListActionsResponse final o = api.GoogleCloudDataplexV1ListActionsResponse(); buildCounterGoogleCloudDataplexV1ListActionsResponse++; if (buildCounterGoogleCloudDataplexV1ListActionsResponse < 3) { - o.actions = buildUnnamed45(); + o.actions = buildUnnamed47(); o.nextPageToken = 'foo'; } buildCounterGoogleCloudDataplexV1ListActionsResponse--; @@ -4625,7 +4884,7 @@ void checkGoogleCloudDataplexV1ListActionsResponse( api.GoogleCloudDataplexV1ListActionsResponse o) { buildCounterGoogleCloudDataplexV1ListActionsResponse++; if (buildCounterGoogleCloudDataplexV1ListActionsResponse < 3) { - checkUnnamed45(o.actions!); + checkUnnamed47(o.actions!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -4634,23 +4893,23 @@ void checkGoogleCloudDataplexV1ListActionsResponse( buildCounterGoogleCloudDataplexV1ListActionsResponse--; } -core.List buildUnnamed46() => [ +core.List buildUnnamed48() => [ buildGoogleCloudDataplexV1AspectType(), buildGoogleCloudDataplexV1AspectType(), ]; -void checkUnnamed46(core.List o) { +void checkUnnamed48(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDataplexV1AspectType(o[0]); checkGoogleCloudDataplexV1AspectType(o[1]); } -core.List buildUnnamed47() => [ +core.List buildUnnamed49() => [ 'foo', 'foo', ]; -void checkUnnamed47(core.List o) { +void checkUnnamed49(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -4668,9 +4927,9 @@ api.GoogleCloudDataplexV1ListAspectTypesResponse final o = api.GoogleCloudDataplexV1ListAspectTypesResponse(); buildCounterGoogleCloudDataplexV1ListAspectTypesResponse++; if (buildCounterGoogleCloudDataplexV1ListAspectTypesResponse < 3) { - o.aspectTypes = buildUnnamed46(); + o.aspectTypes = buildUnnamed48(); o.nextPageToken = 'foo'; - o.unreachableLocations = buildUnnamed47(); + o.unreachableLocations = buildUnnamed49(); } buildCounterGoogleCloudDataplexV1ListAspectTypesResponse--; return o; @@ -4680,22 +4939,22 @@ void checkGoogleCloudDataplexV1ListAspectTypesResponse( api.GoogleCloudDataplexV1ListAspectTypesResponse o) { buildCounterGoogleCloudDataplexV1ListAspectTypesResponse++; if (buildCounterGoogleCloudDataplexV1ListAspectTypesResponse < 3) { - checkUnnamed46(o.aspectTypes!); + checkUnnamed48(o.aspectTypes!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed47(o.unreachableLocations!); + checkUnnamed49(o.unreachableLocations!); } buildCounterGoogleCloudDataplexV1ListAspectTypesResponse--; } -core.List buildUnnamed48() => [ +core.List buildUnnamed50() => [ buildGoogleCloudDataplexV1Asset(), buildGoogleCloudDataplexV1Asset(), ]; -void checkUnnamed48(core.List o) { +void checkUnnamed50(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDataplexV1Asset(o[0]); checkGoogleCloudDataplexV1Asset(o[1]); @@ -4707,7 +4966,7 @@ api.GoogleCloudDataplexV1ListAssetsResponse final o = api.GoogleCloudDataplexV1ListAssetsResponse(); buildCounterGoogleCloudDataplexV1ListAssetsResponse++; if (buildCounterGoogleCloudDataplexV1ListAssetsResponse < 3) { - o.assets = buildUnnamed48(); + o.assets = buildUnnamed50(); o.nextPageToken = 'foo'; } buildCounterGoogleCloudDataplexV1ListAssetsResponse--; @@ -4718,7 +4977,7 @@ void checkGoogleCloudDataplexV1ListAssetsResponse( api.GoogleCloudDataplexV1ListAssetsResponse o) { buildCounterGoogleCloudDataplexV1ListAssetsResponse++; if (buildCounterGoogleCloudDataplexV1ListAssetsResponse < 3) { - checkUnnamed48(o.assets!); + checkUnnamed50(o.assets!); unittest.expect( o.nextPageToken!, 
unittest.equals('foo'), @@ -4727,12 +4986,12 @@ void checkGoogleCloudDataplexV1ListAssetsResponse( buildCounterGoogleCloudDataplexV1ListAssetsResponse--; } -core.List buildUnnamed49() => [ +core.List buildUnnamed51() => [ buildGoogleCloudDataplexV1Content(), buildGoogleCloudDataplexV1Content(), ]; -void checkUnnamed49(core.List o) { +void checkUnnamed51(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDataplexV1Content(o[0]); checkGoogleCloudDataplexV1Content(o[1]); @@ -4744,7 +5003,7 @@ api.GoogleCloudDataplexV1ListContentResponse final o = api.GoogleCloudDataplexV1ListContentResponse(); buildCounterGoogleCloudDataplexV1ListContentResponse++; if (buildCounterGoogleCloudDataplexV1ListContentResponse < 3) { - o.content = buildUnnamed49(); + o.content = buildUnnamed51(); o.nextPageToken = 'foo'; } buildCounterGoogleCloudDataplexV1ListContentResponse--; @@ -4755,7 +5014,7 @@ void checkGoogleCloudDataplexV1ListContentResponse( api.GoogleCloudDataplexV1ListContentResponse o) { buildCounterGoogleCloudDataplexV1ListContentResponse++; if (buildCounterGoogleCloudDataplexV1ListContentResponse < 3) { - checkUnnamed49(o.content!); + checkUnnamed51(o.content!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -4764,24 +5023,24 @@ void checkGoogleCloudDataplexV1ListContentResponse( buildCounterGoogleCloudDataplexV1ListContentResponse--; } -core.List buildUnnamed50() => [ +core.List buildUnnamed52() => [ buildGoogleCloudDataplexV1DataAttributeBinding(), buildGoogleCloudDataplexV1DataAttributeBinding(), ]; -void checkUnnamed50( +void checkUnnamed52( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDataplexV1DataAttributeBinding(o[0]); checkGoogleCloudDataplexV1DataAttributeBinding(o[1]); } -core.List buildUnnamed51() => [ +core.List buildUnnamed53() => [ 'foo', 'foo', ]; -void checkUnnamed51(core.List o) { +void checkUnnamed53(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -4799,9 +5058,9 @@ api.GoogleCloudDataplexV1ListDataAttributeBindingsResponse final o = api.GoogleCloudDataplexV1ListDataAttributeBindingsResponse(); buildCounterGoogleCloudDataplexV1ListDataAttributeBindingsResponse++; if (buildCounterGoogleCloudDataplexV1ListDataAttributeBindingsResponse < 3) { - o.dataAttributeBindings = buildUnnamed50(); + o.dataAttributeBindings = buildUnnamed52(); o.nextPageToken = 'foo'; - o.unreachableLocations = buildUnnamed51(); + o.unreachableLocations = buildUnnamed53(); } buildCounterGoogleCloudDataplexV1ListDataAttributeBindingsResponse--; return o; @@ -4811,33 +5070,33 @@ void checkGoogleCloudDataplexV1ListDataAttributeBindingsResponse( api.GoogleCloudDataplexV1ListDataAttributeBindingsResponse o) { buildCounterGoogleCloudDataplexV1ListDataAttributeBindingsResponse++; if (buildCounterGoogleCloudDataplexV1ListDataAttributeBindingsResponse < 3) { - checkUnnamed50(o.dataAttributeBindings!); + checkUnnamed52(o.dataAttributeBindings!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed51(o.unreachableLocations!); + checkUnnamed53(o.unreachableLocations!); } buildCounterGoogleCloudDataplexV1ListDataAttributeBindingsResponse--; } -core.List buildUnnamed52() => [ +core.List buildUnnamed54() => [ buildGoogleCloudDataplexV1DataAttribute(), buildGoogleCloudDataplexV1DataAttribute(), ]; -void checkUnnamed52(core.List o) { +void checkUnnamed54(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDataplexV1DataAttribute(o[0]); checkGoogleCloudDataplexV1DataAttribute(o[1]); } 
-core.List buildUnnamed53() => [ +core.List buildUnnamed55() => [ 'foo', 'foo', ]; -void checkUnnamed53(core.List o) { +void checkUnnamed55(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -4855,9 +5114,9 @@ api.GoogleCloudDataplexV1ListDataAttributesResponse final o = api.GoogleCloudDataplexV1ListDataAttributesResponse(); buildCounterGoogleCloudDataplexV1ListDataAttributesResponse++; if (buildCounterGoogleCloudDataplexV1ListDataAttributesResponse < 3) { - o.dataAttributes = buildUnnamed52(); + o.dataAttributes = buildUnnamed54(); o.nextPageToken = 'foo'; - o.unreachableLocations = buildUnnamed53(); + o.unreachableLocations = buildUnnamed55(); } buildCounterGoogleCloudDataplexV1ListDataAttributesResponse--; return o; @@ -4867,22 +5126,22 @@ void checkGoogleCloudDataplexV1ListDataAttributesResponse( api.GoogleCloudDataplexV1ListDataAttributesResponse o) { buildCounterGoogleCloudDataplexV1ListDataAttributesResponse++; if (buildCounterGoogleCloudDataplexV1ListDataAttributesResponse < 3) { - checkUnnamed52(o.dataAttributes!); + checkUnnamed54(o.dataAttributes!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed53(o.unreachableLocations!); + checkUnnamed55(o.unreachableLocations!); } buildCounterGoogleCloudDataplexV1ListDataAttributesResponse--; } -core.List buildUnnamed54() => [ +core.List buildUnnamed56() => [ buildGoogleCloudDataplexV1DataScanJob(), buildGoogleCloudDataplexV1DataScanJob(), ]; -void checkUnnamed54(core.List o) { +void checkUnnamed56(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDataplexV1DataScanJob(o[0]); checkGoogleCloudDataplexV1DataScanJob(o[1]); @@ -4894,7 +5153,7 @@ api.GoogleCloudDataplexV1ListDataScanJobsResponse final o = api.GoogleCloudDataplexV1ListDataScanJobsResponse(); buildCounterGoogleCloudDataplexV1ListDataScanJobsResponse++; if (buildCounterGoogleCloudDataplexV1ListDataScanJobsResponse < 3) { - o.dataScanJobs = buildUnnamed54(); + o.dataScanJobs = buildUnnamed56(); o.nextPageToken = 'foo'; } buildCounterGoogleCloudDataplexV1ListDataScanJobsResponse--; @@ -4905,7 +5164,7 @@ void checkGoogleCloudDataplexV1ListDataScanJobsResponse( api.GoogleCloudDataplexV1ListDataScanJobsResponse o) { buildCounterGoogleCloudDataplexV1ListDataScanJobsResponse++; if (buildCounterGoogleCloudDataplexV1ListDataScanJobsResponse < 3) { - checkUnnamed54(o.dataScanJobs!); + checkUnnamed56(o.dataScanJobs!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -4914,23 +5173,23 @@ void checkGoogleCloudDataplexV1ListDataScanJobsResponse( buildCounterGoogleCloudDataplexV1ListDataScanJobsResponse--; } -core.List buildUnnamed55() => [ +core.List buildUnnamed57() => [ buildGoogleCloudDataplexV1DataScan(), buildGoogleCloudDataplexV1DataScan(), ]; -void checkUnnamed55(core.List o) { +void checkUnnamed57(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDataplexV1DataScan(o[0]); checkGoogleCloudDataplexV1DataScan(o[1]); } -core.List buildUnnamed56() => [ +core.List buildUnnamed58() => [ 'foo', 'foo', ]; -void checkUnnamed56(core.List o) { +void checkUnnamed58(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -4948,9 +5207,9 @@ api.GoogleCloudDataplexV1ListDataScansResponse final o = api.GoogleCloudDataplexV1ListDataScansResponse(); buildCounterGoogleCloudDataplexV1ListDataScansResponse++; if (buildCounterGoogleCloudDataplexV1ListDataScansResponse < 3) { - o.dataScans = buildUnnamed55(); + o.dataScans = buildUnnamed57(); o.nextPageToken = 
'foo'; - o.unreachable = buildUnnamed56(); + o.unreachable = buildUnnamed58(); } buildCounterGoogleCloudDataplexV1ListDataScansResponse--; return o; @@ -4960,33 +5219,33 @@ void checkGoogleCloudDataplexV1ListDataScansResponse( api.GoogleCloudDataplexV1ListDataScansResponse o) { buildCounterGoogleCloudDataplexV1ListDataScansResponse++; if (buildCounterGoogleCloudDataplexV1ListDataScansResponse < 3) { - checkUnnamed55(o.dataScans!); + checkUnnamed57(o.dataScans!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed56(o.unreachable!); + checkUnnamed58(o.unreachable!); } buildCounterGoogleCloudDataplexV1ListDataScansResponse--; } -core.List buildUnnamed57() => [ +core.List buildUnnamed59() => [ buildGoogleCloudDataplexV1DataTaxonomy(), buildGoogleCloudDataplexV1DataTaxonomy(), ]; -void checkUnnamed57(core.List o) { +void checkUnnamed59(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDataplexV1DataTaxonomy(o[0]); checkGoogleCloudDataplexV1DataTaxonomy(o[1]); } -core.List buildUnnamed58() => [ +core.List buildUnnamed60() => [ 'foo', 'foo', ]; -void checkUnnamed58(core.List o) { +void checkUnnamed60(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -5004,9 +5263,9 @@ api.GoogleCloudDataplexV1ListDataTaxonomiesResponse final o = api.GoogleCloudDataplexV1ListDataTaxonomiesResponse(); buildCounterGoogleCloudDataplexV1ListDataTaxonomiesResponse++; if (buildCounterGoogleCloudDataplexV1ListDataTaxonomiesResponse < 3) { - o.dataTaxonomies = buildUnnamed57(); + o.dataTaxonomies = buildUnnamed59(); o.nextPageToken = 'foo'; - o.unreachableLocations = buildUnnamed58(); + o.unreachableLocations = buildUnnamed60(); } buildCounterGoogleCloudDataplexV1ListDataTaxonomiesResponse--; return o; @@ -5016,22 +5275,22 @@ void checkGoogleCloudDataplexV1ListDataTaxonomiesResponse( api.GoogleCloudDataplexV1ListDataTaxonomiesResponse o) { buildCounterGoogleCloudDataplexV1ListDataTaxonomiesResponse++; if (buildCounterGoogleCloudDataplexV1ListDataTaxonomiesResponse < 3) { - checkUnnamed57(o.dataTaxonomies!); + checkUnnamed59(o.dataTaxonomies!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed58(o.unreachableLocations!); + checkUnnamed60(o.unreachableLocations!); } buildCounterGoogleCloudDataplexV1ListDataTaxonomiesResponse--; } -core.List buildUnnamed59() => [ +core.List buildUnnamed61() => [ buildGoogleCloudDataplexV1Entity(), buildGoogleCloudDataplexV1Entity(), ]; -void checkUnnamed59(core.List o) { +void checkUnnamed61(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDataplexV1Entity(o[0]); checkGoogleCloudDataplexV1Entity(o[1]); @@ -5043,7 +5302,7 @@ api.GoogleCloudDataplexV1ListEntitiesResponse final o = api.GoogleCloudDataplexV1ListEntitiesResponse(); buildCounterGoogleCloudDataplexV1ListEntitiesResponse++; if (buildCounterGoogleCloudDataplexV1ListEntitiesResponse < 3) { - o.entities = buildUnnamed59(); + o.entities = buildUnnamed61(); o.nextPageToken = 'foo'; } buildCounterGoogleCloudDataplexV1ListEntitiesResponse--; @@ -5054,7 +5313,7 @@ void checkGoogleCloudDataplexV1ListEntitiesResponse( api.GoogleCloudDataplexV1ListEntitiesResponse o) { buildCounterGoogleCloudDataplexV1ListEntitiesResponse++; if (buildCounterGoogleCloudDataplexV1ListEntitiesResponse < 3) { - checkUnnamed59(o.entities!); + checkUnnamed61(o.entities!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -5063,12 +5322,12 @@ void checkGoogleCloudDataplexV1ListEntitiesResponse( 
buildCounterGoogleCloudDataplexV1ListEntitiesResponse--; } -core.List buildUnnamed60() => [ +core.List buildUnnamed62() => [ buildGoogleCloudDataplexV1Entry(), buildGoogleCloudDataplexV1Entry(), ]; -void checkUnnamed60(core.List o) { +void checkUnnamed62(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDataplexV1Entry(o[0]); checkGoogleCloudDataplexV1Entry(o[1]); @@ -5080,7 +5339,7 @@ api.GoogleCloudDataplexV1ListEntriesResponse final o = api.GoogleCloudDataplexV1ListEntriesResponse(); buildCounterGoogleCloudDataplexV1ListEntriesResponse++; if (buildCounterGoogleCloudDataplexV1ListEntriesResponse < 3) { - o.entries = buildUnnamed60(); + o.entries = buildUnnamed62(); o.nextPageToken = 'foo'; } buildCounterGoogleCloudDataplexV1ListEntriesResponse--; @@ -5091,7 +5350,7 @@ void checkGoogleCloudDataplexV1ListEntriesResponse( api.GoogleCloudDataplexV1ListEntriesResponse o) { buildCounterGoogleCloudDataplexV1ListEntriesResponse++; if (buildCounterGoogleCloudDataplexV1ListEntriesResponse < 3) { - checkUnnamed60(o.entries!); + checkUnnamed62(o.entries!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -5100,23 +5359,23 @@ void checkGoogleCloudDataplexV1ListEntriesResponse( buildCounterGoogleCloudDataplexV1ListEntriesResponse--; } -core.List buildUnnamed61() => [ +core.List buildUnnamed63() => [ buildGoogleCloudDataplexV1EntryGroup(), buildGoogleCloudDataplexV1EntryGroup(), ]; -void checkUnnamed61(core.List o) { +void checkUnnamed63(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDataplexV1EntryGroup(o[0]); checkGoogleCloudDataplexV1EntryGroup(o[1]); } -core.List buildUnnamed62() => [ +core.List buildUnnamed64() => [ 'foo', 'foo', ]; -void checkUnnamed62(core.List o) { +void checkUnnamed64(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -5134,9 +5393,9 @@ api.GoogleCloudDataplexV1ListEntryGroupsResponse final o = api.GoogleCloudDataplexV1ListEntryGroupsResponse(); buildCounterGoogleCloudDataplexV1ListEntryGroupsResponse++; if (buildCounterGoogleCloudDataplexV1ListEntryGroupsResponse < 3) { - o.entryGroups = buildUnnamed61(); + o.entryGroups = buildUnnamed63(); o.nextPageToken = 'foo'; - o.unreachableLocations = buildUnnamed62(); + o.unreachableLocations = buildUnnamed64(); } buildCounterGoogleCloudDataplexV1ListEntryGroupsResponse--; return o; @@ -5146,33 +5405,33 @@ void checkGoogleCloudDataplexV1ListEntryGroupsResponse( api.GoogleCloudDataplexV1ListEntryGroupsResponse o) { buildCounterGoogleCloudDataplexV1ListEntryGroupsResponse++; if (buildCounterGoogleCloudDataplexV1ListEntryGroupsResponse < 3) { - checkUnnamed61(o.entryGroups!); + checkUnnamed63(o.entryGroups!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed62(o.unreachableLocations!); + checkUnnamed64(o.unreachableLocations!); } buildCounterGoogleCloudDataplexV1ListEntryGroupsResponse--; } -core.List buildUnnamed63() => [ +core.List buildUnnamed65() => [ buildGoogleCloudDataplexV1EntryType(), buildGoogleCloudDataplexV1EntryType(), ]; -void checkUnnamed63(core.List o) { +void checkUnnamed65(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDataplexV1EntryType(o[0]); checkGoogleCloudDataplexV1EntryType(o[1]); } -core.List buildUnnamed64() => [ +core.List buildUnnamed66() => [ 'foo', 'foo', ]; -void checkUnnamed64(core.List o) { +void checkUnnamed66(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -5190,9 +5449,9 @@ 
api.GoogleCloudDataplexV1ListEntryTypesResponse final o = api.GoogleCloudDataplexV1ListEntryTypesResponse(); buildCounterGoogleCloudDataplexV1ListEntryTypesResponse++; if (buildCounterGoogleCloudDataplexV1ListEntryTypesResponse < 3) { - o.entryTypes = buildUnnamed63(); + o.entryTypes = buildUnnamed65(); o.nextPageToken = 'foo'; - o.unreachableLocations = buildUnnamed64(); + o.unreachableLocations = buildUnnamed66(); } buildCounterGoogleCloudDataplexV1ListEntryTypesResponse--; return o; @@ -5202,22 +5461,22 @@ void checkGoogleCloudDataplexV1ListEntryTypesResponse( api.GoogleCloudDataplexV1ListEntryTypesResponse o) { buildCounterGoogleCloudDataplexV1ListEntryTypesResponse++; if (buildCounterGoogleCloudDataplexV1ListEntryTypesResponse < 3) { - checkUnnamed63(o.entryTypes!); + checkUnnamed65(o.entryTypes!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed64(o.unreachableLocations!); + checkUnnamed66(o.unreachableLocations!); } buildCounterGoogleCloudDataplexV1ListEntryTypesResponse--; } -core.List buildUnnamed65() => [ +core.List buildUnnamed67() => [ buildGoogleCloudDataplexV1Environment(), buildGoogleCloudDataplexV1Environment(), ]; -void checkUnnamed65(core.List o) { +void checkUnnamed67(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDataplexV1Environment(o[0]); checkGoogleCloudDataplexV1Environment(o[1]); @@ -5229,7 +5488,7 @@ api.GoogleCloudDataplexV1ListEnvironmentsResponse final o = api.GoogleCloudDataplexV1ListEnvironmentsResponse(); buildCounterGoogleCloudDataplexV1ListEnvironmentsResponse++; if (buildCounterGoogleCloudDataplexV1ListEnvironmentsResponse < 3) { - o.environments = buildUnnamed65(); + o.environments = buildUnnamed67(); o.nextPageToken = 'foo'; } buildCounterGoogleCloudDataplexV1ListEnvironmentsResponse--; @@ -5240,7 +5499,7 @@ void checkGoogleCloudDataplexV1ListEnvironmentsResponse( api.GoogleCloudDataplexV1ListEnvironmentsResponse o) { buildCounterGoogleCloudDataplexV1ListEnvironmentsResponse++; if (buildCounterGoogleCloudDataplexV1ListEnvironmentsResponse < 3) { - checkUnnamed65(o.environments!); + checkUnnamed67(o.environments!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -5249,12 +5508,12 @@ void checkGoogleCloudDataplexV1ListEnvironmentsResponse( buildCounterGoogleCloudDataplexV1ListEnvironmentsResponse--; } -core.List buildUnnamed66() => [ +core.List buildUnnamed68() => [ buildGoogleCloudDataplexV1Job(), buildGoogleCloudDataplexV1Job(), ]; -void checkUnnamed66(core.List o) { +void checkUnnamed68(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDataplexV1Job(o[0]); checkGoogleCloudDataplexV1Job(o[1]); @@ -5266,7 +5525,7 @@ api.GoogleCloudDataplexV1ListJobsResponse final o = api.GoogleCloudDataplexV1ListJobsResponse(); buildCounterGoogleCloudDataplexV1ListJobsResponse++; if (buildCounterGoogleCloudDataplexV1ListJobsResponse < 3) { - o.jobs = buildUnnamed66(); + o.jobs = buildUnnamed68(); o.nextPageToken = 'foo'; } buildCounterGoogleCloudDataplexV1ListJobsResponse--; @@ -5277,7 +5536,7 @@ void checkGoogleCloudDataplexV1ListJobsResponse( api.GoogleCloudDataplexV1ListJobsResponse o) { buildCounterGoogleCloudDataplexV1ListJobsResponse++; if (buildCounterGoogleCloudDataplexV1ListJobsResponse < 3) { - checkUnnamed66(o.jobs!); + checkUnnamed68(o.jobs!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -5286,23 +5545,23 @@ void checkGoogleCloudDataplexV1ListJobsResponse( buildCounterGoogleCloudDataplexV1ListJobsResponse--; } -core.List buildUnnamed67() => [ 
+core.List buildUnnamed69() => [ buildGoogleCloudDataplexV1Lake(), buildGoogleCloudDataplexV1Lake(), ]; -void checkUnnamed67(core.List o) { +void checkUnnamed69(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDataplexV1Lake(o[0]); checkGoogleCloudDataplexV1Lake(o[1]); } -core.List buildUnnamed68() => [ +core.List buildUnnamed70() => [ 'foo', 'foo', ]; -void checkUnnamed68(core.List o) { +void checkUnnamed70(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -5320,9 +5579,9 @@ api.GoogleCloudDataplexV1ListLakesResponse final o = api.GoogleCloudDataplexV1ListLakesResponse(); buildCounterGoogleCloudDataplexV1ListLakesResponse++; if (buildCounterGoogleCloudDataplexV1ListLakesResponse < 3) { - o.lakes = buildUnnamed67(); + o.lakes = buildUnnamed69(); o.nextPageToken = 'foo'; - o.unreachableLocations = buildUnnamed68(); + o.unreachableLocations = buildUnnamed70(); } buildCounterGoogleCloudDataplexV1ListLakesResponse--; return o; @@ -5332,33 +5591,33 @@ void checkGoogleCloudDataplexV1ListLakesResponse( api.GoogleCloudDataplexV1ListLakesResponse o) { buildCounterGoogleCloudDataplexV1ListLakesResponse++; if (buildCounterGoogleCloudDataplexV1ListLakesResponse < 3) { - checkUnnamed67(o.lakes!); + checkUnnamed69(o.lakes!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed68(o.unreachableLocations!); + checkUnnamed70(o.unreachableLocations!); } buildCounterGoogleCloudDataplexV1ListLakesResponse--; } -core.List buildUnnamed69() => [ +core.List buildUnnamed71() => [ buildGoogleCloudDataplexV1MetadataJob(), buildGoogleCloudDataplexV1MetadataJob(), ]; -void checkUnnamed69(core.List o) { +void checkUnnamed71(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDataplexV1MetadataJob(o[0]); checkGoogleCloudDataplexV1MetadataJob(o[1]); } -core.List buildUnnamed70() => [ +core.List buildUnnamed72() => [ 'foo', 'foo', ]; -void checkUnnamed70(core.List o) { +void checkUnnamed72(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -5376,9 +5635,9 @@ api.GoogleCloudDataplexV1ListMetadataJobsResponse final o = api.GoogleCloudDataplexV1ListMetadataJobsResponse(); buildCounterGoogleCloudDataplexV1ListMetadataJobsResponse++; if (buildCounterGoogleCloudDataplexV1ListMetadataJobsResponse < 3) { - o.metadataJobs = buildUnnamed69(); + o.metadataJobs = buildUnnamed71(); o.nextPageToken = 'foo'; - o.unreachableLocations = buildUnnamed70(); + o.unreachableLocations = buildUnnamed72(); } buildCounterGoogleCloudDataplexV1ListMetadataJobsResponse--; return o; @@ -5388,22 +5647,22 @@ void checkGoogleCloudDataplexV1ListMetadataJobsResponse( api.GoogleCloudDataplexV1ListMetadataJobsResponse o) { buildCounterGoogleCloudDataplexV1ListMetadataJobsResponse++; if (buildCounterGoogleCloudDataplexV1ListMetadataJobsResponse < 3) { - checkUnnamed69(o.metadataJobs!); + checkUnnamed71(o.metadataJobs!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed70(o.unreachableLocations!); + checkUnnamed72(o.unreachableLocations!); } buildCounterGoogleCloudDataplexV1ListMetadataJobsResponse--; } -core.List buildUnnamed71() => [ +core.List buildUnnamed73() => [ buildGoogleCloudDataplexV1Partition(), buildGoogleCloudDataplexV1Partition(), ]; -void checkUnnamed71(core.List o) { +void checkUnnamed73(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDataplexV1Partition(o[0]); checkGoogleCloudDataplexV1Partition(o[1]); @@ -5416,7 +5675,7 @@ 
api.GoogleCloudDataplexV1ListPartitionsResponse buildCounterGoogleCloudDataplexV1ListPartitionsResponse++; if (buildCounterGoogleCloudDataplexV1ListPartitionsResponse < 3) { o.nextPageToken = 'foo'; - o.partitions = buildUnnamed71(); + o.partitions = buildUnnamed73(); } buildCounterGoogleCloudDataplexV1ListPartitionsResponse--; return o; @@ -5430,17 +5689,17 @@ void checkGoogleCloudDataplexV1ListPartitionsResponse( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed71(o.partitions!); + checkUnnamed73(o.partitions!); } buildCounterGoogleCloudDataplexV1ListPartitionsResponse--; } -core.List buildUnnamed72() => [ +core.List buildUnnamed74() => [ buildGoogleCloudDataplexV1Session(), buildGoogleCloudDataplexV1Session(), ]; -void checkUnnamed72(core.List o) { +void checkUnnamed74(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDataplexV1Session(o[0]); checkGoogleCloudDataplexV1Session(o[1]); @@ -5453,7 +5712,7 @@ api.GoogleCloudDataplexV1ListSessionsResponse buildCounterGoogleCloudDataplexV1ListSessionsResponse++; if (buildCounterGoogleCloudDataplexV1ListSessionsResponse < 3) { o.nextPageToken = 'foo'; - o.sessions = buildUnnamed72(); + o.sessions = buildUnnamed74(); } buildCounterGoogleCloudDataplexV1ListSessionsResponse--; return o; @@ -5467,28 +5726,28 @@ void checkGoogleCloudDataplexV1ListSessionsResponse( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed72(o.sessions!); + checkUnnamed74(o.sessions!); } buildCounterGoogleCloudDataplexV1ListSessionsResponse--; } -core.List buildUnnamed73() => [ +core.List buildUnnamed75() => [ buildGoogleCloudDataplexV1Task(), buildGoogleCloudDataplexV1Task(), ]; -void checkUnnamed73(core.List o) { +void checkUnnamed75(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDataplexV1Task(o[0]); checkGoogleCloudDataplexV1Task(o[1]); } -core.List buildUnnamed74() => [ +core.List buildUnnamed76() => [ 'foo', 'foo', ]; -void checkUnnamed74(core.List o) { +void checkUnnamed76(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -5507,8 +5766,8 @@ api.GoogleCloudDataplexV1ListTasksResponse buildCounterGoogleCloudDataplexV1ListTasksResponse++; if (buildCounterGoogleCloudDataplexV1ListTasksResponse < 3) { o.nextPageToken = 'foo'; - o.tasks = buildUnnamed73(); - o.unreachableLocations = buildUnnamed74(); + o.tasks = buildUnnamed75(); + o.unreachableLocations = buildUnnamed76(); } buildCounterGoogleCloudDataplexV1ListTasksResponse--; return o; @@ -5522,18 +5781,18 @@ void checkGoogleCloudDataplexV1ListTasksResponse( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed73(o.tasks!); - checkUnnamed74(o.unreachableLocations!); + checkUnnamed75(o.tasks!); + checkUnnamed76(o.unreachableLocations!); } buildCounterGoogleCloudDataplexV1ListTasksResponse--; } -core.List buildUnnamed75() => [ +core.List buildUnnamed77() => [ buildGoogleCloudDataplexV1Zone(), buildGoogleCloudDataplexV1Zone(), ]; -void checkUnnamed75(core.List o) { +void checkUnnamed77(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDataplexV1Zone(o[0]); checkGoogleCloudDataplexV1Zone(o[1]); @@ -5546,7 +5805,7 @@ api.GoogleCloudDataplexV1ListZonesResponse buildCounterGoogleCloudDataplexV1ListZonesResponse++; if (buildCounterGoogleCloudDataplexV1ListZonesResponse < 3) { o.nextPageToken = 'foo'; - o.zones = buildUnnamed75(); + o.zones = buildUnnamed77(); } buildCounterGoogleCloudDataplexV1ListZonesResponse--; return o; @@ -5560,17 +5819,17 @@ void checkGoogleCloudDataplexV1ListZonesResponse( 
o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed75(o.zones!); + checkUnnamed77(o.zones!); } buildCounterGoogleCloudDataplexV1ListZonesResponse--; } -core.Map buildUnnamed76() => { +core.Map buildUnnamed78() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed76(core.Map o) { +void checkUnnamed78(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -5590,7 +5849,7 @@ api.GoogleCloudDataplexV1MetadataJob buildGoogleCloudDataplexV1MetadataJob() { o.createTime = 'foo'; o.importResult = buildGoogleCloudDataplexV1MetadataJobImportJobResult(); o.importSpec = buildGoogleCloudDataplexV1MetadataJobImportJobSpec(); - o.labels = buildUnnamed76(); + o.labels = buildUnnamed78(); o.name = 'foo'; o.status = buildGoogleCloudDataplexV1MetadataJobStatus(); o.type = 'foo'; @@ -5611,7 +5870,7 @@ void checkGoogleCloudDataplexV1MetadataJob( ); checkGoogleCloudDataplexV1MetadataJobImportJobResult(o.importResult!); checkGoogleCloudDataplexV1MetadataJobImportJobSpec(o.importSpec!); - checkUnnamed76(o.labels!); + checkUnnamed78(o.labels!); unittest.expect( o.name!, unittest.equals('foo'), @@ -5729,12 +5988,12 @@ void checkGoogleCloudDataplexV1MetadataJobImportJobSpec( buildCounterGoogleCloudDataplexV1MetadataJobImportJobSpec--; } -core.List buildUnnamed77() => [ +core.List buildUnnamed79() => [ 'foo', 'foo', ]; -void checkUnnamed77(core.List o) { +void checkUnnamed79(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -5746,12 +6005,12 @@ void checkUnnamed77(core.List o) { ); } -core.List buildUnnamed78() => [ +core.List buildUnnamed80() => [ 'foo', 'foo', ]; -void checkUnnamed78(core.List o) { +void checkUnnamed80(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -5763,12 +6022,12 @@ void checkUnnamed78(core.List o) { ); } -core.List buildUnnamed79() => [ +core.List buildUnnamed81() => [ 'foo', 'foo', ]; -void checkUnnamed79(core.List o) { +void checkUnnamed81(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -5788,9 +6047,9 @@ api.GoogleCloudDataplexV1MetadataJobImportJobSpecImportJobScope buildCounterGoogleCloudDataplexV1MetadataJobImportJobSpecImportJobScope++; if (buildCounterGoogleCloudDataplexV1MetadataJobImportJobSpecImportJobScope < 3) { - o.aspectTypes = buildUnnamed77(); - o.entryGroups = buildUnnamed78(); - o.entryTypes = buildUnnamed79(); + o.aspectTypes = buildUnnamed79(); + o.entryGroups = buildUnnamed80(); + o.entryTypes = buildUnnamed81(); } buildCounterGoogleCloudDataplexV1MetadataJobImportJobSpecImportJobScope--; return o; @@ -5801,9 +6060,9 @@ void checkGoogleCloudDataplexV1MetadataJobImportJobSpecImportJobScope( buildCounterGoogleCloudDataplexV1MetadataJobImportJobSpecImportJobScope++; if (buildCounterGoogleCloudDataplexV1MetadataJobImportJobSpecImportJobScope < 3) { - checkUnnamed77(o.aspectTypes!); - checkUnnamed78(o.entryGroups!); - checkUnnamed79(o.entryTypes!); + checkUnnamed79(o.aspectTypes!); + checkUnnamed80(o.entryGroups!); + checkUnnamed81(o.entryTypes!); } buildCounterGoogleCloudDataplexV1MetadataJobImportJobSpecImportJobScope--; } @@ -5847,12 +6106,12 @@ void checkGoogleCloudDataplexV1MetadataJobStatus( buildCounterGoogleCloudDataplexV1MetadataJobStatus--; } -core.List buildUnnamed80() => [ +core.List buildUnnamed82() => [ 'foo', 'foo', ]; -void checkUnnamed80(core.List o) { +void checkUnnamed82(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -5872,7 +6131,7 @@ api.GoogleCloudDataplexV1Partition 
buildGoogleCloudDataplexV1Partition() { o.etag = 'foo'; o.location = 'foo'; o.name = 'foo'; - o.values = buildUnnamed80(); + o.values = buildUnnamed82(); } buildCounterGoogleCloudDataplexV1Partition--; return o; @@ -5893,17 +6152,17 @@ void checkGoogleCloudDataplexV1Partition(api.GoogleCloudDataplexV1Partition o) { o.name!, unittest.equals('foo'), ); - checkUnnamed80(o.values!); + checkUnnamed82(o.values!); } buildCounterGoogleCloudDataplexV1Partition--; } -core.List buildUnnamed81() => [ +core.List buildUnnamed83() => [ 'foo', 'foo', ]; -void checkUnnamed81(core.List o) { +void checkUnnamed83(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -5915,12 +6174,12 @@ void checkUnnamed81(core.List o) { ); } -core.List buildUnnamed82() => [ +core.List buildUnnamed84() => [ 'foo', 'foo', ]; -void checkUnnamed82(core.List o) { +void checkUnnamed84(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -5932,12 +6191,12 @@ void checkUnnamed82(core.List o) { ); } -core.List buildUnnamed83() => [ +core.List buildUnnamed85() => [ 'foo', 'foo', ]; -void checkUnnamed83(core.List o) { +void checkUnnamed85(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -5955,9 +6214,9 @@ api.GoogleCloudDataplexV1ResourceAccessSpec final o = api.GoogleCloudDataplexV1ResourceAccessSpec(); buildCounterGoogleCloudDataplexV1ResourceAccessSpec++; if (buildCounterGoogleCloudDataplexV1ResourceAccessSpec < 3) { - o.owners = buildUnnamed81(); - o.readers = buildUnnamed82(); - o.writers = buildUnnamed83(); + o.owners = buildUnnamed83(); + o.readers = buildUnnamed84(); + o.writers = buildUnnamed85(); } buildCounterGoogleCloudDataplexV1ResourceAccessSpec--; return o; @@ -5967,9 +6226,9 @@ void checkGoogleCloudDataplexV1ResourceAccessSpec( api.GoogleCloudDataplexV1ResourceAccessSpec o) { buildCounterGoogleCloudDataplexV1ResourceAccessSpec++; if (buildCounterGoogleCloudDataplexV1ResourceAccessSpec < 3) { - checkUnnamed81(o.owners!); - checkUnnamed82(o.readers!); - checkUnnamed83(o.writers!); + checkUnnamed83(o.owners!); + checkUnnamed84(o.readers!); + checkUnnamed85(o.writers!); } buildCounterGoogleCloudDataplexV1ResourceAccessSpec--; } @@ -6012,12 +6271,12 @@ void checkGoogleCloudDataplexV1RunDataScanResponse( buildCounterGoogleCloudDataplexV1RunDataScanResponse--; } -core.Map buildUnnamed84() => { +core.Map buildUnnamed86() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed84(core.Map o) { +void checkUnnamed86(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -6029,12 +6288,12 @@ void checkUnnamed84(core.Map o) { ); } -core.Map buildUnnamed85() => { +core.Map buildUnnamed87() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed85(core.Map o) { +void checkUnnamed87(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -6052,8 +6311,8 @@ api.GoogleCloudDataplexV1RunTaskRequest final o = api.GoogleCloudDataplexV1RunTaskRequest(); buildCounterGoogleCloudDataplexV1RunTaskRequest++; if (buildCounterGoogleCloudDataplexV1RunTaskRequest < 3) { - o.args = buildUnnamed84(); - o.labels = buildUnnamed85(); + o.args = buildUnnamed86(); + o.labels = buildUnnamed87(); } buildCounterGoogleCloudDataplexV1RunTaskRequest--; return o; @@ -6063,8 +6322,8 @@ void checkGoogleCloudDataplexV1RunTaskRequest( api.GoogleCloudDataplexV1RunTaskRequest o) { buildCounterGoogleCloudDataplexV1RunTaskRequest++; if (buildCounterGoogleCloudDataplexV1RunTaskRequest < 3) { - checkUnnamed84(o.args!); - 
checkUnnamed85(o.labels!); + checkUnnamed86(o.args!); + checkUnnamed87(o.labels!); } buildCounterGoogleCloudDataplexV1RunTaskRequest--; } @@ -6145,23 +6404,23 @@ void checkGoogleCloudDataplexV1ScannedDataIncrementalField( buildCounterGoogleCloudDataplexV1ScannedDataIncrementalField--; } -core.List buildUnnamed86() => [ +core.List buildUnnamed88() => [ buildGoogleCloudDataplexV1SchemaSchemaField(), buildGoogleCloudDataplexV1SchemaSchemaField(), ]; -void checkUnnamed86(core.List o) { +void checkUnnamed88(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDataplexV1SchemaSchemaField(o[0]); checkGoogleCloudDataplexV1SchemaSchemaField(o[1]); } -core.List buildUnnamed87() => [ +core.List buildUnnamed89() => [ buildGoogleCloudDataplexV1SchemaPartitionField(), buildGoogleCloudDataplexV1SchemaPartitionField(), ]; -void checkUnnamed87( +void checkUnnamed89( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDataplexV1SchemaPartitionField(o[0]); @@ -6173,8 +6432,8 @@ api.GoogleCloudDataplexV1Schema buildGoogleCloudDataplexV1Schema() { final o = api.GoogleCloudDataplexV1Schema(); buildCounterGoogleCloudDataplexV1Schema++; if (buildCounterGoogleCloudDataplexV1Schema < 3) { - o.fields = buildUnnamed86(); - o.partitionFields = buildUnnamed87(); + o.fields = buildUnnamed88(); + o.partitionFields = buildUnnamed89(); o.partitionStyle = 'foo'; o.userManaged = true; } @@ -6185,8 +6444,8 @@ api.GoogleCloudDataplexV1Schema buildGoogleCloudDataplexV1Schema() { void checkGoogleCloudDataplexV1Schema(api.GoogleCloudDataplexV1Schema o) { buildCounterGoogleCloudDataplexV1Schema++; if (buildCounterGoogleCloudDataplexV1Schema < 3) { - checkUnnamed86(o.fields!); - checkUnnamed87(o.partitionFields!); + checkUnnamed88(o.fields!); + checkUnnamed89(o.partitionFields!); unittest.expect( o.partitionStyle!, unittest.equals('foo'), @@ -6225,12 +6484,12 @@ void checkGoogleCloudDataplexV1SchemaPartitionField( buildCounterGoogleCloudDataplexV1SchemaPartitionField--; } -core.List buildUnnamed88() => [ +core.List buildUnnamed90() => [ buildGoogleCloudDataplexV1SchemaSchemaField(), buildGoogleCloudDataplexV1SchemaSchemaField(), ]; -void checkUnnamed88(core.List o) { +void checkUnnamed90(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDataplexV1SchemaSchemaField(o[0]); checkGoogleCloudDataplexV1SchemaSchemaField(o[1]); @@ -6243,7 +6502,7 @@ api.GoogleCloudDataplexV1SchemaSchemaField buildCounterGoogleCloudDataplexV1SchemaSchemaField++; if (buildCounterGoogleCloudDataplexV1SchemaSchemaField < 3) { o.description = 'foo'; - o.fields = buildUnnamed88(); + o.fields = buildUnnamed90(); o.mode = 'foo'; o.name = 'foo'; o.type = 'foo'; @@ -6260,7 +6519,7 @@ void checkGoogleCloudDataplexV1SchemaSchemaField( o.description!, unittest.equals('foo'), ); - checkUnnamed88(o.fields!); + checkUnnamed90(o.fields!); unittest.expect( o.mode!, unittest.equals('foo'), @@ -6277,23 +6536,23 @@ void checkGoogleCloudDataplexV1SchemaSchemaField( buildCounterGoogleCloudDataplexV1SchemaSchemaField--; } -core.List buildUnnamed89() => [ +core.List buildUnnamed91() => [ buildGoogleCloudDataplexV1SearchEntriesResult(), buildGoogleCloudDataplexV1SearchEntriesResult(), ]; -void checkUnnamed89(core.List o) { +void checkUnnamed91(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDataplexV1SearchEntriesResult(o[0]); checkGoogleCloudDataplexV1SearchEntriesResult(o[1]); } -core.List buildUnnamed90() => [ +core.List buildUnnamed92() => [ 'foo', 'foo', ]; -void 
checkUnnamed90(core.List o) { +void checkUnnamed92(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -6312,9 +6571,9 @@ api.GoogleCloudDataplexV1SearchEntriesResponse buildCounterGoogleCloudDataplexV1SearchEntriesResponse++; if (buildCounterGoogleCloudDataplexV1SearchEntriesResponse < 3) { o.nextPageToken = 'foo'; - o.results = buildUnnamed89(); + o.results = buildUnnamed91(); o.totalSize = 42; - o.unreachable = buildUnnamed90(); + o.unreachable = buildUnnamed92(); } buildCounterGoogleCloudDataplexV1SearchEntriesResponse--; return o; @@ -6328,12 +6587,12 @@ void checkGoogleCloudDataplexV1SearchEntriesResponse( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed89(o.results!); + checkUnnamed91(o.results!); unittest.expect( o.totalSize!, unittest.equals(42), ); - checkUnnamed90(o.unreachable!); + checkUnnamed92(o.unreachable!); } buildCounterGoogleCloudDataplexV1SearchEntriesResponse--; } @@ -6575,12 +6834,12 @@ void checkGoogleCloudDataplexV1StorageFormatJsonOptions( buildCounterGoogleCloudDataplexV1StorageFormatJsonOptions--; } -core.Map buildUnnamed91() => { +core.Map buildUnnamed93() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed91(core.Map o) { +void checkUnnamed93(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -6602,7 +6861,7 @@ api.GoogleCloudDataplexV1Task buildGoogleCloudDataplexV1Task() { o.displayName = 'foo'; o.executionSpec = buildGoogleCloudDataplexV1TaskExecutionSpec(); o.executionStatus = buildGoogleCloudDataplexV1TaskExecutionStatus(); - o.labels = buildUnnamed91(); + o.labels = buildUnnamed93(); o.name = 'foo'; o.notebook = buildGoogleCloudDataplexV1TaskNotebookTaskConfig(); o.spark = buildGoogleCloudDataplexV1TaskSparkTaskConfig(); @@ -6632,7 +6891,7 @@ void checkGoogleCloudDataplexV1Task(api.GoogleCloudDataplexV1Task o) { ); checkGoogleCloudDataplexV1TaskExecutionSpec(o.executionSpec!); checkGoogleCloudDataplexV1TaskExecutionStatus(o.executionStatus!); - checkUnnamed91(o.labels!); + checkUnnamed93(o.labels!); unittest.expect( o.name!, unittest.equals('foo'), @@ -6656,12 +6915,12 @@ void checkGoogleCloudDataplexV1Task(api.GoogleCloudDataplexV1Task o) { buildCounterGoogleCloudDataplexV1Task--; } -core.Map buildUnnamed92() => { +core.Map buildUnnamed94() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed92(core.Map o) { +void checkUnnamed94(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -6679,7 +6938,7 @@ api.GoogleCloudDataplexV1TaskExecutionSpec final o = api.GoogleCloudDataplexV1TaskExecutionSpec(); buildCounterGoogleCloudDataplexV1TaskExecutionSpec++; if (buildCounterGoogleCloudDataplexV1TaskExecutionSpec < 3) { - o.args = buildUnnamed92(); + o.args = buildUnnamed94(); o.kmsKey = 'foo'; o.maxJobExecutionLifetime = 'foo'; o.project = 'foo'; @@ -6693,7 +6952,7 @@ void checkGoogleCloudDataplexV1TaskExecutionSpec( api.GoogleCloudDataplexV1TaskExecutionSpec o) { buildCounterGoogleCloudDataplexV1TaskExecutionSpec++; if (buildCounterGoogleCloudDataplexV1TaskExecutionSpec < 3) { - checkUnnamed92(o.args!); + checkUnnamed94(o.args!); unittest.expect( o.kmsKey!, unittest.equals('foo'), @@ -6803,12 +7062,12 @@ void checkGoogleCloudDataplexV1TaskInfrastructureSpecBatchComputeResources( buildCounterGoogleCloudDataplexV1TaskInfrastructureSpecBatchComputeResources--; } -core.List buildUnnamed93() => [ +core.List buildUnnamed95() => [ 'foo', 'foo', ]; -void checkUnnamed93(core.List o) { +void checkUnnamed95(core.List o) { unittest.expect(o, unittest.hasLength(2)); 
unittest.expect( o[0], @@ -6820,12 +7079,12 @@ void checkUnnamed93(core.List o) { ); } -core.Map buildUnnamed94() => { +core.Map buildUnnamed96() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed94(core.Map o) { +void checkUnnamed96(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -6837,12 +7096,12 @@ void checkUnnamed94(core.Map o) { ); } -core.List buildUnnamed95() => [ +core.List buildUnnamed97() => [ 'foo', 'foo', ]; -void checkUnnamed95(core.List o) { +void checkUnnamed97(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -6865,9 +7124,9 @@ api.GoogleCloudDataplexV1TaskInfrastructureSpecContainerImageRuntime if (buildCounterGoogleCloudDataplexV1TaskInfrastructureSpecContainerImageRuntime < 3) { o.image = 'foo'; - o.javaJars = buildUnnamed93(); - o.properties = buildUnnamed94(); - o.pythonPackages = buildUnnamed95(); + o.javaJars = buildUnnamed95(); + o.properties = buildUnnamed96(); + o.pythonPackages = buildUnnamed97(); } buildCounterGoogleCloudDataplexV1TaskInfrastructureSpecContainerImageRuntime--; return o; @@ -6882,19 +7141,19 @@ void checkGoogleCloudDataplexV1TaskInfrastructureSpecContainerImageRuntime( o.image!, unittest.equals('foo'), ); - checkUnnamed93(o.javaJars!); - checkUnnamed94(o.properties!); - checkUnnamed95(o.pythonPackages!); + checkUnnamed95(o.javaJars!); + checkUnnamed96(o.properties!); + checkUnnamed97(o.pythonPackages!); } buildCounterGoogleCloudDataplexV1TaskInfrastructureSpecContainerImageRuntime--; } -core.List buildUnnamed96() => [ +core.List buildUnnamed98() => [ 'foo', 'foo', ]; -void checkUnnamed96(core.List o) { +void checkUnnamed98(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -6913,7 +7172,7 @@ api.GoogleCloudDataplexV1TaskInfrastructureSpecVpcNetwork buildCounterGoogleCloudDataplexV1TaskInfrastructureSpecVpcNetwork++; if (buildCounterGoogleCloudDataplexV1TaskInfrastructureSpecVpcNetwork < 3) { o.network = 'foo'; - o.networkTags = buildUnnamed96(); + o.networkTags = buildUnnamed98(); o.subNetwork = 'foo'; } buildCounterGoogleCloudDataplexV1TaskInfrastructureSpecVpcNetwork--; @@ -6928,7 +7187,7 @@ void checkGoogleCloudDataplexV1TaskInfrastructureSpecVpcNetwork( o.network!, unittest.equals('foo'), ); - checkUnnamed96(o.networkTags!); + checkUnnamed98(o.networkTags!); unittest.expect( o.subNetwork!, unittest.equals('foo'), @@ -6937,12 +7196,12 @@ void checkGoogleCloudDataplexV1TaskInfrastructureSpecVpcNetwork( buildCounterGoogleCloudDataplexV1TaskInfrastructureSpecVpcNetwork--; } -core.List buildUnnamed97() => [ +core.List buildUnnamed99() => [ 'foo', 'foo', ]; -void checkUnnamed97(core.List o) { +void checkUnnamed99(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -6954,12 +7213,12 @@ void checkUnnamed97(core.List o) { ); } -core.List buildUnnamed98() => [ +core.List buildUnnamed100() => [ 'foo', 'foo', ]; -void checkUnnamed98(core.List o) { +void checkUnnamed100(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -6977,8 +7236,8 @@ api.GoogleCloudDataplexV1TaskNotebookTaskConfig final o = api.GoogleCloudDataplexV1TaskNotebookTaskConfig(); buildCounterGoogleCloudDataplexV1TaskNotebookTaskConfig++; if (buildCounterGoogleCloudDataplexV1TaskNotebookTaskConfig < 3) { - o.archiveUris = buildUnnamed97(); - o.fileUris = buildUnnamed98(); + o.archiveUris = buildUnnamed99(); + o.fileUris = buildUnnamed100(); o.infrastructureSpec = buildGoogleCloudDataplexV1TaskInfrastructureSpec(); 
o.notebook = 'foo'; } @@ -6990,8 +7249,8 @@ void checkGoogleCloudDataplexV1TaskNotebookTaskConfig( api.GoogleCloudDataplexV1TaskNotebookTaskConfig o) { buildCounterGoogleCloudDataplexV1TaskNotebookTaskConfig++; if (buildCounterGoogleCloudDataplexV1TaskNotebookTaskConfig < 3) { - checkUnnamed97(o.archiveUris!); - checkUnnamed98(o.fileUris!); + checkUnnamed99(o.archiveUris!); + checkUnnamed100(o.fileUris!); checkGoogleCloudDataplexV1TaskInfrastructureSpec(o.infrastructureSpec!); unittest.expect( o.notebook!, @@ -7001,12 +7260,12 @@ void checkGoogleCloudDataplexV1TaskNotebookTaskConfig( buildCounterGoogleCloudDataplexV1TaskNotebookTaskConfig--; } -core.List buildUnnamed99() => [ +core.List buildUnnamed101() => [ 'foo', 'foo', ]; -void checkUnnamed99(core.List o) { +void checkUnnamed101(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -7018,12 +7277,12 @@ void checkUnnamed99(core.List o) { ); } -core.List buildUnnamed100() => [ +core.List buildUnnamed102() => [ 'foo', 'foo', ]; -void checkUnnamed100(core.List o) { +void checkUnnamed102(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -7041,8 +7300,8 @@ api.GoogleCloudDataplexV1TaskSparkTaskConfig final o = api.GoogleCloudDataplexV1TaskSparkTaskConfig(); buildCounterGoogleCloudDataplexV1TaskSparkTaskConfig++; if (buildCounterGoogleCloudDataplexV1TaskSparkTaskConfig < 3) { - o.archiveUris = buildUnnamed99(); - o.fileUris = buildUnnamed100(); + o.archiveUris = buildUnnamed101(); + o.fileUris = buildUnnamed102(); o.infrastructureSpec = buildGoogleCloudDataplexV1TaskInfrastructureSpec(); o.mainClass = 'foo'; o.mainJarFileUri = 'foo'; @@ -7058,8 +7317,8 @@ void checkGoogleCloudDataplexV1TaskSparkTaskConfig( api.GoogleCloudDataplexV1TaskSparkTaskConfig o) { buildCounterGoogleCloudDataplexV1TaskSparkTaskConfig++; if (buildCounterGoogleCloudDataplexV1TaskSparkTaskConfig < 3) { - checkUnnamed99(o.archiveUris!); - checkUnnamed100(o.fileUris!); + checkUnnamed101(o.archiveUris!); + checkUnnamed102(o.fileUris!); checkGoogleCloudDataplexV1TaskInfrastructureSpec(o.infrastructureSpec!); unittest.expect( o.mainClass!, @@ -7188,12 +7447,12 @@ void checkGoogleCloudDataplexV1TriggerSchedule( buildCounterGoogleCloudDataplexV1TriggerSchedule--; } -core.Map buildUnnamed101() => { +core.Map buildUnnamed103() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed101(core.Map o) { +void checkUnnamed103(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -7215,7 +7474,7 @@ api.GoogleCloudDataplexV1Zone buildGoogleCloudDataplexV1Zone() { o.description = 'foo'; o.discoverySpec = buildGoogleCloudDataplexV1ZoneDiscoverySpec(); o.displayName = 'foo'; - o.labels = buildUnnamed101(); + o.labels = buildUnnamed103(); o.name = 'foo'; o.resourceSpec = buildGoogleCloudDataplexV1ZoneResourceSpec(); o.state = 'foo'; @@ -7244,7 +7503,7 @@ void checkGoogleCloudDataplexV1Zone(api.GoogleCloudDataplexV1Zone o) { o.displayName!, unittest.equals('foo'), ); - checkUnnamed101(o.labels!); + checkUnnamed103(o.labels!); unittest.expect( o.name!, unittest.equals('foo'), @@ -7270,12 +7529,12 @@ void checkGoogleCloudDataplexV1Zone(api.GoogleCloudDataplexV1Zone o) { buildCounterGoogleCloudDataplexV1Zone--; } -core.List buildUnnamed102() => [ +core.List buildUnnamed104() => [ 'foo', 'foo', ]; -void checkUnnamed102(core.List o) { +void checkUnnamed104(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -7287,12 +7546,12 @@ void checkUnnamed102(core.List o) { ); } 
-core.List buildUnnamed103() => [ +core.List buildUnnamed105() => [ 'foo', 'foo', ]; -void checkUnnamed103(core.List o) { +void checkUnnamed105(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -7312,8 +7571,8 @@ api.GoogleCloudDataplexV1ZoneDiscoverySpec if (buildCounterGoogleCloudDataplexV1ZoneDiscoverySpec < 3) { o.csvOptions = buildGoogleCloudDataplexV1ZoneDiscoverySpecCsvOptions(); o.enabled = true; - o.excludePatterns = buildUnnamed102(); - o.includePatterns = buildUnnamed103(); + o.excludePatterns = buildUnnamed104(); + o.includePatterns = buildUnnamed105(); o.jsonOptions = buildGoogleCloudDataplexV1ZoneDiscoverySpecJsonOptions(); o.schedule = 'foo'; } @@ -7327,8 +7586,8 @@ void checkGoogleCloudDataplexV1ZoneDiscoverySpec( if (buildCounterGoogleCloudDataplexV1ZoneDiscoverySpec < 3) { checkGoogleCloudDataplexV1ZoneDiscoverySpecCsvOptions(o.csvOptions!); unittest.expect(o.enabled!, unittest.isTrue); - checkUnnamed102(o.excludePatterns!); - checkUnnamed103(o.includePatterns!); + checkUnnamed104(o.excludePatterns!); + checkUnnamed105(o.includePatterns!); checkGoogleCloudDataplexV1ZoneDiscoverySpecJsonOptions(o.jsonOptions!); unittest.expect( o.schedule!, @@ -7424,12 +7683,12 @@ void checkGoogleCloudDataplexV1ZoneResourceSpec( buildCounterGoogleCloudDataplexV1ZoneResourceSpec--; } -core.List buildUnnamed104() => [ +core.List buildUnnamed106() => [ buildGoogleCloudLocationLocation(), buildGoogleCloudLocationLocation(), ]; -void checkUnnamed104(core.List o) { +void checkUnnamed106(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudLocationLocation(o[0]); checkGoogleCloudLocationLocation(o[1]); @@ -7441,7 +7700,7 @@ api.GoogleCloudLocationListLocationsResponse final o = api.GoogleCloudLocationListLocationsResponse(); buildCounterGoogleCloudLocationListLocationsResponse++; if (buildCounterGoogleCloudLocationListLocationsResponse < 3) { - o.locations = buildUnnamed104(); + o.locations = buildUnnamed106(); o.nextPageToken = 'foo'; } buildCounterGoogleCloudLocationListLocationsResponse--; @@ -7452,7 +7711,7 @@ void checkGoogleCloudLocationListLocationsResponse( api.GoogleCloudLocationListLocationsResponse o) { buildCounterGoogleCloudLocationListLocationsResponse++; if (buildCounterGoogleCloudLocationListLocationsResponse < 3) { - checkUnnamed104(o.locations!); + checkUnnamed106(o.locations!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -7461,12 +7720,12 @@ void checkGoogleCloudLocationListLocationsResponse( buildCounterGoogleCloudLocationListLocationsResponse--; } -core.Map buildUnnamed105() => { +core.Map buildUnnamed107() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed105(core.Map o) { +void checkUnnamed107(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -7478,7 +7737,7 @@ void checkUnnamed105(core.Map o) { ); } -core.Map buildUnnamed106() => { +core.Map buildUnnamed108() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -7491,7 +7750,7 @@ core.Map buildUnnamed106() => { }, }; -void checkUnnamed106(core.Map o) { +void checkUnnamed108(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted3 = (o['x']!) 
as core.Map; unittest.expect(casted3, unittest.hasLength(3)); @@ -7529,9 +7788,9 @@ api.GoogleCloudLocationLocation buildGoogleCloudLocationLocation() { buildCounterGoogleCloudLocationLocation++; if (buildCounterGoogleCloudLocationLocation < 3) { o.displayName = 'foo'; - o.labels = buildUnnamed105(); + o.labels = buildUnnamed107(); o.locationId = 'foo'; - o.metadata = buildUnnamed106(); + o.metadata = buildUnnamed108(); o.name = 'foo'; } buildCounterGoogleCloudLocationLocation--; @@ -7545,12 +7804,12 @@ void checkGoogleCloudLocationLocation(api.GoogleCloudLocationLocation o) { o.displayName!, unittest.equals('foo'), ); - checkUnnamed105(o.labels!); + checkUnnamed107(o.labels!); unittest.expect( o.locationId!, unittest.equals('foo'), ); - checkUnnamed106(o.metadata!); + checkUnnamed108(o.metadata!); unittest.expect( o.name!, unittest.equals('foo'), @@ -7559,12 +7818,12 @@ void checkGoogleCloudLocationLocation(api.GoogleCloudLocationLocation o) { buildCounterGoogleCloudLocationLocation--; } -core.List buildUnnamed107() => [ +core.List buildUnnamed109() => [ buildGoogleIamV1AuditLogConfig(), buildGoogleIamV1AuditLogConfig(), ]; -void checkUnnamed107(core.List o) { +void checkUnnamed109(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleIamV1AuditLogConfig(o[0]); checkGoogleIamV1AuditLogConfig(o[1]); @@ -7575,7 +7834,7 @@ api.GoogleIamV1AuditConfig buildGoogleIamV1AuditConfig() { final o = api.GoogleIamV1AuditConfig(); buildCounterGoogleIamV1AuditConfig++; if (buildCounterGoogleIamV1AuditConfig < 3) { - o.auditLogConfigs = buildUnnamed107(); + o.auditLogConfigs = buildUnnamed109(); o.service = 'foo'; } buildCounterGoogleIamV1AuditConfig--; @@ -7585,7 +7844,7 @@ api.GoogleIamV1AuditConfig buildGoogleIamV1AuditConfig() { void checkGoogleIamV1AuditConfig(api.GoogleIamV1AuditConfig o) { buildCounterGoogleIamV1AuditConfig++; if (buildCounterGoogleIamV1AuditConfig < 3) { - checkUnnamed107(o.auditLogConfigs!); + checkUnnamed109(o.auditLogConfigs!); unittest.expect( o.service!, unittest.equals('foo'), @@ -7594,12 +7853,12 @@ void checkGoogleIamV1AuditConfig(api.GoogleIamV1AuditConfig o) { buildCounterGoogleIamV1AuditConfig--; } -core.List buildUnnamed108() => [ +core.List buildUnnamed110() => [ 'foo', 'foo', ]; -void checkUnnamed108(core.List o) { +void checkUnnamed110(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -7616,7 +7875,7 @@ api.GoogleIamV1AuditLogConfig buildGoogleIamV1AuditLogConfig() { final o = api.GoogleIamV1AuditLogConfig(); buildCounterGoogleIamV1AuditLogConfig++; if (buildCounterGoogleIamV1AuditLogConfig < 3) { - o.exemptedMembers = buildUnnamed108(); + o.exemptedMembers = buildUnnamed110(); o.logType = 'foo'; } buildCounterGoogleIamV1AuditLogConfig--; @@ -7626,7 +7885,7 @@ api.GoogleIamV1AuditLogConfig buildGoogleIamV1AuditLogConfig() { void checkGoogleIamV1AuditLogConfig(api.GoogleIamV1AuditLogConfig o) { buildCounterGoogleIamV1AuditLogConfig++; if (buildCounterGoogleIamV1AuditLogConfig < 3) { - checkUnnamed108(o.exemptedMembers!); + checkUnnamed110(o.exemptedMembers!); unittest.expect( o.logType!, unittest.equals('foo'), @@ -7635,12 +7894,12 @@ void checkGoogleIamV1AuditLogConfig(api.GoogleIamV1AuditLogConfig o) { buildCounterGoogleIamV1AuditLogConfig--; } -core.List buildUnnamed109() => [ +core.List buildUnnamed111() => [ 'foo', 'foo', ]; -void checkUnnamed109(core.List o) { +void checkUnnamed111(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -7658,7 +7917,7 @@ api.GoogleIamV1Binding 
buildGoogleIamV1Binding() { buildCounterGoogleIamV1Binding++; if (buildCounterGoogleIamV1Binding < 3) { o.condition = buildGoogleTypeExpr(); - o.members = buildUnnamed109(); + o.members = buildUnnamed111(); o.role = 'foo'; } buildCounterGoogleIamV1Binding--; @@ -7669,7 +7928,7 @@ void checkGoogleIamV1Binding(api.GoogleIamV1Binding o) { buildCounterGoogleIamV1Binding++; if (buildCounterGoogleIamV1Binding < 3) { checkGoogleTypeExpr(o.condition!); - checkUnnamed109(o.members!); + checkUnnamed111(o.members!); unittest.expect( o.role!, unittest.equals('foo'), @@ -7678,23 +7937,23 @@ void checkGoogleIamV1Binding(api.GoogleIamV1Binding o) { buildCounterGoogleIamV1Binding--; } -core.List buildUnnamed110() => [ +core.List buildUnnamed112() => [ buildGoogleIamV1AuditConfig(), buildGoogleIamV1AuditConfig(), ]; -void checkUnnamed110(core.List o) { +void checkUnnamed112(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleIamV1AuditConfig(o[0]); checkGoogleIamV1AuditConfig(o[1]); } -core.List buildUnnamed111() => [ +core.List buildUnnamed113() => [ buildGoogleIamV1Binding(), buildGoogleIamV1Binding(), ]; -void checkUnnamed111(core.List o) { +void checkUnnamed113(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleIamV1Binding(o[0]); checkGoogleIamV1Binding(o[1]); @@ -7705,8 +7964,8 @@ api.GoogleIamV1Policy buildGoogleIamV1Policy() { final o = api.GoogleIamV1Policy(); buildCounterGoogleIamV1Policy++; if (buildCounterGoogleIamV1Policy < 3) { - o.auditConfigs = buildUnnamed110(); - o.bindings = buildUnnamed111(); + o.auditConfigs = buildUnnamed112(); + o.bindings = buildUnnamed113(); o.etag = 'foo'; o.version = 42; } @@ -7717,8 +7976,8 @@ api.GoogleIamV1Policy buildGoogleIamV1Policy() { void checkGoogleIamV1Policy(api.GoogleIamV1Policy o) { buildCounterGoogleIamV1Policy++; if (buildCounterGoogleIamV1Policy < 3) { - checkUnnamed110(o.auditConfigs!); - checkUnnamed111(o.bindings!); + checkUnnamed112(o.auditConfigs!); + checkUnnamed113(o.bindings!); unittest.expect( o.etag!, unittest.equals('foo'), @@ -7755,12 +8014,12 @@ void checkGoogleIamV1SetIamPolicyRequest(api.GoogleIamV1SetIamPolicyRequest o) { buildCounterGoogleIamV1SetIamPolicyRequest--; } -core.List buildUnnamed112() => [ +core.List buildUnnamed114() => [ 'foo', 'foo', ]; -void checkUnnamed112(core.List o) { +void checkUnnamed114(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -7778,7 +8037,7 @@ api.GoogleIamV1TestIamPermissionsRequest final o = api.GoogleIamV1TestIamPermissionsRequest(); buildCounterGoogleIamV1TestIamPermissionsRequest++; if (buildCounterGoogleIamV1TestIamPermissionsRequest < 3) { - o.permissions = buildUnnamed112(); + o.permissions = buildUnnamed114(); } buildCounterGoogleIamV1TestIamPermissionsRequest--; return o; @@ -7788,17 +8047,17 @@ void checkGoogleIamV1TestIamPermissionsRequest( api.GoogleIamV1TestIamPermissionsRequest o) { buildCounterGoogleIamV1TestIamPermissionsRequest++; if (buildCounterGoogleIamV1TestIamPermissionsRequest < 3) { - checkUnnamed112(o.permissions!); + checkUnnamed114(o.permissions!); } buildCounterGoogleIamV1TestIamPermissionsRequest--; } -core.List buildUnnamed113() => [ +core.List buildUnnamed115() => [ 'foo', 'foo', ]; -void checkUnnamed113(core.List o) { +void checkUnnamed115(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -7816,7 +8075,7 @@ api.GoogleIamV1TestIamPermissionsResponse final o = api.GoogleIamV1TestIamPermissionsResponse(); buildCounterGoogleIamV1TestIamPermissionsResponse++; if 
(buildCounterGoogleIamV1TestIamPermissionsResponse < 3) { - o.permissions = buildUnnamed113(); + o.permissions = buildUnnamed115(); } buildCounterGoogleIamV1TestIamPermissionsResponse--; return o; @@ -7826,7 +8085,7 @@ void checkGoogleIamV1TestIamPermissionsResponse( api.GoogleIamV1TestIamPermissionsResponse o) { buildCounterGoogleIamV1TestIamPermissionsResponse++; if (buildCounterGoogleIamV1TestIamPermissionsResponse < 3) { - checkUnnamed113(o.permissions!); + checkUnnamed115(o.permissions!); } buildCounterGoogleIamV1TestIamPermissionsResponse--; } @@ -7848,12 +8107,12 @@ void checkGoogleLongrunningCancelOperationRequest( buildCounterGoogleLongrunningCancelOperationRequest--; } -core.List buildUnnamed114() => [ +core.List buildUnnamed116() => [ buildGoogleLongrunningOperation(), buildGoogleLongrunningOperation(), ]; -void checkUnnamed114(core.List o) { +void checkUnnamed116(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleLongrunningOperation(o[0]); checkGoogleLongrunningOperation(o[1]); @@ -7866,7 +8125,7 @@ api.GoogleLongrunningListOperationsResponse buildCounterGoogleLongrunningListOperationsResponse++; if (buildCounterGoogleLongrunningListOperationsResponse < 3) { o.nextPageToken = 'foo'; - o.operations = buildUnnamed114(); + o.operations = buildUnnamed116(); } buildCounterGoogleLongrunningListOperationsResponse--; return o; @@ -7880,12 +8139,12 @@ void checkGoogleLongrunningListOperationsResponse( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed114(o.operations!); + checkUnnamed116(o.operations!); } buildCounterGoogleLongrunningListOperationsResponse--; } -core.Map buildUnnamed115() => { +core.Map buildUnnamed117() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -7898,7 +8157,7 @@ core.Map buildUnnamed115() => { }, }; -void checkUnnamed115(core.Map o) { +void checkUnnamed117(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted5 = (o['x']!) as core.Map; unittest.expect(casted5, unittest.hasLength(3)); @@ -7930,7 +8189,7 @@ void checkUnnamed115(core.Map o) { ); } -core.Map buildUnnamed116() => { +core.Map buildUnnamed118() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -7943,7 +8202,7 @@ core.Map buildUnnamed116() => { }, }; -void checkUnnamed116(core.Map o) { +void checkUnnamed118(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted7 = (o['x']!) 
as core.Map; unittest.expect(casted7, unittest.hasLength(3)); @@ -7982,9 +8241,9 @@ api.GoogleLongrunningOperation buildGoogleLongrunningOperation() { if (buildCounterGoogleLongrunningOperation < 3) { o.done = true; o.error = buildGoogleRpcStatus(); - o.metadata = buildUnnamed115(); + o.metadata = buildUnnamed117(); o.name = 'foo'; - o.response = buildUnnamed116(); + o.response = buildUnnamed118(); } buildCounterGoogleLongrunningOperation--; return o; @@ -7995,17 +8254,17 @@ void checkGoogleLongrunningOperation(api.GoogleLongrunningOperation o) { if (buildCounterGoogleLongrunningOperation < 3) { unittest.expect(o.done!, unittest.isTrue); checkGoogleRpcStatus(o.error!); - checkUnnamed115(o.metadata!); + checkUnnamed117(o.metadata!); unittest.expect( o.name!, unittest.equals('foo'), ); - checkUnnamed116(o.response!); + checkUnnamed118(o.response!); } buildCounterGoogleLongrunningOperation--; } -core.Map buildUnnamed117() => { +core.Map buildUnnamed119() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -8018,7 +8277,7 @@ core.Map buildUnnamed117() => { }, }; -void checkUnnamed117(core.Map o) { +void checkUnnamed119(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted9 = (o['x']!) as core.Map; unittest.expect(casted9, unittest.hasLength(3)); @@ -8050,15 +8309,15 @@ void checkUnnamed117(core.Map o) { ); } -core.List> buildUnnamed118() => [ - buildUnnamed117(), - buildUnnamed117(), +core.List> buildUnnamed120() => [ + buildUnnamed119(), + buildUnnamed119(), ]; -void checkUnnamed118(core.List> o) { +void checkUnnamed120(core.List> o) { unittest.expect(o, unittest.hasLength(2)); - checkUnnamed117(o[0]); - checkUnnamed117(o[1]); + checkUnnamed119(o[0]); + checkUnnamed119(o[1]); } core.int buildCounterGoogleRpcStatus = 0; @@ -8067,7 +8326,7 @@ api.GoogleRpcStatus buildGoogleRpcStatus() { buildCounterGoogleRpcStatus++; if (buildCounterGoogleRpcStatus < 3) { o.code = 42; - o.details = buildUnnamed118(); + o.details = buildUnnamed120(); o.message = 'foo'; } buildCounterGoogleRpcStatus--; @@ -8081,7 +8340,7 @@ void checkGoogleRpcStatus(api.GoogleRpcStatus o) { o.code!, unittest.equals(42), ); - checkUnnamed118(o.details!); + checkUnnamed120(o.details!); unittest.expect( o.message!, unittest.equals('foo'), @@ -8127,12 +8386,12 @@ void checkGoogleTypeExpr(api.GoogleTypeExpr o) { buildCounterGoogleTypeExpr--; } -core.List buildUnnamed119() => [ +core.List buildUnnamed121() => [ 'foo', 'foo', ]; -void checkUnnamed119(core.List o) { +void checkUnnamed121(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -8144,12 +8403,12 @@ void checkUnnamed119(core.List o) { ); } -core.List buildUnnamed120() => [ +core.List buildUnnamed122() => [ 'foo', 'foo', ]; -void checkUnnamed120(core.List o) { +void checkUnnamed122(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -8161,12 +8420,12 @@ void checkUnnamed120(core.List o) { ); } -core.List buildUnnamed121() => [ +core.List buildUnnamed123() => [ 'foo', 'foo', ]; -void checkUnnamed121(core.List o) { +void checkUnnamed123(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -8178,12 +8437,12 @@ void checkUnnamed121(core.List o) { ); } -core.List buildUnnamed122() => [ +core.List buildUnnamed124() => [ 'foo', 'foo', ]; -void checkUnnamed122(core.List o) { +void checkUnnamed124(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -8195,12 +8454,12 @@ void checkUnnamed122(core.List o) { ); } -core.List buildUnnamed123() => [ +core.List 
buildUnnamed125() => [ 'foo', 'foo', ]; -void checkUnnamed123(core.List o) { +void checkUnnamed125(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -8606,6 +8865,93 @@ void main() { }); }); + unittest.group('obj-schema-GoogleCloudDataplexV1DataDiscoveryResult', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudDataplexV1DataDiscoveryResult(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudDataplexV1DataDiscoveryResult.fromJson( + oJson as core.Map); + checkGoogleCloudDataplexV1DataDiscoveryResult(od); + }); + }); + + unittest.group( + 'obj-schema-GoogleCloudDataplexV1DataDiscoveryResultBigQueryPublishing', + () { + unittest.test('to-json--from-json', () async { + final o = + buildGoogleCloudDataplexV1DataDiscoveryResultBigQueryPublishing(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudDataplexV1DataDiscoveryResultBigQueryPublishing + .fromJson(oJson as core.Map); + checkGoogleCloudDataplexV1DataDiscoveryResultBigQueryPublishing(od); + }); + }); + + unittest.group('obj-schema-GoogleCloudDataplexV1DataDiscoverySpec', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudDataplexV1DataDiscoverySpec(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudDataplexV1DataDiscoverySpec.fromJson( + oJson as core.Map); + checkGoogleCloudDataplexV1DataDiscoverySpec(od); + }); + }); + + unittest.group( + 'obj-schema-GoogleCloudDataplexV1DataDiscoverySpecBigQueryPublishingConfig', + () { + unittest.test('to-json--from-json', () async { + final o = + buildGoogleCloudDataplexV1DataDiscoverySpecBigQueryPublishingConfig(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.GoogleCloudDataplexV1DataDiscoverySpecBigQueryPublishingConfig + .fromJson(oJson as core.Map); + checkGoogleCloudDataplexV1DataDiscoverySpecBigQueryPublishingConfig(od); + }); + }); + + unittest.group( + 'obj-schema-GoogleCloudDataplexV1DataDiscoverySpecStorageConfig', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudDataplexV1DataDiscoverySpecStorageConfig(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.GoogleCloudDataplexV1DataDiscoverySpecStorageConfig.fromJson( + oJson as core.Map); + checkGoogleCloudDataplexV1DataDiscoverySpecStorageConfig(od); + }); + }); + + unittest.group( + 'obj-schema-GoogleCloudDataplexV1DataDiscoverySpecStorageConfigCsvOptions', + () { + unittest.test('to-json--from-json', () async { + final o = + buildGoogleCloudDataplexV1DataDiscoverySpecStorageConfigCsvOptions(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.GoogleCloudDataplexV1DataDiscoverySpecStorageConfigCsvOptions + .fromJson(oJson as core.Map); + checkGoogleCloudDataplexV1DataDiscoverySpecStorageConfigCsvOptions(od); + }); + }); + + unittest.group( + 'obj-schema-GoogleCloudDataplexV1DataDiscoverySpecStorageConfigJsonOptions', + () { + unittest.test('to-json--from-json', () async { + final o = + buildGoogleCloudDataplexV1DataDiscoverySpecStorageConfigJsonOptions(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.GoogleCloudDataplexV1DataDiscoverySpecStorageConfigJsonOptions + .fromJson(oJson as core.Map); + checkGoogleCloudDataplexV1DataDiscoverySpecStorageConfigJsonOptions(od); + }); + }); + unittest.group('obj-schema-GoogleCloudDataplexV1DataProfileResult', () { 
unittest.test('to-json--from-json', () async { final o = buildGoogleCloudDataplexV1DataProfileResult(); @@ -10772,9 +11118,9 @@ void main() { final mock = HttpServerMock(); final res = api.CloudDataplexApi(mock).projects.locations; final arg_name = 'foo'; - final arg_aspectTypes_1 = buildUnnamed119(); + final arg_aspectTypes_1 = buildUnnamed121(); final arg_entry = 'foo'; - final arg_paths = buildUnnamed120(); + final arg_paths = buildUnnamed122(); final arg_view = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -12042,6 +12388,7 @@ void main() { final mock = HttpServerMock(); final res = api.CloudDataplexApi(mock).projects.locations.dataScans; final arg_name = 'foo'; + final arg_force = true; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -12075,6 +12422,10 @@ void main() { ); } } + unittest.expect( + queryMap['force']!.first, + unittest.equals('$arg_force'), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -12086,7 +12437,8 @@ void main() { final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.delete(arg_name, $fields: arg_$fields); + final response = + await res.delete(arg_name, force: arg_force, $fields: arg_$fields); checkGoogleLongrunningOperation( response as api.GoogleLongrunningOperation); }); @@ -14494,8 +14846,8 @@ void main() { final res = api.CloudDataplexApi(mock).projects.locations.entryGroups.entries; final arg_name = 'foo'; - final arg_aspectTypes = buildUnnamed121(); - final arg_paths = buildUnnamed122(); + final arg_aspectTypes = buildUnnamed123(); + final arg_paths = buildUnnamed124(); final arg_view = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -14643,7 +14995,7 @@ void main() { final arg_request = buildGoogleCloudDataplexV1Entry(); final arg_name = 'foo'; final arg_allowMissing = true; - final arg_aspectKeys = buildUnnamed123(); + final arg_aspectKeys = buildUnnamed125(); final arg_deleteMissingAspects = true; final arg_updateMask = 'foo'; final arg_$fields = 'foo'; @@ -20891,6 +21243,7 @@ void main() { final arg_request = buildGoogleCloudDataplexV1MetadataJob(); final arg_parent = 'foo'; final arg_metadataJobId = 'foo'; + final arg_validateOnly = true; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final obj = api.GoogleCloudDataplexV1MetadataJob.fromJson( @@ -20932,6 +21285,10 @@ void main() { queryMap['metadataJobId']!.first, unittest.equals(arg_metadataJobId), ); + unittest.expect( + queryMap['validateOnly']!.first, + unittest.equals('$arg_validateOnly'), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -20944,7 +21301,9 @@ void main() { return async.Future.value(stringResponse(200, h, resp)); }), true); final response = await res.create(arg_request, arg_parent, - metadataJobId: arg_metadataJobId, $fields: arg_$fields); + metadataJobId: arg_metadataJobId, + validateOnly: arg_validateOnly, + $fields: arg_$fields); checkGoogleLongrunningOperation( response as api.GoogleLongrunningOperation); }); diff --git a/generated/googleapis/test/datastore/v1_test.dart b/generated/googleapis/test/datastore/v1_test.dart index 0f7f1ca21..d02c62093 100644 --- a/generated/googleapis/test/datastore/v1_test.dart +++ b/generated/googleapis/test/datastore/v1_test.dart @@ 
-686,6 +686,47 @@ void checkFilter(api.Filter o) { buildCounterFilter--; } +core.int buildCounterFindNearest = 0; +api.FindNearest buildFindNearest() { + final o = api.FindNearest(); + buildCounterFindNearest++; + if (buildCounterFindNearest < 3) { + o.distanceMeasure = 'foo'; + o.distanceResultProperty = 'foo'; + o.distanceThreshold = 42.0; + o.limit = 42; + o.queryVector = buildValue(); + o.vectorProperty = buildPropertyReference(); + } + buildCounterFindNearest--; + return o; +} + +void checkFindNearest(api.FindNearest o) { + buildCounterFindNearest++; + if (buildCounterFindNearest < 3) { + unittest.expect( + o.distanceMeasure!, + unittest.equals('foo'), + ); + unittest.expect( + o.distanceResultProperty!, + unittest.equals('foo'), + ); + unittest.expect( + o.distanceThreshold!, + unittest.equals(42.0), + ); + unittest.expect( + o.limit!, + unittest.equals(42), + ); + checkValue(o.queryVector!); + checkPropertyReference(o.vectorProperty!); + } + buildCounterFindNearest--; +} + core.List buildUnnamed11() => [ 'foo', 'foo', @@ -1830,6 +1871,7 @@ api.Query buildQuery() { o.distinctOn = buildUnnamed32(); o.endCursor = 'foo'; o.filter = buildFilter(); + o.findNearest = buildFindNearest(); o.kind = buildUnnamed33(); o.limit = 42; o.offset = 42; @@ -1850,6 +1892,7 @@ void checkQuery(api.Query o) { unittest.equals('foo'), ); checkFilter(o.filter!); + checkFindNearest(o.findNearest!); checkUnnamed33(o.kind!); unittest.expect( o.limit!, @@ -2625,6 +2668,16 @@ void main() { }); }); + unittest.group('obj-schema-FindNearest', () { + unittest.test('to-json--from-json', () async { + final o = buildFindNearest(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.FindNearest.fromJson( + oJson as core.Map); + checkFindNearest(od); + }); + }); + unittest.group('obj-schema-GoogleDatastoreAdminV1EntityFilter', () { unittest.test('to-json--from-json', () async { final o = buildGoogleDatastoreAdminV1EntityFilter(); diff --git a/generated/googleapis/test/datastream/v1_test.dart b/generated/googleapis/test/datastream/v1_test.dart index 504372b7d..edaeb5714 100644 --- a/generated/googleapis/test/datastream/v1_test.dart +++ b/generated/googleapis/test/datastream/v1_test.dart @@ -191,6 +191,42 @@ void checkBigQueryProfile(api.BigQueryProfile o) { buildCounterBigQueryProfile--; } +core.int buildCounterBinaryLogParser = 0; +api.BinaryLogParser buildBinaryLogParser() { + final o = api.BinaryLogParser(); + buildCounterBinaryLogParser++; + if (buildCounterBinaryLogParser < 3) { + o.logFileDirectories = buildLogFileDirectories(); + o.oracleAsmLogFileAccess = buildOracleAsmLogFileAccess(); + } + buildCounterBinaryLogParser--; + return o; +} + +void checkBinaryLogParser(api.BinaryLogParser o) { + buildCounterBinaryLogParser++; + if (buildCounterBinaryLogParser < 3) { + checkLogFileDirectories(o.logFileDirectories!); + checkOracleAsmLogFileAccess(o.oracleAsmLogFileAccess!); + } + buildCounterBinaryLogParser--; +} + +core.int buildCounterBinaryLogPosition = 0; +api.BinaryLogPosition buildBinaryLogPosition() { + final o = api.BinaryLogPosition(); + buildCounterBinaryLogPosition++; + if (buildCounterBinaryLogPosition < 3) {} + buildCounterBinaryLogPosition--; + return o; +} + +void checkBinaryLogPosition(api.BinaryLogPosition o) { + buildCounterBinaryLogPosition++; + if (buildCounterBinaryLogPosition < 3) {} + buildCounterBinaryLogPosition--; +} + core.int buildCounterCancelOperationRequest = 0; api.CancelOperationRequest buildCancelOperationRequest() { final o = api.CancelOperationRequest(); @@ 
-659,6 +695,21 @@ void checkGcsProfile(api.GcsProfile o) { buildCounterGcsProfile--; } +core.int buildCounterGtid = 0; +api.Gtid buildGtid() { + final o = api.Gtid(); + buildCounterGtid++; + if (buildCounterGtid < 3) {} + buildCounterGtid--; + return o; +} + +void checkGtid(api.Gtid o) { + buildCounterGtid++; + if (buildCounterGtid < 3) {} + buildCounterGtid--; +} + core.int buildCounterJsonFileFormat = 0; api.JsonFileFormat buildJsonFileFormat() { final o = api.JsonFileFormat(); @@ -1105,6 +1156,48 @@ void checkLocation(api.Location o) { buildCounterLocation--; } +core.int buildCounterLogFileDirectories = 0; +api.LogFileDirectories buildLogFileDirectories() { + final o = api.LogFileDirectories(); + buildCounterLogFileDirectories++; + if (buildCounterLogFileDirectories < 3) { + o.archivedLogDirectory = 'foo'; + o.onlineLogDirectory = 'foo'; + } + buildCounterLogFileDirectories--; + return o; +} + +void checkLogFileDirectories(api.LogFileDirectories o) { + buildCounterLogFileDirectories++; + if (buildCounterLogFileDirectories < 3) { + unittest.expect( + o.archivedLogDirectory!, + unittest.equals('foo'), + ); + unittest.expect( + o.onlineLogDirectory!, + unittest.equals('foo'), + ); + } + buildCounterLogFileDirectories--; +} + +core.int buildCounterLogMiner = 0; +api.LogMiner buildLogMiner() { + final o = api.LogMiner(); + buildCounterLogMiner++; + if (buildCounterLogMiner < 3) {} + buildCounterLogMiner--; + return o; +} + +void checkLogMiner(api.LogMiner o) { + buildCounterLogMiner++; + if (buildCounterLogMiner < 3) {} + buildCounterLogMiner--; +} + core.int buildCounterLookupStreamObjectRequest = 0; api.LookupStreamObjectRequest buildLookupStreamObjectRequest() { final o = api.LookupStreamObjectRequest(); @@ -1373,7 +1466,9 @@ api.MysqlSourceConfig buildMysqlSourceConfig() { final o = api.MysqlSourceConfig(); buildCounterMysqlSourceConfig++; if (buildCounterMysqlSourceConfig < 3) { + o.binaryLogPosition = buildBinaryLogPosition(); o.excludeObjects = buildMysqlRdbms(); + o.gtid = buildGtid(); o.includeObjects = buildMysqlRdbms(); o.maxConcurrentBackfillTasks = 42; o.maxConcurrentCdcTasks = 42; @@ -1385,7 +1480,9 @@ api.MysqlSourceConfig buildMysqlSourceConfig() { void checkMysqlSourceConfig(api.MysqlSourceConfig o) { buildCounterMysqlSourceConfig++; if (buildCounterMysqlSourceConfig < 3) { + checkBinaryLogPosition(o.binaryLogPosition!); checkMysqlRdbms(o.excludeObjects!); + checkGtid(o.gtid!); checkMysqlRdbms(o.includeObjects!); unittest.expect( o.maxConcurrentBackfillTasks!, @@ -1607,6 +1704,84 @@ void checkOperation(api.Operation o) { buildCounterOperation--; } +core.Map buildUnnamed22() => { + 'x': 'foo', + 'y': 'foo', + }; + +void checkUnnamed22(core.Map o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o['x']!, + unittest.equals('foo'), + ); + unittest.expect( + o['y']!, + unittest.equals('foo'), + ); +} + +core.int buildCounterOracleAsmConfig = 0; +api.OracleAsmConfig buildOracleAsmConfig() { + final o = api.OracleAsmConfig(); + buildCounterOracleAsmConfig++; + if (buildCounterOracleAsmConfig < 3) { + o.asmService = 'foo'; + o.connectionAttributes = buildUnnamed22(); + o.hostname = 'foo'; + o.oracleSslConfig = buildOracleSslConfig(); + o.password = 'foo'; + o.port = 42; + o.username = 'foo'; + } + buildCounterOracleAsmConfig--; + return o; +} + +void checkOracleAsmConfig(api.OracleAsmConfig o) { + buildCounterOracleAsmConfig++; + if (buildCounterOracleAsmConfig < 3) { + unittest.expect( + o.asmService!, + unittest.equals('foo'), + ); + 
checkUnnamed22(o.connectionAttributes!); + unittest.expect( + o.hostname!, + unittest.equals('foo'), + ); + checkOracleSslConfig(o.oracleSslConfig!); + unittest.expect( + o.password!, + unittest.equals('foo'), + ); + unittest.expect( + o.port!, + unittest.equals(42), + ); + unittest.expect( + o.username!, + unittest.equals('foo'), + ); + } + buildCounterOracleAsmConfig--; +} + +core.int buildCounterOracleAsmLogFileAccess = 0; +api.OracleAsmLogFileAccess buildOracleAsmLogFileAccess() { + final o = api.OracleAsmLogFileAccess(); + buildCounterOracleAsmLogFileAccess++; + if (buildCounterOracleAsmLogFileAccess < 3) {} + buildCounterOracleAsmLogFileAccess--; + return o; +} + +void checkOracleAsmLogFileAccess(api.OracleAsmLogFileAccess o) { + buildCounterOracleAsmLogFileAccess++; + if (buildCounterOracleAsmLogFileAccess < 3) {} + buildCounterOracleAsmLogFileAccess--; +} + core.int buildCounterOracleColumn = 0; api.OracleColumn buildOracleColumn() { final o = api.OracleColumn(); @@ -1690,12 +1865,12 @@ void checkOracleObjectIdentifier(api.OracleObjectIdentifier o) { buildCounterOracleObjectIdentifier--; } -core.Map buildUnnamed22() => { +core.Map buildUnnamed23() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed22(core.Map o) { +void checkUnnamed23(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -1712,12 +1887,14 @@ api.OracleProfile buildOracleProfile() { final o = api.OracleProfile(); buildCounterOracleProfile++; if (buildCounterOracleProfile < 3) { - o.connectionAttributes = buildUnnamed22(); + o.connectionAttributes = buildUnnamed23(); o.databaseService = 'foo'; o.hostname = 'foo'; + o.oracleAsmConfig = buildOracleAsmConfig(); o.oracleSslConfig = buildOracleSslConfig(); o.password = 'foo'; o.port = 42; + o.secretManagerStoredPassword = 'foo'; o.username = 'foo'; } buildCounterOracleProfile--; @@ -1727,7 +1904,7 @@ api.OracleProfile buildOracleProfile() { void checkOracleProfile(api.OracleProfile o) { buildCounterOracleProfile++; if (buildCounterOracleProfile < 3) { - checkUnnamed22(o.connectionAttributes!); + checkUnnamed23(o.connectionAttributes!); unittest.expect( o.databaseService!, unittest.equals('foo'), @@ -1736,6 +1913,7 @@ void checkOracleProfile(api.OracleProfile o) { o.hostname!, unittest.equals('foo'), ); + checkOracleAsmConfig(o.oracleAsmConfig!); checkOracleSslConfig(o.oracleSslConfig!); unittest.expect( o.password!, @@ -1745,6 +1923,10 @@ void checkOracleProfile(api.OracleProfile o) { o.port!, unittest.equals(42), ); + unittest.expect( + o.secretManagerStoredPassword!, + unittest.equals('foo'), + ); unittest.expect( o.username!, unittest.equals('foo'), @@ -1753,12 +1935,12 @@ void checkOracleProfile(api.OracleProfile o) { buildCounterOracleProfile--; } -core.List buildUnnamed23() => [ +core.List buildUnnamed24() => [ buildOracleSchema(), buildOracleSchema(), ]; -void checkUnnamed23(core.List o) { +void checkUnnamed24(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkOracleSchema(o[0]); checkOracleSchema(o[1]); @@ -1769,7 +1951,7 @@ api.OracleRdbms buildOracleRdbms() { final o = api.OracleRdbms(); buildCounterOracleRdbms++; if (buildCounterOracleRdbms < 3) { - o.oracleSchemas = buildUnnamed23(); + o.oracleSchemas = buildUnnamed24(); } buildCounterOracleRdbms--; return o; @@ -1778,17 +1960,17 @@ api.OracleRdbms buildOracleRdbms() { void checkOracleRdbms(api.OracleRdbms o) { buildCounterOracleRdbms++; if (buildCounterOracleRdbms < 3) { - checkUnnamed23(o.oracleSchemas!); + checkUnnamed24(o.oracleSchemas!); } 
buildCounterOracleRdbms--; } -core.List buildUnnamed24() => [ +core.List buildUnnamed25() => [ buildOracleTable(), buildOracleTable(), ]; -void checkUnnamed24(core.List o) { +void checkUnnamed25(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkOracleTable(o[0]); checkOracleTable(o[1]); @@ -1799,7 +1981,7 @@ api.OracleSchema buildOracleSchema() { final o = api.OracleSchema(); buildCounterOracleSchema++; if (buildCounterOracleSchema < 3) { - o.oracleTables = buildUnnamed24(); + o.oracleTables = buildUnnamed25(); o.schema = 'foo'; } buildCounterOracleSchema--; @@ -1809,7 +1991,7 @@ api.OracleSchema buildOracleSchema() { void checkOracleSchema(api.OracleSchema o) { buildCounterOracleSchema++; if (buildCounterOracleSchema < 3) { - checkUnnamed24(o.oracleTables!); + checkUnnamed25(o.oracleTables!); unittest.expect( o.schema!, unittest.equals('foo'), @@ -1845,9 +2027,11 @@ api.OracleSourceConfig buildOracleSourceConfig() { final o = api.OracleSourceConfig(); buildCounterOracleSourceConfig++; if (buildCounterOracleSourceConfig < 3) { + o.binaryLogParser = buildBinaryLogParser(); o.dropLargeObjects = buildDropLargeObjects(); o.excludeObjects = buildOracleRdbms(); o.includeObjects = buildOracleRdbms(); + o.logMiner = buildLogMiner(); o.maxConcurrentBackfillTasks = 42; o.maxConcurrentCdcTasks = 42; o.streamLargeObjects = buildStreamLargeObjects(); @@ -1859,9 +2043,11 @@ api.OracleSourceConfig buildOracleSourceConfig() { void checkOracleSourceConfig(api.OracleSourceConfig o) { buildCounterOracleSourceConfig++; if (buildCounterOracleSourceConfig < 3) { + checkBinaryLogParser(o.binaryLogParser!); checkDropLargeObjects(o.dropLargeObjects!); checkOracleRdbms(o.excludeObjects!); checkOracleRdbms(o.includeObjects!); + checkLogMiner(o.logMiner!); unittest.expect( o.maxConcurrentBackfillTasks!, unittest.equals(42), @@ -1899,12 +2085,12 @@ void checkOracleSslConfig(api.OracleSslConfig o) { buildCounterOracleSslConfig--; } -core.List buildUnnamed25() => [ +core.List buildUnnamed26() => [ buildOracleColumn(), buildOracleColumn(), ]; -void checkUnnamed25(core.List o) { +void checkUnnamed26(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkOracleColumn(o[0]); checkOracleColumn(o[1]); @@ -1915,7 +2101,7 @@ api.OracleTable buildOracleTable() { final o = api.OracleTable(); buildCounterOracleTable++; if (buildCounterOracleTable < 3) { - o.oracleColumns = buildUnnamed25(); + o.oracleColumns = buildUnnamed26(); o.table = 'foo'; } buildCounterOracleTable--; @@ -1925,7 +2111,7 @@ api.OracleTable buildOracleTable() { void checkOracleTable(api.OracleTable o) { buildCounterOracleTable++; if (buildCounterOracleTable < 3) { - checkUnnamed25(o.oracleColumns!); + checkUnnamed26(o.oracleColumns!); unittest.expect( o.table!, unittest.equals('foo'), @@ -2054,12 +2240,12 @@ void checkPostgresqlProfile(api.PostgresqlProfile o) { buildCounterPostgresqlProfile--; } -core.List buildUnnamed26() => [ +core.List buildUnnamed27() => [ buildPostgresqlSchema(), buildPostgresqlSchema(), ]; -void checkUnnamed26(core.List o) { +void checkUnnamed27(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkPostgresqlSchema(o[0]); checkPostgresqlSchema(o[1]); @@ -2070,7 +2256,7 @@ api.PostgresqlRdbms buildPostgresqlRdbms() { final o = api.PostgresqlRdbms(); buildCounterPostgresqlRdbms++; if (buildCounterPostgresqlRdbms < 3) { - o.postgresqlSchemas = buildUnnamed26(); + o.postgresqlSchemas = buildUnnamed27(); } buildCounterPostgresqlRdbms--; return o; @@ -2079,17 +2265,17 @@ api.PostgresqlRdbms buildPostgresqlRdbms() 
{ void checkPostgresqlRdbms(api.PostgresqlRdbms o) { buildCounterPostgresqlRdbms++; if (buildCounterPostgresqlRdbms < 3) { - checkUnnamed26(o.postgresqlSchemas!); + checkUnnamed27(o.postgresqlSchemas!); } buildCounterPostgresqlRdbms--; } -core.List buildUnnamed27() => [ +core.List buildUnnamed28() => [ buildPostgresqlTable(), buildPostgresqlTable(), ]; -void checkUnnamed27(core.List o) { +void checkUnnamed28(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkPostgresqlTable(o[0]); checkPostgresqlTable(o[1]); @@ -2100,7 +2286,7 @@ api.PostgresqlSchema buildPostgresqlSchema() { final o = api.PostgresqlSchema(); buildCounterPostgresqlSchema++; if (buildCounterPostgresqlSchema < 3) { - o.postgresqlTables = buildUnnamed27(); + o.postgresqlTables = buildUnnamed28(); o.schema = 'foo'; } buildCounterPostgresqlSchema--; @@ -2110,7 +2296,7 @@ api.PostgresqlSchema buildPostgresqlSchema() { void checkPostgresqlSchema(api.PostgresqlSchema o) { buildCounterPostgresqlSchema++; if (buildCounterPostgresqlSchema < 3) { - checkUnnamed27(o.postgresqlTables!); + checkUnnamed28(o.postgresqlTables!); unittest.expect( o.schema!, unittest.equals('foo'), @@ -2155,12 +2341,12 @@ void checkPostgresqlSourceConfig(api.PostgresqlSourceConfig o) { buildCounterPostgresqlSourceConfig--; } -core.List buildUnnamed28() => [ +core.List buildUnnamed29() => [ buildPostgresqlColumn(), buildPostgresqlColumn(), ]; -void checkUnnamed28(core.List o) { +void checkUnnamed29(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkPostgresqlColumn(o[0]); checkPostgresqlColumn(o[1]); @@ -2171,7 +2357,7 @@ api.PostgresqlTable buildPostgresqlTable() { final o = api.PostgresqlTable(); buildCounterPostgresqlTable++; if (buildCounterPostgresqlTable < 3) { - o.postgresqlColumns = buildUnnamed28(); + o.postgresqlColumns = buildUnnamed29(); o.table = 'foo'; } buildCounterPostgresqlTable--; @@ -2181,7 +2367,7 @@ api.PostgresqlTable buildPostgresqlTable() { void checkPostgresqlTable(api.PostgresqlTable o) { buildCounterPostgresqlTable++; if (buildCounterPostgresqlTable < 3) { - checkUnnamed28(o.postgresqlColumns!); + checkUnnamed29(o.postgresqlColumns!); unittest.expect( o.table!, unittest.equals('foo'), @@ -2190,12 +2376,12 @@ void checkPostgresqlTable(api.PostgresqlTable o) { buildCounterPostgresqlTable--; } -core.Map buildUnnamed29() => { +core.Map buildUnnamed30() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed29(core.Map o) { +void checkUnnamed30(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -2215,7 +2401,7 @@ api.PrivateConnection buildPrivateConnection() { o.createTime = 'foo'; o.displayName = 'foo'; o.error = buildError(); - o.labels = buildUnnamed29(); + o.labels = buildUnnamed30(); o.name = 'foo'; o.state = 'foo'; o.updateTime = 'foo'; @@ -2237,7 +2423,7 @@ void checkPrivateConnection(api.PrivateConnection o) { unittest.equals('foo'), ); checkError(o.error!); - checkUnnamed29(o.labels!); + checkUnnamed30(o.labels!); unittest.expect( o.name!, unittest.equals('foo'), @@ -2277,12 +2463,12 @@ void checkPrivateConnectivity(api.PrivateConnectivity o) { buildCounterPrivateConnectivity--; } -core.Map buildUnnamed30() => { +core.Map buildUnnamed31() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed30(core.Map o) { +void checkUnnamed31(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -2303,7 +2489,7 @@ api.Route buildRoute() { o.destinationAddress = 'foo'; o.destinationPort = 42; o.displayName = 'foo'; - o.labels = buildUnnamed30(); + o.labels = 
buildUnnamed31(); o.name = 'foo'; o.updateTime = 'foo'; } @@ -2330,7 +2516,7 @@ void checkRoute(api.Route o) { o.displayName!, unittest.equals('foo'), ); - checkUnnamed30(o.labels!); + checkUnnamed31(o.labels!); unittest.expect( o.name!, unittest.equals('foo'), @@ -2467,6 +2653,7 @@ api.SpecificStartPosition buildSpecificStartPosition() { if (buildCounterSpecificStartPosition < 3) { o.mysqlLogPosition = buildMysqlLogPosition(); o.oracleScnPosition = buildOracleScnPosition(); + o.sqlServerLsnPosition = buildSqlServerLsnPosition(); } buildCounterSpecificStartPosition--; return o; @@ -2477,6 +2664,7 @@ void checkSpecificStartPosition(api.SpecificStartPosition o) { if (buildCounterSpecificStartPosition < 3) { checkMysqlLogPosition(o.mysqlLogPosition!); checkOracleScnPosition(o.oracleScnPosition!); + checkSqlServerLsnPosition(o.sqlServerLsnPosition!); } buildCounterSpecificStartPosition--; } @@ -2547,6 +2735,28 @@ void checkSqlServerColumn(api.SqlServerColumn o) { buildCounterSqlServerColumn--; } +core.int buildCounterSqlServerLsnPosition = 0; +api.SqlServerLsnPosition buildSqlServerLsnPosition() { + final o = api.SqlServerLsnPosition(); + buildCounterSqlServerLsnPosition++; + if (buildCounterSqlServerLsnPosition < 3) { + o.lsn = 'foo'; + } + buildCounterSqlServerLsnPosition--; + return o; +} + +void checkSqlServerLsnPosition(api.SqlServerLsnPosition o) { + buildCounterSqlServerLsnPosition++; + if (buildCounterSqlServerLsnPosition < 3) { + unittest.expect( + o.lsn!, + unittest.equals('foo'), + ); + } + buildCounterSqlServerLsnPosition--; +} + core.int buildCounterSqlServerObjectIdentifier = 0; api.SqlServerObjectIdentifier buildSqlServerObjectIdentifier() { final o = api.SqlServerObjectIdentifier(); @@ -2616,12 +2826,12 @@ void checkSqlServerProfile(api.SqlServerProfile o) { buildCounterSqlServerProfile--; } -core.List buildUnnamed31() => [ +core.List buildUnnamed32() => [ buildSqlServerSchema(), buildSqlServerSchema(), ]; -void checkUnnamed31(core.List o) { +void checkUnnamed32(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkSqlServerSchema(o[0]); checkSqlServerSchema(o[1]); @@ -2632,7 +2842,7 @@ api.SqlServerRdbms buildSqlServerRdbms() { final o = api.SqlServerRdbms(); buildCounterSqlServerRdbms++; if (buildCounterSqlServerRdbms < 3) { - o.schemas = buildUnnamed31(); + o.schemas = buildUnnamed32(); } buildCounterSqlServerRdbms--; return o; @@ -2641,17 +2851,17 @@ api.SqlServerRdbms buildSqlServerRdbms() { void checkSqlServerRdbms(api.SqlServerRdbms o) { buildCounterSqlServerRdbms++; if (buildCounterSqlServerRdbms < 3) { - checkUnnamed31(o.schemas!); + checkUnnamed32(o.schemas!); } buildCounterSqlServerRdbms--; } -core.List buildUnnamed32() => [ +core.List buildUnnamed33() => [ buildSqlServerTable(), buildSqlServerTable(), ]; -void checkUnnamed32(core.List o) { +void checkUnnamed33(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkSqlServerTable(o[0]); checkSqlServerTable(o[1]); @@ -2663,7 +2873,7 @@ api.SqlServerSchema buildSqlServerSchema() { buildCounterSqlServerSchema++; if (buildCounterSqlServerSchema < 3) { o.schema = 'foo'; - o.tables = buildUnnamed32(); + o.tables = buildUnnamed33(); } buildCounterSqlServerSchema--; return o; @@ -2676,7 +2886,7 @@ void checkSqlServerSchema(api.SqlServerSchema o) { o.schema!, unittest.equals('foo'), ); - checkUnnamed32(o.tables!); + checkUnnamed33(o.tables!); } buildCounterSqlServerSchema--; } @@ -2716,12 +2926,12 @@ void checkSqlServerSourceConfig(api.SqlServerSourceConfig o) { buildCounterSqlServerSourceConfig--; } 
-core.List buildUnnamed33() => [ +core.List buildUnnamed34() => [ buildSqlServerColumn(), buildSqlServerColumn(), ]; -void checkUnnamed33(core.List o) { +void checkUnnamed34(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkSqlServerColumn(o[0]); checkSqlServerColumn(o[1]); @@ -2732,7 +2942,7 @@ api.SqlServerTable buildSqlServerTable() { final o = api.SqlServerTable(); buildCounterSqlServerTable++; if (buildCounterSqlServerTable < 3) { - o.columns = buildUnnamed33(); + o.columns = buildUnnamed34(); o.table = 'foo'; } buildCounterSqlServerTable--; @@ -2742,7 +2952,7 @@ api.SqlServerTable buildSqlServerTable() { void checkSqlServerTable(api.SqlServerTable o) { buildCounterSqlServerTable++; if (buildCounterSqlServerTable < 3) { - checkUnnamed33(o.columns!); + checkUnnamed34(o.columns!); unittest.expect( o.table!, unittest.equals('foo'), @@ -2815,7 +3025,7 @@ void checkStaticServiceIpConnectivity(api.StaticServiceIpConnectivity o) { buildCounterStaticServiceIpConnectivity--; } -core.Map buildUnnamed34() => { +core.Map buildUnnamed35() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -2828,7 +3038,7 @@ core.Map buildUnnamed34() => { }, }; -void checkUnnamed34(core.Map o) { +void checkUnnamed35(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted7 = (o['x']!) as core.Map; unittest.expect(casted7, unittest.hasLength(3)); @@ -2860,15 +3070,15 @@ void checkUnnamed34(core.Map o) { ); } -core.List> buildUnnamed35() => [ - buildUnnamed34(), - buildUnnamed34(), +core.List> buildUnnamed36() => [ + buildUnnamed35(), + buildUnnamed35(), ]; -void checkUnnamed35(core.List> o) { +void checkUnnamed36(core.List> o) { unittest.expect(o, unittest.hasLength(2)); - checkUnnamed34(o[0]); - checkUnnamed34(o[1]); + checkUnnamed35(o[0]); + checkUnnamed35(o[1]); } core.int buildCounterStatus = 0; @@ -2877,7 +3087,7 @@ api.Status buildStatus() { buildCounterStatus++; if (buildCounterStatus < 3) { o.code = 42; - o.details = buildUnnamed35(); + o.details = buildUnnamed36(); o.message = 'foo'; } buildCounterStatus--; @@ -2891,7 +3101,7 @@ void checkStatus(api.Status o) { o.code!, unittest.equals(42), ); - checkUnnamed35(o.details!); + checkUnnamed36(o.details!); unittest.expect( o.message!, unittest.equals('foo'), @@ -2934,23 +3144,23 @@ void checkStopBackfillJobResponse(api.StopBackfillJobResponse o) { buildCounterStopBackfillJobResponse--; } -core.List buildUnnamed36() => [ +core.List buildUnnamed37() => [ buildError(), buildError(), ]; -void checkUnnamed36(core.List o) { +void checkUnnamed37(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkError(o[0]); checkError(o[1]); } -core.Map buildUnnamed37() => { +core.Map buildUnnamed38() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed37(core.Map o) { +void checkUnnamed38(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -2973,8 +3183,8 @@ api.Stream buildStream() { o.customerManagedEncryptionKey = 'foo'; o.destinationConfig = buildDestinationConfig(); o.displayName = 'foo'; - o.errors = buildUnnamed36(); - o.labels = buildUnnamed37(); + o.errors = buildUnnamed37(); + o.labels = buildUnnamed38(); o.lastRecoveryTime = 'foo'; o.name = 'foo'; o.sourceConfig = buildSourceConfig(); @@ -3003,8 +3213,8 @@ void checkStream(api.Stream o) { o.displayName!, unittest.equals('foo'), ); - checkUnnamed36(o.errors!); - checkUnnamed37(o.labels!); + checkUnnamed37(o.errors!); + checkUnnamed38(o.labels!); unittest.expect( o.lastRecoveryTime!, unittest.equals('foo'), @@ -3041,12 +3251,12 @@ void 
checkStreamLargeObjects(api.StreamLargeObjects o) { buildCounterStreamLargeObjects--; } -core.List buildUnnamed38() => [ +core.List buildUnnamed39() => [ buildError(), buildError(), ]; -void checkUnnamed38(core.List o) { +void checkUnnamed39(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkError(o[0]); checkError(o[1]); @@ -3060,7 +3270,7 @@ api.StreamObject buildStreamObject() { o.backfillJob = buildBackfillJob(); o.createTime = 'foo'; o.displayName = 'foo'; - o.errors = buildUnnamed38(); + o.errors = buildUnnamed39(); o.name = 'foo'; o.sourceObject = buildSourceObjectIdentifier(); o.updateTime = 'foo'; @@ -3081,7 +3291,7 @@ void checkStreamObject(api.StreamObject o) { o.displayName!, unittest.equals('foo'), ); - checkUnnamed38(o.errors!); + checkUnnamed39(o.errors!); unittest.expect( o.name!, unittest.equals('foo'), @@ -3193,6 +3403,26 @@ void main() { }); }); + unittest.group('obj-schema-BinaryLogParser', () { + unittest.test('to-json--from-json', () async { + final o = buildBinaryLogParser(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.BinaryLogParser.fromJson( + oJson as core.Map); + checkBinaryLogParser(od); + }); + }); + + unittest.group('obj-schema-BinaryLogPosition', () { + unittest.test('to-json--from-json', () async { + final o = buildBinaryLogPosition(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.BinaryLogPosition.fromJson( + oJson as core.Map); + checkBinaryLogPosition(od); + }); + }); + unittest.group('obj-schema-CancelOperationRequest', () { unittest.test('to-json--from-json', () async { final o = buildCancelOperationRequest(); @@ -3333,6 +3563,16 @@ void main() { }); }); + unittest.group('obj-schema-Gtid', () { + unittest.test('to-json--from-json', () async { + final o = buildGtid(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.Gtid.fromJson(oJson as core.Map); + checkGtid(od); + }); + }); + unittest.group('obj-schema-JsonFileFormat', () { unittest.test('to-json--from-json', () async { final o = buildJsonFileFormat(); @@ -3423,6 +3663,26 @@ void main() { }); }); + unittest.group('obj-schema-LogFileDirectories', () { + unittest.test('to-json--from-json', () async { + final o = buildLogFileDirectories(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.LogFileDirectories.fromJson( + oJson as core.Map); + checkLogFileDirectories(od); + }); + }); + + unittest.group('obj-schema-LogMiner', () { + unittest.test('to-json--from-json', () async { + final o = buildLogMiner(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.LogMiner.fromJson(oJson as core.Map); + checkLogMiner(od); + }); + }); + unittest.group('obj-schema-LookupStreamObjectRequest', () { unittest.test('to-json--from-json', () async { final o = buildLookupStreamObjectRequest(); @@ -3563,6 +3823,26 @@ void main() { }); }); + unittest.group('obj-schema-OracleAsmConfig', () { + unittest.test('to-json--from-json', () async { + final o = buildOracleAsmConfig(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.OracleAsmConfig.fromJson( + oJson as core.Map); + checkOracleAsmConfig(od); + }); + }); + + unittest.group('obj-schema-OracleAsmLogFileAccess', () { + unittest.test('to-json--from-json', () async { + final o = buildOracleAsmLogFileAccess(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.OracleAsmLogFileAccess.fromJson( + oJson as core.Map); + checkOracleAsmLogFileAccess(od); + }); + }); + 
unittest.group('obj-schema-OracleColumn', () { unittest.test('to-json--from-json', () async { final o = buildOracleColumn(); @@ -3833,6 +4113,16 @@ void main() { }); }); + unittest.group('obj-schema-SqlServerLsnPosition', () { + unittest.test('to-json--from-json', () async { + final o = buildSqlServerLsnPosition(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.SqlServerLsnPosition.fromJson( + oJson as core.Map); + checkSqlServerLsnPosition(od); + }); + }); + unittest.group('obj-schema-SqlServerObjectIdentifier', () { unittest.test('to-json--from-json', () async { final o = buildSqlServerObjectIdentifier(); diff --git a/generated/googleapis/test/deploymentmanager/v2_test.dart b/generated/googleapis/test/deploymentmanager/v2_test.dart index e686af15f..c22c1c5d2 100644 --- a/generated/googleapis/test/deploymentmanager/v2_test.dart +++ b/generated/googleapis/test/deploymentmanager/v2_test.dart @@ -209,12 +209,53 @@ void checkConfigFile(api.ConfigFile o) { buildCounterConfigFile--; } -core.List buildUnnamed3() => [ +core.List buildUnnamed3() => [ + 'foo', + 'foo', + ]; + +void checkUnnamed3(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o[0], + unittest.equals('foo'), + ); + unittest.expect( + o[1], + unittest.equals('foo'), + ); +} + +core.int buildCounterDebugInfo = 0; +api.DebugInfo buildDebugInfo() { + final o = api.DebugInfo(); + buildCounterDebugInfo++; + if (buildCounterDebugInfo < 3) { + o.detail = 'foo'; + o.stackEntries = buildUnnamed3(); + } + buildCounterDebugInfo--; + return o; +} + +void checkDebugInfo(api.DebugInfo o) { + buildCounterDebugInfo++; + if (buildCounterDebugInfo < 3) { + unittest.expect( + o.detail!, + unittest.equals('foo'), + ); + checkUnnamed3(o.stackEntries!); + } + buildCounterDebugInfo--; +} + +core.List buildUnnamed4() => [ buildDeploymentLabelEntry(), buildDeploymentLabelEntry(), ]; -void checkUnnamed3(core.List o) { +void checkUnnamed4(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkDeploymentLabelEntry(o[0]); checkDeploymentLabelEntry(o[1]); @@ -229,7 +270,7 @@ api.Deployment buildDeployment() { o.fingerprint = 'foo'; o.id = 'foo'; o.insertTime = 'foo'; - o.labels = buildUnnamed3(); + o.labels = buildUnnamed4(); o.manifest = 'foo'; o.name = 'foo'; o.operation = buildOperation(); @@ -261,7 +302,7 @@ void checkDeployment(api.Deployment o) { o.insertTime!, unittest.equals('foo'), ); - checkUnnamed3(o.labels!); + checkUnnamed4(o.labels!); unittest.expect( o.manifest!, unittest.equals('foo'), @@ -312,12 +353,12 @@ void checkDeploymentLabelEntry(api.DeploymentLabelEntry o) { buildCounterDeploymentLabelEntry--; } -core.List buildUnnamed4() => [ +core.List buildUnnamed5() => [ buildDeploymentUpdateLabelEntry(), buildDeploymentUpdateLabelEntry(), ]; -void checkUnnamed4(core.List o) { +void checkUnnamed5(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkDeploymentUpdateLabelEntry(o[0]); checkDeploymentUpdateLabelEntry(o[1]); @@ -329,7 +370,7 @@ api.DeploymentUpdate buildDeploymentUpdate() { buildCounterDeploymentUpdate++; if (buildCounterDeploymentUpdate < 3) { o.description = 'foo'; - o.labels = buildUnnamed4(); + o.labels = buildUnnamed5(); o.manifest = 'foo'; } buildCounterDeploymentUpdate--; @@ -343,7 +384,7 @@ void checkDeploymentUpdate(api.DeploymentUpdate o) { o.description!, unittest.equals('foo'), ); - checkUnnamed4(o.labels!); + checkUnnamed5(o.labels!); unittest.expect( o.manifest!, unittest.equals('foo'), @@ -402,12 +443,12 @@ void 
checkDeploymentsCancelPreviewRequest( buildCounterDeploymentsCancelPreviewRequest--; } -core.List buildUnnamed5() => [ +core.List buildUnnamed6() => [ buildDeployment(), buildDeployment(), ]; -void checkUnnamed5(core.List o) { +void checkUnnamed6(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkDeployment(o[0]); checkDeployment(o[1]); @@ -418,7 +459,7 @@ api.DeploymentsListResponse buildDeploymentsListResponse() { final o = api.DeploymentsListResponse(); buildCounterDeploymentsListResponse++; if (buildCounterDeploymentsListResponse < 3) { - o.deployments = buildUnnamed5(); + o.deployments = buildUnnamed6(); o.nextPageToken = 'foo'; } buildCounterDeploymentsListResponse--; @@ -428,7 +469,7 @@ api.DeploymentsListResponse buildDeploymentsListResponse() { void checkDeploymentsListResponse(api.DeploymentsListResponse o) { buildCounterDeploymentsListResponse++; if (buildCounterDeploymentsListResponse < 3) { - checkUnnamed5(o.deployments!); + checkUnnamed6(o.deployments!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -459,6 +500,52 @@ void checkDeploymentsStopRequest(api.DeploymentsStopRequest o) { buildCounterDeploymentsStopRequest--; } +core.Map buildUnnamed7() => { + 'x': 'foo', + 'y': 'foo', + }; + +void checkUnnamed7(core.Map o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o['x']!, + unittest.equals('foo'), + ); + unittest.expect( + o['y']!, + unittest.equals('foo'), + ); +} + +core.int buildCounterErrorInfo = 0; +api.ErrorInfo buildErrorInfo() { + final o = api.ErrorInfo(); + buildCounterErrorInfo++; + if (buildCounterErrorInfo < 3) { + o.domain = 'foo'; + o.metadata = buildUnnamed7(); + o.reason = 'foo'; + } + buildCounterErrorInfo--; + return o; +} + +void checkErrorInfo(api.ErrorInfo o) { + buildCounterErrorInfo++; + if (buildCounterErrorInfo < 3) { + unittest.expect( + o.domain!, + unittest.equals('foo'), + ); + checkUnnamed7(o.metadata!); + unittest.expect( + o.reason!, + unittest.equals('foo'), + ); + } + buildCounterErrorInfo--; +} + core.int buildCounterExpr = 0; api.Expr buildExpr() { final o = api.Expr(); @@ -496,12 +583,12 @@ void checkExpr(api.Expr o) { buildCounterExpr--; } -core.List buildUnnamed6() => [ +core.List buildUnnamed8() => [ buildBinding(), buildBinding(), ]; -void checkUnnamed6(core.List o) { +void checkUnnamed8(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkBinding(o[0]); checkBinding(o[1]); @@ -512,9 +599,10 @@ api.GlobalSetPolicyRequest buildGlobalSetPolicyRequest() { final o = api.GlobalSetPolicyRequest(); buildCounterGlobalSetPolicyRequest++; if (buildCounterGlobalSetPolicyRequest < 3) { - o.bindings = buildUnnamed6(); + o.bindings = buildUnnamed8(); o.etag = 'foo'; o.policy = buildPolicy(); + o.updateMask = 'foo'; } buildCounterGlobalSetPolicyRequest--; return o; @@ -523,16 +611,77 @@ api.GlobalSetPolicyRequest buildGlobalSetPolicyRequest() { void checkGlobalSetPolicyRequest(api.GlobalSetPolicyRequest o) { buildCounterGlobalSetPolicyRequest++; if (buildCounterGlobalSetPolicyRequest < 3) { - checkUnnamed6(o.bindings!); + checkUnnamed8(o.bindings!); unittest.expect( o.etag!, unittest.equals('foo'), ); checkPolicy(o.policy!); + unittest.expect( + o.updateMask!, + unittest.equals('foo'), + ); } buildCounterGlobalSetPolicyRequest--; } +core.List buildUnnamed9() => [ + buildHelpLink(), + buildHelpLink(), + ]; + +void checkUnnamed9(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkHelpLink(o[0]); + checkHelpLink(o[1]); +} + +core.int buildCounterHelp = 0; +api.Help buildHelp() { + 
final o = api.Help(); + buildCounterHelp++; + if (buildCounterHelp < 3) { + o.links = buildUnnamed9(); + } + buildCounterHelp--; + return o; +} + +void checkHelp(api.Help o) { + buildCounterHelp++; + if (buildCounterHelp < 3) { + checkUnnamed9(o.links!); + } + buildCounterHelp--; +} + +core.int buildCounterHelpLink = 0; +api.HelpLink buildHelpLink() { + final o = api.HelpLink(); + buildCounterHelpLink++; + if (buildCounterHelpLink < 3) { + o.description = 'foo'; + o.url = 'foo'; + } + buildCounterHelpLink--; + return o; +} + +void checkHelpLink(api.HelpLink o) { + buildCounterHelpLink++; + if (buildCounterHelpLink < 3) { + unittest.expect( + o.description!, + unittest.equals('foo'), + ); + unittest.expect( + o.url!, + unittest.equals('foo'), + ); + } + buildCounterHelpLink--; +} + core.int buildCounterImportFile = 0; api.ImportFile buildImportFile() { final o = api.ImportFile(); @@ -560,12 +709,12 @@ void checkImportFile(api.ImportFile o) { buildCounterImportFile--; } -core.Map buildUnnamed7() => { +core.Map buildUnnamed10() => { 'x': buildBulkInsertOperationStatus(), 'y': buildBulkInsertOperationStatus(), }; -void checkUnnamed7(core.Map o) { +void checkUnnamed10(core.Map o) { unittest.expect(o, unittest.hasLength(2)); checkBulkInsertOperationStatus(o['x']!); checkBulkInsertOperationStatus(o['y']!); @@ -577,7 +726,7 @@ api.InstancesBulkInsertOperationMetadata final o = api.InstancesBulkInsertOperationMetadata(); buildCounterInstancesBulkInsertOperationMetadata++; if (buildCounterInstancesBulkInsertOperationMetadata < 3) { - o.perLocationStatus = buildUnnamed7(); + o.perLocationStatus = buildUnnamed10(); } buildCounterInstancesBulkInsertOperationMetadata--; return o; @@ -587,17 +736,44 @@ void checkInstancesBulkInsertOperationMetadata( api.InstancesBulkInsertOperationMetadata o) { buildCounterInstancesBulkInsertOperationMetadata++; if (buildCounterInstancesBulkInsertOperationMetadata < 3) { - checkUnnamed7(o.perLocationStatus!); + checkUnnamed10(o.perLocationStatus!); } buildCounterInstancesBulkInsertOperationMetadata--; } -core.List buildUnnamed8() => [ +core.int buildCounterLocalizedMessage = 0; +api.LocalizedMessage buildLocalizedMessage() { + final o = api.LocalizedMessage(); + buildCounterLocalizedMessage++; + if (buildCounterLocalizedMessage < 3) { + o.locale = 'foo'; + o.message = 'foo'; + } + buildCounterLocalizedMessage--; + return o; +} + +void checkLocalizedMessage(api.LocalizedMessage o) { + buildCounterLocalizedMessage++; + if (buildCounterLocalizedMessage < 3) { + unittest.expect( + o.locale!, + unittest.equals('foo'), + ); + unittest.expect( + o.message!, + unittest.equals('foo'), + ); + } + buildCounterLocalizedMessage--; +} + +core.List buildUnnamed11() => [ buildImportFile(), buildImportFile(), ]; -void checkUnnamed8(core.List o) { +void checkUnnamed11(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkImportFile(o[0]); checkImportFile(o[1]); @@ -611,7 +787,7 @@ api.Manifest buildManifest() { o.config = buildConfigFile(); o.expandedConfig = 'foo'; o.id = 'foo'; - o.imports = buildUnnamed8(); + o.imports = buildUnnamed11(); o.insertTime = 'foo'; o.layout = 'foo'; o.manifestSizeBytes = 'foo'; @@ -635,7 +811,7 @@ void checkManifest(api.Manifest o) { o.id!, unittest.equals('foo'), ); - checkUnnamed8(o.imports!); + checkUnnamed11(o.imports!); unittest.expect( o.insertTime!, unittest.equals('foo'), @@ -664,12 +840,12 @@ void checkManifest(api.Manifest o) { buildCounterManifest--; } -core.List buildUnnamed9() => [ +core.List buildUnnamed12() => [ buildManifest(), 
buildManifest(), ]; -void checkUnnamed9(core.List o) { +void checkUnnamed12(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkManifest(o[0]); checkManifest(o[1]); @@ -680,7 +856,7 @@ api.ManifestsListResponse buildManifestsListResponse() { final o = api.ManifestsListResponse(); buildCounterManifestsListResponse++; if (buildCounterManifestsListResponse < 3) { - o.manifests = buildUnnamed9(); + o.manifests = buildUnnamed12(); o.nextPageToken = 'foo'; } buildCounterManifestsListResponse--; @@ -690,7 +866,7 @@ api.ManifestsListResponse buildManifestsListResponse() { void checkManifestsListResponse(api.ManifestsListResponse o) { buildCounterManifestsListResponse++; if (buildCounterManifestsListResponse < 3) { - checkUnnamed9(o.manifests!); + checkUnnamed12(o.manifests!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -699,12 +875,69 @@ void checkManifestsListResponse(api.ManifestsListResponse o) { buildCounterManifestsListResponse--; } +core.List buildUnnamed13() => [ + 'foo', + 'foo', + ]; + +void checkUnnamed13(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o[0], + unittest.equals('foo'), + ); + unittest.expect( + o[1], + unittest.equals('foo'), + ); +} + +core.int buildCounterOperationErrorErrorsErrorDetails = 0; +api.OperationErrorErrorsErrorDetails buildOperationErrorErrorsErrorDetails() { + final o = api.OperationErrorErrorsErrorDetails(); + buildCounterOperationErrorErrorsErrorDetails++; + if (buildCounterOperationErrorErrorsErrorDetails < 3) { + o.errorInfo = buildErrorInfo(); + o.help = buildHelp(); + o.localizedMessage = buildLocalizedMessage(); + o.quotaInfo = buildQuotaExceededInfo(); + } + buildCounterOperationErrorErrorsErrorDetails--; + return o; +} + +void checkOperationErrorErrorsErrorDetails( + api.OperationErrorErrorsErrorDetails o) { + buildCounterOperationErrorErrorsErrorDetails++; + if (buildCounterOperationErrorErrorsErrorDetails < 3) { + checkErrorInfo(o.errorInfo!); + checkHelp(o.help!); + checkLocalizedMessage(o.localizedMessage!); + checkQuotaExceededInfo(o.quotaInfo!); + } + buildCounterOperationErrorErrorsErrorDetails--; +} + +core.List buildUnnamed14() => [ + buildOperationErrorErrorsErrorDetails(), + buildOperationErrorErrorsErrorDetails(), + ]; + +void checkUnnamed14(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkOperationErrorErrorsErrorDetails(o[0]); + checkOperationErrorErrorsErrorDetails(o[1]); +} + core.int buildCounterOperationErrorErrors = 0; api.OperationErrorErrors buildOperationErrorErrors() { final o = api.OperationErrorErrors(); buildCounterOperationErrorErrors++; if (buildCounterOperationErrorErrors < 3) { + o.arguments = buildUnnamed13(); o.code = 'foo'; + o.debugInfo = buildDebugInfo(); + o.errorDetails = buildUnnamed14(); o.location = 'foo'; o.message = 'foo'; } @@ -715,10 +948,13 @@ api.OperationErrorErrors buildOperationErrorErrors() { void checkOperationErrorErrors(api.OperationErrorErrors o) { buildCounterOperationErrorErrors++; if (buildCounterOperationErrorErrors < 3) { + checkUnnamed13(o.arguments!); unittest.expect( o.code!, unittest.equals('foo'), ); + checkDebugInfo(o.debugInfo!); + checkUnnamed14(o.errorDetails!); unittest.expect( o.location!, unittest.equals('foo'), @@ -731,12 +967,12 @@ void checkOperationErrorErrors(api.OperationErrorErrors o) { buildCounterOperationErrorErrors--; } -core.List buildUnnamed10() => [ +core.List buildUnnamed15() => [ buildOperationErrorErrors(), buildOperationErrorErrors(), ]; -void checkUnnamed10(core.List o) { +void 
checkUnnamed15(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkOperationErrorErrors(o[0]); checkOperationErrorErrors(o[1]); @@ -747,7 +983,7 @@ api.OperationError buildOperationError() { final o = api.OperationError(); buildCounterOperationError++; if (buildCounterOperationError < 3) { - o.errors = buildUnnamed10(); + o.errors = buildUnnamed15(); } buildCounterOperationError--; return o; @@ -756,7 +992,7 @@ api.OperationError buildOperationError() { void checkOperationError(api.OperationError o) { buildCounterOperationError++; if (buildCounterOperationError < 3) { - checkUnnamed10(o.errors!); + checkUnnamed15(o.errors!); } buildCounterOperationError--; } @@ -788,12 +1024,12 @@ void checkOperationWarningsData(api.OperationWarningsData o) { buildCounterOperationWarningsData--; } -core.List buildUnnamed11() => [ +core.List buildUnnamed16() => [ buildOperationWarningsData(), buildOperationWarningsData(), ]; -void checkUnnamed11(core.List o) { +void checkUnnamed16(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkOperationWarningsData(o[0]); checkOperationWarningsData(o[1]); @@ -805,7 +1041,7 @@ api.OperationWarnings buildOperationWarnings() { buildCounterOperationWarnings++; if (buildCounterOperationWarnings < 3) { o.code = 'foo'; - o.data = buildUnnamed11(); + o.data = buildUnnamed16(); o.message = 'foo'; } buildCounterOperationWarnings--; @@ -819,7 +1055,7 @@ void checkOperationWarnings(api.OperationWarnings o) { o.code!, unittest.equals('foo'), ); - checkUnnamed11(o.data!); + checkUnnamed16(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -828,12 +1064,12 @@ void checkOperationWarnings(api.OperationWarnings o) { buildCounterOperationWarnings--; } -core.List buildUnnamed12() => [ +core.List buildUnnamed17() => [ buildOperationWarnings(), buildOperationWarnings(), ]; -void checkUnnamed12(core.List o) { +void checkUnnamed17(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkOperationWarnings(o[0]); checkOperationWarnings(o[1]); @@ -862,6 +1098,7 @@ api.Operation buildOperation() { o.progress = 42; o.region = 'foo'; o.selfLink = 'foo'; + o.selfLinkWithId = 'foo'; o.setCommonInstanceMetadataOperationMetadata = buildSetCommonInstanceMetadataOperationMetadata(); o.startTime = 'foo'; @@ -870,7 +1107,7 @@ api.Operation buildOperation() { o.targetId = 'foo'; o.targetLink = 'foo'; o.user = 'foo'; - o.warnings = buildUnnamed12(); + o.warnings = buildUnnamed17(); o.zone = 'foo'; } buildCounterOperation--; @@ -943,6 +1180,10 @@ void checkOperation(api.Operation o) { o.selfLink!, unittest.equals('foo'), ); + unittest.expect( + o.selfLinkWithId!, + unittest.equals('foo'), + ); checkSetCommonInstanceMetadataOperationMetadata( o.setCommonInstanceMetadataOperationMetadata!); unittest.expect( @@ -969,7 +1210,7 @@ void checkOperation(api.Operation o) { o.user!, unittest.equals('foo'), ); - checkUnnamed12(o.warnings!); + checkUnnamed17(o.warnings!); unittest.expect( o.zone!, unittest.equals('foo'), @@ -978,12 +1219,12 @@ void checkOperation(api.Operation o) { buildCounterOperation--; } -core.List buildUnnamed13() => [ +core.List buildUnnamed18() => [ buildOperation(), buildOperation(), ]; -void checkUnnamed13(core.List o) { +void checkUnnamed18(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkOperation(o[0]); checkOperation(o[1]); @@ -995,7 +1236,7 @@ api.OperationsListResponse buildOperationsListResponse() { buildCounterOperationsListResponse++; if (buildCounterOperationsListResponse < 3) { o.nextPageToken = 'foo'; - o.operations = 
buildUnnamed13(); + o.operations = buildUnnamed18(); } buildCounterOperationsListResponse--; return o; @@ -1008,28 +1249,28 @@ void checkOperationsListResponse(api.OperationsListResponse o) { o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed13(o.operations!); + checkUnnamed18(o.operations!); } buildCounterOperationsListResponse--; } -core.List buildUnnamed14() => [ +core.List buildUnnamed19() => [ buildAuditConfig(), buildAuditConfig(), ]; -void checkUnnamed14(core.List o) { +void checkUnnamed19(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkAuditConfig(o[0]); checkAuditConfig(o[1]); } -core.List buildUnnamed15() => [ +core.List buildUnnamed20() => [ buildBinding(), buildBinding(), ]; -void checkUnnamed15(core.List o) { +void checkUnnamed20(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkBinding(o[0]); checkBinding(o[1]); @@ -1040,8 +1281,8 @@ api.Policy buildPolicy() { final o = api.Policy(); buildCounterPolicy++; if (buildCounterPolicy < 3) { - o.auditConfigs = buildUnnamed14(); - o.bindings = buildUnnamed15(); + o.auditConfigs = buildUnnamed19(); + o.bindings = buildUnnamed20(); o.etag = 'foo'; o.version = 42; } @@ -1052,8 +1293,8 @@ api.Policy buildPolicy() { void checkPolicy(api.Policy o) { buildCounterPolicy++; if (buildCounterPolicy < 3) { - checkUnnamed14(o.auditConfigs!); - checkUnnamed15(o.bindings!); + checkUnnamed19(o.auditConfigs!); + checkUnnamed20(o.bindings!); unittest.expect( o.etag!, unittest.equals('foo'), @@ -1066,6 +1307,67 @@ void checkPolicy(api.Policy o) { buildCounterPolicy--; } +core.Map buildUnnamed21() => { + 'x': 'foo', + 'y': 'foo', + }; + +void checkUnnamed21(core.Map o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o['x']!, + unittest.equals('foo'), + ); + unittest.expect( + o['y']!, + unittest.equals('foo'), + ); +} + +core.int buildCounterQuotaExceededInfo = 0; +api.QuotaExceededInfo buildQuotaExceededInfo() { + final o = api.QuotaExceededInfo(); + buildCounterQuotaExceededInfo++; + if (buildCounterQuotaExceededInfo < 3) { + o.dimensions = buildUnnamed21(); + o.futureLimit = 42.0; + o.limit = 42.0; + o.limitName = 'foo'; + o.metricName = 'foo'; + o.rolloutStatus = 'foo'; + } + buildCounterQuotaExceededInfo--; + return o; +} + +void checkQuotaExceededInfo(api.QuotaExceededInfo o) { + buildCounterQuotaExceededInfo++; + if (buildCounterQuotaExceededInfo < 3) { + checkUnnamed21(o.dimensions!); + unittest.expect( + o.futureLimit!, + unittest.equals(42.0), + ); + unittest.expect( + o.limit!, + unittest.equals(42.0), + ); + unittest.expect( + o.limitName!, + unittest.equals('foo'), + ); + unittest.expect( + o.metricName!, + unittest.equals('foo'), + ); + unittest.expect( + o.rolloutStatus!, + unittest.equals('foo'), + ); + } + buildCounterQuotaExceededInfo--; +} + core.int buildCounterResourceWarningsData = 0; api.ResourceWarningsData buildResourceWarningsData() { final o = api.ResourceWarningsData(); @@ -1093,12 +1395,12 @@ void checkResourceWarningsData(api.ResourceWarningsData o) { buildCounterResourceWarningsData--; } -core.List buildUnnamed16() => [ +core.List buildUnnamed22() => [ buildResourceWarningsData(), buildResourceWarningsData(), ]; -void checkUnnamed16(core.List o) { +void checkUnnamed22(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkResourceWarningsData(o[0]); checkResourceWarningsData(o[1]); @@ -1110,7 +1412,7 @@ api.ResourceWarnings buildResourceWarnings() { buildCounterResourceWarnings++; if (buildCounterResourceWarnings < 3) { o.code = 'foo'; - o.data = 
buildUnnamed16(); + o.data = buildUnnamed22(); o.message = 'foo'; } buildCounterResourceWarnings--; @@ -1124,7 +1426,7 @@ void checkResourceWarnings(api.ResourceWarnings o) { o.code!, unittest.equals('foo'), ); - checkUnnamed16(o.data!); + checkUnnamed22(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -1133,12 +1435,12 @@ void checkResourceWarnings(api.ResourceWarnings o) { buildCounterResourceWarnings--; } -core.List buildUnnamed17() => [ +core.List buildUnnamed23() => [ buildResourceWarnings(), buildResourceWarnings(), ]; -void checkUnnamed17(core.List o) { +void checkUnnamed23(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkResourceWarnings(o[0]); checkResourceWarnings(o[1]); @@ -1160,7 +1462,7 @@ api.Resource buildResource() { o.update = buildResourceUpdate(); o.updateTime = 'foo'; o.url = 'foo'; - o.warnings = buildUnnamed17(); + o.warnings = buildUnnamed23(); } buildCounterResource--; return o; @@ -1207,7 +1509,7 @@ void checkResource(api.Resource o) { o.url!, unittest.equals('foo'), ); - checkUnnamed17(o.warnings!); + checkUnnamed23(o.warnings!); } buildCounterResource--; } @@ -1234,12 +1536,70 @@ void checkResourceAccessControl(api.ResourceAccessControl o) { buildCounterResourceAccessControl--; } +core.List buildUnnamed24() => [ + 'foo', + 'foo', + ]; + +void checkUnnamed24(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o[0], + unittest.equals('foo'), + ); + unittest.expect( + o[1], + unittest.equals('foo'), + ); +} + +core.int buildCounterResourceUpdateErrorErrorsErrorDetails = 0; +api.ResourceUpdateErrorErrorsErrorDetails + buildResourceUpdateErrorErrorsErrorDetails() { + final o = api.ResourceUpdateErrorErrorsErrorDetails(); + buildCounterResourceUpdateErrorErrorsErrorDetails++; + if (buildCounterResourceUpdateErrorErrorsErrorDetails < 3) { + o.errorInfo = buildErrorInfo(); + o.help = buildHelp(); + o.localizedMessage = buildLocalizedMessage(); + o.quotaInfo = buildQuotaExceededInfo(); + } + buildCounterResourceUpdateErrorErrorsErrorDetails--; + return o; +} + +void checkResourceUpdateErrorErrorsErrorDetails( + api.ResourceUpdateErrorErrorsErrorDetails o) { + buildCounterResourceUpdateErrorErrorsErrorDetails++; + if (buildCounterResourceUpdateErrorErrorsErrorDetails < 3) { + checkErrorInfo(o.errorInfo!); + checkHelp(o.help!); + checkLocalizedMessage(o.localizedMessage!); + checkQuotaExceededInfo(o.quotaInfo!); + } + buildCounterResourceUpdateErrorErrorsErrorDetails--; +} + +core.List buildUnnamed25() => [ + buildResourceUpdateErrorErrorsErrorDetails(), + buildResourceUpdateErrorErrorsErrorDetails(), + ]; + +void checkUnnamed25(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkResourceUpdateErrorErrorsErrorDetails(o[0]); + checkResourceUpdateErrorErrorsErrorDetails(o[1]); +} + core.int buildCounterResourceUpdateErrorErrors = 0; api.ResourceUpdateErrorErrors buildResourceUpdateErrorErrors() { final o = api.ResourceUpdateErrorErrors(); buildCounterResourceUpdateErrorErrors++; if (buildCounterResourceUpdateErrorErrors < 3) { + o.arguments = buildUnnamed24(); o.code = 'foo'; + o.debugInfo = buildDebugInfo(); + o.errorDetails = buildUnnamed25(); o.location = 'foo'; o.message = 'foo'; } @@ -1250,10 +1610,13 @@ api.ResourceUpdateErrorErrors buildResourceUpdateErrorErrors() { void checkResourceUpdateErrorErrors(api.ResourceUpdateErrorErrors o) { buildCounterResourceUpdateErrorErrors++; if (buildCounterResourceUpdateErrorErrors < 3) { + checkUnnamed24(o.arguments!); unittest.expect( o.code!, 
unittest.equals('foo'), ); + checkDebugInfo(o.debugInfo!); + checkUnnamed25(o.errorDetails!); unittest.expect( o.location!, unittest.equals('foo'), @@ -1266,12 +1629,12 @@ void checkResourceUpdateErrorErrors(api.ResourceUpdateErrorErrors o) { buildCounterResourceUpdateErrorErrors--; } -core.List buildUnnamed18() => [ +core.List buildUnnamed26() => [ buildResourceUpdateErrorErrors(), buildResourceUpdateErrorErrors(), ]; -void checkUnnamed18(core.List o) { +void checkUnnamed26(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkResourceUpdateErrorErrors(o[0]); checkResourceUpdateErrorErrors(o[1]); @@ -1282,7 +1645,7 @@ api.ResourceUpdateError buildResourceUpdateError() { final o = api.ResourceUpdateError(); buildCounterResourceUpdateError++; if (buildCounterResourceUpdateError < 3) { - o.errors = buildUnnamed18(); + o.errors = buildUnnamed26(); } buildCounterResourceUpdateError--; return o; @@ -1291,7 +1654,7 @@ api.ResourceUpdateError buildResourceUpdateError() { void checkResourceUpdateError(api.ResourceUpdateError o) { buildCounterResourceUpdateError++; if (buildCounterResourceUpdateError < 3) { - checkUnnamed18(o.errors!); + checkUnnamed26(o.errors!); } buildCounterResourceUpdateError--; } @@ -1323,12 +1686,12 @@ void checkResourceUpdateWarningsData(api.ResourceUpdateWarningsData o) { buildCounterResourceUpdateWarningsData--; } -core.List buildUnnamed19() => [ +core.List buildUnnamed27() => [ buildResourceUpdateWarningsData(), buildResourceUpdateWarningsData(), ]; -void checkUnnamed19(core.List o) { +void checkUnnamed27(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkResourceUpdateWarningsData(o[0]); checkResourceUpdateWarningsData(o[1]); @@ -1340,7 +1703,7 @@ api.ResourceUpdateWarnings buildResourceUpdateWarnings() { buildCounterResourceUpdateWarnings++; if (buildCounterResourceUpdateWarnings < 3) { o.code = 'foo'; - o.data = buildUnnamed19(); + o.data = buildUnnamed27(); o.message = 'foo'; } buildCounterResourceUpdateWarnings--; @@ -1354,7 +1717,7 @@ void checkResourceUpdateWarnings(api.ResourceUpdateWarnings o) { o.code!, unittest.equals('foo'), ); - checkUnnamed19(o.data!); + checkUnnamed27(o.data!); unittest.expect( o.message!, unittest.equals('foo'), @@ -1363,12 +1726,12 @@ void checkResourceUpdateWarnings(api.ResourceUpdateWarnings o) { buildCounterResourceUpdateWarnings--; } -core.List buildUnnamed20() => [ +core.List buildUnnamed28() => [ buildResourceUpdateWarnings(), buildResourceUpdateWarnings(), ]; -void checkUnnamed20(core.List o) { +void checkUnnamed28(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkResourceUpdateWarnings(o[0]); checkResourceUpdateWarnings(o[1]); @@ -1386,7 +1749,7 @@ api.ResourceUpdate buildResourceUpdate() { o.manifest = 'foo'; o.properties = 'foo'; o.state = 'foo'; - o.warnings = buildUnnamed20(); + o.warnings = buildUnnamed28(); } buildCounterResourceUpdate--; return o; @@ -1417,17 +1780,17 @@ void checkResourceUpdate(api.ResourceUpdate o) { o.state!, unittest.equals('foo'), ); - checkUnnamed20(o.warnings!); + checkUnnamed28(o.warnings!); } buildCounterResourceUpdate--; } -core.List buildUnnamed21() => [ +core.List buildUnnamed29() => [ buildResource(), buildResource(), ]; -void checkUnnamed21(core.List o) { +void checkUnnamed29(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkResource(o[0]); checkResource(o[1]); @@ -1439,7 +1802,7 @@ api.ResourcesListResponse buildResourcesListResponse() { buildCounterResourcesListResponse++; if (buildCounterResourcesListResponse < 3) { o.nextPageToken = 'foo'; - 
o.resources = buildUnnamed21(); + o.resources = buildUnnamed29(); } buildCounterResourcesListResponse--; return o; @@ -1452,21 +1815,21 @@ void checkResourcesListResponse(api.ResourcesListResponse o) { o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed21(o.resources!); + checkUnnamed29(o.resources!); } buildCounterResourcesListResponse--; } core.Map - buildUnnamed22() => { + buildUnnamed30() => { 'x': buildSetCommonInstanceMetadataOperationMetadataPerLocationOperationInfo(), 'y': buildSetCommonInstanceMetadataOperationMetadataPerLocationOperationInfo(), }; -void checkUnnamed22( +void checkUnnamed30( core.Map< core.String, api @@ -1486,7 +1849,7 @@ api.SetCommonInstanceMetadataOperationMetadata buildCounterSetCommonInstanceMetadataOperationMetadata++; if (buildCounterSetCommonInstanceMetadataOperationMetadata < 3) { o.clientOperationId = 'foo'; - o.perLocationOperations = buildUnnamed22(); + o.perLocationOperations = buildUnnamed30(); } buildCounterSetCommonInstanceMetadataOperationMetadata--; return o; @@ -1500,7 +1863,7 @@ void checkSetCommonInstanceMetadataOperationMetadata( o.clientOperationId!, unittest.equals('foo'), ); - checkUnnamed22(o.perLocationOperations!); + checkUnnamed30(o.perLocationOperations!); } buildCounterSetCommonInstanceMetadataOperationMetadata--; } @@ -1536,7 +1899,7 @@ void checkSetCommonInstanceMetadataOperationMetadataPerLocationOperationInfo( buildCounterSetCommonInstanceMetadataOperationMetadataPerLocationOperationInfo--; } -core.Map buildUnnamed23() => { +core.Map buildUnnamed31() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -1549,7 +1912,7 @@ core.Map buildUnnamed23() => { }, }; -void checkUnnamed23(core.Map o) { +void checkUnnamed31(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted1 = (o['x']!) 
as core.Map; unittest.expect(casted1, unittest.hasLength(3)); @@ -1581,15 +1944,15 @@ void checkUnnamed23(core.Map o) { ); } -core.List> buildUnnamed24() => [ - buildUnnamed23(), - buildUnnamed23(), +core.List> buildUnnamed32() => [ + buildUnnamed31(), + buildUnnamed31(), ]; -void checkUnnamed24(core.List> o) { +void checkUnnamed32(core.List> o) { unittest.expect(o, unittest.hasLength(2)); - checkUnnamed23(o[0]); - checkUnnamed23(o[1]); + checkUnnamed31(o[0]); + checkUnnamed31(o[1]); } core.int buildCounterStatus = 0; @@ -1598,7 +1961,7 @@ api.Status buildStatus() { buildCounterStatus++; if (buildCounterStatus < 3) { o.code = 42; - o.details = buildUnnamed24(); + o.details = buildUnnamed32(); o.message = 'foo'; } buildCounterStatus--; @@ -1612,7 +1975,7 @@ void checkStatus(api.Status o) { o.code!, unittest.equals(42), ); - checkUnnamed24(o.details!); + checkUnnamed32(o.details!); unittest.expect( o.message!, unittest.equals('foo'), @@ -1621,12 +1984,12 @@ void checkStatus(api.Status o) { buildCounterStatus--; } -core.List buildUnnamed25() => [ +core.List buildUnnamed33() => [ buildImportFile(), buildImportFile(), ]; -void checkUnnamed25(core.List o) { +void checkUnnamed33(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkImportFile(o[0]); checkImportFile(o[1]); @@ -1638,7 +2001,7 @@ api.TargetConfiguration buildTargetConfiguration() { buildCounterTargetConfiguration++; if (buildCounterTargetConfiguration < 3) { o.config = buildConfigFile(); - o.imports = buildUnnamed25(); + o.imports = buildUnnamed33(); } buildCounterTargetConfiguration--; return o; @@ -1648,17 +2011,17 @@ void checkTargetConfiguration(api.TargetConfiguration o) { buildCounterTargetConfiguration++; if (buildCounterTargetConfiguration < 3) { checkConfigFile(o.config!); - checkUnnamed25(o.imports!); + checkUnnamed33(o.imports!); } buildCounterTargetConfiguration--; } -core.List buildUnnamed26() => [ +core.List buildUnnamed34() => [ 'foo', 'foo', ]; -void checkUnnamed26(core.List o) { +void checkUnnamed34(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -1675,7 +2038,7 @@ api.TestPermissionsRequest buildTestPermissionsRequest() { final o = api.TestPermissionsRequest(); buildCounterTestPermissionsRequest++; if (buildCounterTestPermissionsRequest < 3) { - o.permissions = buildUnnamed26(); + o.permissions = buildUnnamed34(); } buildCounterTestPermissionsRequest--; return o; @@ -1684,17 +2047,17 @@ api.TestPermissionsRequest buildTestPermissionsRequest() { void checkTestPermissionsRequest(api.TestPermissionsRequest o) { buildCounterTestPermissionsRequest++; if (buildCounterTestPermissionsRequest < 3) { - checkUnnamed26(o.permissions!); + checkUnnamed34(o.permissions!); } buildCounterTestPermissionsRequest--; } -core.List buildUnnamed27() => [ +core.List buildUnnamed35() => [ 'foo', 'foo', ]; -void checkUnnamed27(core.List o) { +void checkUnnamed35(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -1711,7 +2074,7 @@ api.TestPermissionsResponse buildTestPermissionsResponse() { final o = api.TestPermissionsResponse(); buildCounterTestPermissionsResponse++; if (buildCounterTestPermissionsResponse < 3) { - o.permissions = buildUnnamed27(); + o.permissions = buildUnnamed35(); } buildCounterTestPermissionsResponse--; return o; @@ -1720,7 +2083,7 @@ api.TestPermissionsResponse buildTestPermissionsResponse() { void checkTestPermissionsResponse(api.TestPermissionsResponse o) { buildCounterTestPermissionsResponse++; if (buildCounterTestPermissionsResponse < 3) 
{ - checkUnnamed27(o.permissions!); + checkUnnamed35(o.permissions!); } buildCounterTestPermissionsResponse--; } @@ -1764,12 +2127,12 @@ void checkType(api.Type o) { buildCounterType--; } -core.List buildUnnamed28() => [ +core.List buildUnnamed36() => [ buildType(), buildType(), ]; -void checkUnnamed28(core.List o) { +void checkUnnamed36(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkType(o[0]); checkType(o[1]); @@ -1781,7 +2144,7 @@ api.TypesListResponse buildTypesListResponse() { buildCounterTypesListResponse++; if (buildCounterTypesListResponse < 3) { o.nextPageToken = 'foo'; - o.types = buildUnnamed28(); + o.types = buildUnnamed36(); } buildCounterTypesListResponse--; return o; @@ -1794,7 +2157,7 @@ void checkTypesListResponse(api.TypesListResponse o) { o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed28(o.types!); + checkUnnamed36(o.types!); } buildCounterTypesListResponse--; } @@ -1850,6 +2213,16 @@ void main() { }); }); + unittest.group('obj-schema-DebugInfo', () { + unittest.test('to-json--from-json', () async { + final o = buildDebugInfo(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.DebugInfo.fromJson(oJson as core.Map); + checkDebugInfo(od); + }); + }); + unittest.group('obj-schema-Deployment', () { unittest.test('to-json--from-json', () async { final o = buildDeployment(); @@ -1920,6 +2293,16 @@ void main() { }); }); + unittest.group('obj-schema-ErrorInfo', () { + unittest.test('to-json--from-json', () async { + final o = buildErrorInfo(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.ErrorInfo.fromJson(oJson as core.Map); + checkErrorInfo(od); + }); + }); + unittest.group('obj-schema-Expr', () { unittest.test('to-json--from-json', () async { final o = buildExpr(); @@ -1940,6 +2323,26 @@ void main() { }); }); + unittest.group('obj-schema-Help', () { + unittest.test('to-json--from-json', () async { + final o = buildHelp(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.Help.fromJson(oJson as core.Map); + checkHelp(od); + }); + }); + + unittest.group('obj-schema-HelpLink', () { + unittest.test('to-json--from-json', () async { + final o = buildHelpLink(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.HelpLink.fromJson(oJson as core.Map); + checkHelpLink(od); + }); + }); + unittest.group('obj-schema-ImportFile', () { unittest.test('to-json--from-json', () async { final o = buildImportFile(); @@ -1960,6 +2363,16 @@ void main() { }); }); + unittest.group('obj-schema-LocalizedMessage', () { + unittest.test('to-json--from-json', () async { + final o = buildLocalizedMessage(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.LocalizedMessage.fromJson( + oJson as core.Map); + checkLocalizedMessage(od); + }); + }); + unittest.group('obj-schema-Manifest', () { unittest.test('to-json--from-json', () async { final o = buildManifest(); @@ -1980,6 +2393,16 @@ void main() { }); }); + unittest.group('obj-schema-OperationErrorErrorsErrorDetails', () { + unittest.test('to-json--from-json', () async { + final o = buildOperationErrorErrorsErrorDetails(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.OperationErrorErrorsErrorDetails.fromJson( + oJson as core.Map); + checkOperationErrorErrorsErrorDetails(od); + }); + }); + unittest.group('obj-schema-OperationErrorErrors', () { unittest.test('to-json--from-json', () async { final o = buildOperationErrorErrors(); @@ -2050,6 +2473,16 @@ void 
main() { }); }); + unittest.group('obj-schema-QuotaExceededInfo', () { + unittest.test('to-json--from-json', () async { + final o = buildQuotaExceededInfo(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.QuotaExceededInfo.fromJson( + oJson as core.Map); + checkQuotaExceededInfo(od); + }); + }); + unittest.group('obj-schema-ResourceWarningsData', () { unittest.test('to-json--from-json', () async { final o = buildResourceWarningsData(); @@ -2090,6 +2523,16 @@ void main() { }); }); + unittest.group('obj-schema-ResourceUpdateErrorErrorsErrorDetails', () { + unittest.test('to-json--from-json', () async { + final o = buildResourceUpdateErrorErrorsErrorDetails(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.ResourceUpdateErrorErrorsErrorDetails.fromJson( + oJson as core.Map); + checkResourceUpdateErrorErrorsErrorDetails(od); + }); + }); + unittest.group('obj-schema-ResourceUpdateErrorErrors', () { unittest.test('to-json--from-json', () async { final o = buildResourceUpdateErrorErrors(); @@ -2329,6 +2772,7 @@ void main() { final arg_project = 'foo'; final arg_deployment = 'foo'; final arg_deletePolicy = 'foo'; + final arg_header_bypassBillingFilter = true; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -2385,6 +2829,10 @@ void main() { queryMap['deletePolicy']!.first, unittest.equals(arg_deletePolicy), ); + unittest.expect( + queryMap['header.bypassBillingFilter']!.first, + unittest.equals('$arg_header_bypassBillingFilter'), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -2397,7 +2845,9 @@ void main() { return async.Future.value(stringResponse(200, h, resp)); }), true); final response = await res.delete(arg_project, arg_deployment, - deletePolicy: arg_deletePolicy, $fields: arg_$fields); + deletePolicy: arg_deletePolicy, + header_bypassBillingFilter: arg_header_bypassBillingFilter, + $fields: arg_$fields); checkOperation(response as api.Operation); }); @@ -2406,6 +2856,7 @@ void main() { final res = api.DeploymentManagerApi(mock).deployments; final arg_project = 'foo'; final arg_deployment = 'foo'; + final arg_header_bypassBillingFilter = true; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -2458,6 +2909,10 @@ void main() { ); } } + unittest.expect( + queryMap['header.bypassBillingFilter']!.first, + unittest.equals('$arg_header_bypassBillingFilter'), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -2469,8 +2924,9 @@ void main() { final resp = convert.json.encode(buildDeployment()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = - await res.get(arg_project, arg_deployment, $fields: arg_$fields); + final response = await res.get(arg_project, arg_deployment, + header_bypassBillingFilter: arg_header_bypassBillingFilter, + $fields: arg_$fields); checkDeployment(response as api.Deployment); }); @@ -2479,6 +2935,7 @@ void main() { final res = api.DeploymentManagerApi(mock).deployments; final arg_project = 'foo'; final arg_resource = 'foo'; + final arg_header_bypassBillingFilter = true; final arg_optionsRequestedPolicyVersion = 42; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -2540,6 +2997,10 @@ void main() { ); } } + unittest.expect( + queryMap['header.bypassBillingFilter']!.first, + unittest.equals('$arg_header_bypassBillingFilter'), 
+ ); unittest.expect( core.int.parse(queryMap['optionsRequestedPolicyVersion']!.first), unittest.equals(arg_optionsRequestedPolicyVersion), @@ -2556,6 +3017,7 @@ void main() { return async.Future.value(stringResponse(200, h, resp)); }), true); final response = await res.getIamPolicy(arg_project, arg_resource, + header_bypassBillingFilter: arg_header_bypassBillingFilter, optionsRequestedPolicyVersion: arg_optionsRequestedPolicyVersion, $fields: arg_$fields); checkPolicy(response as api.Policy); @@ -2567,6 +3029,7 @@ void main() { final arg_request = buildDeployment(); final arg_project = 'foo'; final arg_createPolicy = 'foo'; + final arg_header_bypassBillingFilter = true; final arg_preview = true; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -2622,6 +3085,10 @@ void main() { queryMap['createPolicy']!.first, unittest.equals(arg_createPolicy), ); + unittest.expect( + queryMap['header.bypassBillingFilter']!.first, + unittest.equals('$arg_header_bypassBillingFilter'), + ); unittest.expect( queryMap['preview']!.first, unittest.equals('$arg_preview'), @@ -2639,6 +3106,7 @@ void main() { }), true); final response = await res.insert(arg_request, arg_project, createPolicy: arg_createPolicy, + header_bypassBillingFilter: arg_header_bypassBillingFilter, preview: arg_preview, $fields: arg_$fields); checkOperation(response as api.Operation); @@ -2742,6 +3210,7 @@ void main() { final arg_deployment = 'foo'; final arg_createPolicy = 'foo'; final arg_deletePolicy = 'foo'; + final arg_header_bypassBillingFilter = true; final arg_preview = true; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -2807,6 +3276,10 @@ void main() { queryMap['deletePolicy']!.first, unittest.equals(arg_deletePolicy), ); + unittest.expect( + queryMap['header.bypassBillingFilter']!.first, + unittest.equals('$arg_header_bypassBillingFilter'), + ); unittest.expect( queryMap['preview']!.first, unittest.equals('$arg_preview'), @@ -2825,6 +3298,7 @@ void main() { final response = await res.patch(arg_request, arg_project, arg_deployment, createPolicy: arg_createPolicy, deletePolicy: arg_deletePolicy, + header_bypassBillingFilter: arg_header_bypassBillingFilter, preview: arg_preview, $fields: arg_$fields); checkOperation(response as api.Operation); @@ -3009,6 +3483,7 @@ void main() { final arg_request = buildTestPermissionsRequest(); final arg_project = 'foo'; final arg_resource = 'foo'; + final arg_header_bypassBillingFilter = true; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final obj = api.TestPermissionsRequest.fromJson( @@ -3073,6 +3548,10 @@ void main() { ); } } + unittest.expect( + queryMap['header.bypassBillingFilter']!.first, + unittest.equals('$arg_header_bypassBillingFilter'), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -3086,6 +3565,7 @@ void main() { }), true); final response = await res.testIamPermissions( arg_request, arg_project, arg_resource, + header_bypassBillingFilter: arg_header_bypassBillingFilter, $fields: arg_$fields); checkTestPermissionsResponse(response as api.TestPermissionsResponse); }); @@ -3098,6 +3578,7 @@ void main() { final arg_deployment = 'foo'; final arg_createPolicy = 'foo'; final arg_deletePolicy = 'foo'; + final arg_header_bypassBillingFilter = true; final arg_preview = true; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -3163,6 +3644,10 @@ void main() { 
queryMap['deletePolicy']!.first, unittest.equals(arg_deletePolicy), ); + unittest.expect( + queryMap['header.bypassBillingFilter']!.first, + unittest.equals('$arg_header_bypassBillingFilter'), + ); unittest.expect( queryMap['preview']!.first, unittest.equals('$arg_preview'), @@ -3182,6 +3667,7 @@ void main() { arg_request, arg_project, arg_deployment, createPolicy: arg_createPolicy, deletePolicy: arg_deletePolicy, + header_bypassBillingFilter: arg_header_bypassBillingFilter, preview: arg_preview, $fields: arg_$fields); checkOperation(response as api.Operation); @@ -3195,6 +3681,7 @@ void main() { final arg_project = 'foo'; final arg_deployment = 'foo'; final arg_manifest = 'foo'; + final arg_header_bypassBillingFilter = true; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -3261,6 +3748,10 @@ void main() { ); } } + unittest.expect( + queryMap['header.bypassBillingFilter']!.first, + unittest.equals('$arg_header_bypassBillingFilter'), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -3273,6 +3764,7 @@ void main() { return async.Future.value(stringResponse(200, h, resp)); }), true); final response = await res.get(arg_project, arg_deployment, arg_manifest, + header_bypassBillingFilter: arg_header_bypassBillingFilter, $fields: arg_$fields); checkManifest(response as api.Manifest); }); @@ -3389,6 +3881,7 @@ void main() { final res = api.DeploymentManagerApi(mock).operations; final arg_project = 'foo'; final arg_operation = 'foo'; + final arg_header_bypassBillingFilter = true; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -3441,6 +3934,10 @@ void main() { ); } } + unittest.expect( + queryMap['header.bypassBillingFilter']!.first, + unittest.equals('$arg_header_bypassBillingFilter'), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -3452,8 +3949,9 @@ void main() { final resp = convert.json.encode(buildOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = - await res.get(arg_project, arg_operation, $fields: arg_$fields); + final response = await res.get(arg_project, arg_operation, + header_bypassBillingFilter: arg_header_bypassBillingFilter, + $fields: arg_$fields); checkOperation(response as api.Operation); }); @@ -3555,6 +4053,7 @@ void main() { final arg_project = 'foo'; final arg_deployment = 'foo'; final arg_resource = 'foo'; + final arg_header_bypassBillingFilter = true; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -3621,6 +4120,10 @@ void main() { ); } } + unittest.expect( + queryMap['header.bypassBillingFilter']!.first, + unittest.equals('$arg_header_bypassBillingFilter'), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -3633,6 +4136,7 @@ void main() { return async.Future.value(stringResponse(200, h, resp)); }), true); final response = await res.get(arg_project, arg_deployment, arg_resource, + header_bypassBillingFilter: arg_header_bypassBillingFilter, $fields: arg_$fields); checkResource(response as api.Resource); }); diff --git a/generated/googleapis/test/dialogflow/v2_test.dart b/generated/googleapis/test/dialogflow/v2_test.dart index 0de760aa0..51db17f81 100644 --- a/generated/googleapis/test/dialogflow/v2_test.dart +++ b/generated/googleapis/test/dialogflow/v2_test.dart @@ -3210,6 +3210,7 @@ 
api.GoogleCloudDialogflowV2HumanAgentAssistantConfigSuggestionQueryConfig o.confidenceThreshold = 42.0; o.contextFilterSettings = buildGoogleCloudDialogflowV2HumanAgentAssistantConfigSuggestionQueryConfigContextFilterSettings(); + o.contextSize = 42; o.dialogflowQuerySource = buildGoogleCloudDialogflowV2HumanAgentAssistantConfigSuggestionQueryConfigDialogflowQuerySource(); o.documentQuerySource = @@ -3236,6 +3237,10 @@ void checkGoogleCloudDialogflowV2HumanAgentAssistantConfigSuggestionQueryConfig( ); checkGoogleCloudDialogflowV2HumanAgentAssistantConfigSuggestionQueryConfigContextFilterSettings( o.contextFilterSettings!); + unittest.expect( + o.contextSize!, + unittest.equals(42), + ); checkGoogleCloudDialogflowV2HumanAgentAssistantConfigSuggestionQueryConfigDialogflowQuerySource( o.dialogflowQuerySource!); checkGoogleCloudDialogflowV2HumanAgentAssistantConfigSuggestionQueryConfigDocumentQuerySource( @@ -3823,12 +3828,29 @@ void checkUnnamed41(core.List o) { ); } -core.List buildUnnamed42() => [ +core.List buildUnnamed42() => [ + 'foo', + 'foo', + ]; + +void checkUnnamed42(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o[0], + unittest.equals('foo'), + ); + unittest.expect( + o[1], + unittest.equals('foo'), + ); +} + +core.List buildUnnamed43() => [ buildGoogleCloudDialogflowV2SpeechContext(), buildGoogleCloudDialogflowV2SpeechContext(), ]; -void checkUnnamed42(core.List o) { +void checkUnnamed43(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDialogflowV2SpeechContext(o[0]); checkGoogleCloudDialogflowV2SpeechContext(o[1]); @@ -3849,9 +3871,10 @@ api.GoogleCloudDialogflowV2InputAudioConfig o.modelVariant = 'foo'; o.optOutConformerModelMigration = true; o.phraseHints = buildUnnamed41(); + o.phraseSets = buildUnnamed42(); o.sampleRateHertz = 42; o.singleUtterance = true; - o.speechContexts = buildUnnamed42(); + o.speechContexts = buildUnnamed43(); } buildCounterGoogleCloudDialogflowV2InputAudioConfig--; return o; @@ -3882,12 +3905,13 @@ void checkGoogleCloudDialogflowV2InputAudioConfig( ); unittest.expect(o.optOutConformerModelMigration!, unittest.isTrue); checkUnnamed41(o.phraseHints!); + checkUnnamed42(o.phraseSets!); unittest.expect( o.sampleRateHertz!, unittest.equals(42), ); unittest.expect(o.singleUtterance!, unittest.isTrue); - checkUnnamed42(o.speechContexts!); + checkUnnamed43(o.speechContexts!); } buildCounterGoogleCloudDialogflowV2InputAudioConfig--; } @@ -3937,12 +3961,12 @@ void checkGoogleCloudDialogflowV2InputDataset( buildCounterGoogleCloudDialogflowV2InputDataset--; } -core.List buildUnnamed43() => [ +core.List buildUnnamed44() => [ 'foo', 'foo', ]; -void checkUnnamed43(core.List o) { +void checkUnnamed44(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -3954,12 +3978,12 @@ void checkUnnamed43(core.List o) { ); } -core.List buildUnnamed44() => [ +core.List buildUnnamed45() => [ 'foo', 'foo', ]; -void checkUnnamed44(core.List o) { +void checkUnnamed45(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -3972,24 +3996,24 @@ void checkUnnamed44(core.List o) { } core.List - buildUnnamed45() => [ + buildUnnamed46() => [ buildGoogleCloudDialogflowV2IntentFollowupIntentInfo(), buildGoogleCloudDialogflowV2IntentFollowupIntentInfo(), ]; -void checkUnnamed45( +void checkUnnamed46( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDialogflowV2IntentFollowupIntentInfo(o[0]); checkGoogleCloudDialogflowV2IntentFollowupIntentInfo(o[1]); } 
-core.List buildUnnamed46() => [ +core.List buildUnnamed47() => [ 'foo', 'foo', ]; -void checkUnnamed46(core.List o) { +void checkUnnamed47(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -4001,45 +4025,45 @@ void checkUnnamed46(core.List o) { ); } -core.List buildUnnamed47() => [ +core.List buildUnnamed48() => [ buildGoogleCloudDialogflowV2IntentMessage(), buildGoogleCloudDialogflowV2IntentMessage(), ]; -void checkUnnamed47(core.List o) { +void checkUnnamed48(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDialogflowV2IntentMessage(o[0]); checkGoogleCloudDialogflowV2IntentMessage(o[1]); } -core.List buildUnnamed48() => [ +core.List buildUnnamed49() => [ buildGoogleCloudDialogflowV2Context(), buildGoogleCloudDialogflowV2Context(), ]; -void checkUnnamed48(core.List o) { +void checkUnnamed49(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDialogflowV2Context(o[0]); checkGoogleCloudDialogflowV2Context(o[1]); } -core.List buildUnnamed49() => [ +core.List buildUnnamed50() => [ buildGoogleCloudDialogflowV2IntentParameter(), buildGoogleCloudDialogflowV2IntentParameter(), ]; -void checkUnnamed49(core.List o) { +void checkUnnamed50(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDialogflowV2IntentParameter(o[0]); checkGoogleCloudDialogflowV2IntentParameter(o[1]); } -core.List buildUnnamed50() => [ +core.List buildUnnamed51() => [ buildGoogleCloudDialogflowV2IntentTrainingPhrase(), buildGoogleCloudDialogflowV2IntentTrainingPhrase(), ]; -void checkUnnamed50( +void checkUnnamed51( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDialogflowV2IntentTrainingPhrase(o[0]); @@ -4052,24 +4076,24 @@ api.GoogleCloudDialogflowV2Intent buildGoogleCloudDialogflowV2Intent() { buildCounterGoogleCloudDialogflowV2Intent++; if (buildCounterGoogleCloudDialogflowV2Intent < 3) { o.action = 'foo'; - o.defaultResponsePlatforms = buildUnnamed43(); + o.defaultResponsePlatforms = buildUnnamed44(); o.displayName = 'foo'; o.endInteraction = true; - o.events = buildUnnamed44(); - o.followupIntentInfo = buildUnnamed45(); - o.inputContextNames = buildUnnamed46(); + o.events = buildUnnamed45(); + o.followupIntentInfo = buildUnnamed46(); + o.inputContextNames = buildUnnamed47(); o.isFallback = true; o.liveAgentHandoff = true; - o.messages = buildUnnamed47(); + o.messages = buildUnnamed48(); o.mlDisabled = true; o.name = 'foo'; - o.outputContexts = buildUnnamed48(); - o.parameters = buildUnnamed49(); + o.outputContexts = buildUnnamed49(); + o.parameters = buildUnnamed50(); o.parentFollowupIntentName = 'foo'; o.priority = 42; o.resetContexts = true; o.rootFollowupIntentName = 'foo'; - o.trainingPhrases = buildUnnamed50(); + o.trainingPhrases = buildUnnamed51(); o.webhookState = 'foo'; } buildCounterGoogleCloudDialogflowV2Intent--; @@ -4083,25 +4107,25 @@ void checkGoogleCloudDialogflowV2Intent(api.GoogleCloudDialogflowV2Intent o) { o.action!, unittest.equals('foo'), ); - checkUnnamed43(o.defaultResponsePlatforms!); + checkUnnamed44(o.defaultResponsePlatforms!); unittest.expect( o.displayName!, unittest.equals('foo'), ); unittest.expect(o.endInteraction!, unittest.isTrue); - checkUnnamed44(o.events!); - checkUnnamed45(o.followupIntentInfo!); - checkUnnamed46(o.inputContextNames!); + checkUnnamed45(o.events!); + checkUnnamed46(o.followupIntentInfo!); + checkUnnamed47(o.inputContextNames!); unittest.expect(o.isFallback!, unittest.isTrue); unittest.expect(o.liveAgentHandoff!, unittest.isTrue); - 
checkUnnamed47(o.messages!); + checkUnnamed48(o.messages!); unittest.expect(o.mlDisabled!, unittest.isTrue); unittest.expect( o.name!, unittest.equals('foo'), ); - checkUnnamed48(o.outputContexts!); - checkUnnamed49(o.parameters!); + checkUnnamed49(o.outputContexts!); + checkUnnamed50(o.parameters!); unittest.expect( o.parentFollowupIntentName!, unittest.equals('foo'), @@ -4115,7 +4139,7 @@ void checkGoogleCloudDialogflowV2Intent(api.GoogleCloudDialogflowV2Intent o) { o.rootFollowupIntentName!, unittest.equals('foo'), ); - checkUnnamed50(o.trainingPhrases!); + checkUnnamed51(o.trainingPhrases!); unittest.expect( o.webhookState!, unittest.equals('foo'), @@ -4124,12 +4148,12 @@ void checkGoogleCloudDialogflowV2Intent(api.GoogleCloudDialogflowV2Intent o) { buildCounterGoogleCloudDialogflowV2Intent--; } -core.List buildUnnamed51() => [ +core.List buildUnnamed52() => [ buildGoogleCloudDialogflowV2Intent(), buildGoogleCloudDialogflowV2Intent(), ]; -void checkUnnamed51(core.List o) { +void checkUnnamed52(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDialogflowV2Intent(o[0]); checkGoogleCloudDialogflowV2Intent(o[1]); @@ -4141,7 +4165,7 @@ api.GoogleCloudDialogflowV2IntentBatch final o = api.GoogleCloudDialogflowV2IntentBatch(); buildCounterGoogleCloudDialogflowV2IntentBatch++; if (buildCounterGoogleCloudDialogflowV2IntentBatch < 3) { - o.intents = buildUnnamed51(); + o.intents = buildUnnamed52(); } buildCounterGoogleCloudDialogflowV2IntentBatch--; return o; @@ -4151,7 +4175,7 @@ void checkGoogleCloudDialogflowV2IntentBatch( api.GoogleCloudDialogflowV2IntentBatch o) { buildCounterGoogleCloudDialogflowV2IntentBatch++; if (buildCounterGoogleCloudDialogflowV2IntentBatch < 3) { - checkUnnamed51(o.intents!); + checkUnnamed52(o.intents!); } buildCounterGoogleCloudDialogflowV2IntentBatch--; } @@ -4185,7 +4209,7 @@ void checkGoogleCloudDialogflowV2IntentFollowupIntentInfo( buildCounterGoogleCloudDialogflowV2IntentFollowupIntentInfo--; } -core.Map buildUnnamed52() => { +core.Map buildUnnamed53() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -4198,7 +4222,7 @@ core.Map buildUnnamed52() => { }, }; -void checkUnnamed52(core.Map o) { +void checkUnnamed53(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted8 = (o['x']!) 
as core.Map; unittest.expect(casted8, unittest.hasLength(3)); @@ -4247,7 +4271,7 @@ api.GoogleCloudDialogflowV2IntentMessage buildGoogleCloudDialogflowV2IntentMessageLinkOutSuggestion(); o.listSelect = buildGoogleCloudDialogflowV2IntentMessageListSelect(); o.mediaContent = buildGoogleCloudDialogflowV2IntentMessageMediaContent(); - o.payload = buildUnnamed52(); + o.payload = buildUnnamed53(); o.platform = 'foo'; o.quickReplies = buildGoogleCloudDialogflowV2IntentMessageQuickReplies(); o.simpleResponses = @@ -4274,7 +4298,7 @@ void checkGoogleCloudDialogflowV2IntentMessage( o.linkOutSuggestion!); checkGoogleCloudDialogflowV2IntentMessageListSelect(o.listSelect!); checkGoogleCloudDialogflowV2IntentMessageMediaContent(o.mediaContent!); - checkUnnamed52(o.payload!); + checkUnnamed53(o.payload!); unittest.expect( o.platform!, unittest.equals('foo'), @@ -4290,12 +4314,12 @@ void checkGoogleCloudDialogflowV2IntentMessage( } core.List - buildUnnamed53() => [ + buildUnnamed54() => [ buildGoogleCloudDialogflowV2IntentMessageBasicCardButton(), buildGoogleCloudDialogflowV2IntentMessageBasicCardButton(), ]; -void checkUnnamed53( +void checkUnnamed54( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDialogflowV2IntentMessageBasicCardButton(o[0]); @@ -4308,7 +4332,7 @@ api.GoogleCloudDialogflowV2IntentMessageBasicCard final o = api.GoogleCloudDialogflowV2IntentMessageBasicCard(); buildCounterGoogleCloudDialogflowV2IntentMessageBasicCard++; if (buildCounterGoogleCloudDialogflowV2IntentMessageBasicCard < 3) { - o.buttons = buildUnnamed53(); + o.buttons = buildUnnamed54(); o.formattedText = 'foo'; o.image = buildGoogleCloudDialogflowV2IntentMessageImage(); o.subtitle = 'foo'; @@ -4322,7 +4346,7 @@ void checkGoogleCloudDialogflowV2IntentMessageBasicCard( api.GoogleCloudDialogflowV2IntentMessageBasicCard o) { buildCounterGoogleCloudDialogflowV2IntentMessageBasicCard++; if (buildCounterGoogleCloudDialogflowV2IntentMessageBasicCard < 3) { - checkUnnamed53(o.buttons!); + checkUnnamed54(o.buttons!); unittest.expect( o.formattedText!, unittest.equals('foo'), @@ -4400,12 +4424,12 @@ void checkGoogleCloudDialogflowV2IntentMessageBasicCardButtonOpenUriAction( core.List< api .GoogleCloudDialogflowV2IntentMessageBrowseCarouselCardBrowseCarouselCardItem> - buildUnnamed54() => [ + buildUnnamed55() => [ buildGoogleCloudDialogflowV2IntentMessageBrowseCarouselCardBrowseCarouselCardItem(), buildGoogleCloudDialogflowV2IntentMessageBrowseCarouselCardBrowseCarouselCardItem(), ]; -void checkUnnamed54( +void checkUnnamed55( core.List< api .GoogleCloudDialogflowV2IntentMessageBrowseCarouselCardBrowseCarouselCardItem> @@ -4424,7 +4448,7 @@ api.GoogleCloudDialogflowV2IntentMessageBrowseCarouselCard buildCounterGoogleCloudDialogflowV2IntentMessageBrowseCarouselCard++; if (buildCounterGoogleCloudDialogflowV2IntentMessageBrowseCarouselCard < 3) { o.imageDisplayOptions = 'foo'; - o.items = buildUnnamed54(); + o.items = buildUnnamed55(); } buildCounterGoogleCloudDialogflowV2IntentMessageBrowseCarouselCard--; return o; @@ -4438,7 +4462,7 @@ void checkGoogleCloudDialogflowV2IntentMessageBrowseCarouselCard( o.imageDisplayOptions!, unittest.equals('foo'), ); - checkUnnamed54(o.items!); + checkUnnamed55(o.items!); } buildCounterGoogleCloudDialogflowV2IntentMessageBrowseCarouselCard--; } @@ -4525,12 +4549,12 @@ void checkGoogleCloudDialogflowV2IntentMessageBrowseCarouselCardBrowseCarouselCa } core.List - buildUnnamed55() => [ + buildUnnamed56() => [ buildGoogleCloudDialogflowV2IntentMessageCardButton(), 
buildGoogleCloudDialogflowV2IntentMessageCardButton(), ]; -void checkUnnamed55( +void checkUnnamed56( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDialogflowV2IntentMessageCardButton(o[0]); @@ -4543,7 +4567,7 @@ api.GoogleCloudDialogflowV2IntentMessageCard final o = api.GoogleCloudDialogflowV2IntentMessageCard(); buildCounterGoogleCloudDialogflowV2IntentMessageCard++; if (buildCounterGoogleCloudDialogflowV2IntentMessageCard < 3) { - o.buttons = buildUnnamed55(); + o.buttons = buildUnnamed56(); o.imageUri = 'foo'; o.subtitle = 'foo'; o.title = 'foo'; @@ -4556,7 +4580,7 @@ void checkGoogleCloudDialogflowV2IntentMessageCard( api.GoogleCloudDialogflowV2IntentMessageCard o) { buildCounterGoogleCloudDialogflowV2IntentMessageCard++; if (buildCounterGoogleCloudDialogflowV2IntentMessageCard < 3) { - checkUnnamed55(o.buttons!); + checkUnnamed56(o.buttons!); unittest.expect( o.imageUri!, unittest.equals('foo'), @@ -4603,12 +4627,12 @@ void checkGoogleCloudDialogflowV2IntentMessageCardButton( } core.List - buildUnnamed56() => [ + buildUnnamed57() => [ buildGoogleCloudDialogflowV2IntentMessageCarouselSelectItem(), buildGoogleCloudDialogflowV2IntentMessageCarouselSelectItem(), ]; -void checkUnnamed56( +void checkUnnamed57( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDialogflowV2IntentMessageCarouselSelectItem(o[0]); @@ -4621,7 +4645,7 @@ api.GoogleCloudDialogflowV2IntentMessageCarouselSelect final o = api.GoogleCloudDialogflowV2IntentMessageCarouselSelect(); buildCounterGoogleCloudDialogflowV2IntentMessageCarouselSelect++; if (buildCounterGoogleCloudDialogflowV2IntentMessageCarouselSelect < 3) { - o.items = buildUnnamed56(); + o.items = buildUnnamed57(); } buildCounterGoogleCloudDialogflowV2IntentMessageCarouselSelect--; return o; @@ -4631,7 +4655,7 @@ void checkGoogleCloudDialogflowV2IntentMessageCarouselSelect( api.GoogleCloudDialogflowV2IntentMessageCarouselSelect o) { buildCounterGoogleCloudDialogflowV2IntentMessageCarouselSelect++; if (buildCounterGoogleCloudDialogflowV2IntentMessageCarouselSelect < 3) { - checkUnnamed56(o.items!); + checkUnnamed57(o.items!); } buildCounterGoogleCloudDialogflowV2IntentMessageCarouselSelect--; } @@ -4757,12 +4781,12 @@ void checkGoogleCloudDialogflowV2IntentMessageLinkOutSuggestion( } core.List - buildUnnamed57() => [ + buildUnnamed58() => [ buildGoogleCloudDialogflowV2IntentMessageListSelectItem(), buildGoogleCloudDialogflowV2IntentMessageListSelectItem(), ]; -void checkUnnamed57( +void checkUnnamed58( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDialogflowV2IntentMessageListSelectItem(o[0]); @@ -4775,7 +4799,7 @@ api.GoogleCloudDialogflowV2IntentMessageListSelect final o = api.GoogleCloudDialogflowV2IntentMessageListSelect(); buildCounterGoogleCloudDialogflowV2IntentMessageListSelect++; if (buildCounterGoogleCloudDialogflowV2IntentMessageListSelect < 3) { - o.items = buildUnnamed57(); + o.items = buildUnnamed58(); o.subtitle = 'foo'; o.title = 'foo'; } @@ -4787,7 +4811,7 @@ void checkGoogleCloudDialogflowV2IntentMessageListSelect( api.GoogleCloudDialogflowV2IntentMessageListSelect o) { buildCounterGoogleCloudDialogflowV2IntentMessageListSelect++; if (buildCounterGoogleCloudDialogflowV2IntentMessageListSelect < 3) { - checkUnnamed57(o.items!); + checkUnnamed58(o.items!); unittest.expect( o.subtitle!, unittest.equals('foo'), @@ -4835,12 +4859,12 @@ void checkGoogleCloudDialogflowV2IntentMessageListSelectItem( core.List< 
api.GoogleCloudDialogflowV2IntentMessageMediaContentResponseMediaObject> - buildUnnamed58() => [ + buildUnnamed59() => [ buildGoogleCloudDialogflowV2IntentMessageMediaContentResponseMediaObject(), buildGoogleCloudDialogflowV2IntentMessageMediaContentResponseMediaObject(), ]; -void checkUnnamed58( +void checkUnnamed59( core.List< api .GoogleCloudDialogflowV2IntentMessageMediaContentResponseMediaObject> @@ -4858,7 +4882,7 @@ api.GoogleCloudDialogflowV2IntentMessageMediaContent final o = api.GoogleCloudDialogflowV2IntentMessageMediaContent(); buildCounterGoogleCloudDialogflowV2IntentMessageMediaContent++; if (buildCounterGoogleCloudDialogflowV2IntentMessageMediaContent < 3) { - o.mediaObjects = buildUnnamed58(); + o.mediaObjects = buildUnnamed59(); o.mediaType = 'foo'; } buildCounterGoogleCloudDialogflowV2IntentMessageMediaContent--; @@ -4869,7 +4893,7 @@ void checkGoogleCloudDialogflowV2IntentMessageMediaContent( api.GoogleCloudDialogflowV2IntentMessageMediaContent o) { buildCounterGoogleCloudDialogflowV2IntentMessageMediaContent++; if (buildCounterGoogleCloudDialogflowV2IntentMessageMediaContent < 3) { - checkUnnamed58(o.mediaObjects!); + checkUnnamed59(o.mediaObjects!); unittest.expect( o.mediaType!, unittest.equals('foo'), @@ -4921,12 +4945,12 @@ void checkGoogleCloudDialogflowV2IntentMessageMediaContentResponseMediaObject( buildCounterGoogleCloudDialogflowV2IntentMessageMediaContentResponseMediaObject--; } -core.List buildUnnamed59() => [ +core.List buildUnnamed60() => [ 'foo', 'foo', ]; -void checkUnnamed59(core.List o) { +void checkUnnamed60(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -4944,7 +4968,7 @@ api.GoogleCloudDialogflowV2IntentMessageQuickReplies final o = api.GoogleCloudDialogflowV2IntentMessageQuickReplies(); buildCounterGoogleCloudDialogflowV2IntentMessageQuickReplies++; if (buildCounterGoogleCloudDialogflowV2IntentMessageQuickReplies < 3) { - o.quickReplies = buildUnnamed59(); + o.quickReplies = buildUnnamed60(); o.title = 'foo'; } buildCounterGoogleCloudDialogflowV2IntentMessageQuickReplies--; @@ -4955,7 +4979,7 @@ void checkGoogleCloudDialogflowV2IntentMessageQuickReplies( api.GoogleCloudDialogflowV2IntentMessageQuickReplies o) { buildCounterGoogleCloudDialogflowV2IntentMessageQuickReplies++; if (buildCounterGoogleCloudDialogflowV2IntentMessageQuickReplies < 3) { - checkUnnamed59(o.quickReplies!); + checkUnnamed60(o.quickReplies!); unittest.expect( o.title!, unittest.equals('foo'), @@ -4964,12 +4988,12 @@ void checkGoogleCloudDialogflowV2IntentMessageQuickReplies( buildCounterGoogleCloudDialogflowV2IntentMessageQuickReplies--; } -core.List buildUnnamed60() => [ +core.List buildUnnamed61() => [ 'foo', 'foo', ]; -void checkUnnamed60(core.List o) { +void checkUnnamed61(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -4988,7 +5012,7 @@ api.GoogleCloudDialogflowV2IntentMessageSelectItemInfo buildCounterGoogleCloudDialogflowV2IntentMessageSelectItemInfo++; if (buildCounterGoogleCloudDialogflowV2IntentMessageSelectItemInfo < 3) { o.key = 'foo'; - o.synonyms = buildUnnamed60(); + o.synonyms = buildUnnamed61(); } buildCounterGoogleCloudDialogflowV2IntentMessageSelectItemInfo--; return o; @@ -5002,7 +5026,7 @@ void checkGoogleCloudDialogflowV2IntentMessageSelectItemInfo( o.key!, unittest.equals('foo'), ); - checkUnnamed60(o.synonyms!); + checkUnnamed61(o.synonyms!); } buildCounterGoogleCloudDialogflowV2IntentMessageSelectItemInfo--; } @@ -5042,12 +5066,12 @@ void 
checkGoogleCloudDialogflowV2IntentMessageSimpleResponse( } core.List - buildUnnamed61() => [ + buildUnnamed62() => [ buildGoogleCloudDialogflowV2IntentMessageSimpleResponse(), buildGoogleCloudDialogflowV2IntentMessageSimpleResponse(), ]; -void checkUnnamed61( +void checkUnnamed62( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDialogflowV2IntentMessageSimpleResponse(o[0]); @@ -5060,7 +5084,7 @@ api.GoogleCloudDialogflowV2IntentMessageSimpleResponses final o = api.GoogleCloudDialogflowV2IntentMessageSimpleResponses(); buildCounterGoogleCloudDialogflowV2IntentMessageSimpleResponses++; if (buildCounterGoogleCloudDialogflowV2IntentMessageSimpleResponses < 3) { - o.simpleResponses = buildUnnamed61(); + o.simpleResponses = buildUnnamed62(); } buildCounterGoogleCloudDialogflowV2IntentMessageSimpleResponses--; return o; @@ -5070,7 +5094,7 @@ void checkGoogleCloudDialogflowV2IntentMessageSimpleResponses( api.GoogleCloudDialogflowV2IntentMessageSimpleResponses o) { buildCounterGoogleCloudDialogflowV2IntentMessageSimpleResponses++; if (buildCounterGoogleCloudDialogflowV2IntentMessageSimpleResponses < 3) { - checkUnnamed61(o.simpleResponses!); + checkUnnamed62(o.simpleResponses!); } buildCounterGoogleCloudDialogflowV2IntentMessageSimpleResponses--; } @@ -5100,12 +5124,12 @@ void checkGoogleCloudDialogflowV2IntentMessageSuggestion( } core.List - buildUnnamed62() => [ + buildUnnamed63() => [ buildGoogleCloudDialogflowV2IntentMessageSuggestion(), buildGoogleCloudDialogflowV2IntentMessageSuggestion(), ]; -void checkUnnamed62( +void checkUnnamed63( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDialogflowV2IntentMessageSuggestion(o[0]); @@ -5118,7 +5142,7 @@ api.GoogleCloudDialogflowV2IntentMessageSuggestions final o = api.GoogleCloudDialogflowV2IntentMessageSuggestions(); buildCounterGoogleCloudDialogflowV2IntentMessageSuggestions++; if (buildCounterGoogleCloudDialogflowV2IntentMessageSuggestions < 3) { - o.suggestions = buildUnnamed62(); + o.suggestions = buildUnnamed63(); } buildCounterGoogleCloudDialogflowV2IntentMessageSuggestions--; return o; @@ -5128,18 +5152,18 @@ void checkGoogleCloudDialogflowV2IntentMessageSuggestions( api.GoogleCloudDialogflowV2IntentMessageSuggestions o) { buildCounterGoogleCloudDialogflowV2IntentMessageSuggestions++; if (buildCounterGoogleCloudDialogflowV2IntentMessageSuggestions < 3) { - checkUnnamed62(o.suggestions!); + checkUnnamed63(o.suggestions!); } buildCounterGoogleCloudDialogflowV2IntentMessageSuggestions--; } core.List - buildUnnamed63() => [ + buildUnnamed64() => [ buildGoogleCloudDialogflowV2IntentMessageBasicCardButton(), buildGoogleCloudDialogflowV2IntentMessageBasicCardButton(), ]; -void checkUnnamed63( +void checkUnnamed64( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDialogflowV2IntentMessageBasicCardButton(o[0]); @@ -5147,12 +5171,12 @@ void checkUnnamed63( } core.List - buildUnnamed64() => [ + buildUnnamed65() => [ buildGoogleCloudDialogflowV2IntentMessageColumnProperties(), buildGoogleCloudDialogflowV2IntentMessageColumnProperties(), ]; -void checkUnnamed64( +void checkUnnamed65( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDialogflowV2IntentMessageColumnProperties(o[0]); @@ -5160,12 +5184,12 @@ void checkUnnamed64( } core.List - buildUnnamed65() => [ + buildUnnamed66() => [ buildGoogleCloudDialogflowV2IntentMessageTableCardRow(), buildGoogleCloudDialogflowV2IntentMessageTableCardRow(), ]; -void checkUnnamed65( +void checkUnnamed66( core.List o) 
{ unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDialogflowV2IntentMessageTableCardRow(o[0]); @@ -5178,10 +5202,10 @@ api.GoogleCloudDialogflowV2IntentMessageTableCard final o = api.GoogleCloudDialogflowV2IntentMessageTableCard(); buildCounterGoogleCloudDialogflowV2IntentMessageTableCard++; if (buildCounterGoogleCloudDialogflowV2IntentMessageTableCard < 3) { - o.buttons = buildUnnamed63(); - o.columnProperties = buildUnnamed64(); + o.buttons = buildUnnamed64(); + o.columnProperties = buildUnnamed65(); o.image = buildGoogleCloudDialogflowV2IntentMessageImage(); - o.rows = buildUnnamed65(); + o.rows = buildUnnamed66(); o.subtitle = 'foo'; o.title = 'foo'; } @@ -5193,10 +5217,10 @@ void checkGoogleCloudDialogflowV2IntentMessageTableCard( api.GoogleCloudDialogflowV2IntentMessageTableCard o) { buildCounterGoogleCloudDialogflowV2IntentMessageTableCard++; if (buildCounterGoogleCloudDialogflowV2IntentMessageTableCard < 3) { - checkUnnamed63(o.buttons!); - checkUnnamed64(o.columnProperties!); + checkUnnamed64(o.buttons!); + checkUnnamed65(o.columnProperties!); checkGoogleCloudDialogflowV2IntentMessageImage(o.image!); - checkUnnamed65(o.rows!); + checkUnnamed66(o.rows!); unittest.expect( o.subtitle!, unittest.equals('foo'), @@ -5234,12 +5258,12 @@ void checkGoogleCloudDialogflowV2IntentMessageTableCardCell( } core.List - buildUnnamed66() => [ + buildUnnamed67() => [ buildGoogleCloudDialogflowV2IntentMessageTableCardCell(), buildGoogleCloudDialogflowV2IntentMessageTableCardCell(), ]; -void checkUnnamed66( +void checkUnnamed67( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDialogflowV2IntentMessageTableCardCell(o[0]); @@ -5252,7 +5276,7 @@ api.GoogleCloudDialogflowV2IntentMessageTableCardRow final o = api.GoogleCloudDialogflowV2IntentMessageTableCardRow(); buildCounterGoogleCloudDialogflowV2IntentMessageTableCardRow++; if (buildCounterGoogleCloudDialogflowV2IntentMessageTableCardRow < 3) { - o.cells = buildUnnamed66(); + o.cells = buildUnnamed67(); o.dividerAfter = true; } buildCounterGoogleCloudDialogflowV2IntentMessageTableCardRow--; @@ -5263,18 +5287,18 @@ void checkGoogleCloudDialogflowV2IntentMessageTableCardRow( api.GoogleCloudDialogflowV2IntentMessageTableCardRow o) { buildCounterGoogleCloudDialogflowV2IntentMessageTableCardRow++; if (buildCounterGoogleCloudDialogflowV2IntentMessageTableCardRow < 3) { - checkUnnamed66(o.cells!); + checkUnnamed67(o.cells!); unittest.expect(o.dividerAfter!, unittest.isTrue); } buildCounterGoogleCloudDialogflowV2IntentMessageTableCardRow--; } -core.List buildUnnamed67() => [ +core.List buildUnnamed68() => [ 'foo', 'foo', ]; -void checkUnnamed67(core.List o) { +void checkUnnamed68(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -5292,7 +5316,7 @@ api.GoogleCloudDialogflowV2IntentMessageText final o = api.GoogleCloudDialogflowV2IntentMessageText(); buildCounterGoogleCloudDialogflowV2IntentMessageText++; if (buildCounterGoogleCloudDialogflowV2IntentMessageText < 3) { - o.text = buildUnnamed67(); + o.text = buildUnnamed68(); } buildCounterGoogleCloudDialogflowV2IntentMessageText--; return o; @@ -5302,17 +5326,17 @@ void checkGoogleCloudDialogflowV2IntentMessageText( api.GoogleCloudDialogflowV2IntentMessageText o) { buildCounterGoogleCloudDialogflowV2IntentMessageText++; if (buildCounterGoogleCloudDialogflowV2IntentMessageText < 3) { - checkUnnamed67(o.text!); + checkUnnamed68(o.text!); } buildCounterGoogleCloudDialogflowV2IntentMessageText--; } -core.List buildUnnamed68() => [ +core.List 
buildUnnamed69() => [ 'foo', 'foo', ]; -void checkUnnamed68(core.List o) { +void checkUnnamed69(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -5336,7 +5360,7 @@ api.GoogleCloudDialogflowV2IntentParameter o.isList = true; o.mandatory = true; o.name = 'foo'; - o.prompts = buildUnnamed68(); + o.prompts = buildUnnamed69(); o.value = 'foo'; } buildCounterGoogleCloudDialogflowV2IntentParameter--; @@ -5365,7 +5389,7 @@ void checkGoogleCloudDialogflowV2IntentParameter( o.name!, unittest.equals('foo'), ); - checkUnnamed68(o.prompts!); + checkUnnamed69(o.prompts!); unittest.expect( o.value!, unittest.equals('foo'), @@ -5409,12 +5433,12 @@ void checkGoogleCloudDialogflowV2IntentSuggestion( } core.List - buildUnnamed69() => [ + buildUnnamed70() => [ buildGoogleCloudDialogflowV2IntentTrainingPhrasePart(), buildGoogleCloudDialogflowV2IntentTrainingPhrasePart(), ]; -void checkUnnamed69( +void checkUnnamed70( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDialogflowV2IntentTrainingPhrasePart(o[0]); @@ -5428,7 +5452,7 @@ api.GoogleCloudDialogflowV2IntentTrainingPhrase buildCounterGoogleCloudDialogflowV2IntentTrainingPhrase++; if (buildCounterGoogleCloudDialogflowV2IntentTrainingPhrase < 3) { o.name = 'foo'; - o.parts = buildUnnamed69(); + o.parts = buildUnnamed70(); o.timesAddedCount = 42; o.type = 'foo'; } @@ -5444,7 +5468,7 @@ void checkGoogleCloudDialogflowV2IntentTrainingPhrase( o.name!, unittest.equals('foo'), ); - checkUnnamed69(o.parts!); + checkUnnamed70(o.parts!); unittest.expect( o.timesAddedCount!, unittest.equals(42), @@ -5593,12 +5617,12 @@ void checkGoogleCloudDialogflowV2KnowledgeAssistAnswerKnowledgeAnswerFaqSource( core.List< api .GoogleCloudDialogflowV2KnowledgeAssistAnswerKnowledgeAnswerGenerativeSourceSnippet> - buildUnnamed70() => [ + buildUnnamed71() => [ buildGoogleCloudDialogflowV2KnowledgeAssistAnswerKnowledgeAnswerGenerativeSourceSnippet(), buildGoogleCloudDialogflowV2KnowledgeAssistAnswerKnowledgeAnswerGenerativeSourceSnippet(), ]; -void checkUnnamed70( +void checkUnnamed71( core.List< api .GoogleCloudDialogflowV2KnowledgeAssistAnswerKnowledgeAnswerGenerativeSourceSnippet> @@ -5620,7 +5644,7 @@ api.GoogleCloudDialogflowV2KnowledgeAssistAnswerKnowledgeAnswerGenerativeSource buildCounterGoogleCloudDialogflowV2KnowledgeAssistAnswerKnowledgeAnswerGenerativeSource++; if (buildCounterGoogleCloudDialogflowV2KnowledgeAssistAnswerKnowledgeAnswerGenerativeSource < 3) { - o.snippets = buildUnnamed70(); + o.snippets = buildUnnamed71(); } buildCounterGoogleCloudDialogflowV2KnowledgeAssistAnswerKnowledgeAnswerGenerativeSource--; return o; @@ -5632,11 +5656,56 @@ void checkGoogleCloudDialogflowV2KnowledgeAssistAnswerKnowledgeAnswerGenerativeS buildCounterGoogleCloudDialogflowV2KnowledgeAssistAnswerKnowledgeAnswerGenerativeSource++; if (buildCounterGoogleCloudDialogflowV2KnowledgeAssistAnswerKnowledgeAnswerGenerativeSource < 3) { - checkUnnamed70(o.snippets!); + checkUnnamed71(o.snippets!); } buildCounterGoogleCloudDialogflowV2KnowledgeAssistAnswerKnowledgeAnswerGenerativeSource--; } +core.Map buildUnnamed72() => { + 'x': { + 'list': [1, 2, 3], + 'bool': true, + 'string': 'foo' + }, + 'y': { + 'list': [1, 2, 3], + 'bool': true, + 'string': 'foo' + }, + }; + +void checkUnnamed72(core.Map o) { + unittest.expect(o, unittest.hasLength(2)); + var casted10 = (o['x']!) 
as core.Map; + unittest.expect(casted10, unittest.hasLength(3)); + unittest.expect( + casted10['list'], + unittest.equals([1, 2, 3]), + ); + unittest.expect( + casted10['bool'], + unittest.equals(true), + ); + unittest.expect( + casted10['string'], + unittest.equals('foo'), + ); + var casted11 = (o['y']!) as core.Map; + unittest.expect(casted11, unittest.hasLength(3)); + unittest.expect( + casted11['list'], + unittest.equals([1, 2, 3]), + ); + unittest.expect( + casted11['bool'], + unittest.equals(true), + ); + unittest.expect( + casted11['string'], + unittest.equals('foo'), + ); +} + core.int buildCounterGoogleCloudDialogflowV2KnowledgeAssistAnswerKnowledgeAnswerGenerativeSourceSnippet = 0; @@ -5647,6 +5716,7 @@ api.GoogleCloudDialogflowV2KnowledgeAssistAnswerKnowledgeAnswerGenerativeSourceS buildCounterGoogleCloudDialogflowV2KnowledgeAssistAnswerKnowledgeAnswerGenerativeSourceSnippet++; if (buildCounterGoogleCloudDialogflowV2KnowledgeAssistAnswerKnowledgeAnswerGenerativeSourceSnippet < 3) { + o.metadata = buildUnnamed72(); o.text = 'foo'; o.title = 'foo'; o.uri = 'foo'; @@ -5661,6 +5731,7 @@ void checkGoogleCloudDialogflowV2KnowledgeAssistAnswerKnowledgeAnswerGenerativeS buildCounterGoogleCloudDialogflowV2KnowledgeAssistAnswerKnowledgeAnswerGenerativeSourceSnippet++; if (buildCounterGoogleCloudDialogflowV2KnowledgeAssistAnswerKnowledgeAnswerGenerativeSourceSnippet < 3) { + checkUnnamed72(o.metadata!); unittest.expect( o.text!, unittest.equals('foo'), @@ -5738,12 +5809,12 @@ void checkGoogleCloudDialogflowV2KnowledgeBase( buildCounterGoogleCloudDialogflowV2KnowledgeBase--; } -core.List buildUnnamed71() => [ +core.List buildUnnamed73() => [ buildGoogleCloudDialogflowV2AnswerRecord(), buildGoogleCloudDialogflowV2AnswerRecord(), ]; -void checkUnnamed71(core.List o) { +void checkUnnamed73(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDialogflowV2AnswerRecord(o[0]); checkGoogleCloudDialogflowV2AnswerRecord(o[1]); @@ -5755,7 +5826,7 @@ api.GoogleCloudDialogflowV2ListAnswerRecordsResponse final o = api.GoogleCloudDialogflowV2ListAnswerRecordsResponse(); buildCounterGoogleCloudDialogflowV2ListAnswerRecordsResponse++; if (buildCounterGoogleCloudDialogflowV2ListAnswerRecordsResponse < 3) { - o.answerRecords = buildUnnamed71(); + o.answerRecords = buildUnnamed73(); o.nextPageToken = 'foo'; } buildCounterGoogleCloudDialogflowV2ListAnswerRecordsResponse--; @@ -5766,7 +5837,7 @@ void checkGoogleCloudDialogflowV2ListAnswerRecordsResponse( api.GoogleCloudDialogflowV2ListAnswerRecordsResponse o) { buildCounterGoogleCloudDialogflowV2ListAnswerRecordsResponse++; if (buildCounterGoogleCloudDialogflowV2ListAnswerRecordsResponse < 3) { - checkUnnamed71(o.answerRecords!); + checkUnnamed73(o.answerRecords!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -5775,12 +5846,12 @@ void checkGoogleCloudDialogflowV2ListAnswerRecordsResponse( buildCounterGoogleCloudDialogflowV2ListAnswerRecordsResponse--; } -core.List buildUnnamed72() => [ +core.List buildUnnamed74() => [ buildGoogleCloudDialogflowV2Context(), buildGoogleCloudDialogflowV2Context(), ]; -void checkUnnamed72(core.List o) { +void checkUnnamed74(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDialogflowV2Context(o[0]); checkGoogleCloudDialogflowV2Context(o[1]); @@ -5792,7 +5863,7 @@ api.GoogleCloudDialogflowV2ListContextsResponse final o = api.GoogleCloudDialogflowV2ListContextsResponse(); buildCounterGoogleCloudDialogflowV2ListContextsResponse++; if 
(buildCounterGoogleCloudDialogflowV2ListContextsResponse < 3) { - o.contexts = buildUnnamed72(); + o.contexts = buildUnnamed74(); o.nextPageToken = 'foo'; } buildCounterGoogleCloudDialogflowV2ListContextsResponse--; @@ -5803,7 +5874,7 @@ void checkGoogleCloudDialogflowV2ListContextsResponse( api.GoogleCloudDialogflowV2ListContextsResponse o) { buildCounterGoogleCloudDialogflowV2ListContextsResponse++; if (buildCounterGoogleCloudDialogflowV2ListContextsResponse < 3) { - checkUnnamed72(o.contexts!); + checkUnnamed74(o.contexts!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -5812,12 +5883,12 @@ void checkGoogleCloudDialogflowV2ListContextsResponse( buildCounterGoogleCloudDialogflowV2ListContextsResponse--; } -core.List buildUnnamed73() => [ +core.List buildUnnamed75() => [ buildGoogleCloudDialogflowV2ConversationDataset(), buildGoogleCloudDialogflowV2ConversationDataset(), ]; -void checkUnnamed73( +void checkUnnamed75( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDialogflowV2ConversationDataset(o[0]); @@ -5831,7 +5902,7 @@ api.GoogleCloudDialogflowV2ListConversationDatasetsResponse final o = api.GoogleCloudDialogflowV2ListConversationDatasetsResponse(); buildCounterGoogleCloudDialogflowV2ListConversationDatasetsResponse++; if (buildCounterGoogleCloudDialogflowV2ListConversationDatasetsResponse < 3) { - o.conversationDatasets = buildUnnamed73(); + o.conversationDatasets = buildUnnamed75(); o.nextPageToken = 'foo'; } buildCounterGoogleCloudDialogflowV2ListConversationDatasetsResponse--; @@ -5842,7 +5913,7 @@ void checkGoogleCloudDialogflowV2ListConversationDatasetsResponse( api.GoogleCloudDialogflowV2ListConversationDatasetsResponse o) { buildCounterGoogleCloudDialogflowV2ListConversationDatasetsResponse++; if (buildCounterGoogleCloudDialogflowV2ListConversationDatasetsResponse < 3) { - checkUnnamed73(o.conversationDatasets!); + checkUnnamed75(o.conversationDatasets!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -5852,12 +5923,12 @@ void checkGoogleCloudDialogflowV2ListConversationDatasetsResponse( } core.List - buildUnnamed74() => [ + buildUnnamed76() => [ buildGoogleCloudDialogflowV2ConversationModelEvaluation(), buildGoogleCloudDialogflowV2ConversationModelEvaluation(), ]; -void checkUnnamed74( +void checkUnnamed76( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDialogflowV2ConversationModelEvaluation(o[0]); @@ -5874,7 +5945,7 @@ api.GoogleCloudDialogflowV2ListConversationModelEvaluationsResponse buildCounterGoogleCloudDialogflowV2ListConversationModelEvaluationsResponse++; if (buildCounterGoogleCloudDialogflowV2ListConversationModelEvaluationsResponse < 3) { - o.conversationModelEvaluations = buildUnnamed74(); + o.conversationModelEvaluations = buildUnnamed76(); o.nextPageToken = 'foo'; } buildCounterGoogleCloudDialogflowV2ListConversationModelEvaluationsResponse--; @@ -5886,7 +5957,7 @@ void checkGoogleCloudDialogflowV2ListConversationModelEvaluationsResponse( buildCounterGoogleCloudDialogflowV2ListConversationModelEvaluationsResponse++; if (buildCounterGoogleCloudDialogflowV2ListConversationModelEvaluationsResponse < 3) { - checkUnnamed74(o.conversationModelEvaluations!); + checkUnnamed76(o.conversationModelEvaluations!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -5895,12 +5966,12 @@ void checkGoogleCloudDialogflowV2ListConversationModelEvaluationsResponse( buildCounterGoogleCloudDialogflowV2ListConversationModelEvaluationsResponse--; } -core.List buildUnnamed75() => [ 
+core.List buildUnnamed77() => [ buildGoogleCloudDialogflowV2ConversationModel(), buildGoogleCloudDialogflowV2ConversationModel(), ]; -void checkUnnamed75(core.List o) { +void checkUnnamed77(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDialogflowV2ConversationModel(o[0]); checkGoogleCloudDialogflowV2ConversationModel(o[1]); @@ -5912,7 +5983,7 @@ api.GoogleCloudDialogflowV2ListConversationModelsResponse final o = api.GoogleCloudDialogflowV2ListConversationModelsResponse(); buildCounterGoogleCloudDialogflowV2ListConversationModelsResponse++; if (buildCounterGoogleCloudDialogflowV2ListConversationModelsResponse < 3) { - o.conversationModels = buildUnnamed75(); + o.conversationModels = buildUnnamed77(); o.nextPageToken = 'foo'; } buildCounterGoogleCloudDialogflowV2ListConversationModelsResponse--; @@ -5923,7 +5994,7 @@ void checkGoogleCloudDialogflowV2ListConversationModelsResponse( api.GoogleCloudDialogflowV2ListConversationModelsResponse o) { buildCounterGoogleCloudDialogflowV2ListConversationModelsResponse++; if (buildCounterGoogleCloudDialogflowV2ListConversationModelsResponse < 3) { - checkUnnamed75(o.conversationModels!); + checkUnnamed77(o.conversationModels!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -5932,12 +6003,12 @@ void checkGoogleCloudDialogflowV2ListConversationModelsResponse( buildCounterGoogleCloudDialogflowV2ListConversationModelsResponse--; } -core.List buildUnnamed76() => [ +core.List buildUnnamed78() => [ buildGoogleCloudDialogflowV2ConversationProfile(), buildGoogleCloudDialogflowV2ConversationProfile(), ]; -void checkUnnamed76( +void checkUnnamed78( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDialogflowV2ConversationProfile(o[0]); @@ -5951,7 +6022,7 @@ api.GoogleCloudDialogflowV2ListConversationProfilesResponse final o = api.GoogleCloudDialogflowV2ListConversationProfilesResponse(); buildCounterGoogleCloudDialogflowV2ListConversationProfilesResponse++; if (buildCounterGoogleCloudDialogflowV2ListConversationProfilesResponse < 3) { - o.conversationProfiles = buildUnnamed76(); + o.conversationProfiles = buildUnnamed78(); o.nextPageToken = 'foo'; } buildCounterGoogleCloudDialogflowV2ListConversationProfilesResponse--; @@ -5962,7 +6033,7 @@ void checkGoogleCloudDialogflowV2ListConversationProfilesResponse( api.GoogleCloudDialogflowV2ListConversationProfilesResponse o) { buildCounterGoogleCloudDialogflowV2ListConversationProfilesResponse++; if (buildCounterGoogleCloudDialogflowV2ListConversationProfilesResponse < 3) { - checkUnnamed76(o.conversationProfiles!); + checkUnnamed78(o.conversationProfiles!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -5971,12 +6042,12 @@ void checkGoogleCloudDialogflowV2ListConversationProfilesResponse( buildCounterGoogleCloudDialogflowV2ListConversationProfilesResponse--; } -core.List buildUnnamed77() => [ +core.List buildUnnamed79() => [ buildGoogleCloudDialogflowV2Conversation(), buildGoogleCloudDialogflowV2Conversation(), ]; -void checkUnnamed77(core.List o) { +void checkUnnamed79(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDialogflowV2Conversation(o[0]); checkGoogleCloudDialogflowV2Conversation(o[1]); @@ -5988,7 +6059,7 @@ api.GoogleCloudDialogflowV2ListConversationsResponse final o = api.GoogleCloudDialogflowV2ListConversationsResponse(); buildCounterGoogleCloudDialogflowV2ListConversationsResponse++; if (buildCounterGoogleCloudDialogflowV2ListConversationsResponse < 3) { - o.conversations = buildUnnamed77(); + 
o.conversations = buildUnnamed79(); o.nextPageToken = 'foo'; } buildCounterGoogleCloudDialogflowV2ListConversationsResponse--; @@ -5999,7 +6070,7 @@ void checkGoogleCloudDialogflowV2ListConversationsResponse( api.GoogleCloudDialogflowV2ListConversationsResponse o) { buildCounterGoogleCloudDialogflowV2ListConversationsResponse++; if (buildCounterGoogleCloudDialogflowV2ListConversationsResponse < 3) { - checkUnnamed77(o.conversations!); + checkUnnamed79(o.conversations!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -6008,12 +6079,12 @@ void checkGoogleCloudDialogflowV2ListConversationsResponse( buildCounterGoogleCloudDialogflowV2ListConversationsResponse--; } -core.List buildUnnamed78() => [ +core.List buildUnnamed80() => [ buildGoogleCloudDialogflowV2Document(), buildGoogleCloudDialogflowV2Document(), ]; -void checkUnnamed78(core.List o) { +void checkUnnamed80(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDialogflowV2Document(o[0]); checkGoogleCloudDialogflowV2Document(o[1]); @@ -6025,7 +6096,7 @@ api.GoogleCloudDialogflowV2ListDocumentsResponse final o = api.GoogleCloudDialogflowV2ListDocumentsResponse(); buildCounterGoogleCloudDialogflowV2ListDocumentsResponse++; if (buildCounterGoogleCloudDialogflowV2ListDocumentsResponse < 3) { - o.documents = buildUnnamed78(); + o.documents = buildUnnamed80(); o.nextPageToken = 'foo'; } buildCounterGoogleCloudDialogflowV2ListDocumentsResponse--; @@ -6036,7 +6107,7 @@ void checkGoogleCloudDialogflowV2ListDocumentsResponse( api.GoogleCloudDialogflowV2ListDocumentsResponse o) { buildCounterGoogleCloudDialogflowV2ListDocumentsResponse++; if (buildCounterGoogleCloudDialogflowV2ListDocumentsResponse < 3) { - checkUnnamed78(o.documents!); + checkUnnamed80(o.documents!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -6045,12 +6116,12 @@ void checkGoogleCloudDialogflowV2ListDocumentsResponse( buildCounterGoogleCloudDialogflowV2ListDocumentsResponse--; } -core.List buildUnnamed79() => [ +core.List buildUnnamed81() => [ buildGoogleCloudDialogflowV2EntityType(), buildGoogleCloudDialogflowV2EntityType(), ]; -void checkUnnamed79(core.List o) { +void checkUnnamed81(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDialogflowV2EntityType(o[0]); checkGoogleCloudDialogflowV2EntityType(o[1]); @@ -6062,7 +6133,7 @@ api.GoogleCloudDialogflowV2ListEntityTypesResponse final o = api.GoogleCloudDialogflowV2ListEntityTypesResponse(); buildCounterGoogleCloudDialogflowV2ListEntityTypesResponse++; if (buildCounterGoogleCloudDialogflowV2ListEntityTypesResponse < 3) { - o.entityTypes = buildUnnamed79(); + o.entityTypes = buildUnnamed81(); o.nextPageToken = 'foo'; } buildCounterGoogleCloudDialogflowV2ListEntityTypesResponse--; @@ -6073,7 +6144,7 @@ void checkGoogleCloudDialogflowV2ListEntityTypesResponse( api.GoogleCloudDialogflowV2ListEntityTypesResponse o) { buildCounterGoogleCloudDialogflowV2ListEntityTypesResponse++; if (buildCounterGoogleCloudDialogflowV2ListEntityTypesResponse < 3) { - checkUnnamed79(o.entityTypes!); + checkUnnamed81(o.entityTypes!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -6082,12 +6153,12 @@ void checkGoogleCloudDialogflowV2ListEntityTypesResponse( buildCounterGoogleCloudDialogflowV2ListEntityTypesResponse--; } -core.List buildUnnamed80() => [ +core.List buildUnnamed82() => [ buildGoogleCloudDialogflowV2Environment(), buildGoogleCloudDialogflowV2Environment(), ]; -void checkUnnamed80(core.List o) { +void checkUnnamed82(core.List o) { unittest.expect(o, 
unittest.hasLength(2)); checkGoogleCloudDialogflowV2Environment(o[0]); checkGoogleCloudDialogflowV2Environment(o[1]); @@ -6099,7 +6170,7 @@ api.GoogleCloudDialogflowV2ListEnvironmentsResponse final o = api.GoogleCloudDialogflowV2ListEnvironmentsResponse(); buildCounterGoogleCloudDialogflowV2ListEnvironmentsResponse++; if (buildCounterGoogleCloudDialogflowV2ListEnvironmentsResponse < 3) { - o.environments = buildUnnamed80(); + o.environments = buildUnnamed82(); o.nextPageToken = 'foo'; } buildCounterGoogleCloudDialogflowV2ListEnvironmentsResponse--; @@ -6110,7 +6181,7 @@ void checkGoogleCloudDialogflowV2ListEnvironmentsResponse( api.GoogleCloudDialogflowV2ListEnvironmentsResponse o) { buildCounterGoogleCloudDialogflowV2ListEnvironmentsResponse++; if (buildCounterGoogleCloudDialogflowV2ListEnvironmentsResponse < 3) { - checkUnnamed80(o.environments!); + checkUnnamed82(o.environments!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -6119,12 +6190,12 @@ void checkGoogleCloudDialogflowV2ListEnvironmentsResponse( buildCounterGoogleCloudDialogflowV2ListEnvironmentsResponse--; } -core.List buildUnnamed81() => [ +core.List buildUnnamed83() => [ buildGoogleCloudDialogflowV2Generator(), buildGoogleCloudDialogflowV2Generator(), ]; -void checkUnnamed81(core.List o) { +void checkUnnamed83(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDialogflowV2Generator(o[0]); checkGoogleCloudDialogflowV2Generator(o[1]); @@ -6136,7 +6207,7 @@ api.GoogleCloudDialogflowV2ListGeneratorsResponse final o = api.GoogleCloudDialogflowV2ListGeneratorsResponse(); buildCounterGoogleCloudDialogflowV2ListGeneratorsResponse++; if (buildCounterGoogleCloudDialogflowV2ListGeneratorsResponse < 3) { - o.generators = buildUnnamed81(); + o.generators = buildUnnamed83(); o.nextPageToken = 'foo'; } buildCounterGoogleCloudDialogflowV2ListGeneratorsResponse--; @@ -6147,7 +6218,7 @@ void checkGoogleCloudDialogflowV2ListGeneratorsResponse( api.GoogleCloudDialogflowV2ListGeneratorsResponse o) { buildCounterGoogleCloudDialogflowV2ListGeneratorsResponse++; if (buildCounterGoogleCloudDialogflowV2ListGeneratorsResponse < 3) { - checkUnnamed81(o.generators!); + checkUnnamed83(o.generators!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -6156,12 +6227,12 @@ void checkGoogleCloudDialogflowV2ListGeneratorsResponse( buildCounterGoogleCloudDialogflowV2ListGeneratorsResponse--; } -core.List buildUnnamed82() => [ +core.List buildUnnamed84() => [ buildGoogleCloudDialogflowV2Intent(), buildGoogleCloudDialogflowV2Intent(), ]; -void checkUnnamed82(core.List o) { +void checkUnnamed84(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDialogflowV2Intent(o[0]); checkGoogleCloudDialogflowV2Intent(o[1]); @@ -6173,7 +6244,7 @@ api.GoogleCloudDialogflowV2ListIntentsResponse final o = api.GoogleCloudDialogflowV2ListIntentsResponse(); buildCounterGoogleCloudDialogflowV2ListIntentsResponse++; if (buildCounterGoogleCloudDialogflowV2ListIntentsResponse < 3) { - o.intents = buildUnnamed82(); + o.intents = buildUnnamed84(); o.nextPageToken = 'foo'; } buildCounterGoogleCloudDialogflowV2ListIntentsResponse--; @@ -6184,7 +6255,7 @@ void checkGoogleCloudDialogflowV2ListIntentsResponse( api.GoogleCloudDialogflowV2ListIntentsResponse o) { buildCounterGoogleCloudDialogflowV2ListIntentsResponse++; if (buildCounterGoogleCloudDialogflowV2ListIntentsResponse < 3) { - checkUnnamed82(o.intents!); + checkUnnamed84(o.intents!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -6193,12 +6264,12 
@@ void checkGoogleCloudDialogflowV2ListIntentsResponse( buildCounterGoogleCloudDialogflowV2ListIntentsResponse--; } -core.List buildUnnamed83() => [ +core.List buildUnnamed85() => [ buildGoogleCloudDialogflowV2KnowledgeBase(), buildGoogleCloudDialogflowV2KnowledgeBase(), ]; -void checkUnnamed83(core.List o) { +void checkUnnamed85(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDialogflowV2KnowledgeBase(o[0]); checkGoogleCloudDialogflowV2KnowledgeBase(o[1]); @@ -6210,7 +6281,7 @@ api.GoogleCloudDialogflowV2ListKnowledgeBasesResponse final o = api.GoogleCloudDialogflowV2ListKnowledgeBasesResponse(); buildCounterGoogleCloudDialogflowV2ListKnowledgeBasesResponse++; if (buildCounterGoogleCloudDialogflowV2ListKnowledgeBasesResponse < 3) { - o.knowledgeBases = buildUnnamed83(); + o.knowledgeBases = buildUnnamed85(); o.nextPageToken = 'foo'; } buildCounterGoogleCloudDialogflowV2ListKnowledgeBasesResponse--; @@ -6221,7 +6292,7 @@ void checkGoogleCloudDialogflowV2ListKnowledgeBasesResponse( api.GoogleCloudDialogflowV2ListKnowledgeBasesResponse o) { buildCounterGoogleCloudDialogflowV2ListKnowledgeBasesResponse++; if (buildCounterGoogleCloudDialogflowV2ListKnowledgeBasesResponse < 3) { - checkUnnamed83(o.knowledgeBases!); + checkUnnamed85(o.knowledgeBases!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -6230,12 +6301,12 @@ void checkGoogleCloudDialogflowV2ListKnowledgeBasesResponse( buildCounterGoogleCloudDialogflowV2ListKnowledgeBasesResponse--; } -core.List buildUnnamed84() => [ +core.List buildUnnamed86() => [ buildGoogleCloudDialogflowV2Message(), buildGoogleCloudDialogflowV2Message(), ]; -void checkUnnamed84(core.List o) { +void checkUnnamed86(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDialogflowV2Message(o[0]); checkGoogleCloudDialogflowV2Message(o[1]); @@ -6247,7 +6318,7 @@ api.GoogleCloudDialogflowV2ListMessagesResponse final o = api.GoogleCloudDialogflowV2ListMessagesResponse(); buildCounterGoogleCloudDialogflowV2ListMessagesResponse++; if (buildCounterGoogleCloudDialogflowV2ListMessagesResponse < 3) { - o.messages = buildUnnamed84(); + o.messages = buildUnnamed86(); o.nextPageToken = 'foo'; } buildCounterGoogleCloudDialogflowV2ListMessagesResponse--; @@ -6258,7 +6329,7 @@ void checkGoogleCloudDialogflowV2ListMessagesResponse( api.GoogleCloudDialogflowV2ListMessagesResponse o) { buildCounterGoogleCloudDialogflowV2ListMessagesResponse++; if (buildCounterGoogleCloudDialogflowV2ListMessagesResponse < 3) { - checkUnnamed84(o.messages!); + checkUnnamed86(o.messages!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -6267,12 +6338,12 @@ void checkGoogleCloudDialogflowV2ListMessagesResponse( buildCounterGoogleCloudDialogflowV2ListMessagesResponse--; } -core.List buildUnnamed85() => [ +core.List buildUnnamed87() => [ buildGoogleCloudDialogflowV2Participant(), buildGoogleCloudDialogflowV2Participant(), ]; -void checkUnnamed85(core.List o) { +void checkUnnamed87(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDialogflowV2Participant(o[0]); checkGoogleCloudDialogflowV2Participant(o[1]); @@ -6285,7 +6356,7 @@ api.GoogleCloudDialogflowV2ListParticipantsResponse buildCounterGoogleCloudDialogflowV2ListParticipantsResponse++; if (buildCounterGoogleCloudDialogflowV2ListParticipantsResponse < 3) { o.nextPageToken = 'foo'; - o.participants = buildUnnamed85(); + o.participants = buildUnnamed87(); } buildCounterGoogleCloudDialogflowV2ListParticipantsResponse--; return o; @@ -6299,17 +6370,17 @@ void 
checkGoogleCloudDialogflowV2ListParticipantsResponse( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed85(o.participants!); + checkUnnamed87(o.participants!); } buildCounterGoogleCloudDialogflowV2ListParticipantsResponse--; } -core.List buildUnnamed86() => [ +core.List buildUnnamed88() => [ buildGoogleCloudDialogflowV2SessionEntityType(), buildGoogleCloudDialogflowV2SessionEntityType(), ]; -void checkUnnamed86(core.List o) { +void checkUnnamed88(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDialogflowV2SessionEntityType(o[0]); checkGoogleCloudDialogflowV2SessionEntityType(o[1]); @@ -6322,7 +6393,7 @@ api.GoogleCloudDialogflowV2ListSessionEntityTypesResponse buildCounterGoogleCloudDialogflowV2ListSessionEntityTypesResponse++; if (buildCounterGoogleCloudDialogflowV2ListSessionEntityTypesResponse < 3) { o.nextPageToken = 'foo'; - o.sessionEntityTypes = buildUnnamed86(); + o.sessionEntityTypes = buildUnnamed88(); } buildCounterGoogleCloudDialogflowV2ListSessionEntityTypesResponse--; return o; @@ -6336,17 +6407,17 @@ void checkGoogleCloudDialogflowV2ListSessionEntityTypesResponse( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed86(o.sessionEntityTypes!); + checkUnnamed88(o.sessionEntityTypes!); } buildCounterGoogleCloudDialogflowV2ListSessionEntityTypesResponse--; } -core.List buildUnnamed87() => [ +core.List buildUnnamed89() => [ buildGoogleCloudDialogflowV2Version(), buildGoogleCloudDialogflowV2Version(), ]; -void checkUnnamed87(core.List o) { +void checkUnnamed89(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDialogflowV2Version(o[0]); checkGoogleCloudDialogflowV2Version(o[1]); @@ -6359,7 +6430,7 @@ api.GoogleCloudDialogflowV2ListVersionsResponse buildCounterGoogleCloudDialogflowV2ListVersionsResponse++; if (buildCounterGoogleCloudDialogflowV2ListVersionsResponse < 3) { o.nextPageToken = 'foo'; - o.versions = buildUnnamed87(); + o.versions = buildUnnamed89(); } buildCounterGoogleCloudDialogflowV2ListVersionsResponse--; return o; @@ -6373,7 +6444,7 @@ void checkGoogleCloudDialogflowV2ListVersionsResponse( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed87(o.versions!); + checkUnnamed89(o.versions!); } buildCounterGoogleCloudDialogflowV2ListVersionsResponse--; } @@ -6455,12 +6526,12 @@ void checkGoogleCloudDialogflowV2Message(api.GoogleCloudDialogflowV2Message o) { buildCounterGoogleCloudDialogflowV2Message--; } -core.List buildUnnamed88() => [ +core.List buildUnnamed90() => [ buildGoogleCloudDialogflowV2AnnotatedMessagePart(), buildGoogleCloudDialogflowV2AnnotatedMessagePart(), ]; -void checkUnnamed88( +void checkUnnamed90( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDialogflowV2AnnotatedMessagePart(o[0]); @@ -6474,7 +6545,7 @@ api.GoogleCloudDialogflowV2MessageAnnotation buildCounterGoogleCloudDialogflowV2MessageAnnotation++; if (buildCounterGoogleCloudDialogflowV2MessageAnnotation < 3) { o.containEntities = true; - o.parts = buildUnnamed88(); + o.parts = buildUnnamed90(); } buildCounterGoogleCloudDialogflowV2MessageAnnotation--; return o; @@ -6485,7 +6556,7 @@ void checkGoogleCloudDialogflowV2MessageAnnotation( buildCounterGoogleCloudDialogflowV2MessageAnnotation++; if (buildCounterGoogleCloudDialogflowV2MessageAnnotation < 3) { unittest.expect(o.containEntities!, unittest.isTrue); - checkUnnamed88(o.parts!); + checkUnnamed90(o.parts!); } buildCounterGoogleCloudDialogflowV2MessageAnnotation--; } @@ -6617,12 +6688,12 @@ void checkGoogleCloudDialogflowV2OutputAudioConfig( 
buildCounterGoogleCloudDialogflowV2OutputAudioConfig--; } -core.Map buildUnnamed89() => { +core.Map buildUnnamed91() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed89(core.Map o) { +void checkUnnamed91(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -6640,7 +6711,7 @@ api.GoogleCloudDialogflowV2Participant final o = api.GoogleCloudDialogflowV2Participant(); buildCounterGoogleCloudDialogflowV2Participant++; if (buildCounterGoogleCloudDialogflowV2Participant < 3) { - o.documentsMetadataFilters = buildUnnamed89(); + o.documentsMetadataFilters = buildUnnamed91(); o.name = 'foo'; o.obfuscatedExternalUserId = 'foo'; o.role = 'foo'; @@ -6654,7 +6725,7 @@ void checkGoogleCloudDialogflowV2Participant( api.GoogleCloudDialogflowV2Participant o) { buildCounterGoogleCloudDialogflowV2Participant++; if (buildCounterGoogleCloudDialogflowV2Participant < 3) { - checkUnnamed89(o.documentsMetadataFilters!); + checkUnnamed91(o.documentsMetadataFilters!); unittest.expect( o.name!, unittest.equals('foo'), @@ -6699,18 +6770,18 @@ void checkGoogleCloudDialogflowV2QueryInput( buildCounterGoogleCloudDialogflowV2QueryInput--; } -core.List buildUnnamed90() => [ +core.List buildUnnamed92() => [ buildGoogleCloudDialogflowV2Context(), buildGoogleCloudDialogflowV2Context(), ]; -void checkUnnamed90(core.List o) { +void checkUnnamed92(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDialogflowV2Context(o[0]); checkGoogleCloudDialogflowV2Context(o[1]); } -core.Map buildUnnamed91() => { +core.Map buildUnnamed93() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -6723,55 +6794,55 @@ core.Map buildUnnamed91() => { }, }; -void checkUnnamed91(core.Map o) { +void checkUnnamed93(core.Map o) { unittest.expect(o, unittest.hasLength(2)); - var casted10 = (o['x']!) as core.Map; - unittest.expect(casted10, unittest.hasLength(3)); + var casted12 = (o['x']!) as core.Map; + unittest.expect(casted12, unittest.hasLength(3)); unittest.expect( - casted10['list'], + casted12['list'], unittest.equals([1, 2, 3]), ); unittest.expect( - casted10['bool'], + casted12['bool'], unittest.equals(true), ); unittest.expect( - casted10['string'], + casted12['string'], unittest.equals('foo'), ); - var casted11 = (o['y']!) as core.Map; - unittest.expect(casted11, unittest.hasLength(3)); + var casted13 = (o['y']!) 
as core.Map; + unittest.expect(casted13, unittest.hasLength(3)); unittest.expect( - casted11['list'], + casted13['list'], unittest.equals([1, 2, 3]), ); unittest.expect( - casted11['bool'], + casted13['bool'], unittest.equals(true), ); unittest.expect( - casted11['string'], + casted13['string'], unittest.equals('foo'), ); } -core.List buildUnnamed92() => [ +core.List buildUnnamed94() => [ buildGoogleCloudDialogflowV2SessionEntityType(), buildGoogleCloudDialogflowV2SessionEntityType(), ]; -void checkUnnamed92(core.List o) { +void checkUnnamed94(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDialogflowV2SessionEntityType(o[0]); checkGoogleCloudDialogflowV2SessionEntityType(o[1]); } -core.Map buildUnnamed93() => { +core.Map buildUnnamed95() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed93(core.Map o) { +void checkUnnamed95(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -6789,16 +6860,16 @@ api.GoogleCloudDialogflowV2QueryParameters final o = api.GoogleCloudDialogflowV2QueryParameters(); buildCounterGoogleCloudDialogflowV2QueryParameters++; if (buildCounterGoogleCloudDialogflowV2QueryParameters < 3) { - o.contexts = buildUnnamed90(); + o.contexts = buildUnnamed92(); o.geoLocation = buildGoogleTypeLatLng(); - o.payload = buildUnnamed91(); + o.payload = buildUnnamed93(); o.platform = 'foo'; o.resetContexts = true; o.sentimentAnalysisRequestConfig = buildGoogleCloudDialogflowV2SentimentAnalysisRequestConfig(); - o.sessionEntityTypes = buildUnnamed92(); + o.sessionEntityTypes = buildUnnamed94(); o.timeZone = 'foo'; - o.webhookHeaders = buildUnnamed93(); + o.webhookHeaders = buildUnnamed95(); } buildCounterGoogleCloudDialogflowV2QueryParameters--; return o; @@ -6808,9 +6879,9 @@ void checkGoogleCloudDialogflowV2QueryParameters( api.GoogleCloudDialogflowV2QueryParameters o) { buildCounterGoogleCloudDialogflowV2QueryParameters++; if (buildCounterGoogleCloudDialogflowV2QueryParameters < 3) { - checkUnnamed90(o.contexts!); + checkUnnamed92(o.contexts!); checkGoogleTypeLatLng(o.geoLocation!); - checkUnnamed91(o.payload!); + checkUnnamed93(o.payload!); unittest.expect( o.platform!, unittest.equals('foo'), @@ -6818,17 +6889,17 @@ void checkGoogleCloudDialogflowV2QueryParameters( unittest.expect(o.resetContexts!, unittest.isTrue); checkGoogleCloudDialogflowV2SentimentAnalysisRequestConfig( o.sentimentAnalysisRequestConfig!); - checkUnnamed92(o.sessionEntityTypes!); + checkUnnamed94(o.sessionEntityTypes!); unittest.expect( o.timeZone!, unittest.equals('foo'), ); - checkUnnamed93(o.webhookHeaders!); + checkUnnamed95(o.webhookHeaders!); } buildCounterGoogleCloudDialogflowV2QueryParameters--; } -core.Map buildUnnamed94() => { +core.Map buildUnnamed96() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -6841,61 +6912,61 @@ core.Map buildUnnamed94() => { }, }; -void checkUnnamed94(core.Map o) { +void checkUnnamed96(core.Map o) { unittest.expect(o, unittest.hasLength(2)); - var casted12 = (o['x']!) as core.Map; - unittest.expect(casted12, unittest.hasLength(3)); + var casted14 = (o['x']!) as core.Map; + unittest.expect(casted14, unittest.hasLength(3)); unittest.expect( - casted12['list'], + casted14['list'], unittest.equals([1, 2, 3]), ); unittest.expect( - casted12['bool'], + casted14['bool'], unittest.equals(true), ); unittest.expect( - casted12['string'], + casted14['string'], unittest.equals('foo'), ); - var casted13 = (o['y']!) as core.Map; - unittest.expect(casted13, unittest.hasLength(3)); + var casted15 = (o['y']!) 
as core.Map; + unittest.expect(casted15, unittest.hasLength(3)); unittest.expect( - casted13['list'], + casted15['list'], unittest.equals([1, 2, 3]), ); unittest.expect( - casted13['bool'], + casted15['bool'], unittest.equals(true), ); unittest.expect( - casted13['string'], + casted15['string'], unittest.equals('foo'), ); } -core.List buildUnnamed95() => [ +core.List buildUnnamed97() => [ buildGoogleCloudDialogflowV2IntentMessage(), buildGoogleCloudDialogflowV2IntentMessage(), ]; -void checkUnnamed95(core.List o) { +void checkUnnamed97(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDialogflowV2IntentMessage(o[0]); checkGoogleCloudDialogflowV2IntentMessage(o[1]); } -core.List buildUnnamed96() => [ +core.List buildUnnamed98() => [ buildGoogleCloudDialogflowV2Context(), buildGoogleCloudDialogflowV2Context(), ]; -void checkUnnamed96(core.List o) { +void checkUnnamed98(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDialogflowV2Context(o[0]); checkGoogleCloudDialogflowV2Context(o[1]); } -core.Map buildUnnamed97() => { +core.Map buildUnnamed99() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -6908,39 +6979,39 @@ core.Map buildUnnamed97() => { }, }; -void checkUnnamed97(core.Map o) { +void checkUnnamed99(core.Map o) { unittest.expect(o, unittest.hasLength(2)); - var casted14 = (o['x']!) as core.Map; - unittest.expect(casted14, unittest.hasLength(3)); + var casted16 = (o['x']!) as core.Map; + unittest.expect(casted16, unittest.hasLength(3)); unittest.expect( - casted14['list'], + casted16['list'], unittest.equals([1, 2, 3]), ); unittest.expect( - casted14['bool'], + casted16['bool'], unittest.equals(true), ); unittest.expect( - casted14['string'], + casted16['string'], unittest.equals('foo'), ); - var casted15 = (o['y']!) as core.Map; - unittest.expect(casted15, unittest.hasLength(3)); + var casted17 = (o['y']!) as core.Map; + unittest.expect(casted17, unittest.hasLength(3)); unittest.expect( - casted15['list'], + casted17['list'], unittest.equals([1, 2, 3]), ); unittest.expect( - casted15['bool'], + casted17['bool'], unittest.equals(true), ); unittest.expect( - casted15['string'], + casted17['string'], unittest.equals('foo'), ); } -core.Map buildUnnamed98() => { +core.Map buildUnnamed100() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -6953,34 +7024,34 @@ core.Map buildUnnamed98() => { }, }; -void checkUnnamed98(core.Map o) { +void checkUnnamed100(core.Map o) { unittest.expect(o, unittest.hasLength(2)); - var casted16 = (o['x']!) as core.Map; - unittest.expect(casted16, unittest.hasLength(3)); + var casted18 = (o['x']!) as core.Map; + unittest.expect(casted18, unittest.hasLength(3)); unittest.expect( - casted16['list'], + casted18['list'], unittest.equals([1, 2, 3]), ); unittest.expect( - casted16['bool'], + casted18['bool'], unittest.equals(true), ); unittest.expect( - casted16['string'], + casted18['string'], unittest.equals('foo'), ); - var casted17 = (o['y']!) as core.Map; - unittest.expect(casted17, unittest.hasLength(3)); + var casted19 = (o['y']!) 
as core.Map; + unittest.expect(casted19, unittest.hasLength(3)); unittest.expect( - casted17['list'], + casted19['list'], unittest.equals([1, 2, 3]), ); unittest.expect( - casted17['bool'], + casted19['bool'], unittest.equals(true), ); unittest.expect( - casted17['string'], + casted19['string'], unittest.equals('foo'), ); } @@ -6994,19 +7065,19 @@ api.GoogleCloudDialogflowV2QueryResult o.action = 'foo'; o.allRequiredParamsPresent = true; o.cancelsSlotFilling = true; - o.diagnosticInfo = buildUnnamed94(); - o.fulfillmentMessages = buildUnnamed95(); + o.diagnosticInfo = buildUnnamed96(); + o.fulfillmentMessages = buildUnnamed97(); o.fulfillmentText = 'foo'; o.intent = buildGoogleCloudDialogflowV2Intent(); o.intentDetectionConfidence = 42.0; o.languageCode = 'foo'; - o.outputContexts = buildUnnamed96(); - o.parameters = buildUnnamed97(); + o.outputContexts = buildUnnamed98(); + o.parameters = buildUnnamed99(); o.queryText = 'foo'; o.sentimentAnalysisResult = buildGoogleCloudDialogflowV2SentimentAnalysisResult(); o.speechRecognitionConfidence = 42.0; - o.webhookPayload = buildUnnamed98(); + o.webhookPayload = buildUnnamed100(); o.webhookSource = 'foo'; } buildCounterGoogleCloudDialogflowV2QueryResult--; @@ -7023,8 +7094,8 @@ void checkGoogleCloudDialogflowV2QueryResult( ); unittest.expect(o.allRequiredParamsPresent!, unittest.isTrue); unittest.expect(o.cancelsSlotFilling!, unittest.isTrue); - checkUnnamed94(o.diagnosticInfo!); - checkUnnamed95(o.fulfillmentMessages!); + checkUnnamed96(o.diagnosticInfo!); + checkUnnamed97(o.fulfillmentMessages!); unittest.expect( o.fulfillmentText!, unittest.equals('foo'), @@ -7038,8 +7109,8 @@ void checkGoogleCloudDialogflowV2QueryResult( o.languageCode!, unittest.equals('foo'), ); - checkUnnamed96(o.outputContexts!); - checkUnnamed97(o.parameters!); + checkUnnamed98(o.outputContexts!); + checkUnnamed99(o.parameters!); unittest.expect( o.queryText!, unittest.equals('foo'), @@ -7050,7 +7121,7 @@ void checkGoogleCloudDialogflowV2QueryResult( o.speechRecognitionConfidence!, unittest.equals(42.0), ); - checkUnnamed98(o.webhookPayload!); + checkUnnamed100(o.webhookPayload!); unittest.expect( o.webhookSource!, unittest.equals('foo'), @@ -7116,12 +7187,12 @@ void checkGoogleCloudDialogflowV2RestoreAgentRequest( buildCounterGoogleCloudDialogflowV2RestoreAgentRequest--; } -core.List buildUnnamed99() => [ +core.List buildUnnamed101() => [ buildGoogleCloudDialogflowV2Agent(), buildGoogleCloudDialogflowV2Agent(), ]; -void checkUnnamed99(core.List o) { +void checkUnnamed101(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDialogflowV2Agent(o[0]); checkGoogleCloudDialogflowV2Agent(o[1]); @@ -7133,7 +7204,7 @@ api.GoogleCloudDialogflowV2SearchAgentsResponse final o = api.GoogleCloudDialogflowV2SearchAgentsResponse(); buildCounterGoogleCloudDialogflowV2SearchAgentsResponse++; if (buildCounterGoogleCloudDialogflowV2SearchAgentsResponse < 3) { - o.agents = buildUnnamed99(); + o.agents = buildUnnamed101(); o.nextPageToken = 'foo'; } buildCounterGoogleCloudDialogflowV2SearchAgentsResponse--; @@ -7144,7 +7215,7 @@ void checkGoogleCloudDialogflowV2SearchAgentsResponse( api.GoogleCloudDialogflowV2SearchAgentsResponse o) { buildCounterGoogleCloudDialogflowV2SearchAgentsResponse++; if (buildCounterGoogleCloudDialogflowV2SearchAgentsResponse < 3) { - checkUnnamed99(o.agents!); + checkUnnamed101(o.agents!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -7154,12 +7225,12 @@ void checkGoogleCloudDialogflowV2SearchAgentsResponse( } core.List - 
buildUnnamed100() => [ + buildUnnamed102() => [ buildGoogleCloudDialogflowV2SearchKnowledgeAnswerAnswerSource(), buildGoogleCloudDialogflowV2SearchKnowledgeAnswerAnswerSource(), ]; -void checkUnnamed100( +void checkUnnamed102( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDialogflowV2SearchKnowledgeAnswerAnswerSource(o[0]); @@ -7174,7 +7245,7 @@ api.GoogleCloudDialogflowV2SearchKnowledgeAnswer if (buildCounterGoogleCloudDialogflowV2SearchKnowledgeAnswer < 3) { o.answer = 'foo'; o.answerRecord = 'foo'; - o.answerSources = buildUnnamed100(); + o.answerSources = buildUnnamed102(); o.answerType = 'foo'; } buildCounterGoogleCloudDialogflowV2SearchKnowledgeAnswer--; @@ -7193,7 +7264,7 @@ void checkGoogleCloudDialogflowV2SearchKnowledgeAnswer( o.answerRecord!, unittest.equals('foo'), ); - checkUnnamed100(o.answerSources!); + checkUnnamed102(o.answerSources!); unittest.expect( o.answerType!, unittest.equals('foo'), @@ -7202,6 +7273,51 @@ void checkGoogleCloudDialogflowV2SearchKnowledgeAnswer( buildCounterGoogleCloudDialogflowV2SearchKnowledgeAnswer--; } +core.Map buildUnnamed103() => { + 'x': { + 'list': [1, 2, 3], + 'bool': true, + 'string': 'foo' + }, + 'y': { + 'list': [1, 2, 3], + 'bool': true, + 'string': 'foo' + }, + }; + +void checkUnnamed103(core.Map o) { + unittest.expect(o, unittest.hasLength(2)); + var casted20 = (o['x']!) as core.Map; + unittest.expect(casted20, unittest.hasLength(3)); + unittest.expect( + casted20['list'], + unittest.equals([1, 2, 3]), + ); + unittest.expect( + casted20['bool'], + unittest.equals(true), + ); + unittest.expect( + casted20['string'], + unittest.equals('foo'), + ); + var casted21 = (o['y']!) as core.Map; + unittest.expect(casted21, unittest.hasLength(3)); + unittest.expect( + casted21['list'], + unittest.equals([1, 2, 3]), + ); + unittest.expect( + casted21['bool'], + unittest.equals(true), + ); + unittest.expect( + casted21['string'], + unittest.equals('foo'), + ); +} + core.int buildCounterGoogleCloudDialogflowV2SearchKnowledgeAnswerAnswerSource = 0; api.GoogleCloudDialogflowV2SearchKnowledgeAnswerAnswerSource @@ -7210,6 +7326,7 @@ api.GoogleCloudDialogflowV2SearchKnowledgeAnswerAnswerSource buildCounterGoogleCloudDialogflowV2SearchKnowledgeAnswerAnswerSource++; if (buildCounterGoogleCloudDialogflowV2SearchKnowledgeAnswerAnswerSource < 3) { + o.metadata = buildUnnamed103(); o.snippet = 'foo'; o.title = 'foo'; o.uri = 'foo'; @@ -7223,6 +7340,7 @@ void checkGoogleCloudDialogflowV2SearchKnowledgeAnswerAnswerSource( buildCounterGoogleCloudDialogflowV2SearchKnowledgeAnswerAnswerSource++; if (buildCounterGoogleCloudDialogflowV2SearchKnowledgeAnswerAnswerSource < 3) { + checkUnnamed103(o.metadata!); unittest.expect( o.snippet!, unittest.equals('foo'), @@ -7239,17 +7357,67 @@ void checkGoogleCloudDialogflowV2SearchKnowledgeAnswerAnswerSource( buildCounterGoogleCloudDialogflowV2SearchKnowledgeAnswerAnswerSource--; } -core.int buildCounterGoogleCloudDialogflowV2SearchKnowledgeRequest = 0; -api.GoogleCloudDialogflowV2SearchKnowledgeRequest - buildGoogleCloudDialogflowV2SearchKnowledgeRequest() { - final o = api.GoogleCloudDialogflowV2SearchKnowledgeRequest(); - buildCounterGoogleCloudDialogflowV2SearchKnowledgeRequest++; - if (buildCounterGoogleCloudDialogflowV2SearchKnowledgeRequest < 3) { - o.conversation = 'foo'; - o.conversationProfile = 'foo'; +core.Map buildUnnamed104() => { + 'x': { + 'list': [1, 2, 3], + 'bool': true, + 'string': 'foo' + }, + 'y': { + 'list': [1, 2, 3], + 'bool': true, + 'string': 'foo' + }, + }; + +void 
checkUnnamed104(core.Map o) { + unittest.expect(o, unittest.hasLength(2)); + var casted22 = (o['x']!) as core.Map; + unittest.expect(casted22, unittest.hasLength(3)); + unittest.expect( + casted22['list'], + unittest.equals([1, 2, 3]), + ); + unittest.expect( + casted22['bool'], + unittest.equals(true), + ); + unittest.expect( + casted22['string'], + unittest.equals('foo'), + ); + var casted23 = (o['y']!) as core.Map; + unittest.expect(casted23, unittest.hasLength(3)); + unittest.expect( + casted23['list'], + unittest.equals([1, 2, 3]), + ); + unittest.expect( + casted23['bool'], + unittest.equals(true), + ); + unittest.expect( + casted23['string'], + unittest.equals('foo'), + ); +} + +core.int buildCounterGoogleCloudDialogflowV2SearchKnowledgeRequest = 0; +api.GoogleCloudDialogflowV2SearchKnowledgeRequest + buildGoogleCloudDialogflowV2SearchKnowledgeRequest() { + final o = api.GoogleCloudDialogflowV2SearchKnowledgeRequest(); + buildCounterGoogleCloudDialogflowV2SearchKnowledgeRequest++; + if (buildCounterGoogleCloudDialogflowV2SearchKnowledgeRequest < 3) { + o.conversation = 'foo'; + o.conversationProfile = 'foo'; + o.endUserMetadata = buildUnnamed104(); + o.exactSearch = true; o.latestMessage = 'foo'; o.parent = 'foo'; o.query = buildGoogleCloudDialogflowV2TextInput(); + o.querySource = 'foo'; + o.searchConfig = + buildGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfig(); o.sessionId = 'foo'; } buildCounterGoogleCloudDialogflowV2SearchKnowledgeRequest--; @@ -7268,6 +7436,8 @@ void checkGoogleCloudDialogflowV2SearchKnowledgeRequest( o.conversationProfile!, unittest.equals('foo'), ); + checkUnnamed104(o.endUserMetadata!); + unittest.expect(o.exactSearch!, unittest.isTrue); unittest.expect( o.latestMessage!, unittest.equals('foo'), @@ -7277,6 +7447,12 @@ void checkGoogleCloudDialogflowV2SearchKnowledgeRequest( unittest.equals('foo'), ); checkGoogleCloudDialogflowV2TextInput(o.query!); + unittest.expect( + o.querySource!, + unittest.equals('foo'), + ); + checkGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfig( + o.searchConfig!); unittest.expect( o.sessionId!, unittest.equals('foo'), @@ -7285,13 +7461,375 @@ void checkGoogleCloudDialogflowV2SearchKnowledgeRequest( buildCounterGoogleCloudDialogflowV2SearchKnowledgeRequest--; } -core.List buildUnnamed101() => +core.List< + api.GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecs> + buildUnnamed105() => [ + buildGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecs(), + buildGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecs(), + ]; + +void checkUnnamed105( + core.List< + api + .GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecs> + o) { + unittest.expect(o, unittest.hasLength(2)); + checkGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecs( + o[0]); + checkGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecs( + o[1]); +} + +core.List< + api + .GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigFilterSpecs> + buildUnnamed106() => [ + buildGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigFilterSpecs(), + buildGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigFilterSpecs(), + ]; + +void checkUnnamed106( + core.List< + api + .GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigFilterSpecs> + o) { + unittest.expect(o, unittest.hasLength(2)); + checkGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigFilterSpecs( + o[0]); + checkGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigFilterSpecs( + o[1]); +} + 
+core.int buildCounterGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfig = + 0; +api.GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfig + buildGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfig() { + final o = api.GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfig(); + buildCounterGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfig++; + if (buildCounterGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfig < + 3) { + o.boostSpecs = buildUnnamed105(); + o.filterSpecs = buildUnnamed106(); + } + buildCounterGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfig--; + return o; +} + +void checkGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfig( + api.GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfig o) { + buildCounterGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfig++; + if (buildCounterGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfig < + 3) { + checkUnnamed105(o.boostSpecs!); + checkUnnamed106(o.filterSpecs!); + } + buildCounterGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfig--; +} + +core.List buildUnnamed107() => [ + 'foo', + 'foo', + ]; + +void checkUnnamed107(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o[0], + unittest.equals('foo'), + ); + unittest.expect( + o[1], + unittest.equals('foo'), + ); +} + +core.List< + api + .GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpec> + buildUnnamed108() => [ + buildGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpec(), + buildGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpec(), + ]; + +void checkUnnamed108( + core.List< + api + .GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpec> + o) { + unittest.expect(o, unittest.hasLength(2)); + checkGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpec( + o[0]); + checkGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpec( + o[1]); +} + +core.int + buildCounterGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecs = + 0; +api.GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecs + buildGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecs() { + final o = + api.GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecs(); + buildCounterGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecs++; + if (buildCounterGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecs < + 3) { + o.dataStores = buildUnnamed107(); + o.spec = buildUnnamed108(); + } + buildCounterGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecs--; + return o; +} + +void checkGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecs( + api.GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecs o) { + buildCounterGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecs++; + if (buildCounterGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecs < + 3) { + checkUnnamed107(o.dataStores!); + checkUnnamed108(o.spec!); + } + buildCounterGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecs--; +} + +core.List< + api + .GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpecConditionBoostSpec> + buildUnnamed109() => [ + buildGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpecConditionBoostSpec(), + buildGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpecConditionBoostSpec(), + 
]; + +void checkUnnamed109( + core.List< + api + .GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpecConditionBoostSpec> + o) { + unittest.expect(o, unittest.hasLength(2)); + checkGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpecConditionBoostSpec( + o[0]); + checkGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpecConditionBoostSpec( + o[1]); +} + +core.int + buildCounterGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpec = + 0; +api.GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpec + buildGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpec() { + final o = api + .GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpec(); + buildCounterGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpec++; + if (buildCounterGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpec < + 3) { + o.conditionBoostSpecs = buildUnnamed109(); + } + buildCounterGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpec--; + return o; +} + +void checkGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpec( + api.GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpec + o) { + buildCounterGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpec++; + if (buildCounterGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpec < + 3) { + checkUnnamed109(o.conditionBoostSpecs!); + } + buildCounterGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpec--; +} + +core.int + buildCounterGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpecConditionBoostSpec = + 0; +api.GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpecConditionBoostSpec + buildGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpecConditionBoostSpec() { + final o = api + .GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpecConditionBoostSpec(); + buildCounterGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpecConditionBoostSpec++; + if (buildCounterGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpecConditionBoostSpec < + 3) { + o.boost = 42.0; + o.boostControlSpec = + buildGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpecConditionBoostSpecBoostControlSpec(); + o.condition = 'foo'; + } + buildCounterGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpecConditionBoostSpec--; + return o; +} + +void checkGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpecConditionBoostSpec( + api.GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpecConditionBoostSpec + o) { + buildCounterGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpecConditionBoostSpec++; + if (buildCounterGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpecConditionBoostSpec < + 3) { + unittest.expect( + o.boost!, + unittest.equals(42.0), + ); + checkGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpecConditionBoostSpecBoostControlSpec( + o.boostControlSpec!); + unittest.expect( + o.condition!, + unittest.equals('foo'), + ); + } + buildCounterGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpecConditionBoostSpec--; +} + +core.List< + api + 
.GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpecConditionBoostSpecBoostControlSpecControlPoint> + buildUnnamed110() => [ + buildGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpecConditionBoostSpecBoostControlSpecControlPoint(), + buildGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpecConditionBoostSpecBoostControlSpecControlPoint(), + ]; + +void checkUnnamed110( + core.List< + api + .GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpecConditionBoostSpecBoostControlSpecControlPoint> + o) { + unittest.expect(o, unittest.hasLength(2)); + checkGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpecConditionBoostSpecBoostControlSpecControlPoint( + o[0]); + checkGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpecConditionBoostSpecBoostControlSpecControlPoint( + o[1]); +} + +core.int + buildCounterGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpecConditionBoostSpecBoostControlSpec = + 0; +api.GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpecConditionBoostSpecBoostControlSpec + buildGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpecConditionBoostSpecBoostControlSpec() { + final o = api + .GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpecConditionBoostSpecBoostControlSpec(); + buildCounterGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpecConditionBoostSpecBoostControlSpec++; + if (buildCounterGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpecConditionBoostSpecBoostControlSpec < + 3) { + o.attributeType = 'foo'; + o.controlPoints = buildUnnamed110(); + o.fieldName = 'foo'; + o.interpolationType = 'foo'; + } + buildCounterGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpecConditionBoostSpecBoostControlSpec--; + return o; +} + +void checkGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpecConditionBoostSpecBoostControlSpec( + api.GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpecConditionBoostSpecBoostControlSpec + o) { + buildCounterGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpecConditionBoostSpecBoostControlSpec++; + if (buildCounterGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpecConditionBoostSpecBoostControlSpec < + 3) { + unittest.expect( + o.attributeType!, + unittest.equals('foo'), + ); + checkUnnamed110(o.controlPoints!); + unittest.expect( + o.fieldName!, + unittest.equals('foo'), + ); + unittest.expect( + o.interpolationType!, + unittest.equals('foo'), + ); + } + buildCounterGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpecConditionBoostSpecBoostControlSpec--; +} + +core.int + buildCounterGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpecConditionBoostSpecBoostControlSpecControlPoint = + 0; +api.GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpecConditionBoostSpecBoostControlSpecControlPoint + buildGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpecConditionBoostSpecBoostControlSpecControlPoint() { + final o = api + .GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpecConditionBoostSpecBoostControlSpecControlPoint(); + 
buildCounterGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpecConditionBoostSpecBoostControlSpecControlPoint++; + if (buildCounterGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpecConditionBoostSpecBoostControlSpecControlPoint < + 3) { + o.attributeValue = 'foo'; + o.boostAmount = 42.0; + } + buildCounterGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpecConditionBoostSpecBoostControlSpecControlPoint--; + return o; +} + +void checkGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpecConditionBoostSpecBoostControlSpecControlPoint( + api.GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpecConditionBoostSpecBoostControlSpecControlPoint + o) { + buildCounterGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpecConditionBoostSpecBoostControlSpecControlPoint++; + if (buildCounterGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpecConditionBoostSpecBoostControlSpecControlPoint < + 3) { + unittest.expect( + o.attributeValue!, + unittest.equals('foo'), + ); + unittest.expect( + o.boostAmount!, + unittest.equals(42.0), + ); + } + buildCounterGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpecConditionBoostSpecBoostControlSpecControlPoint--; +} + +core.List buildUnnamed111() => [ + 'foo', + 'foo', + ]; + +void checkUnnamed111(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o[0], + unittest.equals('foo'), + ); + unittest.expect( + o[1], + unittest.equals('foo'), + ); +} + +core.int + buildCounterGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigFilterSpecs = + 0; +api.GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigFilterSpecs + buildGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigFilterSpecs() { + final o = api + .GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigFilterSpecs(); + buildCounterGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigFilterSpecs++; + if (buildCounterGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigFilterSpecs < + 3) { + o.dataStores = buildUnnamed111(); + o.filter = 'foo'; + } + buildCounterGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigFilterSpecs--; + return o; +} + +void checkGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigFilterSpecs( + api.GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigFilterSpecs + o) { + buildCounterGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigFilterSpecs++; + if (buildCounterGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigFilterSpecs < + 3) { + checkUnnamed111(o.dataStores!); + unittest.expect( + o.filter!, + unittest.equals('foo'), + ); + } + buildCounterGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigFilterSpecs--; +} + +core.List buildUnnamed112() => [ buildGoogleCloudDialogflowV2SearchKnowledgeAnswer(), buildGoogleCloudDialogflowV2SearchKnowledgeAnswer(), ]; -void checkUnnamed101( +void checkUnnamed112( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDialogflowV2SearchKnowledgeAnswer(o[0]); @@ -7304,7 +7842,7 @@ api.GoogleCloudDialogflowV2SearchKnowledgeResponse final o = api.GoogleCloudDialogflowV2SearchKnowledgeResponse(); buildCounterGoogleCloudDialogflowV2SearchKnowledgeResponse++; if (buildCounterGoogleCloudDialogflowV2SearchKnowledgeResponse < 3) { - o.answers = buildUnnamed101(); + o.answers = buildUnnamed112(); o.rewrittenQuery = 'foo'; } 
buildCounterGoogleCloudDialogflowV2SearchKnowledgeResponse--; @@ -7315,7 +7853,7 @@ void checkGoogleCloudDialogflowV2SearchKnowledgeResponse( api.GoogleCloudDialogflowV2SearchKnowledgeResponse o) { buildCounterGoogleCloudDialogflowV2SearchKnowledgeResponse++; if (buildCounterGoogleCloudDialogflowV2SearchKnowledgeResponse < 3) { - checkUnnamed101(o.answers!); + checkUnnamed112(o.answers!); unittest.expect( o.rewrittenQuery!, unittest.equals('foo'), @@ -7394,12 +7932,12 @@ void checkGoogleCloudDialogflowV2SentimentAnalysisResult( buildCounterGoogleCloudDialogflowV2SentimentAnalysisResult--; } -core.List buildUnnamed102() => [ +core.List buildUnnamed113() => [ buildGoogleCloudDialogflowV2EntityTypeEntity(), buildGoogleCloudDialogflowV2EntityTypeEntity(), ]; -void checkUnnamed102(core.List o) { +void checkUnnamed113(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDialogflowV2EntityTypeEntity(o[0]); checkGoogleCloudDialogflowV2EntityTypeEntity(o[1]); @@ -7411,7 +7949,7 @@ api.GoogleCloudDialogflowV2SessionEntityType final o = api.GoogleCloudDialogflowV2SessionEntityType(); buildCounterGoogleCloudDialogflowV2SessionEntityType++; if (buildCounterGoogleCloudDialogflowV2SessionEntityType < 3) { - o.entities = buildUnnamed102(); + o.entities = buildUnnamed113(); o.entityOverrideMode = 'foo'; o.name = 'foo'; } @@ -7423,7 +7961,7 @@ void checkGoogleCloudDialogflowV2SessionEntityType( api.GoogleCloudDialogflowV2SessionEntityType o) { buildCounterGoogleCloudDialogflowV2SessionEntityType++; if (buildCounterGoogleCloudDialogflowV2SessionEntityType < 3) { - checkUnnamed102(o.entities!); + checkUnnamed113(o.entities!); unittest.expect( o.entityOverrideMode!, unittest.equals('foo'), @@ -7502,12 +8040,12 @@ void checkGoogleCloudDialogflowV2SmartReplyAnswer( } core.List - buildUnnamed103() => [ + buildUnnamed114() => [ buildGoogleCloudDialogflowV2SmartReplyMetricsTopNMetrics(), buildGoogleCloudDialogflowV2SmartReplyMetricsTopNMetrics(), ]; -void checkUnnamed103( +void checkUnnamed114( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDialogflowV2SmartReplyMetricsTopNMetrics(o[0]); @@ -7522,7 +8060,7 @@ api.GoogleCloudDialogflowV2SmartReplyMetrics if (buildCounterGoogleCloudDialogflowV2SmartReplyMetrics < 3) { o.allowlistCoverage = 42.0; o.conversationCount = 'foo'; - o.topNMetrics = buildUnnamed103(); + o.topNMetrics = buildUnnamed114(); } buildCounterGoogleCloudDialogflowV2SmartReplyMetrics--; return o; @@ -7540,7 +8078,7 @@ void checkGoogleCloudDialogflowV2SmartReplyMetrics( o.conversationCount!, unittest.equals('foo'), ); - checkUnnamed103(o.topNMetrics!); + checkUnnamed114(o.topNMetrics!); } buildCounterGoogleCloudDialogflowV2SmartReplyMetrics--; } @@ -7598,12 +8136,12 @@ void checkGoogleCloudDialogflowV2SmartReplyModelMetadata( buildCounterGoogleCloudDialogflowV2SmartReplyModelMetadata--; } -core.List buildUnnamed104() => [ +core.List buildUnnamed115() => [ 'foo', 'foo', ]; -void checkUnnamed104(core.List o) { +void checkUnnamed115(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -7622,7 +8160,7 @@ api.GoogleCloudDialogflowV2SpeechContext buildCounterGoogleCloudDialogflowV2SpeechContext++; if (buildCounterGoogleCloudDialogflowV2SpeechContext < 3) { o.boost = 42.0; - o.phrases = buildUnnamed104(); + o.phrases = buildUnnamed115(); } buildCounterGoogleCloudDialogflowV2SpeechContext--; return o; @@ -7636,11 +8174,28 @@ void checkGoogleCloudDialogflowV2SpeechContext( o.boost!, unittest.equals(42.0), ); - 
checkUnnamed104(o.phrases!); + checkUnnamed115(o.phrases!); } buildCounterGoogleCloudDialogflowV2SpeechContext--; } +core.List buildUnnamed116() => [ + 'foo', + 'foo', + ]; + +void checkUnnamed116(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o[0], + unittest.equals('foo'), + ); + unittest.expect( + o[1], + unittest.equals('foo'), + ); +} + core.int buildCounterGoogleCloudDialogflowV2SpeechToTextConfig = 0; api.GoogleCloudDialogflowV2SpeechToTextConfig buildGoogleCloudDialogflowV2SpeechToTextConfig() { @@ -7651,6 +8206,7 @@ api.GoogleCloudDialogflowV2SpeechToTextConfig o.enableWordInfo = true; o.languageCode = 'foo'; o.model = 'foo'; + o.phraseSets = buildUnnamed116(); o.sampleRateHertz = 42; o.speechModelVariant = 'foo'; o.useTimeoutBasedEndpointing = true; @@ -7676,6 +8232,7 @@ void checkGoogleCloudDialogflowV2SpeechToTextConfig( o.model!, unittest.equals('foo'), ); + checkUnnamed116(o.phraseSets!); unittest.expect( o.sampleRateHertz!, unittest.equals(42), @@ -7720,12 +8277,12 @@ void checkGoogleCloudDialogflowV2SuggestArticlesRequest( buildCounterGoogleCloudDialogflowV2SuggestArticlesRequest--; } -core.List buildUnnamed105() => [ +core.List buildUnnamed117() => [ buildGoogleCloudDialogflowV2ArticleAnswer(), buildGoogleCloudDialogflowV2ArticleAnswer(), ]; -void checkUnnamed105(core.List o) { +void checkUnnamed117(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDialogflowV2ArticleAnswer(o[0]); checkGoogleCloudDialogflowV2ArticleAnswer(o[1]); @@ -7737,7 +8294,7 @@ api.GoogleCloudDialogflowV2SuggestArticlesResponse final o = api.GoogleCloudDialogflowV2SuggestArticlesResponse(); buildCounterGoogleCloudDialogflowV2SuggestArticlesResponse++; if (buildCounterGoogleCloudDialogflowV2SuggestArticlesResponse < 3) { - o.articleAnswers = buildUnnamed105(); + o.articleAnswers = buildUnnamed117(); o.contextSize = 42; o.latestMessage = 'foo'; } @@ -7749,7 +8306,7 @@ void checkGoogleCloudDialogflowV2SuggestArticlesResponse( api.GoogleCloudDialogflowV2SuggestArticlesResponse o) { buildCounterGoogleCloudDialogflowV2SuggestArticlesResponse++; if (buildCounterGoogleCloudDialogflowV2SuggestArticlesResponse < 3) { - checkUnnamed105(o.articleAnswers!); + checkUnnamed117(o.articleAnswers!); unittest.expect( o.contextSize!, unittest.equals(42), @@ -7832,12 +8389,12 @@ void checkGoogleCloudDialogflowV2SuggestConversationSummaryResponse( buildCounterGoogleCloudDialogflowV2SuggestConversationSummaryResponse--; } -core.Map buildUnnamed106() => { +core.Map buildUnnamed118() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed106(core.Map o) { +void checkUnnamed118(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -7862,7 +8419,7 @@ api.GoogleCloudDialogflowV2SuggestConversationSummaryResponseSummary o.answerRecord = 'foo'; o.baselineModelVersion = 'foo'; o.text = 'foo'; - o.textSections = buildUnnamed106(); + o.textSections = buildUnnamed118(); } buildCounterGoogleCloudDialogflowV2SuggestConversationSummaryResponseSummary--; return o; @@ -7885,7 +8442,7 @@ void checkGoogleCloudDialogflowV2SuggestConversationSummaryResponseSummary( o.text!, unittest.equals('foo'), ); - checkUnnamed106(o.textSections!); + checkUnnamed118(o.textSections!); } buildCounterGoogleCloudDialogflowV2SuggestConversationSummaryResponseSummary--; } @@ -7921,12 +8478,12 @@ void checkGoogleCloudDialogflowV2SuggestFaqAnswersRequest( buildCounterGoogleCloudDialogflowV2SuggestFaqAnswersRequest--; } -core.List buildUnnamed107() => [ +core.List 
buildUnnamed119() => [ buildGoogleCloudDialogflowV2FaqAnswer(), buildGoogleCloudDialogflowV2FaqAnswer(), ]; -void checkUnnamed107(core.List o) { +void checkUnnamed119(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDialogflowV2FaqAnswer(o[0]); checkGoogleCloudDialogflowV2FaqAnswer(o[1]); @@ -7939,7 +8496,7 @@ api.GoogleCloudDialogflowV2SuggestFaqAnswersResponse buildCounterGoogleCloudDialogflowV2SuggestFaqAnswersResponse++; if (buildCounterGoogleCloudDialogflowV2SuggestFaqAnswersResponse < 3) { o.contextSize = 42; - o.faqAnswers = buildUnnamed107(); + o.faqAnswers = buildUnnamed119(); o.latestMessage = 'foo'; } buildCounterGoogleCloudDialogflowV2SuggestFaqAnswersResponse--; @@ -7954,7 +8511,7 @@ void checkGoogleCloudDialogflowV2SuggestFaqAnswersResponse( o.contextSize!, unittest.equals(42), ); - checkUnnamed107(o.faqAnswers!); + checkUnnamed119(o.faqAnswers!); unittest.expect( o.latestMessage!, unittest.equals('foo'), @@ -8060,12 +8617,12 @@ void checkGoogleCloudDialogflowV2SuggestSmartRepliesRequest( buildCounterGoogleCloudDialogflowV2SuggestSmartRepliesRequest--; } -core.List buildUnnamed108() => [ +core.List buildUnnamed120() => [ buildGoogleCloudDialogflowV2SmartReplyAnswer(), buildGoogleCloudDialogflowV2SmartReplyAnswer(), ]; -void checkUnnamed108(core.List o) { +void checkUnnamed120(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDialogflowV2SmartReplyAnswer(o[0]); checkGoogleCloudDialogflowV2SmartReplyAnswer(o[1]); @@ -8079,7 +8636,7 @@ api.GoogleCloudDialogflowV2SuggestSmartRepliesResponse if (buildCounterGoogleCloudDialogflowV2SuggestSmartRepliesResponse < 3) { o.contextSize = 42; o.latestMessage = 'foo'; - o.smartReplyAnswers = buildUnnamed108(); + o.smartReplyAnswers = buildUnnamed120(); } buildCounterGoogleCloudDialogflowV2SuggestSmartRepliesResponse--; return o; @@ -8097,7 +8654,7 @@ void checkGoogleCloudDialogflowV2SuggestSmartRepliesResponse( o.latestMessage!, unittest.equals('foo'), ); - checkUnnamed108(o.smartReplyAnswers!); + checkUnnamed120(o.smartReplyAnswers!); } buildCounterGoogleCloudDialogflowV2SuggestSmartRepliesResponse--; } @@ -8187,24 +8744,24 @@ void checkGoogleCloudDialogflowV2SuggestionResult( buildCounterGoogleCloudDialogflowV2SuggestionResult--; } -core.List buildUnnamed109() => [ +core.List buildUnnamed121() => [ buildGoogleCloudDialogflowV2FewShotExample(), buildGoogleCloudDialogflowV2FewShotExample(), ]; -void checkUnnamed109(core.List o) { +void checkUnnamed121(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDialogflowV2FewShotExample(o[0]); checkGoogleCloudDialogflowV2FewShotExample(o[1]); } -core.List buildUnnamed110() => +core.List buildUnnamed122() => [ buildGoogleCloudDialogflowV2SummarizationSection(), buildGoogleCloudDialogflowV2SummarizationSection(), ]; -void checkUnnamed110( +void checkUnnamed122( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDialogflowV2SummarizationSection(o[0]); @@ -8217,9 +8774,9 @@ api.GoogleCloudDialogflowV2SummarizationContext final o = api.GoogleCloudDialogflowV2SummarizationContext(); buildCounterGoogleCloudDialogflowV2SummarizationContext++; if (buildCounterGoogleCloudDialogflowV2SummarizationContext < 3) { - o.fewShotExamples = buildUnnamed109(); + o.fewShotExamples = buildUnnamed121(); o.outputLanguageCode = 'foo'; - o.summarizationSections = buildUnnamed110(); + o.summarizationSections = buildUnnamed122(); o.version = 'foo'; } buildCounterGoogleCloudDialogflowV2SummarizationContext--; @@ -8230,12 +8787,12 @@ 
void checkGoogleCloudDialogflowV2SummarizationContext( api.GoogleCloudDialogflowV2SummarizationContext o) { buildCounterGoogleCloudDialogflowV2SummarizationContext++; if (buildCounterGoogleCloudDialogflowV2SummarizationContext < 3) { - checkUnnamed109(o.fewShotExamples!); + checkUnnamed121(o.fewShotExamples!); unittest.expect( o.outputLanguageCode!, unittest.equals('foo'), ); - checkUnnamed110(o.summarizationSections!); + checkUnnamed122(o.summarizationSections!); unittest.expect( o.version!, unittest.equals('foo'), @@ -8278,13 +8835,13 @@ void checkGoogleCloudDialogflowV2SummarizationSection( buildCounterGoogleCloudDialogflowV2SummarizationSection--; } -core.List buildUnnamed111() => +core.List buildUnnamed123() => [ buildGoogleCloudDialogflowV2SummarizationSection(), buildGoogleCloudDialogflowV2SummarizationSection(), ]; -void checkUnnamed111( +void checkUnnamed123( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDialogflowV2SummarizationSection(o[0]); @@ -8297,7 +8854,7 @@ api.GoogleCloudDialogflowV2SummarizationSectionList final o = api.GoogleCloudDialogflowV2SummarizationSectionList(); buildCounterGoogleCloudDialogflowV2SummarizationSectionList++; if (buildCounterGoogleCloudDialogflowV2SummarizationSectionList < 3) { - o.summarizationSections = buildUnnamed111(); + o.summarizationSections = buildUnnamed123(); } buildCounterGoogleCloudDialogflowV2SummarizationSectionList--; return o; @@ -8307,18 +8864,18 @@ void checkGoogleCloudDialogflowV2SummarizationSectionList( api.GoogleCloudDialogflowV2SummarizationSectionList o) { buildCounterGoogleCloudDialogflowV2SummarizationSectionList++; if (buildCounterGoogleCloudDialogflowV2SummarizationSectionList < 3) { - checkUnnamed111(o.summarizationSections!); + checkUnnamed123(o.summarizationSections!); } buildCounterGoogleCloudDialogflowV2SummarizationSectionList--; } core.List - buildUnnamed112() => [ + buildUnnamed124() => [ buildGoogleCloudDialogflowV2SummarySuggestionSummarySection(), buildGoogleCloudDialogflowV2SummarySuggestionSummarySection(), ]; -void checkUnnamed112( +void checkUnnamed124( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDialogflowV2SummarySuggestionSummarySection(o[0]); @@ -8331,7 +8888,7 @@ api.GoogleCloudDialogflowV2SummarySuggestion final o = api.GoogleCloudDialogflowV2SummarySuggestion(); buildCounterGoogleCloudDialogflowV2SummarySuggestion++; if (buildCounterGoogleCloudDialogflowV2SummarySuggestion < 3) { - o.summarySections = buildUnnamed112(); + o.summarySections = buildUnnamed124(); } buildCounterGoogleCloudDialogflowV2SummarySuggestion--; return o; @@ -8341,7 +8898,7 @@ void checkGoogleCloudDialogflowV2SummarySuggestion( api.GoogleCloudDialogflowV2SummarySuggestion o) { buildCounterGoogleCloudDialogflowV2SummarySuggestion++; if (buildCounterGoogleCloudDialogflowV2SummarySuggestion < 3) { - checkUnnamed112(o.summarySections!); + checkUnnamed124(o.summarySections!); } buildCounterGoogleCloudDialogflowV2SummarySuggestion--; } @@ -8375,12 +8932,12 @@ void checkGoogleCloudDialogflowV2SummarySuggestionSummarySection( buildCounterGoogleCloudDialogflowV2SummarySuggestionSummarySection--; } -core.List buildUnnamed113() => [ +core.List buildUnnamed125() => [ 'foo', 'foo', ]; -void checkUnnamed113(core.List o) { +void checkUnnamed125(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -8398,7 +8955,7 @@ api.GoogleCloudDialogflowV2SynthesizeSpeechConfig final o = api.GoogleCloudDialogflowV2SynthesizeSpeechConfig(); 
buildCounterGoogleCloudDialogflowV2SynthesizeSpeechConfig++; if (buildCounterGoogleCloudDialogflowV2SynthesizeSpeechConfig < 3) { - o.effectsProfileId = buildUnnamed113(); + o.effectsProfileId = buildUnnamed125(); o.pitch = 42.0; o.speakingRate = 42.0; o.voice = buildGoogleCloudDialogflowV2VoiceSelectionParams(); @@ -8412,7 +8969,7 @@ void checkGoogleCloudDialogflowV2SynthesizeSpeechConfig( api.GoogleCloudDialogflowV2SynthesizeSpeechConfig o) { buildCounterGoogleCloudDialogflowV2SynthesizeSpeechConfig++; if (buildCounterGoogleCloudDialogflowV2SynthesizeSpeechConfig < 3) { - checkUnnamed113(o.effectsProfileId!); + checkUnnamed125(o.effectsProfileId!); unittest.expect( o.pitch!, unittest.equals(42.0), @@ -8459,12 +9016,12 @@ void checkGoogleCloudDialogflowV2TextInput( } core.Map - buildUnnamed114() => { + buildUnnamed126() => { 'x': buildGoogleCloudDialogflowV2SynthesizeSpeechConfig(), 'y': buildGoogleCloudDialogflowV2SynthesizeSpeechConfig(), }; -void checkUnnamed114( +void checkUnnamed126( core.Map o) { unittest.expect(o, unittest.hasLength(2)); @@ -8481,7 +9038,7 @@ api.GoogleCloudDialogflowV2TextToSpeechSettings o.enableTextToSpeech = true; o.outputAudioEncoding = 'foo'; o.sampleRateHertz = 42; - o.synthesizeSpeechConfigs = buildUnnamed114(); + o.synthesizeSpeechConfigs = buildUnnamed126(); } buildCounterGoogleCloudDialogflowV2TextToSpeechSettings--; return o; @@ -8500,7 +9057,7 @@ void checkGoogleCloudDialogflowV2TextToSpeechSettings( o.sampleRateHertz!, unittest.equals(42), ); - checkUnnamed114(o.synthesizeSpeechConfigs!); + checkUnnamed126(o.synthesizeSpeechConfigs!); } buildCounterGoogleCloudDialogflowV2TextToSpeechSettings--; } @@ -8542,12 +9099,12 @@ void checkGoogleCloudDialogflowV2UndeployConversationModelRequest( buildCounterGoogleCloudDialogflowV2UndeployConversationModelRequest--; } -core.List buildUnnamed115() => [ +core.List buildUnnamed127() => [ 'foo', 'foo', ]; -void checkUnnamed115(core.List o) { +void checkUnnamed127(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -8565,7 +9122,7 @@ api.GoogleCloudDialogflowV2ValidationError final o = api.GoogleCloudDialogflowV2ValidationError(); buildCounterGoogleCloudDialogflowV2ValidationError++; if (buildCounterGoogleCloudDialogflowV2ValidationError < 3) { - o.entries = buildUnnamed115(); + o.entries = buildUnnamed127(); o.errorMessage = 'foo'; o.severity = 'foo'; } @@ -8577,7 +9134,7 @@ void checkGoogleCloudDialogflowV2ValidationError( api.GoogleCloudDialogflowV2ValidationError o) { buildCounterGoogleCloudDialogflowV2ValidationError++; if (buildCounterGoogleCloudDialogflowV2ValidationError < 3) { - checkUnnamed115(o.entries!); + checkUnnamed127(o.entries!); unittest.expect( o.errorMessage!, unittest.equals('foo'), @@ -8590,12 +9147,12 @@ void checkGoogleCloudDialogflowV2ValidationError( buildCounterGoogleCloudDialogflowV2ValidationError--; } -core.List buildUnnamed116() => [ +core.List buildUnnamed128() => [ buildGoogleCloudDialogflowV2ValidationError(), buildGoogleCloudDialogflowV2ValidationError(), ]; -void checkUnnamed116(core.List o) { +void checkUnnamed128(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudDialogflowV2ValidationError(o[0]); checkGoogleCloudDialogflowV2ValidationError(o[1]); @@ -8607,7 +9164,7 @@ api.GoogleCloudDialogflowV2ValidationResult final o = api.GoogleCloudDialogflowV2ValidationResult(); buildCounterGoogleCloudDialogflowV2ValidationResult++; if (buildCounterGoogleCloudDialogflowV2ValidationResult < 3) { - o.validationErrors = buildUnnamed116(); 
+ o.validationErrors = buildUnnamed128(); } buildCounterGoogleCloudDialogflowV2ValidationResult--; return o; @@ -8617,7 +9174,7 @@ void checkGoogleCloudDialogflowV2ValidationResult( api.GoogleCloudDialogflowV2ValidationResult o) { buildCounterGoogleCloudDialogflowV2ValidationResult++; if (buildCounterGoogleCloudDialogflowV2ValidationResult < 3) { - checkUnnamed116(o.validationErrors!); + checkUnnamed128(o.validationErrors!); } buildCounterGoogleCloudDialogflowV2ValidationResult--; } @@ -8693,12 +9250,12 @@ void checkGoogleCloudDialogflowV2VoiceSelectionParams( buildCounterGoogleCloudDialogflowV2VoiceSelectionParams--; } -core.List buildUnnamed117() => [ +core.List buildUnnamed129() => [ buildGoogleCloudLocationLocation(), buildGoogleCloudLocationLocation(), ]; -void checkUnnamed117(core.List o) { +void checkUnnamed129(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudLocationLocation(o[0]); checkGoogleCloudLocationLocation(o[1]); @@ -8710,7 +9267,7 @@ api.GoogleCloudLocationListLocationsResponse final o = api.GoogleCloudLocationListLocationsResponse(); buildCounterGoogleCloudLocationListLocationsResponse++; if (buildCounterGoogleCloudLocationListLocationsResponse < 3) { - o.locations = buildUnnamed117(); + o.locations = buildUnnamed129(); o.nextPageToken = 'foo'; } buildCounterGoogleCloudLocationListLocationsResponse--; @@ -8721,7 +9278,7 @@ void checkGoogleCloudLocationListLocationsResponse( api.GoogleCloudLocationListLocationsResponse o) { buildCounterGoogleCloudLocationListLocationsResponse++; if (buildCounterGoogleCloudLocationListLocationsResponse < 3) { - checkUnnamed117(o.locations!); + checkUnnamed129(o.locations!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -8730,12 +9287,12 @@ void checkGoogleCloudLocationListLocationsResponse( buildCounterGoogleCloudLocationListLocationsResponse--; } -core.Map buildUnnamed118() => { +core.Map buildUnnamed130() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed118(core.Map o) { +void checkUnnamed130(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -8747,7 +9304,7 @@ void checkUnnamed118(core.Map o) { ); } -core.Map buildUnnamed119() => { +core.Map buildUnnamed131() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -8760,34 +9317,34 @@ core.Map buildUnnamed119() => { }, }; -void checkUnnamed119(core.Map o) { +void checkUnnamed131(core.Map o) { unittest.expect(o, unittest.hasLength(2)); - var casted18 = (o['x']!) as core.Map; - unittest.expect(casted18, unittest.hasLength(3)); + var casted24 = (o['x']!) as core.Map; + unittest.expect(casted24, unittest.hasLength(3)); unittest.expect( - casted18['list'], + casted24['list'], unittest.equals([1, 2, 3]), ); unittest.expect( - casted18['bool'], + casted24['bool'], unittest.equals(true), ); unittest.expect( - casted18['string'], + casted24['string'], unittest.equals('foo'), ); - var casted19 = (o['y']!) as core.Map; - unittest.expect(casted19, unittest.hasLength(3)); + var casted25 = (o['y']!) 
as core.Map; + unittest.expect(casted25, unittest.hasLength(3)); unittest.expect( - casted19['list'], + casted25['list'], unittest.equals([1, 2, 3]), ); unittest.expect( - casted19['bool'], + casted25['bool'], unittest.equals(true), ); unittest.expect( - casted19['string'], + casted25['string'], unittest.equals('foo'), ); } @@ -8798,9 +9355,9 @@ api.GoogleCloudLocationLocation buildGoogleCloudLocationLocation() { buildCounterGoogleCloudLocationLocation++; if (buildCounterGoogleCloudLocationLocation < 3) { o.displayName = 'foo'; - o.labels = buildUnnamed118(); + o.labels = buildUnnamed130(); o.locationId = 'foo'; - o.metadata = buildUnnamed119(); + o.metadata = buildUnnamed131(); o.name = 'foo'; } buildCounterGoogleCloudLocationLocation--; @@ -8814,12 +9371,12 @@ void checkGoogleCloudLocationLocation(api.GoogleCloudLocationLocation o) { o.displayName!, unittest.equals('foo'), ); - checkUnnamed118(o.labels!); + checkUnnamed130(o.labels!); unittest.expect( o.locationId!, unittest.equals('foo'), ); - checkUnnamed119(o.metadata!); + checkUnnamed131(o.metadata!); unittest.expect( o.name!, unittest.equals('foo'), @@ -8828,12 +9385,12 @@ void checkGoogleCloudLocationLocation(api.GoogleCloudLocationLocation o) { buildCounterGoogleCloudLocationLocation--; } -core.List buildUnnamed120() => [ +core.List buildUnnamed132() => [ buildGoogleLongrunningOperation(), buildGoogleLongrunningOperation(), ]; -void checkUnnamed120(core.List o) { +void checkUnnamed132(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleLongrunningOperation(o[0]); checkGoogleLongrunningOperation(o[1]); @@ -8846,7 +9403,7 @@ api.GoogleLongrunningListOperationsResponse buildCounterGoogleLongrunningListOperationsResponse++; if (buildCounterGoogleLongrunningListOperationsResponse < 3) { o.nextPageToken = 'foo'; - o.operations = buildUnnamed120(); + o.operations = buildUnnamed132(); } buildCounterGoogleLongrunningListOperationsResponse--; return o; @@ -8860,12 +9417,12 @@ void checkGoogleLongrunningListOperationsResponse( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed120(o.operations!); + checkUnnamed132(o.operations!); } buildCounterGoogleLongrunningListOperationsResponse--; } -core.Map buildUnnamed121() => { +core.Map buildUnnamed133() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -8878,39 +9435,39 @@ core.Map buildUnnamed121() => { }, }; -void checkUnnamed121(core.Map o) { +void checkUnnamed133(core.Map o) { unittest.expect(o, unittest.hasLength(2)); - var casted20 = (o['x']!) as core.Map; - unittest.expect(casted20, unittest.hasLength(3)); + var casted26 = (o['x']!) as core.Map; + unittest.expect(casted26, unittest.hasLength(3)); unittest.expect( - casted20['list'], + casted26['list'], unittest.equals([1, 2, 3]), ); unittest.expect( - casted20['bool'], + casted26['bool'], unittest.equals(true), ); unittest.expect( - casted20['string'], + casted26['string'], unittest.equals('foo'), ); - var casted21 = (o['y']!) as core.Map; - unittest.expect(casted21, unittest.hasLength(3)); + var casted27 = (o['y']!) 
as core.Map; + unittest.expect(casted27, unittest.hasLength(3)); unittest.expect( - casted21['list'], + casted27['list'], unittest.equals([1, 2, 3]), ); unittest.expect( - casted21['bool'], + casted27['bool'], unittest.equals(true), ); unittest.expect( - casted21['string'], + casted27['string'], unittest.equals('foo'), ); } -core.Map buildUnnamed122() => { +core.Map buildUnnamed134() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -8923,34 +9480,34 @@ core.Map buildUnnamed122() => { }, }; -void checkUnnamed122(core.Map o) { +void checkUnnamed134(core.Map o) { unittest.expect(o, unittest.hasLength(2)); - var casted22 = (o['x']!) as core.Map; - unittest.expect(casted22, unittest.hasLength(3)); + var casted28 = (o['x']!) as core.Map; + unittest.expect(casted28, unittest.hasLength(3)); unittest.expect( - casted22['list'], + casted28['list'], unittest.equals([1, 2, 3]), ); unittest.expect( - casted22['bool'], + casted28['bool'], unittest.equals(true), ); unittest.expect( - casted22['string'], + casted28['string'], unittest.equals('foo'), ); - var casted23 = (o['y']!) as core.Map; - unittest.expect(casted23, unittest.hasLength(3)); + var casted29 = (o['y']!) as core.Map; + unittest.expect(casted29, unittest.hasLength(3)); unittest.expect( - casted23['list'], + casted29['list'], unittest.equals([1, 2, 3]), ); unittest.expect( - casted23['bool'], + casted29['bool'], unittest.equals(true), ); unittest.expect( - casted23['string'], + casted29['string'], unittest.equals('foo'), ); } @@ -8962,9 +9519,9 @@ api.GoogleLongrunningOperation buildGoogleLongrunningOperation() { if (buildCounterGoogleLongrunningOperation < 3) { o.done = true; o.error = buildGoogleRpcStatus(); - o.metadata = buildUnnamed121(); + o.metadata = buildUnnamed133(); o.name = 'foo'; - o.response = buildUnnamed122(); + o.response = buildUnnamed134(); } buildCounterGoogleLongrunningOperation--; return o; @@ -8975,12 +9532,12 @@ void checkGoogleLongrunningOperation(api.GoogleLongrunningOperation o) { if (buildCounterGoogleLongrunningOperation < 3) { unittest.expect(o.done!, unittest.isTrue); checkGoogleRpcStatus(o.error!); - checkUnnamed121(o.metadata!); + checkUnnamed133(o.metadata!); unittest.expect( o.name!, unittest.equals('foo'), ); - checkUnnamed122(o.response!); + checkUnnamed134(o.response!); } buildCounterGoogleLongrunningOperation--; } @@ -9000,7 +9557,7 @@ void checkGoogleProtobufEmpty(api.GoogleProtobufEmpty o) { buildCounterGoogleProtobufEmpty--; } -core.Map buildUnnamed123() => { +core.Map buildUnnamed135() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -9013,47 +9570,47 @@ core.Map buildUnnamed123() => { }, }; -void checkUnnamed123(core.Map o) { +void checkUnnamed135(core.Map o) { unittest.expect(o, unittest.hasLength(2)); - var casted24 = (o['x']!) as core.Map; - unittest.expect(casted24, unittest.hasLength(3)); + var casted30 = (o['x']!) as core.Map; + unittest.expect(casted30, unittest.hasLength(3)); unittest.expect( - casted24['list'], + casted30['list'], unittest.equals([1, 2, 3]), ); unittest.expect( - casted24['bool'], + casted30['bool'], unittest.equals(true), ); unittest.expect( - casted24['string'], + casted30['string'], unittest.equals('foo'), ); - var casted25 = (o['y']!) as core.Map; - unittest.expect(casted25, unittest.hasLength(3)); + var casted31 = (o['y']!) 
as core.Map; + unittest.expect(casted31, unittest.hasLength(3)); unittest.expect( - casted25['list'], + casted31['list'], unittest.equals([1, 2, 3]), ); unittest.expect( - casted25['bool'], + casted31['bool'], unittest.equals(true), ); unittest.expect( - casted25['string'], + casted31['string'], unittest.equals('foo'), ); } -core.List> buildUnnamed124() => [ - buildUnnamed123(), - buildUnnamed123(), +core.List> buildUnnamed136() => [ + buildUnnamed135(), + buildUnnamed135(), ]; -void checkUnnamed124(core.List> o) { +void checkUnnamed136(core.List> o) { unittest.expect(o, unittest.hasLength(2)); - checkUnnamed123(o[0]); - checkUnnamed123(o[1]); + checkUnnamed135(o[0]); + checkUnnamed135(o[1]); } core.int buildCounterGoogleRpcStatus = 0; @@ -9062,7 +9619,7 @@ api.GoogleRpcStatus buildGoogleRpcStatus() { buildCounterGoogleRpcStatus++; if (buildCounterGoogleRpcStatus < 3) { o.code = 42; - o.details = buildUnnamed124(); + o.details = buildUnnamed136(); o.message = 'foo'; } buildCounterGoogleRpcStatus--; @@ -9076,7 +9633,7 @@ void checkGoogleRpcStatus(api.GoogleRpcStatus o) { o.code!, unittest.equals(42), ); - checkUnnamed124(o.details!); + checkUnnamed136(o.details!); unittest.expect( o.message!, unittest.equals('foo'), @@ -11079,6 +11636,109 @@ void main() { }); }); + unittest.group( + 'obj-schema-GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfig', + () { + unittest.test('to-json--from-json', () async { + final o = + buildGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfig(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfig + .fromJson(oJson as core.Map); + checkGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfig(od); + }); + }); + + unittest.group( + 'obj-schema-GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecs', + () { + unittest.test('to-json--from-json', () async { + final o = + buildGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecs(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecs + .fromJson(oJson as core.Map); + checkGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecs( + od); + }); + }); + + unittest.group( + 'obj-schema-GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpec', + () { + unittest.test('to-json--from-json', () async { + final o = + buildGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpec(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpec + .fromJson(oJson as core.Map); + checkGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpec( + od); + }); + }); + + unittest.group( + 'obj-schema-GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpecConditionBoostSpec', + () { + unittest.test('to-json--from-json', () async { + final o = + buildGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpecConditionBoostSpec(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpecConditionBoostSpec + .fromJson(oJson as core.Map); + checkGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpecConditionBoostSpec( + od); + }); + }); + + unittest.group( + 
'obj-schema-GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpecConditionBoostSpecBoostControlSpec', + () { + unittest.test('to-json--from-json', () async { + final o = + buildGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpecConditionBoostSpecBoostControlSpec(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpecConditionBoostSpecBoostControlSpec + .fromJson(oJson as core.Map); + checkGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpecConditionBoostSpecBoostControlSpec( + od); + }); + }); + + unittest.group( + 'obj-schema-GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpecConditionBoostSpecBoostControlSpecControlPoint', + () { + unittest.test('to-json--from-json', () async { + final o = + buildGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpecConditionBoostSpecBoostControlSpecControlPoint(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpecConditionBoostSpecBoostControlSpecControlPoint + .fromJson(oJson as core.Map); + checkGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigBoostSpecsBoostSpecConditionBoostSpecBoostControlSpecControlPoint( + od); + }); + }); + + unittest.group( + 'obj-schema-GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigFilterSpecs', + () { + unittest.test('to-json--from-json', () async { + final o = + buildGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigFilterSpecs(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.GoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigFilterSpecs + .fromJson(oJson as core.Map); + checkGoogleCloudDialogflowV2SearchKnowledgeRequestSearchConfigFilterSpecs( + od); + }); + }); + unittest.group('obj-schema-GoogleCloudDialogflowV2SearchKnowledgeResponse', () { unittest.test('to-json--from-json', () async { diff --git a/generated/googleapis/test/dialogflow/v3_test.dart b/generated/googleapis/test/dialogflow/v3_test.dart index 8373d8004..2826a379e 100644 --- a/generated/googleapis/test/dialogflow/v3_test.dart +++ b/generated/googleapis/test/dialogflow/v3_test.dart @@ -3810,6 +3810,7 @@ api.GoogleCloudDialogflowCxV3Generator buildCounterGoogleCloudDialogflowCxV3Generator++; if (buildCounterGoogleCloudDialogflowCxV3Generator < 3) { o.displayName = 'foo'; + o.modelParameter = buildGoogleCloudDialogflowCxV3GeneratorModelParameter(); o.name = 'foo'; o.placeholders = buildUnnamed48(); o.promptText = buildGoogleCloudDialogflowCxV3Phrase(); @@ -3826,6 +3827,7 @@ void checkGoogleCloudDialogflowCxV3Generator( o.displayName!, unittest.equals('foo'), ); + checkGoogleCloudDialogflowCxV3GeneratorModelParameter(o.modelParameter!); unittest.expect( o.name!, unittest.equals('foo'), @@ -3836,6 +3838,45 @@ void checkGoogleCloudDialogflowCxV3Generator( buildCounterGoogleCloudDialogflowCxV3Generator--; } +core.int buildCounterGoogleCloudDialogflowCxV3GeneratorModelParameter = 0; +api.GoogleCloudDialogflowCxV3GeneratorModelParameter + buildGoogleCloudDialogflowCxV3GeneratorModelParameter() { + final o = api.GoogleCloudDialogflowCxV3GeneratorModelParameter(); + buildCounterGoogleCloudDialogflowCxV3GeneratorModelParameter++; + if (buildCounterGoogleCloudDialogflowCxV3GeneratorModelParameter < 3) { + o.maxDecodeSteps = 42; + o.temperature = 42.0; + o.topK = 42; + o.topP = 42.0; + } + 
buildCounterGoogleCloudDialogflowCxV3GeneratorModelParameter--; + return o; +} + +void checkGoogleCloudDialogflowCxV3GeneratorModelParameter( + api.GoogleCloudDialogflowCxV3GeneratorModelParameter o) { + buildCounterGoogleCloudDialogflowCxV3GeneratorModelParameter++; + if (buildCounterGoogleCloudDialogflowCxV3GeneratorModelParameter < 3) { + unittest.expect( + o.maxDecodeSteps!, + unittest.equals(42), + ); + unittest.expect( + o.temperature!, + unittest.equals(42.0), + ); + unittest.expect( + o.topK!, + unittest.equals(42), + ); + unittest.expect( + o.topP!, + unittest.equals(42.0), + ); + } + buildCounterGoogleCloudDialogflowCxV3GeneratorModelParameter--; +} + core.int buildCounterGoogleCloudDialogflowCxV3GeneratorPlaceholder = 0; api.GoogleCloudDialogflowCxV3GeneratorPlaceholder buildGoogleCloudDialogflowCxV3GeneratorPlaceholder() { @@ -9766,6 +9807,17 @@ void main() { }); }); + unittest.group('obj-schema-GoogleCloudDialogflowCxV3GeneratorModelParameter', + () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudDialogflowCxV3GeneratorModelParameter(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudDialogflowCxV3GeneratorModelParameter.fromJson( + oJson as core.Map); + checkGoogleCloudDialogflowCxV3GeneratorModelParameter(od); + }); + }); + unittest.group('obj-schema-GoogleCloudDialogflowCxV3GeneratorPlaceholder', () { unittest.test('to-json--from-json', () async { diff --git a/generated/googleapis/test/digitalassetlinks/v1_test.dart b/generated/googleapis/test/digitalassetlinks/v1_test.dart index a265864c5..d4fd3b0a6 100644 --- a/generated/googleapis/test/digitalassetlinks/v1_test.dart +++ b/generated/googleapis/test/digitalassetlinks/v1_test.dart @@ -71,84 +71,6 @@ void checkAsset(api.Asset o) { buildCounterAsset--; } -core.List buildUnnamed0() => [ - buildStatementTemplate(), - buildStatementTemplate(), - ]; - -void checkUnnamed0(core.List o) { - unittest.expect(o, unittest.hasLength(2)); - checkStatementTemplate(o[0]); - checkStatementTemplate(o[1]); -} - -core.int buildCounterBulkCheckRequest = 0; -api.BulkCheckRequest buildBulkCheckRequest() { - final o = api.BulkCheckRequest(); - buildCounterBulkCheckRequest++; - if (buildCounterBulkCheckRequest < 3) { - o.allowGoogleInternalDataSources = true; - o.defaultRelation = 'foo'; - o.defaultSource = buildAsset(); - o.defaultTarget = buildAsset(); - o.skipCacheLookup = true; - o.statements = buildUnnamed0(); - } - buildCounterBulkCheckRequest--; - return o; -} - -void checkBulkCheckRequest(api.BulkCheckRequest o) { - buildCounterBulkCheckRequest++; - if (buildCounterBulkCheckRequest < 3) { - unittest.expect(o.allowGoogleInternalDataSources!, unittest.isTrue); - unittest.expect( - o.defaultRelation!, - unittest.equals('foo'), - ); - checkAsset(o.defaultSource!); - checkAsset(o.defaultTarget!); - unittest.expect(o.skipCacheLookup!, unittest.isTrue); - checkUnnamed0(o.statements!); - } - buildCounterBulkCheckRequest--; -} - -core.List buildUnnamed1() => [ - buildCheckResponse(), - buildCheckResponse(), - ]; - -void checkUnnamed1(core.List o) { - unittest.expect(o, unittest.hasLength(2)); - checkCheckResponse(o[0]); - checkCheckResponse(o[1]); -} - -core.int buildCounterBulkCheckResponse = 0; -api.BulkCheckResponse buildBulkCheckResponse() { - final o = api.BulkCheckResponse(); - buildCounterBulkCheckResponse++; - if (buildCounterBulkCheckResponse < 3) { - o.bulkErrorCode = 'foo'; - o.checkResults = buildUnnamed1(); - } - buildCounterBulkCheckResponse--; - return o; -} - -void 
checkBulkCheckResponse(api.BulkCheckResponse o) { - buildCounterBulkCheckResponse++; - if (buildCounterBulkCheckResponse < 3) { - unittest.expect( - o.bulkErrorCode!, - unittest.equals('foo'), - ); - checkUnnamed1(o.checkResults!); - } - buildCounterBulkCheckResponse--; -} - core.int buildCounterCertificateInfo = 0; api.CertificateInfo buildCertificateInfo() { final o = api.CertificateInfo(); @@ -171,12 +93,12 @@ void checkCertificateInfo(api.CertificateInfo o) { buildCounterCertificateInfo--; } -core.List buildUnnamed2() => [ +core.List buildUnnamed0() => [ 'foo', 'foo', ]; -void checkUnnamed2(core.List o) { +void checkUnnamed0(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -194,7 +116,7 @@ api.CheckResponse buildCheckResponse() { buildCounterCheckResponse++; if (buildCounterCheckResponse < 3) { o.debugString = 'foo'; - o.errorCode = buildUnnamed2(); + o.errorCode = buildUnnamed0(); o.linked = true; o.maxAge = 'foo'; } @@ -209,7 +131,7 @@ void checkCheckResponse(api.CheckResponse o) { o.debugString!, unittest.equals('foo'), ); - checkUnnamed2(o.errorCode!); + checkUnnamed0(o.errorCode!); unittest.expect(o.linked!, unittest.isTrue); unittest.expect( o.maxAge!, @@ -219,12 +141,12 @@ void checkCheckResponse(api.CheckResponse o) { buildCounterCheckResponse--; } -core.List buildUnnamed3() => [ +core.List buildUnnamed1() => [ 'foo', 'foo', ]; -void checkUnnamed3(core.List o) { +void checkUnnamed1(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -236,12 +158,12 @@ void checkUnnamed3(core.List o) { ); } -core.List buildUnnamed4() => [ +core.List buildUnnamed2() => [ buildStatement(), buildStatement(), ]; -void checkUnnamed4(core.List o) { +void checkUnnamed2(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkStatement(o[0]); checkStatement(o[1]); @@ -253,9 +175,9 @@ api.ListResponse buildListResponse() { buildCounterListResponse++; if (buildCounterListResponse < 3) { o.debugString = 'foo'; - o.errorCode = buildUnnamed3(); + o.errorCode = buildUnnamed1(); o.maxAge = 'foo'; - o.statements = buildUnnamed4(); + o.statements = buildUnnamed2(); } buildCounterListResponse--; return o; @@ -268,12 +190,12 @@ void checkListResponse(api.ListResponse o) { o.debugString!, unittest.equals('foo'), ); - checkUnnamed3(o.errorCode!); + checkUnnamed1(o.errorCode!); unittest.expect( o.maxAge!, unittest.equals('foo'), ); - checkUnnamed4(o.statements!); + checkUnnamed2(o.statements!); } buildCounterListResponse--; } @@ -304,32 +226,6 @@ void checkStatement(api.Statement o) { buildCounterStatement--; } -core.int buildCounterStatementTemplate = 0; -api.StatementTemplate buildStatementTemplate() { - final o = api.StatementTemplate(); - buildCounterStatementTemplate++; - if (buildCounterStatementTemplate < 3) { - o.relation = 'foo'; - o.source = buildAsset(); - o.target = buildAsset(); - } - buildCounterStatementTemplate--; - return o; -} - -void checkStatementTemplate(api.StatementTemplate o) { - buildCounterStatementTemplate++; - if (buildCounterStatementTemplate < 3) { - unittest.expect( - o.relation!, - unittest.equals('foo'), - ); - checkAsset(o.source!); - checkAsset(o.target!); - } - buildCounterStatementTemplate--; -} - core.int buildCounterWebAsset = 0; api.WebAsset buildWebAsset() { final o = api.WebAsset(); @@ -373,26 +269,6 @@ void main() { }); }); - unittest.group('obj-schema-BulkCheckRequest', () { - unittest.test('to-json--from-json', () async { - final o = buildBulkCheckRequest(); - final oJson = 
convert.jsonDecode(convert.jsonEncode(o)); - final od = api.BulkCheckRequest.fromJson( - oJson as core.Map); - checkBulkCheckRequest(od); - }); - }); - - unittest.group('obj-schema-BulkCheckResponse', () { - unittest.test('to-json--from-json', () async { - final o = buildBulkCheckResponse(); - final oJson = convert.jsonDecode(convert.jsonEncode(o)); - final od = api.BulkCheckResponse.fromJson( - oJson as core.Map); - checkBulkCheckResponse(od); - }); - }); - unittest.group('obj-schema-CertificateInfo', () { unittest.test('to-json--from-json', () async { final o = buildCertificateInfo(); @@ -433,16 +309,6 @@ void main() { }); }); - unittest.group('obj-schema-StatementTemplate', () { - unittest.test('to-json--from-json', () async { - final o = buildStatementTemplate(); - final oJson = convert.jsonDecode(convert.jsonEncode(o)); - final od = api.StatementTemplate.fromJson( - oJson as core.Map); - checkStatementTemplate(od); - }); - }); - unittest.group('obj-schema-WebAsset', () { unittest.test('to-json--from-json', () async { final o = buildWebAsset(); @@ -454,61 +320,6 @@ void main() { }); unittest.group('resource-AssetlinksResource', () { - unittest.test('method--bulkCheck', () async { - final mock = HttpServerMock(); - final res = api.DigitalassetlinksApi(mock).assetlinks; - final arg_request = buildBulkCheckRequest(); - final arg_$fields = 'foo'; - mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.BulkCheckRequest.fromJson( - json as core.Map); - checkBulkCheckRequest(obj); - - final path = req.url.path; - var pathOffset = 0; - core.int index; - core.String subPart; - unittest.expect( - path.substring(pathOffset, pathOffset + 1), - unittest.equals('/'), - ); - pathOffset += 1; - unittest.expect( - path.substring(pathOffset, pathOffset + 23), - unittest.equals('v1/assetlinks:bulkCheck'), - ); - pathOffset += 23; - - final query = req.url.query; - var queryOffset = 0; - final queryMap = >{}; - void addQueryParam(core.String n, core.String v) => - queryMap.putIfAbsent(n, () => []).add(v); - - if (query.isNotEmpty) { - for (var part in query.split('&')) { - final keyValue = part.split('='); - addQueryParam( - core.Uri.decodeQueryComponent(keyValue[0]), - core.Uri.decodeQueryComponent(keyValue[1]), - ); - } - } - unittest.expect( - queryMap['fields']!.first, - unittest.equals(arg_$fields), - ); - - final h = { - 'content-type': 'application/json; charset=utf-8', - }; - final resp = convert.json.encode(buildBulkCheckResponse()); - return async.Future.value(stringResponse(200, h, resp)); - }), true); - final response = await res.bulkCheck(arg_request, $fields: arg_$fields); - checkBulkCheckResponse(response as api.BulkCheckResponse); - }); - unittest.test('method--check', () async { final mock = HttpServerMock(); final res = api.DigitalassetlinksApi(mock).assetlinks; diff --git a/generated/googleapis/test/dlp/v2_test.dart b/generated/googleapis/test/dlp/v2_test.dart index ed1f5a5f0..4acba2a53 100644 --- a/generated/googleapis/test/dlp/v2_test.dart +++ b/generated/googleapis/test/dlp/v2_test.dart @@ -5202,6 +5202,7 @@ api.GooglePrivacyDlpV2InfoTypeDescription o.categories = buildUnnamed60(); o.description = 'foo'; o.displayName = 'foo'; + o.example = 'foo'; o.name = 'foo'; o.sensitivityScore = buildGooglePrivacyDlpV2SensitivityScore(); o.supportedBy = buildUnnamed61(); @@ -5224,6 +5225,10 @@ void checkGooglePrivacyDlpV2InfoTypeDescription( o.displayName!, unittest.equals('foo'), ); + unittest.expect( + o.example!, + unittest.equals('foo'), + ); unittest.expect( 
o.name!, unittest.equals('foo'), diff --git a/generated/googleapis/test/drive/v3_test.dart b/generated/googleapis/test/drive/v3_test.dart index 0b14f33fb..920a72e9b 100644 --- a/generated/googleapis/test/drive/v3_test.dart +++ b/generated/googleapis/test/drive/v3_test.dart @@ -3040,12 +3040,60 @@ void checkReplyList(api.ReplyList o) { buildCounterReplyList--; } -core.Map buildUnnamed56() => { +core.List buildUnnamed56() => [ + 'foo', + 'foo', + ]; + +void checkUnnamed56(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o[0], + unittest.equals('foo'), + ); + unittest.expect( + o[1], + unittest.equals('foo'), + ); +} + +core.int buildCounterResolveAccessProposalRequest = 0; +api.ResolveAccessProposalRequest buildResolveAccessProposalRequest() { + final o = api.ResolveAccessProposalRequest(); + buildCounterResolveAccessProposalRequest++; + if (buildCounterResolveAccessProposalRequest < 3) { + o.action = 'foo'; + o.role = buildUnnamed56(); + o.sendNotification = true; + o.view = 'foo'; + } + buildCounterResolveAccessProposalRequest--; + return o; +} + +void checkResolveAccessProposalRequest(api.ResolveAccessProposalRequest o) { + buildCounterResolveAccessProposalRequest++; + if (buildCounterResolveAccessProposalRequest < 3) { + unittest.expect( + o.action!, + unittest.equals('foo'), + ); + checkUnnamed56(o.role!); + unittest.expect(o.sendNotification!, unittest.isTrue); + unittest.expect( + o.view!, + unittest.equals('foo'), + ); + } + buildCounterResolveAccessProposalRequest--; +} + +core.Map buildUnnamed57() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed56(core.Map o) { +void checkUnnamed57(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -3062,7 +3110,7 @@ api.Revision buildRevision() { final o = api.Revision(); buildCounterRevision++; if (buildCounterRevision < 3) { - o.exportLinks = buildUnnamed56(); + o.exportLinks = buildUnnamed57(); o.id = 'foo'; o.keepForever = true; o.kind = 'foo'; @@ -3084,7 +3132,7 @@ api.Revision buildRevision() { void checkRevision(api.Revision o) { buildCounterRevision++; if (buildCounterRevision < 3) { - checkUnnamed56(o.exportLinks!); + checkUnnamed57(o.exportLinks!); unittest.expect( o.id!, unittest.equals('foo'), @@ -3126,12 +3174,12 @@ void checkRevision(api.Revision o) { buildCounterRevision--; } -core.List buildUnnamed57() => [ +core.List buildUnnamed58() => [ buildRevision(), buildRevision(), ]; -void checkUnnamed57(core.List o) { +void checkUnnamed58(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkRevision(o[0]); checkRevision(o[1]); @@ -3144,7 +3192,7 @@ api.RevisionList buildRevisionList() { if (buildCounterRevisionList < 3) { o.kind = 'foo'; o.nextPageToken = 'foo'; - o.revisions = buildUnnamed57(); + o.revisions = buildUnnamed58(); } buildCounterRevisionList--; return o; @@ -3161,7 +3209,7 @@ void checkRevisionList(api.RevisionList o) { o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed57(o.revisions!); + checkUnnamed58(o.revisions!); } buildCounterRevisionList--; } @@ -3193,7 +3241,7 @@ void checkStartPageToken(api.StartPageToken o) { buildCounterStartPageToken--; } -core.Map buildUnnamed58() => { +core.Map buildUnnamed59() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -3206,7 +3254,7 @@ core.Map buildUnnamed58() => { }, }; -void checkUnnamed58(core.Map o) { +void checkUnnamed59(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted5 = (o['x']!) 
as core.Map; unittest.expect(casted5, unittest.hasLength(3)); @@ -3238,15 +3286,15 @@ void checkUnnamed58(core.Map o) { ); } -core.List> buildUnnamed59() => [ - buildUnnamed58(), - buildUnnamed58(), +core.List> buildUnnamed60() => [ + buildUnnamed59(), + buildUnnamed59(), ]; -void checkUnnamed59(core.List> o) { +void checkUnnamed60(core.List> o) { unittest.expect(o, unittest.hasLength(2)); - checkUnnamed58(o[0]); - checkUnnamed58(o[1]); + checkUnnamed59(o[0]); + checkUnnamed59(o[1]); } core.int buildCounterStatus = 0; @@ -3255,7 +3303,7 @@ api.Status buildStatus() { buildCounterStatus++; if (buildCounterStatus < 3) { o.code = 42; - o.details = buildUnnamed59(); + o.details = buildUnnamed60(); o.message = 'foo'; } buildCounterStatus--; @@ -3269,7 +3317,7 @@ void checkStatus(api.Status o) { o.code!, unittest.equals(42), ); - checkUnnamed59(o.details!); + checkUnnamed60(o.details!); unittest.expect( o.message!, unittest.equals('foo'), @@ -3468,12 +3516,12 @@ void checkTeamDrive(api.TeamDrive o) { buildCounterTeamDrive--; } -core.List buildUnnamed60() => [ +core.List buildUnnamed61() => [ buildTeamDrive(), buildTeamDrive(), ]; -void checkUnnamed60(core.List o) { +void checkUnnamed61(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkTeamDrive(o[0]); checkTeamDrive(o[1]); @@ -3486,7 +3534,7 @@ api.TeamDriveList buildTeamDriveList() { if (buildCounterTeamDriveList < 3) { o.kind = 'foo'; o.nextPageToken = 'foo'; - o.teamDrives = buildUnnamed60(); + o.teamDrives = buildUnnamed61(); } buildCounterTeamDriveList--; return o; @@ -3503,7 +3551,7 @@ void checkTeamDriveList(api.TeamDriveList o) { o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed60(o.teamDrives!); + checkUnnamed61(o.teamDrives!); } buildCounterTeamDriveList--; } @@ -3552,23 +3600,6 @@ void checkUser(api.User o) { buildCounterUser--; } -core.List buildUnnamed61() => [ - 'foo', - 'foo', - ]; - -void checkUnnamed61(core.List o) { - unittest.expect(o, unittest.hasLength(2)); - unittest.expect( - o[0], - unittest.equals('foo'), - ); - unittest.expect( - o[1], - unittest.equals('foo'), - ); -} - void main() { unittest.group('obj-schema-AboutDriveThemes', () { unittest.test('to-json--from-json', () async { @@ -4059,6 +4090,16 @@ void main() { }); }); + unittest.group('obj-schema-ResolveAccessProposalRequest', () { + unittest.test('to-json--from-json', () async { + final o = buildResolveAccessProposalRequest(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.ResolveAccessProposalRequest.fromJson( + oJson as core.Map); + checkResolveAccessProposalRequest(od); + }); + }); + unittest.group('obj-schema-Revision', () { unittest.test('to-json--from-json', () async { final o = buildRevision(); @@ -4216,6 +4257,260 @@ void main() { }); }); + unittest.group('resource-AccessproposalsResource', () { + unittest.test('method--get', () async { + final mock = HttpServerMock(); + final res = api.DriveApi(mock).accessproposals; + final arg_fileId = 'foo'; + final arg_proposalId = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 9), + unittest.equals('drive/v3/'), + ); + pathOffset += 9; + unittest.expect( + path.substring(pathOffset, pathOffset + 6), + unittest.equals('files/'), + ); + pathOffset 
+= 6; + index = path.indexOf('/accessproposals/', pathOffset); + unittest.expect(index >= 0, unittest.isTrue); + subPart = + core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); + pathOffset = index; + unittest.expect( + subPart, + unittest.equals('$arg_fileId'), + ); + unittest.expect( + path.substring(pathOffset, pathOffset + 17), + unittest.equals('/accessproposals/'), + ); + pathOffset += 17; + subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset)); + pathOffset = path.length; + unittest.expect( + subPart, + unittest.equals('$arg_proposalId'), + ); + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildAccessProposal()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = + await res.get(arg_fileId, arg_proposalId, $fields: arg_$fields); + checkAccessProposal(response as api.AccessProposal); + }); + + unittest.test('method--list', () async { + final mock = HttpServerMock(); + final res = api.DriveApi(mock).accessproposals; + final arg_fileId = 'foo'; + final arg_pageSize = 42; + final arg_pageToken = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 9), + unittest.equals('drive/v3/'), + ); + pathOffset += 9; + unittest.expect( + path.substring(pathOffset, pathOffset + 6), + unittest.equals('files/'), + ); + pathOffset += 6; + index = path.indexOf('/accessproposals', pathOffset); + unittest.expect(index >= 0, unittest.isTrue); + subPart = + core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); + pathOffset = index; + unittest.expect( + subPart, + unittest.equals('$arg_fileId'), + ); + unittest.expect( + path.substring(pathOffset, pathOffset + 16), + unittest.equals('/accessproposals'), + ); + pathOffset += 16; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + core.int.parse(queryMap['pageSize']!.first), + unittest.equals(arg_pageSize), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildListAccessProposalsResponse()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.list(arg_fileId, + pageSize: 
arg_pageSize, + pageToken: arg_pageToken, + $fields: arg_$fields); + checkListAccessProposalsResponse( + response as api.ListAccessProposalsResponse); + }); + + unittest.test('method--resolve', () async { + final mock = HttpServerMock(); + final res = api.DriveApi(mock).accessproposals; + final arg_request = buildResolveAccessProposalRequest(); + final arg_fileId = 'foo'; + final arg_proposalId = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.ResolveAccessProposalRequest.fromJson( + json as core.Map); + checkResolveAccessProposalRequest(obj); + + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 9), + unittest.equals('drive/v3/'), + ); + pathOffset += 9; + unittest.expect( + path.substring(pathOffset, pathOffset + 6), + unittest.equals('files/'), + ); + pathOffset += 6; + index = path.indexOf('/accessproposals/', pathOffset); + unittest.expect(index >= 0, unittest.isTrue); + subPart = + core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); + pathOffset = index; + unittest.expect( + subPart, + unittest.equals('$arg_fileId'), + ); + unittest.expect( + path.substring(pathOffset, pathOffset + 17), + unittest.equals('/accessproposals/'), + ); + pathOffset += 17; + index = path.indexOf(':resolve', pathOffset); + unittest.expect(index >= 0, unittest.isTrue); + subPart = + core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); + pathOffset = index; + unittest.expect( + subPart, + unittest.equals('$arg_proposalId'), + ); + unittest.expect( + path.substring(pathOffset, pathOffset + 8), + unittest.equals(':resolve'), + ); + pathOffset += 8; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = ''; + return async.Future.value(stringResponse(200, h, resp)); + }), true); + await res.resolve(arg_request, arg_fileId, arg_proposalId, + $fields: arg_$fields); + }); + }); + unittest.group('resource-AppsResource', () { unittest.test('method--get', () async { final mock = HttpServerMock(); @@ -6961,201 +7256,6 @@ void main() { }); }); - unittest.group('resource-FilesAccessproposalsResource', () { - unittest.test('method--list', () async { - final mock = HttpServerMock(); - final res = api.DriveApi(mock).files.accessproposals; - final arg_fileId = 'foo'; - final arg_pageSize = 42; - final arg_pageToken = 'foo'; - final arg_$fields = 'foo'; - mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final path = req.url.path; - var pathOffset = 0; - core.int index; - core.String subPart; - unittest.expect( - path.substring(pathOffset, pathOffset + 1), - unittest.equals('/'), - ); - pathOffset += 1; - unittest.expect( - path.substring(pathOffset, pathOffset + 9), - unittest.equals('drive/v3/'), - ); - pathOffset += 9; - unittest.expect( - path.substring(pathOffset, pathOffset + 6), 
- unittest.equals('files/'), - ); - pathOffset += 6; - index = path.indexOf('/accessproposals', pathOffset); - unittest.expect(index >= 0, unittest.isTrue); - subPart = - core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); - pathOffset = index; - unittest.expect( - subPart, - unittest.equals('$arg_fileId'), - ); - unittest.expect( - path.substring(pathOffset, pathOffset + 16), - unittest.equals('/accessproposals'), - ); - pathOffset += 16; - - final query = req.url.query; - var queryOffset = 0; - final queryMap = >{}; - void addQueryParam(core.String n, core.String v) => - queryMap.putIfAbsent(n, () => []).add(v); - - if (query.isNotEmpty) { - for (var part in query.split('&')) { - final keyValue = part.split('='); - addQueryParam( - core.Uri.decodeQueryComponent(keyValue[0]), - core.Uri.decodeQueryComponent(keyValue[1]), - ); - } - } - unittest.expect( - core.int.parse(queryMap['pageSize']!.first), - unittest.equals(arg_pageSize), - ); - unittest.expect( - queryMap['pageToken']!.first, - unittest.equals(arg_pageToken), - ); - unittest.expect( - queryMap['fields']!.first, - unittest.equals(arg_$fields), - ); - - final h = { - 'content-type': 'application/json; charset=utf-8', - }; - final resp = convert.json.encode(buildListAccessProposalsResponse()); - return async.Future.value(stringResponse(200, h, resp)); - }), true); - final response = await res.list(arg_fileId, - pageSize: arg_pageSize, - pageToken: arg_pageToken, - $fields: arg_$fields); - checkListAccessProposalsResponse( - response as api.ListAccessProposalsResponse); - }); - - unittest.test('method--resolve', () async { - final mock = HttpServerMock(); - final res = api.DriveApi(mock).files.accessproposals; - final arg_fileId = 'foo'; - final arg_proposalId = 'foo'; - final arg_action = 'foo'; - final arg_role = buildUnnamed61(); - final arg_sendNotification = true; - final arg_view = 'foo'; - final arg_$fields = 'foo'; - mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final path = req.url.path; - var pathOffset = 0; - core.int index; - core.String subPart; - unittest.expect( - path.substring(pathOffset, pathOffset + 1), - unittest.equals('/'), - ); - pathOffset += 1; - unittest.expect( - path.substring(pathOffset, pathOffset + 9), - unittest.equals('drive/v3/'), - ); - pathOffset += 9; - unittest.expect( - path.substring(pathOffset, pathOffset + 6), - unittest.equals('files/'), - ); - pathOffset += 6; - index = path.indexOf('/accessproposals/', pathOffset); - unittest.expect(index >= 0, unittest.isTrue); - subPart = - core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); - pathOffset = index; - unittest.expect( - subPart, - unittest.equals('$arg_fileId'), - ); - unittest.expect( - path.substring(pathOffset, pathOffset + 17), - unittest.equals('/accessproposals/'), - ); - pathOffset += 17; - index = path.indexOf(':resolve', pathOffset); - unittest.expect(index >= 0, unittest.isTrue); - subPart = - core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); - pathOffset = index; - unittest.expect( - subPart, - unittest.equals('$arg_proposalId'), - ); - unittest.expect( - path.substring(pathOffset, pathOffset + 8), - unittest.equals(':resolve'), - ); - pathOffset += 8; - - final query = req.url.query; - var queryOffset = 0; - final queryMap = >{}; - void addQueryParam(core.String n, core.String v) => - queryMap.putIfAbsent(n, () => []).add(v); - - if (query.isNotEmpty) { - for (var part in query.split('&')) { - final keyValue = part.split('='); - addQueryParam( - 
core.Uri.decodeQueryComponent(keyValue[0]), - core.Uri.decodeQueryComponent(keyValue[1]), - ); - } - } - unittest.expect( - queryMap['action']!.first, - unittest.equals(arg_action), - ); - unittest.expect( - queryMap['role']!, - unittest.equals(arg_role), - ); - unittest.expect( - queryMap['sendNotification']!.first, - unittest.equals('$arg_sendNotification'), - ); - unittest.expect( - queryMap['view']!.first, - unittest.equals(arg_view), - ); - unittest.expect( - queryMap['fields']!.first, - unittest.equals(arg_$fields), - ); - - final h = { - 'content-type': 'application/json; charset=utf-8', - }; - final resp = ''; - return async.Future.value(stringResponse(200, h, resp)); - }), true); - await res.resolve(arg_fileId, arg_proposalId, - action: arg_action, - role: arg_role, - sendNotification: arg_sendNotification, - view: arg_view, - $fields: arg_$fields); - }); - }); - unittest.group('resource-OperationResource', () { unittest.test('method--cancel', () async { final mock = HttpServerMock(); diff --git a/generated/googleapis/test/eventarc/v1_test.dart b/generated/googleapis/test/eventarc/v1_test.dart index 7cdc93c97..c78697551 100644 --- a/generated/googleapis/test/eventarc/v1_test.dart +++ b/generated/googleapis/test/eventarc/v1_test.dart @@ -145,6 +145,23 @@ void checkBinding(api.Binding o) { buildCounterBinding--; } +core.Map buildUnnamed3() => { + 'x': 'foo', + 'y': 'foo', + }; + +void checkUnnamed3(core.Map o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o['x']!, + unittest.equals('foo'), + ); + unittest.expect( + o['y']!, + unittest.equals('foo'), + ); +} + core.int buildCounterChannel = 0; api.Channel buildChannel() { final o = api.Channel(); @@ -153,6 +170,7 @@ api.Channel buildChannel() { o.activationToken = 'foo'; o.createTime = 'foo'; o.cryptoKeyName = 'foo'; + o.labels = buildUnnamed3(); o.name = 'foo'; o.provider = 'foo'; o.pubsubTopic = 'foo'; @@ -180,6 +198,7 @@ void checkChannel(api.Channel o) { o.cryptoKeyName!, unittest.equals('foo'), ); + checkUnnamed3(o.labels!); unittest.expect( o.name!, unittest.equals('foo'), @@ -209,6 +228,23 @@ void checkChannel(api.Channel o) { buildCounterChannel--; } +core.Map buildUnnamed4() => { + 'x': 'foo', + 'y': 'foo', + }; + +void checkUnnamed4(core.Map o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o['x']!, + unittest.equals('foo'), + ); + unittest.expect( + o['y']!, + unittest.equals('foo'), + ); +} + core.int buildCounterChannelConnection = 0; api.ChannelConnection buildChannelConnection() { final o = api.ChannelConnection(); @@ -217,6 +253,7 @@ api.ChannelConnection buildChannelConnection() { o.activationToken = 'foo'; o.channel = 'foo'; o.createTime = 'foo'; + o.labels = buildUnnamed4(); o.name = 'foo'; o.uid = 'foo'; o.updateTime = 'foo'; @@ -240,6 +277,7 @@ void checkChannelConnection(api.ChannelConnection o) { o.createTime!, unittest.equals('foo'), ); + checkUnnamed4(o.labels!); unittest.expect( o.name!, unittest.equals('foo'), @@ -338,6 +376,106 @@ void checkEmpty(api.Empty o) { buildCounterEmpty--; } +core.Map buildUnnamed5() => { + 'x': 'foo', + 'y': 'foo', + }; + +void checkUnnamed5(core.Map o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o['x']!, + unittest.equals('foo'), + ); + unittest.expect( + o['y']!, + unittest.equals('foo'), + ); +} + +core.Map buildUnnamed6() => { + 'x': 'foo', + 'y': 'foo', + }; + +void checkUnnamed6(core.Map o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o['x']!, + unittest.equals('foo'), + ); + 
unittest.expect( + o['y']!, + unittest.equals('foo'), + ); +} + +core.int buildCounterEnrollment = 0; +api.Enrollment buildEnrollment() { + final o = api.Enrollment(); + buildCounterEnrollment++; + if (buildCounterEnrollment < 3) { + o.annotations = buildUnnamed5(); + o.celMatch = 'foo'; + o.createTime = 'foo'; + o.destination = 'foo'; + o.displayName = 'foo'; + o.etag = 'foo'; + o.labels = buildUnnamed6(); + o.messageBus = 'foo'; + o.name = 'foo'; + o.uid = 'foo'; + o.updateTime = 'foo'; + } + buildCounterEnrollment--; + return o; +} + +void checkEnrollment(api.Enrollment o) { + buildCounterEnrollment++; + if (buildCounterEnrollment < 3) { + checkUnnamed5(o.annotations!); + unittest.expect( + o.celMatch!, + unittest.equals('foo'), + ); + unittest.expect( + o.createTime!, + unittest.equals('foo'), + ); + unittest.expect( + o.destination!, + unittest.equals('foo'), + ); + unittest.expect( + o.displayName!, + unittest.equals('foo'), + ); + unittest.expect( + o.etag!, + unittest.equals('foo'), + ); + checkUnnamed6(o.labels!); + unittest.expect( + o.messageBus!, + unittest.equals('foo'), + ); + unittest.expect( + o.name!, + unittest.equals('foo'), + ); + unittest.expect( + o.uid!, + unittest.equals('foo'), + ); + unittest.expect( + o.updateTime!, + unittest.equals('foo'), + ); + } + buildCounterEnrollment--; +} + core.int buildCounterEventFilter = 0; api.EventFilter buildEventFilter() { final o = api.EventFilter(); @@ -370,12 +508,12 @@ void checkEventFilter(api.EventFilter o) { buildCounterEventFilter--; } -core.List buildUnnamed3() => [ +core.List buildUnnamed7() => [ buildFilteringAttribute(), buildFilteringAttribute(), ]; -void checkUnnamed3(core.List o) { +void checkUnnamed7(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkFilteringAttribute(o[0]); checkFilteringAttribute(o[1]); @@ -388,7 +526,7 @@ api.EventType buildEventType() { if (buildCounterEventType < 3) { o.description = 'foo'; o.eventSchemaUri = 'foo'; - o.filteringAttributes = buildUnnamed3(); + o.filteringAttributes = buildUnnamed7(); o.type = 'foo'; } buildCounterEventType--; @@ -406,7 +544,7 @@ void checkEventType(api.EventType o) { o.eventSchemaUri!, unittest.equals('foo'), ); - checkUnnamed3(o.filteringAttributes!); + checkUnnamed7(o.filteringAttributes!); unittest.expect( o.type!, unittest.equals('foo'), @@ -525,6 +663,103 @@ void checkGKE(api.GKE o) { buildCounterGKE--; } +core.Map buildUnnamed8() => { + 'x': 'foo', + 'y': 'foo', + }; + +void checkUnnamed8(core.Map o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o['x']!, + unittest.equals('foo'), + ); + unittest.expect( + o['y']!, + unittest.equals('foo'), + ); +} + +core.Map buildUnnamed9() => { + 'x': 'foo', + 'y': 'foo', + }; + +void checkUnnamed9(core.Map o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o['x']!, + unittest.equals('foo'), + ); + unittest.expect( + o['y']!, + unittest.equals('foo'), + ); +} + +core.int buildCounterGoogleApiSource = 0; +api.GoogleApiSource buildGoogleApiSource() { + final o = api.GoogleApiSource(); + buildCounterGoogleApiSource++; + if (buildCounterGoogleApiSource < 3) { + o.annotations = buildUnnamed8(); + o.createTime = 'foo'; + o.cryptoKeyName = 'foo'; + o.destination = 'foo'; + o.displayName = 'foo'; + o.etag = 'foo'; + o.labels = buildUnnamed9(); + o.loggingConfig = buildLoggingConfig(); + o.name = 'foo'; + o.uid = 'foo'; + o.updateTime = 'foo'; + } + buildCounterGoogleApiSource--; + return o; +} + +void checkGoogleApiSource(api.GoogleApiSource o) { + 
buildCounterGoogleApiSource++; + if (buildCounterGoogleApiSource < 3) { + checkUnnamed8(o.annotations!); + unittest.expect( + o.createTime!, + unittest.equals('foo'), + ); + unittest.expect( + o.cryptoKeyName!, + unittest.equals('foo'), + ); + unittest.expect( + o.destination!, + unittest.equals('foo'), + ); + unittest.expect( + o.displayName!, + unittest.equals('foo'), + ); + unittest.expect( + o.etag!, + unittest.equals('foo'), + ); + checkUnnamed9(o.labels!); + checkLoggingConfig(o.loggingConfig!); + unittest.expect( + o.name!, + unittest.equals('foo'), + ); + unittest.expect( + o.uid!, + unittest.equals('foo'), + ); + unittest.expect( + o.updateTime!, + unittest.equals('foo'), + ); + } + buildCounterGoogleApiSource--; +} + core.int buildCounterGoogleChannelConfig = 0; api.GoogleChannelConfig buildGoogleChannelConfig() { final o = api.GoogleChannelConfig(); @@ -557,78 +792,465 @@ void checkGoogleChannelConfig(api.GoogleChannelConfig o) { buildCounterGoogleChannelConfig--; } -core.int buildCounterGoogleLongrunningCancelOperationRequest = 0; -api.GoogleLongrunningCancelOperationRequest - buildGoogleLongrunningCancelOperationRequest() { - final o = api.GoogleLongrunningCancelOperationRequest(); - buildCounterGoogleLongrunningCancelOperationRequest++; - if (buildCounterGoogleLongrunningCancelOperationRequest < 3) {} - buildCounterGoogleLongrunningCancelOperationRequest--; +core.int buildCounterGoogleCloudEventarcV1PipelineDestination = 0; +api.GoogleCloudEventarcV1PipelineDestination + buildGoogleCloudEventarcV1PipelineDestination() { + final o = api.GoogleCloudEventarcV1PipelineDestination(); + buildCounterGoogleCloudEventarcV1PipelineDestination++; + if (buildCounterGoogleCloudEventarcV1PipelineDestination < 3) { + o.authenticationConfig = + buildGoogleCloudEventarcV1PipelineDestinationAuthenticationConfig(); + o.httpEndpoint = + buildGoogleCloudEventarcV1PipelineDestinationHttpEndpoint(); + o.messageBus = 'foo'; + o.networkConfig = + buildGoogleCloudEventarcV1PipelineDestinationNetworkConfig(); + o.outputPayloadFormat = + buildGoogleCloudEventarcV1PipelineMessagePayloadFormat(); + o.topic = 'foo'; + o.workflow = 'foo'; + } + buildCounterGoogleCloudEventarcV1PipelineDestination--; return o; } -void checkGoogleLongrunningCancelOperationRequest( - api.GoogleLongrunningCancelOperationRequest o) { - buildCounterGoogleLongrunningCancelOperationRequest++; - if (buildCounterGoogleLongrunningCancelOperationRequest < 3) {} - buildCounterGoogleLongrunningCancelOperationRequest--; +void checkGoogleCloudEventarcV1PipelineDestination( + api.GoogleCloudEventarcV1PipelineDestination o) { + buildCounterGoogleCloudEventarcV1PipelineDestination++; + if (buildCounterGoogleCloudEventarcV1PipelineDestination < 3) { + checkGoogleCloudEventarcV1PipelineDestinationAuthenticationConfig( + o.authenticationConfig!); + checkGoogleCloudEventarcV1PipelineDestinationHttpEndpoint(o.httpEndpoint!); + unittest.expect( + o.messageBus!, + unittest.equals('foo'), + ); + checkGoogleCloudEventarcV1PipelineDestinationNetworkConfig( + o.networkConfig!); + checkGoogleCloudEventarcV1PipelineMessagePayloadFormat( + o.outputPayloadFormat!); + unittest.expect( + o.topic!, + unittest.equals('foo'), + ); + unittest.expect( + o.workflow!, + unittest.equals('foo'), + ); + } + buildCounterGoogleCloudEventarcV1PipelineDestination--; } -core.List buildUnnamed4() => [ - buildGoogleLongrunningOperation(), - buildGoogleLongrunningOperation(), - ]; +core.int + buildCounterGoogleCloudEventarcV1PipelineDestinationAuthenticationConfig = + 0; 
+api.GoogleCloudEventarcV1PipelineDestinationAuthenticationConfig + buildGoogleCloudEventarcV1PipelineDestinationAuthenticationConfig() { + final o = api.GoogleCloudEventarcV1PipelineDestinationAuthenticationConfig(); + buildCounterGoogleCloudEventarcV1PipelineDestinationAuthenticationConfig++; + if (buildCounterGoogleCloudEventarcV1PipelineDestinationAuthenticationConfig < + 3) { + o.googleOidc = + buildGoogleCloudEventarcV1PipelineDestinationAuthenticationConfigOidcToken(); + o.oauthToken = + buildGoogleCloudEventarcV1PipelineDestinationAuthenticationConfigOAuthToken(); + } + buildCounterGoogleCloudEventarcV1PipelineDestinationAuthenticationConfig--; + return o; +} -void checkUnnamed4(core.List o) { - unittest.expect(o, unittest.hasLength(2)); - checkGoogleLongrunningOperation(o[0]); - checkGoogleLongrunningOperation(o[1]); +void checkGoogleCloudEventarcV1PipelineDestinationAuthenticationConfig( + api.GoogleCloudEventarcV1PipelineDestinationAuthenticationConfig o) { + buildCounterGoogleCloudEventarcV1PipelineDestinationAuthenticationConfig++; + if (buildCounterGoogleCloudEventarcV1PipelineDestinationAuthenticationConfig < + 3) { + checkGoogleCloudEventarcV1PipelineDestinationAuthenticationConfigOidcToken( + o.googleOidc!); + checkGoogleCloudEventarcV1PipelineDestinationAuthenticationConfigOAuthToken( + o.oauthToken!); + } + buildCounterGoogleCloudEventarcV1PipelineDestinationAuthenticationConfig--; } -core.int buildCounterGoogleLongrunningListOperationsResponse = 0; -api.GoogleLongrunningListOperationsResponse - buildGoogleLongrunningListOperationsResponse() { - final o = api.GoogleLongrunningListOperationsResponse(); - buildCounterGoogleLongrunningListOperationsResponse++; - if (buildCounterGoogleLongrunningListOperationsResponse < 3) { - o.nextPageToken = 'foo'; - o.operations = buildUnnamed4(); +core.int + buildCounterGoogleCloudEventarcV1PipelineDestinationAuthenticationConfigOAuthToken = + 0; +api.GoogleCloudEventarcV1PipelineDestinationAuthenticationConfigOAuthToken + buildGoogleCloudEventarcV1PipelineDestinationAuthenticationConfigOAuthToken() { + final o = api + .GoogleCloudEventarcV1PipelineDestinationAuthenticationConfigOAuthToken(); + buildCounterGoogleCloudEventarcV1PipelineDestinationAuthenticationConfigOAuthToken++; + if (buildCounterGoogleCloudEventarcV1PipelineDestinationAuthenticationConfigOAuthToken < + 3) { + o.scope = 'foo'; + o.serviceAccount = 'foo'; } - buildCounterGoogleLongrunningListOperationsResponse--; + buildCounterGoogleCloudEventarcV1PipelineDestinationAuthenticationConfigOAuthToken--; return o; } -void checkGoogleLongrunningListOperationsResponse( - api.GoogleLongrunningListOperationsResponse o) { - buildCounterGoogleLongrunningListOperationsResponse++; - if (buildCounterGoogleLongrunningListOperationsResponse < 3) { +void checkGoogleCloudEventarcV1PipelineDestinationAuthenticationConfigOAuthToken( + api.GoogleCloudEventarcV1PipelineDestinationAuthenticationConfigOAuthToken + o) { + buildCounterGoogleCloudEventarcV1PipelineDestinationAuthenticationConfigOAuthToken++; + if (buildCounterGoogleCloudEventarcV1PipelineDestinationAuthenticationConfigOAuthToken < + 3) { unittest.expect( - o.nextPageToken!, + o.scope!, + unittest.equals('foo'), + ); + unittest.expect( + o.serviceAccount!, unittest.equals('foo'), ); - checkUnnamed4(o.operations!); } - buildCounterGoogleLongrunningListOperationsResponse--; + buildCounterGoogleCloudEventarcV1PipelineDestinationAuthenticationConfigOAuthToken--; } -core.Map buildUnnamed5() => { - 'x': { - 'list': [1, 2, 3], - 'bool': 
true, - 'string': 'foo' - }, - 'y': { - 'list': [1, 2, 3], - 'bool': true, - 'string': 'foo' - }, - }; +core.int + buildCounterGoogleCloudEventarcV1PipelineDestinationAuthenticationConfigOidcToken = + 0; +api.GoogleCloudEventarcV1PipelineDestinationAuthenticationConfigOidcToken + buildGoogleCloudEventarcV1PipelineDestinationAuthenticationConfigOidcToken() { + final o = api + .GoogleCloudEventarcV1PipelineDestinationAuthenticationConfigOidcToken(); + buildCounterGoogleCloudEventarcV1PipelineDestinationAuthenticationConfigOidcToken++; + if (buildCounterGoogleCloudEventarcV1PipelineDestinationAuthenticationConfigOidcToken < + 3) { + o.audience = 'foo'; + o.serviceAccount = 'foo'; + } + buildCounterGoogleCloudEventarcV1PipelineDestinationAuthenticationConfigOidcToken--; + return o; +} -void checkUnnamed5(core.Map o) { - unittest.expect(o, unittest.hasLength(2)); - var casted1 = (o['x']!) as core.Map; - unittest.expect(casted1, unittest.hasLength(3)); - unittest.expect( +void checkGoogleCloudEventarcV1PipelineDestinationAuthenticationConfigOidcToken( + api.GoogleCloudEventarcV1PipelineDestinationAuthenticationConfigOidcToken + o) { + buildCounterGoogleCloudEventarcV1PipelineDestinationAuthenticationConfigOidcToken++; + if (buildCounterGoogleCloudEventarcV1PipelineDestinationAuthenticationConfigOidcToken < + 3) { + unittest.expect( + o.audience!, + unittest.equals('foo'), + ); + unittest.expect( + o.serviceAccount!, + unittest.equals('foo'), + ); + } + buildCounterGoogleCloudEventarcV1PipelineDestinationAuthenticationConfigOidcToken--; +} + +core.int buildCounterGoogleCloudEventarcV1PipelineDestinationHttpEndpoint = 0; +api.GoogleCloudEventarcV1PipelineDestinationHttpEndpoint + buildGoogleCloudEventarcV1PipelineDestinationHttpEndpoint() { + final o = api.GoogleCloudEventarcV1PipelineDestinationHttpEndpoint(); + buildCounterGoogleCloudEventarcV1PipelineDestinationHttpEndpoint++; + if (buildCounterGoogleCloudEventarcV1PipelineDestinationHttpEndpoint < 3) { + o.messageBindingTemplate = 'foo'; + o.uri = 'foo'; + } + buildCounterGoogleCloudEventarcV1PipelineDestinationHttpEndpoint--; + return o; +} + +void checkGoogleCloudEventarcV1PipelineDestinationHttpEndpoint( + api.GoogleCloudEventarcV1PipelineDestinationHttpEndpoint o) { + buildCounterGoogleCloudEventarcV1PipelineDestinationHttpEndpoint++; + if (buildCounterGoogleCloudEventarcV1PipelineDestinationHttpEndpoint < 3) { + unittest.expect( + o.messageBindingTemplate!, + unittest.equals('foo'), + ); + unittest.expect( + o.uri!, + unittest.equals('foo'), + ); + } + buildCounterGoogleCloudEventarcV1PipelineDestinationHttpEndpoint--; +} + +core.int buildCounterGoogleCloudEventarcV1PipelineDestinationNetworkConfig = 0; +api.GoogleCloudEventarcV1PipelineDestinationNetworkConfig + buildGoogleCloudEventarcV1PipelineDestinationNetworkConfig() { + final o = api.GoogleCloudEventarcV1PipelineDestinationNetworkConfig(); + buildCounterGoogleCloudEventarcV1PipelineDestinationNetworkConfig++; + if (buildCounterGoogleCloudEventarcV1PipelineDestinationNetworkConfig < 3) { + o.networkAttachment = 'foo'; + } + buildCounterGoogleCloudEventarcV1PipelineDestinationNetworkConfig--; + return o; +} + +void checkGoogleCloudEventarcV1PipelineDestinationNetworkConfig( + api.GoogleCloudEventarcV1PipelineDestinationNetworkConfig o) { + buildCounterGoogleCloudEventarcV1PipelineDestinationNetworkConfig++; + if (buildCounterGoogleCloudEventarcV1PipelineDestinationNetworkConfig < 3) { + unittest.expect( + o.networkAttachment!, + unittest.equals('foo'), + ); + } + 
buildCounterGoogleCloudEventarcV1PipelineDestinationNetworkConfig--; +} + +core.int buildCounterGoogleCloudEventarcV1PipelineMediation = 0; +api.GoogleCloudEventarcV1PipelineMediation + buildGoogleCloudEventarcV1PipelineMediation() { + final o = api.GoogleCloudEventarcV1PipelineMediation(); + buildCounterGoogleCloudEventarcV1PipelineMediation++; + if (buildCounterGoogleCloudEventarcV1PipelineMediation < 3) { + o.transformation = + buildGoogleCloudEventarcV1PipelineMediationTransformation(); + } + buildCounterGoogleCloudEventarcV1PipelineMediation--; + return o; +} + +void checkGoogleCloudEventarcV1PipelineMediation( + api.GoogleCloudEventarcV1PipelineMediation o) { + buildCounterGoogleCloudEventarcV1PipelineMediation++; + if (buildCounterGoogleCloudEventarcV1PipelineMediation < 3) { + checkGoogleCloudEventarcV1PipelineMediationTransformation( + o.transformation!); + } + buildCounterGoogleCloudEventarcV1PipelineMediation--; +} + +core.int buildCounterGoogleCloudEventarcV1PipelineMediationTransformation = 0; +api.GoogleCloudEventarcV1PipelineMediationTransformation + buildGoogleCloudEventarcV1PipelineMediationTransformation() { + final o = api.GoogleCloudEventarcV1PipelineMediationTransformation(); + buildCounterGoogleCloudEventarcV1PipelineMediationTransformation++; + if (buildCounterGoogleCloudEventarcV1PipelineMediationTransformation < 3) { + o.transformationTemplate = 'foo'; + } + buildCounterGoogleCloudEventarcV1PipelineMediationTransformation--; + return o; +} + +void checkGoogleCloudEventarcV1PipelineMediationTransformation( + api.GoogleCloudEventarcV1PipelineMediationTransformation o) { + buildCounterGoogleCloudEventarcV1PipelineMediationTransformation++; + if (buildCounterGoogleCloudEventarcV1PipelineMediationTransformation < 3) { + unittest.expect( + o.transformationTemplate!, + unittest.equals('foo'), + ); + } + buildCounterGoogleCloudEventarcV1PipelineMediationTransformation--; +} + +core.int buildCounterGoogleCloudEventarcV1PipelineMessagePayloadFormat = 0; +api.GoogleCloudEventarcV1PipelineMessagePayloadFormat + buildGoogleCloudEventarcV1PipelineMessagePayloadFormat() { + final o = api.GoogleCloudEventarcV1PipelineMessagePayloadFormat(); + buildCounterGoogleCloudEventarcV1PipelineMessagePayloadFormat++; + if (buildCounterGoogleCloudEventarcV1PipelineMessagePayloadFormat < 3) { + o.avro = buildGoogleCloudEventarcV1PipelineMessagePayloadFormatAvroFormat(); + o.json = buildGoogleCloudEventarcV1PipelineMessagePayloadFormatJsonFormat(); + o.protobuf = + buildGoogleCloudEventarcV1PipelineMessagePayloadFormatProtobufFormat(); + } + buildCounterGoogleCloudEventarcV1PipelineMessagePayloadFormat--; + return o; +} + +void checkGoogleCloudEventarcV1PipelineMessagePayloadFormat( + api.GoogleCloudEventarcV1PipelineMessagePayloadFormat o) { + buildCounterGoogleCloudEventarcV1PipelineMessagePayloadFormat++; + if (buildCounterGoogleCloudEventarcV1PipelineMessagePayloadFormat < 3) { + checkGoogleCloudEventarcV1PipelineMessagePayloadFormatAvroFormat(o.avro!); + checkGoogleCloudEventarcV1PipelineMessagePayloadFormatJsonFormat(o.json!); + checkGoogleCloudEventarcV1PipelineMessagePayloadFormatProtobufFormat( + o.protobuf!); + } + buildCounterGoogleCloudEventarcV1PipelineMessagePayloadFormat--; +} + +core.int + buildCounterGoogleCloudEventarcV1PipelineMessagePayloadFormatAvroFormat = 0; +api.GoogleCloudEventarcV1PipelineMessagePayloadFormatAvroFormat + buildGoogleCloudEventarcV1PipelineMessagePayloadFormatAvroFormat() { + final o = api.GoogleCloudEventarcV1PipelineMessagePayloadFormatAvroFormat(); + 
buildCounterGoogleCloudEventarcV1PipelineMessagePayloadFormatAvroFormat++; + if (buildCounterGoogleCloudEventarcV1PipelineMessagePayloadFormatAvroFormat < + 3) { + o.schemaDefinition = 'foo'; + } + buildCounterGoogleCloudEventarcV1PipelineMessagePayloadFormatAvroFormat--; + return o; +} + +void checkGoogleCloudEventarcV1PipelineMessagePayloadFormatAvroFormat( + api.GoogleCloudEventarcV1PipelineMessagePayloadFormatAvroFormat o) { + buildCounterGoogleCloudEventarcV1PipelineMessagePayloadFormatAvroFormat++; + if (buildCounterGoogleCloudEventarcV1PipelineMessagePayloadFormatAvroFormat < + 3) { + unittest.expect( + o.schemaDefinition!, + unittest.equals('foo'), + ); + } + buildCounterGoogleCloudEventarcV1PipelineMessagePayloadFormatAvroFormat--; +} + +core.int + buildCounterGoogleCloudEventarcV1PipelineMessagePayloadFormatJsonFormat = 0; +api.GoogleCloudEventarcV1PipelineMessagePayloadFormatJsonFormat + buildGoogleCloudEventarcV1PipelineMessagePayloadFormatJsonFormat() { + final o = api.GoogleCloudEventarcV1PipelineMessagePayloadFormatJsonFormat(); + buildCounterGoogleCloudEventarcV1PipelineMessagePayloadFormatJsonFormat++; + if (buildCounterGoogleCloudEventarcV1PipelineMessagePayloadFormatJsonFormat < + 3) {} + buildCounterGoogleCloudEventarcV1PipelineMessagePayloadFormatJsonFormat--; + return o; +} + +void checkGoogleCloudEventarcV1PipelineMessagePayloadFormatJsonFormat( + api.GoogleCloudEventarcV1PipelineMessagePayloadFormatJsonFormat o) { + buildCounterGoogleCloudEventarcV1PipelineMessagePayloadFormatJsonFormat++; + if (buildCounterGoogleCloudEventarcV1PipelineMessagePayloadFormatJsonFormat < + 3) {} + buildCounterGoogleCloudEventarcV1PipelineMessagePayloadFormatJsonFormat--; +} + +core.int + buildCounterGoogleCloudEventarcV1PipelineMessagePayloadFormatProtobufFormat = + 0; +api.GoogleCloudEventarcV1PipelineMessagePayloadFormatProtobufFormat + buildGoogleCloudEventarcV1PipelineMessagePayloadFormatProtobufFormat() { + final o = + api.GoogleCloudEventarcV1PipelineMessagePayloadFormatProtobufFormat(); + buildCounterGoogleCloudEventarcV1PipelineMessagePayloadFormatProtobufFormat++; + if (buildCounterGoogleCloudEventarcV1PipelineMessagePayloadFormatProtobufFormat < + 3) { + o.schemaDefinition = 'foo'; + } + buildCounterGoogleCloudEventarcV1PipelineMessagePayloadFormatProtobufFormat--; + return o; +} + +void checkGoogleCloudEventarcV1PipelineMessagePayloadFormatProtobufFormat( + api.GoogleCloudEventarcV1PipelineMessagePayloadFormatProtobufFormat o) { + buildCounterGoogleCloudEventarcV1PipelineMessagePayloadFormatProtobufFormat++; + if (buildCounterGoogleCloudEventarcV1PipelineMessagePayloadFormatProtobufFormat < + 3) { + unittest.expect( + o.schemaDefinition!, + unittest.equals('foo'), + ); + } + buildCounterGoogleCloudEventarcV1PipelineMessagePayloadFormatProtobufFormat--; +} + +core.int buildCounterGoogleCloudEventarcV1PipelineRetryPolicy = 0; +api.GoogleCloudEventarcV1PipelineRetryPolicy + buildGoogleCloudEventarcV1PipelineRetryPolicy() { + final o = api.GoogleCloudEventarcV1PipelineRetryPolicy(); + buildCounterGoogleCloudEventarcV1PipelineRetryPolicy++; + if (buildCounterGoogleCloudEventarcV1PipelineRetryPolicy < 3) { + o.maxAttempts = 42; + o.maxRetryDelay = 'foo'; + o.minRetryDelay = 'foo'; + } + buildCounterGoogleCloudEventarcV1PipelineRetryPolicy--; + return o; +} + +void checkGoogleCloudEventarcV1PipelineRetryPolicy( + api.GoogleCloudEventarcV1PipelineRetryPolicy o) { + buildCounterGoogleCloudEventarcV1PipelineRetryPolicy++; + if (buildCounterGoogleCloudEventarcV1PipelineRetryPolicy < 3) 
{ + unittest.expect( + o.maxAttempts!, + unittest.equals(42), + ); + unittest.expect( + o.maxRetryDelay!, + unittest.equals('foo'), + ); + unittest.expect( + o.minRetryDelay!, + unittest.equals('foo'), + ); + } + buildCounterGoogleCloudEventarcV1PipelineRetryPolicy--; +} + +core.int buildCounterGoogleLongrunningCancelOperationRequest = 0; +api.GoogleLongrunningCancelOperationRequest + buildGoogleLongrunningCancelOperationRequest() { + final o = api.GoogleLongrunningCancelOperationRequest(); + buildCounterGoogleLongrunningCancelOperationRequest++; + if (buildCounterGoogleLongrunningCancelOperationRequest < 3) {} + buildCounterGoogleLongrunningCancelOperationRequest--; + return o; +} + +void checkGoogleLongrunningCancelOperationRequest( + api.GoogleLongrunningCancelOperationRequest o) { + buildCounterGoogleLongrunningCancelOperationRequest++; + if (buildCounterGoogleLongrunningCancelOperationRequest < 3) {} + buildCounterGoogleLongrunningCancelOperationRequest--; +} + +core.List buildUnnamed10() => [ + buildGoogleLongrunningOperation(), + buildGoogleLongrunningOperation(), + ]; + +void checkUnnamed10(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkGoogleLongrunningOperation(o[0]); + checkGoogleLongrunningOperation(o[1]); +} + +core.int buildCounterGoogleLongrunningListOperationsResponse = 0; +api.GoogleLongrunningListOperationsResponse + buildGoogleLongrunningListOperationsResponse() { + final o = api.GoogleLongrunningListOperationsResponse(); + buildCounterGoogleLongrunningListOperationsResponse++; + if (buildCounterGoogleLongrunningListOperationsResponse < 3) { + o.nextPageToken = 'foo'; + o.operations = buildUnnamed10(); + } + buildCounterGoogleLongrunningListOperationsResponse--; + return o; +} + +void checkGoogleLongrunningListOperationsResponse( + api.GoogleLongrunningListOperationsResponse o) { + buildCounterGoogleLongrunningListOperationsResponse++; + if (buildCounterGoogleLongrunningListOperationsResponse < 3) { + unittest.expect( + o.nextPageToken!, + unittest.equals('foo'), + ); + checkUnnamed10(o.operations!); + } + buildCounterGoogleLongrunningListOperationsResponse--; +} + +core.Map buildUnnamed11() => { + 'x': { + 'list': [1, 2, 3], + 'bool': true, + 'string': 'foo' + }, + 'y': { + 'list': [1, 2, 3], + 'bool': true, + 'string': 'foo' + }, + }; + +void checkUnnamed11(core.Map o) { + unittest.expect(o, unittest.hasLength(2)); + var casted1 = (o['x']!) as core.Map; + unittest.expect(casted1, unittest.hasLength(3)); + unittest.expect( casted1['list'], unittest.equals([1, 2, 3]), ); @@ -656,7 +1278,7 @@ void checkUnnamed5(core.Map o) { ); } -core.Map buildUnnamed6() => { +core.Map buildUnnamed12() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -669,7 +1291,7 @@ core.Map buildUnnamed6() => { }, }; -void checkUnnamed6(core.Map o) { +void checkUnnamed12(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted3 = (o['x']!) 
as core.Map; unittest.expect(casted3, unittest.hasLength(3)); @@ -708,9 +1330,9 @@ api.GoogleLongrunningOperation buildGoogleLongrunningOperation() { if (buildCounterGoogleLongrunningOperation < 3) { o.done = true; o.error = buildGoogleRpcStatus(); - o.metadata = buildUnnamed5(); + o.metadata = buildUnnamed11(); o.name = 'foo'; - o.response = buildUnnamed6(); + o.response = buildUnnamed12(); } buildCounterGoogleLongrunningOperation--; return o; @@ -721,17 +1343,17 @@ void checkGoogleLongrunningOperation(api.GoogleLongrunningOperation o) { if (buildCounterGoogleLongrunningOperation < 3) { unittest.expect(o.done!, unittest.isTrue); checkGoogleRpcStatus(o.error!); - checkUnnamed5(o.metadata!); + checkUnnamed11(o.metadata!); unittest.expect( o.name!, unittest.equals('foo'), ); - checkUnnamed6(o.response!); + checkUnnamed12(o.response!); } buildCounterGoogleLongrunningOperation--; } -core.Map buildUnnamed7() => { +core.Map buildUnnamed13() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -744,7 +1366,7 @@ core.Map buildUnnamed7() => { }, }; -void checkUnnamed7(core.Map o) { +void checkUnnamed13(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted5 = (o['x']!) as core.Map; unittest.expect(casted5, unittest.hasLength(3)); @@ -776,15 +1398,15 @@ void checkUnnamed7(core.Map o) { ); } -core.List> buildUnnamed8() => [ - buildUnnamed7(), - buildUnnamed7(), +core.List> buildUnnamed14() => [ + buildUnnamed13(), + buildUnnamed13(), ]; -void checkUnnamed8(core.List> o) { +void checkUnnamed14(core.List> o) { unittest.expect(o, unittest.hasLength(2)); - checkUnnamed7(o[0]); - checkUnnamed7(o[1]); + checkUnnamed13(o[0]); + checkUnnamed13(o[1]); } core.int buildCounterGoogleRpcStatus = 0; @@ -793,7 +1415,7 @@ api.GoogleRpcStatus buildGoogleRpcStatus() { buildCounterGoogleRpcStatus++; if (buildCounterGoogleRpcStatus < 3) { o.code = 42; - o.details = buildUnnamed8(); + o.details = buildUnnamed14(); o.message = 'foo'; } buildCounterGoogleRpcStatus--; @@ -807,7 +1429,7 @@ void checkGoogleRpcStatus(api.GoogleRpcStatus o) { o.code!, unittest.equals(42), ); - checkUnnamed8(o.details!); + checkUnnamed14(o.details!); unittest.expect( o.message!, unittest.equals('foo'), @@ -838,23 +1460,23 @@ void checkHttpEndpoint(api.HttpEndpoint o) { buildCounterHttpEndpoint--; } -core.List buildUnnamed9() => [ +core.List buildUnnamed15() => [ buildChannelConnection(), buildChannelConnection(), ]; -void checkUnnamed9(core.List o) { +void checkUnnamed15(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkChannelConnection(o[0]); checkChannelConnection(o[1]); } -core.List buildUnnamed10() => [ +core.List buildUnnamed16() => [ 'foo', 'foo', ]; -void checkUnnamed10(core.List o) { +void checkUnnamed16(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -871,9 +1493,9 @@ api.ListChannelConnectionsResponse buildListChannelConnectionsResponse() { final o = api.ListChannelConnectionsResponse(); buildCounterListChannelConnectionsResponse++; if (buildCounterListChannelConnectionsResponse < 3) { - o.channelConnections = buildUnnamed9(); + o.channelConnections = buildUnnamed15(); o.nextPageToken = 'foo'; - o.unreachable = buildUnnamed10(); + o.unreachable = buildUnnamed16(); } buildCounterListChannelConnectionsResponse--; return o; @@ -882,33 +1504,33 @@ api.ListChannelConnectionsResponse buildListChannelConnectionsResponse() { void checkListChannelConnectionsResponse(api.ListChannelConnectionsResponse o) { buildCounterListChannelConnectionsResponse++; if 
(buildCounterListChannelConnectionsResponse < 3) { - checkUnnamed9(o.channelConnections!); + checkUnnamed15(o.channelConnections!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed10(o.unreachable!); + checkUnnamed16(o.unreachable!); } buildCounterListChannelConnectionsResponse--; } -core.List buildUnnamed11() => [ +core.List buildUnnamed17() => [ buildChannel(), buildChannel(), ]; -void checkUnnamed11(core.List o) { +void checkUnnamed17(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkChannel(o[0]); checkChannel(o[1]); } -core.List buildUnnamed12() => [ +core.List buildUnnamed18() => [ 'foo', 'foo', ]; -void checkUnnamed12(core.List o) { +void checkUnnamed18(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -925,9 +1547,9 @@ api.ListChannelsResponse buildListChannelsResponse() { final o = api.ListChannelsResponse(); buildCounterListChannelsResponse++; if (buildCounterListChannelsResponse < 3) { - o.channels = buildUnnamed11(); + o.channels = buildUnnamed17(); o.nextPageToken = 'foo'; - o.unreachable = buildUnnamed12(); + o.unreachable = buildUnnamed18(); } buildCounterListChannelsResponse--; return o; @@ -936,68 +1558,87 @@ api.ListChannelsResponse buildListChannelsResponse() { void checkListChannelsResponse(api.ListChannelsResponse o) { buildCounterListChannelsResponse++; if (buildCounterListChannelsResponse < 3) { - checkUnnamed11(o.channels!); + checkUnnamed17(o.channels!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed12(o.unreachable!); + checkUnnamed18(o.unreachable!); } buildCounterListChannelsResponse--; } -core.List buildUnnamed13() => [ - buildLocation(), - buildLocation(), +core.List buildUnnamed19() => [ + buildEnrollment(), + buildEnrollment(), ]; -void checkUnnamed13(core.List o) { +void checkUnnamed19(core.List o) { unittest.expect(o, unittest.hasLength(2)); - checkLocation(o[0]); - checkLocation(o[1]); + checkEnrollment(o[0]); + checkEnrollment(o[1]); } -core.int buildCounterListLocationsResponse = 0; -api.ListLocationsResponse buildListLocationsResponse() { - final o = api.ListLocationsResponse(); - buildCounterListLocationsResponse++; - if (buildCounterListLocationsResponse < 3) { - o.locations = buildUnnamed13(); +core.List buildUnnamed20() => [ + 'foo', + 'foo', + ]; + +void checkUnnamed20(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o[0], + unittest.equals('foo'), + ); + unittest.expect( + o[1], + unittest.equals('foo'), + ); +} + +core.int buildCounterListEnrollmentsResponse = 0; +api.ListEnrollmentsResponse buildListEnrollmentsResponse() { + final o = api.ListEnrollmentsResponse(); + buildCounterListEnrollmentsResponse++; + if (buildCounterListEnrollmentsResponse < 3) { + o.enrollments = buildUnnamed19(); o.nextPageToken = 'foo'; + o.unreachable = buildUnnamed20(); } - buildCounterListLocationsResponse--; + buildCounterListEnrollmentsResponse--; return o; } -void checkListLocationsResponse(api.ListLocationsResponse o) { - buildCounterListLocationsResponse++; - if (buildCounterListLocationsResponse < 3) { - checkUnnamed13(o.locations!); +void checkListEnrollmentsResponse(api.ListEnrollmentsResponse o) { + buildCounterListEnrollmentsResponse++; + if (buildCounterListEnrollmentsResponse < 3) { + checkUnnamed19(o.enrollments!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), ); + checkUnnamed20(o.unreachable!); } - buildCounterListLocationsResponse--; + buildCounterListEnrollmentsResponse--; } -core.List 
buildUnnamed14() => [ - buildProvider(), - buildProvider(), +core.List buildUnnamed21() => [ + buildGoogleApiSource(), + buildGoogleApiSource(), ]; -void checkUnnamed14(core.List o) { +void checkUnnamed21(core.List o) { unittest.expect(o, unittest.hasLength(2)); - checkProvider(o[0]); - checkProvider(o[1]); + checkGoogleApiSource(o[0]); + checkGoogleApiSource(o[1]); } -core.List buildUnnamed15() => [ +core.List buildUnnamed22() => [ 'foo', 'foo', ]; -void checkUnnamed15(core.List o) { +void checkUnnamed22(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -1009,49 +1650,73 @@ void checkUnnamed15(core.List o) { ); } -core.int buildCounterListProvidersResponse = 0; -api.ListProvidersResponse buildListProvidersResponse() { - final o = api.ListProvidersResponse(); - buildCounterListProvidersResponse++; - if (buildCounterListProvidersResponse < 3) { +core.int buildCounterListGoogleApiSourcesResponse = 0; +api.ListGoogleApiSourcesResponse buildListGoogleApiSourcesResponse() { + final o = api.ListGoogleApiSourcesResponse(); + buildCounterListGoogleApiSourcesResponse++; + if (buildCounterListGoogleApiSourcesResponse < 3) { + o.googleApiSources = buildUnnamed21(); o.nextPageToken = 'foo'; - o.providers = buildUnnamed14(); - o.unreachable = buildUnnamed15(); + o.unreachable = buildUnnamed22(); } - buildCounterListProvidersResponse--; + buildCounterListGoogleApiSourcesResponse--; return o; } -void checkListProvidersResponse(api.ListProvidersResponse o) { - buildCounterListProvidersResponse++; - if (buildCounterListProvidersResponse < 3) { +void checkListGoogleApiSourcesResponse(api.ListGoogleApiSourcesResponse o) { + buildCounterListGoogleApiSourcesResponse++; + if (buildCounterListGoogleApiSourcesResponse < 3) { + checkUnnamed21(o.googleApiSources!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed14(o.providers!); - checkUnnamed15(o.unreachable!); + checkUnnamed22(o.unreachable!); } - buildCounterListProvidersResponse--; + buildCounterListGoogleApiSourcesResponse--; } -core.List buildUnnamed16() => [ - buildTrigger(), - buildTrigger(), +core.List buildUnnamed23() => [ + buildLocation(), + buildLocation(), ]; -void checkUnnamed16(core.List o) { +void checkUnnamed23(core.List o) { unittest.expect(o, unittest.hasLength(2)); - checkTrigger(o[0]); - checkTrigger(o[1]); + checkLocation(o[0]); + checkLocation(o[1]); +} + +core.int buildCounterListLocationsResponse = 0; +api.ListLocationsResponse buildListLocationsResponse() { + final o = api.ListLocationsResponse(); + buildCounterListLocationsResponse++; + if (buildCounterListLocationsResponse < 3) { + o.locations = buildUnnamed23(); + o.nextPageToken = 'foo'; + } + buildCounterListLocationsResponse--; + return o; +} + +void checkListLocationsResponse(api.ListLocationsResponse o) { + buildCounterListLocationsResponse++; + if (buildCounterListLocationsResponse < 3) { + checkUnnamed23(o.locations!); + unittest.expect( + o.nextPageToken!, + unittest.equals('foo'), + ); + } + buildCounterListLocationsResponse--; } -core.List buildUnnamed17() => [ +core.List buildUnnamed24() => [ 'foo', 'foo', ]; -void checkUnnamed17(core.List o) { +void checkUnnamed24(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -1063,18 +1728,252 @@ void checkUnnamed17(core.List o) { ); } -core.int buildCounterListTriggersResponse = 0; -api.ListTriggersResponse buildListTriggersResponse() { - final o = api.ListTriggersResponse(); - buildCounterListTriggersResponse++; - if 
(buildCounterListTriggersResponse < 3) { - o.nextPageToken = 'foo'; - o.triggers = buildUnnamed16(); - o.unreachable = buildUnnamed17(); - } - buildCounterListTriggersResponse--; - return o; -} +core.List buildUnnamed25() => [ + 'foo', + 'foo', + ]; + +void checkUnnamed25(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o[0], + unittest.equals('foo'), + ); + unittest.expect( + o[1], + unittest.equals('foo'), + ); +} + +core.int buildCounterListMessageBusEnrollmentsResponse = 0; +api.ListMessageBusEnrollmentsResponse buildListMessageBusEnrollmentsResponse() { + final o = api.ListMessageBusEnrollmentsResponse(); + buildCounterListMessageBusEnrollmentsResponse++; + if (buildCounterListMessageBusEnrollmentsResponse < 3) { + o.enrollments = buildUnnamed24(); + o.nextPageToken = 'foo'; + o.unreachable = buildUnnamed25(); + } + buildCounterListMessageBusEnrollmentsResponse--; + return o; +} + +void checkListMessageBusEnrollmentsResponse( + api.ListMessageBusEnrollmentsResponse o) { + buildCounterListMessageBusEnrollmentsResponse++; + if (buildCounterListMessageBusEnrollmentsResponse < 3) { + checkUnnamed24(o.enrollments!); + unittest.expect( + o.nextPageToken!, + unittest.equals('foo'), + ); + checkUnnamed25(o.unreachable!); + } + buildCounterListMessageBusEnrollmentsResponse--; +} + +core.List buildUnnamed26() => [ + buildMessageBus(), + buildMessageBus(), + ]; + +void checkUnnamed26(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkMessageBus(o[0]); + checkMessageBus(o[1]); +} + +core.List buildUnnamed27() => [ + 'foo', + 'foo', + ]; + +void checkUnnamed27(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o[0], + unittest.equals('foo'), + ); + unittest.expect( + o[1], + unittest.equals('foo'), + ); +} + +core.int buildCounterListMessageBusesResponse = 0; +api.ListMessageBusesResponse buildListMessageBusesResponse() { + final o = api.ListMessageBusesResponse(); + buildCounterListMessageBusesResponse++; + if (buildCounterListMessageBusesResponse < 3) { + o.messageBuses = buildUnnamed26(); + o.nextPageToken = 'foo'; + o.unreachable = buildUnnamed27(); + } + buildCounterListMessageBusesResponse--; + return o; +} + +void checkListMessageBusesResponse(api.ListMessageBusesResponse o) { + buildCounterListMessageBusesResponse++; + if (buildCounterListMessageBusesResponse < 3) { + checkUnnamed26(o.messageBuses!); + unittest.expect( + o.nextPageToken!, + unittest.equals('foo'), + ); + checkUnnamed27(o.unreachable!); + } + buildCounterListMessageBusesResponse--; +} + +core.List buildUnnamed28() => [ + buildPipeline(), + buildPipeline(), + ]; + +void checkUnnamed28(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkPipeline(o[0]); + checkPipeline(o[1]); +} + +core.List buildUnnamed29() => [ + 'foo', + 'foo', + ]; + +void checkUnnamed29(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o[0], + unittest.equals('foo'), + ); + unittest.expect( + o[1], + unittest.equals('foo'), + ); +} + +core.int buildCounterListPipelinesResponse = 0; +api.ListPipelinesResponse buildListPipelinesResponse() { + final o = api.ListPipelinesResponse(); + buildCounterListPipelinesResponse++; + if (buildCounterListPipelinesResponse < 3) { + o.nextPageToken = 'foo'; + o.pipelines = buildUnnamed28(); + o.unreachable = buildUnnamed29(); + } + buildCounterListPipelinesResponse--; + return o; +} + +void checkListPipelinesResponse(api.ListPipelinesResponse o) { + buildCounterListPipelinesResponse++; + if 
(buildCounterListPipelinesResponse < 3) { + unittest.expect( + o.nextPageToken!, + unittest.equals('foo'), + ); + checkUnnamed28(o.pipelines!); + checkUnnamed29(o.unreachable!); + } + buildCounterListPipelinesResponse--; +} + +core.List buildUnnamed30() => [ + buildProvider(), + buildProvider(), + ]; + +void checkUnnamed30(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkProvider(o[0]); + checkProvider(o[1]); +} + +core.List buildUnnamed31() => [ + 'foo', + 'foo', + ]; + +void checkUnnamed31(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o[0], + unittest.equals('foo'), + ); + unittest.expect( + o[1], + unittest.equals('foo'), + ); +} + +core.int buildCounterListProvidersResponse = 0; +api.ListProvidersResponse buildListProvidersResponse() { + final o = api.ListProvidersResponse(); + buildCounterListProvidersResponse++; + if (buildCounterListProvidersResponse < 3) { + o.nextPageToken = 'foo'; + o.providers = buildUnnamed30(); + o.unreachable = buildUnnamed31(); + } + buildCounterListProvidersResponse--; + return o; +} + +void checkListProvidersResponse(api.ListProvidersResponse o) { + buildCounterListProvidersResponse++; + if (buildCounterListProvidersResponse < 3) { + unittest.expect( + o.nextPageToken!, + unittest.equals('foo'), + ); + checkUnnamed30(o.providers!); + checkUnnamed31(o.unreachable!); + } + buildCounterListProvidersResponse--; +} + +core.List buildUnnamed32() => [ + buildTrigger(), + buildTrigger(), + ]; + +void checkUnnamed32(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkTrigger(o[0]); + checkTrigger(o[1]); +} + +core.List buildUnnamed33() => [ + 'foo', + 'foo', + ]; + +void checkUnnamed33(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o[0], + unittest.equals('foo'), + ); + unittest.expect( + o[1], + unittest.equals('foo'), + ); +} + +core.int buildCounterListTriggersResponse = 0; +api.ListTriggersResponse buildListTriggersResponse() { + final o = api.ListTriggersResponse(); + buildCounterListTriggersResponse++; + if (buildCounterListTriggersResponse < 3) { + o.nextPageToken = 'foo'; + o.triggers = buildUnnamed32(); + o.unreachable = buildUnnamed33(); + } + buildCounterListTriggersResponse--; + return o; +} void checkListTriggersResponse(api.ListTriggersResponse o) { buildCounterListTriggersResponse++; @@ -1083,18 +1982,18 @@ void checkListTriggersResponse(api.ListTriggersResponse o) { o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed16(o.triggers!); - checkUnnamed17(o.unreachable!); + checkUnnamed32(o.triggers!); + checkUnnamed33(o.unreachable!); } buildCounterListTriggersResponse--; } -core.Map buildUnnamed18() => { +core.Map buildUnnamed34() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed18(core.Map o) { +void checkUnnamed34(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -1106,7 +2005,7 @@ void checkUnnamed18(core.Map o) { ); } -core.Map buildUnnamed19() => { +core.Map buildUnnamed35() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -1119,7 +2018,7 @@ core.Map buildUnnamed19() => { }, }; -void checkUnnamed19(core.Map o) { +void checkUnnamed35(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted7 = (o['x']!) 
as core.Map; unittest.expect(casted7, unittest.hasLength(3)); @@ -1157,9 +2056,9 @@ api.Location buildLocation() { buildCounterLocation++; if (buildCounterLocation < 3) { o.displayName = 'foo'; - o.labels = buildUnnamed18(); + o.labels = buildUnnamed34(); o.locationId = 'foo'; - o.metadata = buildUnnamed19(); + o.metadata = buildUnnamed35(); o.name = 'foo'; } buildCounterLocation--; @@ -1173,12 +2072,12 @@ void checkLocation(api.Location o) { o.displayName!, unittest.equals('foo'), ); - checkUnnamed18(o.labels!); + checkUnnamed34(o.labels!); unittest.expect( o.locationId!, unittest.equals('foo'), ); - checkUnnamed19(o.metadata!); + checkUnnamed35(o.metadata!); unittest.expect( o.name!, unittest.equals('foo'), @@ -1187,87 +2086,325 @@ void checkLocation(api.Location o) { buildCounterLocation--; } -core.int buildCounterNetworkConfig = 0; -api.NetworkConfig buildNetworkConfig() { - final o = api.NetworkConfig(); - buildCounterNetworkConfig++; - if (buildCounterNetworkConfig < 3) { - o.networkAttachment = 'foo'; +core.int buildCounterLoggingConfig = 0; +api.LoggingConfig buildLoggingConfig() { + final o = api.LoggingConfig(); + buildCounterLoggingConfig++; + if (buildCounterLoggingConfig < 3) { + o.logSeverity = 'foo'; } - buildCounterNetworkConfig--; + buildCounterLoggingConfig--; return o; } -void checkNetworkConfig(api.NetworkConfig o) { - buildCounterNetworkConfig++; - if (buildCounterNetworkConfig < 3) { +void checkLoggingConfig(api.LoggingConfig o) { + buildCounterLoggingConfig++; + if (buildCounterLoggingConfig < 3) { unittest.expect( - o.networkAttachment!, + o.logSeverity!, unittest.equals('foo'), ); } - buildCounterNetworkConfig--; + buildCounterLoggingConfig--; } -core.List buildUnnamed20() => [ - buildAuditConfig(), - buildAuditConfig(), - ]; +core.Map buildUnnamed36() => { + 'x': 'foo', + 'y': 'foo', + }; -void checkUnnamed20(core.List o) { +void checkUnnamed36(core.Map o) { unittest.expect(o, unittest.hasLength(2)); - checkAuditConfig(o[0]); - checkAuditConfig(o[1]); + unittest.expect( + o['x']!, + unittest.equals('foo'), + ); + unittest.expect( + o['y']!, + unittest.equals('foo'), + ); } -core.List buildUnnamed21() => [ - buildBinding(), - buildBinding(), - ]; +core.Map buildUnnamed37() => { + 'x': 'foo', + 'y': 'foo', + }; -void checkUnnamed21(core.List o) { +void checkUnnamed37(core.Map o) { unittest.expect(o, unittest.hasLength(2)); - checkBinding(o[0]); - checkBinding(o[1]); + unittest.expect( + o['x']!, + unittest.equals('foo'), + ); + unittest.expect( + o['y']!, + unittest.equals('foo'), + ); } -core.int buildCounterPolicy = 0; -api.Policy buildPolicy() { - final o = api.Policy(); - buildCounterPolicy++; - if (buildCounterPolicy < 3) { - o.auditConfigs = buildUnnamed20(); - o.bindings = buildUnnamed21(); +core.int buildCounterMessageBus = 0; +api.MessageBus buildMessageBus() { + final o = api.MessageBus(); + buildCounterMessageBus++; + if (buildCounterMessageBus < 3) { + o.annotations = buildUnnamed36(); + o.createTime = 'foo'; + o.cryptoKeyName = 'foo'; + o.displayName = 'foo'; o.etag = 'foo'; - o.version = 42; + o.labels = buildUnnamed37(); + o.loggingConfig = buildLoggingConfig(); + o.name = 'foo'; + o.uid = 'foo'; + o.updateTime = 'foo'; } - buildCounterPolicy--; + buildCounterMessageBus--; return o; } -void checkPolicy(api.Policy o) { - buildCounterPolicy++; - if (buildCounterPolicy < 3) { - checkUnnamed20(o.auditConfigs!); - checkUnnamed21(o.bindings!); +void checkMessageBus(api.MessageBus o) { + buildCounterMessageBus++; + if (buildCounterMessageBus < 3) { + 
checkUnnamed36(o.annotations!); + unittest.expect( + o.createTime!, + unittest.equals('foo'), + ); + unittest.expect( + o.cryptoKeyName!, + unittest.equals('foo'), + ); + unittest.expect( + o.displayName!, + unittest.equals('foo'), + ); unittest.expect( o.etag!, unittest.equals('foo'), ); + checkUnnamed37(o.labels!); + checkLoggingConfig(o.loggingConfig!); unittest.expect( - o.version!, - unittest.equals(42), + o.name!, + unittest.equals('foo'), + ); + unittest.expect( + o.uid!, + unittest.equals('foo'), + ); + unittest.expect( + o.updateTime!, + unittest.equals('foo'), ); } - buildCounterPolicy--; + buildCounterMessageBus--; } -core.List buildUnnamed22() => [ - buildEventType(), - buildEventType(), - ]; +core.int buildCounterNetworkConfig = 0; +api.NetworkConfig buildNetworkConfig() { + final o = api.NetworkConfig(); + buildCounterNetworkConfig++; + if (buildCounterNetworkConfig < 3) { + o.networkAttachment = 'foo'; + } + buildCounterNetworkConfig--; + return o; +} + +void checkNetworkConfig(api.NetworkConfig o) { + buildCounterNetworkConfig++; + if (buildCounterNetworkConfig < 3) { + unittest.expect( + o.networkAttachment!, + unittest.equals('foo'), + ); + } + buildCounterNetworkConfig--; +} + +core.Map buildUnnamed38() => { + 'x': 'foo', + 'y': 'foo', + }; + +void checkUnnamed38(core.Map o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o['x']!, + unittest.equals('foo'), + ); + unittest.expect( + o['y']!, + unittest.equals('foo'), + ); +} + +core.List buildUnnamed39() => [ + buildGoogleCloudEventarcV1PipelineDestination(), + buildGoogleCloudEventarcV1PipelineDestination(), + ]; + +void checkUnnamed39(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkGoogleCloudEventarcV1PipelineDestination(o[0]); + checkGoogleCloudEventarcV1PipelineDestination(o[1]); +} + +core.Map buildUnnamed40() => { + 'x': 'foo', + 'y': 'foo', + }; + +void checkUnnamed40(core.Map o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o['x']!, + unittest.equals('foo'), + ); + unittest.expect( + o['y']!, + unittest.equals('foo'), + ); +} + +core.List buildUnnamed41() => [ + buildGoogleCloudEventarcV1PipelineMediation(), + buildGoogleCloudEventarcV1PipelineMediation(), + ]; + +void checkUnnamed41(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkGoogleCloudEventarcV1PipelineMediation(o[0]); + checkGoogleCloudEventarcV1PipelineMediation(o[1]); +} + +core.int buildCounterPipeline = 0; +api.Pipeline buildPipeline() { + final o = api.Pipeline(); + buildCounterPipeline++; + if (buildCounterPipeline < 3) { + o.annotations = buildUnnamed38(); + o.createTime = 'foo'; + o.cryptoKeyName = 'foo'; + o.destinations = buildUnnamed39(); + o.displayName = 'foo'; + o.etag = 'foo'; + o.inputPayloadFormat = + buildGoogleCloudEventarcV1PipelineMessagePayloadFormat(); + o.labels = buildUnnamed40(); + o.loggingConfig = buildLoggingConfig(); + o.mediations = buildUnnamed41(); + o.name = 'foo'; + o.retryPolicy = buildGoogleCloudEventarcV1PipelineRetryPolicy(); + o.uid = 'foo'; + o.updateTime = 'foo'; + } + buildCounterPipeline--; + return o; +} + +void checkPipeline(api.Pipeline o) { + buildCounterPipeline++; + if (buildCounterPipeline < 3) { + checkUnnamed38(o.annotations!); + unittest.expect( + o.createTime!, + unittest.equals('foo'), + ); + unittest.expect( + o.cryptoKeyName!, + unittest.equals('foo'), + ); + checkUnnamed39(o.destinations!); + unittest.expect( + o.displayName!, + unittest.equals('foo'), + ); + unittest.expect( + o.etag!, + unittest.equals('foo'), + ); + 
checkGoogleCloudEventarcV1PipelineMessagePayloadFormat( + o.inputPayloadFormat!); + checkUnnamed40(o.labels!); + checkLoggingConfig(o.loggingConfig!); + checkUnnamed41(o.mediations!); + unittest.expect( + o.name!, + unittest.equals('foo'), + ); + checkGoogleCloudEventarcV1PipelineRetryPolicy(o.retryPolicy!); + unittest.expect( + o.uid!, + unittest.equals('foo'), + ); + unittest.expect( + o.updateTime!, + unittest.equals('foo'), + ); + } + buildCounterPipeline--; +} + +core.List buildUnnamed42() => [ + buildAuditConfig(), + buildAuditConfig(), + ]; + +void checkUnnamed42(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkAuditConfig(o[0]); + checkAuditConfig(o[1]); +} + +core.List buildUnnamed43() => [ + buildBinding(), + buildBinding(), + ]; + +void checkUnnamed43(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkBinding(o[0]); + checkBinding(o[1]); +} + +core.int buildCounterPolicy = 0; +api.Policy buildPolicy() { + final o = api.Policy(); + buildCounterPolicy++; + if (buildCounterPolicy < 3) { + o.auditConfigs = buildUnnamed42(); + o.bindings = buildUnnamed43(); + o.etag = 'foo'; + o.version = 42; + } + buildCounterPolicy--; + return o; +} + +void checkPolicy(api.Policy o) { + buildCounterPolicy++; + if (buildCounterPolicy < 3) { + checkUnnamed42(o.auditConfigs!); + checkUnnamed43(o.bindings!); + unittest.expect( + o.etag!, + unittest.equals('foo'), + ); + unittest.expect( + o.version!, + unittest.equals(42), + ); + } + buildCounterPolicy--; +} + +core.List buildUnnamed44() => [ + buildEventType(), + buildEventType(), + ]; -void checkUnnamed22(core.List o) { +void checkUnnamed44(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkEventType(o[0]); checkEventType(o[1]); @@ -1279,7 +2416,7 @@ api.Provider buildProvider() { buildCounterProvider++; if (buildCounterProvider < 3) { o.displayName = 'foo'; - o.eventTypes = buildUnnamed22(); + o.eventTypes = buildUnnamed44(); o.name = 'foo'; } buildCounterProvider--; @@ -1293,7 +2430,7 @@ void checkProvider(api.Provider o) { o.displayName!, unittest.equals('foo'), ); - checkUnnamed22(o.eventTypes!); + checkUnnamed44(o.eventTypes!); unittest.expect( o.name!, unittest.equals('foo'), @@ -1380,12 +2517,12 @@ void checkStateCondition(api.StateCondition o) { buildCounterStateCondition--; } -core.List buildUnnamed23() => [ +core.List buildUnnamed45() => [ 'foo', 'foo', ]; -void checkUnnamed23(core.List o) { +void checkUnnamed45(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -1402,7 +2539,7 @@ api.TestIamPermissionsRequest buildTestIamPermissionsRequest() { final o = api.TestIamPermissionsRequest(); buildCounterTestIamPermissionsRequest++; if (buildCounterTestIamPermissionsRequest < 3) { - o.permissions = buildUnnamed23(); + o.permissions = buildUnnamed45(); } buildCounterTestIamPermissionsRequest--; return o; @@ -1411,17 +2548,17 @@ api.TestIamPermissionsRequest buildTestIamPermissionsRequest() { void checkTestIamPermissionsRequest(api.TestIamPermissionsRequest o) { buildCounterTestIamPermissionsRequest++; if (buildCounterTestIamPermissionsRequest < 3) { - checkUnnamed23(o.permissions!); + checkUnnamed45(o.permissions!); } buildCounterTestIamPermissionsRequest--; } -core.List buildUnnamed24() => [ +core.List buildUnnamed46() => [ 'foo', 'foo', ]; -void checkUnnamed24(core.List o) { +void checkUnnamed46(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -1438,7 +2575,7 @@ api.TestIamPermissionsResponse buildTestIamPermissionsResponse() { final 
o = api.TestIamPermissionsResponse(); buildCounterTestIamPermissionsResponse++; if (buildCounterTestIamPermissionsResponse < 3) { - o.permissions = buildUnnamed24(); + o.permissions = buildUnnamed46(); } buildCounterTestIamPermissionsResponse--; return o; @@ -1447,7 +2584,7 @@ api.TestIamPermissionsResponse buildTestIamPermissionsResponse() { void checkTestIamPermissionsResponse(api.TestIamPermissionsResponse o) { buildCounterTestIamPermissionsResponse++; if (buildCounterTestIamPermissionsResponse < 3) { - checkUnnamed24(o.permissions!); + checkUnnamed46(o.permissions!); } buildCounterTestIamPermissionsResponse--; } @@ -1471,34 +2608,34 @@ void checkTransport(api.Transport o) { buildCounterTransport--; } -core.Map buildUnnamed25() => { +core.Map buildUnnamed47() => { 'x': buildStateCondition(), 'y': buildStateCondition(), }; -void checkUnnamed25(core.Map o) { +void checkUnnamed47(core.Map o) { unittest.expect(o, unittest.hasLength(2)); checkStateCondition(o['x']!); checkStateCondition(o['y']!); } -core.List buildUnnamed26() => [ +core.List buildUnnamed48() => [ buildEventFilter(), buildEventFilter(), ]; -void checkUnnamed26(core.List o) { +void checkUnnamed48(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkEventFilter(o[0]); checkEventFilter(o[1]); } -core.Map buildUnnamed27() => { +core.Map buildUnnamed49() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed27(core.Map o) { +void checkUnnamed49(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -1516,13 +2653,13 @@ api.Trigger buildTrigger() { buildCounterTrigger++; if (buildCounterTrigger < 3) { o.channel = 'foo'; - o.conditions = buildUnnamed25(); + o.conditions = buildUnnamed47(); o.createTime = 'foo'; o.destination = buildDestination(); o.etag = 'foo'; o.eventDataContentType = 'foo'; - o.eventFilters = buildUnnamed26(); - o.labels = buildUnnamed27(); + o.eventFilters = buildUnnamed48(); + o.labels = buildUnnamed49(); o.name = 'foo'; o.satisfiesPzs = true; o.serviceAccount = 'foo'; @@ -1541,7 +2678,7 @@ void checkTrigger(api.Trigger o) { o.channel!, unittest.equals('foo'), ); - checkUnnamed25(o.conditions!); + checkUnnamed47(o.conditions!); unittest.expect( o.createTime!, unittest.equals('foo'), @@ -1555,8 +2692,8 @@ void checkTrigger(api.Trigger o) { o.eventDataContentType!, unittest.equals('foo'), ); - checkUnnamed26(o.eventFilters!); - checkUnnamed27(o.labels!); + checkUnnamed48(o.eventFilters!); + checkUnnamed49(o.labels!); unittest.expect( o.name!, unittest.equals('foo'), @@ -1660,6 +2797,16 @@ void main() { }); }); + unittest.group('obj-schema-Enrollment', () { + unittest.test('to-json--from-json', () async { + final o = buildEnrollment(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.Enrollment.fromJson(oJson as core.Map); + checkEnrollment(od); + }); + }); + unittest.group('obj-schema-EventFilter', () { unittest.test('to-json--from-json', () async { final o = buildEventFilter(); @@ -1709,6 +2856,16 @@ void main() { }); }); + unittest.group('obj-schema-GoogleApiSource', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleApiSource(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleApiSource.fromJson( + oJson as core.Map); + checkGoogleApiSource(od); + }); + }); + unittest.group('obj-schema-GoogleChannelConfig', () { unittest.test('to-json--from-json', () async { final o = buildGoogleChannelConfig(); @@ -1719,6 +2876,167 @@ void main() { }); }); + 
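+  // The obj-schema groups below cover the newly added Pipeline-related
+  // schemas (destinations, authentication configs, mediations, payload
+  // formats, retry policy). Each test appears to follow the same
+  // round-trip pattern: build a populated object, encode it with
+  // jsonEncode, decode it back, rebuild it via fromJson, and re-verify
+  // every field with the matching check* helper; the buildCounter
+  // guards cap nesting so recursive schemas cannot loop forever.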
unittest.group('obj-schema-GoogleCloudEventarcV1PipelineDestination', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudEventarcV1PipelineDestination(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudEventarcV1PipelineDestination.fromJson( + oJson as core.Map); + checkGoogleCloudEventarcV1PipelineDestination(od); + }); + }); + + unittest.group( + 'obj-schema-GoogleCloudEventarcV1PipelineDestinationAuthenticationConfig', + () { + unittest.test('to-json--from-json', () async { + final o = + buildGoogleCloudEventarcV1PipelineDestinationAuthenticationConfig(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.GoogleCloudEventarcV1PipelineDestinationAuthenticationConfig + .fromJson(oJson as core.Map); + checkGoogleCloudEventarcV1PipelineDestinationAuthenticationConfig(od); + }); + }); + + unittest.group( + 'obj-schema-GoogleCloudEventarcV1PipelineDestinationAuthenticationConfigOAuthToken', + () { + unittest.test('to-json--from-json', () async { + final o = + buildGoogleCloudEventarcV1PipelineDestinationAuthenticationConfigOAuthToken(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.GoogleCloudEventarcV1PipelineDestinationAuthenticationConfigOAuthToken + .fromJson(oJson as core.Map); + checkGoogleCloudEventarcV1PipelineDestinationAuthenticationConfigOAuthToken( + od); + }); + }); + + unittest.group( + 'obj-schema-GoogleCloudEventarcV1PipelineDestinationAuthenticationConfigOidcToken', + () { + unittest.test('to-json--from-json', () async { + final o = + buildGoogleCloudEventarcV1PipelineDestinationAuthenticationConfigOidcToken(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.GoogleCloudEventarcV1PipelineDestinationAuthenticationConfigOidcToken + .fromJson(oJson as core.Map); + checkGoogleCloudEventarcV1PipelineDestinationAuthenticationConfigOidcToken( + od); + }); + }); + + unittest.group( + 'obj-schema-GoogleCloudEventarcV1PipelineDestinationHttpEndpoint', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudEventarcV1PipelineDestinationHttpEndpoint(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.GoogleCloudEventarcV1PipelineDestinationHttpEndpoint.fromJson( + oJson as core.Map); + checkGoogleCloudEventarcV1PipelineDestinationHttpEndpoint(od); + }); + }); + + unittest.group( + 'obj-schema-GoogleCloudEventarcV1PipelineDestinationNetworkConfig', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudEventarcV1PipelineDestinationNetworkConfig(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.GoogleCloudEventarcV1PipelineDestinationNetworkConfig.fromJson( + oJson as core.Map); + checkGoogleCloudEventarcV1PipelineDestinationNetworkConfig(od); + }); + }); + + unittest.group('obj-schema-GoogleCloudEventarcV1PipelineMediation', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudEventarcV1PipelineMediation(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudEventarcV1PipelineMediation.fromJson( + oJson as core.Map); + checkGoogleCloudEventarcV1PipelineMediation(od); + }); + }); + + unittest.group( + 'obj-schema-GoogleCloudEventarcV1PipelineMediationTransformation', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudEventarcV1PipelineMediationTransformation(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od 
= + api.GoogleCloudEventarcV1PipelineMediationTransformation.fromJson( + oJson as core.Map); + checkGoogleCloudEventarcV1PipelineMediationTransformation(od); + }); + }); + + unittest.group('obj-schema-GoogleCloudEventarcV1PipelineMessagePayloadFormat', + () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudEventarcV1PipelineMessagePayloadFormat(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudEventarcV1PipelineMessagePayloadFormat.fromJson( + oJson as core.Map); + checkGoogleCloudEventarcV1PipelineMessagePayloadFormat(od); + }); + }); + + unittest.group( + 'obj-schema-GoogleCloudEventarcV1PipelineMessagePayloadFormatAvroFormat', + () { + unittest.test('to-json--from-json', () async { + final o = + buildGoogleCloudEventarcV1PipelineMessagePayloadFormatAvroFormat(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudEventarcV1PipelineMessagePayloadFormatAvroFormat + .fromJson(oJson as core.Map); + checkGoogleCloudEventarcV1PipelineMessagePayloadFormatAvroFormat(od); + }); + }); + + unittest.group( + 'obj-schema-GoogleCloudEventarcV1PipelineMessagePayloadFormatJsonFormat', + () { + unittest.test('to-json--from-json', () async { + final o = + buildGoogleCloudEventarcV1PipelineMessagePayloadFormatJsonFormat(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudEventarcV1PipelineMessagePayloadFormatJsonFormat + .fromJson(oJson as core.Map); + checkGoogleCloudEventarcV1PipelineMessagePayloadFormatJsonFormat(od); + }); + }); + + unittest.group( + 'obj-schema-GoogleCloudEventarcV1PipelineMessagePayloadFormatProtobufFormat', + () { + unittest.test('to-json--from-json', () async { + final o = + buildGoogleCloudEventarcV1PipelineMessagePayloadFormatProtobufFormat(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.GoogleCloudEventarcV1PipelineMessagePayloadFormatProtobufFormat + .fromJson(oJson as core.Map); + checkGoogleCloudEventarcV1PipelineMessagePayloadFormatProtobufFormat(od); + }); + }); + + unittest.group('obj-schema-GoogleCloudEventarcV1PipelineRetryPolicy', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudEventarcV1PipelineRetryPolicy(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudEventarcV1PipelineRetryPolicy.fromJson( + oJson as core.Map); + checkGoogleCloudEventarcV1PipelineRetryPolicy(od); + }); + }); + unittest.group('obj-schema-GoogleLongrunningCancelOperationRequest', () { unittest.test('to-json--from-json', () async { final o = buildGoogleLongrunningCancelOperationRequest(); @@ -1789,31 +3107,81 @@ void main() { }); }); - unittest.group('obj-schema-ListLocationsResponse', () { + unittest.group('obj-schema-ListEnrollmentsResponse', () { unittest.test('to-json--from-json', () async { - final o = buildListLocationsResponse(); + final o = buildListEnrollmentsResponse(); final oJson = convert.jsonDecode(convert.jsonEncode(o)); - final od = api.ListLocationsResponse.fromJson( + final od = api.ListEnrollmentsResponse.fromJson( oJson as core.Map); - checkListLocationsResponse(od); + checkListEnrollmentsResponse(od); }); }); - unittest.group('obj-schema-ListProvidersResponse', () { + unittest.group('obj-schema-ListGoogleApiSourcesResponse', () { unittest.test('to-json--from-json', () async { - final o = buildListProvidersResponse(); + final o = buildListGoogleApiSourcesResponse(); final oJson = convert.jsonDecode(convert.jsonEncode(o)); - final od = 
api.ListProvidersResponse.fromJson( + final od = api.ListGoogleApiSourcesResponse.fromJson( oJson as core.Map); - checkListProvidersResponse(od); + checkListGoogleApiSourcesResponse(od); }); }); - unittest.group('obj-schema-ListTriggersResponse', () { + unittest.group('obj-schema-ListLocationsResponse', () { unittest.test('to-json--from-json', () async { - final o = buildListTriggersResponse(); + final o = buildListLocationsResponse(); final oJson = convert.jsonDecode(convert.jsonEncode(o)); - final od = api.ListTriggersResponse.fromJson( + final od = api.ListLocationsResponse.fromJson( + oJson as core.Map); + checkListLocationsResponse(od); + }); + }); + + unittest.group('obj-schema-ListMessageBusEnrollmentsResponse', () { + unittest.test('to-json--from-json', () async { + final o = buildListMessageBusEnrollmentsResponse(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.ListMessageBusEnrollmentsResponse.fromJson( + oJson as core.Map); + checkListMessageBusEnrollmentsResponse(od); + }); + }); + + unittest.group('obj-schema-ListMessageBusesResponse', () { + unittest.test('to-json--from-json', () async { + final o = buildListMessageBusesResponse(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.ListMessageBusesResponse.fromJson( + oJson as core.Map); + checkListMessageBusesResponse(od); + }); + }); + + unittest.group('obj-schema-ListPipelinesResponse', () { + unittest.test('to-json--from-json', () async { + final o = buildListPipelinesResponse(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.ListPipelinesResponse.fromJson( + oJson as core.Map); + checkListPipelinesResponse(od); + }); + }); + + unittest.group('obj-schema-ListProvidersResponse', () { + unittest.test('to-json--from-json', () async { + final o = buildListProvidersResponse(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.ListProvidersResponse.fromJson( + oJson as core.Map); + checkListProvidersResponse(od); + }); + }); + + unittest.group('obj-schema-ListTriggersResponse', () { + unittest.test('to-json--from-json', () async { + final o = buildListTriggersResponse(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.ListTriggersResponse.fromJson( oJson as core.Map); checkListTriggersResponse(od); }); @@ -1829,6 +3197,26 @@ void main() { }); }); + unittest.group('obj-schema-LoggingConfig', () { + unittest.test('to-json--from-json', () async { + final o = buildLoggingConfig(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.LoggingConfig.fromJson( + oJson as core.Map); + checkLoggingConfig(od); + }); + }); + + unittest.group('obj-schema-MessageBus', () { + unittest.test('to-json--from-json', () async { + final o = buildMessageBus(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.MessageBus.fromJson(oJson as core.Map); + checkMessageBus(od); + }); + }); + unittest.group('obj-schema-NetworkConfig', () { unittest.test('to-json--from-json', () async { final o = buildNetworkConfig(); @@ -1839,6 +3227,16 @@ void main() { }); }); + unittest.group('obj-schema-Pipeline', () { + unittest.test('to-json--from-json', () async { + final o = buildPipeline(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.Pipeline.fromJson(oJson as core.Map); + checkPipeline(od); + }); + }); + unittest.group('obj-schema-Policy', () { unittest.test('to-json--from-json', () async { final o = buildPolicy(); @@ -1859,80 +3257,1380 @@ void 
main() { }); }); - unittest.group('obj-schema-Pubsub', () { - unittest.test('to-json--from-json', () async { - final o = buildPubsub(); - final oJson = convert.jsonDecode(convert.jsonEncode(o)); - final od = - api.Pubsub.fromJson(oJson as core.Map); - checkPubsub(od); - }); - }); + unittest.group('obj-schema-Pubsub', () { + unittest.test('to-json--from-json', () async { + final o = buildPubsub(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.Pubsub.fromJson(oJson as core.Map); + checkPubsub(od); + }); + }); + + unittest.group('obj-schema-SetIamPolicyRequest', () { + unittest.test('to-json--from-json', () async { + final o = buildSetIamPolicyRequest(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.SetIamPolicyRequest.fromJson( + oJson as core.Map); + checkSetIamPolicyRequest(od); + }); + }); + + unittest.group('obj-schema-StateCondition', () { + unittest.test('to-json--from-json', () async { + final o = buildStateCondition(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.StateCondition.fromJson( + oJson as core.Map); + checkStateCondition(od); + }); + }); + + unittest.group('obj-schema-TestIamPermissionsRequest', () { + unittest.test('to-json--from-json', () async { + final o = buildTestIamPermissionsRequest(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.TestIamPermissionsRequest.fromJson( + oJson as core.Map); + checkTestIamPermissionsRequest(od); + }); + }); + + unittest.group('obj-schema-TestIamPermissionsResponse', () { + unittest.test('to-json--from-json', () async { + final o = buildTestIamPermissionsResponse(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.TestIamPermissionsResponse.fromJson( + oJson as core.Map); + checkTestIamPermissionsResponse(od); + }); + }); + + unittest.group('obj-schema-Transport', () { + unittest.test('to-json--from-json', () async { + final o = buildTransport(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.Transport.fromJson(oJson as core.Map); + checkTransport(od); + }); + }); + + unittest.group('obj-schema-Trigger', () { + unittest.test('to-json--from-json', () async { + final o = buildTrigger(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.Trigger.fromJson(oJson as core.Map); + checkTrigger(od); + }); + }); + + unittest.group('resource-ProjectsLocationsResource', () { + unittest.test('method--get', () async { + final mock = HttpServerMock(); + final res = api.EventarcApi(mock).projects.locations; + final arg_name = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + 
queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildLocation()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.get(arg_name, $fields: arg_$fields); + checkLocation(response as api.Location); + }); + + unittest.test('method--getGoogleChannelConfig', () async { + final mock = HttpServerMock(); + final res = api.EventarcApi(mock).projects.locations; + final arg_name = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildGoogleChannelConfig()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = + await res.getGoogleChannelConfig(arg_name, $fields: arg_$fields); + checkGoogleChannelConfig(response as api.GoogleChannelConfig); + }); + + unittest.test('method--list', () async { + final mock = HttpServerMock(); + final res = api.EventarcApi(mock).projects.locations; + final arg_name = 'foo'; + final arg_filter = 'foo'; + final arg_pageSize = 42; + final arg_pageToken = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['filter']!.first, + unittest.equals(arg_filter), + ); + unittest.expect( + core.int.parse(queryMap['pageSize']!.first), + unittest.equals(arg_pageSize), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = 
convert.json.encode(buildListLocationsResponse()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.list(arg_name, + filter: arg_filter, + pageSize: arg_pageSize, + pageToken: arg_pageToken, + $fields: arg_$fields); + checkListLocationsResponse(response as api.ListLocationsResponse); + }); + + unittest.test('method--updateGoogleChannelConfig', () async { + final mock = HttpServerMock(); + final res = api.EventarcApi(mock).projects.locations; + final arg_request = buildGoogleChannelConfig(); + final arg_name = 'foo'; + final arg_updateMask = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.GoogleChannelConfig.fromJson( + json as core.Map); + checkGoogleChannelConfig(obj); + + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['updateMask']!.first, + unittest.equals(arg_updateMask), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildGoogleChannelConfig()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.updateGoogleChannelConfig( + arg_request, arg_name, + updateMask: arg_updateMask, $fields: arg_$fields); + checkGoogleChannelConfig(response as api.GoogleChannelConfig); + }); + }); + + unittest.group('resource-ProjectsLocationsChannelConnectionsResource', () { + unittest.test('method--create', () async { + final mock = HttpServerMock(); + final res = api.EventarcApi(mock).projects.locations.channelConnections; + final arg_request = buildChannelConnection(); + final arg_parent = 'foo'; + final arg_channelConnectionId = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.ChannelConnection.fromJson( + json as core.Map); + checkChannelConnection(obj); + + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + 
core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['channelConnectionId']!.first, + unittest.equals(arg_channelConnectionId), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildGoogleLongrunningOperation()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.create(arg_request, arg_parent, + channelConnectionId: arg_channelConnectionId, $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); + }); + + unittest.test('method--delete', () async { + final mock = HttpServerMock(); + final res = api.EventarcApi(mock).projects.locations.channelConnections; + final arg_name = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildGoogleLongrunningOperation()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.delete(arg_name, $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); + }); + + unittest.test('method--get', () async { + final mock = HttpServerMock(); + final res = api.EventarcApi(mock).projects.locations.channelConnections; + final arg_name = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = 
convert.json.encode(buildChannelConnection()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.get(arg_name, $fields: arg_$fields); + checkChannelConnection(response as api.ChannelConnection); + }); + + unittest.test('method--getIamPolicy', () async { + final mock = HttpServerMock(); + final res = api.EventarcApi(mock).projects.locations.channelConnections; + final arg_resource = 'foo'; + final arg_options_requestedPolicyVersion = 42; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + core.int.parse(queryMap['options.requestedPolicyVersion']!.first), + unittest.equals(arg_options_requestedPolicyVersion), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildPolicy()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.getIamPolicy(arg_resource, + options_requestedPolicyVersion: arg_options_requestedPolicyVersion, + $fields: arg_$fields); + checkPolicy(response as api.Policy); + }); + + unittest.test('method--list', () async { + final mock = HttpServerMock(); + final res = api.EventarcApi(mock).projects.locations.channelConnections; + final arg_parent = 'foo'; + final arg_pageSize = 42; + final arg_pageToken = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + core.int.parse(queryMap['pageSize']!.first), + unittest.equals(arg_pageSize), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = 
convert.json.encode(buildListChannelConnectionsResponse()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.list(arg_parent, + pageSize: arg_pageSize, + pageToken: arg_pageToken, + $fields: arg_$fields); + checkListChannelConnectionsResponse( + response as api.ListChannelConnectionsResponse); + }); + + unittest.test('method--setIamPolicy', () async { + final mock = HttpServerMock(); + final res = api.EventarcApi(mock).projects.locations.channelConnections; + final arg_request = buildSetIamPolicyRequest(); + final arg_resource = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.SetIamPolicyRequest.fromJson( + json as core.Map); + checkSetIamPolicyRequest(obj); + + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildPolicy()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.setIamPolicy(arg_request, arg_resource, + $fields: arg_$fields); + checkPolicy(response as api.Policy); + }); + + unittest.test('method--testIamPermissions', () async { + final mock = HttpServerMock(); + final res = api.EventarcApi(mock).projects.locations.channelConnections; + final arg_request = buildTestIamPermissionsRequest(); + final arg_resource = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.TestIamPermissionsRequest.fromJson( + json as core.Map); + checkTestIamPermissionsRequest(obj); + + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildTestIamPermissionsResponse()); + return 
async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.testIamPermissions(arg_request, arg_resource, + $fields: arg_$fields); + checkTestIamPermissionsResponse( + response as api.TestIamPermissionsResponse); + }); + }); + + unittest.group('resource-ProjectsLocationsChannelsResource', () { + unittest.test('method--create', () async { + final mock = HttpServerMock(); + final res = api.EventarcApi(mock).projects.locations.channels; + final arg_request = buildChannel(); + final arg_parent = 'foo'; + final arg_channelId = 'foo'; + final arg_validateOnly = true; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = + api.Channel.fromJson(json as core.Map); + checkChannel(obj); + + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['channelId']!.first, + unittest.equals(arg_channelId), + ); + unittest.expect( + queryMap['validateOnly']!.first, + unittest.equals('$arg_validateOnly'), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildGoogleLongrunningOperation()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.create(arg_request, arg_parent, + channelId: arg_channelId, + validateOnly: arg_validateOnly, + $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); + }); + + unittest.test('method--delete', () async { + final mock = HttpServerMock(); + final res = api.EventarcApi(mock).projects.locations.channels; + final arg_name = 'foo'; + final arg_validateOnly = true; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['validateOnly']!.first, + unittest.equals('$arg_validateOnly'), + ); + 
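+        // validateOnly is a boolean flag, so the mock compares it against
+        // its string form ('$arg_validateOnly') as it arrives on the wire.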
unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildGoogleLongrunningOperation()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.delete(arg_name, + validateOnly: arg_validateOnly, $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); + }); + + unittest.test('method--get', () async { + final mock = HttpServerMock(); + final res = api.EventarcApi(mock).projects.locations.channels; + final arg_name = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildChannel()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.get(arg_name, $fields: arg_$fields); + checkChannel(response as api.Channel); + }); + + unittest.test('method--getIamPolicy', () async { + final mock = HttpServerMock(); + final res = api.EventarcApi(mock).projects.locations.channels; + final arg_resource = 'foo'; + final arg_options_requestedPolicyVersion = 42; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + core.int.parse(queryMap['options.requestedPolicyVersion']!.first), + unittest.equals(arg_options_requestedPolicyVersion), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildPolicy()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final 
response = await res.getIamPolicy(arg_resource, + options_requestedPolicyVersion: arg_options_requestedPolicyVersion, + $fields: arg_$fields); + checkPolicy(response as api.Policy); + }); + + unittest.test('method--list', () async { + final mock = HttpServerMock(); + final res = api.EventarcApi(mock).projects.locations.channels; + final arg_parent = 'foo'; + final arg_orderBy = 'foo'; + final arg_pageSize = 42; + final arg_pageToken = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['orderBy']!.first, + unittest.equals(arg_orderBy), + ); + unittest.expect( + core.int.parse(queryMap['pageSize']!.first), + unittest.equals(arg_pageSize), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildListChannelsResponse()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.list(arg_parent, + orderBy: arg_orderBy, + pageSize: arg_pageSize, + pageToken: arg_pageToken, + $fields: arg_$fields); + checkListChannelsResponse(response as api.ListChannelsResponse); + }); + + unittest.test('method--patch', () async { + final mock = HttpServerMock(); + final res = api.EventarcApi(mock).projects.locations.channels; + final arg_request = buildChannel(); + final arg_name = 'foo'; + final arg_updateMask = 'foo'; + final arg_validateOnly = true; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = + api.Channel.fromJson(json as core.Map); + checkChannel(obj); + + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['updateMask']!.first, + unittest.equals(arg_updateMask), + ); + unittest.expect( + 
queryMap['validateOnly']!.first, + unittest.equals('$arg_validateOnly'), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildGoogleLongrunningOperation()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.patch(arg_request, arg_name, + updateMask: arg_updateMask, + validateOnly: arg_validateOnly, + $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); + }); + + unittest.test('method--setIamPolicy', () async { + final mock = HttpServerMock(); + final res = api.EventarcApi(mock).projects.locations.channels; + final arg_request = buildSetIamPolicyRequest(); + final arg_resource = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.SetIamPolicyRequest.fromJson( + json as core.Map); + checkSetIamPolicyRequest(obj); + + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildPolicy()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.setIamPolicy(arg_request, arg_resource, + $fields: arg_$fields); + checkPolicy(response as api.Policy); + }); + + unittest.test('method--testIamPermissions', () async { + final mock = HttpServerMock(); + final res = api.EventarcApi(mock).projects.locations.channels; + final arg_request = buildTestIamPermissionsRequest(); + final arg_resource = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.TestIamPermissionsRequest.fromJson( + json as core.Map); + checkTestIamPermissionsRequest(obj); + + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } 
+ } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildTestIamPermissionsResponse()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.testIamPermissions(arg_request, arg_resource, + $fields: arg_$fields); + checkTestIamPermissionsResponse( + response as api.TestIamPermissionsResponse); + }); + }); + + unittest.group('resource-ProjectsLocationsEnrollmentsResource', () { + unittest.test('method--create', () async { + final mock = HttpServerMock(); + final res = api.EventarcApi(mock).projects.locations.enrollments; + final arg_request = buildEnrollment(); + final arg_parent = 'foo'; + final arg_enrollmentId = 'foo'; + final arg_validateOnly = true; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.Enrollment.fromJson( + json as core.Map); + checkEnrollment(obj); - unittest.group('obj-schema-SetIamPolicyRequest', () { - unittest.test('to-json--from-json', () async { - final o = buildSetIamPolicyRequest(); - final oJson = convert.jsonDecode(convert.jsonEncode(o)); - final od = api.SetIamPolicyRequest.fromJson( - oJson as core.Map); - checkSetIamPolicyRequest(od); - }); - }); + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; - unittest.group('obj-schema-StateCondition', () { - unittest.test('to-json--from-json', () async { - final o = buildStateCondition(); - final oJson = convert.jsonDecode(convert.jsonEncode(o)); - final od = api.StateCondition.fromJson( - oJson as core.Map); - checkStateCondition(od); - }); - }); + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); - unittest.group('obj-schema-TestIamPermissionsRequest', () { - unittest.test('to-json--from-json', () async { - final o = buildTestIamPermissionsRequest(); - final oJson = convert.jsonDecode(convert.jsonEncode(o)); - final od = api.TestIamPermissionsRequest.fromJson( - oJson as core.Map); - checkTestIamPermissionsRequest(od); - }); - }); + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['enrollmentId']!.first, + unittest.equals(arg_enrollmentId), + ); + unittest.expect( + queryMap['validateOnly']!.first, + unittest.equals('$arg_validateOnly'), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); - unittest.group('obj-schema-TestIamPermissionsResponse', () { - unittest.test('to-json--from-json', () async { - final o = buildTestIamPermissionsResponse(); - final oJson = convert.jsonDecode(convert.jsonEncode(o)); - final od = api.TestIamPermissionsResponse.fromJson( - oJson as core.Map); - checkTestIamPermissionsResponse(od); + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = 
convert.json.encode(buildGoogleLongrunningOperation()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.create(arg_request, arg_parent, + enrollmentId: arg_enrollmentId, + validateOnly: arg_validateOnly, + $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); }); - }); - unittest.group('obj-schema-Transport', () { - unittest.test('to-json--from-json', () async { - final o = buildTransport(); - final oJson = convert.jsonDecode(convert.jsonEncode(o)); - final od = - api.Transport.fromJson(oJson as core.Map); - checkTransport(od); - }); - }); + unittest.test('method--delete', () async { + final mock = HttpServerMock(); + final res = api.EventarcApi(mock).projects.locations.enrollments; + final arg_name = 'foo'; + final arg_allowMissing = true; + final arg_etag = 'foo'; + final arg_validateOnly = true; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; - unittest.group('obj-schema-Trigger', () { - unittest.test('to-json--from-json', () async { - final o = buildTrigger(); - final oJson = convert.jsonDecode(convert.jsonEncode(o)); - final od = - api.Trigger.fromJson(oJson as core.Map); - checkTrigger(od); + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['allowMissing']!.first, + unittest.equals('$arg_allowMissing'), + ); + unittest.expect( + queryMap['etag']!.first, + unittest.equals(arg_etag), + ); + unittest.expect( + queryMap['validateOnly']!.first, + unittest.equals('$arg_validateOnly'), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildGoogleLongrunningOperation()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.delete(arg_name, + allowMissing: arg_allowMissing, + etag: arg_etag, + validateOnly: arg_validateOnly, + $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); }); - }); - unittest.group('resource-ProjectsLocationsResource', () { unittest.test('method--get', () async { final mock = HttpServerMock(); - final res = api.EventarcApi(mock).projects.locations; + final res = api.EventarcApi(mock).projects.locations.enrollments; final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -1975,17 +4673,18 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildLocation()); + final resp = convert.json.encode(buildEnrollment()); return async.Future.value(stringResponse(200, h, resp)); 
}), true); final response = await res.get(arg_name, $fields: arg_$fields); - checkLocation(response as api.Location); + checkEnrollment(response as api.Enrollment); }); - unittest.test('method--getGoogleChannelConfig', () async { + unittest.test('method--getIamPolicy', () async { final mock = HttpServerMock(); - final res = api.EventarcApi(mock).projects.locations; - final arg_name = 'foo'; + final res = api.EventarcApi(mock).projects.locations.enrollments; + final arg_resource = 'foo'; + final arg_options_requestedPolicyVersion = 42; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -2019,6 +4718,10 @@ void main() { ); } } + unittest.expect( + core.int.parse(queryMap['options.requestedPolicyVersion']!.first), + unittest.equals(arg_options_requestedPolicyVersion), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -2027,19 +4730,21 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleChannelConfig()); + final resp = convert.json.encode(buildPolicy()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = - await res.getGoogleChannelConfig(arg_name, $fields: arg_$fields); - checkGoogleChannelConfig(response as api.GoogleChannelConfig); + final response = await res.getIamPolicy(arg_resource, + options_requestedPolicyVersion: arg_options_requestedPolicyVersion, + $fields: arg_$fields); + checkPolicy(response as api.Policy); }); unittest.test('method--list', () async { final mock = HttpServerMock(); - final res = api.EventarcApi(mock).projects.locations; - final arg_name = 'foo'; + final res = api.EventarcApi(mock).projects.locations.enrollments; + final arg_parent = 'foo'; final arg_filter = 'foo'; + final arg_orderBy = 'foo'; final arg_pageSize = 42; final arg_pageToken = 'foo'; final arg_$fields = 'foo'; @@ -2079,6 +4784,10 @@ void main() { queryMap['filter']!.first, unittest.equals(arg_filter), ); + unittest.expect( + queryMap['orderBy']!.first, + unittest.equals(arg_orderBy), + ); unittest.expect( core.int.parse(queryMap['pageSize']!.first), unittest.equals(arg_pageSize), @@ -2095,28 +4804,31 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildListLocationsResponse()); + final resp = convert.json.encode(buildListEnrollmentsResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.list(arg_name, + final response = await res.list(arg_parent, filter: arg_filter, + orderBy: arg_orderBy, pageSize: arg_pageSize, pageToken: arg_pageToken, $fields: arg_$fields); - checkListLocationsResponse(response as api.ListLocationsResponse); + checkListEnrollmentsResponse(response as api.ListEnrollmentsResponse); }); - unittest.test('method--updateGoogleChannelConfig', () async { + unittest.test('method--patch', () async { final mock = HttpServerMock(); - final res = api.EventarcApi(mock).projects.locations; - final arg_request = buildGoogleChannelConfig(); + final res = api.EventarcApi(mock).projects.locations.enrollments; + final arg_request = buildEnrollment(); final arg_name = 'foo'; + final arg_allowMissing = true; final arg_updateMask = 'foo'; + final arg_validateOnly = true; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleChannelConfig.fromJson( + final obj = api.Enrollment.fromJson( json as core.Map); 
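+        // For mutating methods the mock also decodes the request body back
+        // into the schema object and re-runs the corresponding check*
+        // helper before asserting anything about the URL or query string.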
- checkGoogleChannelConfig(obj); + checkEnrollment(obj); final path = req.url.path; var pathOffset = 0; @@ -2149,10 +4861,18 @@ void main() { ); } } + unittest.expect( + queryMap['allowMissing']!.first, + unittest.equals('$arg_allowMissing'), + ); unittest.expect( queryMap['updateMask']!.first, unittest.equals(arg_updateMask), ); + unittest.expect( + queryMap['validateOnly']!.first, + unittest.equals('$arg_validateOnly'), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -2161,28 +4881,28 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleChannelConfig()); - return async.Future.value(stringResponse(200, h, resp)); - }), true); - final response = await res.updateGoogleChannelConfig( - arg_request, arg_name, - updateMask: arg_updateMask, $fields: arg_$fields); - checkGoogleChannelConfig(response as api.GoogleChannelConfig); + final resp = convert.json.encode(buildGoogleLongrunningOperation()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.patch(arg_request, arg_name, + allowMissing: arg_allowMissing, + updateMask: arg_updateMask, + validateOnly: arg_validateOnly, + $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); }); - }); - unittest.group('resource-ProjectsLocationsChannelConnectionsResource', () { - unittest.test('method--create', () async { + unittest.test('method--setIamPolicy', () async { final mock = HttpServerMock(); - final res = api.EventarcApi(mock).projects.locations.channelConnections; - final arg_request = buildChannelConnection(); - final arg_parent = 'foo'; - final arg_channelConnectionId = 'foo'; + final res = api.EventarcApi(mock).projects.locations.enrollments; + final arg_request = buildSetIamPolicyRequest(); + final arg_resource = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.ChannelConnection.fromJson( + final obj = api.SetIamPolicyRequest.fromJson( json as core.Map); - checkChannelConnection(obj); + checkSetIamPolicyRequest(obj); final path = req.url.path; var pathOffset = 0; @@ -2215,10 +4935,6 @@ void main() { ); } } - unittest.expect( - queryMap['channelConnectionId']!.first, - unittest.equals(arg_channelConnectionId), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -2227,21 +4943,25 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleLongrunningOperation()); + final resp = convert.json.encode(buildPolicy()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.create(arg_request, arg_parent, - channelConnectionId: arg_channelConnectionId, $fields: arg_$fields); - checkGoogleLongrunningOperation( - response as api.GoogleLongrunningOperation); + final response = await res.setIamPolicy(arg_request, arg_resource, + $fields: arg_$fields); + checkPolicy(response as api.Policy); }); - unittest.test('method--delete', () async { + unittest.test('method--testIamPermissions', () async { final mock = HttpServerMock(); - final res = api.EventarcApi(mock).projects.locations.channelConnections; - final arg_name = 'foo'; + final res = api.EventarcApi(mock).projects.locations.enrollments; + final arg_request = buildTestIamPermissionsRequest(); + final arg_resource = 'foo'; final arg_$fields = 'foo'; 
mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.TestIamPermissionsRequest.fromJson( + json as core.Map); + checkTestIamPermissionsRequest(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -2281,20 +5001,30 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleLongrunningOperation()); + final resp = convert.json.encode(buildTestIamPermissionsResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.delete(arg_name, $fields: arg_$fields); - checkGoogleLongrunningOperation( - response as api.GoogleLongrunningOperation); + final response = await res.testIamPermissions(arg_request, arg_resource, + $fields: arg_$fields); + checkTestIamPermissionsResponse( + response as api.TestIamPermissionsResponse); }); + }); - unittest.test('method--get', () async { + unittest.group('resource-ProjectsLocationsGoogleApiSourcesResource', () { + unittest.test('method--create', () async { final mock = HttpServerMock(); - final res = api.EventarcApi(mock).projects.locations.channelConnections; - final arg_name = 'foo'; + final res = api.EventarcApi(mock).projects.locations.googleApiSources; + final arg_request = buildGoogleApiSource(); + final arg_parent = 'foo'; + final arg_googleApiSourceId = 'foo'; + final arg_validateOnly = true; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.GoogleApiSource.fromJson( + json as core.Map); + checkGoogleApiSource(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -2326,6 +5056,14 @@ void main() { ); } } + unittest.expect( + queryMap['googleApiSourceId']!.first, + unittest.equals(arg_googleApiSourceId), + ); + unittest.expect( + queryMap['validateOnly']!.first, + unittest.equals('$arg_validateOnly'), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -2334,18 +5072,24 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildChannelConnection()); + final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.get(arg_name, $fields: arg_$fields); - checkChannelConnection(response as api.ChannelConnection); + final response = await res.create(arg_request, arg_parent, + googleApiSourceId: arg_googleApiSourceId, + validateOnly: arg_validateOnly, + $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); }); - unittest.test('method--getIamPolicy', () async { + unittest.test('method--delete', () async { final mock = HttpServerMock(); - final res = api.EventarcApi(mock).projects.locations.channelConnections; - final arg_resource = 'foo'; - final arg_options_requestedPolicyVersion = 42; + final res = api.EventarcApi(mock).projects.locations.googleApiSources; + final arg_name = 'foo'; + final arg_allowMissing = true; + final arg_etag = 'foo'; + final arg_validateOnly = true; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -2380,8 +5124,16 @@ void main() { } } unittest.expect( - core.int.parse(queryMap['options.requestedPolicyVersion']!.first), - unittest.equals(arg_options_requestedPolicyVersion), + queryMap['allowMissing']!.first, + unittest.equals('$arg_allowMissing'), + ); + unittest.expect( + 
queryMap['etag']!.first, + unittest.equals(arg_etag), + ); + unittest.expect( + queryMap['validateOnly']!.first, + unittest.equals('$arg_validateOnly'), ); unittest.expect( queryMap['fields']!.first, @@ -2391,21 +5143,22 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildPolicy()); + final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.getIamPolicy(arg_resource, - options_requestedPolicyVersion: arg_options_requestedPolicyVersion, + final response = await res.delete(arg_name, + allowMissing: arg_allowMissing, + etag: arg_etag, + validateOnly: arg_validateOnly, $fields: arg_$fields); - checkPolicy(response as api.Policy); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); }); - unittest.test('method--list', () async { + unittest.test('method--get', () async { final mock = HttpServerMock(); - final res = api.EventarcApi(mock).projects.locations.channelConnections; - final arg_parent = 'foo'; - final arg_pageSize = 42; - final arg_pageToken = 'foo'; + final res = api.EventarcApi(mock).projects.locations.googleApiSources; + final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -2439,14 +5192,6 @@ void main() { ); } } - unittest.expect( - core.int.parse(queryMap['pageSize']!.first), - unittest.equals(arg_pageSize), - ); - unittest.expect( - queryMap['pageToken']!.first, - unittest.equals(arg_pageToken), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -2455,28 +5200,20 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildListChannelConnectionsResponse()); + final resp = convert.json.encode(buildGoogleApiSource()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.list(arg_parent, - pageSize: arg_pageSize, - pageToken: arg_pageToken, - $fields: arg_$fields); - checkListChannelConnectionsResponse( - response as api.ListChannelConnectionsResponse); + final response = await res.get(arg_name, $fields: arg_$fields); + checkGoogleApiSource(response as api.GoogleApiSource); }); - unittest.test('method--setIamPolicy', () async { + unittest.test('method--getIamPolicy', () async { final mock = HttpServerMock(); - final res = api.EventarcApi(mock).projects.locations.channelConnections; - final arg_request = buildSetIamPolicyRequest(); + final res = api.EventarcApi(mock).projects.locations.googleApiSources; final arg_resource = 'foo'; + final arg_options_requestedPolicyVersion = 42; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.SetIamPolicyRequest.fromJson( - json as core.Map); - checkSetIamPolicyRequest(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -2508,6 +5245,10 @@ void main() { ); } } + unittest.expect( + core.int.parse(queryMap['options.requestedPolicyVersion']!.first), + unittest.equals(arg_options_requestedPolicyVersion), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -2519,22 +5260,22 @@ void main() { final resp = convert.json.encode(buildPolicy()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.setIamPolicy(arg_request, arg_resource, + final response = await 
res.getIamPolicy(arg_resource, + options_requestedPolicyVersion: arg_options_requestedPolicyVersion, $fields: arg_$fields); checkPolicy(response as api.Policy); }); - unittest.test('method--testIamPermissions', () async { + unittest.test('method--list', () async { final mock = HttpServerMock(); - final res = api.EventarcApi(mock).projects.locations.channelConnections; - final arg_request = buildTestIamPermissionsRequest(); - final arg_resource = 'foo'; + final res = api.EventarcApi(mock).projects.locations.googleApiSources; + final arg_parent = 'foo'; + final arg_filter = 'foo'; + final arg_orderBy = 'foo'; + final arg_pageSize = 42; + final arg_pageToken = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.TestIamPermissionsRequest.fromJson( - json as core.Map); - checkTestIamPermissionsRequest(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -2566,6 +5307,22 @@ void main() { ); } } + unittest.expect( + queryMap['filter']!.first, + unittest.equals(arg_filter), + ); + unittest.expect( + queryMap['orderBy']!.first, + unittest.equals(arg_orderBy), + ); + unittest.expect( + core.int.parse(queryMap['pageSize']!.first), + unittest.equals(arg_pageSize), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -2574,29 +5331,32 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildTestIamPermissionsResponse()); + final resp = convert.json.encode(buildListGoogleApiSourcesResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.testIamPermissions(arg_request, arg_resource, + final response = await res.list(arg_parent, + filter: arg_filter, + orderBy: arg_orderBy, + pageSize: arg_pageSize, + pageToken: arg_pageToken, $fields: arg_$fields); - checkTestIamPermissionsResponse( - response as api.TestIamPermissionsResponse); + checkListGoogleApiSourcesResponse( + response as api.ListGoogleApiSourcesResponse); }); - }); - unittest.group('resource-ProjectsLocationsChannelsResource', () { - unittest.test('method--create', () async { + unittest.test('method--patch', () async { final mock = HttpServerMock(); - final res = api.EventarcApi(mock).projects.locations.channels; - final arg_request = buildChannel(); - final arg_parent = 'foo'; - final arg_channelId = 'foo'; + final res = api.EventarcApi(mock).projects.locations.googleApiSources; + final arg_request = buildGoogleApiSource(); + final arg_name = 'foo'; + final arg_allowMissing = true; + final arg_updateMask = 'foo'; final arg_validateOnly = true; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = - api.Channel.fromJson(json as core.Map); - checkChannel(obj); + final obj = api.GoogleApiSource.fromJson( + json as core.Map); + checkGoogleApiSource(obj); final path = req.url.path; var pathOffset = 0; @@ -2630,8 +5390,12 @@ void main() { } } unittest.expect( - queryMap['channelId']!.first, - unittest.equals(arg_channelId), + queryMap['allowMissing']!.first, + unittest.equals('$arg_allowMissing'), + ); + unittest.expect( + queryMap['updateMask']!.first, + unittest.equals(arg_updateMask), ); unittest.expect( queryMap['validateOnly']!.first, @@ -2648,21 +5412,26 @@ void main() { final resp = convert.json.encode(buildGoogleLongrunningOperation()); return 
async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.create(arg_request, arg_parent, - channelId: arg_channelId, + final response = await res.patch(arg_request, arg_name, + allowMissing: arg_allowMissing, + updateMask: arg_updateMask, validateOnly: arg_validateOnly, $fields: arg_$fields); checkGoogleLongrunningOperation( response as api.GoogleLongrunningOperation); }); - unittest.test('method--delete', () async { + unittest.test('method--setIamPolicy', () async { final mock = HttpServerMock(); - final res = api.EventarcApi(mock).projects.locations.channels; - final arg_name = 'foo'; - final arg_validateOnly = true; + final res = api.EventarcApi(mock).projects.locations.googleApiSources; + final arg_request = buildSetIamPolicyRequest(); + final arg_resource = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.SetIamPolicyRequest.fromJson( + json as core.Map); + checkSetIamPolicyRequest(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -2694,10 +5463,6 @@ void main() { ); } } - unittest.expect( - queryMap['validateOnly']!.first, - unittest.equals('$arg_validateOnly'), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -2706,21 +5471,25 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleLongrunningOperation()); + final resp = convert.json.encode(buildPolicy()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.delete(arg_name, - validateOnly: arg_validateOnly, $fields: arg_$fields); - checkGoogleLongrunningOperation( - response as api.GoogleLongrunningOperation); + final response = await res.setIamPolicy(arg_request, arg_resource, + $fields: arg_$fields); + checkPolicy(response as api.Policy); }); - unittest.test('method--get', () async { + unittest.test('method--testIamPermissions', () async { final mock = HttpServerMock(); - final res = api.EventarcApi(mock).projects.locations.channels; - final arg_name = 'foo'; + final res = api.EventarcApi(mock).projects.locations.googleApiSources; + final arg_request = buildTestIamPermissionsRequest(); + final arg_resource = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.TestIamPermissionsRequest.fromJson( + json as core.Map); + checkTestIamPermissionsRequest(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -2760,20 +5529,30 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildChannel()); + final resp = convert.json.encode(buildTestIamPermissionsResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.get(arg_name, $fields: arg_$fields); - checkChannel(response as api.Channel); + final response = await res.testIamPermissions(arg_request, arg_resource, + $fields: arg_$fields); + checkTestIamPermissionsResponse( + response as api.TestIamPermissionsResponse); }); + }); - unittest.test('method--getIamPolicy', () async { + unittest.group('resource-ProjectsLocationsMessageBusesResource', () { + unittest.test('method--create', () async { final mock = HttpServerMock(); - final res = api.EventarcApi(mock).projects.locations.channels; - final arg_resource = 'foo'; - final arg_options_requestedPolicyVersion = 42; + final res = 
api.EventarcApi(mock).projects.locations.messageBuses; + final arg_request = buildMessageBus(); + final arg_parent = 'foo'; + final arg_messageBusId = 'foo'; + final arg_validateOnly = true; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.MessageBus.fromJson( + json as core.Map); + checkMessageBus(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -2806,8 +5585,12 @@ void main() { } } unittest.expect( - core.int.parse(queryMap['options.requestedPolicyVersion']!.first), - unittest.equals(arg_options_requestedPolicyVersion), + queryMap['messageBusId']!.first, + unittest.equals(arg_messageBusId), + ); + unittest.expect( + queryMap['validateOnly']!.first, + unittest.equals('$arg_validateOnly'), ); unittest.expect( queryMap['fields']!.first, @@ -2817,22 +5600,24 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildPolicy()); + final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.getIamPolicy(arg_resource, - options_requestedPolicyVersion: arg_options_requestedPolicyVersion, + final response = await res.create(arg_request, arg_parent, + messageBusId: arg_messageBusId, + validateOnly: arg_validateOnly, $fields: arg_$fields); - checkPolicy(response as api.Policy); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); }); - unittest.test('method--list', () async { + unittest.test('method--delete', () async { final mock = HttpServerMock(); - final res = api.EventarcApi(mock).projects.locations.channels; - final arg_parent = 'foo'; - final arg_orderBy = 'foo'; - final arg_pageSize = 42; - final arg_pageToken = 'foo'; + final res = api.EventarcApi(mock).projects.locations.messageBuses; + final arg_name = 'foo'; + final arg_allowMissing = true; + final arg_etag = 'foo'; + final arg_validateOnly = true; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -2867,16 +5652,16 @@ void main() { } } unittest.expect( - queryMap['orderBy']!.first, - unittest.equals(arg_orderBy), + queryMap['allowMissing']!.first, + unittest.equals('$arg_allowMissing'), ); unittest.expect( - core.int.parse(queryMap['pageSize']!.first), - unittest.equals(arg_pageSize), + queryMap['etag']!.first, + unittest.equals(arg_etag), ); unittest.expect( - queryMap['pageToken']!.first, - unittest.equals(arg_pageToken), + queryMap['validateOnly']!.first, + unittest.equals('$arg_validateOnly'), ); unittest.expect( queryMap['fields']!.first, @@ -2886,30 +5671,24 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildListChannelsResponse()); + final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.list(arg_parent, - orderBy: arg_orderBy, - pageSize: arg_pageSize, - pageToken: arg_pageToken, + final response = await res.delete(arg_name, + allowMissing: arg_allowMissing, + etag: arg_etag, + validateOnly: arg_validateOnly, $fields: arg_$fields); - checkListChannelsResponse(response as api.ListChannelsResponse); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); }); - unittest.test('method--patch', () async { + unittest.test('method--get', () async { final mock = HttpServerMock(); 
- final res = api.EventarcApi(mock).projects.locations.channels; - final arg_request = buildChannel(); + final res = api.EventarcApi(mock).projects.locations.messageBuses; final arg_name = 'foo'; - final arg_updateMask = 'foo'; - final arg_validateOnly = true; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = - api.Channel.fromJson(json as core.Map); - checkChannel(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -2941,14 +5720,6 @@ void main() { ); } } - unittest.expect( - queryMap['updateMask']!.first, - unittest.equals(arg_updateMask), - ); - unittest.expect( - queryMap['validateOnly']!.first, - unittest.equals('$arg_validateOnly'), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -2957,28 +5728,20 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleLongrunningOperation()); + final resp = convert.json.encode(buildMessageBus()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.patch(arg_request, arg_name, - updateMask: arg_updateMask, - validateOnly: arg_validateOnly, - $fields: arg_$fields); - checkGoogleLongrunningOperation( - response as api.GoogleLongrunningOperation); + final response = await res.get(arg_name, $fields: arg_$fields); + checkMessageBus(response as api.MessageBus); }); - unittest.test('method--setIamPolicy', () async { + unittest.test('method--getIamPolicy', () async { final mock = HttpServerMock(); - final res = api.EventarcApi(mock).projects.locations.channels; - final arg_request = buildSetIamPolicyRequest(); + final res = api.EventarcApi(mock).projects.locations.messageBuses; final arg_resource = 'foo'; + final arg_options_requestedPolicyVersion = 42; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.SetIamPolicyRequest.fromJson( - json as core.Map); - checkSetIamPolicyRequest(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -3010,6 +5773,10 @@ void main() { ); } } + unittest.expect( + core.int.parse(queryMap['options.requestedPolicyVersion']!.first), + unittest.equals(arg_options_requestedPolicyVersion), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -3021,22 +5788,22 @@ void main() { final resp = convert.json.encode(buildPolicy()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.setIamPolicy(arg_request, arg_resource, + final response = await res.getIamPolicy(arg_resource, + options_requestedPolicyVersion: arg_options_requestedPolicyVersion, $fields: arg_$fields); checkPolicy(response as api.Policy); }); - unittest.test('method--testIamPermissions', () async { + unittest.test('method--list', () async { final mock = HttpServerMock(); - final res = api.EventarcApi(mock).projects.locations.channels; - final arg_request = buildTestIamPermissionsRequest(); - final arg_resource = 'foo'; + final res = api.EventarcApi(mock).projects.locations.messageBuses; + final arg_parent = 'foo'; + final arg_filter = 'foo'; + final arg_orderBy = 'foo'; + final arg_pageSize = 42; + final arg_pageToken = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.TestIamPermissionsRequest.fromJson( - json as core.Map); - checkTestIamPermissionsRequest(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ 
-3068,6 +5835,22 @@ void main() { ); } } + unittest.expect( + queryMap['filter']!.first, + unittest.equals(arg_filter), + ); + unittest.expect( + queryMap['orderBy']!.first, + unittest.equals(arg_orderBy), + ); + unittest.expect( + core.int.parse(queryMap['pageSize']!.first), + unittest.equals(arg_pageSize), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -3076,22 +5859,24 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildTestIamPermissionsResponse()); + final resp = convert.json.encode(buildListMessageBusesResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.testIamPermissions(arg_request, arg_resource, + final response = await res.list(arg_parent, + filter: arg_filter, + orderBy: arg_orderBy, + pageSize: arg_pageSize, + pageToken: arg_pageToken, $fields: arg_$fields); - checkTestIamPermissionsResponse( - response as api.TestIamPermissionsResponse); + checkListMessageBusesResponse(response as api.ListMessageBusesResponse); }); - }); - unittest.group('resource-ProjectsLocationsEnrollmentsResource', () { - unittest.test('method--getIamPolicy', () async { + unittest.test('method--listEnrollments', () async { final mock = HttpServerMock(); - final res = api.EventarcApi(mock).projects.locations.enrollments; - final arg_resource = 'foo'; - final arg_options_requestedPolicyVersion = 42; + final res = api.EventarcApi(mock).projects.locations.messageBuses; + final arg_parent = 'foo'; + final arg_pageSize = 42; + final arg_pageToken = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -3126,8 +5911,12 @@ void main() { } } unittest.expect( - core.int.parse(queryMap['options.requestedPolicyVersion']!.first), - unittest.equals(arg_options_requestedPolicyVersion), + core.int.parse(queryMap['pageSize']!.first), + unittest.equals(arg_pageSize), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), ); unittest.expect( queryMap['fields']!.first, @@ -3137,25 +5926,31 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildPolicy()); + final resp = + convert.json.encode(buildListMessageBusEnrollmentsResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.getIamPolicy(arg_resource, - options_requestedPolicyVersion: arg_options_requestedPolicyVersion, + final response = await res.listEnrollments(arg_parent, + pageSize: arg_pageSize, + pageToken: arg_pageToken, $fields: arg_$fields); - checkPolicy(response as api.Policy); + checkListMessageBusEnrollmentsResponse( + response as api.ListMessageBusEnrollmentsResponse); }); - unittest.test('method--setIamPolicy', () async { + unittest.test('method--patch', () async { final mock = HttpServerMock(); - final res = api.EventarcApi(mock).projects.locations.enrollments; - final arg_request = buildSetIamPolicyRequest(); - final arg_resource = 'foo'; + final res = api.EventarcApi(mock).projects.locations.messageBuses; + final arg_request = buildMessageBus(); + final arg_name = 'foo'; + final arg_allowMissing = true; + final arg_updateMask = 'foo'; + final arg_validateOnly = true; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = 
api.SetIamPolicyRequest.fromJson( + final obj = api.MessageBus.fromJson( json as core.Map); - checkSetIamPolicyRequest(obj); + checkMessageBus(obj); final path = req.url.path; var pathOffset = 0; @@ -3188,6 +5983,18 @@ void main() { ); } } + unittest.expect( + queryMap['allowMissing']!.first, + unittest.equals('$arg_allowMissing'), + ); + unittest.expect( + queryMap['updateMask']!.first, + unittest.equals(arg_updateMask), + ); + unittest.expect( + queryMap['validateOnly']!.first, + unittest.equals('$arg_validateOnly'), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -3196,24 +6003,28 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildPolicy()); + final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.setIamPolicy(arg_request, arg_resource, + final response = await res.patch(arg_request, arg_name, + allowMissing: arg_allowMissing, + updateMask: arg_updateMask, + validateOnly: arg_validateOnly, $fields: arg_$fields); - checkPolicy(response as api.Policy); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); }); - unittest.test('method--testIamPermissions', () async { + unittest.test('method--setIamPolicy', () async { final mock = HttpServerMock(); - final res = api.EventarcApi(mock).projects.locations.enrollments; - final arg_request = buildTestIamPermissionsRequest(); + final res = api.EventarcApi(mock).projects.locations.messageBuses; + final arg_request = buildSetIamPolicyRequest(); final arg_resource = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.TestIamPermissionsRequest.fromJson( + final obj = api.SetIamPolicyRequest.fromJson( json as core.Map); - checkTestIamPermissionsRequest(obj); + checkSetIamPolicyRequest(obj); final path = req.url.path; var pathOffset = 0; @@ -3254,24 +6065,25 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildTestIamPermissionsResponse()); + final resp = convert.json.encode(buildPolicy()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.testIamPermissions(arg_request, arg_resource, + final response = await res.setIamPolicy(arg_request, arg_resource, $fields: arg_$fields); - checkTestIamPermissionsResponse( - response as api.TestIamPermissionsResponse); + checkPolicy(response as api.Policy); }); - }); - unittest.group('resource-ProjectsLocationsGoogleApiSourcesResource', () { - unittest.test('method--getIamPolicy', () async { + unittest.test('method--testIamPermissions', () async { final mock = HttpServerMock(); - final res = api.EventarcApi(mock).projects.locations.googleApiSources; + final res = api.EventarcApi(mock).projects.locations.messageBuses; + final arg_request = buildTestIamPermissionsRequest(); final arg_resource = 'foo'; - final arg_options_requestedPolicyVersion = 42; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.TestIamPermissionsRequest.fromJson( + json as core.Map); + checkTestIamPermissionsRequest(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -3301,12 +6113,8 @@ void main() { core.Uri.decodeQueryComponent(keyValue[0]), core.Uri.decodeQueryComponent(keyValue[1]), ); - } - } - unittest.expect( - 
core.int.parse(queryMap['options.requestedPolicyVersion']!.first), - unittest.equals(arg_options_requestedPolicyVersion), - ); + } + } unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -3315,25 +6123,27 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildPolicy()); + final resp = convert.json.encode(buildTestIamPermissionsResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.getIamPolicy(arg_resource, - options_requestedPolicyVersion: arg_options_requestedPolicyVersion, + final response = await res.testIamPermissions(arg_request, arg_resource, $fields: arg_$fields); - checkPolicy(response as api.Policy); + checkTestIamPermissionsResponse( + response as api.TestIamPermissionsResponse); }); + }); - unittest.test('method--setIamPolicy', () async { + unittest.group('resource-ProjectsLocationsOperationsResource', () { + unittest.test('method--cancel', () async { final mock = HttpServerMock(); - final res = api.EventarcApi(mock).projects.locations.googleApiSources; - final arg_request = buildSetIamPolicyRequest(); - final arg_resource = 'foo'; + final res = api.EventarcApi(mock).projects.locations.operations; + final arg_request = buildGoogleLongrunningCancelOperationRequest(); + final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.SetIamPolicyRequest.fromJson( + final obj = api.GoogleLongrunningCancelOperationRequest.fromJson( json as core.Map); - checkSetIamPolicyRequest(obj); + checkGoogleLongrunningCancelOperationRequest(obj); final path = req.url.path; var pathOffset = 0; @@ -3374,25 +6184,20 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildPolicy()); + final resp = convert.json.encode(buildEmpty()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.setIamPolicy(arg_request, arg_resource, - $fields: arg_$fields); - checkPolicy(response as api.Policy); + final response = + await res.cancel(arg_request, arg_name, $fields: arg_$fields); + checkEmpty(response as api.Empty); }); - unittest.test('method--testIamPermissions', () async { + unittest.test('method--delete', () async { final mock = HttpServerMock(); - final res = api.EventarcApi(mock).projects.locations.googleApiSources; - final arg_request = buildTestIamPermissionsRequest(); - final arg_resource = 'foo'; + final res = api.EventarcApi(mock).projects.locations.operations; + final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.TestIamPermissionsRequest.fromJson( - json as core.Map); - checkTestIamPermissionsRequest(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -3432,22 +6237,17 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildTestIamPermissionsResponse()); + final resp = convert.json.encode(buildEmpty()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.testIamPermissions(arg_request, arg_resource, - $fields: arg_$fields); - checkTestIamPermissionsResponse( - response as api.TestIamPermissionsResponse); + final response = await res.delete(arg_name, $fields: arg_$fields); + checkEmpty(response as api.Empty); }); - }); - 
unittest.group('resource-ProjectsLocationsMessageBusesResource', () { - unittest.test('method--getIamPolicy', () async { + unittest.test('method--get', () async { final mock = HttpServerMock(); - final res = api.EventarcApi(mock).projects.locations.messageBuses; - final arg_resource = 'foo'; - final arg_options_requestedPolicyVersion = 42; + final res = api.EventarcApi(mock).projects.locations.operations; + final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -3481,10 +6281,6 @@ void main() { ); } } - unittest.expect( - core.int.parse(queryMap['options.requestedPolicyVersion']!.first), - unittest.equals(arg_options_requestedPolicyVersion), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -3493,26 +6289,23 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildPolicy()); + final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.getIamPolicy(arg_resource, - options_requestedPolicyVersion: arg_options_requestedPolicyVersion, - $fields: arg_$fields); - checkPolicy(response as api.Policy); + final response = await res.get(arg_name, $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); }); - unittest.test('method--setIamPolicy', () async { + unittest.test('method--list', () async { final mock = HttpServerMock(); - final res = api.EventarcApi(mock).projects.locations.messageBuses; - final arg_request = buildSetIamPolicyRequest(); - final arg_resource = 'foo'; + final res = api.EventarcApi(mock).projects.locations.operations; + final arg_name = 'foo'; + final arg_filter = 'foo'; + final arg_pageSize = 42; + final arg_pageToken = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.SetIamPolicyRequest.fromJson( - json as core.Map); - checkSetIamPolicyRequest(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -3544,6 +6337,18 @@ void main() { ); } } + unittest.expect( + queryMap['filter']!.first, + unittest.equals(arg_filter), + ); + unittest.expect( + core.int.parse(queryMap['pageSize']!.first), + unittest.equals(arg_pageSize), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -3552,24 +6357,33 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildPolicy()); + final resp = + convert.json.encode(buildGoogleLongrunningListOperationsResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.setIamPolicy(arg_request, arg_resource, + final response = await res.list(arg_name, + filter: arg_filter, + pageSize: arg_pageSize, + pageToken: arg_pageToken, $fields: arg_$fields); - checkPolicy(response as api.Policy); + checkGoogleLongrunningListOperationsResponse( + response as api.GoogleLongrunningListOperationsResponse); }); + }); - unittest.test('method--testIamPermissions', () async { + unittest.group('resource-ProjectsLocationsPipelinesResource', () { + unittest.test('method--create', () async { final mock = HttpServerMock(); - final res = api.EventarcApi(mock).projects.locations.messageBuses; - final arg_request = 
buildTestIamPermissionsRequest(); - final arg_resource = 'foo'; + final res = api.EventarcApi(mock).projects.locations.pipelines; + final arg_request = buildPipeline(); + final arg_parent = 'foo'; + final arg_pipelineId = 'foo'; + final arg_validateOnly = true; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.TestIamPermissionsRequest.fromJson( - json as core.Map); - checkTestIamPermissionsRequest(obj); + final obj = + api.Pipeline.fromJson(json as core.Map); + checkPipeline(obj); final path = req.url.path; var pathOffset = 0; @@ -3602,6 +6416,14 @@ void main() { ); } } + unittest.expect( + queryMap['pipelineId']!.first, + unittest.equals(arg_pipelineId), + ); + unittest.expect( + queryMap['validateOnly']!.first, + unittest.equals('$arg_validateOnly'), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -3610,28 +6432,26 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildTestIamPermissionsResponse()); + final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.testIamPermissions(arg_request, arg_resource, + final response = await res.create(arg_request, arg_parent, + pipelineId: arg_pipelineId, + validateOnly: arg_validateOnly, $fields: arg_$fields); - checkTestIamPermissionsResponse( - response as api.TestIamPermissionsResponse); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); }); - }); - unittest.group('resource-ProjectsLocationsOperationsResource', () { - unittest.test('method--cancel', () async { + unittest.test('method--delete', () async { final mock = HttpServerMock(); - final res = api.EventarcApi(mock).projects.locations.operations; - final arg_request = buildGoogleLongrunningCancelOperationRequest(); + final res = api.EventarcApi(mock).projects.locations.pipelines; final arg_name = 'foo'; + final arg_allowMissing = true; + final arg_etag = 'foo'; + final arg_validateOnly = true; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleLongrunningCancelOperationRequest.fromJson( - json as core.Map); - checkGoogleLongrunningCancelOperationRequest(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -3663,6 +6483,18 @@ void main() { ); } } + unittest.expect( + queryMap['allowMissing']!.first, + unittest.equals('$arg_allowMissing'), + ); + unittest.expect( + queryMap['etag']!.first, + unittest.equals(arg_etag), + ); + unittest.expect( + queryMap['validateOnly']!.first, + unittest.equals('$arg_validateOnly'), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -3671,17 +6503,21 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildEmpty()); + final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = - await res.cancel(arg_request, arg_name, $fields: arg_$fields); - checkEmpty(response as api.Empty); + final response = await res.delete(arg_name, + allowMissing: arg_allowMissing, + etag: arg_etag, + validateOnly: arg_validateOnly, + $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); }); - unittest.test('method--delete', () async { + 
unittest.test('method--get', () async { final mock = HttpServerMock(); - final res = api.EventarcApi(mock).projects.locations.operations; + final res = api.EventarcApi(mock).projects.locations.pipelines; final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -3724,17 +6560,18 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildEmpty()); + final resp = convert.json.encode(buildPipeline()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.delete(arg_name, $fields: arg_$fields); - checkEmpty(response as api.Empty); + final response = await res.get(arg_name, $fields: arg_$fields); + checkPipeline(response as api.Pipeline); }); - unittest.test('method--get', () async { + unittest.test('method--getIamPolicy', () async { final mock = HttpServerMock(); - final res = api.EventarcApi(mock).projects.locations.operations; - final arg_name = 'foo'; + final res = api.EventarcApi(mock).projects.locations.pipelines; + final arg_resource = 'foo'; + final arg_options_requestedPolicyVersion = 42; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -3768,6 +6605,10 @@ void main() { ); } } + unittest.expect( + core.int.parse(queryMap['options.requestedPolicyVersion']!.first), + unittest.equals(arg_options_requestedPolicyVersion), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -3776,19 +6617,21 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleLongrunningOperation()); + final resp = convert.json.encode(buildPolicy()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.get(arg_name, $fields: arg_$fields); - checkGoogleLongrunningOperation( - response as api.GoogleLongrunningOperation); + final response = await res.getIamPolicy(arg_resource, + options_requestedPolicyVersion: arg_options_requestedPolicyVersion, + $fields: arg_$fields); + checkPolicy(response as api.Policy); }); unittest.test('method--list', () async { final mock = HttpServerMock(); - final res = api.EventarcApi(mock).projects.locations.operations; - final arg_name = 'foo'; + final res = api.EventarcApi(mock).projects.locations.pipelines; + final arg_parent = 'foo'; final arg_filter = 'foo'; + final arg_orderBy = 'foo'; final arg_pageSize = 42; final arg_pageToken = 'foo'; final arg_$fields = 'foo'; @@ -3828,6 +6671,10 @@ void main() { queryMap['filter']!.first, unittest.equals(arg_filter), ); + unittest.expect( + queryMap['orderBy']!.first, + unittest.equals(arg_orderBy), + ); unittest.expect( core.int.parse(queryMap['pageSize']!.first), unittest.equals(arg_pageSize), @@ -3844,28 +6691,32 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = - convert.json.encode(buildGoogleLongrunningListOperationsResponse()); + final resp = convert.json.encode(buildListPipelinesResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.list(arg_name, + final response = await res.list(arg_parent, filter: arg_filter, + orderBy: arg_orderBy, pageSize: arg_pageSize, pageToken: arg_pageToken, $fields: arg_$fields); - checkGoogleLongrunningListOperationsResponse( - response as api.GoogleLongrunningListOperationsResponse); + checkListPipelinesResponse(response as 
api.ListPipelinesResponse); }); - }); - unittest.group('resource-ProjectsLocationsPipelinesResource', () { - unittest.test('method--getIamPolicy', () async { + unittest.test('method--patch', () async { final mock = HttpServerMock(); final res = api.EventarcApi(mock).projects.locations.pipelines; - final arg_resource = 'foo'; - final arg_options_requestedPolicyVersion = 42; + final arg_request = buildPipeline(); + final arg_name = 'foo'; + final arg_allowMissing = true; + final arg_updateMask = 'foo'; + final arg_validateOnly = true; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = + api.Pipeline.fromJson(json as core.Map); + checkPipeline(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -3898,8 +6749,16 @@ void main() { } } unittest.expect( - core.int.parse(queryMap['options.requestedPolicyVersion']!.first), - unittest.equals(arg_options_requestedPolicyVersion), + queryMap['allowMissing']!.first, + unittest.equals('$arg_allowMissing'), + ); + unittest.expect( + queryMap['updateMask']!.first, + unittest.equals(arg_updateMask), + ); + unittest.expect( + queryMap['validateOnly']!.first, + unittest.equals('$arg_validateOnly'), ); unittest.expect( queryMap['fields']!.first, @@ -3909,13 +6768,16 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildPolicy()); + final resp = convert.json.encode(buildGoogleLongrunningOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.getIamPolicy(arg_resource, - options_requestedPolicyVersion: arg_options_requestedPolicyVersion, + final response = await res.patch(arg_request, arg_name, + allowMissing: arg_allowMissing, + updateMask: arg_updateMask, + validateOnly: arg_validateOnly, $fields: arg_$fields); - checkPolicy(response as api.Policy); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); }); unittest.test('method--setIamPolicy', () async { diff --git a/generated/googleapis/test/fcm/v1_test.dart b/generated/googleapis/test/fcm/v1_test.dart index 26871e54b..bcc155466 100644 --- a/generated/googleapis/test/fcm/v1_test.dart +++ b/generated/googleapis/test/fcm/v1_test.dart @@ -352,6 +352,7 @@ api.ApnsConfig buildApnsConfig() { if (buildCounterApnsConfig < 3) { o.fcmOptions = buildApnsFcmOptions(); o.headers = buildUnnamed4(); + o.liveActivityToken = 'foo'; o.payload = buildUnnamed5(); } buildCounterApnsConfig--; @@ -363,6 +364,10 @@ void checkApnsConfig(api.ApnsConfig o) { if (buildCounterApnsConfig < 3) { checkApnsFcmOptions(o.fcmOptions!); checkUnnamed4(o.headers!); + unittest.expect( + o.liveActivityToken!, + unittest.equals('foo'), + ); checkUnnamed5(o.payload!); } buildCounterApnsConfig--; diff --git a/generated/googleapis/test/file/v1_test.dart b/generated/googleapis/test/file/v1_test.dart index 3492fe02f..60efb3ce6 100644 --- a/generated/googleapis/test/file/v1_test.dart +++ b/generated/googleapis/test/file/v1_test.dart @@ -226,7 +226,6 @@ api.FixedIOPS buildFixedIOPS() { buildCounterFixedIOPS++; if (buildCounterFixedIOPS < 3) { o.maxIops = 'foo'; - o.maxReadIops = 'foo'; } buildCounterFixedIOPS--; return o; @@ -239,10 +238,6 @@ void checkFixedIOPS(api.FixedIOPS o) { o.maxIops!, unittest.equals('foo'), ); - unittest.expect( - o.maxReadIops!, - unittest.equals('foo'), - ); } buildCounterFixedIOPS--; } @@ -253,7 +248,6 @@ api.IOPSPerTB buildIOPSPerTB() { buildCounterIOPSPerTB++; if (buildCounterIOPSPerTB < 3) { 
o.maxIopsPerTb = 'foo'; - o.maxReadIopsPerTb = 'foo'; } buildCounterIOPSPerTB--; return o; @@ -266,10 +260,6 @@ void checkIOPSPerTB(api.IOPSPerTB o) { o.maxIopsPerTb!, unittest.equals('foo'), ); - unittest.expect( - o.maxReadIopsPerTb!, - unittest.equals('foo'), - ); } buildCounterIOPSPerTB--; } @@ -627,6 +617,23 @@ void checkUnnamed14(core.List o) { checkSnapshot(o[1]); } +core.List buildUnnamed15() => [ + 'foo', + 'foo', + ]; + +void checkUnnamed15(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o[0], + unittest.equals('foo'), + ); + unittest.expect( + o[1], + unittest.equals('foo'), + ); +} + core.int buildCounterListSnapshotsResponse = 0; api.ListSnapshotsResponse buildListSnapshotsResponse() { final o = api.ListSnapshotsResponse(); @@ -634,6 +641,7 @@ api.ListSnapshotsResponse buildListSnapshotsResponse() { if (buildCounterListSnapshotsResponse < 3) { o.nextPageToken = 'foo'; o.snapshots = buildUnnamed14(); + o.unreachable = buildUnnamed15(); } buildCounterListSnapshotsResponse--; return o; @@ -647,16 +655,17 @@ void checkListSnapshotsResponse(api.ListSnapshotsResponse o) { unittest.equals('foo'), ); checkUnnamed14(o.snapshots!); + checkUnnamed15(o.unreachable!); } buildCounterListSnapshotsResponse--; } -core.Map buildUnnamed15() => { +core.Map buildUnnamed16() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed15(core.Map o) { +void checkUnnamed16(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -668,7 +677,7 @@ void checkUnnamed15(core.Map o) { ); } -core.Map buildUnnamed16() => { +core.Map buildUnnamed17() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -681,7 +690,7 @@ core.Map buildUnnamed16() => { }, }; -void checkUnnamed16(core.Map o) { +void checkUnnamed17(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted1 = (o['x']!) 
as core.Map; unittest.expect(casted1, unittest.hasLength(3)); @@ -719,9 +728,9 @@ api.Location buildLocation() { buildCounterLocation++; if (buildCounterLocation < 3) { o.displayName = 'foo'; - o.labels = buildUnnamed15(); + o.labels = buildUnnamed16(); o.locationId = 'foo'; - o.metadata = buildUnnamed16(); + o.metadata = buildUnnamed17(); o.name = 'foo'; } buildCounterLocation--; @@ -735,12 +744,12 @@ void checkLocation(api.Location o) { o.displayName!, unittest.equals('foo'), ); - checkUnnamed15(o.labels!); + checkUnnamed16(o.labels!); unittest.expect( o.locationId!, unittest.equals('foo'), ); - checkUnnamed16(o.metadata!); + checkUnnamed17(o.metadata!); unittest.expect( o.name!, unittest.equals('foo'), @@ -749,12 +758,12 @@ void checkLocation(api.Location o) { buildCounterLocation--; } -core.List buildUnnamed17() => [ +core.List buildUnnamed18() => [ 'foo', 'foo', ]; -void checkUnnamed17(core.List o) { +void checkUnnamed18(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -766,12 +775,12 @@ void checkUnnamed17(core.List o) { ); } -core.List buildUnnamed18() => [ +core.List buildUnnamed19() => [ 'foo', 'foo', ]; -void checkUnnamed18(core.List o) { +void checkUnnamed19(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -789,8 +798,8 @@ api.NetworkConfig buildNetworkConfig() { buildCounterNetworkConfig++; if (buildCounterNetworkConfig < 3) { o.connectMode = 'foo'; - o.ipAddresses = buildUnnamed17(); - o.modes = buildUnnamed18(); + o.ipAddresses = buildUnnamed18(); + o.modes = buildUnnamed19(); o.network = 'foo'; o.reservedIpRange = 'foo'; } @@ -805,8 +814,8 @@ void checkNetworkConfig(api.NetworkConfig o) { o.connectMode!, unittest.equals('foo'), ); - checkUnnamed17(o.ipAddresses!); - checkUnnamed18(o.modes!); + checkUnnamed18(o.ipAddresses!); + checkUnnamed19(o.modes!); unittest.expect( o.network!, unittest.equals('foo'), @@ -819,12 +828,12 @@ void checkNetworkConfig(api.NetworkConfig o) { buildCounterNetworkConfig--; } -core.List buildUnnamed19() => [ +core.List buildUnnamed20() => [ 'foo', 'foo', ]; -void checkUnnamed19(core.List o) { +void checkUnnamed20(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -844,7 +853,7 @@ api.NfsExportOptions buildNfsExportOptions() { o.accessMode = 'foo'; o.anonGid = 'foo'; o.anonUid = 'foo'; - o.ipRanges = buildUnnamed19(); + o.ipRanges = buildUnnamed20(); o.squashMode = 'foo'; } buildCounterNfsExportOptions--; @@ -866,7 +875,7 @@ void checkNfsExportOptions(api.NfsExportOptions o) { o.anonUid!, unittest.equals('foo'), ); - checkUnnamed19(o.ipRanges!); + checkUnnamed20(o.ipRanges!); unittest.expect( o.squashMode!, unittest.equals('foo'), @@ -875,7 +884,7 @@ void checkNfsExportOptions(api.NfsExportOptions o) { buildCounterNfsExportOptions--; } -core.Map buildUnnamed20() => { +core.Map buildUnnamed21() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -888,7 +897,7 @@ core.Map buildUnnamed20() => { }, }; -void checkUnnamed20(core.Map o) { +void checkUnnamed21(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted3 = (o['x']!) as core.Map; unittest.expect(casted3, unittest.hasLength(3)); @@ -920,7 +929,7 @@ void checkUnnamed20(core.Map o) { ); } -core.Map buildUnnamed21() => { +core.Map buildUnnamed22() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -933,7 +942,7 @@ core.Map buildUnnamed21() => { }, }; -void checkUnnamed21(core.Map o) { +void checkUnnamed22(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted5 = (o['x']!) 
as core.Map; unittest.expect(casted5, unittest.hasLength(3)); @@ -972,9 +981,9 @@ api.Operation buildOperation() { if (buildCounterOperation < 3) { o.done = true; o.error = buildStatus(); - o.metadata = buildUnnamed20(); + o.metadata = buildUnnamed21(); o.name = 'foo'; - o.response = buildUnnamed21(); + o.response = buildUnnamed22(); } buildCounterOperation--; return o; @@ -985,12 +994,12 @@ void checkOperation(api.Operation o) { if (buildCounterOperation < 3) { unittest.expect(o.done!, unittest.isTrue); checkStatus(o.error!); - checkUnnamed20(o.metadata!); + checkUnnamed21(o.metadata!); unittest.expect( o.name!, unittest.equals('foo'), ); - checkUnnamed21(o.response!); + checkUnnamed22(o.response!); } buildCounterOperation--; } @@ -1068,12 +1077,12 @@ void checkPromoteReplicaRequest(api.PromoteReplicaRequest o) { buildCounterPromoteReplicaRequest--; } -core.List buildUnnamed22() => [ +core.List buildUnnamed23() => [ 'foo', 'foo', ]; -void checkUnnamed22(core.List o) { +void checkUnnamed23(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -1093,7 +1102,7 @@ api.ReplicaConfig buildReplicaConfig() { o.lastActiveSyncTime = 'foo'; o.peerInstance = 'foo'; o.state = 'foo'; - o.stateReasons = buildUnnamed22(); + o.stateReasons = buildUnnamed23(); } buildCounterReplicaConfig--; return o; @@ -1114,17 +1123,17 @@ void checkReplicaConfig(api.ReplicaConfig o) { o.state!, unittest.equals('foo'), ); - checkUnnamed22(o.stateReasons!); + checkUnnamed23(o.stateReasons!); } buildCounterReplicaConfig--; } -core.List buildUnnamed23() => [ +core.List buildUnnamed24() => [ buildReplicaConfig(), buildReplicaConfig(), ]; -void checkUnnamed23(core.List o) { +void checkUnnamed24(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkReplicaConfig(o[0]); checkReplicaConfig(o[1]); @@ -1135,7 +1144,7 @@ api.Replication buildReplication() { final o = api.Replication(); buildCounterReplication++; if (buildCounterReplication < 3) { - o.replicas = buildUnnamed23(); + o.replicas = buildUnnamed24(); o.role = 'foo'; } buildCounterReplication--; @@ -1145,7 +1154,7 @@ api.Replication buildReplication() { void checkReplication(api.Replication o) { buildCounterReplication++; if (buildCounterReplication < 3) { - checkUnnamed23(o.replicas!); + checkUnnamed24(o.replicas!); unittest.expect( o.role!, unittest.equals('foo'), @@ -1203,12 +1212,12 @@ void checkRevertInstanceRequest(api.RevertInstanceRequest o) { buildCounterRevertInstanceRequest--; } -core.Map buildUnnamed24() => { +core.Map buildUnnamed25() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed24(core.Map o) { +void checkUnnamed25(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -1220,12 +1229,12 @@ void checkUnnamed24(core.Map o) { ); } -core.Map buildUnnamed25() => { +core.Map buildUnnamed26() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed25(core.Map o) { +void checkUnnamed26(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -1245,10 +1254,10 @@ api.Snapshot buildSnapshot() { o.createTime = 'foo'; o.description = 'foo'; o.filesystemUsedBytes = 'foo'; - o.labels = buildUnnamed24(); + o.labels = buildUnnamed25(); o.name = 'foo'; o.state = 'foo'; - o.tags = buildUnnamed25(); + o.tags = buildUnnamed26(); } buildCounterSnapshot--; return o; @@ -1269,7 +1278,7 @@ void checkSnapshot(api.Snapshot o) { o.filesystemUsedBytes!, unittest.equals('foo'), ); - checkUnnamed24(o.labels!); + checkUnnamed25(o.labels!); unittest.expect( o.name!, unittest.equals('foo'), @@ 
-1278,12 +1287,12 @@ void checkSnapshot(api.Snapshot o) { o.state!, unittest.equals('foo'), ); - checkUnnamed25(o.tags!); + checkUnnamed26(o.tags!); } buildCounterSnapshot--; } -core.Map buildUnnamed26() => { +core.Map buildUnnamed27() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -1296,7 +1305,7 @@ core.Map buildUnnamed26() => { }, }; -void checkUnnamed26(core.Map o) { +void checkUnnamed27(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted7 = (o['x']!) as core.Map; unittest.expect(casted7, unittest.hasLength(3)); @@ -1328,15 +1337,15 @@ void checkUnnamed26(core.Map o) { ); } -core.List> buildUnnamed27() => [ - buildUnnamed26(), - buildUnnamed26(), +core.List> buildUnnamed28() => [ + buildUnnamed27(), + buildUnnamed27(), ]; -void checkUnnamed27(core.List> o) { +void checkUnnamed28(core.List> o) { unittest.expect(o, unittest.hasLength(2)); - checkUnnamed26(o[0]); - checkUnnamed26(o[1]); + checkUnnamed27(o[0]); + checkUnnamed27(o[1]); } core.int buildCounterStatus = 0; @@ -1345,7 +1354,7 @@ api.Status buildStatus() { buildCounterStatus++; if (buildCounterStatus < 3) { o.code = 42; - o.details = buildUnnamed27(); + o.details = buildUnnamed28(); o.message = 'foo'; } buildCounterStatus--; @@ -1359,7 +1368,7 @@ void checkStatus(api.Status o) { o.code!, unittest.equals(42), ); - checkUnnamed27(o.details!); + checkUnnamed28(o.details!); unittest.expect( o.message!, unittest.equals('foo'), @@ -2728,6 +2737,7 @@ void main() { final arg_orderBy = 'foo'; final arg_pageSize = 42; final arg_pageToken = 'foo'; + final arg_returnPartialSuccess = true; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -2777,6 +2787,10 @@ void main() { queryMap['pageToken']!.first, unittest.equals(arg_pageToken), ); + unittest.expect( + queryMap['returnPartialSuccess']!.first, + unittest.equals('$arg_returnPartialSuccess'), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -2793,6 +2807,7 @@ void main() { orderBy: arg_orderBy, pageSize: arg_pageSize, pageToken: arg_pageToken, + returnPartialSuccess: arg_returnPartialSuccess, $fields: arg_$fields); checkListSnapshotsResponse(response as api.ListSnapshotsResponse); }); diff --git a/generated/googleapis/test/forms/v1_test.dart b/generated/googleapis/test/forms/v1_test.dart index 6cb0921d8..568e8b4f2 100644 --- a/generated/googleapis/test/forms/v1_test.dart +++ b/generated/googleapis/test/forms/v1_test.dart @@ -1078,6 +1078,7 @@ api.Question buildQuestion() { o.fileUploadQuestion = buildFileUploadQuestion(); o.grading = buildGrading(); o.questionId = 'foo'; + o.ratingQuestion = buildRatingQuestion(); o.required = true; o.rowQuestion = buildRowQuestion(); o.scaleQuestion = buildScaleQuestion(); @@ -1099,6 +1100,7 @@ void checkQuestion(api.Question o) { o.questionId!, unittest.equals('foo'), ); + checkRatingQuestion(o.ratingQuestion!); unittest.expect(o.required!, unittest.isTrue); checkRowQuestion(o.rowQuestion!); checkScaleQuestion(o.scaleQuestion!); @@ -1182,6 +1184,33 @@ void checkQuizSettings(api.QuizSettings o) { buildCounterQuizSettings--; } +core.int buildCounterRatingQuestion = 0; +api.RatingQuestion buildRatingQuestion() { + final o = api.RatingQuestion(); + buildCounterRatingQuestion++; + if (buildCounterRatingQuestion < 3) { + o.iconType = 'foo'; + o.ratingScaleLevel = 42; + } + buildCounterRatingQuestion--; + return o; +} + +void checkRatingQuestion(api.RatingQuestion o) { + buildCounterRatingQuestion++; + if (buildCounterRatingQuestion < 3) { 
+ unittest.expect( + o.iconType!, + unittest.equals('foo'), + ); + unittest.expect( + o.ratingScaleLevel!, + unittest.equals(42), + ); + } + buildCounterRatingQuestion--; +} + core.int buildCounterRenewWatchRequest = 0; api.RenewWatchRequest buildRenewWatchRequest() { final o = api.RenewWatchRequest(); @@ -2071,6 +2100,16 @@ void main() { }); }); + unittest.group('obj-schema-RatingQuestion', () { + unittest.test('to-json--from-json', () async { + final o = buildRatingQuestion(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.RatingQuestion.fromJson( + oJson as core.Map); + checkRatingQuestion(od); + }); + }); + unittest.group('obj-schema-RenewWatchRequest', () { unittest.test('to-json--from-json', () async { final o = buildRenewWatchRequest(); diff --git a/generated/googleapis/test/gkehub/v1_test.dart b/generated/googleapis/test/gkehub/v1_test.dart index 1911d86df..7d3206346 100644 --- a/generated/googleapis/test/gkehub/v1_test.dart +++ b/generated/googleapis/test/gkehub/v1_test.dart @@ -166,6 +166,8 @@ api.Authority buildAuthority() { o.identityProvider = 'foo'; o.issuer = 'foo'; o.oidcJwks = 'foo'; + o.scopeTenancyIdentityProvider = 'foo'; + o.scopeTenancyWorkloadIdentityPool = 'foo'; o.workloadIdentityPool = 'foo'; } buildCounterAuthority--; @@ -187,6 +189,14 @@ void checkAuthority(api.Authority o) { o.oidcJwks!, unittest.equals('foo'), ); + unittest.expect( + o.scopeTenancyIdentityProvider!, + unittest.equals('foo'), + ); + unittest.expect( + o.scopeTenancyWorkloadIdentityPool!, + unittest.equals('foo'), + ); unittest.expect( o.workloadIdentityPool!, unittest.equals('foo'), @@ -849,6 +859,7 @@ api.ConfigManagementConfigSync buildConfigManagementConfigSync() { o.oci = buildConfigManagementOciConfig(); o.preventDrift = true; o.sourceFormat = 'foo'; + o.stopSyncing = true; } buildCounterConfigManagementConfigSync--; return o; @@ -870,6 +881,7 @@ void checkConfigManagementConfigSync(api.ConfigManagementConfigSync o) { o.sourceFormat!, unittest.equals('foo'), ); + unittest.expect(o.stopSyncing!, unittest.isTrue); } buildCounterConfigManagementConfigSync--; } @@ -978,6 +990,7 @@ api.ConfigManagementConfigSyncState buildConfigManagementConfigSyncState() { buildCounterConfigManagementConfigSyncState++; if (buildCounterConfigManagementConfigSyncState < 3) { o.clusterLevelStopSyncingState = 'foo'; + o.crCount = 42; o.deploymentState = buildConfigManagementConfigSyncDeploymentState(); o.errors = buildUnnamed13(); o.reposyncCrd = 'foo'; @@ -998,6 +1011,10 @@ void checkConfigManagementConfigSyncState( o.clusterLevelStopSyncingState!, unittest.equals('foo'), ); + unittest.expect( + o.crCount!, + unittest.equals(42), + ); checkConfigManagementConfigSyncDeploymentState(o.deploymentState!); checkUnnamed13(o.errors!); unittest.expect( @@ -3896,6 +3913,7 @@ api.Membership buildMembership() { buildCounterMembership++; if (buildCounterMembership < 3) { o.authority = buildAuthority(); + o.clusterTier = 'foo'; o.createTime = 'foo'; o.deleteTime = 'foo'; o.description = 'foo'; @@ -3917,6 +3935,10 @@ void checkMembership(api.Membership o) { buildCounterMembership++; if (buildCounterMembership < 3) { checkAuthority(o.authority!); + unittest.expect( + o.clusterTier!, + unittest.equals('foo'), + ); unittest.expect( o.createTime!, unittest.equals('foo'), @@ -5535,6 +5557,7 @@ api.ServiceMeshMembershipSpec buildServiceMeshMembershipSpec() { final o = api.ServiceMeshMembershipSpec(); buildCounterServiceMeshMembershipSpec++; if (buildCounterServiceMeshMembershipSpec < 3) { + o.configApi = 
'foo'; o.controlPlane = 'foo'; o.management = 'foo'; } @@ -5545,6 +5568,10 @@ api.ServiceMeshMembershipSpec buildServiceMeshMembershipSpec() { void checkServiceMeshMembershipSpec(api.ServiceMeshMembershipSpec o) { buildCounterServiceMeshMembershipSpec++; if (buildCounterServiceMeshMembershipSpec < 3) { + unittest.expect( + o.configApi!, + unittest.equals('foo'), + ); unittest.expect( o.controlPlane!, unittest.equals('foo'), diff --git a/generated/googleapis/test/gkehub/v2_test.dart b/generated/googleapis/test/gkehub/v2_test.dart index be3ba5fb4..8ded4e582 100644 --- a/generated/googleapis/test/gkehub/v2_test.dart +++ b/generated/googleapis/test/gkehub/v2_test.dart @@ -334,6 +334,7 @@ api.ConfigManagementConfigSync buildConfigManagementConfigSync() { o.oci = buildConfigManagementOciConfig(); o.preventDrift = true; o.sourceFormat = 'foo'; + o.stopSyncing = true; } buildCounterConfigManagementConfigSync--; return o; @@ -355,6 +356,7 @@ void checkConfigManagementConfigSync(api.ConfigManagementConfigSync o) { o.sourceFormat!, unittest.equals('foo'), ); + unittest.expect(o.stopSyncing!, unittest.isTrue); } buildCounterConfigManagementConfigSync--; } @@ -463,6 +465,7 @@ api.ConfigManagementConfigSyncState buildConfigManagementConfigSyncState() { buildCounterConfigManagementConfigSyncState++; if (buildCounterConfigManagementConfigSyncState < 3) { o.clusterLevelStopSyncingState = 'foo'; + o.crCount = 42; o.deploymentState = buildConfigManagementConfigSyncDeploymentState(); o.errors = buildUnnamed1(); o.reposyncCrd = 'foo'; @@ -483,6 +486,10 @@ void checkConfigManagementConfigSyncState( o.clusterLevelStopSyncingState!, unittest.equals('foo'), ); + unittest.expect( + o.crCount!, + unittest.equals(42), + ); checkConfigManagementConfigSyncDeploymentState(o.deploymentState!); checkUnnamed1(o.errors!); unittest.expect( @@ -1299,38 +1306,6 @@ void checkEmpty(api.Empty o) { buildCounterEmpty--; } -core.int buildCounterFeatureConfigRef = 0; -api.FeatureConfigRef buildFeatureConfigRef() { - final o = api.FeatureConfigRef(); - buildCounterFeatureConfigRef++; - if (buildCounterFeatureConfigRef < 3) { - o.config = 'foo'; - o.configUpdateTime = 'foo'; - o.uuid = 'foo'; - } - buildCounterFeatureConfigRef--; - return o; -} - -void checkFeatureConfigRef(api.FeatureConfigRef o) { - buildCounterFeatureConfigRef++; - if (buildCounterFeatureConfigRef < 3) { - unittest.expect( - o.config!, - unittest.equals('foo'), - ); - unittest.expect( - o.configUpdateTime!, - unittest.equals('foo'), - ); - unittest.expect( - o.uuid!, - unittest.equals('foo'), - ); - } - buildCounterFeatureConfigRef--; -} - core.int buildCounterFeatureSpec = 0; api.FeatureSpec buildFeatureSpec() { final o = api.FeatureSpec(); @@ -2321,7 +2296,6 @@ api.MembershipFeature buildMembershipFeature() { if (buildCounterMembershipFeature < 3) { o.createTime = 'foo'; o.deleteTime = 'foo'; - o.featureConfigRef = buildFeatureConfigRef(); o.labels = buildUnnamed18(); o.lifecycleState = buildLifecycleState(); o.name = 'foo'; @@ -2344,7 +2318,6 @@ void checkMembershipFeature(api.MembershipFeature o) { o.deleteTime!, unittest.equals('foo'), ); - checkFeatureConfigRef(o.featureConfigRef!); checkUnnamed18(o.labels!); checkLifecycleState(o.lifecycleState!); unittest.expect( @@ -3811,16 +3784,6 @@ void main() { }); }); - unittest.group('obj-schema-FeatureConfigRef', () { - unittest.test('to-json--from-json', () async { - final o = buildFeatureConfigRef(); - final oJson = convert.jsonDecode(convert.jsonEncode(o)); - final od = api.FeatureConfigRef.fromJson( - oJson as 
core.Map); - checkFeatureConfigRef(od); - }); - }); - unittest.group('obj-schema-FeatureSpec', () { unittest.test('to-json--from-json', () async { final o = buildFeatureSpec(); diff --git a/generated/googleapis/test/gkeonprem/v1_test.dart b/generated/googleapis/test/gkeonprem/v1_test.dart index 096af12ca..7576453fa 100644 --- a/generated/googleapis/test/gkeonprem/v1_test.dart +++ b/generated/googleapis/test/gkeonprem/v1_test.dart @@ -4048,14 +4048,18 @@ core.int buildCounterVmwareAdminMetalLbConfig = 0; api.VmwareAdminMetalLbConfig buildVmwareAdminMetalLbConfig() { final o = api.VmwareAdminMetalLbConfig(); buildCounterVmwareAdminMetalLbConfig++; - if (buildCounterVmwareAdminMetalLbConfig < 3) {} + if (buildCounterVmwareAdminMetalLbConfig < 3) { + o.enabled = true; + } buildCounterVmwareAdminMetalLbConfig--; return o; } void checkVmwareAdminMetalLbConfig(api.VmwareAdminMetalLbConfig o) { buildCounterVmwareAdminMetalLbConfig++; - if (buildCounterVmwareAdminMetalLbConfig < 3) {} + if (buildCounterVmwareAdminMetalLbConfig < 3) { + unittest.expect(o.enabled!, unittest.isTrue); + } buildCounterVmwareAdminMetalLbConfig--; } @@ -9937,6 +9941,82 @@ void main() { }); unittest.group('resource-ProjectsLocationsVmwareAdminClustersResource', () { + unittest.test('method--create', () async { + final mock = HttpServerMock(); + final res = api.GKEOnPremApi(mock).projects.locations.vmwareAdminClusters; + final arg_request = buildVmwareAdminCluster(); + final arg_parent = 'foo'; + final arg_allowPreflightFailure = true; + final arg_validateOnly = true; + final arg_vmwareAdminClusterId = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.VmwareAdminCluster.fromJson( + json as core.Map); + checkVmwareAdminCluster(obj); + + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['allowPreflightFailure']!.first, + unittest.equals('$arg_allowPreflightFailure'), + ); + unittest.expect( + queryMap['validateOnly']!.first, + unittest.equals('$arg_validateOnly'), + ); + unittest.expect( + queryMap['vmwareAdminClusterId']!.first, + unittest.equals(arg_vmwareAdminClusterId), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildOperation()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.create(arg_request, arg_parent, + allowPreflightFailure: arg_allowPreflightFailure, + validateOnly: arg_validateOnly, + vmwareAdminClusterId: arg_vmwareAdminClusterId, + $fields: arg_$fields); + checkOperation(response as api.Operation); + }); + unittest.test('method--enroll', () async { final 
mock = HttpServerMock(); final res = api.GKEOnPremApi(mock).projects.locations.vmwareAdminClusters; diff --git a/generated/googleapis/test/healthcare/v1_test.dart b/generated/googleapis/test/healthcare/v1_test.dart index aa0c9856f..a9362f055 100644 --- a/generated/googleapis/test/healthcare/v1_test.dart +++ b/generated/googleapis/test/healthcare/v1_test.dart @@ -26,6 +26,28 @@ import 'package:test/test.dart' as unittest; import '../test_shared.dart'; +core.int buildCounterAccessDeterminationLogConfig = 0; +api.AccessDeterminationLogConfig buildAccessDeterminationLogConfig() { + final o = api.AccessDeterminationLogConfig(); + buildCounterAccessDeterminationLogConfig++; + if (buildCounterAccessDeterminationLogConfig < 3) { + o.logLevel = 'foo'; + } + buildCounterAccessDeterminationLogConfig--; + return o; +} + +void checkAccessDeterminationLogConfig(api.AccessDeterminationLogConfig o) { + buildCounterAccessDeterminationLogConfig++; + if (buildCounterAccessDeterminationLogConfig < 3) { + unittest.expect( + o.logLevel!, + unittest.equals('foo'), + ); + } + buildCounterAccessDeterminationLogConfig--; +} + core.int buildCounterActivateConsentRequest = 0; api.ActivateConsentRequest buildActivateConsentRequest() { final o = api.ActivateConsentRequest(); @@ -75,6 +97,42 @@ void checkUnnamed0(core.List o) { ); } +core.int buildCounterAdminConsents = 0; +api.AdminConsents buildAdminConsents() { + final o = api.AdminConsents(); + buildCounterAdminConsents++; + if (buildCounterAdminConsents < 3) { + o.names = buildUnnamed0(); + } + buildCounterAdminConsents--; + return o; +} + +void checkAdminConsents(api.AdminConsents o) { + buildCounterAdminConsents++; + if (buildCounterAdminConsents < 3) { + checkUnnamed0(o.names!); + } + buildCounterAdminConsents--; +} + +core.List buildUnnamed1() => [ + 'foo', + 'foo', + ]; + +void checkUnnamed1(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o[0], + unittest.equals('foo'), + ); + unittest.expect( + o[1], + unittest.equals('foo'), + ); +} + core.int buildCounterAnalyzeEntitiesRequest = 0; api.AnalyzeEntitiesRequest buildAnalyzeEntitiesRequest() { final o = api.AnalyzeEntitiesRequest(); @@ -82,7 +140,7 @@ api.AnalyzeEntitiesRequest buildAnalyzeEntitiesRequest() { if (buildCounterAnalyzeEntitiesRequest < 3) { o.alternativeOutputFormat = 'foo'; o.documentContent = 'foo'; - o.licensedVocabularies = buildUnnamed0(); + o.licensedVocabularies = buildUnnamed1(); } buildCounterAnalyzeEntitiesRequest--; return o; @@ -99,39 +157,39 @@ void checkAnalyzeEntitiesRequest(api.AnalyzeEntitiesRequest o) { o.documentContent!, unittest.equals('foo'), ); - checkUnnamed0(o.licensedVocabularies!); + checkUnnamed1(o.licensedVocabularies!); } buildCounterAnalyzeEntitiesRequest--; } -core.List buildUnnamed1() => [ +core.List buildUnnamed2() => [ buildEntity(), buildEntity(), ]; -void checkUnnamed1(core.List o) { +void checkUnnamed2(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkEntity(o[0]); checkEntity(o[1]); } -core.List buildUnnamed2() => [ +core.List buildUnnamed3() => [ buildEntityMention(), buildEntityMention(), ]; -void checkUnnamed2(core.List o) { +void checkUnnamed3(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkEntityMention(o[0]); checkEntityMention(o[1]); } -core.List buildUnnamed3() => [ +core.List buildUnnamed4() => [ buildEntityMentionRelationship(), buildEntityMentionRelationship(), ]; -void checkUnnamed3(core.List o) { +void checkUnnamed4(core.List o) { unittest.expect(o, unittest.hasLength(2)); 
checkEntityMentionRelationship(o[0]); checkEntityMentionRelationship(o[1]); @@ -142,10 +200,10 @@ api.AnalyzeEntitiesResponse buildAnalyzeEntitiesResponse() { final o = api.AnalyzeEntitiesResponse(); buildCounterAnalyzeEntitiesResponse++; if (buildCounterAnalyzeEntitiesResponse < 3) { - o.entities = buildUnnamed1(); - o.entityMentions = buildUnnamed2(); + o.entities = buildUnnamed2(); + o.entityMentions = buildUnnamed3(); o.fhirBundle = 'foo'; - o.relationships = buildUnnamed3(); + o.relationships = buildUnnamed4(); } buildCounterAnalyzeEntitiesResponse--; return o; @@ -154,17 +212,61 @@ api.AnalyzeEntitiesResponse buildAnalyzeEntitiesResponse() { void checkAnalyzeEntitiesResponse(api.AnalyzeEntitiesResponse o) { buildCounterAnalyzeEntitiesResponse++; if (buildCounterAnalyzeEntitiesResponse < 3) { - checkUnnamed1(o.entities!); - checkUnnamed2(o.entityMentions!); + checkUnnamed2(o.entities!); + checkUnnamed3(o.entityMentions!); unittest.expect( o.fhirBundle!, unittest.equals('foo'), ); - checkUnnamed3(o.relationships!); + checkUnnamed4(o.relationships!); } buildCounterAnalyzeEntitiesResponse--; } +core.int buildCounterApplyAdminConsentsRequest = 0; +api.ApplyAdminConsentsRequest buildApplyAdminConsentsRequest() { + final o = api.ApplyAdminConsentsRequest(); + buildCounterApplyAdminConsentsRequest++; + if (buildCounterApplyAdminConsentsRequest < 3) { + o.newConsentsList = buildAdminConsents(); + o.validateOnly = true; + } + buildCounterApplyAdminConsentsRequest--; + return o; +} + +void checkApplyAdminConsentsRequest(api.ApplyAdminConsentsRequest o) { + buildCounterApplyAdminConsentsRequest++; + if (buildCounterApplyAdminConsentsRequest < 3) { + checkAdminConsents(o.newConsentsList!); + unittest.expect(o.validateOnly!, unittest.isTrue); + } + buildCounterApplyAdminConsentsRequest--; +} + +core.int buildCounterApplyConsentsRequest = 0; +api.ApplyConsentsRequest buildApplyConsentsRequest() { + final o = api.ApplyConsentsRequest(); + buildCounterApplyConsentsRequest++; + if (buildCounterApplyConsentsRequest < 3) { + o.patientScope = buildPatientScope(); + o.timeRange = buildTimeRange(); + o.validateOnly = true; + } + buildCounterApplyConsentsRequest--; + return o; +} + +void checkApplyConsentsRequest(api.ApplyConsentsRequest o) { + buildCounterApplyConsentsRequest++; + if (buildCounterApplyConsentsRequest < 3) { + checkPatientScope(o.patientScope!); + checkTimeRange(o.timeRange!); + unittest.expect(o.validateOnly!, unittest.isTrue); + } + buildCounterApplyConsentsRequest--; +} + core.int buildCounterArchiveUserDataMappingRequest = 0; api.ArchiveUserDataMappingRequest buildArchiveUserDataMappingRequest() { final o = api.ArchiveUserDataMappingRequest(); @@ -195,12 +297,12 @@ void checkArchiveUserDataMappingResponse(api.ArchiveUserDataMappingResponse o) { buildCounterArchiveUserDataMappingResponse--; } -core.List buildUnnamed4() => [ +core.List buildUnnamed5() => [ 'foo', 'foo', ]; -void checkUnnamed4(core.List o) { +void checkUnnamed5(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -218,7 +320,7 @@ api.Attribute buildAttribute() { buildCounterAttribute++; if (buildCounterAttribute < 3) { o.attributeDefinitionId = 'foo'; - o.values = buildUnnamed4(); + o.values = buildUnnamed5(); } buildCounterAttribute--; return o; @@ -231,17 +333,17 @@ void checkAttribute(api.Attribute o) { o.attributeDefinitionId!, unittest.equals('foo'), ); - checkUnnamed4(o.values!); + checkUnnamed5(o.values!); } buildCounterAttribute--; } -core.List buildUnnamed5() => [ +core.List 
buildUnnamed6() => [ 'foo', 'foo', ]; -void checkUnnamed5(core.List o) { +void checkUnnamed6(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -253,12 +355,12 @@ void checkUnnamed5(core.List o) { ); } -core.List buildUnnamed6() => [ +core.List buildUnnamed7() => [ 'foo', 'foo', ]; -void checkUnnamed6(core.List o) { +void checkUnnamed7(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -275,9 +377,9 @@ api.AttributeDefinition buildAttributeDefinition() { final o = api.AttributeDefinition(); buildCounterAttributeDefinition++; if (buildCounterAttributeDefinition < 3) { - o.allowedValues = buildUnnamed5(); + o.allowedValues = buildUnnamed6(); o.category = 'foo'; - o.consentDefaultValues = buildUnnamed6(); + o.consentDefaultValues = buildUnnamed7(); o.dataMappingDefaultValue = 'foo'; o.description = 'foo'; o.name = 'foo'; @@ -289,12 +391,12 @@ api.AttributeDefinition buildAttributeDefinition() { void checkAttributeDefinition(api.AttributeDefinition o) { buildCounterAttributeDefinition++; if (buildCounterAttributeDefinition < 3) { - checkUnnamed5(o.allowedValues!); + checkUnnamed6(o.allowedValues!); unittest.expect( o.category!, unittest.equals('foo'), ); - checkUnnamed6(o.consentDefaultValues!); + checkUnnamed7(o.consentDefaultValues!); unittest.expect( o.dataMappingDefaultValue!, unittest.equals('foo'), @@ -311,12 +413,12 @@ void checkAttributeDefinition(api.AttributeDefinition o) { buildCounterAttributeDefinition--; } -core.List buildUnnamed7() => [ +core.List buildUnnamed8() => [ buildAuditLogConfig(), buildAuditLogConfig(), ]; -void checkUnnamed7(core.List o) { +void checkUnnamed8(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkAuditLogConfig(o[0]); checkAuditLogConfig(o[1]); @@ -327,7 +429,7 @@ api.AuditConfig buildAuditConfig() { final o = api.AuditConfig(); buildCounterAuditConfig++; if (buildCounterAuditConfig < 3) { - o.auditLogConfigs = buildUnnamed7(); + o.auditLogConfigs = buildUnnamed8(); o.service = 'foo'; } buildCounterAuditConfig--; @@ -337,7 +439,7 @@ api.AuditConfig buildAuditConfig() { void checkAuditConfig(api.AuditConfig o) { buildCounterAuditConfig++; if (buildCounterAuditConfig < 3) { - checkUnnamed7(o.auditLogConfigs!); + checkUnnamed8(o.auditLogConfigs!); unittest.expect( o.service!, unittest.equals('foo'), @@ -346,12 +448,12 @@ void checkAuditConfig(api.AuditConfig o) { buildCounterAuditConfig--; } -core.List buildUnnamed8() => [ +core.List buildUnnamed9() => [ 'foo', 'foo', ]; -void checkUnnamed8(core.List o) { +void checkUnnamed9(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -368,7 +470,7 @@ api.AuditLogConfig buildAuditLogConfig() { final o = api.AuditLogConfig(); buildCounterAuditLogConfig++; if (buildCounterAuditLogConfig < 3) { - o.exemptedMembers = buildUnnamed8(); + o.exemptedMembers = buildUnnamed9(); o.logType = 'foo'; } buildCounterAuditLogConfig--; @@ -378,7 +480,7 @@ api.AuditLogConfig buildAuditLogConfig() { void checkAuditLogConfig(api.AuditLogConfig o) { buildCounterAuditLogConfig++; if (buildCounterAuditLogConfig < 3) { - checkUnnamed8(o.exemptedMembers!); + checkUnnamed9(o.exemptedMembers!); unittest.expect( o.logType!, unittest.equals('foo'), @@ -387,12 +489,12 @@ void checkAuditLogConfig(api.AuditLogConfig o) { buildCounterAuditLogConfig--; } -core.List buildUnnamed9() => [ +core.List buildUnnamed10() => [ 'foo', 'foo', ]; -void checkUnnamed9(core.List o) { +void checkUnnamed10(core.List o) { unittest.expect(o, 
unittest.hasLength(2)); unittest.expect( o[0], @@ -410,7 +512,7 @@ api.Binding buildBinding() { buildCounterBinding++; if (buildCounterBinding < 3) { o.condition = buildExpr(); - o.members = buildUnnamed9(); + o.members = buildUnnamed10(); o.role = 'foo'; } buildCounterBinding--; @@ -421,7 +523,7 @@ void checkBinding(api.Binding o) { buildCounterBinding++; if (buildCounterBinding < 3) { checkExpr(o.condition!); - checkUnnamed9(o.members!); + checkUnnamed10(o.members!); unittest.expect( o.role!, unittest.equals('foo'), @@ -521,12 +623,12 @@ void checkCharacterMaskConfig(api.CharacterMaskConfig o) { buildCounterCharacterMaskConfig--; } -core.Map buildUnnamed10() => { +core.Map buildUnnamed11() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed10(core.Map o) { +void checkUnnamed11(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -545,7 +647,7 @@ api.CheckDataAccessRequest buildCheckDataAccessRequest() { if (buildCounterCheckDataAccessRequest < 3) { o.consentList = buildConsentList(); o.dataId = 'foo'; - o.requestAttributes = buildUnnamed10(); + o.requestAttributes = buildUnnamed11(); o.responseView = 'foo'; } buildCounterCheckDataAccessRequest--; @@ -560,7 +662,7 @@ void checkCheckDataAccessRequest(api.CheckDataAccessRequest o) { o.dataId!, unittest.equals('foo'), ); - checkUnnamed10(o.requestAttributes!); + checkUnnamed11(o.requestAttributes!); unittest.expect( o.responseView!, unittest.equals('foo'), @@ -569,12 +671,12 @@ void checkCheckDataAccessRequest(api.CheckDataAccessRequest o) { buildCounterCheckDataAccessRequest--; } -core.Map buildUnnamed11() => { +core.Map buildUnnamed12() => { 'x': buildConsentEvaluation(), 'y': buildConsentEvaluation(), }; -void checkUnnamed11(core.Map o) { +void checkUnnamed12(core.Map o) { unittest.expect(o, unittest.hasLength(2)); checkConsentEvaluation(o['x']!); checkConsentEvaluation(o['y']!); @@ -585,7 +687,7 @@ api.CheckDataAccessResponse buildCheckDataAccessResponse() { final o = api.CheckDataAccessResponse(); buildCounterCheckDataAccessResponse++; if (buildCounterCheckDataAccessResponse < 3) { - o.consentDetails = buildUnnamed11(); + o.consentDetails = buildUnnamed12(); o.consented = true; } buildCounterCheckDataAccessResponse--; @@ -595,18 +697,18 @@ api.CheckDataAccessResponse buildCheckDataAccessResponse() { void checkCheckDataAccessResponse(api.CheckDataAccessResponse o) { buildCounterCheckDataAccessResponse++; if (buildCounterCheckDataAccessResponse < 3) { - checkUnnamed11(o.consentDetails!); + checkUnnamed12(o.consentDetails!); unittest.expect(o.consented!, unittest.isTrue); } buildCounterCheckDataAccessResponse--; } -core.Map buildUnnamed12() => { +core.Map buildUnnamed13() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed12(core.Map o) { +void checkUnnamed13(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -618,12 +720,12 @@ void checkUnnamed12(core.Map o) { ); } -core.List buildUnnamed13() => [ +core.List buildUnnamed14() => [ buildGoogleCloudHealthcareV1ConsentPolicy(), buildGoogleCloudHealthcareV1ConsentPolicy(), ]; -void checkUnnamed13(core.List o) { +void checkUnnamed14(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudHealthcareV1ConsentPolicy(o[0]); checkGoogleCloudHealthcareV1ConsentPolicy(o[1]); @@ -636,9 +738,9 @@ api.Consent buildConsent() { if (buildCounterConsent < 3) { o.consentArtifact = 'foo'; o.expireTime = 'foo'; - o.metadata = buildUnnamed12(); + o.metadata = buildUnnamed13(); o.name = 'foo'; - o.policies = buildUnnamed13(); + 
o.policies = buildUnnamed14(); o.revisionCreateTime = 'foo'; o.revisionId = 'foo'; o.state = 'foo'; @@ -660,12 +762,12 @@ void checkConsent(api.Consent o) { o.expireTime!, unittest.equals('foo'), ); - checkUnnamed12(o.metadata!); + checkUnnamed13(o.metadata!); unittest.expect( o.name!, unittest.equals('foo'), ); - checkUnnamed13(o.policies!); + checkUnnamed14(o.policies!); unittest.expect( o.revisionCreateTime!, unittest.equals('foo'), @@ -690,23 +792,55 @@ void checkConsent(api.Consent o) { buildCounterConsent--; } -core.List buildUnnamed14() => [ +core.int buildCounterConsentAccessorScope = 0; +api.ConsentAccessorScope buildConsentAccessorScope() { + final o = api.ConsentAccessorScope(); + buildCounterConsentAccessorScope++; + if (buildCounterConsentAccessorScope < 3) { + o.actor = 'foo'; + o.environment = 'foo'; + o.purpose = 'foo'; + } + buildCounterConsentAccessorScope--; + return o; +} + +void checkConsentAccessorScope(api.ConsentAccessorScope o) { + buildCounterConsentAccessorScope++; + if (buildCounterConsentAccessorScope < 3) { + unittest.expect( + o.actor!, + unittest.equals('foo'), + ); + unittest.expect( + o.environment!, + unittest.equals('foo'), + ); + unittest.expect( + o.purpose!, + unittest.equals('foo'), + ); + } + buildCounterConsentAccessorScope--; +} + +core.List buildUnnamed15() => [ buildImage(), buildImage(), ]; -void checkUnnamed14(core.List o) { +void checkUnnamed15(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkImage(o[0]); checkImage(o[1]); } -core.Map buildUnnamed15() => { +core.Map buildUnnamed16() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed15(core.Map o) { +void checkUnnamed16(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -723,10 +857,10 @@ api.ConsentArtifact buildConsentArtifact() { final o = api.ConsentArtifact(); buildCounterConsentArtifact++; if (buildCounterConsentArtifact < 3) { - o.consentContentScreenshots = buildUnnamed14(); + o.consentContentScreenshots = buildUnnamed15(); o.consentContentVersion = 'foo'; o.guardianSignature = buildSignature(); - o.metadata = buildUnnamed15(); + o.metadata = buildUnnamed16(); o.name = 'foo'; o.userId = 'foo'; o.userSignature = buildSignature(); @@ -739,13 +873,13 @@ api.ConsentArtifact buildConsentArtifact() { void checkConsentArtifact(api.ConsentArtifact o) { buildCounterConsentArtifact++; if (buildCounterConsentArtifact < 3) { - checkUnnamed14(o.consentContentScreenshots!); + checkUnnamed15(o.consentContentScreenshots!); unittest.expect( o.consentContentVersion!, unittest.equals('foo'), ); checkSignature(o.guardianSignature!); - checkUnnamed15(o.metadata!); + checkUnnamed16(o.metadata!); unittest.expect( o.name!, unittest.equals('foo'), @@ -760,6 +894,53 @@ void checkConsentArtifact(api.ConsentArtifact o) { buildCounterConsentArtifact--; } +core.List buildUnnamed17() => [ + 'foo', + 'foo', + ]; + +void checkUnnamed17(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o[0], + unittest.equals('foo'), + ); + unittest.expect( + o[1], + unittest.equals('foo'), + ); +} + +core.int buildCounterConsentConfig = 0; +api.ConsentConfig buildConsentConfig() { + final o = api.ConsentConfig(); + buildCounterConsentConfig++; + if (buildCounterConsentConfig < 3) { + o.accessDeterminationLogConfig = buildAccessDeterminationLogConfig(); + o.accessEnforced = true; + o.consentHeaderHandling = buildConsentHeaderHandling(); + o.enforcedAdminConsents = buildUnnamed17(); + o.version = 'foo'; + } + buildCounterConsentConfig--; + return o; +} + 
+void checkConsentConfig(api.ConsentConfig o) { + buildCounterConsentConfig++; + if (buildCounterConsentConfig < 3) { + checkAccessDeterminationLogConfig(o.accessDeterminationLogConfig!); + unittest.expect(o.accessEnforced!, unittest.isTrue); + checkConsentHeaderHandling(o.consentHeaderHandling!); + checkUnnamed17(o.enforcedAdminConsents!); + unittest.expect( + o.version!, + unittest.equals('foo'), + ); + } + buildCounterConsentConfig--; +} + core.int buildCounterConsentEvaluation = 0; api.ConsentEvaluation buildConsentEvaluation() { final o = api.ConsentEvaluation(); @@ -782,12 +963,34 @@ void checkConsentEvaluation(api.ConsentEvaluation o) { buildCounterConsentEvaluation--; } -core.List buildUnnamed16() => [ +core.int buildCounterConsentHeaderHandling = 0; +api.ConsentHeaderHandling buildConsentHeaderHandling() { + final o = api.ConsentHeaderHandling(); + buildCounterConsentHeaderHandling++; + if (buildCounterConsentHeaderHandling < 3) { + o.profile = 'foo'; + } + buildCounterConsentHeaderHandling--; + return o; +} + +void checkConsentHeaderHandling(api.ConsentHeaderHandling o) { + buildCounterConsentHeaderHandling++; + if (buildCounterConsentHeaderHandling < 3) { + unittest.expect( + o.profile!, + unittest.equals('foo'), + ); + } + buildCounterConsentHeaderHandling--; +} + +core.List buildUnnamed18() => [ 'foo', 'foo', ]; -void checkUnnamed16(core.List o) { +void checkUnnamed18(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -804,7 +1007,7 @@ api.ConsentList buildConsentList() { final o = api.ConsentList(); buildCounterConsentList++; if (buildCounterConsentList < 3) { - o.consents = buildUnnamed16(); + o.consents = buildUnnamed18(); } buildCounterConsentList--; return o; @@ -813,17 +1016,17 @@ api.ConsentList buildConsentList() { void checkConsentList(api.ConsentList o) { buildCounterConsentList++; if (buildCounterConsentList < 3) { - checkUnnamed16(o.consents!); + checkUnnamed18(o.consents!); } buildCounterConsentList--; } -core.Map buildUnnamed17() => { +core.Map buildUnnamed19() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed17(core.Map o) { +void checkUnnamed19(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -842,7 +1045,7 @@ api.ConsentStore buildConsentStore() { if (buildCounterConsentStore < 3) { o.defaultConsentTtl = 'foo'; o.enableConsentCreateOnUpdate = true; - o.labels = buildUnnamed17(); + o.labels = buildUnnamed19(); o.name = 'foo'; } buildCounterConsentStore--; @@ -857,7 +1060,7 @@ void checkConsentStore(api.ConsentStore o) { unittest.equals('foo'), ); unittest.expect(o.enableConsentCreateOnUpdate!, unittest.isTrue); - checkUnnamed17(o.labels!); + checkUnnamed19(o.labels!); unittest.expect( o.name!, unittest.equals('foo'), @@ -1156,12 +1359,12 @@ void checkDicomFilterConfig(api.DicomFilterConfig o) { buildCounterDicomFilterConfig--; } -core.Map buildUnnamed18() => { +core.Map buildUnnamed20() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed18(core.Map o) { +void checkUnnamed20(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -1173,12 +1376,12 @@ void checkUnnamed18(core.Map o) { ); } -core.List buildUnnamed19() => [ +core.List buildUnnamed21() => [ buildGoogleCloudHealthcareV1DicomStreamConfig(), buildGoogleCloudHealthcareV1DicomStreamConfig(), ]; -void checkUnnamed19(core.List o) { +void checkUnnamed21(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudHealthcareV1DicomStreamConfig(o[0]); 
checkGoogleCloudHealthcareV1DicomStreamConfig(o[1]); @@ -1189,10 +1392,10 @@ api.DicomStore buildDicomStore() { final o = api.DicomStore(); buildCounterDicomStore++; if (buildCounterDicomStore < 3) { - o.labels = buildUnnamed18(); + o.labels = buildUnnamed20(); o.name = 'foo'; o.notificationConfig = buildNotificationConfig(); - o.streamConfigs = buildUnnamed19(); + o.streamConfigs = buildUnnamed21(); } buildCounterDicomStore--; return o; @@ -1201,13 +1404,13 @@ api.DicomStore buildDicomStore() { void checkDicomStore(api.DicomStore o) { buildCounterDicomStore++; if (buildCounterDicomStore < 3) { - checkUnnamed18(o.labels!); + checkUnnamed20(o.labels!); unittest.expect( o.name!, unittest.equals('foo'), ); checkNotificationConfig(o.notificationConfig!); - checkUnnamed19(o.streamConfigs!); + checkUnnamed21(o.streamConfigs!); } buildCounterDicomStore--; } @@ -1296,12 +1499,12 @@ void checkEncryptionSpec(api.EncryptionSpec o) { buildCounterEncryptionSpec--; } -core.List buildUnnamed20() => [ +core.List buildUnnamed22() => [ 'foo', 'foo', ]; -void checkUnnamed20(core.List o) { +void checkUnnamed22(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -1320,7 +1523,7 @@ api.Entity buildEntity() { if (buildCounterEntity < 3) { o.entityId = 'foo'; o.preferredTerm = 'foo'; - o.vocabularyCodes = buildUnnamed20(); + o.vocabularyCodes = buildUnnamed22(); } buildCounterEntity--; return o; @@ -1337,17 +1540,17 @@ void checkEntity(api.Entity o) { o.preferredTerm!, unittest.equals('foo'), ); - checkUnnamed20(o.vocabularyCodes!); + checkUnnamed22(o.vocabularyCodes!); } buildCounterEntity--; } -core.List buildUnnamed21() => [ +core.List buildUnnamed23() => [ buildLinkedEntity(), buildLinkedEntity(), ]; -void checkUnnamed21(core.List o) { +void checkUnnamed23(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkLinkedEntity(o[0]); checkLinkedEntity(o[1]); @@ -1360,7 +1563,7 @@ api.EntityMention buildEntityMention() { if (buildCounterEntityMention < 3) { o.certaintyAssessment = buildFeature(); o.confidence = 42.0; - o.linkedEntities = buildUnnamed21(); + o.linkedEntities = buildUnnamed23(); o.mentionId = 'foo'; o.subject = buildFeature(); o.temporalAssessment = buildFeature(); @@ -1379,7 +1582,7 @@ void checkEntityMention(api.EntityMention o) { o.confidence!, unittest.equals(42.0), ); - checkUnnamed21(o.linkedEntities!); + checkUnnamed23(o.linkedEntities!); unittest.expect( o.mentionId!, unittest.equals('foo'), @@ -1427,12 +1630,12 @@ void checkEntityMentionRelationship(api.EntityMentionRelationship o) { buildCounterEntityMentionRelationship--; } -core.Map buildUnnamed22() => { +core.Map buildUnnamed24() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed22(core.Map o) { +void checkUnnamed24(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -1444,12 +1647,12 @@ void checkUnnamed22(core.Map o) { ); } -core.Map buildUnnamed23() => { +core.Map buildUnnamed25() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed23(core.Map o) { +void checkUnnamed25(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -1469,8 +1672,8 @@ api.EvaluateUserConsentsRequest buildEvaluateUserConsentsRequest() { o.consentList = buildConsentList(); o.pageSize = 42; o.pageToken = 'foo'; - o.requestAttributes = buildUnnamed22(); - o.resourceAttributes = buildUnnamed23(); + o.requestAttributes = buildUnnamed24(); + o.resourceAttributes = buildUnnamed25(); o.responseView = 'foo'; o.userId = 'foo'; } @@ -1490,8 +1693,8 @@ void 
checkEvaluateUserConsentsRequest(api.EvaluateUserConsentsRequest o) { o.pageToken!, unittest.equals('foo'), ); - checkUnnamed22(o.requestAttributes!); - checkUnnamed23(o.resourceAttributes!); + checkUnnamed24(o.requestAttributes!); + checkUnnamed25(o.resourceAttributes!); unittest.expect( o.responseView!, unittest.equals('foo'), @@ -1504,12 +1707,12 @@ void checkEvaluateUserConsentsRequest(api.EvaluateUserConsentsRequest o) { buildCounterEvaluateUserConsentsRequest--; } -core.List buildUnnamed24() => [ +core.List buildUnnamed26() => [ buildResult(), buildResult(), ]; -void checkUnnamed24(core.List o) { +void checkUnnamed26(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkResult(o[0]); checkResult(o[1]); @@ -1521,7 +1724,7 @@ api.EvaluateUserConsentsResponse buildEvaluateUserConsentsResponse() { buildCounterEvaluateUserConsentsResponse++; if (buildCounterEvaluateUserConsentsResponse < 3) { o.nextPageToken = 'foo'; - o.results = buildUnnamed24(); + o.results = buildUnnamed26(); } buildCounterEvaluateUserConsentsResponse--; return o; @@ -1534,11 +1737,184 @@ void checkEvaluateUserConsentsResponse(api.EvaluateUserConsentsResponse o) { o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed24(o.results!); + checkUnnamed26(o.results!); } buildCounterEvaluateUserConsentsResponse--; } +core.List buildUnnamed27() => [ + 'foo', + 'foo', + ]; + +void checkUnnamed27(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o[0], + unittest.equals('foo'), + ); + unittest.expect( + o[1], + unittest.equals('foo'), + ); +} + +core.List buildUnnamed28() => [ + buildConsentAccessorScope(), + buildConsentAccessorScope(), + ]; + +void checkUnnamed28(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkConsentAccessorScope(o[0]); + checkConsentAccessorScope(o[1]); +} + +core.List buildUnnamed29() => [ + 'foo', + 'foo', + ]; + +void checkUnnamed29(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o[0], + unittest.equals('foo'), + ); + unittest.expect( + o[1], + unittest.equals('foo'), + ); +} + +core.int buildCounterExplainDataAccessConsentInfo = 0; +api.ExplainDataAccessConsentInfo buildExplainDataAccessConsentInfo() { + final o = api.ExplainDataAccessConsentInfo(); + buildCounterExplainDataAccessConsentInfo++; + if (buildCounterExplainDataAccessConsentInfo < 3) { + o.cascadeOrigins = buildUnnamed27(); + o.consentResource = 'foo'; + o.enforcementTime = 'foo'; + o.matchingAccessorScopes = buildUnnamed28(); + o.patientConsentOwner = 'foo'; + o.type = 'foo'; + o.variants = buildUnnamed29(); + } + buildCounterExplainDataAccessConsentInfo--; + return o; +} + +void checkExplainDataAccessConsentInfo(api.ExplainDataAccessConsentInfo o) { + buildCounterExplainDataAccessConsentInfo++; + if (buildCounterExplainDataAccessConsentInfo < 3) { + checkUnnamed27(o.cascadeOrigins!); + unittest.expect( + o.consentResource!, + unittest.equals('foo'), + ); + unittest.expect( + o.enforcementTime!, + unittest.equals('foo'), + ); + checkUnnamed28(o.matchingAccessorScopes!); + unittest.expect( + o.patientConsentOwner!, + unittest.equals('foo'), + ); + unittest.expect( + o.type!, + unittest.equals('foo'), + ); + checkUnnamed29(o.variants!); + } + buildCounterExplainDataAccessConsentInfo--; +} + +core.List buildUnnamed30() => [ + buildExplainDataAccessConsentInfo(), + buildExplainDataAccessConsentInfo(), + ]; + +void checkUnnamed30(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkExplainDataAccessConsentInfo(o[0]); + 
checkExplainDataAccessConsentInfo(o[1]); +} + +core.List buildUnnamed31() => [ + buildExplainDataAccessConsentScope(), + buildExplainDataAccessConsentScope(), + ]; + +void checkUnnamed31(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkExplainDataAccessConsentScope(o[0]); + checkExplainDataAccessConsentScope(o[1]); +} + +core.int buildCounterExplainDataAccessConsentScope = 0; +api.ExplainDataAccessConsentScope buildExplainDataAccessConsentScope() { + final o = api.ExplainDataAccessConsentScope(); + buildCounterExplainDataAccessConsentScope++; + if (buildCounterExplainDataAccessConsentScope < 3) { + o.accessorScope = buildConsentAccessorScope(); + o.decision = 'foo'; + o.enforcingConsents = buildUnnamed30(); + o.exceptions = buildUnnamed31(); + } + buildCounterExplainDataAccessConsentScope--; + return o; +} + +void checkExplainDataAccessConsentScope(api.ExplainDataAccessConsentScope o) { + buildCounterExplainDataAccessConsentScope++; + if (buildCounterExplainDataAccessConsentScope < 3) { + checkConsentAccessorScope(o.accessorScope!); + unittest.expect( + o.decision!, + unittest.equals('foo'), + ); + checkUnnamed30(o.enforcingConsents!); + checkUnnamed31(o.exceptions!); + } + buildCounterExplainDataAccessConsentScope--; +} + +core.List buildUnnamed32() => [ + buildExplainDataAccessConsentScope(), + buildExplainDataAccessConsentScope(), + ]; + +void checkUnnamed32(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkExplainDataAccessConsentScope(o[0]); + checkExplainDataAccessConsentScope(o[1]); +} + +core.int buildCounterExplainDataAccessResponse = 0; +api.ExplainDataAccessResponse buildExplainDataAccessResponse() { + final o = api.ExplainDataAccessResponse(); + buildCounterExplainDataAccessResponse++; + if (buildCounterExplainDataAccessResponse < 3) { + o.consentScopes = buildUnnamed32(); + o.warning = 'foo'; + } + buildCounterExplainDataAccessResponse--; + return o; +} + +void checkExplainDataAccessResponse(api.ExplainDataAccessResponse o) { + buildCounterExplainDataAccessResponse++; + if (buildCounterExplainDataAccessResponse < 3) { + checkUnnamed32(o.consentScopes!); + unittest.expect( + o.warning!, + unittest.equals('foo'), + ); + } + buildCounterExplainDataAccessResponse--; +} + core.int buildCounterExportDicomDataRequest = 0; api.ExportDicomDataRequest buildExportDicomDataRequest() { final o = api.ExportDicomDataRequest(); @@ -1694,12 +2070,12 @@ void checkFeature(api.Feature o) { buildCounterFeature--; } -core.List buildUnnamed25() => [ +core.List buildUnnamed33() => [ buildFieldMetadata(), buildFieldMetadata(), ]; -void checkUnnamed25(core.List o) { +void checkUnnamed33(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkFieldMetadata(o[0]); checkFieldMetadata(o[1]); @@ -1711,7 +2087,7 @@ api.FhirConfig buildFhirConfig() { buildCounterFhirConfig++; if (buildCounterFhirConfig < 3) { o.defaultKeepExtensions = true; - o.fieldMetadataList = buildUnnamed25(); + o.fieldMetadataList = buildUnnamed33(); } buildCounterFhirConfig--; return o; @@ -1721,7 +2097,7 @@ void checkFhirConfig(api.FhirConfig o) { buildCounterFhirConfig++; if (buildCounterFhirConfig < 3) { unittest.expect(o.defaultKeepExtensions!, unittest.isTrue); - checkUnnamed25(o.fieldMetadataList!); + checkUnnamed33(o.fieldMetadataList!); } buildCounterFhirConfig--; } @@ -1771,12 +2147,12 @@ void checkFhirNotificationConfig(api.FhirNotificationConfig o) { buildCounterFhirNotificationConfig--; } -core.Map buildUnnamed26() => { +core.Map buildUnnamed34() => { 'x': 'foo', 'y': 'foo', }; 
-void checkUnnamed26(core.Map o) { +void checkUnnamed34(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -1788,23 +2164,23 @@ void checkUnnamed26(core.Map o) { ); } -core.List buildUnnamed27() => [ +core.List buildUnnamed35() => [ buildFhirNotificationConfig(), buildFhirNotificationConfig(), ]; -void checkUnnamed27(core.List o) { +void checkUnnamed35(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkFhirNotificationConfig(o[0]); checkFhirNotificationConfig(o[1]); } -core.List buildUnnamed28() => [ +core.List buildUnnamed36() => [ buildStreamConfig(), buildStreamConfig(), ]; -void checkUnnamed28(core.List o) { +void checkUnnamed36(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkStreamConfig(o[0]); checkStreamConfig(o[1]); @@ -1816,15 +2192,16 @@ api.FhirStore buildFhirStore() { buildCounterFhirStore++; if (buildCounterFhirStore < 3) { o.complexDataTypeReferenceParsing = 'foo'; + o.consentConfig = buildConsentConfig(); o.defaultSearchHandlingStrict = true; o.disableReferentialIntegrity = true; o.disableResourceVersioning = true; o.enableUpdateCreate = true; - o.labels = buildUnnamed26(); + o.labels = buildUnnamed34(); o.name = 'foo'; o.notificationConfig = buildNotificationConfig(); - o.notificationConfigs = buildUnnamed27(); - o.streamConfigs = buildUnnamed28(); + o.notificationConfigs = buildUnnamed35(); + o.streamConfigs = buildUnnamed36(); o.validationConfig = buildValidationConfig(); o.version = 'foo'; } @@ -1839,18 +2216,19 @@ void checkFhirStore(api.FhirStore o) { o.complexDataTypeReferenceParsing!, unittest.equals('foo'), ); + checkConsentConfig(o.consentConfig!); unittest.expect(o.defaultSearchHandlingStrict!, unittest.isTrue); unittest.expect(o.disableReferentialIntegrity!, unittest.isTrue); unittest.expect(o.disableResourceVersioning!, unittest.isTrue); unittest.expect(o.enableUpdateCreate!, unittest.isTrue); - checkUnnamed26(o.labels!); + checkUnnamed34(o.labels!); unittest.expect( o.name!, unittest.equals('foo'), ); checkNotificationConfig(o.notificationConfig!); - checkUnnamed27(o.notificationConfigs!); - checkUnnamed28(o.streamConfigs!); + checkUnnamed35(o.notificationConfigs!); + checkUnnamed36(o.streamConfigs!); checkValidationConfig(o.validationConfig!); unittest.expect( o.version!, @@ -1892,12 +2270,12 @@ void checkFhirStoreMetric(api.FhirStoreMetric o) { buildCounterFhirStoreMetric--; } -core.List buildUnnamed29() => [ +core.List buildUnnamed37() => [ buildFhirStoreMetric(), buildFhirStoreMetric(), ]; -void checkUnnamed29(core.List o) { +void checkUnnamed37(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkFhirStoreMetric(o[0]); checkFhirStoreMetric(o[1]); @@ -1908,7 +2286,7 @@ api.FhirStoreMetrics buildFhirStoreMetrics() { final o = api.FhirStoreMetrics(); buildCounterFhirStoreMetrics++; if (buildCounterFhirStoreMetrics < 3) { - o.metrics = buildUnnamed29(); + o.metrics = buildUnnamed37(); o.name = 'foo'; } buildCounterFhirStoreMetrics--; @@ -1918,7 +2296,7 @@ api.FhirStoreMetrics buildFhirStoreMetrics() { void checkFhirStoreMetrics(api.FhirStoreMetrics o) { buildCounterFhirStoreMetrics++; if (buildCounterFhirStoreMetrics < 3) { - checkUnnamed29(o.metrics!); + checkUnnamed37(o.metrics!); unittest.expect( o.name!, unittest.equals('foo'), @@ -1969,12 +2347,12 @@ void checkField(api.Field o) { buildCounterField--; } -core.List buildUnnamed30() => [ +core.List buildUnnamed38() => [ 'foo', 'foo', ]; -void checkUnnamed30(core.List o) { +void checkUnnamed38(core.List o) { unittest.expect(o, 
unittest.hasLength(2)); unittest.expect( o[0], @@ -1992,7 +2370,7 @@ api.FieldMetadata buildFieldMetadata() { buildCounterFieldMetadata++; if (buildCounterFieldMetadata < 3) { o.action = 'foo'; - o.paths = buildUnnamed30(); + o.paths = buildUnnamed38(); } buildCounterFieldMetadata--; return o; @@ -2005,7 +2383,7 @@ void checkFieldMetadata(api.FieldMetadata o) { o.action!, unittest.equals('foo'), ); - checkUnnamed30(o.paths!); + checkUnnamed38(o.paths!); } buildCounterFieldMetadata--; } @@ -2088,12 +2466,12 @@ void checkGoogleCloudHealthcareV1ConsentGcsDestination( buildCounterGoogleCloudHealthcareV1ConsentGcsDestination--; } -core.List buildUnnamed31() => [ +core.List buildUnnamed39() => [ buildAttribute(), buildAttribute(), ]; -void checkUnnamed31(core.List o) { +void checkUnnamed39(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkAttribute(o[0]); checkAttribute(o[1]); @@ -2106,7 +2484,7 @@ api.GoogleCloudHealthcareV1ConsentPolicy buildCounterGoogleCloudHealthcareV1ConsentPolicy++; if (buildCounterGoogleCloudHealthcareV1ConsentPolicy < 3) { o.authorizationRule = buildExpr(); - o.resourceAttributes = buildUnnamed31(); + o.resourceAttributes = buildUnnamed39(); } buildCounterGoogleCloudHealthcareV1ConsentPolicy--; return o; @@ -2117,7 +2495,7 @@ void checkGoogleCloudHealthcareV1ConsentPolicy( buildCounterGoogleCloudHealthcareV1ConsentPolicy++; if (buildCounterGoogleCloudHealthcareV1ConsentPolicy < 3) { checkExpr(o.authorizationRule!); - checkUnnamed31(o.resourceAttributes!); + checkUnnamed39(o.resourceAttributes!); } buildCounterGoogleCloudHealthcareV1ConsentPolicy--; } @@ -2331,23 +2709,23 @@ void checkGroupOrSegment(api.GroupOrSegment o) { buildCounterGroupOrSegment--; } -core.Map buildUnnamed32() => { +core.Map buildUnnamed40() => { 'x': buildSchemaGroup(), 'y': buildSchemaGroup(), }; -void checkUnnamed32(core.Map o) { +void checkUnnamed40(core.Map o) { unittest.expect(o, unittest.hasLength(2)); checkSchemaGroup(o['x']!); checkSchemaGroup(o['y']!); } -core.List buildUnnamed33() => [ +core.List buildUnnamed41() => [ buildVersionSource(), buildVersionSource(), ]; -void checkUnnamed33(core.List o) { +void checkUnnamed41(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkVersionSource(o[0]); checkVersionSource(o[1]); @@ -2358,8 +2736,8 @@ api.Hl7SchemaConfig buildHl7SchemaConfig() { final o = api.Hl7SchemaConfig(); buildCounterHl7SchemaConfig++; if (buildCounterHl7SchemaConfig < 3) { - o.messageSchemaConfigs = buildUnnamed32(); - o.version = buildUnnamed33(); + o.messageSchemaConfigs = buildUnnamed40(); + o.version = buildUnnamed41(); } buildCounterHl7SchemaConfig--; return o; @@ -2368,29 +2746,29 @@ api.Hl7SchemaConfig buildHl7SchemaConfig() { void checkHl7SchemaConfig(api.Hl7SchemaConfig o) { buildCounterHl7SchemaConfig++; if (buildCounterHl7SchemaConfig < 3) { - checkUnnamed32(o.messageSchemaConfigs!); - checkUnnamed33(o.version!); + checkUnnamed40(o.messageSchemaConfigs!); + checkUnnamed41(o.version!); } buildCounterHl7SchemaConfig--; } -core.List buildUnnamed34() => [ +core.List buildUnnamed42() => [ buildType(), buildType(), ]; -void checkUnnamed34(core.List o) { +void checkUnnamed42(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkType(o[0]); checkType(o[1]); } -core.List buildUnnamed35() => [ +core.List buildUnnamed43() => [ buildVersionSource(), buildVersionSource(), ]; -void checkUnnamed35(core.List o) { +void checkUnnamed43(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkVersionSource(o[0]); checkVersionSource(o[1]); @@ 
-2401,8 +2779,8 @@ api.Hl7TypesConfig buildHl7TypesConfig() { final o = api.Hl7TypesConfig(); buildCounterHl7TypesConfig++; if (buildCounterHl7TypesConfig < 3) { - o.type = buildUnnamed34(); - o.version = buildUnnamed35(); + o.type = buildUnnamed42(); + o.version = buildUnnamed43(); } buildCounterHl7TypesConfig--; return o; @@ -2411,8 +2789,8 @@ api.Hl7TypesConfig buildHl7TypesConfig() { void checkHl7TypesConfig(api.Hl7TypesConfig o) { buildCounterHl7TypesConfig++; if (buildCounterHl7TypesConfig < 3) { - checkUnnamed34(o.type!); - checkUnnamed35(o.version!); + checkUnnamed42(o.type!); + checkUnnamed43(o.version!); } buildCounterHl7TypesConfig--; } @@ -2444,12 +2822,12 @@ void checkHl7V2NotificationConfig(api.Hl7V2NotificationConfig o) { buildCounterHl7V2NotificationConfig--; } -core.Map buildUnnamed36() => { +core.Map buildUnnamed44() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed36(core.Map o) { +void checkUnnamed44(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -2461,12 +2839,12 @@ void checkUnnamed36(core.Map o) { ); } -core.List buildUnnamed37() => [ +core.List buildUnnamed45() => [ buildHl7V2NotificationConfig(), buildHl7V2NotificationConfig(), ]; -void checkUnnamed37(core.List o) { +void checkUnnamed45(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkHl7V2NotificationConfig(o[0]); checkHl7V2NotificationConfig(o[1]); @@ -2477,9 +2855,9 @@ api.Hl7V2Store buildHl7V2Store() { final o = api.Hl7V2Store(); buildCounterHl7V2Store++; if (buildCounterHl7V2Store < 3) { - o.labels = buildUnnamed36(); + o.labels = buildUnnamed44(); o.name = 'foo'; - o.notificationConfigs = buildUnnamed37(); + o.notificationConfigs = buildUnnamed45(); o.parserConfig = buildParserConfig(); o.rejectDuplicateMessage = true; } @@ -2490,12 +2868,12 @@ api.Hl7V2Store buildHl7V2Store() { void checkHl7V2Store(api.Hl7V2Store o) { buildCounterHl7V2Store++; if (buildCounterHl7V2Store < 3) { - checkUnnamed36(o.labels!); + checkUnnamed44(o.labels!); unittest.expect( o.name!, unittest.equals('foo'), ); - checkUnnamed37(o.notificationConfigs!); + checkUnnamed45(o.notificationConfigs!); checkParserConfig(o.parserConfig!); unittest.expect(o.rejectDuplicateMessage!, unittest.isTrue); } @@ -2534,12 +2912,12 @@ void checkHl7V2StoreMetric(api.Hl7V2StoreMetric o) { buildCounterHl7V2StoreMetric--; } -core.List buildUnnamed38() => [ +core.List buildUnnamed46() => [ buildHl7V2StoreMetric(), buildHl7V2StoreMetric(), ]; -void checkUnnamed38(core.List o) { +void checkUnnamed46(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkHl7V2StoreMetric(o[0]); checkHl7V2StoreMetric(o[1]); @@ -2550,7 +2928,7 @@ api.Hl7V2StoreMetrics buildHl7V2StoreMetrics() { final o = api.Hl7V2StoreMetrics(); buildCounterHl7V2StoreMetrics++; if (buildCounterHl7V2StoreMetrics < 3) { - o.metrics = buildUnnamed38(); + o.metrics = buildUnnamed46(); o.name = 'foo'; } buildCounterHl7V2StoreMetrics--; @@ -2560,7 +2938,7 @@ api.Hl7V2StoreMetrics buildHl7V2StoreMetrics() { void checkHl7V2StoreMetrics(api.Hl7V2StoreMetrics o) { buildCounterHl7V2StoreMetrics++; if (buildCounterHl7V2StoreMetrics < 3) { - checkUnnamed38(o.metrics!); + checkUnnamed46(o.metrics!); unittest.expect( o.name!, unittest.equals('foo'), @@ -2569,7 +2947,7 @@ void checkHl7V2StoreMetrics(api.Hl7V2StoreMetrics o) { buildCounterHl7V2StoreMetrics--; } -core.Map buildUnnamed39() => { +core.Map buildUnnamed47() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -2582,7 +2960,7 @@ core.Map buildUnnamed39() => { }, }; -void checkUnnamed39(core.Map o) 
{ +void checkUnnamed47(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted1 = (o['x']!) as core.Map; unittest.expect(casted1, unittest.hasLength(3)); @@ -2614,15 +2992,15 @@ void checkUnnamed39(core.Map o) { ); } -core.List> buildUnnamed40() => [ - buildUnnamed39(), - buildUnnamed39(), +core.List> buildUnnamed48() => [ + buildUnnamed47(), + buildUnnamed47(), ]; -void checkUnnamed40(core.List> o) { +void checkUnnamed48(core.List> o) { unittest.expect(o, unittest.hasLength(2)); - checkUnnamed39(o[0]); - checkUnnamed39(o[1]); + checkUnnamed47(o[0]); + checkUnnamed47(o[1]); } core.int buildCounterHttpBody = 0; @@ -2632,7 +3010,7 @@ api.HttpBody buildHttpBody() { if (buildCounterHttpBody < 3) { o.contentType = 'foo'; o.data = 'foo'; - o.extensions = buildUnnamed40(); + o.extensions = buildUnnamed48(); } buildCounterHttpBody--; return o; @@ -2649,7 +3027,7 @@ void checkHttpBody(api.HttpBody o) { o.data!, unittest.equals('foo'), ); - checkUnnamed40(o.extensions!); + checkUnnamed48(o.extensions!); } buildCounterHttpBody--; } @@ -2767,12 +3145,12 @@ void checkImportResourcesRequest(api.ImportResourcesRequest o) { buildCounterImportResourcesRequest--; } -core.List buildUnnamed41() => [ +core.List buildUnnamed49() => [ 'foo', 'foo', ]; -void checkUnnamed41(core.List o) { +void checkUnnamed49(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -2792,7 +3170,7 @@ api.InfoTypeTransformation buildInfoTypeTransformation() { o.characterMaskConfig = buildCharacterMaskConfig(); o.cryptoHashConfig = buildCryptoHashConfig(); o.dateShiftConfig = buildDateShiftConfig(); - o.infoTypes = buildUnnamed41(); + o.infoTypes = buildUnnamed49(); o.redactConfig = buildRedactConfig(); o.replaceWithInfoTypeConfig = buildReplaceWithInfoTypeConfig(); } @@ -2806,7 +3184,7 @@ void checkInfoTypeTransformation(api.InfoTypeTransformation o) { checkCharacterMaskConfig(o.characterMaskConfig!); checkCryptoHashConfig(o.cryptoHashConfig!); checkDateShiftConfig(o.dateShiftConfig!); - checkUnnamed41(o.infoTypes!); + checkUnnamed49(o.infoTypes!); checkRedactConfig(o.redactConfig!); checkReplaceWithInfoTypeConfig(o.replaceWithInfoTypeConfig!); } @@ -2905,12 +3283,12 @@ void checkLinkedEntity(api.LinkedEntity o) { buildCounterLinkedEntity--; } -core.List buildUnnamed42() => [ +core.List buildUnnamed50() => [ buildAttributeDefinition(), buildAttributeDefinition(), ]; -void checkUnnamed42(core.List o) { +void checkUnnamed50(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkAttributeDefinition(o[0]); checkAttributeDefinition(o[1]); @@ -2921,7 +3299,7 @@ api.ListAttributeDefinitionsResponse buildListAttributeDefinitionsResponse() { final o = api.ListAttributeDefinitionsResponse(); buildCounterListAttributeDefinitionsResponse++; if (buildCounterListAttributeDefinitionsResponse < 3) { - o.attributeDefinitions = buildUnnamed42(); + o.attributeDefinitions = buildUnnamed50(); o.nextPageToken = 'foo'; } buildCounterListAttributeDefinitionsResponse--; @@ -2932,7 +3310,7 @@ void checkListAttributeDefinitionsResponse( api.ListAttributeDefinitionsResponse o) { buildCounterListAttributeDefinitionsResponse++; if (buildCounterListAttributeDefinitionsResponse < 3) { - checkUnnamed42(o.attributeDefinitions!); + checkUnnamed50(o.attributeDefinitions!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -2941,12 +3319,12 @@ void checkListAttributeDefinitionsResponse( buildCounterListAttributeDefinitionsResponse--; } -core.List buildUnnamed43() => [ +core.List buildUnnamed51() => [ 
buildConsentArtifact(), buildConsentArtifact(), ]; -void checkUnnamed43(core.List o) { +void checkUnnamed51(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkConsentArtifact(o[0]); checkConsentArtifact(o[1]); @@ -2957,7 +3335,7 @@ api.ListConsentArtifactsResponse buildListConsentArtifactsResponse() { final o = api.ListConsentArtifactsResponse(); buildCounterListConsentArtifactsResponse++; if (buildCounterListConsentArtifactsResponse < 3) { - o.consentArtifacts = buildUnnamed43(); + o.consentArtifacts = buildUnnamed51(); o.nextPageToken = 'foo'; } buildCounterListConsentArtifactsResponse--; @@ -2967,7 +3345,7 @@ api.ListConsentArtifactsResponse buildListConsentArtifactsResponse() { void checkListConsentArtifactsResponse(api.ListConsentArtifactsResponse o) { buildCounterListConsentArtifactsResponse++; if (buildCounterListConsentArtifactsResponse < 3) { - checkUnnamed43(o.consentArtifacts!); + checkUnnamed51(o.consentArtifacts!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -2976,12 +3354,12 @@ void checkListConsentArtifactsResponse(api.ListConsentArtifactsResponse o) { buildCounterListConsentArtifactsResponse--; } -core.List buildUnnamed44() => [ +core.List buildUnnamed52() => [ buildConsent(), buildConsent(), ]; -void checkUnnamed44(core.List o) { +void checkUnnamed52(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkConsent(o[0]); checkConsent(o[1]); @@ -2992,7 +3370,7 @@ api.ListConsentRevisionsResponse buildListConsentRevisionsResponse() { final o = api.ListConsentRevisionsResponse(); buildCounterListConsentRevisionsResponse++; if (buildCounterListConsentRevisionsResponse < 3) { - o.consents = buildUnnamed44(); + o.consents = buildUnnamed52(); o.nextPageToken = 'foo'; } buildCounterListConsentRevisionsResponse--; @@ -3002,7 +3380,7 @@ api.ListConsentRevisionsResponse buildListConsentRevisionsResponse() { void checkListConsentRevisionsResponse(api.ListConsentRevisionsResponse o) { buildCounterListConsentRevisionsResponse++; if (buildCounterListConsentRevisionsResponse < 3) { - checkUnnamed44(o.consents!); + checkUnnamed52(o.consents!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -3011,12 +3389,12 @@ void checkListConsentRevisionsResponse(api.ListConsentRevisionsResponse o) { buildCounterListConsentRevisionsResponse--; } -core.List buildUnnamed45() => [ +core.List buildUnnamed53() => [ buildConsentStore(), buildConsentStore(), ]; -void checkUnnamed45(core.List o) { +void checkUnnamed53(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkConsentStore(o[0]); checkConsentStore(o[1]); @@ -3027,7 +3405,7 @@ api.ListConsentStoresResponse buildListConsentStoresResponse() { final o = api.ListConsentStoresResponse(); buildCounterListConsentStoresResponse++; if (buildCounterListConsentStoresResponse < 3) { - o.consentStores = buildUnnamed45(); + o.consentStores = buildUnnamed53(); o.nextPageToken = 'foo'; } buildCounterListConsentStoresResponse--; @@ -3037,7 +3415,7 @@ api.ListConsentStoresResponse buildListConsentStoresResponse() { void checkListConsentStoresResponse(api.ListConsentStoresResponse o) { buildCounterListConsentStoresResponse++; if (buildCounterListConsentStoresResponse < 3) { - checkUnnamed45(o.consentStores!); + checkUnnamed53(o.consentStores!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -3046,12 +3424,12 @@ void checkListConsentStoresResponse(api.ListConsentStoresResponse o) { buildCounterListConsentStoresResponse--; } -core.List buildUnnamed46() => [ +core.List buildUnnamed54() => [ 
buildConsent(), buildConsent(), ]; -void checkUnnamed46(core.List o) { +void checkUnnamed54(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkConsent(o[0]); checkConsent(o[1]); @@ -3062,7 +3440,7 @@ api.ListConsentsResponse buildListConsentsResponse() { final o = api.ListConsentsResponse(); buildCounterListConsentsResponse++; if (buildCounterListConsentsResponse < 3) { - o.consents = buildUnnamed46(); + o.consents = buildUnnamed54(); o.nextPageToken = 'foo'; } buildCounterListConsentsResponse--; @@ -3072,7 +3450,7 @@ api.ListConsentsResponse buildListConsentsResponse() { void checkListConsentsResponse(api.ListConsentsResponse o) { buildCounterListConsentsResponse++; if (buildCounterListConsentsResponse < 3) { - checkUnnamed46(o.consents!); + checkUnnamed54(o.consents!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -3081,12 +3459,12 @@ void checkListConsentsResponse(api.ListConsentsResponse o) { buildCounterListConsentsResponse--; } -core.List buildUnnamed47() => [ +core.List buildUnnamed55() => [ buildDataset(), buildDataset(), ]; -void checkUnnamed47(core.List o) { +void checkUnnamed55(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkDataset(o[0]); checkDataset(o[1]); @@ -3097,7 +3475,7 @@ api.ListDatasetsResponse buildListDatasetsResponse() { final o = api.ListDatasetsResponse(); buildCounterListDatasetsResponse++; if (buildCounterListDatasetsResponse < 3) { - o.datasets = buildUnnamed47(); + o.datasets = buildUnnamed55(); o.nextPageToken = 'foo'; } buildCounterListDatasetsResponse--; @@ -3107,7 +3485,7 @@ api.ListDatasetsResponse buildListDatasetsResponse() { void checkListDatasetsResponse(api.ListDatasetsResponse o) { buildCounterListDatasetsResponse++; if (buildCounterListDatasetsResponse < 3) { - checkUnnamed47(o.datasets!); + checkUnnamed55(o.datasets!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -3116,12 +3494,12 @@ void checkListDatasetsResponse(api.ListDatasetsResponse o) { buildCounterListDatasetsResponse--; } -core.List buildUnnamed48() => [ +core.List buildUnnamed56() => [ buildDicomStore(), buildDicomStore(), ]; -void checkUnnamed48(core.List o) { +void checkUnnamed56(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkDicomStore(o[0]); checkDicomStore(o[1]); @@ -3132,7 +3510,7 @@ api.ListDicomStoresResponse buildListDicomStoresResponse() { final o = api.ListDicomStoresResponse(); buildCounterListDicomStoresResponse++; if (buildCounterListDicomStoresResponse < 3) { - o.dicomStores = buildUnnamed48(); + o.dicomStores = buildUnnamed56(); o.nextPageToken = 'foo'; } buildCounterListDicomStoresResponse--; @@ -3142,7 +3520,7 @@ api.ListDicomStoresResponse buildListDicomStoresResponse() { void checkListDicomStoresResponse(api.ListDicomStoresResponse o) { buildCounterListDicomStoresResponse++; if (buildCounterListDicomStoresResponse < 3) { - checkUnnamed48(o.dicomStores!); + checkUnnamed56(o.dicomStores!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -3151,12 +3529,12 @@ void checkListDicomStoresResponse(api.ListDicomStoresResponse o) { buildCounterListDicomStoresResponse--; } -core.List buildUnnamed49() => [ +core.List buildUnnamed57() => [ buildFhirStore(), buildFhirStore(), ]; -void checkUnnamed49(core.List o) { +void checkUnnamed57(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkFhirStore(o[0]); checkFhirStore(o[1]); @@ -3167,7 +3545,7 @@ api.ListFhirStoresResponse buildListFhirStoresResponse() { final o = api.ListFhirStoresResponse(); buildCounterListFhirStoresResponse++; if 
(buildCounterListFhirStoresResponse < 3) { - o.fhirStores = buildUnnamed49(); + o.fhirStores = buildUnnamed57(); o.nextPageToken = 'foo'; } buildCounterListFhirStoresResponse--; @@ -3177,7 +3555,7 @@ api.ListFhirStoresResponse buildListFhirStoresResponse() { void checkListFhirStoresResponse(api.ListFhirStoresResponse o) { buildCounterListFhirStoresResponse++; if (buildCounterListFhirStoresResponse < 3) { - checkUnnamed49(o.fhirStores!); + checkUnnamed57(o.fhirStores!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -3186,12 +3564,12 @@ void checkListFhirStoresResponse(api.ListFhirStoresResponse o) { buildCounterListFhirStoresResponse--; } -core.List buildUnnamed50() => [ +core.List buildUnnamed58() => [ buildHl7V2Store(), buildHl7V2Store(), ]; -void checkUnnamed50(core.List o) { +void checkUnnamed58(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkHl7V2Store(o[0]); checkHl7V2Store(o[1]); @@ -3202,7 +3580,7 @@ api.ListHl7V2StoresResponse buildListHl7V2StoresResponse() { final o = api.ListHl7V2StoresResponse(); buildCounterListHl7V2StoresResponse++; if (buildCounterListHl7V2StoresResponse < 3) { - o.hl7V2Stores = buildUnnamed50(); + o.hl7V2Stores = buildUnnamed58(); o.nextPageToken = 'foo'; } buildCounterListHl7V2StoresResponse--; @@ -3212,7 +3590,7 @@ api.ListHl7V2StoresResponse buildListHl7V2StoresResponse() { void checkListHl7V2StoresResponse(api.ListHl7V2StoresResponse o) { buildCounterListHl7V2StoresResponse++; if (buildCounterListHl7V2StoresResponse < 3) { - checkUnnamed50(o.hl7V2Stores!); + checkUnnamed58(o.hl7V2Stores!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -3221,12 +3599,12 @@ void checkListHl7V2StoresResponse(api.ListHl7V2StoresResponse o) { buildCounterListHl7V2StoresResponse--; } -core.List buildUnnamed51() => [ +core.List buildUnnamed59() => [ buildLocation(), buildLocation(), ]; -void checkUnnamed51(core.List o) { +void checkUnnamed59(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkLocation(o[0]); checkLocation(o[1]); @@ -3237,7 +3615,7 @@ api.ListLocationsResponse buildListLocationsResponse() { final o = api.ListLocationsResponse(); buildCounterListLocationsResponse++; if (buildCounterListLocationsResponse < 3) { - o.locations = buildUnnamed51(); + o.locations = buildUnnamed59(); o.nextPageToken = 'foo'; } buildCounterListLocationsResponse--; @@ -3247,7 +3625,7 @@ api.ListLocationsResponse buildListLocationsResponse() { void checkListLocationsResponse(api.ListLocationsResponse o) { buildCounterListLocationsResponse++; if (buildCounterListLocationsResponse < 3) { - checkUnnamed51(o.locations!); + checkUnnamed59(o.locations!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -3256,12 +3634,12 @@ void checkListLocationsResponse(api.ListLocationsResponse o) { buildCounterListLocationsResponse--; } -core.List buildUnnamed52() => [ +core.List buildUnnamed60() => [ buildMessage(), buildMessage(), ]; -void checkUnnamed52(core.List o) { +void checkUnnamed60(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkMessage(o[0]); checkMessage(o[1]); @@ -3272,7 +3650,7 @@ api.ListMessagesResponse buildListMessagesResponse() { final o = api.ListMessagesResponse(); buildCounterListMessagesResponse++; if (buildCounterListMessagesResponse < 3) { - o.hl7V2Messages = buildUnnamed52(); + o.hl7V2Messages = buildUnnamed60(); o.nextPageToken = 'foo'; } buildCounterListMessagesResponse--; @@ -3282,7 +3660,7 @@ api.ListMessagesResponse buildListMessagesResponse() { void 
checkListMessagesResponse(api.ListMessagesResponse o) { buildCounterListMessagesResponse++; if (buildCounterListMessagesResponse < 3) { - checkUnnamed52(o.hl7V2Messages!); + checkUnnamed60(o.hl7V2Messages!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -3291,12 +3669,12 @@ void checkListMessagesResponse(api.ListMessagesResponse o) { buildCounterListMessagesResponse--; } -core.List buildUnnamed53() => [ +core.List buildUnnamed61() => [ buildOperation(), buildOperation(), ]; -void checkUnnamed53(core.List o) { +void checkUnnamed61(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkOperation(o[0]); checkOperation(o[1]); @@ -3308,7 +3686,7 @@ api.ListOperationsResponse buildListOperationsResponse() { buildCounterListOperationsResponse++; if (buildCounterListOperationsResponse < 3) { o.nextPageToken = 'foo'; - o.operations = buildUnnamed53(); + o.operations = buildUnnamed61(); } buildCounterListOperationsResponse--; return o; @@ -3321,17 +3699,17 @@ void checkListOperationsResponse(api.ListOperationsResponse o) { o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed53(o.operations!); + checkUnnamed61(o.operations!); } buildCounterListOperationsResponse--; } -core.List buildUnnamed54() => [ +core.List buildUnnamed62() => [ buildUserDataMapping(), buildUserDataMapping(), ]; -void checkUnnamed54(core.List o) { +void checkUnnamed62(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkUserDataMapping(o[0]); checkUserDataMapping(o[1]); @@ -3343,7 +3721,7 @@ api.ListUserDataMappingsResponse buildListUserDataMappingsResponse() { buildCounterListUserDataMappingsResponse++; if (buildCounterListUserDataMappingsResponse < 3) { o.nextPageToken = 'foo'; - o.userDataMappings = buildUnnamed54(); + o.userDataMappings = buildUnnamed62(); } buildCounterListUserDataMappingsResponse--; return o; @@ -3356,17 +3734,17 @@ void checkListUserDataMappingsResponse(api.ListUserDataMappingsResponse o) { o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed54(o.userDataMappings!); + checkUnnamed62(o.userDataMappings!); } buildCounterListUserDataMappingsResponse--; } -core.Map buildUnnamed55() => { +core.Map buildUnnamed63() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed55(core.Map o) { +void checkUnnamed63(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -3378,7 +3756,7 @@ void checkUnnamed55(core.Map o) { ); } -core.Map buildUnnamed56() => { +core.Map buildUnnamed64() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -3391,7 +3769,7 @@ core.Map buildUnnamed56() => { }, }; -void checkUnnamed56(core.Map o) { +void checkUnnamed64(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted3 = (o['x']!) 
as core.Map; unittest.expect(casted3, unittest.hasLength(3)); @@ -3429,9 +3807,9 @@ api.Location buildLocation() { buildCounterLocation++; if (buildCounterLocation < 3) { o.displayName = 'foo'; - o.labels = buildUnnamed55(); + o.labels = buildUnnamed63(); o.locationId = 'foo'; - o.metadata = buildUnnamed56(); + o.metadata = buildUnnamed64(); o.name = 'foo'; } buildCounterLocation--; @@ -3445,12 +3823,12 @@ void checkLocation(api.Location o) { o.displayName!, unittest.equals('foo'), ); - checkUnnamed55(o.labels!); + checkUnnamed63(o.labels!); unittest.expect( o.locationId!, unittest.equals('foo'), ); - checkUnnamed56(o.metadata!); + checkUnnamed64(o.metadata!); unittest.expect( o.name!, unittest.equals('foo'), @@ -3459,12 +3837,12 @@ void checkLocation(api.Location o) { buildCounterLocation--; } -core.Map buildUnnamed57() => { +core.Map buildUnnamed65() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed57(core.Map o) { +void checkUnnamed65(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -3476,12 +3854,12 @@ void checkUnnamed57(core.Map o) { ); } -core.List buildUnnamed58() => [ +core.List buildUnnamed66() => [ buildPatientId(), buildPatientId(), ]; -void checkUnnamed58(core.List o) { +void checkUnnamed66(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkPatientId(o[0]); checkPatientId(o[1]); @@ -3494,11 +3872,11 @@ api.Message buildMessage() { if (buildCounterMessage < 3) { o.createTime = 'foo'; o.data = 'foo'; - o.labels = buildUnnamed57(); + o.labels = buildUnnamed65(); o.messageType = 'foo'; o.name = 'foo'; o.parsedData = buildParsedData(); - o.patientIds = buildUnnamed58(); + o.patientIds = buildUnnamed66(); o.schematizedData = buildSchematizedData(); o.sendFacility = 'foo'; o.sendTime = 'foo'; @@ -3518,7 +3896,7 @@ void checkMessage(api.Message o) { o.data!, unittest.equals('foo'), ); - checkUnnamed57(o.labels!); + checkUnnamed65(o.labels!); unittest.expect( o.messageType!, unittest.equals('foo'), @@ -3528,7 +3906,7 @@ void checkMessage(api.Message o) { unittest.equals('foo'), ); checkParsedData(o.parsedData!); - checkUnnamed58(o.patientIds!); + checkUnnamed66(o.patientIds!); checkSchematizedData(o.schematizedData!); unittest.expect( o.sendFacility!, @@ -3566,7 +3944,7 @@ void checkNotificationConfig(api.NotificationConfig o) { buildCounterNotificationConfig--; } -core.Map buildUnnamed59() => { +core.Map buildUnnamed67() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -3579,7 +3957,7 @@ core.Map buildUnnamed59() => { }, }; -void checkUnnamed59(core.Map o) { +void checkUnnamed67(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted5 = (o['x']!) as core.Map; unittest.expect(casted5, unittest.hasLength(3)); @@ -3611,7 +3989,7 @@ void checkUnnamed59(core.Map o) { ); } -core.Map buildUnnamed60() => { +core.Map buildUnnamed68() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -3624,7 +4002,7 @@ core.Map buildUnnamed60() => { }, }; -void checkUnnamed60(core.Map o) { +void checkUnnamed68(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted7 = (o['x']!) 
as core.Map; unittest.expect(casted7, unittest.hasLength(3)); @@ -3663,9 +4041,9 @@ api.Operation buildOperation() { if (buildCounterOperation < 3) { o.done = true; o.error = buildStatus(); - o.metadata = buildUnnamed59(); + o.metadata = buildUnnamed67(); o.name = 'foo'; - o.response = buildUnnamed60(); + o.response = buildUnnamed68(); } buildCounterOperation--; return o; @@ -3676,22 +4054,22 @@ void checkOperation(api.Operation o) { if (buildCounterOperation < 3) { unittest.expect(o.done!, unittest.isTrue); checkStatus(o.error!); - checkUnnamed59(o.metadata!); + checkUnnamed67(o.metadata!); unittest.expect( o.name!, unittest.equals('foo'), ); - checkUnnamed60(o.response!); + checkUnnamed68(o.response!); } buildCounterOperation--; } -core.List buildUnnamed61() => [ +core.List buildUnnamed69() => [ buildSegment(), buildSegment(), ]; -void checkUnnamed61(core.List o) { +void checkUnnamed69(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkSegment(o[0]); checkSegment(o[1]); @@ -3702,7 +4080,7 @@ api.ParsedData buildParsedData() { final o = api.ParsedData(); buildCounterParsedData++; if (buildCounterParsedData < 3) { - o.segments = buildUnnamed61(); + o.segments = buildUnnamed69(); } buildCounterParsedData--; return o; @@ -3711,7 +4089,7 @@ api.ParsedData buildParsedData() { void checkParsedData(api.ParsedData o) { buildCounterParsedData++; if (buildCounterParsedData < 3) { - checkUnnamed61(o.segments!); + checkUnnamed69(o.segments!); } buildCounterParsedData--; } @@ -3774,23 +4152,59 @@ void checkPatientId(api.PatientId o) { buildCounterPatientId--; } -core.List buildUnnamed62() => [ +core.List buildUnnamed70() => [ + 'foo', + 'foo', + ]; + +void checkUnnamed70(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o[0], + unittest.equals('foo'), + ); + unittest.expect( + o[1], + unittest.equals('foo'), + ); +} + +core.int buildCounterPatientScope = 0; +api.PatientScope buildPatientScope() { + final o = api.PatientScope(); + buildCounterPatientScope++; + if (buildCounterPatientScope < 3) { + o.patientIds = buildUnnamed70(); + } + buildCounterPatientScope--; + return o; +} + +void checkPatientScope(api.PatientScope o) { + buildCounterPatientScope++; + if (buildCounterPatientScope < 3) { + checkUnnamed70(o.patientIds!); + } + buildCounterPatientScope--; +} + +core.List buildUnnamed71() => [ buildAuditConfig(), buildAuditConfig(), ]; -void checkUnnamed62(core.List o) { +void checkUnnamed71(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkAuditConfig(o[0]); checkAuditConfig(o[1]); } -core.List buildUnnamed63() => [ +core.List buildUnnamed72() => [ buildBinding(), buildBinding(), ]; -void checkUnnamed63(core.List o) { +void checkUnnamed72(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkBinding(o[0]); checkBinding(o[1]); @@ -3801,8 +4215,8 @@ api.Policy buildPolicy() { final o = api.Policy(); buildCounterPolicy++; if (buildCounterPolicy < 3) { - o.auditConfigs = buildUnnamed62(); - o.bindings = buildUnnamed63(); + o.auditConfigs = buildUnnamed71(); + o.bindings = buildUnnamed72(); o.etag = 'foo'; o.version = 42; } @@ -3813,8 +4227,8 @@ api.Policy buildPolicy() { void checkPolicy(api.Policy o) { buildCounterPolicy++; if (buildCounterPolicy < 3) { - checkUnnamed62(o.auditConfigs!); - checkUnnamed63(o.bindings!); + checkUnnamed71(o.auditConfigs!); + checkUnnamed72(o.bindings!); unittest.expect( o.etag!, unittest.equals('foo'), @@ -3849,12 +4263,12 @@ void checkPubsubDestination(api.PubsubDestination o) { 
buildCounterPubsubDestination--; } -core.Map buildUnnamed64() => { +core.Map buildUnnamed73() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed64(core.Map o) { +void checkUnnamed73(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -3866,12 +4280,12 @@ void checkUnnamed64(core.Map o) { ); } -core.Map buildUnnamed65() => { +core.Map buildUnnamed74() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed65(core.Map o) { +void checkUnnamed74(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -3889,8 +4303,8 @@ api.QueryAccessibleDataRequest buildQueryAccessibleDataRequest() { buildCounterQueryAccessibleDataRequest++; if (buildCounterQueryAccessibleDataRequest < 3) { o.gcsDestination = buildGoogleCloudHealthcareV1ConsentGcsDestination(); - o.requestAttributes = buildUnnamed64(); - o.resourceAttributes = buildUnnamed65(); + o.requestAttributes = buildUnnamed73(); + o.resourceAttributes = buildUnnamed74(); } buildCounterQueryAccessibleDataRequest--; return o; @@ -3900,8 +4314,8 @@ void checkQueryAccessibleDataRequest(api.QueryAccessibleDataRequest o) { buildCounterQueryAccessibleDataRequest++; if (buildCounterQueryAccessibleDataRequest < 3) { checkGoogleCloudHealthcareV1ConsentGcsDestination(o.gcsDestination!); - checkUnnamed64(o.requestAttributes!); - checkUnnamed65(o.resourceAttributes!); + checkUnnamed73(o.requestAttributes!); + checkUnnamed74(o.resourceAttributes!); } buildCounterQueryAccessibleDataRequest--; } @@ -3958,12 +4372,12 @@ void checkReplaceWithInfoTypeConfig(api.ReplaceWithInfoTypeConfig o) { buildCounterReplaceWithInfoTypeConfig--; } -core.List buildUnnamed66() => [ +core.List buildUnnamed75() => [ 'foo', 'foo', ]; -void checkUnnamed66(core.List o) { +void checkUnnamed75(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -3980,7 +4394,7 @@ api.Resources buildResources() { final o = api.Resources(); buildCounterResources++; if (buildCounterResources < 3) { - o.resources = buildUnnamed66(); + o.resources = buildUnnamed75(); } buildCounterResources--; return o; @@ -3989,17 +4403,17 @@ api.Resources buildResources() { void checkResources(api.Resources o) { buildCounterResources++; if (buildCounterResources < 3) { - checkUnnamed66(o.resources!); + checkUnnamed75(o.resources!); } buildCounterResources--; } -core.Map buildUnnamed67() => { +core.Map buildUnnamed76() => { 'x': buildConsentEvaluation(), 'y': buildConsentEvaluation(), }; -void checkUnnamed67(core.Map o) { +void checkUnnamed76(core.Map o) { unittest.expect(o, unittest.hasLength(2)); checkConsentEvaluation(o['x']!); checkConsentEvaluation(o['y']!); @@ -4010,7 +4424,7 @@ api.Result buildResult() { final o = api.Result(); buildCounterResult++; if (buildCounterResult < 3) { - o.consentDetails = buildUnnamed67(); + o.consentDetails = buildUnnamed76(); o.consented = true; o.dataId = 'foo'; } @@ -4021,7 +4435,7 @@ api.Result buildResult() { void checkResult(api.Result o) { buildCounterResult++; if (buildCounterResult < 3) { - checkUnnamed67(o.consentDetails!); + checkUnnamed76(o.consentDetails!); unittest.expect(o.consented!, unittest.isTrue); unittest.expect( o.dataId!, @@ -4053,12 +4467,12 @@ void checkRevokeConsentRequest(api.RevokeConsentRequest o) { buildCounterRevokeConsentRequest--; } -core.List buildUnnamed68() => [ +core.List buildUnnamed77() => [ 'foo', 'foo', ]; -void checkUnnamed68(core.List o) { +void checkUnnamed77(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -4077,7 +4491,7 @@ 
api.RollbackFhirResourceFilteringFields buildCounterRollbackFhirResourceFilteringFields++; if (buildCounterRollbackFhirResourceFilteringFields < 3) { o.metadataFilter = 'foo'; - o.operationIds = buildUnnamed68(); + o.operationIds = buildUnnamed77(); } buildCounterRollbackFhirResourceFilteringFields--; return o; @@ -4091,17 +4505,17 @@ void checkRollbackFhirResourceFilteringFields( o.metadataFilter!, unittest.equals('foo'), ); - checkUnnamed68(o.operationIds!); + checkUnnamed77(o.operationIds!); } buildCounterRollbackFhirResourceFilteringFields--; } -core.List buildUnnamed69() => [ +core.List buildUnnamed78() => [ 'foo', 'foo', ]; -void checkUnnamed69(core.List o) { +void checkUnnamed78(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -4125,7 +4539,7 @@ api.RollbackFhirResourcesRequest buildRollbackFhirResourcesRequest() { o.inputGcsObject = 'foo'; o.resultGcsBucket = 'foo'; o.rollbackTime = 'foo'; - o.type = buildUnnamed69(); + o.type = buildUnnamed78(); } buildCounterRollbackFhirResourcesRequest--; return o; @@ -4153,17 +4567,17 @@ void checkRollbackFhirResourcesRequest(api.RollbackFhirResourcesRequest o) { o.rollbackTime!, unittest.equals('foo'), ); - checkUnnamed69(o.type!); + checkUnnamed78(o.type!); } buildCounterRollbackFhirResourcesRequest--; } -core.List buildUnnamed70() => [ +core.List buildUnnamed79() => [ 'foo', 'foo', ]; -void checkUnnamed70(core.List o) { +void checkUnnamed79(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -4181,7 +4595,7 @@ api.RollbackHL7MessagesFilteringFields final o = api.RollbackHL7MessagesFilteringFields(); buildCounterRollbackHL7MessagesFilteringFields++; if (buildCounterRollbackHL7MessagesFilteringFields < 3) { - o.operationIds = buildUnnamed70(); + o.operationIds = buildUnnamed79(); } buildCounterRollbackHL7MessagesFilteringFields--; return o; @@ -4191,7 +4605,7 @@ void checkRollbackHL7MessagesFilteringFields( api.RollbackHL7MessagesFilteringFields o) { buildCounterRollbackHL7MessagesFilteringFields++; if (buildCounterRollbackHL7MessagesFilteringFields < 3) { - checkUnnamed70(o.operationIds!); + checkUnnamed79(o.operationIds!); } buildCounterRollbackHL7MessagesFilteringFields--; } @@ -4268,12 +4682,12 @@ void checkSchemaConfig(api.SchemaConfig o) { buildCounterSchemaConfig--; } -core.List buildUnnamed71() => [ +core.List buildUnnamed80() => [ buildGroupOrSegment(), buildGroupOrSegment(), ]; -void checkUnnamed71(core.List o) { +void checkUnnamed80(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGroupOrSegment(o[0]); checkGroupOrSegment(o[1]); @@ -4286,7 +4700,7 @@ api.SchemaGroup buildSchemaGroup() { if (buildCounterSchemaGroup < 3) { o.choice = true; o.maxOccurs = 42; - o.members = buildUnnamed71(); + o.members = buildUnnamed80(); o.minOccurs = 42; o.name = 'foo'; } @@ -4302,7 +4716,7 @@ void checkSchemaGroup(api.SchemaGroup o) { o.maxOccurs!, unittest.equals(42), ); - checkUnnamed71(o.members!); + checkUnnamed80(o.members!); unittest.expect( o.minOccurs!, unittest.equals(42), @@ -4315,23 +4729,23 @@ void checkSchemaGroup(api.SchemaGroup o) { buildCounterSchemaGroup--; } -core.List buildUnnamed72() => [ +core.List buildUnnamed81() => [ buildHl7SchemaConfig(), buildHl7SchemaConfig(), ]; -void checkUnnamed72(core.List o) { +void checkUnnamed81(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkHl7SchemaConfig(o[0]); checkHl7SchemaConfig(o[1]); } -core.List buildUnnamed73() => [ +core.List buildUnnamed82() => [ buildHl7TypesConfig(), buildHl7TypesConfig(), 
]; -void checkUnnamed73(core.List o) { +void checkUnnamed82(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkHl7TypesConfig(o[0]); checkHl7TypesConfig(o[1]); @@ -4343,9 +4757,9 @@ api.SchemaPackage buildSchemaPackage() { buildCounterSchemaPackage++; if (buildCounterSchemaPackage < 3) { o.ignoreMinOccurs = true; - o.schemas = buildUnnamed72(); + o.schemas = buildUnnamed81(); o.schematizedParsingType = 'foo'; - o.types = buildUnnamed73(); + o.types = buildUnnamed82(); o.unexpectedSegmentHandling = 'foo'; } buildCounterSchemaPackage--; @@ -4356,12 +4770,12 @@ void checkSchemaPackage(api.SchemaPackage o) { buildCounterSchemaPackage++; if (buildCounterSchemaPackage < 3) { unittest.expect(o.ignoreMinOccurs!, unittest.isTrue); - checkUnnamed72(o.schemas!); + checkUnnamed81(o.schemas!); unittest.expect( o.schematizedParsingType!, unittest.equals('foo'), ); - checkUnnamed73(o.types!); + checkUnnamed82(o.types!); unittest.expect( o.unexpectedSegmentHandling!, unittest.equals('foo'), @@ -4451,12 +4865,12 @@ void checkSearchResourcesRequest(api.SearchResourcesRequest o) { buildCounterSearchResourcesRequest--; } -core.Map buildUnnamed74() => { +core.Map buildUnnamed83() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed74(core.Map o) { +void checkUnnamed83(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -4473,7 +4887,7 @@ api.Segment buildSegment() { final o = api.Segment(); buildCounterSegment++; if (buildCounterSegment < 3) { - o.fields = buildUnnamed74(); + o.fields = buildUnnamed83(); o.segmentId = 'foo'; o.setId = 'foo'; } @@ -4484,7 +4898,7 @@ api.Segment buildSegment() { void checkSegment(api.Segment o) { buildCounterSegment++; if (buildCounterSegment < 3) { - checkUnnamed74(o.fields!); + checkUnnamed83(o.fields!); unittest.expect( o.segmentId!, unittest.equals('foo'), @@ -4579,12 +4993,12 @@ void checkSetIamPolicyRequest(api.SetIamPolicyRequest o) { buildCounterSetIamPolicyRequest--; } -core.Map buildUnnamed75() => { +core.Map buildUnnamed84() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed75(core.Map o) { +void checkUnnamed84(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -4602,7 +5016,7 @@ api.Signature buildSignature() { buildCounterSignature++; if (buildCounterSignature < 3) { o.image = buildImage(); - o.metadata = buildUnnamed75(); + o.metadata = buildUnnamed84(); o.signatureTime = 'foo'; o.userId = 'foo'; } @@ -4614,7 +5028,7 @@ void checkSignature(api.Signature o) { buildCounterSignature++; if (buildCounterSignature < 3) { checkImage(o.image!); - checkUnnamed75(o.metadata!); + checkUnnamed84(o.metadata!); unittest.expect( o.signatureTime!, unittest.equals('foo'), @@ -4627,7 +5041,7 @@ void checkSignature(api.Signature o) { buildCounterSignature--; } -core.Map buildUnnamed76() => { +core.Map buildUnnamed85() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -4640,7 +5054,7 @@ core.Map buildUnnamed76() => { }, }; -void checkUnnamed76(core.Map o) { +void checkUnnamed85(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted9 = (o['x']!) 
as core.Map; unittest.expect(casted9, unittest.hasLength(3)); @@ -4672,15 +5086,15 @@ void checkUnnamed76(core.Map o) { ); } -core.List> buildUnnamed77() => [ - buildUnnamed76(), - buildUnnamed76(), +core.List> buildUnnamed86() => [ + buildUnnamed85(), + buildUnnamed85(), ]; -void checkUnnamed77(core.List> o) { +void checkUnnamed86(core.List> o) { unittest.expect(o, unittest.hasLength(2)); - checkUnnamed76(o[0]); - checkUnnamed76(o[1]); + checkUnnamed85(o[0]); + checkUnnamed85(o[1]); } core.int buildCounterStatus = 0; @@ -4689,7 +5103,7 @@ api.Status buildStatus() { buildCounterStatus++; if (buildCounterStatus < 3) { o.code = 42; - o.details = buildUnnamed77(); + o.details = buildUnnamed86(); o.message = 'foo'; } buildCounterStatus--; @@ -4703,7 +5117,7 @@ void checkStatus(api.Status o) { o.code!, unittest.equals(42), ); - checkUnnamed77(o.details!); + checkUnnamed86(o.details!); unittest.expect( o.message!, unittest.equals('foo'), @@ -4738,12 +5152,12 @@ void checkStorageInfo(api.StorageInfo o) { buildCounterStorageInfo--; } -core.List buildUnnamed78() => [ +core.List buildUnnamed87() => [ 'foo', 'foo', ]; -void checkUnnamed78(core.List o) { +void checkUnnamed87(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -4763,7 +5177,7 @@ api.StreamConfig buildStreamConfig() { o.bigqueryDestination = buildGoogleCloudHealthcareV1FhirBigQueryDestination(); o.deidentifiedStoreDestination = buildDeidentifiedStoreDestination(); - o.resourceTypes = buildUnnamed78(); + o.resourceTypes = buildUnnamed87(); } buildCounterStreamConfig--; return o; @@ -4774,7 +5188,7 @@ void checkStreamConfig(api.StreamConfig o) { if (buildCounterStreamConfig < 3) { checkGoogleCloudHealthcareV1FhirBigQueryDestination(o.bigqueryDestination!); checkDeidentifiedStoreDestination(o.deidentifiedStoreDestination!); - checkUnnamed78(o.resourceTypes!); + checkUnnamed87(o.resourceTypes!); } buildCounterStreamConfig--; } @@ -4843,12 +5257,12 @@ void checkStudyMetrics(api.StudyMetrics o) { buildCounterStudyMetrics--; } -core.List buildUnnamed79() => [ +core.List buildUnnamed88() => [ 'foo', 'foo', ]; -void checkUnnamed79(core.List o) { +void checkUnnamed88(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -4865,7 +5279,7 @@ api.TagFilterList buildTagFilterList() { final o = api.TagFilterList(); buildCounterTagFilterList++; if (buildCounterTagFilterList < 3) { - o.tags = buildUnnamed79(); + o.tags = buildUnnamed88(); } buildCounterTagFilterList--; return o; @@ -4874,17 +5288,17 @@ api.TagFilterList buildTagFilterList() { void checkTagFilterList(api.TagFilterList o) { buildCounterTagFilterList++; if (buildCounterTagFilterList < 3) { - checkUnnamed79(o.tags!); + checkUnnamed88(o.tags!); } buildCounterTagFilterList--; } -core.List buildUnnamed80() => [ +core.List buildUnnamed89() => [ 'foo', 'foo', ]; -void checkUnnamed80(core.List o) { +void checkUnnamed89(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -4901,7 +5315,7 @@ api.TestIamPermissionsRequest buildTestIamPermissionsRequest() { final o = api.TestIamPermissionsRequest(); buildCounterTestIamPermissionsRequest++; if (buildCounterTestIamPermissionsRequest < 3) { - o.permissions = buildUnnamed80(); + o.permissions = buildUnnamed89(); } buildCounterTestIamPermissionsRequest--; return o; @@ -4910,17 +5324,17 @@ api.TestIamPermissionsRequest buildTestIamPermissionsRequest() { void checkTestIamPermissionsRequest(api.TestIamPermissionsRequest o) { buildCounterTestIamPermissionsRequest++; if 
(buildCounterTestIamPermissionsRequest < 3) { - checkUnnamed80(o.permissions!); + checkUnnamed89(o.permissions!); } buildCounterTestIamPermissionsRequest--; } -core.List buildUnnamed81() => [ +core.List buildUnnamed90() => [ 'foo', 'foo', ]; -void checkUnnamed81(core.List o) { +void checkUnnamed90(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -4937,7 +5351,7 @@ api.TestIamPermissionsResponse buildTestIamPermissionsResponse() { final o = api.TestIamPermissionsResponse(); buildCounterTestIamPermissionsResponse++; if (buildCounterTestIamPermissionsResponse < 3) { - o.permissions = buildUnnamed81(); + o.permissions = buildUnnamed90(); } buildCounterTestIamPermissionsResponse--; return o; @@ -4946,28 +5360,28 @@ api.TestIamPermissionsResponse buildTestIamPermissionsResponse() { void checkTestIamPermissionsResponse(api.TestIamPermissionsResponse o) { buildCounterTestIamPermissionsResponse++; if (buildCounterTestIamPermissionsResponse < 3) { - checkUnnamed81(o.permissions!); + checkUnnamed90(o.permissions!); } buildCounterTestIamPermissionsResponse--; } -core.List buildUnnamed82() => [ +core.List buildUnnamed91() => [ buildInfoTypeTransformation(), buildInfoTypeTransformation(), ]; -void checkUnnamed82(core.List o) { +void checkUnnamed91(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkInfoTypeTransformation(o[0]); checkInfoTypeTransformation(o[1]); } -core.List buildUnnamed83() => [ +core.List buildUnnamed92() => [ 'foo', 'foo', ]; -void checkUnnamed83(core.List o) { +void checkUnnamed92(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -4979,12 +5393,12 @@ void checkUnnamed83(core.List o) { ); } -core.List buildUnnamed84() => [ +core.List buildUnnamed93() => [ buildInfoTypeTransformation(), buildInfoTypeTransformation(), ]; -void checkUnnamed84(core.List o) { +void checkUnnamed93(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkInfoTypeTransformation(o[0]); checkInfoTypeTransformation(o[1]); @@ -4995,9 +5409,9 @@ api.TextConfig buildTextConfig() { final o = api.TextConfig(); buildCounterTextConfig++; if (buildCounterTextConfig < 3) { - o.additionalTransformations = buildUnnamed82(); - o.excludeInfoTypes = buildUnnamed83(); - o.transformations = buildUnnamed84(); + o.additionalTransformations = buildUnnamed91(); + o.excludeInfoTypes = buildUnnamed92(); + o.transformations = buildUnnamed93(); } buildCounterTextConfig--; return o; @@ -5006,9 +5420,9 @@ api.TextConfig buildTextConfig() { void checkTextConfig(api.TextConfig o) { buildCounterTextConfig++; if (buildCounterTextConfig < 3) { - checkUnnamed82(o.additionalTransformations!); - checkUnnamed83(o.excludeInfoTypes!); - checkUnnamed84(o.transformations!); + checkUnnamed91(o.additionalTransformations!); + checkUnnamed92(o.excludeInfoTypes!); + checkUnnamed93(o.transformations!); } buildCounterTextConfig--; } @@ -5067,23 +5481,50 @@ void checkTimePartitioning(api.TimePartitioning o) { buildCounterTimePartitioning--; } -core.List buildUnnamed85() => [ - buildField(), - buildField(), - ]; - -void checkUnnamed85(core.List o) { - unittest.expect(o, unittest.hasLength(2)); - checkField(o[0]); - checkField(o[1]); +core.int buildCounterTimeRange = 0; +api.TimeRange buildTimeRange() { + final o = api.TimeRange(); + buildCounterTimeRange++; + if (buildCounterTimeRange < 3) { + o.end = 'foo'; + o.start = 'foo'; + } + buildCounterTimeRange--; + return o; } -core.int buildCounterType = 0; -api.Type buildType() { +void checkTimeRange(api.TimeRange o) { + 
buildCounterTimeRange++; + if (buildCounterTimeRange < 3) { + unittest.expect( + o.end!, + unittest.equals('foo'), + ); + unittest.expect( + o.start!, + unittest.equals('foo'), + ); + } + buildCounterTimeRange--; +} + +core.List buildUnnamed94() => [ + buildField(), + buildField(), + ]; + +void checkUnnamed94(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkField(o[0]); + checkField(o[1]); +} + +core.int buildCounterType = 0; +api.Type buildType() { final o = api.Type(); buildCounterType++; if (buildCounterType < 3) { - o.fields = buildUnnamed85(); + o.fields = buildUnnamed94(); o.name = 'foo'; o.primitive = 'foo'; } @@ -5094,7 +5535,7 @@ api.Type buildType() { void checkType(api.Type o) { buildCounterType++; if (buildCounterType < 3) { - checkUnnamed85(o.fields!); + checkUnnamed94(o.fields!); unittest.expect( o.name!, unittest.equals('foo'), @@ -5107,12 +5548,12 @@ void checkType(api.Type o) { buildCounterType--; } -core.List buildUnnamed86() => [ +core.List buildUnnamed95() => [ buildAttribute(), buildAttribute(), ]; -void checkUnnamed86(core.List o) { +void checkUnnamed95(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkAttribute(o[0]); checkAttribute(o[1]); @@ -5127,7 +5568,7 @@ api.UserDataMapping buildUserDataMapping() { o.archived = true; o.dataId = 'foo'; o.name = 'foo'; - o.resourceAttributes = buildUnnamed86(); + o.resourceAttributes = buildUnnamed95(); o.userId = 'foo'; } buildCounterUserDataMapping--; @@ -5150,7 +5591,7 @@ void checkUserDataMapping(api.UserDataMapping o) { o.name!, unittest.equals('foo'), ); - checkUnnamed86(o.resourceAttributes!); + checkUnnamed95(o.resourceAttributes!); unittest.expect( o.userId!, unittest.equals('foo'), @@ -5159,12 +5600,12 @@ void checkUserDataMapping(api.UserDataMapping o) { buildCounterUserDataMapping--; } -core.List buildUnnamed87() => [ +core.List buildUnnamed96() => [ 'foo', 'foo', ]; -void checkUnnamed87(core.List o) { +void checkUnnamed96(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -5185,7 +5626,7 @@ api.ValidationConfig buildValidationConfig() { o.disableProfileValidation = true; o.disableReferenceTypeValidation = true; o.disableRequiredFieldValidation = true; - o.enabledImplementationGuides = buildUnnamed87(); + o.enabledImplementationGuides = buildUnnamed96(); } buildCounterValidationConfig--; return o; @@ -5198,7 +5639,7 @@ void checkValidationConfig(api.ValidationConfig o) { unittest.expect(o.disableProfileValidation!, unittest.isTrue); unittest.expect(o.disableReferenceTypeValidation!, unittest.isTrue); unittest.expect(o.disableRequiredFieldValidation!, unittest.isTrue); - checkUnnamed87(o.enabledImplementationGuides!); + checkUnnamed96(o.enabledImplementationGuides!); } buildCounterValidationConfig--; } @@ -5231,6 +5672,16 @@ void checkVersionSource(api.VersionSource o) { } void main() { + unittest.group('obj-schema-AccessDeterminationLogConfig', () { + unittest.test('to-json--from-json', () async { + final o = buildAccessDeterminationLogConfig(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.AccessDeterminationLogConfig.fromJson( + oJson as core.Map); + checkAccessDeterminationLogConfig(od); + }); + }); + unittest.group('obj-schema-ActivateConsentRequest', () { unittest.test('to-json--from-json', () async { final o = buildActivateConsentRequest(); @@ -5241,6 +5692,16 @@ void main() { }); }); + unittest.group('obj-schema-AdminConsents', () { + unittest.test('to-json--from-json', () async { + final o = buildAdminConsents(); + 
final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.AdminConsents.fromJson( + oJson as core.Map); + checkAdminConsents(od); + }); + }); + unittest.group('obj-schema-AnalyzeEntitiesRequest', () { unittest.test('to-json--from-json', () async { final o = buildAnalyzeEntitiesRequest(); @@ -5261,6 +5722,26 @@ void main() { }); }); + unittest.group('obj-schema-ApplyAdminConsentsRequest', () { + unittest.test('to-json--from-json', () async { + final o = buildApplyAdminConsentsRequest(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.ApplyAdminConsentsRequest.fromJson( + oJson as core.Map); + checkApplyAdminConsentsRequest(od); + }); + }); + + unittest.group('obj-schema-ApplyConsentsRequest', () { + unittest.test('to-json--from-json', () async { + final o = buildApplyConsentsRequest(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.ApplyConsentsRequest.fromJson( + oJson as core.Map); + checkApplyConsentsRequest(od); + }); + }); + unittest.group('obj-schema-ArchiveUserDataMappingRequest', () { unittest.test('to-json--from-json', () async { final o = buildArchiveUserDataMappingRequest(); @@ -5401,6 +5882,16 @@ void main() { }); }); + unittest.group('obj-schema-ConsentAccessorScope', () { + unittest.test('to-json--from-json', () async { + final o = buildConsentAccessorScope(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.ConsentAccessorScope.fromJson( + oJson as core.Map); + checkConsentAccessorScope(od); + }); + }); + unittest.group('obj-schema-ConsentArtifact', () { unittest.test('to-json--from-json', () async { final o = buildConsentArtifact(); @@ -5411,6 +5902,16 @@ void main() { }); }); + unittest.group('obj-schema-ConsentConfig', () { + unittest.test('to-json--from-json', () async { + final o = buildConsentConfig(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.ConsentConfig.fromJson( + oJson as core.Map); + checkConsentConfig(od); + }); + }); + unittest.group('obj-schema-ConsentEvaluation', () { unittest.test('to-json--from-json', () async { final o = buildConsentEvaluation(); @@ -5421,6 +5922,16 @@ void main() { }); }); + unittest.group('obj-schema-ConsentHeaderHandling', () { + unittest.test('to-json--from-json', () async { + final o = buildConsentHeaderHandling(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.ConsentHeaderHandling.fromJson( + oJson as core.Map); + checkConsentHeaderHandling(od); + }); + }); + unittest.group('obj-schema-ConsentList', () { unittest.test('to-json--from-json', () async { final o = buildConsentList(); @@ -5641,6 +6152,36 @@ void main() { }); }); + unittest.group('obj-schema-ExplainDataAccessConsentInfo', () { + unittest.test('to-json--from-json', () async { + final o = buildExplainDataAccessConsentInfo(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.ExplainDataAccessConsentInfo.fromJson( + oJson as core.Map); + checkExplainDataAccessConsentInfo(od); + }); + }); + + unittest.group('obj-schema-ExplainDataAccessConsentScope', () { + unittest.test('to-json--from-json', () async { + final o = buildExplainDataAccessConsentScope(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.ExplainDataAccessConsentScope.fromJson( + oJson as core.Map); + checkExplainDataAccessConsentScope(od); + }); + }); + + unittest.group('obj-schema-ExplainDataAccessResponse', () { + unittest.test('to-json--from-json', () async { + final o = 
buildExplainDataAccessResponse(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.ExplainDataAccessResponse.fromJson( + oJson as core.Map); + checkExplainDataAccessResponse(od); + }); + }); + unittest.group('obj-schema-ExportDicomDataRequest', () { unittest.test('to-json--from-json', () async { final o = buildExportDicomDataRequest(); @@ -6263,6 +6804,16 @@ void main() { }); }); + unittest.group('obj-schema-PatientScope', () { + unittest.test('to-json--from-json', () async { + final o = buildPatientScope(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.PatientScope.fromJson( + oJson as core.Map); + checkPatientScope(od); + }); + }); + unittest.group('obj-schema-Policy', () { unittest.test('to-json--from-json', () async { final o = buildPolicy(); @@ -6613,6 +7164,16 @@ void main() { }); }); + unittest.group('obj-schema-TimeRange', () { + unittest.test('to-json--from-json', () async { + final o = buildTimeRange(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.TimeRange.fromJson(oJson as core.Map); + checkTimeRange(od); + }); + }); + unittest.group('obj-schema-Type', () { unittest.test('to-json--from-json', () async { final o = buildType(); @@ -12075,6 +12636,124 @@ void main() { }); unittest.group('resource-ProjectsLocationsDatasetsFhirStoresResource', () { + unittest.test('method--applyAdminConsents', () async { + final mock = HttpServerMock(); + final res = + api.CloudHealthcareApi(mock).projects.locations.datasets.fhirStores; + final arg_request = buildApplyAdminConsentsRequest(); + final arg_name = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.ApplyAdminConsentsRequest.fromJson( + json as core.Map); + checkApplyAdminConsentsRequest(obj); + + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildOperation()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.applyAdminConsents(arg_request, arg_name, + $fields: arg_$fields); + checkOperation(response as api.Operation); + }); + + unittest.test('method--applyConsents', () async { + final mock = HttpServerMock(); + final res = + api.CloudHealthcareApi(mock).projects.locations.datasets.fhirStores; + final arg_request = buildApplyConsentsRequest(); + final arg_name = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.ApplyConsentsRequest.fromJson( + json as core.Map); + checkApplyConsentsRequest(obj); + + final path = 
req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildOperation()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = + await res.applyConsents(arg_request, arg_name, $fields: arg_$fields); + checkOperation(response as api.Operation); + }); + unittest.test('method--create', () async { final mock = HttpServerMock(); final res = @@ -12251,6 +12930,65 @@ void main() { checkEmpty(response as api.Empty); }); + unittest.test('method--explainDataAccess', () async { + final mock = HttpServerMock(); + final res = + api.CloudHealthcareApi(mock).projects.locations.datasets.fhirStores; + final arg_name = 'foo'; + final arg_resourceId = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['resourceId']!.first, + unittest.equals(arg_resourceId), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildExplainDataAccessResponse()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.explainDataAccess(arg_name, + resourceId: arg_resourceId, $fields: arg_$fields); + checkExplainDataAccessResponse(response as api.ExplainDataAccessResponse); + }); + unittest.test('method--export', () async { final mock = HttpServerMock(); final res = @@ -13093,6 +13831,134 @@ void main() { checkHttpBody(response as api.HttpBody); }); + unittest.test('method--ConsentEnforcementStatus', () async { + final mock = HttpServerMock(); + final res = api.CloudHealthcareApi(mock) + .projects + .locations + .datasets + .fhirStores + .fhir; + final arg_name = 'foo'; + final arg_$fields = 'foo'; + 
mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildHttpBody()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = + await res.ConsentEnforcementStatus(arg_name, $fields: arg_$fields); + checkHttpBody(response as api.HttpBody); + }); + + unittest.test('method--PatientConsentEnforcementStatus', () async { + final mock = HttpServerMock(); + final res = api.CloudHealthcareApi(mock) + .projects + .locations + .datasets + .fhirStores + .fhir; + final arg_name = 'foo'; + final arg_P_count = 42; + final arg_P_pageToken = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + core.int.parse(queryMap['_count']!.first), + unittest.equals(arg_P_count), + ); + unittest.expect( + queryMap['_page_token']!.first, + unittest.equals(arg_P_pageToken), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildHttpBody()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.PatientConsentEnforcementStatus(arg_name, + P_count: arg_P_count, + P_pageToken: arg_P_pageToken, + $fields: arg_$fields); + checkHttpBody(response as api.HttpBody); + }); + unittest.test('method--PatientEverything', () async { final mock = HttpServerMock(); final res = api.CloudHealthcareApi(mock) diff --git a/generated/googleapis/test/iamcredentials/v1_test.dart b/generated/googleapis/test/iamcredentials/v1_test.dart index 370dc2070..42f38c17c 100644 --- a/generated/googleapis/test/iamcredentials/v1_test.dart +++ 
b/generated/googleapis/test/iamcredentials/v1_test.dart @@ -195,12 +195,53 @@ void checkUnnamed3(core.List o) { ); } +core.int buildCounterServiceAccountAllowedLocations = 0; +api.ServiceAccountAllowedLocations buildServiceAccountAllowedLocations() { + final o = api.ServiceAccountAllowedLocations(); + buildCounterServiceAccountAllowedLocations++; + if (buildCounterServiceAccountAllowedLocations < 3) { + o.encodedLocations = 'foo'; + o.locations = buildUnnamed3(); + } + buildCounterServiceAccountAllowedLocations--; + return o; +} + +void checkServiceAccountAllowedLocations(api.ServiceAccountAllowedLocations o) { + buildCounterServiceAccountAllowedLocations++; + if (buildCounterServiceAccountAllowedLocations < 3) { + unittest.expect( + o.encodedLocations!, + unittest.equals('foo'), + ); + checkUnnamed3(o.locations!); + } + buildCounterServiceAccountAllowedLocations--; +} + +core.List buildUnnamed4() => [ + 'foo', + 'foo', + ]; + +void checkUnnamed4(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o[0], + unittest.equals('foo'), + ); + unittest.expect( + o[1], + unittest.equals('foo'), + ); +} + core.int buildCounterSignBlobRequest = 0; api.SignBlobRequest buildSignBlobRequest() { final o = api.SignBlobRequest(); buildCounterSignBlobRequest++; if (buildCounterSignBlobRequest < 3) { - o.delegates = buildUnnamed3(); + o.delegates = buildUnnamed4(); o.payload = 'foo'; } buildCounterSignBlobRequest--; @@ -210,7 +251,7 @@ api.SignBlobRequest buildSignBlobRequest() { void checkSignBlobRequest(api.SignBlobRequest o) { buildCounterSignBlobRequest++; if (buildCounterSignBlobRequest < 3) { - checkUnnamed3(o.delegates!); + checkUnnamed4(o.delegates!); unittest.expect( o.payload!, unittest.equals('foo'), @@ -246,12 +287,12 @@ void checkSignBlobResponse(api.SignBlobResponse o) { buildCounterSignBlobResponse--; } -core.List buildUnnamed4() => [ +core.List buildUnnamed5() => [ 'foo', 'foo', ]; -void checkUnnamed4(core.List o) { +void checkUnnamed5(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -268,7 +309,7 @@ api.SignJwtRequest buildSignJwtRequest() { final o = api.SignJwtRequest(); buildCounterSignJwtRequest++; if (buildCounterSignJwtRequest < 3) { - o.delegates = buildUnnamed4(); + o.delegates = buildUnnamed5(); o.payload = 'foo'; } buildCounterSignJwtRequest--; @@ -278,7 +319,7 @@ api.SignJwtRequest buildSignJwtRequest() { void checkSignJwtRequest(api.SignJwtRequest o) { buildCounterSignJwtRequest++; if (buildCounterSignJwtRequest < 3) { - checkUnnamed4(o.delegates!); + checkUnnamed5(o.delegates!); unittest.expect( o.payload!, unittest.equals('foo'), @@ -355,6 +396,16 @@ void main() { }); }); + unittest.group('obj-schema-ServiceAccountAllowedLocations', () { + unittest.test('to-json--from-json', () async { + final o = buildServiceAccountAllowedLocations(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.ServiceAccountAllowedLocations.fromJson( + oJson as core.Map); + checkServiceAccountAllowedLocations(od); + }); + }); + unittest.group('obj-schema-SignBlobRequest', () { unittest.test('to-json--from-json', () async { final o = buildSignBlobRequest(); @@ -513,6 +564,60 @@ void main() { checkGenerateIdTokenResponse(response as api.GenerateIdTokenResponse); }); + unittest.test('method--getAllowedLocations', () async { + final mock = HttpServerMock(); + final res = api.IAMCredentialsApi(mock).projects.serviceAccounts; + final arg_name = 'foo'; + final arg_$fields = 'foo'; + 
mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildServiceAccountAllowedLocations()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = + await res.getAllowedLocations(arg_name, $fields: arg_$fields); + checkServiceAccountAllowedLocations( + response as api.ServiceAccountAllowedLocations); + }); + unittest.test('method--signBlob', () async { final mock = HttpServerMock(); final res = api.IAMCredentialsApi(mock).projects.serviceAccounts; diff --git a/generated/googleapis/test/identitytoolkit/v2_test.dart b/generated/googleapis/test/identitytoolkit/v2_test.dart index e12f8a909..0b52acc1b 100644 --- a/generated/googleapis/test/identitytoolkit/v2_test.dart +++ b/generated/googleapis/test/identitytoolkit/v2_test.dart @@ -1541,6 +1541,8 @@ api.GoogleCloudIdentitytoolkitAdminV2RecaptchaConfig o.recaptchaKeys = buildUnnamed16(); o.tollFraudManagedRules = buildUnnamed17(); o.useAccountDefender = true; + o.useSmsBotScore = true; + o.useSmsTollFraudProtection = true; } buildCounterGoogleCloudIdentitytoolkitAdminV2RecaptchaConfig--; return o; @@ -1562,6 +1564,8 @@ void checkGoogleCloudIdentitytoolkitAdminV2RecaptchaConfig( checkUnnamed16(o.recaptchaKeys!); checkUnnamed17(o.tollFraudManagedRules!); unittest.expect(o.useAccountDefender!, unittest.isTrue); + unittest.expect(o.useSmsBotScore!, unittest.isTrue); + unittest.expect(o.useSmsTollFraudProtection!, unittest.isTrue); } buildCounterGoogleCloudIdentitytoolkitAdminV2RecaptchaConfig--; } @@ -2571,6 +2575,8 @@ api.GoogleCloudIdentitytoolkitV2RecaptchaConfig if (buildCounterGoogleCloudIdentitytoolkitV2RecaptchaConfig < 3) { o.recaptchaEnforcementState = buildUnnamed21(); o.recaptchaKey = 'foo'; + o.useSmsBotScore = true; + o.useSmsTollFraudProtection = true; } buildCounterGoogleCloudIdentitytoolkitV2RecaptchaConfig--; return o; @@ -2585,6 +2591,8 @@ void checkGoogleCloudIdentitytoolkitV2RecaptchaConfig( o.recaptchaKey!, unittest.equals('foo'), ); + unittest.expect(o.useSmsBotScore!, unittest.isTrue); + unittest.expect(o.useSmsTollFraudProtection!, unittest.isTrue); } buildCounterGoogleCloudIdentitytoolkitV2RecaptchaConfig--; } diff --git a/generated/googleapis/test/integrations/v1_test.dart b/generated/googleapis/test/integrations/v1_test.dart index 3837bb6ad..6045b3f3b 100644 --- a/generated/googleapis/test/integrations/v1_test.dart +++ b/generated/googleapis/test/integrations/v1_test.dart @@ -3201,6 +3201,7 @@ api.EnterpriseCrmFrontendsEventbusProtoEventExecutionInfoReplayInfo if 
(buildCounterEnterpriseCrmFrontendsEventbusProtoEventExecutionInfoReplayInfo < 3) { o.originalExecutionInfoId = 'foo'; + o.replayMode = 'foo'; o.replayReason = 'foo'; o.replayedExecutionInfoIds = buildUnnamed37(); } @@ -3217,6 +3218,10 @@ void checkEnterpriseCrmFrontendsEventbusProtoEventExecutionInfoReplayInfo( o.originalExecutionInfoId!, unittest.equals('foo'), ); + unittest.expect( + o.replayMode!, + unittest.equals('foo'), + ); unittest.expect( o.replayReason!, unittest.equals('foo'), @@ -4046,46 +4051,12 @@ void checkUnnamed52(core.List o) { ); } -core.List buildUnnamed53() => [ - 'foo', - 'foo', - ]; - -void checkUnnamed53(core.List o) { - unittest.expect(o, unittest.hasLength(2)); - unittest.expect( - o[0], - unittest.equals('foo'), - ); - unittest.expect( - o[1], - unittest.equals('foo'), - ); -} - -core.List buildUnnamed54() => [ - 'foo', - 'foo', - ]; - -void checkUnnamed54(core.List o) { - unittest.expect(o, unittest.hasLength(2)); - unittest.expect( - o[0], - unittest.equals('foo'), - ); - unittest.expect( - o[1], - unittest.equals('foo'), - ); -} - -core.Map buildUnnamed55() => { +core.Map buildUnnamed53() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed55(core.Map o) { +void checkUnnamed53(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -4097,12 +4068,12 @@ void checkUnnamed55(core.Map o) { ); } -core.List buildUnnamed56() => [ +core.List buildUnnamed54() => [ buildEnterpriseCrmEventbusProtoNextTask(), buildEnterpriseCrmEventbusProtoNextTask(), ]; -void checkUnnamed56(core.List o) { +void checkUnnamed54(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkEnterpriseCrmEventbusProtoNextTask(o[0]); checkEnterpriseCrmEventbusProtoNextTask(o[1]); @@ -4120,14 +4091,16 @@ api.EnterpriseCrmFrontendsEventbusProtoTriggerConfig o.description = 'foo'; o.enabledClients = buildUnnamed52(); o.errorCatcherId = 'foo'; - o.inputVariables = buildUnnamed53(); + o.inputVariables = + buildEnterpriseCrmFrontendsEventbusProtoTriggerConfigVariables(); o.label = 'foo'; o.nextTasksExecutionPolicy = 'foo'; - o.outputVariables = buildUnnamed54(); + o.outputVariables = + buildEnterpriseCrmFrontendsEventbusProtoTriggerConfigVariables(); o.pauseWorkflowExecutions = true; o.position = buildEnterpriseCrmEventbusProtoCoordinate(); - o.properties = buildUnnamed55(); - o.startTasks = buildUnnamed56(); + o.properties = buildUnnamed53(); + o.startTasks = buildUnnamed54(); o.triggerCriteria = buildEnterpriseCrmEventbusProtoTriggerCriteria(); o.triggerId = 'foo'; o.triggerName = 'foo'; @@ -4154,7 +4127,8 @@ void checkEnterpriseCrmFrontendsEventbusProtoTriggerConfig( o.errorCatcherId!, unittest.equals('foo'), ); - checkUnnamed53(o.inputVariables!); + checkEnterpriseCrmFrontendsEventbusProtoTriggerConfigVariables( + o.inputVariables!); unittest.expect( o.label!, unittest.equals('foo'), @@ -4163,11 +4137,12 @@ void checkEnterpriseCrmFrontendsEventbusProtoTriggerConfig( o.nextTasksExecutionPolicy!, unittest.equals('foo'), ); - checkUnnamed54(o.outputVariables!); + checkEnterpriseCrmFrontendsEventbusProtoTriggerConfigVariables( + o.outputVariables!); unittest.expect(o.pauseWorkflowExecutions!, unittest.isTrue); checkEnterpriseCrmEventbusProtoCoordinate(o.position!); - checkUnnamed55(o.properties!); - checkUnnamed56(o.startTasks!); + checkUnnamed53(o.properties!); + checkUnnamed54(o.startTasks!); checkEnterpriseCrmEventbusProtoTriggerCriteria(o.triggerCriteria!); unittest.expect( o.triggerId!, @@ -4189,13 +4164,54 @@ void 
checkEnterpriseCrmFrontendsEventbusProtoTriggerConfig( buildCounterEnterpriseCrmFrontendsEventbusProtoTriggerConfig--; } +core.List buildUnnamed55() => [ + 'foo', + 'foo', + ]; + +void checkUnnamed55(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o[0], + unittest.equals('foo'), + ); + unittest.expect( + o[1], + unittest.equals('foo'), + ); +} + +core.int buildCounterEnterpriseCrmFrontendsEventbusProtoTriggerConfigVariables = + 0; +api.EnterpriseCrmFrontendsEventbusProtoTriggerConfigVariables + buildEnterpriseCrmFrontendsEventbusProtoTriggerConfigVariables() { + final o = api.EnterpriseCrmFrontendsEventbusProtoTriggerConfigVariables(); + buildCounterEnterpriseCrmFrontendsEventbusProtoTriggerConfigVariables++; + if (buildCounterEnterpriseCrmFrontendsEventbusProtoTriggerConfigVariables < + 3) { + o.names = buildUnnamed55(); + } + buildCounterEnterpriseCrmFrontendsEventbusProtoTriggerConfigVariables--; + return o; +} + +void checkEnterpriseCrmFrontendsEventbusProtoTriggerConfigVariables( + api.EnterpriseCrmFrontendsEventbusProtoTriggerConfigVariables o) { + buildCounterEnterpriseCrmFrontendsEventbusProtoTriggerConfigVariables++; + if (buildCounterEnterpriseCrmFrontendsEventbusProtoTriggerConfigVariables < + 3) { + checkUnnamed55(o.names!); + } + buildCounterEnterpriseCrmFrontendsEventbusProtoTriggerConfigVariables--; +} + core.List - buildUnnamed57() => [ + buildUnnamed56() => [ buildEnterpriseCrmFrontendsEventbusProtoWorkflowParameterEntry(), buildEnterpriseCrmFrontendsEventbusProtoWorkflowParameterEntry(), ]; -void checkUnnamed57( +void checkUnnamed56( core.List o) { unittest.expect(o, unittest.hasLength(2)); @@ -4212,7 +4228,7 @@ api.EnterpriseCrmFrontendsEventbusProtoWorkflowParameterEntry if (buildCounterEnterpriseCrmFrontendsEventbusProtoWorkflowParameterEntry < 3) { o.attributes = buildEnterpriseCrmEventbusProtoAttributes(); - o.children = buildUnnamed57(); + o.children = buildUnnamed56(); o.containsLargeData = true; o.dataType = 'foo'; o.defaultValue = @@ -4239,7 +4255,7 @@ void checkEnterpriseCrmFrontendsEventbusProtoWorkflowParameterEntry( if (buildCounterEnterpriseCrmFrontendsEventbusProtoWorkflowParameterEntry < 3) { checkEnterpriseCrmEventbusProtoAttributes(o.attributes!); - checkUnnamed57(o.children!); + checkUnnamed56(o.children!); unittest.expect(o.containsLargeData!, unittest.isTrue); unittest.expect( o.dataType!, @@ -4286,12 +4302,12 @@ void checkEnterpriseCrmFrontendsEventbusProtoWorkflowParameterEntry( } core.List - buildUnnamed58() => [ + buildUnnamed57() => [ buildEnterpriseCrmFrontendsEventbusProtoWorkflowParameterEntry(), buildEnterpriseCrmFrontendsEventbusProtoWorkflowParameterEntry(), ]; -void checkUnnamed58( +void checkUnnamed57( core.List o) { unittest.expect(o, unittest.hasLength(2)); @@ -4305,7 +4321,7 @@ api.EnterpriseCrmFrontendsEventbusProtoWorkflowParameters final o = api.EnterpriseCrmFrontendsEventbusProtoWorkflowParameters(); buildCounterEnterpriseCrmFrontendsEventbusProtoWorkflowParameters++; if (buildCounterEnterpriseCrmFrontendsEventbusProtoWorkflowParameters < 3) { - o.parameters = buildUnnamed58(); + o.parameters = buildUnnamed57(); } buildCounterEnterpriseCrmFrontendsEventbusProtoWorkflowParameters--; return o; @@ -4315,17 +4331,17 @@ void checkEnterpriseCrmFrontendsEventbusProtoWorkflowParameters( api.EnterpriseCrmFrontendsEventbusProtoWorkflowParameters o) { buildCounterEnterpriseCrmFrontendsEventbusProtoWorkflowParameters++; if (buildCounterEnterpriseCrmFrontendsEventbusProtoWorkflowParameters < 3) { - 
checkUnnamed58(o.parameters!); + checkUnnamed57(o.parameters!); } buildCounterEnterpriseCrmFrontendsEventbusProtoWorkflowParameters--; } -core.List buildUnnamed59() => [ +core.List buildUnnamed58() => [ buildGoogleCloudConnectorsV1ConfigVariable(), buildGoogleCloudConnectorsV1ConfigVariable(), ]; -void checkUnnamed59(core.List o) { +void checkUnnamed58(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudConnectorsV1ConfigVariable(o[0]); checkGoogleCloudConnectorsV1ConfigVariable(o[1]); @@ -4336,7 +4352,7 @@ api.GoogleCloudConnectorsV1AuthConfig buildGoogleCloudConnectorsV1AuthConfig() { final o = api.GoogleCloudConnectorsV1AuthConfig(); buildCounterGoogleCloudConnectorsV1AuthConfig++; if (buildCounterGoogleCloudConnectorsV1AuthConfig < 3) { - o.additionalVariables = buildUnnamed59(); + o.additionalVariables = buildUnnamed58(); o.authKey = 'foo'; o.authType = 'foo'; o.oauth2AuthCodeFlow = @@ -4357,7 +4373,7 @@ void checkGoogleCloudConnectorsV1AuthConfig( api.GoogleCloudConnectorsV1AuthConfig o) { buildCounterGoogleCloudConnectorsV1AuthConfig++; if (buildCounterGoogleCloudConnectorsV1AuthConfig < 3) { - checkUnnamed59(o.additionalVariables!); + checkUnnamed58(o.additionalVariables!); unittest.expect( o.authKey!, unittest.equals('foo'), @@ -4379,12 +4395,12 @@ void checkGoogleCloudConnectorsV1AuthConfig( buildCounterGoogleCloudConnectorsV1AuthConfig--; } -core.List buildUnnamed60() => [ +core.List buildUnnamed59() => [ 'foo', 'foo', ]; -void checkUnnamed60(core.List o) { +void checkUnnamed59(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -4409,7 +4425,7 @@ api.GoogleCloudConnectorsV1AuthConfigOauth2AuthCodeFlow o.enablePkce = true; o.pkceVerifier = 'foo'; o.redirectUri = 'foo'; - o.scopes = buildUnnamed60(); + o.scopes = buildUnnamed59(); } buildCounterGoogleCloudConnectorsV1AuthConfigOauth2AuthCodeFlow--; return o; @@ -4441,17 +4457,17 @@ void checkGoogleCloudConnectorsV1AuthConfigOauth2AuthCodeFlow( o.redirectUri!, unittest.equals('foo'), ); - checkUnnamed60(o.scopes!); + checkUnnamed59(o.scopes!); } buildCounterGoogleCloudConnectorsV1AuthConfigOauth2AuthCodeFlow--; } -core.List buildUnnamed61() => [ +core.List buildUnnamed60() => [ 'foo', 'foo', ]; -void checkUnnamed61(core.List o) { +void checkUnnamed60(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -4475,7 +4491,7 @@ api.GoogleCloudConnectorsV1AuthConfigOauth2AuthCodeFlowGoogleManaged 3) { o.authCode = 'foo'; o.redirectUri = 'foo'; - o.scopes = buildUnnamed61(); + o.scopes = buildUnnamed60(); } buildCounterGoogleCloudConnectorsV1AuthConfigOauth2AuthCodeFlowGoogleManaged--; return o; @@ -4494,7 +4510,7 @@ void checkGoogleCloudConnectorsV1AuthConfigOauth2AuthCodeFlowGoogleManaged( o.redirectUri!, unittest.equals('foo'), ); - checkUnnamed61(o.scopes!); + checkUnnamed60(o.scopes!); } buildCounterGoogleCloudConnectorsV1AuthConfigOauth2AuthCodeFlowGoogleManaged--; } @@ -4713,34 +4729,34 @@ void checkGoogleCloudConnectorsV1ConfigVariable( buildCounterGoogleCloudConnectorsV1ConfigVariable--; } -core.List buildUnnamed62() => [ +core.List buildUnnamed61() => [ buildGoogleCloudConnectorsV1ConfigVariable(), buildGoogleCloudConnectorsV1ConfigVariable(), ]; -void checkUnnamed62(core.List o) { +void checkUnnamed61(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudConnectorsV1ConfigVariable(o[0]); checkGoogleCloudConnectorsV1ConfigVariable(o[1]); } -core.List buildUnnamed63() => [ +core.List buildUnnamed62() => [ 
buildGoogleCloudConnectorsV1DestinationConfig(), buildGoogleCloudConnectorsV1DestinationConfig(), ]; -void checkUnnamed63(core.List o) { +void checkUnnamed62(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudConnectorsV1DestinationConfig(o[0]); checkGoogleCloudConnectorsV1DestinationConfig(o[1]); } -core.Map buildUnnamed64() => { +core.Map buildUnnamed63() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed64(core.Map o) { +void checkUnnamed63(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -4761,7 +4777,7 @@ api.GoogleCloudConnectorsV1Connection buildGoogleCloudConnectorsV1Connection() { o.authConfig = buildGoogleCloudConnectorsV1AuthConfig(); o.authOverrideEnabled = true; o.billingConfig = buildGoogleCloudConnectorsV1BillingConfig(); - o.configVariables = buildUnnamed62(); + o.configVariables = buildUnnamed61(); o.connectionRevision = 'foo'; o.connectorVersion = 'foo'; o.connectorVersionInfraConfig = @@ -4769,7 +4785,7 @@ api.GoogleCloudConnectorsV1Connection buildGoogleCloudConnectorsV1Connection() { o.connectorVersionLaunchStage = 'foo'; o.createTime = 'foo'; o.description = 'foo'; - o.destinationConfigs = buildUnnamed63(); + o.destinationConfigs = buildUnnamed62(); o.envoyImageLocation = 'foo'; o.eventingConfig = buildGoogleCloudConnectorsV1EventingConfig(); o.eventingEnablementType = 'foo'; @@ -4777,7 +4793,7 @@ api.GoogleCloudConnectorsV1Connection buildGoogleCloudConnectorsV1Connection() { o.host = 'foo'; o.imageLocation = 'foo'; o.isTrustedTester = true; - o.labels = buildUnnamed64(); + o.labels = buildUnnamed63(); o.lockConfig = buildGoogleCloudConnectorsV1LockConfig(); o.logConfig = buildGoogleCloudConnectorsV1LogConfig(); o.name = 'foo'; @@ -4803,7 +4819,7 @@ void checkGoogleCloudConnectorsV1Connection( checkGoogleCloudConnectorsV1AuthConfig(o.authConfig!); unittest.expect(o.authOverrideEnabled!, unittest.isTrue); checkGoogleCloudConnectorsV1BillingConfig(o.billingConfig!); - checkUnnamed62(o.configVariables!); + checkUnnamed61(o.configVariables!); unittest.expect( o.connectionRevision!, unittest.equals('foo'), @@ -4826,7 +4842,7 @@ void checkGoogleCloudConnectorsV1Connection( o.description!, unittest.equals('foo'), ); - checkUnnamed63(o.destinationConfigs!); + checkUnnamed62(o.destinationConfigs!); unittest.expect( o.envoyImageLocation!, unittest.equals('foo'), @@ -4846,7 +4862,7 @@ void checkGoogleCloudConnectorsV1Connection( unittest.equals('foo'), ); unittest.expect(o.isTrustedTester!, unittest.isTrue); - checkUnnamed64(o.labels!); + checkUnnamed63(o.labels!); checkGoogleCloudConnectorsV1LockConfig(o.lockConfig!); checkGoogleCloudConnectorsV1LogConfig(o.logConfig!); unittest.expect( @@ -5014,12 +5030,12 @@ void checkGoogleCloudConnectorsV1Destination( buildCounterGoogleCloudConnectorsV1Destination--; } -core.List buildUnnamed65() => [ +core.List buildUnnamed64() => [ buildGoogleCloudConnectorsV1Destination(), buildGoogleCloudConnectorsV1Destination(), ]; -void checkUnnamed65(core.List o) { +void checkUnnamed64(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudConnectorsV1Destination(o[0]); checkGoogleCloudConnectorsV1Destination(o[1]); @@ -5031,7 +5047,7 @@ api.GoogleCloudConnectorsV1DestinationConfig final o = api.GoogleCloudConnectorsV1DestinationConfig(); buildCounterGoogleCloudConnectorsV1DestinationConfig++; if (buildCounterGoogleCloudConnectorsV1DestinationConfig < 3) { - o.destinations = buildUnnamed65(); + o.destinations = buildUnnamed64(); o.key = 'foo'; } 
buildCounterGoogleCloudConnectorsV1DestinationConfig--; @@ -5042,7 +5058,7 @@ void checkGoogleCloudConnectorsV1DestinationConfig( api.GoogleCloudConnectorsV1DestinationConfig o) { buildCounterGoogleCloudConnectorsV1DestinationConfig++; if (buildCounterGoogleCloudConnectorsV1DestinationConfig < 3) { - checkUnnamed65(o.destinations!); + checkUnnamed64(o.destinations!); unittest.expect( o.key!, unittest.equals('foo'), @@ -5080,12 +5096,12 @@ void checkGoogleCloudConnectorsV1EncryptionKey( buildCounterGoogleCloudConnectorsV1EncryptionKey--; } -core.List buildUnnamed66() => [ +core.List buildUnnamed65() => [ buildGoogleCloudConnectorsV1ConfigVariable(), buildGoogleCloudConnectorsV1ConfigVariable(), ]; -void checkUnnamed66(core.List o) { +void checkUnnamed65(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudConnectorsV1ConfigVariable(o[0]); checkGoogleCloudConnectorsV1ConfigVariable(o[1]); @@ -5097,7 +5113,7 @@ api.GoogleCloudConnectorsV1EventingConfig final o = api.GoogleCloudConnectorsV1EventingConfig(); buildCounterGoogleCloudConnectorsV1EventingConfig++; if (buildCounterGoogleCloudConnectorsV1EventingConfig < 3) { - o.additionalVariables = buildUnnamed66(); + o.additionalVariables = buildUnnamed65(); o.authConfig = buildGoogleCloudConnectorsV1AuthConfig(); o.deadLetterConfig = buildGoogleCloudConnectorsV1EventingConfigDeadLetterConfig(); @@ -5117,7 +5133,7 @@ void checkGoogleCloudConnectorsV1EventingConfig( api.GoogleCloudConnectorsV1EventingConfig o) { buildCounterGoogleCloudConnectorsV1EventingConfig++; if (buildCounterGoogleCloudConnectorsV1EventingConfig < 3) { - checkUnnamed66(o.additionalVariables!); + checkUnnamed65(o.additionalVariables!); checkGoogleCloudConnectorsV1AuthConfig(o.authConfig!); checkGoogleCloudConnectorsV1EventingConfigDeadLetterConfig( o.deadLetterConfig!); @@ -5175,6 +5191,8 @@ api.GoogleCloudConnectorsV1EventingRuntimeData o.status = buildGoogleCloudConnectorsV1EventingStatus(); o.webhookData = buildGoogleCloudConnectorsV1EventingRuntimeDataWebhookData(); + o.webhookSubscriptions = + buildGoogleCloudConnectorsV1EventingRuntimeDataWebhookSubscriptions(); } buildCounterGoogleCloudConnectorsV1EventingRuntimeData--; return o; @@ -5194,16 +5212,18 @@ void checkGoogleCloudConnectorsV1EventingRuntimeData( ); checkGoogleCloudConnectorsV1EventingStatus(o.status!); checkGoogleCloudConnectorsV1EventingRuntimeDataWebhookData(o.webhookData!); + checkGoogleCloudConnectorsV1EventingRuntimeDataWebhookSubscriptions( + o.webhookSubscriptions!); } buildCounterGoogleCloudConnectorsV1EventingRuntimeData--; } -core.List buildUnnamed67() => [ +core.List buildUnnamed66() => [ buildGoogleCloudConnectorsV1ConfigVariable(), buildGoogleCloudConnectorsV1ConfigVariable(), ]; -void checkUnnamed67(core.List o) { +void checkUnnamed66(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudConnectorsV1ConfigVariable(o[0]); checkGoogleCloudConnectorsV1ConfigVariable(o[1]); @@ -5215,7 +5235,7 @@ api.GoogleCloudConnectorsV1EventingRuntimeDataWebhookData final o = api.GoogleCloudConnectorsV1EventingRuntimeDataWebhookData(); buildCounterGoogleCloudConnectorsV1EventingRuntimeDataWebhookData++; if (buildCounterGoogleCloudConnectorsV1EventingRuntimeDataWebhookData < 3) { - o.additionalVariables = buildUnnamed67(); + o.additionalVariables = buildUnnamed66(); o.createTime = 'foo'; o.id = 'foo'; o.name = 'foo'; @@ -5230,7 +5250,7 @@ void checkGoogleCloudConnectorsV1EventingRuntimeDataWebhookData( api.GoogleCloudConnectorsV1EventingRuntimeDataWebhookData o) { 
buildCounterGoogleCloudConnectorsV1EventingRuntimeDataWebhookData++; if (buildCounterGoogleCloudConnectorsV1EventingRuntimeDataWebhookData < 3) { - checkUnnamed67(o.additionalVariables!); + checkUnnamed66(o.additionalVariables!); unittest.expect( o.createTime!, unittest.equals('foo'), @@ -5255,6 +5275,45 @@ void checkGoogleCloudConnectorsV1EventingRuntimeDataWebhookData( buildCounterGoogleCloudConnectorsV1EventingRuntimeDataWebhookData--; } +core.List + buildUnnamed67() => [ + buildGoogleCloudConnectorsV1EventingRuntimeDataWebhookData(), + buildGoogleCloudConnectorsV1EventingRuntimeDataWebhookData(), + ]; + +void checkUnnamed67( + core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkGoogleCloudConnectorsV1EventingRuntimeDataWebhookData(o[0]); + checkGoogleCloudConnectorsV1EventingRuntimeDataWebhookData(o[1]); +} + +core.int + buildCounterGoogleCloudConnectorsV1EventingRuntimeDataWebhookSubscriptions = + 0; +api.GoogleCloudConnectorsV1EventingRuntimeDataWebhookSubscriptions + buildGoogleCloudConnectorsV1EventingRuntimeDataWebhookSubscriptions() { + final o = + api.GoogleCloudConnectorsV1EventingRuntimeDataWebhookSubscriptions(); + buildCounterGoogleCloudConnectorsV1EventingRuntimeDataWebhookSubscriptions++; + if (buildCounterGoogleCloudConnectorsV1EventingRuntimeDataWebhookSubscriptions < + 3) { + o.webhookData = buildUnnamed67(); + } + buildCounterGoogleCloudConnectorsV1EventingRuntimeDataWebhookSubscriptions--; + return o; +} + +void checkGoogleCloudConnectorsV1EventingRuntimeDataWebhookSubscriptions( + api.GoogleCloudConnectorsV1EventingRuntimeDataWebhookSubscriptions o) { + buildCounterGoogleCloudConnectorsV1EventingRuntimeDataWebhookSubscriptions++; + if (buildCounterGoogleCloudConnectorsV1EventingRuntimeDataWebhookSubscriptions < + 3) { + checkUnnamed67(o.webhookData!); + } + buildCounterGoogleCloudConnectorsV1EventingRuntimeDataWebhookSubscriptions--; +} + core.int buildCounterGoogleCloudConnectorsV1EventingStatus = 0; api.GoogleCloudConnectorsV1EventingStatus buildGoogleCloudConnectorsV1EventingStatus() { @@ -5575,6 +5634,49 @@ void checkGoogleCloudIntegrationsV1alphaAccessToken( buildCounterGoogleCloudIntegrationsV1alphaAccessToken--; } +core.List buildUnnamed69() => [ + 'foo', + 'foo', + ]; + +void checkUnnamed69(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o[0], + unittest.equals('foo'), + ); + unittest.expect( + o[1], + unittest.equals('foo'), + ); +} + +core.int buildCounterGoogleCloudIntegrationsV1alphaApiTriggerResource = 0; +api.GoogleCloudIntegrationsV1alphaApiTriggerResource + buildGoogleCloudIntegrationsV1alphaApiTriggerResource() { + final o = api.GoogleCloudIntegrationsV1alphaApiTriggerResource(); + buildCounterGoogleCloudIntegrationsV1alphaApiTriggerResource++; + if (buildCounterGoogleCloudIntegrationsV1alphaApiTriggerResource < 3) { + o.integrationResource = 'foo'; + o.triggerId = buildUnnamed69(); + } + buildCounterGoogleCloudIntegrationsV1alphaApiTriggerResource--; + return o; +} + +void checkGoogleCloudIntegrationsV1alphaApiTriggerResource( + api.GoogleCloudIntegrationsV1alphaApiTriggerResource o) { + buildCounterGoogleCloudIntegrationsV1alphaApiTriggerResource++; + if (buildCounterGoogleCloudIntegrationsV1alphaApiTriggerResource < 3) { + unittest.expect( + o.integrationResource!, + unittest.equals('foo'), + ); + checkUnnamed69(o.triggerId!); + } + buildCounterGoogleCloudIntegrationsV1alphaApiTriggerResource--; +} + core.int buildCounterGoogleCloudIntegrationsV1alphaAssertion = 0; 
api.GoogleCloudIntegrationsV1alphaAssertion buildGoogleCloudIntegrationsV1alphaAssertion() { @@ -5681,12 +5783,12 @@ void checkGoogleCloudIntegrationsV1alphaAttemptStats( buildCounterGoogleCloudIntegrationsV1alphaAttemptStats--; } -core.List buildUnnamed69() => [ +core.List buildUnnamed70() => [ 'foo', 'foo', ]; -void checkUnnamed69(core.List o) { +void checkUnnamed70(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -5712,7 +5814,7 @@ api.GoogleCloudIntegrationsV1alphaAuthConfig o.description = 'foo'; o.displayName = 'foo'; o.encryptedCredential = 'foo'; - o.expiryNotificationDuration = buildUnnamed69(); + o.expiryNotificationDuration = buildUnnamed70(); o.lastModifierEmail = 'foo'; o.name = 'foo'; o.overrideValidTime = 'foo'; @@ -5759,7 +5861,7 @@ void checkGoogleCloudIntegrationsV1alphaAuthConfig( o.encryptedCredential!, unittest.equals('foo'), ); - checkUnnamed69(o.expiryNotificationDuration!); + checkUnnamed70(o.expiryNotificationDuration!); unittest.expect( o.lastModifierEmail!, unittest.equals('foo'), @@ -5825,12 +5927,12 @@ void checkGoogleCloudIntegrationsV1alphaAuthToken( buildCounterGoogleCloudIntegrationsV1alphaAuthToken--; } -core.List buildUnnamed70() => [ +core.List buildUnnamed71() => [ true, true, ]; -void checkUnnamed70(core.List o) { +void checkUnnamed71(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect(o[0], unittest.isTrue); unittest.expect(o[1], unittest.isTrue); @@ -5842,7 +5944,7 @@ api.GoogleCloudIntegrationsV1alphaBooleanParameterArray final o = api.GoogleCloudIntegrationsV1alphaBooleanParameterArray(); buildCounterGoogleCloudIntegrationsV1alphaBooleanParameterArray++; if (buildCounterGoogleCloudIntegrationsV1alphaBooleanParameterArray < 3) { - o.booleanValues = buildUnnamed70(); + o.booleanValues = buildUnnamed71(); } buildCounterGoogleCloudIntegrationsV1alphaBooleanParameterArray--; return o; @@ -5852,7 +5954,7 @@ void checkGoogleCloudIntegrationsV1alphaBooleanParameterArray( api.GoogleCloudIntegrationsV1alphaBooleanParameterArray o) { buildCounterGoogleCloudIntegrationsV1alphaBooleanParameterArray++; if (buildCounterGoogleCloudIntegrationsV1alphaBooleanParameterArray < 3) { - checkUnnamed70(o.booleanValues!); + checkUnnamed71(o.booleanValues!); } buildCounterGoogleCloudIntegrationsV1alphaBooleanParameterArray--; } @@ -6178,12 +6280,12 @@ void checkGoogleCloudIntegrationsV1alphaCloudSchedulerConfig( buildCounterGoogleCloudIntegrationsV1alphaCloudSchedulerConfig--; } -core.List buildUnnamed71() => [ +core.List buildUnnamed72() => [ buildGoogleCloudIntegrationsV1alphaFailurePolicy(), buildGoogleCloudIntegrationsV1alphaFailurePolicy(), ]; -void checkUnnamed71( +void checkUnnamed72( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudIntegrationsV1alphaFailurePolicy(o[0]); @@ -6199,7 +6301,7 @@ api.GoogleCloudIntegrationsV1alphaConditionalFailurePolicies if (buildCounterGoogleCloudIntegrationsV1alphaConditionalFailurePolicies < 3) { o.defaultFailurePolicy = buildGoogleCloudIntegrationsV1alphaFailurePolicy(); - o.failurePolicies = buildUnnamed71(); + o.failurePolicies = buildUnnamed72(); } buildCounterGoogleCloudIntegrationsV1alphaConditionalFailurePolicies--; return o; @@ -6211,17 +6313,17 @@ void checkGoogleCloudIntegrationsV1alphaConditionalFailurePolicies( if (buildCounterGoogleCloudIntegrationsV1alphaConditionalFailurePolicies < 3) { checkGoogleCloudIntegrationsV1alphaFailurePolicy(o.defaultFailurePolicy!); - checkUnnamed71(o.failurePolicies!); + checkUnnamed72(o.failurePolicies!); } 
buildCounterGoogleCloudIntegrationsV1alphaConditionalFailurePolicies--; } -core.List buildUnnamed72() => [ +core.List buildUnnamed73() => [ 'foo', 'foo', ]; -void checkUnnamed72(core.List o) { +void checkUnnamed73(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -6233,12 +6335,12 @@ void checkUnnamed72(core.List o) { ); } -core.List buildUnnamed73() => [ +core.List buildUnnamed74() => [ 'foo', 'foo', ]; -void checkUnnamed73(core.List o) { +void checkUnnamed74(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -6256,8 +6358,8 @@ api.GoogleCloudIntegrationsV1alphaConnectionSchemaMetadata final o = api.GoogleCloudIntegrationsV1alphaConnectionSchemaMetadata(); buildCounterGoogleCloudIntegrationsV1alphaConnectionSchemaMetadata++; if (buildCounterGoogleCloudIntegrationsV1alphaConnectionSchemaMetadata < 3) { - o.actions = buildUnnamed72(); - o.entities = buildUnnamed73(); + o.actions = buildUnnamed73(); + o.entities = buildUnnamed74(); } buildCounterGoogleCloudIntegrationsV1alphaConnectionSchemaMetadata--; return o; @@ -6267,8 +6369,8 @@ void checkGoogleCloudIntegrationsV1alphaConnectionSchemaMetadata( api.GoogleCloudIntegrationsV1alphaConnectionSchemaMetadata o) { buildCounterGoogleCloudIntegrationsV1alphaConnectionSchemaMetadata++; if (buildCounterGoogleCloudIntegrationsV1alphaConnectionSchemaMetadata < 3) { - checkUnnamed72(o.actions!); - checkUnnamed73(o.entities!); + checkUnnamed73(o.actions!); + checkUnnamed74(o.entities!); } buildCounterGoogleCloudIntegrationsV1alphaConnectionSchemaMetadata--; } @@ -6494,12 +6596,12 @@ void checkGoogleCloudIntegrationsV1alphaDeprovisionClientRequest( buildCounterGoogleCloudIntegrationsV1alphaDeprovisionClientRequest--; } -core.List buildUnnamed74() => [ +core.List buildUnnamed75() => [ 42.0, 42.0, ]; -void checkUnnamed74(core.List o) { +void checkUnnamed75(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -6517,7 +6619,7 @@ api.GoogleCloudIntegrationsV1alphaDoubleParameterArray final o = api.GoogleCloudIntegrationsV1alphaDoubleParameterArray(); buildCounterGoogleCloudIntegrationsV1alphaDoubleParameterArray++; if (buildCounterGoogleCloudIntegrationsV1alphaDoubleParameterArray < 3) { - o.doubleValues = buildUnnamed74(); + o.doubleValues = buildUnnamed75(); } buildCounterGoogleCloudIntegrationsV1alphaDoubleParameterArray--; return o; @@ -6527,7 +6629,7 @@ void checkGoogleCloudIntegrationsV1alphaDoubleParameterArray( api.GoogleCloudIntegrationsV1alphaDoubleParameterArray o) { buildCounterGoogleCloudIntegrationsV1alphaDoubleParameterArray++; if (buildCounterGoogleCloudIntegrationsV1alphaDoubleParameterArray < 3) { - checkUnnamed74(o.doubleValues!); + checkUnnamed75(o.doubleValues!); } buildCounterGoogleCloudIntegrationsV1alphaDoubleParameterArray--; } @@ -6557,13 +6659,13 @@ void checkGoogleCloudIntegrationsV1alphaDownloadExecutionResponse( buildCounterGoogleCloudIntegrationsV1alphaDownloadExecutionResponse--; } -core.List buildUnnamed75() => +core.List buildUnnamed76() => [ buildGoogleCloudIntegrationsV1alphaSerializedFile(), buildGoogleCloudIntegrationsV1alphaSerializedFile(), ]; -void checkUnnamed75( +void checkUnnamed76( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudIntegrationsV1alphaSerializedFile(o[0]); @@ -6581,7 +6683,7 @@ api.GoogleCloudIntegrationsV1alphaDownloadIntegrationVersionResponse if (buildCounterGoogleCloudIntegrationsV1alphaDownloadIntegrationVersionResponse < 3) { o.content = 'foo'; - o.files = buildUnnamed75(); 
+ o.files = buildUnnamed76(); } buildCounterGoogleCloudIntegrationsV1alphaDownloadIntegrationVersionResponse--; return o; @@ -6596,17 +6698,17 @@ void checkGoogleCloudIntegrationsV1alphaDownloadIntegrationVersionResponse( o.content!, unittest.equals('foo'), ); - checkUnnamed75(o.files!); + checkUnnamed76(o.files!); } buildCounterGoogleCloudIntegrationsV1alphaDownloadIntegrationVersionResponse--; } -core.List buildUnnamed76() => [ +core.List buildUnnamed77() => [ buildGoogleCloudIntegrationsV1alphaFile(), buildGoogleCloudIntegrationsV1alphaFile(), ]; -void checkUnnamed76(core.List o) { +void checkUnnamed77(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudIntegrationsV1alphaFile(o[0]); checkGoogleCloudIntegrationsV1alphaFile(o[1]); @@ -6620,7 +6722,7 @@ api.GoogleCloudIntegrationsV1alphaDownloadJsonPackageResponse buildCounterGoogleCloudIntegrationsV1alphaDownloadJsonPackageResponse++; if (buildCounterGoogleCloudIntegrationsV1alphaDownloadJsonPackageResponse < 3) { - o.files = buildUnnamed76(); + o.files = buildUnnamed77(); } buildCounterGoogleCloudIntegrationsV1alphaDownloadJsonPackageResponse--; return o; @@ -6631,7 +6733,7 @@ void checkGoogleCloudIntegrationsV1alphaDownloadJsonPackageResponse( buildCounterGoogleCloudIntegrationsV1alphaDownloadJsonPackageResponse++; if (buildCounterGoogleCloudIntegrationsV1alphaDownloadJsonPackageResponse < 3) { - checkUnnamed76(o.files!); + checkUnnamed77(o.files!); } buildCounterGoogleCloudIntegrationsV1alphaDownloadJsonPackageResponse--; } @@ -6684,12 +6786,12 @@ void checkGoogleCloudIntegrationsV1alphaDownloadTestCaseResponse( buildCounterGoogleCloudIntegrationsV1alphaDownloadTestCaseResponse--; } -core.List buildUnnamed77() => [ +core.List buildUnnamed78() => [ 'foo', 'foo', ]; -void checkUnnamed77(core.List o) { +void checkUnnamed78(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -6711,7 +6813,7 @@ api.GoogleCloudIntegrationsV1alphaEnumerateConnectorPlatformRegionsResponse buildCounterGoogleCloudIntegrationsV1alphaEnumerateConnectorPlatformRegionsResponse++; if (buildCounterGoogleCloudIntegrationsV1alphaEnumerateConnectorPlatformRegionsResponse < 3) { - o.regions = buildUnnamed77(); + o.regions = buildUnnamed78(); } buildCounterGoogleCloudIntegrationsV1alphaEnumerateConnectorPlatformRegionsResponse--; return o; @@ -6723,17 +6825,17 @@ void checkGoogleCloudIntegrationsV1alphaEnumerateConnectorPlatformRegionsRespons buildCounterGoogleCloudIntegrationsV1alphaEnumerateConnectorPlatformRegionsResponse++; if (buildCounterGoogleCloudIntegrationsV1alphaEnumerateConnectorPlatformRegionsResponse < 3) { - checkUnnamed77(o.regions!); + checkUnnamed78(o.regions!); } buildCounterGoogleCloudIntegrationsV1alphaEnumerateConnectorPlatformRegionsResponse--; } -core.List buildUnnamed78() => [ +core.List buildUnnamed79() => [ buildGoogleCloudIntegrationsV1alphaNextTask(), buildGoogleCloudIntegrationsV1alphaNextTask(), ]; -void checkUnnamed78(core.List o) { +void checkUnnamed79(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudIntegrationsV1alphaNextTask(o[0]); checkGoogleCloudIntegrationsV1alphaNextTask(o[1]); @@ -6750,7 +6852,7 @@ api.GoogleCloudIntegrationsV1alphaErrorCatcherConfig o.errorCatcherNumber = 'foo'; o.label = 'foo'; o.position = buildGoogleCloudIntegrationsV1alphaCoordinate(); - o.startErrorTasks = buildUnnamed78(); + o.startErrorTasks = buildUnnamed79(); } buildCounterGoogleCloudIntegrationsV1alphaErrorCatcherConfig--; return o; @@ -6777,7 +6879,7 @@ void 
checkGoogleCloudIntegrationsV1alphaErrorCatcherConfig( unittest.equals('foo'), ); checkGoogleCloudIntegrationsV1alphaCoordinate(o.position!); - checkUnnamed78(o.startErrorTasks!); + checkUnnamed79(o.startErrorTasks!); } buildCounterGoogleCloudIntegrationsV1alphaErrorCatcherConfig--; } @@ -6835,12 +6937,12 @@ void checkGoogleCloudIntegrationsV1alphaExecuteEventResponse( } core.Map - buildUnnamed79() => { + buildUnnamed80() => { 'x': buildGoogleCloudIntegrationsV1alphaValueType(), 'y': buildGoogleCloudIntegrationsV1alphaValueType(), }; -void checkUnnamed79( +void checkUnnamed80( core.Map o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudIntegrationsV1alphaValueType(o['x']!); @@ -6848,12 +6950,12 @@ void checkUnnamed79( } core.List - buildUnnamed80() => [ + buildUnnamed81() => [ buildEnterpriseCrmFrontendsEventbusProtoParameterEntry(), buildEnterpriseCrmFrontendsEventbusProtoParameterEntry(), ]; -void checkUnnamed80( +void checkUnnamed81( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkEnterpriseCrmFrontendsEventbusProtoParameterEntry(o[0]); @@ -6870,8 +6972,8 @@ api.GoogleCloudIntegrationsV1alphaExecuteIntegrationsRequest 3) { o.doNotPropagateError = true; o.executionId = 'foo'; - o.inputParameters = buildUnnamed79(); - o.parameterEntries = buildUnnamed80(); + o.inputParameters = buildUnnamed80(); + o.parameterEntries = buildUnnamed81(); o.parameters = buildEnterpriseCrmFrontendsEventbusProtoEventParameters(); o.requestId = 'foo'; o.triggerId = 'foo'; @@ -6890,8 +6992,8 @@ void checkGoogleCloudIntegrationsV1alphaExecuteIntegrationsRequest( o.executionId!, unittest.equals('foo'), ); - checkUnnamed79(o.inputParameters!); - checkUnnamed80(o.parameterEntries!); + checkUnnamed80(o.inputParameters!); + checkUnnamed81(o.parameterEntries!); checkEnterpriseCrmFrontendsEventbusProtoEventParameters(o.parameters!); unittest.expect( o.requestId!, @@ -6905,7 +7007,7 @@ void checkGoogleCloudIntegrationsV1alphaExecuteIntegrationsRequest( buildCounterGoogleCloudIntegrationsV1alphaExecuteIntegrationsRequest--; } -core.Map buildUnnamed81() => { +core.Map buildUnnamed82() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -6918,7 +7020,7 @@ core.Map buildUnnamed81() => { }, }; -void checkUnnamed81(core.Map o) { +void checkUnnamed82(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted11 = (o['x']!) 
as core.Map; unittest.expect(casted11, unittest.hasLength(3)); @@ -6951,12 +7053,12 @@ void checkUnnamed81(core.Map o) { } core.List - buildUnnamed82() => [ + buildUnnamed83() => [ buildEnterpriseCrmFrontendsEventbusProtoParameterEntry(), buildEnterpriseCrmFrontendsEventbusProtoParameterEntry(), ]; -void checkUnnamed82( +void checkUnnamed83( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkEnterpriseCrmFrontendsEventbusProtoParameterEntry(o[0]); @@ -6975,8 +7077,8 @@ api.GoogleCloudIntegrationsV1alphaExecuteIntegrationsResponse buildEnterpriseCrmFrontendsEventbusProtoEventParameters(); o.executionFailed = true; o.executionId = 'foo'; - o.outputParameters = buildUnnamed81(); - o.parameterEntries = buildUnnamed82(); + o.outputParameters = buildUnnamed82(); + o.parameterEntries = buildUnnamed83(); } buildCounterGoogleCloudIntegrationsV1alphaExecuteIntegrationsResponse--; return o; @@ -6993,19 +7095,19 @@ void checkGoogleCloudIntegrationsV1alphaExecuteIntegrationsResponse( o.executionId!, unittest.equals('foo'), ); - checkUnnamed81(o.outputParameters!); - checkUnnamed82(o.parameterEntries!); + checkUnnamed82(o.outputParameters!); + checkUnnamed83(o.parameterEntries!); } buildCounterGoogleCloudIntegrationsV1alphaExecuteIntegrationsResponse--; } core.Map - buildUnnamed83() => { + buildUnnamed84() => { 'x': buildGoogleCloudIntegrationsV1alphaValueType(), 'y': buildGoogleCloudIntegrationsV1alphaValueType(), }; -void checkUnnamed83( +void checkUnnamed84( core.Map o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudIntegrationsV1alphaValueType(o['x']!); @@ -7018,7 +7120,7 @@ api.GoogleCloudIntegrationsV1alphaExecuteTestCaseRequest final o = api.GoogleCloudIntegrationsV1alphaExecuteTestCaseRequest(); buildCounterGoogleCloudIntegrationsV1alphaExecuteTestCaseRequest++; if (buildCounterGoogleCloudIntegrationsV1alphaExecuteTestCaseRequest < 3) { - o.inputParameters = buildUnnamed83(); + o.inputParameters = buildUnnamed84(); } buildCounterGoogleCloudIntegrationsV1alphaExecuteTestCaseRequest--; return o; @@ -7028,25 +7130,25 @@ void checkGoogleCloudIntegrationsV1alphaExecuteTestCaseRequest( api.GoogleCloudIntegrationsV1alphaExecuteTestCaseRequest o) { buildCounterGoogleCloudIntegrationsV1alphaExecuteTestCaseRequest++; if (buildCounterGoogleCloudIntegrationsV1alphaExecuteTestCaseRequest < 3) { - checkUnnamed83(o.inputParameters!); + checkUnnamed84(o.inputParameters!); } buildCounterGoogleCloudIntegrationsV1alphaExecuteTestCaseRequest--; } -core.List buildUnnamed84() => +core.List buildUnnamed85() => [ buildGoogleCloudIntegrationsV1alphaAssertionResult(), buildGoogleCloudIntegrationsV1alphaAssertionResult(), ]; -void checkUnnamed84( +void checkUnnamed85( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudIntegrationsV1alphaAssertionResult(o[0]); checkGoogleCloudIntegrationsV1alphaAssertionResult(o[1]); } -core.Map buildUnnamed85() => { +core.Map buildUnnamed86() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -7059,7 +7161,7 @@ core.Map buildUnnamed85() => { }, }; -void checkUnnamed85(core.Map o) { +void checkUnnamed86(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted13 = (o['x']!) 
as core.Map; unittest.expect(casted13, unittest.hasLength(3)); @@ -7097,9 +7199,9 @@ api.GoogleCloudIntegrationsV1alphaExecuteTestCaseResponse final o = api.GoogleCloudIntegrationsV1alphaExecuteTestCaseResponse(); buildCounterGoogleCloudIntegrationsV1alphaExecuteTestCaseResponse++; if (buildCounterGoogleCloudIntegrationsV1alphaExecuteTestCaseResponse < 3) { - o.assertionResults = buildUnnamed84(); + o.assertionResults = buildUnnamed85(); o.executionId = 'foo'; - o.outputParameters = buildUnnamed85(); + o.outputParameters = buildUnnamed86(); o.testExecutionState = 'foo'; } buildCounterGoogleCloudIntegrationsV1alphaExecuteTestCaseResponse--; @@ -7110,12 +7212,12 @@ void checkGoogleCloudIntegrationsV1alphaExecuteTestCaseResponse( api.GoogleCloudIntegrationsV1alphaExecuteTestCaseResponse o) { buildCounterGoogleCloudIntegrationsV1alphaExecuteTestCaseResponse++; if (buildCounterGoogleCloudIntegrationsV1alphaExecuteTestCaseResponse < 3) { - checkUnnamed84(o.assertionResults!); + checkUnnamed85(o.assertionResults!); unittest.expect( o.executionId!, unittest.equals('foo'), ); - checkUnnamed85(o.outputParameters!); + checkUnnamed86(o.outputParameters!); unittest.expect( o.testExecutionState!, unittest.equals('foo'), @@ -7124,24 +7226,24 @@ void checkGoogleCloudIntegrationsV1alphaExecuteTestCaseResponse( buildCounterGoogleCloudIntegrationsV1alphaExecuteTestCaseResponse--; } -core.List buildUnnamed86() => [ +core.List buildUnnamed87() => [ buildGoogleCloudIntegrationsV1alphaExecution(), buildGoogleCloudIntegrationsV1alphaExecution(), ]; -void checkUnnamed86(core.List o) { +void checkUnnamed87(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudIntegrationsV1alphaExecution(o[0]); checkGoogleCloudIntegrationsV1alphaExecution(o[1]); } core.Map - buildUnnamed87() => { + buildUnnamed88() => { 'x': buildGoogleCloudIntegrationsV1alphaValueType(), 'y': buildGoogleCloudIntegrationsV1alphaValueType(), }; -void checkUnnamed87( +void checkUnnamed88( core.Map o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudIntegrationsV1alphaValueType(o['x']!); @@ -7149,12 +7251,12 @@ void checkUnnamed87( } core.List - buildUnnamed88() => [ + buildUnnamed89() => [ buildEnterpriseCrmFrontendsEventbusProtoParameterEntry(), buildEnterpriseCrmFrontendsEventbusProtoParameterEntry(), ]; -void checkUnnamed88( +void checkUnnamed89( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkEnterpriseCrmFrontendsEventbusProtoParameterEntry(o[0]); @@ -7162,12 +7264,12 @@ void checkUnnamed88( } core.Map - buildUnnamed89() => { + buildUnnamed90() => { 'x': buildGoogleCloudIntegrationsV1alphaValueType(), 'y': buildGoogleCloudIntegrationsV1alphaValueType(), }; -void checkUnnamed89( +void checkUnnamed90( core.Map o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudIntegrationsV1alphaValueType(o['x']!); @@ -7175,12 +7277,12 @@ void checkUnnamed89( } core.List - buildUnnamed90() => [ + buildUnnamed91() => [ buildEnterpriseCrmFrontendsEventbusProtoParameterEntry(), buildEnterpriseCrmFrontendsEventbusProtoParameterEntry(), ]; -void checkUnnamed90( +void checkUnnamed91( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkEnterpriseCrmFrontendsEventbusProtoParameterEntry(o[0]); @@ -7196,7 +7298,7 @@ api.GoogleCloudIntegrationsV1alphaExecution o.cloudLoggingDetails = buildGoogleCloudIntegrationsV1alphaCloudLoggingDetails(); o.createTime = 'foo'; - o.directSubExecutions = buildUnnamed86(); + o.directSubExecutions = buildUnnamed87(); o.eventExecutionDetails = 
buildEnterpriseCrmEventbusProtoEventExecutionDetails(); o.executionDetails = buildGoogleCloudIntegrationsV1alphaExecutionDetails(); @@ -7204,10 +7306,10 @@ api.GoogleCloudIntegrationsV1alphaExecution o.integrationVersionState = 'foo'; o.name = 'foo'; o.replayInfo = buildGoogleCloudIntegrationsV1alphaExecutionReplayInfo(); - o.requestParameters = buildUnnamed87(); - o.requestParams = buildUnnamed88(); - o.responseParameters = buildUnnamed89(); - o.responseParams = buildUnnamed90(); + o.requestParameters = buildUnnamed88(); + o.requestParams = buildUnnamed89(); + o.responseParameters = buildUnnamed90(); + o.responseParams = buildUnnamed91(); o.snapshotNumber = 'foo'; o.triggerId = 'foo'; o.updateTime = 'foo'; @@ -7226,7 +7328,7 @@ void checkGoogleCloudIntegrationsV1alphaExecution( o.createTime!, unittest.equals('foo'), ); - checkUnnamed86(o.directSubExecutions!); + checkUnnamed87(o.directSubExecutions!); checkEnterpriseCrmEventbusProtoEventExecutionDetails( o.eventExecutionDetails!); checkGoogleCloudIntegrationsV1alphaExecutionDetails(o.executionDetails!); @@ -7243,10 +7345,10 @@ void checkGoogleCloudIntegrationsV1alphaExecution( unittest.equals('foo'), ); checkGoogleCloudIntegrationsV1alphaExecutionReplayInfo(o.replayInfo!); - checkUnnamed87(o.requestParameters!); - checkUnnamed88(o.requestParams!); - checkUnnamed89(o.responseParameters!); - checkUnnamed90(o.responseParams!); + checkUnnamed88(o.requestParameters!); + checkUnnamed89(o.requestParams!); + checkUnnamed90(o.responseParameters!); + checkUnnamed91(o.responseParams!); unittest.expect( o.snapshotNumber!, unittest.equals('foo'), @@ -7263,12 +7365,12 @@ void checkGoogleCloudIntegrationsV1alphaExecution( buildCounterGoogleCloudIntegrationsV1alphaExecution--; } -core.List buildUnnamed91() => [ +core.List buildUnnamed92() => [ buildGoogleCloudIntegrationsV1alphaAttemptStats(), buildGoogleCloudIntegrationsV1alphaAttemptStats(), ]; -void checkUnnamed91( +void checkUnnamed92( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudIntegrationsV1alphaAttemptStats(o[0]); @@ -7276,12 +7378,12 @@ void checkUnnamed91( } core.List - buildUnnamed92() => [ + buildUnnamed93() => [ buildGoogleCloudIntegrationsV1alphaExecutionSnapshot(), buildGoogleCloudIntegrationsV1alphaExecutionSnapshot(), ]; -void checkUnnamed92( +void checkUnnamed93( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudIntegrationsV1alphaExecutionSnapshot(o[0]); @@ -7294,9 +7396,9 @@ api.GoogleCloudIntegrationsV1alphaExecutionDetails final o = api.GoogleCloudIntegrationsV1alphaExecutionDetails(); buildCounterGoogleCloudIntegrationsV1alphaExecutionDetails++; if (buildCounterGoogleCloudIntegrationsV1alphaExecutionDetails < 3) { - o.attemptStats = buildUnnamed91(); + o.attemptStats = buildUnnamed92(); o.eventExecutionSnapshotsSize = 'foo'; - o.executionSnapshots = buildUnnamed92(); + o.executionSnapshots = buildUnnamed93(); o.state = 'foo'; } buildCounterGoogleCloudIntegrationsV1alphaExecutionDetails--; @@ -7307,12 +7409,12 @@ void checkGoogleCloudIntegrationsV1alphaExecutionDetails( api.GoogleCloudIntegrationsV1alphaExecutionDetails o) { buildCounterGoogleCloudIntegrationsV1alphaExecutionDetails++; if (buildCounterGoogleCloudIntegrationsV1alphaExecutionDetails < 3) { - checkUnnamed91(o.attemptStats!); + checkUnnamed92(o.attemptStats!); unittest.expect( o.eventExecutionSnapshotsSize!, unittest.equals('foo'), ); - checkUnnamed92(o.executionSnapshots!); + checkUnnamed93(o.executionSnapshots!); unittest.expect( o.state!, unittest.equals('foo'), @@ 
-7321,12 +7423,12 @@ void checkGoogleCloudIntegrationsV1alphaExecutionDetails( buildCounterGoogleCloudIntegrationsV1alphaExecutionDetails--; } -core.List buildUnnamed93() => [ +core.List buildUnnamed94() => [ 'foo', 'foo', ]; -void checkUnnamed93(core.List o) { +void checkUnnamed94(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -7345,8 +7447,9 @@ api.GoogleCloudIntegrationsV1alphaExecutionReplayInfo buildCounterGoogleCloudIntegrationsV1alphaExecutionReplayInfo++; if (buildCounterGoogleCloudIntegrationsV1alphaExecutionReplayInfo < 3) { o.originalExecutionInfoId = 'foo'; + o.replayMode = 'foo'; o.replayReason = 'foo'; - o.replayedExecutionInfoIds = buildUnnamed93(); + o.replayedExecutionInfoIds = buildUnnamed94(); } buildCounterGoogleCloudIntegrationsV1alphaExecutionReplayInfo--; return o; @@ -7360,22 +7463,26 @@ void checkGoogleCloudIntegrationsV1alphaExecutionReplayInfo( o.originalExecutionInfoId!, unittest.equals('foo'), ); + unittest.expect( + o.replayMode!, + unittest.equals('foo'), + ); unittest.expect( o.replayReason!, unittest.equals('foo'), ); - checkUnnamed93(o.replayedExecutionInfoIds!); + checkUnnamed94(o.replayedExecutionInfoIds!); } buildCounterGoogleCloudIntegrationsV1alphaExecutionReplayInfo--; } core.Map - buildUnnamed94() => { + buildUnnamed95() => { 'x': buildGoogleCloudIntegrationsV1alphaValueType(), 'y': buildGoogleCloudIntegrationsV1alphaValueType(), }; -void checkUnnamed94( +void checkUnnamed95( core.Map o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudIntegrationsV1alphaValueType(o['x']!); @@ -7383,12 +7490,12 @@ void checkUnnamed94( } core.List - buildUnnamed95() => [ + buildUnnamed96() => [ buildGoogleCloudIntegrationsV1alphaTaskExecutionDetails(), buildGoogleCloudIntegrationsV1alphaTaskExecutionDetails(), ]; -void checkUnnamed95( +void checkUnnamed96( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudIntegrationsV1alphaTaskExecutionDetails(o[0]); @@ -7404,8 +7511,8 @@ api.GoogleCloudIntegrationsV1alphaExecutionSnapshot o.checkpointTaskNumber = 'foo'; o.executionSnapshotMetadata = buildGoogleCloudIntegrationsV1alphaExecutionSnapshotExecutionSnapshotMetadata(); - o.params = buildUnnamed94(); - o.taskExecutionDetails = buildUnnamed95(); + o.params = buildUnnamed95(); + o.taskExecutionDetails = buildUnnamed96(); } buildCounterGoogleCloudIntegrationsV1alphaExecutionSnapshot--; return o; @@ -7421,18 +7528,18 @@ void checkGoogleCloudIntegrationsV1alphaExecutionSnapshot( ); checkGoogleCloudIntegrationsV1alphaExecutionSnapshotExecutionSnapshotMetadata( o.executionSnapshotMetadata!); - checkUnnamed94(o.params!); - checkUnnamed95(o.taskExecutionDetails!); + checkUnnamed95(o.params!); + checkUnnamed96(o.taskExecutionDetails!); } buildCounterGoogleCloudIntegrationsV1alphaExecutionSnapshot--; } -core.List buildUnnamed96() => [ +core.List buildUnnamed97() => [ 'foo', 'foo', ]; -void checkUnnamed96(core.List o) { +void checkUnnamed97(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -7444,12 +7551,12 @@ void checkUnnamed96(core.List o) { ); } -core.List buildUnnamed97() => [ +core.List buildUnnamed98() => [ 'foo', 'foo', ]; -void checkUnnamed97(core.List o) { +void checkUnnamed98(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -7471,8 +7578,8 @@ api.GoogleCloudIntegrationsV1alphaExecutionSnapshotExecutionSnapshotMetadata buildCounterGoogleCloudIntegrationsV1alphaExecutionSnapshotExecutionSnapshotMetadata++; if 
(buildCounterGoogleCloudIntegrationsV1alphaExecutionSnapshotExecutionSnapshotMetadata < 3) { - o.ancestorIterationNumbers = buildUnnamed96(); - o.ancestorTaskNumbers = buildUnnamed97(); + o.ancestorIterationNumbers = buildUnnamed97(); + o.ancestorTaskNumbers = buildUnnamed98(); o.executionAttempt = 42; o.integrationName = 'foo'; o.task = 'foo'; @@ -7490,8 +7597,8 @@ void checkGoogleCloudIntegrationsV1alphaExecutionSnapshotExecutionSnapshotMetada buildCounterGoogleCloudIntegrationsV1alphaExecutionSnapshotExecutionSnapshotMetadata++; if (buildCounterGoogleCloudIntegrationsV1alphaExecutionSnapshotExecutionSnapshotMetadata < 3) { - checkUnnamed96(o.ancestorIterationNumbers!); - checkUnnamed97(o.ancestorTaskNumbers!); + checkUnnamed97(o.ancestorIterationNumbers!); + checkUnnamed98(o.ancestorTaskNumbers!); unittest.expect( o.executionAttempt!, unittest.equals(42), @@ -7559,7 +7666,7 @@ void checkGoogleCloudIntegrationsV1alphaFailurePolicy( buildCounterGoogleCloudIntegrationsV1alphaFailurePolicy--; } -core.Map buildUnnamed98() => { +core.Map buildUnnamed99() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -7572,7 +7679,7 @@ core.Map buildUnnamed98() => { }, }; -void checkUnnamed98(core.Map o) { +void checkUnnamed99(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted15 = (o['x']!) as core.Map; unittest.expect(casted15, unittest.hasLength(3)); @@ -7610,7 +7717,7 @@ api.GoogleCloudIntegrationsV1alphaFile final o = api.GoogleCloudIntegrationsV1alphaFile(); buildCounterGoogleCloudIntegrationsV1alphaFile++; if (buildCounterGoogleCloudIntegrationsV1alphaFile < 3) { - o.integrationConfig = buildUnnamed98(); + o.integrationConfig = buildUnnamed99(); o.integrationVersion = buildGoogleCloudIntegrationsV1alphaIntegrationVersion(); o.type = 'foo'; @@ -7623,7 +7730,7 @@ void checkGoogleCloudIntegrationsV1alphaFile( api.GoogleCloudIntegrationsV1alphaFile o) { buildCounterGoogleCloudIntegrationsV1alphaFile++; if (buildCounterGoogleCloudIntegrationsV1alphaFile < 3) { - checkUnnamed98(o.integrationConfig!); + checkUnnamed99(o.integrationConfig!); checkGoogleCloudIntegrationsV1alphaIntegrationVersion( o.integrationVersion!); unittest.expect( @@ -7634,6 +7741,75 @@ void checkGoogleCloudIntegrationsV1alphaFile( buildCounterGoogleCloudIntegrationsV1alphaFile--; } +core.List + buildUnnamed100() => [ + buildGoogleCloudIntegrationsV1alphaApiTriggerResource(), + buildGoogleCloudIntegrationsV1alphaApiTriggerResource(), + ]; + +void checkUnnamed100( + core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkGoogleCloudIntegrationsV1alphaApiTriggerResource(o[0]); + checkGoogleCloudIntegrationsV1alphaApiTriggerResource(o[1]); +} + +core.int buildCounterGoogleCloudIntegrationsV1alphaGenerateOpenApiSpecRequest = + 0; +api.GoogleCloudIntegrationsV1alphaGenerateOpenApiSpecRequest + buildGoogleCloudIntegrationsV1alphaGenerateOpenApiSpecRequest() { + final o = api.GoogleCloudIntegrationsV1alphaGenerateOpenApiSpecRequest(); + buildCounterGoogleCloudIntegrationsV1alphaGenerateOpenApiSpecRequest++; + if (buildCounterGoogleCloudIntegrationsV1alphaGenerateOpenApiSpecRequest < + 3) { + o.apiTriggerResources = buildUnnamed100(); + o.fileFormat = 'foo'; + } + buildCounterGoogleCloudIntegrationsV1alphaGenerateOpenApiSpecRequest--; + return o; +} + +void checkGoogleCloudIntegrationsV1alphaGenerateOpenApiSpecRequest( + api.GoogleCloudIntegrationsV1alphaGenerateOpenApiSpecRequest o) { + buildCounterGoogleCloudIntegrationsV1alphaGenerateOpenApiSpecRequest++; + if 
(buildCounterGoogleCloudIntegrationsV1alphaGenerateOpenApiSpecRequest < + 3) { + checkUnnamed100(o.apiTriggerResources!); + unittest.expect( + o.fileFormat!, + unittest.equals('foo'), + ); + } + buildCounterGoogleCloudIntegrationsV1alphaGenerateOpenApiSpecRequest--; +} + +core.int buildCounterGoogleCloudIntegrationsV1alphaGenerateOpenApiSpecResponse = + 0; +api.GoogleCloudIntegrationsV1alphaGenerateOpenApiSpecResponse + buildGoogleCloudIntegrationsV1alphaGenerateOpenApiSpecResponse() { + final o = api.GoogleCloudIntegrationsV1alphaGenerateOpenApiSpecResponse(); + buildCounterGoogleCloudIntegrationsV1alphaGenerateOpenApiSpecResponse++; + if (buildCounterGoogleCloudIntegrationsV1alphaGenerateOpenApiSpecResponse < + 3) { + o.openApiSpec = 'foo'; + } + buildCounterGoogleCloudIntegrationsV1alphaGenerateOpenApiSpecResponse--; + return o; +} + +void checkGoogleCloudIntegrationsV1alphaGenerateOpenApiSpecResponse( + api.GoogleCloudIntegrationsV1alphaGenerateOpenApiSpecResponse o) { + buildCounterGoogleCloudIntegrationsV1alphaGenerateOpenApiSpecResponse++; + if (buildCounterGoogleCloudIntegrationsV1alphaGenerateOpenApiSpecResponse < + 3) { + unittest.expect( + o.openApiSpec!, + unittest.equals('foo'), + ); + } + buildCounterGoogleCloudIntegrationsV1alphaGenerateOpenApiSpecResponse--; +} + core.int buildCounterGoogleCloudIntegrationsV1alphaGenerateTokenResponse = 0; api.GoogleCloudIntegrationsV1alphaGenerateTokenResponse buildGoogleCloudIntegrationsV1alphaGenerateTokenResponse() { @@ -7703,14 +7879,14 @@ void checkGoogleCloudIntegrationsV1alphaGetClientResponse( core.Map - buildUnnamed99() => { + buildUnnamed101() => { 'x': buildGoogleCloudIntegrationsV1alphaUseTemplateRequestIntegrationDetails(), 'y': buildGoogleCloudIntegrationsV1alphaUseTemplateRequestIntegrationDetails(), }; -void checkUnnamed99( +void checkUnnamed101( core.Map< core.String, api @@ -7731,7 +7907,7 @@ api.GoogleCloudIntegrationsV1alphaImportTemplateRequest if (buildCounterGoogleCloudIntegrationsV1alphaImportTemplateRequest < 3) { o.integration = 'foo'; o.integrationRegion = 'foo'; - o.subIntegrations = buildUnnamed99(); + o.subIntegrations = buildUnnamed101(); } buildCounterGoogleCloudIntegrationsV1alphaImportTemplateRequest--; return o; @@ -7749,18 +7925,18 @@ void checkGoogleCloudIntegrationsV1alphaImportTemplateRequest( o.integrationRegion!, unittest.equals('foo'), ); - checkUnnamed99(o.subIntegrations!); + checkUnnamed101(o.subIntegrations!); } buildCounterGoogleCloudIntegrationsV1alphaImportTemplateRequest--; } core.List - buildUnnamed100() => [ + buildUnnamed102() => [ buildGoogleCloudIntegrationsV1alphaIntegrationVersion(), buildGoogleCloudIntegrationsV1alphaIntegrationVersion(), ]; -void checkUnnamed100( +void checkUnnamed102( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudIntegrationsV1alphaIntegrationVersion(o[0]); @@ -7775,7 +7951,7 @@ api.GoogleCloudIntegrationsV1alphaImportTemplateResponse if (buildCounterGoogleCloudIntegrationsV1alphaImportTemplateResponse < 3) { o.integrationVersion = buildGoogleCloudIntegrationsV1alphaIntegrationVersion(); - o.subIntegrationVersions = buildUnnamed100(); + o.subIntegrationVersions = buildUnnamed102(); } buildCounterGoogleCloudIntegrationsV1alphaImportTemplateResponse--; return o; @@ -7787,17 +7963,17 @@ void checkGoogleCloudIntegrationsV1alphaImportTemplateResponse( if (buildCounterGoogleCloudIntegrationsV1alphaImportTemplateResponse < 3) { checkGoogleCloudIntegrationsV1alphaIntegrationVersion( o.integrationVersion!); - 
checkUnnamed100(o.subIntegrationVersions!); + checkUnnamed102(o.subIntegrationVersions!); } buildCounterGoogleCloudIntegrationsV1alphaImportTemplateResponse--; } -core.List buildUnnamed101() => [ +core.List buildUnnamed103() => [ 'foo', 'foo', ]; -void checkUnnamed101(core.List o) { +void checkUnnamed103(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -7815,7 +7991,7 @@ api.GoogleCloudIntegrationsV1alphaIntParameterArray final o = api.GoogleCloudIntegrationsV1alphaIntParameterArray(); buildCounterGoogleCloudIntegrationsV1alphaIntParameterArray++; if (buildCounterGoogleCloudIntegrationsV1alphaIntParameterArray < 3) { - o.intValues = buildUnnamed101(); + o.intValues = buildUnnamed103(); } buildCounterGoogleCloudIntegrationsV1alphaIntParameterArray--; return o; @@ -7825,7 +8001,7 @@ void checkGoogleCloudIntegrationsV1alphaIntParameterArray( api.GoogleCloudIntegrationsV1alphaIntParameterArray o) { buildCounterGoogleCloudIntegrationsV1alphaIntParameterArray++; if (buildCounterGoogleCloudIntegrationsV1alphaIntParameterArray < 3) { - checkUnnamed101(o.intValues!); + checkUnnamed103(o.intValues!); } buildCounterGoogleCloudIntegrationsV1alphaIntParameterArray--; } @@ -8063,12 +8239,12 @@ void checkGoogleCloudIntegrationsV1alphaIntegrationParameter( } core.List - buildUnnamed102() => [ + buildUnnamed104() => [ buildGoogleCloudIntegrationsV1alphaErrorCatcherConfig(), buildGoogleCloudIntegrationsV1alphaErrorCatcherConfig(), ]; -void checkUnnamed102( +void checkUnnamed104( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudIntegrationsV1alphaErrorCatcherConfig(o[0]); @@ -8076,12 +8252,12 @@ void checkUnnamed102( } core.List - buildUnnamed103() => [ + buildUnnamed105() => [ buildGoogleCloudIntegrationsV1alphaIntegrationConfigParameter(), buildGoogleCloudIntegrationsV1alphaIntegrationConfigParameter(), ]; -void checkUnnamed103( +void checkUnnamed105( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudIntegrationsV1alphaIntegrationConfigParameter(o[0]); @@ -8089,24 +8265,24 @@ void checkUnnamed103( } core.List - buildUnnamed104() => [ + buildUnnamed106() => [ buildGoogleCloudIntegrationsV1alphaIntegrationParameter(), buildGoogleCloudIntegrationsV1alphaIntegrationParameter(), ]; -void checkUnnamed104( +void checkUnnamed106( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudIntegrationsV1alphaIntegrationParameter(o[0]); checkGoogleCloudIntegrationsV1alphaIntegrationParameter(o[1]); } -core.List buildUnnamed105() => [ +core.List buildUnnamed107() => [ buildGoogleCloudIntegrationsV1alphaTaskConfig(), buildGoogleCloudIntegrationsV1alphaTaskConfig(), ]; -void checkUnnamed105( +void checkUnnamed107( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudIntegrationsV1alphaTaskConfig(o[0]); @@ -8114,25 +8290,25 @@ void checkUnnamed105( } core.List - buildUnnamed106() => [ + buildUnnamed108() => [ buildEnterpriseCrmFrontendsEventbusProtoTaskConfig(), buildEnterpriseCrmFrontendsEventbusProtoTaskConfig(), ]; -void checkUnnamed106( +void checkUnnamed108( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkEnterpriseCrmFrontendsEventbusProtoTaskConfig(o[0]); checkEnterpriseCrmFrontendsEventbusProtoTaskConfig(o[1]); } -core.List buildUnnamed107() => +core.List buildUnnamed109() => [ buildGoogleCloudIntegrationsV1alphaTriggerConfig(), buildGoogleCloudIntegrationsV1alphaTriggerConfig(), ]; -void checkUnnamed107( +void checkUnnamed109( core.List o) { unittest.expect(o, 
unittest.hasLength(2)); checkGoogleCloudIntegrationsV1alphaTriggerConfig(o[0]); @@ -8140,12 +8316,12 @@ void checkUnnamed107( } core.List - buildUnnamed108() => [ + buildUnnamed110() => [ buildEnterpriseCrmFrontendsEventbusProtoTriggerConfig(), buildEnterpriseCrmFrontendsEventbusProtoTriggerConfig(), ]; -void checkUnnamed108( +void checkUnnamed110( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkEnterpriseCrmFrontendsEventbusProtoTriggerConfig(o[0]); @@ -8165,9 +8341,9 @@ api.GoogleCloudIntegrationsV1alphaIntegrationVersion o.databasePersistencePolicy = 'foo'; o.description = 'foo'; o.enableVariableMasking = true; - o.errorCatcherConfigs = buildUnnamed102(); - o.integrationConfigParameters = buildUnnamed103(); - o.integrationParameters = buildUnnamed104(); + o.errorCatcherConfigs = buildUnnamed104(); + o.integrationConfigParameters = buildUnnamed105(); + o.integrationParameters = buildUnnamed106(); o.integrationParametersInternal = buildEnterpriseCrmFrontendsEventbusProtoWorkflowParameters(); o.lastModifierEmail = 'foo'; @@ -8179,11 +8355,11 @@ api.GoogleCloudIntegrationsV1alphaIntegrationVersion o.snapshotNumber = 'foo'; o.state = 'foo'; o.status = 'foo'; - o.taskConfigs = buildUnnamed105(); - o.taskConfigsInternal = buildUnnamed106(); + o.taskConfigs = buildUnnamed107(); + o.taskConfigsInternal = buildUnnamed108(); o.teardown = buildEnterpriseCrmEventbusProtoTeardown(); - o.triggerConfigs = buildUnnamed107(); - o.triggerConfigsInternal = buildUnnamed108(); + o.triggerConfigs = buildUnnamed109(); + o.triggerConfigsInternal = buildUnnamed110(); o.updateTime = 'foo'; o.userLabel = 'foo'; } @@ -8214,9 +8390,9 @@ void checkGoogleCloudIntegrationsV1alphaIntegrationVersion( unittest.equals('foo'), ); unittest.expect(o.enableVariableMasking!, unittest.isTrue); - checkUnnamed102(o.errorCatcherConfigs!); - checkUnnamed103(o.integrationConfigParameters!); - checkUnnamed104(o.integrationParameters!); + checkUnnamed104(o.errorCatcherConfigs!); + checkUnnamed105(o.integrationConfigParameters!); + checkUnnamed106(o.integrationParameters!); checkEnterpriseCrmFrontendsEventbusProtoWorkflowParameters( o.integrationParametersInternal!); unittest.expect( @@ -8255,11 +8431,11 @@ void checkGoogleCloudIntegrationsV1alphaIntegrationVersion( o.status!, unittest.equals('foo'), ); - checkUnnamed105(o.taskConfigs!); - checkUnnamed106(o.taskConfigsInternal!); + checkUnnamed107(o.taskConfigs!); + checkUnnamed108(o.taskConfigsInternal!); checkEnterpriseCrmEventbusProtoTeardown(o.teardown!); - checkUnnamed107(o.triggerConfigs!); - checkUnnamed108(o.triggerConfigsInternal!); + checkUnnamed109(o.triggerConfigs!); + checkUnnamed110(o.triggerConfigsInternal!); unittest.expect( o.updateTime!, unittest.equals('foo'), @@ -8443,12 +8619,12 @@ void checkGoogleCloudIntegrationsV1alphaLinkAppsScriptProjectResponse( buildCounterGoogleCloudIntegrationsV1alphaLinkAppsScriptProjectResponse--; } -core.List buildUnnamed109() => [ +core.List buildUnnamed111() => [ buildGoogleCloudIntegrationsV1alphaAuthConfig(), buildGoogleCloudIntegrationsV1alphaAuthConfig(), ]; -void checkUnnamed109( +void checkUnnamed111( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudIntegrationsV1alphaAuthConfig(o[0]); @@ -8461,7 +8637,7 @@ api.GoogleCloudIntegrationsV1alphaListAuthConfigsResponse final o = api.GoogleCloudIntegrationsV1alphaListAuthConfigsResponse(); buildCounterGoogleCloudIntegrationsV1alphaListAuthConfigsResponse++; if (buildCounterGoogleCloudIntegrationsV1alphaListAuthConfigsResponse < 3) { - 
o.authConfigs = buildUnnamed109(); + o.authConfigs = buildUnnamed111(); o.nextPageToken = 'foo'; } buildCounterGoogleCloudIntegrationsV1alphaListAuthConfigsResponse--; @@ -8472,7 +8648,7 @@ void checkGoogleCloudIntegrationsV1alphaListAuthConfigsResponse( api.GoogleCloudIntegrationsV1alphaListAuthConfigsResponse o) { buildCounterGoogleCloudIntegrationsV1alphaListAuthConfigsResponse++; if (buildCounterGoogleCloudIntegrationsV1alphaListAuthConfigsResponse < 3) { - checkUnnamed109(o.authConfigs!); + checkUnnamed111(o.authConfigs!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -8481,12 +8657,12 @@ void checkGoogleCloudIntegrationsV1alphaListAuthConfigsResponse( buildCounterGoogleCloudIntegrationsV1alphaListAuthConfigsResponse--; } -core.List buildUnnamed110() => [ +core.List buildUnnamed112() => [ buildGoogleCloudIntegrationsV1alphaCertificate(), buildGoogleCloudIntegrationsV1alphaCertificate(), ]; -void checkUnnamed110( +void checkUnnamed112( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudIntegrationsV1alphaCertificate(o[0]); @@ -8499,7 +8675,7 @@ api.GoogleCloudIntegrationsV1alphaListCertificatesResponse final o = api.GoogleCloudIntegrationsV1alphaListCertificatesResponse(); buildCounterGoogleCloudIntegrationsV1alphaListCertificatesResponse++; if (buildCounterGoogleCloudIntegrationsV1alphaListCertificatesResponse < 3) { - o.certificates = buildUnnamed110(); + o.certificates = buildUnnamed112(); o.nextPageToken = 'foo'; } buildCounterGoogleCloudIntegrationsV1alphaListCertificatesResponse--; @@ -8510,7 +8686,7 @@ void checkGoogleCloudIntegrationsV1alphaListCertificatesResponse( api.GoogleCloudIntegrationsV1alphaListCertificatesResponse o) { buildCounterGoogleCloudIntegrationsV1alphaListCertificatesResponse++; if (buildCounterGoogleCloudIntegrationsV1alphaListCertificatesResponse < 3) { - checkUnnamed110(o.certificates!); + checkUnnamed112(o.certificates!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -8519,12 +8695,12 @@ void checkGoogleCloudIntegrationsV1alphaListCertificatesResponse( buildCounterGoogleCloudIntegrationsV1alphaListCertificatesResponse--; } -core.List buildUnnamed111() => [ +core.List buildUnnamed113() => [ buildGoogleCloudConnectorsV1Connection(), buildGoogleCloudConnectorsV1Connection(), ]; -void checkUnnamed111(core.List o) { +void checkUnnamed113(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudConnectorsV1Connection(o[0]); checkGoogleCloudConnectorsV1Connection(o[1]); @@ -8536,7 +8712,7 @@ api.GoogleCloudIntegrationsV1alphaListConnectionsResponse final o = api.GoogleCloudIntegrationsV1alphaListConnectionsResponse(); buildCounterGoogleCloudIntegrationsV1alphaListConnectionsResponse++; if (buildCounterGoogleCloudIntegrationsV1alphaListConnectionsResponse < 3) { - o.connections = buildUnnamed111(); + o.connections = buildUnnamed113(); o.nextPageToken = 'foo'; } buildCounterGoogleCloudIntegrationsV1alphaListConnectionsResponse--; @@ -8547,7 +8723,7 @@ void checkGoogleCloudIntegrationsV1alphaListConnectionsResponse( api.GoogleCloudIntegrationsV1alphaListConnectionsResponse o) { buildCounterGoogleCloudIntegrationsV1alphaListConnectionsResponse++; if (buildCounterGoogleCloudIntegrationsV1alphaListConnectionsResponse < 3) { - checkUnnamed111(o.connections!); + checkUnnamed113(o.connections!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -8557,24 +8733,24 @@ void checkGoogleCloudIntegrationsV1alphaListConnectionsResponse( } core.List - buildUnnamed112() => [ + buildUnnamed114() => 
[ buildEnterpriseCrmFrontendsEventbusProtoEventExecutionInfo(), buildEnterpriseCrmFrontendsEventbusProtoEventExecutionInfo(), ]; -void checkUnnamed112( +void checkUnnamed114( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkEnterpriseCrmFrontendsEventbusProtoEventExecutionInfo(o[0]); checkEnterpriseCrmFrontendsEventbusProtoEventExecutionInfo(o[1]); } -core.List buildUnnamed113() => [ +core.List buildUnnamed115() => [ buildGoogleCloudIntegrationsV1alphaExecution(), buildGoogleCloudIntegrationsV1alphaExecution(), ]; -void checkUnnamed113(core.List o) { +void checkUnnamed115(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudIntegrationsV1alphaExecution(o[0]); checkGoogleCloudIntegrationsV1alphaExecution(o[1]); @@ -8586,8 +8762,8 @@ api.GoogleCloudIntegrationsV1alphaListExecutionsResponse final o = api.GoogleCloudIntegrationsV1alphaListExecutionsResponse(); buildCounterGoogleCloudIntegrationsV1alphaListExecutionsResponse++; if (buildCounterGoogleCloudIntegrationsV1alphaListExecutionsResponse < 3) { - o.executionInfos = buildUnnamed112(); - o.executions = buildUnnamed113(); + o.executionInfos = buildUnnamed114(); + o.executions = buildUnnamed115(); o.nextPageToken = 'foo'; } buildCounterGoogleCloudIntegrationsV1alphaListExecutionsResponse--; @@ -8598,8 +8774,8 @@ void checkGoogleCloudIntegrationsV1alphaListExecutionsResponse( api.GoogleCloudIntegrationsV1alphaListExecutionsResponse o) { buildCounterGoogleCloudIntegrationsV1alphaListExecutionsResponse++; if (buildCounterGoogleCloudIntegrationsV1alphaListExecutionsResponse < 3) { - checkUnnamed112(o.executionInfos!); - checkUnnamed113(o.executions!); + checkUnnamed114(o.executionInfos!); + checkUnnamed115(o.executions!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -8609,12 +8785,12 @@ void checkGoogleCloudIntegrationsV1alphaListExecutionsResponse( } core.List - buildUnnamed114() => [ + buildUnnamed116() => [ buildGoogleCloudIntegrationsV1alphaIntegrationVersion(), buildGoogleCloudIntegrationsV1alphaIntegrationVersion(), ]; -void checkUnnamed114( +void checkUnnamed116( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudIntegrationsV1alphaIntegrationVersion(o[0]); @@ -8630,7 +8806,7 @@ api.GoogleCloudIntegrationsV1alphaListIntegrationVersionsResponse buildCounterGoogleCloudIntegrationsV1alphaListIntegrationVersionsResponse++; if (buildCounterGoogleCloudIntegrationsV1alphaListIntegrationVersionsResponse < 3) { - o.integrationVersions = buildUnnamed114(); + o.integrationVersions = buildUnnamed116(); o.nextPageToken = 'foo'; o.noPermission = true; } @@ -8643,7 +8819,7 @@ void checkGoogleCloudIntegrationsV1alphaListIntegrationVersionsResponse( buildCounterGoogleCloudIntegrationsV1alphaListIntegrationVersionsResponse++; if (buildCounterGoogleCloudIntegrationsV1alphaListIntegrationVersionsResponse < 3) { - checkUnnamed114(o.integrationVersions!); + checkUnnamed116(o.integrationVersions!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -8653,12 +8829,12 @@ void checkGoogleCloudIntegrationsV1alphaListIntegrationVersionsResponse( buildCounterGoogleCloudIntegrationsV1alphaListIntegrationVersionsResponse--; } -core.List buildUnnamed115() => [ +core.List buildUnnamed117() => [ buildGoogleCloudIntegrationsV1alphaIntegration(), buildGoogleCloudIntegrationsV1alphaIntegration(), ]; -void checkUnnamed115( +void checkUnnamed117( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudIntegrationsV1alphaIntegration(o[0]); @@ -8671,7 +8847,7 @@ 
api.GoogleCloudIntegrationsV1alphaListIntegrationsResponse final o = api.GoogleCloudIntegrationsV1alphaListIntegrationsResponse(); buildCounterGoogleCloudIntegrationsV1alphaListIntegrationsResponse++; if (buildCounterGoogleCloudIntegrationsV1alphaListIntegrationsResponse < 3) { - o.integrations = buildUnnamed115(); + o.integrations = buildUnnamed117(); o.nextPageToken = 'foo'; } buildCounterGoogleCloudIntegrationsV1alphaListIntegrationsResponse--; @@ -8682,7 +8858,7 @@ void checkGoogleCloudIntegrationsV1alphaListIntegrationsResponse( api.GoogleCloudIntegrationsV1alphaListIntegrationsResponse o) { buildCounterGoogleCloudIntegrationsV1alphaListIntegrationsResponse++; if (buildCounterGoogleCloudIntegrationsV1alphaListIntegrationsResponse < 3) { - checkUnnamed115(o.integrations!); + checkUnnamed117(o.integrations!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -8692,12 +8868,12 @@ void checkGoogleCloudIntegrationsV1alphaListIntegrationsResponse( } core.List - buildUnnamed116() => [ + buildUnnamed118() => [ buildGoogleCloudIntegrationsV1alphaRuntimeActionSchema(), buildGoogleCloudIntegrationsV1alphaRuntimeActionSchema(), ]; -void checkUnnamed116( +void checkUnnamed118( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudIntegrationsV1alphaRuntimeActionSchema(o[0]); @@ -8715,7 +8891,7 @@ api.GoogleCloudIntegrationsV1alphaListRuntimeActionSchemasResponse if (buildCounterGoogleCloudIntegrationsV1alphaListRuntimeActionSchemasResponse < 3) { o.nextPageToken = 'foo'; - o.runtimeActionSchemas = buildUnnamed116(); + o.runtimeActionSchemas = buildUnnamed118(); } buildCounterGoogleCloudIntegrationsV1alphaListRuntimeActionSchemasResponse--; return o; @@ -8730,18 +8906,18 @@ void checkGoogleCloudIntegrationsV1alphaListRuntimeActionSchemasResponse( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed116(o.runtimeActionSchemas!); + checkUnnamed118(o.runtimeActionSchemas!); } buildCounterGoogleCloudIntegrationsV1alphaListRuntimeActionSchemasResponse--; } core.List - buildUnnamed117() => [ + buildUnnamed119() => [ buildGoogleCloudIntegrationsV1alphaRuntimeEntitySchema(), buildGoogleCloudIntegrationsV1alphaRuntimeEntitySchema(), ]; -void checkUnnamed117( +void checkUnnamed119( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudIntegrationsV1alphaRuntimeEntitySchema(o[0]); @@ -8759,7 +8935,7 @@ api.GoogleCloudIntegrationsV1alphaListRuntimeEntitySchemasResponse if (buildCounterGoogleCloudIntegrationsV1alphaListRuntimeEntitySchemasResponse < 3) { o.nextPageToken = 'foo'; - o.runtimeEntitySchemas = buildUnnamed117(); + o.runtimeEntitySchemas = buildUnnamed119(); } buildCounterGoogleCloudIntegrationsV1alphaListRuntimeEntitySchemasResponse--; return o; @@ -8774,17 +8950,17 @@ void checkGoogleCloudIntegrationsV1alphaListRuntimeEntitySchemasResponse( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed117(o.runtimeEntitySchemas!); + checkUnnamed119(o.runtimeEntitySchemas!); } buildCounterGoogleCloudIntegrationsV1alphaListRuntimeEntitySchemasResponse--; } -core.List buildUnnamed118() => [ +core.List buildUnnamed120() => [ buildGoogleCloudIntegrationsV1alphaSfdcChannel(), buildGoogleCloudIntegrationsV1alphaSfdcChannel(), ]; -void checkUnnamed118( +void checkUnnamed120( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudIntegrationsV1alphaSfdcChannel(o[0]); @@ -8798,7 +8974,7 @@ api.GoogleCloudIntegrationsV1alphaListSfdcChannelsResponse buildCounterGoogleCloudIntegrationsV1alphaListSfdcChannelsResponse++; if 
(buildCounterGoogleCloudIntegrationsV1alphaListSfdcChannelsResponse < 3) { o.nextPageToken = 'foo'; - o.sfdcChannels = buildUnnamed118(); + o.sfdcChannels = buildUnnamed120(); } buildCounterGoogleCloudIntegrationsV1alphaListSfdcChannelsResponse--; return o; @@ -8812,17 +8988,17 @@ void checkGoogleCloudIntegrationsV1alphaListSfdcChannelsResponse( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed118(o.sfdcChannels!); + checkUnnamed120(o.sfdcChannels!); } buildCounterGoogleCloudIntegrationsV1alphaListSfdcChannelsResponse--; } -core.List buildUnnamed119() => [ +core.List buildUnnamed121() => [ buildGoogleCloudIntegrationsV1alphaSfdcInstance(), buildGoogleCloudIntegrationsV1alphaSfdcInstance(), ]; -void checkUnnamed119( +void checkUnnamed121( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudIntegrationsV1alphaSfdcInstance(o[0]); @@ -8837,7 +9013,7 @@ api.GoogleCloudIntegrationsV1alphaListSfdcInstancesResponse buildCounterGoogleCloudIntegrationsV1alphaListSfdcInstancesResponse++; if (buildCounterGoogleCloudIntegrationsV1alphaListSfdcInstancesResponse < 3) { o.nextPageToken = 'foo'; - o.sfdcInstances = buildUnnamed119(); + o.sfdcInstances = buildUnnamed121(); } buildCounterGoogleCloudIntegrationsV1alphaListSfdcInstancesResponse--; return o; @@ -8851,17 +9027,17 @@ void checkGoogleCloudIntegrationsV1alphaListSfdcInstancesResponse( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed119(o.sfdcInstances!); + checkUnnamed121(o.sfdcInstances!); } buildCounterGoogleCloudIntegrationsV1alphaListSfdcInstancesResponse--; } -core.List buildUnnamed120() => [ +core.List buildUnnamed122() => [ buildGoogleCloudIntegrationsV1alphaSuspension(), buildGoogleCloudIntegrationsV1alphaSuspension(), ]; -void checkUnnamed120( +void checkUnnamed122( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudIntegrationsV1alphaSuspension(o[0]); @@ -8875,7 +9051,7 @@ api.GoogleCloudIntegrationsV1alphaListSuspensionsResponse buildCounterGoogleCloudIntegrationsV1alphaListSuspensionsResponse++; if (buildCounterGoogleCloudIntegrationsV1alphaListSuspensionsResponse < 3) { o.nextPageToken = 'foo'; - o.suspensions = buildUnnamed120(); + o.suspensions = buildUnnamed122(); } buildCounterGoogleCloudIntegrationsV1alphaListSuspensionsResponse--; return o; @@ -8889,17 +9065,17 @@ void checkGoogleCloudIntegrationsV1alphaListSuspensionsResponse( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed120(o.suspensions!); + checkUnnamed122(o.suspensions!); } buildCounterGoogleCloudIntegrationsV1alphaListSuspensionsResponse--; } -core.List buildUnnamed121() => [ +core.List buildUnnamed123() => [ buildGoogleCloudIntegrationsV1alphaTemplate(), buildGoogleCloudIntegrationsV1alphaTemplate(), ]; -void checkUnnamed121(core.List o) { +void checkUnnamed123(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudIntegrationsV1alphaTemplate(o[0]); checkGoogleCloudIntegrationsV1alphaTemplate(o[1]); @@ -8912,7 +9088,7 @@ api.GoogleCloudIntegrationsV1alphaListTemplatesResponse buildCounterGoogleCloudIntegrationsV1alphaListTemplatesResponse++; if (buildCounterGoogleCloudIntegrationsV1alphaListTemplatesResponse < 3) { o.nextPageToken = 'foo'; - o.templates = buildUnnamed121(); + o.templates = buildUnnamed123(); } buildCounterGoogleCloudIntegrationsV1alphaListTemplatesResponse--; return o; @@ -8926,17 +9102,17 @@ void checkGoogleCloudIntegrationsV1alphaListTemplatesResponse( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed121(o.templates!); + 
checkUnnamed123(o.templates!); } buildCounterGoogleCloudIntegrationsV1alphaListTemplatesResponse--; } -core.List buildUnnamed122() => [ +core.List buildUnnamed124() => [ buildGoogleCloudIntegrationsV1alphaExecution(), buildGoogleCloudIntegrationsV1alphaExecution(), ]; -void checkUnnamed122(core.List o) { +void checkUnnamed124(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudIntegrationsV1alphaExecution(o[0]); checkGoogleCloudIntegrationsV1alphaExecution(o[1]); @@ -8951,7 +9127,7 @@ api.GoogleCloudIntegrationsV1alphaListTestCaseExecutionsResponse buildCounterGoogleCloudIntegrationsV1alphaListTestCaseExecutionsResponse++; if (buildCounterGoogleCloudIntegrationsV1alphaListTestCaseExecutionsResponse < 3) { - o.executions = buildUnnamed122(); + o.executions = buildUnnamed124(); o.nextPageToken = 'foo'; } buildCounterGoogleCloudIntegrationsV1alphaListTestCaseExecutionsResponse--; @@ -8963,7 +9139,7 @@ void checkGoogleCloudIntegrationsV1alphaListTestCaseExecutionsResponse( buildCounterGoogleCloudIntegrationsV1alphaListTestCaseExecutionsResponse++; if (buildCounterGoogleCloudIntegrationsV1alphaListTestCaseExecutionsResponse < 3) { - checkUnnamed122(o.executions!); + checkUnnamed124(o.executions!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -8972,12 +9148,12 @@ void checkGoogleCloudIntegrationsV1alphaListTestCaseExecutionsResponse( buildCounterGoogleCloudIntegrationsV1alphaListTestCaseExecutionsResponse--; } -core.List buildUnnamed123() => [ +core.List buildUnnamed125() => [ buildGoogleCloudIntegrationsV1alphaTestCase(), buildGoogleCloudIntegrationsV1alphaTestCase(), ]; -void checkUnnamed123(core.List o) { +void checkUnnamed125(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudIntegrationsV1alphaTestCase(o[0]); checkGoogleCloudIntegrationsV1alphaTestCase(o[1]); @@ -8990,7 +9166,7 @@ api.GoogleCloudIntegrationsV1alphaListTestCasesResponse buildCounterGoogleCloudIntegrationsV1alphaListTestCasesResponse++; if (buildCounterGoogleCloudIntegrationsV1alphaListTestCasesResponse < 3) { o.nextPageToken = 'foo'; - o.testCases = buildUnnamed123(); + o.testCases = buildUnnamed125(); } buildCounterGoogleCloudIntegrationsV1alphaListTestCasesResponse--; return o; @@ -9004,18 +9180,18 @@ void checkGoogleCloudIntegrationsV1alphaListTestCasesResponse( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed123(o.testCases!); + checkUnnamed125(o.testCases!); } buildCounterGoogleCloudIntegrationsV1alphaListTestCasesResponse--; } -core.List buildUnnamed124() => +core.List buildUnnamed126() => [ buildGoogleCloudIntegrationsV1alphaEventParameter(), buildGoogleCloudIntegrationsV1alphaEventParameter(), ]; -void checkUnnamed124( +void checkUnnamed126( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudIntegrationsV1alphaEventParameter(o[0]); @@ -9030,7 +9206,7 @@ api.GoogleCloudIntegrationsV1alphaMockConfig if (buildCounterGoogleCloudIntegrationsV1alphaMockConfig < 3) { o.failedExecutions = 'foo'; o.mockStrategy = 'foo'; - o.parameters = buildUnnamed124(); + o.parameters = buildUnnamed126(); } buildCounterGoogleCloudIntegrationsV1alphaMockConfig--; return o; @@ -9048,7 +9224,7 @@ void checkGoogleCloudIntegrationsV1alphaMockConfig( o.mockStrategy!, unittest.equals('foo'), ); - checkUnnamed124(o.parameters!); + checkUnnamed126(o.parameters!); } buildCounterGoogleCloudIntegrationsV1alphaMockConfig--; } @@ -9309,12 +9485,12 @@ void checkGoogleCloudIntegrationsV1alphaOidcToken( } core.List - buildUnnamed125() => [ + buildUnnamed127() => 
[ buildGoogleCloudIntegrationsV1alphaParameterMapEntry(), buildGoogleCloudIntegrationsV1alphaParameterMapEntry(), ]; -void checkUnnamed125( +void checkUnnamed127( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudIntegrationsV1alphaParameterMapEntry(o[0]); @@ -9327,7 +9503,7 @@ api.GoogleCloudIntegrationsV1alphaParameterMap final o = api.GoogleCloudIntegrationsV1alphaParameterMap(); buildCounterGoogleCloudIntegrationsV1alphaParameterMap++; if (buildCounterGoogleCloudIntegrationsV1alphaParameterMap < 3) { - o.entries = buildUnnamed125(); + o.entries = buildUnnamed127(); o.keyType = 'foo'; o.valueType = 'foo'; } @@ -9339,7 +9515,7 @@ void checkGoogleCloudIntegrationsV1alphaParameterMap( api.GoogleCloudIntegrationsV1alphaParameterMap o) { buildCounterGoogleCloudIntegrationsV1alphaParameterMap++; if (buildCounterGoogleCloudIntegrationsV1alphaParameterMap < 3) { - checkUnnamed125(o.entries!); + checkUnnamed127(o.entries!); unittest.expect( o.keyType!, unittest.equals('foo'), @@ -9401,12 +9577,12 @@ void checkGoogleCloudIntegrationsV1alphaParameterMapField( buildCounterGoogleCloudIntegrationsV1alphaParameterMapField--; } -core.List buildUnnamed126() => [ +core.List buildUnnamed128() => [ 'foo', 'foo', ]; -void checkUnnamed126(core.List o) { +void checkUnnamed128(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -9426,7 +9602,7 @@ api.GoogleCloudIntegrationsV1alphaProjectProperties if (buildCounterGoogleCloudIntegrationsV1alphaProjectProperties < 3) { o.billingType = 'foo'; o.ipEnablementState = 'foo'; - o.provisionedRegions = buildUnnamed126(); + o.provisionedRegions = buildUnnamed128(); } buildCounterGoogleCloudIntegrationsV1alphaProjectProperties--; return o; @@ -9444,7 +9620,7 @@ void checkGoogleCloudIntegrationsV1alphaProjectProperties( o.ipEnablementState!, unittest.equals('foo'), ); - checkUnnamed126(o.provisionedRegions!); + checkUnnamed128(o.provisionedRegions!); } buildCounterGoogleCloudIntegrationsV1alphaProjectProperties--; } @@ -9481,7 +9657,7 @@ void checkGoogleCloudIntegrationsV1alphaProvisionClientRequest( buildCounterGoogleCloudIntegrationsV1alphaProvisionClientRequest--; } -core.Map buildUnnamed127() => { +core.Map buildUnnamed129() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -9494,7 +9670,7 @@ core.Map buildUnnamed127() => { }, }; -void checkUnnamed127(core.Map o) { +void checkUnnamed129(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted17 = (o['x']!) 
as core.Map; unittest.expect(casted17, unittest.hasLength(3)); @@ -9536,7 +9712,7 @@ api.GoogleCloudIntegrationsV1alphaPublishIntegrationVersionRequest buildCounterGoogleCloudIntegrationsV1alphaPublishIntegrationVersionRequest++; if (buildCounterGoogleCloudIntegrationsV1alphaPublishIntegrationVersionRequest < 3) { - o.configParameters = buildUnnamed127(); + o.configParameters = buildUnnamed129(); } buildCounterGoogleCloudIntegrationsV1alphaPublishIntegrationVersionRequest--; return o; @@ -9547,7 +9723,7 @@ void checkGoogleCloudIntegrationsV1alphaPublishIntegrationVersionRequest( buildCounterGoogleCloudIntegrationsV1alphaPublishIntegrationVersionRequest++; if (buildCounterGoogleCloudIntegrationsV1alphaPublishIntegrationVersionRequest < 3) { - checkUnnamed127(o.configParameters!); + checkUnnamed129(o.configParameters!); } buildCounterGoogleCloudIntegrationsV1alphaPublishIntegrationVersionRequest--; } @@ -9625,7 +9801,7 @@ void checkGoogleCloudIntegrationsV1alphaReplayExecutionRequest( buildCounterGoogleCloudIntegrationsV1alphaReplayExecutionRequest--; } -core.Map buildUnnamed128() => { +core.Map buildUnnamed130() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -9638,7 +9814,7 @@ core.Map buildUnnamed128() => { }, }; -void checkUnnamed128(core.Map o) { +void checkUnnamed130(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted19 = (o['x']!) as core.Map; unittest.expect(casted19, unittest.hasLength(3)); @@ -9677,7 +9853,7 @@ api.GoogleCloudIntegrationsV1alphaReplayExecutionResponse buildCounterGoogleCloudIntegrationsV1alphaReplayExecutionResponse++; if (buildCounterGoogleCloudIntegrationsV1alphaReplayExecutionResponse < 3) { o.executionId = 'foo'; - o.outputParameters = buildUnnamed128(); + o.outputParameters = buildUnnamed130(); o.replayedExecutionId = 'foo'; } buildCounterGoogleCloudIntegrationsV1alphaReplayExecutionResponse--; @@ -9692,7 +9868,7 @@ void checkGoogleCloudIntegrationsV1alphaReplayExecutionResponse( o.executionId!, unittest.equals('foo'), ); - checkUnnamed128(o.outputParameters!); + checkUnnamed130(o.outputParameters!); unittest.expect( o.replayedExecutionId!, unittest.equals('foo'), @@ -9811,12 +9987,12 @@ void checkGoogleCloudIntegrationsV1alphaRuntimeEntitySchema( } core.Map - buildUnnamed129() => { + buildUnnamed131() => { 'x': buildGoogleCloudIntegrationsV1alphaValueType(), 'y': buildGoogleCloudIntegrationsV1alphaValueType(), }; -void checkUnnamed129( +void checkUnnamed131( core.Map o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudIntegrationsV1alphaValueType(o['x']!); @@ -9824,12 +10000,12 @@ void checkUnnamed129( } core.List - buildUnnamed130() => [ + buildUnnamed132() => [ buildEnterpriseCrmFrontendsEventbusProtoParameterEntry(), buildEnterpriseCrmFrontendsEventbusProtoParameterEntry(), ]; -void checkUnnamed130( +void checkUnnamed132( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkEnterpriseCrmFrontendsEventbusProtoParameterEntry(o[0]); @@ -9844,8 +10020,8 @@ api.GoogleCloudIntegrationsV1alphaScheduleIntegrationsRequest buildCounterGoogleCloudIntegrationsV1alphaScheduleIntegrationsRequest++; if (buildCounterGoogleCloudIntegrationsV1alphaScheduleIntegrationsRequest < 3) { - o.inputParameters = buildUnnamed129(); - o.parameterEntries = buildUnnamed130(); + o.inputParameters = buildUnnamed131(); + o.parameterEntries = buildUnnamed132(); o.parameters = buildEnterpriseCrmEventbusProtoEventParameters(); o.requestId = 'foo'; o.scheduleTime = 'foo'; @@ -9861,8 +10037,8 @@ void 
checkGoogleCloudIntegrationsV1alphaScheduleIntegrationsRequest( buildCounterGoogleCloudIntegrationsV1alphaScheduleIntegrationsRequest++; if (buildCounterGoogleCloudIntegrationsV1alphaScheduleIntegrationsRequest < 3) { - checkUnnamed129(o.inputParameters!); - checkUnnamed130(o.parameterEntries!); + checkUnnamed131(o.inputParameters!); + checkUnnamed132(o.parameterEntries!); checkEnterpriseCrmEventbusProtoEventParameters(o.parameters!); unittest.expect( o.requestId!, @@ -9884,12 +10060,12 @@ void checkGoogleCloudIntegrationsV1alphaScheduleIntegrationsRequest( buildCounterGoogleCloudIntegrationsV1alphaScheduleIntegrationsRequest--; } -core.List buildUnnamed131() => [ +core.List buildUnnamed133() => [ 'foo', 'foo', ]; -void checkUnnamed131(core.List o) { +void checkUnnamed133(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -9909,7 +10085,7 @@ api.GoogleCloudIntegrationsV1alphaScheduleIntegrationsResponse buildCounterGoogleCloudIntegrationsV1alphaScheduleIntegrationsResponse++; if (buildCounterGoogleCloudIntegrationsV1alphaScheduleIntegrationsResponse < 3) { - o.executionInfoIds = buildUnnamed131(); + o.executionInfoIds = buildUnnamed133(); } buildCounterGoogleCloudIntegrationsV1alphaScheduleIntegrationsResponse--; return o; @@ -9920,17 +10096,17 @@ void checkGoogleCloudIntegrationsV1alphaScheduleIntegrationsResponse( buildCounterGoogleCloudIntegrationsV1alphaScheduleIntegrationsResponse++; if (buildCounterGoogleCloudIntegrationsV1alphaScheduleIntegrationsResponse < 3) { - checkUnnamed131(o.executionInfoIds!); + checkUnnamed133(o.executionInfoIds!); } buildCounterGoogleCloudIntegrationsV1alphaScheduleIntegrationsResponse--; } -core.List buildUnnamed132() => [ +core.List buildUnnamed134() => [ buildGoogleCloudIntegrationsV1alphaTemplate(), buildGoogleCloudIntegrationsV1alphaTemplate(), ]; -void checkUnnamed132(core.List o) { +void checkUnnamed134(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudIntegrationsV1alphaTemplate(o[0]); checkGoogleCloudIntegrationsV1alphaTemplate(o[1]); @@ -9943,7 +10119,7 @@ api.GoogleCloudIntegrationsV1alphaSearchTemplatesResponse buildCounterGoogleCloudIntegrationsV1alphaSearchTemplatesResponse++; if (buildCounterGoogleCloudIntegrationsV1alphaSearchTemplatesResponse < 3) { o.nextPageToken = 'foo'; - o.templates = buildUnnamed132(); + o.templates = buildUnnamed134(); } buildCounterGoogleCloudIntegrationsV1alphaSearchTemplatesResponse--; return o; @@ -9957,7 +10133,7 @@ void checkGoogleCloudIntegrationsV1alphaSearchTemplatesResponse( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed132(o.templates!); + checkUnnamed134(o.templates!); } buildCounterGoogleCloudIntegrationsV1alphaSearchTemplatesResponse--; } @@ -10082,12 +10258,12 @@ void checkGoogleCloudIntegrationsV1alphaSfdcChannel( buildCounterGoogleCloudIntegrationsV1alphaSfdcChannel--; } -core.List buildUnnamed133() => [ +core.List buildUnnamed135() => [ 'foo', 'foo', ]; -void checkUnnamed133(core.List o) { +void checkUnnamed135(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -10105,7 +10281,7 @@ api.GoogleCloudIntegrationsV1alphaSfdcInstance final o = api.GoogleCloudIntegrationsV1alphaSfdcInstance(); buildCounterGoogleCloudIntegrationsV1alphaSfdcInstance++; if (buildCounterGoogleCloudIntegrationsV1alphaSfdcInstance < 3) { - o.authConfigId = buildUnnamed133(); + o.authConfigId = buildUnnamed135(); o.createTime = 'foo'; o.deleteTime = 'foo'; o.description = 'foo'; @@ -10123,7 +10299,7 @@ void 
checkGoogleCloudIntegrationsV1alphaSfdcInstance( api.GoogleCloudIntegrationsV1alphaSfdcInstance o) { buildCounterGoogleCloudIntegrationsV1alphaSfdcInstance++; if (buildCounterGoogleCloudIntegrationsV1alphaSfdcInstance < 3) { - checkUnnamed133(o.authConfigId!); + checkUnnamed135(o.authConfigId!); unittest.expect( o.createTime!, unittest.equals('foo'), @@ -10160,12 +10336,12 @@ void checkGoogleCloudIntegrationsV1alphaSfdcInstance( buildCounterGoogleCloudIntegrationsV1alphaSfdcInstance--; } -core.List buildUnnamed134() => [ +core.List buildUnnamed136() => [ 'foo', 'foo', ]; -void checkUnnamed134(core.List o) { +void checkUnnamed136(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -10183,7 +10359,7 @@ api.GoogleCloudIntegrationsV1alphaShareTemplateRequest final o = api.GoogleCloudIntegrationsV1alphaShareTemplateRequest(); buildCounterGoogleCloudIntegrationsV1alphaShareTemplateRequest++; if (buildCounterGoogleCloudIntegrationsV1alphaShareTemplateRequest < 3) { - o.resourceNames = buildUnnamed134(); + o.resourceNames = buildUnnamed136(); } buildCounterGoogleCloudIntegrationsV1alphaShareTemplateRequest--; return o; @@ -10193,17 +10369,17 @@ void checkGoogleCloudIntegrationsV1alphaShareTemplateRequest( api.GoogleCloudIntegrationsV1alphaShareTemplateRequest o) { buildCounterGoogleCloudIntegrationsV1alphaShareTemplateRequest++; if (buildCounterGoogleCloudIntegrationsV1alphaShareTemplateRequest < 3) { - checkUnnamed134(o.resourceNames!); + checkUnnamed136(o.resourceNames!); } buildCounterGoogleCloudIntegrationsV1alphaShareTemplateRequest--; } -core.List buildUnnamed135() => [ +core.List buildUnnamed137() => [ 'foo', 'foo', ]; -void checkUnnamed135(core.List o) { +void checkUnnamed137(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -10221,7 +10397,7 @@ api.GoogleCloudIntegrationsV1alphaStringParameterArray final o = api.GoogleCloudIntegrationsV1alphaStringParameterArray(); buildCounterGoogleCloudIntegrationsV1alphaStringParameterArray++; if (buildCounterGoogleCloudIntegrationsV1alphaStringParameterArray < 3) { - o.stringValues = buildUnnamed135(); + o.stringValues = buildUnnamed137(); } buildCounterGoogleCloudIntegrationsV1alphaStringParameterArray--; return o; @@ -10231,7 +10407,7 @@ void checkGoogleCloudIntegrationsV1alphaStringParameterArray( api.GoogleCloudIntegrationsV1alphaStringParameterArray o) { buildCounterGoogleCloudIntegrationsV1alphaStringParameterArray++; if (buildCounterGoogleCloudIntegrationsV1alphaStringParameterArray < 3) { - checkUnnamed135(o.stringValues!); + checkUnnamed137(o.stringValues!); } buildCounterGoogleCloudIntegrationsV1alphaStringParameterArray--; } @@ -10322,12 +10498,12 @@ void checkGoogleCloudIntegrationsV1alphaSuspension( buildCounterGoogleCloudIntegrationsV1alphaSuspension--; } -core.List buildUnnamed136() => [ +core.List buildUnnamed138() => [ 'foo', 'foo', ]; -void checkUnnamed136(core.List o) { +void checkUnnamed138(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -10346,7 +10522,7 @@ api.GoogleCloudIntegrationsV1alphaSuspensionApprovalConfig buildCounterGoogleCloudIntegrationsV1alphaSuspensionApprovalConfig++; if (buildCounterGoogleCloudIntegrationsV1alphaSuspensionApprovalConfig < 3) { o.customMessage = 'foo'; - o.emailAddresses = buildUnnamed136(); + o.emailAddresses = buildUnnamed138(); o.expiration = buildGoogleCloudIntegrationsV1alphaSuspensionApprovalExpiration(); } @@ -10362,7 +10538,7 @@ void 
checkGoogleCloudIntegrationsV1alphaSuspensionApprovalConfig( o.customMessage!, unittest.equals('foo'), ); - checkUnnamed136(o.emailAddresses!); + checkUnnamed138(o.emailAddresses!); checkGoogleCloudIntegrationsV1alphaSuspensionApprovalExpiration( o.expiration!); } @@ -10538,24 +10714,24 @@ void checkGoogleCloudIntegrationsV1alphaTakeoverTestCaseEditLockRequest( buildCounterGoogleCloudIntegrationsV1alphaTakeoverTestCaseEditLockRequest--; } -core.List buildUnnamed137() => [ +core.List buildUnnamed139() => [ buildGoogleCloudIntegrationsV1alphaNextTask(), buildGoogleCloudIntegrationsV1alphaNextTask(), ]; -void checkUnnamed137(core.List o) { +void checkUnnamed139(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudIntegrationsV1alphaNextTask(o[0]); checkGoogleCloudIntegrationsV1alphaNextTask(o[1]); } core.Map - buildUnnamed138() => { + buildUnnamed140() => { 'x': buildGoogleCloudIntegrationsV1alphaEventParameter(), 'y': buildGoogleCloudIntegrationsV1alphaEventParameter(), }; -void checkUnnamed138( +void checkUnnamed140( core.Map o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudIntegrationsV1alphaEventParameter(o['x']!); @@ -10576,9 +10752,9 @@ api.GoogleCloudIntegrationsV1alphaTaskConfig o.externalTaskType = 'foo'; o.failurePolicy = buildGoogleCloudIntegrationsV1alphaFailurePolicy(); o.jsonValidationOption = 'foo'; - o.nextTasks = buildUnnamed137(); + o.nextTasks = buildUnnamed139(); o.nextTasksExecutionPolicy = 'foo'; - o.parameters = buildUnnamed138(); + o.parameters = buildUnnamed140(); o.position = buildGoogleCloudIntegrationsV1alphaCoordinate(); o.successPolicy = buildGoogleCloudIntegrationsV1alphaSuccessPolicy(); o.synchronousCallFailurePolicy = @@ -10619,12 +10795,12 @@ void checkGoogleCloudIntegrationsV1alphaTaskConfig( o.jsonValidationOption!, unittest.equals('foo'), ); - checkUnnamed137(o.nextTasks!); + checkUnnamed139(o.nextTasks!); unittest.expect( o.nextTasksExecutionPolicy!, unittest.equals('foo'), ); - checkUnnamed138(o.parameters!); + checkUnnamed140(o.parameters!); checkGoogleCloudIntegrationsV1alphaCoordinate(o.position!); checkGoogleCloudIntegrationsV1alphaSuccessPolicy(o.successPolicy!); checkGoogleCloudIntegrationsV1alphaFailurePolicy( @@ -10649,12 +10825,12 @@ void checkGoogleCloudIntegrationsV1alphaTaskConfig( buildCounterGoogleCloudIntegrationsV1alphaTaskConfig--; } -core.List buildUnnamed139() => [ +core.List buildUnnamed141() => [ buildGoogleCloudIntegrationsV1alphaAttemptStats(), buildGoogleCloudIntegrationsV1alphaAttemptStats(), ]; -void checkUnnamed139( +void checkUnnamed141( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudIntegrationsV1alphaAttemptStats(o[0]); @@ -10667,7 +10843,7 @@ api.GoogleCloudIntegrationsV1alphaTaskExecutionDetails final o = api.GoogleCloudIntegrationsV1alphaTaskExecutionDetails(); buildCounterGoogleCloudIntegrationsV1alphaTaskExecutionDetails++; if (buildCounterGoogleCloudIntegrationsV1alphaTaskExecutionDetails < 3) { - o.taskAttemptStats = buildUnnamed139(); + o.taskAttemptStats = buildUnnamed141(); o.taskExecutionState = 'foo'; o.taskNumber = 'foo'; } @@ -10679,7 +10855,7 @@ void checkGoogleCloudIntegrationsV1alphaTaskExecutionDetails( api.GoogleCloudIntegrationsV1alphaTaskExecutionDetails o) { buildCounterGoogleCloudIntegrationsV1alphaTaskExecutionDetails++; if (buildCounterGoogleCloudIntegrationsV1alphaTaskExecutionDetails < 3) { - checkUnnamed139(o.taskAttemptStats!); + checkUnnamed141(o.taskAttemptStats!); unittest.expect( o.taskExecutionState!, unittest.equals('foo'), @@ 
-10692,12 +10868,12 @@ void checkGoogleCloudIntegrationsV1alphaTaskExecutionDetails( buildCounterGoogleCloudIntegrationsV1alphaTaskExecutionDetails--; } -core.List buildUnnamed140() => [ +core.List buildUnnamed142() => [ 'foo', 'foo', ]; -void checkUnnamed140(core.List o) { +void checkUnnamed142(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -10710,24 +10886,24 @@ void checkUnnamed140(core.List o) { } core.List - buildUnnamed141() => [ + buildUnnamed143() => [ buildGoogleCloudIntegrationsV1alphaTemplateComponent(), buildGoogleCloudIntegrationsV1alphaTemplateComponent(), ]; -void checkUnnamed141( +void checkUnnamed143( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudIntegrationsV1alphaTemplateComponent(o[0]); checkGoogleCloudIntegrationsV1alphaTemplateComponent(o[1]); } -core.List buildUnnamed142() => [ +core.List buildUnnamed144() => [ 'foo', 'foo', ]; -void checkUnnamed142(core.List o) { +void checkUnnamed144(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -10739,12 +10915,12 @@ void checkUnnamed142(core.List o) { ); } -core.List buildUnnamed143() => [ +core.List buildUnnamed145() => [ 'foo', 'foo', ]; -void checkUnnamed143(core.List o) { +void checkUnnamed145(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -10763,16 +10939,16 @@ api.GoogleCloudIntegrationsV1alphaTemplate buildCounterGoogleCloudIntegrationsV1alphaTemplate++; if (buildCounterGoogleCloudIntegrationsV1alphaTemplate < 3) { o.author = 'foo'; - o.categories = buildUnnamed140(); - o.components = buildUnnamed141(); + o.categories = buildUnnamed142(); + o.components = buildUnnamed143(); o.createTime = 'foo'; o.description = 'foo'; o.displayName = 'foo'; o.docLink = 'foo'; o.lastUsedTime = 'foo'; o.name = 'foo'; - o.sharedWith = buildUnnamed142(); - o.tags = buildUnnamed143(); + o.sharedWith = buildUnnamed144(); + o.tags = buildUnnamed145(); o.templateBundle = buildGoogleCloudIntegrationsV1alphaTemplateBundle(); o.updateTime = 'foo'; o.usageCount = 'foo'; @@ -10791,8 +10967,8 @@ void checkGoogleCloudIntegrationsV1alphaTemplate( o.author!, unittest.equals('foo'), ); - checkUnnamed140(o.categories!); - checkUnnamed141(o.components!); + checkUnnamed142(o.categories!); + checkUnnamed143(o.components!); unittest.expect( o.createTime!, unittest.equals('foo'), @@ -10817,8 +10993,8 @@ void checkGoogleCloudIntegrationsV1alphaTemplate( o.name!, unittest.equals('foo'), ); - checkUnnamed142(o.sharedWith!); - checkUnnamed143(o.tags!); + checkUnnamed144(o.sharedWith!); + checkUnnamed145(o.tags!); checkGoogleCloudIntegrationsV1alphaTemplateBundle(o.templateBundle!); unittest.expect( o.updateTime!, @@ -10841,12 +11017,12 @@ void checkGoogleCloudIntegrationsV1alphaTemplate( } core.List - buildUnnamed144() => [ + buildUnnamed146() => [ buildGoogleCloudIntegrationsV1alphaIntegrationVersionTemplate(), buildGoogleCloudIntegrationsV1alphaIntegrationVersionTemplate(), ]; -void checkUnnamed144( +void checkUnnamed146( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudIntegrationsV1alphaIntegrationVersionTemplate(o[0]); @@ -10861,7 +11037,7 @@ api.GoogleCloudIntegrationsV1alphaTemplateBundle if (buildCounterGoogleCloudIntegrationsV1alphaTemplateBundle < 3) { o.integrationVersionTemplate = buildGoogleCloudIntegrationsV1alphaIntegrationVersionTemplate(); - o.subIntegrationVersionTemplates = buildUnnamed144(); + o.subIntegrationVersionTemplates = buildUnnamed146(); } 
buildCounterGoogleCloudIntegrationsV1alphaTemplateBundle--; return o; @@ -10873,7 +11049,7 @@ void checkGoogleCloudIntegrationsV1alphaTemplateBundle( if (buildCounterGoogleCloudIntegrationsV1alphaTemplateBundle < 3) { checkGoogleCloudIntegrationsV1alphaIntegrationVersionTemplate( o.integrationVersionTemplate!); - checkUnnamed144(o.subIntegrationVersionTemplates!); + checkUnnamed146(o.subIntegrationVersionTemplates!); } buildCounterGoogleCloudIntegrationsV1alphaTemplateBundle--; } @@ -10908,25 +11084,25 @@ void checkGoogleCloudIntegrationsV1alphaTemplateComponent( } core.List - buildUnnamed145() => [ + buildUnnamed147() => [ buildGoogleCloudIntegrationsV1alphaIntegrationParameter(), buildGoogleCloudIntegrationsV1alphaIntegrationParameter(), ]; -void checkUnnamed145( +void checkUnnamed147( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudIntegrationsV1alphaIntegrationParameter(o[0]); checkGoogleCloudIntegrationsV1alphaIntegrationParameter(o[1]); } -core.List buildUnnamed146() => +core.List buildUnnamed148() => [ buildGoogleCloudIntegrationsV1alphaTestTaskConfig(), buildGoogleCloudIntegrationsV1alphaTestTaskConfig(), ]; -void checkUnnamed146( +void checkUnnamed148( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudIntegrationsV1alphaTestTaskConfig(o[0]); @@ -10947,8 +11123,8 @@ api.GoogleCloudIntegrationsV1alphaTestCase o.lastModifierEmail = 'foo'; o.lockHolderEmail = 'foo'; o.name = 'foo'; - o.testInputParameters = buildUnnamed145(); - o.testTaskConfigs = buildUnnamed146(); + o.testInputParameters = buildUnnamed147(); + o.testTaskConfigs = buildUnnamed148(); o.triggerConfig = buildGoogleCloudIntegrationsV1alphaTriggerConfig(); o.triggerId = 'foo'; o.updateTime = 'foo'; @@ -10993,8 +11169,8 @@ void checkGoogleCloudIntegrationsV1alphaTestCase( o.name!, unittest.equals('foo'), ); - checkUnnamed145(o.testInputParameters!); - checkUnnamed146(o.testTaskConfigs!); + checkUnnamed147(o.testInputParameters!); + checkUnnamed148(o.testTaskConfigs!); checkGoogleCloudIntegrationsV1alphaTriggerConfig(o.triggerConfig!); unittest.expect( o.triggerId!, @@ -11008,7 +11184,7 @@ void checkGoogleCloudIntegrationsV1alphaTestCase( buildCounterGoogleCloudIntegrationsV1alphaTestCase--; } -core.Map buildUnnamed147() => { +core.Map buildUnnamed149() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -11021,7 +11197,7 @@ core.Map buildUnnamed147() => { }, }; -void checkUnnamed147(core.Map o) { +void checkUnnamed149(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted21 = (o['x']!) 
as core.Map; unittest.expect(casted21, unittest.hasLength(3)); @@ -11054,12 +11230,12 @@ void checkUnnamed147(core.Map o) { } core.Map - buildUnnamed148() => { + buildUnnamed150() => { 'x': buildGoogleCloudIntegrationsV1alphaValueType(), 'y': buildGoogleCloudIntegrationsV1alphaValueType(), }; -void checkUnnamed148( +void checkUnnamed150( core.Map o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudIntegrationsV1alphaValueType(o['x']!); @@ -11073,9 +11249,9 @@ api.GoogleCloudIntegrationsV1alphaTestIntegrationsRequest buildCounterGoogleCloudIntegrationsV1alphaTestIntegrationsRequest++; if (buildCounterGoogleCloudIntegrationsV1alphaTestIntegrationsRequest < 3) { o.clientId = 'foo'; - o.configParameters = buildUnnamed147(); + o.configParameters = buildUnnamed149(); o.deadlineSecondsTime = 'foo'; - o.inputParameters = buildUnnamed148(); + o.inputParameters = buildUnnamed150(); o.integrationVersion = buildGoogleCloudIntegrationsV1alphaIntegrationVersion(); o.parameters = buildEnterpriseCrmFrontendsEventbusProtoEventParameters(); @@ -11094,12 +11270,12 @@ void checkGoogleCloudIntegrationsV1alphaTestIntegrationsRequest( o.clientId!, unittest.equals('foo'), ); - checkUnnamed147(o.configParameters!); + checkUnnamed149(o.configParameters!); unittest.expect( o.deadlineSecondsTime!, unittest.equals('foo'), ); - checkUnnamed148(o.inputParameters!); + checkUnnamed150(o.inputParameters!); checkGoogleCloudIntegrationsV1alphaIntegrationVersion( o.integrationVersion!); checkEnterpriseCrmFrontendsEventbusProtoEventParameters(o.parameters!); @@ -11113,12 +11289,12 @@ void checkGoogleCloudIntegrationsV1alphaTestIntegrationsRequest( } core.List - buildUnnamed149() => [ + buildUnnamed151() => [ buildEnterpriseCrmFrontendsEventbusProtoParameterEntry(), buildEnterpriseCrmFrontendsEventbusProtoParameterEntry(), ]; -void checkUnnamed149( +void checkUnnamed151( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkEnterpriseCrmFrontendsEventbusProtoParameterEntry(o[0]); @@ -11126,12 +11302,12 @@ void checkUnnamed149( } core.Map - buildUnnamed150() => { + buildUnnamed152() => { 'x': buildGoogleCloudIntegrationsV1alphaValueType(), 'y': buildGoogleCloudIntegrationsV1alphaValueType(), }; -void checkUnnamed150( +void checkUnnamed152( core.Map o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudIntegrationsV1alphaValueType(o['x']!); @@ -11148,8 +11324,8 @@ api.GoogleCloudIntegrationsV1alphaTestIntegrationsResponse buildEnterpriseCrmFrontendsEventbusProtoEventParameters(); o.executionFailed = true; o.executionId = 'foo'; - o.parameterEntries = buildUnnamed149(); - o.parameters = buildUnnamed150(); + o.parameterEntries = buildUnnamed151(); + o.parameters = buildUnnamed152(); } buildCounterGoogleCloudIntegrationsV1alphaTestIntegrationsResponse--; return o; @@ -11165,18 +11341,18 @@ void checkGoogleCloudIntegrationsV1alphaTestIntegrationsResponse( o.executionId!, unittest.equals('foo'), ); - checkUnnamed149(o.parameterEntries!); - checkUnnamed150(o.parameters!); + checkUnnamed151(o.parameterEntries!); + checkUnnamed152(o.parameters!); } buildCounterGoogleCloudIntegrationsV1alphaTestIntegrationsResponse--; } -core.List buildUnnamed151() => [ +core.List buildUnnamed153() => [ buildGoogleCloudIntegrationsV1alphaAssertion(), buildGoogleCloudIntegrationsV1alphaAssertion(), ]; -void checkUnnamed151(core.List o) { +void checkUnnamed153(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudIntegrationsV1alphaAssertion(o[0]); checkGoogleCloudIntegrationsV1alphaAssertion(o[1]); @@ 
-11188,7 +11364,7 @@ api.GoogleCloudIntegrationsV1alphaTestTaskConfig final o = api.GoogleCloudIntegrationsV1alphaTestTaskConfig(); buildCounterGoogleCloudIntegrationsV1alphaTestTaskConfig++; if (buildCounterGoogleCloudIntegrationsV1alphaTestTaskConfig < 3) { - o.assertions = buildUnnamed151(); + o.assertions = buildUnnamed153(); o.mockConfig = buildGoogleCloudIntegrationsV1alphaMockConfig(); o.task = 'foo'; o.taskConfig = buildGoogleCloudIntegrationsV1alphaTaskConfig(); @@ -11202,7 +11378,7 @@ void checkGoogleCloudIntegrationsV1alphaTestTaskConfig( api.GoogleCloudIntegrationsV1alphaTestTaskConfig o) { buildCounterGoogleCloudIntegrationsV1alphaTestTaskConfig++; if (buildCounterGoogleCloudIntegrationsV1alphaTestTaskConfig < 3) { - checkUnnamed151(o.assertions!); + checkUnnamed153(o.assertions!); checkGoogleCloudIntegrationsV1alphaMockConfig(o.mockConfig!); unittest.expect( o.task!, @@ -11218,52 +11394,18 @@ void checkGoogleCloudIntegrationsV1alphaTestTaskConfig( } core.List - buildUnnamed152() => [ + buildUnnamed154() => [ buildGoogleCloudIntegrationsV1alphaIntegrationAlertConfig(), buildGoogleCloudIntegrationsV1alphaIntegrationAlertConfig(), ]; -void checkUnnamed152( +void checkUnnamed154( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudIntegrationsV1alphaIntegrationAlertConfig(o[0]); checkGoogleCloudIntegrationsV1alphaIntegrationAlertConfig(o[1]); } -core.List buildUnnamed153() => [ - 'foo', - 'foo', - ]; - -void checkUnnamed153(core.List o) { - unittest.expect(o, unittest.hasLength(2)); - unittest.expect( - o[0], - unittest.equals('foo'), - ); - unittest.expect( - o[1], - unittest.equals('foo'), - ); -} - -core.List buildUnnamed154() => [ - 'foo', - 'foo', - ]; - -void checkUnnamed154(core.List o) { - unittest.expect(o, unittest.hasLength(2)); - unittest.expect( - o[0], - unittest.equals('foo'), - ); - unittest.expect( - o[1], - unittest.equals('foo'), - ); -} - core.Map buildUnnamed155() => { 'x': 'foo', 'y': 'foo', @@ -11298,15 +11440,17 @@ api.GoogleCloudIntegrationsV1alphaTriggerConfig final o = api.GoogleCloudIntegrationsV1alphaTriggerConfig(); buildCounterGoogleCloudIntegrationsV1alphaTriggerConfig++; if (buildCounterGoogleCloudIntegrationsV1alphaTriggerConfig < 3) { - o.alertConfig = buildUnnamed152(); + o.alertConfig = buildUnnamed154(); o.cloudSchedulerConfig = buildGoogleCloudIntegrationsV1alphaCloudSchedulerConfig(); o.description = 'foo'; o.errorCatcherId = 'foo'; - o.inputVariables = buildUnnamed153(); + o.inputVariables = + buildGoogleCloudIntegrationsV1alphaTriggerConfigVariables(); o.label = 'foo'; o.nextTasksExecutionPolicy = 'foo'; - o.outputVariables = buildUnnamed154(); + o.outputVariables = + buildGoogleCloudIntegrationsV1alphaTriggerConfigVariables(); o.position = buildGoogleCloudIntegrationsV1alphaCoordinate(); o.properties = buildUnnamed155(); o.startTasks = buildUnnamed156(); @@ -11323,7 +11467,7 @@ void checkGoogleCloudIntegrationsV1alphaTriggerConfig( api.GoogleCloudIntegrationsV1alphaTriggerConfig o) { buildCounterGoogleCloudIntegrationsV1alphaTriggerConfig++; if (buildCounterGoogleCloudIntegrationsV1alphaTriggerConfig < 3) { - checkUnnamed152(o.alertConfig!); + checkUnnamed154(o.alertConfig!); checkGoogleCloudIntegrationsV1alphaCloudSchedulerConfig( o.cloudSchedulerConfig!); unittest.expect( @@ -11334,7 +11478,8 @@ void checkGoogleCloudIntegrationsV1alphaTriggerConfig( o.errorCatcherId!, unittest.equals('foo'), ); - checkUnnamed153(o.inputVariables!); + checkGoogleCloudIntegrationsV1alphaTriggerConfigVariables( + o.inputVariables!); 
unittest.expect( o.label!, unittest.equals('foo'), @@ -11343,7 +11488,8 @@ void checkGoogleCloudIntegrationsV1alphaTriggerConfig( o.nextTasksExecutionPolicy!, unittest.equals('foo'), ); - checkUnnamed154(o.outputVariables!); + checkGoogleCloudIntegrationsV1alphaTriggerConfigVariables( + o.outputVariables!); checkGoogleCloudIntegrationsV1alphaCoordinate(o.position!); checkUnnamed155(o.properties!); checkUnnamed156(o.startTasks!); @@ -11367,6 +11513,44 @@ void checkGoogleCloudIntegrationsV1alphaTriggerConfig( buildCounterGoogleCloudIntegrationsV1alphaTriggerConfig--; } +core.List buildUnnamed157() => [ + 'foo', + 'foo', + ]; + +void checkUnnamed157(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o[0], + unittest.equals('foo'), + ); + unittest.expect( + o[1], + unittest.equals('foo'), + ); +} + +core.int buildCounterGoogleCloudIntegrationsV1alphaTriggerConfigVariables = 0; +api.GoogleCloudIntegrationsV1alphaTriggerConfigVariables + buildGoogleCloudIntegrationsV1alphaTriggerConfigVariables() { + final o = api.GoogleCloudIntegrationsV1alphaTriggerConfigVariables(); + buildCounterGoogleCloudIntegrationsV1alphaTriggerConfigVariables++; + if (buildCounterGoogleCloudIntegrationsV1alphaTriggerConfigVariables < 3) { + o.names = buildUnnamed157(); + } + buildCounterGoogleCloudIntegrationsV1alphaTriggerConfigVariables--; + return o; +} + +void checkGoogleCloudIntegrationsV1alphaTriggerConfigVariables( + api.GoogleCloudIntegrationsV1alphaTriggerConfigVariables o) { + buildCounterGoogleCloudIntegrationsV1alphaTriggerConfigVariables++; + if (buildCounterGoogleCloudIntegrationsV1alphaTriggerConfigVariables < 3) { + checkUnnamed157(o.names!); + } + buildCounterGoogleCloudIntegrationsV1alphaTriggerConfigVariables--; +} + core.int buildCounterGoogleCloudIntegrationsV1alphaUnpublishIntegrationVersionRequest = 0; @@ -11389,12 +11573,12 @@ void checkGoogleCloudIntegrationsV1alphaUnpublishIntegrationVersionRequest( buildCounterGoogleCloudIntegrationsV1alphaUnpublishIntegrationVersionRequest--; } -core.List buildUnnamed157() => [ +core.List buildUnnamed158() => [ 'foo', 'foo', ]; -void checkUnnamed157(core.List o) { +void checkUnnamed158(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -11412,7 +11596,7 @@ api.GoogleCloudIntegrationsV1alphaUnshareTemplateRequest final o = api.GoogleCloudIntegrationsV1alphaUnshareTemplateRequest(); buildCounterGoogleCloudIntegrationsV1alphaUnshareTemplateRequest++; if (buildCounterGoogleCloudIntegrationsV1alphaUnshareTemplateRequest < 3) { - o.resourceNames = buildUnnamed157(); + o.resourceNames = buildUnnamed158(); } buildCounterGoogleCloudIntegrationsV1alphaUnshareTemplateRequest--; return o; @@ -11422,7 +11606,7 @@ void checkGoogleCloudIntegrationsV1alphaUnshareTemplateRequest( api.GoogleCloudIntegrationsV1alphaUnshareTemplateRequest o) { buildCounterGoogleCloudIntegrationsV1alphaUnshareTemplateRequest++; if (buildCounterGoogleCloudIntegrationsV1alphaUnshareTemplateRequest < 3) { - checkUnnamed157(o.resourceNames!); + checkUnnamed158(o.resourceNames!); } buildCounterGoogleCloudIntegrationsV1alphaUnshareTemplateRequest--; } @@ -11590,14 +11774,14 @@ void checkGoogleCloudIntegrationsV1alphaUploadTestCaseResponse( core.Map - buildUnnamed158() => { + buildUnnamed159() => { 'x': buildGoogleCloudIntegrationsV1alphaUseTemplateRequestIntegrationDetails(), 'y': buildGoogleCloudIntegrationsV1alphaUseTemplateRequestIntegrationDetails(), }; -void checkUnnamed158( +void checkUnnamed159( core.Map< core.String, api @@ -11619,7 
+11803,7 @@ api.GoogleCloudIntegrationsV1alphaUseTemplateRequest o.integrationDetails = buildGoogleCloudIntegrationsV1alphaUseTemplateRequestIntegrationDetails(); o.integrationRegion = 'foo'; - o.subIntegrations = buildUnnamed158(); + o.subIntegrations = buildUnnamed159(); } buildCounterGoogleCloudIntegrationsV1alphaUseTemplateRequest--; return o; @@ -11635,7 +11819,7 @@ void checkGoogleCloudIntegrationsV1alphaUseTemplateRequest( o.integrationRegion!, unittest.equals('foo'), ); - checkUnnamed158(o.subIntegrations!); + checkUnnamed159(o.subIntegrations!); } buildCounterGoogleCloudIntegrationsV1alphaUseTemplateRequest--; } @@ -11675,12 +11859,12 @@ void checkGoogleCloudIntegrationsV1alphaUseTemplateRequestIntegrationDetails( } core.List - buildUnnamed159() => [ + buildUnnamed160() => [ buildGoogleCloudIntegrationsV1alphaIntegrationVersion(), buildGoogleCloudIntegrationsV1alphaIntegrationVersion(), ]; -void checkUnnamed159( +void checkUnnamed160( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudIntegrationsV1alphaIntegrationVersion(o[0]); @@ -11695,7 +11879,7 @@ api.GoogleCloudIntegrationsV1alphaUseTemplateResponse if (buildCounterGoogleCloudIntegrationsV1alphaUseTemplateResponse < 3) { o.integrationVersion = buildGoogleCloudIntegrationsV1alphaIntegrationVersion(); - o.subIntegrationVersions = buildUnnamed159(); + o.subIntegrationVersions = buildUnnamed160(); } buildCounterGoogleCloudIntegrationsV1alphaUseTemplateResponse--; return o; @@ -11707,7 +11891,7 @@ void checkGoogleCloudIntegrationsV1alphaUseTemplateResponse( if (buildCounterGoogleCloudIntegrationsV1alphaUseTemplateResponse < 3) { checkGoogleCloudIntegrationsV1alphaIntegrationVersion( o.integrationVersion!); - checkUnnamed159(o.subIntegrationVersions!); + checkUnnamed160(o.subIntegrationVersions!); } buildCounterGoogleCloudIntegrationsV1alphaUseTemplateResponse--; } @@ -11880,12 +12064,12 @@ void checkGoogleProtobufEmpty(api.GoogleProtobufEmpty o) { buildCounterGoogleProtobufEmpty--; } -core.List buildUnnamed160() => [ +core.List buildUnnamed161() => [ 'foo', 'foo', ]; -void checkUnnamed160(core.List o) { +void checkUnnamed161(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -11897,12 +12081,12 @@ void checkUnnamed160(core.List o) { ); } -core.List buildUnnamed161() => [ +core.List buildUnnamed162() => [ 'foo', 'foo', ]; -void checkUnnamed161(core.List o) { +void checkUnnamed162(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -11914,12 +12098,12 @@ void checkUnnamed161(core.List o) { ); } -core.List buildUnnamed162() => [ +core.List buildUnnamed163() => [ 'foo', 'foo', ]; -void checkUnnamed162(core.List o) { +void checkUnnamed163(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -11931,12 +12115,12 @@ void checkUnnamed162(core.List o) { ); } -core.List buildUnnamed163() => [ +core.List buildUnnamed164() => [ 'foo', 'foo', ]; -void checkUnnamed163(core.List o) { +void checkUnnamed164(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -11948,12 +12132,12 @@ void checkUnnamed163(core.List o) { ); } -core.List buildUnnamed164() => [ +core.List buildUnnamed165() => [ 'foo', 'foo', ]; -void checkUnnamed164(core.List o) { +void checkUnnamed165(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -11965,12 +12149,12 @@ void checkUnnamed164(core.List o) { ); } -core.List buildUnnamed165() => [ +core.List buildUnnamed166() => [ 'foo', 'foo', ]; -void 
checkUnnamed165(core.List o) { +void checkUnnamed166(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -11982,12 +12166,12 @@ void checkUnnamed165(core.List o) { ); } -core.List buildUnnamed166() => [ +core.List buildUnnamed167() => [ 'foo', 'foo', ]; -void checkUnnamed166(core.List o) { +void checkUnnamed167(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -12949,6 +13133,19 @@ void main() { }); }); + unittest.group( + 'obj-schema-EnterpriseCrmFrontendsEventbusProtoTriggerConfigVariables', + () { + unittest.test('to-json--from-json', () async { + final o = + buildEnterpriseCrmFrontendsEventbusProtoTriggerConfigVariables(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.EnterpriseCrmFrontendsEventbusProtoTriggerConfigVariables + .fromJson(oJson as core.Map); + checkEnterpriseCrmFrontendsEventbusProtoTriggerConfigVariables(od); + }); + }); + unittest.group( 'obj-schema-EnterpriseCrmFrontendsEventbusProtoWorkflowParameterEntry', () { @@ -13195,6 +13392,20 @@ void main() { }); }); + unittest.group( + 'obj-schema-GoogleCloudConnectorsV1EventingRuntimeDataWebhookSubscriptions', + () { + unittest.test('to-json--from-json', () async { + final o = + buildGoogleCloudConnectorsV1EventingRuntimeDataWebhookSubscriptions(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.GoogleCloudConnectorsV1EventingRuntimeDataWebhookSubscriptions + .fromJson(oJson as core.Map); + checkGoogleCloudConnectorsV1EventingRuntimeDataWebhookSubscriptions(od); + }); + }); + unittest.group('obj-schema-GoogleCloudConnectorsV1EventingStatus', () { unittest.test('to-json--from-json', () async { final o = buildGoogleCloudConnectorsV1EventingStatus(); @@ -13295,6 +13506,17 @@ void main() { }); }); + unittest.group('obj-schema-GoogleCloudIntegrationsV1alphaApiTriggerResource', + () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudIntegrationsV1alphaApiTriggerResource(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudIntegrationsV1alphaApiTriggerResource.fromJson( + oJson as core.Map); + checkGoogleCloudIntegrationsV1alphaApiTriggerResource(od); + }); + }); + unittest.group('obj-schema-GoogleCloudIntegrationsV1alphaAssertion', () { unittest.test('to-json--from-json', () async { final o = buildGoogleCloudIntegrationsV1alphaAssertion(); @@ -13808,6 +14030,32 @@ void main() { }); }); + unittest.group( + 'obj-schema-GoogleCloudIntegrationsV1alphaGenerateOpenApiSpecRequest', + () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudIntegrationsV1alphaGenerateOpenApiSpecRequest(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.GoogleCloudIntegrationsV1alphaGenerateOpenApiSpecRequest.fromJson( + oJson as core.Map); + checkGoogleCloudIntegrationsV1alphaGenerateOpenApiSpecRequest(od); + }); + }); + + unittest.group( + 'obj-schema-GoogleCloudIntegrationsV1alphaGenerateOpenApiSpecResponse', + () { + unittest.test('to-json--from-json', () async { + final o = + buildGoogleCloudIntegrationsV1alphaGenerateOpenApiSpecResponse(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudIntegrationsV1alphaGenerateOpenApiSpecResponse + .fromJson(oJson as core.Map); + checkGoogleCloudIntegrationsV1alphaGenerateOpenApiSpecResponse(od); + }); + }); + unittest.group( 'obj-schema-GoogleCloudIntegrationsV1alphaGenerateTokenResponse', () { unittest.test('to-json--from-json', () 
async { @@ -14764,6 +15012,18 @@ void main() { }); }); + unittest.group( + 'obj-schema-GoogleCloudIntegrationsV1alphaTriggerConfigVariables', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudIntegrationsV1alphaTriggerConfigVariables(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.GoogleCloudIntegrationsV1alphaTriggerConfigVariables.fromJson( + oJson as core.Map); + checkGoogleCloudIntegrationsV1alphaTriggerConfigVariables(od); + }); + }); + unittest.group( 'obj-schema-GoogleCloudIntegrationsV1alphaUnpublishIntegrationVersionRequest', () { @@ -15146,6 +15406,67 @@ void main() { }); unittest.group('resource-ProjectsLocationsResource', () { + unittest.test('method--generateOpenApiSpec', () async { + final mock = HttpServerMock(); + final res = api.IntegrationsApi(mock).projects.locations; + final arg_request = + buildGoogleCloudIntegrationsV1alphaGenerateOpenApiSpecRequest(); + final arg_name = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.GoogleCloudIntegrationsV1alphaGenerateOpenApiSpecRequest + .fromJson(json as core.Map); + checkGoogleCloudIntegrationsV1alphaGenerateOpenApiSpecRequest(obj); + + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode( + buildGoogleCloudIntegrationsV1alphaGenerateOpenApiSpecResponse()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.generateOpenApiSpec(arg_request, arg_name, + $fields: arg_$fields); + checkGoogleCloudIntegrationsV1alphaGenerateOpenApiSpecResponse(response + as api.GoogleCloudIntegrationsV1alphaGenerateOpenApiSpecResponse); + }); + unittest.test('method--getClients', () async { final mock = HttpServerMock(); final res = api.IntegrationsApi(mock).projects.locations; @@ -17219,7 +17540,7 @@ void main() { final arg_filter = 'foo'; final arg_filterParams_customFilter = 'foo'; final arg_filterParams_endTime = 'foo'; - final arg_filterParams_eventStatuses = buildUnnamed160(); + final arg_filterParams_eventStatuses = buildUnnamed161(); final arg_filterParams_executionId = 'foo'; final arg_filterParams_parameterKey = 'foo'; final arg_filterParams_parameterPairKey = 'foo'; @@ -17227,7 +17548,7 @@ void main() { final arg_filterParams_parameterType = 'foo'; final arg_filterParams_parameterValue = 'foo'; final arg_filterParams_startTime = 'foo'; - final arg_filterParams_taskStatuses = buildUnnamed161(); + final arg_filterParams_taskStatuses = buildUnnamed162(); final arg_filterParams_workflowName = 'foo'; final arg_orderBy = 
'foo'; final arg_pageSize = 42; @@ -17808,7 +18129,7 @@ void main() { api.IntegrationsApi(mock).projects.locations.integrations.versions; final arg_name = 'foo'; final arg_fileFormat = 'foo'; - final arg_files = buildUnnamed162(); + final arg_files = buildUnnamed163(); final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -17874,7 +18195,7 @@ void main() { final res = api.IntegrationsApi(mock).projects.locations.integrations.versions; final arg_name = 'foo'; - final arg_files = buildUnnamed163(); + final arg_files = buildUnnamed164(); final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -20185,7 +20506,7 @@ void main() { final arg_filter = 'foo'; final arg_filterParams_customFilter = 'foo'; final arg_filterParams_endTime = 'foo'; - final arg_filterParams_eventStatuses = buildUnnamed164(); + final arg_filterParams_eventStatuses = buildUnnamed165(); final arg_filterParams_executionId = 'foo'; final arg_filterParams_parameterKey = 'foo'; final arg_filterParams_parameterPairKey = 'foo'; @@ -20193,7 +20514,7 @@ void main() { final arg_filterParams_parameterType = 'foo'; final arg_filterParams_parameterValue = 'foo'; final arg_filterParams_startTime = 'foo'; - final arg_filterParams_taskStatuses = buildUnnamed165(); + final arg_filterParams_taskStatuses = buildUnnamed166(); final arg_filterParams_workflowName = 'foo'; final arg_orderBy = 'foo'; final arg_pageSize = 42; @@ -20727,7 +21048,7 @@ void main() { .versions; final arg_name = 'foo'; final arg_fileFormat = 'foo'; - final arg_files = buildUnnamed166(); + final arg_files = buildUnnamed167(); final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; diff --git a/generated/googleapis/test/metastore/v2_test.dart b/generated/googleapis/test/metastore/v2_test.dart new file mode 100644 index 000000000..a78afa451 --- /dev/null +++ b/generated/googleapis/test/metastore/v2_test.dart @@ -0,0 +1,2342 @@ +// ignore_for_file: camel_case_types +// ignore_for_file: comment_references +// ignore_for_file: deprecated_member_use_from_same_package +// ignore_for_file: doc_directive_unknown +// ignore_for_file: lines_longer_than_80_chars +// ignore_for_file: non_constant_identifier_names +// ignore_for_file: prefer_const_declarations +// ignore_for_file: prefer_expression_function_bodies +// ignore_for_file: prefer_final_locals +// ignore_for_file: prefer_interpolation_to_compose_strings +// ignore_for_file: unintended_html_in_doc_comment +// ignore_for_file: unnecessary_brace_in_string_interps +// ignore_for_file: unnecessary_cast +// ignore_for_file: unnecessary_lambdas +// ignore_for_file: unnecessary_string_interpolations +// ignore_for_file: unreachable_from_main +// ignore_for_file: unused_local_variable + +import 'dart:async' as async; +import 'dart:convert' as convert; +import 'dart:core' as core; + +import 'package:googleapis/metastore/v2.dart' as api; +import 'package:http/http.dart' as http; +import 'package:test/test.dart' as unittest; + +import '../test_shared.dart'; + +core.int + buildCounterGoogleCloudMetastoreV2AlterMetadataResourceLocationRequest = 0; +api.GoogleCloudMetastoreV2AlterMetadataResourceLocationRequest + buildGoogleCloudMetastoreV2AlterMetadataResourceLocationRequest() { + final o = api.GoogleCloudMetastoreV2AlterMetadataResourceLocationRequest(); + buildCounterGoogleCloudMetastoreV2AlterMetadataResourceLocationRequest++; + if 
(buildCounterGoogleCloudMetastoreV2AlterMetadataResourceLocationRequest < + 3) { + o.locationUri = 'foo'; + o.resourceName = 'foo'; + } + buildCounterGoogleCloudMetastoreV2AlterMetadataResourceLocationRequest--; + return o; +} + +void checkGoogleCloudMetastoreV2AlterMetadataResourceLocationRequest( + api.GoogleCloudMetastoreV2AlterMetadataResourceLocationRequest o) { + buildCounterGoogleCloudMetastoreV2AlterMetadataResourceLocationRequest++; + if (buildCounterGoogleCloudMetastoreV2AlterMetadataResourceLocationRequest < + 3) { + unittest.expect( + o.locationUri!, + unittest.equals('foo'), + ); + unittest.expect( + o.resourceName!, + unittest.equals('foo'), + ); + } + buildCounterGoogleCloudMetastoreV2AlterMetadataResourceLocationRequest--; +} + +core.Map buildUnnamed0() => { + 'x': 'foo', + 'y': 'foo', + }; + +void checkUnnamed0(core.Map o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o['x']!, + unittest.equals('foo'), + ); + unittest.expect( + o['y']!, + unittest.equals('foo'), + ); +} + +core.int buildCounterGoogleCloudMetastoreV2AlterTablePropertiesRequest = 0; +api.GoogleCloudMetastoreV2AlterTablePropertiesRequest + buildGoogleCloudMetastoreV2AlterTablePropertiesRequest() { + final o = api.GoogleCloudMetastoreV2AlterTablePropertiesRequest(); + buildCounterGoogleCloudMetastoreV2AlterTablePropertiesRequest++; + if (buildCounterGoogleCloudMetastoreV2AlterTablePropertiesRequest < 3) { + o.properties = buildUnnamed0(); + o.tableName = 'foo'; + o.updateMask = 'foo'; + } + buildCounterGoogleCloudMetastoreV2AlterTablePropertiesRequest--; + return o; +} + +void checkGoogleCloudMetastoreV2AlterTablePropertiesRequest( + api.GoogleCloudMetastoreV2AlterTablePropertiesRequest o) { + buildCounterGoogleCloudMetastoreV2AlterTablePropertiesRequest++; + if (buildCounterGoogleCloudMetastoreV2AlterTablePropertiesRequest < 3) { + checkUnnamed0(o.properties!); + unittest.expect( + o.tableName!, + unittest.equals('foo'), + ); + unittest.expect( + o.updateMask!, + unittest.equals('foo'), + ); + } + buildCounterGoogleCloudMetastoreV2AlterTablePropertiesRequest--; +} + +core.Map buildUnnamed1() => { + 'x': 'foo', + 'y': 'foo', + }; + +void checkUnnamed1(core.Map o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o['x']!, + unittest.equals('foo'), + ); + unittest.expect( + o['y']!, + unittest.equals('foo'), + ); +} + +core.List buildUnnamed2() => [ + buildGoogleCloudMetastoreV2Endpoint(), + buildGoogleCloudMetastoreV2Endpoint(), + ]; + +void checkUnnamed2(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkGoogleCloudMetastoreV2Endpoint(o[0]); + checkGoogleCloudMetastoreV2Endpoint(o[1]); +} + +core.int buildCounterGoogleCloudMetastoreV2AuxiliaryVersionConfig = 0; +api.GoogleCloudMetastoreV2AuxiliaryVersionConfig + buildGoogleCloudMetastoreV2AuxiliaryVersionConfig() { + final o = api.GoogleCloudMetastoreV2AuxiliaryVersionConfig(); + buildCounterGoogleCloudMetastoreV2AuxiliaryVersionConfig++; + if (buildCounterGoogleCloudMetastoreV2AuxiliaryVersionConfig < 3) { + o.configOverrides = buildUnnamed1(); + o.endpoints = buildUnnamed2(); + o.version = 'foo'; + } + buildCounterGoogleCloudMetastoreV2AuxiliaryVersionConfig--; + return o; +} + +void checkGoogleCloudMetastoreV2AuxiliaryVersionConfig( + api.GoogleCloudMetastoreV2AuxiliaryVersionConfig o) { + buildCounterGoogleCloudMetastoreV2AuxiliaryVersionConfig++; + if (buildCounterGoogleCloudMetastoreV2AuxiliaryVersionConfig < 3) { + checkUnnamed1(o.configOverrides!); + checkUnnamed2(o.endpoints!); + unittest.expect( 
+ o.version!, + unittest.equals('foo'), + ); + } + buildCounterGoogleCloudMetastoreV2AuxiliaryVersionConfig--; +} + +core.List buildUnnamed3() => [ + 'foo', + 'foo', + ]; + +void checkUnnamed3(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o[0], + unittest.equals('foo'), + ); + unittest.expect( + o[1], + unittest.equals('foo'), + ); +} + +core.int buildCounterGoogleCloudMetastoreV2Backup = 0; +api.GoogleCloudMetastoreV2Backup buildGoogleCloudMetastoreV2Backup() { + final o = api.GoogleCloudMetastoreV2Backup(); + buildCounterGoogleCloudMetastoreV2Backup++; + if (buildCounterGoogleCloudMetastoreV2Backup < 3) { + o.createTime = 'foo'; + o.description = 'foo'; + o.endTime = 'foo'; + o.name = 'foo'; + o.restoringServices = buildUnnamed3(); + o.serviceRevision = buildGoogleCloudMetastoreV2Service(); + o.state = 'foo'; + } + buildCounterGoogleCloudMetastoreV2Backup--; + return o; +} + +void checkGoogleCloudMetastoreV2Backup(api.GoogleCloudMetastoreV2Backup o) { + buildCounterGoogleCloudMetastoreV2Backup++; + if (buildCounterGoogleCloudMetastoreV2Backup < 3) { + unittest.expect( + o.createTime!, + unittest.equals('foo'), + ); + unittest.expect( + o.description!, + unittest.equals('foo'), + ); + unittest.expect( + o.endTime!, + unittest.equals('foo'), + ); + unittest.expect( + o.name!, + unittest.equals('foo'), + ); + checkUnnamed3(o.restoringServices!); + checkGoogleCloudMetastoreV2Service(o.serviceRevision!); + unittest.expect( + o.state!, + unittest.equals('foo'), + ); + } + buildCounterGoogleCloudMetastoreV2Backup--; +} + +core.int buildCounterGoogleCloudMetastoreV2DataCatalogConfig = 0; +api.GoogleCloudMetastoreV2DataCatalogConfig + buildGoogleCloudMetastoreV2DataCatalogConfig() { + final o = api.GoogleCloudMetastoreV2DataCatalogConfig(); + buildCounterGoogleCloudMetastoreV2DataCatalogConfig++; + if (buildCounterGoogleCloudMetastoreV2DataCatalogConfig < 3) { + o.enabled = true; + } + buildCounterGoogleCloudMetastoreV2DataCatalogConfig--; + return o; +} + +void checkGoogleCloudMetastoreV2DataCatalogConfig( + api.GoogleCloudMetastoreV2DataCatalogConfig o) { + buildCounterGoogleCloudMetastoreV2DataCatalogConfig++; + if (buildCounterGoogleCloudMetastoreV2DataCatalogConfig < 3) { + unittest.expect(o.enabled!, unittest.isTrue); + } + buildCounterGoogleCloudMetastoreV2DataCatalogConfig--; +} + +core.int buildCounterGoogleCloudMetastoreV2DatabaseDump = 0; +api.GoogleCloudMetastoreV2DatabaseDump + buildGoogleCloudMetastoreV2DatabaseDump() { + final o = api.GoogleCloudMetastoreV2DatabaseDump(); + buildCounterGoogleCloudMetastoreV2DatabaseDump++; + if (buildCounterGoogleCloudMetastoreV2DatabaseDump < 3) { + o.gcsUri = 'foo'; + o.type = 'foo'; + } + buildCounterGoogleCloudMetastoreV2DatabaseDump--; + return o; +} + +void checkGoogleCloudMetastoreV2DatabaseDump( + api.GoogleCloudMetastoreV2DatabaseDump o) { + buildCounterGoogleCloudMetastoreV2DatabaseDump++; + if (buildCounterGoogleCloudMetastoreV2DatabaseDump < 3) { + unittest.expect( + o.gcsUri!, + unittest.equals('foo'), + ); + unittest.expect( + o.type!, + unittest.equals('foo'), + ); + } + buildCounterGoogleCloudMetastoreV2DatabaseDump--; +} + +core.int buildCounterGoogleCloudMetastoreV2EncryptionConfig = 0; +api.GoogleCloudMetastoreV2EncryptionConfig + buildGoogleCloudMetastoreV2EncryptionConfig() { + final o = api.GoogleCloudMetastoreV2EncryptionConfig(); + buildCounterGoogleCloudMetastoreV2EncryptionConfig++; + if (buildCounterGoogleCloudMetastoreV2EncryptionConfig < 3) {} + 
buildCounterGoogleCloudMetastoreV2EncryptionConfig--; + return o; +} + +void checkGoogleCloudMetastoreV2EncryptionConfig( + api.GoogleCloudMetastoreV2EncryptionConfig o) { + buildCounterGoogleCloudMetastoreV2EncryptionConfig++; + if (buildCounterGoogleCloudMetastoreV2EncryptionConfig < 3) {} + buildCounterGoogleCloudMetastoreV2EncryptionConfig--; +} + +core.int buildCounterGoogleCloudMetastoreV2Endpoint = 0; +api.GoogleCloudMetastoreV2Endpoint buildGoogleCloudMetastoreV2Endpoint() { + final o = api.GoogleCloudMetastoreV2Endpoint(); + buildCounterGoogleCloudMetastoreV2Endpoint++; + if (buildCounterGoogleCloudMetastoreV2Endpoint < 3) { + o.endpointUri = 'foo'; + o.region = 'foo'; + } + buildCounterGoogleCloudMetastoreV2Endpoint--; + return o; +} + +void checkGoogleCloudMetastoreV2Endpoint(api.GoogleCloudMetastoreV2Endpoint o) { + buildCounterGoogleCloudMetastoreV2Endpoint++; + if (buildCounterGoogleCloudMetastoreV2Endpoint < 3) { + unittest.expect( + o.endpointUri!, + unittest.equals('foo'), + ); + unittest.expect( + o.region!, + unittest.equals('foo'), + ); + } + buildCounterGoogleCloudMetastoreV2Endpoint--; +} + +core.int buildCounterGoogleCloudMetastoreV2ExportMetadataRequest = 0; +api.GoogleCloudMetastoreV2ExportMetadataRequest + buildGoogleCloudMetastoreV2ExportMetadataRequest() { + final o = api.GoogleCloudMetastoreV2ExportMetadataRequest(); + buildCounterGoogleCloudMetastoreV2ExportMetadataRequest++; + if (buildCounterGoogleCloudMetastoreV2ExportMetadataRequest < 3) { + o.databaseDumpType = 'foo'; + o.destinationGcsFolder = 'foo'; + o.requestId = 'foo'; + } + buildCounterGoogleCloudMetastoreV2ExportMetadataRequest--; + return o; +} + +void checkGoogleCloudMetastoreV2ExportMetadataRequest( + api.GoogleCloudMetastoreV2ExportMetadataRequest o) { + buildCounterGoogleCloudMetastoreV2ExportMetadataRequest++; + if (buildCounterGoogleCloudMetastoreV2ExportMetadataRequest < 3) { + unittest.expect( + o.databaseDumpType!, + unittest.equals('foo'), + ); + unittest.expect( + o.destinationGcsFolder!, + unittest.equals('foo'), + ); + unittest.expect( + o.requestId!, + unittest.equals('foo'), + ); + } + buildCounterGoogleCloudMetastoreV2ExportMetadataRequest--; +} + +core.Map + buildUnnamed4() => { + 'x': buildGoogleCloudMetastoreV2AuxiliaryVersionConfig(), + 'y': buildGoogleCloudMetastoreV2AuxiliaryVersionConfig(), + }; + +void checkUnnamed4( + core.Map o) { + unittest.expect(o, unittest.hasLength(2)); + checkGoogleCloudMetastoreV2AuxiliaryVersionConfig(o['x']!); + checkGoogleCloudMetastoreV2AuxiliaryVersionConfig(o['y']!); +} + +core.Map buildUnnamed5() => { + 'x': 'foo', + 'y': 'foo', + }; + +void checkUnnamed5(core.Map o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o['x']!, + unittest.equals('foo'), + ); + unittest.expect( + o['y']!, + unittest.equals('foo'), + ); +} + +core.int buildCounterGoogleCloudMetastoreV2HiveMetastoreConfig = 0; +api.GoogleCloudMetastoreV2HiveMetastoreConfig + buildGoogleCloudMetastoreV2HiveMetastoreConfig() { + final o = api.GoogleCloudMetastoreV2HiveMetastoreConfig(); + buildCounterGoogleCloudMetastoreV2HiveMetastoreConfig++; + if (buildCounterGoogleCloudMetastoreV2HiveMetastoreConfig < 3) { + o.auxiliaryVersions = buildUnnamed4(); + o.configOverrides = buildUnnamed5(); + o.endpointProtocol = 'foo'; + o.version = 'foo'; + } + buildCounterGoogleCloudMetastoreV2HiveMetastoreConfig--; + return o; +} + +void checkGoogleCloudMetastoreV2HiveMetastoreConfig( + api.GoogleCloudMetastoreV2HiveMetastoreConfig o) { + 
buildCounterGoogleCloudMetastoreV2HiveMetastoreConfig++; + if (buildCounterGoogleCloudMetastoreV2HiveMetastoreConfig < 3) { + checkUnnamed4(o.auxiliaryVersions!); + checkUnnamed5(o.configOverrides!); + unittest.expect( + o.endpointProtocol!, + unittest.equals('foo'), + ); + unittest.expect( + o.version!, + unittest.equals('foo'), + ); + } + buildCounterGoogleCloudMetastoreV2HiveMetastoreConfig--; +} + +core.int buildCounterGoogleCloudMetastoreV2ImportMetadataRequest = 0; +api.GoogleCloudMetastoreV2ImportMetadataRequest + buildGoogleCloudMetastoreV2ImportMetadataRequest() { + final o = api.GoogleCloudMetastoreV2ImportMetadataRequest(); + buildCounterGoogleCloudMetastoreV2ImportMetadataRequest++; + if (buildCounterGoogleCloudMetastoreV2ImportMetadataRequest < 3) { + o.databaseDump = buildGoogleCloudMetastoreV2DatabaseDump(); + o.description = 'foo'; + o.requestId = 'foo'; + } + buildCounterGoogleCloudMetastoreV2ImportMetadataRequest--; + return o; +} + +void checkGoogleCloudMetastoreV2ImportMetadataRequest( + api.GoogleCloudMetastoreV2ImportMetadataRequest o) { + buildCounterGoogleCloudMetastoreV2ImportMetadataRequest++; + if (buildCounterGoogleCloudMetastoreV2ImportMetadataRequest < 3) { + checkGoogleCloudMetastoreV2DatabaseDump(o.databaseDump!); + unittest.expect( + o.description!, + unittest.equals('foo'), + ); + unittest.expect( + o.requestId!, + unittest.equals('foo'), + ); + } + buildCounterGoogleCloudMetastoreV2ImportMetadataRequest--; +} + +core.int buildCounterGoogleCloudMetastoreV2LatestBackup = 0; +api.GoogleCloudMetastoreV2LatestBackup + buildGoogleCloudMetastoreV2LatestBackup() { + final o = api.GoogleCloudMetastoreV2LatestBackup(); + buildCounterGoogleCloudMetastoreV2LatestBackup++; + if (buildCounterGoogleCloudMetastoreV2LatestBackup < 3) { + o.backupId = 'foo'; + o.duration = 'foo'; + o.startTime = 'foo'; + o.state = 'foo'; + } + buildCounterGoogleCloudMetastoreV2LatestBackup--; + return o; +} + +void checkGoogleCloudMetastoreV2LatestBackup( + api.GoogleCloudMetastoreV2LatestBackup o) { + buildCounterGoogleCloudMetastoreV2LatestBackup++; + if (buildCounterGoogleCloudMetastoreV2LatestBackup < 3) { + unittest.expect( + o.backupId!, + unittest.equals('foo'), + ); + unittest.expect( + o.duration!, + unittest.equals('foo'), + ); + unittest.expect( + o.startTime!, + unittest.equals('foo'), + ); + unittest.expect( + o.state!, + unittest.equals('foo'), + ); + } + buildCounterGoogleCloudMetastoreV2LatestBackup--; +} + +core.List buildUnnamed6() => [ + buildGoogleCloudMetastoreV2Backup(), + buildGoogleCloudMetastoreV2Backup(), + ]; + +void checkUnnamed6(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkGoogleCloudMetastoreV2Backup(o[0]); + checkGoogleCloudMetastoreV2Backup(o[1]); +} + +core.List buildUnnamed7() => [ + 'foo', + 'foo', + ]; + +void checkUnnamed7(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o[0], + unittest.equals('foo'), + ); + unittest.expect( + o[1], + unittest.equals('foo'), + ); +} + +core.int buildCounterGoogleCloudMetastoreV2ListBackupsResponse = 0; +api.GoogleCloudMetastoreV2ListBackupsResponse + buildGoogleCloudMetastoreV2ListBackupsResponse() { + final o = api.GoogleCloudMetastoreV2ListBackupsResponse(); + buildCounterGoogleCloudMetastoreV2ListBackupsResponse++; + if (buildCounterGoogleCloudMetastoreV2ListBackupsResponse < 3) { + o.backups = buildUnnamed6(); + o.nextPageToken = 'foo'; + o.unreachable = buildUnnamed7(); + } + buildCounterGoogleCloudMetastoreV2ListBackupsResponse--; + return o; +} + +void 
checkGoogleCloudMetastoreV2ListBackupsResponse( + api.GoogleCloudMetastoreV2ListBackupsResponse o) { + buildCounterGoogleCloudMetastoreV2ListBackupsResponse++; + if (buildCounterGoogleCloudMetastoreV2ListBackupsResponse < 3) { + checkUnnamed6(o.backups!); + unittest.expect( + o.nextPageToken!, + unittest.equals('foo'), + ); + checkUnnamed7(o.unreachable!); + } + buildCounterGoogleCloudMetastoreV2ListBackupsResponse--; +} + +core.List buildUnnamed8() => [ + buildGoogleCloudMetastoreV2Service(), + buildGoogleCloudMetastoreV2Service(), + ]; + +void checkUnnamed8(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkGoogleCloudMetastoreV2Service(o[0]); + checkGoogleCloudMetastoreV2Service(o[1]); +} + +core.List buildUnnamed9() => [ + 'foo', + 'foo', + ]; + +void checkUnnamed9(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o[0], + unittest.equals('foo'), + ); + unittest.expect( + o[1], + unittest.equals('foo'), + ); +} + +core.int buildCounterGoogleCloudMetastoreV2ListServicesResponse = 0; +api.GoogleCloudMetastoreV2ListServicesResponse + buildGoogleCloudMetastoreV2ListServicesResponse() { + final o = api.GoogleCloudMetastoreV2ListServicesResponse(); + buildCounterGoogleCloudMetastoreV2ListServicesResponse++; + if (buildCounterGoogleCloudMetastoreV2ListServicesResponse < 3) { + o.nextPageToken = 'foo'; + o.services = buildUnnamed8(); + o.unreachable = buildUnnamed9(); + } + buildCounterGoogleCloudMetastoreV2ListServicesResponse--; + return o; +} + +void checkGoogleCloudMetastoreV2ListServicesResponse( + api.GoogleCloudMetastoreV2ListServicesResponse o) { + buildCounterGoogleCloudMetastoreV2ListServicesResponse++; + if (buildCounterGoogleCloudMetastoreV2ListServicesResponse < 3) { + unittest.expect( + o.nextPageToken!, + unittest.equals('foo'), + ); + checkUnnamed8(o.services!); + checkUnnamed9(o.unreachable!); + } + buildCounterGoogleCloudMetastoreV2ListServicesResponse--; +} + +core.int buildCounterGoogleCloudMetastoreV2MetadataIntegration = 0; +api.GoogleCloudMetastoreV2MetadataIntegration + buildGoogleCloudMetastoreV2MetadataIntegration() { + final o = api.GoogleCloudMetastoreV2MetadataIntegration(); + buildCounterGoogleCloudMetastoreV2MetadataIntegration++; + if (buildCounterGoogleCloudMetastoreV2MetadataIntegration < 3) { + o.dataCatalogConfig = buildGoogleCloudMetastoreV2DataCatalogConfig(); + } + buildCounterGoogleCloudMetastoreV2MetadataIntegration--; + return o; +} + +void checkGoogleCloudMetastoreV2MetadataIntegration( + api.GoogleCloudMetastoreV2MetadataIntegration o) { + buildCounterGoogleCloudMetastoreV2MetadataIntegration++; + if (buildCounterGoogleCloudMetastoreV2MetadataIntegration < 3) { + checkGoogleCloudMetastoreV2DataCatalogConfig(o.dataCatalogConfig!); + } + buildCounterGoogleCloudMetastoreV2MetadataIntegration--; +} + +core.int buildCounterGoogleCloudMetastoreV2MoveTableToDatabaseRequest = 0; +api.GoogleCloudMetastoreV2MoveTableToDatabaseRequest + buildGoogleCloudMetastoreV2MoveTableToDatabaseRequest() { + final o = api.GoogleCloudMetastoreV2MoveTableToDatabaseRequest(); + buildCounterGoogleCloudMetastoreV2MoveTableToDatabaseRequest++; + if (buildCounterGoogleCloudMetastoreV2MoveTableToDatabaseRequest < 3) { + o.dbName = 'foo'; + o.destinationDbName = 'foo'; + o.tableName = 'foo'; + } + buildCounterGoogleCloudMetastoreV2MoveTableToDatabaseRequest--; + return o; +} + +void checkGoogleCloudMetastoreV2MoveTableToDatabaseRequest( + api.GoogleCloudMetastoreV2MoveTableToDatabaseRequest o) { + 
buildCounterGoogleCloudMetastoreV2MoveTableToDatabaseRequest++; + if (buildCounterGoogleCloudMetastoreV2MoveTableToDatabaseRequest < 3) { + unittest.expect( + o.dbName!, + unittest.equals('foo'), + ); + unittest.expect( + o.destinationDbName!, + unittest.equals('foo'), + ); + unittest.expect( + o.tableName!, + unittest.equals('foo'), + ); + } + buildCounterGoogleCloudMetastoreV2MoveTableToDatabaseRequest--; +} + +core.int buildCounterGoogleCloudMetastoreV2QueryMetadataRequest = 0; +api.GoogleCloudMetastoreV2QueryMetadataRequest + buildGoogleCloudMetastoreV2QueryMetadataRequest() { + final o = api.GoogleCloudMetastoreV2QueryMetadataRequest(); + buildCounterGoogleCloudMetastoreV2QueryMetadataRequest++; + if (buildCounterGoogleCloudMetastoreV2QueryMetadataRequest < 3) { + o.query = 'foo'; + } + buildCounterGoogleCloudMetastoreV2QueryMetadataRequest--; + return o; +} + +void checkGoogleCloudMetastoreV2QueryMetadataRequest( + api.GoogleCloudMetastoreV2QueryMetadataRequest o) { + buildCounterGoogleCloudMetastoreV2QueryMetadataRequest++; + if (buildCounterGoogleCloudMetastoreV2QueryMetadataRequest < 3) { + unittest.expect( + o.query!, + unittest.equals('foo'), + ); + } + buildCounterGoogleCloudMetastoreV2QueryMetadataRequest--; +} + +core.int buildCounterGoogleCloudMetastoreV2RestoreServiceRequest = 0; +api.GoogleCloudMetastoreV2RestoreServiceRequest + buildGoogleCloudMetastoreV2RestoreServiceRequest() { + final o = api.GoogleCloudMetastoreV2RestoreServiceRequest(); + buildCounterGoogleCloudMetastoreV2RestoreServiceRequest++; + if (buildCounterGoogleCloudMetastoreV2RestoreServiceRequest < 3) { + o.backup = 'foo'; + o.backupLocation = 'foo'; + o.requestId = 'foo'; + o.restoreType = 'foo'; + } + buildCounterGoogleCloudMetastoreV2RestoreServiceRequest--; + return o; +} + +void checkGoogleCloudMetastoreV2RestoreServiceRequest( + api.GoogleCloudMetastoreV2RestoreServiceRequest o) { + buildCounterGoogleCloudMetastoreV2RestoreServiceRequest++; + if (buildCounterGoogleCloudMetastoreV2RestoreServiceRequest < 3) { + unittest.expect( + o.backup!, + unittest.equals('foo'), + ); + unittest.expect( + o.backupLocation!, + unittest.equals('foo'), + ); + unittest.expect( + o.requestId!, + unittest.equals('foo'), + ); + unittest.expect( + o.restoreType!, + unittest.equals('foo'), + ); + } + buildCounterGoogleCloudMetastoreV2RestoreServiceRequest--; +} + +core.int buildCounterGoogleCloudMetastoreV2ScalingConfig = 0; +api.GoogleCloudMetastoreV2ScalingConfig + buildGoogleCloudMetastoreV2ScalingConfig() { + final o = api.GoogleCloudMetastoreV2ScalingConfig(); + buildCounterGoogleCloudMetastoreV2ScalingConfig++; + if (buildCounterGoogleCloudMetastoreV2ScalingConfig < 3) { + o.scalingFactor = 42; + } + buildCounterGoogleCloudMetastoreV2ScalingConfig--; + return o; +} + +void checkGoogleCloudMetastoreV2ScalingConfig( + api.GoogleCloudMetastoreV2ScalingConfig o) { + buildCounterGoogleCloudMetastoreV2ScalingConfig++; + if (buildCounterGoogleCloudMetastoreV2ScalingConfig < 3) { + unittest.expect( + o.scalingFactor!, + unittest.equals(42), + ); + } + buildCounterGoogleCloudMetastoreV2ScalingConfig--; +} + +core.int buildCounterGoogleCloudMetastoreV2ScheduledBackup = 0; +api.GoogleCloudMetastoreV2ScheduledBackup + buildGoogleCloudMetastoreV2ScheduledBackup() { + final o = api.GoogleCloudMetastoreV2ScheduledBackup(); + buildCounterGoogleCloudMetastoreV2ScheduledBackup++; + if (buildCounterGoogleCloudMetastoreV2ScheduledBackup < 3) { + o.backupLocation = 'foo'; + o.cronSchedule = 'foo'; + o.enabled = true; + o.latestBackup = 
buildGoogleCloudMetastoreV2LatestBackup(); + o.nextScheduledTime = 'foo'; + o.timeZone = 'foo'; + } + buildCounterGoogleCloudMetastoreV2ScheduledBackup--; + return o; +} + +void checkGoogleCloudMetastoreV2ScheduledBackup( + api.GoogleCloudMetastoreV2ScheduledBackup o) { + buildCounterGoogleCloudMetastoreV2ScheduledBackup++; + if (buildCounterGoogleCloudMetastoreV2ScheduledBackup < 3) { + unittest.expect( + o.backupLocation!, + unittest.equals('foo'), + ); + unittest.expect( + o.cronSchedule!, + unittest.equals('foo'), + ); + unittest.expect(o.enabled!, unittest.isTrue); + checkGoogleCloudMetastoreV2LatestBackup(o.latestBackup!); + unittest.expect( + o.nextScheduledTime!, + unittest.equals('foo'), + ); + unittest.expect( + o.timeZone!, + unittest.equals('foo'), + ); + } + buildCounterGoogleCloudMetastoreV2ScheduledBackup--; +} + +core.List buildUnnamed10() => [ + buildGoogleCloudMetastoreV2Endpoint(), + buildGoogleCloudMetastoreV2Endpoint(), + ]; + +void checkUnnamed10(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkGoogleCloudMetastoreV2Endpoint(o[0]); + checkGoogleCloudMetastoreV2Endpoint(o[1]); +} + +core.Map buildUnnamed11() => { + 'x': 'foo', + 'y': 'foo', + }; + +void checkUnnamed11(core.Map o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o['x']!, + unittest.equals('foo'), + ); + unittest.expect( + o['y']!, + unittest.equals('foo'), + ); +} + +core.int buildCounterGoogleCloudMetastoreV2Service = 0; +api.GoogleCloudMetastoreV2Service buildGoogleCloudMetastoreV2Service() { + final o = api.GoogleCloudMetastoreV2Service(); + buildCounterGoogleCloudMetastoreV2Service++; + if (buildCounterGoogleCloudMetastoreV2Service < 3) { + o.createTime = 'foo'; + o.encryptionConfig = buildGoogleCloudMetastoreV2EncryptionConfig(); + o.endpoints = buildUnnamed10(); + o.hiveMetastoreConfig = buildGoogleCloudMetastoreV2HiveMetastoreConfig(); + o.labels = buildUnnamed11(); + o.metadataIntegration = buildGoogleCloudMetastoreV2MetadataIntegration(); + o.name = 'foo'; + o.scalingConfig = buildGoogleCloudMetastoreV2ScalingConfig(); + o.scheduledBackup = buildGoogleCloudMetastoreV2ScheduledBackup(); + o.state = 'foo'; + o.stateMessage = 'foo'; + o.uid = 'foo'; + o.updateTime = 'foo'; + o.warehouseGcsUri = 'foo'; + } + buildCounterGoogleCloudMetastoreV2Service--; + return o; +} + +void checkGoogleCloudMetastoreV2Service(api.GoogleCloudMetastoreV2Service o) { + buildCounterGoogleCloudMetastoreV2Service++; + if (buildCounterGoogleCloudMetastoreV2Service < 3) { + unittest.expect( + o.createTime!, + unittest.equals('foo'), + ); + checkGoogleCloudMetastoreV2EncryptionConfig(o.encryptionConfig!); + checkUnnamed10(o.endpoints!); + checkGoogleCloudMetastoreV2HiveMetastoreConfig(o.hiveMetastoreConfig!); + checkUnnamed11(o.labels!); + checkGoogleCloudMetastoreV2MetadataIntegration(o.metadataIntegration!); + unittest.expect( + o.name!, + unittest.equals('foo'), + ); + checkGoogleCloudMetastoreV2ScalingConfig(o.scalingConfig!); + checkGoogleCloudMetastoreV2ScheduledBackup(o.scheduledBackup!); + unittest.expect( + o.state!, + unittest.equals('foo'), + ); + unittest.expect( + o.stateMessage!, + unittest.equals('foo'), + ); + unittest.expect( + o.uid!, + unittest.equals('foo'), + ); + unittest.expect( + o.updateTime!, + unittest.equals('foo'), + ); + unittest.expect( + o.warehouseGcsUri!, + unittest.equals('foo'), + ); + } + buildCounterGoogleCloudMetastoreV2Service--; +} + +core.Map buildUnnamed12() => { + 'x': { + 'list': [1, 2, 3], + 'bool': true, + 'string': 'foo' + }, + 'y': { + 
'list': [1, 2, 3], + 'bool': true, + 'string': 'foo' + }, + }; + +void checkUnnamed12(core.Map o) { + unittest.expect(o, unittest.hasLength(2)); + var casted1 = (o['x']!) as core.Map; + unittest.expect(casted1, unittest.hasLength(3)); + unittest.expect( + casted1['list'], + unittest.equals([1, 2, 3]), + ); + unittest.expect( + casted1['bool'], + unittest.equals(true), + ); + unittest.expect( + casted1['string'], + unittest.equals('foo'), + ); + var casted2 = (o['y']!) as core.Map; + unittest.expect(casted2, unittest.hasLength(3)); + unittest.expect( + casted2['list'], + unittest.equals([1, 2, 3]), + ); + unittest.expect( + casted2['bool'], + unittest.equals(true), + ); + unittest.expect( + casted2['string'], + unittest.equals('foo'), + ); +} + +core.Map buildUnnamed13() => { + 'x': { + 'list': [1, 2, 3], + 'bool': true, + 'string': 'foo' + }, + 'y': { + 'list': [1, 2, 3], + 'bool': true, + 'string': 'foo' + }, + }; + +void checkUnnamed13(core.Map o) { + unittest.expect(o, unittest.hasLength(2)); + var casted3 = (o['x']!) as core.Map; + unittest.expect(casted3, unittest.hasLength(3)); + unittest.expect( + casted3['list'], + unittest.equals([1, 2, 3]), + ); + unittest.expect( + casted3['bool'], + unittest.equals(true), + ); + unittest.expect( + casted3['string'], + unittest.equals('foo'), + ); + var casted4 = (o['y']!) as core.Map; + unittest.expect(casted4, unittest.hasLength(3)); + unittest.expect( + casted4['list'], + unittest.equals([1, 2, 3]), + ); + unittest.expect( + casted4['bool'], + unittest.equals(true), + ); + unittest.expect( + casted4['string'], + unittest.equals('foo'), + ); +} + +core.int buildCounterGoogleLongrunningOperation = 0; +api.GoogleLongrunningOperation buildGoogleLongrunningOperation() { + final o = api.GoogleLongrunningOperation(); + buildCounterGoogleLongrunningOperation++; + if (buildCounterGoogleLongrunningOperation < 3) { + o.done = true; + o.error = buildGoogleRpcStatus(); + o.metadata = buildUnnamed12(); + o.name = 'foo'; + o.response = buildUnnamed13(); + } + buildCounterGoogleLongrunningOperation--; + return o; +} + +void checkGoogleLongrunningOperation(api.GoogleLongrunningOperation o) { + buildCounterGoogleLongrunningOperation++; + if (buildCounterGoogleLongrunningOperation < 3) { + unittest.expect(o.done!, unittest.isTrue); + checkGoogleRpcStatus(o.error!); + checkUnnamed12(o.metadata!); + unittest.expect( + o.name!, + unittest.equals('foo'), + ); + checkUnnamed13(o.response!); + } + buildCounterGoogleLongrunningOperation--; +} + +core.Map buildUnnamed14() => { + 'x': { + 'list': [1, 2, 3], + 'bool': true, + 'string': 'foo' + }, + 'y': { + 'list': [1, 2, 3], + 'bool': true, + 'string': 'foo' + }, + }; + +void checkUnnamed14(core.Map o) { + unittest.expect(o, unittest.hasLength(2)); + var casted5 = (o['x']!) as core.Map; + unittest.expect(casted5, unittest.hasLength(3)); + unittest.expect( + casted5['list'], + unittest.equals([1, 2, 3]), + ); + unittest.expect( + casted5['bool'], + unittest.equals(true), + ); + unittest.expect( + casted5['string'], + unittest.equals('foo'), + ); + var casted6 = (o['y']!) 
as core.Map; + unittest.expect(casted6, unittest.hasLength(3)); + unittest.expect( + casted6['list'], + unittest.equals([1, 2, 3]), + ); + unittest.expect( + casted6['bool'], + unittest.equals(true), + ); + unittest.expect( + casted6['string'], + unittest.equals('foo'), + ); +} + +core.List> buildUnnamed15() => [ + buildUnnamed14(), + buildUnnamed14(), + ]; + +void checkUnnamed15(core.List> o) { + unittest.expect(o, unittest.hasLength(2)); + checkUnnamed14(o[0]); + checkUnnamed14(o[1]); +} + +core.int buildCounterGoogleRpcStatus = 0; +api.GoogleRpcStatus buildGoogleRpcStatus() { + final o = api.GoogleRpcStatus(); + buildCounterGoogleRpcStatus++; + if (buildCounterGoogleRpcStatus < 3) { + o.code = 42; + o.details = buildUnnamed15(); + o.message = 'foo'; + } + buildCounterGoogleRpcStatus--; + return o; +} + +void checkGoogleRpcStatus(api.GoogleRpcStatus o) { + buildCounterGoogleRpcStatus++; + if (buildCounterGoogleRpcStatus < 3) { + unittest.expect( + o.code!, + unittest.equals(42), + ); + checkUnnamed15(o.details!); + unittest.expect( + o.message!, + unittest.equals('foo'), + ); + } + buildCounterGoogleRpcStatus--; +} + +void main() { + unittest.group( + 'obj-schema-GoogleCloudMetastoreV2AlterMetadataResourceLocationRequest', + () { + unittest.test('to-json--from-json', () async { + final o = + buildGoogleCloudMetastoreV2AlterMetadataResourceLocationRequest(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudMetastoreV2AlterMetadataResourceLocationRequest + .fromJson(oJson as core.Map); + checkGoogleCloudMetastoreV2AlterMetadataResourceLocationRequest(od); + }); + }); + + unittest.group('obj-schema-GoogleCloudMetastoreV2AlterTablePropertiesRequest', + () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudMetastoreV2AlterTablePropertiesRequest(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudMetastoreV2AlterTablePropertiesRequest.fromJson( + oJson as core.Map); + checkGoogleCloudMetastoreV2AlterTablePropertiesRequest(od); + }); + }); + + unittest.group('obj-schema-GoogleCloudMetastoreV2AuxiliaryVersionConfig', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudMetastoreV2AuxiliaryVersionConfig(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudMetastoreV2AuxiliaryVersionConfig.fromJson( + oJson as core.Map); + checkGoogleCloudMetastoreV2AuxiliaryVersionConfig(od); + }); + }); + + unittest.group('obj-schema-GoogleCloudMetastoreV2Backup', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudMetastoreV2Backup(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudMetastoreV2Backup.fromJson( + oJson as core.Map); + checkGoogleCloudMetastoreV2Backup(od); + }); + }); + + unittest.group('obj-schema-GoogleCloudMetastoreV2DataCatalogConfig', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudMetastoreV2DataCatalogConfig(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudMetastoreV2DataCatalogConfig.fromJson( + oJson as core.Map); + checkGoogleCloudMetastoreV2DataCatalogConfig(od); + }); + }); + + unittest.group('obj-schema-GoogleCloudMetastoreV2DatabaseDump', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudMetastoreV2DatabaseDump(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudMetastoreV2DatabaseDump.fromJson( + oJson as 
core.Map); + checkGoogleCloudMetastoreV2DatabaseDump(od); + }); + }); + + unittest.group('obj-schema-GoogleCloudMetastoreV2EncryptionConfig', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudMetastoreV2EncryptionConfig(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudMetastoreV2EncryptionConfig.fromJson( + oJson as core.Map); + checkGoogleCloudMetastoreV2EncryptionConfig(od); + }); + }); + + unittest.group('obj-schema-GoogleCloudMetastoreV2Endpoint', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudMetastoreV2Endpoint(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudMetastoreV2Endpoint.fromJson( + oJson as core.Map); + checkGoogleCloudMetastoreV2Endpoint(od); + }); + }); + + unittest.group('obj-schema-GoogleCloudMetastoreV2ExportMetadataRequest', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudMetastoreV2ExportMetadataRequest(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudMetastoreV2ExportMetadataRequest.fromJson( + oJson as core.Map); + checkGoogleCloudMetastoreV2ExportMetadataRequest(od); + }); + }); + + unittest.group('obj-schema-GoogleCloudMetastoreV2HiveMetastoreConfig', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudMetastoreV2HiveMetastoreConfig(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudMetastoreV2HiveMetastoreConfig.fromJson( + oJson as core.Map); + checkGoogleCloudMetastoreV2HiveMetastoreConfig(od); + }); + }); + + unittest.group('obj-schema-GoogleCloudMetastoreV2ImportMetadataRequest', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudMetastoreV2ImportMetadataRequest(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudMetastoreV2ImportMetadataRequest.fromJson( + oJson as core.Map); + checkGoogleCloudMetastoreV2ImportMetadataRequest(od); + }); + }); + + unittest.group('obj-schema-GoogleCloudMetastoreV2LatestBackup', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudMetastoreV2LatestBackup(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudMetastoreV2LatestBackup.fromJson( + oJson as core.Map); + checkGoogleCloudMetastoreV2LatestBackup(od); + }); + }); + + unittest.group('obj-schema-GoogleCloudMetastoreV2ListBackupsResponse', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudMetastoreV2ListBackupsResponse(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudMetastoreV2ListBackupsResponse.fromJson( + oJson as core.Map); + checkGoogleCloudMetastoreV2ListBackupsResponse(od); + }); + }); + + unittest.group('obj-schema-GoogleCloudMetastoreV2ListServicesResponse', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudMetastoreV2ListServicesResponse(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudMetastoreV2ListServicesResponse.fromJson( + oJson as core.Map); + checkGoogleCloudMetastoreV2ListServicesResponse(od); + }); + }); + + unittest.group('obj-schema-GoogleCloudMetastoreV2MetadataIntegration', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudMetastoreV2MetadataIntegration(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = 
api.GoogleCloudMetastoreV2MetadataIntegration.fromJson( + oJson as core.Map); + checkGoogleCloudMetastoreV2MetadataIntegration(od); + }); + }); + + unittest.group('obj-schema-GoogleCloudMetastoreV2MoveTableToDatabaseRequest', + () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudMetastoreV2MoveTableToDatabaseRequest(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudMetastoreV2MoveTableToDatabaseRequest.fromJson( + oJson as core.Map); + checkGoogleCloudMetastoreV2MoveTableToDatabaseRequest(od); + }); + }); + + unittest.group('obj-schema-GoogleCloudMetastoreV2QueryMetadataRequest', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudMetastoreV2QueryMetadataRequest(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudMetastoreV2QueryMetadataRequest.fromJson( + oJson as core.Map); + checkGoogleCloudMetastoreV2QueryMetadataRequest(od); + }); + }); + + unittest.group('obj-schema-GoogleCloudMetastoreV2RestoreServiceRequest', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudMetastoreV2RestoreServiceRequest(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudMetastoreV2RestoreServiceRequest.fromJson( + oJson as core.Map); + checkGoogleCloudMetastoreV2RestoreServiceRequest(od); + }); + }); + + unittest.group('obj-schema-GoogleCloudMetastoreV2ScalingConfig', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudMetastoreV2ScalingConfig(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudMetastoreV2ScalingConfig.fromJson( + oJson as core.Map); + checkGoogleCloudMetastoreV2ScalingConfig(od); + }); + }); + + unittest.group('obj-schema-GoogleCloudMetastoreV2ScheduledBackup', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudMetastoreV2ScheduledBackup(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudMetastoreV2ScheduledBackup.fromJson( + oJson as core.Map); + checkGoogleCloudMetastoreV2ScheduledBackup(od); + }); + }); + + unittest.group('obj-schema-GoogleCloudMetastoreV2Service', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudMetastoreV2Service(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudMetastoreV2Service.fromJson( + oJson as core.Map); + checkGoogleCloudMetastoreV2Service(od); + }); + }); + + unittest.group('obj-schema-GoogleLongrunningOperation', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleLongrunningOperation(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleLongrunningOperation.fromJson( + oJson as core.Map); + checkGoogleLongrunningOperation(od); + }); + }); + + unittest.group('obj-schema-GoogleRpcStatus', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleRpcStatus(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleRpcStatus.fromJson( + oJson as core.Map); + checkGoogleRpcStatus(od); + }); + }); + + unittest.group('resource-ProjectsLocationsServicesResource', () { + unittest.test('method--alterLocation', () async { + final mock = HttpServerMock(); + final res = api.DataprocMetastoreApi(mock).projects.locations.services; + final arg_request = + buildGoogleCloudMetastoreV2AlterMetadataResourceLocationRequest(); + final arg_service = 'foo'; + final 
arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = + api.GoogleCloudMetastoreV2AlterMetadataResourceLocationRequest + .fromJson(json as core.Map); + checkGoogleCloudMetastoreV2AlterMetadataResourceLocationRequest(obj); + + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v2/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildGoogleLongrunningOperation()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.alterLocation(arg_request, arg_service, + $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); + }); + + unittest.test('method--alterTableProperties', () async { + final mock = HttpServerMock(); + final res = api.DataprocMetastoreApi(mock).projects.locations.services; + final arg_request = + buildGoogleCloudMetastoreV2AlterTablePropertiesRequest(); + final arg_service = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = + api.GoogleCloudMetastoreV2AlterTablePropertiesRequest.fromJson( + json as core.Map); + checkGoogleCloudMetastoreV2AlterTablePropertiesRequest(obj); + + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v2/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildGoogleLongrunningOperation()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.alterTableProperties(arg_request, arg_service, + $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); + }); + + unittest.test('method--create', () async { + final mock = HttpServerMock(); + final res = 
api.DataprocMetastoreApi(mock).projects.locations.services; + final arg_request = buildGoogleCloudMetastoreV2Service(); + final arg_parent = 'foo'; + final arg_requestId = 'foo'; + final arg_serviceId = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.GoogleCloudMetastoreV2Service.fromJson( + json as core.Map); + checkGoogleCloudMetastoreV2Service(obj); + + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v2/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['requestId']!.first, + unittest.equals(arg_requestId), + ); + unittest.expect( + queryMap['serviceId']!.first, + unittest.equals(arg_serviceId), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildGoogleLongrunningOperation()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.create(arg_request, arg_parent, + requestId: arg_requestId, + serviceId: arg_serviceId, + $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); + }); + + unittest.test('method--delete', () async { + final mock = HttpServerMock(); + final res = api.DataprocMetastoreApi(mock).projects.locations.services; + final arg_name = 'foo'; + final arg_requestId = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v2/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['requestId']!.first, + unittest.equals(arg_requestId), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildGoogleLongrunningOperation()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.delete(arg_name, + requestId: arg_requestId, 
$fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); + }); + + unittest.test('method--exportMetadata', () async { + final mock = HttpServerMock(); + final res = api.DataprocMetastoreApi(mock).projects.locations.services; + final arg_request = buildGoogleCloudMetastoreV2ExportMetadataRequest(); + final arg_service = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.GoogleCloudMetastoreV2ExportMetadataRequest.fromJson( + json as core.Map); + checkGoogleCloudMetastoreV2ExportMetadataRequest(obj); + + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v2/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildGoogleLongrunningOperation()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.exportMetadata(arg_request, arg_service, + $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); + }); + + unittest.test('method--get', () async { + final mock = HttpServerMock(); + final res = api.DataprocMetastoreApi(mock).projects.locations.services; + final arg_name = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v2/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildGoogleCloudMetastoreV2Service()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.get(arg_name, $fields: arg_$fields); + checkGoogleCloudMetastoreV2Service( + response as api.GoogleCloudMetastoreV2Service); + }); + + unittest.test('method--importMetadata', () async { + final mock = 
HttpServerMock(); + final res = api.DataprocMetastoreApi(mock).projects.locations.services; + final arg_request = buildGoogleCloudMetastoreV2ImportMetadataRequest(); + final arg_name = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.GoogleCloudMetastoreV2ImportMetadataRequest.fromJson( + json as core.Map); + checkGoogleCloudMetastoreV2ImportMetadataRequest(obj); + + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v2/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildGoogleLongrunningOperation()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = + await res.importMetadata(arg_request, arg_name, $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); + }); + + unittest.test('method--list', () async { + final mock = HttpServerMock(); + final res = api.DataprocMetastoreApi(mock).projects.locations.services; + final arg_parent = 'foo'; + final arg_filter = 'foo'; + final arg_orderBy = 'foo'; + final arg_pageSize = 42; + final arg_pageToken = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v2/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['filter']!.first, + unittest.equals(arg_filter), + ); + unittest.expect( + queryMap['orderBy']!.first, + unittest.equals(arg_orderBy), + ); + unittest.expect( + core.int.parse(queryMap['pageSize']!.first), + unittest.equals(arg_pageSize), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json + 
.encode(buildGoogleCloudMetastoreV2ListServicesResponse()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.list(arg_parent, + filter: arg_filter, + orderBy: arg_orderBy, + pageSize: arg_pageSize, + pageToken: arg_pageToken, + $fields: arg_$fields); + checkGoogleCloudMetastoreV2ListServicesResponse( + response as api.GoogleCloudMetastoreV2ListServicesResponse); + }); + + unittest.test('method--moveTableToDatabase', () async { + final mock = HttpServerMock(); + final res = api.DataprocMetastoreApi(mock).projects.locations.services; + final arg_request = + buildGoogleCloudMetastoreV2MoveTableToDatabaseRequest(); + final arg_service = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = + api.GoogleCloudMetastoreV2MoveTableToDatabaseRequest.fromJson( + json as core.Map); + checkGoogleCloudMetastoreV2MoveTableToDatabaseRequest(obj); + + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v2/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildGoogleLongrunningOperation()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.moveTableToDatabase(arg_request, arg_service, + $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); + }); + + unittest.test('method--patch', () async { + final mock = HttpServerMock(); + final res = api.DataprocMetastoreApi(mock).projects.locations.services; + final arg_request = buildGoogleCloudMetastoreV2Service(); + final arg_name = 'foo'; + final arg_requestId = 'foo'; + final arg_updateMask = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.GoogleCloudMetastoreV2Service.fromJson( + json as core.Map); + checkGoogleCloudMetastoreV2Service(obj); + + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v2/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + 
core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['requestId']!.first, + unittest.equals(arg_requestId), + ); + unittest.expect( + queryMap['updateMask']!.first, + unittest.equals(arg_updateMask), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildGoogleLongrunningOperation()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.patch(arg_request, arg_name, + requestId: arg_requestId, + updateMask: arg_updateMask, + $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); + }); + + unittest.test('method--queryMetadata', () async { + final mock = HttpServerMock(); + final res = api.DataprocMetastoreApi(mock).projects.locations.services; + final arg_request = buildGoogleCloudMetastoreV2QueryMetadataRequest(); + final arg_service = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.GoogleCloudMetastoreV2QueryMetadataRequest.fromJson( + json as core.Map); + checkGoogleCloudMetastoreV2QueryMetadataRequest(obj); + + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v2/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildGoogleLongrunningOperation()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.queryMetadata(arg_request, arg_service, + $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); + }); + + unittest.test('method--restore', () async { + final mock = HttpServerMock(); + final res = api.DataprocMetastoreApi(mock).projects.locations.services; + final arg_request = buildGoogleCloudMetastoreV2RestoreServiceRequest(); + final arg_service = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.GoogleCloudMetastoreV2RestoreServiceRequest.fromJson( + json as core.Map); + checkGoogleCloudMetastoreV2RestoreServiceRequest(obj); + + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v2/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 
0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildGoogleLongrunningOperation()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = + await res.restore(arg_request, arg_service, $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); + }); + }); + + unittest.group('resource-ProjectsLocationsServicesBackupsResource', () { + unittest.test('method--create', () async { + final mock = HttpServerMock(); + final res = + api.DataprocMetastoreApi(mock).projects.locations.services.backups; + final arg_request = buildGoogleCloudMetastoreV2Backup(); + final arg_parent = 'foo'; + final arg_backupId = 'foo'; + final arg_requestId = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.GoogleCloudMetastoreV2Backup.fromJson( + json as core.Map); + checkGoogleCloudMetastoreV2Backup(obj); + + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v2/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['backupId']!.first, + unittest.equals(arg_backupId), + ); + unittest.expect( + queryMap['requestId']!.first, + unittest.equals(arg_requestId), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildGoogleLongrunningOperation()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.create(arg_request, arg_parent, + backupId: arg_backupId, + requestId: arg_requestId, + $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); + }); + + unittest.test('method--delete', () async { + final mock = HttpServerMock(); + final res = + api.DataprocMetastoreApi(mock).projects.locations.services.backups; + final arg_name = 'foo'; + final arg_requestId = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), 
+ unittest.equals('v2/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['requestId']!.first, + unittest.equals(arg_requestId), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildGoogleLongrunningOperation()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.delete(arg_name, + requestId: arg_requestId, $fields: arg_$fields); + checkGoogleLongrunningOperation( + response as api.GoogleLongrunningOperation); + }); + + unittest.test('method--get', () async { + final mock = HttpServerMock(); + final res = + api.DataprocMetastoreApi(mock).projects.locations.services.backups; + final arg_name = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v2/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildGoogleCloudMetastoreV2Backup()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.get(arg_name, $fields: arg_$fields); + checkGoogleCloudMetastoreV2Backup( + response as api.GoogleCloudMetastoreV2Backup); + }); + + unittest.test('method--list', () async { + final mock = HttpServerMock(); + final res = + api.DataprocMetastoreApi(mock).projects.locations.services.backups; + final arg_parent = 'foo'; + final arg_filter = 'foo'; + final arg_orderBy = 'foo'; + final arg_pageSize = 42; + final arg_pageToken = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v2/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 
0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['filter']!.first, + unittest.equals(arg_filter), + ); + unittest.expect( + queryMap['orderBy']!.first, + unittest.equals(arg_orderBy), + ); + unittest.expect( + core.int.parse(queryMap['pageSize']!.first), + unittest.equals(arg_pageSize), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json + .encode(buildGoogleCloudMetastoreV2ListBackupsResponse()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.list(arg_parent, + filter: arg_filter, + orderBy: arg_orderBy, + pageSize: arg_pageSize, + pageToken: arg_pageToken, + $fields: arg_$fields); + checkGoogleCloudMetastoreV2ListBackupsResponse( + response as api.GoogleCloudMetastoreV2ListBackupsResponse); + }); + }); +} diff --git a/generated/googleapis/test/migrationcenter/v1_test.dart b/generated/googleapis/test/migrationcenter/v1_test.dart index e57aa2ce0..c746ec859 100644 --- a/generated/googleapis/test/migrationcenter/v1_test.dart +++ b/generated/googleapis/test/migrationcenter/v1_test.dart @@ -477,12 +477,15 @@ api.Asset buildAsset() { o.assignedGroups = buildUnnamed5(); o.attributes = buildUnnamed6(); o.createTime = 'foo'; + o.databaseDeploymentDetails = buildDatabaseDeploymentDetails(); + o.databaseDetails = buildDatabaseDetails(); o.insightList = buildInsightList(); o.labels = buildUnnamed7(); o.machineDetails = buildMachineDetails(); o.name = 'foo'; o.performanceData = buildAssetPerformanceData(); o.sources = buildUnnamed8(); + o.title = 'foo'; o.updateTime = 'foo'; } buildCounterAsset--; @@ -498,6 +501,8 @@ void checkAsset(api.Asset o) { o.createTime!, unittest.equals('foo'), ); + checkDatabaseDeploymentDetails(o.databaseDeploymentDetails!); + checkDatabaseDetails(o.databaseDetails!); checkInsightList(o.insightList!); checkUnnamed7(o.labels!); checkMachineDetails(o.machineDetails!); @@ -507,6 +512,10 @@ void checkAsset(api.Asset o) { ); checkAssetPerformanceData(o.performanceData!); checkUnnamed8(o.sources!); + unittest.expect( + o.title!, + unittest.equals('foo'), + ); unittest.expect( o.updateTime!, unittest.equals('foo'), @@ -567,6 +576,8 @@ api.AssetFrame buildAssetFrame() { if (buildCounterAssetFrame < 3) { o.attributes = buildUnnamed9(); o.collectionType = 'foo'; + o.databaseDeploymentDetails = buildDatabaseDeploymentDetails(); + o.databaseDetails = buildDatabaseDetails(); o.labels = buildUnnamed10(); o.machineDetails = buildMachineDetails(); o.performanceSamples = buildUnnamed11(); @@ -585,6 +596,8 @@ void checkAssetFrame(api.AssetFrame o) { o.collectionType!, unittest.equals('foo'), ); + checkDatabaseDeploymentDetails(o.databaseDeploymentDetails!); + checkDatabaseDetails(o.databaseDetails!); checkUnnamed10(o.labels!); checkMachineDetails(o.machineDetails!); checkUnnamed11(o.performanceSamples!); @@ -1097,6 +1110,8 @@ api.DailyResourceUsageAggregationDisk buildDailyResourceUsageAggregationDisk() { buildCounterDailyResourceUsageAggregationDisk++; if 
(buildCounterDailyResourceUsageAggregationDisk < 3) { o.iops = buildDailyResourceUsageAggregationStats(); + o.readIops = buildDailyResourceUsageAggregationStats(); + o.writeIops = buildDailyResourceUsageAggregationStats(); } buildCounterDailyResourceUsageAggregationDisk--; return o; @@ -1107,6 +1122,8 @@ void checkDailyResourceUsageAggregationDisk( buildCounterDailyResourceUsageAggregationDisk++; if (buildCounterDailyResourceUsageAggregationDisk < 3) { checkDailyResourceUsageAggregationStats(o.iops!); + checkDailyResourceUsageAggregationStats(o.readIops!); + checkDailyResourceUsageAggregationStats(o.writeIops!); } buildCounterDailyResourceUsageAggregationDisk--; } @@ -1194,6 +1211,380 @@ void checkDailyResourceUsageAggregationStats( buildCounterDailyResourceUsageAggregationStats--; } +core.int buildCounterDatabaseDeploymentDetails = 0; +api.DatabaseDeploymentDetails buildDatabaseDeploymentDetails() { + final o = api.DatabaseDeploymentDetails(); + buildCounterDatabaseDeploymentDetails++; + if (buildCounterDatabaseDeploymentDetails < 3) { + o.aggregatedStats = buildDatabaseDeploymentDetailsAggregatedStats(); + o.edition = 'foo'; + o.generatedId = 'foo'; + o.manualUniqueId = 'foo'; + o.mysql = buildMysqlDatabaseDeployment(); + o.postgresql = buildPostgreSqlDatabaseDeployment(); + o.sqlServer = buildSqlServerDatabaseDeployment(); + o.topology = buildDatabaseDeploymentTopology(); + o.version = 'foo'; + } + buildCounterDatabaseDeploymentDetails--; + return o; +} + +void checkDatabaseDeploymentDetails(api.DatabaseDeploymentDetails o) { + buildCounterDatabaseDeploymentDetails++; + if (buildCounterDatabaseDeploymentDetails < 3) { + checkDatabaseDeploymentDetailsAggregatedStats(o.aggregatedStats!); + unittest.expect( + o.edition!, + unittest.equals('foo'), + ); + unittest.expect( + o.generatedId!, + unittest.equals('foo'), + ); + unittest.expect( + o.manualUniqueId!, + unittest.equals('foo'), + ); + checkMysqlDatabaseDeployment(o.mysql!); + checkPostgreSqlDatabaseDeployment(o.postgresql!); + checkSqlServerDatabaseDeployment(o.sqlServer!); + checkDatabaseDeploymentTopology(o.topology!); + unittest.expect( + o.version!, + unittest.equals('foo'), + ); + } + buildCounterDatabaseDeploymentDetails--; +} + +core.int buildCounterDatabaseDeploymentDetailsAggregatedStats = 0; +api.DatabaseDeploymentDetailsAggregatedStats + buildDatabaseDeploymentDetailsAggregatedStats() { + final o = api.DatabaseDeploymentDetailsAggregatedStats(); + buildCounterDatabaseDeploymentDetailsAggregatedStats++; + if (buildCounterDatabaseDeploymentDetailsAggregatedStats < 3) { + o.databaseCount = 42; + } + buildCounterDatabaseDeploymentDetailsAggregatedStats--; + return o; +} + +void checkDatabaseDeploymentDetailsAggregatedStats( + api.DatabaseDeploymentDetailsAggregatedStats o) { + buildCounterDatabaseDeploymentDetailsAggregatedStats++; + if (buildCounterDatabaseDeploymentDetailsAggregatedStats < 3) { + unittest.expect( + o.databaseCount!, + unittest.equals(42), + ); + } + buildCounterDatabaseDeploymentDetailsAggregatedStats--; +} + +core.List buildUnnamed18() => [ + buildDatabaseInstance(), + buildDatabaseInstance(), + ]; + +void checkUnnamed18(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkDatabaseInstance(o[0]); + checkDatabaseInstance(o[1]); +} + +core.int buildCounterDatabaseDeploymentTopology = 0; +api.DatabaseDeploymentTopology buildDatabaseDeploymentTopology() { + final o = api.DatabaseDeploymentTopology(); + buildCounterDatabaseDeploymentTopology++; + if (buildCounterDatabaseDeploymentTopology < 3) { + 
o.coreCount = 42; + o.coreLimit = 42; + o.diskAllocatedBytes = 'foo'; + o.diskUsedBytes = 'foo'; + o.instances = buildUnnamed18(); + o.memoryBytes = 'foo'; + o.memoryLimitBytes = 'foo'; + o.physicalCoreCount = 42; + o.physicalCoreLimit = 42; + } + buildCounterDatabaseDeploymentTopology--; + return o; +} + +void checkDatabaseDeploymentTopology(api.DatabaseDeploymentTopology o) { + buildCounterDatabaseDeploymentTopology++; + if (buildCounterDatabaseDeploymentTopology < 3) { + unittest.expect( + o.coreCount!, + unittest.equals(42), + ); + unittest.expect( + o.coreLimit!, + unittest.equals(42), + ); + unittest.expect( + o.diskAllocatedBytes!, + unittest.equals('foo'), + ); + unittest.expect( + o.diskUsedBytes!, + unittest.equals('foo'), + ); + checkUnnamed18(o.instances!); + unittest.expect( + o.memoryBytes!, + unittest.equals('foo'), + ); + unittest.expect( + o.memoryLimitBytes!, + unittest.equals('foo'), + ); + unittest.expect( + o.physicalCoreCount!, + unittest.equals(42), + ); + unittest.expect( + o.physicalCoreLimit!, + unittest.equals(42), + ); + } + buildCounterDatabaseDeploymentTopology--; +} + +core.List buildUnnamed19() => [ + buildDatabaseSchema(), + buildDatabaseSchema(), + ]; + +void checkUnnamed19(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkDatabaseSchema(o[0]); + checkDatabaseSchema(o[1]); +} + +core.int buildCounterDatabaseDetails = 0; +api.DatabaseDetails buildDatabaseDetails() { + final o = api.DatabaseDetails(); + buildCounterDatabaseDetails++; + if (buildCounterDatabaseDetails < 3) { + o.allocatedStorageBytes = 'foo'; + o.databaseName = 'foo'; + o.parentDatabaseDeployment = buildDatabaseDetailsParentDatabaseDeployment(); + o.schemas = buildUnnamed19(); + } + buildCounterDatabaseDetails--; + return o; +} + +void checkDatabaseDetails(api.DatabaseDetails o) { + buildCounterDatabaseDetails++; + if (buildCounterDatabaseDetails < 3) { + unittest.expect( + o.allocatedStorageBytes!, + unittest.equals('foo'), + ); + unittest.expect( + o.databaseName!, + unittest.equals('foo'), + ); + checkDatabaseDetailsParentDatabaseDeployment(o.parentDatabaseDeployment!); + checkUnnamed19(o.schemas!); + } + buildCounterDatabaseDetails--; +} + +core.int buildCounterDatabaseDetailsParentDatabaseDeployment = 0; +api.DatabaseDetailsParentDatabaseDeployment + buildDatabaseDetailsParentDatabaseDeployment() { + final o = api.DatabaseDetailsParentDatabaseDeployment(); + buildCounterDatabaseDetailsParentDatabaseDeployment++; + if (buildCounterDatabaseDetailsParentDatabaseDeployment < 3) { + o.generatedId = 'foo'; + o.manualUniqueId = 'foo'; + } + buildCounterDatabaseDetailsParentDatabaseDeployment--; + return o; +} + +void checkDatabaseDetailsParentDatabaseDeployment( + api.DatabaseDetailsParentDatabaseDeployment o) { + buildCounterDatabaseDetailsParentDatabaseDeployment++; + if (buildCounterDatabaseDetailsParentDatabaseDeployment < 3) { + unittest.expect( + o.generatedId!, + unittest.equals('foo'), + ); + unittest.expect( + o.manualUniqueId!, + unittest.equals('foo'), + ); + } + buildCounterDatabaseDetailsParentDatabaseDeployment--; +} + +core.int buildCounterDatabaseInstance = 0; +api.DatabaseInstance buildDatabaseInstance() { + final o = api.DatabaseInstance(); + buildCounterDatabaseInstance++; + if (buildCounterDatabaseInstance < 3) { + o.instanceName = 'foo'; + o.network = buildDatabaseInstanceNetwork(); + o.role = 'foo'; + } + buildCounterDatabaseInstance--; + return o; +} + +void checkDatabaseInstance(api.DatabaseInstance o) { + buildCounterDatabaseInstance++; + if 
(buildCounterDatabaseInstance < 3) { + unittest.expect( + o.instanceName!, + unittest.equals('foo'), + ); + checkDatabaseInstanceNetwork(o.network!); + unittest.expect( + o.role!, + unittest.equals('foo'), + ); + } + buildCounterDatabaseInstance--; +} + +core.List buildUnnamed20() => [ + 'foo', + 'foo', + ]; + +void checkUnnamed20(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o[0], + unittest.equals('foo'), + ); + unittest.expect( + o[1], + unittest.equals('foo'), + ); +} + +core.List buildUnnamed21() => [ + 'foo', + 'foo', + ]; + +void checkUnnamed21(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o[0], + unittest.equals('foo'), + ); + unittest.expect( + o[1], + unittest.equals('foo'), + ); +} + +core.int buildCounterDatabaseInstanceNetwork = 0; +api.DatabaseInstanceNetwork buildDatabaseInstanceNetwork() { + final o = api.DatabaseInstanceNetwork(); + buildCounterDatabaseInstanceNetwork++; + if (buildCounterDatabaseInstanceNetwork < 3) { + o.hostNames = buildUnnamed20(); + o.ipAddresses = buildUnnamed21(); + o.primaryMacAddress = 'foo'; + } + buildCounterDatabaseInstanceNetwork--; + return o; +} + +void checkDatabaseInstanceNetwork(api.DatabaseInstanceNetwork o) { + buildCounterDatabaseInstanceNetwork++; + if (buildCounterDatabaseInstanceNetwork < 3) { + checkUnnamed20(o.hostNames!); + checkUnnamed21(o.ipAddresses!); + unittest.expect( + o.primaryMacAddress!, + unittest.equals('foo'), + ); + } + buildCounterDatabaseInstanceNetwork--; +} + +core.int buildCounterDatabaseObjects = 0; +api.DatabaseObjects buildDatabaseObjects() { + final o = api.DatabaseObjects(); + buildCounterDatabaseObjects++; + if (buildCounterDatabaseObjects < 3) { + o.category = 'foo'; + o.count = 'foo'; + } + buildCounterDatabaseObjects--; + return o; +} + +void checkDatabaseObjects(api.DatabaseObjects o) { + buildCounterDatabaseObjects++; + if (buildCounterDatabaseObjects < 3) { + unittest.expect( + o.category!, + unittest.equals('foo'), + ); + unittest.expect( + o.count!, + unittest.equals('foo'), + ); + } + buildCounterDatabaseObjects--; +} + +core.List buildUnnamed22() => [ + buildDatabaseObjects(), + buildDatabaseObjects(), + ]; + +void checkUnnamed22(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkDatabaseObjects(o[0]); + checkDatabaseObjects(o[1]); +} + +core.int buildCounterDatabaseSchema = 0; +api.DatabaseSchema buildDatabaseSchema() { + final o = api.DatabaseSchema(); + buildCounterDatabaseSchema++; + if (buildCounterDatabaseSchema < 3) { + o.mysql = buildMySqlSchemaDetails(); + o.objects = buildUnnamed22(); + o.postgresql = buildPostgreSqlSchemaDetails(); + o.schemaName = 'foo'; + o.sqlServer = buildSqlServerSchemaDetails(); + o.tablesSizeBytes = 'foo'; + } + buildCounterDatabaseSchema--; + return o; +} + +void checkDatabaseSchema(api.DatabaseSchema o) { + buildCounterDatabaseSchema++; + if (buildCounterDatabaseSchema < 3) { + checkMySqlSchemaDetails(o.mysql!); + checkUnnamed22(o.objects!); + checkPostgreSqlSchemaDetails(o.postgresql!); + unittest.expect( + o.schemaName!, + unittest.equals('foo'), + ); + checkSqlServerSchemaDetails(o.sqlServer!); + unittest.expect( + o.tablesSizeBytes!, + unittest.equals('foo'), + ); + } + buildCounterDatabaseSchema--; +} + core.int buildCounterDate = 0; api.Date buildDate() { final o = api.Date(); @@ -1226,23 +1617,23 @@ void checkDate(api.Date o) { buildCounterDate--; } -core.List buildUnnamed18() => [ +core.List buildUnnamed23() => [ buildStatus(), buildStatus(), ]; -void 
checkUnnamed18(core.List o) { +void checkUnnamed23(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkStatus(o[0]); checkStatus(o[1]); } -core.Map buildUnnamed19() => { +core.Map buildUnnamed24() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed19(core.Map o) { +void checkUnnamed24(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -1262,10 +1653,10 @@ api.DiscoveryClient buildDiscoveryClient() { o.createTime = 'foo'; o.description = 'foo'; o.displayName = 'foo'; - o.errors = buildUnnamed18(); + o.errors = buildUnnamed23(); o.expireTime = 'foo'; o.heartbeatTime = 'foo'; - o.labels = buildUnnamed19(); + o.labels = buildUnnamed24(); o.name = 'foo'; o.serviceAccount = 'foo'; o.signalsEndpoint = 'foo'; @@ -1294,7 +1685,7 @@ void checkDiscoveryClient(api.DiscoveryClient o) { o.displayName!, unittest.equals('foo'), ); - checkUnnamed18(o.errors!); + checkUnnamed23(o.errors!); unittest.expect( o.expireTime!, unittest.equals('foo'), @@ -1303,7 +1694,7 @@ void checkDiscoveryClient(api.DiscoveryClient o) { o.heartbeatTime!, unittest.equals('foo'), ); - checkUnnamed19(o.labels!); + checkUnnamed24(o.labels!); unittest.expect( o.name!, unittest.equals('foo'), @@ -1391,12 +1782,12 @@ void checkDiskEntry(api.DiskEntry o) { buildCounterDiskEntry--; } -core.List buildUnnamed20() => [ +core.List buildUnnamed25() => [ buildDiskEntry(), buildDiskEntry(), ]; -void checkUnnamed20(core.List o) { +void checkUnnamed25(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkDiskEntry(o[0]); checkDiskEntry(o[1]); @@ -1407,7 +1798,7 @@ api.DiskEntryList buildDiskEntryList() { final o = api.DiskEntryList(); buildCounterDiskEntryList++; if (buildCounterDiskEntryList < 3) { - o.entries = buildUnnamed20(); + o.entries = buildUnnamed25(); } buildCounterDiskEntryList--; return o; @@ -1416,7 +1807,7 @@ api.DiskEntryList buildDiskEntryList() { void checkDiskEntryList(api.DiskEntryList o) { buildCounterDiskEntryList++; if (buildCounterDiskEntryList < 3) { - checkUnnamed20(o.entries!); + checkUnnamed25(o.entries!); } buildCounterDiskEntryList--; } @@ -1470,12 +1861,12 @@ void checkDiskPartition(api.DiskPartition o) { buildCounterDiskPartition--; } -core.List buildUnnamed21() => [ +core.List buildUnnamed26() => [ buildDiskPartition(), buildDiskPartition(), ]; -void checkUnnamed21(core.List o) { +void checkUnnamed26(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkDiskPartition(o[0]); checkDiskPartition(o[1]); @@ -1486,7 +1877,7 @@ api.DiskPartitionList buildDiskPartitionList() { final o = api.DiskPartitionList(); buildCounterDiskPartitionList++; if (buildCounterDiskPartitionList < 3) { - o.entries = buildUnnamed21(); + o.entries = buildUnnamed26(); } buildCounterDiskPartitionList--; return o; @@ -1495,7 +1886,7 @@ api.DiskPartitionList buildDiskPartitionList() { void checkDiskPartitionList(api.DiskPartitionList o) { buildCounterDiskPartitionList++; if (buildCounterDiskPartitionList < 3) { - checkUnnamed21(o.entries!); + checkUnnamed26(o.entries!); } buildCounterDiskPartitionList--; } @@ -1506,6 +1897,8 @@ api.DiskUsageSample buildDiskUsageSample() { buildCounterDiskUsageSample++; if (buildCounterDiskUsageSample < 3) { o.averageIops = 42.0; + o.averageReadIops = 42.0; + o.averageWriteIops = 42.0; } buildCounterDiskUsageSample--; return o; @@ -1518,6 +1911,14 @@ void checkDiskUsageSample(api.DiskUsageSample o) { o.averageIops!, unittest.equals(42.0), ); + unittest.expect( + o.averageReadIops!, + unittest.equals(42.0), + ); + unittest.expect( + 
o.averageWriteIops!, + unittest.equals(42.0), + ); } buildCounterDiskUsageSample--; } @@ -1537,12 +1938,12 @@ void checkEmpty(api.Empty o) { buildCounterEmpty--; } -core.List buildUnnamed22() => [ +core.List buildUnnamed27() => [ buildFrameViolationEntry(), buildFrameViolationEntry(), ]; -void checkUnnamed22(core.List o) { +void checkUnnamed27(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkFrameViolationEntry(o[0]); checkFrameViolationEntry(o[1]); @@ -1556,7 +1957,7 @@ api.ErrorFrame buildErrorFrame() { o.ingestionTime = 'foo'; o.name = 'foo'; o.originalFrame = buildAssetFrame(); - o.violations = buildUnnamed22(); + o.violations = buildUnnamed27(); } buildCounterErrorFrame--; return o; @@ -1574,7 +1975,7 @@ void checkErrorFrame(api.ErrorFrame o) { unittest.equals('foo'), ); checkAssetFrame(o.originalFrame!); - checkUnnamed22(o.violations!); + checkUnnamed27(o.violations!); } buildCounterErrorFrame--; } @@ -1608,23 +2009,23 @@ void checkExecutionReport(api.ExecutionReport o) { buildCounterExecutionReport--; } -core.List buildUnnamed23() => [ +core.List buildUnnamed28() => [ buildImportError(), buildImportError(), ]; -void checkUnnamed23(core.List o) { +void checkUnnamed28(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkImportError(o[0]); checkImportError(o[1]); } -core.List buildUnnamed24() => [ +core.List buildUnnamed29() => [ buildImportRowError(), buildImportRowError(), ]; -void checkUnnamed24(core.List o) { +void checkUnnamed29(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkImportRowError(o[0]); checkImportRowError(o[1]); @@ -1635,10 +2036,10 @@ api.FileValidationReport buildFileValidationReport() { final o = api.FileValidationReport(); buildCounterFileValidationReport++; if (buildCounterFileValidationReport < 3) { - o.fileErrors = buildUnnamed23(); + o.fileErrors = buildUnnamed28(); o.fileName = 'foo'; o.partialReport = true; - o.rowErrors = buildUnnamed24(); + o.rowErrors = buildUnnamed29(); } buildCounterFileValidationReport--; return o; @@ -1647,13 +2048,13 @@ api.FileValidationReport buildFileValidationReport() { void checkFileValidationReport(api.FileValidationReport o) { buildCounterFileValidationReport++; if (buildCounterFileValidationReport < 3) { - checkUnnamed23(o.fileErrors!); + checkUnnamed28(o.fileErrors!); unittest.expect( o.fileName!, unittest.equals('foo'), ); unittest.expect(o.partialReport!, unittest.isTrue); - checkUnnamed24(o.rowErrors!); + checkUnnamed29(o.rowErrors!); } buildCounterFileValidationReport--; } @@ -1707,12 +2108,12 @@ void checkFrameViolationEntry(api.FrameViolationEntry o) { buildCounterFrameViolationEntry--; } -core.List buildUnnamed25() => [ +core.List buildUnnamed30() => [ buildAssetFrame(), buildAssetFrame(), ]; -void checkUnnamed25(core.List o) { +void checkUnnamed30(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkAssetFrame(o[0]); checkAssetFrame(o[1]); @@ -1723,7 +2124,7 @@ api.Frames buildFrames() { final o = api.Frames(); buildCounterFrames++; if (buildCounterFrames < 3) { - o.framesData = buildUnnamed25(); + o.framesData = buildUnnamed30(); } buildCounterFrames--; return o; @@ -1732,7 +2133,7 @@ api.Frames buildFrames() { void checkFrames(api.Frames o) { buildCounterFrames++; if (buildCounterFrames < 3) { - checkUnnamed25(o.framesData!); + checkUnnamed30(o.framesData!); } buildCounterFrames--; } @@ -1784,12 +2185,12 @@ void checkFstabEntry(api.FstabEntry o) { buildCounterFstabEntry--; } -core.List buildUnnamed26() => [ +core.List buildUnnamed31() => [ buildFstabEntry(), 
buildFstabEntry(), ]; -void checkUnnamed26(core.List o) { +void checkUnnamed31(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkFstabEntry(o[0]); checkFstabEntry(o[1]); @@ -1800,7 +2201,7 @@ api.FstabEntryList buildFstabEntryList() { final o = api.FstabEntryList(); buildCounterFstabEntryList++; if (buildCounterFstabEntryList < 3) { - o.entries = buildUnnamed26(); + o.entries = buildUnnamed31(); } buildCounterFstabEntryList--; return o; @@ -1809,17 +2210,17 @@ api.FstabEntryList buildFstabEntryList() { void checkFstabEntryList(api.FstabEntryList o) { buildCounterFstabEntryList++; if (buildCounterFstabEntryList < 3) { - checkUnnamed26(o.entries!); + checkUnnamed31(o.entries!); } buildCounterFstabEntryList--; } -core.List buildUnnamed27() => [ +core.List buildUnnamed32() => [ 'foo', 'foo', ]; -void checkUnnamed27(core.List o) { +void checkUnnamed32(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -1836,7 +2237,7 @@ api.GenericInsight buildGenericInsight() { final o = api.GenericInsight(); buildCounterGenericInsight++; if (buildCounterGenericInsight < 3) { - o.additionalInformation = buildUnnamed27(); + o.additionalInformation = buildUnnamed32(); o.defaultMessage = 'foo'; o.messageId = 'foo'; } @@ -1847,7 +2248,7 @@ api.GenericInsight buildGenericInsight() { void checkGenericInsight(api.GenericInsight o) { buildCounterGenericInsight++; if (buildCounterGenericInsight < 3) { - checkUnnamed27(o.additionalInformation!); + checkUnnamed32(o.additionalInformation!); unittest.expect( o.defaultMessage!, unittest.equals('foo'), @@ -1887,12 +2288,12 @@ void checkGenericPlatformDetails(api.GenericPlatformDetails o) { buildCounterGenericPlatformDetails--; } -core.Map buildUnnamed28() => { +core.Map buildUnnamed33() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed28(core.Map o) { +void checkUnnamed33(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -1912,7 +2313,7 @@ api.Group buildGroup() { o.createTime = 'foo'; o.description = 'foo'; o.displayName = 'foo'; - o.labels = buildUnnamed28(); + o.labels = buildUnnamed33(); o.name = 'foo'; o.updateTime = 'foo'; } @@ -1935,7 +2336,7 @@ void checkGroup(api.Group o) { o.displayName!, unittest.equals('foo'), ); - checkUnnamed28(o.labels!); + checkUnnamed33(o.labels!); unittest.expect( o.name!, unittest.equals('foo'), @@ -1981,12 +2382,12 @@ void checkGuestConfigDetails(api.GuestConfigDetails o) { buildCounterGuestConfigDetails--; } -core.List buildUnnamed29() => [ +core.List buildUnnamed34() => [ 'foo', 'foo', ]; -void checkUnnamed29(core.List o) { +void checkUnnamed34(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -2005,7 +2406,7 @@ api.GuestInstalledApplication buildGuestInstalledApplication() { if (buildCounterGuestInstalledApplication < 3) { o.applicationName = 'foo'; o.installTime = 'foo'; - o.licenses = buildUnnamed29(); + o.licenses = buildUnnamed34(); o.path = 'foo'; o.vendor = 'foo'; o.version = 'foo'; @@ -2025,7 +2426,7 @@ void checkGuestInstalledApplication(api.GuestInstalledApplication o) { o.installTime!, unittest.equals('foo'), ); - checkUnnamed29(o.licenses!); + checkUnnamed34(o.licenses!); unittest.expect( o.path!, unittest.equals('foo'), @@ -2042,12 +2443,12 @@ void checkGuestInstalledApplication(api.GuestInstalledApplication o) { buildCounterGuestInstalledApplication--; } -core.List buildUnnamed30() => [ +core.List buildUnnamed35() => [ buildGuestInstalledApplication(), buildGuestInstalledApplication(), ]; -void 
checkUnnamed30(core.List o) { +void checkUnnamed35(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGuestInstalledApplication(o[0]); checkGuestInstalledApplication(o[1]); @@ -2058,7 +2459,7 @@ api.GuestInstalledApplicationList buildGuestInstalledApplicationList() { final o = api.GuestInstalledApplicationList(); buildCounterGuestInstalledApplicationList++; if (buildCounterGuestInstalledApplicationList < 3) { - o.entries = buildUnnamed30(); + o.entries = buildUnnamed35(); } buildCounterGuestInstalledApplicationList--; return o; @@ -2067,7 +2468,7 @@ api.GuestInstalledApplicationList buildGuestInstalledApplicationList() { void checkGuestInstalledApplicationList(api.GuestInstalledApplicationList o) { buildCounterGuestInstalledApplicationList++; if (buildCounterGuestInstalledApplicationList < 3) { - checkUnnamed30(o.entries!); + checkUnnamed35(o.entries!); } buildCounterGuestInstalledApplicationList--; } @@ -2150,12 +2551,12 @@ void checkGuestRuntimeDetails(api.GuestRuntimeDetails o) { buildCounterGuestRuntimeDetails--; } -core.List buildUnnamed31() => [ +core.List buildUnnamed36() => [ 'foo', 'foo', ]; -void checkUnnamed31(core.List o) { +void checkUnnamed36(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -2172,7 +2573,7 @@ api.HostsEntry buildHostsEntry() { final o = api.HostsEntry(); buildCounterHostsEntry++; if (buildCounterHostsEntry < 3) { - o.hostNames = buildUnnamed31(); + o.hostNames = buildUnnamed36(); o.ip = 'foo'; } buildCounterHostsEntry--; @@ -2182,7 +2583,7 @@ api.HostsEntry buildHostsEntry() { void checkHostsEntry(api.HostsEntry o) { buildCounterHostsEntry++; if (buildCounterHostsEntry < 3) { - checkUnnamed31(o.hostNames!); + checkUnnamed36(o.hostNames!); unittest.expect( o.ip!, unittest.equals('foo'), @@ -2191,12 +2592,12 @@ void checkHostsEntry(api.HostsEntry o) { buildCounterHostsEntry--; } -core.List buildUnnamed32() => [ +core.List buildUnnamed37() => [ buildHostsEntry(), buildHostsEntry(), ]; -void checkUnnamed32(core.List o) { +void checkUnnamed37(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkHostsEntry(o[0]); checkHostsEntry(o[1]); @@ -2207,7 +2608,7 @@ api.HostsEntryList buildHostsEntryList() { final o = api.HostsEntryList(); buildCounterHostsEntryList++; if (buildCounterHostsEntryList < 3) { - o.entries = buildUnnamed32(); + o.entries = buildUnnamed37(); } buildCounterHostsEntryList--; return o; @@ -2216,7 +2617,7 @@ api.HostsEntryList buildHostsEntryList() { void checkHostsEntryList(api.HostsEntryList o) { buildCounterHostsEntryList++; if (buildCounterHostsEntryList < 3) { - checkUnnamed32(o.entries!); + checkUnnamed37(o.entries!); } buildCounterHostsEntryList--; } @@ -2292,12 +2693,12 @@ void checkImportError(api.ImportError o) { buildCounterImportError--; } -core.Map buildUnnamed33() => { +core.Map buildUnnamed38() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed33(core.Map o) { +void checkUnnamed38(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -2319,7 +2720,7 @@ api.ImportJob buildImportJob() { o.createTime = 'foo'; o.displayName = 'foo'; o.executionReport = buildExecutionReport(); - o.labels = buildUnnamed33(); + o.labels = buildUnnamed38(); o.name = 'foo'; o.state = 'foo'; o.updateTime = 'foo'; @@ -2349,7 +2750,7 @@ void checkImportJob(api.ImportJob o) { unittest.equals('foo'), ); checkExecutionReport(o.executionReport!); - checkUnnamed33(o.labels!); + checkUnnamed38(o.labels!); unittest.expect( o.name!, unittest.equals('foo'), @@ -2367,12 +2768,12 @@ void 
checkImportJob(api.ImportJob o) { buildCounterImportJob--; } -core.List buildUnnamed34() => [ +core.List buildUnnamed39() => [ buildImportError(), buildImportError(), ]; -void checkUnnamed34(core.List o) { +void checkUnnamed39(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkImportError(o[0]); checkImportError(o[1]); @@ -2383,8 +2784,10 @@ api.ImportRowError buildImportRowError() { final o = api.ImportRowError(); buildCounterImportRowError++; if (buildCounterImportRowError < 3) { + o.archiveError = buildImportRowErrorArchiveErrorDetails(); + o.assetTitle = 'foo'; o.csvError = buildImportRowErrorCsvErrorDetails(); - o.errors = buildUnnamed34(); + o.errors = buildUnnamed39(); o.rowNumber = 42; o.vmName = 'foo'; o.vmUuid = 'foo'; @@ -2397,8 +2800,13 @@ api.ImportRowError buildImportRowError() { void checkImportRowError(api.ImportRowError o) { buildCounterImportRowError++; if (buildCounterImportRowError < 3) { + checkImportRowErrorArchiveErrorDetails(o.archiveError!); + unittest.expect( + o.assetTitle!, + unittest.equals('foo'), + ); checkImportRowErrorCsvErrorDetails(o.csvError!); - checkUnnamed34(o.errors!); + checkUnnamed39(o.errors!); unittest.expect( o.rowNumber!, unittest.equals(42), @@ -2416,6 +2824,31 @@ void checkImportRowError(api.ImportRowError o) { buildCounterImportRowError--; } +core.int buildCounterImportRowErrorArchiveErrorDetails = 0; +api.ImportRowErrorArchiveErrorDetails buildImportRowErrorArchiveErrorDetails() { + final o = api.ImportRowErrorArchiveErrorDetails(); + buildCounterImportRowErrorArchiveErrorDetails++; + if (buildCounterImportRowErrorArchiveErrorDetails < 3) { + o.csvError = buildImportRowErrorCsvErrorDetails(); + o.filePath = 'foo'; + } + buildCounterImportRowErrorArchiveErrorDetails--; + return o; +} + +void checkImportRowErrorArchiveErrorDetails( + api.ImportRowErrorArchiveErrorDetails o) { + buildCounterImportRowErrorArchiveErrorDetails++; + if (buildCounterImportRowErrorArchiveErrorDetails < 3) { + checkImportRowErrorCsvErrorDetails(o.csvError!); + unittest.expect( + o.filePath!, + unittest.equals('foo'), + ); + } + buildCounterImportRowErrorArchiveErrorDetails--; +} + core.int buildCounterImportRowErrorCsvErrorDetails = 0; api.ImportRowErrorCsvErrorDetails buildImportRowErrorCsvErrorDetails() { final o = api.ImportRowErrorCsvErrorDetails(); @@ -2486,12 +2919,12 @@ void checkInsight(api.Insight o) { buildCounterInsight--; } -core.List buildUnnamed35() => [ +core.List buildUnnamed40() => [ buildInsight(), buildInsight(), ]; -void checkUnnamed35(core.List o) { +void checkUnnamed40(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkInsight(o[0]); checkInsight(o[1]); @@ -2502,7 +2935,7 @@ api.InsightList buildInsightList() { final o = api.InsightList(); buildCounterInsightList++; if (buildCounterInsightList < 3) { - o.insights = buildUnnamed35(); + o.insights = buildUnnamed40(); o.updateTime = 'foo'; } buildCounterInsightList--; @@ -2512,7 +2945,7 @@ api.InsightList buildInsightList() { void checkInsightList(api.InsightList o) { buildCounterInsightList++; if (buildCounterInsightList < 3) { - checkUnnamed35(o.insights!); + checkUnnamed40(o.insights!); unittest.expect( o.updateTime!, unittest.equals('foo'), @@ -2521,23 +2954,23 @@ void checkInsightList(api.InsightList o) { buildCounterInsightList--; } -core.List buildUnnamed36() => [ +core.List buildUnnamed41() => [ buildAsset(), buildAsset(), ]; -void checkUnnamed36(core.List o) { +void checkUnnamed41(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkAsset(o[0]); 
checkAsset(o[1]); } -core.List buildUnnamed37() => [ +core.List buildUnnamed42() => [ 'foo', 'foo', ]; -void checkUnnamed37(core.List o) { +void checkUnnamed42(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -2554,9 +2987,9 @@ api.ListAssetsResponse buildListAssetsResponse() { final o = api.ListAssetsResponse(); buildCounterListAssetsResponse++; if (buildCounterListAssetsResponse < 3) { - o.assets = buildUnnamed36(); + o.assets = buildUnnamed41(); o.nextPageToken = 'foo'; - o.unreachable = buildUnnamed37(); + o.unreachable = buildUnnamed42(); } buildCounterListAssetsResponse--; return o; @@ -2565,33 +2998,33 @@ api.ListAssetsResponse buildListAssetsResponse() { void checkListAssetsResponse(api.ListAssetsResponse o) { buildCounterListAssetsResponse++; if (buildCounterListAssetsResponse < 3) { - checkUnnamed36(o.assets!); + checkUnnamed41(o.assets!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed37(o.unreachable!); + checkUnnamed42(o.unreachable!); } buildCounterListAssetsResponse--; } -core.List buildUnnamed38() => [ +core.List buildUnnamed43() => [ buildDiscoveryClient(), buildDiscoveryClient(), ]; -void checkUnnamed38(core.List o) { +void checkUnnamed43(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkDiscoveryClient(o[0]); checkDiscoveryClient(o[1]); } -core.List buildUnnamed39() => [ +core.List buildUnnamed44() => [ 'foo', 'foo', ]; -void checkUnnamed39(core.List o) { +void checkUnnamed44(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -2608,9 +3041,9 @@ api.ListDiscoveryClientsResponse buildListDiscoveryClientsResponse() { final o = api.ListDiscoveryClientsResponse(); buildCounterListDiscoveryClientsResponse++; if (buildCounterListDiscoveryClientsResponse < 3) { - o.discoveryClients = buildUnnamed38(); + o.discoveryClients = buildUnnamed43(); o.nextPageToken = 'foo'; - o.unreachable = buildUnnamed39(); + o.unreachable = buildUnnamed44(); } buildCounterListDiscoveryClientsResponse--; return o; @@ -2619,33 +3052,33 @@ api.ListDiscoveryClientsResponse buildListDiscoveryClientsResponse() { void checkListDiscoveryClientsResponse(api.ListDiscoveryClientsResponse o) { buildCounterListDiscoveryClientsResponse++; if (buildCounterListDiscoveryClientsResponse < 3) { - checkUnnamed38(o.discoveryClients!); + checkUnnamed43(o.discoveryClients!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed39(o.unreachable!); + checkUnnamed44(o.unreachable!); } buildCounterListDiscoveryClientsResponse--; } -core.List buildUnnamed40() => [ +core.List buildUnnamed45() => [ buildErrorFrame(), buildErrorFrame(), ]; -void checkUnnamed40(core.List o) { +void checkUnnamed45(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkErrorFrame(o[0]); checkErrorFrame(o[1]); } -core.List buildUnnamed41() => [ +core.List buildUnnamed46() => [ 'foo', 'foo', ]; -void checkUnnamed41(core.List o) { +void checkUnnamed46(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -2662,9 +3095,9 @@ api.ListErrorFramesResponse buildListErrorFramesResponse() { final o = api.ListErrorFramesResponse(); buildCounterListErrorFramesResponse++; if (buildCounterListErrorFramesResponse < 3) { - o.errorFrames = buildUnnamed40(); + o.errorFrames = buildUnnamed45(); o.nextPageToken = 'foo'; - o.unreachable = buildUnnamed41(); + o.unreachable = buildUnnamed46(); } buildCounterListErrorFramesResponse--; return o; @@ -2673,33 +3106,33 @@ api.ListErrorFramesResponse 
buildListErrorFramesResponse() { void checkListErrorFramesResponse(api.ListErrorFramesResponse o) { buildCounterListErrorFramesResponse++; if (buildCounterListErrorFramesResponse < 3) { - checkUnnamed40(o.errorFrames!); + checkUnnamed45(o.errorFrames!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed41(o.unreachable!); + checkUnnamed46(o.unreachable!); } buildCounterListErrorFramesResponse--; } -core.List buildUnnamed42() => [ +core.List buildUnnamed47() => [ buildGroup(), buildGroup(), ]; -void checkUnnamed42(core.List o) { +void checkUnnamed47(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGroup(o[0]); checkGroup(o[1]); } -core.List buildUnnamed43() => [ +core.List buildUnnamed48() => [ 'foo', 'foo', ]; -void checkUnnamed43(core.List o) { +void checkUnnamed48(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -2716,9 +3149,9 @@ api.ListGroupsResponse buildListGroupsResponse() { final o = api.ListGroupsResponse(); buildCounterListGroupsResponse++; if (buildCounterListGroupsResponse < 3) { - o.groups = buildUnnamed42(); + o.groups = buildUnnamed47(); o.nextPageToken = 'foo'; - o.unreachable = buildUnnamed43(); + o.unreachable = buildUnnamed48(); } buildCounterListGroupsResponse--; return o; @@ -2727,33 +3160,33 @@ api.ListGroupsResponse buildListGroupsResponse() { void checkListGroupsResponse(api.ListGroupsResponse o) { buildCounterListGroupsResponse++; if (buildCounterListGroupsResponse < 3) { - checkUnnamed42(o.groups!); + checkUnnamed47(o.groups!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed43(o.unreachable!); + checkUnnamed48(o.unreachable!); } buildCounterListGroupsResponse--; } -core.List buildUnnamed44() => [ +core.List buildUnnamed49() => [ buildImportDataFile(), buildImportDataFile(), ]; -void checkUnnamed44(core.List o) { +void checkUnnamed49(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkImportDataFile(o[0]); checkImportDataFile(o[1]); } -core.List buildUnnamed45() => [ +core.List buildUnnamed50() => [ 'foo', 'foo', ]; -void checkUnnamed45(core.List o) { +void checkUnnamed50(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -2770,9 +3203,9 @@ api.ListImportDataFilesResponse buildListImportDataFilesResponse() { final o = api.ListImportDataFilesResponse(); buildCounterListImportDataFilesResponse++; if (buildCounterListImportDataFilesResponse < 3) { - o.importDataFiles = buildUnnamed44(); + o.importDataFiles = buildUnnamed49(); o.nextPageToken = 'foo'; - o.unreachable = buildUnnamed45(); + o.unreachable = buildUnnamed50(); } buildCounterListImportDataFilesResponse--; return o; @@ -2781,33 +3214,33 @@ api.ListImportDataFilesResponse buildListImportDataFilesResponse() { void checkListImportDataFilesResponse(api.ListImportDataFilesResponse o) { buildCounterListImportDataFilesResponse++; if (buildCounterListImportDataFilesResponse < 3) { - checkUnnamed44(o.importDataFiles!); + checkUnnamed49(o.importDataFiles!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed45(o.unreachable!); + checkUnnamed50(o.unreachable!); } buildCounterListImportDataFilesResponse--; } -core.List buildUnnamed46() => [ +core.List buildUnnamed51() => [ buildImportJob(), buildImportJob(), ]; -void checkUnnamed46(core.List o) { +void checkUnnamed51(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkImportJob(o[0]); checkImportJob(o[1]); } -core.List buildUnnamed47() => [ +core.List buildUnnamed52() => [ 'foo', 'foo', ]; 
-void checkUnnamed47(core.List o) { +void checkUnnamed52(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -2824,9 +3257,9 @@ api.ListImportJobsResponse buildListImportJobsResponse() { final o = api.ListImportJobsResponse(); buildCounterListImportJobsResponse++; if (buildCounterListImportJobsResponse < 3) { - o.importJobs = buildUnnamed46(); + o.importJobs = buildUnnamed51(); o.nextPageToken = 'foo'; - o.unreachable = buildUnnamed47(); + o.unreachable = buildUnnamed52(); } buildCounterListImportJobsResponse--; return o; @@ -2835,22 +3268,22 @@ api.ListImportJobsResponse buildListImportJobsResponse() { void checkListImportJobsResponse(api.ListImportJobsResponse o) { buildCounterListImportJobsResponse++; if (buildCounterListImportJobsResponse < 3) { - checkUnnamed46(o.importJobs!); + checkUnnamed51(o.importJobs!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed47(o.unreachable!); + checkUnnamed52(o.unreachable!); } buildCounterListImportJobsResponse--; } -core.List buildUnnamed48() => [ +core.List buildUnnamed53() => [ buildLocation(), buildLocation(), ]; -void checkUnnamed48(core.List o) { +void checkUnnamed53(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkLocation(o[0]); checkLocation(o[1]); @@ -2861,7 +3294,7 @@ api.ListLocationsResponse buildListLocationsResponse() { final o = api.ListLocationsResponse(); buildCounterListLocationsResponse++; if (buildCounterListLocationsResponse < 3) { - o.locations = buildUnnamed48(); + o.locations = buildUnnamed53(); o.nextPageToken = 'foo'; } buildCounterListLocationsResponse--; @@ -2871,7 +3304,7 @@ api.ListLocationsResponse buildListLocationsResponse() { void checkListLocationsResponse(api.ListLocationsResponse o) { buildCounterListLocationsResponse++; if (buildCounterListLocationsResponse < 3) { - checkUnnamed48(o.locations!); + checkUnnamed53(o.locations!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -2880,12 +3313,12 @@ void checkListLocationsResponse(api.ListLocationsResponse o) { buildCounterListLocationsResponse--; } -core.List buildUnnamed49() => [ +core.List buildUnnamed54() => [ buildOperation(), buildOperation(), ]; -void checkUnnamed49(core.List o) { +void checkUnnamed54(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkOperation(o[0]); checkOperation(o[1]); @@ -2897,7 +3330,7 @@ api.ListOperationsResponse buildListOperationsResponse() { buildCounterListOperationsResponse++; if (buildCounterListOperationsResponse < 3) { o.nextPageToken = 'foo'; - o.operations = buildUnnamed49(); + o.operations = buildUnnamed54(); } buildCounterListOperationsResponse--; return o; @@ -2910,28 +3343,28 @@ void checkListOperationsResponse(api.ListOperationsResponse o) { o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed49(o.operations!); + checkUnnamed54(o.operations!); } buildCounterListOperationsResponse--; } -core.List buildUnnamed50() => [ +core.List buildUnnamed55() => [ buildPreferenceSet(), buildPreferenceSet(), ]; -void checkUnnamed50(core.List o) { +void checkUnnamed55(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkPreferenceSet(o[0]); checkPreferenceSet(o[1]); } -core.List buildUnnamed51() => [ +core.List buildUnnamed56() => [ 'foo', 'foo', ]; -void checkUnnamed51(core.List o) { +void checkUnnamed56(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -2949,8 +3382,8 @@ api.ListPreferenceSetsResponse buildListPreferenceSetsResponse() { buildCounterListPreferenceSetsResponse++; if 
(buildCounterListPreferenceSetsResponse < 3) { o.nextPageToken = 'foo'; - o.preferenceSets = buildUnnamed50(); - o.unreachable = buildUnnamed51(); + o.preferenceSets = buildUnnamed55(); + o.unreachable = buildUnnamed56(); } buildCounterListPreferenceSetsResponse--; return o; @@ -2963,29 +3396,29 @@ void checkListPreferenceSetsResponse(api.ListPreferenceSetsResponse o) { o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed50(o.preferenceSets!); - checkUnnamed51(o.unreachable!); + checkUnnamed55(o.preferenceSets!); + checkUnnamed56(o.unreachable!); } buildCounterListPreferenceSetsResponse--; } -core.List buildUnnamed52() => [ +core.List buildUnnamed57() => [ buildReportConfig(), buildReportConfig(), ]; -void checkUnnamed52(core.List o) { +void checkUnnamed57(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkReportConfig(o[0]); checkReportConfig(o[1]); } -core.List buildUnnamed53() => [ +core.List buildUnnamed58() => [ 'foo', 'foo', ]; -void checkUnnamed53(core.List o) { +void checkUnnamed58(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -3003,8 +3436,8 @@ api.ListReportConfigsResponse buildListReportConfigsResponse() { buildCounterListReportConfigsResponse++; if (buildCounterListReportConfigsResponse < 3) { o.nextPageToken = 'foo'; - o.reportConfigs = buildUnnamed52(); - o.unreachable = buildUnnamed53(); + o.reportConfigs = buildUnnamed57(); + o.unreachable = buildUnnamed58(); } buildCounterListReportConfigsResponse--; return o; @@ -3017,29 +3450,29 @@ void checkListReportConfigsResponse(api.ListReportConfigsResponse o) { o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed52(o.reportConfigs!); - checkUnnamed53(o.unreachable!); + checkUnnamed57(o.reportConfigs!); + checkUnnamed58(o.unreachable!); } buildCounterListReportConfigsResponse--; } -core.List buildUnnamed54() => [ +core.List buildUnnamed59() => [ buildReport(), buildReport(), ]; -void checkUnnamed54(core.List o) { +void checkUnnamed59(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkReport(o[0]); checkReport(o[1]); } -core.List buildUnnamed55() => [ +core.List buildUnnamed60() => [ 'foo', 'foo', ]; -void checkUnnamed55(core.List o) { +void checkUnnamed60(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -3057,8 +3490,8 @@ api.ListReportsResponse buildListReportsResponse() { buildCounterListReportsResponse++; if (buildCounterListReportsResponse < 3) { o.nextPageToken = 'foo'; - o.reports = buildUnnamed54(); - o.unreachable = buildUnnamed55(); + o.reports = buildUnnamed59(); + o.unreachable = buildUnnamed60(); } buildCounterListReportsResponse--; return o; @@ -3071,29 +3504,29 @@ void checkListReportsResponse(api.ListReportsResponse o) { o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed54(o.reports!); - checkUnnamed55(o.unreachable!); + checkUnnamed59(o.reports!); + checkUnnamed60(o.unreachable!); } buildCounterListReportsResponse--; } -core.List buildUnnamed56() => [ +core.List buildUnnamed61() => [ buildSource(), buildSource(), ]; -void checkUnnamed56(core.List o) { +void checkUnnamed61(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkSource(o[0]); checkSource(o[1]); } -core.List buildUnnamed57() => [ +core.List buildUnnamed62() => [ 'foo', 'foo', ]; -void checkUnnamed57(core.List o) { +void checkUnnamed62(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -3111,8 +3544,8 @@ api.ListSourcesResponse buildListSourcesResponse() { buildCounterListSourcesResponse++; if 
(buildCounterListSourcesResponse < 3) { o.nextPageToken = 'foo'; - o.sources = buildUnnamed56(); - o.unreachable = buildUnnamed57(); + o.sources = buildUnnamed61(); + o.unreachable = buildUnnamed62(); } buildCounterListSourcesResponse--; return o; @@ -3125,18 +3558,18 @@ void checkListSourcesResponse(api.ListSourcesResponse o) { o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed56(o.sources!); - checkUnnamed57(o.unreachable!); + checkUnnamed61(o.sources!); + checkUnnamed62(o.unreachable!); } buildCounterListSourcesResponse--; } -core.Map buildUnnamed58() => { +core.Map buildUnnamed63() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed58(core.Map o) { +void checkUnnamed63(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -3148,7 +3581,7 @@ void checkUnnamed58(core.Map o) { ); } -core.Map buildUnnamed59() => { +core.Map buildUnnamed64() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -3161,7 +3594,7 @@ core.Map buildUnnamed59() => { }, }; -void checkUnnamed59(core.Map o) { +void checkUnnamed64(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted1 = (o['x']!) as core.Map; unittest.expect(casted1, unittest.hasLength(3)); @@ -3199,9 +3632,9 @@ api.Location buildLocation() { buildCounterLocation++; if (buildCounterLocation < 3) { o.displayName = 'foo'; - o.labels = buildUnnamed58(); + o.labels = buildUnnamed63(); o.locationId = 'foo'; - o.metadata = buildUnnamed59(); + o.metadata = buildUnnamed64(); o.name = 'foo'; } buildCounterLocation--; @@ -3215,12 +3648,12 @@ void checkLocation(api.Location o) { o.displayName!, unittest.equals('foo'), ); - checkUnnamed58(o.labels!); + checkUnnamed63(o.labels!); unittest.expect( o.locationId!, unittest.equals('foo'), ); - checkUnnamed59(o.metadata!); + checkUnnamed64(o.metadata!); unittest.expect( o.name!, unittest.equals('foo'), @@ -3236,6 +3669,7 @@ api.MachineArchitectureDetails buildMachineArchitectureDetails() { if (buildCounterMachineArchitectureDetails < 3) { o.bios = buildBiosDetails(); o.cpuArchitecture = 'foo'; + o.cpuManufacturer = 'foo'; o.cpuName = 'foo'; o.cpuSocketCount = 42; o.cpuThreadCount = 42; @@ -3255,6 +3689,10 @@ void checkMachineArchitectureDetails(api.MachineArchitectureDetails o) { o.cpuArchitecture!, unittest.equals('foo'), ); + unittest.expect( + o.cpuManufacturer!, + unittest.equals('foo'), + ); unittest.expect( o.cpuName!, unittest.equals('foo'), @@ -3403,12 +3841,12 @@ void checkMachineNetworkDetails(api.MachineNetworkDetails o) { buildCounterMachineNetworkDetails--; } -core.List buildUnnamed60() => [ +core.List buildUnnamed65() => [ buildMachineSeries(), buildMachineSeries(), ]; -void checkUnnamed60(core.List o) { +void checkUnnamed65(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkMachineSeries(o[0]); checkMachineSeries(o[1]); @@ -3419,7 +3857,7 @@ api.MachinePreferences buildMachinePreferences() { final o = api.MachinePreferences(); buildCounterMachinePreferences++; if (buildCounterMachinePreferences < 3) { - o.allowedMachineSeries = buildUnnamed60(); + o.allowedMachineSeries = buildUnnamed65(); } buildCounterMachinePreferences--; return o; @@ -3428,7 +3866,7 @@ api.MachinePreferences buildMachinePreferences() { void checkMachinePreferences(api.MachinePreferences o) { buildCounterMachinePreferences++; if (buildCounterMachinePreferences < 3) { - checkUnnamed60(o.allowedMachineSeries!); + checkUnnamed65(o.allowedMachineSeries!); } buildCounterMachinePreferences--; } @@ -3530,6 +3968,219 @@ void checkMoney(api.Money o) { buildCounterMoney--; } +core.int 
buildCounterMySqlPlugin = 0; +api.MySqlPlugin buildMySqlPlugin() { + final o = api.MySqlPlugin(); + buildCounterMySqlPlugin++; + if (buildCounterMySqlPlugin < 3) { + o.enabled = true; + o.plugin = 'foo'; + o.version = 'foo'; + } + buildCounterMySqlPlugin--; + return o; +} + +void checkMySqlPlugin(api.MySqlPlugin o) { + buildCounterMySqlPlugin++; + if (buildCounterMySqlPlugin < 3) { + unittest.expect(o.enabled!, unittest.isTrue); + unittest.expect( + o.plugin!, + unittest.equals('foo'), + ); + unittest.expect( + o.version!, + unittest.equals('foo'), + ); + } + buildCounterMySqlPlugin--; +} + +core.int buildCounterMySqlProperty = 0; +api.MySqlProperty buildMySqlProperty() { + final o = api.MySqlProperty(); + buildCounterMySqlProperty++; + if (buildCounterMySqlProperty < 3) { + o.enabled = true; + o.numericValue = 'foo'; + o.property = 'foo'; + } + buildCounterMySqlProperty--; + return o; +} + +void checkMySqlProperty(api.MySqlProperty o) { + buildCounterMySqlProperty++; + if (buildCounterMySqlProperty < 3) { + unittest.expect(o.enabled!, unittest.isTrue); + unittest.expect( + o.numericValue!, + unittest.equals('foo'), + ); + unittest.expect( + o.property!, + unittest.equals('foo'), + ); + } + buildCounterMySqlProperty--; +} + +core.List buildUnnamed66() => [ + buildMySqlStorageEngineDetails(), + buildMySqlStorageEngineDetails(), + ]; + +void checkUnnamed66(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkMySqlStorageEngineDetails(o[0]); + checkMySqlStorageEngineDetails(o[1]); +} + +core.int buildCounterMySqlSchemaDetails = 0; +api.MySqlSchemaDetails buildMySqlSchemaDetails() { + final o = api.MySqlSchemaDetails(); + buildCounterMySqlSchemaDetails++; + if (buildCounterMySqlSchemaDetails < 3) { + o.storageEngines = buildUnnamed66(); + } + buildCounterMySqlSchemaDetails--; + return o; +} + +void checkMySqlSchemaDetails(api.MySqlSchemaDetails o) { + buildCounterMySqlSchemaDetails++; + if (buildCounterMySqlSchemaDetails < 3) { + checkUnnamed66(o.storageEngines!); + } + buildCounterMySqlSchemaDetails--; +} + +core.int buildCounterMySqlStorageEngineDetails = 0; +api.MySqlStorageEngineDetails buildMySqlStorageEngineDetails() { + final o = api.MySqlStorageEngineDetails(); + buildCounterMySqlStorageEngineDetails++; + if (buildCounterMySqlStorageEngineDetails < 3) { + o.encryptedTableCount = 42; + o.engine = 'foo'; + o.tableCount = 42; + } + buildCounterMySqlStorageEngineDetails--; + return o; +} + +void checkMySqlStorageEngineDetails(api.MySqlStorageEngineDetails o) { + buildCounterMySqlStorageEngineDetails++; + if (buildCounterMySqlStorageEngineDetails < 3) { + unittest.expect( + o.encryptedTableCount!, + unittest.equals(42), + ); + unittest.expect( + o.engine!, + unittest.equals('foo'), + ); + unittest.expect( + o.tableCount!, + unittest.equals(42), + ); + } + buildCounterMySqlStorageEngineDetails--; +} + +core.int buildCounterMySqlVariable = 0; +api.MySqlVariable buildMySqlVariable() { + final o = api.MySqlVariable(); + buildCounterMySqlVariable++; + if (buildCounterMySqlVariable < 3) { + o.category = 'foo'; + o.value = 'foo'; + o.variable = 'foo'; + } + buildCounterMySqlVariable--; + return o; +} + +void checkMySqlVariable(api.MySqlVariable o) { + buildCounterMySqlVariable++; + if (buildCounterMySqlVariable < 3) { + unittest.expect( + o.category!, + unittest.equals('foo'), + ); + unittest.expect( + o.value!, + unittest.equals('foo'), + ); + unittest.expect( + o.variable!, + unittest.equals('foo'), + ); + } + buildCounterMySqlVariable--; +} + +core.List buildUnnamed67() => [ + 
buildMySqlPlugin(), + buildMySqlPlugin(), + ]; + +void checkUnnamed67(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkMySqlPlugin(o[0]); + checkMySqlPlugin(o[1]); +} + +core.List buildUnnamed68() => [ + buildMySqlProperty(), + buildMySqlProperty(), + ]; + +void checkUnnamed68(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkMySqlProperty(o[0]); + checkMySqlProperty(o[1]); +} + +core.List buildUnnamed69() => [ + buildMySqlVariable(), + buildMySqlVariable(), + ]; + +void checkUnnamed69(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkMySqlVariable(o[0]); + checkMySqlVariable(o[1]); +} + +core.int buildCounterMysqlDatabaseDeployment = 0; +api.MysqlDatabaseDeployment buildMysqlDatabaseDeployment() { + final o = api.MysqlDatabaseDeployment(); + buildCounterMysqlDatabaseDeployment++; + if (buildCounterMysqlDatabaseDeployment < 3) { + o.plugins = buildUnnamed67(); + o.properties = buildUnnamed68(); + o.resourceGroupsCount = 42; + o.variables = buildUnnamed69(); + } + buildCounterMysqlDatabaseDeployment--; + return o; +} + +void checkMysqlDatabaseDeployment(api.MysqlDatabaseDeployment o) { + buildCounterMysqlDatabaseDeployment++; + if (buildCounterMysqlDatabaseDeployment < 3) { + checkUnnamed67(o.plugins!); + checkUnnamed68(o.properties!); + unittest.expect( + o.resourceGroupsCount!, + unittest.equals(42), + ); + checkUnnamed69(o.variables!); + } + buildCounterMysqlDatabaseDeployment--; +} + core.int buildCounterNetworkAdapterDetails = 0; api.NetworkAdapterDetails buildNetworkAdapterDetails() { final o = api.NetworkAdapterDetails(); @@ -3559,12 +4210,12 @@ void checkNetworkAdapterDetails(api.NetworkAdapterDetails o) { buildCounterNetworkAdapterDetails--; } -core.List buildUnnamed61() => [ +core.List buildUnnamed70() => [ buildNetworkAdapterDetails(), buildNetworkAdapterDetails(), ]; -void checkUnnamed61(core.List o) { +void checkUnnamed70(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkNetworkAdapterDetails(o[0]); checkNetworkAdapterDetails(o[1]); @@ -3575,7 +4226,7 @@ api.NetworkAdapterList buildNetworkAdapterList() { final o = api.NetworkAdapterList(); buildCounterNetworkAdapterList++; if (buildCounterNetworkAdapterList < 3) { - o.entries = buildUnnamed61(); + o.entries = buildUnnamed70(); } buildCounterNetworkAdapterList--; return o; @@ -3584,7 +4235,7 @@ api.NetworkAdapterList buildNetworkAdapterList() { void checkNetworkAdapterList(api.NetworkAdapterList o) { buildCounterNetworkAdapterList++; if (buildCounterNetworkAdapterList < 3) { - checkUnnamed61(o.entries!); + checkUnnamed70(o.entries!); } buildCounterNetworkAdapterList--; } @@ -3631,12 +4282,12 @@ void checkNetworkAddress(api.NetworkAddress o) { buildCounterNetworkAddress--; } -core.List buildUnnamed62() => [ +core.List buildUnnamed71() => [ buildNetworkAddress(), buildNetworkAddress(), ]; -void checkUnnamed62(core.List o) { +void checkUnnamed71(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkNetworkAddress(o[0]); checkNetworkAddress(o[1]); @@ -3647,7 +4298,7 @@ api.NetworkAddressList buildNetworkAddressList() { final o = api.NetworkAddressList(); buildCounterNetworkAddressList++; if (buildCounterNetworkAddressList < 3) { - o.entries = buildUnnamed62(); + o.entries = buildUnnamed71(); } buildCounterNetworkAddressList--; return o; @@ -3656,7 +4307,7 @@ api.NetworkAddressList buildNetworkAddressList() { void checkNetworkAddressList(api.NetworkAddressList o) { buildCounterNetworkAddressList++; if (buildCounterNetworkAddressList < 3) { - 
checkUnnamed62(o.entries!); + checkUnnamed71(o.entries!); } buildCounterNetworkAddressList--; } @@ -3718,12 +4369,12 @@ void checkNetworkConnection(api.NetworkConnection o) { buildCounterNetworkConnection--; } -core.List buildUnnamed63() => [ +core.List buildUnnamed72() => [ buildNetworkConnection(), buildNetworkConnection(), ]; -void checkUnnamed63(core.List o) { +void checkUnnamed72(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkNetworkConnection(o[0]); checkNetworkConnection(o[1]); @@ -3734,7 +4385,7 @@ api.NetworkConnectionList buildNetworkConnectionList() { final o = api.NetworkConnectionList(); buildCounterNetworkConnectionList++; if (buildCounterNetworkConnectionList < 3) { - o.entries = buildUnnamed63(); + o.entries = buildUnnamed72(); } buildCounterNetworkConnectionList--; return o; @@ -3743,7 +4394,7 @@ api.NetworkConnectionList buildNetworkConnectionList() { void checkNetworkConnectionList(api.NetworkConnectionList o) { buildCounterNetworkConnectionList++; if (buildCounterNetworkConnectionList < 3) { - checkUnnamed63(o.entries!); + checkUnnamed72(o.entries!); } buildCounterNetworkConnectionList--; } @@ -3775,12 +4426,12 @@ void checkNetworkUsageSample(api.NetworkUsageSample o) { buildCounterNetworkUsageSample--; } -core.List buildUnnamed64() => [ +core.List buildUnnamed73() => [ 'foo', 'foo', ]; -void checkUnnamed64(core.List o) { +void checkUnnamed73(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -3798,7 +4449,7 @@ api.NfsExport buildNfsExport() { buildCounterNfsExport++; if (buildCounterNfsExport < 3) { o.exportDirectory = 'foo'; - o.hosts = buildUnnamed64(); + o.hosts = buildUnnamed73(); } buildCounterNfsExport--; return o; @@ -3811,17 +4462,17 @@ void checkNfsExport(api.NfsExport o) { o.exportDirectory!, unittest.equals('foo'), ); - checkUnnamed64(o.hosts!); + checkUnnamed73(o.hosts!); } buildCounterNfsExport--; } -core.List buildUnnamed65() => [ +core.List buildUnnamed74() => [ buildNfsExport(), buildNfsExport(), ]; -void checkUnnamed65(core.List o) { +void checkUnnamed74(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkNfsExport(o[0]); checkNfsExport(o[1]); @@ -3832,7 +4483,7 @@ api.NfsExportList buildNfsExportList() { final o = api.NfsExportList(); buildCounterNfsExportList++; if (buildCounterNfsExportList < 3) { - o.entries = buildUnnamed65(); + o.entries = buildUnnamed74(); } buildCounterNfsExportList--; return o; @@ -3841,7 +4492,7 @@ api.NfsExportList buildNfsExportList() { void checkNfsExportList(api.NfsExportList o) { buildCounterNfsExportList++; if (buildCounterNfsExportList < 3) { - checkUnnamed65(o.entries!); + checkUnnamed74(o.entries!); } buildCounterNfsExportList--; } @@ -3883,12 +4534,12 @@ void checkOpenFileDetails(api.OpenFileDetails o) { buildCounterOpenFileDetails--; } -core.List buildUnnamed66() => [ +core.List buildUnnamed75() => [ buildOpenFileDetails(), buildOpenFileDetails(), ]; -void checkUnnamed66(core.List o) { +void checkUnnamed75(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkOpenFileDetails(o[0]); checkOpenFileDetails(o[1]); @@ -3899,7 +4550,7 @@ api.OpenFileList buildOpenFileList() { final o = api.OpenFileList(); buildCounterOpenFileList++; if (buildCounterOpenFileList < 3) { - o.entries = buildUnnamed66(); + o.entries = buildUnnamed75(); } buildCounterOpenFileList--; return o; @@ -3908,12 +4559,12 @@ api.OpenFileList buildOpenFileList() { void checkOpenFileList(api.OpenFileList o) { buildCounterOpenFileList++; if (buildCounterOpenFileList < 3) { - 
checkUnnamed66(o.entries!); + checkUnnamed75(o.entries!); } buildCounterOpenFileList--; } -core.Map buildUnnamed67() => { +core.Map buildUnnamed76() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -3926,7 +4577,7 @@ core.Map buildUnnamed67() => { }, }; -void checkUnnamed67(core.Map o) { +void checkUnnamed76(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted3 = (o['x']!) as core.Map; unittest.expect(casted3, unittest.hasLength(3)); @@ -3958,7 +4609,7 @@ void checkUnnamed67(core.Map o) { ); } -core.Map buildUnnamed68() => { +core.Map buildUnnamed77() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -3971,7 +4622,7 @@ core.Map buildUnnamed68() => { }, }; -void checkUnnamed68(core.Map o) { +void checkUnnamed77(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted5 = (o['x']!) as core.Map; unittest.expect(casted5, unittest.hasLength(3)); @@ -4010,9 +4661,9 @@ api.Operation buildOperation() { if (buildCounterOperation < 3) { o.done = true; o.error = buildStatus(); - o.metadata = buildUnnamed67(); + o.metadata = buildUnnamed76(); o.name = 'foo'; - o.response = buildUnnamed68(); + o.response = buildUnnamed77(); } buildCounterOperation--; return o; @@ -4023,12 +4674,12 @@ void checkOperation(api.Operation o) { if (buildCounterOperation < 3) { unittest.expect(o.done!, unittest.isTrue); checkStatus(o.error!); - checkUnnamed67(o.metadata!); + checkUnnamed76(o.metadata!); unittest.expect( o.name!, unittest.equals('foo'), ); - checkUnnamed68(o.response!); + checkUnnamed77(o.response!); } buildCounterOperation--; } @@ -4114,7 +4765,190 @@ void checkPlatformDetails(api.PlatformDetails o) { checkPhysicalPlatformDetails(o.physicalDetails!); checkVmwarePlatformDetails(o.vmwareDetails!); } - buildCounterPlatformDetails--; + buildCounterPlatformDetails--; +} + +core.List buildUnnamed78() => [ + buildPostgreSqlProperty(), + buildPostgreSqlProperty(), + ]; + +void checkUnnamed78(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkPostgreSqlProperty(o[0]); + checkPostgreSqlProperty(o[1]); +} + +core.List buildUnnamed79() => [ + buildPostgreSqlSetting(), + buildPostgreSqlSetting(), + ]; + +void checkUnnamed79(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkPostgreSqlSetting(o[0]); + checkPostgreSqlSetting(o[1]); +} + +core.int buildCounterPostgreSqlDatabaseDeployment = 0; +api.PostgreSqlDatabaseDeployment buildPostgreSqlDatabaseDeployment() { + final o = api.PostgreSqlDatabaseDeployment(); + buildCounterPostgreSqlDatabaseDeployment++; + if (buildCounterPostgreSqlDatabaseDeployment < 3) { + o.properties = buildUnnamed78(); + o.settings = buildUnnamed79(); + } + buildCounterPostgreSqlDatabaseDeployment--; + return o; +} + +void checkPostgreSqlDatabaseDeployment(api.PostgreSqlDatabaseDeployment o) { + buildCounterPostgreSqlDatabaseDeployment++; + if (buildCounterPostgreSqlDatabaseDeployment < 3) { + checkUnnamed78(o.properties!); + checkUnnamed79(o.settings!); + } + buildCounterPostgreSqlDatabaseDeployment--; +} + +core.int buildCounterPostgreSqlExtension = 0; +api.PostgreSqlExtension buildPostgreSqlExtension() { + final o = api.PostgreSqlExtension(); + buildCounterPostgreSqlExtension++; + if (buildCounterPostgreSqlExtension < 3) { + o.extension = 'foo'; + o.version = 'foo'; + } + buildCounterPostgreSqlExtension--; + return o; +} + +void checkPostgreSqlExtension(api.PostgreSqlExtension o) { + buildCounterPostgreSqlExtension++; + if (buildCounterPostgreSqlExtension < 3) { + unittest.expect( + o.extension!, + unittest.equals('foo'), + ); + unittest.expect( 
+ o.version!, + unittest.equals('foo'), + ); + } + buildCounterPostgreSqlExtension--; +} + +core.int buildCounterPostgreSqlProperty = 0; +api.PostgreSqlProperty buildPostgreSqlProperty() { + final o = api.PostgreSqlProperty(); + buildCounterPostgreSqlProperty++; + if (buildCounterPostgreSqlProperty < 3) { + o.enabled = true; + o.numericValue = 'foo'; + o.property = 'foo'; + } + buildCounterPostgreSqlProperty--; + return o; +} + +void checkPostgreSqlProperty(api.PostgreSqlProperty o) { + buildCounterPostgreSqlProperty++; + if (buildCounterPostgreSqlProperty < 3) { + unittest.expect(o.enabled!, unittest.isTrue); + unittest.expect( + o.numericValue!, + unittest.equals('foo'), + ); + unittest.expect( + o.property!, + unittest.equals('foo'), + ); + } + buildCounterPostgreSqlProperty--; +} + +core.List buildUnnamed80() => [ + buildPostgreSqlExtension(), + buildPostgreSqlExtension(), + ]; + +void checkUnnamed80(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkPostgreSqlExtension(o[0]); + checkPostgreSqlExtension(o[1]); +} + +core.int buildCounterPostgreSqlSchemaDetails = 0; +api.PostgreSqlSchemaDetails buildPostgreSqlSchemaDetails() { + final o = api.PostgreSqlSchemaDetails(); + buildCounterPostgreSqlSchemaDetails++; + if (buildCounterPostgreSqlSchemaDetails < 3) { + o.foreignTablesCount = 42; + o.postgresqlExtensions = buildUnnamed80(); + } + buildCounterPostgreSqlSchemaDetails--; + return o; +} + +void checkPostgreSqlSchemaDetails(api.PostgreSqlSchemaDetails o) { + buildCounterPostgreSqlSchemaDetails++; + if (buildCounterPostgreSqlSchemaDetails < 3) { + unittest.expect( + o.foreignTablesCount!, + unittest.equals(42), + ); + checkUnnamed80(o.postgresqlExtensions!); + } + buildCounterPostgreSqlSchemaDetails--; +} + +core.int buildCounterPostgreSqlSetting = 0; +api.PostgreSqlSetting buildPostgreSqlSetting() { + final o = api.PostgreSqlSetting(); + buildCounterPostgreSqlSetting++; + if (buildCounterPostgreSqlSetting < 3) { + o.boolValue = true; + o.intValue = 'foo'; + o.realValue = 42.0; + o.setting = 'foo'; + o.source = 'foo'; + o.stringValue = 'foo'; + o.unit = 'foo'; + } + buildCounterPostgreSqlSetting--; + return o; +} + +void checkPostgreSqlSetting(api.PostgreSqlSetting o) { + buildCounterPostgreSqlSetting++; + if (buildCounterPostgreSqlSetting < 3) { + unittest.expect(o.boolValue!, unittest.isTrue); + unittest.expect( + o.intValue!, + unittest.equals('foo'), + ); + unittest.expect( + o.realValue!, + unittest.equals(42.0), + ); + unittest.expect( + o.setting!, + unittest.equals('foo'), + ); + unittest.expect( + o.source!, + unittest.equals('foo'), + ); + unittest.expect( + o.stringValue!, + unittest.equals('foo'), + ); + unittest.expect( + o.unit!, + unittest.equals('foo'), + ); + } + buildCounterPostgreSqlSetting--; } core.int buildCounterPreferenceSet = 0; @@ -4161,12 +4995,12 @@ void checkPreferenceSet(api.PreferenceSet o) { buildCounterPreferenceSet--; } -core.List buildUnnamed69() => [ +core.List buildUnnamed81() => [ 'foo', 'foo', ]; -void checkUnnamed69(core.List o) { +void checkUnnamed81(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -4183,7 +5017,7 @@ api.RegionPreferences buildRegionPreferences() { final o = api.RegionPreferences(); buildCounterRegionPreferences++; if (buildCounterRegionPreferences < 3) { - o.preferredRegions = buildUnnamed69(); + o.preferredRegions = buildUnnamed81(); } buildCounterRegionPreferences--; return o; @@ -4192,7 +5026,7 @@ api.RegionPreferences buildRegionPreferences() { void 
checkRegionPreferences(api.RegionPreferences o) { buildCounterRegionPreferences++; if (buildCounterRegionPreferences < 3) { - checkUnnamed69(o.preferredRegions!); + checkUnnamed81(o.preferredRegions!); } buildCounterRegionPreferences--; } @@ -4292,12 +5126,12 @@ void checkReportAssetFramesResponse(api.ReportAssetFramesResponse o) { buildCounterReportAssetFramesResponse--; } -core.List buildUnnamed70() => [ +core.List buildUnnamed82() => [ buildReportConfigGroupPreferenceSetAssignment(), buildReportConfigGroupPreferenceSetAssignment(), ]; -void checkUnnamed70(core.List o) { +void checkUnnamed82(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkReportConfigGroupPreferenceSetAssignment(o[0]); checkReportConfigGroupPreferenceSetAssignment(o[1]); @@ -4311,7 +5145,7 @@ api.ReportConfig buildReportConfig() { o.createTime = 'foo'; o.description = 'foo'; o.displayName = 'foo'; - o.groupPreferencesetAssignments = buildUnnamed70(); + o.groupPreferencesetAssignments = buildUnnamed82(); o.name = 'foo'; o.updateTime = 'foo'; } @@ -4334,7 +5168,7 @@ void checkReportConfig(api.ReportConfig o) { o.displayName!, unittest.equals('foo'), ); - checkUnnamed70(o.groupPreferencesetAssignments!); + checkUnnamed82(o.groupPreferencesetAssignments!); unittest.expect( o.name!, unittest.equals('foo'), @@ -4376,12 +5210,12 @@ void checkReportConfigGroupPreferenceSetAssignment( buildCounterReportConfigGroupPreferenceSetAssignment--; } -core.List buildUnnamed71() => [ +core.List buildUnnamed83() => [ buildReportSummaryGroupFinding(), buildReportSummaryGroupFinding(), ]; -void checkUnnamed71(core.List o) { +void checkUnnamed83(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkReportSummaryGroupFinding(o[0]); checkReportSummaryGroupFinding(o[1]); @@ -4393,7 +5227,7 @@ api.ReportSummary buildReportSummary() { buildCounterReportSummary++; if (buildCounterReportSummary < 3) { o.allAssetsStats = buildReportSummaryAssetAggregateStats(); - o.groupFindings = buildUnnamed71(); + o.groupFindings = buildUnnamed83(); } buildCounterReportSummary--; return o; @@ -4403,7 +5237,7 @@ void checkReportSummary(api.ReportSummary o) { buildCounterReportSummary++; if (buildCounterReportSummary < 3) { checkReportSummaryAssetAggregateStats(o.allAssetsStats!); - checkUnnamed71(o.groupFindings!); + checkUnnamed83(o.groupFindings!); } buildCounterReportSummary--; } @@ -4458,12 +5292,12 @@ void checkReportSummaryAssetAggregateStats( buildCounterReportSummaryAssetAggregateStats--; } -core.List buildUnnamed72() => [ +core.List buildUnnamed84() => [ buildReportSummaryChartDataDataPoint(), buildReportSummaryChartDataDataPoint(), ]; -void checkUnnamed72(core.List o) { +void checkUnnamed84(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkReportSummaryChartDataDataPoint(o[0]); checkReportSummaryChartDataDataPoint(o[1]); @@ -4474,7 +5308,7 @@ api.ReportSummaryChartData buildReportSummaryChartData() { final o = api.ReportSummaryChartData(); buildCounterReportSummaryChartData++; if (buildCounterReportSummaryChartData < 3) { - o.dataPoints = buildUnnamed72(); + o.dataPoints = buildUnnamed84(); } buildCounterReportSummaryChartData--; return o; @@ -4483,7 +5317,7 @@ api.ReportSummaryChartData buildReportSummaryChartData() { void checkReportSummaryChartData(api.ReportSummaryChartData o) { buildCounterReportSummaryChartData++; if (buildCounterReportSummaryChartData < 3) { - checkUnnamed72(o.dataPoints!); + checkUnnamed84(o.dataPoints!); } buildCounterReportSummaryChartData--; } @@ -4516,12 +5350,12 @@ void 
checkReportSummaryChartDataDataPoint( buildCounterReportSummaryChartDataDataPoint--; } -core.List buildUnnamed73() => [ +core.List buildUnnamed85() => [ 'foo', 'foo', ]; -void checkUnnamed73(core.List o) { +void checkUnnamed85(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -4533,12 +5367,12 @@ void checkUnnamed73(core.List o) { ); } -core.List buildUnnamed74() => [ +core.List buildUnnamed86() => [ 'foo', 'foo', ]; -void checkUnnamed74(core.List o) { +void checkUnnamed86(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -4550,12 +5384,12 @@ void checkUnnamed74(core.List o) { ); } -core.List buildUnnamed75() => [ +core.List buildUnnamed87() => [ buildReportSummaryMachineSeriesAllocation(), buildReportSummaryMachineSeriesAllocation(), ]; -void checkUnnamed75(core.List o) { +void checkUnnamed87(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkReportSummaryMachineSeriesAllocation(o[0]); checkReportSummaryMachineSeriesAllocation(o[1]); @@ -4567,9 +5401,9 @@ api.ReportSummaryComputeEngineFinding buildReportSummaryComputeEngineFinding() { buildCounterReportSummaryComputeEngineFinding++; if (buildCounterReportSummaryComputeEngineFinding < 3) { o.allocatedAssetCount = 'foo'; - o.allocatedDiskTypes = buildUnnamed73(); - o.allocatedRegions = buildUnnamed74(); - o.machineSeriesAllocations = buildUnnamed75(); + o.allocatedDiskTypes = buildUnnamed85(); + o.allocatedRegions = buildUnnamed86(); + o.machineSeriesAllocations = buildUnnamed87(); } buildCounterReportSummaryComputeEngineFinding--; return o; @@ -4583,19 +5417,19 @@ void checkReportSummaryComputeEngineFinding( o.allocatedAssetCount!, unittest.equals('foo'), ); - checkUnnamed73(o.allocatedDiskTypes!); - checkUnnamed74(o.allocatedRegions!); - checkUnnamed75(o.machineSeriesAllocations!); + checkUnnamed85(o.allocatedDiskTypes!); + checkUnnamed86(o.allocatedRegions!); + checkUnnamed87(o.machineSeriesAllocations!); } buildCounterReportSummaryComputeEngineFinding--; } -core.List buildUnnamed76() => [ +core.List buildUnnamed88() => [ buildReportSummaryGroupPreferenceSetFinding(), buildReportSummaryGroupPreferenceSetFinding(), ]; -void checkUnnamed76(core.List o) { +void checkUnnamed88(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkReportSummaryGroupPreferenceSetFinding(o[0]); checkReportSummaryGroupPreferenceSetFinding(o[1]); @@ -4610,7 +5444,7 @@ api.ReportSummaryGroupFinding buildReportSummaryGroupFinding() { o.description = 'foo'; o.displayName = 'foo'; o.overlappingAssetCount = 'foo'; - o.preferenceSetFindings = buildUnnamed76(); + o.preferenceSetFindings = buildUnnamed88(); } buildCounterReportSummaryGroupFinding--; return o; @@ -4632,7 +5466,7 @@ void checkReportSummaryGroupFinding(api.ReportSummaryGroupFinding o) { o.overlappingAssetCount!, unittest.equals('foo'), ); - checkUnnamed76(o.preferenceSetFindings!); + checkUnnamed88(o.preferenceSetFindings!); } buildCounterReportSummaryGroupFinding--; } @@ -4686,12 +5520,12 @@ void checkReportSummaryGroupPreferenceSetFinding( buildCounterReportSummaryGroupPreferenceSetFinding--; } -core.List buildUnnamed77() => [ +core.List buildUnnamed89() => [ buildReportSummaryHistogramChartDataBucket(), buildReportSummaryHistogramChartDataBucket(), ]; -void checkUnnamed77(core.List o) { +void checkUnnamed89(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkReportSummaryHistogramChartDataBucket(o[0]); checkReportSummaryHistogramChartDataBucket(o[1]); @@ -4702,7 +5536,7 @@ 
api.ReportSummaryHistogramChartData buildReportSummaryHistogramChartData() { final o = api.ReportSummaryHistogramChartData(); buildCounterReportSummaryHistogramChartData++; if (buildCounterReportSummaryHistogramChartData < 3) { - o.buckets = buildUnnamed77(); + o.buckets = buildUnnamed89(); } buildCounterReportSummaryHistogramChartData--; return o; @@ -4712,7 +5546,7 @@ void checkReportSummaryHistogramChartData( api.ReportSummaryHistogramChartData o) { buildCounterReportSummaryHistogramChartData++; if (buildCounterReportSummaryHistogramChartData < 3) { - checkUnnamed77(o.buckets!); + checkUnnamed89(o.buckets!); } buildCounterReportSummaryHistogramChartData--; } @@ -4777,12 +5611,12 @@ void checkReportSummaryMachineSeriesAllocation( buildCounterReportSummaryMachineSeriesAllocation--; } -core.List buildUnnamed78() => [ +core.List buildUnnamed90() => [ 'foo', 'foo', ]; -void checkUnnamed78(core.List o) { +void checkUnnamed90(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -4794,12 +5628,12 @@ void checkUnnamed78(core.List o) { ); } -core.List buildUnnamed79() => [ +core.List buildUnnamed91() => [ buildReportSummarySoleTenantNodeAllocation(), buildReportSummarySoleTenantNodeAllocation(), ]; -void checkUnnamed79(core.List o) { +void checkUnnamed91(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkReportSummarySoleTenantNodeAllocation(o[0]); checkReportSummarySoleTenantNodeAllocation(o[1]); @@ -4811,8 +5645,8 @@ api.ReportSummarySoleTenantFinding buildReportSummarySoleTenantFinding() { buildCounterReportSummarySoleTenantFinding++; if (buildCounterReportSummarySoleTenantFinding < 3) { o.allocatedAssetCount = 'foo'; - o.allocatedRegions = buildUnnamed78(); - o.nodeAllocations = buildUnnamed79(); + o.allocatedRegions = buildUnnamed90(); + o.nodeAllocations = buildUnnamed91(); } buildCounterReportSummarySoleTenantFinding--; return o; @@ -4825,8 +5659,8 @@ void checkReportSummarySoleTenantFinding(api.ReportSummarySoleTenantFinding o) { o.allocatedAssetCount!, unittest.equals('foo'), ); - checkUnnamed78(o.allocatedRegions!); - checkUnnamed79(o.nodeAllocations!); + checkUnnamed90(o.allocatedRegions!); + checkUnnamed91(o.nodeAllocations!); } buildCounterReportSummarySoleTenantFinding--; } @@ -4890,12 +5724,12 @@ void checkReportSummaryUtilizationChartData( buildCounterReportSummaryUtilizationChartData--; } -core.List buildUnnamed80() => [ +core.List buildUnnamed92() => [ 'foo', 'foo', ]; -void checkUnnamed80(core.List o) { +void checkUnnamed92(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -4907,12 +5741,12 @@ void checkUnnamed80(core.List o) { ); } -core.List buildUnnamed81() => [ +core.List buildUnnamed93() => [ buildReportSummaryVmwareNodeAllocation(), buildReportSummaryVmwareNodeAllocation(), ]; -void checkUnnamed81(core.List o) { +void checkUnnamed93(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkReportSummaryVmwareNodeAllocation(o[0]); checkReportSummaryVmwareNodeAllocation(o[1]); @@ -4924,8 +5758,8 @@ api.ReportSummaryVmwareEngineFinding buildReportSummaryVmwareEngineFinding() { buildCounterReportSummaryVmwareEngineFinding++; if (buildCounterReportSummaryVmwareEngineFinding < 3) { o.allocatedAssetCount = 'foo'; - o.allocatedRegions = buildUnnamed80(); - o.nodeAllocations = buildUnnamed81(); + o.allocatedRegions = buildUnnamed92(); + o.nodeAllocations = buildUnnamed93(); } buildCounterReportSummaryVmwareEngineFinding--; return o; @@ -4939,8 +5773,8 @@ void checkReportSummaryVmwareEngineFinding( 
o.allocatedAssetCount!, unittest.equals('foo'), ); - checkUnnamed80(o.allocatedRegions!); - checkUnnamed81(o.nodeAllocations!); + checkUnnamed92(o.allocatedRegions!); + checkUnnamed93(o.nodeAllocations!); } buildCounterReportSummaryVmwareEngineFinding--; } @@ -5019,12 +5853,12 @@ void checkRunImportJobRequest(api.RunImportJobRequest o) { buildCounterRunImportJobRequest--; } -core.Map buildUnnamed82() => { +core.Map buildUnnamed94() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed82(core.Map o) { +void checkUnnamed94(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -5041,7 +5875,7 @@ api.RunningProcess buildRunningProcess() { final o = api.RunningProcess(); buildCounterRunningProcess++; if (buildCounterRunningProcess < 3) { - o.attributes = buildUnnamed82(); + o.attributes = buildUnnamed94(); o.cmdline = 'foo'; o.exePath = 'foo'; o.pid = 'foo'; @@ -5054,7 +5888,7 @@ api.RunningProcess buildRunningProcess() { void checkRunningProcess(api.RunningProcess o) { buildCounterRunningProcess++; if (buildCounterRunningProcess < 3) { - checkUnnamed82(o.attributes!); + checkUnnamed94(o.attributes!); unittest.expect( o.cmdline!, unittest.equals('foo'), @@ -5075,12 +5909,12 @@ void checkRunningProcess(api.RunningProcess o) { buildCounterRunningProcess--; } -core.List buildUnnamed83() => [ +core.List buildUnnamed95() => [ buildRunningProcess(), buildRunningProcess(), ]; -void checkUnnamed83(core.List o) { +void checkUnnamed95(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkRunningProcess(o[0]); checkRunningProcess(o[1]); @@ -5091,7 +5925,7 @@ api.RunningProcessList buildRunningProcessList() { final o = api.RunningProcessList(); buildCounterRunningProcessList++; if (buildCounterRunningProcessList < 3) { - o.entries = buildUnnamed83(); + o.entries = buildUnnamed95(); } buildCounterRunningProcessList--; return o; @@ -5100,7 +5934,7 @@ api.RunningProcessList buildRunningProcessList() { void checkRunningProcessList(api.RunningProcessList o) { buildCounterRunningProcessList++; if (buildCounterRunningProcessList < 3) { - checkUnnamed83(o.entries!); + checkUnnamed95(o.entries!); } buildCounterRunningProcessList--; } @@ -5152,12 +5986,12 @@ void checkRunningService(api.RunningService o) { buildCounterRunningService--; } -core.List buildUnnamed84() => [ +core.List buildUnnamed96() => [ buildRunningService(), buildRunningService(), ]; -void checkUnnamed84(core.List o) { +void checkUnnamed96(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkRunningService(o[0]); checkRunningService(o[1]); @@ -5168,7 +6002,7 @@ api.RunningServiceList buildRunningServiceList() { final o = api.RunningServiceList(); buildCounterRunningServiceList++; if (buildCounterRunningServiceList < 3) { - o.entries = buildUnnamed84(); + o.entries = buildUnnamed96(); } buildCounterRunningServiceList--; return o; @@ -5177,7 +6011,7 @@ api.RunningServiceList buildRunningServiceList() { void checkRunningServiceList(api.RunningServiceList o) { buildCounterRunningServiceList++; if (buildCounterRunningServiceList < 3) { - checkUnnamed84(o.entries!); + checkUnnamed96(o.entries!); } buildCounterRunningServiceList--; } @@ -5206,12 +6040,12 @@ void checkRuntimeNetworkInfo(api.RuntimeNetworkInfo o) { buildCounterRuntimeNetworkInfo--; } -core.List buildUnnamed85() => [ +core.List buildUnnamed97() => [ buildStatus(), buildStatus(), ]; -void checkUnnamed85(core.List o) { +void checkUnnamed97(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkStatus(o[0]); checkStatus(o[1]); @@ -5223,7 
+6057,7 @@ api.SendDiscoveryClientHeartbeatRequest final o = api.SendDiscoveryClientHeartbeatRequest(); buildCounterSendDiscoveryClientHeartbeatRequest++; if (buildCounterSendDiscoveryClientHeartbeatRequest < 3) { - o.errors = buildUnnamed85(); + o.errors = buildUnnamed97(); o.version = 'foo'; } buildCounterSendDiscoveryClientHeartbeatRequest--; @@ -5234,7 +6068,7 @@ void checkSendDiscoveryClientHeartbeatRequest( api.SendDiscoveryClientHeartbeatRequest o) { buildCounterSendDiscoveryClientHeartbeatRequest++; if (buildCounterSendDiscoveryClientHeartbeatRequest < 3) { - checkUnnamed85(o.errors!); + checkUnnamed97(o.errors!); unittest.expect( o.version!, unittest.equals('foo'), @@ -5272,12 +6106,12 @@ void checkSettings(api.Settings o) { buildCounterSettings--; } -core.List buildUnnamed86() => [ +core.List buildUnnamed98() => [ buildSoleTenantNodeType(), buildSoleTenantNodeType(), ]; -void checkUnnamed86(core.List o) { +void checkUnnamed98(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkSoleTenantNodeType(o[0]); checkSoleTenantNodeType(o[1]); @@ -5291,7 +6125,7 @@ api.SoleTenancyPreferences buildSoleTenancyPreferences() { o.commitmentPlan = 'foo'; o.cpuOvercommitRatio = 42.0; o.hostMaintenancePolicy = 'foo'; - o.nodeTypes = buildUnnamed86(); + o.nodeTypes = buildUnnamed98(); } buildCounterSoleTenancyPreferences--; return o; @@ -5312,7 +6146,7 @@ void checkSoleTenancyPreferences(api.SoleTenancyPreferences o) { o.hostMaintenancePolicy!, unittest.equals('foo'), ); - checkUnnamed86(o.nodeTypes!); + checkUnnamed98(o.nodeTypes!); } buildCounterSoleTenancyPreferences--; } @@ -5408,7 +6242,168 @@ void checkSource(api.Source o) { buildCounterSource--; } -core.Map buildUnnamed87() => { +core.List buildUnnamed99() => [ + buildSqlServerFeature(), + buildSqlServerFeature(), + ]; + +void checkUnnamed99(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkSqlServerFeature(o[0]); + checkSqlServerFeature(o[1]); +} + +core.List buildUnnamed100() => [ + buildSqlServerServerFlag(), + buildSqlServerServerFlag(), + ]; + +void checkUnnamed100(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkSqlServerServerFlag(o[0]); + checkSqlServerServerFlag(o[1]); +} + +core.List buildUnnamed101() => [ + buildSqlServerTraceFlag(), + buildSqlServerTraceFlag(), + ]; + +void checkUnnamed101(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkSqlServerTraceFlag(o[0]); + checkSqlServerTraceFlag(o[1]); +} + +core.int buildCounterSqlServerDatabaseDeployment = 0; +api.SqlServerDatabaseDeployment buildSqlServerDatabaseDeployment() { + final o = api.SqlServerDatabaseDeployment(); + buildCounterSqlServerDatabaseDeployment++; + if (buildCounterSqlServerDatabaseDeployment < 3) { + o.features = buildUnnamed99(); + o.serverFlags = buildUnnamed100(); + o.traceFlags = buildUnnamed101(); + } + buildCounterSqlServerDatabaseDeployment--; + return o; +} + +void checkSqlServerDatabaseDeployment(api.SqlServerDatabaseDeployment o) { + buildCounterSqlServerDatabaseDeployment++; + if (buildCounterSqlServerDatabaseDeployment < 3) { + checkUnnamed99(o.features!); + checkUnnamed100(o.serverFlags!); + checkUnnamed101(o.traceFlags!); + } + buildCounterSqlServerDatabaseDeployment--; +} + +core.int buildCounterSqlServerFeature = 0; +api.SqlServerFeature buildSqlServerFeature() { + final o = api.SqlServerFeature(); + buildCounterSqlServerFeature++; + if (buildCounterSqlServerFeature < 3) { + o.enabled = true; + o.featureName = 'foo'; + } + buildCounterSqlServerFeature--; + return o; +} + +void 
checkSqlServerFeature(api.SqlServerFeature o) { + buildCounterSqlServerFeature++; + if (buildCounterSqlServerFeature < 3) { + unittest.expect(o.enabled!, unittest.isTrue); + unittest.expect( + o.featureName!, + unittest.equals('foo'), + ); + } + buildCounterSqlServerFeature--; +} + +core.int buildCounterSqlServerSchemaDetails = 0; +api.SqlServerSchemaDetails buildSqlServerSchemaDetails() { + final o = api.SqlServerSchemaDetails(); + buildCounterSqlServerSchemaDetails++; + if (buildCounterSqlServerSchemaDetails < 3) { + o.clrObjectCount = 42; + } + buildCounterSqlServerSchemaDetails--; + return o; +} + +void checkSqlServerSchemaDetails(api.SqlServerSchemaDetails o) { + buildCounterSqlServerSchemaDetails++; + if (buildCounterSqlServerSchemaDetails < 3) { + unittest.expect( + o.clrObjectCount!, + unittest.equals(42), + ); + } + buildCounterSqlServerSchemaDetails--; +} + +core.int buildCounterSqlServerServerFlag = 0; +api.SqlServerServerFlag buildSqlServerServerFlag() { + final o = api.SqlServerServerFlag(); + buildCounterSqlServerServerFlag++; + if (buildCounterSqlServerServerFlag < 3) { + o.serverFlagName = 'foo'; + o.value = 'foo'; + o.valueInUse = 'foo'; + } + buildCounterSqlServerServerFlag--; + return o; +} + +void checkSqlServerServerFlag(api.SqlServerServerFlag o) { + buildCounterSqlServerServerFlag++; + if (buildCounterSqlServerServerFlag < 3) { + unittest.expect( + o.serverFlagName!, + unittest.equals('foo'), + ); + unittest.expect( + o.value!, + unittest.equals('foo'), + ); + unittest.expect( + o.valueInUse!, + unittest.equals('foo'), + ); + } + buildCounterSqlServerServerFlag--; +} + +core.int buildCounterSqlServerTraceFlag = 0; +api.SqlServerTraceFlag buildSqlServerTraceFlag() { + final o = api.SqlServerTraceFlag(); + buildCounterSqlServerTraceFlag++; + if (buildCounterSqlServerTraceFlag < 3) { + o.scope = 'foo'; + o.traceFlagName = 'foo'; + } + buildCounterSqlServerTraceFlag--; + return o; +} + +void checkSqlServerTraceFlag(api.SqlServerTraceFlag o) { + buildCounterSqlServerTraceFlag++; + if (buildCounterSqlServerTraceFlag < 3) { + unittest.expect( + o.scope!, + unittest.equals('foo'), + ); + unittest.expect( + o.traceFlagName!, + unittest.equals('foo'), + ); + } + buildCounterSqlServerTraceFlag--; +} + +core.Map buildUnnamed102() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -5421,7 +6416,7 @@ core.Map buildUnnamed87() => { }, }; -void checkUnnamed87(core.Map o) { +void checkUnnamed102(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted7 = (o['x']!) 
as core.Map; unittest.expect(casted7, unittest.hasLength(3)); @@ -5453,15 +6448,15 @@ void checkUnnamed87(core.Map o) { ); } -core.List> buildUnnamed88() => [ - buildUnnamed87(), - buildUnnamed87(), +core.List> buildUnnamed103() => [ + buildUnnamed102(), + buildUnnamed102(), ]; -void checkUnnamed88(core.List> o) { +void checkUnnamed103(core.List> o) { unittest.expect(o, unittest.hasLength(2)); - checkUnnamed87(o[0]); - checkUnnamed87(o[1]); + checkUnnamed102(o[0]); + checkUnnamed102(o[1]); } core.int buildCounterStatus = 0; @@ -5470,7 +6465,7 @@ api.Status buildStatus() { buildCounterStatus++; if (buildCounterStatus < 3) { o.code = 42; - o.details = buildUnnamed88(); + o.details = buildUnnamed103(); o.message = 'foo'; } buildCounterStatus--; @@ -5484,7 +6479,7 @@ void checkStatus(api.Status o) { o.code!, unittest.equals(42), ); - checkUnnamed88(o.details!); + checkUnnamed103(o.details!); unittest.expect( o.message!, unittest.equals('foo'), @@ -5522,12 +6517,12 @@ void checkUpdateAssetRequest(api.UpdateAssetRequest o) { buildCounterUpdateAssetRequest--; } -core.Map buildUnnamed89() => { +core.Map buildUnnamed104() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed89(core.Map o) { +void checkUnnamed104(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -5544,7 +6539,7 @@ api.UploadFileInfo buildUploadFileInfo() { final o = api.UploadFileInfo(); buildCounterUploadFileInfo++; if (buildCounterUploadFileInfo < 3) { - o.headers = buildUnnamed89(); + o.headers = buildUnnamed104(); o.signedUri = 'foo'; o.uriExpirationTime = 'foo'; } @@ -5555,7 +6550,7 @@ api.UploadFileInfo buildUploadFileInfo() { void checkUploadFileInfo(api.UploadFileInfo o) { buildCounterUploadFileInfo++; if (buildCounterUploadFileInfo < 3) { - checkUnnamed89(o.headers!); + checkUnnamed104(o.headers!); unittest.expect( o.signedUri!, unittest.equals('foo'), @@ -5590,23 +6585,23 @@ void checkValidateImportJobRequest(api.ValidateImportJobRequest o) { buildCounterValidateImportJobRequest--; } -core.List buildUnnamed90() => [ +core.List buildUnnamed105() => [ buildFileValidationReport(), buildFileValidationReport(), ]; -void checkUnnamed90(core.List o) { +void checkUnnamed105(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkFileValidationReport(o[0]); checkFileValidationReport(o[1]); } -core.List buildUnnamed91() => [ +core.List buildUnnamed106() => [ buildImportError(), buildImportError(), ]; -void checkUnnamed91(core.List o) { +void checkUnnamed106(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkImportError(o[0]); checkImportError(o[1]); @@ -5617,8 +6612,8 @@ api.ValidationReport buildValidationReport() { final o = api.ValidationReport(); buildCounterValidationReport++; if (buildCounterValidationReport < 3) { - o.fileValidations = buildUnnamed90(); - o.jobErrors = buildUnnamed91(); + o.fileValidations = buildUnnamed105(); + o.jobErrors = buildUnnamed106(); } buildCounterValidationReport--; return o; @@ -5627,8 +6622,8 @@ api.ValidationReport buildValidationReport() { void checkValidationReport(api.ValidationReport o) { buildCounterValidationReport++; if (buildCounterValidationReport < 3) { - checkUnnamed90(o.fileValidations!); - checkUnnamed91(o.jobErrors!); + checkUnnamed105(o.fileValidations!); + checkUnnamed106(o.jobErrors!); } buildCounterValidationReport--; } @@ -6157,6 +7152,96 @@ void main() { }); }); + unittest.group('obj-schema-DatabaseDeploymentDetails', () { + unittest.test('to-json--from-json', () async { + final o = buildDatabaseDeploymentDetails(); + final 
oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.DatabaseDeploymentDetails.fromJson( + oJson as core.Map); + checkDatabaseDeploymentDetails(od); + }); + }); + + unittest.group('obj-schema-DatabaseDeploymentDetailsAggregatedStats', () { + unittest.test('to-json--from-json', () async { + final o = buildDatabaseDeploymentDetailsAggregatedStats(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.DatabaseDeploymentDetailsAggregatedStats.fromJson( + oJson as core.Map); + checkDatabaseDeploymentDetailsAggregatedStats(od); + }); + }); + + unittest.group('obj-schema-DatabaseDeploymentTopology', () { + unittest.test('to-json--from-json', () async { + final o = buildDatabaseDeploymentTopology(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.DatabaseDeploymentTopology.fromJson( + oJson as core.Map); + checkDatabaseDeploymentTopology(od); + }); + }); + + unittest.group('obj-schema-DatabaseDetails', () { + unittest.test('to-json--from-json', () async { + final o = buildDatabaseDetails(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.DatabaseDetails.fromJson( + oJson as core.Map); + checkDatabaseDetails(od); + }); + }); + + unittest.group('obj-schema-DatabaseDetailsParentDatabaseDeployment', () { + unittest.test('to-json--from-json', () async { + final o = buildDatabaseDetailsParentDatabaseDeployment(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.DatabaseDetailsParentDatabaseDeployment.fromJson( + oJson as core.Map); + checkDatabaseDetailsParentDatabaseDeployment(od); + }); + }); + + unittest.group('obj-schema-DatabaseInstance', () { + unittest.test('to-json--from-json', () async { + final o = buildDatabaseInstance(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.DatabaseInstance.fromJson( + oJson as core.Map); + checkDatabaseInstance(od); + }); + }); + + unittest.group('obj-schema-DatabaseInstanceNetwork', () { + unittest.test('to-json--from-json', () async { + final o = buildDatabaseInstanceNetwork(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.DatabaseInstanceNetwork.fromJson( + oJson as core.Map); + checkDatabaseInstanceNetwork(od); + }); + }); + + unittest.group('obj-schema-DatabaseObjects', () { + unittest.test('to-json--from-json', () async { + final o = buildDatabaseObjects(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.DatabaseObjects.fromJson( + oJson as core.Map); + checkDatabaseObjects(od); + }); + }); + + unittest.group('obj-schema-DatabaseSchema', () { + unittest.test('to-json--from-json', () async { + final o = buildDatabaseSchema(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.DatabaseSchema.fromJson( + oJson as core.Map); + checkDatabaseSchema(od); + }); + }); + unittest.group('obj-schema-Date', () { unittest.test('to-json--from-json', () async { final o = buildDate(); @@ -6457,6 +7542,16 @@ void main() { }); }); + unittest.group('obj-schema-ImportRowErrorArchiveErrorDetails', () { + unittest.test('to-json--from-json', () async { + final o = buildImportRowErrorArchiveErrorDetails(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.ImportRowErrorArchiveErrorDetails.fromJson( + oJson as core.Map); + checkImportRowErrorArchiveErrorDetails(od); + }); + }); + unittest.group('obj-schema-ImportRowErrorCsvErrorDetails', () { unittest.test('to-json--from-json', () async { final o = 
buildImportRowErrorCsvErrorDetails(); @@ -6717,6 +7812,66 @@ void main() { }); }); + unittest.group('obj-schema-MySqlPlugin', () { + unittest.test('to-json--from-json', () async { + final o = buildMySqlPlugin(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.MySqlPlugin.fromJson( + oJson as core.Map); + checkMySqlPlugin(od); + }); + }); + + unittest.group('obj-schema-MySqlProperty', () { + unittest.test('to-json--from-json', () async { + final o = buildMySqlProperty(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.MySqlProperty.fromJson( + oJson as core.Map); + checkMySqlProperty(od); + }); + }); + + unittest.group('obj-schema-MySqlSchemaDetails', () { + unittest.test('to-json--from-json', () async { + final o = buildMySqlSchemaDetails(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.MySqlSchemaDetails.fromJson( + oJson as core.Map); + checkMySqlSchemaDetails(od); + }); + }); + + unittest.group('obj-schema-MySqlStorageEngineDetails', () { + unittest.test('to-json--from-json', () async { + final o = buildMySqlStorageEngineDetails(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.MySqlStorageEngineDetails.fromJson( + oJson as core.Map); + checkMySqlStorageEngineDetails(od); + }); + }); + + unittest.group('obj-schema-MySqlVariable', () { + unittest.test('to-json--from-json', () async { + final o = buildMySqlVariable(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.MySqlVariable.fromJson( + oJson as core.Map); + checkMySqlVariable(od); + }); + }); + + unittest.group('obj-schema-MysqlDatabaseDeployment', () { + unittest.test('to-json--from-json', () async { + final o = buildMysqlDatabaseDeployment(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.MysqlDatabaseDeployment.fromJson( + oJson as core.Map); + checkMysqlDatabaseDeployment(od); + }); + }); + unittest.group('obj-schema-NetworkAdapterDetails', () { unittest.test('to-json--from-json', () async { final o = buildNetworkAdapterDetails(); @@ -6867,6 +8022,56 @@ void main() { }); }); + unittest.group('obj-schema-PostgreSqlDatabaseDeployment', () { + unittest.test('to-json--from-json', () async { + final o = buildPostgreSqlDatabaseDeployment(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.PostgreSqlDatabaseDeployment.fromJson( + oJson as core.Map); + checkPostgreSqlDatabaseDeployment(od); + }); + }); + + unittest.group('obj-schema-PostgreSqlExtension', () { + unittest.test('to-json--from-json', () async { + final o = buildPostgreSqlExtension(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.PostgreSqlExtension.fromJson( + oJson as core.Map); + checkPostgreSqlExtension(od); + }); + }); + + unittest.group('obj-schema-PostgreSqlProperty', () { + unittest.test('to-json--from-json', () async { + final o = buildPostgreSqlProperty(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.PostgreSqlProperty.fromJson( + oJson as core.Map); + checkPostgreSqlProperty(od); + }); + }); + + unittest.group('obj-schema-PostgreSqlSchemaDetails', () { + unittest.test('to-json--from-json', () async { + final o = buildPostgreSqlSchemaDetails(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.PostgreSqlSchemaDetails.fromJson( + oJson as core.Map); + checkPostgreSqlSchemaDetails(od); + }); + }); + + unittest.group('obj-schema-PostgreSqlSetting', () { + 
unittest.test('to-json--from-json', () async { + final o = buildPostgreSqlSetting(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.PostgreSqlSetting.fromJson( + oJson as core.Map); + checkPostgreSqlSetting(od); + }); + }); + unittest.group('obj-schema-PreferenceSet', () { unittest.test('to-json--from-json', () async { final o = buildPreferenceSet(); @@ -7207,6 +8412,56 @@ void main() { }); }); + unittest.group('obj-schema-SqlServerDatabaseDeployment', () { + unittest.test('to-json--from-json', () async { + final o = buildSqlServerDatabaseDeployment(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.SqlServerDatabaseDeployment.fromJson( + oJson as core.Map); + checkSqlServerDatabaseDeployment(od); + }); + }); + + unittest.group('obj-schema-SqlServerFeature', () { + unittest.test('to-json--from-json', () async { + final o = buildSqlServerFeature(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.SqlServerFeature.fromJson( + oJson as core.Map); + checkSqlServerFeature(od); + }); + }); + + unittest.group('obj-schema-SqlServerSchemaDetails', () { + unittest.test('to-json--from-json', () async { + final o = buildSqlServerSchemaDetails(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.SqlServerSchemaDetails.fromJson( + oJson as core.Map); + checkSqlServerSchemaDetails(od); + }); + }); + + unittest.group('obj-schema-SqlServerServerFlag', () { + unittest.test('to-json--from-json', () async { + final o = buildSqlServerServerFlag(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.SqlServerServerFlag.fromJson( + oJson as core.Map); + checkSqlServerServerFlag(od); + }); + }); + + unittest.group('obj-schema-SqlServerTraceFlag', () { + unittest.test('to-json--from-json', () async { + final o = buildSqlServerTraceFlag(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.SqlServerTraceFlag.fromJson( + oJson as core.Map); + checkSqlServerTraceFlag(od); + }); + }); + unittest.group('obj-schema-Status', () { unittest.test('to-json--from-json', () async { final o = buildStatus(); diff --git a/generated/googleapis/test/monitoring/v3_test.dart b/generated/googleapis/test/monitoring/v3_test.dart index 4fe2470ef..b10a1308b 100644 --- a/generated/googleapis/test/monitoring/v3_test.dart +++ b/generated/googleapis/test/monitoring/v3_test.dart @@ -3502,6 +3502,7 @@ api.PrometheusQueryLanguageCondition buildPrometheusQueryLanguageCondition() { buildCounterPrometheusQueryLanguageCondition++; if (buildCounterPrometheusQueryLanguageCondition < 3) { o.alertRule = 'foo'; + o.disableMetricValidation = true; o.duration = 'foo'; o.evaluationInterval = 'foo'; o.labels = buildUnnamed59(); @@ -3520,6 +3521,7 @@ void checkPrometheusQueryLanguageCondition( o.alertRule!, unittest.equals('foo'), ); + unittest.expect(o.disableMetricValidation!, unittest.isTrue); unittest.expect( o.duration!, unittest.equals('foo'), diff --git a/generated/googleapis/test/netapp/v1_test.dart b/generated/googleapis/test/netapp/v1_test.dart index 52873689a..3a2e0aa02 100644 --- a/generated/googleapis/test/netapp/v1_test.dart +++ b/generated/googleapis/test/netapp/v1_test.dart @@ -515,6 +515,7 @@ api.DestinationVolumeParameters buildDestinationVolumeParameters() { o.description = 'foo'; o.shareName = 'foo'; o.storagePool = 'foo'; + o.tieringPolicy = buildTieringPolicy(); o.volumeId = 'foo'; } buildCounterDestinationVolumeParameters--; @@ -536,6 +537,7 @@ void 
checkDestinationVolumeParameters(api.DestinationVolumeParameters o) { o.storagePool!, unittest.equals('foo'), ); + checkTieringPolicy(o.tieringPolicy!); unittest.expect( o.volumeId!, unittest.equals('foo'), @@ -559,12 +561,63 @@ void checkEncryptVolumesRequest(api.EncryptVolumesRequest o) { buildCounterEncryptVolumesRequest--; } -core.List buildUnnamed8() => [ +core.List buildUnnamed8() => [ + 'foo', + 'foo', + ]; + +void checkUnnamed8(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o[0], + unittest.equals('foo'), + ); + unittest.expect( + o[1], + unittest.equals('foo'), + ); +} + +core.int buildCounterEstablishPeeringRequest = 0; +api.EstablishPeeringRequest buildEstablishPeeringRequest() { + final o = api.EstablishPeeringRequest(); + buildCounterEstablishPeeringRequest++; + if (buildCounterEstablishPeeringRequest < 3) { + o.peerClusterName = 'foo'; + o.peerIpAddresses = buildUnnamed8(); + o.peerSvmName = 'foo'; + o.peerVolumeName = 'foo'; + } + buildCounterEstablishPeeringRequest--; + return o; +} + +void checkEstablishPeeringRequest(api.EstablishPeeringRequest o) { + buildCounterEstablishPeeringRequest++; + if (buildCounterEstablishPeeringRequest < 3) { + unittest.expect( + o.peerClusterName!, + unittest.equals('foo'), + ); + checkUnnamed8(o.peerIpAddresses!); + unittest.expect( + o.peerSvmName!, + unittest.equals('foo'), + ); + unittest.expect( + o.peerVolumeName!, + unittest.equals('foo'), + ); + } + buildCounterEstablishPeeringRequest--; +} + +core.List buildUnnamed9() => [ buildSimpleExportPolicyRule(), buildSimpleExportPolicyRule(), ]; -void checkUnnamed8(core.List o) { +void checkUnnamed9(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkSimpleExportPolicyRule(o[0]); checkSimpleExportPolicyRule(o[1]); @@ -575,7 +628,7 @@ api.ExportPolicy buildExportPolicy() { final o = api.ExportPolicy(); buildCounterExportPolicy++; if (buildCounterExportPolicy < 3) { - o.rules = buildUnnamed8(); + o.rules = buildUnnamed9(); } buildCounterExportPolicy--; return o; @@ -584,7 +637,7 @@ api.ExportPolicy buildExportPolicy() { void checkExportPolicy(api.ExportPolicy o) { buildCounterExportPolicy++; if (buildCounterExportPolicy < 3) { - checkUnnamed8(o.rules!); + checkUnnamed9(o.rules!); } buildCounterExportPolicy--; } @@ -631,12 +684,134 @@ void checkHourlySchedule(api.HourlySchedule o) { buildCounterHourlySchedule--; } -core.Map buildUnnamed9() => { +core.int buildCounterHybridPeeringDetails = 0; +api.HybridPeeringDetails buildHybridPeeringDetails() { + final o = api.HybridPeeringDetails(); + buildCounterHybridPeeringDetails++; + if (buildCounterHybridPeeringDetails < 3) { + o.command = 'foo'; + o.commandExpiryTime = 'foo'; + o.passphrase = 'foo'; + o.subnetIp = 'foo'; + } + buildCounterHybridPeeringDetails--; + return o; +} + +void checkHybridPeeringDetails(api.HybridPeeringDetails o) { + buildCounterHybridPeeringDetails++; + if (buildCounterHybridPeeringDetails < 3) { + unittest.expect( + o.command!, + unittest.equals('foo'), + ); + unittest.expect( + o.commandExpiryTime!, + unittest.equals('foo'), + ); + unittest.expect( + o.passphrase!, + unittest.equals('foo'), + ); + unittest.expect( + o.subnetIp!, + unittest.equals('foo'), + ); + } + buildCounterHybridPeeringDetails--; +} + +core.Map buildUnnamed10() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed9(core.Map o) { +void checkUnnamed10(core.Map o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o['x']!, + unittest.equals('foo'), + ); + unittest.expect( + o['y']!, + 
unittest.equals('foo'), + ); +} + +core.List buildUnnamed11() => [ + 'foo', + 'foo', + ]; + +void checkUnnamed11(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o[0], + unittest.equals('foo'), + ); + unittest.expect( + o[1], + unittest.equals('foo'), + ); +} + +core.int buildCounterHybridReplicationParameters = 0; +api.HybridReplicationParameters buildHybridReplicationParameters() { + final o = api.HybridReplicationParameters(); + buildCounterHybridReplicationParameters++; + if (buildCounterHybridReplicationParameters < 3) { + o.clusterLocation = 'foo'; + o.description = 'foo'; + o.labels = buildUnnamed10(); + o.peerClusterName = 'foo'; + o.peerIpAddresses = buildUnnamed11(); + o.peerSvmName = 'foo'; + o.peerVolumeName = 'foo'; + o.replication = 'foo'; + } + buildCounterHybridReplicationParameters--; + return o; +} + +void checkHybridReplicationParameters(api.HybridReplicationParameters o) { + buildCounterHybridReplicationParameters++; + if (buildCounterHybridReplicationParameters < 3) { + unittest.expect( + o.clusterLocation!, + unittest.equals('foo'), + ); + unittest.expect( + o.description!, + unittest.equals('foo'), + ); + checkUnnamed10(o.labels!); + unittest.expect( + o.peerClusterName!, + unittest.equals('foo'), + ); + checkUnnamed11(o.peerIpAddresses!); + unittest.expect( + o.peerSvmName!, + unittest.equals('foo'), + ); + unittest.expect( + o.peerVolumeName!, + unittest.equals('foo'), + ); + unittest.expect( + o.replication!, + unittest.equals('foo'), + ); + } + buildCounterHybridReplicationParameters--; +} + +core.Map buildUnnamed12() => { + 'x': 'foo', + 'y': 'foo', + }; + +void checkUnnamed12(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -657,7 +832,7 @@ api.KmsConfig buildKmsConfig() { o.cryptoKeyName = 'foo'; o.description = 'foo'; o.instructions = 'foo'; - o.labels = buildUnnamed9(); + o.labels = buildUnnamed12(); o.name = 'foo'; o.serviceAccount = 'foo'; o.state = 'foo'; @@ -686,7 +861,7 @@ void checkKmsConfig(api.KmsConfig o) { o.instructions!, unittest.equals('foo'), ); - checkUnnamed9(o.labels!); + checkUnnamed12(o.labels!); unittest.expect( o.name!, unittest.equals('foo'), @@ -707,23 +882,23 @@ void checkKmsConfig(api.KmsConfig o) { buildCounterKmsConfig--; } -core.List buildUnnamed10() => [ +core.List buildUnnamed13() => [ buildActiveDirectory(), buildActiveDirectory(), ]; -void checkUnnamed10(core.List o) { +void checkUnnamed13(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkActiveDirectory(o[0]); checkActiveDirectory(o[1]); } -core.List buildUnnamed11() => [ +core.List buildUnnamed14() => [ 'foo', 'foo', ]; -void checkUnnamed11(core.List o) { +void checkUnnamed14(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -740,9 +915,9 @@ api.ListActiveDirectoriesResponse buildListActiveDirectoriesResponse() { final o = api.ListActiveDirectoriesResponse(); buildCounterListActiveDirectoriesResponse++; if (buildCounterListActiveDirectoriesResponse < 3) { - o.activeDirectories = buildUnnamed10(); + o.activeDirectories = buildUnnamed13(); o.nextPageToken = 'foo'; - o.unreachable = buildUnnamed11(); + o.unreachable = buildUnnamed14(); } buildCounterListActiveDirectoriesResponse--; return o; @@ -751,33 +926,33 @@ api.ListActiveDirectoriesResponse buildListActiveDirectoriesResponse() { void checkListActiveDirectoriesResponse(api.ListActiveDirectoriesResponse o) { buildCounterListActiveDirectoriesResponse++; if (buildCounterListActiveDirectoriesResponse < 3) { - 
checkUnnamed10(o.activeDirectories!); + checkUnnamed13(o.activeDirectories!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed11(o.unreachable!); + checkUnnamed14(o.unreachable!); } buildCounterListActiveDirectoriesResponse--; } -core.List buildUnnamed12() => [ +core.List buildUnnamed15() => [ buildBackupPolicy(), buildBackupPolicy(), ]; -void checkUnnamed12(core.List o) { +void checkUnnamed15(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkBackupPolicy(o[0]); checkBackupPolicy(o[1]); } -core.List buildUnnamed13() => [ +core.List buildUnnamed16() => [ 'foo', 'foo', ]; -void checkUnnamed13(core.List o) { +void checkUnnamed16(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -794,9 +969,9 @@ api.ListBackupPoliciesResponse buildListBackupPoliciesResponse() { final o = api.ListBackupPoliciesResponse(); buildCounterListBackupPoliciesResponse++; if (buildCounterListBackupPoliciesResponse < 3) { - o.backupPolicies = buildUnnamed12(); + o.backupPolicies = buildUnnamed15(); o.nextPageToken = 'foo'; - o.unreachable = buildUnnamed13(); + o.unreachable = buildUnnamed16(); } buildCounterListBackupPoliciesResponse--; return o; @@ -805,33 +980,33 @@ api.ListBackupPoliciesResponse buildListBackupPoliciesResponse() { void checkListBackupPoliciesResponse(api.ListBackupPoliciesResponse o) { buildCounterListBackupPoliciesResponse++; if (buildCounterListBackupPoliciesResponse < 3) { - checkUnnamed12(o.backupPolicies!); + checkUnnamed15(o.backupPolicies!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed13(o.unreachable!); + checkUnnamed16(o.unreachable!); } buildCounterListBackupPoliciesResponse--; } -core.List buildUnnamed14() => [ +core.List buildUnnamed17() => [ buildBackupVault(), buildBackupVault(), ]; -void checkUnnamed14(core.List o) { +void checkUnnamed17(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkBackupVault(o[0]); checkBackupVault(o[1]); } -core.List buildUnnamed15() => [ +core.List buildUnnamed18() => [ 'foo', 'foo', ]; -void checkUnnamed15(core.List o) { +void checkUnnamed18(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -848,9 +1023,9 @@ api.ListBackupVaultsResponse buildListBackupVaultsResponse() { final o = api.ListBackupVaultsResponse(); buildCounterListBackupVaultsResponse++; if (buildCounterListBackupVaultsResponse < 3) { - o.backupVaults = buildUnnamed14(); + o.backupVaults = buildUnnamed17(); o.nextPageToken = 'foo'; - o.unreachable = buildUnnamed15(); + o.unreachable = buildUnnamed18(); } buildCounterListBackupVaultsResponse--; return o; @@ -859,33 +1034,33 @@ api.ListBackupVaultsResponse buildListBackupVaultsResponse() { void checkListBackupVaultsResponse(api.ListBackupVaultsResponse o) { buildCounterListBackupVaultsResponse++; if (buildCounterListBackupVaultsResponse < 3) { - checkUnnamed14(o.backupVaults!); + checkUnnamed17(o.backupVaults!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed15(o.unreachable!); + checkUnnamed18(o.unreachable!); } buildCounterListBackupVaultsResponse--; } -core.List buildUnnamed16() => [ +core.List buildUnnamed19() => [ buildBackup(), buildBackup(), ]; -void checkUnnamed16(core.List o) { +void checkUnnamed19(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkBackup(o[0]); checkBackup(o[1]); } -core.List buildUnnamed17() => [ +core.List buildUnnamed20() => [ 'foo', 'foo', ]; -void checkUnnamed17(core.List o) { +void checkUnnamed20(core.List o) { 
unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -902,9 +1077,9 @@ api.ListBackupsResponse buildListBackupsResponse() { final o = api.ListBackupsResponse(); buildCounterListBackupsResponse++; if (buildCounterListBackupsResponse < 3) { - o.backups = buildUnnamed16(); + o.backups = buildUnnamed19(); o.nextPageToken = 'foo'; - o.unreachable = buildUnnamed17(); + o.unreachable = buildUnnamed20(); } buildCounterListBackupsResponse--; return o; @@ -913,33 +1088,33 @@ api.ListBackupsResponse buildListBackupsResponse() { void checkListBackupsResponse(api.ListBackupsResponse o) { buildCounterListBackupsResponse++; if (buildCounterListBackupsResponse < 3) { - checkUnnamed16(o.backups!); + checkUnnamed19(o.backups!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed17(o.unreachable!); + checkUnnamed20(o.unreachable!); } buildCounterListBackupsResponse--; } -core.List buildUnnamed18() => [ +core.List buildUnnamed21() => [ buildKmsConfig(), buildKmsConfig(), ]; -void checkUnnamed18(core.List o) { +void checkUnnamed21(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkKmsConfig(o[0]); checkKmsConfig(o[1]); } -core.List buildUnnamed19() => [ +core.List buildUnnamed22() => [ 'foo', 'foo', ]; -void checkUnnamed19(core.List o) { +void checkUnnamed22(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -956,9 +1131,9 @@ api.ListKmsConfigsResponse buildListKmsConfigsResponse() { final o = api.ListKmsConfigsResponse(); buildCounterListKmsConfigsResponse++; if (buildCounterListKmsConfigsResponse < 3) { - o.kmsConfigs = buildUnnamed18(); + o.kmsConfigs = buildUnnamed21(); o.nextPageToken = 'foo'; - o.unreachable = buildUnnamed19(); + o.unreachable = buildUnnamed22(); } buildCounterListKmsConfigsResponse--; return o; @@ -967,22 +1142,22 @@ api.ListKmsConfigsResponse buildListKmsConfigsResponse() { void checkListKmsConfigsResponse(api.ListKmsConfigsResponse o) { buildCounterListKmsConfigsResponse++; if (buildCounterListKmsConfigsResponse < 3) { - checkUnnamed18(o.kmsConfigs!); + checkUnnamed21(o.kmsConfigs!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed19(o.unreachable!); + checkUnnamed22(o.unreachable!); } buildCounterListKmsConfigsResponse--; } -core.List buildUnnamed20() => [ +core.List buildUnnamed23() => [ buildLocation(), buildLocation(), ]; -void checkUnnamed20(core.List o) { +void checkUnnamed23(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkLocation(o[0]); checkLocation(o[1]); @@ -993,7 +1168,7 @@ api.ListLocationsResponse buildListLocationsResponse() { final o = api.ListLocationsResponse(); buildCounterListLocationsResponse++; if (buildCounterListLocationsResponse < 3) { - o.locations = buildUnnamed20(); + o.locations = buildUnnamed23(); o.nextPageToken = 'foo'; } buildCounterListLocationsResponse--; @@ -1003,7 +1178,7 @@ api.ListLocationsResponse buildListLocationsResponse() { void checkListLocationsResponse(api.ListLocationsResponse o) { buildCounterListLocationsResponse++; if (buildCounterListLocationsResponse < 3) { - checkUnnamed20(o.locations!); + checkUnnamed23(o.locations!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -1012,12 +1187,12 @@ void checkListLocationsResponse(api.ListLocationsResponse o) { buildCounterListLocationsResponse--; } -core.List buildUnnamed21() => [ +core.List buildUnnamed24() => [ buildOperation(), buildOperation(), ]; -void checkUnnamed21(core.List o) { +void checkUnnamed24(core.List o) { unittest.expect(o, 
unittest.hasLength(2)); checkOperation(o[0]); checkOperation(o[1]); @@ -1029,7 +1204,7 @@ api.ListOperationsResponse buildListOperationsResponse() { buildCounterListOperationsResponse++; if (buildCounterListOperationsResponse < 3) { o.nextPageToken = 'foo'; - o.operations = buildUnnamed21(); + o.operations = buildUnnamed24(); } buildCounterListOperationsResponse--; return o; @@ -1042,28 +1217,28 @@ void checkListOperationsResponse(api.ListOperationsResponse o) { o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed21(o.operations!); + checkUnnamed24(o.operations!); } buildCounterListOperationsResponse--; } -core.List buildUnnamed22() => [ +core.List buildUnnamed25() => [ buildReplication(), buildReplication(), ]; -void checkUnnamed22(core.List o) { +void checkUnnamed25(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkReplication(o[0]); checkReplication(o[1]); } -core.List buildUnnamed23() => [ +core.List buildUnnamed26() => [ 'foo', 'foo', ]; -void checkUnnamed23(core.List o) { +void checkUnnamed26(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -1081,8 +1256,8 @@ api.ListReplicationsResponse buildListReplicationsResponse() { buildCounterListReplicationsResponse++; if (buildCounterListReplicationsResponse < 3) { o.nextPageToken = 'foo'; - o.replications = buildUnnamed22(); - o.unreachable = buildUnnamed23(); + o.replications = buildUnnamed25(); + o.unreachable = buildUnnamed26(); } buildCounterListReplicationsResponse--; return o; @@ -1095,29 +1270,29 @@ void checkListReplicationsResponse(api.ListReplicationsResponse o) { o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed22(o.replications!); - checkUnnamed23(o.unreachable!); + checkUnnamed25(o.replications!); + checkUnnamed26(o.unreachable!); } buildCounterListReplicationsResponse--; } -core.List buildUnnamed24() => [ +core.List buildUnnamed27() => [ buildSnapshot(), buildSnapshot(), ]; -void checkUnnamed24(core.List o) { +void checkUnnamed27(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkSnapshot(o[0]); checkSnapshot(o[1]); } -core.List buildUnnamed25() => [ +core.List buildUnnamed28() => [ 'foo', 'foo', ]; -void checkUnnamed25(core.List o) { +void checkUnnamed28(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -1135,8 +1310,8 @@ api.ListSnapshotsResponse buildListSnapshotsResponse() { buildCounterListSnapshotsResponse++; if (buildCounterListSnapshotsResponse < 3) { o.nextPageToken = 'foo'; - o.snapshots = buildUnnamed24(); - o.unreachable = buildUnnamed25(); + o.snapshots = buildUnnamed27(); + o.unreachable = buildUnnamed28(); } buildCounterListSnapshotsResponse--; return o; @@ -1149,29 +1324,29 @@ void checkListSnapshotsResponse(api.ListSnapshotsResponse o) { o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed24(o.snapshots!); - checkUnnamed25(o.unreachable!); + checkUnnamed27(o.snapshots!); + checkUnnamed28(o.unreachable!); } buildCounterListSnapshotsResponse--; } -core.List buildUnnamed26() => [ +core.List buildUnnamed29() => [ buildStoragePool(), buildStoragePool(), ]; -void checkUnnamed26(core.List o) { +void checkUnnamed29(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkStoragePool(o[0]); checkStoragePool(o[1]); } -core.List buildUnnamed27() => [ +core.List buildUnnamed30() => [ 'foo', 'foo', ]; -void checkUnnamed27(core.List o) { +void checkUnnamed30(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -1189,8 +1364,8 @@ api.ListStoragePoolsResponse 
buildListStoragePoolsResponse() { buildCounterListStoragePoolsResponse++; if (buildCounterListStoragePoolsResponse < 3) { o.nextPageToken = 'foo'; - o.storagePools = buildUnnamed26(); - o.unreachable = buildUnnamed27(); + o.storagePools = buildUnnamed29(); + o.unreachable = buildUnnamed30(); } buildCounterListStoragePoolsResponse--; return o; @@ -1203,18 +1378,18 @@ void checkListStoragePoolsResponse(api.ListStoragePoolsResponse o) { o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed26(o.storagePools!); - checkUnnamed27(o.unreachable!); + checkUnnamed29(o.storagePools!); + checkUnnamed30(o.unreachable!); } buildCounterListStoragePoolsResponse--; } -core.List buildUnnamed28() => [ +core.List buildUnnamed31() => [ 'foo', 'foo', ]; -void checkUnnamed28(core.List o) { +void checkUnnamed31(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -1226,12 +1401,12 @@ void checkUnnamed28(core.List o) { ); } -core.List buildUnnamed29() => [ +core.List buildUnnamed32() => [ buildVolume(), buildVolume(), ]; -void checkUnnamed29(core.List o) { +void checkUnnamed32(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkVolume(o[0]); checkVolume(o[1]); @@ -1243,8 +1418,8 @@ api.ListVolumesResponse buildListVolumesResponse() { buildCounterListVolumesResponse++; if (buildCounterListVolumesResponse < 3) { o.nextPageToken = 'foo'; - o.unreachable = buildUnnamed28(); - o.volumes = buildUnnamed29(); + o.unreachable = buildUnnamed31(); + o.volumes = buildUnnamed32(); } buildCounterListVolumesResponse--; return o; @@ -1257,18 +1432,18 @@ void checkListVolumesResponse(api.ListVolumesResponse o) { o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed28(o.unreachable!); - checkUnnamed29(o.volumes!); + checkUnnamed31(o.unreachable!); + checkUnnamed32(o.volumes!); } buildCounterListVolumesResponse--; } -core.Map buildUnnamed30() => { +core.Map buildUnnamed33() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed30(core.Map o) { +void checkUnnamed33(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -1280,7 +1455,7 @@ void checkUnnamed30(core.Map o) { ); } -core.Map buildUnnamed31() => { +core.Map buildUnnamed34() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -1293,7 +1468,7 @@ core.Map buildUnnamed31() => { }, }; -void checkUnnamed31(core.Map o) { +void checkUnnamed34(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted1 = (o['x']!) 
as core.Map; unittest.expect(casted1, unittest.hasLength(3)); @@ -1331,9 +1506,9 @@ api.Location buildLocation() { buildCounterLocation++; if (buildCounterLocation < 3) { o.displayName = 'foo'; - o.labels = buildUnnamed30(); + o.labels = buildUnnamed33(); o.locationId = 'foo'; - o.metadata = buildUnnamed31(); + o.metadata = buildUnnamed34(); o.name = 'foo'; } buildCounterLocation--; @@ -1347,12 +1522,12 @@ void checkLocation(api.Location o) { o.displayName!, unittest.equals('foo'), ); - checkUnnamed30(o.labels!); + checkUnnamed33(o.labels!); unittest.expect( o.locationId!, unittest.equals('foo'), ); - checkUnnamed31(o.metadata!); + checkUnnamed34(o.metadata!); unittest.expect( o.name!, unittest.equals('foo'), @@ -1435,7 +1610,7 @@ void checkMountOption(api.MountOption o) { buildCounterMountOption--; } -core.Map buildUnnamed32() => { +core.Map buildUnnamed35() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -1448,7 +1623,7 @@ core.Map buildUnnamed32() => { }, }; -void checkUnnamed32(core.Map o) { +void checkUnnamed35(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted3 = (o['x']!) as core.Map; unittest.expect(casted3, unittest.hasLength(3)); @@ -1480,7 +1655,7 @@ void checkUnnamed32(core.Map o) { ); } -core.Map buildUnnamed33() => { +core.Map buildUnnamed36() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -1493,7 +1668,7 @@ core.Map buildUnnamed33() => { }, }; -void checkUnnamed33(core.Map o) { +void checkUnnamed36(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted5 = (o['x']!) as core.Map; unittest.expect(casted5, unittest.hasLength(3)); @@ -1532,9 +1707,9 @@ api.Operation buildOperation() { if (buildCounterOperation < 3) { o.done = true; o.error = buildStatus(); - o.metadata = buildUnnamed32(); + o.metadata = buildUnnamed35(); o.name = 'foo'; - o.response = buildUnnamed33(); + o.response = buildUnnamed36(); } buildCounterOperation--; return o; @@ -1545,22 +1720,22 @@ void checkOperation(api.Operation o) { if (buildCounterOperation < 3) { unittest.expect(o.done!, unittest.isTrue); checkStatus(o.error!); - checkUnnamed32(o.metadata!); + checkUnnamed35(o.metadata!); unittest.expect( o.name!, unittest.equals('foo'), ); - checkUnnamed33(o.response!); + checkUnnamed36(o.response!); } buildCounterOperation--; } -core.Map buildUnnamed34() => { +core.Map buildUnnamed37() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed34(core.Map o) { +void checkUnnamed37(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -1577,12 +1752,15 @@ api.Replication buildReplication() { final o = api.Replication(); buildCounterReplication++; if (buildCounterReplication < 3) { + o.clusterLocation = 'foo'; o.createTime = 'foo'; o.description = 'foo'; o.destinationVolume = 'foo'; o.destinationVolumeParameters = buildDestinationVolumeParameters(); o.healthy = true; - o.labels = buildUnnamed34(); + o.hybridPeeringDetails = buildHybridPeeringDetails(); + o.hybridReplicationType = 'foo'; + o.labels = buildUnnamed37(); o.mirrorState = 'foo'; o.name = 'foo'; o.replicationSchedule = 'foo'; @@ -1599,6 +1777,10 @@ api.Replication buildReplication() { void checkReplication(api.Replication o) { buildCounterReplication++; if (buildCounterReplication < 3) { + unittest.expect( + o.clusterLocation!, + unittest.equals('foo'), + ); unittest.expect( o.createTime!, unittest.equals('foo'), @@ -1613,7 +1795,12 @@ void checkReplication(api.Replication o) { ); checkDestinationVolumeParameters(o.destinationVolumeParameters!); unittest.expect(o.healthy!, unittest.isTrue); - 
checkUnnamed34(o.labels!); + checkHybridPeeringDetails(o.hybridPeeringDetails!); + unittest.expect( + o.hybridReplicationType!, + unittest.equals('foo'), + ); + checkUnnamed37(o.labels!); unittest.expect( o.mirrorState!, unittest.equals('foo'), @@ -1776,12 +1963,12 @@ void checkSimpleExportPolicyRule(api.SimpleExportPolicyRule o) { buildCounterSimpleExportPolicyRule--; } -core.Map buildUnnamed35() => { +core.Map buildUnnamed38() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed35(core.Map o) { +void checkUnnamed38(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -1800,7 +1987,7 @@ api.Snapshot buildSnapshot() { if (buildCounterSnapshot < 3) { o.createTime = 'foo'; o.description = 'foo'; - o.labels = buildUnnamed35(); + o.labels = buildUnnamed38(); o.name = 'foo'; o.state = 'foo'; o.stateDetails = 'foo'; @@ -1821,7 +2008,7 @@ void checkSnapshot(api.Snapshot o) { o.description!, unittest.equals('foo'), ); - checkUnnamed35(o.labels!); + checkUnnamed38(o.labels!); unittest.expect( o.name!, unittest.equals('foo'), @@ -1869,7 +2056,7 @@ void checkSnapshotPolicy(api.SnapshotPolicy o) { buildCounterSnapshotPolicy--; } -core.Map buildUnnamed36() => { +core.Map buildUnnamed39() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -1882,7 +2069,7 @@ core.Map buildUnnamed36() => { }, }; -void checkUnnamed36(core.Map o) { +void checkUnnamed39(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted7 = (o['x']!) as core.Map; unittest.expect(casted7, unittest.hasLength(3)); @@ -1914,15 +2101,15 @@ void checkUnnamed36(core.Map o) { ); } -core.List> buildUnnamed37() => [ - buildUnnamed36(), - buildUnnamed36(), +core.List> buildUnnamed40() => [ + buildUnnamed39(), + buildUnnamed39(), ]; -void checkUnnamed37(core.List> o) { +void checkUnnamed40(core.List> o) { unittest.expect(o, unittest.hasLength(2)); - checkUnnamed36(o[0]); - checkUnnamed36(o[1]); + checkUnnamed39(o[0]); + checkUnnamed39(o[1]); } core.int buildCounterStatus = 0; @@ -1931,7 +2118,7 @@ api.Status buildStatus() { buildCounterStatus++; if (buildCounterStatus < 3) { o.code = 42; - o.details = buildUnnamed37(); + o.details = buildUnnamed40(); o.message = 'foo'; } buildCounterStatus--; @@ -1945,7 +2132,7 @@ void checkStatus(api.Status o) { o.code!, unittest.equals(42), ); - checkUnnamed37(o.details!); + checkUnnamed40(o.details!); unittest.expect( o.message!, unittest.equals('foo'), @@ -1973,12 +2160,12 @@ void checkStopReplicationRequest(api.StopReplicationRequest o) { buildCounterStopReplicationRequest--; } -core.Map buildUnnamed38() => { +core.Map buildUnnamed41() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed38(core.Map o) { +void checkUnnamed41(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -2003,7 +2190,7 @@ api.StoragePool buildStoragePool() { o.encryptionType = 'foo'; o.globalAccessAllowed = true; o.kmsConfig = 'foo'; - o.labels = buildUnnamed38(); + o.labels = buildUnnamed41(); o.ldapEnabled = true; o.name = 'foo'; o.network = 'foo'; @@ -2049,7 +2236,7 @@ void checkStoragePool(api.StoragePool o) { o.kmsConfig!, unittest.equals('foo'), ); - checkUnnamed38(o.labels!); + checkUnnamed41(o.labels!); unittest.expect(o.ldapEnabled!, unittest.isTrue); unittest.expect( o.name!, @@ -2110,6 +2297,21 @@ void checkSwitchActiveReplicaZoneRequest(api.SwitchActiveReplicaZoneRequest o) { buildCounterSwitchActiveReplicaZoneRequest--; } +core.int buildCounterSyncReplicationRequest = 0; +api.SyncReplicationRequest buildSyncReplicationRequest() { + final o = 
api.SyncReplicationRequest(); + buildCounterSyncReplicationRequest++; + if (buildCounterSyncReplicationRequest < 3) {} + buildCounterSyncReplicationRequest--; + return o; +} + +void checkSyncReplicationRequest(api.SyncReplicationRequest o) { + buildCounterSyncReplicationRequest++; + if (buildCounterSyncReplicationRequest < 3) {} + buildCounterSyncReplicationRequest--; +} + core.int buildCounterTieringPolicy = 0; api.TieringPolicy buildTieringPolicy() { final o = api.TieringPolicy(); @@ -2194,6 +2396,29 @@ void checkTransferStats(api.TransferStats o) { buildCounterTransferStats--; } +core.int buildCounterValidateDirectoryServiceRequest = 0; +api.ValidateDirectoryServiceRequest buildValidateDirectoryServiceRequest() { + final o = api.ValidateDirectoryServiceRequest(); + buildCounterValidateDirectoryServiceRequest++; + if (buildCounterValidateDirectoryServiceRequest < 3) { + o.directoryServiceType = 'foo'; + } + buildCounterValidateDirectoryServiceRequest--; + return o; +} + +void checkValidateDirectoryServiceRequest( + api.ValidateDirectoryServiceRequest o) { + buildCounterValidateDirectoryServiceRequest++; + if (buildCounterValidateDirectoryServiceRequest < 3) { + unittest.expect( + o.directoryServiceType!, + unittest.equals('foo'), + ); + } + buildCounterValidateDirectoryServiceRequest--; +} + core.int buildCounterVerifyKmsConfigRequest = 0; api.VerifyKmsConfigRequest buildVerifyKmsConfigRequest() { final o = api.VerifyKmsConfigRequest(); @@ -2238,12 +2463,12 @@ void checkVerifyKmsConfigResponse(api.VerifyKmsConfigResponse o) { buildCounterVerifyKmsConfigResponse--; } -core.Map buildUnnamed39() => { +core.Map buildUnnamed42() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed39(core.Map o) { +void checkUnnamed42(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -2255,23 +2480,23 @@ void checkUnnamed39(core.Map o) { ); } -core.List buildUnnamed40() => [ +core.List buildUnnamed43() => [ buildMountOption(), buildMountOption(), ]; -void checkUnnamed40(core.List o) { +void checkUnnamed43(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkMountOption(o[0]); checkMountOption(o[1]); } -core.List buildUnnamed41() => [ +core.List buildUnnamed44() => [ 'foo', 'foo', ]; -void checkUnnamed41(core.List o) { +void checkUnnamed44(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -2283,12 +2508,12 @@ void checkUnnamed41(core.List o) { ); } -core.List buildUnnamed42() => [ +core.List buildUnnamed45() => [ 'foo', 'foo', ]; -void checkUnnamed42(core.List o) { +void checkUnnamed45(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -2300,12 +2525,12 @@ void checkUnnamed42(core.List o) { ); } -core.List buildUnnamed43() => [ +core.List buildUnnamed46() => [ 'foo', 'foo', ]; -void checkUnnamed43(core.List o) { +void checkUnnamed46(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -2331,24 +2556,25 @@ api.Volume buildVolume() { o.encryptionType = 'foo'; o.exportPolicy = buildExportPolicy(); o.hasReplication = true; + o.hybridReplicationParameters = buildHybridReplicationParameters(); o.kerberosEnabled = true; o.kmsConfig = 'foo'; - o.labels = buildUnnamed39(); + o.labels = buildUnnamed42(); o.largeCapacity = true; o.ldapEnabled = true; - o.mountOptions = buildUnnamed40(); + o.mountOptions = buildUnnamed43(); o.multipleEndpoints = true; o.name = 'foo'; o.network = 'foo'; - o.protocols = buildUnnamed41(); + o.protocols = buildUnnamed44(); o.psaRange = 'foo'; 
o.replicaZone = 'foo'; o.restoreParameters = buildRestoreParameters(); - o.restrictedActions = buildUnnamed42(); + o.restrictedActions = buildUnnamed45(); o.securityStyle = 'foo'; o.serviceLevel = 'foo'; o.shareName = 'foo'; - o.smbSettings = buildUnnamed43(); + o.smbSettings = buildUnnamed46(); o.snapReserve = 42.0; o.snapshotDirectory = true; o.snapshotPolicy = buildSnapshotPolicy(); @@ -2394,15 +2620,16 @@ void checkVolume(api.Volume o) { ); checkExportPolicy(o.exportPolicy!); unittest.expect(o.hasReplication!, unittest.isTrue); + checkHybridReplicationParameters(o.hybridReplicationParameters!); unittest.expect(o.kerberosEnabled!, unittest.isTrue); unittest.expect( o.kmsConfig!, unittest.equals('foo'), ); - checkUnnamed39(o.labels!); + checkUnnamed42(o.labels!); unittest.expect(o.largeCapacity!, unittest.isTrue); unittest.expect(o.ldapEnabled!, unittest.isTrue); - checkUnnamed40(o.mountOptions!); + checkUnnamed43(o.mountOptions!); unittest.expect(o.multipleEndpoints!, unittest.isTrue); unittest.expect( o.name!, @@ -2412,7 +2639,7 @@ void checkVolume(api.Volume o) { o.network!, unittest.equals('foo'), ); - checkUnnamed41(o.protocols!); + checkUnnamed44(o.protocols!); unittest.expect( o.psaRange!, unittest.equals('foo'), @@ -2422,7 +2649,7 @@ void checkVolume(api.Volume o) { unittest.equals('foo'), ); checkRestoreParameters(o.restoreParameters!); - checkUnnamed42(o.restrictedActions!); + checkUnnamed45(o.restrictedActions!); unittest.expect( o.securityStyle!, unittest.equals('foo'), @@ -2435,7 +2662,7 @@ void checkVolume(api.Volume o) { o.shareName!, unittest.equals('foo'), ); - checkUnnamed43(o.smbSettings!); + checkUnnamed46(o.smbSettings!); unittest.expect( o.snapReserve!, unittest.equals(42.0), @@ -2599,6 +2826,16 @@ void main() { }); }); + unittest.group('obj-schema-EstablishPeeringRequest', () { + unittest.test('to-json--from-json', () async { + final o = buildEstablishPeeringRequest(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.EstablishPeeringRequest.fromJson( + oJson as core.Map); + checkEstablishPeeringRequest(od); + }); + }); + unittest.group('obj-schema-ExportPolicy', () { unittest.test('to-json--from-json', () async { final o = buildExportPolicy(); @@ -2629,6 +2866,26 @@ void main() { }); }); + unittest.group('obj-schema-HybridPeeringDetails', () { + unittest.test('to-json--from-json', () async { + final o = buildHybridPeeringDetails(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.HybridPeeringDetails.fromJson( + oJson as core.Map); + checkHybridPeeringDetails(od); + }); + }); + + unittest.group('obj-schema-HybridReplicationParameters', () { + unittest.test('to-json--from-json', () async { + final o = buildHybridReplicationParameters(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.HybridReplicationParameters.fromJson( + oJson as core.Map); + checkHybridReplicationParameters(od); + }); + }); + unittest.group('obj-schema-KmsConfig', () { unittest.test('to-json--from-json', () async { final o = buildKmsConfig(); @@ -2909,6 +3166,16 @@ void main() { }); }); + unittest.group('obj-schema-SyncReplicationRequest', () { + unittest.test('to-json--from-json', () async { + final o = buildSyncReplicationRequest(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.SyncReplicationRequest.fromJson( + oJson as core.Map); + checkSyncReplicationRequest(od); + }); + }); + unittest.group('obj-schema-TieringPolicy', () { unittest.test('to-json--from-json', () async { final o = 
buildTieringPolicy(); @@ -2929,6 +3196,16 @@ void main() { }); }); + unittest.group('obj-schema-ValidateDirectoryServiceRequest', () { + unittest.test('to-json--from-json', () async { + final o = buildValidateDirectoryServiceRequest(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.ValidateDirectoryServiceRequest.fromJson( + oJson as core.Map); + checkValidateDirectoryServiceRequest(od); + }); + }); + unittest.group('obj-schema-VerifyKmsConfigRequest', () { unittest.test('to-json--from-json', () async { final o = buildVerifyKmsConfigRequest(); @@ -5362,6 +5639,64 @@ void main() { await res.switch_(arg_request, arg_name, $fields: arg_$fields); checkOperation(response as api.Operation); }); + + unittest.test('method--validateDirectoryService', () async { + final mock = HttpServerMock(); + final res = api.NetAppFilesApi(mock).projects.locations.storagePools; + final arg_request = buildValidateDirectoryServiceRequest(); + final arg_name = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.ValidateDirectoryServiceRequest.fromJson( + json as core.Map); + checkValidateDirectoryServiceRequest(obj); + + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildOperation()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.validateDirectoryService(arg_request, arg_name, + $fields: arg_$fields); + checkOperation(response as api.Operation); + }); }); unittest.group('resource-ProjectsLocationsVolumesResource', () { @@ -5855,6 +6190,65 @@ void main() { checkOperation(response as api.Operation); }); + unittest.test('method--establishPeering', () async { + final mock = HttpServerMock(); + final res = + api.NetAppFilesApi(mock).projects.locations.volumes.replications; + final arg_request = buildEstablishPeeringRequest(); + final arg_name = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.EstablishPeeringRequest.fromJson( + json as core.Map); + checkEstablishPeeringRequest(obj); + + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final 
queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildOperation()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.establishPeering(arg_request, arg_name, + $fields: arg_$fields); + checkOperation(response as api.Operation); + }); + unittest.test('method--get', () async { final mock = HttpServerMock(); final res = @@ -6226,6 +6620,65 @@ void main() { await res.stop(arg_request, arg_name, $fields: arg_$fields); checkOperation(response as api.Operation); }); + + unittest.test('method--sync', () async { + final mock = HttpServerMock(); + final res = + api.NetAppFilesApi(mock).projects.locations.volumes.replications; + final arg_request = buildSyncReplicationRequest(); + final arg_name = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.SyncReplicationRequest.fromJson( + json as core.Map); + checkSyncReplicationRequest(obj); + + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildOperation()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = + await res.sync(arg_request, arg_name, $fields: arg_$fields); + checkOperation(response as api.Operation); + }); }); unittest.group('resource-ProjectsLocationsVolumesSnapshotsResource', () { diff --git a/generated/googleapis/test/networkconnectivity/v1_test.dart b/generated/googleapis/test/networkconnectivity/v1_test.dart index 64c4d9e96..e9ec51d39 100644 --- a/generated/googleapis/test/networkconnectivity/v1_test.dart +++ b/generated/googleapis/test/networkconnectivity/v1_test.dart @@ -974,6 +974,35 @@ void checkHub(api.Hub o) { buildCounterHub--; } +core.int buildCounterHubStatusEntry = 0; +api.HubStatusEntry buildHubStatusEntry() { + final o = api.HubStatusEntry(); + buildCounterHubStatusEntry++; + if (buildCounterHubStatusEntry < 3) { + o.count = 42; + o.groupBy = 'foo'; + o.pscPropagationStatus = buildPscPropagationStatus(); + } + buildCounterHubStatusEntry--; + return o; +} + +void checkHubStatusEntry(api.HubStatusEntry o) { + buildCounterHubStatusEntry++; + if 
(buildCounterHubStatusEntry < 3) { + unittest.expect( + o.count!, + unittest.equals(42), + ); + unittest.expect( + o.groupBy!, + unittest.equals('foo'), + ); + checkPscPropagationStatus(o.pscPropagationStatus!); + } + buildCounterHubStatusEntry--; +} + core.int buildCounterInterconnectAttachment = 0; api.InterconnectAttachment buildInterconnectAttachment() { final o = api.InterconnectAttachment(); @@ -1215,12 +1244,30 @@ void checkUnnamed23(core.List o) { ); } +core.List buildUnnamed24() => [ + 'foo', + 'foo', + ]; + +void checkUnnamed24(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o[0], + unittest.equals('foo'), + ); + unittest.expect( + o[1], + unittest.equals('foo'), + ); +} + core.int buildCounterLinkedProducerVpcNetwork = 0; api.LinkedProducerVpcNetwork buildLinkedProducerVpcNetwork() { final o = api.LinkedProducerVpcNetwork(); buildCounterLinkedProducerVpcNetwork++; if (buildCounterLinkedProducerVpcNetwork < 3) { o.excludeExportRanges = buildUnnamed23(); + o.includeExportRanges = buildUnnamed24(); o.network = 'foo'; o.peering = 'foo'; o.producerNetwork = 'foo'; @@ -1234,6 +1281,7 @@ void checkLinkedProducerVpcNetwork(api.LinkedProducerVpcNetwork o) { buildCounterLinkedProducerVpcNetwork++; if (buildCounterLinkedProducerVpcNetwork < 3) { checkUnnamed23(o.excludeExportRanges!); + checkUnnamed24(o.includeExportRanges!); unittest.expect( o.network!, unittest.equals('foo'), @@ -1254,12 +1302,12 @@ void checkLinkedProducerVpcNetwork(api.LinkedProducerVpcNetwork o) { buildCounterLinkedProducerVpcNetwork--; } -core.List buildUnnamed24() => [ +core.List buildUnnamed25() => [ 'foo', 'foo', ]; -void checkUnnamed24(core.List o) { +void checkUnnamed25(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -1271,12 +1319,12 @@ void checkUnnamed24(core.List o) { ); } -core.List buildUnnamed25() => [ +core.List buildUnnamed26() => [ buildRouterApplianceInstance(), buildRouterApplianceInstance(), ]; -void checkUnnamed25(core.List o) { +void checkUnnamed26(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkRouterApplianceInstance(o[0]); checkRouterApplianceInstance(o[1]); @@ -1287,8 +1335,8 @@ api.LinkedRouterApplianceInstances buildLinkedRouterApplianceInstances() { final o = api.LinkedRouterApplianceInstances(); buildCounterLinkedRouterApplianceInstances++; if (buildCounterLinkedRouterApplianceInstances < 3) { - o.includeImportRanges = buildUnnamed24(); - o.instances = buildUnnamed25(); + o.includeImportRanges = buildUnnamed25(); + o.instances = buildUnnamed26(); o.siteToSiteDataTransfer = true; o.vpcNetwork = 'foo'; } @@ -1299,8 +1347,8 @@ api.LinkedRouterApplianceInstances buildLinkedRouterApplianceInstances() { void checkLinkedRouterApplianceInstances(api.LinkedRouterApplianceInstances o) { buildCounterLinkedRouterApplianceInstances++; if (buildCounterLinkedRouterApplianceInstances < 3) { - checkUnnamed24(o.includeImportRanges!); - checkUnnamed25(o.instances!); + checkUnnamed25(o.includeImportRanges!); + checkUnnamed26(o.instances!); unittest.expect(o.siteToSiteDataTransfer!, unittest.isTrue); unittest.expect( o.vpcNetwork!, @@ -1310,12 +1358,12 @@ void checkLinkedRouterApplianceInstances(api.LinkedRouterApplianceInstances o) { buildCounterLinkedRouterApplianceInstances--; } -core.List buildUnnamed26() => [ +core.List buildUnnamed27() => [ 'foo', 'foo', ]; -void checkUnnamed26(core.List o) { +void checkUnnamed27(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -1327,12 +1375,12 @@ void 
checkUnnamed26(core.List o) { ); } -core.List buildUnnamed27() => [ +core.List buildUnnamed28() => [ 'foo', 'foo', ]; -void checkUnnamed27(core.List o) { +void checkUnnamed28(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -1344,12 +1392,12 @@ void checkUnnamed27(core.List o) { ); } -core.List buildUnnamed28() => [ +core.List buildUnnamed29() => [ 'foo', 'foo', ]; -void checkUnnamed28(core.List o) { +void checkUnnamed29(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -1366,9 +1414,9 @@ api.LinkedVpcNetwork buildLinkedVpcNetwork() { final o = api.LinkedVpcNetwork(); buildCounterLinkedVpcNetwork++; if (buildCounterLinkedVpcNetwork < 3) { - o.excludeExportRanges = buildUnnamed26(); - o.includeExportRanges = buildUnnamed27(); - o.producerVpcSpokes = buildUnnamed28(); + o.excludeExportRanges = buildUnnamed27(); + o.includeExportRanges = buildUnnamed28(); + o.producerVpcSpokes = buildUnnamed29(); o.uri = 'foo'; } buildCounterLinkedVpcNetwork--; @@ -1378,9 +1426,9 @@ api.LinkedVpcNetwork buildLinkedVpcNetwork() { void checkLinkedVpcNetwork(api.LinkedVpcNetwork o) { buildCounterLinkedVpcNetwork++; if (buildCounterLinkedVpcNetwork < 3) { - checkUnnamed26(o.excludeExportRanges!); - checkUnnamed27(o.includeExportRanges!); - checkUnnamed28(o.producerVpcSpokes!); + checkUnnamed27(o.excludeExportRanges!); + checkUnnamed28(o.includeExportRanges!); + checkUnnamed29(o.producerVpcSpokes!); unittest.expect( o.uri!, unittest.equals('foo'), @@ -1389,12 +1437,12 @@ void checkLinkedVpcNetwork(api.LinkedVpcNetwork o) { buildCounterLinkedVpcNetwork--; } -core.List buildUnnamed29() => [ +core.List buildUnnamed30() => [ 'foo', 'foo', ]; -void checkUnnamed29(core.List o) { +void checkUnnamed30(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -1406,12 +1454,12 @@ void checkUnnamed29(core.List o) { ); } -core.List buildUnnamed30() => [ +core.List buildUnnamed31() => [ 'foo', 'foo', ]; -void checkUnnamed30(core.List o) { +void checkUnnamed31(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -1428,9 +1476,9 @@ api.LinkedVpnTunnels buildLinkedVpnTunnels() { final o = api.LinkedVpnTunnels(); buildCounterLinkedVpnTunnels++; if (buildCounterLinkedVpnTunnels < 3) { - o.includeImportRanges = buildUnnamed29(); + o.includeImportRanges = buildUnnamed30(); o.siteToSiteDataTransfer = true; - o.uris = buildUnnamed30(); + o.uris = buildUnnamed31(); o.vpcNetwork = 'foo'; } buildCounterLinkedVpnTunnels--; @@ -1440,9 +1488,9 @@ api.LinkedVpnTunnels buildLinkedVpnTunnels() { void checkLinkedVpnTunnels(api.LinkedVpnTunnels o) { buildCounterLinkedVpnTunnels++; if (buildCounterLinkedVpnTunnels < 3) { - checkUnnamed29(o.includeImportRanges!); + checkUnnamed30(o.includeImportRanges!); unittest.expect(o.siteToSiteDataTransfer!, unittest.isTrue); - checkUnnamed30(o.uris!); + checkUnnamed31(o.uris!); unittest.expect( o.vpcNetwork!, unittest.equals('foo'), @@ -1451,23 +1499,23 @@ void checkLinkedVpnTunnels(api.LinkedVpnTunnels o) { buildCounterLinkedVpnTunnels--; } -core.List buildUnnamed31() => [ +core.List buildUnnamed32() => [ buildGroup(), buildGroup(), ]; -void checkUnnamed31(core.List o) { +void checkUnnamed32(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGroup(o[0]); checkGroup(o[1]); } -core.List buildUnnamed32() => [ +core.List buildUnnamed33() => [ 'foo', 'foo', ]; -void checkUnnamed32(core.List o) { +void checkUnnamed33(core.List o) { unittest.expect(o, unittest.hasLength(2)); 
unittest.expect( o[0], @@ -1484,9 +1532,9 @@ api.ListGroupsResponse buildListGroupsResponse() { final o = api.ListGroupsResponse(); buildCounterListGroupsResponse++; if (buildCounterListGroupsResponse < 3) { - o.groups = buildUnnamed31(); + o.groups = buildUnnamed32(); o.nextPageToken = 'foo'; - o.unreachable = buildUnnamed32(); + o.unreachable = buildUnnamed33(); } buildCounterListGroupsResponse--; return o; @@ -1495,33 +1543,33 @@ api.ListGroupsResponse buildListGroupsResponse() { void checkListGroupsResponse(api.ListGroupsResponse o) { buildCounterListGroupsResponse++; if (buildCounterListGroupsResponse < 3) { - checkUnnamed31(o.groups!); + checkUnnamed32(o.groups!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed32(o.unreachable!); + checkUnnamed33(o.unreachable!); } buildCounterListGroupsResponse--; } -core.List buildUnnamed33() => [ +core.List buildUnnamed34() => [ buildSpoke(), buildSpoke(), ]; -void checkUnnamed33(core.List o) { +void checkUnnamed34(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkSpoke(o[0]); checkSpoke(o[1]); } -core.List buildUnnamed34() => [ +core.List buildUnnamed35() => [ 'foo', 'foo', ]; -void checkUnnamed34(core.List o) { +void checkUnnamed35(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -1539,8 +1587,8 @@ api.ListHubSpokesResponse buildListHubSpokesResponse() { buildCounterListHubSpokesResponse++; if (buildCounterListHubSpokesResponse < 3) { o.nextPageToken = 'foo'; - o.spokes = buildUnnamed33(); - o.unreachable = buildUnnamed34(); + o.spokes = buildUnnamed34(); + o.unreachable = buildUnnamed35(); } buildCounterListHubSpokesResponse--; return o; @@ -1553,29 +1601,29 @@ void checkListHubSpokesResponse(api.ListHubSpokesResponse o) { o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed33(o.spokes!); - checkUnnamed34(o.unreachable!); + checkUnnamed34(o.spokes!); + checkUnnamed35(o.unreachable!); } buildCounterListHubSpokesResponse--; } -core.List buildUnnamed35() => [ +core.List buildUnnamed36() => [ buildHub(), buildHub(), ]; -void checkUnnamed35(core.List o) { +void checkUnnamed36(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkHub(o[0]); checkHub(o[1]); } -core.List buildUnnamed36() => [ +core.List buildUnnamed37() => [ 'foo', 'foo', ]; -void checkUnnamed36(core.List o) { +void checkUnnamed37(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -1592,9 +1640,9 @@ api.ListHubsResponse buildListHubsResponse() { final o = api.ListHubsResponse(); buildCounterListHubsResponse++; if (buildCounterListHubsResponse < 3) { - o.hubs = buildUnnamed35(); + o.hubs = buildUnnamed36(); o.nextPageToken = 'foo'; - o.unreachable = buildUnnamed36(); + o.unreachable = buildUnnamed37(); } buildCounterListHubsResponse--; return o; @@ -1603,33 +1651,33 @@ api.ListHubsResponse buildListHubsResponse() { void checkListHubsResponse(api.ListHubsResponse o) { buildCounterListHubsResponse++; if (buildCounterListHubsResponse < 3) { - checkUnnamed35(o.hubs!); + checkUnnamed36(o.hubs!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed36(o.unreachable!); + checkUnnamed37(o.unreachable!); } buildCounterListHubsResponse--; } -core.List buildUnnamed37() => [ +core.List buildUnnamed38() => [ buildInternalRange(), buildInternalRange(), ]; -void checkUnnamed37(core.List o) { +void checkUnnamed38(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkInternalRange(o[0]); checkInternalRange(o[1]); } -core.List buildUnnamed38() => [ 
+core.List buildUnnamed39() => [ 'foo', 'foo', ]; -void checkUnnamed38(core.List o) { +void checkUnnamed39(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -1646,9 +1694,9 @@ api.ListInternalRangesResponse buildListInternalRangesResponse() { final o = api.ListInternalRangesResponse(); buildCounterListInternalRangesResponse++; if (buildCounterListInternalRangesResponse < 3) { - o.internalRanges = buildUnnamed37(); + o.internalRanges = buildUnnamed38(); o.nextPageToken = 'foo'; - o.unreachable = buildUnnamed38(); + o.unreachable = buildUnnamed39(); } buildCounterListInternalRangesResponse--; return o; @@ -1657,22 +1705,22 @@ api.ListInternalRangesResponse buildListInternalRangesResponse() { void checkListInternalRangesResponse(api.ListInternalRangesResponse o) { buildCounterListInternalRangesResponse++; if (buildCounterListInternalRangesResponse < 3) { - checkUnnamed37(o.internalRanges!); + checkUnnamed38(o.internalRanges!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed38(o.unreachable!); + checkUnnamed39(o.unreachable!); } buildCounterListInternalRangesResponse--; } -core.List buildUnnamed39() => [ +core.List buildUnnamed40() => [ buildLocation(), buildLocation(), ]; -void checkUnnamed39(core.List o) { +void checkUnnamed40(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkLocation(o[0]); checkLocation(o[1]); @@ -1683,7 +1731,7 @@ api.ListLocationsResponse buildListLocationsResponse() { final o = api.ListLocationsResponse(); buildCounterListLocationsResponse++; if (buildCounterListLocationsResponse < 3) { - o.locations = buildUnnamed39(); + o.locations = buildUnnamed40(); o.nextPageToken = 'foo'; } buildCounterListLocationsResponse--; @@ -1693,7 +1741,7 @@ api.ListLocationsResponse buildListLocationsResponse() { void checkListLocationsResponse(api.ListLocationsResponse o) { buildCounterListLocationsResponse++; if (buildCounterListLocationsResponse < 3) { - checkUnnamed39(o.locations!); + checkUnnamed40(o.locations!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -1702,23 +1750,23 @@ void checkListLocationsResponse(api.ListLocationsResponse o) { buildCounterListLocationsResponse--; } -core.List buildUnnamed40() => [ +core.List buildUnnamed41() => [ buildPolicyBasedRoute(), buildPolicyBasedRoute(), ]; -void checkUnnamed40(core.List o) { +void checkUnnamed41(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkPolicyBasedRoute(o[0]); checkPolicyBasedRoute(o[1]); } -core.List buildUnnamed41() => [ +core.List buildUnnamed42() => [ 'foo', 'foo', ]; -void checkUnnamed41(core.List o) { +void checkUnnamed42(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -1736,8 +1784,8 @@ api.ListPolicyBasedRoutesResponse buildListPolicyBasedRoutesResponse() { buildCounterListPolicyBasedRoutesResponse++; if (buildCounterListPolicyBasedRoutesResponse < 3) { o.nextPageToken = 'foo'; - o.policyBasedRoutes = buildUnnamed40(); - o.unreachable = buildUnnamed41(); + o.policyBasedRoutes = buildUnnamed41(); + o.unreachable = buildUnnamed42(); } buildCounterListPolicyBasedRoutesResponse--; return o; @@ -1750,29 +1798,29 @@ void checkListPolicyBasedRoutesResponse(api.ListPolicyBasedRoutesResponse o) { o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed40(o.policyBasedRoutes!); - checkUnnamed41(o.unreachable!); + checkUnnamed41(o.policyBasedRoutes!); + checkUnnamed42(o.unreachable!); } buildCounterListPolicyBasedRoutesResponse--; } -core.List buildUnnamed42() => [ +core.List 
buildUnnamed43() => [ buildRegionalEndpoint(), buildRegionalEndpoint(), ]; -void checkUnnamed42(core.List o) { +void checkUnnamed43(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkRegionalEndpoint(o[0]); checkRegionalEndpoint(o[1]); } -core.List buildUnnamed43() => [ +core.List buildUnnamed44() => [ 'foo', 'foo', ]; -void checkUnnamed43(core.List o) { +void checkUnnamed44(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -1790,8 +1838,8 @@ api.ListRegionalEndpointsResponse buildListRegionalEndpointsResponse() { buildCounterListRegionalEndpointsResponse++; if (buildCounterListRegionalEndpointsResponse < 3) { o.nextPageToken = 'foo'; - o.regionalEndpoints = buildUnnamed42(); - o.unreachable = buildUnnamed43(); + o.regionalEndpoints = buildUnnamed43(); + o.unreachable = buildUnnamed44(); } buildCounterListRegionalEndpointsResponse--; return o; @@ -1804,29 +1852,29 @@ void checkListRegionalEndpointsResponse(api.ListRegionalEndpointsResponse o) { o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed42(o.regionalEndpoints!); - checkUnnamed43(o.unreachable!); + checkUnnamed43(o.regionalEndpoints!); + checkUnnamed44(o.unreachable!); } buildCounterListRegionalEndpointsResponse--; } -core.List buildUnnamed44() => [ +core.List buildUnnamed45() => [ buildRouteTable(), buildRouteTable(), ]; -void checkUnnamed44(core.List o) { +void checkUnnamed45(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkRouteTable(o[0]); checkRouteTable(o[1]); } -core.List buildUnnamed45() => [ +core.List buildUnnamed46() => [ 'foo', 'foo', ]; -void checkUnnamed45(core.List o) { +void checkUnnamed46(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -1844,8 +1892,8 @@ api.ListRouteTablesResponse buildListRouteTablesResponse() { buildCounterListRouteTablesResponse++; if (buildCounterListRouteTablesResponse < 3) { o.nextPageToken = 'foo'; - o.routeTables = buildUnnamed44(); - o.unreachable = buildUnnamed45(); + o.routeTables = buildUnnamed45(); + o.unreachable = buildUnnamed46(); } buildCounterListRouteTablesResponse--; return o; @@ -1858,29 +1906,29 @@ void checkListRouteTablesResponse(api.ListRouteTablesResponse o) { o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed44(o.routeTables!); - checkUnnamed45(o.unreachable!); + checkUnnamed45(o.routeTables!); + checkUnnamed46(o.unreachable!); } buildCounterListRouteTablesResponse--; } -core.List buildUnnamed46() => [ +core.List buildUnnamed47() => [ buildRoute(), buildRoute(), ]; -void checkUnnamed46(core.List o) { +void checkUnnamed47(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkRoute(o[0]); checkRoute(o[1]); } -core.List buildUnnamed47() => [ +core.List buildUnnamed48() => [ 'foo', 'foo', ]; -void checkUnnamed47(core.List o) { +void checkUnnamed48(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -1898,8 +1946,8 @@ api.ListRoutesResponse buildListRoutesResponse() { buildCounterListRoutesResponse++; if (buildCounterListRoutesResponse < 3) { o.nextPageToken = 'foo'; - o.routes = buildUnnamed46(); - o.unreachable = buildUnnamed47(); + o.routes = buildUnnamed47(); + o.unreachable = buildUnnamed48(); } buildCounterListRoutesResponse--; return o; @@ -1912,29 +1960,29 @@ void checkListRoutesResponse(api.ListRoutesResponse o) { o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed46(o.routes!); - checkUnnamed47(o.unreachable!); + checkUnnamed47(o.routes!); + checkUnnamed48(o.unreachable!); } buildCounterListRoutesResponse--; 
} -core.List buildUnnamed48() => [ +core.List buildUnnamed49() => [ buildServiceClass(), buildServiceClass(), ]; -void checkUnnamed48(core.List o) { +void checkUnnamed49(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkServiceClass(o[0]); checkServiceClass(o[1]); } -core.List buildUnnamed49() => [ +core.List buildUnnamed50() => [ 'foo', 'foo', ]; -void checkUnnamed49(core.List o) { +void checkUnnamed50(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -1952,8 +2000,8 @@ api.ListServiceClassesResponse buildListServiceClassesResponse() { buildCounterListServiceClassesResponse++; if (buildCounterListServiceClassesResponse < 3) { o.nextPageToken = 'foo'; - o.serviceClasses = buildUnnamed48(); - o.unreachable = buildUnnamed49(); + o.serviceClasses = buildUnnamed49(); + o.unreachable = buildUnnamed50(); } buildCounterListServiceClassesResponse--; return o; @@ -1966,29 +2014,29 @@ void checkListServiceClassesResponse(api.ListServiceClassesResponse o) { o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed48(o.serviceClasses!); - checkUnnamed49(o.unreachable!); + checkUnnamed49(o.serviceClasses!); + checkUnnamed50(o.unreachable!); } buildCounterListServiceClassesResponse--; } -core.List buildUnnamed50() => [ +core.List buildUnnamed51() => [ buildServiceConnectionMap(), buildServiceConnectionMap(), ]; -void checkUnnamed50(core.List o) { +void checkUnnamed51(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkServiceConnectionMap(o[0]); checkServiceConnectionMap(o[1]); } -core.List buildUnnamed51() => [ +core.List buildUnnamed52() => [ 'foo', 'foo', ]; -void checkUnnamed51(core.List o) { +void checkUnnamed52(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -2006,8 +2054,8 @@ api.ListServiceConnectionMapsResponse buildListServiceConnectionMapsResponse() { buildCounterListServiceConnectionMapsResponse++; if (buildCounterListServiceConnectionMapsResponse < 3) { o.nextPageToken = 'foo'; - o.serviceConnectionMaps = buildUnnamed50(); - o.unreachable = buildUnnamed51(); + o.serviceConnectionMaps = buildUnnamed51(); + o.unreachable = buildUnnamed52(); } buildCounterListServiceConnectionMapsResponse--; return o; @@ -2021,29 +2069,29 @@ void checkListServiceConnectionMapsResponse( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed50(o.serviceConnectionMaps!); - checkUnnamed51(o.unreachable!); + checkUnnamed51(o.serviceConnectionMaps!); + checkUnnamed52(o.unreachable!); } buildCounterListServiceConnectionMapsResponse--; } -core.List buildUnnamed52() => [ +core.List buildUnnamed53() => [ buildServiceConnectionPolicy(), buildServiceConnectionPolicy(), ]; -void checkUnnamed52(core.List o) { +void checkUnnamed53(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkServiceConnectionPolicy(o[0]); checkServiceConnectionPolicy(o[1]); } -core.List buildUnnamed53() => [ +core.List buildUnnamed54() => [ 'foo', 'foo', ]; -void checkUnnamed53(core.List o) { +void checkUnnamed54(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -2062,8 +2110,8 @@ api.ListServiceConnectionPoliciesResponse buildCounterListServiceConnectionPoliciesResponse++; if (buildCounterListServiceConnectionPoliciesResponse < 3) { o.nextPageToken = 'foo'; - o.serviceConnectionPolicies = buildUnnamed52(); - o.unreachable = buildUnnamed53(); + o.serviceConnectionPolicies = buildUnnamed53(); + o.unreachable = buildUnnamed54(); } buildCounterListServiceConnectionPoliciesResponse--; return o; @@ -2077,29 
+2125,29 @@ void checkListServiceConnectionPoliciesResponse( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed52(o.serviceConnectionPolicies!); - checkUnnamed53(o.unreachable!); + checkUnnamed53(o.serviceConnectionPolicies!); + checkUnnamed54(o.unreachable!); } buildCounterListServiceConnectionPoliciesResponse--; } -core.List buildUnnamed54() => [ +core.List buildUnnamed55() => [ buildServiceConnectionToken(), buildServiceConnectionToken(), ]; -void checkUnnamed54(core.List o) { +void checkUnnamed55(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkServiceConnectionToken(o[0]); checkServiceConnectionToken(o[1]); } -core.List buildUnnamed55() => [ +core.List buildUnnamed56() => [ 'foo', 'foo', ]; -void checkUnnamed55(core.List o) { +void checkUnnamed56(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -2118,8 +2166,8 @@ api.ListServiceConnectionTokensResponse buildCounterListServiceConnectionTokensResponse++; if (buildCounterListServiceConnectionTokensResponse < 3) { o.nextPageToken = 'foo'; - o.serviceConnectionTokens = buildUnnamed54(); - o.unreachable = buildUnnamed55(); + o.serviceConnectionTokens = buildUnnamed55(); + o.unreachable = buildUnnamed56(); } buildCounterListServiceConnectionTokensResponse--; return o; @@ -2133,29 +2181,29 @@ void checkListServiceConnectionTokensResponse( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed54(o.serviceConnectionTokens!); - checkUnnamed55(o.unreachable!); + checkUnnamed55(o.serviceConnectionTokens!); + checkUnnamed56(o.unreachable!); } buildCounterListServiceConnectionTokensResponse--; } -core.List buildUnnamed56() => [ +core.List buildUnnamed57() => [ buildSpoke(), buildSpoke(), ]; -void checkUnnamed56(core.List o) { +void checkUnnamed57(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkSpoke(o[0]); checkSpoke(o[1]); } -core.List buildUnnamed57() => [ +core.List buildUnnamed58() => [ 'foo', 'foo', ]; -void checkUnnamed57(core.List o) { +void checkUnnamed58(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -2173,8 +2221,8 @@ api.ListSpokesResponse buildListSpokesResponse() { buildCounterListSpokesResponse++; if (buildCounterListSpokesResponse < 3) { o.nextPageToken = 'foo'; - o.spokes = buildUnnamed56(); - o.unreachable = buildUnnamed57(); + o.spokes = buildUnnamed57(); + o.unreachable = buildUnnamed58(); } buildCounterListSpokesResponse--; return o; @@ -2187,18 +2235,18 @@ void checkListSpokesResponse(api.ListSpokesResponse o) { o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed56(o.spokes!); - checkUnnamed57(o.unreachable!); + checkUnnamed57(o.spokes!); + checkUnnamed58(o.unreachable!); } buildCounterListSpokesResponse--; } -core.Map buildUnnamed58() => { +core.Map buildUnnamed59() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed58(core.Map o) { +void checkUnnamed59(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -2210,7 +2258,7 @@ void checkUnnamed58(core.Map o) { ); } -core.Map buildUnnamed59() => { +core.Map buildUnnamed60() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -2223,7 +2271,7 @@ core.Map buildUnnamed59() => { }, }; -void checkUnnamed59(core.Map o) { +void checkUnnamed60(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted7 = (o['x']!) 
as core.Map; unittest.expect(casted7, unittest.hasLength(3)); @@ -2261,9 +2309,9 @@ api.Location buildLocation() { buildCounterLocation++; if (buildCounterLocation < 3) { o.displayName = 'foo'; - o.labels = buildUnnamed58(); + o.labels = buildUnnamed59(); o.locationId = 'foo'; - o.metadata = buildUnnamed59(); + o.metadata = buildUnnamed60(); o.name = 'foo'; } buildCounterLocation--; @@ -2277,12 +2325,12 @@ void checkLocation(api.Location o) { o.displayName!, unittest.equals('foo'), ); - checkUnnamed58(o.labels!); + checkUnnamed59(o.labels!); unittest.expect( o.locationId!, unittest.equals('foo'), ); - checkUnnamed59(o.metadata!); + checkUnnamed60(o.metadata!); unittest.expect( o.name!, unittest.equals('foo'), @@ -2427,23 +2475,23 @@ void checkNextHopVpcNetwork(api.NextHopVpcNetwork o) { buildCounterNextHopVpcNetwork--; } -core.List buildUnnamed60() => [ +core.List buildUnnamed61() => [ buildAuditConfig(), buildAuditConfig(), ]; -void checkUnnamed60(core.List o) { +void checkUnnamed61(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkAuditConfig(o[0]); checkAuditConfig(o[1]); } -core.List buildUnnamed61() => [ +core.List buildUnnamed62() => [ buildBinding(), buildBinding(), ]; -void checkUnnamed61(core.List o) { +void checkUnnamed62(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkBinding(o[0]); checkBinding(o[1]); @@ -2454,8 +2502,8 @@ api.Policy buildPolicy() { final o = api.Policy(); buildCounterPolicy++; if (buildCounterPolicy < 3) { - o.auditConfigs = buildUnnamed60(); - o.bindings = buildUnnamed61(); + o.auditConfigs = buildUnnamed61(); + o.bindings = buildUnnamed62(); o.etag = 'foo'; o.version = 42; } @@ -2466,8 +2514,8 @@ api.Policy buildPolicy() { void checkPolicy(api.Policy o) { buildCounterPolicy++; if (buildCounterPolicy < 3) { - checkUnnamed60(o.auditConfigs!); - checkUnnamed61(o.bindings!); + checkUnnamed61(o.auditConfigs!); + checkUnnamed62(o.bindings!); unittest.expect( o.etag!, unittest.equals('foo'), @@ -2480,12 +2528,12 @@ void checkPolicy(api.Policy o) { buildCounterPolicy--; } -core.Map buildUnnamed62() => { +core.Map buildUnnamed63() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed62(core.Map o) { +void checkUnnamed63(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -2497,12 +2545,12 @@ void checkUnnamed62(core.Map o) { ); } -core.List buildUnnamed63() => [ +core.List buildUnnamed64() => [ buildWarnings(), buildWarnings(), ]; -void checkUnnamed63(core.List o) { +void checkUnnamed64(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkWarnings(o[0]); checkWarnings(o[1]); @@ -2518,7 +2566,7 @@ api.PolicyBasedRoute buildPolicyBasedRoute() { o.filter = buildFilter(); o.interconnectAttachment = buildInterconnectAttachment(); o.kind = 'foo'; - o.labels = buildUnnamed62(); + o.labels = buildUnnamed63(); o.name = 'foo'; o.network = 'foo'; o.nextHopIlbIp = 'foo'; @@ -2527,7 +2575,7 @@ api.PolicyBasedRoute buildPolicyBasedRoute() { o.selfLink = 'foo'; o.updateTime = 'foo'; o.virtualMachine = buildVirtualMachine(); - o.warnings = buildUnnamed63(); + o.warnings = buildUnnamed64(); } buildCounterPolicyBasedRoute--; return o; @@ -2550,7 +2598,7 @@ void checkPolicyBasedRoute(api.PolicyBasedRoute o) { o.kind!, unittest.equals('foo'), ); - checkUnnamed62(o.labels!); + checkUnnamed63(o.labels!); unittest.expect( o.name!, unittest.equals('foo'), @@ -2580,7 +2628,7 @@ void checkPolicyBasedRoute(api.PolicyBasedRoute o) { unittest.equals('foo'), ); checkVirtualMachine(o.virtualMachine!); - 
checkUnnamed63(o.warnings!); + checkUnnamed64(o.warnings!); } buildCounterPolicyBasedRoute--; } @@ -2607,12 +2655,12 @@ void checkProducerPscConfig(api.ProducerPscConfig o) { buildCounterProducerPscConfig--; } -core.List buildUnnamed64() => [ +core.List buildUnnamed65() => [ 'foo', 'foo', ]; -void checkUnnamed64(core.List o) { +void checkUnnamed65(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -2624,12 +2672,12 @@ void checkUnnamed64(core.List o) { ); } -core.List buildUnnamed65() => [ +core.List buildUnnamed66() => [ 'foo', 'foo', ]; -void checkUnnamed65(core.List o) { +void checkUnnamed66(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -2646,10 +2694,10 @@ api.PscConfig buildPscConfig() { final o = api.PscConfig(); buildCounterPscConfig++; if (buildCounterPscConfig < 3) { - o.allowedGoogleProducersResourceHierarchyLevel = buildUnnamed64(); + o.allowedGoogleProducersResourceHierarchyLevel = buildUnnamed65(); o.limit = 'foo'; o.producerInstanceLocation = 'foo'; - o.subnetworks = buildUnnamed65(); + o.subnetworks = buildUnnamed66(); } buildCounterPscConfig--; return o; @@ -2658,7 +2706,7 @@ api.PscConfig buildPscConfig() { void checkPscConfig(api.PscConfig o) { buildCounterPscConfig++; if (buildCounterPscConfig < 3) { - checkUnnamed64(o.allowedGoogleProducersResourceHierarchyLevel!); + checkUnnamed65(o.allowedGoogleProducersResourceHierarchyLevel!); unittest.expect( o.limit!, unittest.equals('foo'), @@ -2667,17 +2715,17 @@ void checkPscConfig(api.PscConfig o) { o.producerInstanceLocation!, unittest.equals('foo'), ); - checkUnnamed65(o.subnetworks!); + checkUnnamed66(o.subnetworks!); } buildCounterPscConfig--; } -core.Map buildUnnamed66() => { +core.Map buildUnnamed67() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed66(core.Map o) { +void checkUnnamed67(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -2703,7 +2751,7 @@ api.PscConnection buildPscConnection() { o.gceOperation = 'foo'; o.ipVersion = 'foo'; o.producerInstanceId = 'foo'; - o.producerInstanceMetadata = buildUnnamed66(); + o.producerInstanceMetadata = buildUnnamed67(); o.pscConnectionId = 'foo'; o.selectedSubnetwork = 'foo'; o.serviceClass = 'foo'; @@ -2746,7 +2794,7 @@ void checkPscConnection(api.PscConnection o) { o.producerInstanceId!, unittest.equals('foo'), ); - checkUnnamed66(o.producerInstanceMetadata!); + checkUnnamed67(o.producerInstanceMetadata!); unittest.expect( o.pscConnectionId!, unittest.equals('foo'), @@ -2767,12 +2815,99 @@ void checkPscConnection(api.PscConnection o) { buildCounterPscConnection--; } -core.Map buildUnnamed67() => { +core.int buildCounterPscPropagationStatus = 0; +api.PscPropagationStatus buildPscPropagationStatus() { + final o = api.PscPropagationStatus(); + buildCounterPscPropagationStatus++; + if (buildCounterPscPropagationStatus < 3) { + o.code = 'foo'; + o.message = 'foo'; + o.sourceForwardingRule = 'foo'; + o.sourceGroup = 'foo'; + o.sourceSpoke = 'foo'; + o.targetGroup = 'foo'; + o.targetSpoke = 'foo'; + } + buildCounterPscPropagationStatus--; + return o; +} + +void checkPscPropagationStatus(api.PscPropagationStatus o) { + buildCounterPscPropagationStatus++; + if (buildCounterPscPropagationStatus < 3) { + unittest.expect( + o.code!, + unittest.equals('foo'), + ); + unittest.expect( + o.message!, + unittest.equals('foo'), + ); + unittest.expect( + o.sourceForwardingRule!, + unittest.equals('foo'), + ); + unittest.expect( + o.sourceGroup!, + unittest.equals('foo'), + ); + 
unittest.expect( + o.sourceSpoke!, + unittest.equals('foo'), + ); + unittest.expect( + o.targetGroup!, + unittest.equals('foo'), + ); + unittest.expect( + o.targetSpoke!, + unittest.equals('foo'), + ); + } + buildCounterPscPropagationStatus--; +} + +core.List buildUnnamed68() => [ + buildHubStatusEntry(), + buildHubStatusEntry(), + ]; + +void checkUnnamed68(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkHubStatusEntry(o[0]); + checkHubStatusEntry(o[1]); +} + +core.int buildCounterQueryHubStatusResponse = 0; +api.QueryHubStatusResponse buildQueryHubStatusResponse() { + final o = api.QueryHubStatusResponse(); + buildCounterQueryHubStatusResponse++; + if (buildCounterQueryHubStatusResponse < 3) { + o.hubStatusEntries = buildUnnamed68(); + o.nextPageToken = 'foo'; + } + buildCounterQueryHubStatusResponse--; + return o; +} + +void checkQueryHubStatusResponse(api.QueryHubStatusResponse o) { + buildCounterQueryHubStatusResponse++; + if (buildCounterQueryHubStatusResponse < 3) { + checkUnnamed68(o.hubStatusEntries!); + unittest.expect( + o.nextPageToken!, + unittest.equals('foo'), + ); + } + buildCounterQueryHubStatusResponse--; +} + +core.Map buildUnnamed69() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed67(core.Map o) { +void checkUnnamed69(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -2794,7 +2929,7 @@ api.RegionalEndpoint buildRegionalEndpoint() { o.createTime = 'foo'; o.description = 'foo'; o.ipAddress = 'foo'; - o.labels = buildUnnamed67(); + o.labels = buildUnnamed69(); o.name = 'foo'; o.network = 'foo'; o.pscForwardingRule = 'foo'; @@ -2829,7 +2964,7 @@ void checkRegionalEndpoint(api.RegionalEndpoint o) { o.ipAddress!, unittest.equals('foo'), ); - checkUnnamed67(o.labels!); + checkUnnamed69(o.labels!); unittest.expect( o.name!, unittest.equals('foo'), @@ -2890,12 +3025,12 @@ void checkRejectHubSpokeRequest(api.RejectHubSpokeRequest o) { buildCounterRejectHubSpokeRequest--; } -core.Map buildUnnamed68() => { +core.Map buildUnnamed70() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed68(core.Map o) { +void checkUnnamed70(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -2915,7 +3050,7 @@ api.Route buildRoute() { o.createTime = 'foo'; o.description = 'foo'; o.ipCidrRange = 'foo'; - o.labels = buildUnnamed68(); + o.labels = buildUnnamed70(); o.location = 'foo'; o.name = 'foo'; o.nextHopInterconnectAttachment = buildNextHopInterconnectAttachment(); @@ -2948,7 +3083,7 @@ void checkRoute(api.Route o) { o.ipCidrRange!, unittest.equals('foo'), ); - checkUnnamed68(o.labels!); + checkUnnamed70(o.labels!); unittest.expect( o.location!, unittest.equals('foo'), @@ -2989,12 +3124,12 @@ void checkRoute(api.Route o) { buildCounterRoute--; } -core.Map buildUnnamed69() => { +core.Map buildUnnamed71() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed69(core.Map o) { +void checkUnnamed71(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -3013,7 +3148,7 @@ api.RouteTable buildRouteTable() { if (buildCounterRouteTable < 3) { o.createTime = 'foo'; o.description = 'foo'; - o.labels = buildUnnamed69(); + o.labels = buildUnnamed71(); o.name = 'foo'; o.state = 'foo'; o.uid = 'foo'; @@ -3034,7 +3169,7 @@ void checkRouteTable(api.RouteTable o) { o.description!, unittest.equals('foo'), ); - checkUnnamed69(o.labels!); + checkUnnamed71(o.labels!); unittest.expect( o.name!, unittest.equals('foo'), @@ -3107,12 +3242,12 @@ void checkRoutingVPC(api.RoutingVPC o) { buildCounterRoutingVPC--; 
} -core.Map buildUnnamed70() => { +core.Map buildUnnamed72() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed70(core.Map o) { +void checkUnnamed72(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -3132,7 +3267,7 @@ api.ServiceClass buildServiceClass() { o.createTime = 'foo'; o.description = 'foo'; o.etag = 'foo'; - o.labels = buildUnnamed70(); + o.labels = buildUnnamed72(); o.name = 'foo'; o.serviceClass = 'foo'; o.updateTime = 'foo'; @@ -3156,7 +3291,7 @@ void checkServiceClass(api.ServiceClass o) { o.etag!, unittest.equals('foo'), ); - checkUnnamed70(o.labels!); + checkUnnamed72(o.labels!); unittest.expect( o.name!, unittest.equals('foo'), @@ -3173,34 +3308,34 @@ void checkServiceClass(api.ServiceClass o) { buildCounterServiceClass--; } -core.List buildUnnamed71() => [ +core.List buildUnnamed73() => [ buildConsumerPscConfig(), buildConsumerPscConfig(), ]; -void checkUnnamed71(core.List o) { +void checkUnnamed73(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkConsumerPscConfig(o[0]); checkConsumerPscConfig(o[1]); } -core.List buildUnnamed72() => [ +core.List buildUnnamed74() => [ buildConsumerPscConnection(), buildConsumerPscConnection(), ]; -void checkUnnamed72(core.List o) { +void checkUnnamed74(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkConsumerPscConnection(o[0]); checkConsumerPscConnection(o[1]); } -core.Map buildUnnamed73() => { +core.Map buildUnnamed75() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed73(core.Map o) { +void checkUnnamed75(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -3212,12 +3347,12 @@ void checkUnnamed73(core.Map o) { ); } -core.List buildUnnamed74() => [ +core.List buildUnnamed76() => [ buildProducerPscConfig(), buildProducerPscConfig(), ]; -void checkUnnamed74(core.List o) { +void checkUnnamed76(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkProducerPscConfig(o[0]); checkProducerPscConfig(o[1]); @@ -3228,15 +3363,15 @@ api.ServiceConnectionMap buildServiceConnectionMap() { final o = api.ServiceConnectionMap(); buildCounterServiceConnectionMap++; if (buildCounterServiceConnectionMap < 3) { - o.consumerPscConfigs = buildUnnamed71(); - o.consumerPscConnections = buildUnnamed72(); + o.consumerPscConfigs = buildUnnamed73(); + o.consumerPscConnections = buildUnnamed74(); o.createTime = 'foo'; o.description = 'foo'; o.etag = 'foo'; o.infrastructure = 'foo'; - o.labels = buildUnnamed73(); + o.labels = buildUnnamed75(); o.name = 'foo'; - o.producerPscConfigs = buildUnnamed74(); + o.producerPscConfigs = buildUnnamed76(); o.serviceClass = 'foo'; o.serviceClassUri = 'foo'; o.token = 'foo'; @@ -3249,8 +3384,8 @@ api.ServiceConnectionMap buildServiceConnectionMap() { void checkServiceConnectionMap(api.ServiceConnectionMap o) { buildCounterServiceConnectionMap++; if (buildCounterServiceConnectionMap < 3) { - checkUnnamed71(o.consumerPscConfigs!); - checkUnnamed72(o.consumerPscConnections!); + checkUnnamed73(o.consumerPscConfigs!); + checkUnnamed74(o.consumerPscConnections!); unittest.expect( o.createTime!, unittest.equals('foo'), @@ -3267,12 +3402,12 @@ void checkServiceConnectionMap(api.ServiceConnectionMap o) { o.infrastructure!, unittest.equals('foo'), ); - checkUnnamed73(o.labels!); + checkUnnamed75(o.labels!); unittest.expect( o.name!, unittest.equals('foo'), ); - checkUnnamed74(o.producerPscConfigs!); + checkUnnamed76(o.producerPscConfigs!); unittest.expect( o.serviceClass!, unittest.equals('foo'), @@ -3293,12 +3428,12 @@ void 
checkServiceConnectionMap(api.ServiceConnectionMap o) { buildCounterServiceConnectionMap--; } -core.Map buildUnnamed75() => { +core.Map buildUnnamed77() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed75(core.Map o) { +void checkUnnamed77(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -3310,12 +3445,12 @@ void checkUnnamed75(core.Map o) { ); } -core.List buildUnnamed76() => [ +core.List buildUnnamed78() => [ buildPscConnection(), buildPscConnection(), ]; -void checkUnnamed76(core.List o) { +void checkUnnamed78(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkPscConnection(o[0]); checkPscConnection(o[1]); @@ -3330,11 +3465,11 @@ api.ServiceConnectionPolicy buildServiceConnectionPolicy() { o.description = 'foo'; o.etag = 'foo'; o.infrastructure = 'foo'; - o.labels = buildUnnamed75(); + o.labels = buildUnnamed77(); o.name = 'foo'; o.network = 'foo'; o.pscConfig = buildPscConfig(); - o.pscConnections = buildUnnamed76(); + o.pscConnections = buildUnnamed78(); o.serviceClass = 'foo'; o.updateTime = 'foo'; } @@ -3361,7 +3496,7 @@ void checkServiceConnectionPolicy(api.ServiceConnectionPolicy o) { o.infrastructure!, unittest.equals('foo'), ); - checkUnnamed75(o.labels!); + checkUnnamed77(o.labels!); unittest.expect( o.name!, unittest.equals('foo'), @@ -3371,7 +3506,7 @@ void checkServiceConnectionPolicy(api.ServiceConnectionPolicy o) { unittest.equals('foo'), ); checkPscConfig(o.pscConfig!); - checkUnnamed76(o.pscConnections!); + checkUnnamed78(o.pscConnections!); unittest.expect( o.serviceClass!, unittest.equals('foo'), @@ -3384,12 +3519,12 @@ void checkServiceConnectionPolicy(api.ServiceConnectionPolicy o) { buildCounterServiceConnectionPolicy--; } -core.Map buildUnnamed77() => { +core.Map buildUnnamed79() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed77(core.Map o) { +void checkUnnamed79(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -3410,7 +3545,7 @@ api.ServiceConnectionToken buildServiceConnectionToken() { o.description = 'foo'; o.etag = 'foo'; o.expireTime = 'foo'; - o.labels = buildUnnamed77(); + o.labels = buildUnnamed79(); o.name = 'foo'; o.network = 'foo'; o.token = 'foo'; @@ -3439,7 +3574,7 @@ void checkServiceConnectionToken(api.ServiceConnectionToken o) { o.expireTime!, unittest.equals('foo'), ); - checkUnnamed77(o.labels!); + checkUnnamed79(o.labels!); unittest.expect( o.name!, unittest.equals('foo'), @@ -3484,12 +3619,12 @@ void checkSetIamPolicyRequest(api.SetIamPolicyRequest o) { buildCounterSetIamPolicyRequest--; } -core.Map buildUnnamed78() => { +core.Map buildUnnamed80() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed78(core.Map o) { +void checkUnnamed80(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -3501,12 +3636,12 @@ void checkUnnamed78(core.Map o) { ); } -core.List buildUnnamed79() => [ +core.List buildUnnamed81() => [ buildStateReason(), buildStateReason(), ]; -void checkUnnamed79(core.List o) { +void checkUnnamed81(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkStateReason(o[0]); checkStateReason(o[1]); @@ -3521,14 +3656,14 @@ api.Spoke buildSpoke() { o.description = 'foo'; o.group = 'foo'; o.hub = 'foo'; - o.labels = buildUnnamed78(); + o.labels = buildUnnamed80(); o.linkedInterconnectAttachments = buildLinkedInterconnectAttachments(); o.linkedProducerVpcNetwork = buildLinkedProducerVpcNetwork(); o.linkedRouterApplianceInstances = buildLinkedRouterApplianceInstances(); o.linkedVpcNetwork = buildLinkedVpcNetwork(); 
o.linkedVpnTunnels = buildLinkedVpnTunnels(); o.name = 'foo'; - o.reasons = buildUnnamed79(); + o.reasons = buildUnnamed81(); o.spokeType = 'foo'; o.state = 'foo'; o.uniqueId = 'foo'; @@ -3557,7 +3692,7 @@ void checkSpoke(api.Spoke o) { o.hub!, unittest.equals('foo'), ); - checkUnnamed78(o.labels!); + checkUnnamed80(o.labels!); checkLinkedInterconnectAttachments(o.linkedInterconnectAttachments!); checkLinkedProducerVpcNetwork(o.linkedProducerVpcNetwork!); checkLinkedRouterApplianceInstances(o.linkedRouterApplianceInstances!); @@ -3567,7 +3702,7 @@ void checkSpoke(api.Spoke o) { o.name!, unittest.equals('foo'), ); - checkUnnamed79(o.reasons!); + checkUnnamed81(o.reasons!); unittest.expect( o.spokeType!, unittest.equals('foo'), @@ -3642,34 +3777,34 @@ void checkSpokeStateReasonCount(api.SpokeStateReasonCount o) { buildCounterSpokeStateReasonCount--; } -core.List buildUnnamed80() => [ +core.List buildUnnamed82() => [ buildSpokeStateCount(), buildSpokeStateCount(), ]; -void checkUnnamed80(core.List o) { +void checkUnnamed82(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkSpokeStateCount(o[0]); checkSpokeStateCount(o[1]); } -core.List buildUnnamed81() => [ +core.List buildUnnamed83() => [ buildSpokeStateReasonCount(), buildSpokeStateReasonCount(), ]; -void checkUnnamed81(core.List o) { +void checkUnnamed83(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkSpokeStateReasonCount(o[0]); checkSpokeStateReasonCount(o[1]); } -core.List buildUnnamed82() => [ +core.List buildUnnamed84() => [ buildSpokeTypeCount(), buildSpokeTypeCount(), ]; -void checkUnnamed82(core.List o) { +void checkUnnamed84(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkSpokeTypeCount(o[0]); checkSpokeTypeCount(o[1]); @@ -3680,9 +3815,9 @@ api.SpokeSummary buildSpokeSummary() { final o = api.SpokeSummary(); buildCounterSpokeSummary++; if (buildCounterSpokeSummary < 3) { - o.spokeStateCounts = buildUnnamed80(); - o.spokeStateReasonCounts = buildUnnamed81(); - o.spokeTypeCounts = buildUnnamed82(); + o.spokeStateCounts = buildUnnamed82(); + o.spokeStateReasonCounts = buildUnnamed83(); + o.spokeTypeCounts = buildUnnamed84(); } buildCounterSpokeSummary--; return o; @@ -3691,9 +3826,9 @@ api.SpokeSummary buildSpokeSummary() { void checkSpokeSummary(api.SpokeSummary o) { buildCounterSpokeSummary++; if (buildCounterSpokeSummary < 3) { - checkUnnamed80(o.spokeStateCounts!); - checkUnnamed81(o.spokeStateReasonCounts!); - checkUnnamed82(o.spokeTypeCounts!); + checkUnnamed82(o.spokeStateCounts!); + checkUnnamed83(o.spokeStateReasonCounts!); + checkUnnamed84(o.spokeTypeCounts!); } buildCounterSpokeSummary--; } @@ -3757,12 +3892,12 @@ void checkStateReason(api.StateReason o) { buildCounterStateReason--; } -core.List buildUnnamed83() => [ +core.List buildUnnamed85() => [ 'foo', 'foo', ]; -void checkUnnamed83(core.List o) { +void checkUnnamed85(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -3779,7 +3914,7 @@ api.TestIamPermissionsRequest buildTestIamPermissionsRequest() { final o = api.TestIamPermissionsRequest(); buildCounterTestIamPermissionsRequest++; if (buildCounterTestIamPermissionsRequest < 3) { - o.permissions = buildUnnamed83(); + o.permissions = buildUnnamed85(); } buildCounterTestIamPermissionsRequest--; return o; @@ -3788,17 +3923,17 @@ api.TestIamPermissionsRequest buildTestIamPermissionsRequest() { void checkTestIamPermissionsRequest(api.TestIamPermissionsRequest o) { buildCounterTestIamPermissionsRequest++; if (buildCounterTestIamPermissionsRequest < 
3) { - checkUnnamed83(o.permissions!); + checkUnnamed85(o.permissions!); } buildCounterTestIamPermissionsRequest--; } -core.List buildUnnamed84() => [ +core.List buildUnnamed86() => [ 'foo', 'foo', ]; -void checkUnnamed84(core.List o) { +void checkUnnamed86(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -3815,7 +3950,7 @@ api.TestIamPermissionsResponse buildTestIamPermissionsResponse() { final o = api.TestIamPermissionsResponse(); buildCounterTestIamPermissionsResponse++; if (buildCounterTestIamPermissionsResponse < 3) { - o.permissions = buildUnnamed84(); + o.permissions = buildUnnamed86(); } buildCounterTestIamPermissionsResponse--; return o; @@ -3824,17 +3959,17 @@ api.TestIamPermissionsResponse buildTestIamPermissionsResponse() { void checkTestIamPermissionsResponse(api.TestIamPermissionsResponse o) { buildCounterTestIamPermissionsResponse++; if (buildCounterTestIamPermissionsResponse < 3) { - checkUnnamed84(o.permissions!); + checkUnnamed86(o.permissions!); } buildCounterTestIamPermissionsResponse--; } -core.List buildUnnamed85() => [ +core.List buildUnnamed87() => [ 'foo', 'foo', ]; -void checkUnnamed85(core.List o) { +void checkUnnamed87(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -3851,7 +3986,7 @@ api.VirtualMachine buildVirtualMachine() { final o = api.VirtualMachine(); buildCounterVirtualMachine++; if (buildCounterVirtualMachine < 3) { - o.tags = buildUnnamed85(); + o.tags = buildUnnamed87(); } buildCounterVirtualMachine--; return o; @@ -3860,17 +3995,17 @@ api.VirtualMachine buildVirtualMachine() { void checkVirtualMachine(api.VirtualMachine o) { buildCounterVirtualMachine++; if (buildCounterVirtualMachine < 3) { - checkUnnamed85(o.tags!); + checkUnnamed87(o.tags!); } buildCounterVirtualMachine--; } -core.Map buildUnnamed86() => { +core.Map buildUnnamed88() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed86(core.Map o) { +void checkUnnamed88(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -3888,7 +4023,7 @@ api.Warnings buildWarnings() { buildCounterWarnings++; if (buildCounterWarnings < 3) { o.code = 'foo'; - o.data = buildUnnamed86(); + o.data = buildUnnamed88(); o.warningMessage = 'foo'; } buildCounterWarnings--; @@ -3902,7 +4037,7 @@ void checkWarnings(api.Warnings o) { o.code!, unittest.equals('foo'), ); - checkUnnamed86(o.data!); + checkUnnamed88(o.data!); unittest.expect( o.warningMessage!, unittest.equals('foo'), @@ -3911,12 +4046,12 @@ void checkWarnings(api.Warnings o) { buildCounterWarnings--; } -core.List buildUnnamed87() => [ +core.List buildUnnamed89() => [ 'foo', 'foo', ]; -void checkUnnamed87(core.List o) { +void checkUnnamed89(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -4098,6 +4233,16 @@ void main() { }); }); + unittest.group('obj-schema-HubStatusEntry', () { + unittest.test('to-json--from-json', () async { + final o = buildHubStatusEntry(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.HubStatusEntry.fromJson( + oJson as core.Map); + checkHubStatusEntry(od); + }); + }); + unittest.group('obj-schema-InterconnectAttachment', () { unittest.test('to-json--from-json', () async { final o = buildInterconnectAttachment(); @@ -4418,6 +4563,26 @@ void main() { }); }); + unittest.group('obj-schema-PscPropagationStatus', () { + unittest.test('to-json--from-json', () async { + final o = buildPscPropagationStatus(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = 
api.PscPropagationStatus.fromJson( + oJson as core.Map); + checkPscPropagationStatus(od); + }); + }); + + unittest.group('obj-schema-QueryHubStatusResponse', () { + unittest.test('to-json--from-json', () async { + final o = buildQueryHubStatusResponse(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.QueryHubStatusResponse.fromJson( + oJson as core.Map); + checkQueryHubStatusResponse(od); + }); + }); + unittest.group('obj-schema-RegionalEndpoint', () { unittest.test('to-json--from-json', () async { final o = buildRegionalEndpoint(); @@ -5144,7 +5309,7 @@ void main() { final arg_orderBy = 'foo'; final arg_pageSize = 42; final arg_pageToken = 'foo'; - final arg_spokeLocations = buildUnnamed87(); + final arg_spokeLocations = buildUnnamed89(); final arg_view = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -5297,6 +5462,90 @@ void main() { response as api.GoogleLongrunningOperation); }); + unittest.test('method--queryStatus', () async { + final mock = HttpServerMock(); + final res = + api.NetworkconnectivityApi(mock).projects.locations.global.hubs; + final arg_name = 'foo'; + final arg_filter = 'foo'; + final arg_groupBy = 'foo'; + final arg_orderBy = 'foo'; + final arg_pageSize = 42; + final arg_pageToken = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['filter']!.first, + unittest.equals(arg_filter), + ); + unittest.expect( + queryMap['groupBy']!.first, + unittest.equals(arg_groupBy), + ); + unittest.expect( + queryMap['orderBy']!.first, + unittest.equals(arg_orderBy), + ); + unittest.expect( + core.int.parse(queryMap['pageSize']!.first), + unittest.equals(arg_pageSize), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildQueryHubStatusResponse()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.queryStatus(arg_name, + filter: arg_filter, + groupBy: arg_groupBy, + orderBy: arg_orderBy, + pageSize: arg_pageSize, + pageToken: arg_pageToken, + $fields: arg_$fields); + checkQueryHubStatusResponse(response as api.QueryHubStatusResponse); + }); + unittest.test('method--rejectSpoke', () async { final mock = HttpServerMock(); final res = diff --git a/generated/googleapis/test/networkmanagement/v1_test.dart b/generated/googleapis/test/networkmanagement/v1_test.dart index 9a41928ce..ca3b54d54 100644 --- 
a/generated/googleapis/test/networkmanagement/v1_test.dart +++ b/generated/googleapis/test/networkmanagement/v1_test.dart @@ -485,6 +485,8 @@ api.ConnectivityTest buildConnectivityTest() { o.protocol = 'foo'; o.reachabilityDetails = buildReachabilityDetails(); o.relatedProjects = buildUnnamed5(); + o.returnReachabilityDetails = buildReachabilityDetails(); + o.roundTrip = true; o.source = buildEndpoint(); o.updateTime = 'foo'; } @@ -521,6 +523,8 @@ void checkConnectivityTest(api.ConnectivityTest o) { ); checkReachabilityDetails(o.reachabilityDetails!); checkUnnamed5(o.relatedProjects!); + checkReachabilityDetails(o.returnReachabilityDetails!); + unittest.expect(o.roundTrip!, unittest.isTrue); checkEndpoint(o.source!); unittest.expect( o.updateTime!, @@ -662,6 +666,7 @@ api.Endpoint buildEndpoint() { o.cloudSqlInstance = 'foo'; o.forwardingRule = 'foo'; o.forwardingRuleTarget = 'foo'; + o.fqdn = 'foo'; o.gkeMasterCluster = 'foo'; o.instance = 'foo'; o.ipAddress = 'foo'; @@ -696,6 +701,10 @@ void checkEndpoint(api.Endpoint o) { o.forwardingRuleTarget!, unittest.equals('foo'), ); + unittest.expect( + o.fqdn!, + unittest.equals('foo'), + ); unittest.expect( o.gkeMasterCluster!, unittest.equals('foo'), @@ -1049,6 +1058,7 @@ api.GKEMasterInfo buildGKEMasterInfo() { if (buildCounterGKEMasterInfo < 3) { o.clusterNetworkUri = 'foo'; o.clusterUri = 'foo'; + o.dnsEndpoint = 'foo'; o.externalIp = 'foo'; o.internalIp = 'foo'; } @@ -1067,6 +1077,10 @@ void checkGKEMasterInfo(api.GKEMasterInfo o) { o.clusterUri!, unittest.equals('foo'), ); + unittest.expect( + o.dnsEndpoint!, + unittest.equals('foo'), + ); unittest.expect( o.externalIp!, unittest.equals('foo'), @@ -1380,12 +1394,66 @@ void checkUnnamed14(core.List o) { ); } -core.List buildUnnamed15() => [ +core.List buildUnnamed15() => [ + buildVpcFlowLogsConfig(), + buildVpcFlowLogsConfig(), + ]; + +void checkUnnamed15(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkVpcFlowLogsConfig(o[0]); + checkVpcFlowLogsConfig(o[1]); +} + +core.int buildCounterListVpcFlowLogsConfigsResponse = 0; +api.ListVpcFlowLogsConfigsResponse buildListVpcFlowLogsConfigsResponse() { + final o = api.ListVpcFlowLogsConfigsResponse(); + buildCounterListVpcFlowLogsConfigsResponse++; + if (buildCounterListVpcFlowLogsConfigsResponse < 3) { + o.nextPageToken = 'foo'; + o.unreachable = buildUnnamed14(); + o.vpcFlowLogsConfigs = buildUnnamed15(); + } + buildCounterListVpcFlowLogsConfigsResponse--; + return o; +} + +void checkListVpcFlowLogsConfigsResponse(api.ListVpcFlowLogsConfigsResponse o) { + buildCounterListVpcFlowLogsConfigsResponse++; + if (buildCounterListVpcFlowLogsConfigsResponse < 3) { + unittest.expect( + o.nextPageToken!, + unittest.equals('foo'), + ); + checkUnnamed14(o.unreachable!); + checkUnnamed15(o.vpcFlowLogsConfigs!); + } + buildCounterListVpcFlowLogsConfigsResponse--; +} + +core.List buildUnnamed16() => [ 'foo', 'foo', ]; -void checkUnnamed15(core.List o) { +void checkUnnamed16(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o[0], + unittest.equals('foo'), + ); + unittest.expect( + o[1], + unittest.equals('foo'), + ); +} + +core.List buildUnnamed17() => [ + 'foo', + 'foo', + ]; + +void checkUnnamed17(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -1403,8 +1471,8 @@ api.LoadBalancerBackend buildLoadBalancerBackend() { buildCounterLoadBalancerBackend++; if (buildCounterLoadBalancerBackend < 3) { o.displayName = 'foo'; - o.healthCheckAllowingFirewallRules = buildUnnamed14(); - 
o.healthCheckBlockingFirewallRules = buildUnnamed15(); + o.healthCheckAllowingFirewallRules = buildUnnamed16(); + o.healthCheckBlockingFirewallRules = buildUnnamed17(); o.healthCheckFirewallState = 'foo'; o.uri = 'foo'; } @@ -1419,8 +1487,8 @@ void checkLoadBalancerBackend(api.LoadBalancerBackend o) { o.displayName!, unittest.equals('foo'), ); - checkUnnamed14(o.healthCheckAllowingFirewallRules!); - checkUnnamed15(o.healthCheckBlockingFirewallRules!); + checkUnnamed16(o.healthCheckAllowingFirewallRules!); + checkUnnamed17(o.healthCheckBlockingFirewallRules!); unittest.expect( o.healthCheckFirewallState!, unittest.equals('foo'), @@ -1500,12 +1568,12 @@ void checkLoadBalancerBackendInfo(api.LoadBalancerBackendInfo o) { buildCounterLoadBalancerBackendInfo--; } -core.List buildUnnamed16() => [ +core.List buildUnnamed18() => [ buildLoadBalancerBackend(), buildLoadBalancerBackend(), ]; -void checkUnnamed16(core.List o) { +void checkUnnamed18(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkLoadBalancerBackend(o[0]); checkLoadBalancerBackend(o[1]); @@ -1518,7 +1586,7 @@ api.LoadBalancerInfo buildLoadBalancerInfo() { if (buildCounterLoadBalancerInfo < 3) { o.backendType = 'foo'; o.backendUri = 'foo'; - o.backends = buildUnnamed16(); + o.backends = buildUnnamed18(); o.healthCheckUri = 'foo'; o.loadBalancerType = 'foo'; } @@ -1537,7 +1605,7 @@ void checkLoadBalancerInfo(api.LoadBalancerInfo o) { o.backendUri!, unittest.equals('foo'), ); - checkUnnamed16(o.backends!); + checkUnnamed18(o.backends!); unittest.expect( o.healthCheckUri!, unittest.equals('foo'), @@ -1550,12 +1618,12 @@ void checkLoadBalancerInfo(api.LoadBalancerInfo o) { buildCounterLoadBalancerInfo--; } -core.Map buildUnnamed17() => { +core.Map buildUnnamed19() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed17(core.Map o) { +void checkUnnamed19(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -1567,7 +1635,7 @@ void checkUnnamed17(core.Map o) { ); } -core.Map buildUnnamed18() => { +core.Map buildUnnamed20() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -1580,7 +1648,7 @@ core.Map buildUnnamed18() => { }, }; -void checkUnnamed18(core.Map o) { +void checkUnnamed20(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted1 = (o['x']!) as core.Map; unittest.expect(casted1, unittest.hasLength(3)); @@ -1618,9 +1686,9 @@ api.Location buildLocation() { buildCounterLocation++; if (buildCounterLocation < 3) { o.displayName = 'foo'; - o.labels = buildUnnamed17(); + o.labels = buildUnnamed19(); o.locationId = 'foo'; - o.metadata = buildUnnamed18(); + o.metadata = buildUnnamed20(); o.name = 'foo'; } buildCounterLocation--; @@ -1634,12 +1702,12 @@ void checkLocation(api.Location o) { o.displayName!, unittest.equals('foo'), ); - checkUnnamed17(o.labels!); + checkUnnamed19(o.labels!); unittest.expect( o.locationId!, unittest.equals('foo'), ); - checkUnnamed18(o.metadata!); + checkUnnamed20(o.metadata!); unittest.expect( o.name!, unittest.equals('foo'), @@ -1772,7 +1840,7 @@ void checkNetworkInfo(api.NetworkInfo o) { buildCounterNetworkInfo--; } -core.Map buildUnnamed19() => { +core.Map buildUnnamed21() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -1785,7 +1853,7 @@ core.Map buildUnnamed19() => { }, }; -void checkUnnamed19(core.Map o) { +void checkUnnamed21(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted3 = (o['x']!) 
as core.Map; unittest.expect(casted3, unittest.hasLength(3)); @@ -1817,7 +1885,7 @@ void checkUnnamed19(core.Map o) { ); } -core.Map buildUnnamed20() => { +core.Map buildUnnamed22() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -1830,7 +1898,7 @@ core.Map buildUnnamed20() => { }, }; -void checkUnnamed20(core.Map o) { +void checkUnnamed22(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted5 = (o['x']!) as core.Map; unittest.expect(casted5, unittest.hasLength(3)); @@ -1869,9 +1937,9 @@ api.Operation buildOperation() { if (buildCounterOperation < 3) { o.done = true; o.error = buildStatus(); - o.metadata = buildUnnamed19(); + o.metadata = buildUnnamed21(); o.name = 'foo'; - o.response = buildUnnamed20(); + o.response = buildUnnamed22(); } buildCounterOperation--; return o; @@ -1882,33 +1950,33 @@ void checkOperation(api.Operation o) { if (buildCounterOperation < 3) { unittest.expect(o.done!, unittest.isTrue); checkStatus(o.error!); - checkUnnamed19(o.metadata!); + checkUnnamed21(o.metadata!); unittest.expect( o.name!, unittest.equals('foo'), ); - checkUnnamed20(o.response!); + checkUnnamed22(o.response!); } buildCounterOperation--; } -core.List buildUnnamed21() => [ +core.List buildUnnamed23() => [ buildAuditConfig(), buildAuditConfig(), ]; -void checkUnnamed21(core.List o) { +void checkUnnamed23(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkAuditConfig(o[0]); checkAuditConfig(o[1]); } -core.List buildUnnamed22() => [ +core.List buildUnnamed24() => [ buildBinding(), buildBinding(), ]; -void checkUnnamed22(core.List o) { +void checkUnnamed24(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkBinding(o[0]); checkBinding(o[1]); @@ -1919,8 +1987,8 @@ api.Policy buildPolicy() { final o = api.Policy(); buildCounterPolicy++; if (buildCounterPolicy < 3) { - o.auditConfigs = buildUnnamed21(); - o.bindings = buildUnnamed22(); + o.auditConfigs = buildUnnamed23(); + o.bindings = buildUnnamed24(); o.etag = 'foo'; o.version = 42; } @@ -1931,8 +1999,8 @@ api.Policy buildPolicy() { void checkPolicy(api.Policy o) { buildCounterPolicy++; if (buildCounterPolicy < 3) { - checkUnnamed21(o.auditConfigs!); - checkUnnamed22(o.bindings!); + checkUnnamed23(o.auditConfigs!); + checkUnnamed24(o.bindings!); unittest.expect( o.etag!, unittest.equals('foo'), @@ -2067,12 +2135,12 @@ void checkProxyConnectionInfo(api.ProxyConnectionInfo o) { buildCounterProxyConnectionInfo--; } -core.List buildUnnamed23() => [ +core.List buildUnnamed25() => [ buildTrace(), buildTrace(), ]; -void checkUnnamed23(core.List o) { +void checkUnnamed25(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkTrace(o[0]); checkTrace(o[1]); @@ -2085,7 +2153,7 @@ api.ReachabilityDetails buildReachabilityDetails() { if (buildCounterReachabilityDetails < 3) { o.error = buildStatus(); o.result = 'foo'; - o.traces = buildUnnamed23(); + o.traces = buildUnnamed25(); o.verifyTime = 'foo'; } buildCounterReachabilityDetails--; @@ -2100,7 +2168,7 @@ void checkReachabilityDetails(api.ReachabilityDetails o) { o.result!, unittest.equals('foo'), ); - checkUnnamed23(o.traces!); + checkUnnamed25(o.traces!); unittest.expect( o.verifyTime!, unittest.equals('foo'), @@ -2218,12 +2286,12 @@ void checkRerunConnectivityTestRequest(api.RerunConnectivityTestRequest o) { buildCounterRerunConnectivityTestRequest--; } -core.List buildUnnamed24() => [ +core.List buildUnnamed26() => [ 'foo', 'foo', ]; -void checkUnnamed24(core.List o) { +void checkUnnamed26(core.List o) { unittest.expect(o, unittest.hasLength(2)); 
unittest.expect( o[0], @@ -2235,12 +2303,12 @@ void checkUnnamed24(core.List o) { ); } -core.List buildUnnamed25() => [ +core.List buildUnnamed27() => [ 'foo', 'foo', ]; -void checkUnnamed25(core.List o) { +void checkUnnamed27(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -2252,12 +2320,12 @@ void checkUnnamed25(core.List o) { ); } -core.List buildUnnamed26() => [ +core.List buildUnnamed28() => [ 'foo', 'foo', ]; -void checkUnnamed26(core.List o) { +void checkUnnamed28(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -2269,12 +2337,12 @@ void checkUnnamed26(core.List o) { ); } -core.List buildUnnamed27() => [ +core.List buildUnnamed29() => [ 'foo', 'foo', ]; -void checkUnnamed27(core.List o) { +void checkUnnamed29(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -2294,21 +2362,26 @@ api.RouteInfo buildRouteInfo() { o.advertisedRouteNextHopUri = 'foo'; o.advertisedRouteSourceRouterUri = 'foo'; o.destIpRange = 'foo'; - o.destPortRanges = buildUnnamed24(); + o.destPortRanges = buildUnnamed26(); o.displayName = 'foo'; - o.instanceTags = buildUnnamed25(); + o.instanceTags = buildUnnamed27(); + o.nccHubRouteUri = 'foo'; o.nccHubUri = 'foo'; o.nccSpokeUri = 'foo'; o.networkUri = 'foo'; o.nextHop = 'foo'; + o.nextHopNetworkUri = 'foo'; o.nextHopType = 'foo'; + o.nextHopUri = 'foo'; + o.originatingRouteDisplayName = 'foo'; + o.originatingRouteUri = 'foo'; o.priority = 42; - o.protocols = buildUnnamed26(); + o.protocols = buildUnnamed28(); o.region = 'foo'; o.routeScope = 'foo'; o.routeType = 'foo'; o.srcIpRange = 'foo'; - o.srcPortRanges = buildUnnamed27(); + o.srcPortRanges = buildUnnamed29(); o.uri = 'foo'; } buildCounterRouteInfo--; @@ -2330,12 +2403,16 @@ void checkRouteInfo(api.RouteInfo o) { o.destIpRange!, unittest.equals('foo'), ); - checkUnnamed24(o.destPortRanges!); + checkUnnamed26(o.destPortRanges!); unittest.expect( o.displayName!, unittest.equals('foo'), ); - checkUnnamed25(o.instanceTags!); + checkUnnamed27(o.instanceTags!); + unittest.expect( + o.nccHubRouteUri!, + unittest.equals('foo'), + ); unittest.expect( o.nccHubUri!, unittest.equals('foo'), @@ -2352,15 +2429,31 @@ void checkRouteInfo(api.RouteInfo o) { o.nextHop!, unittest.equals('foo'), ); + unittest.expect( + o.nextHopNetworkUri!, + unittest.equals('foo'), + ); unittest.expect( o.nextHopType!, unittest.equals('foo'), ); + unittest.expect( + o.nextHopUri!, + unittest.equals('foo'), + ); + unittest.expect( + o.originatingRouteDisplayName!, + unittest.equals('foo'), + ); + unittest.expect( + o.originatingRouteUri!, + unittest.equals('foo'), + ); unittest.expect( o.priority!, unittest.equals(42), ); - checkUnnamed26(o.protocols!); + checkUnnamed28(o.protocols!); unittest.expect( o.region!, unittest.equals('foo'), @@ -2377,7 +2470,7 @@ void checkRouteInfo(api.RouteInfo o) { o.srcIpRange!, unittest.equals('foo'), ); - checkUnnamed27(o.srcPortRanges!); + checkUnnamed29(o.srcPortRanges!); unittest.expect( o.uri!, unittest.equals('foo'), @@ -2432,7 +2525,7 @@ void checkSetIamPolicyRequest(api.SetIamPolicyRequest o) { buildCounterSetIamPolicyRequest--; } -core.Map buildUnnamed28() => { +core.Map buildUnnamed30() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -2445,7 +2538,7 @@ core.Map buildUnnamed28() => { }, }; -void checkUnnamed28(core.Map o) { +void checkUnnamed30(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted7 = (o['x']!) 
as core.Map; unittest.expect(casted7, unittest.hasLength(3)); @@ -2477,15 +2570,15 @@ void checkUnnamed28(core.Map o) { ); } -core.List> buildUnnamed29() => [ - buildUnnamed28(), - buildUnnamed28(), +core.List> buildUnnamed31() => [ + buildUnnamed30(), + buildUnnamed30(), ]; -void checkUnnamed29(core.List> o) { +void checkUnnamed31(core.List> o) { unittest.expect(o, unittest.hasLength(2)); - checkUnnamed28(o[0]); - checkUnnamed28(o[1]); + checkUnnamed30(o[0]); + checkUnnamed30(o[1]); } core.int buildCounterStatus = 0; @@ -2494,7 +2587,7 @@ api.Status buildStatus() { buildCounterStatus++; if (buildCounterStatus < 3) { o.code = 42; - o.details = buildUnnamed29(); + o.details = buildUnnamed31(); o.message = 'foo'; } buildCounterStatus--; @@ -2508,7 +2601,7 @@ void checkStatus(api.Status o) { o.code!, unittest.equals(42), ); - checkUnnamed29(o.details!); + checkUnnamed31(o.details!); unittest.expect( o.message!, unittest.equals('foo'), @@ -2627,12 +2720,12 @@ void checkStorageBucketInfo(api.StorageBucketInfo o) { buildCounterStorageBucketInfo--; } -core.List buildUnnamed30() => [ +core.List buildUnnamed32() => [ 'foo', 'foo', ]; -void checkUnnamed30(core.List o) { +void checkUnnamed32(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -2649,7 +2742,7 @@ api.TestIamPermissionsRequest buildTestIamPermissionsRequest() { final o = api.TestIamPermissionsRequest(); buildCounterTestIamPermissionsRequest++; if (buildCounterTestIamPermissionsRequest < 3) { - o.permissions = buildUnnamed30(); + o.permissions = buildUnnamed32(); } buildCounterTestIamPermissionsRequest--; return o; @@ -2658,17 +2751,17 @@ api.TestIamPermissionsRequest buildTestIamPermissionsRequest() { void checkTestIamPermissionsRequest(api.TestIamPermissionsRequest o) { buildCounterTestIamPermissionsRequest++; if (buildCounterTestIamPermissionsRequest < 3) { - checkUnnamed30(o.permissions!); + checkUnnamed32(o.permissions!); } buildCounterTestIamPermissionsRequest--; } -core.List buildUnnamed31() => [ +core.List buildUnnamed33() => [ 'foo', 'foo', ]; -void checkUnnamed31(core.List o) { +void checkUnnamed33(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -2685,7 +2778,7 @@ api.TestIamPermissionsResponse buildTestIamPermissionsResponse() { final o = api.TestIamPermissionsResponse(); buildCounterTestIamPermissionsResponse++; if (buildCounterTestIamPermissionsResponse < 3) { - o.permissions = buildUnnamed31(); + o.permissions = buildUnnamed33(); } buildCounterTestIamPermissionsResponse--; return o; @@ -2694,17 +2787,17 @@ api.TestIamPermissionsResponse buildTestIamPermissionsResponse() { void checkTestIamPermissionsResponse(api.TestIamPermissionsResponse o) { buildCounterTestIamPermissionsResponse++; if (buildCounterTestIamPermissionsResponse < 3) { - checkUnnamed31(o.permissions!); + checkUnnamed33(o.permissions!); } buildCounterTestIamPermissionsResponse--; } -core.List buildUnnamed32() => [ +core.List buildUnnamed34() => [ buildStep(), buildStep(), ]; -void checkUnnamed32(core.List o) { +void checkUnnamed34(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkStep(o[0]); checkStep(o[1]); @@ -2717,7 +2810,7 @@ api.Trace buildTrace() { if (buildCounterTrace < 3) { o.endpointInfo = buildEndpointInfo(); o.forwardTraceId = 42; - o.steps = buildUnnamed32(); + o.steps = buildUnnamed34(); } buildCounterTrace--; return o; @@ -2731,7 +2824,7 @@ void checkTrace(api.Trace o) { o.forwardTraceId!, unittest.equals(42), ); - checkUnnamed32(o.steps!); + 
checkUnnamed34(o.steps!); } buildCounterTrace--; } @@ -2768,6 +2861,121 @@ void checkVpcConnectorInfo(api.VpcConnectorInfo o) { buildCounterVpcConnectorInfo--; } +core.Map buildUnnamed35() => { + 'x': 'foo', + 'y': 'foo', + }; + +void checkUnnamed35(core.Map o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o['x']!, + unittest.equals('foo'), + ); + unittest.expect( + o['y']!, + unittest.equals('foo'), + ); +} + +core.List buildUnnamed36() => [ + 'foo', + 'foo', + ]; + +void checkUnnamed36(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o[0], + unittest.equals('foo'), + ); + unittest.expect( + o[1], + unittest.equals('foo'), + ); +} + +core.int buildCounterVpcFlowLogsConfig = 0; +api.VpcFlowLogsConfig buildVpcFlowLogsConfig() { + final o = api.VpcFlowLogsConfig(); + buildCounterVpcFlowLogsConfig++; + if (buildCounterVpcFlowLogsConfig < 3) { + o.aggregationInterval = 'foo'; + o.createTime = 'foo'; + o.description = 'foo'; + o.filterExpr = 'foo'; + o.flowSampling = 42.0; + o.interconnectAttachment = 'foo'; + o.labels = buildUnnamed35(); + o.metadata = 'foo'; + o.metadataFields = buildUnnamed36(); + o.name = 'foo'; + o.state = 'foo'; + o.targetResourceState = 'foo'; + o.updateTime = 'foo'; + o.vpnTunnel = 'foo'; + } + buildCounterVpcFlowLogsConfig--; + return o; +} + +void checkVpcFlowLogsConfig(api.VpcFlowLogsConfig o) { + buildCounterVpcFlowLogsConfig++; + if (buildCounterVpcFlowLogsConfig < 3) { + unittest.expect( + o.aggregationInterval!, + unittest.equals('foo'), + ); + unittest.expect( + o.createTime!, + unittest.equals('foo'), + ); + unittest.expect( + o.description!, + unittest.equals('foo'), + ); + unittest.expect( + o.filterExpr!, + unittest.equals('foo'), + ); + unittest.expect( + o.flowSampling!, + unittest.equals(42.0), + ); + unittest.expect( + o.interconnectAttachment!, + unittest.equals('foo'), + ); + checkUnnamed35(o.labels!); + unittest.expect( + o.metadata!, + unittest.equals('foo'), + ); + checkUnnamed36(o.metadataFields!); + unittest.expect( + o.name!, + unittest.equals('foo'), + ); + unittest.expect( + o.state!, + unittest.equals('foo'), + ); + unittest.expect( + o.targetResourceState!, + unittest.equals('foo'), + ); + unittest.expect( + o.updateTime!, + unittest.equals('foo'), + ); + unittest.expect( + o.vpnTunnel!, + unittest.equals('foo'), + ); + } + buildCounterVpcFlowLogsConfig--; +} + core.int buildCounterVpnGatewayInfo = 0; api.VpnGatewayInfo buildVpnGatewayInfo() { final o = api.VpnGatewayInfo(); @@ -3188,6 +3396,16 @@ void main() { }); }); + unittest.group('obj-schema-ListVpcFlowLogsConfigsResponse', () { + unittest.test('to-json--from-json', () async { + final o = buildListVpcFlowLogsConfigsResponse(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.ListVpcFlowLogsConfigsResponse.fromJson( + oJson as core.Map); + checkListVpcFlowLogsConfigsResponse(od); + }); + }); + unittest.group('obj-schema-LoadBalancerBackend', () { unittest.test('to-json--from-json', () async { final o = buildLoadBalancerBackend(); @@ -3428,6 +3646,16 @@ void main() { }); }); + unittest.group('obj-schema-VpcFlowLogsConfig', () { + unittest.test('to-json--from-json', () async { + final o = buildVpcFlowLogsConfig(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.VpcFlowLogsConfig.fromJson( + oJson as core.Map); + checkVpcFlowLogsConfig(od); + }); + }); + unittest.group('obj-schema-VpnGatewayInfo', () { unittest.test('to-json--from-json', () async { final o = buildVpnGatewayInfo(); @@ 
-4392,4 +4620,319 @@ void main() { checkListOperationsResponse(response as api.ListOperationsResponse); }); }); + + unittest.group('resource-ProjectsLocationsVpcFlowLogsConfigsResource', () { + unittest.test('method--create', () async { + final mock = HttpServerMock(); + final res = + api.NetworkManagementApi(mock).projects.locations.vpcFlowLogsConfigs; + final arg_request = buildVpcFlowLogsConfig(); + final arg_parent = 'foo'; + final arg_vpcFlowLogsConfigId = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.VpcFlowLogsConfig.fromJson( + json as core.Map); + checkVpcFlowLogsConfig(obj); + + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['vpcFlowLogsConfigId']!.first, + unittest.equals(arg_vpcFlowLogsConfigId), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildOperation()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.create(arg_request, arg_parent, + vpcFlowLogsConfigId: arg_vpcFlowLogsConfigId, $fields: arg_$fields); + checkOperation(response as api.Operation); + }); + + unittest.test('method--delete', () async { + final mock = HttpServerMock(); + final res = + api.NetworkManagementApi(mock).projects.locations.vpcFlowLogsConfigs; + final arg_name = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildOperation()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.delete(arg_name, $fields: arg_$fields); + checkOperation(response as 
api.Operation); + }); + + unittest.test('method--get', () async { + final mock = HttpServerMock(); + final res = + api.NetworkManagementApi(mock).projects.locations.vpcFlowLogsConfigs; + final arg_name = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildVpcFlowLogsConfig()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.get(arg_name, $fields: arg_$fields); + checkVpcFlowLogsConfig(response as api.VpcFlowLogsConfig); + }); + + unittest.test('method--list', () async { + final mock = HttpServerMock(); + final res = + api.NetworkManagementApi(mock).projects.locations.vpcFlowLogsConfigs; + final arg_parent = 'foo'; + final arg_filter = 'foo'; + final arg_orderBy = 'foo'; + final arg_pageSize = 42; + final arg_pageToken = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['filter']!.first, + unittest.equals(arg_filter), + ); + unittest.expect( + queryMap['orderBy']!.first, + unittest.equals(arg_orderBy), + ); + unittest.expect( + core.int.parse(queryMap['pageSize']!.first), + unittest.equals(arg_pageSize), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildListVpcFlowLogsConfigsResponse()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.list(arg_parent, + filter: arg_filter, + orderBy: arg_orderBy, + pageSize: 
arg_pageSize, + pageToken: arg_pageToken, + $fields: arg_$fields); + checkListVpcFlowLogsConfigsResponse( + response as api.ListVpcFlowLogsConfigsResponse); + }); + + unittest.test('method--patch', () async { + final mock = HttpServerMock(); + final res = + api.NetworkManagementApi(mock).projects.locations.vpcFlowLogsConfigs; + final arg_request = buildVpcFlowLogsConfig(); + final arg_name = 'foo'; + final arg_updateMask = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.VpcFlowLogsConfig.fromJson( + json as core.Map); + checkVpcFlowLogsConfig(obj); + + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['updateMask']!.first, + unittest.equals(arg_updateMask), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildOperation()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.patch(arg_request, arg_name, + updateMask: arg_updateMask, $fields: arg_$fields); + checkOperation(response as api.Operation); + }); + }); } diff --git a/generated/googleapis/test/networksecurity/v1_test.dart b/generated/googleapis/test/networksecurity/v1_test.dart index 87431eb3c..3c58afbe8 100644 --- a/generated/googleapis/test/networksecurity/v1_test.dart +++ b/generated/googleapis/test/networksecurity/v1_test.dart @@ -250,49 +250,23 @@ void checkAuthorizationPolicy(api.AuthorizationPolicy o) { buildCounterAuthorizationPolicy--; } -core.int buildCounterCancelOperationRequest = 0; -api.CancelOperationRequest buildCancelOperationRequest() { - final o = api.CancelOperationRequest(); - buildCounterCancelOperationRequest++; - if (buildCounterCancelOperationRequest < 3) {} - buildCounterCancelOperationRequest--; - return o; -} - -void checkCancelOperationRequest(api.CancelOperationRequest o) { - buildCounterCancelOperationRequest++; - if (buildCounterCancelOperationRequest < 3) {} - buildCounterCancelOperationRequest--; -} - -core.int buildCounterCertificateProviderInstance = 0; -api.CertificateProviderInstance buildCertificateProviderInstance() { - final o = api.CertificateProviderInstance(); - buildCounterCertificateProviderInstance++; - if (buildCounterCertificateProviderInstance < 3) { - o.pluginInstance = 'foo'; - } - buildCounterCertificateProviderInstance--; - return o; -} +core.List buildUnnamed6() => [ + buildAuthzPolicyAuthzRule(), + buildAuthzPolicyAuthzRule(), + ]; -void checkCertificateProviderInstance(api.CertificateProviderInstance o) { - buildCounterCertificateProviderInstance++; - if (buildCounterCertificateProviderInstance < 3) { - unittest.expect( - 
o.pluginInstance!, - unittest.equals('foo'), - ); - } - buildCounterCertificateProviderInstance--; +void checkUnnamed6(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkAuthzPolicyAuthzRule(o[0]); + checkAuthzPolicyAuthzRule(o[1]); } -core.Map buildUnnamed6() => { +core.Map buildUnnamed7() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed6(core.Map o) { +void checkUnnamed7(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -304,249 +278,356 @@ void checkUnnamed6(core.Map o) { ); } -core.List buildUnnamed7() => [ - buildValidationCA(), - buildValidationCA(), - ]; - -void checkUnnamed7(core.List o) { - unittest.expect(o, unittest.hasLength(2)); - checkValidationCA(o[0]); - checkValidationCA(o[1]); -} - -core.int buildCounterClientTlsPolicy = 0; -api.ClientTlsPolicy buildClientTlsPolicy() { - final o = api.ClientTlsPolicy(); - buildCounterClientTlsPolicy++; - if (buildCounterClientTlsPolicy < 3) { - o.clientCertificate = - buildGoogleCloudNetworksecurityV1CertificateProvider(); +core.int buildCounterAuthzPolicy = 0; +api.AuthzPolicy buildAuthzPolicy() { + final o = api.AuthzPolicy(); + buildCounterAuthzPolicy++; + if (buildCounterAuthzPolicy < 3) { + o.action = 'foo'; o.createTime = 'foo'; + o.customProvider = buildAuthzPolicyCustomProvider(); o.description = 'foo'; - o.labels = buildUnnamed6(); + o.httpRules = buildUnnamed6(); + o.labels = buildUnnamed7(); o.name = 'foo'; - o.serverValidationCa = buildUnnamed7(); - o.sni = 'foo'; + o.target = buildAuthzPolicyTarget(); o.updateTime = 'foo'; } - buildCounterClientTlsPolicy--; + buildCounterAuthzPolicy--; return o; } -void checkClientTlsPolicy(api.ClientTlsPolicy o) { - buildCounterClientTlsPolicy++; - if (buildCounterClientTlsPolicy < 3) { - checkGoogleCloudNetworksecurityV1CertificateProvider(o.clientCertificate!); +void checkAuthzPolicy(api.AuthzPolicy o) { + buildCounterAuthzPolicy++; + if (buildCounterAuthzPolicy < 3) { unittest.expect( - o.createTime!, + o.action!, unittest.equals('foo'), ); unittest.expect( - o.description!, + o.createTime!, unittest.equals('foo'), ); - checkUnnamed6(o.labels!); + checkAuthzPolicyCustomProvider(o.customProvider!); unittest.expect( - o.name!, + o.description!, unittest.equals('foo'), ); - checkUnnamed7(o.serverValidationCa!); + checkUnnamed6(o.httpRules!); + checkUnnamed7(o.labels!); unittest.expect( - o.sni!, + o.name!, unittest.equals('foo'), ); + checkAuthzPolicyTarget(o.target!); unittest.expect( o.updateTime!, unittest.equals('foo'), ); } - buildCounterClientTlsPolicy--; + buildCounterAuthzPolicy--; } -core.int buildCounterCloneAddressGroupItemsRequest = 0; -api.CloneAddressGroupItemsRequest buildCloneAddressGroupItemsRequest() { - final o = api.CloneAddressGroupItemsRequest(); - buildCounterCloneAddressGroupItemsRequest++; - if (buildCounterCloneAddressGroupItemsRequest < 3) { - o.requestId = 'foo'; - o.sourceAddressGroup = 'foo'; +core.int buildCounterAuthzPolicyAuthzRule = 0; +api.AuthzPolicyAuthzRule buildAuthzPolicyAuthzRule() { + final o = api.AuthzPolicyAuthzRule(); + buildCounterAuthzPolicyAuthzRule++; + if (buildCounterAuthzPolicyAuthzRule < 3) { + o.from = buildAuthzPolicyAuthzRuleFrom(); + o.to = buildAuthzPolicyAuthzRuleTo(); + o.when = 'foo'; } - buildCounterCloneAddressGroupItemsRequest--; + buildCounterAuthzPolicyAuthzRule--; return o; } -void checkCloneAddressGroupItemsRequest(api.CloneAddressGroupItemsRequest o) { - buildCounterCloneAddressGroupItemsRequest++; - if (buildCounterCloneAddressGroupItemsRequest < 3) { - 
unittest.expect( - o.requestId!, - unittest.equals('foo'), - ); +void checkAuthzPolicyAuthzRule(api.AuthzPolicyAuthzRule o) { + buildCounterAuthzPolicyAuthzRule++; + if (buildCounterAuthzPolicyAuthzRule < 3) { + checkAuthzPolicyAuthzRuleFrom(o.from!); + checkAuthzPolicyAuthzRuleTo(o.to!); unittest.expect( - o.sourceAddressGroup!, + o.when!, unittest.equals('foo'), ); } - buildCounterCloneAddressGroupItemsRequest--; + buildCounterAuthzPolicyAuthzRule--; } -core.int buildCounterCustomMirroringProfile = 0; -api.CustomMirroringProfile buildCustomMirroringProfile() { - final o = api.CustomMirroringProfile(); - buildCounterCustomMirroringProfile++; - if (buildCounterCustomMirroringProfile < 3) { - o.mirroringEndpointGroup = 'foo'; +core.List buildUnnamed8() => [ + buildAuthzPolicyAuthzRuleFromRequestSource(), + buildAuthzPolicyAuthzRuleFromRequestSource(), + ]; + +void checkUnnamed8(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkAuthzPolicyAuthzRuleFromRequestSource(o[0]); + checkAuthzPolicyAuthzRuleFromRequestSource(o[1]); +} + +core.List buildUnnamed9() => [ + buildAuthzPolicyAuthzRuleFromRequestSource(), + buildAuthzPolicyAuthzRuleFromRequestSource(), + ]; + +void checkUnnamed9(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkAuthzPolicyAuthzRuleFromRequestSource(o[0]); + checkAuthzPolicyAuthzRuleFromRequestSource(o[1]); +} + +core.int buildCounterAuthzPolicyAuthzRuleFrom = 0; +api.AuthzPolicyAuthzRuleFrom buildAuthzPolicyAuthzRuleFrom() { + final o = api.AuthzPolicyAuthzRuleFrom(); + buildCounterAuthzPolicyAuthzRuleFrom++; + if (buildCounterAuthzPolicyAuthzRuleFrom < 3) { + o.notSources = buildUnnamed8(); + o.sources = buildUnnamed9(); } - buildCounterCustomMirroringProfile--; + buildCounterAuthzPolicyAuthzRuleFrom--; return o; } -void checkCustomMirroringProfile(api.CustomMirroringProfile o) { - buildCounterCustomMirroringProfile++; - if (buildCounterCustomMirroringProfile < 3) { - unittest.expect( - o.mirroringEndpointGroup!, - unittest.equals('foo'), - ); +void checkAuthzPolicyAuthzRuleFrom(api.AuthzPolicyAuthzRuleFrom o) { + buildCounterAuthzPolicyAuthzRuleFrom++; + if (buildCounterAuthzPolicyAuthzRuleFrom < 3) { + checkUnnamed8(o.notSources!); + checkUnnamed9(o.sources!); } - buildCounterCustomMirroringProfile--; + buildCounterAuthzPolicyAuthzRuleFrom--; } -core.List buildUnnamed8() => [ - 'foo', - 'foo', +core.List buildUnnamed10() => [ + buildAuthzPolicyAuthzRuleStringMatch(), + buildAuthzPolicyAuthzRuleStringMatch(), ]; -void checkUnnamed8(core.List o) { +void checkUnnamed10(core.List o) { unittest.expect(o, unittest.hasLength(2)); - unittest.expect( - o[0], - unittest.equals('foo'), - ); - unittest.expect( - o[1], - unittest.equals('foo'), - ); + checkAuthzPolicyAuthzRuleStringMatch(o[0]); + checkAuthzPolicyAuthzRuleStringMatch(o[1]); } -core.List buildUnnamed9() => [ - 'foo', - 'foo', +core.List buildUnnamed11() => [ + buildAuthzPolicyAuthzRuleRequestResource(), + buildAuthzPolicyAuthzRuleRequestResource(), ]; -void checkUnnamed9(core.List o) { +void checkUnnamed11(core.List o) { unittest.expect(o, unittest.hasLength(2)); - unittest.expect( - o[0], - unittest.equals('foo'), - ); - unittest.expect( - o[1], - unittest.equals('foo'), - ); + checkAuthzPolicyAuthzRuleRequestResource(o[0]); + checkAuthzPolicyAuthzRuleRequestResource(o[1]); +} + +core.int buildCounterAuthzPolicyAuthzRuleFromRequestSource = 0; +api.AuthzPolicyAuthzRuleFromRequestSource + buildAuthzPolicyAuthzRuleFromRequestSource() { + final o = 
api.AuthzPolicyAuthzRuleFromRequestSource(); + buildCounterAuthzPolicyAuthzRuleFromRequestSource++; + if (buildCounterAuthzPolicyAuthzRuleFromRequestSource < 3) { + o.principals = buildUnnamed10(); + o.resources = buildUnnamed11(); + } + buildCounterAuthzPolicyAuthzRuleFromRequestSource--; + return o; } -core.List buildUnnamed10() => [ - 42, - 42, +void checkAuthzPolicyAuthzRuleFromRequestSource( + api.AuthzPolicyAuthzRuleFromRequestSource o) { + buildCounterAuthzPolicyAuthzRuleFromRequestSource++; + if (buildCounterAuthzPolicyAuthzRuleFromRequestSource < 3) { + checkUnnamed10(o.principals!); + checkUnnamed11(o.resources!); + } + buildCounterAuthzPolicyAuthzRuleFromRequestSource--; +} + +core.int buildCounterAuthzPolicyAuthzRuleHeaderMatch = 0; +api.AuthzPolicyAuthzRuleHeaderMatch buildAuthzPolicyAuthzRuleHeaderMatch() { + final o = api.AuthzPolicyAuthzRuleHeaderMatch(); + buildCounterAuthzPolicyAuthzRuleHeaderMatch++; + if (buildCounterAuthzPolicyAuthzRuleHeaderMatch < 3) { + o.name = 'foo'; + o.value = buildAuthzPolicyAuthzRuleStringMatch(); + } + buildCounterAuthzPolicyAuthzRuleHeaderMatch--; + return o; +} + +void checkAuthzPolicyAuthzRuleHeaderMatch( + api.AuthzPolicyAuthzRuleHeaderMatch o) { + buildCounterAuthzPolicyAuthzRuleHeaderMatch++; + if (buildCounterAuthzPolicyAuthzRuleHeaderMatch < 3) { + unittest.expect( + o.name!, + unittest.equals('foo'), + ); + checkAuthzPolicyAuthzRuleStringMatch(o.value!); + } + buildCounterAuthzPolicyAuthzRuleHeaderMatch--; +} + +core.int buildCounterAuthzPolicyAuthzRuleRequestResource = 0; +api.AuthzPolicyAuthzRuleRequestResource + buildAuthzPolicyAuthzRuleRequestResource() { + final o = api.AuthzPolicyAuthzRuleRequestResource(); + buildCounterAuthzPolicyAuthzRuleRequestResource++; + if (buildCounterAuthzPolicyAuthzRuleRequestResource < 3) { + o.iamServiceAccount = buildAuthzPolicyAuthzRuleStringMatch(); + o.tagValueIdSet = buildAuthzPolicyAuthzRuleRequestResourceTagValueIdSet(); + } + buildCounterAuthzPolicyAuthzRuleRequestResource--; + return o; +} + +void checkAuthzPolicyAuthzRuleRequestResource( + api.AuthzPolicyAuthzRuleRequestResource o) { + buildCounterAuthzPolicyAuthzRuleRequestResource++; + if (buildCounterAuthzPolicyAuthzRuleRequestResource < 3) { + checkAuthzPolicyAuthzRuleStringMatch(o.iamServiceAccount!); + checkAuthzPolicyAuthzRuleRequestResourceTagValueIdSet(o.tagValueIdSet!); + } + buildCounterAuthzPolicyAuthzRuleRequestResource--; +} + +core.List buildUnnamed12() => [ + 'foo', + 'foo', ]; -void checkUnnamed10(core.List o) { +void checkUnnamed12(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], - unittest.equals(42), + unittest.equals('foo'), ); unittest.expect( o[1], - unittest.equals(42), + unittest.equals('foo'), ); } -core.int buildCounterDestination = 0; -api.Destination buildDestination() { - final o = api.Destination(); - buildCounterDestination++; - if (buildCounterDestination < 3) { - o.hosts = buildUnnamed8(); - o.httpHeaderMatch = buildHttpHeaderMatch(); - o.methods = buildUnnamed9(); - o.ports = buildUnnamed10(); +core.int buildCounterAuthzPolicyAuthzRuleRequestResourceTagValueIdSet = 0; +api.AuthzPolicyAuthzRuleRequestResourceTagValueIdSet + buildAuthzPolicyAuthzRuleRequestResourceTagValueIdSet() { + final o = api.AuthzPolicyAuthzRuleRequestResourceTagValueIdSet(); + buildCounterAuthzPolicyAuthzRuleRequestResourceTagValueIdSet++; + if (buildCounterAuthzPolicyAuthzRuleRequestResourceTagValueIdSet < 3) { + o.ids = buildUnnamed12(); } - buildCounterDestination--; + 
buildCounterAuthzPolicyAuthzRuleRequestResourceTagValueIdSet--; return o; } -void checkDestination(api.Destination o) { - buildCounterDestination++; - if (buildCounterDestination < 3) { - checkUnnamed8(o.hosts!); - checkHttpHeaderMatch(o.httpHeaderMatch!); - checkUnnamed9(o.methods!); - checkUnnamed10(o.ports!); +void checkAuthzPolicyAuthzRuleRequestResourceTagValueIdSet( + api.AuthzPolicyAuthzRuleRequestResourceTagValueIdSet o) { + buildCounterAuthzPolicyAuthzRuleRequestResourceTagValueIdSet++; + if (buildCounterAuthzPolicyAuthzRuleRequestResourceTagValueIdSet < 3) { + checkUnnamed12(o.ids!); } - buildCounterDestination--; -} - -core.int buildCounterEmpty = 0; -api.Empty buildEmpty() { - final o = api.Empty(); - buildCounterEmpty++; - if (buildCounterEmpty < 3) {} - buildCounterEmpty--; - return o; -} - -void checkEmpty(api.Empty o) { - buildCounterEmpty++; - if (buildCounterEmpty < 3) {} - buildCounterEmpty--; -} - -core.int buildCounterExpr = 0; -api.Expr buildExpr() { - final o = api.Expr(); - buildCounterExpr++; - if (buildCounterExpr < 3) { - o.description = 'foo'; - o.expression = 'foo'; - o.location = 'foo'; - o.title = 'foo'; + buildCounterAuthzPolicyAuthzRuleRequestResourceTagValueIdSet--; +} + +core.int buildCounterAuthzPolicyAuthzRuleStringMatch = 0; +api.AuthzPolicyAuthzRuleStringMatch buildAuthzPolicyAuthzRuleStringMatch() { + final o = api.AuthzPolicyAuthzRuleStringMatch(); + buildCounterAuthzPolicyAuthzRuleStringMatch++; + if (buildCounterAuthzPolicyAuthzRuleStringMatch < 3) { + o.contains = 'foo'; + o.exact = 'foo'; + o.ignoreCase = true; + o.prefix = 'foo'; + o.suffix = 'foo'; } - buildCounterExpr--; + buildCounterAuthzPolicyAuthzRuleStringMatch--; return o; } -void checkExpr(api.Expr o) { - buildCounterExpr++; - if (buildCounterExpr < 3) { +void checkAuthzPolicyAuthzRuleStringMatch( + api.AuthzPolicyAuthzRuleStringMatch o) { + buildCounterAuthzPolicyAuthzRuleStringMatch++; + if (buildCounterAuthzPolicyAuthzRuleStringMatch < 3) { unittest.expect( - o.description!, + o.contains!, unittest.equals('foo'), ); unittest.expect( - o.expression!, + o.exact!, unittest.equals('foo'), ); + unittest.expect(o.ignoreCase!, unittest.isTrue); unittest.expect( - o.location!, + o.prefix!, unittest.equals('foo'), ); unittest.expect( - o.title!, + o.suffix!, unittest.equals('foo'), ); } - buildCounterExpr--; + buildCounterAuthzPolicyAuthzRuleStringMatch--; +} + +core.List buildUnnamed13() => [ + buildAuthzPolicyAuthzRuleToRequestOperation(), + buildAuthzPolicyAuthzRuleToRequestOperation(), + ]; + +void checkUnnamed13(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkAuthzPolicyAuthzRuleToRequestOperation(o[0]); + checkAuthzPolicyAuthzRuleToRequestOperation(o[1]); +} + +core.List buildUnnamed14() => [ + buildAuthzPolicyAuthzRuleToRequestOperation(), + buildAuthzPolicyAuthzRuleToRequestOperation(), + ]; + +void checkUnnamed14(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkAuthzPolicyAuthzRuleToRequestOperation(o[0]); + checkAuthzPolicyAuthzRuleToRequestOperation(o[1]); +} + +core.int buildCounterAuthzPolicyAuthzRuleTo = 0; +api.AuthzPolicyAuthzRuleTo buildAuthzPolicyAuthzRuleTo() { + final o = api.AuthzPolicyAuthzRuleTo(); + buildCounterAuthzPolicyAuthzRuleTo++; + if (buildCounterAuthzPolicyAuthzRuleTo < 3) { + o.notOperations = buildUnnamed13(); + o.operations = buildUnnamed14(); + } + buildCounterAuthzPolicyAuthzRuleTo--; + return o; +} + +void checkAuthzPolicyAuthzRuleTo(api.AuthzPolicyAuthzRuleTo o) { + buildCounterAuthzPolicyAuthzRuleTo++; + if 
(buildCounterAuthzPolicyAuthzRuleTo < 3) { + checkUnnamed13(o.notOperations!); + checkUnnamed14(o.operations!); + } + buildCounterAuthzPolicyAuthzRuleTo--; +} + +core.List buildUnnamed15() => [ + buildAuthzPolicyAuthzRuleStringMatch(), + buildAuthzPolicyAuthzRuleStringMatch(), + ]; + +void checkUnnamed15(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkAuthzPolicyAuthzRuleStringMatch(o[0]); + checkAuthzPolicyAuthzRuleStringMatch(o[1]); } -core.List buildUnnamed11() => [ +core.List buildUnnamed16() => [ 'foo', 'foo', ]; -void checkUnnamed11(core.List o) { +void checkUnnamed16(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -558,95 +639,235 @@ void checkUnnamed11(core.List o) { ); } -core.List buildUnnamed12() => [ - buildFirewallEndpointAssociationReference(), - buildFirewallEndpointAssociationReference(), +core.List buildUnnamed17() => [ + buildAuthzPolicyAuthzRuleStringMatch(), + buildAuthzPolicyAuthzRuleStringMatch(), ]; -void checkUnnamed12(core.List o) { +void checkUnnamed17(core.List o) { unittest.expect(o, unittest.hasLength(2)); - checkFirewallEndpointAssociationReference(o[0]); - checkFirewallEndpointAssociationReference(o[1]); + checkAuthzPolicyAuthzRuleStringMatch(o[0]); + checkAuthzPolicyAuthzRuleStringMatch(o[1]); +} + +core.int buildCounterAuthzPolicyAuthzRuleToRequestOperation = 0; +api.AuthzPolicyAuthzRuleToRequestOperation + buildAuthzPolicyAuthzRuleToRequestOperation() { + final o = api.AuthzPolicyAuthzRuleToRequestOperation(); + buildCounterAuthzPolicyAuthzRuleToRequestOperation++; + if (buildCounterAuthzPolicyAuthzRuleToRequestOperation < 3) { + o.headerSet = buildAuthzPolicyAuthzRuleToRequestOperationHeaderSet(); + o.hosts = buildUnnamed15(); + o.methods = buildUnnamed16(); + o.paths = buildUnnamed17(); + } + buildCounterAuthzPolicyAuthzRuleToRequestOperation--; + return o; } -core.Map buildUnnamed13() => { - 'x': 'foo', - 'y': 'foo', - }; +void checkAuthzPolicyAuthzRuleToRequestOperation( + api.AuthzPolicyAuthzRuleToRequestOperation o) { + buildCounterAuthzPolicyAuthzRuleToRequestOperation++; + if (buildCounterAuthzPolicyAuthzRuleToRequestOperation < 3) { + checkAuthzPolicyAuthzRuleToRequestOperationHeaderSet(o.headerSet!); + checkUnnamed15(o.hosts!); + checkUnnamed16(o.methods!); + checkUnnamed17(o.paths!); + } + buildCounterAuthzPolicyAuthzRuleToRequestOperation--; +} -void checkUnnamed13(core.Map o) { +core.List buildUnnamed18() => [ + buildAuthzPolicyAuthzRuleHeaderMatch(), + buildAuthzPolicyAuthzRuleHeaderMatch(), + ]; + +void checkUnnamed18(core.List o) { unittest.expect(o, unittest.hasLength(2)); - unittest.expect( - o['x']!, - unittest.equals('foo'), - ); - unittest.expect( - o['y']!, - unittest.equals('foo'), - ); + checkAuthzPolicyAuthzRuleHeaderMatch(o[0]); + checkAuthzPolicyAuthzRuleHeaderMatch(o[1]); } -core.int buildCounterFirewallEndpoint = 0; -api.FirewallEndpoint buildFirewallEndpoint() { - final o = api.FirewallEndpoint(); - buildCounterFirewallEndpoint++; - if (buildCounterFirewallEndpoint < 3) { - o.associatedNetworks = buildUnnamed11(); - o.associations = buildUnnamed12(); - o.billingProjectId = 'foo'; - o.createTime = 'foo'; - o.description = 'foo'; - o.labels = buildUnnamed13(); - o.name = 'foo'; - o.reconciling = true; - o.state = 'foo'; - o.updateTime = 'foo'; +core.int buildCounterAuthzPolicyAuthzRuleToRequestOperationHeaderSet = 0; +api.AuthzPolicyAuthzRuleToRequestOperationHeaderSet + buildAuthzPolicyAuthzRuleToRequestOperationHeaderSet() { + final o = 
api.AuthzPolicyAuthzRuleToRequestOperationHeaderSet(); + buildCounterAuthzPolicyAuthzRuleToRequestOperationHeaderSet++; + if (buildCounterAuthzPolicyAuthzRuleToRequestOperationHeaderSet < 3) { + o.headers = buildUnnamed18(); } - buildCounterFirewallEndpoint--; + buildCounterAuthzPolicyAuthzRuleToRequestOperationHeaderSet--; return o; } -void checkFirewallEndpoint(api.FirewallEndpoint o) { - buildCounterFirewallEndpoint++; - if (buildCounterFirewallEndpoint < 3) { - checkUnnamed11(o.associatedNetworks!); - checkUnnamed12(o.associations!); - unittest.expect( - o.billingProjectId!, - unittest.equals('foo'), - ); - unittest.expect( - o.createTime!, - unittest.equals('foo'), - ); - unittest.expect( - o.description!, - unittest.equals('foo'), - ); - checkUnnamed13(o.labels!); - unittest.expect( - o.name!, - unittest.equals('foo'), - ); - unittest.expect(o.reconciling!, unittest.isTrue); +void checkAuthzPolicyAuthzRuleToRequestOperationHeaderSet( + api.AuthzPolicyAuthzRuleToRequestOperationHeaderSet o) { + buildCounterAuthzPolicyAuthzRuleToRequestOperationHeaderSet++; + if (buildCounterAuthzPolicyAuthzRuleToRequestOperationHeaderSet < 3) { + checkUnnamed18(o.headers!); + } + buildCounterAuthzPolicyAuthzRuleToRequestOperationHeaderSet--; +} + +core.int buildCounterAuthzPolicyCustomProvider = 0; +api.AuthzPolicyCustomProvider buildAuthzPolicyCustomProvider() { + final o = api.AuthzPolicyCustomProvider(); + buildCounterAuthzPolicyCustomProvider++; + if (buildCounterAuthzPolicyCustomProvider < 3) { + o.authzExtension = buildAuthzPolicyCustomProviderAuthzExtension(); + o.cloudIap = buildAuthzPolicyCustomProviderCloudIap(); + } + buildCounterAuthzPolicyCustomProvider--; + return o; +} + +void checkAuthzPolicyCustomProvider(api.AuthzPolicyCustomProvider o) { + buildCounterAuthzPolicyCustomProvider++; + if (buildCounterAuthzPolicyCustomProvider < 3) { + checkAuthzPolicyCustomProviderAuthzExtension(o.authzExtension!); + checkAuthzPolicyCustomProviderCloudIap(o.cloudIap!); + } + buildCounterAuthzPolicyCustomProvider--; +} + +core.List buildUnnamed19() => [ + 'foo', + 'foo', + ]; + +void checkUnnamed19(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o[0], + unittest.equals('foo'), + ); + unittest.expect( + o[1], + unittest.equals('foo'), + ); +} + +core.int buildCounterAuthzPolicyCustomProviderAuthzExtension = 0; +api.AuthzPolicyCustomProviderAuthzExtension + buildAuthzPolicyCustomProviderAuthzExtension() { + final o = api.AuthzPolicyCustomProviderAuthzExtension(); + buildCounterAuthzPolicyCustomProviderAuthzExtension++; + if (buildCounterAuthzPolicyCustomProviderAuthzExtension < 3) { + o.resources = buildUnnamed19(); + } + buildCounterAuthzPolicyCustomProviderAuthzExtension--; + return o; +} + +void checkAuthzPolicyCustomProviderAuthzExtension( + api.AuthzPolicyCustomProviderAuthzExtension o) { + buildCounterAuthzPolicyCustomProviderAuthzExtension++; + if (buildCounterAuthzPolicyCustomProviderAuthzExtension < 3) { + checkUnnamed19(o.resources!); + } + buildCounterAuthzPolicyCustomProviderAuthzExtension--; +} + +core.int buildCounterAuthzPolicyCustomProviderCloudIap = 0; +api.AuthzPolicyCustomProviderCloudIap buildAuthzPolicyCustomProviderCloudIap() { + final o = api.AuthzPolicyCustomProviderCloudIap(); + buildCounterAuthzPolicyCustomProviderCloudIap++; + if (buildCounterAuthzPolicyCustomProviderCloudIap < 3) {} + buildCounterAuthzPolicyCustomProviderCloudIap--; + return o; +} + +void checkAuthzPolicyCustomProviderCloudIap( + api.AuthzPolicyCustomProviderCloudIap o) { + 
buildCounterAuthzPolicyCustomProviderCloudIap++; + if (buildCounterAuthzPolicyCustomProviderCloudIap < 3) {} + buildCounterAuthzPolicyCustomProviderCloudIap--; +} + +core.List buildUnnamed20() => [ + 'foo', + 'foo', + ]; + +void checkUnnamed20(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o[0], + unittest.equals('foo'), + ); + unittest.expect( + o[1], + unittest.equals('foo'), + ); +} + +core.int buildCounterAuthzPolicyTarget = 0; +api.AuthzPolicyTarget buildAuthzPolicyTarget() { + final o = api.AuthzPolicyTarget(); + buildCounterAuthzPolicyTarget++; + if (buildCounterAuthzPolicyTarget < 3) { + o.loadBalancingScheme = 'foo'; + o.resources = buildUnnamed20(); + } + buildCounterAuthzPolicyTarget--; + return o; +} + +void checkAuthzPolicyTarget(api.AuthzPolicyTarget o) { + buildCounterAuthzPolicyTarget++; + if (buildCounterAuthzPolicyTarget < 3) { unittest.expect( - o.state!, + o.loadBalancingScheme!, unittest.equals('foo'), ); + checkUnnamed20(o.resources!); + } + buildCounterAuthzPolicyTarget--; +} + +core.int buildCounterCancelOperationRequest = 0; +api.CancelOperationRequest buildCancelOperationRequest() { + final o = api.CancelOperationRequest(); + buildCounterCancelOperationRequest++; + if (buildCounterCancelOperationRequest < 3) {} + buildCounterCancelOperationRequest--; + return o; +} + +void checkCancelOperationRequest(api.CancelOperationRequest o) { + buildCounterCancelOperationRequest++; + if (buildCounterCancelOperationRequest < 3) {} + buildCounterCancelOperationRequest--; +} + +core.int buildCounterCertificateProviderInstance = 0; +api.CertificateProviderInstance buildCertificateProviderInstance() { + final o = api.CertificateProviderInstance(); + buildCounterCertificateProviderInstance++; + if (buildCounterCertificateProviderInstance < 3) { + o.pluginInstance = 'foo'; + } + buildCounterCertificateProviderInstance--; + return o; +} + +void checkCertificateProviderInstance(api.CertificateProviderInstance o) { + buildCounterCertificateProviderInstance++; + if (buildCounterCertificateProviderInstance < 3) { unittest.expect( - o.updateTime!, + o.pluginInstance!, unittest.equals('foo'), ); } - buildCounterFirewallEndpoint--; + buildCounterCertificateProviderInstance--; } -core.Map buildUnnamed14() => { +core.Map buildUnnamed21() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed14(core.Map o) { +void checkUnnamed21(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -658,54 +879,56 @@ void checkUnnamed14(core.Map o) { ); } -core.int buildCounterFirewallEndpointAssociation = 0; -api.FirewallEndpointAssociation buildFirewallEndpointAssociation() { - final o = api.FirewallEndpointAssociation(); - buildCounterFirewallEndpointAssociation++; - if (buildCounterFirewallEndpointAssociation < 3) { +core.List buildUnnamed22() => [ + buildValidationCA(), + buildValidationCA(), + ]; + +void checkUnnamed22(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkValidationCA(o[0]); + checkValidationCA(o[1]); +} + +core.int buildCounterClientTlsPolicy = 0; +api.ClientTlsPolicy buildClientTlsPolicy() { + final o = api.ClientTlsPolicy(); + buildCounterClientTlsPolicy++; + if (buildCounterClientTlsPolicy < 3) { + o.clientCertificate = + buildGoogleCloudNetworksecurityV1CertificateProvider(); o.createTime = 'foo'; - o.disabled = true; - o.firewallEndpoint = 'foo'; - o.labels = buildUnnamed14(); + o.description = 'foo'; + o.labels = buildUnnamed21(); o.name = 'foo'; - o.network = 'foo'; - o.reconciling = true; - o.state = 
'foo'; - o.tlsInspectionPolicy = 'foo'; + o.serverValidationCa = buildUnnamed22(); + o.sni = 'foo'; o.updateTime = 'foo'; } - buildCounterFirewallEndpointAssociation--; + buildCounterClientTlsPolicy--; return o; } -void checkFirewallEndpointAssociation(api.FirewallEndpointAssociation o) { - buildCounterFirewallEndpointAssociation++; - if (buildCounterFirewallEndpointAssociation < 3) { +void checkClientTlsPolicy(api.ClientTlsPolicy o) { + buildCounterClientTlsPolicy++; + if (buildCounterClientTlsPolicy < 3) { + checkGoogleCloudNetworksecurityV1CertificateProvider(o.clientCertificate!); unittest.expect( o.createTime!, unittest.equals('foo'), ); - unittest.expect(o.disabled!, unittest.isTrue); unittest.expect( - o.firewallEndpoint!, + o.description!, unittest.equals('foo'), ); - checkUnnamed14(o.labels!); + checkUnnamed21(o.labels!); unittest.expect( o.name!, unittest.equals('foo'), ); + checkUnnamed22(o.serverValidationCa!); unittest.expect( - o.network!, - unittest.equals('foo'), - ); - unittest.expect(o.reconciling!, unittest.isTrue); - unittest.expect( - o.state!, - unittest.equals('foo'), - ); - unittest.expect( - o.tlsInspectionPolicy!, + o.sni!, unittest.equals('foo'), ); unittest.expect( @@ -713,229 +936,214 @@ void checkFirewallEndpointAssociation(api.FirewallEndpointAssociation o) { unittest.equals('foo'), ); } - buildCounterFirewallEndpointAssociation--; + buildCounterClientTlsPolicy--; } -core.int buildCounterFirewallEndpointAssociationReference = 0; -api.FirewallEndpointAssociationReference - buildFirewallEndpointAssociationReference() { - final o = api.FirewallEndpointAssociationReference(); - buildCounterFirewallEndpointAssociationReference++; - if (buildCounterFirewallEndpointAssociationReference < 3) { - o.name = 'foo'; - o.network = 'foo'; +core.int buildCounterCloneAddressGroupItemsRequest = 0; +api.CloneAddressGroupItemsRequest buildCloneAddressGroupItemsRequest() { + final o = api.CloneAddressGroupItemsRequest(); + buildCounterCloneAddressGroupItemsRequest++; + if (buildCounterCloneAddressGroupItemsRequest < 3) { + o.requestId = 'foo'; + o.sourceAddressGroup = 'foo'; } - buildCounterFirewallEndpointAssociationReference--; + buildCounterCloneAddressGroupItemsRequest--; return o; } -void checkFirewallEndpointAssociationReference( - api.FirewallEndpointAssociationReference o) { - buildCounterFirewallEndpointAssociationReference++; - if (buildCounterFirewallEndpointAssociationReference < 3) { +void checkCloneAddressGroupItemsRequest(api.CloneAddressGroupItemsRequest o) { + buildCounterCloneAddressGroupItemsRequest++; + if (buildCounterCloneAddressGroupItemsRequest < 3) { unittest.expect( - o.name!, + o.requestId!, unittest.equals('foo'), ); unittest.expect( - o.network!, + o.sourceAddressGroup!, unittest.equals('foo'), ); } - buildCounterFirewallEndpointAssociationReference--; + buildCounterCloneAddressGroupItemsRequest--; } -core.int buildCounterGatewaySecurityPolicy = 0; -api.GatewaySecurityPolicy buildGatewaySecurityPolicy() { - final o = api.GatewaySecurityPolicy(); - buildCounterGatewaySecurityPolicy++; - if (buildCounterGatewaySecurityPolicy < 3) { - o.createTime = 'foo'; - o.description = 'foo'; - o.name = 'foo'; - o.tlsInspectionPolicy = 'foo'; - o.updateTime = 'foo'; +core.int buildCounterCustomInterceptProfile = 0; +api.CustomInterceptProfile buildCustomInterceptProfile() { + final o = api.CustomInterceptProfile(); + buildCounterCustomInterceptProfile++; + if (buildCounterCustomInterceptProfile < 3) { + o.interceptEndpointGroup = 'foo'; } - 
buildCounterGatewaySecurityPolicy--; + buildCounterCustomInterceptProfile--; return o; } -void checkGatewaySecurityPolicy(api.GatewaySecurityPolicy o) { - buildCounterGatewaySecurityPolicy++; - if (buildCounterGatewaySecurityPolicy < 3) { +void checkCustomInterceptProfile(api.CustomInterceptProfile o) { + buildCounterCustomInterceptProfile++; + if (buildCounterCustomInterceptProfile < 3) { unittest.expect( - o.createTime!, - unittest.equals('foo'), - ); - unittest.expect( - o.description!, - unittest.equals('foo'), - ); - unittest.expect( - o.name!, - unittest.equals('foo'), - ); - unittest.expect( - o.tlsInspectionPolicy!, + o.interceptEndpointGroup!, unittest.equals('foo'), ); + } + buildCounterCustomInterceptProfile--; +} + +core.int buildCounterCustomMirroringProfile = 0; +api.CustomMirroringProfile buildCustomMirroringProfile() { + final o = api.CustomMirroringProfile(); + buildCounterCustomMirroringProfile++; + if (buildCounterCustomMirroringProfile < 3) { + o.mirroringEndpointGroup = 'foo'; + } + buildCounterCustomMirroringProfile--; + return o; +} + +void checkCustomMirroringProfile(api.CustomMirroringProfile o) { + buildCounterCustomMirroringProfile++; + if (buildCounterCustomMirroringProfile < 3) { unittest.expect( - o.updateTime!, + o.mirroringEndpointGroup!, unittest.equals('foo'), ); } - buildCounterGatewaySecurityPolicy--; + buildCounterCustomMirroringProfile--; } -core.int buildCounterGatewaySecurityPolicyRule = 0; -api.GatewaySecurityPolicyRule buildGatewaySecurityPolicyRule() { - final o = api.GatewaySecurityPolicyRule(); - buildCounterGatewaySecurityPolicyRule++; - if (buildCounterGatewaySecurityPolicyRule < 3) { - o.applicationMatcher = 'foo'; - o.basicProfile = 'foo'; - o.createTime = 'foo'; - o.description = 'foo'; - o.enabled = true; - o.name = 'foo'; - o.priority = 42; - o.sessionMatcher = 'foo'; - o.tlsInspectionEnabled = true; - o.updateTime = 'foo'; - } - buildCounterGatewaySecurityPolicyRule--; - return o; -} +core.List buildUnnamed23() => [ + 'foo', + 'foo', + ]; -void checkGatewaySecurityPolicyRule(api.GatewaySecurityPolicyRule o) { - buildCounterGatewaySecurityPolicyRule++; - if (buildCounterGatewaySecurityPolicyRule < 3) { - unittest.expect( - o.applicationMatcher!, - unittest.equals('foo'), - ); - unittest.expect( - o.basicProfile!, - unittest.equals('foo'), - ); - unittest.expect( - o.createTime!, - unittest.equals('foo'), - ); - unittest.expect( - o.description!, - unittest.equals('foo'), - ); - unittest.expect(o.enabled!, unittest.isTrue); - unittest.expect( - o.name!, - unittest.equals('foo'), - ); - unittest.expect( - o.priority!, - unittest.equals(42), - ); - unittest.expect( - o.sessionMatcher!, - unittest.equals('foo'), - ); - unittest.expect(o.tlsInspectionEnabled!, unittest.isTrue); - unittest.expect( - o.updateTime!, - unittest.equals('foo'), - ); - } - buildCounterGatewaySecurityPolicyRule--; +void checkUnnamed23(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o[0], + unittest.equals('foo'), + ); + unittest.expect( + o[1], + unittest.equals('foo'), + ); } -core.int buildCounterGoogleCloudNetworksecurityV1CertificateProvider = 0; -api.GoogleCloudNetworksecurityV1CertificateProvider - buildGoogleCloudNetworksecurityV1CertificateProvider() { - final o = api.GoogleCloudNetworksecurityV1CertificateProvider(); - buildCounterGoogleCloudNetworksecurityV1CertificateProvider++; - if (buildCounterGoogleCloudNetworksecurityV1CertificateProvider < 3) { - o.certificateProviderInstance = buildCertificateProviderInstance(); - 
o.grpcEndpoint = buildGoogleCloudNetworksecurityV1GrpcEndpoint(); - } - buildCounterGoogleCloudNetworksecurityV1CertificateProvider--; - return o; +core.List buildUnnamed24() => [ + 'foo', + 'foo', + ]; + +void checkUnnamed24(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o[0], + unittest.equals('foo'), + ); + unittest.expect( + o[1], + unittest.equals('foo'), + ); } -void checkGoogleCloudNetworksecurityV1CertificateProvider( - api.GoogleCloudNetworksecurityV1CertificateProvider o) { - buildCounterGoogleCloudNetworksecurityV1CertificateProvider++; - if (buildCounterGoogleCloudNetworksecurityV1CertificateProvider < 3) { - checkCertificateProviderInstance(o.certificateProviderInstance!); - checkGoogleCloudNetworksecurityV1GrpcEndpoint(o.grpcEndpoint!); - } - buildCounterGoogleCloudNetworksecurityV1CertificateProvider--; +core.List buildUnnamed25() => [ + 42, + 42, + ]; + +void checkUnnamed25(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o[0], + unittest.equals(42), + ); + unittest.expect( + o[1], + unittest.equals(42), + ); } -core.int buildCounterGoogleCloudNetworksecurityV1GrpcEndpoint = 0; -api.GoogleCloudNetworksecurityV1GrpcEndpoint - buildGoogleCloudNetworksecurityV1GrpcEndpoint() { - final o = api.GoogleCloudNetworksecurityV1GrpcEndpoint(); - buildCounterGoogleCloudNetworksecurityV1GrpcEndpoint++; - if (buildCounterGoogleCloudNetworksecurityV1GrpcEndpoint < 3) { - o.targetUri = 'foo'; +core.int buildCounterDestination = 0; +api.Destination buildDestination() { + final o = api.Destination(); + buildCounterDestination++; + if (buildCounterDestination < 3) { + o.hosts = buildUnnamed23(); + o.httpHeaderMatch = buildHttpHeaderMatch(); + o.methods = buildUnnamed24(); + o.ports = buildUnnamed25(); } - buildCounterGoogleCloudNetworksecurityV1GrpcEndpoint--; + buildCounterDestination--; return o; } -void checkGoogleCloudNetworksecurityV1GrpcEndpoint( - api.GoogleCloudNetworksecurityV1GrpcEndpoint o) { - buildCounterGoogleCloudNetworksecurityV1GrpcEndpoint++; - if (buildCounterGoogleCloudNetworksecurityV1GrpcEndpoint < 3) { - unittest.expect( - o.targetUri!, - unittest.equals('foo'), - ); +void checkDestination(api.Destination o) { + buildCounterDestination++; + if (buildCounterDestination < 3) { + checkUnnamed23(o.hosts!); + checkHttpHeaderMatch(o.httpHeaderMatch!); + checkUnnamed24(o.methods!); + checkUnnamed25(o.ports!); } - buildCounterGoogleCloudNetworksecurityV1GrpcEndpoint--; + buildCounterDestination--; } -core.List buildUnnamed15() => [ - buildGoogleIamV1AuditLogConfig(), - buildGoogleIamV1AuditLogConfig(), - ]; +core.int buildCounterEmpty = 0; +api.Empty buildEmpty() { + final o = api.Empty(); + buildCounterEmpty++; + if (buildCounterEmpty < 3) {} + buildCounterEmpty--; + return o; +} -void checkUnnamed15(core.List o) { - unittest.expect(o, unittest.hasLength(2)); - checkGoogleIamV1AuditLogConfig(o[0]); - checkGoogleIamV1AuditLogConfig(o[1]); +void checkEmpty(api.Empty o) { + buildCounterEmpty++; + if (buildCounterEmpty < 3) {} + buildCounterEmpty--; } -core.int buildCounterGoogleIamV1AuditConfig = 0; -api.GoogleIamV1AuditConfig buildGoogleIamV1AuditConfig() { - final o = api.GoogleIamV1AuditConfig(); - buildCounterGoogleIamV1AuditConfig++; - if (buildCounterGoogleIamV1AuditConfig < 3) { - o.auditLogConfigs = buildUnnamed15(); - o.service = 'foo'; +core.int buildCounterExpr = 0; +api.Expr buildExpr() { + final o = api.Expr(); + buildCounterExpr++; + if (buildCounterExpr < 3) { + o.description = 'foo'; + o.expression 
= 'foo'; + o.location = 'foo'; + o.title = 'foo'; } - buildCounterGoogleIamV1AuditConfig--; + buildCounterExpr--; return o; } -void checkGoogleIamV1AuditConfig(api.GoogleIamV1AuditConfig o) { - buildCounterGoogleIamV1AuditConfig++; - if (buildCounterGoogleIamV1AuditConfig < 3) { - checkUnnamed15(o.auditLogConfigs!); +void checkExpr(api.Expr o) { + buildCounterExpr++; + if (buildCounterExpr < 3) { unittest.expect( - o.service!, + o.description!, + unittest.equals('foo'), + ); + unittest.expect( + o.expression!, + unittest.equals('foo'), + ); + unittest.expect( + o.location!, + unittest.equals('foo'), + ); + unittest.expect( + o.title!, unittest.equals('foo'), ); } - buildCounterGoogleIamV1AuditConfig--; + buildCounterExpr--; } -core.List buildUnnamed16() => [ +core.List buildUnnamed26() => [ 'foo', 'foo', ]; -void checkUnnamed16(core.List o) { +void checkUnnamed26(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -947,450 +1155,384 @@ void checkUnnamed16(core.List o) { ); } -core.int buildCounterGoogleIamV1AuditLogConfig = 0; -api.GoogleIamV1AuditLogConfig buildGoogleIamV1AuditLogConfig() { - final o = api.GoogleIamV1AuditLogConfig(); - buildCounterGoogleIamV1AuditLogConfig++; - if (buildCounterGoogleIamV1AuditLogConfig < 3) { - o.exemptedMembers = buildUnnamed16(); - o.logType = 'foo'; - } - buildCounterGoogleIamV1AuditLogConfig--; - return o; -} +core.List buildUnnamed27() => [ + buildFirewallEndpointAssociationReference(), + buildFirewallEndpointAssociationReference(), + ]; -void checkGoogleIamV1AuditLogConfig(api.GoogleIamV1AuditLogConfig o) { - buildCounterGoogleIamV1AuditLogConfig++; - if (buildCounterGoogleIamV1AuditLogConfig < 3) { - checkUnnamed16(o.exemptedMembers!); - unittest.expect( - o.logType!, - unittest.equals('foo'), - ); - } - buildCounterGoogleIamV1AuditLogConfig--; +void checkUnnamed27(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkFirewallEndpointAssociationReference(o[0]); + checkFirewallEndpointAssociationReference(o[1]); } -core.List buildUnnamed17() => [ - 'foo', - 'foo', - ]; +core.Map buildUnnamed28() => { + 'x': 'foo', + 'y': 'foo', + }; -void checkUnnamed17(core.List o) { +void checkUnnamed28(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( - o[0], + o['x']!, unittest.equals('foo'), ); unittest.expect( - o[1], + o['y']!, unittest.equals('foo'), ); } -core.int buildCounterGoogleIamV1Binding = 0; -api.GoogleIamV1Binding buildGoogleIamV1Binding() { - final o = api.GoogleIamV1Binding(); - buildCounterGoogleIamV1Binding++; - if (buildCounterGoogleIamV1Binding < 3) { - o.condition = buildExpr(); - o.members = buildUnnamed17(); - o.role = 'foo'; - } - buildCounterGoogleIamV1Binding--; - return o; -} - -void checkGoogleIamV1Binding(api.GoogleIamV1Binding o) { - buildCounterGoogleIamV1Binding++; - if (buildCounterGoogleIamV1Binding < 3) { - checkExpr(o.condition!); - checkUnnamed17(o.members!); - unittest.expect( - o.role!, - unittest.equals('foo'), - ); - } - buildCounterGoogleIamV1Binding--; -} - -core.List buildUnnamed18() => [ - buildGoogleIamV1AuditConfig(), - buildGoogleIamV1AuditConfig(), - ]; - -void checkUnnamed18(core.List o) { - unittest.expect(o, unittest.hasLength(2)); - checkGoogleIamV1AuditConfig(o[0]); - checkGoogleIamV1AuditConfig(o[1]); -} - -core.List buildUnnamed19() => [ - buildGoogleIamV1Binding(), - buildGoogleIamV1Binding(), - ]; - -void checkUnnamed19(core.List o) { - unittest.expect(o, unittest.hasLength(2)); - checkGoogleIamV1Binding(o[0]); - 
checkGoogleIamV1Binding(o[1]); -} - -core.int buildCounterGoogleIamV1Policy = 0; -api.GoogleIamV1Policy buildGoogleIamV1Policy() { - final o = api.GoogleIamV1Policy(); - buildCounterGoogleIamV1Policy++; - if (buildCounterGoogleIamV1Policy < 3) { - o.auditConfigs = buildUnnamed18(); - o.bindings = buildUnnamed19(); - o.etag = 'foo'; - o.version = 42; +core.int buildCounterFirewallEndpoint = 0; +api.FirewallEndpoint buildFirewallEndpoint() { + final o = api.FirewallEndpoint(); + buildCounterFirewallEndpoint++; + if (buildCounterFirewallEndpoint < 3) { + o.associatedNetworks = buildUnnamed26(); + o.associations = buildUnnamed27(); + o.billingProjectId = 'foo'; + o.createTime = 'foo'; + o.description = 'foo'; + o.labels = buildUnnamed28(); + o.name = 'foo'; + o.reconciling = true; + o.state = 'foo'; + o.updateTime = 'foo'; } - buildCounterGoogleIamV1Policy--; + buildCounterFirewallEndpoint--; return o; } -void checkGoogleIamV1Policy(api.GoogleIamV1Policy o) { - buildCounterGoogleIamV1Policy++; - if (buildCounterGoogleIamV1Policy < 3) { - checkUnnamed18(o.auditConfigs!); - checkUnnamed19(o.bindings!); +void checkFirewallEndpoint(api.FirewallEndpoint o) { + buildCounterFirewallEndpoint++; + if (buildCounterFirewallEndpoint < 3) { + checkUnnamed26(o.associatedNetworks!); + checkUnnamed27(o.associations!); unittest.expect( - o.etag!, + o.billingProjectId!, unittest.equals('foo'), ); unittest.expect( - o.version!, - unittest.equals(42), + o.createTime!, + unittest.equals('foo'), ); - } - buildCounterGoogleIamV1Policy--; -} - -core.int buildCounterGoogleIamV1SetIamPolicyRequest = 0; -api.GoogleIamV1SetIamPolicyRequest buildGoogleIamV1SetIamPolicyRequest() { - final o = api.GoogleIamV1SetIamPolicyRequest(); - buildCounterGoogleIamV1SetIamPolicyRequest++; - if (buildCounterGoogleIamV1SetIamPolicyRequest < 3) { - o.policy = buildGoogleIamV1Policy(); - o.updateMask = 'foo'; - } - buildCounterGoogleIamV1SetIamPolicyRequest--; - return o; -} - -void checkGoogleIamV1SetIamPolicyRequest(api.GoogleIamV1SetIamPolicyRequest o) { - buildCounterGoogleIamV1SetIamPolicyRequest++; - if (buildCounterGoogleIamV1SetIamPolicyRequest < 3) { - checkGoogleIamV1Policy(o.policy!); unittest.expect( - o.updateMask!, + o.description!, + unittest.equals('foo'), + ); + checkUnnamed28(o.labels!); + unittest.expect( + o.name!, + unittest.equals('foo'), + ); + unittest.expect(o.reconciling!, unittest.isTrue); + unittest.expect( + o.state!, + unittest.equals('foo'), + ); + unittest.expect( + o.updateTime!, unittest.equals('foo'), ); } - buildCounterGoogleIamV1SetIamPolicyRequest--; + buildCounterFirewallEndpoint--; } -core.List buildUnnamed20() => [ - 'foo', - 'foo', - ]; +core.Map buildUnnamed29() => { + 'x': 'foo', + 'y': 'foo', + }; -void checkUnnamed20(core.List o) { +void checkUnnamed29(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( - o[0], + o['x']!, unittest.equals('foo'), ); unittest.expect( - o[1], + o['y']!, unittest.equals('foo'), ); } -core.int buildCounterGoogleIamV1TestIamPermissionsRequest = 0; -api.GoogleIamV1TestIamPermissionsRequest - buildGoogleIamV1TestIamPermissionsRequest() { - final o = api.GoogleIamV1TestIamPermissionsRequest(); - buildCounterGoogleIamV1TestIamPermissionsRequest++; - if (buildCounterGoogleIamV1TestIamPermissionsRequest < 3) { - o.permissions = buildUnnamed20(); +core.int buildCounterFirewallEndpointAssociation = 0; +api.FirewallEndpointAssociation buildFirewallEndpointAssociation() { + final o = api.FirewallEndpointAssociation(); + 
buildCounterFirewallEndpointAssociation++; + if (buildCounterFirewallEndpointAssociation < 3) { + o.createTime = 'foo'; + o.disabled = true; + o.firewallEndpoint = 'foo'; + o.labels = buildUnnamed29(); + o.name = 'foo'; + o.network = 'foo'; + o.reconciling = true; + o.state = 'foo'; + o.tlsInspectionPolicy = 'foo'; + o.updateTime = 'foo'; } - buildCounterGoogleIamV1TestIamPermissionsRequest--; + buildCounterFirewallEndpointAssociation--; return o; } -void checkGoogleIamV1TestIamPermissionsRequest( - api.GoogleIamV1TestIamPermissionsRequest o) { - buildCounterGoogleIamV1TestIamPermissionsRequest++; - if (buildCounterGoogleIamV1TestIamPermissionsRequest < 3) { - checkUnnamed20(o.permissions!); +void checkFirewallEndpointAssociation(api.FirewallEndpointAssociation o) { + buildCounterFirewallEndpointAssociation++; + if (buildCounterFirewallEndpointAssociation < 3) { + unittest.expect( + o.createTime!, + unittest.equals('foo'), + ); + unittest.expect(o.disabled!, unittest.isTrue); + unittest.expect( + o.firewallEndpoint!, + unittest.equals('foo'), + ); + checkUnnamed29(o.labels!); + unittest.expect( + o.name!, + unittest.equals('foo'), + ); + unittest.expect( + o.network!, + unittest.equals('foo'), + ); + unittest.expect(o.reconciling!, unittest.isTrue); + unittest.expect( + o.state!, + unittest.equals('foo'), + ); + unittest.expect( + o.tlsInspectionPolicy!, + unittest.equals('foo'), + ); + unittest.expect( + o.updateTime!, + unittest.equals('foo'), + ); } - buildCounterGoogleIamV1TestIamPermissionsRequest--; -} - -core.List buildUnnamed21() => [ - 'foo', - 'foo', - ]; - -void checkUnnamed21(core.List o) { - unittest.expect(o, unittest.hasLength(2)); - unittest.expect( - o[0], - unittest.equals('foo'), - ); - unittest.expect( - o[1], - unittest.equals('foo'), - ); + buildCounterFirewallEndpointAssociation--; } -core.int buildCounterGoogleIamV1TestIamPermissionsResponse = 0; -api.GoogleIamV1TestIamPermissionsResponse - buildGoogleIamV1TestIamPermissionsResponse() { - final o = api.GoogleIamV1TestIamPermissionsResponse(); - buildCounterGoogleIamV1TestIamPermissionsResponse++; - if (buildCounterGoogleIamV1TestIamPermissionsResponse < 3) { - o.permissions = buildUnnamed21(); +core.int buildCounterFirewallEndpointAssociationReference = 0; +api.FirewallEndpointAssociationReference + buildFirewallEndpointAssociationReference() { + final o = api.FirewallEndpointAssociationReference(); + buildCounterFirewallEndpointAssociationReference++; + if (buildCounterFirewallEndpointAssociationReference < 3) { + o.name = 'foo'; + o.network = 'foo'; } - buildCounterGoogleIamV1TestIamPermissionsResponse--; + buildCounterFirewallEndpointAssociationReference--; return o; } -void checkGoogleIamV1TestIamPermissionsResponse( - api.GoogleIamV1TestIamPermissionsResponse o) { - buildCounterGoogleIamV1TestIamPermissionsResponse++; - if (buildCounterGoogleIamV1TestIamPermissionsResponse < 3) { - checkUnnamed21(o.permissions!); +void checkFirewallEndpointAssociationReference( + api.FirewallEndpointAssociationReference o) { + buildCounterFirewallEndpointAssociationReference++; + if (buildCounterFirewallEndpointAssociationReference < 3) { + unittest.expect( + o.name!, + unittest.equals('foo'), + ); + unittest.expect( + o.network!, + unittest.equals('foo'), + ); } - buildCounterGoogleIamV1TestIamPermissionsResponse--; + buildCounterFirewallEndpointAssociationReference--; } -core.int buildCounterHttpHeaderMatch = 0; -api.HttpHeaderMatch buildHttpHeaderMatch() { - final o = api.HttpHeaderMatch(); - buildCounterHttpHeaderMatch++; - 
if (buildCounterHttpHeaderMatch < 3) { - o.headerName = 'foo'; - o.regexMatch = 'foo'; +core.int buildCounterGatewaySecurityPolicy = 0; +api.GatewaySecurityPolicy buildGatewaySecurityPolicy() { + final o = api.GatewaySecurityPolicy(); + buildCounterGatewaySecurityPolicy++; + if (buildCounterGatewaySecurityPolicy < 3) { + o.createTime = 'foo'; + o.description = 'foo'; + o.name = 'foo'; + o.tlsInspectionPolicy = 'foo'; + o.updateTime = 'foo'; } - buildCounterHttpHeaderMatch--; + buildCounterGatewaySecurityPolicy--; return o; } -void checkHttpHeaderMatch(api.HttpHeaderMatch o) { - buildCounterHttpHeaderMatch++; - if (buildCounterHttpHeaderMatch < 3) { +void checkGatewaySecurityPolicy(api.GatewaySecurityPolicy o) { + buildCounterGatewaySecurityPolicy++; + if (buildCounterGatewaySecurityPolicy < 3) { unittest.expect( - o.headerName!, + o.createTime!, unittest.equals('foo'), ); unittest.expect( - o.regexMatch!, + o.description!, unittest.equals('foo'), ); - } - buildCounterHttpHeaderMatch--; -} - -core.List - buildUnnamed22() => [ - buildListAddressGroupReferencesResponseAddressGroupReference(), - buildListAddressGroupReferencesResponseAddressGroupReference(), - ]; - -void checkUnnamed22( - core.List o) { - unittest.expect(o, unittest.hasLength(2)); - checkListAddressGroupReferencesResponseAddressGroupReference(o[0]); - checkListAddressGroupReferencesResponseAddressGroupReference(o[1]); -} - -core.int buildCounterListAddressGroupReferencesResponse = 0; -api.ListAddressGroupReferencesResponse - buildListAddressGroupReferencesResponse() { - final o = api.ListAddressGroupReferencesResponse(); - buildCounterListAddressGroupReferencesResponse++; - if (buildCounterListAddressGroupReferencesResponse < 3) { - o.addressGroupReferences = buildUnnamed22(); - o.nextPageToken = 'foo'; - } - buildCounterListAddressGroupReferencesResponse--; - return o; -} - -void checkListAddressGroupReferencesResponse( - api.ListAddressGroupReferencesResponse o) { - buildCounterListAddressGroupReferencesResponse++; - if (buildCounterListAddressGroupReferencesResponse < 3) { - checkUnnamed22(o.addressGroupReferences!); unittest.expect( - o.nextPageToken!, + o.name!, + unittest.equals('foo'), + ); + unittest.expect( + o.tlsInspectionPolicy!, + unittest.equals('foo'), + ); + unittest.expect( + o.updateTime!, unittest.equals('foo'), ); } - buildCounterListAddressGroupReferencesResponse--; + buildCounterGatewaySecurityPolicy--; } -core.int buildCounterListAddressGroupReferencesResponseAddressGroupReference = - 0; -api.ListAddressGroupReferencesResponseAddressGroupReference - buildListAddressGroupReferencesResponseAddressGroupReference() { - final o = api.ListAddressGroupReferencesResponseAddressGroupReference(); - buildCounterListAddressGroupReferencesResponseAddressGroupReference++; - if (buildCounterListAddressGroupReferencesResponseAddressGroupReference < 3) { - o.firewallPolicy = 'foo'; - o.rulePriority = 42; - o.securityPolicy = 'foo'; +core.int buildCounterGatewaySecurityPolicyRule = 0; +api.GatewaySecurityPolicyRule buildGatewaySecurityPolicyRule() { + final o = api.GatewaySecurityPolicyRule(); + buildCounterGatewaySecurityPolicyRule++; + if (buildCounterGatewaySecurityPolicyRule < 3) { + o.applicationMatcher = 'foo'; + o.basicProfile = 'foo'; + o.createTime = 'foo'; + o.description = 'foo'; + o.enabled = true; + o.name = 'foo'; + o.priority = 42; + o.sessionMatcher = 'foo'; + o.tlsInspectionEnabled = true; + o.updateTime = 'foo'; } - buildCounterListAddressGroupReferencesResponseAddressGroupReference--; + 
buildCounterGatewaySecurityPolicyRule--; return o; } -void checkListAddressGroupReferencesResponseAddressGroupReference( - api.ListAddressGroupReferencesResponseAddressGroupReference o) { - buildCounterListAddressGroupReferencesResponseAddressGroupReference++; - if (buildCounterListAddressGroupReferencesResponseAddressGroupReference < 3) { +void checkGatewaySecurityPolicyRule(api.GatewaySecurityPolicyRule o) { + buildCounterGatewaySecurityPolicyRule++; + if (buildCounterGatewaySecurityPolicyRule < 3) { unittest.expect( - o.firewallPolicy!, + o.applicationMatcher!, unittest.equals('foo'), ); unittest.expect( - o.rulePriority!, + o.basicProfile!, + unittest.equals('foo'), + ); + unittest.expect( + o.createTime!, + unittest.equals('foo'), + ); + unittest.expect( + o.description!, + unittest.equals('foo'), + ); + unittest.expect(o.enabled!, unittest.isTrue); + unittest.expect( + o.name!, + unittest.equals('foo'), + ); + unittest.expect( + o.priority!, unittest.equals(42), ); unittest.expect( - o.securityPolicy!, + o.sessionMatcher!, + unittest.equals('foo'), + ); + unittest.expect(o.tlsInspectionEnabled!, unittest.isTrue); + unittest.expect( + o.updateTime!, unittest.equals('foo'), ); } - buildCounterListAddressGroupReferencesResponseAddressGroupReference--; -} - -core.List buildUnnamed23() => [ - buildAddressGroup(), - buildAddressGroup(), - ]; - -void checkUnnamed23(core.List o) { - unittest.expect(o, unittest.hasLength(2)); - checkAddressGroup(o[0]); - checkAddressGroup(o[1]); + buildCounterGatewaySecurityPolicyRule--; } -core.int buildCounterListAddressGroupsResponse = 0; -api.ListAddressGroupsResponse buildListAddressGroupsResponse() { - final o = api.ListAddressGroupsResponse(); - buildCounterListAddressGroupsResponse++; - if (buildCounterListAddressGroupsResponse < 3) { - o.addressGroups = buildUnnamed23(); - o.nextPageToken = 'foo'; +core.int buildCounterGoogleCloudNetworksecurityV1CertificateProvider = 0; +api.GoogleCloudNetworksecurityV1CertificateProvider + buildGoogleCloudNetworksecurityV1CertificateProvider() { + final o = api.GoogleCloudNetworksecurityV1CertificateProvider(); + buildCounterGoogleCloudNetworksecurityV1CertificateProvider++; + if (buildCounterGoogleCloudNetworksecurityV1CertificateProvider < 3) { + o.certificateProviderInstance = buildCertificateProviderInstance(); + o.grpcEndpoint = buildGoogleCloudNetworksecurityV1GrpcEndpoint(); } - buildCounterListAddressGroupsResponse--; + buildCounterGoogleCloudNetworksecurityV1CertificateProvider--; return o; } -void checkListAddressGroupsResponse(api.ListAddressGroupsResponse o) { - buildCounterListAddressGroupsResponse++; - if (buildCounterListAddressGroupsResponse < 3) { - checkUnnamed23(o.addressGroups!); - unittest.expect( - o.nextPageToken!, - unittest.equals('foo'), - ); +void checkGoogleCloudNetworksecurityV1CertificateProvider( + api.GoogleCloudNetworksecurityV1CertificateProvider o) { + buildCounterGoogleCloudNetworksecurityV1CertificateProvider++; + if (buildCounterGoogleCloudNetworksecurityV1CertificateProvider < 3) { + checkCertificateProviderInstance(o.certificateProviderInstance!); + checkGoogleCloudNetworksecurityV1GrpcEndpoint(o.grpcEndpoint!); } - buildCounterListAddressGroupsResponse--; -} - -core.List buildUnnamed24() => [ - buildAuthorizationPolicy(), - buildAuthorizationPolicy(), - ]; - -void checkUnnamed24(core.List o) { - unittest.expect(o, unittest.hasLength(2)); - checkAuthorizationPolicy(o[0]); - checkAuthorizationPolicy(o[1]); + buildCounterGoogleCloudNetworksecurityV1CertificateProvider--; } 
-core.int buildCounterListAuthorizationPoliciesResponse = 0; -api.ListAuthorizationPoliciesResponse buildListAuthorizationPoliciesResponse() { - final o = api.ListAuthorizationPoliciesResponse(); - buildCounterListAuthorizationPoliciesResponse++; - if (buildCounterListAuthorizationPoliciesResponse < 3) { - o.authorizationPolicies = buildUnnamed24(); - o.nextPageToken = 'foo'; +core.int buildCounterGoogleCloudNetworksecurityV1GrpcEndpoint = 0; +api.GoogleCloudNetworksecurityV1GrpcEndpoint + buildGoogleCloudNetworksecurityV1GrpcEndpoint() { + final o = api.GoogleCloudNetworksecurityV1GrpcEndpoint(); + buildCounterGoogleCloudNetworksecurityV1GrpcEndpoint++; + if (buildCounterGoogleCloudNetworksecurityV1GrpcEndpoint < 3) { + o.targetUri = 'foo'; } - buildCounterListAuthorizationPoliciesResponse--; + buildCounterGoogleCloudNetworksecurityV1GrpcEndpoint--; return o; } -void checkListAuthorizationPoliciesResponse( - api.ListAuthorizationPoliciesResponse o) { - buildCounterListAuthorizationPoliciesResponse++; - if (buildCounterListAuthorizationPoliciesResponse < 3) { - checkUnnamed24(o.authorizationPolicies!); +void checkGoogleCloudNetworksecurityV1GrpcEndpoint( + api.GoogleCloudNetworksecurityV1GrpcEndpoint o) { + buildCounterGoogleCloudNetworksecurityV1GrpcEndpoint++; + if (buildCounterGoogleCloudNetworksecurityV1GrpcEndpoint < 3) { unittest.expect( - o.nextPageToken!, + o.targetUri!, unittest.equals('foo'), ); } - buildCounterListAuthorizationPoliciesResponse--; + buildCounterGoogleCloudNetworksecurityV1GrpcEndpoint--; } -core.List buildUnnamed25() => [ - buildClientTlsPolicy(), - buildClientTlsPolicy(), +core.List buildUnnamed30() => [ + buildGoogleIamV1AuditLogConfig(), + buildGoogleIamV1AuditLogConfig(), ]; -void checkUnnamed25(core.List o) { +void checkUnnamed30(core.List o) { unittest.expect(o, unittest.hasLength(2)); - checkClientTlsPolicy(o[0]); - checkClientTlsPolicy(o[1]); + checkGoogleIamV1AuditLogConfig(o[0]); + checkGoogleIamV1AuditLogConfig(o[1]); } -core.int buildCounterListClientTlsPoliciesResponse = 0; -api.ListClientTlsPoliciesResponse buildListClientTlsPoliciesResponse() { - final o = api.ListClientTlsPoliciesResponse(); - buildCounterListClientTlsPoliciesResponse++; - if (buildCounterListClientTlsPoliciesResponse < 3) { - o.clientTlsPolicies = buildUnnamed25(); - o.nextPageToken = 'foo'; +core.int buildCounterGoogleIamV1AuditConfig = 0; +api.GoogleIamV1AuditConfig buildGoogleIamV1AuditConfig() { + final o = api.GoogleIamV1AuditConfig(); + buildCounterGoogleIamV1AuditConfig++; + if (buildCounterGoogleIamV1AuditConfig < 3) { + o.auditLogConfigs = buildUnnamed30(); + o.service = 'foo'; } - buildCounterListClientTlsPoliciesResponse--; + buildCounterGoogleIamV1AuditConfig--; return o; } -void checkListClientTlsPoliciesResponse(api.ListClientTlsPoliciesResponse o) { - buildCounterListClientTlsPoliciesResponse++; - if (buildCounterListClientTlsPoliciesResponse < 3) { - checkUnnamed25(o.clientTlsPolicies!); +void checkGoogleIamV1AuditConfig(api.GoogleIamV1AuditConfig o) { + buildCounterGoogleIamV1AuditConfig++; + if (buildCounterGoogleIamV1AuditConfig < 3) { + checkUnnamed30(o.auditLogConfigs!); unittest.expect( - o.nextPageToken!, + o.service!, unittest.equals('foo'), ); } - buildCounterListClientTlsPoliciesResponse--; -} - -core.List buildUnnamed26() => [ - buildFirewallEndpointAssociation(), - buildFirewallEndpointAssociation(), - ]; - -void checkUnnamed26(core.List o) { - unittest.expect(o, unittest.hasLength(2)); - checkFirewallEndpointAssociation(o[0]); - 
checkFirewallEndpointAssociation(o[1]); + buildCounterGoogleIamV1AuditConfig--; } -core.List buildUnnamed27() => [ +core.List buildUnnamed31() => [ 'foo', 'foo', ]; -void checkUnnamed27(core.List o) { +void checkUnnamed31(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -1402,51 +1544,36 @@ void checkUnnamed27(core.List o) { ); } -core.int buildCounterListFirewallEndpointAssociationsResponse = 0; -api.ListFirewallEndpointAssociationsResponse - buildListFirewallEndpointAssociationsResponse() { - final o = api.ListFirewallEndpointAssociationsResponse(); - buildCounterListFirewallEndpointAssociationsResponse++; - if (buildCounterListFirewallEndpointAssociationsResponse < 3) { - o.firewallEndpointAssociations = buildUnnamed26(); - o.nextPageToken = 'foo'; - o.unreachable = buildUnnamed27(); +core.int buildCounterGoogleIamV1AuditLogConfig = 0; +api.GoogleIamV1AuditLogConfig buildGoogleIamV1AuditLogConfig() { + final o = api.GoogleIamV1AuditLogConfig(); + buildCounterGoogleIamV1AuditLogConfig++; + if (buildCounterGoogleIamV1AuditLogConfig < 3) { + o.exemptedMembers = buildUnnamed31(); + o.logType = 'foo'; } - buildCounterListFirewallEndpointAssociationsResponse--; + buildCounterGoogleIamV1AuditLogConfig--; return o; } -void checkListFirewallEndpointAssociationsResponse( - api.ListFirewallEndpointAssociationsResponse o) { - buildCounterListFirewallEndpointAssociationsResponse++; - if (buildCounterListFirewallEndpointAssociationsResponse < 3) { - checkUnnamed26(o.firewallEndpointAssociations!); +void checkGoogleIamV1AuditLogConfig(api.GoogleIamV1AuditLogConfig o) { + buildCounterGoogleIamV1AuditLogConfig++; + if (buildCounterGoogleIamV1AuditLogConfig < 3) { + checkUnnamed31(o.exemptedMembers!); unittest.expect( - o.nextPageToken!, + o.logType!, unittest.equals('foo'), ); - checkUnnamed27(o.unreachable!); } - buildCounterListFirewallEndpointAssociationsResponse--; -} - -core.List buildUnnamed28() => [ - buildFirewallEndpoint(), - buildFirewallEndpoint(), - ]; - -void checkUnnamed28(core.List o) { - unittest.expect(o, unittest.hasLength(2)); - checkFirewallEndpoint(o[0]); - checkFirewallEndpoint(o[1]); + buildCounterGoogleIamV1AuditLogConfig--; } -core.List buildUnnamed29() => [ +core.List buildUnnamed32() => [ 'foo', 'foo', ]; -void checkUnnamed29(core.List o) { +void checkUnnamed32(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -1458,105 +1585,115 @@ void checkUnnamed29(core.List o) { ); } -core.int buildCounterListFirewallEndpointsResponse = 0; -api.ListFirewallEndpointsResponse buildListFirewallEndpointsResponse() { - final o = api.ListFirewallEndpointsResponse(); - buildCounterListFirewallEndpointsResponse++; - if (buildCounterListFirewallEndpointsResponse < 3) { - o.firewallEndpoints = buildUnnamed28(); - o.nextPageToken = 'foo'; - o.unreachable = buildUnnamed29(); +core.int buildCounterGoogleIamV1Binding = 0; +api.GoogleIamV1Binding buildGoogleIamV1Binding() { + final o = api.GoogleIamV1Binding(); + buildCounterGoogleIamV1Binding++; + if (buildCounterGoogleIamV1Binding < 3) { + o.condition = buildExpr(); + o.members = buildUnnamed32(); + o.role = 'foo'; } - buildCounterListFirewallEndpointsResponse--; + buildCounterGoogleIamV1Binding--; return o; } -void checkListFirewallEndpointsResponse(api.ListFirewallEndpointsResponse o) { - buildCounterListFirewallEndpointsResponse++; - if (buildCounterListFirewallEndpointsResponse < 3) { - checkUnnamed28(o.firewallEndpoints!); +void checkGoogleIamV1Binding(api.GoogleIamV1Binding o) 
{ + buildCounterGoogleIamV1Binding++; + if (buildCounterGoogleIamV1Binding < 3) { + checkExpr(o.condition!); + checkUnnamed32(o.members!); unittest.expect( - o.nextPageToken!, + o.role!, unittest.equals('foo'), ); - checkUnnamed29(o.unreachable!); } - buildCounterListFirewallEndpointsResponse--; + buildCounterGoogleIamV1Binding--; } -core.List buildUnnamed30() => [ - buildGatewaySecurityPolicy(), - buildGatewaySecurityPolicy(), +core.List buildUnnamed33() => [ + buildGoogleIamV1AuditConfig(), + buildGoogleIamV1AuditConfig(), ]; -void checkUnnamed30(core.List o) { +void checkUnnamed33(core.List o) { unittest.expect(o, unittest.hasLength(2)); - checkGatewaySecurityPolicy(o[0]); - checkGatewaySecurityPolicy(o[1]); + checkGoogleIamV1AuditConfig(o[0]); + checkGoogleIamV1AuditConfig(o[1]); } -core.List buildUnnamed31() => [ - 'foo', - 'foo', +core.List buildUnnamed34() => [ + buildGoogleIamV1Binding(), + buildGoogleIamV1Binding(), ]; -void checkUnnamed31(core.List o) { +void checkUnnamed34(core.List o) { unittest.expect(o, unittest.hasLength(2)); - unittest.expect( - o[0], - unittest.equals('foo'), - ); - unittest.expect( - o[1], - unittest.equals('foo'), - ); + checkGoogleIamV1Binding(o[0]); + checkGoogleIamV1Binding(o[1]); } -core.int buildCounterListGatewaySecurityPoliciesResponse = 0; -api.ListGatewaySecurityPoliciesResponse - buildListGatewaySecurityPoliciesResponse() { - final o = api.ListGatewaySecurityPoliciesResponse(); - buildCounterListGatewaySecurityPoliciesResponse++; - if (buildCounterListGatewaySecurityPoliciesResponse < 3) { - o.gatewaySecurityPolicies = buildUnnamed30(); - o.nextPageToken = 'foo'; - o.unreachable = buildUnnamed31(); +core.int buildCounterGoogleIamV1Policy = 0; +api.GoogleIamV1Policy buildGoogleIamV1Policy() { + final o = api.GoogleIamV1Policy(); + buildCounterGoogleIamV1Policy++; + if (buildCounterGoogleIamV1Policy < 3) { + o.auditConfigs = buildUnnamed33(); + o.bindings = buildUnnamed34(); + o.etag = 'foo'; + o.version = 42; } - buildCounterListGatewaySecurityPoliciesResponse--; + buildCounterGoogleIamV1Policy--; return o; } -void checkListGatewaySecurityPoliciesResponse( - api.ListGatewaySecurityPoliciesResponse o) { - buildCounterListGatewaySecurityPoliciesResponse++; - if (buildCounterListGatewaySecurityPoliciesResponse < 3) { - checkUnnamed30(o.gatewaySecurityPolicies!); +void checkGoogleIamV1Policy(api.GoogleIamV1Policy o) { + buildCounterGoogleIamV1Policy++; + if (buildCounterGoogleIamV1Policy < 3) { + checkUnnamed33(o.auditConfigs!); + checkUnnamed34(o.bindings!); unittest.expect( - o.nextPageToken!, + o.etag!, unittest.equals('foo'), ); - checkUnnamed31(o.unreachable!); + unittest.expect( + o.version!, + unittest.equals(42), + ); } - buildCounterListGatewaySecurityPoliciesResponse--; + buildCounterGoogleIamV1Policy--; } -core.List buildUnnamed32() => [ - buildGatewaySecurityPolicyRule(), - buildGatewaySecurityPolicyRule(), - ]; +core.int buildCounterGoogleIamV1SetIamPolicyRequest = 0; +api.GoogleIamV1SetIamPolicyRequest buildGoogleIamV1SetIamPolicyRequest() { + final o = api.GoogleIamV1SetIamPolicyRequest(); + buildCounterGoogleIamV1SetIamPolicyRequest++; + if (buildCounterGoogleIamV1SetIamPolicyRequest < 3) { + o.policy = buildGoogleIamV1Policy(); + o.updateMask = 'foo'; + } + buildCounterGoogleIamV1SetIamPolicyRequest--; + return o; +} -void checkUnnamed32(core.List o) { - unittest.expect(o, unittest.hasLength(2)); - checkGatewaySecurityPolicyRule(o[0]); - checkGatewaySecurityPolicyRule(o[1]); +void 
checkGoogleIamV1SetIamPolicyRequest(api.GoogleIamV1SetIamPolicyRequest o) { + buildCounterGoogleIamV1SetIamPolicyRequest++; + if (buildCounterGoogleIamV1SetIamPolicyRequest < 3) { + checkGoogleIamV1Policy(o.policy!); + unittest.expect( + o.updateMask!, + unittest.equals('foo'), + ); + } + buildCounterGoogleIamV1SetIamPolicyRequest--; } -core.List buildUnnamed33() => [ +core.List buildUnnamed35() => [ 'foo', 'foo', ]; -void checkUnnamed33(core.List o) { +void checkUnnamed35(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -1568,263 +1705,246 @@ void checkUnnamed33(core.List o) { ); } -core.int buildCounterListGatewaySecurityPolicyRulesResponse = 0; -api.ListGatewaySecurityPolicyRulesResponse - buildListGatewaySecurityPolicyRulesResponse() { - final o = api.ListGatewaySecurityPolicyRulesResponse(); - buildCounterListGatewaySecurityPolicyRulesResponse++; - if (buildCounterListGatewaySecurityPolicyRulesResponse < 3) { - o.gatewaySecurityPolicyRules = buildUnnamed32(); - o.nextPageToken = 'foo'; - o.unreachable = buildUnnamed33(); +core.int buildCounterGoogleIamV1TestIamPermissionsRequest = 0; +api.GoogleIamV1TestIamPermissionsRequest + buildGoogleIamV1TestIamPermissionsRequest() { + final o = api.GoogleIamV1TestIamPermissionsRequest(); + buildCounterGoogleIamV1TestIamPermissionsRequest++; + if (buildCounterGoogleIamV1TestIamPermissionsRequest < 3) { + o.permissions = buildUnnamed35(); } - buildCounterListGatewaySecurityPolicyRulesResponse--; + buildCounterGoogleIamV1TestIamPermissionsRequest--; return o; } -void checkListGatewaySecurityPolicyRulesResponse( - api.ListGatewaySecurityPolicyRulesResponse o) { - buildCounterListGatewaySecurityPolicyRulesResponse++; - if (buildCounterListGatewaySecurityPolicyRulesResponse < 3) { - checkUnnamed32(o.gatewaySecurityPolicyRules!); - unittest.expect( - o.nextPageToken!, - unittest.equals('foo'), - ); - checkUnnamed33(o.unreachable!); +void checkGoogleIamV1TestIamPermissionsRequest( + api.GoogleIamV1TestIamPermissionsRequest o) { + buildCounterGoogleIamV1TestIamPermissionsRequest++; + if (buildCounterGoogleIamV1TestIamPermissionsRequest < 3) { + checkUnnamed35(o.permissions!); } - buildCounterListGatewaySecurityPolicyRulesResponse--; + buildCounterGoogleIamV1TestIamPermissionsRequest--; } -core.List buildUnnamed34() => [ - buildLocation(), - buildLocation(), +core.List buildUnnamed36() => [ + 'foo', + 'foo', ]; -void checkUnnamed34(core.List o) { +void checkUnnamed36(core.List o) { unittest.expect(o, unittest.hasLength(2)); - checkLocation(o[0]); - checkLocation(o[1]); + unittest.expect( + o[0], + unittest.equals('foo'), + ); + unittest.expect( + o[1], + unittest.equals('foo'), + ); } -core.int buildCounterListLocationsResponse = 0; -api.ListLocationsResponse buildListLocationsResponse() { - final o = api.ListLocationsResponse(); - buildCounterListLocationsResponse++; - if (buildCounterListLocationsResponse < 3) { - o.locations = buildUnnamed34(); - o.nextPageToken = 'foo'; +core.int buildCounterGoogleIamV1TestIamPermissionsResponse = 0; +api.GoogleIamV1TestIamPermissionsResponse + buildGoogleIamV1TestIamPermissionsResponse() { + final o = api.GoogleIamV1TestIamPermissionsResponse(); + buildCounterGoogleIamV1TestIamPermissionsResponse++; + if (buildCounterGoogleIamV1TestIamPermissionsResponse < 3) { + o.permissions = buildUnnamed36(); } - buildCounterListLocationsResponse--; + buildCounterGoogleIamV1TestIamPermissionsResponse--; return o; } -void checkListLocationsResponse(api.ListLocationsResponse o) { - 
buildCounterListLocationsResponse++; - if (buildCounterListLocationsResponse < 3) { - checkUnnamed34(o.locations!); - unittest.expect( - o.nextPageToken!, - unittest.equals('foo'), - ); +void checkGoogleIamV1TestIamPermissionsResponse( + api.GoogleIamV1TestIamPermissionsResponse o) { + buildCounterGoogleIamV1TestIamPermissionsResponse++; + if (buildCounterGoogleIamV1TestIamPermissionsResponse < 3) { + checkUnnamed36(o.permissions!); } - buildCounterListLocationsResponse--; -} - -core.List buildUnnamed35() => [ - buildOperation(), - buildOperation(), - ]; - -void checkUnnamed35(core.List o) { - unittest.expect(o, unittest.hasLength(2)); - checkOperation(o[0]); - checkOperation(o[1]); + buildCounterGoogleIamV1TestIamPermissionsResponse--; } -core.int buildCounterListOperationsResponse = 0; -api.ListOperationsResponse buildListOperationsResponse() { - final o = api.ListOperationsResponse(); - buildCounterListOperationsResponse++; - if (buildCounterListOperationsResponse < 3) { - o.nextPageToken = 'foo'; - o.operations = buildUnnamed35(); +core.int buildCounterHttpHeaderMatch = 0; +api.HttpHeaderMatch buildHttpHeaderMatch() { + final o = api.HttpHeaderMatch(); + buildCounterHttpHeaderMatch++; + if (buildCounterHttpHeaderMatch < 3) { + o.headerName = 'foo'; + o.regexMatch = 'foo'; } - buildCounterListOperationsResponse--; + buildCounterHttpHeaderMatch--; return o; } -void checkListOperationsResponse(api.ListOperationsResponse o) { - buildCounterListOperationsResponse++; - if (buildCounterListOperationsResponse < 3) { +void checkHttpHeaderMatch(api.HttpHeaderMatch o) { + buildCounterHttpHeaderMatch++; + if (buildCounterHttpHeaderMatch < 3) { unittest.expect( - o.nextPageToken!, + o.headerName!, + unittest.equals('foo'), + ); + unittest.expect( + o.regexMatch!, unittest.equals('foo'), ); - checkUnnamed35(o.operations!); } - buildCounterListOperationsResponse--; + buildCounterHttpHeaderMatch--; } -core.List buildUnnamed36() => [ - buildSecurityProfileGroup(), - buildSecurityProfileGroup(), - ]; +core.List + buildUnnamed37() => [ + buildListAddressGroupReferencesResponseAddressGroupReference(), + buildListAddressGroupReferencesResponseAddressGroupReference(), + ]; -void checkUnnamed36(core.List o) { +void checkUnnamed37( + core.List o) { unittest.expect(o, unittest.hasLength(2)); - checkSecurityProfileGroup(o[0]); - checkSecurityProfileGroup(o[1]); + checkListAddressGroupReferencesResponseAddressGroupReference(o[0]); + checkListAddressGroupReferencesResponseAddressGroupReference(o[1]); } -core.int buildCounterListSecurityProfileGroupsResponse = 0; -api.ListSecurityProfileGroupsResponse buildListSecurityProfileGroupsResponse() { - final o = api.ListSecurityProfileGroupsResponse(); - buildCounterListSecurityProfileGroupsResponse++; - if (buildCounterListSecurityProfileGroupsResponse < 3) { +core.int buildCounterListAddressGroupReferencesResponse = 0; +api.ListAddressGroupReferencesResponse + buildListAddressGroupReferencesResponse() { + final o = api.ListAddressGroupReferencesResponse(); + buildCounterListAddressGroupReferencesResponse++; + if (buildCounterListAddressGroupReferencesResponse < 3) { + o.addressGroupReferences = buildUnnamed37(); o.nextPageToken = 'foo'; - o.securityProfileGroups = buildUnnamed36(); } - buildCounterListSecurityProfileGroupsResponse--; + buildCounterListAddressGroupReferencesResponse--; return o; } -void checkListSecurityProfileGroupsResponse( - api.ListSecurityProfileGroupsResponse o) { - buildCounterListSecurityProfileGroupsResponse++; - if 
(buildCounterListSecurityProfileGroupsResponse < 3) { +void checkListAddressGroupReferencesResponse( + api.ListAddressGroupReferencesResponse o) { + buildCounterListAddressGroupReferencesResponse++; + if (buildCounterListAddressGroupReferencesResponse < 3) { + checkUnnamed37(o.addressGroupReferences!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed36(o.securityProfileGroups!); } - buildCounterListSecurityProfileGroupsResponse--; -} - -core.List buildUnnamed37() => [ - buildSecurityProfile(), - buildSecurityProfile(), - ]; - -void checkUnnamed37(core.List o) { - unittest.expect(o, unittest.hasLength(2)); - checkSecurityProfile(o[0]); - checkSecurityProfile(o[1]); + buildCounterListAddressGroupReferencesResponse--; } -core.int buildCounterListSecurityProfilesResponse = 0; -api.ListSecurityProfilesResponse buildListSecurityProfilesResponse() { - final o = api.ListSecurityProfilesResponse(); - buildCounterListSecurityProfilesResponse++; - if (buildCounterListSecurityProfilesResponse < 3) { - o.nextPageToken = 'foo'; - o.securityProfiles = buildUnnamed37(); +core.int buildCounterListAddressGroupReferencesResponseAddressGroupReference = + 0; +api.ListAddressGroupReferencesResponseAddressGroupReference + buildListAddressGroupReferencesResponseAddressGroupReference() { + final o = api.ListAddressGroupReferencesResponseAddressGroupReference(); + buildCounterListAddressGroupReferencesResponseAddressGroupReference++; + if (buildCounterListAddressGroupReferencesResponseAddressGroupReference < 3) { + o.firewallPolicy = 'foo'; + o.rulePriority = 42; + o.securityPolicy = 'foo'; } - buildCounterListSecurityProfilesResponse--; + buildCounterListAddressGroupReferencesResponseAddressGroupReference--; return o; } -void checkListSecurityProfilesResponse(api.ListSecurityProfilesResponse o) { - buildCounterListSecurityProfilesResponse++; - if (buildCounterListSecurityProfilesResponse < 3) { +void checkListAddressGroupReferencesResponseAddressGroupReference( + api.ListAddressGroupReferencesResponseAddressGroupReference o) { + buildCounterListAddressGroupReferencesResponseAddressGroupReference++; + if (buildCounterListAddressGroupReferencesResponseAddressGroupReference < 3) { unittest.expect( - o.nextPageToken!, + o.firewallPolicy!, + unittest.equals('foo'), + ); + unittest.expect( + o.rulePriority!, + unittest.equals(42), + ); + unittest.expect( + o.securityPolicy!, unittest.equals('foo'), ); - checkUnnamed37(o.securityProfiles!); } - buildCounterListSecurityProfilesResponse--; + buildCounterListAddressGroupReferencesResponseAddressGroupReference--; } -core.List buildUnnamed38() => [ - buildServerTlsPolicy(), - buildServerTlsPolicy(), +core.List buildUnnamed38() => [ + buildAddressGroup(), + buildAddressGroup(), ]; -void checkUnnamed38(core.List o) { +void checkUnnamed38(core.List o) { unittest.expect(o, unittest.hasLength(2)); - checkServerTlsPolicy(o[0]); - checkServerTlsPolicy(o[1]); + checkAddressGroup(o[0]); + checkAddressGroup(o[1]); } -core.int buildCounterListServerTlsPoliciesResponse = 0; -api.ListServerTlsPoliciesResponse buildListServerTlsPoliciesResponse() { - final o = api.ListServerTlsPoliciesResponse(); - buildCounterListServerTlsPoliciesResponse++; - if (buildCounterListServerTlsPoliciesResponse < 3) { +core.int buildCounterListAddressGroupsResponse = 0; +api.ListAddressGroupsResponse buildListAddressGroupsResponse() { + final o = api.ListAddressGroupsResponse(); + buildCounterListAddressGroupsResponse++; + if (buildCounterListAddressGroupsResponse < 3) { + 
o.addressGroups = buildUnnamed38(); o.nextPageToken = 'foo'; - o.serverTlsPolicies = buildUnnamed38(); } - buildCounterListServerTlsPoliciesResponse--; + buildCounterListAddressGroupsResponse--; return o; } -void checkListServerTlsPoliciesResponse(api.ListServerTlsPoliciesResponse o) { - buildCounterListServerTlsPoliciesResponse++; - if (buildCounterListServerTlsPoliciesResponse < 3) { +void checkListAddressGroupsResponse(api.ListAddressGroupsResponse o) { + buildCounterListAddressGroupsResponse++; + if (buildCounterListAddressGroupsResponse < 3) { + checkUnnamed38(o.addressGroups!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed38(o.serverTlsPolicies!); } - buildCounterListServerTlsPoliciesResponse--; -} - -core.List buildUnnamed39() => [ - buildTlsInspectionPolicy(), - buildTlsInspectionPolicy(), - ]; - -void checkUnnamed39(core.List o) { - unittest.expect(o, unittest.hasLength(2)); - checkTlsInspectionPolicy(o[0]); - checkTlsInspectionPolicy(o[1]); + buildCounterListAddressGroupsResponse--; } -core.List buildUnnamed40() => [ - 'foo', - 'foo', +core.List buildUnnamed39() => [ + buildAuthorizationPolicy(), + buildAuthorizationPolicy(), ]; -void checkUnnamed40(core.List o) { +void checkUnnamed39(core.List o) { unittest.expect(o, unittest.hasLength(2)); - unittest.expect( - o[0], - unittest.equals('foo'), - ); - unittest.expect( - o[1], - unittest.equals('foo'), - ); + checkAuthorizationPolicy(o[0]); + checkAuthorizationPolicy(o[1]); } -core.int buildCounterListTlsInspectionPoliciesResponse = 0; -api.ListTlsInspectionPoliciesResponse buildListTlsInspectionPoliciesResponse() { - final o = api.ListTlsInspectionPoliciesResponse(); - buildCounterListTlsInspectionPoliciesResponse++; - if (buildCounterListTlsInspectionPoliciesResponse < 3) { +core.int buildCounterListAuthorizationPoliciesResponse = 0; +api.ListAuthorizationPoliciesResponse buildListAuthorizationPoliciesResponse() { + final o = api.ListAuthorizationPoliciesResponse(); + buildCounterListAuthorizationPoliciesResponse++; + if (buildCounterListAuthorizationPoliciesResponse < 3) { + o.authorizationPolicies = buildUnnamed39(); o.nextPageToken = 'foo'; - o.tlsInspectionPolicies = buildUnnamed39(); - o.unreachable = buildUnnamed40(); } - buildCounterListTlsInspectionPoliciesResponse--; + buildCounterListAuthorizationPoliciesResponse--; return o; } -void checkListTlsInspectionPoliciesResponse( - api.ListTlsInspectionPoliciesResponse o) { - buildCounterListTlsInspectionPoliciesResponse++; - if (buildCounterListTlsInspectionPoliciesResponse < 3) { +void checkListAuthorizationPoliciesResponse( + api.ListAuthorizationPoliciesResponse o) { + buildCounterListAuthorizationPoliciesResponse++; + if (buildCounterListAuthorizationPoliciesResponse < 3) { + checkUnnamed39(o.authorizationPolicies!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed39(o.tlsInspectionPolicies!); - checkUnnamed40(o.unreachable!); } - buildCounterListTlsInspectionPoliciesResponse--; + buildCounterListAuthorizationPoliciesResponse--; +} + +core.List buildUnnamed40() => [ + buildAuthzPolicy(), + buildAuthzPolicy(), + ]; + +void checkUnnamed40(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkAuthzPolicy(o[0]); + checkAuthzPolicy(o[1]); } core.List buildUnnamed41() => [ @@ -1844,307 +1964,250 @@ void checkUnnamed41(core.List o) { ); } -core.List buildUnnamed42() => [ - buildUrlList(), - buildUrlList(), +core.int buildCounterListAuthzPoliciesResponse = 0; +api.ListAuthzPoliciesResponse 
buildListAuthzPoliciesResponse() { + final o = api.ListAuthzPoliciesResponse(); + buildCounterListAuthzPoliciesResponse++; + if (buildCounterListAuthzPoliciesResponse < 3) { + o.authzPolicies = buildUnnamed40(); + o.nextPageToken = 'foo'; + o.unreachable = buildUnnamed41(); + } + buildCounterListAuthzPoliciesResponse--; + return o; +} + +void checkListAuthzPoliciesResponse(api.ListAuthzPoliciesResponse o) { + buildCounterListAuthzPoliciesResponse++; + if (buildCounterListAuthzPoliciesResponse < 3) { + checkUnnamed40(o.authzPolicies!); + unittest.expect( + o.nextPageToken!, + unittest.equals('foo'), + ); + checkUnnamed41(o.unreachable!); + } + buildCounterListAuthzPoliciesResponse--; +} + +core.List buildUnnamed42() => [ + buildClientTlsPolicy(), + buildClientTlsPolicy(), ]; -void checkUnnamed42(core.List o) { +void checkUnnamed42(core.List o) { unittest.expect(o, unittest.hasLength(2)); - checkUrlList(o[0]); - checkUrlList(o[1]); + checkClientTlsPolicy(o[0]); + checkClientTlsPolicy(o[1]); } -core.int buildCounterListUrlListsResponse = 0; -api.ListUrlListsResponse buildListUrlListsResponse() { - final o = api.ListUrlListsResponse(); - buildCounterListUrlListsResponse++; - if (buildCounterListUrlListsResponse < 3) { +core.int buildCounterListClientTlsPoliciesResponse = 0; +api.ListClientTlsPoliciesResponse buildListClientTlsPoliciesResponse() { + final o = api.ListClientTlsPoliciesResponse(); + buildCounterListClientTlsPoliciesResponse++; + if (buildCounterListClientTlsPoliciesResponse < 3) { + o.clientTlsPolicies = buildUnnamed42(); o.nextPageToken = 'foo'; - o.unreachable = buildUnnamed41(); - o.urlLists = buildUnnamed42(); } - buildCounterListUrlListsResponse--; + buildCounterListClientTlsPoliciesResponse--; return o; } -void checkListUrlListsResponse(api.ListUrlListsResponse o) { - buildCounterListUrlListsResponse++; - if (buildCounterListUrlListsResponse < 3) { +void checkListClientTlsPoliciesResponse(api.ListClientTlsPoliciesResponse o) { + buildCounterListClientTlsPoliciesResponse++; + if (buildCounterListClientTlsPoliciesResponse < 3) { + checkUnnamed42(o.clientTlsPolicies!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed41(o.unreachable!); - checkUnnamed42(o.urlLists!); } - buildCounterListUrlListsResponse--; + buildCounterListClientTlsPoliciesResponse--; } -core.Map buildUnnamed43() => { - 'x': 'foo', - 'y': 'foo', - }; +core.List buildUnnamed43() => [ + buildFirewallEndpointAssociation(), + buildFirewallEndpointAssociation(), + ]; -void checkUnnamed43(core.Map o) { +void checkUnnamed43(core.List o) { unittest.expect(o, unittest.hasLength(2)); - unittest.expect( - o['x']!, - unittest.equals('foo'), - ); - unittest.expect( - o['y']!, - unittest.equals('foo'), - ); + checkFirewallEndpointAssociation(o[0]); + checkFirewallEndpointAssociation(o[1]); } -core.Map buildUnnamed44() => { - 'x': { - 'list': [1, 2, 3], - 'bool': true, - 'string': 'foo' - }, - 'y': { - 'list': [1, 2, 3], - 'bool': true, - 'string': 'foo' - }, - }; +core.List buildUnnamed44() => [ + 'foo', + 'foo', + ]; -void checkUnnamed44(core.Map o) { +void checkUnnamed44(core.List o) { unittest.expect(o, unittest.hasLength(2)); - var casted1 = (o['x']!) as core.Map; - unittest.expect(casted1, unittest.hasLength(3)); - unittest.expect( - casted1['list'], - unittest.equals([1, 2, 3]), - ); - unittest.expect( - casted1['bool'], - unittest.equals(true), - ); unittest.expect( - casted1['string'], + o[0], unittest.equals('foo'), ); - var casted2 = (o['y']!) 
as core.Map; - unittest.expect(casted2, unittest.hasLength(3)); - unittest.expect( - casted2['list'], - unittest.equals([1, 2, 3]), - ); - unittest.expect( - casted2['bool'], - unittest.equals(true), - ); unittest.expect( - casted2['string'], + o[1], unittest.equals('foo'), ); } -core.int buildCounterLocation = 0; -api.Location buildLocation() { - final o = api.Location(); - buildCounterLocation++; - if (buildCounterLocation < 3) { - o.displayName = 'foo'; - o.labels = buildUnnamed43(); - o.locationId = 'foo'; - o.metadata = buildUnnamed44(); - o.name = 'foo'; +core.int buildCounterListFirewallEndpointAssociationsResponse = 0; +api.ListFirewallEndpointAssociationsResponse + buildListFirewallEndpointAssociationsResponse() { + final o = api.ListFirewallEndpointAssociationsResponse(); + buildCounterListFirewallEndpointAssociationsResponse++; + if (buildCounterListFirewallEndpointAssociationsResponse < 3) { + o.firewallEndpointAssociations = buildUnnamed43(); + o.nextPageToken = 'foo'; + o.unreachable = buildUnnamed44(); } - buildCounterLocation--; + buildCounterListFirewallEndpointAssociationsResponse--; return o; } -void checkLocation(api.Location o) { - buildCounterLocation++; - if (buildCounterLocation < 3) { - unittest.expect( - o.displayName!, - unittest.equals('foo'), - ); - checkUnnamed43(o.labels!); - unittest.expect( - o.locationId!, - unittest.equals('foo'), - ); - checkUnnamed44(o.metadata!); +void checkListFirewallEndpointAssociationsResponse( + api.ListFirewallEndpointAssociationsResponse o) { + buildCounterListFirewallEndpointAssociationsResponse++; + if (buildCounterListFirewallEndpointAssociationsResponse < 3) { + checkUnnamed43(o.firewallEndpointAssociations!); unittest.expect( - o.name!, + o.nextPageToken!, unittest.equals('foo'), ); + checkUnnamed44(o.unreachable!); } - buildCounterLocation--; + buildCounterListFirewallEndpointAssociationsResponse--; } -core.List buildUnnamed45() => [ - buildValidationCA(), - buildValidationCA(), +core.List buildUnnamed45() => [ + buildFirewallEndpoint(), + buildFirewallEndpoint(), ]; -void checkUnnamed45(core.List o) { +void checkUnnamed45(core.List o) { unittest.expect(o, unittest.hasLength(2)); - checkValidationCA(o[0]); - checkValidationCA(o[1]); -} + checkFirewallEndpoint(o[0]); + checkFirewallEndpoint(o[1]); +} -core.int buildCounterMTLSPolicy = 0; -api.MTLSPolicy buildMTLSPolicy() { - final o = api.MTLSPolicy(); - buildCounterMTLSPolicy++; - if (buildCounterMTLSPolicy < 3) { - o.clientValidationCa = buildUnnamed45(); - o.clientValidationMode = 'foo'; - o.clientValidationTrustConfig = 'foo'; +core.List buildUnnamed46() => [ + 'foo', + 'foo', + ]; + +void checkUnnamed46(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o[0], + unittest.equals('foo'), + ); + unittest.expect( + o[1], + unittest.equals('foo'), + ); +} + +core.int buildCounterListFirewallEndpointsResponse = 0; +api.ListFirewallEndpointsResponse buildListFirewallEndpointsResponse() { + final o = api.ListFirewallEndpointsResponse(); + buildCounterListFirewallEndpointsResponse++; + if (buildCounterListFirewallEndpointsResponse < 3) { + o.firewallEndpoints = buildUnnamed45(); + o.nextPageToken = 'foo'; + o.unreachable = buildUnnamed46(); } - buildCounterMTLSPolicy--; + buildCounterListFirewallEndpointsResponse--; return o; } -void checkMTLSPolicy(api.MTLSPolicy o) { - buildCounterMTLSPolicy++; - if (buildCounterMTLSPolicy < 3) { - checkUnnamed45(o.clientValidationCa!); - unittest.expect( - o.clientValidationMode!, - unittest.equals('foo'), - ); 
+void checkListFirewallEndpointsResponse(api.ListFirewallEndpointsResponse o) { + buildCounterListFirewallEndpointsResponse++; + if (buildCounterListFirewallEndpointsResponse < 3) { + checkUnnamed45(o.firewallEndpoints!); unittest.expect( - o.clientValidationTrustConfig!, + o.nextPageToken!, unittest.equals('foo'), ); + checkUnnamed46(o.unreachable!); } - buildCounterMTLSPolicy--; + buildCounterListFirewallEndpointsResponse--; } -core.Map buildUnnamed46() => { - 'x': { - 'list': [1, 2, 3], - 'bool': true, - 'string': 'foo' - }, - 'y': { - 'list': [1, 2, 3], - 'bool': true, - 'string': 'foo' - }, - }; +core.List buildUnnamed47() => [ + buildGatewaySecurityPolicy(), + buildGatewaySecurityPolicy(), + ]; -void checkUnnamed46(core.Map o) { +void checkUnnamed47(core.List o) { unittest.expect(o, unittest.hasLength(2)); - var casted3 = (o['x']!) as core.Map; - unittest.expect(casted3, unittest.hasLength(3)); - unittest.expect( - casted3['list'], - unittest.equals([1, 2, 3]), - ); - unittest.expect( - casted3['bool'], - unittest.equals(true), - ); - unittest.expect( - casted3['string'], - unittest.equals('foo'), - ); - var casted4 = (o['y']!) as core.Map; - unittest.expect(casted4, unittest.hasLength(3)); - unittest.expect( - casted4['list'], - unittest.equals([1, 2, 3]), - ); - unittest.expect( - casted4['bool'], - unittest.equals(true), - ); - unittest.expect( - casted4['string'], - unittest.equals('foo'), - ); + checkGatewaySecurityPolicy(o[0]); + checkGatewaySecurityPolicy(o[1]); } -core.Map buildUnnamed47() => { - 'x': { - 'list': [1, 2, 3], - 'bool': true, - 'string': 'foo' - }, - 'y': { - 'list': [1, 2, 3], - 'bool': true, - 'string': 'foo' - }, - }; +core.List buildUnnamed48() => [ + 'foo', + 'foo', + ]; -void checkUnnamed47(core.Map o) { +void checkUnnamed48(core.List o) { unittest.expect(o, unittest.hasLength(2)); - var casted5 = (o['x']!) as core.Map; - unittest.expect(casted5, unittest.hasLength(3)); - unittest.expect( - casted5['list'], - unittest.equals([1, 2, 3]), - ); - unittest.expect( - casted5['bool'], - unittest.equals(true), - ); unittest.expect( - casted5['string'], + o[0], unittest.equals('foo'), ); - var casted6 = (o['y']!) 
as core.Map; - unittest.expect(casted6, unittest.hasLength(3)); - unittest.expect( - casted6['list'], - unittest.equals([1, 2, 3]), - ); - unittest.expect( - casted6['bool'], - unittest.equals(true), - ); unittest.expect( - casted6['string'], + o[1], unittest.equals('foo'), ); } -core.int buildCounterOperation = 0; -api.Operation buildOperation() { - final o = api.Operation(); - buildCounterOperation++; - if (buildCounterOperation < 3) { - o.done = true; - o.error = buildStatus(); - o.metadata = buildUnnamed46(); - o.name = 'foo'; - o.response = buildUnnamed47(); +core.int buildCounterListGatewaySecurityPoliciesResponse = 0; +api.ListGatewaySecurityPoliciesResponse + buildListGatewaySecurityPoliciesResponse() { + final o = api.ListGatewaySecurityPoliciesResponse(); + buildCounterListGatewaySecurityPoliciesResponse++; + if (buildCounterListGatewaySecurityPoliciesResponse < 3) { + o.gatewaySecurityPolicies = buildUnnamed47(); + o.nextPageToken = 'foo'; + o.unreachable = buildUnnamed48(); } - buildCounterOperation--; + buildCounterListGatewaySecurityPoliciesResponse--; return o; } -void checkOperation(api.Operation o) { - buildCounterOperation++; - if (buildCounterOperation < 3) { - unittest.expect(o.done!, unittest.isTrue); - checkStatus(o.error!); - checkUnnamed46(o.metadata!); +void checkListGatewaySecurityPoliciesResponse( + api.ListGatewaySecurityPoliciesResponse o) { + buildCounterListGatewaySecurityPoliciesResponse++; + if (buildCounterListGatewaySecurityPoliciesResponse < 3) { + checkUnnamed47(o.gatewaySecurityPolicies!); unittest.expect( - o.name!, + o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed47(o.response!); + checkUnnamed48(o.unreachable!); } - buildCounterOperation--; + buildCounterListGatewaySecurityPoliciesResponse--; } -core.List buildUnnamed48() => [ +core.List buildUnnamed49() => [ + buildGatewaySecurityPolicyRule(), + buildGatewaySecurityPolicyRule(), + ]; + +void checkUnnamed49(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkGatewaySecurityPolicyRule(o[0]); + checkGatewaySecurityPolicyRule(o[1]); +} + +core.List buildUnnamed50() => [ 'foo', 'foo', ]; -void checkUnnamed48(core.List o) { +void checkUnnamed50(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -2156,310 +2219,227 @@ void checkUnnamed48(core.List o) { ); } -core.int buildCounterRemoveAddressGroupItemsRequest = 0; -api.RemoveAddressGroupItemsRequest buildRemoveAddressGroupItemsRequest() { - final o = api.RemoveAddressGroupItemsRequest(); - buildCounterRemoveAddressGroupItemsRequest++; - if (buildCounterRemoveAddressGroupItemsRequest < 3) { - o.items = buildUnnamed48(); - o.requestId = 'foo'; +core.int buildCounterListGatewaySecurityPolicyRulesResponse = 0; +api.ListGatewaySecurityPolicyRulesResponse + buildListGatewaySecurityPolicyRulesResponse() { + final o = api.ListGatewaySecurityPolicyRulesResponse(); + buildCounterListGatewaySecurityPolicyRulesResponse++; + if (buildCounterListGatewaySecurityPolicyRulesResponse < 3) { + o.gatewaySecurityPolicyRules = buildUnnamed49(); + o.nextPageToken = 'foo'; + o.unreachable = buildUnnamed50(); } - buildCounterRemoveAddressGroupItemsRequest--; + buildCounterListGatewaySecurityPolicyRulesResponse--; return o; } -void checkRemoveAddressGroupItemsRequest(api.RemoveAddressGroupItemsRequest o) { - buildCounterRemoveAddressGroupItemsRequest++; - if (buildCounterRemoveAddressGroupItemsRequest < 3) { - checkUnnamed48(o.items!); +void checkListGatewaySecurityPolicyRulesResponse( + 
api.ListGatewaySecurityPolicyRulesResponse o) { + buildCounterListGatewaySecurityPolicyRulesResponse++; + if (buildCounterListGatewaySecurityPolicyRulesResponse < 3) { + checkUnnamed49(o.gatewaySecurityPolicyRules!); unittest.expect( - o.requestId!, + o.nextPageToken!, unittest.equals('foo'), ); + checkUnnamed50(o.unreachable!); } - buildCounterRemoveAddressGroupItemsRequest--; -} - -core.List buildUnnamed49() => [ - buildDestination(), - buildDestination(), - ]; - -void checkUnnamed49(core.List o) { - unittest.expect(o, unittest.hasLength(2)); - checkDestination(o[0]); - checkDestination(o[1]); + buildCounterListGatewaySecurityPolicyRulesResponse--; } -core.List buildUnnamed50() => [ - buildSource(), - buildSource(), +core.List buildUnnamed51() => [ + buildLocation(), + buildLocation(), ]; -void checkUnnamed50(core.List o) { +void checkUnnamed51(core.List o) { unittest.expect(o, unittest.hasLength(2)); - checkSource(o[0]); - checkSource(o[1]); + checkLocation(o[0]); + checkLocation(o[1]); } -core.int buildCounterRule = 0; -api.Rule buildRule() { - final o = api.Rule(); - buildCounterRule++; - if (buildCounterRule < 3) { - o.destinations = buildUnnamed49(); - o.sources = buildUnnamed50(); +core.int buildCounterListLocationsResponse = 0; +api.ListLocationsResponse buildListLocationsResponse() { + final o = api.ListLocationsResponse(); + buildCounterListLocationsResponse++; + if (buildCounterListLocationsResponse < 3) { + o.locations = buildUnnamed51(); + o.nextPageToken = 'foo'; } - buildCounterRule--; + buildCounterListLocationsResponse--; return o; } -void checkRule(api.Rule o) { - buildCounterRule++; - if (buildCounterRule < 3) { - checkUnnamed49(o.destinations!); - checkUnnamed50(o.sources!); +void checkListLocationsResponse(api.ListLocationsResponse o) { + buildCounterListLocationsResponse++; + if (buildCounterListLocationsResponse < 3) { + checkUnnamed51(o.locations!); + unittest.expect( + o.nextPageToken!, + unittest.equals('foo'), + ); } - buildCounterRule--; + buildCounterListLocationsResponse--; } -core.Map buildUnnamed51() => { - 'x': 'foo', - 'y': 'foo', - }; +core.List buildUnnamed52() => [ + buildOperation(), + buildOperation(), + ]; -void checkUnnamed51(core.Map o) { +void checkUnnamed52(core.List o) { unittest.expect(o, unittest.hasLength(2)); - unittest.expect( - o['x']!, - unittest.equals('foo'), - ); - unittest.expect( - o['y']!, - unittest.equals('foo'), - ); + checkOperation(o[0]); + checkOperation(o[1]); } -core.int buildCounterSecurityProfile = 0; -api.SecurityProfile buildSecurityProfile() { - final o = api.SecurityProfile(); - buildCounterSecurityProfile++; - if (buildCounterSecurityProfile < 3) { - o.createTime = 'foo'; - o.customMirroringProfile = buildCustomMirroringProfile(); - o.description = 'foo'; - o.etag = 'foo'; - o.labels = buildUnnamed51(); - o.name = 'foo'; - o.threatPreventionProfile = buildThreatPreventionProfile(); - o.type = 'foo'; - o.updateTime = 'foo'; +core.int buildCounterListOperationsResponse = 0; +api.ListOperationsResponse buildListOperationsResponse() { + final o = api.ListOperationsResponse(); + buildCounterListOperationsResponse++; + if (buildCounterListOperationsResponse < 3) { + o.nextPageToken = 'foo'; + o.operations = buildUnnamed52(); } - buildCounterSecurityProfile--; + buildCounterListOperationsResponse--; return o; } -void checkSecurityProfile(api.SecurityProfile o) { - buildCounterSecurityProfile++; - if (buildCounterSecurityProfile < 3) { - unittest.expect( - o.createTime!, - unittest.equals('foo'), - ); - 
checkCustomMirroringProfile(o.customMirroringProfile!); - unittest.expect( - o.description!, - unittest.equals('foo'), - ); - unittest.expect( - o.etag!, - unittest.equals('foo'), - ); - checkUnnamed51(o.labels!); - unittest.expect( - o.name!, - unittest.equals('foo'), - ); - checkThreatPreventionProfile(o.threatPreventionProfile!); - unittest.expect( - o.type!, - unittest.equals('foo'), - ); +void checkListOperationsResponse(api.ListOperationsResponse o) { + buildCounterListOperationsResponse++; + if (buildCounterListOperationsResponse < 3) { unittest.expect( - o.updateTime!, + o.nextPageToken!, unittest.equals('foo'), ); + checkUnnamed52(o.operations!); } - buildCounterSecurityProfile--; + buildCounterListOperationsResponse--; } -core.Map buildUnnamed52() => { - 'x': 'foo', - 'y': 'foo', - }; +core.List buildUnnamed53() => [ + buildSecurityProfileGroup(), + buildSecurityProfileGroup(), + ]; -void checkUnnamed52(core.Map o) { +void checkUnnamed53(core.List o) { unittest.expect(o, unittest.hasLength(2)); - unittest.expect( - o['x']!, - unittest.equals('foo'), - ); - unittest.expect( - o['y']!, - unittest.equals('foo'), - ); + checkSecurityProfileGroup(o[0]); + checkSecurityProfileGroup(o[1]); } -core.int buildCounterSecurityProfileGroup = 0; -api.SecurityProfileGroup buildSecurityProfileGroup() { - final o = api.SecurityProfileGroup(); - buildCounterSecurityProfileGroup++; - if (buildCounterSecurityProfileGroup < 3) { - o.createTime = 'foo'; - o.customMirroringProfile = 'foo'; - o.description = 'foo'; - o.etag = 'foo'; - o.labels = buildUnnamed52(); - o.name = 'foo'; - o.threatPreventionProfile = 'foo'; - o.updateTime = 'foo'; +core.int buildCounterListSecurityProfileGroupsResponse = 0; +api.ListSecurityProfileGroupsResponse buildListSecurityProfileGroupsResponse() { + final o = api.ListSecurityProfileGroupsResponse(); + buildCounterListSecurityProfileGroupsResponse++; + if (buildCounterListSecurityProfileGroupsResponse < 3) { + o.nextPageToken = 'foo'; + o.securityProfileGroups = buildUnnamed53(); } - buildCounterSecurityProfileGroup--; + buildCounterListSecurityProfileGroupsResponse--; return o; } -void checkSecurityProfileGroup(api.SecurityProfileGroup o) { - buildCounterSecurityProfileGroup++; - if (buildCounterSecurityProfileGroup < 3) { - unittest.expect( - o.createTime!, - unittest.equals('foo'), - ); - unittest.expect( - o.customMirroringProfile!, - unittest.equals('foo'), - ); - unittest.expect( - o.description!, - unittest.equals('foo'), - ); - unittest.expect( - o.etag!, - unittest.equals('foo'), - ); - checkUnnamed52(o.labels!); - unittest.expect( - o.name!, - unittest.equals('foo'), - ); - unittest.expect( - o.threatPreventionProfile!, - unittest.equals('foo'), - ); +void checkListSecurityProfileGroupsResponse( + api.ListSecurityProfileGroupsResponse o) { + buildCounterListSecurityProfileGroupsResponse++; + if (buildCounterListSecurityProfileGroupsResponse < 3) { unittest.expect( - o.updateTime!, + o.nextPageToken!, unittest.equals('foo'), ); + checkUnnamed53(o.securityProfileGroups!); } - buildCounterSecurityProfileGroup--; + buildCounterListSecurityProfileGroupsResponse--; } -core.Map buildUnnamed53() => { - 'x': 'foo', - 'y': 'foo', - }; +core.List buildUnnamed54() => [ + buildSecurityProfile(), + buildSecurityProfile(), + ]; -void checkUnnamed53(core.Map o) { +void checkUnnamed54(core.List o) { unittest.expect(o, unittest.hasLength(2)); - unittest.expect( - o['x']!, - unittest.equals('foo'), - ); - unittest.expect( - o['y']!, - unittest.equals('foo'), - ); + 
checkSecurityProfile(o[0]); + checkSecurityProfile(o[1]); } -core.int buildCounterServerTlsPolicy = 0; -api.ServerTlsPolicy buildServerTlsPolicy() { - final o = api.ServerTlsPolicy(); - buildCounterServerTlsPolicy++; - if (buildCounterServerTlsPolicy < 3) { - o.allowOpen = true; - o.createTime = 'foo'; - o.description = 'foo'; - o.labels = buildUnnamed53(); - o.mtlsPolicy = buildMTLSPolicy(); - o.name = 'foo'; - o.serverCertificate = - buildGoogleCloudNetworksecurityV1CertificateProvider(); - o.updateTime = 'foo'; +core.int buildCounterListSecurityProfilesResponse = 0; +api.ListSecurityProfilesResponse buildListSecurityProfilesResponse() { + final o = api.ListSecurityProfilesResponse(); + buildCounterListSecurityProfilesResponse++; + if (buildCounterListSecurityProfilesResponse < 3) { + o.nextPageToken = 'foo'; + o.securityProfiles = buildUnnamed54(); } - buildCounterServerTlsPolicy--; + buildCounterListSecurityProfilesResponse--; return o; } -void checkServerTlsPolicy(api.ServerTlsPolicy o) { - buildCounterServerTlsPolicy++; - if (buildCounterServerTlsPolicy < 3) { - unittest.expect(o.allowOpen!, unittest.isTrue); - unittest.expect( - o.createTime!, - unittest.equals('foo'), - ); - unittest.expect( - o.description!, - unittest.equals('foo'), - ); - checkUnnamed53(o.labels!); - checkMTLSPolicy(o.mtlsPolicy!); - unittest.expect( - o.name!, - unittest.equals('foo'), - ); - checkGoogleCloudNetworksecurityV1CertificateProvider(o.serverCertificate!); +void checkListSecurityProfilesResponse(api.ListSecurityProfilesResponse o) { + buildCounterListSecurityProfilesResponse++; + if (buildCounterListSecurityProfilesResponse < 3) { unittest.expect( - o.updateTime!, + o.nextPageToken!, unittest.equals('foo'), ); + checkUnnamed54(o.securityProfiles!); } - buildCounterServerTlsPolicy--; + buildCounterListSecurityProfilesResponse--; } -core.int buildCounterSeverityOverride = 0; -api.SeverityOverride buildSeverityOverride() { - final o = api.SeverityOverride(); - buildCounterSeverityOverride++; - if (buildCounterSeverityOverride < 3) { - o.action = 'foo'; - o.severity = 'foo'; +core.List buildUnnamed55() => [ + buildServerTlsPolicy(), + buildServerTlsPolicy(), + ]; + +void checkUnnamed55(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkServerTlsPolicy(o[0]); + checkServerTlsPolicy(o[1]); +} + +core.int buildCounterListServerTlsPoliciesResponse = 0; +api.ListServerTlsPoliciesResponse buildListServerTlsPoliciesResponse() { + final o = api.ListServerTlsPoliciesResponse(); + buildCounterListServerTlsPoliciesResponse++; + if (buildCounterListServerTlsPoliciesResponse < 3) { + o.nextPageToken = 'foo'; + o.serverTlsPolicies = buildUnnamed55(); } - buildCounterSeverityOverride--; + buildCounterListServerTlsPoliciesResponse--; return o; } -void checkSeverityOverride(api.SeverityOverride o) { - buildCounterSeverityOverride++; - if (buildCounterSeverityOverride < 3) { - unittest.expect( - o.action!, - unittest.equals('foo'), - ); +void checkListServerTlsPoliciesResponse(api.ListServerTlsPoliciesResponse o) { + buildCounterListServerTlsPoliciesResponse++; + if (buildCounterListServerTlsPoliciesResponse < 3) { unittest.expect( - o.severity!, + o.nextPageToken!, unittest.equals('foo'), ); + checkUnnamed55(o.serverTlsPolicies!); } - buildCounterSeverityOverride--; + buildCounterListServerTlsPoliciesResponse--; } -core.List buildUnnamed54() => [ +core.List buildUnnamed56() => [ + buildTlsInspectionPolicy(), + buildTlsInspectionPolicy(), + ]; + +void checkUnnamed56(core.List o) { + unittest.expect(o, 
unittest.hasLength(2)); + checkTlsInspectionPolicy(o[0]); + checkTlsInspectionPolicy(o[1]); +} + +core.List buildUnnamed57() => [ 'foo', 'foo', ]; -void checkUnnamed54(core.List o) { +void checkUnnamed57(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -2471,12 +2451,39 @@ void checkUnnamed54(core.List o) { ); } -core.List buildUnnamed55() => [ +core.int buildCounterListTlsInspectionPoliciesResponse = 0; +api.ListTlsInspectionPoliciesResponse buildListTlsInspectionPoliciesResponse() { + final o = api.ListTlsInspectionPoliciesResponse(); + buildCounterListTlsInspectionPoliciesResponse++; + if (buildCounterListTlsInspectionPoliciesResponse < 3) { + o.nextPageToken = 'foo'; + o.tlsInspectionPolicies = buildUnnamed56(); + o.unreachable = buildUnnamed57(); + } + buildCounterListTlsInspectionPoliciesResponse--; + return o; +} + +void checkListTlsInspectionPoliciesResponse( + api.ListTlsInspectionPoliciesResponse o) { + buildCounterListTlsInspectionPoliciesResponse++; + if (buildCounterListTlsInspectionPoliciesResponse < 3) { + unittest.expect( + o.nextPageToken!, + unittest.equals('foo'), + ); + checkUnnamed56(o.tlsInspectionPolicies!); + checkUnnamed57(o.unreachable!); + } + buildCounterListTlsInspectionPoliciesResponse--; +} + +core.List buildUnnamed58() => [ 'foo', 'foo', ]; -void checkUnnamed55(core.List o) { +void checkUnnamed58(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -2488,28 +2495,61 @@ void checkUnnamed55(core.List o) { ); } -core.int buildCounterSource = 0; -api.Source buildSource() { - final o = api.Source(); - buildCounterSource++; - if (buildCounterSource < 3) { - o.ipBlocks = buildUnnamed54(); - o.principals = buildUnnamed55(); +core.List buildUnnamed59() => [ + buildUrlList(), + buildUrlList(), + ]; + +void checkUnnamed59(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkUrlList(o[0]); + checkUrlList(o[1]); +} + +core.int buildCounterListUrlListsResponse = 0; +api.ListUrlListsResponse buildListUrlListsResponse() { + final o = api.ListUrlListsResponse(); + buildCounterListUrlListsResponse++; + if (buildCounterListUrlListsResponse < 3) { + o.nextPageToken = 'foo'; + o.unreachable = buildUnnamed58(); + o.urlLists = buildUnnamed59(); } - buildCounterSource--; + buildCounterListUrlListsResponse--; return o; } -void checkSource(api.Source o) { - buildCounterSource++; - if (buildCounterSource < 3) { - checkUnnamed54(o.ipBlocks!); - checkUnnamed55(o.principals!); +void checkListUrlListsResponse(api.ListUrlListsResponse o) { + buildCounterListUrlListsResponse++; + if (buildCounterListUrlListsResponse < 3) { + unittest.expect( + o.nextPageToken!, + unittest.equals('foo'), + ); + checkUnnamed58(o.unreachable!); + checkUnnamed59(o.urlLists!); } - buildCounterSource--; + buildCounterListUrlListsResponse--; +} + +core.Map buildUnnamed60() => { + 'x': 'foo', + 'y': 'foo', + }; + +void checkUnnamed60(core.Map o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o['x']!, + unittest.equals('foo'), + ); + unittest.expect( + o['y']!, + unittest.equals('foo'), + ); } -core.Map buildUnnamed56() => { +core.Map buildUnnamed61() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -2522,221 +2562,380 @@ core.Map buildUnnamed56() => { }, }; -void checkUnnamed56(core.Map o) { +void checkUnnamed61(core.Map o) { unittest.expect(o, unittest.hasLength(2)); - var casted7 = (o['x']!) as core.Map; - unittest.expect(casted7, unittest.hasLength(3)); + var casted1 = (o['x']!) 
as core.Map; + unittest.expect(casted1, unittest.hasLength(3)); unittest.expect( - casted7['list'], + casted1['list'], unittest.equals([1, 2, 3]), ); unittest.expect( - casted7['bool'], + casted1['bool'], unittest.equals(true), ); unittest.expect( - casted7['string'], + casted1['string'], unittest.equals('foo'), ); - var casted8 = (o['y']!) as core.Map; - unittest.expect(casted8, unittest.hasLength(3)); + var casted2 = (o['y']!) as core.Map; + unittest.expect(casted2, unittest.hasLength(3)); unittest.expect( - casted8['list'], + casted2['list'], unittest.equals([1, 2, 3]), ); unittest.expect( - casted8['bool'], + casted2['bool'], unittest.equals(true), ); unittest.expect( - casted8['string'], + casted2['string'], unittest.equals('foo'), ); } -core.List> buildUnnamed57() => [ - buildUnnamed56(), - buildUnnamed56(), - ]; - -void checkUnnamed57(core.List> o) { - unittest.expect(o, unittest.hasLength(2)); - checkUnnamed56(o[0]); - checkUnnamed56(o[1]); -} - -core.int buildCounterStatus = 0; -api.Status buildStatus() { - final o = api.Status(); - buildCounterStatus++; - if (buildCounterStatus < 3) { - o.code = 42; - o.details = buildUnnamed57(); - o.message = 'foo'; - } - buildCounterStatus--; - return o; -} - -void checkStatus(api.Status o) { - buildCounterStatus++; - if (buildCounterStatus < 3) { - unittest.expect( - o.code!, - unittest.equals(42), - ); - checkUnnamed57(o.details!); - unittest.expect( - o.message!, - unittest.equals('foo'), - ); - } - buildCounterStatus--; -} - -core.int buildCounterThreatOverride = 0; -api.ThreatOverride buildThreatOverride() { - final o = api.ThreatOverride(); - buildCounterThreatOverride++; - if (buildCounterThreatOverride < 3) { - o.action = 'foo'; - o.threatId = 'foo'; - o.type = 'foo'; +core.int buildCounterLocation = 0; +api.Location buildLocation() { + final o = api.Location(); + buildCounterLocation++; + if (buildCounterLocation < 3) { + o.displayName = 'foo'; + o.labels = buildUnnamed60(); + o.locationId = 'foo'; + o.metadata = buildUnnamed61(); + o.name = 'foo'; } - buildCounterThreatOverride--; + buildCounterLocation--; return o; } -void checkThreatOverride(api.ThreatOverride o) { - buildCounterThreatOverride++; - if (buildCounterThreatOverride < 3) { +void checkLocation(api.Location o) { + buildCounterLocation++; + if (buildCounterLocation < 3) { unittest.expect( - o.action!, + o.displayName!, unittest.equals('foo'), ); + checkUnnamed60(o.labels!); unittest.expect( - o.threatId!, + o.locationId!, unittest.equals('foo'), ); + checkUnnamed61(o.metadata!); unittest.expect( - o.type!, + o.name!, unittest.equals('foo'), ); } - buildCounterThreatOverride--; -} - -core.List buildUnnamed58() => [ - buildSeverityOverride(), - buildSeverityOverride(), - ]; - -void checkUnnamed58(core.List o) { - unittest.expect(o, unittest.hasLength(2)); - checkSeverityOverride(o[0]); - checkSeverityOverride(o[1]); + buildCounterLocation--; } -core.List buildUnnamed59() => [ - buildThreatOverride(), - buildThreatOverride(), +core.List buildUnnamed62() => [ + buildValidationCA(), + buildValidationCA(), ]; -void checkUnnamed59(core.List o) { +void checkUnnamed62(core.List o) { unittest.expect(o, unittest.hasLength(2)); - checkThreatOverride(o[0]); - checkThreatOverride(o[1]); + checkValidationCA(o[0]); + checkValidationCA(o[1]); } -core.int buildCounterThreatPreventionProfile = 0; -api.ThreatPreventionProfile buildThreatPreventionProfile() { - final o = api.ThreatPreventionProfile(); - buildCounterThreatPreventionProfile++; - if (buildCounterThreatPreventionProfile < 3) { - 
o.severityOverrides = buildUnnamed58(); - o.threatOverrides = buildUnnamed59(); +core.int buildCounterMTLSPolicy = 0; +api.MTLSPolicy buildMTLSPolicy() { + final o = api.MTLSPolicy(); + buildCounterMTLSPolicy++; + if (buildCounterMTLSPolicy < 3) { + o.clientValidationCa = buildUnnamed62(); + o.clientValidationMode = 'foo'; + o.clientValidationTrustConfig = 'foo'; } - buildCounterThreatPreventionProfile--; + buildCounterMTLSPolicy--; return o; } -void checkThreatPreventionProfile(api.ThreatPreventionProfile o) { - buildCounterThreatPreventionProfile++; - if (buildCounterThreatPreventionProfile < 3) { - checkUnnamed58(o.severityOverrides!); - checkUnnamed59(o.threatOverrides!); +void checkMTLSPolicy(api.MTLSPolicy o) { + buildCounterMTLSPolicy++; + if (buildCounterMTLSPolicy < 3) { + checkUnnamed62(o.clientValidationCa!); + unittest.expect( + o.clientValidationMode!, + unittest.equals('foo'), + ); + unittest.expect( + o.clientValidationTrustConfig!, + unittest.equals('foo'), + ); } - buildCounterThreatPreventionProfile--; + buildCounterMTLSPolicy--; } -core.List buildUnnamed60() => [ - 'foo', - 'foo', - ]; +core.Map buildUnnamed63() => { + 'x': { + 'list': [1, 2, 3], + 'bool': true, + 'string': 'foo' + }, + 'y': { + 'list': [1, 2, 3], + 'bool': true, + 'string': 'foo' + }, + }; -void checkUnnamed60(core.List o) { +void checkUnnamed63(core.Map o) { unittest.expect(o, unittest.hasLength(2)); + var casted3 = (o['x']!) as core.Map; + unittest.expect(casted3, unittest.hasLength(3)); unittest.expect( - o[0], + casted3['list'], + unittest.equals([1, 2, 3]), + ); + unittest.expect( + casted3['bool'], + unittest.equals(true), + ); + unittest.expect( + casted3['string'], unittest.equals('foo'), ); + var casted4 = (o['y']!) as core.Map; + unittest.expect(casted4, unittest.hasLength(3)); + unittest.expect( + casted4['list'], + unittest.equals([1, 2, 3]), + ); unittest.expect( - o[1], + casted4['bool'], + unittest.equals(true), + ); + unittest.expect( + casted4['string'], unittest.equals('foo'), ); } -core.int buildCounterTlsInspectionPolicy = 0; -api.TlsInspectionPolicy buildTlsInspectionPolicy() { - final o = api.TlsInspectionPolicy(); - buildCounterTlsInspectionPolicy++; - if (buildCounterTlsInspectionPolicy < 3) { - o.caPool = 'foo'; - o.createTime = 'foo'; - o.customTlsFeatures = buildUnnamed60(); - o.description = 'foo'; - o.excludePublicCaSet = true; - o.minTlsVersion = 'foo'; - o.name = 'foo'; - o.tlsFeatureProfile = 'foo'; - o.trustConfig = 'foo'; - o.updateTime = 'foo'; - } - buildCounterTlsInspectionPolicy--; - return o; -} +core.Map buildUnnamed64() => { + 'x': { + 'list': [1, 2, 3], + 'bool': true, + 'string': 'foo' + }, + 'y': { + 'list': [1, 2, 3], + 'bool': true, + 'string': 'foo' + }, + }; -void checkTlsInspectionPolicy(api.TlsInspectionPolicy o) { - buildCounterTlsInspectionPolicy++; - if (buildCounterTlsInspectionPolicy < 3) { - unittest.expect( - o.caPool!, - unittest.equals('foo'), - ); - unittest.expect( - o.createTime!, +void checkUnnamed64(core.Map o) { + unittest.expect(o, unittest.hasLength(2)); + var casted5 = (o['x']!) as core.Map; + unittest.expect(casted5, unittest.hasLength(3)); + unittest.expect( + casted5['list'], + unittest.equals([1, 2, 3]), + ); + unittest.expect( + casted5['bool'], + unittest.equals(true), + ); + unittest.expect( + casted5['string'], + unittest.equals('foo'), + ); + var casted6 = (o['y']!) 
as core.Map; + unittest.expect(casted6, unittest.hasLength(3)); + unittest.expect( + casted6['list'], + unittest.equals([1, 2, 3]), + ); + unittest.expect( + casted6['bool'], + unittest.equals(true), + ); + unittest.expect( + casted6['string'], + unittest.equals('foo'), + ); +} + +core.int buildCounterOperation = 0; +api.Operation buildOperation() { + final o = api.Operation(); + buildCounterOperation++; + if (buildCounterOperation < 3) { + o.done = true; + o.error = buildStatus(); + o.metadata = buildUnnamed63(); + o.name = 'foo'; + o.response = buildUnnamed64(); + } + buildCounterOperation--; + return o; +} + +void checkOperation(api.Operation o) { + buildCounterOperation++; + if (buildCounterOperation < 3) { + unittest.expect(o.done!, unittest.isTrue); + checkStatus(o.error!); + checkUnnamed63(o.metadata!); + unittest.expect( + o.name!, unittest.equals('foo'), ); - checkUnnamed60(o.customTlsFeatures!); + checkUnnamed64(o.response!); + } + buildCounterOperation--; +} + +core.List buildUnnamed65() => [ + 'foo', + 'foo', + ]; + +void checkUnnamed65(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o[0], + unittest.equals('foo'), + ); + unittest.expect( + o[1], + unittest.equals('foo'), + ); +} + +core.int buildCounterRemoveAddressGroupItemsRequest = 0; +api.RemoveAddressGroupItemsRequest buildRemoveAddressGroupItemsRequest() { + final o = api.RemoveAddressGroupItemsRequest(); + buildCounterRemoveAddressGroupItemsRequest++; + if (buildCounterRemoveAddressGroupItemsRequest < 3) { + o.items = buildUnnamed65(); + o.requestId = 'foo'; + } + buildCounterRemoveAddressGroupItemsRequest--; + return o; +} + +void checkRemoveAddressGroupItemsRequest(api.RemoveAddressGroupItemsRequest o) { + buildCounterRemoveAddressGroupItemsRequest++; + if (buildCounterRemoveAddressGroupItemsRequest < 3) { + checkUnnamed65(o.items!); unittest.expect( - o.description!, + o.requestId!, unittest.equals('foo'), ); - unittest.expect(o.excludePublicCaSet!, unittest.isTrue); + } + buildCounterRemoveAddressGroupItemsRequest--; +} + +core.List buildUnnamed66() => [ + buildDestination(), + buildDestination(), + ]; + +void checkUnnamed66(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkDestination(o[0]); + checkDestination(o[1]); +} + +core.List buildUnnamed67() => [ + buildSource(), + buildSource(), + ]; + +void checkUnnamed67(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkSource(o[0]); + checkSource(o[1]); +} + +core.int buildCounterRule = 0; +api.Rule buildRule() { + final o = api.Rule(); + buildCounterRule++; + if (buildCounterRule < 3) { + o.destinations = buildUnnamed66(); + o.sources = buildUnnamed67(); + } + buildCounterRule--; + return o; +} + +void checkRule(api.Rule o) { + buildCounterRule++; + if (buildCounterRule < 3) { + checkUnnamed66(o.destinations!); + checkUnnamed67(o.sources!); + } + buildCounterRule--; +} + +core.Map buildUnnamed68() => { + 'x': 'foo', + 'y': 'foo', + }; + +void checkUnnamed68(core.Map o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o['x']!, + unittest.equals('foo'), + ); + unittest.expect( + o['y']!, + unittest.equals('foo'), + ); +} + +core.int buildCounterSecurityProfile = 0; +api.SecurityProfile buildSecurityProfile() { + final o = api.SecurityProfile(); + buildCounterSecurityProfile++; + if (buildCounterSecurityProfile < 3) { + o.createTime = 'foo'; + o.customInterceptProfile = buildCustomInterceptProfile(); + o.customMirroringProfile = buildCustomMirroringProfile(); + o.description = 'foo'; + 
o.etag = 'foo'; + o.labels = buildUnnamed68(); + o.name = 'foo'; + o.threatPreventionProfile = buildThreatPreventionProfile(); + o.type = 'foo'; + o.updateTime = 'foo'; + } + buildCounterSecurityProfile--; + return o; +} + +void checkSecurityProfile(api.SecurityProfile o) { + buildCounterSecurityProfile++; + if (buildCounterSecurityProfile < 3) { unittest.expect( - o.minTlsVersion!, + o.createTime!, unittest.equals('foo'), ); + checkCustomInterceptProfile(o.customInterceptProfile!); + checkCustomMirroringProfile(o.customMirroringProfile!); unittest.expect( - o.name!, + o.description!, unittest.equals('foo'), ); unittest.expect( - o.tlsFeatureProfile!, + o.etag!, unittest.equals('foo'), ); + checkUnnamed68(o.labels!); unittest.expect( - o.trustConfig!, + o.name!, + unittest.equals('foo'), + ); + checkThreatPreventionProfile(o.threatPreventionProfile!); + unittest.expect( + o.type!, unittest.equals('foo'), ); unittest.expect( @@ -2744,84 +2943,543 @@ void checkTlsInspectionPolicy(api.TlsInspectionPolicy o) { unittest.equals('foo'), ); } - buildCounterTlsInspectionPolicy--; + buildCounterSecurityProfile--; } -core.List buildUnnamed61() => [ - 'foo', - 'foo', - ]; +core.Map buildUnnamed69() => { + 'x': 'foo', + 'y': 'foo', + }; -void checkUnnamed61(core.List o) { +void checkUnnamed69(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( - o[0], + o['x']!, unittest.equals('foo'), ); unittest.expect( - o[1], + o['y']!, unittest.equals('foo'), ); } -core.int buildCounterUrlList = 0; -api.UrlList buildUrlList() { - final o = api.UrlList(); - buildCounterUrlList++; - if (buildCounterUrlList < 3) { +core.int buildCounterSecurityProfileGroup = 0; +api.SecurityProfileGroup buildSecurityProfileGroup() { + final o = api.SecurityProfileGroup(); + buildCounterSecurityProfileGroup++; + if (buildCounterSecurityProfileGroup < 3) { o.createTime = 'foo'; + o.customInterceptProfile = 'foo'; + o.customMirroringProfile = 'foo'; o.description = 'foo'; + o.etag = 'foo'; + o.labels = buildUnnamed69(); o.name = 'foo'; + o.threatPreventionProfile = 'foo'; o.updateTime = 'foo'; - o.values = buildUnnamed61(); } - buildCounterUrlList--; + buildCounterSecurityProfileGroup--; return o; } -void checkUrlList(api.UrlList o) { - buildCounterUrlList++; - if (buildCounterUrlList < 3) { +void checkSecurityProfileGroup(api.SecurityProfileGroup o) { + buildCounterSecurityProfileGroup++; + if (buildCounterSecurityProfileGroup < 3) { unittest.expect( o.createTime!, unittest.equals('foo'), ); + unittest.expect( + o.customInterceptProfile!, + unittest.equals('foo'), + ); + unittest.expect( + o.customMirroringProfile!, + unittest.equals('foo'), + ); unittest.expect( o.description!, unittest.equals('foo'), ); + unittest.expect( + o.etag!, + unittest.equals('foo'), + ); + checkUnnamed69(o.labels!); unittest.expect( o.name!, unittest.equals('foo'), ); + unittest.expect( + o.threatPreventionProfile!, + unittest.equals('foo'), + ); unittest.expect( o.updateTime!, unittest.equals('foo'), ); - checkUnnamed61(o.values!); } - buildCounterUrlList--; + buildCounterSecurityProfileGroup--; } -core.int buildCounterValidationCA = 0; -api.ValidationCA buildValidationCA() { - final o = api.ValidationCA(); - buildCounterValidationCA++; - if (buildCounterValidationCA < 3) { - o.certificateProviderInstance = buildCertificateProviderInstance(); - o.grpcEndpoint = buildGoogleCloudNetworksecurityV1GrpcEndpoint(); - } - buildCounterValidationCA--; - return o; -} +core.Map buildUnnamed70() => { + 'x': 'foo', + 'y': 'foo', + }; -void 
checkValidationCA(api.ValidationCA o) { - buildCounterValidationCA++; - if (buildCounterValidationCA < 3) { - checkCertificateProviderInstance(o.certificateProviderInstance!); - checkGoogleCloudNetworksecurityV1GrpcEndpoint(o.grpcEndpoint!); - } - buildCounterValidationCA--; +void checkUnnamed70(core.Map o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o['x']!, + unittest.equals('foo'), + ); + unittest.expect( + o['y']!, + unittest.equals('foo'), + ); +} + +core.int buildCounterServerTlsPolicy = 0; +api.ServerTlsPolicy buildServerTlsPolicy() { + final o = api.ServerTlsPolicy(); + buildCounterServerTlsPolicy++; + if (buildCounterServerTlsPolicy < 3) { + o.allowOpen = true; + o.createTime = 'foo'; + o.description = 'foo'; + o.labels = buildUnnamed70(); + o.mtlsPolicy = buildMTLSPolicy(); + o.name = 'foo'; + o.serverCertificate = + buildGoogleCloudNetworksecurityV1CertificateProvider(); + o.updateTime = 'foo'; + } + buildCounterServerTlsPolicy--; + return o; +} + +void checkServerTlsPolicy(api.ServerTlsPolicy o) { + buildCounterServerTlsPolicy++; + if (buildCounterServerTlsPolicy < 3) { + unittest.expect(o.allowOpen!, unittest.isTrue); + unittest.expect( + o.createTime!, + unittest.equals('foo'), + ); + unittest.expect( + o.description!, + unittest.equals('foo'), + ); + checkUnnamed70(o.labels!); + checkMTLSPolicy(o.mtlsPolicy!); + unittest.expect( + o.name!, + unittest.equals('foo'), + ); + checkGoogleCloudNetworksecurityV1CertificateProvider(o.serverCertificate!); + unittest.expect( + o.updateTime!, + unittest.equals('foo'), + ); + } + buildCounterServerTlsPolicy--; +} + +core.int buildCounterSeverityOverride = 0; +api.SeverityOverride buildSeverityOverride() { + final o = api.SeverityOverride(); + buildCounterSeverityOverride++; + if (buildCounterSeverityOverride < 3) { + o.action = 'foo'; + o.severity = 'foo'; + } + buildCounterSeverityOverride--; + return o; +} + +void checkSeverityOverride(api.SeverityOverride o) { + buildCounterSeverityOverride++; + if (buildCounterSeverityOverride < 3) { + unittest.expect( + o.action!, + unittest.equals('foo'), + ); + unittest.expect( + o.severity!, + unittest.equals('foo'), + ); + } + buildCounterSeverityOverride--; +} + +core.List buildUnnamed71() => [ + 'foo', + 'foo', + ]; + +void checkUnnamed71(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o[0], + unittest.equals('foo'), + ); + unittest.expect( + o[1], + unittest.equals('foo'), + ); +} + +core.List buildUnnamed72() => [ + 'foo', + 'foo', + ]; + +void checkUnnamed72(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o[0], + unittest.equals('foo'), + ); + unittest.expect( + o[1], + unittest.equals('foo'), + ); +} + +core.int buildCounterSource = 0; +api.Source buildSource() { + final o = api.Source(); + buildCounterSource++; + if (buildCounterSource < 3) { + o.ipBlocks = buildUnnamed71(); + o.principals = buildUnnamed72(); + } + buildCounterSource--; + return o; +} + +void checkSource(api.Source o) { + buildCounterSource++; + if (buildCounterSource < 3) { + checkUnnamed71(o.ipBlocks!); + checkUnnamed72(o.principals!); + } + buildCounterSource--; +} + +core.Map buildUnnamed73() => { + 'x': { + 'list': [1, 2, 3], + 'bool': true, + 'string': 'foo' + }, + 'y': { + 'list': [1, 2, 3], + 'bool': true, + 'string': 'foo' + }, + }; + +void checkUnnamed73(core.Map o) { + unittest.expect(o, unittest.hasLength(2)); + var casted7 = (o['x']!) 
as core.Map; + unittest.expect(casted7, unittest.hasLength(3)); + unittest.expect( + casted7['list'], + unittest.equals([1, 2, 3]), + ); + unittest.expect( + casted7['bool'], + unittest.equals(true), + ); + unittest.expect( + casted7['string'], + unittest.equals('foo'), + ); + var casted8 = (o['y']!) as core.Map; + unittest.expect(casted8, unittest.hasLength(3)); + unittest.expect( + casted8['list'], + unittest.equals([1, 2, 3]), + ); + unittest.expect( + casted8['bool'], + unittest.equals(true), + ); + unittest.expect( + casted8['string'], + unittest.equals('foo'), + ); +} + +core.List> buildUnnamed74() => [ + buildUnnamed73(), + buildUnnamed73(), + ]; + +void checkUnnamed74(core.List> o) { + unittest.expect(o, unittest.hasLength(2)); + checkUnnamed73(o[0]); + checkUnnamed73(o[1]); +} + +core.int buildCounterStatus = 0; +api.Status buildStatus() { + final o = api.Status(); + buildCounterStatus++; + if (buildCounterStatus < 3) { + o.code = 42; + o.details = buildUnnamed74(); + o.message = 'foo'; + } + buildCounterStatus--; + return o; +} + +void checkStatus(api.Status o) { + buildCounterStatus++; + if (buildCounterStatus < 3) { + unittest.expect( + o.code!, + unittest.equals(42), + ); + checkUnnamed74(o.details!); + unittest.expect( + o.message!, + unittest.equals('foo'), + ); + } + buildCounterStatus--; +} + +core.int buildCounterThreatOverride = 0; +api.ThreatOverride buildThreatOverride() { + final o = api.ThreatOverride(); + buildCounterThreatOverride++; + if (buildCounterThreatOverride < 3) { + o.action = 'foo'; + o.threatId = 'foo'; + o.type = 'foo'; + } + buildCounterThreatOverride--; + return o; +} + +void checkThreatOverride(api.ThreatOverride o) { + buildCounterThreatOverride++; + if (buildCounterThreatOverride < 3) { + unittest.expect( + o.action!, + unittest.equals('foo'), + ); + unittest.expect( + o.threatId!, + unittest.equals('foo'), + ); + unittest.expect( + o.type!, + unittest.equals('foo'), + ); + } + buildCounterThreatOverride--; +} + +core.List buildUnnamed75() => [ + buildSeverityOverride(), + buildSeverityOverride(), + ]; + +void checkUnnamed75(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkSeverityOverride(o[0]); + checkSeverityOverride(o[1]); +} + +core.List buildUnnamed76() => [ + buildThreatOverride(), + buildThreatOverride(), + ]; + +void checkUnnamed76(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkThreatOverride(o[0]); + checkThreatOverride(o[1]); +} + +core.int buildCounterThreatPreventionProfile = 0; +api.ThreatPreventionProfile buildThreatPreventionProfile() { + final o = api.ThreatPreventionProfile(); + buildCounterThreatPreventionProfile++; + if (buildCounterThreatPreventionProfile < 3) { + o.severityOverrides = buildUnnamed75(); + o.threatOverrides = buildUnnamed76(); + } + buildCounterThreatPreventionProfile--; + return o; +} + +void checkThreatPreventionProfile(api.ThreatPreventionProfile o) { + buildCounterThreatPreventionProfile++; + if (buildCounterThreatPreventionProfile < 3) { + checkUnnamed75(o.severityOverrides!); + checkUnnamed76(o.threatOverrides!); + } + buildCounterThreatPreventionProfile--; +} + +core.List buildUnnamed77() => [ + 'foo', + 'foo', + ]; + +void checkUnnamed77(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o[0], + unittest.equals('foo'), + ); + unittest.expect( + o[1], + unittest.equals('foo'), + ); +} + +core.int buildCounterTlsInspectionPolicy = 0; +api.TlsInspectionPolicy buildTlsInspectionPolicy() { + final o = api.TlsInspectionPolicy(); + 
buildCounterTlsInspectionPolicy++; + if (buildCounterTlsInspectionPolicy < 3) { + o.caPool = 'foo'; + o.createTime = 'foo'; + o.customTlsFeatures = buildUnnamed77(); + o.description = 'foo'; + o.excludePublicCaSet = true; + o.minTlsVersion = 'foo'; + o.name = 'foo'; + o.tlsFeatureProfile = 'foo'; + o.trustConfig = 'foo'; + o.updateTime = 'foo'; + } + buildCounterTlsInspectionPolicy--; + return o; +} + +void checkTlsInspectionPolicy(api.TlsInspectionPolicy o) { + buildCounterTlsInspectionPolicy++; + if (buildCounterTlsInspectionPolicy < 3) { + unittest.expect( + o.caPool!, + unittest.equals('foo'), + ); + unittest.expect( + o.createTime!, + unittest.equals('foo'), + ); + checkUnnamed77(o.customTlsFeatures!); + unittest.expect( + o.description!, + unittest.equals('foo'), + ); + unittest.expect(o.excludePublicCaSet!, unittest.isTrue); + unittest.expect( + o.minTlsVersion!, + unittest.equals('foo'), + ); + unittest.expect( + o.name!, + unittest.equals('foo'), + ); + unittest.expect( + o.tlsFeatureProfile!, + unittest.equals('foo'), + ); + unittest.expect( + o.trustConfig!, + unittest.equals('foo'), + ); + unittest.expect( + o.updateTime!, + unittest.equals('foo'), + ); + } + buildCounterTlsInspectionPolicy--; +} + +core.List buildUnnamed78() => [ + 'foo', + 'foo', + ]; + +void checkUnnamed78(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o[0], + unittest.equals('foo'), + ); + unittest.expect( + o[1], + unittest.equals('foo'), + ); +} + +core.int buildCounterUrlList = 0; +api.UrlList buildUrlList() { + final o = api.UrlList(); + buildCounterUrlList++; + if (buildCounterUrlList < 3) { + o.createTime = 'foo'; + o.description = 'foo'; + o.name = 'foo'; + o.updateTime = 'foo'; + o.values = buildUnnamed78(); + } + buildCounterUrlList--; + return o; +} + +void checkUrlList(api.UrlList o) { + buildCounterUrlList++; + if (buildCounterUrlList < 3) { + unittest.expect( + o.createTime!, + unittest.equals('foo'), + ); + unittest.expect( + o.description!, + unittest.equals('foo'), + ); + unittest.expect( + o.name!, + unittest.equals('foo'), + ); + unittest.expect( + o.updateTime!, + unittest.equals('foo'), + ); + checkUnnamed78(o.values!); + } + buildCounterUrlList--; +} + +core.int buildCounterValidationCA = 0; +api.ValidationCA buildValidationCA() { + final o = api.ValidationCA(); + buildCounterValidationCA++; + if (buildCounterValidationCA < 3) { + o.certificateProviderInstance = buildCertificateProviderInstance(); + o.grpcEndpoint = buildGoogleCloudNetworksecurityV1GrpcEndpoint(); + } + buildCounterValidationCA--; + return o; +} + +void checkValidationCA(api.ValidationCA o) { + buildCounterValidationCA++; + if (buildCounterValidationCA < 3) { + checkCertificateProviderInstance(o.certificateProviderInstance!); + checkGoogleCloudNetworksecurityV1GrpcEndpoint(o.grpcEndpoint!); + } + buildCounterValidationCA--; } void main() { @@ -2855,6 +3513,158 @@ void main() { }); }); + unittest.group('obj-schema-AuthzPolicy', () { + unittest.test('to-json--from-json', () async { + final o = buildAuthzPolicy(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.AuthzPolicy.fromJson( + oJson as core.Map); + checkAuthzPolicy(od); + }); + }); + + unittest.group('obj-schema-AuthzPolicyAuthzRule', () { + unittest.test('to-json--from-json', () async { + final o = buildAuthzPolicyAuthzRule(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.AuthzPolicyAuthzRule.fromJson( + oJson as core.Map); + checkAuthzPolicyAuthzRule(od); + }); + }); + + 
unittest.group('obj-schema-AuthzPolicyAuthzRuleFrom', () { + unittest.test('to-json--from-json', () async { + final o = buildAuthzPolicyAuthzRuleFrom(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.AuthzPolicyAuthzRuleFrom.fromJson( + oJson as core.Map); + checkAuthzPolicyAuthzRuleFrom(od); + }); + }); + + unittest.group('obj-schema-AuthzPolicyAuthzRuleFromRequestSource', () { + unittest.test('to-json--from-json', () async { + final o = buildAuthzPolicyAuthzRuleFromRequestSource(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.AuthzPolicyAuthzRuleFromRequestSource.fromJson( + oJson as core.Map); + checkAuthzPolicyAuthzRuleFromRequestSource(od); + }); + }); + + unittest.group('obj-schema-AuthzPolicyAuthzRuleHeaderMatch', () { + unittest.test('to-json--from-json', () async { + final o = buildAuthzPolicyAuthzRuleHeaderMatch(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.AuthzPolicyAuthzRuleHeaderMatch.fromJson( + oJson as core.Map); + checkAuthzPolicyAuthzRuleHeaderMatch(od); + }); + }); + + unittest.group('obj-schema-AuthzPolicyAuthzRuleRequestResource', () { + unittest.test('to-json--from-json', () async { + final o = buildAuthzPolicyAuthzRuleRequestResource(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.AuthzPolicyAuthzRuleRequestResource.fromJson( + oJson as core.Map); + checkAuthzPolicyAuthzRuleRequestResource(od); + }); + }); + + unittest.group('obj-schema-AuthzPolicyAuthzRuleRequestResourceTagValueIdSet', + () { + unittest.test('to-json--from-json', () async { + final o = buildAuthzPolicyAuthzRuleRequestResourceTagValueIdSet(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.AuthzPolicyAuthzRuleRequestResourceTagValueIdSet.fromJson( + oJson as core.Map); + checkAuthzPolicyAuthzRuleRequestResourceTagValueIdSet(od); + }); + }); + + unittest.group('obj-schema-AuthzPolicyAuthzRuleStringMatch', () { + unittest.test('to-json--from-json', () async { + final o = buildAuthzPolicyAuthzRuleStringMatch(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.AuthzPolicyAuthzRuleStringMatch.fromJson( + oJson as core.Map); + checkAuthzPolicyAuthzRuleStringMatch(od); + }); + }); + + unittest.group('obj-schema-AuthzPolicyAuthzRuleTo', () { + unittest.test('to-json--from-json', () async { + final o = buildAuthzPolicyAuthzRuleTo(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.AuthzPolicyAuthzRuleTo.fromJson( + oJson as core.Map); + checkAuthzPolicyAuthzRuleTo(od); + }); + }); + + unittest.group('obj-schema-AuthzPolicyAuthzRuleToRequestOperation', () { + unittest.test('to-json--from-json', () async { + final o = buildAuthzPolicyAuthzRuleToRequestOperation(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.AuthzPolicyAuthzRuleToRequestOperation.fromJson( + oJson as core.Map); + checkAuthzPolicyAuthzRuleToRequestOperation(od); + }); + }); + + unittest.group('obj-schema-AuthzPolicyAuthzRuleToRequestOperationHeaderSet', + () { + unittest.test('to-json--from-json', () async { + final o = buildAuthzPolicyAuthzRuleToRequestOperationHeaderSet(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.AuthzPolicyAuthzRuleToRequestOperationHeaderSet.fromJson( + oJson as core.Map); + checkAuthzPolicyAuthzRuleToRequestOperationHeaderSet(od); + }); + }); + + unittest.group('obj-schema-AuthzPolicyCustomProvider', () { + unittest.test('to-json--from-json', () async { + 
final o = buildAuthzPolicyCustomProvider(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.AuthzPolicyCustomProvider.fromJson( + oJson as core.Map); + checkAuthzPolicyCustomProvider(od); + }); + }); + + unittest.group('obj-schema-AuthzPolicyCustomProviderAuthzExtension', () { + unittest.test('to-json--from-json', () async { + final o = buildAuthzPolicyCustomProviderAuthzExtension(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.AuthzPolicyCustomProviderAuthzExtension.fromJson( + oJson as core.Map); + checkAuthzPolicyCustomProviderAuthzExtension(od); + }); + }); + + unittest.group('obj-schema-AuthzPolicyCustomProviderCloudIap', () { + unittest.test('to-json--from-json', () async { + final o = buildAuthzPolicyCustomProviderCloudIap(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.AuthzPolicyCustomProviderCloudIap.fromJson( + oJson as core.Map); + checkAuthzPolicyCustomProviderCloudIap(od); + }); + }); + + unittest.group('obj-schema-AuthzPolicyTarget', () { + unittest.test('to-json--from-json', () async { + final o = buildAuthzPolicyTarget(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.AuthzPolicyTarget.fromJson( + oJson as core.Map); + checkAuthzPolicyTarget(od); + }); + }); + unittest.group('obj-schema-CancelOperationRequest', () { unittest.test('to-json--from-json', () async { final o = buildCancelOperationRequest(); @@ -2887,11 +3697,21 @@ void main() { unittest.group('obj-schema-CloneAddressGroupItemsRequest', () { unittest.test('to-json--from-json', () async { - final o = buildCloneAddressGroupItemsRequest(); + final o = buildCloneAddressGroupItemsRequest(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.CloneAddressGroupItemsRequest.fromJson( + oJson as core.Map); + checkCloneAddressGroupItemsRequest(od); + }); + }); + + unittest.group('obj-schema-CustomInterceptProfile', () { + unittest.test('to-json--from-json', () async { + final o = buildCustomInterceptProfile(); final oJson = convert.jsonDecode(convert.jsonEncode(o)); - final od = api.CloneAddressGroupItemsRequest.fromJson( + final od = api.CustomInterceptProfile.fromJson( oJson as core.Map); - checkCloneAddressGroupItemsRequest(od); + checkCustomInterceptProfile(od); }); }); @@ -3128,6 +3948,16 @@ void main() { }); }); + unittest.group('obj-schema-ListAuthzPoliciesResponse', () { + unittest.test('to-json--from-json', () async { + final o = buildListAuthzPoliciesResponse(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.ListAuthzPoliciesResponse.fromJson( + oJson as core.Map); + checkListAuthzPoliciesResponse(od); + }); + }); + unittest.group('obj-schema-ListClientTlsPoliciesResponse', () { unittest.test('to-json--from-json', () async { final o = buildListClientTlsPoliciesResponse(); @@ -3288,139 +4118,437 @@ void main() { }); }); - unittest.group('obj-schema-Rule', () { - unittest.test('to-json--from-json', () async { - final o = buildRule(); - final oJson = convert.jsonDecode(convert.jsonEncode(o)); - final od = - api.Rule.fromJson(oJson as core.Map); - checkRule(od); - }); - }); + unittest.group('obj-schema-Rule', () { + unittest.test('to-json--from-json', () async { + final o = buildRule(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.Rule.fromJson(oJson as core.Map); + checkRule(od); + }); + }); + + unittest.group('obj-schema-SecurityProfile', () { + unittest.test('to-json--from-json', () async { + final o = 
buildSecurityProfile(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.SecurityProfile.fromJson( + oJson as core.Map); + checkSecurityProfile(od); + }); + }); + + unittest.group('obj-schema-SecurityProfileGroup', () { + unittest.test('to-json--from-json', () async { + final o = buildSecurityProfileGroup(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.SecurityProfileGroup.fromJson( + oJson as core.Map); + checkSecurityProfileGroup(od); + }); + }); + + unittest.group('obj-schema-ServerTlsPolicy', () { + unittest.test('to-json--from-json', () async { + final o = buildServerTlsPolicy(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.ServerTlsPolicy.fromJson( + oJson as core.Map); + checkServerTlsPolicy(od); + }); + }); + + unittest.group('obj-schema-SeverityOverride', () { + unittest.test('to-json--from-json', () async { + final o = buildSeverityOverride(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.SeverityOverride.fromJson( + oJson as core.Map); + checkSeverityOverride(od); + }); + }); + + unittest.group('obj-schema-Source', () { + unittest.test('to-json--from-json', () async { + final o = buildSource(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.Source.fromJson(oJson as core.Map); + checkSource(od); + }); + }); + + unittest.group('obj-schema-Status', () { + unittest.test('to-json--from-json', () async { + final o = buildStatus(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.Status.fromJson(oJson as core.Map); + checkStatus(od); + }); + }); + + unittest.group('obj-schema-ThreatOverride', () { + unittest.test('to-json--from-json', () async { + final o = buildThreatOverride(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.ThreatOverride.fromJson( + oJson as core.Map); + checkThreatOverride(od); + }); + }); + + unittest.group('obj-schema-ThreatPreventionProfile', () { + unittest.test('to-json--from-json', () async { + final o = buildThreatPreventionProfile(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.ThreatPreventionProfile.fromJson( + oJson as core.Map); + checkThreatPreventionProfile(od); + }); + }); + + unittest.group('obj-schema-TlsInspectionPolicy', () { + unittest.test('to-json--from-json', () async { + final o = buildTlsInspectionPolicy(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.TlsInspectionPolicy.fromJson( + oJson as core.Map); + checkTlsInspectionPolicy(od); + }); + }); + + unittest.group('obj-schema-UrlList', () { + unittest.test('to-json--from-json', () async { + final o = buildUrlList(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.UrlList.fromJson(oJson as core.Map); + checkUrlList(od); + }); + }); + + unittest.group('obj-schema-ValidationCA', () { + unittest.test('to-json--from-json', () async { + final o = buildValidationCA(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.ValidationCA.fromJson( + oJson as core.Map); + checkValidationCA(od); + }); + }); + + unittest.group('resource-OrganizationsLocationsAddressGroupsResource', () { + unittest.test('method--addItems', () async { + final mock = HttpServerMock(); + final res = + api.NetworkSecurityApi(mock).organizations.locations.addressGroups; + final arg_request = buildAddAddressGroupItemsRequest(); + final arg_addressGroup = 'foo'; + final arg_$fields = 'foo'; + 
mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.AddAddressGroupItemsRequest.fromJson( + json as core.Map); + checkAddAddressGroupItemsRequest(obj); + + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildOperation()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.addItems(arg_request, arg_addressGroup, + $fields: arg_$fields); + checkOperation(response as api.Operation); + }); + + unittest.test('method--cloneItems', () async { + final mock = HttpServerMock(); + final res = + api.NetworkSecurityApi(mock).organizations.locations.addressGroups; + final arg_request = buildCloneAddressGroupItemsRequest(); + final arg_addressGroup = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.CloneAddressGroupItemsRequest.fromJson( + json as core.Map); + checkCloneAddressGroupItemsRequest(obj); + + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildOperation()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.cloneItems(arg_request, arg_addressGroup, + $fields: arg_$fields); + checkOperation(response as api.Operation); + }); + + unittest.test('method--create', () async { + final mock = HttpServerMock(); + final res = + api.NetworkSecurityApi(mock).organizations.locations.addressGroups; + final arg_request = buildAddressGroup(); + final arg_parent = 'foo'; + final arg_addressGroupId = 'foo'; + final arg_requestId = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest 
req, json) { + final obj = api.AddressGroup.fromJson( + json as core.Map); + checkAddressGroup(obj); + + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; - unittest.group('obj-schema-SecurityProfile', () { - unittest.test('to-json--from-json', () async { - final o = buildSecurityProfile(); - final oJson = convert.jsonDecode(convert.jsonEncode(o)); - final od = api.SecurityProfile.fromJson( - oJson as core.Map); - checkSecurityProfile(od); - }); - }); + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); - unittest.group('obj-schema-SecurityProfileGroup', () { - unittest.test('to-json--from-json', () async { - final o = buildSecurityProfileGroup(); - final oJson = convert.jsonDecode(convert.jsonEncode(o)); - final od = api.SecurityProfileGroup.fromJson( - oJson as core.Map); - checkSecurityProfileGroup(od); - }); - }); + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['addressGroupId']!.first, + unittest.equals(arg_addressGroupId), + ); + unittest.expect( + queryMap['requestId']!.first, + unittest.equals(arg_requestId), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); - unittest.group('obj-schema-ServerTlsPolicy', () { - unittest.test('to-json--from-json', () async { - final o = buildServerTlsPolicy(); - final oJson = convert.jsonDecode(convert.jsonEncode(o)); - final od = api.ServerTlsPolicy.fromJson( - oJson as core.Map); - checkServerTlsPolicy(od); + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildOperation()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.create(arg_request, arg_parent, + addressGroupId: arg_addressGroupId, + requestId: arg_requestId, + $fields: arg_$fields); + checkOperation(response as api.Operation); }); - }); - unittest.group('obj-schema-SeverityOverride', () { - unittest.test('to-json--from-json', () async { - final o = buildSeverityOverride(); - final oJson = convert.jsonDecode(convert.jsonEncode(o)); - final od = api.SeverityOverride.fromJson( - oJson as core.Map); - checkSeverityOverride(od); - }); - }); + unittest.test('method--delete', () async { + final mock = HttpServerMock(); + final res = + api.NetworkSecurityApi(mock).organizations.locations.addressGroups; + final arg_name = 'foo'; + final arg_requestId = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; - 
unittest.group('obj-schema-Source', () { - unittest.test('to-json--from-json', () async { - final o = buildSource(); - final oJson = convert.jsonDecode(convert.jsonEncode(o)); - final od = - api.Source.fromJson(oJson as core.Map); - checkSource(od); - }); - }); + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); - unittest.group('obj-schema-Status', () { - unittest.test('to-json--from-json', () async { - final o = buildStatus(); - final oJson = convert.jsonDecode(convert.jsonEncode(o)); - final od = - api.Status.fromJson(oJson as core.Map); - checkStatus(od); - }); - }); + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['requestId']!.first, + unittest.equals(arg_requestId), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); - unittest.group('obj-schema-ThreatOverride', () { - unittest.test('to-json--from-json', () async { - final o = buildThreatOverride(); - final oJson = convert.jsonDecode(convert.jsonEncode(o)); - final od = api.ThreatOverride.fromJson( - oJson as core.Map); - checkThreatOverride(od); + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildOperation()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.delete(arg_name, + requestId: arg_requestId, $fields: arg_$fields); + checkOperation(response as api.Operation); }); - }); - unittest.group('obj-schema-ThreatPreventionProfile', () { - unittest.test('to-json--from-json', () async { - final o = buildThreatPreventionProfile(); - final oJson = convert.jsonDecode(convert.jsonEncode(o)); - final od = api.ThreatPreventionProfile.fromJson( - oJson as core.Map); - checkThreatPreventionProfile(od); - }); - }); + unittest.test('method--get', () async { + final mock = HttpServerMock(); + final res = + api.NetworkSecurityApi(mock).organizations.locations.addressGroups; + final arg_name = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; - unittest.group('obj-schema-TlsInspectionPolicy', () { - unittest.test('to-json--from-json', () async { - final o = buildTlsInspectionPolicy(); - final oJson = convert.jsonDecode(convert.jsonEncode(o)); - final od = api.TlsInspectionPolicy.fromJson( - oJson as core.Map); - checkTlsInspectionPolicy(od); - }); - }); + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); - unittest.group('obj-schema-UrlList', () { - unittest.test('to-json--from-json', () async { - final o = buildUrlList(); - final oJson = convert.jsonDecode(convert.jsonEncode(o)); - final od = - api.UrlList.fromJson(oJson as core.Map); - checkUrlList(od); - }); - }); + if (query.isNotEmpty) { + 
for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); - unittest.group('obj-schema-ValidationCA', () { - unittest.test('to-json--from-json', () async { - final o = buildValidationCA(); - final oJson = convert.jsonDecode(convert.jsonEncode(o)); - final od = api.ValidationCA.fromJson( - oJson as core.Map); - checkValidationCA(od); + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildAddressGroup()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.get(arg_name, $fields: arg_$fields); + checkAddressGroup(response as api.AddressGroup); }); - }); - unittest.group('resource-OrganizationsLocationsAddressGroupsResource', () { - unittest.test('method--addItems', () async { + unittest.test('method--list', () async { final mock = HttpServerMock(); final res = api.NetworkSecurityApi(mock).organizations.locations.addressGroups; - final arg_request = buildAddAddressGroupItemsRequest(); - final arg_addressGroup = 'foo'; + final arg_parent = 'foo'; + final arg_pageSize = 42; + final arg_pageToken = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.AddAddressGroupItemsRequest.fromJson( - json as core.Map); - checkAddAddressGroupItemsRequest(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -3452,6 +4580,14 @@ void main() { ); } } + unittest.expect( + core.int.parse(queryMap['pageSize']!.first), + unittest.equals(arg_pageSize), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -3460,26 +4596,25 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildOperation()); + final resp = convert.json.encode(buildListAddressGroupsResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.addItems(arg_request, arg_addressGroup, + final response = await res.list(arg_parent, + pageSize: arg_pageSize, + pageToken: arg_pageToken, $fields: arg_$fields); - checkOperation(response as api.Operation); + checkListAddressGroupsResponse(response as api.ListAddressGroupsResponse); }); - unittest.test('method--cloneItems', () async { + unittest.test('method--listReferences', () async { final mock = HttpServerMock(); final res = api.NetworkSecurityApi(mock).organizations.locations.addressGroups; - final arg_request = buildCloneAddressGroupItemsRequest(); final arg_addressGroup = 'foo'; + final arg_pageSize = 42; + final arg_pageToken = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.CloneAddressGroupItemsRequest.fromJson( - json as core.Map); - checkCloneAddressGroupItemsRequest(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -3511,6 +4646,14 @@ void main() { ); } } + unittest.expect( + core.int.parse(queryMap['pageSize']!.first), + unittest.equals(arg_pageSize), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -3519,22 +4662,26 @@ void main() { final h = { 
'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildOperation()); + final resp = + convert.json.encode(buildListAddressGroupReferencesResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.cloneItems(arg_request, arg_addressGroup, + final response = await res.listReferences(arg_addressGroup, + pageSize: arg_pageSize, + pageToken: arg_pageToken, $fields: arg_$fields); - checkOperation(response as api.Operation); + checkListAddressGroupReferencesResponse( + response as api.ListAddressGroupReferencesResponse); }); - unittest.test('method--create', () async { + unittest.test('method--patch', () async { final mock = HttpServerMock(); final res = api.NetworkSecurityApi(mock).organizations.locations.addressGroups; final arg_request = buildAddressGroup(); - final arg_parent = 'foo'; - final arg_addressGroupId = 'foo'; + final arg_name = 'foo'; final arg_requestId = 'foo'; + final arg_updateMask = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final obj = api.AddressGroup.fromJson( @@ -3572,14 +4719,14 @@ void main() { ); } } - unittest.expect( - queryMap['addressGroupId']!.first, - unittest.equals(arg_addressGroupId), - ); unittest.expect( queryMap['requestId']!.first, unittest.equals(arg_requestId), ); + unittest.expect( + queryMap['updateMask']!.first, + unittest.equals(arg_updateMask), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -3591,21 +4738,25 @@ void main() { final resp = convert.json.encode(buildOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.create(arg_request, arg_parent, - addressGroupId: arg_addressGroupId, + final response = await res.patch(arg_request, arg_name, requestId: arg_requestId, + updateMask: arg_updateMask, $fields: arg_$fields); checkOperation(response as api.Operation); }); - unittest.test('method--delete', () async { + unittest.test('method--removeItems', () async { final mock = HttpServerMock(); final res = api.NetworkSecurityApi(mock).organizations.locations.addressGroups; - final arg_name = 'foo'; - final arg_requestId = 'foo'; + final arg_request = buildRemoveAddressGroupItemsRequest(); + final arg_addressGroup = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.RemoveAddressGroupItemsRequest.fromJson( + json as core.Map); + checkRemoveAddressGroupItemsRequest(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -3637,10 +4788,6 @@ void main() { ); } } - unittest.expect( - queryMap['requestId']!.first, - unittest.equals(arg_requestId), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -3652,18 +4799,30 @@ void main() { final resp = convert.json.encode(buildOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.delete(arg_name, - requestId: arg_requestId, $fields: arg_$fields); + final response = await res.removeItems(arg_request, arg_addressGroup, + $fields: arg_$fields); checkOperation(response as api.Operation); }); + }); - unittest.test('method--get', () async { + unittest.group('resource-OrganizationsLocationsFirewallEndpointsResource', + () { + unittest.test('method--create', () async { final mock = HttpServerMock(); - final res = - api.NetworkSecurityApi(mock).organizations.locations.addressGroups; - final arg_name = 'foo'; + final res = 
api.NetworkSecurityApi(mock) + .organizations + .locations + .firewallEndpoints; + final arg_request = buildFirewallEndpoint(); + final arg_parent = 'foo'; + final arg_firewallEndpointId = 'foo'; + final arg_requestId = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.FirewallEndpoint.fromJson( + json as core.Map); + checkFirewallEndpoint(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -3695,6 +4854,14 @@ void main() { ); } } + unittest.expect( + queryMap['firewallEndpointId']!.first, + unittest.equals(arg_firewallEndpointId), + ); + unittest.expect( + queryMap['requestId']!.first, + unittest.equals(arg_requestId), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -3703,20 +4870,24 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildAddressGroup()); + final resp = convert.json.encode(buildOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.get(arg_name, $fields: arg_$fields); - checkAddressGroup(response as api.AddressGroup); + final response = await res.create(arg_request, arg_parent, + firewallEndpointId: arg_firewallEndpointId, + requestId: arg_requestId, + $fields: arg_$fields); + checkOperation(response as api.Operation); }); - unittest.test('method--list', () async { + unittest.test('method--delete', () async { final mock = HttpServerMock(); - final res = - api.NetworkSecurityApi(mock).organizations.locations.addressGroups; - final arg_parent = 'foo'; - final arg_pageSize = 42; - final arg_pageToken = 'foo'; + final res = api.NetworkSecurityApi(mock) + .organizations + .locations + .firewallEndpoints; + final arg_name = 'foo'; + final arg_requestId = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -3751,12 +4922,8 @@ void main() { } } unittest.expect( - core.int.parse(queryMap['pageSize']!.first), - unittest.equals(arg_pageSize), - ); - unittest.expect( - queryMap['pageToken']!.first, - unittest.equals(arg_pageToken), + queryMap['requestId']!.first, + unittest.equals(arg_requestId), ); unittest.expect( queryMap['fields']!.first, @@ -3766,23 +4933,21 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildListAddressGroupsResponse()); + final resp = convert.json.encode(buildOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.list(arg_parent, - pageSize: arg_pageSize, - pageToken: arg_pageToken, - $fields: arg_$fields); - checkListAddressGroupsResponse(response as api.ListAddressGroupsResponse); + final response = await res.delete(arg_name, + requestId: arg_requestId, $fields: arg_$fields); + checkOperation(response as api.Operation); }); - unittest.test('method--listReferences', () async { + unittest.test('method--get', () async { final mock = HttpServerMock(); - final res = - api.NetworkSecurityApi(mock).organizations.locations.addressGroups; - final arg_addressGroup = 'foo'; - final arg_pageSize = 42; - final arg_pageToken = 'foo'; + final res = api.NetworkSecurityApi(mock) + .organizations + .locations + .firewallEndpoints; + final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -3816,14 +4981,6 @@ void main() { ); } } - 
unittest.expect( - core.int.parse(queryMap['pageSize']!.first), - unittest.equals(arg_pageSize), - ); - unittest.expect( - queryMap['pageToken']!.first, - unittest.equals(arg_pageToken), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -3832,32 +4989,26 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = - convert.json.encode(buildListAddressGroupReferencesResponse()); + final resp = convert.json.encode(buildFirewallEndpoint()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.listReferences(arg_addressGroup, - pageSize: arg_pageSize, - pageToken: arg_pageToken, - $fields: arg_$fields); - checkListAddressGroupReferencesResponse( - response as api.ListAddressGroupReferencesResponse); + final response = await res.get(arg_name, $fields: arg_$fields); + checkFirewallEndpoint(response as api.FirewallEndpoint); }); - unittest.test('method--patch', () async { + unittest.test('method--list', () async { final mock = HttpServerMock(); - final res = - api.NetworkSecurityApi(mock).organizations.locations.addressGroups; - final arg_request = buildAddressGroup(); - final arg_name = 'foo'; - final arg_requestId = 'foo'; - final arg_updateMask = 'foo'; + final res = api.NetworkSecurityApi(mock) + .organizations + .locations + .firewallEndpoints; + final arg_parent = 'foo'; + final arg_filter = 'foo'; + final arg_orderBy = 'foo'; + final arg_pageSize = 42; + final arg_pageToken = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.AddressGroup.fromJson( - json as core.Map); - checkAddressGroup(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -3890,12 +5041,20 @@ void main() { } } unittest.expect( - queryMap['requestId']!.first, - unittest.equals(arg_requestId), + queryMap['filter']!.first, + unittest.equals(arg_filter), ); unittest.expect( - queryMap['updateMask']!.first, - unittest.equals(arg_updateMask), + queryMap['orderBy']!.first, + unittest.equals(arg_orderBy), + ); + unittest.expect( + core.int.parse(queryMap['pageSize']!.first), + unittest.equals(arg_pageSize), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), ); unittest.expect( queryMap['fields']!.first, @@ -3905,27 +5064,34 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildOperation()); + final resp = convert.json.encode(buildListFirewallEndpointsResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.patch(arg_request, arg_name, - requestId: arg_requestId, - updateMask: arg_updateMask, + final response = await res.list(arg_parent, + filter: arg_filter, + orderBy: arg_orderBy, + pageSize: arg_pageSize, + pageToken: arg_pageToken, $fields: arg_$fields); - checkOperation(response as api.Operation); + checkListFirewallEndpointsResponse( + response as api.ListFirewallEndpointsResponse); }); - - unittest.test('method--removeItems', () async { - final mock = HttpServerMock(); - final res = - api.NetworkSecurityApi(mock).organizations.locations.addressGroups; - final arg_request = buildRemoveAddressGroupItemsRequest(); - final arg_addressGroup = 'foo'; + + unittest.test('method--patch', () async { + final mock = HttpServerMock(); + final res = api.NetworkSecurityApi(mock) + .organizations + .locations + .firewallEndpoints; + final arg_request = buildFirewallEndpoint(); + 
final arg_name = 'foo'; + final arg_requestId = 'foo'; + final arg_updateMask = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.RemoveAddressGroupItemsRequest.fromJson( + final obj = api.FirewallEndpoint.fromJson( json as core.Map); - checkRemoveAddressGroupItemsRequest(obj); + checkFirewallEndpoint(obj); final path = req.url.path; var pathOffset = 0; @@ -3958,6 +5124,14 @@ void main() { ); } } + unittest.expect( + queryMap['requestId']!.first, + unittest.equals(arg_requestId), + ); + unittest.expect( + queryMap['updateMask']!.first, + unittest.equals(arg_updateMask), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -3969,29 +5143,26 @@ void main() { final resp = convert.json.encode(buildOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.removeItems(arg_request, arg_addressGroup, + final response = await res.patch(arg_request, arg_name, + requestId: arg_requestId, + updateMask: arg_updateMask, $fields: arg_$fields); checkOperation(response as api.Operation); }); }); - unittest.group('resource-OrganizationsLocationsFirewallEndpointsResource', - () { - unittest.test('method--create', () async { + unittest.group('resource-OrganizationsLocationsOperationsResource', () { + unittest.test('method--cancel', () async { final mock = HttpServerMock(); - final res = api.NetworkSecurityApi(mock) - .organizations - .locations - .firewallEndpoints; - final arg_request = buildFirewallEndpoint(); - final arg_parent = 'foo'; - final arg_firewallEndpointId = 'foo'; - final arg_requestId = 'foo'; + final res = + api.NetworkSecurityApi(mock).organizations.locations.operations; + final arg_request = buildCancelOperationRequest(); + final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.FirewallEndpoint.fromJson( + final obj = api.CancelOperationRequest.fromJson( json as core.Map); - checkFirewallEndpoint(obj); + checkCancelOperationRequest(obj); final path = req.url.path; var pathOffset = 0; @@ -4024,14 +5195,6 @@ void main() { ); } } - unittest.expect( - queryMap['firewallEndpointId']!.first, - unittest.equals(arg_firewallEndpointId), - ); - unittest.expect( - queryMap['requestId']!.first, - unittest.equals(arg_requestId), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -4040,24 +5203,19 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildOperation()); + final resp = convert.json.encode(buildEmpty()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.create(arg_request, arg_parent, - firewallEndpointId: arg_firewallEndpointId, - requestId: arg_requestId, - $fields: arg_$fields); - checkOperation(response as api.Operation); + final response = + await res.cancel(arg_request, arg_name, $fields: arg_$fields); + checkEmpty(response as api.Empty); }); unittest.test('method--delete', () async { final mock = HttpServerMock(); - final res = api.NetworkSecurityApi(mock) - .organizations - .locations - .firewallEndpoints; + final res = + api.NetworkSecurityApi(mock).organizations.locations.operations; final arg_name = 'foo'; - final arg_requestId = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -4091,10 +5249,6 @@ void main() { ); } } - 
unittest.expect( - queryMap['requestId']!.first, - unittest.equals(arg_requestId), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -4103,20 +5257,17 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildOperation()); + final resp = convert.json.encode(buildEmpty()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.delete(arg_name, - requestId: arg_requestId, $fields: arg_$fields); - checkOperation(response as api.Operation); + final response = await res.delete(arg_name, $fields: arg_$fields); + checkEmpty(response as api.Empty); }); unittest.test('method--get', () async { final mock = HttpServerMock(); - final res = api.NetworkSecurityApi(mock) - .organizations - .locations - .firewallEndpoints; + final res = + api.NetworkSecurityApi(mock).organizations.locations.operations; final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -4159,22 +5310,19 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildFirewallEndpoint()); + final resp = convert.json.encode(buildOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); final response = await res.get(arg_name, $fields: arg_$fields); - checkFirewallEndpoint(response as api.FirewallEndpoint); + checkOperation(response as api.Operation); }); unittest.test('method--list', () async { final mock = HttpServerMock(); - final res = api.NetworkSecurityApi(mock) - .organizations - .locations - .firewallEndpoints; - final arg_parent = 'foo'; + final res = + api.NetworkSecurityApi(mock).organizations.locations.operations; + final arg_name = 'foo'; final arg_filter = 'foo'; - final arg_orderBy = 'foo'; final arg_pageSize = 42; final arg_pageToken = 'foo'; final arg_$fields = 'foo'; @@ -4214,10 +5362,6 @@ void main() { queryMap['filter']!.first, unittest.equals(arg_filter), ); - unittest.expect( - queryMap['orderBy']!.first, - unittest.equals(arg_orderBy), - ); unittest.expect( core.int.parse(queryMap['pageSize']!.first), unittest.equals(arg_pageSize), @@ -4234,34 +5378,34 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildListFirewallEndpointsResponse()); + final resp = convert.json.encode(buildListOperationsResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.list(arg_parent, + final response = await res.list(arg_name, filter: arg_filter, - orderBy: arg_orderBy, pageSize: arg_pageSize, pageToken: arg_pageToken, $fields: arg_$fields); - checkListFirewallEndpointsResponse( - response as api.ListFirewallEndpointsResponse); + checkListOperationsResponse(response as api.ListOperationsResponse); }); + }); - unittest.test('method--patch', () async { + unittest.group('resource-OrganizationsLocationsSecurityProfileGroupsResource', + () { + unittest.test('method--create', () async { final mock = HttpServerMock(); final res = api.NetworkSecurityApi(mock) .organizations .locations - .firewallEndpoints; - final arg_request = buildFirewallEndpoint(); - final arg_name = 'foo'; - final arg_requestId = 'foo'; - final arg_updateMask = 'foo'; + .securityProfileGroups; + final arg_request = buildSecurityProfileGroup(); + final arg_parent = 'foo'; + final arg_securityProfileGroupId = 'foo'; final arg_$fields = 'foo'; 
mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.FirewallEndpoint.fromJson( + final obj = api.SecurityProfileGroup.fromJson( json as core.Map); - checkFirewallEndpoint(obj); + checkSecurityProfileGroup(obj); final path = req.url.path; var pathOffset = 0; @@ -4295,12 +5439,8 @@ void main() { } } unittest.expect( - queryMap['requestId']!.first, - unittest.equals(arg_requestId), - ); - unittest.expect( - queryMap['updateMask']!.first, - unittest.equals(arg_updateMask), + queryMap['securityProfileGroupId']!.first, + unittest.equals(arg_securityProfileGroupId), ); unittest.expect( queryMap['fields']!.first, @@ -4313,27 +5453,22 @@ void main() { final resp = convert.json.encode(buildOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.patch(arg_request, arg_name, - requestId: arg_requestId, - updateMask: arg_updateMask, + final response = await res.create(arg_request, arg_parent, + securityProfileGroupId: arg_securityProfileGroupId, $fields: arg_$fields); checkOperation(response as api.Operation); }); - }); - unittest.group('resource-OrganizationsLocationsOperationsResource', () { - unittest.test('method--cancel', () async { + unittest.test('method--delete', () async { final mock = HttpServerMock(); - final res = - api.NetworkSecurityApi(mock).organizations.locations.operations; - final arg_request = buildCancelOperationRequest(); + final res = api.NetworkSecurityApi(mock) + .organizations + .locations + .securityProfileGroups; final arg_name = 'foo'; + final arg_etag = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.CancelOperationRequest.fromJson( - json as core.Map); - checkCancelOperationRequest(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -4365,6 +5500,10 @@ void main() { ); } } + unittest.expect( + queryMap['etag']!.first, + unittest.equals(arg_etag), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -4373,18 +5512,20 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildEmpty()); + final resp = convert.json.encode(buildOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); final response = - await res.cancel(arg_request, arg_name, $fields: arg_$fields); - checkEmpty(response as api.Empty); + await res.delete(arg_name, etag: arg_etag, $fields: arg_$fields); + checkOperation(response as api.Operation); }); - unittest.test('method--delete', () async { + unittest.test('method--get', () async { final mock = HttpServerMock(); - final res = - api.NetworkSecurityApi(mock).organizations.locations.operations; + final res = api.NetworkSecurityApi(mock) + .organizations + .locations + .securityProfileGroups; final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -4427,18 +5568,22 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildEmpty()); + final resp = convert.json.encode(buildSecurityProfileGroup()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.delete(arg_name, $fields: arg_$fields); - checkEmpty(response as api.Empty); + final response = await res.get(arg_name, $fields: arg_$fields); + checkSecurityProfileGroup(response as api.SecurityProfileGroup); }); - unittest.test('method--get', () async { 
+ unittest.test('method--list', () async { final mock = HttpServerMock(); - final res = - api.NetworkSecurityApi(mock).organizations.locations.operations; - final arg_name = 'foo'; + final res = api.NetworkSecurityApi(mock) + .organizations + .locations + .securityProfileGroups; + final arg_parent = 'foo'; + final arg_pageSize = 42; + final arg_pageToken = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -4472,6 +5617,14 @@ void main() { ); } } + unittest.expect( + core.int.parse(queryMap['pageSize']!.first), + unittest.equals(arg_pageSize), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -4480,23 +5633,33 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildOperation()); + final resp = + convert.json.encode(buildListSecurityProfileGroupsResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.get(arg_name, $fields: arg_$fields); - checkOperation(response as api.Operation); + final response = await res.list(arg_parent, + pageSize: arg_pageSize, + pageToken: arg_pageToken, + $fields: arg_$fields); + checkListSecurityProfileGroupsResponse( + response as api.ListSecurityProfileGroupsResponse); }); - unittest.test('method--list', () async { + unittest.test('method--patch', () async { final mock = HttpServerMock(); - final res = - api.NetworkSecurityApi(mock).organizations.locations.operations; + final res = api.NetworkSecurityApi(mock) + .organizations + .locations + .securityProfileGroups; + final arg_request = buildSecurityProfileGroup(); final arg_name = 'foo'; - final arg_filter = 'foo'; - final arg_pageSize = 42; - final arg_pageToken = 'foo'; + final arg_updateMask = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.SecurityProfileGroup.fromJson( + json as core.Map); + checkSecurityProfileGroup(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -4529,16 +5692,8 @@ void main() { } } unittest.expect( - queryMap['filter']!.first, - unittest.equals(arg_filter), - ); - unittest.expect( - core.int.parse(queryMap['pageSize']!.first), - unittest.equals(arg_pageSize), - ); - unittest.expect( - queryMap['pageToken']!.first, - unittest.equals(arg_pageToken), + queryMap['updateMask']!.first, + unittest.equals(arg_updateMask), ); unittest.expect( queryMap['fields']!.first, @@ -4548,34 +5703,28 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildListOperationsResponse()); - return async.Future.value(stringResponse(200, h, resp)); - }), true); - final response = await res.list(arg_name, - filter: arg_filter, - pageSize: arg_pageSize, - pageToken: arg_pageToken, - $fields: arg_$fields); - checkListOperationsResponse(response as api.ListOperationsResponse); + final resp = convert.json.encode(buildOperation()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.patch(arg_request, arg_name, + updateMask: arg_updateMask, $fields: arg_$fields); + checkOperation(response as api.Operation); }); }); - unittest.group('resource-OrganizationsLocationsSecurityProfileGroupsResource', - () { + unittest.group('resource-OrganizationsLocationsSecurityProfilesResource', () { 
unittest.test('method--create', () async { final mock = HttpServerMock(); - final res = api.NetworkSecurityApi(mock) - .organizations - .locations - .securityProfileGroups; - final arg_request = buildSecurityProfileGroup(); + final res = + api.NetworkSecurityApi(mock).organizations.locations.securityProfiles; + final arg_request = buildSecurityProfile(); final arg_parent = 'foo'; - final arg_securityProfileGroupId = 'foo'; + final arg_securityProfileId = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.SecurityProfileGroup.fromJson( + final obj = api.SecurityProfile.fromJson( json as core.Map); - checkSecurityProfileGroup(obj); + checkSecurityProfile(obj); final path = req.url.path; var pathOffset = 0; @@ -4609,8 +5758,8 @@ void main() { } } unittest.expect( - queryMap['securityProfileGroupId']!.first, - unittest.equals(arg_securityProfileGroupId), + queryMap['securityProfileId']!.first, + unittest.equals(arg_securityProfileId), ); unittest.expect( queryMap['fields']!.first, @@ -4624,17 +5773,14 @@ void main() { return async.Future.value(stringResponse(200, h, resp)); }), true); final response = await res.create(arg_request, arg_parent, - securityProfileGroupId: arg_securityProfileGroupId, - $fields: arg_$fields); + securityProfileId: arg_securityProfileId, $fields: arg_$fields); checkOperation(response as api.Operation); }); unittest.test('method--delete', () async { final mock = HttpServerMock(); - final res = api.NetworkSecurityApi(mock) - .organizations - .locations - .securityProfileGroups; + final res = + api.NetworkSecurityApi(mock).organizations.locations.securityProfiles; final arg_name = 'foo'; final arg_etag = 'foo'; final arg_$fields = 'foo'; @@ -4692,10 +5838,8 @@ void main() { unittest.test('method--get', () async { final mock = HttpServerMock(); - final res = api.NetworkSecurityApi(mock) - .organizations - .locations - .securityProfileGroups; + final res = + api.NetworkSecurityApi(mock).organizations.locations.securityProfiles; final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -4738,19 +5882,17 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildSecurityProfileGroup()); + final resp = convert.json.encode(buildSecurityProfile()); return async.Future.value(stringResponse(200, h, resp)); }), true); final response = await res.get(arg_name, $fields: arg_$fields); - checkSecurityProfileGroup(response as api.SecurityProfileGroup); + checkSecurityProfile(response as api.SecurityProfile); }); unittest.test('method--list', () async { final mock = HttpServerMock(); - final res = api.NetworkSecurityApi(mock) - .organizations - .locations - .securityProfileGroups; + final res = + api.NetworkSecurityApi(mock).organizations.locations.securityProfiles; final arg_parent = 'foo'; final arg_pageSize = 42; final arg_pageToken = 'foo'; @@ -4803,32 +5945,29 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = - convert.json.encode(buildListSecurityProfileGroupsResponse()); + final resp = convert.json.encode(buildListSecurityProfilesResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); final response = await res.list(arg_parent, pageSize: arg_pageSize, pageToken: arg_pageToken, $fields: arg_$fields); - checkListSecurityProfileGroupsResponse( - response as api.ListSecurityProfileGroupsResponse); + 
checkListSecurityProfilesResponse( + response as api.ListSecurityProfilesResponse); }); unittest.test('method--patch', () async { final mock = HttpServerMock(); - final res = api.NetworkSecurityApi(mock) - .organizations - .locations - .securityProfileGroups; - final arg_request = buildSecurityProfileGroup(); + final res = + api.NetworkSecurityApi(mock).organizations.locations.securityProfiles; + final arg_request = buildSecurityProfile(); final arg_name = 'foo'; final arg_updateMask = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.SecurityProfileGroup.fromJson( + final obj = api.SecurityProfile.fromJson( json as core.Map); - checkSecurityProfileGroup(obj); + checkSecurityProfile(obj); final path = req.url.path; var pathOffset = 0; @@ -4882,20 +6021,13 @@ void main() { }); }); - unittest.group('resource-OrganizationsLocationsSecurityProfilesResource', () { - unittest.test('method--create', () async { + unittest.group('resource-ProjectsLocationsResource', () { + unittest.test('method--get', () async { final mock = HttpServerMock(); - final res = - api.NetworkSecurityApi(mock).organizations.locations.securityProfiles; - final arg_request = buildSecurityProfile(); - final arg_parent = 'foo'; - final arg_securityProfileId = 'foo'; + final res = api.NetworkSecurityApi(mock).projects.locations; + final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.SecurityProfile.fromJson( - json as core.Map); - checkSecurityProfile(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -4927,10 +6059,6 @@ void main() { ); } } - unittest.expect( - queryMap['securityProfileId']!.first, - unittest.equals(arg_securityProfileId), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -4939,20 +6067,20 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildOperation()); + final resp = convert.json.encode(buildLocation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.create(arg_request, arg_parent, - securityProfileId: arg_securityProfileId, $fields: arg_$fields); - checkOperation(response as api.Operation); + final response = await res.get(arg_name, $fields: arg_$fields); + checkLocation(response as api.Location); }); - unittest.test('method--delete', () async { + unittest.test('method--list', () async { final mock = HttpServerMock(); - final res = - api.NetworkSecurityApi(mock).organizations.locations.securityProfiles; + final res = api.NetworkSecurityApi(mock).projects.locations; final arg_name = 'foo'; - final arg_etag = 'foo'; + final arg_filter = 'foo'; + final arg_pageSize = 42; + final arg_pageToken = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -4987,8 +6115,16 @@ void main() { } } unittest.expect( - queryMap['etag']!.first, - unittest.equals(arg_etag), + queryMap['filter']!.first, + unittest.equals(arg_filter), + ); + unittest.expect( + core.int.parse(queryMap['pageSize']!.first), + unittest.equals(arg_pageSize), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), ); unittest.expect( queryMap['fields']!.first, @@ -4998,21 +6134,30 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildOperation()); + 
final resp = convert.json.encode(buildListLocationsResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = - await res.delete(arg_name, etag: arg_etag, $fields: arg_$fields); - checkOperation(response as api.Operation); + final response = await res.list(arg_name, + filter: arg_filter, + pageSize: arg_pageSize, + pageToken: arg_pageToken, + $fields: arg_$fields); + checkListLocationsResponse(response as api.ListLocationsResponse); }); + }); - unittest.test('method--get', () async { + unittest.group('resource-ProjectsLocationsAddressGroupsResource', () { + unittest.test('method--addItems', () async { final mock = HttpServerMock(); - final res = - api.NetworkSecurityApi(mock).organizations.locations.securityProfiles; - final arg_name = 'foo'; + final res = api.NetworkSecurityApi(mock).projects.locations.addressGroups; + final arg_request = buildAddAddressGroupItemsRequest(); + final arg_addressGroup = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.AddAddressGroupItemsRequest.fromJson( + json as core.Map); + checkAddAddressGroupItemsRequest(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -5052,22 +6197,25 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildSecurityProfile()); + final resp = convert.json.encode(buildOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.get(arg_name, $fields: arg_$fields); - checkSecurityProfile(response as api.SecurityProfile); + final response = await res.addItems(arg_request, arg_addressGroup, + $fields: arg_$fields); + checkOperation(response as api.Operation); }); - unittest.test('method--list', () async { + unittest.test('method--cloneItems', () async { final mock = HttpServerMock(); - final res = - api.NetworkSecurityApi(mock).organizations.locations.securityProfiles; - final arg_parent = 'foo'; - final arg_pageSize = 42; - final arg_pageToken = 'foo'; + final res = api.NetworkSecurityApi(mock).projects.locations.addressGroups; + final arg_request = buildCloneAddressGroupItemsRequest(); + final arg_addressGroup = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.CloneAddressGroupItemsRequest.fromJson( + json as core.Map); + checkCloneAddressGroupItemsRequest(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -5099,14 +6247,6 @@ void main() { ); } } - unittest.expect( - core.int.parse(queryMap['pageSize']!.first), - unittest.equals(arg_pageSize), - ); - unittest.expect( - queryMap['pageToken']!.first, - unittest.equals(arg_pageToken), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -5115,29 +6255,26 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildListSecurityProfilesResponse()); + final resp = convert.json.encode(buildOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.list(arg_parent, - pageSize: arg_pageSize, - pageToken: arg_pageToken, + final response = await res.cloneItems(arg_request, arg_addressGroup, $fields: arg_$fields); - checkListSecurityProfilesResponse( - response as api.ListSecurityProfilesResponse); + checkOperation(response as api.Operation); }); - unittest.test('method--patch', () async { + 
unittest.test('method--create', () async { final mock = HttpServerMock(); - final res = - api.NetworkSecurityApi(mock).organizations.locations.securityProfiles; - final arg_request = buildSecurityProfile(); - final arg_name = 'foo'; - final arg_updateMask = 'foo'; + final res = api.NetworkSecurityApi(mock).projects.locations.addressGroups; + final arg_request = buildAddressGroup(); + final arg_parent = 'foo'; + final arg_addressGroupId = 'foo'; + final arg_requestId = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.SecurityProfile.fromJson( + final obj = api.AddressGroup.fromJson( json as core.Map); - checkSecurityProfile(obj); + checkAddressGroup(obj); final path = req.url.path; var pathOffset = 0; @@ -5171,8 +6308,12 @@ void main() { } } unittest.expect( - queryMap['updateMask']!.first, - unittest.equals(arg_updateMask), + queryMap['addressGroupId']!.first, + unittest.equals(arg_addressGroupId), + ); + unittest.expect( + queryMap['requestId']!.first, + unittest.equals(arg_requestId), ); unittest.expect( queryMap['fields']!.first, @@ -5185,17 +6326,18 @@ void main() { final resp = convert.json.encode(buildOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.patch(arg_request, arg_name, - updateMask: arg_updateMask, $fields: arg_$fields); + final response = await res.create(arg_request, arg_parent, + addressGroupId: arg_addressGroupId, + requestId: arg_requestId, + $fields: arg_$fields); checkOperation(response as api.Operation); }); - }); - unittest.group('resource-ProjectsLocationsResource', () { - unittest.test('method--get', () async { + unittest.test('method--delete', () async { final mock = HttpServerMock(); - final res = api.NetworkSecurityApi(mock).projects.locations; + final res = api.NetworkSecurityApi(mock).projects.locations.addressGroups; final arg_name = 'foo'; + final arg_requestId = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -5229,6 +6371,10 @@ void main() { ); } } + unittest.expect( + queryMap['requestId']!.first, + unittest.equals(arg_requestId), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -5237,20 +6383,18 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildLocation()); + final resp = convert.json.encode(buildOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.get(arg_name, $fields: arg_$fields); - checkLocation(response as api.Location); + final response = await res.delete(arg_name, + requestId: arg_requestId, $fields: arg_$fields); + checkOperation(response as api.Operation); }); - unittest.test('method--list', () async { + unittest.test('method--get', () async { final mock = HttpServerMock(); - final res = api.NetworkSecurityApi(mock).projects.locations; + final res = api.NetworkSecurityApi(mock).projects.locations.addressGroups; final arg_name = 'foo'; - final arg_filter = 'foo'; - final arg_pageSize = 42; - final arg_pageToken = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -5284,18 +6428,6 @@ void main() { ); } } - unittest.expect( - queryMap['filter']!.first, - unittest.equals(arg_filter), - ); - unittest.expect( - core.int.parse(queryMap['pageSize']!.first), - unittest.equals(arg_pageSize), - ); - 
unittest.expect( - queryMap['pageToken']!.first, - unittest.equals(arg_pageToken), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -5304,30 +6436,20 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildListLocationsResponse()); + final resp = convert.json.encode(buildAddressGroup()); return async.Future.value(stringResponse(200, h, resp)); - }), true); - final response = await res.list(arg_name, - filter: arg_filter, - pageSize: arg_pageSize, - pageToken: arg_pageToken, - $fields: arg_$fields); - checkListLocationsResponse(response as api.ListLocationsResponse); + }), true); + final response = await res.get(arg_name, $fields: arg_$fields); + checkAddressGroup(response as api.AddressGroup); }); - }); - unittest.group('resource-ProjectsLocationsAddressGroupsResource', () { - unittest.test('method--addItems', () async { + unittest.test('method--getIamPolicy', () async { final mock = HttpServerMock(); final res = api.NetworkSecurityApi(mock).projects.locations.addressGroups; - final arg_request = buildAddAddressGroupItemsRequest(); - final arg_addressGroup = 'foo'; + final arg_resource = 'foo'; + final arg_options_requestedPolicyVersion = 42; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.AddAddressGroupItemsRequest.fromJson( - json as core.Map); - checkAddAddressGroupItemsRequest(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -5359,6 +6481,10 @@ void main() { ); } } + unittest.expect( + core.int.parse(queryMap['options.requestedPolicyVersion']!.first), + unittest.equals(arg_options_requestedPolicyVersion), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -5367,25 +6493,23 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildOperation()); + final resp = convert.json.encode(buildGoogleIamV1Policy()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.addItems(arg_request, arg_addressGroup, + final response = await res.getIamPolicy(arg_resource, + options_requestedPolicyVersion: arg_options_requestedPolicyVersion, $fields: arg_$fields); - checkOperation(response as api.Operation); + checkGoogleIamV1Policy(response as api.GoogleIamV1Policy); }); - unittest.test('method--cloneItems', () async { + unittest.test('method--list', () async { final mock = HttpServerMock(); final res = api.NetworkSecurityApi(mock).projects.locations.addressGroups; - final arg_request = buildCloneAddressGroupItemsRequest(); - final arg_addressGroup = 'foo'; + final arg_parent = 'foo'; + final arg_pageSize = 42; + final arg_pageToken = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.CloneAddressGroupItemsRequest.fromJson( - json as core.Map); - checkCloneAddressGroupItemsRequest(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -5417,6 +6541,14 @@ void main() { ); } } + unittest.expect( + core.int.parse(queryMap['pageSize']!.first), + unittest.equals(arg_pageSize), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -5425,27 +6557,24 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildOperation()); + 
final resp = convert.json.encode(buildListAddressGroupsResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.cloneItems(arg_request, arg_addressGroup, + final response = await res.list(arg_parent, + pageSize: arg_pageSize, + pageToken: arg_pageToken, $fields: arg_$fields); - checkOperation(response as api.Operation); + checkListAddressGroupsResponse(response as api.ListAddressGroupsResponse); }); - unittest.test('method--create', () async { + unittest.test('method--listReferences', () async { final mock = HttpServerMock(); final res = api.NetworkSecurityApi(mock).projects.locations.addressGroups; - final arg_request = buildAddressGroup(); - final arg_parent = 'foo'; - final arg_addressGroupId = 'foo'; - final arg_requestId = 'foo'; + final arg_addressGroup = 'foo'; + final arg_pageSize = 42; + final arg_pageToken = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.AddressGroup.fromJson( - json as core.Map); - checkAddressGroup(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -5478,12 +6607,12 @@ void main() { } } unittest.expect( - queryMap['addressGroupId']!.first, - unittest.equals(arg_addressGroupId), + core.int.parse(queryMap['pageSize']!.first), + unittest.equals(arg_pageSize), ); unittest.expect( - queryMap['requestId']!.first, - unittest.equals(arg_requestId), + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), ); unittest.expect( queryMap['fields']!.first, @@ -5493,23 +6622,31 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildOperation()); + final resp = + convert.json.encode(buildListAddressGroupReferencesResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.create(arg_request, arg_parent, - addressGroupId: arg_addressGroupId, - requestId: arg_requestId, + final response = await res.listReferences(arg_addressGroup, + pageSize: arg_pageSize, + pageToken: arg_pageToken, $fields: arg_$fields); - checkOperation(response as api.Operation); + checkListAddressGroupReferencesResponse( + response as api.ListAddressGroupReferencesResponse); }); - unittest.test('method--delete', () async { + unittest.test('method--patch', () async { final mock = HttpServerMock(); final res = api.NetworkSecurityApi(mock).projects.locations.addressGroups; + final arg_request = buildAddressGroup(); final arg_name = 'foo'; final arg_requestId = 'foo'; + final arg_updateMask = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.AddressGroup.fromJson( + json as core.Map); + checkAddressGroup(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -5545,6 +6682,10 @@ void main() { queryMap['requestId']!.first, unittest.equals(arg_requestId), ); + unittest.expect( + queryMap['updateMask']!.first, + unittest.equals(arg_updateMask), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -5556,17 +6697,24 @@ void main() { final resp = convert.json.encode(buildOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.delete(arg_name, - requestId: arg_requestId, $fields: arg_$fields); + final response = await res.patch(arg_request, arg_name, + requestId: arg_requestId, + updateMask: arg_updateMask, + $fields: arg_$fields); checkOperation(response as api.Operation); }); - 
unittest.test('method--get', () async { + unittest.test('method--removeItems', () async { final mock = HttpServerMock(); final res = api.NetworkSecurityApi(mock).projects.locations.addressGroups; - final arg_name = 'foo'; + final arg_request = buildRemoveAddressGroupItemsRequest(); + final arg_addressGroup = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.RemoveAddressGroupItemsRequest.fromJson( + json as core.Map); + checkRemoveAddressGroupItemsRequest(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -5606,20 +6754,25 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildAddressGroup()); + final resp = convert.json.encode(buildOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.get(arg_name, $fields: arg_$fields); - checkAddressGroup(response as api.AddressGroup); + final response = await res.removeItems(arg_request, arg_addressGroup, + $fields: arg_$fields); + checkOperation(response as api.Operation); }); - unittest.test('method--getIamPolicy', () async { + unittest.test('method--setIamPolicy', () async { final mock = HttpServerMock(); final res = api.NetworkSecurityApi(mock).projects.locations.addressGroups; + final arg_request = buildGoogleIamV1SetIamPolicyRequest(); final arg_resource = 'foo'; - final arg_options_requestedPolicyVersion = 42; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.GoogleIamV1SetIamPolicyRequest.fromJson( + json as core.Map); + checkGoogleIamV1SetIamPolicyRequest(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -5651,10 +6804,6 @@ void main() { ); } } - unittest.expect( - core.int.parse(queryMap['options.requestedPolicyVersion']!.first), - unittest.equals(arg_options_requestedPolicyVersion), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -5666,20 +6815,22 @@ void main() { final resp = convert.json.encode(buildGoogleIamV1Policy()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.getIamPolicy(arg_resource, - options_requestedPolicyVersion: arg_options_requestedPolicyVersion, + final response = await res.setIamPolicy(arg_request, arg_resource, $fields: arg_$fields); checkGoogleIamV1Policy(response as api.GoogleIamV1Policy); }); - unittest.test('method--list', () async { + unittest.test('method--testIamPermissions', () async { final mock = HttpServerMock(); final res = api.NetworkSecurityApi(mock).projects.locations.addressGroups; - final arg_parent = 'foo'; - final arg_pageSize = 42; - final arg_pageToken = 'foo'; + final arg_request = buildGoogleIamV1TestIamPermissionsRequest(); + final arg_resource = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.GoogleIamV1TestIamPermissionsRequest.fromJson( + json as core.Map); + checkGoogleIamV1TestIamPermissionsRequest(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -5711,14 +6862,6 @@ void main() { ); } } - unittest.expect( - core.int.parse(queryMap['pageSize']!.first), - unittest.equals(arg_pageSize), - ); - unittest.expect( - queryMap['pageToken']!.first, - unittest.equals(arg_pageToken), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -5727,24 +6870,31 @@ void main() { final h = { 'content-type': 
'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildListAddressGroupsResponse()); + final resp = + convert.json.encode(buildGoogleIamV1TestIamPermissionsResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.list(arg_parent, - pageSize: arg_pageSize, - pageToken: arg_pageToken, + final response = await res.testIamPermissions(arg_request, arg_resource, $fields: arg_$fields); - checkListAddressGroupsResponse(response as api.ListAddressGroupsResponse); + checkGoogleIamV1TestIamPermissionsResponse( + response as api.GoogleIamV1TestIamPermissionsResponse); }); + }); - unittest.test('method--listReferences', () async { + unittest.group('resource-ProjectsLocationsAuthorizationPoliciesResource', () { + unittest.test('method--create', () async { final mock = HttpServerMock(); - final res = api.NetworkSecurityApi(mock).projects.locations.addressGroups; - final arg_addressGroup = 'foo'; - final arg_pageSize = 42; - final arg_pageToken = 'foo'; + final res = + api.NetworkSecurityApi(mock).projects.locations.authorizationPolicies; + final arg_request = buildAuthorizationPolicy(); + final arg_parent = 'foo'; + final arg_authorizationPolicyId = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.AuthorizationPolicy.fromJson( + json as core.Map); + checkAuthorizationPolicy(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -5777,12 +6927,8 @@ void main() { } } unittest.expect( - core.int.parse(queryMap['pageSize']!.first), - unittest.equals(arg_pageSize), - ); - unittest.expect( - queryMap['pageToken']!.first, - unittest.equals(arg_pageToken), + queryMap['authorizationPolicyId']!.first, + unittest.equals(arg_authorizationPolicyId), ); unittest.expect( queryMap['fields']!.first, @@ -5792,31 +6938,22 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = - convert.json.encode(buildListAddressGroupReferencesResponse()); + final resp = convert.json.encode(buildOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.listReferences(arg_addressGroup, - pageSize: arg_pageSize, - pageToken: arg_pageToken, + final response = await res.create(arg_request, arg_parent, + authorizationPolicyId: arg_authorizationPolicyId, $fields: arg_$fields); - checkListAddressGroupReferencesResponse( - response as api.ListAddressGroupReferencesResponse); + checkOperation(response as api.Operation); }); - unittest.test('method--patch', () async { + unittest.test('method--delete', () async { final mock = HttpServerMock(); - final res = api.NetworkSecurityApi(mock).projects.locations.addressGroups; - final arg_request = buildAddressGroup(); + final res = + api.NetworkSecurityApi(mock).projects.locations.authorizationPolicies; final arg_name = 'foo'; - final arg_requestId = 'foo'; - final arg_updateMask = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.AddressGroup.fromJson( - json as core.Map); - checkAddressGroup(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -5848,14 +6985,6 @@ void main() { ); } } - unittest.expect( - queryMap['requestId']!.first, - unittest.equals(arg_requestId), - ); - unittest.expect( - queryMap['updateMask']!.first, - unittest.equals(arg_updateMask), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -5867,24 +6996,17 @@ 
void main() { final resp = convert.json.encode(buildOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.patch(arg_request, arg_name, - requestId: arg_requestId, - updateMask: arg_updateMask, - $fields: arg_$fields); + final response = await res.delete(arg_name, $fields: arg_$fields); checkOperation(response as api.Operation); }); - unittest.test('method--removeItems', () async { + unittest.test('method--get', () async { final mock = HttpServerMock(); - final res = api.NetworkSecurityApi(mock).projects.locations.addressGroups; - final arg_request = buildRemoveAddressGroupItemsRequest(); - final arg_addressGroup = 'foo'; + final res = + api.NetworkSecurityApi(mock).projects.locations.authorizationPolicies; + final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.RemoveAddressGroupItemsRequest.fromJson( - json as core.Map); - checkRemoveAddressGroupItemsRequest(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -5924,25 +7046,21 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildOperation()); + final resp = convert.json.encode(buildAuthorizationPolicy()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.removeItems(arg_request, arg_addressGroup, - $fields: arg_$fields); - checkOperation(response as api.Operation); + final response = await res.get(arg_name, $fields: arg_$fields); + checkAuthorizationPolicy(response as api.AuthorizationPolicy); }); - unittest.test('method--setIamPolicy', () async { + unittest.test('method--getIamPolicy', () async { final mock = HttpServerMock(); - final res = api.NetworkSecurityApi(mock).projects.locations.addressGroups; - final arg_request = buildGoogleIamV1SetIamPolicyRequest(); + final res = + api.NetworkSecurityApi(mock).projects.locations.authorizationPolicies; final arg_resource = 'foo'; + final arg_options_requestedPolicyVersion = 42; final arg_$fields = 'foo'; - mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleIamV1SetIamPolicyRequest.fromJson( - json as core.Map); - checkGoogleIamV1SetIamPolicyRequest(obj); - + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; var pathOffset = 0; core.int index; @@ -5974,6 +7092,10 @@ void main() { ); } } + unittest.expect( + core.int.parse(queryMap['options.requestedPolicyVersion']!.first), + unittest.equals(arg_options_requestedPolicyVersion), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -5985,22 +7107,21 @@ void main() { final resp = convert.json.encode(buildGoogleIamV1Policy()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.setIamPolicy(arg_request, arg_resource, + final response = await res.getIamPolicy(arg_resource, + options_requestedPolicyVersion: arg_options_requestedPolicyVersion, $fields: arg_$fields); checkGoogleIamV1Policy(response as api.GoogleIamV1Policy); }); - unittest.test('method--testIamPermissions', () async { + unittest.test('method--list', () async { final mock = HttpServerMock(); - final res = api.NetworkSecurityApi(mock).projects.locations.addressGroups; - final arg_request = buildGoogleIamV1TestIamPermissionsRequest(); - final arg_resource = 'foo'; + final res = + api.NetworkSecurityApi(mock).projects.locations.authorizationPolicies; + final 
arg_parent = 'foo'; + final arg_pageSize = 42; + final arg_pageToken = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleIamV1TestIamPermissionsRequest.fromJson( - json as core.Map); - checkGoogleIamV1TestIamPermissionsRequest(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -6032,6 +7153,14 @@ void main() { ); } } + unittest.expect( + core.int.parse(queryMap['pageSize']!.first), + unittest.equals(arg_pageSize), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -6041,24 +7170,24 @@ void main() { 'content-type': 'application/json; charset=utf-8', }; final resp = - convert.json.encode(buildGoogleIamV1TestIamPermissionsResponse()); + convert.json.encode(buildListAuthorizationPoliciesResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.testIamPermissions(arg_request, arg_resource, + final response = await res.list(arg_parent, + pageSize: arg_pageSize, + pageToken: arg_pageToken, $fields: arg_$fields); - checkGoogleIamV1TestIamPermissionsResponse( - response as api.GoogleIamV1TestIamPermissionsResponse); + checkListAuthorizationPoliciesResponse( + response as api.ListAuthorizationPoliciesResponse); }); - }); - unittest.group('resource-ProjectsLocationsAuthorizationPoliciesResource', () { - unittest.test('method--create', () async { + unittest.test('method--patch', () async { final mock = HttpServerMock(); final res = api.NetworkSecurityApi(mock).projects.locations.authorizationPolicies; final arg_request = buildAuthorizationPolicy(); - final arg_parent = 'foo'; - final arg_authorizationPolicyId = 'foo'; + final arg_name = 'foo'; + final arg_updateMask = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final obj = api.AuthorizationPolicy.fromJson( @@ -6097,8 +7226,8 @@ void main() { } } unittest.expect( - queryMap['authorizationPolicyId']!.first, - unittest.equals(arg_authorizationPolicyId), + queryMap['updateMask']!.first, + unittest.equals(arg_updateMask), ); unittest.expect( queryMap['fields']!.first, @@ -6111,19 +7240,23 @@ void main() { final resp = convert.json.encode(buildOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.create(arg_request, arg_parent, - authorizationPolicyId: arg_authorizationPolicyId, - $fields: arg_$fields); + final response = await res.patch(arg_request, arg_name, + updateMask: arg_updateMask, $fields: arg_$fields); checkOperation(response as api.Operation); }); - unittest.test('method--delete', () async { + unittest.test('method--setIamPolicy', () async { final mock = HttpServerMock(); final res = api.NetworkSecurityApi(mock).projects.locations.authorizationPolicies; - final arg_name = 'foo'; + final arg_request = buildGoogleIamV1SetIamPolicyRequest(); + final arg_resource = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.GoogleIamV1SetIamPolicyRequest.fromJson( + json as core.Map); + checkGoogleIamV1SetIamPolicyRequest(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -6163,20 +7296,26 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildOperation()); + final resp = convert.json.encode(buildGoogleIamV1Policy()); return 
async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.delete(arg_name, $fields: arg_$fields); - checkOperation(response as api.Operation); + final response = await res.setIamPolicy(arg_request, arg_resource, + $fields: arg_$fields); + checkGoogleIamV1Policy(response as api.GoogleIamV1Policy); }); - unittest.test('method--get', () async { + unittest.test('method--testIamPermissions', () async { final mock = HttpServerMock(); final res = api.NetworkSecurityApi(mock).projects.locations.authorizationPolicies; - final arg_name = 'foo'; + final arg_request = buildGoogleIamV1TestIamPermissionsRequest(); + final arg_resource = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.GoogleIamV1TestIamPermissionsRequest.fromJson( + json as core.Map); + checkGoogleIamV1TestIamPermissionsRequest(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -6216,21 +7355,31 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildAuthorizationPolicy()); + final resp = + convert.json.encode(buildGoogleIamV1TestIamPermissionsResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.get(arg_name, $fields: arg_$fields); - checkAuthorizationPolicy(response as api.AuthorizationPolicy); + final response = await res.testIamPermissions(arg_request, arg_resource, + $fields: arg_$fields); + checkGoogleIamV1TestIamPermissionsResponse( + response as api.GoogleIamV1TestIamPermissionsResponse); }); + }); - unittest.test('method--getIamPolicy', () async { + unittest.group('resource-ProjectsLocationsAuthzPoliciesResource', () { + unittest.test('method--create', () async { final mock = HttpServerMock(); - final res = - api.NetworkSecurityApi(mock).projects.locations.authorizationPolicies; - final arg_resource = 'foo'; - final arg_options_requestedPolicyVersion = 42; + final res = api.NetworkSecurityApi(mock).projects.locations.authzPolicies; + final arg_request = buildAuthzPolicy(); + final arg_parent = 'foo'; + final arg_authzPolicyId = 'foo'; + final arg_requestId = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.AuthzPolicy.fromJson( + json as core.Map); + checkAuthzPolicy(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -6263,8 +7412,12 @@ void main() { } } unittest.expect( - core.int.parse(queryMap['options.requestedPolicyVersion']!.first), - unittest.equals(arg_options_requestedPolicyVersion), + queryMap['authzPolicyId']!.first, + unittest.equals(arg_authzPolicyId), + ); + unittest.expect( + queryMap['requestId']!.first, + unittest.equals(arg_requestId), ); unittest.expect( queryMap['fields']!.first, @@ -6274,22 +7427,21 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleIamV1Policy()); + final resp = convert.json.encode(buildOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.getIamPolicy(arg_resource, - options_requestedPolicyVersion: arg_options_requestedPolicyVersion, + final response = await res.create(arg_request, arg_parent, + authzPolicyId: arg_authzPolicyId, + requestId: arg_requestId, $fields: arg_$fields); - checkGoogleIamV1Policy(response as api.GoogleIamV1Policy); + checkOperation(response as api.Operation); }); - unittest.test('method--list', () 
async { + unittest.test('method--delete', () async { final mock = HttpServerMock(); - final res = - api.NetworkSecurityApi(mock).projects.locations.authorizationPolicies; - final arg_parent = 'foo'; - final arg_pageSize = 42; - final arg_pageToken = 'foo'; + final res = api.NetworkSecurityApi(mock).projects.locations.authzPolicies; + final arg_name = 'foo'; + final arg_requestId = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -6324,12 +7476,8 @@ void main() { } } unittest.expect( - core.int.parse(queryMap['pageSize']!.first), - unittest.equals(arg_pageSize), - ); - unittest.expect( - queryMap['pageToken']!.first, - unittest.equals(arg_pageToken), + queryMap['requestId']!.first, + unittest.equals(arg_requestId), ); unittest.expect( queryMap['fields']!.first, @@ -6339,31 +7487,20 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = - convert.json.encode(buildListAuthorizationPoliciesResponse()); + final resp = convert.json.encode(buildOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.list(arg_parent, - pageSize: arg_pageSize, - pageToken: arg_pageToken, - $fields: arg_$fields); - checkListAuthorizationPoliciesResponse( - response as api.ListAuthorizationPoliciesResponse); + final response = await res.delete(arg_name, + requestId: arg_requestId, $fields: arg_$fields); + checkOperation(response as api.Operation); }); - unittest.test('method--patch', () async { + unittest.test('method--get', () async { final mock = HttpServerMock(); - final res = - api.NetworkSecurityApi(mock).projects.locations.authorizationPolicies; - final arg_request = buildAuthorizationPolicy(); + final res = api.NetworkSecurityApi(mock).projects.locations.authzPolicies; final arg_name = 'foo'; - final arg_updateMask = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.AuthorizationPolicy.fromJson( - json as core.Map); - checkAuthorizationPolicy(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -6395,10 +7532,6 @@ void main() { ); } } - unittest.expect( - queryMap['updateMask']!.first, - unittest.equals(arg_updateMask), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -6407,26 +7540,20 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildOperation()); + final resp = convert.json.encode(buildAuthzPolicy()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.patch(arg_request, arg_name, - updateMask: arg_updateMask, $fields: arg_$fields); - checkOperation(response as api.Operation); + final response = await res.get(arg_name, $fields: arg_$fields); + checkAuthzPolicy(response as api.AuthzPolicy); }); - unittest.test('method--setIamPolicy', () async { + unittest.test('method--getIamPolicy', () async { final mock = HttpServerMock(); - final res = - api.NetworkSecurityApi(mock).projects.locations.authorizationPolicies; - final arg_request = buildGoogleIamV1SetIamPolicyRequest(); + final res = api.NetworkSecurityApi(mock).projects.locations.authzPolicies; final arg_resource = 'foo'; + final arg_options_requestedPolicyVersion = 42; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleIamV1SetIamPolicyRequest.fromJson( - json as core.Map); - 
checkGoogleIamV1SetIamPolicyRequest(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -6458,6 +7585,10 @@ void main() { ); } } + unittest.expect( + core.int.parse(queryMap['options.requestedPolicyVersion']!.first), + unittest.equals(arg_options_requestedPolicyVersion), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -6469,23 +7600,22 @@ void main() { final resp = convert.json.encode(buildGoogleIamV1Policy()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.setIamPolicy(arg_request, arg_resource, + final response = await res.getIamPolicy(arg_resource, + options_requestedPolicyVersion: arg_options_requestedPolicyVersion, $fields: arg_$fields); checkGoogleIamV1Policy(response as api.GoogleIamV1Policy); }); - unittest.test('method--testIamPermissions', () async { + unittest.test('method--list', () async { final mock = HttpServerMock(); - final res = - api.NetworkSecurityApi(mock).projects.locations.authorizationPolicies; - final arg_request = buildGoogleIamV1TestIamPermissionsRequest(); - final arg_resource = 'foo'; + final res = api.NetworkSecurityApi(mock).projects.locations.authzPolicies; + final arg_parent = 'foo'; + final arg_filter = 'foo'; + final arg_orderBy = 'foo'; + final arg_pageSize = 42; + final arg_pageToken = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.GoogleIamV1TestIamPermissionsRequest.fromJson( - json as core.Map); - checkGoogleIamV1TestIamPermissionsRequest(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -6517,6 +7647,22 @@ void main() { ); } } + unittest.expect( + queryMap['filter']!.first, + unittest.equals(arg_filter), + ); + unittest.expect( + queryMap['orderBy']!.first, + unittest.equals(arg_orderBy), + ); + unittest.expect( + core.int.parse(queryMap['pageSize']!.first), + unittest.equals(arg_pageSize), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -6525,25 +7671,31 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = - convert.json.encode(buildGoogleIamV1TestIamPermissionsResponse()); + final resp = convert.json.encode(buildListAuthzPoliciesResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.testIamPermissions(arg_request, arg_resource, + final response = await res.list(arg_parent, + filter: arg_filter, + orderBy: arg_orderBy, + pageSize: arg_pageSize, + pageToken: arg_pageToken, $fields: arg_$fields); - checkGoogleIamV1TestIamPermissionsResponse( - response as api.GoogleIamV1TestIamPermissionsResponse); + checkListAuthzPoliciesResponse(response as api.ListAuthzPoliciesResponse); }); - }); - unittest.group('resource-ProjectsLocationsAuthzPoliciesResource', () { - unittest.test('method--getIamPolicy', () async { + unittest.test('method--patch', () async { final mock = HttpServerMock(); final res = api.NetworkSecurityApi(mock).projects.locations.authzPolicies; - final arg_resource = 'foo'; - final arg_options_requestedPolicyVersion = 42; + final arg_request = buildAuthzPolicy(); + final arg_name = 'foo'; + final arg_requestId = 'foo'; + final arg_updateMask = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.AuthzPolicy.fromJson( + json as core.Map); + 
checkAuthzPolicy(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -6576,8 +7728,12 @@ void main() { } } unittest.expect( - core.int.parse(queryMap['options.requestedPolicyVersion']!.first), - unittest.equals(arg_options_requestedPolicyVersion), + queryMap['requestId']!.first, + unittest.equals(arg_requestId), + ); + unittest.expect( + queryMap['updateMask']!.first, + unittest.equals(arg_updateMask), ); unittest.expect( queryMap['fields']!.first, @@ -6587,13 +7743,14 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGoogleIamV1Policy()); + final resp = convert.json.encode(buildOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.getIamPolicy(arg_resource, - options_requestedPolicyVersion: arg_options_requestedPolicyVersion, + final response = await res.patch(arg_request, arg_name, + requestId: arg_requestId, + updateMask: arg_updateMask, $fields: arg_$fields); - checkGoogleIamV1Policy(response as api.GoogleIamV1Policy); + checkOperation(response as api.Operation); }); unittest.test('method--setIamPolicy', () async { diff --git a/generated/googleapis/test/networkservices/v1_test.dart b/generated/googleapis/test/networkservices/v1_test.dart index c9e43d067..46ef8d1b4 100644 --- a/generated/googleapis/test/networkservices/v1_test.dart +++ b/generated/googleapis/test/networkservices/v1_test.dart @@ -119,13 +119,162 @@ void checkUnnamed2(core.List o) { ); } +core.Map buildUnnamed3() => { + 'x': 'foo', + 'y': 'foo', + }; + +void checkUnnamed3(core.Map o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o['x']!, + unittest.equals('foo'), + ); + unittest.expect( + o['y']!, + unittest.equals('foo'), + ); +} + +core.Map buildUnnamed4() => { + 'x': { + 'list': [1, 2, 3], + 'bool': true, + 'string': 'foo' + }, + 'y': { + 'list': [1, 2, 3], + 'bool': true, + 'string': 'foo' + }, + }; + +void checkUnnamed4(core.Map o) { + unittest.expect(o, unittest.hasLength(2)); + var casted1 = (o['x']!) as core.Map; + unittest.expect(casted1, unittest.hasLength(3)); + unittest.expect( + casted1['list'], + unittest.equals([1, 2, 3]), + ); + unittest.expect( + casted1['bool'], + unittest.equals(true), + ); + unittest.expect( + casted1['string'], + unittest.equals('foo'), + ); + var casted2 = (o['y']!) 
as core.Map; + unittest.expect(casted2, unittest.hasLength(3)); + unittest.expect( + casted2['list'], + unittest.equals([1, 2, 3]), + ); + unittest.expect( + casted2['bool'], + unittest.equals(true), + ); + unittest.expect( + casted2['string'], + unittest.equals('foo'), + ); +} + +core.int buildCounterAuthzExtension = 0; +api.AuthzExtension buildAuthzExtension() { + final o = api.AuthzExtension(); + buildCounterAuthzExtension++; + if (buildCounterAuthzExtension < 3) { + o.authority = 'foo'; + o.createTime = 'foo'; + o.description = 'foo'; + o.failOpen = true; + o.forwardHeaders = buildUnnamed2(); + o.labels = buildUnnamed3(); + o.loadBalancingScheme = 'foo'; + o.metadata = buildUnnamed4(); + o.name = 'foo'; + o.service = 'foo'; + o.timeout = 'foo'; + o.updateTime = 'foo'; + o.wireFormat = 'foo'; + } + buildCounterAuthzExtension--; + return o; +} + +void checkAuthzExtension(api.AuthzExtension o) { + buildCounterAuthzExtension++; + if (buildCounterAuthzExtension < 3) { + unittest.expect( + o.authority!, + unittest.equals('foo'), + ); + unittest.expect( + o.createTime!, + unittest.equals('foo'), + ); + unittest.expect( + o.description!, + unittest.equals('foo'), + ); + unittest.expect(o.failOpen!, unittest.isTrue); + checkUnnamed2(o.forwardHeaders!); + checkUnnamed3(o.labels!); + unittest.expect( + o.loadBalancingScheme!, + unittest.equals('foo'), + ); + checkUnnamed4(o.metadata!); + unittest.expect( + o.name!, + unittest.equals('foo'), + ); + unittest.expect( + o.service!, + unittest.equals('foo'), + ); + unittest.expect( + o.timeout!, + unittest.equals('foo'), + ); + unittest.expect( + o.updateTime!, + unittest.equals('foo'), + ); + unittest.expect( + o.wireFormat!, + unittest.equals('foo'), + ); + } + buildCounterAuthzExtension--; +} + +core.List buildUnnamed5() => [ + 'foo', + 'foo', + ]; + +void checkUnnamed5(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o[0], + unittest.equals('foo'), + ); + unittest.expect( + o[1], + unittest.equals('foo'), + ); +} + core.int buildCounterBinding = 0; api.Binding buildBinding() { final o = api.Binding(); buildCounterBinding++; if (buildCounterBinding < 3) { o.condition = buildExpr(); - o.members = buildUnnamed2(); + o.members = buildUnnamed5(); o.role = 'foo'; } buildCounterBinding--; @@ -136,7 +285,7 @@ void checkBinding(api.Binding o) { buildCounterBinding++; if (buildCounterBinding < 3) { checkExpr(o.condition!); - checkUnnamed2(o.members!); + checkUnnamed5(o.members!); unittest.expect( o.role!, unittest.equals('foo'), @@ -195,12 +344,12 @@ void checkEndpointMatcher(api.EndpointMatcher o) { } core.List - buildUnnamed3() => [ + buildUnnamed6() => [ buildEndpointMatcherMetadataLabelMatcherMetadataLabels(), buildEndpointMatcherMetadataLabelMatcherMetadataLabels(), ]; -void checkUnnamed3( +void checkUnnamed6( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkEndpointMatcherMetadataLabelMatcherMetadataLabels(o[0]); @@ -214,7 +363,7 @@ api.EndpointMatcherMetadataLabelMatcher buildCounterEndpointMatcherMetadataLabelMatcher++; if (buildCounterEndpointMatcherMetadataLabelMatcher < 3) { o.metadataLabelMatchCriteria = 'foo'; - o.metadataLabels = buildUnnamed3(); + o.metadataLabels = buildUnnamed6(); } buildCounterEndpointMatcherMetadataLabelMatcher--; return o; @@ -228,7 +377,7 @@ void checkEndpointMatcherMetadataLabelMatcher( o.metadataLabelMatchCriteria!, unittest.equals('foo'), ); - checkUnnamed3(o.metadataLabels!); + checkUnnamed6(o.metadataLabels!); } buildCounterEndpointMatcherMetadataLabelMatcher--; } @@ 
-262,12 +411,12 @@ void checkEndpointMatcherMetadataLabelMatcherMetadataLabels( buildCounterEndpointMatcherMetadataLabelMatcherMetadataLabels--; } -core.Map buildUnnamed4() => { +core.Map buildUnnamed7() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed4(core.Map o) { +void checkUnnamed7(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -289,7 +438,7 @@ api.EndpointPolicy buildEndpointPolicy() { o.createTime = 'foo'; o.description = 'foo'; o.endpointMatcher = buildEndpointMatcher(); - o.labels = buildUnnamed4(); + o.labels = buildUnnamed7(); o.name = 'foo'; o.serverTlsPolicy = 'foo'; o.trafficPortSelector = buildTrafficPortSelector(); @@ -320,7 +469,7 @@ void checkEndpointPolicy(api.EndpointPolicy o) { unittest.equals('foo'), ); checkEndpointMatcher(o.endpointMatcher!); - checkUnnamed4(o.labels!); + checkUnnamed7(o.labels!); unittest.expect( o.name!, unittest.equals('foo'), @@ -379,12 +528,12 @@ void checkExpr(api.Expr o) { buildCounterExpr--; } -core.List buildUnnamed5() => [ +core.List buildUnnamed8() => [ buildExtensionChainExtension(), buildExtensionChainExtension(), ]; -void checkUnnamed5(core.List o) { +void checkUnnamed8(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkExtensionChainExtension(o[0]); checkExtensionChainExtension(o[1]); @@ -395,7 +544,7 @@ api.ExtensionChain buildExtensionChain() { final o = api.ExtensionChain(); buildCounterExtensionChain++; if (buildCounterExtensionChain < 3) { - o.extensions = buildUnnamed5(); + o.extensions = buildUnnamed8(); o.matchCondition = buildExtensionChainMatchCondition(); o.name = 'foo'; } @@ -406,7 +555,7 @@ api.ExtensionChain buildExtensionChain() { void checkExtensionChain(api.ExtensionChain o) { buildCounterExtensionChain++; if (buildCounterExtensionChain < 3) { - checkUnnamed5(o.extensions!); + checkUnnamed8(o.extensions!); checkExtensionChainMatchCondition(o.matchCondition!); unittest.expect( o.name!, @@ -416,12 +565,12 @@ void checkExtensionChain(api.ExtensionChain o) { buildCounterExtensionChain--; } -core.List buildUnnamed6() => [ +core.List buildUnnamed9() => [ 'foo', 'foo', ]; -void checkUnnamed6(core.List o) { +void checkUnnamed9(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -433,12 +582,57 @@ void checkUnnamed6(core.List o) { ); } -core.List buildUnnamed7() => [ +core.Map buildUnnamed10() => { + 'x': { + 'list': [1, 2, 3], + 'bool': true, + 'string': 'foo' + }, + 'y': { + 'list': [1, 2, 3], + 'bool': true, + 'string': 'foo' + }, + }; + +void checkUnnamed10(core.Map o) { + unittest.expect(o, unittest.hasLength(2)); + var casted3 = (o['x']!) as core.Map; + unittest.expect(casted3, unittest.hasLength(3)); + unittest.expect( + casted3['list'], + unittest.equals([1, 2, 3]), + ); + unittest.expect( + casted3['bool'], + unittest.equals(true), + ); + unittest.expect( + casted3['string'], + unittest.equals('foo'), + ); + var casted4 = (o['y']!) 
as core.Map; + unittest.expect(casted4, unittest.hasLength(3)); + unittest.expect( + casted4['list'], + unittest.equals([1, 2, 3]), + ); + unittest.expect( + casted4['bool'], + unittest.equals(true), + ); + unittest.expect( + casted4['string'], + unittest.equals('foo'), + ); +} + +core.List buildUnnamed11() => [ 'foo', 'foo', ]; -void checkUnnamed7(core.List o) { +void checkUnnamed11(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -457,10 +651,11 @@ api.ExtensionChainExtension buildExtensionChainExtension() { if (buildCounterExtensionChainExtension < 3) { o.authority = 'foo'; o.failOpen = true; - o.forwardHeaders = buildUnnamed6(); + o.forwardHeaders = buildUnnamed9(); + o.metadata = buildUnnamed10(); o.name = 'foo'; o.service = 'foo'; - o.supportedEvents = buildUnnamed7(); + o.supportedEvents = buildUnnamed11(); o.timeout = 'foo'; } buildCounterExtensionChainExtension--; @@ -475,7 +670,8 @@ void checkExtensionChainExtension(api.ExtensionChainExtension o) { unittest.equals('foo'), ); unittest.expect(o.failOpen!, unittest.isTrue); - checkUnnamed6(o.forwardHeaders!); + checkUnnamed9(o.forwardHeaders!); + checkUnnamed10(o.metadata!); unittest.expect( o.name!, unittest.equals('foo'), @@ -484,7 +680,7 @@ void checkExtensionChainExtension(api.ExtensionChainExtension o) { o.service!, unittest.equals('foo'), ); - checkUnnamed7(o.supportedEvents!); + checkUnnamed11(o.supportedEvents!); unittest.expect( o.timeout!, unittest.equals('foo'), @@ -515,12 +711,12 @@ void checkExtensionChainMatchCondition(api.ExtensionChainMatchCondition o) { buildCounterExtensionChainMatchCondition--; } -core.List buildUnnamed8() => [ +core.List buildUnnamed12() => [ 'foo', 'foo', ]; -void checkUnnamed8(core.List o) { +void checkUnnamed12(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -532,12 +728,12 @@ void checkUnnamed8(core.List o) { ); } -core.List buildUnnamed9() => [ +core.List buildUnnamed13() => [ 'foo', 'foo', ]; -void checkUnnamed9(core.List o) { +void checkUnnamed13(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -549,12 +745,12 @@ void checkUnnamed9(core.List o) { ); } -core.Map buildUnnamed10() => { +core.Map buildUnnamed14() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed10(core.Map o) { +void checkUnnamed14(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -566,12 +762,12 @@ void checkUnnamed10(core.Map o) { ); } -core.List buildUnnamed11() => [ +core.List buildUnnamed15() => [ 42, 42, ]; -void checkUnnamed11(core.List o) { +void checkUnnamed15(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -588,17 +784,17 @@ api.Gateway buildGateway() { final o = api.Gateway(); buildCounterGateway++; if (buildCounterGateway < 3) { - o.addresses = buildUnnamed8(); - o.certificateUrls = buildUnnamed9(); + o.addresses = buildUnnamed12(); + o.certificateUrls = buildUnnamed13(); o.createTime = 'foo'; o.description = 'foo'; o.envoyHeaders = 'foo'; o.gatewaySecurityPolicy = 'foo'; o.ipVersion = 'foo'; - o.labels = buildUnnamed10(); + o.labels = buildUnnamed14(); o.name = 'foo'; o.network = 'foo'; - o.ports = buildUnnamed11(); + o.ports = buildUnnamed15(); o.routingMode = 'foo'; o.scope = 'foo'; o.selfLink = 'foo'; @@ -614,8 +810,8 @@ api.Gateway buildGateway() { void checkGateway(api.Gateway o) { buildCounterGateway++; if (buildCounterGateway < 3) { - checkUnnamed8(o.addresses!); - checkUnnamed9(o.certificateUrls!); + checkUnnamed12(o.addresses!); 
+ checkUnnamed13(o.certificateUrls!); unittest.expect( o.createTime!, unittest.equals('foo'), @@ -636,7 +832,7 @@ void checkGateway(api.Gateway o) { o.ipVersion!, unittest.equals('foo'), ); - checkUnnamed10(o.labels!); + checkUnnamed14(o.labels!); unittest.expect( o.name!, unittest.equals('foo'), @@ -645,7 +841,7 @@ void checkGateway(api.Gateway o) { o.network!, unittest.equals('foo'), ); - checkUnnamed11(o.ports!); + checkUnnamed15(o.ports!); unittest.expect( o.routingMode!, unittest.equals('foo'), @@ -678,12 +874,54 @@ void checkGateway(api.Gateway o) { buildCounterGateway--; } -core.List buildUnnamed12() => [ +core.int buildCounterGatewayRouteView = 0; +api.GatewayRouteView buildGatewayRouteView() { + final o = api.GatewayRouteView(); + buildCounterGatewayRouteView++; + if (buildCounterGatewayRouteView < 3) { + o.name = 'foo'; + o.routeId = 'foo'; + o.routeLocation = 'foo'; + o.routeProjectNumber = 'foo'; + o.routeType = 'foo'; + } + buildCounterGatewayRouteView--; + return o; +} + +void checkGatewayRouteView(api.GatewayRouteView o) { + buildCounterGatewayRouteView++; + if (buildCounterGatewayRouteView < 3) { + unittest.expect( + o.name!, + unittest.equals('foo'), + ); + unittest.expect( + o.routeId!, + unittest.equals('foo'), + ); + unittest.expect( + o.routeLocation!, + unittest.equals('foo'), + ); + unittest.expect( + o.routeProjectNumber!, + unittest.equals('foo'), + ); + unittest.expect( + o.routeType!, + unittest.equals('foo'), + ); + } + buildCounterGatewayRouteView--; +} + +core.List buildUnnamed16() => [ 'foo', 'foo', ]; -void checkUnnamed12(core.List o) { +void checkUnnamed16(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -695,12 +933,12 @@ void checkUnnamed12(core.List o) { ); } -core.List buildUnnamed13() => [ +core.List buildUnnamed17() => [ 'foo', 'foo', ]; -void checkUnnamed13(core.List o) { +void checkUnnamed17(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -712,12 +950,12 @@ void checkUnnamed13(core.List o) { ); } -core.Map buildUnnamed14() => { +core.Map buildUnnamed18() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed14(core.Map o) { +void checkUnnamed18(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -729,12 +967,12 @@ void checkUnnamed14(core.Map o) { ); } -core.List buildUnnamed15() => [ +core.List buildUnnamed19() => [ 'foo', 'foo', ]; -void checkUnnamed15(core.List o) { +void checkUnnamed19(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -746,12 +984,12 @@ void checkUnnamed15(core.List o) { ); } -core.List buildUnnamed16() => [ +core.List buildUnnamed20() => [ buildGrpcRouteRouteRule(), buildGrpcRouteRouteRule(), ]; -void checkUnnamed16(core.List o) { +void checkUnnamed20(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGrpcRouteRouteRule(o[0]); checkGrpcRouteRouteRule(o[1]); @@ -764,12 +1002,12 @@ api.GrpcRoute buildGrpcRoute() { if (buildCounterGrpcRoute < 3) { o.createTime = 'foo'; o.description = 'foo'; - o.gateways = buildUnnamed12(); - o.hostnames = buildUnnamed13(); - o.labels = buildUnnamed14(); - o.meshes = buildUnnamed15(); + o.gateways = buildUnnamed16(); + o.hostnames = buildUnnamed17(); + o.labels = buildUnnamed18(); + o.meshes = buildUnnamed19(); o.name = 'foo'; - o.rules = buildUnnamed16(); + o.rules = buildUnnamed20(); o.selfLink = 'foo'; o.updateTime = 'foo'; } @@ -788,15 +1026,15 @@ void checkGrpcRoute(api.GrpcRoute o) { o.description!, unittest.equals('foo'), ); - 
checkUnnamed12(o.gateways!); - checkUnnamed13(o.hostnames!); - checkUnnamed14(o.labels!); - checkUnnamed15(o.meshes!); + checkUnnamed16(o.gateways!); + checkUnnamed17(o.hostnames!); + checkUnnamed18(o.labels!); + checkUnnamed19(o.meshes!); unittest.expect( o.name!, unittest.equals('foo'), ); - checkUnnamed16(o.rules!); + checkUnnamed20(o.rules!); unittest.expect( o.selfLink!, unittest.equals('foo'), @@ -981,12 +1219,12 @@ void checkGrpcRouteMethodMatch(api.GrpcRouteMethodMatch o) { buildCounterGrpcRouteMethodMatch--; } -core.List buildUnnamed17() => [ +core.List buildUnnamed21() => [ 'foo', 'foo', ]; -void checkUnnamed17(core.List o) { +void checkUnnamed21(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -1004,7 +1242,7 @@ api.GrpcRouteRetryPolicy buildGrpcRouteRetryPolicy() { buildCounterGrpcRouteRetryPolicy++; if (buildCounterGrpcRouteRetryPolicy < 3) { o.numRetries = 42; - o.retryConditions = buildUnnamed17(); + o.retryConditions = buildUnnamed21(); } buildCounterGrpcRouteRetryPolicy--; return o; @@ -1017,17 +1255,17 @@ void checkGrpcRouteRetryPolicy(api.GrpcRouteRetryPolicy o) { o.numRetries!, unittest.equals(42), ); - checkUnnamed17(o.retryConditions!); + checkUnnamed21(o.retryConditions!); } buildCounterGrpcRouteRetryPolicy--; } -core.List buildUnnamed18() => [ +core.List buildUnnamed22() => [ buildGrpcRouteDestination(), buildGrpcRouteDestination(), ]; -void checkUnnamed18(core.List o) { +void checkUnnamed22(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGrpcRouteDestination(o[0]); checkGrpcRouteDestination(o[1]); @@ -1038,7 +1276,7 @@ api.GrpcRouteRouteAction buildGrpcRouteRouteAction() { final o = api.GrpcRouteRouteAction(); buildCounterGrpcRouteRouteAction++; if (buildCounterGrpcRouteRouteAction < 3) { - o.destinations = buildUnnamed18(); + o.destinations = buildUnnamed22(); o.faultInjectionPolicy = buildGrpcRouteFaultInjectionPolicy(); o.idleTimeout = 'foo'; o.retryPolicy = buildGrpcRouteRetryPolicy(); @@ -1052,7 +1290,7 @@ api.GrpcRouteRouteAction buildGrpcRouteRouteAction() { void checkGrpcRouteRouteAction(api.GrpcRouteRouteAction o) { buildCounterGrpcRouteRouteAction++; if (buildCounterGrpcRouteRouteAction < 3) { - checkUnnamed18(o.destinations!); + checkUnnamed22(o.destinations!); checkGrpcRouteFaultInjectionPolicy(o.faultInjectionPolicy!); unittest.expect( o.idleTimeout!, @@ -1068,12 +1306,12 @@ void checkGrpcRouteRouteAction(api.GrpcRouteRouteAction o) { buildCounterGrpcRouteRouteAction--; } -core.List buildUnnamed19() => [ +core.List buildUnnamed23() => [ buildGrpcRouteHeaderMatch(), buildGrpcRouteHeaderMatch(), ]; -void checkUnnamed19(core.List o) { +void checkUnnamed23(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGrpcRouteHeaderMatch(o[0]); checkGrpcRouteHeaderMatch(o[1]); @@ -1084,7 +1322,7 @@ api.GrpcRouteRouteMatch buildGrpcRouteRouteMatch() { final o = api.GrpcRouteRouteMatch(); buildCounterGrpcRouteRouteMatch++; if (buildCounterGrpcRouteRouteMatch < 3) { - o.headers = buildUnnamed19(); + o.headers = buildUnnamed23(); o.method = buildGrpcRouteMethodMatch(); } buildCounterGrpcRouteRouteMatch--; @@ -1094,18 +1332,18 @@ api.GrpcRouteRouteMatch buildGrpcRouteRouteMatch() { void checkGrpcRouteRouteMatch(api.GrpcRouteRouteMatch o) { buildCounterGrpcRouteRouteMatch++; if (buildCounterGrpcRouteRouteMatch < 3) { - checkUnnamed19(o.headers!); + checkUnnamed23(o.headers!); checkGrpcRouteMethodMatch(o.method!); } buildCounterGrpcRouteRouteMatch--; } -core.List buildUnnamed20() => [ +core.List buildUnnamed24() 
=> [ buildGrpcRouteRouteMatch(), buildGrpcRouteRouteMatch(), ]; -void checkUnnamed20(core.List o) { +void checkUnnamed24(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGrpcRouteRouteMatch(o[0]); checkGrpcRouteRouteMatch(o[1]); @@ -1117,7 +1355,7 @@ api.GrpcRouteRouteRule buildGrpcRouteRouteRule() { buildCounterGrpcRouteRouteRule++; if (buildCounterGrpcRouteRouteRule < 3) { o.action = buildGrpcRouteRouteAction(); - o.matches = buildUnnamed20(); + o.matches = buildUnnamed24(); } buildCounterGrpcRouteRouteRule--; return o; @@ -1127,7 +1365,7 @@ void checkGrpcRouteRouteRule(api.GrpcRouteRouteRule o) { buildCounterGrpcRouteRouteRule++; if (buildCounterGrpcRouteRouteRule < 3) { checkGrpcRouteRouteAction(o.action!); - checkUnnamed20(o.matches!); + checkUnnamed24(o.matches!); } buildCounterGrpcRouteRouteRule--; } @@ -1156,12 +1394,12 @@ void checkGrpcRouteStatefulSessionAffinityPolicy( buildCounterGrpcRouteStatefulSessionAffinityPolicy--; } -core.List buildUnnamed21() => [ +core.List buildUnnamed25() => [ 'foo', 'foo', ]; -void checkUnnamed21(core.List o) { +void checkUnnamed25(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -1173,12 +1411,12 @@ void checkUnnamed21(core.List o) { ); } -core.List buildUnnamed22() => [ +core.List buildUnnamed26() => [ 'foo', 'foo', ]; -void checkUnnamed22(core.List o) { +void checkUnnamed26(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -1190,12 +1428,12 @@ void checkUnnamed22(core.List o) { ); } -core.Map buildUnnamed23() => { +core.Map buildUnnamed27() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed23(core.Map o) { +void checkUnnamed27(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -1207,12 +1445,12 @@ void checkUnnamed23(core.Map o) { ); } -core.List buildUnnamed24() => [ +core.List buildUnnamed28() => [ 'foo', 'foo', ]; -void checkUnnamed24(core.List o) { +void checkUnnamed28(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -1224,12 +1462,12 @@ void checkUnnamed24(core.List o) { ); } -core.List buildUnnamed25() => [ +core.List buildUnnamed29() => [ buildHttpRouteRouteRule(), buildHttpRouteRouteRule(), ]; -void checkUnnamed25(core.List o) { +void checkUnnamed29(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkHttpRouteRouteRule(o[0]); checkHttpRouteRouteRule(o[1]); @@ -1242,12 +1480,12 @@ api.HttpRoute buildHttpRoute() { if (buildCounterHttpRoute < 3) { o.createTime = 'foo'; o.description = 'foo'; - o.gateways = buildUnnamed21(); - o.hostnames = buildUnnamed22(); - o.labels = buildUnnamed23(); - o.meshes = buildUnnamed24(); + o.gateways = buildUnnamed25(); + o.hostnames = buildUnnamed26(); + o.labels = buildUnnamed27(); + o.meshes = buildUnnamed28(); o.name = 'foo'; - o.rules = buildUnnamed25(); + o.rules = buildUnnamed29(); o.selfLink = 'foo'; o.updateTime = 'foo'; } @@ -1266,15 +1504,15 @@ void checkHttpRoute(api.HttpRoute o) { o.description!, unittest.equals('foo'), ); - checkUnnamed21(o.gateways!); - checkUnnamed22(o.hostnames!); - checkUnnamed23(o.labels!); - checkUnnamed24(o.meshes!); + checkUnnamed25(o.gateways!); + checkUnnamed26(o.hostnames!); + checkUnnamed27(o.labels!); + checkUnnamed28(o.meshes!); unittest.expect( o.name!, unittest.equals('foo'), ); - checkUnnamed25(o.rules!); + checkUnnamed29(o.rules!); unittest.expect( o.selfLink!, unittest.equals('foo'), @@ -1287,12 +1525,12 @@ void checkHttpRoute(api.HttpRoute o) { buildCounterHttpRoute--; } -core.List buildUnnamed26() 
=> [ +core.List buildUnnamed30() => [ 'foo', 'foo', ]; -void checkUnnamed26(core.List o) { +void checkUnnamed30(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -1304,12 +1542,12 @@ void checkUnnamed26(core.List o) { ); } -core.List buildUnnamed27() => [ +core.List buildUnnamed31() => [ 'foo', 'foo', ]; -void checkUnnamed27(core.List o) { +void checkUnnamed31(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -1321,12 +1559,12 @@ void checkUnnamed27(core.List o) { ); } -core.List buildUnnamed28() => [ +core.List buildUnnamed32() => [ 'foo', 'foo', ]; -void checkUnnamed28(core.List o) { +void checkUnnamed32(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -1338,12 +1576,12 @@ void checkUnnamed28(core.List o) { ); } -core.List buildUnnamed29() => [ +core.List buildUnnamed33() => [ 'foo', 'foo', ]; -void checkUnnamed29(core.List o) { +void checkUnnamed33(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -1355,12 +1593,12 @@ void checkUnnamed29(core.List o) { ); } -core.List buildUnnamed30() => [ +core.List buildUnnamed34() => [ 'foo', 'foo', ]; -void checkUnnamed30(core.List o) { +void checkUnnamed34(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -1378,12 +1616,12 @@ api.HttpRouteCorsPolicy buildHttpRouteCorsPolicy() { buildCounterHttpRouteCorsPolicy++; if (buildCounterHttpRouteCorsPolicy < 3) { o.allowCredentials = true; - o.allowHeaders = buildUnnamed26(); - o.allowMethods = buildUnnamed27(); - o.allowOriginRegexes = buildUnnamed28(); - o.allowOrigins = buildUnnamed29(); + o.allowHeaders = buildUnnamed30(); + o.allowMethods = buildUnnamed31(); + o.allowOriginRegexes = buildUnnamed32(); + o.allowOrigins = buildUnnamed33(); o.disabled = true; - o.exposeHeaders = buildUnnamed30(); + o.exposeHeaders = buildUnnamed34(); o.maxAge = 'foo'; } buildCounterHttpRouteCorsPolicy--; @@ -1394,12 +1632,12 @@ void checkHttpRouteCorsPolicy(api.HttpRouteCorsPolicy o) { buildCounterHttpRouteCorsPolicy++; if (buildCounterHttpRouteCorsPolicy < 3) { unittest.expect(o.allowCredentials!, unittest.isTrue); - checkUnnamed26(o.allowHeaders!); - checkUnnamed27(o.allowMethods!); - checkUnnamed28(o.allowOriginRegexes!); - checkUnnamed29(o.allowOrigins!); + checkUnnamed30(o.allowHeaders!); + checkUnnamed31(o.allowMethods!); + checkUnnamed32(o.allowOriginRegexes!); + checkUnnamed33(o.allowOrigins!); unittest.expect(o.disabled!, unittest.isTrue); - checkUnnamed30(o.exposeHeaders!); + checkUnnamed34(o.exposeHeaders!); unittest.expect( o.maxAge!, unittest.equals('foo'), @@ -1594,12 +1832,12 @@ void checkHttpRouteHeaderMatchIntegerRange( buildCounterHttpRouteHeaderMatchIntegerRange--; } -core.Map buildUnnamed31() => { +core.Map buildUnnamed35() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed31(core.Map o) { +void checkUnnamed35(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -1611,12 +1849,12 @@ void checkUnnamed31(core.Map o) { ); } -core.List buildUnnamed32() => [ +core.List buildUnnamed36() => [ 'foo', 'foo', ]; -void checkUnnamed32(core.List o) { +void checkUnnamed36(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -1628,12 +1866,12 @@ void checkUnnamed32(core.List o) { ); } -core.Map buildUnnamed33() => { +core.Map buildUnnamed37() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed33(core.Map o) { +void checkUnnamed37(core.Map o) { unittest.expect(o, unittest.hasLength(2)); 
unittest.expect( o['x']!, @@ -1650,9 +1888,9 @@ api.HttpRouteHeaderModifier buildHttpRouteHeaderModifier() { final o = api.HttpRouteHeaderModifier(); buildCounterHttpRouteHeaderModifier++; if (buildCounterHttpRouteHeaderModifier < 3) { - o.add = buildUnnamed31(); - o.remove = buildUnnamed32(); - o.set = buildUnnamed33(); + o.add = buildUnnamed35(); + o.remove = buildUnnamed36(); + o.set = buildUnnamed37(); } buildCounterHttpRouteHeaderModifier--; return o; @@ -1661,9 +1899,9 @@ api.HttpRouteHeaderModifier buildHttpRouteHeaderModifier() { void checkHttpRouteHeaderModifier(api.HttpRouteHeaderModifier o) { buildCounterHttpRouteHeaderModifier++; if (buildCounterHttpRouteHeaderModifier < 3) { - checkUnnamed31(o.add!); - checkUnnamed32(o.remove!); - checkUnnamed33(o.set!); + checkUnnamed35(o.add!); + checkUnnamed36(o.remove!); + checkUnnamed37(o.set!); } buildCounterHttpRouteHeaderModifier--; } @@ -1804,12 +2042,12 @@ void checkHttpRouteRequestMirrorPolicy(api.HttpRouteRequestMirrorPolicy o) { buildCounterHttpRouteRequestMirrorPolicy--; } -core.List buildUnnamed34() => [ +core.List buildUnnamed38() => [ 'foo', 'foo', ]; -void checkUnnamed34(core.List o) { +void checkUnnamed38(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -1828,7 +2066,7 @@ api.HttpRouteRetryPolicy buildHttpRouteRetryPolicy() { if (buildCounterHttpRouteRetryPolicy < 3) { o.numRetries = 42; o.perTryTimeout = 'foo'; - o.retryConditions = buildUnnamed34(); + o.retryConditions = buildUnnamed38(); } buildCounterHttpRouteRetryPolicy--; return o; @@ -1845,17 +2083,17 @@ void checkHttpRouteRetryPolicy(api.HttpRouteRetryPolicy o) { o.perTryTimeout!, unittest.equals('foo'), ); - checkUnnamed34(o.retryConditions!); + checkUnnamed38(o.retryConditions!); } buildCounterHttpRouteRetryPolicy--; } -core.List buildUnnamed35() => [ +core.List buildUnnamed39() => [ buildHttpRouteDestination(), buildHttpRouteDestination(), ]; -void checkUnnamed35(core.List o) { +void checkUnnamed39(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkHttpRouteDestination(o[0]); checkHttpRouteDestination(o[1]); @@ -1867,7 +2105,7 @@ api.HttpRouteRouteAction buildHttpRouteRouteAction() { buildCounterHttpRouteRouteAction++; if (buildCounterHttpRouteRouteAction < 3) { o.corsPolicy = buildHttpRouteCorsPolicy(); - o.destinations = buildUnnamed35(); + o.destinations = buildUnnamed39(); o.directResponse = buildHttpRouteHttpDirectResponse(); o.faultInjectionPolicy = buildHttpRouteFaultInjectionPolicy(); o.idleTimeout = 'foo'; @@ -1888,7 +2126,7 @@ void checkHttpRouteRouteAction(api.HttpRouteRouteAction o) { buildCounterHttpRouteRouteAction++; if (buildCounterHttpRouteRouteAction < 3) { checkHttpRouteCorsPolicy(o.corsPolicy!); - checkUnnamed35(o.destinations!); + checkUnnamed39(o.destinations!); checkHttpRouteHttpDirectResponse(o.directResponse!); checkHttpRouteFaultInjectionPolicy(o.faultInjectionPolicy!); unittest.expect( @@ -1910,23 +2148,23 @@ void checkHttpRouteRouteAction(api.HttpRouteRouteAction o) { buildCounterHttpRouteRouteAction--; } -core.List buildUnnamed36() => [ +core.List buildUnnamed40() => [ buildHttpRouteHeaderMatch(), buildHttpRouteHeaderMatch(), ]; -void checkUnnamed36(core.List o) { +void checkUnnamed40(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkHttpRouteHeaderMatch(o[0]); checkHttpRouteHeaderMatch(o[1]); } -core.List buildUnnamed37() => [ +core.List buildUnnamed41() => [ buildHttpRouteQueryParameterMatch(), buildHttpRouteQueryParameterMatch(), ]; -void checkUnnamed37(core.List o) { +void 
checkUnnamed41(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkHttpRouteQueryParameterMatch(o[0]); checkHttpRouteQueryParameterMatch(o[1]); @@ -1938,10 +2176,10 @@ api.HttpRouteRouteMatch buildHttpRouteRouteMatch() { buildCounterHttpRouteRouteMatch++; if (buildCounterHttpRouteRouteMatch < 3) { o.fullPathMatch = 'foo'; - o.headers = buildUnnamed36(); + o.headers = buildUnnamed40(); o.ignoreCase = true; o.prefixMatch = 'foo'; - o.queryParameters = buildUnnamed37(); + o.queryParameters = buildUnnamed41(); o.regexMatch = 'foo'; } buildCounterHttpRouteRouteMatch--; @@ -1955,13 +2193,13 @@ void checkHttpRouteRouteMatch(api.HttpRouteRouteMatch o) { o.fullPathMatch!, unittest.equals('foo'), ); - checkUnnamed36(o.headers!); + checkUnnamed40(o.headers!); unittest.expect(o.ignoreCase!, unittest.isTrue); unittest.expect( o.prefixMatch!, unittest.equals('foo'), ); - checkUnnamed37(o.queryParameters!); + checkUnnamed41(o.queryParameters!); unittest.expect( o.regexMatch!, unittest.equals('foo'), @@ -1970,12 +2208,12 @@ void checkHttpRouteRouteMatch(api.HttpRouteRouteMatch o) { buildCounterHttpRouteRouteMatch--; } -core.List buildUnnamed38() => [ +core.List buildUnnamed42() => [ buildHttpRouteRouteMatch(), buildHttpRouteRouteMatch(), ]; -void checkUnnamed38(core.List o) { +void checkUnnamed42(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkHttpRouteRouteMatch(o[0]); checkHttpRouteRouteMatch(o[1]); @@ -1987,7 +2225,7 @@ api.HttpRouteRouteRule buildHttpRouteRouteRule() { buildCounterHttpRouteRouteRule++; if (buildCounterHttpRouteRouteRule < 3) { o.action = buildHttpRouteRouteAction(); - o.matches = buildUnnamed38(); + o.matches = buildUnnamed42(); } buildCounterHttpRouteRouteRule--; return o; @@ -1997,7 +2235,7 @@ void checkHttpRouteRouteRule(api.HttpRouteRouteRule o) { buildCounterHttpRouteRouteRule++; if (buildCounterHttpRouteRouteRule < 3) { checkHttpRouteRouteAction(o.action!); - checkUnnamed38(o.matches!); + checkUnnamed42(o.matches!); } buildCounterHttpRouteRouteRule--; } @@ -2053,23 +2291,23 @@ void checkHttpRouteURLRewrite(api.HttpRouteURLRewrite o) { buildCounterHttpRouteURLRewrite--; } -core.List buildUnnamed39() => [ +core.List buildUnnamed43() => [ buildExtensionChain(), buildExtensionChain(), ]; -void checkUnnamed39(core.List o) { +void checkUnnamed43(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkExtensionChain(o[0]); checkExtensionChain(o[1]); } -core.List buildUnnamed40() => [ +core.List buildUnnamed44() => [ 'foo', 'foo', ]; -void checkUnnamed40(core.List o) { +void checkUnnamed44(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -2081,12 +2319,12 @@ void checkUnnamed40(core.List o) { ); } -core.Map buildUnnamed41() => { +core.Map buildUnnamed45() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed41(core.Map o) { +void checkUnnamed45(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -2098,7 +2336,7 @@ void checkUnnamed41(core.Map o) { ); } -core.Map buildUnnamed42() => { +core.Map buildUnnamed46() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -2111,34 +2349,34 @@ core.Map buildUnnamed42() => { }, }; -void checkUnnamed42(core.Map o) { +void checkUnnamed46(core.Map o) { unittest.expect(o, unittest.hasLength(2)); - var casted1 = (o['x']!) as core.Map; - unittest.expect(casted1, unittest.hasLength(3)); + var casted5 = (o['x']!) 
as core.Map; + unittest.expect(casted5, unittest.hasLength(3)); unittest.expect( - casted1['list'], + casted5['list'], unittest.equals([1, 2, 3]), ); unittest.expect( - casted1['bool'], + casted5['bool'], unittest.equals(true), ); unittest.expect( - casted1['string'], + casted5['string'], unittest.equals('foo'), ); - var casted2 = (o['y']!) as core.Map; - unittest.expect(casted2, unittest.hasLength(3)); + var casted6 = (o['y']!) as core.Map; + unittest.expect(casted6, unittest.hasLength(3)); unittest.expect( - casted2['list'], + casted6['list'], unittest.equals([1, 2, 3]), ); unittest.expect( - casted2['bool'], + casted6['bool'], unittest.equals(true), ); unittest.expect( - casted2['string'], + casted6['string'], unittest.equals('foo'), ); } @@ -2150,11 +2388,11 @@ api.LbRouteExtension buildLbRouteExtension() { if (buildCounterLbRouteExtension < 3) { o.createTime = 'foo'; o.description = 'foo'; - o.extensionChains = buildUnnamed39(); - o.forwardingRules = buildUnnamed40(); - o.labels = buildUnnamed41(); + o.extensionChains = buildUnnamed43(); + o.forwardingRules = buildUnnamed44(); + o.labels = buildUnnamed45(); o.loadBalancingScheme = 'foo'; - o.metadata = buildUnnamed42(); + o.metadata = buildUnnamed46(); o.name = 'foo'; o.updateTime = 'foo'; } @@ -2173,14 +2411,14 @@ void checkLbRouteExtension(api.LbRouteExtension o) { o.description!, unittest.equals('foo'), ); - checkUnnamed39(o.extensionChains!); - checkUnnamed40(o.forwardingRules!); - checkUnnamed41(o.labels!); + checkUnnamed43(o.extensionChains!); + checkUnnamed44(o.forwardingRules!); + checkUnnamed45(o.labels!); unittest.expect( o.loadBalancingScheme!, unittest.equals('foo'), ); - checkUnnamed42(o.metadata!); + checkUnnamed46(o.metadata!); unittest.expect( o.name!, unittest.equals('foo'), @@ -2193,23 +2431,23 @@ void checkLbRouteExtension(api.LbRouteExtension o) { buildCounterLbRouteExtension--; } -core.List buildUnnamed43() => [ +core.List buildUnnamed47() => [ buildExtensionChain(), buildExtensionChain(), ]; -void checkUnnamed43(core.List o) { +void checkUnnamed47(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkExtensionChain(o[0]); checkExtensionChain(o[1]); } -core.List buildUnnamed44() => [ +core.List buildUnnamed48() => [ 'foo', 'foo', ]; -void checkUnnamed44(core.List o) { +void checkUnnamed48(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -2221,12 +2459,12 @@ void checkUnnamed44(core.List o) { ); } -core.Map buildUnnamed45() => { +core.Map buildUnnamed49() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed45(core.Map o) { +void checkUnnamed49(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -2238,7 +2476,7 @@ void checkUnnamed45(core.Map o) { ); } -core.Map buildUnnamed46() => { +core.Map buildUnnamed50() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -2251,34 +2489,34 @@ core.Map buildUnnamed46() => { }, }; -void checkUnnamed46(core.Map o) { +void checkUnnamed50(core.Map o) { unittest.expect(o, unittest.hasLength(2)); - var casted3 = (o['x']!) as core.Map; - unittest.expect(casted3, unittest.hasLength(3)); + var casted7 = (o['x']!) as core.Map; + unittest.expect(casted7, unittest.hasLength(3)); unittest.expect( - casted3['list'], + casted7['list'], unittest.equals([1, 2, 3]), ); unittest.expect( - casted3['bool'], + casted7['bool'], unittest.equals(true), ); unittest.expect( - casted3['string'], + casted7['string'], unittest.equals('foo'), ); - var casted4 = (o['y']!) 
as core.Map; - unittest.expect(casted4, unittest.hasLength(3)); + var casted8 = (o['y']!) as core.Map; + unittest.expect(casted8, unittest.hasLength(3)); unittest.expect( - casted4['list'], + casted8['list'], unittest.equals([1, 2, 3]), ); unittest.expect( - casted4['bool'], + casted8['bool'], unittest.equals(true), ); unittest.expect( - casted4['string'], + casted8['string'], unittest.equals('foo'), ); } @@ -2290,11 +2528,11 @@ api.LbTrafficExtension buildLbTrafficExtension() { if (buildCounterLbTrafficExtension < 3) { o.createTime = 'foo'; o.description = 'foo'; - o.extensionChains = buildUnnamed43(); - o.forwardingRules = buildUnnamed44(); - o.labels = buildUnnamed45(); + o.extensionChains = buildUnnamed47(); + o.forwardingRules = buildUnnamed48(); + o.labels = buildUnnamed49(); o.loadBalancingScheme = 'foo'; - o.metadata = buildUnnamed46(); + o.metadata = buildUnnamed50(); o.name = 'foo'; o.updateTime = 'foo'; } @@ -2313,14 +2551,14 @@ void checkLbTrafficExtension(api.LbTrafficExtension o) { o.description!, unittest.equals('foo'), ); - checkUnnamed43(o.extensionChains!); - checkUnnamed44(o.forwardingRules!); - checkUnnamed45(o.labels!); + checkUnnamed47(o.extensionChains!); + checkUnnamed48(o.forwardingRules!); + checkUnnamed49(o.labels!); unittest.expect( o.loadBalancingScheme!, unittest.equals('foo'), ); - checkUnnamed46(o.metadata!); + checkUnnamed50(o.metadata!); unittest.expect( o.name!, unittest.equals('foo'), @@ -2333,12 +2571,66 @@ void checkLbTrafficExtension(api.LbTrafficExtension o) { buildCounterLbTrafficExtension--; } -core.List buildUnnamed47() => [ +core.List buildUnnamed51() => [ + buildAuthzExtension(), + buildAuthzExtension(), + ]; + +void checkUnnamed51(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkAuthzExtension(o[0]); + checkAuthzExtension(o[1]); +} + +core.List buildUnnamed52() => [ + 'foo', + 'foo', + ]; + +void checkUnnamed52(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o[0], + unittest.equals('foo'), + ); + unittest.expect( + o[1], + unittest.equals('foo'), + ); +} + +core.int buildCounterListAuthzExtensionsResponse = 0; +api.ListAuthzExtensionsResponse buildListAuthzExtensionsResponse() { + final o = api.ListAuthzExtensionsResponse(); + buildCounterListAuthzExtensionsResponse++; + if (buildCounterListAuthzExtensionsResponse < 3) { + o.authzExtensions = buildUnnamed51(); + o.nextPageToken = 'foo'; + o.unreachable = buildUnnamed52(); + } + buildCounterListAuthzExtensionsResponse--; + return o; +} + +void checkListAuthzExtensionsResponse(api.ListAuthzExtensionsResponse o) { + buildCounterListAuthzExtensionsResponse++; + if (buildCounterListAuthzExtensionsResponse < 3) { + checkUnnamed51(o.authzExtensions!); + unittest.expect( + o.nextPageToken!, + unittest.equals('foo'), + ); + checkUnnamed52(o.unreachable!); + } + buildCounterListAuthzExtensionsResponse--; +} + +core.List buildUnnamed53() => [ buildEndpointPolicy(), buildEndpointPolicy(), ]; -void checkUnnamed47(core.List o) { +void checkUnnamed53(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkEndpointPolicy(o[0]); checkEndpointPolicy(o[1]); @@ -2349,7 +2641,7 @@ api.ListEndpointPoliciesResponse buildListEndpointPoliciesResponse() { final o = api.ListEndpointPoliciesResponse(); buildCounterListEndpointPoliciesResponse++; if (buildCounterListEndpointPoliciesResponse < 3) { - o.endpointPolicies = buildUnnamed47(); + o.endpointPolicies = buildUnnamed53(); o.nextPageToken = 'foo'; } buildCounterListEndpointPoliciesResponse--; @@ -2359,7 +2651,7 
@@ api.ListEndpointPoliciesResponse buildListEndpointPoliciesResponse() { void checkListEndpointPoliciesResponse(api.ListEndpointPoliciesResponse o) { buildCounterListEndpointPoliciesResponse++; if (buildCounterListEndpointPoliciesResponse < 3) { - checkUnnamed47(o.endpointPolicies!); + checkUnnamed53(o.endpointPolicies!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -2368,23 +2660,58 @@ void checkListEndpointPoliciesResponse(api.ListEndpointPoliciesResponse o) { buildCounterListEndpointPoliciesResponse--; } -core.List buildUnnamed48() => [ +core.List buildUnnamed54() => [ + buildGatewayRouteView(), + buildGatewayRouteView(), + ]; + +void checkUnnamed54(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkGatewayRouteView(o[0]); + checkGatewayRouteView(o[1]); +} + +core.int buildCounterListGatewayRouteViewsResponse = 0; +api.ListGatewayRouteViewsResponse buildListGatewayRouteViewsResponse() { + final o = api.ListGatewayRouteViewsResponse(); + buildCounterListGatewayRouteViewsResponse++; + if (buildCounterListGatewayRouteViewsResponse < 3) { + o.gatewayRouteViews = buildUnnamed54(); + o.nextPageToken = 'foo'; + } + buildCounterListGatewayRouteViewsResponse--; + return o; +} + +void checkListGatewayRouteViewsResponse(api.ListGatewayRouteViewsResponse o) { + buildCounterListGatewayRouteViewsResponse++; + if (buildCounterListGatewayRouteViewsResponse < 3) { + checkUnnamed54(o.gatewayRouteViews!); + unittest.expect( + o.nextPageToken!, + unittest.equals('foo'), + ); + } + buildCounterListGatewayRouteViewsResponse--; +} + +core.List buildUnnamed55() => [ buildGateway(), buildGateway(), ]; -void checkUnnamed48(core.List o) { +void checkUnnamed55(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGateway(o[0]); checkGateway(o[1]); } -core.List buildUnnamed49() => [ +core.List buildUnnamed56() => [ 'foo', 'foo', ]; -void checkUnnamed49(core.List o) { +void checkUnnamed56(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -2401,9 +2728,9 @@ api.ListGatewaysResponse buildListGatewaysResponse() { final o = api.ListGatewaysResponse(); buildCounterListGatewaysResponse++; if (buildCounterListGatewaysResponse < 3) { - o.gateways = buildUnnamed48(); + o.gateways = buildUnnamed55(); o.nextPageToken = 'foo'; - o.unreachable = buildUnnamed49(); + o.unreachable = buildUnnamed56(); } buildCounterListGatewaysResponse--; return o; @@ -2412,22 +2739,22 @@ api.ListGatewaysResponse buildListGatewaysResponse() { void checkListGatewaysResponse(api.ListGatewaysResponse o) { buildCounterListGatewaysResponse++; if (buildCounterListGatewaysResponse < 3) { - checkUnnamed48(o.gateways!); + checkUnnamed55(o.gateways!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed49(o.unreachable!); + checkUnnamed56(o.unreachable!); } buildCounterListGatewaysResponse--; } -core.List buildUnnamed50() => [ +core.List buildUnnamed57() => [ buildGrpcRoute(), buildGrpcRoute(), ]; -void checkUnnamed50(core.List o) { +void checkUnnamed57(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGrpcRoute(o[0]); checkGrpcRoute(o[1]); @@ -2438,7 +2765,7 @@ api.ListGrpcRoutesResponse buildListGrpcRoutesResponse() { final o = api.ListGrpcRoutesResponse(); buildCounterListGrpcRoutesResponse++; if (buildCounterListGrpcRoutesResponse < 3) { - o.grpcRoutes = buildUnnamed50(); + o.grpcRoutes = buildUnnamed57(); o.nextPageToken = 'foo'; } buildCounterListGrpcRoutesResponse--; @@ -2448,7 +2775,7 @@ api.ListGrpcRoutesResponse 
buildListGrpcRoutesResponse() { void checkListGrpcRoutesResponse(api.ListGrpcRoutesResponse o) { buildCounterListGrpcRoutesResponse++; if (buildCounterListGrpcRoutesResponse < 3) { - checkUnnamed50(o.grpcRoutes!); + checkUnnamed57(o.grpcRoutes!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -2457,12 +2784,12 @@ void checkListGrpcRoutesResponse(api.ListGrpcRoutesResponse o) { buildCounterListGrpcRoutesResponse--; } -core.List buildUnnamed51() => [ +core.List buildUnnamed58() => [ buildHttpRoute(), buildHttpRoute(), ]; -void checkUnnamed51(core.List o) { +void checkUnnamed58(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkHttpRoute(o[0]); checkHttpRoute(o[1]); @@ -2473,7 +2800,7 @@ api.ListHttpRoutesResponse buildListHttpRoutesResponse() { final o = api.ListHttpRoutesResponse(); buildCounterListHttpRoutesResponse++; if (buildCounterListHttpRoutesResponse < 3) { - o.httpRoutes = buildUnnamed51(); + o.httpRoutes = buildUnnamed58(); o.nextPageToken = 'foo'; } buildCounterListHttpRoutesResponse--; @@ -2483,7 +2810,7 @@ api.ListHttpRoutesResponse buildListHttpRoutesResponse() { void checkListHttpRoutesResponse(api.ListHttpRoutesResponse o) { buildCounterListHttpRoutesResponse++; if (buildCounterListHttpRoutesResponse < 3) { - checkUnnamed51(o.httpRoutes!); + checkUnnamed58(o.httpRoutes!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -2492,23 +2819,23 @@ void checkListHttpRoutesResponse(api.ListHttpRoutesResponse o) { buildCounterListHttpRoutesResponse--; } -core.List buildUnnamed52() => [ +core.List buildUnnamed59() => [ buildLbRouteExtension(), buildLbRouteExtension(), ]; -void checkUnnamed52(core.List o) { +void checkUnnamed59(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkLbRouteExtension(o[0]); checkLbRouteExtension(o[1]); } -core.List buildUnnamed53() => [ +core.List buildUnnamed60() => [ 'foo', 'foo', ]; -void checkUnnamed53(core.List o) { +void checkUnnamed60(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -2525,9 +2852,9 @@ api.ListLbRouteExtensionsResponse buildListLbRouteExtensionsResponse() { final o = api.ListLbRouteExtensionsResponse(); buildCounterListLbRouteExtensionsResponse++; if (buildCounterListLbRouteExtensionsResponse < 3) { - o.lbRouteExtensions = buildUnnamed52(); + o.lbRouteExtensions = buildUnnamed59(); o.nextPageToken = 'foo'; - o.unreachable = buildUnnamed53(); + o.unreachable = buildUnnamed60(); } buildCounterListLbRouteExtensionsResponse--; return o; @@ -2536,33 +2863,33 @@ api.ListLbRouteExtensionsResponse buildListLbRouteExtensionsResponse() { void checkListLbRouteExtensionsResponse(api.ListLbRouteExtensionsResponse o) { buildCounterListLbRouteExtensionsResponse++; if (buildCounterListLbRouteExtensionsResponse < 3) { - checkUnnamed52(o.lbRouteExtensions!); + checkUnnamed59(o.lbRouteExtensions!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed53(o.unreachable!); + checkUnnamed60(o.unreachable!); } buildCounterListLbRouteExtensionsResponse--; } -core.List buildUnnamed54() => [ +core.List buildUnnamed61() => [ buildLbTrafficExtension(), buildLbTrafficExtension(), ]; -void checkUnnamed54(core.List o) { +void checkUnnamed61(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkLbTrafficExtension(o[0]); checkLbTrafficExtension(o[1]); } -core.List buildUnnamed55() => [ +core.List buildUnnamed62() => [ 'foo', 'foo', ]; -void checkUnnamed55(core.List o) { +void checkUnnamed62(core.List o) { unittest.expect(o, unittest.hasLength(2)); 
unittest.expect( o[0], @@ -2579,9 +2906,9 @@ api.ListLbTrafficExtensionsResponse buildListLbTrafficExtensionsResponse() { final o = api.ListLbTrafficExtensionsResponse(); buildCounterListLbTrafficExtensionsResponse++; if (buildCounterListLbTrafficExtensionsResponse < 3) { - o.lbTrafficExtensions = buildUnnamed54(); + o.lbTrafficExtensions = buildUnnamed61(); o.nextPageToken = 'foo'; - o.unreachable = buildUnnamed55(); + o.unreachable = buildUnnamed62(); } buildCounterListLbTrafficExtensionsResponse--; return o; @@ -2591,22 +2918,22 @@ void checkListLbTrafficExtensionsResponse( api.ListLbTrafficExtensionsResponse o) { buildCounterListLbTrafficExtensionsResponse++; if (buildCounterListLbTrafficExtensionsResponse < 3) { - checkUnnamed54(o.lbTrafficExtensions!); + checkUnnamed61(o.lbTrafficExtensions!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed55(o.unreachable!); + checkUnnamed62(o.unreachable!); } buildCounterListLbTrafficExtensionsResponse--; } -core.List buildUnnamed56() => [ +core.List buildUnnamed63() => [ buildLocation(), buildLocation(), ]; -void checkUnnamed56(core.List o) { +void checkUnnamed63(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkLocation(o[0]); checkLocation(o[1]); @@ -2617,7 +2944,7 @@ api.ListLocationsResponse buildListLocationsResponse() { final o = api.ListLocationsResponse(); buildCounterListLocationsResponse++; if (buildCounterListLocationsResponse < 3) { - o.locations = buildUnnamed56(); + o.locations = buildUnnamed63(); o.nextPageToken = 'foo'; } buildCounterListLocationsResponse--; @@ -2627,7 +2954,7 @@ api.ListLocationsResponse buildListLocationsResponse() { void checkListLocationsResponse(api.ListLocationsResponse o) { buildCounterListLocationsResponse++; if (buildCounterListLocationsResponse < 3) { - checkUnnamed56(o.locations!); + checkUnnamed63(o.locations!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -2636,12 +2963,47 @@ void checkListLocationsResponse(api.ListLocationsResponse o) { buildCounterListLocationsResponse--; } -core.List buildUnnamed57() => [ +core.List buildUnnamed64() => [ + buildMeshRouteView(), + buildMeshRouteView(), + ]; + +void checkUnnamed64(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkMeshRouteView(o[0]); + checkMeshRouteView(o[1]); +} + +core.int buildCounterListMeshRouteViewsResponse = 0; +api.ListMeshRouteViewsResponse buildListMeshRouteViewsResponse() { + final o = api.ListMeshRouteViewsResponse(); + buildCounterListMeshRouteViewsResponse++; + if (buildCounterListMeshRouteViewsResponse < 3) { + o.meshRouteViews = buildUnnamed64(); + o.nextPageToken = 'foo'; + } + buildCounterListMeshRouteViewsResponse--; + return o; +} + +void checkListMeshRouteViewsResponse(api.ListMeshRouteViewsResponse o) { + buildCounterListMeshRouteViewsResponse++; + if (buildCounterListMeshRouteViewsResponse < 3) { + checkUnnamed64(o.meshRouteViews!); + unittest.expect( + o.nextPageToken!, + unittest.equals('foo'), + ); + } + buildCounterListMeshRouteViewsResponse--; +} + +core.List buildUnnamed65() => [ buildMesh(), buildMesh(), ]; -void checkUnnamed57(core.List o) { +void checkUnnamed65(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkMesh(o[0]); checkMesh(o[1]); @@ -2652,7 +3014,7 @@ api.ListMeshesResponse buildListMeshesResponse() { final o = api.ListMeshesResponse(); buildCounterListMeshesResponse++; if (buildCounterListMeshesResponse < 3) { - o.meshes = buildUnnamed57(); + o.meshes = buildUnnamed65(); o.nextPageToken = 'foo'; } 
buildCounterListMeshesResponse--; @@ -2662,7 +3024,7 @@ api.ListMeshesResponse buildListMeshesResponse() { void checkListMeshesResponse(api.ListMeshesResponse o) { buildCounterListMeshesResponse++; if (buildCounterListMeshesResponse < 3) { - checkUnnamed57(o.meshes!); + checkUnnamed65(o.meshes!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -2671,12 +3033,12 @@ void checkListMeshesResponse(api.ListMeshesResponse o) { buildCounterListMeshesResponse--; } -core.List buildUnnamed58() => [ +core.List buildUnnamed66() => [ buildOperation(), buildOperation(), ]; -void checkUnnamed58(core.List o) { +void checkUnnamed66(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkOperation(o[0]); checkOperation(o[1]); @@ -2688,7 +3050,7 @@ api.ListOperationsResponse buildListOperationsResponse() { buildCounterListOperationsResponse++; if (buildCounterListOperationsResponse < 3) { o.nextPageToken = 'foo'; - o.operations = buildUnnamed58(); + o.operations = buildUnnamed66(); } buildCounterListOperationsResponse--; return o; @@ -2701,17 +3063,17 @@ void checkListOperationsResponse(api.ListOperationsResponse o) { o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed58(o.operations!); + checkUnnamed66(o.operations!); } buildCounterListOperationsResponse--; } -core.List buildUnnamed59() => [ +core.List buildUnnamed67() => [ buildServiceBinding(), buildServiceBinding(), ]; -void checkUnnamed59(core.List o) { +void checkUnnamed67(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkServiceBinding(o[0]); checkServiceBinding(o[1]); @@ -2723,7 +3085,7 @@ api.ListServiceBindingsResponse buildListServiceBindingsResponse() { buildCounterListServiceBindingsResponse++; if (buildCounterListServiceBindingsResponse < 3) { o.nextPageToken = 'foo'; - o.serviceBindings = buildUnnamed59(); + o.serviceBindings = buildUnnamed67(); } buildCounterListServiceBindingsResponse--; return o; @@ -2736,17 +3098,17 @@ void checkListServiceBindingsResponse(api.ListServiceBindingsResponse o) { o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed59(o.serviceBindings!); + checkUnnamed67(o.serviceBindings!); } buildCounterListServiceBindingsResponse--; } -core.List buildUnnamed60() => [ +core.List buildUnnamed68() => [ buildServiceLbPolicy(), buildServiceLbPolicy(), ]; -void checkUnnamed60(core.List o) { +void checkUnnamed68(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkServiceLbPolicy(o[0]); checkServiceLbPolicy(o[1]); @@ -2758,7 +3120,7 @@ api.ListServiceLbPoliciesResponse buildListServiceLbPoliciesResponse() { buildCounterListServiceLbPoliciesResponse++; if (buildCounterListServiceLbPoliciesResponse < 3) { o.nextPageToken = 'foo'; - o.serviceLbPolicies = buildUnnamed60(); + o.serviceLbPolicies = buildUnnamed68(); } buildCounterListServiceLbPoliciesResponse--; return o; @@ -2771,17 +3133,17 @@ void checkListServiceLbPoliciesResponse(api.ListServiceLbPoliciesResponse o) { o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed60(o.serviceLbPolicies!); + checkUnnamed68(o.serviceLbPolicies!); } buildCounterListServiceLbPoliciesResponse--; } -core.List buildUnnamed61() => [ +core.List buildUnnamed69() => [ buildTcpRoute(), buildTcpRoute(), ]; -void checkUnnamed61(core.List o) { +void checkUnnamed69(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkTcpRoute(o[0]); checkTcpRoute(o[1]); @@ -2793,7 +3155,7 @@ api.ListTcpRoutesResponse buildListTcpRoutesResponse() { buildCounterListTcpRoutesResponse++; if (buildCounterListTcpRoutesResponse < 3) { o.nextPageToken = 
'foo'; - o.tcpRoutes = buildUnnamed61(); + o.tcpRoutes = buildUnnamed69(); } buildCounterListTcpRoutesResponse--; return o; @@ -2806,17 +3168,17 @@ void checkListTcpRoutesResponse(api.ListTcpRoutesResponse o) { o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed61(o.tcpRoutes!); + checkUnnamed69(o.tcpRoutes!); } buildCounterListTcpRoutesResponse--; } -core.List buildUnnamed62() => [ +core.List buildUnnamed70() => [ buildTlsRoute(), buildTlsRoute(), ]; -void checkUnnamed62(core.List o) { +void checkUnnamed70(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkTlsRoute(o[0]); checkTlsRoute(o[1]); @@ -2828,7 +3190,7 @@ api.ListTlsRoutesResponse buildListTlsRoutesResponse() { buildCounterListTlsRoutesResponse++; if (buildCounterListTlsRoutesResponse < 3) { o.nextPageToken = 'foo'; - o.tlsRoutes = buildUnnamed62(); + o.tlsRoutes = buildUnnamed70(); } buildCounterListTlsRoutesResponse--; return o; @@ -2841,17 +3203,87 @@ void checkListTlsRoutesResponse(api.ListTlsRoutesResponse o) { o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed62(o.tlsRoutes!); + checkUnnamed70(o.tlsRoutes!); } buildCounterListTlsRoutesResponse--; } -core.Map buildUnnamed63() => { +core.List buildUnnamed71() => [ + buildWasmPluginVersion(), + buildWasmPluginVersion(), + ]; + +void checkUnnamed71(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkWasmPluginVersion(o[0]); + checkWasmPluginVersion(o[1]); +} + +core.int buildCounterListWasmPluginVersionsResponse = 0; +api.ListWasmPluginVersionsResponse buildListWasmPluginVersionsResponse() { + final o = api.ListWasmPluginVersionsResponse(); + buildCounterListWasmPluginVersionsResponse++; + if (buildCounterListWasmPluginVersionsResponse < 3) { + o.nextPageToken = 'foo'; + o.wasmPluginVersions = buildUnnamed71(); + } + buildCounterListWasmPluginVersionsResponse--; + return o; +} + +void checkListWasmPluginVersionsResponse(api.ListWasmPluginVersionsResponse o) { + buildCounterListWasmPluginVersionsResponse++; + if (buildCounterListWasmPluginVersionsResponse < 3) { + unittest.expect( + o.nextPageToken!, + unittest.equals('foo'), + ); + checkUnnamed71(o.wasmPluginVersions!); + } + buildCounterListWasmPluginVersionsResponse--; +} + +core.List buildUnnamed72() => [ + buildWasmPlugin(), + buildWasmPlugin(), + ]; + +void checkUnnamed72(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkWasmPlugin(o[0]); + checkWasmPlugin(o[1]); +} + +core.int buildCounterListWasmPluginsResponse = 0; +api.ListWasmPluginsResponse buildListWasmPluginsResponse() { + final o = api.ListWasmPluginsResponse(); + buildCounterListWasmPluginsResponse++; + if (buildCounterListWasmPluginsResponse < 3) { + o.nextPageToken = 'foo'; + o.wasmPlugins = buildUnnamed72(); + } + buildCounterListWasmPluginsResponse--; + return o; +} + +void checkListWasmPluginsResponse(api.ListWasmPluginsResponse o) { + buildCounterListWasmPluginsResponse++; + if (buildCounterListWasmPluginsResponse < 3) { + unittest.expect( + o.nextPageToken!, + unittest.equals('foo'), + ); + checkUnnamed72(o.wasmPlugins!); + } + buildCounterListWasmPluginsResponse--; +} + +core.Map buildUnnamed73() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed63(core.Map o) { +void checkUnnamed73(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -2863,7 +3295,7 @@ void checkUnnamed63(core.Map o) { ); } -core.Map buildUnnamed64() => { +core.Map buildUnnamed74() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -2876,34 +3308,34 @@ core.Map buildUnnamed64() => { }, }; -void 
checkUnnamed64(core.Map o) { +void checkUnnamed74(core.Map o) { unittest.expect(o, unittest.hasLength(2)); - var casted5 = (o['x']!) as core.Map; - unittest.expect(casted5, unittest.hasLength(3)); + var casted9 = (o['x']!) as core.Map; + unittest.expect(casted9, unittest.hasLength(3)); unittest.expect( - casted5['list'], + casted9['list'], unittest.equals([1, 2, 3]), ); unittest.expect( - casted5['bool'], + casted9['bool'], unittest.equals(true), ); unittest.expect( - casted5['string'], + casted9['string'], unittest.equals('foo'), ); - var casted6 = (o['y']!) as core.Map; - unittest.expect(casted6, unittest.hasLength(3)); + var casted10 = (o['y']!) as core.Map; + unittest.expect(casted10, unittest.hasLength(3)); unittest.expect( - casted6['list'], + casted10['list'], unittest.equals([1, 2, 3]), ); unittest.expect( - casted6['bool'], + casted10['bool'], unittest.equals(true), ); unittest.expect( - casted6['string'], + casted10['string'], unittest.equals('foo'), ); } @@ -2914,9 +3346,9 @@ api.Location buildLocation() { buildCounterLocation++; if (buildCounterLocation < 3) { o.displayName = 'foo'; - o.labels = buildUnnamed63(); + o.labels = buildUnnamed73(); o.locationId = 'foo'; - o.metadata = buildUnnamed64(); + o.metadata = buildUnnamed74(); o.name = 'foo'; } buildCounterLocation--; @@ -2930,12 +3362,12 @@ void checkLocation(api.Location o) { o.displayName!, unittest.equals('foo'), ); - checkUnnamed63(o.labels!); + checkUnnamed73(o.labels!); unittest.expect( o.locationId!, unittest.equals('foo'), ); - checkUnnamed64(o.metadata!); + checkUnnamed74(o.metadata!); unittest.expect( o.name!, unittest.equals('foo'), @@ -2944,12 +3376,12 @@ void checkLocation(api.Location o) { buildCounterLocation--; } -core.Map buildUnnamed65() => { +core.Map buildUnnamed75() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed65(core.Map o) { +void checkUnnamed75(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -2970,7 +3402,7 @@ api.Mesh buildMesh() { o.description = 'foo'; o.envoyHeaders = 'foo'; o.interceptionPort = 42; - o.labels = buildUnnamed65(); + o.labels = buildUnnamed75(); o.name = 'foo'; o.selfLink = 'foo'; o.updateTime = 'foo'; @@ -2998,7 +3430,7 @@ void checkMesh(api.Mesh o) { o.interceptionPort!, unittest.equals(42), ); - checkUnnamed65(o.labels!); + checkUnnamed75(o.labels!); unittest.expect( o.name!, unittest.equals('foo'), @@ -3015,7 +3447,49 @@ void checkMesh(api.Mesh o) { buildCounterMesh--; } -core.Map buildUnnamed66() => { +core.int buildCounterMeshRouteView = 0; +api.MeshRouteView buildMeshRouteView() { + final o = api.MeshRouteView(); + buildCounterMeshRouteView++; + if (buildCounterMeshRouteView < 3) { + o.name = 'foo'; + o.routeId = 'foo'; + o.routeLocation = 'foo'; + o.routeProjectNumber = 'foo'; + o.routeType = 'foo'; + } + buildCounterMeshRouteView--; + return o; +} + +void checkMeshRouteView(api.MeshRouteView o) { + buildCounterMeshRouteView++; + if (buildCounterMeshRouteView < 3) { + unittest.expect( + o.name!, + unittest.equals('foo'), + ); + unittest.expect( + o.routeId!, + unittest.equals('foo'), + ); + unittest.expect( + o.routeLocation!, + unittest.equals('foo'), + ); + unittest.expect( + o.routeProjectNumber!, + unittest.equals('foo'), + ); + unittest.expect( + o.routeType!, + unittest.equals('foo'), + ); + } + buildCounterMeshRouteView--; +} + +core.Map buildUnnamed76() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -3028,39 +3502,39 @@ core.Map buildUnnamed66() => { }, }; -void checkUnnamed66(core.Map o) { +void checkUnnamed76(core.Map o) 
{ unittest.expect(o, unittest.hasLength(2)); - var casted7 = (o['x']!) as core.Map; - unittest.expect(casted7, unittest.hasLength(3)); + var casted11 = (o['x']!) as core.Map; + unittest.expect(casted11, unittest.hasLength(3)); unittest.expect( - casted7['list'], + casted11['list'], unittest.equals([1, 2, 3]), ); unittest.expect( - casted7['bool'], + casted11['bool'], unittest.equals(true), ); unittest.expect( - casted7['string'], + casted11['string'], unittest.equals('foo'), ); - var casted8 = (o['y']!) as core.Map; - unittest.expect(casted8, unittest.hasLength(3)); + var casted12 = (o['y']!) as core.Map; + unittest.expect(casted12, unittest.hasLength(3)); unittest.expect( - casted8['list'], + casted12['list'], unittest.equals([1, 2, 3]), ); unittest.expect( - casted8['bool'], + casted12['bool'], unittest.equals(true), ); unittest.expect( - casted8['string'], + casted12['string'], unittest.equals('foo'), ); } -core.Map buildUnnamed67() => { +core.Map buildUnnamed77() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -3073,34 +3547,34 @@ core.Map buildUnnamed67() => { }, }; -void checkUnnamed67(core.Map o) { +void checkUnnamed77(core.Map o) { unittest.expect(o, unittest.hasLength(2)); - var casted9 = (o['x']!) as core.Map; - unittest.expect(casted9, unittest.hasLength(3)); + var casted13 = (o['x']!) as core.Map; + unittest.expect(casted13, unittest.hasLength(3)); unittest.expect( - casted9['list'], + casted13['list'], unittest.equals([1, 2, 3]), ); unittest.expect( - casted9['bool'], + casted13['bool'], unittest.equals(true), ); unittest.expect( - casted9['string'], + casted13['string'], unittest.equals('foo'), ); - var casted10 = (o['y']!) as core.Map; - unittest.expect(casted10, unittest.hasLength(3)); + var casted14 = (o['y']!) as core.Map; + unittest.expect(casted14, unittest.hasLength(3)); unittest.expect( - casted10['list'], + casted14['list'], unittest.equals([1, 2, 3]), ); unittest.expect( - casted10['bool'], + casted14['bool'], unittest.equals(true), ); unittest.expect( - casted10['string'], + casted14['string'], unittest.equals('foo'), ); } @@ -3112,9 +3586,9 @@ api.Operation buildOperation() { if (buildCounterOperation < 3) { o.done = true; o.error = buildStatus(); - o.metadata = buildUnnamed66(); + o.metadata = buildUnnamed76(); o.name = 'foo'; - o.response = buildUnnamed67(); + o.response = buildUnnamed77(); } buildCounterOperation--; return o; @@ -3125,33 +3599,33 @@ void checkOperation(api.Operation o) { if (buildCounterOperation < 3) { unittest.expect(o.done!, unittest.isTrue); checkStatus(o.error!); - checkUnnamed66(o.metadata!); + checkUnnamed76(o.metadata!); unittest.expect( o.name!, unittest.equals('foo'), ); - checkUnnamed67(o.response!); + checkUnnamed77(o.response!); } buildCounterOperation--; } -core.List buildUnnamed68() => [ +core.List buildUnnamed78() => [ buildAuditConfig(), buildAuditConfig(), ]; -void checkUnnamed68(core.List o) { +void checkUnnamed78(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkAuditConfig(o[0]); checkAuditConfig(o[1]); } -core.List buildUnnamed69() => [ +core.List buildUnnamed79() => [ buildBinding(), buildBinding(), ]; -void checkUnnamed69(core.List o) { +void checkUnnamed79(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkBinding(o[0]); checkBinding(o[1]); @@ -3162,8 +3636,8 @@ api.Policy buildPolicy() { final o = api.Policy(); buildCounterPolicy++; if (buildCounterPolicy < 3) { - o.auditConfigs = buildUnnamed68(); - o.bindings = buildUnnamed69(); + o.auditConfigs = buildUnnamed78(); + o.bindings = 
buildUnnamed79(); o.etag = 'foo'; o.version = 42; } @@ -3174,8 +3648,8 @@ api.Policy buildPolicy() { void checkPolicy(api.Policy o) { buildCounterPolicy++; if (buildCounterPolicy < 3) { - checkUnnamed68(o.auditConfigs!); - checkUnnamed69(o.bindings!); + checkUnnamed78(o.auditConfigs!); + checkUnnamed79(o.bindings!); unittest.expect( o.etag!, unittest.equals('foo'), @@ -3188,12 +3662,12 @@ void checkPolicy(api.Policy o) { buildCounterPolicy--; } -core.Map buildUnnamed70() => { +core.Map buildUnnamed80() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed70(core.Map o) { +void checkUnnamed80(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -3212,7 +3686,7 @@ api.ServiceBinding buildServiceBinding() { if (buildCounterServiceBinding < 3) { o.createTime = 'foo'; o.description = 'foo'; - o.labels = buildUnnamed70(); + o.labels = buildUnnamed80(); o.name = 'foo'; o.service = 'foo'; o.serviceId = 'foo'; @@ -3233,7 +3707,7 @@ void checkServiceBinding(api.ServiceBinding o) { o.description!, unittest.equals('foo'), ); - checkUnnamed70(o.labels!); + checkUnnamed80(o.labels!); unittest.expect( o.name!, unittest.equals('foo'), @@ -3254,12 +3728,12 @@ void checkServiceBinding(api.ServiceBinding o) { buildCounterServiceBinding--; } -core.Map buildUnnamed71() => { +core.Map buildUnnamed81() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed71(core.Map o) { +void checkUnnamed81(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -3280,7 +3754,7 @@ api.ServiceLbPolicy buildServiceLbPolicy() { o.createTime = 'foo'; o.description = 'foo'; o.failoverConfig = buildServiceLbPolicyFailoverConfig(); - o.labels = buildUnnamed71(); + o.labels = buildUnnamed81(); o.loadBalancingAlgorithm = 'foo'; o.name = 'foo'; o.updateTime = 'foo'; @@ -3302,7 +3776,7 @@ void checkServiceLbPolicy(api.ServiceLbPolicy o) { unittest.equals('foo'), ); checkServiceLbPolicyFailoverConfig(o.failoverConfig!); - checkUnnamed71(o.labels!); + checkUnnamed81(o.labels!); unittest.expect( o.loadBalancingAlgorithm!, unittest.equals('foo'), @@ -3385,7 +3859,7 @@ void checkSetIamPolicyRequest(api.SetIamPolicyRequest o) { buildCounterSetIamPolicyRequest--; } -core.Map buildUnnamed72() => { +core.Map buildUnnamed82() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -3398,47 +3872,47 @@ core.Map buildUnnamed72() => { }, }; -void checkUnnamed72(core.Map o) { +void checkUnnamed82(core.Map o) { unittest.expect(o, unittest.hasLength(2)); - var casted11 = (o['x']!) as core.Map; - unittest.expect(casted11, unittest.hasLength(3)); + var casted15 = (o['x']!) as core.Map; + unittest.expect(casted15, unittest.hasLength(3)); unittest.expect( - casted11['list'], + casted15['list'], unittest.equals([1, 2, 3]), ); unittest.expect( - casted11['bool'], + casted15['bool'], unittest.equals(true), ); unittest.expect( - casted11['string'], + casted15['string'], unittest.equals('foo'), ); - var casted12 = (o['y']!) as core.Map; - unittest.expect(casted12, unittest.hasLength(3)); + var casted16 = (o['y']!) 
as core.Map; + unittest.expect(casted16, unittest.hasLength(3)); unittest.expect( - casted12['list'], + casted16['list'], unittest.equals([1, 2, 3]), ); unittest.expect( - casted12['bool'], + casted16['bool'], unittest.equals(true), ); unittest.expect( - casted12['string'], + casted16['string'], unittest.equals('foo'), ); } -core.List> buildUnnamed73() => [ - buildUnnamed72(), - buildUnnamed72(), +core.List> buildUnnamed83() => [ + buildUnnamed82(), + buildUnnamed82(), ]; -void checkUnnamed73(core.List> o) { +void checkUnnamed83(core.List> o) { unittest.expect(o, unittest.hasLength(2)); - checkUnnamed72(o[0]); - checkUnnamed72(o[1]); + checkUnnamed82(o[0]); + checkUnnamed82(o[1]); } core.int buildCounterStatus = 0; @@ -3447,7 +3921,7 @@ api.Status buildStatus() { buildCounterStatus++; if (buildCounterStatus < 3) { o.code = 42; - o.details = buildUnnamed73(); + o.details = buildUnnamed83(); o.message = 'foo'; } buildCounterStatus--; @@ -3461,7 +3935,7 @@ void checkStatus(api.Status o) { o.code!, unittest.equals(42), ); - checkUnnamed73(o.details!); + checkUnnamed83(o.details!); unittest.expect( o.message!, unittest.equals('foo'), @@ -3470,12 +3944,12 @@ void checkStatus(api.Status o) { buildCounterStatus--; } -core.List buildUnnamed74() => [ +core.List buildUnnamed84() => [ 'foo', 'foo', ]; -void checkUnnamed74(core.List o) { +void checkUnnamed84(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -3487,12 +3961,12 @@ void checkUnnamed74(core.List o) { ); } -core.Map buildUnnamed75() => { +core.Map buildUnnamed85() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed75(core.Map o) { +void checkUnnamed85(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -3504,12 +3978,12 @@ void checkUnnamed75(core.Map o) { ); } -core.List buildUnnamed76() => [ +core.List buildUnnamed86() => [ 'foo', 'foo', ]; -void checkUnnamed76(core.List o) { +void checkUnnamed86(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -3521,12 +3995,12 @@ void checkUnnamed76(core.List o) { ); } -core.List buildUnnamed77() => [ +core.List buildUnnamed87() => [ buildTcpRouteRouteRule(), buildTcpRouteRouteRule(), ]; -void checkUnnamed77(core.List o) { +void checkUnnamed87(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkTcpRouteRouteRule(o[0]); checkTcpRouteRouteRule(o[1]); @@ -3539,11 +4013,11 @@ api.TcpRoute buildTcpRoute() { if (buildCounterTcpRoute < 3) { o.createTime = 'foo'; o.description = 'foo'; - o.gateways = buildUnnamed74(); - o.labels = buildUnnamed75(); - o.meshes = buildUnnamed76(); + o.gateways = buildUnnamed84(); + o.labels = buildUnnamed85(); + o.meshes = buildUnnamed86(); o.name = 'foo'; - o.rules = buildUnnamed77(); + o.rules = buildUnnamed87(); o.selfLink = 'foo'; o.updateTime = 'foo'; } @@ -3562,14 +4036,14 @@ void checkTcpRoute(api.TcpRoute o) { o.description!, unittest.equals('foo'), ); - checkUnnamed74(o.gateways!); - checkUnnamed75(o.labels!); - checkUnnamed76(o.meshes!); + checkUnnamed84(o.gateways!); + checkUnnamed85(o.labels!); + checkUnnamed86(o.meshes!); unittest.expect( o.name!, unittest.equals('foo'), ); - checkUnnamed77(o.rules!); + checkUnnamed87(o.rules!); unittest.expect( o.selfLink!, unittest.equals('foo'), @@ -3582,12 +4056,12 @@ void checkTcpRoute(api.TcpRoute o) { buildCounterTcpRoute--; } -core.List buildUnnamed78() => [ +core.List buildUnnamed88() => [ buildTcpRouteRouteDestination(), buildTcpRouteRouteDestination(), ]; -void checkUnnamed78(core.List o) { +void 
checkUnnamed88(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkTcpRouteRouteDestination(o[0]); checkTcpRouteRouteDestination(o[1]); @@ -3598,7 +4072,7 @@ api.TcpRouteRouteAction buildTcpRouteRouteAction() { final o = api.TcpRouteRouteAction(); buildCounterTcpRouteRouteAction++; if (buildCounterTcpRouteRouteAction < 3) { - o.destinations = buildUnnamed78(); + o.destinations = buildUnnamed88(); o.idleTimeout = 'foo'; o.originalDestination = true; } @@ -3609,7 +4083,7 @@ api.TcpRouteRouteAction buildTcpRouteRouteAction() { void checkTcpRouteRouteAction(api.TcpRouteRouteAction o) { buildCounterTcpRouteRouteAction++; if (buildCounterTcpRouteRouteAction < 3) { - checkUnnamed78(o.destinations!); + checkUnnamed88(o.destinations!); unittest.expect( o.idleTimeout!, unittest.equals('foo'), @@ -3673,12 +4147,12 @@ void checkTcpRouteRouteMatch(api.TcpRouteRouteMatch o) { buildCounterTcpRouteRouteMatch--; } -core.List buildUnnamed79() => [ +core.List buildUnnamed89() => [ buildTcpRouteRouteMatch(), buildTcpRouteRouteMatch(), ]; -void checkUnnamed79(core.List o) { +void checkUnnamed89(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkTcpRouteRouteMatch(o[0]); checkTcpRouteRouteMatch(o[1]); @@ -3690,7 +4164,7 @@ api.TcpRouteRouteRule buildTcpRouteRouteRule() { buildCounterTcpRouteRouteRule++; if (buildCounterTcpRouteRouteRule < 3) { o.action = buildTcpRouteRouteAction(); - o.matches = buildUnnamed79(); + o.matches = buildUnnamed89(); } buildCounterTcpRouteRouteRule--; return o; @@ -3700,17 +4174,17 @@ void checkTcpRouteRouteRule(api.TcpRouteRouteRule o) { buildCounterTcpRouteRouteRule++; if (buildCounterTcpRouteRouteRule < 3) { checkTcpRouteRouteAction(o.action!); - checkUnnamed79(o.matches!); + checkUnnamed89(o.matches!); } buildCounterTcpRouteRouteRule--; } -core.List buildUnnamed80() => [ +core.List buildUnnamed90() => [ 'foo', 'foo', ]; -void checkUnnamed80(core.List o) { +void checkUnnamed90(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -3727,7 +4201,7 @@ api.TestIamPermissionsRequest buildTestIamPermissionsRequest() { final o = api.TestIamPermissionsRequest(); buildCounterTestIamPermissionsRequest++; if (buildCounterTestIamPermissionsRequest < 3) { - o.permissions = buildUnnamed80(); + o.permissions = buildUnnamed90(); } buildCounterTestIamPermissionsRequest--; return o; @@ -3736,17 +4210,17 @@ api.TestIamPermissionsRequest buildTestIamPermissionsRequest() { void checkTestIamPermissionsRequest(api.TestIamPermissionsRequest o) { buildCounterTestIamPermissionsRequest++; if (buildCounterTestIamPermissionsRequest < 3) { - checkUnnamed80(o.permissions!); + checkUnnamed90(o.permissions!); } buildCounterTestIamPermissionsRequest--; } -core.List buildUnnamed81() => [ +core.List buildUnnamed91() => [ 'foo', 'foo', ]; -void checkUnnamed81(core.List o) { +void checkUnnamed91(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -3763,7 +4237,7 @@ api.TestIamPermissionsResponse buildTestIamPermissionsResponse() { final o = api.TestIamPermissionsResponse(); buildCounterTestIamPermissionsResponse++; if (buildCounterTestIamPermissionsResponse < 3) { - o.permissions = buildUnnamed81(); + o.permissions = buildUnnamed91(); } buildCounterTestIamPermissionsResponse--; return o; @@ -3772,17 +4246,17 @@ api.TestIamPermissionsResponse buildTestIamPermissionsResponse() { void checkTestIamPermissionsResponse(api.TestIamPermissionsResponse o) { buildCounterTestIamPermissionsResponse++; if (buildCounterTestIamPermissionsResponse < 
3) { - checkUnnamed81(o.permissions!); + checkUnnamed91(o.permissions!); } buildCounterTestIamPermissionsResponse--; } -core.List buildUnnamed82() => [ +core.List buildUnnamed92() => [ 'foo', 'foo', ]; -void checkUnnamed82(core.List o) { +void checkUnnamed92(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -3794,12 +4268,12 @@ void checkUnnamed82(core.List o) { ); } -core.Map buildUnnamed83() => { +core.Map buildUnnamed93() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed83(core.Map o) { +void checkUnnamed93(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -3811,12 +4285,12 @@ void checkUnnamed83(core.Map o) { ); } -core.List buildUnnamed84() => [ +core.List buildUnnamed94() => [ 'foo', 'foo', ]; -void checkUnnamed84(core.List o) { +void checkUnnamed94(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -3828,12 +4302,12 @@ void checkUnnamed84(core.List o) { ); } -core.List buildUnnamed85() => [ +core.List buildUnnamed95() => [ buildTlsRouteRouteRule(), buildTlsRouteRouteRule(), ]; -void checkUnnamed85(core.List o) { +void checkUnnamed95(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkTlsRouteRouteRule(o[0]); checkTlsRouteRouteRule(o[1]); @@ -3846,11 +4320,11 @@ api.TlsRoute buildTlsRoute() { if (buildCounterTlsRoute < 3) { o.createTime = 'foo'; o.description = 'foo'; - o.gateways = buildUnnamed82(); - o.labels = buildUnnamed83(); - o.meshes = buildUnnamed84(); + o.gateways = buildUnnamed92(); + o.labels = buildUnnamed93(); + o.meshes = buildUnnamed94(); o.name = 'foo'; - o.rules = buildUnnamed85(); + o.rules = buildUnnamed95(); o.selfLink = 'foo'; o.updateTime = 'foo'; } @@ -3869,14 +4343,14 @@ void checkTlsRoute(api.TlsRoute o) { o.description!, unittest.equals('foo'), ); - checkUnnamed82(o.gateways!); - checkUnnamed83(o.labels!); - checkUnnamed84(o.meshes!); + checkUnnamed92(o.gateways!); + checkUnnamed93(o.labels!); + checkUnnamed94(o.meshes!); unittest.expect( o.name!, unittest.equals('foo'), ); - checkUnnamed85(o.rules!); + checkUnnamed95(o.rules!); unittest.expect( o.selfLink!, unittest.equals('foo'), @@ -3889,12 +4363,12 @@ void checkTlsRoute(api.TlsRoute o) { buildCounterTlsRoute--; } -core.List buildUnnamed86() => [ +core.List buildUnnamed96() => [ buildTlsRouteRouteDestination(), buildTlsRouteRouteDestination(), ]; -void checkUnnamed86(core.List o) { +void checkUnnamed96(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkTlsRouteRouteDestination(o[0]); checkTlsRouteRouteDestination(o[1]); @@ -3905,7 +4379,7 @@ api.TlsRouteRouteAction buildTlsRouteRouteAction() { final o = api.TlsRouteRouteAction(); buildCounterTlsRouteRouteAction++; if (buildCounterTlsRouteRouteAction < 3) { - o.destinations = buildUnnamed86(); + o.destinations = buildUnnamed96(); o.idleTimeout = 'foo'; } buildCounterTlsRouteRouteAction--; @@ -3915,7 +4389,7 @@ api.TlsRouteRouteAction buildTlsRouteRouteAction() { void checkTlsRouteRouteAction(api.TlsRouteRouteAction o) { buildCounterTlsRouteRouteAction++; if (buildCounterTlsRouteRouteAction < 3) { - checkUnnamed86(o.destinations!); + checkUnnamed96(o.destinations!); unittest.expect( o.idleTimeout!, unittest.equals('foo'), @@ -3951,12 +4425,12 @@ void checkTlsRouteRouteDestination(api.TlsRouteRouteDestination o) { buildCounterTlsRouteRouteDestination--; } -core.List buildUnnamed87() => [ +core.List buildUnnamed97() => [ 'foo', 'foo', ]; -void checkUnnamed87(core.List o) { +void checkUnnamed97(core.List o) { unittest.expect(o, 
unittest.hasLength(2)); unittest.expect( o[0], @@ -3968,12 +4442,12 @@ void checkUnnamed87(core.List o) { ); } -core.List buildUnnamed88() => [ +core.List buildUnnamed98() => [ 'foo', 'foo', ]; -void checkUnnamed88(core.List o) { +void checkUnnamed98(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -3990,8 +4464,8 @@ api.TlsRouteRouteMatch buildTlsRouteRouteMatch() { final o = api.TlsRouteRouteMatch(); buildCounterTlsRouteRouteMatch++; if (buildCounterTlsRouteRouteMatch < 3) { - o.alpn = buildUnnamed87(); - o.sniHost = buildUnnamed88(); + o.alpn = buildUnnamed97(); + o.sniHost = buildUnnamed98(); } buildCounterTlsRouteRouteMatch--; return o; @@ -4000,18 +4474,18 @@ api.TlsRouteRouteMatch buildTlsRouteRouteMatch() { void checkTlsRouteRouteMatch(api.TlsRouteRouteMatch o) { buildCounterTlsRouteRouteMatch++; if (buildCounterTlsRouteRouteMatch < 3) { - checkUnnamed87(o.alpn!); - checkUnnamed88(o.sniHost!); + checkUnnamed97(o.alpn!); + checkUnnamed98(o.sniHost!); } buildCounterTlsRouteRouteMatch--; } -core.List buildUnnamed89() => [ +core.List buildUnnamed99() => [ buildTlsRouteRouteMatch(), buildTlsRouteRouteMatch(), ]; -void checkUnnamed89(core.List o) { +void checkUnnamed99(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkTlsRouteRouteMatch(o[0]); checkTlsRouteRouteMatch(o[1]); @@ -4023,7 +4497,7 @@ api.TlsRouteRouteRule buildTlsRouteRouteRule() { buildCounterTlsRouteRouteRule++; if (buildCounterTlsRouteRouteRule < 3) { o.action = buildTlsRouteRouteAction(); - o.matches = buildUnnamed89(); + o.matches = buildUnnamed99(); } buildCounterTlsRouteRouteRule--; return o; @@ -4033,17 +4507,17 @@ void checkTlsRouteRouteRule(api.TlsRouteRouteRule o) { buildCounterTlsRouteRouteRule++; if (buildCounterTlsRouteRouteRule < 3) { checkTlsRouteRouteAction(o.action!); - checkUnnamed89(o.matches!); + checkUnnamed99(o.matches!); } buildCounterTlsRouteRouteRule--; } -core.List buildUnnamed90() => [ +core.List buildUnnamed100() => [ 'foo', 'foo', ]; -void checkUnnamed90(core.List o) { +void checkUnnamed100(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -4060,7 +4534,7 @@ api.TrafficPortSelector buildTrafficPortSelector() { final o = api.TrafficPortSelector(); buildCounterTrafficPortSelector++; if (buildCounterTrafficPortSelector < 3) { - o.ports = buildUnnamed90(); + o.ports = buildUnnamed100(); } buildCounterTrafficPortSelector--; return o; @@ -4069,11 +4543,308 @@ api.TrafficPortSelector buildTrafficPortSelector() { void checkTrafficPortSelector(api.TrafficPortSelector o) { buildCounterTrafficPortSelector++; if (buildCounterTrafficPortSelector < 3) { - checkUnnamed90(o.ports!); + checkUnnamed100(o.ports!); } buildCounterTrafficPortSelector--; } +core.Map buildUnnamed101() => { + 'x': 'foo', + 'y': 'foo', + }; + +void checkUnnamed101(core.Map o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o['x']!, + unittest.equals('foo'), + ); + unittest.expect( + o['y']!, + unittest.equals('foo'), + ); +} + +core.List buildUnnamed102() => [ + buildWasmPluginUsedBy(), + buildWasmPluginUsedBy(), + ]; + +void checkUnnamed102(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkWasmPluginUsedBy(o[0]); + checkWasmPluginUsedBy(o[1]); +} + +core.Map buildUnnamed103() => { + 'x': buildWasmPluginVersionDetails(), + 'y': buildWasmPluginVersionDetails(), + }; + +void checkUnnamed103(core.Map o) { + unittest.expect(o, unittest.hasLength(2)); + checkWasmPluginVersionDetails(o['x']!); + 
checkWasmPluginVersionDetails(o['y']!); +} + +core.int buildCounterWasmPlugin = 0; +api.WasmPlugin buildWasmPlugin() { + final o = api.WasmPlugin(); + buildCounterWasmPlugin++; + if (buildCounterWasmPlugin < 3) { + o.createTime = 'foo'; + o.description = 'foo'; + o.labels = buildUnnamed101(); + o.logConfig = buildWasmPluginLogConfig(); + o.mainVersionId = 'foo'; + o.name = 'foo'; + o.updateTime = 'foo'; + o.usedBy = buildUnnamed102(); + o.versions = buildUnnamed103(); + } + buildCounterWasmPlugin--; + return o; +} + +void checkWasmPlugin(api.WasmPlugin o) { + buildCounterWasmPlugin++; + if (buildCounterWasmPlugin < 3) { + unittest.expect( + o.createTime!, + unittest.equals('foo'), + ); + unittest.expect( + o.description!, + unittest.equals('foo'), + ); + checkUnnamed101(o.labels!); + checkWasmPluginLogConfig(o.logConfig!); + unittest.expect( + o.mainVersionId!, + unittest.equals('foo'), + ); + unittest.expect( + o.name!, + unittest.equals('foo'), + ); + unittest.expect( + o.updateTime!, + unittest.equals('foo'), + ); + checkUnnamed102(o.usedBy!); + checkUnnamed103(o.versions!); + } + buildCounterWasmPlugin--; +} + +core.int buildCounterWasmPluginLogConfig = 0; +api.WasmPluginLogConfig buildWasmPluginLogConfig() { + final o = api.WasmPluginLogConfig(); + buildCounterWasmPluginLogConfig++; + if (buildCounterWasmPluginLogConfig < 3) { + o.enable = true; + o.minLogLevel = 'foo'; + o.sampleRate = 42.0; + } + buildCounterWasmPluginLogConfig--; + return o; +} + +void checkWasmPluginLogConfig(api.WasmPluginLogConfig o) { + buildCounterWasmPluginLogConfig++; + if (buildCounterWasmPluginLogConfig < 3) { + unittest.expect(o.enable!, unittest.isTrue); + unittest.expect( + o.minLogLevel!, + unittest.equals('foo'), + ); + unittest.expect( + o.sampleRate!, + unittest.equals(42.0), + ); + } + buildCounterWasmPluginLogConfig--; +} + +core.int buildCounterWasmPluginUsedBy = 0; +api.WasmPluginUsedBy buildWasmPluginUsedBy() { + final o = api.WasmPluginUsedBy(); + buildCounterWasmPluginUsedBy++; + if (buildCounterWasmPluginUsedBy < 3) { + o.name = 'foo'; + } + buildCounterWasmPluginUsedBy--; + return o; +} + +void checkWasmPluginUsedBy(api.WasmPluginUsedBy o) { + buildCounterWasmPluginUsedBy++; + if (buildCounterWasmPluginUsedBy < 3) { + unittest.expect( + o.name!, + unittest.equals('foo'), + ); + } + buildCounterWasmPluginUsedBy--; +} + +core.Map buildUnnamed104() => { + 'x': 'foo', + 'y': 'foo', + }; + +void checkUnnamed104(core.Map o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o['x']!, + unittest.equals('foo'), + ); + unittest.expect( + o['y']!, + unittest.equals('foo'), + ); +} + +core.int buildCounterWasmPluginVersion = 0; +api.WasmPluginVersion buildWasmPluginVersion() { + final o = api.WasmPluginVersion(); + buildCounterWasmPluginVersion++; + if (buildCounterWasmPluginVersion < 3) { + o.createTime = 'foo'; + o.description = 'foo'; + o.imageDigest = 'foo'; + o.imageUri = 'foo'; + o.labels = buildUnnamed104(); + o.name = 'foo'; + o.pluginConfigData = 'foo'; + o.pluginConfigDigest = 'foo'; + o.pluginConfigUri = 'foo'; + o.updateTime = 'foo'; + } + buildCounterWasmPluginVersion--; + return o; +} + +void checkWasmPluginVersion(api.WasmPluginVersion o) { + buildCounterWasmPluginVersion++; + if (buildCounterWasmPluginVersion < 3) { + unittest.expect( + o.createTime!, + unittest.equals('foo'), + ); + unittest.expect( + o.description!, + unittest.equals('foo'), + ); + unittest.expect( + o.imageDigest!, + unittest.equals('foo'), + ); + unittest.expect( + o.imageUri!, + 
unittest.equals('foo'), + ); + checkUnnamed104(o.labels!); + unittest.expect( + o.name!, + unittest.equals('foo'), + ); + unittest.expect( + o.pluginConfigData!, + unittest.equals('foo'), + ); + unittest.expect( + o.pluginConfigDigest!, + unittest.equals('foo'), + ); + unittest.expect( + o.pluginConfigUri!, + unittest.equals('foo'), + ); + unittest.expect( + o.updateTime!, + unittest.equals('foo'), + ); + } + buildCounterWasmPluginVersion--; +} + +core.Map buildUnnamed105() => { + 'x': 'foo', + 'y': 'foo', + }; + +void checkUnnamed105(core.Map o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o['x']!, + unittest.equals('foo'), + ); + unittest.expect( + o['y']!, + unittest.equals('foo'), + ); +} + +core.int buildCounterWasmPluginVersionDetails = 0; +api.WasmPluginVersionDetails buildWasmPluginVersionDetails() { + final o = api.WasmPluginVersionDetails(); + buildCounterWasmPluginVersionDetails++; + if (buildCounterWasmPluginVersionDetails < 3) { + o.createTime = 'foo'; + o.description = 'foo'; + o.imageDigest = 'foo'; + o.imageUri = 'foo'; + o.labels = buildUnnamed105(); + o.pluginConfigData = 'foo'; + o.pluginConfigDigest = 'foo'; + o.pluginConfigUri = 'foo'; + o.updateTime = 'foo'; + } + buildCounterWasmPluginVersionDetails--; + return o; +} + +void checkWasmPluginVersionDetails(api.WasmPluginVersionDetails o) { + buildCounterWasmPluginVersionDetails++; + if (buildCounterWasmPluginVersionDetails < 3) { + unittest.expect( + o.createTime!, + unittest.equals('foo'), + ); + unittest.expect( + o.description!, + unittest.equals('foo'), + ); + unittest.expect( + o.imageDigest!, + unittest.equals('foo'), + ); + unittest.expect( + o.imageUri!, + unittest.equals('foo'), + ); + checkUnnamed105(o.labels!); + unittest.expect( + o.pluginConfigData!, + unittest.equals('foo'), + ); + unittest.expect( + o.pluginConfigDigest!, + unittest.equals('foo'), + ); + unittest.expect( + o.pluginConfigUri!, + unittest.equals('foo'), + ); + unittest.expect( + o.updateTime!, + unittest.equals('foo'), + ); + } + buildCounterWasmPluginVersionDetails--; +} + void main() { unittest.group('obj-schema-AuditConfig', () { unittest.test('to-json--from-json', () async { @@ -4095,6 +4866,16 @@ void main() { }); }); + unittest.group('obj-schema-AuthzExtension', () { + unittest.test('to-json--from-json', () async { + final o = buildAuthzExtension(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.AuthzExtension.fromJson( + oJson as core.Map); + checkAuthzExtension(od); + }); + }); + unittest.group('obj-schema-Binding', () { unittest.test('to-json--from-json', () async { final o = buildBinding(); @@ -4216,6 +4997,16 @@ void main() { }); }); + unittest.group('obj-schema-GatewayRouteView', () { + unittest.test('to-json--from-json', () async { + final o = buildGatewayRouteView(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GatewayRouteView.fromJson( + oJson as core.Map); + checkGatewayRouteView(od); + }); + }); + unittest.group('obj-schema-GrpcRoute', () { unittest.test('to-json--from-json', () async { final o = buildGrpcRoute(); @@ -4546,6 +5337,16 @@ void main() { }); }); + unittest.group('obj-schema-ListAuthzExtensionsResponse', () { + unittest.test('to-json--from-json', () async { + final o = buildListAuthzExtensionsResponse(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.ListAuthzExtensionsResponse.fromJson( + oJson as core.Map); + checkListAuthzExtensionsResponse(od); + }); + }); + 
unittest.group('obj-schema-ListEndpointPoliciesResponse', () { unittest.test('to-json--from-json', () async { final o = buildListEndpointPoliciesResponse(); @@ -4556,6 +5357,16 @@ void main() { }); }); + unittest.group('obj-schema-ListGatewayRouteViewsResponse', () { + unittest.test('to-json--from-json', () async { + final o = buildListGatewayRouteViewsResponse(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.ListGatewayRouteViewsResponse.fromJson( + oJson as core.Map); + checkListGatewayRouteViewsResponse(od); + }); + }); + unittest.group('obj-schema-ListGatewaysResponse', () { unittest.test('to-json--from-json', () async { final o = buildListGatewaysResponse(); @@ -4616,6 +5427,16 @@ void main() { }); }); + unittest.group('obj-schema-ListMeshRouteViewsResponse', () { + unittest.test('to-json--from-json', () async { + final o = buildListMeshRouteViewsResponse(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.ListMeshRouteViewsResponse.fromJson( + oJson as core.Map); + checkListMeshRouteViewsResponse(od); + }); + }); + unittest.group('obj-schema-ListMeshesResponse', () { unittest.test('to-json--from-json', () async { final o = buildListMeshesResponse(); @@ -4676,6 +5497,26 @@ void main() { }); }); + unittest.group('obj-schema-ListWasmPluginVersionsResponse', () { + unittest.test('to-json--from-json', () async { + final o = buildListWasmPluginVersionsResponse(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.ListWasmPluginVersionsResponse.fromJson( + oJson as core.Map); + checkListWasmPluginVersionsResponse(od); + }); + }); + + unittest.group('obj-schema-ListWasmPluginsResponse', () { + unittest.test('to-json--from-json', () async { + final o = buildListWasmPluginsResponse(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.ListWasmPluginsResponse.fromJson( + oJson as core.Map); + checkListWasmPluginsResponse(od); + }); + }); + unittest.group('obj-schema-Location', () { unittest.test('to-json--from-json', () async { final o = buildLocation(); @@ -4696,6 +5537,16 @@ void main() { }); }); + unittest.group('obj-schema-MeshRouteView', () { + unittest.test('to-json--from-json', () async { + final o = buildMeshRouteView(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.MeshRouteView.fromJson( + oJson as core.Map); + checkMeshRouteView(od); + }); + }); + unittest.group('obj-schema-Operation', () { unittest.test('to-json--from-json', () async { final o = buildOperation(); @@ -4906,19 +5757,69 @@ void main() { }); }); - unittest.group('resource-ProjectsLocationsResource', () { - unittest.test('method--get', () async { - final mock = HttpServerMock(); - final res = api.NetworkServicesApi(mock).projects.locations; - final arg_name = 'foo'; - final arg_$fields = 'foo'; - mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final path = req.url.path; - var pathOffset = 0; - core.int index; - core.String subPart; - unittest.expect( - path.substring(pathOffset, pathOffset + 1), + unittest.group('obj-schema-WasmPlugin', () { + unittest.test('to-json--from-json', () async { + final o = buildWasmPlugin(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.WasmPlugin.fromJson(oJson as core.Map); + checkWasmPlugin(od); + }); + }); + + unittest.group('obj-schema-WasmPluginLogConfig', () { + unittest.test('to-json--from-json', () async { + final o = buildWasmPluginLogConfig(); + final oJson = 
convert.jsonDecode(convert.jsonEncode(o)); + final od = api.WasmPluginLogConfig.fromJson( + oJson as core.Map); + checkWasmPluginLogConfig(od); + }); + }); + + unittest.group('obj-schema-WasmPluginUsedBy', () { + unittest.test('to-json--from-json', () async { + final o = buildWasmPluginUsedBy(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.WasmPluginUsedBy.fromJson( + oJson as core.Map); + checkWasmPluginUsedBy(od); + }); + }); + + unittest.group('obj-schema-WasmPluginVersion', () { + unittest.test('to-json--from-json', () async { + final o = buildWasmPluginVersion(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.WasmPluginVersion.fromJson( + oJson as core.Map); + checkWasmPluginVersion(od); + }); + }); + + unittest.group('obj-schema-WasmPluginVersionDetails', () { + unittest.test('to-json--from-json', () async { + final o = buildWasmPluginVersionDetails(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.WasmPluginVersionDetails.fromJson( + oJson as core.Map); + checkWasmPluginVersionDetails(od); + }); + }); + + unittest.group('resource-ProjectsLocationsResource', () { + unittest.test('method--get', () async { + final mock = HttpServerMock(); + final res = api.NetworkServicesApi(mock).projects.locations; + final arg_name = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), unittest.equals('/'), ); pathOffset += 1; @@ -5031,15 +5932,21 @@ void main() { }); }); - unittest.group('resource-ProjectsLocationsEdgeCacheKeysetsResource', () { - unittest.test('method--getIamPolicy', () async { + unittest.group('resource-ProjectsLocationsAuthzExtensionsResource', () { + unittest.test('method--create', () async { final mock = HttpServerMock(); final res = - api.NetworkServicesApi(mock).projects.locations.edgeCacheKeysets; - final arg_resource = 'foo'; - final arg_options_requestedPolicyVersion = 42; + api.NetworkServicesApi(mock).projects.locations.authzExtensions; + final arg_request = buildAuthzExtension(); + final arg_parent = 'foo'; + final arg_authzExtensionId = 'foo'; + final arg_requestId = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.AuthzExtension.fromJson( + json as core.Map); + checkAuthzExtension(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -5072,8 +5979,12 @@ void main() { } } unittest.expect( - core.int.parse(queryMap['options.requestedPolicyVersion']!.first), - unittest.equals(arg_options_requestedPolicyVersion), + queryMap['authzExtensionId']!.first, + unittest.equals(arg_authzExtensionId), + ); + unittest.expect( + queryMap['requestId']!.first, + unittest.equals(arg_requestId), ); unittest.expect( queryMap['fields']!.first, @@ -5083,27 +5994,139 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildPolicy()); + final resp = convert.json.encode(buildOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.getIamPolicy(arg_resource, - options_requestedPolicyVersion: arg_options_requestedPolicyVersion, + final response = await res.create(arg_request, arg_parent, + authzExtensionId: arg_authzExtensionId, + requestId: arg_requestId, $fields: arg_$fields); - 
checkPolicy(response as api.Policy); + checkOperation(response as api.Operation); }); - unittest.test('method--setIamPolicy', () async { + unittest.test('method--delete', () async { final mock = HttpServerMock(); final res = - api.NetworkServicesApi(mock).projects.locations.edgeCacheKeysets; - final arg_request = buildSetIamPolicyRequest(); - final arg_resource = 'foo'; + api.NetworkServicesApi(mock).projects.locations.authzExtensions; + final arg_name = 'foo'; + final arg_requestId = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.SetIamPolicyRequest.fromJson( - json as core.Map); - checkSetIamPolicyRequest(obj); + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['requestId']!.first, + unittest.equals(arg_requestId), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildOperation()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.delete(arg_name, + requestId: arg_requestId, $fields: arg_$fields); + checkOperation(response as api.Operation); + }); + + unittest.test('method--get', () async { + final mock = HttpServerMock(); + final res = + api.NetworkServicesApi(mock).projects.locations.authzExtensions; + final arg_name = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildAuthzExtension()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.get(arg_name, $fields: arg_$fields); + checkAuthzExtension(response as api.AuthzExtension); + }); + 
unittest.test('method--list', () async { + final mock = HttpServerMock(); + final res = + api.NetworkServicesApi(mock).projects.locations.authzExtensions; + final arg_parent = 'foo'; + final arg_filter = 'foo'; + final arg_orderBy = 'foo'; + final arg_pageSize = 42; + final arg_pageToken = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; var pathOffset = 0; core.int index; @@ -5135,6 +6158,22 @@ void main() { ); } } + unittest.expect( + queryMap['filter']!.first, + unittest.equals(arg_filter), + ); + unittest.expect( + queryMap['orderBy']!.first, + unittest.equals(arg_orderBy), + ); + unittest.expect( + core.int.parse(queryMap['pageSize']!.first), + unittest.equals(arg_pageSize), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -5143,25 +6182,32 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildPolicy()); + final resp = convert.json.encode(buildListAuthzExtensionsResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.setIamPolicy(arg_request, arg_resource, + final response = await res.list(arg_parent, + filter: arg_filter, + orderBy: arg_orderBy, + pageSize: arg_pageSize, + pageToken: arg_pageToken, $fields: arg_$fields); - checkPolicy(response as api.Policy); + checkListAuthzExtensionsResponse( + response as api.ListAuthzExtensionsResponse); }); - unittest.test('method--testIamPermissions', () async { + unittest.test('method--patch', () async { final mock = HttpServerMock(); final res = - api.NetworkServicesApi(mock).projects.locations.edgeCacheKeysets; - final arg_request = buildTestIamPermissionsRequest(); - final arg_resource = 'foo'; + api.NetworkServicesApi(mock).projects.locations.authzExtensions; + final arg_request = buildAuthzExtension(); + final arg_name = 'foo'; + final arg_requestId = 'foo'; + final arg_updateMask = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.TestIamPermissionsRequest.fromJson( + final obj = api.AuthzExtension.fromJson( json as core.Map); - checkTestIamPermissionsRequest(obj); + checkAuthzExtension(obj); final path = req.url.path; var pathOffset = 0; @@ -5194,6 +6240,14 @@ void main() { ); } } + unittest.expect( + queryMap['requestId']!.first, + unittest.equals(arg_requestId), + ); + unittest.expect( + queryMap['updateMask']!.first, + unittest.equals(arg_updateMask), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -5202,21 +6256,22 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildTestIamPermissionsResponse()); + final resp = convert.json.encode(buildOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.testIamPermissions(arg_request, arg_resource, + final response = await res.patch(arg_request, arg_name, + requestId: arg_requestId, + updateMask: arg_updateMask, $fields: arg_$fields); - checkTestIamPermissionsResponse( - response as api.TestIamPermissionsResponse); + checkOperation(response as api.Operation); }); }); - unittest.group('resource-ProjectsLocationsEdgeCacheOriginsResource', () { + unittest.group('resource-ProjectsLocationsEdgeCacheKeysetsResource', () { 
unittest.test('method--getIamPolicy', () async { final mock = HttpServerMock(); final res = - api.NetworkServicesApi(mock).projects.locations.edgeCacheOrigins; + api.NetworkServicesApi(mock).projects.locations.edgeCacheKeysets; final arg_resource = 'foo'; final arg_options_requestedPolicyVersion = 42; final arg_$fields = 'foo'; @@ -5276,7 +6331,7 @@ void main() { unittest.test('method--setIamPolicy', () async { final mock = HttpServerMock(); final res = - api.NetworkServicesApi(mock).projects.locations.edgeCacheOrigins; + api.NetworkServicesApi(mock).projects.locations.edgeCacheKeysets; final arg_request = buildSetIamPolicyRequest(); final arg_resource = 'foo'; final arg_$fields = 'foo'; @@ -5335,7 +6390,7 @@ void main() { unittest.test('method--testIamPermissions', () async { final mock = HttpServerMock(); final res = - api.NetworkServicesApi(mock).projects.locations.edgeCacheOrigins; + api.NetworkServicesApi(mock).projects.locations.edgeCacheKeysets; final arg_request = buildTestIamPermissionsRequest(); final arg_resource = 'foo'; final arg_$fields = 'foo'; @@ -5393,11 +6448,11 @@ void main() { }); }); - unittest.group('resource-ProjectsLocationsEdgeCacheServicesResource', () { + unittest.group('resource-ProjectsLocationsEdgeCacheOriginsResource', () { unittest.test('method--getIamPolicy', () async { final mock = HttpServerMock(); final res = - api.NetworkServicesApi(mock).projects.locations.edgeCacheServices; + api.NetworkServicesApi(mock).projects.locations.edgeCacheOrigins; final arg_resource = 'foo'; final arg_options_requestedPolicyVersion = 42; final arg_$fields = 'foo'; @@ -5457,7 +6512,7 @@ void main() { unittest.test('method--setIamPolicy', () async { final mock = HttpServerMock(); final res = - api.NetworkServicesApi(mock).projects.locations.edgeCacheServices; + api.NetworkServicesApi(mock).projects.locations.edgeCacheOrigins; final arg_request = buildSetIamPolicyRequest(); final arg_resource = 'foo'; final arg_$fields = 'foo'; @@ -5516,7 +6571,7 @@ void main() { unittest.test('method--testIamPermissions', () async { final mock = HttpServerMock(); final res = - api.NetworkServicesApi(mock).projects.locations.edgeCacheServices; + api.NetworkServicesApi(mock).projects.locations.edgeCacheOrigins; final arg_request = buildTestIamPermissionsRequest(); final arg_resource = 'foo'; final arg_$fields = 'foo'; @@ -5574,8 +6629,189 @@ void main() { }); }); - unittest.group('resource-ProjectsLocationsEndpointPoliciesResource', () { - unittest.test('method--create', () async { + unittest.group('resource-ProjectsLocationsEdgeCacheServicesResource', () { + unittest.test('method--getIamPolicy', () async { + final mock = HttpServerMock(); + final res = + api.NetworkServicesApi(mock).projects.locations.edgeCacheServices; + final arg_resource = 'foo'; + final arg_options_requestedPolicyVersion = 42; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if 
(query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + core.int.parse(queryMap['options.requestedPolicyVersion']!.first), + unittest.equals(arg_options_requestedPolicyVersion), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildPolicy()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.getIamPolicy(arg_resource, + options_requestedPolicyVersion: arg_options_requestedPolicyVersion, + $fields: arg_$fields); + checkPolicy(response as api.Policy); + }); + + unittest.test('method--setIamPolicy', () async { + final mock = HttpServerMock(); + final res = + api.NetworkServicesApi(mock).projects.locations.edgeCacheServices; + final arg_request = buildSetIamPolicyRequest(); + final arg_resource = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.SetIamPolicyRequest.fromJson( + json as core.Map); + checkSetIamPolicyRequest(obj); + + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildPolicy()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.setIamPolicy(arg_request, arg_resource, + $fields: arg_$fields); + checkPolicy(response as api.Policy); + }); + + unittest.test('method--testIamPermissions', () async { + final mock = HttpServerMock(); + final res = + api.NetworkServicesApi(mock).projects.locations.edgeCacheServices; + final arg_request = buildTestIamPermissionsRequest(); + final arg_resource = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.TestIamPermissionsRequest.fromJson( + json as core.Map); + checkTestIamPermissionsRequest(obj); + + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => 
+ queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildTestIamPermissionsResponse()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.testIamPermissions(arg_request, arg_resource, + $fields: arg_$fields); + checkTestIamPermissionsResponse( + response as api.TestIamPermissionsResponse); + }); + }); + + unittest.group('resource-ProjectsLocationsEndpointPoliciesResource', () { + unittest.test('method--create', () async { final mock = HttpServerMock(); final res = api.NetworkServicesApi(mock).projects.locations.endpointPolicies; @@ -6174,19 +7410,14 @@ void main() { }); }); - unittest.group('resource-ProjectsLocationsGrpcRoutesResource', () { - unittest.test('method--create', () async { + unittest.group('resource-ProjectsLocationsGatewaysRouteViewsResource', () { + unittest.test('method--get', () async { final mock = HttpServerMock(); - final res = api.NetworkServicesApi(mock).projects.locations.grpcRoutes; - final arg_request = buildGrpcRoute(); - final arg_parent = 'foo'; - final arg_grpcRouteId = 'foo'; + final res = + api.NetworkServicesApi(mock).projects.locations.gateways.routeViews; + final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = - api.GrpcRoute.fromJson(json as core.Map); - checkGrpcRoute(obj); - final path = req.url.path; var pathOffset = 0; core.int index; @@ -6218,10 +7449,6 @@ void main() { ); } } - unittest.expect( - queryMap['grpcRouteId']!.first, - unittest.equals(arg_grpcRouteId), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -6230,18 +7457,20 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildOperation()); + final resp = convert.json.encode(buildGatewayRouteView()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.create(arg_request, arg_parent, - grpcRouteId: arg_grpcRouteId, $fields: arg_$fields); - checkOperation(response as api.Operation); + final response = await res.get(arg_name, $fields: arg_$fields); + checkGatewayRouteView(response as api.GatewayRouteView); }); - unittest.test('method--delete', () async { + unittest.test('method--list', () async { final mock = HttpServerMock(); - final res = api.NetworkServicesApi(mock).projects.locations.grpcRoutes; - final arg_name = 'foo'; + final res = + api.NetworkServicesApi(mock).projects.locations.gateways.routeViews; + final arg_parent = 'foo'; + final arg_pageSize = 42; + final arg_pageToken = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -6275,6 +7504,14 @@ void main() { ); } } + unittest.expect( + core.int.parse(queryMap['pageSize']!.first), + unittest.equals(arg_pageSize), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -6283,19 +7520,31 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - 
final resp = convert.json.encode(buildOperation()); + final resp = convert.json.encode(buildListGatewayRouteViewsResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.delete(arg_name, $fields: arg_$fields); - checkOperation(response as api.Operation); + final response = await res.list(arg_parent, + pageSize: arg_pageSize, + pageToken: arg_pageToken, + $fields: arg_$fields); + checkListGatewayRouteViewsResponse( + response as api.ListGatewayRouteViewsResponse); }); + }); - unittest.test('method--get', () async { + unittest.group('resource-ProjectsLocationsGrpcRoutesResource', () { + unittest.test('method--create', () async { final mock = HttpServerMock(); final res = api.NetworkServicesApi(mock).projects.locations.grpcRoutes; - final arg_name = 'foo'; + final arg_request = buildGrpcRoute(); + final arg_parent = 'foo'; + final arg_grpcRouteId = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = + api.GrpcRoute.fromJson(json as core.Map); + checkGrpcRoute(obj); + final path = req.url.path; var pathOffset = 0; core.int index; @@ -6327,6 +7576,10 @@ void main() { ); } } + unittest.expect( + queryMap['grpcRouteId']!.first, + unittest.equals(arg_grpcRouteId), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -6335,19 +7588,124 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildGrpcRoute()); + final resp = convert.json.encode(buildOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.get(arg_name, $fields: arg_$fields); - checkGrpcRoute(response as api.GrpcRoute); + final response = await res.create(arg_request, arg_parent, + grpcRouteId: arg_grpcRouteId, $fields: arg_$fields); + checkOperation(response as api.Operation); }); - unittest.test('method--list', () async { + unittest.test('method--delete', () async { final mock = HttpServerMock(); final res = api.NetworkServicesApi(mock).projects.locations.grpcRoutes; - final arg_parent = 'foo'; - final arg_pageSize = 42; - final arg_pageToken = 'foo'; + final arg_name = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildOperation()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.delete(arg_name, $fields: arg_$fields); + checkOperation(response as api.Operation); + }); + + 
unittest.test('method--get', () async { + final mock = HttpServerMock(); + final res = api.NetworkServicesApi(mock).projects.locations.grpcRoutes; + final arg_name = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildGrpcRoute()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.get(arg_name, $fields: arg_$fields); + checkGrpcRoute(response as api.GrpcRoute); + }); + + unittest.test('method--list', () async { + final mock = HttpServerMock(); + final res = api.NetworkServicesApi(mock).projects.locations.grpcRoutes; + final arg_parent = 'foo'; + final arg_pageSize = 42; + final arg_pageToken = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -7736,6 +9094,128 @@ void main() { }); }); + unittest.group('resource-ProjectsLocationsMeshesRouteViewsResource', () { + unittest.test('method--get', () async { + final mock = HttpServerMock(); + final res = + api.NetworkServicesApi(mock).projects.locations.meshes.routeViews; + final arg_name = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildMeshRouteView()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.get(arg_name, $fields: arg_$fields); + checkMeshRouteView(response as api.MeshRouteView); + }); + + unittest.test('method--list', () async { + final mock = 
HttpServerMock(); + final res = + api.NetworkServicesApi(mock).projects.locations.meshes.routeViews; + final arg_parent = 'foo'; + final arg_pageSize = 42; + final arg_pageToken = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + core.int.parse(queryMap['pageSize']!.first), + unittest.equals(arg_pageSize), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildListMeshRouteViewsResponse()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.list(arg_parent, + pageSize: arg_pageSize, + pageToken: arg_pageToken, + $fields: arg_$fields); + checkListMeshRouteViewsResponse( + response as api.ListMeshRouteViewsResponse); + }); + }); + unittest.group('resource-ProjectsLocationsOperationsResource', () { unittest.test('method--cancel', () async { final mock = HttpServerMock(); @@ -7981,9 +9461,550 @@ void main() { final arg_serviceBindingId = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.ServiceBinding.fromJson( - json as core.Map); - checkServiceBinding(obj); + final obj = api.ServiceBinding.fromJson( + json as core.Map); + checkServiceBinding(obj); + + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['serviceBindingId']!.first, + unittest.equals(arg_serviceBindingId), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildOperation()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await 
res.create(arg_request, arg_parent, + serviceBindingId: arg_serviceBindingId, $fields: arg_$fields); + checkOperation(response as api.Operation); + }); + + unittest.test('method--delete', () async { + final mock = HttpServerMock(); + final res = + api.NetworkServicesApi(mock).projects.locations.serviceBindings; + final arg_name = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildOperation()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.delete(arg_name, $fields: arg_$fields); + checkOperation(response as api.Operation); + }); + + unittest.test('method--get', () async { + final mock = HttpServerMock(); + final res = + api.NetworkServicesApi(mock).projects.locations.serviceBindings; + final arg_name = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildServiceBinding()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.get(arg_name, $fields: arg_$fields); + checkServiceBinding(response as api.ServiceBinding); + }); + + unittest.test('method--list', () async { + final mock = HttpServerMock(); + final res = + api.NetworkServicesApi(mock).projects.locations.serviceBindings; + final arg_parent = 'foo'; + final arg_pageSize = 42; + final arg_pageToken = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var 
pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + core.int.parse(queryMap['pageSize']!.first), + unittest.equals(arg_pageSize), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildListServiceBindingsResponse()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.list(arg_parent, + pageSize: arg_pageSize, + pageToken: arg_pageToken, + $fields: arg_$fields); + checkListServiceBindingsResponse( + response as api.ListServiceBindingsResponse); + }); + }); + + unittest.group('resource-ProjectsLocationsServiceLbPoliciesResource', () { + unittest.test('method--create', () async { + final mock = HttpServerMock(); + final res = + api.NetworkServicesApi(mock).projects.locations.serviceLbPolicies; + final arg_request = buildServiceLbPolicy(); + final arg_parent = 'foo'; + final arg_serviceLbPolicyId = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.ServiceLbPolicy.fromJson( + json as core.Map); + checkServiceLbPolicy(obj); + + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['serviceLbPolicyId']!.first, + unittest.equals(arg_serviceLbPolicyId), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildOperation()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.create(arg_request, arg_parent, + serviceLbPolicyId: arg_serviceLbPolicyId, $fields: arg_$fields); + checkOperation(response as api.Operation); + }); + + unittest.test('method--delete', () async { + final mock = HttpServerMock(); + final res = + 
api.NetworkServicesApi(mock).projects.locations.serviceLbPolicies; + final arg_name = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildOperation()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.delete(arg_name, $fields: arg_$fields); + checkOperation(response as api.Operation); + }); + + unittest.test('method--get', () async { + final mock = HttpServerMock(); + final res = + api.NetworkServicesApi(mock).projects.locations.serviceLbPolicies; + final arg_name = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildServiceLbPolicy()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.get(arg_name, $fields: arg_$fields); + checkServiceLbPolicy(response as api.ServiceLbPolicy); + }); + + unittest.test('method--list', () async { + final mock = HttpServerMock(); + final res = + api.NetworkServicesApi(mock).projects.locations.serviceLbPolicies; + final arg_parent = 'foo'; + final arg_pageSize = 42; + final arg_pageToken = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + 
unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + core.int.parse(queryMap['pageSize']!.first), + unittest.equals(arg_pageSize), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildListServiceLbPoliciesResponse()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.list(arg_parent, + pageSize: arg_pageSize, + pageToken: arg_pageToken, + $fields: arg_$fields); + checkListServiceLbPoliciesResponse( + response as api.ListServiceLbPoliciesResponse); + }); + + unittest.test('method--patch', () async { + final mock = HttpServerMock(); + final res = + api.NetworkServicesApi(mock).projects.locations.serviceLbPolicies; + final arg_request = buildServiceLbPolicy(); + final arg_name = 'foo'; + final arg_updateMask = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.ServiceLbPolicy.fromJson( + json as core.Map); + checkServiceLbPolicy(obj); + + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['updateMask']!.first, + unittest.equals(arg_updateMask), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildOperation()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.patch(arg_request, arg_name, + updateMask: arg_updateMask, $fields: arg_$fields); + checkOperation(response as api.Operation); + }); + }); + + unittest.group('resource-ProjectsLocationsTcpRoutesResource', () { + unittest.test('method--create', () async { + final mock = HttpServerMock(); + final res = api.NetworkServicesApi(mock).projects.locations.tcpRoutes; + final arg_request = buildTcpRoute(); + final arg_parent = 'foo'; + final arg_tcpRouteId = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = + api.TcpRoute.fromJson(json as 
core.Map); + checkTcpRoute(obj); final path = req.url.path; var pathOffset = 0; @@ -8017,8 +10038,8 @@ void main() { } } unittest.expect( - queryMap['serviceBindingId']!.first, - unittest.equals(arg_serviceBindingId), + queryMap['tcpRouteId']!.first, + unittest.equals(arg_tcpRouteId), ); unittest.expect( queryMap['fields']!.first, @@ -8032,14 +10053,13 @@ void main() { return async.Future.value(stringResponse(200, h, resp)); }), true); final response = await res.create(arg_request, arg_parent, - serviceBindingId: arg_serviceBindingId, $fields: arg_$fields); + tcpRouteId: arg_tcpRouteId, $fields: arg_$fields); checkOperation(response as api.Operation); }); unittest.test('method--delete', () async { final mock = HttpServerMock(); - final res = - api.NetworkServicesApi(mock).projects.locations.serviceBindings; + final res = api.NetworkServicesApi(mock).projects.locations.tcpRoutes; final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -8091,8 +10111,7 @@ void main() { unittest.test('method--get', () async { final mock = HttpServerMock(); - final res = - api.NetworkServicesApi(mock).projects.locations.serviceBindings; + final res = api.NetworkServicesApi(mock).projects.locations.tcpRoutes; final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -8135,17 +10154,16 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildServiceBinding()); + final resp = convert.json.encode(buildTcpRoute()); return async.Future.value(stringResponse(200, h, resp)); }), true); final response = await res.get(arg_name, $fields: arg_$fields); - checkServiceBinding(response as api.ServiceBinding); + checkTcpRoute(response as api.TcpRoute); }); unittest.test('method--list', () async { final mock = HttpServerMock(); - final res = - api.NetworkServicesApi(mock).projects.locations.serviceBindings; + final res = api.NetworkServicesApi(mock).projects.locations.tcpRoutes; final arg_parent = 'foo'; final arg_pageSize = 42; final arg_pageToken = 'foo'; @@ -8198,31 +10216,92 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildListServiceBindingsResponse()); + final resp = convert.json.encode(buildListTcpRoutesResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); final response = await res.list(arg_parent, pageSize: arg_pageSize, pageToken: arg_pageToken, $fields: arg_$fields); - checkListServiceBindingsResponse( - response as api.ListServiceBindingsResponse); + checkListTcpRoutesResponse(response as api.ListTcpRoutesResponse); + }); + + unittest.test('method--patch', () async { + final mock = HttpServerMock(); + final res = api.NetworkServicesApi(mock).projects.locations.tcpRoutes; + final arg_request = buildTcpRoute(); + final arg_name = 'foo'; + final arg_updateMask = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = + api.TcpRoute.fromJson(json as core.Map); + checkTcpRoute(obj); + + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to 
reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['updateMask']!.first, + unittest.equals(arg_updateMask), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildOperation()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.patch(arg_request, arg_name, + updateMask: arg_updateMask, $fields: arg_$fields); + checkOperation(response as api.Operation); }); }); - unittest.group('resource-ProjectsLocationsServiceLbPoliciesResource', () { + unittest.group('resource-ProjectsLocationsTlsRoutesResource', () { unittest.test('method--create', () async { final mock = HttpServerMock(); - final res = - api.NetworkServicesApi(mock).projects.locations.serviceLbPolicies; - final arg_request = buildServiceLbPolicy(); + final res = api.NetworkServicesApi(mock).projects.locations.tlsRoutes; + final arg_request = buildTlsRoute(); final arg_parent = 'foo'; - final arg_serviceLbPolicyId = 'foo'; + final arg_tlsRouteId = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.ServiceLbPolicy.fromJson( - json as core.Map); - checkServiceLbPolicy(obj); + final obj = + api.TlsRoute.fromJson(json as core.Map); + checkTlsRoute(obj); final path = req.url.path; var pathOffset = 0; @@ -8256,8 +10335,8 @@ void main() { } } unittest.expect( - queryMap['serviceLbPolicyId']!.first, - unittest.equals(arg_serviceLbPolicyId), + queryMap['tlsRouteId']!.first, + unittest.equals(arg_tlsRouteId), ); unittest.expect( queryMap['fields']!.first, @@ -8271,14 +10350,13 @@ void main() { return async.Future.value(stringResponse(200, h, resp)); }), true); final response = await res.create(arg_request, arg_parent, - serviceLbPolicyId: arg_serviceLbPolicyId, $fields: arg_$fields); + tlsRouteId: arg_tlsRouteId, $fields: arg_$fields); checkOperation(response as api.Operation); }); unittest.test('method--delete', () async { final mock = HttpServerMock(); - final res = - api.NetworkServicesApi(mock).projects.locations.serviceLbPolicies; + final res = api.NetworkServicesApi(mock).projects.locations.tlsRoutes; final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -8330,8 +10408,7 @@ void main() { unittest.test('method--get', () async { final mock = HttpServerMock(); - final res = - api.NetworkServicesApi(mock).projects.locations.serviceLbPolicies; + final res = api.NetworkServicesApi(mock).projects.locations.tlsRoutes; final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -8374,17 +10451,16 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildServiceLbPolicy()); + final resp = convert.json.encode(buildTlsRoute()); return async.Future.value(stringResponse(200, h, resp)); }), true); final response = await res.get(arg_name, $fields: arg_$fields); - checkServiceLbPolicy(response as 
api.ServiceLbPolicy); + checkTlsRoute(response as api.TlsRoute); }); unittest.test('method--list', () async { final mock = HttpServerMock(); - final res = - api.NetworkServicesApi(mock).projects.locations.serviceLbPolicies; + final res = api.NetworkServicesApi(mock).projects.locations.tlsRoutes; final arg_parent = 'foo'; final arg_pageSize = 42; final arg_pageToken = 'foo'; @@ -8437,29 +10513,27 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildListServiceLbPoliciesResponse()); + final resp = convert.json.encode(buildListTlsRoutesResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); final response = await res.list(arg_parent, pageSize: arg_pageSize, pageToken: arg_pageToken, $fields: arg_$fields); - checkListServiceLbPoliciesResponse( - response as api.ListServiceLbPoliciesResponse); + checkListTlsRoutesResponse(response as api.ListTlsRoutesResponse); }); unittest.test('method--patch', () async { final mock = HttpServerMock(); - final res = - api.NetworkServicesApi(mock).projects.locations.serviceLbPolicies; - final arg_request = buildServiceLbPolicy(); + final res = api.NetworkServicesApi(mock).projects.locations.tlsRoutes; + final arg_request = buildTlsRoute(); final arg_name = 'foo'; final arg_updateMask = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = api.ServiceLbPolicy.fromJson( - json as core.Map); - checkServiceLbPolicy(obj); + final obj = + api.TlsRoute.fromJson(json as core.Map); + checkTlsRoute(obj); final path = req.url.path; var pathOffset = 0; @@ -8513,18 +10587,18 @@ void main() { }); }); - unittest.group('resource-ProjectsLocationsTcpRoutesResource', () { + unittest.group('resource-ProjectsLocationsWasmPluginsResource', () { unittest.test('method--create', () async { final mock = HttpServerMock(); - final res = api.NetworkServicesApi(mock).projects.locations.tcpRoutes; - final arg_request = buildTcpRoute(); + final res = api.NetworkServicesApi(mock).projects.locations.wasmPlugins; + final arg_request = buildWasmPlugin(); final arg_parent = 'foo'; - final arg_tcpRouteId = 'foo'; + final arg_wasmPluginId = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = - api.TcpRoute.fromJson(json as core.Map); - checkTcpRoute(obj); + final obj = api.WasmPlugin.fromJson( + json as core.Map); + checkWasmPlugin(obj); final path = req.url.path; var pathOffset = 0; @@ -8558,8 +10632,8 @@ void main() { } } unittest.expect( - queryMap['tcpRouteId']!.first, - unittest.equals(arg_tcpRouteId), + queryMap['wasmPluginId']!.first, + unittest.equals(arg_wasmPluginId), ); unittest.expect( queryMap['fields']!.first, @@ -8573,13 +10647,13 @@ void main() { return async.Future.value(stringResponse(200, h, resp)); }), true); final response = await res.create(arg_request, arg_parent, - tcpRouteId: arg_tcpRouteId, $fields: arg_$fields); + wasmPluginId: arg_wasmPluginId, $fields: arg_$fields); checkOperation(response as api.Operation); }); unittest.test('method--delete', () async { final mock = HttpServerMock(); - final res = api.NetworkServicesApi(mock).projects.locations.tcpRoutes; + final res = api.NetworkServicesApi(mock).projects.locations.wasmPlugins; final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -8631,8 +10705,9 @@ void main() { unittest.test('method--get', () async { final mock = HttpServerMock(); - 
final res = api.NetworkServicesApi(mock).projects.locations.tcpRoutes; + final res = api.NetworkServicesApi(mock).projects.locations.wasmPlugins; final arg_name = 'foo'; + final arg_view = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -8666,6 +10741,10 @@ void main() { ); } } + unittest.expect( + queryMap['view']!.first, + unittest.equals(arg_view), + ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -8674,16 +10753,17 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildTcpRoute()); + final resp = convert.json.encode(buildWasmPlugin()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.get(arg_name, $fields: arg_$fields); - checkTcpRoute(response as api.TcpRoute); + final response = + await res.get(arg_name, view: arg_view, $fields: arg_$fields); + checkWasmPlugin(response as api.WasmPlugin); }); unittest.test('method--list', () async { final mock = HttpServerMock(); - final res = api.NetworkServicesApi(mock).projects.locations.tcpRoutes; + final res = api.NetworkServicesApi(mock).projects.locations.wasmPlugins; final arg_parent = 'foo'; final arg_pageSize = 42; final arg_pageToken = 'foo'; @@ -8736,27 +10816,27 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildListTcpRoutesResponse()); + final resp = convert.json.encode(buildListWasmPluginsResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); final response = await res.list(arg_parent, pageSize: arg_pageSize, pageToken: arg_pageToken, $fields: arg_$fields); - checkListTcpRoutesResponse(response as api.ListTcpRoutesResponse); + checkListWasmPluginsResponse(response as api.ListWasmPluginsResponse); }); unittest.test('method--patch', () async { final mock = HttpServerMock(); - final res = api.NetworkServicesApi(mock).projects.locations.tcpRoutes; - final arg_request = buildTcpRoute(); + final res = api.NetworkServicesApi(mock).projects.locations.wasmPlugins; + final arg_request = buildWasmPlugin(); final arg_name = 'foo'; final arg_updateMask = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = - api.TcpRoute.fromJson(json as core.Map); - checkTcpRoute(obj); + final obj = api.WasmPlugin.fromJson( + json as core.Map); + checkWasmPlugin(obj); final path = req.url.path; var pathOffset = 0; @@ -8810,18 +10890,19 @@ void main() { }); }); - unittest.group('resource-ProjectsLocationsTlsRoutesResource', () { + unittest.group('resource-ProjectsLocationsWasmPluginsVersionsResource', () { unittest.test('method--create', () async { final mock = HttpServerMock(); - final res = api.NetworkServicesApi(mock).projects.locations.tlsRoutes; - final arg_request = buildTlsRoute(); + final res = + api.NetworkServicesApi(mock).projects.locations.wasmPlugins.versions; + final arg_request = buildWasmPluginVersion(); final arg_parent = 'foo'; - final arg_tlsRouteId = 'foo'; + final arg_wasmPluginVersionId = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = - api.TlsRoute.fromJson(json as core.Map); - checkTlsRoute(obj); + final obj = api.WasmPluginVersion.fromJson( + json as core.Map); + checkWasmPluginVersion(obj); final path = req.url.path; var pathOffset = 0; @@ -8855,8 +10936,8 @@ void main() { } } 
unittest.expect( - queryMap['tlsRouteId']!.first, - unittest.equals(arg_tlsRouteId), + queryMap['wasmPluginVersionId']!.first, + unittest.equals(arg_wasmPluginVersionId), ); unittest.expect( queryMap['fields']!.first, @@ -8870,13 +10951,14 @@ void main() { return async.Future.value(stringResponse(200, h, resp)); }), true); final response = await res.create(arg_request, arg_parent, - tlsRouteId: arg_tlsRouteId, $fields: arg_$fields); + wasmPluginVersionId: arg_wasmPluginVersionId, $fields: arg_$fields); checkOperation(response as api.Operation); }); unittest.test('method--delete', () async { final mock = HttpServerMock(); - final res = api.NetworkServicesApi(mock).projects.locations.tlsRoutes; + final res = + api.NetworkServicesApi(mock).projects.locations.wasmPlugins.versions; final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -8928,7 +11010,8 @@ void main() { unittest.test('method--get', () async { final mock = HttpServerMock(); - final res = api.NetworkServicesApi(mock).projects.locations.tlsRoutes; + final res = + api.NetworkServicesApi(mock).projects.locations.wasmPlugins.versions; final arg_name = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -8971,16 +11054,17 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildTlsRoute()); + final resp = convert.json.encode(buildWasmPluginVersion()); return async.Future.value(stringResponse(200, h, resp)); }), true); final response = await res.get(arg_name, $fields: arg_$fields); - checkTlsRoute(response as api.TlsRoute); + checkWasmPluginVersion(response as api.WasmPluginVersion); }); unittest.test('method--list', () async { final mock = HttpServerMock(); - final res = api.NetworkServicesApi(mock).projects.locations.tlsRoutes; + final res = + api.NetworkServicesApi(mock).projects.locations.wasmPlugins.versions; final arg_parent = 'foo'; final arg_pageSize = 42; final arg_pageToken = 'foo'; @@ -9033,77 +11117,15 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = convert.json.encode(buildListTlsRoutesResponse()); + final resp = convert.json.encode(buildListWasmPluginVersionsResponse()); return async.Future.value(stringResponse(200, h, resp)); }), true); final response = await res.list(arg_parent, pageSize: arg_pageSize, pageToken: arg_pageToken, $fields: arg_$fields); - checkListTlsRoutesResponse(response as api.ListTlsRoutesResponse); - }); - - unittest.test('method--patch', () async { - final mock = HttpServerMock(); - final res = api.NetworkServicesApi(mock).projects.locations.tlsRoutes; - final arg_request = buildTlsRoute(); - final arg_name = 'foo'; - final arg_updateMask = 'foo'; - final arg_$fields = 'foo'; - mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final obj = - api.TlsRoute.fromJson(json as core.Map); - checkTlsRoute(obj); - - final path = req.url.path; - var pathOffset = 0; - core.int index; - core.String subPart; - unittest.expect( - path.substring(pathOffset, pathOffset + 1), - unittest.equals('/'), - ); - pathOffset += 1; - unittest.expect( - path.substring(pathOffset, pathOffset + 3), - unittest.equals('v1/'), - ); - pathOffset += 3; - // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; - - final query = req.url.query; - var queryOffset = 0; - final queryMap = >{}; - void addQueryParam(core.String n, core.String v) => - 
queryMap.putIfAbsent(n, () => []).add(v); - - if (query.isNotEmpty) { - for (var part in query.split('&')) { - final keyValue = part.split('='); - addQueryParam( - core.Uri.decodeQueryComponent(keyValue[0]), - core.Uri.decodeQueryComponent(keyValue[1]), - ); - } - } - unittest.expect( - queryMap['updateMask']!.first, - unittest.equals(arg_updateMask), - ); - unittest.expect( - queryMap['fields']!.first, - unittest.equals(arg_$fields), - ); - - final h = { - 'content-type': 'application/json; charset=utf-8', - }; - final resp = convert.json.encode(buildOperation()); - return async.Future.value(stringResponse(200, h, resp)); - }), true); - final response = await res.patch(arg_request, arg_name, - updateMask: arg_updateMask, $fields: arg_$fields); - checkOperation(response as api.Operation); + checkListWasmPluginVersionsResponse( + response as api.ListWasmPluginVersionsResponse); }); }); } diff --git a/generated/googleapis/test/notebooks/v2_test.dart b/generated/googleapis/test/notebooks/v2_test.dart index a0174db5f..8e9836544 100644 --- a/generated/googleapis/test/notebooks/v2_test.dart +++ b/generated/googleapis/test/notebooks/v2_test.dart @@ -733,6 +733,7 @@ api.Instance buildInstance() { o.createTime = 'foo'; o.creator = 'foo'; o.disableProxyAccess = true; + o.enableThirdPartyIdentity = true; o.gceSetup = buildGceSetup(); o.healthInfo = buildUnnamed9(); o.healthState = 'foo'; @@ -764,6 +765,7 @@ void checkInstance(api.Instance o) { unittest.equals('foo'), ); unittest.expect(o.disableProxyAccess!, unittest.isTrue); + unittest.expect(o.enableThirdPartyIdentity!, unittest.isTrue); checkGceSetup(o.gceSetup!); checkUnnamed9(o.healthInfo!); unittest.expect( diff --git a/generated/googleapis/test/orgpolicy/v2_test.dart b/generated/googleapis/test/orgpolicy/v2_test.dart index ed80eb702..c4302ac00 100644 --- a/generated/googleapis/test/orgpolicy/v2_test.dart +++ b/generated/googleapis/test/orgpolicy/v2_test.dart @@ -65,6 +65,7 @@ api.GoogleCloudOrgpolicyV2Constraint buildGoogleCloudOrgpolicyV2Constraint() { o.listConstraint = buildGoogleCloudOrgpolicyV2ConstraintListConstraint(); o.name = 'foo'; o.supportsDryRun = true; + o.supportsSimulation = true; } buildCounterGoogleCloudOrgpolicyV2Constraint--; return o; @@ -94,6 +95,7 @@ void checkGoogleCloudOrgpolicyV2Constraint( unittest.equals('foo'), ); unittest.expect(o.supportsDryRun!, unittest.isTrue); + unittest.expect(o.supportsSimulation!, unittest.isTrue); } buildCounterGoogleCloudOrgpolicyV2Constraint--; } @@ -607,6 +609,51 @@ void checkGoogleCloudOrgpolicyV2PolicySpec( buildCounterGoogleCloudOrgpolicyV2PolicySpec--; } +core.Map buildUnnamed9() => { + 'x': { + 'list': [1, 2, 3], + 'bool': true, + 'string': 'foo' + }, + 'y': { + 'list': [1, 2, 3], + 'bool': true, + 'string': 'foo' + }, + }; + +void checkUnnamed9(core.Map o) { + unittest.expect(o, unittest.hasLength(2)); + var casted2 = (o['x']!) as core.Map; + unittest.expect(casted2, unittest.hasLength(3)); + unittest.expect( + casted2['list'], + unittest.equals([1, 2, 3]), + ); + unittest.expect( + casted2['bool'], + unittest.equals(true), + ); + unittest.expect( + casted2['string'], + unittest.equals('foo'), + ); + var casted3 = (o['y']!) 
as core.Map; + unittest.expect(casted3, unittest.hasLength(3)); + unittest.expect( + casted3['list'], + unittest.equals([1, 2, 3]), + ); + unittest.expect( + casted3['bool'], + unittest.equals(true), + ); + unittest.expect( + casted3['string'], + unittest.equals('foo'), + ); +} + core.int buildCounterGoogleCloudOrgpolicyV2PolicySpecPolicyRule = 0; api.GoogleCloudOrgpolicyV2PolicySpecPolicyRule buildGoogleCloudOrgpolicyV2PolicySpecPolicyRule() { @@ -617,6 +664,7 @@ api.GoogleCloudOrgpolicyV2PolicySpecPolicyRule o.condition = buildGoogleTypeExpr(); o.denyAll = true; o.enforce = true; + o.parameters = buildUnnamed9(); o.values = buildGoogleCloudOrgpolicyV2PolicySpecPolicyRuleStringValues(); } buildCounterGoogleCloudOrgpolicyV2PolicySpecPolicyRule--; @@ -631,17 +679,18 @@ void checkGoogleCloudOrgpolicyV2PolicySpecPolicyRule( checkGoogleTypeExpr(o.condition!); unittest.expect(o.denyAll!, unittest.isTrue); unittest.expect(o.enforce!, unittest.isTrue); + checkUnnamed9(o.parameters!); checkGoogleCloudOrgpolicyV2PolicySpecPolicyRuleStringValues(o.values!); } buildCounterGoogleCloudOrgpolicyV2PolicySpecPolicyRule--; } -core.List buildUnnamed9() => [ +core.List buildUnnamed10() => [ 'foo', 'foo', ]; -void checkUnnamed9(core.List o) { +void checkUnnamed10(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -653,12 +702,12 @@ void checkUnnamed9(core.List o) { ); } -core.List buildUnnamed10() => [ +core.List buildUnnamed11() => [ 'foo', 'foo', ]; -void checkUnnamed10(core.List o) { +void checkUnnamed11(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -676,8 +725,8 @@ api.GoogleCloudOrgpolicyV2PolicySpecPolicyRuleStringValues final o = api.GoogleCloudOrgpolicyV2PolicySpecPolicyRuleStringValues(); buildCounterGoogleCloudOrgpolicyV2PolicySpecPolicyRuleStringValues++; if (buildCounterGoogleCloudOrgpolicyV2PolicySpecPolicyRuleStringValues < 3) { - o.allowedValues = buildUnnamed9(); - o.deniedValues = buildUnnamed10(); + o.allowedValues = buildUnnamed10(); + o.deniedValues = buildUnnamed11(); } buildCounterGoogleCloudOrgpolicyV2PolicySpecPolicyRuleStringValues--; return o; @@ -687,8 +736,8 @@ void checkGoogleCloudOrgpolicyV2PolicySpecPolicyRuleStringValues( api.GoogleCloudOrgpolicyV2PolicySpecPolicyRuleStringValues o) { buildCounterGoogleCloudOrgpolicyV2PolicySpecPolicyRuleStringValues++; if (buildCounterGoogleCloudOrgpolicyV2PolicySpecPolicyRuleStringValues < 3) { - checkUnnamed9(o.allowedValues!); - checkUnnamed10(o.deniedValues!); + checkUnnamed10(o.allowedValues!); + checkUnnamed11(o.deniedValues!); } buildCounterGoogleCloudOrgpolicyV2PolicySpecPolicyRuleStringValues--; } diff --git a/generated/googleapis/test/parallelstore/v1_test.dart b/generated/googleapis/test/parallelstore/v1_test.dart new file mode 100644 index 000000000..a1c31ffe1 --- /dev/null +++ b/generated/googleapis/test/parallelstore/v1_test.dart @@ -0,0 +1,1706 @@ +// ignore_for_file: camel_case_types +// ignore_for_file: comment_references +// ignore_for_file: deprecated_member_use_from_same_package +// ignore_for_file: doc_directive_unknown +// ignore_for_file: lines_longer_than_80_chars +// ignore_for_file: non_constant_identifier_names +// ignore_for_file: prefer_const_declarations +// ignore_for_file: prefer_expression_function_bodies +// ignore_for_file: prefer_final_locals +// ignore_for_file: prefer_interpolation_to_compose_strings +// ignore_for_file: unintended_html_in_doc_comment +// ignore_for_file: unnecessary_brace_in_string_interps +// ignore_for_file: 
unnecessary_cast +// ignore_for_file: unnecessary_lambdas +// ignore_for_file: unnecessary_string_interpolations +// ignore_for_file: unreachable_from_main +// ignore_for_file: unused_local_variable + +import 'dart:async' as async; +import 'dart:convert' as convert; +import 'dart:core' as core; + +import 'package:googleapis/parallelstore/v1.dart' as api; +import 'package:http/http.dart' as http; +import 'package:test/test.dart' as unittest; + +import '../test_shared.dart'; + +core.int buildCounterCancelOperationRequest = 0; +api.CancelOperationRequest buildCancelOperationRequest() { + final o = api.CancelOperationRequest(); + buildCounterCancelOperationRequest++; + if (buildCounterCancelOperationRequest < 3) {} + buildCounterCancelOperationRequest--; + return o; +} + +void checkCancelOperationRequest(api.CancelOperationRequest o) { + buildCounterCancelOperationRequest++; + if (buildCounterCancelOperationRequest < 3) {} + buildCounterCancelOperationRequest--; +} + +core.int buildCounterDestinationGcsBucket = 0; +api.DestinationGcsBucket buildDestinationGcsBucket() { + final o = api.DestinationGcsBucket(); + buildCounterDestinationGcsBucket++; + if (buildCounterDestinationGcsBucket < 3) { + o.uri = 'foo'; + } + buildCounterDestinationGcsBucket--; + return o; +} + +void checkDestinationGcsBucket(api.DestinationGcsBucket o) { + buildCounterDestinationGcsBucket++; + if (buildCounterDestinationGcsBucket < 3) { + unittest.expect( + o.uri!, + unittest.equals('foo'), + ); + } + buildCounterDestinationGcsBucket--; +} + +core.int buildCounterDestinationParallelstore = 0; +api.DestinationParallelstore buildDestinationParallelstore() { + final o = api.DestinationParallelstore(); + buildCounterDestinationParallelstore++; + if (buildCounterDestinationParallelstore < 3) { + o.path = 'foo'; + } + buildCounterDestinationParallelstore--; + return o; +} + +void checkDestinationParallelstore(api.DestinationParallelstore o) { + buildCounterDestinationParallelstore++; + if (buildCounterDestinationParallelstore < 3) { + unittest.expect( + o.path!, + unittest.equals('foo'), + ); + } + buildCounterDestinationParallelstore--; +} + +core.int buildCounterExportDataRequest = 0; +api.ExportDataRequest buildExportDataRequest() { + final o = api.ExportDataRequest(); + buildCounterExportDataRequest++; + if (buildCounterExportDataRequest < 3) { + o.destinationGcsBucket = buildDestinationGcsBucket(); + o.requestId = 'foo'; + o.serviceAccount = 'foo'; + o.sourceParallelstore = buildSourceParallelstore(); + } + buildCounterExportDataRequest--; + return o; +} + +void checkExportDataRequest(api.ExportDataRequest o) { + buildCounterExportDataRequest++; + if (buildCounterExportDataRequest < 3) { + checkDestinationGcsBucket(o.destinationGcsBucket!); + unittest.expect( + o.requestId!, + unittest.equals('foo'), + ); + unittest.expect( + o.serviceAccount!, + unittest.equals('foo'), + ); + checkSourceParallelstore(o.sourceParallelstore!); + } + buildCounterExportDataRequest--; +} + +core.int buildCounterGoogleProtobufEmpty = 0; +api.GoogleProtobufEmpty buildGoogleProtobufEmpty() { + final o = api.GoogleProtobufEmpty(); + buildCounterGoogleProtobufEmpty++; + if (buildCounterGoogleProtobufEmpty < 3) {} + buildCounterGoogleProtobufEmpty--; + return o; +} + +void checkGoogleProtobufEmpty(api.GoogleProtobufEmpty o) { + buildCounterGoogleProtobufEmpty++; + if (buildCounterGoogleProtobufEmpty < 3) {} + buildCounterGoogleProtobufEmpty--; +} + +core.int buildCounterImportDataRequest = 0; +api.ImportDataRequest buildImportDataRequest() { + final 
o = api.ImportDataRequest(); + buildCounterImportDataRequest++; + if (buildCounterImportDataRequest < 3) { + o.destinationParallelstore = buildDestinationParallelstore(); + o.requestId = 'foo'; + o.serviceAccount = 'foo'; + o.sourceGcsBucket = buildSourceGcsBucket(); + } + buildCounterImportDataRequest--; + return o; +} + +void checkImportDataRequest(api.ImportDataRequest o) { + buildCounterImportDataRequest++; + if (buildCounterImportDataRequest < 3) { + checkDestinationParallelstore(o.destinationParallelstore!); + unittest.expect( + o.requestId!, + unittest.equals('foo'), + ); + unittest.expect( + o.serviceAccount!, + unittest.equals('foo'), + ); + checkSourceGcsBucket(o.sourceGcsBucket!); + } + buildCounterImportDataRequest--; +} + +core.List buildUnnamed0() => [ + 'foo', + 'foo', + ]; + +void checkUnnamed0(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o[0], + unittest.equals('foo'), + ); + unittest.expect( + o[1], + unittest.equals('foo'), + ); +} + +core.Map buildUnnamed1() => { + 'x': 'foo', + 'y': 'foo', + }; + +void checkUnnamed1(core.Map o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o['x']!, + unittest.equals('foo'), + ); + unittest.expect( + o['y']!, + unittest.equals('foo'), + ); +} + +core.int buildCounterInstance = 0; +api.Instance buildInstance() { + final o = api.Instance(); + buildCounterInstance++; + if (buildCounterInstance < 3) { + o.accessPoints = buildUnnamed0(); + o.capacityGib = 'foo'; + o.createTime = 'foo'; + o.daosVersion = 'foo'; + o.description = 'foo'; + o.directoryStripeLevel = 'foo'; + o.effectiveReservedIpRange = 'foo'; + o.fileStripeLevel = 'foo'; + o.labels = buildUnnamed1(); + o.name = 'foo'; + o.network = 'foo'; + o.reservedIpRange = 'foo'; + o.state = 'foo'; + o.updateTime = 'foo'; + } + buildCounterInstance--; + return o; +} + +void checkInstance(api.Instance o) { + buildCounterInstance++; + if (buildCounterInstance < 3) { + checkUnnamed0(o.accessPoints!); + unittest.expect( + o.capacityGib!, + unittest.equals('foo'), + ); + unittest.expect( + o.createTime!, + unittest.equals('foo'), + ); + unittest.expect( + o.daosVersion!, + unittest.equals('foo'), + ); + unittest.expect( + o.description!, + unittest.equals('foo'), + ); + unittest.expect( + o.directoryStripeLevel!, + unittest.equals('foo'), + ); + unittest.expect( + o.effectiveReservedIpRange!, + unittest.equals('foo'), + ); + unittest.expect( + o.fileStripeLevel!, + unittest.equals('foo'), + ); + checkUnnamed1(o.labels!); + unittest.expect( + o.name!, + unittest.equals('foo'), + ); + unittest.expect( + o.network!, + unittest.equals('foo'), + ); + unittest.expect( + o.reservedIpRange!, + unittest.equals('foo'), + ); + unittest.expect( + o.state!, + unittest.equals('foo'), + ); + unittest.expect( + o.updateTime!, + unittest.equals('foo'), + ); + } + buildCounterInstance--; +} + +core.List buildUnnamed2() => [ + buildInstance(), + buildInstance(), + ]; + +void checkUnnamed2(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkInstance(o[0]); + checkInstance(o[1]); +} + +core.List buildUnnamed3() => [ + 'foo', + 'foo', + ]; + +void checkUnnamed3(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o[0], + unittest.equals('foo'), + ); + unittest.expect( + o[1], + unittest.equals('foo'), + ); +} + +core.int buildCounterListInstancesResponse = 0; +api.ListInstancesResponse buildListInstancesResponse() { + final o = api.ListInstancesResponse(); + buildCounterListInstancesResponse++; + if 
(buildCounterListInstancesResponse < 3) {
+    o.instances = buildUnnamed2();
+    o.nextPageToken = 'foo';
+    o.unreachable = buildUnnamed3();
+  }
+  buildCounterListInstancesResponse--;
+  return o;
+}
+
+void checkListInstancesResponse(api.ListInstancesResponse o) {
+  buildCounterListInstancesResponse++;
+  if (buildCounterListInstancesResponse < 3) {
+    checkUnnamed2(o.instances!);
+    unittest.expect(
+      o.nextPageToken!,
+      unittest.equals('foo'),
+    );
+    checkUnnamed3(o.unreachable!);
+  }
+  buildCounterListInstancesResponse--;
+}
+
+core.List<api.Location> buildUnnamed4() => [
+      buildLocation(),
+      buildLocation(),
+    ];
+
+void checkUnnamed4(core.List<api.Location> o) {
+  unittest.expect(o, unittest.hasLength(2));
+  checkLocation(o[0]);
+  checkLocation(o[1]);
+}
+
+core.int buildCounterListLocationsResponse = 0;
+api.ListLocationsResponse buildListLocationsResponse() {
+  final o = api.ListLocationsResponse();
+  buildCounterListLocationsResponse++;
+  if (buildCounterListLocationsResponse < 3) {
+    o.locations = buildUnnamed4();
+    o.nextPageToken = 'foo';
+  }
+  buildCounterListLocationsResponse--;
+  return o;
+}
+
+void checkListLocationsResponse(api.ListLocationsResponse o) {
+  buildCounterListLocationsResponse++;
+  if (buildCounterListLocationsResponse < 3) {
+    checkUnnamed4(o.locations!);
+    unittest.expect(
+      o.nextPageToken!,
+      unittest.equals('foo'),
+    );
+  }
+  buildCounterListLocationsResponse--;
+}
+
+core.List<api.Operation> buildUnnamed5() => [
+      buildOperation(),
+      buildOperation(),
+    ];
+
+void checkUnnamed5(core.List<api.Operation> o) {
+  unittest.expect(o, unittest.hasLength(2));
+  checkOperation(o[0]);
+  checkOperation(o[1]);
+}
+
+core.int buildCounterListOperationsResponse = 0;
+api.ListOperationsResponse buildListOperationsResponse() {
+  final o = api.ListOperationsResponse();
+  buildCounterListOperationsResponse++;
+  if (buildCounterListOperationsResponse < 3) {
+    o.nextPageToken = 'foo';
+    o.operations = buildUnnamed5();
+  }
+  buildCounterListOperationsResponse--;
+  return o;
+}
+
+void checkListOperationsResponse(api.ListOperationsResponse o) {
+  buildCounterListOperationsResponse++;
+  if (buildCounterListOperationsResponse < 3) {
+    unittest.expect(
+      o.nextPageToken!,
+      unittest.equals('foo'),
+    );
+    checkUnnamed5(o.operations!);
+  }
+  buildCounterListOperationsResponse--;
+}
+
+core.Map<core.String, core.String> buildUnnamed6() => {
+      'x': 'foo',
+      'y': 'foo',
+    };
+
+void checkUnnamed6(core.Map<core.String, core.String> o) {
+  unittest.expect(o, unittest.hasLength(2));
+  unittest.expect(
+    o['x']!,
+    unittest.equals('foo'),
+  );
+  unittest.expect(
+    o['y']!,
+    unittest.equals('foo'),
+  );
+}
+
+core.Map<core.String, core.Object?> buildUnnamed7() => {
+      'x': {
+        'list': [1, 2, 3],
+        'bool': true,
+        'string': 'foo'
+      },
+      'y': {
+        'list': [1, 2, 3],
+        'bool': true,
+        'string': 'foo'
+      },
+    };
+
+void checkUnnamed7(core.Map<core.String, core.Object?> o) {
+  unittest.expect(o, unittest.hasLength(2));
+  var casted1 = (o['x']!) as core.Map;
+  unittest.expect(casted1, unittest.hasLength(3));
+  unittest.expect(
+    casted1['list'],
+    unittest.equals([1, 2, 3]),
+  );
+  unittest.expect(
+    casted1['bool'],
+    unittest.equals(true),
+  );
+  unittest.expect(
+    casted1['string'],
+    unittest.equals('foo'),
+  );
+  var casted2 = (o['y']!)
as core.Map; + unittest.expect(casted2, unittest.hasLength(3)); + unittest.expect( + casted2['list'], + unittest.equals([1, 2, 3]), + ); + unittest.expect( + casted2['bool'], + unittest.equals(true), + ); + unittest.expect( + casted2['string'], + unittest.equals('foo'), + ); +} + +core.int buildCounterLocation = 0; +api.Location buildLocation() { + final o = api.Location(); + buildCounterLocation++; + if (buildCounterLocation < 3) { + o.displayName = 'foo'; + o.labels = buildUnnamed6(); + o.locationId = 'foo'; + o.metadata = buildUnnamed7(); + o.name = 'foo'; + } + buildCounterLocation--; + return o; +} + +void checkLocation(api.Location o) { + buildCounterLocation++; + if (buildCounterLocation < 3) { + unittest.expect( + o.displayName!, + unittest.equals('foo'), + ); + checkUnnamed6(o.labels!); + unittest.expect( + o.locationId!, + unittest.equals('foo'), + ); + checkUnnamed7(o.metadata!); + unittest.expect( + o.name!, + unittest.equals('foo'), + ); + } + buildCounterLocation--; +} + +core.Map buildUnnamed8() => { + 'x': { + 'list': [1, 2, 3], + 'bool': true, + 'string': 'foo' + }, + 'y': { + 'list': [1, 2, 3], + 'bool': true, + 'string': 'foo' + }, + }; + +void checkUnnamed8(core.Map o) { + unittest.expect(o, unittest.hasLength(2)); + var casted3 = (o['x']!) as core.Map; + unittest.expect(casted3, unittest.hasLength(3)); + unittest.expect( + casted3['list'], + unittest.equals([1, 2, 3]), + ); + unittest.expect( + casted3['bool'], + unittest.equals(true), + ); + unittest.expect( + casted3['string'], + unittest.equals('foo'), + ); + var casted4 = (o['y']!) as core.Map; + unittest.expect(casted4, unittest.hasLength(3)); + unittest.expect( + casted4['list'], + unittest.equals([1, 2, 3]), + ); + unittest.expect( + casted4['bool'], + unittest.equals(true), + ); + unittest.expect( + casted4['string'], + unittest.equals('foo'), + ); +} + +core.Map buildUnnamed9() => { + 'x': { + 'list': [1, 2, 3], + 'bool': true, + 'string': 'foo' + }, + 'y': { + 'list': [1, 2, 3], + 'bool': true, + 'string': 'foo' + }, + }; + +void checkUnnamed9(core.Map o) { + unittest.expect(o, unittest.hasLength(2)); + var casted5 = (o['x']!) as core.Map; + unittest.expect(casted5, unittest.hasLength(3)); + unittest.expect( + casted5['list'], + unittest.equals([1, 2, 3]), + ); + unittest.expect( + casted5['bool'], + unittest.equals(true), + ); + unittest.expect( + casted5['string'], + unittest.equals('foo'), + ); + var casted6 = (o['y']!) 
as core.Map; + unittest.expect(casted6, unittest.hasLength(3)); + unittest.expect( + casted6['list'], + unittest.equals([1, 2, 3]), + ); + unittest.expect( + casted6['bool'], + unittest.equals(true), + ); + unittest.expect( + casted6['string'], + unittest.equals('foo'), + ); +} + +core.int buildCounterOperation = 0; +api.Operation buildOperation() { + final o = api.Operation(); + buildCounterOperation++; + if (buildCounterOperation < 3) { + o.done = true; + o.error = buildStatus(); + o.metadata = buildUnnamed8(); + o.name = 'foo'; + o.response = buildUnnamed9(); + } + buildCounterOperation--; + return o; +} + +void checkOperation(api.Operation o) { + buildCounterOperation++; + if (buildCounterOperation < 3) { + unittest.expect(o.done!, unittest.isTrue); + checkStatus(o.error!); + checkUnnamed8(o.metadata!); + unittest.expect( + o.name!, + unittest.equals('foo'), + ); + checkUnnamed9(o.response!); + } + buildCounterOperation--; +} + +core.int buildCounterSourceGcsBucket = 0; +api.SourceGcsBucket buildSourceGcsBucket() { + final o = api.SourceGcsBucket(); + buildCounterSourceGcsBucket++; + if (buildCounterSourceGcsBucket < 3) { + o.uri = 'foo'; + } + buildCounterSourceGcsBucket--; + return o; +} + +void checkSourceGcsBucket(api.SourceGcsBucket o) { + buildCounterSourceGcsBucket++; + if (buildCounterSourceGcsBucket < 3) { + unittest.expect( + o.uri!, + unittest.equals('foo'), + ); + } + buildCounterSourceGcsBucket--; +} + +core.int buildCounterSourceParallelstore = 0; +api.SourceParallelstore buildSourceParallelstore() { + final o = api.SourceParallelstore(); + buildCounterSourceParallelstore++; + if (buildCounterSourceParallelstore < 3) { + o.path = 'foo'; + } + buildCounterSourceParallelstore--; + return o; +} + +void checkSourceParallelstore(api.SourceParallelstore o) { + buildCounterSourceParallelstore++; + if (buildCounterSourceParallelstore < 3) { + unittest.expect( + o.path!, + unittest.equals('foo'), + ); + } + buildCounterSourceParallelstore--; +} + +core.Map buildUnnamed10() => { + 'x': { + 'list': [1, 2, 3], + 'bool': true, + 'string': 'foo' + }, + 'y': { + 'list': [1, 2, 3], + 'bool': true, + 'string': 'foo' + }, + }; + +void checkUnnamed10(core.Map o) { + unittest.expect(o, unittest.hasLength(2)); + var casted7 = (o['x']!) as core.Map; + unittest.expect(casted7, unittest.hasLength(3)); + unittest.expect( + casted7['list'], + unittest.equals([1, 2, 3]), + ); + unittest.expect( + casted7['bool'], + unittest.equals(true), + ); + unittest.expect( + casted7['string'], + unittest.equals('foo'), + ); + var casted8 = (o['y']!) 
as core.Map; + unittest.expect(casted8, unittest.hasLength(3)); + unittest.expect( + casted8['list'], + unittest.equals([1, 2, 3]), + ); + unittest.expect( + casted8['bool'], + unittest.equals(true), + ); + unittest.expect( + casted8['string'], + unittest.equals('foo'), + ); +} + +core.List> buildUnnamed11() => [ + buildUnnamed10(), + buildUnnamed10(), + ]; + +void checkUnnamed11(core.List> o) { + unittest.expect(o, unittest.hasLength(2)); + checkUnnamed10(o[0]); + checkUnnamed10(o[1]); +} + +core.int buildCounterStatus = 0; +api.Status buildStatus() { + final o = api.Status(); + buildCounterStatus++; + if (buildCounterStatus < 3) { + o.code = 42; + o.details = buildUnnamed11(); + o.message = 'foo'; + } + buildCounterStatus--; + return o; +} + +void checkStatus(api.Status o) { + buildCounterStatus++; + if (buildCounterStatus < 3) { + unittest.expect( + o.code!, + unittest.equals(42), + ); + checkUnnamed11(o.details!); + unittest.expect( + o.message!, + unittest.equals('foo'), + ); + } + buildCounterStatus--; +} + +void main() { + unittest.group('obj-schema-CancelOperationRequest', () { + unittest.test('to-json--from-json', () async { + final o = buildCancelOperationRequest(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.CancelOperationRequest.fromJson( + oJson as core.Map); + checkCancelOperationRequest(od); + }); + }); + + unittest.group('obj-schema-DestinationGcsBucket', () { + unittest.test('to-json--from-json', () async { + final o = buildDestinationGcsBucket(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.DestinationGcsBucket.fromJson( + oJson as core.Map); + checkDestinationGcsBucket(od); + }); + }); + + unittest.group('obj-schema-DestinationParallelstore', () { + unittest.test('to-json--from-json', () async { + final o = buildDestinationParallelstore(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.DestinationParallelstore.fromJson( + oJson as core.Map); + checkDestinationParallelstore(od); + }); + }); + + unittest.group('obj-schema-ExportDataRequest', () { + unittest.test('to-json--from-json', () async { + final o = buildExportDataRequest(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.ExportDataRequest.fromJson( + oJson as core.Map); + checkExportDataRequest(od); + }); + }); + + unittest.group('obj-schema-GoogleProtobufEmpty', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleProtobufEmpty(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleProtobufEmpty.fromJson( + oJson as core.Map); + checkGoogleProtobufEmpty(od); + }); + }); + + unittest.group('obj-schema-ImportDataRequest', () { + unittest.test('to-json--from-json', () async { + final o = buildImportDataRequest(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.ImportDataRequest.fromJson( + oJson as core.Map); + checkImportDataRequest(od); + }); + }); + + unittest.group('obj-schema-Instance', () { + unittest.test('to-json--from-json', () async { + final o = buildInstance(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.Instance.fromJson(oJson as core.Map); + checkInstance(od); + }); + }); + + unittest.group('obj-schema-ListInstancesResponse', () { + unittest.test('to-json--from-json', () async { + final o = buildListInstancesResponse(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.ListInstancesResponse.fromJson( + oJson as core.Map); + 
checkListInstancesResponse(od); + }); + }); + + unittest.group('obj-schema-ListLocationsResponse', () { + unittest.test('to-json--from-json', () async { + final o = buildListLocationsResponse(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.ListLocationsResponse.fromJson( + oJson as core.Map); + checkListLocationsResponse(od); + }); + }); + + unittest.group('obj-schema-ListOperationsResponse', () { + unittest.test('to-json--from-json', () async { + final o = buildListOperationsResponse(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.ListOperationsResponse.fromJson( + oJson as core.Map); + checkListOperationsResponse(od); + }); + }); + + unittest.group('obj-schema-Location', () { + unittest.test('to-json--from-json', () async { + final o = buildLocation(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.Location.fromJson(oJson as core.Map); + checkLocation(od); + }); + }); + + unittest.group('obj-schema-Operation', () { + unittest.test('to-json--from-json', () async { + final o = buildOperation(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.Operation.fromJson(oJson as core.Map); + checkOperation(od); + }); + }); + + unittest.group('obj-schema-SourceGcsBucket', () { + unittest.test('to-json--from-json', () async { + final o = buildSourceGcsBucket(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.SourceGcsBucket.fromJson( + oJson as core.Map); + checkSourceGcsBucket(od); + }); + }); + + unittest.group('obj-schema-SourceParallelstore', () { + unittest.test('to-json--from-json', () async { + final o = buildSourceParallelstore(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.SourceParallelstore.fromJson( + oJson as core.Map); + checkSourceParallelstore(od); + }); + }); + + unittest.group('obj-schema-Status', () { + unittest.test('to-json--from-json', () async { + final o = buildStatus(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.Status.fromJson(oJson as core.Map); + checkStatus(od); + }); + }); + + unittest.group('resource-ProjectsLocationsResource', () { + unittest.test('method--get', () async { + final mock = HttpServerMock(); + final res = api.ParallelstoreApi(mock).projects.locations; + final arg_name = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildLocation()); + return async.Future.value(stringResponse(200, h, resp)); + 
}), true); + final response = await res.get(arg_name, $fields: arg_$fields); + checkLocation(response as api.Location); + }); + + unittest.test('method--list', () async { + final mock = HttpServerMock(); + final res = api.ParallelstoreApi(mock).projects.locations; + final arg_name = 'foo'; + final arg_filter = 'foo'; + final arg_pageSize = 42; + final arg_pageToken = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['filter']!.first, + unittest.equals(arg_filter), + ); + unittest.expect( + core.int.parse(queryMap['pageSize']!.first), + unittest.equals(arg_pageSize), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildListLocationsResponse()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.list(arg_name, + filter: arg_filter, + pageSize: arg_pageSize, + pageToken: arg_pageToken, + $fields: arg_$fields); + checkListLocationsResponse(response as api.ListLocationsResponse); + }); + }); + + unittest.group('resource-ProjectsLocationsInstancesResource', () { + unittest.test('method--create', () async { + final mock = HttpServerMock(); + final res = api.ParallelstoreApi(mock).projects.locations.instances; + final arg_request = buildInstance(); + final arg_parent = 'foo'; + final arg_instanceId = 'foo'; + final arg_requestId = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = + api.Instance.fromJson(json as core.Map); + checkInstance(obj); + + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['instanceId']!.first, + unittest.equals(arg_instanceId), + ); + unittest.expect( + 
queryMap['requestId']!.first, + unittest.equals(arg_requestId), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildOperation()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.create(arg_request, arg_parent, + instanceId: arg_instanceId, + requestId: arg_requestId, + $fields: arg_$fields); + checkOperation(response as api.Operation); + }); + + unittest.test('method--delete', () async { + final mock = HttpServerMock(); + final res = api.ParallelstoreApi(mock).projects.locations.instances; + final arg_name = 'foo'; + final arg_requestId = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['requestId']!.first, + unittest.equals(arg_requestId), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildOperation()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.delete(arg_name, + requestId: arg_requestId, $fields: arg_$fields); + checkOperation(response as api.Operation); + }); + + unittest.test('method--exportData', () async { + final mock = HttpServerMock(); + final res = api.ParallelstoreApi(mock).projects.locations.instances; + final arg_request = buildExportDataRequest(); + final arg_name = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.ExportDataRequest.fromJson( + json as core.Map); + checkExportDataRequest(obj); + + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 
'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildOperation()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = + await res.exportData(arg_request, arg_name, $fields: arg_$fields); + checkOperation(response as api.Operation); + }); + + unittest.test('method--get', () async { + final mock = HttpServerMock(); + final res = api.ParallelstoreApi(mock).projects.locations.instances; + final arg_name = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildInstance()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.get(arg_name, $fields: arg_$fields); + checkInstance(response as api.Instance); + }); + + unittest.test('method--importData', () async { + final mock = HttpServerMock(); + final res = api.ParallelstoreApi(mock).projects.locations.instances; + final arg_request = buildImportDataRequest(); + final arg_name = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.ImportDataRequest.fromJson( + json as core.Map); + checkImportDataRequest(obj); + + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildOperation()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = + await res.importData(arg_request, arg_name, $fields: arg_$fields); + checkOperation(response as api.Operation); + }); + + unittest.test('method--list', () async { + final mock = HttpServerMock(); + 
final res = api.ParallelstoreApi(mock).projects.locations.instances; + final arg_parent = 'foo'; + final arg_filter = 'foo'; + final arg_orderBy = 'foo'; + final arg_pageSize = 42; + final arg_pageToken = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['filter']!.first, + unittest.equals(arg_filter), + ); + unittest.expect( + queryMap['orderBy']!.first, + unittest.equals(arg_orderBy), + ); + unittest.expect( + core.int.parse(queryMap['pageSize']!.first), + unittest.equals(arg_pageSize), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildListInstancesResponse()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.list(arg_parent, + filter: arg_filter, + orderBy: arg_orderBy, + pageSize: arg_pageSize, + pageToken: arg_pageToken, + $fields: arg_$fields); + checkListInstancesResponse(response as api.ListInstancesResponse); + }); + + unittest.test('method--patch', () async { + final mock = HttpServerMock(); + final res = api.ParallelstoreApi(mock).projects.locations.instances; + final arg_request = buildInstance(); + final arg_name = 'foo'; + final arg_requestId = 'foo'; + final arg_updateMask = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = + api.Instance.fromJson(json as core.Map); + checkInstance(obj); + + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['requestId']!.first, + unittest.equals(arg_requestId), + ); + unittest.expect( + queryMap['updateMask']!.first, + unittest.equals(arg_updateMask), + ); + unittest.expect( + queryMap['fields']!.first, + 
unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildOperation()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.patch(arg_request, arg_name, + requestId: arg_requestId, + updateMask: arg_updateMask, + $fields: arg_$fields); + checkOperation(response as api.Operation); + }); + }); + + unittest.group('resource-ProjectsLocationsOperationsResource', () { + unittest.test('method--cancel', () async { + final mock = HttpServerMock(); + final res = api.ParallelstoreApi(mock).projects.locations.operations; + final arg_request = buildCancelOperationRequest(); + final arg_name = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.CancelOperationRequest.fromJson( + json as core.Map); + checkCancelOperationRequest(obj); + + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildGoogleProtobufEmpty()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = + await res.cancel(arg_request, arg_name, $fields: arg_$fields); + checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); + }); + + unittest.test('method--delete', () async { + final mock = HttpServerMock(); + final res = api.ParallelstoreApi(mock).projects.locations.operations; + final arg_name = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildGoogleProtobufEmpty()); + return 
async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.delete(arg_name, $fields: arg_$fields); + checkGoogleProtobufEmpty(response as api.GoogleProtobufEmpty); + }); + + unittest.test('method--get', () async { + final mock = HttpServerMock(); + final res = api.ParallelstoreApi(mock).projects.locations.operations; + final arg_name = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildOperation()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.get(arg_name, $fields: arg_$fields); + checkOperation(response as api.Operation); + }); + + unittest.test('method--list', () async { + final mock = HttpServerMock(); + final res = api.ParallelstoreApi(mock).projects.locations.operations; + final arg_name = 'foo'; + final arg_filter = 'foo'; + final arg_pageSize = 42; + final arg_pageToken = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['filter']!.first, + unittest.equals(arg_filter), + ); + unittest.expect( + core.int.parse(queryMap['pageSize']!.first), + unittest.equals(arg_pageSize), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildListOperationsResponse()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.list(arg_name, + filter: arg_filter, + pageSize: arg_pageSize, + pageToken: 
arg_pageToken, + $fields: arg_$fields); + checkListOperationsResponse(response as api.ListOperationsResponse); + }); + }); +} diff --git a/generated/googleapis/test/paymentsresellersubscription/v1_test.dart b/generated/googleapis/test/paymentsresellersubscription/v1_test.dart index 14422c517..12cb00748 100644 --- a/generated/googleapis/test/paymentsresellersubscription/v1_test.dart +++ b/generated/googleapis/test/paymentsresellersubscription/v1_test.dart @@ -113,6 +113,43 @@ void checkGoogleCloudPaymentsResellerSubscriptionV1CancelSubscriptionResponse( buildCounterGoogleCloudPaymentsResellerSubscriptionV1CancelSubscriptionResponse--; } +core.int + buildCounterGoogleCloudPaymentsResellerSubscriptionV1CreateSubscriptionIntent = + 0; +api.GoogleCloudPaymentsResellerSubscriptionV1CreateSubscriptionIntent + buildGoogleCloudPaymentsResellerSubscriptionV1CreateSubscriptionIntent() { + final o = + api.GoogleCloudPaymentsResellerSubscriptionV1CreateSubscriptionIntent(); + buildCounterGoogleCloudPaymentsResellerSubscriptionV1CreateSubscriptionIntent++; + if (buildCounterGoogleCloudPaymentsResellerSubscriptionV1CreateSubscriptionIntent < + 3) { + o.parent = 'foo'; + o.subscription = + buildGoogleCloudPaymentsResellerSubscriptionV1Subscription(); + o.subscriptionId = 'foo'; + } + buildCounterGoogleCloudPaymentsResellerSubscriptionV1CreateSubscriptionIntent--; + return o; +} + +void checkGoogleCloudPaymentsResellerSubscriptionV1CreateSubscriptionIntent( + api.GoogleCloudPaymentsResellerSubscriptionV1CreateSubscriptionIntent o) { + buildCounterGoogleCloudPaymentsResellerSubscriptionV1CreateSubscriptionIntent++; + if (buildCounterGoogleCloudPaymentsResellerSubscriptionV1CreateSubscriptionIntent < + 3) { + unittest.expect( + o.parent!, + unittest.equals('foo'), + ); + checkGoogleCloudPaymentsResellerSubscriptionV1Subscription(o.subscription!); + unittest.expect( + o.subscriptionId!, + unittest.equals('foo'), + ); + } + buildCounterGoogleCloudPaymentsResellerSubscriptionV1CreateSubscriptionIntent--; +} + core.int buildCounterGoogleCloudPaymentsResellerSubscriptionV1Duration = 0; api.GoogleCloudPaymentsResellerSubscriptionV1Duration buildGoogleCloudPaymentsResellerSubscriptionV1Duration() { @@ -142,6 +179,35 @@ void checkGoogleCloudPaymentsResellerSubscriptionV1Duration( buildCounterGoogleCloudPaymentsResellerSubscriptionV1Duration--; } +core.int + buildCounterGoogleCloudPaymentsResellerSubscriptionV1EntitleSubscriptionIntent = + 0; +api.GoogleCloudPaymentsResellerSubscriptionV1EntitleSubscriptionIntent + buildGoogleCloudPaymentsResellerSubscriptionV1EntitleSubscriptionIntent() { + final o = + api.GoogleCloudPaymentsResellerSubscriptionV1EntitleSubscriptionIntent(); + buildCounterGoogleCloudPaymentsResellerSubscriptionV1EntitleSubscriptionIntent++; + if (buildCounterGoogleCloudPaymentsResellerSubscriptionV1EntitleSubscriptionIntent < + 3) { + o.name = 'foo'; + } + buildCounterGoogleCloudPaymentsResellerSubscriptionV1EntitleSubscriptionIntent--; + return o; +} + +void checkGoogleCloudPaymentsResellerSubscriptionV1EntitleSubscriptionIntent( + api.GoogleCloudPaymentsResellerSubscriptionV1EntitleSubscriptionIntent o) { + buildCounterGoogleCloudPaymentsResellerSubscriptionV1EntitleSubscriptionIntent++; + if (buildCounterGoogleCloudPaymentsResellerSubscriptionV1EntitleSubscriptionIntent < + 3) { + unittest.expect( + o.name!, + unittest.equals('foo'), + ); + } + buildCounterGoogleCloudPaymentsResellerSubscriptionV1EntitleSubscriptionIntent--; +} + core.List< api 
.GoogleCloudPaymentsResellerSubscriptionV1EntitleSubscriptionRequestLineItemEntitlementDetails> @@ -475,6 +541,61 @@ void checkGoogleCloudPaymentsResellerSubscriptionV1FiniteBillingCycleDetails( buildCounterGoogleCloudPaymentsResellerSubscriptionV1FiniteBillingCycleDetails--; } +core.int + buildCounterGoogleCloudPaymentsResellerSubscriptionV1GenerateUserSessionRequest = + 0; +api.GoogleCloudPaymentsResellerSubscriptionV1GenerateUserSessionRequest + buildGoogleCloudPaymentsResellerSubscriptionV1GenerateUserSessionRequest() { + final o = + api.GoogleCloudPaymentsResellerSubscriptionV1GenerateUserSessionRequest(); + buildCounterGoogleCloudPaymentsResellerSubscriptionV1GenerateUserSessionRequest++; + if (buildCounterGoogleCloudPaymentsResellerSubscriptionV1GenerateUserSessionRequest < + 3) { + o.intentPayload = + buildGoogleCloudPaymentsResellerSubscriptionV1IntentPayload(); + } + buildCounterGoogleCloudPaymentsResellerSubscriptionV1GenerateUserSessionRequest--; + return o; +} + +void checkGoogleCloudPaymentsResellerSubscriptionV1GenerateUserSessionRequest( + api.GoogleCloudPaymentsResellerSubscriptionV1GenerateUserSessionRequest o) { + buildCounterGoogleCloudPaymentsResellerSubscriptionV1GenerateUserSessionRequest++; + if (buildCounterGoogleCloudPaymentsResellerSubscriptionV1GenerateUserSessionRequest < + 3) { + checkGoogleCloudPaymentsResellerSubscriptionV1IntentPayload( + o.intentPayload!); + } + buildCounterGoogleCloudPaymentsResellerSubscriptionV1GenerateUserSessionRequest--; +} + +core.int + buildCounterGoogleCloudPaymentsResellerSubscriptionV1GenerateUserSessionResponse = + 0; +api.GoogleCloudPaymentsResellerSubscriptionV1GenerateUserSessionResponse + buildGoogleCloudPaymentsResellerSubscriptionV1GenerateUserSessionResponse() { + final o = api + .GoogleCloudPaymentsResellerSubscriptionV1GenerateUserSessionResponse(); + buildCounterGoogleCloudPaymentsResellerSubscriptionV1GenerateUserSessionResponse++; + if (buildCounterGoogleCloudPaymentsResellerSubscriptionV1GenerateUserSessionResponse < + 3) { + o.userSession = buildGoogleCloudPaymentsResellerSubscriptionV1UserSession(); + } + buildCounterGoogleCloudPaymentsResellerSubscriptionV1GenerateUserSessionResponse--; + return o; +} + +void checkGoogleCloudPaymentsResellerSubscriptionV1GenerateUserSessionResponse( + api.GoogleCloudPaymentsResellerSubscriptionV1GenerateUserSessionResponse + o) { + buildCounterGoogleCloudPaymentsResellerSubscriptionV1GenerateUserSessionResponse++; + if (buildCounterGoogleCloudPaymentsResellerSubscriptionV1GenerateUserSessionResponse < + 3) { + checkGoogleCloudPaymentsResellerSubscriptionV1UserSession(o.userSession!); + } + buildCounterGoogleCloudPaymentsResellerSubscriptionV1GenerateUserSessionResponse--; +} + core.List buildUnnamed3() => [ 'foo', 'foo', @@ -531,6 +652,33 @@ void checkGoogleCloudPaymentsResellerSubscriptionV1GoogleOnePayload( buildCounterGoogleCloudPaymentsResellerSubscriptionV1GoogleOnePayload--; } +core.int buildCounterGoogleCloudPaymentsResellerSubscriptionV1IntentPayload = 0; +api.GoogleCloudPaymentsResellerSubscriptionV1IntentPayload + buildGoogleCloudPaymentsResellerSubscriptionV1IntentPayload() { + final o = api.GoogleCloudPaymentsResellerSubscriptionV1IntentPayload(); + buildCounterGoogleCloudPaymentsResellerSubscriptionV1IntentPayload++; + if (buildCounterGoogleCloudPaymentsResellerSubscriptionV1IntentPayload < 3) { + o.createIntent = + buildGoogleCloudPaymentsResellerSubscriptionV1CreateSubscriptionIntent(); + o.entitleIntent = + 
buildGoogleCloudPaymentsResellerSubscriptionV1EntitleSubscriptionIntent(); + } + buildCounterGoogleCloudPaymentsResellerSubscriptionV1IntentPayload--; + return o; +} + +void checkGoogleCloudPaymentsResellerSubscriptionV1IntentPayload( + api.GoogleCloudPaymentsResellerSubscriptionV1IntentPayload o) { + buildCounterGoogleCloudPaymentsResellerSubscriptionV1IntentPayload++; + if (buildCounterGoogleCloudPaymentsResellerSubscriptionV1IntentPayload < 3) { + checkGoogleCloudPaymentsResellerSubscriptionV1CreateSubscriptionIntent( + o.createIntent!); + checkGoogleCloudPaymentsResellerSubscriptionV1EntitleSubscriptionIntent( + o.entitleIntent!); + } + buildCounterGoogleCloudPaymentsResellerSubscriptionV1IntentPayload--; +} + core.List buildUnnamed4() => [ buildGoogleCloudPaymentsResellerSubscriptionV1Product(), @@ -1115,6 +1263,8 @@ api.GoogleCloudPaymentsResellerSubscriptionV1Subscription o.endUserEntitled = true; o.freeTrialEndTime = 'foo'; o.lineItems = buildUnnamed13(); + o.migrationDetails = + buildGoogleCloudPaymentsResellerSubscriptionV1SubscriptionMigrationDetails(); o.name = 'foo'; o.partnerUserToken = 'foo'; o.processingState = 'foo'; @@ -1155,6 +1305,8 @@ void checkGoogleCloudPaymentsResellerSubscriptionV1Subscription( unittest.equals('foo'), ); checkUnnamed13(o.lineItems!); + checkGoogleCloudPaymentsResellerSubscriptionV1SubscriptionMigrationDetails( + o.migrationDetails!); unittest.expect( o.name!, unittest.equals('foo'), @@ -1379,6 +1531,36 @@ void checkGoogleCloudPaymentsResellerSubscriptionV1SubscriptionLineItemOneTimeRe buildCounterGoogleCloudPaymentsResellerSubscriptionV1SubscriptionLineItemOneTimeRecurrenceDetails--; } +core.int + buildCounterGoogleCloudPaymentsResellerSubscriptionV1SubscriptionMigrationDetails = + 0; +api.GoogleCloudPaymentsResellerSubscriptionV1SubscriptionMigrationDetails + buildGoogleCloudPaymentsResellerSubscriptionV1SubscriptionMigrationDetails() { + final o = api + .GoogleCloudPaymentsResellerSubscriptionV1SubscriptionMigrationDetails(); + buildCounterGoogleCloudPaymentsResellerSubscriptionV1SubscriptionMigrationDetails++; + if (buildCounterGoogleCloudPaymentsResellerSubscriptionV1SubscriptionMigrationDetails < + 3) { + o.migratedSubscriptionId = 'foo'; + } + buildCounterGoogleCloudPaymentsResellerSubscriptionV1SubscriptionMigrationDetails--; + return o; +} + +void checkGoogleCloudPaymentsResellerSubscriptionV1SubscriptionMigrationDetails( + api.GoogleCloudPaymentsResellerSubscriptionV1SubscriptionMigrationDetails + o) { + buildCounterGoogleCloudPaymentsResellerSubscriptionV1SubscriptionMigrationDetails++; + if (buildCounterGoogleCloudPaymentsResellerSubscriptionV1SubscriptionMigrationDetails < + 3) { + unittest.expect( + o.migratedSubscriptionId!, + unittest.equals('foo'), + ); + } + buildCounterGoogleCloudPaymentsResellerSubscriptionV1SubscriptionMigrationDetails--; +} + core.int buildCounterGoogleCloudPaymentsResellerSubscriptionV1SubscriptionPromotionSpec = 0; @@ -1507,6 +1689,35 @@ void checkGoogleCloudPaymentsResellerSubscriptionV1UndoCancelSubscriptionRespons buildCounterGoogleCloudPaymentsResellerSubscriptionV1UndoCancelSubscriptionResponse--; } +core.int buildCounterGoogleCloudPaymentsResellerSubscriptionV1UserSession = 0; +api.GoogleCloudPaymentsResellerSubscriptionV1UserSession + buildGoogleCloudPaymentsResellerSubscriptionV1UserSession() { + final o = api.GoogleCloudPaymentsResellerSubscriptionV1UserSession(); + buildCounterGoogleCloudPaymentsResellerSubscriptionV1UserSession++; + if 
(buildCounterGoogleCloudPaymentsResellerSubscriptionV1UserSession < 3) { + o.expireTime = 'foo'; + o.token = 'foo'; + } + buildCounterGoogleCloudPaymentsResellerSubscriptionV1UserSession--; + return o; +} + +void checkGoogleCloudPaymentsResellerSubscriptionV1UserSession( + api.GoogleCloudPaymentsResellerSubscriptionV1UserSession o) { + buildCounterGoogleCloudPaymentsResellerSubscriptionV1UserSession++; + if (buildCounterGoogleCloudPaymentsResellerSubscriptionV1UserSession < 3) { + unittest.expect( + o.expireTime!, + unittest.equals('foo'), + ); + unittest.expect( + o.token!, + unittest.equals('foo'), + ); + } + buildCounterGoogleCloudPaymentsResellerSubscriptionV1UserSession--; +} + core.List buildUnnamed18() => [ 'foo', 'foo', @@ -1709,6 +1920,21 @@ void main() { }); }); + unittest.group( + 'obj-schema-GoogleCloudPaymentsResellerSubscriptionV1CreateSubscriptionIntent', + () { + unittest.test('to-json--from-json', () async { + final o = + buildGoogleCloudPaymentsResellerSubscriptionV1CreateSubscriptionIntent(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.GoogleCloudPaymentsResellerSubscriptionV1CreateSubscriptionIntent + .fromJson(oJson as core.Map); + checkGoogleCloudPaymentsResellerSubscriptionV1CreateSubscriptionIntent( + od); + }); + }); + unittest.group('obj-schema-GoogleCloudPaymentsResellerSubscriptionV1Duration', () { unittest.test('to-json--from-json', () async { @@ -1720,6 +1946,21 @@ void main() { }); }); + unittest.group( + 'obj-schema-GoogleCloudPaymentsResellerSubscriptionV1EntitleSubscriptionIntent', + () { + unittest.test('to-json--from-json', () async { + final o = + buildGoogleCloudPaymentsResellerSubscriptionV1EntitleSubscriptionIntent(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.GoogleCloudPaymentsResellerSubscriptionV1EntitleSubscriptionIntent + .fromJson(oJson as core.Map); + checkGoogleCloudPaymentsResellerSubscriptionV1EntitleSubscriptionIntent( + od); + }); + }); + unittest.group( 'obj-schema-GoogleCloudPaymentsResellerSubscriptionV1EntitleSubscriptionRequest', () { @@ -1852,6 +2093,36 @@ void main() { }); }); + unittest.group( + 'obj-schema-GoogleCloudPaymentsResellerSubscriptionV1GenerateUserSessionRequest', + () { + unittest.test('to-json--from-json', () async { + final o = + buildGoogleCloudPaymentsResellerSubscriptionV1GenerateUserSessionRequest(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.GoogleCloudPaymentsResellerSubscriptionV1GenerateUserSessionRequest + .fromJson(oJson as core.Map); + checkGoogleCloudPaymentsResellerSubscriptionV1GenerateUserSessionRequest( + od); + }); + }); + + unittest.group( + 'obj-schema-GoogleCloudPaymentsResellerSubscriptionV1GenerateUserSessionResponse', + () { + unittest.test('to-json--from-json', () async { + final o = + buildGoogleCloudPaymentsResellerSubscriptionV1GenerateUserSessionResponse(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.GoogleCloudPaymentsResellerSubscriptionV1GenerateUserSessionResponse + .fromJson(oJson as core.Map); + checkGoogleCloudPaymentsResellerSubscriptionV1GenerateUserSessionResponse( + od); + }); + }); + unittest.group( 'obj-schema-GoogleCloudPaymentsResellerSubscriptionV1GoogleOnePayload', () { @@ -1865,6 +2136,18 @@ void main() { }); }); + unittest.group( + 'obj-schema-GoogleCloudPaymentsResellerSubscriptionV1IntentPayload', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudPaymentsResellerSubscriptionV1IntentPayload(); + final 
oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.GoogleCloudPaymentsResellerSubscriptionV1IntentPayload.fromJson( + oJson as core.Map); + checkGoogleCloudPaymentsResellerSubscriptionV1IntentPayload(od); + }); + }); + unittest.group( 'obj-schema-GoogleCloudPaymentsResellerSubscriptionV1ListProductsResponse', () { @@ -2080,6 +2363,21 @@ void main() { }); }); + unittest.group( + 'obj-schema-GoogleCloudPaymentsResellerSubscriptionV1SubscriptionMigrationDetails', + () { + unittest.test('to-json--from-json', () async { + final o = + buildGoogleCloudPaymentsResellerSubscriptionV1SubscriptionMigrationDetails(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.GoogleCloudPaymentsResellerSubscriptionV1SubscriptionMigrationDetails + .fromJson(oJson as core.Map); + checkGoogleCloudPaymentsResellerSubscriptionV1SubscriptionMigrationDetails( + od); + }); + }); + unittest.group( 'obj-schema-GoogleCloudPaymentsResellerSubscriptionV1SubscriptionPromotionSpec', () { @@ -2140,6 +2438,18 @@ void main() { }); }); + unittest.group( + 'obj-schema-GoogleCloudPaymentsResellerSubscriptionV1UserSession', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudPaymentsResellerSubscriptionV1UserSession(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.GoogleCloudPaymentsResellerSubscriptionV1UserSession.fromJson( + oJson as core.Map); + checkGoogleCloudPaymentsResellerSubscriptionV1UserSession(od); + }); + }); + unittest.group( 'obj-schema-GoogleCloudPaymentsResellerSubscriptionV1YoutubePayload', () { unittest.test('to-json--from-json', () async { @@ -2850,4 +3160,71 @@ void main() { .GoogleCloudPaymentsResellerSubscriptionV1UndoCancelSubscriptionResponse); }); }); + + unittest.group('resource-PartnersUserSessionsResource', () { + unittest.test('method--generate', () async { + final mock = HttpServerMock(); + final res = + api.PaymentsResellerSubscriptionApi(mock).partners.userSessions; + final arg_request = + buildGoogleCloudPaymentsResellerSubscriptionV1GenerateUserSessionRequest(); + final arg_parent = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = + api.GoogleCloudPaymentsResellerSubscriptionV1GenerateUserSessionRequest + .fromJson(json as core.Map); + checkGoogleCloudPaymentsResellerSubscriptionV1GenerateUserSessionRequest( + obj); + + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode( + buildGoogleCloudPaymentsResellerSubscriptionV1GenerateUserSessionResponse()); + return 
async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = + await res.generate(arg_request, arg_parent, $fields: arg_$fields); + checkGoogleCloudPaymentsResellerSubscriptionV1GenerateUserSessionResponse( + response as api + .GoogleCloudPaymentsResellerSubscriptionV1GenerateUserSessionResponse); + }); + }); } diff --git a/generated/googleapis/test/places/v1_test.dart b/generated/googleapis/test/places/v1_test.dart index 0b4578ebd..8cf8c3b81 100644 --- a/generated/googleapis/test/places/v1_test.dart +++ b/generated/googleapis/test/places/v1_test.dart @@ -268,6 +268,7 @@ api.GoogleMapsPlacesV1AutocompletePlacesRequest final o = api.GoogleMapsPlacesV1AutocompletePlacesRequest(); buildCounterGoogleMapsPlacesV1AutocompletePlacesRequest++; if (buildCounterGoogleMapsPlacesV1AutocompletePlacesRequest < 3) { + o.includePureServiceAreaBusinesses = true; o.includeQueryPredictions = true; o.includedPrimaryTypes = buildUnnamed3(); o.includedRegionCodes = buildUnnamed4(); @@ -290,6 +291,7 @@ void checkGoogleMapsPlacesV1AutocompletePlacesRequest( api.GoogleMapsPlacesV1AutocompletePlacesRequest o) { buildCounterGoogleMapsPlacesV1AutocompletePlacesRequest++; if (buildCounterGoogleMapsPlacesV1AutocompletePlacesRequest < 3) { + unittest.expect(o.includePureServiceAreaBusinesses!, unittest.isTrue); unittest.expect(o.includeQueryPredictions!, unittest.isTrue); checkUnnamed3(o.includedPrimaryTypes!); checkUnnamed4(o.includedRegionCodes!); @@ -1100,6 +1102,8 @@ api.GoogleMapsPlacesV1Photo buildGoogleMapsPlacesV1Photo() { buildCounterGoogleMapsPlacesV1Photo++; if (buildCounterGoogleMapsPlacesV1Photo < 3) { o.authorAttributions = buildUnnamed14(); + o.flagContentUri = 'foo'; + o.googleMapsUri = 'foo'; o.heightPx = 42; o.name = 'foo'; o.widthPx = 42; @@ -1112,6 +1116,14 @@ void checkGoogleMapsPlacesV1Photo(api.GoogleMapsPlacesV1Photo o) { buildCounterGoogleMapsPlacesV1Photo++; if (buildCounterGoogleMapsPlacesV1Photo < 3) { checkUnnamed14(o.authorAttributions!); + unittest.expect( + o.flagContentUri!, + unittest.equals('foo'), + ); + unittest.expect( + o.googleMapsUri!, + unittest.equals('foo'), + ); unittest.expect( o.heightPx!, unittest.equals(42), @@ -1177,67 +1189,78 @@ void checkUnnamed16(core.List o) { checkGoogleMapsPlacesV1PlaceAttribution(o[1]); } -core.List buildUnnamed17() => [ +core.List buildUnnamed17() => [ + buildGoogleMapsPlacesV1PlaceContainingPlace(), + buildGoogleMapsPlacesV1PlaceContainingPlace(), + ]; + +void checkUnnamed17(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkGoogleMapsPlacesV1PlaceContainingPlace(o[0]); + checkGoogleMapsPlacesV1PlaceContainingPlace(o[1]); +} + +core.List buildUnnamed18() => [ buildGoogleMapsPlacesV1PlaceOpeningHours(), buildGoogleMapsPlacesV1PlaceOpeningHours(), ]; -void checkUnnamed17(core.List o) { +void checkUnnamed18(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleMapsPlacesV1PlaceOpeningHours(o[0]); checkGoogleMapsPlacesV1PlaceOpeningHours(o[1]); } -core.List buildUnnamed18() => [ +core.List buildUnnamed19() => [ buildGoogleMapsPlacesV1Photo(), buildGoogleMapsPlacesV1Photo(), ]; -void checkUnnamed18(core.List o) { +void checkUnnamed19(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleMapsPlacesV1Photo(o[0]); checkGoogleMapsPlacesV1Photo(o[1]); } -core.List buildUnnamed19() => [ +core.List buildUnnamed20() => [ buildGoogleMapsPlacesV1PlaceOpeningHours(), buildGoogleMapsPlacesV1PlaceOpeningHours(), ]; -void checkUnnamed19(core.List o) { +void checkUnnamed20(core.List o) { 
unittest.expect(o, unittest.hasLength(2)); checkGoogleMapsPlacesV1PlaceOpeningHours(o[0]); checkGoogleMapsPlacesV1PlaceOpeningHours(o[1]); } -core.List buildUnnamed20() => [ +core.List buildUnnamed21() => [ buildGoogleMapsPlacesV1Review(), buildGoogleMapsPlacesV1Review(), ]; -void checkUnnamed20(core.List o) { +void checkUnnamed21(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleMapsPlacesV1Review(o[0]); checkGoogleMapsPlacesV1Review(o[1]); } -core.List buildUnnamed21() => [ +core.List buildUnnamed22() => [ buildGoogleMapsPlacesV1PlaceSubDestination(), buildGoogleMapsPlacesV1PlaceSubDestination(), ]; -void checkUnnamed21(core.List o) { +void checkUnnamed22(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleMapsPlacesV1PlaceSubDestination(o[0]); checkGoogleMapsPlacesV1PlaceSubDestination(o[1]); } -core.List buildUnnamed22() => [ +core.List buildUnnamed23() => [ 'foo', 'foo', ]; -void checkUnnamed22(core.List o) { +void checkUnnamed23(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -1262,9 +1285,10 @@ api.GoogleMapsPlacesV1Place buildGoogleMapsPlacesV1Place() { o.areaSummary = buildGoogleMapsPlacesV1PlaceAreaSummary(); o.attributions = buildUnnamed16(); o.businessStatus = 'foo'; + o.containingPlaces = buildUnnamed17(); o.curbsidePickup = true; o.currentOpeningHours = buildGoogleMapsPlacesV1PlaceOpeningHours(); - o.currentSecondaryOpeningHours = buildUnnamed17(); + o.currentSecondaryOpeningHours = buildUnnamed18(); o.delivery = true; o.dineIn = true; o.displayName = buildGoogleTypeLocalizedText(); @@ -1276,6 +1300,7 @@ api.GoogleMapsPlacesV1Place buildGoogleMapsPlacesV1Place() { o.goodForChildren = true; o.goodForGroups = true; o.goodForWatchingSports = true; + o.googleMapsLinks = buildGoogleMapsPlacesV1PlaceGoogleMapsLinks(); o.googleMapsUri = 'foo'; o.iconBackgroundColor = 'foo'; o.iconMaskBaseUri = 'foo'; @@ -1289,17 +1314,19 @@ api.GoogleMapsPlacesV1Place buildGoogleMapsPlacesV1Place() { o.outdoorSeating = true; o.parkingOptions = buildGoogleMapsPlacesV1PlaceParkingOptions(); o.paymentOptions = buildGoogleMapsPlacesV1PlacePaymentOptions(); - o.photos = buildUnnamed18(); + o.photos = buildUnnamed19(); o.plusCode = buildGoogleMapsPlacesV1PlacePlusCode(); o.priceLevel = 'foo'; + o.priceRange = buildGoogleMapsPlacesV1PriceRange(); o.primaryType = 'foo'; o.primaryTypeDisplayName = buildGoogleTypeLocalizedText(); + o.pureServiceAreaBusiness = true; o.rating = 42.0; o.regularOpeningHours = buildGoogleMapsPlacesV1PlaceOpeningHours(); - o.regularSecondaryOpeningHours = buildUnnamed19(); + o.regularSecondaryOpeningHours = buildUnnamed20(); o.reservable = true; o.restroom = true; - o.reviews = buildUnnamed20(); + o.reviews = buildUnnamed21(); o.servesBeer = true; o.servesBreakfast = true; o.servesBrunch = true; @@ -1311,9 +1338,9 @@ api.GoogleMapsPlacesV1Place buildGoogleMapsPlacesV1Place() { o.servesVegetarianFood = true; o.servesWine = true; o.shortFormattedAddress = 'foo'; - o.subDestinations = buildUnnamed21(); + o.subDestinations = buildUnnamed22(); o.takeout = true; - o.types = buildUnnamed22(); + o.types = buildUnnamed23(); o.userRatingCount = 42; o.utcOffsetMinutes = 42; o.viewport = buildGoogleGeoTypeViewport(); @@ -1340,9 +1367,10 @@ void checkGoogleMapsPlacesV1Place(api.GoogleMapsPlacesV1Place o) { o.businessStatus!, unittest.equals('foo'), ); + checkUnnamed17(o.containingPlaces!); unittest.expect(o.curbsidePickup!, unittest.isTrue); checkGoogleMapsPlacesV1PlaceOpeningHours(o.currentOpeningHours!); - 
checkUnnamed17(o.currentSecondaryOpeningHours!); + checkUnnamed18(o.currentSecondaryOpeningHours!); unittest.expect(o.delivery!, unittest.isTrue); unittest.expect(o.dineIn!, unittest.isTrue); checkGoogleTypeLocalizedText(o.displayName!); @@ -1357,6 +1385,7 @@ void checkGoogleMapsPlacesV1Place(api.GoogleMapsPlacesV1Place o) { unittest.expect(o.goodForChildren!, unittest.isTrue); unittest.expect(o.goodForGroups!, unittest.isTrue); unittest.expect(o.goodForWatchingSports!, unittest.isTrue); + checkGoogleMapsPlacesV1PlaceGoogleMapsLinks(o.googleMapsLinks!); unittest.expect( o.googleMapsUri!, unittest.equals('foo'), @@ -1391,26 +1420,28 @@ void checkGoogleMapsPlacesV1Place(api.GoogleMapsPlacesV1Place o) { unittest.expect(o.outdoorSeating!, unittest.isTrue); checkGoogleMapsPlacesV1PlaceParkingOptions(o.parkingOptions!); checkGoogleMapsPlacesV1PlacePaymentOptions(o.paymentOptions!); - checkUnnamed18(o.photos!); + checkUnnamed19(o.photos!); checkGoogleMapsPlacesV1PlacePlusCode(o.plusCode!); unittest.expect( o.priceLevel!, unittest.equals('foo'), ); + checkGoogleMapsPlacesV1PriceRange(o.priceRange!); unittest.expect( o.primaryType!, unittest.equals('foo'), ); checkGoogleTypeLocalizedText(o.primaryTypeDisplayName!); + unittest.expect(o.pureServiceAreaBusiness!, unittest.isTrue); unittest.expect( o.rating!, unittest.equals(42.0), ); checkGoogleMapsPlacesV1PlaceOpeningHours(o.regularOpeningHours!); - checkUnnamed19(o.regularSecondaryOpeningHours!); + checkUnnamed20(o.regularSecondaryOpeningHours!); unittest.expect(o.reservable!, unittest.isTrue); unittest.expect(o.restroom!, unittest.isTrue); - checkUnnamed20(o.reviews!); + checkUnnamed21(o.reviews!); unittest.expect(o.servesBeer!, unittest.isTrue); unittest.expect(o.servesBreakfast!, unittest.isTrue); unittest.expect(o.servesBrunch!, unittest.isTrue); @@ -1425,9 +1456,9 @@ void checkGoogleMapsPlacesV1Place(api.GoogleMapsPlacesV1Place o) { o.shortFormattedAddress!, unittest.equals('foo'), ); - checkUnnamed21(o.subDestinations!); + checkUnnamed22(o.subDestinations!); unittest.expect(o.takeout!, unittest.isTrue); - checkUnnamed22(o.types!); + checkUnnamed23(o.types!); unittest.expect( o.userRatingCount!, unittest.equals(42), @@ -1472,12 +1503,12 @@ void checkGoogleMapsPlacesV1PlaceAccessibilityOptions( buildCounterGoogleMapsPlacesV1PlaceAccessibilityOptions--; } -core.List buildUnnamed23() => [ +core.List buildUnnamed24() => [ 'foo', 'foo', ]; -void checkUnnamed23(core.List o) { +void checkUnnamed24(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -1498,7 +1529,7 @@ api.GoogleMapsPlacesV1PlaceAddressComponent o.languageCode = 'foo'; o.longText = 'foo'; o.shortText = 'foo'; - o.types = buildUnnamed23(); + o.types = buildUnnamed24(); } buildCounterGoogleMapsPlacesV1PlaceAddressComponent--; return o; @@ -1520,17 +1551,17 @@ void checkGoogleMapsPlacesV1PlaceAddressComponent( o.shortText!, unittest.equals('foo'), ); - checkUnnamed23(o.types!); + checkUnnamed24(o.types!); } buildCounterGoogleMapsPlacesV1PlaceAddressComponent--; } -core.List buildUnnamed24() => [ +core.List buildUnnamed25() => [ buildGoogleMapsPlacesV1ContentBlock(), buildGoogleMapsPlacesV1ContentBlock(), ]; -void checkUnnamed24(core.List o) { +void checkUnnamed25(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleMapsPlacesV1ContentBlock(o[0]); checkGoogleMapsPlacesV1ContentBlock(o[1]); @@ -1542,7 +1573,8 @@ api.GoogleMapsPlacesV1PlaceAreaSummary final o = api.GoogleMapsPlacesV1PlaceAreaSummary(); 
buildCounterGoogleMapsPlacesV1PlaceAreaSummary++; if (buildCounterGoogleMapsPlacesV1PlaceAreaSummary < 3) { - o.contentBlocks = buildUnnamed24(); + o.contentBlocks = buildUnnamed25(); + o.flagContentUri = 'foo'; } buildCounterGoogleMapsPlacesV1PlaceAreaSummary--; return o; @@ -1552,7 +1584,11 @@ void checkGoogleMapsPlacesV1PlaceAreaSummary( api.GoogleMapsPlacesV1PlaceAreaSummary o) { buildCounterGoogleMapsPlacesV1PlaceAreaSummary++; if (buildCounterGoogleMapsPlacesV1PlaceAreaSummary < 3) { - checkUnnamed24(o.contentBlocks!); + checkUnnamed25(o.contentBlocks!); + unittest.expect( + o.flagContentUri!, + unittest.equals('foo'), + ); } buildCounterGoogleMapsPlacesV1PlaceAreaSummary--; } @@ -1586,6 +1622,35 @@ void checkGoogleMapsPlacesV1PlaceAttribution( buildCounterGoogleMapsPlacesV1PlaceAttribution--; } +core.int buildCounterGoogleMapsPlacesV1PlaceContainingPlace = 0; +api.GoogleMapsPlacesV1PlaceContainingPlace + buildGoogleMapsPlacesV1PlaceContainingPlace() { + final o = api.GoogleMapsPlacesV1PlaceContainingPlace(); + buildCounterGoogleMapsPlacesV1PlaceContainingPlace++; + if (buildCounterGoogleMapsPlacesV1PlaceContainingPlace < 3) { + o.id = 'foo'; + o.name = 'foo'; + } + buildCounterGoogleMapsPlacesV1PlaceContainingPlace--; + return o; +} + +void checkGoogleMapsPlacesV1PlaceContainingPlace( + api.GoogleMapsPlacesV1PlaceContainingPlace o) { + buildCounterGoogleMapsPlacesV1PlaceContainingPlace++; + if (buildCounterGoogleMapsPlacesV1PlaceContainingPlace < 3) { + unittest.expect( + o.id!, + unittest.equals('foo'), + ); + unittest.expect( + o.name!, + unittest.equals('foo'), + ); + } + buildCounterGoogleMapsPlacesV1PlaceContainingPlace--; +} + core.int buildCounterGoogleMapsPlacesV1PlaceGenerativeSummary = 0; api.GoogleMapsPlacesV1PlaceGenerativeSummary buildGoogleMapsPlacesV1PlaceGenerativeSummary() { @@ -1593,7 +1658,9 @@ api.GoogleMapsPlacesV1PlaceGenerativeSummary buildCounterGoogleMapsPlacesV1PlaceGenerativeSummary++; if (buildCounterGoogleMapsPlacesV1PlaceGenerativeSummary < 3) { o.description = buildGoogleTypeLocalizedText(); + o.descriptionFlagContentUri = 'foo'; o.overview = buildGoogleTypeLocalizedText(); + o.overviewFlagContentUri = 'foo'; o.references = buildGoogleMapsPlacesV1References(); } buildCounterGoogleMapsPlacesV1PlaceGenerativeSummary--; @@ -1605,43 +1672,95 @@ void checkGoogleMapsPlacesV1PlaceGenerativeSummary( buildCounterGoogleMapsPlacesV1PlaceGenerativeSummary++; if (buildCounterGoogleMapsPlacesV1PlaceGenerativeSummary < 3) { checkGoogleTypeLocalizedText(o.description!); + unittest.expect( + o.descriptionFlagContentUri!, + unittest.equals('foo'), + ); checkGoogleTypeLocalizedText(o.overview!); + unittest.expect( + o.overviewFlagContentUri!, + unittest.equals('foo'), + ); checkGoogleMapsPlacesV1References(o.references!); } buildCounterGoogleMapsPlacesV1PlaceGenerativeSummary--; } -core.List buildUnnamed25() => [ +core.int buildCounterGoogleMapsPlacesV1PlaceGoogleMapsLinks = 0; +api.GoogleMapsPlacesV1PlaceGoogleMapsLinks + buildGoogleMapsPlacesV1PlaceGoogleMapsLinks() { + final o = api.GoogleMapsPlacesV1PlaceGoogleMapsLinks(); + buildCounterGoogleMapsPlacesV1PlaceGoogleMapsLinks++; + if (buildCounterGoogleMapsPlacesV1PlaceGoogleMapsLinks < 3) { + o.directionsUri = 'foo'; + o.photosUri = 'foo'; + o.placeUri = 'foo'; + o.reviewsUri = 'foo'; + o.writeAReviewUri = 'foo'; + } + buildCounterGoogleMapsPlacesV1PlaceGoogleMapsLinks--; + return o; +} + +void checkGoogleMapsPlacesV1PlaceGoogleMapsLinks( + api.GoogleMapsPlacesV1PlaceGoogleMapsLinks o) { + 
buildCounterGoogleMapsPlacesV1PlaceGoogleMapsLinks++; + if (buildCounterGoogleMapsPlacesV1PlaceGoogleMapsLinks < 3) { + unittest.expect( + o.directionsUri!, + unittest.equals('foo'), + ); + unittest.expect( + o.photosUri!, + unittest.equals('foo'), + ); + unittest.expect( + o.placeUri!, + unittest.equals('foo'), + ); + unittest.expect( + o.reviewsUri!, + unittest.equals('foo'), + ); + unittest.expect( + o.writeAReviewUri!, + unittest.equals('foo'), + ); + } + buildCounterGoogleMapsPlacesV1PlaceGoogleMapsLinks--; +} + +core.List buildUnnamed26() => [ buildGoogleMapsPlacesV1PlaceOpeningHoursPeriod(), buildGoogleMapsPlacesV1PlaceOpeningHoursPeriod(), ]; -void checkUnnamed25( +void checkUnnamed26( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleMapsPlacesV1PlaceOpeningHoursPeriod(o[0]); checkGoogleMapsPlacesV1PlaceOpeningHoursPeriod(o[1]); } -core.List buildUnnamed26() => +core.List buildUnnamed27() => [ buildGoogleMapsPlacesV1PlaceOpeningHoursSpecialDay(), buildGoogleMapsPlacesV1PlaceOpeningHoursSpecialDay(), ]; -void checkUnnamed26( +void checkUnnamed27( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleMapsPlacesV1PlaceOpeningHoursSpecialDay(o[0]); checkGoogleMapsPlacesV1PlaceOpeningHoursSpecialDay(o[1]); } -core.List buildUnnamed27() => [ +core.List buildUnnamed28() => [ 'foo', 'foo', ]; -void checkUnnamed27(core.List o) { +void checkUnnamed28(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -1659,11 +1778,13 @@ api.GoogleMapsPlacesV1PlaceOpeningHours final o = api.GoogleMapsPlacesV1PlaceOpeningHours(); buildCounterGoogleMapsPlacesV1PlaceOpeningHours++; if (buildCounterGoogleMapsPlacesV1PlaceOpeningHours < 3) { + o.nextCloseTime = 'foo'; + o.nextOpenTime = 'foo'; o.openNow = true; - o.periods = buildUnnamed25(); + o.periods = buildUnnamed26(); o.secondaryHoursType = 'foo'; - o.specialDays = buildUnnamed26(); - o.weekdayDescriptions = buildUnnamed27(); + o.specialDays = buildUnnamed27(); + o.weekdayDescriptions = buildUnnamed28(); } buildCounterGoogleMapsPlacesV1PlaceOpeningHours--; return o; @@ -1673,14 +1794,22 @@ void checkGoogleMapsPlacesV1PlaceOpeningHours( api.GoogleMapsPlacesV1PlaceOpeningHours o) { buildCounterGoogleMapsPlacesV1PlaceOpeningHours++; if (buildCounterGoogleMapsPlacesV1PlaceOpeningHours < 3) { + unittest.expect( + o.nextCloseTime!, + unittest.equals('foo'), + ); + unittest.expect( + o.nextOpenTime!, + unittest.equals('foo'), + ); unittest.expect(o.openNow!, unittest.isTrue); - checkUnnamed25(o.periods!); + checkUnnamed26(o.periods!); unittest.expect( o.secondaryHoursType!, unittest.equals('foo'), ); - checkUnnamed26(o.specialDays!); - checkUnnamed27(o.weekdayDescriptions!); + checkUnnamed27(o.specialDays!); + checkUnnamed28(o.weekdayDescriptions!); } buildCounterGoogleMapsPlacesV1PlaceOpeningHours--; } @@ -1906,12 +2035,33 @@ void checkGoogleMapsPlacesV1Polyline(api.GoogleMapsPlacesV1Polyline o) { buildCounterGoogleMapsPlacesV1Polyline--; } -core.List buildUnnamed28() => [ +core.int buildCounterGoogleMapsPlacesV1PriceRange = 0; +api.GoogleMapsPlacesV1PriceRange buildGoogleMapsPlacesV1PriceRange() { + final o = api.GoogleMapsPlacesV1PriceRange(); + buildCounterGoogleMapsPlacesV1PriceRange++; + if (buildCounterGoogleMapsPlacesV1PriceRange < 3) { + o.endPrice = buildGoogleTypeMoney(); + o.startPrice = buildGoogleTypeMoney(); + } + buildCounterGoogleMapsPlacesV1PriceRange--; + return o; +} + +void checkGoogleMapsPlacesV1PriceRange(api.GoogleMapsPlacesV1PriceRange o) { + 
buildCounterGoogleMapsPlacesV1PriceRange++; + if (buildCounterGoogleMapsPlacesV1PriceRange < 3) { + checkGoogleTypeMoney(o.endPrice!); + checkGoogleTypeMoney(o.startPrice!); + } + buildCounterGoogleMapsPlacesV1PriceRange--; +} + +core.List buildUnnamed29() => [ 'foo', 'foo', ]; -void checkUnnamed28(core.List o) { +void checkUnnamed29(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -1923,12 +2073,12 @@ void checkUnnamed28(core.List o) { ); } -core.List buildUnnamed29() => [ +core.List buildUnnamed30() => [ buildGoogleMapsPlacesV1Review(), buildGoogleMapsPlacesV1Review(), ]; -void checkUnnamed29(core.List o) { +void checkUnnamed30(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleMapsPlacesV1Review(o[0]); checkGoogleMapsPlacesV1Review(o[1]); @@ -1939,8 +2089,8 @@ api.GoogleMapsPlacesV1References buildGoogleMapsPlacesV1References() { final o = api.GoogleMapsPlacesV1References(); buildCounterGoogleMapsPlacesV1References++; if (buildCounterGoogleMapsPlacesV1References < 3) { - o.places = buildUnnamed28(); - o.reviews = buildUnnamed29(); + o.places = buildUnnamed29(); + o.reviews = buildUnnamed30(); } buildCounterGoogleMapsPlacesV1References--; return o; @@ -1949,8 +2099,8 @@ api.GoogleMapsPlacesV1References buildGoogleMapsPlacesV1References() { void checkGoogleMapsPlacesV1References(api.GoogleMapsPlacesV1References o) { buildCounterGoogleMapsPlacesV1References++; if (buildCounterGoogleMapsPlacesV1References < 3) { - checkUnnamed28(o.places!); - checkUnnamed29(o.reviews!); + checkUnnamed29(o.places!); + checkUnnamed30(o.reviews!); } buildCounterGoogleMapsPlacesV1References--; } @@ -1961,6 +2111,8 @@ api.GoogleMapsPlacesV1Review buildGoogleMapsPlacesV1Review() { buildCounterGoogleMapsPlacesV1Review++; if (buildCounterGoogleMapsPlacesV1Review < 3) { o.authorAttribution = buildGoogleMapsPlacesV1AuthorAttribution(); + o.flagContentUri = 'foo'; + o.googleMapsUri = 'foo'; o.name = 'foo'; o.originalText = buildGoogleTypeLocalizedText(); o.publishTime = 'foo'; @@ -1976,6 +2128,14 @@ void checkGoogleMapsPlacesV1Review(api.GoogleMapsPlacesV1Review o) { buildCounterGoogleMapsPlacesV1Review++; if (buildCounterGoogleMapsPlacesV1Review < 3) { checkGoogleMapsPlacesV1AuthorAttribution(o.authorAttribution!); + unittest.expect( + o.flagContentUri!, + unittest.equals('foo'), + ); + unittest.expect( + o.googleMapsUri!, + unittest.equals('foo'), + ); unittest.expect( o.name!, unittest.equals('foo'), @@ -2057,12 +2217,12 @@ void checkGoogleMapsPlacesV1RoutingParameters( buildCounterGoogleMapsPlacesV1RoutingParameters--; } -core.List buildUnnamed30() => [ +core.List buildUnnamed31() => [ buildGoogleMapsPlacesV1RoutingSummaryLeg(), buildGoogleMapsPlacesV1RoutingSummaryLeg(), ]; -void checkUnnamed30(core.List o) { +void checkUnnamed31(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleMapsPlacesV1RoutingSummaryLeg(o[0]); checkGoogleMapsPlacesV1RoutingSummaryLeg(o[1]); @@ -2073,7 +2233,8 @@ api.GoogleMapsPlacesV1RoutingSummary buildGoogleMapsPlacesV1RoutingSummary() { final o = api.GoogleMapsPlacesV1RoutingSummary(); buildCounterGoogleMapsPlacesV1RoutingSummary++; if (buildCounterGoogleMapsPlacesV1RoutingSummary < 3) { - o.legs = buildUnnamed30(); + o.directionsUri = 'foo'; + o.legs = buildUnnamed31(); } buildCounterGoogleMapsPlacesV1RoutingSummary--; return o; @@ -2083,7 +2244,11 @@ void checkGoogleMapsPlacesV1RoutingSummary( api.GoogleMapsPlacesV1RoutingSummary o) { buildCounterGoogleMapsPlacesV1RoutingSummary++; if 
(buildCounterGoogleMapsPlacesV1RoutingSummary < 3) { - checkUnnamed30(o.legs!); + unittest.expect( + o.directionsUri!, + unittest.equals('foo'), + ); + checkUnnamed31(o.legs!); } buildCounterGoogleMapsPlacesV1RoutingSummary--; } @@ -2117,12 +2282,12 @@ void checkGoogleMapsPlacesV1RoutingSummaryLeg( buildCounterGoogleMapsPlacesV1RoutingSummaryLeg--; } -core.List buildUnnamed31() => [ +core.List buildUnnamed32() => [ 'foo', 'foo', ]; -void checkUnnamed31(core.List o) { +void checkUnnamed32(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -2134,12 +2299,12 @@ void checkUnnamed31(core.List o) { ); } -core.List buildUnnamed32() => [ +core.List buildUnnamed33() => [ 'foo', 'foo', ]; -void checkUnnamed32(core.List o) { +void checkUnnamed33(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -2151,12 +2316,12 @@ void checkUnnamed32(core.List o) { ); } -core.List buildUnnamed33() => [ +core.List buildUnnamed34() => [ 'foo', 'foo', ]; -void checkUnnamed33(core.List o) { +void checkUnnamed34(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -2168,12 +2333,12 @@ void checkUnnamed33(core.List o) { ); } -core.List buildUnnamed34() => [ +core.List buildUnnamed35() => [ 'foo', 'foo', ]; -void checkUnnamed34(core.List o) { +void checkUnnamed35(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -2191,10 +2356,10 @@ api.GoogleMapsPlacesV1SearchNearbyRequest final o = api.GoogleMapsPlacesV1SearchNearbyRequest(); buildCounterGoogleMapsPlacesV1SearchNearbyRequest++; if (buildCounterGoogleMapsPlacesV1SearchNearbyRequest < 3) { - o.excludedPrimaryTypes = buildUnnamed31(); - o.excludedTypes = buildUnnamed32(); - o.includedPrimaryTypes = buildUnnamed33(); - o.includedTypes = buildUnnamed34(); + o.excludedPrimaryTypes = buildUnnamed32(); + o.excludedTypes = buildUnnamed33(); + o.includedPrimaryTypes = buildUnnamed34(); + o.includedTypes = buildUnnamed35(); o.languageCode = 'foo'; o.locationRestriction = buildGoogleMapsPlacesV1SearchNearbyRequestLocationRestriction(); @@ -2211,10 +2376,10 @@ void checkGoogleMapsPlacesV1SearchNearbyRequest( api.GoogleMapsPlacesV1SearchNearbyRequest o) { buildCounterGoogleMapsPlacesV1SearchNearbyRequest++; if (buildCounterGoogleMapsPlacesV1SearchNearbyRequest < 3) { - checkUnnamed31(o.excludedPrimaryTypes!); - checkUnnamed32(o.excludedTypes!); - checkUnnamed33(o.includedPrimaryTypes!); - checkUnnamed34(o.includedTypes!); + checkUnnamed32(o.excludedPrimaryTypes!); + checkUnnamed33(o.excludedTypes!); + checkUnnamed34(o.includedPrimaryTypes!); + checkUnnamed35(o.includedTypes!); unittest.expect( o.languageCode!, unittest.equals('foo'), @@ -2262,23 +2427,23 @@ void checkGoogleMapsPlacesV1SearchNearbyRequestLocationRestriction( buildCounterGoogleMapsPlacesV1SearchNearbyRequestLocationRestriction--; } -core.List buildUnnamed35() => [ +core.List buildUnnamed36() => [ buildGoogleMapsPlacesV1Place(), buildGoogleMapsPlacesV1Place(), ]; -void checkUnnamed35(core.List o) { +void checkUnnamed36(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleMapsPlacesV1Place(o[0]); checkGoogleMapsPlacesV1Place(o[1]); } -core.List buildUnnamed36() => [ +core.List buildUnnamed37() => [ buildGoogleMapsPlacesV1RoutingSummary(), buildGoogleMapsPlacesV1RoutingSummary(), ]; -void checkUnnamed36(core.List o) { +void checkUnnamed37(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleMapsPlacesV1RoutingSummary(o[0]); 
checkGoogleMapsPlacesV1RoutingSummary(o[1]); @@ -2290,8 +2455,8 @@ api.GoogleMapsPlacesV1SearchNearbyResponse final o = api.GoogleMapsPlacesV1SearchNearbyResponse(); buildCounterGoogleMapsPlacesV1SearchNearbyResponse++; if (buildCounterGoogleMapsPlacesV1SearchNearbyResponse < 3) { - o.places = buildUnnamed35(); - o.routingSummaries = buildUnnamed36(); + o.places = buildUnnamed36(); + o.routingSummaries = buildUnnamed37(); } buildCounterGoogleMapsPlacesV1SearchNearbyResponse--; return o; @@ -2301,18 +2466,18 @@ void checkGoogleMapsPlacesV1SearchNearbyResponse( api.GoogleMapsPlacesV1SearchNearbyResponse o) { buildCounterGoogleMapsPlacesV1SearchNearbyResponse++; if (buildCounterGoogleMapsPlacesV1SearchNearbyResponse < 3) { - checkUnnamed35(o.places!); - checkUnnamed36(o.routingSummaries!); + checkUnnamed36(o.places!); + checkUnnamed37(o.routingSummaries!); } buildCounterGoogleMapsPlacesV1SearchNearbyResponse--; } -core.List buildUnnamed37() => [ +core.List buildUnnamed38() => [ 'foo', 'foo', ]; -void checkUnnamed37(core.List o) { +void checkUnnamed38(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -2331,6 +2496,7 @@ api.GoogleMapsPlacesV1SearchTextRequest buildCounterGoogleMapsPlacesV1SearchTextRequest++; if (buildCounterGoogleMapsPlacesV1SearchTextRequest < 3) { o.evOptions = buildGoogleMapsPlacesV1SearchTextRequestEVOptions(); + o.includePureServiceAreaBusinesses = true; o.includedType = 'foo'; o.languageCode = 'foo'; o.locationBias = buildGoogleMapsPlacesV1SearchTextRequestLocationBias(); @@ -2341,7 +2507,7 @@ api.GoogleMapsPlacesV1SearchTextRequest o.openNow = true; o.pageSize = 42; o.pageToken = 'foo'; - o.priceLevels = buildUnnamed37(); + o.priceLevels = buildUnnamed38(); o.rankPreference = 'foo'; o.regionCode = 'foo'; o.routingParameters = buildGoogleMapsPlacesV1RoutingParameters(); @@ -2359,6 +2525,7 @@ void checkGoogleMapsPlacesV1SearchTextRequest( buildCounterGoogleMapsPlacesV1SearchTextRequest++; if (buildCounterGoogleMapsPlacesV1SearchTextRequest < 3) { checkGoogleMapsPlacesV1SearchTextRequestEVOptions(o.evOptions!); + unittest.expect(o.includePureServiceAreaBusinesses!, unittest.isTrue); unittest.expect( o.includedType!, unittest.equals('foo'), @@ -2387,7 +2554,7 @@ void checkGoogleMapsPlacesV1SearchTextRequest( o.pageToken!, unittest.equals('foo'), ); - checkUnnamed37(o.priceLevels!); + checkUnnamed38(o.priceLevels!); unittest.expect( o.rankPreference!, unittest.equals('foo'), @@ -2408,12 +2575,12 @@ void checkGoogleMapsPlacesV1SearchTextRequest( buildCounterGoogleMapsPlacesV1SearchTextRequest--; } -core.List buildUnnamed38() => [ +core.List buildUnnamed39() => [ 'foo', 'foo', ]; -void checkUnnamed38(core.List o) { +void checkUnnamed39(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -2431,7 +2598,7 @@ api.GoogleMapsPlacesV1SearchTextRequestEVOptions final o = api.GoogleMapsPlacesV1SearchTextRequestEVOptions(); buildCounterGoogleMapsPlacesV1SearchTextRequestEVOptions++; if (buildCounterGoogleMapsPlacesV1SearchTextRequestEVOptions < 3) { - o.connectorTypes = buildUnnamed38(); + o.connectorTypes = buildUnnamed39(); o.minimumChargingRateKw = 42.0; } buildCounterGoogleMapsPlacesV1SearchTextRequestEVOptions--; @@ -2442,7 +2609,7 @@ void checkGoogleMapsPlacesV1SearchTextRequestEVOptions( api.GoogleMapsPlacesV1SearchTextRequestEVOptions o) { buildCounterGoogleMapsPlacesV1SearchTextRequestEVOptions++; if (buildCounterGoogleMapsPlacesV1SearchTextRequestEVOptions < 3) { - checkUnnamed38(o.connectorTypes!); + 
checkUnnamed39(o.connectorTypes!); unittest.expect( o.minimumChargingRateKw!, unittest.equals(42.0), @@ -2520,34 +2687,34 @@ void checkGoogleMapsPlacesV1SearchTextRequestSearchAlongRouteParameters( buildCounterGoogleMapsPlacesV1SearchTextRequestSearchAlongRouteParameters--; } -core.List buildUnnamed39() => [ +core.List buildUnnamed40() => [ buildGoogleMapsPlacesV1ContextualContent(), buildGoogleMapsPlacesV1ContextualContent(), ]; -void checkUnnamed39(core.List o) { +void checkUnnamed40(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleMapsPlacesV1ContextualContent(o[0]); checkGoogleMapsPlacesV1ContextualContent(o[1]); } -core.List buildUnnamed40() => [ +core.List buildUnnamed41() => [ buildGoogleMapsPlacesV1Place(), buildGoogleMapsPlacesV1Place(), ]; -void checkUnnamed40(core.List o) { +void checkUnnamed41(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleMapsPlacesV1Place(o[0]); checkGoogleMapsPlacesV1Place(o[1]); } -core.List buildUnnamed41() => [ +core.List buildUnnamed42() => [ buildGoogleMapsPlacesV1RoutingSummary(), buildGoogleMapsPlacesV1RoutingSummary(), ]; -void checkUnnamed41(core.List o) { +void checkUnnamed42(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleMapsPlacesV1RoutingSummary(o[0]); checkGoogleMapsPlacesV1RoutingSummary(o[1]); @@ -2559,10 +2726,11 @@ api.GoogleMapsPlacesV1SearchTextResponse final o = api.GoogleMapsPlacesV1SearchTextResponse(); buildCounterGoogleMapsPlacesV1SearchTextResponse++; if (buildCounterGoogleMapsPlacesV1SearchTextResponse < 3) { - o.contextualContents = buildUnnamed39(); + o.contextualContents = buildUnnamed40(); o.nextPageToken = 'foo'; - o.places = buildUnnamed40(); - o.routingSummaries = buildUnnamed41(); + o.places = buildUnnamed41(); + o.routingSummaries = buildUnnamed42(); + o.searchUri = 'foo'; } buildCounterGoogleMapsPlacesV1SearchTextResponse--; return o; @@ -2572,13 +2740,17 @@ void checkGoogleMapsPlacesV1SearchTextResponse( api.GoogleMapsPlacesV1SearchTextResponse o) { buildCounterGoogleMapsPlacesV1SearchTextResponse++; if (buildCounterGoogleMapsPlacesV1SearchTextResponse < 3) { - checkUnnamed39(o.contextualContents!); + checkUnnamed40(o.contextualContents!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed40(o.places!); - checkUnnamed41(o.routingSummaries!); + checkUnnamed41(o.places!); + checkUnnamed42(o.routingSummaries!); + unittest.expect( + o.searchUri!, + unittest.equals('foo'), + ); } buildCounterGoogleMapsPlacesV1SearchTextResponse--; } @@ -3098,6 +3270,16 @@ void main() { }); }); + unittest.group('obj-schema-GoogleMapsPlacesV1PlaceContainingPlace', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleMapsPlacesV1PlaceContainingPlace(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleMapsPlacesV1PlaceContainingPlace.fromJson( + oJson as core.Map); + checkGoogleMapsPlacesV1PlaceContainingPlace(od); + }); + }); + unittest.group('obj-schema-GoogleMapsPlacesV1PlaceGenerativeSummary', () { unittest.test('to-json--from-json', () async { final o = buildGoogleMapsPlacesV1PlaceGenerativeSummary(); @@ -3108,6 +3290,16 @@ void main() { }); }); + unittest.group('obj-schema-GoogleMapsPlacesV1PlaceGoogleMapsLinks', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleMapsPlacesV1PlaceGoogleMapsLinks(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleMapsPlacesV1PlaceGoogleMapsLinks.fromJson( + oJson as core.Map); + 
checkGoogleMapsPlacesV1PlaceGoogleMapsLinks(od); + }); + }); + unittest.group('obj-schema-GoogleMapsPlacesV1PlaceOpeningHours', () { unittest.test('to-json--from-json', () async { final o = buildGoogleMapsPlacesV1PlaceOpeningHours(); @@ -3200,6 +3392,16 @@ void main() { }); }); + unittest.group('obj-schema-GoogleMapsPlacesV1PriceRange', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleMapsPlacesV1PriceRange(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleMapsPlacesV1PriceRange.fromJson( + oJson as core.Map); + checkGoogleMapsPlacesV1PriceRange(od); + }); + }); + unittest.group('obj-schema-GoogleMapsPlacesV1References', () { unittest.test('to-json--from-json', () async { final o = buildGoogleMapsPlacesV1References(); diff --git a/generated/googleapis/test/playintegrity/v1_test.dart b/generated/googleapis/test/playintegrity/v1_test.dart index 688f349a2..44f0dd0f0 100644 --- a/generated/googleapis/test/playintegrity/v1_test.dart +++ b/generated/googleapis/test/playintegrity/v1_test.dart @@ -210,6 +210,28 @@ void checkDecodeIntegrityTokenResponse(api.DecodeIntegrityTokenResponse o) { buildCounterDecodeIntegrityTokenResponse--; } +core.int buildCounterDeviceAttributes = 0; +api.DeviceAttributes buildDeviceAttributes() { + final o = api.DeviceAttributes(); + buildCounterDeviceAttributes++; + if (buildCounterDeviceAttributes < 3) { + o.sdkVersion = 42; + } + buildCounterDeviceAttributes--; + return o; +} + +void checkDeviceAttributes(api.DeviceAttributes o) { + buildCounterDeviceAttributes++; + if (buildCounterDeviceAttributes < 3) { + unittest.expect( + o.sdkVersion!, + unittest.equals(42), + ); + } + buildCounterDeviceAttributes--; +} + core.List buildUnnamed2() => [ 'foo', 'foo', @@ -227,13 +249,32 @@ void checkUnnamed2(core.List o) { ); } +core.List buildUnnamed3() => [ + 'foo', + 'foo', + ]; + +void checkUnnamed3(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o[0], + unittest.equals('foo'), + ); + unittest.expect( + o[1], + unittest.equals('foo'), + ); +} + core.int buildCounterDeviceIntegrity = 0; api.DeviceIntegrity buildDeviceIntegrity() { final o = api.DeviceIntegrity(); buildCounterDeviceIntegrity++; if (buildCounterDeviceIntegrity < 3) { + o.deviceAttributes = buildDeviceAttributes(); o.deviceRecall = buildDeviceRecall(); o.deviceRecognitionVerdict = buildUnnamed2(); + o.legacyDeviceRecognitionVerdict = buildUnnamed3(); o.recentDeviceActivity = buildRecentDeviceActivity(); } buildCounterDeviceIntegrity--; @@ -243,8 +284,10 @@ api.DeviceIntegrity buildDeviceIntegrity() { void checkDeviceIntegrity(api.DeviceIntegrity o) { buildCounterDeviceIntegrity++; if (buildCounterDeviceIntegrity < 3) { + checkDeviceAttributes(o.deviceAttributes!); checkDeviceRecall(o.deviceRecall!); checkUnnamed2(o.deviceRecognitionVerdict!); + checkUnnamed3(o.legacyDeviceRecognitionVerdict!); checkRecentDeviceActivity(o.recentDeviceActivity!); } buildCounterDeviceIntegrity--; @@ -557,6 +600,16 @@ void main() { }); }); + unittest.group('obj-schema-DeviceAttributes', () { + unittest.test('to-json--from-json', () async { + final o = buildDeviceAttributes(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.DeviceAttributes.fromJson( + oJson as core.Map); + checkDeviceAttributes(od); + }); + }); + unittest.group('obj-schema-DeviceIntegrity', () { unittest.test('to-json--from-json', () async { final o = buildDeviceIntegrity(); diff --git a/generated/googleapis/test/redis/v1_test.dart 
b/generated/googleapis/test/redis/v1_test.dart index 08adea5de..ad02cf7ce 100644 --- a/generated/googleapis/test/redis/v1_test.dart +++ b/generated/googleapis/test/redis/v1_test.dart @@ -48,12 +48,232 @@ void checkAOFConfig(api.AOFConfig o) { buildCounterAOFConfig--; } -core.List buildUnnamed0() => [ +core.int buildCounterAutomatedBackupConfig = 0; +api.AutomatedBackupConfig buildAutomatedBackupConfig() { + final o = api.AutomatedBackupConfig(); + buildCounterAutomatedBackupConfig++; + if (buildCounterAutomatedBackupConfig < 3) { + o.automatedBackupMode = 'foo'; + o.fixedFrequencySchedule = buildFixedFrequencySchedule(); + o.retention = 'foo'; + } + buildCounterAutomatedBackupConfig--; + return o; +} + +void checkAutomatedBackupConfig(api.AutomatedBackupConfig o) { + buildCounterAutomatedBackupConfig++; + if (buildCounterAutomatedBackupConfig < 3) { + unittest.expect( + o.automatedBackupMode!, + unittest.equals('foo'), + ); + checkFixedFrequencySchedule(o.fixedFrequencySchedule!); + unittest.expect( + o.retention!, + unittest.equals('foo'), + ); + } + buildCounterAutomatedBackupConfig--; +} + +core.List buildUnnamed0() => [ + buildBackupFile(), + buildBackupFile(), + ]; + +void checkUnnamed0(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkBackupFile(o[0]); + checkBackupFile(o[1]); +} + +core.int buildCounterBackup = 0; +api.Backup buildBackup() { + final o = api.Backup(); + buildCounterBackup++; + if (buildCounterBackup < 3) { + o.backupFiles = buildUnnamed0(); + o.backupType = 'foo'; + o.cluster = 'foo'; + o.clusterUid = 'foo'; + o.createTime = 'foo'; + o.engineVersion = 'foo'; + o.expireTime = 'foo'; + o.name = 'foo'; + o.nodeType = 'foo'; + o.replicaCount = 42; + o.shardCount = 42; + o.state = 'foo'; + o.totalSizeBytes = 'foo'; + o.uid = 'foo'; + } + buildCounterBackup--; + return o; +} + +void checkBackup(api.Backup o) { + buildCounterBackup++; + if (buildCounterBackup < 3) { + checkUnnamed0(o.backupFiles!); + unittest.expect( + o.backupType!, + unittest.equals('foo'), + ); + unittest.expect( + o.cluster!, + unittest.equals('foo'), + ); + unittest.expect( + o.clusterUid!, + unittest.equals('foo'), + ); + unittest.expect( + o.createTime!, + unittest.equals('foo'), + ); + unittest.expect( + o.engineVersion!, + unittest.equals('foo'), + ); + unittest.expect( + o.expireTime!, + unittest.equals('foo'), + ); + unittest.expect( + o.name!, + unittest.equals('foo'), + ); + unittest.expect( + o.nodeType!, + unittest.equals('foo'), + ); + unittest.expect( + o.replicaCount!, + unittest.equals(42), + ); + unittest.expect( + o.shardCount!, + unittest.equals(42), + ); + unittest.expect( + o.state!, + unittest.equals('foo'), + ); + unittest.expect( + o.totalSizeBytes!, + unittest.equals('foo'), + ); + unittest.expect( + o.uid!, + unittest.equals('foo'), + ); + } + buildCounterBackup--; +} + +core.int buildCounterBackupClusterRequest = 0; +api.BackupClusterRequest buildBackupClusterRequest() { + final o = api.BackupClusterRequest(); + buildCounterBackupClusterRequest++; + if (buildCounterBackupClusterRequest < 3) { + o.backupId = 'foo'; + o.ttl = 'foo'; + } + buildCounterBackupClusterRequest--; + return o; +} + +void checkBackupClusterRequest(api.BackupClusterRequest o) { + buildCounterBackupClusterRequest++; + if (buildCounterBackupClusterRequest < 3) { + unittest.expect( + o.backupId!, + unittest.equals('foo'), + ); + unittest.expect( + o.ttl!, + unittest.equals('foo'), + ); + } + buildCounterBackupClusterRequest--; +} + +core.int buildCounterBackupCollection = 0; 
+api.BackupCollection buildBackupCollection() { + final o = api.BackupCollection(); + buildCounterBackupCollection++; + if (buildCounterBackupCollection < 3) { + o.cluster = 'foo'; + o.clusterUid = 'foo'; + o.name = 'foo'; + o.uid = 'foo'; + } + buildCounterBackupCollection--; + return o; +} + +void checkBackupCollection(api.BackupCollection o) { + buildCounterBackupCollection++; + if (buildCounterBackupCollection < 3) { + unittest.expect( + o.cluster!, + unittest.equals('foo'), + ); + unittest.expect( + o.clusterUid!, + unittest.equals('foo'), + ); + unittest.expect( + o.name!, + unittest.equals('foo'), + ); + unittest.expect( + o.uid!, + unittest.equals('foo'), + ); + } + buildCounterBackupCollection--; +} + +core.int buildCounterBackupFile = 0; +api.BackupFile buildBackupFile() { + final o = api.BackupFile(); + buildCounterBackupFile++; + if (buildCounterBackupFile < 3) { + o.createTime = 'foo'; + o.fileName = 'foo'; + o.sizeBytes = 'foo'; + } + buildCounterBackupFile--; + return o; +} + +void checkBackupFile(api.BackupFile o) { + buildCounterBackupFile++; + if (buildCounterBackupFile < 3) { + unittest.expect( + o.createTime!, + unittest.equals('foo'), + ); + unittest.expect( + o.fileName!, + unittest.equals('foo'), + ); + unittest.expect( + o.sizeBytes!, + unittest.equals('foo'), + ); + } + buildCounterBackupFile--; +} + +core.List buildUnnamed1() => [ 'foo', 'foo', ]; -void checkUnnamed0(core.List o) { +void checkUnnamed1(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -70,7 +290,7 @@ api.CertChain buildCertChain() { final o = api.CertChain(); buildCounterCertChain++; if (buildCounterCertChain < 3) { - o.certificates = buildUnnamed0(); + o.certificates = buildUnnamed1(); } buildCounterCertChain--; return o; @@ -79,7 +299,7 @@ api.CertChain buildCertChain() { void checkCertChain(api.CertChain o) { buildCounterCertChain++; if (buildCounterCertChain < 3) { - checkUnnamed0(o.certificates!); + checkUnnamed1(o.certificates!); } buildCounterCertChain--; } @@ -108,67 +328,67 @@ void checkCertificateAuthority(api.CertificateAuthority o) { buildCounterCertificateAuthority--; } -core.List buildUnnamed1() => [ +core.List buildUnnamed2() => [ buildClusterEndpoint(), buildClusterEndpoint(), ]; -void checkUnnamed1(core.List o) { +void checkUnnamed2(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkClusterEndpoint(o[0]); checkClusterEndpoint(o[1]); } -core.List buildUnnamed2() => [ +core.List buildUnnamed3() => [ buildDiscoveryEndpoint(), buildDiscoveryEndpoint(), ]; -void checkUnnamed2(core.List o) { +void checkUnnamed3(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkDiscoveryEndpoint(o[0]); checkDiscoveryEndpoint(o[1]); } -core.List buildUnnamed3() => [ +core.List buildUnnamed4() => [ buildPscConfig(), buildPscConfig(), ]; -void checkUnnamed3(core.List o) { +void checkUnnamed4(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkPscConfig(o[0]); checkPscConfig(o[1]); } -core.List buildUnnamed4() => [ +core.List buildUnnamed5() => [ buildPscConnection(), buildPscConnection(), ]; -void checkUnnamed4(core.List o) { +void checkUnnamed5(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkPscConnection(o[0]); checkPscConnection(o[1]); } -core.List buildUnnamed5() => [ +core.List buildUnnamed6() => [ buildPscServiceAttachment(), buildPscServiceAttachment(), ]; -void checkUnnamed5(core.List o) { +void checkUnnamed6(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkPscServiceAttachment(o[0]); 
checkPscServiceAttachment(o[1]); } -core.Map buildUnnamed6() => { +core.Map buildUnnamed7() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed6(core.Map o) { +void checkUnnamed7(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -186,21 +406,25 @@ api.Cluster buildCluster() { buildCounterCluster++; if (buildCounterCluster < 3) { o.authorizationMode = 'foo'; - o.clusterEndpoints = buildUnnamed1(); + o.automatedBackupConfig = buildAutomatedBackupConfig(); + o.backupCollection = 'foo'; + o.clusterEndpoints = buildUnnamed2(); o.createTime = 'foo'; o.crossClusterReplicationConfig = buildCrossClusterReplicationConfig(); o.deletionProtectionEnabled = true; - o.discoveryEndpoints = buildUnnamed2(); + o.discoveryEndpoints = buildUnnamed3(); + o.gcsSource = buildGcsBackupSource(); o.maintenancePolicy = buildClusterMaintenancePolicy(); o.maintenanceSchedule = buildClusterMaintenanceSchedule(); + o.managedBackupSource = buildManagedBackupSource(); o.name = 'foo'; o.nodeType = 'foo'; o.persistenceConfig = buildClusterPersistenceConfig(); o.preciseSizeGb = 42.0; - o.pscConfigs = buildUnnamed3(); - o.pscConnections = buildUnnamed4(); - o.pscServiceAttachments = buildUnnamed5(); - o.redisConfigs = buildUnnamed6(); + o.pscConfigs = buildUnnamed4(); + o.pscConnections = buildUnnamed5(); + o.pscServiceAttachments = buildUnnamed6(); + o.redisConfigs = buildUnnamed7(); o.replicaCount = 42; o.shardCount = 42; o.sizeGb = 42; @@ -221,16 +445,23 @@ void checkCluster(api.Cluster o) { o.authorizationMode!, unittest.equals('foo'), ); - checkUnnamed1(o.clusterEndpoints!); + checkAutomatedBackupConfig(o.automatedBackupConfig!); + unittest.expect( + o.backupCollection!, + unittest.equals('foo'), + ); + checkUnnamed2(o.clusterEndpoints!); unittest.expect( o.createTime!, unittest.equals('foo'), ); checkCrossClusterReplicationConfig(o.crossClusterReplicationConfig!); unittest.expect(o.deletionProtectionEnabled!, unittest.isTrue); - checkUnnamed2(o.discoveryEndpoints!); + checkUnnamed3(o.discoveryEndpoints!); + checkGcsBackupSource(o.gcsSource!); checkClusterMaintenancePolicy(o.maintenancePolicy!); checkClusterMaintenanceSchedule(o.maintenanceSchedule!); + checkManagedBackupSource(o.managedBackupSource!); unittest.expect( o.name!, unittest.equals('foo'), @@ -244,10 +475,10 @@ void checkCluster(api.Cluster o) { o.preciseSizeGb!, unittest.equals(42.0), ); - checkUnnamed3(o.pscConfigs!); - checkUnnamed4(o.pscConnections!); - checkUnnamed5(o.pscServiceAttachments!); - checkUnnamed6(o.redisConfigs!); + checkUnnamed4(o.pscConfigs!); + checkUnnamed5(o.pscConnections!); + checkUnnamed6(o.pscServiceAttachments!); + checkUnnamed7(o.redisConfigs!); unittest.expect( o.replicaCount!, unittest.equals(42), @@ -278,12 +509,12 @@ void checkCluster(api.Cluster o) { buildCounterCluster--; } -core.List buildUnnamed7() => [ +core.List buildUnnamed8() => [ buildConnectionDetail(), buildConnectionDetail(), ]; -void checkUnnamed7(core.List o) { +void checkUnnamed8(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkConnectionDetail(o[0]); checkConnectionDetail(o[1]); @@ -294,7 +525,7 @@ api.ClusterEndpoint buildClusterEndpoint() { final o = api.ClusterEndpoint(); buildCounterClusterEndpoint++; if (buildCounterClusterEndpoint < 3) { - o.connections = buildUnnamed7(); + o.connections = buildUnnamed8(); } buildCounterClusterEndpoint--; return o; @@ -303,17 +534,17 @@ api.ClusterEndpoint buildClusterEndpoint() { void checkClusterEndpoint(api.ClusterEndpoint o) { buildCounterClusterEndpoint++; if 
(buildCounterClusterEndpoint < 3) { - checkUnnamed7(o.connections!); + checkUnnamed8(o.connections!); } buildCounterClusterEndpoint--; } -core.List buildUnnamed8() => [ +core.List buildUnnamed9() => [ buildClusterWeeklyMaintenanceWindow(), buildClusterWeeklyMaintenanceWindow(), ]; -void checkUnnamed8(core.List o) { +void checkUnnamed9(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkClusterWeeklyMaintenanceWindow(o[0]); checkClusterWeeklyMaintenanceWindow(o[1]); @@ -326,7 +557,7 @@ api.ClusterMaintenancePolicy buildClusterMaintenancePolicy() { if (buildCounterClusterMaintenancePolicy < 3) { o.createTime = 'foo'; o.updateTime = 'foo'; - o.weeklyMaintenanceWindow = buildUnnamed8(); + o.weeklyMaintenanceWindow = buildUnnamed9(); } buildCounterClusterMaintenancePolicy--; return o; @@ -343,7 +574,7 @@ void checkClusterMaintenancePolicy(api.ClusterMaintenancePolicy o) { o.updateTime!, unittest.equals('foo'), ); - checkUnnamed8(o.weeklyMaintenanceWindow!); + checkUnnamed9(o.weeklyMaintenanceWindow!); } buildCounterClusterMaintenancePolicy--; } @@ -430,6 +661,7 @@ api.ConnectionDetail buildConnectionDetail() { final o = api.ConnectionDetail(); buildCounterConnectionDetail++; if (buildCounterConnectionDetail < 3) { + o.pscAutoConnection = buildPscAutoConnection(); o.pscConnection = buildPscConnection(); } buildCounterConnectionDetail--; @@ -439,17 +671,18 @@ api.ConnectionDetail buildConnectionDetail() { void checkConnectionDetail(api.ConnectionDetail o) { buildCounterConnectionDetail++; if (buildCounterConnectionDetail < 3) { + checkPscAutoConnection(o.pscAutoConnection!); checkPscConnection(o.pscConnection!); } buildCounterConnectionDetail--; } -core.List buildUnnamed9() => [ +core.List buildUnnamed10() => [ buildRemoteCluster(), buildRemoteCluster(), ]; -void checkUnnamed9(core.List o) { +void checkUnnamed10(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkRemoteCluster(o[0]); checkRemoteCluster(o[1]); @@ -463,7 +696,7 @@ api.CrossClusterReplicationConfig buildCrossClusterReplicationConfig() { o.clusterRole = 'foo'; o.membership = buildMembership(); o.primaryCluster = buildRemoteCluster(); - o.secondaryClusters = buildUnnamed9(); + o.secondaryClusters = buildUnnamed10(); o.updateTime = 'foo'; } buildCounterCrossClusterReplicationConfig--; @@ -479,7 +712,7 @@ void checkCrossClusterReplicationConfig(api.CrossClusterReplicationConfig o) { ); checkMembership(o.membership!); checkRemoteCluster(o.primaryCluster!); - checkUnnamed9(o.secondaryClusters!); + checkUnnamed10(o.secondaryClusters!); unittest.expect( o.updateTime!, unittest.equals('foo'), @@ -532,6 +765,28 @@ void checkEmpty(api.Empty o) { buildCounterEmpty--; } +core.int buildCounterExportBackupRequest = 0; +api.ExportBackupRequest buildExportBackupRequest() { + final o = api.ExportBackupRequest(); + buildCounterExportBackupRequest++; + if (buildCounterExportBackupRequest < 3) { + o.gcsBucket = 'foo'; + } + buildCounterExportBackupRequest--; + return o; +} + +void checkExportBackupRequest(api.ExportBackupRequest o) { + buildCounterExportBackupRequest++; + if (buildCounterExportBackupRequest < 3) { + unittest.expect( + o.gcsBucket!, + unittest.equals('foo'), + ); + } + buildCounterExportBackupRequest--; +} + core.int buildCounterExportInstanceRequest = 0; api.ExportInstanceRequest buildExportInstanceRequest() { final o = api.ExportInstanceRequest(); @@ -573,6 +828,61 @@ void checkFailoverInstanceRequest(api.FailoverInstanceRequest o) { buildCounterFailoverInstanceRequest--; } +core.int 
buildCounterFixedFrequencySchedule = 0; +api.FixedFrequencySchedule buildFixedFrequencySchedule() { + final o = api.FixedFrequencySchedule(); + buildCounterFixedFrequencySchedule++; + if (buildCounterFixedFrequencySchedule < 3) { + o.startTime = buildTimeOfDay(); + } + buildCounterFixedFrequencySchedule--; + return o; +} + +void checkFixedFrequencySchedule(api.FixedFrequencySchedule o) { + buildCounterFixedFrequencySchedule++; + if (buildCounterFixedFrequencySchedule < 3) { + checkTimeOfDay(o.startTime!); + } + buildCounterFixedFrequencySchedule--; +} + +core.List buildUnnamed11() => [ + 'foo', + 'foo', + ]; + +void checkUnnamed11(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o[0], + unittest.equals('foo'), + ); + unittest.expect( + o[1], + unittest.equals('foo'), + ); +} + +core.int buildCounterGcsBackupSource = 0; +api.GcsBackupSource buildGcsBackupSource() { + final o = api.GcsBackupSource(); + buildCounterGcsBackupSource++; + if (buildCounterGcsBackupSource < 3) { + o.uris = buildUnnamed11(); + } + buildCounterGcsBackupSource--; + return o; +} + +void checkGcsBackupSource(api.GcsBackupSource o) { + buildCounterGcsBackupSource++; + if (buildCounterGcsBackupSource < 3) { + checkUnnamed11(o.uris!); + } + buildCounterGcsBackupSource--; +} + core.int buildCounterGcsDestination = 0; api.GcsDestination buildGcsDestination() { final o = api.GcsDestination(); @@ -655,12 +965,12 @@ void checkInputConfig(api.InputConfig o) { buildCounterInputConfig--; } -core.List buildUnnamed10() => [ +core.List buildUnnamed12() => [ 'foo', 'foo', ]; -void checkUnnamed10(core.List o) { +void checkUnnamed12(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -672,12 +982,12 @@ void checkUnnamed10(core.List o) { ); } -core.Map buildUnnamed11() => { +core.Map buildUnnamed13() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed11(core.Map o) { +void checkUnnamed13(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -689,23 +999,23 @@ void checkUnnamed11(core.Map o) { ); } -core.List buildUnnamed12() => [ +core.List buildUnnamed14() => [ buildNodeInfo(), buildNodeInfo(), ]; -void checkUnnamed12(core.List o) { +void checkUnnamed14(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkNodeInfo(o[0]); checkNodeInfo(o[1]); } -core.Map buildUnnamed13() => { +core.Map buildUnnamed15() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed13(core.Map o) { +void checkUnnamed15(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -717,23 +1027,23 @@ void checkUnnamed13(core.Map o) { ); } -core.List buildUnnamed14() => [ +core.List buildUnnamed16() => [ buildTlsCertificate(), buildTlsCertificate(), ]; -void checkUnnamed14(core.List o) { +void checkUnnamed16(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkTlsCertificate(o[0]); checkTlsCertificate(o[1]); } -core.List buildUnnamed15() => [ +core.List buildUnnamed17() => [ 'foo', 'foo', ]; -void checkUnnamed15(core.List o) { +void checkUnnamed17(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -753,38 +1063,38 @@ api.Instance buildInstance() { o.alternativeLocationId = 'foo'; o.authEnabled = true; o.authorizedNetwork = 'foo'; - o.availableMaintenanceVersions = buildUnnamed10(); + o.availableMaintenanceVersions = buildUnnamed12(); o.connectMode = 'foo'; o.createTime = 'foo'; o.currentLocationId = 'foo'; o.customerManagedKey = 'foo'; o.displayName = 'foo'; o.host = 'foo'; - o.labels = 
buildUnnamed11(); + o.labels = buildUnnamed13(); o.locationId = 'foo'; o.maintenancePolicy = buildMaintenancePolicy(); o.maintenanceSchedule = buildMaintenanceSchedule(); o.maintenanceVersion = 'foo'; o.memorySizeGb = 42; o.name = 'foo'; - o.nodes = buildUnnamed12(); + o.nodes = buildUnnamed14(); o.persistenceConfig = buildPersistenceConfig(); o.persistenceIamIdentity = 'foo'; o.port = 42; o.readEndpoint = 'foo'; o.readEndpointPort = 42; o.readReplicasMode = 'foo'; - o.redisConfigs = buildUnnamed13(); + o.redisConfigs = buildUnnamed15(); o.redisVersion = 'foo'; o.replicaCount = 42; o.reservedIpRange = 'foo'; o.satisfiesPzi = true; o.satisfiesPzs = true; o.secondaryIpRange = 'foo'; - o.serverCaCerts = buildUnnamed14(); + o.serverCaCerts = buildUnnamed16(); o.state = 'foo'; o.statusMessage = 'foo'; - o.suspensionReasons = buildUnnamed15(); + o.suspensionReasons = buildUnnamed17(); o.tier = 'foo'; o.transitEncryptionMode = 'foo'; } @@ -804,7 +1114,7 @@ void checkInstance(api.Instance o) { o.authorizedNetwork!, unittest.equals('foo'), ); - checkUnnamed10(o.availableMaintenanceVersions!); + checkUnnamed12(o.availableMaintenanceVersions!); unittest.expect( o.connectMode!, unittest.equals('foo'), @@ -829,7 +1139,7 @@ void checkInstance(api.Instance o) { o.host!, unittest.equals('foo'), ); - checkUnnamed11(o.labels!); + checkUnnamed13(o.labels!); unittest.expect( o.locationId!, unittest.equals('foo'), @@ -848,7 +1158,7 @@ void checkInstance(api.Instance o) { o.name!, unittest.equals('foo'), ); - checkUnnamed12(o.nodes!); + checkUnnamed14(o.nodes!); checkPersistenceConfig(o.persistenceConfig!); unittest.expect( o.persistenceIamIdentity!, @@ -870,7 +1180,7 @@ void checkInstance(api.Instance o) { o.readReplicasMode!, unittest.equals('foo'), ); - checkUnnamed13(o.redisConfigs!); + checkUnnamed15(o.redisConfigs!); unittest.expect( o.redisVersion!, unittest.equals('foo'), @@ -889,7 +1199,7 @@ void checkInstance(api.Instance o) { o.secondaryIpRange!, unittest.equals('foo'), ); - checkUnnamed14(o.serverCaCerts!); + checkUnnamed16(o.serverCaCerts!); unittest.expect( o.state!, unittest.equals('foo'), @@ -898,7 +1208,7 @@ void checkInstance(api.Instance o) { o.statusMessage!, unittest.equals('foo'), ); - checkUnnamed15(o.suspensionReasons!); + checkUnnamed17(o.suspensionReasons!); unittest.expect( o.tier!, unittest.equals('foo'), @@ -933,23 +1243,23 @@ void checkInstanceAuthString(api.InstanceAuthString o) { buildCounterInstanceAuthString--; } -core.List buildUnnamed16() => [ - buildCluster(), - buildCluster(), +core.List buildUnnamed18() => [ + buildBackupCollection(), + buildBackupCollection(), ]; -void checkUnnamed16(core.List o) { +void checkUnnamed18(core.List o) { unittest.expect(o, unittest.hasLength(2)); - checkCluster(o[0]); - checkCluster(o[1]); + checkBackupCollection(o[0]); + checkBackupCollection(o[1]); } -core.List buildUnnamed17() => [ +core.List buildUnnamed19() => [ 'foo', 'foo', ]; -void checkUnnamed17(core.List o) { +void checkUnnamed19(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -961,49 +1271,49 @@ void checkUnnamed17(core.List o) { ); } -core.int buildCounterListClustersResponse = 0; -api.ListClustersResponse buildListClustersResponse() { - final o = api.ListClustersResponse(); - buildCounterListClustersResponse++; - if (buildCounterListClustersResponse < 3) { - o.clusters = buildUnnamed16(); +core.int buildCounterListBackupCollectionsResponse = 0; +api.ListBackupCollectionsResponse buildListBackupCollectionsResponse() { + final o = 
api.ListBackupCollectionsResponse(); + buildCounterListBackupCollectionsResponse++; + if (buildCounterListBackupCollectionsResponse < 3) { + o.backupCollections = buildUnnamed18(); o.nextPageToken = 'foo'; - o.unreachable = buildUnnamed17(); + o.unreachable = buildUnnamed19(); } - buildCounterListClustersResponse--; + buildCounterListBackupCollectionsResponse--; return o; } -void checkListClustersResponse(api.ListClustersResponse o) { - buildCounterListClustersResponse++; - if (buildCounterListClustersResponse < 3) { - checkUnnamed16(o.clusters!); +void checkListBackupCollectionsResponse(api.ListBackupCollectionsResponse o) { + buildCounterListBackupCollectionsResponse++; + if (buildCounterListBackupCollectionsResponse < 3) { + checkUnnamed18(o.backupCollections!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed17(o.unreachable!); + checkUnnamed19(o.unreachable!); } - buildCounterListClustersResponse--; + buildCounterListBackupCollectionsResponse--; } -core.List buildUnnamed18() => [ - buildInstance(), - buildInstance(), +core.List buildUnnamed20() => [ + buildBackup(), + buildBackup(), ]; -void checkUnnamed18(core.List o) { +void checkUnnamed20(core.List o) { unittest.expect(o, unittest.hasLength(2)); - checkInstance(o[0]); - checkInstance(o[1]); + checkBackup(o[0]); + checkBackup(o[1]); } -core.List buildUnnamed19() => [ +core.List buildUnnamed21() => [ 'foo', 'foo', ]; -void checkUnnamed19(core.List o) { +void checkUnnamed21(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -1015,59 +1325,167 @@ void checkUnnamed19(core.List o) { ); } -core.int buildCounterListInstancesResponse = 0; -api.ListInstancesResponse buildListInstancesResponse() { - final o = api.ListInstancesResponse(); - buildCounterListInstancesResponse++; - if (buildCounterListInstancesResponse < 3) { - o.instances = buildUnnamed18(); +core.int buildCounterListBackupsResponse = 0; +api.ListBackupsResponse buildListBackupsResponse() { + final o = api.ListBackupsResponse(); + buildCounterListBackupsResponse++; + if (buildCounterListBackupsResponse < 3) { + o.backups = buildUnnamed20(); o.nextPageToken = 'foo'; - o.unreachable = buildUnnamed19(); + o.unreachable = buildUnnamed21(); } - buildCounterListInstancesResponse--; + buildCounterListBackupsResponse--; return o; } -void checkListInstancesResponse(api.ListInstancesResponse o) { - buildCounterListInstancesResponse++; - if (buildCounterListInstancesResponse < 3) { - checkUnnamed18(o.instances!); +void checkListBackupsResponse(api.ListBackupsResponse o) { + buildCounterListBackupsResponse++; + if (buildCounterListBackupsResponse < 3) { + checkUnnamed20(o.backups!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed19(o.unreachable!); + checkUnnamed21(o.unreachable!); } - buildCounterListInstancesResponse--; + buildCounterListBackupsResponse--; } -core.List buildUnnamed20() => [ - buildLocation(), - buildLocation(), +core.List buildUnnamed22() => [ + buildCluster(), + buildCluster(), ]; -void checkUnnamed20(core.List o) { +void checkUnnamed22(core.List o) { unittest.expect(o, unittest.hasLength(2)); - checkLocation(o[0]); - checkLocation(o[1]); + checkCluster(o[0]); + checkCluster(o[1]); } -core.int buildCounterListLocationsResponse = 0; -api.ListLocationsResponse buildListLocationsResponse() { - final o = api.ListLocationsResponse(); - buildCounterListLocationsResponse++; - if (buildCounterListLocationsResponse < 3) { - o.locations = buildUnnamed20(); - o.nextPageToken = 'foo'; - } - 
buildCounterListLocationsResponse--; - return o; +core.List buildUnnamed23() => [ + 'foo', + 'foo', + ]; + +void checkUnnamed23(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o[0], + unittest.equals('foo'), + ); + unittest.expect( + o[1], + unittest.equals('foo'), + ); +} + +core.int buildCounterListClustersResponse = 0; +api.ListClustersResponse buildListClustersResponse() { + final o = api.ListClustersResponse(); + buildCounterListClustersResponse++; + if (buildCounterListClustersResponse < 3) { + o.clusters = buildUnnamed22(); + o.nextPageToken = 'foo'; + o.unreachable = buildUnnamed23(); + } + buildCounterListClustersResponse--; + return o; +} + +void checkListClustersResponse(api.ListClustersResponse o) { + buildCounterListClustersResponse++; + if (buildCounterListClustersResponse < 3) { + checkUnnamed22(o.clusters!); + unittest.expect( + o.nextPageToken!, + unittest.equals('foo'), + ); + checkUnnamed23(o.unreachable!); + } + buildCounterListClustersResponse--; +} + +core.List buildUnnamed24() => [ + buildInstance(), + buildInstance(), + ]; + +void checkUnnamed24(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkInstance(o[0]); + checkInstance(o[1]); +} + +core.List buildUnnamed25() => [ + 'foo', + 'foo', + ]; + +void checkUnnamed25(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o[0], + unittest.equals('foo'), + ); + unittest.expect( + o[1], + unittest.equals('foo'), + ); +} + +core.int buildCounterListInstancesResponse = 0; +api.ListInstancesResponse buildListInstancesResponse() { + final o = api.ListInstancesResponse(); + buildCounterListInstancesResponse++; + if (buildCounterListInstancesResponse < 3) { + o.instances = buildUnnamed24(); + o.nextPageToken = 'foo'; + o.unreachable = buildUnnamed25(); + } + buildCounterListInstancesResponse--; + return o; +} + +void checkListInstancesResponse(api.ListInstancesResponse o) { + buildCounterListInstancesResponse++; + if (buildCounterListInstancesResponse < 3) { + checkUnnamed24(o.instances!); + unittest.expect( + o.nextPageToken!, + unittest.equals('foo'), + ); + checkUnnamed25(o.unreachable!); + } + buildCounterListInstancesResponse--; +} + +core.List buildUnnamed26() => [ + buildLocation(), + buildLocation(), + ]; + +void checkUnnamed26(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkLocation(o[0]); + checkLocation(o[1]); +} + +core.int buildCounterListLocationsResponse = 0; +api.ListLocationsResponse buildListLocationsResponse() { + final o = api.ListLocationsResponse(); + buildCounterListLocationsResponse++; + if (buildCounterListLocationsResponse < 3) { + o.locations = buildUnnamed26(); + o.nextPageToken = 'foo'; + } + buildCounterListLocationsResponse--; + return o; } void checkListLocationsResponse(api.ListLocationsResponse o) { buildCounterListLocationsResponse++; if (buildCounterListLocationsResponse < 3) { - checkUnnamed20(o.locations!); + checkUnnamed26(o.locations!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -1076,12 +1494,12 @@ void checkListLocationsResponse(api.ListLocationsResponse o) { buildCounterListLocationsResponse--; } -core.List buildUnnamed21() => [ +core.List buildUnnamed27() => [ buildOperation(), buildOperation(), ]; -void checkUnnamed21(core.List o) { +void checkUnnamed27(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkOperation(o[0]); checkOperation(o[1]); @@ -1093,7 +1511,7 @@ api.ListOperationsResponse buildListOperationsResponse() { buildCounterListOperationsResponse++; 
if (buildCounterListOperationsResponse < 3) { o.nextPageToken = 'foo'; - o.operations = buildUnnamed21(); + o.operations = buildUnnamed27(); } buildCounterListOperationsResponse--; return o; @@ -1106,17 +1524,17 @@ void checkListOperationsResponse(api.ListOperationsResponse o) { o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed21(o.operations!); + checkUnnamed27(o.operations!); } buildCounterListOperationsResponse--; } -core.Map buildUnnamed22() => { +core.Map buildUnnamed28() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed22(core.Map o) { +void checkUnnamed28(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -1128,7 +1546,7 @@ void checkUnnamed22(core.Map o) { ); } -core.Map buildUnnamed23() => { +core.Map buildUnnamed29() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -1141,7 +1559,7 @@ core.Map buildUnnamed23() => { }, }; -void checkUnnamed23(core.Map o) { +void checkUnnamed29(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted1 = (o['x']!) as core.Map; unittest.expect(casted1, unittest.hasLength(3)); @@ -1179,9 +1597,9 @@ api.Location buildLocation() { buildCounterLocation++; if (buildCounterLocation < 3) { o.displayName = 'foo'; - o.labels = buildUnnamed22(); + o.labels = buildUnnamed28(); o.locationId = 'foo'; - o.metadata = buildUnnamed23(); + o.metadata = buildUnnamed29(); o.name = 'foo'; } buildCounterLocation--; @@ -1195,12 +1613,12 @@ void checkLocation(api.Location o) { o.displayName!, unittest.equals('foo'), ); - checkUnnamed22(o.labels!); + checkUnnamed28(o.labels!); unittest.expect( o.locationId!, unittest.equals('foo'), ); - checkUnnamed23(o.metadata!); + checkUnnamed29(o.metadata!); unittest.expect( o.name!, unittest.equals('foo'), @@ -1209,12 +1627,12 @@ void checkLocation(api.Location o) { buildCounterLocation--; } -core.List buildUnnamed24() => [ +core.List buildUnnamed30() => [ buildWeeklyMaintenanceWindow(), buildWeeklyMaintenanceWindow(), ]; -void checkUnnamed24(core.List o) { +void checkUnnamed30(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkWeeklyMaintenanceWindow(o[0]); checkWeeklyMaintenanceWindow(o[1]); @@ -1228,7 +1646,7 @@ api.MaintenancePolicy buildMaintenancePolicy() { o.createTime = 'foo'; o.description = 'foo'; o.updateTime = 'foo'; - o.weeklyMaintenanceWindow = buildUnnamed24(); + o.weeklyMaintenanceWindow = buildUnnamed30(); } buildCounterMaintenancePolicy--; return o; @@ -1249,7 +1667,7 @@ void checkMaintenancePolicy(api.MaintenancePolicy o) { o.updateTime!, unittest.equals('foo'), ); - checkUnnamed24(o.weeklyMaintenanceWindow!); + checkUnnamed30(o.weeklyMaintenanceWindow!); } buildCounterMaintenancePolicy--; } @@ -1288,12 +1706,34 @@ void checkMaintenanceSchedule(api.MaintenanceSchedule o) { buildCounterMaintenanceSchedule--; } -core.List buildUnnamed25() => [ +core.int buildCounterManagedBackupSource = 0; +api.ManagedBackupSource buildManagedBackupSource() { + final o = api.ManagedBackupSource(); + buildCounterManagedBackupSource++; + if (buildCounterManagedBackupSource < 3) { + o.backup = 'foo'; + } + buildCounterManagedBackupSource--; + return o; +} + +void checkManagedBackupSource(api.ManagedBackupSource o) { + buildCounterManagedBackupSource++; + if (buildCounterManagedBackupSource < 3) { + unittest.expect( + o.backup!, + unittest.equals('foo'), + ); + } + buildCounterManagedBackupSource--; +} + +core.List buildUnnamed31() => [ buildCertChain(), buildCertChain(), ]; -void checkUnnamed25(core.List o) { +void checkUnnamed31(core.List o) { unittest.expect(o, 
unittest.hasLength(2)); checkCertChain(o[0]); checkCertChain(o[1]); @@ -1304,7 +1744,7 @@ api.ManagedCertificateAuthority buildManagedCertificateAuthority() { final o = api.ManagedCertificateAuthority(); buildCounterManagedCertificateAuthority++; if (buildCounterManagedCertificateAuthority < 3) { - o.caCerts = buildUnnamed25(); + o.caCerts = buildUnnamed31(); } buildCounterManagedCertificateAuthority--; return o; @@ -1313,17 +1753,17 @@ api.ManagedCertificateAuthority buildManagedCertificateAuthority() { void checkManagedCertificateAuthority(api.ManagedCertificateAuthority o) { buildCounterManagedCertificateAuthority++; if (buildCounterManagedCertificateAuthority < 3) { - checkUnnamed25(o.caCerts!); + checkUnnamed31(o.caCerts!); } buildCounterManagedCertificateAuthority--; } -core.List buildUnnamed26() => [ +core.List buildUnnamed32() => [ buildRemoteCluster(), buildRemoteCluster(), ]; -void checkUnnamed26(core.List o) { +void checkUnnamed32(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkRemoteCluster(o[0]); checkRemoteCluster(o[1]); @@ -1335,7 +1775,7 @@ api.Membership buildMembership() { buildCounterMembership++; if (buildCounterMembership < 3) { o.primaryCluster = buildRemoteCluster(); - o.secondaryClusters = buildUnnamed26(); + o.secondaryClusters = buildUnnamed32(); } buildCounterMembership--; return o; @@ -1345,7 +1785,7 @@ void checkMembership(api.Membership o) { buildCounterMembership++; if (buildCounterMembership < 3) { checkRemoteCluster(o.primaryCluster!); - checkUnnamed26(o.secondaryClusters!); + checkUnnamed32(o.secondaryClusters!); } buildCounterMembership--; } @@ -1377,7 +1817,7 @@ void checkNodeInfo(api.NodeInfo o) { buildCounterNodeInfo--; } -core.Map buildUnnamed27() => { +core.Map buildUnnamed33() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -1390,7 +1830,7 @@ core.Map buildUnnamed27() => { }, }; -void checkUnnamed27(core.Map o) { +void checkUnnamed33(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted3 = (o['x']!) as core.Map; unittest.expect(casted3, unittest.hasLength(3)); @@ -1422,7 +1862,7 @@ void checkUnnamed27(core.Map o) { ); } -core.Map buildUnnamed28() => { +core.Map buildUnnamed34() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -1435,7 +1875,7 @@ core.Map buildUnnamed28() => { }, }; -void checkUnnamed28(core.Map o) { +void checkUnnamed34(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted5 = (o['x']!) 
as core.Map; unittest.expect(casted5, unittest.hasLength(3)); @@ -1474,9 +1914,9 @@ api.Operation buildOperation() { if (buildCounterOperation < 3) { o.done = true; o.error = buildStatus(); - o.metadata = buildUnnamed27(); + o.metadata = buildUnnamed33(); o.name = 'foo'; - o.response = buildUnnamed28(); + o.response = buildUnnamed34(); } buildCounterOperation--; return o; @@ -1487,12 +1927,12 @@ void checkOperation(api.Operation o) { if (buildCounterOperation < 3) { unittest.expect(o.done!, unittest.isTrue); checkStatus(o.error!); - checkUnnamed27(o.metadata!); + checkUnnamed33(o.metadata!); unittest.expect( o.name!, unittest.equals('foo'), ); - checkUnnamed28(o.response!); + checkUnnamed34(o.response!); } buildCounterOperation--; } @@ -1553,6 +1993,63 @@ void checkPersistenceConfig(api.PersistenceConfig o) { buildCounterPersistenceConfig--; } +core.int buildCounterPscAutoConnection = 0; +api.PscAutoConnection buildPscAutoConnection() { + final o = api.PscAutoConnection(); + buildCounterPscAutoConnection++; + if (buildCounterPscAutoConnection < 3) { + o.address = 'foo'; + o.connectionType = 'foo'; + o.forwardingRule = 'foo'; + o.network = 'foo'; + o.projectId = 'foo'; + o.pscConnectionId = 'foo'; + o.pscConnectionStatus = 'foo'; + o.serviceAttachment = 'foo'; + } + buildCounterPscAutoConnection--; + return o; +} + +void checkPscAutoConnection(api.PscAutoConnection o) { + buildCounterPscAutoConnection++; + if (buildCounterPscAutoConnection < 3) { + unittest.expect( + o.address!, + unittest.equals('foo'), + ); + unittest.expect( + o.connectionType!, + unittest.equals('foo'), + ); + unittest.expect( + o.forwardingRule!, + unittest.equals('foo'), + ); + unittest.expect( + o.network!, + unittest.equals('foo'), + ); + unittest.expect( + o.projectId!, + unittest.equals('foo'), + ); + unittest.expect( + o.pscConnectionId!, + unittest.equals('foo'), + ); + unittest.expect( + o.pscConnectionStatus!, + unittest.equals('foo'), + ); + unittest.expect( + o.serviceAttachment!, + unittest.equals('foo'), + ); + } + buildCounterPscAutoConnection--; +} + core.int buildCounterPscConfig = 0; api.PscConfig buildPscConfig() { final o = api.PscConfig(); @@ -1788,7 +2285,7 @@ void checkStateInfo(api.StateInfo o) { buildCounterStateInfo--; } -core.Map buildUnnamed29() => { +core.Map buildUnnamed35() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -1801,7 +2298,7 @@ core.Map buildUnnamed29() => { }, }; -void checkUnnamed29(core.Map o) { +void checkUnnamed35(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted7 = (o['x']!) 
as core.Map; unittest.expect(casted7, unittest.hasLength(3)); @@ -1833,15 +2330,15 @@ void checkUnnamed29(core.Map o) { ); } -core.List> buildUnnamed30() => [ - buildUnnamed29(), - buildUnnamed29(), +core.List> buildUnnamed36() => [ + buildUnnamed35(), + buildUnnamed35(), ]; -void checkUnnamed30(core.List> o) { +void checkUnnamed36(core.List> o) { unittest.expect(o, unittest.hasLength(2)); - checkUnnamed29(o[0]); - checkUnnamed29(o[1]); + checkUnnamed35(o[0]); + checkUnnamed35(o[1]); } core.int buildCounterStatus = 0; @@ -1850,7 +2347,7 @@ api.Status buildStatus() { buildCounterStatus++; if (buildCounterStatus < 3) { o.code = 42; - o.details = buildUnnamed30(); + o.details = buildUnnamed36(); o.message = 'foo'; } buildCounterStatus--; @@ -1864,7 +2361,7 @@ void checkStatus(api.Status o) { o.code!, unittest.equals(42), ); - checkUnnamed30(o.details!); + checkUnnamed36(o.details!); unittest.expect( o.message!, unittest.equals('foo'), @@ -2068,6 +2565,56 @@ void main() { }); }); + unittest.group('obj-schema-AutomatedBackupConfig', () { + unittest.test('to-json--from-json', () async { + final o = buildAutomatedBackupConfig(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.AutomatedBackupConfig.fromJson( + oJson as core.Map); + checkAutomatedBackupConfig(od); + }); + }); + + unittest.group('obj-schema-Backup', () { + unittest.test('to-json--from-json', () async { + final o = buildBackup(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.Backup.fromJson(oJson as core.Map); + checkBackup(od); + }); + }); + + unittest.group('obj-schema-BackupClusterRequest', () { + unittest.test('to-json--from-json', () async { + final o = buildBackupClusterRequest(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.BackupClusterRequest.fromJson( + oJson as core.Map); + checkBackupClusterRequest(od); + }); + }); + + unittest.group('obj-schema-BackupCollection', () { + unittest.test('to-json--from-json', () async { + final o = buildBackupCollection(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.BackupCollection.fromJson( + oJson as core.Map); + checkBackupCollection(od); + }); + }); + + unittest.group('obj-schema-BackupFile', () { + unittest.test('to-json--from-json', () async { + final o = buildBackupFile(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.BackupFile.fromJson(oJson as core.Map); + checkBackupFile(od); + }); + }); + unittest.group('obj-schema-CertChain', () { unittest.test('to-json--from-json', () async { final o = buildCertChain(); @@ -2188,6 +2735,16 @@ void main() { }); }); + unittest.group('obj-schema-ExportBackupRequest', () { + unittest.test('to-json--from-json', () async { + final o = buildExportBackupRequest(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.ExportBackupRequest.fromJson( + oJson as core.Map); + checkExportBackupRequest(od); + }); + }); + unittest.group('obj-schema-ExportInstanceRequest', () { unittest.test('to-json--from-json', () async { final o = buildExportInstanceRequest(); @@ -2208,6 +2765,26 @@ void main() { }); }); + unittest.group('obj-schema-FixedFrequencySchedule', () { + unittest.test('to-json--from-json', () async { + final o = buildFixedFrequencySchedule(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.FixedFrequencySchedule.fromJson( + oJson as core.Map); + checkFixedFrequencySchedule(od); + }); + }); + + unittest.group('obj-schema-GcsBackupSource', () { + 
unittest.test('to-json--from-json', () async { + final o = buildGcsBackupSource(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GcsBackupSource.fromJson( + oJson as core.Map); + checkGcsBackupSource(od); + }); + }); + unittest.group('obj-schema-GcsDestination', () { unittest.test('to-json--from-json', () async { final o = buildGcsDestination(); @@ -2268,6 +2845,26 @@ void main() { }); }); + unittest.group('obj-schema-ListBackupCollectionsResponse', () { + unittest.test('to-json--from-json', () async { + final o = buildListBackupCollectionsResponse(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.ListBackupCollectionsResponse.fromJson( + oJson as core.Map); + checkListBackupCollectionsResponse(od); + }); + }); + + unittest.group('obj-schema-ListBackupsResponse', () { + unittest.test('to-json--from-json', () async { + final o = buildListBackupsResponse(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.ListBackupsResponse.fromJson( + oJson as core.Map); + checkListBackupsResponse(od); + }); + }); + unittest.group('obj-schema-ListClustersResponse', () { unittest.test('to-json--from-json', () async { final o = buildListClustersResponse(); @@ -2338,6 +2935,16 @@ void main() { }); }); + unittest.group('obj-schema-ManagedBackupSource', () { + unittest.test('to-json--from-json', () async { + final o = buildManagedBackupSource(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.ManagedBackupSource.fromJson( + oJson as core.Map); + checkManagedBackupSource(od); + }); + }); + unittest.group('obj-schema-ManagedCertificateAuthority', () { unittest.test('to-json--from-json', () async { final o = buildManagedCertificateAuthority(); @@ -2398,6 +3005,16 @@ void main() { }); }); + unittest.group('obj-schema-PscAutoConnection', () { + unittest.test('to-json--from-json', () async { + final o = buildPscAutoConnection(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.PscAutoConnection.fromJson( + oJson as core.Map); + checkPscAutoConnection(od); + }); + }); + unittest.group('obj-schema-PscConfig', () { unittest.test('to-json--from-json', () async { final o = buildPscConfig(); @@ -2673,7 +3290,425 @@ void main() { }); }); - unittest.group('resource-ProjectsLocationsClustersResource', () { + unittest.group('resource-ProjectsLocationsBackupCollectionsResource', () { + unittest.test('method--get', () async { + final mock = HttpServerMock(); + final res = api.CloudRedisApi(mock).projects.locations.backupCollections; + final arg_name = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + 
queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildBackupCollection()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.get(arg_name, $fields: arg_$fields); + checkBackupCollection(response as api.BackupCollection); + }); + + unittest.test('method--list', () async { + final mock = HttpServerMock(); + final res = api.CloudRedisApi(mock).projects.locations.backupCollections; + final arg_parent = 'foo'; + final arg_pageSize = 42; + final arg_pageToken = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + core.int.parse(queryMap['pageSize']!.first), + unittest.equals(arg_pageSize), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildListBackupCollectionsResponse()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.list(arg_parent, + pageSize: arg_pageSize, + pageToken: arg_pageToken, + $fields: arg_$fields); + checkListBackupCollectionsResponse( + response as api.ListBackupCollectionsResponse); + }); + }); + + unittest.group('resource-ProjectsLocationsBackupCollectionsBackupsResource', + () { + unittest.test('method--delete', () async { + final mock = HttpServerMock(); + final res = + api.CloudRedisApi(mock).projects.locations.backupCollections.backups; + final arg_name = 'foo'; + final arg_requestId = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['requestId']!.first, 
+ unittest.equals(arg_requestId), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildOperation()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.delete(arg_name, + requestId: arg_requestId, $fields: arg_$fields); + checkOperation(response as api.Operation); + }); + + unittest.test('method--export', () async { + final mock = HttpServerMock(); + final res = + api.CloudRedisApi(mock).projects.locations.backupCollections.backups; + final arg_request = buildExportBackupRequest(); + final arg_name = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.ExportBackupRequest.fromJson( + json as core.Map); + checkExportBackupRequest(obj); + + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildOperation()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = + await res.export(arg_request, arg_name, $fields: arg_$fields); + checkOperation(response as api.Operation); + }); + + unittest.test('method--get', () async { + final mock = HttpServerMock(); + final res = + api.CloudRedisApi(mock).projects.locations.backupCollections.backups; + final arg_name = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildBackup()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response 
= await res.get(arg_name, $fields: arg_$fields); + checkBackup(response as api.Backup); + }); + + unittest.test('method--list', () async { + final mock = HttpServerMock(); + final res = + api.CloudRedisApi(mock).projects.locations.backupCollections.backups; + final arg_parent = 'foo'; + final arg_pageSize = 42; + final arg_pageToken = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + core.int.parse(queryMap['pageSize']!.first), + unittest.equals(arg_pageSize), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildListBackupsResponse()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.list(arg_parent, + pageSize: arg_pageSize, + pageToken: arg_pageToken, + $fields: arg_$fields); + checkListBackupsResponse(response as api.ListBackupsResponse); + }); + }); + + unittest.group('resource-ProjectsLocationsClustersResource', () { + unittest.test('method--backup', () async { + final mock = HttpServerMock(); + final res = api.CloudRedisApi(mock).projects.locations.clusters; + final arg_request = buildBackupClusterRequest(); + final arg_name = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.BackupClusterRequest.fromJson( + json as core.Map); + checkBackupClusterRequest(obj); + + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildOperation()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final 
response = + await res.backup(arg_request, arg_name, $fields: arg_$fields); + checkOperation(response as api.Operation); + }); + unittest.test('method--create', () async { final mock = HttpServerMock(); final res = api.CloudRedisApi(mock).projects.locations.clusters; diff --git a/generated/googleapis/test/retail/v2_test.dart b/generated/googleapis/test/retail/v2_test.dart index 00ac40035..8a34dcb95 100644 --- a/generated/googleapis/test/retail/v2_test.dart +++ b/generated/googleapis/test/retail/v2_test.dart @@ -2165,6 +2165,7 @@ api.GoogleCloudRetailV2LocalInventory buildGoogleCloudRetailV2LocalInventory() { buildCounterGoogleCloudRetailV2LocalInventory++; if (buildCounterGoogleCloudRetailV2LocalInventory < 3) { o.attributes = buildUnnamed39(); + o.availability = 'foo'; o.fulfillmentTypes = buildUnnamed40(); o.placeId = 'foo'; o.priceInfo = buildGoogleCloudRetailV2PriceInfo(); @@ -2178,6 +2179,10 @@ void checkGoogleCloudRetailV2LocalInventory( buildCounterGoogleCloudRetailV2LocalInventory++; if (buildCounterGoogleCloudRetailV2LocalInventory < 3) { checkUnnamed39(o.attributes!); + unittest.expect( + o.availability!, + unittest.equals('foo'), + ); checkUnnamed40(o.fulfillmentTypes!); unittest.expect( o.placeId!, @@ -2475,12 +2480,101 @@ void checkGoogleCloudRetailV2PauseModelRequest( buildCounterGoogleCloudRetailV2PauseModelRequest--; } -core.Map buildUnnamed43() => { +core.Map + buildUnnamed43() => { + 'x': buildGoogleCloudRetailV2PinControlMetadataProductPins(), + 'y': buildGoogleCloudRetailV2PinControlMetadataProductPins(), + }; + +void checkUnnamed43( + core.Map + o) { + unittest.expect(o, unittest.hasLength(2)); + checkGoogleCloudRetailV2PinControlMetadataProductPins(o['x']!); + checkGoogleCloudRetailV2PinControlMetadataProductPins(o['y']!); +} + +core.Map + buildUnnamed44() => { + 'x': buildGoogleCloudRetailV2PinControlMetadataProductPins(), + 'y': buildGoogleCloudRetailV2PinControlMetadataProductPins(), + }; + +void checkUnnamed44( + core.Map + o) { + unittest.expect(o, unittest.hasLength(2)); + checkGoogleCloudRetailV2PinControlMetadataProductPins(o['x']!); + checkGoogleCloudRetailV2PinControlMetadataProductPins(o['y']!); +} + +core.int buildCounterGoogleCloudRetailV2PinControlMetadata = 0; +api.GoogleCloudRetailV2PinControlMetadata + buildGoogleCloudRetailV2PinControlMetadata() { + final o = api.GoogleCloudRetailV2PinControlMetadata(); + buildCounterGoogleCloudRetailV2PinControlMetadata++; + if (buildCounterGoogleCloudRetailV2PinControlMetadata < 3) { + o.allMatchedPins = buildUnnamed43(); + o.droppedPins = buildUnnamed44(); + } + buildCounterGoogleCloudRetailV2PinControlMetadata--; + return o; +} + +void checkGoogleCloudRetailV2PinControlMetadata( + api.GoogleCloudRetailV2PinControlMetadata o) { + buildCounterGoogleCloudRetailV2PinControlMetadata++; + if (buildCounterGoogleCloudRetailV2PinControlMetadata < 3) { + checkUnnamed43(o.allMatchedPins!); + checkUnnamed44(o.droppedPins!); + } + buildCounterGoogleCloudRetailV2PinControlMetadata--; +} + +core.List buildUnnamed45() => [ + 'foo', + 'foo', + ]; + +void checkUnnamed45(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o[0], + unittest.equals('foo'), + ); + unittest.expect( + o[1], + unittest.equals('foo'), + ); +} + +core.int buildCounterGoogleCloudRetailV2PinControlMetadataProductPins = 0; +api.GoogleCloudRetailV2PinControlMetadataProductPins + buildGoogleCloudRetailV2PinControlMetadataProductPins() { + final o = api.GoogleCloudRetailV2PinControlMetadataProductPins(); + 
buildCounterGoogleCloudRetailV2PinControlMetadataProductPins++; + if (buildCounterGoogleCloudRetailV2PinControlMetadataProductPins < 3) { + o.productId = buildUnnamed45(); + } + buildCounterGoogleCloudRetailV2PinControlMetadataProductPins--; + return o; +} + +void checkGoogleCloudRetailV2PinControlMetadataProductPins( + api.GoogleCloudRetailV2PinControlMetadataProductPins o) { + buildCounterGoogleCloudRetailV2PinControlMetadataProductPins++; + if (buildCounterGoogleCloudRetailV2PinControlMetadataProductPins < 3) { + checkUnnamed45(o.productId!); + } + buildCounterGoogleCloudRetailV2PinControlMetadataProductPins--; +} + +core.Map buildUnnamed46() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed43(core.Map o) { +void checkUnnamed46(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -2492,7 +2586,7 @@ void checkUnnamed43(core.Map o) { ); } -core.Map buildUnnamed44() => { +core.Map buildUnnamed47() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -2505,7 +2599,7 @@ core.Map buildUnnamed44() => { }, }; -void checkUnnamed44(core.Map o) { +void checkUnnamed47(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted3 = (o['x']!) as core.Map; unittest.expect(casted3, unittest.hasLength(3)); @@ -2543,10 +2637,10 @@ api.GoogleCloudRetailV2PredictRequest buildGoogleCloudRetailV2PredictRequest() { buildCounterGoogleCloudRetailV2PredictRequest++; if (buildCounterGoogleCloudRetailV2PredictRequest < 3) { o.filter = 'foo'; - o.labels = buildUnnamed43(); + o.labels = buildUnnamed46(); o.pageSize = 42; o.pageToken = 'foo'; - o.params = buildUnnamed44(); + o.params = buildUnnamed47(); o.userEvent = buildGoogleCloudRetailV2UserEvent(); o.validateOnly = true; } @@ -2562,7 +2656,7 @@ void checkGoogleCloudRetailV2PredictRequest( o.filter!, unittest.equals('foo'), ); - checkUnnamed43(o.labels!); + checkUnnamed46(o.labels!); unittest.expect( o.pageSize!, unittest.equals(42), @@ -2571,19 +2665,19 @@ void checkGoogleCloudRetailV2PredictRequest( o.pageToken!, unittest.equals('foo'), ); - checkUnnamed44(o.params!); + checkUnnamed47(o.params!); checkGoogleCloudRetailV2UserEvent(o.userEvent!); unittest.expect(o.validateOnly!, unittest.isTrue); } buildCounterGoogleCloudRetailV2PredictRequest--; } -core.List buildUnnamed45() => [ +core.List buildUnnamed48() => [ 'foo', 'foo', ]; -void checkUnnamed45(core.List o) { +void checkUnnamed48(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -2596,12 +2690,12 @@ void checkUnnamed45(core.List o) { } core.List - buildUnnamed46() => [ + buildUnnamed49() => [ buildGoogleCloudRetailV2PredictResponsePredictionResult(), buildGoogleCloudRetailV2PredictResponsePredictionResult(), ]; -void checkUnnamed46( +void checkUnnamed49( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudRetailV2PredictResponsePredictionResult(o[0]); @@ -2615,8 +2709,8 @@ api.GoogleCloudRetailV2PredictResponse buildCounterGoogleCloudRetailV2PredictResponse++; if (buildCounterGoogleCloudRetailV2PredictResponse < 3) { o.attributionToken = 'foo'; - o.missingIds = buildUnnamed45(); - o.results = buildUnnamed46(); + o.missingIds = buildUnnamed48(); + o.results = buildUnnamed49(); o.validateOnly = true; } buildCounterGoogleCloudRetailV2PredictResponse--; @@ -2631,14 +2725,14 @@ void checkGoogleCloudRetailV2PredictResponse( o.attributionToken!, unittest.equals('foo'), ); - checkUnnamed45(o.missingIds!); - checkUnnamed46(o.results!); + checkUnnamed48(o.missingIds!); + checkUnnamed49(o.results!); 
unittest.expect(o.validateOnly!, unittest.isTrue); } buildCounterGoogleCloudRetailV2PredictResponse--; } -core.Map buildUnnamed47() => { +core.Map buildUnnamed50() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -2651,7 +2745,7 @@ core.Map buildUnnamed47() => { }, }; -void checkUnnamed47(core.Map o) { +void checkUnnamed50(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted5 = (o['x']!) as core.Map; unittest.expect(casted5, unittest.hasLength(3)); @@ -2690,7 +2784,7 @@ api.GoogleCloudRetailV2PredictResponsePredictionResult buildCounterGoogleCloudRetailV2PredictResponsePredictionResult++; if (buildCounterGoogleCloudRetailV2PredictResponsePredictionResult < 3) { o.id = 'foo'; - o.metadata = buildUnnamed47(); + o.metadata = buildUnnamed50(); } buildCounterGoogleCloudRetailV2PredictResponsePredictionResult--; return o; @@ -2704,7 +2798,7 @@ void checkGoogleCloudRetailV2PredictResponsePredictionResult( o.id!, unittest.equals('foo'), ); - checkUnnamed47(o.metadata!); + checkUnnamed50(o.metadata!); } buildCounterGoogleCloudRetailV2PredictResponsePredictionResult--; } @@ -2782,24 +2876,24 @@ void checkGoogleCloudRetailV2PriceInfoPriceRange( } core.Map - buildUnnamed48() => { + buildUnnamed51() => { 'x': buildGoogleCloudRetailV2CustomAttribute(), 'y': buildGoogleCloudRetailV2CustomAttribute(), }; -void checkUnnamed48( +void checkUnnamed51( core.Map o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudRetailV2CustomAttribute(o['x']!); checkGoogleCloudRetailV2CustomAttribute(o['y']!); } -core.List buildUnnamed49() => [ +core.List buildUnnamed52() => [ 'foo', 'foo', ]; -void checkUnnamed49(core.List o) { +void checkUnnamed52(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -2811,12 +2905,12 @@ void checkUnnamed49(core.List o) { ); } -core.List buildUnnamed50() => [ +core.List buildUnnamed53() => [ 'foo', 'foo', ]; -void checkUnnamed50(core.List o) { +void checkUnnamed53(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -2828,12 +2922,12 @@ void checkUnnamed50(core.List o) { ); } -core.List buildUnnamed51() => [ +core.List buildUnnamed54() => [ 'foo', 'foo', ]; -void checkUnnamed51(core.List o) { +void checkUnnamed54(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -2845,12 +2939,12 @@ void checkUnnamed51(core.List o) { ); } -core.List buildUnnamed52() => [ +core.List buildUnnamed55() => [ 'foo', 'foo', ]; -void checkUnnamed52(core.List o) { +void checkUnnamed55(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -2862,45 +2956,45 @@ void checkUnnamed52(core.List o) { ); } -core.List buildUnnamed53() => [ +core.List buildUnnamed56() => [ buildGoogleCloudRetailV2FulfillmentInfo(), buildGoogleCloudRetailV2FulfillmentInfo(), ]; -void checkUnnamed53(core.List o) { +void checkUnnamed56(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudRetailV2FulfillmentInfo(o[0]); checkGoogleCloudRetailV2FulfillmentInfo(o[1]); } -core.List buildUnnamed54() => [ +core.List buildUnnamed57() => [ buildGoogleCloudRetailV2Image(), buildGoogleCloudRetailV2Image(), ]; -void checkUnnamed54(core.List o) { +void checkUnnamed57(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudRetailV2Image(o[0]); checkGoogleCloudRetailV2Image(o[1]); } -core.List buildUnnamed55() => [ +core.List buildUnnamed58() => [ buildGoogleCloudRetailV2LocalInventory(), buildGoogleCloudRetailV2LocalInventory(), ]; -void checkUnnamed55(core.List o) { +void 
checkUnnamed58(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudRetailV2LocalInventory(o[0]); checkGoogleCloudRetailV2LocalInventory(o[1]); } -core.List buildUnnamed56() => [ +core.List buildUnnamed59() => [ 'foo', 'foo', ]; -void checkUnnamed56(core.List o) { +void checkUnnamed59(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -2912,12 +3006,12 @@ void checkUnnamed56(core.List o) { ); } -core.List buildUnnamed57() => [ +core.List buildUnnamed60() => [ 'foo', 'foo', ]; -void checkUnnamed57(core.List o) { +void checkUnnamed60(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -2929,23 +3023,23 @@ void checkUnnamed57(core.List o) { ); } -core.List buildUnnamed58() => [ +core.List buildUnnamed61() => [ buildGoogleCloudRetailV2Promotion(), buildGoogleCloudRetailV2Promotion(), ]; -void checkUnnamed58(core.List o) { +void checkUnnamed61(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudRetailV2Promotion(o[0]); checkGoogleCloudRetailV2Promotion(o[1]); } -core.List buildUnnamed59() => [ +core.List buildUnnamed62() => [ 'foo', 'foo', ]; -void checkUnnamed59(core.List o) { +void checkUnnamed62(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -2957,12 +3051,12 @@ void checkUnnamed59(core.List o) { ); } -core.List buildUnnamed60() => [ +core.List buildUnnamed63() => [ 'foo', 'foo', ]; -void checkUnnamed60(core.List o) { +void checkUnnamed63(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -2974,12 +3068,12 @@ void checkUnnamed60(core.List o) { ); } -core.List buildUnnamed61() => [ +core.List buildUnnamed64() => [ buildGoogleCloudRetailV2Product(), buildGoogleCloudRetailV2Product(), ]; -void checkUnnamed61(core.List o) { +void checkUnnamed64(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudRetailV2Product(o[0]); checkGoogleCloudRetailV2Product(o[1]); @@ -2990,40 +3084,40 @@ api.GoogleCloudRetailV2Product buildGoogleCloudRetailV2Product() { final o = api.GoogleCloudRetailV2Product(); buildCounterGoogleCloudRetailV2Product++; if (buildCounterGoogleCloudRetailV2Product < 3) { - o.attributes = buildUnnamed48(); + o.attributes = buildUnnamed51(); o.audience = buildGoogleCloudRetailV2Audience(); o.availability = 'foo'; o.availableQuantity = 42; o.availableTime = 'foo'; - o.brands = buildUnnamed49(); - o.categories = buildUnnamed50(); - o.collectionMemberIds = buildUnnamed51(); + o.brands = buildUnnamed52(); + o.categories = buildUnnamed53(); + o.collectionMemberIds = buildUnnamed54(); o.colorInfo = buildGoogleCloudRetailV2ColorInfo(); - o.conditions = buildUnnamed52(); + o.conditions = buildUnnamed55(); o.description = 'foo'; o.expireTime = 'foo'; - o.fulfillmentInfo = buildUnnamed53(); + o.fulfillmentInfo = buildUnnamed56(); o.gtin = 'foo'; o.id = 'foo'; - o.images = buildUnnamed54(); + o.images = buildUnnamed57(); o.languageCode = 'foo'; - o.localInventories = buildUnnamed55(); - o.materials = buildUnnamed56(); + o.localInventories = buildUnnamed58(); + o.materials = buildUnnamed59(); o.name = 'foo'; - o.patterns = buildUnnamed57(); + o.patterns = buildUnnamed60(); o.priceInfo = buildGoogleCloudRetailV2PriceInfo(); o.primaryProductId = 'foo'; - o.promotions = buildUnnamed58(); + o.promotions = buildUnnamed61(); o.publishTime = 'foo'; o.rating = buildGoogleCloudRetailV2Rating(); o.retrievableFields = 'foo'; - o.sizes = buildUnnamed59(); - o.tags = buildUnnamed60(); + o.sizes = buildUnnamed62(); + 
o.tags = buildUnnamed63(); o.title = 'foo'; o.ttl = 'foo'; o.type = 'foo'; o.uri = 'foo'; - o.variants = buildUnnamed61(); + o.variants = buildUnnamed64(); } buildCounterGoogleCloudRetailV2Product--; return o; @@ -3032,7 +3126,7 @@ api.GoogleCloudRetailV2Product buildGoogleCloudRetailV2Product() { void checkGoogleCloudRetailV2Product(api.GoogleCloudRetailV2Product o) { buildCounterGoogleCloudRetailV2Product++; if (buildCounterGoogleCloudRetailV2Product < 3) { - checkUnnamed48(o.attributes!); + checkUnnamed51(o.attributes!); checkGoogleCloudRetailV2Audience(o.audience!); unittest.expect( o.availability!, @@ -3046,11 +3140,11 @@ void checkGoogleCloudRetailV2Product(api.GoogleCloudRetailV2Product o) { o.availableTime!, unittest.equals('foo'), ); - checkUnnamed49(o.brands!); - checkUnnamed50(o.categories!); - checkUnnamed51(o.collectionMemberIds!); + checkUnnamed52(o.brands!); + checkUnnamed53(o.categories!); + checkUnnamed54(o.collectionMemberIds!); checkGoogleCloudRetailV2ColorInfo(o.colorInfo!); - checkUnnamed52(o.conditions!); + checkUnnamed55(o.conditions!); unittest.expect( o.description!, unittest.equals('foo'), @@ -3059,7 +3153,7 @@ void checkGoogleCloudRetailV2Product(api.GoogleCloudRetailV2Product o) { o.expireTime!, unittest.equals('foo'), ); - checkUnnamed53(o.fulfillmentInfo!); + checkUnnamed56(o.fulfillmentInfo!); unittest.expect( o.gtin!, unittest.equals('foo'), @@ -3068,24 +3162,24 @@ void checkGoogleCloudRetailV2Product(api.GoogleCloudRetailV2Product o) { o.id!, unittest.equals('foo'), ); - checkUnnamed54(o.images!); + checkUnnamed57(o.images!); unittest.expect( o.languageCode!, unittest.equals('foo'), ); - checkUnnamed55(o.localInventories!); - checkUnnamed56(o.materials!); + checkUnnamed58(o.localInventories!); + checkUnnamed59(o.materials!); unittest.expect( o.name!, unittest.equals('foo'), ); - checkUnnamed57(o.patterns!); + checkUnnamed60(o.patterns!); checkGoogleCloudRetailV2PriceInfo(o.priceInfo!); unittest.expect( o.primaryProductId!, unittest.equals('foo'), ); - checkUnnamed58(o.promotions!); + checkUnnamed61(o.promotions!); unittest.expect( o.publishTime!, unittest.equals('foo'), @@ -3095,8 +3189,8 @@ void checkGoogleCloudRetailV2Product(api.GoogleCloudRetailV2Product o) { o.retrievableFields!, unittest.equals('foo'), ); - checkUnnamed59(o.sizes!); - checkUnnamed60(o.tags!); + checkUnnamed62(o.sizes!); + checkUnnamed63(o.tags!); unittest.expect( o.title!, unittest.equals('foo'), @@ -3113,7 +3207,7 @@ void checkGoogleCloudRetailV2Product(api.GoogleCloudRetailV2Product o) { o.uri!, unittest.equals('foo'), ); - checkUnnamed61(o.variants!); + checkUnnamed64(o.variants!); } buildCounterGoogleCloudRetailV2Product--; } @@ -3198,12 +3292,12 @@ void checkGoogleCloudRetailV2ProductDetail( buildCounterGoogleCloudRetailV2ProductDetail--; } -core.List buildUnnamed62() => [ +core.List buildUnnamed65() => [ buildGoogleCloudRetailV2Product(), buildGoogleCloudRetailV2Product(), ]; -void checkUnnamed62(core.List o) { +void checkUnnamed65(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudRetailV2Product(o[0]); checkGoogleCloudRetailV2Product(o[1]); @@ -3215,7 +3309,7 @@ api.GoogleCloudRetailV2ProductInlineSource final o = api.GoogleCloudRetailV2ProductInlineSource(); buildCounterGoogleCloudRetailV2ProductInlineSource++; if (buildCounterGoogleCloudRetailV2ProductInlineSource < 3) { - o.products = buildUnnamed62(); + o.products = buildUnnamed65(); } buildCounterGoogleCloudRetailV2ProductInlineSource--; return o; @@ -3225,7 +3319,7 @@ void 
checkGoogleCloudRetailV2ProductInlineSource( api.GoogleCloudRetailV2ProductInlineSource o) { buildCounterGoogleCloudRetailV2ProductInlineSource++; if (buildCounterGoogleCloudRetailV2ProductInlineSource < 3) { - checkUnnamed62(o.products!); + checkUnnamed65(o.products!); } buildCounterGoogleCloudRetailV2ProductInlineSource--; } @@ -3402,12 +3496,12 @@ void checkGoogleCloudRetailV2PurgeUserEventsRequest( buildCounterGoogleCloudRetailV2PurgeUserEventsRequest--; } -core.List buildUnnamed63() => [ +core.List buildUnnamed66() => [ 42, 42, ]; -void checkUnnamed63(core.List o) { +void checkUnnamed66(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -3426,7 +3520,7 @@ api.GoogleCloudRetailV2Rating buildGoogleCloudRetailV2Rating() { if (buildCounterGoogleCloudRetailV2Rating < 3) { o.averageRating = 42.0; o.ratingCount = 42; - o.ratingHistogram = buildUnnamed63(); + o.ratingHistogram = buildUnnamed66(); } buildCounterGoogleCloudRetailV2Rating--; return o; @@ -3443,7 +3537,7 @@ void checkGoogleCloudRetailV2Rating(api.GoogleCloudRetailV2Rating o) { o.ratingCount!, unittest.equals(42), ); - checkUnnamed63(o.ratingHistogram!); + checkUnnamed66(o.ratingHistogram!); } buildCounterGoogleCloudRetailV2Rating--; } @@ -3520,12 +3614,12 @@ void checkGoogleCloudRetailV2RemoveControlRequest( buildCounterGoogleCloudRetailV2RemoveControlRequest--; } -core.List buildUnnamed64() => [ +core.List buildUnnamed67() => [ 'foo', 'foo', ]; -void checkUnnamed64(core.List o) { +void checkUnnamed67(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -3544,7 +3638,7 @@ api.GoogleCloudRetailV2RemoveFulfillmentPlacesRequest buildCounterGoogleCloudRetailV2RemoveFulfillmentPlacesRequest++; if (buildCounterGoogleCloudRetailV2RemoveFulfillmentPlacesRequest < 3) { o.allowMissing = true; - o.placeIds = buildUnnamed64(); + o.placeIds = buildUnnamed67(); o.removeTime = 'foo'; o.type = 'foo'; } @@ -3557,7 +3651,7 @@ void checkGoogleCloudRetailV2RemoveFulfillmentPlacesRequest( buildCounterGoogleCloudRetailV2RemoveFulfillmentPlacesRequest++; if (buildCounterGoogleCloudRetailV2RemoveFulfillmentPlacesRequest < 3) { unittest.expect(o.allowMissing!, unittest.isTrue); - checkUnnamed64(o.placeIds!); + checkUnnamed67(o.placeIds!); unittest.expect( o.removeTime!, unittest.equals('foo'), @@ -3570,12 +3664,12 @@ void checkGoogleCloudRetailV2RemoveFulfillmentPlacesRequest( buildCounterGoogleCloudRetailV2RemoveFulfillmentPlacesRequest--; } -core.List buildUnnamed65() => [ +core.List buildUnnamed68() => [ 'foo', 'foo', ]; -void checkUnnamed65(core.List o) { +void checkUnnamed68(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -3594,7 +3688,7 @@ api.GoogleCloudRetailV2RemoveLocalInventoriesRequest buildCounterGoogleCloudRetailV2RemoveLocalInventoriesRequest++; if (buildCounterGoogleCloudRetailV2RemoveLocalInventoriesRequest < 3) { o.allowMissing = true; - o.placeIds = buildUnnamed65(); + o.placeIds = buildUnnamed68(); o.removeTime = 'foo'; } buildCounterGoogleCloudRetailV2RemoveLocalInventoriesRequest--; @@ -3606,7 +3700,7 @@ void checkGoogleCloudRetailV2RemoveLocalInventoriesRequest( buildCounterGoogleCloudRetailV2RemoveLocalInventoriesRequest++; if (buildCounterGoogleCloudRetailV2RemoveLocalInventoriesRequest < 3) { unittest.expect(o.allowMissing!, unittest.isTrue); - checkUnnamed65(o.placeIds!); + checkUnnamed68(o.placeIds!); unittest.expect( o.removeTime!, unittest.equals('foo'), @@ -3728,12 +3822,12 @@ void checkGoogleCloudRetailV2RuleBoostAction( 
buildCounterGoogleCloudRetailV2RuleBoostAction--; } -core.List buildUnnamed66() => [ +core.List buildUnnamed69() => [ 'foo', 'foo', ]; -void checkUnnamed66(core.List o) { +void checkUnnamed69(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -3745,12 +3839,12 @@ void checkUnnamed66(core.List o) { ); } -core.List buildUnnamed67() => [ +core.List buildUnnamed70() => [ 'foo', 'foo', ]; -void checkUnnamed67(core.List o) { +void checkUnnamed70(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -3762,12 +3856,12 @@ void checkUnnamed67(core.List o) { ); } -core.List buildUnnamed68() => [ +core.List buildUnnamed71() => [ 'foo', 'foo', ]; -void checkUnnamed68(core.List o) { +void checkUnnamed71(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -3785,9 +3879,9 @@ api.GoogleCloudRetailV2RuleDoNotAssociateAction final o = api.GoogleCloudRetailV2RuleDoNotAssociateAction(); buildCounterGoogleCloudRetailV2RuleDoNotAssociateAction++; if (buildCounterGoogleCloudRetailV2RuleDoNotAssociateAction < 3) { - o.doNotAssociateTerms = buildUnnamed66(); - o.queryTerms = buildUnnamed67(); - o.terms = buildUnnamed68(); + o.doNotAssociateTerms = buildUnnamed69(); + o.queryTerms = buildUnnamed70(); + o.terms = buildUnnamed71(); } buildCounterGoogleCloudRetailV2RuleDoNotAssociateAction--; return o; @@ -3797,9 +3891,9 @@ void checkGoogleCloudRetailV2RuleDoNotAssociateAction( api.GoogleCloudRetailV2RuleDoNotAssociateAction o) { buildCounterGoogleCloudRetailV2RuleDoNotAssociateAction++; if (buildCounterGoogleCloudRetailV2RuleDoNotAssociateAction < 3) { - checkUnnamed66(o.doNotAssociateTerms!); - checkUnnamed67(o.queryTerms!); - checkUnnamed68(o.terms!); + checkUnnamed69(o.doNotAssociateTerms!); + checkUnnamed70(o.queryTerms!); + checkUnnamed71(o.terms!); } buildCounterGoogleCloudRetailV2RuleDoNotAssociateAction--; } @@ -3831,12 +3925,12 @@ void checkGoogleCloudRetailV2RuleFilterAction( core.List< api .GoogleCloudRetailV2RuleForceReturnFacetActionFacetPositionAdjustment> - buildUnnamed69() => [ + buildUnnamed72() => [ buildGoogleCloudRetailV2RuleForceReturnFacetActionFacetPositionAdjustment(), buildGoogleCloudRetailV2RuleForceReturnFacetActionFacetPositionAdjustment(), ]; -void checkUnnamed69( +void checkUnnamed72( core.List< api .GoogleCloudRetailV2RuleForceReturnFacetActionFacetPositionAdjustment> @@ -3854,7 +3948,7 @@ api.GoogleCloudRetailV2RuleForceReturnFacetAction final o = api.GoogleCloudRetailV2RuleForceReturnFacetAction(); buildCounterGoogleCloudRetailV2RuleForceReturnFacetAction++; if (buildCounterGoogleCloudRetailV2RuleForceReturnFacetAction < 3) { - o.facetPositionAdjustments = buildUnnamed69(); + o.facetPositionAdjustments = buildUnnamed72(); } buildCounterGoogleCloudRetailV2RuleForceReturnFacetAction--; return o; @@ -3864,7 +3958,7 @@ void checkGoogleCloudRetailV2RuleForceReturnFacetAction( api.GoogleCloudRetailV2RuleForceReturnFacetAction o) { buildCounterGoogleCloudRetailV2RuleForceReturnFacetAction++; if (buildCounterGoogleCloudRetailV2RuleForceReturnFacetAction < 3) { - checkUnnamed69(o.facetPositionAdjustments!); + checkUnnamed72(o.facetPositionAdjustments!); } buildCounterGoogleCloudRetailV2RuleForceReturnFacetAction--; } @@ -3904,12 +3998,12 @@ void checkGoogleCloudRetailV2RuleForceReturnFacetActionFacetPositionAdjustment( buildCounterGoogleCloudRetailV2RuleForceReturnFacetActionFacetPositionAdjustment--; } -core.List buildUnnamed70() => [ +core.List buildUnnamed73() => [ 'foo', 'foo', ]; -void 
checkUnnamed70(core.List o) { +void checkUnnamed73(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -3927,7 +4021,7 @@ api.GoogleCloudRetailV2RuleIgnoreAction final o = api.GoogleCloudRetailV2RuleIgnoreAction(); buildCounterGoogleCloudRetailV2RuleIgnoreAction++; if (buildCounterGoogleCloudRetailV2RuleIgnoreAction < 3) { - o.ignoreTerms = buildUnnamed70(); + o.ignoreTerms = buildUnnamed73(); } buildCounterGoogleCloudRetailV2RuleIgnoreAction--; return o; @@ -3937,17 +4031,17 @@ void checkGoogleCloudRetailV2RuleIgnoreAction( api.GoogleCloudRetailV2RuleIgnoreAction o) { buildCounterGoogleCloudRetailV2RuleIgnoreAction++; if (buildCounterGoogleCloudRetailV2RuleIgnoreAction < 3) { - checkUnnamed70(o.ignoreTerms!); + checkUnnamed73(o.ignoreTerms!); } buildCounterGoogleCloudRetailV2RuleIgnoreAction--; } -core.List buildUnnamed71() => [ +core.List buildUnnamed74() => [ 'foo', 'foo', ]; -void checkUnnamed71(core.List o) { +void checkUnnamed74(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -3959,12 +4053,12 @@ void checkUnnamed71(core.List o) { ); } -core.List buildUnnamed72() => [ +core.List buildUnnamed75() => [ 'foo', 'foo', ]; -void checkUnnamed72(core.List o) { +void checkUnnamed75(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -3976,12 +4070,12 @@ void checkUnnamed72(core.List o) { ); } -core.List buildUnnamed73() => [ +core.List buildUnnamed76() => [ 'foo', 'foo', ]; -void checkUnnamed73(core.List o) { +void checkUnnamed76(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -3999,9 +4093,9 @@ api.GoogleCloudRetailV2RuleOnewaySynonymsAction final o = api.GoogleCloudRetailV2RuleOnewaySynonymsAction(); buildCounterGoogleCloudRetailV2RuleOnewaySynonymsAction++; if (buildCounterGoogleCloudRetailV2RuleOnewaySynonymsAction < 3) { - o.onewayTerms = buildUnnamed71(); - o.queryTerms = buildUnnamed72(); - o.synonyms = buildUnnamed73(); + o.onewayTerms = buildUnnamed74(); + o.queryTerms = buildUnnamed75(); + o.synonyms = buildUnnamed76(); } buildCounterGoogleCloudRetailV2RuleOnewaySynonymsAction--; return o; @@ -4011,9 +4105,9 @@ void checkGoogleCloudRetailV2RuleOnewaySynonymsAction( api.GoogleCloudRetailV2RuleOnewaySynonymsAction o) { buildCounterGoogleCloudRetailV2RuleOnewaySynonymsAction++; if (buildCounterGoogleCloudRetailV2RuleOnewaySynonymsAction < 3) { - checkUnnamed71(o.onewayTerms!); - checkUnnamed72(o.queryTerms!); - checkUnnamed73(o.synonyms!); + checkUnnamed74(o.onewayTerms!); + checkUnnamed75(o.queryTerms!); + checkUnnamed76(o.synonyms!); } buildCounterGoogleCloudRetailV2RuleOnewaySynonymsAction--; } @@ -4042,12 +4136,12 @@ void checkGoogleCloudRetailV2RuleRedirectAction( buildCounterGoogleCloudRetailV2RuleRedirectAction--; } -core.List buildUnnamed74() => [ +core.List buildUnnamed77() => [ 'foo', 'foo', ]; -void checkUnnamed74(core.List o) { +void checkUnnamed77(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -4065,7 +4159,7 @@ api.GoogleCloudRetailV2RuleRemoveFacetAction final o = api.GoogleCloudRetailV2RuleRemoveFacetAction(); buildCounterGoogleCloudRetailV2RuleRemoveFacetAction++; if (buildCounterGoogleCloudRetailV2RuleRemoveFacetAction < 3) { - o.attributeNames = buildUnnamed74(); + o.attributeNames = buildUnnamed77(); } buildCounterGoogleCloudRetailV2RuleRemoveFacetAction--; return o; @@ -4075,17 +4169,17 @@ void checkGoogleCloudRetailV2RuleRemoveFacetAction( api.GoogleCloudRetailV2RuleRemoveFacetAction o) { 
buildCounterGoogleCloudRetailV2RuleRemoveFacetAction++; if (buildCounterGoogleCloudRetailV2RuleRemoveFacetAction < 3) { - checkUnnamed74(o.attributeNames!); + checkUnnamed77(o.attributeNames!); } buildCounterGoogleCloudRetailV2RuleRemoveFacetAction--; } -core.List buildUnnamed75() => [ +core.List buildUnnamed78() => [ 'foo', 'foo', ]; -void checkUnnamed75(core.List o) { +void checkUnnamed78(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -4103,7 +4197,7 @@ api.GoogleCloudRetailV2RuleReplacementAction final o = api.GoogleCloudRetailV2RuleReplacementAction(); buildCounterGoogleCloudRetailV2RuleReplacementAction++; if (buildCounterGoogleCloudRetailV2RuleReplacementAction < 3) { - o.queryTerms = buildUnnamed75(); + o.queryTerms = buildUnnamed78(); o.replacementTerm = 'foo'; o.term = 'foo'; } @@ -4115,7 +4209,7 @@ void checkGoogleCloudRetailV2RuleReplacementAction( api.GoogleCloudRetailV2RuleReplacementAction o) { buildCounterGoogleCloudRetailV2RuleReplacementAction++; if (buildCounterGoogleCloudRetailV2RuleReplacementAction < 3) { - checkUnnamed75(o.queryTerms!); + checkUnnamed78(o.queryTerms!); unittest.expect( o.replacementTerm!, unittest.equals('foo'), @@ -4128,12 +4222,12 @@ void checkGoogleCloudRetailV2RuleReplacementAction( buildCounterGoogleCloudRetailV2RuleReplacementAction--; } -core.List buildUnnamed76() => [ +core.List buildUnnamed79() => [ 'foo', 'foo', ]; -void checkUnnamed76(core.List o) { +void checkUnnamed79(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -4151,7 +4245,7 @@ api.GoogleCloudRetailV2RuleTwowaySynonymsAction final o = api.GoogleCloudRetailV2RuleTwowaySynonymsAction(); buildCounterGoogleCloudRetailV2RuleTwowaySynonymsAction++; if (buildCounterGoogleCloudRetailV2RuleTwowaySynonymsAction < 3) { - o.synonyms = buildUnnamed76(); + o.synonyms = buildUnnamed79(); } buildCounterGoogleCloudRetailV2RuleTwowaySynonymsAction--; return o; @@ -4161,29 +4255,29 @@ void checkGoogleCloudRetailV2RuleTwowaySynonymsAction( api.GoogleCloudRetailV2RuleTwowaySynonymsAction o) { buildCounterGoogleCloudRetailV2RuleTwowaySynonymsAction++; if (buildCounterGoogleCloudRetailV2RuleTwowaySynonymsAction < 3) { - checkUnnamed76(o.synonyms!); + checkUnnamed79(o.synonyms!); } buildCounterGoogleCloudRetailV2RuleTwowaySynonymsAction--; } -core.List buildUnnamed77() => [ +core.List buildUnnamed80() => [ buildGoogleCloudRetailV2SearchRequestFacetSpec(), buildGoogleCloudRetailV2SearchRequestFacetSpec(), ]; -void checkUnnamed77( +void checkUnnamed80( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudRetailV2SearchRequestFacetSpec(o[0]); checkGoogleCloudRetailV2SearchRequestFacetSpec(o[1]); } -core.Map buildUnnamed78() => { +core.Map buildUnnamed81() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed78(core.Map o) { +void checkUnnamed81(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -4195,12 +4289,12 @@ void checkUnnamed78(core.Map o) { ); } -core.List buildUnnamed79() => [ +core.List buildUnnamed82() => [ 'foo', 'foo', ]; -void checkUnnamed79(core.List o) { +void checkUnnamed82(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -4212,12 +4306,12 @@ void checkUnnamed79(core.List o) { ); } -core.List buildUnnamed80() => [ +core.List buildUnnamed83() => [ 'foo', 'foo', ]; -void checkUnnamed80(core.List o) { +void checkUnnamed83(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -4242,12 +4336,12 @@ 
api.GoogleCloudRetailV2SearchRequest buildGoogleCloudRetailV2SearchRequest() { o.dynamicFacetSpec = buildGoogleCloudRetailV2SearchRequestDynamicFacetSpec(); o.entity = 'foo'; - o.facetSpecs = buildUnnamed77(); + o.facetSpecs = buildUnnamed80(); o.filter = 'foo'; - o.labels = buildUnnamed78(); + o.labels = buildUnnamed81(); o.offset = 42; o.orderBy = 'foo'; - o.pageCategories = buildUnnamed79(); + o.pageCategories = buildUnnamed82(); o.pageSize = 42; o.pageToken = 'foo'; o.personalizationSpec = @@ -4261,7 +4355,7 @@ api.GoogleCloudRetailV2SearchRequest buildGoogleCloudRetailV2SearchRequest() { o.tileNavigationSpec = buildGoogleCloudRetailV2SearchRequestTileNavigationSpec(); o.userInfo = buildGoogleCloudRetailV2UserInfo(); - o.variantRollupKeys = buildUnnamed80(); + o.variantRollupKeys = buildUnnamed83(); o.visitorId = 'foo'; } buildCounterGoogleCloudRetailV2SearchRequest--; @@ -4288,12 +4382,12 @@ void checkGoogleCloudRetailV2SearchRequest( o.entity!, unittest.equals('foo'), ); - checkUnnamed77(o.facetSpecs!); + checkUnnamed80(o.facetSpecs!); unittest.expect( o.filter!, unittest.equals('foo'), ); - checkUnnamed78(o.labels!); + checkUnnamed81(o.labels!); unittest.expect( o.offset!, unittest.equals(42), @@ -4302,7 +4396,7 @@ void checkGoogleCloudRetailV2SearchRequest( o.orderBy!, unittest.equals('foo'), ); - checkUnnamed79(o.pageCategories!); + checkUnnamed82(o.pageCategories!); unittest.expect( o.pageSize!, unittest.equals(42), @@ -4328,7 +4422,7 @@ void checkGoogleCloudRetailV2SearchRequest( checkGoogleCloudRetailV2SearchRequestTileNavigationSpec( o.tileNavigationSpec!); checkGoogleCloudRetailV2UserInfo(o.userInfo!); - checkUnnamed80(o.variantRollupKeys!); + checkUnnamed83(o.variantRollupKeys!); unittest.expect( o.visitorId!, unittest.equals('foo'), @@ -4338,12 +4432,12 @@ void checkGoogleCloudRetailV2SearchRequest( } core.List - buildUnnamed81() => [ + buildUnnamed84() => [ buildGoogleCloudRetailV2SearchRequestBoostSpecConditionBoostSpec(), buildGoogleCloudRetailV2SearchRequestBoostSpecConditionBoostSpec(), ]; -void checkUnnamed81( +void checkUnnamed84( core.List o) { unittest.expect(o, unittest.hasLength(2)); @@ -4357,7 +4451,7 @@ api.GoogleCloudRetailV2SearchRequestBoostSpec final o = api.GoogleCloudRetailV2SearchRequestBoostSpec(); buildCounterGoogleCloudRetailV2SearchRequestBoostSpec++; if (buildCounterGoogleCloudRetailV2SearchRequestBoostSpec < 3) { - o.conditionBoostSpecs = buildUnnamed81(); + o.conditionBoostSpecs = buildUnnamed84(); o.skipBoostSpecValidation = true; } buildCounterGoogleCloudRetailV2SearchRequestBoostSpec--; @@ -4368,7 +4462,7 @@ void checkGoogleCloudRetailV2SearchRequestBoostSpec( api.GoogleCloudRetailV2SearchRequestBoostSpec o) { buildCounterGoogleCloudRetailV2SearchRequestBoostSpec++; if (buildCounterGoogleCloudRetailV2SearchRequestBoostSpec < 3) { - checkUnnamed81(o.conditionBoostSpecs!); + checkUnnamed84(o.conditionBoostSpecs!); unittest.expect(o.skipBoostSpecValidation!, unittest.isTrue); } buildCounterGoogleCloudRetailV2SearchRequestBoostSpec--; @@ -4472,12 +4566,12 @@ void checkGoogleCloudRetailV2SearchRequestConversationalSearchSpecUserAnswer( buildCounterGoogleCloudRetailV2SearchRequestConversationalSearchSpecUserAnswer--; } -core.List buildUnnamed82() => [ +core.List buildUnnamed85() => [ buildGoogleCloudRetailV2ProductAttributeValue(), buildGoogleCloudRetailV2ProductAttributeValue(), ]; -void checkUnnamed82(core.List o) { +void checkUnnamed85(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudRetailV2ProductAttributeValue(o[0]); 
checkGoogleCloudRetailV2ProductAttributeValue(o[1]); @@ -4494,7 +4588,7 @@ api.GoogleCloudRetailV2SearchRequestConversationalSearchSpecUserAnswerSelectedAn if (buildCounterGoogleCloudRetailV2SearchRequestConversationalSearchSpecUserAnswerSelectedAnswer < 3) { o.productAttributeValue = buildGoogleCloudRetailV2ProductAttributeValue(); - o.productAttributeValues = buildUnnamed82(); + o.productAttributeValues = buildUnnamed85(); } buildCounterGoogleCloudRetailV2SearchRequestConversationalSearchSpecUserAnswerSelectedAnswer--; return o; @@ -4507,7 +4601,7 @@ void checkGoogleCloudRetailV2SearchRequestConversationalSearchSpecUserAnswerSele if (buildCounterGoogleCloudRetailV2SearchRequestConversationalSearchSpecUserAnswerSelectedAnswer < 3) { checkGoogleCloudRetailV2ProductAttributeValue(o.productAttributeValue!); - checkUnnamed82(o.productAttributeValues!); + checkUnnamed85(o.productAttributeValues!); } buildCounterGoogleCloudRetailV2SearchRequestConversationalSearchSpecUserAnswerSelectedAnswer--; } @@ -4536,12 +4630,12 @@ void checkGoogleCloudRetailV2SearchRequestDynamicFacetSpec( buildCounterGoogleCloudRetailV2SearchRequestDynamicFacetSpec--; } -core.List buildUnnamed83() => [ +core.List buildUnnamed86() => [ 'foo', 'foo', ]; -void checkUnnamed83(core.List o) { +void checkUnnamed86(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -4560,7 +4654,7 @@ api.GoogleCloudRetailV2SearchRequestFacetSpec buildCounterGoogleCloudRetailV2SearchRequestFacetSpec++; if (buildCounterGoogleCloudRetailV2SearchRequestFacetSpec < 3) { o.enableDynamicPosition = true; - o.excludedFilterKeys = buildUnnamed83(); + o.excludedFilterKeys = buildUnnamed86(); o.facetKey = buildGoogleCloudRetailV2SearchRequestFacetSpecFacetKey(); o.limit = 42; } @@ -4573,7 +4667,7 @@ void checkGoogleCloudRetailV2SearchRequestFacetSpec( buildCounterGoogleCloudRetailV2SearchRequestFacetSpec++; if (buildCounterGoogleCloudRetailV2SearchRequestFacetSpec < 3) { unittest.expect(o.enableDynamicPosition!, unittest.isTrue); - checkUnnamed83(o.excludedFilterKeys!); + checkUnnamed86(o.excludedFilterKeys!); checkGoogleCloudRetailV2SearchRequestFacetSpecFacetKey(o.facetKey!); unittest.expect( o.limit!, @@ -4583,12 +4677,12 @@ void checkGoogleCloudRetailV2SearchRequestFacetSpec( buildCounterGoogleCloudRetailV2SearchRequestFacetSpec--; } -core.List buildUnnamed84() => [ +core.List buildUnnamed87() => [ 'foo', 'foo', ]; -void checkUnnamed84(core.List o) { +void checkUnnamed87(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -4600,23 +4694,23 @@ void checkUnnamed84(core.List o) { ); } -core.List buildUnnamed85() => [ +core.List buildUnnamed88() => [ buildGoogleCloudRetailV2Interval(), buildGoogleCloudRetailV2Interval(), ]; -void checkUnnamed85(core.List o) { +void checkUnnamed88(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudRetailV2Interval(o[0]); checkGoogleCloudRetailV2Interval(o[1]); } -core.List buildUnnamed86() => [ +core.List buildUnnamed89() => [ 'foo', 'foo', ]; -void checkUnnamed86(core.List o) { +void checkUnnamed89(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -4628,12 +4722,12 @@ void checkUnnamed86(core.List o) { ); } -core.List buildUnnamed87() => [ +core.List buildUnnamed90() => [ 'foo', 'foo', ]; -void checkUnnamed87(core.List o) { +void checkUnnamed90(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -4652,13 +4746,13 @@ api.GoogleCloudRetailV2SearchRequestFacetSpecFacetKey 
buildCounterGoogleCloudRetailV2SearchRequestFacetSpecFacetKey++; if (buildCounterGoogleCloudRetailV2SearchRequestFacetSpecFacetKey < 3) { o.caseInsensitive = true; - o.contains = buildUnnamed84(); - o.intervals = buildUnnamed85(); + o.contains = buildUnnamed87(); + o.intervals = buildUnnamed88(); o.key = 'foo'; o.orderBy = 'foo'; - o.prefixes = buildUnnamed86(); + o.prefixes = buildUnnamed89(); o.query = 'foo'; - o.restrictedValues = buildUnnamed87(); + o.restrictedValues = buildUnnamed90(); o.returnMinMax = true; } buildCounterGoogleCloudRetailV2SearchRequestFacetSpecFacetKey--; @@ -4670,8 +4764,8 @@ void checkGoogleCloudRetailV2SearchRequestFacetSpecFacetKey( buildCounterGoogleCloudRetailV2SearchRequestFacetSpecFacetKey++; if (buildCounterGoogleCloudRetailV2SearchRequestFacetSpecFacetKey < 3) { unittest.expect(o.caseInsensitive!, unittest.isTrue); - checkUnnamed84(o.contains!); - checkUnnamed85(o.intervals!); + checkUnnamed87(o.contains!); + checkUnnamed88(o.intervals!); unittest.expect( o.key!, unittest.equals('foo'), @@ -4680,12 +4774,12 @@ void checkGoogleCloudRetailV2SearchRequestFacetSpecFacetKey( o.orderBy!, unittest.equals('foo'), ); - checkUnnamed86(o.prefixes!); + checkUnnamed89(o.prefixes!); unittest.expect( o.query!, unittest.equals('foo'), ); - checkUnnamed87(o.restrictedValues!); + checkUnnamed90(o.restrictedValues!); unittest.expect(o.returnMinMax!, unittest.isTrue); } buildCounterGoogleCloudRetailV2SearchRequestFacetSpecFacetKey--; @@ -4765,12 +4859,12 @@ void checkGoogleCloudRetailV2SearchRequestSpellCorrectionSpec( buildCounterGoogleCloudRetailV2SearchRequestSpellCorrectionSpec--; } -core.List buildUnnamed88() => [ +core.List buildUnnamed91() => [ buildGoogleCloudRetailV2Tile(), buildGoogleCloudRetailV2Tile(), ]; -void checkUnnamed88(core.List o) { +void checkUnnamed91(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudRetailV2Tile(o[0]); checkGoogleCloudRetailV2Tile(o[1]); @@ -4782,7 +4876,7 @@ api.GoogleCloudRetailV2SearchRequestTileNavigationSpec final o = api.GoogleCloudRetailV2SearchRequestTileNavigationSpec(); buildCounterGoogleCloudRetailV2SearchRequestTileNavigationSpec++; if (buildCounterGoogleCloudRetailV2SearchRequestTileNavigationSpec < 3) { - o.appliedTiles = buildUnnamed88(); + o.appliedTiles = buildUnnamed91(); o.tileNavigationRequested = true; } buildCounterGoogleCloudRetailV2SearchRequestTileNavigationSpec--; @@ -4793,18 +4887,18 @@ void checkGoogleCloudRetailV2SearchRequestTileNavigationSpec( api.GoogleCloudRetailV2SearchRequestTileNavigationSpec o) { buildCounterGoogleCloudRetailV2SearchRequestTileNavigationSpec++; if (buildCounterGoogleCloudRetailV2SearchRequestTileNavigationSpec < 3) { - checkUnnamed88(o.appliedTiles!); + checkUnnamed91(o.appliedTiles!); unittest.expect(o.tileNavigationRequested!, unittest.isTrue); } buildCounterGoogleCloudRetailV2SearchRequestTileNavigationSpec--; } -core.List buildUnnamed89() => [ +core.List buildUnnamed92() => [ 'foo', 'foo', ]; -void checkUnnamed89(core.List o) { +void checkUnnamed92(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -4816,35 +4910,35 @@ void checkUnnamed89(core.List o) { ); } -core.List buildUnnamed90() => [ +core.List buildUnnamed93() => [ buildGoogleCloudRetailV2ExperimentInfo(), buildGoogleCloudRetailV2ExperimentInfo(), ]; -void checkUnnamed90(core.List o) { +void checkUnnamed93(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudRetailV2ExperimentInfo(o[0]); checkGoogleCloudRetailV2ExperimentInfo(o[1]); } 
-core.List buildUnnamed91() => [ +core.List buildUnnamed94() => [ buildGoogleCloudRetailV2SearchResponseFacet(), buildGoogleCloudRetailV2SearchResponseFacet(), ]; -void checkUnnamed91(core.List o) { +void checkUnnamed94(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudRetailV2SearchResponseFacet(o[0]); checkGoogleCloudRetailV2SearchResponseFacet(o[1]); } core.List - buildUnnamed92() => [ + buildUnnamed95() => [ buildGoogleCloudRetailV2SearchRequestBoostSpecConditionBoostSpec(), buildGoogleCloudRetailV2SearchRequestBoostSpecConditionBoostSpec(), ]; -void checkUnnamed92( +void checkUnnamed95( core.List o) { unittest.expect(o, unittest.hasLength(2)); @@ -4852,13 +4946,13 @@ void checkUnnamed92( checkGoogleCloudRetailV2SearchRequestBoostSpecConditionBoostSpec(o[1]); } -core.List buildUnnamed93() => +core.List buildUnnamed96() => [ buildGoogleCloudRetailV2SearchResponseSearchResult(), buildGoogleCloudRetailV2SearchResponseSearchResult(), ]; -void checkUnnamed93( +void checkUnnamed96( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudRetailV2SearchResponseSearchResult(o[0]); @@ -4870,19 +4964,20 @@ api.GoogleCloudRetailV2SearchResponse buildGoogleCloudRetailV2SearchResponse() { final o = api.GoogleCloudRetailV2SearchResponse(); buildCounterGoogleCloudRetailV2SearchResponse++; if (buildCounterGoogleCloudRetailV2SearchResponse < 3) { - o.appliedControls = buildUnnamed89(); + o.appliedControls = buildUnnamed92(); o.attributionToken = 'foo'; o.conversationalSearchResult = buildGoogleCloudRetailV2SearchResponseConversationalSearchResult(); o.correctedQuery = 'foo'; - o.experimentInfo = buildUnnamed90(); - o.facets = buildUnnamed91(); - o.invalidConditionBoostSpecs = buildUnnamed92(); + o.experimentInfo = buildUnnamed93(); + o.facets = buildUnnamed94(); + o.invalidConditionBoostSpecs = buildUnnamed95(); o.nextPageToken = 'foo'; + o.pinControlMetadata = buildGoogleCloudRetailV2PinControlMetadata(); o.queryExpansionInfo = buildGoogleCloudRetailV2SearchResponseQueryExpansionInfo(); o.redirectUri = 'foo'; - o.results = buildUnnamed93(); + o.results = buildUnnamed96(); o.tileNavigationResult = buildGoogleCloudRetailV2SearchResponseTileNavigationResult(); o.totalSize = 42; @@ -4895,7 +4990,7 @@ void checkGoogleCloudRetailV2SearchResponse( api.GoogleCloudRetailV2SearchResponse o) { buildCounterGoogleCloudRetailV2SearchResponse++; if (buildCounterGoogleCloudRetailV2SearchResponse < 3) { - checkUnnamed89(o.appliedControls!); + checkUnnamed92(o.appliedControls!); unittest.expect( o.attributionToken!, unittest.equals('foo'), @@ -4906,20 +5001,21 @@ void checkGoogleCloudRetailV2SearchResponse( o.correctedQuery!, unittest.equals('foo'), ); - checkUnnamed90(o.experimentInfo!); - checkUnnamed91(o.facets!); - checkUnnamed92(o.invalidConditionBoostSpecs!); + checkUnnamed93(o.experimentInfo!); + checkUnnamed94(o.facets!); + checkUnnamed95(o.invalidConditionBoostSpecs!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), ); + checkGoogleCloudRetailV2PinControlMetadata(o.pinControlMetadata!); checkGoogleCloudRetailV2SearchResponseQueryExpansionInfo( o.queryExpansionInfo!); unittest.expect( o.redirectUri!, unittest.equals('foo'), ); - checkUnnamed93(o.results!); + checkUnnamed96(o.results!); checkGoogleCloudRetailV2SearchResponseTileNavigationResult( o.tileNavigationResult!); unittest.expect( @@ -4933,12 +5029,12 @@ void checkGoogleCloudRetailV2SearchResponse( core.List< api .GoogleCloudRetailV2SearchResponseConversationalSearchResultAdditionalFilter> - 
buildUnnamed94() => [ + buildUnnamed97() => [ buildGoogleCloudRetailV2SearchResponseConversationalSearchResultAdditionalFilter(), buildGoogleCloudRetailV2SearchResponseConversationalSearchResultAdditionalFilter(), ]; -void checkUnnamed94( +void checkUnnamed97( core.List< api .GoogleCloudRetailV2SearchResponseConversationalSearchResultAdditionalFilter> @@ -4953,12 +5049,12 @@ void checkUnnamed94( core.List< api .GoogleCloudRetailV2SearchResponseConversationalSearchResultSuggestedAnswer> - buildUnnamed95() => [ + buildUnnamed98() => [ buildGoogleCloudRetailV2SearchResponseConversationalSearchResultSuggestedAnswer(), buildGoogleCloudRetailV2SearchResponseConversationalSearchResultSuggestedAnswer(), ]; -void checkUnnamed95( +void checkUnnamed98( core.List< api .GoogleCloudRetailV2SearchResponseConversationalSearchResultSuggestedAnswer> @@ -4980,11 +5076,11 @@ api.GoogleCloudRetailV2SearchResponseConversationalSearchResult 3) { o.additionalFilter = buildGoogleCloudRetailV2SearchResponseConversationalSearchResultAdditionalFilter(); - o.additionalFilters = buildUnnamed94(); + o.additionalFilters = buildUnnamed97(); o.conversationId = 'foo'; o.followupQuestion = 'foo'; o.refinedQuery = 'foo'; - o.suggestedAnswers = buildUnnamed95(); + o.suggestedAnswers = buildUnnamed98(); } buildCounterGoogleCloudRetailV2SearchResponseConversationalSearchResult--; return o; @@ -4997,7 +5093,7 @@ void checkGoogleCloudRetailV2SearchResponseConversationalSearchResult( 3) { checkGoogleCloudRetailV2SearchResponseConversationalSearchResultAdditionalFilter( o.additionalFilter!); - checkUnnamed94(o.additionalFilters!); + checkUnnamed97(o.additionalFilters!); unittest.expect( o.conversationId!, unittest.equals('foo'), @@ -5010,7 +5106,7 @@ void checkGoogleCloudRetailV2SearchResponseConversationalSearchResult( o.refinedQuery!, unittest.equals('foo'), ); - checkUnnamed95(o.suggestedAnswers!); + checkUnnamed98(o.suggestedAnswers!); } buildCounterGoogleCloudRetailV2SearchResponseConversationalSearchResult--; } @@ -5070,12 +5166,12 @@ void checkGoogleCloudRetailV2SearchResponseConversationalSearchResultSuggestedAn } core.List - buildUnnamed96() => [ + buildUnnamed99() => [ buildGoogleCloudRetailV2SearchResponseFacetFacetValue(), buildGoogleCloudRetailV2SearchResponseFacetFacetValue(), ]; -void checkUnnamed96( +void checkUnnamed99( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudRetailV2SearchResponseFacetFacetValue(o[0]); @@ -5090,7 +5186,7 @@ api.GoogleCloudRetailV2SearchResponseFacet if (buildCounterGoogleCloudRetailV2SearchResponseFacet < 3) { o.dynamicFacet = true; o.key = 'foo'; - o.values = buildUnnamed96(); + o.values = buildUnnamed99(); } buildCounterGoogleCloudRetailV2SearchResponseFacet--; return o; @@ -5105,7 +5201,7 @@ void checkGoogleCloudRetailV2SearchResponseFacet( o.key!, unittest.equals('foo'), ); - checkUnnamed96(o.values!); + checkUnnamed99(o.values!); } buildCounterGoogleCloudRetailV2SearchResponseFacet--; } @@ -5177,12 +5273,12 @@ void checkGoogleCloudRetailV2SearchResponseQueryExpansionInfo( buildCounterGoogleCloudRetailV2SearchResponseQueryExpansionInfo--; } -core.Map buildUnnamed97() => { +core.Map buildUnnamed100() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed97(core.Map o) { +void checkUnnamed100(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -5194,12 +5290,12 @@ void checkUnnamed97(core.Map o) { ); } -core.List buildUnnamed98() => [ +core.List buildUnnamed101() => [ 'foo', 'foo', ]; -void checkUnnamed98(core.List o) { +void 
checkUnnamed101(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -5211,7 +5307,7 @@ void checkUnnamed98(core.List o) { ); } -core.Map buildUnnamed99() => { +core.Map buildUnnamed102() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -5224,7 +5320,7 @@ core.Map buildUnnamed99() => { }, }; -void checkUnnamed99(core.Map o) { +void checkUnnamed102(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted7 = (o['x']!) as core.Map; unittest.expect(casted7, unittest.hasLength(3)); @@ -5264,10 +5360,10 @@ api.GoogleCloudRetailV2SearchResponseSearchResult if (buildCounterGoogleCloudRetailV2SearchResponseSearchResult < 3) { o.id = 'foo'; o.matchingVariantCount = 42; - o.matchingVariantFields = buildUnnamed97(); - o.personalLabels = buildUnnamed98(); + o.matchingVariantFields = buildUnnamed100(); + o.personalLabels = buildUnnamed101(); o.product = buildGoogleCloudRetailV2Product(); - o.variantRollupValues = buildUnnamed99(); + o.variantRollupValues = buildUnnamed102(); } buildCounterGoogleCloudRetailV2SearchResponseSearchResult--; return o; @@ -5285,20 +5381,20 @@ void checkGoogleCloudRetailV2SearchResponseSearchResult( o.matchingVariantCount!, unittest.equals(42), ); - checkUnnamed97(o.matchingVariantFields!); - checkUnnamed98(o.personalLabels!); + checkUnnamed100(o.matchingVariantFields!); + checkUnnamed101(o.personalLabels!); checkGoogleCloudRetailV2Product(o.product!); - checkUnnamed99(o.variantRollupValues!); + checkUnnamed102(o.variantRollupValues!); } buildCounterGoogleCloudRetailV2SearchResponseSearchResult--; } -core.List buildUnnamed100() => [ +core.List buildUnnamed103() => [ buildGoogleCloudRetailV2Tile(), buildGoogleCloudRetailV2Tile(), ]; -void checkUnnamed100(core.List o) { +void checkUnnamed103(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudRetailV2Tile(o[0]); checkGoogleCloudRetailV2Tile(o[1]); @@ -5310,7 +5406,7 @@ api.GoogleCloudRetailV2SearchResponseTileNavigationResult final o = api.GoogleCloudRetailV2SearchResponseTileNavigationResult(); buildCounterGoogleCloudRetailV2SearchResponseTileNavigationResult++; if (buildCounterGoogleCloudRetailV2SearchResponseTileNavigationResult < 3) { - o.tiles = buildUnnamed100(); + o.tiles = buildUnnamed103(); } buildCounterGoogleCloudRetailV2SearchResponseTileNavigationResult--; return o; @@ -5320,17 +5416,17 @@ void checkGoogleCloudRetailV2SearchResponseTileNavigationResult( api.GoogleCloudRetailV2SearchResponseTileNavigationResult o) { buildCounterGoogleCloudRetailV2SearchResponseTileNavigationResult++; if (buildCounterGoogleCloudRetailV2SearchResponseTileNavigationResult < 3) { - checkUnnamed100(o.tiles!); + checkUnnamed103(o.tiles!); } buildCounterGoogleCloudRetailV2SearchResponseTileNavigationResult--; } -core.List buildUnnamed101() => [ +core.List buildUnnamed104() => [ 'foo', 'foo', ]; -void checkUnnamed101(core.List o) { +void checkUnnamed104(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -5342,12 +5438,12 @@ void checkUnnamed101(core.List o) { ); } -core.List buildUnnamed102() => [ +core.List buildUnnamed105() => [ 'foo', 'foo', ]; -void checkUnnamed102(core.List o) { +void checkUnnamed105(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -5359,12 +5455,12 @@ void checkUnnamed102(core.List o) { ); } -core.List buildUnnamed103() => [ +core.List buildUnnamed106() => [ 'foo', 'foo', ]; -void checkUnnamed103(core.List o) { +void checkUnnamed106(core.List o) { unittest.expect(o, unittest.hasLength(2)); 
unittest.expect( o[0], @@ -5376,12 +5472,12 @@ void checkUnnamed103(core.List o) { ); } -core.List buildUnnamed104() => [ +core.List buildUnnamed107() => [ 'foo', 'foo', ]; -void checkUnnamed104(core.List o) { +void checkUnnamed107(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -5393,12 +5489,12 @@ void checkUnnamed104(core.List o) { ); } -core.List buildUnnamed105() => [ +core.List buildUnnamed108() => [ 'foo', 'foo', ]; -void checkUnnamed105(core.List o) { +void checkUnnamed108(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -5410,12 +5506,12 @@ void checkUnnamed105(core.List o) { ); } -core.List buildUnnamed106() => [ +core.List buildUnnamed109() => [ 'foo', 'foo', ]; -void checkUnnamed106(core.List o) { +void checkUnnamed109(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -5427,12 +5523,12 @@ void checkUnnamed106(core.List o) { ); } -core.List buildUnnamed107() => [ +core.List buildUnnamed110() => [ 'foo', 'foo', ]; -void checkUnnamed107(core.List o) { +void checkUnnamed110(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -5444,12 +5540,12 @@ void checkUnnamed107(core.List o) { ); } -core.List buildUnnamed108() => [ +core.List buildUnnamed111() => [ 'foo', 'foo', ]; -void checkUnnamed108(core.List o) { +void checkUnnamed111(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -5461,12 +5557,12 @@ void checkUnnamed108(core.List o) { ); } -core.List buildUnnamed109() => [ +core.List buildUnnamed112() => [ 'foo', 'foo', ]; -void checkUnnamed109(core.List o) { +void checkUnnamed112(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -5478,12 +5574,12 @@ void checkUnnamed109(core.List o) { ); } -core.List buildUnnamed110() => [ +core.List buildUnnamed113() => [ 'foo', 'foo', ]; -void checkUnnamed110(core.List o) { +void checkUnnamed113(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -5500,28 +5596,28 @@ api.GoogleCloudRetailV2ServingConfig buildGoogleCloudRetailV2ServingConfig() { final o = api.GoogleCloudRetailV2ServingConfig(); buildCounterGoogleCloudRetailV2ServingConfig++; if (buildCounterGoogleCloudRetailV2ServingConfig < 3) { - o.boostControlIds = buildUnnamed101(); + o.boostControlIds = buildUnnamed104(); o.displayName = 'foo'; o.diversityLevel = 'foo'; o.diversityType = 'foo'; - o.doNotAssociateControlIds = buildUnnamed102(); + o.doNotAssociateControlIds = buildUnnamed105(); o.dynamicFacetSpec = buildGoogleCloudRetailV2SearchRequestDynamicFacetSpec(); o.enableCategoryFilterLevel = 'foo'; - o.facetControlIds = buildUnnamed103(); - o.filterControlIds = buildUnnamed104(); - o.ignoreControlIds = buildUnnamed105(); + o.facetControlIds = buildUnnamed106(); + o.filterControlIds = buildUnnamed107(); + o.ignoreControlIds = buildUnnamed108(); o.ignoreRecsDenylist = true; o.modelId = 'foo'; o.name = 'foo'; - o.onewaySynonymsControlIds = buildUnnamed106(); + o.onewaySynonymsControlIds = buildUnnamed109(); o.personalizationSpec = buildGoogleCloudRetailV2SearchRequestPersonalizationSpec(); o.priceRerankingLevel = 'foo'; - o.redirectControlIds = buildUnnamed107(); - o.replacementControlIds = buildUnnamed108(); - o.solutionTypes = buildUnnamed109(); - o.twowaySynonymsControlIds = buildUnnamed110(); + o.redirectControlIds = buildUnnamed110(); + o.replacementControlIds = buildUnnamed111(); + o.solutionTypes = buildUnnamed112(); + o.twowaySynonymsControlIds = 
buildUnnamed113(); } buildCounterGoogleCloudRetailV2ServingConfig--; return o; @@ -5531,7 +5627,7 @@ void checkGoogleCloudRetailV2ServingConfig( api.GoogleCloudRetailV2ServingConfig o) { buildCounterGoogleCloudRetailV2ServingConfig++; if (buildCounterGoogleCloudRetailV2ServingConfig < 3) { - checkUnnamed101(o.boostControlIds!); + checkUnnamed104(o.boostControlIds!); unittest.expect( o.displayName!, unittest.equals('foo'), @@ -5544,15 +5640,15 @@ void checkGoogleCloudRetailV2ServingConfig( o.diversityType!, unittest.equals('foo'), ); - checkUnnamed102(o.doNotAssociateControlIds!); + checkUnnamed105(o.doNotAssociateControlIds!); checkGoogleCloudRetailV2SearchRequestDynamicFacetSpec(o.dynamicFacetSpec!); unittest.expect( o.enableCategoryFilterLevel!, unittest.equals('foo'), ); - checkUnnamed103(o.facetControlIds!); - checkUnnamed104(o.filterControlIds!); - checkUnnamed105(o.ignoreControlIds!); + checkUnnamed106(o.facetControlIds!); + checkUnnamed107(o.filterControlIds!); + checkUnnamed108(o.ignoreControlIds!); unittest.expect(o.ignoreRecsDenylist!, unittest.isTrue); unittest.expect( o.modelId!, @@ -5562,17 +5658,17 @@ void checkGoogleCloudRetailV2ServingConfig( o.name!, unittest.equals('foo'), ); - checkUnnamed106(o.onewaySynonymsControlIds!); + checkUnnamed109(o.onewaySynonymsControlIds!); checkGoogleCloudRetailV2SearchRequestPersonalizationSpec( o.personalizationSpec!); unittest.expect( o.priceRerankingLevel!, unittest.equals('foo'), ); - checkUnnamed107(o.redirectControlIds!); - checkUnnamed108(o.replacementControlIds!); - checkUnnamed109(o.solutionTypes!); - checkUnnamed110(o.twowaySynonymsControlIds!); + checkUnnamed110(o.redirectControlIds!); + checkUnnamed111(o.replacementControlIds!); + checkUnnamed112(o.solutionTypes!); + checkUnnamed113(o.twowaySynonymsControlIds!); } buildCounterGoogleCloudRetailV2ServingConfig--; } @@ -5718,24 +5814,24 @@ void checkGoogleCloudRetailV2UpdateGenerativeQuestionConfigRequest( } core.Map - buildUnnamed111() => { + buildUnnamed114() => { 'x': buildGoogleCloudRetailV2CustomAttribute(), 'y': buildGoogleCloudRetailV2CustomAttribute(), }; -void checkUnnamed111( +void checkUnnamed114( core.Map o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudRetailV2CustomAttribute(o['x']!); checkGoogleCloudRetailV2CustomAttribute(o['y']!); } -core.List buildUnnamed112() => [ +core.List buildUnnamed115() => [ 'foo', 'foo', ]; -void checkUnnamed112(core.List o) { +void checkUnnamed115(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -5747,12 +5843,12 @@ void checkUnnamed112(core.List o) { ); } -core.List buildUnnamed113() => [ +core.List buildUnnamed116() => [ 'foo', 'foo', ]; -void checkUnnamed113(core.List o) { +void checkUnnamed116(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -5764,12 +5860,12 @@ void checkUnnamed113(core.List o) { ); } -core.List buildUnnamed114() => [ +core.List buildUnnamed117() => [ buildGoogleCloudRetailV2ProductDetail(), buildGoogleCloudRetailV2ProductDetail(), ]; -void checkUnnamed114(core.List o) { +void checkUnnamed117(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudRetailV2ProductDetail(o[0]); checkGoogleCloudRetailV2ProductDetail(o[1]); @@ -5780,20 +5876,20 @@ api.GoogleCloudRetailV2UserEvent buildGoogleCloudRetailV2UserEvent() { final o = api.GoogleCloudRetailV2UserEvent(); buildCounterGoogleCloudRetailV2UserEvent++; if (buildCounterGoogleCloudRetailV2UserEvent < 3) { - o.attributes = buildUnnamed111(); + o.attributes = 
buildUnnamed114(); o.attributionToken = 'foo'; o.cartId = 'foo'; o.completionDetail = buildGoogleCloudRetailV2CompletionDetail(); o.entity = 'foo'; o.eventTime = 'foo'; o.eventType = 'foo'; - o.experimentIds = buildUnnamed112(); + o.experimentIds = buildUnnamed115(); o.filter = 'foo'; o.offset = 42; o.orderBy = 'foo'; - o.pageCategories = buildUnnamed113(); + o.pageCategories = buildUnnamed116(); o.pageViewId = 'foo'; - o.productDetails = buildUnnamed114(); + o.productDetails = buildUnnamed117(); o.purchaseTransaction = buildGoogleCloudRetailV2PurchaseTransaction(); o.referrerUri = 'foo'; o.searchQuery = 'foo'; @@ -5809,7 +5905,7 @@ api.GoogleCloudRetailV2UserEvent buildGoogleCloudRetailV2UserEvent() { void checkGoogleCloudRetailV2UserEvent(api.GoogleCloudRetailV2UserEvent o) { buildCounterGoogleCloudRetailV2UserEvent++; if (buildCounterGoogleCloudRetailV2UserEvent < 3) { - checkUnnamed111(o.attributes!); + checkUnnamed114(o.attributes!); unittest.expect( o.attributionToken!, unittest.equals('foo'), @@ -5831,7 +5927,7 @@ void checkGoogleCloudRetailV2UserEvent(api.GoogleCloudRetailV2UserEvent o) { o.eventType!, unittest.equals('foo'), ); - checkUnnamed112(o.experimentIds!); + checkUnnamed115(o.experimentIds!); unittest.expect( o.filter!, unittest.equals('foo'), @@ -5844,12 +5940,12 @@ void checkGoogleCloudRetailV2UserEvent(api.GoogleCloudRetailV2UserEvent o) { o.orderBy!, unittest.equals('foo'), ); - checkUnnamed113(o.pageCategories!); + checkUnnamed116(o.pageCategories!); unittest.expect( o.pageViewId!, unittest.equals('foo'), ); - checkUnnamed114(o.productDetails!); + checkUnnamed117(o.productDetails!); checkGoogleCloudRetailV2PurchaseTransaction(o.purchaseTransaction!); unittest.expect( o.referrerUri!, @@ -5876,12 +5972,12 @@ void checkGoogleCloudRetailV2UserEvent(api.GoogleCloudRetailV2UserEvent o) { buildCounterGoogleCloudRetailV2UserEvent--; } -core.List buildUnnamed115() => [ +core.List buildUnnamed118() => [ buildGoogleCloudRetailV2UserEvent(), buildGoogleCloudRetailV2UserEvent(), ]; -void checkUnnamed115(core.List o) { +void checkUnnamed118(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudRetailV2UserEvent(o[0]); checkGoogleCloudRetailV2UserEvent(o[1]); @@ -5893,7 +5989,7 @@ api.GoogleCloudRetailV2UserEventInlineSource final o = api.GoogleCloudRetailV2UserEventInlineSource(); buildCounterGoogleCloudRetailV2UserEventInlineSource++; if (buildCounterGoogleCloudRetailV2UserEventInlineSource < 3) { - o.userEvents = buildUnnamed115(); + o.userEvents = buildUnnamed118(); } buildCounterGoogleCloudRetailV2UserEventInlineSource--; return o; @@ -5903,7 +5999,7 @@ void checkGoogleCloudRetailV2UserEventInlineSource( api.GoogleCloudRetailV2UserEventInlineSource o) { buildCounterGoogleCloudRetailV2UserEventInlineSource++; if (buildCounterGoogleCloudRetailV2UserEventInlineSource < 3) { - checkUnnamed115(o.userEvents!); + checkUnnamed118(o.userEvents!); } buildCounterGoogleCloudRetailV2UserEventInlineSource--; } @@ -5967,12 +6063,12 @@ void checkGoogleCloudRetailV2UserInfo(api.GoogleCloudRetailV2UserInfo o) { buildCounterGoogleCloudRetailV2UserInfo--; } -core.List buildUnnamed116() => [ +core.List buildUnnamed119() => [ buildGoogleLongrunningOperation(), buildGoogleLongrunningOperation(), ]; -void checkUnnamed116(core.List o) { +void checkUnnamed119(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleLongrunningOperation(o[0]); checkGoogleLongrunningOperation(o[1]); @@ -5985,7 +6081,7 @@ api.GoogleLongrunningListOperationsResponse 
buildCounterGoogleLongrunningListOperationsResponse++; if (buildCounterGoogleLongrunningListOperationsResponse < 3) { o.nextPageToken = 'foo'; - o.operations = buildUnnamed116(); + o.operations = buildUnnamed119(); } buildCounterGoogleLongrunningListOperationsResponse--; return o; @@ -5999,12 +6095,12 @@ void checkGoogleLongrunningListOperationsResponse( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed116(o.operations!); + checkUnnamed119(o.operations!); } buildCounterGoogleLongrunningListOperationsResponse--; } -core.Map buildUnnamed117() => { +core.Map buildUnnamed120() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -6017,7 +6113,7 @@ core.Map buildUnnamed117() => { }, }; -void checkUnnamed117(core.Map o) { +void checkUnnamed120(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted9 = (o['x']!) as core.Map; unittest.expect(casted9, unittest.hasLength(3)); @@ -6049,7 +6145,7 @@ void checkUnnamed117(core.Map o) { ); } -core.Map buildUnnamed118() => { +core.Map buildUnnamed121() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -6062,7 +6158,7 @@ core.Map buildUnnamed118() => { }, }; -void checkUnnamed118(core.Map o) { +void checkUnnamed121(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted11 = (o['x']!) as core.Map; unittest.expect(casted11, unittest.hasLength(3)); @@ -6101,9 +6197,9 @@ api.GoogleLongrunningOperation buildGoogleLongrunningOperation() { if (buildCounterGoogleLongrunningOperation < 3) { o.done = true; o.error = buildGoogleRpcStatus(); - o.metadata = buildUnnamed117(); + o.metadata = buildUnnamed120(); o.name = 'foo'; - o.response = buildUnnamed118(); + o.response = buildUnnamed121(); } buildCounterGoogleLongrunningOperation--; return o; @@ -6114,12 +6210,12 @@ void checkGoogleLongrunningOperation(api.GoogleLongrunningOperation o) { if (buildCounterGoogleLongrunningOperation < 3) { unittest.expect(o.done!, unittest.isTrue); checkGoogleRpcStatus(o.error!); - checkUnnamed117(o.metadata!); + checkUnnamed120(o.metadata!); unittest.expect( o.name!, unittest.equals('foo'), ); - checkUnnamed118(o.response!); + checkUnnamed121(o.response!); } buildCounterGoogleLongrunningOperation--; } @@ -6139,7 +6235,7 @@ void checkGoogleProtobufEmpty(api.GoogleProtobufEmpty o) { buildCounterGoogleProtobufEmpty--; } -core.Map buildUnnamed119() => { +core.Map buildUnnamed122() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -6152,7 +6248,7 @@ core.Map buildUnnamed119() => { }, }; -void checkUnnamed119(core.Map o) { +void checkUnnamed122(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted13 = (o['x']!) 
as core.Map; unittest.expect(casted13, unittest.hasLength(3)); @@ -6184,15 +6280,15 @@ void checkUnnamed119(core.Map o) { ); } -core.List> buildUnnamed120() => [ - buildUnnamed119(), - buildUnnamed119(), +core.List> buildUnnamed123() => [ + buildUnnamed122(), + buildUnnamed122(), ]; -void checkUnnamed120(core.List> o) { +void checkUnnamed123(core.List> o) { unittest.expect(o, unittest.hasLength(2)); - checkUnnamed119(o[0]); - checkUnnamed119(o[1]); + checkUnnamed122(o[0]); + checkUnnamed122(o[1]); } core.int buildCounterGoogleRpcStatus = 0; @@ -6201,7 +6297,7 @@ api.GoogleRpcStatus buildGoogleRpcStatus() { buildCounterGoogleRpcStatus++; if (buildCounterGoogleRpcStatus < 3) { o.code = 42; - o.details = buildUnnamed120(); + o.details = buildUnnamed123(); o.message = 'foo'; } buildCounterGoogleRpcStatus--; @@ -6215,7 +6311,7 @@ void checkGoogleRpcStatus(api.GoogleRpcStatus o) { o.code!, unittest.equals(42), ); - checkUnnamed120(o.details!); + checkUnnamed123(o.details!); unittest.expect( o.message!, unittest.equals('foo'), @@ -6256,12 +6352,12 @@ void checkGoogleTypeDate(api.GoogleTypeDate o) { buildCounterGoogleTypeDate--; } -core.List buildUnnamed121() => [ +core.List buildUnnamed124() => [ 'foo', 'foo', ]; -void checkUnnamed121(core.List o) { +void checkUnnamed124(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -6916,6 +7012,27 @@ void main() { }); }); + unittest.group('obj-schema-GoogleCloudRetailV2PinControlMetadata', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudRetailV2PinControlMetadata(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudRetailV2PinControlMetadata.fromJson( + oJson as core.Map); + checkGoogleCloudRetailV2PinControlMetadata(od); + }); + }); + + unittest.group('obj-schema-GoogleCloudRetailV2PinControlMetadataProductPins', + () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudRetailV2PinControlMetadataProductPins(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudRetailV2PinControlMetadataProductPins.fromJson( + oJson as core.Map); + checkGoogleCloudRetailV2PinControlMetadataProductPins(od); + }); + }); + unittest.group('obj-schema-GoogleCloudRetailV2PredictRequest', () { unittest.test('to-json--from-json', () async { final o = buildGoogleCloudRetailV2PredictRequest(); @@ -7715,7 +7832,7 @@ void main() { final arg_deviceType = 'foo'; final arg_enableAttributeSuggestions = true; final arg_entity = 'foo'; - final arg_languageCodes = buildUnnamed121(); + final arg_languageCodes = buildUnnamed124(); final arg_maxSuggestions = 42; final arg_query = 'foo'; final arg_visitorId = 'foo'; diff --git a/generated/googleapis/test/run/v2_test.dart b/generated/googleapis/test/run/v2_test.dart index a901b54d2..1642b79ad 100644 --- a/generated/googleapis/test/run/v2_test.dart +++ b/generated/googleapis/test/run/v2_test.dart @@ -84,6 +84,7 @@ api.GoogleCloudRunV2BuildpacksBuild buildGoogleCloudRunV2BuildpacksBuild() { o.enableAutomaticUpdates = true; o.environmentVariables = buildUnnamed0(); o.functionTarget = 'foo'; + o.projectDescriptor = 'foo'; o.runtime = 'foo'; } buildCounterGoogleCloudRunV2BuildpacksBuild--; @@ -108,6 +109,10 @@ void checkGoogleCloudRunV2BuildpacksBuild( o.functionTarget!, unittest.equals('foo'), ); + unittest.expect( + o.projectDescriptor!, + unittest.equals('foo'), + ); unittest.expect( o.runtime!, unittest.equals('foo'), @@ -927,12 +932,30 @@ void 
checkGoogleCloudRunV2ExportStatusResponse( buildCounterGoogleCloudRunV2ExportStatusResponse--; } +core.List buildUnnamed16() => [ + 'foo', + 'foo', + ]; + +void checkUnnamed16(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o[0], + unittest.equals('foo'), + ); + unittest.expect( + o[1], + unittest.equals('foo'), + ); +} + core.int buildCounterGoogleCloudRunV2GCSVolumeSource = 0; api.GoogleCloudRunV2GCSVolumeSource buildGoogleCloudRunV2GCSVolumeSource() { final o = api.GoogleCloudRunV2GCSVolumeSource(); buildCounterGoogleCloudRunV2GCSVolumeSource++; if (buildCounterGoogleCloudRunV2GCSVolumeSource < 3) { o.bucket = 'foo'; + o.mountOptions = buildUnnamed16(); o.readOnly = true; } buildCounterGoogleCloudRunV2GCSVolumeSource--; @@ -947,6 +970,7 @@ void checkGoogleCloudRunV2GCSVolumeSource( o.bucket!, unittest.equals('foo'), ); + checkUnnamed16(o.mountOptions!); unittest.expect(o.readOnly!, unittest.isTrue); } buildCounterGoogleCloudRunV2GCSVolumeSource--; @@ -979,12 +1003,12 @@ void checkGoogleCloudRunV2GRPCAction(api.GoogleCloudRunV2GRPCAction o) { buildCounterGoogleCloudRunV2GRPCAction--; } -core.List buildUnnamed16() => [ +core.List buildUnnamed17() => [ buildGoogleCloudRunV2HTTPHeader(), buildGoogleCloudRunV2HTTPHeader(), ]; -void checkUnnamed16(core.List o) { +void checkUnnamed17(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudRunV2HTTPHeader(o[0]); checkGoogleCloudRunV2HTTPHeader(o[1]); @@ -995,7 +1019,7 @@ api.GoogleCloudRunV2HTTPGetAction buildGoogleCloudRunV2HTTPGetAction() { final o = api.GoogleCloudRunV2HTTPGetAction(); buildCounterGoogleCloudRunV2HTTPGetAction++; if (buildCounterGoogleCloudRunV2HTTPGetAction < 3) { - o.httpHeaders = buildUnnamed16(); + o.httpHeaders = buildUnnamed17(); o.path = 'foo'; o.port = 42; } @@ -1006,7 +1030,7 @@ api.GoogleCloudRunV2HTTPGetAction buildGoogleCloudRunV2HTTPGetAction() { void checkGoogleCloudRunV2HTTPGetAction(api.GoogleCloudRunV2HTTPGetAction o) { buildCounterGoogleCloudRunV2HTTPGetAction++; if (buildCounterGoogleCloudRunV2HTTPGetAction < 3) { - checkUnnamed16(o.httpHeaders!); + checkUnnamed17(o.httpHeaders!); unittest.expect( o.path!, unittest.equals('foo'), @@ -1081,12 +1105,12 @@ void checkGoogleCloudRunV2ImageExportStatus( buildCounterGoogleCloudRunV2ImageExportStatus--; } -core.Map buildUnnamed17() => { +core.Map buildUnnamed18() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed17(core.Map o) { +void checkUnnamed18(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -1098,23 +1122,23 @@ void checkUnnamed17(core.Map o) { ); } -core.List buildUnnamed18() => [ +core.List buildUnnamed19() => [ buildGoogleCloudRunV2Condition(), buildGoogleCloudRunV2Condition(), ]; -void checkUnnamed18(core.List o) { +void checkUnnamed19(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudRunV2Condition(o[0]); checkGoogleCloudRunV2Condition(o[1]); } -core.Map buildUnnamed19() => { +core.Map buildUnnamed20() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed19(core.Map o) { +void checkUnnamed20(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -1131,11 +1155,11 @@ api.GoogleCloudRunV2Job buildGoogleCloudRunV2Job() { final o = api.GoogleCloudRunV2Job(); buildCounterGoogleCloudRunV2Job++; if (buildCounterGoogleCloudRunV2Job < 3) { - o.annotations = buildUnnamed17(); + o.annotations = buildUnnamed18(); o.binaryAuthorization = buildGoogleCloudRunV2BinaryAuthorization(); o.client = 'foo'; o.clientVersion = 'foo'; - 
o.conditions = buildUnnamed18(); + o.conditions = buildUnnamed19(); o.createTime = 'foo'; o.creator = 'foo'; o.deleteTime = 'foo'; @@ -1143,7 +1167,7 @@ api.GoogleCloudRunV2Job buildGoogleCloudRunV2Job() { o.executionCount = 42; o.expireTime = 'foo'; o.generation = 'foo'; - o.labels = buildUnnamed19(); + o.labels = buildUnnamed20(); o.lastModifier = 'foo'; o.latestCreatedExecution = buildGoogleCloudRunV2ExecutionReference(); o.launchStage = 'foo'; @@ -1165,7 +1189,7 @@ api.GoogleCloudRunV2Job buildGoogleCloudRunV2Job() { void checkGoogleCloudRunV2Job(api.GoogleCloudRunV2Job o) { buildCounterGoogleCloudRunV2Job++; if (buildCounterGoogleCloudRunV2Job < 3) { - checkUnnamed17(o.annotations!); + checkUnnamed18(o.annotations!); checkGoogleCloudRunV2BinaryAuthorization(o.binaryAuthorization!); unittest.expect( o.client!, @@ -1175,7 +1199,7 @@ void checkGoogleCloudRunV2Job(api.GoogleCloudRunV2Job o) { o.clientVersion!, unittest.equals('foo'), ); - checkUnnamed18(o.conditions!); + checkUnnamed19(o.conditions!); unittest.expect( o.createTime!, unittest.equals('foo'), @@ -1204,7 +1228,7 @@ void checkGoogleCloudRunV2Job(api.GoogleCloudRunV2Job o) { o.generation!, unittest.equals('foo'), ); - checkUnnamed19(o.labels!); + checkUnnamed20(o.labels!); unittest.expect( o.lastModifier!, unittest.equals('foo'), @@ -1246,12 +1270,12 @@ void checkGoogleCloudRunV2Job(api.GoogleCloudRunV2Job o) { buildCounterGoogleCloudRunV2Job--; } -core.List buildUnnamed20() => [ +core.List buildUnnamed21() => [ buildGoogleCloudRunV2Execution(), buildGoogleCloudRunV2Execution(), ]; -void checkUnnamed20(core.List o) { +void checkUnnamed21(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudRunV2Execution(o[0]); checkGoogleCloudRunV2Execution(o[1]); @@ -1263,7 +1287,7 @@ api.GoogleCloudRunV2ListExecutionsResponse final o = api.GoogleCloudRunV2ListExecutionsResponse(); buildCounterGoogleCloudRunV2ListExecutionsResponse++; if (buildCounterGoogleCloudRunV2ListExecutionsResponse < 3) { - o.executions = buildUnnamed20(); + o.executions = buildUnnamed21(); o.nextPageToken = 'foo'; } buildCounterGoogleCloudRunV2ListExecutionsResponse--; @@ -1274,7 +1298,7 @@ void checkGoogleCloudRunV2ListExecutionsResponse( api.GoogleCloudRunV2ListExecutionsResponse o) { buildCounterGoogleCloudRunV2ListExecutionsResponse++; if (buildCounterGoogleCloudRunV2ListExecutionsResponse < 3) { - checkUnnamed20(o.executions!); + checkUnnamed21(o.executions!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -1283,12 +1307,12 @@ void checkGoogleCloudRunV2ListExecutionsResponse( buildCounterGoogleCloudRunV2ListExecutionsResponse--; } -core.List buildUnnamed21() => [ +core.List buildUnnamed22() => [ buildGoogleCloudRunV2Job(), buildGoogleCloudRunV2Job(), ]; -void checkUnnamed21(core.List o) { +void checkUnnamed22(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudRunV2Job(o[0]); checkGoogleCloudRunV2Job(o[1]); @@ -1299,7 +1323,7 @@ api.GoogleCloudRunV2ListJobsResponse buildGoogleCloudRunV2ListJobsResponse() { final o = api.GoogleCloudRunV2ListJobsResponse(); buildCounterGoogleCloudRunV2ListJobsResponse++; if (buildCounterGoogleCloudRunV2ListJobsResponse < 3) { - o.jobs = buildUnnamed21(); + o.jobs = buildUnnamed22(); o.nextPageToken = 'foo'; } buildCounterGoogleCloudRunV2ListJobsResponse--; @@ -1310,7 +1334,7 @@ void checkGoogleCloudRunV2ListJobsResponse( api.GoogleCloudRunV2ListJobsResponse o) { buildCounterGoogleCloudRunV2ListJobsResponse++; if (buildCounterGoogleCloudRunV2ListJobsResponse < 3) { - 
checkUnnamed21(o.jobs!); + checkUnnamed22(o.jobs!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -1319,12 +1343,12 @@ void checkGoogleCloudRunV2ListJobsResponse( buildCounterGoogleCloudRunV2ListJobsResponse--; } -core.List buildUnnamed22() => [ +core.List buildUnnamed23() => [ buildGoogleCloudRunV2Revision(), buildGoogleCloudRunV2Revision(), ]; -void checkUnnamed22(core.List o) { +void checkUnnamed23(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudRunV2Revision(o[0]); checkGoogleCloudRunV2Revision(o[1]); @@ -1337,7 +1361,7 @@ api.GoogleCloudRunV2ListRevisionsResponse buildCounterGoogleCloudRunV2ListRevisionsResponse++; if (buildCounterGoogleCloudRunV2ListRevisionsResponse < 3) { o.nextPageToken = 'foo'; - o.revisions = buildUnnamed22(); + o.revisions = buildUnnamed23(); } buildCounterGoogleCloudRunV2ListRevisionsResponse--; return o; @@ -1351,17 +1375,17 @@ void checkGoogleCloudRunV2ListRevisionsResponse( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed22(o.revisions!); + checkUnnamed23(o.revisions!); } buildCounterGoogleCloudRunV2ListRevisionsResponse--; } -core.List buildUnnamed23() => [ +core.List buildUnnamed24() => [ buildGoogleCloudRunV2Service(), buildGoogleCloudRunV2Service(), ]; -void checkUnnamed23(core.List o) { +void checkUnnamed24(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudRunV2Service(o[0]); checkGoogleCloudRunV2Service(o[1]); @@ -1374,7 +1398,7 @@ api.GoogleCloudRunV2ListServicesResponse buildCounterGoogleCloudRunV2ListServicesResponse++; if (buildCounterGoogleCloudRunV2ListServicesResponse < 3) { o.nextPageToken = 'foo'; - o.services = buildUnnamed23(); + o.services = buildUnnamed24(); } buildCounterGoogleCloudRunV2ListServicesResponse--; return o; @@ -1388,17 +1412,17 @@ void checkGoogleCloudRunV2ListServicesResponse( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed23(o.services!); + checkUnnamed24(o.services!); } buildCounterGoogleCloudRunV2ListServicesResponse--; } -core.List buildUnnamed24() => [ +core.List buildUnnamed25() => [ buildGoogleCloudRunV2Task(), buildGoogleCloudRunV2Task(), ]; -void checkUnnamed24(core.List o) { +void checkUnnamed25(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudRunV2Task(o[0]); checkGoogleCloudRunV2Task(o[1]); @@ -1410,7 +1434,7 @@ api.GoogleCloudRunV2ListTasksResponse buildGoogleCloudRunV2ListTasksResponse() { buildCounterGoogleCloudRunV2ListTasksResponse++; if (buildCounterGoogleCloudRunV2ListTasksResponse < 3) { o.nextPageToken = 'foo'; - o.tasks = buildUnnamed24(); + o.tasks = buildUnnamed25(); } buildCounterGoogleCloudRunV2ListTasksResponse--; return o; @@ -1424,7 +1448,7 @@ void checkGoogleCloudRunV2ListTasksResponse( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed24(o.tasks!); + checkUnnamed25(o.tasks!); } buildCounterGoogleCloudRunV2ListTasksResponse--; } @@ -1481,12 +1505,12 @@ void checkGoogleCloudRunV2NFSVolumeSource( buildCounterGoogleCloudRunV2NFSVolumeSource--; } -core.List buildUnnamed25() => [ +core.List buildUnnamed26() => [ 'foo', 'foo', ]; -void checkUnnamed25(core.List o) { +void checkUnnamed26(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -1505,7 +1529,7 @@ api.GoogleCloudRunV2NetworkInterface buildGoogleCloudRunV2NetworkInterface() { if (buildCounterGoogleCloudRunV2NetworkInterface < 3) { o.network = 'foo'; o.subnetwork = 'foo'; - o.tags = buildUnnamed25(); + o.tags = buildUnnamed26(); } buildCounterGoogleCloudRunV2NetworkInterface--; return o; @@ 
-1523,7 +1547,7 @@ void checkGoogleCloudRunV2NetworkInterface( o.subnetwork!, unittest.equals('foo'), ); - checkUnnamed25(o.tags!); + checkUnnamed26(o.tags!); } buildCounterGoogleCloudRunV2NetworkInterface--; } @@ -1550,12 +1574,12 @@ void checkGoogleCloudRunV2NodeSelector(api.GoogleCloudRunV2NodeSelector o) { buildCounterGoogleCloudRunV2NodeSelector--; } -core.List buildUnnamed26() => [ +core.List buildUnnamed27() => [ buildGoogleCloudRunV2ContainerOverride(), buildGoogleCloudRunV2ContainerOverride(), ]; -void checkUnnamed26(core.List o) { +void checkUnnamed27(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudRunV2ContainerOverride(o[0]); checkGoogleCloudRunV2ContainerOverride(o[1]); @@ -1566,7 +1590,7 @@ api.GoogleCloudRunV2Overrides buildGoogleCloudRunV2Overrides() { final o = api.GoogleCloudRunV2Overrides(); buildCounterGoogleCloudRunV2Overrides++; if (buildCounterGoogleCloudRunV2Overrides < 3) { - o.containerOverrides = buildUnnamed26(); + o.containerOverrides = buildUnnamed27(); o.taskCount = 42; o.timeout = 'foo'; } @@ -1577,7 +1601,7 @@ api.GoogleCloudRunV2Overrides buildGoogleCloudRunV2Overrides() { void checkGoogleCloudRunV2Overrides(api.GoogleCloudRunV2Overrides o) { buildCounterGoogleCloudRunV2Overrides++; if (buildCounterGoogleCloudRunV2Overrides < 3) { - checkUnnamed26(o.containerOverrides!); + checkUnnamed27(o.containerOverrides!); unittest.expect( o.taskCount!, unittest.equals(42), @@ -1633,12 +1657,12 @@ void checkGoogleCloudRunV2Probe(api.GoogleCloudRunV2Probe o) { buildCounterGoogleCloudRunV2Probe--; } -core.Map buildUnnamed27() => { +core.Map buildUnnamed28() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed27(core.Map o) { +void checkUnnamed28(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -1657,7 +1681,7 @@ api.GoogleCloudRunV2ResourceRequirements buildCounterGoogleCloudRunV2ResourceRequirements++; if (buildCounterGoogleCloudRunV2ResourceRequirements < 3) { o.cpuIdle = true; - o.limits = buildUnnamed27(); + o.limits = buildUnnamed28(); o.startupCpuBoost = true; } buildCounterGoogleCloudRunV2ResourceRequirements--; @@ -1669,18 +1693,18 @@ void checkGoogleCloudRunV2ResourceRequirements( buildCounterGoogleCloudRunV2ResourceRequirements++; if (buildCounterGoogleCloudRunV2ResourceRequirements < 3) { unittest.expect(o.cpuIdle!, unittest.isTrue); - checkUnnamed27(o.limits!); + checkUnnamed28(o.limits!); unittest.expect(o.startupCpuBoost!, unittest.isTrue); } buildCounterGoogleCloudRunV2ResourceRequirements--; } -core.Map buildUnnamed28() => { +core.Map buildUnnamed29() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed28(core.Map o) { +void checkUnnamed29(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -1692,34 +1716,34 @@ void checkUnnamed28(core.Map o) { ); } -core.List buildUnnamed29() => [ +core.List buildUnnamed30() => [ buildGoogleCloudRunV2Condition(), buildGoogleCloudRunV2Condition(), ]; -void checkUnnamed29(core.List o) { +void checkUnnamed30(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudRunV2Condition(o[0]); checkGoogleCloudRunV2Condition(o[1]); } -core.List buildUnnamed30() => [ +core.List buildUnnamed31() => [ buildGoogleCloudRunV2Container(), buildGoogleCloudRunV2Container(), ]; -void checkUnnamed30(core.List o) { +void checkUnnamed31(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudRunV2Container(o[0]); checkGoogleCloudRunV2Container(o[1]); } -core.Map buildUnnamed31() => { +core.Map buildUnnamed32() => { 
'x': 'foo', 'y': 'foo', }; -void checkUnnamed31(core.Map o) { +void checkUnnamed32(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -1731,12 +1755,12 @@ void checkUnnamed31(core.Map o) { ); } -core.List buildUnnamed32() => [ +core.List buildUnnamed33() => [ buildGoogleCloudRunV2Volume(), buildGoogleCloudRunV2Volume(), ]; -void checkUnnamed32(core.List o) { +void checkUnnamed33(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudRunV2Volume(o[0]); checkGoogleCloudRunV2Volume(o[1]); @@ -1747,9 +1771,9 @@ api.GoogleCloudRunV2Revision buildGoogleCloudRunV2Revision() { final o = api.GoogleCloudRunV2Revision(); buildCounterGoogleCloudRunV2Revision++; if (buildCounterGoogleCloudRunV2Revision < 3) { - o.annotations = buildUnnamed28(); - o.conditions = buildUnnamed29(); - o.containers = buildUnnamed30(); + o.annotations = buildUnnamed29(); + o.conditions = buildUnnamed30(); + o.containers = buildUnnamed31(); o.createTime = 'foo'; o.deleteTime = 'foo'; o.encryptionKey = 'foo'; @@ -1759,7 +1783,7 @@ api.GoogleCloudRunV2Revision buildGoogleCloudRunV2Revision() { o.executionEnvironment = 'foo'; o.expireTime = 'foo'; o.generation = 'foo'; - o.labels = buildUnnamed31(); + o.labels = buildUnnamed32(); o.launchStage = 'foo'; o.logUri = 'foo'; o.maxInstanceRequestConcurrency = 42; @@ -1777,7 +1801,7 @@ api.GoogleCloudRunV2Revision buildGoogleCloudRunV2Revision() { o.timeout = 'foo'; o.uid = 'foo'; o.updateTime = 'foo'; - o.volumes = buildUnnamed32(); + o.volumes = buildUnnamed33(); o.vpcAccess = buildGoogleCloudRunV2VpcAccess(); } buildCounterGoogleCloudRunV2Revision--; @@ -1787,9 +1811,9 @@ api.GoogleCloudRunV2Revision buildGoogleCloudRunV2Revision() { void checkGoogleCloudRunV2Revision(api.GoogleCloudRunV2Revision o) { buildCounterGoogleCloudRunV2Revision++; if (buildCounterGoogleCloudRunV2Revision < 3) { - checkUnnamed28(o.annotations!); - checkUnnamed29(o.conditions!); - checkUnnamed30(o.containers!); + checkUnnamed29(o.annotations!); + checkUnnamed30(o.conditions!); + checkUnnamed31(o.containers!); unittest.expect( o.createTime!, unittest.equals('foo'), @@ -1826,7 +1850,7 @@ void checkGoogleCloudRunV2Revision(api.GoogleCloudRunV2Revision o) { o.generation!, unittest.equals('foo'), ); - checkUnnamed31(o.labels!); + checkUnnamed32(o.labels!); unittest.expect( o.launchStage!, unittest.equals('foo'), @@ -1874,7 +1898,7 @@ void checkGoogleCloudRunV2Revision(api.GoogleCloudRunV2Revision o) { o.updateTime!, unittest.equals('foo'), ); - checkUnnamed32(o.volumes!); + checkUnnamed33(o.volumes!); checkGoogleCloudRunV2VpcAccess(o.vpcAccess!); } buildCounterGoogleCloudRunV2Revision--; @@ -1932,12 +1956,12 @@ void checkGoogleCloudRunV2RevisionScalingStatus( buildCounterGoogleCloudRunV2RevisionScalingStatus--; } -core.Map buildUnnamed33() => { +core.Map buildUnnamed34() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed33(core.Map o) { +void checkUnnamed34(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -1949,23 +1973,23 @@ void checkUnnamed33(core.Map o) { ); } -core.List buildUnnamed34() => [ +core.List buildUnnamed35() => [ buildGoogleCloudRunV2Container(), buildGoogleCloudRunV2Container(), ]; -void checkUnnamed34(core.List o) { +void checkUnnamed35(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudRunV2Container(o[0]); checkGoogleCloudRunV2Container(o[1]); } -core.Map buildUnnamed35() => { +core.Map buildUnnamed36() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed35(core.Map o) { +void 
checkUnnamed36(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -1977,12 +2001,12 @@ void checkUnnamed35(core.Map o) { ); } -core.List buildUnnamed36() => [ +core.List buildUnnamed37() => [ buildGoogleCloudRunV2Volume(), buildGoogleCloudRunV2Volume(), ]; -void checkUnnamed36(core.List o) { +void checkUnnamed37(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudRunV2Volume(o[0]); checkGoogleCloudRunV2Volume(o[1]); @@ -1993,12 +2017,14 @@ api.GoogleCloudRunV2RevisionTemplate buildGoogleCloudRunV2RevisionTemplate() { final o = api.GoogleCloudRunV2RevisionTemplate(); buildCounterGoogleCloudRunV2RevisionTemplate++; if (buildCounterGoogleCloudRunV2RevisionTemplate < 3) { - o.annotations = buildUnnamed33(); - o.containers = buildUnnamed34(); + o.annotations = buildUnnamed34(); + o.containers = buildUnnamed35(); o.encryptionKey = 'foo'; + o.encryptionKeyRevocationAction = 'foo'; + o.encryptionKeyShutdownDuration = 'foo'; o.executionEnvironment = 'foo'; o.healthCheckDisabled = true; - o.labels = buildUnnamed35(); + o.labels = buildUnnamed36(); o.maxInstanceRequestConcurrency = 42; o.nodeSelector = buildGoogleCloudRunV2NodeSelector(); o.revision = 'foo'; @@ -2007,7 +2033,7 @@ api.GoogleCloudRunV2RevisionTemplate buildGoogleCloudRunV2RevisionTemplate() { o.serviceMesh = buildGoogleCloudRunV2ServiceMesh(); o.sessionAffinity = true; o.timeout = 'foo'; - o.volumes = buildUnnamed36(); + o.volumes = buildUnnamed37(); o.vpcAccess = buildGoogleCloudRunV2VpcAccess(); } buildCounterGoogleCloudRunV2RevisionTemplate--; @@ -2018,18 +2044,26 @@ void checkGoogleCloudRunV2RevisionTemplate( api.GoogleCloudRunV2RevisionTemplate o) { buildCounterGoogleCloudRunV2RevisionTemplate++; if (buildCounterGoogleCloudRunV2RevisionTemplate < 3) { - checkUnnamed33(o.annotations!); - checkUnnamed34(o.containers!); + checkUnnamed34(o.annotations!); + checkUnnamed35(o.containers!); unittest.expect( o.encryptionKey!, unittest.equals('foo'), ); + unittest.expect( + o.encryptionKeyRevocationAction!, + unittest.equals('foo'), + ); + unittest.expect( + o.encryptionKeyShutdownDuration!, + unittest.equals('foo'), + ); unittest.expect( o.executionEnvironment!, unittest.equals('foo'), ); unittest.expect(o.healthCheckDisabled!, unittest.isTrue); - checkUnnamed35(o.labels!); + checkUnnamed36(o.labels!); unittest.expect( o.maxInstanceRequestConcurrency!, unittest.equals(42), @@ -2050,7 +2084,7 @@ void checkGoogleCloudRunV2RevisionTemplate( o.timeout!, unittest.equals('foo'), ); - checkUnnamed36(o.volumes!); + checkUnnamed37(o.volumes!); checkGoogleCloudRunV2VpcAccess(o.vpcAccess!); } buildCounterGoogleCloudRunV2RevisionTemplate--; @@ -2110,12 +2144,12 @@ void checkGoogleCloudRunV2SecretKeySelector( buildCounterGoogleCloudRunV2SecretKeySelector--; } -core.List buildUnnamed37() => [ +core.List buildUnnamed38() => [ buildGoogleCloudRunV2VersionToPath(), buildGoogleCloudRunV2VersionToPath(), ]; -void checkUnnamed37(core.List o) { +void checkUnnamed38(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudRunV2VersionToPath(o[0]); checkGoogleCloudRunV2VersionToPath(o[1]); @@ -2128,7 +2162,7 @@ api.GoogleCloudRunV2SecretVolumeSource buildCounterGoogleCloudRunV2SecretVolumeSource++; if (buildCounterGoogleCloudRunV2SecretVolumeSource < 3) { o.defaultMode = 42; - o.items = buildUnnamed37(); + o.items = buildUnnamed38(); o.secret = 'foo'; } buildCounterGoogleCloudRunV2SecretVolumeSource--; @@ -2143,7 +2177,7 @@ void checkGoogleCloudRunV2SecretVolumeSource( o.defaultMode!, 
unittest.equals(42), ); - checkUnnamed37(o.items!); + checkUnnamed38(o.items!); unittest.expect( o.secret!, unittest.equals('foo'), @@ -2152,12 +2186,12 @@ void checkGoogleCloudRunV2SecretVolumeSource( buildCounterGoogleCloudRunV2SecretVolumeSource--; } -core.Map buildUnnamed38() => { +core.Map buildUnnamed39() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed38(core.Map o) { +void checkUnnamed39(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -2169,23 +2203,23 @@ void checkUnnamed38(core.Map o) { ); } -core.List buildUnnamed39() => [ +core.List buildUnnamed40() => [ buildGoogleCloudRunV2Condition(), buildGoogleCloudRunV2Condition(), ]; -void checkUnnamed39(core.List o) { +void checkUnnamed40(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudRunV2Condition(o[0]); checkGoogleCloudRunV2Condition(o[1]); } -core.List buildUnnamed40() => [ +core.List buildUnnamed41() => [ 'foo', 'foo', ]; -void checkUnnamed40(core.List o) { +void checkUnnamed41(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -2197,12 +2231,12 @@ void checkUnnamed40(core.List o) { ); } -core.Map buildUnnamed41() => { +core.Map buildUnnamed42() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed41(core.Map o) { +void checkUnnamed42(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -2214,34 +2248,34 @@ void checkUnnamed41(core.Map o) { ); } -core.List buildUnnamed42() => [ +core.List buildUnnamed43() => [ buildGoogleCloudRunV2TrafficTarget(), buildGoogleCloudRunV2TrafficTarget(), ]; -void checkUnnamed42(core.List o) { +void checkUnnamed43(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudRunV2TrafficTarget(o[0]); checkGoogleCloudRunV2TrafficTarget(o[1]); } -core.List buildUnnamed43() => [ +core.List buildUnnamed44() => [ buildGoogleCloudRunV2TrafficTargetStatus(), buildGoogleCloudRunV2TrafficTargetStatus(), ]; -void checkUnnamed43(core.List o) { +void checkUnnamed44(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudRunV2TrafficTargetStatus(o[0]); checkGoogleCloudRunV2TrafficTargetStatus(o[1]); } -core.List buildUnnamed44() => [ +core.List buildUnnamed45() => [ 'foo', 'foo', ]; -void checkUnnamed44(core.List o) { +void checkUnnamed45(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -2258,14 +2292,14 @@ api.GoogleCloudRunV2Service buildGoogleCloudRunV2Service() { final o = api.GoogleCloudRunV2Service(); buildCounterGoogleCloudRunV2Service++; if (buildCounterGoogleCloudRunV2Service < 3) { - o.annotations = buildUnnamed38(); + o.annotations = buildUnnamed39(); o.binaryAuthorization = buildGoogleCloudRunV2BinaryAuthorization(); o.client = 'foo'; o.clientVersion = 'foo'; - o.conditions = buildUnnamed39(); + o.conditions = buildUnnamed40(); o.createTime = 'foo'; o.creator = 'foo'; - o.customAudiences = buildUnnamed40(); + o.customAudiences = buildUnnamed41(); o.defaultUriDisabled = true; o.deleteTime = 'foo'; o.description = 'foo'; @@ -2274,7 +2308,7 @@ api.GoogleCloudRunV2Service buildGoogleCloudRunV2Service() { o.generation = 'foo'; o.ingress = 'foo'; o.invokerIamDisabled = true; - o.labels = buildUnnamed41(); + o.labels = buildUnnamed42(); o.lastModifier = 'foo'; o.latestCreatedRevision = 'foo'; o.latestReadyRevision = 'foo'; @@ -2286,12 +2320,12 @@ api.GoogleCloudRunV2Service buildGoogleCloudRunV2Service() { o.scaling = buildGoogleCloudRunV2ServiceScaling(); o.template = buildGoogleCloudRunV2RevisionTemplate(); 
o.terminalCondition = buildGoogleCloudRunV2Condition(); - o.traffic = buildUnnamed42(); - o.trafficStatuses = buildUnnamed43(); + o.traffic = buildUnnamed43(); + o.trafficStatuses = buildUnnamed44(); o.uid = 'foo'; o.updateTime = 'foo'; o.uri = 'foo'; - o.urls = buildUnnamed44(); + o.urls = buildUnnamed45(); } buildCounterGoogleCloudRunV2Service--; return o; @@ -2300,7 +2334,7 @@ api.GoogleCloudRunV2Service buildGoogleCloudRunV2Service() { void checkGoogleCloudRunV2Service(api.GoogleCloudRunV2Service o) { buildCounterGoogleCloudRunV2Service++; if (buildCounterGoogleCloudRunV2Service < 3) { - checkUnnamed38(o.annotations!); + checkUnnamed39(o.annotations!); checkGoogleCloudRunV2BinaryAuthorization(o.binaryAuthorization!); unittest.expect( o.client!, @@ -2310,7 +2344,7 @@ void checkGoogleCloudRunV2Service(api.GoogleCloudRunV2Service o) { o.clientVersion!, unittest.equals('foo'), ); - checkUnnamed39(o.conditions!); + checkUnnamed40(o.conditions!); unittest.expect( o.createTime!, unittest.equals('foo'), @@ -2319,7 +2353,7 @@ void checkGoogleCloudRunV2Service(api.GoogleCloudRunV2Service o) { o.creator!, unittest.equals('foo'), ); - checkUnnamed40(o.customAudiences!); + checkUnnamed41(o.customAudiences!); unittest.expect(o.defaultUriDisabled!, unittest.isTrue); unittest.expect( o.deleteTime!, @@ -2346,7 +2380,7 @@ void checkGoogleCloudRunV2Service(api.GoogleCloudRunV2Service o) { unittest.equals('foo'), ); unittest.expect(o.invokerIamDisabled!, unittest.isTrue); - checkUnnamed41(o.labels!); + checkUnnamed42(o.labels!); unittest.expect( o.lastModifier!, unittest.equals('foo'), @@ -2376,8 +2410,8 @@ void checkGoogleCloudRunV2Service(api.GoogleCloudRunV2Service o) { checkGoogleCloudRunV2ServiceScaling(o.scaling!); checkGoogleCloudRunV2RevisionTemplate(o.template!); checkGoogleCloudRunV2Condition(o.terminalCondition!); - checkUnnamed42(o.traffic!); - checkUnnamed43(o.trafficStatuses!); + checkUnnamed43(o.traffic!); + checkUnnamed44(o.trafficStatuses!); unittest.expect( o.uid!, unittest.equals('foo'), @@ -2390,7 +2424,7 @@ void checkGoogleCloudRunV2Service(api.GoogleCloudRunV2Service o) { o.uri!, unittest.equals('foo'), ); - checkUnnamed44(o.urls!); + checkUnnamed45(o.urls!); } buildCounterGoogleCloudRunV2Service--; } @@ -2422,6 +2456,7 @@ api.GoogleCloudRunV2ServiceScaling buildGoogleCloudRunV2ServiceScaling() { final o = api.GoogleCloudRunV2ServiceScaling(); buildCounterGoogleCloudRunV2ServiceScaling++; if (buildCounterGoogleCloudRunV2ServiceScaling < 3) { + o.manualInstanceCount = 42; o.minInstanceCount = 42; o.scalingMode = 'foo'; } @@ -2432,6 +2467,10 @@ api.GoogleCloudRunV2ServiceScaling buildGoogleCloudRunV2ServiceScaling() { void checkGoogleCloudRunV2ServiceScaling(api.GoogleCloudRunV2ServiceScaling o) { buildCounterGoogleCloudRunV2ServiceScaling++; if (buildCounterGoogleCloudRunV2ServiceScaling < 3) { + unittest.expect( + o.manualInstanceCount!, + unittest.equals(42), + ); unittest.expect( o.minInstanceCount!, unittest.equals(42), @@ -2476,12 +2515,12 @@ void checkGoogleCloudRunV2StorageSource(api.GoogleCloudRunV2StorageSource o) { buildCounterGoogleCloudRunV2StorageSource--; } -core.List buildUnnamed45() => [ +core.List buildUnnamed46() => [ 'foo', 'foo', ]; -void checkUnnamed45(core.List o) { +void checkUnnamed46(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -2504,7 +2543,7 @@ api.GoogleCloudRunV2SubmitBuildRequest o.imageUri = 'foo'; o.serviceAccount = 'foo'; o.storageSource = buildGoogleCloudRunV2StorageSource(); - o.tags = buildUnnamed45(); + o.tags 
= buildUnnamed46(); o.workerPool = 'foo'; } buildCounterGoogleCloudRunV2SubmitBuildRequest--; @@ -2526,7 +2565,7 @@ void checkGoogleCloudRunV2SubmitBuildRequest( unittest.equals('foo'), ); checkGoogleCloudRunV2StorageSource(o.storageSource!); - checkUnnamed45(o.tags!); + checkUnnamed46(o.tags!); unittest.expect( o.workerPool!, unittest.equals('foo'), @@ -2589,12 +2628,12 @@ void checkGoogleCloudRunV2TCPSocketAction( buildCounterGoogleCloudRunV2TCPSocketAction--; } -core.Map buildUnnamed46() => { +core.Map buildUnnamed47() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed46(core.Map o) { +void checkUnnamed47(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -2606,34 +2645,34 @@ void checkUnnamed46(core.Map o) { ); } -core.List buildUnnamed47() => [ +core.List buildUnnamed48() => [ buildGoogleCloudRunV2Condition(), buildGoogleCloudRunV2Condition(), ]; -void checkUnnamed47(core.List o) { +void checkUnnamed48(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudRunV2Condition(o[0]); checkGoogleCloudRunV2Condition(o[1]); } -core.List buildUnnamed48() => [ +core.List buildUnnamed49() => [ buildGoogleCloudRunV2Container(), buildGoogleCloudRunV2Container(), ]; -void checkUnnamed48(core.List o) { +void checkUnnamed49(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudRunV2Container(o[0]); checkGoogleCloudRunV2Container(o[1]); } -core.Map buildUnnamed49() => { +core.Map buildUnnamed50() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed49(core.Map o) { +void checkUnnamed50(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -2645,12 +2684,12 @@ void checkUnnamed49(core.Map o) { ); } -core.List buildUnnamed50() => [ +core.List buildUnnamed51() => [ buildGoogleCloudRunV2Volume(), buildGoogleCloudRunV2Volume(), ]; -void checkUnnamed50(core.List o) { +void checkUnnamed51(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudRunV2Volume(o[0]); checkGoogleCloudRunV2Volume(o[1]); @@ -2661,10 +2700,10 @@ api.GoogleCloudRunV2Task buildGoogleCloudRunV2Task() { final o = api.GoogleCloudRunV2Task(); buildCounterGoogleCloudRunV2Task++; if (buildCounterGoogleCloudRunV2Task < 3) { - o.annotations = buildUnnamed46(); + o.annotations = buildUnnamed47(); o.completionTime = 'foo'; - o.conditions = buildUnnamed47(); - o.containers = buildUnnamed48(); + o.conditions = buildUnnamed48(); + o.containers = buildUnnamed49(); o.createTime = 'foo'; o.deleteTime = 'foo'; o.encryptionKey = 'foo'; @@ -2675,7 +2714,7 @@ api.GoogleCloudRunV2Task buildGoogleCloudRunV2Task() { o.generation = 'foo'; o.index = 42; o.job = 'foo'; - o.labels = buildUnnamed49(); + o.labels = buildUnnamed50(); o.lastAttemptResult = buildGoogleCloudRunV2TaskAttemptResult(); o.logUri = 'foo'; o.maxRetries = 42; @@ -2690,7 +2729,7 @@ api.GoogleCloudRunV2Task buildGoogleCloudRunV2Task() { o.timeout = 'foo'; o.uid = 'foo'; o.updateTime = 'foo'; - o.volumes = buildUnnamed50(); + o.volumes = buildUnnamed51(); o.vpcAccess = buildGoogleCloudRunV2VpcAccess(); } buildCounterGoogleCloudRunV2Task--; @@ -2700,13 +2739,13 @@ api.GoogleCloudRunV2Task buildGoogleCloudRunV2Task() { void checkGoogleCloudRunV2Task(api.GoogleCloudRunV2Task o) { buildCounterGoogleCloudRunV2Task++; if (buildCounterGoogleCloudRunV2Task < 3) { - checkUnnamed46(o.annotations!); + checkUnnamed47(o.annotations!); unittest.expect( o.completionTime!, unittest.equals('foo'), ); - checkUnnamed47(o.conditions!); - checkUnnamed48(o.containers!); + 
checkUnnamed48(o.conditions!); + checkUnnamed49(o.containers!); unittest.expect( o.createTime!, unittest.equals('foo'), @@ -2747,7 +2786,7 @@ void checkGoogleCloudRunV2Task(api.GoogleCloudRunV2Task o) { o.job!, unittest.equals('foo'), ); - checkUnnamed49(o.labels!); + checkUnnamed50(o.labels!); checkGoogleCloudRunV2TaskAttemptResult(o.lastAttemptResult!); unittest.expect( o.logUri!, @@ -2795,7 +2834,7 @@ void checkGoogleCloudRunV2Task(api.GoogleCloudRunV2Task o) { o.updateTime!, unittest.equals('foo'), ); - checkUnnamed50(o.volumes!); + checkUnnamed51(o.volumes!); checkGoogleCloudRunV2VpcAccess(o.vpcAccess!); } buildCounterGoogleCloudRunV2Task--; @@ -2826,23 +2865,23 @@ void checkGoogleCloudRunV2TaskAttemptResult( buildCounterGoogleCloudRunV2TaskAttemptResult--; } -core.List buildUnnamed51() => [ +core.List buildUnnamed52() => [ buildGoogleCloudRunV2Container(), buildGoogleCloudRunV2Container(), ]; -void checkUnnamed51(core.List o) { +void checkUnnamed52(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudRunV2Container(o[0]); checkGoogleCloudRunV2Container(o[1]); } -core.List buildUnnamed52() => [ +core.List buildUnnamed53() => [ buildGoogleCloudRunV2Volume(), buildGoogleCloudRunV2Volume(), ]; -void checkUnnamed52(core.List o) { +void checkUnnamed53(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudRunV2Volume(o[0]); checkGoogleCloudRunV2Volume(o[1]); @@ -2853,13 +2892,13 @@ api.GoogleCloudRunV2TaskTemplate buildGoogleCloudRunV2TaskTemplate() { final o = api.GoogleCloudRunV2TaskTemplate(); buildCounterGoogleCloudRunV2TaskTemplate++; if (buildCounterGoogleCloudRunV2TaskTemplate < 3) { - o.containers = buildUnnamed51(); + o.containers = buildUnnamed52(); o.encryptionKey = 'foo'; o.executionEnvironment = 'foo'; o.maxRetries = 42; o.serviceAccount = 'foo'; o.timeout = 'foo'; - o.volumes = buildUnnamed52(); + o.volumes = buildUnnamed53(); o.vpcAccess = buildGoogleCloudRunV2VpcAccess(); } buildCounterGoogleCloudRunV2TaskTemplate--; @@ -2869,7 +2908,7 @@ api.GoogleCloudRunV2TaskTemplate buildGoogleCloudRunV2TaskTemplate() { void checkGoogleCloudRunV2TaskTemplate(api.GoogleCloudRunV2TaskTemplate o) { buildCounterGoogleCloudRunV2TaskTemplate++; if (buildCounterGoogleCloudRunV2TaskTemplate < 3) { - checkUnnamed51(o.containers!); + checkUnnamed52(o.containers!); unittest.expect( o.encryptionKey!, unittest.equals('foo'), @@ -2890,7 +2929,7 @@ void checkGoogleCloudRunV2TaskTemplate(api.GoogleCloudRunV2TaskTemplate o) { o.timeout!, unittest.equals('foo'), ); - checkUnnamed52(o.volumes!); + checkUnnamed53(o.volumes!); checkGoogleCloudRunV2VpcAccess(o.vpcAccess!); } buildCounterGoogleCloudRunV2TaskTemplate--; @@ -3068,12 +3107,12 @@ void checkGoogleCloudRunV2VolumeMount(api.GoogleCloudRunV2VolumeMount o) { buildCounterGoogleCloudRunV2VolumeMount--; } -core.List buildUnnamed53() => [ +core.List buildUnnamed54() => [ buildGoogleCloudRunV2NetworkInterface(), buildGoogleCloudRunV2NetworkInterface(), ]; -void checkUnnamed53(core.List o) { +void checkUnnamed54(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudRunV2NetworkInterface(o[0]); checkGoogleCloudRunV2NetworkInterface(o[1]); @@ -3086,7 +3125,7 @@ api.GoogleCloudRunV2VpcAccess buildGoogleCloudRunV2VpcAccess() { if (buildCounterGoogleCloudRunV2VpcAccess < 3) { o.connector = 'foo'; o.egress = 'foo'; - o.networkInterfaces = buildUnnamed53(); + o.networkInterfaces = buildUnnamed54(); } buildCounterGoogleCloudRunV2VpcAccess--; return o; @@ -3103,17 +3142,17 @@ void 
checkGoogleCloudRunV2VpcAccess(api.GoogleCloudRunV2VpcAccess o) { o.egress!, unittest.equals('foo'), ); - checkUnnamed53(o.networkInterfaces!); + checkUnnamed54(o.networkInterfaces!); } buildCounterGoogleCloudRunV2VpcAccess--; } -core.List buildUnnamed54() => [ +core.List buildUnnamed55() => [ buildGoogleIamV1AuditLogConfig(), buildGoogleIamV1AuditLogConfig(), ]; -void checkUnnamed54(core.List o) { +void checkUnnamed55(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleIamV1AuditLogConfig(o[0]); checkGoogleIamV1AuditLogConfig(o[1]); @@ -3124,7 +3163,7 @@ api.GoogleIamV1AuditConfig buildGoogleIamV1AuditConfig() { final o = api.GoogleIamV1AuditConfig(); buildCounterGoogleIamV1AuditConfig++; if (buildCounterGoogleIamV1AuditConfig < 3) { - o.auditLogConfigs = buildUnnamed54(); + o.auditLogConfigs = buildUnnamed55(); o.service = 'foo'; } buildCounterGoogleIamV1AuditConfig--; @@ -3134,7 +3173,7 @@ api.GoogleIamV1AuditConfig buildGoogleIamV1AuditConfig() { void checkGoogleIamV1AuditConfig(api.GoogleIamV1AuditConfig o) { buildCounterGoogleIamV1AuditConfig++; if (buildCounterGoogleIamV1AuditConfig < 3) { - checkUnnamed54(o.auditLogConfigs!); + checkUnnamed55(o.auditLogConfigs!); unittest.expect( o.service!, unittest.equals('foo'), @@ -3143,12 +3182,12 @@ void checkGoogleIamV1AuditConfig(api.GoogleIamV1AuditConfig o) { buildCounterGoogleIamV1AuditConfig--; } -core.List buildUnnamed55() => [ +core.List buildUnnamed56() => [ 'foo', 'foo', ]; -void checkUnnamed55(core.List o) { +void checkUnnamed56(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -3165,7 +3204,7 @@ api.GoogleIamV1AuditLogConfig buildGoogleIamV1AuditLogConfig() { final o = api.GoogleIamV1AuditLogConfig(); buildCounterGoogleIamV1AuditLogConfig++; if (buildCounterGoogleIamV1AuditLogConfig < 3) { - o.exemptedMembers = buildUnnamed55(); + o.exemptedMembers = buildUnnamed56(); o.logType = 'foo'; } buildCounterGoogleIamV1AuditLogConfig--; @@ -3175,7 +3214,7 @@ api.GoogleIamV1AuditLogConfig buildGoogleIamV1AuditLogConfig() { void checkGoogleIamV1AuditLogConfig(api.GoogleIamV1AuditLogConfig o) { buildCounterGoogleIamV1AuditLogConfig++; if (buildCounterGoogleIamV1AuditLogConfig < 3) { - checkUnnamed55(o.exemptedMembers!); + checkUnnamed56(o.exemptedMembers!); unittest.expect( o.logType!, unittest.equals('foo'), @@ -3184,12 +3223,12 @@ void checkGoogleIamV1AuditLogConfig(api.GoogleIamV1AuditLogConfig o) { buildCounterGoogleIamV1AuditLogConfig--; } -core.List buildUnnamed56() => [ +core.List buildUnnamed57() => [ 'foo', 'foo', ]; -void checkUnnamed56(core.List o) { +void checkUnnamed57(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -3207,7 +3246,7 @@ api.GoogleIamV1Binding buildGoogleIamV1Binding() { buildCounterGoogleIamV1Binding++; if (buildCounterGoogleIamV1Binding < 3) { o.condition = buildGoogleTypeExpr(); - o.members = buildUnnamed56(); + o.members = buildUnnamed57(); o.role = 'foo'; } buildCounterGoogleIamV1Binding--; @@ -3218,7 +3257,7 @@ void checkGoogleIamV1Binding(api.GoogleIamV1Binding o) { buildCounterGoogleIamV1Binding++; if (buildCounterGoogleIamV1Binding < 3) { checkGoogleTypeExpr(o.condition!); - checkUnnamed56(o.members!); + checkUnnamed57(o.members!); unittest.expect( o.role!, unittest.equals('foo'), @@ -3227,23 +3266,23 @@ void checkGoogleIamV1Binding(api.GoogleIamV1Binding o) { buildCounterGoogleIamV1Binding--; } -core.List buildUnnamed57() => [ +core.List buildUnnamed58() => [ buildGoogleIamV1AuditConfig(), buildGoogleIamV1AuditConfig(), 
]; -void checkUnnamed57(core.List o) { +void checkUnnamed58(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleIamV1AuditConfig(o[0]); checkGoogleIamV1AuditConfig(o[1]); } -core.List buildUnnamed58() => [ +core.List buildUnnamed59() => [ buildGoogleIamV1Binding(), buildGoogleIamV1Binding(), ]; -void checkUnnamed58(core.List o) { +void checkUnnamed59(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleIamV1Binding(o[0]); checkGoogleIamV1Binding(o[1]); @@ -3254,8 +3293,8 @@ api.GoogleIamV1Policy buildGoogleIamV1Policy() { final o = api.GoogleIamV1Policy(); buildCounterGoogleIamV1Policy++; if (buildCounterGoogleIamV1Policy < 3) { - o.auditConfigs = buildUnnamed57(); - o.bindings = buildUnnamed58(); + o.auditConfigs = buildUnnamed58(); + o.bindings = buildUnnamed59(); o.etag = 'foo'; o.version = 42; } @@ -3266,8 +3305,8 @@ api.GoogleIamV1Policy buildGoogleIamV1Policy() { void checkGoogleIamV1Policy(api.GoogleIamV1Policy o) { buildCounterGoogleIamV1Policy++; if (buildCounterGoogleIamV1Policy < 3) { - checkUnnamed57(o.auditConfigs!); - checkUnnamed58(o.bindings!); + checkUnnamed58(o.auditConfigs!); + checkUnnamed59(o.bindings!); unittest.expect( o.etag!, unittest.equals('foo'), @@ -3304,12 +3343,12 @@ void checkGoogleIamV1SetIamPolicyRequest(api.GoogleIamV1SetIamPolicyRequest o) { buildCounterGoogleIamV1SetIamPolicyRequest--; } -core.List buildUnnamed59() => [ +core.List buildUnnamed60() => [ 'foo', 'foo', ]; -void checkUnnamed59(core.List o) { +void checkUnnamed60(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -3327,7 +3366,7 @@ api.GoogleIamV1TestIamPermissionsRequest final o = api.GoogleIamV1TestIamPermissionsRequest(); buildCounterGoogleIamV1TestIamPermissionsRequest++; if (buildCounterGoogleIamV1TestIamPermissionsRequest < 3) { - o.permissions = buildUnnamed59(); + o.permissions = buildUnnamed60(); } buildCounterGoogleIamV1TestIamPermissionsRequest--; return o; @@ -3337,17 +3376,17 @@ void checkGoogleIamV1TestIamPermissionsRequest( api.GoogleIamV1TestIamPermissionsRequest o) { buildCounterGoogleIamV1TestIamPermissionsRequest++; if (buildCounterGoogleIamV1TestIamPermissionsRequest < 3) { - checkUnnamed59(o.permissions!); + checkUnnamed60(o.permissions!); } buildCounterGoogleIamV1TestIamPermissionsRequest--; } -core.List buildUnnamed60() => [ +core.List buildUnnamed61() => [ 'foo', 'foo', ]; -void checkUnnamed60(core.List o) { +void checkUnnamed61(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -3365,7 +3404,7 @@ api.GoogleIamV1TestIamPermissionsResponse final o = api.GoogleIamV1TestIamPermissionsResponse(); buildCounterGoogleIamV1TestIamPermissionsResponse++; if (buildCounterGoogleIamV1TestIamPermissionsResponse < 3) { - o.permissions = buildUnnamed60(); + o.permissions = buildUnnamed61(); } buildCounterGoogleIamV1TestIamPermissionsResponse--; return o; @@ -3375,17 +3414,17 @@ void checkGoogleIamV1TestIamPermissionsResponse( api.GoogleIamV1TestIamPermissionsResponse o) { buildCounterGoogleIamV1TestIamPermissionsResponse++; if (buildCounterGoogleIamV1TestIamPermissionsResponse < 3) { - checkUnnamed60(o.permissions!); + checkUnnamed61(o.permissions!); } buildCounterGoogleIamV1TestIamPermissionsResponse--; } -core.List buildUnnamed61() => [ +core.List buildUnnamed62() => [ buildGoogleLongrunningOperation(), buildGoogleLongrunningOperation(), ]; -void checkUnnamed61(core.List o) { +void checkUnnamed62(core.List o) { unittest.expect(o, unittest.hasLength(2)); 
checkGoogleLongrunningOperation(o[0]); checkGoogleLongrunningOperation(o[1]); @@ -3398,7 +3437,7 @@ api.GoogleLongrunningListOperationsResponse buildCounterGoogleLongrunningListOperationsResponse++; if (buildCounterGoogleLongrunningListOperationsResponse < 3) { o.nextPageToken = 'foo'; - o.operations = buildUnnamed61(); + o.operations = buildUnnamed62(); } buildCounterGoogleLongrunningListOperationsResponse--; return o; @@ -3412,12 +3451,12 @@ void checkGoogleLongrunningListOperationsResponse( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed61(o.operations!); + checkUnnamed62(o.operations!); } buildCounterGoogleLongrunningListOperationsResponse--; } -core.Map buildUnnamed62() => { +core.Map buildUnnamed63() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -3430,7 +3469,7 @@ core.Map buildUnnamed62() => { }, }; -void checkUnnamed62(core.Map o) { +void checkUnnamed63(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted1 = (o['x']!) as core.Map; unittest.expect(casted1, unittest.hasLength(3)); @@ -3462,7 +3501,7 @@ void checkUnnamed62(core.Map o) { ); } -core.Map buildUnnamed63() => { +core.Map buildUnnamed64() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -3475,7 +3514,7 @@ core.Map buildUnnamed63() => { }, }; -void checkUnnamed63(core.Map o) { +void checkUnnamed64(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted3 = (o['x']!) as core.Map; unittest.expect(casted3, unittest.hasLength(3)); @@ -3514,9 +3553,9 @@ api.GoogleLongrunningOperation buildGoogleLongrunningOperation() { if (buildCounterGoogleLongrunningOperation < 3) { o.done = true; o.error = buildGoogleRpcStatus(); - o.metadata = buildUnnamed62(); + o.metadata = buildUnnamed63(); o.name = 'foo'; - o.response = buildUnnamed63(); + o.response = buildUnnamed64(); } buildCounterGoogleLongrunningOperation--; return o; @@ -3527,12 +3566,12 @@ void checkGoogleLongrunningOperation(api.GoogleLongrunningOperation o) { if (buildCounterGoogleLongrunningOperation < 3) { unittest.expect(o.done!, unittest.isTrue); checkGoogleRpcStatus(o.error!); - checkUnnamed62(o.metadata!); + checkUnnamed63(o.metadata!); unittest.expect( o.name!, unittest.equals('foo'), ); - checkUnnamed63(o.response!); + checkUnnamed64(o.response!); } buildCounterGoogleLongrunningOperation--; } @@ -3576,7 +3615,7 @@ void checkGoogleProtobufEmpty(api.GoogleProtobufEmpty o) { buildCounterGoogleProtobufEmpty--; } -core.Map buildUnnamed64() => { +core.Map buildUnnamed65() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -3589,7 +3628,7 @@ core.Map buildUnnamed64() => { }, }; -void checkUnnamed64(core.Map o) { +void checkUnnamed65(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted5 = (o['x']!) 
as core.Map; unittest.expect(casted5, unittest.hasLength(3)); @@ -3621,15 +3660,15 @@ void checkUnnamed64(core.Map o) { ); } -core.List> buildUnnamed65() => [ - buildUnnamed64(), - buildUnnamed64(), +core.List> buildUnnamed66() => [ + buildUnnamed65(), + buildUnnamed65(), ]; -void checkUnnamed65(core.List> o) { +void checkUnnamed66(core.List> o) { unittest.expect(o, unittest.hasLength(2)); - checkUnnamed64(o[0]); - checkUnnamed64(o[1]); + checkUnnamed65(o[0]); + checkUnnamed65(o[1]); } core.int buildCounterGoogleRpcStatus = 0; @@ -3638,7 +3677,7 @@ api.GoogleRpcStatus buildGoogleRpcStatus() { buildCounterGoogleRpcStatus++; if (buildCounterGoogleRpcStatus < 3) { o.code = 42; - o.details = buildUnnamed65(); + o.details = buildUnnamed66(); o.message = 'foo'; } buildCounterGoogleRpcStatus--; @@ -3652,7 +3691,7 @@ void checkGoogleRpcStatus(api.GoogleRpcStatus o) { o.code!, unittest.equals(42), ); - checkUnnamed65(o.details!); + checkUnnamed66(o.details!); unittest.expect( o.message!, unittest.equals('foo'), diff --git a/generated/googleapis/test/securitycenter/v1_test.dart b/generated/googleapis/test/securitycenter/v1_test.dart index 52911f63f..992e9672b 100644 --- a/generated/googleapis/test/securitycenter/v1_test.dart +++ b/generated/googleapis/test/securitycenter/v1_test.dart @@ -1826,6 +1826,43 @@ void checkDataFlowEvent(api.DataFlowEvent o) { buildCounterDataFlowEvent--; } +core.int buildCounterDataRetentionDeletionEvent = 0; +api.DataRetentionDeletionEvent buildDataRetentionDeletionEvent() { + final o = api.DataRetentionDeletionEvent(); + buildCounterDataRetentionDeletionEvent++; + if (buildCounterDataRetentionDeletionEvent < 3) { + o.dataObjectCount = 'foo'; + o.eventDetectionTime = 'foo'; + o.eventType = 'foo'; + o.maxRetentionAllowed = 'foo'; + } + buildCounterDataRetentionDeletionEvent--; + return o; +} + +void checkDataRetentionDeletionEvent(api.DataRetentionDeletionEvent o) { + buildCounterDataRetentionDeletionEvent++; + if (buildCounterDataRetentionDeletionEvent < 3) { + unittest.expect( + o.dataObjectCount!, + unittest.equals('foo'), + ); + unittest.expect( + o.eventDetectionTime!, + unittest.equals('foo'), + ); + unittest.expect( + o.eventType!, + unittest.equals('foo'), + ); + unittest.expect( + o.maxRetentionAllowed!, + unittest.equals('foo'), + ); + } + buildCounterDataRetentionDeletionEvent--; +} + core.List buildUnnamed24() => [ 'foo', 'foo', @@ -1914,6 +1951,28 @@ void checkDetection(api.Detection o) { buildCounterDetection--; } +core.int buildCounterDisk = 0; +api.Disk buildDisk() { + final o = api.Disk(); + buildCounterDisk++; + if (buildCounterDisk < 3) { + o.name = 'foo'; + } + buildCounterDisk--; + return o; +} + +void checkDisk(api.Disk o) { + buildCounterDisk++; + if (buildCounterDisk < 3) { + unittest.expect( + o.name!, + unittest.equals('foo'), + ); + } + buildCounterDisk--; +} + core.int buildCounterDiskPath = 0; api.DiskPath buildDiskPath() { final o = api.DiskPath(); @@ -2019,6 +2078,7 @@ api.EffectiveEventThreatDetectionCustomModule final o = api.EffectiveEventThreatDetectionCustomModule(); buildCounterEffectiveEventThreatDetectionCustomModule++; if (buildCounterEffectiveEventThreatDetectionCustomModule < 3) { + o.cloudProvider = 'foo'; o.config = buildUnnamed25(); o.description = 'foo'; o.displayName = 'foo'; @@ -2034,6 +2094,10 @@ void checkEffectiveEventThreatDetectionCustomModule( api.EffectiveEventThreatDetectionCustomModule o) { buildCounterEffectiveEventThreatDetectionCustomModule++; if (buildCounterEffectiveEventThreatDetectionCustomModule < 3) { + 
unittest.expect( + o.cloudProvider!, + unittest.equals('foo'), + ); checkUnnamed25(o.config!); unittest.expect( o.description!, @@ -2152,6 +2216,7 @@ api.EventThreatDetectionCustomModule buildEventThreatDetectionCustomModule() { buildCounterEventThreatDetectionCustomModule++; if (buildCounterEventThreatDetectionCustomModule < 3) { o.ancestorModule = 'foo'; + o.cloudProvider = 'foo'; o.config = buildUnnamed26(); o.description = 'foo'; o.displayName = 'foo'; @@ -2173,6 +2238,10 @@ void checkEventThreatDetectionCustomModule( o.ancestorModule!, unittest.equals('foo'), ); + unittest.expect( + o.cloudProvider!, + unittest.equals('foo'), + ); checkUnnamed26(o.config!); unittest.expect( o.description!, @@ -2444,97 +2513,108 @@ void checkUnnamed35(core.List o) { checkDataFlowEvent(o[1]); } +core.List buildUnnamed36() => [ + buildDataRetentionDeletionEvent(), + buildDataRetentionDeletionEvent(), + ]; + +void checkUnnamed36(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkDataRetentionDeletionEvent(o[0]); + checkDataRetentionDeletionEvent(o[1]); +} + core.Map - buildUnnamed36() => { + buildUnnamed37() => { 'x': buildGoogleCloudSecuritycenterV1ExternalSystem(), 'y': buildGoogleCloudSecuritycenterV1ExternalSystem(), }; -void checkUnnamed36( +void checkUnnamed37( core.Map o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudSecuritycenterV1ExternalSystem(o['x']!); checkGoogleCloudSecuritycenterV1ExternalSystem(o['y']!); } -core.List buildUnnamed37() => [ +core.List buildUnnamed38() => [ buildFile(), buildFile(), ]; -void checkUnnamed37(core.List o) { +void checkUnnamed38(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkFile(o[0]); checkFile(o[1]); } -core.List buildUnnamed38() => [ +core.List buildUnnamed39() => [ buildGroupMembership(), buildGroupMembership(), ]; -void checkUnnamed38(core.List o) { +void checkUnnamed39(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGroupMembership(o[0]); checkGroupMembership(o[1]); } -core.List buildUnnamed39() => [ +core.List buildUnnamed40() => [ buildIamBinding(), buildIamBinding(), ]; -void checkUnnamed39(core.List o) { +void checkUnnamed40(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkIamBinding(o[0]); checkIamBinding(o[1]); } -core.List buildUnnamed40() => [ +core.List buildUnnamed41() => [ buildLoadBalancer(), buildLoadBalancer(), ]; -void checkUnnamed40(core.List o) { +void checkUnnamed41(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkLoadBalancer(o[0]); checkLoadBalancer(o[1]); } -core.List buildUnnamed41() => [ +core.List buildUnnamed42() => [ buildLogEntry(), buildLogEntry(), ]; -void checkUnnamed41(core.List o) { +void checkUnnamed42(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkLogEntry(o[0]); checkLogEntry(o[1]); } -core.List buildUnnamed42() => [ +core.List buildUnnamed43() => [ buildOrgPolicy(), buildOrgPolicy(), ]; -void checkUnnamed42(core.List o) { +void checkUnnamed43(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkOrgPolicy(o[0]); checkOrgPolicy(o[1]); } -core.List buildUnnamed43() => [ +core.List buildUnnamed44() => [ buildProcess(), buildProcess(), ]; -void checkUnnamed43(core.List o) { +void checkUnnamed44(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkProcess(o[0]); checkProcess(o[1]); } -core.Map buildUnnamed44() => { +core.Map buildUnnamed45() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -2547,7 +2627,7 @@ core.Map buildUnnamed44() => { }, }; -void checkUnnamed44(core.Map o) { +void 
checkUnnamed45(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted7 = (o['x']!) as core.Map; unittest.expect(casted7, unittest.hasLength(3)); @@ -2600,21 +2680,23 @@ api.Finding buildFinding() { o.createTime = 'foo'; o.dataAccessEvents = buildUnnamed34(); o.dataFlowEvents = buildUnnamed35(); + o.dataRetentionDeletionEvents = buildUnnamed36(); o.database = buildDatabase(); o.description = 'foo'; + o.disk = buildDisk(); o.eventTime = 'foo'; o.exfiltration = buildExfiltration(); - o.externalSystems = buildUnnamed36(); + o.externalSystems = buildUnnamed37(); o.externalUri = 'foo'; - o.files = buildUnnamed37(); + o.files = buildUnnamed38(); o.findingClass = 'foo'; - o.groupMemberships = buildUnnamed38(); - o.iamBindings = buildUnnamed39(); + o.groupMemberships = buildUnnamed39(); + o.iamBindings = buildUnnamed40(); o.indicator = buildIndicator(); o.kernelRootkit = buildKernelRootkit(); o.kubernetes = buildKubernetes(); - o.loadBalancers = buildUnnamed40(); - o.logEntries = buildUnnamed41(); + o.loadBalancers = buildUnnamed41(); + o.logEntries = buildUnnamed42(); o.mitreAttack = buildMitreAttack(); o.moduleName = 'foo'; o.mute = 'foo'; @@ -2624,15 +2706,15 @@ api.Finding buildFinding() { o.name = 'foo'; o.nextSteps = 'foo'; o.notebook = buildNotebook(); - o.orgPolicies = buildUnnamed42(); + o.orgPolicies = buildUnnamed43(); o.parent = 'foo'; o.parentDisplayName = 'foo'; - o.processes = buildUnnamed43(); + o.processes = buildUnnamed44(); o.resourceName = 'foo'; o.securityMarks = buildSecurityMarks(); o.securityPosture = buildSecurityPosture(); o.severity = 'foo'; - o.sourceProperties = buildUnnamed44(); + o.sourceProperties = buildUnnamed45(); o.state = 'foo'; o.toxicCombination = buildToxicCombination(); o.vulnerability = buildVulnerability(); @@ -2669,33 +2751,35 @@ void checkFinding(api.Finding o) { ); checkUnnamed34(o.dataAccessEvents!); checkUnnamed35(o.dataFlowEvents!); + checkUnnamed36(o.dataRetentionDeletionEvents!); checkDatabase(o.database!); unittest.expect( o.description!, unittest.equals('foo'), ); + checkDisk(o.disk!); unittest.expect( o.eventTime!, unittest.equals('foo'), ); checkExfiltration(o.exfiltration!); - checkUnnamed36(o.externalSystems!); + checkUnnamed37(o.externalSystems!); unittest.expect( o.externalUri!, unittest.equals('foo'), ); - checkUnnamed37(o.files!); + checkUnnamed38(o.files!); unittest.expect( o.findingClass!, unittest.equals('foo'), ); - checkUnnamed38(o.groupMemberships!); - checkUnnamed39(o.iamBindings!); + checkUnnamed39(o.groupMemberships!); + checkUnnamed40(o.iamBindings!); checkIndicator(o.indicator!); checkKernelRootkit(o.kernelRootkit!); checkKubernetes(o.kubernetes!); - checkUnnamed40(o.loadBalancers!); - checkUnnamed41(o.logEntries!); + checkUnnamed41(o.loadBalancers!); + checkUnnamed42(o.logEntries!); checkMitreAttack(o.mitreAttack!); unittest.expect( o.moduleName!, @@ -2723,7 +2807,7 @@ void checkFinding(api.Finding o) { unittest.equals('foo'), ); checkNotebook(o.notebook!); - checkUnnamed42(o.orgPolicies!); + checkUnnamed43(o.orgPolicies!); unittest.expect( o.parent!, unittest.equals('foo'), @@ -2732,7 +2816,7 @@ void checkFinding(api.Finding o) { o.parentDisplayName!, unittest.equals('foo'), ); - checkUnnamed43(o.processes!); + checkUnnamed44(o.processes!); unittest.expect( o.resourceName!, unittest.equals('foo'), @@ -2743,7 +2827,7 @@ void checkFinding(api.Finding o) { o.severity!, unittest.equals('foo'), ); - checkUnnamed44(o.sourceProperties!); + checkUnnamed45(o.sourceProperties!); unittest.expect( o.state!, 
unittest.equals('foo'), @@ -2903,12 +2987,12 @@ void checkGoogleCloudSecuritycenterV1BigQueryExport( buildCounterGoogleCloudSecuritycenterV1BigQueryExport--; } -core.List buildUnnamed45() => [ +core.List buildUnnamed46() => [ buildSubject(), buildSubject(), ]; -void checkUnnamed45(core.List o) { +void checkUnnamed46(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkSubject(o[0]); checkSubject(o[1]); @@ -2923,7 +3007,7 @@ api.GoogleCloudSecuritycenterV1Binding o.name = 'foo'; o.ns = 'foo'; o.role = buildRole(); - o.subjects = buildUnnamed45(); + o.subjects = buildUnnamed46(); } buildCounterGoogleCloudSecuritycenterV1Binding--; return o; @@ -2942,7 +3026,7 @@ void checkGoogleCloudSecuritycenterV1Binding( unittest.equals('foo'), ); checkRole(o.role!); - checkUnnamed45(o.subjects!); + checkUnnamed46(o.subjects!); } buildCounterGoogleCloudSecuritycenterV1Binding--; } @@ -2987,12 +3071,12 @@ void checkGoogleCloudSecuritycenterV1CustomConfig( buildCounterGoogleCloudSecuritycenterV1CustomConfig--; } -core.List buildUnnamed46() => [ +core.List buildUnnamed47() => [ buildGoogleCloudSecuritycenterV1Property(), buildGoogleCloudSecuritycenterV1Property(), ]; -void checkUnnamed46(core.List o) { +void checkUnnamed47(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudSecuritycenterV1Property(o[0]); checkGoogleCloudSecuritycenterV1Property(o[1]); @@ -3004,7 +3088,7 @@ api.GoogleCloudSecuritycenterV1CustomOutputSpec final o = api.GoogleCloudSecuritycenterV1CustomOutputSpec(); buildCounterGoogleCloudSecuritycenterV1CustomOutputSpec++; if (buildCounterGoogleCloudSecuritycenterV1CustomOutputSpec < 3) { - o.properties = buildUnnamed46(); + o.properties = buildUnnamed47(); } buildCounterGoogleCloudSecuritycenterV1CustomOutputSpec--; return o; @@ -3014,7 +3098,7 @@ void checkGoogleCloudSecuritycenterV1CustomOutputSpec( api.GoogleCloudSecuritycenterV1CustomOutputSpec o) { buildCounterGoogleCloudSecuritycenterV1CustomOutputSpec++; if (buildCounterGoogleCloudSecuritycenterV1CustomOutputSpec < 3) { - checkUnnamed46(o.properties!); + checkUnnamed47(o.properties!); } buildCounterGoogleCloudSecuritycenterV1CustomOutputSpec--; } @@ -3029,6 +3113,7 @@ api.GoogleCloudSecuritycenterV1EffectiveSecurityHealthAnalyticsCustomModule buildCounterGoogleCloudSecuritycenterV1EffectiveSecurityHealthAnalyticsCustomModule++; if (buildCounterGoogleCloudSecuritycenterV1EffectiveSecurityHealthAnalyticsCustomModule < 3) { + o.cloudProvider = 'foo'; o.customConfig = buildGoogleCloudSecuritycenterV1CustomConfig(); o.displayName = 'foo'; o.enablementState = 'foo'; @@ -3044,6 +3129,10 @@ void checkGoogleCloudSecuritycenterV1EffectiveSecurityHealthAnalyticsCustomModul buildCounterGoogleCloudSecuritycenterV1EffectiveSecurityHealthAnalyticsCustomModule++; if (buildCounterGoogleCloudSecuritycenterV1EffectiveSecurityHealthAnalyticsCustomModule < 3) { + unittest.expect( + o.cloudProvider!, + unittest.equals('foo'), + ); checkGoogleCloudSecuritycenterV1CustomConfig(o.customConfig!); unittest.expect( o.displayName!, @@ -3061,12 +3150,12 @@ void checkGoogleCloudSecuritycenterV1EffectiveSecurityHealthAnalyticsCustomModul buildCounterGoogleCloudSecuritycenterV1EffectiveSecurityHealthAnalyticsCustomModule--; } -core.List buildUnnamed47() => [ +core.List buildUnnamed48() => [ 'foo', 'foo', ]; -void checkUnnamed47(core.List o) { +void checkUnnamed48(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -3084,7 +3173,7 @@ api.GoogleCloudSecuritycenterV1ExternalSystem final o = 
api.GoogleCloudSecuritycenterV1ExternalSystem(); buildCounterGoogleCloudSecuritycenterV1ExternalSystem++; if (buildCounterGoogleCloudSecuritycenterV1ExternalSystem < 3) { - o.assignees = buildUnnamed47(); + o.assignees = buildUnnamed48(); o.caseCloseTime = 'foo'; o.caseCreateTime = 'foo'; o.casePriority = 'foo'; @@ -3104,7 +3193,7 @@ void checkGoogleCloudSecuritycenterV1ExternalSystem( api.GoogleCloudSecuritycenterV1ExternalSystem o) { buildCounterGoogleCloudSecuritycenterV1ExternalSystem++; if (buildCounterGoogleCloudSecuritycenterV1ExternalSystem < 3) { - checkUnnamed47(o.assignees!); + checkUnnamed48(o.assignees!); unittest.expect( o.caseCloseTime!, unittest.equals('foo'), @@ -3236,12 +3325,12 @@ void checkGoogleCloudSecuritycenterV1Property( buildCounterGoogleCloudSecuritycenterV1Property--; } -core.List buildUnnamed48() => [ +core.List buildUnnamed49() => [ 'foo', 'foo', ]; -void checkUnnamed48(core.List o) { +void checkUnnamed49(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -3259,7 +3348,7 @@ api.GoogleCloudSecuritycenterV1ResourceSelector final o = api.GoogleCloudSecuritycenterV1ResourceSelector(); buildCounterGoogleCloudSecuritycenterV1ResourceSelector++; if (buildCounterGoogleCloudSecuritycenterV1ResourceSelector < 3) { - o.resourceTypes = buildUnnamed48(); + o.resourceTypes = buildUnnamed49(); } buildCounterGoogleCloudSecuritycenterV1ResourceSelector--; return o; @@ -3269,17 +3358,17 @@ void checkGoogleCloudSecuritycenterV1ResourceSelector( api.GoogleCloudSecuritycenterV1ResourceSelector o) { buildCounterGoogleCloudSecuritycenterV1ResourceSelector++; if (buildCounterGoogleCloudSecuritycenterV1ResourceSelector < 3) { - checkUnnamed48(o.resourceTypes!); + checkUnnamed49(o.resourceTypes!); } buildCounterGoogleCloudSecuritycenterV1ResourceSelector--; } -core.Map buildUnnamed49() => { +core.Map buildUnnamed50() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed49(core.Map o) { +void checkUnnamed50(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -3291,12 +3380,12 @@ void checkUnnamed49(core.Map o) { ); } -core.List buildUnnamed50() => [ +core.List buildUnnamed51() => [ 'foo', 'foo', ]; -void checkUnnamed50(core.List o) { +void checkUnnamed51(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -3318,13 +3407,13 @@ api.GoogleCloudSecuritycenterV1ResourceValueConfig o.createTime = 'foo'; o.description = 'foo'; o.name = 'foo'; - o.resourceLabelsSelector = buildUnnamed49(); + o.resourceLabelsSelector = buildUnnamed50(); o.resourceType = 'foo'; o.resourceValue = 'foo'; o.scope = 'foo'; o.sensitiveDataProtectionMapping = buildGoogleCloudSecuritycenterV1SensitiveDataProtectionMapping(); - o.tagValues = buildUnnamed50(); + o.tagValues = buildUnnamed51(); o.updateTime = 'foo'; } buildCounterGoogleCloudSecuritycenterV1ResourceValueConfig--; @@ -3351,7 +3440,7 @@ void checkGoogleCloudSecuritycenterV1ResourceValueConfig( o.name!, unittest.equals('foo'), ); - checkUnnamed49(o.resourceLabelsSelector!); + checkUnnamed50(o.resourceLabelsSelector!); unittest.expect( o.resourceType!, unittest.equals('foo'), @@ -3366,7 +3455,7 @@ void checkGoogleCloudSecuritycenterV1ResourceValueConfig( ); checkGoogleCloudSecuritycenterV1SensitiveDataProtectionMapping( o.sensitiveDataProtectionMapping!); - checkUnnamed50(o.tagValues!); + checkUnnamed51(o.tagValues!); unittest.expect( o.updateTime!, unittest.equals('foo'), @@ -3386,6 +3475,7 @@ api.GoogleCloudSecuritycenterV1SecurityHealthAnalyticsCustomModule if 
(buildCounterGoogleCloudSecuritycenterV1SecurityHealthAnalyticsCustomModule < 3) { o.ancestorModule = 'foo'; + o.cloudProvider = 'foo'; o.customConfig = buildGoogleCloudSecuritycenterV1CustomConfig(); o.displayName = 'foo'; o.enablementState = 'foo'; @@ -3406,6 +3496,10 @@ void checkGoogleCloudSecuritycenterV1SecurityHealthAnalyticsCustomModule( o.ancestorModule!, unittest.equals('foo'), ); + unittest.expect( + o.cloudProvider!, + unittest.equals('foo'), + ); checkGoogleCloudSecuritycenterV1CustomConfig(o.customConfig!); unittest.expect( o.displayName!, @@ -3510,12 +3604,12 @@ void checkGroupAssetsRequest(api.GroupAssetsRequest o) { buildCounterGroupAssetsRequest--; } -core.List buildUnnamed51() => [ +core.List buildUnnamed52() => [ buildGroupResult(), buildGroupResult(), ]; -void checkUnnamed51(core.List o) { +void checkUnnamed52(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGroupResult(o[0]); checkGroupResult(o[1]); @@ -3526,7 +3620,7 @@ api.GroupAssetsResponse buildGroupAssetsResponse() { final o = api.GroupAssetsResponse(); buildCounterGroupAssetsResponse++; if (buildCounterGroupAssetsResponse < 3) { - o.groupByResults = buildUnnamed51(); + o.groupByResults = buildUnnamed52(); o.nextPageToken = 'foo'; o.readTime = 'foo'; o.totalSize = 42; @@ -3538,7 +3632,7 @@ api.GroupAssetsResponse buildGroupAssetsResponse() { void checkGroupAssetsResponse(api.GroupAssetsResponse o) { buildCounterGroupAssetsResponse++; if (buildCounterGroupAssetsResponse < 3) { - checkUnnamed51(o.groupByResults!); + checkUnnamed52(o.groupByResults!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -3602,12 +3696,12 @@ void checkGroupFindingsRequest(api.GroupFindingsRequest o) { buildCounterGroupFindingsRequest--; } -core.List buildUnnamed52() => [ +core.List buildUnnamed53() => [ buildGroupResult(), buildGroupResult(), ]; -void checkUnnamed52(core.List o) { +void checkUnnamed53(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGroupResult(o[0]); checkGroupResult(o[1]); @@ -3618,7 +3712,7 @@ api.GroupFindingsResponse buildGroupFindingsResponse() { final o = api.GroupFindingsResponse(); buildCounterGroupFindingsResponse++; if (buildCounterGroupFindingsResponse < 3) { - o.groupByResults = buildUnnamed52(); + o.groupByResults = buildUnnamed53(); o.nextPageToken = 'foo'; o.readTime = 'foo'; o.totalSize = 42; @@ -3630,7 +3724,7 @@ api.GroupFindingsResponse buildGroupFindingsResponse() { void checkGroupFindingsResponse(api.GroupFindingsResponse o) { buildCounterGroupFindingsResponse++; if (buildCounterGroupFindingsResponse < 3) { - checkUnnamed52(o.groupByResults!); + checkUnnamed53(o.groupByResults!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -3674,7 +3768,7 @@ void checkGroupMembership(api.GroupMembership o) { buildCounterGroupMembership--; } -core.Map buildUnnamed53() => { +core.Map buildUnnamed54() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -3687,7 +3781,7 @@ core.Map buildUnnamed53() => { }, }; -void checkUnnamed53(core.Map o) { +void checkUnnamed54(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted9 = (o['x']!) 
as core.Map; unittest.expect(casted9, unittest.hasLength(3)); @@ -3725,7 +3819,7 @@ api.GroupResult buildGroupResult() { buildCounterGroupResult++; if (buildCounterGroupResult < 3) { o.count = 'foo'; - o.properties = buildUnnamed53(); + o.properties = buildUnnamed54(); } buildCounterGroupResult--; return o; @@ -3738,7 +3832,7 @@ void checkGroupResult(api.GroupResult o) { o.count!, unittest.equals('foo'), ); - checkUnnamed53(o.properties!); + checkUnnamed54(o.properties!); } buildCounterGroupResult--; } @@ -3797,12 +3891,12 @@ void checkIamPolicy(api.IamPolicy o) { buildCounterIamPolicy--; } -core.List buildUnnamed54() => [ +core.List buildUnnamed55() => [ 'foo', 'foo', ]; -void checkUnnamed54(core.List o) { +void checkUnnamed55(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -3814,12 +3908,12 @@ void checkUnnamed54(core.List o) { ); } -core.List buildUnnamed55() => [ +core.List buildUnnamed56() => [ 'foo', 'foo', ]; -void checkUnnamed55(core.List o) { +void checkUnnamed56(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -3831,23 +3925,23 @@ void checkUnnamed55(core.List o) { ); } -core.List buildUnnamed56() => [ +core.List buildUnnamed57() => [ buildProcessSignature(), buildProcessSignature(), ]; -void checkUnnamed56(core.List o) { +void checkUnnamed57(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkProcessSignature(o[0]); checkProcessSignature(o[1]); } -core.List buildUnnamed57() => [ +core.List buildUnnamed58() => [ 'foo', 'foo', ]; -void checkUnnamed57(core.List o) { +void checkUnnamed58(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -3864,10 +3958,10 @@ api.Indicator buildIndicator() { final o = api.Indicator(); buildCounterIndicator++; if (buildCounterIndicator < 3) { - o.domains = buildUnnamed54(); - o.ipAddresses = buildUnnamed55(); - o.signatures = buildUnnamed56(); - o.uris = buildUnnamed57(); + o.domains = buildUnnamed55(); + o.ipAddresses = buildUnnamed56(); + o.signatures = buildUnnamed57(); + o.uris = buildUnnamed58(); } buildCounterIndicator--; return o; @@ -3876,10 +3970,10 @@ api.Indicator buildIndicator() { void checkIndicator(api.Indicator o) { buildCounterIndicator++; if (buildCounterIndicator < 3) { - checkUnnamed54(o.domains!); - checkUnnamed55(o.ipAddresses!); - checkUnnamed56(o.signatures!); - checkUnnamed57(o.uris!); + checkUnnamed55(o.domains!); + checkUnnamed56(o.ipAddresses!); + checkUnnamed57(o.signatures!); + checkUnnamed58(o.uris!); } buildCounterIndicator--; } @@ -3922,78 +4016,78 @@ void checkKernelRootkit(api.KernelRootkit o) { buildCounterKernelRootkit--; } -core.List buildUnnamed58() => [ +core.List buildUnnamed59() => [ buildAccessReview(), buildAccessReview(), ]; -void checkUnnamed58(core.List o) { +void checkUnnamed59(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkAccessReview(o[0]); checkAccessReview(o[1]); } -core.List buildUnnamed59() => [ +core.List buildUnnamed60() => [ buildGoogleCloudSecuritycenterV1Binding(), buildGoogleCloudSecuritycenterV1Binding(), ]; -void checkUnnamed59(core.List o) { +void checkUnnamed60(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudSecuritycenterV1Binding(o[0]); checkGoogleCloudSecuritycenterV1Binding(o[1]); } -core.List buildUnnamed60() => [ +core.List buildUnnamed61() => [ buildNodePool(), buildNodePool(), ]; -void checkUnnamed60(core.List o) { +void checkUnnamed61(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkNodePool(o[0]); 
checkNodePool(o[1]); } -core.List buildUnnamed61() => [ +core.List buildUnnamed62() => [ buildNode(), buildNode(), ]; -void checkUnnamed61(core.List o) { +void checkUnnamed62(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkNode(o[0]); checkNode(o[1]); } -core.List buildUnnamed62() => [ +core.List buildUnnamed63() => [ buildObject(), buildObject(), ]; -void checkUnnamed62(core.List o) { +void checkUnnamed63(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkObject(o[0]); checkObject(o[1]); } -core.List buildUnnamed63() => [ +core.List buildUnnamed64() => [ buildPod(), buildPod(), ]; -void checkUnnamed63(core.List o) { +void checkUnnamed64(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkPod(o[0]); checkPod(o[1]); } -core.List buildUnnamed64() => [ +core.List buildUnnamed65() => [ buildRole(), buildRole(), ]; -void checkUnnamed64(core.List o) { +void checkUnnamed65(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkRole(o[0]); checkRole(o[1]); @@ -4004,13 +4098,13 @@ api.Kubernetes buildKubernetes() { final o = api.Kubernetes(); buildCounterKubernetes++; if (buildCounterKubernetes < 3) { - o.accessReviews = buildUnnamed58(); - o.bindings = buildUnnamed59(); - o.nodePools = buildUnnamed60(); - o.nodes = buildUnnamed61(); - o.objects = buildUnnamed62(); - o.pods = buildUnnamed63(); - o.roles = buildUnnamed64(); + o.accessReviews = buildUnnamed59(); + o.bindings = buildUnnamed60(); + o.nodePools = buildUnnamed61(); + o.nodes = buildUnnamed62(); + o.objects = buildUnnamed63(); + o.pods = buildUnnamed64(); + o.roles = buildUnnamed65(); } buildCounterKubernetes--; return o; @@ -4019,13 +4113,13 @@ api.Kubernetes buildKubernetes() { void checkKubernetes(api.Kubernetes o) { buildCounterKubernetes++; if (buildCounterKubernetes < 3) { - checkUnnamed58(o.accessReviews!); - checkUnnamed59(o.bindings!); - checkUnnamed60(o.nodePools!); - checkUnnamed61(o.nodes!); - checkUnnamed62(o.objects!); - checkUnnamed63(o.pods!); - checkUnnamed64(o.roles!); + checkUnnamed59(o.accessReviews!); + checkUnnamed60(o.bindings!); + checkUnnamed61(o.nodePools!); + checkUnnamed62(o.nodes!); + checkUnnamed63(o.objects!); + checkUnnamed64(o.pods!); + checkUnnamed65(o.roles!); } buildCounterKubernetes--; } @@ -4057,12 +4151,12 @@ void checkLabel(api.Label o) { buildCounterLabel--; } -core.List buildUnnamed65() => [ +core.List buildUnnamed66() => [ buildListAssetsResult(), buildListAssetsResult(), ]; -void checkUnnamed65(core.List o) { +void checkUnnamed66(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkListAssetsResult(o[0]); checkListAssetsResult(o[1]); @@ -4073,7 +4167,7 @@ api.ListAssetsResponse buildListAssetsResponse() { final o = api.ListAssetsResponse(); buildCounterListAssetsResponse++; if (buildCounterListAssetsResponse < 3) { - o.listAssetsResults = buildUnnamed65(); + o.listAssetsResults = buildUnnamed66(); o.nextPageToken = 'foo'; o.readTime = 'foo'; o.totalSize = 42; @@ -4085,7 +4179,7 @@ api.ListAssetsResponse buildListAssetsResponse() { void checkListAssetsResponse(api.ListAssetsResponse o) { buildCounterListAssetsResponse++; if (buildCounterListAssetsResponse < 3) { - checkUnnamed65(o.listAssetsResults!); + checkUnnamed66(o.listAssetsResults!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -4126,12 +4220,12 @@ void checkListAssetsResult(api.ListAssetsResult o) { buildCounterListAssetsResult--; } -core.List buildUnnamed66() => [ +core.List buildUnnamed67() => [ buildAttackPath(), buildAttackPath(), ]; -void 
checkUnnamed66(core.List o) { +void checkUnnamed67(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkAttackPath(o[0]); checkAttackPath(o[1]); @@ -4142,7 +4236,7 @@ api.ListAttackPathsResponse buildListAttackPathsResponse() { final o = api.ListAttackPathsResponse(); buildCounterListAttackPathsResponse++; if (buildCounterListAttackPathsResponse < 3) { - o.attackPaths = buildUnnamed66(); + o.attackPaths = buildUnnamed67(); o.nextPageToken = 'foo'; } buildCounterListAttackPathsResponse--; @@ -4152,7 +4246,7 @@ api.ListAttackPathsResponse buildListAttackPathsResponse() { void checkListAttackPathsResponse(api.ListAttackPathsResponse o) { buildCounterListAttackPathsResponse++; if (buildCounterListAttackPathsResponse < 3) { - checkUnnamed66(o.attackPaths!); + checkUnnamed67(o.attackPaths!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -4161,12 +4255,12 @@ void checkListAttackPathsResponse(api.ListAttackPathsResponse o) { buildCounterListAttackPathsResponse--; } -core.List buildUnnamed67() => [ +core.List buildUnnamed68() => [ buildGoogleCloudSecuritycenterV1BigQueryExport(), buildGoogleCloudSecuritycenterV1BigQueryExport(), ]; -void checkUnnamed67( +void checkUnnamed68( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudSecuritycenterV1BigQueryExport(o[0]); @@ -4178,7 +4272,7 @@ api.ListBigQueryExportsResponse buildListBigQueryExportsResponse() { final o = api.ListBigQueryExportsResponse(); buildCounterListBigQueryExportsResponse++; if (buildCounterListBigQueryExportsResponse < 3) { - o.bigQueryExports = buildUnnamed67(); + o.bigQueryExports = buildUnnamed68(); o.nextPageToken = 'foo'; } buildCounterListBigQueryExportsResponse--; @@ -4188,7 +4282,7 @@ api.ListBigQueryExportsResponse buildListBigQueryExportsResponse() { void checkListBigQueryExportsResponse(api.ListBigQueryExportsResponse o) { buildCounterListBigQueryExportsResponse++; if (buildCounterListBigQueryExportsResponse < 3) { - checkUnnamed67(o.bigQueryExports!); + checkUnnamed68(o.bigQueryExports!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -4197,12 +4291,12 @@ void checkListBigQueryExportsResponse(api.ListBigQueryExportsResponse o) { buildCounterListBigQueryExportsResponse--; } -core.List buildUnnamed68() => [ +core.List buildUnnamed69() => [ buildEventThreatDetectionCustomModule(), buildEventThreatDetectionCustomModule(), ]; -void checkUnnamed68(core.List o) { +void checkUnnamed69(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkEventThreatDetectionCustomModule(o[0]); checkEventThreatDetectionCustomModule(o[1]); @@ -4215,7 +4309,7 @@ api.ListDescendantEventThreatDetectionCustomModulesResponse final o = api.ListDescendantEventThreatDetectionCustomModulesResponse(); buildCounterListDescendantEventThreatDetectionCustomModulesResponse++; if (buildCounterListDescendantEventThreatDetectionCustomModulesResponse < 3) { - o.eventThreatDetectionCustomModules = buildUnnamed68(); + o.eventThreatDetectionCustomModules = buildUnnamed69(); o.nextPageToken = 'foo'; } buildCounterListDescendantEventThreatDetectionCustomModulesResponse--; @@ -4226,7 +4320,7 @@ void checkListDescendantEventThreatDetectionCustomModulesResponse( api.ListDescendantEventThreatDetectionCustomModulesResponse o) { buildCounterListDescendantEventThreatDetectionCustomModulesResponse++; if (buildCounterListDescendantEventThreatDetectionCustomModulesResponse < 3) { - checkUnnamed68(o.eventThreatDetectionCustomModules!); + checkUnnamed69(o.eventThreatDetectionCustomModules!); unittest.expect( 
o.nextPageToken!, unittest.equals('foo'), @@ -4236,12 +4330,12 @@ void checkListDescendantEventThreatDetectionCustomModulesResponse( } core.List - buildUnnamed69() => [ + buildUnnamed70() => [ buildGoogleCloudSecuritycenterV1SecurityHealthAnalyticsCustomModule(), buildGoogleCloudSecuritycenterV1SecurityHealthAnalyticsCustomModule(), ]; -void checkUnnamed69( +void checkUnnamed70( core.List< api.GoogleCloudSecuritycenterV1SecurityHealthAnalyticsCustomModule> o) { @@ -4259,7 +4353,7 @@ api.ListDescendantSecurityHealthAnalyticsCustomModulesResponse if (buildCounterListDescendantSecurityHealthAnalyticsCustomModulesResponse < 3) { o.nextPageToken = 'foo'; - o.securityHealthAnalyticsCustomModules = buildUnnamed69(); + o.securityHealthAnalyticsCustomModules = buildUnnamed70(); } buildCounterListDescendantSecurityHealthAnalyticsCustomModulesResponse--; return o; @@ -4274,17 +4368,17 @@ void checkListDescendantSecurityHealthAnalyticsCustomModulesResponse( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed69(o.securityHealthAnalyticsCustomModules!); + checkUnnamed70(o.securityHealthAnalyticsCustomModules!); } buildCounterListDescendantSecurityHealthAnalyticsCustomModulesResponse--; } -core.List buildUnnamed70() => [ +core.List buildUnnamed71() => [ buildEffectiveEventThreatDetectionCustomModule(), buildEffectiveEventThreatDetectionCustomModule(), ]; -void checkUnnamed70( +void checkUnnamed71( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkEffectiveEventThreatDetectionCustomModule(o[0]); @@ -4297,7 +4391,7 @@ api.ListEffectiveEventThreatDetectionCustomModulesResponse final o = api.ListEffectiveEventThreatDetectionCustomModulesResponse(); buildCounterListEffectiveEventThreatDetectionCustomModulesResponse++; if (buildCounterListEffectiveEventThreatDetectionCustomModulesResponse < 3) { - o.effectiveEventThreatDetectionCustomModules = buildUnnamed70(); + o.effectiveEventThreatDetectionCustomModules = buildUnnamed71(); o.nextPageToken = 'foo'; } buildCounterListEffectiveEventThreatDetectionCustomModulesResponse--; @@ -4308,7 +4402,7 @@ void checkListEffectiveEventThreatDetectionCustomModulesResponse( api.ListEffectiveEventThreatDetectionCustomModulesResponse o) { buildCounterListEffectiveEventThreatDetectionCustomModulesResponse++; if (buildCounterListEffectiveEventThreatDetectionCustomModulesResponse < 3) { - checkUnnamed70(o.effectiveEventThreatDetectionCustomModules!); + checkUnnamed71(o.effectiveEventThreatDetectionCustomModules!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -4320,12 +4414,12 @@ void checkListEffectiveEventThreatDetectionCustomModulesResponse( core.List< api .GoogleCloudSecuritycenterV1EffectiveSecurityHealthAnalyticsCustomModule> - buildUnnamed71() => [ + buildUnnamed72() => [ buildGoogleCloudSecuritycenterV1EffectiveSecurityHealthAnalyticsCustomModule(), buildGoogleCloudSecuritycenterV1EffectiveSecurityHealthAnalyticsCustomModule(), ]; -void checkUnnamed71( +void checkUnnamed72( core.List< api .GoogleCloudSecuritycenterV1EffectiveSecurityHealthAnalyticsCustomModule> @@ -4345,7 +4439,7 @@ api.ListEffectiveSecurityHealthAnalyticsCustomModulesResponse buildCounterListEffectiveSecurityHealthAnalyticsCustomModulesResponse++; if (buildCounterListEffectiveSecurityHealthAnalyticsCustomModulesResponse < 3) { - o.effectiveSecurityHealthAnalyticsCustomModules = buildUnnamed71(); + o.effectiveSecurityHealthAnalyticsCustomModules = buildUnnamed72(); o.nextPageToken = 'foo'; } buildCounterListEffectiveSecurityHealthAnalyticsCustomModulesResponse--; @@ 
-4357,7 +4451,7 @@ void checkListEffectiveSecurityHealthAnalyticsCustomModulesResponse( buildCounterListEffectiveSecurityHealthAnalyticsCustomModulesResponse++; if (buildCounterListEffectiveSecurityHealthAnalyticsCustomModulesResponse < 3) { - checkUnnamed71(o.effectiveSecurityHealthAnalyticsCustomModules!); + checkUnnamed72(o.effectiveSecurityHealthAnalyticsCustomModules!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -4366,12 +4460,12 @@ void checkListEffectiveSecurityHealthAnalyticsCustomModulesResponse( buildCounterListEffectiveSecurityHealthAnalyticsCustomModulesResponse--; } -core.List buildUnnamed72() => [ +core.List buildUnnamed73() => [ buildEventThreatDetectionCustomModule(), buildEventThreatDetectionCustomModule(), ]; -void checkUnnamed72(core.List o) { +void checkUnnamed73(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkEventThreatDetectionCustomModule(o[0]); checkEventThreatDetectionCustomModule(o[1]); @@ -4383,7 +4477,7 @@ api.ListEventThreatDetectionCustomModulesResponse final o = api.ListEventThreatDetectionCustomModulesResponse(); buildCounterListEventThreatDetectionCustomModulesResponse++; if (buildCounterListEventThreatDetectionCustomModulesResponse < 3) { - o.eventThreatDetectionCustomModules = buildUnnamed72(); + o.eventThreatDetectionCustomModules = buildUnnamed73(); o.nextPageToken = 'foo'; } buildCounterListEventThreatDetectionCustomModulesResponse--; @@ -4394,7 +4488,7 @@ void checkListEventThreatDetectionCustomModulesResponse( api.ListEventThreatDetectionCustomModulesResponse o) { buildCounterListEventThreatDetectionCustomModulesResponse++; if (buildCounterListEventThreatDetectionCustomModulesResponse < 3) { - checkUnnamed72(o.eventThreatDetectionCustomModules!); + checkUnnamed73(o.eventThreatDetectionCustomModules!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -4403,12 +4497,12 @@ void checkListEventThreatDetectionCustomModulesResponse( buildCounterListEventThreatDetectionCustomModulesResponse--; } -core.List buildUnnamed73() => [ +core.List buildUnnamed74() => [ buildListFindingsResult(), buildListFindingsResult(), ]; -void checkUnnamed73(core.List o) { +void checkUnnamed74(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkListFindingsResult(o[0]); checkListFindingsResult(o[1]); @@ -4419,7 +4513,7 @@ api.ListFindingsResponse buildListFindingsResponse() { final o = api.ListFindingsResponse(); buildCounterListFindingsResponse++; if (buildCounterListFindingsResponse < 3) { - o.listFindingsResults = buildUnnamed73(); + o.listFindingsResults = buildUnnamed74(); o.nextPageToken = 'foo'; o.readTime = 'foo'; o.totalSize = 42; @@ -4431,7 +4525,7 @@ api.ListFindingsResponse buildListFindingsResponse() { void checkListFindingsResponse(api.ListFindingsResponse o) { buildCounterListFindingsResponse++; if (buildCounterListFindingsResponse < 3) { - checkUnnamed73(o.listFindingsResults!); + checkUnnamed74(o.listFindingsResults!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -4474,12 +4568,12 @@ void checkListFindingsResult(api.ListFindingsResult o) { buildCounterListFindingsResult--; } -core.List buildUnnamed74() => [ +core.List buildUnnamed75() => [ buildGoogleCloudSecuritycenterV1MuteConfig(), buildGoogleCloudSecuritycenterV1MuteConfig(), ]; -void checkUnnamed74(core.List o) { +void checkUnnamed75(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudSecuritycenterV1MuteConfig(o[0]); checkGoogleCloudSecuritycenterV1MuteConfig(o[1]); @@ -4490,7 +4584,7 @@ 
api.ListMuteConfigsResponse buildListMuteConfigsResponse() { final o = api.ListMuteConfigsResponse(); buildCounterListMuteConfigsResponse++; if (buildCounterListMuteConfigsResponse < 3) { - o.muteConfigs = buildUnnamed74(); + o.muteConfigs = buildUnnamed75(); o.nextPageToken = 'foo'; } buildCounterListMuteConfigsResponse--; @@ -4500,7 +4594,7 @@ api.ListMuteConfigsResponse buildListMuteConfigsResponse() { void checkListMuteConfigsResponse(api.ListMuteConfigsResponse o) { buildCounterListMuteConfigsResponse++; if (buildCounterListMuteConfigsResponse < 3) { - checkUnnamed74(o.muteConfigs!); + checkUnnamed75(o.muteConfigs!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -4509,12 +4603,12 @@ void checkListMuteConfigsResponse(api.ListMuteConfigsResponse o) { buildCounterListMuteConfigsResponse--; } -core.List buildUnnamed75() => [ +core.List buildUnnamed76() => [ buildNotificationConfig(), buildNotificationConfig(), ]; -void checkUnnamed75(core.List o) { +void checkUnnamed76(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkNotificationConfig(o[0]); checkNotificationConfig(o[1]); @@ -4526,7 +4620,7 @@ api.ListNotificationConfigsResponse buildListNotificationConfigsResponse() { buildCounterListNotificationConfigsResponse++; if (buildCounterListNotificationConfigsResponse < 3) { o.nextPageToken = 'foo'; - o.notificationConfigs = buildUnnamed75(); + o.notificationConfigs = buildUnnamed76(); } buildCounterListNotificationConfigsResponse--; return o; @@ -4540,17 +4634,17 @@ void checkListNotificationConfigsResponse( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed75(o.notificationConfigs!); + checkUnnamed76(o.notificationConfigs!); } buildCounterListNotificationConfigsResponse--; } -core.List buildUnnamed76() => [ +core.List buildUnnamed77() => [ buildOperation(), buildOperation(), ]; -void checkUnnamed76(core.List o) { +void checkUnnamed77(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkOperation(o[0]); checkOperation(o[1]); @@ -4562,7 +4656,7 @@ api.ListOperationsResponse buildListOperationsResponse() { buildCounterListOperationsResponse++; if (buildCounterListOperationsResponse < 3) { o.nextPageToken = 'foo'; - o.operations = buildUnnamed76(); + o.operations = buildUnnamed77(); } buildCounterListOperationsResponse--; return o; @@ -4575,18 +4669,18 @@ void checkListOperationsResponse(api.ListOperationsResponse o) { o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed76(o.operations!); + checkUnnamed77(o.operations!); } buildCounterListOperationsResponse--; } core.List - buildUnnamed77() => [ + buildUnnamed78() => [ buildGoogleCloudSecuritycenterV1ResourceValueConfig(), buildGoogleCloudSecuritycenterV1ResourceValueConfig(), ]; -void checkUnnamed77( +void checkUnnamed78( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGoogleCloudSecuritycenterV1ResourceValueConfig(o[0]); @@ -4599,7 +4693,7 @@ api.ListResourceValueConfigsResponse buildListResourceValueConfigsResponse() { buildCounterListResourceValueConfigsResponse++; if (buildCounterListResourceValueConfigsResponse < 3) { o.nextPageToken = 'foo'; - o.resourceValueConfigs = buildUnnamed77(); + o.resourceValueConfigs = buildUnnamed78(); } buildCounterListResourceValueConfigsResponse--; return o; @@ -4613,18 +4707,18 @@ void checkListResourceValueConfigsResponse( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed77(o.resourceValueConfigs!); + checkUnnamed78(o.resourceValueConfigs!); } buildCounterListResourceValueConfigsResponse--; } core.List - buildUnnamed78() => [ 
+ buildUnnamed79() => [ buildGoogleCloudSecuritycenterV1SecurityHealthAnalyticsCustomModule(), buildGoogleCloudSecuritycenterV1SecurityHealthAnalyticsCustomModule(), ]; -void checkUnnamed78( +void checkUnnamed79( core.List< api.GoogleCloudSecuritycenterV1SecurityHealthAnalyticsCustomModule> o) { @@ -4640,7 +4734,7 @@ api.ListSecurityHealthAnalyticsCustomModulesResponse buildCounterListSecurityHealthAnalyticsCustomModulesResponse++; if (buildCounterListSecurityHealthAnalyticsCustomModulesResponse < 3) { o.nextPageToken = 'foo'; - o.securityHealthAnalyticsCustomModules = buildUnnamed78(); + o.securityHealthAnalyticsCustomModules = buildUnnamed79(); } buildCounterListSecurityHealthAnalyticsCustomModulesResponse--; return o; @@ -4654,17 +4748,17 @@ void checkListSecurityHealthAnalyticsCustomModulesResponse( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed78(o.securityHealthAnalyticsCustomModules!); + checkUnnamed79(o.securityHealthAnalyticsCustomModules!); } buildCounterListSecurityHealthAnalyticsCustomModulesResponse--; } -core.List buildUnnamed79() => [ +core.List buildUnnamed80() => [ buildSource(), buildSource(), ]; -void checkUnnamed79(core.List o) { +void checkUnnamed80(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkSource(o[0]); checkSource(o[1]); @@ -4676,7 +4770,7 @@ api.ListSourcesResponse buildListSourcesResponse() { buildCounterListSourcesResponse++; if (buildCounterListSourcesResponse < 3) { o.nextPageToken = 'foo'; - o.sources = buildUnnamed79(); + o.sources = buildUnnamed80(); } buildCounterListSourcesResponse--; return o; @@ -4689,17 +4783,17 @@ void checkListSourcesResponse(api.ListSourcesResponse o) { o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed79(o.sources!); + checkUnnamed80(o.sources!); } buildCounterListSourcesResponse--; } -core.List buildUnnamed80() => [ +core.List buildUnnamed81() => [ buildValuedResource(), buildValuedResource(), ]; -void checkUnnamed80(core.List o) { +void checkUnnamed81(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkValuedResource(o[0]); checkValuedResource(o[1]); @@ -4712,7 +4806,7 @@ api.ListValuedResourcesResponse buildListValuedResourcesResponse() { if (buildCounterListValuedResourcesResponse < 3) { o.nextPageToken = 'foo'; o.totalSize = 42; - o.valuedResources = buildUnnamed80(); + o.valuedResources = buildUnnamed81(); } buildCounterListValuedResourcesResponse--; return o; @@ -4729,7 +4823,7 @@ void checkListValuedResourcesResponse(api.ListValuedResourcesResponse o) { o.totalSize!, unittest.equals(42), ); - checkUnnamed80(o.valuedResources!); + checkUnnamed81(o.valuedResources!); } buildCounterListValuedResourcesResponse--; } @@ -4775,12 +4869,12 @@ void checkLogEntry(api.LogEntry o) { buildCounterLogEntry--; } -core.List buildUnnamed81() => [ +core.List buildUnnamed82() => [ buildDetection(), buildDetection(), ]; -void checkUnnamed81(core.List o) { +void checkUnnamed82(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkDetection(o[0]); checkDetection(o[1]); @@ -4792,7 +4886,7 @@ api.MemoryHashSignature buildMemoryHashSignature() { buildCounterMemoryHashSignature++; if (buildCounterMemoryHashSignature < 3) { o.binaryFamily = 'foo'; - o.detections = buildUnnamed81(); + o.detections = buildUnnamed82(); } buildCounterMemoryHashSignature--; return o; @@ -4805,17 +4899,17 @@ void checkMemoryHashSignature(api.MemoryHashSignature o) { o.binaryFamily!, unittest.equals('foo'), ); - checkUnnamed81(o.detections!); + checkUnnamed82(o.detections!); } buildCounterMemoryHashSignature--; } 
-core.List buildUnnamed82() => [ +core.List buildUnnamed83() => [ 'foo', 'foo', ]; -void checkUnnamed82(core.List o) { +void checkUnnamed83(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -4827,12 +4921,12 @@ void checkUnnamed82(core.List o) { ); } -core.List buildUnnamed83() => [ +core.List buildUnnamed84() => [ 'foo', 'foo', ]; -void checkUnnamed83(core.List o) { +void checkUnnamed84(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -4844,12 +4938,12 @@ void checkUnnamed83(core.List o) { ); } -core.List buildUnnamed84() => [ +core.List buildUnnamed85() => [ 'foo', 'foo', ]; -void checkUnnamed84(core.List o) { +void checkUnnamed85(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -4866,10 +4960,10 @@ api.MitreAttack buildMitreAttack() { final o = api.MitreAttack(); buildCounterMitreAttack++; if (buildCounterMitreAttack < 3) { - o.additionalTactics = buildUnnamed82(); - o.additionalTechniques = buildUnnamed83(); + o.additionalTactics = buildUnnamed83(); + o.additionalTechniques = buildUnnamed84(); o.primaryTactic = 'foo'; - o.primaryTechniques = buildUnnamed84(); + o.primaryTechniques = buildUnnamed85(); o.version = 'foo'; } buildCounterMitreAttack--; @@ -4879,13 +4973,13 @@ api.MitreAttack buildMitreAttack() { void checkMitreAttack(api.MitreAttack o) { buildCounterMitreAttack++; if (buildCounterMitreAttack < 3) { - checkUnnamed82(o.additionalTactics!); - checkUnnamed83(o.additionalTechniques!); + checkUnnamed83(o.additionalTactics!); + checkUnnamed84(o.additionalTechniques!); unittest.expect( o.primaryTactic!, unittest.equals('foo'), ); - checkUnnamed84(o.primaryTechniques!); + checkUnnamed85(o.primaryTechniques!); unittest.expect( o.version!, unittest.equals('foo'), @@ -4894,12 +4988,12 @@ void checkMitreAttack(api.MitreAttack o) { buildCounterMitreAttack--; } -core.List buildUnnamed85() => [ +core.List buildUnnamed86() => [ buildDynamicMuteRecord(), buildDynamicMuteRecord(), ]; -void checkUnnamed85(core.List o) { +void checkUnnamed86(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkDynamicMuteRecord(o[0]); checkDynamicMuteRecord(o[1]); @@ -4910,7 +5004,7 @@ api.MuteInfo buildMuteInfo() { final o = api.MuteInfo(); buildCounterMuteInfo++; if (buildCounterMuteInfo < 3) { - o.dynamicMuteRecords = buildUnnamed85(); + o.dynamicMuteRecords = buildUnnamed86(); o.staticMute = buildStaticMute(); } buildCounterMuteInfo--; @@ -4920,7 +5014,7 @@ api.MuteInfo buildMuteInfo() { void checkMuteInfo(api.MuteInfo o) { buildCounterMuteInfo++; if (buildCounterMuteInfo < 3) { - checkUnnamed85(o.dynamicMuteRecords!); + checkUnnamed86(o.dynamicMuteRecords!); checkStaticMute(o.staticMute!); } buildCounterMuteInfo--; @@ -4948,12 +5042,12 @@ void checkNode(api.Node o) { buildCounterNode--; } -core.List buildUnnamed86() => [ +core.List buildUnnamed87() => [ buildNode(), buildNode(), ]; -void checkUnnamed86(core.List o) { +void checkUnnamed87(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkNode(o[0]); checkNode(o[1]); @@ -4965,7 +5059,7 @@ api.NodePool buildNodePool() { buildCounterNodePool++; if (buildCounterNodePool < 3) { o.name = 'foo'; - o.nodes = buildUnnamed86(); + o.nodes = buildUnnamed87(); } buildCounterNodePool--; return o; @@ -4978,7 +5072,7 @@ void checkNodePool(api.NodePool o) { o.name!, unittest.equals('foo'), ); - checkUnnamed86(o.nodes!); + checkUnnamed87(o.nodes!); } buildCounterNodePool--; } @@ -5059,12 +5153,12 @@ void checkNotificationConfig(api.NotificationConfig 
o) { buildCounterNotificationConfig--; } -core.List buildUnnamed87() => [ +core.List buildUnnamed88() => [ buildContainer(), buildContainer(), ]; -void checkUnnamed87(core.List o) { +void checkUnnamed88(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkContainer(o[0]); checkContainer(o[1]); @@ -5075,7 +5169,7 @@ api.Object buildObject() { final o = api.Object(); buildCounterObject++; if (buildCounterObject < 3) { - o.containers = buildUnnamed87(); + o.containers = buildUnnamed88(); o.group = 'foo'; o.kind = 'foo'; o.name = 'foo'; @@ -5088,7 +5182,7 @@ api.Object buildObject() { void checkObject(api.Object o) { buildCounterObject++; if (buildCounterObject < 3) { - checkUnnamed87(o.containers!); + checkUnnamed88(o.containers!); unittest.expect( o.group!, unittest.equals('foo'), @@ -5109,7 +5203,7 @@ void checkObject(api.Object o) { buildCounterObject--; } -core.Map buildUnnamed88() => { +core.Map buildUnnamed89() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -5122,7 +5216,7 @@ core.Map buildUnnamed88() => { }, }; -void checkUnnamed88(core.Map o) { +void checkUnnamed89(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted11 = (o['x']!) as core.Map; unittest.expect(casted11, unittest.hasLength(3)); @@ -5154,7 +5248,7 @@ void checkUnnamed88(core.Map o) { ); } -core.Map buildUnnamed89() => { +core.Map buildUnnamed90() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -5167,7 +5261,7 @@ core.Map buildUnnamed89() => { }, }; -void checkUnnamed89(core.Map o) { +void checkUnnamed90(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted13 = (o['x']!) as core.Map; unittest.expect(casted13, unittest.hasLength(3)); @@ -5206,9 +5300,9 @@ api.Operation buildOperation() { if (buildCounterOperation < 3) { o.done = true; o.error = buildStatus(); - o.metadata = buildUnnamed88(); + o.metadata = buildUnnamed89(); o.name = 'foo'; - o.response = buildUnnamed89(); + o.response = buildUnnamed90(); } buildCounterOperation--; return o; @@ -5219,12 +5313,12 @@ void checkOperation(api.Operation o) { if (buildCounterOperation < 3) { unittest.expect(o.done!, unittest.isTrue); checkStatus(o.error!); - checkUnnamed88(o.metadata!); + checkUnnamed89(o.metadata!); unittest.expect( o.name!, unittest.equals('foo'), ); - checkUnnamed89(o.response!); + checkUnnamed90(o.response!); } buildCounterOperation--; } @@ -5346,23 +5440,23 @@ void checkPathNodeAssociatedFinding(api.PathNodeAssociatedFinding o) { buildCounterPathNodeAssociatedFinding--; } -core.List buildUnnamed90() => [ +core.List buildUnnamed91() => [ buildContainer(), buildContainer(), ]; -void checkUnnamed90(core.List o) { +void checkUnnamed91(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkContainer(o[0]); checkContainer(o[1]); } -core.List buildUnnamed91() => [ +core.List buildUnnamed92() => [ buildLabel(), buildLabel(), ]; -void checkUnnamed91(core.List o) { +void checkUnnamed92(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkLabel(o[0]); checkLabel(o[1]); @@ -5373,8 +5467,8 @@ api.Pod buildPod() { final o = api.Pod(); buildCounterPod++; if (buildCounterPod < 3) { - o.containers = buildUnnamed90(); - o.labels = buildUnnamed91(); + o.containers = buildUnnamed91(); + o.labels = buildUnnamed92(); o.name = 'foo'; o.ns = 'foo'; } @@ -5385,8 +5479,8 @@ api.Pod buildPod() { void checkPod(api.Pod o) { buildCounterPod++; if (buildCounterPod < 3) { - checkUnnamed90(o.containers!); - checkUnnamed91(o.labels!); + checkUnnamed91(o.containers!); + checkUnnamed92(o.labels!); unittest.expect( o.name!, 
unittest.equals('foo'), @@ -5399,23 +5493,23 @@ void checkPod(api.Pod o) { buildCounterPod--; } -core.List buildUnnamed92() => [ +core.List buildUnnamed93() => [ buildAuditConfig(), buildAuditConfig(), ]; -void checkUnnamed92(core.List o) { +void checkUnnamed93(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkAuditConfig(o[0]); checkAuditConfig(o[1]); } -core.List buildUnnamed93() => [ +core.List buildUnnamed94() => [ buildBinding(), buildBinding(), ]; -void checkUnnamed93(core.List o) { +void checkUnnamed94(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkBinding(o[0]); checkBinding(o[1]); @@ -5426,8 +5520,8 @@ api.Policy buildPolicy() { final o = api.Policy(); buildCounterPolicy++; if (buildCounterPolicy < 3) { - o.auditConfigs = buildUnnamed92(); - o.bindings = buildUnnamed93(); + o.auditConfigs = buildUnnamed93(); + o.bindings = buildUnnamed94(); o.etag = 'foo'; o.version = 42; } @@ -5438,8 +5532,8 @@ api.Policy buildPolicy() { void checkPolicy(api.Policy o) { buildCounterPolicy++; if (buildCounterPolicy < 3) { - checkUnnamed92(o.auditConfigs!); - checkUnnamed93(o.bindings!); + checkUnnamed93(o.auditConfigs!); + checkUnnamed94(o.bindings!); unittest.expect( o.etag!, unittest.equals('foo'), @@ -5511,12 +5605,12 @@ void checkPosition(api.Position o) { buildCounterPosition--; } -core.List buildUnnamed94() => [ +core.List buildUnnamed95() => [ 'foo', 'foo', ]; -void checkUnnamed94(core.List o) { +void checkUnnamed95(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -5528,23 +5622,23 @@ void checkUnnamed94(core.List o) { ); } -core.List buildUnnamed95() => [ +core.List buildUnnamed96() => [ buildEnvironmentVariable(), buildEnvironmentVariable(), ]; -void checkUnnamed95(core.List o) { +void checkUnnamed96(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkEnvironmentVariable(o[0]); checkEnvironmentVariable(o[1]); } -core.List buildUnnamed96() => [ +core.List buildUnnamed97() => [ buildFile(), buildFile(), ]; -void checkUnnamed96(core.List o) { +void checkUnnamed97(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkFile(o[0]); checkFile(o[1]); @@ -5555,12 +5649,12 @@ api.Process buildProcess() { final o = api.Process(); buildCounterProcess++; if (buildCounterProcess < 3) { - o.args = buildUnnamed94(); + o.args = buildUnnamed95(); o.argumentsTruncated = true; o.binary = buildFile(); - o.envVariables = buildUnnamed95(); + o.envVariables = buildUnnamed96(); o.envVariablesTruncated = true; - o.libraries = buildUnnamed96(); + o.libraries = buildUnnamed97(); o.name = 'foo'; o.parentPid = 'foo'; o.pid = 'foo'; @@ -5573,12 +5667,12 @@ api.Process buildProcess() { void checkProcess(api.Process o) { buildCounterProcess++; if (buildCounterProcess < 3) { - checkUnnamed94(o.args!); + checkUnnamed95(o.args!); unittest.expect(o.argumentsTruncated!, unittest.isTrue); checkFile(o.binary!); - checkUnnamed95(o.envVariables!); + checkUnnamed96(o.envVariables!); unittest.expect(o.envVariablesTruncated!, unittest.isTrue); - checkUnnamed96(o.libraries!); + checkUnnamed97(o.libraries!); unittest.expect( o.name!, unittest.equals('foo'), @@ -5686,12 +5780,12 @@ void checkRequests(api.Requests o) { buildCounterRequests--; } -core.List buildUnnamed97() => [ +core.List buildUnnamed98() => [ buildFolder(), buildFolder(), ]; -void checkUnnamed97(core.List o) { +void checkUnnamed98(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkFolder(o[0]); checkFolder(o[1]); @@ -5706,7 +5800,7 @@ api.Resource buildResource() { 
o.azureMetadata = buildAzureMetadata(); o.cloudProvider = 'foo'; o.displayName = 'foo'; - o.folders = buildUnnamed97(); + o.folders = buildUnnamed98(); o.location = 'foo'; o.name = 'foo'; o.organization = 'foo'; @@ -5736,7 +5830,7 @@ void checkResource(api.Resource o) { o.displayName!, unittest.equals('foo'), ); - checkUnnamed97(o.folders!); + checkUnnamed98(o.folders!); unittest.expect( o.location!, unittest.equals('foo'), @@ -5782,12 +5876,12 @@ void checkResource(api.Resource o) { buildCounterResource--; } -core.List buildUnnamed98() => [ +core.List buildUnnamed99() => [ buildResourcePathNode(), buildResourcePathNode(), ]; -void checkUnnamed98(core.List o) { +void checkUnnamed99(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkResourcePathNode(o[0]); checkResourcePathNode(o[1]); @@ -5798,7 +5892,7 @@ api.ResourcePath buildResourcePath() { final o = api.ResourcePath(); buildCounterResourcePath++; if (buildCounterResourcePath < 3) { - o.nodes = buildUnnamed98(); + o.nodes = buildUnnamed99(); } buildCounterResourcePath--; return o; @@ -5807,7 +5901,7 @@ api.ResourcePath buildResourcePath() { void checkResourcePath(api.ResourcePath o) { buildCounterResourcePath++; if (buildCounterResourcePath < 3) { - checkUnnamed98(o.nodes!); + checkUnnamed99(o.nodes!); } buildCounterResourcePath--; } @@ -5945,23 +6039,23 @@ void checkSecurityBulletin(api.SecurityBulletin o) { buildCounterSecurityBulletin--; } -core.List buildUnnamed99() => [ +core.List buildUnnamed100() => [ buildFolder(), buildFolder(), ]; -void checkUnnamed99(core.List o) { +void checkUnnamed100(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkFolder(o[0]); checkFolder(o[1]); } -core.List buildUnnamed100() => [ +core.List buildUnnamed101() => [ 'foo', 'foo', ]; -void checkUnnamed100(core.List o) { +void checkUnnamed101(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -5978,10 +6072,10 @@ api.SecurityCenterProperties buildSecurityCenterProperties() { final o = api.SecurityCenterProperties(); buildCounterSecurityCenterProperties++; if (buildCounterSecurityCenterProperties < 3) { - o.folders = buildUnnamed99(); + o.folders = buildUnnamed100(); o.resourceDisplayName = 'foo'; o.resourceName = 'foo'; - o.resourceOwners = buildUnnamed100(); + o.resourceOwners = buildUnnamed101(); o.resourceParent = 'foo'; o.resourceParentDisplayName = 'foo'; o.resourceProject = 'foo'; @@ -5995,7 +6089,7 @@ api.SecurityCenterProperties buildSecurityCenterProperties() { void checkSecurityCenterProperties(api.SecurityCenterProperties o) { buildCounterSecurityCenterProperties++; if (buildCounterSecurityCenterProperties < 3) { - checkUnnamed99(o.folders!); + checkUnnamed100(o.folders!); unittest.expect( o.resourceDisplayName!, unittest.equals('foo'), @@ -6004,7 +6098,7 @@ void checkSecurityCenterProperties(api.SecurityCenterProperties o) { o.resourceName!, unittest.equals('foo'), ); - checkUnnamed100(o.resourceOwners!); + checkUnnamed101(o.resourceOwners!); unittest.expect( o.resourceParent!, unittest.equals('foo'), @@ -6029,12 +6123,12 @@ void checkSecurityCenterProperties(api.SecurityCenterProperties o) { buildCounterSecurityCenterProperties--; } -core.Map buildUnnamed101() => { +core.Map buildUnnamed102() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed101(core.Map o) { +void checkUnnamed102(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -6052,7 +6146,7 @@ api.SecurityMarks buildSecurityMarks() { buildCounterSecurityMarks++; if (buildCounterSecurityMarks < 3) 
{ o.canonicalName = 'foo'; - o.marks = buildUnnamed101(); + o.marks = buildUnnamed102(); o.name = 'foo'; } buildCounterSecurityMarks--; @@ -6066,7 +6160,7 @@ void checkSecurityMarks(api.SecurityMarks o) { o.canonicalName!, unittest.equals('foo'), ); - checkUnnamed101(o.marks!); + checkUnnamed102(o.marks!); unittest.expect( o.name!, unittest.equals('foo'), @@ -6104,12 +6198,12 @@ void checkSecurityPolicy(api.SecurityPolicy o) { buildCounterSecurityPolicy--; } -core.List buildUnnamed102() => [ +core.List buildUnnamed103() => [ buildPolicyDriftDetails(), buildPolicyDriftDetails(), ]; -void checkUnnamed102(core.List o) { +void checkUnnamed103(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkPolicyDriftDetails(o[0]); checkPolicyDriftDetails(o[1]); @@ -6123,7 +6217,7 @@ api.SecurityPosture buildSecurityPosture() { o.changedPolicy = 'foo'; o.name = 'foo'; o.policy = 'foo'; - o.policyDriftDetails = buildUnnamed102(); + o.policyDriftDetails = buildUnnamed103(); o.policySet = 'foo'; o.postureDeployment = 'foo'; o.postureDeploymentResource = 'foo'; @@ -6148,7 +6242,7 @@ void checkSecurityPosture(api.SecurityPosture o) { o.policy!, unittest.equals('foo'), ); - checkUnnamed102(o.policyDriftDetails!); + checkUnnamed103(o.policyDriftDetails!); unittest.expect( o.policySet!, unittest.equals('foo'), @@ -6313,7 +6407,7 @@ void checkSimulateSecurityHealthAnalyticsCustomModuleResponse( buildCounterSimulateSecurityHealthAnalyticsCustomModuleResponse--; } -core.Map buildUnnamed103() => { +core.Map buildUnnamed104() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -6326,7 +6420,7 @@ core.Map buildUnnamed103() => { }, }; -void checkUnnamed103(core.Map o) { +void checkUnnamed104(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted15 = (o['x']!) as core.Map; unittest.expect(casted15, unittest.hasLength(3)); @@ -6364,7 +6458,7 @@ api.SimulatedResource buildSimulatedResource() { buildCounterSimulatedResource++; if (buildCounterSimulatedResource < 3) { o.iamPolicyData = buildPolicy(); - o.resourceData = buildUnnamed103(); + o.resourceData = buildUnnamed104(); o.resourceType = 'foo'; } buildCounterSimulatedResource--; @@ -6375,7 +6469,7 @@ void checkSimulatedResource(api.SimulatedResource o) { buildCounterSimulatedResource++; if (buildCounterSimulatedResource < 3) { checkPolicy(o.iamPolicyData!); - checkUnnamed103(o.resourceData!); + checkUnnamed104(o.resourceData!); unittest.expect( o.resourceType!, unittest.equals('foo'), @@ -6407,12 +6501,12 @@ void checkSimulatedResult(api.SimulatedResult o) { buildCounterSimulatedResult--; } -core.List buildUnnamed104() => [ +core.List buildUnnamed105() => [ buildResourceValueConfigMetadata(), buildResourceValueConfigMetadata(), ]; -void checkUnnamed104(core.List o) { +void checkUnnamed105(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkResourceValueConfigMetadata(o[0]); checkResourceValueConfigMetadata(o[1]); @@ -6426,7 +6520,7 @@ api.Simulation buildSimulation() { o.cloudProvider = 'foo'; o.createTime = 'foo'; o.name = 'foo'; - o.resourceValueConfigsMetadata = buildUnnamed104(); + o.resourceValueConfigsMetadata = buildUnnamed105(); } buildCounterSimulation--; return o; @@ -6447,7 +6541,7 @@ void checkSimulation(api.Simulation o) { o.name!, unittest.equals('foo'), ); - checkUnnamed104(o.resourceValueConfigsMetadata!); + checkUnnamed105(o.resourceValueConfigsMetadata!); } buildCounterSimulation--; } @@ -6516,7 +6610,7 @@ void checkStaticMute(api.StaticMute o) { buildCounterStaticMute--; } -core.Map buildUnnamed105() => { +core.Map 
buildUnnamed106() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -6529,7 +6623,7 @@ core.Map buildUnnamed105() => { }, }; -void checkUnnamed105(core.Map o) { +void checkUnnamed106(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted17 = (o['x']!) as core.Map; unittest.expect(casted17, unittest.hasLength(3)); @@ -6561,15 +6655,15 @@ void checkUnnamed105(core.Map o) { ); } -core.List> buildUnnamed106() => [ - buildUnnamed105(), - buildUnnamed105(), +core.List> buildUnnamed107() => [ + buildUnnamed106(), + buildUnnamed106(), ]; -void checkUnnamed106(core.List> o) { +void checkUnnamed107(core.List> o) { unittest.expect(o, unittest.hasLength(2)); - checkUnnamed105(o[0]); - checkUnnamed105(o[1]); + checkUnnamed106(o[0]); + checkUnnamed106(o[1]); } core.int buildCounterStatus = 0; @@ -6578,7 +6672,7 @@ api.Status buildStatus() { buildCounterStatus++; if (buildCounterStatus < 3) { o.code = 42; - o.details = buildUnnamed106(); + o.details = buildUnnamed107(); o.message = 'foo'; } buildCounterStatus--; @@ -6592,7 +6686,7 @@ void checkStatus(api.Status o) { o.code!, unittest.equals(42), ); - checkUnnamed106(o.details!); + checkUnnamed107(o.details!); unittest.expect( o.message!, unittest.equals('foo'), @@ -6655,12 +6749,12 @@ void checkSubject(api.Subject o) { buildCounterSubject--; } -core.List buildUnnamed107() => [ +core.List buildUnnamed108() => [ 'foo', 'foo', ]; -void checkUnnamed107(core.List o) { +void checkUnnamed108(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -6677,7 +6771,7 @@ api.TestIamPermissionsRequest buildTestIamPermissionsRequest() { final o = api.TestIamPermissionsRequest(); buildCounterTestIamPermissionsRequest++; if (buildCounterTestIamPermissionsRequest < 3) { - o.permissions = buildUnnamed107(); + o.permissions = buildUnnamed108(); } buildCounterTestIamPermissionsRequest--; return o; @@ -6686,17 +6780,17 @@ api.TestIamPermissionsRequest buildTestIamPermissionsRequest() { void checkTestIamPermissionsRequest(api.TestIamPermissionsRequest o) { buildCounterTestIamPermissionsRequest++; if (buildCounterTestIamPermissionsRequest < 3) { - checkUnnamed107(o.permissions!); + checkUnnamed108(o.permissions!); } buildCounterTestIamPermissionsRequest--; } -core.List buildUnnamed108() => [ +core.List buildUnnamed109() => [ 'foo', 'foo', ]; -void checkUnnamed108(core.List o) { +void checkUnnamed109(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -6713,7 +6807,7 @@ api.TestIamPermissionsResponse buildTestIamPermissionsResponse() { final o = api.TestIamPermissionsResponse(); buildCounterTestIamPermissionsResponse++; if (buildCounterTestIamPermissionsResponse < 3) { - o.permissions = buildUnnamed108(); + o.permissions = buildUnnamed109(); } buildCounterTestIamPermissionsResponse--; return o; @@ -6722,7 +6816,7 @@ api.TestIamPermissionsResponse buildTestIamPermissionsResponse() { void checkTestIamPermissionsResponse(api.TestIamPermissionsResponse o) { buildCounterTestIamPermissionsResponse++; if (buildCounterTestIamPermissionsResponse < 3) { - checkUnnamed108(o.permissions!); + checkUnnamed109(o.permissions!); } buildCounterTestIamPermissionsResponse--; } @@ -6774,12 +6868,12 @@ void checkTicketInfo(api.TicketInfo o) { buildCounterTicketInfo--; } -core.List buildUnnamed109() => [ +core.List buildUnnamed110() => [ 'foo', 'foo', ]; -void checkUnnamed109(core.List o) { +void checkUnnamed110(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -6797,7 +6891,7 @@ 
api.ToxicCombination buildToxicCombination() { buildCounterToxicCombination++; if (buildCounterToxicCombination < 3) { o.attackExposureScore = 42.0; - o.relatedFindings = buildUnnamed109(); + o.relatedFindings = buildUnnamed110(); } buildCounterToxicCombination--; return o; @@ -6810,7 +6904,7 @@ void checkToxicCombination(api.ToxicCombination o) { o.attackExposureScore!, unittest.equals(42.0), ); - checkUnnamed109(o.relatedFindings!); + checkUnnamed110(o.relatedFindings!); } buildCounterToxicCombination--; } @@ -6865,12 +6959,12 @@ void checkValidateEventThreatDetectionCustomModuleResponse( buildCounterValidateEventThreatDetectionCustomModuleResponse--; } -core.List buildUnnamed110() => [ +core.List buildUnnamed111() => [ buildResourceValueConfigMetadata(), buildResourceValueConfigMetadata(), ]; -void checkUnnamed110(core.List o) { +void checkUnnamed111(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkResourceValueConfigMetadata(o[0]); checkResourceValueConfigMetadata(o[1]); @@ -6887,7 +6981,7 @@ api.ValuedResource buildValuedResource() { o.resource = 'foo'; o.resourceType = 'foo'; o.resourceValue = 'foo'; - o.resourceValueConfigsUsed = buildUnnamed110(); + o.resourceValueConfigsUsed = buildUnnamed111(); } buildCounterValuedResource--; return o; @@ -6920,7 +7014,7 @@ void checkValuedResource(api.ValuedResource o) { o.resourceValue!, unittest.equals('foo'), ); - checkUnnamed110(o.resourceValueConfigsUsed!); + checkUnnamed111(o.resourceValueConfigsUsed!); } buildCounterValuedResource--; } @@ -7412,6 +7506,16 @@ void main() { }); }); + unittest.group('obj-schema-DataRetentionDeletionEvent', () { + unittest.test('to-json--from-json', () async { + final o = buildDataRetentionDeletionEvent(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.DataRetentionDeletionEvent.fromJson( + oJson as core.Map); + checkDataRetentionDeletionEvent(od); + }); + }); + unittest.group('obj-schema-Database', () { unittest.test('to-json--from-json', () async { final o = buildDatabase(); @@ -7432,6 +7536,16 @@ void main() { }); }); + unittest.group('obj-schema-Disk', () { + unittest.test('to-json--from-json', () async { + final o = buildDisk(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.Disk.fromJson(oJson as core.Map); + checkDisk(od); + }); + }); + unittest.group('obj-schema-DiskPath', () { unittest.test('to-json--from-json', () async { final o = buildDiskPath(); @@ -12094,6 +12208,79 @@ void main() { }); }); + unittest.group('resource-OrganizationsAttackPathsResource', () { + unittest.test('method--list', () async { + final mock = HttpServerMock(); + final res = api.SecurityCommandCenterApi(mock).organizations.attackPaths; + final arg_parent = 'foo'; + final arg_filter = 'foo'; + final arg_pageSize = 42; + final arg_pageToken = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + 
+          for (var part in query.split('&')) {
+            final keyValue = part.split('=');
+            addQueryParam(
+              core.Uri.decodeQueryComponent(keyValue[0]),
+              core.Uri.decodeQueryComponent(keyValue[1]),
+            );
+          }
+        }
+        unittest.expect(
+          queryMap['filter']!.first,
+          unittest.equals(arg_filter),
+        );
+        unittest.expect(
+          core.int.parse(queryMap['pageSize']!.first),
+          unittest.equals(arg_pageSize),
+        );
+        unittest.expect(
+          queryMap['pageToken']!.first,
+          unittest.equals(arg_pageToken),
+        );
+        unittest.expect(
+          queryMap['fields']!.first,
+          unittest.equals(arg_$fields),
+        );
+
+        final h = {
+          'content-type': 'application/json; charset=utf-8',
+        };
+        final resp = convert.json.encode(buildListAttackPathsResponse());
+        return async.Future.value(stringResponse(200, h, resp));
+      }), true);
+      final response = await res.list(arg_parent,
+          filter: arg_filter,
+          pageSize: arg_pageSize,
+          pageToken: arg_pageToken,
+          $fields: arg_$fields);
+      checkListAttackPathsResponse(response as api.ListAttackPathsResponse);
+    });
+  });
+
   unittest.group('resource-OrganizationsBigQueryExportsResource', () {
     unittest.test('method--create', () async {
       final mock = HttpServerMock();
diff --git a/generated/googleapis/test/securityposture/v1_test.dart b/generated/googleapis/test/securityposture/v1_test.dart
new file mode 100644
index 000000000..97d82f521
--- /dev/null
+++ b/generated/googleapis/test/securityposture/v1_test.dart
@@ -0,0 +1,3810 @@
+// ignore_for_file: camel_case_types
+// ignore_for_file: comment_references
+// ignore_for_file: deprecated_member_use_from_same_package
+// ignore_for_file: doc_directive_unknown
+// ignore_for_file: lines_longer_than_80_chars
+// ignore_for_file: non_constant_identifier_names
+// ignore_for_file: prefer_const_declarations
+// ignore_for_file: prefer_expression_function_bodies
+// ignore_for_file: prefer_final_locals
+// ignore_for_file: prefer_interpolation_to_compose_strings
+// ignore_for_file: unintended_html_in_doc_comment
+// ignore_for_file: unnecessary_brace_in_string_interps
+// ignore_for_file: unnecessary_cast
+// ignore_for_file: unnecessary_lambdas
+// ignore_for_file: unnecessary_string_interpolations
+// ignore_for_file: unreachable_from_main
+// ignore_for_file: unused_local_variable
+
+import 'dart:async' as async;
+import 'dart:convert' as convert;
+import 'dart:core' as core;
+
+import 'package:googleapis/securityposture/v1.dart' as api;
+import 'package:http/http.dart' as http;
+import 'package:test/test.dart' as unittest;
+
+import '../test_shared.dart';
+
+core.int buildCounterAssetDetails = 0;
+api.AssetDetails buildAssetDetails() {
+  final o = api.AssetDetails();
+  buildCounterAssetDetails++;
+  if (buildCounterAssetDetails < 3) {
+    o.asset = 'foo';
+    o.assetType = 'foo';
+  }
+  buildCounterAssetDetails--;
+  return o;
+}
+
+void checkAssetDetails(api.AssetDetails o) {
+  buildCounterAssetDetails++;
+  if (buildCounterAssetDetails < 3) {
+    unittest.expect(
+      o.asset!,
+      unittest.equals('foo'),
+    );
+    unittest.expect(
+      o.assetType!,
+      unittest.equals('foo'),
+    );
+  }
+  buildCounterAssetDetails--;
+}
+
+core.int buildCounterCancelOperationRequest = 0;
+api.CancelOperationRequest buildCancelOperationRequest() {
+  final o = api.CancelOperationRequest();
+  buildCounterCancelOperationRequest++;
+  if (buildCounterCancelOperationRequest < 3) {}
+  buildCounterCancelOperationRequest--;
+  return o;
+}
+
+void checkCancelOperationRequest(api.CancelOperationRequest o) {
+  buildCounterCancelOperationRequest++;
+  if (buildCounterCancelOperationRequest < 3) {}
+
buildCounterCancelOperationRequest--; +} + +core.int buildCounterComplianceStandard = 0; +api.ComplianceStandard buildComplianceStandard() { + final o = api.ComplianceStandard(); + buildCounterComplianceStandard++; + if (buildCounterComplianceStandard < 3) { + o.control = 'foo'; + o.standard = 'foo'; + } + buildCounterComplianceStandard--; + return o; +} + +void checkComplianceStandard(api.ComplianceStandard o) { + buildCounterComplianceStandard++; + if (buildCounterComplianceStandard < 3) { + unittest.expect( + o.control!, + unittest.equals('foo'), + ); + unittest.expect( + o.standard!, + unittest.equals('foo'), + ); + } + buildCounterComplianceStandard--; +} + +core.int buildCounterConstraint = 0; +api.Constraint buildConstraint() { + final o = api.Constraint(); + buildCounterConstraint++; + if (buildCounterConstraint < 3) { + o.orgPolicyConstraint = buildOrgPolicyConstraint(); + o.orgPolicyConstraintCustom = buildOrgPolicyConstraintCustom(); + o.securityHealthAnalyticsCustomModule = + buildSecurityHealthAnalyticsCustomModule(); + o.securityHealthAnalyticsModule = buildSecurityHealthAnalyticsModule(); + } + buildCounterConstraint--; + return o; +} + +void checkConstraint(api.Constraint o) { + buildCounterConstraint++; + if (buildCounterConstraint < 3) { + checkOrgPolicyConstraint(o.orgPolicyConstraint!); + checkOrgPolicyConstraintCustom(o.orgPolicyConstraintCustom!); + checkSecurityHealthAnalyticsCustomModule( + o.securityHealthAnalyticsCustomModule!); + checkSecurityHealthAnalyticsModule(o.securityHealthAnalyticsModule!); + } + buildCounterConstraint--; +} + +core.int buildCounterCreateIaCValidationReportRequest = 0; +api.CreateIaCValidationReportRequest buildCreateIaCValidationReportRequest() { + final o = api.CreateIaCValidationReportRequest(); + buildCounterCreateIaCValidationReportRequest++; + if (buildCounterCreateIaCValidationReportRequest < 3) { + o.iac = buildIaC(); + } + buildCounterCreateIaCValidationReportRequest--; + return o; +} + +void checkCreateIaCValidationReportRequest( + api.CreateIaCValidationReportRequest o) { + buildCounterCreateIaCValidationReportRequest++; + if (buildCounterCreateIaCValidationReportRequest < 3) { + checkIaC(o.iac!); + } + buildCounterCreateIaCValidationReportRequest--; +} + +core.int buildCounterCustomConfig = 0; +api.CustomConfig buildCustomConfig() { + final o = api.CustomConfig(); + buildCounterCustomConfig++; + if (buildCounterCustomConfig < 3) { + o.customOutput = buildCustomOutputSpec(); + o.description = 'foo'; + o.predicate = buildExpr(); + o.recommendation = 'foo'; + o.resourceSelector = buildResourceSelector(); + o.severity = 'foo'; + } + buildCounterCustomConfig--; + return o; +} + +void checkCustomConfig(api.CustomConfig o) { + buildCounterCustomConfig++; + if (buildCounterCustomConfig < 3) { + checkCustomOutputSpec(o.customOutput!); + unittest.expect( + o.description!, + unittest.equals('foo'), + ); + checkExpr(o.predicate!); + unittest.expect( + o.recommendation!, + unittest.equals('foo'), + ); + checkResourceSelector(o.resourceSelector!); + unittest.expect( + o.severity!, + unittest.equals('foo'), + ); + } + buildCounterCustomConfig--; +} + +core.List buildUnnamed0() => [ + buildProperty(), + buildProperty(), + ]; + +void checkUnnamed0(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkProperty(o[0]); + checkProperty(o[1]); +} + +core.int buildCounterCustomOutputSpec = 0; +api.CustomOutputSpec buildCustomOutputSpec() { + final o = api.CustomOutputSpec(); + buildCounterCustomOutputSpec++; + if 
(buildCounterCustomOutputSpec < 3) { + o.properties = buildUnnamed0(); + } + buildCounterCustomOutputSpec--; + return o; +} + +void checkCustomOutputSpec(api.CustomOutputSpec o) { + buildCounterCustomOutputSpec++; + if (buildCounterCustomOutputSpec < 3) { + checkUnnamed0(o.properties!); + } + buildCounterCustomOutputSpec--; +} + +core.int buildCounterEmpty = 0; +api.Empty buildEmpty() { + final o = api.Empty(); + buildCounterEmpty++; + if (buildCounterEmpty < 3) {} + buildCounterEmpty--; + return o; +} + +void checkEmpty(api.Empty o) { + buildCounterEmpty++; + if (buildCounterEmpty < 3) {} + buildCounterEmpty--; +} + +core.int buildCounterExpr = 0; +api.Expr buildExpr() { + final o = api.Expr(); + buildCounterExpr++; + if (buildCounterExpr < 3) { + o.description = 'foo'; + o.expression = 'foo'; + o.location = 'foo'; + o.title = 'foo'; + } + buildCounterExpr--; + return o; +} + +void checkExpr(api.Expr o) { + buildCounterExpr++; + if (buildCounterExpr < 3) { + unittest.expect( + o.description!, + unittest.equals('foo'), + ); + unittest.expect( + o.expression!, + unittest.equals('foo'), + ); + unittest.expect( + o.location!, + unittest.equals('foo'), + ); + unittest.expect( + o.title!, + unittest.equals('foo'), + ); + } + buildCounterExpr--; +} + +core.int buildCounterExtractPostureRequest = 0; +api.ExtractPostureRequest buildExtractPostureRequest() { + final o = api.ExtractPostureRequest(); + buildCounterExtractPostureRequest++; + if (buildCounterExtractPostureRequest < 3) { + o.postureId = 'foo'; + o.workload = 'foo'; + } + buildCounterExtractPostureRequest--; + return o; +} + +void checkExtractPostureRequest(api.ExtractPostureRequest o) { + buildCounterExtractPostureRequest++; + if (buildCounterExtractPostureRequest < 3) { + unittest.expect( + o.postureId!, + unittest.equals('foo'), + ); + unittest.expect( + o.workload!, + unittest.equals('foo'), + ); + } + buildCounterExtractPostureRequest--; +} + +core.List buildUnnamed1() => [ + 'foo', + 'foo', + ]; + +void checkUnnamed1(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o[0], + unittest.equals('foo'), + ); + unittest.expect( + o[1], + unittest.equals('foo'), + ); +} + +core.List buildUnnamed2() => [ + 'foo', + 'foo', + ]; + +void checkUnnamed2(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o[0], + unittest.equals('foo'), + ); + unittest.expect( + o[1], + unittest.equals('foo'), + ); +} + +core.int buildCounterGoogleCloudSecuritypostureV1CustomConstraint = 0; +api.GoogleCloudSecuritypostureV1CustomConstraint + buildGoogleCloudSecuritypostureV1CustomConstraint() { + final o = api.GoogleCloudSecuritypostureV1CustomConstraint(); + buildCounterGoogleCloudSecuritypostureV1CustomConstraint++; + if (buildCounterGoogleCloudSecuritypostureV1CustomConstraint < 3) { + o.actionType = 'foo'; + o.condition = 'foo'; + o.description = 'foo'; + o.displayName = 'foo'; + o.methodTypes = buildUnnamed1(); + o.name = 'foo'; + o.resourceTypes = buildUnnamed2(); + o.updateTime = 'foo'; + } + buildCounterGoogleCloudSecuritypostureV1CustomConstraint--; + return o; +} + +void checkGoogleCloudSecuritypostureV1CustomConstraint( + api.GoogleCloudSecuritypostureV1CustomConstraint o) { + buildCounterGoogleCloudSecuritypostureV1CustomConstraint++; + if (buildCounterGoogleCloudSecuritypostureV1CustomConstraint < 3) { + unittest.expect( + o.actionType!, + unittest.equals('foo'), + ); + unittest.expect( + o.condition!, + unittest.equals('foo'), + ); + unittest.expect( + o.description!, + 
unittest.equals('foo'), + ); + unittest.expect( + o.displayName!, + unittest.equals('foo'), + ); + checkUnnamed1(o.methodTypes!); + unittest.expect( + o.name!, + unittest.equals('foo'), + ); + checkUnnamed2(o.resourceTypes!); + unittest.expect( + o.updateTime!, + unittest.equals('foo'), + ); + } + buildCounterGoogleCloudSecuritypostureV1CustomConstraint--; +} + +core.Map buildUnnamed3() => { + 'x': { + 'list': [1, 2, 3], + 'bool': true, + 'string': 'foo' + }, + 'y': { + 'list': [1, 2, 3], + 'bool': true, + 'string': 'foo' + }, + }; + +void checkUnnamed3(core.Map o) { + unittest.expect(o, unittest.hasLength(2)); + var casted1 = (o['x']!) as core.Map; + unittest.expect(casted1, unittest.hasLength(3)); + unittest.expect( + casted1['list'], + unittest.equals([1, 2, 3]), + ); + unittest.expect( + casted1['bool'], + unittest.equals(true), + ); + unittest.expect( + casted1['string'], + unittest.equals('foo'), + ); + var casted2 = (o['y']!) as core.Map; + unittest.expect(casted2, unittest.hasLength(3)); + unittest.expect( + casted2['list'], + unittest.equals([1, 2, 3]), + ); + unittest.expect( + casted2['bool'], + unittest.equals(true), + ); + unittest.expect( + casted2['string'], + unittest.equals('foo'), + ); +} + +core.int buildCounterGoogleCloudSecuritypostureV1PolicyRule = 0; +api.GoogleCloudSecuritypostureV1PolicyRule + buildGoogleCloudSecuritypostureV1PolicyRule() { + final o = api.GoogleCloudSecuritypostureV1PolicyRule(); + buildCounterGoogleCloudSecuritypostureV1PolicyRule++; + if (buildCounterGoogleCloudSecuritypostureV1PolicyRule < 3) { + o.allowAll = true; + o.condition = buildExpr(); + o.denyAll = true; + o.enforce = true; + o.parameters = buildUnnamed3(); + o.resourceTypes = buildResourceTypes(); + o.values = buildGoogleCloudSecuritypostureV1PolicyRuleStringValues(); + } + buildCounterGoogleCloudSecuritypostureV1PolicyRule--; + return o; +} + +void checkGoogleCloudSecuritypostureV1PolicyRule( + api.GoogleCloudSecuritypostureV1PolicyRule o) { + buildCounterGoogleCloudSecuritypostureV1PolicyRule++; + if (buildCounterGoogleCloudSecuritypostureV1PolicyRule < 3) { + unittest.expect(o.allowAll!, unittest.isTrue); + checkExpr(o.condition!); + unittest.expect(o.denyAll!, unittest.isTrue); + unittest.expect(o.enforce!, unittest.isTrue); + checkUnnamed3(o.parameters!); + checkResourceTypes(o.resourceTypes!); + checkGoogleCloudSecuritypostureV1PolicyRuleStringValues(o.values!); + } + buildCounterGoogleCloudSecuritypostureV1PolicyRule--; +} + +core.List buildUnnamed4() => [ + 'foo', + 'foo', + ]; + +void checkUnnamed4(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o[0], + unittest.equals('foo'), + ); + unittest.expect( + o[1], + unittest.equals('foo'), + ); +} + +core.List buildUnnamed5() => [ + 'foo', + 'foo', + ]; + +void checkUnnamed5(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o[0], + unittest.equals('foo'), + ); + unittest.expect( + o[1], + unittest.equals('foo'), + ); +} + +core.int buildCounterGoogleCloudSecuritypostureV1PolicyRuleStringValues = 0; +api.GoogleCloudSecuritypostureV1PolicyRuleStringValues + buildGoogleCloudSecuritypostureV1PolicyRuleStringValues() { + final o = api.GoogleCloudSecuritypostureV1PolicyRuleStringValues(); + buildCounterGoogleCloudSecuritypostureV1PolicyRuleStringValues++; + if (buildCounterGoogleCloudSecuritypostureV1PolicyRuleStringValues < 3) { + o.allowedValues = buildUnnamed4(); + o.deniedValues = buildUnnamed5(); + } + buildCounterGoogleCloudSecuritypostureV1PolicyRuleStringValues--; + 
return o; +} + +void checkGoogleCloudSecuritypostureV1PolicyRuleStringValues( + api.GoogleCloudSecuritypostureV1PolicyRuleStringValues o) { + buildCounterGoogleCloudSecuritypostureV1PolicyRuleStringValues++; + if (buildCounterGoogleCloudSecuritypostureV1PolicyRuleStringValues < 3) { + checkUnnamed4(o.allowedValues!); + checkUnnamed5(o.deniedValues!); + } + buildCounterGoogleCloudSecuritypostureV1PolicyRuleStringValues--; +} + +core.int buildCounterIaC = 0; +api.IaC buildIaC() { + final o = api.IaC(); + buildCounterIaC++; + if (buildCounterIaC < 3) { + o.tfPlan = 'foo'; + } + buildCounterIaC--; + return o; +} + +void checkIaC(api.IaC o) { + buildCounterIaC++; + if (buildCounterIaC < 3) { + unittest.expect( + o.tfPlan!, + unittest.equals('foo'), + ); + } + buildCounterIaC--; +} + +core.List buildUnnamed6() => [ + buildViolation(), + buildViolation(), + ]; + +void checkUnnamed6(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkViolation(o[0]); + checkViolation(o[1]); +} + +core.int buildCounterIaCValidationReport = 0; +api.IaCValidationReport buildIaCValidationReport() { + final o = api.IaCValidationReport(); + buildCounterIaCValidationReport++; + if (buildCounterIaCValidationReport < 3) { + o.note = 'foo'; + o.violations = buildUnnamed6(); + } + buildCounterIaCValidationReport--; + return o; +} + +void checkIaCValidationReport(api.IaCValidationReport o) { + buildCounterIaCValidationReport++; + if (buildCounterIaCValidationReport < 3) { + unittest.expect( + o.note!, + unittest.equals('foo'), + ); + checkUnnamed6(o.violations!); + } + buildCounterIaCValidationReport--; +} + +core.List buildUnnamed7() => [ + buildLocation(), + buildLocation(), + ]; + +void checkUnnamed7(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkLocation(o[0]); + checkLocation(o[1]); +} + +core.int buildCounterListLocationsResponse = 0; +api.ListLocationsResponse buildListLocationsResponse() { + final o = api.ListLocationsResponse(); + buildCounterListLocationsResponse++; + if (buildCounterListLocationsResponse < 3) { + o.locations = buildUnnamed7(); + o.nextPageToken = 'foo'; + } + buildCounterListLocationsResponse--; + return o; +} + +void checkListLocationsResponse(api.ListLocationsResponse o) { + buildCounterListLocationsResponse++; + if (buildCounterListLocationsResponse < 3) { + checkUnnamed7(o.locations!); + unittest.expect( + o.nextPageToken!, + unittest.equals('foo'), + ); + } + buildCounterListLocationsResponse--; +} + +core.List buildUnnamed8() => [ + buildOperation(), + buildOperation(), + ]; + +void checkUnnamed8(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkOperation(o[0]); + checkOperation(o[1]); +} + +core.int buildCounterListOperationsResponse = 0; +api.ListOperationsResponse buildListOperationsResponse() { + final o = api.ListOperationsResponse(); + buildCounterListOperationsResponse++; + if (buildCounterListOperationsResponse < 3) { + o.nextPageToken = 'foo'; + o.operations = buildUnnamed8(); + } + buildCounterListOperationsResponse--; + return o; +} + +void checkListOperationsResponse(api.ListOperationsResponse o) { + buildCounterListOperationsResponse++; + if (buildCounterListOperationsResponse < 3) { + unittest.expect( + o.nextPageToken!, + unittest.equals('foo'), + ); + checkUnnamed8(o.operations!); + } + buildCounterListOperationsResponse--; +} + +core.List buildUnnamed9() => [ + buildPostureDeployment(), + buildPostureDeployment(), + ]; + +void checkUnnamed9(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + 
checkPostureDeployment(o[0]); + checkPostureDeployment(o[1]); +} + +core.List buildUnnamed10() => [ + 'foo', + 'foo', + ]; + +void checkUnnamed10(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o[0], + unittest.equals('foo'), + ); + unittest.expect( + o[1], + unittest.equals('foo'), + ); +} + +core.int buildCounterListPostureDeploymentsResponse = 0; +api.ListPostureDeploymentsResponse buildListPostureDeploymentsResponse() { + final o = api.ListPostureDeploymentsResponse(); + buildCounterListPostureDeploymentsResponse++; + if (buildCounterListPostureDeploymentsResponse < 3) { + o.nextPageToken = 'foo'; + o.postureDeployments = buildUnnamed9(); + o.unreachable = buildUnnamed10(); + } + buildCounterListPostureDeploymentsResponse--; + return o; +} + +void checkListPostureDeploymentsResponse(api.ListPostureDeploymentsResponse o) { + buildCounterListPostureDeploymentsResponse++; + if (buildCounterListPostureDeploymentsResponse < 3) { + unittest.expect( + o.nextPageToken!, + unittest.equals('foo'), + ); + checkUnnamed9(o.postureDeployments!); + checkUnnamed10(o.unreachable!); + } + buildCounterListPostureDeploymentsResponse--; +} + +core.List buildUnnamed11() => [ + buildPosture(), + buildPosture(), + ]; + +void checkUnnamed11(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkPosture(o[0]); + checkPosture(o[1]); +} + +core.int buildCounterListPostureRevisionsResponse = 0; +api.ListPostureRevisionsResponse buildListPostureRevisionsResponse() { + final o = api.ListPostureRevisionsResponse(); + buildCounterListPostureRevisionsResponse++; + if (buildCounterListPostureRevisionsResponse < 3) { + o.nextPageToken = 'foo'; + o.revisions = buildUnnamed11(); + } + buildCounterListPostureRevisionsResponse--; + return o; +} + +void checkListPostureRevisionsResponse(api.ListPostureRevisionsResponse o) { + buildCounterListPostureRevisionsResponse++; + if (buildCounterListPostureRevisionsResponse < 3) { + unittest.expect( + o.nextPageToken!, + unittest.equals('foo'), + ); + checkUnnamed11(o.revisions!); + } + buildCounterListPostureRevisionsResponse--; +} + +core.List buildUnnamed12() => [ + buildPostureTemplate(), + buildPostureTemplate(), + ]; + +void checkUnnamed12(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkPostureTemplate(o[0]); + checkPostureTemplate(o[1]); +} + +core.int buildCounterListPostureTemplatesResponse = 0; +api.ListPostureTemplatesResponse buildListPostureTemplatesResponse() { + final o = api.ListPostureTemplatesResponse(); + buildCounterListPostureTemplatesResponse++; + if (buildCounterListPostureTemplatesResponse < 3) { + o.nextPageToken = 'foo'; + o.postureTemplates = buildUnnamed12(); + } + buildCounterListPostureTemplatesResponse--; + return o; +} + +void checkListPostureTemplatesResponse(api.ListPostureTemplatesResponse o) { + buildCounterListPostureTemplatesResponse++; + if (buildCounterListPostureTemplatesResponse < 3) { + unittest.expect( + o.nextPageToken!, + unittest.equals('foo'), + ); + checkUnnamed12(o.postureTemplates!); + } + buildCounterListPostureTemplatesResponse--; +} + +core.List buildUnnamed13() => [ + buildPosture(), + buildPosture(), + ]; + +void checkUnnamed13(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkPosture(o[0]); + checkPosture(o[1]); +} + +core.List buildUnnamed14() => [ + 'foo', + 'foo', + ]; + +void checkUnnamed14(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o[0], + unittest.equals('foo'), + ); + unittest.expect( + o[1], + 
unittest.equals('foo'), + ); +} + +core.int buildCounterListPosturesResponse = 0; +api.ListPosturesResponse buildListPosturesResponse() { + final o = api.ListPosturesResponse(); + buildCounterListPosturesResponse++; + if (buildCounterListPosturesResponse < 3) { + o.nextPageToken = 'foo'; + o.postures = buildUnnamed13(); + o.unreachable = buildUnnamed14(); + } + buildCounterListPosturesResponse--; + return o; +} + +void checkListPosturesResponse(api.ListPosturesResponse o) { + buildCounterListPosturesResponse++; + if (buildCounterListPosturesResponse < 3) { + unittest.expect( + o.nextPageToken!, + unittest.equals('foo'), + ); + checkUnnamed13(o.postures!); + checkUnnamed14(o.unreachable!); + } + buildCounterListPosturesResponse--; +} + +core.List buildUnnamed15() => [ + buildReport(), + buildReport(), + ]; + +void checkUnnamed15(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkReport(o[0]); + checkReport(o[1]); +} + +core.List buildUnnamed16() => [ + 'foo', + 'foo', + ]; + +void checkUnnamed16(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o[0], + unittest.equals('foo'), + ); + unittest.expect( + o[1], + unittest.equals('foo'), + ); +} + +core.int buildCounterListReportsResponse = 0; +api.ListReportsResponse buildListReportsResponse() { + final o = api.ListReportsResponse(); + buildCounterListReportsResponse++; + if (buildCounterListReportsResponse < 3) { + o.nextPageToken = 'foo'; + o.reports = buildUnnamed15(); + o.unreachable = buildUnnamed16(); + } + buildCounterListReportsResponse--; + return o; +} + +void checkListReportsResponse(api.ListReportsResponse o) { + buildCounterListReportsResponse++; + if (buildCounterListReportsResponse < 3) { + unittest.expect( + o.nextPageToken!, + unittest.equals('foo'), + ); + checkUnnamed15(o.reports!); + checkUnnamed16(o.unreachable!); + } + buildCounterListReportsResponse--; +} + +core.Map buildUnnamed17() => { + 'x': 'foo', + 'y': 'foo', + }; + +void checkUnnamed17(core.Map o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o['x']!, + unittest.equals('foo'), + ); + unittest.expect( + o['y']!, + unittest.equals('foo'), + ); +} + +core.Map buildUnnamed18() => { + 'x': { + 'list': [1, 2, 3], + 'bool': true, + 'string': 'foo' + }, + 'y': { + 'list': [1, 2, 3], + 'bool': true, + 'string': 'foo' + }, + }; + +void checkUnnamed18(core.Map o) { + unittest.expect(o, unittest.hasLength(2)); + var casted3 = (o['x']!) as core.Map; + unittest.expect(casted3, unittest.hasLength(3)); + unittest.expect( + casted3['list'], + unittest.equals([1, 2, 3]), + ); + unittest.expect( + casted3['bool'], + unittest.equals(true), + ); + unittest.expect( + casted3['string'], + unittest.equals('foo'), + ); + var casted4 = (o['y']!) 
as core.Map; + unittest.expect(casted4, unittest.hasLength(3)); + unittest.expect( + casted4['list'], + unittest.equals([1, 2, 3]), + ); + unittest.expect( + casted4['bool'], + unittest.equals(true), + ); + unittest.expect( + casted4['string'], + unittest.equals('foo'), + ); +} + +core.int buildCounterLocation = 0; +api.Location buildLocation() { + final o = api.Location(); + buildCounterLocation++; + if (buildCounterLocation < 3) { + o.displayName = 'foo'; + o.labels = buildUnnamed17(); + o.locationId = 'foo'; + o.metadata = buildUnnamed18(); + o.name = 'foo'; + } + buildCounterLocation--; + return o; +} + +void checkLocation(api.Location o) { + buildCounterLocation++; + if (buildCounterLocation < 3) { + unittest.expect( + o.displayName!, + unittest.equals('foo'), + ); + checkUnnamed17(o.labels!); + unittest.expect( + o.locationId!, + unittest.equals('foo'), + ); + checkUnnamed18(o.metadata!); + unittest.expect( + o.name!, + unittest.equals('foo'), + ); + } + buildCounterLocation--; +} + +core.Map buildUnnamed19() => { + 'x': { + 'list': [1, 2, 3], + 'bool': true, + 'string': 'foo' + }, + 'y': { + 'list': [1, 2, 3], + 'bool': true, + 'string': 'foo' + }, + }; + +void checkUnnamed19(core.Map o) { + unittest.expect(o, unittest.hasLength(2)); + var casted5 = (o['x']!) as core.Map; + unittest.expect(casted5, unittest.hasLength(3)); + unittest.expect( + casted5['list'], + unittest.equals([1, 2, 3]), + ); + unittest.expect( + casted5['bool'], + unittest.equals(true), + ); + unittest.expect( + casted5['string'], + unittest.equals('foo'), + ); + var casted6 = (o['y']!) as core.Map; + unittest.expect(casted6, unittest.hasLength(3)); + unittest.expect( + casted6['list'], + unittest.equals([1, 2, 3]), + ); + unittest.expect( + casted6['bool'], + unittest.equals(true), + ); + unittest.expect( + casted6['string'], + unittest.equals('foo'), + ); +} + +core.Map buildUnnamed20() => { + 'x': { + 'list': [1, 2, 3], + 'bool': true, + 'string': 'foo' + }, + 'y': { + 'list': [1, 2, 3], + 'bool': true, + 'string': 'foo' + }, + }; + +void checkUnnamed20(core.Map o) { + unittest.expect(o, unittest.hasLength(2)); + var casted7 = (o['x']!) as core.Map; + unittest.expect(casted7, unittest.hasLength(3)); + unittest.expect( + casted7['list'], + unittest.equals([1, 2, 3]), + ); + unittest.expect( + casted7['bool'], + unittest.equals(true), + ); + unittest.expect( + casted7['string'], + unittest.equals('foo'), + ); + var casted8 = (o['y']!) 
as core.Map; + unittest.expect(casted8, unittest.hasLength(3)); + unittest.expect( + casted8['list'], + unittest.equals([1, 2, 3]), + ); + unittest.expect( + casted8['bool'], + unittest.equals(true), + ); + unittest.expect( + casted8['string'], + unittest.equals('foo'), + ); +} + +core.int buildCounterOperation = 0; +api.Operation buildOperation() { + final o = api.Operation(); + buildCounterOperation++; + if (buildCounterOperation < 3) { + o.done = true; + o.error = buildStatus(); + o.metadata = buildUnnamed19(); + o.name = 'foo'; + o.response = buildUnnamed20(); + } + buildCounterOperation--; + return o; +} + +void checkOperation(api.Operation o) { + buildCounterOperation++; + if (buildCounterOperation < 3) { + unittest.expect(o.done!, unittest.isTrue); + checkStatus(o.error!); + checkUnnamed19(o.metadata!); + unittest.expect( + o.name!, + unittest.equals('foo'), + ); + checkUnnamed20(o.response!); + } + buildCounterOperation--; +} + +core.List buildUnnamed21() => [ + buildGoogleCloudSecuritypostureV1PolicyRule(), + buildGoogleCloudSecuritypostureV1PolicyRule(), + ]; + +void checkUnnamed21(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkGoogleCloudSecuritypostureV1PolicyRule(o[0]); + checkGoogleCloudSecuritypostureV1PolicyRule(o[1]); +} + +core.int buildCounterOrgPolicyConstraint = 0; +api.OrgPolicyConstraint buildOrgPolicyConstraint() { + final o = api.OrgPolicyConstraint(); + buildCounterOrgPolicyConstraint++; + if (buildCounterOrgPolicyConstraint < 3) { + o.cannedConstraintId = 'foo'; + o.policyRules = buildUnnamed21(); + } + buildCounterOrgPolicyConstraint--; + return o; +} + +void checkOrgPolicyConstraint(api.OrgPolicyConstraint o) { + buildCounterOrgPolicyConstraint++; + if (buildCounterOrgPolicyConstraint < 3) { + unittest.expect( + o.cannedConstraintId!, + unittest.equals('foo'), + ); + checkUnnamed21(o.policyRules!); + } + buildCounterOrgPolicyConstraint--; +} + +core.List buildUnnamed22() => [ + buildGoogleCloudSecuritypostureV1PolicyRule(), + buildGoogleCloudSecuritypostureV1PolicyRule(), + ]; + +void checkUnnamed22(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkGoogleCloudSecuritypostureV1PolicyRule(o[0]); + checkGoogleCloudSecuritypostureV1PolicyRule(o[1]); +} + +core.int buildCounterOrgPolicyConstraintCustom = 0; +api.OrgPolicyConstraintCustom buildOrgPolicyConstraintCustom() { + final o = api.OrgPolicyConstraintCustom(); + buildCounterOrgPolicyConstraintCustom++; + if (buildCounterOrgPolicyConstraintCustom < 3) { + o.customConstraint = buildGoogleCloudSecuritypostureV1CustomConstraint(); + o.policyRules = buildUnnamed22(); + } + buildCounterOrgPolicyConstraintCustom--; + return o; +} + +void checkOrgPolicyConstraintCustom(api.OrgPolicyConstraintCustom o) { + buildCounterOrgPolicyConstraintCustom++; + if (buildCounterOrgPolicyConstraintCustom < 3) { + checkGoogleCloudSecuritypostureV1CustomConstraint(o.customConstraint!); + checkUnnamed22(o.policyRules!); + } + buildCounterOrgPolicyConstraintCustom--; +} + +core.List buildUnnamed23() => [ + buildComplianceStandard(), + buildComplianceStandard(), + ]; + +void checkUnnamed23(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkComplianceStandard(o[0]); + checkComplianceStandard(o[1]); +} + +core.int buildCounterPolicy = 0; +api.Policy buildPolicy() { + final o = api.Policy(); + buildCounterPolicy++; + if (buildCounterPolicy < 3) { + o.complianceStandards = buildUnnamed23(); + o.constraint = buildConstraint(); + o.description = 'foo'; + o.policyId = 'foo'; + } + 
buildCounterPolicy--; + return o; +} + +void checkPolicy(api.Policy o) { + buildCounterPolicy++; + if (buildCounterPolicy < 3) { + checkUnnamed23(o.complianceStandards!); + checkConstraint(o.constraint!); + unittest.expect( + o.description!, + unittest.equals('foo'), + ); + unittest.expect( + o.policyId!, + unittest.equals('foo'), + ); + } + buildCounterPolicy--; +} + +core.List buildUnnamed24() => [ + 'foo', + 'foo', + ]; + +void checkUnnamed24(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o[0], + unittest.equals('foo'), + ); + unittest.expect( + o[1], + unittest.equals('foo'), + ); +} + +core.int buildCounterPolicyDetails = 0; +api.PolicyDetails buildPolicyDetails() { + final o = api.PolicyDetails(); + buildCounterPolicyDetails++; + if (buildCounterPolicyDetails < 3) { + o.complianceStandards = buildUnnamed24(); + o.constraint = 'foo'; + o.constraintType = 'foo'; + o.description = 'foo'; + } + buildCounterPolicyDetails--; + return o; +} + +void checkPolicyDetails(api.PolicyDetails o) { + buildCounterPolicyDetails++; + if (buildCounterPolicyDetails < 3) { + checkUnnamed24(o.complianceStandards!); + unittest.expect( + o.constraint!, + unittest.equals('foo'), + ); + unittest.expect( + o.constraintType!, + unittest.equals('foo'), + ); + unittest.expect( + o.description!, + unittest.equals('foo'), + ); + } + buildCounterPolicyDetails--; +} + +core.List buildUnnamed25() => [ + buildPolicy(), + buildPolicy(), + ]; + +void checkUnnamed25(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkPolicy(o[0]); + checkPolicy(o[1]); +} + +core.int buildCounterPolicySet = 0; +api.PolicySet buildPolicySet() { + final o = api.PolicySet(); + buildCounterPolicySet++; + if (buildCounterPolicySet < 3) { + o.description = 'foo'; + o.policies = buildUnnamed25(); + o.policySetId = 'foo'; + } + buildCounterPolicySet--; + return o; +} + +void checkPolicySet(api.PolicySet o) { + buildCounterPolicySet++; + if (buildCounterPolicySet < 3) { + unittest.expect( + o.description!, + unittest.equals('foo'), + ); + checkUnnamed25(o.policies!); + unittest.expect( + o.policySetId!, + unittest.equals('foo'), + ); + } + buildCounterPolicySet--; +} + +core.Map buildUnnamed26() => { + 'x': 'foo', + 'y': 'foo', + }; + +void checkUnnamed26(core.Map o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o['x']!, + unittest.equals('foo'), + ); + unittest.expect( + o['y']!, + unittest.equals('foo'), + ); +} + +core.List buildUnnamed27() => [ + 'foo', + 'foo', + ]; + +void checkUnnamed27(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o[0], + unittest.equals('foo'), + ); + unittest.expect( + o[1], + unittest.equals('foo'), + ); +} + +core.List buildUnnamed28() => [ + buildPolicySet(), + buildPolicySet(), + ]; + +void checkUnnamed28(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkPolicySet(o[0]); + checkPolicySet(o[1]); +} + +core.int buildCounterPosture = 0; +api.Posture buildPosture() { + final o = api.Posture(); + buildCounterPosture++; + if (buildCounterPosture < 3) { + o.annotations = buildUnnamed26(); + o.categories = buildUnnamed27(); + o.createTime = 'foo'; + o.description = 'foo'; + o.etag = 'foo'; + o.name = 'foo'; + o.policySets = buildUnnamed28(); + o.reconciling = true; + o.revisionId = 'foo'; + o.state = 'foo'; + o.updateTime = 'foo'; + } + buildCounterPosture--; + return o; +} + +void checkPosture(api.Posture o) { + buildCounterPosture++; + if (buildCounterPosture < 3) { + checkUnnamed26(o.annotations!); + 
checkUnnamed27(o.categories!); + unittest.expect( + o.createTime!, + unittest.equals('foo'), + ); + unittest.expect( + o.description!, + unittest.equals('foo'), + ); + unittest.expect( + o.etag!, + unittest.equals('foo'), + ); + unittest.expect( + o.name!, + unittest.equals('foo'), + ); + checkUnnamed28(o.policySets!); + unittest.expect(o.reconciling!, unittest.isTrue); + unittest.expect( + o.revisionId!, + unittest.equals('foo'), + ); + unittest.expect( + o.state!, + unittest.equals('foo'), + ); + unittest.expect( + o.updateTime!, + unittest.equals('foo'), + ); + } + buildCounterPosture--; +} + +core.Map buildUnnamed29() => { + 'x': 'foo', + 'y': 'foo', + }; + +void checkUnnamed29(core.Map o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o['x']!, + unittest.equals('foo'), + ); + unittest.expect( + o['y']!, + unittest.equals('foo'), + ); +} + +core.List buildUnnamed30() => [ + 'foo', + 'foo', + ]; + +void checkUnnamed30(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o[0], + unittest.equals('foo'), + ); + unittest.expect( + o[1], + unittest.equals('foo'), + ); +} + +core.int buildCounterPostureDeployment = 0; +api.PostureDeployment buildPostureDeployment() { + final o = api.PostureDeployment(); + buildCounterPostureDeployment++; + if (buildCounterPostureDeployment < 3) { + o.annotations = buildUnnamed29(); + o.categories = buildUnnamed30(); + o.createTime = 'foo'; + o.description = 'foo'; + o.desiredPostureId = 'foo'; + o.desiredPostureRevisionId = 'foo'; + o.etag = 'foo'; + o.failureMessage = 'foo'; + o.name = 'foo'; + o.postureId = 'foo'; + o.postureRevisionId = 'foo'; + o.reconciling = true; + o.state = 'foo'; + o.targetResource = 'foo'; + o.updateTime = 'foo'; + } + buildCounterPostureDeployment--; + return o; +} + +void checkPostureDeployment(api.PostureDeployment o) { + buildCounterPostureDeployment++; + if (buildCounterPostureDeployment < 3) { + checkUnnamed29(o.annotations!); + checkUnnamed30(o.categories!); + unittest.expect( + o.createTime!, + unittest.equals('foo'), + ); + unittest.expect( + o.description!, + unittest.equals('foo'), + ); + unittest.expect( + o.desiredPostureId!, + unittest.equals('foo'), + ); + unittest.expect( + o.desiredPostureRevisionId!, + unittest.equals('foo'), + ); + unittest.expect( + o.etag!, + unittest.equals('foo'), + ); + unittest.expect( + o.failureMessage!, + unittest.equals('foo'), + ); + unittest.expect( + o.name!, + unittest.equals('foo'), + ); + unittest.expect( + o.postureId!, + unittest.equals('foo'), + ); + unittest.expect( + o.postureRevisionId!, + unittest.equals('foo'), + ); + unittest.expect(o.reconciling!, unittest.isTrue); + unittest.expect( + o.state!, + unittest.equals('foo'), + ); + unittest.expect( + o.targetResource!, + unittest.equals('foo'), + ); + unittest.expect( + o.updateTime!, + unittest.equals('foo'), + ); + } + buildCounterPostureDeployment--; +} + +core.int buildCounterPostureDetails = 0; +api.PostureDetails buildPostureDetails() { + final o = api.PostureDetails(); + buildCounterPostureDetails++; + if (buildCounterPostureDetails < 3) { + o.policySet = 'foo'; + o.posture = 'foo'; + o.postureDeployment = 'foo'; + o.postureDeploymentTargetResource = 'foo'; + o.postureRevisionId = 'foo'; + } + buildCounterPostureDetails--; + return o; +} + +void checkPostureDetails(api.PostureDetails o) { + buildCounterPostureDetails++; + if (buildCounterPostureDetails < 3) { + unittest.expect( + o.policySet!, + unittest.equals('foo'), + ); + unittest.expect( + o.posture!, + 
unittest.equals('foo'), + ); + unittest.expect( + o.postureDeployment!, + unittest.equals('foo'), + ); + unittest.expect( + o.postureDeploymentTargetResource!, + unittest.equals('foo'), + ); + unittest.expect( + o.postureRevisionId!, + unittest.equals('foo'), + ); + } + buildCounterPostureDetails--; +} + +core.List buildUnnamed31() => [ + 'foo', + 'foo', + ]; + +void checkUnnamed31(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o[0], + unittest.equals('foo'), + ); + unittest.expect( + o[1], + unittest.equals('foo'), + ); +} + +core.List buildUnnamed32() => [ + buildPolicySet(), + buildPolicySet(), + ]; + +void checkUnnamed32(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkPolicySet(o[0]); + checkPolicySet(o[1]); +} + +core.int buildCounterPostureTemplate = 0; +api.PostureTemplate buildPostureTemplate() { + final o = api.PostureTemplate(); + buildCounterPostureTemplate++; + if (buildCounterPostureTemplate < 3) { + o.categories = buildUnnamed31(); + o.description = 'foo'; + o.name = 'foo'; + o.policySets = buildUnnamed32(); + o.revisionId = 'foo'; + o.state = 'foo'; + } + buildCounterPostureTemplate--; + return o; +} + +void checkPostureTemplate(api.PostureTemplate o) { + buildCounterPostureTemplate++; + if (buildCounterPostureTemplate < 3) { + checkUnnamed31(o.categories!); + unittest.expect( + o.description!, + unittest.equals('foo'), + ); + unittest.expect( + o.name!, + unittest.equals('foo'), + ); + checkUnnamed32(o.policySets!); + unittest.expect( + o.revisionId!, + unittest.equals('foo'), + ); + unittest.expect( + o.state!, + unittest.equals('foo'), + ); + } + buildCounterPostureTemplate--; +} + +core.int buildCounterProperty = 0; +api.Property buildProperty() { + final o = api.Property(); + buildCounterProperty++; + if (buildCounterProperty < 3) { + o.name = 'foo'; + o.valueExpression = buildExpr(); + } + buildCounterProperty--; + return o; +} + +void checkProperty(api.Property o) { + buildCounterProperty++; + if (buildCounterProperty < 3) { + unittest.expect( + o.name!, + unittest.equals('foo'), + ); + checkExpr(o.valueExpression!); + } + buildCounterProperty--; +} + +core.int buildCounterReport = 0; +api.Report buildReport() { + final o = api.Report(); + buildCounterReport++; + if (buildCounterReport < 3) { + o.createTime = 'foo'; + o.iacValidationReport = buildIaCValidationReport(); + o.name = 'foo'; + o.updateTime = 'foo'; + } + buildCounterReport--; + return o; +} + +void checkReport(api.Report o) { + buildCounterReport++; + if (buildCounterReport < 3) { + unittest.expect( + o.createTime!, + unittest.equals('foo'), + ); + checkIaCValidationReport(o.iacValidationReport!); + unittest.expect( + o.name!, + unittest.equals('foo'), + ); + unittest.expect( + o.updateTime!, + unittest.equals('foo'), + ); + } + buildCounterReport--; +} + +core.List buildUnnamed33() => [ + 'foo', + 'foo', + ]; + +void checkUnnamed33(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o[0], + unittest.equals('foo'), + ); + unittest.expect( + o[1], + unittest.equals('foo'), + ); +} + +core.int buildCounterResourceSelector = 0; +api.ResourceSelector buildResourceSelector() { + final o = api.ResourceSelector(); + buildCounterResourceSelector++; + if (buildCounterResourceSelector < 3) { + o.resourceTypes = buildUnnamed33(); + } + buildCounterResourceSelector--; + return o; +} + +void checkResourceSelector(api.ResourceSelector o) { + buildCounterResourceSelector++; + if (buildCounterResourceSelector < 3) { + 
checkUnnamed33(o.resourceTypes!); + } + buildCounterResourceSelector--; +} + +core.List buildUnnamed34() => [ + 'foo', + 'foo', + ]; + +void checkUnnamed34(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o[0], + unittest.equals('foo'), + ); + unittest.expect( + o[1], + unittest.equals('foo'), + ); +} + +core.int buildCounterResourceTypes = 0; +api.ResourceTypes buildResourceTypes() { + final o = api.ResourceTypes(); + buildCounterResourceTypes++; + if (buildCounterResourceTypes < 3) { + o.included = buildUnnamed34(); + } + buildCounterResourceTypes--; + return o; +} + +void checkResourceTypes(api.ResourceTypes o) { + buildCounterResourceTypes++; + if (buildCounterResourceTypes < 3) { + checkUnnamed34(o.included!); + } + buildCounterResourceTypes--; +} + +core.int buildCounterSecurityHealthAnalyticsCustomModule = 0; +api.SecurityHealthAnalyticsCustomModule + buildSecurityHealthAnalyticsCustomModule() { + final o = api.SecurityHealthAnalyticsCustomModule(); + buildCounterSecurityHealthAnalyticsCustomModule++; + if (buildCounterSecurityHealthAnalyticsCustomModule < 3) { + o.config = buildCustomConfig(); + o.displayName = 'foo'; + o.id = 'foo'; + o.moduleEnablementState = 'foo'; + } + buildCounterSecurityHealthAnalyticsCustomModule--; + return o; +} + +void checkSecurityHealthAnalyticsCustomModule( + api.SecurityHealthAnalyticsCustomModule o) { + buildCounterSecurityHealthAnalyticsCustomModule++; + if (buildCounterSecurityHealthAnalyticsCustomModule < 3) { + checkCustomConfig(o.config!); + unittest.expect( + o.displayName!, + unittest.equals('foo'), + ); + unittest.expect( + o.id!, + unittest.equals('foo'), + ); + unittest.expect( + o.moduleEnablementState!, + unittest.equals('foo'), + ); + } + buildCounterSecurityHealthAnalyticsCustomModule--; +} + +core.int buildCounterSecurityHealthAnalyticsModule = 0; +api.SecurityHealthAnalyticsModule buildSecurityHealthAnalyticsModule() { + final o = api.SecurityHealthAnalyticsModule(); + buildCounterSecurityHealthAnalyticsModule++; + if (buildCounterSecurityHealthAnalyticsModule < 3) { + o.moduleEnablementState = 'foo'; + o.moduleName = 'foo'; + } + buildCounterSecurityHealthAnalyticsModule--; + return o; +} + +void checkSecurityHealthAnalyticsModule(api.SecurityHealthAnalyticsModule o) { + buildCounterSecurityHealthAnalyticsModule++; + if (buildCounterSecurityHealthAnalyticsModule < 3) { + unittest.expect( + o.moduleEnablementState!, + unittest.equals('foo'), + ); + unittest.expect( + o.moduleName!, + unittest.equals('foo'), + ); + } + buildCounterSecurityHealthAnalyticsModule--; +} + +core.Map buildUnnamed35() => { + 'x': { + 'list': [1, 2, 3], + 'bool': true, + 'string': 'foo' + }, + 'y': { + 'list': [1, 2, 3], + 'bool': true, + 'string': 'foo' + }, + }; + +void checkUnnamed35(core.Map o) { + unittest.expect(o, unittest.hasLength(2)); + var casted9 = (o['x']!) as core.Map; + unittest.expect(casted9, unittest.hasLength(3)); + unittest.expect( + casted9['list'], + unittest.equals([1, 2, 3]), + ); + unittest.expect( + casted9['bool'], + unittest.equals(true), + ); + unittest.expect( + casted9['string'], + unittest.equals('foo'), + ); + var casted10 = (o['y']!) 
as core.Map; + unittest.expect(casted10, unittest.hasLength(3)); + unittest.expect( + casted10['list'], + unittest.equals([1, 2, 3]), + ); + unittest.expect( + casted10['bool'], + unittest.equals(true), + ); + unittest.expect( + casted10['string'], + unittest.equals('foo'), + ); +} + +core.List> buildUnnamed36() => [ + buildUnnamed35(), + buildUnnamed35(), + ]; + +void checkUnnamed36(core.List> o) { + unittest.expect(o, unittest.hasLength(2)); + checkUnnamed35(o[0]); + checkUnnamed35(o[1]); +} + +core.int buildCounterStatus = 0; +api.Status buildStatus() { + final o = api.Status(); + buildCounterStatus++; + if (buildCounterStatus < 3) { + o.code = 42; + o.details = buildUnnamed36(); + o.message = 'foo'; + } + buildCounterStatus--; + return o; +} + +void checkStatus(api.Status o) { + buildCounterStatus++; + if (buildCounterStatus < 3) { + unittest.expect( + o.code!, + unittest.equals(42), + ); + checkUnnamed36(o.details!); + unittest.expect( + o.message!, + unittest.equals('foo'), + ); + } + buildCounterStatus--; +} + +core.int buildCounterViolation = 0; +api.Violation buildViolation() { + final o = api.Violation(); + buildCounterViolation++; + if (buildCounterViolation < 3) { + o.assetId = 'foo'; + o.nextSteps = 'foo'; + o.policyId = 'foo'; + o.severity = 'foo'; + o.violatedAsset = buildAssetDetails(); + o.violatedPolicy = buildPolicyDetails(); + o.violatedPosture = buildPostureDetails(); + } + buildCounterViolation--; + return o; +} + +void checkViolation(api.Violation o) { + buildCounterViolation++; + if (buildCounterViolation < 3) { + unittest.expect( + o.assetId!, + unittest.equals('foo'), + ); + unittest.expect( + o.nextSteps!, + unittest.equals('foo'), + ); + unittest.expect( + o.policyId!, + unittest.equals('foo'), + ); + unittest.expect( + o.severity!, + unittest.equals('foo'), + ); + checkAssetDetails(o.violatedAsset!); + checkPolicyDetails(o.violatedPolicy!); + checkPostureDetails(o.violatedPosture!); + } + buildCounterViolation--; +} + +void main() { + unittest.group('obj-schema-AssetDetails', () { + unittest.test('to-json--from-json', () async { + final o = buildAssetDetails(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.AssetDetails.fromJson( + oJson as core.Map); + checkAssetDetails(od); + }); + }); + + unittest.group('obj-schema-CancelOperationRequest', () { + unittest.test('to-json--from-json', () async { + final o = buildCancelOperationRequest(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.CancelOperationRequest.fromJson( + oJson as core.Map); + checkCancelOperationRequest(od); + }); + }); + + unittest.group('obj-schema-ComplianceStandard', () { + unittest.test('to-json--from-json', () async { + final o = buildComplianceStandard(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.ComplianceStandard.fromJson( + oJson as core.Map); + checkComplianceStandard(od); + }); + }); + + unittest.group('obj-schema-Constraint', () { + unittest.test('to-json--from-json', () async { + final o = buildConstraint(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.Constraint.fromJson(oJson as core.Map); + checkConstraint(od); + }); + }); + + unittest.group('obj-schema-CreateIaCValidationReportRequest', () { + unittest.test('to-json--from-json', () async { + final o = buildCreateIaCValidationReportRequest(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.CreateIaCValidationReportRequest.fromJson( + oJson as core.Map); + 
checkCreateIaCValidationReportRequest(od); + }); + }); + + unittest.group('obj-schema-CustomConfig', () { + unittest.test('to-json--from-json', () async { + final o = buildCustomConfig(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.CustomConfig.fromJson( + oJson as core.Map); + checkCustomConfig(od); + }); + }); + + unittest.group('obj-schema-CustomOutputSpec', () { + unittest.test('to-json--from-json', () async { + final o = buildCustomOutputSpec(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.CustomOutputSpec.fromJson( + oJson as core.Map); + checkCustomOutputSpec(od); + }); + }); + + unittest.group('obj-schema-Empty', () { + unittest.test('to-json--from-json', () async { + final o = buildEmpty(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.Empty.fromJson(oJson as core.Map); + checkEmpty(od); + }); + }); + + unittest.group('obj-schema-Expr', () { + unittest.test('to-json--from-json', () async { + final o = buildExpr(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.Expr.fromJson(oJson as core.Map); + checkExpr(od); + }); + }); + + unittest.group('obj-schema-ExtractPostureRequest', () { + unittest.test('to-json--from-json', () async { + final o = buildExtractPostureRequest(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.ExtractPostureRequest.fromJson( + oJson as core.Map); + checkExtractPostureRequest(od); + }); + }); + + unittest.group('obj-schema-GoogleCloudSecuritypostureV1CustomConstraint', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudSecuritypostureV1CustomConstraint(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudSecuritypostureV1CustomConstraint.fromJson( + oJson as core.Map); + checkGoogleCloudSecuritypostureV1CustomConstraint(od); + }); + }); + + unittest.group('obj-schema-GoogleCloudSecuritypostureV1PolicyRule', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudSecuritypostureV1PolicyRule(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GoogleCloudSecuritypostureV1PolicyRule.fromJson( + oJson as core.Map); + checkGoogleCloudSecuritypostureV1PolicyRule(od); + }); + }); + + unittest.group( + 'obj-schema-GoogleCloudSecuritypostureV1PolicyRuleStringValues', () { + unittest.test('to-json--from-json', () async { + final o = buildGoogleCloudSecuritypostureV1PolicyRuleStringValues(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.GoogleCloudSecuritypostureV1PolicyRuleStringValues.fromJson( + oJson as core.Map); + checkGoogleCloudSecuritypostureV1PolicyRuleStringValues(od); + }); + }); + + unittest.group('obj-schema-IaC', () { + unittest.test('to-json--from-json', () async { + final o = buildIaC(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.IaC.fromJson(oJson as core.Map); + checkIaC(od); + }); + }); + + unittest.group('obj-schema-IaCValidationReport', () { + unittest.test('to-json--from-json', () async { + final o = buildIaCValidationReport(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.IaCValidationReport.fromJson( + oJson as core.Map); + checkIaCValidationReport(od); + }); + }); + + unittest.group('obj-schema-ListLocationsResponse', () { + unittest.test('to-json--from-json', () async { + final o = buildListLocationsResponse(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od 
= api.ListLocationsResponse.fromJson( + oJson as core.Map); + checkListLocationsResponse(od); + }); + }); + + unittest.group('obj-schema-ListOperationsResponse', () { + unittest.test('to-json--from-json', () async { + final o = buildListOperationsResponse(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.ListOperationsResponse.fromJson( + oJson as core.Map); + checkListOperationsResponse(od); + }); + }); + + unittest.group('obj-schema-ListPostureDeploymentsResponse', () { + unittest.test('to-json--from-json', () async { + final o = buildListPostureDeploymentsResponse(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.ListPostureDeploymentsResponse.fromJson( + oJson as core.Map); + checkListPostureDeploymentsResponse(od); + }); + }); + + unittest.group('obj-schema-ListPostureRevisionsResponse', () { + unittest.test('to-json--from-json', () async { + final o = buildListPostureRevisionsResponse(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.ListPostureRevisionsResponse.fromJson( + oJson as core.Map); + checkListPostureRevisionsResponse(od); + }); + }); + + unittest.group('obj-schema-ListPostureTemplatesResponse', () { + unittest.test('to-json--from-json', () async { + final o = buildListPostureTemplatesResponse(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.ListPostureTemplatesResponse.fromJson( + oJson as core.Map); + checkListPostureTemplatesResponse(od); + }); + }); + + unittest.group('obj-schema-ListPosturesResponse', () { + unittest.test('to-json--from-json', () async { + final o = buildListPosturesResponse(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.ListPosturesResponse.fromJson( + oJson as core.Map); + checkListPosturesResponse(od); + }); + }); + + unittest.group('obj-schema-ListReportsResponse', () { + unittest.test('to-json--from-json', () async { + final o = buildListReportsResponse(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.ListReportsResponse.fromJson( + oJson as core.Map); + checkListReportsResponse(od); + }); + }); + + unittest.group('obj-schema-Location', () { + unittest.test('to-json--from-json', () async { + final o = buildLocation(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.Location.fromJson(oJson as core.Map); + checkLocation(od); + }); + }); + + unittest.group('obj-schema-Operation', () { + unittest.test('to-json--from-json', () async { + final o = buildOperation(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.Operation.fromJson(oJson as core.Map); + checkOperation(od); + }); + }); + + unittest.group('obj-schema-OrgPolicyConstraint', () { + unittest.test('to-json--from-json', () async { + final o = buildOrgPolicyConstraint(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.OrgPolicyConstraint.fromJson( + oJson as core.Map); + checkOrgPolicyConstraint(od); + }); + }); + + unittest.group('obj-schema-OrgPolicyConstraintCustom', () { + unittest.test('to-json--from-json', () async { + final o = buildOrgPolicyConstraintCustom(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.OrgPolicyConstraintCustom.fromJson( + oJson as core.Map); + checkOrgPolicyConstraintCustom(od); + }); + }); + + unittest.group('obj-schema-Policy', () { + unittest.test('to-json--from-json', () async { + final o = buildPolicy(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final 
od = + api.Policy.fromJson(oJson as core.Map); + checkPolicy(od); + }); + }); + + unittest.group('obj-schema-PolicyDetails', () { + unittest.test('to-json--from-json', () async { + final o = buildPolicyDetails(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.PolicyDetails.fromJson( + oJson as core.Map); + checkPolicyDetails(od); + }); + }); + + unittest.group('obj-schema-PolicySet', () { + unittest.test('to-json--from-json', () async { + final o = buildPolicySet(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.PolicySet.fromJson(oJson as core.Map); + checkPolicySet(od); + }); + }); + + unittest.group('obj-schema-Posture', () { + unittest.test('to-json--from-json', () async { + final o = buildPosture(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.Posture.fromJson(oJson as core.Map); + checkPosture(od); + }); + }); + + unittest.group('obj-schema-PostureDeployment', () { + unittest.test('to-json--from-json', () async { + final o = buildPostureDeployment(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.PostureDeployment.fromJson( + oJson as core.Map); + checkPostureDeployment(od); + }); + }); + + unittest.group('obj-schema-PostureDetails', () { + unittest.test('to-json--from-json', () async { + final o = buildPostureDetails(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.PostureDetails.fromJson( + oJson as core.Map); + checkPostureDetails(od); + }); + }); + + unittest.group('obj-schema-PostureTemplate', () { + unittest.test('to-json--from-json', () async { + final o = buildPostureTemplate(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.PostureTemplate.fromJson( + oJson as core.Map); + checkPostureTemplate(od); + }); + }); + + unittest.group('obj-schema-Property', () { + unittest.test('to-json--from-json', () async { + final o = buildProperty(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.Property.fromJson(oJson as core.Map); + checkProperty(od); + }); + }); + + unittest.group('obj-schema-Report', () { + unittest.test('to-json--from-json', () async { + final o = buildReport(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.Report.fromJson(oJson as core.Map); + checkReport(od); + }); + }); + + unittest.group('obj-schema-ResourceSelector', () { + unittest.test('to-json--from-json', () async { + final o = buildResourceSelector(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.ResourceSelector.fromJson( + oJson as core.Map); + checkResourceSelector(od); + }); + }); + + unittest.group('obj-schema-ResourceTypes', () { + unittest.test('to-json--from-json', () async { + final o = buildResourceTypes(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.ResourceTypes.fromJson( + oJson as core.Map); + checkResourceTypes(od); + }); + }); + + unittest.group('obj-schema-SecurityHealthAnalyticsCustomModule', () { + unittest.test('to-json--from-json', () async { + final o = buildSecurityHealthAnalyticsCustomModule(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.SecurityHealthAnalyticsCustomModule.fromJson( + oJson as core.Map); + checkSecurityHealthAnalyticsCustomModule(od); + }); + }); + + unittest.group('obj-schema-SecurityHealthAnalyticsModule', () { + unittest.test('to-json--from-json', () async { + final o = buildSecurityHealthAnalyticsModule(); + final oJson = 
convert.jsonDecode(convert.jsonEncode(o)); + final od = api.SecurityHealthAnalyticsModule.fromJson( + oJson as core.Map); + checkSecurityHealthAnalyticsModule(od); + }); + }); + + unittest.group('obj-schema-Status', () { + unittest.test('to-json--from-json', () async { + final o = buildStatus(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.Status.fromJson(oJson as core.Map); + checkStatus(od); + }); + }); + + unittest.group('obj-schema-Violation', () { + unittest.test('to-json--from-json', () async { + final o = buildViolation(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.Violation.fromJson(oJson as core.Map); + checkViolation(od); + }); + }); + + unittest.group('resource-OrganizationsLocationsOperationsResource', () { + unittest.test('method--cancel', () async { + final mock = HttpServerMock(); + final res = + api.SecurityPostureApi(mock).organizations.locations.operations; + final arg_request = buildCancelOperationRequest(); + final arg_name = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.CancelOperationRequest.fromJson( + json as core.Map); + checkCancelOperationRequest(obj); + + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildEmpty()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = + await res.cancel(arg_request, arg_name, $fields: arg_$fields); + checkEmpty(response as api.Empty); + }); + + unittest.test('method--delete', () async { + final mock = HttpServerMock(); + final res = + api.SecurityPostureApi(mock).organizations.locations.operations; + final arg_name = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), 
+ ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildEmpty()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.delete(arg_name, $fields: arg_$fields); + checkEmpty(response as api.Empty); + }); + + unittest.test('method--get', () async { + final mock = HttpServerMock(); + final res = + api.SecurityPostureApi(mock).organizations.locations.operations; + final arg_name = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildOperation()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.get(arg_name, $fields: arg_$fields); + checkOperation(response as api.Operation); + }); + + unittest.test('method--list', () async { + final mock = HttpServerMock(); + final res = + api.SecurityPostureApi(mock).organizations.locations.operations; + final arg_name = 'foo'; + final arg_filter = 'foo'; + final arg_pageSize = 42; + final arg_pageToken = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['filter']!.first, + unittest.equals(arg_filter), + ); + unittest.expect( + core.int.parse(queryMap['pageSize']!.first), + unittest.equals(arg_pageSize), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = 
convert.json.encode(buildListOperationsResponse()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.list(arg_name, + filter: arg_filter, + pageSize: arg_pageSize, + pageToken: arg_pageToken, + $fields: arg_$fields); + checkListOperationsResponse(response as api.ListOperationsResponse); + }); + }); + + unittest.group('resource-OrganizationsLocationsPostureDeploymentsResource', + () { + unittest.test('method--create', () async { + final mock = HttpServerMock(); + final res = api.SecurityPostureApi(mock) + .organizations + .locations + .postureDeployments; + final arg_request = buildPostureDeployment(); + final arg_parent = 'foo'; + final arg_postureDeploymentId = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.PostureDeployment.fromJson( + json as core.Map); + checkPostureDeployment(obj); + + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['postureDeploymentId']!.first, + unittest.equals(arg_postureDeploymentId), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildOperation()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.create(arg_request, arg_parent, + postureDeploymentId: arg_postureDeploymentId, $fields: arg_$fields); + checkOperation(response as api.Operation); + }); + + unittest.test('method--delete', () async { + final mock = HttpServerMock(); + final res = api.SecurityPostureApi(mock) + .organizations + .locations + .postureDeployments; + final arg_name = 'foo'; + final arg_etag = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['etag']!.first, + unittest.equals(arg_etag), + ); + 
unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildOperation()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = + await res.delete(arg_name, etag: arg_etag, $fields: arg_$fields); + checkOperation(response as api.Operation); + }); + + unittest.test('method--get', () async { + final mock = HttpServerMock(); + final res = api.SecurityPostureApi(mock) + .organizations + .locations + .postureDeployments; + final arg_name = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildPostureDeployment()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.get(arg_name, $fields: arg_$fields); + checkPostureDeployment(response as api.PostureDeployment); + }); + + unittest.test('method--list', () async { + final mock = HttpServerMock(); + final res = api.SecurityPostureApi(mock) + .organizations + .locations + .postureDeployments; + final arg_parent = 'foo'; + final arg_filter = 'foo'; + final arg_pageSize = 42; + final arg_pageToken = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['filter']!.first, + unittest.equals(arg_filter), + ); + unittest.expect( + core.int.parse(queryMap['pageSize']!.first), + unittest.equals(arg_pageSize), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 
'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildListPostureDeploymentsResponse()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.list(arg_parent, + filter: arg_filter, + pageSize: arg_pageSize, + pageToken: arg_pageToken, + $fields: arg_$fields); + checkListPostureDeploymentsResponse( + response as api.ListPostureDeploymentsResponse); + }); + + unittest.test('method--patch', () async { + final mock = HttpServerMock(); + final res = api.SecurityPostureApi(mock) + .organizations + .locations + .postureDeployments; + final arg_request = buildPostureDeployment(); + final arg_name = 'foo'; + final arg_updateMask = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.PostureDeployment.fromJson( + json as core.Map); + checkPostureDeployment(obj); + + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['updateMask']!.first, + unittest.equals(arg_updateMask), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildOperation()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.patch(arg_request, arg_name, + updateMask: arg_updateMask, $fields: arg_$fields); + checkOperation(response as api.Operation); + }); + }); + + unittest.group('resource-OrganizationsLocationsPostureTemplatesResource', () { + unittest.test('method--get', () async { + final mock = HttpServerMock(); + final res = + api.SecurityPostureApi(mock).organizations.locations.postureTemplates; + final arg_name = 'foo'; + final arg_revisionId = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['revisionId']!.first, 
+ unittest.equals(arg_revisionId), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildPostureTemplate()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.get(arg_name, + revisionId: arg_revisionId, $fields: arg_$fields); + checkPostureTemplate(response as api.PostureTemplate); + }); + + unittest.test('method--list', () async { + final mock = HttpServerMock(); + final res = + api.SecurityPostureApi(mock).organizations.locations.postureTemplates; + final arg_parent = 'foo'; + final arg_filter = 'foo'; + final arg_pageSize = 42; + final arg_pageToken = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['filter']!.first, + unittest.equals(arg_filter), + ); + unittest.expect( + core.int.parse(queryMap['pageSize']!.first), + unittest.equals(arg_pageSize), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildListPostureTemplatesResponse()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.list(arg_parent, + filter: arg_filter, + pageSize: arg_pageSize, + pageToken: arg_pageToken, + $fields: arg_$fields); + checkListPostureTemplatesResponse( + response as api.ListPostureTemplatesResponse); + }); + }); + + unittest.group('resource-OrganizationsLocationsPosturesResource', () { + unittest.test('method--create', () async { + final mock = HttpServerMock(); + final res = api.SecurityPostureApi(mock).organizations.locations.postures; + final arg_request = buildPosture(); + final arg_parent = 'foo'; + final arg_postureId = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = + api.Posture.fromJson(json as core.Map); + checkPosture(obj); + + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String 
v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['postureId']!.first, + unittest.equals(arg_postureId), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildOperation()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.create(arg_request, arg_parent, + postureId: arg_postureId, $fields: arg_$fields); + checkOperation(response as api.Operation); + }); + + unittest.test('method--delete', () async { + final mock = HttpServerMock(); + final res = api.SecurityPostureApi(mock).organizations.locations.postures; + final arg_name = 'foo'; + final arg_etag = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['etag']!.first, + unittest.equals(arg_etag), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildOperation()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = + await res.delete(arg_name, etag: arg_etag, $fields: arg_$fields); + checkOperation(response as api.Operation); + }); + + unittest.test('method--extract', () async { + final mock = HttpServerMock(); + final res = api.SecurityPostureApi(mock).organizations.locations.postures; + final arg_request = buildExtractPostureRequest(); + final arg_parent = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.ExtractPostureRequest.fromJson( + json as core.Map); + checkExtractPostureRequest(obj); + + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final 
keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildOperation()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = + await res.extract(arg_request, arg_parent, $fields: arg_$fields); + checkOperation(response as api.Operation); + }); + + unittest.test('method--get', () async { + final mock = HttpServerMock(); + final res = api.SecurityPostureApi(mock).organizations.locations.postures; + final arg_name = 'foo'; + final arg_revisionId = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['revisionId']!.first, + unittest.equals(arg_revisionId), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildPosture()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.get(arg_name, + revisionId: arg_revisionId, $fields: arg_$fields); + checkPosture(response as api.Posture); + }); + + unittest.test('method--list', () async { + final mock = HttpServerMock(); + final res = api.SecurityPostureApi(mock).organizations.locations.postures; + final arg_parent = 'foo'; + final arg_filter = 'foo'; + final arg_pageSize = 42; + final arg_pageToken = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['filter']!.first, + unittest.equals(arg_filter), + ); + unittest.expect( + core.int.parse(queryMap['pageSize']!.first), + 
unittest.equals(arg_pageSize), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildListPosturesResponse()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.list(arg_parent, + filter: arg_filter, + pageSize: arg_pageSize, + pageToken: arg_pageToken, + $fields: arg_$fields); + checkListPosturesResponse(response as api.ListPosturesResponse); + }); + + unittest.test('method--listRevisions', () async { + final mock = HttpServerMock(); + final res = api.SecurityPostureApi(mock).organizations.locations.postures; + final arg_name = 'foo'; + final arg_pageSize = 42; + final arg_pageToken = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + core.int.parse(queryMap['pageSize']!.first), + unittest.equals(arg_pageSize), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildListPostureRevisionsResponse()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.listRevisions(arg_name, + pageSize: arg_pageSize, + pageToken: arg_pageToken, + $fields: arg_$fields); + checkListPostureRevisionsResponse( + response as api.ListPostureRevisionsResponse); + }); + + unittest.test('method--patch', () async { + final mock = HttpServerMock(); + final res = api.SecurityPostureApi(mock).organizations.locations.postures; + final arg_request = buildPosture(); + final arg_name = 'foo'; + final arg_revisionId = 'foo'; + final arg_updateMask = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = + api.Posture.fromJson(json as core.Map); + checkPosture(obj); + + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + 
queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['revisionId']!.first, + unittest.equals(arg_revisionId), + ); + unittest.expect( + queryMap['updateMask']!.first, + unittest.equals(arg_updateMask), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildOperation()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.patch(arg_request, arg_name, + revisionId: arg_revisionId, + updateMask: arg_updateMask, + $fields: arg_$fields); + checkOperation(response as api.Operation); + }); + }); + + unittest.group('resource-OrganizationsLocationsReportsResource', () { + unittest.test('method--createIaCValidationReport', () async { + final mock = HttpServerMock(); + final res = api.SecurityPostureApi(mock).organizations.locations.reports; + final arg_request = buildCreateIaCValidationReportRequest(); + final arg_parent = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.CreateIaCValidationReportRequest.fromJson( + json as core.Map); + checkCreateIaCValidationReportRequest(obj); + + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildOperation()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.createIaCValidationReport( + arg_request, arg_parent, + $fields: arg_$fields); + checkOperation(response as api.Operation); + }); + + unittest.test('method--get', () async { + final mock = HttpServerMock(); + final res = api.SecurityPostureApi(mock).organizations.locations.reports; + final arg_name = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void 
addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildReport()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.get(arg_name, $fields: arg_$fields); + checkReport(response as api.Report); + }); + + unittest.test('method--list', () async { + final mock = HttpServerMock(); + final res = api.SecurityPostureApi(mock).organizations.locations.reports; + final arg_parent = 'foo'; + final arg_filter = 'foo'; + final arg_pageSize = 42; + final arg_pageToken = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['filter']!.first, + unittest.equals(arg_filter), + ); + unittest.expect( + core.int.parse(queryMap['pageSize']!.first), + unittest.equals(arg_pageSize), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildListReportsResponse()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.list(arg_parent, + filter: arg_filter, + pageSize: arg_pageSize, + pageToken: arg_pageToken, + $fields: arg_$fields); + checkListReportsResponse(response as api.ListReportsResponse); + }); + }); + + unittest.group('resource-ProjectsLocationsResource', () { + unittest.test('method--get', () async { + final mock = HttpServerMock(); + final res = api.SecurityPostureApi(mock).projects.locations; + final arg_name = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String 
v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildLocation()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.get(arg_name, $fields: arg_$fields); + checkLocation(response as api.Location); + }); + + unittest.test('method--list', () async { + final mock = HttpServerMock(); + final res = api.SecurityPostureApi(mock).projects.locations; + final arg_name = 'foo'; + final arg_filter = 'foo'; + final arg_pageSize = 42; + final arg_pageToken = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('v1/'), + ); + pathOffset += 3; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['filter']!.first, + unittest.equals(arg_filter), + ); + unittest.expect( + core.int.parse(queryMap['pageSize']!.first), + unittest.equals(arg_pageSize), + ); + unittest.expect( + queryMap['pageToken']!.first, + unittest.equals(arg_pageToken), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildListLocationsResponse()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.list(arg_name, + filter: arg_filter, + pageSize: arg_pageSize, + pageToken: arg_pageToken, + $fields: arg_$fields); + checkListLocationsResponse(response as api.ListLocationsResponse); + }); + }); +} diff --git a/generated/googleapis/test/servicemanagement/v1_test.dart b/generated/googleapis/test/servicemanagement/v1_test.dart index 28cc439e0..2311e317f 100644 --- a/generated/googleapis/test/servicemanagement/v1_test.dart +++ b/generated/googleapis/test/servicemanagement/v1_test.dart @@ -1179,6 +1179,7 @@ api.Documentation buildDocumentation() { final o = api.Documentation(); buildCounterDocumentation++; if (buildCounterDocumentation < 3) { + o.additionalIamInfo = 'foo'; o.documentationRootUrl = 'foo'; o.overview = 'foo'; o.pages = buildUnnamed26(); @@ -1194,6 +1195,10 @@ api.Documentation buildDocumentation() { void checkDocumentation(api.Documentation o) { buildCounterDocumentation++; if (buildCounterDocumentation < 3) { + unittest.expect( + o.additionalIamInfo!, + unittest.equals('foo'), + ); unittest.expect( o.documentationRootUrl!, unittest.equals('foo'), @@ -1521,6 +1526,7 @@ 
api.ExperimentalFeatures buildExperimentalFeatures() { final o = api.ExperimentalFeatures(); buildCounterExperimentalFeatures++; if (buildCounterExperimentalFeatures < 3) { + o.protobufPythonicTypesEnabled = true; o.restAsyncIoEnabled = true; } buildCounterExperimentalFeatures--; @@ -1530,6 +1536,7 @@ api.ExperimentalFeatures buildExperimentalFeatures() { void checkExperimentalFeatures(api.ExperimentalFeatures o) { buildCounterExperimentalFeatures++; if (buildCounterExperimentalFeatures < 3) { + unittest.expect(o.protobufPythonicTypesEnabled!, unittest.isTrue); unittest.expect(o.restAsyncIoEnabled!, unittest.isTrue); } buildCounterExperimentalFeatures--; @@ -1881,12 +1888,30 @@ void checkGetPolicyOptions(api.GetPolicyOptions o) { buildCounterGetPolicyOptions--; } +core.Map buildUnnamed43() => { + 'x': 'foo', + 'y': 'foo', + }; + +void checkUnnamed43(core.Map o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o['x']!, + unittest.equals('foo'), + ); + unittest.expect( + o['y']!, + unittest.equals('foo'), + ); +} + core.int buildCounterGoSettings = 0; api.GoSettings buildGoSettings() { final o = api.GoSettings(); buildCounterGoSettings++; if (buildCounterGoSettings < 3) { o.common = buildCommonLanguageSettings(); + o.renamedServices = buildUnnamed43(); } buildCounterGoSettings--; return o; @@ -1896,16 +1921,17 @@ void checkGoSettings(api.GoSettings o) { buildCounterGoSettings++; if (buildCounterGoSettings < 3) { checkCommonLanguageSettings(o.common!); + checkUnnamed43(o.renamedServices!); } buildCounterGoSettings--; } -core.List buildUnnamed43() => [ +core.List buildUnnamed44() => [ buildHttpRule(), buildHttpRule(), ]; -void checkUnnamed43(core.List o) { +void checkUnnamed44(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkHttpRule(o[0]); checkHttpRule(o[1]); @@ -1917,7 +1943,7 @@ api.Http buildHttp() { buildCounterHttp++; if (buildCounterHttp < 3) { o.fullyDecodeReservedExpansion = true; - o.rules = buildUnnamed43(); + o.rules = buildUnnamed44(); } buildCounterHttp--; return o; @@ -1927,17 +1953,17 @@ void checkHttp(api.Http o) { buildCounterHttp++; if (buildCounterHttp < 3) { unittest.expect(o.fullyDecodeReservedExpansion!, unittest.isTrue); - checkUnnamed43(o.rules!); + checkUnnamed44(o.rules!); } buildCounterHttp--; } -core.List buildUnnamed44() => [ +core.List buildUnnamed45() => [ buildHttpRule(), buildHttpRule(), ]; -void checkUnnamed44(core.List o) { +void checkUnnamed45(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkHttpRule(o[0]); checkHttpRule(o[1]); @@ -1948,7 +1974,7 @@ api.HttpRule buildHttpRule() { final o = api.HttpRule(); buildCounterHttpRule++; if (buildCounterHttpRule < 3) { - o.additionalBindings = buildUnnamed44(); + o.additionalBindings = buildUnnamed45(); o.body = 'foo'; o.custom = buildCustomHttpPattern(); o.delete = 'foo'; @@ -1966,7 +1992,7 @@ api.HttpRule buildHttpRule() { void checkHttpRule(api.HttpRule o) { buildCounterHttpRule++; if (buildCounterHttpRule < 3) { - checkUnnamed44(o.additionalBindings!); + checkUnnamed45(o.additionalBindings!); unittest.expect( o.body!, unittest.equals('foo'), @@ -2004,12 +2030,12 @@ void checkHttpRule(api.HttpRule o) { buildCounterHttpRule--; } -core.Map buildUnnamed45() => { +core.Map buildUnnamed46() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed45(core.Map o) { +void checkUnnamed46(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -2028,7 +2054,7 @@ api.JavaSettings buildJavaSettings() { if (buildCounterJavaSettings < 3) { o.common = 
buildCommonLanguageSettings(); o.libraryPackage = 'foo'; - o.serviceClassNames = buildUnnamed45(); + o.serviceClassNames = buildUnnamed46(); } buildCounterJavaSettings--; return o; @@ -2042,7 +2068,7 @@ void checkJavaSettings(api.JavaSettings o) { o.libraryPackage!, unittest.equals('foo'), ); - checkUnnamed45(o.serviceClassNames!); + checkUnnamed46(o.serviceClassNames!); } buildCounterJavaSettings--; } @@ -2116,12 +2142,12 @@ void checkLabelDescriptor(api.LabelDescriptor o) { buildCounterLabelDescriptor--; } -core.List buildUnnamed46() => [ +core.List buildUnnamed47() => [ buildOperation(), buildOperation(), ]; -void checkUnnamed46(core.List o) { +void checkUnnamed47(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkOperation(o[0]); checkOperation(o[1]); @@ -2133,7 +2159,7 @@ api.ListOperationsResponse buildListOperationsResponse() { buildCounterListOperationsResponse++; if (buildCounterListOperationsResponse < 3) { o.nextPageToken = 'foo'; - o.operations = buildUnnamed46(); + o.operations = buildUnnamed47(); } buildCounterListOperationsResponse--; return o; @@ -2146,17 +2172,17 @@ void checkListOperationsResponse(api.ListOperationsResponse o) { o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed46(o.operations!); + checkUnnamed47(o.operations!); } buildCounterListOperationsResponse--; } -core.List buildUnnamed47() => [ +core.List buildUnnamed48() => [ buildService(), buildService(), ]; -void checkUnnamed47(core.List o) { +void checkUnnamed48(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkService(o[0]); checkService(o[1]); @@ -2168,7 +2194,7 @@ api.ListServiceConfigsResponse buildListServiceConfigsResponse() { buildCounterListServiceConfigsResponse++; if (buildCounterListServiceConfigsResponse < 3) { o.nextPageToken = 'foo'; - o.serviceConfigs = buildUnnamed47(); + o.serviceConfigs = buildUnnamed48(); } buildCounterListServiceConfigsResponse--; return o; @@ -2181,17 +2207,17 @@ void checkListServiceConfigsResponse(api.ListServiceConfigsResponse o) { o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed47(o.serviceConfigs!); + checkUnnamed48(o.serviceConfigs!); } buildCounterListServiceConfigsResponse--; } -core.List buildUnnamed48() => [ +core.List buildUnnamed49() => [ buildRollout(), buildRollout(), ]; -void checkUnnamed48(core.List o) { +void checkUnnamed49(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkRollout(o[0]); checkRollout(o[1]); @@ -2203,7 +2229,7 @@ api.ListServiceRolloutsResponse buildListServiceRolloutsResponse() { buildCounterListServiceRolloutsResponse++; if (buildCounterListServiceRolloutsResponse < 3) { o.nextPageToken = 'foo'; - o.rollouts = buildUnnamed48(); + o.rollouts = buildUnnamed49(); } buildCounterListServiceRolloutsResponse--; return o; @@ -2216,17 +2242,17 @@ void checkListServiceRolloutsResponse(api.ListServiceRolloutsResponse o) { o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed48(o.rollouts!); + checkUnnamed49(o.rollouts!); } buildCounterListServiceRolloutsResponse--; } -core.List buildUnnamed49() => [ +core.List buildUnnamed50() => [ buildManagedService(), buildManagedService(), ]; -void checkUnnamed49(core.List o) { +void checkUnnamed50(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkManagedService(o[0]); checkManagedService(o[1]); @@ -2238,7 +2264,7 @@ api.ListServicesResponse buildListServicesResponse() { buildCounterListServicesResponse++; if (buildCounterListServicesResponse < 3) { o.nextPageToken = 'foo'; - o.services = buildUnnamed49(); + o.services = 
buildUnnamed50(); } buildCounterListServicesResponse--; return o; @@ -2251,17 +2277,17 @@ void checkListServicesResponse(api.ListServicesResponse o) { o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed49(o.services!); + checkUnnamed50(o.services!); } buildCounterListServicesResponse--; } -core.List buildUnnamed50() => [ +core.List buildUnnamed51() => [ buildLabelDescriptor(), buildLabelDescriptor(), ]; -void checkUnnamed50(core.List o) { +void checkUnnamed51(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkLabelDescriptor(o[0]); checkLabelDescriptor(o[1]); @@ -2274,7 +2300,7 @@ api.LogDescriptor buildLogDescriptor() { if (buildCounterLogDescriptor < 3) { o.description = 'foo'; o.displayName = 'foo'; - o.labels = buildUnnamed50(); + o.labels = buildUnnamed51(); o.name = 'foo'; } buildCounterLogDescriptor--; @@ -2292,7 +2318,7 @@ void checkLogDescriptor(api.LogDescriptor o) { o.displayName!, unittest.equals('foo'), ); - checkUnnamed50(o.labels!); + checkUnnamed51(o.labels!); unittest.expect( o.name!, unittest.equals('foo'), @@ -2301,23 +2327,23 @@ void checkLogDescriptor(api.LogDescriptor o) { buildCounterLogDescriptor--; } -core.List buildUnnamed51() => [ +core.List buildUnnamed52() => [ buildLoggingDestination(), buildLoggingDestination(), ]; -void checkUnnamed51(core.List o) { +void checkUnnamed52(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkLoggingDestination(o[0]); checkLoggingDestination(o[1]); } -core.List buildUnnamed52() => [ +core.List buildUnnamed53() => [ buildLoggingDestination(), buildLoggingDestination(), ]; -void checkUnnamed52(core.List o) { +void checkUnnamed53(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkLoggingDestination(o[0]); checkLoggingDestination(o[1]); @@ -2328,8 +2354,8 @@ api.Logging buildLogging() { final o = api.Logging(); buildCounterLogging++; if (buildCounterLogging < 3) { - o.consumerDestinations = buildUnnamed51(); - o.producerDestinations = buildUnnamed52(); + o.consumerDestinations = buildUnnamed52(); + o.producerDestinations = buildUnnamed53(); } buildCounterLogging--; return o; @@ -2338,18 +2364,18 @@ api.Logging buildLogging() { void checkLogging(api.Logging o) { buildCounterLogging++; if (buildCounterLogging < 3) { - checkUnnamed51(o.consumerDestinations!); - checkUnnamed52(o.producerDestinations!); + checkUnnamed52(o.consumerDestinations!); + checkUnnamed53(o.producerDestinations!); } buildCounterLogging--; } -core.List buildUnnamed53() => [ +core.List buildUnnamed54() => [ 'foo', 'foo', ]; -void checkUnnamed53(core.List o) { +void checkUnnamed54(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -2366,7 +2392,7 @@ api.LoggingDestination buildLoggingDestination() { final o = api.LoggingDestination(); buildCounterLoggingDestination++; if (buildCounterLoggingDestination < 3) { - o.logs = buildUnnamed53(); + o.logs = buildUnnamed54(); o.monitoredResource = 'foo'; } buildCounterLoggingDestination--; @@ -2376,7 +2402,7 @@ api.LoggingDestination buildLoggingDestination() { void checkLoggingDestination(api.LoggingDestination o) { buildCounterLoggingDestination++; if (buildCounterLoggingDestination < 3) { - checkUnnamed53(o.logs!); + checkUnnamed54(o.logs!); unittest.expect( o.monitoredResource!, unittest.equals('foo'), @@ -2449,12 +2475,12 @@ void checkManagedService(api.ManagedService o) { buildCounterManagedService--; } -core.List buildUnnamed54() => [ +core.List buildUnnamed55() => [ buildOption(), buildOption(), ]; -void checkUnnamed54(core.List o) { +void 
checkUnnamed55(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkOption(o[0]); checkOption(o[1]); @@ -2466,7 +2492,7 @@ api.Method buildMethod() { buildCounterMethod++; if (buildCounterMethod < 3) { o.name = 'foo'; - o.options = buildUnnamed54(); + o.options = buildUnnamed55(); o.requestStreaming = true; o.requestTypeUrl = 'foo'; o.responseStreaming = true; @@ -2484,7 +2510,7 @@ void checkMethod(api.Method o) { o.name!, unittest.equals('foo'), ); - checkUnnamed54(o.options!); + checkUnnamed55(o.options!); unittest.expect(o.requestStreaming!, unittest.isTrue); unittest.expect( o.requestTypeUrl!, @@ -2503,12 +2529,12 @@ void checkMethod(api.Method o) { buildCounterMethod--; } -core.List buildUnnamed55() => [ +core.List buildUnnamed56() => [ buildFieldPolicy(), buildFieldPolicy(), ]; -void checkUnnamed55(core.List o) { +void checkUnnamed56(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkFieldPolicy(o[0]); checkFieldPolicy(o[1]); @@ -2519,7 +2545,7 @@ api.MethodPolicy buildMethodPolicy() { final o = api.MethodPolicy(); buildCounterMethodPolicy++; if (buildCounterMethodPolicy < 3) { - o.requestPolicies = buildUnnamed55(); + o.requestPolicies = buildUnnamed56(); o.selector = 'foo'; } buildCounterMethodPolicy--; @@ -2529,7 +2555,7 @@ api.MethodPolicy buildMethodPolicy() { void checkMethodPolicy(api.MethodPolicy o) { buildCounterMethodPolicy++; if (buildCounterMethodPolicy < 3) { - checkUnnamed55(o.requestPolicies!); + checkUnnamed56(o.requestPolicies!); unittest.expect( o.selector!, unittest.equals('foo'), @@ -2538,12 +2564,12 @@ void checkMethodPolicy(api.MethodPolicy o) { buildCounterMethodPolicy--; } -core.List buildUnnamed56() => [ +core.List buildUnnamed57() => [ 'foo', 'foo', ]; -void checkUnnamed56(core.List o) { +void checkUnnamed57(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -2560,7 +2586,7 @@ api.MethodSettings buildMethodSettings() { final o = api.MethodSettings(); buildCounterMethodSettings++; if (buildCounterMethodSettings < 3) { - o.autoPopulatedFields = buildUnnamed56(); + o.autoPopulatedFields = buildUnnamed57(); o.longRunning = buildLongRunning(); o.selector = 'foo'; } @@ -2571,7 +2597,7 @@ api.MethodSettings buildMethodSettings() { void checkMethodSettings(api.MethodSettings o) { buildCounterMethodSettings++; if (buildCounterMethodSettings < 3) { - checkUnnamed56(o.autoPopulatedFields!); + checkUnnamed57(o.autoPopulatedFields!); checkLongRunning(o.longRunning!); unittest.expect( o.selector!, @@ -2581,23 +2607,23 @@ void checkMethodSettings(api.MethodSettings o) { buildCounterMethodSettings--; } -core.List buildUnnamed57() => [ +core.List buildUnnamed58() => [ buildLabelDescriptor(), buildLabelDescriptor(), ]; -void checkUnnamed57(core.List o) { +void checkUnnamed58(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkLabelDescriptor(o[0]); checkLabelDescriptor(o[1]); } -core.List buildUnnamed58() => [ +core.List buildUnnamed59() => [ 'foo', 'foo', ]; -void checkUnnamed58(core.List o) { +void checkUnnamed59(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -2616,11 +2642,11 @@ api.MetricDescriptor buildMetricDescriptor() { if (buildCounterMetricDescriptor < 3) { o.description = 'foo'; o.displayName = 'foo'; - o.labels = buildUnnamed57(); + o.labels = buildUnnamed58(); o.launchStage = 'foo'; o.metadata = buildMetricDescriptorMetadata(); o.metricKind = 'foo'; - o.monitoredResourceTypes = buildUnnamed58(); + o.monitoredResourceTypes = buildUnnamed59(); o.name = 'foo'; 
o.type = 'foo'; o.unit = 'foo'; @@ -2641,7 +2667,7 @@ void checkMetricDescriptor(api.MetricDescriptor o) { o.displayName!, unittest.equals('foo'), ); - checkUnnamed57(o.labels!); + checkUnnamed58(o.labels!); unittest.expect( o.launchStage!, unittest.equals('foo'), @@ -2651,7 +2677,7 @@ void checkMetricDescriptor(api.MetricDescriptor o) { o.metricKind!, unittest.equals('foo'), ); - checkUnnamed58(o.monitoredResourceTypes!); + checkUnnamed59(o.monitoredResourceTypes!); unittest.expect( o.name!, unittest.equals('foo'), @@ -2672,12 +2698,12 @@ void checkMetricDescriptor(api.MetricDescriptor o) { buildCounterMetricDescriptor--; } -core.List buildUnnamed59() => [ +core.List buildUnnamed60() => [ 'foo', 'foo', ]; -void checkUnnamed59(core.List o) { +void checkUnnamed60(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -2697,7 +2723,7 @@ api.MetricDescriptorMetadata buildMetricDescriptorMetadata() { o.ingestDelay = 'foo'; o.launchStage = 'foo'; o.samplePeriod = 'foo'; - o.timeSeriesResourceHierarchyLevel = buildUnnamed59(); + o.timeSeriesResourceHierarchyLevel = buildUnnamed60(); } buildCounterMetricDescriptorMetadata--; return o; @@ -2718,17 +2744,17 @@ void checkMetricDescriptorMetadata(api.MetricDescriptorMetadata o) { o.samplePeriod!, unittest.equals('foo'), ); - checkUnnamed59(o.timeSeriesResourceHierarchyLevel!); + checkUnnamed60(o.timeSeriesResourceHierarchyLevel!); } buildCounterMetricDescriptorMetadata--; } -core.Map buildUnnamed60() => { +core.Map buildUnnamed61() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed60(core.Map o) { +void checkUnnamed61(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -2745,7 +2771,7 @@ api.MetricRule buildMetricRule() { final o = api.MetricRule(); buildCounterMetricRule++; if (buildCounterMetricRule < 3) { - o.metricCosts = buildUnnamed60(); + o.metricCosts = buildUnnamed61(); o.selector = 'foo'; } buildCounterMetricRule--; @@ -2755,7 +2781,7 @@ api.MetricRule buildMetricRule() { void checkMetricRule(api.MetricRule o) { buildCounterMetricRule++; if (buildCounterMetricRule < 3) { - checkUnnamed60(o.metricCosts!); + checkUnnamed61(o.metricCosts!); unittest.expect( o.selector!, unittest.equals('foo'), @@ -2791,12 +2817,12 @@ void checkMixin(api.Mixin o) { buildCounterMixin--; } -core.List buildUnnamed61() => [ +core.List buildUnnamed62() => [ buildLabelDescriptor(), buildLabelDescriptor(), ]; -void checkUnnamed61(core.List o) { +void checkUnnamed62(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkLabelDescriptor(o[0]); checkLabelDescriptor(o[1]); @@ -2809,7 +2835,7 @@ api.MonitoredResourceDescriptor buildMonitoredResourceDescriptor() { if (buildCounterMonitoredResourceDescriptor < 3) { o.description = 'foo'; o.displayName = 'foo'; - o.labels = buildUnnamed61(); + o.labels = buildUnnamed62(); o.launchStage = 'foo'; o.name = 'foo'; o.type = 'foo'; @@ -2829,7 +2855,7 @@ void checkMonitoredResourceDescriptor(api.MonitoredResourceDescriptor o) { o.displayName!, unittest.equals('foo'), ); - checkUnnamed61(o.labels!); + checkUnnamed62(o.labels!); unittest.expect( o.launchStage!, unittest.equals('foo'), @@ -2846,23 +2872,23 @@ void checkMonitoredResourceDescriptor(api.MonitoredResourceDescriptor o) { buildCounterMonitoredResourceDescriptor--; } -core.List buildUnnamed62() => [ +core.List buildUnnamed63() => [ buildMonitoringDestination(), buildMonitoringDestination(), ]; -void checkUnnamed62(core.List o) { +void checkUnnamed63(core.List o) { unittest.expect(o, unittest.hasLength(2)); 
checkMonitoringDestination(o[0]); checkMonitoringDestination(o[1]); } -core.List buildUnnamed63() => [ +core.List buildUnnamed64() => [ buildMonitoringDestination(), buildMonitoringDestination(), ]; -void checkUnnamed63(core.List o) { +void checkUnnamed64(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkMonitoringDestination(o[0]); checkMonitoringDestination(o[1]); @@ -2873,8 +2899,8 @@ api.Monitoring buildMonitoring() { final o = api.Monitoring(); buildCounterMonitoring++; if (buildCounterMonitoring < 3) { - o.consumerDestinations = buildUnnamed62(); - o.producerDestinations = buildUnnamed63(); + o.consumerDestinations = buildUnnamed63(); + o.producerDestinations = buildUnnamed64(); } buildCounterMonitoring--; return o; @@ -2883,18 +2909,18 @@ api.Monitoring buildMonitoring() { void checkMonitoring(api.Monitoring o) { buildCounterMonitoring++; if (buildCounterMonitoring < 3) { - checkUnnamed62(o.consumerDestinations!); - checkUnnamed63(o.producerDestinations!); + checkUnnamed63(o.consumerDestinations!); + checkUnnamed64(o.producerDestinations!); } buildCounterMonitoring--; } -core.List buildUnnamed64() => [ +core.List buildUnnamed65() => [ 'foo', 'foo', ]; -void checkUnnamed64(core.List o) { +void checkUnnamed65(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -2911,7 +2937,7 @@ api.MonitoringDestination buildMonitoringDestination() { final o = api.MonitoringDestination(); buildCounterMonitoringDestination++; if (buildCounterMonitoringDestination < 3) { - o.metrics = buildUnnamed64(); + o.metrics = buildUnnamed65(); o.monitoredResource = 'foo'; } buildCounterMonitoringDestination--; @@ -2921,7 +2947,7 @@ api.MonitoringDestination buildMonitoringDestination() { void checkMonitoringDestination(api.MonitoringDestination o) { buildCounterMonitoringDestination++; if (buildCounterMonitoringDestination < 3) { - checkUnnamed64(o.metrics!); + checkUnnamed65(o.metrics!); unittest.expect( o.monitoredResource!, unittest.equals('foo'), @@ -2971,7 +2997,7 @@ void checkOAuthRequirements(api.OAuthRequirements o) { buildCounterOAuthRequirements--; } -core.Map buildUnnamed65() => { +core.Map buildUnnamed66() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -2984,7 +3010,7 @@ core.Map buildUnnamed65() => { }, }; -void checkUnnamed65(core.Map o) { +void checkUnnamed66(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted5 = (o['x']!) as core.Map; unittest.expect(casted5, unittest.hasLength(3)); @@ -3016,7 +3042,7 @@ void checkUnnamed65(core.Map o) { ); } -core.Map buildUnnamed66() => { +core.Map buildUnnamed67() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -3029,7 +3055,7 @@ core.Map buildUnnamed66() => { }, }; -void checkUnnamed66(core.Map o) { +void checkUnnamed67(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted7 = (o['x']!) 
as core.Map; unittest.expect(casted7, unittest.hasLength(3)); @@ -3068,9 +3094,9 @@ api.Operation buildOperation() { if (buildCounterOperation < 3) { o.done = true; o.error = buildStatus(); - o.metadata = buildUnnamed65(); + o.metadata = buildUnnamed66(); o.name = 'foo'; - o.response = buildUnnamed66(); + o.response = buildUnnamed67(); } buildCounterOperation--; return o; @@ -3081,17 +3107,17 @@ void checkOperation(api.Operation o) { if (buildCounterOperation < 3) { unittest.expect(o.done!, unittest.isTrue); checkStatus(o.error!); - checkUnnamed65(o.metadata!); + checkUnnamed66(o.metadata!); unittest.expect( o.name!, unittest.equals('foo'), ); - checkUnnamed66(o.response!); + checkUnnamed67(o.response!); } buildCounterOperation--; } -core.Map buildUnnamed67() => { +core.Map buildUnnamed68() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -3104,7 +3130,7 @@ core.Map buildUnnamed67() => { }, }; -void checkUnnamed67(core.Map o) { +void checkUnnamed68(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted9 = (o['x']!) as core.Map; unittest.expect(casted9, unittest.hasLength(3)); @@ -3142,7 +3168,7 @@ api.Option buildOption() { buildCounterOption++; if (buildCounterOption < 3) { o.name = 'foo'; - o.value = buildUnnamed67(); + o.value = buildUnnamed68(); } buildCounterOption--; return o; @@ -3155,17 +3181,17 @@ void checkOption(api.Option o) { o.name!, unittest.equals('foo'), ); - checkUnnamed67(o.value!); + checkUnnamed68(o.value!); } buildCounterOption--; } -core.List buildUnnamed68() => [ +core.List buildUnnamed69() => [ buildPage(), buildPage(), ]; -void checkUnnamed68(core.List o) { +void checkUnnamed69(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkPage(o[0]); checkPage(o[1]); @@ -3178,7 +3204,7 @@ api.Page buildPage() { if (buildCounterPage < 3) { o.content = 'foo'; o.name = 'foo'; - o.subpages = buildUnnamed68(); + o.subpages = buildUnnamed69(); } buildCounterPage--; return o; @@ -3195,7 +3221,7 @@ void checkPage(api.Page o) { o.name!, unittest.equals('foo'), ); - checkUnnamed68(o.subpages!); + checkUnnamed69(o.subpages!); } buildCounterPage--; } @@ -3219,23 +3245,23 @@ void checkPhpSettings(api.PhpSettings o) { buildCounterPhpSettings--; } -core.List buildUnnamed69() => [ +core.List buildUnnamed70() => [ buildAuditConfig(), buildAuditConfig(), ]; -void checkUnnamed69(core.List o) { +void checkUnnamed70(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkAuditConfig(o[0]); checkAuditConfig(o[1]); } -core.List buildUnnamed70() => [ +core.List buildUnnamed71() => [ buildBinding(), buildBinding(), ]; -void checkUnnamed70(core.List o) { +void checkUnnamed71(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkBinding(o[0]); checkBinding(o[1]); @@ -3246,8 +3272,8 @@ api.Policy buildPolicy() { final o = api.Policy(); buildCounterPolicy++; if (buildCounterPolicy < 3) { - o.auditConfigs = buildUnnamed69(); - o.bindings = buildUnnamed70(); + o.auditConfigs = buildUnnamed70(); + o.bindings = buildUnnamed71(); o.etag = 'foo'; o.version = 42; } @@ -3258,8 +3284,8 @@ api.Policy buildPolicy() { void checkPolicy(api.Policy o) { buildCounterPolicy++; if (buildCounterPolicy < 3) { - checkUnnamed69(o.auditConfigs!); - checkUnnamed70(o.bindings!); + checkUnnamed70(o.auditConfigs!); + checkUnnamed71(o.bindings!); unittest.expect( o.etag!, unittest.equals('foo'), @@ -3272,12 +3298,12 @@ void checkPolicy(api.Policy o) { buildCounterPolicy--; } -core.List buildUnnamed71() => [ +core.List buildUnnamed72() => [ 'foo', 'foo', ]; -void checkUnnamed71(core.List o) { 
+void checkUnnamed72(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -3289,23 +3315,23 @@ void checkUnnamed71(core.List o) { ); } -core.List buildUnnamed72() => [ +core.List buildUnnamed73() => [ buildClientLibrarySettings(), buildClientLibrarySettings(), ]; -void checkUnnamed72(core.List o) { +void checkUnnamed73(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkClientLibrarySettings(o[0]); checkClientLibrarySettings(o[1]); } -core.List buildUnnamed73() => [ +core.List buildUnnamed74() => [ buildMethodSettings(), buildMethodSettings(), ]; -void checkUnnamed73(core.List o) { +void checkUnnamed74(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkMethodSettings(o[0]); checkMethodSettings(o[1]); @@ -3317,12 +3343,12 @@ api.Publishing buildPublishing() { buildCounterPublishing++; if (buildCounterPublishing < 3) { o.apiShortName = 'foo'; - o.codeownerGithubTeams = buildUnnamed71(); + o.codeownerGithubTeams = buildUnnamed72(); o.docTagPrefix = 'foo'; o.documentationUri = 'foo'; o.githubLabel = 'foo'; - o.librarySettings = buildUnnamed72(); - o.methodSettings = buildUnnamed73(); + o.librarySettings = buildUnnamed73(); + o.methodSettings = buildUnnamed74(); o.newIssueUri = 'foo'; o.organization = 'foo'; o.protoReferenceDocumentationUri = 'foo'; @@ -3339,7 +3365,7 @@ void checkPublishing(api.Publishing o) { o.apiShortName!, unittest.equals('foo'), ); - checkUnnamed71(o.codeownerGithubTeams!); + checkUnnamed72(o.codeownerGithubTeams!); unittest.expect( o.docTagPrefix!, unittest.equals('foo'), @@ -3352,8 +3378,8 @@ void checkPublishing(api.Publishing o) { o.githubLabel!, unittest.equals('foo'), ); - checkUnnamed72(o.librarySettings!); - checkUnnamed73(o.methodSettings!); + checkUnnamed73(o.librarySettings!); + checkUnnamed74(o.methodSettings!); unittest.expect( o.newIssueUri!, unittest.equals('foo'), @@ -3395,23 +3421,23 @@ void checkPythonSettings(api.PythonSettings o) { buildCounterPythonSettings--; } -core.List buildUnnamed74() => [ +core.List buildUnnamed75() => [ buildQuotaLimit(), buildQuotaLimit(), ]; -void checkUnnamed74(core.List o) { +void checkUnnamed75(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkQuotaLimit(o[0]); checkQuotaLimit(o[1]); } -core.List buildUnnamed75() => [ +core.List buildUnnamed76() => [ buildMetricRule(), buildMetricRule(), ]; -void checkUnnamed75(core.List o) { +void checkUnnamed76(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkMetricRule(o[0]); checkMetricRule(o[1]); @@ -3422,8 +3448,8 @@ api.Quota buildQuota() { final o = api.Quota(); buildCounterQuota++; if (buildCounterQuota < 3) { - o.limits = buildUnnamed74(); - o.metricRules = buildUnnamed75(); + o.limits = buildUnnamed75(); + o.metricRules = buildUnnamed76(); } buildCounterQuota--; return o; @@ -3432,18 +3458,18 @@ api.Quota buildQuota() { void checkQuota(api.Quota o) { buildCounterQuota++; if (buildCounterQuota < 3) { - checkUnnamed74(o.limits!); - checkUnnamed75(o.metricRules!); + checkUnnamed75(o.limits!); + checkUnnamed76(o.metricRules!); } buildCounterQuota--; } -core.Map buildUnnamed76() => { +core.Map buildUnnamed77() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed76(core.Map o) { +void checkUnnamed77(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -3469,7 +3495,7 @@ api.QuotaLimit buildQuotaLimit() { o.metric = 'foo'; o.name = 'foo'; o.unit = 'foo'; - o.values = buildUnnamed76(); + o.values = buildUnnamed77(); } buildCounterQuotaLimit--; return o; @@ -3514,7 +3540,7 @@ 
void checkQuotaLimit(api.QuotaLimit o) { o.unit!, unittest.equals('foo'), ); - checkUnnamed76(o.values!); + checkUnnamed77(o.values!); } buildCounterQuotaLimit--; } @@ -3584,12 +3610,12 @@ void checkRubySettings(api.RubySettings o) { buildCounterRubySettings--; } -core.List buildUnnamed77() => [ +core.List buildUnnamed78() => [ 'foo', 'foo', ]; -void checkUnnamed77(core.List o) { +void checkUnnamed78(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -3606,7 +3632,7 @@ api.SelectiveGapicGeneration buildSelectiveGapicGeneration() { final o = api.SelectiveGapicGeneration(); buildCounterSelectiveGapicGeneration++; if (buildCounterSelectiveGapicGeneration < 3) { - o.methods = buildUnnamed77(); + o.methods = buildUnnamed78(); } buildCounterSelectiveGapicGeneration--; return o; @@ -3615,94 +3641,94 @@ api.SelectiveGapicGeneration buildSelectiveGapicGeneration() { void checkSelectiveGapicGeneration(api.SelectiveGapicGeneration o) { buildCounterSelectiveGapicGeneration++; if (buildCounterSelectiveGapicGeneration < 3) { - checkUnnamed77(o.methods!); + checkUnnamed78(o.methods!); } buildCounterSelectiveGapicGeneration--; } -core.List buildUnnamed78() => [ +core.List buildUnnamed79() => [ buildApi(), buildApi(), ]; -void checkUnnamed78(core.List o) { +void checkUnnamed79(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkApi(o[0]); checkApi(o[1]); } -core.List buildUnnamed79() => [ +core.List buildUnnamed80() => [ buildEndpoint(), buildEndpoint(), ]; -void checkUnnamed79(core.List o) { +void checkUnnamed80(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkEndpoint(o[0]); checkEndpoint(o[1]); } -core.List buildUnnamed80() => [ +core.List buildUnnamed81() => [ buildEnum(), buildEnum(), ]; -void checkUnnamed80(core.List o) { +void checkUnnamed81(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkEnum(o[0]); checkEnum(o[1]); } -core.List buildUnnamed81() => [ +core.List buildUnnamed82() => [ buildLogDescriptor(), buildLogDescriptor(), ]; -void checkUnnamed81(core.List o) { +void checkUnnamed82(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkLogDescriptor(o[0]); checkLogDescriptor(o[1]); } -core.List buildUnnamed82() => [ +core.List buildUnnamed83() => [ buildMetricDescriptor(), buildMetricDescriptor(), ]; -void checkUnnamed82(core.List o) { +void checkUnnamed83(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkMetricDescriptor(o[0]); checkMetricDescriptor(o[1]); } -core.List buildUnnamed83() => [ +core.List buildUnnamed84() => [ buildMonitoredResourceDescriptor(), buildMonitoredResourceDescriptor(), ]; -void checkUnnamed83(core.List o) { +void checkUnnamed84(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkMonitoredResourceDescriptor(o[0]); checkMonitoredResourceDescriptor(o[1]); } -core.List buildUnnamed84() => [ +core.List buildUnnamed85() => [ buildType(), buildType(), ]; -void checkUnnamed84(core.List o) { +void checkUnnamed85(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkType(o[0]); checkType(o[1]); } -core.List buildUnnamed85() => [ +core.List buildUnnamed86() => [ buildType(), buildType(), ]; -void checkUnnamed85(core.List o) { +void checkUnnamed86(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkType(o[0]); checkType(o[1]); @@ -3713,7 +3739,7 @@ api.Service buildService() { final o = api.Service(); buildCounterService++; if (buildCounterService < 3) { - o.apis = buildUnnamed78(); + o.apis = buildUnnamed79(); o.authentication = buildAuthentication(); 
o.backend = buildBackend(); o.billing = buildBilling(); @@ -3722,14 +3748,14 @@ api.Service buildService() { o.control = buildControl(); o.customError = buildCustomError(); o.documentation = buildDocumentation(); - o.endpoints = buildUnnamed79(); - o.enums = buildUnnamed80(); + o.endpoints = buildUnnamed80(); + o.enums = buildUnnamed81(); o.http = buildHttp(); o.id = 'foo'; o.logging = buildLogging(); - o.logs = buildUnnamed81(); - o.metrics = buildUnnamed82(); - o.monitoredResources = buildUnnamed83(); + o.logs = buildUnnamed82(); + o.metrics = buildUnnamed83(); + o.monitoredResources = buildUnnamed84(); o.monitoring = buildMonitoring(); o.name = 'foo'; o.producerProjectId = 'foo'; @@ -3737,9 +3763,9 @@ api.Service buildService() { o.quota = buildQuota(); o.sourceInfo = buildSourceInfo(); o.systemParameters = buildSystemParameters(); - o.systemTypes = buildUnnamed84(); + o.systemTypes = buildUnnamed85(); o.title = 'foo'; - o.types = buildUnnamed85(); + o.types = buildUnnamed86(); o.usage = buildUsage(); } buildCounterService--; @@ -3749,7 +3775,7 @@ api.Service buildService() { void checkService(api.Service o) { buildCounterService++; if (buildCounterService < 3) { - checkUnnamed78(o.apis!); + checkUnnamed79(o.apis!); checkAuthentication(o.authentication!); checkBackend(o.backend!); checkBilling(o.billing!); @@ -3761,17 +3787,17 @@ void checkService(api.Service o) { checkControl(o.control!); checkCustomError(o.customError!); checkDocumentation(o.documentation!); - checkUnnamed79(o.endpoints!); - checkUnnamed80(o.enums!); + checkUnnamed80(o.endpoints!); + checkUnnamed81(o.enums!); checkHttp(o.http!); unittest.expect( o.id!, unittest.equals('foo'), ); checkLogging(o.logging!); - checkUnnamed81(o.logs!); - checkUnnamed82(o.metrics!); - checkUnnamed83(o.monitoredResources!); + checkUnnamed82(o.logs!); + checkUnnamed83(o.metrics!); + checkUnnamed84(o.monitoredResources!); checkMonitoring(o.monitoring!); unittest.expect( o.name!, @@ -3785,12 +3811,12 @@ void checkService(api.Service o) { checkQuota(o.quota!); checkSourceInfo(o.sourceInfo!); checkSystemParameters(o.systemParameters!); - checkUnnamed84(o.systemTypes!); + checkUnnamed85(o.systemTypes!); unittest.expect( o.title!, unittest.equals('foo'), ); - checkUnnamed85(o.types!); + checkUnnamed86(o.types!); checkUsage(o.usage!); } buildCounterService--; @@ -3842,7 +3868,7 @@ void checkSourceContext(api.SourceContext o) { buildCounterSourceContext--; } -core.Map buildUnnamed86() => { +core.Map buildUnnamed87() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -3855,7 +3881,7 @@ core.Map buildUnnamed86() => { }, }; -void checkUnnamed86(core.Map o) { +void checkUnnamed87(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted11 = (o['x']!) 
as core.Map; unittest.expect(casted11, unittest.hasLength(3)); @@ -3887,15 +3913,15 @@ void checkUnnamed86(core.Map o) { ); } -core.List> buildUnnamed87() => [ - buildUnnamed86(), - buildUnnamed86(), +core.List> buildUnnamed88() => [ + buildUnnamed87(), + buildUnnamed87(), ]; -void checkUnnamed87(core.List> o) { +void checkUnnamed88(core.List> o) { unittest.expect(o, unittest.hasLength(2)); - checkUnnamed86(o[0]); - checkUnnamed86(o[1]); + checkUnnamed87(o[0]); + checkUnnamed87(o[1]); } core.int buildCounterSourceInfo = 0; @@ -3903,7 +3929,7 @@ api.SourceInfo buildSourceInfo() { final o = api.SourceInfo(); buildCounterSourceInfo++; if (buildCounterSourceInfo < 3) { - o.sourceFiles = buildUnnamed87(); + o.sourceFiles = buildUnnamed88(); } buildCounterSourceInfo--; return o; @@ -3912,12 +3938,12 @@ api.SourceInfo buildSourceInfo() { void checkSourceInfo(api.SourceInfo o) { buildCounterSourceInfo++; if (buildCounterSourceInfo < 3) { - checkUnnamed87(o.sourceFiles!); + checkUnnamed88(o.sourceFiles!); } buildCounterSourceInfo--; } -core.Map buildUnnamed88() => { +core.Map buildUnnamed89() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -3930,7 +3956,7 @@ core.Map buildUnnamed88() => { }, }; -void checkUnnamed88(core.Map o) { +void checkUnnamed89(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted13 = (o['x']!) as core.Map; unittest.expect(casted13, unittest.hasLength(3)); @@ -3962,15 +3988,15 @@ void checkUnnamed88(core.Map o) { ); } -core.List> buildUnnamed89() => [ - buildUnnamed88(), - buildUnnamed88(), +core.List> buildUnnamed90() => [ + buildUnnamed89(), + buildUnnamed89(), ]; -void checkUnnamed89(core.List> o) { +void checkUnnamed90(core.List> o) { unittest.expect(o, unittest.hasLength(2)); - checkUnnamed88(o[0]); - checkUnnamed88(o[1]); + checkUnnamed89(o[0]); + checkUnnamed89(o[1]); } core.int buildCounterStatus = 0; @@ -3979,7 +4005,7 @@ api.Status buildStatus() { buildCounterStatus++; if (buildCounterStatus < 3) { o.code = 42; - o.details = buildUnnamed89(); + o.details = buildUnnamed90(); o.message = 'foo'; } buildCounterStatus--; @@ -3993,7 +4019,7 @@ void checkStatus(api.Status o) { o.code!, unittest.equals(42), ); - checkUnnamed89(o.details!); + checkUnnamed90(o.details!); unittest.expect( o.message!, unittest.equals('foo'), @@ -4055,12 +4081,12 @@ void checkSystemParameter(api.SystemParameter o) { buildCounterSystemParameter--; } -core.List buildUnnamed90() => [ +core.List buildUnnamed91() => [ buildSystemParameter(), buildSystemParameter(), ]; -void checkUnnamed90(core.List o) { +void checkUnnamed91(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkSystemParameter(o[0]); checkSystemParameter(o[1]); @@ -4071,7 +4097,7 @@ api.SystemParameterRule buildSystemParameterRule() { final o = api.SystemParameterRule(); buildCounterSystemParameterRule++; if (buildCounterSystemParameterRule < 3) { - o.parameters = buildUnnamed90(); + o.parameters = buildUnnamed91(); o.selector = 'foo'; } buildCounterSystemParameterRule--; @@ -4081,7 +4107,7 @@ api.SystemParameterRule buildSystemParameterRule() { void checkSystemParameterRule(api.SystemParameterRule o) { buildCounterSystemParameterRule++; if (buildCounterSystemParameterRule < 3) { - checkUnnamed90(o.parameters!); + checkUnnamed91(o.parameters!); unittest.expect( o.selector!, unittest.equals('foo'), @@ -4090,12 +4116,12 @@ void checkSystemParameterRule(api.SystemParameterRule o) { buildCounterSystemParameterRule--; } -core.List buildUnnamed91() => [ +core.List buildUnnamed92() => [ buildSystemParameterRule(), 
buildSystemParameterRule(), ]; -void checkUnnamed91(core.List o) { +void checkUnnamed92(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkSystemParameterRule(o[0]); checkSystemParameterRule(o[1]); @@ -4106,7 +4132,7 @@ api.SystemParameters buildSystemParameters() { final o = api.SystemParameters(); buildCounterSystemParameters++; if (buildCounterSystemParameters < 3) { - o.rules = buildUnnamed91(); + o.rules = buildUnnamed92(); } buildCounterSystemParameters--; return o; @@ -4115,17 +4141,17 @@ api.SystemParameters buildSystemParameters() { void checkSystemParameters(api.SystemParameters o) { buildCounterSystemParameters++; if (buildCounterSystemParameters < 3) { - checkUnnamed91(o.rules!); + checkUnnamed92(o.rules!); } buildCounterSystemParameters--; } -core.List buildUnnamed92() => [ +core.List buildUnnamed93() => [ 'foo', 'foo', ]; -void checkUnnamed92(core.List o) { +void checkUnnamed93(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -4142,7 +4168,7 @@ api.TestIamPermissionsRequest buildTestIamPermissionsRequest() { final o = api.TestIamPermissionsRequest(); buildCounterTestIamPermissionsRequest++; if (buildCounterTestIamPermissionsRequest < 3) { - o.permissions = buildUnnamed92(); + o.permissions = buildUnnamed93(); } buildCounterTestIamPermissionsRequest--; return o; @@ -4151,17 +4177,17 @@ api.TestIamPermissionsRequest buildTestIamPermissionsRequest() { void checkTestIamPermissionsRequest(api.TestIamPermissionsRequest o) { buildCounterTestIamPermissionsRequest++; if (buildCounterTestIamPermissionsRequest < 3) { - checkUnnamed92(o.permissions!); + checkUnnamed93(o.permissions!); } buildCounterTestIamPermissionsRequest--; } -core.List buildUnnamed93() => [ +core.List buildUnnamed94() => [ 'foo', 'foo', ]; -void checkUnnamed93(core.List o) { +void checkUnnamed94(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -4178,7 +4204,7 @@ api.TestIamPermissionsResponse buildTestIamPermissionsResponse() { final o = api.TestIamPermissionsResponse(); buildCounterTestIamPermissionsResponse++; if (buildCounterTestIamPermissionsResponse < 3) { - o.permissions = buildUnnamed93(); + o.permissions = buildUnnamed94(); } buildCounterTestIamPermissionsResponse--; return o; @@ -4187,17 +4213,17 @@ api.TestIamPermissionsResponse buildTestIamPermissionsResponse() { void checkTestIamPermissionsResponse(api.TestIamPermissionsResponse o) { buildCounterTestIamPermissionsResponse++; if (buildCounterTestIamPermissionsResponse < 3) { - checkUnnamed93(o.permissions!); + checkUnnamed94(o.permissions!); } buildCounterTestIamPermissionsResponse--; } -core.Map buildUnnamed94() => { +core.Map buildUnnamed95() => { 'x': 42.0, 'y': 42.0, }; -void checkUnnamed94(core.Map o) { +void checkUnnamed95(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -4214,7 +4240,7 @@ api.TrafficPercentStrategy buildTrafficPercentStrategy() { final o = api.TrafficPercentStrategy(); buildCounterTrafficPercentStrategy++; if (buildCounterTrafficPercentStrategy < 3) { - o.percentages = buildUnnamed94(); + o.percentages = buildUnnamed95(); } buildCounterTrafficPercentStrategy--; return o; @@ -4223,28 +4249,28 @@ api.TrafficPercentStrategy buildTrafficPercentStrategy() { void checkTrafficPercentStrategy(api.TrafficPercentStrategy o) { buildCounterTrafficPercentStrategy++; if (buildCounterTrafficPercentStrategy < 3) { - checkUnnamed94(o.percentages!); + checkUnnamed95(o.percentages!); } buildCounterTrafficPercentStrategy--; } 
-core.List buildUnnamed95() => [ +core.List buildUnnamed96() => [ buildField(), buildField(), ]; -void checkUnnamed95(core.List o) { +void checkUnnamed96(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkField(o[0]); checkField(o[1]); } -core.List buildUnnamed96() => [ +core.List buildUnnamed97() => [ 'foo', 'foo', ]; -void checkUnnamed96(core.List o) { +void checkUnnamed97(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -4256,12 +4282,12 @@ void checkUnnamed96(core.List o) { ); } -core.List buildUnnamed97() => [ +core.List buildUnnamed98() => [ buildOption(), buildOption(), ]; -void checkUnnamed97(core.List o) { +void checkUnnamed98(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkOption(o[0]); checkOption(o[1]); @@ -4273,10 +4299,10 @@ api.Type buildType() { buildCounterType++; if (buildCounterType < 3) { o.edition = 'foo'; - o.fields = buildUnnamed95(); + o.fields = buildUnnamed96(); o.name = 'foo'; - o.oneofs = buildUnnamed96(); - o.options = buildUnnamed97(); + o.oneofs = buildUnnamed97(); + o.options = buildUnnamed98(); o.sourceContext = buildSourceContext(); o.syntax = 'foo'; } @@ -4291,13 +4317,13 @@ void checkType(api.Type o) { o.edition!, unittest.equals('foo'), ); - checkUnnamed95(o.fields!); + checkUnnamed96(o.fields!); unittest.expect( o.name!, unittest.equals('foo'), ); - checkUnnamed96(o.oneofs!); - checkUnnamed97(o.options!); + checkUnnamed97(o.oneofs!); + checkUnnamed98(o.options!); checkSourceContext(o.sourceContext!); unittest.expect( o.syntax!, @@ -4307,12 +4333,12 @@ void checkType(api.Type o) { buildCounterType--; } -core.List buildUnnamed98() => [ +core.List buildUnnamed99() => [ 'foo', 'foo', ]; -void checkUnnamed98(core.List o) { +void checkUnnamed99(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -4324,12 +4350,12 @@ void checkUnnamed98(core.List o) { ); } -core.List buildUnnamed99() => [ +core.List buildUnnamed100() => [ buildUsageRule(), buildUsageRule(), ]; -void checkUnnamed99(core.List o) { +void checkUnnamed100(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkUsageRule(o[0]); checkUsageRule(o[1]); @@ -4341,8 +4367,8 @@ api.Usage buildUsage() { buildCounterUsage++; if (buildCounterUsage < 3) { o.producerNotificationChannel = 'foo'; - o.requirements = buildUnnamed98(); - o.rules = buildUnnamed99(); + o.requirements = buildUnnamed99(); + o.rules = buildUnnamed100(); } buildCounterUsage--; return o; @@ -4355,8 +4381,8 @@ void checkUsage(api.Usage o) { o.producerNotificationChannel!, unittest.equals('foo'), ); - checkUnnamed98(o.requirements!); - checkUnnamed99(o.rules!); + checkUnnamed99(o.requirements!); + checkUnnamed100(o.rules!); } buildCounterUsage--; } diff --git a/generated/googleapis/test/serviceusage/v1_test.dart b/generated/googleapis/test/serviceusage/v1_test.dart index 89d85e18c..83a7f5091 100644 --- a/generated/googleapis/test/serviceusage/v1_test.dart +++ b/generated/googleapis/test/serviceusage/v1_test.dart @@ -406,6 +406,7 @@ api.Documentation buildDocumentation() { final o = api.Documentation(); buildCounterDocumentation++; if (buildCounterDocumentation < 3) { + o.additionalIamInfo = 'foo'; o.documentationRootUrl = 'foo'; o.overview = 'foo'; o.pages = buildUnnamed9(); @@ -421,6 +422,10 @@ api.Documentation buildDocumentation() { void checkDocumentation(api.Documentation o) { buildCounterDocumentation++; if (buildCounterDocumentation < 3) { + unittest.expect( + o.additionalIamInfo!, + unittest.equals('foo'), + ); unittest.expect( 
o.documentationRootUrl!, unittest.equals('foo'), diff --git a/generated/googleapis/test/sheets/v4_test.dart b/generated/googleapis/test/sheets/v4_test.dart index 958e27dd4..c60ba5ba5 100644 --- a/generated/googleapis/test/sheets/v4_test.dart +++ b/generated/googleapis/test/sheets/v4_test.dart @@ -6504,6 +6504,7 @@ api.SetDataValidationRequest buildSetDataValidationRequest() { final o = api.SetDataValidationRequest(); buildCounterSetDataValidationRequest++; if (buildCounterSetDataValidationRequest < 3) { + o.filteredRowsIncluded = true; o.range = buildGridRange(); o.rule = buildDataValidationRule(); } @@ -6514,6 +6515,7 @@ api.SetDataValidationRequest buildSetDataValidationRequest() { void checkSetDataValidationRequest(api.SetDataValidationRequest o) { buildCounterSetDataValidationRequest++; if (buildCounterSetDataValidationRequest < 3) { + unittest.expect(o.filteredRowsIncluded!, unittest.isTrue); checkGridRange(o.range!); checkDataValidationRule(o.rule!); } diff --git a/generated/googleapis/test/spanner/v1_test.dart b/generated/googleapis/test/spanner/v1_test.dart index 4dbc23d5f..a253d9285 100644 --- a/generated/googleapis/test/spanner/v1_test.dart +++ b/generated/googleapis/test/spanner/v1_test.dart @@ -2016,6 +2016,7 @@ api.Instance buildInstance() { o.autoscalingConfig = buildAutoscalingConfig(); o.config = 'foo'; o.createTime = 'foo'; + o.defaultBackupScheduleType = 'foo'; o.displayName = 'foo'; o.edition = 'foo'; o.endpointUris = buildUnnamed25(); @@ -2045,6 +2046,10 @@ void checkInstance(api.Instance o) { o.createTime!, unittest.equals('foo'), ); + unittest.expect( + o.defaultBackupScheduleType!, + unittest.equals('foo'), + ); unittest.expect( o.displayName!, unittest.equals('foo'), diff --git a/generated/googleapis/test/storage/v1_test.dart b/generated/googleapis/test/storage/v1_test.dart index be9e7d9e6..c3ffe8a93 100644 --- a/generated/googleapis/test/storage/v1_test.dart +++ b/generated/googleapis/test/storage/v1_test.dart @@ -2549,6 +2549,7 @@ api.Object buildObject() { o.temporaryHold = true; o.timeCreated = core.DateTime.parse('2002-02-27T14:01:02Z'); o.timeDeleted = core.DateTime.parse('2002-02-27T14:01:02Z'); + o.timeFinalized = core.DateTime.parse('2002-02-27T14:01:02Z'); o.timeStorageClassUpdated = core.DateTime.parse('2002-02-27T14:01:02Z'); o.updated = core.DateTime.parse('2002-02-27T14:01:02Z'); } @@ -2674,6 +2675,10 @@ void checkObject(api.Object o) { o.timeDeleted!, unittest.equals(core.DateTime.parse('2002-02-27T14:01:02Z')), ); + unittest.expect( + o.timeFinalized!, + unittest.equals(core.DateTime.parse('2002-02-27T14:01:02Z')), + ); unittest.expect( o.timeStorageClassUpdated!, unittest.equals(core.DateTime.parse('2002-02-27T14:01:02Z')), @@ -5773,6 +5778,7 @@ void main() { final res = api.StorageApi(mock).buckets; final arg_bucket = 'foo'; final arg_generation = 'foo'; + final arg_projection = 'foo'; final arg_userProject = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { @@ -5829,6 +5835,10 @@ void main() { queryMap['generation']!.first, unittest.equals(arg_generation), ); + unittest.expect( + queryMap['projection']!.first, + unittest.equals(arg_projection), + ); unittest.expect( queryMap['userProject']!.first, unittest.equals(arg_userProject), @@ -5841,11 +5851,14 @@ void main() { final h = { 'content-type': 'application/json; charset=utf-8', }; - final resp = ''; + final resp = convert.json.encode(buildBucket()); return async.Future.value(stringResponse(200, h, resp)); }), true); - await 
res.restore(arg_bucket, arg_generation, - userProject: arg_userProject, $fields: arg_$fields); + final response = await res.restore(arg_bucket, arg_generation, + projection: arg_projection, + userProject: arg_userProject, + $fields: arg_$fields); + checkBucket(response as api.Bucket); }); unittest.test('method--setIamPolicy', () async { @@ -9795,6 +9808,154 @@ void main() { checkObjects(response as api.Objects); }); + unittest.test('method--move', () async { + final mock = HttpServerMock(); + final res = api.StorageApi(mock).objects; + final arg_bucket = 'foo'; + final arg_sourceObject = 'foo'; + final arg_destinationObject = 'foo'; + final arg_ifGenerationMatch = 'foo'; + final arg_ifGenerationNotMatch = 'foo'; + final arg_ifMetagenerationMatch = 'foo'; + final arg_ifMetagenerationNotMatch = 'foo'; + final arg_ifSourceGenerationMatch = 'foo'; + final arg_ifSourceGenerationNotMatch = 'foo'; + final arg_ifSourceMetagenerationMatch = 'foo'; + final arg_ifSourceMetagenerationNotMatch = 'foo'; + final arg_userProject = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 11), + unittest.equals('storage/v1/'), + ); + pathOffset += 11; + unittest.expect( + path.substring(pathOffset, pathOffset + 2), + unittest.equals('b/'), + ); + pathOffset += 2; + index = path.indexOf('/o/', pathOffset); + unittest.expect(index >= 0, unittest.isTrue); + subPart = + core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); + pathOffset = index; + unittest.expect( + subPart, + unittest.equals('$arg_bucket'), + ); + unittest.expect( + path.substring(pathOffset, pathOffset + 3), + unittest.equals('/o/'), + ); + pathOffset += 3; + index = path.indexOf('/moveTo/o/', pathOffset); + unittest.expect(index >= 0, unittest.isTrue); + subPart = + core.Uri.decodeQueryComponent(path.substring(pathOffset, index)); + pathOffset = index; + unittest.expect( + subPart, + unittest.equals('$arg_sourceObject'), + ); + unittest.expect( + path.substring(pathOffset, pathOffset + 10), + unittest.equals('/moveTo/o/'), + ); + pathOffset += 10; + subPart = core.Uri.decodeQueryComponent(path.substring(pathOffset)); + pathOffset = path.length; + unittest.expect( + subPart, + unittest.equals('$arg_destinationObject'), + ); + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['ifGenerationMatch']!.first, + unittest.equals(arg_ifGenerationMatch), + ); + unittest.expect( + queryMap['ifGenerationNotMatch']!.first, + unittest.equals(arg_ifGenerationNotMatch), + ); + unittest.expect( + queryMap['ifMetagenerationMatch']!.first, + unittest.equals(arg_ifMetagenerationMatch), + ); + unittest.expect( + queryMap['ifMetagenerationNotMatch']!.first, + unittest.equals(arg_ifMetagenerationNotMatch), + ); + unittest.expect( + queryMap['ifSourceGenerationMatch']!.first, + unittest.equals(arg_ifSourceGenerationMatch), + ); + unittest.expect( + 
queryMap['ifSourceGenerationNotMatch']!.first, + unittest.equals(arg_ifSourceGenerationNotMatch), + ); + unittest.expect( + queryMap['ifSourceMetagenerationMatch']!.first, + unittest.equals(arg_ifSourceMetagenerationMatch), + ); + unittest.expect( + queryMap['ifSourceMetagenerationNotMatch']!.first, + unittest.equals(arg_ifSourceMetagenerationNotMatch), + ); + unittest.expect( + queryMap['userProject']!.first, + unittest.equals(arg_userProject), + ); + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildObject()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = await res.move( + arg_bucket, arg_sourceObject, arg_destinationObject, + ifGenerationMatch: arg_ifGenerationMatch, + ifGenerationNotMatch: arg_ifGenerationNotMatch, + ifMetagenerationMatch: arg_ifMetagenerationMatch, + ifMetagenerationNotMatch: arg_ifMetagenerationNotMatch, + ifSourceGenerationMatch: arg_ifSourceGenerationMatch, + ifSourceGenerationNotMatch: arg_ifSourceGenerationNotMatch, + ifSourceMetagenerationMatch: arg_ifSourceMetagenerationMatch, + ifSourceMetagenerationNotMatch: arg_ifSourceMetagenerationNotMatch, + userProject: arg_userProject, + $fields: arg_$fields); + checkObject(response as api.Object); + }); + unittest.test('method--patch', () async { final mock = HttpServerMock(); final res = api.StorageApi(mock).objects; diff --git a/generated/googleapis/test/tagmanager/v2_test.dart b/generated/googleapis/test/tagmanager/v2_test.dart index 86832695d..c1848ec15 100644 --- a/generated/googleapis/test/tagmanager/v2_test.dart +++ b/generated/googleapis/test/tagmanager/v2_test.dart @@ -1251,6 +1251,7 @@ api.GalleryReference buildGalleryReference() { o.owner = 'foo'; o.repository = 'foo'; o.signature = 'foo'; + o.templateDeveloperId = 'foo'; o.version = 'foo'; } buildCounterGalleryReference--; @@ -1277,6 +1278,10 @@ void checkGalleryReference(api.GalleryReference o) { o.signature!, unittest.equals('foo'), ); + unittest.expect( + o.templateDeveloperId!, + unittest.equals('foo'), + ); unittest.expect( o.version!, unittest.equals('foo'), diff --git a/generated/googleapis/test/testing/v1_test.dart b/generated/googleapis/test/testing/v1_test.dart index f9f97e4ff..895475856 100644 --- a/generated/googleapis/test/testing/v1_test.dart +++ b/generated/googleapis/test/testing/v1_test.dart @@ -805,6 +805,17 @@ void checkUnnamed23(core.List o) { ); } +core.List buildUnnamed24() => [ + buildUsesPermissionTag(), + buildUsesPermissionTag(), + ]; + +void checkUnnamed24(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkUsesPermissionTag(o[0]); + checkUsesPermissionTag(o[1]); +} + core.int buildCounterApkManifest = 0; api.ApkManifest buildApkManifest() { final o = api.ApkManifest(); @@ -820,6 +831,7 @@ api.ApkManifest buildApkManifest() { o.targetSdkVersion = 42; o.usesFeature = buildUnnamed22(); o.usesPermission = buildUnnamed23(); + o.usesPermissionTags = buildUnnamed24(); o.versionCode = 'foo'; o.versionName = 'foo'; } @@ -855,6 +867,7 @@ void checkApkManifest(api.ApkManifest o) { ); checkUnnamed22(o.usesFeature!); checkUnnamed23(o.usesPermission!); + checkUnnamed24(o.usesPermissionTags!); unittest.expect( o.versionCode!, unittest.equals('foo'), @@ -923,12 +936,12 @@ void checkCancelTestMatrixResponse(api.CancelTestMatrixResponse o) { buildCounterCancelTestMatrixResponse--; } -core.List buildUnnamed24() => [ +core.List buildUnnamed25() 
=> [ buildClientInfoDetail(), buildClientInfoDetail(), ]; -void checkUnnamed24(core.List o) { +void checkUnnamed25(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkClientInfoDetail(o[0]); checkClientInfoDetail(o[1]); @@ -939,7 +952,7 @@ api.ClientInfo buildClientInfo() { final o = api.ClientInfo(); buildCounterClientInfo++; if (buildCounterClientInfo < 3) { - o.clientInfoDetails = buildUnnamed24(); + o.clientInfoDetails = buildUnnamed25(); o.name = 'foo'; } buildCounterClientInfo--; @@ -949,7 +962,7 @@ api.ClientInfo buildClientInfo() { void checkClientInfo(api.ClientInfo o) { buildCounterClientInfo++; if (buildCounterClientInfo < 3) { - checkUnnamed24(o.clientInfoDetails!); + checkUnnamed25(o.clientInfoDetails!); unittest.expect( o.name!, unittest.equals('foo'), @@ -1067,12 +1080,12 @@ void checkDeviceIpBlock(api.DeviceIpBlock o) { buildCounterDeviceIpBlock--; } -core.List buildUnnamed25() => [ +core.List buildUnnamed26() => [ buildDeviceIpBlock(), buildDeviceIpBlock(), ]; -void checkUnnamed25(core.List o) { +void checkUnnamed26(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkDeviceIpBlock(o[0]); checkDeviceIpBlock(o[1]); @@ -1083,7 +1096,7 @@ api.DeviceIpBlockCatalog buildDeviceIpBlockCatalog() { final o = api.DeviceIpBlockCatalog(); buildCounterDeviceIpBlockCatalog++; if (buildCounterDeviceIpBlockCatalog < 3) { - o.ipBlocks = buildUnnamed25(); + o.ipBlocks = buildUnnamed26(); } buildCounterDeviceIpBlockCatalog--; return o; @@ -1092,17 +1105,17 @@ api.DeviceIpBlockCatalog buildDeviceIpBlockCatalog() { void checkDeviceIpBlockCatalog(api.DeviceIpBlockCatalog o) { buildCounterDeviceIpBlockCatalog++; if (buildCounterDeviceIpBlockCatalog < 3) { - checkUnnamed25(o.ipBlocks!); + checkUnnamed26(o.ipBlocks!); } buildCounterDeviceIpBlockCatalog--; } -core.List buildUnnamed26() => [ +core.List buildUnnamed27() => [ buildSessionStateEvent(), buildSessionStateEvent(), ]; -void checkUnnamed26(core.List o) { +void checkUnnamed27(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkSessionStateEvent(o[0]); checkSessionStateEvent(o[1]); @@ -1121,7 +1134,7 @@ api.DeviceSession buildDeviceSession() { o.inactivityTimeout = 'foo'; o.name = 'foo'; o.state = 'foo'; - o.stateHistories = buildUnnamed26(); + o.stateHistories = buildUnnamed27(); o.ttl = 'foo'; } buildCounterDeviceSession--; @@ -1160,7 +1173,7 @@ void checkDeviceSession(api.DeviceSession o) { o.state!, unittest.equals('foo'), ); - checkUnnamed26(o.stateHistories!); + checkUnnamed27(o.stateHistories!); unittest.expect( o.ttl!, unittest.equals('foo'), @@ -1384,12 +1397,12 @@ void checkGoogleCloudStorage(api.GoogleCloudStorage o) { buildCounterGoogleCloudStorage--; } -core.List buildUnnamed27() => [ +core.List buildUnnamed28() => [ 'foo', 'foo', ]; -void checkUnnamed27(core.List o) { +void checkUnnamed28(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -1401,12 +1414,12 @@ void checkUnnamed27(core.List o) { ); } -core.List buildUnnamed28() => [ +core.List buildUnnamed29() => [ 'foo', 'foo', ]; -void checkUnnamed28(core.List o) { +void checkUnnamed29(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -1423,8 +1436,8 @@ api.IntentFilter buildIntentFilter() { final o = api.IntentFilter(); buildCounterIntentFilter++; if (buildCounterIntentFilter < 3) { - o.actionNames = buildUnnamed27(); - o.categoryNames = buildUnnamed28(); + o.actionNames = buildUnnamed28(); + o.categoryNames = buildUnnamed29(); o.mimeType = 'foo'; } buildCounterIntentFilter--; @@ 
-1434,8 +1447,8 @@ api.IntentFilter buildIntentFilter() { void checkIntentFilter(api.IntentFilter o) { buildCounterIntentFilter++; if (buildCounterIntentFilter < 3) { - checkUnnamed27(o.actionNames!); - checkUnnamed28(o.categoryNames!); + checkUnnamed28(o.actionNames!); + checkUnnamed29(o.categoryNames!); unittest.expect( o.mimeType!, unittest.equals('foo'), @@ -1481,34 +1494,34 @@ void checkIosDevice(api.IosDevice o) { buildCounterIosDevice--; } -core.List buildUnnamed29() => [ +core.List buildUnnamed30() => [ buildIosModel(), buildIosModel(), ]; -void checkUnnamed29(core.List o) { +void checkUnnamed30(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkIosModel(o[0]); checkIosModel(o[1]); } -core.List buildUnnamed30() => [ +core.List buildUnnamed31() => [ buildIosVersion(), buildIosVersion(), ]; -void checkUnnamed30(core.List o) { +void checkUnnamed31(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkIosVersion(o[0]); checkIosVersion(o[1]); } -core.List buildUnnamed31() => [ +core.List buildUnnamed32() => [ buildXcodeVersion(), buildXcodeVersion(), ]; -void checkUnnamed31(core.List o) { +void checkUnnamed32(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkXcodeVersion(o[0]); checkXcodeVersion(o[1]); @@ -1519,10 +1532,10 @@ api.IosDeviceCatalog buildIosDeviceCatalog() { final o = api.IosDeviceCatalog(); buildCounterIosDeviceCatalog++; if (buildCounterIosDeviceCatalog < 3) { - o.models = buildUnnamed29(); + o.models = buildUnnamed30(); o.runtimeConfiguration = buildIosRuntimeConfiguration(); - o.versions = buildUnnamed30(); - o.xcodeVersions = buildUnnamed31(); + o.versions = buildUnnamed31(); + o.xcodeVersions = buildUnnamed32(); } buildCounterIosDeviceCatalog--; return o; @@ -1531,10 +1544,10 @@ api.IosDeviceCatalog buildIosDeviceCatalog() { void checkIosDeviceCatalog(api.IosDeviceCatalog o) { buildCounterIosDeviceCatalog++; if (buildCounterIosDeviceCatalog < 3) { - checkUnnamed29(o.models!); + checkUnnamed30(o.models!); checkIosRuntimeConfiguration(o.runtimeConfiguration!); - checkUnnamed30(o.versions!); - checkUnnamed31(o.xcodeVersions!); + checkUnnamed31(o.versions!); + checkUnnamed32(o.xcodeVersions!); } buildCounterIosDeviceCatalog--; } @@ -1568,12 +1581,12 @@ void checkIosDeviceFile(api.IosDeviceFile o) { buildCounterIosDeviceFile--; } -core.List buildUnnamed32() => [ +core.List buildUnnamed33() => [ buildIosDevice(), buildIosDevice(), ]; -void checkUnnamed32(core.List o) { +void checkUnnamed33(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkIosDevice(o[0]); checkIosDevice(o[1]); @@ -1584,7 +1597,7 @@ api.IosDeviceList buildIosDeviceList() { final o = api.IosDeviceList(); buildCounterIosDeviceList++; if (buildCounterIosDeviceList < 3) { - o.iosDevices = buildUnnamed32(); + o.iosDevices = buildUnnamed33(); } buildCounterIosDeviceList--; return o; @@ -1593,17 +1606,17 @@ api.IosDeviceList buildIosDeviceList() { void checkIosDeviceList(api.IosDeviceList o) { buildCounterIosDeviceList++; if (buildCounterIosDeviceList < 3) { - checkUnnamed32(o.iosDevices!); + checkUnnamed33(o.iosDevices!); } buildCounterIosDeviceList--; } -core.List buildUnnamed33() => [ +core.List buildUnnamed34() => [ 'foo', 'foo', ]; -void checkUnnamed33(core.List o) { +void checkUnnamed34(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -1615,23 +1628,23 @@ void checkUnnamed33(core.List o) { ); } -core.List buildUnnamed34() => [ +core.List buildUnnamed35() => [ buildPerIosVersionInfo(), buildPerIosVersionInfo(), ]; -void 
checkUnnamed34(core.List o) { +void checkUnnamed35(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkPerIosVersionInfo(o[0]); checkPerIosVersionInfo(o[1]); } -core.List buildUnnamed35() => [ +core.List buildUnnamed36() => [ 'foo', 'foo', ]; -void checkUnnamed35(core.List o) { +void checkUnnamed36(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -1643,12 +1656,12 @@ void checkUnnamed35(core.List o) { ); } -core.List buildUnnamed36() => [ +core.List buildUnnamed37() => [ 'foo', 'foo', ]; -void checkUnnamed36(core.List o) { +void checkUnnamed37(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -1665,16 +1678,16 @@ api.IosModel buildIosModel() { final o = api.IosModel(); buildCounterIosModel++; if (buildCounterIosModel < 3) { - o.deviceCapabilities = buildUnnamed33(); + o.deviceCapabilities = buildUnnamed34(); o.formFactor = 'foo'; o.id = 'foo'; o.name = 'foo'; - o.perVersionInfo = buildUnnamed34(); + o.perVersionInfo = buildUnnamed35(); o.screenDensity = 42; o.screenX = 42; o.screenY = 42; - o.supportedVersionIds = buildUnnamed35(); - o.tags = buildUnnamed36(); + o.supportedVersionIds = buildUnnamed36(); + o.tags = buildUnnamed37(); } buildCounterIosModel--; return o; @@ -1683,7 +1696,7 @@ api.IosModel buildIosModel() { void checkIosModel(api.IosModel o) { buildCounterIosModel++; if (buildCounterIosModel < 3) { - checkUnnamed33(o.deviceCapabilities!); + checkUnnamed34(o.deviceCapabilities!); unittest.expect( o.formFactor!, unittest.equals('foo'), @@ -1696,7 +1709,7 @@ void checkIosModel(api.IosModel o) { o.name!, unittest.equals('foo'), ); - checkUnnamed34(o.perVersionInfo!); + checkUnnamed35(o.perVersionInfo!); unittest.expect( o.screenDensity!, unittest.equals(42), @@ -1709,8 +1722,8 @@ void checkIosModel(api.IosModel o) { o.screenY!, unittest.equals(42), ); - checkUnnamed35(o.supportedVersionIds!); - checkUnnamed36(o.tags!); + checkUnnamed36(o.supportedVersionIds!); + checkUnnamed37(o.tags!); } buildCounterIosModel--; } @@ -1741,23 +1754,23 @@ void checkIosRoboTest(api.IosRoboTest o) { buildCounterIosRoboTest--; } -core.List buildUnnamed37() => [ +core.List buildUnnamed38() => [ buildLocale(), buildLocale(), ]; -void checkUnnamed37(core.List o) { +void checkUnnamed38(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkLocale(o[0]); checkLocale(o[1]); } -core.List buildUnnamed38() => [ +core.List buildUnnamed39() => [ buildOrientation(), buildOrientation(), ]; -void checkUnnamed38(core.List o) { +void checkUnnamed39(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkOrientation(o[0]); checkOrientation(o[1]); @@ -1768,8 +1781,8 @@ api.IosRuntimeConfiguration buildIosRuntimeConfiguration() { final o = api.IosRuntimeConfiguration(); buildCounterIosRuntimeConfiguration++; if (buildCounterIosRuntimeConfiguration < 3) { - o.locales = buildUnnamed37(); - o.orientations = buildUnnamed38(); + o.locales = buildUnnamed38(); + o.orientations = buildUnnamed39(); } buildCounterIosRuntimeConfiguration--; return o; @@ -1778,18 +1791,18 @@ api.IosRuntimeConfiguration buildIosRuntimeConfiguration() { void checkIosRuntimeConfiguration(api.IosRuntimeConfiguration o) { buildCounterIosRuntimeConfiguration++; if (buildCounterIosRuntimeConfiguration < 3) { - checkUnnamed37(o.locales!); - checkUnnamed38(o.orientations!); + checkUnnamed38(o.locales!); + checkUnnamed39(o.orientations!); } buildCounterIosRuntimeConfiguration--; } -core.List buildUnnamed39() => [ +core.List buildUnnamed40() => [ 42, 42, ]; -void 
checkUnnamed39(core.List o) { +void checkUnnamed40(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -1808,7 +1821,7 @@ api.IosTestLoop buildIosTestLoop() { if (buildCounterIosTestLoop < 3) { o.appBundleId = 'foo'; o.appIpa = buildFileReference(); - o.scenarios = buildUnnamed39(); + o.scenarios = buildUnnamed40(); } buildCounterIosTestLoop--; return o; @@ -1822,39 +1835,39 @@ void checkIosTestLoop(api.IosTestLoop o) { unittest.equals('foo'), ); checkFileReference(o.appIpa!); - checkUnnamed39(o.scenarios!); + checkUnnamed40(o.scenarios!); } buildCounterIosTestLoop--; } -core.List buildUnnamed40() => [ +core.List buildUnnamed41() => [ buildFileReference(), buildFileReference(), ]; -void checkUnnamed40(core.List o) { +void checkUnnamed41(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkFileReference(o[0]); checkFileReference(o[1]); } -core.List buildUnnamed41() => [ +core.List buildUnnamed42() => [ buildIosDeviceFile(), buildIosDeviceFile(), ]; -void checkUnnamed41(core.List o) { +void checkUnnamed42(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkIosDeviceFile(o[0]); checkIosDeviceFile(o[1]); } -core.List buildUnnamed42() => [ +core.List buildUnnamed43() => [ buildIosDeviceFile(), buildIosDeviceFile(), ]; -void checkUnnamed42(core.List o) { +void checkUnnamed43(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkIosDeviceFile(o[0]); checkIosDeviceFile(o[1]); @@ -1865,10 +1878,10 @@ api.IosTestSetup buildIosTestSetup() { final o = api.IosTestSetup(); buildCounterIosTestSetup++; if (buildCounterIosTestSetup < 3) { - o.additionalIpas = buildUnnamed40(); + o.additionalIpas = buildUnnamed41(); o.networkProfile = 'foo'; - o.pullDirectories = buildUnnamed41(); - o.pushFiles = buildUnnamed42(); + o.pullDirectories = buildUnnamed42(); + o.pushFiles = buildUnnamed43(); } buildCounterIosTestSetup--; return o; @@ -1877,23 +1890,23 @@ api.IosTestSetup buildIosTestSetup() { void checkIosTestSetup(api.IosTestSetup o) { buildCounterIosTestSetup++; if (buildCounterIosTestSetup < 3) { - checkUnnamed40(o.additionalIpas!); + checkUnnamed41(o.additionalIpas!); unittest.expect( o.networkProfile!, unittest.equals('foo'), ); - checkUnnamed41(o.pullDirectories!); - checkUnnamed42(o.pushFiles!); + checkUnnamed42(o.pullDirectories!); + checkUnnamed43(o.pushFiles!); } buildCounterIosTestSetup--; } -core.List buildUnnamed43() => [ +core.List buildUnnamed44() => [ 'foo', 'foo', ]; -void checkUnnamed43(core.List o) { +void checkUnnamed44(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -1905,12 +1918,12 @@ void checkUnnamed43(core.List o) { ); } -core.List buildUnnamed44() => [ +core.List buildUnnamed45() => [ 'foo', 'foo', ]; -void checkUnnamed44(core.List o) { +void checkUnnamed45(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -1930,8 +1943,8 @@ api.IosVersion buildIosVersion() { o.id = 'foo'; o.majorVersion = 42; o.minorVersion = 42; - o.supportedXcodeVersionIds = buildUnnamed43(); - o.tags = buildUnnamed44(); + o.supportedXcodeVersionIds = buildUnnamed44(); + o.tags = buildUnnamed45(); } buildCounterIosVersion--; return o; @@ -1952,8 +1965,8 @@ void checkIosVersion(api.IosVersion o) { o.minorVersion!, unittest.equals(42), ); - checkUnnamed43(o.supportedXcodeVersionIds!); - checkUnnamed44(o.tags!); + checkUnnamed44(o.supportedXcodeVersionIds!); + checkUnnamed45(o.tags!); } buildCounterIosVersion--; } @@ -2028,12 +2041,12 @@ void 
checkLauncherActivityIntent(api.LauncherActivityIntent o) { buildCounterLauncherActivityIntent--; } -core.List buildUnnamed45() => [ +core.List buildUnnamed46() => [ buildDeviceSession(), buildDeviceSession(), ]; -void checkUnnamed45(core.List o) { +void checkUnnamed46(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkDeviceSession(o[0]); checkDeviceSession(o[1]); @@ -2044,7 +2057,7 @@ api.ListDeviceSessionsResponse buildListDeviceSessionsResponse() { final o = api.ListDeviceSessionsResponse(); buildCounterListDeviceSessionsResponse++; if (buildCounterListDeviceSessionsResponse < 3) { - o.deviceSessions = buildUnnamed45(); + o.deviceSessions = buildUnnamed46(); o.nextPageToken = 'foo'; } buildCounterListDeviceSessionsResponse--; @@ -2054,7 +2067,7 @@ api.ListDeviceSessionsResponse buildListDeviceSessionsResponse() { void checkListDeviceSessionsResponse(api.ListDeviceSessionsResponse o) { buildCounterListDeviceSessionsResponse++; if (buildCounterListDeviceSessionsResponse < 3) { - checkUnnamed45(o.deviceSessions!); + checkUnnamed46(o.deviceSessions!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -2063,12 +2076,12 @@ void checkListDeviceSessionsResponse(api.ListDeviceSessionsResponse o) { buildCounterListDeviceSessionsResponse--; } -core.List buildUnnamed46() => [ +core.List buildUnnamed47() => [ 'foo', 'foo', ]; -void checkUnnamed46(core.List o) { +void checkUnnamed47(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -2088,7 +2101,7 @@ api.Locale buildLocale() { o.id = 'foo'; o.name = 'foo'; o.region = 'foo'; - o.tags = buildUnnamed46(); + o.tags = buildUnnamed47(); } buildCounterLocale--; return o; @@ -2109,17 +2122,17 @@ void checkLocale(api.Locale o) { o.region!, unittest.equals('foo'), ); - checkUnnamed46(o.tags!); + checkUnnamed47(o.tags!); } buildCounterLocale--; } -core.List buildUnnamed47() => [ +core.List buildUnnamed48() => [ buildTestTargetsForShard(), buildTestTargetsForShard(), ]; -void checkUnnamed47(core.List o) { +void checkUnnamed48(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkTestTargetsForShard(o[0]); checkTestTargetsForShard(o[1]); @@ -2130,7 +2143,7 @@ api.ManualSharding buildManualSharding() { final o = api.ManualSharding(); buildCounterManualSharding++; if (buildCounterManualSharding < 3) { - o.testTargetsForShard = buildUnnamed47(); + o.testTargetsForShard = buildUnnamed48(); } buildCounterManualSharding--; return o; @@ -2139,7 +2152,7 @@ api.ManualSharding buildManualSharding() { void checkManualSharding(api.ManualSharding o) { buildCounterManualSharding++; if (buildCounterManualSharding < 3) { - checkUnnamed47(o.testTargetsForShard!); + checkUnnamed48(o.testTargetsForShard!); } buildCounterManualSharding--; } @@ -2224,12 +2237,12 @@ void checkNetworkConfiguration(api.NetworkConfiguration o) { buildCounterNetworkConfiguration--; } -core.List buildUnnamed48() => [ +core.List buildUnnamed49() => [ buildNetworkConfiguration(), buildNetworkConfiguration(), ]; -void checkUnnamed48(core.List o) { +void checkUnnamed49(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkNetworkConfiguration(o[0]); checkNetworkConfiguration(o[1]); @@ -2240,7 +2253,7 @@ api.NetworkConfigurationCatalog buildNetworkConfigurationCatalog() { final o = api.NetworkConfigurationCatalog(); buildCounterNetworkConfigurationCatalog++; if (buildCounterNetworkConfigurationCatalog < 3) { - o.configurations = buildUnnamed48(); + o.configurations = buildUnnamed49(); } buildCounterNetworkConfigurationCatalog--; return 
o; @@ -2249,7 +2262,7 @@ api.NetworkConfigurationCatalog buildNetworkConfigurationCatalog() { void checkNetworkConfigurationCatalog(api.NetworkConfigurationCatalog o) { buildCounterNetworkConfigurationCatalog++; if (buildCounterNetworkConfigurationCatalog < 3) { - checkUnnamed48(o.configurations!); + checkUnnamed49(o.configurations!); } buildCounterNetworkConfigurationCatalog--; } @@ -2293,12 +2306,12 @@ void checkObbFile(api.ObbFile o) { buildCounterObbFile--; } -core.List buildUnnamed49() => [ +core.List buildUnnamed50() => [ 'foo', 'foo', ]; -void checkUnnamed49(core.List o) { +void checkUnnamed50(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -2317,7 +2330,7 @@ api.Orientation buildOrientation() { if (buildCounterOrientation < 3) { o.id = 'foo'; o.name = 'foo'; - o.tags = buildUnnamed49(); + o.tags = buildUnnamed50(); } buildCounterOrientation--; return o; @@ -2334,7 +2347,7 @@ void checkOrientation(api.Orientation o) { o.name!, unittest.equals('foo'), ); - checkUnnamed49(o.tags!); + checkUnnamed50(o.tags!); } buildCounterOrientation--; } @@ -2539,12 +2552,12 @@ void checkRoboStartingIntent(api.RoboStartingIntent o) { buildCounterRoboStartingIntent--; } -core.List buildUnnamed50() => [ +core.List buildUnnamed51() => [ buildIntentFilter(), buildIntentFilter(), ]; -void checkUnnamed50(core.List o) { +void checkUnnamed51(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkIntentFilter(o[0]); checkIntentFilter(o[1]); @@ -2555,7 +2568,7 @@ api.Service buildService() { final o = api.Service(); buildCounterService++; if (buildCounterService < 3) { - o.intentFilter = buildUnnamed50(); + o.intentFilter = buildUnnamed51(); o.name = 'foo'; } buildCounterService--; @@ -2565,7 +2578,7 @@ api.Service buildService() { void checkService(api.Service o) { buildCounterService++; if (buildCounterService < 3) { - checkUnnamed50(o.intentFilter!); + checkUnnamed51(o.intentFilter!); unittest.expect( o.name!, unittest.equals('foo'), @@ -2685,12 +2698,12 @@ void checkSmartSharding(api.SmartSharding o) { buildCounterSmartSharding--; } -core.List buildUnnamed51() => [ +core.List buildUnnamed52() => [ 'foo', 'foo', ]; -void checkUnnamed51(core.List o) { +void checkUnnamed52(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -2708,7 +2721,7 @@ api.StartActivityIntent buildStartActivityIntent() { buildCounterStartActivityIntent++; if (buildCounterStartActivityIntent < 3) { o.action = 'foo'; - o.categories = buildUnnamed51(); + o.categories = buildUnnamed52(); o.uri = 'foo'; } buildCounterStartActivityIntent--; @@ -2722,7 +2735,7 @@ void checkStartActivityIntent(api.StartActivityIntent o) { o.action!, unittest.equals('foo'), ); - checkUnnamed51(o.categories!); + checkUnnamed52(o.categories!); unittest.expect( o.uri!, unittest.equals('foo'), @@ -2753,12 +2766,12 @@ void checkSystraceSetup(api.SystraceSetup o) { buildCounterSystraceSetup--; } -core.List buildUnnamed52() => [ +core.List buildUnnamed53() => [ 'foo', 'foo', ]; -void checkUnnamed52(core.List o) { +void checkUnnamed53(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -2776,7 +2789,7 @@ api.TestDetails buildTestDetails() { buildCounterTestDetails++; if (buildCounterTestDetails < 3) { o.errorMessage = 'foo'; - o.progressMessages = buildUnnamed52(); + o.progressMessages = buildUnnamed53(); } buildCounterTestDetails--; return o; @@ -2789,7 +2802,7 @@ void checkTestDetails(api.TestDetails o) { o.errorMessage!, unittest.equals('foo'), ); - 
checkUnnamed52(o.progressMessages!); + checkUnnamed53(o.progressMessages!); } buildCounterTestDetails--; } @@ -2873,23 +2886,23 @@ void checkTestExecution(api.TestExecution o) { buildCounterTestExecution--; } -core.List buildUnnamed53() => [ +core.List buildUnnamed54() => [ buildMatrixErrorDetail(), buildMatrixErrorDetail(), ]; -void checkUnnamed53(core.List o) { +void checkUnnamed54(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkMatrixErrorDetail(o[0]); checkMatrixErrorDetail(o[1]); } -core.List buildUnnamed54() => [ +core.List buildUnnamed55() => [ buildTestExecution(), buildTestExecution(), ]; -void checkUnnamed54(core.List o) { +void checkUnnamed55(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkTestExecution(o[0]); checkTestExecution(o[1]); @@ -2902,7 +2915,7 @@ api.TestMatrix buildTestMatrix() { if (buildCounterTestMatrix < 3) { o.clientInfo = buildClientInfo(); o.environmentMatrix = buildEnvironmentMatrix(); - o.extendedInvalidMatrixDetails = buildUnnamed53(); + o.extendedInvalidMatrixDetails = buildUnnamed54(); o.failFast = true; o.flakyTestAttempts = 42; o.invalidMatrixDetails = 'foo'; @@ -2910,7 +2923,7 @@ api.TestMatrix buildTestMatrix() { o.projectId = 'foo'; o.resultStorage = buildResultStorage(); o.state = 'foo'; - o.testExecutions = buildUnnamed54(); + o.testExecutions = buildUnnamed55(); o.testMatrixId = 'foo'; o.testSpecification = buildTestSpecification(); o.timestamp = 'foo'; @@ -2924,7 +2937,7 @@ void checkTestMatrix(api.TestMatrix o) { if (buildCounterTestMatrix < 3) { checkClientInfo(o.clientInfo!); checkEnvironmentMatrix(o.environmentMatrix!); - checkUnnamed53(o.extendedInvalidMatrixDetails!); + checkUnnamed54(o.extendedInvalidMatrixDetails!); unittest.expect(o.failFast!, unittest.isTrue); unittest.expect( o.flakyTestAttempts!, @@ -2947,7 +2960,7 @@ void checkTestMatrix(api.TestMatrix o) { o.state!, unittest.equals('foo'), ); - checkUnnamed54(o.testExecutions!); + checkUnnamed55(o.testExecutions!); unittest.expect( o.testMatrixId!, unittest.equals('foo'), @@ -2961,23 +2974,23 @@ void checkTestMatrix(api.TestMatrix o) { buildCounterTestMatrix--; } -core.List buildUnnamed55() => [ +core.List buildUnnamed56() => [ buildApk(), buildApk(), ]; -void checkUnnamed55(core.List o) { +void checkUnnamed56(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkApk(o[0]); checkApk(o[1]); } -core.List buildUnnamed56() => [ +core.List buildUnnamed57() => [ 'foo', 'foo', ]; -void checkUnnamed56(core.List o) { +void checkUnnamed57(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -2989,34 +3002,34 @@ void checkUnnamed56(core.List o) { ); } -core.List buildUnnamed57() => [ +core.List buildUnnamed58() => [ buildEnvironmentVariable(), buildEnvironmentVariable(), ]; -void checkUnnamed57(core.List o) { +void checkUnnamed58(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkEnvironmentVariable(o[0]); checkEnvironmentVariable(o[1]); } -core.List buildUnnamed58() => [ +core.List buildUnnamed59() => [ buildDeviceFile(), buildDeviceFile(), ]; -void checkUnnamed58(core.List o) { +void checkUnnamed59(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkDeviceFile(o[0]); checkDeviceFile(o[1]); } -core.List buildUnnamed59() => [ +core.List buildUnnamed60() => [ buildApk(), buildApk(), ]; -void checkUnnamed59(core.List o) { +void checkUnnamed60(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkApk(o[0]); checkApk(o[1]); @@ -3028,12 +3041,12 @@ api.TestSetup buildTestSetup() { 
buildCounterTestSetup++; if (buildCounterTestSetup < 3) { o.account = buildAccount(); - o.additionalApks = buildUnnamed55(); - o.directoriesToPull = buildUnnamed56(); + o.additionalApks = buildUnnamed56(); + o.directoriesToPull = buildUnnamed57(); o.dontAutograntPermissions = true; - o.environmentVariables = buildUnnamed57(); - o.filesToPush = buildUnnamed58(); - o.initialSetupApks = buildUnnamed59(); + o.environmentVariables = buildUnnamed58(); + o.filesToPush = buildUnnamed59(); + o.initialSetupApks = buildUnnamed60(); o.networkProfile = 'foo'; o.systrace = buildSystraceSetup(); } @@ -3045,12 +3058,12 @@ void checkTestSetup(api.TestSetup o) { buildCounterTestSetup++; if (buildCounterTestSetup < 3) { checkAccount(o.account!); - checkUnnamed55(o.additionalApks!); - checkUnnamed56(o.directoriesToPull!); + checkUnnamed56(o.additionalApks!); + checkUnnamed57(o.directoriesToPull!); unittest.expect(o.dontAutograntPermissions!, unittest.isTrue); - checkUnnamed57(o.environmentVariables!); - checkUnnamed58(o.filesToPush!); - checkUnnamed59(o.initialSetupApks!); + checkUnnamed58(o.environmentVariables!); + checkUnnamed59(o.filesToPush!); + checkUnnamed60(o.initialSetupApks!); unittest.expect( o.networkProfile!, unittest.equals('foo'), @@ -3102,12 +3115,12 @@ void checkTestSpecification(api.TestSpecification o) { buildCounterTestSpecification--; } -core.List buildUnnamed60() => [ +core.List buildUnnamed61() => [ 'foo', 'foo', ]; -void checkUnnamed60(core.List o) { +void checkUnnamed61(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -3124,7 +3137,7 @@ api.TestTargetsForShard buildTestTargetsForShard() { final o = api.TestTargetsForShard(); buildCounterTestTargetsForShard++; if (buildCounterTestTargetsForShard < 3) { - o.testTargets = buildUnnamed60(); + o.testTargets = buildUnnamed61(); } buildCounterTestTargetsForShard--; return o; @@ -3133,7 +3146,7 @@ api.TestTargetsForShard buildTestTargetsForShard() { void checkTestTargetsForShard(api.TestTargetsForShard o) { buildCounterTestTargetsForShard++; if (buildCounterTestTargetsForShard < 3) { - checkUnnamed60(o.testTargets!); + checkUnnamed61(o.testTargets!); } buildCounterTestTargetsForShard--; } @@ -3322,12 +3335,39 @@ void checkUsesFeature(api.UsesFeature o) { buildCounterUsesFeature--; } -core.List buildUnnamed61() => [ +core.int buildCounterUsesPermissionTag = 0; +api.UsesPermissionTag buildUsesPermissionTag() { + final o = api.UsesPermissionTag(); + buildCounterUsesPermissionTag++; + if (buildCounterUsesPermissionTag < 3) { + o.maxSdkVersion = 42; + o.name = 'foo'; + } + buildCounterUsesPermissionTag--; + return o; +} + +void checkUsesPermissionTag(api.UsesPermissionTag o) { + buildCounterUsesPermissionTag++; + if (buildCounterUsesPermissionTag < 3) { + unittest.expect( + o.maxSdkVersion!, + unittest.equals(42), + ); + unittest.expect( + o.name!, + unittest.equals('foo'), + ); + } + buildCounterUsesPermissionTag--; +} + +core.List buildUnnamed62() => [ 'foo', 'foo', ]; -void checkUnnamed61(core.List o) { +void checkUnnamed62(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -3344,7 +3384,7 @@ api.XcodeVersion buildXcodeVersion() { final o = api.XcodeVersion(); buildCounterXcodeVersion++; if (buildCounterXcodeVersion < 3) { - o.tags = buildUnnamed61(); + o.tags = buildUnnamed62(); o.version = 'foo'; } buildCounterXcodeVersion--; @@ -3354,7 +3394,7 @@ api.XcodeVersion buildXcodeVersion() { void checkXcodeVersion(api.XcodeVersion o) { buildCounterXcodeVersion++; if 
(buildCounterXcodeVersion < 3) { - checkUnnamed61(o.tags!); + checkUnnamed62(o.tags!); unittest.expect( o.version!, unittest.equals('foo'), @@ -4213,6 +4253,16 @@ void main() { }); }); + unittest.group('obj-schema-UsesPermissionTag', () { + unittest.test('to-json--from-json', () async { + final o = buildUsesPermissionTag(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.UsesPermissionTag.fromJson( + oJson as core.Map); + checkUsesPermissionTag(od); + }); + }); + unittest.group('obj-schema-XcodeVersion', () { unittest.test('to-json--from-json', () async { final o = buildXcodeVersion(); diff --git a/generated/googleapis/test/texttospeech/v1_test.dart b/generated/googleapis/test/texttospeech/v1_test.dart index 2eda14067..0dde30051 100644 --- a/generated/googleapis/test/texttospeech/v1_test.dart +++ b/generated/googleapis/test/texttospeech/v1_test.dart @@ -290,7 +290,37 @@ void checkListVoicesResponse(api.ListVoicesResponse o) { buildCounterListVoicesResponse--; } -core.Map buildUnnamed4() => { +core.List buildUnnamed4() => [ + buildTurn(), + buildTurn(), + ]; + +void checkUnnamed4(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkTurn(o[0]); + checkTurn(o[1]); +} + +core.int buildCounterMultiSpeakerMarkup = 0; +api.MultiSpeakerMarkup buildMultiSpeakerMarkup() { + final o = api.MultiSpeakerMarkup(); + buildCounterMultiSpeakerMarkup++; + if (buildCounterMultiSpeakerMarkup < 3) { + o.turns = buildUnnamed4(); + } + buildCounterMultiSpeakerMarkup--; + return o; +} + +void checkMultiSpeakerMarkup(api.MultiSpeakerMarkup o) { + buildCounterMultiSpeakerMarkup++; + if (buildCounterMultiSpeakerMarkup < 3) { + checkUnnamed4(o.turns!); + } + buildCounterMultiSpeakerMarkup--; +} + +core.Map buildUnnamed5() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -303,7 +333,7 @@ core.Map buildUnnamed4() => { }, }; -void checkUnnamed4(core.Map o) { +void checkUnnamed5(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted1 = (o['x']!) as core.Map; unittest.expect(casted1, unittest.hasLength(3)); @@ -335,7 +365,7 @@ void checkUnnamed4(core.Map o) { ); } -core.Map buildUnnamed5() => { +core.Map buildUnnamed6() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -348,7 +378,7 @@ core.Map buildUnnamed5() => { }, }; -void checkUnnamed5(core.Map o) { +void checkUnnamed6(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted3 = (o['x']!) as core.Map; unittest.expect(casted3, unittest.hasLength(3)); @@ -387,9 +417,9 @@ api.Operation buildOperation() { if (buildCounterOperation < 3) { o.done = true; o.error = buildStatus(); - o.metadata = buildUnnamed4(); + o.metadata = buildUnnamed5(); o.name = 'foo'; - o.response = buildUnnamed5(); + o.response = buildUnnamed6(); } buildCounterOperation--; return o; @@ -400,17 +430,17 @@ void checkOperation(api.Operation o) { if (buildCounterOperation < 3) { unittest.expect(o.done!, unittest.isTrue); checkStatus(o.error!); - checkUnnamed4(o.metadata!); + checkUnnamed5(o.metadata!); unittest.expect( o.name!, unittest.equals('foo'), ); - checkUnnamed5(o.response!); + checkUnnamed6(o.response!); } buildCounterOperation--; } -core.Map buildUnnamed6() => { +core.Map buildUnnamed7() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -423,7 +453,7 @@ core.Map buildUnnamed6() => { }, }; -void checkUnnamed6(core.Map o) { +void checkUnnamed7(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted5 = (o['x']!) 
as core.Map; unittest.expect(casted5, unittest.hasLength(3)); @@ -455,15 +485,15 @@ void checkUnnamed6(core.Map o) { ); } -core.List> buildUnnamed7() => [ - buildUnnamed6(), - buildUnnamed6(), +core.List> buildUnnamed8() => [ + buildUnnamed7(), + buildUnnamed7(), ]; -void checkUnnamed7(core.List> o) { +void checkUnnamed8(core.List> o) { unittest.expect(o, unittest.hasLength(2)); - checkUnnamed6(o[0]); - checkUnnamed6(o[1]); + checkUnnamed7(o[0]); + checkUnnamed7(o[1]); } core.int buildCounterStatus = 0; @@ -472,7 +502,7 @@ api.Status buildStatus() { buildCounterStatus++; if (buildCounterStatus < 3) { o.code = 42; - o.details = buildUnnamed7(); + o.details = buildUnnamed8(); o.message = 'foo'; } buildCounterStatus--; @@ -486,7 +516,7 @@ void checkStatus(api.Status o) { o.code!, unittest.equals(42), ); - checkUnnamed7(o.details!); + checkUnnamed8(o.details!); unittest.expect( o.message!, unittest.equals('foo'), @@ -501,6 +531,7 @@ api.SynthesisInput buildSynthesisInput() { buildCounterSynthesisInput++; if (buildCounterSynthesisInput < 3) { o.customPronunciations = buildCustomPronunciations(); + o.multiSpeakerMarkup = buildMultiSpeakerMarkup(); o.ssml = 'foo'; o.text = 'foo'; } @@ -512,6 +543,7 @@ void checkSynthesisInput(api.SynthesisInput o) { buildCounterSynthesisInput++; if (buildCounterSynthesisInput < 3) { checkCustomPronunciations(o.customPronunciations!); + checkMultiSpeakerMarkup(o.multiSpeakerMarkup!); unittest.expect( o.ssml!, unittest.equals('foo'), @@ -599,12 +631,39 @@ void checkSynthesizeSpeechResponse(api.SynthesizeSpeechResponse o) { buildCounterSynthesizeSpeechResponse--; } -core.List buildUnnamed8() => [ +core.int buildCounterTurn = 0; +api.Turn buildTurn() { + final o = api.Turn(); + buildCounterTurn++; + if (buildCounterTurn < 3) { + o.speaker = 'foo'; + o.text = 'foo'; + } + buildCounterTurn--; + return o; +} + +void checkTurn(api.Turn o) { + buildCounterTurn++; + if (buildCounterTurn < 3) { + unittest.expect( + o.speaker!, + unittest.equals('foo'), + ); + unittest.expect( + o.text!, + unittest.equals('foo'), + ); + } + buildCounterTurn--; +} + +core.List buildUnnamed9() => [ 'foo', 'foo', ]; -void checkUnnamed8(core.List o) { +void checkUnnamed9(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -621,7 +680,7 @@ api.Voice buildVoice() { final o = api.Voice(); buildCounterVoice++; if (buildCounterVoice < 3) { - o.languageCodes = buildUnnamed8(); + o.languageCodes = buildUnnamed9(); o.name = 'foo'; o.naturalSampleRateHertz = 42; o.ssmlGender = 'foo'; @@ -633,7 +692,7 @@ api.Voice buildVoice() { void checkVoice(api.Voice o) { buildCounterVoice++; if (buildCounterVoice < 3) { - checkUnnamed8(o.languageCodes!); + checkUnnamed9(o.languageCodes!); unittest.expect( o.name!, unittest.equals('foo'), @@ -650,6 +709,28 @@ void checkVoice(api.Voice o) { buildCounterVoice--; } +core.int buildCounterVoiceCloneParams = 0; +api.VoiceCloneParams buildVoiceCloneParams() { + final o = api.VoiceCloneParams(); + buildCounterVoiceCloneParams++; + if (buildCounterVoiceCloneParams < 3) { + o.voiceCloningKey = 'foo'; + } + buildCounterVoiceCloneParams--; + return o; +} + +void checkVoiceCloneParams(api.VoiceCloneParams o) { + buildCounterVoiceCloneParams++; + if (buildCounterVoiceCloneParams < 3) { + unittest.expect( + o.voiceCloningKey!, + unittest.equals('foo'), + ); + } + buildCounterVoiceCloneParams--; +} + core.int buildCounterVoiceSelectionParams = 0; api.VoiceSelectionParams buildVoiceSelectionParams() { final o = api.VoiceSelectionParams(); @@ -659,6 
+740,7 @@ api.VoiceSelectionParams buildVoiceSelectionParams() { o.languageCode = 'foo'; o.name = 'foo'; o.ssmlGender = 'foo'; + o.voiceClone = buildVoiceCloneParams(); } buildCounterVoiceSelectionParams--; return o; @@ -680,6 +762,7 @@ void checkVoiceSelectionParams(api.VoiceSelectionParams o) { o.ssmlGender!, unittest.equals('foo'), ); + checkVoiceCloneParams(o.voiceClone!); } buildCounterVoiceSelectionParams--; } @@ -775,6 +858,16 @@ void main() { }); }); + unittest.group('obj-schema-MultiSpeakerMarkup', () { + unittest.test('to-json--from-json', () async { + final o = buildMultiSpeakerMarkup(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.MultiSpeakerMarkup.fromJson( + oJson as core.Map); + checkMultiSpeakerMarkup(od); + }); + }); + unittest.group('obj-schema-Operation', () { unittest.test('to-json--from-json', () async { final o = buildOperation(); @@ -835,6 +928,16 @@ void main() { }); }); + unittest.group('obj-schema-Turn', () { + unittest.test('to-json--from-json', () async { + final o = buildTurn(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.Turn.fromJson(oJson as core.Map); + checkTurn(od); + }); + }); + unittest.group('obj-schema-Voice', () { unittest.test('to-json--from-json', () async { final o = buildVoice(); @@ -845,6 +948,16 @@ void main() { }); }); + unittest.group('obj-schema-VoiceCloneParams', () { + unittest.test('to-json--from-json', () async { + final o = buildVoiceCloneParams(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.VoiceCloneParams.fromJson( + oJson as core.Map); + checkVoiceCloneParams(od); + }); + }); + unittest.group('obj-schema-VoiceSelectionParams', () { unittest.test('to-json--from-json', () async { final o = buildVoiceSelectionParams(); diff --git a/generated/googleapis/test/walletobjects/v1_test.dart b/generated/googleapis/test/walletobjects/v1_test.dart index e7b495d8a..e02d82d2b 100644 --- a/generated/googleapis/test/walletobjects/v1_test.dart +++ b/generated/googleapis/test/walletobjects/v1_test.dart @@ -136,6 +136,7 @@ api.AppLinkData buildAppLinkData() { buildCounterAppLinkData++; if (buildCounterAppLinkData < 3) { o.androidAppLinkInfo = buildAppLinkDataAppLinkInfo(); + o.displayText = buildLocalizedString(); o.iosAppLinkInfo = buildAppLinkDataAppLinkInfo(); o.webAppLinkInfo = buildAppLinkDataAppLinkInfo(); } @@ -147,6 +148,7 @@ void checkAppLinkData(api.AppLinkData o) { buildCounterAppLinkData++; if (buildCounterAppLinkData < 3) { checkAppLinkDataAppLinkInfo(o.androidAppLinkInfo!); + checkLocalizedString(o.displayText!); checkAppLinkDataAppLinkInfo(o.iosAppLinkInfo!); checkAppLinkDataAppLinkInfo(o.webAppLinkInfo!); } @@ -1178,23 +1180,34 @@ void checkUnnamed4(core.List o) { checkLatLongPoint(o[1]); } -core.List buildUnnamed5() => [ +core.List buildUnnamed5() => [ + buildMerchantLocation(), + buildMerchantLocation(), + ]; + +void checkUnnamed5(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkMerchantLocation(o[0]); + checkMerchantLocation(o[1]); +} + +core.List buildUnnamed6() => [ buildMessage(), buildMessage(), ]; -void checkUnnamed5(core.List o) { +void checkUnnamed6(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkMessage(o[0]); checkMessage(o[1]); } -core.List buildUnnamed6() => [ +core.List buildUnnamed7() => [ 'foo', 'foo', ]; -void checkUnnamed6(core.List o) { +void checkUnnamed7(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -1206,23 +1219,23 @@ void checkUnnamed6(core.List 
o) { ); } -core.List buildUnnamed7() => [ +core.List buildUnnamed8() => [ buildTextModuleData(), buildTextModuleData(), ]; -void checkUnnamed7(core.List o) { +void checkUnnamed8(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkTextModuleData(o[0]); checkTextModuleData(o[1]); } -core.List buildUnnamed8() => [ +core.List buildUnnamed9() => [ buildValueAddedModuleData(), buildValueAddedModuleData(), ]; -void checkUnnamed8(core.List o) { +void checkUnnamed9(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkValueAddedModuleData(o[0]); checkValueAddedModuleData(o[1]); @@ -1262,18 +1275,19 @@ api.EventTicketClass buildEventTicketClass() { o.localizedIssuerName = buildLocalizedString(); o.locations = buildUnnamed4(); o.logo = buildImage(); - o.messages = buildUnnamed5(); + o.merchantLocations = buildUnnamed5(); + o.messages = buildUnnamed6(); o.multipleDevicesAndHoldersAllowedStatus = 'foo'; o.notifyPreference = 'foo'; - o.redemptionIssuers = buildUnnamed6(); + o.redemptionIssuers = buildUnnamed7(); o.review = buildReview(); o.reviewStatus = 'foo'; o.rowLabel = 'foo'; o.seatLabel = 'foo'; o.sectionLabel = 'foo'; o.securityAnimation = buildSecurityAnimation(); - o.textModulesData = buildUnnamed7(); - o.valueAddedModuleData = buildUnnamed8(); + o.textModulesData = buildUnnamed8(); + o.valueAddedModuleData = buildUnnamed9(); o.venue = buildEventVenue(); o.version = 'foo'; o.viewUnlockRequirement = 'foo'; @@ -1340,7 +1354,8 @@ void checkEventTicketClass(api.EventTicketClass o) { checkLocalizedString(o.localizedIssuerName!); checkUnnamed4(o.locations!); checkImage(o.logo!); - checkUnnamed5(o.messages!); + checkUnnamed5(o.merchantLocations!); + checkUnnamed6(o.messages!); unittest.expect( o.multipleDevicesAndHoldersAllowedStatus!, unittest.equals('foo'), @@ -1349,7 +1364,7 @@ void checkEventTicketClass(api.EventTicketClass o) { o.notifyPreference!, unittest.equals('foo'), ); - checkUnnamed6(o.redemptionIssuers!); + checkUnnamed7(o.redemptionIssuers!); checkReview(o.review!); unittest.expect( o.reviewStatus!, @@ -1368,8 +1383,8 @@ void checkEventTicketClass(api.EventTicketClass o) { unittest.equals('foo'), ); checkSecurityAnimation(o.securityAnimation!); - checkUnnamed7(o.textModulesData!); - checkUnnamed8(o.valueAddedModuleData!); + checkUnnamed8(o.textModulesData!); + checkUnnamed9(o.valueAddedModuleData!); checkEventVenue(o.venue!); unittest.expect( o.version!, @@ -1406,12 +1421,12 @@ void checkEventTicketClassAddMessageResponse( buildCounterEventTicketClassAddMessageResponse--; } -core.List buildUnnamed9() => [ +core.List buildUnnamed10() => [ buildEventTicketClass(), buildEventTicketClass(), ]; -void checkUnnamed9(core.List o) { +void checkUnnamed10(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkEventTicketClass(o[0]); checkEventTicketClass(o[1]); @@ -1423,7 +1438,7 @@ api.EventTicketClassListResponse buildEventTicketClassListResponse() { buildCounterEventTicketClassListResponse++; if (buildCounterEventTicketClassListResponse < 3) { o.pagination = buildPagination(); - o.resources = buildUnnamed9(); + o.resources = buildUnnamed10(); } buildCounterEventTicketClassListResponse--; return o; @@ -1433,28 +1448,28 @@ void checkEventTicketClassListResponse(api.EventTicketClassListResponse o) { buildCounterEventTicketClassListResponse++; if (buildCounterEventTicketClassListResponse < 3) { checkPagination(o.pagination!); - checkUnnamed9(o.resources!); + checkUnnamed10(o.resources!); } buildCounterEventTicketClassListResponse--; } -core.List buildUnnamed10() => [ 
+core.List buildUnnamed11() => [ buildImageModuleData(), buildImageModuleData(), ]; -void checkUnnamed10(core.List o) { +void checkUnnamed11(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkImageModuleData(o[0]); checkImageModuleData(o[1]); } -core.List buildUnnamed11() => [ +core.List buildUnnamed12() => [ 'foo', 'foo', ]; -void checkUnnamed11(core.List o) { +void checkUnnamed12(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -1466,12 +1481,12 @@ void checkUnnamed11(core.List o) { ); } -core.List buildUnnamed12() => [ +core.List buildUnnamed13() => [ 'foo', 'foo', ]; -void checkUnnamed12(core.List o) { +void checkUnnamed13(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -1483,45 +1498,56 @@ void checkUnnamed12(core.List o) { ); } -core.List buildUnnamed13() => [ +core.List buildUnnamed14() => [ buildLatLongPoint(), buildLatLongPoint(), ]; -void checkUnnamed13(core.List o) { +void checkUnnamed14(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkLatLongPoint(o[0]); checkLatLongPoint(o[1]); } -core.List buildUnnamed14() => [ +core.List buildUnnamed15() => [ + buildMerchantLocation(), + buildMerchantLocation(), + ]; + +void checkUnnamed15(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkMerchantLocation(o[0]); + checkMerchantLocation(o[1]); +} + +core.List buildUnnamed16() => [ buildMessage(), buildMessage(), ]; -void checkUnnamed14(core.List o) { +void checkUnnamed16(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkMessage(o[0]); checkMessage(o[1]); } -core.List buildUnnamed15() => [ +core.List buildUnnamed17() => [ buildTextModuleData(), buildTextModuleData(), ]; -void checkUnnamed15(core.List o) { +void checkUnnamed17(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkTextModuleData(o[0]); checkTextModuleData(o[1]); } -core.List buildUnnamed16() => [ +core.List buildUnnamed18() => [ buildValueAddedModuleData(), buildValueAddedModuleData(), ]; -void checkUnnamed16(core.List o) { +void checkUnnamed18(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkValueAddedModuleData(o[0]); checkValueAddedModuleData(o[1]); @@ -1544,14 +1570,15 @@ api.EventTicketObject buildEventTicketObject() { o.heroImage = buildImage(); o.hexBackgroundColor = 'foo'; o.id = 'foo'; - o.imageModulesData = buildUnnamed10(); + o.imageModulesData = buildUnnamed11(); o.infoModuleData = buildInfoModuleData(); o.kind = 'foo'; - o.linkedObjectIds = buildUnnamed11(); - o.linkedOfferIds = buildUnnamed12(); + o.linkedObjectIds = buildUnnamed12(); + o.linkedOfferIds = buildUnnamed13(); o.linksModuleData = buildLinksModuleData(); - o.locations = buildUnnamed13(); - o.messages = buildUnnamed14(); + o.locations = buildUnnamed14(); + o.merchantLocations = buildUnnamed15(); + o.messages = buildUnnamed16(); o.notifyPreference = 'foo'; o.passConstraints = buildPassConstraints(); o.reservationInfo = buildEventReservationInfo(); @@ -1560,12 +1587,12 @@ api.EventTicketObject buildEventTicketObject() { o.seatInfo = buildEventSeat(); o.smartTapRedemptionValue = 'foo'; o.state = 'foo'; - o.textModulesData = buildUnnamed15(); + o.textModulesData = buildUnnamed17(); o.ticketHolderName = 'foo'; o.ticketNumber = 'foo'; o.ticketType = buildLocalizedString(); o.validTimeInterval = buildTimeInterval(); - o.valueAddedModuleData = buildUnnamed16(); + o.valueAddedModuleData = buildUnnamed18(); o.version = 'foo'; } buildCounterEventTicketObject--; @@ -1596,17 +1623,18 @@ void 
checkEventTicketObject(api.EventTicketObject o) { o.id!, unittest.equals('foo'), ); - checkUnnamed10(o.imageModulesData!); + checkUnnamed11(o.imageModulesData!); checkInfoModuleData(o.infoModuleData!); unittest.expect( o.kind!, unittest.equals('foo'), ); - checkUnnamed11(o.linkedObjectIds!); - checkUnnamed12(o.linkedOfferIds!); + checkUnnamed12(o.linkedObjectIds!); + checkUnnamed13(o.linkedOfferIds!); checkLinksModuleData(o.linksModuleData!); - checkUnnamed13(o.locations!); - checkUnnamed14(o.messages!); + checkUnnamed14(o.locations!); + checkUnnamed15(o.merchantLocations!); + checkUnnamed16(o.messages!); unittest.expect( o.notifyPreference!, unittest.equals('foo'), @@ -1624,7 +1652,7 @@ void checkEventTicketObject(api.EventTicketObject o) { o.state!, unittest.equals('foo'), ); - checkUnnamed15(o.textModulesData!); + checkUnnamed17(o.textModulesData!); unittest.expect( o.ticketHolderName!, unittest.equals('foo'), @@ -1635,7 +1663,7 @@ void checkEventTicketObject(api.EventTicketObject o) { ); checkLocalizedString(o.ticketType!); checkTimeInterval(o.validTimeInterval!); - checkUnnamed16(o.valueAddedModuleData!); + checkUnnamed18(o.valueAddedModuleData!); unittest.expect( o.version!, unittest.equals('foo'), @@ -1665,12 +1693,12 @@ void checkEventTicketObjectAddMessageResponse( buildCounterEventTicketObjectAddMessageResponse--; } -core.List buildUnnamed17() => [ +core.List buildUnnamed19() => [ buildEventTicketObject(), buildEventTicketObject(), ]; -void checkUnnamed17(core.List o) { +void checkUnnamed19(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkEventTicketObject(o[0]); checkEventTicketObject(o[1]); @@ -1682,7 +1710,7 @@ api.EventTicketObjectListResponse buildEventTicketObjectListResponse() { buildCounterEventTicketObjectListResponse++; if (buildCounterEventTicketObjectListResponse < 3) { o.pagination = buildPagination(); - o.resources = buildUnnamed17(); + o.resources = buildUnnamed19(); } buildCounterEventTicketObjectListResponse--; return o; @@ -1692,7 +1720,7 @@ void checkEventTicketObjectListResponse(api.EventTicketObjectListResponse o) { buildCounterEventTicketObjectListResponse++; if (buildCounterEventTicketObjectListResponse < 3) { checkPagination(o.pagination!); - checkUnnamed17(o.resources!); + checkUnnamed19(o.resources!); } buildCounterEventTicketObjectListResponse--; } @@ -1769,12 +1797,12 @@ void checkFieldReference(api.FieldReference o) { buildCounterFieldReference--; } -core.List buildUnnamed18() => [ +core.List buildUnnamed20() => [ buildFieldReference(), buildFieldReference(), ]; -void checkUnnamed18(core.List o) { +void checkUnnamed20(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkFieldReference(o[0]); checkFieldReference(o[1]); @@ -1785,7 +1813,7 @@ api.FieldSelector buildFieldSelector() { final o = api.FieldSelector(); buildCounterFieldSelector++; if (buildCounterFieldSelector < 3) { - o.fields = buildUnnamed18(); + o.fields = buildUnnamed20(); } buildCounterFieldSelector--; return o; @@ -1794,7 +1822,7 @@ api.FieldSelector buildFieldSelector() { void checkFieldSelector(api.FieldSelector o) { buildCounterFieldSelector++; if (buildCounterFieldSelector < 3) { - checkUnnamed18(o.fields!); + checkUnnamed20(o.fields!); } buildCounterFieldSelector--; } @@ -1863,45 +1891,56 @@ void checkFlightCarrier(api.FlightCarrier o) { buildCounterFlightCarrier--; } -core.List buildUnnamed19() => [ +core.List buildUnnamed21() => [ buildImageModuleData(), buildImageModuleData(), ]; -void checkUnnamed19(core.List o) { +void checkUnnamed21(core.List o) { 
unittest.expect(o, unittest.hasLength(2)); checkImageModuleData(o[0]); checkImageModuleData(o[1]); } -core.List buildUnnamed20() => [ +core.List buildUnnamed22() => [ buildLatLongPoint(), buildLatLongPoint(), ]; -void checkUnnamed20(core.List o) { +void checkUnnamed22(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkLatLongPoint(o[0]); checkLatLongPoint(o[1]); } -core.List buildUnnamed21() => [ +core.List buildUnnamed23() => [ + buildMerchantLocation(), + buildMerchantLocation(), + ]; + +void checkUnnamed23(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkMerchantLocation(o[0]); + checkMerchantLocation(o[1]); +} + +core.List buildUnnamed24() => [ buildMessage(), buildMessage(), ]; -void checkUnnamed21(core.List o) { +void checkUnnamed24(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkMessage(o[0]); checkMessage(o[1]); } -core.List buildUnnamed22() => [ +core.List buildUnnamed25() => [ 'foo', 'foo', ]; -void checkUnnamed22(core.List o) { +void checkUnnamed25(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -1913,23 +1952,23 @@ void checkUnnamed22(core.List o) { ); } -core.List buildUnnamed23() => [ +core.List buildUnnamed26() => [ buildTextModuleData(), buildTextModuleData(), ]; -void checkUnnamed23(core.List o) { +void checkUnnamed26(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkTextModuleData(o[0]); checkTextModuleData(o[1]); } -core.List buildUnnamed24() => [ +core.List buildUnnamed27() => [ buildValueAddedModuleData(), buildValueAddedModuleData(), ]; -void checkUnnamed24(core.List o) { +void checkUnnamed27(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkValueAddedModuleData(o[0]); checkValueAddedModuleData(o[1]); @@ -1954,7 +1993,7 @@ api.FlightClass buildFlightClass() { o.hexBackgroundColor = 'foo'; o.homepageUri = buildUri(); o.id = 'foo'; - o.imageModulesData = buildUnnamed19(); + o.imageModulesData = buildUnnamed21(); o.infoModuleData = buildInfoModuleData(); o.issuerName = 'foo'; o.kind = 'foo'; @@ -1967,17 +2006,18 @@ api.FlightClass buildFlightClass() { o.localScheduledArrivalDateTime = 'foo'; o.localScheduledDepartureDateTime = 'foo'; o.localizedIssuerName = buildLocalizedString(); - o.locations = buildUnnamed20(); - o.messages = buildUnnamed21(); + o.locations = buildUnnamed22(); + o.merchantLocations = buildUnnamed23(); + o.messages = buildUnnamed24(); o.multipleDevicesAndHoldersAllowedStatus = 'foo'; o.notifyPreference = 'foo'; o.origin = buildAirportInfo(); - o.redemptionIssuers = buildUnnamed22(); + o.redemptionIssuers = buildUnnamed25(); o.review = buildReview(); o.reviewStatus = 'foo'; o.securityAnimation = buildSecurityAnimation(); - o.textModulesData = buildUnnamed23(); - o.valueAddedModuleData = buildUnnamed24(); + o.textModulesData = buildUnnamed26(); + o.valueAddedModuleData = buildUnnamed27(); o.version = 'foo'; o.viewUnlockRequirement = 'foo'; o.wordMark = buildImage(); @@ -2015,7 +2055,7 @@ void checkFlightClass(api.FlightClass o) { o.id!, unittest.equals('foo'), ); - checkUnnamed19(o.imageModulesData!); + checkUnnamed21(o.imageModulesData!); checkInfoModuleData(o.infoModuleData!); unittest.expect( o.issuerName!, @@ -2055,8 +2095,9 @@ void checkFlightClass(api.FlightClass o) { unittest.equals('foo'), ); checkLocalizedString(o.localizedIssuerName!); - checkUnnamed20(o.locations!); - checkUnnamed21(o.messages!); + checkUnnamed22(o.locations!); + checkUnnamed23(o.merchantLocations!); + checkUnnamed24(o.messages!); unittest.expect( 
o.multipleDevicesAndHoldersAllowedStatus!, unittest.equals('foo'), @@ -2066,15 +2107,15 @@ void checkFlightClass(api.FlightClass o) { unittest.equals('foo'), ); checkAirportInfo(o.origin!); - checkUnnamed22(o.redemptionIssuers!); + checkUnnamed25(o.redemptionIssuers!); checkReview(o.review!); unittest.expect( o.reviewStatus!, unittest.equals('foo'), ); checkSecurityAnimation(o.securityAnimation!); - checkUnnamed23(o.textModulesData!); - checkUnnamed24(o.valueAddedModuleData!); + checkUnnamed26(o.textModulesData!); + checkUnnamed27(o.valueAddedModuleData!); unittest.expect( o.version!, unittest.equals('foo'), @@ -2107,12 +2148,12 @@ void checkFlightClassAddMessageResponse(api.FlightClassAddMessageResponse o) { buildCounterFlightClassAddMessageResponse--; } -core.List buildUnnamed25() => [ +core.List buildUnnamed28() => [ buildFlightClass(), buildFlightClass(), ]; -void checkUnnamed25(core.List o) { +void checkUnnamed28(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkFlightClass(o[0]); checkFlightClass(o[1]); @@ -2124,7 +2165,7 @@ api.FlightClassListResponse buildFlightClassListResponse() { buildCounterFlightClassListResponse++; if (buildCounterFlightClassListResponse < 3) { o.pagination = buildPagination(); - o.resources = buildUnnamed25(); + o.resources = buildUnnamed28(); } buildCounterFlightClassListResponse--; return o; @@ -2134,7 +2175,7 @@ void checkFlightClassListResponse(api.FlightClassListResponse o) { buildCounterFlightClassListResponse++; if (buildCounterFlightClassListResponse < 3) { checkPagination(o.pagination!); - checkUnnamed25(o.resources!); + checkUnnamed28(o.resources!); } buildCounterFlightClassListResponse--; } @@ -2180,23 +2221,23 @@ void checkFlightHeader(api.FlightHeader o) { buildCounterFlightHeader--; } -core.List buildUnnamed26() => [ +core.List buildUnnamed29() => [ buildImageModuleData(), buildImageModuleData(), ]; -void checkUnnamed26(core.List o) { +void checkUnnamed29(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkImageModuleData(o[0]); checkImageModuleData(o[1]); } -core.List buildUnnamed27() => [ +core.List buildUnnamed30() => [ 'foo', 'foo', ]; -void checkUnnamed27(core.List o) { +void checkUnnamed30(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -2208,45 +2249,56 @@ void checkUnnamed27(core.List o) { ); } -core.List buildUnnamed28() => [ +core.List buildUnnamed31() => [ buildLatLongPoint(), buildLatLongPoint(), ]; -void checkUnnamed28(core.List o) { +void checkUnnamed31(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkLatLongPoint(o[0]); checkLatLongPoint(o[1]); } -core.List buildUnnamed29() => [ +core.List buildUnnamed32() => [ + buildMerchantLocation(), + buildMerchantLocation(), + ]; + +void checkUnnamed32(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkMerchantLocation(o[0]); + checkMerchantLocation(o[1]); +} + +core.List buildUnnamed33() => [ buildMessage(), buildMessage(), ]; -void checkUnnamed29(core.List o) { +void checkUnnamed33(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkMessage(o[0]); checkMessage(o[1]); } -core.List buildUnnamed30() => [ +core.List buildUnnamed34() => [ buildTextModuleData(), buildTextModuleData(), ]; -void checkUnnamed30(core.List o) { +void checkUnnamed34(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkTextModuleData(o[0]); checkTextModuleData(o[1]); } -core.List buildUnnamed31() => [ +core.List buildUnnamed35() => [ buildValueAddedModuleData(), buildValueAddedModuleData(), ]; -void 
checkUnnamed31(core.List o) { +void checkUnnamed35(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkValueAddedModuleData(o[0]); checkValueAddedModuleData(o[1]); @@ -2269,13 +2321,14 @@ api.FlightObject buildFlightObject() { o.heroImage = buildImage(); o.hexBackgroundColor = 'foo'; o.id = 'foo'; - o.imageModulesData = buildUnnamed26(); + o.imageModulesData = buildUnnamed29(); o.infoModuleData = buildInfoModuleData(); o.kind = 'foo'; - o.linkedObjectIds = buildUnnamed27(); + o.linkedObjectIds = buildUnnamed30(); o.linksModuleData = buildLinksModuleData(); - o.locations = buildUnnamed28(); - o.messages = buildUnnamed29(); + o.locations = buildUnnamed31(); + o.merchantLocations = buildUnnamed32(); + o.messages = buildUnnamed33(); o.notifyPreference = 'foo'; o.passConstraints = buildPassConstraints(); o.passengerName = 'foo'; @@ -2285,9 +2338,9 @@ api.FlightObject buildFlightObject() { o.securityProgramLogo = buildImage(); o.smartTapRedemptionValue = 'foo'; o.state = 'foo'; - o.textModulesData = buildUnnamed30(); + o.textModulesData = buildUnnamed34(); o.validTimeInterval = buildTimeInterval(); - o.valueAddedModuleData = buildUnnamed31(); + o.valueAddedModuleData = buildUnnamed35(); o.version = 'foo'; } buildCounterFlightObject--; @@ -2318,16 +2371,17 @@ void checkFlightObject(api.FlightObject o) { o.id!, unittest.equals('foo'), ); - checkUnnamed26(o.imageModulesData!); + checkUnnamed29(o.imageModulesData!); checkInfoModuleData(o.infoModuleData!); unittest.expect( o.kind!, unittest.equals('foo'), ); - checkUnnamed27(o.linkedObjectIds!); + checkUnnamed30(o.linkedObjectIds!); checkLinksModuleData(o.linksModuleData!); - checkUnnamed28(o.locations!); - checkUnnamed29(o.messages!); + checkUnnamed31(o.locations!); + checkUnnamed32(o.merchantLocations!); + checkUnnamed33(o.messages!); unittest.expect( o.notifyPreference!, unittest.equals('foo'), @@ -2349,9 +2403,9 @@ void checkFlightObject(api.FlightObject o) { o.state!, unittest.equals('foo'), ); - checkUnnamed30(o.textModulesData!); + checkUnnamed34(o.textModulesData!); checkTimeInterval(o.validTimeInterval!); - checkUnnamed31(o.valueAddedModuleData!); + checkUnnamed35(o.valueAddedModuleData!); unittest.expect( o.version!, unittest.equals('foo'), @@ -2379,12 +2433,12 @@ void checkFlightObjectAddMessageResponse(api.FlightObjectAddMessageResponse o) { buildCounterFlightObjectAddMessageResponse--; } -core.List buildUnnamed32() => [ +core.List buildUnnamed36() => [ buildFlightObject(), buildFlightObject(), ]; -void checkUnnamed32(core.List o) { +void checkUnnamed36(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkFlightObject(o[0]); checkFlightObject(o[1]); @@ -2396,7 +2450,7 @@ api.FlightObjectListResponse buildFlightObjectListResponse() { buildCounterFlightObjectListResponse++; if (buildCounterFlightObjectListResponse < 3) { o.pagination = buildPagination(); - o.resources = buildUnnamed32(); + o.resources = buildUnnamed36(); } buildCounterFlightObjectListResponse--; return o; @@ -2406,7 +2460,7 @@ void checkFlightObjectListResponse(api.FlightObjectListResponse o) { buildCounterFlightObjectListResponse++; if (buildCounterFlightObjectListResponse < 3) { checkPagination(o.pagination!); - checkUnnamed32(o.resources!); + checkUnnamed36(o.resources!); } buildCounterFlightObjectListResponse--; } @@ -2440,34 +2494,45 @@ void checkFrequentFlyerInfo(api.FrequentFlyerInfo o) { buildCounterFrequentFlyerInfo--; } -core.List buildUnnamed33() => [ +core.List buildUnnamed37() => [ buildImageModuleData(), buildImageModuleData(), ]; -void 
checkUnnamed33(core.List o) { +void checkUnnamed37(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkImageModuleData(o[0]); checkImageModuleData(o[1]); } -core.List buildUnnamed34() => [ +core.List buildUnnamed38() => [ + buildMerchantLocation(), + buildMerchantLocation(), + ]; + +void checkUnnamed38(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkMerchantLocation(o[0]); + checkMerchantLocation(o[1]); +} + +core.List buildUnnamed39() => [ buildMessage(), buildMessage(), ]; -void checkUnnamed34(core.List o) { +void checkUnnamed39(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkMessage(o[0]); checkMessage(o[1]); } -core.List buildUnnamed35() => [ +core.List buildUnnamed40() => [ 'foo', 'foo', ]; -void checkUnnamed35(core.List o) { +void checkUnnamed40(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -2479,23 +2544,23 @@ void checkUnnamed35(core.List o) { ); } -core.List buildUnnamed36() => [ +core.List buildUnnamed41() => [ buildTextModuleData(), buildTextModuleData(), ]; -void checkUnnamed36(core.List o) { +void checkUnnamed41(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkTextModuleData(o[0]); checkTextModuleData(o[1]); } -core.List buildUnnamed37() => [ +core.List buildUnnamed42() => [ buildValueAddedModuleData(), buildValueAddedModuleData(), ]; -void checkUnnamed37(core.List o) { +void checkUnnamed42(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkValueAddedModuleData(o[0]); checkValueAddedModuleData(o[1]); @@ -2511,14 +2576,15 @@ api.GenericClass buildGenericClass() { o.classTemplateInfo = buildClassTemplateInfo(); o.enableSmartTap = true; o.id = 'foo'; - o.imageModulesData = buildUnnamed33(); + o.imageModulesData = buildUnnamed37(); o.linksModuleData = buildLinksModuleData(); - o.messages = buildUnnamed34(); + o.merchantLocations = buildUnnamed38(); + o.messages = buildUnnamed39(); o.multipleDevicesAndHoldersAllowedStatus = 'foo'; - o.redemptionIssuers = buildUnnamed35(); + o.redemptionIssuers = buildUnnamed40(); o.securityAnimation = buildSecurityAnimation(); - o.textModulesData = buildUnnamed36(); - o.valueAddedModuleData = buildUnnamed37(); + o.textModulesData = buildUnnamed41(); + o.valueAddedModuleData = buildUnnamed42(); o.viewUnlockRequirement = 'foo'; } buildCounterGenericClass--; @@ -2536,17 +2602,18 @@ void checkGenericClass(api.GenericClass o) { o.id!, unittest.equals('foo'), ); - checkUnnamed33(o.imageModulesData!); + checkUnnamed37(o.imageModulesData!); checkLinksModuleData(o.linksModuleData!); - checkUnnamed34(o.messages!); + checkUnnamed38(o.merchantLocations!); + checkUnnamed39(o.messages!); unittest.expect( o.multipleDevicesAndHoldersAllowedStatus!, unittest.equals('foo'), ); - checkUnnamed35(o.redemptionIssuers!); + checkUnnamed40(o.redemptionIssuers!); checkSecurityAnimation(o.securityAnimation!); - checkUnnamed36(o.textModulesData!); - checkUnnamed37(o.valueAddedModuleData!); + checkUnnamed41(o.textModulesData!); + checkUnnamed42(o.valueAddedModuleData!); unittest.expect( o.viewUnlockRequirement!, unittest.equals('foo'), @@ -2574,12 +2641,12 @@ void checkGenericClassAddMessageResponse(api.GenericClassAddMessageResponse o) { buildCounterGenericClassAddMessageResponse--; } -core.List buildUnnamed38() => [ +core.List buildUnnamed43() => [ buildGenericClass(), buildGenericClass(), ]; -void checkUnnamed38(core.List o) { +void checkUnnamed43(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGenericClass(o[0]); checkGenericClass(o[1]); @@ -2591,7 
+2658,7 @@ api.GenericClassListResponse buildGenericClassListResponse() { buildCounterGenericClassListResponse++; if (buildCounterGenericClassListResponse < 3) { o.pagination = buildPagination(); - o.resources = buildUnnamed38(); + o.resources = buildUnnamed43(); } buildCounterGenericClassListResponse--; return o; @@ -2601,28 +2668,28 @@ void checkGenericClassListResponse(api.GenericClassListResponse o) { buildCounterGenericClassListResponse++; if (buildCounterGenericClassListResponse < 3) { checkPagination(o.pagination!); - checkUnnamed38(o.resources!); + checkUnnamed43(o.resources!); } buildCounterGenericClassListResponse--; } -core.List buildUnnamed39() => [ +core.List buildUnnamed44() => [ buildImageModuleData(), buildImageModuleData(), ]; -void checkUnnamed39(core.List o) { +void checkUnnamed44(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkImageModuleData(o[0]); checkImageModuleData(o[1]); } -core.List buildUnnamed40() => [ +core.List buildUnnamed45() => [ 'foo', 'foo', ]; -void checkUnnamed40(core.List o) { +void checkUnnamed45(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -2634,34 +2701,45 @@ void checkUnnamed40(core.List o) { ); } -core.List buildUnnamed41() => [ +core.List buildUnnamed46() => [ + buildMerchantLocation(), + buildMerchantLocation(), + ]; + +void checkUnnamed46(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkMerchantLocation(o[0]); + checkMerchantLocation(o[1]); +} + +core.List buildUnnamed47() => [ buildMessage(), buildMessage(), ]; -void checkUnnamed41(core.List o) { +void checkUnnamed47(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkMessage(o[0]); checkMessage(o[1]); } -core.List buildUnnamed42() => [ +core.List buildUnnamed48() => [ buildTextModuleData(), buildTextModuleData(), ]; -void checkUnnamed42(core.List o) { +void checkUnnamed48(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkTextModuleData(o[0]); checkTextModuleData(o[1]); } -core.List buildUnnamed43() => [ +core.List buildUnnamed49() => [ buildValueAddedModuleData(), buildValueAddedModuleData(), ]; -void checkUnnamed43(core.List o) { +void checkUnnamed49(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkValueAddedModuleData(o[0]); checkValueAddedModuleData(o[1]); @@ -2683,11 +2761,12 @@ api.GenericObject buildGenericObject() { o.heroImage = buildImage(); o.hexBackgroundColor = 'foo'; o.id = 'foo'; - o.imageModulesData = buildUnnamed39(); - o.linkedObjectIds = buildUnnamed40(); + o.imageModulesData = buildUnnamed44(); + o.linkedObjectIds = buildUnnamed45(); o.linksModuleData = buildLinksModuleData(); o.logo = buildImage(); - o.messages = buildUnnamed41(); + o.merchantLocations = buildUnnamed46(); + o.messages = buildUnnamed47(); o.notifications = buildNotifications(); o.passConstraints = buildPassConstraints(); o.rotatingBarcode = buildRotatingBarcode(); @@ -2695,9 +2774,9 @@ api.GenericObject buildGenericObject() { o.smartTapRedemptionValue = 'foo'; o.state = 'foo'; o.subheader = buildLocalizedString(); - o.textModulesData = buildUnnamed42(); + o.textModulesData = buildUnnamed48(); o.validTimeInterval = buildTimeInterval(); - o.valueAddedModuleData = buildUnnamed43(); + o.valueAddedModuleData = buildUnnamed49(); o.wideLogo = buildImage(); } buildCounterGenericObject--; @@ -2730,11 +2809,12 @@ void checkGenericObject(api.GenericObject o) { o.id!, unittest.equals('foo'), ); - checkUnnamed39(o.imageModulesData!); - checkUnnamed40(o.linkedObjectIds!); + checkUnnamed44(o.imageModulesData!); + 
checkUnnamed45(o.linkedObjectIds!); checkLinksModuleData(o.linksModuleData!); checkImage(o.logo!); - checkUnnamed41(o.messages!); + checkUnnamed46(o.merchantLocations!); + checkUnnamed47(o.messages!); checkNotifications(o.notifications!); checkPassConstraints(o.passConstraints!); checkRotatingBarcode(o.rotatingBarcode!); @@ -2748,9 +2828,9 @@ void checkGenericObject(api.GenericObject o) { unittest.equals('foo'), ); checkLocalizedString(o.subheader!); - checkUnnamed42(o.textModulesData!); + checkUnnamed48(o.textModulesData!); checkTimeInterval(o.validTimeInterval!); - checkUnnamed43(o.valueAddedModuleData!); + checkUnnamed49(o.valueAddedModuleData!); checkImage(o.wideLogo!); } buildCounterGenericObject--; @@ -2776,12 +2856,12 @@ void checkGenericObjectAddMessageResponse( buildCounterGenericObjectAddMessageResponse--; } -core.List buildUnnamed44() => [ +core.List buildUnnamed50() => [ buildGenericObject(), buildGenericObject(), ]; -void checkUnnamed44(core.List o) { +void checkUnnamed50(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGenericObject(o[0]); checkGenericObject(o[1]); @@ -2793,7 +2873,7 @@ api.GenericObjectListResponse buildGenericObjectListResponse() { buildCounterGenericObjectListResponse++; if (buildCounterGenericObjectListResponse < 3) { o.pagination = buildPagination(); - o.resources = buildUnnamed44(); + o.resources = buildUnnamed50(); } buildCounterGenericObjectListResponse--; return o; @@ -2803,50 +2883,61 @@ void checkGenericObjectListResponse(api.GenericObjectListResponse o) { buildCounterGenericObjectListResponse++; if (buildCounterGenericObjectListResponse < 3) { checkPagination(o.pagination!); - checkUnnamed44(o.resources!); + checkUnnamed50(o.resources!); } buildCounterGenericObjectListResponse--; } -core.List buildUnnamed45() => [ +core.List buildUnnamed51() => [ buildImageModuleData(), buildImageModuleData(), ]; -void checkUnnamed45(core.List o) { +void checkUnnamed51(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkImageModuleData(o[0]); checkImageModuleData(o[1]); } -core.List buildUnnamed46() => [ +core.List buildUnnamed52() => [ buildLatLongPoint(), buildLatLongPoint(), ]; -void checkUnnamed46(core.List o) { +void checkUnnamed52(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkLatLongPoint(o[0]); checkLatLongPoint(o[1]); } -core.List buildUnnamed47() => [ +core.List buildUnnamed53() => [ + buildMerchantLocation(), + buildMerchantLocation(), + ]; + +void checkUnnamed53(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkMerchantLocation(o[0]); + checkMerchantLocation(o[1]); +} + +core.List buildUnnamed54() => [ buildMessage(), buildMessage(), ]; -void checkUnnamed47(core.List o) { +void checkUnnamed54(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkMessage(o[0]); checkMessage(o[1]); } -core.List buildUnnamed48() => [ +core.List buildUnnamed55() => [ 'foo', 'foo', ]; -void checkUnnamed48(core.List o) { +void checkUnnamed55(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -2858,23 +2949,23 @@ void checkUnnamed48(core.List o) { ); } -core.List buildUnnamed49() => [ +core.List buildUnnamed56() => [ buildTextModuleData(), buildTextModuleData(), ]; -void checkUnnamed49(core.List o) { +void checkUnnamed56(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkTextModuleData(o[0]); checkTextModuleData(o[1]); } -core.List buildUnnamed50() => [ +core.List buildUnnamed57() => [ buildValueAddedModuleData(), buildValueAddedModuleData(), ]; -void 
checkUnnamed50(core.List o) { +void checkUnnamed57(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkValueAddedModuleData(o[0]); checkValueAddedModuleData(o[1]); @@ -2898,7 +2989,7 @@ api.GiftCardClass buildGiftCardClass() { o.hexBackgroundColor = 'foo'; o.homepageUri = buildUri(); o.id = 'foo'; - o.imageModulesData = buildUnnamed45(); + o.imageModulesData = buildUnnamed51(); o.infoModuleData = buildInfoModuleData(); o.issuerName = 'foo'; o.kind = 'foo'; @@ -2908,19 +2999,20 @@ api.GiftCardClass buildGiftCardClass() { o.localizedIssuerName = buildLocalizedString(); o.localizedMerchantName = buildLocalizedString(); o.localizedPinLabel = buildLocalizedString(); - o.locations = buildUnnamed46(); + o.locations = buildUnnamed52(); + o.merchantLocations = buildUnnamed53(); o.merchantName = 'foo'; - o.messages = buildUnnamed47(); + o.messages = buildUnnamed54(); o.multipleDevicesAndHoldersAllowedStatus = 'foo'; o.notifyPreference = 'foo'; o.pinLabel = 'foo'; o.programLogo = buildImage(); - o.redemptionIssuers = buildUnnamed48(); + o.redemptionIssuers = buildUnnamed55(); o.review = buildReview(); o.reviewStatus = 'foo'; o.securityAnimation = buildSecurityAnimation(); - o.textModulesData = buildUnnamed49(); - o.valueAddedModuleData = buildUnnamed50(); + o.textModulesData = buildUnnamed56(); + o.valueAddedModuleData = buildUnnamed57(); o.version = 'foo'; o.viewUnlockRequirement = 'foo'; o.wideProgramLogo = buildImage(); @@ -2961,7 +3053,7 @@ void checkGiftCardClass(api.GiftCardClass o) { o.id!, unittest.equals('foo'), ); - checkUnnamed45(o.imageModulesData!); + checkUnnamed51(o.imageModulesData!); checkInfoModuleData(o.infoModuleData!); unittest.expect( o.issuerName!, @@ -2977,12 +3069,13 @@ void checkGiftCardClass(api.GiftCardClass o) { checkLocalizedString(o.localizedIssuerName!); checkLocalizedString(o.localizedMerchantName!); checkLocalizedString(o.localizedPinLabel!); - checkUnnamed46(o.locations!); + checkUnnamed52(o.locations!); + checkUnnamed53(o.merchantLocations!); unittest.expect( o.merchantName!, unittest.equals('foo'), ); - checkUnnamed47(o.messages!); + checkUnnamed54(o.messages!); unittest.expect( o.multipleDevicesAndHoldersAllowedStatus!, unittest.equals('foo'), @@ -2996,15 +3089,15 @@ void checkGiftCardClass(api.GiftCardClass o) { unittest.equals('foo'), ); checkImage(o.programLogo!); - checkUnnamed48(o.redemptionIssuers!); + checkUnnamed55(o.redemptionIssuers!); checkReview(o.review!); unittest.expect( o.reviewStatus!, unittest.equals('foo'), ); checkSecurityAnimation(o.securityAnimation!); - checkUnnamed49(o.textModulesData!); - checkUnnamed50(o.valueAddedModuleData!); + checkUnnamed56(o.textModulesData!); + checkUnnamed57(o.valueAddedModuleData!); unittest.expect( o.version!, unittest.equals('foo'), @@ -3039,12 +3132,12 @@ void checkGiftCardClassAddMessageResponse( buildCounterGiftCardClassAddMessageResponse--; } -core.List buildUnnamed51() => [ +core.List buildUnnamed58() => [ buildGiftCardClass(), buildGiftCardClass(), ]; -void checkUnnamed51(core.List o) { +void checkUnnamed58(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGiftCardClass(o[0]); checkGiftCardClass(o[1]); @@ -3056,7 +3149,7 @@ api.GiftCardClassListResponse buildGiftCardClassListResponse() { buildCounterGiftCardClassListResponse++; if (buildCounterGiftCardClassListResponse < 3) { o.pagination = buildPagination(); - o.resources = buildUnnamed51(); + o.resources = buildUnnamed58(); } buildCounterGiftCardClassListResponse--; return o; @@ -3066,28 +3159,28 @@ void 
checkGiftCardClassListResponse(api.GiftCardClassListResponse o) { buildCounterGiftCardClassListResponse++; if (buildCounterGiftCardClassListResponse < 3) { checkPagination(o.pagination!); - checkUnnamed51(o.resources!); + checkUnnamed58(o.resources!); } buildCounterGiftCardClassListResponse--; } -core.List buildUnnamed52() => [ +core.List buildUnnamed59() => [ buildImageModuleData(), buildImageModuleData(), ]; -void checkUnnamed52(core.List o) { +void checkUnnamed59(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkImageModuleData(o[0]); checkImageModuleData(o[1]); } -core.List buildUnnamed53() => [ +core.List buildUnnamed60() => [ 'foo', 'foo', ]; -void checkUnnamed53(core.List o) { +void checkUnnamed60(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -3099,45 +3192,56 @@ void checkUnnamed53(core.List o) { ); } -core.List buildUnnamed54() => [ +core.List buildUnnamed61() => [ buildLatLongPoint(), buildLatLongPoint(), ]; -void checkUnnamed54(core.List o) { +void checkUnnamed61(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkLatLongPoint(o[0]); checkLatLongPoint(o[1]); } -core.List buildUnnamed55() => [ +core.List buildUnnamed62() => [ + buildMerchantLocation(), + buildMerchantLocation(), + ]; + +void checkUnnamed62(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkMerchantLocation(o[0]); + checkMerchantLocation(o[1]); +} + +core.List buildUnnamed63() => [ buildMessage(), buildMessage(), ]; -void checkUnnamed55(core.List o) { +void checkUnnamed63(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkMessage(o[0]); checkMessage(o[1]); } -core.List buildUnnamed56() => [ +core.List buildUnnamed64() => [ buildTextModuleData(), buildTextModuleData(), ]; -void checkUnnamed56(core.List o) { +void checkUnnamed64(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkTextModuleData(o[0]); checkTextModuleData(o[1]); } -core.List buildUnnamed57() => [ +core.List buildUnnamed65() => [ buildValueAddedModuleData(), buildValueAddedModuleData(), ]; -void checkUnnamed57(core.List o) { +void checkUnnamed65(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkValueAddedModuleData(o[0]); checkValueAddedModuleData(o[1]); @@ -3162,13 +3266,14 @@ api.GiftCardObject buildGiftCardObject() { o.hasUsers = true; o.heroImage = buildImage(); o.id = 'foo'; - o.imageModulesData = buildUnnamed52(); + o.imageModulesData = buildUnnamed59(); o.infoModuleData = buildInfoModuleData(); o.kind = 'foo'; - o.linkedObjectIds = buildUnnamed53(); + o.linkedObjectIds = buildUnnamed60(); o.linksModuleData = buildLinksModuleData(); - o.locations = buildUnnamed54(); - o.messages = buildUnnamed55(); + o.locations = buildUnnamed61(); + o.merchantLocations = buildUnnamed62(); + o.messages = buildUnnamed63(); o.notifyPreference = 'foo'; o.passConstraints = buildPassConstraints(); o.pin = 'foo'; @@ -3176,9 +3281,9 @@ api.GiftCardObject buildGiftCardObject() { o.saveRestrictions = buildSaveRestrictions(); o.smartTapRedemptionValue = 'foo'; o.state = 'foo'; - o.textModulesData = buildUnnamed56(); + o.textModulesData = buildUnnamed64(); o.validTimeInterval = buildTimeInterval(); - o.valueAddedModuleData = buildUnnamed57(); + o.valueAddedModuleData = buildUnnamed65(); o.version = 'foo'; } buildCounterGiftCardObject--; @@ -3214,16 +3319,17 @@ void checkGiftCardObject(api.GiftCardObject o) { o.id!, unittest.equals('foo'), ); - checkUnnamed52(o.imageModulesData!); + checkUnnamed59(o.imageModulesData!); checkInfoModuleData(o.infoModuleData!); 
unittest.expect( o.kind!, unittest.equals('foo'), ); - checkUnnamed53(o.linkedObjectIds!); + checkUnnamed60(o.linkedObjectIds!); checkLinksModuleData(o.linksModuleData!); - checkUnnamed54(o.locations!); - checkUnnamed55(o.messages!); + checkUnnamed61(o.locations!); + checkUnnamed62(o.merchantLocations!); + checkUnnamed63(o.messages!); unittest.expect( o.notifyPreference!, unittest.equals('foo'), @@ -3243,9 +3349,9 @@ void checkGiftCardObject(api.GiftCardObject o) { o.state!, unittest.equals('foo'), ); - checkUnnamed56(o.textModulesData!); + checkUnnamed64(o.textModulesData!); checkTimeInterval(o.validTimeInterval!); - checkUnnamed57(o.valueAddedModuleData!); + checkUnnamed65(o.valueAddedModuleData!); unittest.expect( o.version!, unittest.equals('foo'), @@ -3274,12 +3380,12 @@ void checkGiftCardObjectAddMessageResponse( buildCounterGiftCardObjectAddMessageResponse--; } -core.List buildUnnamed58() => [ +core.List buildUnnamed66() => [ buildGiftCardObject(), buildGiftCardObject(), ]; -void checkUnnamed58(core.List o) { +void checkUnnamed66(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGiftCardObject(o[0]); checkGiftCardObject(o[1]); @@ -3291,7 +3397,7 @@ api.GiftCardObjectListResponse buildGiftCardObjectListResponse() { buildCounterGiftCardObjectListResponse++; if (buildCounterGiftCardObjectListResponse < 3) { o.pagination = buildPagination(); - o.resources = buildUnnamed58(); + o.resources = buildUnnamed66(); } buildCounterGiftCardObjectListResponse--; return o; @@ -3301,7 +3407,7 @@ void checkGiftCardObjectListResponse(api.GiftCardObjectListResponse o) { buildCounterGiftCardObjectListResponse++; if (buildCounterGiftCardObjectListResponse < 3) { checkPagination(o.pagination!); - checkUnnamed58(o.resources!); + checkUnnamed66(o.resources!); } buildCounterGiftCardObjectListResponse--; } @@ -3412,12 +3518,12 @@ void checkImageUri(api.ImageUri o) { buildCounterImageUri--; } -core.List buildUnnamed59() => [ +core.List buildUnnamed67() => [ buildLabelValueRow(), buildLabelValueRow(), ]; -void checkUnnamed59(core.List o) { +void checkUnnamed67(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkLabelValueRow(o[0]); checkLabelValueRow(o[1]); @@ -3428,7 +3534,7 @@ api.InfoModuleData buildInfoModuleData() { final o = api.InfoModuleData(); buildCounterInfoModuleData++; if (buildCounterInfoModuleData < 3) { - o.labelValueRows = buildUnnamed59(); + o.labelValueRows = buildUnnamed67(); o.showLastUpdateTime = true; } buildCounterInfoModuleData--; @@ -3438,7 +3544,7 @@ api.InfoModuleData buildInfoModuleData() { void checkInfoModuleData(api.InfoModuleData o) { buildCounterInfoModuleData++; if (buildCounterInfoModuleData < 3) { - checkUnnamed59(o.labelValueRows!); + checkUnnamed67(o.labelValueRows!); unittest.expect(o.showLastUpdateTime!, unittest.isTrue); } buildCounterInfoModuleData--; @@ -3482,12 +3588,12 @@ void checkIssuer(api.Issuer o) { buildCounterIssuer--; } -core.List buildUnnamed60() => [ +core.List buildUnnamed68() => [ 'foo', 'foo', ]; -void checkUnnamed60(core.List o) { +void checkUnnamed68(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -3504,7 +3610,7 @@ api.IssuerContactInfo buildIssuerContactInfo() { final o = api.IssuerContactInfo(); buildCounterIssuerContactInfo++; if (buildCounterIssuerContactInfo < 3) { - o.alertsEmails = buildUnnamed60(); + o.alertsEmails = buildUnnamed68(); o.email = 'foo'; o.name = 'foo'; o.phone = 'foo'; @@ -3516,7 +3622,7 @@ api.IssuerContactInfo buildIssuerContactInfo() { void 
checkIssuerContactInfo(api.IssuerContactInfo o) { buildCounterIssuerContactInfo++; if (buildCounterIssuerContactInfo < 3) { - checkUnnamed60(o.alertsEmails!); + checkUnnamed68(o.alertsEmails!); unittest.expect( o.email!, unittest.equals('foo'), @@ -3533,12 +3639,12 @@ void checkIssuerContactInfo(api.IssuerContactInfo o) { buildCounterIssuerContactInfo--; } -core.List buildUnnamed61() => [ +core.List buildUnnamed69() => [ buildIssuer(), buildIssuer(), ]; -void checkUnnamed61(core.List o) { +void checkUnnamed69(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkIssuer(o[0]); checkIssuer(o[1]); @@ -3549,7 +3655,7 @@ api.IssuerListResponse buildIssuerListResponse() { final o = api.IssuerListResponse(); buildCounterIssuerListResponse++; if (buildCounterIssuerListResponse < 3) { - o.resources = buildUnnamed61(); + o.resources = buildUnnamed69(); } buildCounterIssuerListResponse--; return o; @@ -3558,7 +3664,7 @@ api.IssuerListResponse buildIssuerListResponse() { void checkIssuerListResponse(api.IssuerListResponse o) { buildCounterIssuerListResponse++; if (buildCounterIssuerListResponse < 3) { - checkUnnamed61(o.resources!); + checkUnnamed69(o.resources!); } buildCounterIssuerListResponse--; } @@ -3674,12 +3780,12 @@ void checkLabelValue(api.LabelValue o) { buildCounterLabelValue--; } -core.List buildUnnamed62() => [ +core.List buildUnnamed70() => [ buildLabelValue(), buildLabelValue(), ]; -void checkUnnamed62(core.List o) { +void checkUnnamed70(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkLabelValue(o[0]); checkLabelValue(o[1]); @@ -3690,7 +3796,7 @@ api.LabelValueRow buildLabelValueRow() { final o = api.LabelValueRow(); buildCounterLabelValueRow++; if (buildCounterLabelValueRow < 3) { - o.columns = buildUnnamed62(); + o.columns = buildUnnamed70(); } buildCounterLabelValueRow--; return o; @@ -3699,7 +3805,7 @@ api.LabelValueRow buildLabelValueRow() { void checkLabelValueRow(api.LabelValueRow o) { buildCounterLabelValueRow++; if (buildCounterLabelValueRow < 3) { - checkUnnamed62(o.columns!); + checkUnnamed70(o.columns!); } buildCounterLabelValueRow--; } @@ -3736,12 +3842,12 @@ void checkLatLongPoint(api.LatLongPoint o) { buildCounterLatLongPoint--; } -core.List buildUnnamed63() => [ +core.List buildUnnamed71() => [ buildUri(), buildUri(), ]; -void checkUnnamed63(core.List o) { +void checkUnnamed71(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkUri(o[0]); checkUri(o[1]); @@ -3752,7 +3858,7 @@ api.LinksModuleData buildLinksModuleData() { final o = api.LinksModuleData(); buildCounterLinksModuleData++; if (buildCounterLinksModuleData < 3) { - o.uris = buildUnnamed63(); + o.uris = buildUnnamed71(); } buildCounterLinksModuleData--; return o; @@ -3761,7 +3867,7 @@ api.LinksModuleData buildLinksModuleData() { void checkLinksModuleData(api.LinksModuleData o) { buildCounterLinksModuleData++; if (buildCounterLinksModuleData < 3) { - checkUnnamed63(o.uris!); + checkUnnamed71(o.uris!); } buildCounterLinksModuleData--; } @@ -3789,12 +3895,12 @@ void checkListTemplateOverride(api.ListTemplateOverride o) { buildCounterListTemplateOverride--; } -core.List buildUnnamed64() => [ +core.List buildUnnamed72() => [ buildTranslatedString(), buildTranslatedString(), ]; -void checkUnnamed64(core.List o) { +void checkUnnamed72(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkTranslatedString(o[0]); checkTranslatedString(o[1]); @@ -3807,7 +3913,7 @@ api.LocalizedString buildLocalizedString() { if (buildCounterLocalizedString < 3) { o.defaultValue = 
buildTranslatedString(); o.kind = 'foo'; - o.translatedValues = buildUnnamed64(); + o.translatedValues = buildUnnamed72(); } buildCounterLocalizedString--; return o; @@ -3821,50 +3927,61 @@ void checkLocalizedString(api.LocalizedString o) { o.kind!, unittest.equals('foo'), ); - checkUnnamed64(o.translatedValues!); + checkUnnamed72(o.translatedValues!); } buildCounterLocalizedString--; } -core.List buildUnnamed65() => [ +core.List buildUnnamed73() => [ buildImageModuleData(), buildImageModuleData(), ]; -void checkUnnamed65(core.List o) { +void checkUnnamed73(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkImageModuleData(o[0]); checkImageModuleData(o[1]); } -core.List buildUnnamed66() => [ +core.List buildUnnamed74() => [ buildLatLongPoint(), buildLatLongPoint(), ]; -void checkUnnamed66(core.List o) { +void checkUnnamed74(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkLatLongPoint(o[0]); checkLatLongPoint(o[1]); } -core.List buildUnnamed67() => [ +core.List buildUnnamed75() => [ + buildMerchantLocation(), + buildMerchantLocation(), + ]; + +void checkUnnamed75(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkMerchantLocation(o[0]); + checkMerchantLocation(o[1]); +} + +core.List buildUnnamed76() => [ buildMessage(), buildMessage(), ]; -void checkUnnamed67(core.List o) { +void checkUnnamed76(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkMessage(o[0]); checkMessage(o[1]); } -core.List buildUnnamed68() => [ +core.List buildUnnamed77() => [ 'foo', 'foo', ]; -void checkUnnamed68(core.List o) { +void checkUnnamed77(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -3876,23 +3993,23 @@ void checkUnnamed68(core.List o) { ); } -core.List buildUnnamed69() => [ +core.List buildUnnamed78() => [ buildTextModuleData(), buildTextModuleData(), ]; -void checkUnnamed69(core.List o) { +void checkUnnamed78(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkTextModuleData(o[0]); checkTextModuleData(o[1]); } -core.List buildUnnamed70() => [ +core.List buildUnnamed79() => [ buildValueAddedModuleData(), buildValueAddedModuleData(), ]; -void checkUnnamed70(core.List o) { +void checkUnnamed79(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkValueAddedModuleData(o[0]); checkValueAddedModuleData(o[1]); @@ -3916,7 +4033,7 @@ api.LoyaltyClass buildLoyaltyClass() { o.hexBackgroundColor = 'foo'; o.homepageUri = buildUri(); o.id = 'foo'; - o.imageModulesData = buildUnnamed65(); + o.imageModulesData = buildUnnamed73(); o.infoModuleData = buildInfoModuleData(); o.issuerName = 'foo'; o.kind = 'foo'; @@ -3929,13 +4046,14 @@ api.LoyaltyClass buildLoyaltyClass() { o.localizedRewardsTierLabel = buildLocalizedString(); o.localizedSecondaryRewardsTier = buildLocalizedString(); o.localizedSecondaryRewardsTierLabel = buildLocalizedString(); - o.locations = buildUnnamed66(); - o.messages = buildUnnamed67(); + o.locations = buildUnnamed74(); + o.merchantLocations = buildUnnamed75(); + o.messages = buildUnnamed76(); o.multipleDevicesAndHoldersAllowedStatus = 'foo'; o.notifyPreference = 'foo'; o.programLogo = buildImage(); o.programName = 'foo'; - o.redemptionIssuers = buildUnnamed68(); + o.redemptionIssuers = buildUnnamed77(); o.review = buildReview(); o.reviewStatus = 'foo'; o.rewardsTier = 'foo'; @@ -3943,8 +4061,8 @@ api.LoyaltyClass buildLoyaltyClass() { o.secondaryRewardsTier = 'foo'; o.secondaryRewardsTierLabel = 'foo'; o.securityAnimation = buildSecurityAnimation(); - o.textModulesData = buildUnnamed69(); - 
o.valueAddedModuleData = buildUnnamed70(); + o.textModulesData = buildUnnamed78(); + o.valueAddedModuleData = buildUnnamed79(); o.version = 'foo'; o.viewUnlockRequirement = 'foo'; o.wideProgramLogo = buildImage(); @@ -3985,7 +4103,7 @@ void checkLoyaltyClass(api.LoyaltyClass o) { o.id!, unittest.equals('foo'), ); - checkUnnamed65(o.imageModulesData!); + checkUnnamed73(o.imageModulesData!); checkInfoModuleData(o.infoModuleData!); unittest.expect( o.issuerName!, @@ -4004,8 +4122,9 @@ void checkLoyaltyClass(api.LoyaltyClass o) { checkLocalizedString(o.localizedRewardsTierLabel!); checkLocalizedString(o.localizedSecondaryRewardsTier!); checkLocalizedString(o.localizedSecondaryRewardsTierLabel!); - checkUnnamed66(o.locations!); - checkUnnamed67(o.messages!); + checkUnnamed74(o.locations!); + checkUnnamed75(o.merchantLocations!); + checkUnnamed76(o.messages!); unittest.expect( o.multipleDevicesAndHoldersAllowedStatus!, unittest.equals('foo'), @@ -4019,7 +4138,7 @@ void checkLoyaltyClass(api.LoyaltyClass o) { o.programName!, unittest.equals('foo'), ); - checkUnnamed68(o.redemptionIssuers!); + checkUnnamed77(o.redemptionIssuers!); checkReview(o.review!); unittest.expect( o.reviewStatus!, @@ -4042,8 +4161,8 @@ void checkLoyaltyClass(api.LoyaltyClass o) { unittest.equals('foo'), ); checkSecurityAnimation(o.securityAnimation!); - checkUnnamed69(o.textModulesData!); - checkUnnamed70(o.valueAddedModuleData!); + checkUnnamed78(o.textModulesData!); + checkUnnamed79(o.valueAddedModuleData!); unittest.expect( o.version!, unittest.equals('foo'), @@ -4077,12 +4196,12 @@ void checkLoyaltyClassAddMessageResponse(api.LoyaltyClassAddMessageResponse o) { buildCounterLoyaltyClassAddMessageResponse--; } -core.List buildUnnamed71() => [ +core.List buildUnnamed80() => [ buildLoyaltyClass(), buildLoyaltyClass(), ]; -void checkUnnamed71(core.List o) { +void checkUnnamed80(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkLoyaltyClass(o[0]); checkLoyaltyClass(o[1]); @@ -4094,7 +4213,7 @@ api.LoyaltyClassListResponse buildLoyaltyClassListResponse() { buildCounterLoyaltyClassListResponse++; if (buildCounterLoyaltyClassListResponse < 3) { o.pagination = buildPagination(); - o.resources = buildUnnamed71(); + o.resources = buildUnnamed80(); } buildCounterLoyaltyClassListResponse--; return o; @@ -4104,28 +4223,28 @@ void checkLoyaltyClassListResponse(api.LoyaltyClassListResponse o) { buildCounterLoyaltyClassListResponse++; if (buildCounterLoyaltyClassListResponse < 3) { checkPagination(o.pagination!); - checkUnnamed71(o.resources!); + checkUnnamed80(o.resources!); } buildCounterLoyaltyClassListResponse--; } -core.List buildUnnamed72() => [ +core.List buildUnnamed81() => [ buildImageModuleData(), buildImageModuleData(), ]; -void checkUnnamed72(core.List o) { +void checkUnnamed81(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkImageModuleData(o[0]); checkImageModuleData(o[1]); } -core.List buildUnnamed73() => [ +core.List buildUnnamed82() => [ 'foo', 'foo', ]; -void checkUnnamed73(core.List o) { +void checkUnnamed82(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -4137,12 +4256,12 @@ void checkUnnamed73(core.List o) { ); } -core.List buildUnnamed74() => [ +core.List buildUnnamed83() => [ 'foo', 'foo', ]; -void checkUnnamed74(core.List o) { +void checkUnnamed83(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -4154,45 +4273,56 @@ void checkUnnamed74(core.List o) { ); } -core.List buildUnnamed75() => [ +core.List buildUnnamed84() => 
[ buildLatLongPoint(), buildLatLongPoint(), ]; -void checkUnnamed75(core.List o) { +void checkUnnamed84(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkLatLongPoint(o[0]); checkLatLongPoint(o[1]); } -core.List buildUnnamed76() => [ +core.List buildUnnamed85() => [ + buildMerchantLocation(), + buildMerchantLocation(), + ]; + +void checkUnnamed85(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkMerchantLocation(o[0]); + checkMerchantLocation(o[1]); +} + +core.List buildUnnamed86() => [ buildMessage(), buildMessage(), ]; -void checkUnnamed76(core.List o) { +void checkUnnamed86(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkMessage(o[0]); checkMessage(o[1]); } -core.List buildUnnamed77() => [ +core.List buildUnnamed87() => [ buildTextModuleData(), buildTextModuleData(), ]; -void checkUnnamed77(core.List o) { +void checkUnnamed87(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkTextModuleData(o[0]); checkTextModuleData(o[1]); } -core.List buildUnnamed78() => [ +core.List buildUnnamed88() => [ buildValueAddedModuleData(), buildValueAddedModuleData(), ]; -void checkUnnamed78(core.List o) { +void checkUnnamed88(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkValueAddedModuleData(o[0]); checkValueAddedModuleData(o[1]); @@ -4215,15 +4345,16 @@ api.LoyaltyObject buildLoyaltyObject() { o.hasUsers = true; o.heroImage = buildImage(); o.id = 'foo'; - o.imageModulesData = buildUnnamed72(); + o.imageModulesData = buildUnnamed81(); o.infoModuleData = buildInfoModuleData(); o.kind = 'foo'; - o.linkedObjectIds = buildUnnamed73(); - o.linkedOfferIds = buildUnnamed74(); + o.linkedObjectIds = buildUnnamed82(); + o.linkedOfferIds = buildUnnamed83(); o.linksModuleData = buildLinksModuleData(); - o.locations = buildUnnamed75(); + o.locations = buildUnnamed84(); o.loyaltyPoints = buildLoyaltyPoints(); - o.messages = buildUnnamed76(); + o.merchantLocations = buildUnnamed85(); + o.messages = buildUnnamed86(); o.notifyPreference = 'foo'; o.passConstraints = buildPassConstraints(); o.rotatingBarcode = buildRotatingBarcode(); @@ -4231,9 +4362,9 @@ api.LoyaltyObject buildLoyaltyObject() { o.secondaryLoyaltyPoints = buildLoyaltyPoints(); o.smartTapRedemptionValue = 'foo'; o.state = 'foo'; - o.textModulesData = buildUnnamed77(); + o.textModulesData = buildUnnamed87(); o.validTimeInterval = buildTimeInterval(); - o.valueAddedModuleData = buildUnnamed78(); + o.valueAddedModuleData = buildUnnamed88(); o.version = 'foo'; } buildCounterLoyaltyObject--; @@ -4267,18 +4398,19 @@ void checkLoyaltyObject(api.LoyaltyObject o) { o.id!, unittest.equals('foo'), ); - checkUnnamed72(o.imageModulesData!); + checkUnnamed81(o.imageModulesData!); checkInfoModuleData(o.infoModuleData!); unittest.expect( o.kind!, unittest.equals('foo'), ); - checkUnnamed73(o.linkedObjectIds!); - checkUnnamed74(o.linkedOfferIds!); + checkUnnamed82(o.linkedObjectIds!); + checkUnnamed83(o.linkedOfferIds!); checkLinksModuleData(o.linksModuleData!); - checkUnnamed75(o.locations!); + checkUnnamed84(o.locations!); checkLoyaltyPoints(o.loyaltyPoints!); - checkUnnamed76(o.messages!); + checkUnnamed85(o.merchantLocations!); + checkUnnamed86(o.messages!); unittest.expect( o.notifyPreference!, unittest.equals('foo'), @@ -4295,9 +4427,9 @@ void checkLoyaltyObject(api.LoyaltyObject o) { o.state!, unittest.equals('foo'), ); - checkUnnamed77(o.textModulesData!); + checkUnnamed87(o.textModulesData!); checkTimeInterval(o.validTimeInterval!); - checkUnnamed78(o.valueAddedModuleData!); + 
checkUnnamed88(o.valueAddedModuleData!); unittest.expect( o.version!, unittest.equals('foo'), @@ -4326,12 +4458,12 @@ void checkLoyaltyObjectAddMessageResponse( buildCounterLoyaltyObjectAddMessageResponse--; } -core.List buildUnnamed79() => [ +core.List buildUnnamed89() => [ buildLoyaltyObject(), buildLoyaltyObject(), ]; -void checkUnnamed79(core.List o) { +void checkUnnamed89(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkLoyaltyObject(o[0]); checkLoyaltyObject(o[1]); @@ -4343,7 +4475,7 @@ api.LoyaltyObjectListResponse buildLoyaltyObjectListResponse() { buildCounterLoyaltyObjectListResponse++; if (buildCounterLoyaltyObjectListResponse < 3) { o.pagination = buildPagination(); - o.resources = buildUnnamed79(); + o.resources = buildUnnamed89(); } buildCounterLoyaltyObjectListResponse--; return o; @@ -4353,7 +4485,7 @@ void checkLoyaltyObjectListResponse(api.LoyaltyObjectListResponse o) { buildCounterLoyaltyObjectListResponse++; if (buildCounterLoyaltyObjectListResponse < 3) { checkPagination(o.pagination!); - checkUnnamed79(o.resources!); + checkUnnamed89(o.resources!); } buildCounterLoyaltyObjectListResponse--; } @@ -4418,12 +4550,12 @@ void checkLoyaltyPointsBalance(api.LoyaltyPointsBalance o) { buildCounterLoyaltyPointsBalance--; } -core.List buildUnnamed80() => [ +core.List buildUnnamed90() => [ buildCompositeMedia(), buildCompositeMedia(), ]; -void checkUnnamed80(core.List o) { +void checkUnnamed90(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkCompositeMedia(o[0]); checkCompositeMedia(o[1]); @@ -4438,7 +4570,7 @@ api.Media buildMedia() { o.bigstoreObjectRef = 'foo'; o.blobRef = 'foo'; o.blobstore2Info = buildBlobstore2Info(); - o.compositeMedia = buildUnnamed80(); + o.compositeMedia = buildUnnamed90(); o.contentType = 'foo'; o.contentTypeInfo = buildContentTypeInfo(); o.cosmoBinaryReference = 'foo'; @@ -4485,7 +4617,7 @@ void checkMedia(api.Media o) { unittest.equals('foo'), ); checkBlobstore2Info(o.blobstore2Info!); - checkUnnamed80(o.compositeMedia!); + checkUnnamed90(o.compositeMedia!); unittest.expect( o.contentType!, unittest.equals('foo'), @@ -4619,6 +4751,33 @@ void checkMediaRequestInfo(api.MediaRequestInfo o) { buildCounterMediaRequestInfo--; } +core.int buildCounterMerchantLocation = 0; +api.MerchantLocation buildMerchantLocation() { + final o = api.MerchantLocation(); + buildCounterMerchantLocation++; + if (buildCounterMerchantLocation < 3) { + o.latitude = 42.0; + o.longitude = 42.0; + } + buildCounterMerchantLocation--; + return o; +} + +void checkMerchantLocation(api.MerchantLocation o) { + buildCounterMerchantLocation++; + if (buildCounterMerchantLocation < 3) { + unittest.expect( + o.latitude!, + unittest.equals(42.0), + ); + unittest.expect( + o.longitude!, + unittest.equals(42.0), + ); + } + buildCounterMerchantLocation--; +} + core.int buildCounterMessage = 0; api.Message buildMessage() { final o = api.Message(); @@ -4667,12 +4826,12 @@ void checkMessage(api.Message o) { buildCounterMessage--; } -core.List buildUnnamed81() => [ +core.List buildUnnamed91() => [ 'foo', 'foo', ]; -void checkUnnamed81(core.List o) { +void checkUnnamed91(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -4684,12 +4843,12 @@ void checkUnnamed81(core.List o) { ); } -core.List buildUnnamed82() => [ +core.List buildUnnamed92() => [ 'foo', 'foo', ]; -void checkUnnamed82(core.List o) { +void checkUnnamed92(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -4706,8 +4865,8 @@ 
api.ModifyLinkedOfferObjects buildModifyLinkedOfferObjects() { final o = api.ModifyLinkedOfferObjects(); buildCounterModifyLinkedOfferObjects++; if (buildCounterModifyLinkedOfferObjects < 3) { - o.addLinkedOfferObjectIds = buildUnnamed81(); - o.removeLinkedOfferObjectIds = buildUnnamed82(); + o.addLinkedOfferObjectIds = buildUnnamed91(); + o.removeLinkedOfferObjectIds = buildUnnamed92(); } buildCounterModifyLinkedOfferObjects--; return o; @@ -4716,8 +4875,8 @@ api.ModifyLinkedOfferObjects buildModifyLinkedOfferObjects() { void checkModifyLinkedOfferObjects(api.ModifyLinkedOfferObjects o) { buildCounterModifyLinkedOfferObjects++; if (buildCounterModifyLinkedOfferObjects < 3) { - checkUnnamed81(o.addLinkedOfferObjectIds!); - checkUnnamed82(o.removeLinkedOfferObjectIds!); + checkUnnamed91(o.addLinkedOfferObjectIds!); + checkUnnamed92(o.removeLinkedOfferObjectIds!); } buildCounterModifyLinkedOfferObjects--; } @@ -4846,45 +5005,56 @@ void checkObjectId(api.ObjectId o) { buildCounterObjectId--; } -core.List buildUnnamed83() => [ +core.List buildUnnamed93() => [ buildImageModuleData(), buildImageModuleData(), ]; -void checkUnnamed83(core.List o) { +void checkUnnamed93(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkImageModuleData(o[0]); checkImageModuleData(o[1]); } -core.List buildUnnamed84() => [ +core.List buildUnnamed94() => [ buildLatLongPoint(), buildLatLongPoint(), ]; -void checkUnnamed84(core.List o) { +void checkUnnamed94(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkLatLongPoint(o[0]); checkLatLongPoint(o[1]); } -core.List buildUnnamed85() => [ +core.List buildUnnamed95() => [ + buildMerchantLocation(), + buildMerchantLocation(), + ]; + +void checkUnnamed95(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkMerchantLocation(o[0]); + checkMerchantLocation(o[1]); +} + +core.List buildUnnamed96() => [ buildMessage(), buildMessage(), ]; -void checkUnnamed85(core.List o) { +void checkUnnamed96(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkMessage(o[0]); checkMessage(o[1]); } -core.List buildUnnamed86() => [ +core.List buildUnnamed97() => [ 'foo', 'foo', ]; -void checkUnnamed86(core.List o) { +void checkUnnamed97(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -4896,23 +5066,23 @@ void checkUnnamed86(core.List o) { ); } -core.List buildUnnamed87() => [ +core.List buildUnnamed98() => [ buildTextModuleData(), buildTextModuleData(), ]; -void checkUnnamed87(core.List o) { +void checkUnnamed98(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkTextModuleData(o[0]); checkTextModuleData(o[1]); } -core.List buildUnnamed88() => [ +core.List buildUnnamed99() => [ buildValueAddedModuleData(), buildValueAddedModuleData(), ]; -void checkUnnamed88(core.List o) { +void checkUnnamed99(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkValueAddedModuleData(o[0]); checkValueAddedModuleData(o[1]); @@ -4936,7 +5106,7 @@ api.OfferClass buildOfferClass() { o.hexBackgroundColor = 'foo'; o.homepageUri = buildUri(); o.id = 'foo'; - o.imageModulesData = buildUnnamed83(); + o.imageModulesData = buildUnnamed93(); o.infoModuleData = buildInfoModuleData(); o.issuerName = 'foo'; o.kind = 'foo'; @@ -4947,21 +5117,22 @@ api.OfferClass buildOfferClass() { o.localizedProvider = buildLocalizedString(); o.localizedShortTitle = buildLocalizedString(); o.localizedTitle = buildLocalizedString(); - o.locations = buildUnnamed84(); - o.messages = buildUnnamed85(); + o.locations = buildUnnamed94(); + 
o.merchantLocations = buildUnnamed95(); + o.messages = buildUnnamed96(); o.multipleDevicesAndHoldersAllowedStatus = 'foo'; o.notifyPreference = 'foo'; o.provider = 'foo'; o.redemptionChannel = 'foo'; - o.redemptionIssuers = buildUnnamed86(); + o.redemptionIssuers = buildUnnamed97(); o.review = buildReview(); o.reviewStatus = 'foo'; o.securityAnimation = buildSecurityAnimation(); o.shortTitle = 'foo'; - o.textModulesData = buildUnnamed87(); + o.textModulesData = buildUnnamed98(); o.title = 'foo'; o.titleImage = buildImage(); - o.valueAddedModuleData = buildUnnamed88(); + o.valueAddedModuleData = buildUnnamed99(); o.version = 'foo'; o.viewUnlockRequirement = 'foo'; o.wideTitleImage = buildImage(); @@ -5002,7 +5173,7 @@ void checkOfferClass(api.OfferClass o) { o.id!, unittest.equals('foo'), ); - checkUnnamed83(o.imageModulesData!); + checkUnnamed93(o.imageModulesData!); checkInfoModuleData(o.infoModuleData!); unittest.expect( o.issuerName!, @@ -5019,8 +5190,9 @@ void checkOfferClass(api.OfferClass o) { checkLocalizedString(o.localizedProvider!); checkLocalizedString(o.localizedShortTitle!); checkLocalizedString(o.localizedTitle!); - checkUnnamed84(o.locations!); - checkUnnamed85(o.messages!); + checkUnnamed94(o.locations!); + checkUnnamed95(o.merchantLocations!); + checkUnnamed96(o.messages!); unittest.expect( o.multipleDevicesAndHoldersAllowedStatus!, unittest.equals('foo'), @@ -5037,7 +5209,7 @@ void checkOfferClass(api.OfferClass o) { o.redemptionChannel!, unittest.equals('foo'), ); - checkUnnamed86(o.redemptionIssuers!); + checkUnnamed97(o.redemptionIssuers!); checkReview(o.review!); unittest.expect( o.reviewStatus!, @@ -5048,13 +5220,13 @@ void checkOfferClass(api.OfferClass o) { o.shortTitle!, unittest.equals('foo'), ); - checkUnnamed87(o.textModulesData!); + checkUnnamed98(o.textModulesData!); unittest.expect( o.title!, unittest.equals('foo'), ); checkImage(o.titleImage!); - checkUnnamed88(o.valueAddedModuleData!); + checkUnnamed99(o.valueAddedModuleData!); unittest.expect( o.version!, unittest.equals('foo'), @@ -5088,12 +5260,12 @@ void checkOfferClassAddMessageResponse(api.OfferClassAddMessageResponse o) { buildCounterOfferClassAddMessageResponse--; } -core.List buildUnnamed89() => [ +core.List buildUnnamed100() => [ buildOfferClass(), buildOfferClass(), ]; -void checkUnnamed89(core.List o) { +void checkUnnamed100(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkOfferClass(o[0]); checkOfferClass(o[1]); @@ -5105,7 +5277,7 @@ api.OfferClassListResponse buildOfferClassListResponse() { buildCounterOfferClassListResponse++; if (buildCounterOfferClassListResponse < 3) { o.pagination = buildPagination(); - o.resources = buildUnnamed89(); + o.resources = buildUnnamed100(); } buildCounterOfferClassListResponse--; return o; @@ -5115,28 +5287,28 @@ void checkOfferClassListResponse(api.OfferClassListResponse o) { buildCounterOfferClassListResponse++; if (buildCounterOfferClassListResponse < 3) { checkPagination(o.pagination!); - checkUnnamed89(o.resources!); + checkUnnamed100(o.resources!); } buildCounterOfferClassListResponse--; } -core.List buildUnnamed90() => [ +core.List buildUnnamed101() => [ buildImageModuleData(), buildImageModuleData(), ]; -void checkUnnamed90(core.List o) { +void checkUnnamed101(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkImageModuleData(o[0]); checkImageModuleData(o[1]); } -core.List buildUnnamed91() => [ +core.List buildUnnamed102() => [ 'foo', 'foo', ]; -void checkUnnamed91(core.List o) { +void checkUnnamed102(core.List o) { 
unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -5148,45 +5320,56 @@ void checkUnnamed91(core.List o) { ); } -core.List buildUnnamed92() => [ +core.List buildUnnamed103() => [ buildLatLongPoint(), buildLatLongPoint(), ]; -void checkUnnamed92(core.List o) { +void checkUnnamed103(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkLatLongPoint(o[0]); checkLatLongPoint(o[1]); } -core.List buildUnnamed93() => [ +core.List buildUnnamed104() => [ + buildMerchantLocation(), + buildMerchantLocation(), + ]; + +void checkUnnamed104(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkMerchantLocation(o[0]); + checkMerchantLocation(o[1]); +} + +core.List buildUnnamed105() => [ buildMessage(), buildMessage(), ]; -void checkUnnamed93(core.List o) { +void checkUnnamed105(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkMessage(o[0]); checkMessage(o[1]); } -core.List buildUnnamed94() => [ +core.List buildUnnamed106() => [ buildTextModuleData(), buildTextModuleData(), ]; -void checkUnnamed94(core.List o) { +void checkUnnamed106(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkTextModuleData(o[0]); checkTextModuleData(o[1]); } -core.List buildUnnamed95() => [ +core.List buildUnnamed107() => [ buildValueAddedModuleData(), buildValueAddedModuleData(), ]; -void checkUnnamed95(core.List o) { +void checkUnnamed107(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkValueAddedModuleData(o[0]); checkValueAddedModuleData(o[1]); @@ -5207,22 +5390,23 @@ api.OfferObject buildOfferObject() { o.hasUsers = true; o.heroImage = buildImage(); o.id = 'foo'; - o.imageModulesData = buildUnnamed90(); + o.imageModulesData = buildUnnamed101(); o.infoModuleData = buildInfoModuleData(); o.kind = 'foo'; - o.linkedObjectIds = buildUnnamed91(); + o.linkedObjectIds = buildUnnamed102(); o.linksModuleData = buildLinksModuleData(); - o.locations = buildUnnamed92(); - o.messages = buildUnnamed93(); + o.locations = buildUnnamed103(); + o.merchantLocations = buildUnnamed104(); + o.messages = buildUnnamed105(); o.notifyPreference = 'foo'; o.passConstraints = buildPassConstraints(); o.rotatingBarcode = buildRotatingBarcode(); o.saveRestrictions = buildSaveRestrictions(); o.smartTapRedemptionValue = 'foo'; o.state = 'foo'; - o.textModulesData = buildUnnamed94(); + o.textModulesData = buildUnnamed106(); o.validTimeInterval = buildTimeInterval(); - o.valueAddedModuleData = buildUnnamed95(); + o.valueAddedModuleData = buildUnnamed107(); o.version = 'foo'; } buildCounterOfferObject--; @@ -5248,16 +5432,17 @@ void checkOfferObject(api.OfferObject o) { o.id!, unittest.equals('foo'), ); - checkUnnamed90(o.imageModulesData!); + checkUnnamed101(o.imageModulesData!); checkInfoModuleData(o.infoModuleData!); unittest.expect( o.kind!, unittest.equals('foo'), ); - checkUnnamed91(o.linkedObjectIds!); + checkUnnamed102(o.linkedObjectIds!); checkLinksModuleData(o.linksModuleData!); - checkUnnamed92(o.locations!); - checkUnnamed93(o.messages!); + checkUnnamed103(o.locations!); + checkUnnamed104(o.merchantLocations!); + checkUnnamed105(o.messages!); unittest.expect( o.notifyPreference!, unittest.equals('foo'), @@ -5273,9 +5458,9 @@ void checkOfferObject(api.OfferObject o) { o.state!, unittest.equals('foo'), ); - checkUnnamed94(o.textModulesData!); + checkUnnamed106(o.textModulesData!); checkTimeInterval(o.validTimeInterval!); - checkUnnamed95(o.valueAddedModuleData!); + checkUnnamed107(o.valueAddedModuleData!); unittest.expect( o.version!, unittest.equals('foo'), @@ -5303,12 
+5488,12 @@ void checkOfferObjectAddMessageResponse(api.OfferObjectAddMessageResponse o) { buildCounterOfferObjectAddMessageResponse--; } -core.List buildUnnamed96() => [ +core.List buildUnnamed108() => [ buildOfferObject(), buildOfferObject(), ]; -void checkUnnamed96(core.List o) { +void checkUnnamed108(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkOfferObject(o[0]); checkOfferObject(o[1]); @@ -5320,7 +5505,7 @@ api.OfferObjectListResponse buildOfferObjectListResponse() { buildCounterOfferObjectListResponse++; if (buildCounterOfferObjectListResponse < 3) { o.pagination = buildPagination(); - o.resources = buildUnnamed96(); + o.resources = buildUnnamed108(); } buildCounterOfferObjectListResponse--; return o; @@ -5330,7 +5515,7 @@ void checkOfferObjectListResponse(api.OfferObjectListResponse o) { buildCounterOfferObjectListResponse++; if (buildCounterOfferObjectListResponse < 3) { checkPagination(o.pagination!); - checkUnnamed96(o.resources!); + checkUnnamed108(o.resources!); } buildCounterOfferObjectListResponse--; } @@ -5367,12 +5552,12 @@ void checkPagination(api.Pagination o) { buildCounterPagination--; } -core.List buildUnnamed97() => [ +core.List buildUnnamed109() => [ 'foo', 'foo', ]; -void checkUnnamed97(core.List o) { +void checkUnnamed109(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -5389,7 +5574,7 @@ api.PassConstraints buildPassConstraints() { final o = api.PassConstraints(); buildCounterPassConstraints++; if (buildCounterPassConstraints < 3) { - o.nfcConstraint = buildUnnamed97(); + o.nfcConstraint = buildUnnamed109(); o.screenshotEligibility = 'foo'; } buildCounterPassConstraints--; @@ -5399,7 +5584,7 @@ api.PassConstraints buildPassConstraints() { void checkPassConstraints(api.PassConstraints o) { buildCounterPassConstraints++; if (buildCounterPassConstraints < 3) { - checkUnnamed97(o.nfcConstraint!); + checkUnnamed109(o.nfcConstraint!); unittest.expect( o.screenshotEligibility!, unittest.equals('foo'), @@ -5435,12 +5620,12 @@ void checkPermission(api.Permission o) { buildCounterPermission--; } -core.List buildUnnamed98() => [ +core.List buildUnnamed110() => [ buildPermission(), buildPermission(), ]; -void checkUnnamed98(core.List o) { +void checkUnnamed110(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkPermission(o[0]); checkPermission(o[1]); @@ -5452,7 +5637,7 @@ api.Permissions buildPermissions() { buildCounterPermissions++; if (buildCounterPermissions < 3) { o.issuerId = 'foo'; - o.permissions = buildUnnamed98(); + o.permissions = buildUnnamed110(); } buildCounterPermissions--; return o; @@ -5465,7 +5650,7 @@ void checkPermissions(api.Permissions o) { o.issuerId!, unittest.equals('foo'), ); - checkUnnamed98(o.permissions!); + checkUnnamed110(o.permissions!); } buildCounterPermissions--; } @@ -5543,155 +5728,155 @@ void checkReservationInfo(api.ReservationInfo o) { buildCounterReservationInfo--; } -core.List buildUnnamed99() => [ +core.List buildUnnamed111() => [ buildEventTicketClass(), buildEventTicketClass(), ]; -void checkUnnamed99(core.List o) { +void checkUnnamed111(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkEventTicketClass(o[0]); checkEventTicketClass(o[1]); } -core.List buildUnnamed100() => [ +core.List buildUnnamed112() => [ buildEventTicketObject(), buildEventTicketObject(), ]; -void checkUnnamed100(core.List o) { +void checkUnnamed112(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkEventTicketObject(o[0]); checkEventTicketObject(o[1]); } -core.List 
buildUnnamed101() => [ +core.List buildUnnamed113() => [ buildFlightClass(), buildFlightClass(), ]; -void checkUnnamed101(core.List o) { +void checkUnnamed113(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkFlightClass(o[0]); checkFlightClass(o[1]); } -core.List buildUnnamed102() => [ +core.List buildUnnamed114() => [ buildFlightObject(), buildFlightObject(), ]; -void checkUnnamed102(core.List o) { +void checkUnnamed114(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkFlightObject(o[0]); checkFlightObject(o[1]); } -core.List buildUnnamed103() => [ +core.List buildUnnamed115() => [ buildGenericClass(), buildGenericClass(), ]; -void checkUnnamed103(core.List o) { +void checkUnnamed115(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGenericClass(o[0]); checkGenericClass(o[1]); } -core.List buildUnnamed104() => [ +core.List buildUnnamed116() => [ buildGenericObject(), buildGenericObject(), ]; -void checkUnnamed104(core.List o) { +void checkUnnamed116(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGenericObject(o[0]); checkGenericObject(o[1]); } -core.List buildUnnamed105() => [ +core.List buildUnnamed117() => [ buildGiftCardClass(), buildGiftCardClass(), ]; -void checkUnnamed105(core.List o) { +void checkUnnamed117(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGiftCardClass(o[0]); checkGiftCardClass(o[1]); } -core.List buildUnnamed106() => [ +core.List buildUnnamed118() => [ buildGiftCardObject(), buildGiftCardObject(), ]; -void checkUnnamed106(core.List o) { +void checkUnnamed118(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkGiftCardObject(o[0]); checkGiftCardObject(o[1]); } -core.List buildUnnamed107() => [ +core.List buildUnnamed119() => [ buildLoyaltyClass(), buildLoyaltyClass(), ]; -void checkUnnamed107(core.List o) { +void checkUnnamed119(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkLoyaltyClass(o[0]); checkLoyaltyClass(o[1]); } -core.List buildUnnamed108() => [ +core.List buildUnnamed120() => [ buildLoyaltyObject(), buildLoyaltyObject(), ]; -void checkUnnamed108(core.List o) { +void checkUnnamed120(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkLoyaltyObject(o[0]); checkLoyaltyObject(o[1]); } -core.List buildUnnamed109() => [ +core.List buildUnnamed121() => [ buildOfferClass(), buildOfferClass(), ]; -void checkUnnamed109(core.List o) { +void checkUnnamed121(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkOfferClass(o[0]); checkOfferClass(o[1]); } -core.List buildUnnamed110() => [ +core.List buildUnnamed122() => [ buildOfferObject(), buildOfferObject(), ]; -void checkUnnamed110(core.List o) { +void checkUnnamed122(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkOfferObject(o[0]); checkOfferObject(o[1]); } -core.List buildUnnamed111() => [ +core.List buildUnnamed123() => [ buildTransitClass(), buildTransitClass(), ]; -void checkUnnamed111(core.List o) { +void checkUnnamed123(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkTransitClass(o[0]); checkTransitClass(o[1]); } -core.List buildUnnamed112() => [ +core.List buildUnnamed124() => [ buildTransitObject(), buildTransitObject(), ]; -void checkUnnamed112(core.List o) { +void checkUnnamed124(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkTransitObject(o[0]); checkTransitObject(o[1]); @@ -5702,20 +5887,20 @@ api.Resources buildResources() { final o = api.Resources(); buildCounterResources++; if (buildCounterResources < 3) { - o.eventTicketClasses = 
buildUnnamed99(); - o.eventTicketObjects = buildUnnamed100(); - o.flightClasses = buildUnnamed101(); - o.flightObjects = buildUnnamed102(); - o.genericClasses = buildUnnamed103(); - o.genericObjects = buildUnnamed104(); - o.giftCardClasses = buildUnnamed105(); - o.giftCardObjects = buildUnnamed106(); - o.loyaltyClasses = buildUnnamed107(); - o.loyaltyObjects = buildUnnamed108(); - o.offerClasses = buildUnnamed109(); - o.offerObjects = buildUnnamed110(); - o.transitClasses = buildUnnamed111(); - o.transitObjects = buildUnnamed112(); + o.eventTicketClasses = buildUnnamed111(); + o.eventTicketObjects = buildUnnamed112(); + o.flightClasses = buildUnnamed113(); + o.flightObjects = buildUnnamed114(); + o.genericClasses = buildUnnamed115(); + o.genericObjects = buildUnnamed116(); + o.giftCardClasses = buildUnnamed117(); + o.giftCardObjects = buildUnnamed118(); + o.loyaltyClasses = buildUnnamed119(); + o.loyaltyObjects = buildUnnamed120(); + o.offerClasses = buildUnnamed121(); + o.offerObjects = buildUnnamed122(); + o.transitClasses = buildUnnamed123(); + o.transitObjects = buildUnnamed124(); } buildCounterResources--; return o; @@ -5724,20 +5909,20 @@ api.Resources buildResources() { void checkResources(api.Resources o) { buildCounterResources++; if (buildCounterResources < 3) { - checkUnnamed99(o.eventTicketClasses!); - checkUnnamed100(o.eventTicketObjects!); - checkUnnamed101(o.flightClasses!); - checkUnnamed102(o.flightObjects!); - checkUnnamed103(o.genericClasses!); - checkUnnamed104(o.genericObjects!); - checkUnnamed105(o.giftCardClasses!); - checkUnnamed106(o.giftCardObjects!); - checkUnnamed107(o.loyaltyClasses!); - checkUnnamed108(o.loyaltyObjects!); - checkUnnamed109(o.offerClasses!); - checkUnnamed110(o.offerObjects!); - checkUnnamed111(o.transitClasses!); - checkUnnamed112(o.transitObjects!); + checkUnnamed111(o.eventTicketClasses!); + checkUnnamed112(o.eventTicketObjects!); + checkUnnamed113(o.flightClasses!); + checkUnnamed114(o.flightObjects!); + checkUnnamed115(o.genericClasses!); + checkUnnamed116(o.genericObjects!); + checkUnnamed117(o.giftCardClasses!); + checkUnnamed118(o.giftCardObjects!); + checkUnnamed119(o.loyaltyClasses!); + checkUnnamed120(o.loyaltyObjects!); + checkUnnamed121(o.offerClasses!); + checkUnnamed122(o.offerObjects!); + checkUnnamed123(o.transitClasses!); + checkUnnamed124(o.transitObjects!); } buildCounterResources--; } @@ -5807,12 +5992,12 @@ void checkRotatingBarcode(api.RotatingBarcode o) { buildCounterRotatingBarcode--; } -core.List buildUnnamed113() => [ +core.List buildUnnamed125() => [ buildRotatingBarcodeTotpDetailsTotpParameters(), buildRotatingBarcodeTotpDetailsTotpParameters(), ]; -void checkUnnamed113( +void checkUnnamed125( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkRotatingBarcodeTotpDetailsTotpParameters(o[0]); @@ -5825,7 +6010,7 @@ api.RotatingBarcodeTotpDetails buildRotatingBarcodeTotpDetails() { buildCounterRotatingBarcodeTotpDetails++; if (buildCounterRotatingBarcodeTotpDetails < 3) { o.algorithm = 'foo'; - o.parameters = buildUnnamed113(); + o.parameters = buildUnnamed125(); o.periodMillis = 'foo'; } buildCounterRotatingBarcodeTotpDetails--; @@ -5839,7 +6024,7 @@ void checkRotatingBarcodeTotpDetails(api.RotatingBarcodeTotpDetails o) { o.algorithm!, unittest.equals('foo'), ); - checkUnnamed113(o.parameters!); + checkUnnamed125(o.parameters!); unittest.expect( o.periodMillis!, unittest.equals('foo'), @@ -5877,12 +6062,12 @@ void checkRotatingBarcodeTotpDetailsTotpParameters( 
buildCounterRotatingBarcodeTotpDetailsTotpParameters--; } -core.List buildUnnamed114() => [ +core.List buildUnnamed126() => [ 'foo', 'foo', ]; -void checkUnnamed114(core.List o) { +void checkUnnamed126(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -5901,7 +6086,7 @@ api.RotatingBarcodeValues buildRotatingBarcodeValues() { if (buildCounterRotatingBarcodeValues < 3) { o.periodMillis = 'foo'; o.startDateTime = 'foo'; - o.values = buildUnnamed114(); + o.values = buildUnnamed126(); } buildCounterRotatingBarcodeValues--; return o; @@ -5918,7 +6103,7 @@ void checkRotatingBarcodeValues(api.RotatingBarcodeValues o) { o.startDateTime!, unittest.equals('foo'), ); - checkUnnamed114(o.values!); + checkUnnamed126(o.values!); } buildCounterRotatingBarcodeValues--; } @@ -5989,12 +6174,12 @@ void checkSignUpInfo(api.SignUpInfo o) { buildCounterSignUpInfo--; } -core.List buildUnnamed115() => [ +core.List buildUnnamed127() => [ buildIssuerToUserInfo(), buildIssuerToUserInfo(), ]; -void checkUnnamed115(core.List o) { +void checkUnnamed127(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkIssuerToUserInfo(o[0]); checkIssuerToUserInfo(o[1]); @@ -6006,7 +6191,7 @@ api.SmartTap buildSmartTap() { buildCounterSmartTap++; if (buildCounterSmartTap < 3) { o.id = 'foo'; - o.infos = buildUnnamed115(); + o.infos = buildUnnamed127(); o.kind = 'foo'; o.merchantId = 'foo'; } @@ -6021,7 +6206,7 @@ void checkSmartTap(api.SmartTap o) { o.id!, unittest.equals('foo'), ); - checkUnnamed115(o.infos!); + checkUnnamed127(o.infos!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -6034,12 +6219,12 @@ void checkSmartTap(api.SmartTap o) { buildCounterSmartTap--; } -core.List buildUnnamed116() => [ +core.List buildUnnamed128() => [ buildAuthenticationKey(), buildAuthenticationKey(), ]; -void checkUnnamed116(core.List o) { +void checkUnnamed128(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkAuthenticationKey(o[0]); checkAuthenticationKey(o[1]); @@ -6050,7 +6235,7 @@ api.SmartTapMerchantData buildSmartTapMerchantData() { final o = api.SmartTapMerchantData(); buildCounterSmartTapMerchantData++; if (buildCounterSmartTapMerchantData < 3) { - o.authenticationKeys = buildUnnamed116(); + o.authenticationKeys = buildUnnamed128(); o.smartTapMerchantId = 'foo'; } buildCounterSmartTapMerchantData--; @@ -6060,7 +6245,7 @@ api.SmartTapMerchantData buildSmartTapMerchantData() { void checkSmartTapMerchantData(api.SmartTapMerchantData o) { buildCounterSmartTapMerchantData++; if (buildCounterSmartTapMerchantData < 3) { - checkUnnamed116(o.authenticationKeys!); + checkUnnamed128(o.authenticationKeys!); unittest.expect( o.smartTapMerchantId!, unittest.equals('foo'), @@ -6154,12 +6339,12 @@ void checkTicketCost(api.TicketCost o) { buildCounterTicketCost--; } -core.List buildUnnamed117() => [ +core.List buildUnnamed129() => [ buildTicketSeat(), buildTicketSeat(), ]; -void checkUnnamed117(core.List o) { +void checkUnnamed129(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkTicketSeat(o[0]); checkTicketSeat(o[1]); @@ -6180,7 +6365,7 @@ api.TicketLeg buildTicketLeg() { o.originStationCode = 'foo'; o.platform = 'foo'; o.ticketSeat = buildTicketSeat(); - o.ticketSeats = buildUnnamed117(); + o.ticketSeats = buildUnnamed129(); o.transitOperatorName = buildLocalizedString(); o.transitTerminusName = buildLocalizedString(); o.zone = 'foo'; @@ -6220,7 +6405,7 @@ void checkTicketLeg(api.TicketLeg o) { unittest.equals('foo'), ); checkTicketSeat(o.ticketSeat!); - 
checkUnnamed117(o.ticketSeats!); + checkUnnamed129(o.ticketSeats!); checkLocalizedString(o.transitOperatorName!); checkLocalizedString(o.transitTerminusName!); unittest.expect( @@ -6318,45 +6503,56 @@ void checkTimeInterval(api.TimeInterval o) { buildCounterTimeInterval--; } -core.List buildUnnamed118() => [ +core.List buildUnnamed130() => [ buildImageModuleData(), buildImageModuleData(), ]; -void checkUnnamed118(core.List o) { +void checkUnnamed130(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkImageModuleData(o[0]); checkImageModuleData(o[1]); } -core.List buildUnnamed119() => [ +core.List buildUnnamed131() => [ buildLatLongPoint(), buildLatLongPoint(), ]; -void checkUnnamed119(core.List o) { +void checkUnnamed131(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkLatLongPoint(o[0]); checkLatLongPoint(o[1]); } -core.List buildUnnamed120() => [ +core.List buildUnnamed132() => [ + buildMerchantLocation(), + buildMerchantLocation(), + ]; + +void checkUnnamed132(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkMerchantLocation(o[0]); + checkMerchantLocation(o[1]); +} + +core.List buildUnnamed133() => [ buildMessage(), buildMessage(), ]; -void checkUnnamed120(core.List o) { +void checkUnnamed133(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkMessage(o[0]); checkMessage(o[1]); } -core.List buildUnnamed121() => [ +core.List buildUnnamed134() => [ 'foo', 'foo', ]; -void checkUnnamed121(core.List o) { +void checkUnnamed134(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -6368,23 +6564,23 @@ void checkUnnamed121(core.List o) { ); } -core.List buildUnnamed122() => [ +core.List buildUnnamed135() => [ buildTextModuleData(), buildTextModuleData(), ]; -void checkUnnamed122(core.List o) { +void checkUnnamed135(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkTextModuleData(o[0]); checkTextModuleData(o[1]); } -core.List buildUnnamed123() => [ +core.List buildUnnamed136() => [ buildValueAddedModuleData(), buildValueAddedModuleData(), ]; -void checkUnnamed123(core.List o) { +void checkUnnamed136(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkValueAddedModuleData(o[0]); checkValueAddedModuleData(o[1]); @@ -6426,25 +6622,26 @@ api.TransitClass buildTransitClass() { o.hexBackgroundColor = 'foo'; o.homepageUri = buildUri(); o.id = 'foo'; - o.imageModulesData = buildUnnamed118(); + o.imageModulesData = buildUnnamed130(); o.infoModuleData = buildInfoModuleData(); o.issuerName = 'foo'; o.languageOverride = 'foo'; o.linksModuleData = buildLinksModuleData(); o.localizedIssuerName = buildLocalizedString(); - o.locations = buildUnnamed119(); + o.locations = buildUnnamed131(); o.logo = buildImage(); - o.messages = buildUnnamed120(); + o.merchantLocations = buildUnnamed132(); + o.messages = buildUnnamed133(); o.multipleDevicesAndHoldersAllowedStatus = 'foo'; o.notifyPreference = 'foo'; - o.redemptionIssuers = buildUnnamed121(); + o.redemptionIssuers = buildUnnamed134(); o.review = buildReview(); o.reviewStatus = 'foo'; o.securityAnimation = buildSecurityAnimation(); - o.textModulesData = buildUnnamed122(); + o.textModulesData = buildUnnamed135(); o.transitOperatorName = buildLocalizedString(); o.transitType = 'foo'; - o.valueAddedModuleData = buildUnnamed123(); + o.valueAddedModuleData = buildUnnamed136(); o.version = 'foo'; o.viewUnlockRequirement = 'foo'; o.watermark = buildImage(); @@ -6498,7 +6695,7 @@ void checkTransitClass(api.TransitClass o) { o.id!, unittest.equals('foo'), ); - 
checkUnnamed118(o.imageModulesData!); + checkUnnamed130(o.imageModulesData!); checkInfoModuleData(o.infoModuleData!); unittest.expect( o.issuerName!, @@ -6510,9 +6707,10 @@ void checkTransitClass(api.TransitClass o) { ); checkLinksModuleData(o.linksModuleData!); checkLocalizedString(o.localizedIssuerName!); - checkUnnamed119(o.locations!); + checkUnnamed131(o.locations!); checkImage(o.logo!); - checkUnnamed120(o.messages!); + checkUnnamed132(o.merchantLocations!); + checkUnnamed133(o.messages!); unittest.expect( o.multipleDevicesAndHoldersAllowedStatus!, unittest.equals('foo'), @@ -6521,20 +6719,20 @@ void checkTransitClass(api.TransitClass o) { o.notifyPreference!, unittest.equals('foo'), ); - checkUnnamed121(o.redemptionIssuers!); + checkUnnamed134(o.redemptionIssuers!); checkReview(o.review!); unittest.expect( o.reviewStatus!, unittest.equals('foo'), ); checkSecurityAnimation(o.securityAnimation!); - checkUnnamed122(o.textModulesData!); + checkUnnamed135(o.textModulesData!); checkLocalizedString(o.transitOperatorName!); unittest.expect( o.transitType!, unittest.equals('foo'), ); - checkUnnamed123(o.valueAddedModuleData!); + checkUnnamed136(o.valueAddedModuleData!); unittest.expect( o.version!, unittest.equals('foo'), @@ -6569,12 +6767,12 @@ void checkTransitClassAddMessageResponse(api.TransitClassAddMessageResponse o) { buildCounterTransitClassAddMessageResponse--; } -core.List buildUnnamed124() => [ +core.List buildUnnamed137() => [ buildTransitClass(), buildTransitClass(), ]; -void checkUnnamed124(core.List o) { +void checkUnnamed137(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkTransitClass(o[0]); checkTransitClass(o[1]); @@ -6586,7 +6784,7 @@ api.TransitClassListResponse buildTransitClassListResponse() { buildCounterTransitClassListResponse++; if (buildCounterTransitClassListResponse < 3) { o.pagination = buildPagination(); - o.resources = buildUnnamed124(); + o.resources = buildUnnamed137(); } buildCounterTransitClassListResponse--; return o; @@ -6596,28 +6794,28 @@ void checkTransitClassListResponse(api.TransitClassListResponse o) { buildCounterTransitClassListResponse++; if (buildCounterTransitClassListResponse < 3) { checkPagination(o.pagination!); - checkUnnamed124(o.resources!); + checkUnnamed137(o.resources!); } buildCounterTransitClassListResponse--; } -core.List buildUnnamed125() => [ +core.List buildUnnamed138() => [ buildImageModuleData(), buildImageModuleData(), ]; -void checkUnnamed125(core.List o) { +void checkUnnamed138(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkImageModuleData(o[0]); checkImageModuleData(o[1]); } -core.List buildUnnamed126() => [ +core.List buildUnnamed139() => [ 'foo', 'foo', ]; -void checkUnnamed126(core.List o) { +void checkUnnamed139(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -6629,56 +6827,67 @@ void checkUnnamed126(core.List o) { ); } -core.List buildUnnamed127() => [ +core.List buildUnnamed140() => [ buildLatLongPoint(), buildLatLongPoint(), ]; -void checkUnnamed127(core.List o) { +void checkUnnamed140(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkLatLongPoint(o[0]); checkLatLongPoint(o[1]); } -core.List buildUnnamed128() => [ +core.List buildUnnamed141() => [ + buildMerchantLocation(), + buildMerchantLocation(), + ]; + +void checkUnnamed141(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkMerchantLocation(o[0]); + checkMerchantLocation(o[1]); +} + +core.List buildUnnamed142() => [ buildMessage(), buildMessage(), ]; -void 
checkUnnamed128(core.List o) { +void checkUnnamed142(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkMessage(o[0]); checkMessage(o[1]); } -core.List buildUnnamed129() => [ +core.List buildUnnamed143() => [ buildTextModuleData(), buildTextModuleData(), ]; -void checkUnnamed129(core.List o) { +void checkUnnamed143(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkTextModuleData(o[0]); checkTextModuleData(o[1]); } -core.List buildUnnamed130() => [ +core.List buildUnnamed144() => [ buildTicketLeg(), buildTicketLeg(), ]; -void checkUnnamed130(core.List o) { +void checkUnnamed144(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkTicketLeg(o[0]); checkTicketLeg(o[1]); } -core.List buildUnnamed131() => [ +core.List buildUnnamed145() => [ buildValueAddedModuleData(), buildValueAddedModuleData(), ]; -void checkUnnamed131(core.List o) { +void checkUnnamed145(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkValueAddedModuleData(o[0]); checkValueAddedModuleData(o[1]); @@ -6705,12 +6914,13 @@ api.TransitObject buildTransitObject() { o.heroImage = buildImage(); o.hexBackgroundColor = 'foo'; o.id = 'foo'; - o.imageModulesData = buildUnnamed125(); + o.imageModulesData = buildUnnamed138(); o.infoModuleData = buildInfoModuleData(); - o.linkedObjectIds = buildUnnamed126(); + o.linkedObjectIds = buildUnnamed139(); o.linksModuleData = buildLinksModuleData(); - o.locations = buildUnnamed127(); - o.messages = buildUnnamed128(); + o.locations = buildUnnamed140(); + o.merchantLocations = buildUnnamed141(); + o.messages = buildUnnamed142(); o.notifyPreference = 'foo'; o.passConstraints = buildPassConstraints(); o.passengerNames = 'foo'; @@ -6720,16 +6930,16 @@ api.TransitObject buildTransitObject() { o.saveRestrictions = buildSaveRestrictions(); o.smartTapRedemptionValue = 'foo'; o.state = 'foo'; - o.textModulesData = buildUnnamed129(); + o.textModulesData = buildUnnamed143(); o.ticketLeg = buildTicketLeg(); - o.ticketLegs = buildUnnamed130(); + o.ticketLegs = buildUnnamed144(); o.ticketNumber = 'foo'; o.ticketRestrictions = buildTicketRestrictions(); o.ticketStatus = 'foo'; o.tripId = 'foo'; o.tripType = 'foo'; o.validTimeInterval = buildTimeInterval(); - o.valueAddedModuleData = buildUnnamed131(); + o.valueAddedModuleData = buildUnnamed145(); o.version = 'foo'; } buildCounterTransitObject--; @@ -6767,12 +6977,13 @@ void checkTransitObject(api.TransitObject o) { o.id!, unittest.equals('foo'), ); - checkUnnamed125(o.imageModulesData!); + checkUnnamed138(o.imageModulesData!); checkInfoModuleData(o.infoModuleData!); - checkUnnamed126(o.linkedObjectIds!); + checkUnnamed139(o.linkedObjectIds!); checkLinksModuleData(o.linksModuleData!); - checkUnnamed127(o.locations!); - checkUnnamed128(o.messages!); + checkUnnamed140(o.locations!); + checkUnnamed141(o.merchantLocations!); + checkUnnamed142(o.messages!); unittest.expect( o.notifyPreference!, unittest.equals('foo'), @@ -6797,9 +7008,9 @@ void checkTransitObject(api.TransitObject o) { o.state!, unittest.equals('foo'), ); - checkUnnamed129(o.textModulesData!); + checkUnnamed143(o.textModulesData!); checkTicketLeg(o.ticketLeg!); - checkUnnamed130(o.ticketLegs!); + checkUnnamed144(o.ticketLegs!); unittest.expect( o.ticketNumber!, unittest.equals('foo'), @@ -6818,7 +7029,7 @@ void checkTransitObject(api.TransitObject o) { unittest.equals('foo'), ); checkTimeInterval(o.validTimeInterval!); - checkUnnamed131(o.valueAddedModuleData!); + checkUnnamed145(o.valueAddedModuleData!); unittest.expect( o.version!, 
unittest.equals('foo'), @@ -6847,12 +7058,12 @@ void checkTransitObjectAddMessageResponse( buildCounterTransitObjectAddMessageResponse--; } -core.List buildUnnamed132() => [ +core.List buildUnnamed146() => [ buildTransitObject(), buildTransitObject(), ]; -void checkUnnamed132(core.List o) { +void checkUnnamed146(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkTransitObject(o[0]); checkTransitObject(o[1]); @@ -6864,7 +7075,7 @@ api.TransitObjectListResponse buildTransitObjectListResponse() { buildCounterTransitObjectListResponse++; if (buildCounterTransitObjectListResponse < 3) { o.pagination = buildPagination(); - o.resources = buildUnnamed132(); + o.resources = buildUnnamed146(); } buildCounterTransitObjectListResponse--; return o; @@ -6874,7 +7085,7 @@ void checkTransitObjectListResponse(api.TransitObjectListResponse o) { buildCounterTransitObjectListResponse++; if (buildCounterTransitObjectListResponse < 3) { checkPagination(o.pagination!); - checkUnnamed132(o.resources!); + checkUnnamed146(o.resources!); } buildCounterTransitObjectListResponse--; } @@ -8025,6 +8236,16 @@ void main() { }); }); + unittest.group('obj-schema-MerchantLocation', () { + unittest.test('to-json--from-json', () async { + final o = buildMerchantLocation(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.MerchantLocation.fromJson( + oJson as core.Map); + checkMerchantLocation(od); + }); + }); + unittest.group('obj-schema-Message', () { unittest.test('to-json--from-json', () async { final o = buildMessage(); diff --git a/generated/googleapis/test/workflows/v1_test.dart b/generated/googleapis/test/workflows/v1_test.dart index dea94756c..dd25b6d43 100644 --- a/generated/googleapis/test/workflows/v1_test.dart +++ b/generated/googleapis/test/workflows/v1_test.dart @@ -598,6 +598,23 @@ void checkUnnamed14(core.Map o) { ); } +core.Map buildUnnamed15() => { + 'x': 'foo', + 'y': 'foo', + }; + +void checkUnnamed15(core.Map o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o['x']!, + unittest.equals('foo'), + ); + unittest.expect( + o['y']!, + unittest.equals('foo'), + ); +} + core.int buildCounterWorkflow = 0; api.Workflow buildWorkflow() { final o = api.Workflow(); @@ -619,8 +636,9 @@ api.Workflow buildWorkflow() { o.sourceContents = 'foo'; o.state = 'foo'; o.stateError = buildStateError(); + o.tags = buildUnnamed14(); o.updateTime = 'foo'; - o.userEnvVars = buildUnnamed14(); + o.userEnvVars = buildUnnamed15(); } buildCounterWorkflow--; return o; @@ -681,11 +699,12 @@ void checkWorkflow(api.Workflow o) { unittest.equals('foo'), ); checkStateError(o.stateError!); + checkUnnamed14(o.tags!); unittest.expect( o.updateTime!, unittest.equals('foo'), ); - checkUnnamed14(o.userEnvVars!); + checkUnnamed15(o.userEnvVars!); } buildCounterWorkflow--; } diff --git a/generated/googleapis/test/workloadmanager/v1_test.dart b/generated/googleapis/test/workloadmanager/v1_test.dart index 4a3ee016c..005ff0f51 100644 --- a/generated/googleapis/test/workloadmanager/v1_test.dart +++ b/generated/googleapis/test/workloadmanager/v1_test.dart @@ -202,6 +202,7 @@ api.Evaluation buildEvaluation() { o.createTime = 'foo'; o.customRulesBucket = 'foo'; o.description = 'foo'; + o.evaluationType = 'foo'; o.labels = buildUnnamed1(); o.name = 'foo'; o.resourceFilter = buildResourceFilter(); @@ -231,6 +232,10 @@ void checkEvaluation(api.Evaluation o) { o.description!, unittest.equals('foo'), ); + unittest.expect( + o.evaluationType!, + unittest.equals('foo'), + ); checkUnnamed1(o.labels!); 
unittest.expect( o.name!, @@ -280,12 +285,23 @@ void checkUnnamed5(core.Map o) { ); } -core.List buildUnnamed6() => [ +core.List buildUnnamed6() => [ + buildNotice(), + buildNotice(), + ]; + +void checkUnnamed6(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkNotice(o[0]); + checkNotice(o[1]); +} + +core.List buildUnnamed7() => [ buildRuleExecutionResult(), buildRuleExecutionResult(), ]; -void checkUnnamed6(core.List o) { +void checkUnnamed7(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkRuleExecutionResult(o[0]); checkRuleExecutionResult(o[1]); @@ -302,7 +318,9 @@ api.Execution buildExecution() { o.inventoryTime = 'foo'; o.labels = buildUnnamed5(); o.name = 'foo'; - o.ruleResults = buildUnnamed6(); + o.notices = buildUnnamed6(); + o.resultSummary = buildSummary(); + o.ruleResults = buildUnnamed7(); o.runType = 'foo'; o.startTime = 'foo'; o.state = 'foo'; @@ -332,7 +350,9 @@ void checkExecution(api.Execution o) { o.name!, unittest.equals('foo'), ); - checkUnnamed6(o.ruleResults!); + checkUnnamed6(o.notices!); + checkSummary(o.resultSummary!); + checkUnnamed7(o.ruleResults!); unittest.expect( o.runType!, unittest.equals('foo'), @@ -349,12 +369,12 @@ void checkExecution(api.Execution o) { buildCounterExecution--; } -core.List buildUnnamed7() => [ +core.List buildUnnamed8() => [ buildCommand(), buildCommand(), ]; -void checkUnnamed7(core.List o) { +void checkUnnamed8(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkCommand(o[0]); checkCommand(o[1]); @@ -365,7 +385,7 @@ api.ExecutionResult buildExecutionResult() { final o = api.ExecutionResult(); buildCounterExecutionResult++; if (buildCounterExecutionResult < 3) { - o.commands = buildUnnamed7(); + o.commands = buildUnnamed8(); o.documentationUrl = 'foo'; o.resource = buildResource(); o.rule = 'foo'; @@ -381,7 +401,7 @@ api.ExecutionResult buildExecutionResult() { void checkExecutionResult(api.ExecutionResult o) { buildCounterExecutionResult++; if (buildCounterExecutionResult < 3) { - checkUnnamed7(o.commands!); + checkUnnamed8(o.commands!); unittest.expect( o.documentationUrl!, unittest.equals('foo'), @@ -445,12 +465,12 @@ void checkExternalDataSources(api.ExternalDataSources o) { buildCounterExternalDataSources--; } -core.List buildUnnamed8() => [ +core.List buildUnnamed9() => [ 'foo', 'foo', ]; -void checkUnnamed8(core.List o) { +void checkUnnamed9(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -467,7 +487,7 @@ api.GceInstanceFilter buildGceInstanceFilter() { final o = api.GceInstanceFilter(); buildCounterGceInstanceFilter++; if (buildCounterGceInstanceFilter < 3) { - o.serviceAccounts = buildUnnamed8(); + o.serviceAccounts = buildUnnamed9(); } buildCounterGceInstanceFilter--; return o; @@ -476,7 +496,7 @@ api.GceInstanceFilter buildGceInstanceFilter() { void checkGceInstanceFilter(api.GceInstanceFilter o) { buildCounterGceInstanceFilter++; if (buildCounterGceInstanceFilter < 3) { - checkUnnamed8(o.serviceAccounts!); + checkUnnamed9(o.serviceAccounts!); } buildCounterGceInstanceFilter--; } @@ -491,6 +511,7 @@ api.Insight buildInsight() { o.sapValidation = buildSapValidation(); o.sentTime = 'foo'; o.sqlserverValidation = buildSqlserverValidation(); + o.torsoValidation = buildTorsoValidation(); } buildCounterInsight--; return o; @@ -510,27 +531,28 @@ void checkInsight(api.Insight o) { unittest.equals('foo'), ); checkSqlserverValidation(o.sqlserverValidation!); + checkTorsoValidation(o.torsoValidation!); } buildCounterInsight--; } -core.List buildUnnamed9() => 
[ +core.List buildUnnamed10() => [ buildEvaluation(), buildEvaluation(), ]; -void checkUnnamed9(core.List o) { +void checkUnnamed10(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkEvaluation(o[0]); checkEvaluation(o[1]); } -core.List buildUnnamed10() => [ +core.List buildUnnamed11() => [ 'foo', 'foo', ]; -void checkUnnamed10(core.List o) { +void checkUnnamed11(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -547,9 +569,9 @@ api.ListEvaluationsResponse buildListEvaluationsResponse() { final o = api.ListEvaluationsResponse(); buildCounterListEvaluationsResponse++; if (buildCounterListEvaluationsResponse < 3) { - o.evaluations = buildUnnamed9(); + o.evaluations = buildUnnamed10(); o.nextPageToken = 'foo'; - o.unreachable = buildUnnamed10(); + o.unreachable = buildUnnamed11(); } buildCounterListEvaluationsResponse--; return o; @@ -558,22 +580,22 @@ api.ListEvaluationsResponse buildListEvaluationsResponse() { void checkListEvaluationsResponse(api.ListEvaluationsResponse o) { buildCounterListEvaluationsResponse++; if (buildCounterListEvaluationsResponse < 3) { - checkUnnamed9(o.evaluations!); + checkUnnamed10(o.evaluations!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed10(o.unreachable!); + checkUnnamed11(o.unreachable!); } buildCounterListEvaluationsResponse--; } -core.List buildUnnamed11() => [ +core.List buildUnnamed12() => [ buildExecutionResult(), buildExecutionResult(), ]; -void checkUnnamed11(core.List o) { +void checkUnnamed12(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkExecutionResult(o[0]); checkExecutionResult(o[1]); @@ -584,7 +606,7 @@ api.ListExecutionResultsResponse buildListExecutionResultsResponse() { final o = api.ListExecutionResultsResponse(); buildCounterListExecutionResultsResponse++; if (buildCounterListExecutionResultsResponse < 3) { - o.executionResults = buildUnnamed11(); + o.executionResults = buildUnnamed12(); o.nextPageToken = 'foo'; } buildCounterListExecutionResultsResponse--; @@ -594,7 +616,7 @@ api.ListExecutionResultsResponse buildListExecutionResultsResponse() { void checkListExecutionResultsResponse(api.ListExecutionResultsResponse o) { buildCounterListExecutionResultsResponse++; if (buildCounterListExecutionResultsResponse < 3) { - checkUnnamed11(o.executionResults!); + checkUnnamed12(o.executionResults!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -603,23 +625,23 @@ void checkListExecutionResultsResponse(api.ListExecutionResultsResponse o) { buildCounterListExecutionResultsResponse--; } -core.List buildUnnamed12() => [ +core.List buildUnnamed13() => [ buildExecution(), buildExecution(), ]; -void checkUnnamed12(core.List o) { +void checkUnnamed13(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkExecution(o[0]); checkExecution(o[1]); } -core.List buildUnnamed13() => [ +core.List buildUnnamed14() => [ 'foo', 'foo', ]; -void checkUnnamed13(core.List o) { +void checkUnnamed14(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -636,9 +658,9 @@ api.ListExecutionsResponse buildListExecutionsResponse() { final o = api.ListExecutionsResponse(); buildCounterListExecutionsResponse++; if (buildCounterListExecutionsResponse < 3) { - o.executions = buildUnnamed12(); + o.executions = buildUnnamed13(); o.nextPageToken = 'foo'; - o.unreachable = buildUnnamed13(); + o.unreachable = buildUnnamed14(); } buildCounterListExecutionsResponse--; return o; @@ -647,22 +669,22 @@ api.ListExecutionsResponse 
buildListExecutionsResponse() { void checkListExecutionsResponse(api.ListExecutionsResponse o) { buildCounterListExecutionsResponse++; if (buildCounterListExecutionsResponse < 3) { - checkUnnamed12(o.executions!); + checkUnnamed13(o.executions!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed13(o.unreachable!); + checkUnnamed14(o.unreachable!); } buildCounterListExecutionsResponse--; } -core.List buildUnnamed14() => [ +core.List buildUnnamed15() => [ buildLocation(), buildLocation(), ]; -void checkUnnamed14(core.List o) { +void checkUnnamed15(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkLocation(o[0]); checkLocation(o[1]); @@ -673,7 +695,7 @@ api.ListLocationsResponse buildListLocationsResponse() { final o = api.ListLocationsResponse(); buildCounterListLocationsResponse++; if (buildCounterListLocationsResponse < 3) { - o.locations = buildUnnamed14(); + o.locations = buildUnnamed15(); o.nextPageToken = 'foo'; } buildCounterListLocationsResponse--; @@ -683,7 +705,7 @@ api.ListLocationsResponse buildListLocationsResponse() { void checkListLocationsResponse(api.ListLocationsResponse o) { buildCounterListLocationsResponse++; if (buildCounterListLocationsResponse < 3) { - checkUnnamed14(o.locations!); + checkUnnamed15(o.locations!); unittest.expect( o.nextPageToken!, unittest.equals('foo'), @@ -692,12 +714,12 @@ void checkListLocationsResponse(api.ListLocationsResponse o) { buildCounterListLocationsResponse--; } -core.List buildUnnamed15() => [ +core.List buildUnnamed16() => [ buildOperation(), buildOperation(), ]; -void checkUnnamed15(core.List o) { +void checkUnnamed16(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkOperation(o[0]); checkOperation(o[1]); @@ -709,7 +731,7 @@ api.ListOperationsResponse buildListOperationsResponse() { buildCounterListOperationsResponse++; if (buildCounterListOperationsResponse < 3) { o.nextPageToken = 'foo'; - o.operations = buildUnnamed15(); + o.operations = buildUnnamed16(); } buildCounterListOperationsResponse--; return o; @@ -722,17 +744,17 @@ void checkListOperationsResponse(api.ListOperationsResponse o) { o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed15(o.operations!); + checkUnnamed16(o.operations!); } buildCounterListOperationsResponse--; } -core.List buildUnnamed16() => [ +core.List buildUnnamed17() => [ buildRule(), buildRule(), ]; -void checkUnnamed16(core.List o) { +void checkUnnamed17(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkRule(o[0]); checkRule(o[1]); @@ -744,7 +766,7 @@ api.ListRulesResponse buildListRulesResponse() { buildCounterListRulesResponse++; if (buildCounterListRulesResponse < 3) { o.nextPageToken = 'foo'; - o.rules = buildUnnamed16(); + o.rules = buildUnnamed17(); } buildCounterListRulesResponse--; return o; @@ -757,17 +779,17 @@ void checkListRulesResponse(api.ListRulesResponse o) { o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed16(o.rules!); + checkUnnamed17(o.rules!); } buildCounterListRulesResponse--; } -core.List buildUnnamed17() => [ +core.List buildUnnamed18() => [ buildScannedResource(), buildScannedResource(), ]; -void checkUnnamed17(core.List o) { +void checkUnnamed18(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkScannedResource(o[0]); checkScannedResource(o[1]); @@ -779,7 +801,7 @@ api.ListScannedResourcesResponse buildListScannedResourcesResponse() { buildCounterListScannedResourcesResponse++; if (buildCounterListScannedResourcesResponse < 3) { o.nextPageToken = 'foo'; - o.scannedResources = 
buildUnnamed17(); + o.scannedResources = buildUnnamed18(); } buildCounterListScannedResourcesResponse--; return o; @@ -792,17 +814,17 @@ void checkListScannedResourcesResponse(api.ListScannedResourcesResponse o) { o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed17(o.scannedResources!); + checkUnnamed18(o.scannedResources!); } buildCounterListScannedResourcesResponse--; } -core.Map buildUnnamed18() => { +core.Map buildUnnamed19() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed18(core.Map o) { +void checkUnnamed19(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -814,7 +836,7 @@ void checkUnnamed18(core.Map o) { ); } -core.Map buildUnnamed19() => { +core.Map buildUnnamed20() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -827,7 +849,7 @@ core.Map buildUnnamed19() => { }, }; -void checkUnnamed19(core.Map o) { +void checkUnnamed20(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted1 = (o['x']!) as core.Map; unittest.expect(casted1, unittest.hasLength(3)); @@ -865,9 +887,9 @@ api.Location buildLocation() { buildCounterLocation++; if (buildCounterLocation < 3) { o.displayName = 'foo'; - o.labels = buildUnnamed18(); + o.labels = buildUnnamed19(); o.locationId = 'foo'; - o.metadata = buildUnnamed19(); + o.metadata = buildUnnamed20(); o.name = 'foo'; } buildCounterLocation--; @@ -881,12 +903,12 @@ void checkLocation(api.Location o) { o.displayName!, unittest.equals('foo'), ); - checkUnnamed18(o.labels!); + checkUnnamed19(o.labels!); unittest.expect( o.locationId!, unittest.equals('foo'), ); - checkUnnamed19(o.metadata!); + checkUnnamed20(o.metadata!); unittest.expect( o.name!, unittest.equals('foo'), @@ -895,7 +917,29 @@ void checkLocation(api.Location o) { buildCounterLocation--; } -core.Map buildUnnamed20() => { +core.int buildCounterNotice = 0; +api.Notice buildNotice() { + final o = api.Notice(); + buildCounterNotice++; + if (buildCounterNotice < 3) { + o.message = 'foo'; + } + buildCounterNotice--; + return o; +} + +void checkNotice(api.Notice o) { + buildCounterNotice++; + if (buildCounterNotice < 3) { + unittest.expect( + o.message!, + unittest.equals('foo'), + ); + } + buildCounterNotice--; +} + +core.Map buildUnnamed21() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -908,7 +952,7 @@ core.Map buildUnnamed20() => { }, }; -void checkUnnamed20(core.Map o) { +void checkUnnamed21(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted3 = (o['x']!) as core.Map; unittest.expect(casted3, unittest.hasLength(3)); @@ -940,7 +984,7 @@ void checkUnnamed20(core.Map o) { ); } -core.Map buildUnnamed21() => { +core.Map buildUnnamed22() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -953,7 +997,7 @@ core.Map buildUnnamed21() => { }, }; -void checkUnnamed21(core.Map o) { +void checkUnnamed22(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted5 = (o['x']!) 
as core.Map; unittest.expect(casted5, unittest.hasLength(3)); @@ -992,9 +1036,9 @@ api.Operation buildOperation() { if (buildCounterOperation < 3) { o.done = true; o.error = buildStatus(); - o.metadata = buildUnnamed20(); + o.metadata = buildUnnamed21(); o.name = 'foo'; - o.response = buildUnnamed21(); + o.response = buildUnnamed22(); } buildCounterOperation--; return o; @@ -1005,12 +1049,12 @@ void checkOperation(api.Operation o) { if (buildCounterOperation < 3) { unittest.expect(o.done!, unittest.isTrue); checkStatus(o.error!); - checkUnnamed20(o.metadata!); + checkUnnamed21(o.metadata!); unittest.expect( o.name!, unittest.equals('foo'), ); - checkUnnamed21(o.response!); + checkUnnamed22(o.response!); } buildCounterOperation--; } @@ -1047,12 +1091,12 @@ void checkResource(api.Resource o) { buildCounterResource--; } -core.Map buildUnnamed22() => { +core.Map buildUnnamed23() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed22(core.Map o) { +void checkUnnamed23(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -1064,12 +1108,12 @@ void checkUnnamed22(core.Map o) { ); } -core.List buildUnnamed23() => [ +core.List buildUnnamed24() => [ 'foo', 'foo', ]; -void checkUnnamed23(core.List o) { +void checkUnnamed24(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -1081,12 +1125,12 @@ void checkUnnamed23(core.List o) { ); } -core.List buildUnnamed24() => [ +core.List buildUnnamed25() => [ 'foo', 'foo', ]; -void checkUnnamed24(core.List o) { +void checkUnnamed25(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -1104,9 +1148,9 @@ api.ResourceFilter buildResourceFilter() { buildCounterResourceFilter++; if (buildCounterResourceFilter < 3) { o.gceInstanceFilter = buildGceInstanceFilter(); - o.inclusionLabels = buildUnnamed22(); - o.resourceIdPatterns = buildUnnamed23(); - o.scopes = buildUnnamed24(); + o.inclusionLabels = buildUnnamed23(); + o.resourceIdPatterns = buildUnnamed24(); + o.scopes = buildUnnamed25(); } buildCounterResourceFilter--; return o; @@ -1116,19 +1160,19 @@ void checkResourceFilter(api.ResourceFilter o) { buildCounterResourceFilter++; if (buildCounterResourceFilter < 3) { checkGceInstanceFilter(o.gceInstanceFilter!); - checkUnnamed22(o.inclusionLabels!); - checkUnnamed23(o.resourceIdPatterns!); - checkUnnamed24(o.scopes!); + checkUnnamed23(o.inclusionLabels!); + checkUnnamed24(o.resourceIdPatterns!); + checkUnnamed25(o.scopes!); } buildCounterResourceFilter--; } -core.List buildUnnamed25() => [ +core.List buildUnnamed26() => [ 'foo', 'foo', ]; -void checkUnnamed25(core.List o) { +void checkUnnamed26(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -1145,7 +1189,7 @@ api.ResourceStatus buildResourceStatus() { final o = api.ResourceStatus(); buildCounterResourceStatus++; if (buildCounterResourceStatus < 3) { - o.rulesNewerVersions = buildUnnamed25(); + o.rulesNewerVersions = buildUnnamed26(); o.state = 'foo'; } buildCounterResourceStatus--; @@ -1155,7 +1199,7 @@ api.ResourceStatus buildResourceStatus() { void checkResourceStatus(api.ResourceStatus o) { buildCounterResourceStatus++; if (buildCounterResourceStatus < 3) { - checkUnnamed25(o.rulesNewerVersions!); + checkUnnamed26(o.rulesNewerVersions!); unittest.expect( o.state!, unittest.equals('foo'), @@ -1164,12 +1208,12 @@ void checkResourceStatus(api.ResourceStatus o) { buildCounterResourceStatus--; } -core.List buildUnnamed26() => [ +core.List buildUnnamed27() => [ 'foo', 'foo', ]; -void 
checkUnnamed26(core.List o) { +void checkUnnamed27(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -1195,7 +1239,7 @@ api.Rule buildRule() { o.revisionId = 'foo'; o.secondaryCategory = 'foo'; o.severity = 'foo'; - o.tags = buildUnnamed26(); + o.tags = buildUnnamed27(); o.uri = 'foo'; } buildCounterRule--; @@ -1241,7 +1285,7 @@ void checkRule(api.Rule o) { o.severity!, unittest.equals('foo'), ); - checkUnnamed26(o.tags!); + checkUnnamed27(o.tags!); unittest.expect( o.uri!, unittest.equals('foo'), @@ -1361,12 +1405,12 @@ void checkSapDiscovery(api.SapDiscovery o) { buildCounterSapDiscovery--; } -core.List buildUnnamed27() => [ +core.List buildUnnamed28() => [ 'foo', 'foo', ]; -void checkUnnamed27(core.List o) { +void checkUnnamed28(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -1378,23 +1422,23 @@ void checkUnnamed27(core.List o) { ); } -core.List buildUnnamed28() => [ +core.List buildUnnamed29() => [ buildSapDiscoveryComponent(), buildSapDiscoveryComponent(), ]; -void checkUnnamed28(core.List o) { +void checkUnnamed29(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkSapDiscoveryComponent(o[0]); checkSapDiscoveryComponent(o[1]); } -core.List buildUnnamed29() => [ +core.List buildUnnamed30() => [ buildSapDiscoveryResource(), buildSapDiscoveryResource(), ]; -void checkUnnamed29(core.List o) { +void checkUnnamed30(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkSapDiscoveryResource(o[0]); checkSapDiscoveryResource(o[1]); @@ -1407,10 +1451,10 @@ api.SapDiscoveryComponent buildSapDiscoveryComponent() { if (buildCounterSapDiscoveryComponent < 3) { o.applicationProperties = buildSapDiscoveryComponentApplicationProperties(); o.databaseProperties = buildSapDiscoveryComponentDatabaseProperties(); - o.haHosts = buildUnnamed27(); + o.haHosts = buildUnnamed28(); o.hostProject = 'foo'; - o.replicationSites = buildUnnamed28(); - o.resources = buildUnnamed29(); + o.replicationSites = buildUnnamed29(); + o.resources = buildUnnamed30(); o.sid = 'foo'; o.topologyType = 'foo'; } @@ -1423,13 +1467,13 @@ void checkSapDiscoveryComponent(api.SapDiscoveryComponent o) { if (buildCounterSapDiscoveryComponent < 3) { checkSapDiscoveryComponentApplicationProperties(o.applicationProperties!); checkSapDiscoveryComponentDatabaseProperties(o.databaseProperties!); - checkUnnamed27(o.haHosts!); + checkUnnamed28(o.haHosts!); unittest.expect( o.hostProject!, unittest.equals('foo'), ); - checkUnnamed28(o.replicationSites!); - checkUnnamed29(o.resources!); + checkUnnamed29(o.replicationSites!); + checkUnnamed30(o.resources!); unittest.expect( o.sid!, unittest.equals('foo'), @@ -1584,12 +1628,12 @@ void checkSapDiscoveryMetadata(api.SapDiscoveryMetadata o) { buildCounterSapDiscoveryMetadata--; } -core.List buildUnnamed30() => [ +core.List buildUnnamed31() => [ 'foo', 'foo', ]; -void checkUnnamed30(core.List o) { +void checkUnnamed31(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -1607,7 +1651,7 @@ api.SapDiscoveryResource buildSapDiscoveryResource() { buildCounterSapDiscoveryResource++; if (buildCounterSapDiscoveryResource < 3) { o.instanceProperties = buildSapDiscoveryResourceInstanceProperties(); - o.relatedResources = buildUnnamed30(); + o.relatedResources = buildUnnamed31(); o.resourceKind = 'foo'; o.resourceType = 'foo'; o.resourceUri = 'foo'; @@ -1621,7 +1665,7 @@ void checkSapDiscoveryResource(api.SapDiscoveryResource o) { buildCounterSapDiscoveryResource++; if 
(buildCounterSapDiscoveryResource < 3) { checkSapDiscoveryResourceInstanceProperties(o.instanceProperties!); - checkUnnamed30(o.relatedResources!); + checkUnnamed31(o.relatedResources!); unittest.expect( o.resourceKind!, unittest.equals('foo'), @@ -1643,24 +1687,24 @@ void checkSapDiscoveryResource(api.SapDiscoveryResource o) { } core.List - buildUnnamed31() => [ + buildUnnamed32() => [ buildSapDiscoveryResourceInstancePropertiesAppInstance(), buildSapDiscoveryResourceInstancePropertiesAppInstance(), ]; -void checkUnnamed31( +void checkUnnamed32( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkSapDiscoveryResourceInstancePropertiesAppInstance(o[0]); checkSapDiscoveryResourceInstancePropertiesAppInstance(o[1]); } -core.List buildUnnamed32() => [ +core.List buildUnnamed33() => [ 'foo', 'foo', ]; -void checkUnnamed32(core.List o) { +void checkUnnamed33(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -1678,8 +1722,8 @@ api.SapDiscoveryResourceInstanceProperties final o = api.SapDiscoveryResourceInstanceProperties(); buildCounterSapDiscoveryResourceInstanceProperties++; if (buildCounterSapDiscoveryResourceInstanceProperties < 3) { - o.appInstances = buildUnnamed31(); - o.clusterInstances = buildUnnamed32(); + o.appInstances = buildUnnamed32(); + o.clusterInstances = buildUnnamed33(); o.instanceNumber = 'foo'; o.instanceRole = 'foo'; o.isDrSite = true; @@ -1693,8 +1737,8 @@ void checkSapDiscoveryResourceInstanceProperties( api.SapDiscoveryResourceInstanceProperties o) { buildCounterSapDiscoveryResourceInstanceProperties++; if (buildCounterSapDiscoveryResourceInstanceProperties < 3) { - checkUnnamed31(o.appInstances!); - checkUnnamed32(o.clusterInstances!); + checkUnnamed32(o.appInstances!); + checkUnnamed33(o.clusterInstances!); unittest.expect( o.instanceNumber!, unittest.equals('foo'), @@ -1741,13 +1785,13 @@ void checkSapDiscoveryResourceInstancePropertiesAppInstance( buildCounterSapDiscoveryResourceInstancePropertiesAppInstance--; } -core.List buildUnnamed33() => +core.List buildUnnamed34() => [ buildSapDiscoveryWorkloadPropertiesProductVersion(), buildSapDiscoveryWorkloadPropertiesProductVersion(), ]; -void checkUnnamed33( +void checkUnnamed34( core.List o) { unittest.expect(o, unittest.hasLength(2)); checkSapDiscoveryWorkloadPropertiesProductVersion(o[0]); @@ -1755,12 +1799,12 @@ void checkUnnamed33( } core.List - buildUnnamed34() => [ + buildUnnamed35() => [ buildSapDiscoveryWorkloadPropertiesSoftwareComponentProperties(), buildSapDiscoveryWorkloadPropertiesSoftwareComponentProperties(), ]; -void checkUnnamed34( +void checkUnnamed35( core.List o) { unittest.expect(o, unittest.hasLength(2)); @@ -1773,8 +1817,8 @@ api.SapDiscoveryWorkloadProperties buildSapDiscoveryWorkloadProperties() { final o = api.SapDiscoveryWorkloadProperties(); buildCounterSapDiscoveryWorkloadProperties++; if (buildCounterSapDiscoveryWorkloadProperties < 3) { - o.productVersions = buildUnnamed33(); - o.softwareComponentVersions = buildUnnamed34(); + o.productVersions = buildUnnamed34(); + o.softwareComponentVersions = buildUnnamed35(); } buildCounterSapDiscoveryWorkloadProperties--; return o; @@ -1783,8 +1827,8 @@ api.SapDiscoveryWorkloadProperties buildSapDiscoveryWorkloadProperties() { void checkSapDiscoveryWorkloadProperties(api.SapDiscoveryWorkloadProperties o) { buildCounterSapDiscoveryWorkloadProperties++; if (buildCounterSapDiscoveryWorkloadProperties < 3) { - checkUnnamed33(o.productVersions!); - checkUnnamed34(o.softwareComponentVersions!); + 
checkUnnamed34(o.productVersions!); + checkUnnamed35(o.softwareComponentVersions!); } buildCounterSapDiscoveryWorkloadProperties--; } @@ -1860,12 +1904,12 @@ void checkSapDiscoveryWorkloadPropertiesSoftwareComponentProperties( buildCounterSapDiscoveryWorkloadPropertiesSoftwareComponentProperties--; } -core.List buildUnnamed35() => [ +core.List buildUnnamed36() => [ buildSapValidationValidationDetail(), buildSapValidationValidationDetail(), ]; -void checkUnnamed35(core.List o) { +void checkUnnamed36(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkSapValidationValidationDetail(o[0]); checkSapValidationValidationDetail(o[1]); @@ -1877,7 +1921,7 @@ api.SapValidation buildSapValidation() { buildCounterSapValidation++; if (buildCounterSapValidation < 3) { o.projectId = 'foo'; - o.validationDetails = buildUnnamed35(); + o.validationDetails = buildUnnamed36(); o.zone = 'foo'; } buildCounterSapValidation--; @@ -1891,7 +1935,7 @@ void checkSapValidation(api.SapValidation o) { o.projectId!, unittest.equals('foo'), ); - checkUnnamed35(o.validationDetails!); + checkUnnamed36(o.validationDetails!); unittest.expect( o.zone!, unittest.equals('foo'), @@ -1900,12 +1944,12 @@ void checkSapValidation(api.SapValidation o) { buildCounterSapValidation--; } -core.Map buildUnnamed36() => { +core.Map buildUnnamed37() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed36(core.Map o) { +void checkUnnamed37(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -1922,7 +1966,7 @@ api.SapValidationValidationDetail buildSapValidationValidationDetail() { final o = api.SapValidationValidationDetail(); buildCounterSapValidationValidationDetail++; if (buildCounterSapValidationValidationDetail < 3) { - o.details = buildUnnamed36(); + o.details = buildUnnamed37(); o.isPresent = true; o.sapValidationType = 'foo'; } @@ -1933,7 +1977,7 @@ api.SapValidationValidationDetail buildSapValidationValidationDetail() { void checkSapValidationValidationDetail(api.SapValidationValidationDetail o) { buildCounterSapValidationValidationDetail++; if (buildCounterSapValidationValidationDetail < 3) { - checkUnnamed36(o.details!); + checkUnnamed37(o.details!); unittest.expect(o.isPresent!, unittest.isTrue); unittest.expect( o.sapValidationType!, @@ -2002,12 +2046,12 @@ void checkShellCommand(api.ShellCommand o) { buildCounterShellCommand--; } -core.List buildUnnamed37() => [ +core.List buildUnnamed38() => [ buildSqlserverValidationValidationDetail(), buildSqlserverValidationValidationDetail(), ]; -void checkUnnamed37(core.List o) { +void checkUnnamed38(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkSqlserverValidationValidationDetail(o[0]); checkSqlserverValidationValidationDetail(o[1]); @@ -2021,7 +2065,7 @@ api.SqlserverValidation buildSqlserverValidation() { o.agentVersion = 'foo'; o.instance = 'foo'; o.projectId = 'foo'; - o.validationDetails = buildUnnamed37(); + o.validationDetails = buildUnnamed38(); } buildCounterSqlserverValidation--; return o; @@ -2042,17 +2086,17 @@ void checkSqlserverValidation(api.SqlserverValidation o) { o.projectId!, unittest.equals('foo'), ); - checkUnnamed37(o.validationDetails!); + checkUnnamed38(o.validationDetails!); } buildCounterSqlserverValidation--; } -core.Map buildUnnamed38() => { +core.Map buildUnnamed39() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed38(core.Map o) { +void checkUnnamed39(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -2069,7 +2113,7 @@ api.SqlserverValidationDetails 
buildSqlserverValidationDetails() { final o = api.SqlserverValidationDetails(); buildCounterSqlserverValidationDetails++; if (buildCounterSqlserverValidationDetails < 3) { - o.fields = buildUnnamed38(); + o.fields = buildUnnamed39(); } buildCounterSqlserverValidationDetails--; return o; @@ -2078,17 +2122,17 @@ api.SqlserverValidationDetails buildSqlserverValidationDetails() { void checkSqlserverValidationDetails(api.SqlserverValidationDetails o) { buildCounterSqlserverValidationDetails++; if (buildCounterSqlserverValidationDetails < 3) { - checkUnnamed38(o.fields!); + checkUnnamed39(o.fields!); } buildCounterSqlserverValidationDetails--; } -core.List buildUnnamed39() => [ +core.List buildUnnamed40() => [ buildSqlserverValidationDetails(), buildSqlserverValidationDetails(), ]; -void checkUnnamed39(core.List o) { +void checkUnnamed40(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkSqlserverValidationDetails(o[0]); checkSqlserverValidationDetails(o[1]); @@ -2100,7 +2144,7 @@ api.SqlserverValidationValidationDetail final o = api.SqlserverValidationValidationDetail(); buildCounterSqlserverValidationValidationDetail++; if (buildCounterSqlserverValidationValidationDetail < 3) { - o.details = buildUnnamed39(); + o.details = buildUnnamed40(); o.type = 'foo'; } buildCounterSqlserverValidationValidationDetail--; @@ -2111,7 +2155,7 @@ void checkSqlserverValidationValidationDetail( api.SqlserverValidationValidationDetail o) { buildCounterSqlserverValidationValidationDetail++; if (buildCounterSqlserverValidationValidationDetail < 3) { - checkUnnamed39(o.details!); + checkUnnamed40(o.details!); unittest.expect( o.type!, unittest.equals('foo'), @@ -2120,7 +2164,7 @@ void checkSqlserverValidationValidationDetail( buildCounterSqlserverValidationValidationDetail--; } -core.Map buildUnnamed40() => { +core.Map buildUnnamed41() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -2133,7 +2177,7 @@ core.Map buildUnnamed40() => { }, }; -void checkUnnamed40(core.Map o) { +void checkUnnamed41(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted7 = (o['x']!) 
as core.Map; unittest.expect(casted7, unittest.hasLength(3)); @@ -2165,15 +2209,15 @@ void checkUnnamed40(core.Map o) { ); } -core.List> buildUnnamed41() => [ - buildUnnamed40(), - buildUnnamed40(), +core.List> buildUnnamed42() => [ + buildUnnamed41(), + buildUnnamed41(), ]; -void checkUnnamed41(core.List> o) { +void checkUnnamed42(core.List> o) { unittest.expect(o, unittest.hasLength(2)); - checkUnnamed40(o[0]); - checkUnnamed40(o[1]); + checkUnnamed41(o[0]); + checkUnnamed41(o[1]); } core.int buildCounterStatus = 0; @@ -2182,7 +2226,7 @@ api.Status buildStatus() { buildCounterStatus++; if (buildCounterStatus < 3) { o.code = 42; - o.details = buildUnnamed41(); + o.details = buildUnnamed42(); o.message = 'foo'; } buildCounterStatus--; @@ -2196,7 +2240,7 @@ void checkStatus(api.Status o) { o.code!, unittest.equals(42), ); - checkUnnamed41(o.details!); + checkUnnamed42(o.details!); unittest.expect( o.message!, unittest.equals('foo'), @@ -2205,12 +2249,100 @@ void checkStatus(api.Status o) { buildCounterStatus--; } -core.Map buildUnnamed42() => { +core.int buildCounterSummary = 0; +api.Summary buildSummary() { + final o = api.Summary(); + buildCounterSummary++; + if (buildCounterSummary < 3) { + o.failures = 'foo'; + o.newFailures = 'foo'; + o.newFixes = 'foo'; + } + buildCounterSummary--; + return o; +} + +void checkSummary(api.Summary o) { + buildCounterSummary++; + if (buildCounterSummary < 3) { + unittest.expect( + o.failures!, + unittest.equals('foo'), + ); + unittest.expect( + o.newFailures!, + unittest.equals('foo'), + ); + unittest.expect( + o.newFixes!, + unittest.equals('foo'), + ); + } + buildCounterSummary--; +} + +core.Map buildUnnamed43() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed42(core.Map o) { +void checkUnnamed43(core.Map o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o['x']!, + unittest.equals('foo'), + ); + unittest.expect( + o['y']!, + unittest.equals('foo'), + ); +} + +core.int buildCounterTorsoValidation = 0; +api.TorsoValidation buildTorsoValidation() { + final o = api.TorsoValidation(); + buildCounterTorsoValidation++; + if (buildCounterTorsoValidation < 3) { + o.agentVersion = 'foo'; + o.instanceName = 'foo'; + o.projectId = 'foo'; + o.validationDetails = buildUnnamed43(); + o.workloadType = 'foo'; + } + buildCounterTorsoValidation--; + return o; +} + +void checkTorsoValidation(api.TorsoValidation o) { + buildCounterTorsoValidation++; + if (buildCounterTorsoValidation < 3) { + unittest.expect( + o.agentVersion!, + unittest.equals('foo'), + ); + unittest.expect( + o.instanceName!, + unittest.equals('foo'), + ); + unittest.expect( + o.projectId!, + unittest.equals('foo'), + ); + checkUnnamed43(o.validationDetails!); + unittest.expect( + o.workloadType!, + unittest.equals('foo'), + ); + } + buildCounterTorsoValidation--; +} + +core.Map buildUnnamed44() => { + 'x': 'foo', + 'y': 'foo', + }; + +void checkUnnamed44(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -2228,7 +2360,7 @@ api.ViolationDetails buildViolationDetails() { buildCounterViolationDetails++; if (buildCounterViolationDetails < 3) { o.asset = 'foo'; - o.observed = buildUnnamed42(); + o.observed = buildUnnamed44(); o.serviceAccount = 'foo'; } buildCounterViolationDetails--; @@ -2242,7 +2374,7 @@ void checkViolationDetails(api.ViolationDetails o) { o.asset!, unittest.equals('foo'), ); - checkUnnamed42(o.observed!); + checkUnnamed44(o.observed!); unittest.expect( o.serviceAccount!, unittest.equals('foo'), @@ -2486,6 +2618,16 @@ void main() { 
}); }); + unittest.group('obj-schema-Notice', () { + unittest.test('to-json--from-json', () async { + final o = buildNotice(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.Notice.fromJson(oJson as core.Map); + checkNotice(od); + }); + }); + unittest.group('obj-schema-Operation', () { unittest.test('to-json--from-json', () async { final o = buildOperation(); @@ -2750,6 +2892,26 @@ void main() { }); }); + unittest.group('obj-schema-Summary', () { + unittest.test('to-json--from-json', () async { + final o = buildSummary(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.Summary.fromJson(oJson as core.Map); + checkSummary(od); + }); + }); + + unittest.group('obj-schema-TorsoValidation', () { + unittest.test('to-json--from-json', () async { + final o = buildTorsoValidation(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.TorsoValidation.fromJson( + oJson as core.Map); + checkTorsoValidation(od); + }); + }); + unittest.group('obj-schema-ViolationDetails', () { unittest.test('to-json--from-json', () async { final o = buildViolationDetails(); diff --git a/generated/googleapis/test/youtube/v3_test.dart b/generated/googleapis/test/youtube/v3_test.dart index 6fd0f935a..4c1c93a26 100644 --- a/generated/googleapis/test/youtube/v3_test.dart +++ b/generated/googleapis/test/youtube/v3_test.dart @@ -1804,6 +1804,8 @@ api.ChannelToStoreLinkDetails buildChannelToStoreLinkDetails() { buildCounterChannelToStoreLinkDetails++; if (buildCounterChannelToStoreLinkDetails < 3) { o.billingDetails = buildChannelToStoreLinkDetailsBillingDetails(); + o.merchantAffiliateProgramDetails = + buildChannelToStoreLinkDetailsMerchantAffiliateProgramDetails(); o.merchantId = 'foo'; o.storeName = 'foo'; o.storeUrl = 'foo'; @@ -1816,6 +1818,8 @@ void checkChannelToStoreLinkDetails(api.ChannelToStoreLinkDetails o) { buildCounterChannelToStoreLinkDetails++; if (buildCounterChannelToStoreLinkDetails < 3) { checkChannelToStoreLinkDetailsBillingDetails(o.billingDetails!); + checkChannelToStoreLinkDetailsMerchantAffiliateProgramDetails( + o.merchantAffiliateProgramDetails!); unittest.expect( o.merchantId!, unittest.equals('foo'), @@ -1856,6 +1860,33 @@ void checkChannelToStoreLinkDetailsBillingDetails( buildCounterChannelToStoreLinkDetailsBillingDetails--; } +core.int buildCounterChannelToStoreLinkDetailsMerchantAffiliateProgramDetails = + 0; +api.ChannelToStoreLinkDetailsMerchantAffiliateProgramDetails + buildChannelToStoreLinkDetailsMerchantAffiliateProgramDetails() { + final o = api.ChannelToStoreLinkDetailsMerchantAffiliateProgramDetails(); + buildCounterChannelToStoreLinkDetailsMerchantAffiliateProgramDetails++; + if (buildCounterChannelToStoreLinkDetailsMerchantAffiliateProgramDetails < + 3) { + o.status = 'foo'; + } + buildCounterChannelToStoreLinkDetailsMerchantAffiliateProgramDetails--; + return o; +} + +void checkChannelToStoreLinkDetailsMerchantAffiliateProgramDetails( + api.ChannelToStoreLinkDetailsMerchantAffiliateProgramDetails o) { + buildCounterChannelToStoreLinkDetailsMerchantAffiliateProgramDetails++; + if (buildCounterChannelToStoreLinkDetailsMerchantAffiliateProgramDetails < + 3) { + unittest.expect( + o.status!, + unittest.equals('foo'), + ); + } + buildCounterChannelToStoreLinkDetailsMerchantAffiliateProgramDetails--; +} + core.List buildUnnamed19() => [ 'foo', 'foo', @@ -8035,6 +8066,7 @@ api.VideoStatus buildVideoStatus() { final o = api.VideoStatus(); buildCounterVideoStatus++; if (buildCounterVideoStatus < 3) { + 
o.containsSyntheticMedia = true; o.embeddable = true; o.failureReason = 'foo'; o.license = 'foo'; @@ -8053,6 +8085,7 @@ api.VideoStatus buildVideoStatus() { void checkVideoStatus(api.VideoStatus o) { buildCounterVideoStatus++; if (buildCounterVideoStatus < 3) { + unittest.expect(o.containsSyntheticMedia!, unittest.isTrue); unittest.expect(o.embeddable!, unittest.isTrue); unittest.expect( o.failureReason!, @@ -9579,23 +9612,6 @@ void checkUnnamed141(core.List o) { ); } -core.List buildUnnamed142() => [ - 'foo', - 'foo', - ]; - -void checkUnnamed142(core.List o) { - unittest.expect(o, unittest.hasLength(2)); - unittest.expect( - o[0], - unittest.equals('foo'), - ); - unittest.expect( - o[1], - unittest.equals('foo'), - ); -} - void main() { unittest.group('obj-schema-AbuseReport', () { unittest.test('to-json--from-json', () async { @@ -10057,6 +10073,19 @@ void main() { }); }); + unittest.group( + 'obj-schema-ChannelToStoreLinkDetailsMerchantAffiliateProgramDetails', + () { + unittest.test('to-json--from-json', () async { + final o = buildChannelToStoreLinkDetailsMerchantAffiliateProgramDetails(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.ChannelToStoreLinkDetailsMerchantAffiliateProgramDetails.fromJson( + oJson as core.Map); + checkChannelToStoreLinkDetailsMerchantAffiliateProgramDetails(od); + }); + }); + unittest.group('obj-schema-ChannelTopicDetails', () { unittest.test('to-json--from-json', () async { final o = buildChannelTopicDetails(); @@ -17696,94 +17725,4 @@ void main() { checkCommentThread(response as api.CommentThread); }); }); - - unittest.group('resource-YoutubeV3LiveChatMessagesResource', () { - unittest.test('method--stream', () async { - final mock = HttpServerMock(); - final res = api.YouTubeApi(mock).youtube.v3.liveChat.messages; - final arg_hl = 'foo'; - final arg_liveChatId = 'foo'; - final arg_maxResults = 42; - final arg_pageToken = 'foo'; - final arg_part = buildUnnamed142(); - final arg_profileImageSize = 42; - final arg_$fields = 'foo'; - mock.register(unittest.expectAsync2((http.BaseRequest req, json) { - final path = req.url.path; - var pathOffset = 0; - core.int index; - core.String subPart; - unittest.expect( - path.substring(pathOffset, pathOffset + 1), - unittest.equals('/'), - ); - pathOffset += 1; - unittest.expect( - path.substring(pathOffset, pathOffset + 35), - unittest.equals('youtube/v3/liveChat/messages/stream'), - ); - pathOffset += 35; - - final query = req.url.query; - var queryOffset = 0; - final queryMap = >{}; - void addQueryParam(core.String n, core.String v) => - queryMap.putIfAbsent(n, () => []).add(v); - - if (query.isNotEmpty) { - for (var part in query.split('&')) { - final keyValue = part.split('='); - addQueryParam( - core.Uri.decodeQueryComponent(keyValue[0]), - core.Uri.decodeQueryComponent(keyValue[1]), - ); - } - } - unittest.expect( - queryMap['hl']!.first, - unittest.equals(arg_hl), - ); - unittest.expect( - queryMap['liveChatId']!.first, - unittest.equals(arg_liveChatId), - ); - unittest.expect( - core.int.parse(queryMap['maxResults']!.first), - unittest.equals(arg_maxResults), - ); - unittest.expect( - queryMap['pageToken']!.first, - unittest.equals(arg_pageToken), - ); - unittest.expect( - queryMap['part']!, - unittest.equals(arg_part), - ); - unittest.expect( - core.int.parse(queryMap['profileImageSize']!.first), - unittest.equals(arg_profileImageSize), - ); - unittest.expect( - queryMap['fields']!.first, - unittest.equals(arg_$fields), - ); - - final h = { - 'content-type': 'application/json; 
charset=utf-8', - }; - final resp = convert.json.encode(buildLiveChatMessageListResponse()); - return async.Future.value(stringResponse(200, h, resp)); - }), true); - final response = await res.stream( - hl: arg_hl, - liveChatId: arg_liveChatId, - maxResults: arg_maxResults, - pageToken: arg_pageToken, - part: arg_part, - profileImageSize: arg_profileImageSize, - $fields: arg_$fields); - checkLiveChatMessageListResponse( - response as api.LiveChatMessageListResponse); - }); - }); } diff --git a/generated/googleapis_beta/README.md b/generated/googleapis_beta/README.md index bc3257562..bf65c937e 100644 --- a/generated/googleapis_beta/README.md +++ b/generated/googleapis_beta/README.md @@ -119,7 +119,7 @@ Firebase App Check works alongside other Firebase services to help protect your - [Original documentation](https://firebase.google.com/docs/app-check) - [Dart package details](https://pub.dev/documentation/googleapis_beta/9.1.0-wip/firebaseappcheck_v1beta/firebaseappcheck_v1beta-library.html) -#### Firebase Realtime Database API - `firebasedatabase/v1beta` +#### Firebase Realtime Database Management API - `firebasedatabase/v1beta` The Firebase Realtime Database API enables programmatic provisioning and management of Realtime Database instances. diff --git a/generated/googleapis_beta/lib/analyticsdata/v1beta.dart b/generated/googleapis_beta/lib/analyticsdata/v1beta.dart index 26c86f33e..2b00eede5 100644 --- a/generated/googleapis_beta/lib/analyticsdata/v1beta.dart +++ b/generated/googleapis_beta/lib/analyticsdata/v1beta.dart @@ -1720,11 +1720,17 @@ typedef DimensionOrderBy = $DimensionOrderBy; /// The value of a dimension. typedef DimensionValue = $DimensionValue; +/// Filter for empty values. +typedef EmptyFilter = $Empty; + /// An expression to filter dimension or metric values. class Filter { /// A filter for two values. BetweenFilter? betweenFilter; + /// A filter for empty values such as "(not set)" and "" values. + EmptyFilter? emptyFilter; + /// The dimension name or metric name. /// /// In most methods, dimensions & metrics can be used for the first time in @@ -1744,6 +1750,7 @@ class Filter { Filter({ this.betweenFilter, + this.emptyFilter, this.fieldName, this.inListFilter, this.numericFilter, @@ -1756,6 +1763,10 @@ class Filter { ? BetweenFilter.fromJson( json_['betweenFilter'] as core.Map) : null, + emptyFilter: json_.containsKey('emptyFilter') + ? EmptyFilter.fromJson( + json_['emptyFilter'] as core.Map) + : null, fieldName: json_['fieldName'] as core.String?, inListFilter: json_.containsKey('inListFilter') ? InListFilter.fromJson( @@ -1773,6 +1784,7 @@ class Filter { core.Map toJson() => { if (betweenFilter != null) 'betweenFilter': betweenFilter!, + if (emptyFilter != null) 'emptyFilter': emptyFilter!, if (fieldName != null) 'fieldName': fieldName!, if (inListFilter != null) 'inListFilter': inListFilter!, if (numericFilter != null) 'numericFilter': numericFilter!, diff --git a/generated/googleapis_beta/lib/cloudsupport/v2beta.dart b/generated/googleapis_beta/lib/cloudsupport/v2beta.dart index d71174e71..ab20b2c3b 100644 --- a/generated/googleapis_beta/lib/cloudsupport/v2beta.dart +++ b/generated/googleapis_beta/lib/cloudsupport/v2beta.dart @@ -686,7 +686,9 @@ class CasesAttachmentsResource { /// /// [pageSize] - The maximum number of attachments fetched with each request. /// If not provided, the default is 10. The maximum page size that will be - /// returned is 100. + /// returned is 100. 
The size of each page can be smaller than the requested + /// page size and can include zero. For example, you could request 100 + /// attachments on one page, receive 0, and then on the next page, receive 90. /// /// [pageToken] - A token identifying the page of results to return. If /// unspecified, the first page is retrieved. diff --git a/generated/googleapis_beta/lib/dataflow/v1b3.dart b/generated/googleapis_beta/lib/dataflow/v1b3.dart index 8b19cc77c..749fde943 100644 --- a/generated/googleapis_beta/lib/dataflow/v1b3.dart +++ b/generated/googleapis_beta/lib/dataflow/v1b3.dart @@ -4001,6 +4001,31 @@ class DataSamplingReport { }; } +/// The gauge value of a metric. +class DataflowGaugeValue { + /// The timestamp when the gauge was recorded. + core.String? measuredTime; + + /// The value of the gauge. + core.String? value; + + DataflowGaugeValue({ + this.measuredTime, + this.value, + }); + + DataflowGaugeValue.fromJson(core.Map json_) + : this( + measuredTime: json_['measuredTime'] as core.String?, + value: json_['value'] as core.String?, + ); + + core.Map toJson() => { + if (measuredTime != null) 'measuredTime': measuredTime!, + if (value != null) 'value': value!, + }; +} + /// Summary statistics for a population of values. /// /// HistogramValue contains a sequence of buckets and gives a count of values @@ -5225,6 +5250,60 @@ class FloatingPointMean { }; } +/// Information about the GPU usage on the worker. +class GPUUsage { + /// Timestamp of the measurement. + /// + /// Required. + core.String? timestamp; + + /// Utilization info about the GPU. + /// + /// Required. + GPUUtilization? utilization; + + GPUUsage({ + this.timestamp, + this.utilization, + }); + + GPUUsage.fromJson(core.Map json_) + : this( + timestamp: json_['timestamp'] as core.String?, + utilization: json_.containsKey('utilization') + ? GPUUtilization.fromJson( + json_['utilization'] as core.Map) + : null, + ); + + core.Map toJson() => { + if (timestamp != null) 'timestamp': timestamp!, + if (utilization != null) 'utilization': utilization!, + }; +} + +/// Utilization details about the GPU. +class GPUUtilization { + /// GPU utilization rate of any kernel over the last sample period in the + /// range of \[0, 1\]. + /// + /// Required. + core.double? rate; + + GPUUtilization({ + this.rate, + }); + + GPUUtilization.fromJson(core.Map json_) + : this( + rate: (json_['rate'] as core.num?)?.toDouble(), + ); + + core.Map toJson() => { + if (rate != null) 'rate': rate!, + }; +} + /// Request to get updated debug configuration for component. class GetDebugConfigRequest { /// The internal component id for which debug configuration is requested. @@ -7263,6 +7342,9 @@ class MetricValue { /// Optional. core.Map? metricLabels; + /// Non-cumulative int64 value of this metric. + DataflowGaugeValue? valueGauge64; + /// Histogram value of this metric. DataflowHistogramValue? valueHistogram; @@ -7272,6 +7354,7 @@ class MetricValue { MetricValue({ this.metric, this.metricLabels, + this.valueGauge64, this.valueHistogram, this.valueInt64, }); @@ -7287,6 +7370,10 @@ class MetricValue { value as core.String, ), ), + valueGauge64: json_.containsKey('valueGauge64') + ? DataflowGaugeValue.fromJson( + json_['valueGauge64'] as core.Map) + : null, valueHistogram: json_.containsKey('valueHistogram') ? 
DataflowHistogramValue.fromJson(json_['valueHistogram'] as core.Map) @@ -7297,6 +7384,7 @@ class MetricValue { core.Map toJson() => { if (metric != null) 'metric': metric!, if (metricLabels != null) 'metricLabels': metricLabels!, + if (valueGauge64 != null) 'valueGauge64': valueGauge64!, if (valueHistogram != null) 'valueHistogram': valueHistogram!, if (valueInt64 != null) 'valueInt64': valueInt64!, }; @@ -8470,12 +8558,18 @@ class ResourceUtilizationReport { /// CPU utilization samples. core.List? cpuTime; + /// GPU usage samples. + /// + /// Optional. + core.List? gpuUsage; + /// Memory utilization samples. core.List? memoryInfo; ResourceUtilizationReport({ this.containers, this.cpuTime, + this.gpuUsage, this.memoryInfo, }); @@ -8494,6 +8588,10 @@ class ResourceUtilizationReport { ?.map((value) => CPUTime.fromJson( value as core.Map)) .toList(), + gpuUsage: (json_['gpuUsage'] as core.List?) + ?.map((value) => GPUUsage.fromJson( + value as core.Map)) + .toList(), memoryInfo: (json_['memoryInfo'] as core.List?) ?.map((value) => MemInfo.fromJson( value as core.Map)) @@ -8503,6 +8601,7 @@ class ResourceUtilizationReport { core.Map toJson() => { if (containers != null) 'containers': containers!, if (cpuTime != null) 'cpuTime': cpuTime!, + if (gpuUsage != null) 'gpuUsage': gpuUsage!, if (memoryInfo != null) 'memoryInfo': memoryInfo!, }; } diff --git a/generated/googleapis_beta/lib/datalabeling/v1beta1.dart b/generated/googleapis_beta/lib/datalabeling/v1beta1.dart index ecf688c31..0ed06a8fe 100644 --- a/generated/googleapis_beta/lib/datalabeling/v1beta1.dart +++ b/generated/googleapis_beta/lib/datalabeling/v1beta1.dart @@ -2212,8 +2212,8 @@ class ProjectsOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// Request parameters: /// diff --git a/generated/googleapis_beta/lib/datastream/v1alpha1.dart b/generated/googleapis_beta/lib/datastream/v1alpha1.dart index 972d0dd5d..01de65d1a 100644 --- a/generated/googleapis_beta/lib/datastream/v1alpha1.dart +++ b/generated/googleapis_beta/lib/datastream/v1alpha1.dart @@ -586,8 +586,8 @@ class ProjectsLocationsOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// [request] - The metadata request object. 
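Editorial sketch, not part of the generated diff: the cancel-then-poll pattern described in the operations doc comments above, using the datastream/v1alpha1 surface. The top-level class name `DatastreamApi`, the `CancelOperationRequest`/`Operation` types, the OAuth scope, and the operation name are assumptions for illustration; only the resource and method names visible in these hunks are taken from the generated code.

import 'package:googleapis_auth/auth_io.dart' as auth;
import 'package:googleapis_beta/datastream/v1alpha1.dart' as ds;

Future<void> main() async {
  final client = await auth.clientViaApplicationDefaultCredentials(
    scopes: ['https://www.googleapis.com/auth/cloud-platform'], // assumed scope
  );
  try {
    final ops = ds.DatastreamApi(client).projects.locations.operations;
    // Hypothetical operation name; cancellation is best-effort.
    const name =
        'projects/my-project/locations/us-central1/operations/operation-123';
    await ops.cancel(ds.CancelOperationRequest(), name);

    // Poll GetOperation to see whether the cancellation actually took effect.
    final op = await ops.get(name);
    if (op.done == true && op.error?.code == 1) {
      print('Operation was cancelled (google.rpc.Status.code `1`, Code.CANCELLED).');
    } else if (op.done == true) {
      print('Operation completed despite the cancellation request.');
    }
  } finally {
    client.close();
  }
}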
/// diff --git a/generated/googleapis_beta/lib/firebase/v1beta1.dart b/generated/googleapis_beta/lib/firebase/v1beta1.dart index d10e79765..a121c77ba 100644 --- a/generated/googleapis_beta/lib/firebase/v1beta1.dart +++ b/generated/googleapis_beta/lib/firebase/v1beta1.dart @@ -86,9 +86,9 @@ class AvailableProjectsResource { AvailableProjectsResource(commons.ApiRequester client) : _requester = client; - /// Lists each \[Google Cloud Platform (GCP) + /// Lists each \[Google Cloud /// `Project`\](https://cloud.google.com/resource-manager/reference/rest/v1/projects) - /// that can have Firebase resources added to it. + /// that can have Firebase resources added and Firebase services enabled. /// /// A Project will only be listed if: - The caller has sufficient /// [Google IAM](https://cloud.google.com/iam) permissions to call @@ -198,24 +198,25 @@ class ProjectsResource { ProjectsResource(commons.ApiRequester client) : _requester = client; - /// Adds Firebase resources to the specified existing \[Google Cloud Platform - /// (GCP) + /// Adds Firebase resources and enables Firebase services in the specified + /// existing \[Google Cloud /// `Project`\](https://cloud.google.com/resource-manager/reference/rest/v1/projects). /// - /// Since a FirebaseProject is actually also a GCP `Project`, a - /// `FirebaseProject` has the same underlying GCP identifiers (`projectNumber` - /// and `projectId`). This allows for easy interop with Google APIs. The - /// result of this call is an \[`Operation`\](../../v1beta1/operations). Poll - /// the `Operation` to track the provisioning process by calling GetOperation - /// until \[`done`\](../../v1beta1/operations#Operation.FIELDS.done) is - /// `true`. When `done` is `true`, the `Operation` has either succeeded or - /// failed. If the `Operation` succeeded, its + /// Since a FirebaseProject is actually also a Google Cloud `Project`, a + /// `FirebaseProject` has the same underlying Google Cloud identifiers + /// (`projectNumber` and `projectId`). This allows for easy interop with + /// Google APIs. The result of this call is an + /// \[`Operation`\](../../v1beta1/operations). Poll the `Operation` to track + /// the provisioning process by calling GetOperation until + /// \[`done`\](../../v1beta1/operations#Operation.FIELDS.done) is `true`. When + /// `done` is `true`, the `Operation` has either succeeded or failed. If the + /// `Operation` succeeded, its /// \[`response`\](../../v1beta1/operations#Operation.FIELDS.response) is set /// to a FirebaseProject; if the `Operation` failed, its /// \[`error`\](../../v1beta1/operations#Operation.FIELDS.error) is set to a /// google.rpc.Status. The `Operation` is automatically deleted after /// completion, so there is no need to call DeleteOperation. This method does - /// not modify any billing account information on the underlying GCP + /// not modify any billing account information on the underlying Google Cloud /// `Project`. 
To call `AddFirebase`, a project member or service account must /// have the following permissions (the IAM roles of Editor and Owner contain /// these permissions): `firebase.projects.update`, @@ -226,16 +227,16 @@ class ProjectsResource { /// /// Request parameters: /// - /// [project] - The resource name of the GCP `Project` to which Firebase - /// resources will be added, in the format: projects/PROJECT_IDENTIFIER Refer - /// to the `FirebaseProject` + /// [project] - The resource name of the Google Cloud `Project` in which + /// Firebase resources will be added and Firebase services enabled, in the + /// format: projects/ PROJECT_IDENTIFIER Refer to the `FirebaseProject` /// \[`name`\](../projects#FirebaseProject.FIELDS.name) field for details /// about PROJECT_IDENTIFIER values. After calling `AddFirebase`, the unique /// Project identifiers ( /// \[`projectNumber`\](https://cloud.google.com/resource-manager/reference/rest/v1/projects#Project.FIELDS.project_number) /// and /// \[`projectId`\](https://cloud.google.com/resource-manager/reference/rest/v1/projects#Project.FIELDS.project_id)) - /// of the underlying GCP `Project` are also the identifiers of the + /// of the underlying Google Cloud `Project` are also the identifiers of the /// FirebaseProject. /// Value must have pattern `^projects/\[^/\]+$`. /// @@ -1238,23 +1239,25 @@ class ProjectsAvailableLocationsResource { ProjectsAvailableLocationsResource(commons.ApiRequester client) : _requester = client; - /// **DEPRECATED.** _Instead, use the applicable resource-specific REST API - /// (or associated documentation, as needed) to determine valid locations for - /// each resource used in your Project._ Lists the valid Google Cloud Platform - /// (GCP) resource locations for the specified Project (including a - /// FirebaseProject). - /// - /// One of these locations can be selected as the Project's - /// [_default_ GCP resource location](https://firebase.google.com/docs/projects/locations), - /// which is the geographical location where the Project's resources, such as - /// Cloud Firestore, will be provisioned by default. However, if the default - /// GCP resource location has already been set for the Project, then this - /// setting cannot be changed. This call checks for any possible + /// **DECOMMISSIONED.** **If called, this endpoint will return a 404 error.** + /// _Instead, use the applicable resource-specific REST API (or associated + /// documentation, as needed) to determine valid locations for each resource + /// used in your Project._ Lists the valid \["locations for default Google + /// Cloud + /// resources"\](https://firebase.google.com/docs/projects/locations#default-cloud-location) + /// for the specified Project (including a FirebaseProject). + /// + /// One of these locations can be selected as the Project's location for + /// default Google Cloud resources, which is the geographical location where + /// the Project's resources associated with Google App Engine (such as the + /// default Cloud Firestore instance) will be provisioned by default. However, + /// if the location for default Google Cloud resources has already been set + /// for the Project, then this setting cannot be changed. This call checks for + /// any possible /// [location restrictions](https://cloud.google.com/resource-manager/docs/organization-policy/defining-locations) /// for the specified Project and, thus, might return a subset of all possible - /// GCP resource locations. 
To list all GCP resource locations (regardless of - /// any restrictions), call the endpoint without specifying a unique project - /// identifier (that is, + /// locations. To list all locations (regardless of any restrictions), call + /// the endpoint without specifying a unique project identifier (that is, /// `/v1beta1/{parent=projects/-}/listAvailableLocations`). To call /// `ListAvailableLocations` with a specified project, a member must be at /// minimum a Viewer of the Project. Calls without a specified project do not @@ -1262,7 +1265,8 @@ class ProjectsAvailableLocationsResource { /// /// Request parameters: /// - /// [parent] - The FirebaseProject for which to list GCP resource locations, + /// [parent] - The FirebaseProject for which to list + /// [locations for default Google Cloud resources](https://firebase.google.com/docs/projects/locations#default-cloud-location), /// in the format: projects/PROJECT_IDENTIFIER Refer to the `FirebaseProject` /// \[`name`\](../projects#FirebaseProject.FIELDS.name) field for details /// about PROJECT_IDENTIFIER values. If no unique project identifier is @@ -1326,26 +1330,27 @@ class ProjectsDefaultLocationResource { ProjectsDefaultLocationResource(commons.ApiRequester client) : _requester = client; - /// **DEPRECATED.** _Instead, use the applicable resource-specific REST API to - /// set the location for each resource used in your Project._ Sets the default - /// Google Cloud Platform (GCP) resource location for the specified - /// FirebaseProject. + /// **DECOMMISSIONED.** **If called, this endpoint will return a 404 error.** + /// _Instead, use the applicable resource-specific REST API to set the + /// location for each resource used in your Project._ Sets the \["location for + /// default Google Cloud + /// resources"\](https://firebase.google.com/docs/projects/locations#default-cloud-location) + /// for the specified FirebaseProject. /// - /// This method creates an App Engine application with a + /// This method creates a Google App Engine application with a /// [default Cloud Storage bucket](https://cloud.google.com/appengine/docs/standard/python/googlecloudstorageclient/setting-up-cloud-storage#activating_a_cloud_storage_bucket), /// located in the specified /// \[`locationId`\](#body.request_body.FIELDS.location_id). This location /// must be one of the available - /// [GCP resource locations](https://firebase.google.com/docs/projects/locations). - /// After the default GCP resource location is finalized, or if it was already - /// set, it cannot be changed. The default GCP resource location for the - /// specified `FirebaseProject` might already be set because either the - /// underlying GCP `Project` already has an App Engine application or - /// `FinalizeDefaultLocation` was previously called with a specified - /// `locationId`. Any new calls to `FinalizeDefaultLocation` with a - /// *different* specified `locationId` will return a 409 error. The result of - /// this call is an \[`Operation`\](../../v1beta1/operations), which can be - /// used to track the provisioning process. The + /// [App Engine locations](https://cloud.google.com/about/locations#region). + /// After the location for default Google Cloud resources is finalized, or if + /// it was already set, it cannot be changed. 
The location for default Google + /// Cloud resources for the specified `FirebaseProject` might already be set + /// because either the underlying Google Cloud `Project` already has an App + /// Engine application or `FinalizeDefaultLocation` was previously called with + /// a specified `locationId`. The result of this call is an + /// \[`Operation`\](../../v1beta1/operations), which can be used to track the + /// provisioning process. The /// \[`response`\](../../v1beta1/operations#Operation.FIELDS.response) type of /// the `Operation` is google.protobuf.Empty. The `Operation` can be polled by /// its `name` using GetOperation until `done` is true. When `done` is true, @@ -1362,11 +1367,12 @@ class ProjectsDefaultLocationResource { /// /// Request parameters: /// - /// [parent] - The resource name of the FirebaseProject for which the default - /// GCP resource location will be set, in the format: - /// projects/PROJECT_IDENTIFIER Refer to the `FirebaseProject` - /// \[`name`\](../projects#FirebaseProject.FIELDS.name) field for details - /// about PROJECT_IDENTIFIER values. + /// [parent] - The resource name of the FirebaseProject for which the + /// \["location for default Google Cloud + /// resources"\](https://firebase.google.com/docs/projects/locations#default-cloud-location) + /// will be set, in the format: projects/PROJECT_IDENTIFIER Refer to the + /// `FirebaseProject` \[`name`\](../projects#FirebaseProject.FIELDS.name) + /// field for details about PROJECT_IDENTIFIER values. /// Value must have pattern `^projects/\[^/\]+$`. /// /// [$fields] - Selector specifying which fields to include in a partial @@ -2094,15 +2100,16 @@ class ProjectsWebAppsResource { /// All fields are required. class AddFirebaseRequest { - /// Instead, to set a Project's default GCP resource location, call - /// \[`FinalizeDefaultLocation`\](../projects.defaultLocation/finalize) after - /// you add Firebase resources to the GCP `Project`. - /// - /// The ID of the Project's default GCP resource location. The location must + /// **DEPRECATED.** _Instead, use product-specific REST APIs to work with the + /// location of each resource in a Project. + /// + /// This field may be ignored, especially for newly provisioned projects after + /// October 30, 2024._ The ID of the Project's \["location for default Google + /// Cloud + /// resources"\](https://firebase.google.com/docs/projects/locations#default-cloud-location), + /// which are resources associated with Google App Engine. The location must /// be one of the available - /// [GCP resource locations](https://firebase.google.com/docs/projects/locations). - /// - /// Deprecated. + /// [Google App Engine locations](https://cloud.google.com/about/locations#region). core.String? locationId; AddFirebaseRequest({ @@ -2153,29 +2160,33 @@ class AddGoogleAnalyticsRequest { } class AdminSdkConfig { - /// **DEPRECATED.** _Instead, find the default Firebase Realtime Database - /// instance name using the + /// **DEPRECATED.** _Instead, find the URL of the default Realtime Database + /// instance using the /// [list endpoint](https://firebase.google.com/docs/reference/rest/database/database-management/rest/v1beta/projects.locations.instances/list) /// within the Firebase Realtime Database REST API. /// - /// Note that the default instance for the Project might not yet be - /// provisioned, so the return might not contain a default instance._ The - /// default Firebase Realtime Database URL. 
+ /// If the default instance for the Project has not yet been provisioned, the + /// return might not contain a default instance. Note that the config that's + /// generated for the Firebase console or the Firebase CLI uses the Realtime + /// Database endpoint to populate this value for that config._ The URL of the + /// default Firebase Realtime Database instance. @core.Deprecated( 'Not supported. Member documentation may have more information.', ) core.String? databaseURL; /// **DEPRECATED.** _Instead, use product-specific REST APIs to find the - /// location of resources._ The ID of the Project's default GCP resource - /// location. - /// - /// The location is one of the available - /// [GCP resource locations](https://firebase.google.com/docs/projects/locations). - /// This field is omitted if the default GCP resource location has not been - /// finalized yet. To set a Project's default GCP resource location, call - /// \[`FinalizeDefaultLocation`\](../projects.defaultLocation/finalize) after - /// you add Firebase resources to the Project. + /// location of each resource in a Project. + /// + /// This field may not be populated, especially for newly provisioned projects + /// after October 30, 2024._ The ID of the Project's \["location for default + /// Google Cloud + /// resources"\](https://firebase.google.com/docs/projects/locations#default-cloud-location), + /// which are resources associated with Google App Engine. The location is one + /// of the available + /// [App Engine locations](https://cloud.google.com/about/locations#region). + /// This field is omitted if the location for default Google Cloud resources + /// has not been set. @core.Deprecated( 'Not supported. Member documentation may have more information.', ) @@ -2190,14 +2201,16 @@ class AdminSdkConfig { /// Immutable. core.String? projectId; - /// **DEPRECATED.** _Instead, find the default Cloud Storage for Firebase - /// bucket using the + /// **DEPRECATED.** _Instead, find the name of the default Cloud Storage for + /// Firebase bucket using the /// [list endpoint](https://firebase.google.com/docs/reference/rest/storage/rest/v1beta/projects.buckets/list) /// within the Cloud Storage for Firebase REST API. /// - /// Note that the default bucket for the Project might not yet be provisioned, - /// so the return might not contain a default bucket._ The default Cloud - /// Storage for Firebase storage bucket name. + /// If the default bucket for the Project has not yet been provisioned, the + /// return might not contain a default bucket. Note that the config that's + /// generated for the Firebase console or the Firebase CLI uses the Cloud + /// Storage for Firebase endpoint to populate this value for that config._ The + /// name of the default Cloud Storage for Firebase bucket. @core.Deprecated( 'Not supported. Member documentation may have more information.', ) @@ -2352,10 +2365,12 @@ class AndroidApp { /// This etag is strongly validated. core.String? etag; - /// Timestamp of when the App will be considered expired and cannot be - /// undeleted. + /// If the App has been removed from the Project, this is the timestamp of + /// when the App is considered expired and will be permanently deleted. /// - /// This value is only provided if the App is in the `DELETED` state. + /// After this time, the App cannot be undeleted (that is, restored to the + /// Project). This value is only provided if the App is in the `DELETED` + /// state. /// /// Output only. core.String? 
expireTime; @@ -2497,16 +2512,16 @@ class AndroidAppConfig { /// resource-specific API._ The default auto-provisioned resources associated /// with the Project. class DefaultResources { - /// **DEPRECATED.** _Instead, find the default Firebase Hosting site name - /// using the + /// **DEPRECATED.** _Instead, find the name of the default Firebase Hosting + /// site using /// [ListSites](https://firebase.google.com/docs/reference/hosting/rest/v1beta1/projects.sites/list) /// within the Firebase Hosting REST API. /// - /// Note that the default site for the Project might not yet be provisioned, - /// so the return might not contain a default site._ The default Firebase - /// Hosting site name, in the format: PROJECT_ID Though rare, your `projectId` - /// might already be used as the name for an existing Hosting site in another - /// project (learn more about creating non-default, + /// If the default Hosting site for the Project has not yet been provisioned, + /// the return might not contain a default site._ The name of the default + /// Firebase Hosting site, in the format: PROJECT_ID Though rare, your + /// `projectId` might already be used as the name for an existing Hosting site + /// in another project (learn more about creating non-default, /// [additional sites](https://firebase.google.com/docs/hosting/multisites)). /// In these cases, your `projectId` is appended with a hyphen then five /// alphanumeric characters to create your default Hosting site name. For @@ -2520,15 +2535,17 @@ class DefaultResources { core.String? hostingSite; /// **DEPRECATED.** _Instead, use product-specific REST APIs to find the - /// location of resources._ The ID of the Project's default GCP resource - /// location. - /// - /// The location is one of the available - /// [GCP resource locations](https://firebase.google.com/docs/projects/locations). - /// This field is omitted if the default GCP resource location has not been - /// finalized yet. To set a Project's default GCP resource location, call - /// \[`FinalizeDefaultLocation`\](../projects.defaultLocation/finalize) after - /// you add Firebase resources to the Project. + /// location of each resource in a Project. + /// + /// This field may not be populated, especially for newly provisioned projects + /// after October 30, 2024._ The ID of the Project's \["location for default + /// Google Cloud + /// resources"\](https://firebase.google.com/docs/projects/locations#default-cloud-location), + /// which are resources associated with Google App Engine. The location is one + /// of the available + /// [Google App Engine locations](https://cloud.google.com/about/locations#region). + /// This field is omitted if the location for default Google Cloud resources + /// has not been set. /// /// Output only. @core.Deprecated( @@ -2536,17 +2553,16 @@ class DefaultResources { ) core.String? locationId; - /// **DEPRECATED.** _Instead, find the default Firebase Realtime Database - /// instance name using the + /// **DEPRECATED.** _Instead, find the name of the default Realtime Database + /// instance using the /// [list endpoint](https://firebase.google.com/docs/reference/rest/database/database-management/rest/v1beta/projects.locations.instances/list) /// within the Firebase Realtime Database REST API. 
/// - /// Note that the default instance for the Project might not yet be - /// provisioned, so the return might not contain a default instance._ The - /// default Firebase Realtime Database instance name, in the format: - /// PROJECT_ID Though rare, your `projectId` might already be used as the name - /// for an existing Realtime Database instance in another project (learn more - /// about + /// If the default Realtime Database instance for a Project has not yet been + /// provisioned, the return might not contain a default instance._ The default + /// Firebase Realtime Database instance name, in the format: PROJECT_ID Though + /// rare, your `projectId` might already be used as the name for an existing + /// Realtime Database instance in another project (learn more about /// [database sharding](https://firebase.google.com/docs/database/usage/sharding)). /// In these cases, your `projectId` is appended with a hyphen then five /// alphanumeric characters to create your default Realtime Database instance /// name. For @@ -2559,14 +2575,16 @@ ) core.String? realtimeDatabaseInstance; - /// **DEPRECATED.** _Instead, find the default Cloud Storage for Firebase - /// bucket using the + /// **DEPRECATED.** _Instead, find the name of the default Cloud Storage for + /// Firebase bucket using the /// [list endpoint](https://firebase.google.com/docs/reference/rest/storage/rest/v1beta/projects.buckets/list) /// within the Cloud Storage for Firebase REST API. /// - /// Note that the default bucket for the Project might not yet be provisioned, - /// so the return might not contain a default bucket._ The default Cloud - /// Storage for Firebase storage bucket, in the format: PROJECT_ID.appspot.com + /// If the default bucket for the Project has not yet been provisioned, the + /// return might not contain a default bucket._ The name of the default Cloud + /// Storage for Firebase bucket, in one of the following formats: * If + /// provisioned _before_ October 30, 2024: PROJECT_ID.appspot.com * If + /// provisioned _on or after_ October 30, 2024: PROJECT_ID.firebasestorage.app /// /// Output only. @core.Deprecated( /// @@ -2608,10 +2626,13 @@ typedef Empty = $Empty; class FinalizeDefaultLocationRequest { - /// The ID of the Project's default GCP resource location. + /// **DEPRECATED** The ID of the Project's \["location for default Google + /// Cloud + /// resources"\](https://firebase.google.com/docs/projects/locations#default-cloud-location), + /// which are resources associated with Google App Engine. /// /// The location must be one of the available - /// [GCP resource locations](https://firebase.google.com/docs/projects/locations). + /// [Google App Engine locations](https://cloud.google.com/about/locations#region). core.String? locationId; FinalizeDefaultLocationRequest({ @@ -2660,10 +2681,12 @@ class FirebaseAppInfo { /// The user-assigned display name of the Firebase App. core.String? displayName; - /// Timestamp of when the App will be considered expired and cannot be - /// undeleted. + /// If the App has been removed from the Project, this is the timestamp of + /// when the App is considered expired and will be permanently deleted. /// - /// This value is only provided if the App is in the `DELETED` state. + /// After this time, the App cannot be undeleted (that is, restored to the + /// Project). This value is only provided if the App is in the `DELETED` + /// state. /// /// Output only. core.String?
expireTime; @@ -2747,14 +2770,15 @@ class FirebaseAppInfo { /// /// It is the container for Firebase Apps, Firebase Hosting sites, storage /// systems (Firebase Realtime Database, Cloud Firestore, Cloud Storage -/// buckets), and other Firebase and Google Cloud Platform (GCP) resources. You -/// create a `FirebaseProject` by calling AddFirebase and specifying an -/// *existing* \[GCP +/// buckets), and other Firebase and Google Cloud resources. You create a +/// `FirebaseProject` by calling AddFirebase and specifying an *existing* +/// \[Google Cloud /// `Project`\](https://cloud.google.com/resource-manager/reference/rest/v1/projects). -/// This adds Firebase resources to the existing GCP `Project`. Since a -/// FirebaseProject is actually also a GCP `Project`, a `FirebaseProject` has -/// the same underlying GCP identifiers (`projectNumber` and `projectId`). This -/// allows for easy interop with Google APIs. +/// This adds Firebase resources to the existing Google Cloud `Project`. Since a +/// FirebaseProject is actually also a Google Cloud `Project`, a +/// `FirebaseProject` has the same underlying Google Cloud identifiers +/// (`projectNumber` and `projectId`). This allows for easy interop with Google +/// APIs. class FirebaseProject { /// A set of user-defined annotations for the FirebaseProject. /// @@ -2923,10 +2947,12 @@ class IosApp { /// This etag is strongly validated. core.String? etag; - /// Timestamp of when the App will be considered expired and cannot be - /// undeleted. + /// If the App has been removed from the Project, this is the timestamp of + /// when the App is considered expired and will be permanently deleted. /// - /// This value is only provided if the App is in the `DELETED` state. + /// After this time, the App cannot be undeleted (that is, restored to the + /// Project). This value is only provided if the App is in the `DELETED` + /// state. /// /// Output only. core.String? expireTime; @@ -3123,8 +3149,8 @@ class ListAvailableProjectsResponse { /// not be persisted. core.String? nextPageToken; - /// The list of GCP `Projects` which can have Firebase resources added to - /// them. + /// The list of Google Cloud `Projects` which can have Firebase resources + /// added to them. core.List? projectInfo; ListAvailableProjectsResponse({ @@ -3271,20 +3297,23 @@ class ListWebAppsResponse { /// resource locations. /// /// Instead, consult product documentation to determine valid locations for each -/// resource used in your Project._ A GCP resource location that can be selected -/// for a FirebaseProject. +/// resource used in your Project._ A \["location for default Google Cloud +/// resources"\](https://firebase.google.com/docs/projects/locations#default-cloud-location) +/// that can be selected for a FirebaseProject. These are resources associated +/// with Google App Engine. class Location { - /// Products and services that are available in the GCP resource location. + /// Products and services that are available in the location for default + /// Google Cloud resources. core.List? features; - /// The ID of the GCP resource location. + /// The ID of the Project's location for default Google Cloud resources. /// /// It will be one of the available - /// [GCP resource locations](https://firebase.google.com/docs/projects/locations#types). + /// [Google App Engine locations](https://cloud.google.com/about/locations#region). core.String? 
locationId; - /// Indicates whether the GCP resource location is a \[regional or - /// multi-regional + /// Indicates whether the location for default Google Cloud resources is a + /// \[regional or multi-regional /// location\](https://firebase.google.com/docs/projects/locations#types) for /// data replication. /// Possible string values are: @@ -3395,26 +3424,31 @@ class Operation { }; } -/// A reference to a Google Cloud Platform (GCP) `Project`. +/// A reference to a Google Cloud `Project`. class ProjectInfo { - /// The user-assigned display name of the GCP `Project`, for example: `My App` + /// The user-assigned display name of the Google Cloud `Project`, for example: + /// `My App`. core.String? displayName; - /// The ID of the Project's default GCP resource location. + /// **DEPRECATED** _Instead, use product-specific REST APIs to work with the + /// location of each resource in a Project. /// + /// This field may not be populated, especially for newly provisioned projects + /// after October 30, 2024._ The ID of the Project's \["location for default + /// Google Cloud + /// resources"\](https://firebase.google.com/docs/projects/locations#default-cloud-location). /// The location is one of the available - /// [GCP resource locations](https://firebase.google.com/docs/projects/locations). + /// [Google App Engine locations](https://cloud.google.com/about/locations#region). /// Not all Projects will have this field populated. If it is not populated, - /// it means that the Project does not yet have a default GCP resource - /// location. To set a Project's default GCP resource location, call - /// \[`FinalizeDefaultLocation`\](../projects.defaultLocation/finalize) after - /// you add Firebase resources to the Project. + /// it means that the Project does not yet have a location for default Google + /// Cloud resources. core.String? locationId; - /// The resource name of the GCP `Project` to which Firebase resources can be - /// added, in the format: projects/PROJECT_IDENTIFIER Refer to the - /// `FirebaseProject` \[`name`\](../projects#FirebaseProject.FIELDS.name) - /// field for details about PROJECT_IDENTIFIER values. + /// The resource name of the Google Cloud `Project` to which Firebase + /// resources can be added, in the format: projects/PROJECT_IDENTIFIER Refer + /// to the `FirebaseProject` + /// \[`name`\](../projects#FirebaseProject.FIELDS.name) field for details + /// about PROJECT_IDENTIFIER values. core.String? project; ProjectInfo({ @@ -3479,9 +3513,9 @@ class RemoveAndroidAppRequest { /// Determines whether to _immediately_ delete the AndroidApp. /// /// If set to true, the App is immediately deleted from the Project and cannot - /// be restored to the Project. If not set, defaults to false, which means the - /// App will be set to expire in 30 days. Within the 30 days, the App may be - /// restored to the Project using UndeleteAndroidApp. + /// be undeleted (that is, restored to the Project). If not set, defaults to + /// false, which means the App will be set to expire in 30 days. Within the 30 + /// days, the App may be restored to the Project using UndeleteAndroidApp. core.bool? immediate; /// If set to true, the request is only validated. @@ -3526,9 +3560,9 @@ class RemoveIosAppRequest { /// Determines whether to _immediately_ delete the IosApp. /// /// If set to true, the App is immediately deleted from the Project and cannot - /// be restored to the Project. If not set, defaults to false, which means the - /// App will be set to expire in 30 days. 
Within the 30 days, the App may be - /// restored to the Project using UndeleteIosApp + /// be undeleted (that is, restored to the Project). If not set, defaults to + /// false, which means the App will be set to expire in 30 days. Within the 30 + /// days, the App may be restored to the Project using UndeleteIosApp core.bool? immediate; /// If set to true, the request is only validated. @@ -3573,9 +3607,9 @@ class RemoveWebAppRequest { /// Determines whether to _immediately_ delete the WebApp. /// /// If set to true, the App is immediately deleted from the Project and cannot - /// be restored to the Project. If not set, defaults to false, which means the - /// App will be set to expire in 30 days. Within the 30 days, the App may be - /// restored to the Project using UndeleteWebApp + /// be undeleted (that is, restored to the Project). If not set, defaults to + /// false, which means the App will be set to expire in 30 days. Within the 30 + /// days, the App may be restored to the Project using UndeleteWebApp core.bool? immediate; /// If set to true, the request is only validated. @@ -3874,10 +3908,12 @@ class WebApp { /// This etag is strongly validated. core.String? etag; - /// Timestamp of when the App will be considered expired and cannot be - /// undeleted. + /// If the App has been removed from the Project, this is the timestamp of + /// when the App is considered expired and will be permanently deleted. /// - /// This value is only provided if the App is in the `DELETED` state. + /// After this time, the App cannot be undeleted (that is, restored to the + /// Project). This value is only provided if the App is in the `DELETED` + /// state. /// /// Output only. core.String? expireTime; @@ -3991,29 +4027,33 @@ class WebAppConfig { /// PROJECT_ID.firebaseapp.com core.String? authDomain; - /// **DEPRECATED.** _Instead, find the default Firebase Realtime Database - /// instance name using the + /// **DEPRECATED.** _Instead, find the URL of the default Realtime Database + /// instance using the /// [list endpoint](https://firebase.google.com/docs/reference/rest/database/database-management/rest/v1beta/projects.locations.instances/list) /// within the Firebase Realtime Database REST API. /// - /// Note that the default instance for the Project might not yet be - /// provisioned, so the return might not contain a default instance._ The - /// default Firebase Realtime Database URL. + /// If the default instance for the Project has not yet been provisioned, the + /// return might not contain a default instance. Note that the config that's + /// generated for the Firebase console or the Firebase CLI uses the Realtime + /// Database endpoint to populate this value for that config._ The URL of the + /// default Firebase Realtime Database instance. @core.Deprecated( 'Not supported. Member documentation may have more information.', ) core.String? databaseURL; /// **DEPRECATED.** _Instead, use product-specific REST APIs to find the - /// location of resources._ The ID of the Project's default GCP resource - /// location. - /// - /// The location is one of the available - /// [GCP resource locations](https://firebase.google.com/docs/projects/locations). - /// This field is omitted if the default GCP resource location has not been - /// finalized yet. To set a Project's default GCP resource location, call - /// \[`FinalizeDefaultLocation`\](../projects.defaultLocation/finalize) after - /// you add Firebase resources to the Project. + /// location of each resource in a Project. 
+ /// + /// This field may not be populated, especially for newly provisioned projects + /// after October 30, 2024._ The ID of the Project's \["location for default + /// Google Cloud + /// resources"\](https://firebase.google.com/docs/projects/locations#default-cloud-location), + /// which are resources associated with Google App Engine. The location is one + /// of the available + /// [App Engine locations](https://cloud.google.com/about/locations#region). + /// This field is omitted if the location for default Google Cloud resources + /// has not been set. @core.Deprecated( 'Not supported. Member documentation may have more information.', ) @@ -4052,26 +4092,25 @@ class WebAppConfig { /// Output only. Immutable. core.String? projectNumber; - /// Duplicate field for the URL of the default RTDB instances (if there is - /// one) that uses the same field name as the unified V2 config file format. + /// Duplicate field for the URL of the default Realtime Database instances (if + /// the default instance has been provisioned). /// - /// We wanted to make a single config file format for all the app platforms - /// (Android, iOS and web) and we had to pick consistent names for all the - /// fields since there was some varience between the platforms. If the request - /// asks for the V2 format we will populate this field instead of - /// realtime_database_instance_uri. + /// If the request asks for the V2 config format, this field will be populated + /// instead of `realtime_database_instance_uri`. /// /// Optional. core.String? realtimeDatabaseUrl; - /// **DEPRECATED.** _Instead, find the default Cloud Storage for Firebase - /// bucket using the + /// **DEPRECATED.** _Instead, find the name of the default Cloud Storage for + /// Firebase bucket using the /// [list endpoint](https://firebase.google.com/docs/reference/rest/storage/rest/v1beta/projects.buckets/list) /// within the Cloud Storage for Firebase REST API. /// - /// Note that the default bucket for the Project might not yet be provisioned, - /// so the return might not contain a default bucket._ The default Cloud - /// Storage for Firebase storage bucket name. + /// If the default bucket for the Project has not yet been provisioned, the + /// return might not contain a default bucket. Note that the config that's + /// generated for the Firebase console or the Firebase CLI uses the Cloud + /// Storage for Firebase endpoint to populate this value for that config._ The + /// name of the default Cloud Storage for Firebase bucket. @core.Deprecated( 'Not supported. Member documentation may have more information.', ) diff --git a/generated/googleapis_beta/lib/firebasedatabase/v1beta.dart b/generated/googleapis_beta/lib/firebasedatabase/v1beta.dart index 11c0cc445..7a23f5692 100644 --- a/generated/googleapis_beta/lib/firebasedatabase/v1beta.dart +++ b/generated/googleapis_beta/lib/firebasedatabase/v1beta.dart @@ -12,7 +12,7 @@ // ignore_for_file: unnecessary_lambdas // ignore_for_file: unnecessary_string_interpolations -/// Firebase Realtime Database API - v1beta +/// Firebase Realtime Database Management API - v1beta /// /// The Firebase Realtime Database API enables programmatic provisioning and /// management of Realtime Database instances. 
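Editorial sketch, not part of the generated diff: the firebasestorage/v1beta changes that follow add a ProjectsDefaultBucketResource and a getDefaultBucket method, which replace the deprecated storageBucket config fields above as the way to work with the default Cloud Storage for Firebase bucket. The top-level class name `FirebasestorageApi`, the OAuth scope, and the project/location values are assumptions for illustration; the resource, method, and field names come from the generated code below.

import 'package:googleapis_auth/auth_io.dart' as auth;
import 'package:googleapis_beta/firebasestorage/v1beta.dart' as fs;

Future<void> main() async {
  final client = await auth.clientViaApplicationDefaultCredentials(
    scopes: ['https://www.googleapis.com/auth/firebase'], // assumed scope
  );
  try {
    final api = fs.FirebasestorageApi(client); // assumed entry-point class

    // Create (or re-link) the Spark tier-eligible default bucket.
    final created = await api.projects.defaultBucket.create(
      fs.DefaultBucket(location: 'us-central1'), // hypothetical location
      'projects/my-project', // hypothetical project
    );
    print('Linked ${created.name} (storage class: ${created.storageClass})');

    // Read it back through the new getDefaultBucket method.
    final fetched = await api.projects
        .getDefaultBucket('projects/my-project/defaultBucket');
    print('Default bucket location: ${fetched.location}');
  } finally {
    client.close();
  }
}

The companion deleteDefaultBucket method added in the same hunk unlinks and deletes this resource again.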
diff --git a/generated/googleapis_beta/lib/firebasestorage/v1beta.dart b/generated/googleapis_beta/lib/firebasestorage/v1beta.dart index 8413ccbb5..9948a8bf4 100644 --- a/generated/googleapis_beta/lib/firebasestorage/v1beta.dart +++ b/generated/googleapis_beta/lib/firebasestorage/v1beta.dart @@ -23,6 +23,7 @@ /// /// - [ProjectsResource] /// - [ProjectsBucketsResource] +/// - [ProjectsDefaultBucketResource] library; import 'dart:async' as async; @@ -64,8 +65,83 @@ class ProjectsResource { final commons.ApiRequester _requester; ProjectsBucketsResource get buckets => ProjectsBucketsResource(_requester); + ProjectsDefaultBucketResource get defaultBucket => + ProjectsDefaultBucketResource(_requester); ProjectsResource(commons.ApiRequester client) : _requester = client; + + /// Unlinks and deletes the default bucket. + /// + /// Request parameters: + /// + /// [name] - Required. The name of the default bucket to delete, + /// `projects/{project_id_or_number}/defaultBucket`. + /// Value must have pattern `^projects/\[^/\]+/defaultBucket$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [Empty]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future deleteDefaultBucket( + core.String name, { + core.String? $fields, + }) async { + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1beta/' + core.Uri.encodeFull('$name'); + + final response_ = await _requester.request( + url_, + 'DELETE', + queryParams: queryParams_, + ); + return Empty.fromJson(response_ as core.Map); + } + + /// Gets the default bucket. + /// + /// Request parameters: + /// + /// [name] - Required. The name of the default bucket to retrieve, + /// `projects/{project_id_or_number}/defaultBucket`. + /// Value must have pattern `^projects/\[^/\]+/defaultBucket$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [DefaultBucket]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future getDefaultBucket( + core.String name, { + core.String? $fields, + }) async { + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1beta/' + core.Uri.encodeFull('$name'); + + final response_ = await _requester.request( + url_, + 'GET', + queryParams: queryParams_, + ); + return DefaultBucket.fromJson( + response_ as core.Map); + } } class ProjectsBucketsResource { @@ -244,6 +320,60 @@ class ProjectsBucketsResource { } } +class ProjectsDefaultBucketResource { + final commons.ApiRequester _requester; + + ProjectsDefaultBucketResource(commons.ApiRequester client) + : _requester = client; + + /// Creates a Spark tier-eligible Cloud Storage bucket and links it to your + /// Firebase project. + /// + /// If the default bucket already exists, this method will re-link it to your + /// Firebase project. See https://firebase.google.com/pricing for pricing + /// details. + /// + /// [request] - The metadata request object. + /// + /// Request parameters: + /// + /// [parent] - Required. 
The parent resource where the default bucket will be + /// created, `projects/{project_id_or_number}`. + /// Value must have pattern `^projects/\[^/\]+$`. + /// + /// [$fields] - Selector specifying which fields to include in a partial + /// response. + /// + /// Completes with a [DefaultBucket]. + /// + /// Completes with a [commons.ApiRequestError] if the API endpoint returned an + /// error. + /// + /// If the used [http.Client] completes with an error when making a REST call, + /// this method will complete with the same error. + async.Future create( + DefaultBucket request, + core.String parent, { + core.String? $fields, + }) async { + final body_ = convert.json.encode(request); + final queryParams_ = >{ + if ($fields != null) 'fields': [$fields], + }; + + final url_ = 'v1beta/' + core.Uri.encodeFull('$parent') + '/defaultBucket'; + + final response_ = await _requester.request( + url_, + 'POST', + body: body_, + queryParams: queryParams_, + ); + return DefaultBucket.fromJson( + response_ as core.Map); + } +} + /// The request used to link a Google Cloud Storage bucket to a Firebase /// project. typedef AddFirebaseRequest = $Empty; @@ -269,6 +399,59 @@ class Bucket { }; } +/// Spark tier-eligible Cloud Storage bucket. +/// +/// One per project. This resource exists if the underlying Cloud Storage bucket +/// exists and it is linked to your Firebase project. See +/// https://firebase.google.com/pricing for pricing details. +class DefaultBucket { + /// Underlying bucket resource. + /// + /// Output only. + Bucket? bucket; + + /// Location of the default bucket. + /// + /// Immutable. + core.String? location; + + /// Resource name of the default bucket. + core.String? name; + + /// Storage class of the default bucket. + /// + /// Supported values are available at + /// https://cloud.google.com/storage/docs/storage-classes#classes. + /// + /// Immutable. + core.String? storageClass; + + DefaultBucket({ + this.bucket, + this.location, + this.name, + this.storageClass, + }); + + DefaultBucket.fromJson(core.Map json_) + : this( + bucket: json_.containsKey('bucket') + ? Bucket.fromJson( + json_['bucket'] as core.Map) + : null, + location: json_['location'] as core.String?, + name: json_['name'] as core.String?, + storageClass: json_['storageClass'] as core.String?, + ); + + core.Map toJson() => { + if (bucket != null) 'bucket': bucket!, + if (location != null) 'location': location!, + if (name != null) 'name': name!, + if (storageClass != null) 'storageClass': storageClass!, + }; +} + /// A generic empty message that you can re-use to avoid defining duplicated /// empty messages in your APIs. /// diff --git a/generated/googleapis_beta/lib/gkehub/v2alpha.dart b/generated/googleapis_beta/lib/gkehub/v2alpha.dart index faa9cb8ba..38fd10c6c 100644 --- a/generated/googleapis_beta/lib/gkehub/v2alpha.dart +++ b/generated/googleapis_beta/lib/gkehub/v2alpha.dart @@ -450,8 +450,8 @@ class ProjectsLocationsOperationsResource { /// or other methods to check whether the cancellation succeeded or whether /// the operation completed despite cancellation. On successful cancellation, /// the operation is not deleted; instead, it becomes an operation with an - /// Operation.error value with a google.rpc.Status.code of 1, corresponding to - /// `Code.CANCELLED`. + /// Operation.error value with a google.rpc.Status.code of `1`, corresponding + /// to `Code.CANCELLED`. /// /// [request] - The metadata request object. 
/// @@ -940,7 +940,13 @@ class ConfigManagementConfigSync { /// The GSA should have the Monitoring Metric Writer /// (roles/monitoring.metricWriter) IAM role. The Kubernetes ServiceAccount /// `default` in the namespace `config-management-monitoring` should be bound - /// to the GSA. + /// to the GSA. Deprecated: If Workload Identity Federation for GKE is + /// enabled, Google Cloud Service Account is no longer needed for exporting + /// Config Sync metrics: + /// https://cloud.google.com/kubernetes-engine/enterprise/config-sync/docs/how-to/monitor-config-sync-cloud-monitoring#custom-monitoring. + @core.Deprecated( + 'Not supported. Member documentation may have more information.', + ) core.String? metricsGcpServiceAccountEmail; /// OCI repo configuration for the cluster. @@ -956,6 +962,11 @@ class ConfigManagementConfigSync { /// "unstructured" mode. core.String? sourceFormat; + /// Set to true to stop syncing configs for a single cluster. + /// + /// Default to false. + core.bool? stopSyncing; + ConfigManagementConfigSync({ this.allowVerticalScale, this.enabled, @@ -964,6 +975,7 @@ class ConfigManagementConfigSync { this.oci, this.preventDrift, this.sourceFormat, + this.stopSyncing, }); ConfigManagementConfigSync.fromJson(core.Map json_) @@ -982,6 +994,7 @@ class ConfigManagementConfigSync { : null, preventDrift: json_['preventDrift'] as core.bool?, sourceFormat: json_['sourceFormat'] as core.String?, + stopSyncing: json_['stopSyncing'] as core.bool?, ); core.Map toJson() => { @@ -994,6 +1007,7 @@ class ConfigManagementConfigSync { if (oci != null) 'oci': oci!, if (preventDrift != null) 'preventDrift': preventDrift!, if (sourceFormat != null) 'sourceFormat': sourceFormat!, + if (stopSyncing != null) 'stopSyncing': stopSyncing!, }; } @@ -1161,6 +1175,11 @@ class ConfigManagementConfigSyncState { /// level. core.String? clusterLevelStopSyncingState; + /// The number of RootSync and RepoSync CRs in the cluster. + /// + /// Output only. + core.int? crCount; + /// Information about the deployment of ConfigSync, including the version. /// /// of the various Pods deployed @@ -1207,6 +1226,7 @@ class ConfigManagementConfigSyncState { ConfigManagementConfigSyncState({ this.clusterLevelStopSyncingState, + this.crCount, this.deploymentState, this.errors, this.reposyncCrd, @@ -1220,6 +1240,7 @@ class ConfigManagementConfigSyncState { : this( clusterLevelStopSyncingState: json_['clusterLevelStopSyncingState'] as core.String?, + crCount: json_['crCount'] as core.int?, deploymentState: json_.containsKey('deploymentState') ? ConfigManagementConfigSyncDeploymentState.fromJson( json_['deploymentState'] @@ -1245,6 +1266,7 @@ class ConfigManagementConfigSyncState { core.Map toJson() => { if (clusterLevelStopSyncingState != null) 'clusterLevelStopSyncingState': clusterLevelStopSyncingState!, + if (crCount != null) 'crCount': crCount!, if (deploymentState != null) 'deploymentState': deploymentState!, if (errors != null) 'errors': errors!, if (reposyncCrd != null) 'reposyncCrd': reposyncCrd!, @@ -2258,44 +2280,6 @@ class ConfigManagementSyncState { /// (google.protobuf.Empty); } typedef Empty = $Empty; -/// Information of the FeatureConfig applied on the MembershipFeature. -class FeatureConfigRef { - /// Input only. - /// - /// Resource name of FeatureConfig, in the format: - /// `projects/{project}/locations/global/featureConfigs/{feature_config}`. - core.String? config; - - /// When the FeatureConfig was last applied and copied to FeatureSpec. - /// - /// Output only. - core.String? 
configUpdateTime; - - /// An id that uniquely identify a FeatureConfig object. - /// - /// Output only. - core.String? uuid; - - FeatureConfigRef({ - this.config, - this.configUpdateTime, - this.uuid, - }); - - FeatureConfigRef.fromJson(core.Map json_) - : this( - config: json_['config'] as core.String?, - configUpdateTime: json_['configUpdateTime'] as core.String?, - uuid: json_['uuid'] as core.String?, - ); - - core.Map toJson() => { - if (config != null) 'config': config!, - if (configUpdateTime != null) 'configUpdateTime': configUpdateTime!, - if (uuid != null) 'uuid': uuid!, - }; -} - /// FeatureSpec contains user input per-feature spec information. class FeatureSpec { /// Cloudbuild-specific FeatureSpec. @@ -3454,10 +3438,6 @@ class MembershipFeature { /// Output only. core.String? deleteTime; - /// Reference information for a FeatureConfig applied on the - /// MembershipFeature. - FeatureConfigRef? featureConfigRef; - /// GCP labels for this MembershipFeature. core.Map? labels; @@ -3476,6 +3456,8 @@ class MembershipFeature { core.String? name; /// Spec of this membershipFeature. + /// + /// Optional. FeatureSpec? spec; /// State of the this membershipFeature. @@ -3491,7 +3473,6 @@ class MembershipFeature { MembershipFeature({ this.createTime, this.deleteTime, - this.featureConfigRef, this.labels, this.lifecycleState, this.name, @@ -3504,10 +3485,6 @@ class MembershipFeature { : this( createTime: json_['createTime'] as core.String?, deleteTime: json_['deleteTime'] as core.String?, - featureConfigRef: json_.containsKey('featureConfigRef') - ? FeatureConfigRef.fromJson(json_['featureConfigRef'] - as core.Map) - : null, labels: (json_['labels'] as core.Map?)?.map( (key, value) => core.MapEntry( @@ -3534,7 +3511,6 @@ class MembershipFeature { core.Map toJson() => { if (createTime != null) 'createTime': createTime!, if (deleteTime != null) 'deleteTime': deleteTime!, - if (featureConfigRef != null) 'featureConfigRef': featureConfigRef!, if (labels != null) 'labels': labels!, if (lifecycleState != null) 'lifecycleState': lifecycleState!, if (name != null) 'name': name!, diff --git a/generated/googleapis_beta/lib/shared.dart b/generated/googleapis_beta/lib/shared.dart index 66d719f0f..7bffbe07e 100644 --- a/generated/googleapis_beta/lib/shared.dart +++ b/generated/googleapis_beta/lib/shared.dart @@ -940,6 +940,7 @@ class $DimensionValue { /// - analyticsadmin:v1beta : GoogleAnalyticsAdminV1betaArchiveCustomDimensionRequest /// - analyticsadmin:v1beta : GoogleAnalyticsAdminV1betaArchiveCustomMetricRequest /// - analyticsadmin:v1beta : GoogleProtobufEmpty +/// - analyticsdata:v1beta : EmptyFilter /// - area120tables:v1alpha1 : Empty /// - bigqueryconnection:v1beta1 : Empty /// - clouderrorreporting:v1beta1 : DeleteEventsResponse diff --git a/generated/googleapis_beta/lib/sqladmin/v1beta4.dart b/generated/googleapis_beta/lib/sqladmin/v1beta4.dart index ab33ac74e..50e93805e 100644 --- a/generated/googleapis_beta/lib/sqladmin/v1beta4.dart +++ b/generated/googleapis_beta/lib/sqladmin/v1beta4.dart @@ -1033,11 +1033,6 @@ class InstancesResource { /// /// [instance] - Cloud SQL instance ID. This does not include the project ID. /// - /// [finalBackupExpiryTime] - Optional. Final Backup expiration time. - /// Timestamp in UTC of when this resource is considered expired. - /// - /// [finalBackupTtlDays] - Optional. Retention period of the final backup. - /// /// [$fields] - Selector specifying which fields to include in a partial /// response. 
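// Hedged sketch (not part of the generated diff): building the gkehub v2alpha
// `ConfigManagementConfigSync` message with the new `stopSyncing` flag added
// above, and reading the new `crCount` field from a ConfigSync state payload.
// The JSON literal and field values are invented for illustration; only the
// field names come from this diff.
import 'package:googleapis_beta/gkehub/v2alpha.dart' as gkehub;

void configSyncExample() {
  // Pause config syncing for a single cluster. The Google Cloud service
  // account for metrics export is deprecated when Workload Identity
  // Federation for GKE is enabled, so it is simply left unset here.
  final spec = gkehub.ConfigManagementConfigSync(
    enabled: true,
    sourceFormat: 'unstructured',
    stopSyncing: true,
  );
  // Prints: {enabled: true, sourceFormat: unstructured, stopSyncing: true}
  print(spec.toJson());

  // `crCount` reports the number of RootSync and RepoSync CRs in the cluster.
  final state = gkehub.ConfigManagementConfigSyncState.fromJson({'crCount': 3});
  print('RootSync/RepoSync CRs: ${state.crCount}');
}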
/// @@ -1051,15 +1046,9 @@ class InstancesResource { async.Future delete( core.String project, core.String instance, { - core.String? finalBackupExpiryTime, - core.String? finalBackupTtlDays, core.String? $fields, }) async { final queryParams_ = >{ - if (finalBackupExpiryTime != null) - 'finalBackupExpiryTime': [finalBackupExpiryTime], - if (finalBackupTtlDays != null) - 'finalBackupTtlDays': [finalBackupTtlDays], if ($fields != null) 'fields': [$fields], }; @@ -1569,12 +1558,12 @@ class InstancesResource { /// /// [instance] - Cloud SQL read replica instance name. /// - /// [failover_1] - Set to true to invoke a replica failover to the designated - /// DR replica. As part of replica failover, the promote operation attempts to - /// add the original primary instance as a replica of the promoted DR replica - /// when the original primary instance comes back online. If set to false or - /// not specified, then the original primary instance becomes an independent - /// Cloud SQL primary instance. Only applicable to MySQL. + /// [failover_1] - Set to true to invoke a replica failover to the DR replica. + /// As part of replica failover, the promote operation attempts to add the + /// original primary instance as a replica of the promoted DR replica when the + /// original primary instance comes back online. If set to false or not + /// specified, then the original primary instance becomes an independent Cloud + /// SQL primary instance. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. @@ -1967,8 +1956,7 @@ class InstancesResource { return Operation.fromJson(response_ as core.Map); } - /// Switches over from the primary instance to the designated DR replica - /// instance. + /// Switches over from the primary instance to the DR replica instance. /// /// Request parameters: /// @@ -1976,9 +1964,9 @@ class InstancesResource { /// /// [instance] - Cloud SQL read replica instance name. /// - /// [dbTimeout] - Optional. (MySQL only) Cloud SQL instance operations - /// timeout, which is a sum of all database operations. Default value is 10 - /// minutes and can be modified to a maximum value of 24 hours. + /// [dbTimeout] - Optional. (MySQL and PostgreSQL only) Cloud SQL instance + /// operations timeout, which is a sum of all database operations. Default + /// value is 10 minutes and can be modified to a maximum value of 24 hours. /// /// [$fields] - Selector specifying which fields to include in a partial /// response. @@ -3846,6 +3834,9 @@ class ConnectSettings { /// - "EXTERNAL" : On premises instance. core.String? backendType; + /// Custom subject alternative names for the server certificate. + core.List? customSubjectAlternativeNames; + /// The database engine type and version. /// /// The `databaseVersion` field cannot be changed after instance creation. @@ -3864,22 +3855,6 @@ class ConnectSettings { /// - "MYSQL_5_5" : The database version is MySQL 5.5. /// - "MYSQL_5_6" : The database version is MySQL 5.6. /// - "MYSQL_5_7" : The database version is MySQL 5.7. - /// - "SQLSERVER_2017_STANDARD" : The database version is SQL Server 2017 - /// Standard. - /// - "SQLSERVER_2017_ENTERPRISE" : The database version is SQL Server 2017 - /// Enterprise. - /// - "SQLSERVER_2017_EXPRESS" : The database version is SQL Server 2017 - /// Express. - /// - "SQLSERVER_2017_WEB" : The database version is SQL Server 2017 Web. - /// - "POSTGRES_9_6" : The database version is PostgreSQL 9.6. - /// - "POSTGRES_10" : The database version is PostgreSQL 10. 
- /// - "POSTGRES_11" : The database version is PostgreSQL 11. - /// - "POSTGRES_12" : The database version is PostgreSQL 12. - /// - "POSTGRES_13" : The database version is PostgreSQL 13. - /// - "POSTGRES_14" : The database version is PostgreSQL 14. - /// - "POSTGRES_15" : The database version is PostgreSQL 15. - /// - "POSTGRES_16" : The database version is PostgreSQL 16. - /// - "POSTGRES_17" : The database version is PostgreSQL 17. /// - "MYSQL_8_0" : The database version is MySQL 8. /// - "MYSQL_8_0_18" : The database major version is MySQL 8.0 and the minor /// version is 18. @@ -3913,7 +3888,27 @@ class ConnectSettings { /// version is 39. /// - "MYSQL_8_0_40" : The database major version is MySQL 8.0 and the minor /// version is 40. + /// - "MYSQL_8_0_41" : The database major version is MySQL 8.0 and the minor + /// version is 41. + /// - "MYSQL_8_0_42" : The database major version is MySQL 8.0 and the minor + /// version is 42. /// - "MYSQL_8_4" : The database version is MySQL 8.4. + /// - "SQLSERVER_2017_STANDARD" : The database version is SQL Server 2017 + /// Standard. + /// - "SQLSERVER_2017_ENTERPRISE" : The database version is SQL Server 2017 + /// Enterprise. + /// - "SQLSERVER_2017_EXPRESS" : The database version is SQL Server 2017 + /// Express. + /// - "SQLSERVER_2017_WEB" : The database version is SQL Server 2017 Web. + /// - "POSTGRES_9_6" : The database version is PostgreSQL 9.6. + /// - "POSTGRES_10" : The database version is PostgreSQL 10. + /// - "POSTGRES_11" : The database version is PostgreSQL 11. + /// - "POSTGRES_12" : The database version is PostgreSQL 12. + /// - "POSTGRES_13" : The database version is PostgreSQL 13. + /// - "POSTGRES_14" : The database version is PostgreSQL 14. + /// - "POSTGRES_15" : The database version is PostgreSQL 15. + /// - "POSTGRES_16" : The database version is PostgreSQL 16. + /// - "POSTGRES_17" : The database version is PostgreSQL 17. /// - "SQLSERVER_2019_STANDARD" : The database version is SQL Server 2019 /// Standard. /// - "SQLSERVER_2019_ENTERPRISE" : The database version is SQL Server 2019 @@ -3962,6 +3957,7 @@ class ConnectSettings { ConnectSettings({ this.backendType, + this.customSubjectAlternativeNames, this.databaseVersion, this.dnsName, this.ipAddresses, @@ -3975,6 +3971,10 @@ class ConnectSettings { ConnectSettings.fromJson(core.Map json_) : this( backendType: json_['backendType'] as core.String?, + customSubjectAlternativeNames: + (json_['customSubjectAlternativeNames'] as core.List?) + ?.map((value) => value as core.String) + .toList(), databaseVersion: json_['databaseVersion'] as core.String?, dnsName: json_['dnsName'] as core.String?, ipAddresses: (json_['ipAddresses'] as core.List?) @@ -3993,6 +3993,8 @@ class ConnectSettings { core.Map toJson() => { if (backendType != null) 'backendType': backendType!, + if (customSubjectAlternativeNames != null) + 'customSubjectAlternativeNames': customSubjectAlternativeNames!, if (databaseVersion != null) 'databaseVersion': databaseVersion!, if (dnsName != null) 'dnsName': dnsName!, if (ipAddresses != null) 'ipAddresses': ipAddresses!, @@ -4225,22 +4227,6 @@ class DatabaseInstance { /// - "MYSQL_5_5" : The database version is MySQL 5.5. /// - "MYSQL_5_6" : The database version is MySQL 5.6. /// - "MYSQL_5_7" : The database version is MySQL 5.7. - /// - "SQLSERVER_2017_STANDARD" : The database version is SQL Server 2017 - /// Standard. - /// - "SQLSERVER_2017_ENTERPRISE" : The database version is SQL Server 2017 - /// Enterprise. 
- /// - "SQLSERVER_2017_EXPRESS" : The database version is SQL Server 2017 - /// Express. - /// - "SQLSERVER_2017_WEB" : The database version is SQL Server 2017 Web. - /// - "POSTGRES_9_6" : The database version is PostgreSQL 9.6. - /// - "POSTGRES_10" : The database version is PostgreSQL 10. - /// - "POSTGRES_11" : The database version is PostgreSQL 11. - /// - "POSTGRES_12" : The database version is PostgreSQL 12. - /// - "POSTGRES_13" : The database version is PostgreSQL 13. - /// - "POSTGRES_14" : The database version is PostgreSQL 14. - /// - "POSTGRES_15" : The database version is PostgreSQL 15. - /// - "POSTGRES_16" : The database version is PostgreSQL 16. - /// - "POSTGRES_17" : The database version is PostgreSQL 17. /// - "MYSQL_8_0" : The database version is MySQL 8. /// - "MYSQL_8_0_18" : The database major version is MySQL 8.0 and the minor /// version is 18. @@ -4274,7 +4260,27 @@ class DatabaseInstance { /// version is 39. /// - "MYSQL_8_0_40" : The database major version is MySQL 8.0 and the minor /// version is 40. + /// - "MYSQL_8_0_41" : The database major version is MySQL 8.0 and the minor + /// version is 41. + /// - "MYSQL_8_0_42" : The database major version is MySQL 8.0 and the minor + /// version is 42. /// - "MYSQL_8_4" : The database version is MySQL 8.4. + /// - "SQLSERVER_2017_STANDARD" : The database version is SQL Server 2017 + /// Standard. + /// - "SQLSERVER_2017_ENTERPRISE" : The database version is SQL Server 2017 + /// Enterprise. + /// - "SQLSERVER_2017_EXPRESS" : The database version is SQL Server 2017 + /// Express. + /// - "SQLSERVER_2017_WEB" : The database version is SQL Server 2017 Web. + /// - "POSTGRES_9_6" : The database version is PostgreSQL 9.6. + /// - "POSTGRES_10" : The database version is PostgreSQL 10. + /// - "POSTGRES_11" : The database version is PostgreSQL 11. + /// - "POSTGRES_12" : The database version is PostgreSQL 12. + /// - "POSTGRES_13" : The database version is PostgreSQL 13. + /// - "POSTGRES_14" : The database version is PostgreSQL 14. + /// - "POSTGRES_15" : The database version is PostgreSQL 15. + /// - "POSTGRES_16" : The database version is PostgreSQL 16. + /// - "POSTGRES_17" : The database version is PostgreSQL 17. /// - "SQLSERVER_2019_STANDARD" : The database version is SQL Server 2019 /// Standard. /// - "SQLSERVER_2019_ENTERPRISE" : The database version is SQL Server 2019 @@ -4410,8 +4416,8 @@ class DatabaseInstance { /// A primary instance and disaster recovery (DR) replica pair. /// /// A DR replica is a cross-region replica that you designate for failover in - /// the event that the primary instance experiences regional failure. Only - /// applicable to MySQL. + /// the event that the primary instance experiences regional failure. + /// Applicable to MySQL and PostgreSQL. ReplicationCluster? replicationCluster; /// Initial root password. @@ -4489,6 +4495,18 @@ class DatabaseInstance { /// files from a data disk to Cloud Storage. core.bool? switchTransactionLogsToCloudStorageEnabled; + /// Input only. + /// + /// Immutable. Tag keys and tag values that are bound to this instance. You + /// must represent each item in the map as: `"" : ""`. For example, a single + /// resource can have the following tags: ``` "123/environment": "production", + /// "123/costCenter": "marketing", ``` For more information on tag creation + /// and management, see + /// https://cloud.google.com/resource-manager/docs/tags/tags-overview. + /// + /// Optional. + core.Map? tags; + /// All database versions that are available for upgrade. 
/// /// Output only. @@ -4544,6 +4562,7 @@ class DatabaseInstance { this.state, this.suspensionReason, this.switchTransactionLogsToCloudStorageEnabled, + this.tags, this.upgradableDatabaseVersions, this.writeEndpoint, }); @@ -4646,6 +4665,12 @@ class DatabaseInstance { .toList(), switchTransactionLogsToCloudStorageEnabled: json_['switchTransactionLogsToCloudStorageEnabled'] as core.bool?, + tags: (json_['tags'] as core.Map?)?.map( + (key, value) => core.MapEntry( + key, + value as core.String, + ), + ), upgradableDatabaseVersions: (json_['upgradableDatabaseVersions'] as core.List?) ?.map((value) => AvailableDatabaseVersion.fromJson( @@ -4714,6 +4739,7 @@ class DatabaseInstance { if (switchTransactionLogsToCloudStorageEnabled != null) 'switchTransactionLogsToCloudStorageEnabled': switchTransactionLogsToCloudStorageEnabled!, + if (tags != null) 'tags': tags!, if (upgradableDatabaseVersions != null) 'upgradableDatabaseVersions': upgradableDatabaseVersions!, if (writeEndpoint != null) 'writeEndpoint': writeEndpoint!, @@ -5389,6 +5415,25 @@ class ExportContext { }; } +/// The selected object that Cloud SQL migrates. +class ExternalSyncSelectedObject { + /// The name of the database that Cloud SQL migrates. + core.String? database; + + ExternalSyncSelectedObject({ + this.database, + }); + + ExternalSyncSelectedObject.fromJson(core.Map json_) + : this( + database: json_['database'] as core.String?, + ); + + core.Map toJson() => { + if (database != null) 'database': database!, + }; +} + /// Database instance failover context. class FailoverContext { /// This is always `sql#failoverContext`. @@ -6556,6 +6601,11 @@ class IpConfiguration { /// `157.197.200.0/24`). core.List? authorizedNetworks; + /// Custom Subject Alternative Name(SAN)s for a Cloud SQL instance. + /// + /// Optional. + core.List? customSubjectAlternativeNames; + /// Controls connectivity to private IP instances from Google services, such /// as BigQuery. core.bool? enablePrivatePathForGoogleCloudServices; @@ -6591,8 +6641,18 @@ class IpConfiguration { /// - "GOOGLE_MANAGED_INTERNAL_CA" : Google-managed self-signed internal CA. /// - "GOOGLE_MANAGED_CAS_CA" : Google-managed regional CA part of root CA /// hierarchy hosted on Google Cloud's Certificate Authority Service (CAS). + /// - "CUSTOMER_MANAGED_CAS_CA" : Customer-managed CA hosted on Google Cloud's + /// Certificate Authority Service (CAS). core.String? serverCaMode; + /// The resource name of the server CA pool for an instance with + /// `CUSTOMER_MANAGED_CAS_CA` as the `server_ca_mode`. + /// + /// Format: projects//locations//caPools/ + /// + /// Optional. + core.String? serverCaPool; + /// Specify how SSL/TLS is enforced in database connections. /// /// If you must use the `require_ssl` flag for backward compatibility, then @@ -6634,12 +6694,14 @@ class IpConfiguration { IpConfiguration({ this.allocatedIpRange, this.authorizedNetworks, + this.customSubjectAlternativeNames, this.enablePrivatePathForGoogleCloudServices, this.ipv4Enabled, this.privateNetwork, this.pscConfig, this.requireSsl, this.serverCaMode, + this.serverCaPool, this.sslMode, }); @@ -6650,6 +6712,10 @@ class IpConfiguration { ?.map((value) => AclEntry.fromJson( value as core.Map)) .toList(), + customSubjectAlternativeNames: + (json_['customSubjectAlternativeNames'] as core.List?) 
+ ?.map((value) => value as core.String) + .toList(), enablePrivatePathForGoogleCloudServices: json_['enablePrivatePathForGoogleCloudServices'] as core.bool?, ipv4Enabled: json_['ipv4Enabled'] as core.bool?, @@ -6660,6 +6726,7 @@ class IpConfiguration { : null, requireSsl: json_['requireSsl'] as core.bool?, serverCaMode: json_['serverCaMode'] as core.String?, + serverCaPool: json_['serverCaPool'] as core.String?, sslMode: json_['sslMode'] as core.String?, ); @@ -6667,6 +6734,8 @@ class IpConfiguration { if (allocatedIpRange != null) 'allocatedIpRange': allocatedIpRange!, if (authorizedNetworks != null) 'authorizedNetworks': authorizedNetworks!, + if (customSubjectAlternativeNames != null) + 'customSubjectAlternativeNames': customSubjectAlternativeNames!, if (enablePrivatePathForGoogleCloudServices != null) 'enablePrivatePathForGoogleCloudServices': enablePrivatePathForGoogleCloudServices!, @@ -6675,6 +6744,7 @@ class IpConfiguration { if (pscConfig != null) 'pscConfig': pscConfig!, if (requireSsl != null) 'requireSsl': requireSsl!, if (serverCaMode != null) 'serverCaMode': serverCaMode!, + if (serverCaPool != null) 'serverCaPool': serverCaPool!, if (sslMode != null) 'sslMode': sslMode!, }; } @@ -6989,9 +7059,29 @@ class OnPremisesConfiguration { /// The password for connecting to on-premises instance. core.String? password; + /// A list of objects that the user selects for replication from an external + /// source instance. + /// + /// Optional. + core.List? selectedObjects; + /// The reference to Cloud SQL instance if the source is Cloud SQL. InstanceReference? sourceInstance; + /// SslOption for replica connection to the on-premises source. + /// + /// Optional. + /// Possible string values are: + /// - "SSL_OPTION_UNSPECIFIED" : Unknown SSL option i.e. SSL option not + /// specified by user. + /// - "DISABLE" : SSL is disabled for replica connection to the on-premises + /// source. + /// - "REQUIRE" : SSL is required for replica connection to the on-premises + /// source. + /// - "VERIFY_CA" : Verify CA is required for replica connection to the + /// on-premises source. + core.String? sslOption; + /// The username for connecting to on-premises instance. core.String? username; @@ -7003,7 +7093,9 @@ class OnPremisesConfiguration { this.hostPort, this.kind, this.password, + this.selectedObjects, this.sourceInstance, + this.sslOption, this.username, }); @@ -7016,10 +7108,15 @@ class OnPremisesConfiguration { hostPort: json_['hostPort'] as core.String?, kind: json_['kind'] as core.String?, password: json_['password'] as core.String?, + selectedObjects: (json_['selectedObjects'] as core.List?) + ?.map((value) => SelectedObjects.fromJson( + value as core.Map)) + .toList(), sourceInstance: json_.containsKey('sourceInstance') ? InstanceReference.fromJson(json_['sourceInstance'] as core.Map) : null, + sslOption: json_['sslOption'] as core.String?, username: json_['username'] as core.String?, ); @@ -7031,7 +7128,9 @@ class OnPremisesConfiguration { if (hostPort != null) 'hostPort': hostPort!, if (kind != null) 'kind': kind!, if (password != null) 'password': password!, + if (selectedObjects != null) 'selectedObjects': selectedObjects!, if (sourceInstance != null) 'sourceInstance': sourceInstance!, + if (sslOption != null) 'sslOption': sslOption!, if (username != null) 'username': username!, }; } @@ -7156,6 +7255,8 @@ class Operation { /// instance. /// - "MAJOR_VERSION_UPGRADE" : Updates the major version of a Cloud SQL /// instance. 
+ /// - "ADVANCED_BACKUP" : Creates a backup for an Advanced BackupTier Cloud + /// SQL instance. core.String? operationType; /// The URI of this resource. @@ -7175,6 +7276,11 @@ class Operation { /// - "DONE" : The operation completed. core.String? status; + /// The sub operation based on the operation type. + /// + /// Optional. + SqlSubOperationType? subOperationType; + /// Name of the database instance related to this operation. core.String? targetId; core.String? targetLink; @@ -7200,6 +7306,7 @@ class Operation { this.selfLink, this.startTime, this.status, + this.subOperationType, this.targetId, this.targetLink, this.targetProject, @@ -7241,6 +7348,10 @@ class Operation { selfLink: json_['selfLink'] as core.String?, startTime: json_['startTime'] as core.String?, status: json_['status'] as core.String?, + subOperationType: json_.containsKey('subOperationType') + ? SqlSubOperationType.fromJson(json_['subOperationType'] + as core.Map) + : null, targetId: json_['targetId'] as core.String?, targetLink: json_['targetLink'] as core.String?, targetProject: json_['targetProject'] as core.String?, @@ -7263,6 +7374,7 @@ class Operation { if (selfLink != null) 'selfLink': selfLink!, if (startTime != null) 'startTime': startTime!, if (status != null) 'status': status!, + if (subOperationType != null) 'subOperationType': subOperationType!, if (targetId != null) 'targetId': targetId!, if (targetLink != null) 'targetLink': targetLink!, if (targetProject != null) 'targetProject': targetProject!, @@ -7651,8 +7763,8 @@ class ReplicaConfiguration { /// A primary instance and disaster recovery (DR) replica pair. /// /// A DR replica is a cross-region replica that you designate for failover in -/// the event that the primary instance has regional failure. Only applicable to -/// MySQL. +/// the event that the primary instance has regional failure. Applicable to +/// MySQL and PostgreSQL. class ReplicationCluster { /// Read-only field that indicates whether the replica is a DR replica. /// @@ -7672,14 +7784,16 @@ class ReplicationCluster { /// Optional. core.String? failoverDrReplicaName; - /// If set, it indicates this instance has a private service access (PSA) dns - /// endpoint that is pointing to the primary instance of the cluster. + /// If set, this field indicates this instance has a private service access + /// (PSA) DNS endpoint that is pointing to the primary instance of the + /// cluster. /// - /// If this instance is the primary, the dns should be pointing to this - /// instance. After Switchover or Replica failover, this DNS endpoint points - /// to the promoted instance. This is a read-only field, returned to the user - /// as information. This field can exist even if a standalone instance does - /// not yet have a replica, or had a DR replica that was deleted. + /// If this instance is the primary, then the DNS endpoint points to this + /// instance. After a switchover or replica failover operation, this DNS + /// endpoint points to the promoted instance. This is a read-only field, + /// returned to the user as information. This field can exist even if a + /// standalone instance doesn't have a DR replica yet or the DR replica is + /// deleted. /// /// Output only. core.String? psaWriteEndpoint; @@ -7841,6 +7955,28 @@ class RotateServerCertificateContext { }; } +/// A list of objects that the user selects for replication from an external +/// source instance. +class SelectedObjects { + /// The name of the database to migrate. + /// + /// Required. + core.String? 
database; + + SelectedObjects({ + this.database, + }); + + SelectedObjects.fromJson(core.Map json_) + : this( + database: json_['database'] as core.String?, + ); + + core.Map toJson() => { + if (database != null) 'database': database!, + }; +} + /// Database instance settings. class Settings { /// The activation policy specifies when the instance is activated; it is @@ -8643,6 +8779,13 @@ class SqlInstancesVerifyExternalSyncSettingsRequest { /// Optional. MySqlSyncConfig? mysqlSyncConfig; + /// Migrate only the specified objects from the source instance. + /// + /// If this field is empty, then migrate all objects. + /// + /// Optional. + core.List? selectedObjects; + /// External sync mode /// Possible string values are: /// - "EXTERNAL_SYNC_MODE_UNSPECIFIED" : Unknown external sync mode, will be @@ -8677,6 +8820,7 @@ class SqlInstancesVerifyExternalSyncSettingsRequest { SqlInstancesVerifyExternalSyncSettingsRequest({ this.migrationType, this.mysqlSyncConfig, + this.selectedObjects, this.syncMode, this.syncParallelLevel, this.verifyConnectionOnly, @@ -8690,6 +8834,10 @@ class SqlInstancesVerifyExternalSyncSettingsRequest { ? MySqlSyncConfig.fromJson(json_['mysqlSyncConfig'] as core.Map) : null, + selectedObjects: (json_['selectedObjects'] as core.List?) + ?.map((value) => ExternalSyncSelectedObject.fromJson( + value as core.Map)) + .toList(), syncMode: json_['syncMode'] as core.String?, syncParallelLevel: json_['syncParallelLevel'] as core.String?, verifyConnectionOnly: json_['verifyConnectionOnly'] as core.bool?, @@ -8699,6 +8847,7 @@ class SqlInstancesVerifyExternalSyncSettingsRequest { core.Map toJson() => { if (migrationType != null) 'migrationType': migrationType!, if (mysqlSyncConfig != null) 'mysqlSyncConfig': mysqlSyncConfig!, + if (selectedObjects != null) 'selectedObjects': selectedObjects!, if (syncMode != null) 'syncMode': syncMode!, if (syncParallelLevel != null) 'syncParallelLevel': syncParallelLevel!, if (verifyConnectionOnly != null) @@ -8914,6 +9063,41 @@ class SqlServerUserDetails { }; } +/// The sub operation type based on the operation type. +class SqlSubOperationType { + /// The type of maintenance to be performed on the instance. + /// Possible string values are: + /// - "SQL_MAINTENANCE_TYPE_UNSPECIFIED" : Maintenance type is unspecified. + /// - "INSTANCE_MAINTENANCE" : Indicates that a standalone instance is + /// undergoing maintenance. The instance can be either a primary instance or a + /// replica. + /// - "REPLICA_INCLUDED_MAINTENANCE" : Indicates that the primary instance and + /// all of its replicas, including cascading replicas, are undergoing + /// maintenance. Maintenance is performed on groups of replicas first, + /// followed by the primary instance. + /// - "INSTANCE_SELF_SERVICE_MAINTENANCE" : Indicates that the standalone + /// instance is undergoing maintenance, initiated by self-service. The + /// instance can be either a primary instance or a replica. + /// - "REPLICA_INCLUDED_SELF_SERVICE_MAINTENANCE" : Indicates that the primary + /// instance and all of its replicas are undergoing maintenance, initiated by + /// self-service. Maintenance is performed on groups of replicas first, + /// followed by the primary instance. + core.String? 
maintenanceType; + + SqlSubOperationType({ + this.maintenanceType, + }); + + SqlSubOperationType.fromJson(core.Map json_) + : this( + maintenanceType: json_['maintenanceType'] as core.String?, + ); + + core.Map toJson() => { + if (maintenanceType != null) 'maintenanceType': maintenanceType!, + }; +} + /// SslCerts Resource class SslCert { /// PEM representation. diff --git a/generated/googleapis_beta/test/analyticsdata/v1beta_test.dart b/generated/googleapis_beta/test/analyticsdata/v1beta_test.dart index 2655eb462..4158e8b5d 100644 --- a/generated/googleapis_beta/test/analyticsdata/v1beta_test.dart +++ b/generated/googleapis_beta/test/analyticsdata/v1beta_test.dart @@ -858,12 +858,28 @@ void checkDimensionValue(api.DimensionValue o) { buildCounterDimensionValue--; } +core.int buildCounterEmptyFilter = 0; +api.EmptyFilter buildEmptyFilter() { + final o = api.EmptyFilter(); + buildCounterEmptyFilter++; + if (buildCounterEmptyFilter < 3) {} + buildCounterEmptyFilter--; + return o; +} + +void checkEmptyFilter(api.EmptyFilter o) { + buildCounterEmptyFilter++; + if (buildCounterEmptyFilter < 3) {} + buildCounterEmptyFilter--; +} + core.int buildCounterFilter = 0; api.Filter buildFilter() { final o = api.Filter(); buildCounterFilter++; if (buildCounterFilter < 3) { o.betweenFilter = buildBetweenFilter(); + o.emptyFilter = buildEmptyFilter(); o.fieldName = 'foo'; o.inListFilter = buildInListFilter(); o.numericFilter = buildNumericFilter(); @@ -877,6 +893,7 @@ void checkFilter(api.Filter o) { buildCounterFilter++; if (buildCounterFilter < 3) { checkBetweenFilter(o.betweenFilter!); + checkEmptyFilter(o.emptyFilter!); unittest.expect( o.fieldName!, unittest.equals('foo'), @@ -3070,6 +3087,16 @@ void main() { }); }); + unittest.group('obj-schema-EmptyFilter', () { + unittest.test('to-json--from-json', () async { + final o = buildEmptyFilter(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.EmptyFilter.fromJson( + oJson as core.Map); + checkEmptyFilter(od); + }); + }); + unittest.group('obj-schema-Filter', () { unittest.test('to-json--from-json', () async { final o = buildFilter(); diff --git a/generated/googleapis_beta/test/dataflow/v1b3_test.dart b/generated/googleapis_beta/test/dataflow/v1b3_test.dart index 896722760..37184ee05 100644 --- a/generated/googleapis_beta/test/dataflow/v1b3_test.dart +++ b/generated/googleapis_beta/test/dataflow/v1b3_test.dart @@ -939,6 +939,33 @@ void checkDataSamplingReport(api.DataSamplingReport o) { buildCounterDataSamplingReport--; } +core.int buildCounterDataflowGaugeValue = 0; +api.DataflowGaugeValue buildDataflowGaugeValue() { + final o = api.DataflowGaugeValue(); + buildCounterDataflowGaugeValue++; + if (buildCounterDataflowGaugeValue < 3) { + o.measuredTime = 'foo'; + o.value = 'foo'; + } + buildCounterDataflowGaugeValue--; + return o; +} + +void checkDataflowGaugeValue(api.DataflowGaugeValue o) { + buildCounterDataflowGaugeValue++; + if (buildCounterDataflowGaugeValue < 3) { + unittest.expect( + o.measuredTime!, + unittest.equals('foo'), + ); + unittest.expect( + o.value!, + unittest.equals('foo'), + ); + } + buildCounterDataflowGaugeValue--; +} + core.List buildUnnamed7() => [ 'foo', 'foo', @@ -1970,6 +1997,52 @@ void checkFloatingPointMean(api.FloatingPointMean o) { buildCounterFloatingPointMean--; } +core.int buildCounterGPUUsage = 0; +api.GPUUsage buildGPUUsage() { + final o = api.GPUUsage(); + buildCounterGPUUsage++; + if (buildCounterGPUUsage < 3) { + o.timestamp = 'foo'; + o.utilization = buildGPUUtilization(); + } + 
buildCounterGPUUsage--; + return o; +} + +void checkGPUUsage(api.GPUUsage o) { + buildCounterGPUUsage++; + if (buildCounterGPUUsage < 3) { + unittest.expect( + o.timestamp!, + unittest.equals('foo'), + ); + checkGPUUtilization(o.utilization!); + } + buildCounterGPUUsage--; +} + +core.int buildCounterGPUUtilization = 0; +api.GPUUtilization buildGPUUtilization() { + final o = api.GPUUtilization(); + buildCounterGPUUtilization++; + if (buildCounterGPUUtilization < 3) { + o.rate = 42.0; + } + buildCounterGPUUtilization--; + return o; +} + +void checkGPUUtilization(api.GPUUtilization o) { + buildCounterGPUUtilization++; + if (buildCounterGPUUtilization < 3) { + unittest.expect( + o.rate!, + unittest.equals(42.0), + ); + } + buildCounterGPUUtilization--; +} + core.int buildCounterGetDebugConfigRequest = 0; api.GetDebugConfigRequest buildGetDebugConfigRequest() { final o = api.GetDebugConfigRequest(); @@ -3846,6 +3919,7 @@ api.MetricValue buildMetricValue() { if (buildCounterMetricValue < 3) { o.metric = 'foo'; o.metricLabels = buildUnnamed61(); + o.valueGauge64 = buildDataflowGaugeValue(); o.valueHistogram = buildDataflowHistogramValue(); o.valueInt64 = 'foo'; } @@ -3861,6 +3935,7 @@ void checkMetricValue(api.MetricValue o) { unittest.equals('foo'), ); checkUnnamed61(o.metricLabels!); + checkDataflowGaugeValue(o.valueGauge64!); checkDataflowHistogramValue(o.valueHistogram!); unittest.expect( o.valueInt64!, @@ -5072,12 +5147,23 @@ void checkUnnamed84(core.List o) { checkCPUTime(o[1]); } -core.List buildUnnamed85() => [ +core.List buildUnnamed85() => [ + buildGPUUsage(), + buildGPUUsage(), + ]; + +void checkUnnamed85(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkGPUUsage(o[0]); + checkGPUUsage(o[1]); +} + +core.List buildUnnamed86() => [ buildMemInfo(), buildMemInfo(), ]; -void checkUnnamed85(core.List o) { +void checkUnnamed86(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkMemInfo(o[0]); checkMemInfo(o[1]); @@ -5090,7 +5176,8 @@ api.ResourceUtilizationReport buildResourceUtilizationReport() { if (buildCounterResourceUtilizationReport < 3) { o.containers = buildUnnamed83(); o.cpuTime = buildUnnamed84(); - o.memoryInfo = buildUnnamed85(); + o.gpuUsage = buildUnnamed85(); + o.memoryInfo = buildUnnamed86(); } buildCounterResourceUtilizationReport--; return o; @@ -5101,7 +5188,8 @@ void checkResourceUtilizationReport(api.ResourceUtilizationReport o) { if (buildCounterResourceUtilizationReport < 3) { checkUnnamed83(o.containers!); checkUnnamed84(o.cpuTime!); - checkUnnamed85(o.memoryInfo!); + checkUnnamed85(o.gpuUsage!); + checkUnnamed86(o.memoryInfo!); } buildCounterResourceUtilizationReport--; } @@ -5122,12 +5210,12 @@ void checkResourceUtilizationReportResponse( buildCounterResourceUtilizationReportResponse--; } -core.List buildUnnamed86() => [ +core.List buildUnnamed87() => [ 'foo', 'foo', ]; -void checkUnnamed86(core.List o) { +void checkUnnamed87(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -5139,12 +5227,12 @@ void checkUnnamed86(core.List o) { ); } -core.Map buildUnnamed87() => { +core.Map buildUnnamed88() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed87(core.Map o) { +void checkUnnamed88(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -5161,8 +5249,8 @@ api.RuntimeEnvironment buildRuntimeEnvironment() { final o = api.RuntimeEnvironment(); buildCounterRuntimeEnvironment++; if (buildCounterRuntimeEnvironment < 3) { - o.additionalExperiments = buildUnnamed86(); - 
o.additionalUserLabels = buildUnnamed87(); + o.additionalExperiments = buildUnnamed87(); + o.additionalUserLabels = buildUnnamed88(); o.bypassTempDirValidation = true; o.diskSizeGb = 42; o.enableStreamingEngine = true; @@ -5187,8 +5275,8 @@ api.RuntimeEnvironment buildRuntimeEnvironment() { void checkRuntimeEnvironment(api.RuntimeEnvironment o) { buildCounterRuntimeEnvironment++; if (buildCounterRuntimeEnvironment < 3) { - checkUnnamed86(o.additionalExperiments!); - checkUnnamed87(o.additionalUserLabels!); + checkUnnamed87(o.additionalExperiments!); + checkUnnamed88(o.additionalUserLabels!); unittest.expect(o.bypassTempDirValidation!, unittest.isTrue); unittest.expect( o.diskSizeGb!, @@ -5251,12 +5339,12 @@ void checkRuntimeEnvironment(api.RuntimeEnvironment o) { buildCounterRuntimeEnvironment--; } -core.List buildUnnamed88() => [ +core.List buildUnnamed89() => [ buildParameterMetadata(), buildParameterMetadata(), ]; -void checkUnnamed88(core.List o) { +void checkUnnamed89(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkParameterMetadata(o[0]); checkParameterMetadata(o[1]); @@ -5267,7 +5355,7 @@ api.RuntimeMetadata buildRuntimeMetadata() { final o = api.RuntimeMetadata(); buildCounterRuntimeMetadata++; if (buildCounterRuntimeMetadata < 3) { - o.parameters = buildUnnamed88(); + o.parameters = buildUnnamed89(); o.sdkInfo = buildSDKInfo(); } buildCounterRuntimeMetadata--; @@ -5277,7 +5365,7 @@ api.RuntimeMetadata buildRuntimeMetadata() { void checkRuntimeMetadata(api.RuntimeMetadata o) { buildCounterRuntimeMetadata++; if (buildCounterRuntimeMetadata < 3) { - checkUnnamed88(o.parameters!); + checkUnnamed89(o.parameters!); checkSDKInfo(o.sdkInfo!); } buildCounterRuntimeMetadata--; @@ -5374,12 +5462,12 @@ void checkSdkBug(api.SdkBug o) { buildCounterSdkBug--; } -core.List buildUnnamed89() => [ +core.List buildUnnamed90() => [ 'foo', 'foo', ]; -void checkUnnamed89(core.List o) { +void checkUnnamed90(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -5396,7 +5484,7 @@ api.SdkHarnessContainerImage buildSdkHarnessContainerImage() { final o = api.SdkHarnessContainerImage(); buildCounterSdkHarnessContainerImage++; if (buildCounterSdkHarnessContainerImage < 3) { - o.capabilities = buildUnnamed89(); + o.capabilities = buildUnnamed90(); o.containerImage = 'foo'; o.environmentId = 'foo'; o.useSingleCorePerContainer = true; @@ -5408,7 +5496,7 @@ api.SdkHarnessContainerImage buildSdkHarnessContainerImage() { void checkSdkHarnessContainerImage(api.SdkHarnessContainerImage o) { buildCounterSdkHarnessContainerImage++; if (buildCounterSdkHarnessContainerImage < 3) { - checkUnnamed89(o.capabilities!); + checkUnnamed90(o.capabilities!); unittest.expect( o.containerImage!, unittest.equals('foo'), @@ -5422,12 +5510,12 @@ void checkSdkHarnessContainerImage(api.SdkHarnessContainerImage o) { buildCounterSdkHarnessContainerImage--; } -core.List buildUnnamed90() => [ +core.List buildUnnamed91() => [ buildSdkBug(), buildSdkBug(), ]; -void checkUnnamed90(core.List o) { +void checkUnnamed91(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkSdkBug(o[0]); checkSdkBug(o[1]); @@ -5438,7 +5526,7 @@ api.SdkVersion buildSdkVersion() { final o = api.SdkVersion(); buildCounterSdkVersion++; if (buildCounterSdkVersion < 3) { - o.bugs = buildUnnamed90(); + o.bugs = buildUnnamed91(); o.sdkSupportStatus = 'foo'; o.version = 'foo'; o.versionDisplayName = 'foo'; @@ -5450,7 +5538,7 @@ api.SdkVersion buildSdkVersion() { void checkSdkVersion(api.SdkVersion o) { 
buildCounterSdkVersion++; if (buildCounterSdkVersion < 3) { - checkUnnamed90(o.bugs!); + checkUnnamed91(o.bugs!); unittest.expect( o.sdkSupportStatus!, unittest.equals('foo'), @@ -5524,12 +5612,12 @@ void checkSendDebugCaptureResponse(api.SendDebugCaptureResponse o) { buildCounterSendDebugCaptureResponse--; } -core.List buildUnnamed91() => [ +core.List buildUnnamed92() => [ buildWorkerMessage(), buildWorkerMessage(), ]; -void checkUnnamed91(core.List o) { +void checkUnnamed92(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkWorkerMessage(o[0]); checkWorkerMessage(o[1]); @@ -5541,7 +5629,7 @@ api.SendWorkerMessagesRequest buildSendWorkerMessagesRequest() { buildCounterSendWorkerMessagesRequest++; if (buildCounterSendWorkerMessagesRequest < 3) { o.location = 'foo'; - o.workerMessages = buildUnnamed91(); + o.workerMessages = buildUnnamed92(); } buildCounterSendWorkerMessagesRequest--; return o; @@ -5554,17 +5642,17 @@ void checkSendWorkerMessagesRequest(api.SendWorkerMessagesRequest o) { o.location!, unittest.equals('foo'), ); - checkUnnamed91(o.workerMessages!); + checkUnnamed92(o.workerMessages!); } buildCounterSendWorkerMessagesRequest--; } -core.List buildUnnamed92() => [ +core.List buildUnnamed93() => [ buildWorkerMessageResponse(), buildWorkerMessageResponse(), ]; -void checkUnnamed92(core.List o) { +void checkUnnamed93(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkWorkerMessageResponse(o[0]); checkWorkerMessageResponse(o[1]); @@ -5575,7 +5663,7 @@ api.SendWorkerMessagesResponse buildSendWorkerMessagesResponse() { final o = api.SendWorkerMessagesResponse(); buildCounterSendWorkerMessagesResponse++; if (buildCounterSendWorkerMessagesResponse < 3) { - o.workerMessageResponses = buildUnnamed92(); + o.workerMessageResponses = buildUnnamed93(); } buildCounterSendWorkerMessagesResponse--; return o; @@ -5584,34 +5672,34 @@ api.SendWorkerMessagesResponse buildSendWorkerMessagesResponse() { void checkSendWorkerMessagesResponse(api.SendWorkerMessagesResponse o) { buildCounterSendWorkerMessagesResponse++; if (buildCounterSendWorkerMessagesResponse < 3) { - checkUnnamed92(o.workerMessageResponses!); + checkUnnamed93(o.workerMessageResponses!); } buildCounterSendWorkerMessagesResponse--; } -core.List buildUnnamed93() => [ +core.List buildUnnamed94() => [ buildSideInputInfo(), buildSideInputInfo(), ]; -void checkUnnamed93(core.List o) { +void checkUnnamed94(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkSideInputInfo(o[0]); checkSideInputInfo(o[1]); } -core.List buildUnnamed94() => [ +core.List buildUnnamed95() => [ buildSeqMapTaskOutputInfo(), buildSeqMapTaskOutputInfo(), ]; -void checkUnnamed94(core.List o) { +void checkUnnamed95(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkSeqMapTaskOutputInfo(o[0]); checkSeqMapTaskOutputInfo(o[1]); } -core.Map buildUnnamed95() => { +core.Map buildUnnamed96() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -5624,7 +5712,7 @@ core.Map buildUnnamed95() => { }, }; -void checkUnnamed95(core.Map o) { +void checkUnnamed96(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted34 = (o['x']!) 
as core.Map; unittest.expect(casted34, unittest.hasLength(3)); @@ -5661,12 +5749,12 @@ api.SeqMapTask buildSeqMapTask() { final o = api.SeqMapTask(); buildCounterSeqMapTask++; if (buildCounterSeqMapTask < 3) { - o.inputs = buildUnnamed93(); + o.inputs = buildUnnamed94(); o.name = 'foo'; - o.outputInfos = buildUnnamed94(); + o.outputInfos = buildUnnamed95(); o.stageName = 'foo'; o.systemName = 'foo'; - o.userFn = buildUnnamed95(); + o.userFn = buildUnnamed96(); } buildCounterSeqMapTask--; return o; @@ -5675,12 +5763,12 @@ api.SeqMapTask buildSeqMapTask() { void checkSeqMapTask(api.SeqMapTask o) { buildCounterSeqMapTask++; if (buildCounterSeqMapTask < 3) { - checkUnnamed93(o.inputs!); + checkUnnamed94(o.inputs!); unittest.expect( o.name!, unittest.equals('foo'), ); - checkUnnamed94(o.outputInfos!); + checkUnnamed95(o.outputInfos!); unittest.expect( o.stageName!, unittest.equals('foo'), @@ -5689,7 +5777,7 @@ void checkSeqMapTask(api.SeqMapTask o) { o.systemName!, unittest.equals('foo'), ); - checkUnnamed95(o.userFn!); + checkUnnamed96(o.userFn!); } buildCounterSeqMapTask--; } @@ -5718,12 +5806,12 @@ void checkSeqMapTaskOutputInfo(api.SeqMapTaskOutputInfo o) { buildCounterSeqMapTaskOutputInfo--; } -core.List buildUnnamed96() => [ +core.List buildUnnamed97() => [ 'foo', 'foo', ]; -void checkUnnamed96(core.List o) { +void checkUnnamed97(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -5740,7 +5828,7 @@ api.ServiceResources buildServiceResources() { final o = api.ServiceResources(); buildCounterServiceResources++; if (buildCounterServiceResources < 3) { - o.zones = buildUnnamed96(); + o.zones = buildUnnamed97(); } buildCounterServiceResources--; return o; @@ -5749,7 +5837,7 @@ api.ServiceResources buildServiceResources() { void checkServiceResources(api.ServiceResources o) { buildCounterServiceResources++; if (buildCounterServiceResources < 3) { - checkUnnamed96(o.zones!); + checkUnnamed97(o.zones!); } buildCounterServiceResources--; } @@ -5781,7 +5869,7 @@ void checkShellTask(api.ShellTask o) { buildCounterShellTask--; } -core.Map buildUnnamed97() => { +core.Map buildUnnamed98() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -5794,7 +5882,7 @@ core.Map buildUnnamed97() => { }, }; -void checkUnnamed97(core.Map o) { +void checkUnnamed98(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted36 = (o['x']!) 
as core.Map; unittest.expect(casted36, unittest.hasLength(3)); @@ -5826,12 +5914,12 @@ void checkUnnamed97(core.Map o) { ); } -core.List buildUnnamed98() => [ +core.List buildUnnamed99() => [ buildSource(), buildSource(), ]; -void checkUnnamed98(core.List o) { +void checkUnnamed99(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkSource(o[0]); checkSource(o[1]); @@ -5842,8 +5930,8 @@ api.SideInputInfo buildSideInputInfo() { final o = api.SideInputInfo(); buildCounterSideInputInfo++; if (buildCounterSideInputInfo < 3) { - o.kind = buildUnnamed97(); - o.sources = buildUnnamed98(); + o.kind = buildUnnamed98(); + o.sources = buildUnnamed99(); o.tag = 'foo'; } buildCounterSideInputInfo--; @@ -5853,8 +5941,8 @@ api.SideInputInfo buildSideInputInfo() { void checkSideInputInfo(api.SideInputInfo o) { buildCounterSideInputInfo++; if (buildCounterSideInputInfo < 3) { - checkUnnamed97(o.kind!); - checkUnnamed98(o.sources!); + checkUnnamed98(o.kind!); + checkUnnamed99(o.sources!); unittest.expect( o.tag!, unittest.equals('foo'), @@ -5863,7 +5951,7 @@ void checkSideInputInfo(api.SideInputInfo o) { buildCounterSideInputInfo--; } -core.Map buildUnnamed99() => { +core.Map buildUnnamed100() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -5876,7 +5964,7 @@ core.Map buildUnnamed99() => { }, }; -void checkUnnamed99(core.Map o) { +void checkUnnamed100(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted38 = (o['x']!) as core.Map; unittest.expect(casted38, unittest.hasLength(3)); @@ -5908,7 +5996,7 @@ void checkUnnamed99(core.Map o) { ); } -core.Map buildUnnamed100() => { +core.Map buildUnnamed101() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -5921,7 +6009,7 @@ core.Map buildUnnamed100() => { }, }; -void checkUnnamed100(core.Map o) { +void checkUnnamed101(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted40 = (o['x']!) 
as core.Map; unittest.expect(casted40, unittest.hasLength(3)); @@ -5958,8 +6046,8 @@ api.Sink buildSink() { final o = api.Sink(); buildCounterSink++; if (buildCounterSink < 3) { - o.codec = buildUnnamed99(); - o.spec = buildUnnamed100(); + o.codec = buildUnnamed100(); + o.spec = buildUnnamed101(); } buildCounterSink--; return o; @@ -5968,18 +6056,18 @@ api.Sink buildSink() { void checkSink(api.Sink o) { buildCounterSink++; if (buildCounterSink < 3) { - checkUnnamed99(o.codec!); - checkUnnamed100(o.spec!); + checkUnnamed100(o.codec!); + checkUnnamed101(o.spec!); } buildCounterSink--; } -core.List buildUnnamed101() => [ +core.List buildUnnamed102() => [ buildPubsubSnapshotMetadata(), buildPubsubSnapshotMetadata(), ]; -void checkUnnamed101(core.List o) { +void checkUnnamed102(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkPubsubSnapshotMetadata(o[0]); checkPubsubSnapshotMetadata(o[1]); @@ -5995,7 +6083,7 @@ api.Snapshot buildSnapshot() { o.diskSizeBytes = 'foo'; o.id = 'foo'; o.projectId = 'foo'; - o.pubsubMetadata = buildUnnamed101(); + o.pubsubMetadata = buildUnnamed102(); o.region = 'foo'; o.sourceJobId = 'foo'; o.state = 'foo'; @@ -6028,7 +6116,7 @@ void checkSnapshot(api.Snapshot o) { o.projectId!, unittest.equals('foo'), ); - checkUnnamed101(o.pubsubMetadata!); + checkUnnamed102(o.pubsubMetadata!); unittest.expect( o.region!, unittest.equals('foo'), @@ -6083,7 +6171,7 @@ void checkSnapshotJobRequest(api.SnapshotJobRequest o) { buildCounterSnapshotJobRequest--; } -core.Map buildUnnamed102() => { +core.Map buildUnnamed103() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -6096,7 +6184,7 @@ core.Map buildUnnamed102() => { }, }; -void checkUnnamed102(core.Map o) { +void checkUnnamed103(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted42 = (o['x']!) as core.Map; unittest.expect(casted42, unittest.hasLength(3)); @@ -6128,18 +6216,18 @@ void checkUnnamed102(core.Map o) { ); } -core.List> buildUnnamed103() => [ - buildUnnamed102(), - buildUnnamed102(), +core.List> buildUnnamed104() => [ + buildUnnamed103(), + buildUnnamed103(), ]; -void checkUnnamed103(core.List> o) { +void checkUnnamed104(core.List> o) { unittest.expect(o, unittest.hasLength(2)); - checkUnnamed102(o[0]); - checkUnnamed102(o[1]); + checkUnnamed103(o[0]); + checkUnnamed103(o[1]); } -core.Map buildUnnamed104() => { +core.Map buildUnnamed105() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -6152,7 +6240,7 @@ core.Map buildUnnamed104() => { }, }; -void checkUnnamed104(core.Map o) { +void checkUnnamed105(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted44 = (o['x']!) as core.Map; unittest.expect(casted44, unittest.hasLength(3)); @@ -6184,7 +6272,7 @@ void checkUnnamed104(core.Map o) { ); } -core.Map buildUnnamed105() => { +core.Map buildUnnamed106() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -6197,7 +6285,7 @@ core.Map buildUnnamed105() => { }, }; -void checkUnnamed105(core.Map o) { +void checkUnnamed106(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted46 = (o['x']!) 
as core.Map; unittest.expect(casted46, unittest.hasLength(3)); @@ -6234,11 +6322,11 @@ api.Source buildSource() { final o = api.Source(); buildCounterSource++; if (buildCounterSource < 3) { - o.baseSpecs = buildUnnamed103(); - o.codec = buildUnnamed104(); + o.baseSpecs = buildUnnamed104(); + o.codec = buildUnnamed105(); o.doesNotNeedSplitting = true; o.metadata = buildSourceMetadata(); - o.spec = buildUnnamed105(); + o.spec = buildUnnamed106(); } buildCounterSource--; return o; @@ -6247,11 +6335,11 @@ api.Source buildSource() { void checkSource(api.Source o) { buildCounterSource++; if (buildCounterSource < 3) { - checkUnnamed103(o.baseSpecs!); - checkUnnamed104(o.codec!); + checkUnnamed104(o.baseSpecs!); + checkUnnamed105(o.codec!); unittest.expect(o.doesNotNeedSplitting!, unittest.isTrue); checkSourceMetadata(o.metadata!); - checkUnnamed105(o.spec!); + checkUnnamed106(o.spec!); } buildCounterSource--; } @@ -6455,23 +6543,23 @@ void checkSourceSplitRequest(api.SourceSplitRequest o) { buildCounterSourceSplitRequest--; } -core.List buildUnnamed106() => [ +core.List buildUnnamed107() => [ buildDerivedSource(), buildDerivedSource(), ]; -void checkUnnamed106(core.List o) { +void checkUnnamed107(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkDerivedSource(o[0]); checkDerivedSource(o[1]); } -core.List buildUnnamed107() => [ +core.List buildUnnamed108() => [ buildSourceSplitShard(), buildSourceSplitShard(), ]; -void checkUnnamed107(core.List o) { +void checkUnnamed108(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkSourceSplitShard(o[0]); checkSourceSplitShard(o[1]); @@ -6482,9 +6570,9 @@ api.SourceSplitResponse buildSourceSplitResponse() { final o = api.SourceSplitResponse(); buildCounterSourceSplitResponse++; if (buildCounterSourceSplitResponse < 3) { - o.bundles = buildUnnamed106(); + o.bundles = buildUnnamed107(); o.outcome = 'foo'; - o.shards = buildUnnamed107(); + o.shards = buildUnnamed108(); } buildCounterSourceSplitResponse--; return o; @@ -6493,12 +6581,12 @@ api.SourceSplitResponse buildSourceSplitResponse() { void checkSourceSplitResponse(api.SourceSplitResponse o) { buildCounterSourceSplitResponse++; if (buildCounterSourceSplitResponse < 3) { - checkUnnamed106(o.bundles!); + checkUnnamed107(o.bundles!); unittest.expect( o.outcome!, unittest.equals('foo'), ); - checkUnnamed107(o.shards!); + checkUnnamed108(o.shards!); } buildCounterSourceSplitResponse--; } @@ -6586,12 +6674,12 @@ void checkSplitInt64(api.SplitInt64 o) { buildCounterSplitInt64--; } -core.List buildUnnamed108() => [ +core.List buildUnnamed109() => [ buildWorkerDetails(), buildWorkerDetails(), ]; -void checkUnnamed108(core.List o) { +void checkUnnamed109(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkWorkerDetails(o[0]); checkWorkerDetails(o[1]); @@ -6603,7 +6691,7 @@ api.StageExecutionDetails buildStageExecutionDetails() { buildCounterStageExecutionDetails++; if (buildCounterStageExecutionDetails < 3) { o.nextPageToken = 'foo'; - o.workers = buildUnnamed108(); + o.workers = buildUnnamed109(); } buildCounterStageExecutionDetails--; return o; @@ -6616,7 +6704,7 @@ void checkStageExecutionDetails(api.StageExecutionDetails o) { o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed108(o.workers!); + checkUnnamed109(o.workers!); } buildCounterStageExecutionDetails--; } @@ -6658,12 +6746,12 @@ void checkStageSource(api.StageSource o) { buildCounterStageSource--; } -core.List buildUnnamed109() => [ +core.List buildUnnamed110() => [ buildMetricUpdate(), buildMetricUpdate(), ]; 
-void checkUnnamed109(core.List o) { +void checkUnnamed110(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkMetricUpdate(o[0]); checkMetricUpdate(o[1]); @@ -6675,7 +6763,7 @@ api.StageSummary buildStageSummary() { buildCounterStageSummary++; if (buildCounterStageSummary < 3) { o.endTime = 'foo'; - o.metrics = buildUnnamed109(); + o.metrics = buildUnnamed110(); o.progress = buildProgressTimeseries(); o.stageId = 'foo'; o.startTime = 'foo'; @@ -6693,7 +6781,7 @@ void checkStageSummary(api.StageSummary o) { o.endTime!, unittest.equals('foo'), ); - checkUnnamed109(o.metrics!); + checkUnnamed110(o.metrics!); checkProgressTimeseries(o.progress!); unittest.expect( o.stageId!, @@ -6736,7 +6824,7 @@ void checkStateFamilyConfig(api.StateFamilyConfig o) { buildCounterStateFamilyConfig--; } -core.Map buildUnnamed110() => { +core.Map buildUnnamed111() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -6749,7 +6837,7 @@ core.Map buildUnnamed110() => { }, }; -void checkUnnamed110(core.Map o) { +void checkUnnamed111(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted48 = (o['x']!) as core.Map; unittest.expect(casted48, unittest.hasLength(3)); @@ -6781,15 +6869,15 @@ void checkUnnamed110(core.Map o) { ); } -core.List> buildUnnamed111() => [ - buildUnnamed110(), - buildUnnamed110(), +core.List> buildUnnamed112() => [ + buildUnnamed111(), + buildUnnamed111(), ]; -void checkUnnamed111(core.List> o) { +void checkUnnamed112(core.List> o) { unittest.expect(o, unittest.hasLength(2)); - checkUnnamed110(o[0]); - checkUnnamed110(o[1]); + checkUnnamed111(o[0]); + checkUnnamed111(o[1]); } core.int buildCounterStatus = 0; @@ -6798,7 +6886,7 @@ api.Status buildStatus() { buildCounterStatus++; if (buildCounterStatus < 3) { o.code = 42; - o.details = buildUnnamed111(); + o.details = buildUnnamed112(); o.message = 'foo'; } buildCounterStatus--; @@ -6812,7 +6900,7 @@ void checkStatus(api.Status o) { o.code!, unittest.equals(42), ); - checkUnnamed111(o.details!); + checkUnnamed112(o.details!); unittest.expect( o.message!, unittest.equals('foo'), @@ -6821,7 +6909,7 @@ void checkStatus(api.Status o) { buildCounterStatus--; } -core.Map buildUnnamed112() => { +core.Map buildUnnamed113() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -6834,7 +6922,7 @@ core.Map buildUnnamed112() => { }, }; -void checkUnnamed112(core.Map o) { +void checkUnnamed113(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted50 = (o['x']!) 
as core.Map; unittest.expect(casted50, unittest.hasLength(3)); @@ -6873,7 +6961,7 @@ api.Step buildStep() { if (buildCounterStep < 3) { o.kind = 'foo'; o.name = 'foo'; - o.properties = buildUnnamed112(); + o.properties = buildUnnamed113(); } buildCounterStep--; return o; @@ -6890,7 +6978,7 @@ void checkStep(api.Step o) { o.name!, unittest.equals('foo'), ); - checkUnnamed112(o.properties!); + checkUnnamed113(o.properties!); } buildCounterStep--; } @@ -6935,12 +7023,12 @@ void checkStragglerDebuggingInfo(api.StragglerDebuggingInfo o) { buildCounterStragglerDebuggingInfo--; } -core.Map buildUnnamed113() => { +core.Map buildUnnamed114() => { 'x': buildStragglerDebuggingInfo(), 'y': buildStragglerDebuggingInfo(), }; -void checkUnnamed113(core.Map o) { +void checkUnnamed114(core.Map o) { unittest.expect(o, unittest.hasLength(2)); checkStragglerDebuggingInfo(o['x']!); checkStragglerDebuggingInfo(o['y']!); @@ -6951,7 +7039,7 @@ api.StragglerInfo buildStragglerInfo() { final o = api.StragglerInfo(); buildCounterStragglerInfo++; if (buildCounterStragglerInfo < 3) { - o.causes = buildUnnamed113(); + o.causes = buildUnnamed114(); o.startTime = 'foo'; } buildCounterStragglerInfo--; @@ -6961,7 +7049,7 @@ api.StragglerInfo buildStragglerInfo() { void checkStragglerInfo(api.StragglerInfo o) { buildCounterStragglerInfo++; if (buildCounterStragglerInfo < 3) { - checkUnnamed113(o.causes!); + checkUnnamed114(o.causes!); unittest.expect( o.startTime!, unittest.equals('foo'), @@ -6970,23 +7058,23 @@ void checkStragglerInfo(api.StragglerInfo o) { buildCounterStragglerInfo--; } -core.List buildUnnamed114() => [ +core.List buildUnnamed115() => [ buildStraggler(), buildStraggler(), ]; -void checkUnnamed114(core.List o) { +void checkUnnamed115(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkStraggler(o[0]); checkStraggler(o[1]); } -core.Map buildUnnamed115() => { +core.Map buildUnnamed116() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed115(core.Map o) { +void checkUnnamed116(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -7003,8 +7091,8 @@ api.StragglerSummary buildStragglerSummary() { final o = api.StragglerSummary(); buildCounterStragglerSummary++; if (buildCounterStragglerSummary < 3) { - o.recentStragglers = buildUnnamed114(); - o.stragglerCauseCount = buildUnnamed115(); + o.recentStragglers = buildUnnamed115(); + o.stragglerCauseCount = buildUnnamed116(); o.totalStragglerCount = 'foo'; } buildCounterStragglerSummary--; @@ -7014,8 +7102,8 @@ api.StragglerSummary buildStragglerSummary() { void checkStragglerSummary(api.StragglerSummary o) { buildCounterStragglerSummary++; if (buildCounterStragglerSummary < 3) { - checkUnnamed114(o.recentStragglers!); - checkUnnamed115(o.stragglerCauseCount!); + checkUnnamed115(o.recentStragglers!); + checkUnnamed116(o.stragglerCauseCount!); unittest.expect( o.totalStragglerCount!, unittest.equals('foo'), @@ -7077,23 +7165,23 @@ void checkStreamingApplianceSnapshotConfig( buildCounterStreamingApplianceSnapshotConfig--; } -core.List buildUnnamed116() => [ +core.List buildUnnamed117() => [ buildParallelInstruction(), buildParallelInstruction(), ]; -void checkUnnamed116(core.List o) { +void checkUnnamed117(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkParallelInstruction(o[0]); checkParallelInstruction(o[1]); } -core.Map buildUnnamed117() => { +core.Map buildUnnamed118() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed117(core.Map o) { +void checkUnnamed118(core.Map o) { unittest.expect(o, 
unittest.hasLength(2)); unittest.expect( o['x']!, @@ -7111,10 +7199,10 @@ api.StreamingComputationConfig buildStreamingComputationConfig() { buildCounterStreamingComputationConfig++; if (buildCounterStreamingComputationConfig < 3) { o.computationId = 'foo'; - o.instructions = buildUnnamed116(); + o.instructions = buildUnnamed117(); o.stageName = 'foo'; o.systemName = 'foo'; - o.transformUserNameToStateFamily = buildUnnamed117(); + o.transformUserNameToStateFamily = buildUnnamed118(); } buildCounterStreamingComputationConfig--; return o; @@ -7127,7 +7215,7 @@ void checkStreamingComputationConfig(api.StreamingComputationConfig o) { o.computationId!, unittest.equals('foo'), ); - checkUnnamed116(o.instructions!); + checkUnnamed117(o.instructions!); unittest.expect( o.stageName!, unittest.equals('foo'), @@ -7136,17 +7224,17 @@ void checkStreamingComputationConfig(api.StreamingComputationConfig o) { o.systemName!, unittest.equals('foo'), ); - checkUnnamed117(o.transformUserNameToStateFamily!); + checkUnnamed118(o.transformUserNameToStateFamily!); } buildCounterStreamingComputationConfig--; } -core.List buildUnnamed118() => [ +core.List buildUnnamed119() => [ buildKeyRangeDataDiskAssignment(), buildKeyRangeDataDiskAssignment(), ]; -void checkUnnamed118(core.List o) { +void checkUnnamed119(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkKeyRangeDataDiskAssignment(o[0]); checkKeyRangeDataDiskAssignment(o[1]); @@ -7158,7 +7246,7 @@ api.StreamingComputationRanges buildStreamingComputationRanges() { buildCounterStreamingComputationRanges++; if (buildCounterStreamingComputationRanges < 3) { o.computationId = 'foo'; - o.rangeAssignments = buildUnnamed118(); + o.rangeAssignments = buildUnnamed119(); } buildCounterStreamingComputationRanges--; return o; @@ -7171,28 +7259,28 @@ void checkStreamingComputationRanges(api.StreamingComputationRanges o) { o.computationId!, unittest.equals('foo'), ); - checkUnnamed118(o.rangeAssignments!); + checkUnnamed119(o.rangeAssignments!); } buildCounterStreamingComputationRanges--; } -core.List buildUnnamed119() => [ +core.List buildUnnamed120() => [ buildStreamingComputationRanges(), buildStreamingComputationRanges(), ]; -void checkUnnamed119(core.List o) { +void checkUnnamed120(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkStreamingComputationRanges(o[0]); checkStreamingComputationRanges(o[1]); } -core.List buildUnnamed120() => [ +core.List buildUnnamed121() => [ buildMountedDataDisk(), buildMountedDataDisk(), ]; -void checkUnnamed120(core.List o) { +void checkUnnamed121(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkMountedDataDisk(o[0]); checkMountedDataDisk(o[1]); @@ -7203,8 +7291,8 @@ api.StreamingComputationTask buildStreamingComputationTask() { final o = api.StreamingComputationTask(); buildCounterStreamingComputationTask++; if (buildCounterStreamingComputationTask < 3) { - o.computationRanges = buildUnnamed119(); - o.dataDisks = buildUnnamed120(); + o.computationRanges = buildUnnamed120(); + o.dataDisks = buildUnnamed121(); o.taskType = 'foo'; } buildCounterStreamingComputationTask--; @@ -7214,8 +7302,8 @@ api.StreamingComputationTask buildStreamingComputationTask() { void checkStreamingComputationTask(api.StreamingComputationTask o) { buildCounterStreamingComputationTask++; if (buildCounterStreamingComputationTask < 3) { - checkUnnamed119(o.computationRanges!); - checkUnnamed120(o.dataDisks!); + checkUnnamed120(o.computationRanges!); + checkUnnamed121(o.dataDisks!); unittest.expect( o.taskType!, 
unittest.equals('foo'), @@ -7224,23 +7312,23 @@ void checkStreamingComputationTask(api.StreamingComputationTask o) { buildCounterStreamingComputationTask--; } -core.List buildUnnamed121() => [ +core.List buildUnnamed122() => [ buildStreamingComputationConfig(), buildStreamingComputationConfig(), ]; -void checkUnnamed121(core.List o) { +void checkUnnamed122(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkStreamingComputationConfig(o[0]); checkStreamingComputationConfig(o[1]); } -core.Map buildUnnamed122() => { +core.Map buildUnnamed123() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed122(core.Map o) { +void checkUnnamed123(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -7261,8 +7349,8 @@ api.StreamingConfigTask buildStreamingConfigTask() { o.getDataStreamChunkSizeBytes = 'foo'; o.maxWorkItemCommitBytes = 'foo'; o.operationalLimits = buildStreamingOperationalLimits(); - o.streamingComputationConfigs = buildUnnamed121(); - o.userStepToStateFamilyNameMap = buildUnnamed122(); + o.streamingComputationConfigs = buildUnnamed122(); + o.userStepToStateFamilyNameMap = buildUnnamed123(); o.userWorkerRunnerV1Settings = 'foo'; o.userWorkerRunnerV2Settings = 'foo'; o.windmillServiceEndpoint = 'foo'; @@ -7288,8 +7376,8 @@ void checkStreamingConfigTask(api.StreamingConfigTask o) { unittest.equals('foo'), ); checkStreamingOperationalLimits(o.operationalLimits!); - checkUnnamed121(o.streamingComputationConfigs!); - checkUnnamed122(o.userStepToStateFamilyNameMap!); + checkUnnamed122(o.streamingComputationConfigs!); + checkUnnamed123(o.userStepToStateFamilyNameMap!); unittest.expect( o.userWorkerRunnerV1Settings!, unittest.equals('foo'), @@ -7575,12 +7663,12 @@ void checkStreamingStragglerInfo(api.StreamingStragglerInfo o) { buildCounterStreamingStragglerInfo--; } -core.List buildUnnamed123() => [ +core.List buildUnnamed124() => [ 'foo', 'foo', ]; -void checkUnnamed123(core.List o) { +void checkUnnamed124(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -7597,7 +7685,7 @@ api.StringList buildStringList() { final o = api.StringList(); buildCounterStringList++; if (buildCounterStringList < 3) { - o.elements = buildUnnamed123(); + o.elements = buildUnnamed124(); } buildCounterStringList--; return o; @@ -7606,17 +7694,17 @@ api.StringList buildStringList() { void checkStringList(api.StringList o) { buildCounterStringList++; if (buildCounterStringList < 3) { - checkUnnamed123(o.elements!); + checkUnnamed124(o.elements!); } buildCounterStringList--; } -core.List buildUnnamed124() => [ +core.List buildUnnamed125() => [ buildParameter(), buildParameter(), ]; -void checkUnnamed124(core.List o) { +void checkUnnamed125(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkParameter(o[0]); checkParameter(o[1]); @@ -7629,7 +7717,7 @@ api.StructuredMessage buildStructuredMessage() { if (buildCounterStructuredMessage < 3) { o.messageKey = 'foo'; o.messageText = 'foo'; - o.parameters = buildUnnamed124(); + o.parameters = buildUnnamed125(); } buildCounterStructuredMessage--; return o; @@ -7646,17 +7734,17 @@ void checkStructuredMessage(api.StructuredMessage o) { o.messageText!, unittest.equals('foo'), ); - checkUnnamed124(o.parameters!); + checkUnnamed125(o.parameters!); } buildCounterStructuredMessage--; } -core.List buildUnnamed125() => [ +core.List buildUnnamed126() => [ 'foo', 'foo', ]; -void checkUnnamed125(core.List o) { +void checkUnnamed126(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], 
@@ -7684,7 +7772,7 @@ api.TaskRunnerSettings buildTaskRunnerSettings() { o.logDir = 'foo'; o.logToSerialconsole = true; o.logUploadLocation = 'foo'; - o.oauthScopes = buildUnnamed125(); + o.oauthScopes = buildUnnamed126(); o.parallelWorkerSettings = buildWorkerSettings(); o.streamingWorkerMainClass = 'foo'; o.taskGroup = 'foo'; @@ -7735,7 +7823,7 @@ void checkTaskRunnerSettings(api.TaskRunnerSettings o) { o.logUploadLocation!, unittest.equals('foo'), ); - checkUnnamed125(o.oauthScopes!); + checkUnnamed126(o.oauthScopes!); checkWorkerSettings(o.parallelWorkerSettings!); unittest.expect( o.streamingWorkerMainClass!, @@ -7765,12 +7853,12 @@ void checkTaskRunnerSettings(api.TaskRunnerSettings o) { buildCounterTaskRunnerSettings--; } -core.List buildUnnamed126() => [ +core.List buildUnnamed127() => [ buildParameterMetadata(), buildParameterMetadata(), ]; -void checkUnnamed126(core.List o) { +void checkUnnamed127(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkParameterMetadata(o[0]); checkParameterMetadata(o[1]); @@ -7784,7 +7872,7 @@ api.TemplateMetadata buildTemplateMetadata() { o.defaultStreamingMode = 'foo'; o.description = 'foo'; o.name = 'foo'; - o.parameters = buildUnnamed126(); + o.parameters = buildUnnamed127(); o.streaming = true; o.supportsAtLeastOnce = true; o.supportsExactlyOnce = true; @@ -7808,7 +7896,7 @@ void checkTemplateMetadata(api.TemplateMetadata o) { o.name!, unittest.equals('foo'), ); - checkUnnamed126(o.parameters!); + checkUnnamed127(o.parameters!); unittest.expect(o.streaming!, unittest.isTrue); unittest.expect(o.supportsAtLeastOnce!, unittest.isTrue); unittest.expect(o.supportsExactlyOnce!, unittest.isTrue); @@ -7816,34 +7904,34 @@ void checkTemplateMetadata(api.TemplateMetadata o) { buildCounterTemplateMetadata--; } -core.List buildUnnamed127() => [ +core.List buildUnnamed128() => [ buildComputationTopology(), buildComputationTopology(), ]; -void checkUnnamed127(core.List o) { +void checkUnnamed128(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkComputationTopology(o[0]); checkComputationTopology(o[1]); } -core.List buildUnnamed128() => [ +core.List buildUnnamed129() => [ buildDataDiskAssignment(), buildDataDiskAssignment(), ]; -void checkUnnamed128(core.List o) { +void checkUnnamed129(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkDataDiskAssignment(o[0]); checkDataDiskAssignment(o[1]); } -core.Map buildUnnamed129() => { +core.Map buildUnnamed130() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed129(core.Map o) { +void checkUnnamed130(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -7860,11 +7948,11 @@ api.TopologyConfig buildTopologyConfig() { final o = api.TopologyConfig(); buildCounterTopologyConfig++; if (buildCounterTopologyConfig < 3) { - o.computations = buildUnnamed127(); - o.dataDiskAssignments = buildUnnamed128(); + o.computations = buildUnnamed128(); + o.dataDiskAssignments = buildUnnamed129(); o.forwardingKeyBits = 42; o.persistentStateVersion = 42; - o.userStageToComputationNameMap = buildUnnamed129(); + o.userStageToComputationNameMap = buildUnnamed130(); } buildCounterTopologyConfig--; return o; @@ -7873,8 +7961,8 @@ api.TopologyConfig buildTopologyConfig() { void checkTopologyConfig(api.TopologyConfig o) { buildCounterTopologyConfig++; if (buildCounterTopologyConfig < 3) { - checkUnnamed127(o.computations!); - checkUnnamed128(o.dataDiskAssignments!); + checkUnnamed128(o.computations!); + checkUnnamed129(o.dataDiskAssignments!); unittest.expect( 
o.forwardingKeyBits!, unittest.equals(42), @@ -7883,28 +7971,28 @@ void checkTopologyConfig(api.TopologyConfig o) { o.persistentStateVersion!, unittest.equals(42), ); - checkUnnamed129(o.userStageToComputationNameMap!); + checkUnnamed130(o.userStageToComputationNameMap!); } buildCounterTopologyConfig--; } -core.List buildUnnamed130() => [ +core.List buildUnnamed131() => [ buildDisplayData(), buildDisplayData(), ]; -void checkUnnamed130(core.List o) { +void checkUnnamed131(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkDisplayData(o[0]); checkDisplayData(o[1]); } -core.List buildUnnamed131() => [ +core.List buildUnnamed132() => [ 'foo', 'foo', ]; -void checkUnnamed131(core.List o) { +void checkUnnamed132(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -7916,12 +8004,12 @@ void checkUnnamed131(core.List o) { ); } -core.List buildUnnamed132() => [ +core.List buildUnnamed133() => [ 'foo', 'foo', ]; -void checkUnnamed132(core.List o) { +void checkUnnamed133(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -7938,12 +8026,12 @@ api.TransformSummary buildTransformSummary() { final o = api.TransformSummary(); buildCounterTransformSummary++; if (buildCounterTransformSummary < 3) { - o.displayData = buildUnnamed130(); + o.displayData = buildUnnamed131(); o.id = 'foo'; - o.inputCollectionName = buildUnnamed131(); + o.inputCollectionName = buildUnnamed132(); o.kind = 'foo'; o.name = 'foo'; - o.outputCollectionName = buildUnnamed132(); + o.outputCollectionName = buildUnnamed133(); } buildCounterTransformSummary--; return o; @@ -7952,12 +8040,12 @@ api.TransformSummary buildTransformSummary() { void checkTransformSummary(api.TransformSummary o) { buildCounterTransformSummary++; if (buildCounterTransformSummary < 3) { - checkUnnamed130(o.displayData!); + checkUnnamed131(o.displayData!); unittest.expect( o.id!, unittest.equals('foo'), ); - checkUnnamed131(o.inputCollectionName!); + checkUnnamed132(o.inputCollectionName!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -7966,17 +8054,17 @@ void checkTransformSummary(api.TransformSummary o) { o.name!, unittest.equals('foo'), ); - checkUnnamed132(o.outputCollectionName!); + checkUnnamed133(o.outputCollectionName!); } buildCounterTransformSummary--; } -core.List buildUnnamed133() => [ +core.List buildUnnamed134() => [ buildPackage(), buildPackage(), ]; -void checkUnnamed133(core.List o) { +void checkUnnamed134(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkPackage(o[0]); checkPackage(o[1]); @@ -7993,7 +8081,7 @@ api.WorkItem buildWorkItem() { o.jobId = 'foo'; o.leaseExpireTime = 'foo'; o.mapTask = buildMapTask(); - o.packages = buildUnnamed133(); + o.packages = buildUnnamed134(); o.projectId = 'foo'; o.reportStatusInterval = 'foo'; o.seqMapTask = buildSeqMapTask(); @@ -8031,7 +8119,7 @@ void checkWorkItem(api.WorkItem o) { unittest.equals('foo'), ); checkMapTask(o.mapTask!); - checkUnnamed133(o.packages!); + checkUnnamed134(o.packages!); unittest.expect( o.projectId!, unittest.equals('foo'), @@ -8050,12 +8138,12 @@ void checkWorkItem(api.WorkItem o) { buildCounterWorkItem--; } -core.List buildUnnamed134() => [ +core.List buildUnnamed135() => [ buildMetricUpdate(), buildMetricUpdate(), ]; -void checkUnnamed134(core.List o) { +void checkUnnamed135(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkMetricUpdate(o[0]); checkMetricUpdate(o[1]); @@ -8068,7 +8156,7 @@ api.WorkItemDetails buildWorkItemDetails() { if (buildCounterWorkItemDetails < 3) 
{ o.attemptId = 'foo'; o.endTime = 'foo'; - o.metrics = buildUnnamed134(); + o.metrics = buildUnnamed135(); o.progress = buildProgressTimeseries(); o.startTime = 'foo'; o.state = 'foo'; @@ -8090,7 +8178,7 @@ void checkWorkItemDetails(api.WorkItemDetails o) { o.endTime!, unittest.equals('foo'), ); - checkUnnamed134(o.metrics!); + checkUnnamed135(o.metrics!); checkProgressTimeseries(o.progress!); unittest.expect( o.startTime!, @@ -8109,7 +8197,7 @@ void checkWorkItemDetails(api.WorkItemDetails o) { buildCounterWorkItemDetails--; } -core.Map buildUnnamed135() => { +core.Map buildUnnamed136() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -8122,7 +8210,7 @@ core.Map buildUnnamed135() => { }, }; -void checkUnnamed135(core.Map o) { +void checkUnnamed136(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted52 = (o['x']!) as core.Map; unittest.expect(casted52, unittest.hasLength(3)); @@ -8154,12 +8242,12 @@ void checkUnnamed135(core.Map o) { ); } -core.List buildUnnamed136() => [ +core.List buildUnnamed137() => [ buildMetricShortId(), buildMetricShortId(), ]; -void checkUnnamed136(core.List o) { +void checkUnnamed137(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkMetricShortId(o[0]); checkMetricShortId(o[1]); @@ -8171,10 +8259,10 @@ api.WorkItemServiceState buildWorkItemServiceState() { buildCounterWorkItemServiceState++; if (buildCounterWorkItemServiceState < 3) { o.completeWorkStatus = buildStatus(); - o.harnessData = buildUnnamed135(); + o.harnessData = buildUnnamed136(); o.hotKeyDetection = buildHotKeyDetection(); o.leaseExpireTime = 'foo'; - o.metricShortId = buildUnnamed136(); + o.metricShortId = buildUnnamed137(); o.nextReportIndex = 'foo'; o.reportStatusInterval = 'foo'; o.splitRequest = buildApproximateSplitRequest(); @@ -8189,13 +8277,13 @@ void checkWorkItemServiceState(api.WorkItemServiceState o) { buildCounterWorkItemServiceState++; if (buildCounterWorkItemServiceState < 3) { checkStatus(o.completeWorkStatus!); - checkUnnamed135(o.harnessData!); + checkUnnamed136(o.harnessData!); checkHotKeyDetection(o.hotKeyDetection!); unittest.expect( o.leaseExpireTime!, unittest.equals('foo'), ); - checkUnnamed136(o.metricShortId!); + checkUnnamed137(o.metricShortId!); unittest.expect( o.nextReportIndex!, unittest.equals('foo'), @@ -8211,34 +8299,34 @@ void checkWorkItemServiceState(api.WorkItemServiceState o) { buildCounterWorkItemServiceState--; } -core.List buildUnnamed137() => [ +core.List buildUnnamed138() => [ buildCounterUpdate(), buildCounterUpdate(), ]; -void checkUnnamed137(core.List o) { +void checkUnnamed138(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkCounterUpdate(o[0]); checkCounterUpdate(o[1]); } -core.List buildUnnamed138() => [ +core.List buildUnnamed139() => [ buildStatus(), buildStatus(), ]; -void checkUnnamed138(core.List o) { +void checkUnnamed139(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkStatus(o[0]); checkStatus(o[1]); } -core.List buildUnnamed139() => [ +core.List buildUnnamed140() => [ buildMetricUpdate(), buildMetricUpdate(), ]; -void checkUnnamed139(core.List o) { +void checkUnnamed140(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkMetricUpdate(o[0]); checkMetricUpdate(o[1]); @@ -8250,10 +8338,10 @@ api.WorkItemStatus buildWorkItemStatus() { buildCounterWorkItemStatus++; if (buildCounterWorkItemStatus < 3) { o.completed = true; - o.counterUpdates = buildUnnamed137(); + o.counterUpdates = buildUnnamed138(); o.dynamicSourceSplit = buildDynamicSourceSplit(); - o.errors = 
buildUnnamed138(); - o.metricUpdates = buildUnnamed139(); + o.errors = buildUnnamed139(); + o.metricUpdates = buildUnnamed140(); o.progress = buildApproximateProgress(); o.reportIndex = 'foo'; o.reportedProgress = buildApproximateReportedProgress(); @@ -8272,10 +8360,10 @@ void checkWorkItemStatus(api.WorkItemStatus o) { buildCounterWorkItemStatus++; if (buildCounterWorkItemStatus < 3) { unittest.expect(o.completed!, unittest.isTrue); - checkUnnamed137(o.counterUpdates!); + checkUnnamed138(o.counterUpdates!); checkDynamicSourceSplit(o.dynamicSourceSplit!); - checkUnnamed138(o.errors!); - checkUnnamed139(o.metricUpdates!); + checkUnnamed139(o.errors!); + checkUnnamed140(o.metricUpdates!); checkApproximateProgress(o.progress!); unittest.expect( o.reportIndex!, @@ -8301,12 +8389,12 @@ void checkWorkItemStatus(api.WorkItemStatus o) { buildCounterWorkItemStatus--; } -core.List buildUnnamed140() => [ +core.List buildUnnamed141() => [ buildWorkItemDetails(), buildWorkItemDetails(), ]; -void checkUnnamed140(core.List o) { +void checkUnnamed141(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkWorkItemDetails(o[0]); checkWorkItemDetails(o[1]); @@ -8317,7 +8405,7 @@ api.WorkerDetails buildWorkerDetails() { final o = api.WorkerDetails(); buildCounterWorkerDetails++; if (buildCounterWorkerDetails < 3) { - o.workItems = buildUnnamed140(); + o.workItems = buildUnnamed141(); o.workerName = 'foo'; } buildCounterWorkerDetails--; @@ -8327,7 +8415,7 @@ api.WorkerDetails buildWorkerDetails() { void checkWorkerDetails(api.WorkerDetails o) { buildCounterWorkerDetails++; if (buildCounterWorkerDetails < 3) { - checkUnnamed140(o.workItems!); + checkUnnamed141(o.workItems!); unittest.expect( o.workerName!, unittest.equals('foo'), @@ -8336,7 +8424,7 @@ void checkWorkerDetails(api.WorkerDetails o) { buildCounterWorkerDetails--; } -core.Map buildUnnamed141() => { +core.Map buildUnnamed142() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -8349,7 +8437,7 @@ core.Map buildUnnamed141() => { }, }; -void checkUnnamed141(core.Map o) { +void checkUnnamed142(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted54 = (o['x']!) 
as core.Map; unittest.expect(casted54, unittest.hasLength(3)); @@ -8381,15 +8469,15 @@ void checkUnnamed141(core.Map o) { ); } -core.List> buildUnnamed142() => [ - buildUnnamed141(), - buildUnnamed141(), +core.List> buildUnnamed143() => [ + buildUnnamed142(), + buildUnnamed142(), ]; -void checkUnnamed142(core.List> o) { +void checkUnnamed143(core.List> o) { unittest.expect(o, unittest.hasLength(2)); - checkUnnamed141(o[0]); - checkUnnamed141(o[1]); + checkUnnamed142(o[0]); + checkUnnamed142(o[1]); } core.int buildCounterWorkerHealthReport = 0; @@ -8398,7 +8486,7 @@ api.WorkerHealthReport buildWorkerHealthReport() { buildCounterWorkerHealthReport++; if (buildCounterWorkerHealthReport < 3) { o.msg = 'foo'; - o.pods = buildUnnamed142(); + o.pods = buildUnnamed143(); o.reportInterval = 'foo'; o.vmBrokenCode = 'foo'; o.vmIsBroken = true; @@ -8416,7 +8504,7 @@ void checkWorkerHealthReport(api.WorkerHealthReport o) { o.msg!, unittest.equals('foo'), ); - checkUnnamed142(o.pods!); + checkUnnamed143(o.pods!); unittest.expect( o.reportInterval!, unittest.equals('foo'), @@ -8457,12 +8545,12 @@ void checkWorkerHealthReportResponse(api.WorkerHealthReportResponse o) { buildCounterWorkerHealthReportResponse--; } -core.Map buildUnnamed143() => { +core.Map buildUnnamed144() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed143(core.Map o) { +void checkUnnamed144(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -8481,7 +8569,7 @@ api.WorkerLifecycleEvent buildWorkerLifecycleEvent() { if (buildCounterWorkerLifecycleEvent < 3) { o.containerStartTime = 'foo'; o.event = 'foo'; - o.metadata = buildUnnamed143(); + o.metadata = buildUnnamed144(); } buildCounterWorkerLifecycleEvent--; return o; @@ -8498,17 +8586,17 @@ void checkWorkerLifecycleEvent(api.WorkerLifecycleEvent o) { o.event!, unittest.equals('foo'), ); - checkUnnamed143(o.metadata!); + checkUnnamed144(o.metadata!); } buildCounterWorkerLifecycleEvent--; } -core.Map buildUnnamed144() => { +core.Map buildUnnamed145() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed144(core.Map o) { +void checkUnnamed145(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -8526,7 +8614,7 @@ api.WorkerMessage buildWorkerMessage() { buildCounterWorkerMessage++; if (buildCounterWorkerMessage < 3) { o.dataSamplingReport = buildDataSamplingReport(); - o.labels = buildUnnamed144(); + o.labels = buildUnnamed145(); o.perWorkerMetrics = buildPerWorkerMetrics(); o.streamingScalingReport = buildStreamingScalingReport(); o.time = 'foo'; @@ -8545,7 +8633,7 @@ void checkWorkerMessage(api.WorkerMessage o) { buildCounterWorkerMessage++; if (buildCounterWorkerMessage < 3) { checkDataSamplingReport(o.dataSamplingReport!); - checkUnnamed144(o.labels!); + checkUnnamed145(o.labels!); checkPerWorkerMetrics(o.perWorkerMetrics!); checkStreamingScalingReport(o.streamingScalingReport!); unittest.expect( @@ -8562,7 +8650,7 @@ void checkWorkerMessage(api.WorkerMessage o) { buildCounterWorkerMessage--; } -core.Map buildUnnamed145() => { +core.Map buildUnnamed146() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -8575,7 +8663,7 @@ core.Map buildUnnamed145() => { }, }; -void checkUnnamed145(core.Map o) { +void checkUnnamed146(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted56 = (o['x']!) 
as core.Map; unittest.expect(casted56, unittest.hasLength(3)); @@ -8613,7 +8701,7 @@ api.WorkerMessageCode buildWorkerMessageCode() { buildCounterWorkerMessageCode++; if (buildCounterWorkerMessageCode < 3) { o.code = 'foo'; - o.parameters = buildUnnamed145(); + o.parameters = buildUnnamed146(); } buildCounterWorkerMessageCode--; return o; @@ -8626,7 +8714,7 @@ void checkWorkerMessageCode(api.WorkerMessageCode o) { o.code!, unittest.equals('foo'), ); - checkUnnamed145(o.parameters!); + checkUnnamed146(o.parameters!); } buildCounterWorkerMessageCode--; } @@ -8660,23 +8748,23 @@ void checkWorkerMessageResponse(api.WorkerMessageResponse o) { buildCounterWorkerMessageResponse--; } -core.List buildUnnamed146() => [ +core.List buildUnnamed147() => [ buildDisk(), buildDisk(), ]; -void checkUnnamed146(core.List o) { +void checkUnnamed147(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkDisk(o[0]); checkDisk(o[1]); } -core.Map buildUnnamed147() => { +core.Map buildUnnamed148() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed147(core.Map o) { +void checkUnnamed148(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -8688,18 +8776,18 @@ void checkUnnamed147(core.Map o) { ); } -core.List buildUnnamed148() => [ +core.List buildUnnamed149() => [ buildPackage(), buildPackage(), ]; -void checkUnnamed148(core.List o) { +void checkUnnamed149(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkPackage(o[0]); checkPackage(o[1]); } -core.Map buildUnnamed149() => { +core.Map buildUnnamed150() => { 'x': { 'list': [1, 2, 3], 'bool': true, @@ -8712,7 +8800,7 @@ core.Map buildUnnamed149() => { }, }; -void checkUnnamed149(core.Map o) { +void checkUnnamed150(core.Map o) { unittest.expect(o, unittest.hasLength(2)); var casted58 = (o['x']!) 
as core.Map; unittest.expect(casted58, unittest.hasLength(3)); @@ -8744,12 +8832,12 @@ void checkUnnamed149(core.Map o) { ); } -core.List buildUnnamed150() => [ +core.List buildUnnamed151() => [ buildSdkHarnessContainerImage(), buildSdkHarnessContainerImage(), ]; -void checkUnnamed150(core.List o) { +void checkUnnamed151(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkSdkHarnessContainerImage(o[0]); checkSdkHarnessContainerImage(o[1]); @@ -8761,7 +8849,7 @@ api.WorkerPool buildWorkerPool() { buildCounterWorkerPool++; if (buildCounterWorkerPool < 3) { o.autoscalingSettings = buildAutoscalingSettings(); - o.dataDisks = buildUnnamed146(); + o.dataDisks = buildUnnamed147(); o.defaultPackageSet = 'foo'; o.diskSizeGb = 42; o.diskSourceImage = 'foo'; @@ -8769,14 +8857,14 @@ api.WorkerPool buildWorkerPool() { o.ipConfiguration = 'foo'; o.kind = 'foo'; o.machineType = 'foo'; - o.metadata = buildUnnamed147(); + o.metadata = buildUnnamed148(); o.network = 'foo'; o.numThreadsPerWorker = 42; o.numWorkers = 42; o.onHostMaintenance = 'foo'; - o.packages = buildUnnamed148(); - o.poolArgs = buildUnnamed149(); - o.sdkHarnessContainerImages = buildUnnamed150(); + o.packages = buildUnnamed149(); + o.poolArgs = buildUnnamed150(); + o.sdkHarnessContainerImages = buildUnnamed151(); o.subnetwork = 'foo'; o.taskrunnerSettings = buildTaskRunnerSettings(); o.teardownPolicy = 'foo'; @@ -8791,7 +8879,7 @@ void checkWorkerPool(api.WorkerPool o) { buildCounterWorkerPool++; if (buildCounterWorkerPool < 3) { checkAutoscalingSettings(o.autoscalingSettings!); - checkUnnamed146(o.dataDisks!); + checkUnnamed147(o.dataDisks!); unittest.expect( o.defaultPackageSet!, unittest.equals('foo'), @@ -8820,7 +8908,7 @@ void checkWorkerPool(api.WorkerPool o) { o.machineType!, unittest.equals('foo'), ); - checkUnnamed147(o.metadata!); + checkUnnamed148(o.metadata!); unittest.expect( o.network!, unittest.equals('foo'), @@ -8837,9 +8925,9 @@ void checkWorkerPool(api.WorkerPool o) { o.onHostMaintenance!, unittest.equals('foo'), ); - checkUnnamed148(o.packages!); - checkUnnamed149(o.poolArgs!); - checkUnnamed150(o.sdkHarnessContainerImages!); + checkUnnamed149(o.packages!); + checkUnnamed150(o.poolArgs!); + checkUnnamed151(o.sdkHarnessContainerImages!); unittest.expect( o.subnetwork!, unittest.equals('foo'), @@ -9249,6 +9337,16 @@ void main() { }); }); + unittest.group('obj-schema-DataflowGaugeValue', () { + unittest.test('to-json--from-json', () async { + final o = buildDataflowGaugeValue(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.DataflowGaugeValue.fromJson( + oJson as core.Map); + checkDataflowGaugeValue(od); + }); + }); + unittest.group('obj-schema-DataflowHistogramValue', () { unittest.test('to-json--from-json', () async { final o = buildDataflowHistogramValue(); @@ -9429,6 +9527,26 @@ void main() { }); }); + unittest.group('obj-schema-GPUUsage', () { + unittest.test('to-json--from-json', () async { + final o = buildGPUUsage(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = + api.GPUUsage.fromJson(oJson as core.Map); + checkGPUUsage(od); + }); + }); + + unittest.group('obj-schema-GPUUtilization', () { + unittest.test('to-json--from-json', () async { + final o = buildGPUUtilization(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.GPUUtilization.fromJson( + oJson as core.Map); + checkGPUUtilization(od); + }); + }); + unittest.group('obj-schema-GetDebugConfigRequest', () { unittest.test('to-json--from-json', () async { final o = 
buildGetDebugConfigRequest(); diff --git a/generated/googleapis_beta/test/firebasestorage/v1beta_test.dart b/generated/googleapis_beta/test/firebasestorage/v1beta_test.dart index a427aa2d1..daff118b6 100644 --- a/generated/googleapis_beta/test/firebasestorage/v1beta_test.dart +++ b/generated/googleapis_beta/test/firebasestorage/v1beta_test.dart @@ -63,6 +63,40 @@ void checkBucket(api.Bucket o) { buildCounterBucket--; } +core.int buildCounterDefaultBucket = 0; +api.DefaultBucket buildDefaultBucket() { + final o = api.DefaultBucket(); + buildCounterDefaultBucket++; + if (buildCounterDefaultBucket < 3) { + o.bucket = buildBucket(); + o.location = 'foo'; + o.name = 'foo'; + o.storageClass = 'foo'; + } + buildCounterDefaultBucket--; + return o; +} + +void checkDefaultBucket(api.DefaultBucket o) { + buildCounterDefaultBucket++; + if (buildCounterDefaultBucket < 3) { + checkBucket(o.bucket!); + unittest.expect( + o.location!, + unittest.equals('foo'), + ); + unittest.expect( + o.name!, + unittest.equals('foo'), + ); + unittest.expect( + o.storageClass!, + unittest.equals('foo'), + ); + } + buildCounterDefaultBucket--; +} + core.int buildCounterEmpty = 0; api.Empty buildEmpty() { final o = api.Empty(); @@ -149,6 +183,16 @@ void main() { }); }); + unittest.group('obj-schema-DefaultBucket', () { + unittest.test('to-json--from-json', () async { + final o = buildDefaultBucket(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.DefaultBucket.fromJson( + oJson as core.Map); + checkDefaultBucket(od); + }); + }); + unittest.group('obj-schema-Empty', () { unittest.test('to-json--from-json', () async { final o = buildEmpty(); @@ -179,6 +223,114 @@ void main() { }); }); + unittest.group('resource-ProjectsResource', () { + unittest.test('method--deleteDefaultBucket', () async { + final mock = HttpServerMock(); + final res = api.FirebasestorageApi(mock).projects; + final arg_name = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 7), + unittest.equals('v1beta/'), + ); + pathOffset += 7; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildEmpty()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = + await res.deleteDefaultBucket(arg_name, $fields: arg_$fields); + checkEmpty(response as api.Empty); + }); + + unittest.test('method--getDefaultBucket', () async { + final mock = HttpServerMock(); + final res = api.FirebasestorageApi(mock).projects; + final arg_name = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final path = req.url.path; + 
var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 7), + unittest.equals('v1beta/'), + ); + pathOffset += 7; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildDefaultBucket()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = + await res.getDefaultBucket(arg_name, $fields: arg_$fields); + checkDefaultBucket(response as api.DefaultBucket); + }); + }); + unittest.group('resource-ProjectsBucketsResource', () { unittest.test('method--addFirebase', () async { final mock = HttpServerMock(); @@ -413,4 +565,64 @@ void main() { checkEmpty(response as api.Empty); }); }); + + unittest.group('resource-ProjectsDefaultBucketResource', () { + unittest.test('method--create', () async { + final mock = HttpServerMock(); + final res = api.FirebasestorageApi(mock).projects.defaultBucket; + final arg_request = buildDefaultBucket(); + final arg_parent = 'foo'; + final arg_$fields = 'foo'; + mock.register(unittest.expectAsync2((http.BaseRequest req, json) { + final obj = api.DefaultBucket.fromJson( + json as core.Map); + checkDefaultBucket(obj); + + final path = req.url.path; + var pathOffset = 0; + core.int index; + core.String subPart; + unittest.expect( + path.substring(pathOffset, pathOffset + 1), + unittest.equals('/'), + ); + pathOffset += 1; + unittest.expect( + path.substring(pathOffset, pathOffset + 7), + unittest.equals('v1beta/'), + ); + pathOffset += 7; + // NOTE: We cannot test reserved expansions due to the inability to reverse the operation; + + final query = req.url.query; + var queryOffset = 0; + final queryMap = >{}; + void addQueryParam(core.String n, core.String v) => + queryMap.putIfAbsent(n, () => []).add(v); + + if (query.isNotEmpty) { + for (var part in query.split('&')) { + final keyValue = part.split('='); + addQueryParam( + core.Uri.decodeQueryComponent(keyValue[0]), + core.Uri.decodeQueryComponent(keyValue[1]), + ); + } + } + unittest.expect( + queryMap['fields']!.first, + unittest.equals(arg_$fields), + ); + + final h = { + 'content-type': 'application/json; charset=utf-8', + }; + final resp = convert.json.encode(buildDefaultBucket()); + return async.Future.value(stringResponse(200, h, resp)); + }), true); + final response = + await res.create(arg_request, arg_parent, $fields: arg_$fields); + checkDefaultBucket(response as api.DefaultBucket); + }); + }); } diff --git a/generated/googleapis_beta/test/gkehub/v2alpha_test.dart b/generated/googleapis_beta/test/gkehub/v2alpha_test.dart index 1990051ae..6cf99cb1b 100644 --- a/generated/googleapis_beta/test/gkehub/v2alpha_test.dart +++ b/generated/googleapis_beta/test/gkehub/v2alpha_test.dart @@ -334,6 +334,7 @@ api.ConfigManagementConfigSync buildConfigManagementConfigSync() { o.oci = 
buildConfigManagementOciConfig(); o.preventDrift = true; o.sourceFormat = 'foo'; + o.stopSyncing = true; } buildCounterConfigManagementConfigSync--; return o; @@ -355,6 +356,7 @@ void checkConfigManagementConfigSync(api.ConfigManagementConfigSync o) { o.sourceFormat!, unittest.equals('foo'), ); + unittest.expect(o.stopSyncing!, unittest.isTrue); } buildCounterConfigManagementConfigSync--; } @@ -463,6 +465,7 @@ api.ConfigManagementConfigSyncState buildConfigManagementConfigSyncState() { buildCounterConfigManagementConfigSyncState++; if (buildCounterConfigManagementConfigSyncState < 3) { o.clusterLevelStopSyncingState = 'foo'; + o.crCount = 42; o.deploymentState = buildConfigManagementConfigSyncDeploymentState(); o.errors = buildUnnamed1(); o.reposyncCrd = 'foo'; @@ -483,6 +486,10 @@ void checkConfigManagementConfigSyncState( o.clusterLevelStopSyncingState!, unittest.equals('foo'), ); + unittest.expect( + o.crCount!, + unittest.equals(42), + ); checkConfigManagementConfigSyncDeploymentState(o.deploymentState!); checkUnnamed1(o.errors!); unittest.expect( @@ -1299,38 +1306,6 @@ void checkEmpty(api.Empty o) { buildCounterEmpty--; } -core.int buildCounterFeatureConfigRef = 0; -api.FeatureConfigRef buildFeatureConfigRef() { - final o = api.FeatureConfigRef(); - buildCounterFeatureConfigRef++; - if (buildCounterFeatureConfigRef < 3) { - o.config = 'foo'; - o.configUpdateTime = 'foo'; - o.uuid = 'foo'; - } - buildCounterFeatureConfigRef--; - return o; -} - -void checkFeatureConfigRef(api.FeatureConfigRef o) { - buildCounterFeatureConfigRef++; - if (buildCounterFeatureConfigRef < 3) { - unittest.expect( - o.config!, - unittest.equals('foo'), - ); - unittest.expect( - o.configUpdateTime!, - unittest.equals('foo'), - ); - unittest.expect( - o.uuid!, - unittest.equals('foo'), - ); - } - buildCounterFeatureConfigRef--; -} - core.int buildCounterFeatureSpec = 0; api.FeatureSpec buildFeatureSpec() { final o = api.FeatureSpec(); @@ -2321,7 +2296,6 @@ api.MembershipFeature buildMembershipFeature() { if (buildCounterMembershipFeature < 3) { o.createTime = 'foo'; o.deleteTime = 'foo'; - o.featureConfigRef = buildFeatureConfigRef(); o.labels = buildUnnamed18(); o.lifecycleState = buildLifecycleState(); o.name = 'foo'; @@ -2344,7 +2318,6 @@ void checkMembershipFeature(api.MembershipFeature o) { o.deleteTime!, unittest.equals('foo'), ); - checkFeatureConfigRef(o.featureConfigRef!); checkUnnamed18(o.labels!); checkLifecycleState(o.lifecycleState!); unittest.expect( @@ -3811,16 +3784,6 @@ void main() { }); }); - unittest.group('obj-schema-FeatureConfigRef', () { - unittest.test('to-json--from-json', () async { - final o = buildFeatureConfigRef(); - final oJson = convert.jsonDecode(convert.jsonEncode(o)); - final od = api.FeatureConfigRef.fromJson( - oJson as core.Map); - checkFeatureConfigRef(od); - }); - }); - unittest.group('obj-schema-FeatureSpec', () { unittest.test('to-json--from-json', () async { final o = buildFeatureSpec(); diff --git a/generated/googleapis_beta/test/sqladmin/v1beta4_test.dart b/generated/googleapis_beta/test/sqladmin/v1beta4_test.dart index cd71de58e..65fcfe74b 100644 --- a/generated/googleapis_beta/test/sqladmin/v1beta4_test.dart +++ b/generated/googleapis_beta/test/sqladmin/v1beta4_test.dart @@ -562,12 +562,29 @@ void checkCloneContext(api.CloneContext o) { buildCounterCloneContext--; } -core.List buildUnnamed2() => [ +core.List buildUnnamed2() => [ + 'foo', + 'foo', + ]; + +void checkUnnamed2(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o[0], + 
unittest.equals('foo'), + ); + unittest.expect( + o[1], + unittest.equals('foo'), + ); +} + +core.List buildUnnamed3() => [ buildIpMapping(), buildIpMapping(), ]; -void checkUnnamed2(core.List o) { +void checkUnnamed3(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkIpMapping(o[0]); checkIpMapping(o[1]); @@ -579,9 +596,10 @@ api.ConnectSettings buildConnectSettings() { buildCounterConnectSettings++; if (buildCounterConnectSettings < 3) { o.backendType = 'foo'; + o.customSubjectAlternativeNames = buildUnnamed2(); o.databaseVersion = 'foo'; o.dnsName = 'foo'; - o.ipAddresses = buildUnnamed2(); + o.ipAddresses = buildUnnamed3(); o.kind = 'foo'; o.pscEnabled = true; o.region = 'foo'; @@ -599,6 +617,7 @@ void checkConnectSettings(api.ConnectSettings o) { o.backendType!, unittest.equals('foo'), ); + checkUnnamed2(o.customSubjectAlternativeNames!); unittest.expect( o.databaseVersion!, unittest.equals('foo'), @@ -607,7 +626,7 @@ void checkConnectSettings(api.ConnectSettings o) { o.dnsName!, unittest.equals('foo'), ); - checkUnnamed2(o.ipAddresses!); + checkUnnamed3(o.ipAddresses!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -731,12 +750,12 @@ void checkDatabaseFlags(api.DatabaseFlags o) { buildCounterDatabaseFlags--; } -core.List buildUnnamed3() => [ +core.List buildUnnamed4() => [ 'foo', 'foo', ]; -void checkUnnamed3(core.List o) { +void checkUnnamed4(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -773,23 +792,23 @@ void checkDatabaseInstanceFailoverReplica( buildCounterDatabaseInstanceFailoverReplica--; } -core.List buildUnnamed4() => [ +core.List buildUnnamed5() => [ buildIpMapping(), buildIpMapping(), ]; -void checkUnnamed4(core.List o) { +void checkUnnamed5(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkIpMapping(o[0]); checkIpMapping(o[1]); } -core.List buildUnnamed5() => [ +core.List buildUnnamed6() => [ 'foo', 'foo', ]; -void checkUnnamed5(core.List o) { +void checkUnnamed6(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -801,12 +820,12 @@ void checkUnnamed5(core.List o) { ); } -core.List buildUnnamed6() => [ +core.List buildUnnamed7() => [ 'foo', 'foo', ]; -void checkUnnamed6(core.List o) { +void checkUnnamed7(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -818,12 +837,29 @@ void checkUnnamed6(core.List o) { ); } -core.List buildUnnamed7() => [ +core.Map buildUnnamed8() => { + 'x': 'foo', + 'y': 'foo', + }; + +void checkUnnamed8(core.Map o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o['x']!, + unittest.equals('foo'), + ); + unittest.expect( + o['y']!, + unittest.equals('foo'), + ); +} + +core.List buildUnnamed9() => [ buildAvailableDatabaseVersion(), buildAvailableDatabaseVersion(), ]; -void checkUnnamed7(core.List o) { +void checkUnnamed9(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkAvailableDatabaseVersion(o[0]); checkAvailableDatabaseVersion(o[1]); @@ -834,7 +870,7 @@ api.DatabaseInstance buildDatabaseInstance() { final o = api.DatabaseInstance(); buildCounterDatabaseInstance++; if (buildCounterDatabaseInstance < 3) { - o.availableMaintenanceVersions = buildUnnamed3(); + o.availableMaintenanceVersions = buildUnnamed4(); o.backendType = 'foo'; o.connectionName = 'foo'; o.createTime = 'foo'; @@ -849,7 +885,7 @@ api.DatabaseInstance buildDatabaseInstance() { o.gceZone = 'foo'; o.geminiConfig = buildGeminiInstanceConfig(); o.instanceType = 'foo'; - o.ipAddresses = buildUnnamed4(); + o.ipAddresses = 
buildUnnamed5(); o.ipv6Address = 'foo'; o.kind = 'foo'; o.maintenanceVersion = 'foo'; @@ -863,7 +899,7 @@ api.DatabaseInstance buildDatabaseInstance() { o.pscServiceAttachmentLink = 'foo'; o.region = 'foo'; o.replicaConfiguration = buildReplicaConfiguration(); - o.replicaNames = buildUnnamed5(); + o.replicaNames = buildUnnamed6(); o.replicationCluster = buildReplicationCluster(); o.rootPassword = 'foo'; o.satisfiesPzi = true; @@ -876,9 +912,10 @@ api.DatabaseInstance buildDatabaseInstance() { o.settings = buildSettings(); o.sqlNetworkArchitecture = 'foo'; o.state = 'foo'; - o.suspensionReason = buildUnnamed6(); + o.suspensionReason = buildUnnamed7(); o.switchTransactionLogsToCloudStorageEnabled = true; - o.upgradableDatabaseVersions = buildUnnamed7(); + o.tags = buildUnnamed8(); + o.upgradableDatabaseVersions = buildUnnamed9(); o.writeEndpoint = 'foo'; } buildCounterDatabaseInstance--; @@ -888,7 +925,7 @@ api.DatabaseInstance buildDatabaseInstance() { void checkDatabaseInstance(api.DatabaseInstance o) { buildCounterDatabaseInstance++; if (buildCounterDatabaseInstance < 3) { - checkUnnamed3(o.availableMaintenanceVersions!); + checkUnnamed4(o.availableMaintenanceVersions!); unittest.expect( o.backendType!, unittest.equals('foo'), @@ -933,7 +970,7 @@ void checkDatabaseInstance(api.DatabaseInstance o) { o.instanceType!, unittest.equals('foo'), ); - checkUnnamed4(o.ipAddresses!); + checkUnnamed5(o.ipAddresses!); unittest.expect( o.ipv6Address!, unittest.equals('foo'), @@ -977,7 +1014,7 @@ void checkDatabaseInstance(api.DatabaseInstance o) { unittest.equals('foo'), ); checkReplicaConfiguration(o.replicaConfiguration!); - checkUnnamed5(o.replicaNames!); + checkUnnamed6(o.replicaNames!); checkReplicationCluster(o.replicationCluster!); unittest.expect( o.rootPassword!, @@ -1008,10 +1045,11 @@ void checkDatabaseInstance(api.DatabaseInstance o) { o.state!, unittest.equals('foo'), ); - checkUnnamed6(o.suspensionReason!); + checkUnnamed7(o.suspensionReason!); unittest.expect( o.switchTransactionLogsToCloudStorageEnabled!, unittest.isTrue); - checkUnnamed7(o.upgradableDatabaseVersions!); + checkUnnamed8(o.tags!); + checkUnnamed9(o.upgradableDatabaseVersions!); unittest.expect( o.writeEndpoint!, unittest.equals('foo'), @@ -1020,12 +1058,12 @@ void checkDatabaseInstance(api.DatabaseInstance o) { buildCounterDatabaseInstance--; } -core.List buildUnnamed8() => [ +core.List buildUnnamed10() => [ buildDatabase(), buildDatabase(), ]; -void checkUnnamed8(core.List o) { +void checkUnnamed10(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkDatabase(o[0]); checkDatabase(o[1]); @@ -1036,7 +1074,7 @@ api.DatabasesListResponse buildDatabasesListResponse() { final o = api.DatabasesListResponse(); buildCounterDatabasesListResponse++; if (buildCounterDatabasesListResponse < 3) { - o.items = buildUnnamed8(); + o.items = buildUnnamed10(); o.kind = 'foo'; } buildCounterDatabasesListResponse--; @@ -1046,7 +1084,7 @@ api.DatabasesListResponse buildDatabasesListResponse() { void checkDatabasesListResponse(api.DatabasesListResponse o) { buildCounterDatabasesListResponse++; if (buildCounterDatabasesListResponse < 3) { - checkUnnamed8(o.items!); + checkUnnamed10(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -1374,12 +1412,12 @@ void checkExportContextCsvExportOptions(api.ExportContextCsvExportOptions o) { buildCounterExportContextCsvExportOptions--; } -core.List buildUnnamed9() => [ +core.List buildUnnamed11() => [ 'foo', 'foo', ]; -void checkUnnamed9(core.List o) { +void checkUnnamed11(core.List 
o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -1438,12 +1476,12 @@ void checkExportContextSqlExportOptionsPostgresExportOptions( buildCounterExportContextSqlExportOptionsPostgresExportOptions--; } -core.List buildUnnamed10() => [ +core.List buildUnnamed12() => [ 'foo', 'foo', ]; -void checkUnnamed10(core.List o) { +void checkUnnamed12(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -1466,7 +1504,7 @@ api.ExportContextSqlExportOptions buildExportContextSqlExportOptions() { o.postgresExportOptions = buildExportContextSqlExportOptionsPostgresExportOptions(); o.schemaOnly = true; - o.tables = buildUnnamed10(); + o.tables = buildUnnamed12(); o.threads = 42; } buildCounterExportContextSqlExportOptions--; @@ -1481,7 +1519,7 @@ void checkExportContextSqlExportOptions(api.ExportContextSqlExportOptions o) { checkExportContextSqlExportOptionsPostgresExportOptions( o.postgresExportOptions!); unittest.expect(o.schemaOnly!, unittest.isTrue); - checkUnnamed10(o.tables!); + checkUnnamed12(o.tables!); unittest.expect( o.threads!, unittest.equals(42), @@ -1497,7 +1535,7 @@ api.ExportContext buildExportContext() { if (buildCounterExportContext < 3) { o.bakExportOptions = buildExportContextBakExportOptions(); o.csvExportOptions = buildExportContextCsvExportOptions(); - o.databases = buildUnnamed9(); + o.databases = buildUnnamed11(); o.fileType = 'foo'; o.kind = 'foo'; o.offload = true; @@ -1513,7 +1551,7 @@ void checkExportContext(api.ExportContext o) { if (buildCounterExportContext < 3) { checkExportContextBakExportOptions(o.bakExportOptions!); checkExportContextCsvExportOptions(o.csvExportOptions!); - checkUnnamed9(o.databases!); + checkUnnamed11(o.databases!); unittest.expect( o.fileType!, unittest.equals('foo'), @@ -1532,6 +1570,28 @@ void checkExportContext(api.ExportContext o) { buildCounterExportContext--; } +core.int buildCounterExternalSyncSelectedObject = 0; +api.ExternalSyncSelectedObject buildExternalSyncSelectedObject() { + final o = api.ExternalSyncSelectedObject(); + buildCounterExternalSyncSelectedObject++; + if (buildCounterExternalSyncSelectedObject < 3) { + o.database = 'foo'; + } + buildCounterExternalSyncSelectedObject--; + return o; +} + +void checkExternalSyncSelectedObject(api.ExternalSyncSelectedObject o) { + buildCounterExternalSyncSelectedObject++; + if (buildCounterExternalSyncSelectedObject < 3) { + unittest.expect( + o.database!, + unittest.equals('foo'), + ); + } + buildCounterExternalSyncSelectedObject--; +} + core.int buildCounterFailoverContext = 0; api.FailoverContext buildFailoverContext() { final o = api.FailoverContext(); @@ -1559,12 +1619,12 @@ void checkFailoverContext(api.FailoverContext o) { buildCounterFailoverContext--; } -core.List buildUnnamed11() => [ +core.List buildUnnamed13() => [ 'foo', 'foo', ]; -void checkUnnamed11(core.List o) { +void checkUnnamed13(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -1576,12 +1636,12 @@ void checkUnnamed11(core.List o) { ); } -core.List buildUnnamed12() => [ +core.List buildUnnamed14() => [ 'foo', 'foo', ]; -void checkUnnamed12(core.List o) { +void checkUnnamed14(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -1593,12 +1653,12 @@ void checkUnnamed12(core.List o) { ); } -core.List buildUnnamed13() => [ +core.List buildUnnamed15() => [ 'foo', 'foo', ]; -void checkUnnamed13(core.List o) { +void checkUnnamed15(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ 
-1615,9 +1675,9 @@ api.Flag buildFlag() { final o = api.Flag(); buildCounterFlag++; if (buildCounterFlag < 3) { - o.allowedIntValues = buildUnnamed11(); - o.allowedStringValues = buildUnnamed12(); - o.appliesTo = buildUnnamed13(); + o.allowedIntValues = buildUnnamed13(); + o.allowedStringValues = buildUnnamed14(); + o.appliesTo = buildUnnamed15(); o.inBeta = true; o.kind = 'foo'; o.maxValue = 'foo'; @@ -1633,9 +1693,9 @@ api.Flag buildFlag() { void checkFlag(api.Flag o) { buildCounterFlag++; if (buildCounterFlag < 3) { - checkUnnamed11(o.allowedIntValues!); - checkUnnamed12(o.allowedStringValues!); - checkUnnamed13(o.appliesTo!); + checkUnnamed13(o.allowedIntValues!); + checkUnnamed14(o.allowedStringValues!); + checkUnnamed15(o.appliesTo!); unittest.expect(o.inBeta!, unittest.isTrue); unittest.expect( o.kind!, @@ -1662,12 +1722,12 @@ void checkFlag(api.Flag o) { buildCounterFlag--; } -core.List buildUnnamed14() => [ +core.List buildUnnamed16() => [ buildFlag(), buildFlag(), ]; -void checkUnnamed14(core.List o) { +void checkUnnamed16(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkFlag(o[0]); checkFlag(o[1]); @@ -1678,7 +1738,7 @@ api.FlagsListResponse buildFlagsListResponse() { final o = api.FlagsListResponse(); buildCounterFlagsListResponse++; if (buildCounterFlagsListResponse < 3) { - o.items = buildUnnamed14(); + o.items = buildUnnamed16(); o.kind = 'foo'; } buildCounterFlagsListResponse--; @@ -1688,7 +1748,7 @@ api.FlagsListResponse buildFlagsListResponse() { void checkFlagsListResponse(api.FlagsListResponse o) { buildCounterFlagsListResponse++; if (buildCounterFlagsListResponse < 3) { - checkUnnamed14(o.items!); + checkUnnamed16(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -1856,12 +1916,12 @@ void checkImportContextBakImportOptions(api.ImportContextBakImportOptions o) { buildCounterImportContextBakImportOptions--; } -core.List buildUnnamed15() => [ +core.List buildUnnamed17() => [ 'foo', 'foo', ]; -void checkUnnamed15(core.List o) { +void checkUnnamed17(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -1878,7 +1938,7 @@ api.ImportContextCsvImportOptions buildImportContextCsvImportOptions() { final o = api.ImportContextCsvImportOptions(); buildCounterImportContextCsvImportOptions++; if (buildCounterImportContextCsvImportOptions < 3) { - o.columns = buildUnnamed15(); + o.columns = buildUnnamed17(); o.escapeCharacter = 'foo'; o.fieldsTerminatedBy = 'foo'; o.linesTerminatedBy = 'foo'; @@ -1892,7 +1952,7 @@ api.ImportContextCsvImportOptions buildImportContextCsvImportOptions() { void checkImportContextCsvImportOptions(api.ImportContextCsvImportOptions o) { buildCounterImportContextCsvImportOptions++; if (buildCounterImportContextCsvImportOptions < 3) { - checkUnnamed15(o.columns!); + checkUnnamed17(o.columns!); unittest.expect( o.escapeCharacter!, unittest.equals('foo'), @@ -2215,23 +2275,23 @@ void checkInstancesImportRequest(api.InstancesImportRequest o) { buildCounterInstancesImportRequest--; } -core.List buildUnnamed16() => [ +core.List buildUnnamed18() => [ buildDatabaseInstance(), buildDatabaseInstance(), ]; -void checkUnnamed16(core.List o) { +void checkUnnamed18(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkDatabaseInstance(o[0]); checkDatabaseInstance(o[1]); } -core.List buildUnnamed17() => [ +core.List buildUnnamed19() => [ buildApiWarning(), buildApiWarning(), ]; -void checkUnnamed17(core.List o) { +void checkUnnamed19(core.List o) { unittest.expect(o, unittest.hasLength(2)); 
checkApiWarning(o[0]); checkApiWarning(o[1]); @@ -2242,10 +2302,10 @@ api.InstancesListResponse buildInstancesListResponse() { final o = api.InstancesListResponse(); buildCounterInstancesListResponse++; if (buildCounterInstancesListResponse < 3) { - o.items = buildUnnamed16(); + o.items = buildUnnamed18(); o.kind = 'foo'; o.nextPageToken = 'foo'; - o.warnings = buildUnnamed17(); + o.warnings = buildUnnamed19(); } buildCounterInstancesListResponse--; return o; @@ -2254,7 +2314,7 @@ api.InstancesListResponse buildInstancesListResponse() { void checkInstancesListResponse(api.InstancesListResponse o) { buildCounterInstancesListResponse++; if (buildCounterInstancesListResponse < 3) { - checkUnnamed16(o.items!); + checkUnnamed18(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -2263,17 +2323,17 @@ void checkInstancesListResponse(api.InstancesListResponse o) { o.nextPageToken!, unittest.equals('foo'), ); - checkUnnamed17(o.warnings!); + checkUnnamed19(o.warnings!); } buildCounterInstancesListResponse--; } -core.List buildUnnamed18() => [ +core.List buildUnnamed20() => [ buildSslCert(), buildSslCert(), ]; -void checkUnnamed18(core.List o) { +void checkUnnamed20(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkSslCert(o[0]); checkSslCert(o[1]); @@ -2285,7 +2345,7 @@ api.InstancesListServerCasResponse buildInstancesListServerCasResponse() { buildCounterInstancesListServerCasResponse++; if (buildCounterInstancesListServerCasResponse < 3) { o.activeVersion = 'foo'; - o.certs = buildUnnamed18(); + o.certs = buildUnnamed20(); o.kind = 'foo'; } buildCounterInstancesListServerCasResponse--; @@ -2299,7 +2359,7 @@ void checkInstancesListServerCasResponse(api.InstancesListServerCasResponse o) { o.activeVersion!, unittest.equals('foo'), ); - checkUnnamed18(o.certs!); + checkUnnamed20(o.certs!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -2308,23 +2368,23 @@ void checkInstancesListServerCasResponse(api.InstancesListServerCasResponse o) { buildCounterInstancesListServerCasResponse--; } -core.List buildUnnamed19() => [ +core.List buildUnnamed21() => [ buildSslCert(), buildSslCert(), ]; -void checkUnnamed19(core.List o) { +void checkUnnamed21(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkSslCert(o[0]); checkSslCert(o[1]); } -core.List buildUnnamed20() => [ +core.List buildUnnamed22() => [ buildSslCert(), buildSslCert(), ]; -void checkUnnamed20(core.List o) { +void checkUnnamed22(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkSslCert(o[0]); checkSslCert(o[1]); @@ -2337,9 +2397,9 @@ api.InstancesListServerCertificatesResponse buildCounterInstancesListServerCertificatesResponse++; if (buildCounterInstancesListServerCertificatesResponse < 3) { o.activeVersion = 'foo'; - o.caCerts = buildUnnamed19(); + o.caCerts = buildUnnamed21(); o.kind = 'foo'; - o.serverCerts = buildUnnamed20(); + o.serverCerts = buildUnnamed22(); } buildCounterInstancesListServerCertificatesResponse--; return o; @@ -2353,12 +2413,12 @@ void checkInstancesListServerCertificatesResponse( o.activeVersion!, unittest.equals('foo'), ); - checkUnnamed19(o.caCerts!); + checkUnnamed21(o.caCerts!); unittest.expect( o.kind!, unittest.equals('foo'), ); - checkUnnamed20(o.serverCerts!); + checkUnnamed22(o.serverCerts!); } buildCounterInstancesListServerCertificatesResponse--; } @@ -2460,30 +2520,49 @@ void checkInstancesTruncateLogRequest(api.InstancesTruncateLogRequest o) { buildCounterInstancesTruncateLogRequest--; } -core.List buildUnnamed21() => [ +core.List buildUnnamed23() => [ 
buildAclEntry(), buildAclEntry(), ]; -void checkUnnamed21(core.List o) { +void checkUnnamed23(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkAclEntry(o[0]); checkAclEntry(o[1]); } +core.List buildUnnamed24() => [ + 'foo', + 'foo', + ]; + +void checkUnnamed24(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + unittest.expect( + o[0], + unittest.equals('foo'), + ); + unittest.expect( + o[1], + unittest.equals('foo'), + ); +} + core.int buildCounterIpConfiguration = 0; api.IpConfiguration buildIpConfiguration() { final o = api.IpConfiguration(); buildCounterIpConfiguration++; if (buildCounterIpConfiguration < 3) { o.allocatedIpRange = 'foo'; - o.authorizedNetworks = buildUnnamed21(); + o.authorizedNetworks = buildUnnamed23(); + o.customSubjectAlternativeNames = buildUnnamed24(); o.enablePrivatePathForGoogleCloudServices = true; o.ipv4Enabled = true; o.privateNetwork = 'foo'; o.pscConfig = buildPscConfig(); o.requireSsl = true; o.serverCaMode = 'foo'; + o.serverCaPool = 'foo'; o.sslMode = 'foo'; } buildCounterIpConfiguration--; @@ -2497,7 +2576,8 @@ void checkIpConfiguration(api.IpConfiguration o) { o.allocatedIpRange!, unittest.equals('foo'), ); - checkUnnamed21(o.authorizedNetworks!); + checkUnnamed23(o.authorizedNetworks!); + checkUnnamed24(o.customSubjectAlternativeNames!); unittest.expect( o.enablePrivatePathForGoogleCloudServices!, unittest.isTrue); unittest.expect(o.ipv4Enabled!, unittest.isTrue); @@ -2511,6 +2591,10 @@ void checkIpConfiguration(api.IpConfiguration o) { o.serverCaMode!, unittest.equals('foo'), ); + unittest.expect( + o.serverCaPool!, + unittest.equals('foo'), + ); unittest.expect( o.sslMode!, unittest.equals('foo'), @@ -2694,12 +2778,12 @@ void checkMySqlReplicaConfiguration(api.MySqlReplicaConfiguration o) { buildCounterMySqlReplicaConfiguration--; } -core.List buildUnnamed22() => [ +core.List buildUnnamed25() => [ buildSyncFlags(), buildSyncFlags(), ]; -void checkUnnamed22(core.List o) { +void checkUnnamed25(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkSyncFlags(o[0]); checkSyncFlags(o[1]); @@ -2710,7 +2794,7 @@ api.MySqlSyncConfig buildMySqlSyncConfig() { final o = api.MySqlSyncConfig(); buildCounterMySqlSyncConfig++; if (buildCounterMySqlSyncConfig < 3) { - o.initialSyncFlags = buildUnnamed22(); + o.initialSyncFlags = buildUnnamed25(); } buildCounterMySqlSyncConfig--; return o; @@ -2719,11 +2803,22 @@ api.MySqlSyncConfig buildMySqlSyncConfig() { void checkMySqlSyncConfig(api.MySqlSyncConfig o) { buildCounterMySqlSyncConfig++; if (buildCounterMySqlSyncConfig < 3) { - checkUnnamed22(o.initialSyncFlags!); + checkUnnamed25(o.initialSyncFlags!); } buildCounterMySqlSyncConfig--; } +core.List buildUnnamed26() => [ + buildSelectedObjects(), + buildSelectedObjects(), + ]; + +void checkUnnamed26(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkSelectedObjects(o[0]); + checkSelectedObjects(o[1]); +} + core.int buildCounterOnPremisesConfiguration = 0; api.OnPremisesConfiguration buildOnPremisesConfiguration() { final o = api.OnPremisesConfiguration(); @@ -2736,7 +2831,9 @@ api.OnPremisesConfiguration buildOnPremisesConfiguration() { o.hostPort = 'foo'; o.kind = 'foo'; o.password = 'foo'; + o.selectedObjects = buildUnnamed26(); o.sourceInstance = buildInstanceReference(); + o.sslOption = 'foo'; o.username = 'foo'; } buildCounterOnPremisesConfiguration--; @@ -2774,7 +2871,12 @@ void checkOnPremisesConfiguration(api.OnPremisesConfiguration o) { o.password!, unittest.equals('foo'), ); + 
checkUnnamed26(o.selectedObjects!); checkInstanceReference(o.sourceInstance!); + unittest.expect( + o.sslOption!, + unittest.equals('foo'), + ); unittest.expect( o.username!, unittest.equals('foo'), @@ -2802,6 +2904,7 @@ api.Operation buildOperation() { o.selfLink = 'foo'; o.startTime = 'foo'; o.status = 'foo'; + o.subOperationType = buildSqlSubOperationType(); o.targetId = 'foo'; o.targetLink = 'foo'; o.targetProject = 'foo'; @@ -2852,6 +2955,7 @@ void checkOperation(api.Operation o) { o.status!, unittest.equals('foo'), ); + checkSqlSubOperationType(o.subOperationType!); unittest.expect( o.targetId!, unittest.equals('foo'), @@ -2904,12 +3008,12 @@ void checkOperationError(api.OperationError o) { buildCounterOperationError--; } -core.List buildUnnamed23() => [ +core.List buildUnnamed27() => [ buildOperationError(), buildOperationError(), ]; -void checkUnnamed23(core.List o) { +void checkUnnamed27(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkOperationError(o[0]); checkOperationError(o[1]); @@ -2920,7 +3024,7 @@ api.OperationErrors buildOperationErrors() { final o = api.OperationErrors(); buildCounterOperationErrors++; if (buildCounterOperationErrors < 3) { - o.errors = buildUnnamed23(); + o.errors = buildUnnamed27(); o.kind = 'foo'; } buildCounterOperationErrors--; @@ -2930,7 +3034,7 @@ api.OperationErrors buildOperationErrors() { void checkOperationErrors(api.OperationErrors o) { buildCounterOperationErrors++; if (buildCounterOperationErrors < 3) { - checkUnnamed23(o.errors!); + checkUnnamed27(o.errors!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -2939,12 +3043,12 @@ void checkOperationErrors(api.OperationErrors o) { buildCounterOperationErrors--; } -core.List buildUnnamed24() => [ +core.List buildUnnamed28() => [ buildOperation(), buildOperation(), ]; -void checkUnnamed24(core.List o) { +void checkUnnamed28(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkOperation(o[0]); checkOperation(o[1]); @@ -2955,7 +3059,7 @@ api.OperationsListResponse buildOperationsListResponse() { final o = api.OperationsListResponse(); buildCounterOperationsListResponse++; if (buildCounterOperationsListResponse < 3) { - o.items = buildUnnamed24(); + o.items = buildUnnamed28(); o.kind = 'foo'; o.nextPageToken = 'foo'; } @@ -2966,7 +3070,7 @@ api.OperationsListResponse buildOperationsListResponse() { void checkOperationsListResponse(api.OperationsListResponse o) { buildCounterOperationsListResponse++; if (buildCounterOperationsListResponse < 3) { - checkUnnamed24(o.items!); + checkUnnamed28(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -3110,12 +3214,12 @@ void checkPscAutoConnectionConfig(api.PscAutoConnectionConfig o) { buildCounterPscAutoConnectionConfig--; } -core.List buildUnnamed25() => [ +core.List buildUnnamed29() => [ 'foo', 'foo', ]; -void checkUnnamed25(core.List o) { +void checkUnnamed29(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -3127,12 +3231,12 @@ void checkUnnamed25(core.List o) { ); } -core.List buildUnnamed26() => [ +core.List buildUnnamed30() => [ buildPscAutoConnectionConfig(), buildPscAutoConnectionConfig(), ]; -void checkUnnamed26(core.List o) { +void checkUnnamed30(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkPscAutoConnectionConfig(o[0]); checkPscAutoConnectionConfig(o[1]); @@ -3143,8 +3247,8 @@ api.PscConfig buildPscConfig() { final o = api.PscConfig(); buildCounterPscConfig++; if (buildCounterPscConfig < 3) { - o.allowedConsumerProjects = buildUnnamed25(); - 
o.pscAutoConnections = buildUnnamed26(); + o.allowedConsumerProjects = buildUnnamed29(); + o.pscAutoConnections = buildUnnamed30(); o.pscEnabled = true; } buildCounterPscConfig--; @@ -3154,8 +3258,8 @@ api.PscConfig buildPscConfig() { void checkPscConfig(api.PscConfig o) { buildCounterPscConfig++; if (buildCounterPscConfig < 3) { - checkUnnamed25(o.allowedConsumerProjects!); - checkUnnamed26(o.pscAutoConnections!); + checkUnnamed29(o.allowedConsumerProjects!); + checkUnnamed30(o.pscAutoConnections!); unittest.expect(o.pscEnabled!, unittest.isTrue); } buildCounterPscConfig--; @@ -3336,12 +3440,34 @@ void checkRotateServerCertificateContext(api.RotateServerCertificateContext o) { buildCounterRotateServerCertificateContext--; } -core.List buildUnnamed27() => [ +core.int buildCounterSelectedObjects = 0; +api.SelectedObjects buildSelectedObjects() { + final o = api.SelectedObjects(); + buildCounterSelectedObjects++; + if (buildCounterSelectedObjects < 3) { + o.database = 'foo'; + } + buildCounterSelectedObjects--; + return o; +} + +void checkSelectedObjects(api.SelectedObjects o) { + buildCounterSelectedObjects++; + if (buildCounterSelectedObjects < 3) { + unittest.expect( + o.database!, + unittest.equals('foo'), + ); + } + buildCounterSelectedObjects--; +} + +core.List buildUnnamed31() => [ 'foo', 'foo', ]; -void checkUnnamed27(core.List o) { +void checkUnnamed31(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -3353,34 +3479,34 @@ void checkUnnamed27(core.List o) { ); } -core.List buildUnnamed28() => [ +core.List buildUnnamed32() => [ buildDatabaseFlags(), buildDatabaseFlags(), ]; -void checkUnnamed28(core.List o) { +void checkUnnamed32(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkDatabaseFlags(o[0]); checkDatabaseFlags(o[1]); } -core.List buildUnnamed29() => [ +core.List buildUnnamed33() => [ buildDenyMaintenancePeriod(), buildDenyMaintenancePeriod(), ]; -void checkUnnamed29(core.List o) { +void checkUnnamed33(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkDenyMaintenancePeriod(o[0]); checkDenyMaintenancePeriod(o[1]); } -core.Map buildUnnamed30() => { +core.Map buildUnnamed34() => { 'x': 'foo', 'y': 'foo', }; -void checkUnnamed30(core.Map o) { +void checkUnnamed34(core.Map o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o['x']!, @@ -3400,7 +3526,7 @@ api.Settings buildSettings() { o.activationPolicy = 'foo'; o.activeDirectoryConfig = buildSqlActiveDirectoryConfig(); o.advancedMachineFeatures = buildAdvancedMachineFeatures(); - o.authorizedGaeApplications = buildUnnamed27(); + o.authorizedGaeApplications = buildUnnamed31(); o.availabilityType = 'foo'; o.backupConfiguration = buildBackupConfiguration(); o.collation = 'foo'; @@ -3409,10 +3535,10 @@ api.Settings buildSettings() { o.dataCacheConfig = buildDataCacheConfig(); o.dataDiskSizeGb = 'foo'; o.dataDiskType = 'foo'; - o.databaseFlags = buildUnnamed28(); + o.databaseFlags = buildUnnamed32(); o.databaseReplicationEnabled = true; o.deletionProtectionEnabled = true; - o.denyMaintenancePeriods = buildUnnamed29(); + o.denyMaintenancePeriods = buildUnnamed33(); o.edition = 'foo'; o.enableDataplexIntegration = true; o.enableGoogleMlIntegration = true; @@ -3430,7 +3556,7 @@ api.Settings buildSettings() { o.storageAutoResizeLimit = 'foo'; o.tier = 'foo'; o.timeZone = 'foo'; - o.userLabels = buildUnnamed30(); + o.userLabels = buildUnnamed34(); } buildCounterSettings--; return o; @@ -3445,7 +3571,7 @@ void checkSettings(api.Settings o) { ); 
checkSqlActiveDirectoryConfig(o.activeDirectoryConfig!); checkAdvancedMachineFeatures(o.advancedMachineFeatures!); - checkUnnamed27(o.authorizedGaeApplications!); + checkUnnamed31(o.authorizedGaeApplications!); unittest.expect( o.availabilityType!, unittest.equals('foo'), @@ -3469,10 +3595,10 @@ void checkSettings(api.Settings o) { o.dataDiskType!, unittest.equals('foo'), ); - checkUnnamed28(o.databaseFlags!); + checkUnnamed32(o.databaseFlags!); unittest.expect(o.databaseReplicationEnabled!, unittest.isTrue); unittest.expect(o.deletionProtectionEnabled!, unittest.isTrue); - checkUnnamed29(o.denyMaintenancePeriods!); + checkUnnamed33(o.denyMaintenancePeriods!); unittest.expect( o.edition!, unittest.equals('foo'), @@ -3514,7 +3640,7 @@ void checkSettings(api.Settings o) { o.timeZone!, unittest.equals('foo'), ); - checkUnnamed30(o.userLabels!); + checkUnnamed34(o.userLabels!); } buildCounterSettings--; } @@ -3765,6 +3891,17 @@ void checkSqlInstancesStartExternalSyncRequest( buildCounterSqlInstancesStartExternalSyncRequest--; } +core.List buildUnnamed35() => [ + buildExternalSyncSelectedObject(), + buildExternalSyncSelectedObject(), + ]; + +void checkUnnamed35(core.List o) { + unittest.expect(o, unittest.hasLength(2)); + checkExternalSyncSelectedObject(o[0]); + checkExternalSyncSelectedObject(o[1]); +} + core.int buildCounterSqlInstancesVerifyExternalSyncSettingsRequest = 0; api.SqlInstancesVerifyExternalSyncSettingsRequest buildSqlInstancesVerifyExternalSyncSettingsRequest() { @@ -3773,6 +3910,7 @@ api.SqlInstancesVerifyExternalSyncSettingsRequest if (buildCounterSqlInstancesVerifyExternalSyncSettingsRequest < 3) { o.migrationType = 'foo'; o.mysqlSyncConfig = buildMySqlSyncConfig(); + o.selectedObjects = buildUnnamed35(); o.syncMode = 'foo'; o.syncParallelLevel = 'foo'; o.verifyConnectionOnly = true; @@ -3791,6 +3929,7 @@ void checkSqlInstancesVerifyExternalSyncSettingsRequest( unittest.equals('foo'), ); checkMySqlSyncConfig(o.mysqlSyncConfig!); + checkUnnamed35(o.selectedObjects!); unittest.expect( o.syncMode!, unittest.equals('foo'), @@ -3805,23 +3944,23 @@ void checkSqlInstancesVerifyExternalSyncSettingsRequest( buildCounterSqlInstancesVerifyExternalSyncSettingsRequest--; } -core.List buildUnnamed31() => [ +core.List buildUnnamed36() => [ buildSqlExternalSyncSettingError(), buildSqlExternalSyncSettingError(), ]; -void checkUnnamed31(core.List o) { +void checkUnnamed36(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkSqlExternalSyncSettingError(o[0]); checkSqlExternalSyncSettingError(o[1]); } -core.List buildUnnamed32() => [ +core.List buildUnnamed37() => [ buildSqlExternalSyncSettingError(), buildSqlExternalSyncSettingError(), ]; -void checkUnnamed32(core.List o) { +void checkUnnamed37(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkSqlExternalSyncSettingError(o[0]); checkSqlExternalSyncSettingError(o[1]); @@ -3833,9 +3972,9 @@ api.SqlInstancesVerifyExternalSyncSettingsResponse final o = api.SqlInstancesVerifyExternalSyncSettingsResponse(); buildCounterSqlInstancesVerifyExternalSyncSettingsResponse++; if (buildCounterSqlInstancesVerifyExternalSyncSettingsResponse < 3) { - o.errors = buildUnnamed31(); + o.errors = buildUnnamed36(); o.kind = 'foo'; - o.warnings = buildUnnamed32(); + o.warnings = buildUnnamed37(); } buildCounterSqlInstancesVerifyExternalSyncSettingsResponse--; return o; @@ -3845,12 +3984,12 @@ void checkSqlInstancesVerifyExternalSyncSettingsResponse( api.SqlInstancesVerifyExternalSyncSettingsResponse o) { 
buildCounterSqlInstancesVerifyExternalSyncSettingsResponse++; if (buildCounterSqlInstancesVerifyExternalSyncSettingsResponse < 3) { - checkUnnamed31(o.errors!); + checkUnnamed36(o.errors!); unittest.expect( o.kind!, unittest.equals('foo'), ); - checkUnnamed32(o.warnings!); + checkUnnamed37(o.warnings!); } buildCounterSqlInstancesVerifyExternalSyncSettingsResponse--; } @@ -3977,12 +4116,12 @@ void checkSqlServerDatabaseDetails(api.SqlServerDatabaseDetails o) { buildCounterSqlServerDatabaseDetails--; } -core.List buildUnnamed33() => [ +core.List buildUnnamed38() => [ 'foo', 'foo', ]; -void checkUnnamed33(core.List o) { +void checkUnnamed38(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -4000,7 +4139,7 @@ api.SqlServerUserDetails buildSqlServerUserDetails() { buildCounterSqlServerUserDetails++; if (buildCounterSqlServerUserDetails < 3) { o.disabled = true; - o.serverRoles = buildUnnamed33(); + o.serverRoles = buildUnnamed38(); } buildCounterSqlServerUserDetails--; return o; @@ -4010,11 +4149,33 @@ void checkSqlServerUserDetails(api.SqlServerUserDetails o) { buildCounterSqlServerUserDetails++; if (buildCounterSqlServerUserDetails < 3) { unittest.expect(o.disabled!, unittest.isTrue); - checkUnnamed33(o.serverRoles!); + checkUnnamed38(o.serverRoles!); } buildCounterSqlServerUserDetails--; } +core.int buildCounterSqlSubOperationType = 0; +api.SqlSubOperationType buildSqlSubOperationType() { + final o = api.SqlSubOperationType(); + buildCounterSqlSubOperationType++; + if (buildCounterSqlSubOperationType < 3) { + o.maintenanceType = 'foo'; + } + buildCounterSqlSubOperationType--; + return o; +} + +void checkSqlSubOperationType(api.SqlSubOperationType o) { + buildCounterSqlSubOperationType++; + if (buildCounterSqlSubOperationType < 3) { + unittest.expect( + o.maintenanceType!, + unittest.equals('foo'), + ); + } + buildCounterSqlSubOperationType--; +} + core.int buildCounterSslCert = 0; api.SslCert buildSslCert() { final o = api.SslCert(); @@ -4178,12 +4339,12 @@ void checkSslCertsInsertResponse(api.SslCertsInsertResponse o) { buildCounterSslCertsInsertResponse--; } -core.List buildUnnamed34() => [ +core.List buildUnnamed39() => [ buildSslCert(), buildSslCert(), ]; -void checkUnnamed34(core.List o) { +void checkUnnamed39(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkSslCert(o[0]); checkSslCert(o[1]); @@ -4194,7 +4355,7 @@ api.SslCertsListResponse buildSslCertsListResponse() { final o = api.SslCertsListResponse(); buildCounterSslCertsListResponse++; if (buildCounterSslCertsListResponse < 3) { - o.items = buildUnnamed34(); + o.items = buildUnnamed39(); o.kind = 'foo'; } buildCounterSslCertsListResponse--; @@ -4204,7 +4365,7 @@ api.SslCertsListResponse buildSslCertsListResponse() { void checkSslCertsListResponse(api.SslCertsListResponse o) { buildCounterSslCertsListResponse++; if (buildCounterSslCertsListResponse < 3) { - checkUnnamed34(o.items!); + checkUnnamed39(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -4240,12 +4401,12 @@ void checkSyncFlags(api.SyncFlags o) { buildCounterSyncFlags--; } -core.List buildUnnamed35() => [ +core.List buildUnnamed40() => [ 'foo', 'foo', ]; -void checkUnnamed35(core.List o) { +void checkUnnamed40(core.List o) { unittest.expect(o, unittest.hasLength(2)); unittest.expect( o[0], @@ -4265,7 +4426,7 @@ api.Tier buildTier() { o.DiskQuota = 'foo'; o.RAM = 'foo'; o.kind = 'foo'; - o.region = buildUnnamed35(); + o.region = buildUnnamed40(); o.tier = 'foo'; } buildCounterTier--; @@ -4287,7 +4448,7 @@ void 
checkTier(api.Tier o) { o.kind!, unittest.equals('foo'), ); - checkUnnamed35(o.region!); + checkUnnamed40(o.region!); unittest.expect( o.tier!, unittest.equals('foo'), @@ -4296,12 +4457,12 @@ void checkTier(api.Tier o) { buildCounterTier--; } -core.List buildUnnamed36() => [ +core.List buildUnnamed41() => [ buildTier(), buildTier(), ]; -void checkUnnamed36(core.List o) { +void checkUnnamed41(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkTier(o[0]); checkTier(o[1]); @@ -4312,7 +4473,7 @@ api.TiersListResponse buildTiersListResponse() { final o = api.TiersListResponse(); buildCounterTiersListResponse++; if (buildCounterTiersListResponse < 3) { - o.items = buildUnnamed36(); + o.items = buildUnnamed41(); o.kind = 'foo'; } buildCounterTiersListResponse--; @@ -4322,7 +4483,7 @@ api.TiersListResponse buildTiersListResponse() { void checkTiersListResponse(api.TiersListResponse o) { buildCounterTiersListResponse++; if (buildCounterTiersListResponse < 3) { - checkUnnamed36(o.items!); + checkUnnamed41(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -4457,12 +4618,12 @@ void checkUserPasswordValidationPolicy(api.UserPasswordValidationPolicy o) { buildCounterUserPasswordValidationPolicy--; } -core.List buildUnnamed37() => [ +core.List buildUnnamed42() => [ buildUser(), buildUser(), ]; -void checkUnnamed37(core.List o) { +void checkUnnamed42(core.List o) { unittest.expect(o, unittest.hasLength(2)); checkUser(o[0]); checkUser(o[1]); @@ -4473,7 +4634,7 @@ api.UsersListResponse buildUsersListResponse() { final o = api.UsersListResponse(); buildCounterUsersListResponse++; if (buildCounterUsersListResponse < 3) { - o.items = buildUnnamed37(); + o.items = buildUnnamed42(); o.kind = 'foo'; o.nextPageToken = 'foo'; } @@ -4484,7 +4645,7 @@ api.UsersListResponse buildUsersListResponse() { void checkUsersListResponse(api.UsersListResponse o) { buildCounterUsersListResponse++; if (buildCounterUsersListResponse < 3) { - checkUnnamed37(o.items!); + checkUnnamed42(o.items!); unittest.expect( o.kind!, unittest.equals('foo'), @@ -4841,6 +5002,16 @@ void main() { }); }); + unittest.group('obj-schema-ExternalSyncSelectedObject', () { + unittest.test('to-json--from-json', () async { + final o = buildExternalSyncSelectedObject(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.ExternalSyncSelectedObject.fromJson( + oJson as core.Map); + checkExternalSyncSelectedObject(od); + }); + }); + unittest.group('obj-schema-FailoverContext', () { unittest.test('to-json--from-json', () async { final o = buildFailoverContext(); @@ -5354,6 +5525,16 @@ void main() { }); }); + unittest.group('obj-schema-SelectedObjects', () { + unittest.test('to-json--from-json', () async { + final o = buildSelectedObjects(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.SelectedObjects.fromJson( + oJson as core.Map); + checkSelectedObjects(od); + }); + }); + unittest.group('obj-schema-Settings', () { unittest.test('to-json--from-json', () async { final o = buildSettings(); @@ -5526,6 +5707,16 @@ void main() { }); }); + unittest.group('obj-schema-SqlSubOperationType', () { + unittest.test('to-json--from-json', () async { + final o = buildSqlSubOperationType(); + final oJson = convert.jsonDecode(convert.jsonEncode(o)); + final od = api.SqlSubOperationType.fromJson( + oJson as core.Map); + checkSqlSubOperationType(od); + }); + }); + unittest.group('obj-schema-SslCert', () { unittest.test('to-json--from-json', () async { final o = buildSslCert(); @@ -7294,8 +7485,6 @@ 
void main() { final res = api.SQLAdminApi(mock).instances; final arg_project = 'foo'; final arg_instance = 'foo'; - final arg_finalBackupExpiryTime = 'foo'; - final arg_finalBackupTtlDays = 'foo'; final arg_$fields = 'foo'; mock.register(unittest.expectAsync2((http.BaseRequest req, json) { final path = req.url.path; @@ -7348,14 +7537,6 @@ void main() { ); } } - unittest.expect( - queryMap['finalBackupExpiryTime']!.first, - unittest.equals(arg_finalBackupExpiryTime), - ); - unittest.expect( - queryMap['finalBackupTtlDays']!.first, - unittest.equals(arg_finalBackupTtlDays), - ); unittest.expect( queryMap['fields']!.first, unittest.equals(arg_$fields), @@ -7367,10 +7548,8 @@ void main() { final resp = convert.json.encode(buildOperation()); return async.Future.value(stringResponse(200, h, resp)); }), true); - final response = await res.delete(arg_project, arg_instance, - finalBackupExpiryTime: arg_finalBackupExpiryTime, - finalBackupTtlDays: arg_finalBackupTtlDays, - $fields: arg_$fields); + final response = + await res.delete(arg_project, arg_instance, $fields: arg_$fields); checkOperation(response as api.Operation); }); diff --git a/overrides/i322.diff b/overrides/i322.diff index 5f2775d5a..b766cf746 100644 --- a/overrides/i322.diff +++ b/overrides/i322.diff @@ -1,8 +1,8 @@ diff --git a/discovery/googleapis/chromemanagement__v1.json b/discovery/googleapis/chromemanagement__v1.json -index 537f0163d..e33fcefab 100644 +index 688456f2d..29e3dfc9b 100644 --- a/discovery/googleapis/chromemanagement__v1.json +++ b/discovery/googleapis/chromemanagement__v1.json -@@ -17,7 +17,7 @@ +@@ -20,7 +20,7 @@ "canonicalName": "Chrome Management", "description": "The Chrome Management API is a suite of services that allows Chrome administrators to view, manage and gain insights on their Chrome OS and Chrome Browser devices.", "discoveryVersion": "v1", @@ -12,7 +12,7 @@ index 537f0163d..e33fcefab 100644 "x16": "http://www.google.com/images/icons/product/search-16.gif", "x32": "http://www.google.com/images/icons/product/search-32.gif" diff --git a/discovery/googleapis/chromepolicy__v1.json b/discovery/googleapis/chromepolicy__v1.json -index 8a9583c22..48cd14409 100644 +index ab44260d2..89ac4ce6b 100644 --- a/discovery/googleapis/chromepolicy__v1.json +++ b/discovery/googleapis/chromepolicy__v1.json @@ -17,7 +17,7 @@ @@ -25,7 +25,7 @@ index 8a9583c22..48cd14409 100644 "x16": "http://www.google.com/images/icons/product/search-16.gif", "x32": "http://www.google.com/images/icons/product/search-32.gif" diff --git a/discovery/googleapis/sts__v1.json b/discovery/googleapis/sts__v1.json -index 4817ca793..dd48d71a9 100644 +index 8854b81c9..a943922e7 100644 --- a/discovery/googleapis/sts__v1.json +++ b/discovery/googleapis/sts__v1.json @@ -5,7 +5,7 @@ @@ -34,11 +34,11 @@ index 4817ca793..dd48d71a9 100644 "discoveryVersion": "v1", - "documentationLink": "http://cloud.google.com/iam/docs/workload-identity-federation", + "documentationLink": "https://cloud.google.com/iam/docs/workload-identity-federation", - "icons": { - "x16": "http://www.google.com/images/icons/product/search-16.gif", - "x32": "http://www.google.com/images/icons/product/search-32.gif" + "endpoints": [ + { + "description": "Regional Endpoint", diff --git a/discovery/googleapis/versionhistory__v1.json b/discovery/googleapis/versionhistory__v1.json -index 9c8e53aa0..60fdc75ba 100644 +index b58039a1c..96384ec58 100644 --- a/discovery/googleapis/versionhistory__v1.json +++ b/discovery/googleapis/versionhistory__v1.json @@ -5,7 +5,7 @@ @@ -46,7 +46,7 @@ index 
9c8e53aa0..60fdc75ba 100644 "description": "Version History API - Prod", "discoveryVersion": "v1", - "documentationLink": "https://developers.chrome.com/versionhistory", -+ "documentationLink": "https://developer.chrome.com/docs/versionhistory/", ++ "documentationLink": "https://developer.chrome.com/docs/web-platform/versionhistory/guide", "icons": { "x16": "http://www.google.com/images/icons/product/search-16.gif", "x32": "http://www.google.com/images/icons/product/search-32.gif"